Commit 01a72668 authored by Laura A DeCicco

Removing imports: drop roxygen @importFrom tags in favor of explicit package::function calls

parent f5481e00
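
The commit applies one pattern throughout: roxygen @importFrom tags are removed from the documentation blocks, and the corresponding calls are written with an explicit package prefix instead, so those functions no longer need to be imported through NAMESPACE. A minimal sketch of the before/after (illustrative only, not a line taken from this commit):

    # before: roxygen tag plus a bare call
    #' @importFrom lubridate parse_date_time
    dates <- parse_date_time(dateInput, "%Y-%m-%d")

    # after: no roxygen tag, explicit namespace in the call
    dates <- lubridate::parse_date_time(dateInput, "%Y-%m-%d")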
......@@ -17,7 +17,8 @@ Authors@R: c(person("Robert", "Hirsch", role = c("aut"),
person("Lindsay","Carr", role=c("ctb"),
email = "lcarr@usgs.gov"),
person("David","Watkins", role=c("aut"),
email = "wwatkins@usgs.gov"))
email = "wwatkins@usgs.gov",
comment=c(ORCID="0000-0002-7544-0700")))
Description: Collection of functions to help retrieve U.S. Geological Survey
(USGS) and U.S. Environmental Protection Agency (EPA) water quality and
hydrology data from web services. USGS web services are discovered from
......
......@@ -46,42 +46,18 @@ export(whatWQPsites)
export(zeroPad)
import(stats)
import(utils)
importFrom(curl,curl_version)
importFrom(dplyr,arrange)
importFrom(dplyr,bind_cols)
importFrom(dplyr,bind_rows)
importFrom(dplyr,everything)
importFrom(dplyr,full_join)
importFrom(dplyr,left_join)
importFrom(dplyr,mutate)
importFrom(dplyr,mutate_)
importFrom(dplyr,mutate_each_)
importFrom(dplyr,select)
importFrom(dplyr,select_)
importFrom(httr,GET)
importFrom(httr,HEAD)
importFrom(httr,POST)
importFrom(httr,RETRY)
importFrom(httr,content)
importFrom(httr,content_type)
importFrom(httr,headers)
importFrom(httr,status_code)
importFrom(httr,stop_for_status)
importFrom(httr,user_agent)
importFrom(httr,write_disk)
importFrom(jsonlite,fromJSON)
importFrom(lubridate,fast_strptime)
importFrom(lubridate,parse_date_time)
importFrom(readr,col_character)
importFrom(readr,col_number)
importFrom(readr,cols)
importFrom(readr,problems)
importFrom(readr,read_delim)
importFrom(readr,read_lines)
importFrom(reshape2,dcast)
importFrom(reshape2,melt)
importFrom(stats,na.omit)
importFrom(tools,file_ext)
importFrom(xml2,read_xml)
importFrom(xml2,xml_attr)
importFrom(xml2,xml_attrs)
......
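
Because NAMESPACE is generated by roxygen2, these importFrom() entries drop out automatically once the @importFrom tags are deleted from the R source files and the documentation is rebuilt. A minimal sketch, assuming the usual roxygen2/devtools workflow:

    # regenerate NAMESPACE and the Rd files after editing the roxygen tags
    devtools::document()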
......@@ -5,7 +5,6 @@
#'
#' @param values named list with arguments to send to the Water Quality Portal
#' @return values named list with corrected arguments to send to the Water Quality Portal
#' @importFrom lubridate parse_date_time
#' @export
#' @keywords internal
#' @examples
......@@ -30,7 +29,7 @@ checkWQPdates <- function(values){
splitDates <- unlist(strsplit(dateInput, "-"))
if(length(splitDates) == 3){
if(nchar(splitDates[1]) == 4){ #R object
dates <- as.Date(parse_date_time(dateInput, "%Y-%m-%d"))
dates <- as.Date(lubridate::parse_date_time(dateInput, "%Y-%m-%d"))
dates <- format(dates, format="%m-%d-%Y")
values[i] <- dates
} else if (nchar(splitDates[3]) != 4){ #The way WQP wants it == 4, so this is probably a 2 digit year or something
......
......@@ -5,16 +5,6 @@
#'
#' @param obs_url character containing the url for the retrieval
#' @param \dots information to pass to header request
#' @importFrom httr GET
#' @importFrom httr POST
#' @importFrom httr RETRY
#' @importFrom httr user_agent
#' @importFrom httr stop_for_status
#' @importFrom httr status_code
#' @importFrom httr headers
#' @importFrom httr content
#' @importFrom httr content_type
#' @importFrom curl curl_version
#' @importFrom xml2 xml_text
#' @importFrom xml2 xml_child
#' @importFrom xml2 read_xml
......@@ -34,20 +24,20 @@ getWebServiceData <- function(obs_url, ...){
returnedList <- retryGetOrPost(obs_url, ...)
if(status_code(returnedList) == 400){
response400 <- content(returnedList, type="text", encoding = "UTF-8")
if(httr::status_code(returnedList) == 400){
response400 <- httr::content(returnedList, type="text", encoding = "UTF-8")
statusReport <- xml_text(xml_child(read_xml(response400), 2)) # making assumption that - body is second node
statusMsg <- gsub(pattern=", server=.*", replacement="", x = statusReport)
stop(statusMsg)
} else if(status_code(returnedList) != 200){
} else if(httr::status_code(returnedList) != 200){
message("For: ", obs_url,"\n")
stop_for_status(returnedList)
httr::stop_for_status(returnedList)
} else {
headerInfo <- headers(returnedList)
headerInfo <- httr::headers(returnedList)
if(headerInfo$`content-type` %in% c("text/tab-separated-values;charset=UTF-8")){
returnedDoc <- content(returnedList, type="text",encoding = "UTF-8")
returnedDoc <- httr::content(returnedList, type="text",encoding = "UTF-8")
} else if (headerInfo$`content-type` %in%
c("application/zip",
"application/zip;charset=UTF-8",
......@@ -61,7 +51,7 @@ getWebServiceData <- function(obs_url, ...){
} else {
returnedDoc <- content(returnedList,encoding = "UTF-8")
returnedDoc <- httr::content(returnedList,encoding = "UTF-8")
if(grepl("No sites/data found using the selection criteria specified", returnedDoc)){
message(returnedDoc)
}
......@@ -86,7 +76,7 @@ getWebServiceData <- function(obs_url, ...){
default_ua <- function() {
versions <- c(
libcurl = curl_version()$version,
libcurl = curl::curl_version()$version,
httr = as.character(packageVersion("httr")),
dataRetrieval = as.character(packageVersion("dataRetrieval"))
)
......@@ -96,11 +86,9 @@ default_ua <- function() {
#' getting header information from a WQP query
#'
#'@param url the query url
#'@importFrom httr HEAD
#'@importFrom httr headers
getQuerySummary <- function(url){
queryHEAD <- HEAD(url)
retquery <- headers(queryHEAD)
queryHEAD <- httr::HEAD(url)
retquery <- httr::headers(queryHEAD)
retquery[grep("-count",names(retquery))] <- as.numeric(retquery[grep("-count",names(retquery))])
......@@ -114,13 +102,13 @@ getQuerySummary <- function(url){
retryGetOrPost <- function(obs_url, ...) {
resp <- NULL
if (nchar(obs_url) < 2048 || grepl(pattern = "ngwmn", x = obs_url)) {
resp <- RETRY("GET", obs_url, ..., user_agent(default_ua()))
resp <- httr::RETRY("GET", obs_url, ..., httr::user_agent(default_ua()))
} else {
split <- strsplit(obs_url, "?", fixed=TRUE)
obs_url <- split[[1]][1]
query <- split[[1]][2]
resp <- RETRY("POST", obs_url, ..., body = query,
content_type("application/x-www-form-urlencoded"), user_agent(default_ua()))
resp <- httr::RETRY("POST", obs_url, ..., body = query,
httr::content_type("application/x-www-form-urlencoded"), user_agent(default_ua()))
}
return(resp)
}
\ No newline at end of file
......@@ -18,7 +18,6 @@
#' @importFrom xml2 xml_text
#' @importFrom xml2 xml_attr
#' @importFrom xml2 xml_find_first
#' @importFrom lubridate parse_date_time
#' @examples
#' \dontrun{
#' obs_url <- paste("http://cida.usgs.gov/ngwmn_cache/sos?request=GetObservation",
......@@ -137,7 +136,6 @@ importNGWMN <- function(input, asDateTime=FALSE, tz="UTC"){
#' "America/Anchorage","America/Honolulu","America/Jamaica","America/Managua","America/Phoenix", and "America/Metlakatla"
#' @importFrom xml2 xml_attr xml_find_all xml_text
#' @importFrom dplyr mutate
#' @importFrom lubridate parse_date_time
#' @export
#' @examples
#' baseURL <- "https://waterservices.usgs.gov/nwis/dv/?format=waterml,2.0"
......@@ -187,7 +185,7 @@ importWaterML2 <- function(input, asDateTime=FALSE, tz="UTC") {
timeDF$dateTime[logicVec] <- rawTime[logicVec]
}
if(asDateTime){
timeDF$dateTime <- parse_date_time(timeDF$dateTime, c("%Y","%Y-%m-%d","%Y-%m-%dT%H:%M","%Y-%m-%dT%H:%M:%S",
timeDF$dateTime <- lubridate::parse_date_time(timeDF$dateTime, c("%Y","%Y-%m-%d","%Y-%m-%dT%H:%M","%Y-%m-%dT%H:%M:%S",
"%Y-%m-%dT%H:%M:%OS","%Y-%m-%dT%H:%M:%OS%z"), exact = TRUE)
#^^setting tz in as.POSIXct just sets the attribute, does not convert the time!
attr(timeDF$dateTime, 'tzone') <- tz
......
......@@ -44,8 +44,6 @@
#' @export
#' @import utils
#' @import stats
#' @importFrom lubridate parse_date_time
#' @importFrom dplyr left_join
#' @importFrom readr read_lines
#' @importFrom readr read_delim
#' @importFrom readr problems
......@@ -167,7 +165,7 @@ importRDB1 <- function(obs_url, asDateTime=TRUE, convertType = TRUE, tz="UTC"){
if(length(badCols) > 0){
readr.data <- fixErrors(readr.data, readr.data.char, "no trailing characters", as.numeric)
readr.data <- fixErrors(readr.data, readr.data.char, "date like", parse_date_time, c("%Y-%m-%d %H:%M:%S","%Y-%m-%d","%Y"))
readr.data <- fixErrors(readr.data, readr.data.char, "date like", lubridate::parse_date_time, c("%Y-%m-%d %H:%M:%S","%Y-%m-%d","%Y"))
}
if(length(grep("_va", names(readr.data))) > 0 &&
......@@ -209,7 +207,7 @@ importRDB1 <- function(obs_url, asDateTime=TRUE, convertType = TRUE, tz="UTC"){
if(all(c(paste0(i,"_dt"),paste0(i,"_tm")) %in% header.names)){
varname <- paste0(i,"_dateTime")
varval <- suppressWarnings(parse_date_time(paste(readr.data[,paste0(i,"_dt")],readr.data[,paste0(i,"_tm")]), c("%Y-%m-%d %H:%M:%S","%Y-%m-%d %H:%M"), tz = "UTC"))
varval <- suppressWarnings(lubridate::parse_date_time(paste(readr.data[,paste0(i,"_dt")],readr.data[,paste0(i,"_tm")]), c("%Y-%m-%d %H:%M:%S","%Y-%m-%d %H:%M"), tz = "UTC"))
if(!all(is.na(varval))){
readr.data[,varname] <- varval
......@@ -236,7 +234,7 @@ importRDB1 <- function(obs_url, asDateTime=TRUE, convertType = TRUE, tz="UTC"){
}
if("DATE" %in% header.names){
readr.data[,"DATE"] <- parse_date_time(readr.data[,"DATE"], "Ymd")
readr.data[,"DATE"] <- lubridate::parse_date_time(readr.data[,"DATE"], "Ymd")
}
if(all(c("DATE","TIME","TZCD") %in% header.names)){
......@@ -291,7 +289,7 @@ convertTZ <- function(df, tz.name, date.time.cols, tz, flip.cols=TRUE){
code=c("EST","EDT","CST","CDT","MST","MDT","PST","PDT","AKST","AKDT","HAST","HST","", NA),
stringsAsFactors = FALSE)
offset <- left_join(df[,tz.name,drop=FALSE],offsetLibrary, by=setNames("code",tz.name))
offset <- dplyr::left_join(df[,tz.name,drop=FALSE],offsetLibrary, by=setNames("code",tz.name))
offset <- offset$offset
df[,paste0(tz.name,"_reported")] <- df[,tz.name,drop=FALSE]
......
......@@ -22,12 +22,6 @@
#' @importFrom dplyr mutate_
#' @importFrom dplyr mutate_each_
#' @importFrom dplyr select_
#' @importFrom dplyr left_join
#' @importFrom lubridate parse_date_time
#' @importFrom lubridate fast_strptime
#' @importFrom httr GET
#' @importFrom httr user_agent
#' @importFrom httr write_disk
#' @examples
#' # These examples require an internet connection to run
#'
......@@ -57,7 +51,7 @@ importWQP <- function(obs_url, zip=FALSE, tz="UTC"){
message("zip encoding access still in development")
temp <- tempfile()
temp <- paste0(temp,".zip")
doc <- getWebServiceData(obs_url, write_disk(temp))
doc <- getWebServiceData(obs_url, httr::write_disk(temp))
headerInfo <- headers(doc)
doc <- unzip(temp, exdir=tempdir())
unlink(temp)
......@@ -116,23 +110,22 @@ importWQP <- function(obs_url, zip=FALSE, tz="UTC"){
code=c("EST","EDT","CST","CDT","MST","MDT","PST","PDT","AKST","AKDT","HAST","HST","", NA),
stringsAsFactors = FALSE)
retval <- left_join(retval, offsetLibrary, by=c("ActivityStartTime/TimeZoneCode"="code"))
retval <- dplyr::left_join(retval, offsetLibrary, by=c("ActivityStartTime/TimeZoneCode"="code"))
names(retval)[names(retval) == "offset"] <- "timeZoneStart"
retval <- left_join(retval, offsetLibrary, by=c("ActivityEndTime/TimeZoneCode"="code"))
retval <- dplyr::left_join(retval, offsetLibrary, by=c("ActivityEndTime/TimeZoneCode"="code"))
names(retval)[names(retval) == "offset"] <- "timeZoneEnd"
dateCols <- c("ActivityStartDate","ActivityEndDate","AnalysisStartDate","PreparationStartDate")
for(i in dateCols){
retval[,i] <- suppressWarnings(as.Date(parse_date_time(retval[[i]], c("Ymd", "mdY"))))
retval[,i] <- suppressWarnings(as.Date(lubridate::parse_date_time(retval[[i]], c("Ymd", "mdY"))))
}
retval <- mutate_(retval, ActivityStartDateTime=~paste(ActivityStartDate, `ActivityStartTime/Time`))
retval <- mutate_(retval, ActivityEndDateTime=~paste(ActivityEndDate, `ActivityEndTime/Time`))
retval <- mutate_(retval, ActivityStartDateTime=~fast_strptime(ActivityStartDateTime, '%Y-%m-%d %H:%M:%S')+60*60*timeZoneStart)
retval <- mutate_(retval, ActivityEndDateTime=~fast_strptime(ActivityEndDateTime, '%Y-%m-%d %H:%M:%S')+60*60*timeZoneStart)
retval <- mutate_(retval, ActivityStartDateTime=~lubridate::fast_strptime(ActivityStartDateTime, '%Y-%m-%d %H:%M:%S')+60*60*timeZoneStart)
retval <- mutate_(retval, ActivityEndDateTime=~lubridate::fast_strptime(ActivityEndDateTime, '%Y-%m-%d %H:%M:%S')+60*60*timeZoneStart)
retval <- select_(retval, ~-timeZoneEnd, ~-timeZoneStart)
}
......
......@@ -41,10 +41,6 @@
#' @export
#' @import utils
#' @import stats
#' @importFrom lubridate parse_date_time
#' @importFrom dplyr full_join
#' @importFrom dplyr bind_rows
#' @importFrom dplyr arrange
#' @importFrom xml2 read_xml
#' @importFrom xml2 xml_find_all
#' @importFrom xml2 xml_children
......@@ -100,7 +96,7 @@
#' #raw XML
#' url <- constructNWISURL(service = 'dv', siteNumber = '02319300', parameterCd = "00060",
#' startDate = "2014-01-01", endDate = "2014-01-01")
#' raw <- content(GET(url), as = 'raw')
#' raw <- content(httr::GET(url), as = 'raw')
#' rawParsed <- importWaterML1(raw)
#' }
#' filePath <- system.file("extdata", package="dataRetrieval")
......@@ -186,7 +182,7 @@ importWaterML1 <- function(obs_url,asDateTime=FALSE, tz="UTC"){
dateTime <- xml_attr(obs,"dateTime")
if(asDateTime){
numChar <- nchar(dateTime)
dateTime <- parse_date_time(dateTime, c("%Y","%Y-%m-%d","%Y-%m-%dT%H:%M",
dateTime <- lubridate::parse_date_time(dateTime, c("%Y","%Y-%m-%d","%Y-%m-%dT%H:%M",
"%Y-%m-%dT%H:%M:%S","%Y-%m-%dT%H:%M:%OS",
"%Y-%m-%dT%H:%M:%OS%z"), exact = TRUE)
if(any(numChar < 20) & any(numChar > 16)){
......@@ -232,7 +228,7 @@ importWaterML1 <- function(obs_url,asDateTime=FALSE, tz="UTC"){
if(is.null(obsDF)){
obsDF <- valParentDF
}else{
obsDF <- full_join(obsDF, valParentDF, by = c("dateTime","tz_cd"))
obsDF <- dplyr::full_join(obsDF, valParentDF, by = c("dateTime","tz_cd"))
}
}else{
#need column names for joining later
......@@ -295,17 +291,17 @@ importWaterML1 <- function(obs_url,asDateTime=FALSE, tz="UTC"){
deleteCols <- grepl(obsColName,colnames(sameSite))
sameSite <- sameSite[,!deleteCols]
sameSite_simNames <- intersect(colnames(sameSite), colnames(df))
sameSite <- full_join(sameSite, df, by = sameSite_simNames)
sameSite <- dplyr::full_join(sameSite, df, by = sameSite_simNames)
sameSite <- sameSite[order(as.Date(sameSite$dateTime)),]
mergedDF <- bind_rows(sameSite, diffSite)
mergedDF <- dplyr::bind_rows(sameSite, diffSite)
}else{
similarNames <- intersect(colnames(mergedDF), colnames(df))
mergedDF <- full_join(mergedDF, df, by=similarNames)
mergedDF <- dplyr::full_join(mergedDF, df, by=similarNames)
}
}
mergedSite <- full_join(mergedSite, siteDF, by = colnames(mergedSite))
mergedVar <- full_join(mergedVar, varText, by = colnames(mergedVar))
mergedStat <- full_join(mergedStat, statDF, by = colnames(mergedStat))
mergedSite <- dplyr::full_join(mergedSite, siteDF, by = colnames(mergedSite))
mergedVar <- dplyr::full_join(mergedVar, varText, by = colnames(mergedVar))
mergedStat <- dplyr::full_join(mergedStat, statDF, by = colnames(mergedStat))
}
}
......@@ -323,7 +319,7 @@ importWaterML1 <- function(obs_url,asDateTime=FALSE, tz="UTC"){
mergedNames <- names(mergedDF)
tzLoc <- grep("tz_cd", names(mergedDF))
mergedDF <- mergedDF[c(mergedNames[-tzLoc],mergedNames[tzLoc])]
mergedDF <- arrange(mergedDF,site_no, dateTime)
mergedDF <- dplyr::arrange(mergedDF,site_no, dateTime)
names(mergedDF) <- make.names(names(mergedDF))
......
......@@ -13,9 +13,6 @@
#' for more information on time zones.
#' @import utils
#' @importFrom dplyr mutate
#' @importFrom dplyr bind_rows
#' @importFrom dplyr bind_cols
#' @importFrom stats na.omit
#' @export
#' @examples
#' \dontrun{
......@@ -58,14 +55,14 @@ readNGWMNdata <- function(service, ..., asDateTime = TRUE, tz = "UTC"){
#these attributes are pulled out and saved when doing binds to be reattached
attrs <- c("url","gml:identifier","generationDate","responsibleParty", "contact")
featureID <- na.omit(gsub(":",".",dots[['siteNumbers']]))
featureID <- stats::na.omit(gsub(":",".",dots[['siteNumbers']]))
for(f in featureID){
obsFID <- retrieveObservation(featureID = f, asDateTime, attrs, tz = tz)
obsFIDattr <- saveAttrs(attrs, obsFID)
obsFID <- removeAttrs(attrs, obsFID)
allObs <- bind_rows(allObs, obsFID)
allAttrs <- bind_rows(allAttrs, obsFIDattr)
allObs <- dplyr::bind_rows(allObs, obsFID)
allAttrs <- dplyr::bind_rows(allAttrs, obsFIDattr)
}
......@@ -83,7 +80,7 @@ readNGWMNdata <- function(service, ..., asDateTime = TRUE, tz = "UTC"){
} else if (service == "featureOfInterest") {
if("siteNumbers" %in% names(dots)){
featureID <- na.omit(gsub(":",".",dots[['siteNumbers']]))
featureID <- stats::na.omit(gsub(":",".",dots[['siteNumbers']]))
allSites <- tryCatch({
retrieveFeatureOfInterest(featureID = featureID)
})
......
......@@ -35,7 +35,6 @@
#' }
#'
#' @seealso \code{\link{renameNWISColumns}}, \code{\link{importWaterML1}}
#' @importFrom lubridate parse_date_time
#' @export
#' @keywords data import USGS web service
#' @examples
......
......@@ -73,9 +73,6 @@
#' variableInfo \tab data frame \tab A data frame containing information on the requested parameters \cr
#' }
#' @export
#' @importFrom reshape2 melt
#' @importFrom reshape2 dcast
#' @importFrom dplyr left_join
#' @seealso \code{\link{readWQPdata}}, \code{\link{whatWQPsites}},
#' \code{\link{readWQPqw}}, \code{\link{constructNWISURL}}
#' @examples
......@@ -173,7 +170,7 @@ readNWISqw <- function (siteNumbers,parameterCd,startDate="",endDate="",
if( !(is.null(siteNumbers)) && !(is.na(siteNumbers)) & length(siteNumbers) > 0){
siteInfo <- readNWISsite(siteNumbers)
if(nrow(data) > 0){
siteInfo <- left_join(unique(data[,c("agency_cd","site_no")]),siteInfo, by=c("agency_cd","site_no"))
siteInfo <- dplyr::left_join(unique(data[,c("agency_cd","site_no")]),siteInfo, by=c("agency_cd","site_no"))
}
attr(data, "siteInfo") <- siteInfo
}
......
......@@ -129,7 +129,6 @@ readNWISuv <- function (siteNumbers,parameterCd,startDate="",endDate="", tz="UTC
#' }
#' @seealso \code{\link{constructNWISURL}}, \code{\link{importRDB1}}
#' @export
#' @importFrom dplyr left_join
#' @examples
#' site_ids <- c('01594440','040851325')
#' \dontrun{
......@@ -166,7 +165,7 @@ readNWISpeak <- function (siteNumbers,startDate="",endDate="", asDateTime=TRUE,
siteInfo <- readNWISsite(siteNumbers)
siteInfo <- left_join(unique(data[,c("agency_cd","site_no")]),siteInfo, by=c("agency_cd","site_no"))
siteInfo <- dplyr::left_join(unique(data[,c("agency_cd","site_no")]),siteInfo, by=c("agency_cd","site_no"))
attr(data, "siteInfo") <- siteInfo
attr(data, "variableInfo") <- NULL
......@@ -291,7 +290,6 @@ readNWISrating <- function (siteNumber,type="base",convertType = TRUE){
#' }
#' @seealso \code{\link{constructNWISURL}}, \code{\link{importRDB1}}
#' @export
#' @importFrom dplyr left_join
#' @examples
#' site_ids <- c('01594440','040851325')
#' \dontrun{
......@@ -334,7 +332,7 @@ readNWISmeas <- function (siteNumbers,startDate="",endDate="", tz="UTC", expande
siteInfo <- readNWISsite(siteNumbers)
siteInfo <- left_join(unique(data[,c("agency_cd","site_no")]),siteInfo, by=c("agency_cd","site_no"))
siteInfo <- dplyr::left_join(unique(data[,c("agency_cd","site_no")]),siteInfo, by=c("agency_cd","site_no"))
attr(data, "url") <- url
attr(data, "comment") <- comment
......@@ -396,7 +394,6 @@ readNWISmeas <- function (siteNumbers,startDate="",endDate="", tz="UTC", expande
#'
#' @seealso \code{\link{constructNWISURL}}, \code{\link{importRDB1}}
#' @export
#' @importFrom dplyr left_join
#' @examples
#' site_id <- "434400121275801"
#' \dontrun{
......@@ -422,7 +419,7 @@ readNWISgwl <- function (siteNumbers,startDate="",endDate="", convertType = TRUE
}
}
siteInfo <- readNWISsite(siteNumbers)
siteInfo <- left_join(unique(data[,c("agency_cd","site_no")]),siteInfo, by=c("agency_cd","site_no"))
siteInfo <- dplyr::left_join(unique(data[,c("agency_cd","site_no")]),siteInfo, by=c("agency_cd","site_no"))
attr(data, "siteInfo") <- siteInfo
}
......@@ -467,7 +464,6 @@ readNWISgwl <- function (siteNumbers,startDate="",endDate="", convertType = TRUE
#' }
#' @seealso \code{\link{constructNWISURL}}, \code{\link{importRDB1}}
#' @export
#' @importFrom dplyr left_join
#' @examples
#' \dontrun{
#' x1 <- readNWISstat(siteNumbers=c("02319394"),
......@@ -505,7 +501,7 @@ readNWISstat <- function(siteNumbers, parameterCd, startDate = "", endDate = "",
siteInfo <- readNWISsite(siteNumbers)
if(nrow(data) > 0){
siteInfo <- left_join(unique(data[,c("agency_cd","site_no")]),siteInfo, by=c("agency_cd","site_no"))
siteInfo <- dplyr::left_join(unique(data[,c("agency_cd","site_no")]),siteInfo, by=c("agency_cd","site_no"))
}
attr(data, "siteInfo") <- siteInfo
......
......@@ -47,7 +47,6 @@
#' queryTime \tab POSIXct \tab The time the data was returned \cr
#' }
#' @export
#' @importFrom lubridate parse_date_time
#' @examples
#' \dontrun{
#' availableData <- whatNWISdata(siteNumber = '05114000')
......@@ -106,8 +105,8 @@ whatNWISdata <- function(...){
}
if(nrow(SiteFile) > 0){
SiteFile$begin_date <- as.Date(parse_date_time(SiteFile$begin_date, c("Ymd", "mdY", "Y!")))
SiteFile$end_date <- as.Date(parse_date_time(SiteFile$end_date, c("Ymd", "mdY", "Y!")))
SiteFile$begin_date <- as.Date(lubridate::parse_date_time(SiteFile$begin_date, c("Ymd", "mdY", "Y!")))
SiteFile$end_date <- as.Date(lubridate::parse_date_time(SiteFile$end_date, c("Ymd", "mdY", "Y!")))
}
return(SiteFile)
......
......@@ -28,7 +28,6 @@
#' @importFrom xml2 xml_root
#' @importFrom xml2 xml_children
#' @importFrom xml2 xml_attr
#' @importFrom dplyr bind_rows
#'
#' @examples
#' \dontrun{
......@@ -71,7 +70,7 @@ whatNWISsites <- function(...){
if(is.null(retVal)){
retVal <- df
}else{
retVal <- bind_rows(retVal, df)
retVal <- dplyr::bind_rows(retVal, df)
}
}
......
......@@ -93,8 +93,6 @@ whatWQPmetrics <- function(...){
#'
#' @export
#' @import utils
#' @importFrom tools file_ext
#' @importFrom jsonlite fromJSON
#' @examples
#' \dontrun{
#' site1 <- whatWQPdata(siteid="USGS-01594440")
......@@ -118,14 +116,14 @@ whatWQPdata <- function(..., saveFile = tempfile()){
urlCall,
"&mimeType=geojson&sorted=no")
if(file_ext(saveFile) != ".geojson"){
if(tools::file_ext(saveFile) != ".geojson"){
saveFile <- paste0(saveFile,".geojson")
}
doc <- getWebServiceData(urlCall, write_disk(saveFile))
headerInfo <- attr(doc, "headerInfo")
retval <- as.data.frame(fromJSON(saveFile), stringsAsFactors = FALSE)
retval <- as.data.frame(jsonlite::fromJSON(saveFile), stringsAsFactors = FALSE)
df_cols <- as.integer(which(sapply(retval, class) == "data.frame"))
y <- retval[,-df_cols]
......
......@@ -96,7 +96,7 @@ tzIssue <- importWaterML1(tzURL, TRUE, "America/Chicago")
#raw XML
url <- constructNWISURL(service = 'dv', siteNumber = '02319300', parameterCd = "00060",
startDate = "2014-01-01", endDate = "2014-01-01")
raw <- content(GET(url), as = 'raw')
raw <- content(httr::GET(url), as = 'raw')
rawParsed <- importWaterML1(raw)
}
filePath <- system.file("extdata", package="dataRetrieval")
......