Commit 01a72668 authored by Laura A DeCicco

Removing imports

parent f5481e00
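The pattern applied throughout this commit: roxygen @importFrom tags (and the importFrom() directives they generate in NAMESPACE) are removed, and each call site is qualified with the pkg:: operator instead. A minimal before/after sketch of the pattern, using a hypothetical parseDates() helper rather than a function from this package:

# Before: relies on importFrom(lubridate, parse_date_time) in NAMESPACE
#' @importFrom lubridate parse_date_time
parseDates <- function(x) {
  as.Date(parse_date_time(x, "%Y-%m-%d"))
}

# After: no NAMESPACE entry needed; lubridate stays in Imports in
# DESCRIPTION, and the call is namespace-qualified inline
parseDates <- function(x) {
  as.Date(lubridate::parse_date_time(x, "%Y-%m-%d"))
}

Qualified calls keep the NAMESPACE file small and make each function's external dependencies visible at the call site.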
@@ -17,7 +17,8 @@ Authors@R: c(person("Robert", "Hirsch", role = c("aut"),
     person("Lindsay","Carr", role=c("ctb"),
            email = "lcarr@usgs.gov"),
     person("David","Watkins", role=c("aut"),
-           email = "wwatkins@usgs.gov"))
+           email = "wwatkins@usgs.gov",
+           comment=c(ORCID="0000-0002-7544-0700")))
 Description: Collection of functions to help retrieve U.S. Geological Survey
     (USGS) and U.S. Environmental Protection Agency (EPA) water quality and
     hydrology data from web services. USGS web services are discovered from
...
@@ -46,42 +46,18 @@ export(whatWQPsites)
 export(zeroPad)
 import(stats)
 import(utils)
-importFrom(curl,curl_version)
-importFrom(dplyr,arrange)
-importFrom(dplyr,bind_cols)
-importFrom(dplyr,bind_rows)
 importFrom(dplyr,everything)
-importFrom(dplyr,full_join)
-importFrom(dplyr,left_join)
 importFrom(dplyr,mutate)
 importFrom(dplyr,mutate_)
 importFrom(dplyr,mutate_each_)
 importFrom(dplyr,select)
 importFrom(dplyr,select_)
-importFrom(httr,GET)
-importFrom(httr,HEAD)
-importFrom(httr,POST)
-importFrom(httr,RETRY)
-importFrom(httr,content)
-importFrom(httr,content_type)
-importFrom(httr,headers)
-importFrom(httr,status_code)
-importFrom(httr,stop_for_status)
-importFrom(httr,user_agent)
-importFrom(httr,write_disk)
-importFrom(jsonlite,fromJSON)
-importFrom(lubridate,fast_strptime)
-importFrom(lubridate,parse_date_time)
 importFrom(readr,col_character)
 importFrom(readr,col_number)
 importFrom(readr,cols)
 importFrom(readr,problems)
 importFrom(readr,read_delim)
 importFrom(readr,read_lines)
-importFrom(reshape2,dcast)
-importFrom(reshape2,melt)
-importFrom(stats,na.omit)
-importFrom(tools,file_ext)
 importFrom(xml2,read_xml)
 importFrom(xml2,xml_attr)
 importFrom(xml2,xml_attrs)
...
@@ -5,7 +5,6 @@
 #'
 #' @param values named list with arguments to send to the Water Quality Portal
 #' @return values named list with corrected arguments to send to the Water Quality Portal
-#' @importFrom lubridate parse_date_time
 #' @export
 #' @keywords internal
 #' @examples
@@ -30,7 +29,7 @@ checkWQPdates <- function(values){
       splitDates <- unlist(strsplit(dateInput, "-"))
       if(length(splitDates) == 3){
         if(nchar(splitDates[1]) == 4){ #R object
-          dates <- as.Date(parse_date_time(dateInput, "%Y-%m-%d"))
+          dates <- as.Date(lubridate::parse_date_time(dateInput, "%Y-%m-%d"))
           dates <- format(dates, format="%m-%d-%Y")
           values[i] <- dates
         } else if (nchar(splitDates[3]) != 4){ #The way WQP wants it == 4, so this is probably a 2 digit year or something
...
@@ -5,16 +5,6 @@
 #'
 #' @param obs_url character containing the url for the retrieval
 #' @param \dots information to pass to header request
-#' @importFrom httr GET
-#' @importFrom httr POST
-#' @importFrom httr RETRY
-#' @importFrom httr user_agent
-#' @importFrom httr stop_for_status
-#' @importFrom httr status_code
-#' @importFrom httr headers
-#' @importFrom httr content
-#' @importFrom httr content_type
-#' @importFrom curl curl_version
 #' @importFrom xml2 xml_text
 #' @importFrom xml2 xml_child
 #' @importFrom xml2 read_xml
@@ -34,20 +24,20 @@ getWebServiceData <- function(obs_url, ...){
   returnedList <- retryGetOrPost(obs_url, ...)
-  if(status_code(returnedList) == 400){
-    response400 <- content(returnedList, type="text", encoding = "UTF-8")
+  if(httr::status_code(returnedList) == 400){
+    response400 <- httr::content(returnedList, type="text", encoding = "UTF-8")
     statusReport <- xml_text(xml_child(read_xml(response400), 2)) # making assumption that - body is second node
     statusMsg <- gsub(pattern=", server=.*", replacement="", x = statusReport)
     stop(statusMsg)
-  } else if(status_code(returnedList) != 200){
+  } else if(httr::status_code(returnedList) != 200){
     message("For: ", obs_url,"\n")
-    stop_for_status(returnedList)
+    httr::stop_for_status(returnedList)
   } else {
-    headerInfo <- headers(returnedList)
+    headerInfo <- httr::headers(returnedList)
     if(headerInfo$`content-type` %in% c("text/tab-separated-values;charset=UTF-8")){
-      returnedDoc <- content(returnedList, type="text",encoding = "UTF-8")
+      returnedDoc <- httr::content(returnedList, type="text",encoding = "UTF-8")
     } else if (headerInfo$`content-type` %in%
       c("application/zip",
         "application/zip;charset=UTF-8",
@@ -61,7 +51,7 @@ getWebServiceData <- function(obs_url, ...){
     } else {
-      returnedDoc <- content(returnedList,encoding = "UTF-8")
+      returnedDoc <- httr::content(returnedList,encoding = "UTF-8")
       if(grepl("No sites/data found using the selection criteria specified", returnedDoc)){
         message(returnedDoc)
       }
@@ -86,7 +76,7 @@ getWebServiceData <- function(obs_url, ...){
 default_ua <- function() {
   versions <- c(
-    libcurl = curl_version()$version,
+    libcurl = curl::curl_version()$version,
     httr = as.character(packageVersion("httr")),
     dataRetrieval = as.character(packageVersion("dataRetrieval"))
   )
@@ -96,11 +86,9 @@ default_ua <- function() {
 #' getting header information from a WQP query
 #'
 #'@param url the query url
-#'@importFrom httr HEAD
-#'@importFrom httr headers
 getQuerySummary <- function(url){
-  queryHEAD <- HEAD(url)
-  retquery <- headers(queryHEAD)
+  queryHEAD <- httr::HEAD(url)
+  retquery <- httr::headers(queryHEAD)
   retquery[grep("-count",names(retquery))] <- as.numeric(retquery[grep("-count",names(retquery))])
@@ -114,13 +102,13 @@ getQuerySummary <- function(url){
 retryGetOrPost <- function(obs_url, ...) {
   resp <- NULL
   if (nchar(obs_url) < 2048 || grepl(pattern = "ngwmn", x = obs_url)) {
-    resp <- RETRY("GET", obs_url, ..., user_agent(default_ua()))
+    resp <- httr::RETRY("GET", obs_url, ..., httr::user_agent(default_ua()))
   } else {
     split <- strsplit(obs_url, "?", fixed=TRUE)
     obs_url <- split[[1]][1]
     query <- split[[1]][2]
-    resp <- RETRY("POST", obs_url, ..., body = query,
-                  content_type("application/x-www-form-urlencoded"), user_agent(default_ua()))
+    resp <- httr::RETRY("POST", obs_url, ..., body = query,
+                  httr::content_type("application/x-www-form-urlencoded"), httr::user_agent(default_ua()))
   }
   return(resp)
 }
\ No newline at end of file
@@ -18,7 +18,6 @@
 #' @importFrom xml2 xml_text
 #' @importFrom xml2 xml_attr
 #' @importFrom xml2 xml_find_first
-#' @importFrom lubridate parse_date_time
 #' @examples
 #' \dontrun{
 #' obs_url <- paste("http://cida.usgs.gov/ngwmn_cache/sos?request=GetObservation",
@@ -137,7 +136,6 @@ importNGWMN <- function(input, asDateTime=FALSE, tz="UTC"){
 #' "America/Anchorage","America/Honolulu","America/Jamaica","America/Managua","America/Phoenix", and "America/Metlakatla"
 #' @importFrom xml2 xml_attr xml_find_all xml_text
 #' @importFrom dplyr mutate
-#' @importFrom lubridate parse_date_time
 #' @export
 #' @examples
 #' baseURL <- "https://waterservices.usgs.gov/nwis/dv/?format=waterml,2.0"
@@ -187,7 +185,7 @@ importWaterML2 <- function(input, asDateTime=FALSE, tz="UTC") {
       timeDF$dateTime[logicVec] <- rawTime[logicVec]
     }
     if(asDateTime){
-      timeDF$dateTime <- parse_date_time(timeDF$dateTime, c("%Y","%Y-%m-%d","%Y-%m-%dT%H:%M","%Y-%m-%dT%H:%M:%S",
+      timeDF$dateTime <- lubridate::parse_date_time(timeDF$dateTime, c("%Y","%Y-%m-%d","%Y-%m-%dT%H:%M","%Y-%m-%dT%H:%M:%S",
                                          "%Y-%m-%dT%H:%M:%OS","%Y-%m-%dT%H:%M:%OS%z"), exact = TRUE)
       #^^setting tz in as.POSIXct just sets the attribute, does not convert the time!
       attr(timeDF$dateTime, 'tzone') <- tz
...
@@ -44,8 +44,6 @@
 #' @export
 #' @import utils
 #' @import stats
-#' @importFrom lubridate parse_date_time
-#' @importFrom dplyr left_join
 #' @importFrom readr read_lines
 #' @importFrom readr read_delim
 #' @importFrom readr problems
@@ -167,7 +165,7 @@ importRDB1 <- function(obs_url, asDateTime=TRUE, convertType = TRUE, tz="UTC"){
   if(length(badCols) > 0){
     readr.data <- fixErrors(readr.data, readr.data.char, "no trailing characters", as.numeric)
-    readr.data <- fixErrors(readr.data, readr.data.char, "date like", parse_date_time, c("%Y-%m-%d %H:%M:%S","%Y-%m-%d","%Y"))
+    readr.data <- fixErrors(readr.data, readr.data.char, "date like", lubridate::parse_date_time, c("%Y-%m-%d %H:%M:%S","%Y-%m-%d","%Y"))
   }
   if(length(grep("_va", names(readr.data))) > 0 &&
@@ -209,7 +207,7 @@ importRDB1 <- function(obs_url, asDateTime=TRUE, convertType = TRUE, tz="UTC"){
     if(all(c(paste0(i,"_dt"),paste0(i,"_tm")) %in% header.names)){
       varname <- paste0(i,"_dateTime")
-      varval <- suppressWarnings(parse_date_time(paste(readr.data[,paste0(i,"_dt")],readr.data[,paste0(i,"_tm")]), c("%Y-%m-%d %H:%M:%S","%Y-%m-%d %H:%M"), tz = "UTC"))
+      varval <- suppressWarnings(lubridate::parse_date_time(paste(readr.data[,paste0(i,"_dt")],readr.data[,paste0(i,"_tm")]), c("%Y-%m-%d %H:%M:%S","%Y-%m-%d %H:%M"), tz = "UTC"))
       if(!all(is.na(varval))){
         readr.data[,varname] <- varval
@@ -236,7 +234,7 @@ importRDB1 <- function(obs_url, asDateTime=TRUE, convertType = TRUE, tz="UTC"){
   }
   if("DATE" %in% header.names){
-    readr.data[,"DATE"] <- parse_date_time(readr.data[,"DATE"], "Ymd")
+    readr.data[,"DATE"] <- lubridate::parse_date_time(readr.data[,"DATE"], "Ymd")
   }
   if(all(c("DATE","TIME","TZCD") %in% header.names)){
@@ -291,7 +289,7 @@ convertTZ <- function(df, tz.name, date.time.cols, tz, flip.cols=TRUE){
                               code=c("EST","EDT","CST","CDT","MST","MDT","PST","PDT","AKST","AKDT","HAST","HST","", NA),
                               stringsAsFactors = FALSE)
-  offset <- left_join(df[,tz.name,drop=FALSE],offsetLibrary, by=setNames("code",tz.name))
+  offset <- dplyr::left_join(df[,tz.name,drop=FALSE],offsetLibrary, by=setNames("code",tz.name))
   offset <- offset$offset
   df[,paste0(tz.name,"_reported")] <- df[,tz.name,drop=FALSE]
...
@@ -22,12 +22,6 @@
 #' @importFrom dplyr mutate_
 #' @importFrom dplyr mutate_each_
 #' @importFrom dplyr select_
-#' @importFrom dplyr left_join
-#' @importFrom lubridate parse_date_time
-#' @importFrom lubridate fast_strptime
-#' @importFrom httr GET
-#' @importFrom httr user_agent
-#' @importFrom httr write_disk
 #' @examples
 #' # These examples require an internet connection to run
 #'
@@ -57,7 +51,7 @@ importWQP <- function(obs_url, zip=FALSE, tz="UTC"){
     message("zip encoding access still in development")
     temp <- tempfile()
     temp <- paste0(temp,".zip")
-    doc <- getWebServiceData(obs_url, write_disk(temp))
+    doc <- getWebServiceData(obs_url, httr::write_disk(temp))
     headerInfo <- headers(doc)
     doc <- unzip(temp, exdir=tempdir())
     unlink(temp)
@@ -116,23 +110,22 @@ importWQP <- function(obs_url, zip=FALSE, tz="UTC"){
                               code=c("EST","EDT","CST","CDT","MST","MDT","PST","PDT","AKST","AKDT","HAST","HST","", NA),
                               stringsAsFactors = FALSE)
-    retval <- left_join(retval, offsetLibrary, by=c("ActivityStartTime/TimeZoneCode"="code"))
+    retval <- dplyr::left_join(retval, offsetLibrary, by=c("ActivityStartTime/TimeZoneCode"="code"))
     names(retval)[names(retval) == "offset"] <- "timeZoneStart"
-    retval <- left_join(retval, offsetLibrary, by=c("ActivityEndTime/TimeZoneCode"="code"))
+    retval <- dplyr::left_join(retval, offsetLibrary, by=c("ActivityEndTime/TimeZoneCode"="code"))
     names(retval)[names(retval) == "offset"] <- "timeZoneEnd"
     dateCols <- c("ActivityStartDate","ActivityEndDate","AnalysisStartDate","PreparationStartDate")
     for(i in dateCols){
-      retval[,i] <- suppressWarnings(as.Date(parse_date_time(retval[[i]], c("Ymd", "mdY"))))
+      retval[,i] <- suppressWarnings(as.Date(lubridate::parse_date_time(retval[[i]], c("Ymd", "mdY"))))
     }
     retval <- mutate_(retval, ActivityStartDateTime=~paste(ActivityStartDate, `ActivityStartTime/Time`))
     retval <- mutate_(retval, ActivityEndDateTime=~paste(ActivityEndDate, `ActivityEndTime/Time`))
-    retval <- mutate_(retval, ActivityStartDateTime=~fast_strptime(ActivityStartDateTime, '%Y-%m-%d %H:%M:%S')+60*60*timeZoneStart)
-    retval <- mutate_(retval, ActivityEndDateTime=~fast_strptime(ActivityEndDateTime, '%Y-%m-%d %H:%M:%S')+60*60*timeZoneStart)
+    retval <- mutate_(retval, ActivityStartDateTime=~lubridate::fast_strptime(ActivityStartDateTime, '%Y-%m-%d %H:%M:%S')+60*60*timeZoneStart)
+    retval <- mutate_(retval, ActivityEndDateTime=~lubridate::fast_strptime(ActivityEndDateTime, '%Y-%m-%d %H:%M:%S')+60*60*timeZoneStart)
     retval <- select_(retval, ~-timeZoneEnd, ~-timeZoneStart)
   }
...
@@ -41,10 +41,6 @@
 #' @export
 #' @import utils
 #' @import stats
-#' @importFrom lubridate parse_date_time
-#' @importFrom dplyr full_join
-#' @importFrom dplyr bind_rows
-#' @importFrom dplyr arrange
 #' @importFrom xml2 read_xml
 #' @importFrom xml2 xml_find_all
 #' @importFrom xml2 xml_children
@@ -100,7 +96,7 @@
 #' #raw XML
 #' url <- constructNWISURL(service = 'dv', siteNumber = '02319300', parameterCd = "00060",
 #'                         startDate = "2014-01-01", endDate = "2014-01-01")
-#' raw <- content(GET(url), as = 'raw')
+#' raw <- content(httr::GET(url), as = 'raw')
 #' rawParsed <- importWaterML1(raw)
 #' }
 #' filePath <- system.file("extdata", package="dataRetrieval")
...
@@ -186,7 +182,7 @@ importWaterML1 <- function(obs_url,asDateTime=FALSE, tz="UTC"){
       dateTime <- xml_attr(obs,"dateTime")
       if(asDateTime){
         numChar <- nchar(dateTime)
-        dateTime <- parse_date_time(dateTime, c("%Y","%Y-%m-%d","%Y-%m-%dT%H:%M",
+        dateTime <- lubridate::parse_date_time(dateTime, c("%Y","%Y-%m-%d","%Y-%m-%dT%H:%M",
                                     "%Y-%m-%dT%H:%M:%S","%Y-%m-%dT%H:%M:%OS",
                                     "%Y-%m-%dT%H:%M:%OS%z"), exact = TRUE)
         if(any(numChar < 20) & any(numChar > 16)){
@@ -232,7 +228,7 @@ importWaterML1 <- function(obs_url,asDateTime=FALSE, tz="UTC"){
       if(is.null(obsDF)){
         obsDF <- valParentDF
       }else{
-        obsDF <- full_join(obsDF, valParentDF, by = c("dateTime","tz_cd"))
+        obsDF <- dplyr::full_join(obsDF, valParentDF, by = c("dateTime","tz_cd"))
       }
     }else{
       #need column names for joining later
@@ -295,17 +291,17 @@ importWaterML1 <- function(obs_url,asDateTime=FALSE, tz="UTC"){
         deleteCols <- grepl(obsColName,colnames(sameSite))
         sameSite <- sameSite[,!deleteCols]
         sameSite_simNames <- intersect(colnames(sameSite), colnames(df))
-        sameSite <- full_join(sameSite, df, by = sameSite_simNames)
+        sameSite <- dplyr::full_join(sameSite, df, by = sameSite_simNames)
         sameSite <- sameSite[order(as.Date(sameSite$dateTime)),]
-        mergedDF <- bind_rows(sameSite, diffSite)
+        mergedDF <- dplyr::bind_rows(sameSite, diffSite)
       }else{
         similarNames <- intersect(colnames(mergedDF), colnames(df))
-        mergedDF <- full_join(mergedDF, df, by=similarNames)
+        mergedDF <- dplyr::full_join(mergedDF, df, by=similarNames)
       }
     }
-    mergedSite <- full_join(mergedSite, siteDF, by = colnames(mergedSite))
-    mergedVar <- full_join(mergedVar, varText, by = colnames(mergedVar))
-    mergedStat <- full_join(mergedStat, statDF, by = colnames(mergedStat))
+    mergedSite <- dplyr::full_join(mergedSite, siteDF, by = colnames(mergedSite))
+    mergedVar <- dplyr::full_join(mergedVar, varText, by = colnames(mergedVar))
+    mergedStat <- dplyr::full_join(mergedStat, statDF, by = colnames(mergedStat))
   }
 }
@@ -323,7 +319,7 @@ importWaterML1 <- function(obs_url,asDateTime=FALSE, tz="UTC"){
   mergedNames <- names(mergedDF)
   tzLoc <- grep("tz_cd", names(mergedDF))
   mergedDF <- mergedDF[c(mergedNames[-tzLoc],mergedNames[tzLoc])]
-  mergedDF <- arrange(mergedDF,site_no, dateTime)
+  mergedDF <- dplyr::arrange(mergedDF,site_no, dateTime)
   names(mergedDF) <- make.names(names(mergedDF))
...
@@ -13,9 +13,6 @@
 #' for more information on time zones.
 #' @import utils
 #' @importFrom dplyr mutate
-#' @importFrom dplyr bind_rows
-#' @importFrom dplyr bind_cols
-#' @importFrom stats na.omit
 #' @export
 #' @examples
 #' \dontrun{
@@ -58,14 +55,14 @@ readNGWMNdata <- function(service, ..., asDateTime = TRUE, tz = "UTC"){
     #these attributes are pulled out and saved when doing binds to be reattached
     attrs <- c("url","gml:identifier","generationDate","responsibleParty", "contact")
-    featureID <- na.omit(gsub(":",".",dots[['siteNumbers']]))
+    featureID <- stats::na.omit(gsub(":",".",dots[['siteNumbers']]))
     for(f in featureID){
       obsFID <- retrieveObservation(featureID = f, asDateTime, attrs, tz = tz)
       obsFIDattr <- saveAttrs(attrs, obsFID)
       obsFID <- removeAttrs(attrs, obsFID)
-      allObs <- bind_rows(allObs, obsFID)
-      allAttrs <- bind_rows(allAttrs, obsFIDattr)
+      allObs <- dplyr::bind_rows(allObs, obsFID)
+      allAttrs <- dplyr::bind_rows(allAttrs, obsFIDattr)
     }
@@ -83,7 +80,7 @@ readNGWMNdata <- function(service, ..., asDateTime = TRUE, tz = "UTC"){
   } else if (service == "featureOfInterest") {
     if("siteNumbers" %in% names(dots)){
-      featureID <- na.omit(gsub(":",".",dots[['siteNumbers']]))
+      featureID <- stats::na.omit(gsub(":",".",dots[['siteNumbers']]))
       allSites <- tryCatch({
         retrieveFeatureOfInterest(featureID = featureID)
       })
...
@@ -35,7 +35,6 @@
 #' }
 #'
 #' @seealso \code{\link{renameNWISColumns}}, \code{\link{importWaterML1}}
-#' @importFrom lubridate parse_date_time
 #' @export
 #' @keywords data import USGS web service
 #' @examples
...
@@ -73,9 +73,6 @@
 #' variableInfo \tab data frame \tab A data frame containing information on the requested parameters \cr
 #' }
 #' @export
-#' @importFrom reshape2 melt
-#' @importFrom reshape2 dcast
-#' @importFrom dplyr left_join
 #' @seealso \code{\link{readWQPdata}}, \code{\link{whatWQPsites}},
 #' \code{\link{readWQPqw}}, \code{\link{constructNWISURL}}
 #' @examples
@@ -173,7 +170,7 @@ readNWISqw <- function (siteNumbers,parameterCd,startDate="",endDate="",
   if( !(is.null(siteNumbers)) && !(is.na(siteNumbers)) & length(siteNumbers) > 0){