Commit 60ade1b0 authored by Laura A DeCicco

Fixes #477: allow a "service" argument in readWQPdata and whatWQPsites so WQP queries can target the Result, Station, Activity, ActivityMetric, or SiteSummary endpoints. readWQPdots now returns the parsed query values together with the requested service, and the pkg.env endpoint entries are renamed to match the WQP service names.

parent 8598e6c6
@@ -7,6 +7,7 @@ dataRetrieval 2.7.7
 * Fixed timezone bug in RDB requests that had mixed timezones.
 * Updated internal data for parameter codes
 * Added parameterCd argument to readNWISgwl as the services had changed
+* Allow a "service" argument in readWQPdata, whatWQPsites
 
 dataRetrieval 2.7.5
 ==========
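The NEWS entry above is the user-facing change. A hedged usage sketch (query values are illustrative; the default remains the Result endpoint):

```r
library(dataRetrieval)

# Unchanged default: service = "Result"
phos <- readWQPdata(statecode = "WI",
                    characteristicName = "Phosphorus")

# New: point the same dots-based query at another WQP endpoint.
# ignore_attributes = TRUE skips the Station metadata lookup,
# which is geared to Result-style returns.
activity <- readWQPdata(statecode = "WI",
                        characteristicName = "Phosphorus",
                        service = "Activity",
                        ignore_attributes = TRUE)

sites <- whatWQPsites(statecode = "WI",
                      characteristicName = "Phosphorus")
```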
@@ -314,7 +314,7 @@ constructWQPURL <- function(siteNumbers,parameterCd,startDate,endDate,zip=TRUE){
     parameterCd <- paste(parameterCd, collapse=";")
   }
 
-  baseURL <- drURL("wqpData", siteid = siteNumbers, Access=pkg.env$access)
+  baseURL <- drURL("Result", siteid = siteNumbers, Access=pkg.env$access)
 
   url <- paste0(baseURL,
                 ifelse(pCodeLogic,"&pCode=","&characteristicName="),
                 parameterCd)
@@ -134,30 +134,31 @@
 #' parameterCd = "00010")
 #'
 #' }
-readWQPdata <- function(..., querySummary=FALSE, tz="UTC", ignore_attributes = FALSE){
+readWQPdata <- function(..., querySummary=FALSE, tz="UTC",
+                        ignore_attributes = FALSE){
 
   tz <- match.arg(tz, OlsonNames())
 
-  values <- readWQPdots(...)
-  values <- sapply(values, function(x) URLencode(x, reserved = TRUE))
-  urlCall <- paste(paste(names(values),values,sep="="),collapse="&")
-
-  baseURL <- drURL("wqpData")
-  urlCall <- paste0(baseURL,
-                    urlCall,
-                    "&mimeType=tsv")
+  valuesList <- readWQPdots(...)
+  service <- valuesList$service
+
+  values <- sapply(valuesList$values, function(x) URLencode(x, reserved = TRUE))
+
+  baseURL <- drURL(service, arg.list=values)
+  baseURL <- appendDrURL(baseURL, mimeType = "tsv")
 
   if(querySummary){
-    retquery <- getQuerySummary(urlCall)
+    retquery <- getQuerySummary(baseURL)
     return(retquery)
   } else {
-    retval <- importWQP(urlCall, zip= values["zip"] == "yes", tz=tz)
+    retval <- importWQP(baseURL, zip = values["zip"] == "yes", tz=tz)
 
     if(!all(is.na(retval)) & !ignore_attributes){
-      siteInfo <- whatWQPsites(...)
+      siteInfo <- whatWQPsites(..., service = "Station")
 
       siteInfoCommon <- data.frame(station_nm=siteInfo$MonitoringLocationName,
                                    agency_cd=siteInfo$OrganizationIdentifier,
@@ -199,12 +200,12 @@ readWQPdata <- function(..., querySummary=FALSE, tz="UTC", ignore_attributes = F
     } else {
       if(!ignore_attributes){
         message("The following url returned no data:\n")
-        message(urlCall)
+        message(baseURL)
       }
     }
 
     attr(retval, "queryTime") <- Sys.time()
-    attr(retval, "url") <- urlCall
+    attr(retval, "url") <- baseURL
 
     return(retval)
   }
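For reference, a sketch of the querySummary branch touched above: with querySummary = TRUE the function stops after getQuerySummary(baseURL) and returns the response-header summary instead of downloading data, a cheap way to size a query first. Query values are illustrative, and the exact header fields returned are per the WQP service:

```r
query_info <- readWQPdata(statecode = "OH",
                          characteristicName = "Phosphorus",
                          querySummary = TRUE)
names(query_info)  # response-header fields, e.g. a total result count
```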
@@ -16,6 +16,16 @@ readWQPdots <- function(...){
     values['bBox'] <- gsub(pattern = ";", replacement = ",", x = values['bBox'])
   }
 
+  if("service" %in% names(matchReturn)){
+    service <- matchReturn$service
+    matchReturn$service <- NULL
+  } else {
+    service <- "Result"
+  }
+
+  match.arg(service, c("Result", "Station", "Activity",
+                       "ActivityMetric", "SiteSummary"))
+
   values <- checkWQPdates(values)
 
   names(values)[names(values) == "siteNumber"] <- "siteid"
@@ -48,5 +58,5 @@ readWQPdots <- function(...){
     values["zip"] <- "yes"
   }
 
-  return(values)
+  return(list(values=values, service=service))
 }
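With this change readWQPdots() returns a two-element list rather than a bare named vector. A hedged illustration of the new shape, mirroring the inputs used in the updated tests below:

```r
args <- dataRetrieval:::readWQPdots(siteNumber = "04010301",
                                    service = "Station")

args$service                       # "Station"; defaults to "Result" if omitted
"siteid" %in% names(args$values)   # TRUE: siteNumber is renamed for WQP
"service" %in% names(args$values)  # FALSE: service is split out of the query
```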
@@ -55,11 +55,12 @@ setAccess = function(access="public"){
 
   pkg.env$pCode = "https://nwis.waterdata.usgs.gov/nwis/pmcodes/pmcodes"
   # NOTE: state water use are still in: constructUseURL
-  pkg.env$wqpData = "https://www.waterqualitydata.us/data/Result/search"
-  pkg.env$wqpStation = "https://www.waterqualitydata.us/data/Station/search"
-  pkg.env$wqpActivity = "https://www.waterqualitydata.us/data/Activity/search"
-  pkg.env$wqpMetrics = "https://www.waterqualitydata.us/data/ActivityMetric/search"
-  pkg.env$wqpSiteSummary = "https://www.waterqualitydata.us/data/summary/monitoringLocation/search"
+  pkg.env$Result = "https://www.waterqualitydata.us/data/Result/search"
+  pkg.env$Station = "https://www.waterqualitydata.us/data/Station/search"
+  pkg.env$Activity = "https://www.waterqualitydata.us/data/Activity/search"
+  pkg.env$ActivityMetric = "https://www.waterqualitydata.us/data/ActivityMetric/search"
+  pkg.env$SiteSummary = "https://www.waterqualitydata.us/data/summary/monitoringLocation/search"
 
   pkg.env$NGWMN = "https://cida.usgs.gov/ngwmn_cache/sos"
 }
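The rename from the wqp* aliases to the literal WQP service names is what lets drURL(service, arg.list = values) resolve an endpoint directly from the string validated in readWQPdots(). A minimal sketch of that lookup, assuming drURL() keys into pkg.env by name (its actual implementation is not part of this diff):

```r
# Hypothetical sketch only; not the package's real drURL().
drURL_sketch <- function(service, arg.list = NULL) {
  base <- get(service, envir = pkg.env)  # e.g. "Station" -> .../Station/search
  if (length(arg.list) > 0) {
    query <- paste(names(arg.list), unlist(arg.list),
                   sep = "=", collapse = "&")
    base <- paste0(base, "?", query)
  }
  base
}
```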
@@ -12,24 +12,31 @@ whatWQPsamples <- function(...){
 
   values <- readWQPdots(...)
+  values <- values$values
 
   if("tz" %in% names(values)){
     values <- values[!(names(values) %in% "tz")]
   }
 
+  if("service" %in% names(values)){
+    values <- values[!(names(values) %in% "service")]
+  }
+
   values <- sapply(values, function(x) URLencode(x, reserved = TRUE))
 
-  urlCall <- paste(paste(names(values),values,sep="="),collapse="&")
-
-  baseURL <- drURL("wqpActivity")
-  urlCall <- paste0(baseURL,
-                    urlCall,
-                    "&mimeType=tsv")
+  baseURL <- drURL("Activity", arg.list = values)
+  baseURL <- appendDrURL(baseURL, mimeType = "tsv")
 
   withCallingHandlers({
-    retval <- importWQP(urlCall, zip=values["zip"] == "yes")
+    retval <- importWQP(baseURL, zip=values["zip"] == "yes")
   }, warning=function(w) {
     if (any( grepl( "Number of rows returned not matched in header", w)))
       invokeRestart("muffleWarning")
   })
 
   attr(retval, "queryTime") <- Sys.time()
-  attr(retval, "url") <- urlCall
+  attr(retval, "url") <- baseURL
   return(retval)
 }
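The withCallingHandlers() block above (repeated in the functions below) silences one specific, expected warning while letting any other warning surface. A self-contained illustration of the pattern, with noisy() standing in for importWQP():

```r
noisy <- function() {
  warning("Number of rows returned not matched in header")
  42
}

result <- withCallingHandlers({
  noisy()
}, warning = function(w) {
  # Muffle only the expected row-count warning; others propagate.
  if (grepl("Number of rows returned not matched in header",
            conditionMessage(w))) {
    invokeRestart("muffleWarning")
  }
})
result
#> [1] 42
```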
@@ -48,24 +55,31 @@ whatWQPmetrics <- function(...){
 
   values <- readWQPdots(...)
+  values <- values$values
 
   if("tz" %in% names(values)){
     values <- values[!(names(values) %in% "tz")]
   }
 
+  if("service" %in% names(values)){
+    values <- values[!(names(values) %in% "service")]
+  }
+
   values <- sapply(values, function(x) URLencode(x, reserved = TRUE))
 
-  urlCall <- paste(paste(names(values),values,sep="="),collapse="&")
-
-  baseURL <- drURL("wqpMetrics")
-  urlCall <- paste0(baseURL,
-                    urlCall,
-                    "&mimeType=tsv")
+  baseURL <- drURL("ActivityMetric", arg.list = values)
+  baseURL <- appendDrURL(baseURL, mimeType = "tsv")
 
   withCallingHandlers({
-    retval <- importWQP(urlCall, zip=values["zip"] == "yes")
+    retval <- importWQP(baseURL, zip=values["zip"] == "yes")
   }, warning=function(w) {
     if (any( grepl( "Number of rows returned not matched in header", w)))
       invokeRestart("muffleWarning")
   })
 
   attr(retval, "queryTime") <- Sys.time()
-  attr(retval, "url") <- urlCall
+  attr(retval, "url") <- baseURL
   return(retval)
 }
@@ -123,22 +137,28 @@ whatWQPdata <- function(..., saveFile = tempfile()){
 
   values <- readWQPdots(...)
-  values <- sapply(values, function(x) URLencode(x, reserved = TRUE))
-
-  urlCall <- paste(paste(names(values),values,sep="="),collapse="&")
+  values <- values$values
 
   if("tz" %in% names(values)){
     values <- values[!(names(values) %in% "tz")]
   }
 
-  baseURL <- drURL("wqpStation")
-  urlCall <- paste0(baseURL,
-                    urlCall,
-                    "&mimeType=geojson")
+  if("service" %in% names(values)){
+    values <- values[!(names(values) %in% "service")]
+  }
+
+  values <- sapply(values, function(x) URLencode(x, reserved = TRUE))
+
+  baseURL <- drURL("Station", arg.list = values)
+  baseURL <- appendDrURL(baseURL, mimeType = "geojson")
 
   saveFile_zip <- saveFile
   if(tools::file_ext(saveFile) != ".zip"){
     saveFile_zip <- paste0(saveFile,".zip")
   }
 
-  doc <- getWebServiceData(urlCall, httr::write_disk(saveFile_zip))
+  doc <- getWebServiceData(baseURL, httr::write_disk(saveFile_zip))
   headerInfo <- attr(doc, "headerInfo")
 
   if(headerInfo$`total-site-count` == 0){
@@ -192,7 +212,7 @@ whatWQPdata <- function(..., saveFile = tempfile()){
   }
 
   attr(y, "queryTime") <- Sys.time()
-  attr(y, "url") <- urlCall
+  attr(y, "url") <- baseURL
   attr(y, "file") <- saveFile
   return(y)
 }
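After this change the attributes attached to the whatWQPdata() result carry the drURL()-built request rather than the hand-assembled urlCall. A hedged usage sketch (the query value is illustrative):

```r
sites <- whatWQPdata(countycode = "US:55:025")

attr(sites, "url")        # the Station/search URL that was issued
attr(sites, "queryTime")  # POSIXct timestamp of the query
attr(sites, "file")       # local path of the downloaded geojson response
```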
@@ -69,23 +69,26 @@ whatWQPsites <- function(...){
 
   values <- readWQPdots(...)
+  values <- values$values
 
   if("tz" %in% names(values)){
     values <- values[!(names(values) %in% "tz")]
   }
 
+  if("service" %in% names(values)){
+    values <- values[!(names(values) %in% "service")]
+  }
+
   values <- sapply(values, function(x) URLencode(x, reserved = TRUE))
 
-  urlCall <- paste(paste(names(values),values,sep="="),collapse="&")
-
-  baseURL <- drURL("wqpStation")
-  urlCall <- paste0(baseURL,
-                    urlCall,
-                    "&mimeType=tsv")
+  baseURL <- drURL("Station", arg.list = values)
+  baseURL <- appendDrURL(baseURL, mimeType = "tsv")
 
-  retval <- importWQP(urlCall, zip=values["zip"] == "yes")
+  retval <- importWQP(baseURL, zip=values["zip"] == "yes")
 
   attr(retval, "queryTime") <- Sys.time()
-  attr(retval, "url") <- urlCall
+  attr(retval, "url") <- baseURL
 
   return(retval)
 }
@@ -107,27 +110,29 @@ readWQPsummary <- function(...){
 
   values <- readWQPdots(...)
+  values <- values$values
 
   if("tz" %in% names(values)){
     values <- values[!(names(values) %in% "tz")]
   }
 
-  values <- sapply(values, function(x) URLencode(x, reserved = TRUE))
-  urlCall <- paste(paste(names(values),values,sep="="),collapse="&")
-
-  baseURL <- drURL("wqpSiteSummary")
-  urlCall <- paste0(baseURL,
-                    urlCall,
-                    "&mimeType=csv")
+  if("service" %in% names(values)){
+    values <- values[!(names(values) %in% "service")]
+  }
+
+  values <- sapply(values, function(x) URLencode(x, reserved = TRUE))
+
+  baseURL <- drURL("SiteSummary", arg.list = values)
+  baseURL <- appendDrURL(baseURL, mimeType = "csv")
 
   withCallingHandlers({
-    retval <- importWQP(urlCall, zip=values["zip"] == "yes", csv = TRUE)
+    retval <- importWQP(baseURL, zip=values["zip"] == "yes", csv = TRUE)
   }, warning=function(w) {
     if (any( grepl( "Number of rows returned not matched in header", w)))
       invokeRestart("muffleWarning")
   })
 
   attr(retval, "queryTime") <- Sys.time()
-  attr(retval, "url") <- urlCall
+  attr(retval, "url") <- baseURL
 
   return(retval)
@@ -282,14 +282,14 @@ test_that("readWQPdots working", {
 
   # NWIS names (siteNumber) converted to WQP expected names (siteid)
   formArgs_site <- dataRetrieval:::readWQPdots(siteNumber="04010301")
   expect_true(length(formArgs_site) == 2)
-  expect_true("siteid" %in% names(formArgs_site))
-  expect_false("siteNumber" %in% names(formArgs_site))
+  expect_true("siteid" %in% names(formArgs_site$values))
+  expect_false("siteNumber" %in% names(formArgs_site$values))
 
   # NWIS names (stateCd) converted to WQP expected names (statecode)
   formArgs <- dataRetrieval:::readWQPdots(stateCd="OH",parameterCd="00665")
-  expect_true(length(formArgs) == 3)
-  expect_true("statecode" %in% names(formArgs))
-  expect_false("stateCd" %in% names(formArgs))
+  expect_true(length(formArgs$values) == 3)
+  expect_true("statecode" %in% names(formArgs$values))
+  expect_false("stateCd" %in% names(formArgs$values))
 })
 # context("NGWMN")
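A hedged sketch of an additional assertion the new return shape would support (hypothetical; not part of this commit):

```r
formArgs_service <- dataRetrieval:::readWQPdots(siteNumber = "04010301",
                                                service = "Station")
expect_equal(formArgs_service$service, "Station")
expect_false("service" %in% names(formArgs_service$values))
```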