Commit 8b6a72ac authored by David Watkins's avatar David Watkins
Browse files

Merge branch 'master' of github.com:USGS-R/dataRetrieval

parents d0e3d7b1 2d8868b8
......@@ -35,11 +35,14 @@ Imports:
utils,
dplyr,
xml2,
readr (>= 1.0.0)
readr (>= 1.0.0),
jsonlite
Suggests:
xtable,
knitr,
testthat
testthat,
geojsonio,
leaflet
VignetteBuilder: knitr
BuildVignettes: true
BugReports: https://github.com/USGS-R/dataRetrieval/issues
......
......@@ -40,6 +40,8 @@ export(stateCd)
export(stateCdLookup)
export(whatNWISdata)
export(whatNWISsites)
export(whatWQPdata)
export(whatWQPmetrics)
export(whatWQPsamples)
export(whatWQPsites)
export(zeroPad)
......@@ -70,6 +72,7 @@ importFrom(httr,status_code)
importFrom(httr,stop_for_status)
importFrom(httr,user_agent)
importFrom(httr,write_disk)
importFrom(jsonlite,fromJSON)
importFrom(lubridate,fast_strptime)
importFrom(lubridate,parse_date_time)
importFrom(readr,col_character)
......
......@@ -52,16 +52,15 @@ getWebServiceData <- function(obs_url, ...){
headerInfo <- headers(returnedList)
if(headerInfo$`content-type` == "text/tab-separated-values;charset=UTF-8"){
if(headerInfo$`content-type` %in% c("text/tab-separated-values;charset=UTF-8")){
returnedDoc <- content(returnedList, type="text",encoding = "UTF-8")
} else if (headerInfo$`content-type` %in%
c("application/zip", "application/zip;charset=UTF-8")) {
c("application/zip", "application/zip;charset=UTF-8","application/vnd.geo+json;charset=UTF-8")) {
returnedDoc <- returnedList
} else if (headerInfo$`content-type` %in% c("text/html","text/html; charset=UTF-8") ){
txt <- readBin(returnedList$content, character())
message(txt)
return(txt)
} else {
returnedDoc <- content(returnedList,encoding = "UTF-8")
if(grepl("No sites/data found using the selection criteria specified", returnedDoc)){
......
......@@ -111,7 +111,8 @@ importRDB1 <- function(obs_url, asDateTime=TRUE, convertType = TRUE, tz="UTC"){
readr.meta <- readr.total[grep("^#", readr.total)]
meta.rows <- length(readr.meta)
header.names <- strsplit(readr.total[meta.rows+1],"\t")[[1]]
types.names <- strsplit(readr.total[meta.rows+2],"\t")[[1]]
if(convertType){
readr.data <- suppressWarnings(read_delim(doc, skip = (meta.rows+2),delim="\t",col_names = FALSE))
#defaults to time in seconds in readr 0.2.2.9??
......@@ -175,6 +176,20 @@ importRDB1 <- function(obs_url, asDateTime=TRUE, convertType = TRUE, tz="UTC"){
readr.data[,vaCols] <- sapply(readr.data[,vaCols], as.numeric)
}
columnTypes <- sapply(readr.data, typeof)
columnsThatMayBeWrong <- grep("n",types.names)[which(!(columnTypes[grep("n",types.names)] %in% c("double","integer")))]
for(i in columnsThatMayBeWrong){
readr.data[[i]] <- tryCatch({
as.numeric(readr.data[[i]])
},
warning=function(cond) {
message(paste("Column",i,"contains characters that cannot be automatically converted to numeric."))
return(readr.data[[i]])
}
)
}
comment(readr.data) <- readr.meta
problems.orig <- problems(readr.data)
......
......@@ -363,7 +363,9 @@ readNWISdots <- function(...){
return(list(values=values, service=service))
}
# convert variables in dots to usable format
#' convert variables in dots to usable format
#'
#' @keywords internal
convertLists <- function(...){
matchReturn <- c(do.call("c",list(...)[sapply(list(...), class) == "list"]), #get the list parts
list(...)[sapply(list(...), class) != "list"]) # get the non-list parts
......
......@@ -22,18 +22,16 @@ readWQPdots <- function(...){
stCd <- values["statecode"]
stCdPrefix <- "US:"
if(!grepl(stCdPrefix, stCd)){
values["statecode"] <- paste0(stCdPrefix, stateCdLookup(stCd, "id"))
values["statecode"] <- paste0(stCdPrefix, zeroPad(stateCdLookup(stCd, "id"),2))
}
}
names(values)[names(values) == "countyCd"] <- "countycode"
if("countycode" %in% names(values)){
if(!grepl(values["statecode"], values["countycode"])){
stCd <- gsub("US:", "", values["statecode"])
values["countycode"] <- paste(values["statecode"],
countyCdLookup(stCd, values["countycode"], "id"),
sep=":")
}
if(all(c("countycode","statecode") %in% names(values))){
stCd <- gsub("US:", "", values["statecode"])
values["countycode"] <- paste(values["statecode"],
countyCdLookup(stCd, values["countycode"], "id"),
sep=":")
}
if("zip" %in% names(values)){
......
......@@ -19,9 +19,6 @@
#' }
whatWQPsamples <- function(...){
matchReturn <- c(do.call("c",list(...)[sapply(list(...), class) == "list"]), #get the list parts
list(...)[sapply(list(...), class) != "list"]) # get the non-list parts
values <- readWQPdots(...)
values <- sapply(values, function(x) URLencode(x, reserved = TRUE))
......@@ -42,29 +39,99 @@ whatWQPsamples <- function(...){
return(retval)
}
# Only STORET and not publicly mapped yet:
# whatWQPmetrics <- function(...){
#
# matchReturn <- c(do.call("c",list(...)[sapply(list(...), class) == "list"]), #get the list parts
# list(...)[sapply(list(...), class) != "list"]) # get the non-list parts
#
# values <- readWQPdots(...)
#
# values <- sapply(values, function(x) URLencode(x, reserved = TRUE))
#
# urlCall <- paste(paste(names(values),values,sep="="),collapse="&")
#
#
# baseURL <- drURL("wqpMetrics")
# urlCall <- paste0(baseURL,
# urlCall,
# "&mimeType=tsv&sorted=no")
#
# retval <- importWQP(urlCall, zip=values["zip"] == "yes")
#
# attr(retval, "queryTime") <- Sys.time()
# attr(retval, "url") <- urlCall
#
# return(retval)
# }
#
#' Activity Metrics from Water Quality Portal
#'
#' Returns a data frame of activity metrics from the Water Quality Portal web service. This function gets the data from: \url{https://www.waterqualitydata.us}.
#' Arguments to the function should be based on \url{https://www.waterqualitydata.us/webservices_documentation}
#'
#' @param \dots see \url{https://www.waterqualitydata.us/webservices_documentation} for a complete list of options. A list of arguments can also be supplied.
#' @keywords data import WQP web service
#' @return A data frame
#'
#' @export
#' @import utils
#' @examples
#' \dontrun{
#'
#' type <- "Stream"
#' sites <- whatWQPmetrics(countycode="US:55:025",siteType=type)
#' lakeSites <- whatWQPmetrics(siteType = "Lake, Reservoir, Impoundment", statecode = "US:55")
#' }
whatWQPmetrics <- function(...){
  # Normalize the ... arguments into WQP query parameters, then
  # percent-encode each value so it is safe to embed in the URL.
  params <- readWQPdots(...)
  params <- sapply(params, function(p) URLencode(p, reserved = TRUE))

  # Build the "name=value&name=value" query string and append it to
  # the metrics endpoint, requesting tab-separated, unsorted output.
  queryString <- paste(paste(names(params), params, sep = "="), collapse = "&")
  urlCall <- paste0(drURL("wqpMetrics"), queryString, "&mimeType=tsv&sorted=no")

  # Fetch and parse the TSV response; honor a zip=yes request if present.
  retval <- importWQP(urlCall, zip = params["zip"] == "yes")

  # Record provenance on the returned data frame.
  attr(retval, "queryTime") <- Sys.time()
  attr(retval, "url") <- urlCall

  return(retval)
}
#' Data Available from Water Quality Portal
#'
#' Returns a list of sites from the Water Quality Portal web service. This function gets the data from: \url{https://www.waterqualitydata.us}.
#' Arguments to the function should be based on \url{https://www.waterqualitydata.us/webservices_documentation}
#'
#' @param \dots see \url{https://www.waterqualitydata.us/webservices_documentation} for a complete list of options. A list of arguments can also be supplied.
#' @keywords data import WQP web service
#' @importFrom jsonlite fromJSON
#' @return A list
#'
#' @export
#' @import utils
#' @examples
#' \dontrun{
#' site1 <- whatWQPdata(siteid="USGS-01594440")
#'
#' type <- "Stream"
#' sites <- whatWQPdata(countycode="US:55:025",siteType=type)
#'
#' library(leaflet)
#' library(geojsonio)
#' leaflet() %>%
#' addGeoJSON(geojson_read(attr(sites,"file"))) %>%
#' addProviderTiles("CartoDB.Positron")
#'
#' lakeSites <- whatWQPdata(siteType = "Lake, Reservoir, Impoundment", statecode = "US:55")
#' }
whatWQPdata <- function(...){
  message("DISCLAIMER: This function is still in flux,
and no future behavior or output is guaranteed")

  # Normalize the ... arguments into WQP query parameters and
  # percent-encode each value so it is safe to embed in the URL.
  values <- readWQPdots(...)
  values <- sapply(values, function(x) URLencode(x, reserved = TRUE))

  # Build the query string and append it to the station endpoint,
  # requesting a geojson, unsorted response.
  urlCall <- paste(paste(names(values), values, sep = "="), collapse = "&")
  baseURL <- drURL("wqpStation")
  urlCall <- paste0(baseURL,
                    urlCall,
                    "&mimeType=geojson&sorted=no")

  # Stream the geojson response to a temporary file, then parse it.
  # The tempfile is intentionally kept (exposed via the "file" attribute)
  # so callers can re-read the raw geojson, e.g. with geojsonio::geojson_read.
  temp <- tempfile()
  temp <- paste0(temp, ".geojson")
  doc <- getWebServiceData(urlCall, write_disk(temp))
  # NOTE: the previous version also captured attr(doc, "headerInfo") into an
  # unused local; that dead assignment has been removed.

  retval <- fromJSON(temp)

  # Record provenance on the returned list.
  attr(retval, "queryTime") <- Sys.time()
  attr(retval, "url") <- urlCall
  attr(retval, "file") <- temp

  return(retval)
}
% Generated by roxygen2: do not edit by hand
% Please edit documentation in R/readNWISdata.r
\name{convertLists}
\alias{convertLists}
\title{convert variables in dots to usable format}
\usage{
convertLists(...)
}
\description{
convert variables in dots to usable format
}
\keyword{internal}
% Generated by roxygen2: do not edit by hand
% Please edit documentation in R/whatWQPdata.R
\name{whatWQPdata}
\alias{whatWQPdata}
\title{Data Available from Water Quality Portal}
\usage{
whatWQPdata(...)
}
\arguments{
\item{\dots}{see \url{https://www.waterqualitydata.us/webservices_documentation} for a complete list of options. A list of arguments can also be supplied.}
}
\value{
A list
}
\description{
Returns a list of sites from the Water Quality Portal web service. This function gets the data from: \url{https://www.waterqualitydata.us}.
Arguments to the function should be based on \url{https://www.waterqualitydata.us/webservices_documentation}
}
\examples{
\dontrun{
site1 <- whatWQPdata(siteid="USGS-01594440")
type <- "Stream"
sites <- whatWQPdata(countycode="US:55:025",siteType=type)
library(leaflet)
library(geojsonio)
leaflet() \%>\%
addGeoJSON(geojson_read(attr(sites,"file"))) \%>\%
addProviderTiles("CartoDB.Positron")
lakeSites <- whatWQPdata(siteType = "Lake, Reservoir, Impoundment", statecode = "US:55")
}
}
\keyword{WQP}
\keyword{data}
\keyword{import}
\keyword{service}
\keyword{web}
% Generated by roxygen2: do not edit by hand
% Please edit documentation in R/whatWQPdata.R
\name{whatWQPmetrics}
\alias{whatWQPmetrics}
\title{Activity Metrics from Water Quality Portal}
\usage{
whatWQPmetrics(...)
}
\arguments{
\item{\dots}{see \url{https://www.waterqualitydata.us/webservices_documentation} for a complete list of options. A list of arguments can also be supplied.}
}
\value{
A data frame
}
\description{
Returns a data frame of activity metrics from the Water Quality Portal web service. This function gets the data from: \url{https://www.waterqualitydata.us}.
Arguments to the function should be based on \url{https://www.waterqualitydata.us/webservices_documentation}
}
\examples{
\dontrun{
type <- "Stream"
sites <- whatWQPmetrics(countycode="US:55:025",siteType=type)
lakeSites <- whatWQPmetrics(siteType = "Lake, Reservoir, Impoundment", statecode = "US:55")
}
}
\keyword{WQP}
\keyword{data}
\keyword{import}
\keyword{service}
\keyword{web}
......@@ -46,6 +46,12 @@ test_that("External importRDB1 tests", {
#
# iceNoConvert <- importRDB1(urlIce, convertType=FALSE)
# expect_that(sum(iceNoConvert$X01_00060 == "Ice") > 0, is_true())
site <- "05427850"
url <- constructNWISURL(site,"00060","2015-01-01", "","dv",format="tsv")
expect_message(importRDB1(url))
})
context("importRDB")
......
......@@ -253,13 +253,14 @@ test_that("NGWMN functions working", {
expect_true(is.numeric(oneSite$value))
#sites with colons and NAs work
na_colons <- c(NA, bboxSites$site[200:212], NA, NA)
returnDF <- readNGWMNdata(service = "observation",
featureID = na_colons, asDateTime = FALSE)
expect_is(returnDF, "data.frame")
expect_true(nrow(returnDF) > 1)
expect_true(!is.null(attributes(returnDF)$siteInfo))
sites <- c("USGS:424427089494701", NA)
siteInfo <- readNGWMNsites(sites)
expect_is(siteInfo, "data.frame")
......
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment