diff --git a/DESCRIPTION b/DESCRIPTION index 536d8dc15a827b80aa1d358d56cb4b95e909b698..99fc11f8774489e6aaa27fc7a6147b5c7903c058 100644 --- a/DESCRIPTION +++ b/DESCRIPTION @@ -1,4 +1,4 @@ -Package: dataRetrievalSNAPSHOT +Package: dataRetrieval Type: Package Title: Retrieval functions for USGS data Version: 1.2.2 @@ -51,6 +51,7 @@ Collate: 'processQWData.r' 'constructNWISURL.r' 'getDataAvailability.r' + 'getMultipleParameterNames.r' Depends: R (>= 2.15.0) Imports: diff --git a/NAMESPACE b/NAMESPACE index e3a7026e8e22b7d5e2a1b7907368ee9868063cc5..da28655b01bc5179810dd2220326bf9a9fe12153 100644 --- a/NAMESPACE +++ b/NAMESPACE @@ -6,11 +6,12 @@ export(dateFormatCheck) export(formatCheckDate) export(formatCheckParameterCd) export(formatCheckSiteNumber) +export(getDVData) export(getDailyDataFromFile) -export(getDataAvailablilty) +export(getDataAvailability) export(getDataFromFile) -export(getDVData) export(getMetaData) +export(getMultipleParameterNames) export(getParameterInfo) export(getPreLoadedDailyData) export(getPreLoadedData) diff --git a/R/getDataAvailability.r b/R/getDataAvailability.r index 1ab606ca1626a955c3cea5b3e1478088d220a298..a9920a18f52f8e19286eff6d7831a77bb83dbaf5 100644 --- a/R/getDataAvailability.r +++ b/R/getDataAvailability.r @@ -3,15 +3,15 @@ #' Imports a table of available parameters, period of record, and count. There is also an option to load the long parameter names and additional information on the parameters with longNames=TRUE. #' #' @param siteNumber string USGS site number. This is usually an 8 digit number -#' @param interactive logical Option for interactive mode. If true, there is user interaction for error handling and data checks. +#' @param interactive logical Option for interactive mode. If true, a progress indicator is printed to the console. #' @param longNames logical indicates whether or not to make a web call to get long names of parameters. Be aware this could take a very long time if the station has lots of data. 
#' @keywords data import USGS web service #' @return retval dataframe with all information found in the expanded site file #' @export #' @examples #' # These examples require an internet connection to run -#' availableData <- getDataAvailablilty('05114000',interactive=FALSE) -getDataAvailablilty <- function(siteNumber="",interactive=TRUE, longNames=FALSE){ +#' availableData <- getDataAvailability('05114000',interactive=FALSE) +getDataAvailability <- function(siteNumber="",interactive=TRUE, longNames=FALSE){ # Checking for 8 digit site ID: siteNumber <- formatCheckSiteNumber(siteNumber,interactive=interactive) @@ -38,21 +38,10 @@ getDataAvailablilty <- function(siteNumber="",interactive=TRUE, longNames=FALSE) SiteFile$endDate <- as.Date(SiteFile$endDate) SiteFile$count <- as.numeric(SiteFile$count) + pCodes <- unique(SiteFile$parameter_cd) + if(longNames){ - pCodes <- unique(SiteFile$pCode) - numObs <- length(pCodes) - printUpdate <- floor(seq(1,numObs,numObs/100)) - for (i in 1:numObs){ - if (1 == i) { - pcodeINFO <- getParameterInfo(pCodes[i]) - } else { - pcodeINFO <- rbind(pcodeINFO, getParameterInfo(pCodes[i])) - } - if(interactive) { - cat("Percent complete: \n") - if(i %in% printUpdate) cat(floor(i*100/numObs),"\t") - } - } + pcodeINFO <- getMultipleParameterNames(pCodes,interactive) SiteFile <- merge(SiteFile,pcodeINFO,by="parameter_cd") } diff --git a/R/getMultipleParameterNames.r b/R/getMultipleParameterNames.r new file mode 100644 index 0000000000000000000000000000000000000000..83afe7c07caa6e2c596b216f367ff6916a5452e2 --- /dev/null +++ b/R/getMultipleParameterNames.r @@ -0,0 +1,32 @@ +#' USGS Multiple Parameter List +#' +#' Imports a table of information on a set of parameters such as parameter name, units, group, and srs name. +#' Warning! This function can be very slow because an individual web service call has to be made for each parameter. 
+#' There is currently no way to request multiple parameters from the web service and get the extended information. +#' +#' @param pCodes vector set of 5-digit parameter codes to gather information on +#' @param interactive logical Option for interactive mode. If true, a progress indicator is printed to the console. +#' @keywords data import USGS web service +#' @return retval dataframe with all information found in the expanded site file +#' @export +#' @examples +#' # These examples require an internet connection to run +#' availableData <- getMultipleParameterNames(c("00060", "00065", "00010"),interactive=FALSE) +getMultipleParameterNames <- function(pCodes, interactive=TRUE){ + + numObs <- length(pCodes) + printUpdate <- floor(seq(1,numObs,(numObs-1)/100)) + if(interactive) cat("Percent complete: \n") + for (i in 1:numObs){ + if (1 == i) { + pcodeINFO <- getParameterInfo(pCodes[i]) + } else { + pcodeINFO <- rbind(pcodeINFO, getParameterInfo(pCodes[i])) + } + if(interactive) { + + if(i %in% printUpdate) cat(floor(i*100/numObs),"\t") + } + } + return(pcodeINFO) +} \ No newline at end of file diff --git a/inst/doc/dataRetrieval.Rnw b/inst/doc/dataRetrieval.Rnw index 5c4ccc9f9b65907e21438dd08d4d3c85182995c4..2fca66a961d5fc9d2aa51588ef6e4af3146bb9b9 100644 --- a/inst/doc/dataRetrieval.Rnw +++ b/inst/doc/dataRetrieval.Rnw @@ -44,7 +44,7 @@ \SweaveOpts{concordance=TRUE} %------------------------------------------------------------ -\title{Introduction to the dataRetrieval package} +\title{The dataRetrieval R package} %------------------------------------------------------------ \author[1]{Laura De Cicco} \author[1]{Robert Hirsch} @@ -67,12 +67,12 @@ For information on getting started in R, downloading and installing the package, %------------------------------------------------------------ -\section{General USGS Web Retrieval Examples} +\section{General USGS Web Retrievals} %------------------------------------------------------------ In this section, we will run 
through 5 examples, documenting how to get raw data from the web. This includes site information (\ref{sec:usgsSite}), measured parameter information (\ref{sec:usgsParams}), historical daily values(\ref{sec:usgsDaily}), real-time current values (\ref{sec:usgsRT}), and water quality data (\ref{sec:usgsWQP}) or (\ref{sec:usgsSTORET}). We will use the Choptank River near Greensboro, MD as an example. The site-ID for this gage station is 01491000. Daily discharge measurements are available as far back as 1948. Additionally, forms of nitrate have been measured dating back to 1964. The functions/examples in this section are for raw data retrieval. This may or may not be the easiest data to work with. In the next section, we will use functions that retrieve and process the data in a dataframe that may prove more friendly for R analysis. %------------------------------------------------------------ -\subsection{USGS Web Retrieval Introduction} +\subsection{Introduction} %------------------------------------------------------------ The United States Geological Survey organizes their hydrological data in standard structure. Streamgages are located throughout the United States, and each streamgage has a unique ID. Often (but not always), these ID's are 8 digits. The first step to finding data is discoving this 8-digit ID. One potential tool for discovering data is Environmental Data Discovery and Transformation (EnDDaT): \url{http://cida.usgs.gov/enddat/}. Follow the example on the EnDDaT web page to learn how to discover USGS stations and available data from any location in the United States. 
@@ -122,10 +122,17 @@ print(data.table, %------------------------------------------------------------ -\subsection{USGS Site Information Retrievals} +\subsection{Site Information} \label{sec:usgsSite} %------------------------------------------------------------ -To obtain all of the available site information, use the getSiteFileData function: + +%------------------------------------------------------------ +\subsubsection{getSiteFileData} +\label{sec:usgsSiteFileData} +%------------------------------------------------------------ +Use the getSiteFileData function to obtain all of the information available for a particular USGS site such as full station name, drainage area, latitude, and longitude: + + <<label=getSite, echo=TRUE>>= library(dataRetrieval) # Site ID for Choptank River near Greensboro, MD @@ -141,7 +148,40 @@ ChoptankInfo$station.nm Site information is obtained from \url{http://waterservices.usgs.gov/rest/Site-Test-Tool.html} %------------------------------------------------------------ -\subsection{USGS Parameter Information Retrievals} +\subsubsection{getDataAvailability} +\label{sec:usgsDataAvailability} +%------------------------------------------------------------ +To find out the data available at a particular USGS site, such as measured parameters, period of record, and number of samples (count), use the getDataAvailability function: + +<<label=getSite, echo=TRUE>>= +# Site ID for Choptank River near Greensboro, MD +siteNumber <- "01491000" +ChoptankAvailableData <- getDataAvailability(siteNumber) +head(ChoptankAvailableData) +@ + +There is an additional argument to the getDataAvailability called longNames, which defaults to FALSE. Setting longNames to TRUE will cause the function to make a web service call for each parameter and return expanded information on that parameter. Currently, this is a very slow process because each parameter code makes a unique web service call. 
If the site does not have many measured parameters, setting longNames to TRUE is reasonable. + +It is also possible to only request information for certain variables. In the following example, we retrieve just the daily value (dv) parameter information from the Choptank data availability dataframe (excluding all unit value and water quality values). + +<<label=getSite, echo=TRUE>>= +# Continuing from the previous example: +# This pulls out just the daily data: +ChoptankDailyData <- ChoptankAvailableData["dv" == ChoptankAvailableData$service,] + +#Now, make a call to get all of the parameter information: +pCodeINFO <- getMultipleParameterNames(ChoptankDailyData$parameter_cd) + +#Merge the available dataframe with the parameter information dataframe: +ChoptankDailyData <- merge(ChoptankDailyData,pCodeINFO,by="parameter_cd") +head(ChoptankDailyData) + +@ + + + +%------------------------------------------------------------ +\subsection{Parameter Information} \label{sec:usgsParams} %------------------------------------------------------------ To obtain all of the available information concerning a measured parameter, use the getParameterInfo function: @@ -159,12 +199,12 @@ parameterINFO$parameter_nm Parameter information is obtained from \url{http://nwis.waterdata.usgs.gov/nwis/pmcodes/} %------------------------------------------------------------ -\subsection{USGS Daily Value Retrievals} +\subsection{Daily Values} \label{sec:usgsDaily} %------------------------------------------------------------ To obtain historic daily records of USGS data, use the retrieveNWISData function. The arguments for this function are siteNumber, parameterCd, startDate, endDate, statCd, and a logical (true/false) interactive. There are 2 default argument: statCd (defaults to \texttt{"}00003\texttt{"}), and interactive (defaults to TRUE). If you want to use the default values, you do not need to list them in the function call. 
Setting the 'interactive' option to true will walk you through the function. It might make more sense to run large batch collections with the interactive option set to FALSE. -The dates (start and end) need to be in the format \texttt{"}YYYY-MM-DD\texttt{"}. Setting the start date to \texttt{"}\texttt{"} will indicate to the program to ask for the earliest date, setting the end date to \texttt{"}\texttt{"} will ask for the latest available date. +The dates (start and end) need to be in the format \texttt{"}YYYY-MM-DD\texttt{"} (note: the user does need to include the quotes). Setting the start date to \texttt{"}\texttt{"} will indicate to the program to ask for the earliest date, setting the end date to \texttt{"}\texttt{"} will ask for the latest available date. <<label=getNWISDaily, echo=TRUE>>= # Using defaults: @@ -233,7 +273,7 @@ There are occasions where NWIS values are not reported as numbers, instead there %------------------------------------------------------------ -\subsection{USGS Unit Value Retrievals} +\subsection{Unit Values} \label{sec:usgsRT} %------------------------------------------------------------ Any data that are collected at regular time intervals (such as 15-minute or hourly) are known as \texttt{"}Unit Values\texttt{"} - many of these are delivered on a real time basis and very recent data (even less than an hour old in many cases) are available through the function retrieveUnitNWISData. Some of these Unit Values are available for the past several years, and some are only available for a recent time period such as 120 days or a year. Here is an example of a retrieval of such data. 
@@ -275,10 +315,10 @@ title(ChoptankInfo$station.nm) %------------------------------------------------------------ -\subsection{USGS Water Quality Retrievals} +\subsection{Water Quality Values} \label{sec:usgsWQP} %------------------------------------------------------------ -To get water quality data from water samples collected at the streamgage (as distinct from unit values collected through some type of automatic monitor) we can use the dataRetrieval package from the water quality data portal: \url{http://www.waterqualitydata.us/}. The raw data are obtained from the function getRawQWData, with the similar input arguments: siteNumber, parameterCd, startDate, endDate, and interactive. The difference is in parameterCd, in this function multiple parameters can be queried using a \texttt{"};\texttt{"} separator, and setting parameterCd to \texttt{"}\texttt{"} will return all of the measured observations. The raw data can be overwelming (as will be demonstrated), a simplified version of the data can be obtained using getQWData. +To get USGS water quality data from water samples collected at the streamgage (as distinct from unit values collected through some type of automatic monitor) we can use the Water Quality Data Portal: \url{http://www.waterqualitydata.us/}. The raw data are obtained from the function getRawQWData, with the similar input arguments: siteNumber, parameterCd, startDate, endDate, and interactive. The difference is in parameterCd, in this function multiple parameters can be queried using a \texttt{"};\texttt{"} separator, and setting parameterCd to \texttt{"}\texttt{"} will return all of the measured observations. The raw data can be overwhelming, a simplified version of the data can be obtained using getQWData. <<label=getQW, echo=TRUE>>= @@ -292,9 +332,7 @@ dissolvedNitrate <- getRawQWData(siteNumber, parameterCd, startDate, endDate) @ -There is a large amount of data returned for each observation. 
The column names are listed in Appendix 2 (\ref{sec:appendix2WQP}). - -To get a simplified dataframe that contains only datetime, value, and qualifier, use the function getQWData: +There is a large amount of data returned for each observation. The column names are listed in Appendix 2 (\ref{sec:appendix2WQP}). To get a simplified dataframe that contains only datetime, value, and qualifier, use the function getQWData: <<label=getQWData, echo=TRUE>>= dissolvedNitrateSimple <- getQWData(siteNumber, parameterCd, @@ -325,10 +363,10 @@ title(ChoptankInfo$station.nm) \end{figure} %------------------------------------------------------------ -\subsection{Other Water Quality Retrievals} +\subsection{STORET Water Quality Retrievals} \label{sec:usgsSTORET} %------------------------------------------------------------ -There are additional data sets available on the Water Quality Portal (\url{http://www.waterqualitydata.us/}). These data sets can be housed in either the STORET or NWIS database. Since STORET does not use USGS parameter codes, a \texttt{"}characteristic name\texttt{"} must be supplied. The following example retrieves specific conductance from a DNR site in Wisconsin. +There are additional data sets available on the Water Quality Data Portal (\url{http://www.waterqualitydata.us/}). These data sets can be housed in either the STORET (data from EPA) or NWIS database. Since STORET does not use USGS parameter codes, a \texttt{"}characteristic name\texttt{"} must be supplied. The following example retrieves specific conductance from a DNR site in Wisconsin. 
<<label=getQWData, echo=TRUE>>= specificCond <- getWQPData('WIDNR_WQX-10032762', @@ -338,7 +376,7 @@ head(specificCond) %------------------------------------------------------------ -\section{USGS Web Retrieval Examples Structured For Use In The EGRET Package} +\section{Data Retrievals Structured For Use In The EGRET Package} %------------------------------------------------------------ Rather than using the raw data as retrieved by the web, the dataRetrieval package also includes functions that return the data in a structure that has been designed to work with the EGRET R package (\url{https://github.com/USGS-R/EGRET/wiki}). In general, these dataframes may be much more 'R-friendly' than the raw data, and will contain additional date information that allows for efficient data analysis. @@ -362,7 +400,6 @@ The function to obtain the daily values (discharge in this case) is getDVData. <<firstExample>>= siteNumber <- "01491000" -parameterCd <- "00631" # Nitrate startDate <- "1964-01-01" endDate <- "2013-01-01" # This call will get NWIS data that is in cfs, and convert it @@ -415,13 +452,12 @@ print(data.table, caption.placement="top",include.rownames=FALSE,table.placement @ \footnotetext[1]{Flow columns are populated from data in the Daily dataframe after calling the mergeReport function.} -In the typical case where none of the data are censored (that is, no values are reported as \texttt{"}less-than\texttt{"} values) the ConcLow = ConcHigh = ConcAve all of which are equal to the reported value and Uncen=0. In the typical form of censoring where a value is reported as less than the reporting limit, then ConcLow = NA, ConcHigh = reporting limit, ConcAve = 0.5 * reporting limit, and Uncen = 1. The next section describes a more complex situation where concentrations are computed as the sum of one or more measured parameters. 
- - %------------------------------------------------------------ -\subsection{Complex Sample Data Example} +\subsection{Censored Data Evaluation} %------------------------------------------------------------ -As an example, let us say that in 2004 and earlier, we computed a total phosphorus (tp) as the sum of dissolved phosphorus (dp) and particulate phosphorus (pp). From 2005 and onward, we have direct measurements of total phosphorus (tp). A small subset of this fictional data looks like this: +In the typical case where none of the data are censored (that is, no values are reported as \texttt{"}less-than\texttt{"} values) the ConcLow = ConcHigh = ConcAve all of which are equal to the reported value and Uncen=0. In the typical form of censoring where a value is reported as less than the reporting limit, then ConcLow = NA, ConcHigh = reporting limit, ConcAve = 0.5 * reporting limit, and Uncen = 1. The next section describes a more complex situation where concentrations are computed as the sum of one or more measured parameters. + +As an example to understand how the dataRetrieval package handles a more complex censoring problem, let us say that in 2004 and earlier, we computed a total phosphorus (tp) as the sum of dissolved phosphorus (dp) and particulate phosphorus (pp). From 2005 and onward, we have direct measurements of total phosphorus (tp). A small subset of this fictional data looks like this: \begin{center} @@ -455,47 +491,29 @@ getPreLoadedSampleData(DF) @ -%------------------------------------------------------------ -\subsection{Merge Report} -%------------------------------------------------------------ -Finally, there is a function called mergeReport that will look at both the Daily and Sample dataframe, and populate Q and LogQ columns into the Sample dataframe. The default arguments are Daily and Sample, however if you want to use other similarly structured dataframes, you can specify localDaily or localSample. 
- -<<mergeExample>>= -startDate <-'1985-01-01' -endDate <- '1985-03-31' -site <- '01594440' -Daily <- getDVData(site,'00060', startDate, endDate, interactive=FALSE) -Sample <- getSampleData(site,'01075', startDate, endDate, interactive=FALSE) -Sample <- mergeReport() -head(Sample) -@ - - - -\newpage %------------------------------------------------------------ -\section{Ingesting User-Generated Data Files To Structure Them For Use In The EGRET Package} +\subsection{User-Generated Data Files} %------------------------------------------------------------ Aside from retrieving data from the USGS web services, the dataRetrieval package includes functions to generate the Daily and Sample data frame from local files. %------------------------------------------------------------ -\subsection{getDailyDataFromFile} +\subsubsection{getDailyDataFromFile} %------------------------------------------------------------ getDailyDataFromFile will load a user-supplied text file and convert it to the Daily dataframe. The file should have two columns, the first dates, the second values. The dates should be formatted either mm/dd/yyyy or yyyy-mm-dd. Using a 4-digit year is required. This function has the following inputs: filePath, fileName,hasHeader (TRUE/FALSE), separator, qUnit, and interactive (TRUE/FALSE). filePath is a string that defines the path to your file. This can either be a full path, or path relative to your R working directory. The input fileName is a string that defines the file name (including the extension). Text files that contain this sort of data require some sort of a separator, for example, a 'csv' file (comma-separated value) file uses a comma to separate the date and value column. A tab delimited file would use a tab (\texttt{"}\verb@\t@\texttt{"}) rather than the comma (\texttt{"},\texttt{"}). The type of separator you use can be defined in the function call in the \texttt{"}separator\texttt{"} argument, the default is \texttt{"},\texttt{\texttt{"}}. 
Another function input is a logical variable: hasHeader. The default is TRUE. If your data does not have column names, set this variable to FALSE. -Finally, qUnit is a numeric input that defines the discharge units. Flow from the NWIS web results are typically given in cubic feet per second (qUnit=1), but the EGRET package requires flow to be given in cubic meters per second (qUnit=2). Other allowed values are 10\verb@^@3 cubic feet per second (qUnit=3) and 10\verb@^@3 cubic meters per second (qUnit=4). If you do not want your data to be converted, use qUnit=2. The default is qUnit=1 (assumes flow is in cubic feet per second). +Finally, qUnit is a numeric argument that defines the discharge units used in the input file. The default is qUnit = 1 which assumes discharge is in cubic feet per second. If the discharge in the file is already in cubic meters per second then set qUnit = 2. If it is in some other units (like liters per second or acre-feet per day), the user will have to pre-process the data with a unit conversion that changes it to either cubic feet per second or cubic meters per second. So, if you have a file called \texttt{"}ChoptankRiverFlow.txt\texttt{"} located in a folder called \texttt{"}RData\texttt{"} on the C drive (this is a Window's example), and the file is structured as follows (tab-separated): \begin{verbatim} date Qdaily -10/1/1999 3.029902561 -10/2/1999 2.406931941 -10/3/1999 2.152080324 -10/4/1999 2.152080324 -10/5/1999 3.19980364 -10/6/1999 2.775050944 +10/1/1999 107 +10/2/1999 85 +10/3/1999 76 +10/4/1999 76 +10/5/1999 113 +10/6/1999 98 ... 
\end{verbatim} @@ -507,7 +525,7 @@ Daily <- getDailyDataFromFile(filePath,fileName,separator="\t",interactive=FALSE @ %------------------------------------------------------------ -\subsection{getSampleDataFromFile} +\subsubsection{getSampleDataFromFile} %------------------------------------------------------------ Similarly to the previous section, getSampleDataFromFile will import a user-generated file and populate the Sample dataframe. The difference between sample data and flow data is that the code requires a third column that contains a remark code, either blank or \texttt{"}\verb@<@\texttt{"}, which will tell the program that the data was 'left-censored' (or, below the detection limit of the sensor). Therefore, the data is required to be in the form: date, remark, value. If multiple constituents are going to be used, the format can be date, remark\_A, value\_A, remark\_b, value\_b, etc... An example of a comma-delimited file would be: @@ -527,8 +545,30 @@ filePath <- "C:/RData/" Sample <- getSampleDataFromFile(filePath,fileName,separator=",",interactive=FALSE) @ +%------------------------------------------------------------ +\subsection{Merge Report} +%------------------------------------------------------------ +Finally, there is a function called mergeReport that will look at both the Daily and Sample dataframe, and populate Q and LogQ columns into the Sample dataframe. The default arguments are Daily and Sample, however if you want to use other similarly structured dataframes, you can specify localDaily or localSample. Once mergeReport has been run, the Sample dataframe will be augmented with the daily discharges for all the days with samples. None of the water quality functions in EGRET will work without first having run the mergeReport function. 
+ + +<<mergeExample>>= +siteNumber <- "01491000" +parameterCd <- "00631" # Nitrate +startDate <- "1964-01-01" +endDate <- "2013-01-01" + +Daily <- getDVData(siteNumber, "00060", startDate, endDate,interactive=FALSE) +Sample <- getSampleData(siteNumber,parameterCd, startDate, endDate, interactive=FALSE) +Sample <- mergeReport() +head(Sample) +@ + + +%------------------------------------------------------------ +\subsection{EGRET Plots} +%------------------------------------------------------------ +The EGRET package.... -\newpage \appendix %------------------------------------------------------------ \section{Appendix 1: Getting Started} diff --git a/man/compressData.Rd b/man/compressData.Rd index 17d40a4d71f6d04dd298450d5fc90f0746fd515e..92d9134d44d9369c41a427c163d614c96601a8d5 100644 --- a/man/compressData.Rd +++ b/man/compressData.Rd @@ -36,6 +36,6 @@ value3 <- c(3,4,5) dataInput <- data.frame(dateTime, comment1, value1, comment2, value2, comment3, value3, stringsAsFactors=FALSE) compressData(dataInput, interactive=FALSE) } -\keyword{flow} \keyword{WRTDS} +\keyword{flow} diff --git a/man/constructNWISURL.Rd b/man/constructNWISURL.Rd index b62ae3135fa780f25f94b67cf6a6f4fd18108ec9..fe2096e0ae220a8e227ef02106d805ab2727f6e0 100644 --- a/man/constructNWISURL.Rd +++ b/man/constructNWISURL.Rd @@ -50,9 +50,9 @@ url_qw_single <- constructNWISURL(siteNumber,"34220",startDate,endDate,'qwdata') url_qw <- constructNWISURL(siteNumber,c('34247','30234','32104','34220'),startDate,endDate,'qwdata') url_wqp <- constructNWISURL(siteNumber,"34220",startDate,endDate,'wqp') } +\keyword{USGS} \keyword{data} \keyword{import} \keyword{service} -\keyword{USGS} \keyword{web} diff --git a/man/dataOverview.Rd b/man/dataOverview.Rd index c99da9b50f9d0142972deef2188cd549ca498cfd..6fe56a3ac2930bf546840bcbdb75fea354dadd3f 100644 --- a/man/dataOverview.Rd +++ b/man/dataOverview.Rd @@ -21,8 +21,8 @@ dataOverview(localDaily = exDaily, localSample = exSample) \seealso{ \code{\link{mergeReport}} } 
-\keyword{data} -\keyword{import} \keyword{USGS} \keyword{WRTDS} +\keyword{data} +\keyword{import} diff --git a/man/dateFormatCheck.Rd b/man/dateFormatCheck.Rd index 99532817221093d2a64e3ce0e6ac8e3e3aabd07b..dd75fee619a69001adad54bba538e9d5980ee496 100644 --- a/man/dateFormatCheck.Rd +++ b/man/dateFormatCheck.Rd @@ -18,6 +18,6 @@ date <- '1985-01-01' dateFormatCheck(date) } -\keyword{flow} \keyword{WRTDS} +\keyword{flow} diff --git a/man/formatCheckDate.Rd b/man/formatCheckDate.Rd index 0bf8637101b5ac2a8f016bcc8faccdc3edac4dfe..64c54b65061785ecf5741d3f1c4eac8c9ac3af0d 100644 --- a/man/formatCheckDate.Rd +++ b/man/formatCheckDate.Rd @@ -27,6 +27,6 @@ Date <- '1985-01-01' dateString <- 'StartDate' formatCheckDate(Date, dateString, interactive = FALSE) } -\keyword{flow} \keyword{WRTDS} +\keyword{flow} diff --git a/man/formatCheckParameterCd.Rd b/man/formatCheckParameterCd.Rd index 086bea0aa058edc05bb1775bf7025433c33b96ee..d1574546ed4940e46494b4c4bbd46bacce0da90e 100644 --- a/man/formatCheckParameterCd.Rd +++ b/man/formatCheckParameterCd.Rd @@ -23,6 +23,6 @@ pCode <- '01234' formatCheckParameterCd(pCode, interactive = FALSE) } -\keyword{flow} \keyword{WRTDS} +\keyword{flow} diff --git a/man/formatCheckSiteNumber.Rd b/man/formatCheckSiteNumber.Rd index 60a912a66d584e466306e309483b57e7a1f38d54..5d8b42cd9e9cb3a8d96bdf4d70ac01ec0fbe8a03 100644 --- a/man/formatCheckSiteNumber.Rd +++ b/man/formatCheckSiteNumber.Rd @@ -22,6 +22,6 @@ site<- '01234567' formatCheckSiteNumber(site, interactive = FALSE) } -\keyword{flow} \keyword{WRTDS} +\keyword{flow} diff --git a/man/getDVData.Rd b/man/getDVData.Rd index 00c4faca602080dadd5471d104db558a7dfccfa8..7dc0a25d34cf307103f25a063e3a4f770f2910c2 100644 --- a/man/getDVData.Rd +++ b/man/getDVData.Rd @@ -46,8 +46,8 @@ Daily <- getDVData('01594440','00060', '1985-01-01', '1985-03-31', interactive=F \code{\link{retrieveNWISData}}, \code{\link{populateDaily}} } -\keyword{data} -\keyword{import} \keyword{USGS} \keyword{WRTDS} +\keyword{data} 
+\keyword{import} diff --git a/man/getDailyDataFromFile.Rd b/man/getDailyDataFromFile.Rd index 95f1ece41ac37dce3485e090992a4e76fbc70cc5..2c8ded37b9a18f130b42c18ecc9f4d3787a3c696 100644 --- a/man/getDailyDataFromFile.Rd +++ b/man/getDailyDataFromFile.Rd @@ -40,9 +40,9 @@ filePath <- '~/RData/' # Sample format fileName <- 'ChoptankRiverFlow.txt' \dontrun{getDailyDataFromFile(filePath,fileName,separator="\\t")} } +\keyword{USGS} +\keyword{WRTDS} \keyword{data} \keyword{file} \keyword{import} -\keyword{USGS} -\keyword{WRTDS} diff --git a/man/getDataAvailability.Rd b/man/getDataAvailability.Rd new file mode 100644 index 0000000000000000000000000000000000000000..06dbf8e02217726e0d4a9cc9e24f837a4ac1eb38 --- /dev/null +++ b/man/getDataAvailability.Rd @@ -0,0 +1,39 @@ +\name{getDataAvailability} +\alias{getDataAvailability} +\title{USGS data availability} +\usage{ + getDataAvailability(siteNumber = "", interactive = TRUE, + longNames = FALSE) +} +\arguments{ + \item{siteNumber}{string USGS site number. This is + usually an 8 digit number} + + \item{interactive}{logical Option for interactive mode. + If true, a progress indicator is printed to the console.} + + \item{longNames}{logical indicates whether or not to make + a web call to get long names of parameters. Be aware this + could take a very long time if the station has lots of + data.} +} +\value{ + retval dataframe with all information found in the + expanded site file +} +\description{ + Imports a table of available parameters, period of + record, and count. There is also an option to load the + long parameter names and additional information on the + parameters with longNames=TRUE. 
+} +\examples{ +# These examples require an internet connection to run +availableData <- getDataAvailability('05114000',interactive=FALSE) +} +\keyword{USGS} +\keyword{data} +\keyword{import} +\keyword{service} +\keyword{web} + diff --git a/man/getMetaData.Rd b/man/getMetaData.Rd index 77a11bcff1f726b0ecde6225df4d5f1b3b21a8fe..fb57f7d818c177f9756645238dada84569867e04 100644 --- a/man/getMetaData.Rd +++ b/man/getMetaData.Rd @@ -39,10 +39,10 @@ # Automatically gets information about site 05114000 and temperature, no interaction with user INFO <- getMetaData('05114000','00010',interactive=FALSE) } +\keyword{USGS} +\keyword{WRTDS} \keyword{data} \keyword{import} \keyword{service} -\keyword{USGS} \keyword{web} -\keyword{WRTDS} diff --git a/man/getMultipleParameterNames.Rd b/man/getMultipleParameterNames.Rd new file mode 100644 index 0000000000000000000000000000000000000000..dc266131310a138046da77d2b30e5bf2c1c45482 --- /dev/null +++ b/man/getMultipleParameterNames.Rd @@ -0,0 +1,36 @@ +\name{getMultipleParameterNames} +\alias{getMultipleParameterNames} +\title{USGS Multiple Parameter List} +\usage{ + getMultipleParameterNames(pCodes, interactive = TRUE) +} +\arguments{ + \item{pCodes}{vector set of 5-digit parameter codes to + gather information on} + + \item{interactive}{logical Option for interactive mode. + If true, a progress indicator is printed to the console.} +} +\value{ + retval dataframe with all information found in the + expanded site file +} +\description{ + Imports a table of information on a set of parameters + such as parameter name, units, group, and srs name. + Warning! This function can be very slow because an + individual web service call has to be made for each + parameter. There is currently no way to request multiple + parameters from the web service and get the extended + information. 
+} +\examples{ +# These examples require an internet connection to run +availableData <- getMultipleParameterNames(c("00060", "00065", "00010"),interactive=FALSE) +} +\keyword{USGS} +\keyword{data} +\keyword{import} +\keyword{service} +\keyword{web} + diff --git a/man/getParameterInfo.Rd b/man/getParameterInfo.Rd index 6344c8980314b462fbeb13a1be1a227e0e06a434..fb9df67602ba06d11c33ae51c97619337e355980 100644 --- a/man/getParameterInfo.Rd +++ b/man/getParameterInfo.Rd @@ -28,9 +28,9 @@ paramINFO <- getParameterInfo('01075') paramINFO2 <- getParameterInfo('00931',interactive=FALSE) } +\keyword{USGS} \keyword{data} \keyword{import} \keyword{service} -\keyword{USGS} \keyword{web} diff --git a/man/getPreLoadedDailyData.Rd b/man/getPreLoadedDailyData.Rd index a39fbbe6d2b72f71d83137208ac0c65157e98722..8be9941cee8a1e22b9c42a2f1e1783955a01990b 100644 --- a/man/getPreLoadedDailyData.Rd +++ b/man/getPreLoadedDailyData.Rd @@ -28,7 +28,7 @@ \examples{ Daily <- getPreLoadedDailyData(ChoptankRiverFlow, interactive=FALSE) } +\keyword{WRTDS} \keyword{data} \keyword{import} -\keyword{WRTDS} diff --git a/man/getPreLoadedSampleData.Rd b/man/getPreLoadedSampleData.Rd index 50708517441fb2c28f93936156fd4fd6c024f9c7..1e8834a400df76773c56f0d66f2f281f4bffabf2 100644 --- a/man/getPreLoadedSampleData.Rd +++ b/man/getPreLoadedSampleData.Rd @@ -23,7 +23,7 @@ \examples{ Sample <- getPreLoadedSampleData(ChoptankRiverNitrate, interactive=FALSE) } +\keyword{WRTDS} \keyword{data} \keyword{import} -\keyword{WRTDS} diff --git a/man/getQWData.Rd b/man/getQWData.Rd index f38d6a2322478fb8c29957201f772fe36d05034b..8348de8d5e2cd018928e1498e8d0ce37842488d7 100644 --- a/man/getQWData.Rd +++ b/man/getQWData.Rd @@ -46,9 +46,9 @@ rawProcessedSample <- getQWData('01594440','01075', '1985-01-01', '1985-03-31') rawProcessedSampleAll <- getQWData('05114000','', '1985-01-01', '1985-03-31') rawProcessedSampleSelect <- getQWData('05114000','00915;00931', '1985-01-01', '1985-04-30', interactive=FALSE) } +\keyword{USGS} 
\keyword{data} \keyword{import} \keyword{service} -\keyword{USGS} \keyword{web} diff --git a/man/getRawQWData.Rd b/man/getRawQWData.Rd index a77fc37ffda4adb1e1bc2c75b3a3a5b26ab57a98..738023e62faec1f5e19e3f7973eb04b875e5d7d0 100644 --- a/man/getRawQWData.Rd +++ b/man/getRawQWData.Rd @@ -45,9 +45,9 @@ rawSample <- getRawQWData('01594440','01075', '1985-01-01', '1985-03-31') rawSampleAll <- getRawQWData('05114000','', '1985-01-01', '1985-03-31') rawSampleSelect <- getRawQWData('05114000',c('00915','00931'), '1985-01-01', '1985-04-30', interactive=FALSE) } +\keyword{USGS} \keyword{data} \keyword{import} \keyword{service} -\keyword{USGS} \keyword{web} diff --git a/man/getSampleData.Rd b/man/getSampleData.Rd index 782b13c6c3bf602196a178982319a1be56c4f766..f8fc6db8dd5b2a7f164977fe2e1c287b8eefcb84 100644 --- a/man/getSampleData.Rd +++ b/man/getSampleData.Rd @@ -46,8 +46,8 @@ Sample_Select <- getSampleData('05114000','00915;00931', '', '', interactive=FAL \code{\link{compressData}}, \code{\link{populateSampleColumns}} } -\keyword{data} -\keyword{import} \keyword{USGS} \keyword{WRTDS} +\keyword{data} +\keyword{import} diff --git a/man/getSampleDataFromFile.Rd b/man/getSampleDataFromFile.Rd index 34a0e31df6f8ab4745627cbc5afc3f4ea08c175f..f295b54c36bbffe76ecd4934edde2f67b6531f51 100644 --- a/man/getSampleDataFromFile.Rd +++ b/man/getSampleDataFromFile.Rd @@ -35,9 +35,9 @@ filePath <- '~/RData/' # Sample format fileName <- 'ChoptankRiverNitrate.csv' #Sample <- getSampleDataFromFile(filePath,fileName, separator=";",interactive=FALSE) } +\keyword{USGS} +\keyword{WRTDS} \keyword{data} \keyword{file} \keyword{import} -\keyword{USGS} -\keyword{WRTDS} diff --git a/man/getSiteFileData.Rd b/man/getSiteFileData.Rd index 53291b62e542ee7599124ff7866c16c95ed96e9a..37a7d14a54aa31799bcb74be2ebf6e8137323eac 100644 --- a/man/getSiteFileData.Rd +++ b/man/getSiteFileData.Rd @@ -24,9 +24,9 @@ # These examples require an internet connection to run siteINFO <- 
getSiteFileData('05114000',interactive=FALSE) } +\keyword{USGS} \keyword{data} \keyword{import} \keyword{service} -\keyword{USGS} \keyword{web} diff --git a/man/getWQPData.Rd b/man/getWQPData.Rd index 3f5c8e143044f445ad8c2d9c19f888cfaba54b7e..a3b275d477aa664bc684d798eb3b33d64ddcf9f5 100644 --- a/man/getWQPData.Rd +++ b/man/getWQPData.Rd @@ -43,9 +43,9 @@ getWQPData('USGS-01594440','Chloride', '', '') getWQPData('WIDNR_WQX-10032762','Specific conductance', '', '') } +\keyword{WQP} \keyword{data} \keyword{import} \keyword{service} \keyword{web} -\keyword{WQP} diff --git a/man/mergeReport.Rd b/man/mergeReport.Rd index 800f34ba5ab6684f1af1975045caad09cf96f825..5efff9d3878cddc49ca7a82527a2ece4d08a4f52 100644 --- a/man/mergeReport.Rd +++ b/man/mergeReport.Rd @@ -33,8 +33,8 @@ Sample <- mergeReport(interactive=FALSE) \code{\link{getDVData}}, \code{\link{populateSampleColumns}} } -\keyword{data} -\keyword{import} \keyword{USGS} \keyword{WRTDS} +\keyword{data} +\keyword{import} diff --git a/man/populateDaily.Rd b/man/populateDaily.Rd index e59aafb215c75061b4ac028d2c5a414956f109af..5722f563c0cd546c3c1cb92f01026ffb4690c6a3 100644 --- a/man/populateDaily.Rd +++ b/man/populateDaily.Rd @@ -33,6 +33,6 @@ Daily <- populateDaily(dataInput, 2, interactive=FALSE) \author{ Robert M. 
Hirsch \email{rhirsch@usgs.gov} } -\keyword{flow} \keyword{WRTDS} +\keyword{flow} diff --git a/man/processQWData.Rd b/man/processQWData.Rd index d5cc07572769a86d41ddf0ae9eb1f8082b06b5f6..660c8bb571bcef093ad463a30d4d779727da1c13 100644 --- a/man/processQWData.Rd +++ b/man/processQWData.Rd @@ -25,9 +25,9 @@ rawSample <- getRawQWData('01594440','01075', '1985-01-01', '1985-03-31') rawSampleSelect <- processQWData(rawSample) } +\keyword{USGS} \keyword{data} \keyword{import} \keyword{service} -\keyword{USGS} \keyword{web} diff --git a/man/retrieveNWISData.Rd b/man/retrieveNWISData.Rd index 80c547cd5cecffd3ad45efab4e28df5c63ce1e00..ec7d95ceeb50d9f359e53b6cc1f172330b3a18ee 100644 --- a/man/retrieveNWISData.Rd +++ b/man/retrieveNWISData.Rd @@ -47,9 +47,9 @@ rawDailyQ <- retrieveNWISData(siteNumber,pCode, startDate, endDate) rawDailyTemperature <- retrieveNWISData(siteNumber,'00010', startDate, endDate, StatCd='00001',interactive=FALSE) rawDailyQAndTempMeanMax <- retrieveNWISData(siteNumber,'00010,00060', startDate, endDate, StatCd='00001,00003', interactive=FALSE) } +\keyword{USGS} \keyword{data} \keyword{import} \keyword{service} -\keyword{USGS} \keyword{web} diff --git a/man/retrieveNWISqwData.Rd b/man/retrieveNWISqwData.Rd index 4e5a169e402d4b4af02344a248be66f16508e587..254045c748b2520c03b207a717b6a2034639f995 100644 --- a/man/retrieveNWISqwData.Rd +++ b/man/retrieveNWISqwData.Rd @@ -44,9 +44,9 @@ data$dateTime <- as.Date(data$dateTime) compressedData <- compressData(data, interactive=interactive) Sample <- populateSampleColumns(compressedData) } +\keyword{USGS} \keyword{data} \keyword{import} \keyword{service} -\keyword{USGS} \keyword{web} diff --git a/man/retrieveUnitNWISData.Rd b/man/retrieveUnitNWISData.Rd index 5228b6abf6422ca473a5b364761c62350c086d65..4622f8dd8136872c8f58ed1f8cd1eafa15ae7f9b 100644 --- a/man/retrieveUnitNWISData.Rd +++ b/man/retrieveUnitNWISData.Rd @@ -42,9 +42,9 @@ EndDate <- as.character(Sys.Date()) # These examples require an internet 
connection to run rawData <- retrieveUnitNWISData(siteNumber,ParameterCd,StartDate,EndDate,interactive=FALSE) } +\keyword{USGS} \keyword{data} \keyword{import} \keyword{service} -\keyword{USGS} \keyword{web}