diff --git a/NAMESPACE b/NAMESPACE
index 2e46c6ae4ecb1125324ba87dcca07ada497b0308..a618004aab011564717db5b9a77c49c5493fb36d 100644
--- a/NAMESPACE
+++ b/NAMESPACE
@@ -2,23 +2,23 @@
 
 export(constructNWISURL)
 export(constructWQPURL)
-export(getNWISData)
-export(getNWISDataAvailability)
-export(getNWISPcodeInfo)
-export(getNWISSiteInfo)
-export(getNWISSites)
-export(getNWISdvData)
-export(getNWISqwData)
-export(getNWISunitData)
-export(getRDB1Data)
-export(getWQPData)
-export(getWQPSites)
-export(getWQPqwData)
-export(getWaterML1Data)
-export(getWaterML2Data)
+export(importRDB1)
+export(importWQP)
+export(importWaterML1)
+export(importWaterML2)
 export(padVariable)
-export(parseWQPData)
+export(readNWISdata)
+export(readNWISdv)
+export(readNWISpCode)
+export(readNWISqw)
+export(readNWISsite)
+export(readNWISunit)
+export(readWQPdata)
+export(readWQPqw)
 export(renameColumns)
+export(whatNWISData)
+export(whatNWISsites)
+export(whatWQPsites)
 import(RCurl)
 import(XML)
 import(reshape2)
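A minimal usage sketch of the renamed exports above, under the convention they suggest (read* retrieves data, what* reports what is available, import* parses raw service output); the site numbers, parameter code, and dates come from examples elsewhere in this patch, and the calls need an internet connection.

library(dataRetrieval)
# what*: discover which data a site offers
availableData <- whatNWISData("05114000")
# read*: retrieve daily discharge for a period of interest
rawDailyQ <- readNWISdv("04085427", "00060", "2012-01-01", "2012-06-30")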
diff --git a/NEWS b/NEWS
index 2b4d6f4c5ffd72c75ad9a69025fedd7a11ae1a61..455366c1f883e80fb446eeab0a6735bcdffbe5e7 100644
--- a/NEWS
+++ b/NEWS
@@ -21,14 +21,14 @@ dataRetrieval 1.3.3
 * Updated getNWISSiteInfo to retrieve multiple site file datasets at once using a vector of siteNumbers as input argument.
 * Updated error-handling for Web service calls. More information is returned when errors happen
 * Added some basic processing to Water Quality Portal raw data retrievals. Date columns are returned as Date objects, value columns are numeric, and a column is created from the date/time/timezone columns that is POSIXct.
-* Added very generalized NWIS and WQP retrieval functions (getNWISData, getNWISSites, getGeneralWQPData, and getWQPSites) which allow the user to use any argument available on the Web service platform.
+* Added very generalized NWIS and WQP retrieval functions (readNWISdata, whatNWISsites, readWQPdata, and whatWQPsites) which allow the user to use any argument available on the Web service platform.
 
 
 dataRetrieval 1.3.2
 ===========
 
-* Deprecated getQWData, updated getWQPData to take either parameter code or characteristic name.
-* Changed the name of raw data retrievals to: getNWISqwData, getNWISunitData, getNWISdvData, and getWQPqwData (from: getNWISqwData, retrieveUnitNWISData, retrieveNWISData, getRawQWData)
+* Deprecated getQWData, updated readWQPdata to take either parameter code or characteristic name.
+* Changed the name of raw data retrievals to: readNWISqw, readNWISunit, readNWISdv, and readWQPqw (from: getNWISqwData, retrieveUnitNWISData, retrieveNWISData, getRawQWData)
 * Added NA warning to getDVData function
 * Updated mergeReport to allow for Sample data with different measurements taken on the same day
 
@@ -53,4 +53,4 @@ dataRetrieval 1.2.1
 
 * Expanded the capabilities to retrieve raw data from the web services.
 
-* Added Storet data retrievals in getWQPData function
+* Added Storet data retrievals in readWQPdata function
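A hedged sketch of the generalized retrieval functions mentioned above, using only argument values that appear in this patch; any argument supported by the underlying web service can be passed through the ... interface.

dataTemp     <- readNWISdata(stateCd = "OH", parameterCd = "00010")
siteListPhos <- whatNWISsites(stateCd = "OH", parameterCd = "00665")
pHData       <- readWQPdata(siteid = "USGS-04024315", characteristicName = "pH")
site1        <- whatWQPsites(siteid = "USGS-01594440")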
diff --git a/R/getNWISData.r b/R/getNWISData.r
index 98534ae9ae7081364fda19881f093e19904a97b6..8f541d7937dbe35fad2018b3f4ac89c940a5e936 100644
--- a/R/getNWISData.r
+++ b/R/getNWISData.r
@@ -10,9 +10,9 @@
 #' @return retval dataframe 
 #' @export
 #' @examples
-#' dataTemp <- getNWISData(stateCd="OH",parameterCd="00010")
-#' dataTempUnit <- getNWISData(sites="03086500", service="iv", parameterCd="00010")
-getNWISData <- function(service="dv", ...){
+#' dataTemp <- readNWISdata(stateCd="OH",parameterCd="00010")
+#' dataTempUnit <- readNWISdata(sites="03086500", service="iv", parameterCd="00010")
+readNWISdata <- function(service="dv", ...){
   
   matchReturn <- list(...)
   
@@ -27,6 +27,6 @@ getNWISData <- function(service="dv", ...){
     urlCall <- paste0(urlCall,"&siteOutput=expanded")
   }
   
-  retval <- getRDB1Data(urlCall)
+  retval <- importRDB1(urlCall)
   return(retval)
 }
diff --git a/R/getNWISDataAvailability.r b/R/getNWISDataAvailability.r
index 97c17b2f54536a685cae47c84aedf05202d0a4ef..16d577eb47996c52ee2db9b1f04fcac9cdd3daea 100644
--- a/R/getNWISDataAvailability.r
+++ b/R/getNWISDataAvailability.r
@@ -9,12 +9,11 @@
 #' @export
 #' @import RCurl
 #' @examples
-#' # These examples require an internet connection to run
-#' availableData <- getNWISDataAvailability('05114000')
+#' availableData <- whatNWISData('05114000')
 #' # To find just unit value ('instantaneous') data:
-#' uvData <- getNWISDataAvailability('05114000',type="uv")
-#' uvDataMulti <- getNWISDataAvailability(c('05114000','09423350'),type="uv")
-getNWISDataAvailability <- function(siteNumber,type=c("uv","dv","qw")){
+#' uvData <- whatNWISData('05114000',type="uv")
+#' uvDataMulti <- whatNWISData(c('05114000','09423350'),type="uv")
+whatNWISData <- function(siteNumber,type=c("uv","dv","qw")){
   
   siteNumber <- paste(siteNumber,collapse=",")
   
diff --git a/R/getNWISPcodeInfo.r b/R/getNWISPcodeInfo.r
index e98d6bfc8d3c04c84b7f02c1710b7d717a7f2245..3f110c026da27ac4914ca9d4f6e27303572989be 100644
--- a/R/getNWISPcodeInfo.r
+++ b/R/getNWISPcodeInfo.r
@@ -9,8 +9,8 @@
 #' @export
 #' @examples
 #' # These examples require an internet connection to run
-#' paramINFO <- getNWISPcodeInfo(c('01075','00060','00931'))
-getNWISPcodeInfo <- function(parameterCd){
+#' paramINFO <- readNWISpCode(c('01075','00060','00931'))
+readNWISpCode <- function(parameterCd){
  
   
   parameterCdFile <- parameterCdFile
diff --git a/R/getNWISSiteInfo.r b/R/getNWISSiteInfo.r
index e7201f232ffe647ded4d208473c1ac81302004c4..8ad90d3872638d4e1f4c8eaa0a6270bb4435e0b9 100644
--- a/R/getNWISSiteInfo.r
+++ b/R/getNWISSiteInfo.r
@@ -8,9 +8,9 @@
 #' @export
 #' @examples
 #' # These examples require an internet connection to run
-#' siteINFO <- getNWISSiteInfo('05114000')
-#' siteINFOMulti <- getNWISSiteInfo(c('05114000','09423350'))
-getNWISSiteInfo <- function(siteNumber){
+#' siteINFO <- readNWISsite('05114000')
+#' siteINFOMulti <- readNWISsite(c('05114000','09423350'))
+readNWISsite <- function(siteNumber){
   
   siteNumber <- paste(siteNumber,collapse=",")
   urlSitefile <- paste("http://waterservices.usgs.gov/nwis/site/?format=rdb&siteOutput=Expanded&sites=",siteNumber,sep = "")
diff --git a/R/getNWISSites.R b/R/getNWISSites.R
index 0cfe7c4db81aea6eda5695764b3904f6f9c2dfc5..7d4d1197d8e2d8b963ee973403b99d8be3644a14 100644
--- a/R/getNWISSites.R
+++ b/R/getNWISSites.R
@@ -10,8 +10,8 @@
 #' @export
 #' @import XML
 #' @examples
-#' siteListPhos <- getNWISSites(stateCd="OH",parameterCd="00665")
-getNWISSites <- function(...){
+#' siteListPhos <- whatNWISsites(stateCd="OH",parameterCd="00665")
+whatNWISsites <- function(...){
   
   matchReturn <- list(...)
 
diff --git a/R/getNWISdvData.r b/R/getNWISdvData.r
index 80a2ae5d240741f20fa03d0b079b22840bd3fe5a..637b265af76b4fee65a74eb878cf6c857800cee5 100644
--- a/R/getNWISdvData.r
+++ b/R/getNWISdvData.r
@@ -16,29 +16,28 @@
 #' @export
 #' @keywords data import USGS web service
 #' @examples
-#' # These examples require an internet connection to run
 #' siteNumber <- '04085427'
 #' startDate <- '2012-01-01'
 #' endDate <- '2012-06-30'
 #' pCode <- '00060'
-#' rawDailyQ <- getNWISdvData(siteNumber,pCode, startDate, endDate)
-#' rawDailyTemperature <- getNWISdvData(siteNumber,'00010', 
+#' rawDailyQ <- readNWISdv(siteNumber,pCode, startDate, endDate)
+#' rawDailyTemperature <- readNWISdv(siteNumber,'00010', 
 #'        startDate, endDate, statCd='00001')
-#' rawDailyTemperatureTSV <- getNWISdvData(siteNumber,'00010', 
+#' rawDailyTemperatureTSV <- readNWISdv(siteNumber,'00010', 
 #'        startDate, endDate, statCd='00001',format='tsv')
-#' rawDailyQAndTempMeanMax <- getNWISdvData(siteNumber,c('00010','00060'),
+#' rawDailyQAndTempMeanMax <- readNWISdv(siteNumber,c('00010','00060'),
 #'        startDate, endDate, statCd=c('00001','00003'))
-#' rawDailyMultiSites<- getNWISdvData(c("01491000","01645000"),c('00010','00060'),
+#' rawDailyMultiSites<- readNWISdv(c("01491000","01645000"),c('00010','00060'),
 #'        startDate, endDate, statCd=c('00001','00003'))
-getNWISdvData <- function (siteNumber,parameterCd,startDate,endDate,statCd="00003",format="tsv"){  
+readNWISdv <- function (siteNumber,parameterCd,startDate,endDate,statCd="00003",format="tsv"){  
   
   url <- constructNWISURL(siteNumber,parameterCd,startDate,endDate,"dv",statCd=statCd,format=format)
   
   if (format == "xml") {
-    data <- getWaterML1Data(url)
+    data <- importWaterML1(url)
     data$datetime <- as.Date(data$datetime)
   } else {
-    data <- getRDB1Data(url,asDateTime=FALSE)
+    data <- importRDB1(url,asDateTime=FALSE)
   }
   
   return (data)
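As a hedged illustration of what readNWISdv wraps in its default tsv path, the same daily-value request can be built and parsed directly with the lower-level functions; values are reused from the example above.

# build the daily-value URL, then parse the RDB response
url       <- constructNWISURL("04085427", "00060", "2012-01-01", "2012-06-30",
                              "dv", statCd = "00003", format = "tsv")
rawDailyQ <- importRDB1(url, asDateTime = FALSE)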
diff --git a/R/getNWISqwData.r b/R/getNWISqwData.r
index aa2173ab87def5201baa385b828066a96af9b08b..d7e8841b0ff927e126ee90f011644c6b06207447 100644
--- a/R/getNWISqwData.r
+++ b/R/getNWISqwData.r
@@ -15,21 +15,20 @@
 #' @return data dataframe with agency, site, dateTime, value, and code columns
 #' @export
 #' @import reshape2
-#' @seealso \code{\link{getWQPData}}, \code{\link{getWQPSites}}, 
-#' \code{\link{getWQPqwData}}, \code{\link{constructNWISURL}}
+#' @seealso \code{\link{readWQPdata}}, \code{\link{whatWQPsites}}, 
+#' \code{\link{readWQPqw}}, \code{\link{constructNWISURL}}
 #' @examples
-#' # These examples require an internet connection to run
 #' siteNumber <- c('04024430','04024000')
 #' startDate <- '2010-01-01'
 #' endDate <- ''
 #' pCodes <- c('34247','30234','32104','34220')
-#' rawNWISqwData <- getNWISqwData(siteNumber,pCodes,startDate,endDate)
-#' rawNWISqwDataExpand <- getNWISqwData(siteNumber,pCodes,startDate,endDate,expanded=TRUE)
-getNWISqwData <- function (siteNumber,pCodes,startDate,endDate,expanded=FALSE){  
+#' rawNWISqwData <- readNWISqw(siteNumber,pCodes,startDate,endDate)
+#' rawNWISqwDataExpand <- readNWISqw(siteNumber,pCodes,startDate,endDate,expanded=TRUE)
+readNWISqw <- function (siteNumber,pCodes,startDate,endDate,expanded=FALSE){  
   
   url <- constructNWISURL(siteNumber,pCodes,startDate,endDate,"qw",expanded=expanded)
   
-  data <- getRDB1Data(url,asDateTime=TRUE, qw=TRUE)
+  data <- importRDB1(url,asDateTime=TRUE, qw=TRUE)
   
   if(expanded){
     columnsToMelt <- c("agency_cd","site_no","sample_dt","sample_tm",
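readNWISqw is roughly the two-step sketch below (URL construction followed by RDB import); the expanded=TRUE branch above then reshapes that result. Sites, parameter codes, and dates are taken from the example above.

# build the water-quality URL, then parse the RDB response
url    <- constructNWISURL(c("04024430", "04024000"),
                           c("34247", "30234", "32104", "34220"),
                           "2010-01-01", "", "qw", expanded = TRUE)
qwData <- importRDB1(url, asDateTime = TRUE, qw = TRUE)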
diff --git a/R/getNWISunitData.r b/R/getNWISunitData.r
index 33174d292a38b8cc427ab3f6fa61b8a6acaeed67..2f6cf288e0450d766e73cc71dde9b2592dcc2612 100644
--- a/R/getNWISunitData.r
+++ b/R/getNWISunitData.r
@@ -20,17 +20,17 @@
 #' startDate <- "2014-10-10"
 #' endDate <- "2014-10-10"
 #' # These examples require an internet connection to run
-#' rawData <- getNWISunitData(siteNumber,parameterCd,startDate,endDate)
+#' rawData <- readNWISunit(siteNumber,parameterCd,startDate,endDate)
 #' summary(rawData)
-#' rawData2 <- getNWISunitData(siteNumber,parameterCd,startDate,endDate,"tsv")
+#' rawData2 <- readNWISunit(siteNumber,parameterCd,startDate,endDate,"tsv")
 #' summary(rawData2)
-getNWISunitData <- function (siteNumber,parameterCd,startDate,endDate,format="xml"){  
+readNWISunit <- function (siteNumber,parameterCd,startDate,endDate,format="xml"){  
   
   url <- constructNWISURL(siteNumber,parameterCd,startDate,endDate,"uv",format=format)
   if (format == "xml") {
-    data <- getWaterML1Data(url)
+    data <- importWaterML1(url)
   } else {
-    data <- getRDB1Data(url,asDateTime=TRUE)
+    data <- importRDB1(url,asDateTime=TRUE)
   }
 
   return (data)
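A comparable hedged sketch for unit values, showing the xml path that readNWISunit takes by default; the site, parameter code, and dates are reused from examples in this patch.

# build the instantaneous-value URL, then parse the WaterML 1.1 response
url      <- constructNWISURL("05114000", "00060", "2014-10-10", "2014-10-10",
                             "uv", format = "xml")
unitData <- importWaterML1(url)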
diff --git a/R/getRDB1Data.r b/R/getRDB1Data.r
index 7f9bd732a255ff9f828700db20b1e665f2f10e25..77930c8527ee8d21c9d32c34632097931c4ff294 100644
--- a/R/getRDB1Data.r
+++ b/R/getRDB1Data.r
@@ -16,14 +16,14 @@
 #' property <- "00060"
 #' obs_url <- constructNWISURL(siteNumber,property,
 #'          startDate,endDate,"dv",format="tsv")
-#' data <- getRDB1Data(obs_url)
+#' data <- importRDB1(obs_url)
 #' urlMulti <- constructNWISURL("04085427",c("00060","00010"),
 #'          startDate,endDate,"dv",statCd=c("00003","00001"),"tsv")
-#' multiData <- getRDB1Data(urlMulti)
+#' multiData <- importRDB1(urlMulti)
 #' unitDataURL <- constructNWISURL(siteNumber,property,
 #'          "2014-10-10","2014-10-10","uv",format="tsv")
-#' unitData <- getRDB1Data(unitDataURL, asDateTime=TRUE)
-getRDB1Data <- function(obs_url,asDateTime=FALSE, qw=FALSE){
+#' unitData <- importRDB1(unitDataURL, asDateTime=TRUE)
+importRDB1 <- function(obs_url,asDateTime=FALSE, qw=FALSE){
   
   retval = tryCatch({
     h <- basicHeaderGatherer()
diff --git a/R/getWQPData.r b/R/getWQPData.r
index ee65961c709d0147be9bcf26f44423f553723e57..95a8074ec72fe839d9e1c1be50ee8f21427817ca 100644
--- a/R/getWQPData.r
+++ b/R/getWQPData.r
@@ -11,9 +11,9 @@
 #' @examples
 #' \dontrun{
 #' nameToUse <- "pH"
-#' pHData <- getWQPData(siteid="USGS-04024315",characteristicName=nameToUse)
+#' pHData <- readWQPdata(siteid="USGS-04024315",characteristicName=nameToUse)
 #' }
-getWQPData <- function(...){
+readWQPdata <- function(...){
   
   matchReturn <- list(...)
   
@@ -36,7 +36,7 @@ getWQPData <- function(...){
                    urlCall,
                    "&mimeType=tsv",sep = "")
 
-  retVal <- parseWQPData(urlCall)
+  retVal <- importWQP(urlCall)
   return(retVal)
   
 }
\ No newline at end of file
diff --git a/R/getWQPSites.R b/R/getWQPSites.R
index 1d3d2d3f952630286e57bbd72cbe7eab006efbcd..51bf9d37a6907ddd0bf0e1a5e8c022b6eaf1afdb 100644
--- a/R/getWQPSites.R
+++ b/R/getWQPSites.R
@@ -9,12 +9,13 @@
 #' (subsequent qualifier/value columns could follow depending on requested parameter codes)
 #' @export
 #' @examples
+#' 
+#' site1 <- whatWQPsites(siteid="USGS-01594440")
 #' \dontrun{
-#' site1 <- getWQPSites(siteid="USGS-01594440")
 #' type <- "Stream"
-#' sites <- getWQPSites(statecode="US:55",countycode="US:55:025",siteType=type)
+#' sites <- whatWQPsites(statecode="US:55",countycode="US:55:025",siteType=type)
 #' }
-getWQPSites <- function(...){
+whatWQPsites <- function(...){
 
   matchReturn <- list(...)
   
diff --git a/R/getWQPqwData.r b/R/getWQPqwData.r
index 3145303ba8cb354f25e34547121916838b71aecf..1f7fe43d5be9af23c0cdcaa957f30d90f094de19 100644
--- a/R/getWQPqwData.r
+++ b/R/getWQPqwData.r
@@ -17,16 +17,16 @@
 #' start and end times.
 #' @export
 #' @import RCurl
-#' @seealso \code{\link{getWQPData}}, \code{\link{getWQPSites}}, 
-#' \code{\link{getNWISqwData}}, and \code{\link{parseWQPData}}
+#' @seealso \code{\link{readWQPdata}}, \code{\link{whatWQPsites}}, 
+#' \code{\link{readNWISqw}}, and \code{\link{importWQP}}
 #' @examples
-#' rawPcode <- getWQPqwData('USGS-01594440','01075', '1985-01-01', '1985-03-31')
-#' rawCharacteristicName <- getWQPqwData('WIDNR_WQX-10032762','Specific conductance', '', '')
+#' rawPcode <- readWQPqw('USGS-01594440','01075', '1985-01-01', '1985-03-31')
+#' rawCharacteristicName <- readWQPqw('WIDNR_WQX-10032762','Specific conductance', '', '')
 #' 
-getWQPqwData <- function(siteNumber,parameterCd,startDate,endDate){
+readWQPqw <- function(siteNumber,parameterCd,startDate,endDate){
 
   url <- constructWQPURL(siteNumber,parameterCd,startDate,endDate)
-  retVal <- parseWQPData(url)
+  retVal <- importWQP(url)
   return(retVal)
   
 }
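readWQPqw is a thin wrapper around the two calls it makes; a minimal equivalent sketch, reusing the example site and parameter code from above:

# build the Water Quality Portal URL, then parse the tab-separated response
url      <- constructWQPURL("USGS-01594440", "01075", "1985-01-01", "1985-03-31")
rawPcode <- importWQP(url)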
diff --git a/R/getWaterML1Data.r b/R/getWaterML1Data.r
index 74a51381d6aea7d969be86a64f51d61ba36f6f0b..b0f9ccecf4753d1ba1a4132bfd53849ec2f73b0c 100644
--- a/R/getWaterML1Data.r
+++ b/R/getWaterML1Data.r
@@ -15,20 +15,20 @@
 #' property <- '00060'
 #' urlBase <- "http://waterservices.usgs.gov/nwis"
 #' obs_url <- constructNWISURL(siteNumber,property,startDate,endDate,'dv')
-#' data <- getWaterML1Data(obs_url)
+#' data <- importWaterML1(obs_url)
 #' urlMulti <- constructNWISURL("04085427",c("00060","00010"),
 #'             startDate,endDate,'dv',statCd=c("00003","00001"))
-#' multiData <- getWaterML1Data(urlMulti)
+#' multiData <- importWaterML1(urlMulti)
 #' groundWaterSite <- "431049071324301"
 #' startGW <- "2013-10-01"
 #' endGW <- "2014-06-30"
 #' groundwaterExampleURL <- constructNWISURL(groundWaterSite, NA,
 #'           startGW,endGW, service="gwlevels", format="xml")
-#' groundWater <- getWaterML1Data(groundwaterExampleURL)
+#' groundWater <- importWaterML1(groundwaterExampleURL)
 #' unitDataURL <- constructNWISURL(siteNumber,property,
 #'          "2014-10-10","2014-10-10",'uv',format='xml')
-#' unitData <- getWaterML1Data(unitDataURL)
-getWaterML1Data <- function(obs_url){
+#' unitData <- importWaterML1(unitDataURL)
+importWaterML1 <- function(obs_url){
   
   h <- basicHeaderGatherer()
   doc = tryCatch({
diff --git a/R/getWaterML2Data.r b/R/getWaterML2Data.r
index fd5bb469f65e9933ccb60bd24031e4d07130e448..6ff20c31f6f8c3053683e99dcb88edf4235e651e 100644
--- a/R/getWaterML2Data.r
+++ b/R/getWaterML2Data.r
@@ -14,8 +14,8 @@
 #'      "endDT=2014-09-08",
 #'      "statCd=00003",
 #'      "parameterCd=00060",sep="&")
-#' \dontrun{dataReturned3 <- getWaterML2Data(URL)}
-getWaterML2Data <- function(obs_url){
+#' \dontrun{dataReturned3 <- importWaterML2(URL)}
+importWaterML2 <- function(obs_url){
   
   h <- basicHeaderGatherer()
   doc = tryCatch({
diff --git a/R/parseWQPData.R b/R/parseWQPData.R
index 6463452bb51497804556b60490ae6d1309ed0c21..ee13dad6bd99698968e159a89fbb3c9f257bbd91 100644
--- a/R/parseWQPData.R
+++ b/R/parseWQPData.R
@@ -13,9 +13,9 @@
 #' 
 #' ## Examples take longer than 5 seconds:
 #' rawSampleURL <- constructWQPURL('USGS-01594440','01075', '1985-01-01', '1985-03-31')
-#' rawSample <- parseWQPData(rawSampleURL)
+#' rawSample <- importWQP(rawSampleURL)
 #' 
-parseWQPData <- function(url){
+importWQP <- function(url){
   
   h <- basicHeaderGatherer()
   
diff --git a/R/renameColumns.R b/R/renameColumns.R
index dedb3736d757dd35a82e62a354686e72f60e7393..aad4b59cbb0a7ccc6b552214813ce923faff5ee9 100644
--- a/R/renameColumns.R
+++ b/R/renameColumns.R
@@ -9,11 +9,11 @@
 #' @examples
 #' # This example requires an internet connection to run
 #' siteNumber <- '05114000' 
-#' rawData <- getNWISdvData(siteNumber,c("00010","00060","00300"),
+#' rawData <- readNWISdv(siteNumber,c("00010","00060","00300"),
 #'           "2001-01-01","2002-01-01",statCd=c("00001","00003"))
 #' rawData <- renameColumns(rawData)
 #' date <- "2014-10-10"
-#' rawData2 <- getNWISunitData(siteNumber,c("00010","00060"),date,date)
+#' rawData2 <- readNWISunit(siteNumber,c("00010","00060"),date,date)
 #' rawData2 <- renameColumns(rawData2)
 #' head(rawData2)
 renameColumns <- function(rawData){
@@ -27,7 +27,7 @@ renameColumns <- function(rawData){
   pCodes <- sapply(strsplit(dataCol_names, "_"), function(x) x[2])
   statCd <- sapply(strsplit(dataCol_names, "_"), function(x) x[3])
   
-  pcodeINFO <- getNWISPcodeInfo(pCodes)
+  pcodeINFO <- readNWISpCode(pCodes)
   multipleCodes <- anyDuplicated(pCodes)
   
   statCd <- sub("00001", "_Max", statCd)
diff --git a/README.md b/README.md
index 9e77d229aee9599e4083ed63c31d47ccec127529..c66ffbf432523d51bbd100ceb475d76ac0554929 100644
--- a/README.md
+++ b/README.md
@@ -14,12 +14,14 @@ Retrieval functions for USGS and EPA hydrologic and water quality data.
 | -------------| -------------| ------------- |:-------------|
 |NWIS | `getNWISSites` | `getNWISSiteInfo`| `getNWISData` |
 | | `getNWISDataAvailability` |  | `getNWISdvData` |
-| | | `getNWISPcodeInfo` | `getNWISqwData`|
+| | | `readNWISpCode` | `readNWISqw`|
 | | | | `getNWISunitData` |
-| Water Quality Portal | `getWQPSites` |  | `getWQPData` |
+| Water Quality Portal* | `whatWQPsites` |  | `readWQPdata` |
 | | | | `getWQPqwData` |
 
 
+\* Water Quality Portal includes USGS, EPA, and USDA data
+
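A hedged one-call sketch for each group in the table above, assuming the renamed exports and using argument values that appear in the package examples:

siteListPhos <- whatNWISsites(stateCd = "OH", parameterCd = "00665")   # site discovery
siteINFO     <- readNWISsite("05114000")                               # site metadata
paramINFO    <- readNWISpCode(c("01075", "00060", "00931"))            # parameter metadata
rawPcode     <- readWQPqw("USGS-01594440", "01075", "1985-01-01", "1985-03-31")   # data retrieval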
 
 Moving `EGRET` specific functions to `EGRET` (version 2.0.0 and greater):
 
@@ -53,7 +55,10 @@ To install the dataRetrieval package, you must be using R 3.0 or greater and run
 ---------------
 
 ###dataRetrieval 1.5.0
-Changing naming convention one last time. Migrating `EGRET` specific retrievals to `EGRET`.
+
+* Changing naming convention one last time. Migrating `EGRET` specific retrievals to `EGRET`.
+* Added back WaterML2 parsing tool
+* Added specific groundwater, rating, peak, and surface-water measurement functions
 
 
 
@@ -82,13 +87,13 @@ Changed naming convention:
 * Updated getNWISSiteInfo to retrieve multiple site file datasets at once using a vector of siteNumbers as input argument.
 * Updated error-handling for Web service calls. More information is returned when errors happen
 * Added some basic processing to Water Quality Portal raw data retrievals. Date columns are returned as Date objects, value columns are numeric, and a column is created from the date/time/timezone columns that is POSIXct.
-* Added very generalized NWIS and WQP retrieval functions (getNWISData, getNWISSites, getGeneralWQPData, and getWQPSites) which allow the user to use any argument available on the Web service platform.
+* Added very generalized NWIS and WQP retrieval functions (readNWISdata, whatNWISsites, readWQPdata, and whatWQPsites) which allow the user to use any argument available on the Web service platform.
 
 
 ###dataRetrieval 1.3.2
 
-* Deprecated getQWData, updated getWQPData to take either parameter code or characteristic name.
-* Changed the name of raw data retrievals to: getNWISqwData, getNWISunitData, getNWISdvData, and getWQPqwData (from: getNWISqwData, retrieveUnitNWISData, retrieveNWISData, getRawQWData)
+* Deprecated getQWData, updated readWQPdata to take either parameter code or characteristic name.
+* Changed the name of raw data retrievals to: readNWISqw, readNWISunit, readNWISdv, and readWQPqw (from: getNWISqwData, retrieveUnitNWISData, retrieveNWISData, getRawQWData)
 * Added NA warning to getDVData function
 * Updated mergeReport to allow for Sample data with different measurements taken on the same day
 
diff --git a/inst/doc/dataRetrieval.R b/inst/doc/dataRetrieval.R
deleted file mode 100644
index e6937e55c196fe9e8a41fb4665b893f69d51d005..0000000000000000000000000000000000000000
--- a/inst/doc/dataRetrieval.R
+++ /dev/null
@@ -1,297 +0,0 @@
-## ----openLibrary, echo=FALSE------------------------------
-library(xtable)
-options(continue=" ")
-options(width=60)
-library(knitr)
-
-
-## ----include=TRUE ,echo=FALSE,eval=TRUE-------------------
-opts_chunk$set(highlight=TRUE, tidy=TRUE, keep.space=TRUE, keep.blank.space=FALSE, keep.comment=TRUE, tidy=FALSE,comment="")
-knit_hooks$set(inline = function(x) {
-   if (is.numeric(x)) round(x, 3)})
-knit_hooks$set(crop = hook_pdfcrop)
-
-bold.colHeaders <- function(x) {
-  x <- gsub("\\^(\\d)","$\\^\\1$",x)
-  x <- gsub("\\%","\\\\%",x)
-  x <- gsub("\\_"," ",x)
-  returnX <- paste("\\multicolumn{1}{c}{\\textbf{\\textsf{", x, "}}}", sep = "")
-}
-addSpace <- function(x) ifelse(x != "1", "[5pt]","")
-
-## ----workflow, echo=TRUE,eval=FALSE-----------------------
-#  library(dataRetrieval)
-#  # Choptank River near Greensboro, MD
-#  siteNumber <- "01491000"
-#  ChoptankInfo <- getNWISSiteInfo(siteNumber)
-#  parameterCd <- "00060"
-#  
-#  #Raw daily data:
-#  rawDailyData <- getNWISdvData(siteNumber,parameterCd,
-#                        "1980-01-01","2010-01-01")
-#  # Data compiled for EGRET analysis
-#  Daily <- getNWISDaily(siteNumber,parameterCd,
-#                        "1980-01-01","2010-01-01")
-#  
-#  # Sample data Nitrate:
-#  parameterCd <- "00618"
-#  Sample <- getNWISSample(siteNumber,parameterCd,
-#                        "1980-01-01","2010-01-01")
-#  
-#  # Metadata on site and nitrate:
-#  INFO <- getNWISInfo(siteNumber,parameterCd)
-#  
-#  # Merge discharge and nitrate data to one dataframe:
-#  Sample <- mergeReport()
-#  
-
-## ----tableParameterCodes, echo=FALSE,results='asis'-------
-pCode <- c('00060', '00065', '00010','00045','00400')
-shortName <- c("Discharge [ft$^3$/s]","Gage height [ft]","Temperature [C]", "Precipitation [in]", "pH")
-
-data.df <- data.frame(pCode, shortName, stringsAsFactors=FALSE)
-
-print(xtable(data.df,
-       label="tab:params",
-       caption="Common USGS Parameter Codes"),
-       caption.placement="top",
-       size = "\\footnotesize",
-       latex.environment=NULL,
-       sanitize.text.function = function(x) {x},
-       sanitize.colnames.function =  bold.colHeaders,
-       sanitize.rownames.function = addSpace
-      )
-
-
-## ----tableParameterCodesDataRetrieval---------------------
-library(dataRetrieval)
-parameterCdFile <-  parameterCdFile
-names(parameterCdFile)
-
-## ----tableStatCodes, echo=FALSE,results='asis'------------
-StatCode <- c('00001', '00002', '00003','00008')
-shortName <- c("Maximum","Minimum","Mean", "Median")
-
-data.df <- data.frame(StatCode, shortName, stringsAsFactors=FALSE)
-
-print(xtable(data.df,label="tab:stat",
-           caption="Commonly used USGS Stat Codes"),
-       caption.placement="top",
-       size = "\\footnotesize",
-       latex.environment=NULL,
-       sanitize.colnames.function = bold.colHeaders,
-       sanitize.rownames.function = addSpace
-      )
-
-
-## ----getSite, echo=TRUE-----------------------------------
-siteNumbers <- c("01491000","01645000") 
-siteINFO <- getNWISSiteInfo(siteNumbers)
-
-## ----siteNames2, echo=TRUE--------------------------------
-siteINFO$station.nm
-
-## ----getSiteExtended, echo=TRUE---------------------------
-# Continuing from the previous example:
-# This pulls out just the daily data:
-
-dailyDataAvailable <- getNWISDataAvailability(siteNumbers,
-                    type="dv")
-
-
-## ----tablegda, echo=FALSE,results='asis'------------------
-tableData <- with(dailyDataAvailable, 
-      data.frame( 
-      siteNumber= site_no,
-      srsname=srsname, 
-      startDate=as.character(startDate), 
-      endDate=as.character(endDate), 
-      count=as.character(count),
-      units=parameter_units,
-      statCd = statCd,
-      stringsAsFactors=FALSE)
-      )
-
-tableData$units[which(tableData$units == "ft3/s")] <- "ft$^3$/s"
-tableData$units[which(tableData$units == "uS/cm @25C")] <- "$\\mu$S/cm @25C"
-
-
-print(xtable(tableData,label="tab:gda",
-    caption="Daily mean data availabile at the Choptank River near Greensboro, MD. [Some columns deleted for space considerations]"),
-       caption.placement="top",
-       size = "\\footnotesize",
-       latex.environment=NULL,
-       sanitize.text.function = function(x) {x},
-       sanitize.colnames.function =  bold.colHeaders,
-       sanitize.rownames.function = addSpace
-      )
-
-
-## ----label=getPCodeInfo, echo=TRUE------------------------
-# Using defaults:
-parameterCd <- "00618" 
-parameterINFO <- getNWISPcodeInfo(parameterCd)
-colnames(parameterINFO)
-
-## ----siteNames, echo=TRUE---------------------------------
-parameterINFO$parameter_nm
-
-## ----label=getNWISDaily, echo=TRUE, eval=TRUE-------------
-
-# Continuing with our Choptank River example
-siteNumber <- "01491000"
-parameterCd <- "00060"  # Discharge
-startDate <- ""  # Will request earliest date
-endDate <- "" # Will request latest date
-
-discharge <- getNWISdvData(siteNumber, 
-                    parameterCd, startDate, endDate)
-names(discharge)
-
-## ----label=getNWIStemperature, echo=TRUE------------------
-
-parameterCd <- c("00010","00060")  # Temperature and discharge
-statCd <- c("00001","00003")  # Mean and maximum
-startDate <- "2012-01-01"
-endDate <- "2012-05-01"
-
-temperatureAndFlow <- getNWISdvData(siteNumber, parameterCd, 
-        startDate, endDate, statCd=statCd)
-
-
-## ----label=renameColumns, echo=TRUE-----------------------
-names(temperatureAndFlow)
-
-temperatureAndFlow <- renameColumns(temperatureAndFlow)
-names(temperatureAndFlow)
-
-## ----getNWIStemperaturePlot, echo=TRUE, fig.cap="Temperature and discharge plot of Choptank River in 2012.",out.width='1\\linewidth',out.height='1\\linewidth',fig.show='hold'----
-par(mar=c(5,5,5,5)) #sets the size of the plot window
-
-with(temperatureAndFlow, plot(
-  datetime, Temperature_water_degrees_Celsius_Max_01,
-  xlab="Date",ylab="Max Temperature [C]"
-  ))
-par(new=TRUE)
-with(temperatureAndFlow, plot(
-  datetime, Discharge_cubic_feet_per_second,
-  col="red",type="l",xaxt="n",yaxt="n",xlab="",ylab="",axes=FALSE
-  ))
-axis(4,col="red",col.axis="red")
-mtext(expression(paste("Mean Discharge [ft"^"3","/s]",
-                       sep="")),side=4,line=3,col="red")
-title(paste(siteINFO$station.nm[1],"2012",sep=" "))
-legend("topleft", c("Max Temperature", "Mean Discharge"), 
-       col=c("black","red"),lty=c(NA,1),pch=c(1,NA))
-
-## ----label=getNWISUnit, echo=TRUE-------------------------
-
-parameterCd <- "00060"  # Discharge
-startDate <- "2012-05-12" 
-endDate <- "2012-05-13" 
-dischargeToday <- getNWISunitData(siteNumber, parameterCd, 
-        startDate, endDate)
-
-## ----dischargeData, echo=FALSE----------------------------
-head(dischargeToday)
-
-## ----label=getQW, echo=TRUE-------------------------------
- 
-# Dissolved Nitrate parameter codes:
-parameterCd <- c("00618","71851")
-startDate <- "1985-10-01"
-endDate <- "2012-09-30"
-
-dissolvedNitrate <- getNWISqwData(siteNumber, parameterCd, 
-      startDate, endDate, expanded=TRUE)
-names(dissolvedNitrate)
-
-
-
-## ----getQWtemperaturePlot, echo=TRUE, fig.cap=paste(parameterINFO$parameter_nm, "at", siteINFO$station.nm[1])----
-with(dissolvedNitrate, plot(
-  startDateTime, result_va_00618,
-  xlab="Date",ylab = paste(parameterINFO$srsname,
-      "[",parameterINFO$parameter_units,"]")
-  ))
-title(siteINFO$station.nm[1])
-
-## ----label=geturl, echo=TRUE, eval=FALSE------------------
-#  # Dissolved Nitrate parameter codes:
-#  pCode <- c("00618","71851")
-#  startDate <- "1964-06-11"
-#  endDate <- "2012-12-18"
-#  url_qw <- constructNWISURL(siteNumber,pCode,startDate,endDate,'qw')
-#  url_dv <- constructNWISURL(siteNumber,"00060",startDate,endDate,
-#                             'dv',statCd="00003")
-#  url_uv <- constructNWISURL(siteNumber,"00060",startDate,endDate,'uv')
-
-## ----label=getQWData, echo=TRUE, eval=FALSE---------------
-#  specificCond <- getWQPqwData('WIDNR_WQX-10032762',
-#                  'Specific conductance','2011-05-01','2011-09-30')
-
-## ----siteSearch-------------------------------------------
-sites <- getNWISSites(bBox="-83.0,36.5,-81.0,38.5", 
-                      parameterCd="00010,00060",
-                      hasDataTypeCd="dv")
-
-names(sites)
-nrow(sites)
-
-## ----dataExample------------------------------------------
-dischargeWI <- getNWISData(stateCd="WI",
-                           parameterCd="00060",
-                           drainAreaMin="50",
-                           statCd="00003")
-names(dischargeWI)
-nrow(dischargeWI)
-
-## ----NJChloride, eval=FALSE-------------------------------
-#  
-#  sitesNJ <- getWQPSites(statecode="US:34",
-#                         characteristicName="Chloride")
-#  
-
-## ----phData, eval=FALSE-----------------------------------
-#  
-#  dataPH <- getWQPData(statecode="US:55",
-#                   characteristicName="pH")
-#  
-
-## ----helpFunc,eval = FALSE--------------------------------
-#  ?getNWISPcodeInfo
-
-## ----rawFunc,eval = TRUE----------------------------------
-getNWISPcodeInfo
-
-## ----seeVignette,eval = FALSE-----------------------------
-#  vignette(dataRetrieval)
-
-## ----installFromCran,eval = FALSE-------------------------
-#  install.packages("dataRetrieval",
-#  repos=c("http://usgs-r.github.com","http://cran.us.r-project.org"),
-#  dependencies=TRUE,
-#  type="both")
-
-## ----openLibraryTest, eval=FALSE--------------------------
-#  library(dataRetrieval)
-
-## ----label=getSiteApp, echo=TRUE--------------------------
-availableData <- getNWISDataAvailability(siteNumber)
-dailyData <- availableData["dv" == availableData$service,]
-dailyData <- dailyData["00003" == dailyData$statCd,]
-
-tableData <- with(dailyData, 
-      data.frame(
-        shortName=srsname, 
-        Start=startDate, 
-        End=endDate, 
-        Count=count,
-        Units=parameter_units)
-      )
-tableData
-
-## ----label=saveData, echo=TRUE, eval=FALSE----------------
-#  write.table(tableData, file="tableData.tsv",sep="\t",
-#              row.names = FALSE,quote=FALSE)
-
diff --git a/inst/doc/dataRetrieval.Rnw b/inst/doc/dataRetrieval.Rnw
deleted file mode 100644
index 866a27962dd18beeeec2a8ca3101bc99b3a4ab93..0000000000000000000000000000000000000000
--- a/inst/doc/dataRetrieval.Rnw
+++ /dev/null
@@ -1,797 +0,0 @@
-%\VignetteIndexEntry{Introduction to the dataRetrieval package}
-%\VignetteEngine{knitr::knitr}
-%\VignetteDepends{}
-%\VignetteSuggests{xtable,EGRET}
-%\VignetteImports{zoo, XML, RCurl, reshape2,lubridate}
-%\VignettePackage{dataRetrieval}
-
-\documentclass[a4paper,11pt]{article}
-
-\usepackage{amsmath}
-\usepackage{times}
-\usepackage{hyperref}
-\usepackage[numbers, round]{natbib}
-\usepackage[american]{babel}
-\usepackage{authblk}
-\usepackage{subfig}
-\usepackage{placeins}
-\usepackage{footnote}
-\usepackage{tabularx}
-\usepackage{threeparttable}
-\usepackage{parskip}
-
-\usepackage{csquotes}
-\usepackage{setspace}
-
-% \doublespacing
-
-\renewcommand{\topfraction}{0.85}
-\renewcommand{\textfraction}{0.1}
-\usepackage{graphicx}
-
-
-\usepackage{mathptmx}% Times Roman font
-\usepackage[scaled=.90]{helvet}% Helvetica, served as a model for arial
-
-% \usepackage{indentfirst}
-% \setlength\parindent{20pt}
-\setlength{\parskip}{0pt}
-
-\usepackage{courier}
-
-\usepackage{titlesec}
-\usepackage{titletoc}
-
-\titleformat{\section}
-  {\normalfont\sffamily\bfseries\LARGE}
-  {\thesection}{0.5em}{}
-\titleformat{\subsection}
-  {\normalfont\sffamily\bfseries\Large}
-  {\thesubsection}{0.5em}{}
-\titleformat{\subsubsection}
-  {\normalfont\sffamily\large}
-  {\thesubsubsection}{0.5em}{}
-  
-\titlecontents{section}
-[2em]                 % adjust left margin
-{\sffamily}             % font formatting
-{\contentslabel{2.3em}} % section label and offset
-{\hspace*{-2.3em}}
-{\titlerule*[0.25pc]{.}\contentspage}
-  
-\titlecontents{subsection}
-[4.6em]                 % adjust left margin
-{\sffamily}             % font formatting
-{\contentslabel{2.3em}} % section label and offset
-{\hspace*{-2.3em}}
-{\titlerule*[0.25pc]{.}\contentspage}
-  
-\titlecontents{subsubsection}
-[6.9em]                 % adjust left margin
-{\sffamily}             % font formatting
-{\contentslabel{2.3em}} % section label and offset
-{\hspace*{-2.3em}}
-{\titlerule*[0.25pc]{.}\contentspage}
-
-\titlecontents{table}
-[0em]                 % adjust left margin
-{\sffamily}             % font formatting
-{Table\hspace*{2em} \contentslabel {2em}} % section label and offset
-{\hspace*{4em}}
-{\titlerule*[0.25pc]{.}\contentspage}
-
-\titlecontents{figure}
-[0em]                 % adjust left margin
-{\sffamily}             % font formatting
-{Figure\hspace*{2em} \contentslabel {2em}} % section label and offset
-{\hspace*{4em}}
-{\titlerule*[0.25pc]{.}\contentspage}
-
-%Italisize and change font of urls:
-\urlstyle{sf}
-\renewcommand\UrlFont\itshape
-
-\usepackage{caption}
-\captionsetup{
-  font={sf},
-  labelfont={bf,sf},
-  labelsep=period,
-  justification=justified,
-  singlelinecheck=false
-}
-
-
-
-\textwidth=6.2in
-\textheight=8.5in
-\parskip=.3cm
-\oddsidemargin=.1in
-\evensidemargin=.1in
-\headheight=-.3in
-
-
-%------------------------------------------------------------
-% newcommand
-%------------------------------------------------------------
-\newcommand{\scscst}{\scriptscriptstyle}
-\newcommand{\scst}{\scriptstyle}
-\newcommand{\Robject}[1]{{\texttt{#1}}}
-\newcommand{\Rfunction}[1]{{\texttt{#1}}}
-\newcommand{\Rclass}[1]{\textit{#1}}
-\newcommand{\Rpackage}[1]{\textit{#1}}
-\newcommand{\Rexpression}[1]{\texttt{#1}}
-\newcommand{\Rmethod}[1]{{\texttt{#1}}}
-\newcommand{\Rfunarg}[1]{{\texttt{#1}}}
-
-\begin{document}
-
-<<openLibrary, echo=FALSE>>=
-library(xtable)
-options(continue=" ")
-options(width=60)
-library(knitr)
-
-@
-
-\renewenvironment{knitrout}{\begin{singlespace}}{\end{singlespace}}
-\renewcommand*\listfigurename{Figures}
-
-\renewcommand*\listtablename{Tables}
-
-
-%------------------------------------------------------------
-\title{The dataRetrieval R package}
-%------------------------------------------------------------
-\author[1]{Laura De Cicco}
-\author[1]{Robert Hirsch}
-\affil[1]{United States Geological Survey}
-
-
-<<include=TRUE ,echo=FALSE,eval=TRUE>>=
-opts_chunk$set(highlight=TRUE, tidy=TRUE, keep.space=TRUE, keep.blank.space=FALSE, keep.comment=TRUE, tidy=FALSE,comment="")
-knit_hooks$set(inline = function(x) {
-   if (is.numeric(x)) round(x, 3)})
-knit_hooks$set(crop = hook_pdfcrop)
-
-bold.colHeaders <- function(x) {
-  x <- gsub("\\^(\\d)","$\\^\\1$",x)
-  x <- gsub("\\%","\\\\%",x)
-  x <- gsub("\\_"," ",x)
-  returnX <- paste("\\multicolumn{1}{c}{\\textbf{\\textsf{", x, "}}}", sep = "")
-}
-addSpace <- function(x) ifelse(x != "1", "[5pt]","")
-@
-
-\noindent{\huge\textsf{\textbf{The dataRetrieval R package}}}
-
-\noindent\textsf{By Laura De Cicco and Robert Hirsch}
-
-\noindent\textsf{\today}
-
-% \maketitle
-% 
-% \newpage 
-
-\tableofcontents
-\listoffigures
-\listoftables
-
-\newpage
-
-%------------------------------------------------------------
-\section{Introduction to dataRetrieval}
-%------------------------------------------------------------ 
-The dataRetrieval package was created to simplify the process of loading hydrologic data into the R environment. It has been specifically designed to work seamlessly with the EGRET R package: Exploration and Graphics for RivEr Trends. See: \url{https://github.com/USGS-R/EGRET/wiki} or \url{http://dx.doi.org/10.3133/tm4A10} for information on EGRET. EGRET is designed to provide analysis of water quality data sets using the Weighted Regressions on Time, Discharge and Season (WRTDS) method as well as analysis of discharge trends using robust time-series smoothing techniques.  Both of these capabilities provide both tabular and graphical analyses of long-term data sets.
-
-
-The dataRetrieval package is designed to retrieve many of the major data types of U.S. Geological Survey (USGS) hydrologic data that are available on the Web. Users may also load data from other sources (text files, spreadsheets) using dataRetrieval.  Section \ref{sec:genRetrievals} provides examples of how one can obtain raw data from USGS sources on the Web and load them into dataframes within the R environment.  The functionality described in section \ref{sec:genRetrievals} is for general use and is not tailored for the specific uses of the EGRET package.  The functionality described in section \ref{sec:EGRETdfs} is tailored specifically to obtaining input from the Web and structuring it for use in the EGRET package.  The functionality described in section \ref{sec:userFiles} is for converting hydrologic data from user-supplied files and structuring it specifically for use in the EGRET package.
-
-For information on getting started in R and installing the package, see (\ref{sec:appendix1}): Getting Started. Any use of trade, firm, or product names is for descriptive purposes only and does not imply endorsement by the U.S. Government.
-
-A quick workflow for major dataRetrieval functions:
-
-<<workflow, echo=TRUE,eval=FALSE>>=
-library(dataRetrieval)
-# Choptank River near Greensboro, MD
-siteNumber <- "01491000" 
-ChoptankInfo <- getNWISSiteInfo(siteNumber)
-parameterCd <- "00060"
-
-#Raw daily data:
-rawDailyData <- getNWISdvData(siteNumber,parameterCd,
-                      "1980-01-01","2010-01-01")
-# Data compiled for EGRET analysis
-Daily <- getNWISDaily(siteNumber,parameterCd,
-                      "1980-01-01","2010-01-01")
-
-# Sample data Nitrate:
-parameterCd <- "00618"
-Sample <- getNWISSample(siteNumber,parameterCd,
-                      "1980-01-01","2010-01-01")
-
-# Metadata on site and nitrate:
-INFO <- getNWISInfo(siteNumber,parameterCd)
-
-# Merge discharge and nitrate data to one dataframe:
-Sample <- mergeReport()
-
-@
-
-
-%------------------------------------------------------------
-\section{USGS Web Retrievals}
-\label{sec:genRetrievals}
-%------------------------------------------------------------ 
-In this section, five examples of Web retrievals document how to get raw data. This data includes site information (\ref{sec:usgsSite}), measured parameter information (\ref{sec:usgsParams}), historical daily values(\ref{sec:usgsDaily}), unit values (which include real-time data but can also include other sensor data stored at regular time intervals) (\ref{sec:usgsRT}), and water quality data (\ref{sec:usgsWQP}) or (\ref{sec:usgsSTORET}). We will use the Choptank River near Greensboro, MD as an example.  Daily discharge measurements are available as far back as 1948.  Additionally, nitrate has been measured since 1964. 
-
-% %------------------------------------------------------------
-% \subsection{Introduction}
-% %------------------------------------------------------------
-The USGS organizes hydrologic data in a standard structure.  Streamgages are located throughout the United States, and each streamgage has a unique ID (referred in this document and throughout the dataRetrieval package as \enquote{siteNumber}).  Often (but not always), these ID's are 8 digits.  The first step to finding data is discovering this siteNumber. There are many ways to do this, one is the National Water Information System: Mapper \url{http://maps.waterdata.usgs.gov/mapper/index.html}.
-
-Once the siteNumber is known, the next required input for USGS data retrievals is the \enquote{parameter code}.  This is a 5-digit code that specifies the measured parameter being requested.  For example, parameter code 00631 represents \enquote{Nitrate plus nitrite, water, filtered, milligrams per liter as nitrogen}, with units of \enquote{mg/l as N}. A complete list of possible USGS parameter codes can be found at \url{http://nwis.waterdata.usgs.gov/usa/nwis/pmcodes?help}.
-
-Not every station will measure all parameters. A short list of commonly measured parameters is shown in Table \ref{tab:params}.
-
-
-<<tableParameterCodes, echo=FALSE,results='asis'>>=
-pCode <- c('00060', '00065', '00010','00045','00400')
-shortName <- c("Discharge [ft$^3$/s]","Gage height [ft]","Temperature [C]", "Precipitation [in]", "pH")
-
-data.df <- data.frame(pCode, shortName, stringsAsFactors=FALSE)
-
-print(xtable(data.df,
-       label="tab:params",
-       caption="Common USGS Parameter Codes"),
-       caption.placement="top",
-       size = "\\footnotesize",
-       latex.environment=NULL,
-       sanitize.text.function = function(x) {x},
-       sanitize.colnames.function =  bold.colHeaders,
-       sanitize.rownames.function = addSpace
-      )
-
-@
-
-A complete list (as of September 25, 2013) is available as data attached to the package. It is accessed by the following:
-
-<<tableParameterCodesDataRetrieval>>=
-library(dataRetrieval)
-parameterCdFile <-  parameterCdFile
-names(parameterCdFile)
-@
-
-
-For unit values data (sensor data measured at regular time intervals such as 15 minutes or hourly), knowing the parameter code and siteNumber is enough to make a request for data.  For most variables that are measured on a continuous basis, the USGS also stores the historical data as daily values.  These daily values are statistical summaries of the continuous data, e.g. maximum, minimum, mean, or median. The different statistics are specified by a 5-digit statistics code.  A complete list of statistic codes can be found here:
-
-\url{http://nwis.waterdata.usgs.gov/nwis/help/?read_file=stat&format=table}
-
-Some common codes are shown in Table \ref{tab:stat}.
-
-<<tableStatCodes, echo=FALSE,results='asis'>>=
-StatCode <- c('00001', '00002', '00003','00008')
-shortName <- c("Maximum","Minimum","Mean", "Median")
-
-data.df <- data.frame(StatCode, shortName, stringsAsFactors=FALSE)
-
-print(xtable(data.df,label="tab:stat",
-           caption="Commonly used USGS Stat Codes"),
-       caption.placement="top",
-       size = "\\footnotesize",
-       latex.environment=NULL,
-       sanitize.colnames.function = bold.colHeaders,
-       sanitize.rownames.function = addSpace
-      )
-
-@
-
-Examples for using these siteNumber's, parameter codes, and stat codes will be presented in subsequent sections.
-
-\FloatBarrier
-
-%------------------------------------------------------------
-\subsection{Site Information}
-\label{sec:usgsSite}
-%------------------------------------------------------------
-
-%------------------------------------------------------------
-\subsubsection{getNWISSiteInfo}
-\label{sec:usgsSiteFileData}
-%------------------------------------------------------------
-Use the \texttt{getNWISSiteInfo} function to obtain all of the information available for a particular USGS site such as full station name, drainage area, latitude, and longitude. \texttt{getNWISSiteInfo} can also access information about multiple sites with a vector input.
-
-
-<<getSite, echo=TRUE>>=
-siteNumbers <- c("01491000","01645000") 
-siteINFO <- getNWISSiteInfo(siteNumbers)
-@
-
-A specific example piece of information can be retrieved, in this case a station name, as follows:
-
-<<siteNames2, echo=TRUE>>=
-siteINFO$station.nm
-@
-Site information is obtained from \url{http://waterservices.usgs.gov/rest/Site-Test-Tool.html}
-\FloatBarrier
-
-%------------------------------------------------------------
-\subsubsection{getNWISDataAvailability}
-\label{sec:usgsDataAvailability}
-%------------------------------------------------------------
-To discover what data is available for a particular USGS site, including measured parameters, period of record, and number of samples (count), use the \texttt{getNWISDataAvailability} function. It is possible to limit the retrieval information to a subset of types (\texttt{"}dv\texttt{"}, \texttt{"}uv\texttt{"}, or \texttt{"}qw\texttt{"}). In the following example, we limit the retrieved Choptank data to only daily data. Leaving the \texttt{"}type\texttt{"} argument blank returns all of the available data for that site.
-
-
-<<getSiteExtended, echo=TRUE>>=
-# Continuing from the previous example:
-# This pulls out just the daily data:
-
-dailyDataAvailable <- getNWISDataAvailability(siteNumbers,
-                    type="dv")
-
-@
-
-<<tablegda, echo=FALSE,results='asis'>>=
-tableData <- with(dailyDataAvailable, 
-      data.frame( 
-      siteNumber= site_no,
-      srsname=srsname, 
-      startDate=as.character(startDate), 
-      endDate=as.character(endDate), 
-      count=as.character(count),
-      units=parameter_units,
-      statCd = statCd,
-      stringsAsFactors=FALSE)
-      )
-
-tableData$units[which(tableData$units == "ft3/s")] <- "ft$^3$/s"
-tableData$units[which(tableData$units == "uS/cm @25C")] <- "$\\mu$S/cm @25C"
-
-
-print(xtable(tableData,label="tab:gda",
-    caption="Daily mean data availabile at the Choptank River near Greensboro, MD. [Some columns deleted for space considerations]"),
-       caption.placement="top",
-       size = "\\footnotesize",
-       latex.environment=NULL,
-       sanitize.text.function = function(x) {x},
-       sanitize.colnames.function =  bold.colHeaders,
-       sanitize.rownames.function = addSpace
-      )
-
-@
-
-
-
-See Section \ref{app:createWordTable} for instructions on converting an R dataframe to a table in Microsoft\textregistered\ software Excel or Word to display a data availability table similar to Table \ref{tab:gda}. Excel, Microsoft, PowerPoint, Windows, and Word are registered trademarks of Microsoft Corporation in the United States and other countries.
-
-\FloatBarrier
-
-%------------------------------------------------------------
-\subsection{Parameter Information}
-\label{sec:usgsParams}
-%------------------------------------------------------------
-To obtain all of the available information concerning a measured parameter (or multiple parameters), use the \texttt{getNWISPcodeInfo} function:
-
-<<label=getPCodeInfo, echo=TRUE>>=
-# Using defaults:
-parameterCd <- "00618" 
-parameterINFO <- getNWISPcodeInfo(parameterCd)
-colnames(parameterINFO)
-@
-
-A specific example piece of information, in this case parameter name, can be obtained as follows:
-
-<<siteNames, echo=TRUE>>=
-parameterINFO$parameter_nm
-@
-Parameter information can obtained from \url{http://nwis.waterdata.usgs.gov/usa/nwis/pmcodes}
-\FloatBarrier
-%------------------------------------------------------------
-\subsection{Daily Values}
-\label{sec:usgsDaily}
-%------------------------------------------------------------
-To obtain daily records of USGS data, use the \texttt{getNWISdvData} function. The arguments for this function are siteNumber, parameterCd, startDate, endDate, statCd, and a logical (TRUE/FALSE) interactive. There are 2 default arguments: statCd (defaults to \texttt{"}00003\texttt{"}), and interactive (defaults to TRUE).  If you want to use the default values, you do not need to list them in the function call. By setting the \texttt{"}interactive\texttt{"} option to FALSE, the operation of the function will advance automatically. It might make more sense to run large batch collections with the interactive option set to FALSE. 
-
-The dates (start and end) must be in the format \texttt{"}YYYY-MM-DD\texttt{"} (note: the user must include the quotes).  Setting the start date to \texttt{"}\texttt{"} (no space) will prompt the program to ask for the earliest date, and setting the end date to \texttt{"}\texttt{"} (no space) will prompt for the latest available date.
-
-<<label=getNWISDaily, echo=TRUE, eval=TRUE>>=
-
-# Continuing with our Choptank River example
-siteNumber <- "01491000"
-parameterCd <- "00060"  # Discharge
-startDate <- ""  # Will request earliest date
-endDate <- "" # Will request latest date
-
-discharge <- getNWISdvData(siteNumber, 
-                    parameterCd, startDate, endDate)
-names(discharge)
-@
-
-The column \texttt{"}datetime\texttt{"} in the returned dataframe is automatically imported as a variable of class \texttt{"}Date\texttt{"} in R. Each requested parameter has a value and remark code column.  The names of these columns depend on the requested parameter and stat code combinations. USGS remark codes are often \texttt{"}A\texttt{"} (approved for publication) or \texttt{"}P\texttt{"} (provisional data subject to revision). A more complete list of remark codes can be found here:
-\url{http://nwis.waterdata.usgs.gov/usa/nwis/pmcodes}
-
-Another example that doesn't use the defaults would be a request for mean and maximum daily temperature and discharge in early 2012:
-<<label=getNWIStemperature, echo=TRUE>>=
-
-parameterCd <- c("00010","00060")  # Temperature and discharge
-statCd <- c("00001","00003")  # Mean and maximum
-startDate <- "2012-01-01"
-endDate <- "2012-05-01"
-
-temperatureAndFlow <- getNWISdvData(siteNumber, parameterCd, 
-        startDate, endDate, statCd=statCd)
-
-@
-
-Daily data is pulled from \url{http://waterservices.usgs.gov/rest/DV-Test-Tool.html}.
-
-The column names can be automatically adjusted based on the parameter and statistic codes using the \texttt{renameColumns} function. This is not necessary, but may be useful when analyzing the data. 
-
-<<label=renameColumns, echo=TRUE>>=
-names(temperatureAndFlow)
-
-temperatureAndFlow <- renameColumns(temperatureAndFlow)
-names(temperatureAndFlow)
-@
-
-An example of plotting the above data (Figure \ref{fig:getNWIStemperaturePlot}):
-
-<<getNWIStemperaturePlot, echo=TRUE, fig.cap="Temperature and discharge plot of Choptank River in 2012.",out.width='1\\linewidth',out.height='1\\linewidth',fig.show='hold'>>=
-par(mar=c(5,5,5,5)) #sets the size of the plot window
-
-with(temperatureAndFlow, plot(
-  datetime, Temperature_water_degrees_Celsius_Max_01,
-  xlab="Date",ylab="Max Temperature [C]"
-  ))
-par(new=TRUE)
-with(temperatureAndFlow, plot(
-  datetime, Discharge_cubic_feet_per_second,
-  col="red",type="l",xaxt="n",yaxt="n",xlab="",ylab="",axes=FALSE
-  ))
-axis(4,col="red",col.axis="red")
-mtext(expression(paste("Mean Discharge [ft"^"3","/s]",
-                       sep="")),side=4,line=3,col="red")
-title(paste(siteINFO$station.nm[1],"2012",sep=" "))
-legend("topleft", c("Max Temperature", "Mean Discharge"), 
-       col=c("black","red"),lty=c(NA,1),pch=c(1,NA))
-@
-
-
-There are occasions where NWIS values are not reported as numbers, instead there might be text describing a certain event such as \enquote{Ice.}  Any value that cannot be converted to a number will be reported as NA in this package (not including remark code columns).
-
-\FloatBarrier
-
-%------------------------------------------------------------
-\subsection{Unit Values}
-\label{sec:usgsRT}
-%------------------------------------------------------------
-Any data collected at regular time intervals (such as 15-minute or hourly) are known as \enquote{unit values.} Many of these are delivered on a real time basis and very recent data (even less than an hour old in many cases) are available through the function \texttt{getNWISunitData}.  Some of these unit values are available for many years, and some are only available for a recent time period such as 120 days.  Here is an example of a retrieval of such data.  
-
-<<label=getNWISUnit, echo=TRUE>>=
-
-parameterCd <- "00060"  # Discharge
-startDate <- "2012-05-12" 
-endDate <- "2012-05-13" 
-dischargeToday <- getNWISunitData(siteNumber, parameterCd, 
-        startDate, endDate)
-@
-
-The retrieval produces the following dataframe:
-
-<<dischargeData, echo=FALSE>>=
-head(dischargeToday)
-@
-
-Note that time now becomes important, so the variable datetime is a POSIXct, and the time zone is included in a separate column. Data are retrieved from \url{http://waterservices.usgs.gov/rest/IV-Test-Tool.html}. There are occasions where NWIS values are not reported as numbers, instead a common example is \enquote{Ice.}  Any value that cannot be converted to a number will be reported as NA in this package.
-
-\newpage
-
-
-\FloatBarrier
-
-%------------------------------------------------------------
-\subsection{Water Quality Values}
-\label{sec:usgsWQP}
-%------------------------------------------------------------
-To get USGS water quality data from water samples collected at the streamgage or other monitoring site (as distinct from unit values collected through some type of automatic monitor) we can use the function \texttt{getNWISqwData}, with the input arguments: siteNumber, parameterCd, startDate, endDate, and interactive (similar to \texttt{getNWISunitData} and \texttt{getNWISdvData}). Additionally, the argument \texttt{"}expanded\texttt{"} is a logical input that allows the user to choose between a simple return of datetimes/qualifier/values (expanded=FALSE), or a more complete and verbose output (expanded=TRUE). Expanded = TRUE includes such columns as remark codes, value qualifying text, and detection level.
-
-
-<<label=getQW, echo=TRUE>>=
- 
-# Dissolved Nitrate parameter codes:
-parameterCd <- c("00618","71851")
-startDate <- "1985-10-01"
-endDate <- "2012-09-30"
-
-dissolvedNitrate <- getNWISqwData(siteNumber, parameterCd, 
-      startDate, endDate, expanded=TRUE)
-names(dissolvedNitrate)
-
-
-@
-
-
-<<getQWtemperaturePlot, echo=TRUE, fig.cap=paste(parameterINFO$parameter_nm, "at", siteINFO$station.nm[1])>>=
-with(dissolvedNitrate, plot(
-  startDateTime, result_va_00618,
-  xlab="Date",ylab = paste(parameterINFO$srsname,
-      "[",parameterINFO$parameter_units,"]")
-  ))
-title(siteINFO$station.nm[1])
-@
-
-\FloatBarrier
-
-%------------------------------------------------------------
-\subsection{URL Construction}
-\label{sec:usgsURL}
-%------------------------------------------------------------
-There may be times when you might be interested in seeing the URL (Web address) that was used to obtain the raw data. The \texttt{constructNWISURL} function returns the URL.  In addition to input variables that have been described, there is a new argument \texttt{"}service\texttt{"}. The service argument can be \texttt{"}dv\texttt{"} (daily values), \texttt{"}uv\texttt{"} (unit values), \texttt{"}qw\texttt{"} (NWIS water quality values), or \texttt{"}wqp\texttt{"} (general Water Quality Portal values).
- 
-
-<<label=geturl, echo=TRUE, eval=FALSE>>=
-# Dissolved Nitrate parameter codes:
-pCode <- c("00618","71851")
-startDate <- "1964-06-11"
-endDate <- "2012-12-18"
-url_qw <- constructNWISURL(siteNumber,pCode,startDate,endDate,'qw')
-url_dv <- constructNWISURL(siteNumber,"00060",startDate,endDate,
-                           'dv',statCd="00003")
-url_uv <- constructNWISURL(siteNumber,"00060",startDate,endDate,'uv')
-@
-
-
-
-%------------------------------------------------------------
-\section{Water Quality Portal Web Retrievals}
-\label{sec:usgsSTORET}
-%------------------------------------------------------------
-There are additional water quality data sets available from the Water Quality Data Portal (\url{http://www.waterqualitydata.us/}).  These data sets can be housed in either the STORET database (data from EPA), NWIS database (data from USGS), STEWARDS database (data from USDA), and additional databases are slated to be included.  Because only USGS uses parameter codes, a \texttt{"}characteristic name\texttt{"} must be supplied.  The \texttt{getWQPqwData} function can take either a USGS parameter code, or a more general characteristic name in the parameterCd input argument. The Water Quality Data Portal includes data discovery tools and information on characteristic names. The following example retrieves specific conductance from a DNR site in Wisconsin. 
-
-
-<<label=getQWData, echo=TRUE, eval=FALSE>>=
-specificCond <- getWQPqwData('WIDNR_WQX-10032762',
-                'Specific conductance','2011-05-01','2011-09-30')
-@
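-
-Because \texttt{getWQPqwData} also accepts USGS parameter codes, the same style of call works for a USGS site. The following sketch mirrors the example in the package help files (parameter code 01075 at site USGS-01594440):
-
-<<label=getQWDataPcode, echo=TRUE, eval=FALSE>>=
-rawPcode <- getWQPqwData('USGS-01594440','01075',
-                '1985-01-01','1985-03-31')
-@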
-
-Guidance for finding characteristic names can be found at: \url{http://www.waterqualitydata.us/webservices_documentation.jsp}.
-
-\FloatBarrier
-
-%------------------------------------------------------------
-\section{Generalized Retrievals}
-\label{sec:general}
-%------------------------------------------------------------
-The previous examples all took specific input arguments: siteNumber, parameterCd (or characteristic name), startDate, endDate, etc. However, the Web services that supply the data can accept a wide variety of additional arguments. 
-
-%------------------------------------------------------------
-\subsection{NWIS sites}
-\label{sec:NWISGenSite}
-%------------------------------------------------------------
-The function \texttt{getNWISSites} can be used to discover NWIS sites based on any query that the NWIS Site Service offers. This is done through the \texttt{"..."} argument, which allows the user to pass any arbitrary set of input arguments. The Site Service test tool here:
-
-\url{http://waterservices.usgs.gov/rest/Site-Test-Tool.html}
-
-can be used to discover the many options for searching for NWIS sites. For example, you may want to search for sites within a lat/lon bounding box, only sites on tidal streams, sites with water quality samples, or sites above a certain altitude. The site query built with the tool generates a URL. Here, the tool was used to build a search within a specified bounding box for sites that have daily discharge (parameter code 00060) and temperature (parameter code 00010) data. The generated URL is:
-
-\url{http://waterservices.usgs.gov/nwis/site/?format=rdb&bBox=-83.0,36.5,-81.0,38.5&parameterCd=00010,00060&hasDataTypeCd=dv}
-
-The following dataRetrieval code can be used to get those sites:
-
-<<siteSearch>>=
-sites <- getNWISSites(bBox="-83.0,36.5,-81.0,38.5", 
-                      parameterCd="00010,00060",
-                      hasDataTypeCd="dv")
-
-names(sites)
-nrow(sites)
-@
-
-
-%------------------------------------------------------------
-\subsection{NWIS data}
-\label{sec:NWISGenData}
-%------------------------------------------------------------
-For NWIS data, the function \texttt{getNWISData} can be used. The arguments listed in the R help file are \texttt{"..."} and \texttt{"}service\texttt{"} (only for data requests). Table \ref{tab:NWISGeneral} describes the services that are available.
-
-\begin{table}[!ht]
-\begin{minipage}{\linewidth}
-{\footnotesize
-\caption{NWIS general data calls} 
-\label{tab:NWISGeneral}
-\begin{tabular}{lll}
-  \hline
-\multicolumn{1}{c}{\textbf{\textsf{Service}}} &
-\multicolumn{1}{c}{\textbf{\textsf{Description}}}  &
-\multicolumn{1}{c}{\textbf{\textsf{Reference URL}}} \\  [0pt]
-  \hline
-  dv & daily values & \url{http://waterservices.usgs.gov/rest/DV-Test-Tool.html}\\
-  [5pt]iv & instantaneous & \url{http://waterservices.usgs.gov/rest/IV-Test-Tool.html}\\
-  [5pt]gwlevels & groundwater levels & \url{http://waterservices.usgs.gov/rest/GW-Levels-Test-Tool.html}\\
-  [5pt]qwdata & water quality & \url{http://nwis.waterdata.usgs.gov/nwis/qwdata}\\
-   \hline
-\end{tabular}
-}
-\end{minipage}
-\end{table}
-
-The \texttt{"..."} argument allows the user to create their own queries based on the instructions found in the web links above. The links provide instructions on how to create a URL to request data. Perhaps you want sites only in Wisconsin, with a drainage area less than 50 mi$^2$, and the most recent daily dischage data. That request would be done as follows:
-
-<<dataExample>>=
-dischargeWI <- getNWISData(stateCd="WI",
-                           parameterCd="00060",
-                           drainAreaMin="50",
-                           statCd="00003")
-names(dischargeWI)
-nrow(dischargeWI)
-@
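-
-To request data from a different service in Table \ref{tab:NWISGeneral}, add the \texttt{"}service\texttt{"} argument. The following sketch mirrors the instantaneous-value example in the package help files:
-
-<<dataExampleIV, eval=FALSE>>=
-dataTempUnit <- getNWISData(sites="03086500", service="iv",
-                            parameterCd="00010")
-@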
-
-%------------------------------------------------------------
-\subsection{Water Quality Portal sites}
-\label{sec:WQPGenSite}
-%------------------------------------------------------------
-
-Just as with NWIS, the Water Quality Portal (WQP) offers a variety of ways to search for sites and request data. The possible Web service arguments for WQP site searches are found here:
-
-\url{http://www.waterqualitydata.us/webservices_documentation.jsp}
-
-To discover available sites in the WQP in New Jersey that have measured Chloride, use the function \texttt{getWQPSites}.
-
-<<NJChloride, eval=FALSE>>=
-
-sitesNJ <- getWQPSites(statecode="US:34",
-                       characteristicName="Chloride")
-
-@
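-
-Any other Web service argument from the documentation link above can be added in the same way. As a sketch based on the example in the package help files, a search can be narrowed to stream sites in a single Wisconsin county:
-
-<<WIStreamSites, eval=FALSE>>=
-sitesWIStream <- getWQPSites(statecode="US:55",
-                             countycode="US:55:025",
-                             siteType="Stream")
-@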
-
-
-%------------------------------------------------------------
-\subsection{Water Quality Portal data}
-\label{sec:WQPGenData}
-%------------------------------------------------------------
-Finally, to get data from the WQP using generalized Web service calls, use the function \texttt{getWQPData}. For example, to get all the pH data in Wisconsin:
-
-<<phData, eval=FALSE>>=
-
-dataPH <- getWQPData(statecode="US:55", 
-                 characteristicName="pH")
-
-@
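-
-Requests can also be narrowed to a single monitoring location with the siteid argument. This sketch mirrors the example in the package help files:
-
-<<phDataSite, eval=FALSE>>=
-pHSite <- getWQPData(siteid="USGS-04024315",
-                     characteristicName="pH")
-@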
-
-
-
-\FloatBarrier
-
-\clearpage
-
-
-%------------------------------------------------------------ 
-\section{Getting Started in R}
-\label{sec:appendix1}
-%------------------------------------------------------------ 
-This section describes the options for downloading and installing the dataRetrieval package.
-
-%------------------------------------------------------------
-\subsection{New to R?}
-%------------------------------------------------------------ 
-If you are new to R, you will need to first install the latest version of R, which can be found here: \url{http://www.r-project.org/}.
-
-At any time, you can get information about any function in R by typing a question mark before the function's name.  This will open a file (in RStudio, in the Help window) that describes the function, lists the required arguments, and provides working examples.
-
-<<helpFunc,eval = FALSE>>=
-?getNWISPcodeInfo
-@
-
-This will open a help file similar to Figure \ref{fig:help}.
-
-\FloatBarrier
-
-To see the source code for a particular function, type the name of the function without parentheses:
-<<rawFunc,eval = TRUE>>=
-getNWISPcodeInfo
-@
-
-
-\begin{figure}[ht!]
-\centering
- \resizebox{0.95\textwidth}{!}{\includegraphics{Rhelp.png}} 
-\caption{A simple R help file}
-\label{fig:help}
-\end{figure}
-
-Additionally, many R packages have vignette files attached (such as this paper). To view the vignette:
-<<seeVignette,eval = FALSE>>=
-vignette("dataRetrieval")
-@
-
-\FloatBarrier
-\clearpage
-%------------------------------------------------------------
-\subsection{R User: Installing dataRetrieval}
-%------------------------------------------------------------ 
-The following command installs dataRetrieval and its required dependencies:
-
-<<installFromCran,eval = FALSE>>=
-install.packages("dataRetrieval", 
-repos=c("http://usgs-r.github.com","http://cran.us.r-project.org"),
-dependencies=TRUE,
-type="both")
-@
-
-After installing the package, you need to load it each time you restart R.  This is done with the simple command:
-<<openLibraryTest, eval=FALSE>>=
-library(dataRetrieval)
-@
-
-
-%------------------------------------------------------------ 
-\section{Creating tables in Microsoft\textregistered\ software from R}
-\label{app:createWordTable}
-%------------------------------------------------------------
-A few steps are required to create a table in Microsoft\textregistered\ software (Excel, Word, PowerPoint, etc.) from an R dataframe. There are certainly a variety of good methods; one is detailed here. The example below creates a table in Microsoft Excel based on the dataframe tableData:
-
-<<label=getSiteApp, echo=TRUE>>=
-availableData <- getNWISDataAvailability(siteNumber)
-dailyData <- availableData["dv" == availableData$service,]
-dailyData <- dailyData["00003" == dailyData$statCd,]
-
-tableData <- with(dailyData, 
-      data.frame(
-        shortName=srsname, 
-        Start=startDate, 
-        End=endDate, 
-        Count=count,
-        Units=parameter_units)
-      )
-tableData
-@
-
-First, save the dataframe as a tab-delimited file (you don't want to use a comma-delimited file because there are commas in some of the data elements):
-
-
-<<label=saveData, echo=TRUE, eval=FALSE>>=
-write.table(tableData, file="tableData.tsv",sep="\t",
-            row.names = FALSE,quote=FALSE)
-@
-
-This will save a file called \texttt{tableData.tsv} in your working directory.  You can see your working directory by typing \texttt{getwd()} in the R console. Opening the file in a general-purpose text editor, you should see the following:
-
-\begin{verbatim}
-shortName  Start  End	Count	Units
-Temperature, water	2010-10-01	2012-06-24	575	deg C
-Stream flow, mean. daily	1948-01-01	2013-03-13	23814	ft3/s
-Specific conductance	2010-10-01	2012-06-24	551	uS/cm @25C
-Suspended sediment concentration (SSC)	1980-10-01	1991-09-30	3651	mg/l
-Suspended sediment discharge	1980-10-01	1991-09-30	3652	tons/day
-\end{verbatim}
-
-Next, follow the steps below to open this file in Excel:
-\begin{enumerate}
-\item Open Excel
-\item Click on the File tab
-\item Click on the Open option
-\item Navigate to the working directory (as shown in the results of \texttt{getwd()})
-\item Next to the File name text box, change the dropdown type to All Files (*.*)
-\item Double click tableData.tsv
-\item A text import wizard will open. In the first window, choose the Delimited radio button if it is not automatically selected, then click Next.
-\item In the second window, check the Tab delimiter if it is not already checked, then click Finish.
-\item Use the many formatting tools within Excel to customize the table
-\end{enumerate}
-
-From Excel, it is simple to copy and paste the tables into other Microsoft\textregistered\ software. An example using one of the default Excel table formats is shown below.
-
-\begin{figure}[ht!]
-\centering
- \resizebox{0.9\textwidth}{!}{\includegraphics{table1.png}} 
-\caption{A simple table produced in Microsoft\textregistered\ Excel. Additional formatting will be required, for example converting u to $\mu$.}
-\label{overflow}
-\end{figure}
-
-\clearpage
-
-%-------------------------------------
-\section{Disclaimer}
-%------------------------------------
-This information is preliminary and is subject to revision. It is being provided to meet the need for timely best science. The information is provided on the condition that neither the U.S. Geological Survey nor the U.S. Government may be held liable for any damages resulting from the authorized or unauthorized use of the information.
-
-
-\end{document}
diff --git a/inst/doc/dataRetrieval.pdf b/inst/doc/dataRetrieval.pdf
deleted file mode 100644
index 1f8f6d3ac237a315b5c7baec47798d3e1f417d4c..0000000000000000000000000000000000000000
Binary files a/inst/doc/dataRetrieval.pdf and /dev/null differ
diff --git a/man/getRDB1Data.Rd b/man/importRDB1.Rd
similarity index 84%
rename from man/getRDB1Data.Rd
rename to man/importRDB1.Rd
index 674e5526b0dbc20a0ce20ecc5b101c0d2fd2d5e6..00df135a5f3317716c425a65478ca80709684aeb 100644
--- a/man/getRDB1Data.Rd
+++ b/man/importRDB1.Rd
@@ -1,9 +1,9 @@
 % Generated by roxygen2 (4.0.2): do not edit by hand
-\name{getRDB1Data}
-\alias{getRDB1Data}
+\name{importRDB1}
+\alias{importRDB1}
 \title{Function to return data from the NWIS RDB 1.0 format}
 \usage{
-getRDB1Data(obs_url, asDateTime = FALSE, qw = FALSE)
+importRDB1(obs_url, asDateTime = FALSE, qw = FALSE)
 }
 \arguments{
 \item{obs_url}{string containing the url for the retrieval}
@@ -27,12 +27,12 @@ offering <- "00003"
 property <- "00060"
 obs_url <- constructNWISURL(siteNumber,property,
          startDate,endDate,"dv",format="tsv")
-data <- getRDB1Data(obs_url)
+data <- importRDB1(obs_url)
 urlMulti <- constructNWISURL("04085427",c("00060","00010"),
          startDate,endDate,"dv",statCd=c("00003","00001"),"tsv")
-multiData <- getRDB1Data(urlMulti)
+multiData <- importRDB1(urlMulti)
 unitDataURL <- constructNWISURL(siteNumber,property,
          "2014-10-10","2014-10-10","uv",format="tsv")
-unitData <- getRDB1Data(unitDataURL, asDateTime=TRUE)
+unitData <- importRDB1(unitDataURL, asDateTime=TRUE)
 }
 
diff --git a/man/parseWQPData.Rd b/man/importWQP.Rd
similarity index 87%
rename from man/parseWQPData.Rd
rename to man/importWQP.Rd
index 52cca7ad21aab744dc394736ebad2931dedeb62d..878ca1c6ce223fece1a76997051c43e72a9916cf 100644
--- a/man/parseWQPData.Rd
+++ b/man/importWQP.Rd
@@ -1,9 +1,9 @@
 % Generated by roxygen2 (4.0.2): do not edit by hand
-\name{parseWQPData}
-\alias{parseWQPData}
+\name{importWQP}
+\alias{importWQP}
 \title{Basic Water Quality Portal Data grabber}
 \usage{
-parseWQPData(url)
+importWQP(url)
 }
 \arguments{
 \item{url}{string URL to Water Quality Portal}
@@ -20,7 +20,7 @@ Imports data from the Water Quality Portal based on a specified url.
 
 ## Examples take longer than 5 seconds:
 rawSampleURL <- constructWQPURL('USGS-01594440','01075', '1985-01-01', '1985-03-31')
-rawSample <- parseWQPData(rawSampleURL)
+rawSample <- importWQP(rawSampleURL)
 }
 \keyword{USGS}
 \keyword{data}
diff --git a/man/getWaterML1Data.Rd b/man/importWaterML1.Rd
similarity index 83%
rename from man/getWaterML1Data.Rd
rename to man/importWaterML1.Rd
index 58179dbb4bd41f1e23c4ecbb5c7d9fb697475efb..7528ebd33c15c4c2de2299355c82fef9a56aa3cd 100644
--- a/man/getWaterML1Data.Rd
+++ b/man/importWaterML1.Rd
@@ -1,9 +1,9 @@
 % Generated by roxygen2 (4.0.2): do not edit by hand
-\name{getWaterML1Data}
-\alias{getWaterML1Data}
+\name{importWaterML1}
+\alias{importWaterML1}
 \title{Function to return data from the NWISWeb WaterML1.1 service}
 \usage{
-getWaterML1Data(obs_url)
+importWaterML1(obs_url)
 }
 \arguments{
 \item{obs_url}{string containing the url for the retrieval}
@@ -23,18 +23,18 @@ offering <- '00003'
 property <- '00060'
 urlBase <- "http://waterservices.usgs.gov/nwis"
 obs_url <- constructNWISURL(siteNumber,property,startDate,endDate,'dv')
-data <- getWaterML1Data(obs_url)
+data <- importWaterML1(obs_url)
 urlMulti <- constructNWISURL("04085427",c("00060","00010"),
             startDate,endDate,'dv',statCd=c("00003","00001"))
-multiData <- getWaterML1Data(urlMulti)
+multiData <- importWaterML1(urlMulti)
 groundWaterSite <- "431049071324301"
 startGW <- "2013-10-01"
 endGW <- "2014-06-30"
 groundwaterExampleURL <- constructNWISURL(groundWaterSite, NA,
           startGW,endGW, service="gwlevels", format="xml")
-groundWater <- getWaterML1Data(groundwaterExampleURL)
+groundWater <- importWaterML1(groundwaterExampleURL)
 unitDataURL <- constructNWISURL(siteNumber,property,
          "2014-10-10","2014-10-10",'uv',format='xml')
-unitData <- getWaterML1Data(unitDataURL)
+unitData <- importWaterML1(unitDataURL)
 }
 
diff --git a/man/getWaterML2Data.Rd b/man/importWaterML2.Rd
similarity index 84%
rename from man/getWaterML2Data.Rd
rename to man/importWaterML2.Rd
index 7024add4e460a49bd71ed8384056b7f13432d4d9..262dd6fd3fe33edb5edfd1209689f6a1769e1e43 100644
--- a/man/getWaterML2Data.Rd
+++ b/man/importWaterML2.Rd
@@ -1,9 +1,9 @@
 % Generated by roxygen2 (4.0.2): do not edit by hand
-\name{getWaterML2Data}
-\alias{getWaterML2Data}
+\name{importWaterML2}
+\alias{importWaterML2}
 \title{Function to return data from the WaterML2 data}
 \usage{
-getWaterML2Data(obs_url)
+importWaterML2(obs_url)
 }
 \arguments{
 \item{obs_url}{string containing the url for the retrieval}
@@ -21,6 +21,6 @@ URL <- paste(baseURL, "sites=01646500",
      "endDT=2014-09-08",
      "statCd=00003",
      "parameterCd=00060",sep="&")
-\dontrun{dataReturned3 <- getWaterML2Data(URL)}
+\dontrun{dataReturned3 <- importWaterML2(URL)}
 }
 
diff --git a/man/getNWISData.Rd b/man/readNWISdata.Rd
similarity index 75%
rename from man/getNWISData.Rd
rename to man/readNWISdata.Rd
index 48b4f2d12d51441153f6076a1c9deea827694530..3b209e924e138729d285e12cb63e11bb6b7d4489 100644
--- a/man/getNWISData.Rd
+++ b/man/readNWISdata.Rd
@@ -1,9 +1,9 @@
 % Generated by roxygen2 (4.0.2): do not edit by hand
-\name{getNWISData}
-\alias{getNWISData}
+\name{readNWISdata}
+\alias{readNWISdata}
 \title{General Data Import from NWIS}
 \usage{
-getNWISData(service = "dv", ...)
+readNWISdata(service = "dv", ...)
 }
 \arguments{
 \item{service}{string. Possible values are "iv" (for instantaneous), "dv" (for daily values), "gwlevels"
@@ -19,8 +19,8 @@ Returns data from the NWIS web service.
 Arguments to the function should be based on \url{http://waterservices.usgs.gov} service calls.
 }
 \examples{
-dataTemp <- getNWISData(stateCd="OH",parameterCd="00010")
-dataTempUnit <- getNWISData(sites="03086500", service="iv", parameterCd="00010")
+dataTemp <- readNWISdata(stateCd="OH",parameterCd="00010")
+dataTempUnit <- readNWISdata(sites="03086500", service="iv", parameterCd="00010")
 }
 \keyword{NWIS}
 \keyword{data}
diff --git a/man/getNWISdvData.Rd b/man/readNWISdv.Rd
similarity index 79%
rename from man/getNWISdvData.Rd
rename to man/readNWISdv.Rd
index 93a9287c73c2b75d5772c371624103c2aef54539..13ba233bd15c5b30517c630a637eb9db0b444ff9 100644
--- a/man/getNWISdvData.Rd
+++ b/man/readNWISdv.Rd
@@ -1,9 +1,9 @@
 % Generated by roxygen2 (4.0.2): do not edit by hand
-\name{getNWISdvData}
-\alias{getNWISdvData}
+\name{readNWISdv}
+\alias{readNWISdv}
 \title{Raw Data Import for USGS NWIS Data}
 \usage{
-getNWISdvData(siteNumber, parameterCd, startDate, endDate, statCd = "00003",
+readNWISdv(siteNumber, parameterCd, startDate, endDate, statCd = "00003",
   format = "tsv")
 }
 \arguments{
@@ -30,19 +30,18 @@ A list of parameter codes can be found here: \url{http://help.waterdata.usgs.gov
 A list of statistic codes can be found here: \url{http://help.waterdata.usgs.gov/code/stat_code_query?fmt=html}
 }
 \examples{
-# These examples require an internet connection to run
 siteNumber <- '04085427'
 startDate <- '2012-01-01'
 endDate <- '2012-06-30'
 pCode <- '00060'
-rawDailyQ <- getNWISdvData(siteNumber,pCode, startDate, endDate)
-rawDailyTemperature <- getNWISdvData(siteNumber,'00010',
+rawDailyQ <- readNWISdv(siteNumber,pCode, startDate, endDate)
+rawDailyTemperature <- readNWISdv(siteNumber,'00010',
        startDate, endDate, statCd='00001')
-rawDailyTemperatureTSV <- getNWISdvData(siteNumber,'00010',
+rawDailyTemperatureTSV <- readNWISdv(siteNumber,'00010',
        startDate, endDate, statCd='00001',format='tsv')
-rawDailyQAndTempMeanMax <- getNWISdvData(siteNumber,c('00010','00060'),
+rawDailyQAndTempMeanMax <- readNWISdv(siteNumber,c('00010','00060'),
        startDate, endDate, statCd=c('00001','00003'))
-rawDailyMultiSites<- getNWISdvData(c("01491000","01645000"),c('00010','00060'),
+rawDailyMultiSites<- readNWISdv(c("01491000","01645000"),c('00010','00060'),
        startDate, endDate, statCd=c('00001','00003'))
 }
 \keyword{USGS}
diff --git a/man/getNWISPcodeInfo.Rd b/man/readNWISpCode.Rd
similarity index 83%
rename from man/getNWISPcodeInfo.Rd
rename to man/readNWISpCode.Rd
index c8fc769dec02cbd7cbeb47948467e43a50f11f45..09f1edff9b0fe845265571cc7e15b2c6587ce8f9 100644
--- a/man/getNWISPcodeInfo.Rd
+++ b/man/readNWISpCode.Rd
@@ -1,9 +1,9 @@
 % Generated by roxygen2 (4.0.2): do not edit by hand
-\name{getNWISPcodeInfo}
-\alias{getNWISPcodeInfo}
+\name{readNWISpCode}
+\alias{readNWISpCode}
 \title{USGS Parameter Data Retrieval}
 \usage{
-getNWISPcodeInfo(parameterCd)
+readNWISpCode(parameterCd)
 }
 \arguments{
 \item{parameterCd}{vector of USGS parameter codes.  This is usually a 5 digit number.}
@@ -17,7 +17,7 @@ This function gets the data from here: \url{http://nwis.waterdata.usgs.gov/nwis/
 }
 \examples{
 # These examples require an internet connection to run
-paramINFO <- getNWISPcodeInfo(c('01075','00060','00931'))
+paramINFO <- readNWISpCode(c('01075','00060','00931'))
 }
 \keyword{USGS}
 \keyword{data}
diff --git a/man/getNWISqwData.Rd b/man/readNWISqw.Rd
similarity index 77%
rename from man/getNWISqwData.Rd
rename to man/readNWISqw.Rd
index c37a4abb7e0544c5ccf99c3f14b0b2d92e489241..9e8ab3800acb4670030551c7361cb9bc1662691e 100644
--- a/man/getNWISqwData.Rd
+++ b/man/readNWISqw.Rd
@@ -1,9 +1,9 @@
 % Generated by roxygen2 (4.0.2): do not edit by hand
-\name{getNWISqwData}
-\alias{getNWISqwData}
+\name{readNWISqw}
+\alias{readNWISqw}
 \title{Raw Data Import for USGS NWIS QW Data}
 \usage{
-getNWISqwData(siteNumber, pCodes, startDate, endDate, expanded = FALSE)
+readNWISqw(siteNumber, pCodes, startDate, endDate, expanded = FALSE)
 }
 \arguments{
 \item{siteNumber}{string or vector of USGS site numbers.  This is usually an 8 digit number}
@@ -27,17 +27,16 @@ A list of parameter codes can be found here: \url{http://nwis.waterdata.usgs.gov
 A list of statistic codes can be found here: \url{http://nwis.waterdata.usgs.gov/nwis/help/?read_file=stat&format=table}
 }
 \examples{
-# These examples require an internet connection to run
 siteNumber <- c('04024430','04024000')
 startDate <- '2010-01-01'
 endDate <- ''
 pCodes <- c('34247','30234','32104','34220')
-rawNWISqwData <- getNWISqwData(siteNumber,pCodes,startDate,endDate)
-rawNWISqwDataExpand <- getNWISqwData(siteNumber,pCodes,startDate,endDate,expanded=TRUE)
+rawNWISqwData <- readNWISqw(siteNumber,pCodes,startDate,endDate)
+rawNWISqwDataExpand <- readNWISqw(siteNumber,pCodes,startDate,endDate,expanded=TRUE)
 }
 \seealso{
-\code{\link{getWQPData}}, \code{\link{getWQPSites}},
-\code{\link{getWQPqwData}}, \code{\link{constructNWISURL}}
+\code{\link{readWQPdata}}, \code{\link{whatWQPsites}},
+\code{\link{readWQPqw}}, \code{\link{constructNWISURL}}
 }
 \keyword{USGS}
 \keyword{data}
diff --git a/man/getNWISSiteInfo.Rd b/man/readNWISsite.Rd
similarity index 75%
rename from man/getNWISSiteInfo.Rd
rename to man/readNWISsite.Rd
index 27d7521730f6949190b9b542169f86f2e71d7fce..9d83a9d80e0a3fcc0ee09da57e3f3e44f4f197f0 100644
--- a/man/getNWISSiteInfo.Rd
+++ b/man/readNWISsite.Rd
@@ -1,9 +1,9 @@
 % Generated by roxygen2 (4.0.2): do not edit by hand
-\name{getNWISSiteInfo}
-\alias{getNWISSiteInfo}
+\name{readNWISsite}
+\alias{readNWISsite}
 \title{USGS Site File Data Retrieval}
 \usage{
-getNWISSiteInfo(siteNumber)
+readNWISsite(siteNumber)
 }
 \arguments{
 \item{siteNumber}{string USGS site number.  This is usually an 8 digit number}
@@ -16,8 +16,8 @@ Imports data from USGS site file site. This function gets data from here: \url{h
 }
 \examples{
 # These examples require an internet connection to run
-siteINFO <- getNWISSiteInfo('05114000')
-siteINFOMulti <- getNWISSiteInfo(c('05114000','09423350'))
+siteINFO <- readNWISsite('05114000')
+siteINFOMulti <- readNWISsite(c('05114000','09423350'))
 }
 \keyword{USGS}
 \keyword{data}
diff --git a/man/getNWISunitData.Rd b/man/readNWISunit.Rd
similarity index 86%
rename from man/getNWISunitData.Rd
rename to man/readNWISunit.Rd
index 773a5cef1e43c3c9e021aaa4dba69714678a0902..1d0eb2228379d44accae5e54d5838b9613918621 100644
--- a/man/getNWISunitData.Rd
+++ b/man/readNWISunit.Rd
@@ -1,9 +1,9 @@
 % Generated by roxygen2 (4.0.2): do not edit by hand
-\name{getNWISunitData}
-\alias{getNWISunitData}
+\name{readNWISunit}
+\alias{readNWISunit}
 \title{Raw Data Import for Instantaneous USGS NWIS Data}
 \usage{
-getNWISunitData(siteNumber, parameterCd, startDate, endDate, format = "xml")
+readNWISunit(siteNumber, parameterCd, startDate, endDate, format = "xml")
 }
 \arguments{
 \item{siteNumber}{string USGS site number.  This is usually an 8 digit number}
@@ -32,9 +32,9 @@ parameterCd <- '00060'
 startDate <- "2014-10-10"
 endDate <- "2014-10-10"
 # These examples require an internet connection to run
-rawData <- getNWISunitData(siteNumber,parameterCd,startDate,endDate)
+rawData <- readNWISunit(siteNumber,parameterCd,startDate,endDate)
 summary(rawData)
-rawData2 <- getNWISunitData(siteNumber,parameterCd,startDate,endDate,"tsv")
+rawData2 <- readNWISunit(siteNumber,parameterCd,startDate,endDate,"tsv")
 summary(rawData2)
 }
 \keyword{USGS}
diff --git a/man/getWQPData.Rd b/man/readWQPdata.Rd
similarity index 85%
rename from man/getWQPData.Rd
rename to man/readWQPdata.Rd
index 7fa0bdd33b8da644b53bd7292c033bf145c4c729..be8319514f4d18bd6b19b0e3072db6571cd6ec9a 100644
--- a/man/getWQPData.Rd
+++ b/man/readWQPdata.Rd
@@ -1,9 +1,9 @@
 % Generated by roxygen2 (4.0.2): do not edit by hand
-\name{getWQPData}
-\alias{getWQPData}
+\name{readWQPdata}
+\alias{readWQPdata}
 \title{General Data Import from Water Quality Portal}
 \usage{
-getWQPData(...)
+readWQPdata(...)
 }
 \arguments{
 \item{\dots}{see \url{www.waterqualitydata.us/webservices_documentation.jsp} for a complete list of options}
@@ -19,7 +19,7 @@ because it allows for other agencies rather than the USGS.
 \examples{
 \dontrun{
 nameToUse <- "pH"
-pHData <- getWQPData(siteid="USGS-04024315",characteristicName=nameToUse)
+pHData <- readWQPdata(siteid="USGS-04024315",characteristicName=nameToUse)
 }
 }
 \keyword{WQP}
diff --git a/man/getWQPqwData.Rd b/man/readWQPqw.Rd
similarity index 73%
rename from man/getWQPqwData.Rd
rename to man/readWQPqw.Rd
index af6da3c3a512b37bab2af44935d6382d1fb65569..cba6913232f3aeb1df3ebb3c242eb8c5c07212a3 100644
--- a/man/getWQPqwData.Rd
+++ b/man/readWQPqw.Rd
@@ -1,9 +1,9 @@
 % Generated by roxygen2 (4.0.2): do not edit by hand
-\name{getWQPqwData}
-\alias{getWQPqwData}
+\name{readWQPqw}
+\alias{readWQPqw}
 \title{Raw Data Import for Water Quality Portal}
 \usage{
-getWQPqwData(siteNumber, parameterCd, startDate, endDate)
+readWQPqw(siteNumber, parameterCd, startDate, endDate)
 }
 \arguments{
 \item{siteNumber}{string site number. This needs to include the full agency code prefix.}
@@ -28,15 +28,12 @@ either USGS, or other Water Quality Portal offered sites. It is required to use
 site name, such as 'USGS-01234567'.
 }
 \examples{
-# These examples require an internet connection to run
-\dontrun{
-rawPcode <- getWQPqwData('USGS-01594440','01075', '1985-01-01', '1985-03-31')
-rawCharacteristicName <- getWQPqwData('WIDNR_WQX-10032762','Specific conductance', '', '')
-}
+rawPcode <- readWQPqw('USGS-01594440','01075', '1985-01-01', '1985-03-31')
+rawCharacteristicName <- readWQPqw('WIDNR_WQX-10032762','Specific conductance', '', '')
 }
 \seealso{
-\code{\link{getWQPData}}, \code{\link{getWQPSites}},
-\code{\link{getNWISqwData}}, and \code{\link{parseWQPData}}
+\code{\link{readWQPdata}}, \code{\link{whatWQPsites}},
+\code{\link{readNWISqw}}, and \code{\link{importWQP}}
 }
 \keyword{USGS}
 \keyword{data}
diff --git a/man/renameColumns.Rd b/man/renameColumns.Rd
index bb8142710aa3bc48ae68038414d67a2800e7367c..6a747b7baad7cbd2e0ce826386d65b4be0bb3fac 100644
--- a/man/renameColumns.Rd
+++ b/man/renameColumns.Rd
@@ -17,11 +17,11 @@ Rename columns coming back from NWIS data retrievals
 \examples{
 # This example requires an internet connection to run
 siteNumber <- '05114000'
-rawData <- getNWISdvData(siteNumber,c("00010","00060","00300"),
+rawData <- readNWISdv(siteNumber,c("00010","00060","00300"),
           "2001-01-01","2002-01-01",statCd=c("00001","00003"))
 rawData <- renameColumns(rawData)
 date <- "2014-10-10"
-rawData2 <- getNWISunitData(siteNumber,c("00010","00060"),date,date)
+rawData2 <- readNWISunit(siteNumber,c("00010","00060"),date,date)
 rawData2 <- renameColumns(rawData2)
 head(rawData2)
 }
diff --git a/man/getNWISDataAvailability.Rd b/man/whatNWISData.Rd
similarity index 58%
rename from man/getNWISDataAvailability.Rd
rename to man/whatNWISData.Rd
index 8fdb4950a8b29dc6483654c88d26388d7fe91923..374a2728e01615ec496a23127e34699b46c51a45 100644
--- a/man/getNWISDataAvailability.Rd
+++ b/man/whatNWISData.Rd
@@ -1,9 +1,9 @@
 % Generated by roxygen2 (4.0.2): do not edit by hand
-\name{getNWISDataAvailability}
-\alias{getNWISDataAvailability}
+\name{whatNWISData}
+\alias{whatNWISData}
 \title{USGS data availability}
 \usage{
-getNWISDataAvailability(siteNumber, type = c("uv", "dv", "qw"))
+whatNWISData(siteNumber, type = c("uv", "dv", "qw"))
 }
 \arguments{
 \item{siteNumber}{string USGS site number.}
@@ -17,11 +17,10 @@ retval dataframe with all information found in the expanded site file
 Imports a table of available parameters, period of record, and count.
 }
 \examples{
-# These examples require an internet connection to run
-availableData <- getNWISDataAvailability('05114000')
+availableData <- whatNWISData('05114000')
 # To find just unit value ('instantaneous') data:
-uvData <- getNWISDataAvailability('05114000',type="uv")
-uvDataMulti <- getNWISDataAvailability(c('05114000','09423350'),type="uv")
+uvData <- whatNWISData('05114000',type="uv")
+uvDataMulti <- whatNWISData(c('05114000','09423350'),type="uv")
 }
 \keyword{USGS}
 \keyword{data}
diff --git a/man/getNWISSites.Rd b/man/whatNWISsites.Rd
similarity index 85%
rename from man/getNWISSites.Rd
rename to man/whatNWISsites.Rd
index 3921badfae3d4d49bb7973b0982b07203f1cbb7e..6f94628446bf3bc006aebced597ae0fde9a3cf5b 100644
--- a/man/getNWISSites.Rd
+++ b/man/whatNWISsites.Rd
@@ -1,9 +1,9 @@
 % Generated by roxygen2 (4.0.2): do not edit by hand
-\name{getNWISSites}
-\alias{getNWISSites}
+\name{whatNWISsites}
+\alias{whatNWISsites}
 \title{Site Data Import from NWIS}
 \usage{
-getNWISSites(...)
+whatNWISsites(...)
 }
 \arguments{
 \item{\dots}{see \url{http://waterservices.usgs.gov/rest/Site-Service.html#Service} for a complete list of options}
@@ -17,7 +17,7 @@ Arguments to the function should be based on \url{http://waterservices.usgs.gov/
 Mapper format is used
 }
 \examples{
-siteListPhos <- getNWISSites(stateCd="OH",parameterCd="00665")
+siteListPhos <- whatNWISsites(stateCd="OH",parameterCd="00665")
 }
 \keyword{NWIS}
 \keyword{data}
diff --git a/man/getWQPSites.Rd b/man/whatWQPsites.Rd
similarity index 81%
rename from man/getWQPSites.Rd
rename to man/whatWQPsites.Rd
index 6ac4d2163e830967c3d04f2d2b61cc5b518a3af7..bf1972d04252fa9a6c310449f764adb21c0e59b7 100644
--- a/man/getWQPSites.Rd
+++ b/man/whatWQPsites.Rd
@@ -1,9 +1,9 @@
 % Generated by roxygen2 (4.0.2): do not edit by hand
-\name{getWQPSites}
-\alias{getWQPSites}
+\name{whatWQPsites}
+\alias{whatWQPsites}
 \title{Site Data Import from Water Quality Portal}
 \usage{
-getWQPSites(...)
+whatWQPsites(...)
 }
 \arguments{
 \item{\dots}{see \url{www.waterqualitydata.us/webservices_documentation.jsp} for a complete list of options}
@@ -17,10 +17,10 @@ Returns a list of sites from the Water Quality Portal web service. This function
 Arguments to the function should be based on \url{www.waterqualitydata.us/webservices_documentation.jsp}
 }
 \examples{
+site1 <- whatWQPsites(siteid="USGS-01594440")
 \dontrun{
-site1 <- getWQPSites(siteid="USGS-01594440")
 type <- "Stream"
-sites <- getWQPSites(statecode="US:55",countycode="US:55:025",siteType=type)
+sites <- whatWQPsites(statecode="US:55",countycode="US:55:025",siteType=type)
 }
 }
 \keyword{WQP}