diff --git a/DESCRIPTION b/DESCRIPTION
index 9d98bd253ea523b204ec04fc51d493c208906d05..879edb2f6600721c6acf412ba7cfd3ac49f61726 100644
--- a/DESCRIPTION
+++ b/DESCRIPTION
@@ -1,14 +1,16 @@
 Package: dataRetrieval
 Type: Package
 Title: Retrieval functions for USGS and EPA hydrologic and water quality data
-Version: 1.4.1.9000
+Version: 1.5.0.9000
 Date: 2014-10-13
 Authors@R: c( person("Robert", "Hirsch", role = c("aut"),
     email = "rhirsch@usgs.gov"),
     person("Laura", "DeCicco", role = c("aut","cre"),
-    email = "ldecicco@usgs.gov"))
-Description: Collection of functions to help retrieve USGS data from either web
-    services or user-provided data files.
+    email = "ldecicco@usgs.gov"),
+    person("David","Lorenz", role=c("aut"),
+    email = "lorenz@usgs.gov"))
+Description: Collection of functions to help retrieve USGS data from web
+    services.
 License: Unlimited | file LICENSE
 Copyright: This software is in the public domain because it contains materials
     that originally came from the United States Geological Survey, an agency of
@@ -18,18 +20,17 @@ Copyright: This software is in the public domain because it contains materials
 Depends:
     R (>= 3.0)
 Imports:
-    zoo,
     XML,
     RCurl,
     reshape2,
-    lubridate
+    lubridate,
+    plyr,
+    httr
 Suggests:
     xtable,
-    EGRET,
     knitr,
     testthat
 LazyLoad: yes
 LazyData: yes
 VignetteBuilder: knitr
 BuildVignettes: true
-Additional_repositories: http://usgs-r.github.com
diff --git a/NAMESPACE b/NAMESPACE
index 9ae99572f5ec7e5ea1f8872ca9af542e053a3432..6f759704bc0462ec8347309037235671beacac6f 100644
--- a/NAMESPACE
+++ b/NAMESPACE
@@ -1,50 +1,42 @@
 # Generated by roxygen2 (4.0.2): do not edit by hand
 
-export(checkStartEndDate)
-export(compressData)
 export(constructNWISURL)
-export(dataOverview)
-export(dateFormatCheck)
-export(formatCheckDate)
-export(formatCheckParameterCd)
-export(getDailyDataFromFile)
-export(getDataFromFile)
-export(getNWISDaily)
+export(constructWQPURL)
 export(getNWISData)
 export(getNWISDataAvailability)
-export(getNWISInfo)
 export(getNWISPcodeInfo)
-export(getNWISSample)
 export(getNWISSiteInfo)
 export(getNWISSites)
 export(getNWISdvData)
 export(getNWISqwData)
 export(getNWISunitData)
-export(getRDB1Data)
-export(getUserDaily)
-export(getUserInfo)
-export(getUserSample)
 export(getWQPData)
-export(getWQPInfo)
-export(getWQPSample)
 export(getWQPSites)
 export(getWQPqwData)
-export(getWaterML1Data)
-export(mergeReport)
-export(padVariable)
-export(populateConcentrations)
-export(populateDaily)
-export(populateDateColumns)
-export(populateParameterINFO)
-export(populateSampleColumns)
-export(populateSiteINFO)
-export(processQWData)
-export(readWQPData)
-export(removeDuplicates)
-export(renameColumns)
+export(importRDB1)
+export(importWQP)
+export(importWaterML1)
+export(importWaterML2)
+export(readNWISdata)
+export(readNWISdv)
+export(readNWISgwl)
+export(readNWISmeas)
+export(readNWISpCode)
+export(readNWISpeak)
+export(readNWISqw)
+export(readNWISrating)
+export(readNWISsite)
+export(readNWISuv)
+export(readWQPdata)
+export(readWQPqw)
+export(renameNWISColumns)
+export(whatNWISdata)
+export(whatNWISsites)
+export(whatWQPsites)
+export(zeroPad)
 import(RCurl)
 import(XML)
+import(httr)
+import(lubridate)
 import(reshape2)
-import(zoo)
-importFrom(lubridate,decimal_date)
-importFrom(lubridate,parse_date_time)
+importFrom(plyr,rbind.fill.matrix)
diff --git a/NEWS b/NEWS
index 2b4d6f4c5ffd72c75ad9a69025fedd7a11ae1a61..455366c1f883e80fb446eeab0a6735bcdffbe5e7 100644
--- a/NEWS
+++ b/NEWS
@@ -21,14 +21,14 @@ dataRetrieval 1.3.3
 * Updated getNWISSiteInfo to retrieve multiple site file datasets at once using a vector of siteNumbers as input argument.
 * Updated error-handling for Web service calls. More information is returned when errors happen
 * Added some basic processing to Water Quality Portal raw data retrievals. Date columns are returned as Date objects, value columns are numeric, and a column is created from the date/time/timezone columns that is POSIXct.
-* Added very generalized NWIS and WQP retrieval functions (getNWISData, getNWISSites, getGeneralWQPData, and getWQPSites) which allow the user to use any argument available on the Web service platform.
+* Added very generalized NWIS and WQP retrieval functions (getNWISData, getNWISSites, getGeneralWQPData, and whatWQPsites) which allow the user to use any argument available on the Web service platform.
 
 
 dataRetrieval 1.3.2
 ===========
 
-* Deprecated getQWData, updated getWQPData to take either parameter code or characteristic name.
-* Changed the name of raw data retrievals to: getNWISqwData, getNWISunitData, getNWISdvData, and getWQPqwData (from: getNWISqwData, retrieveUnitNWISData, retrieveNWISData, getRawQWData)
+* Deprecated getQWData, updated readWQPdata to take either parameter code or characteristic name.
+* Changed the name of raw data retrievals to: readNWISqw, getNWISunitData, getNWISdvData, and getWQPqwData (from: getNWISqwData, retrieveUnitNWISData, retrieveNWISData, getRawQWData)
 * Added NA warning to getDVData function
 * Updated mergeReport to allow for Sample data with different measurements taken on the same day
 
@@ -53,4 +53,4 @@ dataRetrieval 1.2.1
 
 * Expanded the capabilities to retrieve raw data from the web services.
 
-* Added Storet data retrievals in getWQPData function
+* Added Storet data retrievals in readWQPdata function
diff --git a/R/basicWQPData.R b/R/basicWQPData.R
deleted file mode 100644
index 13ffe915cea99c936f60555f0a03de8b1c792eed..0000000000000000000000000000000000000000
--- a/R/basicWQPData.R
+++ /dev/null
@@ -1,130 +0,0 @@
-#' Basic Water Quality Portal Data grabber
-#'
-#' Imports data from the Water Quality Portal based on a specified url.
-#' 
-#' @param url string URL to Water Quality Portal#' @keywords data import USGS web service
-#' @return retval dataframe raw data returned from the Water Quality Portal. Additionally, a POSIXct dateTime column is supplied for 
-#' start and end times.
-#' @export
-#' @import RCurl
-#' @importFrom lubridate parse_date_time
-#' @examples
-#' # These examples require an internet connection to run
-#' \dontrun{
-#' ## Examples take longer than 5 seconds:
-#' rawSampleURL <- constructNWISURL('USGS-01594440','01075', '1985-01-01', '1985-03-31',"wqp")
-#' rawSample <- readWQPData(rawSampleURL)
-#' }
-readWQPData <- function(url){
-  
-  h <- basicHeaderGatherer()
-  
-  retval = tryCatch({  
-    doc <- getURL(url, headerfunction = h$update)
-    
-  }, warning = function(w) {
-    message(paste("URL caused a warning:", url))
-    message(w)
-  }, error = function(e) {
-    message(paste("URL does not seem to exist:", url))
-    message(e)
-    return(NA)
-  })
-  
-  if(h$value()["Content-Type"] == "text/tab-separated-values;charset=UTF-8"){
-    
-    numToBeReturned <- as.numeric(h$value()["Total-Result-Count"])
-    
-    if (!is.na(numToBeReturned) | numToBeReturned != 0){
-      
-      
-      namesData <- read.delim(textConnection(doc), header = TRUE, quote="\"", 
-                              dec=".", sep='\t', 
-                              colClasses='character', 
-                              fill = TRUE,nrow=1)
-      classColumns <- setNames(rep('character',ncol(namesData)),names(namesData))
-      
-      classColumns[grep("MeasureValue",names(classColumns))] <- NA
-      
-      retval <- read.delim(textConnection(doc), header = TRUE, quote="\"", 
-                           dec=".", sep='\t', 
-                           colClasses=as.character(classColumns), 
-                           fill = TRUE)    
-      actualNumReturned <- nrow(retval)
-      
-      retval[,names(which(sapply(retval[,grep("MeasureValue",names(retval))], function(x)all(is.na(x)))))] <- ""
-      
-      if(actualNumReturned != numToBeReturned) warning(numToBeReturned, " sample results were expected, ", actualNumReturned, " were returned")
-      
-      timeZoneLibrary <- setNames(c("America/New_York","America/New_York","America/Chicago","America/Chicago",
-                                    "America/Denver","America/Denver","America/Los_Angeles","America/Los_Angeles",
-                                    "America/Anchorage","America/Anchorage","America/Honolulu","America/Honolulu"),
-                                  c("EST","EDT","CST","CDT","MST","MDT","PST","PDT","AKST","AKDT","HAST","HST"))
-      timeZoneStart <- as.character(timeZoneLibrary[retval$ActivityStartTime.TimeZoneCode])
-      timeZoneEnd <- as.character(timeZoneLibrary[retval$ActivityEndTime.TimeZoneCode])
-      timeZoneStart[is.na(timeZoneStart)] <- ""
-      timeZoneEnd[is.na(timeZoneEnd)] <- ""
-      
-      if("ActivityStartDate" %in% names(retval)){
-        if(any(retval$ActivityStartDate != "")){
-          suppressWarnings(retval$ActivityStartDate <- as.Date(parse_date_time(retval$ActivityStartDate, c("Ymd", "mdY"))))
-        }
-      }
-
-      if("ActivityEndDate" %in% names(retval)){
-        if(any(retval$ActivityEndDate != "")){
-          suppressWarnings(retval$ActivityEndDate <- as.Date(parse_date_time(retval$ActivityEndDate, c("Ymd", "mdY"))))
-        }        
-      }
-
-      if(any(!is.na(timeZoneStart))){
-        if(length(unique(timeZoneStart)) == 1){
-          retval$ActivityStartDateTime <- with(retval, as.POSIXct(paste(ActivityStartDate, ActivityStartTime.Time),format="%Y-%m-%d %H:%M:%S", tz=unique(timeZoneStart)))
-        } else {
-          
-          mostCommonTZ <- names(sort(summary(as.factor(timeZoneStart)),decreasing = TRUE)[1])
-
-          retval$ActivityStartDateTime <- with(retval, as.POSIXct(paste(ActivityStartDate, ActivityStartTime.Time),
-                                format="%Y-%m-%d %H:%M:%S", 
-                                tz=mostCommonTZ))
-          additionalTZs <- names(sort(summary(as.factor(timeZoneStart)),decreasing = TRUE)[-1])
-          for(i in additionalTZs){
-            retval$ActivityStartDateTime[timeZoneStart == i] <-  with(retval[timeZoneStart == i,], 
-                               as.POSIXct(paste(ActivityStartDate, ActivityStartTime.Time),
-                               format="%Y-%m-%d %H:%M:%S", 
-                               tz=i))      
-          }
-        }
-      }
-      
-      if(any(!is.na(timeZoneEnd))){
-        if(length(unique(timeZoneEnd)) == 1){
-          retval$ActivityEndDateTime <- with(retval, as.POSIXct(paste(ActivityEndDate, ActivityEndTime.Time), format="%Y-%m-%d %H:%M:%S",tz=unique(timeZoneEnd)))
-        } else {
-          mostCommonTZ <- names(sort(summary(as.factor(timeZoneEnd)),decreasing = TRUE)[1])
-          
-          retval$ActivityEndDateTime <- with(retval, as.POSIXct(paste(ActivityEndDate, ActivityEndTime.Time),
-                                      format="%Y-%m-%d %H:%M:%S", 
-                                      tz=mostCommonTZ))
-          additionalTZs <- names(sort(summary(as.factor(timeZoneEnd)),decreasing = TRUE)[-1])
-          for(i in additionalTZs){
-            retval$ActivityEndDateTime[timeZoneEnd == i] <-  with(retval[timeZoneEnd == i,], 
-                          as.POSIXct(paste(ActivityEndDate, ActivityEndTime.Time),
-                                     format="%Y-%m-%d %H:%M:%S", 
-                                     tz=i))      
-          }
-        }
-      }
-          
-      return(retval)
-      
-    } else {
-      warning("No data to retrieve")
-      return(NA)
-    }
-  } else {
-    message(paste("URL caused an error:", url))
-    message("Content-Type=",h$value()["Content-Type"])
-    return(NA)
-  }
-}
\ No newline at end of file
diff --git a/R/checkStartEndDate.r b/R/checkStartEndDate.r
deleted file mode 100644
index 4754012283b3526d16f9c91248aaa9e2726dce69..0000000000000000000000000000000000000000
--- a/R/checkStartEndDate.r
+++ /dev/null
@@ -1,37 +0,0 @@
-#' checkStartEndDate
-#'
-#' Checks that the start date is before the end date.  If not, it will give the user the opportunity to correct, otherwise will create a warning.
-#'
-#' @param startDate string
-#' @param endDate string
-#' @param interactive logical Option for interactive mode.  If true, there is user interaction for error handling and data checks.
-#' @keywords WRTDS flow
-#' @return vector where first value is startDate, second is endDate
-#' @export
-#' @examples
-#' startDate <- '1985-01-01'
-#' endDate <- '1990-01-01'
-#' checkStartEndDate(startDate, endDate)
-checkStartEndDate <- function(startDate, endDate,interactive=TRUE){
-  start <- as.Date("1850-01-01")
-  end <- as.Date(Sys.Date())
-  
-  if (nzchar(startDate)) start <- as.Date(startDate)
-  if (nzchar(endDate)) end <- as.Date(endDate)
-  if (start > end) {
-    if (interactive){
-      cat ("Start date must be before end date, you entered Start = ", startDate, " End = ", endDate, "\n")
-      cat ("please re-enter startDate (YYYY-MM-DD) - hit Enter for earliest date as startDate: \n")
-      startDate <- readline()
-      cat("Please re-enter endDate (YYYY-MM-DD) - hit Enter for latest date as endDate: \n")
-      endDate <- readline()
-    } else {
-      warningMessage <- "Starting date was not before ending date, dates will be ignored"
-      warning(warningMessage)
-      startDate <- as.Date("1851-01-01")
-      endDate <- as.Date(Sys.Date())
-    }
-    
-  }  
-  return(c(startDate,endDate))
-}
diff --git a/R/compressData.r b/R/compressData.r
deleted file mode 100644
index 8f944da52846869945b42f8565cead6fa3089854..0000000000000000000000000000000000000000
--- a/R/compressData.r
+++ /dev/null
@@ -1,93 +0,0 @@
-#' Compress sample data frame
-#'
-#' Using raw data that has at least dateTime, value, code, populates the measured data portion of the Sample data frame used in WRTDS
-#' ConcLow  = Lower bound for an observed concentration
-#' ConcHigh = Upper bound for an observed concentration
-#' ConcAve  = Average of ConcLow and ConcHigh.  If ConcLow is NA, then ConcAve = ConcHigh/2
-#' Uncen    = 1 if uncensored, 0 if censored
-#'
-#' @param data dataframe contains at least dateTime, value, code columns
-#' @param interactive logical Option for interactive mode.  If true, there is user interaction for error handling and data checks.
-#' @keywords WRTDS flow
-#' @return dataframe returnDataFrame data frame containing dateTime, ConcHigh, ConcLow, Uncen, ConcAve
-#' @export
-#' @examples
-#' dateTime <- c('1985-01-01', '1985-01-02', '1985-01-03')
-#' comment1 <- c("","","")
-#' value1 <- c(1,2,3)
-#' comment2 <- c("","<","")
-#' value2 <- c(2,3,4)
-#' comment3 <- c("","","<")
-#' value3 <- c(3,4,5)
-#' dataInput <- data.frame(dateTime, comment1, value1, 
-#'       comment2, value2, 
-#'       comment3, value3, stringsAsFactors=FALSE)
-#' compressData(dataInput)
-compressData <- function(data, interactive=TRUE){  
-  
-  data <- as.data.frame(data, stringsAsFactors=FALSE)
-  numColumns <- ncol(data)
-  numDataColumns <- (numColumns-1)/2
-  lowConcentration <- rep(0,nrow(data))
-  highConcentration <- rep(0,nrow(data))
-  uncensored <- rep(0,nrow(data))
-  
-  i <- 1
-  while (i <= numDataColumns) {
-    code <- data[2*i]
-    value <- data[2*i+1]
-    value <- as.numeric(unlist(value))
-    value[is.na(value)] <- 0
-    returnDataFrame <- as.data.frame(matrix(ncol=2,nrow=nrow(code)))
-    colnames(returnDataFrame) <- c('code','value')
-    returnDataFrame$code <- code[[1]]
-    returnDataFrame$code <- ifelse(is.na(returnDataFrame$code),"",returnDataFrame$code)
-    returnDataFrame$value <- value
-    concentrationColumns <- populateConcentrations(returnDataFrame)
-    lowConcentration <- lowConcentration + concentrationColumns$ConcLow
-    highConcentration <- highConcentration + concentrationColumns$ConcHigh
-    i <- i + 1
-  }
-  
-  names(data) <- c('dateTime', 'code', 'value')
-  returnDataFrame <- as.data.frame(matrix(ncol=3,nrow=nrow(data)))
-  names(returnDataFrame) <- c('dateTime', 'ConcLow', 'ConcHigh')
-  
-  data$dateTime <- as.character(data$dateTime)
-  if(dateFormatCheck(data$dateTime)){
-    returnDataFrame$dateTime <- as.Date(data$dateTime)  
-  } else {
-    data$dateTime <- as.Date(data$dateTime,format="%m/%d/%Y")
-    returnDataFrame$dateTime <- as.Date(data$dateTime,format="%m/%d/%Y")
-  }
-  returnDataFrame$ConcLow <- as.numeric(lowConcentration)
-  returnDataFrame$ConcHigh <- as.numeric(highConcentration)
-  Uncen1<-ifelse(returnDataFrame$ConcLow==returnDataFrame$ConcHigh,1,0)
-  returnDataFrame$Uncen<-ifelse(is.na(returnDataFrame$ConcLow),0,Uncen1)
-  
-  flaggedData1 <- returnDataFrame[(returnDataFrame$ConcLow == 0 & returnDataFrame$ConcHigh == 0),]
-  returnDataFrame <- returnDataFrame[!(returnDataFrame$ConcLow == 0 & returnDataFrame$ConcHigh == 0),]
-  
-  if (nrow(flaggedData1) > 0){
-    WarningMessage <- paste("Deleted ", nrow(flaggedData1), " rows of data because concentration was reported as 0.0, the program is unable to interpret that result and is therefore deleting it.", sep="")    
-    warning(WarningMessage)
-    if (interactive){
-      cat("Deleted Rows:\n")
-      print(flaggedData1)
-    }
-  }
-  
-  flaggedData2 <- returnDataFrame[(returnDataFrame$ConcLow > returnDataFrame$ConcHigh),]
-  returnDataFrame <- returnDataFrame[(returnDataFrame$ConcLow <= returnDataFrame$ConcHigh),]
-  
-  if (nrow(flaggedData2) > 0){
-    WarningMessage <- paste("Deleted ", nrow(flaggedData2), " rows of data because the high concentration was reported lower than the low concentration, the program is unable to interpret that result and is therefore deleting it.", sep="")    
-    warning(WarningMessage)
-    if (interactive){
-      cat("Deleted Rows:\n")
-      print(flaggedData2)
-    }
-  }
-  
-  return(returnDataFrame)
-}
diff --git a/R/constructNWISURL.r b/R/constructNWISURL.r
index 7b838f7b9a31070588f1ba46cab1c14eae3f6516..3658b869485411b6d892f33a34a0d14db9fc955b 100644
--- a/R/constructNWISURL.r
+++ b/R/constructNWISURL.r
@@ -9,12 +9,13 @@
 #' @param startDate string starting date for data retrieval in the form YYYY-MM-DD.
 #' @param endDate string ending date for data retrieval in the form YYYY-MM-DD.
 #' @param statCd string or vector USGS statistic code only used for daily value service. This is usually 5 digits.  Daily mean (00003) is the default.
-#' @param service string USGS service to call. Possible values are "dv" (daily values), "uv" (unit/instantaneous values), "qw" (water quality data), "gwlevels" (groundwater),and "wqp" (water quality portal, which can include STORET).
+#' @param service string USGS service to call. Possible values are "dv" (daily values), "uv" (unit/instantaneous values), 
+#'  "qw" (water quality data), "gwlevels" (groundwater), "rating" (rating curve), "peak" (peak streamflow), and "meas" (discrete streamflow measurements).
 #' @param format string, can be "tsv" or "xml", and is only applicable for daily and unit value requests.  "tsv" returns results faster, but there is a possiblitiy that an incomplete file is returned without warning. XML is slower, 
 #' but will offer a warning if the file was incomplete (for example, if there was a momentary problem with the internet connection). It is possible to safely use the "tsv" option, 
 #' but the user must carefully check the results to see if the data returns matches what is expected. The default is therefore "xml". 
 #' @param expanded logical defaults to FALSE. If TRUE, retrieves additional information, only applicable for qw data.
-#' @param interactive logical Option for interactive mode.  If TRUE, there is user interaction for error handling and data checks.
+#' @param ratingType can be "base", "corr", or "exsa". Only applies to rating curve data.
 #' @keywords data import USGS web service
 #' @return url string
 #' @export
@@ -26,26 +27,41 @@
 #' pCode <- c("00060","00010")
 #' url_daily <- constructNWISURL(siteNumber,pCode,
 #'            startDate,endDate,'dv',statCd=c("00003","00001"))
-#' url_unit <- constructNWISURL(siteNumber,pCode,"2012-06-28","2012-06-30",'iv')
 #' \dontrun{
+#' # Not running for time considerations
+#' url_unit <- constructNWISURL(siteNumber,pCode,"2012-06-28","2012-06-30",'iv')
+#' 
 #' url_qw_single <- constructNWISURL(siteNumber,"01075",startDate,endDate,'qw')
 #' url_qw <- constructNWISURL(siteNumber,c('01075','00029','00453'),
 #'            startDate,endDate,'qw')
-#' url_wqp <- constructNWISURL(paste("USGS",siteNumber,sep="-"),c('01075','00029','00453'),
-#'            startDate,endDate,'wqp')
 #' url_daily_tsv <- constructNWISURL(siteNumber,pCode,startDate,endDate,'dv',
 #'            statCd=c("00003","00001"),format="tsv")
+#' url_rating <- constructNWISURL(siteNumber,service="rating",ratingType="base")
+#' url_peak <- constructNWISURL(siteNumber, service="peak")
+#' url_meas <- constructNWISURL(siteNumber, service="meas")
 #'            }
-constructNWISURL <- function(siteNumber,parameterCd,startDate,endDate,service,statCd="00003", format="xml",expanded=FALSE,interactive=TRUE){
+constructNWISURL <- function(siteNumber,parameterCd="00060",startDate="",endDate="",
+                             service,statCd="00003", format="xml",expanded=FALSE,
+                             ratingType="base"){
 
-  startDate <- formatCheckDate(startDate, "StartDate", interactive=interactive)
-  endDate <- formatCheckDate(endDate, "EndDate", interactive=interactive)
+  service <- match.arg(service, c("dv","uv","iv","qw","gwlevels","rating","peak","meas"))
+  
+  if(any(!is.na(parameterCd))){
+    pcodeCheck <- all(nchar(parameterCd) == 5) & all(!is.na(suppressWarnings(as.numeric(parameterCd))))
+    
+    if(!pcodeCheck){
+      badIndex <- which(parameterCd %in% parameterCdFile$parameter_cd)
+      if(length(badIndex) > 0){
+        badPcode <- parameterCd[-badIndex]
+      } else {
+        badPcode <- parameterCd
+      }
+      message("The following pCodes may be unavailable:",paste(badPcode,collapse=","))
+    }
+  }
   
-  dateReturn <- checkStartEndDate(startDate, endDate, interactive=interactive)
-  startDate <- dateReturn[1]
-  endDate <- dateReturn[2]
   multipleSites <- length(siteNumber) > 1
-  multiplePcodes <- length(parameterCd)>1
+  
   siteNumber <- paste(siteNumber, collapse=",")
   
   switch(service,
@@ -60,6 +76,8 @@ constructNWISURL <- function(siteNumber,parameterCd,startDate,endDate,service,st
                searchCriteria <- "search_site_no"
              }
              
+             multiplePcodes <- length(parameterCd)>1
+             
              if(multiplePcodes){
                pCodes <- paste(parameterCd, collapse=",")
                pCodes <- paste('multiple_parameter_cds', pCodes, sep="=")
@@ -94,49 +112,39 @@ constructNWISURL <- function(siteNumber,parameterCd,startDate,endDate,service,st
                url <- paste(url,"&end_date=",endDate,sep="")
              }
            },
-         wqp = {
-      
-           #Check for pcode:
-           if(all(nchar(parameterCd) == 5)){
-             suppressWarnings(pCodeLogic <- all(!is.na(as.numeric(parameterCd))))
-           } else {
-             pCodeLogic <- FALSE
-             parameterCd <- gsub(",","%2C",parameterCd)
-             parameterCd <- URLencode(parameterCd)
-           }
-           
-           if(multiplePcodes){
-             parameterCd <- paste(parameterCd, collapse=";")
-           }
-           
-           if (nzchar(startDate)){
-             startDate <- format(as.Date(startDate), format="%m-%d-%Y")
-           }
-           if (nzchar(endDate)){
-             endDate <- format(as.Date(endDate), format="%m-%d-%Y")
-           }
-           
-           baseURL <- "http://www.waterqualitydata.us/Result/search?siteid="
-           url <- paste0(baseURL,
-                        siteNumber,
-                        ifelse(pCodeLogic,"&pCode=","&characteristicName="),
-                        parameterCd,
-                        "&startDateLo=",
-                        startDate,
-                        "&startDateHi=",
-                        endDate,
-                        "&countrycode=US&mimeType=tsv")
-           },
+        rating = {
+          ratingType <- match.arg(ratingType, c("base", "corr", "exsa"))
+          url <- paste0("http://waterdata.usgs.gov/nwisweb/get_ratings?site_no=",
+                siteNumber, "&file_type=", ratingType)
+        },
+        peak = {
+          url <- paste0("http://nwis.waterdata.usgs.gov/usa/nwis/peak/?site_no=", siteNumber,
+                "&range_selection=date_range&format=rdb")
+          if (nzchar(startDate)) {
+            url <- paste0(url,"&begin_date=",startDate)
+          }
+          if(nzchar(endDate)){
+            url <- paste0(url, "&end_date=", endDate)
+          }
+        },
+        meas = {
+          url <- paste0("http://waterdata.usgs.gov/nwis/measurements?site_no=", siteNumber,
+                "&range_selection=date_range&format=rdb")
+          if (nzchar(startDate)) {
+            url <- paste0(url,"&begin_date=",startDate)
+          }
+          if(nzchar(endDate)){
+            url <- paste0(url, "&end_date=", endDate)
+          }
+
+        },
+        
         { # this will be either dv or uv
-           
+          multiplePcodes <- length(parameterCd)>1
           # Check for 5 digit parameter code:
           if(multiplePcodes){
             parameterCd <- paste(parameterCd, collapse=",")
-          } else {
-            if("gwlevels" != service){
-              parameterCd <- formatCheckParameterCd(parameterCd, interactive=interactive)
-            }
-          }
+          } 
           
           if ("uv"==service) {
             service <- "iv"
@@ -145,19 +153,28 @@ constructNWISURL <- function(siteNumber,parameterCd,startDate,endDate,service,st
             baseURL <- paste0("http://waterservices.usgs.gov/nwis/",service)  
           }
           
-          if ("xml"==format){ 
-            if("gwlevels" == service){
-              format <- "waterml"
-            } else {
-              format <- "waterml,1.1"
-            }            
-          } else if ("tsv" == format){
-            format <- "rdb,1.0"
-          } else {
-            warning("non-supported format requested, please choose xml or tsv")
-          }
+          format <- match.arg(format, c("xml","tsv","wml1","wml2","rdb"))
+          
+          formatURL <- switch(format,
+            xml = {if ("gwlevels" == service) {
+                "waterml"
+              } else {
+                "waterml,1.1"
+              }
+            },
+            rdb = "rdb,1.0",
+            tsv = "rdb,1.0",
+            wml2 = "waterml,2.0",
+            wml1 = {if ("gwlevels" == service) {
+                "waterml"
+              } else {
+                "waterml,1.1"
+              }
+            }
+          )
+
           
-          url <- paste0(baseURL,"/?site=",siteNumber, "&format=", format)
+          url <- paste0(baseURL,"/?site=",siteNumber, "&format=", formatURL)
           
           if("gwlevels"!= service){
             url <- paste0(url, "&ParameterCd=",parameterCd)
@@ -184,10 +201,77 @@ constructNWISURL <- function(siteNumber,parameterCd,startDate,endDate,service,st
         }
          
     )
+  
   if(url.exists(url)){
     return(url)
   } else {
-    stop("The following url doesn't seem to exist:\n",url)
+    stop("The following url doesn't seem to exist:\n",url)    
+  }  
+}
+
+
+
+
+
+#' Construct WQP url for data retrieval
+#'
+#' Imports data from WQP web service. This function gets the data from here: \url{http://nwis.waterdata.usgs.gov/nwis/qwdata}
+#' A list of parameter codes can be found here: \url{http://nwis.waterdata.usgs.gov/nwis/pmcodes/}
+#' A list of statistic codes can be found here: \url{http://nwis.waterdata.usgs.gov/nwis/help/?read_file=stat&format=table}
+#'
+#' @param siteNumber string or vector of strings USGS site number.  This is usually an 8 digit number
+#' @param parameterCd string or vector of USGS parameter code.  This is usually an 5 digit number.
+#' @param startDate string starting date for data retrieval in the form YYYY-MM-DD.
+#' @param endDate string ending date for data retrieval in the form YYYY-MM-DD.
+#' @keywords data import WQP web service
+#' @return url string
+#' @export
+#' @import RCurl
+#' @examples
+#' siteNumber <- '01594440'
+#' startDate <- '1985-01-01'
+#' endDate <- ''
+#' pCode <- c("00060","00010")
+#' url_wqp <- constructWQPURL(paste("USGS",siteNumber,sep="-"),
+#'            c('01075','00029','00453'),
+#'            startDate,endDate)
+constructWQPURL <- function(siteNumber,parameterCd,startDate,endDate){
+  
+  multipleSites <- length(siteNumber) > 1
+  multiplePcodes <- length(parameterCd)>1
+  siteNumber <- paste(siteNumber, collapse=",")
+
+  if(all(nchar(parameterCd) == 5)){
+    suppressWarnings(pCodeLogic <- all(!is.na(as.numeric(parameterCd))))
+  } else {
+    pCodeLogic <- FALSE
+    parameterCd <- gsub(",","%2C",parameterCd)
+    parameterCd <- URLencode(parameterCd)
   }
   
+  if(multiplePcodes){
+    parameterCd <- paste(parameterCd, collapse=";")
+  }
+
+
+  
+  baseURL <- "http://www.waterqualitydata.us/Result/search?siteid="
+  url <- paste0(baseURL,
+                siteNumber,
+                ifelse(pCodeLogic,"&pCode=","&characteristicName="),
+                parameterCd)
+  
+  if (nzchar(startDate)){
+    startDate <- format(as.Date(startDate), format="%m-%d-%Y")
+    url <- paste0(url, "&startDateLo=",startDate)
+  }
+  
+  if (nzchar(endDate)){
+    endDate <- format(as.Date(endDate), format="%m-%d-%Y")
+    url <- paste0(url, "&startDateHi=",endDate)
+  }
+  
+  url <- paste0(url,"&countrycode=US&mimeType=tsv")
+  return(url)
+
 }
diff --git a/R/dataOverview.r b/R/dataOverview.r
deleted file mode 100644
index 85c811376875aa5a37e7cbbfcacfb79d010c3b18..0000000000000000000000000000000000000000
--- a/R/dataOverview.r
+++ /dev/null
@@ -1,37 +0,0 @@
-#' Data Overview for WRTDS
-#'
-#' Gives a summary of data to be used for WRTDS analysis
-#'
-#' @param Daily dataframe
-#' @param Sample dataframe
-#' @keywords data import USGS WRTDS
-#' @export
-#' @seealso \code{\link{mergeReport}}
-#' @examples
-#' # These examples require an internet connection to run
-#' exDaily <- getNWISDaily('01594440','00060', '1985-01-01', '1985-03-31', interactive=FALSE)
-#' exSample <- getNWISSample('01594440','01075', '1985-01-01', '1985-03-31', interactive=FALSE)
-#' dataOverview(Daily = exDaily, Sample = exSample)
-dataOverview <- function(Daily, Sample ){
-
-  numDays<-length(Daily$Date)
-  numSamples<-length(Sample$Date)
-  numYears<-round(numDays/365.25,digits=0)
-  cat("\n Discharge Record is",numDays,"days long, which is",numYears,"years")
-  cat("\n First day of the discharge record is", as.character(Daily$Date[1]),"and last day is",as.character(Daily$Date[numDays]))
-  cat("\n The water quality record has",numSamples,"samples")
-  cat("\n The first sample is from", as.character(Sample$Date[1]),"and the last sample is from",as.character(Sample$Date[numSamples]))
-  if(Sample$Date[1]<Daily$Date[1]) cat("\n WE HAVE A PROBLEM first sample is from before the first daily discharge")	
-  if(Sample$Date[numSamples]>Daily$Date[numDays]) cat("\n WE HAVE A PROBLEM last sample is from after the last daily discharge")
-  Qmin<-signif(min(Daily$Q),digits=3)
-  Qmean<-signif(mean(Daily$Q),digits=3)
-  Qmax<-signif(max(Daily$Q),digits=3)
-  Cmin<-signif(min(Sample$ConcHigh),digits=2)
-  Cmean<-signif(mean(Sample$ConcHigh),digits=2)
-  Cmax<-signif(max(Sample$ConcHigh),digits=2)
-  cat("\n Discharge: Minimum, mean and maximum",Qmin,Qmean,Qmax)
-  cat("\n Concentration: Minimum, mean and maximum",Cmin,Cmean,Cmax)
-  pct<-sum(Sample$Uncen)
-  pct<-((numSamples-pct)/numSamples)*100
-  cat("\n Percentage of the sample values that are censored is",signif(pct,digits=2),"%")
-}
diff --git a/R/dateFormatCheck.r b/R/dateFormatCheck.r
deleted file mode 100644
index c3f9dbdf3858a14b51d145347446955280f5b3f1..0000000000000000000000000000000000000000
--- a/R/dateFormatCheck.r
+++ /dev/null
@@ -1,30 +0,0 @@
-#' Check date format
-#'
-#' Checks to see if format is YYYY-MM-DD. Also performs a few other date checks.
-#'
-#' @param date string
-#' @keywords WRTDS flow
-#' @return condition logical if TRUE, 
-#' @export
-#' @examples
-#' date <- '1985-01-01'
-#' dateFormatCheck(date)
-#' dateWrong <- '1999/1/7'
-#' dateFormatCheck(dateWrong)
-dateFormatCheck <- function(date){  # checks for the format YYYY-MM-DD
-  parts <- strsplit(date,"-",fixed=TRUE)
-  condition <- FALSE
-  if (length(parts[[1]])>1) {
-    if (nchar(parts[[1]][1]) == 4 && nchar(parts[[1]][2]) == 2 && nchar(parts[[1]][3]) == 2){
-      testYear <- as.numeric(parts[[1]][1])
-      testMonth <- as.numeric(parts[[1]][2])
-      testDay <- as.numeric(parts[[1]][3])
-      if (!is.na(testYear) && !is.na(testMonth) && !is.na(testDay)){
-        if (testMonth <= 12 && testDay <= 31){
-          condition <- TRUE
-        }        
-      }      
-    }
-  }
-  return(condition)
-}
diff --git a/R/deprecatedFunctions.R b/R/deprecatedFunctions.R
new file mode 100644
index 0000000000000000000000000000000000000000..d19c35b580fa280298023c295e94642e35142cba
--- /dev/null
+++ b/R/deprecatedFunctions.R
@@ -0,0 +1,81 @@
+#' @rdname whatNWISsites
+#' @export
+getNWISSites<- function(...){
+  message("This function is being deprecated. Please use whatNWISsites in the future.")
+  whatNWISsites(...)
+}
+
+#' @rdname whatNWISdata
+#' @export
+getNWISDataAvailability <- function(siteNumbers,service=c("uv","dv","qw")){
+  message("This function is being deprecated. Please use whatNWISdata in the future.")
+  whatNWISdata(siteNumber=siteNumbers, service=service)
+}
+
+
+#' @rdname whatWQPsites
+#' @export
+getWQPSites<- function(...){
+  message("This function is being deprecated. Please use whatWQPsites in the future.")
+  whatWQPsites(...)
+}
+
+#' @rdname readNWISsite
+#' @export
+getNWISSiteInfo<- function(siteNumbers){
+  message("This function is being deprecated. Please use readNWISsite in the future.")
+  readNWISsite(siteNumbers)
+}
+
+#' @rdname readNWISpCode
+#' @export
+getNWISPcodeInfo<- function(parameterCd){
+  message("This function is being deprecated. Please use readNWISpCode in the future.")
+  readNWISpCode(parameterCd)
+}
+
+#' @rdname readNWISdata
+#' @export
+getNWISData<- function(service="dv", ...){
+  message("This function is being deprecated. Please use readNWISdata in the future.")
+  readNWISdata(service=service, ...)
+}
+
+#' @rdname readNWISdv
+#' @export
+getNWISdvData<- function(siteNumber,parameterCd,startDate="",endDate="",statCd="00003"){
+  message("This function is being deprecated. Please use readNWISdv in the future.")
+  readNWISdv(siteNumber,parameterCd,startDate,endDate,statCd)
+}
+
+#' @rdname readNWISuv
+#' @export
+getNWISunitData<- function(siteNumbers,parameterCd,startDate="",endDate="", tz=""){
+  message("This function is being deprecated. Please use readNWISuv in the future.")
+  readNWISuv(siteNumbers,parameterCd,startDate,endDate, tz)
+}
+
+#' @rdname readNWISqw
+#' @export
+getNWISqwData<- function(siteNumber,pCodes,startDate="",endDate="",
+                         expanded=FALSE,reshape=TRUE,tz=""){
+  message("This function is being deprecated. Please use readNWISqw in the future.")
+  readNWISqw(siteNumber,pCodes,startDate,endDate,
+             expanded,reshape,tz)
+}
+
+#' @rdname readWQPqw
+#' @export
+getWQPqwData<- function(siteNumber,parameterCd,startDate="",endDate=""){
+  message("This function is being deprecated. Please use readWQPqw in the future.")
+  readWQPqw(siteNumber,parameterCd,startDate,endDate)
+}
+
+#' @rdname readWQPdata
+#' @export
+getWQPData<- function(...){
+  message("This function is being deprecated. Please use readWQPdata in the future.")
+  readWQPdata(...)
+}
+
+
diff --git a/R/formatCheckDate.r b/R/formatCheckDate.r
deleted file mode 100644
index 6d32a0027fa8c628b59dda55e2f6636b52319414..0000000000000000000000000000000000000000
--- a/R/formatCheckDate.r
+++ /dev/null
@@ -1,30 +0,0 @@
-#' formatCheckDate 
-#'
-#' Response to the date format checker.  If the date is not formated correctly, it will give the user the opportunity to correct, otherwise will create a warning.
-#'
-#' @param Date string
-#' @param dateString string used in either error message or interactive message. An example would be "startDate"
-#' @param interactive logical Option for interactive mode.  If true, there is user interaction for error handling and data checks.
-#' @keywords WRTDS flow
-#' @return condition logical if TRUE, 
-#' @export
-#' @examples
-#' Date <- '1985-01-01'
-#' dateString <- 'startDate'
-#' formatCheckDate(Date, dateString, interactive = FALSE)
-formatCheckDate <- function(Date, dateString,interactive=TRUE){
-  if(nzchar(Date)){
-    if (!dateFormatCheck(Date)){
-      if (interactive){
-        cat("Date must be entered in the form YYYY-MM-DD (no quotes), you entered: ", Date, "as the startDate.\n")
-        cat("Please re-enter ", dateString, ":\n")
-        Date <- readline()
-      } else {
-        warningMessage <- paste(dateString, " must be entered in the form YYYY-MM-DD, you entered: ", Date, ". ", dateString, " will be ignored",sep="")
-        warning(warningMessage)
-        Date <- ""
-      }
-    }
-  }
-  return(Date)
-}
diff --git a/R/formatCheckParameterCd.r b/R/formatCheckParameterCd.r
deleted file mode 100644
index 20b1ee0763ffc6b4fe803f05fdb3d81449f557be..0000000000000000000000000000000000000000
--- a/R/formatCheckParameterCd.r
+++ /dev/null
@@ -1,44 +0,0 @@
-#' formatCheckParameterCd
-#'
-#' Checks that the parameter code is 5 digits. If it is less, it will pad the string with zeros. If more, ask the user to re-enter.
-#'
-#' @param parameterCd string to check
-#' @param interactive logical Option for interactive mode.  If true, there is user interaction for error handling and data checks.
-#' @keywords WRTDS flow
-#' @return parameterCd string
-#' @export
-#' @examples
-#' pCode <- '01234'
-#' formatCheckParameterCd(pCode)
-formatCheckParameterCd <- function(parameterCd, interactive=TRUE){     #checks for a 5 digit number
-  
-  pCodeReturn <- rep(NA,length(parameterCd))
-  index <- 1
-  
-  for (i in parameterCd){
-  
-    if (nchar(i) < 5){      
-      if (interactive){
-        message("Most USGS parameter codes are 5 digits long, you entered a ", nchar(i), " digit number = ", i , ".\n")
-        
-        i <- padVariable(i,5)
-        message("The following parameter code will be used instead:",i,"\n")
-        message("If you would like to change the parameter code, enter it here (no quotes), otherwise hit return:\n")
-        tempparameterCd <- readline()
-        if (nzchar(tempparameterCd)){
-          i <- tempparameterCd
-        }
-      } else {
-        tempText <- padVariable(i,5)
-        warningMessage <- paste("Most USGS parameter codes are 5 digits long, you entered ", 
-                                i , ".\n",tempText," will be used instead", sep="")
-        warning(warningMessage)
-        i <- padVariable(i,5)
-      }
-      
-    } 
-    pCodeReturn[index] <- i
-    index <- index + 1
-  }
-  return(pCodeReturn)
-}
diff --git a/R/getDVData.r b/R/getDVData.r
deleted file mode 100644
index 8ddc694c1806d11e2b83053cd6c37249d651f98d..0000000000000000000000000000000000000000
--- a/R/getDVData.r
+++ /dev/null
@@ -1,39 +0,0 @@
-#' Import NWIS Daily Data for EGRET analysis
-#'
-#' Imports data from NWIS web service. This function gets the data from here: \url{http://waterservices.usgs.gov/}
-#' A list of parameter codes can be found here: \url{http://nwis.waterdata.usgs.gov/nwis/pmcodes/}
-#' A list of statistic codes can be found here: \url{http://nwis.waterdata.usgs.gov/nwis/help/?read_file=stat&format=table}
-#'
-#' @param siteNumber string USGS site number.  This is usually an 8 digit number
-#' @param parameterCd string USGS parameter code.  This is usually an 5 digit number.
-#' @param startDate string starting date for data retrieval in the form YYYY-MM-DD.
-#' @param endDate string ending date for data retrieval in the form YYYY-MM-DD.
-#' @param interactive logical Option for interactive mode.  If true, there is user interaction for error handling and data checks.
-#' @param convert logical Option to include a conversion from cfs to cms (35.314667). The default is TRUE, 
-#' which is appropriate for using NWIS data in the EGRET package.  Set this to FALSE to not include the conversion. If the parameter code is not 00060 (NWIS discharge),
-#' there is no conversion applied.
-#' @param format string, can be "tsv" or "xml", and is only applicable for daily and unit value requests.  "tsv" returns results faster, but there is a possiblitiy that an incomplete file is returned without warning. XML is slower, 
-#' but will offer a warning if the file was incomplete (for example, if there was a momentary problem with the internet connection). It is possible to safely use the "tsv" option, 
-#' but the user must carefully check the results to see if the data returns matches what is expected. The default is "tsv".
-#' @keywords data import USGS WRTDS
-#' @export
-#' @return Daily dataframe
-#' @seealso \code{\link{getNWISdvData}}, \code{\link{populateDaily}}
-#' @examples
-#' # These examples require an internet connection to run
-#' Daily <- getNWISDaily('01594440','00060', '1985-01-01', '1985-03-31')
-#' DailyCFS <- getNWISDaily('01594440','00060', '1985-01-01', '1985-03-31',convert=FALSE)
-#' DailySuspSediment <- getNWISDaily('01594440','80154', '1985-01-01', '1985-03-31')
-getNWISDaily <- function (siteNumber,parameterCd,startDate,endDate,interactive=TRUE,convert=TRUE,format="tsv"){
-
-  data <- getNWISdvData(siteNumber,parameterCd,startDate,endDate,interactive=interactive,format=format)
-  
-  #  need to setup conversion factor because the NWIS data are in cfs but we store in cms
-  names(data) <- c('agency', 'site', 'dateTime', 'value', 'code')  # do a merge instead?
-  
-  qConvert <- ifelse("00060" == parameterCd, 35.314667, 1)
-  qConvert<- ifelse(convert,qConvert,1)
-  
-  localDaily <- populateDaily(data,qConvert,interactive=interactive)
-  return (localDaily)
-}
diff --git a/R/getDailyDataFromFile.r b/R/getDailyDataFromFile.r
deleted file mode 100644
index e435fc20d887af5c4d81d581da56c16fa7a3fb79..0000000000000000000000000000000000000000
--- a/R/getDailyDataFromFile.r
+++ /dev/null
@@ -1,65 +0,0 @@
-#' Import Daily Data for WRTDS
-#'
-#' This function is being deprecated for \code{\link{getUserDaily}}.
-#'
-#' @param filePath string specifying the path to the file
-#' @param fileName string name of file to open
-#' @param hasHeader logical true if the first row of data is the column headers
-#' @param separator string character that separates data cells
-#' @param qUnit number 1 is cubic feet per second, 2 is cubic meters per second, 3 is 10^3 cubic feet per second, and 4 is 10^3 cubic meters per second
-#' @param interactive logical Option for interactive mode.  If true, there is user interaction for error handling and data checks.
-#' @keywords data import file
-#' @keywords data import USGS WRTDS
-#' @export
-#' @return Daily dataframe
-#' @examples
-#' filePath <- system.file("extdata", package="dataRetrieval")
-#' filePath <- paste(filePath,"/",sep="")
-#' fileName <- "ChoptankRiverFlow.txt"
-#' \dontrun{Daily <- getDailyDataFromFile(filePath,fileName,separator="\t")}
-getDailyDataFromFile <- function (filePath,fileName,hasHeader=TRUE,separator=",",qUnit=1,interactive=TRUE){
-  
-  warning("This function is being deprecated, please use getUserDaily")
-  
-  data <- getDataFromFile(filePath,fileName,hasHeader=hasHeader,separator=separator)
-  convertQ<-c(35.314667,1,0.035314667,0.001)
-  qConvert<-convertQ[qUnit]
-  if (interactive){
-    if(qUnit==1) cat("\n the input discharge are assumed to be in cubic feet per second\nif they are in cubic meters per second, then the call to getDailyDataFromFile should specify qUnit=2\n")
-  }
-  localDaily <- populateDaily(data,qConvert, interactive=interactive)
-  localDaily <- localDaily[!is.na(localDaily$Q),]
-  return(localDaily)
-}
-
-#' Import user daily data for EGRET analysis
-#'
-#' Imports data from a user-supplied file, and converts it to a Daily data frame, appropriate for WRTDS calculations.
-#'
-#' @param filePath string specifying the path to the file
-#' @param fileName string name of file to open
-#' @param hasHeader logical true if the first row of data is the column headers
-#' @param separator string character that separates data cells
-#' @param qUnit number 1 is cubic feet per second, 2 is cubic meters per second, 3 is 10^3 cubic feet per second, and 4 is 10^3 cubic meters per second
-#' @param interactive logical Option for interactive mode.  If true, there is user interaction for error handling and data checks.
-#' @keywords data import file
-#' @keywords data import USGS WRTDS
-#' @export
-#' @return Daily dataframe
-#' @examples
-#' filePath <- system.file("extdata", package="dataRetrieval")
-#' filePath <- paste(filePath,"/",sep="")
-#' fileName <- "ChoptankRiverFlow.txt"
-#' Daily <- getUserDaily(filePath,fileName,separator="\t")
-getUserDaily <- function (filePath,fileName,hasHeader=TRUE,separator=",",qUnit=1,interactive=TRUE){
-  data <- getDataFromFile(filePath,fileName,hasHeader=hasHeader,separator=separator)
-  convertQ<-c(35.314667,1,0.035314667,0.001)
-  qConvert<-convertQ[qUnit]
-  if (interactive){
-    if(qUnit==1) cat("\n the input discharge are assumed to be in cubic feet per second\nif they are in cubic meters per second, then the call to getDailyDataFromFile should specify qUnit=2\n")
-  }
-  names(data) <- c("dateTime", "value")
-  localDaily <- populateDaily(data,qConvert, interactive=interactive)
-  localDaily <- localDaily[!is.na(localDaily$Q),]
-  return(localDaily)
-}
diff --git a/R/getDataAvailability.r b/R/getDataAvailability.r
deleted file mode 100644
index 97c17b2f54536a685cae47c84aedf05202d0a4ef..0000000000000000000000000000000000000000
--- a/R/getDataAvailability.r
+++ /dev/null
@@ -1,70 +0,0 @@
-#' USGS data availability
-#'
-#' Imports a table of available parameters, period of record, and count.
-#'
-#' @param siteNumber string USGS site number.
-#' @param type vector string. Options are "uv", "dv", "qw"
-#' @keywords data import USGS web service
-#' @return retval dataframe with all information found in the expanded site file
-#' @export
-#' @import RCurl
-#' @examples
-#' # These examples require an internet connection to run
-#' availableData <- getNWISDataAvailability('05114000')
-#' # To find just unit value ('instantaneous') data:
-#' uvData <- getNWISDataAvailability('05114000',type="uv")
-#' uvDataMulti <- getNWISDataAvailability(c('05114000','09423350'),type="uv")
-getNWISDataAvailability <- function(siteNumber,type=c("uv","dv","qw")){
-  
-  siteNumber <- paste(siteNumber,collapse=",")
-  
-  urlSitefile <- paste("http://waterservices.usgs.gov/nwis/site/?format=rdb&seriesCatalogOutput=true&sites=",siteNumber,sep = "")
- 
-  doc = tryCatch({
-    h <- basicHeaderGatherer()
-    doc <- getURL(urlSitefile, headerfunction = h$update)
-    
-  }, warning = function(w) {
-    message(paste("URL caused a warning:", urlSitefile))
-    message(w)
-  }, error = function(e) {
-    message(paste("URL does not seem to exist:", urlSitefile))
-    message(e)
-    return(NA)
-  }) 
-  
-  if(h$value()["Content-Type"] == "text/plain;charset=UTF-8"){
-    SiteFile <- read.delim(
-      textConnection(doc),
-      header = TRUE,
-      quote="\"",
-      dec=".",
-      sep='\t',
-      colClasses=c('character'),
-      fill = TRUE,
-      comment.char="#")
-    
-    SiteFile <- SiteFile[-1,]
-    
-    SiteFile <- with(SiteFile, data.frame(site_no=site_no, parameter_cd=parm_cd, statCd=stat_cd, startDate=begin_date,endDate=end_date, count=count_nu,service=data_type_cd,stringsAsFactors = FALSE))
-    
-    SiteFile <- SiteFile[!is.na(SiteFile$parameter_cd),]
-    SiteFile <- SiteFile["" != SiteFile$parameter_cd,]
-    SiteFile$startDate <- as.Date(SiteFile$startDate)
-    SiteFile$endDate <- as.Date(SiteFile$endDate)
-    SiteFile$count <- as.numeric(SiteFile$count)
-    
-    pCodes <- unique(SiteFile$parameter_cd)
-    
-    parameterCdFile <- parameterCdFile
-    
-    pcodeINFO <- parameterCdFile[parameterCdFile$parameter_cd %in% pCodes,]
-    SiteFile <- merge(SiteFile,pcodeINFO,by="parameter_cd")
-    SiteFile <- SiteFile[SiteFile$service %in% type,]
-    return(SiteFile)
-  } else {
-    message(paste("URL caused an error:", urlSitefile))
-    message("Content-Type=",h$value()["Content-Type"])
-    return(NA)
-  }
-}
diff --git a/R/getDataFromFile.r b/R/getDataFromFile.r
deleted file mode 100644
index 1bffc892f363f1626142202417723b937cf8c94f..0000000000000000000000000000000000000000
--- a/R/getDataFromFile.r
+++ /dev/null
@@ -1,46 +0,0 @@
-#' Basic Data Import for Water Flow Data
-#'
-#' Imports data from user-supplied data file. Specifically used to import water flow data for use in the WRTDS package.
-#' For WRTDS usage, the first column is expected to be dates, the second column measured values.
-#' The third column is optional, it contains any remark codes.
-#'
-#' @param filePath string specifying the path to the file
-#' @param fileName string name of file to open
-#' @param hasHeader logical true if the first row of data is the column headers
-#' @param separator string character that separates data cells
-#' @keywords data import file
-#' @return retval dataframe with dateTime, value, and code columns
-#' @export
-#' @examples
-#' # Examples of how to use getDataFromFile:
-#' # Change the file path and file name to something meaningful:
-#' filePath <- system.file("extdata", package="dataRetrieval")
-#' filePath <- paste(filePath,"/",sep="")
-#' fileName <- 'ChoptankRiverFlow.txt'
-#' ChopData <- getDataFromFile(filePath,fileName, separator="\t")
-getDataFromFile <- function (filePath,fileName,hasHeader=TRUE,separator=","){
-  totalPath <- paste(filePath,fileName,sep="");  
-  retval <- read.delim(  
-    totalPath, 
-    header = hasHeader,
-    sep=separator,
-    colClasses=c('character'),
-    fill = TRUE, 
-    comment.char="#")
-  
-  if(ncol(retval) == 2){
-    numCol <- 2
-  } else {
-    numCol <- seq(from = 3,to = ncol(retval), by = 2)
-  }
-  
-  if(dateFormatCheck(retval[,1])){
-    retval[,1] <- as.Date(retval[,1])  
-  } else {
-    retval[,1] <- as.Date(retval[,1],format="%m/%d/%Y")
-  }
-  
-  retval[,numCol] <- sapply(numCol, function(x) as.numeric(retval[,x]))
-
-  return (retval)
-}
diff --git a/R/getMetaData.r b/R/getMetaData.r
deleted file mode 100644
index 281d558c75afa9b0e76675df357857c2ca0ba032..0000000000000000000000000000000000000000
--- a/R/getMetaData.r
+++ /dev/null
@@ -1,255 +0,0 @@
-#' Import Metadata for USGS Data
-#'
-#' Populates INFO data frame for EGRET study.  If either station number or parameter code supplied, imports data about a particular USGS site from NWIS web service. 
-#' This function gets the data from here: \url{http://waterservices.usgs.gov/}
-#' A list of parameter codes can be found here: \url{http://nwis.waterdata.usgs.gov/nwis/pmcodes/}
-#' If either station number or parameter code is not supplied, the user will be asked to input data.
-#' Additionally, the user will be asked for:
-#' staAbbrev - station abbreviation, will be used in naming output files and for structuring batch jobs
-#' constitAbbrev - constitute abbreviation
-#'
-#' @param siteNumber string USGS site number.  This is usually an 8 digit number
-#' @param parameterCd string USGS parameter code.  This is usually an 5 digit number.
-#' @param interactive logical Option for interactive mode.  If true, there is user interaction for error handling and data checks.
-#' @keywords data import USGS web service
-#' @export
-#' @return INFO dataframe with at least param.nm, param.units, parameShortName, paramNumber
-#' @examples
-#' # These examples require an internet connection to run
-#' # Automatically gets information about site 05114000 and temperature, no interaction with user
-#' INFO <- getNWISInfo('05114000','00010')
-getNWISInfo <- function(siteNumber, parameterCd,interactive=TRUE){
-  if (nzchar(siteNumber)){
-    INFO <- getNWISSiteInfo(siteNumber)
-  } else {
-    INFO <- as.data.frame(matrix(ncol = 2, nrow = 1))
-    names(INFO) <- c('site.no', 'shortName')    
-  }
-  INFO <- populateSiteINFO(INFO, siteNumber, interactive=interactive)
-  
-  if (nzchar(parameterCd)){
-    parameterData <- getNWISPcodeInfo(parameterCd,interactive=interactive)
-    INFO$param.nm <- parameterData$parameter_nm
-    INFO$param.units <- parameterData$parameter_units
-    INFO$paramShortName <- parameterData$srsname
-    INFO$paramNumber <- parameterData$parameter_cd
-  } 
-  
-  INFO <- populateParameterINFO(parameterCd, INFO, interactive=interactive)
-  INFO$paStart <- 10
-  INFO$paLong <- 12
-  
-  return(INFO)
-}
-
-#' Import Metadata for Water Quality Portal Data
-#'
-#' Populates INFO data frame for EGRET study. If siteNumber or parameter code (for USGS) or characteristic name 
-#' (for non-USGS) is provided, the function will make a call to the Water Quality Portal to get metadata information.
-#' staAbbrev - station abbreviation, will be used in naming output files and for structuring batch jobs
-#' constitAbbrev - constitute abbreviation
-#'
-#' @param siteNumber string site number. 
-#' @param parameterCd string USGS parameter code or characteristic name.
-#' @param interactive logical Option for interactive mode.  If true, there is user interaction for error handling and data checks.
-#' @keywords data import USGS web service WRTDS
-#' @export
-#' @return INFO dataframe with agency, site, dateTime, value, and code columns
-#' @examples
-#' # These examples require an internet connection to run
-#' # Automatically gets information about site 01594440 and temperature, no interaction with user
-#' nameToUse <- 'Specific conductance'
-#' pcodeToUse <- '00095'
-#' \dontrun{
-#' INFO <- getWQPInfo('USGS-04024315',pcodeToUse,interactive=TRUE)
-#' INFO2 <- getWQPInfo('WIDNR_WQX-10032762',nameToUse)
-#' # To adjust the label names:
-#' INFO$shortName <- "Little"
-#' INFO$paramShortName <- "SC"
-#' }
-getWQPInfo <- function(siteNumber, parameterCd, interactive=FALSE){
-  
-  #Check for pcode:
-  pCodeLogic <- (all(nchar(parameterCd) == 5) & suppressWarnings(all(!is.na(as.numeric(parameterCd)))))
-
-  if (pCodeLogic){
-    
-    siteInfo <- getWQPSites(siteid=siteNumber, pCode=parameterCd)
-
-    parameterData <- getNWISPcodeInfo(parameterCd = parameterCd)
-    
-    siteInfo$param.nm <- parameterData$parameter_nm
-    siteInfo$param.units <- parameterData$parameter_units
-    siteInfo$paramShortName <- parameterData$srsname
-    siteInfo$paramNumber <- parameterData$parameter_cd
-    siteInfo$constitAbbrev <- parameterData$parameter_cd
-
-  } else {
-    siteInfo <- getWQPSites(siteid=siteNumber, characteristicName=parameterCd)
-
-    siteInfo$param.nm <- parameterCd
-    siteInfo$param.units <- ""
-    siteInfo$paramShortName <- parameterCd
-    siteInfo$paramNumber <- ""
-    siteInfo$constitAbbrev <- parameterCd
-  }
-  
-  siteInfo$station.nm <- siteInfo$MonitoringLocationName
-  siteInfo$shortName <- siteInfo$station.nm 
-  siteInfo$site.no <- siteInfo$MonitoringLocationIdentifier
-  
-  if(interactive){
-    cat("Your site for data is", as.character(siteInfo$site.no),".\n")
-    if (!nzchar(siteInfo$station.nm)){
-      cat("No station name was listed for site: ", siteInfo$site.no, ". Please enter a station name here(no quotes): \n")
-      siteInfo$station.nm <- readline()
-    }
-    cat("Your site name is", siteInfo$station.nm,",")
-    cat("but you can modify this to a short name in a style you prefer. \nThis name will be used to label graphs and tables. \n")
-    cat("If you want the program to use the name given above, just do a carriage return, otherwise enter the preferred short name(no quotes):\n")
-    siteInfo$shortName <- readline()
-    if (!nzchar(siteInfo$shortName)) siteInfo$shortName <- siteInfo$station.nm
-    
-    cat("Your water quality data are for parameter number", siteInfo$paramNumber, "which has the name:'", siteInfo$param.nm, "'.\n")
-    cat("Typically you will want a shorter name to be used in graphs and tables. The suggested short name is:'", siteInfo$paramShortName, "'.\n")
-    cat("If you would like to change the short name, enter it here, otherwise just hit enter (no quotes):")
-    shortNameTemp <- readline()
-    if (nchar(shortNameTemp)>0) siteInfo$paramShortName <- shortNameTemp
-    cat("The units for the water quality data are: ", siteInfo$param.units, ".\n")
-    cat("It is helpful to set up a constiuent abbreviation when doing multi-constituent studies, enter a unique id (three or four characters should work something like tn or tp or NO3).\nIt is case sensitive.  Even if you don't feel you need an abbreviation you need to enter something (no quotes):\n")
-    siteInfo$constitAbbrev <- readline()
-  }
-  
-  if (interactive){
-    cat("It is helpful to set up a station abbreviation when doing multi-site studies, enter a unique id (three or four characters should work).\nIt is case sensitive.  Even if you don't feel you need an abbreviation for your site you need to enter something(no quotes):\n")
-    siteInfo$staAbbrev <- readline()
-  } else {
-    siteInfo$staAbbrev <- NA
-  }
-
-  if(siteInfo$DrainageAreaMeasure.MeasureUnitCode == "sq mi"){
-    siteInfo$drainSqKm <- as.numeric(siteInfo$DrainageAreaMeasure.MeasureValue) * 2.5899881 
-  } else {
-    warning("Please check the units for drainage area. The value for INFO$drainSqKm needs to be in square kilometers,")
-    siteInfo$drainSqKm <- as.numeric(siteInfo$DrainageAreaMeasure.MeasureValue)
-  }
-  
-  if(interactive){
-    if(is.na(siteInfo$drainSqKm)){
-      cat("No drainage area was listed in the WQP site file for this site.\n")
-      cat("Please enter the drainage area, you can enter it in the units of your choice.\nEnter the area, then enter drainage area code, \n1 is square miles, \n2 is square kilometers, \n3 is acres, \n4 is hectares.\n")
-      cat("Area(no quotes):\n")
-      siteInfo$drain.area.va <- readline()
-      siteInfo$drain.area.va <- as.numeric(siteInfo$drain.area.va)
-      cat("Unit Code (1-4, no quotes):")
-      qUnit <- readline()
-      qUnit <- as.numeric(qUnit)
-      conversionVector <- c(2.5899881, 1.0, 0.0040468564, 0.01)
-      siteInfo$drainSqKm <- siteInfo$drain.area.va * conversionVector[qUnit]
-    }
-  }
-  
-  siteInfo$queryTime <- Sys.time()
-  siteInfo$paStart <- 10
-  siteInfo$paLong <- 12
-  
-  return(siteInfo)
-}
-
-
-
-#' Import Metadata from User-Generated File
-#'
-#' Populates INFO data frame for EGRET study. Accepts a user generated file with any metadata that might 
-#' be important for the analysis. 
-#' Additionally, EGRET analysis requires:"drainSqKm", "staAbbrev", "constitAbbrev", 
-#' "param.units", "paramShortName","shortName". If interactive=TRUE, the function will ask for these
-#' fields if they aren't supplied in the file.
-#'
-#' @param filePath string specifying the path to the file
-#' @param fileName string name of file to open
-#' @param hasHeader logical true if the first row of data is the column headers
-#' @param separator string character that separates data cells
-#' @param interactive logical Option for interactive mode.  If true, there is user interaction for error handling and data checks.
-#' @keywords data import USGS web service WRTDS
-#' @export
-#' @return INFO dataframe with agency, site, dateTime, value, and code columns
-#' @examples
-#' filePath <- system.file("extdata", package="dataRetrieval")
-#' filePath <- paste(filePath,"/",sep="")
-#' fileName <- 'infoTest.csv'
-#' INFO <- getUserInfo(filePath,fileName, separator=",",interactive=FALSE)
-getUserInfo <- function(filePath,fileName,hasHeader=TRUE,separator=",",interactive=FALSE){
-  
-  totalPath <- paste(filePath,fileName,sep="");  
-  siteInfo <- read.delim(  
-    totalPath, 
-    header = hasHeader,
-    sep=separator,
-    colClasses=c('character'),
-    fill = TRUE, 
-    comment.char="#")
-  
-  if(interactive){
-
-    if (!nzchar(siteInfo$station.nm)){
-      cat("No station name was listed. Please enter a station name here(no quotes): \n")
-      siteInfo$station.nm <- readline()
-    }
-    cat("Your site name is", siteInfo$station.nm,",")
-    cat("but you can modify this to a short name in a style you prefer. \nThis name will be used to label graphs and tables. \n")
-    cat("If you want the program to use the name given above, just do a carriage return, otherwise enter the preferred short name(no quotes):\n")
-    siteInfo$shortName <- readline()
-    if (!nzchar(siteInfo$shortName)) siteInfo$shortName <- siteInfo$station.nm
-    
-    if (!nzchar(siteInfo$param.nm)){
-      cat("No water quality parameter name was listed.\nPlease enter the name here(no quotes): \n")
-      siteInfo$param.nm <- readline()
-    }
-    
-    cat("Your water quality data are for '", siteInfo$param.nm, "'.\n")
-    cat("Typically you will want a shorter name to be used in graphs and tables. The suggested short name is:'", siteInfo$paramShortName, "'.\n")
-    cat("If you would like to change the short name, enter it here, otherwise just hit enter (no quotes):")
-    shortNameTemp <- readline()
-    
-    if (nchar(shortNameTemp)>0) siteInfo$paramShortName <- shortNameTemp
-    
-    if (!nzchar(siteInfo$param.units)){
-      cat("No water quality parameter unit was listed.\nPlease enter the units here(no quotes): \n")
-      siteInfo$param.nm <- readline()
-    }
-    cat("The units for the water quality data are: ", siteInfo$param.units, ".\n")
-    cat("It is helpful to set up a constiuent abbreviation when doing multi-constituent studies, enter a unique id (three or four characters should work something like tn or tp or NO3).\nIt is case sensitive.  Even if you don't feel you need an abbreviation you need to enter something (no quotes):\n")
-    siteInfo$constitAbbrev <- readline()
-
-    cat("It is helpful to set up a station abbreviation when doing multi-site studies, enter a unique id (three or four characters should work).\nIt is case sensitive.  Even if you don't feel you need an abbreviation for your site you need to enter something(no quotes):\n")
-    siteInfo$staAbbrev <- readline()
-  
-    if(is.na(siteInfo$drainSqKm)){
-      cat("No drainage area was listed as a column named 'drainSqKm'.\n")
-      cat("Please enter the drainage area, you can enter it in the units of your choice.\nEnter the area, then enter drainage area code, \n1 is square miles, \n2 is square kilometers, \n3 is acres, \n4 is hectares.\n")
-      cat("Area(no quotes):\n")
-      siteInfo$drain.area.va <- readline()
-      siteInfo$drain.area.va <- as.numeric(siteInfo$drain.area.va)
-      cat("Unit Code (1-4, no quotes):")
-      qUnit <- readline()
-      qUnit <- as.numeric(qUnit)
-      conversionVector <- c(2.5899881, 1.0, 0.0040468564, 0.01)
-      siteInfo$drainSqKm <- siteInfo$drain.area.va * conversionVector[qUnit]
-    }
-  } else {
-    requiredColumns <- c("drainSqKm", "staAbbrev", "constitAbbrev", 
-                         "param.units", "paramShortName","shortName")
-    if(!all(requiredColumns %in% names(siteInfo))){
-      message("The following columns are expected in the EGRET package:\n")
-      message(requiredColumns[!(requiredColumns %in% names(siteInfo))])
-    }
-  }
-  
-  siteInfo$queryTime <- Sys.time()
-  siteInfo$paStart <- 10
-  siteInfo$paLong <- 12
-  
-  return(siteInfo)
-}
-
diff --git a/R/getNWISSites.R b/R/getNWISSites.R
deleted file mode 100644
index 0cfe7c4db81aea6eda5695764b3904f6f9c2dfc5..0000000000000000000000000000000000000000
--- a/R/getNWISSites.R
+++ /dev/null
@@ -1,73 +0,0 @@
-#' Site Data Import from NWIS
-#'
-#' Returns a list of sites from the NWIS web service. This function gets the data from: \url{http://waterservices.usgs.gov/rest/Site-Test-Tool.html}.
-#' Arguments to the function should be based on \url{http://waterservices.usgs.gov/rest/Site-Service.html#Service}
-#' Mapper format is used
-#'
-#' @param \dots see \url{http://waterservices.usgs.gov/rest/Site-Service.html#Service} for a complete list of options
-#' @keywords data import NWIS web service
-#' @return retval dataframe with agency_cd, site_no, station_nm, site_tp_cd, dec_lat_va, and dec_long_va.
-#' @export
-#' @import XML
-#' @examples
-#' siteListPhos <- getNWISSites(stateCd="OH",parameterCd="00665")
-getNWISSites <- function(...){
-  
-  matchReturn <- list(...)
-
-  values <- sapply(matchReturn, function(x) URLencode(as.character(paste(eval(x),collapse="",sep=""))))
-  
-  urlCall <- paste(paste(names(values),values,sep="="),collapse="&")
-  
-  
-  baseURL <- "http://waterservices.usgs.gov/nwis/site/?format=mapper&"
-  urlCall <- paste(baseURL,
-                   urlCall,sep = "")
-  
-  h <- basicHeaderGatherer()
-  doc = tryCatch({
-    returnedDoc <- getURI(urlCall, headerfunction = h$update)
-    if(h$value()["Content-Type"] == "text/xml;charset=UTF-8"){
-      xmlTreeParse(returnedDoc, getDTD = FALSE, useInternalNodes = TRUE)
-    } else {
-      message(paste("URL caused an error:", urlCall))
-      message("Content-Type=",h$value()["Content-Type"])
-      return(NA)
-    }   
-    
-  }, warning = function(w) {
-    message(paste("URL caused a warning:", urlCall))
-    message(w)
-  }, error = function(e) {
-    message(paste("URL does not seem to exist:", urlCall))
-    message(e)
-    return(NA)
-  }) 
-  
-  doc <- xmlRoot(doc)
-  numChunks <- xmlSize(doc)
-  for(i in 1:numChunks){
-    chunk <- doc[[1]]
-    site_no <- as.character(xpathApply(chunk, "site/@sno"))
-    station_nm <- as.character(xpathApply(chunk, "site/@sna"))
-    site_tp_cd <- as.character(xpathApply(chunk, "site/@cat"))
-    dec_lat_va <- as.numeric(xpathApply(chunk, "site/@lat"))
-    dec_long_va <- as.numeric(xpathApply(chunk, "site/@lng"))
-    agency_cd <- as.character(xpathApply(chunk, "site/@agc"))
-    
-    df <- data.frame(agency_cd, site_no, station_nm, site_tp_cd, 
-                     dec_lat_va, dec_long_va, stringsAsFactors=FALSE) 
-    
-    if(1==i){
-      retval <- df
-    } else {
-      retval <- rbind(retval, df)
-    }
-  }
-  
-  retval <- retval[!duplicated(retval),]
-  
-  retval$queryTime <- Sys.time()
-  
-  return(retval)
-}
diff --git a/R/getParameterInfo.r b/R/getParameterInfo.r
deleted file mode 100644
index 46890671d2e920e01a54e0a8295b7821aefab818..0000000000000000000000000000000000000000
--- a/R/getParameterInfo.r
+++ /dev/null
@@ -1,22 +0,0 @@
-#' USGS Parameter Data Retrieval
-#'
-#' Imports data from NWIS about meaured parameter based on user-supplied parameter code.
-#' This function gets the data from here: \url{http://nwis.waterdata.usgs.gov/nwis/pmcodes}
-#'
-#' @param parameterCd vector of USGS parameter codes.  This is usually an 5 digit number.
-#' @param interactive logical Option for interactive mode.  If true, there is user interaction for error handling and data checks.
-#' @keywords data import USGS web service
-#' @return parameterData dataframe with all information from the USGS about the particular parameter (usually code, name, short name, units, and CAS registry numbers)
-#' @export
-#' @examples
-#' # These examples require an internet connection to run
-#' paramINFO <- getNWISPcodeInfo(c('01075','00060','00931'))
-getNWISPcodeInfo <- function(parameterCd,interactive=TRUE){
-  parameterCd <- formatCheckParameterCd(parameterCd, interactive=interactive)
-  
-  parameterCdFile <- parameterCdFile
-  
-  parameterData <- parameterCdFile[parameterCdFile$parameter_cd %in% parameterCd,]
-
-  return(parameterData)
-}
diff --git a/R/getRDB1Data.r b/R/getRDB1Data.r
deleted file mode 100644
index 7f9bd732a255ff9f828700db20b1e665f2f10e25..0000000000000000000000000000000000000000
--- a/R/getRDB1Data.r
+++ /dev/null
@@ -1,180 +0,0 @@
-#' Function to return data from the NWIS RDB 1.0 format
-#'
-#' This function accepts a url parameter that already contains the desired
-#' NWIS site, parameter code, statistic, startdate and enddate. 
-#'
-#' @param obs_url string containing the url for the retrieval
-#' @param asDateTime logical, if TRUE returns date and time as POSIXct, if FALSE, Date
-#' @param qw logical, if TRUE parses as water quality data (where dates/times are in start and end times)
-#' @return data a data frame containing columns agency, site, dateTime, values, and remark codes for all requested combinations
-#' @export
-#' @examples
-#' siteNumber <- "02177000"
-#' startDate <- "2012-09-01"
-#' endDate <- "2012-10-01"
-#' offering <- "00003"
-#' property <- "00060"
-#' obs_url <- constructNWISURL(siteNumber,property,
-#'          startDate,endDate,"dv",format="tsv")
-#' data <- getRDB1Data(obs_url)
-#' urlMulti <- constructNWISURL("04085427",c("00060","00010"),
-#'          startDate,endDate,"dv",statCd=c("00003","00001"),"tsv")
-#' multiData <- getRDB1Data(urlMulti)
-#' unitDataURL <- constructNWISURL(siteNumber,property,
-#'          "2014-10-10","2014-10-10","uv",format="tsv")
-#' unitData <- getRDB1Data(unitDataURL, asDateTime=TRUE)
-getRDB1Data <- function(obs_url,asDateTime=FALSE, qw=FALSE){
-  
-  retval = tryCatch({
-    h <- basicHeaderGatherer()
-    doc <- getURL(obs_url, headerfunction = h$update)
-    
-  }, warning = function(w) {
-    message(paste("URL caused a warning:", obs_url))
-    message(w)
-  }, error = function(e) {
-    message(paste("URL does not seem to exist:", obs_url))
-    message(e)
-    return(NA)
-  })   
-  
-  if(as.character(h$value()["Content-Type"]) == "text/plain;charset=UTF-8" | as.character(h$value()["Content-Type"]) == "text/plain"){
-    
-#     comments <- readLines(doc)
-    
-    tmp <- read.delim(  
-      textConnection(doc), 
-      header = TRUE, 
-      quote="\"", 
-      dec=".", 
-      sep='\t',
-      colClasses=c('character'),
-      fill = TRUE, 
-      comment.char="#")
-    
-    dataType <- tmp[1,]
-    data <- tmp[-1,]
-    
-    multiSiteCorrections <- -which(as.logical(apply(data[,1:2], 1, FUN=function(x) all(x %in% as.character(dataType[,1:2])))))
-    
-    if(length(multiSiteCorrections) > 0){
-      data <- data[multiSiteCorrections,]
-      
-      findRowsWithHeaderInfo <- as.integer(apply(data[,1:2], 1, FUN = function(x) if(x[1] == names(data)[1] & x[2] == names(data)[2]) 1 else 0))
-      findRowsWithHeaderInfo <- which(findRowsWithHeaderInfo == 0)
-      data <- data[findRowsWithHeaderInfo,]
-      
-    }
-    
-    timeZoneLibrary <- setNames(c("America/New_York","America/New_York","America/Chicago","America/Chicago",
-                                  "America/Denver","America/Denver","America/Los_Angeles","America/Los_Angeles",
-                                  "America/Anchorage","America/Anchorage","America/Honolulu","America/Honolulu"),
-                                c("EST","EDT","CST","CDT","MST","MDT","PST","PDT","AKST","AKDT","HAST","HST"))
-    
-    data[,grep('n$', dataType)] <- suppressWarnings(sapply(data[,grep('n$', dataType)], function(x) as.numeric(x)))
-    
-    if(length(grep('d$', dataType)) > 0){
-      if (asDateTime & !qw){
-        
-        if("tz_cd" %in% names(data)){
-          timeZone <- as.character(timeZoneLibrary[data$tz_cd])
-        } else {
-          timeZone <- NULL
-        }
-        
-        
-        if(length(unique(timeZone)) == 1){
-          data[,regexpr('d$', dataType) > 0] <- as.POSIXct(data[,regexpr('d$', dataType) > 0], "%Y-%m-%d %H:%M", tz = unique(timeZone))
-        } else {
-          
-          mostCommonTZ <- names(sort(summary(as.factor(timeZone)),decreasing = TRUE)[1])
-
-          data[,grep('d$', dataType)] <- as.POSIXct(data[,grep('d$', dataType)], "%Y-%m-%d %H:%M", tz = mostCommonTZ)
-          additionalTZs <- names(sort(summary(as.factor(timeZone)),decreasing = TRUE)[-1])
-          for(i in additionalTZs){
-            data[timeZone == i,grep('d$', dataType)] <-  as.POSIXct(data[,grep('d$', dataType)], "%Y-%m-%d %H:%M", tz = i)
-          }
-        }
-       
-      } else if (qw){
-        
-        if("sample_start_time_datum_cd" %in% names(data)){
-          timeZoneStart <- as.character(timeZoneLibrary[data$sample_start_time_datum_cd])
-        } else {
-          timeZoneStart <- NA
-        }
-        
-        if("sample_end_time_datum_cd" %in% names(data)){
-          timeZoneEnd <- as.character(timeZoneLibrary[data$sample_end_time_datum_cd])
-        } else {
-          timeZoneEnd <- NA
-        }
-        timeZoneStart[is.na(timeZoneStart)] <- ""
-        timeZoneEnd[is.na(timeZoneEnd)] <- ""
-        
-        if("sample_dt" %in% names(data)){
-          if(any(data$sample_dt != "")){
-            suppressWarnings(data$sample_dt <- as.Date(parse_date_time(data$sample_dt, c("Ymd", "mdY"))))
-          }
-        }
-        
-        if("sample_end_dt" %in% names(data)){
-          if(any(data$sample_end_dt != "")){
-            suppressWarnings(data$sample_end_dt <- as.Date(parse_date_time(data$sample_end_dt, c("Ymd", "mdY"))))
-          }        
-        }
-        
-        if(any(!is.na(timeZoneStart))){
-          if(length(unique(timeZoneStart)) == 1){
-            data$startDateTime <- with(data, as.POSIXct(paste(sample_dt, sample_tm),format="%Y-%m-%d %H:%M", tz=unique(timeZoneStart)))
-          } else {
-            
-            mostCommonTZ <- names(sort(summary(as.factor(timeZoneStart)),decreasing = TRUE)[1])
-            
-            data$startDateTime <- with(data, as.POSIXct(paste(sample_dt, sample_tm),
-                                            format="%Y-%m-%d %H:%M", 
-                                            tz=mostCommonTZ))
-            additionalTZs <- names(sort(summary(as.factor(timeZoneStart)),decreasing = TRUE)[-1])
-            for(i in additionalTZs){
-              data$startDateTime[timeZoneStart == i] <-  with(data[timeZoneStart == i,], 
-                                  as.POSIXct(paste(sample_dt, sample_tm),
-                                             format="%Y-%m-%d %H:%M", 
-                                             tz=i))
-            }
-          }
-        }
-        
-        if(any(!is.na(timeZoneEnd))){
-          if(length(unique(timeZoneEnd)) == 1){
-            data$endDateTime <- with(data, as.POSIXct(paste(sample_end_dt, sample_end_tm),format="%Y-%m-%d %H:%M", tz=unique(timeZoneEnd)))
-          } else {
-            
-            mostCommonTZ <- names(sort(summary(as.factor(timeZoneEnd)),decreasing = TRUE)[1])
-            
-            data$endDateTime <- with(data, as.POSIXct(paste(sample_end_dt, sample_end_tm),
-                                format="%Y-%m-%d %H:%M", 
-                                tz=mostCommonTZ))
-            additionalTZs <- names(sort(summary(as.factor(timeZoneEnd)),decreasing = TRUE)[-1])
-            for(i in additionalTZs){
-              data$endDateTime[timeZoneEnd == i] <-  with(data[timeZoneStart == i,], 
-                                as.POSIXct(paste(sample_end_dt, sample_end_tm),
-                                           format="%Y-%m-%d %H:%M", 
-                                           tz=i))
-            }
-          }
-        }
-        
-      } else {
-        for (i in grep('d$', dataType)){
-          data[,i] <- as.Date(data[,i])
-        }
-      }
-    }
-    
-    row.names(data) <- NULL
-    return(data)
-  } else {
-    message(paste("URL caused an error:", obs_url))
-    message("Content-Type=",h$value()["Content-Type"])
-  }
-}
diff --git a/R/getSTORETSampleData.R b/R/getSTORETSampleData.R
deleted file mode 100644
index a343d693e65398c63b36a94cb14b26d39464e509..0000000000000000000000000000000000000000
--- a/R/getSTORETSampleData.R
+++ /dev/null
@@ -1,47 +0,0 @@
-#' Import Sample Data for WRTDS
-#'
-#' Imports data from the Water Quality Portal, so it could be STORET, NWIS, or . This function gets the data from: \url{http://www.waterqualitydata.us}
-#' For raw data, use getWQPData.  This function will retrieve the raw data, and compress it (summing constituents). See
-#' chapter 7 of the EGRET user guide for more details, then converts it to the Sample dataframe structure.
-#'
-#' @param siteNumber string site number.  If USGS, it should be in the form :'USGS-XXXXXXXXX...'
-#' @param characteristicName string
-#' @param startDate string starting date for data retrieval in the form YYYY-MM-DD.
-#' @param endDate string ending date for data retrieval in the form YYYY-MM-DD.
-#' @param interactive logical Option for interactive mode.  If true, there is user interaction for error handling and data checks.
-#' @keywords data import USGS WRTDS
-#' @export
-#' @return Sample dataframe
-#' @seealso \code{\link{getWQPData}}, \code{\link{getWQPSites}}, 
-#' \code{\link{getWQPqwData}}, \code{\link{getNWISqwData}}, and \code{\link{readWQPData}}, 
-#' \code{\link{compressData}}, \code{\link{populateSampleColumns}}
-#' @examples
-#' # These examples require an internet connection to run
-#' \dontrun{
-#' Sample_01075 <- getWQPSample('USGS-01594440','Chloride', '', '')
-#' Sample_All <- getWQPSample('WIDNR_WQX-10032762','Specific conductance', '', '')
-#' }
-getWQPSample <- function(siteNumber,characteristicName,startDate,endDate,interactive=TRUE){
-  
-  retval <- getWQPqwData(siteNumber=siteNumber,
-                              parameterCd=characteristicName,
-                              startDate=startDate,
-                              endDate=endDate,
-                              interactive=interactive)
-  #Check for pcode:
-  if(all(nchar(characteristicName) == 5)){
-    suppressWarnings(pCodeLogic <- all(!is.na(as.numeric(characteristicName))))
-  } else {
-    pCodeLogic <- FALSE
-  }
-  
-  if(nrow(retval) > 0){
-    data <- processQWData(retval,pCodeLogic)
-  } else {
-    data <- NULL
-  }
-  
-  compressedData <- compressData(data, interactive=interactive)
-  Sample <- populateSampleColumns(compressedData)
-  return(Sample)
-}
diff --git a/R/getSampleData.r b/R/getSampleData.r
deleted file mode 100644
index a32670102220a4bfa1c46df2fe0b68f19ffeb337..0000000000000000000000000000000000000000
--- a/R/getSampleData.r
+++ /dev/null
@@ -1,35 +0,0 @@
-#' Import NWIS Sample Data for EGRET analysis
-#'
-#' Imports data from NWIS web service. This function gets the data from here: \url{http://nwis.waterdata.usgs.gov/nwis/qwdata/}
-#' A list of parameter codes can be found here: \url{http://nwis.waterdata.usgs.gov/nwis/pmcodes/}
-#' A list of statistic codes can be found here: \url{http://nwis.waterdata.usgs.gov/nwis/help/?read_file=stat&format=table}
-#' For raw data, use getQWData.  This function will retrieve the raw data, and compress it (summing constituents). See
-#' section 3.4 of the vignette for more details.
-#'
-#' @param siteNumber string USGS site number.  This is usually an 8 digit number
-#' @param parameterCd string USGS parameter code.  This is usually an 5 digit number.
-#' @param startDate string starting date for data retrieval in the form YYYY-MM-DD.
-#' @param endDate string ending date for data retrieval in the form YYYY-MM-DD.
-#' @param interactive logical Option for interactive mode.  If true, there is user interaction for error handling and data checks.
-#' @keywords data import USGS WRTDS
-#' @export
-#' @return Sample dataframe
-#' @seealso \code{\link{compressData}}, \code{\link{populateSampleColumns}}, , \code{\link{getNWISSample}}
-#' @examples
-#' # These examples require an internet connection to run
-#' Sample_01075 <- getNWISSample('01594440','01075', '1985-01-01', '1985-03-31')
-#' Sample_All2 <- getNWISSample('05114000',c('00915','00931'), '1985-01-01', '1985-03-31')
-#' Sample_Select <- getNWISSample('05114000',c('00915','00931'), '', '')
-getNWISSample <- function(siteNumber,parameterCd,startDate,endDate,interactive=TRUE){
-  
-  rawSample <- getNWISqwData(siteNumber,parameterCd,startDate,endDate)
-  dataColumns <- grep("p\\d{5}",names(rawSample))
-  remarkColumns <- grep("r\\d{5}",names(rawSample))
-  totalColumns <-c(grep("sample_dt",names(rawSample)), dataColumns, remarkColumns)
-  totalColumns <- totalColumns[order(totalColumns)]
-  compressedData <- compressData(rawSample[,totalColumns], interactive=interactive)
-  Sample <- populateSampleColumns(compressedData)
-  return(Sample)
-}
-
-
diff --git a/R/getSampleDataFromFile.r b/R/getSampleDataFromFile.r
deleted file mode 100644
index 2b6e0d37c5e6537ad56a94ec5c2e1a25d0664547..0000000000000000000000000000000000000000
--- a/R/getSampleDataFromFile.r
+++ /dev/null
@@ -1,24 +0,0 @@
-#' Import user sample data for EGRET analysis
-#'
-#' Imports data from a user-supplied file, and converts it to a Sample data frame (including summing multiple constituents), appropriate for WRTDS calculations. 
-#'
-#' @param filePath string specifying the path to the file
-#' @param fileName string name of file to open
-#' @param hasHeader logical true if the first row of data is the column headers
-#' @param separator string character that separates data cells
-#' @param interactive logical Option for interactive mode.  If true, there is user interaction for error handling and data checks.
-#' @keywords data import file
-#' @seealso \code{\link{compressData}}, \code{\link{populateSampleColumns}}
-#' @export
-#' @return Sample dataframe
-#' @examples
-#' filePath <- system.file("extdata", package="dataRetrieval")
-#' filePath <- paste(filePath,"/",sep="")
-#' fileName <- 'ChoptankRiverNitrate.csv'
-#' Sample <- getUserSample(filePath,fileName, separator=";",interactive=FALSE)
-getUserSample <- function (filePath,fileName,hasHeader=TRUE,separator=",", interactive=TRUE){
-  data <- getDataFromFile(filePath,fileName,hasHeader=hasHeader,separator=separator)
-  compressedData <- compressData(data, interactive=interactive)
-  Sample <- populateSampleColumns(compressedData)
-  return(Sample)
-}
diff --git a/R/getSiteFileData.r b/R/getSiteFileData.r
deleted file mode 100644
index e7201f232ffe647ded4d208473c1ac81302004c4..0000000000000000000000000000000000000000
--- a/R/getSiteFileData.r
+++ /dev/null
@@ -1,57 +0,0 @@
-#' USGS Site File Data Retrieval
-#'
-#' Imports data from USGS site file site. This function gets data from here: \url{http://waterservices.usgs.gov/}
-#'
-#' @param siteNumber string USGS site number.  This is usually an 8 digit number
-#' @keywords data import USGS web service
-#' @return retval dataframe with all information found in the expanded site file
-#' @export
-#' @examples
-#' # These examples require an internet connection to run
-#' siteINFO <- getNWISSiteInfo('05114000')
-#' siteINFOMulti <- getNWISSiteInfo(c('05114000','09423350'))
-getNWISSiteInfo <- function(siteNumber){
-  
-  siteNumber <- paste(siteNumber,collapse=",")
-  urlSitefile <- paste("http://waterservices.usgs.gov/nwis/site/?format=rdb&siteOutput=Expanded&sites=",siteNumber,sep = "")
-  
-  doc = tryCatch({
-    h <- basicHeaderGatherer()
-    doc <- getURL(urlSitefile, headerfunction = h$update)
-    
-  }, warning = function(w) {
-    message(paste("URL caused a warning:", urlSitefile))
-    message(w)
-  }, error = function(e) {
-    message(paste("URL does not seem to exist:", urlSitefile))
-    message(e)
-    return(NA)
-  }) 
-  
-  if(h$value()["Content-Type"] == "text/plain;charset=UTF-8"){
-  
-    SiteFile <- read.delim(
-      textConnection(doc),
-      header = TRUE,
-      quote="\"",
-      dec=".",
-      sep='\t',
-      colClasses=c('character'),
-      fill = TRUE,
-      comment.char="#")
-    
-    INFO <- SiteFile[-1,]
-    names(INFO) <- gsub("_",".",names(INFO))
-    
-    INFO$queryTime <- Sys.time()
-    INFO$dec.lat.va <- as.numeric(INFO$dec.lat.va)
-    INFO$dec.long.va <- as.numeric(INFO$dec.long.va)
-    INFO$alt.va <- as.numeric(INFO$alt.va)
-    
-    return(INFO)
-  } else {
-    message(paste("URL caused an error:", urlSitefile))
-    message("Content-Type=",h$value()["Content-Type"])
-    return(NA)
-  }
-}
diff --git a/R/getWaterML1Data.r b/R/getWaterML1Data.r
deleted file mode 100644
index 397e4fa2b701d0c10de0855cb35e41c97ecc0caf..0000000000000000000000000000000000000000
--- a/R/getWaterML1Data.r
+++ /dev/null
@@ -1,153 +0,0 @@
-#' Function to return data from the NWISWeb WaterML1.1 service
-#'
-#' This function accepts a url parameter that already contains the desired
-#' NWIS site, parameter code, statistic, startdate and enddate. 
-#'
-#' @param obs_url string containing the url for the retrieval
-#' @return mergedDF a data frame containing columns agency, site, dateTime, values, and remark codes for all requested combinations
-#' @export
-#' @import XML
-#' @examples
-#' siteNumber <- "02177000"
-#' startDate <- "2012-09-01"
-#' endDate <- "2012-10-01"
-#' offering <- '00003'
-#' property <- '00060'
-#' urlBase <- "http://waterservices.usgs.gov/nwis"
-#' obs_url <- constructNWISURL(siteNumber,property,startDate,endDate,'dv')
-#' data <- getWaterML1Data(obs_url)
-#' urlMulti <- constructNWISURL("04085427",c("00060","00010"),
-#'             startDate,endDate,'dv',statCd=c("00003","00001"))
-#' multiData <- getWaterML1Data(urlMulti)
-#' groundWaterSite <- "431049071324301"
-#' startGW <- "2013-10-01"
-#' endGW <- "2014-06-30"
-#' groundwaterExampleURL <- constructNWISURL(groundWaterSite, NA,
-#'           startGW,endGW, service="gwlevels", format="xml",interactive=FALSE)
-#' groundWater <- getWaterML1Data(groundwaterExampleURL)
-#' unitDataURL <- constructNWISURL(siteNumber,property,
-#'          "2014-10-10","2014-10-10",'uv',format='xml')
-#' unitData <- getWaterML1Data(unitDataURL)
-getWaterML1Data <- function(obs_url){
-  
-  h <- basicHeaderGatherer()
-  doc = tryCatch({
-    returnedDoc <- getURI(obs_url, headerfunction = h$update)
-    if(h$value()["Content-Type"] == "text/xml;charset=UTF-8"){
-      xmlTreeParse(returnedDoc, getDTD = FALSE, useInternalNodes = TRUE)
-    } else {
-      message(paste("URL caused an error:", obs_url))
-      message("Content-Type=",h$value()["Content-Type"])
-      return(NA)
-    }   
-    
-  }, warning = function(w) {
-    message(paste("URL caused a warning:", obs_url))
-    message(w)
-  }, error = function(e) {
-    message(paste("URL does not seem to exist:", obs_url))
-    message(e)
-    return(NA)
-  }) 
-  
-  
-  doc <- xmlRoot(doc)
-  ns <- xmlNamespaceDefinitions(doc, simplify = TRUE)  
-  timeSeries <- xpathApply(doc, "//ns1:timeSeries", namespaces = ns)
-  
-  for (i in 1:length(timeSeries)){
-    
-    chunk <- xmlDoc(timeSeries[[i]])
-    chunk <- xmlRoot(chunk)
-    chunkNS <- xmlNamespaceDefinitions(chunk, simplify = TRUE)  
-    
-    #     site <- as.character(xpathApply(chunk, "ns1:sourceInfo/ns1:siteProperty[@name='hucCd']", namespaces = chunkNS, xmlValue))
-    site <- as.character(xpathApply(chunk, "ns1:sourceInfo/ns1:siteCode", namespaces = chunkNS, xmlValue))
-    agency <- as.character(xpathApply(chunk, "ns1:sourceInfo/ns1:siteCode/@agencyCode", namespaces = chunkNS))
-    pCode <-as.character(xpathApply(chunk, "ns1:variable/ns1:variableCode", namespaces = chunkNS, xmlValue))
-    statCd <- as.character(xpathApply(chunk, "ns1:variable/ns1:options/ns1:option/@optionCode", namespaces = chunkNS))
-    
-    valuesIndex <- as.numeric(which("values" == names(chunk)))
-    
-    zoneAbbrievs <- c(as.character(xpathApply(chunk, "ns1:sourceInfo/ns1:timeZoneInfo/ns1:defaultTimeZone/@zoneAbbreviation", namespaces = chunkNS)),
-                      as.character(xpathApply(chunk, "ns1:sourceInfo/ns1:timeZoneInfo/ns1:daylightSavingsTimeZone/@zoneAbbreviation", namespaces = chunkNS)))
-    
-    names(zoneAbbrievs) <- c(as.character(xpathApply(chunk, "ns1:sourceInfo/ns1:timeZoneInfo/ns1:defaultTimeZone/@zoneOffset", namespaces = chunkNS)),
-                             as.character(xpathApply(chunk, "ns1:sourceInfo/ns1:timeZoneInfo/ns1:daylightSavingsTimeZone/@zoneOffset", namespaces = chunkNS)))
-    
-    for (j in valuesIndex){
-      subChunk <- xmlRoot(xmlDoc(chunk[[j]]))
-      
-      methodID <- as.character(xpathSApply(subChunk, "ns1:method/@methodID", namespaces = chunkNS))
-      
-      methodID <- padVariable(methodID,2)
-      
-      value <- as.numeric(xpathSApply(subChunk, "ns1:value",namespaces = chunkNS, xmlValue))
-      if(length(value)!=0){
-        datetime <- as.POSIXct(strptime(xpathSApply(subChunk, "ns1:value/@dateTime",namespaces = chunkNS),"%Y-%m-%dT%H:%M:%S"))
-        tzHours <- substr(xpathSApply(subChunk, "ns1:value/@dateTime",namespaces = chunkNS),
-                          24,
-                          nchar(xpathSApply(subChunk, "ns1:value/@dateTime",namespaces = chunkNS)))
-        if(mean(nchar(tzHours),rm.na=TRUE) == 6){
-          tzAbbriev <- zoneAbbrievs[tzHours]
-        } else {
-          tzAbbriev <- rep(as.character(zoneAbbrievs[1]),length(datetime))
-        }
-        
-        timeZoneLibrary <- setNames(c("America/New_York","America/New_York","America/Chicago","America/Chicago",
-                                      "America/Denver","America/Denver","America/Los_Angeles","America/Los_Angeles",
-                                      "America/Anchorage","America/Anchorage","America/Honolulu","America/Honolulu"),
-                                    c("EST","EDT","CST","CDT","MST","MDT","PST","PDT","AKST","AKDT","HAST","HST"))
-        timeZone <- as.character(timeZoneLibrary[tzAbbriev])
-        if(length(unique(timeZone)) == 1){
-          datetime <- as.POSIXct(as.character(datetime), tz = unique(timeZone))
-        } else {
-          warning("Mixed time zone information")
-          for(i in seq_along(datetime)){
-            datetime[i] <- as.POSIXct(as.character(datetime[i]), tz = timeZone[i])
-          }
-        }
-        
-        qualifier <- as.character(xpathSApply(subChunk, "ns1:value/@qualifiers",namespaces = chunkNS))
-        
-        valueName <- paste(methodID,pCode,statCd,sep="_")
-        qualName <- paste(methodID,pCode,statCd,"cd",sep="_")
-        valueName <- paste("X",valueName,sep="")
-        qualName <- paste("X",qualName,sep="")
-        
-        assign(valueName,value)
-        assign(qualName,qualifier)
-        
-        if(length(get(qualName))!=0){
-          df <- data.frame(rep(agency,length(datetime)),
-                           rep(site,length(datetime)),
-                           datetime,
-                           tzAbbriev,
-                           get(valueName),
-                           get(qualName),
-                           stringsAsFactors=FALSE)
-          
-          names(df) <- c("agency_cd","site_no","datetime","tz_cd",valueName,qualName)
-        } else {
-          df <- data.frame(rep(agency,length(datetime)),
-                           rep(site,length(datetime)),
-                           datetime,
-                           tzAbbriev,
-                           get(valueName),stringsAsFactors=FALSE)
-          
-          names(df) <- c("agency_cd","site_no","datetime","tz_cd",valueName)       
-        }
-        
-        if (1 == i & valuesIndex[1] == j){
-          mergedDF <- df
-        } else {
-          similarNames <- intersect(names(mergedDF), names(df))
-          mergedDF <- merge(mergedDF, df,by=similarNames,all=TRUE)
-          #         mergedDF <- merge(mergedDF, df,by=c("agency_cd","site_no","datetime","tz_cd"),all=TRUE)
-        }
-      }
-    }
-  }
-  
-  return (mergedDF)
-}
diff --git a/R/importRDB1.r b/R/importRDB1.r
new file mode 100644
index 0000000000000000000000000000000000000000..5ce9cd2566601f21ef99f3b2d8578bd6c25fd0bc
--- /dev/null
+++ b/R/importRDB1.r
@@ -0,0 +1,230 @@
+
+#' Function to return data from the NWIS RDB 1.0 format
+#'
+#' This function accepts a url parameter that already contains the desired
+#' NWIS site, parameter code, statistic, startdate and enddate. It is not
+#' recommended to use the RDB format for importing multi-site data. 
+#'
+#' @param obs_url string containing the url for the retrieval
+#' @param asDateTime logical, if TRUE returns date and time as POSIXct, if FALSE, Date
+#' @param qw logical, if TRUE parses as water quality data (where dates/times are in start and end times)
+#' @param tz string to set timezone attribute of datetime. Default is an empty quote, which converts the 
+#' datetimes to UTC (properly accounting for daylight savings times based on the data's provided tz_cd column).
+#' Possible values to provide are "America/New_York","America/Chicago", "America/Denver","America/Los_Angeles",
+#' "America/Anchorage","America/Honolulu","America/Jamaica","America/Managua","America/Phoenix", and "America/Metlakatla"
+#' @param convertType logical, defaults to TRUE. If TRUE, the function will convert the data to dates, datetimes,
+#' numerics based on a standard algorithm. If false, everything is returned as a string.
+#' @return data a data frame containing columns agency, site, dateTime (converted to UTC), values, and remark codes for all requested combinations
+#' @export
+#' @import RCurl
+#' @examples
+#' siteNumber <- "02177000"
+#' startDate <- "2012-09-01"
+#' endDate <- "2012-10-01"
+#' offering <- "00003"
+#' property <- "00060"
+#' obs_url <- constructNWISURL(siteNumber,property,
+#'          startDate,endDate,"dv",format="tsv")
+#' data <- importRDB1(obs_url)
+#' urlMultiPcodes <- constructNWISURL("04085427",c("00060","00010"),
+#'          startDate,endDate,"dv",statCd=c("00003","00001"),"tsv")
+#' multiData <- importRDB1(urlMultiPcodes)
+#' unitDataURL <- constructNWISURL(siteNumber,property,
+#'          "2013-11-03","2013-11-03","uv",format="tsv") #includes timezone switch
+#' unitData <- importRDB1(unitDataURL, asDateTime=TRUE)
+#' qwURL <- constructNWISURL(c('04024430','04024000'),
+#'           c('34247','30234','32104','34220'),
+#'          "2010-11-03","","qw",format="rdb") 
+#' qwData <- importRDB1(qwURL, qw=TRUE, tz="America/Chicago")
+#' iceSite <- '04024430'
+#' start <- "2013-11-09"
+#' end <- "2013-11-28"
+#' urlIce <- constructNWISURL(iceSite,"00060",start, end,"uv",format="tsv")
+#' 
+#' # User file:
+#' filePath <- system.file("extdata", package="dataRetrieval")
+#' fileName <- "RDB1Example.txt"
+#' fullPath <- file.path(filePath, fileName)
+#' importUserRDB <- importRDB1(fullPath)
+importRDB1 <- function(obs_url, asDateTime=FALSE, qw=FALSE, convertType = TRUE, tz=""){
+  
+  if(tz != ""){
+    tz <- match.arg(tz, c("America/New_York","America/Chicago",
+                          "America/Denver","America/Los_Angeles",
+                          "America/Anchorage","America/Honolulu",
+                          "America/Jamaica","America/Managua",
+                          "America/Phoenix","America/Metlakatla"))
+  }
+  
+  if(url.exists(obs_url)){
+    
+    # 400 bad site id
+    # 404 outside date range, wrong pcode
+    # 200 cool
+    
+    
+    retval = tryCatch({
+      h <- basicHeaderGatherer()
+      doc <- getURL(obs_url, headerfunction = h$update)
+
+      fileVecChar <- scan(obs_url, what = "", sep = "\n", quiet=TRUE)
+      pndIndx<-regexpr("^#", fileVecChar)
+      hdr <- fileVecChar[pndIndx > 0L]
+      
+      if(!(as.character(h$value()["Content-Type"]) == "text/plain;charset=UTF-8" | 
+           as.character(h$value()["Content-Type"]) == "text/plain")){
+        message(paste("URL caused an error:", obs_url))
+        message("Content-Type=",h$value()["Content-Type"])
+      }
+      doc <- textConnection(doc)
+      
+    }, warning = function(w) {
+      message(paste("URL caused a warning:", obs_url))
+      message(w)
+    }, error = function(e) {
+      message(paste("URL does not seem to exist:", obs_url))
+      message(e)
+      return(NA)
+    })
+  } else {
+    doc <- obs_url
+    fileVecChar <- scan(obs_url, what = "", sep = "\n", quiet=TRUE)
+    pndIndx<-regexpr("^#", fileVecChar)
+    hdr <- fileVecChar[pndIndx > 0L]
+  }
+  
+  tmp <- read.delim(  
+    doc, 
+    header = TRUE, 
+    quote="\"", 
+    dec=".", 
+    sep='\t',
+    colClasses=c('character'),
+    fill = TRUE, 
+    comment.char="#")
+  
+  dataType <- tmp[1,]
+  data <- tmp[-1,]
+  
+  if(convertType){
+    
+    #This will break if the 2nd (or greater) site has more columns than the first
+    #Therefore, using RDB is not recommended for multi-site queries.
+    #This correction will work if each site has the same number of columns
+    multiSiteCorrections <- -which(as.logical(apply(data[,1:2], 1, FUN=function(x) all(x %in% as.character(dataType[,1:2])))))
+    
+    if(length(multiSiteCorrections) > 0){
+      data <- data[multiSiteCorrections,]
+      
+      findRowsWithHeaderInfo <- as.integer(apply(data[,1:2], 1, FUN = function(x) if(x[1] == names(data)[1] & x[2] == names(data)[2]) 1 else 0))
+      findRowsWithHeaderInfo <- which(findRowsWithHeaderInfo == 0)
+      data <- data[findRowsWithHeaderInfo,]
+    }
+    
+    offsetLibrary <- setNames(c(5, 4, 6, 5, 7, 6, 8, 7, 9, 8, 10, 10),
+                                c("EST","EDT","CST","CDT","MST","MDT","PST","PDT","AKST","AKDT","HAST","HST"))
+    
+    # The suppressed warning occurs when there is text (such as ice) in the numeric coluym
+    data[,grep('n$', dataType)] <- suppressWarnings(sapply(data[,grep('n$', dataType)], function(x) as.numeric(x)))
+    
+    numberColumns <- grep("_va",names(data))    
+    data[,numberColumns] <- sapply(data[,numberColumns],as.numeric)
+    
+    intColumns <- grep("_nu",names(data))
+    
+    if("current_rating_nu" %in% names(data)){
+      intColumns <- intColumns[!("current_rating_nu" %in% names(data)[intColumns])]
+      data$current_rating_nu <- gsub(" ", "", data$current_rating_nu)
+    }
+    data[,intColumns] <- sapply(data[,intColumns],as.integer)
+    
+    if(length(grep('d$', dataType)) > 0){
+      if (asDateTime & !qw){
+        
+        if("tz_cd" %in% names(data)){
+          offset <- offsetLibrary[data$tz_cd]
+        } else {
+          offset <- 0
+        }
+        offset[is.na(offset)] <- 0
+        
+        data[,regexpr('d$', dataType) > 0] <- as.POSIXct(data[,regexpr('d$', dataType) > 0], "%Y-%m-%d %H:%M", tz = "UTC")
+        data[,regexpr('d$', dataType) > 0] <- data[,regexpr('d$', dataType) > 0] + offset*60*60
+        data[,regexpr('d$', dataType) > 0] <- as.POSIXct(data[,regexpr('d$', dataType) > 0])
+        
+        if(tz != ""){
+          attr(data[,regexpr('d$', dataType) > 0], "tzone") <- tz
+        }
+       
+      } else if (qw){
+        
+        if("sample_start_time_datum_cd" %in% names(data)){
+          timeZoneStartOffset <- offsetLibrary[data$sample_start_time_datum_cd]
+          timeZoneStartOffset[is.na(timeZoneStartOffset)] <- 0
+        } else {
+          timeZoneStartOffset <- 0
+        }
+        
+        if("sample_end_time_datum_cd" %in% names(data)){
+          timeZoneEndOffset <- offsetLibrary[data$sample_end_time_datum_cd]
+          timeZoneEndOffset[is.na(timeZoneEndOffset)] <- 0
+          composite <- TRUE
+        } else {
+          composite <- FALSE
+          if(any(data$sample_end_dt != "") & any(data$sample_end_dm != "")){
+            if(which(data$sample_end_dt != "") == which(data$sample_end_dm != "")){
+              composite <- TRUE
+            }
+          }
+          timeZoneEndOffset <- 0
+        }
+        
+        if("sample_dt" %in% names(data)){
+          if(any(data$sample_dt != "")){
+            suppressWarnings(data$sample_dt <- as.Date(parse_date_time(data$sample_dt, c("Ymd", "mdY"))))
+          }
+        }
+        
+        if("sample_end_dt" %in% names(data)){
+          if(any(data$sample_end_dt != "")){
+            suppressWarnings(data$sample_end_dt <- as.Date(parse_date_time(data$sample_end_dt, c("Ymd", "mdY"))))
+          }        
+        }
+        
+        data$startDateTime <- with(data, as.POSIXct(paste(sample_dt, sample_tm),format="%Y-%m-%d %H:%M", tz = "UTC"))
+        data$startDateTime <- data$startDateTime + timeZoneStartOffset*60*60
+        data$startDateTime <- as.POSIXct(data$startDateTime)
+        
+        if(tz != ""){
+          attr(data$startDateTime, "tzone") <- tz
+        }
+        
+        if(composite){
+          data$endDateTime <- with(data, as.POSIXct(paste(sample_end_dt, sample_end_tm),format="%Y-%m-%d %H:%M", tz = "UTC"))
+          data$endDateTime <- data$endDateTime + timeZoneEndOffset*60*60
+          data$endDateTime <- as.POSIXct(data$endDateTime)
+          
+          if(tz != ""){
+            attr(data$endDateTime, "tzone") <- tz
+          }
+        }
+        
+      } else {
+        for (i in grep('d$', dataType)){
+          if (all(data[,i] != "")){
+            data[,i] <- as.character(data[,i])
+          }
+        }
+      }
+    }
+  
+    row.names(data) <- NULL
+  }
+  
+  comment(data) <- hdr
+  attr(data, "url") <- obs_url
+  attr(data, "queryTime") <- Sys.time()
+  
+  return(data)
+
+}
diff --git a/R/importWQP.R b/R/importWQP.R
new file mode 100644
index 0000000000000000000000000000000000000000..6cd2c41b2b74e61671a432409533f35bc5c32ae7
--- /dev/null
+++ b/R/importWQP.R
@@ -0,0 +1,128 @@
+#' Basic Water Quality Portal Data grabber
+#'
+#' Imports data from the Water Quality Portal based on a specified url.
+#' 
+#' @param url string URL to Water Quality Portal#' @keywords data import USGS web service
+#' @param zip logical used to request the data in a zip format (TRUE)
+#' @param tz string to set timezone attribute of datetime. Default is an empty quote, which converts the 
+#' datetimes to UTC (properly accounting for daylight savings times based on the data's provided tz_cd column).
+#' Possible values to provide are "America/New_York","America/Chicago", "America/Denver","America/Los_Angeles",
+#' "America/Anchorage","America/Honolulu","America/Jamaica","America/Managua","America/Phoenix", and "America/Metlakatla"
+#' @return retval dataframe raw data returned from the Water Quality Portal. Additionally, a POSIXct dateTime column is supplied for 
+#' start and end times.
+#' @export
+#' @import RCurl
+#' @import httr
+#' @import lubridate
+#' @examples
+#' # These examples require an internet connection to run
+#' \dontrun{
+#' ## Examples take longer than 5 seconds:
+#' rawSampleURL <- constructWQPURL('USGS-01594440','01075', '', '')
+#' rawSample <- importWQP(rawSampleURL)
+#' url2 <- paste0(rawSampleURL,"&zip=yes")
+#' rawSample2 <- importWQP(url2, TRUE)
+#' }
+importWQP <- function(url, zip=FALSE, tz=""){
+  
+  h <- basicHeaderGatherer()
+  
+  tryCatch({  
+    if(zip){
+      headerInfo <- HEAD(url)$headers
+      temp <- tempfile()
+      options(timeout = 120)
+      download.file(url,temp, quiet=TRUE, mode='wb')
+      doc <- unzip(temp)
+      unlink(temp)
+    } else {
+      doc <- getURL(url, headerfunction = h$update)
+      headerInfo <- h$value()
+    
+    }
+  }, warning = function(w) {
+    message(paste("URL caused a warning:", url))
+    message(w)
+  }, error = function(e) {
+    message(paste("URL does not seem to exist:", url))
+    message(e)
+    return(NA)
+  })
+  
+  if(tz != ""){
+    tz <- match.arg(tz, c("America/New_York","America/Chicago",
+                          "America/Denver","America/Los_Angeles",
+                          "America/Anchorage","America/Honolulu",
+                          "America/Jamaica","America/Managua",
+                          "America/Phoenix","America/Metlakatla"))
+  }
+    
+  numToBeReturned <- as.numeric(headerInfo["Total-Result-Count"])
+  
+  if (!is.na(numToBeReturned) | numToBeReturned != 0){
+
+    suppressWarnings(namesData <- read.delim(if(zip) doc else textConnection(doc) , header = TRUE, quote="\"",
+                                             dec=".", sep='\t',
+                                             colClasses='character',
+                                             fill = TRUE,nrow=1))
+          
+    classColumns <- setNames(rep('character',ncol(namesData)),names(namesData))
+    
+    classColumns[grep("MeasureValue",names(classColumns))] <- NA
+    
+    suppressWarnings(retval <- read.delim(if(zip) doc else textConnection(doc), header = TRUE, quote="\"", 
+                         dec=".", sep='\t', 
+                         colClasses=as.character(classColumns), 
+                         fill = TRUE))
+    
+    actualNumReturned <- nrow(retval)
+    
+    retval[,names(which(sapply(retval[,grep("MeasureValue",names(retval))], function(x)all(is.na(x)))))] <- ""
+    
+    if(actualNumReturned != numToBeReturned) warning(numToBeReturned, " sample results were expected, ", actualNumReturned, " were returned")
+    
+    offsetLibrary <- setNames(c(5, 4, 6, 5, 7, 6, 8, 7, 9, 8, 10, 10),
+                              c("EST","EDT","CST","CDT","MST","MDT","PST","PDT","AKST","AKDT","HAST","HST"))
+    
+    timeZoneStart <- offsetLibrary[retval$ActivityStartTime.TimeZoneCode]
+    timeZoneEnd <- offsetLibrary[retval$ActivityEndTime.TimeZoneCode]
+    timeZoneStart[is.na(timeZoneStart)] <- 0
+    timeZoneEnd[is.na(timeZoneEnd)] <- 0
+    
+    if("ActivityStartDate" %in% names(retval)){
+      if(any(retval$ActivityStartDate != "")){
+        suppressWarnings(retval$ActivityStartDate <- as.Date(parse_date_time(retval$ActivityStartDate, c("Ymd", "mdY"))))
+      }
+    }
+
+    if("ActivityEndDate" %in% names(retval)){
+      if(any(retval$ActivityEndDate != "")){
+        suppressWarnings(retval$ActivityEndDate <- as.Date(parse_date_time(retval$ActivityEndDate, c("Ymd", "mdY"))))
+      }        
+    }
+
+    if(any(!is.na(timeZoneStart))){
+      
+      retval$ActivityStartDateTime <- with(retval, as.POSIXct(paste(ActivityStartDate, ActivityStartTime.Time),format="%Y-%m-%d %H:%M:%S", tz = "UTC"))
+      retval$ActivityStartDateTime <- retval$ActivityStartDateTime + timeZoneStart*60*60
+      retval$ActivityStartDateTime <- as.POSIXct(retval$ActivityStartDateTime)
+      
+    }
+    
+    if(any(!is.na(timeZoneEnd))){
+      
+      retval$ActivityEndDateTime <- with(retval, as.POSIXct(paste(ActivityEndDate, ActivityEndTime.Time),format="%Y-%m-%d %H:%M:%S", tz = "UTC"))
+      retval$ActivityEndDateTime <- retval$ActivityEndDateTime + timeZoneEnd*60*60
+      retval$ActivityEndDateTime <- as.POSIXct(retval$ActivityEndDateTime)
+      
+      
+    }
+        
+    return(retval)
+    
+  } else {
+    warning("No data to retrieve")
+    return(NA)
+  }
+
+}
\ No newline at end of file
diff --git a/R/importWaterML1.r b/R/importWaterML1.r
new file mode 100644
index 0000000000000000000000000000000000000000..4e138253975a68bc05631cac999e55f72d7a55ae
--- /dev/null
+++ b/R/importWaterML1.r
@@ -0,0 +1,386 @@
+#' Function to return data from the NWISWeb WaterML1.1 service
+#'
+#' This function accepts a url parameter that already contains the desired
+#' NWIS site, parameter code, statistic, startdate and enddate. 
+#'
+#' @param obs_url string containing the url for the retrieval
+#' @param asDateTime logical, if TRUE returns date and time as POSIXct, if FALSE, Date
+#' @param tz string to set timezone attribute of datetime. Default is an empty quote, which converts the 
+#' datetimes to UTC (properly accounting for daylight savings times based on the data's provided tz_cd column).
+#' Possible values to provide are "America/New_York","America/Chicago", "America/Denver","America/Los_Angeles",
+#' "America/Anchorage","America/Honolulu","America/Jamaica","America/Managua","America/Phoenix", and "America/Metlakatla"
+#' @return mergedDF a data frame containing columns agency, site, dateTime, values, and remark codes for all requested combinations
+#' @export
+#' @import XML
+#' @import RCurl
+#' @import reshape2
+#' @examples
+#' siteNumber <- "02177000"
+#' startDate <- "2012-09-01"
+#' endDate <- "2012-10-01"
+#' offering <- '00003'
+#' property <- '00060'
+#' obs_url <- constructNWISURL(siteNumber,property,startDate,endDate,'dv')
+#' data <- importWaterML1(obs_url,TRUE)
+#' 
+#' groundWaterSite <- "431049071324301"
+#' startGW <- "2013-10-01"
+#' endGW <- "2014-06-30"
+#' groundwaterExampleURL <- constructNWISURL(groundWaterSite, NA,
+#'           startGW,endGW, service="gwlevels")
+#' groundWater <- importWaterML1(groundwaterExampleURL)
+#' 
+#' unitDataURL <- constructNWISURL(siteNumber,property,
+#'          "2013-11-03","2013-11-03",'uv')
+#' unitData <- importWaterML1(unitDataURL,TRUE)
+#' 
+#' filePath <- system.file("extdata", package="dataRetrieval")
+#' fileName <- "WaterML1Example.xml"
+#' fullPath <- file.path(filePath, fileName)
+#' importUserWM1 <- importWaterML1(fullPath,TRUE)
+#'
+#' # Two sites, two pcodes, one site has two data descriptors:
+#' siteNumber <- c('01480015',"04085427")
+#' obs_url <- constructNWISURL(siteNumber,c("00060","00010"),startDate,endDate,'dv')
+#' data <- importWaterML1(obs_url)
+#' data$dateTime <- as.Date(data$dateTime)
+#' data <- renameNWISColumns(data)
+#' names(attributes(data))
+#' attr(data, "url")
+#' attr(data, "disclaimer")
+importWaterML1 <- function(obs_url,asDateTime=FALSE, tz=""){
+  
+  if(url.exists(obs_url)){
+    doc = tryCatch({
+      h <- basicHeaderGatherer()
+      returnedDoc <- getURI(obs_url, headerfunction = h$update)
+      if(h$value()["Content-Type"] == "text/xml;charset=UTF-8"){
+        xmlTreeParse(returnedDoc, getDTD = FALSE, useInternalNodes = TRUE)
+      } else {
+        message(paste("URL caused an error:", obs_url))
+        message("Content-Type=",h$value()["Content-Type"])
+        return(NA)
+      }   
+      
+    }, warning = function(w) {
+      message(paste("URL caused a warning:", obs_url))
+      message(w)
+    }, error = function(e) {
+      message(paste("URL does not seem to exist:", obs_url))
+      message(e)
+      return(NA)
+    }) 
+  } else {
+    doc <- xmlTreeParse(obs_url, getDTD = FALSE, useInternalNodes = TRUE)
+  }
+  
+  if(tz != ""){
+    tz <- match.arg(tz, c("America/New_York","America/Chicago",
+                          "America/Denver","America/Los_Angeles",
+                          "America/Anchorage","America/Honolulu",
+                          "America/Jamaica","America/Managua",
+                          "America/Phoenix","America/Metlakatla"))
+  }
+  
+  doc <- xmlRoot(doc)
+  ns <- xmlNamespaceDefinitions(doc, simplify = TRUE)  
+  queryInfo <- xmlToList(xmlRoot(xmlDoc(doc[["queryInfo"]])))
+  names(queryInfo) <- make.unique(names(queryInfo))
+  
+  noteIndex <- grep("note",names(queryInfo))
+  
+  noteTitles <- as.character(lapply(queryInfo[noteIndex], function(x) x$.attrs))
+  notes <- as.character(lapply(queryInfo[noteIndex], function(x) x$text))
+  names(notes) <- noteTitles
+  
+  timeSeries <- xpathApply(doc, "//ns1:timeSeries", namespaces = ns)
+  
+  
+  if(0 == length(timeSeries)){
+    message("Returning an empty dataset")
+    df <- data.frame()
+    attr(df, "queryInfo") <- queryInfo
+    return(df)
+  }
+  
+  attList <- list()
+  dataColumns <- c()
+  qualColumns <- c()
+  
+  for (i in 1:length(timeSeries)){
+    
+    chunk <- xmlDoc(timeSeries[[i]])
+    chunk <- xmlRoot(chunk)
+    chunkNS <- xmlNamespaceDefinitions(chunk, simplify = TRUE)  
+      
+    uniqueName <- as.character(xpathApply(chunk, "@name", namespaces = chunkNS))
+    site <- as.character(xpathApply(chunk, "ns1:sourceInfo/ns1:siteCode", namespaces = chunkNS, xmlValue))
+    agency <- as.character(xpathApply(chunk, "ns1:sourceInfo/ns1:siteCode/@agencyCode", namespaces = chunkNS))
+    pCode <-as.character(xpathApply(chunk, "ns1:variable/ns1:variableCode", namespaces = chunkNS, xmlValue))
+    statCd <- as.character(xpathApply(chunk, "ns1:variable/ns1:options/ns1:option/@optionCode", namespaces = chunkNS))
+    noValue <- as.numeric(xpathApply(chunk, "ns1:variable/ns1:noDataValue", namespaces = chunkNS, xmlValue))
+    
+    extraSiteData <-  xmlToList(xmlRoot(xmlDoc(chunk[["sourceInfo"]])))
+    extraVariableData <-  xmlToList(xmlRoot(xmlDoc(chunk[["variable"]])))
+    
+    valuesIndex <- as.numeric(which("values" == names(chunk)))
+
+        
+    zoneAbbrievs <- c(as.character(xpathApply(chunk, "ns1:sourceInfo/ns1:timeZoneInfo/ns1:defaultTimeZone/@zoneAbbreviation", namespaces = chunkNS)),
+                      as.character(xpathApply(chunk, "ns1:sourceInfo/ns1:timeZoneInfo/ns1:daylightSavingsTimeZone/@zoneAbbreviation", namespaces = chunkNS)))
+    names(zoneAbbrievs) <- c(as.character(xpathApply(chunk, "ns1:sourceInfo/ns1:timeZoneInfo/ns1:defaultTimeZone/@zoneOffset", namespaces = chunkNS)),
+                      as.character(xpathApply(chunk, "ns1:sourceInfo/ns1:timeZoneInfo/ns1:daylightSavingsTimeZone/@zoneOffset", namespaces = chunkNS)))
+    
+
+    for (j in valuesIndex){
+      subChunk <- xmlRoot(xmlDoc(chunk[[j]]))
+      
+      methodID <- as.character(xpathSApply(subChunk, "ns1:method/@methodID", namespaces = chunkNS))
+      
+      methodID <- zeroPad(methodID,2)
+      
+      value <- as.numeric(xpathSApply(subChunk, "ns1:value",namespaces = chunkNS, xmlValue))  
+      
+      if(length(value)!=0){
+      
+        value[value == noValue] <- NA
+            
+        attNames <- xpathSApply(subChunk, "ns1:value/@*",namespaces = chunkNS)
+        attributeNames <- unique(names(attNames))
+  
+        x <- lapply(attributeNames, function(x) xpathSApply(subChunk, paste0("ns1:value/@",x),namespaces = chunkNS))
+        
+        
+        methodDescription <- as.character(xpathApply(subChunk, "ns1:method/ns1:methodDescription", namespaces = chunkNS, xmlValue))
+        
+        valueName <- paste("X",pCode,statCd,sep="_")
+        
+        if(length(methodDescription) > 0 && methodDescription != ""){
+          valueName <- paste("X",methodDescription,pCode,statCd,sep="_") 
+        }
+        
+         
+        assign(valueName,value)
+        
+        df <- data.frame(agency = rep(agency,length(value)),
+                         site_no = rep(site,length(value)),
+                         stringsAsFactors=FALSE)
+        
+        if(length(attributeNames) > 0){
+          for(k in 1:length(attributeNames)){
+            attVal <- as.character(x[[k]])
+            if(length(attVal) == nrow(df)){
+              df$temp <- as.character(x[[k]])
+              
+            } else {
+              attrList <- xpathApply(subChunk, "ns1:value", namespaces = chunkNS, xmlAttrs)
+              df$temp <- sapply(1:nrow(df),function(x) as.character(attrList[[x]][attributeNames[k]]))
+              df$temp[is.na(df$temp)] <- ""
+            }
+            names(df)[which(names(df) %in% "temp")] <- attributeNames[k]
+            
+          }
+        }
+        
+        df <- cbind(df, get(valueName))
+        names(df)[length(df)] <- valueName
+        
+        if("qualifiers" %in% names(df)){
+          qualName <- paste(valueName,"cd",sep="_")
+          names(df)[which(names(df) == "qualifiers")] <- qualName
+          qualColumns <- c(qualColumns, qualName)
+        }
+        
+        dataColumns <- c(dataColumns, valueName)
+        
+        if("dateTime" %in% attributeNames){
+          
+          datetime <- xpathSApply(subChunk, "ns1:value/@dateTime",namespaces = chunkNS)
+          
+          numChar <- nchar(datetime)
+          
+          if(asDateTime){
+            
+            # Common options:
+            # YYYY numChar=4
+            # YYYY-MM-DD numChar=10
+            # YYYY-MM-DDTHH:MM numChar=16
+            # YYYY-MM-DDTHH:MM:SS numChar=19
+            # YYYY-MM-DDTHH:MM:SSZ numChar=20
+            # YYYY-MM-DDTHH:MM:SS.000 numChar=23
+            # YYYY-MM-DDTHH:MM:SS.000-XX:00 numChar=29
+                        
+            if(abs(max(numChar) - min(numChar)) != 0){
+              message("Mixed date types, not converted to POSIXct")
+            } else {
+              numChar <- numChar[1]
+              if(numChar == 4){
+                datetime <- as.POSIXct(datetime, "%Y", tz = "UTC")
+              } else if(numChar == 10){
+                datetime <- as.POSIXct(datetime, "%Y-%m-%d", tz = "UTC")
+              } else if(numChar == 16){
+                datetime <- as.POSIXct(datetime, "%Y-%m-%dT%H:%M", tz = "UTC")
+              } else if(numChar == 19){
+                datetime <- as.POSIXct(datetime, "%Y-%m-%dT%H:%M:%S", tz = "UTC")
+              } else if(numChar == 20){
+                datetime <- as.POSIXct(datetime, "%Y-%m-%dT%H:%M:%S", tz = "UTC")
+              }  else if(numChar == 23){
+                datetime <- as.POSIXct(datetime, "%Y-%m-%dT%H:%M:%OS", tz = "UTC")
+              } else if(numChar == 24){
+                datetime <- substr(datetime,1,23)
+                datetime <- as.POSIXct(datetime, "%Y-%m-%dT%H:%M:%OS", tz = "UTC")
+                df$tz_cd <- rep(zoneAbbrievs[1], nrow(df))
+              } else if(numChar == 29){
+                tzOffset <- as.character(substr(datetime,24,numChar))
+                
+                tzHours <- as.numeric(substr(tzOffset,1,3))
+  
+                datetime <- substr(datetime,1,23)
+                datetime <- as.POSIXct(datetime, "%Y-%m-%dT%H:%M:%OS", tz = "UTC")
+                datetime <- datetime + tzHours*60*60
+                df$tz_cd <- as.character(zoneAbbrievs[tzOffset]) 
+              }
+              
+              if(!("tz_cd" %in% names(df))){
+                df$tz_cd <- zoneAbbrievs[1]
+                tzHours <- as.numeric(substr(names(zoneAbbrievs[1]),1,3))
+                datetime <- datetime + tzHours*60*60
+              }
+            }
+            
+            if(tz != ""){
+              attr(datetime, "tzone") <- tz
+            }
+            
+            
+          } else {
+            
+            datetime <- as.character(datetime)
+            if(any(numChar) == 29){
+              tzOffset <- as.character(substr(datetime,24,numChar))
+              df$tz_cd <- as.character(zoneAbbrievs[tzOffset]) 
+              df$tz_cd[is.na(df$tz_cd)] <- zoneAbbrievs[1]
+            } else {
+              df$tz_cd <- zoneAbbrievs[1]
+            }
+            
+          }
+          
+          df$dateTime <- datetime     
+          
+        }
+        
+        colNames <- names(df)
+        
+        if( exists("qualName")){
+          columnsOrdered <- c("agency","site_no","dateTime","tz_cd",attributeNames[attributeNames != "dateTime"],qualName,valueName)
+        } else {
+          columnsOrdered <- c("agency","site_no","dateTime","tz_cd",attributeNames[attributeNames != "dateTime"],valueName)
+        }
+        
+        columnsOrderd <- columnsOrdered[columnsOrdered %in% names(df)]
+        
+        
+        
+        df <- df[,columnsOrderd]
+                  
+        names(extraSiteData) <- make.unique(names(extraSiteData))
+        
+        sitePropertyIndex <- grep("siteProperty",names(extraSiteData))
+        
+        siteInfo <- data.frame(station_nm=extraSiteData$siteName,
+                               site_no=extraSiteData$siteCode$text,
+                               agency=extraSiteData$siteCode$.attrs[["agencyCode"]],
+                               timeZoneOffset=extraSiteData$timeZoneInfo$defaultTimeZone[1],
+                               timeZoneAbbreviation=extraSiteData$timeZoneInfo$defaultTimeZone[2],
+                               dec_lat_va=as.numeric(extraSiteData$geoLocation$geogLocation$latitude),
+                               dec_lon_va=as.numeric(extraSiteData$geoLocation$geogLocation$longitude),
+                               srs=extraSiteData$geoLocation$geogLocation$.attrs[["srs"]],
+                               stringsAsFactors=FALSE)
+
+        properties <- as.character(lapply(extraSiteData[sitePropertyIndex], function(x) {
+          if(".attrs" %in% names(x)){
+            x$.attrs
+          } else {
+            NA
+          }              
+          }))
+    
+        propertyValues <- as.character(lapply(extraSiteData[sitePropertyIndex], function(x) {
+          if("text" %in% names(x)){
+            x$text
+          } else {
+            NA
+          }              
+          }))
+        
+        names(propertyValues) <- properties
+        propertyValues <- propertyValues[propertyValues != "NA"]
+        siteInfo <- cbind(siteInfo, t(propertyValues))            
+        
+        names(extraVariableData) <- make.unique(names(extraVariableData))
+        variableInfo <- data.frame(parameterCd=extraVariableData$variableCode$text,
+                                   parameter_nm=extraVariableData$variableName,
+                                   parameter_desc=extraVariableData$variableDescription,
+                                   valueType=extraVariableData$valueType,
+                                   param_units=extraVariableData$unit$unitCode,
+                                   noDataValue=as.numeric(extraVariableData$noDataValue),
+                                   stringsAsFactors=FALSE)
+        
+        if (1 == i & valuesIndex[1] == j){
+          mergedDF <- df
+          siteInformation <- siteInfo
+          variableInformation <- variableInfo
+          
+        } else {
+          similarNames <- intersect(names(mergedDF), names(df))
+          mergedDF <- merge(mergedDF, df,by=similarNames,all=TRUE)
+          
+          similarSites <- intersect(names(siteInformation), names(siteInfo))
+          siteInformation <- merge(siteInformation, siteInfo, by=similarSites, all=TRUE)
+          
+          similarVariables <- intersect(names(variableInformation),names(variableInfo))
+          variableInformation <- merge(variableInformation, variableInfo, by=similarVariables, all=TRUE)
+        }
+      }
+    }
+    attList[[uniqueName]] <- list(extraSiteData, extraVariableData)
+
+    
+  }
+  
+  dataColumns <- unique(dataColumns)
+  qualColumns <- unique(qualColumns)
+  
+  sortingColumns <- names(mergedDF)[!(names(mergedDF) %in% c(dataColumns,qualColumns))]
+
+  meltedmergedDF  <- melt(mergedDF,id.vars=sortingColumns)
+  meltedmergedDF  <- meltedmergedDF[!is.na(meltedmergedDF$value),] 
+
+  castFormula <- as.formula(paste(paste(sortingColumns, collapse="+"),"variable",sep="~"))
+  mergedDF2 <- dcast(meltedmergedDF, castFormula, drop=FALSE)
+  dataColumns2 <- !(names(mergedDF2) %in% sortingColumns)
+  if(sum(dataColumns2) == 1){
+    mergedDF <- mergedDF2[!is.na(mergedDF2[,dataColumns2]),]
+  } else {
+    mergedDF <- mergedDF2[rowSums(is.na(mergedDF2[,dataColumns2])) != sum(dataColumns2),]
+  }
+  
+  if(length(dataColumns) > 1){
+    mergedDF[,dataColumns] <- lapply(mergedDF[,dataColumns], function(x) as.numeric(x))
+  } else {
+    mergedDF[,dataColumns] <- as.numeric(mergedDF[,dataColumns])
+  }
+  
+  
+  row.names(mergedDF) <- NULL
+  attr(mergedDF, "url") <- obs_url
+  attr(mergedDF, "attributeList") <- attList
+  attr(mergedDF, "siteInfo") <- siteInformation
+  attr(mergedDF, "variableInfo") <- variableInformation
+  attr(mergedDF, "disclaimer") <- notes["disclaimer"]
+  attr(mergedDF, "queryInfo") <- queryInfo
+  attr(mergedDF, "queryTime") <- Sys.time()
+  
+  return (mergedDF)
+}
diff --git a/R/importWaterML2.r b/R/importWaterML2.r
new file mode 100644
index 0000000000000000000000000000000000000000..6b9e9107ebc0ee9c22820e12fb8b2a0fd40f02e6
--- /dev/null
+++ b/R/importWaterML2.r
@@ -0,0 +1,160 @@
+#' Function to return data from the WaterML2 data
+#'
+#' This function accepts a url parameter for a WaterML2 getObservation request.
+#'
+#' @param obs_url string containing the url for the retrieval
+#' @param asDateTime logical, if TRUE returns date and time as POSIXct, if FALSE, Date
+#' @param tz string to set timezone attribute of datetime. Default is an empty quote, which converts the 
+#' datetimes to UTC (properly accounting for daylight savings times based on the data's provided tz_cd column).
+#' Possible values to provide are "America/New_York","America/Chicago", "America/Denver","America/Los_Angeles",
+#' "America/Anchorage","America/Honolulu","America/Jamaica","America/Managua","America/Phoenix", and "America/Metlakatla"
+#' @return mergedDF a data frame containing columns agency, site, dateTime, values, and remark codes for all requested combinations
+#' @export
+#' @import XML
+#' @import RCurl
+#' @importFrom plyr rbind.fill.matrix
+#' @examples
+#' baseURL <- "http://waterservices.usgs.gov/nwis/dv/?format=waterml,2.0"
+#' URL <- paste(baseURL, "sites=01646500",
+#'      "startDT=2014-09-01",
+#'      "endDT=2014-09-08",
+#'      "statCd=00003",
+#'      "parameterCd=00060",sep="&")
+#' URL2 <- paste("http://cida.usgs.gov/noreast-sos/simple?request=GetObservation",
+#'      "featureID=MD-BC-BC-05",
+#'      "offering=RAW",
+#'      "observedProperty=WATER",sep="&")
+#' \dontrun{
+#' dataReturned1 <- importWaterML2(URL)
+#' dataReturn2 <- importWaterML2(URL2, TRUE)
+#' URLmulti <-  paste(baseURL,
+#'   "sites=04024430,04024000",
+#'   "startDT=2014-09-01",
+#'   "endDT=2014-09-08",
+#'   "statCd=00003",
+#'   "parameterCd=00060",sep="&")
+#' dataReturnMulti <- importWaterML2(URLmulti)
+#' filePath <- system.file("extdata", package="dataRetrieval")
+#' fileName <- "WaterML2Example.xml"
+#' fullPath <- file.path(filePath, fileName)
+#' UserData <- importWaterML2(fullPath)
+#' }
+importWaterML2 <- function(obs_url, asDateTime=FALSE, tz=""){
+  
+  if(url.exists(obs_url)){
+    doc = tryCatch({
+      h <- basicHeaderGatherer()
+      returnedDoc <- getURL(obs_url, headerfunction = h$update)
+      if(h$value()["Content-Type"] == "text/xml;charset=UTF-8" | 
+           h$value()["Content-Type"] == "text/xml; subtype=gml/3.1.1;charset=UTF-8"){
+        xmlTreeParse(returnedDoc, getDTD = FALSE, useInternalNodes = TRUE)
+      } else {
+        message(paste("URL caused an error:", obs_url))
+        message("Content-Type=",h$value()["Content-Type"])
+        return(NA)
+      }   
+      
+    }, warning = function(w) {
+      message(paste("URL caused a warning:", obs_url))
+      message(w)
+    }, error = function(e) {
+      message(paste("URL does not seem to exist:", obs_url))
+      message(e)
+      return(NA)
+    }) 
+  } else {
+    doc <- xmlTreeParse(obs_url, getDTD = FALSE, useInternalNodes = TRUE)
+  }
+  
+  if(tz != ""){
+    tz <- match.arg(tz, c("America/New_York","America/Chicago",
+                          "America/Denver","America/Los_Angeles",
+                          "America/Anchorage","America/Honolulu",
+                          "America/Jamaica","America/Managua",
+                          "America/Phoenix","America/Metlakatla"))
+  }
+  
+  doc <- xmlRoot(doc)
+  
+  ns <- xmlNamespaceDefinitions(doc, simplify = TRUE)  
+  
+  
+  timeSeries <- xpathApply(doc, "//wml2:Collection", namespaces = ns)
+  
+  if(0 == length(timeSeries)){
+    stop("No data to return for URL:", obs_url)
+  }
+  
+  for (i in 1:length(timeSeries)){
+  
+    chunk <- xmlDoc(timeSeries[[i]])
+    chunk <- xmlRoot(chunk)
+    chunkNS <- xmlNamespaceDefinitions(chunk, simplify = TRUE)
+    
+    xp <- xpathApply(chunk, "//wml2:MeasurementTimeseries/wml2:point/wml2:MeasurementTVP", 
+                     xpathSApply, ".//*[not(*)]", 
+                     function(x) setNames(ifelse(nzchar(xmlValue(x)), 
+                                                 xmlValue(x), 
+                                                    ifelse("qualifier" == xmlName(x),
+                                                           xpathSApply(x,"./@xlink:title",namespaces = ns),"")), #originally I had the "" as xmlAttr(x) 
+                                                            xmlName(x)), 
+                     namespaces = chunkNS)
+  
+    if(length(xpathApply(doc, 
+                  "//wml2:MeasurementTimeseries/wml2:point/wml2:MeasurementTVP/wml2:metadata/wml2:TVPMeasurementMetadata", 
+                  xmlValue, namespaces = ns)) != 0){
+      xp <- xp[-1]
+    }
+      
+    DF2 <- do.call(rbind.fill.matrix, lapply(xp, t))
+    DF2 <- as.data.frame(DF2,stringsAsFactors=FALSE)
+    
+    if(asDateTime){
+    
+      DF2$time <- gsub(":","",DF2$time)
+      DF2$time <- ifelse(nchar(DF2$time) > 18,
+                                   as.POSIXct(DF2$time, format="%Y-%m-%dT%H%M%S%z",tz="UTC"),
+                                         as.POSIXct(DF2$time, format="%Y-%m-%dT%H%M%S",tz="UTC"))
+      
+      DF2$time <- as.POSIXct(DF2$time, origin = "1970-01-01", tz="UTC")
+      
+      if(tz != ""){
+        attr(DF2$time, "tzone") <- tz
+      }
+      
+    } else {
+      DF2$time <- as.Date(DF2$time)
+    }
+  
+    DF2$value <- as.numeric(DF2$value)
+    # Very specific to USGS:
+    defaultQualifier <- as.character(xpathApply(chunk, "//wml2:defaultPointMetadata/wml2:DefaultTVPMeasurementMetadata/wml2:qualifier/@xlink:title",namespaces = chunkNS))
+    
+    if (length(defaultQualifier) == 0 && (typeof(defaultQualifier) == "character")) {
+      defaultQualifier <- "NA"
+    }
+    
+    if("qualifier" %in% names(DF2)){
+      DF2$qualifier <- ifelse(defaultQualifier != DF2$qualifier,DF2$qualifier,defaultQualifier)
+    } else {
+      DF2$qualifier <- rep(defaultQualifier,nrow(DF2))
+    }
+    
+    
+    DF2$qualifier <- ifelse("Provisional data subject to revision." == DF2$qualifier, "P",
+                               ifelse("Approved for publication. Processing and review completed." == DF2$qualifier, "A", DF2$qualifier))
+    
+  
+    id <- as.character(xpathApply(chunk, "//gml:identifier", xmlValue, namespaces = chunkNS))
+    DF2$identifier <- rep(id, nrow(DF2))
+    
+    if (1 == i ){
+      mergedDF <- DF2
+    } else {
+      similarNames <- intersect(names(mergedDF), names(DF2))
+      mergedDF <- merge(mergedDF, DF2,by=similarNames,all=TRUE)
+    }
+  }
+
+  return (mergedDF)
+}
diff --git a/R/mergeReport.r b/R/mergeReport.r
deleted file mode 100644
index 8be574094d75fab6e70fb5fd7cc53cf313d93d22..0000000000000000000000000000000000000000
--- a/R/mergeReport.r
+++ /dev/null
@@ -1,26 +0,0 @@
-#' Merge Sample and Daily Data for WRTDS
-#'
-#' Merges the flow data from the daily record into the sample record.
-#'
-#' @param Daily dataframe containing the daily data, default is Daily
-#' @param Sample dataframe containing the sample data, default is Sample
-#' @param interactive logical Option for interactive mode.  If true, there is user interaction for error handling and data checks.
-#' @keywords data import USGS WRTDS
-#' @export
-#' @return newSample dataframe with merged flow information
-#' @seealso \code{\link{getNWISDaily}}, \code{\link{getNWISSample}}
-#' @examples
-#' # These examples require an internet connection to run
-#' Daily <- getNWISDaily('01594440','00060', '1985-01-01', '1985-03-31')
-#' Sample <- getNWISSample('01594440','01075', '1985-01-01', '1985-03-31')
-#' Sample <- mergeReport(Daily, Sample)
-mergeReport<-function(Daily, Sample, interactive=TRUE){
-  
-  if (interactive){
-    dataOverview(Daily, Sample)  
-  }
-  
-  newSample <- merge(Daily[,c("Date","Q","LogQ")],Sample,by = "Date",all.y = TRUE)
-
-  return(newSample)
-}
diff --git a/R/populateConcentrations.r b/R/populateConcentrations.r
deleted file mode 100644
index 582e1d70ba616853eb256c13ebf5578cdd84eba5..0000000000000000000000000000000000000000
--- a/R/populateConcentrations.r
+++ /dev/null
@@ -1,22 +0,0 @@
-#' Populate Concentration Columns
-#'
-#' Creates ConcLow, ConcHigh, Uncen (0 if censored, 1 if uncensored) columns for Sample data frame for WRTDS study.
-#'
-#' @param rawData vector with value and code columns
-#' @return concentrationColumns dataframe
-#' @export
-#' @examples
-#' code <- c("","<","")
-#' value <- c(1,2,3)
-#' dataInput <- data.frame(value, code, stringsAsFactors=FALSE)
-#' concentrationDF <- populateConcentrations(dataInput)
-populateConcentrations <- function(rawData){  # rawData is a dataframe with value, code
-  concentrationColumns <- as.data.frame(matrix(ncol=3,nrow=length(rawData$value)))
-  colnames(concentrationColumns) <- c('ConcLow','ConcHigh','Uncen')  
-  concentrationColumns$ConcLow <- as.numeric(ifelse((rawData$code!="<" | is.na(rawData$code)),rawData$value,0))
-  concentrationColumns$ConcHigh <- as.numeric(rawData$value)
-  tempConcLow<-ifelse((rawData$code!="<" | is.na(rawData$code)),rawData$value,0)
-  concentrationColumns$Uncen <- ifelse(tempConcLow==0,0,1)  
-  #Add if value = NA?
-  return (concentrationColumns)  # returns ConcLow, ConcHigh, Uncen (0 if censored, 1 if uncensored)
-}
diff --git a/R/populateDaily.r b/R/populateDaily.r
deleted file mode 100644
index 2ec2d3ed66d39e59db1361729859076842e20ff2..0000000000000000000000000000000000000000
--- a/R/populateDaily.r
+++ /dev/null
@@ -1,114 +0,0 @@
-#' Populate Daily data frame
-#'
-#' Using raw data that has at least dateTime, value, code, populates the rest of the basic Daily data frame used in WRTDS
-#'
-#' @param rawData dataframe contains at least dateTime, value, code columns
-#' @param qConvert string conversion to cubic meters per second
-#' @param interactive logical Option for interactive mode.  If true, there is user interaction for error handling and data checks.
-#' @keywords WRTDS flow
-#' @author Robert M. Hirsch \email{rhirsch@@usgs.gov}
-#' @import zoo
-#' @return dataframe Daily
-#' @export
-#' @import zoo
-#' @examples
-#' dateTime <- c('1985-01-01', '1985-01-02', '1985-01-03')
-#' value <- c(1,2,3)
-#' code <- c("","","")
-#' dataInput <- data.frame(dateTime, value, code, stringsAsFactors=FALSE)
-#' Daily <- populateDaily(dataInput, 2)
-populateDaily <- function(rawData,qConvert,interactive=TRUE){  # rawData is a dataframe with at least dateTime, value, code
-  
-#   require(zoo)
-  
-  localDaily <- as.data.frame(matrix(ncol=2,nrow=length(rawData$value)))
-  colnames(localDaily) <- c('Date','Q')
-  localDaily$Date <- rawData$dateTime
-  
-  # need to convert to cubic meters per second to store the values
-  localDaily$Q <- rawData$value/qConvert
-  
-  dateFrame <- populateDateColumns(rawData$dateTime)
-  localDaily <- cbind(localDaily, dateFrame[,-1])
-  
-  localDaily$Date <- as.Date(localDaily$Date)
-  
-  if(length(rawData$code) != 0) localDaily$Qualifier <- rawData$code
-  
-  localDaily$i <- 1:nrow(localDaily)
-  
-  noDataValue <- -999999
-  
-  nd <- localDaily$Q==noDataValue
-  
-  localDaily$Q<-ifelse(nd,NA,localDaily$Q)
-  
-  zeros<-which(localDaily$Q<=0)
-  
-  nz<-length(zeros)
-  
-  if(nz>0) {
-
-    qshift<- 0.001*mean(localDaily$Q, na.rm=TRUE) 
-    if (interactive){
-      
-      zeroNums <- length(which(localDaily$Q == 0))
-
-      if (zeroNums > 0){
-        cat("There were", as.character(zeroNums), "zero flow days \n")
-      }
-      
-      cat("All days had",as.character(qshift),"cms added to the discharge value.\n")
-      
-    }
-  } else {
-    qshift<-0.0
-  }
-  
-  negNums <- length(which(localDaily$Q<0))
-  if (negNums > 0) {
-    cat("There were", as.character(negNums), "negative flow days \n")
-    cat("Negative values are not supported in the EGRET package\n")
-  }
-  
-  localDaily$Q<-localDaily$Q+qshift
-  
-  localDaily$LogQ <- log(localDaily$Q)
-  
-  Qzoo<-zoo(localDaily$Q)
-  
-  if (length(rawData$dateTime) < 30){
-    if (interactive){
-      cat("This program requires at least 30 data points. You have only provided:", length(rawData$dateTime),"Rolling means will not be calculated.\n")
-    }
-    warning("This program requires at least 30 data points. Rolling means will not be calculated.")
-  } else {
-    localDaily$Q7<-as.numeric(rollapply(Qzoo,7,mean,na.rm=FALSE,fill=NA,align="right"))
-    localDaily$Q30<-as.numeric(rollapply(Qzoo,30,mean,na.rm=FALSE,fill=NA,align="right"))    
-  }
-  
-  dataPoints <- nrow(localDaily)
-  difference <- (localDaily$Julian[dataPoints] - localDaily$Julian[1])+1  
-  if (interactive){
-    cat("There are", as.character(dataPoints), "data points, and", as.character(difference), "days.\n")
-
-    #these next two lines show the user where the gaps in the data are if there are any
-    n<-nrow(localDaily)
-    for(i in 2:n) {
-      if((localDaily$Julian[i]-localDaily$Julian[i-1])>1) cat("\n discharge data jumps from",as.character(localDaily$Date[i-1]),"to",as.character(localDaily$Date[i]))
-    }
-    
-    numNAs <- sum(is.na(localDaily$Q))
-    if(numNAs > 0){
-      cat(numNAs, "discharge measurements are not reported (NA's). \nMany of the EGRET functions will not work with missing discharge measurements.")
-      if (localDaily$Julian[max(which(is.na(localDaily$Q)),na.rm = TRUE)]-
-           localDaily$Julian[min(which(is.na(localDaily$Q)),na.rm = TRUE)]+1 ==  numNAs){
-        cat("\nNA gap is from",as.character(localDaily$Date[min(which(is.na(localDaily$Q)),na.rm = TRUE)]),"to",
-            as.character(localDaily$Date[max(which(is.na(localDaily$Q)),na.rm = TRUE)]))
-      } 
-    }
-    
-  }
-  
-  return (localDaily)  
-}
diff --git a/R/populateDateColumns.r b/R/populateDateColumns.r
deleted file mode 100644
index 9e0503cd43443b8af4bdf045a389bda40739aac9..0000000000000000000000000000000000000000
--- a/R/populateDateColumns.r
+++ /dev/null
@@ -1,36 +0,0 @@
-#' Populate Date Columns
-#'
-#' Creates various date columns for WRTDS study.
-#'
-#' @param rawData vector with dateTime
-#' @return DateFrame dataframe
-#' @importFrom lubridate decimal_date
-#' @export
-#' @examples
-#' dateTime <- c('1984-02-28 13:56', '1984-03-01', '1986-03-01')
-#' expandedDateDF <- populateDateColumns(dateTime)
-populateDateColumns <- function(rawData){  # rawData is a vector of dates
-  DateFrame <- as.data.frame(matrix(ncol=1,nrow=length(rawData)))
-  colnames(DateFrame) <- c('Date')  
-  DateFrame$Date <- rawData
-  dateTime <- as.POSIXlt(rawData)
-  DateFrame$Julian <- as.numeric(julian(dateTime,origin=as.Date("1850-01-01")))
-  DateFrame$Month <- dateTime$mon + 1
-  DateFrame$Day <- dateTime$yday + 1
-  year <- dateTime$year + 1900
-  hour <- dateTime$hour
-  minute <- dateTime$min
-  
-  if (sum(hour) == 0 & sum(minute) == 0){
-    dateTime$hour <- rep(12,length(dateTime))
-  }
-  
-  leapOffset <- ifelse((year%%4 == 0) & ((year%%100 != 0) | (year%%400 == 0)), 0,1)
-  
-  DateFrame$Day[DateFrame$Day > 59] <- DateFrame$Day[DateFrame$Day > 59] + leapOffset[DateFrame$Day > 59]
-
-  DateFrame$DecYear <- decimal_date(dateTime)
-  DateFrame$MonthSeq <- ((year-1850)*12)+DateFrame$Month
-  return (DateFrame)
-  
-}
diff --git a/R/populateParameterINFO.r b/R/populateParameterINFO.r
deleted file mode 100644
index c21ecc6c81a5ff30aa7a3d40bd8317274f0edee4..0000000000000000000000000000000000000000
--- a/R/populateParameterINFO.r
+++ /dev/null
@@ -1,55 +0,0 @@
-#' Populate Parameter Information Columns
-#'
-#' Populates INFO data frame with additional user-supplied information concerning the measured parameter.
-#'
-#' @param INFO dataframe with value and code columns. Default is INFO
-#' @param parameterCd string USGS parameter code
-#' @param interactive logical Option for interactive mode.  If true, there is user interaction for error handling and data checks.
-#' @return INFO dataframe
-#' @export
-#' @examples
-#' #This example requires an internet connection to run
-#' INFO <- getNWISSiteInfo('01594440')
-#' parameterCd <- "01075"
-#' parameterData <- getNWISPcodeInfo(parameterCd)
-#' INFO$param.nm <- parameterData$parameter_nm
-#' INFO$param.units <- parameterData$parameter_units
-#' INFO$paramShortName <- parameterData$srsname
-#' INFO$paramNumber <- parameterData$parameter_cd
-#' INFO <- populateParameterINFO(parameterCd, INFO)
-populateParameterINFO <- function(parameterCd, INFO, interactive=TRUE){
-  if (nzchar(parameterCd)){
-    if(interactive){
-      cat("Your water quality data are for parameter number", INFO$paramNumber, "which has the name:'", INFO$param.nm, "'.\n")
-      cat("Typically you will want a shorter name to be used in graphs and tables. The suggested short name is:'", INFO$paramShortName, "'.\n")
-      cat("If you would like to change the short name, enter it here, otherwise just hit enter (no quotes):")
-      shortNameTemp <- readline()
-      if (nchar(shortNameTemp)>0) INFO$paramShortName <- shortNameTemp
-      cat("The units for the water quality data are: ", INFO$param.units, ".\n")
-      cat("It is helpful to set up a constiuent abbreviation when doing multi-constituent studies, enter a unique id (three or four characters should work something like tn or tp or NO3).\nIt is case sensitive.  Even if you don't feel you need an abbreviation you need to enter something (no quotes):\n")
-      INFO$constitAbbrev <- readline()
-    } else {
-      INFO$constitAbbrev <- INFO$paramShortName
-    }
-  } else {
-    if (interactive){
-      INFO$paramNumber <- NA
-      cat("Enter a long name for the water quality data (no quotes):\n")
-      INFO$param.nm <- readline()
-      cat("Enter a short name to be used in graphs and tables(no quotes):\n")
-      INFO$paramShortName <- readline()
-      cat("It is helpful to set up a constiuent abbreviation when doing multi-constituent studies, enter a unique id (three or four characters should work something like tn or tp or NO3).\nIt is case sensitive.  Even if you don't feel you need an abbreviation you need to enter something (no quotes):\n")
-      INFO$constitAbbrev <- readline()
-      cat("Enter the units of the water quality data(no quotes):\n")
-      INFO$param.units <- readline()
-    } else {
-      INFO$paramNumber <- NA
-      INFO$param.nm <- NA
-      INFO$paramShortName <- NA
-      INFO$constitAbbrev <- NA
-      INFO$param.units <- NA      
-    }
-  } 
-  
-  return(INFO)
-}
diff --git a/R/populateSampleColumns.r b/R/populateSampleColumns.r
deleted file mode 100644
index 38461e7169e75d75d986e7bd0142768b8ccd9a38..0000000000000000000000000000000000000000
--- a/R/populateSampleColumns.r
+++ /dev/null
@@ -1,32 +0,0 @@
-#' Populate Sample Columns
-#'
-#' Creates ConcAve and ConcLow based on Uncen. Removes any samples with NA values in ConcHigh
-#'
-#' @param rawData dataframe with dateTime, ConcLow, ConcHigh, Uncen
-#' @return Sample2 dataframe
-#' @export
-#' @examples
-#' dateTime <- c('1985-01-01', '1985-01-02', '1985-01-03')
-#' ConcLow <- c(1,2,0)
-#' ConcHigh <- c(1,2,3)
-#' Uncen <- c(1,1,0)
-#' dataInput <- data.frame(dateTime, ConcLow, ConcHigh, Uncen, stringsAsFactors=FALSE)
-#' Sample <- populateSampleColumns(dataInput)
-populateSampleColumns <- function(rawData){  # rawData is a dataframe with dateTime, ConcLow, ConcHigh, Uncen
-  Sample <- as.data.frame(matrix(ncol=3,nrow=length(rawData$dateTime)))
-  colnames(Sample) <- c('Date', 'ConcLow','ConcHigh')
-  Sample$Date <- rawData$dateTime
-  Sample$ConcLow <- rawData$ConcLow
-  Sample$ConcHigh <- rawData$ConcHigh
-  Sample$Uncen <- rawData$Uncen
-  Sample$ConcAve <- (Sample$ConcLow+Sample$ConcHigh)/2
-  Sample$ConcLow <- ifelse((rawData$ConcLow == 0.0 & rawData$Uncen == 0),NA,rawData$ConcLow)
-  
-  dateFrame <- populateDateColumns(rawData$dateTime)
-  Sample <- cbind(Sample, dateFrame[,-1])
-  
-  Sample$SinDY <- sin(2*pi*Sample$DecYear)
-  Sample$CosDY <- cos(2*pi*Sample$DecYear)
-  Sample2 <- subset(Sample, (!is.na(Sample$ConcHigh)))  # Was just ConcHigh.....
-  return (Sample2)  
-}
diff --git a/R/populateSiteINFO.r b/R/populateSiteINFO.r
deleted file mode 100644
index 6a57df2ae484237ee7927aca447b783353082596..0000000000000000000000000000000000000000
--- a/R/populateSiteINFO.r
+++ /dev/null
@@ -1,104 +0,0 @@
-#' Populate Site Information Columns
-#'
-#' Populates INFO data frame with additional user-supplied information. Also removes fields not related to WRTDS study.
-#'
-#' @param INFO dataframe with value and code columns
-#' @param siteNumber string USGS site number
-#' @param interactive logical Option for interactive mode.  If true, there is user interaction for error handling and data checks.
-#' @return INFO dataframe
-#' @export
-#' @examples
-#' #This example requires an internet connection to run
-#' INFO <- getNWISSiteInfo('01594440')
-#' siteNumber <- "01594440"
-#' siteINFO <- populateSiteINFO(INFO, siteNumber)
-populateSiteINFO <- function(INFO, siteNumber,interactive=TRUE){
-  if (nzchar(siteNumber)){
-    
-    if (!nzchar(INFO$site.no)) {
-      INFO$site.no <- siteNumber
-    }
-    
-    if (interactive){
-      cat("Your site for streamflow data is", as.character(INFO$site.no),".\n")
-      if (!nzchar(INFO$station.nm)){
-        cat("No station name was listed in the USGS site file for site: ", INFO$site.no, ". Please enter a station name here(no quotes): \n")
-        INFO$station.nm <- readline()
-      }
-      cat("Your site name is", INFO$station.nm,",")
-      cat("but you can modify this to a short name in a style you prefer. \nThis name will be used to label graphs and tables. \n")
-      cat("If you want the program to use the name given above, just do a carriage return, otherwise enter the preferred short name(no quotes):\n")
-      INFO$shortName <- readline()
-      if (!nzchar(INFO$shortName)) INFO$shortName <- INFO$station.nm
-      if (!nzchar(INFO$dec.lat.va) || !nzchar(INFO$dec.long.va)){
-        cat("No latitude or longitude was listed in the USGS site file for this site.\n")
-        cat("Please enter a latitude and longitude in decimal degrees, positive latitudes are north, negative are south, positive longitudes are east, \nnegative longitudes are west, so for example a site in the northeastern US might look like, 40.1, -83.2\nThese only need to be sufficiently accurate to place them on a map of the study area.\n\n")
-        cat("Latitude(no quotes):\n")
-        INFO$dec.lat.va <- readline()
-        cat("Longitude(no quotes):\n")
-        INFO$dec.long.va <- readline()
-      }
-      cat("The latitude and longitude of the site are: ",INFO$dec.lat.va, ", ", INFO$dec.long.va, "(degrees north and west).\n")
-      if (!nzchar(INFO$drain.area.va)){
-        cat("No drainage area was listed in the USGS site file for this site.\n")
-        cat("Please enter the drainage area, you can enter it in the units of your choice.\nEnter the area, then enter drainage area code, \n1 is square miles\n2 is square kilometers\n3 is acres\n4 is hectares.\n")
-        cat("Area(no quotes):\n")
-        INFO$drain.area.va <- readline()
-        INFO$drain.area.va <- as.numeric(INFO$drain.area.va)
-        cat("Unit Code (1-4, no quotes):")
-        qUnit <- readline()
-        qUnit <- as.numeric(qUnit)
-        conversionVector <- c(2.5899881, 1.0, 0.0040468564, 0.01)
-        INFO$drainSqKm <- INFO$drain.area.va * conversionVector[qUnit]
-      } else {
-        INFO$drain.area.va <- as.numeric(INFO$drain.area.va)
-        INFO$contrib.drain.area.va <- as.numeric(INFO$contrib.drain.area.va)
-        INFO$drainSqKm <- INFO$drain.area.va * 2.5899881
-      }    
-      cat("The drainage area at this site is ", INFO$drain.area.va, "square miles which is being stored as", INFO$drainSqKm, "square kilometers.\n")    
-    } else {
-      INFO$drain.area.va <- as.numeric(INFO$drain.area.va)
-      INFO$contrib.drain.area.va <- as.numeric(INFO$contrib.drain.area.va)
-      INFO$drainSqKm <- INFO$drain.area.va * 2.5899881      
-      INFO$shortName <- INFO$station.nm
-    }    
-  } else {
-    if (interactive){
-      cat("The program needs to know a site number or id, please enter that here (don't use quotes) - Enter to leave blank:")
-      INFO$site.no <- readline()
-      cat("Please enter a site name that will be used to label all graphs and tables(no quotes):\n")
-      INFO$shortName <- readline()
-      cat("Please enter a latitude and longitude in decimal degrees, positive latitudes are north, negative are south, positive longitudes are east, \nnegative longitudes are west, so for example a site in the northeastern US might look like, 40.1, -83.2\nThese only need to be sufficiently accurate to place them on a map of the study area.\n\n")
-      cat("Latitude(no quotes):\n")
-      INFO$dec.lat.va <- readline()
-      cat("Longitude(no quotes):\n")
-      INFO$dec.long.va <- readline()
-      INFO$dec.lat.va <- as.numeric(INFO$dec.lat.va)
-      INFO$dec.long.va <- as.numeric(INFO$dec.long.va)
-      cat("Please enter the drainage area, you can enter it in the units of your choice.\nEnter the area, then enter drainage area code, 1 is square miles, 2 is square kilometers, 3 is acres, and 4 is hectares.\n")
-      cat("Area(no quotes):\n")
-      INFO$drain.area.va <- readline()
-      INFO$drain.area.va <- as.numeric(INFO$drain.area.va)
-      cat("Unit Code (1-4, no quotes)\nrepresenting \n1: sq mi \n2: sq km \n3: sq m\n4: sq 100*km):")
-      qUnit <- readline()
-      qUnit <- as.numeric(qUnit)
-      conversionVector <- c(2.5899881, 1.0, 0.0040468564, 0.01)
-      INFO$drainSqKm <- INFO$drain.area.va * conversionVector[qUnit]
-      cat("The drainage area is being stored as", INFO$drainSqKm, "square kilometers.\n")
-    } else {
-      INFO$site.no <- NA
-      INFO$shortName <- NA
-      INFO$dec.lat.va <- NA
-      INFO$dec.long.va <- NA
-      INFO$drain.area.va <- NA
-      INFO$drainSqKm <- NA
-    }
-  }
-  if (interactive){
-    cat("It is helpful to set up a station abbreviation when doing multi-site studies, enter a unique id (three or four characters should work).\nIt is case sensitive.  Even if you don't feel you need an abbreviation for your site you need to enter something(no quotes):\n")
-    INFO$staAbbrev <- readline()
-  } else {
-    INFO$staAbbrev <- NA
-  }
-  return(INFO)  
-}
diff --git a/R/processQWData.r b/R/processQWData.r
deleted file mode 100644
index 329374effcd4cc93368afc6a1ea3aa95887cb3ca..0000000000000000000000000000000000000000
--- a/R/processQWData.r
+++ /dev/null
@@ -1,54 +0,0 @@
-#' Processing of USGS NWIS Water Quality Data
-#'
-#' Processes water quality portal data. This function looks at detection limit and detection 
-#' conditions to determine if a value is left censored or not. Censored values are given the qualifier
-#' "<".  The dataframe is also converted from a long to wide format.
-#' 
-#' @param data dataframe from Water Quality Portal
-#' @param pCode logical if TRUE, assume data came from a pCode search, if FALSE, characteristic name.
-#' @keywords data import USGS web service
-#' @return data dataframe with first column dateTime, and at least one qualifier and value columns
-#' (subsequent qualifier/value columns could follow depending on the number of parameter codes)
-#' @export
-#' @examples
-#' # These examples require an internet connection to run
-#' \dontrun{
-#' rawSample <- getWQPqwData('USGS-01594440','', '', '')
-#' rawSampleSelect <- processQWData(rawSample)
-#' }
-processQWData <- function(data,pCode=TRUE){
-
-  qualifier <- ifelse((data$ResultDetectionConditionText == "Not Detected" | 
-                    data$ResultDetectionConditionText == "Detected Not Quantified" |
-                    data$ResultMeasureValue < data$DetectionQuantitationLimitMeasure.MeasureValue),"<","")
-  
-  correctedData<-ifelse((nchar(qualifier)==0),data$ResultMeasureValue,data$DetectionQuantitationLimitMeasure.MeasureValue)
-  test <- data.frame(data$USGSPCode)
-  
-  test$dateTime <- data$ActivityStartDate
-  
-  originalLength <- nrow(test)
-  test$qualifier <- qualifier
-  test$value <- as.numeric(correctedData)
-  
-  test <- test[!is.na(test$dateTime),]
-  newLength <- nrow(test)
-  if (originalLength != newLength){
-    numberRemoved <- originalLength - newLength
-    warningMessage <- paste(numberRemoved, " rows removed because no date was specified", sep="")
-    warning(warningMessage)
-  }
-  
-  if (pCode){
-    colnames(test)<- c("USGSPCode","dateTime","qualifier","value")
-    newTimeVar <- "USGSPCode"
-  } else {
-    colnames(test)<- c("CharacteristicName","dateTime","qualifier","value")
-    newTimeVar <- "CharacteristicName"
-  }
-  
-  data <- suppressWarnings(reshape(test, idvar="dateTime", timevar = newTimeVar, direction="wide"))  
-  data$dateTime <- format(data$dateTime, "%Y-%m-%d")
-  data$dateTime <- as.Date(data$dateTime)
-  return(data)
-}
diff --git a/R/getNWISData.r b/R/readNWISdata.r
similarity index 57%
rename from R/getNWISData.r
rename to R/readNWISdata.r
index 98534ae9ae7081364fda19881f093e19904a97b6..0a7e96a0553708545675c912daf957bb1166cfca 100644
--- a/R/getNWISData.r
+++ b/R/readNWISdata.r
@@ -10,23 +10,37 @@
 #' @return retval dataframe 
 #' @export
 #' @examples
-#' dataTemp <- getNWISData(stateCd="OH",parameterCd="00010")
-#' dataTempUnit <- getNWISData(sites="03086500", service="iv", parameterCd="00010")
-getNWISData <- function(service="dv", ...){
+#' dataTemp <- readNWISdata(stateCd="OH",parameterCd="00010")
+#' dataTempUnit <- readNWISdata(sites="03086500", service="iv", parameterCd="00010")
+#' #Empty:
+#' multiSite <- readNWISdata(sites=c("04025000","04072150"), service="iv", parameterCd="00010")
+#' #Not empty:
+#' multiSite <- readNWISdata(sites=c("04025500","040263491"), service="iv", parameterCd="00060")
+readNWISdata <- function(service="dv", ...){
   
   matchReturn <- list(...)
   
-  values <- sapply(matchReturn, function(x) URLencode(as.character(paste(eval(x),collapse="",sep=""))))
+  values <- sapply(matchReturn, function(x) URLencode(as.character(paste(eval(x),collapse=",",sep=""))))
   
   urlCall <- paste(paste(names(values),values,sep="="),collapse="&")
   
+  if(service %in% c("dv","iv","gwlevels")){
+    format <- "waterml"
+  } else {
+    format <- "rdb1,1"
+  }
   
-  baseURL <- paste0("http://waterservices.usgs.gov/nwis/",service,"/?format=rdb&")
+  baseURL <- paste0("http://waterservices.usgs.gov/nwis/",service,"/?format=",format,"&")
   urlCall <- paste0(baseURL,urlCall)
+  
   if(service=="qwdata"){
     urlCall <- paste0(urlCall,"&siteOutput=expanded")
+    retval <- importRDB1(urlCall)
+  } else {
+
+    retval <- importWaterML1(urlCall, asDateTime = ("iv" == service))
   }
   
-  retval <- getRDB1Data(urlCall)
+  
   return(retval)
 }
diff --git a/R/readNWISdv.r b/R/readNWISdv.r
new file mode 100644
index 0000000000000000000000000000000000000000..256c8d7d6e1e9402b39588e1ee6a7d18770354d3
--- /dev/null
+++ b/R/readNWISdv.r
@@ -0,0 +1,39 @@
+#' Raw Data Import for USGS NWIS Data
+#'
+#' Imports data from NWIS web service. This function gets the data from here: \url{http://waterservices.usgs.gov/}
+#' A list of parameter codes can be found here: \url{http://help.waterdata.usgs.gov/codes-and-parameters/parameters}
+#' A list of statistic codes can be found here: \url{http://help.waterdata.usgs.gov/code/stat_code_query?fmt=html}
+#'
+#' @param siteNumber string USGS site number.  This is usually an 8 digit number. Multiple sites can be requested with a string vector.
+#' @param parameterCd string or vector of USGS parameter code.  This is usually a 5 digit number.
+#' @param startDate string starting date for data retrieval in the form YYYY-MM-DD.
+#' @param endDate string ending date for data retrieval in the form YYYY-MM-DD.
+#' @param statCd string USGS statistic code. This is usually 5 digits.  Daily mean (00003) is the default.
+#' @return data dataframe with agency, site, dateTime, value, and code columns
+#' @export
+#' @keywords data import USGS web service
+#' @examples
+#' siteNumber <- '04085427'
+#' startDate <- '2012-01-01'
+#' endDate <- '2012-06-30'
+#' pCode <- '00060'
+#' rawDailyQ <- readNWISdv(siteNumber,pCode, startDate, endDate)
+#' rawDailyQAndTempMeanMax <- readNWISdv(siteNumber,c('00010','00060'),
+#'        startDate, endDate, statCd=c('00001','00003'))
+#' rawDailyQAndTempMeanMax <- renameNWISColumns(rawDailyQAndTempMeanMax)
+#' rawDailyMultiSites<- readNWISdv(c("01491000","01645000"),c('00010','00060'),
+#'        startDate, endDate, statCd=c('00001','00003'))
+#' # Site with no data:
+#' x <- readNWISdv("10258500","00060", "2014-09-08", "2014-09-14")
+#' names(attributes(x))
+#' attr(x, "siteInfo")
+#' attr(x, "variableInfo")
+readNWISdv <- function (siteNumber,parameterCd,startDate="",endDate="",statCd="00003"){  
+  
+  url <- constructNWISURL(siteNumber,parameterCd,startDate,endDate,"dv",statCd=statCd)
+
+  data <- importWaterML1(url, asDateTime=FALSE)
+  data$dateTime <- as.Date(data$dateTime)
+
+  return (data)
+}
diff --git a/R/readNWISpCode.r b/R/readNWISpCode.r
new file mode 100644
index 0000000000000000000000000000000000000000..1664e17738373ba92e674db51f425604573f7588
--- /dev/null
+++ b/R/readNWISpCode.r
@@ -0,0 +1,56 @@
+#' USGS Parameter Data Retrieval
+#'
+#' Imports data from NWIS about measured parameter based on user-supplied parameter code.
+#' This function gets the data from here: \url{http://nwis.waterdata.usgs.gov/nwis/pmcodes}
+#'
+#' @param parameterCd vector of USGS parameter codes.  This is usually a 5 digit number.
+#' @keywords data import USGS web service
+#' @return parameterData dataframe with all information from the USGS about the particular parameter (usually code, name, short name, units, and CAS registry numbers)
+#' @export
+#' @examples
+#' # These examples require an internet connection to run
+#' paramINFO <- readNWISpCode(c('01075','00060','00931'))
+readNWISpCode <- function(parameterCd){
+ 
+  pcodeCheck <- all(nchar(parameterCd) == 5) & all(!is.na(suppressWarnings(as.numeric(parameterCd))))
+  
+  if(!pcodeCheck){
+    goodIndex <- which(parameterCd %in% parameterCdFile$parameter_cd)
+    if(length(goodIndex) > 0){
+      badPcode <- parameterCd[-goodIndex]
+    } else {
+      badPcode <- parameterCd
+    }
+    message("The following pCodes seem mistyped:",paste(badPcode,collapse=","))
+    parameterCd <- parameterCd[goodIndex]
+  }
+  
+  parameterCdFile <- parameterCdFile
+  
+  parameterData <- parameterCdFile[parameterCdFile$parameter_cd %in% parameterCd,]
+
+  if(nrow(parameterData) != length(parameterCd)){
+    
+    if(length(parameterCd) == 1){
+      url <- paste0("http://nwis.waterdata.usgs.gov/nwis/pmcodes/pmcodes?radio_pm_search=pm_search",
+                   "&pm_search=", parameterCd,
+                   "&format=rdb", "&show=parameter_group_nm",
+                   "&show=parameter_nm", "&show=casrn",
+                   "&show=srsname", "&show=parameter_units")
+      newData <- importRDB1(url,asDateTime = FALSE)
+    } else {
+      
+      #TODO: add else...
+      fullURL <- "http://nwis.waterdata.usgs.gov/nwis/pmcodes/pmcodes?radio_pm_search=param_group&pm_group=All+--+include+all+parameter+groups&format=rdb&show=parameter_group_nm&show=parameter_nm&show=casrn&show=srsname&show=parameter_units"
+      fullPcodeDownload <- importRDB1(fullURL)
+      newData <- fullPcodeDownload[fullPcodeDownload$parameter_cd %in% parameterCd,]
+      
+    }
+    return(newData)
+    
+  } else {
+    return(parameterData)
+  }
+  
+  
+}
diff --git a/R/retrieveNWISqwData.r b/R/readNWISqw.r
similarity index 61%
rename from R/retrieveNWISqwData.r
rename to R/readNWISqw.r
index 44a434511af5f3189b00b5306381d945017aa210..f35a567ad735d0c371a0b2dc487ab5049b61d70c 100644
--- a/R/retrieveNWISqwData.r
+++ b/R/readNWISqw.r
@@ -11,44 +11,47 @@
 #' @param expanded logical defaults to FALSE. If TRUE, retrieves additional information. Expanded data includes
 #' remark_cd (remark code), result_va (result value), val_qual_tx (result value qualifier code), meth_cd (method code),
 #' dqi_cd (data-quality indicator code), rpt_lev_va (reporting level), and rpt_lev_cd (reporting level type).
-#' @param interactive logical Option for interactive mode.  If true, there is user interaction for error handling and data checks.
+#' @param reshape logical. Will reshape the data if TRUE (default)
+#' @param tz string to set timezone attribute of datetime. Default is an empty quote, which converts the 
+#' datetimes to UTC (properly accounting for daylight savings times based on the data's provided tz_cd column).
+#' Possible values to provide are "America/New_York","America/Chicago", "America/Denver","America/Los_Angeles",
+#' "America/Anchorage","America/Honolulu","America/Jamaica","America/Managua","America/Phoenix", and "America/Metlakatla"
 #' @keywords data import USGS web service
 #' @return data dataframe with agency, site, dateTime, value, and code columns
 #' @export
 #' @import reshape2
-#' @seealso \code{\link{getWQPData}}, \code{\link{getWQPSites}}, 
-#' \code{\link{getWQPqwData}}, \code{\link{constructNWISURL}}
+#' @seealso \code{\link{readWQPdata}}, \code{\link{whatWQPsites}}, 
+#' \code{\link{readWQPqw}}, \code{\link{constructNWISURL}}
 #' @examples
-#' # These examples require an internet connection to run
 #' siteNumber <- c('04024430','04024000')
 #' startDate <- '2010-01-01'
 #' endDate <- ''
 #' pCodes <- c('34247','30234','32104','34220')
-#' rawNWISqwData <- getNWISqwData(siteNumber,pCodes,startDate,endDate)
-#' rawNWISqwDataExpand <- getNWISqwData(siteNumber,pCodes,startDate,endDate,expanded=TRUE)
-#' # To get data in Sample dataframe format:
-#' dataColumns <- grep("p\\d{5}",names(rawNWISqwData))
-#' remarkColumns <- grep("r\\d{5}",names(rawNWISqwData))
-#' totalColumns <-c(grep("sample_dt",names(rawNWISqwData)), dataColumns, remarkColumns)
-#' totalColumns <- totalColumns[order(totalColumns)]
-#' compressedData <- compressData(rawNWISqwData[,totalColumns])
-#' Sample <- populateSampleColumns(compressedData)
-getNWISqwData <- function (siteNumber,pCodes,startDate,endDate,expanded=FALSE,interactive=TRUE){  
+#' rawNWISqwData <- readNWISqw(siteNumber,pCodes,startDate,endDate)
+#' rawNWISqwDataExpandReshaped <- readNWISqw(siteNumber,pCodes,
+#'           startDate,endDate,expanded=TRUE)
+#' rawNWISqwDataExpand <- readNWISqw(siteNumber,pCodes,
+#'           startDate,endDate,expanded=TRUE,reshape=FALSE)
+readNWISqw <- function (siteNumber,pCodes,startDate="",endDate="",
+                        expanded=FALSE,reshape=TRUE,tz=""){  
   
-  url <- constructNWISURL(siteNumber,pCodes,startDate,endDate,"qw",expanded=expanded,interactive=interactive)
+  url <- constructNWISURL(siteNumber,pCodes,startDate,endDate,"qw",expanded=expanded)
   
-  data <- getRDB1Data(url,asDateTime=TRUE, qw=TRUE)
+  data <- importRDB1(url,asDateTime=TRUE, qw=TRUE, tz = tz)
+  originalHeader <- comment(data)
   
-  if(expanded){
+  if(reshape & expanded){
     columnsToMelt <- c("agency_cd","site_no","sample_dt","sample_tm",
                        "sample_end_dt","sample_end_tm","sample_start_time_datum_cd","tm_datum_rlbty_cd",
                        "parm_cd","startDateTime","endDateTime")
+    columnsToMelt <- columnsToMelt[columnsToMelt %in% names(data)]
     longDF <- melt(data, columnsToMelt)
     wideDF <- dcast(longDF, ... ~ variable + parm_cd )
     wideDF[,grep("_va_",names(wideDF))] <- sapply(wideDF[,grep("_va_",names(wideDF))], function(x) as.numeric(x))
     
     groupByPCode <- as.vector(sapply(pCodes, function(x) grep(x, names(wideDF)) ))
     data <- wideDF[,c(1:length(columnsToMelt)-1,groupByPCode)]
+    comment(data) <- originalHeader
     
   }
   
diff --git a/R/readNWISsite.r b/R/readNWISsite.r
new file mode 100644
index 0000000000000000000000000000000000000000..79f38914a74de2b8135edcfec068afdd529a6705
--- /dev/null
+++ b/R/readNWISsite.r
@@ -0,0 +1,22 @@
+#' USGS Site File Data Retrieval
+#'
+#' Imports data from the USGS site file. This function gets data from here: \url{http://waterservices.usgs.gov/}
+#'
+#' @param siteNumbers string USGS site number.  This is usually an 8 digit number
+#' @keywords data import USGS web service
+#' @return retval dataframe with all information found in the expanded site file
+#' @export
+#' @examples
+#' # These examples require an internet connection to run
+#' siteINFO <- readNWISsite('05114000')
+#' siteINFOMulti <- readNWISsite(c('05114000','09423350'))
+readNWISsite <- function(siteNumbers){
+  
+  siteNumber <- paste(siteNumbers,collapse=",")
+  urlSitefile <- paste("http://waterservices.usgs.gov/nwis/site/?format=rdb&siteOutput=Expanded&sites=",siteNumber,sep = "")
+  
+  data <- importRDB1(urlSitefile,asDateTime=FALSE)
+  
+  return(data)
+ 
+}
diff --git a/R/readNWISunit.r b/R/readNWISunit.r
new file mode 100644
index 0000000000000000000000000000000000000000..1837a57e5aa86be8f224e56a8291439680c9f48a
--- /dev/null
+++ b/R/readNWISunit.r
@@ -0,0 +1,136 @@
+#' Raw Data Import for Instantaneous USGS NWIS Data
+#'
+#' Imports data from NWIS web service. This function gets the data from here: \url{http://waterservices.usgs.gov/}
+#' A list of parameter codes can be found here: \url{http://nwis.waterdata.usgs.gov/nwis/pmcodes/}
+#' A list of statistic codes can be found here: \url{http://nwis.waterdata.usgs.gov/nwis/help/?read_file=stat&format=table}
+#'
+#' @param siteNumbers string USGS site number (or multiple sites).  This is usually an 8 digit number
+#' @param parameterCd string USGS parameter code.  This is usually a 5 digit number.
+#' @param startDate string starting date for data retrieval in the form YYYY-MM-DD.
+#' @param endDate string ending date for data retrieval in the form YYYY-MM-DD.
+#' @param tz string to set timezone attribute of datetime. Default is an empty quote, which converts the 
+#' datetimes to UTC (properly accounting for daylight savings times based on the data's provided tz_cd column).
+#' Possible values to provide are "America/New_York","America/Chicago", "America/Denver","America/Los_Angeles",
+#' "America/Anchorage","America/Honolulu","America/Jamaica","America/Managua","America/Phoenix", and "America/Metlakatla"
+#' @keywords data import USGS web service
+#' @return data dataframe with agency, site, dateTime, time zone, value, and code columns
+#' @export
+#' @examples
+#' siteNumber <- '05114000'
+#' parameterCd <- '00060'
+#' startDate <- "2014-10-10"
+#' endDate <- "2014-10-10"
+#' # These examples require an internet connection to run
+#' rawData <- readNWISuv(siteNumber,parameterCd,startDate,endDate)
+#' 
+#' timeZoneChange <- readNWISuv(c('04024430','04024000'),parameterCd,
+#'          "2013-11-03","2013-11-03")
+#' firstSite <- timeZoneChange[timeZoneChange$site_no == '04024430',]
+readNWISuv <- function (siteNumbers,parameterCd,startDate="",endDate="", tz=""){  
+  
+  url <- constructNWISURL(siteNumbers,parameterCd,startDate,endDate,"uv",format="xml")
+
+  data <- importWaterML1(url,asDateTime=TRUE,tz=tz)
+
+  return (data)
+}
+
+#' Reads peak flow data from NWISweb.
+#' 
+#' 
+#' 
+#' @param siteNumber string USGS site number.  This is usually an 8 digit number
+#' @param startDate string starting date for data retrieval in the form YYYY-MM-DD.
+#' @param endDate string ending date for data retrieval in the form YYYY-MM-DD.
+#' @export
+#' @examples
+#' siteNumber <- '01594440'
+#' data <- readNWISpeak(siteNumber)
+readNWISpeak <- function (siteNumber,startDate="",endDate=""){  
+  
+  # Doesn't seem to be a peak xml service
+  url <- constructNWISURL(siteNumber,NA,startDate,endDate,"peak")
+  
+  data <- importRDB1(url, asDateTime=FALSE)
+    
+  return (data)
+}
+
+#' Reads the current rating table for an active USGS streamgage.
+#' 
+#' 
+#' 
+#' @param siteNumber string USGS site number.  This is usually an 8 digit number
+#' @param type string can be "base", "corr", or "exsa"
+#' @export
+#' @examples
+#' siteNumber <- '01594440'
+#' data <- readNWISrating(siteNumber, "base")
+#' attr(data, "RATING")
+readNWISrating <- function (siteNumber,type="base"){  
+  
+  # No rating xml service 
+  url <- constructNWISURL(siteNumber,service="rating",ratingType = type)
+    
+  data <- importRDB1(url, asDateTime=FALSE)
+  
+  if(type == "base") {
+    Rat <- grep("//RATING ", comment(data), value=TRUE, fixed=TRUE)
+    Rat <- sub("# //RATING ", "", Rat)
+    Rat <- scan(text=Rat, sep=" ", what="")
+    attr(data, "RATING") <- Rat
+  }
+  
+  return (data)
+}
+
+#'Reads surface-water measurement data from NWISweb.
+#'
+#'
+#'
+#' @param siteNumber string USGS site number.  This is usually an 8 digit number
+#' @param startDate string starting date for data retrieval in the form YYYY-MM-DD.
+#' @param endDate string ending date for data retrieval in the form YYYY-MM-DD.
+#' @param tz string to set timezone attribute of datetime. Default is an empty quote, which converts the 
+#' datetimes to UTC (properly accounting for daylight savings times based on the data's provided tz_cd column).
+#' Possible values to provide are "America/New_York","America/Chicago", "America/Denver","America/Los_Angeles",
+#' "America/Anchorage","America/Honolulu","America/Jamaica","America/Managua","America/Phoenix", and "America/Metlakatla"
+#' @export
+#' @examples
+#' siteNumber <- '01594440'
+#' data <- readNWISmeas(siteNumber)
+readNWISmeas <- function (siteNumber,startDate="",endDate="", tz=""){  
+  
+  # Doesn't seem to be a WaterML1 format option
+  url <- constructNWISURL(siteNumber,NA,startDate,endDate,"meas")
+  
+  data <- importRDB1(url,asDateTime=FALSE,tz=tz)
+  
+  if("diff_from_rating_pc" %in% names(data)){
+    data$diff_from_rating_pc <- as.numeric(data$diff_from_rating_pc)
+  }
+  
+  return (data)
+}
+
+#' Reads groundwater level measurements from NWISweb.
+#'
+#' Reads groundwater level measurements from NWISweb. Mixed date/times come back from the service 
+#' depending on the year that the data was collected. 
+#'
+#' @param siteNumbers string USGS site number (or multiple sites).  This is usually an 8 digit number
+#' @param startDate string starting date for data retrieval in the form YYYY-MM-DD.
+#' @param endDate string ending date for data retrieval in the form YYYY-MM-DD.
+#' @export
+#' @examples
+#' siteNumber <- "434400121275801"
+#' data <- readNWISgwl(siteNumber, '','')
+#' sites <- c("434400121275801", "375907091432201")
+#' data2 <- readNWISgwl(sites, '','')
+readNWISgwl <- function (siteNumbers,startDate="",endDate=""){  
+  
+  url <- constructNWISURL(siteNumbers,NA,startDate,endDate,"gwlevels",format="wml1")
+  data <- importWaterML1(url,asDateTime=FALSE)
+  return (data)
+}
+
diff --git a/R/getWQPData.r b/R/readWQPdata.R
similarity index 87%
rename from R/getWQPData.r
rename to R/readWQPdata.R
index f68b40791620da1a2ef684d79a879d10e1f15bfc..6ef6b17c56fb9130d8c15fbe15eb1d7e612ea404 100644
--- a/R/getWQPData.r
+++ b/R/readWQPdata.R
@@ -11,9 +11,9 @@
 #' @examples
 #' \dontrun{
 #' nameToUse <- "pH"
-#' pHData <- getWQPData(siteid="USGS-04024315",characteristicName=nameToUse)
+#' pHData <- readWQPdata(siteid="USGS-04024315",characteristicName=nameToUse)
 #' }
-getWQPData <- function(...){
+readWQPdata <- function(...){
   
   matchReturn <- list(...)
   
@@ -32,11 +32,11 @@ getWQPData <- function(...){
   
   
   baseURL <- "http://www.waterqualitydata.us/Result/search?"
-  urlCall <- paste(baseURL,
+  urlCall <- paste0(baseURL,
                    urlCall,
-                   "&mimeType=tsv",sep = "")
+                   "&mimeType=tsv")
 
-  retVal <- readWQPData(urlCall)
+  retVal <- importWQP(urlCall,FALSE)
   return(retVal)
   
 }
\ No newline at end of file
diff --git a/R/getRawQWData.r b/R/readWQPqw.r
similarity index 63%
rename from R/getRawQWData.r
rename to R/readWQPqw.r
index 3991e17956bb626e77db8bf6d73c5ec2a46a0060..75bbf6f559f092b4ca846c932971184d86801362 100644
--- a/R/getRawQWData.r
+++ b/R/readWQPqw.r
@@ -12,24 +12,23 @@
 #' Leaving this blank will return all of the measured values during the specified time period.
 #' @param startDate string starting date for data retrieval in the form YYYY-MM-DD.
 #' @param endDate string ending date for data retrieval in the form YYYY-MM-DD.
-#' @param interactive logical Option for interactive mode.  If true, there is user interaction for error handling and data checks.
 #' @keywords data import USGS web service
 #' @return retval dataframe raw data returned from the Water Quality Portal. Additionally, a POSIXct dateTime column is supplied for 
 #' start and end times.
 #' @export
 #' @import RCurl
-#' @seealso \code{\link{getWQPData}}, \code{\link{getWQPSites}}, 
-#' \code{\link{getNWISqwData}}, and \code{\link{readWQPData}}
+#' @seealso \code{\link{readWQPdata}}, \code{\link{whatWQPsites}}, 
+#' \code{\link{readNWISqw}}, and \code{\link{importWQP}}
 #' @examples
-#' # These examples require an internet connection to run
 #' \dontrun{
-#' rawPcode <- getWQPqwData('USGS-01594440','01075', '1985-01-01', '1985-03-31')
-#' rawCharacteristicName <- getWQPqwData('WIDNR_WQX-10032762','Specific conductance', '', '')
+#' rawPcode <- readWQPqw('USGS-01594440','01075', '', '')
+#' rawCharacteristicName <- readWQPqw('WIDNR_WQX-10032762','Specific conductance', '', '')
 #' }
-getWQPqwData <- function(siteNumber,parameterCd,startDate,endDate,interactive=TRUE){
+readWQPqw <- function(siteNumber,parameterCd,startDate="",endDate=""){
 
-  url <- constructNWISURL(siteNumber,parameterCd,startDate,endDate,"wqp",interactive=interactive)
-  retVal <- readWQPData(url)
+  url <- constructWQPURL(siteNumber,parameterCd,startDate,endDate)
+  retVal <- importWQP(url,TRUE)
+  attr(retVal, "url") <- url
   return(retVal)
   
 }
diff --git a/R/removeDuplicates.r b/R/removeDuplicates.r
deleted file mode 100644
index 00687cc10f8a30b2168b0192ca262daa8c7e8f0f..0000000000000000000000000000000000000000
--- a/R/removeDuplicates.r
+++ /dev/null
@@ -1,17 +0,0 @@
-#' Remove Duplicates
-#'
-#' Removes observations from the data frame Sample when the observation has the identical date and value as another observation
-#'
-#' @param Sample dataframe with at least DecYear and ConcHigh, default name is Sample
-#' @export
-#' @return Sample1 dataframe
-#' @examples
-#' DecYear <- c('1985.01', '1985.01', '1985.02', '1985.02', '1985.03')
-#' ConcHigh <- c(1,2,3,3,5)
-#' dataInput <- data.frame(DecYear, ConcHigh, stringsAsFactors=FALSE)
-#' removeDuplicates(dataInput)
-removeDuplicates <- function(Sample) {  
-  Sample1 <- Sample[!duplicated(Sample[c("DecYear","ConcHigh")]),]
-  
-  return(Sample1)
-}
diff --git a/R/renameColumns.R b/R/renameColumns.R
index cff16c8f4b37ac712b58b07d4ce51a32296affcc..480c805f0b2e769481b7fb0e8da79790ff29c445 100644
--- a/R/renameColumns.R
+++ b/R/renameColumns.R
@@ -1,69 +1,109 @@
 #' renameColumns
 #'
-#' Rename columns coming back from NWIS data retrievals
+#' Rename columns coming back from NWIS data retrievals.  Daily and unit value columns
+#' have names derived from their data descriptor, parameter, and statistic codes. This
+#' function reads information from the header and the arguments in the call to
+#' rename those columns.
 #'
-#' @param rawData dataframe returned from retrieval functions
-#' @keywords data import USGS web service
-#' @return rawData dataframe with improved column names
+#' @param rawData the daily- or unit-values dataset retrieved from NWISweb.
+#' @param p00010 the base name for parameter code 00010.
+#' @param p00045 the base name for parameter code 00045.
+#' @param p00060 the base name for parameter code 00060.
+#' @param p00065 the base name for parameter code 00065.
+#' @param p00095 the base name for parameter code 00095.
+#' @param p00300 the base name for parameter code 00300.
+#' @param p00400 the base name for parameter code 00400.
+#' @param p62611 the base name for parameter code 62611.
+#' @param p63680 the base name for parameter code 63680.
+#' @param p72019 the base name for parameter code 72019.
+#' @param \dots named arguments for the base name for any other parameter code. The
+#'form of the name must be like pXXXXX, where XXXXX is the parameter code.
+#' @return A dataset like \code{data} with selected columns renamed.
+#' @note The following statistics codes are converted by \code{renameNWISColumns}. See
+#'\url{http://help.waterdata.usgs.gov/stat_cd_nm} for information about USGS statistics codes.
+#'\describe{
+#'\item{00001}{Maximum value, suffix: Max}
+#'\item{00002}{Minimum value, suffix: Min}
+#'\item{00003}{Mean value, no suffix}
+#'\item{00006}{Sum of values, suffix: Sum}
+#'\item{00007}{Modal value, suffix: Mode}
+#'\item{00008}{Median value, suffix: Median}
+#'\item{00011}{Instantaneous Value, suffix: Inst}
+#'\item{00012}{Equivalent mean value, suffix: EqMean}
+#'\item{00021}{Tidal high-high value, suffix: HiHiTide}
+#'\item{00022}{Tidal low-high value, suffix: LoHiTide}
+#'\item{00023}{Tidal high-low value, suffix: HiLoTide}
+#'\item{00024}{Tidal low-low value, suffix: LoLoTide}
+#'}
+#' @seealso \code{\link{readNWISdv}}, \code{\link{readNWISuv}}
+#' @keywords manip IO
 #' @export
 #' @examples
-#' # This example requires an internet connection to run
-#' siteNumber <- '05114000' 
-#' rawData <- getNWISdvData(siteNumber,c("00010","00060","00300"),
-#'           "2001-01-01","2002-01-01",statCd=c("00001","00003"))
-#' rawData <- renameColumns(rawData)
-#' date <- "2014-10-10"
-#' rawData2 <- getNWISunitData(siteNumber,c("00010","00060"),date,date)
-#' rawData2 <- renameColumns(rawData2)
-#' head(rawData2)
-renameColumns <- function(rawData){
+#' siteWithTwo <- '01480015'
+#' startDate <- "2012-09-01"
+#' endDate <- "2012-10-01"
+#' url2 <- constructNWISURL(siteWithTwo, "00060",startDate,endDate,'dv')
+#' twoResults <- importWaterML1(url2,TRUE)
+#' twoResults <- renameNWISColumns(twoResults)
+renameNWISColumns <- function(rawData, p00010="Wtemp", p00045="Precip",
+                          p00060="Flow", p00065="GH", p00095="SpecCond", p00300="DO",
+                          p00400="pH", p62611="GWL", p63680="Turb", p72019="WLBLS",
+                          ...){
   
-  columnNames <- names(rawData)
+  Cnames <- names(rawData)
   
-  dataCols <- columnNames["X" == substring(columnNames, 1, 1)]
-  dataCol_cds <- dataCols["cd" == substring(dataCols, nchar(dataCols)-1, nchar(dataCols))]
-  dataCol_names <- dataCols[!(dataCols %in% dataCol_cds)]
+  Conv <- list(...)
+  Conv$p00010 <- p00010
+  Conv$p00060 <- p00060
+  Conv$p00045 <- p00045
+  Conv$p00065 <- p00065
+  Conv$p00095 <- p00095
+  Conv$p00300 <- p00300
+  Conv$p00400 <- p00400
+  Conv$p62611 <- p62611
+  Conv$p63680 <- p63680
+  Conv$p72019 <- p72019
   
-  pCodes <- sapply(strsplit(dataCol_names, "_"), function(x) x[2])
-  statCd <- sapply(strsplit(dataCol_names, "_"), function(x) x[3])
+  Conv$s00001 <- "Max"
+  Conv$s00002 <- "Min"
+  Conv$s00003 <- ""
+  Conv$s00006 <- "Sum"
+  Conv$s00007 <- "Mode"
+  Conv$s00008 <- "Median"
+  Conv$s00011<- "Inst" # Why is this in dv?
+  Conv$s00012<- "EqMean"
+  Conv$s00021<- "HiHiTide"
+  Conv$s00022<- "LoHiTide"
+  Conv$s00023<- "HiLoTide"
+  Conv$s00024<- "LoLoTide"
+
+  dataColumns <- grep("X_", Cnames)
   
-  pcodeINFO <- getNWISPcodeInfo(pCodes,interactive=FALSE)
-  multipleCodes <- anyDuplicated(pCodes)
-  
-  statCd <- sub("00001", "_Max", statCd)
-  statCd <- sub("00002", "_Min", statCd)
-  statCd <- sub("00003", "", statCd) # Leave mean blank
-  statCd <- sub("00011", "", statCd) # Also leaving blank
-  
-  DDnum <- sapply(strsplit(dataCol_names, "_"), function(x) x[1])
-  DDnum <- gsub("X","",DDnum)
-  
-  if (!any(duplicated(pCodes))){
-    dataColNames <- pcodeINFO$parameter_nm[which(pcodeINFO$parameter_cd %in% pCodes)]    
-#     dataColNames <- pcodeINFO$srsname[which(pcodeINFO$parameter_cd %in% pCodes)]  
-    dataColNames <- paste(dataColNames,statCd,sep="")
-  } else {
-    dataColNames <- rep(NA,length(dataCol_names))    
-    for (i in 1:length(dataCol_names)){
-      dataColNames[i] <- pcodeINFO$parameter_nm[which(pcodeINFO$parameter_cd %in% pCodes[i])]
-#       dataColNames[i] <- pcodeINFO$srsname[which(pcodeINFO$parameter_cd %in% pCodes[i])]
-      if((!(pCodes[i] %in% duplicated(pCodes))) && (pCodes[i] != pCodes[anyDuplicated(pCodes)])){
-        dataColNames[i] <- paste(dataColNames[i],statCd[i],sep="")
-      } else {
-        dataColNames[i] <- paste(dataColNames[i],statCd[i],"_",DDnum[i],sep="")        
+  for (i in dataColumns){
+    chunks <- strsplit(Cnames[i], "_")[[1]]
+    
+    #Pcodes:
+    for(j in 1:length(chunks)){
+      if(paste0("p",chunks[j]) %in% names(Conv)){
+        chunks[j] <- as.character(Conv[paste0("p",chunks[j])])
+        Cnames[i] <- paste(chunks, collapse ="_")
+        break
+      }
+    }
+    #Stat codes:
+    for(j in 1:length(chunks)){
+      if(paste0("s",chunks[j]) %in% names(Conv)){
+        chunks[j] <- as.character(Conv[paste0("s",chunks[j])])
+        chunks <- chunks[chunks != ""]
+        Cnames[i] <- paste(chunks, collapse ="_")
+        break
       }
-      
     }
-    
   }
-  dataColCDS <- paste(dataColNames, "_cd")
-  columnNames[which(columnNames %in% dataCol_names)] <- dataColNames
-  columnNames[which(columnNames %in% dataCol_cds)] <- dataColCDS
-  
-  columnNames <- gsub("[$,. ]","_",columnNames)
-  columnNames <- gsub("__","_",columnNames)
   
-  names(rawData) <- columnNames
+  Cnames <- gsub("X_","",Cnames)
+
+  names(rawData) <- Cnames
   
   return(rawData)
 }
diff --git a/R/retrieveNWISData.r b/R/retrieveNWISData.r
deleted file mode 100644
index 1c8428ac5d4bb19d4fca113262ab1a33a4dc7333..0000000000000000000000000000000000000000
--- a/R/retrieveNWISData.r
+++ /dev/null
@@ -1,46 +0,0 @@
-#' Raw Data Import for USGS NWIS Data
-#'
-#' Imports data from NWIS web service. This function gets the data from here: \url{http://waterservices.usgs.gov/}
-#' A list of parameter codes can be found here: \url{http://help.waterdata.usgs.gov/codes-and-parameters/parameters}
-#' A list of statistic codes can be found here: \url{http://help.waterdata.usgs.gov/code/stat_code_query?fmt=html}
-#'
-#' @param siteNumber string USGS site number.  This is usually an 8 digit number. Multiple sites can be requested with a string vector.
-#' @param parameterCd string or vector of USGS parameter code.  This is usually an 5 digit number..
-#' @param startDate string starting date for data retrieval in the form YYYY-MM-DD.
-#' @param endDate string ending date for data retrieval in the form YYYY-MM-DD.
-#' @param statCd string USGS statistic code. This is usually 5 digits.  Daily mean (00003) is the default.
-#' @param format string, can be 'tsv' or 'xml', and is only applicable for daily and unit value requests.  'tsv' returns results faster, but there is a possiblitiy that an incomplete file is returned without warning. XML is slower, 
-#' but will offer a warning if the file was incomplete (for example, if there was a momentary problem with the internet connection). It is possible to safely use the 'tsv' option, 
-#' but the user must carefully check the results to see if the data returns matches what is expected. The default is 'tsv'.
-#' @param interactive logical Option for interactive mode.  If true, there is user interaction for error handling and data checks.
-#' @return data dataframe with agency, site, dateTime, value, and code columns
-#' @export
-#' @keywords data import USGS web service
-#' @examples
-#' # These examples require an internet connection to run
-#' siteNumber <- '04085427'
-#' startDate <- '2012-01-01'
-#' endDate <- '2012-06-30'
-#' pCode <- '00060'
-#' rawDailyQ <- getNWISdvData(siteNumber,pCode, startDate, endDate)
-#' rawDailyTemperature <- getNWISdvData(siteNumber,'00010', 
-#'        startDate, endDate, statCd='00001')
-#' rawDailyTemperatureTSV <- getNWISdvData(siteNumber,'00010', 
-#'        startDate, endDate, statCd='00001',format='tsv')
-#' rawDailyQAndTempMeanMax <- getNWISdvData(siteNumber,c('00010','00060'),
-#'        startDate, endDate, statCd=c('00001','00003'))
-#' rawDailyMultiSites<- getNWISdvData(c("01491000","01645000"),c('00010','00060'),
-#'        startDate, endDate, statCd=c('00001','00003'))
-getNWISdvData <- function (siteNumber,parameterCd,startDate,endDate,statCd="00003",format="tsv",interactive=TRUE){  
-  
-  url <- constructNWISURL(siteNumber,parameterCd,startDate,endDate,"dv",statCd=statCd,format=format,interactive=interactive)
-  
-  if (format == "xml") {
-    data <- getWaterML1Data(url)
-    data$datetime <- as.Date(data$datetime)
-  } else {
-    data <- getRDB1Data(url,asDateTime=FALSE)
-  }
-  
-  return (data)
-}
diff --git a/R/retrieveUnitNWISData.r b/R/retrieveUnitNWISData.r
deleted file mode 100644
index 2969337d5abfed6f1903bf54b765816b7a5af9ad..0000000000000000000000000000000000000000
--- a/R/retrieveUnitNWISData.r
+++ /dev/null
@@ -1,38 +0,0 @@
-#' Raw Data Import for Instantaneous USGS NWIS Data
-#'
-#' Imports data from NWIS web service. This function gets the data from here: \url{http://waterservices.usgs.gov/}
-#' A list of parameter codes can be found here: \url{http://nwis.waterdata.usgs.gov/nwis/pmcodes/}
-#' A list of statistic codes can be found here: \url{http://nwis.waterdata.usgs.gov/nwis/help/?read_file=stat&format=table}
-#'
-#' @param siteNumber string USGS site number.  This is usually an 8 digit number
-#' @param parameterCd string USGS parameter code.  This is usually an 5 digit number.
-#' @param startDate string starting date for data retrieval in the form YYYY-MM-DD.
-#' @param endDate string ending date for data retrieval in the form YYYY-MM-DD.
-#' @param interactive logical Option for interactive mode.  If true, there is user interaction for error handling and data checks.
-#' @param format string, can be "tsv" or "xml", and is only applicable for daily and unit value requests.  "tsv" returns results faster, but there is a possiblitiy that an incomplete file is returned without warning. XML is slower, 
-#' but will offer a warning if the file was incomplete (for example, if there was a momentary problem with the internet connection). It is possible to safely use the "tsv" option, 
-#' but the user must carefully check the results to see if the data returns matches what is expected. The default is therefore "xml". 
-#' @keywords data import USGS web service
-#' @return data dataframe with agency, site, dateTime, time zone, value, and code columns
-#' @export
-#' @examples
-#' siteNumber <- '05114000'
-#' parameterCd <- '00060'
-#' startDate <- "2014-10-10"
-#' endDate <- "2014-10-10"
-#' # These examples require an internet connection to run
-#' rawData <- getNWISunitData(siteNumber,parameterCd,startDate,endDate)
-#' summary(rawData)
-#' rawData2 <- getNWISunitData(siteNumber,parameterCd,startDate,endDate,"tsv")
-#' summary(rawData2)
-getNWISunitData <- function (siteNumber,parameterCd,startDate,endDate,format="xml",interactive=TRUE){  
-  
-  url <- constructNWISURL(siteNumber,parameterCd,startDate,endDate,"uv",format=format,interactive=interactive)
-  if (format == "xml") {
-    data <- getWaterML1Data(url)
-  } else {
-    data <- getRDB1Data(url,asDateTime=TRUE)
-  }
-
-  return (data)
-}
diff --git a/R/tabbedDataRetrievals.R b/R/tabbedDataRetrievals.R
index cda474b6c1b5807c860ba9ea86119e12dbaaa1e4..c626c89ff14ca5a897f7d8aa1d2786149c410be6 100644
--- a/R/tabbedDataRetrievals.R
+++ b/R/tabbedDataRetrievals.R
@@ -3,8 +3,8 @@
 #' \tabular{ll}{
 #' Package: \tab dataRetrieval\cr
 #' Type: \tab Package\cr
-#' Version: \tab 1.4.0\cr
-#' Date: \tab 2014-09-16\cr
+#' Version: \tab 1.5.0\cr
+#' Date: \tab 2014-11-16\cr
 #' License: \tab Unlimited for this package, dependencies have more restrictive licensing.\cr
 #' Copyright: \tab This software is in the public domain because it contains materials
 #' that originally came from the United States Geological Survey, an agency of
@@ -26,7 +26,7 @@ NULL
 
 #' List of USGS parameter codes
 #'
-#' Complete list of USGS parameter codes as of September 25, 2013.
+#' Complete list of USGS parameter codes as of November 7, 2014.
 #'
 #' @name parameterCdFile
 #' @docType data
diff --git a/R/whatNWISData.r b/R/whatNWISData.r
new file mode 100644
index 0000000000000000000000000000000000000000..eb15e93ed3abdded98143f57155790b9729ae59f
--- /dev/null
+++ b/R/whatNWISData.r
@@ -0,0 +1,84 @@
+#' USGS data availability
+#'
+#' Imports a table of available parameters, period of record, and count. See \url{http://waterservices.usgs.gov/rest/Site-Service.html}
+#' for more information.
+#'
+#' @param siteNumbers string vector of USGS site number or multiple sites.
+#' @param service vector string. Options are "all", or one or many of "dv"(daily values),
+#'      "uv","rt", or "iv"(unit values), "qw"(water-quality),"sv"(site visits),"pk"(peak measurements),
+#'      "gw"(groundwater levels), "ad" (sites included in USGS Annual Water Data Reports), 
+#'      "aw" (sites monitored by the USGS Active Groundwater Level Network), "id" (historical 
+#'      instantaneous values)
+#' @param parameterCd string vector of valid parameter codes to return. Defaults to "all" which will not perform a filter.
+#' @param statCd string vector of all statistic codes to return. Defaults to "all" which will not perform a filter.
+#' @keywords data import USGS web service
+#' @return retval dataframe with all information found in the expanded site file
+#' @export
+#' @import RCurl
+#' @import lubridate
+#' @examples
+#' availableData <- whatNWISdata('05114000')
+#' # To find just unit value ('instantaneous') data:
+#' uvData <- whatNWISdata('05114000',service="uv")
+#' uvDataMulti <- whatNWISdata(c('05114000','09423350'),service=c("uv","dv"))
+#' siteNumbers <- c("01491000","01645000")
+#' flowAndTemp <- whatNWISdata(siteNumbers, parameterCd=c("00060","00010"))
+whatNWISdata <- function(siteNumbers,service="all",parameterCd="all",statCd="all"){
+  
+  siteNumber <- paste(siteNumbers,collapse=",")
+  
+  if(!("all" %in% service)){
+    # "ad" was listed twice in the choices; once is enough for match.arg
+    service <- match.arg(service, c("dv","uv","qw","ad","id","pk","sv","gw","aw","all","iv","rt"), several.ok = TRUE)
+  }
+  
+  if(!("all" %in% parameterCd)){
+    parameterCdCheck <- all(nchar(parameterCd) == 5) & all(!is.na(suppressWarnings(as.numeric(parameterCd))))
+    
+    if(!parameterCdCheck){
+      goodIndex <- which(parameterCd %in% parameterCdFile$parameter_cd)
+      if(length(goodIndex) > 0){
+        badparameterCd <- parameterCd[-goodIndex]
+      } else {
+        badparameterCd <- parameterCd
+      }
+      # spaces around the pasted codes so the message reads correctly
+      message("The following parameterCds seem mistyped: ",paste(badparameterCd,collapse=","), " and will be ignored.")
+      parameterCd <- parameterCd[goodIndex]
+    }
+  }
+  
+  urlSitefile <- paste("http://waterservices.usgs.gov/nwis/site/?format=rdb&seriesCatalogOutput=true&sites=",siteNumber,sep = "")
+ 
+  SiteFile <- importRDB1(urlSitefile, asDateTime = FALSE)
+  
+  headerInfo <- comment(SiteFile)
+  
+  parameterCds <- unique(SiteFile$parm_cd)
+  
+  parameterCdFile <- parameterCdFile
+  
+  parameterCdINFO <- parameterCdFile[parameterCdFile$parameter_cd %in% parameterCds,]
+  SiteFile <- merge(SiteFile,parameterCdINFO,by.x="parm_cd" ,by.y="parameter_cd",all=TRUE)
+  
+  
+  if(!("all" %in% service)){
+    SiteFile <- SiteFile[SiteFile$data_type_cd %in% service,]
+  }
+  if(!("all" %in% statCd)){
+    SiteFile <- SiteFile[SiteFile$stat_cd %in% statCd,]
+  }
+  if(!("all" %in% parameterCd)){
+    SiteFile <- SiteFile[SiteFile$parm_cd %in% parameterCd,]
+  }
+  
+  
+  SiteFile$begin_date <- as.Date(parse_date_time(SiteFile$begin_date, c("Ymd", "mdY", "Y!")))
+  SiteFile$end_date <- as.Date(parse_date_time(SiteFile$end_date, c("Ymd", "mdY", "Y!")))
+  
+  comment(SiteFile) <- headerInfo
+  attr(SiteFile, "url") <- urlSitefile
+  attr(SiteFile, "queryTime") <- Sys.time()
+  
+  return(SiteFile)
+
+}
diff --git a/R/whatNWISsites.R b/R/whatNWISsites.R
new file mode 100644
index 0000000000000000000000000000000000000000..9fefc9dca1f0718da2ad66ff9e41dd727b1d23df
--- /dev/null
+++ b/R/whatNWISsites.R
@@ -0,0 +1,79 @@
+#' Site Data Import from NWIS
+#'
+#' Returns a list of sites from the NWIS web service. This function gets the data from: \url{http://waterservices.usgs.gov/rest/Site-Test-Tool.html}.
+#' Arguments to the function should be based on \url{http://waterservices.usgs.gov/rest/Site-Service.html#Service}
+#' Mapper format is used
+#'
+#' @param \dots see \url{http://waterservices.usgs.gov/rest/Site-Service.html#Service} for a complete list of options
+#' @keywords data import NWIS web service
+#' @return retval dataframe with agency_cd, site_no, station_nm, site_tp_cd, dec_lat_va, and dec_long_va.
+#' @export
+#' @import XML
+#' @examples
+#' siteListPhos <- whatNWISsites(stateCd="OH",parameterCd="00665")
+whatNWISsites <- function(...){
+  
+  matchReturn <- list(...)
+
+  # collapse with "," (not ""): NWIS expects comma-separated multi-values,
+  # otherwise c("00010","00060") would be sent as "0001000060"
+  values <- sapply(matchReturn, function(x) URLencode(as.character(paste(eval(x),collapse=",",sep=""))))
+  
+  urlCall <- paste(paste(names(values),values,sep="="),collapse="&")
+  
+  baseURL <- "http://waterservices.usgs.gov/nwis/site/?format=mapper&"
+  urlCall <- paste(baseURL,
+                   urlCall,sep = "")
+  
+  if(url.exists(urlCall)){
+    h <- basicHeaderGatherer()
+    doc = tryCatch({
+      returnedDoc <- getURI(urlCall, headerfunction = h$update)
+      if(h$value()["Content-Type"] == "text/xml;charset=UTF-8"){
+        xmlTreeParse(returnedDoc, getDTD = FALSE, useInternalNodes = TRUE)
+      } else {
+        message(paste("URL caused an error:", urlCall))
+        message("Content-Type=",h$value()["Content-Type"])
+        return(NA)
+      }   
+      
+    }, warning = function(w) {
+      message(paste("URL caused a warning:", urlCall))
+      message(w)
+    }, error = function(e) {
+      message(paste("URL does not seem to exist:", urlCall))
+      message(e)
+      return(NA)
+    }) 
+    
+    doc <- xmlRoot(doc)
+    numChunks <- xmlSize(doc)
+    for(i in 1:numChunks){
+      # index with i: using doc[[1]] re-read the first chunk every pass,
+      # dropping all sites past the first chunk of the mapper response
+      chunk <- doc[[i]]
+      site_no <- as.character(xpathApply(chunk, "site/@sno"))
+      station_nm <- as.character(xpathApply(chunk, "site/@sna"))
+      site_tp_cd <- as.character(xpathApply(chunk, "site/@cat"))
+      dec_lat_va <- as.numeric(xpathApply(chunk, "site/@lat"))
+      dec_long_va <- as.numeric(xpathApply(chunk, "site/@lng"))
+      agency_cd <- as.character(xpathApply(chunk, "site/@agc"))
+      
+      df <- data.frame(agency_cd, site_no, station_nm, site_tp_cd, 
+                       dec_lat_va, dec_long_va, stringsAsFactors=FALSE) 
+      
+      if(1==i){
+        retval <- df
+      } else {
+        retval <- rbind(retval, df)
+      }
+    }
+    
+    retval <- retval[!duplicated(retval),]
+    
+    retval$queryTime <- Sys.time()
+    attr(retval, "url") <- urlCall
+    attr(retval, "queryTime") <- Sys.time()
+    
+    return(retval)
+  } else {
+    message("URL caused an error:", urlCall)
+  }
+}
diff --git a/R/getWQPSites.R b/R/whatWQPsites.R
similarity index 94%
rename from R/getWQPSites.R
rename to R/whatWQPsites.R
index 1d3d2d3f952630286e57bbd72cbe7eab006efbcd..a027b6e23132127e822f1da611ad8cee40f62634 100644
--- a/R/getWQPSites.R
+++ b/R/whatWQPsites.R
@@ -10,11 +10,12 @@
 #' @export
 #' @examples
 #' \dontrun{
-#' site1 <- getWQPSites(siteid="USGS-01594440")
+#' site1 <- whatWQPsites(siteid="USGS-01594440")
+#' 
 #' type <- "Stream"
-#' sites <- getWQPSites(statecode="US:55",countycode="US:55:025",siteType=type)
+#' sites <- whatWQPsites(statecode="US:55",countycode="US:55:025",siteType=type)
 #' }
-getWQPSites <- function(...){
+whatWQPsites <- function(...){
 
   matchReturn <- list(...)
   
diff --git a/R/padVariable.r b/R/zeroPad.r
similarity index 84%
rename from R/padVariable.r
rename to R/zeroPad.r
index 0f2b04f588985f10084c3cc48bdfb80815a6f252..9aa4d4616df8bf665274f0d539e8d436346e95ae 100644
--- a/R/padVariable.r
+++ b/R/zeroPad.r
@@ -9,10 +9,10 @@
 #' @export
 #' @examples
 #' pCode <- '10'
-#' correctPCode <- padVariable(pCode,5)
+#' correctPCode <- zeroPad(pCode,5)
 #' pCodes <- c('100','1000','0','12345','1565465465465465')
-#' correctPCodes <- padVariable(pCodes,5)
-padVariable <- function(x,padTo){
+#' correctPCodes <- zeroPad(pCodes,5)
+zeroPad <- function(x,padTo){
   if(padTo <= 1) return(x)
   numDigits <- nchar(x)
   padding <- padTo-numDigits
diff --git a/README.md b/README.md
index bd1a1199e18fe3ecb60aa9c82540bc994ad44eb6..e357fec34db1d9ec51883b7f4c843c4b3733f855 100644
--- a/README.md
+++ b/README.md
@@ -5,36 +5,43 @@ Linux: [![travis](https://travis-ci.org/USGS-R/dataRetrieval.svg?branch=master)]
 Windows: [![Build status](https://ci.appveyor.com/api/projects/status/msanha92b500grr7?svg=true)](https://ci.appveyor.com/project/ldecicco-USGS/dataretrieval-787)
 
 
+Retrieval functions for USGS and EPA hydrologic and water quality data.
 
-R package source for data retrieval specifically for the EGRET R package:
+## Function Overview
 
-Please visit the EGRET wiki for more information:
-[EGRET Wiki](https://github.com/USGS-R/EGRET/wiki)
+Web service retrieval functions:
 
-`dataRetrieval`
-=============
+|Function | Inputs | Description | 
+| -------------| ------------------------|:-------------|
+|`readNWISdata` | `...`, service | NWIS data using user-specified queries |
+|`readNWISdv` | `Common 3`, parameter code, statCd | NWIS daily data with `Common` query |
+|`readNWISqw` | `Common 3`, parameter code, expanded | NWIS water quality data with `Common` query |
+|`readNWISuv` | `Common 3`, parameter code | NWIS instantaneous data with `Common` query |
+|`readNWISpCode` | parameterCd | NWIS parameter code information |
+|`readNWISgwl` | `Common 3` | NWIS groundwater level data with `Common` query |
+|`readNWISpeak` | `Common 3` | NWIS peak flow data with `Common` query |
+|`readNWISmeas` | `Common 3` | NWIS surface-water measurement data with `Common` query |
+|`readNWISrating` | siteNumber, type | NWIS rating table for an active USGS streamgage |
+|`readNWISsite` | siteNumber | NWIS site information |
+|`whatNWISsites` | `...` | NWIS site search using user-specified queries |
+|`whatNWISdata` | siteNumber, service | NWIS data availability, including period of record and count |
+|`readWQPdata` | `...` | WQP data using user-specified queries |
+|`readWQPqw` | `Common 3` | WQP data with `Common 3` query and either parameter code or characteristic name|
+|`whatWQPsites` | `...` | WQP site search using user-specified queries |
 
-R package for data retrieval of water quality and hydrology data. This package was designed to integrate with the `EGRET` package. Please visit the `EGRET` wiki for more information:
-[EGRET Wiki](https://github.com/USGS-R/EGRET/wiki)
+* `Common 3` = siteNumber, startDate, endDate
 
-## Function Overview
+Moving `EGRET` specific functions to `EGRET` (version 2.0.0 and greater):
 
 
-|Information Source | Site Query | Meta Data | Data |
+|Information Source | Meta Data | Data |
 | -------------| -------------| ------------- |:-------------|
-|NWIS | `getNWISSites` | `getNWISInfo`* | `getNWISData` |
-| | `getNWISDataAvailability` | `getNWISSiteInfo` | `getNWISDaily`* |
-| | | `getNWISPcodeInfo` | `getNWISSample`* |
-| | | | `getNWISdvData` |
-| | | | `getNWISunitData` |
-| | | | `getNWISqwData` |
-| Water Quality Portal | `getWQPSites` | `getWQPInfo`* | `getWQPSample`* |
-| | | | `getWQPqwData` |
-| | | | `getWQPData` |
-| User-supplied files | | `getUserInfo`* | `getUserDaily`* |
-| | | | `getUserSample`* |
+|NWIS | `getNWISInfo` | `getNWISSample` |
+|  |  | `getNWISDaily` |
+| Water Quality Portal  | `getWQPInfo`| `getWQPSample` |
+| User-supplied files | `getUserInfo` | `getUserDaily`|
+| | | `getUserSample` |
 
-`* Designed specifically to provide dataframes suitable for use by the `EGRET` package.
 
 
 ##Subscribe
@@ -51,13 +58,21 @@ To install the dataRetrieval package, you must be using R 3.0 or greater and run
 	           "http://cran.us.r-project.org"),
 	  dependencies=TRUE)
 
+
 ##Version updates
 ---------------
-###dataRetrieval 1.4.0-in developement
-Changed naming convention:
+
+###dataRetrieval 1.5.0
+
+* Changing naming convention one last time. Migrating `EGRET` specific retrievals to the `EGRET` package.
+* Added back WaterML2 parsing tool
+* Added specific groundwater, rating, peak, and surfacewater measurement functions
 
 
 
+###dataRetrieval 1.4.0
+Changed naming convention:
+
 |Original Name | New Name |
 | ------------- |:-------------|
 |getDVData | getNWISDaily |
@@ -80,13 +95,13 @@ Changed naming convention:
 * Updated getNWISSiteInfo to retrieve multiple site file datasets at once using a vector of siteNumbers as input argument.
 * Updated error-handling for Web service calls. More information is returned when errors happen
 * Added some basic processing to Water Quality Portal raw data retrievals. Date columns are returned as Date objects, value columns are numeric, and a column is created from the date/time/timezone columns that is POSIXct.
-* Added very generalized NWIS and WQP retrieval functions (getNWISData, getNWISSites, getGeneralWQPData, and getWQPSites) which allow the user to use any argument available on the Web service platform.
+* Added very generalized NWIS and WQP retrieval functions (getNWISData, getNWISSites, getGeneralWQPData, and whatWQPsites) which allow the user to use any argument available on the Web service platform.
 
 
 ###dataRetrieval 1.3.2
 
-* Deprecated getQWData, updated getWQPData to take either parameter code or characteristic name.
-* Changed the name of raw data retrievals to: getNWISqwData, getNWISunitData, getNWISdvData, and getWQPqwData (from: getNWISqwData, retrieveUnitNWISData, retrieveNWISData, getRawQWData)
+* Deprecated getQWData, updated readWQPdata to take either parameter code or characteristic name.
+* Changed the name of raw data retrievals to: getNWISqwData, getNWISunitData, getNWISdvData, and getWQPqwData (from: retrieveNWISqwData, retrieveUnitNWISData, retrieveNWISData, getRawQWData)
 * Added NA warning to getDVData function
 * Updated mergeReport to allow for Sample data with different measurements taken on the same day
 
diff --git a/data/parameterCdFile.RData b/data/parameterCdFile.RData
index c7fe1a628834a13776eb098978425e22a49998d5..d1f0f73e566ed055f304bad8c67b87312c49eb26 100644
Binary files a/data/parameterCdFile.RData and b/data/parameterCdFile.RData differ
diff --git a/inst/doc/dataRetrieval.R b/inst/doc/dataRetrieval.R
index 3d1418521722a21f1bcfb6bee2535609a2cb32b1..34f5bfa8712569abd5a67710bb6ffceb5a792409 100644
--- a/inst/doc/dataRetrieval.R
+++ b/inst/doc/dataRetrieval.R
@@ -23,26 +23,19 @@ addSpace <- function(x) ifelse(x != "1", "[5pt]","")
 #  library(dataRetrieval)
 #  # Choptank River near Greensboro, MD
 #  siteNumber <- "01491000"
-#  ChoptankInfo <- getNWISSiteInfo(siteNumber)
+#  ChoptankInfo <- readNWISsite(siteNumber)
 #  parameterCd <- "00060"
 #  
 #  #Raw daily data:
-#  rawDailyData <- getNWISdvData(siteNumber,parameterCd,
-#                        "1980-01-01","2010-01-01")
-#  # Data compiled for EGRET analysis
-#  Daily <- getNWISDaily(siteNumber,parameterCd,
+#  rawDailyData <- readNWISdv(siteNumber,parameterCd,
 #                        "1980-01-01","2010-01-01")
 #  
 #  # Sample data Nitrate:
 #  parameterCd <- "00618"
-#  Sample <- getNWISSample(siteNumber,parameterCd,
+#  qwData <- readNWISqw(siteNumber,parameterCd,
 #                        "1980-01-01","2010-01-01")
 #  
-#  # Metadata on site and nitrate:
-#  INFO <- getNWISInfo(siteNumber,parameterCd)
-#  
-#  # Merge discharge and nitrate data to one dataframe:
-#  Sample <- mergeReport()
+#  pCode <- readNWISpCode(parameterCd)
 #  
 
 ## ----tableParameterCodes, echo=FALSE,results='asis'-------
@@ -63,7 +56,7 @@ print(xtable(data.df,
       )
 
 
-## ----tableParameterCodesDataRetrieval---------------------
+## ----tableParameterCodesDataRetrieval, echo=TRUE, eval=TRUE----
 library(dataRetrieval)
 parameterCdFile <-  parameterCdFile
 names(parameterCdFile)
@@ -86,17 +79,21 @@ print(xtable(data.df,label="tab:stat",
 
 ## ----getSite, echo=TRUE-----------------------------------
 siteNumbers <- c("01491000","01645000") 
-siteINFO <- getNWISSiteInfo(siteNumbers)
+siteINFO <- readNWISsite(siteNumbers)
 
 ## ----siteNames2, echo=TRUE--------------------------------
-siteINFO$station.nm
+siteINFO$station_nm
+
+## ----siteNames3, echo=TRUE, eval=FALSE--------------------
+#  comment(siteINFO)
 
 ## ----getSiteExtended, echo=TRUE---------------------------
 # Continuing from the previous example:
-# This pulls out just the daily data:
+# This pulls out just the daily, mean data:
+
+dailyDataAvailable <- whatNWISdata(siteNumbers,
+                    service="dv", statCd="00003")
 
-dailyDataAvailable <- getNWISDataAvailability(siteNumbers,
-                    type="dv")
 
 
 ## ----tablegda, echo=FALSE,results='asis'------------------
@@ -104,11 +101,11 @@ tableData <- with(dailyDataAvailable,
       data.frame( 
       siteNumber= site_no,
       srsname=srsname, 
-      startDate=as.character(startDate), 
-      endDate=as.character(endDate), 
-      count=as.character(count),
+      startDate=as.character(begin_date), 
+      endDate=as.character(end_date), 
+      count=as.character(count_nu),
       units=parameter_units,
-      statCd = statCd,
+#       statCd = stat_cd,
       stringsAsFactors=FALSE)
       )
 
@@ -117,7 +114,7 @@ tableData$units[which(tableData$units == "uS/cm @25C")] <- "$\\mu$S/cm @25C"
 
 
 print(xtable(tableData,label="tab:gda",
-    caption="Daily mean data availabile at the Choptank River near Greensboro, MD. [Some columns deleted for space considerations]"),
+    caption="Reformatted version of output from \\texttt{whatNWISdata} function for the Choptank River near Greensboro, MD, and from Seneca Creek at Dawsonville, MD from the daily values service [Some columns deleted for space considerations]"),
        caption.placement="top",
        size = "\\footnotesize",
        latex.environment=NULL,
@@ -130,7 +127,7 @@ print(xtable(tableData,label="tab:gda",
 ## ----label=getPCodeInfo, echo=TRUE------------------------
 # Using defaults:
 parameterCd <- "00618" 
-parameterINFO <- getNWISPcodeInfo(parameterCd)
+parameterINFO <- readNWISpCode(parameterCd)
 colnames(parameterINFO)
 
 ## ----siteNames, echo=TRUE---------------------------------
@@ -138,13 +135,13 @@ parameterINFO$parameter_nm
 
 ## ----label=getNWISDaily, echo=TRUE, eval=TRUE-------------
 
-# Continuing with our Choptank River example
+# Choptank River near Greensboro, MD:
 siteNumber <- "01491000"
 parameterCd <- "00060"  # Discharge
-startDate <- ""  # Will request earliest date
-endDate <- "" # Will request latest date
+startDate <- "2009-10-01"  
+endDate <- "2012-09-30" 
 
-discharge <- getNWISdvData(siteNumber, 
+discharge <- readNWISdv(siteNumber, 
                     parameterCd, startDate, endDate)
 names(discharge)
 
@@ -155,83 +152,109 @@ statCd <- c("00001","00003")  # Mean and maximum
 startDate <- "2012-01-01"
 endDate <- "2012-05-01"
 
-temperatureAndFlow <- getNWISdvData(siteNumber, parameterCd, 
+temperatureAndFlow <- readNWISdv(siteNumber, parameterCd, 
         startDate, endDate, statCd=statCd)
 
 
 ## ----label=renameColumns, echo=TRUE-----------------------
 names(temperatureAndFlow)
 
-temperatureAndFlow <- renameColumns(temperatureAndFlow)
+temperatureAndFlow <- renameNWISColumns(temperatureAndFlow)
 names(temperatureAndFlow)
 
 ## ----getNWIStemperaturePlot, echo=TRUE, fig.cap="Temperature and discharge plot of Choptank River in 2012.",out.width='1\\linewidth',out.height='1\\linewidth',fig.show='hold'----
+variableInfo <- attr(temperatureAndFlow, "variableInfo")
+siteInfo <- attr(temperatureAndFlow, "siteInfo")
+
 par(mar=c(5,5,5,5)) #sets the size of the plot window
 
-with(temperatureAndFlow, plot(
-  datetime, Temperature_water_degrees_Celsius_Max_01,
-  xlab="Date",ylab="Max Temperature [C]"
-  ))
+plot(temperatureAndFlow$dateTime, temperatureAndFlow$Wtemp_Max,
+  ylab=variableInfo$parameter_desc[1],xlab="" )
 par(new=TRUE)
-with(temperatureAndFlow, plot(
-  datetime, Discharge_cubic_feet_per_second,
+plot(temperatureAndFlow$dateTime, temperatureAndFlow$Flow,
   col="red",type="l",xaxt="n",yaxt="n",xlab="",ylab="",axes=FALSE
-  ))
+  )
 axis(4,col="red",col.axis="red")
-mtext(expression(paste("Mean Discharge [ft"^"3","/s]",
-                       sep="")),side=4,line=3,col="red")
-title(paste(siteINFO$station.nm[1],"2012",sep=" "))
-legend("topleft", c("Max Temperature", "Mean Discharge"), 
+mtext(variableInfo$parameter_desc[2],side=4,line=3,col="red")
+title(paste(siteInfo$station_nm,"2012"))
+legend("topleft", variableInfo$param_units, 
        col=c("black","red"),lty=c(NA,1),pch=c(1,NA))
 
-## ----label=getNWISUnit, echo=TRUE-------------------------
+## ----label=readNWISuv, echo=TRUE--------------------------
 
 parameterCd <- "00060"  # Discharge
 startDate <- "2012-05-12" 
 endDate <- "2012-05-13" 
-dischargeToday <- getNWISunitData(siteNumber, parameterCd, 
+dischargeUnit <- readNWISuv(siteNumber, parameterCd, 
         startDate, endDate)
 
-## ----dischargeData, echo=FALSE----------------------------
-head(dischargeToday)
+## ----dischargeData, echo=TRUE-----------------------------
+head(dischargeUnit)
 
-## ----label=getQW, echo=TRUE-------------------------------
+## ----label=getQW, echo=TRUE, eval=TRUE--------------------
  
 # Dissolved Nitrate parameter codes:
 parameterCd <- c("00618","71851")
 startDate <- "1985-10-01"
 endDate <- "2012-09-30"
 
-dissolvedNitrate <- getNWISqwData(siteNumber, parameterCd, 
-      startDate, endDate, expanded=TRUE)
-names(dissolvedNitrate)
+dfLong <- readNWISqw(siteNumber, parameterCd, 
+      startDate, endDate, expanded=TRUE,reshape=FALSE)
+
+# Or the wide return:
+# dfWide <- readNWISqw(siteNumber, parameterCd, 
+#       startDate, endDate, expanded=TRUE, reshape=TRUE)
+
+
+## ----qwmeta, echo=TRUE, eval=TRUE-------------------------
+
+comment(dfLong)
+
+
+## ----gwlexample, echo=TRUE, eval=TRUE---------------------
+siteNumber <- "434400121275801"
+groundWater <- readNWISgwl(siteNumber)
+
+names(groundWater)
 
 
+## ----peakexample, echo=TRUE, eval=TRUE--------------------
+siteNumber <- '01594440'
+peakData <- readNWISpeak(siteNumber)
+
+
+names(peakData)
+
+
+## ----ratingexample, echo=TRUE, eval=TRUE------------------
+ratingData <- readNWISrating(siteNumber, "base")
+attr(ratingData, "RATING")
+
+names(ratingData)
+
+
+## ----surfexample, echo=TRUE, eval=TRUE--------------------
+surfaceData <- readNWISmeas(siteNumber)
+
+names(surfaceData)
 
-## ----getQWtemperaturePlot, echo=TRUE, fig.cap=paste(parameterINFO$parameter_nm, "at", siteINFO$station.nm[1])----
-with(dissolvedNitrate, plot(
-  startDateTime, result_va_00618,
-  xlab="Date",ylab = paste(parameterINFO$srsname,
-      "[",parameterINFO$parameter_units,"]")
-  ))
-title(siteINFO$station.nm[1])
 
 ## ----label=geturl, echo=TRUE, eval=FALSE------------------
 #  # Dissolved Nitrate parameter codes:
 #  pCode <- c("00618","71851")
 #  startDate <- "1964-06-11"
 #  endDate <- "2012-12-18"
-#  url_qw <- constructNWISURL(siteNumber,pCode,startDate,endDate,'qw')
+#  url_qw <- constructNWISURL(siteNumber,pCode,startDate,endDate,"qw")
 #  url_dv <- constructNWISURL(siteNumber,"00060",startDate,endDate,
-#                             'dv',statCd="00003")
-#  url_uv <- constructNWISURL(siteNumber,"00060",startDate,endDate,'uv')
+#                             "dv",statCd="00003")
+#  url_uv <- constructNWISURL(siteNumber,"00060",startDate,endDate,"uv")
 
 ## ----label=getQWData, echo=TRUE, eval=FALSE---------------
-#  specificCond <- getWQPqwData('WIDNR_WQX-10032762',
+#  specificCond <- readWQPqw('WIDNR_WQX-10032762',
 #                  'Specific conductance','2011-05-01','2011-09-30')
 
 ## ----siteSearch-------------------------------------------
-sites <- getNWISSites(bBox="-83.0,36.5,-81.0,38.5", 
+sites <- whatNWISsites(bBox="-83.0,36.5,-81.0,38.5", 
                       parameterCd="00010,00060",
                       hasDataTypeCd="dv")
 
@@ -239,7 +262,8 @@ names(sites)
 nrow(sites)
 
 ## ----dataExample------------------------------------------
-dischargeWI <- getNWISData(stateCd="WI",
+dischargeWI <- readNWISdata(service="dv",
+                           stateCd="WI",
                            parameterCd="00060",
                            drainAreaMin="50",
                            statCd="00003")
@@ -248,154 +272,69 @@ nrow(dischargeWI)
 
 ## ----NJChloride, eval=FALSE-------------------------------
 #  
-#  sitesNJ <- getWQPSites(statecode="US:34",
+#  sitesNJ <- whatWQPsites(statecode="US:34",
 #                         characteristicName="Chloride")
 #  
 
 ## ----phData, eval=FALSE-----------------------------------
 #  
-#  dataPH <- getWQPData(statecode="US:55",
+#  dataPH <- readWQPdata(statecode="US:55",
 #                   characteristicName="pH")
 #  
 
-## ----ThirdExample-----------------------------------------
-parameterCd <- "00618"
-INFO <- getNWISInfo(siteNumber,parameterCd, interactive=FALSE)
+## ----meta1, eval=TRUE-------------------------------------
 
-## ----WQPInfo, eval=FALSE----------------------------------
-#  parameterCd <- "00618"
-#  INFO_WQP <- getWQPInfo("USGS-01491000",parameterCd)
+attr(dischargeWI, "url")
 
-## ----addInfo, eval=TRUE, echo=TRUE------------------------
+attr(dischargeWI, "queryTime")
 
-INFO$riverInfo <- "Major tributary of the Chesapeake Bay"
-INFO$GreensboroPopulation <- 1931
 
+## ----meta2, eval=TRUE-------------------------------------
 
-## ----firstExample-----------------------------------------
-siteNumber <- "01491000"
-startDate <- "2000-01-01"
-endDate <- "2013-01-01"
-# This call will get NWIS (ft3/s) data , and convert it to m3/s:
-Daily <- getNWISDaily(siteNumber, "00060", startDate, endDate)
+names(attributes(dischargeWI))
 
-## ----colNamesDaily, echo=FALSE,results='asis'-------------
-ColumnName <- c("Date", "Q", "Julian","Month","Day","DecYear","MonthSeq","Qualifier","i","LogQ","Q7","Q30")
-Type <- c("Date", "number", "number","integer","integer","number","integer","string","integer","number","number","number")
-Description <- c("Date", "Discharge in m$^3$/s", "Number of days since January 1, 1850", "Month of the year [1-12]", "Day of the year [1-366]", "Decimal year", "Number of months since January 1, 1850", "Qualifying code", "Index of days, starting with 1", "Natural logarithm of Q", "7 day running average of Q", "30 day running average of Q")
-Units <- c("date", "m$^3$/s","days", "months","days","years","months", "character","days","numeric","m$^3$/s","m$^3$/s")
 
-DF <- data.frame(ColumnName,Type,Description,Units)
-
-print(xtable(DF, caption="Daily dataframe",label="tab:DailyDF1"),
-       caption.placement="top",
-       size = "\\footnotesize",
-       latex.environment=NULL,
-       sanitize.text.function = function(x) {x},
-       sanitize.colnames.function =  bold.colHeaders,
-       sanitize.rownames.function = addSpace
-       
-      )
+## ----meta3, eval=TRUE-------------------------------------
 
+siteInfo <- attr(dischargeWI, "siteInfo")
+head(siteInfo)
 
-## ----secondExample----------------------------------------
-parameterCd <- "00618"
-Sample <-getNWISSample(siteNumber,parameterCd,
-      startDate, endDate)
+variableInfo <- attr(dischargeWI, "variableInfo")
 
-## ----STORET,echo=TRUE,eval=FALSE--------------------------
-#  site <- 'WIDNR_WQX-10032762'
-#  characteristicName <- 'Specific conductance'
-#  Sample <-getWQPSample(site,characteristicName,
-#        startDate, endDate)
 
-## ----label=tab:exampleComplexQW, echo=FALSE, eval=TRUE,results='asis'----
-cdate <- c("2003-02-15","2003-06-30","2004-09-15","2005-01-30","2005-05-30","2005-10-30")
-rdp <- c("", "<","<","","","")
-dp <- c(0.02,0.01,0.005,NA,NA,NA)
-rpp <- c("", "","<","","","")
-pp <- c(0.5,0.3,0.2,NA,NA,NA)
-rtp <- c("","","","","<","<")
-tp <- c(NA,NA,NA,0.43,0.05,0.02)
 
-DF <- data.frame(cdate,rdp,dp,rpp,pp,rtp,tp,stringsAsFactors=FALSE)
-
-xTab <- xtable(DF, caption="Example data",digits=c(0,0,0,3,0,3,0,3),label="tab:exampleComplexQW")
-
-print(xTab,
-       caption.placement="top",
-       size = "\\footnotesize",
-       latex.environment=NULL,
-       sanitize.colnames.function =  bold.colHeaders,
-       sanitize.rownames.function = addSpace
-      )
-
-
-## ----thirdExample,echo=FALSE------------------------------
-  compressedData <- compressData(DF)
-  Sample <- populateSampleColumns(compressedData)
-
-## ----thirdExampleView,echo=TRUE---------------------------
-  Sample
-
-## ----openDaily, eval = FALSE------------------------------
-#  fileName <- "ChoptankRiverFlow.txt"
-#  filePath <-  "C:/RData/"
-#  Daily <-getFileDaily(filePath,fileName,
-#                      separator="\t")
-
-## ----openSample, eval = FALSE-----------------------------
-#  fileName <- "ChoptankRiverNitrate.csv"
-#  filePath <-  "C:/RData/"
-#  Sample <-getUserSample(filePath,fileName,
-#                                  separator=",")
+## ----meta5, eval=TRUE, eval=FALSE-------------------------
+#  comment(peakData)
+#  
+#  #Which is equivalent to:
+#  # attr(peakData, "comment")
 
-## ----openSample2, eval = FALSE----------------------------
-#  fileName <- "ChoptankPhosphorus.txt"
-#  filePath <-  "C:/RData/"
-#  Sample <-getUserSample(filePath,fileName,
-#                                  separator="\t")
+## ----meta6, eval=TRUE, eval=TRUE--------------------------
+comment(peakData)[c(1:15,58:66)]
 
-## ----mergeExample-----------------------------------------
-siteNumber <- "01491000"
-parameterCd <- "00631"  # Nitrate
-startDate <- "2000-01-01"
-endDate <- "2013-01-01"
-
-Daily <- getNWISDaily(siteNumber, "00060", startDate, endDate)
-Sample <- getNWISSample(siteNumber,parameterCd, startDate, endDate)
-Sample <- mergeReport(Daily,Sample)
-names(Sample)
 
 ## ----helpFunc,eval = FALSE--------------------------------
-#  ?removeDuplicates
-
-## ----rawFunc,eval = TRUE----------------------------------
-removeDuplicates
+#  ?readNWISpCode
 
 ## ----seeVignette,eval = FALSE-----------------------------
 #  vignette(dataRetrieval)
 
 ## ----installFromCran,eval = FALSE-------------------------
-#  install.packages("dataRetrieval",
-#  repos=c("http://usgs-r.github.com","http://cran.us.r-project.org"),
-#  dependencies=TRUE,
-#  type="both")
+#  install.packages("dataRetrieval")
 
 ## ----openLibraryTest, eval=FALSE--------------------------
 #  library(dataRetrieval)
 
 ## ----label=getSiteApp, echo=TRUE--------------------------
-availableData <- getNWISDataAvailability(siteNumber)
-dailyData <- availableData["dv" == availableData$service,]
-dailyData <- dailyData["00003" == dailyData$statCd,]
+availableData <- whatNWISdata(siteNumber, "dv")
+dailyData <- availableData["00003" == availableData$stat_cd,]
 
 tableData <- with(dailyData, 
       data.frame(
         shortName=srsname, 
-        Start=startDate, 
-        End=endDate, 
-        Count=count,
+        Start=begin_date, 
+        End=end_date, 
+        Count=count_nu,
         Units=parameter_units)
       )
 tableData
diff --git a/inst/doc/dataRetrieval.Rnw b/inst/doc/dataRetrieval.Rnw
index bc52720f90eaacf7ffe93f134999ff849f5be8b5..fe19498fbf7f4831c25fb27dcaf1d6fd5c5f5079 100644
--- a/inst/doc/dataRetrieval.Rnw
+++ b/inst/doc/dataRetrieval.Rnw
@@ -1,8 +1,8 @@
 %\VignetteIndexEntry{Introduction to the dataRetrieval package}
 %\VignetteEngine{knitr::knitr}
 %\VignetteDepends{}
-%\VignetteSuggests{xtable,EGRET}
-%\VignetteImports{zoo, XML, RCurl, reshape2,lubridate}
+%\VignetteSuggests{xtable}
+%\VignetteImports{XML, RCurl, reshape2,lubridate,httr,plyr}
 %\VignettePackage{dataRetrieval}
 
 \documentclass[a4paper,11pt]{article}
@@ -142,8 +142,8 @@ library(knitr)
 %------------------------------------------------------------
 \title{The dataRetrieval R package}
 %------------------------------------------------------------
-\author[1]{Laura De Cicco}
-\author[1]{Robert Hirsch}
+\author[1]{Laura A. De Cicco}
+\author[1]{Robert M. Hirsch}
 \affil[1]{United States Geological Survey}
 
 
@@ -164,7 +164,7 @@ addSpace <- function(x) ifelse(x != "1", "[5pt]","")
 
 \noindent{\huge\textsf{\textbf{The dataRetrieval R package}}}
 
-\noindent\textsf{By Laura De Cicco and Robert Hirsch}
+\noindent\textsf{By Laura A. De Cicco and Robert M. Hirsch}
 
 \noindent\textsf{\today}
 
@@ -181,55 +181,108 @@ addSpace <- function(x) ifelse(x != "1", "[5pt]","")
 %------------------------------------------------------------
 \section{Introduction to dataRetrieval}
 %------------------------------------------------------------ 
-The dataRetrieval package was created to simplify the process of loading hydrologic data into the R environment. It has been specifically designed to work seamlessly with the EGRET R package: Exploration and Graphics for RivEr Trends. See: \url{https://github.com/USGS-R/EGRET/wiki} or \url{http://dx.doi.org/10.3133/tm4A10} for information on EGRET. EGRET is designed to provide analysis of water quality data sets using the Weighted Regressions on Time, Discharge and Season (WRTDS) method as well as analysis of discharge trends using robust time-series smoothing techniques.  Both of these capabilities provide both tabular and graphical analyses of long-term data sets.
+The dataRetrieval package was created to simplify the process of loading hydrologic data into the R environment. It is designed to retrieve the major data types of U.S. Geological Survey (USGS) hydrologic data that are available on the Web, as well as data from the Water Quality Portal (WQP), which currently houses water quality data from the Environmental Protection Agency (EPA), U.S. Department of Agriculture (USDA), and USGS. Direct USGS data is obtained from a service called the National Water Information System (NWIS). A lot of useful information about NWIS can be obtained here:
 
-
-The dataRetrieval package is designed to retrieve many of the major data types of U.S. Geological Survey (USGS) hydrologic data that are available on the Web. Users may also load data from other sources (text files, spreadsheets) using dataRetrieval.  Section \ref{sec:genRetrievals} provides examples of how one can obtain raw data from USGS sources on the Web and load them into dataframes within the R environment.  The functionality described in section \ref{sec:genRetrievals} is for general use and is not tailored for the specific uses of the EGRET package.  The functionality described in section \ref{sec:EGRETdfs} is tailored specifically to obtaining input from the Web and structuring it for use in the EGRET package.  The functionality described in section \ref{sec:userFiles} is for converting hydrologic data from user-supplied files and structuring it specifically for use in the EGRET package.
+\url{http://help.waterdata.usgs.gov/}
 
 For information on getting started in R and installing the package, see (\ref{sec:appendix1}): Getting Started. Any use of trade, firm, or product names is for descriptive purposes only and does not imply endorsement by the U.S. Government.
 
-A quick workflow for major dataRetrieval functions:
+A quick workflow for USGS dataRetrieval functions:
 
 <<workflow, echo=TRUE,eval=FALSE>>=
 library(dataRetrieval)
 # Choptank River near Greensboro, MD
 siteNumber <- "01491000" 
-ChoptankInfo <- getNWISSiteInfo(siteNumber)
+ChoptankInfo <- readNWISsite(siteNumber)
 parameterCd <- "00060"
 
 #Raw daily data:
-rawDailyData <- getNWISdvData(siteNumber,parameterCd,
-                      "1980-01-01","2010-01-01")
-# Data compiled for EGRET analysis
-Daily <- getNWISDaily(siteNumber,parameterCd,
+rawDailyData <- readNWISdv(siteNumber,parameterCd,
                       "1980-01-01","2010-01-01")
 
 # Sample data Nitrate:
 parameterCd <- "00618"
-Sample <- getNWISSample(siteNumber,parameterCd,
+qwData <- readNWISqw(siteNumber,parameterCd,
                       "1980-01-01","2010-01-01")
 
-# Metadata on site and nitrate:
-INFO <- getNWISInfo(siteNumber,parameterCd)
-
-# Merge discharge and nitrate data to one dataframe:
-Sample <- mergeReport()
+pCode <- readNWISpCode(parameterCd)
 
 @
 
+USGS data are made available through the National Water Information System (NWIS).
+
+Table \ref{tab:func} describes the functions available in the dataRetrieval package.
+
+\begin{table}[!ht]
+\begin{minipage}{\linewidth}
+{\footnotesize
+\caption{dataRetrieval functions} 
+\label{tab:func}
+\begin{tabular}{lll}
+  \hline
+\multicolumn{1}{c}{\textbf{\textsf{Function Name}}} &
+\multicolumn{1}{c}{\textbf{\textsf{Arguments}}}  &
+\multicolumn{1}{c}{\textbf{\textsf{Description}}} \\  [0pt]
+  \hline
+  \texttt{readNWISdata} &  \texttt{...} & NWIS data using user-specified queries\\
+   & service & \\
+  [5pt]\texttt{readNWISdv} & siteNumber & NWIS daily data\\
+  & parameterCd & \\
+  & startDate & \\
+  & endDate & \\
+  & statCd & \\
+  [5pt]\texttt{readNWISqw} & siteNumber & NWIS water quality data\\
+    & parameterCd & \\
+  & startDate & \\
+  & endDate & \\
+  & expanded & \\
+  [5pt]\texttt{readNWISuv} & siteNumber & NWIS unit value data\\
+  & parameterCd & \\
+  & startDate & \\
+  & endDate & \\
+  [5pt]\texttt{readNWISrating} & siteNumber & NWIS rating table for active streamgage \\
+  & type & \\
+  [5pt]\texttt{readNWISmeas} & siteNumber & NWIS surface-water measurements \\
+  & startDate & \\
+  & endDate & \\
+  [5pt]\texttt{readNWISpeak} & siteNumber & NWIS peak flow data \\
+  & startDate & \\
+  & endDate & \\
+  [5pt]\texttt{readNWISgwl} & siteNumber & NWIS groundwater level measurements \\
+  & startDate & \\
+  & endDate & \\  
+  [5pt]\texttt{readNWISpCode} & parameterCd & NWIS parameter code information\\
+  [5pt]\texttt{readNWISsite} & siteNumber & NWIS site information \\
+  [5pt]\texttt{whatNWISsites} & \texttt{...} & NWIS site search using user-specified queries \\
+  [5pt]\texttt{whatNWISdata} & siteNumber & NWIS data availability, including period of record and count \\ 
+   & service & \\
+   [5pt]\texttt{readWQPdata} & \texttt{...} & WQP data using user-specified queries \\
+   [5pt]\texttt{readWQPqw} & siteNumber & WQP data \\
+     & parameterCd (or characteristic name) & \\
+  & startDate & \\
+  & endDate & \\
+  [5pt]\texttt{whatWQPsites} & \texttt{...} & WQP site search using user-specified queries \\  
+   \hline
+\end{tabular}
+}
+\end{minipage}
+\end{table}
+
+\clearpage
 
 %------------------------------------------------------------
 \section{USGS Web Retrievals}
 \label{sec:genRetrievals}
 %------------------------------------------------------------ 
-In this section, five examples of Web retrievals document how to get raw data. This data includes site information (\ref{sec:usgsSite}), measured parameter information (\ref{sec:usgsParams}), historical daily values(\ref{sec:usgsDaily}), unit values (which include real-time data but can also include other sensor data stored at regular time intervals) (\ref{sec:usgsRT}), and water quality data (\ref{sec:usgsWQP}) or (\ref{sec:usgsSTORET}). We will use the Choptank River near Greensboro, MD as an example.  Daily discharge measurements are available as far back as 1948.  Additionally, nitrate has been measured since 1964. 
+In this section, examples of Web retrievals document how to get raw data. This data includes site information (\ref{sec:usgsSite}), measured parameter information (\ref{sec:usgsParams}), historical daily values (\ref{sec:usgsDaily}), unit values (which include real-time data but can also include other sensor data stored at regular time intervals) (\ref{sec:usgsRT}), water quality data (\ref{sec:usgsWQP}), groundwater level data (\ref{sec:gwl}), peak flow data (\ref{sec:peak}), rating curve data (\ref{sec:rating}), and surface-water measurement data (\ref{sec:meas}). Section \ref{sec:metadata} shows instructions for getting metadata that is attached to each returned dataframe.
 
-% %------------------------------------------------------------
-% \subsection{Introduction}
-% %------------------------------------------------------------
 The USGS organizes hydrologic data in a standard structure.  Streamgages are located throughout the United States, and each streamgage has a unique ID (referred in this document and throughout the dataRetrieval package as \enquote{siteNumber}).  Often (but not always), these ID's are 8 digits.  The first step to finding data is discovering this siteNumber. There are many ways to do this, one is the National Water Information System: Mapper \url{http://maps.waterdata.usgs.gov/mapper/index.html}.
 
-Once the siteNumber is known, the next required input for USGS data retrievals is the \enquote{parameter code}.  This is a 5-digit code that specifies the measured parameter being requested.  For example, parameter code 00631 represents \enquote{Nitrate plus nitrite, water, filtered, milligrams per liter as nitrogen}, with units of \enquote{mg/l as N}. A complete list of possible USGS parameter codes can be found at \url{http://nwis.waterdata.usgs.gov/usa/nwis/pmcodes?help}.
+Once the siteNumber is known, the next required input for USGS data retrievals is the \enquote{parameter code}.  This is a 5-digit code that specifies the measured parameter being requested.  For example, parameter code 00631 represents \enquote{Nitrate plus nitrite, water, filtered, milligrams per liter as nitrogen}, with units of \enquote{mg/l as N}. 
+
+A useful place to discover USGS codes information, along with other NWIS information is:
+
+\url{http://help.waterdata.usgs.gov/codes-and-parameters}
 
 Not every station will measure all parameters. A short list of commonly measured parameters is shown in Table \ref{tab:params}.
 
@@ -253,18 +306,25 @@ print(xtable(data.df,
 
 @
 
-A complete list (as of September 25, 2013) is available as data attached to the package. It is accessed by the following:
+A complete list (as of November 7, 2014) is available as data attached to the package. It is accessed by the following:
 
-<<tableParameterCodesDataRetrieval>>=
+<<tableParameterCodesDataRetrieval, echo=TRUE, eval=TRUE>>=
 library(dataRetrieval)
 parameterCdFile <-  parameterCdFile
 names(parameterCdFile)
 @
 
+Two output columns that may not be obvious are \enquote{srsname} and \enquote{casrn}. Srsname stands for \enquote{Substance Registry Services}. More information on the srs name can be found here:
+
+\url{http://ofmpub.epa.gov/sor_internet/registry/substreg/home/overview/home.do}
+
+Casrn stands for \enquote{Chemical Abstracts Service (CAS) Registry Number}. More information on CAS can be found here:
+
+\url{http://www.cas.org/content/chemical-substances/faqs}
 
 For unit values data (sensor data measured at regular time intervals such as 15 minutes or hourly), knowing the parameter code and siteNumber is enough to make a request for data.  For most variables that are measured on a continuous basis, the USGS also stores the historical data as daily values.  These daily values are statistical summaries of the continuous data, e.g. maximum, minimum, mean, or median. The different statistics are specified by a 5-digit statistics code.  A complete list of statistic codes can be found here:
 
-\url{http://nwis.waterdata.usgs.gov/nwis/help/?read_file=stat&format=table}
+\url{http://help.waterdata.usgs.gov/code/stat_cd_nm_query?stat_nm_cd=%25&fmt=html&inline=true}
 
 Some common codes are shown in Table \ref{tab:stat}.
 
@@ -285,7 +345,7 @@ print(xtable(data.df,label="tab:stat",
 
 @
 
-Examples for using these siteNumber's, parameter codes, and stat codes will be presented in subsequent sections.
+Examples for using these siteNumbers, parameter codes, and stat codes will be presented in subsequent sections.
 
 \FloatBarrier
 
@@ -295,38 +355,50 @@ Examples for using these siteNumber's, parameter codes, and stat codes will be p
 %------------------------------------------------------------
 
 %------------------------------------------------------------
-\subsubsection{getNWISSiteInfo}
+\subsubsection{readNWISsite}
 \label{sec:usgsSiteFileData}
 %------------------------------------------------------------
-Use the \texttt{getNWISSiteInfo} function to obtain all of the information available for a particular USGS site such as full station name, drainage area, latitude, and longitude. \texttt{getNWISSiteInfo} can also access information about multiple sites with a vector input.
+Use the \texttt{readNWISsite} function to obtain all of the information available for a particular USGS site (or sites) such as full station name, drainage area, latitude, and longitude. \texttt{readNWISsite} can also access information about multiple sites with a vector input.
 
 
 <<getSite, echo=TRUE>>=
 siteNumbers <- c("01491000","01645000") 
-siteINFO <- getNWISSiteInfo(siteNumbers)
+siteINFO <- readNWISsite(siteNumbers)
 @
 
 A specific example piece of information can be retrieved, in this case a station name, as follows:
 
 <<siteNames2, echo=TRUE>>=
-siteINFO$station.nm
+siteINFO$station_nm
 @
 Site information is obtained from \url{http://waterservices.usgs.gov/rest/Site-Test-Tool.html}
+
+Information on the returned data can be found with the \texttt{comment} function as described in section \ref{sec:metadata}.
+
+<<siteNames3, echo=TRUE, eval=FALSE>>=
+comment(siteINFO)
+@
+
+
+
 \FloatBarrier
 
 %------------------------------------------------------------
-\subsubsection{getNWISDataAvailability}
+\subsubsection{whatNWISdata}
 \label{sec:usgsDataAvailability}
 %------------------------------------------------------------
-To discover what data is available for a particular USGS site, including measured parameters, period of record, and number of samples (count), use the \texttt{getNWISDataAvailability} function. It is possible to limit the retrieval information to a subset of types (\texttt{"}dv\texttt{"}, \texttt{"}uv\texttt{"}, or \texttt{"}qw\texttt{"}). In the following example, we limit the retrieved Choptank data to only daily data. Leaving the \texttt{"}type\texttt{"} argument blank returns all of the available data for that site.
+To discover what data is available for a particular USGS site, including measured parameters, period of record, and number of samples (count), use the \texttt{whatNWISdata} function. It is possible to limit the retrieval information to a subset of services. The possible choices for services are: \texttt{"}dv\texttt{"} (daily values), \texttt{"}uv\texttt{"}, \texttt{"}rt\texttt{"}, or \texttt{"}iv\texttt{"} (unit values), \texttt{"}qw\texttt{"} (water-quality), \texttt{"}sv\texttt{"} (site visits), \texttt{"}pk\texttt{"} (peak measurements), \texttt{"}gw\texttt{"} (groundwater levels), \texttt{"}ad\texttt{"} (sites included in USGS Annual Water Data Reports External Link), \texttt{"}aw\texttt{"} (sites monitored by the USGS Active Groundwater Level Network External Link), and \texttt{"}id\texttt{"} (historical instantaneous values).
+
+In the following example, we limit the retrieved data to only daily data. The default for \texttt{"}service\texttt{"} is \enquote{all}, which returns all of the available data for that site. Likewise, there are arguments for parameter code (\texttt{parameterCd}) and statistic code (\texttt{statCd}) to filter the results. The default for both is to return all possible values (\enquote{all}). The returned \texttt{"}count\_nu\texttt{"} for \texttt{"}uv\texttt{"} data is the count of days with returned data, not the actual count of returned values.
 
 
 <<getSiteExtended, echo=TRUE>>=
 # Continuing from the previous example:
-# This pulls out just the daily data:
+# This pulls out just the daily, mean data:
+
+dailyDataAvailable <- whatNWISdata(siteNumbers,
+                    service="dv", statCd="00003")
 
-dailyDataAvailable <- getNWISDataAvailability(siteNumbers,
-                    type="dv")
 
 @
 
@@ -335,11 +407,11 @@ tableData <- with(dailyDataAvailable,
       data.frame( 
       siteNumber= site_no,
       srsname=srsname, 
-      startDate=as.character(startDate), 
-      endDate=as.character(endDate), 
-      count=as.character(count),
+      startDate=as.character(begin_date), 
+      endDate=as.character(end_date), 
+      count=as.character(count_nu),
       units=parameter_units,
-      statCd = statCd,
+#       statCd = stat_cd,
       stringsAsFactors=FALSE)
       )
 
@@ -348,7 +420,7 @@ tableData$units[which(tableData$units == "uS/cm @25C")] <- "$\\mu$S/cm @25C"
 
 
 print(xtable(tableData,label="tab:gda",
-    caption="Daily mean data availabile at the Choptank River near Greensboro, MD. [Some columns deleted for space considerations]"),
+    caption="Reformatted version of output from \\texttt{whatNWISdata} function for the Choptank River near Greensboro, MD, and from Seneca Creek at Dawsonville, MD from the daily values service [Some columns deleted for space considerations]"),
        caption.placement="top",
        size = "\\footnotesize",
        latex.environment=NULL,
@@ -369,12 +441,12 @@ See Section \ref{app:createWordTable} for instructions on converting an R datafr
 \subsection{Parameter Information}
 \label{sec:usgsParams}
 %------------------------------------------------------------
-To obtain all of the available information concerning a measured parameter (or multiple parameters), use the \texttt{getNWISPcodeInfo} function:
+To obtain all of the available information concerning a measured parameter (or multiple parameters), use the \texttt{readNWISpCode} function:
 
 <<label=getPCodeInfo, echo=TRUE>>=
 # Using defaults:
 parameterCd <- "00618" 
-parameterINFO <- getNWISPcodeInfo(parameterCd)
+parameterINFO <- readNWISpCode(parameterCd)
 colnames(parameterINFO)
 @
 
@@ -383,33 +455,39 @@ A specific example piece of information, in this case parameter name, can be obt
 <<siteNames, echo=TRUE>>=
 parameterINFO$parameter_nm
 @
-Parameter information can obtained from \url{http://nwis.waterdata.usgs.gov/usa/nwis/pmcodes}
+Parameter information can be obtained from:
+
+\url{http://help.waterdata.usgs.gov/codes-and-parameters/parameters}
+
 \FloatBarrier
+
 %------------------------------------------------------------
 \subsection{Daily Values}
 \label{sec:usgsDaily}
 %------------------------------------------------------------
-To obtain daily records of USGS data, use the \texttt{getNWISdvData} function. The arguments for this function are siteNumber, parameterCd, startDate, endDate, statCd, and a logical (TRUE/FALSE) interactive. There are 2 default arguments: statCd (defaults to \texttt{"}00003\texttt{"}), and interactive (defaults to TRUE).  If you want to use the default values, you do not need to list them in the function call. By setting the \texttt{"}interactive\texttt{"} option to FALSE, the operation of the function will advance automatically. It might make more sense to run large batch collections with the interactive option set to FALSE. 
+To obtain daily records of USGS data, use the \texttt{readNWISdv} function. The arguments for this function are siteNumber, parameterCd, startDate, endDate, and statCd (defaults to \texttt{"}00003\texttt{"}).  If you want to use the default values, you do not need to list them in the function call. 
 
 The dates (start and end) must be in the format \texttt{"}YYYY-MM-DD\texttt{"} (note: the user must include the quotes).  Setting the start date to \texttt{"}\texttt{"} (no space) will prompt the program to ask for the earliest date, and setting the end date to \texttt{"}\texttt{"} (no space) will prompt for the latest available date.
 
 <<label=getNWISDaily, echo=TRUE, eval=TRUE>>=
 
-# Continuing with our Choptank River example
+# Choptank River near Greensboro, MD:
 siteNumber <- "01491000"
 parameterCd <- "00060"  # Discharge
-startDate <- ""  # Will request earliest date
-endDate <- "" # Will request latest date
+startDate <- "2009-10-01"  
+endDate <- "2012-09-30" 
 
-discharge <- getNWISdvData(siteNumber, 
+discharge <- readNWISdv(siteNumber, 
                     parameterCd, startDate, endDate)
 names(discharge)
 @
 
-The column \texttt{"}datetime\texttt{"} in the returned dataframe is automatically imported as a variable of class \texttt{"}Date\texttt{"} in R. Each requested parameter has a value and remark code column.  The names of these columns depend on the requested parameter and stat code combinations. USGS remark codes are often \texttt{"}A\texttt{"} (approved for publication) or \texttt{"}P\texttt{"} (provisional data subject to revision). A more complete list of remark codes can be found here:
-\url{http://nwis.waterdata.usgs.gov/usa/nwis/pmcodes}
+The column \texttt{"}datetime\texttt{"} in the returned dataframe is automatically imported as a variable of class \texttt{"}Date\texttt{"} in R. Each requested parameter has a value and remark code column.  The names of these columns depend on the requested parameter and stat code combinations. USGS daily value qualification codes are often \texttt{"}A\texttt{"} (approved for publication) or \texttt{"}P\texttt{"} (provisional data subject to revision). A more complete list of daily value qualification codes can be found here:
+
+\url{http://help.waterdata.usgs.gov/codes-and-parameters/daily-value-qualification-code-dv_rmk_cd}
 
 Another example that doesn't use the defaults would be a request for mean and maximum daily temperature and discharge in early 2012:
+
 <<label=getNWIStemperature, echo=TRUE>>=
 
 parameterCd <- c("00010","00060")  # Temperature and discharge
@@ -417,41 +495,40 @@ statCd <- c("00001","00003")  # Mean and maximum
 startDate <- "2012-01-01"
 endDate <- "2012-05-01"
 
-temperatureAndFlow <- getNWISdvData(siteNumber, parameterCd, 
+temperatureAndFlow <- readNWISdv(siteNumber, parameterCd, 
         startDate, endDate, statCd=statCd)
 
 @
 
 Daily data is pulled from \url{http://waterservices.usgs.gov/rest/DV-Test-Tool.html}.
 
-The column names can be automatically adjusted based on the parameter and statistic codes using the \texttt{renameColumns} function. This is not necessary, but may be useful when analyzing the data. 
+The column names can be shortened and simplified using the \texttt{renameNWISColumns} function.  This is not necessary, but may streamline subsequent data analysis and presentation.
 
 <<label=renameColumns, echo=TRUE>>=
 names(temperatureAndFlow)
 
-temperatureAndFlow <- renameColumns(temperatureAndFlow)
+temperatureAndFlow <- renameNWISColumns(temperatureAndFlow)
 names(temperatureAndFlow)
 @
 
 An example of plotting the above data (Figure \ref{fig:getNWIStemperaturePlot}):
 
 <<getNWIStemperaturePlot, echo=TRUE, fig.cap="Temperature and discharge plot of Choptank River in 2012.",out.width='1\\linewidth',out.height='1\\linewidth',fig.show='hold'>>=
+variableInfo <- attr(temperatureAndFlow, "variableInfo")
+siteInfo <- attr(temperatureAndFlow, "siteInfo")
+
 par(mar=c(5,5,5,5)) #sets the size of the plot window
 
-with(temperatureAndFlow, plot(
-  datetime, Temperature_water_degrees_Celsius_Max_01,
-  xlab="Date",ylab="Max Temperature [C]"
-  ))
+plot(temperatureAndFlow$dateTime, temperatureAndFlow$Wtemp_Max,
+  ylab=variableInfo$parameter_desc[1],xlab="" )
 par(new=TRUE)
-with(temperatureAndFlow, plot(
-  datetime, Discharge_cubic_feet_per_second,
+plot(temperatureAndFlow$dateTime, temperatureAndFlow$Flow,
   col="red",type="l",xaxt="n",yaxt="n",xlab="",ylab="",axes=FALSE
-  ))
+  )
 axis(4,col="red",col.axis="red")
-mtext(expression(paste("Mean Discharge [ft"^"3","/s]",
-                       sep="")),side=4,line=3,col="red")
-title(paste(siteINFO$station.nm[1],"2012",sep=" "))
-legend("topleft", c("Max Temperature", "Mean Discharge"), 
+mtext(variableInfo$parameter_desc[2],side=4,line=3,col="red")
+title(paste(siteInfo$station_nm,"2012"))
+legend("topleft", variableInfo$param_units, 
        col=c("black","red"),lty=c(NA,1),pch=c(1,NA))
 @
 
@@ -464,24 +541,40 @@ There are occasions where NWIS values are not reported as numbers, instead there
 \subsection{Unit Values}
 \label{sec:usgsRT}
 %------------------------------------------------------------
-Any data collected at regular time intervals (such as 15-minute or hourly) are known as \enquote{unit values.} Many of these are delivered on a real time basis and very recent data (even less than an hour old in many cases) are available through the function \texttt{getNWISunitData}.  Some of these unit values are available for many years, and some are only available for a recent time period such as 120 days.  Here is an example of a retrieval of such data.  
+Any data collected at regular time intervals (such as 15-minute or hourly) are known as \enquote{unit values.} Many of these are delivered on a real time basis and very recent data (even less than an hour old in many cases) are available through the function \texttt{readNWISuv}.  Some of these unit values are available for many years, and some are only available for a recent time period such as 120 days.  Here is an example of a retrieval of such data.  
 
-<<label=getNWISUnit, echo=TRUE>>=
+<<label=readNWISuv, echo=TRUE>>=
 
 parameterCd <- "00060"  # Discharge
 startDate <- "2012-05-12" 
 endDate <- "2012-05-13" 
-dischargeToday <- getNWISunitData(siteNumber, parameterCd, 
+dischargeUnit <- readNWISuv(siteNumber, parameterCd, 
         startDate, endDate)
 @
 
-The retrieval produces the following dataframe:
+The retrieval produces a data frame that contains 96 rows (one for every 15 minute period in the day).  They include all data collected from the startDate through the endDate (starting and ending with midnight locally-collected time). The dateTime column is converted to \enquote{UTC} (Coordinated Universal Time), so midnight EST will be 5 hours earlier in the dateTime column (the previous day, at 7pm).
+
 
-<<dischargeData, echo=FALSE>>=
-head(dischargeToday)
+<<dischargeData, echo=TRUE>>=
+head(dischargeUnit)
 @
 
-Note that time now becomes important, so the variable datetime is a POSIXct, and the time zone is included in a separate column. Data are retrieved from \url{http://waterservices.usgs.gov/rest/IV-Test-Tool.html}. There are occasions where NWIS values are not reported as numbers, instead a common example is \enquote{Ice.}  Any value that cannot be converted to a number will be reported as NA in this package.
+To override the UTC timezone, specify a valid timezone in the tz argument. Default is \texttt{""}, which will keep the dateTime column in UTC. Other valid timezones are:
+
+\begin{verbatim}
+America/New_York
+America/Chicago
+America/Denver
+America/Los_Angeles
+America/Anchorage
+America/Honolulu
+America/Jamaica
+America/Managua
+America/Phoenix
+America/Metlakatla
+\end{verbatim}
+
+Data are retrieved from \url{http://waterservices.usgs.gov/rest/IV-Test-Tool.html}. There are occasions where NWIS values are not reported as numbers, instead a common example is \enquote{Ice.}  Any value that cannot be converted to a number will be reported as NA in this package.
 
 \newpage
 
@@ -492,35 +585,100 @@ Note that time now becomes important, so the variable datetime is a POSIXct, and
 \subsection{Water Quality Values}
 \label{sec:usgsWQP}
 %------------------------------------------------------------
-To get USGS water quality data from water samples collected at the streamgage or other monitoring site (as distinct from unit values collected through some type of automatic monitor) we can use the function \texttt{getNWISqwData}, with the input arguments: siteNumber, parameterCd, startDate, endDate, and interactive (similar to \texttt{getNWISunitData} and \texttt{getNWISdvData}). Additionally, the argument \texttt{"}expanded\texttt{"} is a logical input that allows the user to choose between a simple return of datetimes/qualifier/values (expanded=FALSE), or a more complete and verbose output (expanded=TRUE). Expanded = TRUE includes such columns as remark codes, value qualifying text, and detection level.
+To get USGS water quality data from water samples collected at the streamgage or other monitoring site (as distinct from unit values collected through some type of automatic monitor) we can use the function \texttt{readNWISqw}, with the input arguments: siteNumber, parameterCd, startDate, and endDate. Additionally, the argument \texttt{"}expanded\texttt{"} is a logical input that allows the user to choose between a simple return of datetimes/qualifier/values (expanded=FALSE), or a more complete and verbose output (expanded=TRUE). Expanded = TRUE includes such columns as remark codes, value qualifying text, and detection level for each parameter code. There is also an argument \texttt{"}reshape\texttt{"} that converts the expanded dataset to a \texttt{"}wide\texttt{"} format (each requested parameter code gets individual columns).
 
-
-<<label=getQW, echo=TRUE>>=
+<<label=getQW, echo=TRUE, eval=TRUE>>=
  
 # Dissolved Nitrate parameter codes:
 parameterCd <- c("00618","71851")
 startDate <- "1985-10-01"
 endDate <- "2012-09-30"
 
-dissolvedNitrate <- getNWISqwData(siteNumber, parameterCd, 
-      startDate, endDate, expanded=TRUE)
-names(dissolvedNitrate)
+dfLong <- readNWISqw(siteNumber, parameterCd, 
+      startDate, endDate, expanded=TRUE,reshape=FALSE)
 
+# Or the wide return:
+# dfWide <- readNWISqw(siteNumber, parameterCd, 
+#       startDate, endDate, expanded=TRUE, reshape=TRUE)
 
 @
 
+Metadata, such as information about the column names can be found by using the \texttt{comment} function, as described in section \ref{sec:metadata}.
+
+<<qwmeta, echo=TRUE, eval=TRUE>>=
+
+comment(dfLong)
 
-<<getQWtemperaturePlot, echo=TRUE, fig.cap=paste(parameterINFO$parameter_nm, "at", siteINFO$station.nm[1])>>=
-with(dissolvedNitrate, plot(
-  startDateTime, result_va_00618,
-  xlab="Date",ylab = paste(parameterINFO$srsname,
-      "[",parameterINFO$parameter_units,"]")
-  ))
-title(siteINFO$station.nm[1])
 @
 
 \FloatBarrier
 
+%------------------------------------------------------------
+\subsection{Groundwater level data}
+\label{sec:gwl}
+%------------------------------------------------------------
+Groundwater level measurements can be obtained with the \texttt{readNWISgwl} function. Information on the returned data can be found with the \texttt{comment} function as described in section \ref{sec:metadata}.
+
+<<gwlexample, echo=TRUE, eval=TRUE>>=
+siteNumber <- "434400121275801"
+groundWater <- readNWISgwl(siteNumber)
+
+names(groundWater)
+
+@
+
+%------------------------------------------------------------
+\subsection{Peak flow data}
+\label{sec:peak}
+%------------------------------------------------------------
+
+Peak flow data are instantaneous discharge or stage data that record the maximum values of these variables during a flood event.  They include the annual peak flood event but can also include records of other peaks that are lower than the annual maximum. Peak discharge measurements can be obtained with the \texttt{readNWISpeak} function. Information on the returned data can be found with the \texttt{comment} function as described in section \ref{sec:metadata}.
+
+<<peakexample, echo=TRUE, eval=TRUE>>=
+siteNumber <- '01594440'
+peakData <- readNWISpeak(siteNumber)
+
+
+names(peakData)
+
+@
+
+
+%------------------------------------------------------------
+\subsection{Rating curve data}
+\label{sec:rating}
+%------------------------------------------------------------
+Rating curves are the calibration curves that are used to convert measurements of stage to discharge.  Because of changing hydrologic conditions these rating curves change over time. Information on the returned data can be found with the \texttt{comment} function as described in section \ref{sec:metadata}.
+
+Rating curves can be obtained with the \texttt{readNWISrating} function.
+
+<<ratingexample, echo=TRUE, eval=TRUE>>=
+ratingData <- readNWISrating(siteNumber, "base")
+attr(ratingData, "RATING")
+
+names(ratingData)
+
+@
+
+
+
+%------------------------------------------------------------
+\subsection{Surface-water measurement data}
+\label{sec:meas}
+%------------------------------------------------------------
+These data are the discrete measurements of discharge that are made for the purpose of developing or revising the rating curve.  Information on the returned data can be found with the \texttt{comment} function as described in section \ref{sec:metadata}.
+
+Surface-water measurement data can be obtained with the \texttt{readNWISmeas} function.
+
+<<surfexample, echo=TRUE, eval=TRUE>>=
+surfaceData <- readNWISmeas(siteNumber)
+
+names(surfaceData)
+
+@
+
+
+
 %------------------------------------------------------------
 \subsection{URL Construction}
 \label{sec:usgsURL}
@@ -533,10 +691,10 @@ There may be times when you might be interested in seeing the URL (Web address)
 pCode <- c("00618","71851")
 startDate <- "1964-06-11"
 endDate <- "2012-12-18"
-url_qw <- constructNWISURL(siteNumber,pCode,startDate,endDate,'qw')
+url_qw <- constructNWISURL(siteNumber,pCode,startDate,endDate,"qw")
 url_dv <- constructNWISURL(siteNumber,"00060",startDate,endDate,
-                           'dv',statCd="00003")
-url_uv <- constructNWISURL(siteNumber,"00060",startDate,endDate,'uv')
+                           "dv",statCd="00003")
+url_uv <- constructNWISURL(siteNumber,"00060",startDate,endDate,"uv")
 @
 
 
@@ -545,15 +703,15 @@ url_uv <- constructNWISURL(siteNumber,"00060",startDate,endDate,'uv')
 \section{Water Quality Portal Web Retrievals}
 \label{sec:usgsSTORET}
 %------------------------------------------------------------
-There are additional water quality data sets available from the Water Quality Data Portal (\url{http://www.waterqualitydata.us/}).  These data sets can be housed in either the STORET database (data from EPA), NWIS database (data from USGS), STEWARDS database (data from USDA), and additional databases are slated to be included.  Because only USGS uses parameter codes, a \texttt{"}characteristic name\texttt{"} must be supplied.  The \texttt{getWQPqwData} function can take either a USGS parameter code, or a more general characteristic name in the parameterCd input argument. The Water Quality Data Portal includes data discovery tools and information on characteristic names. The following example retrieves specific conductance from a DNR site in Wisconsin. 
+There are additional water quality data sets available from the Water Quality Data Portal (\url{http://www.waterqualitydata.us/}).  These data sets can be housed in the STORET database (data from EPA), the NWIS database (data from USGS), or the STEWARDS database (data from USDA), and additional databases are slated to be included in the future.  Because only USGS uses parameter codes, a \texttt{"}characteristic name\texttt{"} must be supplied.  The \texttt{readWQPqw} function can take either a USGS parameter code, or a more general characteristic name in the parameterCd input argument. The Water Quality Data Portal includes data discovery tools and information on characteristic names. The following example retrieves specific conductance from a DNR site in Wisconsin. 
 
 
 <<label=getQWData, echo=TRUE, eval=FALSE>>=
-specificCond <- getWQPqwData('WIDNR_WQX-10032762',
+specificCond <- readWQPqw('WIDNR_WQX-10032762',
                 'Specific conductance','2011-05-01','2011-09-30')
 @
 
-Guidance for finding characteristic names can be found at: \url{http://www.waterqualitydata.us/webservices_documentation.jsp}.
+A tool for finding NWIS characteristic names can be found at: \url{http://www.waterqualitydata.us/public_srsnames.jsp}.
 
 \FloatBarrier
 
@@ -567,7 +725,7 @@ The previous examples all took specific input arguments: siteNumber, parameterCd
 \subsubsection{NWIS sites}
 \label{sec:NWISGenSite}
 %------------------------------------------------------------
-The function \texttt{getNWISSites} can be used to discover NWIS sites based on any query that the NWIS Site Service offers. This is done by using the \texttt{"..."} argument, which allows the user to use any arbitrary input argument. We can then use the service here:
+The function \texttt{whatNWISsites} can be used to discover NWIS sites based on any query that the NWIS Site Service offers. This is done by using the \texttt{"..."} argument, which allows the user to use any arbitrary input argument. We can then use the service here:
 
 \url{http://waterservices.usgs.gov/rest/Site-Test-Tool.html}
 
@@ -578,7 +736,7 @@ to discover many options for searching for NWIS sites. For example, you may want
 The following dataRetrieval code can be used to get those sites:
 
 <<siteSearch>>=
-sites <- getNWISSites(bBox="-83.0,36.5,-81.0,38.5", 
+sites <- whatNWISsites(bBox="-83.0,36.5,-81.0,38.5", 
                       parameterCd="00010,00060",
                       hasDataTypeCd="dv")
 
@@ -591,7 +749,7 @@ nrow(sites)
 \subsubsection{NWIS data}
 \label{sec:NWISGenData}
 %------------------------------------------------------------
-For NWIS data, the function \texttt{getNWISData} can be used. The argument listed in the R help file is \texttt{"..."} and \texttt{"}service\texttt{"} (only for data requests). Table \ref{tab:NWISGeneral} describes the services are available.
+For NWIS data, the function \texttt{readNWISdata} can be used. The arguments listed in the R help file are \texttt{"..."} and \texttt{"}service\texttt{"} (only for data requests). Table \ref{tab:NWISGeneral} describes the services that are available.
 
 \begin{table}[!ht]
 \begin{minipage}{\linewidth}
@@ -617,7 +775,8 @@ For NWIS data, the function \texttt{getNWISData} can be used. The argument liste
 The \texttt{"..."} argument allows the user to create their own queries based on the instructions found in the web links above. The links provide instructions on how to create a URL to request data. Perhaps you want sites only in Wisconsin, with a drainage area less than 50 mi$^2$, and the most recent daily dischage data. That request would be done as follows:
 
 <<dataExample>>=
-dischargeWI <- getNWISData(stateCd="WI",
+dischargeWI <- readNWISdata(service="dv",
+                           stateCd="WI",
                            parameterCd="00060",
                            drainAreaMin="50",
                            statCd="00003")
@@ -634,11 +793,11 @@ Just as with NWIS, the Water Quality Portal (WQP) offers a variety of ways to se
 
 \url{http://www.waterqualitydata.us/webservices_documentation.jsp}
 
-To discover available sites in the WQP in New Jersey that have measured Chloride, use the function \texttt{getWQPSites}.
+To discover available sites in the WQP in New Jersey that have measured Chloride, use the function \texttt{whatWQPsites}.
 
 <<NJChloride, eval=FALSE>>=
 
-sitesNJ <- getWQPSites(statecode="US:34",
+sitesNJ <- whatWQPsites(statecode="US:34",
                        characteristicName="Chloride")
 
 @
@@ -648,11 +807,11 @@ sitesNJ <- getWQPSites(statecode="US:34",
 \subsubsection{Water Quality Portal data}
 \label{sec:WQPGenData}
 %------------------------------------------------------------
-Finally, to get data from the WQP using generalized Web service calls, use the function \texttt{getWQPData}. For example, to get all the pH data in Wisconsin:
+Finally, to get data from the WQP using generalized Web service calls, use the function \texttt{readWQPdata}. For example, to get all the pH data in Wisconsin:
 
 <<phData, eval=FALSE>>=
 
-dataPH <- getWQPData(statecode="US:55", 
+dataPH <- readWQPdata(statecode="US:55", 
                  characteristicName="pH")
 
 @
@@ -661,488 +820,59 @@ dataPH <- getWQPData(statecode="US:55",
 
 \FloatBarrier
 
-%------------------------------------------------------------
-\section{Data Retrievals Structured For Use In The EGRET Package}
-\label{sec:EGRETdfs}
-%------------------------------------------------------------ 
-Rather than using the raw data as retrieved by the Web, the dataRetrieval package also includes functions that return the data in a structure that has been designed to work with the EGRET R package (\url{https://github.com/USGS-R/EGRET/wiki}). In general, these dataframes may be much more \enquote{R-friendly} than the raw data, and will contain additional date information that allows for efficient data analysis.
-
-In this section, we use 3 dataRetrieval functions to get sufficient data to perform an EGRET analysis.  We will continue analyzing the Choptank River. We retrieve essentially the same data that were retrieved in section \ref{sec:genRetrievals}, but in this case the data are structured into three EGRET-specific dataframes.  The daily discharge data are placed in a dataframe called Daily.  The nitrate sample data are placed in a dataframe called Sample.  The data about the site and the parameter are placed in a dataframe called INFO.  Although these dataframes were designed to work with the EGRET R package, they can be very useful for a wide range of hydrology studies that don't use EGRET.
-
-%------------------------------------------------------------
-\subsection{INFO Data}
-\label{INFOsubsection}
-%------------------------------------------------------------
-
-The \texttt{getNWISInfo}, \texttt{getWQPInfo}, and \texttt{getUserInfo} functions obtain metadata, or data about the streamgage and measured parameters. Any number of columns can be included in this dataframe. Table \ref{tab:INFOtable} describes fields are required for EGRET functions. 
-
-\begin{table}[!ht]
-\begin{minipage}{\linewidth}
-{\footnotesize
-\caption{INFO columns required in EGRET functions} 
-\label{tab:INFOtable}
-\begin{tabular}{lll}
-  \hline
-\multicolumn{1}{c}{\textbf{\textsf{Column Name}}} &
-\multicolumn{1}{c}{\textbf{\textsf{Type}}} &
-\multicolumn{1}{c}{\textbf{\textsf{Description}}} \\  [0pt]
-  \hline
-  constitAbbrev & string & Constituent abbreviation, used for saving the workspace in EGRET\\
-  [5pt] drainSqKm & numeric & Drainage area in square kilometers \\
-  [5pt] paramShortName & string & Parameter name to use on graphs \\
-  [5pt] param.units & string & Parameter units \\
-  [5pt] shortName & string & Station name to use on graphs\\
-  [5pt] staAbbrev & string & Station Abbreviation \\
-   \hline
-\end{tabular}
-}
-\end{minipage}
-\end{table}
-
-The function \texttt{getNWISInfo} combines \texttt{getNWISSiteInfo} and \texttt{getNWISPcodeInfo}, producing one dataframe called INFO.
-
-<<ThirdExample>>=
-parameterCd <- "00618"
-INFO <- getNWISInfo(siteNumber,parameterCd, interactive=FALSE)
-@
-
-It is also possible to create the INFO dataframe using information from the Water Quality Portal:
-
-<<WQPInfo, eval=FALSE>>=
-parameterCd <- "00618"
-INFO_WQP <- getWQPInfo("USGS-01491000",parameterCd)
-@
-
-Finally, the function \texttt{getUserInfo} can be used to convert comma separated files into an INFO dataframe. 
-
-Any supplemental column that would be useful can be added to the INFO dataframe. 
-
-<<addInfo, eval=TRUE, echo=TRUE>>=
-
-INFO$riverInfo <- "Major tributary of the Chesapeake Bay"
-INFO$GreensboroPopulation <- 1931
-
-@
-
-
-\FloatBarrier
-
-%------------------------------------------------------------
-\subsection{Daily Data}
-\label{Dailysubsection}
-%------------------------------------------------------------
-The \texttt{getNWISDaily} function retrieves the daily values (discharge in this case).  It requires the inputs siteNumber, parameterCd, startDate, endDate, interactive, and convert. Most of these arguments are described in section \ref{sec:genRetrievals}, however \texttt{"}convert\texttt{"} is a new argument (that defaults to TRUE). The convert argument tells the program to convert the values from cubic feet per second (ft\textsuperscript{3}/s) to cubic meters per second (m\textsuperscript{3}/s) as shown in the example Daily data frame in Table \ref{tab:DailyDF1}. For EGRET applications with NWIS Web retrieval, do not use this argument (the default is TRUE), EGRET assumes that discharge is always stored in units of cubic meters per second. If you don't want this conversion and are not using EGRET, set convert=FALSE in the function call. 
-
-<<firstExample>>=
-siteNumber <- "01491000"
-startDate <- "2000-01-01"
-endDate <- "2013-01-01"
-# This call will get NWIS (ft3/s) data , and convert it to m3/s:
-Daily <- getNWISDaily(siteNumber, "00060", startDate, endDate)
-@
-
-
-
-<<colNamesDaily, echo=FALSE,results='asis'>>=
-ColumnName <- c("Date", "Q", "Julian","Month","Day","DecYear","MonthSeq","Qualifier","i","LogQ","Q7","Q30")
-Type <- c("Date", "number", "number","integer","integer","number","integer","string","integer","number","number","number")
-Description <- c("Date", "Discharge in m$^3$/s", "Number of days since January 1, 1850", "Month of the year [1-12]", "Day of the year [1-366]", "Decimal year", "Number of months since January 1, 1850", "Qualifying code", "Index of days, starting with 1", "Natural logarithm of Q", "7 day running average of Q", "30 day running average of Q")
-Units <- c("date", "m$^3$/s","days", "months","days","years","months", "character","days","numeric","m$^3$/s","m$^3$/s")
-
-DF <- data.frame(ColumnName,Type,Description,Units)
-
-print(xtable(DF, caption="Daily dataframe",label="tab:DailyDF1"),
-       caption.placement="top",
-       size = "\\footnotesize",
-       latex.environment=NULL,
-       sanitize.text.function = function(x) {x},
-       sanitize.colnames.function =  bold.colHeaders,
-       sanitize.rownames.function = addSpace
-       
-      )
-
-@
-
-
-If discharge values are negative or zero, the code will set all of these values to zero and then add a small constant to all of the daily discharge values.  This constant is 0.001 times the mean discharge.  The code will also report on the number of zero and negative values and the size of the constant.  Use EGRET analysis only if the number of zero values is a very small fraction of the total days in the record (say less than 0.1\% of the days), and there are no negative discharge values.  Columns Q7 and Q30 are the 7 and 30 day running averages for the 7 or 30 days ending on this specific date. Table \ref{tab:DailyDF1} lists details of the Daily data frame.
-
-Notice that the \enquote{Day of the year} column can span from 1 to 366. The 366 accounts for leap years. Every day has a consistent day of the year. This means, February 28\textsuperscript{th} is always the 59\textsuperscript{th} day of the year, Feb. 29\textsuperscript{th} is always the 60\textsuperscript{th} day of the year, and March 1\textsuperscript{st} is always the 61\textsuperscript{st} day of the year whether or not it is a leap year.
-
-User-generated Sample dataframes can also be created using the \texttt{getUserDaily} function. This is discused in detail in section \ref{sec:DailyFile}.
-
-\FloatBarrier
-
-%------------------------------------------------------------
-\subsection{Sample Data}
-\label{Samplesubsection}
-%------------------------------------------------------------
-The \texttt{getNWISSample} function retrieves USGS sample data from NWIS. The arguments for this function are also siteNumber, parameterCd, startDate, endDate, interactive. These are the same inputs as \texttt{getWQPqwData} or \texttt{getWQPData} as described in the previous section.
-
-<<secondExample>>=
-parameterCd <- "00618"
-Sample <-getNWISSample(siteNumber,parameterCd,
-      startDate, endDate)
-@
-
-The \texttt{getWQPSample} function retrieves Water Quality Portal sample data (STORET, NWIS, STEWARDS). The arguments for this function are siteNumber, characteristicName, startDate, endDate, interactive. Table \ref{tab:SampleDataframe} lists details of the Sample data frame. 
-
-<<STORET,echo=TRUE,eval=FALSE>>=
-site <- 'WIDNR_WQX-10032762'
-characteristicName <- 'Specific conductance'
-Sample <-getWQPSample(site,characteristicName,
-      startDate, endDate)
-@
-
-User-generated Sample dataframes can also be created using the \texttt{getUserSample} function. This is discused in detail in section \ref{sec:SampleFile}.
-
-\pagebreak
-
-
-\begin{table}
-{\footnotesize
-  \begin{threeparttable}[b]
-  \caption{Sample dataframe}
-  \label{tab:SampleDataframe}
-  \begin{tabular}{llll}
-  \hline
-\multicolumn{1}{c}{\textbf{\textsf{ColumnName}}} & 
-\multicolumn{1}{c}{\textbf{\textsf{Type}}} & 
-\multicolumn{1}{c}{\textbf{\textsf{Description}}} & 
-\multicolumn{1}{c}{\textbf{\textsf{Units}}} \\ 
-  \hline
-  Date & Date & Date & date \\ 
-  [5pt]ConcLow & number & Lower limit of concentration & mg/L \\ 
-  [5pt]ConcHigh & number & Upper limit of concentration & mg/L \\ 
-  [5pt]Uncen & integer & Uncensored data (1=true, 0=false) & integer \\ 
-  [5pt]ConcAve & number & Average of ConcLow and ConcHigh & mg/L \\ 
-  [5pt]Julian & number & Number of days since January 1, 1850 & days \\ 
-  [5pt]Month & integer & Month of the year [1-12] & months \\ 
-  [5pt]Day & integer & Day of the year [1-366] & days \\ 
-  [5pt]DecYear & number & Decimal year & years \\ 
-  [5pt]MonthSeq & integer & Number of months since January 1, 1850 & months \\ 
-  [5pt]SinDY & number & Sine of DecYear & numeric \\ 
-  [5pt]CosDY & number & Cosine of DecYear & numeric \\ 
-  [5pt]Q \tnote{1} & number & Discharge & m\textsuperscript{3}/s \\ 
-  [5pt]LogQ \tnote{1} & number & Natural logarithm of discharge & numeric \\ 
-   \hline
-\end{tabular}
-
-  \begin{tablenotes}
-    \item[1] Discharge columns are populated from data in the Daily dataframe after calling the \texttt{mergeReport} function.
-  \end{tablenotes}
- \end{threeparttable}
-}
-\end{table}
-
-Notice that the \enquote{Day of the year} column can span from 1 to 366. The 366 accounts for leap years. Every day has a consistent day of the year. This means, February 28\textsuperscript{th} is always the 59\textsuperscript{th} day of the year, Feb. 29\textsuperscript{th} is always the 60\textsuperscript{th} day of the year, and March 1\textsuperscript{st} is always the 61\textsuperscript{st} day of the year whether or not it is a leap year.
-
-Section \ref{sec:cenValues} is about summing multiple constituents, including how interval censoring is used. Since the Sample data frame is structured to only contain one constituent, when more than one parameter codes are requested, the \texttt{getNWISSample} function will sum the values of each constituent as described below.
-
-\FloatBarrier
-
+\clearpage
 
 %------------------------------------------------------------
-\subsection{Censored Values: Summation Explanation}
-\label{sec:cenValues}
+\section{Dataframe Metadata}
+\label{sec:metadata}
 %------------------------------------------------------------
-In the typical case where none of the data are censored (that is, no values are reported as \enquote{less-than} values), the ConcLow = ConcHigh = ConcAve and Uncen = 1 are equal to the reported value.  For the most common type of censoring, where a value is reported as less than the reporting limit, then ConcLow = NA, ConcHigh = reporting limit, ConcAve = 0.5 * reporting limit, and Uncen = 0.
-
-To illustrate how the dataRetrieval package handles a more complex censoring problem, let us say that in 2004 and earlier, we computed total phosphorus (tp) as the sum of dissolved phosphorus (dp) and particulate phosphorus (pp). From 2005 and onward, we have direct measurements of total phosphorus (tp). A small subset of this fictional data looks like Table \ref{tab:exampleComplexQW}.
-
-
+All dataframes returned from the Web services have some form of associated metadata. This information is included as attributes to the dataframe. All dataframes will have a \texttt{url} and \texttt{queryTime} attribute. For example, the url and query time used to obtain the data can be found as follows:
 
-<<label=tab:exampleComplexQW, echo=FALSE, eval=TRUE,results='asis'>>=
-cdate <- c("2003-02-15","2003-06-30","2004-09-15","2005-01-30","2005-05-30","2005-10-30")
-rdp <- c("", "<","<","","","")
-dp <- c(0.02,0.01,0.005,NA,NA,NA)
-rpp <- c("", "","<","","","")
-pp <- c(0.5,0.3,0.2,NA,NA,NA)
-rtp <- c("","","","","<","<")
-tp <- c(NA,NA,NA,0.43,0.05,0.02)
+<<meta1, eval=TRUE>>=
 
-DF <- data.frame(cdate,rdp,dp,rpp,pp,rtp,tp,stringsAsFactors=FALSE)
+attr(dischargeWI, "url")
 
-xTab <- xtable(DF, caption="Example data",digits=c(0,0,0,3,0,3,0,3),label="tab:exampleComplexQW")
-
-print(xTab,
-       caption.placement="top",
-       size = "\\footnotesize",
-       latex.environment=NULL,
-       sanitize.colnames.function =  bold.colHeaders,
-       sanitize.rownames.function = addSpace
-      )
+attr(dischargeWI, "queryTime")
 
 @
 
-The dataRetrieval package will \enquote{add up} all the values in a given row to form the total for that sample when using the Sample dataframe. Thus, you only want to enter data that should be added together. If you want a dataframe with multiple constituents that are not summed, do not use getNWISSample, getWQPSample, or getUserSample. The raw data functions: \texttt{getWQPData}, \texttt{getNWISqwData}, \texttt{getWQPqwData}, \texttt{getWQPData} will not sum constituents, but leave them in their individual columns. 
-
-For example, we might know the value for dp on 5/30/2005, but we don't want to put it in the table because under the rules of this data set, we are not supposed to add it in to the values in 2005.
+Depending on the format in which the data was obtained (XML, RDB, etc.), there will be additional information embedded in the dataframe as attributes. To discover the available attributes:
 
-For every sample, the EGRET package requires a pair of numbers to define an interval in which the true value lies (ConcLow and ConcHigh). In a simple uncensored case (the reported value is above the detection limit), ConcLow equals ConcHigh and the interval collapses down to a single point. In a simple censored case, the value might be reported as \verb@<@0.2, then ConcLow=NA and ConcHigh=0.2. We use NA instead of 0 as a way to elegantly handle future logarithm calculations.
+<<meta2, eval=TRUE>>=
 
-For the more complex example case, let us say dp is reported as \verb@<@0.01 and pp is reported as 0.3. We know that the total must be at least 0.3 and could be as much as 0.31. Therefore, ConcLow=0.3 and ConcHigh=0.31. Another case would be if dp is reported as \verb@<@0.005 and pp is reported \verb@<@0.2. We know in this case that the true value could be as low as zero, but could be as high as 0.205. Therefore, in this case, ConcLow=NA and ConcHigh=0.205. The Sample dataframe for the example data would be:
+names(attributes(dischargeWI))
 
-<<thirdExample,echo=FALSE>>=
-  compressedData <- compressData(DF)
-  Sample <- populateSampleColumns(compressedData)
 @
 
-<<thirdExampleView,echo=TRUE>>=
-  Sample
-@
-
-Section \ref{sec:userFiles} discusses inputting user-generated files. The functions \texttt{getUserSample} and \texttt{getNWISSample} assume summation with interval censoring inputs, and are discussed in sections \ref{sec:DailyFile} and \ref{sec:SampleFile}.
-
-\FloatBarrier
-
-%------------------------------------------------------------ 
-\subsection{User-Generated Data Files}
-\label{sec:userFiles}
-%------------------------------------------------------------ 
-In addition to retrieving data from the USGS Web services, the dataRetrieval package also includes functions to generate the Daily and Sample data frame from local files.
+For data obtained from \texttt{readNWISuv}, \texttt{readNWISdv}, and \texttt{readNWISgwl}, there are two attributes that are particularly useful: \texttt{siteInfo} and \texttt{variableInfo}.
 
-%------------------------------------------------------------ 
-\subsubsection{getUserDaily}
-\label{sec:DailyFile}
-%------------------------------------------------------------ 
-The \texttt{getUserDaily} function will load a user-supplied text file and convert it to the Daily dataframe. The file should have two columns, the first dates, the second values.  The dates are formatted either mm/dd/yyyy or yyyy-mm-dd. Using a 4-digit year is required. This function has the following inputs: filePath, fileName,hasHeader (TRUE/FALSE), separator, qUnit, and interactive (TRUE/FALSE). filePath is a string that defines the path to your file, and the string can either be a full path, or path relative to your R working directory. The input fileName is a string that defines the file name (including the extension).
+<<meta3, eval=TRUE>>=
 
-Text files that contain this sort of data require some sort of a separator, for example, a \enquote{csv} file (comma-separated value) file uses a comma to separate the date and value column. A tab delimited file would use a tab (\verb@"\t"@) rather than the comma (\texttt{"},\texttt{"}). Define the type of separator you choose to use in the function call in the \texttt{"}separator\texttt{"} argument, the default is \texttt{"},\texttt{"}. Another function input is a logical variable: hasHeader.  The default is TRUE. If your data does not have column names, set this variable to FALSE.
+siteInfo <- attr(dischargeWI, "siteInfo")
+head(siteInfo)
 
-Finally, qUnit is a numeric argument that defines the discharge units used in the input file.  The default is qUnit = 1 which assumes discharge is in cubic feet per second.  If the discharge in the file is already in cubic meters per second then set qUnit = 2.  If it is in some other units (like liters per second or acre-feet per day), the user must pre-process the data with a unit conversion that changes it to either cubic feet per second or cubic meters per second.
+variableInfo <- attr(dischargeWI, "variableInfo")
 
-So, if you have a file called \enquote{ChoptankRiverFlow.txt} located in a folder called \enquote{RData} on the C drive (this example is for the Windows\textregistered\ operating systems), and the file is structured as follows (tab-separated):
 
-
-% \singlespacing
-\begin{verbatim}
-date  Qdaily
-10/1/1999  107
-10/2/1999  85
-10/3/1999	76
-10/4/1999	76
-10/5/1999	113
-10/6/1999	98
-...
-\end{verbatim}
-% \doublespacing
-
-The call to open this file, convert the discharge to cubic meters per second, and populate the Daily data frame would be:
-<<openDaily, eval = FALSE>>=
-fileName <- "ChoptankRiverFlow.txt"
-filePath <-  "C:/RData/"
-Daily <-getFileDaily(filePath,fileName,
-                    separator="\t")
 @
 
-Microsoft\textregistered\ Excel files can be a bit tricky to import into R directly. The simplest way to get Excel data into R is to open the Excel file in Excel, then save it as a .csv file (comma-separated values). 
-
-\FloatBarrier
-
-%------------------------------------------------------------ 
-\subsubsection{getUserSample}
-\label{sec:SampleFile}
-%------------------------------------------------------------ 
+For data obtained from \texttt{readNWISpeak}, \texttt{readNWISmeas}, and \texttt{readNWISrating}, the \texttt{comment} attribute is useful.
 
-The \texttt{getUserSample} function will import a user-generated file and populate the Sample dataframe. The difference between sample data and discharge data is that the code requires a third column that contains a remark code, either blank or \verb@"<"@, which will tell the program that the data were \enquote{left-censored} (or, below the detection limit of the sensor). Therefore, the data must be in the form: date, remark, value.   An example of a comma-delimited file is:
+<<meta5, eval=FALSE>>=
+comment(peakData)
 
-\singlespacing
-\begin{verbatim}
-cdate;remarkCode;Nitrate
-10/7/1999,,1.4
-11/4/1999,<,0.99
-12/3/1999,,1.42
-1/4/2000,,1.59
-2/3/2000,,1.54
-...
-\end{verbatim}
-
-The call to open this file, and populate the Sample dataframe is:
-<<openSample, eval = FALSE>>=
-fileName <- "ChoptankRiverNitrate.csv"
-filePath <-  "C:/RData/"
-Sample <-getUserSample(filePath,fileName,
-                                separator=",")
+#Which is equivalent to:
+# attr(peakData, "comment")
 @
 
-When multiple constituents are to be summed, the format can be date, remark\_A, value\_A, remark\_b, value\_b, etc... A tab-separated example might look like the file below, where the columns are date, remark dissolved phosphate (rdp), dissolved phosphate (dp), remark particulate phosphorus (rpp), particulate phosphorus (pp), remark total phosphate (rtp), and total phosphate (tp):
+A subset (due to space considerations) of the \texttt{comment} metadata is shown here:
 
-\singlespacing
-\begin{verbatim}
-date  rdp	dp	rpp	pp	rtp	tp
-2003-02-15		0.020		0.500		
-2003-06-30	<	0.010		0.300		
-2004-09-15	<	0.005	<	0.200		
-2005-01-30						0.430
-2005-05-30					<	0.050
-2005-10-30					<	0.020
-...
-\end{verbatim}
+<<meta6, eval=TRUE>>=
+comment(peakData)[c(1:15,58:66)]
 
-
-<<openSample2, eval = FALSE>>=
-fileName <- "ChoptankPhosphorus.txt"
-filePath <-  "C:/RData/"
-Sample <-getUserSample(filePath,fileName,
-                                separator="\t")
 @
 
 
-\FloatBarrier
-
-%------------------------------------------------------------
-\subsection{Merge Report}
-%------------------------------------------------------------
-Finally, there is a function called \texttt{mergeReport} that will look at both the Daily and Sample dataframe, and populate Q and LogQ columns into the Sample dataframe. The default arguments are Daily and Sample, however if you want to use other similarly structured dataframes, you can specify localDaily or localSample. Once \texttt{mergeReport} has been run, the Sample dataframe will be augmented with the daily discharges for all the days with samples.  None of the water quality functions in EGRET will work without first having run the \texttt{mergeReport} function.
-
-
-<<mergeExample>>=
-siteNumber <- "01491000"
-parameterCd <- "00631"  # Nitrate
-startDate <- "2000-01-01"
-endDate <- "2013-01-01"
-
-Daily <- getNWISDaily(siteNumber, "00060", startDate, endDate)
-Sample <- getNWISSample(siteNumber,parameterCd, startDate, endDate)
-Sample <- mergeReport(Daily,Sample)
-names(Sample)
-@
-
-\FloatBarrier
-
-% %------------------------------------------------------------
-% \subsection{EGRET Plots}
-% %------------------------------------------------------------
-% The Daily, Sample, and INFO dataframes (described in Secs. \ref{INFOsubsection} - \ref{Samplesubsection}) are specifically formatted to be used with the EGRET package. The EGRET package has powerful modeling capabilities that use WRTDS, but EGRET also has graphing and tabular tools for exploring the data without using the WRTDS algorithm. See the EGRET vignette, user guide, and/or wiki (\url{https://github.com/USGS-R/EGRET/wiki}) for detailed information. Figure \ref{fig:egretEx} shows one of the plotting functions that can be used directly from the dataRetrieval dataframes.
-% 
-% <<egretEx, echo=TRUE, eval=TRUE, fig.cap="Default \\texttt{multiPlotDataOverview}">>=
-% # Continuing Choptank example from the previous sections
-% library(EGRET)
-% multiPlotDataOverview()
-% @
-% 
-% \FloatBarrier
-% \clearpage
-
-
-%------------------------------------------------------------
-\section{Summary}
-\label{sec:summary}
-%------------------------------------------------------------
-
-Tables \ref{tab:dataRetrievalFunctions1},\ref{tab:dataRetrievalOrg}, and \ref{tab:dataRetrievalMisc} summarize the data retrieval functions:
-
-\begin{table}
-{\footnotesize
-  \begin{threeparttable}[b]
-  \caption{dataRetrieval functions}
-  \label{tab:dataRetrievalFunctions1}
-%   \doublespacing
-\begin{tabular}{lll}
-  \hline
-\multicolumn{1}{c}{\textbf{\textsf{Data Type}}} &
-\multicolumn{1}{c}{\textbf{\textsf{Function Name}}} &
-\multicolumn{1}{c}{\textbf{\textsf{Description}}} \\ [0pt]
-  \hline
-  Daily & \texttt{getNWISdvData} & Raw USGS daily data \\ 
-  [5pt]Daily & \texttt{getNWISData} & Raw USGS data in generalized query \\
-  [5pt]Daily\tnote{1} & \texttt{getNWISDaily} & USGS daily values \\ 
-  [5pt]Daily\tnote{1} & \texttt{getUserDaily} & User-generated daily data \\ 
-  [5pt]Sample & \texttt{getNWISqwData} & Raw USGS water quality data \\
-  [5pt]Sample & \texttt{getWQPqwData} & Raw Water Quality Data Portal data \\ 
-  [5pt]Sample & \texttt{getWQPData} & Raw Water Quality Portal data in generalized query\\
-  [5pt]Sample\tnote{1} & \texttt{getNWISSample} & USGS water quality data\\
-  [5pt]Sample\tnote{1} & \texttt{getWQPSample} & Water Quality Data Portal data \\
-  [5pt]Sample\tnote{1} & \texttt{getUserSample} & User-generated sample data \\ 
-  [5pt]Unit & \texttt{getNWISunitData} & Raw USGS instantaneous data \\
-  [5pt]Information\tnote{1} & \texttt{getNWISInfo} & Station and parameter code information extracted from USGS\\ 
-  [5pt]Information\tnote{1} & \texttt{getWQPInfo} & Station and parameter information extracted from Water Quality Portal \\
-  [5pt]Information\tnote{1} & \texttt{getUserInfo} & Station and parameter information extracted from user-generated file \\ 
-  [5pt]Information & \texttt{getNWISPcodeInfo} & USGS parameter code information \\ 
-  [5pt]Information & \texttt{getNWISSiteInfo} & USGS station information \\ 
-  [5pt]Information & \texttt{getNWISDataAvailability} & Data available at USGS stations \\
-  [5pt]Information & \texttt{getNWISSites} & USGS station information in generalized query \\ 
-   \hline
-\end{tabular}
-
-  \begin{tablenotes}
-    \item[1] Indicates that the function creates a data frame suitable for use in EGRET software, otherwise data is returned in the exact form that it was received.
-  \end{tablenotes}
- \end{threeparttable}
-}
-\end{table}
-
-
-\begin{table}[!ht]
-\begin{minipage}{\linewidth}
-{\footnotesize
-\begin{threeparttable}[b]
-\caption{dataRetrieval functions organization} 
-\label{tab:dataRetrievalOrg}
-\begin{tabular}{|c|ccc|}
-
-\multicolumn{1}{c}{\textbf{\textsf{}}} &
-\multicolumn{1}{c}{\textbf{\textsf{Site Query}}} &
-\multicolumn{1}{c}{\textbf{\textsf{Meta Data}}} &
-\multicolumn{1}{c}{\textbf{\textsf{Data Retrieval}}} \\  [0pt]
-\hline
-\textbf{NWIS:} & \texttt{getNWISSites} & \texttt{getNWISInfo}\tnote{1} & \texttt{getNWISData}  \\
-\textit{Daily} & \texttt{getNWISDataAvailability}& \texttt{getNWISSiteInfo} & \texttt{getNWISDaily}\tnote{1} \\
-\textit{Unit/Instantaneous} &  & \texttt{getNWISPcodeInfo} & \texttt{getNWISSample}\tnote{1} \\
-\textit{Groundwater} & &  & \texttt{getNWISdvData}   \\
-\textit{Water Quality} & & & \texttt{getNWISunitData}\\
- & & & \texttt{getNWISqwData} \\
-\hline
-\textbf{Water Quality Portal:} & \texttt{getWQPSites} & \texttt{getWQPInfo}\tnote{1} &  \texttt{getWQPSample}\tnote{1}\\
-\textit{USGS} & & & \texttt{getWQPqwData} \\
-\textit{EPA} & & & \texttt{getWQPData} \\
-\textit{USDA} & & &  \\
-\hline
-\textbf{User-supplied files:} & & \texttt{getUserInfo}\tnote{1}  & \texttt{getUserDaily}\tnote{1}  \\
-\textit{Daily} & & & \texttt{getUserSample}\tnote{1}  \\
-\textit{Sample} & & &  \\
-\textit{Site Information} & & & \\
-
-   \hline
-\end{tabular}
-  \begin{tablenotes}
-    \item[1] Indicates that the function creates a data frame suitable for use in EGRET software, otherwise data is returned in the exact form that it was received.
-  \end{tablenotes}
- \end{threeparttable}
-}
-\end{minipage}
-\end{table}
-
-
-
-\begin{table}[!ht]
-\begin{minipage}{\linewidth}
-{\footnotesize
-\caption{Supplemental dataRetrieval functions} 
-\label{tab:dataRetrievalMisc}
-\begin{tabular}{ll}
-  \hline
-\multicolumn{1}{c}{\textbf{\textsf{Function Name}}} &
-\multicolumn{1}{c}{\textbf{\textsf{Description}}} \\  [0pt]
-  \hline
-  \texttt{compressData} &  Converts value/qualifier into ConcLow, ConcHigh, Uncen\\
-  [5pt]\texttt{getRDB1Data} & Retrieves and converts RDB data to dataframe\\
-  [5pt]\texttt{getWaterML1Data} & Retrieves and converts WaterML1 data to dataframe\\
-  [5pt]\texttt{getWaterML2Data} & Retrieves and converts WaterML2 data to dataframe\\
-  [5pt]\texttt{mergeReport} & Merges flow data from the daily record into the sample record\\
-  [5pt]\texttt{populateDateColumns} & Generates Julian, Month, Day, DecYear, and MonthSeq columns\\
-  [5pt]\texttt{removeDuplicates} & Removes duplicated rows\\
-  [5pt]\texttt{renameColumns} & Renames columns from raw data retrievals\\
-   \hline
-\end{tabular}
-}
-\end{minipage}
-\end{table}
-
-\FloatBarrier
-\clearpage
-
-
 %------------------------------------------------------------ 
 \section{Getting Started in R}
 \label{sec:appendix1}
@@ -1154,21 +884,15 @@ This section describes the options for downloading and installing the dataRetrie
 %------------------------------------------------------------ 
 If you are new to R, you will need to first install the latest version of R, which can be found here: \url{http://www.r-project.org/}.
 
-At any time, you can get information about any function in R by typing a question mark before the functions name.  This will open a file (in RStudio, in the Help window) that describes the function, the required arguments, and provides working examples.
+At any time, you can get information about any function in R by typing a question mark before the functions name.  This will open a file (in RStudio, in the Help window) that describes the function, the required arguments, and provides working examples. This will open a help file similar to Figure \ref{fig:help}. To see the raw code for a particular code, type the name of the function, without parentheses.
+
 
 <<helpFunc,eval = FALSE>>=
-?removeDuplicates
+?readNWISpCode
 @
 
-This will open a help file similar to Figure \ref{fig:help}.
-
 \FloatBarrier
 
-To see the raw code for a particular code, type the name of the function, without parentheses.:
-<<rawFunc,eval = TRUE>>=
-removeDuplicates
-@
-
 
 \begin{figure}[ht!]
 \centering
@@ -1190,10 +914,7 @@ vignette(dataRetrieval)
 The following command installs dataRetrieval and subsequent required packages:
 
 <<installFromCran,eval = FALSE>>=
-install.packages("dataRetrieval", 
-repos=c("http://usgs-r.github.com","http://cran.us.r-project.org"),
-dependencies=TRUE,
-type="both")
+install.packages("dataRetrieval")
 @
 
 After installing the package, you need to open the library each time you re-start R.  This is done with the simple command:
@@ -1209,16 +930,15 @@ library(dataRetrieval)
 There are a few steps that are required in order to create a table in Microsoft\textregistered\ software (Excel, Word, PowerPoint, etc.) from an R dataframe. There are certainly a variety of good methods, one of which is detailed here. The example we will step through here will be to create a table in Microsoft Excel based on the dataframe tableData:
 
 <<label=getSiteApp, echo=TRUE>>=
-availableData <- getNWISDataAvailability(siteNumber)
-dailyData <- availableData["dv" == availableData$service,]
-dailyData <- dailyData["00003" == dailyData$statCd,]
+availableData <- whatNWISdata(siteNumber, "dv")
+dailyData <- availableData["00003" == availableData$stat_cd,]
 
 tableData <- with(dailyData, 
       data.frame(
         shortName=srsname, 
-        Start=startDate, 
-        End=endDate, 
-        Count=count,
+        Start=begin_date, 
+        End=end_date, 
+        Count=count_nu,
         Units=parameter_units)
       )
 tableData
@@ -1256,12 +976,12 @@ Next, follow the steps below to open this file in Excel:
 \item Use the many formatting tools within Excel to customize the table
 \end{enumerate}
 
-From Excel, it is simple to copy and paste the tables in other Microsoft\textregistered\ software. An example using one of the default Excel table formats is here.
+From Excel, it is simple to copy and paste the tables in other Microsoft\textregistered\ software. An example using one of the default Excel table formats is here. Additional formatting could be requried in Excel, for example converting u to  $\mu$.
 
 \begin{figure}[ht!]
 \centering
  \resizebox{0.9\textwidth}{!}{\includegraphics{table1.png}} 
-\caption{A simple table produced in Microsoft\textregistered\ Excel. Additional formatting will be requried, for example converting u to  $\mu$ }
+\caption{A simple table produced in Microsoft\textregistered\ Excel.}
 \label{overflow}
 \end{figure}
 
diff --git a/inst/doc/dataRetrieval.pdf b/inst/doc/dataRetrieval.pdf
index 8ce1b7e27e7a84767cace3dd9d0bd8e4120a06da..22acfbdd0c171d021dd47bf6340d2578560e7d63 100644
Binary files a/inst/doc/dataRetrieval.pdf and b/inst/doc/dataRetrieval.pdf differ
diff --git a/inst/extdata/ChoptankRiverFlow.txt b/inst/extdata/ChoptankRiverFlow.txt
deleted file mode 100644
index 1aa92f002be518b6d369904d6acac643f62764d2..0000000000000000000000000000000000000000
--- a/inst/extdata/ChoptankRiverFlow.txt
+++ /dev/null
@@ -1,4384 +0,0 @@
-date	Qdaily
-10/1/1999	3.029902561
-10/2/1999	2.406931941
-10/3/1999	2.152080324
-10/4/1999	2.152080324
-10/5/1999	3.19980364
-10/6/1999	2.775050944
-10/7/1999	2.350298249
-10/8/1999	2.152080324
-10/9/1999	1.925545553
-10/10/1999	1.86891186
-10/11/1999	2.605149866
-10/12/1999	2.095446631
-10/13/1999	2.010496092
-10/14/1999	1.982179246
-10/15/1999	1.86891186
-10/16/1999	2.237030863
-10/17/1999	2.18039717
-10/18/1999	2.152080324
-10/19/1999	1.982179246
-10/20/1999	2.944952022
-10/21/1999	4.020992184
-10/22/1999	4.700596497
-10/23/1999	4.44574488
-10/24/1999	4.417428034
-10/25/1999	3.992675338
-10/26/1999	3.369704718
-10/27/1999	3.001585715
-10/28/1999	2.775050944
-10/29/1999	2.57683302
-10/30/1999	2.463565634
-10/31/1999	2.350298249
-11/1/1999	2.831684637
-11/2/1999	2.49188248
-11/3/1999	2.831684637
-11/4/1999	2.973268869
-11/5/1999	2.746734098
-11/6/1999	2.49188248
-11/7/1999	2.378615095
-11/8/1999	2.293664556
-11/9/1999	2.208714017
-11/10/1999	2.18039717
-11/11/1999	2.123763478
-11/12/1999	2.067129785
-11/13/1999	2.038812939
-11/14/1999	1.982179246
-11/15/1999	1.982179246
-11/16/1999	1.925545553
-11/17/1999	1.86891186
-11/18/1999	1.812278168
-11/19/1999	1.755644475
-11/20/1999	1.727327628
-11/21/1999	1.755644475
-11/22/1999	1.755644475
-11/23/1999	1.727327628
-11/24/1999	1.727327628
-11/25/1999	1.755644475
-11/26/1999	1.812278168
-11/27/1999	3.82277426
-11/28/1999	5.323567117
-11/29/1999	4.615645958
-11/30/1999	3.51128895
-12/1/1999	3.001585715
-12/2/1999	2.746734098
-12/3/1999	2.605149866
-12/4/1999	2.520199327
-12/5/1999	2.435248788
-12/6/1999	2.548516173
-12/7/1999	3.426338411
-12/8/1999	3.313071025
-12/9/1999	2.973268869
-12/10/1999	2.88831833
-12/11/1999	3.454655257
-12/12/1999	3.51128895
-12/13/1999	3.228120486
-12/14/1999	4.898814422
-12/15/1999	11.80812494
-12/16/1999	11.49663963
-12/17/1999	7.334063209
-12/18/1999	5.606735581
-12/19/1999	4.785547036
-12/20/1999	4.389111187
-12/21/1999	4.44574488
-12/22/1999	4.474061726
-12/23/1999	4.190893263
-12/24/1999	3.907724799
-12/25/1999	3.652873182
-12/26/1999	3.426338411
-12/27/1999	3.398021564
-12/28/1999	3.284754179
-12/29/1999	3.143169947
-12/30/1999	3.001585715
-12/31/1999	2.944952022
-1/1/2000	2.80336779
-1/2/2000	2.661783559
-1/3/2000	2.633466712
-1/4/2000	2.746734098
-1/5/2000	4.728913344
-1/6/2000	5.748319813
-1/7/2000	4.700596497
-1/8/2000	4.077625877
-1/9/2000	3.737823721
-1/10/2000	3.737823721
-1/11/2000	4.049309031
-1/12/2000	3.964358492
-1/13/2000	3.596239489
-1/14/2000	3.341387872
-1/15/2000	2.973268869
-1/16/2000	2.860001483
-1/17/2000	2.831684637
-1/18/2000	2.661783559
-1/19/2000	2.775050944
-1/20/2000	2.718417251
-1/21/2000	2.661783559
-1/22/2000	2.49188248
-1/23/2000	2.690100405
-1/24/2000	2.520199327
-1/25/2000	2.435248788
-1/26/2000	2.746734098
-1/27/2000	2.633466712
-1/28/2000	2.293664556
-1/29/2000	2.265347709
-1/30/2000	2.265347709
-1/31/2000	3.454655257
-2/1/2000	4.275843802
-2/2/2000	4.417428034
-2/3/2000	3.936041645
-2/4/2000	3.82277426
-2/5/2000	3.766140567
-2/6/2000	3.652873182
-2/7/2000	3.567922642
-2/8/2000	3.794457413
-2/9/2000	3.907724799
-2/10/2000	4.247526955
-2/11/2000	5.635052427
-2/12/2000	7.56059798
-2/13/2000	7.475647441
-2/14/2000	6.824359975
-2/15/2000	10.84535216
-2/16/2000	11.24178801
-2/17/2000	7.730499059
-2/18/2000	6.880993668
-2/19/2000	15.51763181
-2/20/2000	22.20040755
-2/21/2000	14.61149273
-2/22/2000	9.627727765
-2/23/2000	7.362380056
-2/24/2000	6.597825204
-2/25/2000	5.521785042
-2/26/2000	5.69168612
-2/27/2000	6.31465674
-2/28/2000	6.00317143
-2/29/2000	5.748319813
-3/1/2000	5.238616578
-3/2/2000	4.842180729
-3/3/2000	4.44574488
-3/4/2000	4.105942723
-3/5/2000	3.907724799
-3/6/2000	3.709506874
-3/7/2000	3.454655257
-3/8/2000	3.313071025
-3/9/2000	3.313071025
-3/10/2000	3.228120486
-3/11/2000	3.086536254
-3/12/2000	3.369704718
-3/13/2000	3.539605796
-3/14/2000	3.313071025
-3/15/2000	3.114853101
-3/16/2000	3.029902561
-3/17/2000	4.077625877
-3/18/2000	4.955448115
-3/19/2000	4.332477494
-3/20/2000	3.82277426
-3/21/2000	8.070301215
-3/22/2000	66.54458897
-3/23/2000	56.9168612
-3/24/2000	23.44634879
-3/25/2000	15.80080027
-3/26/2000	13.08238302
-3/27/2000	11.83644178
-3/28/2000	27.80714313
-3/29/2000	29.44952022
-3/30/2000	16.33882035
-3/31/2000	11.55327332
-4/1/2000	8.778222374
-4/2/2000	7.419013749
-4/3/2000	6.654458897
-4/4/2000	6.597825204
-4/5/2000	7.079211592
-4/6/2000	6.739409436
-4/7/2000	5.974854584
-4/8/2000	5.295250271
-4/9/2000	6.31465674
-4/10/2000	7.64554852
-4/11/2000	7.107528439
-4/12/2000	5.918220891
-4/13/2000	5.0687155
-4/14/2000	4.530695419
-4/15/2000	4.44574488
-4/16/2000	5.266933425
-4/17/2000	12.96911564
-4/18/2000	19.4819903
-4/19/2000	19.1138713
-4/20/2000	14.7813938
-4/21/2000	11.92139232
-4/22/2000	15.23446335
-4/23/2000	15.2061465
-4/24/2000	11.12852062
-4/25/2000	8.608321296
-4/26/2000	8.664954989
-4/27/2000	8.749905528
-4/28/2000	7.730499059
-4/29/2000	6.62614205
-4/30/2000	5.861587198
-5/1/2000	5.097032346
-5/2/2000	4.643962804
-5/3/2000	4.304160648
-5/4/2000	3.936041645
-5/5/2000	3.709506874
-5/6/2000	3.369704718
-5/7/2000	3.171486793
-5/8/2000	3.029902561
-5/9/2000	2.831684637
-5/10/2000	2.520199327
-5/11/2000	2.746734098
-5/12/2000	2.463565634
-5/13/2000	2.406931941
-5/14/2000	2.973268869
-5/15/2000	2.661783559
-5/16/2000	2.237030863
-5/17/2000	2.067129785
-5/18/2000	2.010496092
-5/19/2000	1.840595014
-5/20/2000	1.86891186
-5/21/2000	2.038812939
-5/22/2000	3.058219408
-5/23/2000	4.275843802
-5/24/2000	3.681190028
-5/25/2000	3.171486793
-5/26/2000	2.746734098
-5/27/2000	2.435248788
-5/28/2000	2.548516173
-5/29/2000	3.001585715
-5/30/2000	3.029902561
-5/31/2000	2.690100405
-6/1/2000	2.406931941
-6/2/2000	2.237030863
-6/3/2000	2.095446631
-6/4/2000	1.953862399
-6/5/2000	1.812278168
-6/6/2000	2.18039717
-6/7/2000	2.463565634
-6/8/2000	2.095446631
-6/9/2000	1.812278168
-6/10/2000	1.614060243
-6/11/2000	1.500792858
-6/12/2000	1.415842318
-6/13/2000	1.699010782
-6/14/2000	1.755644475
-6/15/2000	1.699010782
-6/16/2000	1.953862399
-6/17/2000	1.897228707
-6/18/2000	1.812278168
-6/19/2000	2.378615095
-6/20/2000	2.775050944
-6/21/2000	2.123763478
-6/22/2000	2.067129785
-6/23/2000	1.925545553
-6/24/2000	1.642377089
-6/25/2000	1.444159165
-6/26/2000	1.330891779
-6/27/2000	1.387525472
-6/28/2000	1.330891779
-6/29/2000	1.699010782
-6/30/2000	1.982179246
-7/1/2000	2.406931941
-7/2/2000	1.812278168
-7/3/2000	1.330891779
-7/4/2000	1.529109704
-7/5/2000	1.699010782
-7/6/2000	1.387525472
-7/7/2000	1.019406469
-7/8/2000	0.906139084
-7/9/2000	0.906139084
-7/10/2000	0.906139084
-7/11/2000	0.93445593
-7/12/2000	0.849505391
-7/13/2000	0.764554852
-7/14/2000	0.736238006
-7/15/2000	1.444159165
-7/16/2000	3.567922642
-7/17/2000	8.806539221
-7/18/2000	5.493468196
-7/19/2000	2.520199327
-7/20/2000	2.605149866
-7/21/2000	2.49188248
-7/22/2000	2.860001483
-7/23/2000	3.737823721
-7/24/2000	2.49188248
-7/25/2000	2.010496092
-7/26/2000	2.88831833
-7/27/2000	5.125349193
-7/28/2000	5.578418735
-7/29/2000	4.247526955
-7/30/2000	3.171486793
-7/31/2000	3.029902561
-8/1/2000	4.219210109
-8/2/2000	4.360794341
-8/3/2000	3.539605796
-8/4/2000	3.029902561
-8/5/2000	2.80336779
-8/6/2000	2.435248788
-8/7/2000	2.237030863
-8/8/2000	2.010496092
-8/9/2000	1.755644475
-8/10/2000	1.585743397
-8/11/2000	1.55742655
-8/12/2000	1.472476011
-8/13/2000	1.444159165
-8/14/2000	2.718417251
-8/15/2000	14.38495796
-8/16/2000	15.48931496
-8/17/2000	8.466737064
-8/18/2000	5.465151349
-8/19/2000	4.219210109
-8/20/2000	3.652873182
-8/21/2000	3.19980364
-8/22/2000	2.775050944
-8/23/2000	2.463565634
-8/24/2000	2.237030863
-8/25/2000	2.095446631
-8/26/2000	1.897228707
-8/27/2000	1.812278168
-8/28/2000	1.86891186
-8/29/2000	1.840595014
-8/30/2000	1.925545553
-8/31/2000	1.925545553
-9/1/2000	1.86891186
-9/2/2000	2.605149866
-9/3/2000	4.389111187
-9/4/2000	7.079211592
-9/5/2000	9.854262536
-9/6/2000	7.164162131
-9/7/2000	4.389111187
-9/8/2000	3.341387872
-9/9/2000	2.88831833
-9/10/2000	2.633466712
-9/11/2000	2.378615095
-9/12/2000	2.18039717
-9/13/2000	2.038812939
-9/14/2000	1.897228707
-9/15/2000	2.463565634
-9/16/2000	2.80336779
-9/17/2000	2.605149866
-9/18/2000	2.548516173
-9/19/2000	2.350298249
-9/20/2000	2.944952022
-9/21/2000	3.539605796
-9/22/2000	3.143169947
-9/23/2000	2.520199327
-9/24/2000	2.293664556
-9/25/2000	3.398021564
-9/26/2000	17.27327628
-9/27/2000	33.13071025
-9/28/2000	18.40595014
-9/29/2000	11.41168909
-9/30/2000	7.815449598
-10/1/2000	6.229706201
-10/2/2000	5.210299732
-10/3/2000	4.587329112
-10/4/2000	4.105942723
-10/5/2000	3.709506874
-10/6/2000	3.539605796
-10/7/2000	3.454655257
-10/8/2000	3.114853101
-10/9/2000	2.88831833
-10/10/2000	2.718417251
-10/11/2000	2.633466712
-10/12/2000	2.520199327
-10/13/2000	2.406931941
-10/14/2000	2.293664556
-10/15/2000	2.208714017
-10/16/2000	2.123763478
-10/17/2000	2.010496092
-10/18/2000	1.982179246
-10/19/2000	1.925545553
-10/20/2000	1.86891186
-10/21/2000	1.840595014
-10/22/2000	1.755644475
-10/23/2000	1.670693936
-10/24/2000	1.614060243
-10/25/2000	1.642377089
-10/26/2000	1.614060243
-10/27/2000	1.585743397
-10/28/2000	1.585743397
-10/29/2000	1.529109704
-10/30/2000	1.444159165
-10/31/2000	1.415842318
-11/1/2000	1.359208626
-11/2/2000	1.444159165
-11/3/2000	1.387525472
-11/4/2000	1.359208626
-11/5/2000	1.359208626
-11/6/2000	1.330891779
-11/7/2000	1.274258087
-11/8/2000	1.24594124
-11/9/2000	1.330891779
-11/10/2000	1.699010782
-11/11/2000	1.812278168
-11/12/2000	1.614060243
-11/13/2000	1.500792858
-11/14/2000	1.55742655
-11/15/2000	1.699010782
-11/16/2000	1.585743397
-11/17/2000	1.55742655
-11/18/2000	1.500792858
-11/19/2000	1.444159165
-11/20/2000	1.415842318
-11/21/2000	1.387525472
-11/22/2000	1.359208626
-11/23/2000	1.330891779
-11/24/2000	1.302574933
-11/25/2000	1.330891779
-11/26/2000	2.435248788
-11/27/2000	3.709506874
-11/28/2000	3.539605796
-11/29/2000	2.80336779
-11/30/2000	2.605149866
-12/1/2000	2.463565634
-12/2/2000	2.293664556
-12/3/2000	2.152080324
-12/4/2000	2.038812939
-12/5/2000	2.038812939
-12/6/2000	2.010496092
-12/7/2000	1.953862399
-12/8/2000	1.925545553
-12/9/2000	1.897228707
-12/10/2000	2.038812939
-12/11/2000	2.208714017
-12/12/2000	1.670693936
-12/13/2000	1.614060243
-12/14/2000	1.982179246
-12/15/2000	2.746734098
-12/16/2000	3.001585715
-12/17/2000	7.107528439
-12/18/2000	21.52080324
-12/19/2000	14.75307696
-12/20/2000	8.211885447
-12/21/2000	6.116438816
-12/22/2000	5.097032346
-12/23/2000	4.190893263
-12/24/2000	3.964358492
-12/25/2000	3.539605796
-12/26/2000	3.454655257
-12/27/2000	3.19980364
-12/28/2000	3.256437332
-12/29/2000	3.171486793
-12/30/2000	2.944952022
-12/31/2000	2.831684637
-1/1/2001	2.718417251
-1/2/2001	2.605149866
-1/3/2001	2.293664556
-1/4/2001	2.18039717
-1/5/2001	2.350298249
-1/6/2001	2.378615095
-1/7/2001	2.321981402
-1/8/2001	2.321981402
-1/9/2001	2.548516173
-1/10/2001	2.463565634
-1/11/2001	2.350298249
-1/12/2001	2.350298249
-1/13/2001	2.265347709
-1/14/2001	2.18039717
-1/15/2001	2.321981402
-1/16/2001	2.435248788
-1/17/2001	2.406931941
-1/18/2001	2.378615095
-1/19/2001	3.001585715
-1/20/2001	10.73208477
-1/21/2001	21.15268424
-1/22/2001	15.48931496
-1/23/2001	9.854262536
-1/24/2001	7.447330595
-1/25/2001	6.569508358
-1/26/2001	5.974854584
-1/27/2001	5.295250271
-1/28/2001	4.927131268
-1/29/2001	4.530695419
-1/30/2001	4.700596497
-1/31/2001	6.484557818
-2/1/2001	6.909310514
-2/2/2001	5.635052427
-2/3/2001	4.842180729
-2/4/2001	4.360794341
-2/5/2001	6.088121969
-2/6/2001	17.18832575
-2/7/2001	15.6308992
-2/8/2001	9.854262536
-2/9/2001	7.362380056
-2/10/2001	6.569508358
-2/11/2001	6.031488277
-2/12/2001	5.153666039
-2/13/2001	5.012081807
-2/14/2001	5.153666039
-2/15/2001	5.210299732
-2/16/2001	5.436834503
-2/17/2001	10.50555
-2/18/2001	14.46990849
-2/19/2001	9.20297507
-2/20/2001	7.24911267
-2/21/2001	6.399607279
-2/22/2001	5.720002966
-2/23/2001	5.351883964
-2/24/2001	5.238616578
-2/25/2001	5.351883964
-2/26/2001	7.64554852
-2/27/2001	8.438420218
-2/28/2001	6.796043128
-3/1/2001	5.889904045
-3/2/2001	5.266933425
-3/3/2001	4.842180729
-3/4/2001	5.238616578
-3/5/2001	5.946537737
-3/6/2001	7.787132751
-3/7/2001	7.503964288
-3/8/2001	6.229706201
-3/9/2001	5.38020081
-3/10/2001	5.012081807
-3/11/2001	4.672279651
-3/12/2001	4.332477494
-3/13/2001	5.097032346
-3/14/2001	6.682775743
-3/15/2001	6.201389355
-3/16/2001	6.201389355
-3/17/2001	6.456240972
-3/18/2001	5.889904045
-3/19/2001	5.012081807
-3/20/2001	4.474061726
-3/21/2001	11.24178801
-3/22/2001	52.38616578
-3/23/2001	28.8831833
-3/24/2001	15.82911712
-3/25/2001	11.32673855
-3/26/2001	8.721588682
-3/27/2001	7.617231673
-3/28/2001	6.824359975
-3/29/2001	6.93762736
-3/30/2001	21.74733801
-3/31/2001	39.64358492
-4/1/2001	19.76515877
-4/2/2001	13.6770368
-4/3/2001	10.27901523
-4/4/2001	8.183568601
-4/5/2001	6.965944207
-4/6/2001	6.31465674
-4/7/2001	6.031488277
-4/8/2001	5.408517656
-4/9/2001	5.210299732
-4/10/2001	6.00317143
-4/11/2001	6.654458897
-4/12/2001	8.523370757
-4/13/2001	8.608321296
-4/14/2001	7.362380056
-4/15/2001	6.286339894
-4/16/2001	5.889904045
-4/17/2001	6.229706201
-4/18/2001	6.399607279
-4/19/2001	6.059805123
-4/20/2001	5.153666039
-4/21/2001	4.615645958
-4/22/2001	4.417428034
-4/23/2001	4.219210109
-4/24/2001	3.992675338
-4/25/2001	3.82277426
-4/26/2001	3.624556335
-4/27/2001	3.426338411
-4/28/2001	3.341387872
-4/29/2001	3.143169947
-4/30/2001	2.944952022
-5/1/2001	2.860001483
-5/2/2001	2.775050944
-5/3/2001	2.57683302
-5/4/2001	2.49188248
-5/5/2001	2.378615095
-5/6/2001	2.237030863
-5/7/2001	1.982179246
-5/8/2001	1.925545553
-5/9/2001	1.840595014
-5/10/2001	1.86891186
-5/11/2001	1.812278168
-5/12/2001	1.755644475
-5/13/2001	1.614060243
-5/14/2001	1.330891779
-5/15/2001	1.415842318
-5/16/2001	1.359208626
-5/17/2001	1.359208626
-5/18/2001	1.387525472
-5/19/2001	1.55742655
-5/20/2001	1.500792858
-5/21/2001	1.699010782
-5/22/2001	2.237030863
-5/23/2001	2.321981402
-5/24/2001	1.953862399
-5/25/2001	1.670693936
-5/26/2001	3.879407953
-5/27/2001	16.25386982
-5/28/2001	17.1600089
-5/29/2001	11.38337224
-5/30/2001	7.107528439
-5/31/2001	4.870497575
-6/1/2001	4.020992184
-6/2/2001	6.116438816
-6/3/2001	7.95703383
-6/4/2001	6.371290433
-6/5/2001	4.643962804
-6/6/2001	3.992675338
-6/7/2001	4.983764961
-6/8/2001	6.682775743
-6/9/2001	5.408517656
-6/10/2001	4.049309031
-6/11/2001	3.369704718
-6/12/2001	2.944952022
-6/13/2001	2.690100405
-6/14/2001	2.49188248
-6/15/2001	2.406931941
-6/16/2001	2.321981402
-6/17/2001	88.9148976
-6/18/2001	108.4535216
-6/19/2001	31.71486793
-6/20/2001	15.03624542
-6/21/2001	10.02416361
-6/22/2001	9.571094073
-6/23/2001	8.438420218
-6/24/2001	7.87208329
-6/25/2001	6.484557818
-6/26/2001	5.408517656
-6/27/2001	4.672279651
-6/28/2001	4.105942723
-6/29/2001	3.596239489
-6/30/2001	3.228120486
-7/1/2001	3.143169947
-7/2/2001	3.907724799
-7/3/2001	3.482972103
-7/4/2001	3.313071025
-7/5/2001	5.040398654
-7/6/2001	10.76040162
-7/7/2001	7.050894746
-7/8/2001	4.643962804
-7/9/2001	4.020992184
-7/10/2001	3.567922642
-7/11/2001	3.19980364
-7/12/2001	2.775050944
-7/13/2001	2.690100405
-7/14/2001	2.321981402
-7/15/2001	2.067129785
-7/16/2001	1.925545553
-7/17/2001	1.699010782
-7/18/2001	2.208714017
-7/19/2001	2.746734098
-7/20/2001	2.123763478
-7/21/2001	1.727327628
-7/22/2001	1.55742655
-7/23/2001	1.359208626
-7/24/2001	1.330891779
-7/25/2001	1.302574933
-7/26/2001	1.614060243
-7/27/2001	4.020992184
-7/28/2001	2.916635176
-7/29/2001	2.18039717
-7/30/2001	2.520199327
-7/31/2001	2.18039717
-8/1/2001	1.840595014
-8/2/2001	1.614060243
-8/3/2001	1.302574933
-8/4/2001	1.160990701
-8/5/2001	1.160990701
-8/6/2001	1.217624394
-8/7/2001	0.991089623
-8/8/2001	0.93445593
-8/9/2001	0.849505391
-8/10/2001	0.877822237
-8/11/2001	1.132673855
-8/12/2001	2.321981402
-8/13/2001	3.114853101
-8/14/2001	2.605149866
-8/15/2001	1.472476011
-8/16/2001	1.24594124
-8/17/2001	1.132673855
-8/18/2001	1.217624394
-8/19/2001	1.160990701
-8/20/2001	1.387525472
-8/21/2001	1.104357008
-8/22/2001	0.93445593
-8/23/2001	0.906139084
-8/24/2001	1.217624394
-8/25/2001	1.047723316
-8/26/2001	0.93445593
-8/27/2001	0.93445593
-8/28/2001	0.877822237
-8/29/2001	0.764554852
-8/30/2001	0.707921159
-8/31/2001	0.906139084
-9/1/2001	1.019406469
-9/2/2001	0.877822237
-9/3/2001	0.792871698
-9/4/2001	0.792871698
-9/5/2001	1.160990701
-9/6/2001	1.104357008
-9/7/2001	0.991089623
-9/8/2001	0.792871698
-9/9/2001	0.764554852
-9/10/2001	0.764554852
-9/11/2001	0.764554852
-9/12/2001	0.707921159
-9/13/2001	0.651287466
-9/14/2001	0.62297062
-9/15/2001	0.736238006
-9/16/2001	0.679604313
-9/17/2001	0.651287466
-9/18/2001	0.651287466
-9/19/2001	0.651287466
-9/20/2001	0.651287466
-9/21/2001	0.764554852
-9/22/2001	0.764554852
-9/23/2001	0.707921159
-9/24/2001	1.359208626
-9/25/2001	1.415842318
-9/26/2001	0.962772777
-9/27/2001	0.821188545
-9/28/2001	0.906139084
-9/29/2001	0.736238006
-9/30/2001	0.62297062
-10/1/2001	0.736238006
-10/2/2001	0.792871698
-10/3/2001	0.736238006
-10/4/2001	0.679604313
-10/5/2001	0.679604313
-10/6/2001	0.62297062
-10/7/2001	0.679604313
-10/8/2001	0.62297062
-10/9/2001	0.594653774
-10/10/2001	0.594653774
-10/11/2001	0.594653774
-10/12/2001	0.594653774
-10/13/2001	0.594653774
-10/14/2001	0.594653774
-10/15/2001	0.651287466
-10/16/2001	0.679604313
-10/17/2001	0.62297062
-10/18/2001	0.594653774
-10/19/2001	0.566336927
-10/20/2001	0.594653774
-10/21/2001	0.594653774
-10/22/2001	0.594653774
-10/23/2001	0.594653774
-10/24/2001	0.594653774
-10/25/2001	0.594653774
-10/26/2001	0.538020081
-10/27/2001	0.509703235
-10/28/2001	0.538020081
-10/29/2001	0.509703235
-10/30/2001	0.538020081
-10/31/2001	0.538020081
-11/1/2001	0.566336927
-11/2/2001	0.538020081
-11/3/2001	0.566336927
-11/4/2001	0.538020081
-11/5/2001	0.566336927
-11/6/2001	0.538020081
-11/7/2001	0.509703235
-11/8/2001	0.509703235
-11/9/2001	0.538020081
-11/10/2001	0.509703235
-11/11/2001	0.509703235
-11/12/2001	0.538020081
-11/13/2001	0.509703235
-11/14/2001	0.538020081
-11/15/2001	0.538020081
-11/16/2001	0.566336927
-11/17/2001	0.566336927
-11/18/2001	0.538020081
-11/19/2001	0.566336927
-11/20/2001	0.566336927
-11/21/2001	0.594653774
-11/22/2001	0.62297062
-11/23/2001	0.594653774
-11/24/2001	0.594653774
-11/25/2001	0.594653774
-11/26/2001	0.877822237
-11/27/2001	0.821188545
-11/28/2001	0.736238006
-11/29/2001	0.679604313
-11/30/2001	0.707921159
-12/1/2001	0.679604313
-12/2/2001	0.62297062
-12/3/2001	0.566336927
-12/4/2001	0.594653774
-12/5/2001	0.594653774
-12/6/2001	0.594653774
-12/7/2001	0.594653774
-12/8/2001	0.62297062
-12/9/2001	0.849505391
-12/10/2001	0.764554852
-12/11/2001	0.792871698
-12/12/2001	0.792871698
-12/13/2001	0.764554852
-12/14/2001	0.764554852
-12/15/2001	0.707921159
-12/16/2001	0.707921159
-12/17/2001	0.707921159
-12/18/2001	0.792871698
-12/19/2001	0.764554852
-12/20/2001	0.707921159
-12/21/2001	0.651287466
-12/22/2001	0.62297062
-12/23/2001	0.62297062
-12/24/2001	0.792871698
-12/25/2001	0.821188545
-12/26/2001	0.764554852
-12/27/2001	0.679604313
-12/28/2001	0.679604313
-12/29/2001	0.651287466
-12/30/2001	0.62297062
-12/31/2001	0.594653774
-1/1/2002	0.566336927
-1/2/2002	0.566336927
-1/3/2002	0.566336927
-1/4/2002	0.594653774
-1/5/2002	0.594653774
-1/6/2002	0.707921159
-1/7/2002	1.302574933
-1/8/2002	1.132673855
-1/9/2002	0.93445593
-1/10/2002	0.877822237
-1/11/2002	0.962772777
-1/12/2002	1.047723316
-1/13/2002	0.962772777
-1/14/2002	0.877822237
-1/15/2002	0.849505391
-1/16/2002	0.792871698
-1/17/2002	0.764554852
-1/18/2002	0.764554852
-1/19/2002	0.792871698
-1/20/2002	1.019406469
-1/21/2002	1.047723316
-1/22/2002	1.047723316
-1/23/2002	1.019406469
-1/24/2002	1.019406469
-1/25/2002	1.132673855
-1/26/2002	1.104357008
-1/27/2002	1.019406469
-1/28/2002	0.962772777
-1/29/2002	0.962772777
-1/30/2002	0.93445593
-1/31/2002	0.93445593
-2/1/2002	0.93445593
-2/2/2002	0.93445593
-2/3/2002	0.877822237
-2/4/2002	0.849505391
-2/5/2002	0.849505391
-2/6/2002	0.821188545
-2/7/2002	0.93445593
-2/8/2002	1.047723316
-2/9/2002	0.962772777
-2/10/2002	0.93445593
-2/11/2002	0.991089623
-2/12/2002	0.991089623
-2/13/2002	0.93445593
-2/14/2002	0.906139084
-2/15/2002	0.877822237
-2/16/2002	0.877822237
-2/17/2002	0.877822237
-2/18/2002	0.849505391
-2/19/2002	0.821188545
-2/20/2002	0.849505391
-2/21/2002	0.849505391
-2/22/2002	0.849505391
-2/23/2002	0.849505391
-2/24/2002	0.849505391
-2/25/2002	0.821188545
-2/26/2002	0.821188545
-2/27/2002	0.821188545
-2/28/2002	0.792871698
-3/1/2002	0.792871698
-3/2/2002	0.792871698
-3/3/2002	1.444159165
-3/4/2002	1.415842318
-3/5/2002	1.132673855
-3/6/2002	1.019406469
-3/7/2002	0.962772777
-3/8/2002	0.962772777
-3/9/2002	1.019406469
-3/10/2002	0.991089623
-3/11/2002	0.906139084
-3/12/2002	0.906139084
-3/13/2002	1.019406469
-3/14/2002	1.24594124
-3/15/2002	1.132673855
-3/16/2002	1.104357008
-3/17/2002	1.047723316
-3/18/2002	1.359208626
-3/19/2002	1.699010782
-3/20/2002	2.010496092
-3/21/2002	3.313071025
-3/22/2002	3.482972103
-3/23/2002	2.321981402
-3/24/2002	1.953862399
-3/25/2002	1.812278168
-3/26/2002	1.727327628
-3/27/2002	2.88831833
-3/28/2002	3.992675338
-3/29/2002	3.143169947
-3/30/2002	3.256437332
-3/31/2002	3.114853101
-4/1/2002	2.973268869
-4/2/2002	2.775050944
-4/3/2002	2.548516173
-4/4/2002	2.378615095
-4/5/2002	2.18039717
-4/6/2002	2.038812939
-4/7/2002	1.925545553
-4/8/2002	1.840595014
-4/9/2002	1.812278168
-4/10/2002	2.49188248
-4/11/2002	2.746734098
-4/12/2002	2.520199327
-4/13/2002	2.520199327
-4/14/2002	2.406931941
-4/15/2002	2.265347709
-4/16/2002	2.18039717
-4/17/2002	2.010496092
-4/18/2002	1.840595014
-4/19/2002	1.755644475
-4/20/2002	1.670693936
-4/21/2002	1.614060243
-4/22/2002	1.727327628
-4/23/2002	1.727327628
-4/24/2002	1.55742655
-4/25/2002	1.500792858
-4/26/2002	1.529109704
-4/27/2002	1.444159165
-4/28/2002	3.907724799
-4/29/2002	6.597825204
-4/30/2002	6.739409436
-5/1/2002	4.927131268
-5/2/2002	5.974854584
-5/3/2002	9.51446038
-5/4/2002	8.410103371
-5/5/2002	5.351883964
-5/6/2002	4.389111187
-5/7/2002	3.766140567
-5/8/2002	3.341387872
-5/9/2002	2.916635176
-5/10/2002	2.661783559
-5/11/2002	2.321981402
-5/12/2002	2.095446631
-5/13/2002	3.879407953
-5/14/2002	5.38020081
-5/15/2002	4.360794341
-5/16/2002	3.256437332
-5/17/2002	2.661783559
-5/18/2002	3.539605796
-5/19/2002	6.767726282
-5/20/2002	6.682775743
-5/21/2002	4.530695419
-5/22/2002	3.51128895
-5/23/2002	2.916635176
-5/24/2002	2.548516173
-5/25/2002	2.350298249
-5/26/2002	2.152080324
-5/27/2002	1.925545553
-5/28/2002	1.755644475
-5/29/2002	1.614060243
-5/30/2002	1.55742655
-5/31/2002	1.55742655
-6/1/2002	1.472476011
-6/2/2002	1.330891779
-6/3/2002	1.24594124
-6/4/2002	1.132673855
-6/5/2002	0.991089623
-6/6/2002	1.359208626
-6/7/2002	4.389111187
-6/8/2002	6.286339894
-6/9/2002	3.029902561
-6/10/2002	1.840595014
-6/11/2002	1.614060243
-6/12/2002	1.359208626
-6/13/2002	1.302574933
-6/14/2002	1.472476011
-6/15/2002	1.415842318
-6/16/2002	1.189307547
-6/17/2002	1.132673855
-6/18/2002	1.047723316
-6/19/2002	1.132673855
-6/20/2002	2.661783559
-6/21/2002	2.038812939
-6/22/2002	1.415842318
-6/23/2002	1.217624394
-6/24/2002	1.104357008
-6/25/2002	0.991089623
-6/26/2002	0.962772777
-6/27/2002	0.877822237
-6/28/2002	0.849505391
-6/29/2002	0.821188545
-6/30/2002	0.764554852
-7/1/2002	0.679604313
-7/2/2002	0.538020081
-7/3/2002	0.509703235
-7/4/2002	0.453069542
-7/5/2002	0.396435849
-7/6/2002	0.339802156
-7/7/2002	0.396435849
-7/8/2002	0.396435849
-7/9/2002	0.396435849
-7/10/2002	0.339802156
-7/11/2002	0.283168464
-7/12/2002	0.27467341
-7/13/2002	0.31148531
-7/14/2002	0.424752696
-7/15/2002	0.396435849
-7/16/2002	0.368119003
-7/17/2002	0.246356563
-7/18/2002	0.212376348
-7/19/2002	0.164237709
-7/20/2002	0.226534771
-7/21/2002	0.254851617
-7/22/2002	0.155742655
-7/23/2002	0.147247601
-7/24/2002	0.509703235
-7/25/2002	0.792871698
-7/26/2002	0.651287466
-7/27/2002	0.453069542
-7/28/2002	0.453069542
-7/29/2002	0.339802156
-7/30/2002	0.215208032
-7/31/2002	0.178396132
-8/1/2002	0.155742655
-8/2/2002	0.096277278
-8/3/2002	0.169901078
-8/4/2002	0.141584232
-8/5/2002	0.138752547
-8/6/2002	0.192554555
-8/7/2002	0.084950539
-8/8/2002	0.059465377
-8/9/2002	0.050970323
-8/10/2002	0.067960431
-8/11/2002	0.059465377
-8/12/2002	0.050970323
-8/13/2002	0.04247527
-8/14/2002	0.113267385
-8/15/2002	0.240693194
-8/16/2002	0.065128747
-8/17/2002	0.026051499
-8/18/2002	0.015857434
-8/19/2002	0.009910896
-8/20/2002	0.013875255
-8/21/2002	0.011893075
-8/22/2002	0.0368119
-8/23/2002	0.012176244
-8/24/2002	0.028316846
-8/25/2002	0.04247527
-8/26/2002	0.138752547
-8/27/2002	0.223703086
-8/28/2002	0.198217925
-8/29/2002	0.792871698
-8/30/2002	1.019406469
-8/31/2002	0.764554852
-9/1/2002	1.614060243
-9/2/2002	4.247526955
-9/3/2002	5.0687155
-9/4/2002	2.80336779
-9/5/2002	1.387525472
-9/6/2002	1.047723316
-9/7/2002	0.906139084
-9/8/2002	0.821188545
-9/9/2002	0.736238006
-9/10/2002	0.679604313
-9/11/2002	0.594653774
-9/12/2002	0.566336927
-9/13/2002	0.509703235
-9/14/2002	0.538020081
-9/15/2002	0.538020081
-9/16/2002	0.764554852
-9/17/2002	0.821188545
-9/18/2002	0.679604313
-9/19/2002	0.566336927
-9/20/2002	0.481386388
-9/21/2002	0.481386388
-9/22/2002	0.481386388
-9/23/2002	0.453069542
-9/24/2002	0.453069542
-9/25/2002	0.396435849
-9/26/2002	0.509703235
-9/27/2002	0.792871698
-9/28/2002	0.792871698
-9/29/2002	0.679604313
-9/30/2002	0.566336927
-10/1/2002	0.509703235
-10/2/2002	0.481386388
-10/3/2002	0.424752696
-10/4/2002	0.396435849
-10/5/2002	0.424752696
-10/6/2002	0.424752696
-10/7/2002	0.396435849
-10/8/2002	0.396435849
-10/9/2002	0.368119003
-10/10/2002	0.792871698
-10/11/2002	2.548516173
-10/12/2002	4.927131268
-10/13/2002	4.785547036
-10/14/2002	2.605149866
-10/15/2002	1.755644475
-10/16/2002	2.038812939
-10/17/2002	3.426338411
-10/18/2002	4.530695419
-10/19/2002	3.029902561
-10/20/2002	2.123763478
-10/21/2002	1.897228707
-10/22/2002	1.727327628
-10/23/2002	1.614060243
-10/24/2002	1.472476011
-10/25/2002	1.415842318
-10/26/2002	2.463565634
-10/27/2002	4.360794341
-10/28/2002	4.13425957
-10/29/2002	2.916635176
-10/30/2002	3.907724799
-10/31/2002	6.286339894
-11/1/2002	6.994261053
-11/2/2002	5.493468196
-11/3/2002	4.332477494
-11/4/2002	3.624556335
-11/5/2002	3.284754179
-11/6/2002	4.360794341
-11/7/2002	6.994261053
-11/8/2002	6.682775743
-11/9/2002	4.870497575
-11/10/2002	4.049309031
-11/11/2002	3.879407953
-11/12/2002	5.040398654
-11/13/2002	12.37446186
-11/14/2002	14.32832426
-11/15/2002	9.769311997
-11/16/2002	7.843766444
-11/17/2002	27.97704421
-11/18/2002	43.89111187
-11/19/2002	25.14535958
-11/20/2002	15.51763181
-11/21/2002	11.49663963
-11/22/2002	9.684361458
-11/23/2002	9.344559302
-11/24/2002	8.410103371
-11/25/2002	6.994261053
-11/26/2002	6.229706201
-11/27/2002	5.861587198
-11/28/2002	5.521785042
-11/29/2002	5.238616578
-11/30/2002	5.0687155
-12/1/2002	4.898814422
-12/2/2002	4.587329112
-12/3/2002	4.304160648
-12/4/2002	4.049309031
-12/5/2002	4.700596497
-12/6/2002	4.275843802
-12/7/2002	3.709506874
-12/8/2002	4.275843802
-12/9/2002	3.879407953
-12/10/2002	3.652873182
-12/11/2002	4.502378573
-12/12/2002	14.72476011
-12/13/2002	18.97228707
-12/14/2002	21.18100108
-12/15/2002	22.25704125
-12/16/2002	14.38495796
-12/17/2002	10.27901523
-12/18/2002	7.928716983
-12/19/2002	6.824359975
-12/20/2002	7.220795824
-12/21/2002	12.0912934
-12/22/2002	11.94970917
-12/23/2002	8.58000445
-12/24/2002	7.107528439
-12/25/2002	10.30733208
-12/26/2002	26.22139974
-12/27/2002	18.6891186
-12/28/2002	12.06297655
-12/29/2002	9.089707684
-12/30/2002	7.87208329
-12/31/2002	6.93762736
-1/1/2003	7.815449598
-1/2/2003	16.93347413
-1/3/2003	18.20773222
-1/4/2003	16.87684044
-1/5/2003	14.72476011
-1/6/2003	11.38337224
-1/7/2003	9.910896229
-1/8/2003	8.976440299
-1/9/2003	8.240202293
-1/10/2003	7.617231673
-1/11/2003	6.682775743
-1/12/2003	5.889904045
-1/13/2003	5.323567117
-1/14/2003	5.125349193
-1/15/2003	4.870497575
-1/16/2003	4.559012265
-1/17/2003	4.44574488
-1/18/2003	4.389111187
-1/19/2003	4.785547036
-1/20/2003	3.964358492
-1/21/2003	3.766140567
-1/22/2003	3.709506874
-1/23/2003	4.105942723
-1/24/2003	3.143169947
-1/25/2003	3.058219408
-1/26/2003	2.831684637
-1/27/2003	2.831684637
-1/28/2003	2.718417251
-1/29/2003	2.718417251
-1/30/2003	2.775050944
-1/31/2003	2.633466712
-2/1/2003	2.718417251
-2/2/2003	4.219210109
-2/3/2003	4.615645958
-2/4/2003	4.615645958
-2/5/2003	5.918220891
-2/6/2003	5.408517656
-2/7/2003	4.785547036
-2/8/2003	4.615645958
-2/9/2003	4.190893263
-2/10/2003	4.13425957
-2/11/2003	4.247526955
-2/12/2003	4.077625877
-2/13/2003	3.766140567
-2/14/2003	3.482972103
-2/15/2003	3.681190028
-2/16/2003	3.681190028
-2/17/2003	2.237030863
-2/18/2003	3.907724799
-2/19/2003	4.615645958
-2/20/2003	4.898814422
-2/21/2003	5.153666039
-2/22/2003	9.316242455
-2/23/2003	43.89111187
-2/24/2003	77.02182212
-2/25/2003	42.75843802
-2/26/2003	26.8726872
-2/27/2003	18.66080176
-2/28/2003	15.14951281
-3/1/2003	13.13901672
-3/2/2003	16.76357305
-3/3/2003	39.36041645
-3/4/2003	24.29585418
-3/5/2003	19.25545553
-3/6/2003	34.82972103
-3/7/2003	40.49309031
-3/8/2003	22.11545701
-3/9/2003	17.38654367
-3/10/2003	16.76357305
-3/11/2003	13.93188841
-3/12/2003	10.53386685
-3/13/2003	9.571094073
-3/14/2003	8.778222374
-3/15/2003	7.985350676
-3/16/2003	7.277429517
-3/17/2003	8.693271835
-3/18/2003	12.00634286
-3/19/2003	10.64713423
-3/20/2003	9.287925609
-3/21/2003	16.90515728
-3/22/2003	17.18832575
-3/23/2003	12.57267979
-3/24/2003	9.486143534
-3/25/2003	7.87208329
-3/26/2003	7.022577899
-3/27/2003	7.24911267
-3/28/2003	7.334063209
-3/29/2003	6.93762736
-3/30/2003	9.51446038
-3/31/2003	12.96911564
-4/1/2003	10.9303027
-4/2/2003	8.26851914
-4/3/2003	7.220795824
-4/4/2003	6.484557818
-4/5/2003	6.088121969
-4/6/2003	5.804953506
-4/7/2003	5.833270352
-4/8/2003	8.211885447
-4/9/2003	13.33723464
-4/10/2003	20.61466416
-4/11/2003	21.88892224
-4/12/2003	26.84437036
-4/13/2003	19.99169354
-4/14/2003	13.50713572
-4/15/2003	9.656044612
-4/16/2003	8.070301215
-4/17/2003	7.107528439
-4/18/2003	6.286339894
-4/19/2003	5.776636659
-4/20/2003	5.408517656
-4/21/2003	5.153666039
-4/22/2003	5.0687155
-4/23/2003	4.842180729
-4/24/2003	4.559012265
-4/25/2003	4.247526955
-4/26/2003	4.672279651
-4/27/2003	5.153666039
-4/28/2003	4.672279651
-4/29/2003	4.275843802
-4/30/2003	3.992675338
-5/1/2003	3.624556335
-5/2/2003	3.426338411
-5/3/2003	3.228120486
-5/4/2003	2.860001483
-5/5/2003	2.690100405
-5/6/2003	2.718417251
-5/7/2003	2.661783559
-5/8/2003	2.973268869
-5/9/2003	2.831684637
-5/10/2003	2.973268869
-5/11/2003	2.860001483
-5/12/2003	2.690100405
-5/13/2003	2.406931941
-5/14/2003	2.152080324
-5/15/2003	2.010496092
-5/16/2003	4.870497575
-5/17/2003	11.49663963
-5/18/2003	11.01525324
-5/19/2003	7.475647441
-5/20/2003	5.69168612
-5/21/2003	4.75723019
-5/22/2003	4.955448115
-5/23/2003	5.266933425
-5/24/2003	5.833270352
-5/25/2003	6.144755662
-5/26/2003	14.01683895
-5/27/2003	28.14694529
-5/28/2003	18.03783114
-5/29/2003	14.27169057
-5/30/2003	12.8841651
-5/31/2003	9.967529922
-6/1/2003	8.296835986
-6/2/2003	7.928716983
-6/3/2003	6.62614205
-6/4/2003	6.173072508
-6/5/2003	7.617231673
-6/6/2003	8.438420218
-6/7/2003	10.25069839
-6/8/2003	29.44952022
-6/9/2003	25.00377534
-6/10/2003	14.75307696
-6/11/2003	9.486143534
-6/12/2003	11.86475863
-6/13/2003	13.84693787
-6/14/2003	12.23287763
-6/15/2003	12.85584825
-6/16/2003	8.749905528
-6/17/2003	6.484557818
-6/18/2003	10.9303027
-6/19/2003	21.26595162
-6/20/2003	25.51347858
-6/21/2003	62.86339894
-6/22/2003	38.51091106
-6/23/2003	23.21981402
-6/24/2003	14.21505688
-6/25/2003	9.061390838
-6/26/2003	7.135845285
-6/27/2003	6.00317143
-6/28/2003	5.153666039
-6/29/2003	4.587329112
-6/30/2003	4.219210109
-7/1/2003	3.992675338
-7/2/2003	3.652873182
-7/3/2003	7.928716983
-7/4/2003	10.59050054
-7/5/2003	7.220795824
-7/6/2003	5.889904045
-7/7/2003	6.201389355
-7/8/2003	6.711092589
-7/9/2003	5.635052427
-7/10/2003	17.10337521
-7/11/2003	28.31684637
-7/12/2003	14.89466119
-7/13/2003	9.146341377
-7/14/2003	7.192478978
-7/15/2003	14.18674003
-7/16/2003	10.64713423
-7/17/2003	6.880993668
-7/18/2003	5.578418735
-7/19/2003	4.700596497
-7/20/2003	4.219210109
-7/21/2003	3.737823721
-7/22/2003	3.341387872
-7/23/2003	3.171486793
-7/24/2003	3.596239489
-7/25/2003	3.624556335
-7/26/2003	2.661783559
-7/27/2003	2.265347709
-7/28/2003	2.265347709
-7/29/2003	4.360794341
-7/30/2003	4.813863883
-7/31/2003	3.794457413
-8/1/2003	3.001585715
-8/2/2003	2.80336779
-8/3/2003	2.775050944
-8/4/2003	2.718417251
-8/5/2003	2.605149866
-8/6/2003	3.086536254
-8/7/2003	3.029902561
-8/8/2003	5.351883964
-8/9/2003	5.550101888
-8/10/2003	5.153666039
-8/11/2003	5.0687155
-8/12/2003	4.672279651
-8/13/2003	5.804953506
-8/14/2003	5.635052427
-8/15/2003	4.360794341
-8/16/2003	4.275843802
-8/17/2003	7.730499059
-8/18/2003	10.81703531
-8/19/2003	7.050894746
-8/20/2003	4.870497575
-8/21/2003	4.020992184
-8/22/2003	3.313071025
-8/23/2003	2.916635176
-8/24/2003	2.350298249
-8/25/2003	2.067129785
-8/26/2003	1.925545553
-8/27/2003	2.095446631
-8/28/2003	2.237030863
-8/29/2003	2.095446631
-8/30/2003	1.953862399
-8/31/2003	2.18039717
-9/1/2003	1.953862399
-9/2/2003	2.321981402
-9/3/2003	2.265347709
-9/4/2003	2.633466712
-9/5/2003	2.746734098
-9/6/2003	2.406931941
-9/7/2003	2.038812939
-9/8/2003	1.840595014
-9/9/2003	1.642377089
-9/10/2003	1.55742655
-9/11/2003	1.55742655
-9/12/2003	1.55742655
-9/13/2003	3.567922642
-9/14/2003	6.31465674
-9/15/2003	11.21347116
-9/16/2003	35.96239489
-9/17/2003	22.2287244
-9/18/2003	12.48772925
-9/19/2003	27.38239044
-9/20/2003	28.8831833
-9/21/2003	16.08396874
-9/22/2003	10.44891631
-9/23/2003	9.287925609
-9/24/2003	12.68594717
-9/25/2003	11.92139232
-9/26/2003	8.070301215
-9/27/2003	6.286339894
-9/28/2003	5.493468196
-9/29/2003	5.210299732
-9/30/2003	4.842180729
-10/1/2003	4.44574488
-10/2/2003	4.020992184
-10/3/2003	3.681190028
-10/4/2003	3.398021564
-10/5/2003	3.114853101
-10/6/2003	2.944952022
-10/7/2003	2.80336779
-10/8/2003	2.633466712
-10/9/2003	2.520199327
-10/10/2003	2.661783559
-10/11/2003	2.237030863
-10/12/2003	2.152080324
-10/13/2003	2.038812939
-10/14/2003	2.010496092
-10/15/2003	3.058219408
-10/16/2003	3.737823721
-10/17/2003	3.086536254
-10/18/2003	2.548516173
-10/19/2003	2.237030863
-10/20/2003	2.123763478
-10/21/2003	2.067129785
-10/22/2003	2.067129785
-10/23/2003	2.010496092
-10/24/2003	1.86891186
-10/25/2003	1.812278168
-10/26/2003	1.755644475
-10/27/2003	1.925545553
-10/28/2003	3.681190028
-10/29/2003	10.53386685
-10/30/2003	17.98119744
-10/31/2003	12.62931348
-11/1/2003	7.673865366
-11/2/2003	5.889904045
-11/3/2003	5.097032346
-11/4/2003	4.672279651
-11/5/2003	4.44574488
-11/6/2003	13.70535364
-11/7/2003	31.9980364
-11/8/2003	21.40753585
-11/9/2003	13.39386833
-11/10/2003	8.523370757
-11/11/2003	6.597825204
-11/12/2003	6.258023047
-11/13/2003	9.797628844
-11/14/2003	12.40277871
-11/15/2003	8.495053911
-11/16/2003	6.711092589
-11/17/2003	6.00317143
-11/18/2003	5.323567117
-11/19/2003	5.635052427
-11/20/2003	16.16891928
-11/21/2003	23.19149718
-11/22/2003	14.63980957
-11/23/2003	9.967529922
-11/24/2003	7.135845285
-11/25/2003	6.965944207
-11/26/2003	6.427924126
-11/27/2003	5.861587198
-11/28/2003	5.550101888
-11/29/2003	7.334063209
-11/30/2003	9.174658223
-12/1/2003	7.56059798
-12/2/2003	6.258023047
-12/3/2003	5.408517656
-12/4/2003	4.927131268
-12/5/2003	7.164162131
-12/6/2003	16.25386982
-12/7/2003	16.28218666
-12/8/2003	11.63822386
-12/9/2003	8.58000445
-12/10/2003	7.447330595
-12/11/2003	19.82179246
-12/12/2003	28.31684637
-12/13/2003	16.39545405
-12/14/2003	13.53545256
-12/15/2003	29.73268869
-12/16/2003	23.21981402
-12/17/2003	15.97070135
-12/18/2003	17.30159313
-12/19/2003	15.31941389
-12/20/2003	11.52495647
-12/21/2003	8.523370757
-12/22/2003	7.135845285
-12/23/2003	6.569508358
-12/24/2003	12.57267979
-12/25/2003	33.13071025
-12/26/2003	20.78456523
-12/27/2003	13.93188841
-12/28/2003	10.61881739
-12/29/2003	8.325152832
-12/30/2003	7.390696902
-12/31/2003	6.824359975
-1/1/2004	6.258023047
-1/2/2004	5.889904045
-1/3/2004	5.776636659
-1/4/2004	5.720002966
-1/5/2004	5.663369274
-1/6/2004	6.258023047
-1/7/2004	6.456240972
-1/8/2004	5.606735581
-1/9/2004	5.125349193
-1/10/2004	4.672279651
-1/11/2004	4.813863883
-1/12/2004	4.389111187
-1/13/2004	4.389111187
-1/14/2004	4.247526955
-1/15/2004	4.020992184
-1/16/2004	3.681190028
-1/17/2004	3.82277426
-1/18/2004	3.907724799
-1/19/2004	5.153666039
-1/20/2004	5.238616578
-1/21/2004	4.530695419
-1/22/2004	4.105942723
-1/23/2004	3.539605796
-1/24/2004	3.256437332
-1/25/2004	2.831684637
-1/26/2004	2.661783559
-1/27/2004	3.284754179
-1/28/2004	3.313071025
-1/29/2004	3.058219408
-1/30/2004	2.916635176
-1/31/2004	2.661783559
-2/1/2004	3.029902561
-2/2/2004	2.633466712
-2/3/2004	3.454655257
-2/4/2004	12.48772925
-2/5/2004	11.55327332
-2/6/2004	8.89148976
-2/7/2004	47.28913344
-2/8/2004	31.43169947
-2/9/2004	14.66812642
-2/10/2004	9.486143534
-2/11/2004	7.928716983
-2/12/2004	7.164162131
-2/13/2004	6.427924126
-2/14/2004	5.974854584
-2/15/2004	5.493468196
-2/16/2004	5.012081807
-2/17/2004	4.643962804
-2/18/2004	4.530695419
-2/19/2004	4.389111187
-2/20/2004	3.907724799
-2/21/2004	3.964358492
-2/22/2004	4.105942723
-2/23/2004	3.794457413
-2/24/2004	3.567922642
-2/25/2004	3.539605796
-2/26/2004	3.313071025
-2/27/2004	3.114853101
-2/28/2004	3.001585715
-2/29/2004	2.88831833
-3/1/2004	2.831684637
-3/2/2004	2.916635176
-3/3/2004	2.944952022
-3/4/2004	2.860001483
-3/5/2004	2.88831833
-3/6/2004	3.737823721
-3/7/2004	5.663369274
-3/8/2004	5.776636659
-3/9/2004	4.870497575
-3/10/2004	4.304160648
-3/11/2004	3.936041645
-3/12/2004	3.681190028
-3/13/2004	3.313071025
-3/14/2004	2.944952022
-3/15/2004	2.831684637
-3/16/2004	3.114853101
-3/17/2004	4.332477494
-3/18/2004	4.672279651
-3/19/2004	4.474061726
-3/20/2004	4.643962804
-3/21/2004	4.474061726
-3/22/2004	4.077625877
-3/23/2004	3.681190028
-3/24/2004	3.284754179
-3/25/2004	3.058219408
-3/26/2004	3.058219408
-3/27/2004	3.001585715
-3/28/2004	3.001585715
-3/29/2004	2.775050944
-3/30/2004	2.633466712
-3/31/2004	3.001585715
-4/1/2004	4.190893263
-4/2/2004	5.889904045
-4/3/2004	7.079211592
-4/4/2004	6.711092589
-4/5/2004	6.512874665
-4/6/2004	5.889904045
-4/7/2004	4.842180729
-4/8/2004	4.474061726
-4/9/2004	4.247526955
-4/10/2004	3.964358492
-4/11/2004	3.681190028
-4/12/2004	3.879407953
-4/13/2004	18.20773222
-4/14/2004	58.04953506
-4/15/2004	59.46537737
-4/16/2004	28.60001483
-4/17/2004	16.16891928
-4/18/2004	12.28951132
-4/19/2004	9.967529922
-4/20/2004	7.390696902
-4/21/2004	6.824359975
-4/22/2004	6.229706201
-4/23/2004	5.69168612
-4/24/2004	5.635052427
-4/25/2004	5.351883964
-4/26/2004	5.097032346
-4/27/2004	6.399607279
-4/28/2004	7.079211592
-4/29/2004	6.031488277
-4/30/2004	5.181982885
-5/1/2004	4.785547036
-5/2/2004	4.587329112
-5/3/2004	4.75723019
-5/4/2004	5.266933425
-5/5/2004	5.351883964
-5/6/2004	4.983764961
-5/7/2004	4.587329112
-5/8/2004	4.360794341
-5/9/2004	3.964358492
-5/10/2004	3.936041645
-5/11/2004	4.247526955
-5/12/2004	3.766140567
-5/13/2004	3.256437332
-5/14/2004	3.086536254
-5/15/2004	2.775050944
-5/16/2004	2.661783559
-5/17/2004	2.406931941
-5/18/2004	2.321981402
-5/19/2004	2.293664556
-5/20/2004	2.237030863
-5/21/2004	1.925545553
-5/22/2004	2.010496092
-5/23/2004	1.982179246
-5/24/2004	1.614060243
-5/25/2004	1.444159165
-5/26/2004	2.860001483
-5/27/2004	2.80336779
-5/28/2004	2.321981402
-5/29/2004	1.925545553
-5/30/2004	1.529109704
-5/31/2004	1.302574933
-6/1/2004	1.24594124
-6/2/2004	1.189307547
-6/3/2004	2.038812939
-6/4/2004	1.727327628
-6/5/2004	2.010496092
-6/6/2004	2.520199327
-6/7/2004	2.237030863
-6/8/2004	1.86891186
-6/9/2004	1.444159165
-6/10/2004	1.217624394
-6/11/2004	1.55742655
-6/12/2004	2.265347709
-6/13/2004	1.86891186
-6/14/2004	1.387525472
-6/15/2004	1.189307547
-6/16/2004	1.076040162
-6/17/2004	4.275843802
-6/18/2004	10.08079731
-6/19/2004	5.0687155
-6/20/2004	3.001585715
-6/21/2004	2.18039717
-6/22/2004	1.897228707
-6/23/2004	1.500792858
-6/24/2004	1.274258087
-6/25/2004	1.104357008
-6/26/2004	1.189307547
-6/27/2004	1.132673855
-6/28/2004	1.104357008
-6/29/2004	1.104357008
-6/30/2004	1.076040162
-7/1/2004	1.076040162
-7/2/2004	1.076040162
-7/3/2004	1.047723316
-7/4/2004	1.019406469
-7/5/2004	1.019406469
-7/6/2004	0.962772777
-7/7/2004	0.906139084
-7/8/2004	0.877822237
-7/9/2004	0.849505391
-7/10/2004	0.849505391
-7/11/2004	0.877822237
-7/12/2004	0.93445593
-7/13/2004	4.332477494
-7/14/2004	13.28060095
-7/15/2004	5.889904045
-7/16/2004	3.029902561
-7/17/2004	1.897228707
-7/18/2004	2.265347709
-7/19/2004	5.153666039
-7/20/2004	6.965944207
-7/21/2004	4.105942723
-7/22/2004	2.831684637
-7/23/2004	1.982179246
-7/24/2004	1.585743397
-7/25/2004	1.699010782
-7/26/2004	1.585743397
-7/27/2004	1.444159165
-7/28/2004	1.982179246
-7/29/2004	1.953862399
-7/30/2004	1.614060243
-7/31/2004	1.217624394
-8/1/2004	1.24594124
-8/2/2004	1.642377089
-8/3/2004	1.783961321
-8/4/2004	1.359208626
-8/5/2004	0.962772777
-8/6/2004	0.821188545
-8/7/2004	0.736238006
-8/8/2004	0.679604313
-8/9/2004	0.651287466
-8/10/2004	0.594653774
-8/11/2004	0.566336927
-8/12/2004	1.415842318
-8/13/2004	1.132673855
-8/14/2004	0.821188545
-8/15/2004	0.736238006
-8/16/2004	0.679604313
-8/17/2004	0.62297062
-8/18/2004	1.076040162
-8/19/2004	0.991089623
-8/20/2004	0.93445593
-8/21/2004	0.877822237
-8/22/2004	0.849505391
-8/23/2004	0.707921159
-8/24/2004	0.736238006
-8/25/2004	0.707921159
-8/26/2004	0.679604313
-8/27/2004	0.651287466
-8/28/2004	0.651287466
-8/29/2004	0.62297062
-8/30/2004	0.651287466
-8/31/2004	1.217624394
-9/1/2004	1.302574933
-9/2/2004	0.93445593
-9/3/2004	0.877822237
-9/4/2004	0.821188545
-9/5/2004	0.736238006
-9/6/2004	0.707921159
-9/7/2004	0.651287466
-9/8/2004	0.679604313
-9/9/2004	0.849505391
-9/10/2004	0.849505391
-9/11/2004	0.707921159
-9/12/2004	0.566336927
-9/13/2004	0.538020081
-9/14/2004	0.566336927
-9/15/2004	0.651287466
-9/16/2004	0.821188545
-9/17/2004	0.792871698
-9/18/2004	0.849505391
-9/19/2004	0.849505391
-9/20/2004	0.736238006
-9/21/2004	0.651287466
-9/22/2004	0.651287466
-9/23/2004	0.594653774
-9/24/2004	0.538020081
-9/25/2004	0.509703235
-9/26/2004	0.481386388
-9/27/2004	0.509703235
-9/28/2004	1.160990701
-9/29/2004	1.755644475
-9/30/2004	1.387525472
-10/1/2004	1.104357008
-10/2/2004	0.906139084
-10/3/2004	1.047723316
-10/4/2004	1.047723316
-10/5/2004	0.906139084
-10/6/2004	0.821188545
-10/7/2004	0.764554852
-10/8/2004	0.707921159
-10/9/2004	0.679604313
-10/10/2004	0.679604313
-10/11/2004	0.62297062
-10/12/2004	0.707921159
-10/13/2004	0.821188545
-10/14/2004	0.679604313
-10/15/2004	0.679604313
-10/16/2004	0.651287466
-10/17/2004	0.651287466
-10/18/2004	0.62297062
-10/19/2004	0.62297062
-10/20/2004	0.764554852
-10/21/2004	0.792871698
-10/22/2004	0.736238006
-10/23/2004	0.679604313
-10/24/2004	0.679604313
-10/25/2004	0.679604313
-10/26/2004	0.679604313
-10/27/2004	0.62297062
-10/28/2004	0.594653774
-10/29/2004	0.566336927
-10/30/2004	0.594653774
-10/31/2004	0.594653774
-11/1/2004	0.594653774
-11/2/2004	0.566336927
-11/3/2004	0.566336927
-11/4/2004	0.62297062
-11/5/2004	1.472476011
-11/6/2004	1.614060243
-11/7/2004	1.302574933
-11/8/2004	1.047723316
-11/9/2004	0.93445593
-11/10/2004	0.877822237
-11/11/2004	0.849505391
-11/12/2004	1.132673855
-11/13/2004	3.766140567
-11/14/2004	6.796043128
-11/15/2004	4.870497575
-11/16/2004	2.973268869
-11/17/2004	2.350298249
-11/18/2004	2.038812939
-11/19/2004	1.897228707
-11/20/2004	1.812278168
-11/21/2004	1.783961321
-11/22/2004	1.670693936
-11/23/2004	1.727327628
-11/24/2004	1.727327628
-11/25/2004	1.699010782
-11/26/2004	1.727327628
-11/27/2004	1.55742655
-11/28/2004	2.435248788
-11/29/2004	5.153666039
-11/30/2004	5.012081807
-12/1/2004	3.794457413
-12/2/2004	3.737823721
-12/3/2004	3.737823721
-12/4/2004	3.19980364
-12/5/2004	2.831684637
-12/6/2004	2.661783559
-12/7/2004	2.605149866
-12/8/2004	2.88831833
-12/9/2004	3.228120486
-12/10/2004	4.020992184
-12/11/2004	5.833270352
-12/12/2004	6.031488277
-12/13/2004	5.408517656
-12/14/2004	4.587329112
-12/15/2004	3.936041645
-12/16/2004	3.398021564
-12/17/2004	3.539605796
-12/18/2004	3.398021564
-12/19/2004	2.916635176
-12/20/2004	2.944952022
-12/21/2004	2.718417251
-12/22/2004	2.548516173
-12/23/2004	2.690100405
-12/24/2004	3.936041645
-12/25/2004	4.417428034
-12/26/2004	3.766140567
-12/27/2004	3.284754179
-12/28/2004	2.973268869
-12/29/2004	2.746734098
-12/30/2004	2.718417251
-12/31/2004	2.633466712
-1/1/2005	2.548516173
-1/2/2005	2.49188248
-1/3/2005	2.378615095
-1/4/2005	2.406931941
-1/5/2005	2.463565634
-1/6/2005	2.633466712
-1/7/2005	2.690100405
-1/8/2005	2.718417251
-1/9/2005	2.80336779
-1/10/2005	2.605149866
-1/11/2005	2.548516173
-1/12/2005	2.520199327
-1/13/2005	2.775050944
-1/14/2005	6.229706201
-1/15/2005	18.77406914
-1/16/2005	13.93188841
-1/17/2005	7.702182212
-1/18/2005	6.088121969
-1/19/2005	5.153666039
-1/20/2005	5.181982885
-1/21/2005	4.105942723
-1/22/2005	3.228120486
-1/23/2005	3.851091106
-1/24/2005	4.275843802
-1/25/2005	4.162576416
-1/26/2005	3.992675338
-1/27/2005	3.936041645
-1/28/2005	3.907724799
-1/29/2005	3.907724799
-1/30/2005	3.766140567
-1/31/2005	3.341387872
-2/1/2005	3.058219408
-2/2/2005	2.916635176
-2/3/2005	2.916635176
-2/4/2005	2.944952022
-2/5/2005	3.19980364
-2/6/2005	3.709506874
-2/7/2005	4.360794341
-2/8/2005	4.842180729
-2/9/2005	5.210299732
-2/10/2005	5.436834503
-2/11/2005	5.295250271
-2/12/2005	4.75723019
-2/13/2005	4.304160648
-2/14/2005	4.13425957
-2/15/2005	5.748319813
-2/16/2005	7.64554852
-2/17/2005	6.342973587
-2/18/2005	5.436834503
-2/19/2005	5.238616578
-2/20/2005	4.332477494
-2/21/2005	4.020992184
-2/22/2005	4.530695419
-2/23/2005	4.898814422
-2/24/2005	4.75723019
-2/25/2005	4.700596497
-2/26/2005	4.672279651
-2/27/2005	4.842180729
-2/28/2005	5.097032346
-3/1/2005	5.748319813
-3/2/2005	7.135845285
-3/3/2005	6.824359975
-3/4/2005	5.550101888
-3/5/2005	5.012081807
-3/6/2005	4.728913344
-3/7/2005	4.587329112
-3/8/2005	4.898814422
-3/9/2005	6.399607279
-3/10/2005	6.371290433
-3/11/2005	5.436834503
-3/12/2005	5.040398654
-3/13/2005	4.842180729
-3/14/2005	4.502378573
-3/15/2005	4.190893263
-3/16/2005	3.992675338
-3/17/2005	3.82277426
-3/18/2005	3.681190028
-3/19/2005	3.482972103
-3/20/2005	3.482972103
-3/21/2005	3.596239489
-3/22/2005	3.454655257
-3/23/2005	5.408517656
-3/24/2005	16.02733504
-3/25/2005	15.29109704
-3/26/2005	9.627727765
-3/27/2005	7.050894746
-3/28/2005	8.296835986
-3/29/2005	17.1600089
-3/30/2005	15.48931496
-3/31/2005	9.372876148
-4/1/2005	7.022577899
-4/2/2005	10.73208477
-4/3/2005	51.25349193
-4/4/2005	33.98021564
-4/5/2005	16.33882035
-4/6/2005	10.84535216
-4/7/2005	8.183568601
-4/8/2005	43.32477494
-4/9/2005	50.40398654
-4/10/2005	21.18100108
-4/11/2005	14.0451558
-4/12/2005	9.910896229
-4/13/2005	7.617231673
-4/14/2005	6.541191511
-4/15/2005	6.116438816
-4/16/2005	5.606735581
-4/17/2005	5.210299732
-4/18/2005	4.813863883
-4/19/2005	4.502378573
-4/20/2005	4.360794341
-4/21/2005	4.190893263
-4/22/2005	4.077625877
-4/23/2005	4.049309031
-4/24/2005	4.474061726
-4/25/2005	4.502378573
-4/26/2005	4.190893263
-4/27/2005	3.964358492
-4/28/2005	3.766140567
-4/29/2005	3.482972103
-4/30/2005	3.766140567
-5/1/2005	4.785547036
-5/2/2005	5.804953506
-5/3/2005	5.0687155
-5/4/2005	4.389111187
-5/5/2005	4.105942723
-5/6/2005	3.794457413
-5/7/2005	3.596239489
-5/8/2005	3.398021564
-5/9/2005	3.143169947
-5/10/2005	2.860001483
-5/11/2005	2.690100405
-5/12/2005	2.548516173
-5/13/2005	2.463565634
-5/14/2005	2.378615095
-5/15/2005	2.520199327
-5/16/2005	2.463565634
-5/17/2005	2.152080324
-5/18/2005	1.86891186
-5/19/2005	1.897228707
-5/20/2005	7.362380056
-5/21/2005	27.69387575
-5/22/2005	18.29268275
-5/23/2005	10.59050054
-5/24/2005	6.484557818
-5/25/2005	5.776636659
-5/26/2005	5.578418735
-5/27/2005	5.295250271
-5/28/2005	4.615645958
-5/29/2005	4.13425957
-5/30/2005	3.652873182
-5/31/2005	3.228120486
-6/1/2005	2.88831833
-6/2/2005	2.633466712
-6/3/2005	3.228120486
-6/4/2005	4.304160648
-6/5/2005	4.304160648
-6/6/2005	3.851091106
-6/7/2005	6.229706201
-6/8/2005	10.19406469
-6/9/2005	6.682775743
-6/10/2005	5.181982885
-6/11/2005	4.530695419
-6/12/2005	3.992675338
-6/13/2005	3.369704718
-6/14/2005	2.944952022
-6/15/2005	2.605149866
-6/16/2005	2.293664556
-6/17/2005	2.18039717
-6/18/2005	1.953862399
-6/19/2005	1.783961321
-6/20/2005	1.812278168
-6/21/2005	1.670693936
-6/22/2005	1.614060243
-6/23/2005	1.812278168
-6/24/2005	1.642377089
-6/25/2005	1.529109704
-6/26/2005	1.415842318
-6/27/2005	1.55742655
-6/28/2005	1.840595014
-6/29/2005	1.614060243
-6/30/2005	1.55742655
-7/1/2005	1.415842318
-7/2/2005	1.302574933
-7/3/2005	1.24594124
-7/4/2005	1.104357008
-7/5/2005	1.104357008
-7/6/2005	1.585743397
-7/7/2005	2.152080324
-7/8/2005	2.152080324
-7/9/2005	2.548516173
-7/10/2005	1.925545553
-7/11/2005	1.472476011
-7/12/2005	1.302574933
-7/13/2005	1.160990701
-7/14/2005	1.160990701
-7/15/2005	1.387525472
-7/16/2005	1.500792858
-7/17/2005	6.427924126
-7/18/2005	5.918220891
-7/19/2005	2.548516173
-7/20/2005	1.840595014
-7/21/2005	1.614060243
-7/22/2005	1.359208626
-7/23/2005	1.160990701
-7/24/2005	1.076040162
-7/25/2005	1.132673855
-7/26/2005	1.217624394
-7/27/2005	1.047723316
-7/28/2005	1.132673855
-7/29/2005	1.189307547
-7/30/2005	4.927131268
-7/31/2005	2.18039717
-8/1/2005	1.585743397
-8/2/2005	1.24594124
-8/3/2005	1.076040162
-8/4/2005	0.849505391
-8/5/2005	0.764554852
-8/6/2005	0.93445593
-8/7/2005	1.500792858
-8/8/2005	1.472476011
-8/9/2005	1.217624394
-8/10/2005	1.302574933
-8/11/2005	1.160990701
-8/12/2005	0.991089623
-8/13/2005	0.821188545
-8/14/2005	0.736238006
-8/15/2005	0.62297062
-8/16/2005	0.679604313
-8/17/2005	0.93445593
-8/18/2005	0.906139084
-8/19/2005	0.821188545
-8/20/2005	0.877822237
-8/21/2005	0.821188545
-8/22/2005	0.707921159
-8/23/2005	0.62297062
-8/24/2005	0.594653774
-8/25/2005	0.566336927
-8/26/2005	0.481386388
-8/27/2005	0.453069542
-8/28/2005	0.453069542
-8/29/2005	1.132673855
-8/30/2005	0.566336927
-8/31/2005	0.509703235
-9/1/2005	0.453069542
-9/2/2005	0.453069542
-9/3/2005	0.453069542
-9/4/2005	0.396435849
-9/5/2005	0.368119003
-9/6/2005	0.283168464
-9/7/2005	0.283168464
-9/8/2005	0.31148531
-9/9/2005	0.31148531
-9/10/2005	0.339802156
-9/11/2005	0.339802156
-9/12/2005	0.339802156
-9/13/2005	0.368119003
-9/14/2005	0.509703235
-9/15/2005	0.62297062
-9/16/2005	0.481386388
-9/17/2005	0.453069542
-9/18/2005	0.396435849
-9/19/2005	0.368119003
-9/20/2005	0.339802156
-9/21/2005	0.339802156
-9/22/2005	0.31148531
-9/23/2005	0.31148531
-9/24/2005	0.31148531
-9/25/2005	0.31148531
-9/26/2005	0.31148531
-9/27/2005	0.31148531
-9/28/2005	0.31148531
-9/29/2005	0.31148531
-9/30/2005	0.31148531
-10/1/2005	0.283168464
-10/2/2005	0.283168464
-10/3/2005	0.283168464
-10/4/2005	0.31148531
-10/5/2005	0.339802156
-10/6/2005	0.877822237
-10/7/2005	0.877822237
-10/8/2005	2.038812939
-10/9/2005	3.539605796
-10/10/2005	2.605149866
-10/11/2005	1.86891186
-10/12/2005	2.152080324
-10/13/2005	1.670693936
-10/14/2005	1.529109704
-10/15/2005	1.444159165
-10/16/2005	1.189307547
-10/17/2005	0.991089623
-10/18/2005	0.93445593
-10/19/2005	0.877822237
-10/20/2005	0.849505391
-10/21/2005	0.821188545
-10/22/2005	1.444159165
-10/23/2005	2.152080324
-10/24/2005	1.840595014
-10/25/2005	3.567922642
-10/26/2005	4.785547036
-10/27/2005	4.672279651
-10/28/2005	3.19980364
-10/29/2005	2.350298249
-10/30/2005	2.095446631
-10/31/2005	1.925545553
-11/1/2005	1.755644475
-11/2/2005	1.670693936
-11/3/2005	1.585743397
-11/4/2005	1.55742655
-11/5/2005	1.529109704
-11/6/2005	1.444159165
-11/7/2005	1.444159165
-11/8/2005	1.387525472
-11/9/2005	1.359208626
-11/10/2005	1.302574933
-11/11/2005	1.302574933
-11/12/2005	1.24594124
-11/13/2005	1.24594124
-11/14/2005	1.24594124
-11/15/2005	1.24594124
-11/16/2005	1.274258087
-11/17/2005	1.670693936
-11/18/2005	1.614060243
-11/19/2005	1.444159165
-11/20/2005	1.359208626
-11/21/2005	1.699010782
-11/22/2005	3.907724799
-11/23/2005	6.456240972
-11/24/2005	6.654458897
-11/25/2005	4.530695419
-11/26/2005	3.51128895
-11/27/2005	3.001585715
-11/28/2005	2.80336779
-11/29/2005	2.661783559
-11/30/2005	4.870497575
-12/1/2005	6.880993668
-12/2/2005	5.720002966
-12/3/2005	4.502378573
-12/4/2005	4.190893263
-12/5/2005	4.304160648
-12/6/2005	4.360794341
-12/7/2005	4.13425957
-12/8/2005	3.737823721
-12/9/2005	3.851091106
-12/10/2005	4.785547036
-12/11/2005	5.238616578
-12/12/2005	4.927131268
-12/13/2005	4.530695419
-12/14/2005	4.049309031
-12/15/2005	3.624556335
-12/16/2005	8.325152832
-12/17/2005	15.9990182
-12/18/2005	10.78871847
-12/19/2005	6.852676821
-12/20/2005	5.861587198
-12/21/2005	5.266933425
-12/22/2005	4.785547036
-12/23/2005	4.502378573
-12/24/2005	4.360794341
-12/25/2005	4.304160648
-12/26/2005	4.898814422
-12/27/2005	5.69168612
-12/28/2005	5.266933425
-12/29/2005	4.898814422
-12/30/2005	5.351883964
-12/31/2005	5.69168612
-1/1/2006	5.181982885
-1/2/2006	4.813863883
-1/3/2006	9.882579383
-1/4/2006	15.9990182
-1/5/2006	11.18515432
-1/6/2006	7.588914827
-1/7/2006	6.371290433
-1/8/2006	5.748319813
-1/9/2006	5.238616578
-1/10/2006	4.870497575
-1/11/2006	4.587329112
-1/12/2006	4.502378573
-1/13/2006	4.44574488
-1/14/2006	4.587329112
-1/15/2006	4.927131268
-1/16/2006	4.672279651
-1/17/2006	4.275843802
-1/18/2006	4.813863883
-1/19/2006	6.824359975
-1/20/2006	6.711092589
-1/21/2006	5.776636659
-1/22/2006	5.181982885
-1/23/2006	9.20297507
-1/24/2006	19.73684192
-1/25/2006	15.12119596
-1/26/2006	9.712678304
-1/27/2006	7.277429517
-1/28/2006	6.342973587
-1/29/2006	6.031488277
-1/30/2006	5.521785042
-1/31/2006	5.323567117
-2/1/2006	5.040398654
-2/2/2006	4.728913344
-2/3/2006	4.700596497
-2/4/2006	4.559012265
-2/5/2006	4.785547036
-2/6/2006	4.927131268
-2/7/2006	4.587329112
-2/8/2006	4.332477494
-2/9/2006	4.162576416
-2/10/2006	3.964358492
-2/11/2006	3.851091106
-2/12/2006	4.785547036
-2/13/2006	4.587329112
-2/14/2006	4.502378573
-2/15/2006	4.530695419
-2/16/2006	5.012081807
-2/17/2006	6.229706201
-2/18/2006	6.597825204
-2/19/2006	5.720002966
-2/20/2006	5.040398654
-2/21/2006	4.75723019
-2/22/2006	4.643962804
-2/23/2006	4.643962804
-2/24/2006	4.559012265
-2/25/2006	4.304160648
-2/26/2006	4.247526955
-2/27/2006	3.992675338
-2/28/2006	3.907724799
-3/1/2006	3.766140567
-3/2/2006	3.709506874
-3/3/2006	3.766140567
-3/4/2006	3.482972103
-3/5/2006	3.313071025
-3/6/2006	3.171486793
-3/7/2006	3.143169947
-3/8/2006	3.001585715
-3/9/2006	2.973268869
-3/10/2006	2.973268869
-3/11/2006	2.88831833
-3/12/2006	2.775050944
-3/13/2006	2.775050944
-3/14/2006	2.831684637
-3/15/2006	2.690100405
-3/16/2006	2.463565634
-3/17/2006	2.378615095
-3/18/2006	2.350298249
-3/19/2006	2.265347709
-3/20/2006	2.208714017
-3/21/2006	2.18039717
-3/22/2006	2.18039717
-3/23/2006	2.123763478
-3/24/2006	2.010496092
-3/25/2006	2.067129785
-3/26/2006	2.095446631
-3/27/2006	2.038812939
-3/28/2006	1.925545553
-3/29/2006	1.953862399
-3/30/2006	1.86891186
-3/31/2006	1.86891186
-4/1/2006	1.897228707
-4/2/2006	1.897228707
-4/3/2006	1.812278168
-4/4/2006	2.095446631
-4/5/2006	1.925545553
-4/6/2006	1.812278168
-4/7/2006	1.783961321
-4/8/2006	2.038812939
-4/9/2006	2.633466712
-4/10/2006	2.265347709
-4/11/2006	2.010496092
-4/12/2006	1.897228707
-4/13/2006	1.86891186
-4/14/2006	1.86891186
-4/15/2006	2.123763478
-4/16/2006	2.237030863
-4/17/2006	1.925545553
-4/18/2006	1.812278168
-4/19/2006	1.755644475
-4/20/2006	1.642377089
-4/21/2006	1.529109704
-4/22/2006	2.321981402
-4/23/2006	6.173072508
-4/24/2006	7.673865366
-4/25/2006	6.654458897
-4/26/2006	5.323567117
-4/27/2006	4.275843802
-4/28/2006	3.596239489
-4/29/2006	3.029902561
-4/30/2006	2.605149866
-5/1/2006	2.406931941
-5/2/2006	2.237030863
-5/3/2006	2.152080324
-5/4/2006	2.067129785
-5/5/2006	1.925545553
-5/6/2006	1.812278168
-5/7/2006	1.699010782
-5/8/2006	1.500792858
-5/9/2006	1.387525472
-5/10/2006	1.330891779
-5/11/2006	1.274258087
-5/12/2006	1.755644475
-5/13/2006	1.699010782
-5/14/2006	1.500792858
-5/15/2006	1.670693936
-5/16/2006	1.925545553
-5/17/2006	1.727327628
-5/18/2006	1.500792858
-5/19/2006	1.387525472
-5/20/2006	1.330891779
-5/21/2006	1.217624394
-5/22/2006	1.160990701
-5/23/2006	1.132673855
-5/24/2006	1.019406469
-5/25/2006	1.019406469
-5/26/2006	1.047723316
-5/27/2006	1.076040162
-5/28/2006	0.991089623
-5/29/2006	0.93445593
-5/30/2006	0.93445593
-5/31/2006	0.849505391
-6/1/2006	0.792871698
-6/2/2006	0.877822237
-6/3/2006	1.840595014
-6/4/2006	1.642377089
-6/5/2006	1.160990701
-6/6/2006	0.93445593
-6/7/2006	0.962772777
-6/8/2006	0.991089623
-6/9/2006	1.160990701
-6/10/2006	1.217624394
-6/11/2006	0.991089623
-6/12/2006	0.962772777
-6/13/2006	0.991089623
-6/14/2006	0.93445593
-6/15/2006	0.906139084
-6/16/2006	0.821188545
-6/17/2006	0.792871698
-6/18/2006	0.707921159
-6/19/2006	0.594653774
-6/20/2006	0.764554852
-6/21/2006	0.679604313
-6/22/2006	0.792871698
-6/23/2006	1.132673855
-6/24/2006	0.736238006
-6/25/2006	8.693271835
-6/26/2006	37.66140567
-6/27/2006	69.65944207
-6/28/2006	51.53666039
-6/29/2006	39.36041645
-6/30/2006	17.07505836
-7/1/2006	9.82594569
-7/2/2006	6.965944207
-7/3/2006	5.465151349
-7/4/2006	4.813863883
-7/5/2006	5.946537737
-7/6/2006	8.806539221
-7/7/2006	16.16891928
-7/8/2006	11.52495647
-7/9/2006	7.277429517
-7/10/2006	5.493468196
-7/11/2006	4.559012265
-7/12/2006	3.851091106
-7/13/2006	3.567922642
-7/14/2006	3.228120486
-7/15/2006	2.88831833
-7/16/2006	2.520199327
-7/17/2006	2.265347709
-7/18/2006	1.953862399
-7/19/2006	1.86891186
-7/20/2006	1.727327628
-7/21/2006	1.642377089
-7/22/2006	2.010496092
-7/23/2006	5.493468196
-7/24/2006	5.210299732
-7/25/2006	3.426338411
-7/26/2006	2.520199327
-7/27/2006	2.152080324
-7/28/2006	1.840595014
-7/29/2006	1.699010782
-7/30/2006	1.585743397
-7/31/2006	1.444159165
-8/1/2006	1.330891779
-8/2/2006	1.330891779
-8/3/2006	1.104357008
-8/4/2006	1.076040162
-8/5/2006	0.991089623
-8/6/2006	0.906139084
-8/7/2006	0.93445593
-8/8/2006	0.93445593
-8/9/2006	0.736238006
-8/10/2006	0.764554852
-8/11/2006	0.906139084
-8/12/2006	0.764554852
-8/13/2006	0.736238006
-8/14/2006	0.679604313
-8/15/2006	0.566336927
-8/16/2006	0.62297062
-8/17/2006	0.481386388
-8/18/2006	0.566336927
-8/19/2006	0.538020081
-8/20/2006	0.62297062
-8/21/2006	0.538020081
-8/22/2006	0.424752696
-8/23/2006	0.368119003
-8/24/2006	0.396435849
-8/25/2006	0.424752696
-8/26/2006	0.453069542
-8/27/2006	0.453069542
-8/28/2006	0.453069542
-8/29/2006	0.509703235
-8/30/2006	1.189307547
-8/31/2006	0.62297062
-9/1/2006	0.792871698
-9/2/2006	2.775050944
-9/3/2006	3.737823721
-9/4/2006	2.718417251
-9/5/2006	1.925545553
-9/6/2006	2.690100405
-9/7/2006	2.293664556
-9/8/2006	1.670693936
-9/9/2006	1.387525472
-9/10/2006	1.274258087
-9/11/2006	1.160990701
-9/12/2006	1.076040162
-9/13/2006	1.019406469
-9/14/2006	1.132673855
-9/15/2006	1.444159165
-9/16/2006	1.585743397
-9/17/2006	1.444159165
-9/18/2006	1.302574933
-9/19/2006	1.189307547
-9/20/2006	1.132673855
-9/21/2006	1.019406469
-9/22/2006	0.962772777
-9/23/2006	0.906139084
-9/24/2006	0.906139084
-9/25/2006	0.906139084
-9/26/2006	0.849505391
-9/27/2006	0.792871698
-9/28/2006	0.821188545
-9/29/2006	1.047723316
-9/30/2006	0.991089623
-10/1/2006	0.962772777
-10/2/2006	0.93445593
-10/3/2006	0.877822237
-10/4/2006	0.821188545
-10/5/2006	0.821188545
-10/6/2006	2.152080324
-10/7/2006	3.51128895
-10/8/2006	2.633466712
-10/9/2006	1.727327628
-10/10/2006	1.415842318
-10/11/2006	2.237030863
-10/12/2006	5.946537737
-10/13/2006	4.587329112
-10/14/2006	2.746734098
-10/15/2006	2.010496092
-10/16/2006	1.783961321
-10/17/2006	1.840595014
-10/18/2006	3.228120486
-10/19/2006	3.737823721
-10/20/2006	3.313071025
-10/21/2006	3.284754179
-10/22/2006	3.19980364
-10/23/2006	2.605149866
-10/24/2006	2.237030863
-10/25/2006	2.010496092
-10/26/2006	1.86891186
-10/27/2006	1.812278168
-10/28/2006	3.058219408
-10/29/2006	4.842180729
-10/30/2006	5.012081807
-10/31/2006	3.709506874
-11/1/2006	2.916635176
-11/2/2006	2.57683302
-11/3/2006	2.350298249
-11/4/2006	2.152080324
-11/5/2006	2.010496092
-11/6/2006	1.953862399
-11/7/2006	1.953862399
-11/8/2006	3.029902561
-11/9/2006	5.663369274
-11/10/2006	6.201389355
-11/11/2006	4.813863883
-11/12/2006	4.813863883
-11/13/2006	8.353469679
-11/14/2006	15.54594866
-11/15/2006	11.6665407
-11/16/2006	7.900400137
-11/17/2006	14.01683895
-11/18/2006	15.51763181
-11/19/2006	9.684361458
-11/20/2006	6.965944207
-11/21/2006	5.833270352
-11/22/2006	5.606735581
-11/23/2006	13.96020526
-11/24/2006	20.13327777
-11/25/2006	14.24337372
-11/26/2006	10.02416361
-11/27/2006	7.730499059
-11/28/2006	6.654458897
-11/29/2006	6.031488277
-11/30/2006	5.635052427
-12/1/2006	5.351883964
-12/2/2006	5.125349193
-12/3/2006	4.75723019
-12/4/2006	4.44574488
-12/5/2006	4.275843802
-12/6/2006	4.13425957
-12/7/2006	3.964358492
-12/8/2006	3.794457413
-12/9/2006	3.51128895
-12/10/2006	3.369704718
-12/11/2006	3.284754179
-12/12/2006	3.19980364
-12/13/2006	3.143169947
-12/14/2006	3.19980364
-12/15/2006	3.19980364
-12/16/2006	3.114853101
-12/17/2006	2.916635176
-12/18/2006	2.831684637
-12/19/2006	2.775050944
-12/20/2006	2.633466712
-12/21/2006	2.520199327
-12/22/2006	2.520199327
-12/23/2006	3.398021564
-12/24/2006	4.105942723
-12/25/2006	4.020992184
-12/26/2006	4.983764961
-12/27/2006	6.00317143
-12/28/2006	5.153666039
-12/29/2006	4.417428034
-12/30/2006	4.049309031
-12/31/2006	3.82277426
-1/1/2007	5.720002966
-1/2/2007	16.25386982
-1/3/2007	14.24337372
-1/4/2007	8.26851914
-1/5/2007	6.62614205
-1/6/2007	6.569508358
-1/7/2007	7.022577899
-1/8/2007	17.78297952
-1/9/2007	38.79407953
-1/10/2007	19.02892076
-1/11/2007	11.92139232
-1/12/2007	8.353469679
-1/13/2007	7.24911267
-1/14/2007	6.541191511
-1/15/2007	6.342973587
-1/16/2007	6.00317143
-1/17/2007	5.521785042
-1/18/2007	4.983764961
-1/19/2007	4.927131268
-1/20/2007	4.870497575
-1/21/2007	4.587329112
-1/22/2007	4.502378573
-1/23/2007	4.559012265
-1/24/2007	4.530695419
-1/25/2007	4.389111187
-1/26/2007	4.190893263
-1/27/2007	3.992675338
-1/28/2007	4.020992184
-1/29/2007	3.907724799
-1/30/2007	3.596239489
-1/31/2007	3.426338411
-2/1/2007	3.256437332
-2/2/2007	3.341387872
-2/3/2007	3.482972103
-2/4/2007	3.228120486
-2/5/2007	2.944952022
-2/6/2007	2.831684637
-2/7/2007	2.860001483
-2/8/2007	2.831684637
-2/9/2007	2.775050944
-2/10/2007	2.718417251
-2/11/2007	2.661783559
-2/12/2007	2.605149866
-2/13/2007	2.520199327
-2/14/2007	4.530695419
-2/15/2007	8.863172913
-2/16/2007	7.475647441
-2/17/2007	5.38020081
-2/18/2007	4.813863883
-2/19/2007	4.530695419
-2/20/2007	4.247526955
-2/21/2007	4.332477494
-2/22/2007	4.219210109
-2/23/2007	4.190893263
-2/24/2007	3.879407953
-2/25/2007	3.652873182
-2/26/2007	4.813863883
-2/27/2007	7.050894746
-2/28/2007	6.711092589
-3/1/2007	5.493468196
-3/2/2007	6.597825204
-3/3/2007	10.84535216
-3/4/2007	7.64554852
-3/5/2007	5.833270352
-3/6/2007	5.125349193
-3/7/2007	4.615645958
-3/8/2007	4.360794341
-3/9/2007	4.247526955
-3/10/2007	4.13425957
-3/11/2007	4.077625877
-3/12/2007	4.020992184
-3/13/2007	3.851091106
-3/14/2007	3.766140567
-3/15/2007	3.681190028
-3/16/2007	6.456240972
-3/17/2007	37.37823721
-3/18/2007	26.58951874
-3/19/2007	13.90357157
-3/20/2007	8.749905528
-3/21/2007	6.456240972
-3/22/2007	5.550101888
-3/23/2007	5.295250271
-3/24/2007	5.210299732
-3/25/2007	5.125349193
-3/26/2007	4.927131268
-3/27/2007	4.983764961
-3/28/2007	4.813863883
-3/29/2007	4.44574488
-3/30/2007	4.13425957
-3/31/2007	3.964358492
-4/1/2007	3.766140567
-4/2/2007	3.766140567
-4/3/2007	3.652873182
-4/4/2007	4.049309031
-4/5/2007	5.889904045
-4/6/2007	5.804953506
-4/7/2007	4.955448115
-4/8/2007	4.474061726
-4/9/2007	4.105942723
-4/10/2007	3.766140567
-4/11/2007	3.567922642
-4/12/2007	6.201389355
-4/13/2007	10.33564892
-4/14/2007	8.098618061
-4/15/2007	22.37030863
-4/16/2007	118.0812494
-4/17/2007	65.97825204
-4/18/2007	24.66397319
-4/19/2007	16.3671372
-4/20/2007	12.62931348
-4/21/2007	9.571094073
-4/22/2007	7.815449598
-4/23/2007	6.852676821
-4/24/2007	6.144755662
-4/25/2007	5.606735581
-4/26/2007	5.210299732
-4/27/2007	4.927131268
-4/28/2007	5.0687155
-4/29/2007	4.842180729
-4/30/2007	4.502378573
-5/1/2007	4.304160648
-5/2/2007	3.709506874
-5/3/2007	3.51128895
-5/4/2007	3.284754179
-5/5/2007	3.086536254
-5/6/2007	2.973268869
-5/7/2007	2.80336779
-5/8/2007	2.548516173
-5/9/2007	2.49188248
-5/10/2007	2.548516173
-5/11/2007	2.57683302
-5/12/2007	2.49188248
-5/13/2007	2.690100405
-5/14/2007	2.57683302
-5/15/2007	2.265347709
-5/16/2007	2.18039717
-5/17/2007	2.690100405
-5/18/2007	2.520199327
-5/19/2007	2.237030863
-5/20/2007	2.18039717
-5/21/2007	2.038812939
-5/22/2007	1.840595014
-5/23/2007	1.699010782
-5/24/2007	1.55742655
-5/25/2007	1.472476011
-5/26/2007	1.415842318
-5/27/2007	1.330891779
-5/28/2007	1.387525472
-5/29/2007	1.387525472
-5/30/2007	1.302574933
-5/31/2007	1.217624394
-6/1/2007	1.132673855
-6/2/2007	1.104357008
-6/3/2007	1.132673855
-6/4/2007	2.237030863
-6/5/2007	2.350298249
-6/6/2007	1.755644475
-6/7/2007	1.444159165
-6/8/2007	1.274258087
-6/9/2007	1.160990701
-6/10/2007	1.132673855
-6/11/2007	1.104357008
-6/12/2007	1.076040162
-6/13/2007	1.812278168
-6/14/2007	1.585743397
-6/15/2007	1.444159165
-6/16/2007	1.387525472
-6/17/2007	1.330891779
-6/18/2007	1.160990701
-6/19/2007	0.962772777
-6/20/2007	0.906139084
-6/21/2007	0.906139084
-6/22/2007	0.877822237
-6/23/2007	0.849505391
-6/24/2007	0.764554852
-6/25/2007	0.736238006
-6/26/2007	0.679604313
-6/27/2007	0.651287466
-6/28/2007	0.651287466
-6/29/2007	0.651287466
-6/30/2007	0.679604313
-7/1/2007	0.707921159
-7/2/2007	0.679604313
-7/3/2007	0.481386388
-7/4/2007	0.509703235
-7/5/2007	0.651287466
-7/6/2007	0.566336927
-7/7/2007	0.453069542
-7/8/2007	0.396435849
-7/9/2007	0.31148531
-7/10/2007	0.62297062
-7/11/2007	1.55742655
-7/12/2007	1.812278168
-7/13/2007	1.076040162
-7/14/2007	0.821188545
-7/15/2007	0.62297062
-7/16/2007	0.368119003
-7/17/2007	0.368119003
-7/18/2007	0.368119003
-7/19/2007	0.339802156
-7/20/2007	0.263346671
-7/21/2007	0.283168464
-7/22/2007	0.223703086
-7/23/2007	0.215208032
-7/24/2007	0.212376348
-7/25/2007	0.15291097
-7/26/2007	0.186891186
-7/27/2007	0.283168464
-7/28/2007	0.31148531
-7/29/2007	0.31148531
-7/30/2007	1.274258087
-7/31/2007	2.831684637
-8/1/2007	1.925545553
-8/2/2007	0.962772777
-8/3/2007	0.594653774
-8/4/2007	0.424752696
-8/5/2007	0.368119003
-8/6/2007	0.453069542
-8/7/2007	0.368119003
-8/8/2007	0.271841725
-8/9/2007	0.368119003
-8/10/2007	0.424752696
-8/11/2007	0.368119003
-8/12/2007	0.277505094
-8/13/2007	0.201049609
-8/14/2007	0.141584232
-8/15/2007	0.155742655
-8/16/2007	0.192554555
-8/17/2007	0.283168464
-8/18/2007	0.201049609
-8/19/2007	0.215208032
-8/20/2007	0.424752696
-8/21/2007	0.764554852
-8/22/2007	0.764554852
-8/23/2007	0.679604313
-8/24/2007	0.594653774
-8/25/2007	0.396435849
-8/26/2007	0.509703235
-8/27/2007	0.453069542
-8/28/2007	0.368119003
-8/29/2007	0.339802156
-8/30/2007	0.339802156
-8/31/2007	0.31148531
-9/1/2007	0.280336779
-9/2/2007	0.229366456
-9/3/2007	0.212376348
-9/4/2007	0.209544663
-9/5/2007	0.453069542
-9/6/2007	0.283168464
-9/7/2007	0.257683302
-9/8/2007	0.172732763
-9/9/2007	0.155742655
-9/10/2007	0.203881294
-9/11/2007	0.368119003
-9/12/2007	0.368119003
-9/13/2007	0.31148531
-9/14/2007	0.277505094
-9/15/2007	0.453069542
-9/16/2007	0.453069542
-9/17/2007	0.368119003
-9/18/2007	0.31148531
-9/19/2007	0.31148531
-9/20/2007	0.31148531
-9/21/2007	0.283168464
-9/22/2007	0.283168464
-9/23/2007	0.263346671
-9/24/2007	0.254851617
-9/25/2007	0.254851617
-9/26/2007	0.246356563
-9/27/2007	0.237861509
-9/28/2007	0.266178356
-9/29/2007	0.254851617
-9/30/2007	0.31148531
-10/1/2007	0.23219814
-10/2/2007	0.223703086
-10/3/2007	0.226534771
-10/4/2007	0.229366456
-10/5/2007	0.283168464
-10/6/2007	0.271841725
-10/7/2007	0.260514987
-10/8/2007	0.212376348
-10/9/2007	0.201049609
-10/10/2007	0.283168464
-10/11/2007	0.283168464
-10/12/2007	0.229366456
-10/13/2007	0.226534771
-10/14/2007	0.246356563
-10/15/2007	0.257683302
-10/16/2007	0.254851617
-10/17/2007	0.266178356
-10/18/2007	0.283168464
-10/19/2007	0.280336779
-10/20/2007	0.283168464
-10/21/2007	0.283168464
-10/22/2007	0.235029825
-10/23/2007	0.237861509
-10/24/2007	0.424752696
-10/25/2007	0.849505391
-10/26/2007	0.821188545
-10/27/2007	1.359208626
-10/28/2007	1.274258087
-10/29/2007	0.962772777
-10/30/2007	0.764554852
-10/31/2007	0.651287466
-11/1/2007	0.538020081
-11/2/2007	0.481386388
-11/3/2007	0.453069542
-11/4/2007	0.453069542
-11/5/2007	0.481386388
-11/6/2007	0.453069542
-11/7/2007	0.509703235
-11/8/2007	0.481386388
-11/9/2007	0.453069542
-11/10/2007	0.453069542
-11/11/2007	0.453069542
-11/12/2007	0.566336927
-11/13/2007	0.453069542
-11/14/2007	0.453069542
-11/15/2007	0.453069542
-11/16/2007	0.509703235
-11/17/2007	0.453069542
-11/18/2007	0.453069542
-11/19/2007	0.538020081
-11/20/2007	0.62297062
-11/21/2007	0.538020081
-11/22/2007	0.538020081
-11/23/2007	0.509703235
-11/24/2007	0.453069542
-11/25/2007	0.424752696
-11/26/2007	0.453069542
-11/27/2007	0.509703235
-11/28/2007	0.509703235
-11/29/2007	0.481386388
-11/30/2007	0.453069542
-12/1/2007	0.453069542
-12/2/2007	0.481386388
-12/3/2007	0.821188545
-12/4/2007	0.849505391
-12/5/2007	0.736238006
-12/6/2007	1.189307547
-12/7/2007	0.792871698
-12/8/2007	0.736238006
-12/9/2007	0.821188545
-12/10/2007	0.764554852
-12/11/2007	0.764554852
-12/12/2007	0.736238006
-12/13/2007	0.707921159
-12/14/2007	0.764554852
-12/15/2007	0.736238006
-12/16/2007	1.415842318
-12/17/2007	1.982179246
-12/18/2007	1.897228707
-12/19/2007	1.359208626
-12/20/2007	1.160990701
-12/21/2007	1.076040162
-12/22/2007	0.93445593
-12/23/2007	0.906139084
-12/24/2007	1.019406469
-12/25/2007	0.991089623
-12/26/2007	0.962772777
-12/27/2007	1.019406469
-12/28/2007	1.047723316
-12/29/2007	1.500792858
-12/30/2007	2.095446631
-12/31/2007	2.661783559
-1/1/2008	2.548516173
-1/2/2008	2.095446631
-1/3/2008	1.699010782
-1/4/2008	1.500792858
-1/5/2008	1.444159165
-1/6/2008	1.444159165
-1/7/2008	1.472476011
-1/8/2008	1.444159165
-1/9/2008	1.387525472
-1/10/2008	1.359208626
-1/11/2008	1.55742655
-1/12/2008	1.925545553
-1/13/2008	1.840595014
-1/14/2008	1.812278168
-1/15/2008	1.783961321
-1/16/2008	1.699010782
-1/17/2008	1.642377089
-1/18/2008	2.57683302
-1/19/2008	3.539605796
-1/20/2008	3.001585715
-1/21/2008	2.265347709
-1/22/2008	1.953862399
-1/23/2008	1.982179246
-1/24/2008	1.953862399
-1/25/2008	1.840595014
-1/26/2008	1.755644475
-1/27/2008	1.727327628
-1/28/2008	1.727327628
-1/29/2008	1.699010782
-1/30/2008	1.699010782
-1/31/2008	1.642377089
-2/1/2008	2.406931941
-2/2/2008	7.220795824
-2/3/2008	8.013667522
-2/4/2008	4.983764961
-2/5/2008	4.190893263
-2/6/2008	3.879407953
-2/7/2008	3.596239489
-2/8/2008	3.284754179
-2/9/2008	3.001585715
-2/10/2008	2.80336779
-2/11/2008	2.49188248
-2/12/2008	2.265347709
-2/13/2008	4.44574488
-2/14/2008	10.10911415
-2/15/2008	8.126934908
-2/16/2008	5.408517656
-2/17/2008	4.474061726
-2/18/2008	4.275843802
-2/19/2008	4.247526955
-2/20/2008	3.879407953
-2/21/2008	3.539605796
-2/22/2008	3.482972103
-2/23/2008	4.020992184
-2/24/2008	4.304160648
-2/25/2008	4.275843802
-2/26/2008	4.247526955
-2/27/2008	4.530695419
-2/28/2008	4.643962804
-2/29/2008	4.077625877
-3/1/2008	3.766140567
-3/2/2008	3.567922642
-3/3/2008	3.313071025
-3/4/2008	3.171486793
-3/5/2008	3.992675338
-3/6/2008	5.181982885
-3/7/2008	4.813863883
-3/8/2008	10.9303027
-3/9/2008	31.9980364
-3/10/2008	19.31208922
-3/11/2008	10.78871847
-3/12/2008	7.815449598
-3/13/2008	6.541191511
-3/14/2008	5.720002966
-3/15/2008	5.38020081
-3/16/2008	5.153666039
-3/17/2008	4.927131268
-3/18/2008	4.502378573
-3/19/2008	4.389111187
-3/20/2008	4.75723019
-3/21/2008	4.559012265
-3/22/2008	4.190893263
-3/23/2008	4.020992184
-3/24/2008	3.709506874
-3/25/2008	3.426338411
-3/26/2008	3.256437332
-3/27/2008	3.171486793
-3/28/2008	3.114853101
-3/29/2008	2.973268869
-3/30/2008	2.661783559
-3/31/2008	2.520199327
-4/1/2008	2.633466712
-4/2/2008	2.661783559
-4/3/2008	2.463565634
-4/4/2008	3.19980364
-4/5/2008	3.964358492
-4/6/2008	3.992675338
-4/7/2008	4.389111187
-4/8/2008	4.275843802
-4/9/2008	3.879407953
-4/10/2008	3.539605796
-4/11/2008	3.341387872
-4/12/2008	3.313071025
-4/13/2008	5.097032346
-4/14/2008	6.965944207
-4/15/2008	4.870497575
-4/16/2008	3.766140567
-4/17/2008	3.482972103
-4/18/2008	3.341387872
-4/19/2008	3.143169947
-4/20/2008	2.944952022
-4/21/2008	2.860001483
-4/22/2008	2.775050944
-4/23/2008	2.520199327
-4/24/2008	2.378615095
-4/25/2008	2.237030863
-4/26/2008	2.152080324
-4/27/2008	3.936041645
-4/28/2008	3.737823721
-4/29/2008	4.105942723
-4/30/2008	3.879407953
-5/1/2008	3.482972103
-5/2/2008	3.256437332
-5/3/2008	2.944952022
-5/4/2008	2.633466712
-5/5/2008	2.378615095
-5/6/2008	2.18039717
-5/7/2008	2.010496092
-5/8/2008	2.067129785
-5/9/2008	4.587329112
-5/10/2008	10.64713423
-5/11/2008	11.94970917
-5/12/2008	25.99486497
-5/13/2008	56.63369274
-5/14/2008	26.07981551
-5/15/2008	14.89466119
-5/16/2008	9.939213075
-5/17/2008	8.664954989
-5/18/2008	7.673865366
-5/19/2008	6.456240972
-5/20/2008	6.541191511
-5/21/2008	8.211885447
-5/22/2008	8.211885447
-5/23/2008	6.258023047
-5/24/2008	5.153666039
-5/25/2008	4.474061726
-5/26/2008	4.105942723
-5/27/2008	3.681190028
-5/28/2008	3.398021564
-5/29/2008	3.114853101
-5/30/2008	2.690100405
-5/31/2008	2.80336779
-6/1/2008	3.624556335
-6/2/2008	4.077625877
-6/3/2008	3.029902561
-6/4/2008	3.766140567
-6/5/2008	11.75149124
-6/6/2008	14.7813938
-6/7/2008	8.466737064
-6/8/2008	5.748319813
-6/9/2008	4.615645958
-6/10/2008	3.851091106
-6/11/2008	3.114853101
-6/12/2008	2.605149866
-6/13/2008	2.237030863
-6/14/2008	2.095446631
-6/15/2008	1.925545553
-6/16/2008	1.783961321
-6/17/2008	1.897228707
-6/18/2008	1.812278168
-6/19/2008	1.670693936
-6/20/2008	1.444159165
-6/21/2008	1.217624394
-6/22/2008	1.132673855
-6/23/2008	1.019406469
-6/24/2008	0.93445593
-6/25/2008	0.877822237
-6/26/2008	0.821188545
-6/27/2008	0.764554852
-6/28/2008	0.764554852
-6/29/2008	0.736238006
-6/30/2008	0.707921159
-7/1/2008	0.679604313
-7/2/2008	0.651287466
-7/3/2008	0.594653774
-7/4/2008	0.566336927
-7/5/2008	0.594653774
-7/6/2008	0.707921159
-7/7/2008	0.849505391
-7/8/2008	1.359208626
-7/9/2008	1.019406469
-7/10/2008	1.670693936
-7/11/2008	1.840595014
-7/12/2008	1.189307547
-7/13/2008	0.906139084
-7/14/2008	0.764554852
-7/15/2008	0.707921159
-7/16/2008	0.651287466
-7/17/2008	0.594653774
-7/18/2008	0.538020081
-7/19/2008	0.481386388
-7/20/2008	0.424752696
-7/21/2008	0.396435849
-7/22/2008	0.368119003
-7/23/2008	0.764554852
-7/24/2008	0.991089623
-7/25/2008	0.707921159
-7/26/2008	0.594653774
-7/27/2008	0.509703235
-7/28/2008	0.481386388
-7/29/2008	0.453069542
-7/30/2008	0.396435849
-7/31/2008	0.368119003
-8/1/2008	0.31148531
-8/2/2008	0.283168464
-8/3/2008	0.266178356
-8/4/2008	0.243524879
-8/5/2008	0.237861509
-8/6/2008	0.226534771
-8/7/2008	0.223703086
-8/8/2008	0.260514987
-8/9/2008	0.15291097
-8/10/2008	0.090613908
-8/11/2008	0.147247601
-8/12/2008	0.147247601
-8/13/2008	0.073623801
-8/14/2008	0.067960431
-8/15/2008	0.084950539
-8/16/2008	0.161406024
-8/17/2008	0.172732763
-8/18/2008	0.181227817
-8/19/2008	0.167069394
-8/20/2008	0.141584232
-8/21/2008	0.090613908
-8/22/2008	0.15291097
-8/23/2008	0.31148531
-8/24/2008	0.31148531
-8/25/2008	0.186891186
-8/26/2008	0.144415916
-8/27/2008	0.133089178
-8/28/2008	0.150079286
-8/29/2008	0.229366456
-8/30/2008	0.237861509
-8/31/2008	0.184059501
-9/1/2008	0.161406024
-9/2/2008	0.147247601
-9/3/2008	0.135920863
-9/4/2008	0.135920863
-9/5/2008	0.566336927
-9/6/2008	0.424752696
-9/7/2008	0.538020081
-9/8/2008	0.453069542
-9/9/2008	0.368119003
-9/10/2008	0.339802156
-9/11/2008	0.339802156
-9/12/2008	0.339802156
-9/13/2008	0.453069542
-9/14/2008	0.594653774
-9/15/2008	0.509703235
-9/16/2008	0.368119003
-9/17/2008	0.339802156
-9/18/2008	0.31148531
-9/19/2008	0.283168464
-9/20/2008	0.283168464
-9/21/2008	0.27467341
-9/22/2008	0.269010041
-9/23/2008	0.209544663
-9/24/2008	0.220871402
-9/25/2008	0.368119003
-9/26/2008	0.481386388
-9/27/2008	0.481386388
-9/28/2008	0.453069542
-9/29/2008	0.396435849
-9/30/2008	0.396435849
-10/1/2008	0.594653774
-10/2/2008	0.509703235
-10/3/2008	0.424752696
-10/4/2008	0.368119003
-10/5/2008	0.339802156
-10/6/2008	0.339802156
-10/7/2008	0.31148531
-10/8/2008	0.31148531
-10/9/2008	0.31148531
-10/10/2008	0.339802156
-10/11/2008	0.31148531
-10/12/2008	0.31148531
-10/13/2008	0.280336779
-10/14/2008	0.31148531
-10/15/2008	0.31148531
-10/16/2008	0.283168464
-10/17/2008	0.266178356
-10/18/2008	0.23219814
-10/19/2008	0.243524879
-10/20/2008	0.246356563
-10/21/2008	0.260514987
-10/22/2008	0.263346671
-10/23/2008	0.257683302
-10/24/2008	0.254851617
-10/25/2008	0.283168464
-10/26/2008	0.368119003
-10/27/2008	0.368119003
-10/28/2008	0.538020081
-10/29/2008	0.538020081
-10/30/2008	0.424752696
-10/31/2008	0.368119003
-11/1/2008	0.368119003
-11/2/2008	0.368119003
-11/3/2008	0.339802156
-11/4/2008	0.368119003
-11/5/2008	0.453069542
-11/6/2008	0.481386388
-11/7/2008	0.481386388
-11/8/2008	0.453069542
-11/9/2008	0.481386388
-11/10/2008	0.424752696
-11/11/2008	0.396435849
-11/12/2008	0.396435849
-11/13/2008	0.481386388
-11/14/2008	0.906139084
-11/15/2008	1.076040162
-11/16/2008	1.387525472
-11/17/2008	1.132673855
-11/18/2008	0.991089623
-11/19/2008	0.877822237
-11/20/2008	0.764554852
-11/21/2008	0.736238006
-11/22/2008	0.679604313
-11/23/2008	0.651287466
-11/24/2008	0.62297062
-11/25/2008	0.821188545
-11/26/2008	0.821188545
-11/27/2008	0.736238006
-11/28/2008	0.707921159
-11/29/2008	0.679604313
-11/30/2008	0.764554852
-12/1/2008	1.019406469
-12/2/2008	0.906139084
-12/3/2008	0.849505391
-12/4/2008	0.821188545
-12/5/2008	0.792871698
-12/6/2008	0.821188545
-12/7/2008	0.764554852
-12/8/2008	0.736238006
-12/9/2008	0.707921159
-12/10/2008	0.736238006
-12/11/2008	1.387525472
-12/12/2008	6.824359975
-12/13/2008	11.46832278
-12/14/2008	5.436834503
-12/15/2008	2.944952022
-12/16/2008	2.435248788
-12/17/2008	2.973268869
-12/18/2008	3.737823721
-12/19/2008	3.51128895
-12/20/2008	4.219210109
-12/21/2008	4.530695419
-12/22/2008	4.813863883
-12/23/2008	4.247526955
-12/24/2008	3.228120486
-12/25/2008	3.086536254
-12/26/2008	2.88831833
-12/27/2008	2.548516173
-12/28/2008	2.49188248
-12/29/2008	2.463565634
-12/30/2008	2.350298249
-12/31/2008	2.208714017
-1/1/2009	2.038812939
-1/2/2009	1.897228707
-1/3/2009	1.897228707
-1/4/2009	1.840595014
-1/5/2009	1.812278168
-1/6/2009	1.840595014
-1/7/2009	3.171486793
-1/8/2009	7.220795824
-1/9/2009	7.928716983
-1/10/2009	5.266933425
-1/11/2009	4.474061726
-1/12/2009	4.417428034
-1/13/2009	4.190893263
-1/14/2009	3.82277426
-1/15/2009	3.426338411
-1/16/2009	3.284754179
-1/17/2009	3.114853101
-1/18/2009	2.944952022
-1/19/2009	2.775050944
-1/20/2009	2.661783559
-1/21/2009	2.548516173
-1/22/2009	2.435248788
-1/23/2009	2.265347709
-1/24/2009	2.152080324
-1/25/2009	2.038812939
-1/26/2009	1.897228707
-1/27/2009	1.86891186
-1/28/2009	2.152080324
-1/29/2009	3.19980364
-1/30/2009	3.454655257
-1/31/2009	3.114853101
-2/1/2009	2.80336779
-2/2/2009	2.775050944
-2/3/2009	2.690100405
-2/4/2009	2.57683302
-2/5/2009	2.463565634
-2/6/2009	2.350298249
-2/7/2009	2.293664556
-2/8/2009	2.406931941
-2/9/2009	2.378615095
-2/10/2009	2.237030863
-2/11/2009	2.265347709
-2/12/2009	2.293664556
-2/13/2009	2.208714017
-2/14/2009	2.038812939
-2/15/2009	1.953862399
-2/16/2009	1.897228707
-2/17/2009	1.812278168
-2/18/2009	1.840595014
-2/19/2009	2.010496092
-2/20/2009	1.925545553
-2/21/2009	1.783961321
-2/22/2009	1.755644475
-2/23/2009	1.783961321
-2/24/2009	1.699010782
-2/25/2009	1.642377089
-2/26/2009	1.642377089
-2/27/2009	1.642377089
-2/28/2009	1.670693936
-3/1/2009	1.783961321
-3/2/2009	1.982179246
-3/3/2009	1.925545553
-3/4/2009	1.783961321
-3/5/2009	1.783961321
-3/6/2009	2.038812939
-3/7/2009	3.001585715
-3/8/2009	3.907724799
-3/9/2009	3.624556335
-3/10/2009	3.058219408
-3/11/2009	2.633466712
-3/12/2009	2.435248788
-3/13/2009	2.237030863
-3/14/2009	2.123763478
-3/15/2009	2.18039717
-3/16/2009	2.265347709
-3/17/2009	2.237030863
-3/18/2009	2.152080324
-3/19/2009	2.067129785
-3/20/2009	2.095446631
-3/21/2009	1.925545553
-3/22/2009	1.840595014
-3/23/2009	1.783961321
-3/24/2009	1.755644475
-3/25/2009	1.699010782
-3/26/2009	1.727327628
-3/27/2009	2.067129785
-3/28/2009	2.548516173
-3/29/2009	3.482972103
-3/30/2009	3.624556335
-3/31/2009	3.029902561
-4/1/2009	2.548516173
-4/2/2009	2.860001483
-4/3/2009	3.652873182
-4/4/2009	5.606735581
-4/5/2009	5.408517656
-4/6/2009	4.360794341
-4/7/2009	4.587329112
-4/8/2009	4.672279651
-4/9/2009	4.105942723
-4/10/2009	3.482972103
-4/11/2009	3.539605796
-4/12/2009	4.190893263
-4/13/2009	4.162576416
-4/14/2009	4.927131268
-4/15/2009	8.919806606
-4/16/2009	14.49822534
-4/17/2009	12.65763033
-4/18/2009	8.155251754
-4/19/2009	6.427924126
-4/20/2009	5.663369274
-4/21/2009	8.523370757
-4/22/2009	11.44000593
-4/23/2009	9.627727765
-4/24/2009	8.183568601
-4/25/2009	6.512874665
-4/26/2009	5.550101888
-4/27/2009	4.898814422
-4/28/2009	4.304160648
-4/29/2009	3.851091106
-4/30/2009	3.454655257
-5/1/2009	3.228120486
-5/2/2009	3.19980364
-5/3/2009	3.19980364
-5/4/2009	4.44574488
-5/5/2009	5.861587198
-5/6/2009	7.24911267
-5/7/2009	12.00634286
-5/8/2009	17.38654367
-5/9/2009	12.23287763
-5/10/2009	7.985350676
-5/11/2009	6.144755662
-5/12/2009	5.0687155
-5/13/2009	3.964358492
-5/14/2009	3.454655257
-5/15/2009	3.426338411
-5/16/2009	3.284754179
-5/17/2009	3.539605796
-5/18/2009	3.936041645
-5/19/2009	3.398021564
-5/20/2009	2.88831833
-5/21/2009	2.520199327
-5/22/2009	2.208714017
-5/23/2009	2.152080324
-5/24/2009	1.982179246
-5/25/2009	2.860001483
-5/26/2009	9.287925609
-5/27/2009	7.56059798
-5/28/2009	5.238616578
-5/29/2009	4.304160648
-5/30/2009	4.13425957
-5/31/2009	3.709506874
-6/1/2009	3.001585715
-6/2/2009	2.406931941
-6/3/2009	2.18039717
-6/4/2009	2.49188248
-6/5/2009	6.654458897
-6/6/2009	17.30159313
-6/7/2009	14.58317588
-6/8/2009	8.523370757
-6/9/2009	7.390696902
-6/10/2009	17.38654367
-6/11/2009	21.4358527
-6/12/2009	13.53545256
-6/13/2009	9.089707684
-6/14/2009	7.305746363
-6/15/2009	6.059805123
-6/16/2009	6.597825204
-6/17/2009	5.606735581
-6/18/2009	5.408517656
-6/19/2009	5.974854584
-6/20/2009	6.852676821
-6/21/2009	10.81703531
-6/22/2009	10.5621837
-6/23/2009	7.164162131
-6/24/2009	5.521785042
-6/25/2009	4.728913344
-6/26/2009	4.219210109
-6/27/2009	3.794457413
-6/28/2009	3.341387872
-6/29/2009	3.001585715
-6/30/2009	2.661783559
-7/1/2009	2.463565634
-7/2/2009	2.435248788
-7/3/2009	2.435248788
-7/4/2009	2.067129785
-7/5/2009	1.840595014
-7/6/2009	1.614060243
-7/7/2009	1.614060243
-7/8/2009	1.472476011
-7/9/2009	1.330891779
-7/10/2009	1.217624394
-7/11/2009	1.076040162
-7/12/2009	1.076040162
-7/13/2009	1.047723316
-7/14/2009	0.962772777
-7/15/2009	0.906139084
-7/16/2009	0.93445593
-7/17/2009	0.962772777
-7/18/2009	0.93445593
-7/19/2009	0.764554852
-7/20/2009	0.679604313
-7/21/2009	0.62297062
-7/22/2009	0.707921159
-7/23/2009	0.849505391
-7/24/2009	0.991089623
-7/25/2009	0.849505391
-7/26/2009	0.877822237
-7/27/2009	0.792871698
-7/28/2009	0.707921159
-7/29/2009	0.991089623
-7/30/2009	3.766140567
-7/31/2009	2.067129785
-8/1/2009	1.585743397
-8/2/2009	1.444159165
-8/3/2009	1.86891186
-8/4/2009	1.387525472
-8/5/2009	1.160990701
-8/6/2009	1.24594124
-8/7/2009	1.274258087
-8/8/2009	1.132673855
-8/9/2009	1.047723316
-8/10/2009	0.962772777
-8/11/2009	0.877822237
-8/12/2009	0.906139084
-8/13/2009	2.520199327
-8/14/2009	2.293664556
-8/15/2009	1.415842318
-8/16/2009	1.132673855
-8/17/2009	0.906139084
-8/18/2009	0.792871698
-8/19/2009	0.792871698
-8/20/2009	0.679604313
-8/21/2009	0.538020081
-8/22/2009	30.01585715
-8/23/2009	73.90696902
-8/24/2009	38.2277426
-8/25/2009	14.38495796
-8/26/2009	8.296835986
-8/27/2009	6.144755662
-8/28/2009	5.720002966
-8/29/2009	7.843766444
-8/30/2009	9.740995151
-8/31/2009	10.59050054
-9/1/2009	8.041984369
-9/2/2009	5.210299732
-9/3/2009	4.530695419
-9/4/2009	4.049309031
-9/5/2009	3.341387872
-9/6/2009	2.916635176
-9/7/2009	2.605149866
-9/8/2009	2.548516173
-9/9/2009	2.406931941
-9/10/2009	2.265347709
-9/11/2009	5.804953506
-9/12/2009	18.88733653
-9/13/2009	15.91406766
-9/14/2009	9.712678304
-9/15/2009	7.050894746
-9/16/2009	5.578418735
-9/17/2009	4.842180729
-9/18/2009	4.332477494
-9/19/2009	3.936041645
-9/20/2009	3.426338411
-9/21/2009	3.086536254
-9/22/2009	2.831684637
-9/23/2009	2.633466712
-9/24/2009	2.463565634
-9/25/2009	2.321981402
-9/26/2009	2.18039717
-9/27/2009	4.672279651
-9/28/2009	7.334063209
-9/29/2009	6.484557818
-9/30/2009	4.75723019
-10/1/2009	3.794457413
-10/2/2009	3.143169947
-10/3/2009	2.88831833
-10/4/2009	2.718417251
-10/5/2009	2.435248788
-10/6/2009	2.237030863
-10/7/2009	2.095446631
-10/8/2009	1.953862399
-10/9/2009	1.86891186
-10/10/2009	1.812278168
-10/11/2009	1.783961321
-10/12/2009	1.699010782
-10/13/2009	1.642377089
-10/14/2009	1.614060243
-10/15/2009	1.699010782
-10/16/2009	2.152080324
-10/17/2009	2.265347709
-10/18/2009	3.879407953
-10/19/2009	5.210299732
-10/20/2009	4.474061726
-10/21/2009	3.426338411
-10/22/2009	2.944952022
-10/23/2009	2.57683302
-10/24/2009	2.435248788
-10/25/2009	3.907724799
-10/26/2009	4.927131268
-10/27/2009	6.088121969
-10/28/2009	27.18417251
-10/29/2009	40.20992184
-10/30/2009	17.92456375
-10/31/2009	11.44000593
-11/1/2009	12.99743248
-11/2/2009	16.42377089
-11/3/2009	12.51604609
-11/4/2009	8.976440299
-11/5/2009	7.390696902
-11/6/2009	6.512874665
-11/7/2009	5.918220891
-11/8/2009	5.465151349
-11/9/2009	5.210299732
-11/10/2009	4.983764961
-11/11/2009	4.898814422
-11/12/2009	11.32673855
-11/13/2009	44.4574488
-11/14/2009	35.67922642
-11/15/2009	19.4819903
-11/16/2009	13.6770368
-11/17/2009	10.137431
-11/18/2009	8.211885447
-11/19/2009	7.334063209
-11/20/2009	7.95703383
-11/21/2009	8.664954989
-11/22/2009	7.730499059
-11/23/2009	6.796043128
-11/24/2009	7.985350676
-11/25/2009	7.928716983
-11/26/2009	7.362380056
-11/27/2009	7.390696902
-11/28/2009	6.852676821
-11/29/2009	6.229706201
-11/30/2009	5.804953506
-12/1/2009	6.371290433
-12/2/2009	6.739409436
-12/3/2009	24.38080472
-12/4/2009	30.58219408
-12/5/2009	16.90515728
-12/6/2009	25.17367642
-12/7/2009	21.09605054
-12/8/2009	14.0451558
-12/9/2009	45.02378573
-12/10/2009	86.36638142
-12/11/2009	30.01585715
-12/12/2009	16.33882035
-12/13/2009	13.47881887
-12/14/2009	17.64139529
-12/15/2009	16.7918899
-12/16/2009	12.43109556
-12/17/2009	9.316242455
-12/18/2009	7.87208329
-12/19/2009	7.164162131
-12/20/2009	7.503964288
-12/21/2009	7.900400137
-12/22/2009	7.24911267
-12/23/2009	7.050894746
-12/24/2009	6.909310514
-12/25/2009	6.711092589
-12/26/2009	41.90893263
-12/27/2009	77.58815905
-12/28/2009	32.84754179
-12/29/2009	18.7457523
-12/30/2009	12.99743248
-12/31/2009	10.50555
-1/1/2010	11.75149124
-1/2/2010	12.03465971
-1/3/2010	9.457826687
-1/4/2010	7.588914827
-1/5/2010	6.824359975
-1/6/2010	6.342973587
-1/7/2010	6.00317143
-1/8/2010	5.833270352
-1/9/2010	5.550101888
-1/10/2010	5.238616578
-1/11/2010	4.983764961
-1/12/2010	4.870497575
-1/13/2010	4.813863883
-1/14/2010	4.672279651
-1/15/2010	4.615645958
-1/16/2010	4.643962804
-1/17/2010	5.153666039
-1/18/2010	10.22238154
-1/19/2010	11.83644178
-1/20/2010	8.098618061
-1/21/2010	6.739409436
-1/22/2010	6.059805123
-1/23/2010	5.663369274
-1/24/2010	5.266933425
-1/25/2010	10.25069839
-1/26/2010	29.73268869
-1/27/2010	19.42535661
-1/28/2010	12.0912934
-1/29/2010	8.778222374
-1/30/2010	7.079211592
-1/31/2010	6.456240972
-2/1/2010	6.229706201
-2/2/2010	6.031488277
-2/3/2010	5.946537737
-2/4/2010	5.918220891
-2/5/2010	5.776636659
-2/6/2010	5.69168612
-2/7/2010	6.258023047
-2/8/2010	6.824359975
-2/9/2010	6.456240972
-2/10/2010	6.371290433
-2/11/2010	6.173072508
-2/12/2010	6.00317143
-2/13/2010	5.889904045
-2/14/2010	5.804953506
-2/15/2010	5.889904045
-2/16/2010	6.00317143
-2/17/2010	6.229706201
-2/18/2010	6.512874665
-2/19/2010	7.079211592
-2/20/2010	8.126934908
-2/21/2010	9.344559302
-2/22/2010	11.12852062
-2/23/2010	19.36872292
-2/24/2010	47.85547036
-2/25/2010	50.97032346
-2/26/2010	34.26338411
-2/27/2010	20.67129785
-2/28/2010	17.58476159
-3/1/2010	16.08396874
-3/2/2010	15.5742655
-3/3/2010	15.14951281
-3/4/2010	15.88575081
-3/5/2010	13.39386833
-3/6/2010	10.78871847
-3/7/2010	8.976440299
-3/8/2010	8.155251754
-3/9/2010	7.503964288
-3/10/2010	6.824359975
-3/11/2010	6.371290433
-3/12/2010	7.503964288
-3/13/2010	37.66140567
-3/14/2010	93.44559302
-3/15/2010	35.67922642
-3/16/2010	21.8039717
-3/17/2010	15.29109704
-3/18/2010	11.89307547
-3/19/2010	9.910896229
-3/20/2010	8.495053911
-3/21/2010	7.64554852
-3/22/2010	7.079211592
-3/23/2010	6.654458897
-3/24/2010	6.31465674
-3/25/2010	5.776636659
-3/26/2010	6.00317143
-3/27/2010	6.229706201
-3/28/2010	5.720002966
-3/29/2010	18.20773222
-3/30/2010	31.14853101
-3/31/2010	30.01585715
-4/1/2010	21.4358527
-4/2/2010	13.81862103
-4/3/2010	9.627727765
-4/4/2010	8.325152832
-4/5/2010	7.334063209
-4/6/2010	6.62614205
-4/7/2010	6.173072508
-4/8/2010	5.69168612
-4/9/2010	6.682775743
-4/10/2010	7.447330595
-4/11/2010	6.31465674
-4/12/2010	5.521785042
-4/13/2010	5.0687155
-4/14/2010	4.955448115
-4/15/2010	4.672279651
-4/16/2010	4.530695419
-4/17/2010	4.474061726
-4/18/2010	4.219210109
-4/19/2010	3.936041645
-4/20/2010	3.652873182
-4/21/2010	3.567922642
-4/22/2010	3.709506874
-4/23/2010	3.567922642
-4/24/2010	3.313071025
-4/25/2010	3.256437332
-4/26/2010	6.258023047
-4/27/2010	6.965944207
-4/28/2010	5.521785042
-4/29/2010	4.275843802
-4/30/2010	3.794457413
-5/1/2010	3.624556335
-5/2/2010	3.398021564
-5/3/2010	3.114853101
-5/4/2010	2.944952022
-5/5/2010	2.605149866
-5/6/2010	2.463565634
-5/7/2010	2.237030863
-5/8/2010	2.123763478
-5/9/2010	1.953862399
-5/10/2010	1.783961321
-5/11/2010	1.727327628
-5/12/2010	1.86891186
-5/13/2010	2.010496092
-5/14/2010	1.86891186
-5/15/2010	1.812278168
-5/16/2010	1.727327628
-5/17/2010	1.642377089
-5/18/2010	2.746734098
-5/19/2010	5.181982885
-5/20/2010	4.077625877
-5/21/2010	2.860001483
-5/22/2010	2.378615095
-5/23/2010	2.18039717
-5/24/2010	2.123763478
-5/25/2010	2.095446631
-5/26/2010	1.982179246
-5/27/2010	1.783961321
-5/28/2010	1.699010782
-5/29/2010	1.642377089
-5/30/2010	1.585743397
-5/31/2010	1.472476011
-6/1/2010	1.444159165
-6/2/2010	1.55742655
-6/3/2010	1.444159165
-6/4/2010	1.217624394
-6/5/2010	1.132673855
-6/6/2010	1.472476011
-6/7/2010	1.330891779
-6/8/2010	1.160990701
-6/9/2010	1.019406469
-6/10/2010	0.962772777
-6/11/2010	0.906139084
-6/12/2010	0.877822237
-6/13/2010	0.849505391
-6/14/2010	0.821188545
-6/15/2010	0.792871698
-6/16/2010	0.736238006
-6/17/2010	0.991089623
-6/18/2010	0.93445593
-6/19/2010	0.821188545
-6/20/2010	0.736238006
-6/21/2010	0.679604313
-6/22/2010	0.594653774
-6/23/2010	0.991089623
-6/24/2010	1.302574933
-6/25/2010	1.302574933
-6/26/2010	1.047723316
-6/27/2010	0.93445593
-6/28/2010	0.792871698
-6/29/2010	0.594653774
-6/30/2010	0.538020081
-7/1/2010	0.453069542
-7/2/2010	0.368119003
-7/3/2010	0.339802156
-7/4/2010	0.368119003
-7/5/2010	0.396435849
-7/6/2010	0.283168464
-7/7/2010	0.226534771
-7/8/2010	0.396435849
-7/9/2010	0.263346671
-7/10/2010	1.359208626
-7/11/2010	9.259608763
-7/12/2010	4.190893263
-7/13/2010	3.851091106
-7/14/2010	10.73208477
-7/15/2010	8.89148976
-7/16/2010	4.417428034
-7/17/2010	2.293664556
-7/18/2010	1.812278168
-7/19/2010	1.614060243
-7/20/2010	1.55742655
-7/21/2010	1.529109704
-7/22/2010	1.330891779
-7/23/2010	1.076040162
-7/24/2010	0.962772777
-7/25/2010	0.93445593
-7/26/2010	1.274258087
-7/27/2010	1.274258087
-7/28/2010	0.906139084
-7/29/2010	0.849505391
-7/30/2010	0.877822237
-7/31/2010	0.849505391
-8/1/2010	0.792871698
-8/2/2010	0.679604313
-8/3/2010	0.594653774
-8/4/2010	0.821188545
-8/5/2010	0.877822237
-8/6/2010	0.792871698
-8/7/2010	0.679604313
-8/8/2010	0.509703235
-8/9/2010	0.481386388
-8/10/2010	0.509703235
-8/11/2010	0.481386388
-8/12/2010	0.736238006
-8/13/2010	0.906139084
-8/14/2010	0.821188545
-8/15/2010	0.764554852
-8/16/2010	0.736238006
-8/17/2010	0.509703235
-8/18/2010	1.585743397
-8/19/2010	2.690100405
-8/20/2010	1.727327628
-8/21/2010	1.132673855
-8/22/2010	0.962772777
-8/23/2010	0.906139084
-8/24/2010	0.849505391
-8/25/2010	0.821188545
-8/26/2010	0.792871698
-8/27/2010	0.736238006
-8/28/2010	0.62297062
-8/29/2010	0.594653774
-8/30/2010	0.538020081
-8/31/2010	0.538020081
-9/1/2010	0.424752696
-9/2/2010	0.453069542
-9/3/2010	0.509703235
-9/4/2010	0.481386388
-9/5/2010	0.424752696
-9/6/2010	0.368119003
-9/7/2010	0.368119003
-9/8/2010	0.368119003
-9/9/2010	0.339802156
-9/10/2010	0.283168464
-9/11/2010	0.339802156
-9/12/2010	0.424752696
-9/13/2010	0.481386388
-9/14/2010	0.424752696
-9/15/2010	0.368119003
-9/16/2010	0.368119003
-9/17/2010	0.368119003
-9/18/2010	0.424752696
-9/19/2010	0.566336927
-9/20/2010	0.31148531
-9/21/2010	0.283168464
-9/22/2010	0.280336779
-9/23/2010	0.283168464
-9/24/2010	0.31148531
-9/25/2010	0.266178356
-9/26/2010	0.218039717
-9/27/2010	0.368119003
-9/28/2010	0.509703235
-9/29/2010	0.991089623
-9/30/2010	1.189307547
-10/1/2010	25.14535958
-10/2/2010	28.06199475
-10/3/2010	9.712678304
-10/4/2010	4.813863883
-10/5/2010	4.219210109
-10/6/2010	3.851091106
-10/7/2010	3.086536254
-10/8/2010	2.548516173
-10/9/2010	2.152080324
-10/10/2010	1.897228707
-10/11/2010	1.783961321
-10/12/2010	1.699010782
-10/13/2010	1.642377089
-10/14/2010	1.727327628
-10/15/2010	2.548516173
-10/16/2010	2.095446631
-10/17/2010	1.840595014
-10/18/2010	1.699010782
-10/19/2010	1.642377089
-10/20/2010	1.642377089
-10/21/2010	1.670693936
-10/22/2010	1.614060243
-10/23/2010	1.500792858
-10/24/2010	1.444159165
-10/25/2010	1.387525472
-10/26/2010	1.415842318
-10/27/2010	1.55742655
-10/28/2010	1.953862399
-10/29/2010	1.897228707
-10/30/2010	1.699010782
-10/31/2010	1.614060243
-11/1/2010	1.55742655
-11/2/2010	1.472476011
-11/3/2010	1.444159165
-11/4/2010	2.095446631
-11/5/2010	3.964358492
-11/6/2010	4.162576416
-11/7/2010	2.88831833
-11/8/2010	2.435248788
-11/9/2010	2.293664556
-11/10/2010	2.208714017
-11/11/2010	2.038812939
-11/12/2010	1.86891186
-11/13/2010	1.812278168
-11/14/2010	1.783961321
-11/15/2010	1.783961321
-11/16/2010	1.925545553
-11/17/2010	2.010496092
-11/18/2010	1.982179246
-11/19/2010	1.840595014
-11/20/2010	1.755644475
-11/21/2010	1.727327628
-11/22/2010	1.670693936
-11/23/2010	1.670693936
-11/24/2010	1.699010782
-11/25/2010	1.642377089
-11/26/2010	1.642377089
-11/27/2010	1.642377089
-11/28/2010	1.585743397
-11/29/2010	1.529109704
-11/30/2010	1.614060243
-12/1/2010	1.755644475
-12/2/2010	2.973268869
-12/3/2010	2.860001483
-12/4/2010	2.350298249
-12/5/2010	2.18039717
-12/6/2010	2.010496092
-12/7/2010	1.925545553
-12/8/2010	1.840595014
-12/9/2010	1.755644475
-12/10/2010	1.670693936
-12/11/2010	1.670693936
-12/12/2010	2.095446631
-12/13/2010	3.398021564
-12/14/2010	3.82277426
-12/15/2010	2.80336779
-12/16/2010	2.520199327
-12/17/2010	2.435248788
-12/18/2010	2.237030863
-12/19/2010	2.18039717
-12/20/2010	2.123763478
-12/21/2010	2.038812939
-12/22/2010	1.982179246
-12/23/2010	1.953862399
-12/24/2010	1.86891186
-12/25/2010	1.840595014
-12/26/2010	1.86891186
-12/27/2010	1.925545553
-12/28/2010	1.840595014
-12/29/2010	1.812278168
-12/30/2010	1.812278168
-12/31/2010	1.812278168
-1/1/2011	1.897228707
-1/2/2011	2.406931941
-1/3/2011	2.831684637
-1/4/2011	2.57683302
-1/5/2011	2.378615095
-1/6/2011	2.237030863
-1/7/2011	2.18039717
-1/8/2011	2.095446631
-1/9/2011	2.038812939
-1/10/2011	1.840595014
-1/11/2011	1.783961321
-1/12/2011	1.86891186
-1/13/2011	1.812278168
-1/14/2011	1.783961321
-1/15/2011	1.783961321
-1/16/2011	1.755644475
-1/17/2011	1.755644475
-1/18/2011	2.406931941
-1/19/2011	4.049309031
-1/20/2011	4.75723019
-1/21/2011	4.190893263
-1/22/2011	3.539605796
-1/23/2011	3.029902561
-1/24/2011	2.690100405
-1/25/2011	2.633466712
-1/26/2011	2.973268869
-1/27/2011	5.351883964
-1/28/2011	6.994261053
-1/29/2011	5.748319813
-1/30/2011	4.785547036
-1/31/2011	4.304160648
-2/1/2011	3.964358492
-2/2/2011	6.852676821
-2/3/2011	16.99010782
-2/4/2011	11.69485755
-2/5/2011	7.390696902
-2/6/2011	6.909310514
-2/7/2011	6.62614205
-2/8/2011	5.635052427
-2/9/2011	4.870497575
-2/10/2011	4.360794341
-2/11/2011	4.049309031
-2/12/2011	3.851091106
-2/13/2011	3.709506874
-2/14/2011	3.567922642
-2/15/2011	3.426338411
-2/16/2011	3.171486793
-2/17/2011	3.114853101
-2/18/2011	3.086536254
-2/19/2011	3.001585715
-2/20/2011	2.831684637
-2/21/2011	2.775050944
-2/22/2011	3.058219408
-2/23/2011	3.001585715
-2/24/2011	2.831684637
-2/25/2011	3.992675338
-2/26/2011	6.597825204
-2/27/2011	6.427924126
-2/28/2011	5.578418735
-3/1/2011	8.778222374
-3/2/2011	10.08079731
-3/3/2011	7.164162131
-3/4/2011	5.521785042
-3/5/2011	4.813863883
-3/6/2011	5.012081807
-3/7/2011	13.08238302
-3/8/2011	14.49822534
-3/9/2011	9.146341377
-3/10/2011	8.183568601
-3/11/2011	13.42218518
-3/12/2011	16.31050351
-3/13/2011	10.64713423
-3/14/2011	7.87208329
-3/15/2011	6.484557818
-3/16/2011	7.56059798
-3/17/2011	10.61881739
-3/18/2011	8.948123453
-3/19/2011	7.277429517
-3/20/2011	6.031488277
-3/21/2011	5.465151349
-3/22/2011	5.748319813
-3/23/2011	5.748319813
-3/24/2011	6.796043128
-3/25/2011	7.362380056
-3/26/2011	6.201389355
-3/27/2011	5.266933425
-3/28/2011	4.75723019
-3/29/2011	4.417428034
-3/30/2011	4.105942723
-3/31/2011	4.077625877
-4/1/2011	4.275843802
-4/2/2011	4.304160648
-4/3/2011	4.077625877
-4/4/2011	3.794457413
-4/5/2011	3.737823721
-4/6/2011	3.879407953
-4/7/2011	3.567922642
-4/8/2011	3.482972103
-4/9/2011	3.851091106
-4/10/2011	3.907724799
-4/11/2011	3.766140567
-4/12/2011	3.709506874
-4/13/2011	4.219210109
-4/14/2011	4.559012265
-4/15/2011	4.105942723
-4/16/2011	4.360794341
-4/17/2011	21.8039717
-4/18/2011	21.32258532
-4/19/2011	11.77980809
-4/20/2011	8.495053911
-4/21/2011	6.880993668
-4/22/2011	5.606735581
-4/23/2011	5.097032346
-4/24/2011	5.040398654
-4/25/2011	6.682775743
-4/26/2011	7.305746363
-4/27/2011	6.00317143
-4/28/2011	5.040398654
-4/29/2011	4.672279651
-4/30/2011	4.105942723
-5/1/2011	3.681190028
-5/2/2011	3.454655257
-5/3/2011	3.284754179
-5/4/2011	3.596239489
-5/5/2011	4.162576416
-5/6/2011	3.737823721
-5/7/2011	3.341387872
-5/8/2011	3.086536254
-5/9/2011	2.860001483
-5/10/2011	2.661783559
-5/11/2011	2.463565634
-5/12/2011	2.321981402
-5/13/2011	2.237030863
-5/14/2011	2.293664556
-5/15/2011	3.058219408
-5/16/2011	3.313071025
-5/17/2011	2.88831833
-5/18/2011	2.605149866
-5/19/2011	2.520199327
-5/20/2011	2.80336779
-5/21/2011	2.605149866
-5/22/2011	2.265347709
-5/23/2011	2.095446631
-5/24/2011	1.982179246
-5/25/2011	1.86891186
-5/26/2011	1.727327628
-5/27/2011	1.614060243
-5/28/2011	1.529109704
-5/29/2011	1.472476011
-5/30/2011	1.415842318
-5/31/2011	1.359208626
-6/1/2011	1.274258087
-6/2/2011	1.160990701
-6/3/2011	1.047723316
-6/4/2011	0.962772777
-6/5/2011	0.991089623
-6/6/2011	1.076040162
-6/7/2011	1.047723316
-6/8/2011	0.962772777
-6/9/2011	0.906139084
-6/10/2011	1.132673855
-6/11/2011	1.444159165
-6/12/2011	1.953862399
-6/13/2011	3.596239489
-6/14/2011	1.925545553
-6/15/2011	1.359208626
-6/16/2011	1.189307547
-6/17/2011	1.217624394
-6/18/2011	1.132673855
-6/19/2011	0.962772777
-6/20/2011	0.991089623
-6/21/2011	1.019406469
-6/22/2011	1.302574933
-6/23/2011	1.24594124
-6/24/2011	1.019406469
-6/25/2011	0.877822237
-6/26/2011	0.651287466
-6/27/2011	0.509703235
-6/28/2011	0.679604313
-6/29/2011	0.62297062
-6/30/2011	0.424752696
-7/1/2011	0.424752696
-7/2/2011	0.257683302
-7/3/2011	0.396435849
-7/4/2011	0.651287466
-7/5/2011	0.566336927
-7/6/2011	0.566336927
-7/7/2011	0.679604313
-7/8/2011	0.962772777
-7/9/2011	3.001585715
-7/10/2011	1.585743397
-7/11/2011	1.019406469
-7/12/2011	0.792871698
-7/13/2011	0.481386388
-7/14/2011	0.424752696
-7/15/2011	0.339802156
-7/16/2011	0.368119003
-7/17/2011	0.215208032
-7/18/2011	0.178396132
-7/19/2011	0.181227817
-7/20/2011	0.736238006
-7/21/2011	1.302574933
-7/22/2011	0.566336927
-7/23/2011	0.31148531
-7/24/2011	0.339802156
-7/25/2011	0.424752696
-7/26/2011	0.481386388
-7/27/2011	0.283168464
-7/28/2011	0.147247601
-7/29/2011	0.192554555
-7/30/2011	0.07928717
-7/31/2011	0.175564447
-8/1/2011	0.237861509
-8/2/2011	0.31148531
-8/3/2011	0.453069542
-8/4/2011	0.509703235
-8/5/2011	0.481386388
-8/6/2011	0.396435849
-8/7/2011	0.339802156
-8/8/2011	0.260514987
-8/9/2011	0.212376348
-8/10/2011	0.138752547
-8/11/2011	0.065128747
-8/12/2011	0.065128747
-8/13/2011	0.133089178
-8/14/2011	6.258023047
-8/15/2011	11.2984217
-8/16/2011	10.98693639
-8/17/2011	6.654458897
-8/18/2011	2.463565634
-8/19/2011	2.18039717
-8/20/2011	6.031488277
-8/21/2011	6.201389355
-8/22/2011	4.190893263
-8/23/2011	2.860001483
-8/24/2011	1.982179246
-8/25/2011	2.49188248
-8/26/2011	3.341387872
-8/27/2011	14.72476011
-8/28/2011	246.3565634
-8/29/2011	192.5545553
-8/30/2011	84.95053911
-8/31/2011	25.88159758
-9/1/2011	14.35664111
-9/2/2011	9.20297507
-9/3/2011	7.362380056
-9/4/2011	6.62614205
-9/5/2011	5.720002966
-9/6/2011	5.38020081
-9/7/2011	7.702182212
-9/8/2011	13.22396725
-9/9/2011	20.30317885
-9/10/2011	19.96337669
-9/11/2011	12.17624394
-9/12/2011	9.061390838
-9/13/2011	8.013667522
-9/14/2011	6.909310514
-9/15/2011	6.00317143
-9/16/2011	5.097032346
-9/17/2011	4.530695419
-9/18/2011	4.190893263
-9/19/2011	3.964358492
-9/20/2011	3.82277426
-9/21/2011	3.794457413
-9/22/2011	3.624556335
-9/23/2011	3.766140567
-9/24/2011	4.898814422
-9/25/2011	5.550101888
-9/26/2011	4.728913344
-9/27/2011	4.304160648
-9/28/2011	8.58000445
-9/29/2011	13.90357157
-9/30/2011	9.457826687
diff --git a/inst/extdata/ChoptankRiverNitrate.csv b/inst/extdata/ChoptankRiverNitrate.csv
deleted file mode 100644
index 7bd463448294cf6487a7b15c9b73178e1bfcf332..0000000000000000000000000000000000000000
--- a/inst/extdata/ChoptankRiverNitrate.csv
+++ /dev/null
@@ -1,8 +0,0 @@
-cdate;remarkCode;Nitrate;remarkCode2;Nitrate2
-1999-10-07;;1.4;;1.2
-1999-11-04;<;0.99;<;0.25
-1999-12-30;;1.42;;1.4
-2000-01-04;;1.59;;2.1
-2000-02-03;;1.54;;1.0
-2000-02-15;;1.37;<;.50
-2000-02-19;<;1.24;;1.4
diff --git a/inst/extdata/WaterML2Example.xml b/inst/extdata/WaterML2Example.xml
index 4226d5f4c68d577d56a9178891f96b0355c2b14d..9180f7d39d702cec4cb1145d36666ed0d62beb31 100644
--- a/inst/extdata/WaterML2Example.xml
+++ b/inst/extdata/WaterML2Example.xml
@@ -1 +1,120 @@
-<wml2:Collection xmlns:gml="http://www.opengis.net/gml/3.2" xmlns:om="http://www.opengis.net/om/2.0" xmlns:sa="http://www.opengis.net/sampling/2.0" xmlns:swe="http://www.opengis.net/swe/2.0" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:wml2="http://www.opengis.net/waterml/2.0" xmlns:x-wml2="http://www.opengis.net/waterml/2.0" xmlns:gmd="http://www.isotc211.org/2005/gmd" xmlns:gco="http://www.isotc211.org/2005/gco" xmlns:sf="http://www.opengis.net/sampling/2.0" xmlns:sams="http://www.opengis.net/samplingSpatial/2.0" xmlns:gts="http://www.isotc211.org/2005/gts" xmlns:gss="http://www.isotc211.org/2005/gss" xmlns:gsr="http://www.isotc211.org/2005/gsr" xsi:schemaLocation="http://www.opengis.net/waterml/2.0 http://nwisvaws02.er.usgs.gov/ogc-swie/schemas/waterml2.xsd" gml:id="TO_BE_DETERMINED"><gml:identifier codeSpace="http://nwis.waterdata.usgs.gov/WI/nwis">USGS.435601087432701</gml:identifier><gml:name codeSpace="http://nwis.waterdata.usgs.gov/WI/nwis">HIKA RAIN GAGE NEAR CLEVELAND, WI</gml:name><wml2:metadata><wml2:DocumentMetadata gml:id="doc.USGS.MP.USGS.435601087432701"><gml:metaDataProperty xlink:href="contact"><gml:GenericMetaData>http://cida.usgs.gov</gml:GenericMetaData></gml:metaDataProperty><wml2:generationDate>2014-01-27T16:33:58</wml2:generationDate><wml2:version xlink:href="http://www.opengis.net/waterml/2.0" xlink:title="WaterML 2.0"/></wml2:DocumentMetadata></wml2:metadata><wml2:observationMember><om:OM_Observation gml:id="obs.USGS.435601087432701"><om:metadata><wml2:ObservationMetadata><gmd:contact><gmd:CI_ResponsibleParty><gmd:organisationName><gco:CharacterString>Wisconsin Water Science Center</gco:CharacterString></gmd:organisationName><gmd:role><gmd:CI_RoleCode codeList="http://www.isotc211.org/2005/resources/CodeList/gmxCodelists.xml#CI_RoleCode" 
codeListValue="owner"/></gmd:role></gmd:CI_ResponsibleParty></gmd:contact><gmd:dateStamp><gco:DateTime>2014-01-27T16:33:58</gco:DateTime></gmd:dateStamp><gmd:identificationInfo/></wml2:ObservationMetadata></om:metadata><om:phenomenonTime><gml:TimePeriod gml:id="pt.USGS.435601087432701"><gml:beginPosition>2012-01-01</gml:beginPosition><gml:endPosition>2012-09-26</gml:endPosition></gml:TimePeriod></om:phenomenonTime><om:resultTime><gml:TimeInstant gml:id="forecast.2014-01-27"><gml:timePosition>2014-01-27T16:33:58</gml:timePosition></gml:TimeInstant></om:resultTime><om:validTime><gml:TimePeriod gml:id="vt.USGS.435601087432701"><gml:beginPosition>2012-01-01</gml:beginPosition><gml:endPosition>2012-09-26</gml:endPosition></gml:TimePeriod></om:validTime><om:procedure xlink:href="http://nwis.waterdata.usgs.gov/nwis/help/?read_file=stat&amp;format=table" xlink:title="SUMMATION VALUES"/><om:observedProperty xlink:href="http://nwis.waterdata.usgs.gov/usa/nwis/pmcodes?radio_pm_search=pm_search&amp;format=html_table&amp;show=parameter_nm&amp;show=parameter_units&amp;pm_search=00045" xlink:title="Precipitation"/><om:featureOfInterest><wml2:MonitoringPoint gml:id="USGS.MP.435601087432701"><sf:sampledFeature xlink:href="http://nwisvaws02.er.usgs.gov/ogc-swie/wfs?request=GetFeature&amp;featureId=435601087432701"/><sf:parameter><om:NamedValue><om:name xlink:title="Watershed"/><om:value>Manitowoc-Sheboygan</om:value></om:NamedValue></sf:parameter><sams:shape><gml:Point gml:id="USGS.P.435601087432701"><gml:pos srsName="urn:ogc:def:crs:EPSG:4269">43.93361110 -87.72416670</gml:pos></gml:Point></sams:shape><wml2:descriptionReference xlink:href="http://waterdata.usgs.gov/nwis/nwisman/?site_no=435601087432701" xlink:title="HIKA RAIN GAGE NEAR CLEVELAND, 
WI"/><wml2:timeZone><wml2:TimeZone><wml2:zoneOffset>-06:00</wml2:zoneOffset><wml2:zoneAbbreviation>CST</wml2:zoneAbbreviation></wml2:TimeZone></wml2:timeZone></wml2:MonitoringPoint></om:featureOfInterest><om:result><wml2:MeasurementTimeseries gml:id="ts_435601087432701_00045_00006"><wml2:metadata><wml2:TimeseriesMetadata><wml2:temporalExtent><gml:TimePeriod gml:id="USGS.TP.00045_00006"><gml:beginPosition>2012-01-01</gml:beginPosition><gml:endPosition>2012-09-26</gml:endPosition></gml:TimePeriod></wml2:temporalExtent></wml2:TimeseriesMetadata></wml2:metadata><wml2:defaultPointMetadata><wml2:DefaultTVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/WI/nwis/help" xlink:title="Approved for publication. Processing and review completed."/><wml2:uom code="in"/><wml2:interpolationType xlink:href="http://www.opengis.net/def/interpolationType/WaterML/2.0/TotalPrec" xlink:title="Preceding total"/></wml2:DefaultTVPMeasurementMetadata></wml2:defaultPointMetadata><wml2:point><wml2:MeasurementTVP><wml2:time>2012-01-01T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-01-02T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-01-03T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored 
seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-01-04T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-01-05T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-01-06T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-01-07T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-01-08T12:00:00-06:00</wml2:time><wml2:value 
xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-01-09T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-01-10T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-01-11T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-01-12T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to 
revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-01-13T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-01-14T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-01-15T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-01-16T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-01-17T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier 
xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-01-18T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-01-19T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-01-20T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-01-21T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-01-22T12:00:00-06:00</wml2:time><wml2:value 
xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-01-23T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-01-24T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-01-25T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-01-26T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to 
revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-01-27T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-01-28T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-01-29T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-01-30T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-01-31T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier 
xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-02-01T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-02-02T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-02-03T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-02-04T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-02-05T12:00:00-06:00</wml2:time><wml2:value 
xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-02-06T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-02-07T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-02-08T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-02-09T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to 
revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-02-10T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-02-11T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-02-12T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-02-13T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-02-14T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier 
xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-02-15T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-02-16T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-02-17T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-02-18T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-02-19T12:00:00-06:00</wml2:time><wml2:value 
xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-02-20T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-02-21T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-02-22T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-02-23T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to 
revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-02-24T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-02-25T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-02-26T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-02-27T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-02-28T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier 
xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-02-29T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-03-01T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-03-02T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-03-03T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-03-04T12:00:00-06:00</wml2:time><wml2:value 
xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-03-05T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-03-06T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-03-07T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-03-08T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to 
revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-03-09T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-03-10T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-03-11T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-03-12T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-03-13T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier 
xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-03-14T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-03-15T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-03-16T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-03-17T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-03-18T12:00:00-06:00</wml2:time><wml2:value 
xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-03-19T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-03-20T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-03-21T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-03-22T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to 
revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-03-23T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-03-24T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-03-25T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-03-26T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-03-27T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier 
xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-03-28T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-03-29T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-03-30T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-03-31T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-04-01T12:00:00-06:00</wml2:time><wml2:value 
xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-04-02T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-04-03T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-04-04T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-04-05T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to 
revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-04-06T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-04-07T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-04-08T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-04-09T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-04-10T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier 
xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-04-11T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-04-12T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-04-13T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-04-14T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-04-15T12:00:00-06:00</wml2:time><wml2:value 
xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-04-16T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-04-17T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-04-18T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-04-19T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to 
revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-04-20T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-04-21T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-04-22T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-04-23T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-04-24T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier 
xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-04-25T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-04-26T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-04-27T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-04-28T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-04-29T12:00:00-06:00</wml2:time><wml2:value 
xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-04-30T12:00:00-06:00</wml2:time><wml2:value xsi:nil="true"/><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:comment>Parameter monitored seasonally</wml2:comment><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-05-01T12:00:00-06:00</wml2:time><wml2:value>0.96</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-05-02T12:00:00-06:00</wml2:time><wml2:value>0.75</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-05-03T12:00:00-06:00</wml2:time><wml2:value>1.44</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-05-04T12:00:00-06:00</wml2:time><wml2:value>0.14</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier 
xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-05-05T12:00:00-06:00</wml2:time><wml2:value>0.08</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-05-06T12:00:00-06:00</wml2:time><wml2:value>0.80</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-05-07T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-05-08T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-05-09T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to 
revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-05-10T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-05-11T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-05-12T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-05-13T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-05-14T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to 
revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-05-15T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-05-16T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-05-17T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-05-18T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-05-19T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to 
revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-05-20T12:00:00-06:00</wml2:time><wml2:value>0.02</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-05-21T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-05-22T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-05-23T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-05-24T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to 
revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-05-25T12:00:00-06:00</wml2:time><wml2:value>0.38</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-05-26T12:00:00-06:00</wml2:time><wml2:value>0.19</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-05-27T12:00:00-06:00</wml2:time><wml2:value>0.17</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-05-28T12:00:00-06:00</wml2:time><wml2:value>0.30</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-05-29T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to 
revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-05-30T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-05-31T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-06-01T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-06-02T12:00:00-06:00</wml2:time><wml2:value>0.03</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-06-03T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to 
revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-06-04T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-06-05T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-06-06T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-06-07T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-06-08T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to 
revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-06-09T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-06-10T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-06-11T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-06-12T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-06-13T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to 
revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-06-14T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-06-15T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-06-16T12:00:00-06:00</wml2:time><wml2:value>0.50</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-06-17T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-06-18T12:00:00-06:00</wml2:time><wml2:value>2.90</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to 
revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-06-19T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-06-20T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-06-21T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-06-22T12:00:00-06:00</wml2:time><wml2:value>0.21</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-06-23T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to 
revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-06-24T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-06-25T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-06-26T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-06-27T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-06-28T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to 
revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-06-29T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-06-30T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-07-01T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-07-02T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-07-03T12:00:00-06:00</wml2:time><wml2:value>0.41</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to 
revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-07-04T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-07-05T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-07-06T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-07-07T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-07-08T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to 
revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-07-09T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-07-10T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-07-12T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-07-13T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-07-14T12:00:00-06:00</wml2:time><wml2:value>0.06</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to 
revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-07-15T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-07-16T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-07-17T12:00:00-06:00</wml2:time><wml2:value>0.24</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-07-18T12:00:00-06:00</wml2:time><wml2:value>0.01</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-07-19T12:00:00-06:00</wml2:time><wml2:value>0.22</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to 
revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-07-20T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-07-21T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-07-22T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-07-23T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-07-24T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to 
revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-07-25T12:00:00-06:00</wml2:time><wml2:value>0.72</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-07-26T12:00:00-06:00</wml2:time><wml2:value>0.41</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-07-27T12:00:00-06:00</wml2:time><wml2:value>0.02</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-07-28T12:00:00-06:00</wml2:time><wml2:value>0.01</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-07-29T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to 
revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-07-30T12:00:00-06:00</wml2:time><wml2:value>0.01</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-07-31T12:00:00-06:00</wml2:time><wml2:value>0.14</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-08-01T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-08-02T12:00:00-06:00</wml2:time><wml2:value>0.19</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-08-03T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to 
revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-08-04T12:00:00-06:00</wml2:time><wml2:value>0.05</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-08-05T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-08-06T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-08-07T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-08-08T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to 
revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-08-09T12:00:00-06:00</wml2:time><wml2:value>1.47</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-08-10T12:00:00-06:00</wml2:time><wml2:value>0.20</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-08-11T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-08-12T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-08-13T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to 
revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-08-14T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-08-15T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-08-16T12:00:00-06:00</wml2:time><wml2:value>0.44</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-08-17T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-08-18T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to 
revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-08-19T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-08-20T12:00:00-06:00</wml2:time><wml2:value>0.01</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-08-21T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-08-22T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-08-23T12:00:00-06:00</wml2:time><wml2:value>0.01</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to 
revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-08-24T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-08-25T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-08-26T12:00:00-06:00</wml2:time><wml2:value>0.41</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-08-27T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-08-28T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to 
revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-08-29T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-08-30T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-08-31T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-09-01T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-09-02T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to 
revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-09-03T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-09-04T12:00:00-06:00</wml2:time><wml2:value>0.09</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-09-05T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-09-06T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-09-07T12:00:00-06:00</wml2:time><wml2:value>0.21</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to 
revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-09-08T12:00:00-06:00</wml2:time><wml2:value>0.02</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-09-09T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-09-10T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-09-11T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-09-12T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to 
revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-09-13T12:00:00-06:00</wml2:time><wml2:value>0.01</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-09-14T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-09-15T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-09-16T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-09-17T12:00:00-06:00</wml2:time><wml2:value>0.02</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to 
revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-09-18T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-09-19T12:00:00-06:00</wml2:time><wml2:value>0.13</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-09-20T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-09-21T12:00:00-06:00</wml2:time><wml2:value>0.18</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-09-22T12:00:00-06:00</wml2:time><wml2:value>0.01</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to 
revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-09-23T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-09-24T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-09-25T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point><wml2:point><wml2:MeasurementTVP><wml2:time>2012-09-26T12:00:00-06:00</wml2:time><wml2:value>0.00</wml2:value><wml2:metadata><wml2:TVPMeasurementMetadata><wml2:qualifier xlink:href="http://waterdata.usgs.gov/nwis/help" xlink:title="Provisional data subject to revision."/></wml2:TVPMeasurementMetadata></wml2:metadata></wml2:MeasurementTVP></wml2:point></wml2:MeasurementTimeseries></om:result></om:OM_Observation></wml2:observationMember></wml2:Collection>
\ No newline at end of file
+<wml2:Collection gml:id="C.USGS.01646500" xsi:schemaLocation="http://www.opengis.net/waterml/2.0 http://schemas.opengis.net/waterml/2.0/waterml2.xsd" xmlns:wml2="http://www.opengis.net/waterml/2.0" xmlns:gml="http://www.opengis.net/gml/3.2" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:om="http://www.opengis.net/om/2.0" xmlns:sa="http://www.opengis.net/sampling/2.0" xmlns:sams="http://www.opengis.net/samplingSpatial/2.0" xmlns:swe="http://www.opengis.net/swe/2.0">
+  <gml:identifier codeSpace="http://waterservices.usgs.gov/nwis/dv">USGS.01646500</gml:identifier>
+  <gml:name codeSpace="http://waterservices.usgs.gov/nwis/dv">Timeseries collected at POTOMAC RIVER NEAR WASH, DC LITTLE FALLS PUMP STA</gml:name>
+  <wml2:metadata>
+    <wml2:DocumentMetadata gml:id="doc.USGS.MP.USGS.01646500">
+      <gml:metaDataProperty about="contact" xlink:href="http://waterservices.usgs.gov"/>
+      <wml2:generationDate>2014-10-30T13:46:03.425-04:00</wml2:generationDate>
+      <wml2:version xlink:href="http://www.opengis.net/waterml/2.0" xlink:title="WaterML 2.0"/>
+    </wml2:DocumentMetadata>
+  </wml2:metadata>
+  <wml2:observationMember>
+    <om:OM_Observation gml:id="obs.USGS.01646500.00060.1.00003">
+      <om:phenomenonTime>
+        <gml:TimePeriod gml:id="sample_time.USGS.01646500.00060.1.00003">
+          <gml:beginPosition>2014-09-01</gml:beginPosition>
+          <gml:endPosition>2014-09-08</gml:endPosition>
+        </gml:TimePeriod>
+      </om:phenomenonTime>
+      <om:resultTime>
+        <gml:TimeInstant gml:id="requested_time.USGS.01646500.00060.1.00003">
+          <gml:timePosition>2014-10-30T13:46:03.425-04:00</gml:timePosition>
+        </gml:TimeInstant>
+      </om:resultTime>
+      <om:procedure>
+        <wml2:ObservationProcess gml:id="process.USGS.01646500.00060.1.00003">
+          <wml2:processType xlink:href="http://www.opengis.net/def/waterml/2.0/processType/Sensor" xlink:title="Sensor"/>
+          <wml2:parameter xlink:title="Statistic" xlink:href="http://waterdata.usgs.gov/nwisweb/rdf?statCd=00003">
+            <om:NamedValue>
+              <om:name xlink:title="Mean"/>
+              <om:value xsi:type="xs:string" xmlns:xs="http://www.w3.org/2001/XMLSchema">00003</om:value>
+            </om:NamedValue>
+          </wml2:parameter>
+        </wml2:ObservationProcess>
+      </om:procedure>
+      <om:observedProperty xlink:title="Discharge" xlink:href="http://waterdata.usgs.gov/nwisweb/rdf?parmCd=00060"/>
+      <om:featureOfInterest xlink:title="POTOMAC RIVER NEAR WASH, DC LITTLE FALLS PUMP STA">
+        <wml2:MonitoringPoint gml:id="USGS.MP.USGS.01646500.00060.1.00003">
+          <gml:descriptionReference xlink:href="http://waterservices.usgs.gov/nwis/site/?sites=01646500&amp;agencyCd=USGS&amp;format=rdb" xlink:title="POTOMAC RIVER NEAR WASH, DC LITTLE FALLS PUMP STA"/>
+          <sa:sampledFeature xlink:title="POTOMAC RIVER NEAR WASH, DC LITTLE FALLS PUMP STA"/>
+          <sams:shape>
+            <gml:Point gml:id="USGS.P.USGS.01646500.00060.1.00003">
+              <gml:pos srsName="urn:ogc:def:crs:EPSG:4326">38.94977778 -77.12763889</gml:pos>
+            </gml:Point>
+          </sams:shape>
+        </wml2:MonitoringPoint>
+      </om:featureOfInterest>
+      <om:result>
+        <wml2:MeasurementTimeseries gml:id="TS.USGS.01646500.00060.1.00003">
+          <wml2:defaultPointMetadata>
+            <wml2:DefaultTVPMeasurementMetadata>
+              <wml2:qualifier xlink:title="Provisional data subject to revision.">
+                <swe:Category definition="http://waterdata.usgs.gov/nwisweb/rdf?dvQualCd=P">
+                  <swe:description>Provisional</swe:description>
+                  <swe:value>P</swe:value>
+                </swe:Category>
+              </wml2:qualifier>
+              <wml2:uom xlink:title="ft3/s"/>
+              <wml2:interpolationType/>
+            </wml2:DefaultTVPMeasurementMetadata>
+          </wml2:defaultPointMetadata>
+          <wml2:point>
+            <wml2:MeasurementTVP>
+              <wml2:metadata>
+                <wml2:TVPMeasurementMetadata/>
+              </wml2:metadata>
+            </wml2:MeasurementTVP>
+          </wml2:point>
+          <wml2:point>
+            <wml2:MeasurementTVP>
+              <wml2:time>2014-09-01</wml2:time>
+              <wml2:value>2690.0</wml2:value>
+            </wml2:MeasurementTVP>
+          </wml2:point>
+          <wml2:point>
+            <wml2:MeasurementTVP>
+              <wml2:time>2014-09-02</wml2:time>
+              <wml2:value>2750.0</wml2:value>
+            </wml2:MeasurementTVP>
+          </wml2:point>
+          <wml2:point>
+            <wml2:MeasurementTVP>
+              <wml2:time>2014-09-03</wml2:time>
+              <wml2:value>2990.0</wml2:value>
+            </wml2:MeasurementTVP>
+          </wml2:point>
+          <wml2:point>
+            <wml2:MeasurementTVP>
+              <wml2:time>2014-09-04</wml2:time>
+              <wml2:value>3180.0</wml2:value>
+            </wml2:MeasurementTVP>
+          </wml2:point>
+          <wml2:point>
+            <wml2:MeasurementTVP>
+              <wml2:time>2014-09-05</wml2:time>
+              <wml2:value>2940.0</wml2:value>
+            </wml2:MeasurementTVP>
+          </wml2:point>
+          <wml2:point>
+            <wml2:MeasurementTVP>
+              <wml2:time>2014-09-06</wml2:time>
+              <wml2:value>3100.0</wml2:value>
+            </wml2:MeasurementTVP>
+          </wml2:point>
+          <wml2:point>
+            <wml2:MeasurementTVP>
+              <wml2:time>2014-09-07</wml2:time>
+              <wml2:value>2620.0</wml2:value>
+            </wml2:MeasurementTVP>
+          </wml2:point>
+          <wml2:point>
+            <wml2:MeasurementTVP>
+              <wml2:time>2014-09-08</wml2:time>
+              <wml2:value>2300.0</wml2:value>
+            </wml2:MeasurementTVP>
+          </wml2:point>
+        </wml2:MeasurementTimeseries>
+      </om:result>
+    </om:OM_Observation>
+  </wml2:observationMember>
+</wml2:Collection>
diff --git a/man/checkStartEndDate.Rd b/man/checkStartEndDate.Rd
deleted file mode 100644
index 8f4519b56e87c0c7f92c09c6d73c02dc2178ab67..0000000000000000000000000000000000000000
--- a/man/checkStartEndDate.Rd
+++ /dev/null
@@ -1,28 +0,0 @@
-% Generated by roxygen2 (4.0.2): do not edit by hand
-\name{checkStartEndDate}
-\alias{checkStartEndDate}
-\title{checkStartEndDate}
-\usage{
-checkStartEndDate(startDate, endDate, interactive = TRUE)
-}
-\arguments{
-\item{startDate}{string}
-
-\item{endDate}{string}
-
-\item{interactive}{logical Option for interactive mode.  If true, there is user interaction for error handling and data checks.}
-}
-\value{
-vector where first value is startDate, second is endDate
-}
-\description{
-Checks that the start date is before the end date.  If not, it will give the user the opportunity to correct, otherwise will create a warning.
-}
-\examples{
-startDate <- '1985-01-01'
-endDate <- '1990-01-01'
-checkStartEndDate(startDate, endDate)
-}
-\keyword{WRTDS}
-\keyword{flow}
-
diff --git a/man/compressData.Rd b/man/compressData.Rd
deleted file mode 100644
index 5782f8d5dc6b13a024a8cfb16222eebba6b25d58..0000000000000000000000000000000000000000
--- a/man/compressData.Rd
+++ /dev/null
@@ -1,38 +0,0 @@
-% Generated by roxygen2 (4.0.2): do not edit by hand
-\name{compressData}
-\alias{compressData}
-\title{Compress sample data frame}
-\usage{
-compressData(data, interactive = TRUE)
-}
-\arguments{
-\item{data}{dataframe contains at least dateTime, value, code columns}
-
-\item{interactive}{logical Option for interactive mode.  If true, there is user interaction for error handling and data checks.}
-}
-\value{
-dataframe returnDataFrame data frame containing dateTime, ConcHigh, ConcLow, Uncen, ConcAve
-}
-\description{
-Using raw data that has at least dateTime, value, code, populates the measured data portion of the Sample data frame used in WRTDS
-ConcLow  = Lower bound for an observed concentration
-ConcHigh = Upper bound for an observed concentration
-ConcAve  = Average of ConcLow and ConcHigh.  If ConcLow is NA, then ConcAve = ConcHigh/2
-Uncen    = 1 if uncensored, 0 if censored
-}
-\examples{
-dateTime <- c('1985-01-01', '1985-01-02', '1985-01-03')
-comment1 <- c("","","")
-value1 <- c(1,2,3)
-comment2 <- c("","<","")
-value2 <- c(2,3,4)
-comment3 <- c("","","<")
-value3 <- c(3,4,5)
-dataInput <- data.frame(dateTime, comment1, value1,
-      comment2, value2,
-      comment3, value3, stringsAsFactors=FALSE)
-compressData(dataInput)
-}
-\keyword{WRTDS}
-\keyword{flow}
-
diff --git a/man/constructNWISURL.Rd b/man/constructNWISURL.Rd
index 384b286afc826cde9aaf65edf53ed4d706707d55..c62340a48e61709a45a2fad1c68ed34664d362ad 100644
--- a/man/constructNWISURL.Rd
+++ b/man/constructNWISURL.Rd
@@ -3,9 +3,9 @@
 \alias{constructNWISURL}
 \title{Construct NWIS url for data retrieval}
 \usage{
-constructNWISURL(siteNumber, parameterCd, startDate, endDate, service,
-  statCd = "00003", format = "xml", expanded = FALSE,
-  interactive = TRUE)
+constructNWISURL(siteNumber, parameterCd = "00060", startDate = "",
+  endDate = "", service, statCd = "00003", format = "xml",
+  expanded = FALSE, ratingType = "base")
 }
 \arguments{
 \item{siteNumber}{string or vector of strings USGS site number.  This is usually an 8 digit number}
@@ -16,7 +16,8 @@ constructNWISURL(siteNumber, parameterCd, startDate, endDate, service,
 
 \item{endDate}{string ending date for data retrieval in the form YYYY-MM-DD.}
 
-\item{service}{string USGS service to call. Possible values are "dv" (daily values), "uv" (unit/instantaneous values), "qw" (water quality data), "gwlevels" (groundwater),and "wqp" (water quality portal, which can include STORET).}
+\item{service}{string USGS service to call. Possible values are "dv" (daily values), "uv" (unit/instantaneous values),
+"qw" (water quality data), "gwlevels" (groundwater),and "rating" (rating curve), "peak", "meas" (discrete streamflow measurements).}
 
 \item{statCd}{string or vector USGS statistic code only used for daily value service. This is usually 5 digits.  Daily mean (00003) is the default.}
 
@@ -26,7 +27,7 @@ but the user must carefully check the results to see if the data returns matches
 
 \item{expanded}{logical defaults to FALSE. If TRUE, retrieves additional information, only applicable for qw data.}
 
-\item{interactive}{logical Option for interactive mode.  If TRUE, there is user interaction for error handling and data checks.}
+\item{ratingType}{can be "base", "corr", or "exsa". Only applies to rating curve data.}
 }
 \value{
 url string
@@ -43,15 +44,18 @@ endDate <- ''
 pCode <- c("00060","00010")
 url_daily <- constructNWISURL(siteNumber,pCode,
            startDate,endDate,'dv',statCd=c("00003","00001"))
-url_unit <- constructNWISURL(siteNumber,pCode,"2012-06-28","2012-06-30",'iv')
 \dontrun{
+# Not running for time considerations
+url_unit <- constructNWISURL(siteNumber,pCode,"2012-06-28","2012-06-30",'iv')
+
 url_qw_single <- constructNWISURL(siteNumber,"01075",startDate,endDate,'qw')
 url_qw <- constructNWISURL(siteNumber,c('01075','00029','00453'),
            startDate,endDate,'qw')
-url_wqp <- constructNWISURL(paste("USGS",siteNumber,sep="-"),c('01075','00029','00453'),
-           startDate,endDate,'wqp')
 url_daily_tsv <- constructNWISURL(siteNumber,pCode,startDate,endDate,'dv',
            statCd=c("00003","00001"),format="tsv")
+url_rating <- constructNWISURL(siteNumber,service="rating",ratingType="base")
+url_peak <- constructNWISURL(siteNumber, service="peak")
+url_meas <- constructNWISURL(siteNumber, service="meas")
            }
 }
 \keyword{USGS}
diff --git a/man/constructWQPURL.Rd b/man/constructWQPURL.Rd
new file mode 100644
index 0000000000000000000000000000000000000000..235ae77640290164cf54badbe40c2a2227dbf53b
--- /dev/null
+++ b/man/constructWQPURL.Rd
@@ -0,0 +1,39 @@
+% Generated by roxygen2 (4.0.2): do not edit by hand
+\name{constructWQPURL}
+\alias{constructWQPURL}
+\title{Construct WQP url for data retrieval}
+\usage{
+constructWQPURL(siteNumber, parameterCd, startDate, endDate)
+}
+\arguments{
+\item{siteNumber}{string or vector of strings USGS site number.  This is usually an 8 digit number}
+
+\item{parameterCd}{string or vector of USGS parameter code.  This is usually an 5 digit number.}
+
+\item{startDate}{string starting date for data retrieval in the form YYYY-MM-DD.}
+
+\item{endDate}{string ending date for data retrieval in the form YYYY-MM-DD.}
+}
+\value{
+url string
+}
+\description{
+Imports data from WQP web service. This function gets the data from here: \url{http://nwis.waterdata.usgs.gov/nwis/qwdata}
+A list of parameter codes can be found here: \url{http://nwis.waterdata.usgs.gov/nwis/pmcodes/}
+A list of statistic codes can be found here: \url{http://nwis.waterdata.usgs.gov/nwis/help/?read_file=stat&format=table}
+}
+\examples{
+siteNumber <- '01594440'
+startDate <- '1985-01-01'
+endDate <- ''
+pCode <- c("00060","00010")
+url_wqp <- constructWQPURL(paste("USGS",siteNumber,sep="-"),
+           c('01075','00029','00453'),
+           startDate,endDate)
+}
+\keyword{WQP}
+\keyword{data}
+\keyword{import}
+\keyword{service}
+\keyword{web}
+
diff --git a/man/dataOverview.Rd b/man/dataOverview.Rd
deleted file mode 100644
index e8fdb6c6cc17af832e76549f5403e98bb8a34950..0000000000000000000000000000000000000000
--- a/man/dataOverview.Rd
+++ /dev/null
@@ -1,29 +0,0 @@
-% Generated by roxygen2 (4.0.2): do not edit by hand
-\name{dataOverview}
-\alias{dataOverview}
-\title{Data Overview for WRTDS}
-\usage{
-dataOverview(Daily, Sample)
-}
-\arguments{
-\item{Daily}{dataframe}
-
-\item{Sample}{dataframe}
-}
-\description{
-Gives a summary of data to be used for WRTDS analysis
-}
-\examples{
-# These examples require an internet connection to run
-exDaily <- getNWISDaily('01594440','00060', '1985-01-01', '1985-03-31', interactive=FALSE)
-exSample <- getNWISSample('01594440','01075', '1985-01-01', '1985-03-31', interactive=FALSE)
-dataOverview(Daily = exDaily, Sample = exSample)
-}
-\seealso{
-\code{\link{mergeReport}}
-}
-\keyword{USGS}
-\keyword{WRTDS}
-\keyword{data}
-\keyword{import}
-
diff --git a/man/dataRetrieval-package.Rd b/man/dataRetrieval-package.Rd
deleted file mode 100644
index 06e4b1493549825394033e4d506114e541c1d8cc..0000000000000000000000000000000000000000
--- a/man/dataRetrieval-package.Rd
+++ /dev/null
@@ -1,32 +0,0 @@
-% Generated by roxygen2 (4.0.2): do not edit by hand
-\docType{package}
-\name{dataRetrieval-package}
-\alias{dataRetrieval-package}
-\title{Retrieval functions for USGS data}
-\description{
-\tabular{ll}{
-Package: \tab dataRetrieval\cr
-Type: \tab Package\cr
-Version: \tab 1.4.0\cr
-Date: \tab 2014-09-16\cr
-License: \tab Unlimited for this package, dependencies have more restrictive licensing.\cr
-Copyright: \tab This software is in the public domain because it contains materials
-that originally came from the United States Geological Survey, an agency of
-the United States Department of Interior. For more information, see the
-official USGS copyright policy at
-http://www.usgs.gov/visual-id/credit_usgs.html#copyright\cr
-LazyLoad: \tab yes\cr
-}
-}
-\details{
-Collection of functions to help retrieve USGS data from either web services or user provided data files.
-}
-\author{
-Robert M. Hirsch \email{rhirsch@usgs.gov}, Laura De Cicco \email{ldecicco@usgs.gov}
-}
-\references{
-Hirsch, R. M., Moyer, D. L. and Archfield, S. A. (2010), Weighted Regressions on Time, Discharge, and Season (WRTDS), with an Application to Chesapeake Bay River Inputs. JAWRA Journal of the American Water Resources Association, 46: 857-880. doi: 10.1111/j.1752-1688.2010.00482.x
-}
-\keyword{data,}
-\keyword{retrieval}
-
diff --git a/man/dateFormatCheck.Rd b/man/dateFormatCheck.Rd
deleted file mode 100644
index 5800f638b3e0f91691724019061fc454b7f5613f..0000000000000000000000000000000000000000
--- a/man/dateFormatCheck.Rd
+++ /dev/null
@@ -1,25 +0,0 @@
-% Generated by roxygen2 (4.0.2): do not edit by hand
-\name{dateFormatCheck}
-\alias{dateFormatCheck}
-\title{Check date format}
-\usage{
-dateFormatCheck(date)
-}
-\arguments{
-\item{date}{string}
-}
-\value{
-condition logical if TRUE,
-}
-\description{
-Checks to see if format is YYYY-MM-DD. Also performs a few other date checks.
-}
-\examples{
-date <- '1985-01-01'
-dateFormatCheck(date)
-dateWrong <- '1999/1/7'
-dateFormatCheck(dateWrong)
-}
-\keyword{WRTDS}
-\keyword{flow}
-
diff --git a/man/formatCheckDate.Rd b/man/formatCheckDate.Rd
deleted file mode 100644
index a2c6843499ada9b47f91349c8dbdfd8b828bc2ef..0000000000000000000000000000000000000000
--- a/man/formatCheckDate.Rd
+++ /dev/null
@@ -1,28 +0,0 @@
-% Generated by roxygen2 (4.0.2): do not edit by hand
-\name{formatCheckDate}
-\alias{formatCheckDate}
-\title{formatCheckDate}
-\usage{
-formatCheckDate(Date, dateString, interactive = TRUE)
-}
-\arguments{
-\item{Date}{string}
-
-\item{dateString}{string used in either error message or interactive message. An example would be "startDate"}
-
-\item{interactive}{logical Option for interactive mode.  If true, there is user interaction for error handling and data checks.}
-}
-\value{
-condition logical if TRUE,
-}
-\description{
-Response to the date format checker.  If the date is not formated correctly, it will give the user the opportunity to correct, otherwise will create a warning.
-}
-\examples{
-Date <- '1985-01-01'
-dateString <- 'startDate'
-formatCheckDate(Date, dateString, interactive = FALSE)
-}
-\keyword{WRTDS}
-\keyword{flow}
-
diff --git a/man/formatCheckParameterCd.Rd b/man/formatCheckParameterCd.Rd
deleted file mode 100644
index 4af747f0bbf7f1ca80ed6356952e882f2097b445..0000000000000000000000000000000000000000
--- a/man/formatCheckParameterCd.Rd
+++ /dev/null
@@ -1,25 +0,0 @@
-% Generated by roxygen2 (4.0.2): do not edit by hand
-\name{formatCheckParameterCd}
-\alias{formatCheckParameterCd}
-\title{formatCheckParameterCd}
-\usage{
-formatCheckParameterCd(parameterCd, interactive = TRUE)
-}
-\arguments{
-\item{parameterCd}{string to check}
-
-\item{interactive}{logical Option for interactive mode.  If true, there is user interaction for error handling and data checks.}
-}
-\value{
-parameterCd string
-}
-\description{
-Checks that the parameter code is 5 digits. If it is less, it will pad the string with zeros. If more, ask the user to re-enter.
-}
-\examples{
-pCode <- '01234'
-formatCheckParameterCd(pCode)
-}
-\keyword{WRTDS}
-\keyword{flow}
-
diff --git a/man/getDailyDataFromFile.Rd b/man/getDailyDataFromFile.Rd
deleted file mode 100644
index 62e5346a65d29d075faeffecfc9a574b6e994041..0000000000000000000000000000000000000000
--- a/man/getDailyDataFromFile.Rd
+++ /dev/null
@@ -1,39 +0,0 @@
-% Generated by roxygen2 (4.0.2): do not edit by hand
-\name{getDailyDataFromFile}
-\alias{getDailyDataFromFile}
-\title{Import Daily Data for WRTDS}
-\usage{
-getDailyDataFromFile(filePath, fileName, hasHeader = TRUE, separator = ",",
-  qUnit = 1, interactive = TRUE)
-}
-\arguments{
-\item{filePath}{string specifying the path to the file}
-
-\item{fileName}{string name of file to open}
-
-\item{hasHeader}{logical true if the first row of data is the column headers}
-
-\item{separator}{string character that separates data cells}
-
-\item{qUnit}{number 1 is cubic feet per second, 2 is cubic meters per second, 3 is 10^3 cubic feet per second, and 4 is 10^3 cubic meters per second}
-
-\item{interactive}{logical Option for interactive mode.  If true, there is user interaction for error handling and data checks.}
-}
-\value{
-Daily dataframe
-}
-\description{
-This function is being deprecated for \code{\link{getUserDaily}}.
-}
-\examples{
-filePath <- system.file("extdata", package="dataRetrieval")
-filePath <- paste(filePath,"/",sep="")
-fileName <- "ChoptankRiverFlow.txt"
-\dontrun{Daily <- getDailyDataFromFile(filePath,fileName,separator="\\t")}
-}
-\keyword{USGS}
-\keyword{WRTDS}
-\keyword{data}
-\keyword{file}
-\keyword{import}
-
diff --git a/man/getDataFromFile.Rd b/man/getDataFromFile.Rd
deleted file mode 100644
index a138b286029a9940716b129bb12feef972330986..0000000000000000000000000000000000000000
--- a/man/getDataFromFile.Rd
+++ /dev/null
@@ -1,36 +0,0 @@
-% Generated by roxygen2 (4.0.2): do not edit by hand
-\name{getDataFromFile}
-\alias{getDataFromFile}
-\title{Basic Data Import for Water Flow Data}
-\usage{
-getDataFromFile(filePath, fileName, hasHeader = TRUE, separator = ",")
-}
-\arguments{
-\item{filePath}{string specifying the path to the file}
-
-\item{fileName}{string name of file to open}
-
-\item{hasHeader}{logical true if the first row of data is the column headers}
-
-\item{separator}{string character that separates data cells}
-}
-\value{
-retval dataframe with dateTime, value, and code columns
-}
-\description{
-Imports data from user-supplied data file. Specifically used to import water flow data for use in the WRTDS package.
-For WRTDS usage, the first column is expected to be dates, the second column measured values.
-The third column is optional, it contains any remark codes.
-}
-\examples{
-# Examples of how to use getDataFromFile:
-# Change the file path and file name to something meaningful:
-filePath <- system.file("extdata", package="dataRetrieval")
-filePath <- paste(filePath,"/",sep="")
-fileName <- 'ChoptankRiverFlow.txt'
-ChopData <- getDataFromFile(filePath,fileName, separator="\\t")
-}
-\keyword{data}
-\keyword{file}
-\keyword{import}
-
diff --git a/man/getNWISDaily.Rd b/man/getNWISDaily.Rd
deleted file mode 100644
index aa269cc7101aa18df8c0deb5093fc6ecda4644c0..0000000000000000000000000000000000000000
--- a/man/getNWISDaily.Rd
+++ /dev/null
@@ -1,49 +0,0 @@
-% Generated by roxygen2 (4.0.2): do not edit by hand
-\name{getNWISDaily}
-\alias{getNWISDaily}
-\title{Import NWIS Daily Data for EGRET analysis}
-\usage{
-getNWISDaily(siteNumber, parameterCd, startDate, endDate, interactive = TRUE,
-  convert = TRUE, format = "tsv")
-}
-\arguments{
-\item{siteNumber}{string USGS site number.  This is usually an 8 digit number}
-
-\item{parameterCd}{string USGS parameter code.  This is usually an 5 digit number.}
-
-\item{startDate}{string starting date for data retrieval in the form YYYY-MM-DD.}
-
-\item{endDate}{string ending date for data retrieval in the form YYYY-MM-DD.}
-
-\item{interactive}{logical Option for interactive mode.  If true, there is user interaction for error handling and data checks.}
-
-\item{convert}{logical Option to include a conversion from cfs to cms (35.314667). The default is TRUE,
-which is appropriate for using NWIS data in the EGRET package.  Set this to FALSE to not include the conversion. If the parameter code is not 00060 (NWIS discharge),
-there is no conversion applied.}
-
-\item{format}{string, can be "tsv" or "xml", and is only applicable for daily and unit value requests.  "tsv" returns results faster, but there is a possiblitiy that an incomplete file is returned without warning. XML is slower,
-but will offer a warning if the file was incomplete (for example, if there was a momentary problem with the internet connection). It is possible to safely use the "tsv" option,
-but the user must carefully check the results to see if the data returns matches what is expected. The default is "tsv".}
-}
-\value{
-Daily dataframe
-}
-\description{
-Imports data from NWIS web service. This function gets the data from here: \url{http://waterservices.usgs.gov/}
-A list of parameter codes can be found here: \url{http://nwis.waterdata.usgs.gov/nwis/pmcodes/}
-A list of statistic codes can be found here: \url{http://nwis.waterdata.usgs.gov/nwis/help/?read_file=stat&format=table}
-}
-\examples{
-# These examples require an internet connection to run
-Daily <- getNWISDaily('01594440','00060', '1985-01-01', '1985-03-31')
-DailyCFS <- getNWISDaily('01594440','00060', '1985-01-01', '1985-03-31',convert=FALSE)
-DailySuspSediment <- getNWISDaily('01594440','80154', '1985-01-01', '1985-03-31')
-}
-\seealso{
-\code{\link{getNWISdvData}}, \code{\link{populateDaily}}
-}
-\keyword{USGS}
-\keyword{WRTDS}
-\keyword{data}
-\keyword{import}
-
diff --git a/man/getNWISDataAvailability.Rd b/man/getNWISDataAvailability.Rd
deleted file mode 100644
index 8fdb4950a8b29dc6483654c88d26388d7fe91923..0000000000000000000000000000000000000000
--- a/man/getNWISDataAvailability.Rd
+++ /dev/null
@@ -1,31 +0,0 @@
-% Generated by roxygen2 (4.0.2): do not edit by hand
-\name{getNWISDataAvailability}
-\alias{getNWISDataAvailability}
-\title{USGS data availability}
-\usage{
-getNWISDataAvailability(siteNumber, type = c("uv", "dv", "qw"))
-}
-\arguments{
-\item{siteNumber}{string USGS site number.}
-
-\item{type}{vector string. Options are "uv", "dv", "qw"}
-}
-\value{
-retval dataframe with all information found in the expanded site file
-}
-\description{
-Imports a table of available parameters, period of record, and count.
-}
-\examples{
-# These examples require an internet connection to run
-availableData <- getNWISDataAvailability('05114000')
-# To find just unit value ('instantaneous') data:
-uvData <- getNWISDataAvailability('05114000',type="uv")
-uvDataMulti <- getNWISDataAvailability(c('05114000','09423350'),type="uv")
-}
-\keyword{USGS}
-\keyword{data}
-\keyword{import}
-\keyword{service}
-\keyword{web}
-
diff --git a/man/getNWISInfo.Rd b/man/getNWISInfo.Rd
deleted file mode 100644
index 10f3bdf529b1df517280064a395102ac7903d895..0000000000000000000000000000000000000000
--- a/man/getNWISInfo.Rd
+++ /dev/null
@@ -1,37 +0,0 @@
-% Generated by roxygen2 (4.0.2): do not edit by hand
-\name{getNWISInfo}
-\alias{getNWISInfo}
-\title{Import Metadata for USGS Data}
-\usage{
-getNWISInfo(siteNumber, parameterCd, interactive = TRUE)
-}
-\arguments{
-\item{siteNumber}{string USGS site number.  This is usually an 8 digit number}
-
-\item{parameterCd}{string USGS parameter code.  This is usually an 5 digit number.}
-
-\item{interactive}{logical Option for interactive mode.  If true, there is user interaction for error handling and data checks.}
-}
-\value{
-INFO dataframe with at least param.nm, param.units, parameShortName, paramNumber
-}
-\description{
-Populates INFO data frame for EGRET study.  If either station number or parameter code supplied, imports data about a particular USGS site from NWIS web service.
-This function gets the data from here: \url{http://waterservices.usgs.gov/}
-A list of parameter codes can be found here: \url{http://nwis.waterdata.usgs.gov/nwis/pmcodes/}
-If either station number or parameter code is not supplied, the user will be asked to input data.
-Additionally, the user will be asked for:
-staAbbrev - station abbreviation, will be used in naming output files and for structuring batch jobs
-constitAbbrev - constitute abbreviation
-}
-\examples{
-# These examples require an internet connection to run
-# Automatically gets information about site 05114000 and temperature, no interaction with user
-INFO <- getNWISInfo('05114000','00010')
-}
-\keyword{USGS}
-\keyword{data}
-\keyword{import}
-\keyword{service}
-\keyword{web}
-
diff --git a/man/getNWISSample.Rd b/man/getNWISSample.Rd
deleted file mode 100644
index e6caf8a67373caa53a117d331660ddf0b139e019..0000000000000000000000000000000000000000
--- a/man/getNWISSample.Rd
+++ /dev/null
@@ -1,42 +0,0 @@
-% Generated by roxygen2 (4.0.2): do not edit by hand
-\name{getNWISSample}
-\alias{getNWISSample}
-\title{Import NWIS Sample Data for EGRET analysis}
-\usage{
-getNWISSample(siteNumber, parameterCd, startDate, endDate, interactive = TRUE)
-}
-\arguments{
-\item{siteNumber}{string USGS site number.  This is usually an 8 digit number}
-
-\item{parameterCd}{string USGS parameter code.  This is usually an 5 digit number.}
-
-\item{startDate}{string starting date for data retrieval in the form YYYY-MM-DD.}
-
-\item{endDate}{string ending date for data retrieval in the form YYYY-MM-DD.}
-
-\item{interactive}{logical Option for interactive mode.  If true, there is user interaction for error handling and data checks.}
-}
-\value{
-Sample dataframe
-}
-\description{
-Imports data from NWIS web service. This function gets the data from here: \url{http://nwis.waterdata.usgs.gov/nwis/qwdata/}
-A list of parameter codes can be found here: \url{http://nwis.waterdata.usgs.gov/nwis/pmcodes/}
-A list of statistic codes can be found here: \url{http://nwis.waterdata.usgs.gov/nwis/help/?read_file=stat&format=table}
-For raw data, use getQWData.  This function will retrieve the raw data, and compress it (summing constituents). See
-section 3.4 of the vignette for more details.
-}
-\examples{
-# These examples require an internet connection to run
-Sample_01075 <- getNWISSample('01594440','01075', '1985-01-01', '1985-03-31')
-Sample_All2 <- getNWISSample('05114000',c('00915','00931'), '1985-01-01', '1985-03-31')
-Sample_Select <- getNWISSample('05114000',c('00915','00931'), '', '')
-}
-\seealso{
-\code{\link{compressData}}, \code{\link{populateSampleColumns}}, , \code{\link{getNWISSample}}
-}
-\keyword{USGS}
-\keyword{WRTDS}
-\keyword{data}
-\keyword{import}
-
diff --git a/man/getRDB1Data.Rd b/man/getRDB1Data.Rd
deleted file mode 100644
index 674e5526b0dbc20a0ce20ecc5b101c0d2fd2d5e6..0000000000000000000000000000000000000000
--- a/man/getRDB1Data.Rd
+++ /dev/null
@@ -1,38 +0,0 @@
-% Generated by roxygen2 (4.0.2): do not edit by hand
-\name{getRDB1Data}
-\alias{getRDB1Data}
-\title{Function to return data from the NWIS RDB 1.0 format}
-\usage{
-getRDB1Data(obs_url, asDateTime = FALSE, qw = FALSE)
-}
-\arguments{
-\item{obs_url}{string containing the url for the retrieval}
-
-\item{asDateTime}{logical, if TRUE returns date and time as POSIXct, if FALSE, Date}
-
-\item{qw}{logical, if TRUE parses as water quality data (where dates/times are in start and end times)}
-}
-\value{
-data a data frame containing columns agency, site, dateTime, values, and remark codes for all requested combinations
-}
-\description{
-This function accepts a url parameter that already contains the desired
-NWIS site, parameter code, statistic, startdate and enddate.
-}
-\examples{
-siteNumber <- "02177000"
-startDate <- "2012-09-01"
-endDate <- "2012-10-01"
-offering <- "00003"
-property <- "00060"
-obs_url <- constructNWISURL(siteNumber,property,
-         startDate,endDate,"dv",format="tsv")
-data <- getRDB1Data(obs_url)
-urlMulti <- constructNWISURL("04085427",c("00060","00010"),
-         startDate,endDate,"dv",statCd=c("00003","00001"),"tsv")
-multiData <- getRDB1Data(urlMulti)
-unitDataURL <- constructNWISURL(siteNumber,property,
-         "2014-10-10","2014-10-10","uv",format="tsv")
-unitData <- getRDB1Data(unitDataURL, asDateTime=TRUE)
-}
-
diff --git a/man/getUserDaily.Rd b/man/getUserDaily.Rd
deleted file mode 100644
index 64ed42aac10cfc9198f999d6e989a9fe2b094ae9..0000000000000000000000000000000000000000
--- a/man/getUserDaily.Rd
+++ /dev/null
@@ -1,39 +0,0 @@
-% Generated by roxygen2 (4.0.2): do not edit by hand
-\name{getUserDaily}
-\alias{getUserDaily}
-\title{Import user daily data for EGRET analysis}
-\usage{
-getUserDaily(filePath, fileName, hasHeader = TRUE, separator = ",",
-  qUnit = 1, interactive = TRUE)
-}
-\arguments{
-\item{filePath}{string specifying the path to the file}
-
-\item{fileName}{string name of file to open}
-
-\item{hasHeader}{logical true if the first row of data is the column headers}
-
-\item{separator}{string character that separates data cells}
-
-\item{qUnit}{number 1 is cubic feet per second, 2 is cubic meters per second, 3 is 10^3 cubic feet per second, and 4 is 10^3 cubic meters per second}
-
-\item{interactive}{logical Option for interactive mode.  If true, there is user interaction for error handling and data checks.}
-}
-\value{
-Daily dataframe
-}
-\description{
-Imports data from a user-supplied file, and converts it to a Daily data frame, appropriate for WRTDS calculations.
-}
-\examples{
-filePath <- system.file("extdata", package="dataRetrieval")
-filePath <- paste(filePath,"/",sep="")
-fileName <- "ChoptankRiverFlow.txt"
-Daily <- getUserDaily(filePath,fileName,separator="\\t")
-}
-\keyword{USGS}
-\keyword{WRTDS}
-\keyword{data}
-\keyword{file}
-\keyword{import}
-
diff --git a/man/getUserInfo.Rd b/man/getUserInfo.Rd
deleted file mode 100644
index c7dcd25cad4af30bd2950e9dd9291eecd6a173a6..0000000000000000000000000000000000000000
--- a/man/getUserInfo.Rd
+++ /dev/null
@@ -1,42 +0,0 @@
-% Generated by roxygen2 (4.0.2): do not edit by hand
-\name{getUserInfo}
-\alias{getUserInfo}
-\title{Import Metadata from User-Generated File}
-\usage{
-getUserInfo(filePath, fileName, hasHeader = TRUE, separator = ",",
-  interactive = FALSE)
-}
-\arguments{
-\item{filePath}{string specifying the path to the file}
-
-\item{fileName}{string name of file to open}
-
-\item{hasHeader}{logical true if the first row of data is the column headers}
-
-\item{separator}{string character that separates data cells}
-
-\item{interactive}{logical Option for interactive mode.  If true, there is user interaction for error handling and data checks.}
-}
-\value{
-INFO dataframe with agency, site, dateTime, value, and code columns
-}
-\description{
-Populates INFO data frame for EGRET study. Accepts a user generated file with any metadata that might
-be important for the analysis.
-Additionally, EGRET analysis requires:"drainSqKm", "staAbbrev", "constitAbbrev",
-"param.units", "paramShortName","shortName". If interactive=TRUE, the function will ask for these
-fields if they aren't supplied in the file.
-}
-\examples{
-filePath <- system.file("extdata", package="dataRetrieval")
-filePath <- paste(filePath,"/",sep="")
-fileName <- 'infoTest.csv'
-INFO <- getUserInfo(filePath,fileName, separator=",",interactive=FALSE)
-}
-\keyword{USGS}
-\keyword{WRTDS}
-\keyword{data}
-\keyword{import}
-\keyword{service}
-\keyword{web}
-
diff --git a/man/getUserSample.Rd b/man/getUserSample.Rd
deleted file mode 100644
index b6b9844172b1facfd8dc8b76691cc2658ade562f..0000000000000000000000000000000000000000
--- a/man/getUserSample.Rd
+++ /dev/null
@@ -1,38 +0,0 @@
-% Generated by roxygen2 (4.0.2): do not edit by hand
-\name{getUserSample}
-\alias{getUserSample}
-\title{Import user sample data for EGRET analysis}
-\usage{
-getUserSample(filePath, fileName, hasHeader = TRUE, separator = ",",
-  interactive = TRUE)
-}
-\arguments{
-\item{filePath}{string specifying the path to the file}
-
-\item{fileName}{string name of file to open}
-
-\item{hasHeader}{logical true if the first row of data is the column headers}
-
-\item{separator}{string character that separates data cells}
-
-\item{interactive}{logical Option for interactive mode.  If true, there is user interaction for error handling and data checks.}
-}
-\value{
-Sample dataframe
-}
-\description{
-Imports data from a user-supplied file, and converts it to a Sample data frame (including summing multiple constituents), appropriate for WRTDS calculations.
-}
-\examples{
-filePath <- system.file("extdata", package="dataRetrieval")
-filePath <- paste(filePath,"/",sep="")
-fileName <- 'ChoptankRiverNitrate.csv'
-Sample <- getUserSample(filePath,fileName, separator=";",interactive=FALSE)
-}
-\seealso{
-\code{\link{compressData}}, \code{\link{populateSampleColumns}}
-}
-\keyword{data}
-\keyword{file}
-\keyword{import}
-
diff --git a/man/getWQPInfo.Rd b/man/getWQPInfo.Rd
deleted file mode 100644
index 46eb210ed08aa5c83c1e4035c4e2581013163f22..0000000000000000000000000000000000000000
--- a/man/getWQPInfo.Rd
+++ /dev/null
@@ -1,43 +0,0 @@
-% Generated by roxygen2 (4.0.2): do not edit by hand
-\name{getWQPInfo}
-\alias{getWQPInfo}
-\title{Import Metadata for Water Quality Portal Data}
-\usage{
-getWQPInfo(siteNumber, parameterCd, interactive = FALSE)
-}
-\arguments{
-\item{siteNumber}{string site number.}
-
-\item{parameterCd}{string USGS parameter code or characteristic name.}
-
-\item{interactive}{logical Option for interactive mode.  If true, there is user interaction for error handling and data checks.}
-}
-\value{
-INFO dataframe with agency, site, dateTime, value, and code columns
-}
-\description{
-Populates INFO data frame for EGRET study. If siteNumber or parameter code (for USGS) or characteristic name
-(for non-USGS) is provided, the function will make a call to the Water Quality Portal to get metadata information.
-staAbbrev - station abbreviation, will be used in naming output files and for structuring batch jobs
-constitAbbrev - constitute abbreviation
-}
-\examples{
-# These examples require an internet connection to run
-# Automatically gets information about site 01594440 and temperature, no interaction with user
-nameToUse <- 'Specific conductance'
-pcodeToUse <- '00095'
-\dontrun{
-INFO <- getWQPInfo('USGS-04024315',pcodeToUse,interactive=TRUE)
-INFO2 <- getWQPInfo('WIDNR_WQX-10032762',nameToUse)
-# To adjust the label names:
-INFO$shortName <- "Little"
-INFO$paramShortName <- "SC"
-}
-}
-\keyword{USGS}
-\keyword{WRTDS}
-\keyword{data}
-\keyword{import}
-\keyword{service}
-\keyword{web}
-
diff --git a/man/getWQPSample.Rd b/man/getWQPSample.Rd
deleted file mode 100644
index 6418f32a98fce6ad5546d55b35ac9d2b9c78d148..0000000000000000000000000000000000000000
--- a/man/getWQPSample.Rd
+++ /dev/null
@@ -1,44 +0,0 @@
-% Generated by roxygen2 (4.0.2): do not edit by hand
-\name{getWQPSample}
-\alias{getWQPSample}
-\title{Import Sample Data for WRTDS}
-\usage{
-getWQPSample(siteNumber, characteristicName, startDate, endDate,
-  interactive = TRUE)
-}
-\arguments{
-\item{siteNumber}{string site number.  If USGS, it should be in the form :'USGS-XXXXXXXXX...'}
-
-\item{characteristicName}{string}
-
-\item{startDate}{string starting date for data retrieval in the form YYYY-MM-DD.}
-
-\item{endDate}{string ending date for data retrieval in the form YYYY-MM-DD.}
-
-\item{interactive}{logical Option for interactive mode.  If true, there is user interaction for error handling and data checks.}
-}
-\value{
-Sample dataframe
-}
-\description{
-Imports data from the Water Quality Portal, so it could be STORET, NWIS, or . This function gets the data from: \url{http://www.waterqualitydata.us}
-For raw data, use getWQPData.  This function will retrieve the raw data, and compress it (summing constituents). See
-chapter 7 of the EGRET user guide for more details, then converts it to the Sample dataframe structure.
-}
-\examples{
-# These examples require an internet connection to run
-\dontrun{
-Sample_01075 <- getWQPSample('USGS-01594440','Chloride', '', '')
-Sample_All <- getWQPSample('WIDNR_WQX-10032762','Specific conductance', '', '')
-}
-}
-\seealso{
-\code{\link{getWQPData}}, \code{\link{getWQPSites}},
-\code{\link{getWQPqwData}}, \code{\link{getNWISqwData}}, and \code{\link{readWQPData}},
-\code{\link{compressData}}, \code{\link{populateSampleColumns}}
-}
-\keyword{USGS}
-\keyword{WRTDS}
-\keyword{data}
-\keyword{import}
-
diff --git a/man/getWaterML1Data.Rd b/man/getWaterML1Data.Rd
deleted file mode 100644
index 7006273119494caed77535ef526b3ab564ec4024..0000000000000000000000000000000000000000
--- a/man/getWaterML1Data.Rd
+++ /dev/null
@@ -1,40 +0,0 @@
-% Generated by roxygen2 (4.0.2): do not edit by hand
-\name{getWaterML1Data}
-\alias{getWaterML1Data}
-\title{Function to return data from the NWISWeb WaterML1.1 service}
-\usage{
-getWaterML1Data(obs_url)
-}
-\arguments{
-\item{obs_url}{string containing the url for the retrieval}
-}
-\value{
-mergedDF a data frame containing columns agency, site, dateTime, values, and remark codes for all requested combinations
-}
-\description{
-This function accepts a url parameter that already contains the desired
-NWIS site, parameter code, statistic, startdate and enddate.
-}
-\examples{
-siteNumber <- "02177000"
-startDate <- "2012-09-01"
-endDate <- "2012-10-01"
-offering <- '00003'
-property <- '00060'
-urlBase <- "http://waterservices.usgs.gov/nwis"
-obs_url <- constructNWISURL(siteNumber,property,startDate,endDate,'dv')
-data <- getWaterML1Data(obs_url)
-urlMulti <- constructNWISURL("04085427",c("00060","00010"),
-            startDate,endDate,'dv',statCd=c("00003","00001"))
-multiData <- getWaterML1Data(urlMulti)
-groundWaterSite <- "431049071324301"
-startGW <- "2013-10-01"
-endGW <- "2014-06-30"
-groundwaterExampleURL <- constructNWISURL(groundWaterSite, NA,
-          startGW,endGW, service="gwlevels", format="xml",interactive=FALSE)
-groundWater <- getWaterML1Data(groundwaterExampleURL)
-unitDataURL <- constructNWISURL(siteNumber,property,
-         "2014-10-10","2014-10-10",'uv',format='xml')
-unitData <- getWaterML1Data(unitDataURL)
-}
-
diff --git a/man/importRDB1.Rd b/man/importRDB1.Rd
new file mode 100644
index 0000000000000000000000000000000000000000..0fcc978834df0b84e337edb8a2cd4f5c5a2b8d2b
--- /dev/null
+++ b/man/importRDB1.Rd
@@ -0,0 +1,62 @@
+% Generated by roxygen2 (4.0.2): do not edit by hand
+\name{importRDB1}
+\alias{importRDB1}
+\title{Function to return data from the NWIS RDB 1.0 format}
+\usage{
+importRDB1(obs_url, asDateTime = FALSE, qw = FALSE, convertType = TRUE,
+  tz = "")
+}
+\arguments{
+\item{obs_url}{string containing the url for the retrieval}
+
+\item{asDateTime}{logical, if TRUE returns date and time as POSIXct, if FALSE, Date}
+
+\item{qw}{logical, if TRUE parses as water quality data (where dates/times are in start and end times)}
+
+\item{convertType}{logical, defaults to TRUE. If TRUE, the function will convert the data to dates, datetimes,
+numerics based on a standard algorithm. If FALSE, everything is returned as a string.}
+
+\item{tz}{string to set timezone attribute of datetime. Default is an empty quote, which converts the
+datetimes to UTC (properly accounting for daylight savings times based on the data's provided tz_cd column).
+Possible values to provide are "America/New_York","America/Chicago", "America/Denver","America/Los_Angeles",
+"America/Anchorage","America/Honolulu","America/Jamaica","America/Managua","America/Phoenix", and "America/Metlakatla"}
+}
+\value{
+data a data frame containing columns agency, site, dateTime (converted to UTC), values, and remark codes for all requested combinations
+}
+\description{
+This function accepts a url parameter that already contains the desired
+NWIS site, parameter code, statistic, startdate and enddate. It is not
+recommended to use the RDB format for importing multi-site data.
+}
+\examples{
+siteNumber <- "02177000"
+startDate <- "2012-09-01"
+endDate <- "2012-10-01"
+offering <- "00003"
+property <- "00060"
+obs_url <- constructNWISURL(siteNumber,property,
+         startDate,endDate,"dv",format="tsv")
+data <- importRDB1(obs_url)
+urlMultiPcodes <- constructNWISURL("04085427",c("00060","00010"),
+         startDate,endDate,"dv",statCd=c("00003","00001"),"tsv")
+multiData <- importRDB1(urlMultiPcodes)
+unitDataURL <- constructNWISURL(siteNumber,property,
+         "2013-11-03","2013-11-03","uv",format="tsv") #includes timezone switch
+unitData <- importRDB1(unitDataURL, asDateTime=TRUE)
+qwURL <- constructNWISURL(c('04024430','04024000'),
+          c('34247','30234','32104','34220'),
+         "2010-11-03","","qw",format="rdb")
+qwData <- importRDB1(qwURL, qw=TRUE, tz="America/Chicago")
+iceSite <- '04024430'
+start <- "2013-11-09"
+end <- "2013-11-28"
+urlIce <- constructNWISURL(iceSite,"00060",start, end,"uv",format="tsv")
+
+# User file:
+filePath <- system.file("extdata", package="dataRetrieval")
+fileName <- "RDB1Example.txt"
+fullPath <- file.path(filePath, fileName)
+importUserRDB <- importRDB1(fullPath)
+}
+
diff --git a/man/importWQP.Rd b/man/importWQP.Rd
new file mode 100644
index 0000000000000000000000000000000000000000..f5faaee1995217faa03d69dece7c73403b126ffa
--- /dev/null
+++ b/man/importWQP.Rd
@@ -0,0 +1,40 @@
+% Generated by roxygen2 (4.0.2): do not edit by hand
+\name{importWQP}
+\alias{importWQP}
+\title{Basic Water Quality Portal Data grabber}
+\usage{
+importWQP(url, zip = FALSE, tz = "")
+}
+\arguments{
+\item{url}{string URL to Water Quality Portal}
+
+\item{zip}{logical used to request the data in a zip format (TRUE)}
+
+\item{tz}{string to set timezone attribute of datetime. Default is an empty quote, which converts the
+datetimes to UTC (properly accounting for daylight savings times based on the data's provided tz_cd column).
+Possible values to provide are "America/New_York","America/Chicago", "America/Denver","America/Los_Angeles",
+"America/Anchorage","America/Honolulu","America/Jamaica","America/Managua","America/Phoenix", and "America/Metlakatla"}
+}
+\value{
+retval dataframe raw data returned from the Water Quality Portal. Additionally, a POSIXct dateTime column is supplied for
+start and end times.
+}
+\description{
+Imports data from the Water Quality Portal based on a specified url.
+}
+\examples{
+# These examples require an internet connection to run
+\dontrun{
+## Examples take longer than 5 seconds:
+rawSampleURL <- constructWQPURL('USGS-01594440','01075', '', '')
+rawSample <- importWQP(rawSampleURL)
+url2 <- paste0(rawSampleURL,"&zip=yes")
+rawSample2 <- importWQP(url2, TRUE)
+}
+}
+\keyword{USGS}
+\keyword{data}
+\keyword{import}
+\keyword{service}
+\keyword{web}
+
diff --git a/man/importWaterML1.Rd b/man/importWaterML1.Rd
new file mode 100644
index 0000000000000000000000000000000000000000..9b80f695ec100c5da4195af618bc82f1c2b5f19f
--- /dev/null
+++ b/man/importWaterML1.Rd
@@ -0,0 +1,60 @@
+% Generated by roxygen2 (4.0.2): do not edit by hand
+\name{importWaterML1}
+\alias{importWaterML1}
+\title{Function to return data from the NWISWeb WaterML1.1 service}
+\usage{
+importWaterML1(obs_url, asDateTime = FALSE, tz = "")
+}
+\arguments{
+\item{obs_url}{string containing the url for the retrieval}
+
+\item{asDateTime}{logical, if TRUE returns date and time as POSIXct, if FALSE, Date}
+
+\item{tz}{string to set timezone attribute of datetime. Default is an empty quote, which converts the
+datetimes to UTC (properly accounting for daylight savings times based on the data's provided tz_cd column).
+Possible values to provide are "America/New_York","America/Chicago", "America/Denver","America/Los_Angeles",
+"America/Anchorage","America/Honolulu","America/Jamaica","America/Managua","America/Phoenix", and "America/Metlakatla"}
+}
+\value{
+mergedDF a data frame containing columns agency, site, dateTime, values, and remark codes for all requested combinations
+}
+\description{
+This function accepts a url parameter that already contains the desired
+NWIS site, parameter code, statistic, startdate and enddate.
+}
+\examples{
+siteNumber <- "02177000"
+startDate <- "2012-09-01"
+endDate <- "2012-10-01"
+offering <- '00003'
+property <- '00060'
+obs_url <- constructNWISURL(siteNumber,property,startDate,endDate,'dv')
+data <- importWaterML1(obs_url,TRUE)
+
+groundWaterSite <- "431049071324301"
+startGW <- "2013-10-01"
+endGW <- "2014-06-30"
+groundwaterExampleURL <- constructNWISURL(groundWaterSite, NA,
+          startGW,endGW, service="gwlevels")
+groundWater <- importWaterML1(groundwaterExampleURL)
+
+unitDataURL <- constructNWISURL(siteNumber,property,
+         "2013-11-03","2013-11-03",'uv')
+unitData <- importWaterML1(unitDataURL,TRUE)
+
+filePath <- system.file("extdata", package="dataRetrieval")
+fileName <- "WaterML1Example.xml"
+fullPath <- file.path(filePath, fileName)
+importUserWM1 <- importWaterML1(fullPath,TRUE)
+
+# Two sites, two pcodes, one site has two data descriptors:
+siteNumber <- c('01480015',"04085427")
+obs_url <- constructNWISURL(siteNumber,c("00060","00010"),startDate,endDate,'dv')
+data <- importWaterML1(obs_url)
+data$dateTime <- as.Date(data$dateTime)
+data <- renameNWISColumns(data)
+names(attributes(data))
+attr(data, "url")
+attr(data, "disclaimer")
+}
+
diff --git a/man/importWaterML2.Rd b/man/importWaterML2.Rd
new file mode 100644
index 0000000000000000000000000000000000000000..c8f3bc680cb3d6a7f76932040dffae7ba4148335
--- /dev/null
+++ b/man/importWaterML2.Rd
@@ -0,0 +1,51 @@
+% Generated by roxygen2 (4.0.2): do not edit by hand
+\name{importWaterML2}
+\alias{importWaterML2}
+\title{Function to return data from the WaterML2 data}
+\usage{
+importWaterML2(obs_url, asDateTime = FALSE, tz = "")
+}
+\arguments{
+\item{obs_url}{string containing the url for the retrieval}
+
+\item{asDateTime}{logical, if TRUE returns date and time as POSIXct, if FALSE, Date}
+
+\item{tz}{string to set timezone attribute of datetime. Default is an empty quote, which converts the
+datetimes to UTC (properly accounting for daylight savings times based on the data's provided tz_cd column).
+Possible values to provide are "America/New_York","America/Chicago", "America/Denver","America/Los_Angeles",
+"America/Anchorage","America/Honolulu","America/Jamaica","America/Managua","America/Phoenix", and "America/Metlakatla"}
+}
+\value{
+mergedDF a data frame containing columns agency, site, dateTime, values, and remark codes for all requested combinations
+}
+\description{
+This function accepts a url parameter for a WaterML2 getObservation
+}
+\examples{
+baseURL <- "http://waterservices.usgs.gov/nwis/dv/?format=waterml,2.0"
+URL <- paste(baseURL, "sites=01646500",
+     "startDT=2014-09-01",
+     "endDT=2014-09-08",
+     "statCd=00003",
+     "parameterCd=00060",sep="&")
+URL2 <- paste("http://cida.usgs.gov/noreast-sos/simple?request=GetObservation",
+     "featureID=MD-BC-BC-05",
+     "offering=RAW",
+     "observedProperty=WATER",sep="&")
+\dontrun{
+dataReturned1 <- importWaterML2(URL)
+dataReturn2 <- importWaterML2(URL2, TRUE)
+URLmulti <-  paste(baseURL,
+  "sites=04024430,04024000",
+  "startDT=2014-09-01",
+  "endDT=2014-09-08",
+  "statCd=00003",
+  "parameterCd=00060",sep="&")
+dataReturnMulti <- importWaterML2(URLmulti)
+filePath <- system.file("extdata", package="dataRetrieval")
+fileName <- "WaterML2Example.xml"
+fullPath <- file.path(filePath, fileName)
+UserData <- importWaterML2(fullPath)
+}
+}
+
diff --git a/man/mergeReport.Rd b/man/mergeReport.Rd
deleted file mode 100644
index 1ac27dabccc1862005ab7a4133ef3c54e15b02d7..0000000000000000000000000000000000000000
--- a/man/mergeReport.Rd
+++ /dev/null
@@ -1,34 +0,0 @@
-% Generated by roxygen2 (4.0.2): do not edit by hand
-\name{mergeReport}
-\alias{mergeReport}
-\title{Merge Sample and Daily Data for WRTDS}
-\usage{
-mergeReport(Daily, Sample, interactive = TRUE)
-}
-\arguments{
-\item{Daily}{dataframe containing the daily data, default is Daily}
-
-\item{Sample}{dataframe containing the sample data, default is Sample}
-
-\item{interactive}{logical Option for interactive mode.  If true, there is user interaction for error handling and data checks.}
-}
-\value{
-newSample dataframe with merged flow information
-}
-\description{
-Merges the flow data from the daily record into the sample record.
-}
-\examples{
-# These examples require an internet connection to run
-Daily <- getNWISDaily('01594440','00060', '1985-01-01', '1985-03-31')
-Sample <- getNWISSample('01594440','01075', '1985-01-01', '1985-03-31')
-Sample <- mergeReport(Daily, Sample)
-}
-\seealso{
-\code{\link{getNWISDaily}}, \code{\link{getNWISSample}}
-}
-\keyword{USGS}
-\keyword{WRTDS}
-\keyword{data}
-\keyword{import}
-
diff --git a/man/parameterCdFile.Rd b/man/parameterCdFile.Rd
index 012423eadf99441f67a6bc785c3d0235bb527ca6..532995a605e8f169f4b0db5ff0775aa838ba33a4 100644
--- a/man/parameterCdFile.Rd
+++ b/man/parameterCdFile.Rd
@@ -4,7 +4,7 @@
 \alias{parameterCdFile}
 \title{List of USGS parameter codes}
 \description{
-Complete list of USGS parameter codes as of September 25, 2013.
+Complete list of USGS parameter codes as of November 7, 2014.
 }
 \keyword{USGS}
 \keyword{parameterCd}
diff --git a/man/populateConcentrations.Rd b/man/populateConcentrations.Rd
deleted file mode 100644
index 54456d911de59f398ac49c3a80dc45f2f047f20c..0000000000000000000000000000000000000000
--- a/man/populateConcentrations.Rd
+++ /dev/null
@@ -1,23 +0,0 @@
-% Generated by roxygen2 (4.0.2): do not edit by hand
-\name{populateConcentrations}
-\alias{populateConcentrations}
-\title{Populate Concentration Columns}
-\usage{
-populateConcentrations(rawData)
-}
-\arguments{
-\item{rawData}{vector with value and code columns}
-}
-\value{
-concentrationColumns dataframe
-}
-\description{
-Creates ConcLow, ConcHigh, Uncen (0 if censored, 1 if uncensored) columns for Sample data frame for WRTDS study.
-}
-\examples{
-code <- c("","<","")
-value <- c(1,2,3)
-dataInput <- data.frame(value, code, stringsAsFactors=FALSE)
-concentrationDF <- populateConcentrations(dataInput)
-}
-
diff --git a/man/populateDaily.Rd b/man/populateDaily.Rd
deleted file mode 100644
index 4672b2dccc1a58f8a708fbedcbc1a017e7757dc2..0000000000000000000000000000000000000000
--- a/man/populateDaily.Rd
+++ /dev/null
@@ -1,33 +0,0 @@
-% Generated by roxygen2 (4.0.2): do not edit by hand
-\name{populateDaily}
-\alias{populateDaily}
-\title{Populate Daily data frame}
-\usage{
-populateDaily(rawData, qConvert, interactive = TRUE)
-}
-\arguments{
-\item{rawData}{dataframe contains at least dateTime, value, code columns}
-
-\item{qConvert}{string conversion to cubic meters per second}
-
-\item{interactive}{logical Option for interactive mode.  If true, there is user interaction for error handling and data checks.}
-}
-\value{
-dataframe Daily
-}
-\description{
-Using raw data that has at least dateTime, value, code, populates the rest of the basic Daily data frame used in WRTDS
-}
-\examples{
-dateTime <- c('1985-01-01', '1985-01-02', '1985-01-03')
-value <- c(1,2,3)
-code <- c("","","")
-dataInput <- data.frame(dateTime, value, code, stringsAsFactors=FALSE)
-Daily <- populateDaily(dataInput, 2)
-}
-\author{
-Robert M. Hirsch \email{rhirsch@usgs.gov}
-}
-\keyword{WRTDS}
-\keyword{flow}
-
diff --git a/man/populateDateColumns.Rd b/man/populateDateColumns.Rd
deleted file mode 100644
index 423f0d0c8dc26163885bfa466b594a02cc07ad68..0000000000000000000000000000000000000000
--- a/man/populateDateColumns.Rd
+++ /dev/null
@@ -1,21 +0,0 @@
-% Generated by roxygen2 (4.0.2): do not edit by hand
-\name{populateDateColumns}
-\alias{populateDateColumns}
-\title{Populate Date Columns}
-\usage{
-populateDateColumns(rawData)
-}
-\arguments{
-\item{rawData}{vector with dateTime}
-}
-\value{
-DateFrame dataframe
-}
-\description{
-Creates various date columns for WRTDS study.
-}
-\examples{
-dateTime <- c('1984-02-28 13:56', '1984-03-01', '1986-03-01')
-expandedDateDF <- populateDateColumns(dateTime)
-}
-
diff --git a/man/populateParameterINFO.Rd b/man/populateParameterINFO.Rd
deleted file mode 100644
index bfd5aa1f083a76c230f0e78fb2cd1f0e14451a9d..0000000000000000000000000000000000000000
--- a/man/populateParameterINFO.Rd
+++ /dev/null
@@ -1,32 +0,0 @@
-% Generated by roxygen2 (4.0.2): do not edit by hand
-\name{populateParameterINFO}
-\alias{populateParameterINFO}
-\title{Populate Parameter Information Columns}
-\usage{
-populateParameterINFO(parameterCd, INFO, interactive = TRUE)
-}
-\arguments{
-\item{parameterCd}{string USGS parameter code}
-
-\item{INFO}{dataframe with value and code columns. Default is INFO}
-
-\item{interactive}{logical Option for interactive mode.  If true, there is user interaction for error handling and data checks.}
-}
-\value{
-INFO dataframe
-}
-\description{
-Populates INFO data frame with additional user-supplied information concerning the measured parameter.
-}
-\examples{
-#This example requires an internet connection to run
-INFO <- getNWISSiteInfo('01594440')
-parameterCd <- "01075"
-parameterData <- getNWISPcodeInfo(parameterCd)
-INFO$param.nm <- parameterData$parameter_nm
-INFO$param.units <- parameterData$parameter_units
-INFO$paramShortName <- parameterData$srsname
-INFO$paramNumber <- parameterData$parameter_cd
-INFO <- populateParameterINFO(parameterCd, INFO)
-}
-
diff --git a/man/populateSampleColumns.Rd b/man/populateSampleColumns.Rd
deleted file mode 100644
index 48be0d713bb3dd369a7c65a99344d6eaeb1e4953..0000000000000000000000000000000000000000
--- a/man/populateSampleColumns.Rd
+++ /dev/null
@@ -1,25 +0,0 @@
-% Generated by roxygen2 (4.0.2): do not edit by hand
-\name{populateSampleColumns}
-\alias{populateSampleColumns}
-\title{Populate Sample Columns}
-\usage{
-populateSampleColumns(rawData)
-}
-\arguments{
-\item{rawData}{dataframe with dateTime, ConcLow, ConcHigh, Uncen}
-}
-\value{
-Sample2 dataframe
-}
-\description{
-Creates ConcAve and ConcLow based on Uncen. Removes any samples with NA values in ConcHigh
-}
-\examples{
-dateTime <- c('1985-01-01', '1985-01-02', '1985-01-03')
-ConcLow <- c(1,2,0)
-ConcHigh <- c(1,2,3)
-Uncen <- c(1,1,0)
-dataInput <- data.frame(dateTime, ConcLow, ConcHigh, Uncen, stringsAsFactors=FALSE)
-Sample <- populateSampleColumns(dataInput)
-}
-
diff --git a/man/populateSiteINFO.Rd b/man/populateSiteINFO.Rd
deleted file mode 100644
index b5da355c14252cdc91400d3dc2de654dea8e6c21..0000000000000000000000000000000000000000
--- a/man/populateSiteINFO.Rd
+++ /dev/null
@@ -1,27 +0,0 @@
-% Generated by roxygen2 (4.0.2): do not edit by hand
-\name{populateSiteINFO}
-\alias{populateSiteINFO}
-\title{Populate Site Information Columns}
-\usage{
-populateSiteINFO(INFO, siteNumber, interactive = TRUE)
-}
-\arguments{
-\item{INFO}{dataframe with value and code columns}
-
-\item{siteNumber}{string USGS site number}
-
-\item{interactive}{logical Option for interactive mode.  If true, there is user interaction for error handling and data checks.}
-}
-\value{
-INFO dataframe
-}
-\description{
-Populates INFO data frame with additional user-supplied information. Also removes fields not related to WRTDS study.
-}
-\examples{
-#This example requires an internet connection to run
-INFO <- getNWISSiteInfo('01594440')
-siteNumber <- "01594440"
-siteINFO <- populateSiteINFO(INFO, siteNumber)
-}
-
diff --git a/man/processQWData.Rd b/man/processQWData.Rd
deleted file mode 100644
index c68559e050ba0d224f575a3781b27c1e53f95fc7..0000000000000000000000000000000000000000
--- a/man/processQWData.Rd
+++ /dev/null
@@ -1,34 +0,0 @@
-% Generated by roxygen2 (4.0.2): do not edit by hand
-\name{processQWData}
-\alias{processQWData}
-\title{Processing of USGS NWIS Water Quality Data}
-\usage{
-processQWData(data, pCode = TRUE)
-}
-\arguments{
-\item{data}{dataframe from Water Quality Portal}
-
-\item{pCode}{logical if TRUE, assume data came from a pCode search, if FALSE, characteristic name.}
-}
-\value{
-data dataframe with first column dateTime, and at least one qualifier and value columns
-(subsequent qualifier/value columns could follow depending on the number of parameter codes)
-}
-\description{
-Processes water quality portal data. This function looks at detection limit and detection
-conditions to determine if a value is left censored or not. Censored values are given the qualifier
-"<".  The dataframe is also converted from a long to wide format.
-}
-\examples{
-# These examples require an internet connection to run
-\dontrun{
-rawSample <- getWQPqwData('USGS-01594440','', '', '')
-rawSampleSelect <- processQWData(rawSample)
-}
-}
-\keyword{USGS}
-\keyword{data}
-\keyword{import}
-\keyword{service}
-\keyword{web}
-
diff --git a/man/getNWISData.Rd b/man/readNWISdata.Rd
similarity index 64%
rename from man/getNWISData.Rd
rename to man/readNWISdata.Rd
index 48b4f2d12d51441153f6076a1c9deea827694530..28aac9ce859cb35d76472beea60939dbbec53c54 100644
--- a/man/getNWISData.Rd
+++ b/man/readNWISdata.Rd
@@ -1,9 +1,12 @@
 % Generated by roxygen2 (4.0.2): do not edit by hand
 \name{getNWISData}
 \alias{getNWISData}
+\alias{readNWISdata}
 \title{General Data Import from NWIS}
 \usage{
 getNWISData(service = "dv", ...)
+
+readNWISdata(service = "dv", ...)
 }
 \arguments{
 \item{service}{string. Possible values are "iv" (for instantaneous), "dv" (for daily values), "gwlevels"
@@ -19,8 +22,12 @@ Returns data from the NWIS web service.
 Arguments to the function should be based on \url{http://waterservices.usgs.gov} service calls.
 }
 \examples{
-dataTemp <- getNWISData(stateCd="OH",parameterCd="00010")
-dataTempUnit <- getNWISData(sites="03086500", service="iv", parameterCd="00010")
+dataTemp <- readNWISdata(stateCd="OH",parameterCd="00010")
+dataTempUnit <- readNWISdata(sites="03086500", service="iv", parameterCd="00010")
+#Empty:
+multiSite <- readNWISdata(sites=c("04025000","04072150"), service="iv", parameterCd="00010")
+#Not empty:
+multiSite <- readNWISdata(sites=c("04025500","040263491"), service="iv", parameterCd="00060")
 }
 \keyword{NWIS}
 \keyword{data}
diff --git a/man/getNWISdvData.Rd b/man/readNWISdv.Rd
similarity index 52%
rename from man/getNWISdvData.Rd
rename to man/readNWISdv.Rd
index 0568a5716e22c8bb7176acf007025341550d279c..507b71d8615d06996ea6b17d72c7087f3db5fc6a 100644
--- a/man/getNWISdvData.Rd
+++ b/man/readNWISdv.Rd
@@ -1,10 +1,14 @@
 % Generated by roxygen2 (4.0.2): do not edit by hand
 \name{getNWISdvData}
 \alias{getNWISdvData}
+\alias{readNWISdv}
 \title{Raw Data Import for USGS NWIS Data}
 \usage{
-getNWISdvData(siteNumber, parameterCd, startDate, endDate, statCd = "00003",
-  format = "tsv", interactive = TRUE)
+getNWISdvData(siteNumber, parameterCd, startDate = "", endDate = "",
+  statCd = "00003")
+
+readNWISdv(siteNumber, parameterCd, startDate = "", endDate = "",
+  statCd = "00003")
 }
 \arguments{
 \item{siteNumber}{string USGS site number.  This is usually an 8 digit number. Multiple sites can be requested with a string vector.}
@@ -16,12 +20,6 @@ getNWISdvData(siteNumber, parameterCd, startDate, endDate, statCd = "00003",
 \item{endDate}{string ending date for data retrieval in the form YYYY-MM-DD.}
 
 \item{statCd}{string USGS statistic code. This is usually 5 digits.  Daily mean (00003) is the default.}
-
-\item{format}{string, can be 'tsv' or 'xml', and is only applicable for daily and unit value requests.  'tsv' returns results faster, but there is a possiblitiy that an incomplete file is returned without warning. XML is slower,
-but will offer a warning if the file was incomplete (for example, if there was a momentary problem with the internet connection). It is possible to safely use the 'tsv' option,
-but the user must carefully check the results to see if the data returns matches what is expected. The default is 'tsv'.}
-
-\item{interactive}{logical Option for interactive mode.  If true, there is user interaction for error handling and data checks.}
 }
 \value{
 data dataframe with agency, site, dateTime, value, and code columns
@@ -32,20 +30,21 @@ A list of parameter codes can be found here: \url{http://help.waterdata.usgs.gov
 A list of statistic codes can be found here: \url{http://help.waterdata.usgs.gov/code/stat_code_query?fmt=html}
 }
 \examples{
-# These examples require an internet connection to run
 siteNumber <- '04085427'
 startDate <- '2012-01-01'
 endDate <- '2012-06-30'
 pCode <- '00060'
-rawDailyQ <- getNWISdvData(siteNumber,pCode, startDate, endDate)
-rawDailyTemperature <- getNWISdvData(siteNumber,'00010',
-       startDate, endDate, statCd='00001')
-rawDailyTemperatureTSV <- getNWISdvData(siteNumber,'00010',
-       startDate, endDate, statCd='00001',format='tsv')
-rawDailyQAndTempMeanMax <- getNWISdvData(siteNumber,c('00010','00060'),
+rawDailyQ <- readNWISdv(siteNumber,pCode, startDate, endDate)
+rawDailyQAndTempMeanMax <- readNWISdv(siteNumber,c('00010','00060'),
        startDate, endDate, statCd=c('00001','00003'))
-rawDailyMultiSites<- getNWISdvData(c("01491000","01645000"),c('00010','00060'),
+rawDailyQAndTempMeanMax <- renameNWISColumns(rawDailyQAndTempMeanMax)
+rawDailyMultiSites<- readNWISdv(c("01491000","01645000"),c('00010','00060'),
        startDate, endDate, statCd=c('00001','00003'))
+# Site with no data:
+x <- readNWISdv("10258500","00060", "2014-09-08", "2014-09-14")
+names(attributes(x))
+attr(x, "siteInfo")
+attr(x, "variableInfo")
 }
 \keyword{USGS}
 \keyword{data}
diff --git a/man/readNWISgwl.Rd b/man/readNWISgwl.Rd
new file mode 100644
index 0000000000000000000000000000000000000000..4ee53c4e5296deccbc1c2f07365725c32ce8c0f6
--- /dev/null
+++ b/man/readNWISgwl.Rd
@@ -0,0 +1,25 @@
+% Generated by roxygen2 (4.0.2): do not edit by hand
+\name{readNWISgwl}
+\alias{readNWISgwl}
+\title{Reads groundwater level measurements from NWISweb.}
+\usage{
+readNWISgwl(siteNumbers, startDate = "", endDate = "")
+}
+\arguments{
+\item{siteNumbers}{string USGS site number (or multiple sites).  This is usually an 8 digit number}
+
+\item{startDate}{string starting date for data retrieval in the form YYYY-MM-DD.}
+
+\item{endDate}{string ending date for data retrieval in the form YYYY-MM-DD.}
+}
+\description{
+Reads groundwater level measurements from NWISweb. Mixed date/times come back from the service
+depending on the year that the data was collected.
+}
+\examples{
+siteNumber <- "434400121275801"
+data <- readNWISgwl(siteNumber, '','')
+sites <- c("434400121275801", "375907091432201")
+data2 <- readNWISgwl(sites, '','')
+}
+
diff --git a/man/readNWISmeas.Rd b/man/readNWISmeas.Rd
new file mode 100644
index 0000000000000000000000000000000000000000..01e9d9d11edf8aa9cb1fe5efc0ea9a15f1f35d18
--- /dev/null
+++ b/man/readNWISmeas.Rd
@@ -0,0 +1,27 @@
+% Generated by roxygen2 (4.0.2): do not edit by hand
+\name{readNWISmeas}
+\alias{readNWISmeas}
+\title{Reads surface-water measurement data from NWISweb.}
+\usage{
+readNWISmeas(siteNumber, startDate = "", endDate = "", tz = "")
+}
+\arguments{
+\item{siteNumber}{string USGS site number.  This is usually an 8 digit number}
+
+\item{startDate}{string starting date for data retrieval in the form YYYY-MM-DD.}
+
+\item{endDate}{string ending date for data retrieval in the form YYYY-MM-DD.}
+
+\item{tz}{string to set timezone attribute of datetime. Default is an empty quote, which converts the
+datetimes to UTC (properly accounting for daylight savings times based on the data's provided tz_cd column).
+Possible values to provide are "America/New_York","America/Chicago", "America/Denver","America/Los_Angeles",
+"America/Anchorage","America/Honolulu","America/Jamaica","America/Managua","America/Phoenix", and "America/Metlakatla"}
+}
+\description{
+Reads surface-water measurement data from NWISweb.
+}
+\examples{
+siteNumber <- '01594440'
+data <- readNWISmeas(siteNumber)
+}
+
diff --git a/man/getNWISPcodeInfo.Rd b/man/readNWISpCode.Rd
similarity index 76%
rename from man/getNWISPcodeInfo.Rd
rename to man/readNWISpCode.Rd
index 9dd734f8eae29bc064b6a2f642f40885c307559c..b7d1323e9d32eb2b607e39847dcd6a523e7b8c73 100644
--- a/man/getNWISPcodeInfo.Rd
+++ b/man/readNWISpCode.Rd
@@ -1,14 +1,15 @@
 % Generated by roxygen2 (4.0.2): do not edit by hand
 \name{getNWISPcodeInfo}
 \alias{getNWISPcodeInfo}
+\alias{readNWISpCode}
 \title{USGS Parameter Data Retrieval}
 \usage{
-getNWISPcodeInfo(parameterCd, interactive = TRUE)
+getNWISPcodeInfo(parameterCd)
+
+readNWISpCode(parameterCd)
 }
 \arguments{
 \item{parameterCd}{vector of USGS parameter codes.  This is usually an 5 digit number.}
-
-\item{interactive}{logical Option for interactive mode.  If true, there is user interaction for error handling and data checks.}
 }
 \value{
 parameterData dataframe with all information from the USGS about the particular parameter (usually code, name, short name, units, and CAS registry numbers)
@@ -19,7 +20,7 @@ This function gets the data from here: \url{http://nwis.waterdata.usgs.gov/nwis/
 }
 \examples{
 # These examples require an internet connection to run
-paramINFO <- getNWISPcodeInfo(c('01075','00060','00931'))
+paramINFO <- readNWISpCode(c('01075','00060','00931'))
 }
 \keyword{USGS}
 \keyword{data}
diff --git a/man/readNWISpeak.Rd b/man/readNWISpeak.Rd
new file mode 100644
index 0000000000000000000000000000000000000000..df59298c3d2003319e1b7d7e0e804d11b0602534
--- /dev/null
+++ b/man/readNWISpeak.Rd
@@ -0,0 +1,22 @@
+% Generated by roxygen2 (4.0.2): do not edit by hand
+\name{readNWISpeak}
+\alias{readNWISpeak}
+\title{Reads peak flow data from NWISweb.}
+\usage{
+readNWISpeak(siteNumber, startDate = "", endDate = "")
+}
+\arguments{
+\item{siteNumber}{string USGS site number.  This is usually an 8 digit number}
+
+\item{startDate}{string starting date for data retrieval in the form YYYY-MM-DD.}
+
+\item{endDate}{string ending date for data retrieval in the form YYYY-MM-DD.}
+}
+\description{
+Reads peak flow data from NWISweb.
+}
+\examples{
+siteNumber <- '01594440'
+data <- readNWISpeak(siteNumber)
+}
+
diff --git a/man/getNWISqwData.Rd b/man/readNWISqw.Rd
similarity index 57%
rename from man/getNWISqwData.Rd
rename to man/readNWISqw.Rd
index f172635ba07cad2c8dded39890c91868e00239a6..7cb0d4ac8ad5896dab697931561d36bdadbdb65a 100644
--- a/man/getNWISqwData.Rd
+++ b/man/readNWISqw.Rd
@@ -1,10 +1,14 @@
 % Generated by roxygen2 (4.0.2): do not edit by hand
 \name{getNWISqwData}
 \alias{getNWISqwData}
+\alias{readNWISqw}
 \title{Raw Data Import for USGS NWIS QW Data}
 \usage{
-getNWISqwData(siteNumber, pCodes, startDate, endDate, expanded = FALSE,
-  interactive = TRUE)
+getNWISqwData(siteNumber, pCodes, startDate = "", endDate = "",
+  expanded = FALSE, reshape = TRUE, tz = "")
+
+readNWISqw(siteNumber, pCodes, startDate = "", endDate = "",
+  expanded = FALSE, reshape = TRUE, tz = "")
 }
 \arguments{
 \item{siteNumber}{string or vector of of USGS site numbers.  This is usually an 8 digit number}
@@ -19,7 +23,12 @@ getNWISqwData(siteNumber, pCodes, startDate, endDate, expanded = FALSE,
 remark_cd (remark code), result_va (result value), val_qual_tx (result value qualifier code), meth_cd (method code),
 dqi_cd (data-quality indicator code), rpt_lev_va (reporting level), and rpt_lev_cd (reporting level type).}
 
-\item{interactive}{logical Option for interactive mode.  If true, there is user interaction for error handling and data checks.}
+\item{reshape}{logical. Will reshape the data if TRUE (default)}
+
+\item{tz}{string to set timezone attribute of datetime. Default is an empty quote, which converts the
+datetimes to UTC (properly accounting for daylight savings times based on the data's provided tz_cd column).
+Possible values to provide are "America/New_York","America/Chicago", "America/Denver","America/Los_Angeles",
+"America/Anchorage","America/Honolulu","America/Jamaica","America/Managua","America/Phoenix", and "America/Metlakatla"}
 }
 \value{
 data dataframe with agency, site, dateTime, value, and code columns
@@ -30,24 +39,19 @@ A list of parameter codes can be found here: \url{http://nwis.waterdata.usgs.gov
 A list of statistic codes can be found here: \url{http://nwis.waterdata.usgs.gov/nwis/help/?read_file=stat&format=table}
 }
 \examples{
-# These examples require an internet connection to run
 siteNumber <- c('04024430','04024000')
 startDate <- '2010-01-01'
 endDate <- ''
 pCodes <- c('34247','30234','32104','34220')
-rawNWISqwData <- getNWISqwData(siteNumber,pCodes,startDate,endDate)
-rawNWISqwDataExpand <- getNWISqwData(siteNumber,pCodes,startDate,endDate,expanded=TRUE)
-# To get data in Sample dataframe format:
-dataColumns <- grep("p\\\\d{5}",names(rawNWISqwData))
-remarkColumns <- grep("r\\\\d{5}",names(rawNWISqwData))
-totalColumns <-c(grep("sample_dt",names(rawNWISqwData)), dataColumns, remarkColumns)
-totalColumns <- totalColumns[order(totalColumns)]
-compressedData <- compressData(rawNWISqwData[,totalColumns])
-Sample <- populateSampleColumns(compressedData)
+rawNWISqwData <- readNWISqw(siteNumber,pCodes,startDate,endDate)
+rawNWISqwDataExpandReshaped <- readNWISqw(siteNumber,pCodes,
+          startDate,endDate,expanded=TRUE)
+rawNWISqwDataExpand <- readNWISqw(siteNumber,pCodes,
+          startDate,endDate,expanded=TRUE,reshape=FALSE)
 }
 \seealso{
-\code{\link{getWQPData}}, \code{\link{getWQPSites}},
-\code{\link{getWQPqwData}}, \code{\link{constructNWISURL}}
+\code{\link{readWQPdata}}, \code{\link{whatWQPsites}},
+\code{\link{readWQPqw}}, \code{\link{constructNWISURL}}
 }
 \keyword{USGS}
 \keyword{data}
diff --git a/man/readNWISrating.Rd b/man/readNWISrating.Rd
new file mode 100644
index 0000000000000000000000000000000000000000..f335d06d16be5a93aa3086d22ecc8c1d926821c8
--- /dev/null
+++ b/man/readNWISrating.Rd
@@ -0,0 +1,21 @@
+% Generated by roxygen2 (4.0.2): do not edit by hand
+\name{readNWISrating}
+\alias{readNWISrating}
+\title{Reads the current rating table for an active USGS streamgage.}
+\usage{
+readNWISrating(siteNumber, type = "base")
+}
+\arguments{
+\item{siteNumber}{string USGS site number.  This is usually an 8 digit number}
+
+\item{type}{string can be "base", "corr", or "exsa"}
+}
+\description{
+Reads the current rating table for an active USGS streamgage.
+}
+\examples{
+siteNumber <- '01594440'
+data <- readNWISrating(siteNumber, "base")
+attr(data, "RATING")
+}
+
diff --git a/man/getNWISSiteInfo.Rd b/man/readNWISsite.Rd
similarity index 67%
rename from man/getNWISSiteInfo.Rd
rename to man/readNWISsite.Rd
index 27d7521730f6949190b9b542169f86f2e71d7fce..2db0ac7d276ce887d97c2cb1e156a3a26f760825 100644
--- a/man/getNWISSiteInfo.Rd
+++ b/man/readNWISsite.Rd
@@ -1,12 +1,15 @@
 % Generated by roxygen2 (4.0.2): do not edit by hand
 \name{getNWISSiteInfo}
 \alias{getNWISSiteInfo}
+\alias{readNWISsite}
 \title{USGS Site File Data Retrieval}
 \usage{
-getNWISSiteInfo(siteNumber)
+getNWISSiteInfo(siteNumbers)
+
+readNWISsite(siteNumbers)
 }
 \arguments{
-\item{siteNumber}{string USGS site number.  This is usually an 8 digit number}
+\item{siteNumbers}{string USGS site number (or multiple sites).  This is usually an 8 digit number}
 }
 \value{
 retval dataframe with all information found in the expanded site file
@@ -16,8 +19,8 @@ Imports data from USGS site file site. This function gets data from here: \url{h
 }
 \examples{
 # These examples require an internet connection to run
-siteINFO <- getNWISSiteInfo('05114000')
-siteINFOMulti <- getNWISSiteInfo(c('05114000','09423350'))
+siteINFO <- readNWISsite('05114000')
+siteINFOMulti <- readNWISsite(c('05114000','09423350'))
 }
 \keyword{USGS}
 \keyword{data}
diff --git a/man/getNWISunitData.Rd b/man/readNWISuv.Rd
similarity index 52%
rename from man/getNWISunitData.Rd
rename to man/readNWISuv.Rd
index 0f23fbf2623d5814f72235538a928cbc3870c277..1244ce3ae0709774e17eef54ccd6e5aa24be0436 100644
--- a/man/getNWISunitData.Rd
+++ b/man/readNWISuv.Rd
@@ -1,13 +1,17 @@
 % Generated by roxygen2 (4.0.2): do not edit by hand
 \name{getNWISunitData}
 \alias{getNWISunitData}
+\alias{readNWISuv}
 \title{Raw Data Import for Instantaneous USGS NWIS Data}
 \usage{
-getNWISunitData(siteNumber, parameterCd, startDate, endDate, format = "xml",
-  interactive = TRUE)
+getNWISunitData(siteNumbers, parameterCd, startDate = "", endDate = "",
+  tz = "")
+
+readNWISuv(siteNumbers, parameterCd, startDate = "", endDate = "",
+  tz = "")
 }
 \arguments{
-\item{siteNumber}{string USGS site number.  This is usually an 8 digit number}
+\item{siteNumbers}{string USGS site number (or multiple sites).  This is usually an 8 digit number}
 
 \item{parameterCd}{string USGS parameter code.  This is usually an 5 digit number.}
 
@@ -15,11 +19,10 @@ getNWISunitData(siteNumber, parameterCd, startDate, endDate, format = "xml",
 
 \item{endDate}{string ending date for data retrieval in the form YYYY-MM-DD.}
 
-\item{format}{string, can be "tsv" or "xml", and is only applicable for daily and unit value requests.  "tsv" returns results faster, but there is a possiblitiy that an incomplete file is returned without warning. XML is slower,
-but will offer a warning if the file was incomplete (for example, if there was a momentary problem with the internet connection). It is possible to safely use the "tsv" option,
-but the user must carefully check the results to see if the data returns matches what is expected. The default is therefore "xml".}
-
-\item{interactive}{logical Option for interactive mode.  If true, there is user interaction for error handling and data checks.}
+\item{tz}{string to set timezone attribute of datetime. Default is an empty quote, which converts the
+datetimes to UTC (properly accounting for daylight savings times based on the data's provided tz_cd column).
+Possible values to provide are "America/New_York","America/Chicago", "America/Denver","America/Los_Angeles",
+"America/Anchorage","America/Honolulu","America/Jamaica","America/Managua","America/Phoenix", and "America/Metlakatla"}
 }
 \value{
 data dataframe with agency, site, dateTime, time zone, value, and code columns
@@ -35,10 +38,11 @@ parameterCd <- '00060'
 startDate <- "2014-10-10"
 endDate <- "2014-10-10"
 # These examples require an internet connection to run
-rawData <- getNWISunitData(siteNumber,parameterCd,startDate,endDate)
-summary(rawData)
-rawData2 <- getNWISunitData(siteNumber,parameterCd,startDate,endDate,"tsv")
-summary(rawData2)
+rawData <- readNWISuv(siteNumber,parameterCd,startDate,endDate)
+
+timeZoneChange <- readNWISuv(c('04024430','04024000'),parameterCd,
+         "2013-11-03","2013-11-03")
+firstSite <- timeZoneChange[timeZoneChange$site_no == '04024430',]
 }
 \keyword{USGS}
 \keyword{data}
diff --git a/man/readWQPData.Rd b/man/readWQPData.Rd
deleted file mode 100644
index aa1e2f15549d6e4bae241e45abfd913caf54ee07..0000000000000000000000000000000000000000
--- a/man/readWQPData.Rd
+++ /dev/null
@@ -1,31 +0,0 @@
-% Generated by roxygen2 (4.0.2): do not edit by hand
-\name{readWQPData}
-\alias{readWQPData}
-\title{Basic Water Quality Portal Data grabber}
-\usage{
-readWQPData(url)
-}
-\arguments{
-\item{url}{string URL to Water Quality Portal#'}
-}
-\value{
-retval dataframe raw data returned from the Water Quality Portal. Additionally, a POSIXct dateTime column is supplied for
-start and end times.
-}
-\description{
-Imports data from the Water Quality Portal based on a specified url.
-}
-\examples{
-# These examples require an internet connection to run
-\dontrun{
-## Examples take longer than 5 seconds:
-rawSampleURL <- constructNWISURL('USGS-01594440','01075', '1985-01-01', '1985-03-31',"wqp")
-rawSample <- readWQPData(rawSampleURL)
-}
-}
-\keyword{USGS}
-\keyword{data}
-\keyword{import}
-\keyword{service}
-\keyword{web}
-
diff --git a/man/getWQPData.Rd b/man/readWQPdata.Rd
similarity index 87%
rename from man/getWQPData.Rd
rename to man/readWQPdata.Rd
index 7fa0bdd33b8da644b53bd7292c033bf145c4c729..0f20f06cefa982ba7861a75dea9aff96ec30f58e 100644
--- a/man/getWQPData.Rd
+++ b/man/readWQPdata.Rd
@@ -1,9 +1,12 @@
 % Generated by roxygen2 (4.0.2): do not edit by hand
 \name{getWQPData}
 \alias{getWQPData}
+\alias{readWQPdata}
 \title{General Data Import from Water Quality Portal}
 \usage{
 getWQPData(...)
+
+readWQPdata(...)
 }
 \arguments{
 \item{\dots}{see \url{www.waterqualitydata.us/webservices_documentation.jsp} for a complete list of options}
@@ -19,7 +22,7 @@ because it allows for other agencies rather than the USGS.
 \examples{
 \dontrun{
 nameToUse <- "pH"
-pHData <- getWQPData(siteid="USGS-04024315",characteristicName=nameToUse)
+pHData <- readWQPdata(siteid="USGS-04024315",characteristicName=nameToUse)
 }
 }
 \keyword{WQP}
diff --git a/man/getWQPqwData.Rd b/man/readWQPqw.Rd
similarity index 70%
rename from man/getWQPqwData.Rd
rename to man/readWQPqw.Rd
index 4eb60fd4385a70b91ff24a6b989985f655e668a2..fd7714f49898cdf0086b31115392dca993b10954 100644
--- a/man/getWQPqwData.Rd
+++ b/man/readWQPqw.Rd
@@ -1,9 +1,12 @@
 % Generated by roxygen2 (4.0.2): do not edit by hand
 \name{getWQPqwData}
 \alias{getWQPqwData}
+\alias{readWQPqw}
 \title{Raw Data Import for Water Quality Portal}
 \usage{
-getWQPqwData(siteNumber, parameterCd, startDate, endDate, interactive = TRUE)
+getWQPqwData(siteNumber, parameterCd, startDate = "", endDate = "")
+
+readWQPqw(siteNumber, parameterCd, startDate = "", endDate = "")
 }
 \arguments{
 \item{siteNumber}{string site number. This needs to include the full agency code prefix.}
@@ -14,8 +17,6 @@ Leaving this blank will return all of the measured values during the specified t
 \item{startDate}{string starting date for data retrieval in the form YYYY-MM-DD.}
 
 \item{endDate}{string ending date for data retrieval in the form YYYY-MM-DD.}
-
-\item{interactive}{logical Option for interactive mode.  If true, there is user interaction for error handling and data checks.}
 }
 \value{
 retval dataframe raw data returned from the Water Quality Portal. Additionally, a POSIXct dateTime column is supplied for
@@ -30,15 +31,14 @@ either USGS, or other Water Quality Portal offered sites. It is required to use
 site name, such as 'USGS-01234567'.
 }
 \examples{
-# These examples require an internet connection to run
 \dontrun{
-rawPcode <- getWQPqwData('USGS-01594440','01075', '1985-01-01', '1985-03-31')
-rawCharacteristicName <- getWQPqwData('WIDNR_WQX-10032762','Specific conductance', '', '')
+rawPcode <- readWQPqw('USGS-01594440','01075', '', '')
+rawCharacteristicName <- readWQPqw('WIDNR_WQX-10032762','Specific conductance', '', '')
 }
 }
 \seealso{
-\code{\link{getWQPData}}, \code{\link{getWQPSites}},
-\code{\link{getNWISqwData}}, and \code{\link{readWQPData}}
+\code{\link{readWQPdata}}, \code{\link{whatWQPsites}},
+\code{\link{readNWISqw}}, and \code{\link{importWQP}}
 }
 \keyword{USGS}
 \keyword{data}
diff --git a/man/removeDuplicates.Rd b/man/removeDuplicates.Rd
deleted file mode 100644
index 6eed234002f01221fa02e3777404c815c3a31f34..0000000000000000000000000000000000000000
--- a/man/removeDuplicates.Rd
+++ /dev/null
@@ -1,23 +0,0 @@
-% Generated by roxygen2 (4.0.2): do not edit by hand
-\name{removeDuplicates}
-\alias{removeDuplicates}
-\title{Remove Duplicates}
-\usage{
-removeDuplicates(Sample)
-}
-\arguments{
-\item{Sample}{dataframe with at least DecYear and ConcHigh, default name is Sample}
-}
-\value{
-Sample1 dataframe
-}
-\description{
-Removes observations from the data frame Sample when the observation has the identical date and value as another observation
-}
-\examples{
-DecYear <- c('1985.01', '1985.01', '1985.02', '1985.02', '1985.03')
-ConcHigh <- c(1,2,3,3,5)
-dataInput <- data.frame(DecYear, ConcHigh, stringsAsFactors=FALSE)
-removeDuplicates(dataInput)
-}
-
diff --git a/man/renameColumns.Rd b/man/renameColumns.Rd
deleted file mode 100644
index bb8142710aa3bc48ae68038414d67a2800e7367c..0000000000000000000000000000000000000000
--- a/man/renameColumns.Rd
+++ /dev/null
@@ -1,33 +0,0 @@
-% Generated by roxygen2 (4.0.2): do not edit by hand
-\name{renameColumns}
-\alias{renameColumns}
-\title{renameColumns}
-\usage{
-renameColumns(rawData)
-}
-\arguments{
-\item{rawData}{dataframe returned from retrieval functions}
-}
-\value{
-rawData dataframe with improved column names
-}
-\description{
-Rename columns coming back from NWIS data retrievals
-}
-\examples{
-# This example requires an internet connection to run
-siteNumber <- '05114000'
-rawData <- getNWISdvData(siteNumber,c("00010","00060","00300"),
-          "2001-01-01","2002-01-01",statCd=c("00001","00003"))
-rawData <- renameColumns(rawData)
-date <- "2014-10-10"
-rawData2 <- getNWISunitData(siteNumber,c("00010","00060"),date,date)
-rawData2 <- renameColumns(rawData2)
-head(rawData2)
-}
-\keyword{USGS}
-\keyword{data}
-\keyword{import}
-\keyword{service}
-\keyword{web}
-
diff --git a/man/renameNWISColumns.Rd b/man/renameNWISColumns.Rd
new file mode 100644
index 0000000000000000000000000000000000000000..e594b37039c3e97763f1d9bc959387eddd4e56c6
--- /dev/null
+++ b/man/renameNWISColumns.Rd
@@ -0,0 +1,76 @@
+% Generated by roxygen2 (4.0.2): do not edit by hand
+\name{renameNWISColumns}
+\alias{renameNWISColumns}
+\title{renameColumns}
+\usage{
+renameNWISColumns(rawData, p00010 = "Wtemp", p00045 = "Precip",
+  p00060 = "Flow", p00065 = "GH", p00095 = "SpecCond", p00300 = "DO",
+  p00400 = "pH", p62611 = "GWL", p63680 = "Turb", p72019 = "WLBLS", ...)
+}
+\arguments{
+\item{rawData}{the daily- or unit-values dataset retrieved from NWISweb.}
+
+\item{p00010}{the base name for parameter code 00010.}
+
+\item{p00045}{the base name for parameter code 00045.}
+
+\item{p00060}{the base name for parameter code 00060.}
+
+\item{p00065}{the base name for parameter code 00065.}
+
+\item{p00095}{the base name for parameter code 00095.}
+
+\item{p00300}{the base name for parameter code 00300.}
+
+\item{p00400}{the base name for parameter code 00400.}
+
+\item{p62611}{the base name for parameter code 62611.}
+
+\item{p63680}{the base name for parameter code 63680.}
+
+\item{p72019}{the base name for parameter code 72019.}
+
+\item{\dots}{named arguments for the base name for any other parameter code. The
+form of the name must be like pXXXXX, where XXXXX is the parameter code.}
+}
+\value{
+A dataset like \code{data} with selected columns renamed.
+}
+\description{
+Rename columns coming back from NWIS data retrievals.  Daily and unit value columns
+have names derived from their data descriptor, parameter, and statistic codes. This
+function reads information from the header and the arguments in the call to
+to rename those columns.
+}
+\note{
+The following statistics codes are converted by \code{renameNWISColumns}. See
+\url{http://help.waterdata.usgs.gov/stat_cd_nm} for information about USGS statistics codes.
+\describe{
+\item{00001}{Maximum value, suffix: Max}
+\item{00002}{Minimum value, suffix: Min}
+\item{00003}{Mean value, no suffix}
+\item{00006}{Sum of values, suffix: Sum}
+\item{00007}{Modal value, suffix: Mode}
+\item{00008}{Median value, suffix: Median}
+\item{00011}{Instantaneous Value, suffix: Inst}
+\item{00012}{Equivalent mean value, suffix: EqMean}
+\item{00021}{Tidal high-high value, suffix: HiHiTide}
+\item{00022}{Tidal low-high value, suffix: LoHiTide}
+\item{00023}{Tidal high-low value, suffix: HiLoTide}
+\item{00024}{Tidal low-low value, suffix: LoLoTide}
+}
+}
+\examples{
+siteWithTwo <- '01480015'
+startDate <- "2012-09-01"
+endDate <- "2012-10-01"
+url2 <- constructNWISURL(siteWithTwo, "00060",startDate,endDate,'dv')
+twoResults <- importWaterML1(url2,TRUE)
+twoResults <- renameNWISColumns(twoResults)
+}
+\seealso{
+\code{\link{readNWISdv}}, \code{\link{readNWISuv}}
+}
+\keyword{IO}
+\keyword{manip}
+
diff --git a/man/whatNWISData.Rd b/man/whatNWISData.Rd
new file mode 100644
index 0000000000000000000000000000000000000000..2ca8c1ca887c82a189dcfb5d902561586e1e8972
--- /dev/null
+++ b/man/whatNWISData.Rd
@@ -0,0 +1,45 @@
+% Generated by roxygen2 (4.0.2): do not edit by hand
+\name{getNWISDataAvailability}
+\alias{getNWISDataAvailability}
+\alias{whatNWISdata}
+\title{USGS data availability}
+\usage{
+getNWISDataAvailability(siteNumbers, service = c("uv", "dv", "qw"))
+
+whatNWISdata(siteNumbers, service = "all", parameterCd = "all",
+  statCd = "all")
+}
+\arguments{
+\item{siteNumbers}{string vector of USGS site number or multiple sites.}
+
+\item{service}{vector string. Options are "all", or one or many of "dv"(daily values),
+"uv","rt", or "iv"(unit values), "qw"(water-quality),"sv"(site visits),"pk"(peak measurements),
+"gw"(groundwater levels), "ad" (sites included in USGS Annual Water Data Reports External Link),
+"aw" (sites monitored by the USGS Active Groundwater Level Network External Link), "id" (historical
+instantaneous values)}
+
+\item{parameterCd}{string vector of valid parameter codes to return. Defaults to "all" which will not perform a filter.}
+
+\item{statCd}{string vector of all statistic codes to return. Defaults to "all" which will not perform a filter.}
+}
+\value{
+retval dataframe with all information found in the expanded site file
+}
+\description{
+Imports a table of available parameters, period of record, and count. See \url{http://waterservices.usgs.gov/rest/Site-Service.html}
+for more information.
+}
+\examples{
+availableData <- whatNWISdata('05114000')
+# To find just unit value ('instantaneous') data:
+uvData <- whatNWISdata('05114000',service="uv")
+uvDataMulti <- whatNWISdata(c('05114000','09423350'),service=c("uv","dv"))
+siteNumbers <- c("01491000","01645000")
+flowAndTemp <- whatNWISdata(siteNumbers, parameterCd=c("00060","00010"))
+}
+\keyword{USGS}
+\keyword{data}
+\keyword{import}
+\keyword{service}
+\keyword{web}
+
diff --git a/man/getNWISSites.Rd b/man/whatNWISsites.Rd
similarity index 88%
rename from man/getNWISSites.Rd
rename to man/whatNWISsites.Rd
index 3921badfae3d4d49bb7973b0982b07203f1cbb7e..0b6f918742f1f71995cbdc0e67c808c3aa7987b7 100644
--- a/man/getNWISSites.Rd
+++ b/man/whatNWISsites.Rd
@@ -1,9 +1,12 @@
 % Generated by roxygen2 (4.0.2): do not edit by hand
 \name{getNWISSites}
 \alias{getNWISSites}
+\alias{whatNWISsites}
 \title{Site Data Import from NWIS}
 \usage{
 getNWISSites(...)
+
+whatNWISsites(...)
 }
 \arguments{
 \item{\dots}{see \url{http://waterservices.usgs.gov/rest/Site-Service.html#Service} for a complete list of options}
@@ -17,7 +20,7 @@ Arguments to the function should be based on \url{http://waterservices.usgs.gov/
 Mapper format is used
 }
 \examples{
-siteListPhos <- getNWISSites(stateCd="OH",parameterCd="00665")
+siteListPhos <- whatNWISsites(stateCd="OH",parameterCd="00665")
 }
 \keyword{NWIS}
 \keyword{data}
diff --git a/man/getWQPSites.Rd b/man/whatWQPsites.Rd
similarity index 83%
rename from man/getWQPSites.Rd
rename to man/whatWQPsites.Rd
index 6ac4d2163e830967c3d04f2d2b61cc5b518a3af7..65adf0834315f97ff79c19653af3292a7260a5ba 100644
--- a/man/getWQPSites.Rd
+++ b/man/whatWQPsites.Rd
@@ -1,9 +1,12 @@
 % Generated by roxygen2 (4.0.2): do not edit by hand
 \name{getWQPSites}
 \alias{getWQPSites}
+\alias{whatWQPsites}
 \title{Site Data Import from Water Quality Portal}
 \usage{
 getWQPSites(...)
+
+whatWQPsites(...)
 }
 \arguments{
 \item{\dots}{see \url{www.waterqualitydata.us/webservices_documentation.jsp} for a complete list of options}
@@ -18,9 +21,10 @@ Arguments to the function should be based on \url{www.waterqualitydata.us/webser
 }
 \examples{
 \dontrun{
-site1 <- getWQPSites(siteid="USGS-01594440")
+site1 <- whatWQPsites(siteid="USGS-01594440")
+
 type <- "Stream"
-sites <- getWQPSites(statecode="US:55",countycode="US:55:025",siteType=type)
+sites <- whatWQPsites(statecode="US:55",countycode="US:55:025",siteType=type)
 }
 }
 \keyword{WQP}
diff --git a/man/padVariable.Rd b/man/zeroPad.Rd
similarity index 78%
rename from man/padVariable.Rd
rename to man/zeroPad.Rd
index 2a55576067c78204fcfe02f8cc2c01c8992e89a6..7aaf059d387821cacbaffc83964d0d57016b2047 100644
--- a/man/padVariable.Rd
+++ b/man/zeroPad.Rd
@@ -1,9 +1,9 @@
 % Generated by roxygen2 (4.0.2): do not edit by hand
-\name{padVariable}
-\alias{padVariable}
+\name{zeroPad}
+\alias{zeroPad}
 \title{Pad string with leading zeros}
 \usage{
-padVariable(x, padTo)
+zeroPad(x, padTo)
 }
 \arguments{
 \item{x}{string}
@@ -18,9 +18,9 @@ Function to pad a string with leading zeros. Useful for parameter codes and USGS
 }
 \examples{
 pCode <- '10'
-correctPCode <- padVariable(pCode,5)
+correctPCode <- zeroPad(pCode,5)
 pCodes <- c('100','1000','0','12345','1565465465465465')
-correctPCodes <- padVariable(pCodes,5)
+correctPCodes <- zeroPad(pCodes,5)
 }
 \keyword{USGS}
 \keyword{data}
diff --git a/vignettes/Rhelp.png b/vignettes/Rhelp.png
index 2267f63983342275a92bf41a3f9b9f99fd0b76fd..453786664e0501c3ef8b4fe2073c18fbd363ccac 100644
Binary files a/vignettes/Rhelp.png and b/vignettes/Rhelp.png differ
diff --git a/vignettes/dataRetrieval-concordance.tex b/vignettes/dataRetrieval-concordance.tex
index 78cf8512f5ecde9e75efe4d6874c7177b27885d8..a630817fc3ab1ba541dc591efb2a2c8a3dd5b669 100644
--- a/vignettes/dataRetrieval-concordance.tex
+++ b/vignettes/dataRetrieval-concordance.tex
@@ -1,10 +1,9 @@
 \Sconcordance{concordance:dataRetrieval.tex:dataRetrieval.Rnw:%
-1 127 1 49 0 1 7 15 1 1 14 55 1 3 0 36 1 2 0 8 1 9 0 %
-24 1 3 0 21 1 4 0 6 1 8 0 18 1 3 0 25 1 1 4 19 1 9 0 %
-6 1 7 0 22 1 8 0 16 1 2 0 11 1 23 0 22 1 9 0 20 1 3 0 %
-6 1 17 0 28 1 39 0 10 1 9 0 20 1 4 0 14 1 4 0 33 1 13 %
-0 39 1 14 0 18 1 2 0 14 1 2 0 49 1 4 0 7 1 4 0 11 1 2 %
-0 17 1 7 0 22 1 8 0 21 1 4 0 9 1 4 0 79 1 1 2 9 1 1 4 %
-4 1 20 0 44 1 4 0 30 1 4 0 22 1 4 0 21 1 26 0 152 1 4 %
-0 9 1 13 0 13 1 4 0 14 1 4 0 5 1 4 0 23 1 18 0 8 1 4 %
-0 43 1}
+1 127 1 49 0 1 7 15 1 1 14 47 1 3 0 97 1 2 0 8 1 9 0 %
+31 1 3 0 21 1 4 0 6 1 8 0 7 1 4 0 23 1 2 0 21 1 1 8 %
+19 1 9 0 6 1 7 0 26 1 7 0 18 1 2 0 11 1 18 0 21 1 9 0 %
+20 1 3 0 7 1 20 0 44 1 2 0 8 1 88 0 16 1 10 0 16 1 10 %
+0 17 1 13 0 17 1 14 0 20 1 4 0 14 1 4 0 33 1 13 0 40 %
+1 13 0 18 1 2 0 14 1 2 0 20 1 9 0 8 1 8 0 12 1 25 0 9 %
+1 4 0 7 1 29 0 19 1 4 0 15 1 4 0 11 1 4 0 5 1 4 0 22 %
+1 14 0 8 1 4 0 43 1}
diff --git a/vignettes/dataRetrieval.Rnw b/vignettes/dataRetrieval.Rnw
index bc52720f90eaacf7ffe93f134999ff849f5be8b5..fe19498fbf7f4831c25fb27dcaf1d6fd5c5f5079 100644
--- a/vignettes/dataRetrieval.Rnw
+++ b/vignettes/dataRetrieval.Rnw
@@ -1,8 +1,8 @@
 %\VignetteIndexEntry{Introduction to the dataRetrieval package}
 %\VignetteEngine{knitr::knitr}
 %\VignetteDepends{}
-%\VignetteSuggests{xtable,EGRET}
-%\VignetteImports{zoo, XML, RCurl, reshape2,lubridate}
+%\VignetteSuggests{xtable}
+%\VignetteImports{XML, RCurl, reshape2,lubridate,httr,plyr}
 %\VignettePackage{dataRetrieval}
 
 \documentclass[a4paper,11pt]{article}
@@ -142,8 +142,8 @@ library(knitr)
 %------------------------------------------------------------
 \title{The dataRetrieval R package}
 %------------------------------------------------------------
-\author[1]{Laura De Cicco}
-\author[1]{Robert Hirsch}
+\author[1]{Laura A. De Cicco}
+\author[1]{Robert M. Hirsch}
 \affil[1]{United States Geological Survey}
 
 
@@ -164,7 +164,7 @@ addSpace <- function(x) ifelse(x != "1", "[5pt]","")
 
 \noindent{\huge\textsf{\textbf{The dataRetrieval R package}}}
 
-\noindent\textsf{By Laura De Cicco and Robert Hirsch}
+\noindent\textsf{By Laura A. De Cicco and Robert M. Hirsch}
 
 \noindent\textsf{\today}
 
@@ -181,55 +181,108 @@ addSpace <- function(x) ifelse(x != "1", "[5pt]","")
 %------------------------------------------------------------
 \section{Introduction to dataRetrieval}
 %------------------------------------------------------------ 
-The dataRetrieval package was created to simplify the process of loading hydrologic data into the R environment. It has been specifically designed to work seamlessly with the EGRET R package: Exploration and Graphics for RivEr Trends. See: \url{https://github.com/USGS-R/EGRET/wiki} or \url{http://dx.doi.org/10.3133/tm4A10} for information on EGRET. EGRET is designed to provide analysis of water quality data sets using the Weighted Regressions on Time, Discharge and Season (WRTDS) method as well as analysis of discharge trends using robust time-series smoothing techniques.  Both of these capabilities provide both tabular and graphical analyses of long-term data sets.
+The dataRetrieval package was created to simplify the process of loading hydrologic data into the R environment. It is designed to retrieve the major data types of U.S. Geological Survey (USGS) hydrologic data that are available on the Web, as well as data from the Water Quality Portal (WQP), which currently houses water quality data from the Environmental Protection Agency (EPA), U.S. Department of Agriculture (USDA), and USGS. Direct USGS data is obtained from a service called the National Water Information System (NWIS). A lot of useful information about NWIS can be obtained here:
 
-
-The dataRetrieval package is designed to retrieve many of the major data types of U.S. Geological Survey (USGS) hydrologic data that are available on the Web. Users may also load data from other sources (text files, spreadsheets) using dataRetrieval.  Section \ref{sec:genRetrievals} provides examples of how one can obtain raw data from USGS sources on the Web and load them into dataframes within the R environment.  The functionality described in section \ref{sec:genRetrievals} is for general use and is not tailored for the specific uses of the EGRET package.  The functionality described in section \ref{sec:EGRETdfs} is tailored specifically to obtaining input from the Web and structuring it for use in the EGRET package.  The functionality described in section \ref{sec:userFiles} is for converting hydrologic data from user-supplied files and structuring it specifically for use in the EGRET package.
+\url{http://help.waterdata.usgs.gov/}
 
 For information on getting started in R and installing the package, see (\ref{sec:appendix1}): Getting Started. Any use of trade, firm, or product names is for descriptive purposes only and does not imply endorsement by the U.S. Government.
 
-A quick workflow for major dataRetrieval functions:
+A quick workflow for USGS dataRetrieval functions:
 
 <<workflow, echo=TRUE,eval=FALSE>>=
 library(dataRetrieval)
 # Choptank River near Greensboro, MD
 siteNumber <- "01491000" 
-ChoptankInfo <- getNWISSiteInfo(siteNumber)
+ChoptankInfo <- readNWISsite(siteNumber)
 parameterCd <- "00060"
 
 #Raw daily data:
-rawDailyData <- getNWISdvData(siteNumber,parameterCd,
-                      "1980-01-01","2010-01-01")
-# Data compiled for EGRET analysis
-Daily <- getNWISDaily(siteNumber,parameterCd,
+rawDailyData <- readNWISdv(siteNumber,parameterCd,
                       "1980-01-01","2010-01-01")
 
 # Sample data Nitrate:
 parameterCd <- "00618"
-Sample <- getNWISSample(siteNumber,parameterCd,
+qwData <- readNWISqw(siteNumber,parameterCd,
                       "1980-01-01","2010-01-01")
 
-# Metadata on site and nitrate:
-INFO <- getNWISInfo(siteNumber,parameterCd)
-
-# Merge discharge and nitrate data to one dataframe:
-Sample <- mergeReport()
+pCode <- readNWISpCode(parameterCd)
 
 @
 
+USGS data are made available through the National Water Information System (NWIS).
+
+Table \ref{tab:func} describes the functions available in the dataRetrieval package.
+
+\begin{table}[!ht]
+\begin{minipage}{\linewidth}
+{\footnotesize
+\caption{dataRetrieval functions} 
+\label{tab:func}
+\begin{tabular}{lll}
+  \hline
+\multicolumn{1}{c}{\textbf{\textsf{Function Name}}} &
+\multicolumn{1}{c}{\textbf{\textsf{Arguments}}}  &
+\multicolumn{1}{c}{\textbf{\textsf{Description}}} \\  [0pt]
+  \hline
+  \texttt{readNWISdata} &  \texttt{...} & NWIS data using user-specified queries\\
+   & service & \\
+  [5pt]\texttt{readNWISdv} & siteNumber & NWIS daily data\\
+  & parameterCd & \\
+  & startDate & \\
+  & endDate & \\
+  & statCd & \\
+  [5pt]\texttt{readNWISqw} & siteNumber & NWIS water quality data\\
+    & parameterCd & \\
+  & startDate & \\
+  & endDate & \\
+  & expanded & \\
+  [5pt]\texttt{readNWISuv} & siteNumber & NWIS unit value data\\
+  & parameterCd & \\
+  & startDate & \\
+  & endDate & \\
+  [5pt]\texttt{readNWISrating} & siteNumber & NWIS rating table for active streamgage \\
+  & type & \\
+  [5pt]\texttt{readNWISmeas} & siteNumber & NWIS surface-water measurements \\
+  & startDate & \\
+  & endDate & \\
+  [5pt]\texttt{readNWISpeak} & siteNumber & NWIS peak flow data \\
+  & startDate & \\
+  & endDate & \\
+  [5pt]\texttt{readNWISgwl} & siteNumber & NWIS groundwater level measurements \\
+  & startDate & \\
+  & endDate & \\  
+  [5pt]\texttt{readNWISpCode} & parameterCd & NWIS parameter code information\\
+  [5pt]\texttt{readNWISsite} & siteNumber & NWIS site information \\
+  [5pt]\texttt{whatNWISsites} & \texttt{...} & NWIS site search using user-specified queries \\
+  [5pt]\texttt{whatNWISdata} & siteNumber & NWIS data availability, including period of record and count \\ 
+   & service & \\
+   [5pt]\texttt{readWQPdata} & \texttt{...} & WQP data using user-specified queries \\
+   [5pt]\texttt{readWQPqw} & siteNumber & WQP data \\
+     & parameterCd (or characteristic name) & \\
+  & startDate & \\
+  & endDate & \\
+  [5pt]\texttt{whatWQPsites} & \texttt{...} & WQP site search using user-specified queries \\  
+   \hline
+\end{tabular}
+}
+\end{minipage}
+\end{table}
+
+\clearpage
 
 %------------------------------------------------------------
 \section{USGS Web Retrievals}
 \label{sec:genRetrievals}
 %------------------------------------------------------------ 
-In this section, five examples of Web retrievals document how to get raw data. This data includes site information (\ref{sec:usgsSite}), measured parameter information (\ref{sec:usgsParams}), historical daily values(\ref{sec:usgsDaily}), unit values (which include real-time data but can also include other sensor data stored at regular time intervals) (\ref{sec:usgsRT}), and water quality data (\ref{sec:usgsWQP}) or (\ref{sec:usgsSTORET}). We will use the Choptank River near Greensboro, MD as an example.  Daily discharge measurements are available as far back as 1948.  Additionally, nitrate has been measured since 1964. 
+In this section, examples of Web retrievals document how to get raw data. This data includes site information (\ref{sec:usgsSite}), measured parameter information (\ref{sec:usgsParams}), historical daily values (\ref{sec:usgsDaily}), unit values (which include real-time data but can also include other sensor data stored at regular time intervals) (\ref{sec:usgsRT}), water quality data (\ref{sec:usgsWQP}), groundwater level data (\ref{sec:gwl}), peak flow data (\ref{sec:peak}), rating curve data (\ref{sec:rating}), and surface-water measurement data (\ref{sec:meas}). Section \ref{sec:metadata} shows instructions for getting metadata that is attached to each returned dataframe.
 
-% %------------------------------------------------------------
-% \subsection{Introduction}
-% %------------------------------------------------------------
 The USGS organizes hydrologic data in a standard structure.  Streamgages are located throughout the United States, and each streamgage has a unique ID (referred in this document and throughout the dataRetrieval package as \enquote{siteNumber}).  Often (but not always), these ID's are 8 digits.  The first step to finding data is discovering this siteNumber. There are many ways to do this, one is the National Water Information System: Mapper \url{http://maps.waterdata.usgs.gov/mapper/index.html}.
 
-Once the siteNumber is known, the next required input for USGS data retrievals is the \enquote{parameter code}.  This is a 5-digit code that specifies the measured parameter being requested.  For example, parameter code 00631 represents \enquote{Nitrate plus nitrite, water, filtered, milligrams per liter as nitrogen}, with units of \enquote{mg/l as N}. A complete list of possible USGS parameter codes can be found at \url{http://nwis.waterdata.usgs.gov/usa/nwis/pmcodes?help}.
+Once the siteNumber is known, the next required input for USGS data retrievals is the \enquote{parameter code}.  This is a 5-digit code that specifies the measured parameter being requested.  For example, parameter code 00631 represents \enquote{Nitrate plus nitrite, water, filtered, milligrams per liter as nitrogen}, with units of \enquote{mg/l as N}. 
+
+A useful place to discover USGS codes information, along with other NWIS information is:
+
+\url{http://help.waterdata.usgs.gov/codes-and-parameters}
 
 Not every station will measure all parameters. A short list of commonly measured parameters is shown in Table \ref{tab:params}.
 
@@ -253,18 +306,25 @@ print(xtable(data.df,
 
 @
 
-A complete list (as of September 25, 2013) is available as data attached to the package. It is accessed by the following:
+A complete list (as of November 7, 2014) is available as data attached to the package. It is accessed by the following:
 
-<<tableParameterCodesDataRetrieval>>=
+<<tableParameterCodesDataRetrieval, echo=TRUE, eval=TRUE>>=
 library(dataRetrieval)
 parameterCdFile <-  parameterCdFile
 names(parameterCdFile)
 @
 
+Two output columns that may not be obvious are \enquote{srsname} and \enquote{casrn}. Srsname stands for \enquote{Substance Registry Services}. More information on the srs name can be found here:
+
+\url{http://ofmpub.epa.gov/sor_internet/registry/substreg/home/overview/home.do}
+
+Casrn stands for \enquote{Chemical Abstracts Service (CAS) Registry Number}. More information on CAS can be found here:
+
+\url{http://www.cas.org/content/chemical-substances/faqs}
 
 For unit values data (sensor data measured at regular time intervals such as 15 minutes or hourly), knowing the parameter code and siteNumber is enough to make a request for data.  For most variables that are measured on a continuous basis, the USGS also stores the historical data as daily values.  These daily values are statistical summaries of the continuous data, e.g. maximum, minimum, mean, or median. The different statistics are specified by a 5-digit statistics code.  A complete list of statistic codes can be found here:
 
-\url{http://nwis.waterdata.usgs.gov/nwis/help/?read_file=stat&format=table}
+\url{http://help.waterdata.usgs.gov/code/stat_cd_nm_query?stat_nm_cd=%25&fmt=html&inline=true}
 
 Some common codes are shown in Table \ref{tab:stat}.
 
@@ -285,7 +345,7 @@ print(xtable(data.df,label="tab:stat",
 
 @
 
-Examples for using these siteNumber's, parameter codes, and stat codes will be presented in subsequent sections.
+Examples for using these siteNumbers, parameter codes, and stat codes will be presented in subsequent sections.
 
 \FloatBarrier
 
@@ -295,38 +355,50 @@ Examples for using these siteNumber's, parameter codes, and stat codes will be p
 %------------------------------------------------------------
 
 %------------------------------------------------------------
-\subsubsection{getNWISSiteInfo}
+\subsubsection{readNWISsite}
 \label{sec:usgsSiteFileData}
 %------------------------------------------------------------
-Use the \texttt{getNWISSiteInfo} function to obtain all of the information available for a particular USGS site such as full station name, drainage area, latitude, and longitude. \texttt{getNWISSiteInfo} can also access information about multiple sites with a vector input.
+Use the \texttt{readNWISsite} function to obtain all of the information available for a particular USGS site (or sites) such as full station name, drainage area, latitude, and longitude. \texttt{readNWISsite} can also access information about multiple sites with a vector input.
 
 
 <<getSite, echo=TRUE>>=
 siteNumbers <- c("01491000","01645000") 
-siteINFO <- getNWISSiteInfo(siteNumbers)
+siteINFO <- readNWISsite(siteNumbers)
 @
 
 A specific example piece of information can be retrieved, in this case a station name, as follows:
 
 <<siteNames2, echo=TRUE>>=
-siteINFO$station.nm
+siteINFO$station_nm
 @
 Site information is obtained from \url{http://waterservices.usgs.gov/rest/Site-Test-Tool.html}
+
+Information on the returned data can be found with the \texttt{comment} function as described in section \ref{sec:metadata}.
+
+<<siteNames3, echo=TRUE, eval=FALSE>>=
+comment(siteINFO)
+@
+
+
+
 \FloatBarrier
 
 %------------------------------------------------------------
-\subsubsection{getNWISDataAvailability}
+\subsubsection{whatNWISdata}
 \label{sec:usgsDataAvailability}
 %------------------------------------------------------------
-To discover what data is available for a particular USGS site, including measured parameters, period of record, and number of samples (count), use the \texttt{getNWISDataAvailability} function. It is possible to limit the retrieval information to a subset of types (\texttt{"}dv\texttt{"}, \texttt{"}uv\texttt{"}, or \texttt{"}qw\texttt{"}). In the following example, we limit the retrieved Choptank data to only daily data. Leaving the \texttt{"}type\texttt{"} argument blank returns all of the available data for that site.
+To discover what data is available for a particular USGS site, including measured parameters, period of record, and number of samples (count), use the \texttt{whatNWISdata} function. It is possible to limit the retrieval information to a subset of services. The possible choices for services are: \texttt{"}dv\texttt{"} (daily values), \texttt{"}uv\texttt{"}, \texttt{"}rt\texttt{"}, or \texttt{"}iv\texttt{"} (unit values), \texttt{"}qw\texttt{"} (water-quality), \texttt{"}sv\texttt{"} (site visits), \texttt{"}pk\texttt{"} (peak measurements), \texttt{"}gw\texttt{"} (groundwater levels), \texttt{"}ad\texttt{"} (sites included in USGS Annual Water Data Reports External Link), \texttt{"}aw\texttt{"} (sites monitored by the USGS Active Groundwater Level Network External Link), and \texttt{"}id\texttt{"} (historical instantaneous values).
+
+In the following example, we limit the retrieved data to only daily data. The default for \texttt{"}service\texttt{"} is \enquote{all}, which returns all of the available data for that site. Likewise, there are arguments for parameter code (\texttt{parameterCd}) and statistic code (\texttt{statCd}) to filter the results. The default for both is to return all possible values (\enquote{all}). The returned \texttt{"}count\_nu\texttt{"} for \texttt{"}uv\texttt{"} data is the count of days with returned data, not the actual count of returned values.
 
 
 <<getSiteExtended, echo=TRUE>>=
 # Continuing from the previous example:
-# This pulls out just the daily data:
+# This pulls out just the daily, mean data:
+
+dailyDataAvailable <- whatNWISdata(siteNumbers,
+                    service="dv", statCd="00003")
 
-dailyDataAvailable <- getNWISDataAvailability(siteNumbers,
-                    type="dv")
 
 @
 
@@ -335,11 +407,11 @@ tableData <- with(dailyDataAvailable,
       data.frame( 
       siteNumber= site_no,
       srsname=srsname, 
-      startDate=as.character(startDate), 
-      endDate=as.character(endDate), 
-      count=as.character(count),
+      startDate=as.character(begin_date), 
+      endDate=as.character(end_date), 
+      count=as.character(count_nu),
       units=parameter_units,
-      statCd = statCd,
+#       statCd = stat_cd,
       stringsAsFactors=FALSE)
       )
 
@@ -348,7 +420,7 @@ tableData$units[which(tableData$units == "uS/cm @25C")] <- "$\\mu$S/cm @25C"
 
 
 print(xtable(tableData,label="tab:gda",
-    caption="Daily mean data availabile at the Choptank River near Greensboro, MD. [Some columns deleted for space considerations]"),
+    caption="Reformatted version of output from \\texttt{whatNWISdata} function for the Choptank River near Greensboro, MD, and from Seneca Creek at Dawsonville, MD from the daily values service [Some columns deleted for space considerations]"),
        caption.placement="top",
        size = "\\footnotesize",
        latex.environment=NULL,
@@ -369,12 +441,12 @@ See Section \ref{app:createWordTable} for instructions on converting an R datafr
 \subsection{Parameter Information}
 \label{sec:usgsParams}
 %------------------------------------------------------------
-To obtain all of the available information concerning a measured parameter (or multiple parameters), use the \texttt{getNWISPcodeInfo} function:
+To obtain all of the available information concerning a measured parameter (or multiple parameters), use the \texttt{readNWISpCode} function:
 
 <<label=getPCodeInfo, echo=TRUE>>=
 # Using defaults:
 parameterCd <- "00618" 
-parameterINFO <- getNWISPcodeInfo(parameterCd)
+parameterINFO <- readNWISpCode(parameterCd)
 colnames(parameterINFO)
 @
 
@@ -383,33 +455,39 @@ A specific example piece of information, in this case parameter name, can be obt
 <<siteNames, echo=TRUE>>=
 parameterINFO$parameter_nm
 @
-Parameter information can obtained from \url{http://nwis.waterdata.usgs.gov/usa/nwis/pmcodes}
+Parameter information can be obtained from:
+
+\url{http://help.waterdata.usgs.gov/codes-and-parameters/parameters}
+
 \FloatBarrier
+
 %------------------------------------------------------------
 \subsection{Daily Values}
 \label{sec:usgsDaily}
 %------------------------------------------------------------
-To obtain daily records of USGS data, use the \texttt{getNWISdvData} function. The arguments for this function are siteNumber, parameterCd, startDate, endDate, statCd, and a logical (TRUE/FALSE) interactive. There are 2 default arguments: statCd (defaults to \texttt{"}00003\texttt{"}), and interactive (defaults to TRUE).  If you want to use the default values, you do not need to list them in the function call. By setting the \texttt{"}interactive\texttt{"} option to FALSE, the operation of the function will advance automatically. It might make more sense to run large batch collections with the interactive option set to FALSE. 
+To obtain daily records of USGS data, use the \texttt{readNWISdv} function. The arguments for this function are siteNumber, parameterCd, startDate, endDate, and statCd (defaults to \texttt{"}00003\texttt{"}).  If you want to use the default values, you do not need to list them in the function call. 
 
 The dates (start and end) must be in the format \texttt{"}YYYY-MM-DD\texttt{"} (note: the user must include the quotes).  Setting the start date to \texttt{"}\texttt{"} (no space) will prompt the program to ask for the earliest date, and setting the end date to \texttt{"}\texttt{"} (no space) will prompt for the latest available date.
 
 <<label=getNWISDaily, echo=TRUE, eval=TRUE>>=
 
-# Continuing with our Choptank River example
+# Choptank River near Greensboro, MD:
 siteNumber <- "01491000"
 parameterCd <- "00060"  # Discharge
-startDate <- ""  # Will request earliest date
-endDate <- "" # Will request latest date
+startDate <- "2009-10-01"  
+endDate <- "2012-09-30" 
 
-discharge <- getNWISdvData(siteNumber, 
+discharge <- readNWISdv(siteNumber, 
                     parameterCd, startDate, endDate)
 names(discharge)
 @
 
-The column \texttt{"}datetime\texttt{"} in the returned dataframe is automatically imported as a variable of class \texttt{"}Date\texttt{"} in R. Each requested parameter has a value and remark code column.  The names of these columns depend on the requested parameter and stat code combinations. USGS remark codes are often \texttt{"}A\texttt{"} (approved for publication) or \texttt{"}P\texttt{"} (provisional data subject to revision). A more complete list of remark codes can be found here:
-\url{http://nwis.waterdata.usgs.gov/usa/nwis/pmcodes}
+The column \texttt{"}datetime\texttt{"} in the returned dataframe is automatically imported as a variable of class \texttt{"}Date\texttt{"} in R. Each requested parameter has a value and remark code column.  The names of these columns depend on the requested parameter and stat code combinations. USGS daily value qualification codes are often \texttt{"}A\texttt{"} (approved for publication) or \texttt{"}P\texttt{"} (provisional data subject to revision). A more complete list of daily value qualification codes can be found here:
+
+\url{http://help.waterdata.usgs.gov/codes-and-parameters/daily-value-qualification-code-dv_rmk_cd}
 
 Another example that doesn't use the defaults would be a request for mean and maximum daily temperature and discharge in early 2012:
+
 <<label=getNWIStemperature, echo=TRUE>>=
 
 parameterCd <- c("00010","00060")  # Temperature and discharge
@@ -417,41 +495,40 @@ statCd <- c("00001","00003")  # Mean and maximum
 startDate <- "2012-01-01"
 endDate <- "2012-05-01"
 
-temperatureAndFlow <- getNWISdvData(siteNumber, parameterCd, 
+temperatureAndFlow <- readNWISdv(siteNumber, parameterCd, 
         startDate, endDate, statCd=statCd)
 
 @
 
 Daily data is pulled from \url{http://waterservices.usgs.gov/rest/DV-Test-Tool.html}.
 
-The column names can be automatically adjusted based on the parameter and statistic codes using the \texttt{renameColumns} function. This is not necessary, but may be useful when analyzing the data. 
+The column names can be shortened and simplified using the \texttt{renameNWISColumns} function.  This is not necessary, but may streamline subsequent data analysis and presentation.
 
 <<label=renameColumns, echo=TRUE>>=
 names(temperatureAndFlow)
 
-temperatureAndFlow <- renameColumns(temperatureAndFlow)
+temperatureAndFlow <- renameNWISColumns(temperatureAndFlow)
 names(temperatureAndFlow)
 @
 
 An example of plotting the above data (Figure \ref{fig:getNWIStemperaturePlot}):
 
 <<getNWIStemperaturePlot, echo=TRUE, fig.cap="Temperature and discharge plot of Choptank River in 2012.",out.width='1\\linewidth',out.height='1\\linewidth',fig.show='hold'>>=
+variableInfo <- attr(temperatureAndFlow, "variableInfo")
+siteInfo <- attr(temperatureAndFlow, "siteInfo")
+
 par(mar=c(5,5,5,5)) #sets the size of the plot window
 
-with(temperatureAndFlow, plot(
-  datetime, Temperature_water_degrees_Celsius_Max_01,
-  xlab="Date",ylab="Max Temperature [C]"
-  ))
+plot(temperatureAndFlow$dateTime, temperatureAndFlow$Wtemp_Max,
+  ylab=variableInfo$parameter_desc[1],xlab="" )
 par(new=TRUE)
-with(temperatureAndFlow, plot(
-  datetime, Discharge_cubic_feet_per_second,
+plot(temperatureAndFlow$dateTime, temperatureAndFlow$Flow,
   col="red",type="l",xaxt="n",yaxt="n",xlab="",ylab="",axes=FALSE
-  ))
+  )
 axis(4,col="red",col.axis="red")
-mtext(expression(paste("Mean Discharge [ft"^"3","/s]",
-                       sep="")),side=4,line=3,col="red")
-title(paste(siteINFO$station.nm[1],"2012",sep=" "))
-legend("topleft", c("Max Temperature", "Mean Discharge"), 
+mtext(variableInfo$parameter_desc[2],side=4,line=3,col="red")
+title(paste(siteInfo$station_nm,"2012"))
+legend("topleft", variableInfo$param_units, 
        col=c("black","red"),lty=c(NA,1),pch=c(1,NA))
 @
 
@@ -464,24 +541,40 @@ There are occasions where NWIS values are not reported as numbers, instead there
 \subsection{Unit Values}
 \label{sec:usgsRT}
 %------------------------------------------------------------
-Any data collected at regular time intervals (such as 15-minute or hourly) are known as \enquote{unit values.} Many of these are delivered on a real time basis and very recent data (even less than an hour old in many cases) are available through the function \texttt{getNWISunitData}.  Some of these unit values are available for many years, and some are only available for a recent time period such as 120 days.  Here is an example of a retrieval of such data.  
+Any data collected at regular time intervals (such as 15-minute or hourly) are known as \enquote{unit values.} Many of these are delivered on a real time basis and very recent data (even less than an hour old in many cases) are available through the function \texttt{readNWISuv}.  Some of these unit values are available for many years, and some are only available for a recent time period such as 120 days.  Here is an example of a retrieval of such data.  
 
-<<label=getNWISUnit, echo=TRUE>>=
+<<label=readNWISuv, echo=TRUE>>=
 
 parameterCd <- "00060"  # Discharge
 startDate <- "2012-05-12" 
 endDate <- "2012-05-13" 
-dischargeToday <- getNWISunitData(siteNumber, parameterCd, 
+dischargeUnit <- readNWISuv(siteNumber, parameterCd, 
         startDate, endDate)
 @
 
-The retrieval produces the following dataframe:
+The retrieval produces a data frame that contains 96 rows (one for every 15 minute period in the day).  They include all data collected from the startDate through the endDate (starting and ending with midnight locally-collected time). The dateTime column is converted to \enquote{UTC} (Coordinated Universal Time), so midnight EST will be 5 hours earlier in the dateTime column (the previous day, at 7pm).
+
 
-<<dischargeData, echo=FALSE>>=
-head(dischargeToday)
+<<dischargeData, echo=TRUE>>=
+head(dischargeUnit)
 @
 
-Note that time now becomes important, so the variable datetime is a POSIXct, and the time zone is included in a separate column. Data are retrieved from \url{http://waterservices.usgs.gov/rest/IV-Test-Tool.html}. There are occasions where NWIS values are not reported as numbers, instead a common example is \enquote{Ice.}  Any value that cannot be converted to a number will be reported as NA in this package.
+To override the UTC timezone, specify a valid timezone in the tz argument. Default is \texttt{""}, which will keep the dateTime column in UTC. Other valid timezones are:
+
+\begin{verbatim}
+America/New_York
+America/Chicago
+America/Denver
+America/Los_Angeles
+America/Anchorage
+America/Honolulu
+America/Jamaica
+America/Managua
+America/Phoenix
+America/Metlakatla
+\end{verbatim}
+
+Data are retrieved from \url{http://waterservices.usgs.gov/rest/IV-Test-Tool.html}. There are occasions where NWIS values are not reported as numbers, instead a common example is \enquote{Ice.}  Any value that cannot be converted to a number will be reported as NA in this package.
 
 \newpage
 
@@ -492,35 +585,100 @@ Note that time now becomes important, so the variable datetime is a POSIXct, and
 \subsection{Water Quality Values}
 \label{sec:usgsWQP}
 %------------------------------------------------------------
-To get USGS water quality data from water samples collected at the streamgage or other monitoring site (as distinct from unit values collected through some type of automatic monitor) we can use the function \texttt{getNWISqwData}, with the input arguments: siteNumber, parameterCd, startDate, endDate, and interactive (similar to \texttt{getNWISunitData} and \texttt{getNWISdvData}). Additionally, the argument \texttt{"}expanded\texttt{"} is a logical input that allows the user to choose between a simple return of datetimes/qualifier/values (expanded=FALSE), or a more complete and verbose output (expanded=TRUE). Expanded = TRUE includes such columns as remark codes, value qualifying text, and detection level.
+To get USGS water quality data from water samples collected at the streamgage or other monitoring site (as distinct from unit values collected through some type of automatic monitor) we can use the function \texttt{readNWISqw}, with the input arguments: siteNumber, parameterCd, startDate, and endDate. Additionally, the argument \texttt{"}expanded\texttt{"} is a logical input that allows the user to choose between a simple return of datetimes/qualifier/values (expanded=FALSE), or a more complete and verbose output (expanded=TRUE). Expanded = TRUE includes such columns as remark codes, value qualifying text, and detection level for each parameter code. There is also an argument \texttt{"}reshape\texttt{"} that converts the expanded dataset to a \texttt{"}wide\texttt{"} format (each requested parameter code gets individual columns).
 
-
-<<label=getQW, echo=TRUE>>=
+<<label=getQW, echo=TRUE, eval=TRUE>>=
  
 # Dissolved Nitrate parameter codes:
 parameterCd <- c("00618","71851")
 startDate <- "1985-10-01"
 endDate <- "2012-09-30"
 
-dissolvedNitrate <- getNWISqwData(siteNumber, parameterCd, 
-      startDate, endDate, expanded=TRUE)
-names(dissolvedNitrate)
+dfLong <- readNWISqw(siteNumber, parameterCd, 
+      startDate, endDate, expanded=TRUE,reshape=FALSE)
 
+# Or the wide return:
+# dfWide <- readNWISqw(siteNumber, parameterCd, 
+#       startDate, endDate, expanded=TRUE, reshape=TRUE)
 
 @
 
+Metadata, such as information about the column names, can be found by using the \texttt{comment} function, as described in section \ref{sec:metadata}.
+
+<<qwmeta, echo=TRUE, eval=TRUE>>=
+
+comment(dfLong)
 
-<<getQWtemperaturePlot, echo=TRUE, fig.cap=paste(parameterINFO$parameter_nm, "at", siteINFO$station.nm[1])>>=
-with(dissolvedNitrate, plot(
-  startDateTime, result_va_00618,
-  xlab="Date",ylab = paste(parameterINFO$srsname,
-      "[",parameterINFO$parameter_units,"]")
-  ))
-title(siteINFO$station.nm[1])
 @
 
 \FloatBarrier
 
+%------------------------------------------------------------
+\subsection{Groundwater level data}
+\label{sec:gwl}
+%------------------------------------------------------------
+Groundwater level measurements can be obtained with the \texttt{readNWISgwl} function. Information on the returned data can be found with the \texttt{comment} function as described in section \ref{sec:metadata}.
+
+<<gwlexample, echo=TRUE, eval=TRUE>>=
+siteNumber <- "434400121275801"
+groundWater <- readNWISgwl(siteNumber)
+
+names(groundWater)
+
+@
+
+%------------------------------------------------------------
+\subsection{Peak flow data}
+\label{sec:peak}
+%------------------------------------------------------------
+
+Peak flow data are instantaneous discharge or stage data that record the maximum values of these variables during a flood event.  They include the annual peak flood event but can also include records of other peaks that are lower than the annual maximum. Peak discharge measurements can be obtained with the \texttt{readNWISpeak} function. Information on the returned data can be found with the \texttt{comment} function as described in section \ref{sec:metadata}.
+
+<<peakexample, echo=TRUE, eval=TRUE>>=
+siteNumber <- '01594440'
+peakData <- readNWISpeak(siteNumber)
+
+
+names(peakData)
+
+@
+
+
+%------------------------------------------------------------
+\subsection{Rating curve data}
+\label{sec:rating}
+%------------------------------------------------------------
+Rating curves are the calibration curves that are used to convert measurements of stage to discharge.  Because of changing hydrologic conditions these rating curves change over time. Information on the returned data can be found with the \texttt{comment} function as described in section \ref{sec:metadata}.
+
+Rating curves can be obtained with the \texttt{readNWISrating} function.
+
+<<ratingexample, echo=TRUE, eval=TRUE>>=
+ratingData <- readNWISrating(siteNumber, "base")
+attr(ratingData, "RATING")
+
+names(ratingData)
+
+@
+
+
+
+%------------------------------------------------------------
+\subsection{Surface-water measurement data}
+\label{sec:meas}
+%------------------------------------------------------------
+These data are the discrete measurements of discharge that are made for the purpose of developing or revising the rating curve.  Information on the returned data can be found with the \texttt{comment} function as described in section \ref{sec:metadata}.
+
+Surface-water measurement data can be obtained with the \texttt{readNWISmeas} function.
+
+<<surfexample, echo=TRUE, eval=TRUE>>=
+surfaceData <- readNWISmeas(siteNumber)
+
+names(surfaceData)
+
+@
+
+
+
 %------------------------------------------------------------
 \subsection{URL Construction}
 \label{sec:usgsURL}
@@ -533,10 +691,10 @@ There may be times when you might be interested in seeing the URL (Web address)
 pCode <- c("00618","71851")
 startDate <- "1964-06-11"
 endDate <- "2012-12-18"
-url_qw <- constructNWISURL(siteNumber,pCode,startDate,endDate,'qw')
+url_qw <- constructNWISURL(siteNumber,pCode,startDate,endDate,"qw")
 url_dv <- constructNWISURL(siteNumber,"00060",startDate,endDate,
-                           'dv',statCd="00003")
-url_uv <- constructNWISURL(siteNumber,"00060",startDate,endDate,'uv')
+                           "dv",statCd="00003")
+url_uv <- constructNWISURL(siteNumber,"00060",startDate,endDate,"uv")
 @
 
 
@@ -545,15 +703,15 @@ url_uv <- constructNWISURL(siteNumber,"00060",startDate,endDate,'uv')
 \section{Water Quality Portal Web Retrievals}
 \label{sec:usgsSTORET}
 %------------------------------------------------------------
-There are additional water quality data sets available from the Water Quality Data Portal (\url{http://www.waterqualitydata.us/}).  These data sets can be housed in either the STORET database (data from EPA), NWIS database (data from USGS), STEWARDS database (data from USDA), and additional databases are slated to be included.  Because only USGS uses parameter codes, a \texttt{"}characteristic name\texttt{"} must be supplied.  The \texttt{getWQPqwData} function can take either a USGS parameter code, or a more general characteristic name in the parameterCd input argument. The Water Quality Data Portal includes data discovery tools and information on characteristic names. The following example retrieves specific conductance from a DNR site in Wisconsin. 
+There are additional water quality data sets available from the Water Quality Data Portal (\url{http://www.waterqualitydata.us/}).  These data sets can be housed in the STORET database (data from EPA), the NWIS database (data from USGS), or the STEWARDS database (data from USDA), and additional databases are slated to be included in the future.  Because only USGS uses parameter codes, a \texttt{"}characteristic name\texttt{"} must be supplied.  The \texttt{readWQPqw} function can take either a USGS parameter code, or a more general characteristic name in the parameterCd input argument. The Water Quality Data Portal includes data discovery tools and information on characteristic names. The following example retrieves specific conductance from a DNR site in Wisconsin. 
 
 
 <<label=getQWData, echo=TRUE, eval=FALSE>>=
-specificCond <- getWQPqwData('WIDNR_WQX-10032762',
+specificCond <- readWQPqw('WIDNR_WQX-10032762',
                 'Specific conductance','2011-05-01','2011-09-30')
 @
 
-Guidance for finding characteristic names can be found at: \url{http://www.waterqualitydata.us/webservices_documentation.jsp}.
+A tool for finding NWIS characteristic names can be found at: \url{http://www.waterqualitydata.us/public_srsnames.jsp}.
 
 \FloatBarrier
 
@@ -567,7 +725,7 @@ The previous examples all took specific input arguments: siteNumber, parameterCd
 \subsubsection{NWIS sites}
 \label{sec:NWISGenSite}
 %------------------------------------------------------------
-The function \texttt{getNWISSites} can be used to discover NWIS sites based on any query that the NWIS Site Service offers. This is done by using the \texttt{"..."} argument, which allows the user to use any arbitrary input argument. We can then use the service here:
+The function \texttt{whatNWISsites} can be used to discover NWIS sites based on any query that the NWIS Site Service offers. This is done by using the \texttt{"..."} argument, which allows the user to use any arbitrary input argument. We can then use the service here:
 
 \url{http://waterservices.usgs.gov/rest/Site-Test-Tool.html}
 
@@ -578,7 +736,7 @@ to discover many options for searching for NWIS sites. For example, you may want
 The following dataRetrieval code can be used to get those sites:
 
 <<siteSearch>>=
-sites <- getNWISSites(bBox="-83.0,36.5,-81.0,38.5", 
+sites <- whatNWISsites(bBox="-83.0,36.5,-81.0,38.5", 
                       parameterCd="00010,00060",
                       hasDataTypeCd="dv")
 
@@ -591,7 +749,7 @@ nrow(sites)
 \subsubsection{NWIS data}
 \label{sec:NWISGenData}
 %------------------------------------------------------------
-For NWIS data, the function \texttt{getNWISData} can be used. The argument listed in the R help file is \texttt{"..."} and \texttt{"}service\texttt{"} (only for data requests). Table \ref{tab:NWISGeneral} describes the services are available.
+For NWIS data, the function \texttt{readNWISdata} can be used. The arguments listed in the R help file are \texttt{"..."} and \texttt{"}service\texttt{"} (only for data requests). Table \ref{tab:NWISGeneral} describes the services that are available.
 
 \begin{table}[!ht]
 \begin{minipage}{\linewidth}
@@ -617,7 +775,8 @@ For NWIS data, the function \texttt{getNWISData} can be used. The argument liste
 The \texttt{"..."} argument allows the user to create their own queries based on the instructions found in the web links above. The links provide instructions on how to create a URL to request data. Perhaps you want sites only in Wisconsin, with a drainage area of at least 50 mi$^2$, and the most recent daily discharge data. That request would be done as follows:
 
 <<dataExample>>=
-dischargeWI <- getNWISData(stateCd="WI",
+dischargeWI <- readNWISdata(service="dv",
+                           stateCd="WI",
                            parameterCd="00060",
                            drainAreaMin="50",
                            statCd="00003")
@@ -634,11 +793,11 @@ Just as with NWIS, the Water Quality Portal (WQP) offers a variety of ways to se
 
 \url{http://www.waterqualitydata.us/webservices_documentation.jsp}
 
-To discover available sites in the WQP in New Jersey that have measured Chloride, use the function \texttt{getWQPSites}.
+To discover available sites in the WQP in New Jersey that have measured Chloride, use the function \texttt{whatWQPsites}.
 
 <<NJChloride, eval=FALSE>>=
 
-sitesNJ <- getWQPSites(statecode="US:34",
+sitesNJ <- whatWQPsites(statecode="US:34",
                        characteristicName="Chloride")
 
 @
@@ -648,11 +807,11 @@ sitesNJ <- getWQPSites(statecode="US:34",
 \subsubsection{Water Quality Portal data}
 \label{sec:WQPGenData}
 %------------------------------------------------------------
-Finally, to get data from the WQP using generalized Web service calls, use the function \texttt{getWQPData}. For example, to get all the pH data in Wisconsin:
+Finally, to get data from the WQP using generalized Web service calls, use the function \texttt{readWQPdata}. For example, to get all the pH data in Wisconsin:
 
 <<phData, eval=FALSE>>=
 
-dataPH <- getWQPData(statecode="US:55", 
+dataPH <- readWQPdata(statecode="US:55", 
                  characteristicName="pH")
 
 @
@@ -661,488 +820,59 @@ dataPH <- getWQPData(statecode="US:55",
 
 \FloatBarrier
 
-%------------------------------------------------------------
-\section{Data Retrievals Structured For Use In The EGRET Package}
-\label{sec:EGRETdfs}
-%------------------------------------------------------------ 
-Rather than using the raw data as retrieved by the Web, the dataRetrieval package also includes functions that return the data in a structure that has been designed to work with the EGRET R package (\url{https://github.com/USGS-R/EGRET/wiki}). In general, these dataframes may be much more \enquote{R-friendly} than the raw data, and will contain additional date information that allows for efficient data analysis.
-
-In this section, we use 3 dataRetrieval functions to get sufficient data to perform an EGRET analysis.  We will continue analyzing the Choptank River. We retrieve essentially the same data that were retrieved in section \ref{sec:genRetrievals}, but in this case the data are structured into three EGRET-specific dataframes.  The daily discharge data are placed in a dataframe called Daily.  The nitrate sample data are placed in a dataframe called Sample.  The data about the site and the parameter are placed in a dataframe called INFO.  Although these dataframes were designed to work with the EGRET R package, they can be very useful for a wide range of hydrology studies that don't use EGRET.
-
-%------------------------------------------------------------
-\subsection{INFO Data}
-\label{INFOsubsection}
-%------------------------------------------------------------
-
-The \texttt{getNWISInfo}, \texttt{getWQPInfo}, and \texttt{getUserInfo} functions obtain metadata, or data about the streamgage and measured parameters. Any number of columns can be included in this dataframe. Table \ref{tab:INFOtable} describes fields are required for EGRET functions. 
-
-\begin{table}[!ht]
-\begin{minipage}{\linewidth}
-{\footnotesize
-\caption{INFO columns required in EGRET functions} 
-\label{tab:INFOtable}
-\begin{tabular}{lll}
-  \hline
-\multicolumn{1}{c}{\textbf{\textsf{Column Name}}} &
-\multicolumn{1}{c}{\textbf{\textsf{Type}}} &
-\multicolumn{1}{c}{\textbf{\textsf{Description}}} \\  [0pt]
-  \hline
-  constitAbbrev & string & Constituent abbreviation, used for saving the workspace in EGRET\\
-  [5pt] drainSqKm & numeric & Drainage area in square kilometers \\
-  [5pt] paramShortName & string & Parameter name to use on graphs \\
-  [5pt] param.units & string & Parameter units \\
-  [5pt] shortName & string & Station name to use on graphs\\
-  [5pt] staAbbrev & string & Station Abbreviation \\
-   \hline
-\end{tabular}
-}
-\end{minipage}
-\end{table}
-
-The function \texttt{getNWISInfo} combines \texttt{getNWISSiteInfo} and \texttt{getNWISPcodeInfo}, producing one dataframe called INFO.
-
-<<ThirdExample>>=
-parameterCd <- "00618"
-INFO <- getNWISInfo(siteNumber,parameterCd, interactive=FALSE)
-@
-
-It is also possible to create the INFO dataframe using information from the Water Quality Portal:
-
-<<WQPInfo, eval=FALSE>>=
-parameterCd <- "00618"
-INFO_WQP <- getWQPInfo("USGS-01491000",parameterCd)
-@
-
-Finally, the function \texttt{getUserInfo} can be used to convert comma separated files into an INFO dataframe. 
-
-Any supplemental column that would be useful can be added to the INFO dataframe. 
-
-<<addInfo, eval=TRUE, echo=TRUE>>=
-
-INFO$riverInfo <- "Major tributary of the Chesapeake Bay"
-INFO$GreensboroPopulation <- 1931
-
-@
-
-
-\FloatBarrier
-
-%------------------------------------------------------------
-\subsection{Daily Data}
-\label{Dailysubsection}
-%------------------------------------------------------------
-The \texttt{getNWISDaily} function retrieves the daily values (discharge in this case).  It requires the inputs siteNumber, parameterCd, startDate, endDate, interactive, and convert. Most of these arguments are described in section \ref{sec:genRetrievals}, however \texttt{"}convert\texttt{"} is a new argument (that defaults to TRUE). The convert argument tells the program to convert the values from cubic feet per second (ft\textsuperscript{3}/s) to cubic meters per second (m\textsuperscript{3}/s) as shown in the example Daily data frame in Table \ref{tab:DailyDF1}. For EGRET applications with NWIS Web retrieval, do not use this argument (the default is TRUE), EGRET assumes that discharge is always stored in units of cubic meters per second. If you don't want this conversion and are not using EGRET, set convert=FALSE in the function call. 
-
-<<firstExample>>=
-siteNumber <- "01491000"
-startDate <- "2000-01-01"
-endDate <- "2013-01-01"
-# This call will get NWIS (ft3/s) data , and convert it to m3/s:
-Daily <- getNWISDaily(siteNumber, "00060", startDate, endDate)
-@
-
-
-
-<<colNamesDaily, echo=FALSE,results='asis'>>=
-ColumnName <- c("Date", "Q", "Julian","Month","Day","DecYear","MonthSeq","Qualifier","i","LogQ","Q7","Q30")
-Type <- c("Date", "number", "number","integer","integer","number","integer","string","integer","number","number","number")
-Description <- c("Date", "Discharge in m$^3$/s", "Number of days since January 1, 1850", "Month of the year [1-12]", "Day of the year [1-366]", "Decimal year", "Number of months since January 1, 1850", "Qualifying code", "Index of days, starting with 1", "Natural logarithm of Q", "7 day running average of Q", "30 day running average of Q")
-Units <- c("date", "m$^3$/s","days", "months","days","years","months", "character","days","numeric","m$^3$/s","m$^3$/s")
-
-DF <- data.frame(ColumnName,Type,Description,Units)
-
-print(xtable(DF, caption="Daily dataframe",label="tab:DailyDF1"),
-       caption.placement="top",
-       size = "\\footnotesize",
-       latex.environment=NULL,
-       sanitize.text.function = function(x) {x},
-       sanitize.colnames.function =  bold.colHeaders,
-       sanitize.rownames.function = addSpace
-       
-      )
-
-@
-
-
-If discharge values are negative or zero, the code will set all of these values to zero and then add a small constant to all of the daily discharge values.  This constant is 0.001 times the mean discharge.  The code will also report on the number of zero and negative values and the size of the constant.  Use EGRET analysis only if the number of zero values is a very small fraction of the total days in the record (say less than 0.1\% of the days), and there are no negative discharge values.  Columns Q7 and Q30 are the 7 and 30 day running averages for the 7 or 30 days ending on this specific date. Table \ref{tab:DailyDF1} lists details of the Daily data frame.
-
-Notice that the \enquote{Day of the year} column can span from 1 to 366. The 366 accounts for leap years. Every day has a consistent day of the year. This means, February 28\textsuperscript{th} is always the 59\textsuperscript{th} day of the year, Feb. 29\textsuperscript{th} is always the 60\textsuperscript{th} day of the year, and March 1\textsuperscript{st} is always the 61\textsuperscript{st} day of the year whether or not it is a leap year.
-
-User-generated Sample dataframes can also be created using the \texttt{getUserDaily} function. This is discused in detail in section \ref{sec:DailyFile}.
-
-\FloatBarrier
-
-%------------------------------------------------------------
-\subsection{Sample Data}
-\label{Samplesubsection}
-%------------------------------------------------------------
-The \texttt{getNWISSample} function retrieves USGS sample data from NWIS. The arguments for this function are also siteNumber, parameterCd, startDate, endDate, interactive. These are the same inputs as \texttt{getWQPqwData} or \texttt{getWQPData} as described in the previous section.
-
-<<secondExample>>=
-parameterCd <- "00618"
-Sample <-getNWISSample(siteNumber,parameterCd,
-      startDate, endDate)
-@
-
-The \texttt{getWQPSample} function retrieves Water Quality Portal sample data (STORET, NWIS, STEWARDS). The arguments for this function are siteNumber, characteristicName, startDate, endDate, interactive. Table \ref{tab:SampleDataframe} lists details of the Sample data frame. 
-
-<<STORET,echo=TRUE,eval=FALSE>>=
-site <- 'WIDNR_WQX-10032762'
-characteristicName <- 'Specific conductance'
-Sample <-getWQPSample(site,characteristicName,
-      startDate, endDate)
-@
-
-User-generated Sample dataframes can also be created using the \texttt{getUserSample} function. This is discused in detail in section \ref{sec:SampleFile}.
-
-\pagebreak
-
-
-\begin{table}
-{\footnotesize
-  \begin{threeparttable}[b]
-  \caption{Sample dataframe}
-  \label{tab:SampleDataframe}
-  \begin{tabular}{llll}
-  \hline
-\multicolumn{1}{c}{\textbf{\textsf{ColumnName}}} & 
-\multicolumn{1}{c}{\textbf{\textsf{Type}}} & 
-\multicolumn{1}{c}{\textbf{\textsf{Description}}} & 
-\multicolumn{1}{c}{\textbf{\textsf{Units}}} \\ 
-  \hline
-  Date & Date & Date & date \\ 
-  [5pt]ConcLow & number & Lower limit of concentration & mg/L \\ 
-  [5pt]ConcHigh & number & Upper limit of concentration & mg/L \\ 
-  [5pt]Uncen & integer & Uncensored data (1=true, 0=false) & integer \\ 
-  [5pt]ConcAve & number & Average of ConcLow and ConcHigh & mg/L \\ 
-  [5pt]Julian & number & Number of days since January 1, 1850 & days \\ 
-  [5pt]Month & integer & Month of the year [1-12] & months \\ 
-  [5pt]Day & integer & Day of the year [1-366] & days \\ 
-  [5pt]DecYear & number & Decimal year & years \\ 
-  [5pt]MonthSeq & integer & Number of months since January 1, 1850 & months \\ 
-  [5pt]SinDY & number & Sine of DecYear & numeric \\ 
-  [5pt]CosDY & number & Cosine of DecYear & numeric \\ 
-  [5pt]Q \tnote{1} & number & Discharge & m\textsuperscript{3}/s \\ 
-  [5pt]LogQ \tnote{1} & number & Natural logarithm of discharge & numeric \\ 
-   \hline
-\end{tabular}
-
-  \begin{tablenotes}
-    \item[1] Discharge columns are populated from data in the Daily dataframe after calling the \texttt{mergeReport} function.
-  \end{tablenotes}
- \end{threeparttable}
-}
-\end{table}
-
-Notice that the \enquote{Day of the year} column can span from 1 to 366. The 366 accounts for leap years. Every day has a consistent day of the year. This means, February 28\textsuperscript{th} is always the 59\textsuperscript{th} day of the year, Feb. 29\textsuperscript{th} is always the 60\textsuperscript{th} day of the year, and March 1\textsuperscript{st} is always the 61\textsuperscript{st} day of the year whether or not it is a leap year.
-
-Section \ref{sec:cenValues} is about summing multiple constituents, including how interval censoring is used. Since the Sample data frame is structured to only contain one constituent, when more than one parameter codes are requested, the \texttt{getNWISSample} function will sum the values of each constituent as described below.
-
-\FloatBarrier
-
+\clearpage
 
 %------------------------------------------------------------
-\subsection{Censored Values: Summation Explanation}
-\label{sec:cenValues}
+\section{Dataframe Metadata}
+\label{sec:metadata}
 %------------------------------------------------------------
-In the typical case where none of the data are censored (that is, no values are reported as \enquote{less-than} values), the ConcLow = ConcHigh = ConcAve and Uncen = 1 are equal to the reported value.  For the most common type of censoring, where a value is reported as less than the reporting limit, then ConcLow = NA, ConcHigh = reporting limit, ConcAve = 0.5 * reporting limit, and Uncen = 0.
-
-To illustrate how the dataRetrieval package handles a more complex censoring problem, let us say that in 2004 and earlier, we computed total phosphorus (tp) as the sum of dissolved phosphorus (dp) and particulate phosphorus (pp). From 2005 and onward, we have direct measurements of total phosphorus (tp). A small subset of this fictional data looks like Table \ref{tab:exampleComplexQW}.
-
-
+All dataframes returned from the Web services have some form of associated metadata. This information is included as attributes to the dataframe. All dataframes will have a \texttt{url} and \texttt{queryTime} attribute. For example, the url and query time used to obtain the data can be found as follows:
 
-<<label=tab:exampleComplexQW, echo=FALSE, eval=TRUE,results='asis'>>=
-cdate <- c("2003-02-15","2003-06-30","2004-09-15","2005-01-30","2005-05-30","2005-10-30")
-rdp <- c("", "<","<","","","")
-dp <- c(0.02,0.01,0.005,NA,NA,NA)
-rpp <- c("", "","<","","","")
-pp <- c(0.5,0.3,0.2,NA,NA,NA)
-rtp <- c("","","","","<","<")
-tp <- c(NA,NA,NA,0.43,0.05,0.02)
+<<meta1, eval=TRUE>>=
 
-DF <- data.frame(cdate,rdp,dp,rpp,pp,rtp,tp,stringsAsFactors=FALSE)
+attr(dischargeWI, "url")
 
-xTab <- xtable(DF, caption="Example data",digits=c(0,0,0,3,0,3,0,3),label="tab:exampleComplexQW")
-
-print(xTab,
-       caption.placement="top",
-       size = "\\footnotesize",
-       latex.environment=NULL,
-       sanitize.colnames.function =  bold.colHeaders,
-       sanitize.rownames.function = addSpace
-      )
+attr(dischargeWI, "queryTime")
 
 @
 
-The dataRetrieval package will \enquote{add up} all the values in a given row to form the total for that sample when using the Sample dataframe. Thus, you only want to enter data that should be added together. If you want a dataframe with multiple constituents that are not summed, do not use getNWISSample, getWQPSample, or getUserSample. The raw data functions: \texttt{getWQPData}, \texttt{getNWISqwData}, \texttt{getWQPqwData}, \texttt{getWQPData} will not sum constituents, but leave them in their individual columns. 
-
-For example, we might know the value for dp on 5/30/2005, but we don't want to put it in the table because under the rules of this data set, we are not supposed to add it in to the values in 2005.
+Depending on the format that the data was obtained (xml, rdb, etc), there will be additional information embedded in the dataframe as attributes. To discover the available attributes:
 
-For every sample, the EGRET package requires a pair of numbers to define an interval in which the true value lies (ConcLow and ConcHigh). In a simple uncensored case (the reported value is above the detection limit), ConcLow equals ConcHigh and the interval collapses down to a single point. In a simple censored case, the value might be reported as \verb@<@0.2, then ConcLow=NA and ConcHigh=0.2. We use NA instead of 0 as a way to elegantly handle future logarithm calculations.
+<<meta2, eval=TRUE>>=
 
-For the more complex example case, let us say dp is reported as \verb@<@0.01 and pp is reported as 0.3. We know that the total must be at least 0.3 and could be as much as 0.31. Therefore, ConcLow=0.3 and ConcHigh=0.31. Another case would be if dp is reported as \verb@<@0.005 and pp is reported \verb@<@0.2. We know in this case that the true value could be as low as zero, but could be as high as 0.205. Therefore, in this case, ConcLow=NA and ConcHigh=0.205. The Sample dataframe for the example data would be:
+names(attributes(dischargeWI))
 
-<<thirdExample,echo=FALSE>>=
-  compressedData <- compressData(DF)
-  Sample <- populateSampleColumns(compressedData)
 @
 
-<<thirdExampleView,echo=TRUE>>=
-  Sample
-@
-
-Section \ref{sec:userFiles} discusses inputting user-generated files. The functions \texttt{getUserSample} and \texttt{getNWISSample} assume summation with interval censoring inputs, and are discussed in sections \ref{sec:DailyFile} and \ref{sec:SampleFile}.
-
-\FloatBarrier
-
-%------------------------------------------------------------ 
-\subsection{User-Generated Data Files}
-\label{sec:userFiles}
-%------------------------------------------------------------ 
-In addition to retrieving data from the USGS Web services, the dataRetrieval package also includes functions to generate the Daily and Sample data frame from local files.
+For data obtained from \texttt{readNWISuv}, \texttt{readNWISdv}, \texttt{readNWISgwl} there are two attributes that are particularly useful: \texttt{siteInfo} and \texttt{variableInfo}.
 
-%------------------------------------------------------------ 
-\subsubsection{getUserDaily}
-\label{sec:DailyFile}
-%------------------------------------------------------------ 
-The \texttt{getUserDaily} function will load a user-supplied text file and convert it to the Daily dataframe. The file should have two columns, the first dates, the second values.  The dates are formatted either mm/dd/yyyy or yyyy-mm-dd. Using a 4-digit year is required. This function has the following inputs: filePath, fileName,hasHeader (TRUE/FALSE), separator, qUnit, and interactive (TRUE/FALSE). filePath is a string that defines the path to your file, and the string can either be a full path, or path relative to your R working directory. The input fileName is a string that defines the file name (including the extension).
+<<meta3, eval=TRUE>>=
 
-Text files that contain this sort of data require some sort of a separator, for example, a \enquote{csv} file (comma-separated value) file uses a comma to separate the date and value column. A tab delimited file would use a tab (\verb@"\t"@) rather than the comma (\texttt{"},\texttt{"}). Define the type of separator you choose to use in the function call in the \texttt{"}separator\texttt{"} argument, the default is \texttt{"},\texttt{"}. Another function input is a logical variable: hasHeader.  The default is TRUE. If your data does not have column names, set this variable to FALSE.
+siteInfo <- attr(dischargeWI, "siteInfo")
+head(siteInfo)
 
-Finally, qUnit is a numeric argument that defines the discharge units used in the input file.  The default is qUnit = 1 which assumes discharge is in cubic feet per second.  If the discharge in the file is already in cubic meters per second then set qUnit = 2.  If it is in some other units (like liters per second or acre-feet per day), the user must pre-process the data with a unit conversion that changes it to either cubic feet per second or cubic meters per second.
+variableInfo <- attr(dischargeWI, "variableInfo")
 
-So, if you have a file called \enquote{ChoptankRiverFlow.txt} located in a folder called \enquote{RData} on the C drive (this example is for the Windows\textregistered\ operating systems), and the file is structured as follows (tab-separated):
 
-
-% \singlespacing
-\begin{verbatim}
-date  Qdaily
-10/1/1999  107
-10/2/1999  85
-10/3/1999	76
-10/4/1999	76
-10/5/1999	113
-10/6/1999	98
-...
-\end{verbatim}
-% \doublespacing
-
-The call to open this file, convert the discharge to cubic meters per second, and populate the Daily data frame would be:
-<<openDaily, eval = FALSE>>=
-fileName <- "ChoptankRiverFlow.txt"
-filePath <-  "C:/RData/"
-Daily <-getFileDaily(filePath,fileName,
-                    separator="\t")
 @
 
-Microsoft\textregistered\ Excel files can be a bit tricky to import into R directly. The simplest way to get Excel data into R is to open the Excel file in Excel, then save it as a .csv file (comma-separated values). 
-
-\FloatBarrier
-
-%------------------------------------------------------------ 
-\subsubsection{getUserSample}
-\label{sec:SampleFile}
-%------------------------------------------------------------ 
+For data obtained from \texttt{readNWISpeak}, \texttt{readNWISmeas}, and \texttt{readNWISrating}, the \texttt{comment} attribute is useful.
 
-The \texttt{getUserSample} function will import a user-generated file and populate the Sample dataframe. The difference between sample data and discharge data is that the code requires a third column that contains a remark code, either blank or \verb@"<"@, which will tell the program that the data were \enquote{left-censored} (or, below the detection limit of the sensor). Therefore, the data must be in the form: date, remark, value.   An example of a comma-delimited file is:
+<<meta5, eval=FALSE>>=
+comment(peakData)
 
-\singlespacing
-\begin{verbatim}
-cdate;remarkCode;Nitrate
-10/7/1999,,1.4
-11/4/1999,<,0.99
-12/3/1999,,1.42
-1/4/2000,,1.59
-2/3/2000,,1.54
-...
-\end{verbatim}
-
-The call to open this file, and populate the Sample dataframe is:
-<<openSample, eval = FALSE>>=
-fileName <- "ChoptankRiverNitrate.csv"
-filePath <-  "C:/RData/"
-Sample <-getUserSample(filePath,fileName,
-                                separator=",")
+#Which is equivalent to:
+# attr(peakData, "comment")
 @
 
-When multiple constituents are to be summed, the format can be date, remark\_A, value\_A, remark\_b, value\_b, etc... A tab-separated example might look like the file below, where the columns are date, remark dissolved phosphate (rdp), dissolved phosphate (dp), remark particulate phosphorus (rpp), particulate phosphorus (pp), remark total phosphate (rtp), and total phosphate (tp):
+A subset (due to space considerations) of the \texttt{comment} metadata is shown here:
 
-\singlespacing
-\begin{verbatim}
-date  rdp	dp	rpp	pp	rtp	tp
-2003-02-15		0.020		0.500		
-2003-06-30	<	0.010		0.300		
-2004-09-15	<	0.005	<	0.200		
-2005-01-30						0.430
-2005-05-30					<	0.050
-2005-10-30					<	0.020
-...
-\end{verbatim}
+<<meta6, eval=TRUE>>=
+comment(peakData)[c(1:15,58:66)]
 
-
-<<openSample2, eval = FALSE>>=
-fileName <- "ChoptankPhosphorus.txt"
-filePath <-  "C:/RData/"
-Sample <-getUserSample(filePath,fileName,
-                                separator="\t")
 @
 
 
-\FloatBarrier
-
-%------------------------------------------------------------
-\subsection{Merge Report}
-%------------------------------------------------------------
-Finally, there is a function called \texttt{mergeReport} that will look at both the Daily and Sample dataframe, and populate Q and LogQ columns into the Sample dataframe. The default arguments are Daily and Sample, however if you want to use other similarly structured dataframes, you can specify localDaily or localSample. Once \texttt{mergeReport} has been run, the Sample dataframe will be augmented with the daily discharges for all the days with samples.  None of the water quality functions in EGRET will work without first having run the \texttt{mergeReport} function.
-
-
-<<mergeExample>>=
-siteNumber <- "01491000"
-parameterCd <- "00631"  # Nitrate
-startDate <- "2000-01-01"
-endDate <- "2013-01-01"
-
-Daily <- getNWISDaily(siteNumber, "00060", startDate, endDate)
-Sample <- getNWISSample(siteNumber,parameterCd, startDate, endDate)
-Sample <- mergeReport(Daily,Sample)
-names(Sample)
-@
-
-\FloatBarrier
-
-% %------------------------------------------------------------
-% \subsection{EGRET Plots}
-% %------------------------------------------------------------
-% The Daily, Sample, and INFO dataframes (described in Secs. \ref{INFOsubsection} - \ref{Samplesubsection}) are specifically formatted to be used with the EGRET package. The EGRET package has powerful modeling capabilities that use WRTDS, but EGRET also has graphing and tabular tools for exploring the data without using the WRTDS algorithm. See the EGRET vignette, user guide, and/or wiki (\url{https://github.com/USGS-R/EGRET/wiki}) for detailed information. Figure \ref{fig:egretEx} shows one of the plotting functions that can be used directly from the dataRetrieval dataframes.
-% 
-% <<egretEx, echo=TRUE, eval=TRUE, fig.cap="Default \\texttt{multiPlotDataOverview}">>=
-% # Continuing Choptank example from the previous sections
-% library(EGRET)
-% multiPlotDataOverview()
-% @
-% 
-% \FloatBarrier
-% \clearpage
-
-
-%------------------------------------------------------------
-\section{Summary}
-\label{sec:summary}
-%------------------------------------------------------------
-
-Tables \ref{tab:dataRetrievalFunctions1},\ref{tab:dataRetrievalOrg}, and \ref{tab:dataRetrievalMisc} summarize the data retrieval functions:
-
-\begin{table}
-{\footnotesize
-  \begin{threeparttable}[b]
-  \caption{dataRetrieval functions}
-  \label{tab:dataRetrievalFunctions1}
-%   \doublespacing
-\begin{tabular}{lll}
-  \hline
-\multicolumn{1}{c}{\textbf{\textsf{Data Type}}} &
-\multicolumn{1}{c}{\textbf{\textsf{Function Name}}} &
-\multicolumn{1}{c}{\textbf{\textsf{Description}}} \\ [0pt]
-  \hline
-  Daily & \texttt{getNWISdvData} & Raw USGS daily data \\ 
-  [5pt]Daily & \texttt{getNWISData} & Raw USGS data in generalized query \\
-  [5pt]Daily\tnote{1} & \texttt{getNWISDaily} & USGS daily values \\ 
-  [5pt]Daily\tnote{1} & \texttt{getUserDaily} & User-generated daily data \\ 
-  [5pt]Sample & \texttt{getNWISqwData} & Raw USGS water quality data \\
-  [5pt]Sample & \texttt{getWQPqwData} & Raw Water Quality Data Portal data \\ 
-  [5pt]Sample & \texttt{getWQPData} & Raw Water Quality Portal data in generalized query\\
-  [5pt]Sample\tnote{1} & \texttt{getNWISSample} & USGS water quality data\\
-  [5pt]Sample\tnote{1} & \texttt{getWQPSample} & Water Quality Data Portal data \\
-  [5pt]Sample\tnote{1} & \texttt{getUserSample} & User-generated sample data \\ 
-  [5pt]Unit & \texttt{getNWISunitData} & Raw USGS instantaneous data \\
-  [5pt]Information\tnote{1} & \texttt{getNWISInfo} & Station and parameter code information extracted from USGS\\ 
-  [5pt]Information\tnote{1} & \texttt{getWQPInfo} & Station and parameter information extracted from Water Quality Portal \\
-  [5pt]Information\tnote{1} & \texttt{getUserInfo} & Station and parameter information extracted from user-generated file \\ 
-  [5pt]Information & \texttt{getNWISPcodeInfo} & USGS parameter code information \\ 
-  [5pt]Information & \texttt{getNWISSiteInfo} & USGS station information \\ 
-  [5pt]Information & \texttt{getNWISDataAvailability} & Data available at USGS stations \\
-  [5pt]Information & \texttt{getNWISSites} & USGS station information in generalized query \\ 
-   \hline
-\end{tabular}
-
-  \begin{tablenotes}
-    \item[1] Indicates that the function creates a data frame suitable for use in EGRET software, otherwise data is returned in the exact form that it was received.
-  \end{tablenotes}
- \end{threeparttable}
-}
-\end{table}
-
-
-\begin{table}[!ht]
-\begin{minipage}{\linewidth}
-{\footnotesize
-\begin{threeparttable}[b]
-\caption{dataRetrieval functions organization} 
-\label{tab:dataRetrievalOrg}
-\begin{tabular}{|c|ccc|}
-
-\multicolumn{1}{c}{\textbf{\textsf{}}} &
-\multicolumn{1}{c}{\textbf{\textsf{Site Query}}} &
-\multicolumn{1}{c}{\textbf{\textsf{Meta Data}}} &
-\multicolumn{1}{c}{\textbf{\textsf{Data Retrieval}}} \\  [0pt]
-\hline
-\textbf{NWIS:} & \texttt{getNWISSites} & \texttt{getNWISInfo}\tnote{1} & \texttt{getNWISData}  \\
-\textit{Daily} & \texttt{getNWISDataAvailability}& \texttt{getNWISSiteInfo} & \texttt{getNWISDaily}\tnote{1} \\
-\textit{Unit/Instantaneous} &  & \texttt{getNWISPcodeInfo} & \texttt{getNWISSample}\tnote{1} \\
-\textit{Groundwater} & &  & \texttt{getNWISdvData}   \\
-\textit{Water Quality} & & & \texttt{getNWISunitData}\\
- & & & \texttt{getNWISqwData} \\
-\hline
-\textbf{Water Quality Portal:} & \texttt{getWQPSites} & \texttt{getWQPInfo}\tnote{1} &  \texttt{getWQPSample}\tnote{1}\\
-\textit{USGS} & & & \texttt{getWQPqwData} \\
-\textit{EPA} & & & \texttt{getWQPData} \\
-\textit{USDA} & & &  \\
-\hline
-\textbf{User-supplied files:} & & \texttt{getUserInfo}\tnote{1}  & \texttt{getUserDaily}\tnote{1}  \\
-\textit{Daily} & & & \texttt{getUserSample}\tnote{1}  \\
-\textit{Sample} & & &  \\
-\textit{Site Information} & & & \\
-
-   \hline
-\end{tabular}
-  \begin{tablenotes}
-    \item[1] Indicates that the function creates a data frame suitable for use in EGRET software, otherwise data is returned in the exact form that it was received.
-  \end{tablenotes}
- \end{threeparttable}
-}
-\end{minipage}
-\end{table}
-
-
-
-\begin{table}[!ht]
-\begin{minipage}{\linewidth}
-{\footnotesize
-\caption{Supplemental dataRetrieval functions} 
-\label{tab:dataRetrievalMisc}
-\begin{tabular}{ll}
-  \hline
-\multicolumn{1}{c}{\textbf{\textsf{Function Name}}} &
-\multicolumn{1}{c}{\textbf{\textsf{Description}}} \\  [0pt]
-  \hline
-  \texttt{compressData} &  Converts value/qualifier into ConcLow, ConcHigh, Uncen\\
-  [5pt]\texttt{getRDB1Data} & Retrieves and converts RDB data to dataframe\\
-  [5pt]\texttt{getWaterML1Data} & Retrieves and converts WaterML1 data to dataframe\\
-  [5pt]\texttt{getWaterML2Data} & Retrieves and converts WaterML2 data to dataframe\\
-  [5pt]\texttt{mergeReport} & Merges flow data from the daily record into the sample record\\
-  [5pt]\texttt{populateDateColumns} & Generates Julian, Month, Day, DecYear, and MonthSeq columns\\
-  [5pt]\texttt{removeDuplicates} & Removes duplicated rows\\
-  [5pt]\texttt{renameColumns} & Renames columns from raw data retrievals\\
-   \hline
-\end{tabular}
-}
-\end{minipage}
-\end{table}
-
-\FloatBarrier
-\clearpage
-
-
 %------------------------------------------------------------ 
 \section{Getting Started in R}
 \label{sec:appendix1}
@@ -1154,21 +884,15 @@ This section describes the options for downloading and installing the dataRetrie
 %------------------------------------------------------------ 
 If you are new to R, you will need to first install the latest version of R, which can be found here: \url{http://www.r-project.org/}.
 
-At any time, you can get information about any function in R by typing a question mark before the functions name.  This will open a file (in RStudio, in the Help window) that describes the function, the required arguments, and provides working examples.
+At any time, you can get information about any function in R by typing a question mark before the function's name.  This will open a help file (in RStudio, in the Help window), similar to Figure \ref{fig:help}, that describes the function, the required arguments, and provides working examples. To see the raw code for a particular function, type the name of the function without parentheses.
+
 
 <<helpFunc,eval = FALSE>>=
-?removeDuplicates
+?readNWISpCode
 @
 
-This will open a help file similar to Figure \ref{fig:help}.
-
 \FloatBarrier
 
-To see the raw code for a particular code, type the name of the function, without parentheses.:
-<<rawFunc,eval = TRUE>>=
-removeDuplicates
-@
-
 
 \begin{figure}[ht!]
 \centering
@@ -1190,10 +914,7 @@ vignette(dataRetrieval)
 The following command installs dataRetrieval and subsequent required packages:
 
 <<installFromCran,eval = FALSE>>=
-install.packages("dataRetrieval", 
-repos=c("http://usgs-r.github.com","http://cran.us.r-project.org"),
-dependencies=TRUE,
-type="both")
+install.packages("dataRetrieval")
 @
 
 After installing the package, you need to open the library each time you re-start R.  This is done with the simple command:
@@ -1209,16 +930,15 @@ library(dataRetrieval)
 There are a few steps that are required in order to create a table in Microsoft\textregistered\ software (Excel, Word, PowerPoint, etc.) from an R dataframe. There are certainly a variety of good methods, one of which is detailed here. The example we will step through here will be to create a table in Microsoft Excel based on the dataframe tableData:
 
 <<label=getSiteApp, echo=TRUE>>=
-availableData <- getNWISDataAvailability(siteNumber)
-dailyData <- availableData["dv" == availableData$service,]
-dailyData <- dailyData["00003" == dailyData$statCd,]
+availableData <- whatNWISdata(siteNumber, "dv")
+dailyData <- availableData["00003" == availableData$stat_cd,]
 
 tableData <- with(dailyData, 
       data.frame(
         shortName=srsname, 
-        Start=startDate, 
-        End=endDate, 
-        Count=count,
+        Start=begin_date, 
+        End=end_date, 
+        Count=count_nu,
         Units=parameter_units)
       )
 tableData
@@ -1256,12 +976,12 @@ Next, follow the steps below to open this file in Excel:
 \item Use the many formatting tools within Excel to customize the table
 \end{enumerate}
 
-From Excel, it is simple to copy and paste the tables in other Microsoft\textregistered\ software. An example using one of the default Excel table formats is here.
+From Excel, it is simple to copy and paste the tables into other Microsoft\textregistered\ software. An example using one of the default Excel table formats is here. Additional formatting could be required in Excel, for example converting u to $\mu$.
 
 \begin{figure}[ht!]
 \centering
  \resizebox{0.9\textwidth}{!}{\includegraphics{table1.png}} 
-\caption{A simple table produced in Microsoft\textregistered\ Excel. Additional formatting will be requried, for example converting u to  $\mu$ }
+\caption{A simple table produced in Microsoft\textregistered\ Excel.}
 \label{overflow}
 \end{figure}
 
diff --git a/vignettes/figure/getNWIStemperaturePlot-1.pdf b/vignettes/figure/getNWIStemperaturePlot-1.pdf
new file mode 100644
index 0000000000000000000000000000000000000000..e3f6a6ad1be461db191e843900a5882175716579
Binary files /dev/null and b/vignettes/figure/getNWIStemperaturePlot-1.pdf differ
diff --git a/vignettes/figure/getNWIStemperaturePlot.pdf b/vignettes/figure/getNWIStemperaturePlot.pdf
index eb7a2d15c3231c6885fa74e26a6d7b1d36ec1a48..154a13b0b6cace0fa62ddeacecdcc7dace8e6187 100644
Binary files a/vignettes/figure/getNWIStemperaturePlot.pdf and b/vignettes/figure/getNWIStemperaturePlot.pdf differ
diff --git a/vignettes/figure/getQWtemperaturePlot-1.pdf b/vignettes/figure/getQWtemperaturePlot-1.pdf
new file mode 100644
index 0000000000000000000000000000000000000000..8e78b6c4c06c86e8fd6a8be02bf0611b07ce12f0
Binary files /dev/null and b/vignettes/figure/getQWtemperaturePlot-1.pdf differ