diff --git a/R/importWQP.R b/R/importWQP.R
index 2602077c5f0298eff7706425a0de51e1a6d18587..aa74bfaed8194791cf7e858fafc883233dbffa61 100644
--- a/R/importWQP.R
+++ b/R/importWQP.R
@@ -1,4 +1,4 @@
-#' Basic Water Quality Portal Data grabber
+#' Basic Water Quality Portal Data parser
 #'
 #' Imports data from the Water Quality Portal based on a specified url.
 #' 
@@ -11,6 +11,7 @@
 #' @return retval dataframe raw data returned from the Water Quality Portal. Additionally, a POSIXct dateTime column is supplied for 
 #' start and end times.
 #' @export
+#' @seealso \code{\link{readWQPdata}}, \code{\link{readWQPqw}}, \code{\link{whatWQPsites}}
 #' @import RCurl
 #' @import httr
 #' @import lubridate
@@ -22,6 +23,8 @@
 #' rawSample <- importWQP(rawSampleURL)
 #' url2 <- paste0(rawSampleURL,"&zip=yes")
 #' rawSample2 <- importWQP(url2, TRUE)
+#' STORETex <- constructWQPURL('WIDNR_WQX-10032762','Specific conductance', '', '')
+#' STORETdata <- importWQP(STORETex)
 #' }
 importWQP <- function(url, zip=FALSE, tz=""){
   
@@ -109,15 +112,46 @@ importWQP <- function(url, zip=FALSE, tz=""){
       
     }
     
-    if(any(!is.na(timeZoneEnd))){
-      
+    if(any(!is.na(timeZoneEnd))){      
       retval$ActivityEndDateTime <- with(retval, as.POSIXct(paste(ActivityEndDate, ActivityEndTime.Time),format="%Y-%m-%d %H:%M:%S", tz = "UTC"))
       retval$ActivityEndDateTime <- retval$ActivityEndDateTime + timeZoneEnd*60*60
-      retval$ActivityEndDateTime <- as.POSIXct(retval$ActivityEndDateTime)
-      
-      
+      retval$ActivityEndDateTime <- as.POSIXct(retval$ActivityEndDateTime)      
+    }
+    
+    if(all(is.na(retval$ActivityEndDateTime))){
+      retval$ActivityEndDateTime <- NULL
+    }
+            
+    siteInfo <- whatWQPsites(siteid=paste(unique(retval$MonitoringLocationIdentifier),collapse=","))
+    
+    siteInfoCommon <- data.frame(station_nm=siteInfo$MonitoringLocationName,
+                                 agency_cd=siteInfo$OrganizationIdentifier,
+                                 site_no=siteInfo$MonitoringLocationIdentifier,
+                                 dec_lat_va=siteInfo$LatitudeMeasure,
+                                 dec_lon_va=siteInfo$LongitudeMeasure,
+                                 hucCd=siteInfo$HUCEightDigitCode,
+                                 stringsAsFactors=FALSE)
+    
+    siteInfo <- cbind(siteInfoCommon, siteInfo)
+                                 
+    
+    variableInfo <- data.frame(characteristicName=retval$CharacteristicName,
+                               parameterCd=retval$USGSPCode,
+                               param_units=retval$ResultMeasure.MeasureUnitCode,
+                               valueType=retval$ResultSampleFractionText,
+                               stringsAsFactors=FALSE)
+    variableInfo <- unique(variableInfo)
+    
+    if(any(!is.na(variableInfo$parameterCd))){
+      pCodeToName <- pCodeToName # force the lazy-loaded package dataset into the local environment
+      varExtras <- pCodeToName[pCodeToName$parm_cd %in% unique(variableInfo$parameterCd[!is.na(variableInfo$parameterCd)]),]
+      names(varExtras)[names(varExtras) == "parm_cd"] <- "parameterCd"
+      variableInfo <- merge(variableInfo, varExtras, by="parameterCd")
     }
-        
+    
+    attr(retval, "siteInfo") <- siteInfo
+    attr(retval, "variableInfo") <- variableInfo
+    
     return(retval)
     
   } else {
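A minimal sketch, reusing the URL from the roxygen example above, of how the new siteInfo and variableInfo attributes on the returned data frame can be inspected (column names follow the siteInfoCommon and variableInfo data frames built in this hunk):

rawSampleURL <- constructWQPURL('USGS-01594440', '01075', '', '')
rawSample <- importWQP(rawSampleURL)
siteInfo <- attr(rawSample, "siteInfo")          # station_nm, agency_cd, site_no, dec_lat_va, dec_lon_va, hucCd, ...
variableInfo <- attr(rawSample, "variableInfo")  # characteristicName, parameterCd, param_units, valueType
unique(siteInfo$station_nm)
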
diff --git a/R/importWaterML1.r b/R/importWaterML1.r
index 7ebe3676c6652cff82bdfddff5a60146fdeae461..2e4e72c9a73a14d7be7bb4ac29afd9606ef7c24e 100644
--- a/R/importWaterML1.r
+++ b/R/importWaterML1.r
@@ -310,9 +310,9 @@ importWaterML1 <- function(obs_url,asDateTime=FALSE, tz=""){
         colNames <- names(df)
         
         if( exists("qualName")){
-          columnsOrdered <- c("agency","site_no","dateTime","tz_cd",attributeNames[attributeNames != "dateTime"],qualName,valueName)
+          columnsOrdered <- c("agency_cd","site_no","dateTime","tz_cd",attributeNames[attributeNames != "dateTime"],qualName,valueName)
         } else {
-          columnsOrdered <- c("agency","site_no","dateTime","tz_cd",attributeNames[attributeNames != "dateTime"],valueName)
+          columnsOrdered <- c("agency_cd","site_no","dateTime","tz_cd",attributeNames[attributeNames != "dateTime"],valueName)
         }
         
         columnsOrderd <- columnsOrdered[columnsOrdered %in% names(df)]
@@ -343,7 +343,7 @@ importWaterML1 <- function(obs_url,asDateTime=FALSE, tz=""){
     
     siteInfo <- data.frame(station_nm=extraSiteData$siteName,
                            site_no=extraSiteData$siteCode$text,
-                           agency=extraSiteData$siteCode$.attrs[["agencyCode"]],
+                           agency_cd=extraSiteData$siteCode$.attrs[["agencyCode"]],
                            timeZoneOffset=extraSiteData$timeZoneInfo$defaultTimeZone[1],
                            timeZoneAbbreviation=extraSiteData$timeZoneInfo$defaultTimeZone[2],
                            dec_lat_va=as.numeric(extraSiteData$geoLocation$geogLocation$latitude),
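A minimal sketch (obs_url is assumed to come from constructNWISURL, as elsewhere in the package) confirming that the renamed agency_cd column now appears in the returned data frame:

df <- importWaterML1(obs_url, asDateTime = TRUE)
"agency_cd" %in% names(df)   # TRUE, per the columnsOrdered vector above
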
diff --git a/R/importWaterML2.r b/R/importWaterML2.r
index fe45e3a08a9b942b33542bc0d5dcbc4d0e5388b2..8879d7710db62ec8bfb3f1a29ea8e550a1667248 100644
--- a/R/importWaterML2.r
+++ b/R/importWaterML2.r
@@ -1,6 +1,7 @@
 #' Function to return data from the WaterML2 data
 #'
-#' This function accepts a url parameter for a WaterML2 getObservation 
+#' This function accepts a url parameter for a WaterML2 getObservation request. This function is still under development,
+#' but the general functionality is correct.
 #'
 #' @param obs_url character containing the url for the retrieval
 #' @param asDateTime logical, if TRUE returns date and time as POSIXct, if FALSE, Date
@@ -8,7 +9,7 @@
 #' datetimes to UTC (properly accounting for daylight savings times based on the data's provided tz_cd column).
 #' Possible values to provide are "America/New_York","America/Chicago", "America/Denver","America/Los_Angeles",
 #' "America/Anchorage","America/Honolulu","America/Jamaica","America/Managua","America/Phoenix", and "America/Metlakatla"
-#' @return mergedDF a data frame containing columns agency, site, dateTime, values, and remark codes for all requested combinations
+#' @return mergedDF a data frame containing the columns time, value, description, qualifier, and identifier
 #' @export
 #' @import XML
 #' @import RCurl
diff --git a/R/readNWISunit.r b/R/readNWISunit.r
index dbfe2e913acca7a356aa076e7b36a8f094b8e09e..0a1a274b25033b14c127aad076f5ffb863e34456 100644
--- a/R/readNWISunit.r
+++ b/R/readNWISunit.r
@@ -8,22 +8,22 @@
 #' @param parameterCd character USGS parameter code.  This is usually an 5 digit number.
 #' @param startDate character starting date for data retrieval in the form YYYY-MM-DD.
 #' @param endDate character ending date for data retrieval in the form YYYY-MM-DD.
-#' @param tz character to set timezone attribute of datetime. Default is an empty quote, which converts the 
-#' datetimes to UTC (properly accounting for daylight savings times based on the data's provided tz_cd column).
+#' @param tz character to set timezone attribute of dateTime. Default is an empty quote, which converts the 
+#' dateTimes to UTC (properly accounting for daylight savings times based on the data's provided tz_cd column).
 #' Possible values to provide are "America/New_York","America/Chicago", "America/Denver","America/Los_Angeles",
 #' "America/Anchorage","America/Honolulu","America/Jamaica","America/Managua","America/Phoenix", and "America/Metlakatla"
 #' @keywords data import USGS web service
 #' @return A data frame with the following columns:
 #' \tabular{lll}{
 #' Name \tab Type \tab Description \cr
-#' agency \tab character \tab The NWIS code for the agency reporting the data\cr
-#' site \tab character \tab The USGS site number \cr
-#' datetime \tab POSIXct \tab The date and time of the value converted to UTC \cr 
-#' tz_cd \tab character \tab The time zone code for datetime \cr
+#' agency_cd \tab character \tab The NWIS code for the agency reporting the data\cr
+#' site_no \tab character \tab The USGS site number \cr
+#' dateTime \tab POSIXct \tab The date and time of the value converted to UTC \cr 
+#' tz_cd \tab character \tab The time zone code for dateTime \cr
 #' code \tab character \tab Any codes that qualify the corresponding value\cr
 #' value \tab numeric \tab The numeric value for the parameter \cr
 #' }
-#' Note that code and value are repeated for the parameters requested. The names are of the form 
+#' Note that code and value are repeated for the parameters requested. The names are of the form: 
 #' X_D_P_S, where X is literal, 
 #' D is an option description of the parameter, 
 #' P is the parameter code, 
@@ -73,17 +73,12 @@ readNWISuv <- function (siteNumbers,parameterCd,startDate="",endDate="", tz=""){
 #' Name \tab Type \tab Description \cr
 #' agency_cd \tab character \tab The NWIS code for the agency reporting the data\cr
 #' site_no \tab character \tab The USGS site number \cr
-#' datetime \tab POSIXct \tab The date and time of the value converted to UTC (if asDateTime = TRUE), \cr 
+#' dateTime \tab POSIXct \tab The date and time of the value converted to UTC (if asDateTime = TRUE), \cr 
 #' \tab character \tab or raw character string (if asDateTime = FALSE) \cr
-#' tz_cd \tab character \tab The time zone code for datetime \cr
+#' tz_cd \tab character \tab The time zone code for dateTime \cr
 #' code \tab character \tab Any codes that qualify the corresponding value\cr
 #' value \tab numeric \tab The numeric value for the parameter \cr
 #' }
-#' Note that code and value are repeated for the parameters requested. The names are of the form 
-#' XD_P_S, where X is literal, 
-#' D is an option description of the parameter, 
-#' P is the parameter code, 
-#' and S is the statistic code (if applicable).
 #' 
 #' There are also several useful attributes attached to the data frame:
 #' \tabular{lll}{
@@ -91,6 +86,7 @@ readNWISuv <- function (siteNumbers,parameterCd,startDate="",endDate="", tz=""){
 #' url \tab character \tab The url used to generate the data \cr
 #' queryTime \tab POSIXct \tab The time the data was returned \cr
 #' comment \tab character \tab Header comments from the RDB file \cr
+#' siteInfo \tab data.frame \tab A data frame containing information on the requested sites \cr
 #' }
 #' @export
 #' @examples
@@ -102,6 +98,11 @@ readNWISpeak <- function (siteNumber,startDate="",endDate=""){
   url <- constructNWISURL(siteNumber,NA,startDate,endDate,"peak")
   
   data <- importRDB1(url, asDateTime=FALSE)
+  siteInfo <- readNWISsite(siteNumber)
+  
+  attr(data, "siteInfo") <- siteInfo
+  attr(data, "variableInfo") <- NULL
+  attr(data, "statisticInfo") <- NULL
     
   return (data)
 }
@@ -143,6 +144,12 @@ readNWISrating <- function (siteNumber,type="base"){
     attr(data, "RATING") <- Rat
   }
   
+  siteInfo <- readNWISsite(siteNumber)
+  
+  attr(data, "siteInfo") <- siteInfo
+  attr(data, "variableInfo") <- NULL
+  attr(data, "statisticInfo") <- NULL
+  
   return (data)
 }
 
@@ -153,8 +160,8 @@ readNWISrating <- function (siteNumber,type="base"){
 #' @param siteNumber character USGS site number.  This is usually an 8 digit number
 #' @param startDate character starting date for data retrieval in the form YYYY-MM-DD.
 #' @param endDate character ending date for data retrieval in the form YYYY-MM-DD.
-#' @param tz character to set timezone attribute of datetime. Default is an empty quote, which converts the 
-#' datetimes to UTC (properly accounting for daylight savings times based on the data's provided tz_cd column).
+#' @param tz character to set timezone attribute of dateTime. Default is an empty quote, which converts the 
+#' dateTimes to UTC (properly accounting for daylight savings times based on the data's provided tz_cd column).
 #' Possible values to provide are "America/New_York","America/Chicago", "America/Denver","America/Los_Angeles",
 #' "America/Anchorage","America/Honolulu","America/Jamaica","America/Managua","America/Phoenix", and "America/Metlakatla"
 #' @return A data frame with at least the following columns:
@@ -162,15 +169,18 @@ readNWISrating <- function (siteNumber,type="base"){
 #' Name \tab Type \tab Description \cr
 #' agency_cd \tab character \tab The NWIS code for the agency reporting the data\cr
 #' site_no \tab character \tab The USGS site number \cr
-#' tz_cd \tab character \tab The time zone code for datetime \cr
+#' tz_cd \tab character \tab The time zone code for dateTime \cr
 #' }
 #' 
+#' See \url{http://waterdata.usgs.gov/usa/nwis/sw} for details about surface water.
+#' 
 #' There are also several useful attributes attached to the data frame:
 #' \tabular{lll}{
 #' Name \tab Type \tab Description \cr
 #' url \tab character \tab The url used to generate the data \cr
 #' queryTime \tab POSIXct \tab The time the data was returned \cr
 #' comment \tab character \tab Header comments from the RDB file \cr
+#' siteInfo \tab data.frame \tab A data frame containing information on the requested sites \cr
 #' }
 #' @export
 #' @examples
@@ -187,13 +197,20 @@ readNWISmeas <- function (siteNumber,startDate="",endDate="", tz=""){
     data$diff_from_rating_pc <- as.numeric(data$diff_from_rating_pc)
   }
   
+  siteInfo <- readNWISsite(siteNumber)
+  
+  attr(data, "siteInfo") <- siteInfo
+  attr(data, "variableInfo") <- NULL
+  attr(data, "statisticInfo") <- NULL
+  
   return (data)
 }
 
 #' Reads groundwater level measurements from NWISweb.
 #'
 #' Reads groundwater level measurements from NWISweb. Mixed date/times come back from the service 
-#' depending on the year that the data was collected. 
+#' depending on the year that the data was collected. See \url{http://waterdata.usgs.gov/usa/nwis/gw}
+#' for details about groundwater data.
 #'
 #' @param siteNumbers character USGS site number (or multiple sites).  This is usually an 8 digit number
 #' @param startDate character starting date for data retrieval in the form YYYY-MM-DD.
@@ -201,10 +218,10 @@ readNWISmeas <- function (siteNumber,startDate="",endDate="", tz=""){
 #' @return A data frame with the following columns:
 #' \tabular{lll}{
 #' Name \tab Type \tab Description \cr
-#' agency \tab character \tab The NWIS code for the agency reporting the data\cr
-#' site \tab character \tab The USGS site number \cr
-#' datetime \tab character \tab The date and time of the value as a character \cr 
-#' tz_cd \tab character \tab The time zone code for datetime \cr
+#' agency_cd \tab character \tab The NWIS code for the agency reporting the data\cr
+#' site_no \tab character \tab The USGS site number \cr
+#' dateTime \tab character \tab The date and time of the value as a character \cr 
+#' tz_cd \tab character \tab The time zone code for dateTime \cr
 #' code \tab character \tab Any codes that qualify the corresponding value\cr
 #' value \tab numeric \tab The numeric value for the parameter \cr
 #' }
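A minimal sketch (hypothetical USGS site number) of the attribute pattern these hunks add to readNWISpeak, readNWISrating, and readNWISmeas:

peakData <- readNWISpeak("05114000")    # hypothetical site number
siteInfo <- attr(peakData, "siteInfo")  # data.frame returned by readNWISsite(siteNumber)
siteInfo$station_nm
attr(peakData, "variableInfo")          # NULL for these services
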
diff --git a/R/tabbedDataRetrievals.R b/R/tabbedDataRetrievals.R
index 94aa70f4d18df51a676ad048e78e9c15326b975b..2e022766fafc904e1d1afecdc682adb10572a7e5 100644
--- a/R/tabbedDataRetrievals.R
+++ b/R/tabbedDataRetrievals.R
@@ -31,3 +31,12 @@ NULL
 #' @docType data
 #' @keywords USGS parameterCd
 NULL
+
+#' Data to convert USGS parameter codes to characteristic names
+#'
+#' Data pulled from Water Quality Portal on November 25, 2014.
+#'
+#' @name pCodeToName
+#' @docType data
+#' @keywords USGS parameterCd
+NULL
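A minimal sketch of using the new pCodeToName dataset the way the importWQP hunk above does, matching on parm_cd ('01075' is the parameter code from the existing examples):

data(pCodeToName)
pCodeToName[pCodeToName$parm_cd == "01075", ]
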
diff --git a/data/pCodeToName.RData b/data/pCodeToName.RData
new file mode 100644
index 0000000000000000000000000000000000000000..185cb9d94aeaedc7c2bc0d097c885c6e4ed4af59
Binary files /dev/null and b/data/pCodeToName.RData differ
diff --git a/inst/doc/dataRetrieval.pdf b/inst/doc/dataRetrieval.pdf
index 3ea8c0a393d6a97e41448cf2f8d00b99900083da..c3d7f1240903f62cedf9525609620b644acbd696 100644
Binary files a/inst/doc/dataRetrieval.pdf and b/inst/doc/dataRetrieval.pdf differ
diff --git a/man/importWQP.Rd b/man/importWQP.Rd
index c77b43468dbbc11f58f668549c779754687304c4..aaa844df5ea1ee2cb83d02dde41918fe3022ccdb 100644
--- a/man/importWQP.Rd
+++ b/man/importWQP.Rd
@@ -1,7 +1,7 @@
 % Generated by roxygen2 (4.0.2): do not edit by hand
 \name{importWQP}
 \alias{importWQP}
-\title{Basic Water Quality Portal Data grabber}
+\title{Basic Water Quality Portal Data parser}
 \usage{
 importWQP(url, zip = FALSE, tz = "")
 }
@@ -30,8 +30,13 @@ rawSampleURL <- constructWQPURL('USGS-01594440','01075', '', '')
 rawSample <- importWQP(rawSampleURL)
 url2 <- paste0(rawSampleURL,"&zip=yes")
 rawSample2 <- importWQP(url2, TRUE)
+STORETex <- constructWQPURL('WIDNR_WQX-10032762','Specific conductance', '', '')
+STORETdata <- importWQP(STORETex)
 }
 }
+\seealso{
+\code{\link{readWQPdata}}, \code{\link{readWQPqw}}, \code{\link{whatWQPsites}}
+}
 \keyword{USGS}
 \keyword{data}
 \keyword{import}
diff --git a/man/importWaterML2.Rd b/man/importWaterML2.Rd
index 8175999b279f93276c7a7ca78776d2977d93407e..9fca77fbe3efab41824e2928eba59ec78901cee2 100644
--- a/man/importWaterML2.Rd
+++ b/man/importWaterML2.Rd
@@ -16,10 +16,11 @@ Possible values to provide are "America/New_York","America/Chicago", "America/De
 "America/Anchorage","America/Honolulu","America/Jamaica","America/Managua","America/Phoenix", and "America/Metlakatla"}
 }
 \value{
-mergedDF a data frame containing columns agency, site, dateTime, values, and remark codes for all requested combinations
+mergedDF a data frame containing the columns time, value, description, qualifier, and identifier
 }
 \description{
-This function accepts a url parameter for a WaterML2 getObservation
+This function accepts a url parameter for a WaterML2 getObservation request. This function is still under development,
+but the general functionality is correct.
 }
 \examples{
 baseURL <- "http://waterservices.usgs.gov/nwis/dv/?format=waterml,2.0"
diff --git a/man/pCodeToName.Rd b/man/pCodeToName.Rd
new file mode 100644
index 0000000000000000000000000000000000000000..e333c6f0eca5504063fe439a2cab61b55acfb008
--- /dev/null
+++ b/man/pCodeToName.Rd
@@ -0,0 +1,11 @@
+% Generated by roxygen2 (4.0.2): do not edit by hand
+\docType{data}
+\name{pCodeToName}
+\alias{pCodeToName}
+\title{Data to convert USGS parameter codes to characteristic names}
+\description{
+Data pulled from Water Quality Portal on November 25, 2014.
+}
+\keyword{USGS}
+\keyword{parameterCd}
+
diff --git a/man/readNWISgwl.Rd b/man/readNWISgwl.Rd
index f1f571c0fa3a6ec8e0a96520406c2e74e2c76686..cb2fac55e6eba02e804bd6ce33088d561b72f7e4 100644
--- a/man/readNWISgwl.Rd
+++ b/man/readNWISgwl.Rd
@@ -16,10 +16,10 @@ readNWISgwl(siteNumbers, startDate = "", endDate = "")
 A data frame with the following columns:
 \tabular{lll}{
 Name \tab Type \tab Description \cr
-agency \tab character \tab The NWIS code for the agency reporting the data\cr
-site \tab character \tab The USGS site number \cr
-datetime \tab character \tab The date and time of the value as a character \cr
-tz_cd \tab character \tab The time zone code for datetime \cr
+agency_cd \tab character \tab The NWIS code for the agency reporting the data\cr
+site_no \tab character \tab The USGS site number \cr
+dateTime \tab character \tab The date and time of the value as a character \cr
+tz_cd \tab character \tab The time zone code for dateTime \cr
 code \tab character \tab Any codes that qualify the corresponding value\cr
 value \tab numeric \tab The numeric value for the parameter \cr
 }
@@ -41,7 +41,8 @@ queryTime \tab POSIXct \tab The time the data was returned \cr
 }
 \description{
 Reads groundwater level measurements from NWISweb. Mixed date/times come back from the service
-depending on the year that the data was collected.
+depending on the year that the data was collected. See \url{http://waterdata.usgs.gov/usa/nwis/gw}
+for details about groundwater data.
 }
 \examples{
 siteNumber <- "434400121275801"
diff --git a/man/readNWISmeas.Rd b/man/readNWISmeas.Rd
index da408adea08dad265449fa8617b66be448c8332b..688300e50ada1ed98782c419f0677da8adfebe10 100644
--- a/man/readNWISmeas.Rd
+++ b/man/readNWISmeas.Rd
@@ -12,8 +12,8 @@ readNWISmeas(siteNumber, startDate = "", endDate = "", tz = "")
 
 \item{endDate}{character ending date for data retrieval in the form YYYY-MM-DD.}
 
-\item{tz}{character to set timezone attribute of datetime. Default is an empty quote, which converts the
-datetimes to UTC (properly accounting for daylight savings times based on the data's provided tz_cd column).
+\item{tz}{character to set timezone attribute of dateTime. Default is an empty quote, which converts the
+dateTimes to UTC (properly accounting for daylight savings times based on the data's provided tz_cd column).
 Possible values to provide are "America/New_York","America/Chicago", "America/Denver","America/Los_Angeles",
 "America/Anchorage","America/Honolulu","America/Jamaica","America/Managua","America/Phoenix", and "America/Metlakatla"}
 }
@@ -23,15 +23,18 @@ A data frame with at least the following columns:
 Name \tab Type \tab Description \cr
 agency_cd \tab character \tab The NWIS code for the agency reporting the data\cr
 site_no \tab character \tab The USGS site number \cr
-tz_cd \tab character \tab The time zone code for datetime \cr
+tz_cd \tab character \tab The time zone code for dateTime \cr
 }
 
+See \url{http://waterdata.usgs.gov/usa/nwis/sw} for details about surface water.
+
 There are also several useful attributes attached to the data frame:
 \tabular{lll}{
 Name \tab Type \tab Description \cr
 url \tab character \tab The url used to generate the data \cr
 queryTime \tab POSIXct \tab The time the data was returned \cr
 comment \tab character \tab Header comments from the RDB file \cr
+siteInfo \tab data.frame \tab A data frame containing information on the requested sites \cr
 }
 }
 \description{
diff --git a/man/readNWISpeak.Rd b/man/readNWISpeak.Rd
index e171579f41c5686c55899663a97a30d8311e505c..8f8cf938f3e17dbc8f2090746a22e758d4589f1a 100644
--- a/man/readNWISpeak.Rd
+++ b/man/readNWISpeak.Rd
@@ -18,17 +18,12 @@ A data frame with the following columns:
 Name \tab Type \tab Description \cr
 agency_cd \tab character \tab The NWIS code for the agency reporting the data\cr
 site_no \tab character \tab The USGS site number \cr
-datetime \tab POSIXct \tab The date and time of the value converted to UTC (if asDateTime = TRUE), \cr
+dateTime \tab POSIXct \tab The date and time of the value converted to UTC (if asDateTime = TRUE), \cr
 \tab character \tab or raw character string (if asDateTime = FALSE) \cr
-tz_cd \tab character \tab The time zone code for datetime \cr
+tz_cd \tab character \tab The time zone code for dateTime \cr
 code \tab character \tab Any codes that qualify the corresponding value\cr
 value \tab numeric \tab The numeric value for the parameter \cr
 }
-Note that code and value are repeated for the parameters requested. The names are of the form
-XD_P_S, where X is literal,
-D is an option description of the parameter,
-P is the parameter code,
-and S is the statistic code (if applicable).
 
 There are also several useful attributes attached to the data frame:
 \tabular{lll}{
@@ -36,6 +31,7 @@ Name \tab Type \tab Description \cr
 url \tab character \tab The url used to generate the data \cr
 queryTime \tab POSIXct \tab The time the data was returned \cr
 comment \tab character \tab Header comments from the RDB file \cr
+siteInfo \tab data.frame \tab A data frame containing information on the requested sites \cr
 }
 }
 \description{
diff --git a/man/readNWISuv.Rd b/man/readNWISuv.Rd
index fd810547416fa59ba0e9507f9eb0eebfe2f9441c..d78dee06997b0cc9d7fbd06fd08ccb472d5a4055 100644
--- a/man/readNWISuv.Rd
+++ b/man/readNWISuv.Rd
@@ -19,8 +19,8 @@ readNWISuv(siteNumbers, parameterCd, startDate = "", endDate = "",
 
 \item{endDate}{character ending date for data retrieval in the form YYYY-MM-DD.}
 
-\item{tz}{character to set timezone attribute of datetime. Default is an empty quote, which converts the
-datetimes to UTC (properly accounting for daylight savings times based on the data's provided tz_cd column).
+\item{tz}{character to set timezone attribute of dateTime. Default is an empty quote, which converts the
+dateTimes to UTC (properly accounting for daylight savings times based on the data's provided tz_cd column).
 Possible values to provide are "America/New_York","America/Chicago", "America/Denver","America/Los_Angeles",
 "America/Anchorage","America/Honolulu","America/Jamaica","America/Managua","America/Phoenix", and "America/Metlakatla"}
 }
@@ -28,14 +28,14 @@ Possible values to provide are "America/New_York","America/Chicago", "America/De
 A data frame with the following columns:
 \tabular{lll}{
 Name \tab Type \tab Description \cr
-agency \tab character \tab The NWIS code for the agency reporting the data\cr
-site \tab character \tab The USGS site number \cr
-datetime \tab POSIXct \tab The date and time of the value converted to UTC \cr
-tz_cd \tab character \tab The time zone code for datetime \cr
+agency_cd \tab character \tab The NWIS code for the agency reporting the data\cr
+site_no \tab character \tab The USGS site number \cr
+dateTime \tab POSIXct \tab The date and time of the value converted to UTC \cr
+tz_cd \tab character \tab The time zone code for dateTime \cr
 code \tab character \tab Any codes that qualify the corresponding value\cr
 value \tab numeric \tab The numeric value for the parameter \cr
 }
-Note that code and value are repeated for the parameters requested. The names are of the form
+Note that code and value are repeated for the parameters requested. The names are of the form:
 X_D_P_S, where X is literal,
 D is an option description of the parameter,
 P is the parameter code,
diff --git a/vignettes/dataRetrieval-concordance.tex b/vignettes/dataRetrieval-concordance.tex
index a630817fc3ab1ba541dc591efb2a2c8a3dd5b669..9af5b0742a302be2bfc9e12ca7aaa21ecb567036 100644
--- a/vignettes/dataRetrieval-concordance.tex
+++ b/vignettes/dataRetrieval-concordance.tex
@@ -1,9 +1,9 @@
 \Sconcordance{concordance:dataRetrieval.tex:dataRetrieval.Rnw:%
 1 127 1 49 0 1 7 15 1 1 14 47 1 3 0 97 1 2 0 8 1 9 0 %
 31 1 3 0 21 1 4 0 6 1 8 0 7 1 4 0 23 1 2 0 21 1 1 8 %
-19 1 9 0 6 1 7 0 26 1 7 0 18 1 2 0 11 1 18 0 21 1 9 0 %
-20 1 3 0 7 1 20 0 44 1 2 0 8 1 88 0 16 1 10 0 16 1 10 %
-0 17 1 13 0 17 1 14 0 20 1 4 0 14 1 4 0 33 1 13 0 40 %
-1 13 0 18 1 2 0 14 1 2 0 20 1 9 0 8 1 8 0 12 1 25 0 9 %
-1 4 0 7 1 29 0 19 1 4 0 15 1 4 0 11 1 4 0 5 1 4 0 22 %
-1 14 0 8 1 4 0 43 1}
+19 1 9 0 6 1 7 0 26 1 7 0 18 1 2 0 11 1 15 0 21 1 8 0 %
+20 1 3 0 7 1 20 0 44 1 2 0 8 1 2 0 16 1 9 0 16 1 10 0 %
+17 1 13 0 17 1 14 0 20 1 4 0 14 1 4 0 33 1 13 0 40 1 %
+13 0 18 1 2 0 14 1 2 0 20 1 9 0 8 1 7 0 12 1 25 0 9 1 %
+4 0 7 1 29 0 19 1 4 0 15 1 4 0 11 1 4 0 5 1 4 0 22 1 %
+14 0 8 1 4 0 43 1}
diff --git a/vignettes/dataRetrieval.Rnw b/vignettes/dataRetrieval.Rnw
index 9865c1a72c6dcc459849c7ca17379916b8376f2b..0a4cd78aa55ddc76915f99d89bf14214f13a1ddf 100644
--- a/vignettes/dataRetrieval.Rnw
+++ b/vignettes/dataRetrieval.Rnw
@@ -462,7 +462,7 @@ Parameter information can obtained from:
 \FloatBarrier
 
 %------------------------------------------------------------
-\subsection{Daily Values}
+\subsection{Daily Data}
 \label{sec:usgsDaily}
 %------------------------------------------------------------
 To obtain daily records of USGS data, use the \texttt{readNWISdv} function. The arguments for this function are siteNumber, parameterCd, startDate, endDate, and statCd (defaults to \texttt{"}00003\texttt{"}).  If you want to use the default values, you do not need to list them in the function call. 
@@ -538,7 +538,7 @@ There are occasions where NWIS values are not reported as numbers, instead there
 \FloatBarrier
 
 %------------------------------------------------------------
-\subsection{Unit Values}
+\subsection{Unit Data}
 \label{sec:usgsRT}
 %------------------------------------------------------------
 Any data collected at regular time intervals (such as 15-minute or hourly) are known as \enquote{unit values.} Many of these are delivered on a real time basis and very recent data (even less than an hour old in many cases) are available through the function \texttt{readNWISuv}.  Some of these unit values are available for many years, and some are only available for a recent time period such as 120 days.  Here is an example of a retrieval of such data.  
@@ -582,7 +582,7 @@ Data are retrieved from \url{http://waterservices.usgs.gov/rest/IV-Test-Tool.htm
 \FloatBarrier
 
 %------------------------------------------------------------
-\subsection{Water Quality Values}
+\subsection{Water Quality Data}
 \label{sec:usgsWQP}
 %------------------------------------------------------------
 To get USGS water quality data from water samples collected at the streamgage or other monitoring site (as distinct from unit values collected through some type of automatic monitor) we can use the function \texttt{readNWISqw}, with the input arguments: siteNumber, parameterCd, startDate, and endDate. Additionally, the argument \texttt{"}expanded\texttt{"} is a logical input that allows the user to choose between a simple return of datetimes/qualifier/values (expanded=FALSE), or a more complete and verbose output (expanded=TRUE). Expanded = TRUE includes such columns as remark codes, value qualifying text, and detection level for each parameter code. There also includes an argument \texttt{"}reshape\texttt{"}, that converts the expanded dataset to a \texttt{"}wide\texttt{"} format (each requested parameter code gets individual columns).
@@ -614,7 +614,7 @@ comment(dfLong)
 \FloatBarrier
 
 %------------------------------------------------------------
-\subsection{Groundwater level data}
+\subsection{Groundwater Level Data}
 \label{sec:gwl}
 %------------------------------------------------------------
 Groundwater level measurements can be obtained with the \texttt{readNWISgwl} function. Information on the returned data can be found with the \texttt{comment} function as described in section \ref{sec:metadata}.
@@ -628,7 +628,7 @@ names(groundWater)
 @
 
 %------------------------------------------------------------
-\subsection{Peak flow data}
+\subsection{Peak Flow Data}
 \label{sec:peak}
 %------------------------------------------------------------
 
@@ -645,7 +645,7 @@ names(peakData)
 
 
 %------------------------------------------------------------
-\subsection{Rating curve data}
+\subsection{Rating Curve Data}
 \label{sec:rating}
 %------------------------------------------------------------
 Rating curves are the calibration curves that are used to convert measurements of stage to discharge.  Because of changing hydrologic conditions these rating curves change over time. Information on the returned data can be found with the \texttt{comment} function as described in section \ref{sec:metadata}.
@@ -663,7 +663,7 @@ names(ratingData)
 
 
 %------------------------------------------------------------
-\subsection{Surface-water measurement data}
+\subsection{Surface-Water Measurement Data}
 \label{sec:meas}
 %------------------------------------------------------------
 These data are the discrete measurements of discharge that are made for the purpose of developing or revising the rating curve.  Information on the returned data can be found with the \texttt{comment} function as described in section \ref{sec:metadata}.
@@ -722,7 +722,7 @@ A tool for finding NWIS characteristic names can be found at: \url{http://www.wa
 The previous examples all took specific input arguments: siteNumber, parameterCd (or characteristic name), startDate, endDate, etc. However, the Web services that supply the data can accept a wide variety of additional arguments. 
 
 %------------------------------------------------------------
-\subsubsection{NWIS sites}
+\subsubsection{NWIS Sites}
 \label{sec:NWISGenSite}
 %------------------------------------------------------------
 The function \texttt{whatNWISsites} can be used to discover NWIS sites based on any query that the NWIS Site Service offers. This is done by using the \texttt{"..."} argument, which allows the user to use any arbitrary input argument. We can then use the service here:
@@ -746,7 +746,7 @@ nrow(sites)
 
 
 %------------------------------------------------------------
-\subsubsection{NWIS data}
+\subsubsection{NWIS Data}
 \label{sec:NWISGenData}
 %------------------------------------------------------------
 For NWIS data, the function \texttt{readNWISdata} can be used. The argument listed in the R help file is \texttt{"..."} and \texttt{"}service\texttt{"} (only for data requests). Table \ref{tab:NWISGeneral} describes the services are available.
@@ -785,7 +785,7 @@ nrow(dischargeWI)
 @
 
 %------------------------------------------------------------
-\subsubsection{Water Quality Portal sites}
+\subsubsection{WQP Sites}
 \label{sec:WQPGenSite}
 %------------------------------------------------------------
 
@@ -804,7 +804,7 @@ sitesNJ <- whatWQPsites(statecode="US:34",
 
 
 %------------------------------------------------------------
-\subsubsection{Water Quality Portal data}
+\subsubsection{WQP Data}
 \label{sec:WQPGenData}
 %------------------------------------------------------------
 Finally, to get data from the WQP using generalized Web service calls, use the function \texttt{readWQPdata}. For example, to get all the pH data in Wisconsin:
@@ -836,7 +836,7 @@ attr(dischargeWI, "queryTime")
 
 @
 
-Depending on the format that the data was obtained (xml, rdb, etc), there will be additional information embedded in the dataframe as attributes. To discover the available attributes:
+Depending on the format in which the data was obtained (RDB, WaterML1, etc.), there will be additional information embedded in the dataframe as attributes. To discover the available attributes:
 
 <<meta2, eval=TRUE>>=
 
@@ -924,7 +924,7 @@ library(dataRetrieval)
 
 
 %------------------------------------------------------------ 
-\section{Creating tables in Microsoft\textregistered\ software from R}
+\section{Creating Tables in Microsoft\textregistered\ Software from R}
 \label{app:createWordTable}
 %------------------------------------------------------------
 There are a few steps that are required in order to create a table in Microsoft\textregistered\ software (Excel, Word, PowerPoint, etc.) from an R dataframe. There are certainly a variety of good methods, one of which is detailed here. The example we will step through here will be to create a table in Microsoft Excel based on the dataframe tableData:
diff --git a/vignettes/figure/getNWIStemperaturePlot-1.pdf b/vignettes/figure/getNWIStemperaturePlot-1.pdf
index c568cab28d9f584637bc6c484a8b745c62581fde..9a0cdfe9bc5c70f5ae9a948fddaa97ea349277b9 100644
Binary files a/vignettes/figure/getNWIStemperaturePlot-1.pdf and b/vignettes/figure/getNWIStemperaturePlot-1.pdf differ