diff --git a/inst/doc/Rplots.pdf b/inst/doc/Rplots.pdf
index 32a1d08d8aa29e3234d319ebc1ed7e336e61aa12..b3e40e0eee7392df8044546d2f4012849c33efe5 100644
Binary files a/inst/doc/Rplots.pdf and b/inst/doc/Rplots.pdf differ
diff --git a/inst/doc/dataRetrieval-concordance.tex b/inst/doc/dataRetrieval-concordance.tex
index 573b84d2421210e7db4f581e7fe977674971ee9f..152cb5ae9d761e22482942625a699abc2c177f42 100644
--- a/inst/doc/dataRetrieval-concordance.tex
+++ b/inst/doc/dataRetrieval-concordance.tex
@@ -1,16 +1,16 @@
 \Sconcordance{concordance:dataRetrieval.tex:dataRetrieval.Rnw:%
 1 84 1 1 8 1 1 1 10 16 0 1 2 5 1 1 10 15 0 1 2 13 1 1 2 1 0 1 2 1 0 1 1 %
-3 0 1 2 2 1 1 2 7 0 1 2 7 1 1 3 2 0 1 1 12 0 1 2 4 1 1 4 3 0 1 2 1 0 1 %
-3 7 0 1 3 4 0 1 2 3 1 1 8 7 0 1 4 1 0 1 2 21 0 1 2 7 1 1 3 2 0 2 1 7 0 %
-1 2 1 1 1 2 7 0 1 2 9 1 1 3 2 0 3 1 1 2 3 0 1 2 1 1 1 2 10 0 1 2 4 1 1 %
-3 2 0 4 1 1 3 4 0 1 2 4 1 1 6 4 0 1 1 1 4 3 0 3 1 3 0 1 2 3 1 1 -5 1 9 %
-14 1 1 2 1 0 3 1 1 2 4 0 2 2 10 0 1 2 3 1 1 5 4 0 1 1 3 0 1 2 3 1 1 -5 %
-1 9 12 1 1 2 1 0 1 2 1 0 2 1 1 3 4 0 1 2 2 1 1 3 2 0 1 1 7 0 1 2 3 1 1 %
-6 5 0 1 1 3 0 1 2 2 1 1 -4 1 8 10 1 1 3 2 0 1 1 12 0 1 2 13 1 1 2 4 0 1 %
-2 7 1 1 2 1 0 2 1 1 3 5 0 1 2 2 1 1 11 18 0 1 2 8 1 1 3 5 0 1 2 2 1 1 %
-12 24 0 1 2 10 1 1 14 12 0 1 2 9 1 1 2 17 0 1 3 27 1 1 2 1 0 2 1 3 0 1 %
-2 15 1 1 2 1 0 2 1 3 0 1 2 6 1 1 2 1 0 3 1 1 2 2 1 11 0 1 1 19 0 1 2 24 %
-1 1 2 4 0 1 2 1 1 1 2 13 0 1 2 6 1 1 2 1 0 1 1 3 0 1 2 3 1 1 2 4 0 1 2 %
-7 1 1 2 1 0 1 1 3 0 1 2 1 1 1 2 4 0 1 2 12 1 1 5 47 0 1 2 9 1 1 6 45 0 %
-1 2 2 1 1 6 27 0 1 2 8 1 1 2 1 0 4 1 1 9 7 0 1 1 11 0 1 2 4 1 1 3 5 0 1 %
-2 53 1}
+3 0 1 2 2 1 1 2 7 0 1 2 7 1 1 3 2 0 1 1 12 0 1 2 5 1 1 4 3 0 1 3 1 0 1 %
+3 7 0 1 3 4 0 1 2 2 1 1 8 7 0 1 4 1 0 1 2 21 0 1 2 7 1 1 3 2 0 2 1 7 0 %
+1 2 1 1 1 2 7 0 1 2 9 1 1 3 2 0 2 1 1 2 3 0 1 2 1 1 1 2 10 0 1 2 4 1 1 %
+2 1 0 3 1 1 3 4 0 1 2 4 1 1 6 4 0 1 1 1 4 3 0 3 1 3 0 1 2 3 1 1 -5 1 9 %
+14 1 1 2 1 0 1 1 1 2 1 0 1 3 5 0 2 2 10 0 1 2 3 1 1 5 4 0 1 1 3 0 1 2 3 %
+1 1 -5 1 9 12 1 1 4 2 0 2 1 1 3 1 0 1 3 1 0 1 1 7 0 1 2 1 1 1 6 5 0 1 1 %
+3 0 1 2 2 1 1 -4 1 8 10 1 1 3 2 0 1 1 12 0 1 2 7 1 1 3 2 0 5 1 3 0 1 2 %
+13 1 1 2 1 0 1 1 3 0 1 2 7 1 1 2 1 0 2 1 1 3 5 0 1 2 2 1 1 11 18 0 1 2 %
+8 1 1 3 5 0 1 2 2 1 1 12 24 0 1 2 10 1 1 14 12 0 1 2 9 1 1 2 17 0 1 3 %
+27 1 1 2 1 0 2 1 3 0 1 2 15 1 1 2 1 0 2 1 3 0 1 2 6 1 1 2 1 0 3 1 1 2 2 %
+1 11 0 1 1 19 0 1 2 6 1 1 3 2 0 1 1 3 0 1 2 3 1 1 -5 1 9 21 1 1 2 4 0 1 %
+2 1 1 1 2 13 0 1 2 6 1 1 2 1 0 1 1 3 0 1 2 3 1 1 2 4 0 1 2 7 1 1 2 1 0 %
+1 1 3 0 1 2 1 1 1 2 4 0 1 2 10 1 1 5 47 0 1 2 9 1 1 6 45 0 1 2 2 1 1 6 %
+27 0 1 2 8 1 1 2 1 0 4 1 1 9 10 0 1 2 3 1 1 3 5 0 1 2 53 1}
diff --git a/inst/doc/dataRetrieval-fig1.pdf b/inst/doc/dataRetrieval-fig1.pdf
index 3ea96ee2cd6ff5f86e849c701af6e3cfb55a5eb1..fcc2036fc0ffaec5c2ee8b26adb65ba0acb58264 100644
Binary files a/inst/doc/dataRetrieval-fig1.pdf and b/inst/doc/dataRetrieval-fig1.pdf differ
diff --git a/inst/doc/dataRetrieval-fig2.pdf b/inst/doc/dataRetrieval-fig2.pdf
index 3495a7a39205ff053538e5301b624137b79e7919..13d2e362db91e184765390aec0adf2313c0c8e52 100644
Binary files a/inst/doc/dataRetrieval-fig2.pdf and b/inst/doc/dataRetrieval-fig2.pdf differ
diff --git a/inst/doc/dataRetrieval-fig3.pdf b/inst/doc/dataRetrieval-fig3.pdf
index 42e70b553aceffd65341e7cc834679e5545ef9a7..66c388a084b5f3f9f0b14ba3df909fc0bee0e6a7 100644
Binary files a/inst/doc/dataRetrieval-fig3.pdf and b/inst/doc/dataRetrieval-fig3.pdf differ
diff --git a/inst/doc/dataRetrieval-figegretEx.pdf b/inst/doc/dataRetrieval-figegretEx.pdf
index 101b9c38bc49214aa4da0afd6dc6f43ca38408f0..361675f2e94a3cfd779b24b810a2603d579c749d 100644
Binary files a/inst/doc/dataRetrieval-figegretEx.pdf and b/inst/doc/dataRetrieval-figegretEx.pdf differ
diff --git a/inst/doc/dataRetrieval.Rnw b/inst/doc/dataRetrieval.Rnw
index 6c841e6fd1ef0afbd5db055325c0b90b74d3d7b6..5d6c25a1b9fc30600673128fb04f7fa0480056ca 100644
--- a/inst/doc/dataRetrieval.Rnw
+++ b/inst/doc/dataRetrieval.Rnw
@@ -64,13 +64,13 @@ The dataRetrieval package was created to simplify the process of getting hydrolo
 
 The dataRetrieval package is designed to retrieve many of the major data types of USGS hydrologic data that are available on the web, but also allows users to make use of other data that they supply from spreadsheets.  Section 2 provides examples of how one can obtain raw data from USGS sources on the web and ingest them into data frames within the R environment.  The functionality described in section 2 is for general use and is not tailored for the specific uses of the EGRET package.  The functionality described in section 3 is tailored specifically to obtaining input from the web and structuring them specifically for use in the EGRET package.  The functionality described in section 4 is for converting hydrologic data from user-supplied spreadsheets and structuring them specifically for use in the EGRET package.
 
-For information on getting started in R, downloading and installing the package, see Appendix 1: Getting Started (\ref{sec:appendix1}).
+For information on getting started in R and installing the package, see Appendix \ref{sec:appendix1}: Getting Started.
 
 
 %------------------------------------------------------------
 \section{General USGS Web Retrievals}
 %------------------------------------------------------------ 
-In this section, we will run through 5 examples, documenting how to get raw data from the web. This includes site information (\ref{sec:usgsSite}), measured parameter information (\ref{sec:usgsParams}), historical daily values(\ref{sec:usgsDaily}), real-time current values (\ref{sec:usgsRT}), and water quality data (\ref{sec:usgsWQP}) or (\ref{sec:usgsSTORET}). We will use the Choptank River near Greensboro, MD as an example.  The site-ID for this gage station is 01491000. Daily discharge measurements are available as far back as 1948.  Additionally, forms of nitrate have been measured dating back to 1964. The functions/examples in this section are for raw data retrieval.  This may or may not be the easiest data to work with.  In the next section, we will use functions that retrieve and process the data in a dataframe that may prove more friendly for R analysis.
+In this section, we will run through 5 examples, documenting how to get raw data from the web. This includes site information (\ref{sec:usgsSite}), measured parameter information (\ref{sec:usgsParams}), historical daily values (\ref{sec:usgsDaily}), real-time (unit) values (\ref{sec:usgsRT}), and water quality data (\ref{sec:usgsWQP}) or (\ref{sec:usgsSTORET}). We will use the Choptank River near Greensboro, MD as an example.  The site-ID for this gage station is 01491000. Daily discharge measurements are available as far back as 1948.  Additionally, forms of nitrate have been measured dating back to 1964. The functions/examples in this section are for raw data retrieval.  This may or may not be the easiest data to work with.  In the next section, we will use functions that retrieve and process the data in a dataframe that may prove more friendly for R analysis.
 
 %------------------------------------------------------------
 \subsection{Introduction}
@@ -81,14 +81,14 @@ Once the site-ID is known, the next required input for USGS data retrievals is t
 
 \url{http://nwis.waterdata.usgs.gov/usa/nwis/pmcodes?radio_pm_search=param_group&pm_group=All+--+include+all+parameter+groups&pm_search=&casrn_search=&srsname_search=&format=html_table&show=parameter_group_nm&show=parameter_nm&show=casrn&show=srsname&show=parameter_units}
 
-Not every station will measure all parameters. A list of commonly measured parameters is shown in Table \ref{tab:params}.
+Not every station will measure all parameters. A short list of commonly measured parameters is shown in Table \ref{tab:params}.
 
 <<openLibrary, echo=FALSE>>=
 library(xtable)
 options(continue=" ")
-# options(width=70)
-options(SweaveHooks=list(fig=function()
-  par(mar=c(4.1,4.1,1.1,4.1),oma=c(0,0,0,0))))
+# options(width=60)
+# options(SweaveHooks=list(fig=function()
+#   par(mar=c(4.1,4.1,1.1,4.1),oma=c(0,0,0,0))))
 @
 
 
@@ -99,7 +99,7 @@ shortName <- c("Discharge [cfs]","Gage height [ft]","Temperature [C]", "Precipit
 data.df <- data.frame(pCode, shortName, stringsAsFactors=FALSE)
 
 data.table <- xtable(data.df,label="tab:params",
-                     caption="Commonly found USGS Parameter Codes")
+                     caption="Common USGS Parameter Codes")
 print(data.table,
       caption.placement="top",include.rownames=FALSE)
 @
@@ -141,7 +141,7 @@ siteNumber <- "01491000"
 ChoptankInfo <- getSiteFileData(siteNumber)
 @
 
-A list of the available columns are found in Appendix 2: INFO dataframe (\ref{sec:appendix2INFO}). Pulling out a specific example piece of information, in this case station name can be done as follows:
+A list of the available columns are found in Appendix \ref{sec:appendix2INFO}: INFO dataframe. Pulling out a specific example piece of information, in this case station name can be done as follows:
 
 <<siteNames, echo=TRUE>>=
 ChoptankInfo$station.nm
@@ -162,14 +162,16 @@ head(ChoptankAvailableData)
 
 There is an additional argument to the getDataAvailability called longNames, which defaults to FALSE. Setting longNames to TRUE will cause the function to make a web service call for each parameter and return expanded information on that parameter. Currently, this is a very slow process because each parameter code makes a unique web service call. If the site does not have many measured parameters, setting longNames to TRUE is reasonable.
 
-It is also possible to only request information for certain variables. In the following example, we retrieve just the daily mean parameter information from the Choptank data availability dataframe (excluding all unit value and water quality values). 
+It is also possible to only request parameter information for a subset of variables. In the following example, we retrieve just the daily mean parameter information from the Choptank data availability dataframe (excluding all unit value and water quality values). getMultipleParameterNames is the function that is embedded in the getDataAvailability function, but here it can be used as a standalone function.
+
 
 <<label=getSiteExtended, echo=TRUE>>=
 # Continuing from the previous example:
 # This pulls out just the daily data:
-ChoptankDailyData <- ChoptankAvailableData["dv" == ChoptankAvailableData$service,]
+ChoptankDailyData <- subset(ChoptankAvailableData,"dv" == service)
+
 # This pulls out the mean:
-ChoptankDailyData <- ChoptankDailyData["00003" == ChoptankDailyData$statCd,]
+ChoptankDailyData <- subset(ChoptankDailyData,"00003" == statCd)
 
 #Now, make a call to get all of the parameter information:
 pCodeINFO <- getMultipleParameterNames(ChoptankDailyData$parameter_cd)
@@ -178,8 +180,7 @@ pCodeINFO <- getMultipleParameterNames(ChoptankDailyData$parameter_cd)
 ChoptankDailyData <- merge(ChoptankDailyData,pCodeINFO,by="parameter_cd")
 @
 
-The daily data at the Choptank River site can be displayed in a nice \LaTeX table using the xtable package. See Appendix \ref{app:createWordTable} for instructions on converting an R dataframe to a nice table in Microsoft Excel or Word.
-
+The daily data at the Choptank River site can be displayed in a \LaTeX table using the xtable package. See Appendix \ref{app:createWordTable} for instructions on converting an R dataframe to a table in Microsoft Excel or Word.
 
 <<label=tablegda, echo=TRUE,results=tex>>=
 tableData <- with(ChoptankDailyData, 
@@ -221,14 +222,13 @@ Parameter information is obtained from \url{http://nwis.waterdata.usgs.gov/nwis/
 \subsection{Daily Values}
 \label{sec:usgsDaily}
 %------------------------------------------------------------
-To obtain historic daily records of USGS data, use the retrieveNWISData function. The arguments for this function are siteNumber, parameterCd, startDate, endDate, statCd, and a logical (true/false) interactive. There are 2 default argument: statCd (defaults to \texttt{"}00003\texttt{"}), and interactive (defaults to TRUE).  If you want to use the default values, you do not need to list them in the function call. Setting the 'interactive' option to true will walk you through the function. It might make more sense to run large batch collections with the interactive option set to FALSE. 
+To obtain historic daily records of USGS data, use the retrieveNWISData function. The arguments for this function are siteNumber, parameterCd, startDate, endDate, statCd, and a logical (true/false) interactive. There are 2 default arguments: statCd (defaults to \texttt{"}00003\texttt{"}), and interactive (defaults to TRUE).  If you want to use the default values, you do not need to list them in the function call. Setting the \texttt{"}interactive\texttt{"} option to true will walk you through the function. It might make more sense to run large batch collections with the interactive option set to FALSE. 
 
 The dates (start and end) need to be in the format \texttt{"}YYYY-MM-DD\texttt{"} (note: the user does need to include the quotes).  Setting the start date to \texttt{"}\texttt{"} will indicate to the program to ask for the earliest date, setting the end date to \texttt{"}\texttt{"} will ask for the latest available date.
 
 <<label=getNWISDaily, echo=TRUE>>=
-# Using defaults:
-siteNumber <- "01491000"
-parameterCd <- "00060"  # Discharge in cubic feet per second
+# Continuing with our Choptank River example
+parameterCd <- "00060"  # Discharge (cfs)
 startDate <- ""  # Will request earliest date
 endDate <- "" # Will request latest date
 
@@ -245,15 +245,13 @@ The variable datetime is automatically imported as a Date. Each requested parame
 
 Another example that doesn't use the defaults would be a request for mean and maximum daily temperature and discharge in early 2012:
 <<label=getNWIStemperature, echo=TRUE>>=
-# Using defaults:
-siteNumber <- "01491000" 
-parameterCd <- "00010,00060"  # Temperature and discharge
-statCd <- "00001,00003"  # Mean and maximum
+parameterCd <- c("00010","00060")  # Temperature and discharge
+statCd <- c("00001","00003")  # Mean and maximum
 startDate <- "2012-01-01"
 endDate <- "2012-06-30"
 
 temperatureAndFlow <- retrieveNWISData(siteNumber, parameterCd, 
-                  startDate, endDate, StatCd=statCd,interactive=FALSE)
+        startDate, endDate, StatCd=statCd,interactive=FALSE)
 @
 
 Daily data is pulled from \url{http://waterservices.usgs.gov/rest/DV-Test-Tool.html}. 
@@ -298,10 +296,11 @@ There are occasions where NWIS values are not reported as numbers, instead there
 Any data that are collected at regular time intervals (such as 15-minute or hourly) are known as \texttt{"}Unit Values\texttt{"} - many of these are delivered on a real time basis and very recent data (even less than an hour old in many cases) are available through the function retrieveUnitNWISData.  Some of these Unit Values are available for the past several years, and some are only available for a recent time period such as 120 days or a year.  Here is an example of a retrieval of such data.  
 
 <<label=getNWISUnit, echo=TRUE>>=
-siteNumber <- "01491000"
 parameterCd <- "00060"  # Discharge (cfs)
-startDate <- "2013-03-12" # or pick yesterday by the command as.character(Sys.Date()-1)
-endDate <- "2013-03-13" # Today: as.character(Sys.Date())
+startDate <- "2013-03-12" 
+# or use (yesterday): startDate <- as.character(Sys.Date()-1)
+endDate <- "2013-03-13" 
+# or use (today):  endDate <- as.character(Sys.Date())
 dischargeToday <- retrieveUnitNWISData(siteNumber, parameterCd, 
         startDate, endDate)
 @
@@ -337,30 +336,24 @@ title(ChoptankInfo$station.nm)
 \subsection{Water Quality Values}
 \label{sec:usgsWQP}
 %------------------------------------------------------------
-To get USGS water quality data from water samples collected at the streamgage (as distinct from unit values collected through some type of automatic monitor) we can use the Water Quality Data Portal: \url{http://www.waterqualitydata.us/}. The raw data are obtained from the function  getRawQWData, with the similar input arguments: siteNumber, parameterCd, startDate, endDate, and interactive. The difference is in parameterCd, in this function multiple parameters can be queried using a \texttt{"};\texttt{"} separator, and setting parameterCd to \texttt{"}\texttt{"} will return all of the measured observations. The raw data can be overwelming, a simplified version of the data can be obtained using getQWData.
+To get USGS water quality data from water samples collected at the streamgage (as distinct from unit values collected through some type of automatic monitor) we can use the Water Quality Data Portal: \url{http://www.waterqualitydata.us/}. The raw data are obtained from the function  getRawQWData, with the similar input arguments: siteNumber, parameterCd, startDate, endDate, and interactive. The difference is in parameterCd, in this function multiple parameters can be queried using a \texttt{"};\texttt{"} separator, and setting parameterCd to \texttt{"}\texttt{"} will return all of the measured observations. The raw data can be overwhelming (see Appendix \ref{sec:appendix2WQP}), a simplified version of the data can be obtained using getQWData. There is a large amount of data returned for each observation. 
 
 
 <<label=getQW, echo=TRUE>>=
-siteNumber <- "01491000" 
+ 
 # Dissolved Nitrate parameter codes:
-parameterCd <- "00618;71851"  
-startDate <- "1964-06-11"
+parameterCd <- c("00618","71851")
+startDate <- "1979-10-11"
 endDate <- "2012-12-18"
 
 dissolvedNitrate <- getRawQWData(siteNumber, parameterCd, 
       startDate, endDate)
-@
-
-There is a large amount of data returned for each observation. The column names are listed in Appendix 2 (\ref{sec:appendix2WQP}). To get a simplified dataframe that contains only datetime, value, and qualifier, use the function getQWData:
 
-<<label=getQWData, echo=TRUE>>=
 dissolvedNitrateSimple <- getQWData(siteNumber, parameterCd, 
         startDate, endDate)
 names(dissolvedNitrateSimple)
 @
-Note that in this dataframe, datetime is imported as Dates (no times are included), and the qualifier is either blank or \texttt{"}\verb@<@\texttt{"} signifying a censored value.
-
-An example of plotting the above data (Figure \ref{fig:nitrate}):
+Note that in this dataframe, datetime is imported as Dates (no times are included), and the qualifier is either blank or \texttt{"}\verb@<@\texttt{"} signifying a censored value. A plotting example is shown in Figure \ref{fig:nitrate}.
 
 <<label=getQWtemperaturePlot, echo=TRUE>>=
 with(dissolvedNitrateSimple, plot(
@@ -393,6 +386,23 @@ specificCond <- getWQPData('WIDNR_WQX-10032762',
 head(specificCond)
 @
 
+%------------------------------------------------------------
+\subsection{URL Construction}
+\label{sec:usgsURL}
+%------------------------------------------------------------
+There may be times when you might be interested in seeing the URL (web address) that was used to obtain the raw data. The constructNWISURL function returns the URL.  Aside from input variables that have already been described, there is a new argument \texttt{"}service\texttt{"}. The service argument can be \texttt{"}dv\texttt{"} (daily values), \texttt{"}uv\texttt{"} (unit values), \texttt{"}qw\texttt{"} (NWIS water quality values), or \texttt{"}wqp\texttt{"} (general Water Quality Portal values).
+ 
+
+<<label=geturl, echo=TRUE, eval=FALSE>>=
+# Dissolved Nitrate parameter codes:
+pCode <- c("00618","71851")
+startDate <- "1964-06-11"
+endDate <- "2012-12-18"
+url_qw <- constructNWISURL(siteNumber,pCode,startDate,endDate,'qw')
+url_dv <- constructNWISURL(siteNumber,"00060",startDate,endDate,'dv',statCd="00003")
+url_uv <- constructNWISURL(siteNumber,"00060",startDate,endDate,'uv')
+@
+
 
 %------------------------------------------------------------
 \section{Data Retrievals Structured For Use In The EGRET Package}
@@ -407,6 +417,7 @@ In this section, we use 3 dataRetrieval functions to get sufficient data to perf
 The function to obtain metadata, or data about the streamgage and measured parameters is getMetaData. This function combines getSiteFileData and getParameterInfo, producing one dataframe called INFO.
 
 <<ThirdExample>>=
+parameterCd <- "00618"
 INFO <-getMetaData(siteNumber,parameterCd, interactive=FALSE)
 @
 
@@ -586,8 +597,24 @@ head(Sample)
 %------------------------------------------------------------
 \subsection{EGRET Plots}
 %------------------------------------------------------------
-The EGRET package....
+As has been mentioned, the data is specifically formatted to be used with the EGRET package. The EGRET package has powerful modeling capabilities using WRTDS, but also has a variety of graphing and tabular tools to explore the data without using the WRTDS algorithm. See the EGRET vignette, user guide, and/or wiki (\url{https://github.com/USGS-R/EGRET/wiki}) for detailed information. The following figure is an example of one of the plotting functions that can be used directly from the dataRetrieval dataframes.
 
+<<egretEx, echo=TRUE, eval=TRUE>>=
+# Continuing Choptank example from the previous sections
+library(EGRET)
+multiPlotDataOverview()
+@
+
+\begin{figure}[ht]
+\begin{center}
+
+<<label=figegretEx, fig=TRUE,echo=FALSE>>=
+<<egretEx>>
+@
+\end{center}
+\caption{Default multiPlotDataOverview}
+\label{fig:multiPlotDataOverview}
+\end{figure}
 
 \clearpage
 \appendix
@@ -650,8 +677,6 @@ To then open the library, simply type:
 library(dataRetrieval)
 @
 
-\FloatBarrier
-
 %------------------------------------------------------------ 
 \section{Columns Names}
 \label{sec:appendix2}
@@ -702,7 +727,7 @@ print(data.table, caption.placement="top",floating="FALSE",latex.environments=NU
 \section{Creating tables in Microsoft from R}
 \label{app:createWordTable}
 %------------------------------------------------------------
-There are a few steps that are required in order to create a table in a Microsoft product (Excel, Word, Powerpoint, etc.) from an R dataframe. There are actually a variety of methods, one of which is detailed here. The example we will step through here will be to create the following data.
+There are a few steps that are required in order to create a table in a Microsoft product (Excel, Word, Powerpoint, etc.) from an R dataframe. There are certainly a variety of good methods, one of which is detailed here. The example we will step through here will be to create a table in Microsoft Word based on the dataframe tableData:
 
 <<label=getSiteApp, echo=TRUE>>=
 ChoptankAvailableData <- getDataAvailability(siteNumber)
@@ -719,12 +744,10 @@ tableData <- with(ChoptankDailyData,
       Count=count,
       Units=parameter_units)
       )
-tableData
 @
 
-Our goal now is to get the data from the dataframe tableData to a Microsoft Word table. 
+First, save the dataframe as a tab delimited file (you don't want to use comma delimited because there are commas in some of the data elements):
 
-First, save the dataframe as a tab delimited file:
 
 <<label=saveData, echo=TRUE, eval=FALSE>>=
 write.table(tableData, file="tableData.tsv",sep="\t",
diff --git a/inst/doc/dataRetrieval.log b/inst/doc/dataRetrieval.log
index 28d2e8b578b3d81483138f18f20d5e612073e2a0..940993b2d02a876e6092b609346f14613c0031f4 100644
--- a/inst/doc/dataRetrieval.log
+++ b/inst/doc/dataRetrieval.log
@@ -1,4 +1,4 @@
-This is pdfTeX, Version 3.1415926-2.3-1.40.12 (MiKTeX 2.9) (preloaded format=pdflatex 2012.1.6)  14 MAR 2013 14:54
+This is pdfTeX, Version 3.1415926-2.3-1.40.12 (MiKTeX 2.9) (preloaded format=pdflatex 2012.1.6)  14 MAR 2013 16:42
 entering extended mode
 **dataRetrieval.tex
 (D:\LADData\RCode\dataRetrieval\inst\doc\dataRetrieval.tex
@@ -510,11 +510,11 @@ Overfull \vbox (21.68121pt too high) has occurred while \output is active []
 Overfull \vbox (21.68121pt too high) has occurred while \output is active []
 
 
-[8] <dataRetrieval-fig1.pdf, id=223, 433.62pt x 289.08pt>
+[8] <dataRetrieval-fig1.pdf, id=228, 433.62pt x 289.08pt>
 File: dataRetrieval-fig1.pdf Graphic file (type pdf)
 
 <use dataRetrieval-fig1.pdf>
-Package pdftex.def Info: dataRetrieval-fig1.pdf used on input line 349.
+Package pdftex.def Info: dataRetrieval-fig1.pdf used on input line 346.
 (pdftex.def)             Requested size: 358.46039pt x 238.98355pt.
 
 Overfull \vbox (21.68121pt too high) has occurred while \output is active []
@@ -524,55 +524,53 @@ Overfull \vbox (21.68121pt too high) has occurred while \output is active []
 Overfull \vbox (21.68121pt too high) has occurred while \output is active []
 
 
-[10] <dataRetrieval-fig2.pdf, id=242, 433.62pt x 289.08pt>
+[10] <dataRetrieval-fig2.pdf, id=247, 433.62pt x 289.08pt>
 File: dataRetrieval-fig2.pdf Graphic file (type pdf)
 
 <use dataRetrieval-fig2.pdf>
-Package pdftex.def Info: dataRetrieval-fig2.pdf used on input line 404.
+Package pdftex.def Info: dataRetrieval-fig2.pdf used on input line 402.
 (pdftex.def)             Requested size: 358.46039pt x 238.98355pt.
 
 Overfull \vbox (21.68121pt too high) has occurred while \output is active []
 
 
 [11 <D:/LADData/RCode/dataRetrieval/inst/doc/dataRetrieval-fig2.pdf>]
-<dataRetrieval-fig3.pdf, id=256, 433.62pt x 289.08pt>
+<dataRetrieval-fig3.pdf, id=261, 433.62pt x 289.08pt>
 File: dataRetrieval-fig3.pdf Graphic file (type pdf)
 
 <use dataRetrieval-fig3.pdf>
-Package pdftex.def Info: dataRetrieval-fig3.pdf used on input line 460.
+Package pdftex.def Info: dataRetrieval-fig3.pdf used on input line 448.
 (pdftex.def)             Requested size: 358.46039pt x 238.98355pt.
 
-Overfull \hbox (0.79091pt too wide) in paragraph at lines 470--471
+Overfull \hbox (0.79091pt too wide) in paragraph at lines 458--459
 \T1/aer/m/n/10.95 EPA) or NWIS database. Since STORET does not use USGS pa-ram-
 e-ter codes, a \T1/aett/m/n/10.95 "\T1/aer/m/n/10.95 characteristic
  []
 
-
-Overfull \vbox (21.68121pt too high) has occurred while \output is active []
-
-
-[12 <D:/LADData/RCode/dataRetrieval/inst/doc/dataRetrieval-fig3.pdf>]
-LaTeX Font Info:    Try loading font information for TS1+aett on input line 474
+LaTeX Font Info:    Try loading font information for TS1+aett on input line 462
 .
-
 (C:/PROGRA~1/R/R-215~1.3/share/texmf/tex/latex\ts1aett.fd
 File: ts1aett.fd 
 )
-LaTeX Font Info:    Try loading font information for TS1+cmtt on input line 474
+LaTeX Font Info:    Try loading font information for TS1+cmtt on input line 462
 .
 
 ("C:\Program Files (x86)\MiKTeX 2.9\tex\latex\base\ts1cmtt.fd"
 File: ts1cmtt.fd 1999/05/25 v2.5h Standard LaTeX font definitions
 )
 LaTeX Font Info:    Font shape `TS1/aett/m/sl' in size <10.95> not available
-(Font)              Font shape `TS1/cmtt/m/sl' tried instead on input line 474.
+(Font)              Font shape `TS1/cmtt/m/sl' tried instead on input line 462.
 
 
 Overfull \vbox (21.68121pt too high) has occurred while \output is active []
 
 
+[12 <D:/LADData/RCode/dataRetrieval/inst/doc/dataRetrieval-fig3.pdf>]
+Overfull \vbox (21.68121pt too high) has occurred while \output is active []
+
+
 [13]
-Underfull \hbox (badness 10000) in paragraph at lines 530--548
+Underfull \hbox (badness 10000) in paragraph at lines 538--556
 
  []
 
@@ -600,75 +598,89 @@ Overfull \vbox (21.68121pt too high) has occurred while \output is active []
 Overfull \vbox (21.68121pt too high) has occurred while \output is active []
 
 
-[19]
+[19] <dataRetrieval-figegretEx.pdf, id=313, 433.62pt x 433.62pt>
+File: dataRetrieval-figegretEx.pdf Graphic file (type pdf)
+
+<use dataRetrieval-figegretEx.pdf>
+Package pdftex.def Info: dataRetrieval-figegretEx.pdf used on input line 772.
+(pdftex.def)             Requested size: 358.46039pt x 358.47534pt.
+
+Overfull \vbox (21.68121pt too high) has occurred while \output is active []
+
+
+[20]
 Overfull \vbox (21.68121pt too high) has occurred while \output is active []
 
 
-[20
+[21 <D:/LADData/RCode/dataRetrieval/inst/doc/dataRetrieval-figegretEx.pdf>]
+Overfull \vbox (21.68121pt too high) has occurred while \output is active []
+
+
+[22
 
 ]
 Overfull \vbox (21.68121pt too high) has occurred while \output is active []
 
 
-[21]
+[23]
 Overfull \vbox (21.68121pt too high) has occurred while \output is active []
 
 
-[22]
+[24]
 Overfull \vbox (15.16835pt too high) has occurred while \output is active []
 
 
 Overfull \vbox (21.68121pt too high) has occurred while \output is active []
 
 
-[23]
+[25]
 Overfull \vbox (21.68121pt too high) has occurred while \output is active []
 
 
-[24]
+[26]
 Overfull \vbox (21.68121pt too high) has occurred while \output is active []
 
 
-[25]
+[27]
 Overfull \vbox (21.68121pt too high) has occurred while \output is active []
 
 
-[26
+[28
 
-] <table1.png, id=338, 554.07pt x 125.71968pt>
+] <table1.png, id=361, 554.07pt x 125.71968pt>
 File: table1.png Graphic file (type png)
  <use table1.png>
-Package pdftex.def Info: table1.png used on input line 1058.
+Package pdftex.def Info: table1.png used on input line 1070.
 (pdftex.def)             Requested size: 358.46039pt x 81.33507pt.
 
 Overfull \vbox (21.68121pt too high) has occurred while \output is active []
 
 
-[27 <D:/LADData/RCode/dataRetrieval/inst/doc/table1.png>]
-Package atveryend Info: Empty hook `BeforeClearDocument' on input line 1080.
+[29 <D:/LADData/RCode/dataRetrieval/inst/doc/table1.png>]
+Package atveryend Info: Empty hook `BeforeClearDocument' on input line 1092.
 
 Overfull \vbox (21.68121pt too high) has occurred while \output is active []
 
 
-[28
+[30
 
 ]
-Package atveryend Info: Empty hook `AfterLastShipout' on input line 1080.
+Package atveryend Info: Empty hook `AfterLastShipout' on input line 1092.
  (D:\LADData\RCode\dataRetrieval\inst\doc\dataRetrieval.aux)
-Package atveryend Info: Executing hook `AtVeryEndDocument' on input line 1080.
-Package atveryend Info: Executing hook `AtEndAfterFileList' on input line 1080.
+Package atveryend Info: Executing hook `AtVeryEndDocument' on input line 1092.
+Package atveryend Info: Executing hook `AtEndAfterFileList' on input line 1092.
 
 Package rerunfilecheck Info: File `dataRetrieval.out' has not changed.
-(rerunfilecheck)             Checksum: 40BB526E32B9712C796C3616F1DDBE3B;2038.
+(rerunfilecheck)             Checksum: 13FFD1C75CB55F2DDE6E037E39171B97;2105.
  ) 
 Here is how much of TeX's memory you used:
- 7534 strings out of 494045
- 108149 string characters out of 3145961
- 191631 words of memory out of 3000000
- 10589 multiletter control sequences out of 15000+200000
+ 7546 strings out of 494045
+ 108400 string characters out of 3145961
+ 194724 words of memory out of 3000000
+ 10596 multiletter control sequences out of 15000+200000
  45443 words of font info for 92 fonts, out of 3000000 for 9000
  715 hyphenation exceptions out of 8191
- 35i,12n,28p,913b,487s stack positions out of 5000i,500n,10000p,200000b,50000s
+ 35i,12n,28p,912b,489s stack positions out of 5000i,500n,10000p,200000b,50000s
  <C:\Users\ldecicco\AppData\Local\MiKTeX\2.9\fonts\pk\ljfour\jknappen\ec\dpi6
 00\tcst1095.pk><C:/Program Files (x86)/MiKTeX 2.9/fonts/type1/public/amsfonts/c
 m/cmbx10.pfb><C:/Program Files (x86)/MiKTeX 2.9/fonts/type1/public/amsfonts/cm/
@@ -683,9 +695,9 @@ Files (x86)/MiKTeX 2.9/fonts/type1/public/amsfonts/cm/cmr9.pfb><C:/Program File
 s (x86)/MiKTeX 2.9/fonts/type1/public/amsfonts/cm/cmsltt10.pfb><C:/Program File
 s (x86)/MiKTeX 2.9/fonts/type1/public/amsfonts/cm/cmti10.pfb><C:/Program Files 
 (x86)/MiKTeX 2.9/fonts/type1/public/amsfonts/cm/cmtt10.pfb>
-Output written on dataRetrieval.pdf (28 pages, 354429 bytes).
+Output written on dataRetrieval.pdf (30 pages, 475780 bytes).
 PDF statistics:
- 433 PDF objects out of 1000 (max. 8388607)
- 80 named destinations out of 1000 (max. 500000)
- 257 words of extra memory for PDF output out of 10000 (max. 10000000)
+ 455 PDF objects out of 1000 (max. 8388607)
+ 84 named destinations out of 1000 (max. 500000)
+ 270 words of extra memory for PDF output out of 10000 (max. 10000000)
 
diff --git a/inst/doc/dataRetrieval.pdf b/inst/doc/dataRetrieval.pdf
index e1e16abeb10a474158ed839f5c3b85d6d969b10f..740b9d65e79c785e1510541e1bc8a0b7692daead 100644
Binary files a/inst/doc/dataRetrieval.pdf and b/inst/doc/dataRetrieval.pdf differ
diff --git a/inst/doc/dataRetrieval.synctex.gz b/inst/doc/dataRetrieval.synctex.gz
index cf8f93768a30c91fc4462f1b1046c051fc518a5c..70b14830f956fab7f96ccc755e004bf7cf590d1c 100644
Binary files a/inst/doc/dataRetrieval.synctex.gz and b/inst/doc/dataRetrieval.synctex.gz differ
diff --git a/inst/doc/dataRetrieval.tex b/inst/doc/dataRetrieval.tex
index 75f8d8adeaa76d0f68e69aef568a6f699d0bcffe..802fc70d5a0d3f3af0b8aa147e3cafaecd8722ff 100644
--- a/inst/doc/dataRetrieval.tex
+++ b/inst/doc/dataRetrieval.tex
@@ -64,13 +64,13 @@ The dataRetrieval package was created to simplify the process of getting hydrolo
 
 The dataRetrieval package is designed to retrieve many of the major data types of USGS hydrologic data that are available on the web, but also allows users to make use of other data that they supply from spreadsheets.  Section 2 provides examples of how one can obtain raw data from USGS sources on the web and ingest them into data frames within the R environment.  The functionality described in section 2 is for general use and is not tailored for the specific uses of the EGRET package.  The functionality described in section 3 is tailored specifically to obtaining input from the web and structuring them specifically for use in the EGRET package.  The functionality described in section 4 is for converting hydrologic data from user-supplied spreadsheets and structuring them specifically for use in the EGRET package.
 
-For information on getting started in R, downloading and installing the package, see Appendix 1: Getting Started (\ref{sec:appendix1}).
+For information on getting started in R and installing the package, see Appendix \ref{sec:appendix1}: Getting Started.
 
 
 %------------------------------------------------------------
 \section{General USGS Web Retrievals}
 %------------------------------------------------------------ 
-In this section, we will run through 5 examples, documenting how to get raw data from the web. This includes site information (\ref{sec:usgsSite}), measured parameter information (\ref{sec:usgsParams}), historical daily values(\ref{sec:usgsDaily}), real-time current values (\ref{sec:usgsRT}), and water quality data (\ref{sec:usgsWQP}) or (\ref{sec:usgsSTORET}). We will use the Choptank River near Greensboro, MD as an example.  The site-ID for this gage station is 01491000. Daily discharge measurements are available as far back as 1948.  Additionally, forms of nitrate have been measured dating back to 1964. The functions/examples in this section are for raw data retrieval.  This may or may not be the easiest data to work with.  In the next section, we will use functions that retrieve and process the data in a dataframe that may prove more friendly for R analysis.
+In this section, we will run through 5 examples, documenting how to get raw data from the web. This includes site information (\ref{sec:usgsSite}), measured parameter information (\ref{sec:usgsParams}), historical daily values (\ref{sec:usgsDaily}), real-time (unit) values (\ref{sec:usgsRT}), and water quality data (\ref{sec:usgsWQP}) or (\ref{sec:usgsSTORET}). We will use the Choptank River near Greensboro, MD as an example.  The site-ID for this gage station is 01491000. Daily discharge measurements are available as far back as 1948.  Additionally, forms of nitrate have been measured dating back to 1964. The functions/examples in this section are for raw data retrieval.  This may or may not be the easiest data to work with.  In the next section, we will use functions that retrieve and process the data in a dataframe that may prove more friendly for R analysis.
 
 %------------------------------------------------------------
 \subsection{Introduction}
@@ -81,15 +81,15 @@ Once the site-ID is known, the next required input for USGS data retrievals is t
 
 \url{http://nwis.waterdata.usgs.gov/usa/nwis/pmcodes?radio_pm_search=param_group&pm_group=All+--+include+all+parameter+groups&pm_search=&casrn_search=&srsname_search=&format=html_table&show=parameter_group_nm&show=parameter_nm&show=casrn&show=srsname&show=parameter_units}
 
-Not every station will measure all parameters. A list of commonly measured parameters is shown in Table \ref{tab:params}.
+Not every station will measure all parameters. A short list of commonly measured parameters is shown in Table \ref{tab:params}.
 
 
 
 % latex table generated in R 2.15.3 by xtable 1.7-1 package
-% Thu Mar 14 14:54:17 2013
+% Thu Mar 14 16:41:43 2013
 \begin{table}[ht]
 \centering
-\caption{Commonly found USGS Parameter Codes} 
+\caption{Common USGS Parameter Codes} 
 \label{tab:params}
 \begin{tabular}{ll}
   \hline
@@ -109,7 +109,7 @@ For real-time data, the parameter code and site ID will suffice.  For most varia
 
 Some common stat codes are shown in Table \ref{tab:stat}.
 % latex table generated in R 2.15.3 by xtable 1.7-1 package
-% Thu Mar 14 14:54:17 2013
+% Thu Mar 14 16:41:43 2013
 \begin{table}[ht]
 \centering
 \caption{Commonly found USGS Stat Codes} 
@@ -147,7 +147,7 @@ Use the getSiteFileData function to obtain all of the information available for
 \end{Sinput}
 \end{Schunk}
 
-A list of the available columns are found in Appendix 2: INFO dataframe (\ref{sec:appendix2INFO}). Pulling out a specific example piece of information, in this case station name can be done as follows:
+A list of the available columns are found in Appendix \ref{sec:appendix2INFO}: INFO dataframe. Pulling out a specific example piece of information, in this case station name can be done as follows:
 
 \begin{Schunk}
 \begin{Sinput}
@@ -184,15 +184,16 @@ To find out the available data at a particular USGS site, including measured par
 
 There is an additional argument to the getDataAvailability called longNames, which defaults to FALSE. Setting longNames to TRUE will cause the function to make a web service call for each parameter and return expanded information on that parameter. Currently, this is a very slow process because each parameter code makes a unique web service call. If the site does not have many measured parameters, setting longNames to TRUE is reasonable.
 
-It is also possible to only request information for certain variables. In the following example, we retrieve just the daily mean parameter information from the Choptank data availability dataframe (excluding all unit value and water quality values). 
+It is also possible to only request parameter information for a subset of variables. In the following example, we retrieve just the daily mean parameter information from the Choptank data availability dataframe (excluding all unit value and water quality values). getMultipleParameterNames is the function that is embedded in getDataAvailability, but it can also be used as a standalone function.
+
 
 \begin{Schunk}
 \begin{Sinput}
 > # Continuing from the previous example:
 > # This pulls out just the daily data:
-> ChoptankDailyData <- ChoptankAvailableData["dv" == ChoptankAvailableData$service,]
+> ChoptankDailyData <- subset(ChoptankAvailableData,"dv" == service)
 > # This pulls out the mean:
-> ChoptankDailyData <- ChoptankDailyData["00003" == ChoptankDailyData$statCd,]
+> ChoptankDailyData <- subset(ChoptankDailyData,"00003" == statCd)
 > #Now, make a call to get all of the parameter information:
 > pCodeINFO <- getMultipleParameterNames(ChoptankDailyData$parameter_cd)
 \end{Sinput}
@@ -206,8 +207,7 @@ Percent complete:
 \end{Sinput}
 \end{Schunk}
 
-The daily data at the Choptank River site can be displayed in a nice \LaTeX table using the xtable package. See Appendix \ref{app:createWordTable} for instructions on converting an R dataframe to a nice table in Microsoft Excel or Word.
-
+The daily data at the Choptank River site can be displayed in a \LaTeX table using the xtable package. See Appendix \ref{app:createWordTable} for instructions on converting an R dataframe to a table in Microsoft Excel or Word.
 
 \begin{Schunk}
 \begin{Sinput}
@@ -224,7 +224,7 @@ The daily data at the Choptank River site can be displayed in a nice \LaTeX tabl
        caption.placement="top",include.rownames=FALSE)
 \end{Sinput}
 % latex table generated in R 2.15.3 by xtable 1.7-1 package
-% Thu Mar 14 14:54:22 2013
+% Thu Mar 14 16:41:49 2013
 \begin{table}[ht]
 \centering
 \caption{Daily mean data availabile at the Choptank River} 
@@ -277,15 +277,14 @@ Parameter information is obtained from \url{http://nwis.waterdata.usgs.gov/nwis/
 \subsection{Daily Values}
 \label{sec:usgsDaily}
 %------------------------------------------------------------
-To obtain historic daily records of USGS data, use the retrieveNWISData function. The arguments for this function are siteNumber, parameterCd, startDate, endDate, statCd, and a logical (true/false) interactive. There are 2 default argument: statCd (defaults to \texttt{"}00003\texttt{"}), and interactive (defaults to TRUE).  If you want to use the default values, you do not need to list them in the function call. Setting the 'interactive' option to true will walk you through the function. It might make more sense to run large batch collections with the interactive option set to FALSE. 
+To obtain historic daily records of USGS data, use the retrieveNWISData function. The arguments for this function are siteNumber, parameterCd, startDate, endDate, statCd, and a logical (true/false) interactive. There are 2 default arguments: statCd (defaults to \texttt{"}00003\texttt{"}), and interactive (defaults to TRUE).  If you want to use the default values, you do not need to list them in the function call. Setting the \texttt{"}interactive\texttt{"} option to true will walk you through the function. It might make more sense to run large batch collections with the interactive option set to FALSE. 
 
 The dates (start and end) need to be in the format \texttt{"}YYYY-MM-DD\texttt{"} (note: the user does need to include the quotes).  Setting the start date to \texttt{"}\texttt{"} will indicate to the program to ask for the earliest date, setting the end date to \texttt{"}\texttt{"} will ask for the latest available date.
 
 \begin{Schunk}
 \begin{Sinput}
-> # Using defaults:
-> siteNumber <- "01491000"
-> parameterCd <- "00060"  # Discharge in cubic feet per second
+> # Continuing with our Choptank River example
+> parameterCd <- "00060"  # Discharge (cfs)
 > startDate <- ""  # Will request earliest date
 > endDate <- "" # Will request latest date
 > discharge <- retrieveNWISData(siteNumber, parameterCd, startDate, endDate)
@@ -311,14 +310,12 @@ The variable datetime is automatically imported as a Date. Each requested parame
 Another example that doesn't use the defaults would be a request for mean and maximum daily temperature and discharge in early 2012:
 \begin{Schunk}
 \begin{Sinput}
-> # Using defaults:
-> siteNumber <- "01491000" 
-> parameterCd <- "00010,00060"  # Temperature and discharge
-> statCd <- "00001,00003"  # Mean and maximum
+> parameterCd <- c("00010","00060")  # Temperature and discharge
+> statCd <- c("00001","00003")  # Mean and maximum
 > startDate <- "2012-01-01"
 > endDate <- "2012-06-30"
 > temperatureAndFlow <- retrieveNWISData(siteNumber, parameterCd, 
-                   startDate, endDate, StatCd=statCd,interactive=FALSE)
+         startDate, endDate, StatCd=statCd,interactive=FALSE)
 \end{Sinput}
 \end{Schunk}
 
@@ -364,10 +361,11 @@ Any data that are collected at regular time intervals (such as 15-minute or hour
 
 \begin{Schunk}
 \begin{Sinput}
-> siteNumber <- "01491000"
 > parameterCd <- "00060"  # Discharge (cfs)
-> startDate <- "2013-03-12" # or pick yesterday by the command as.character(Sys.Date()-1)
-> endDate <- "2013-03-13" # Today: as.character(Sys.Date())
+> startDate <- "2013-03-12" 
+> # or use (yesterday): startDate <- as.character(Sys.Date()-1)
+> endDate <- "2013-03-13" 
+> # or use (today):  endDate <- as.character(Sys.Date())
 > dischargeToday <- retrieveUnitNWISData(siteNumber, parameterCd, 
          startDate, endDate)
 \end{Sinput}
@@ -412,25 +410,17 @@ A simple plotting example is shown in Figure \ref{fig:RT}:
 \subsection{Water Quality Values}
 \label{sec:usgsWQP}
 %------------------------------------------------------------
-To get USGS water quality data from water samples collected at the streamgage (as distinct from unit values collected through some type of automatic monitor) we can use the Water Quality Data Portal: \url{http://www.waterqualitydata.us/}. The raw data are obtained from the function  getRawQWData, with the similar input arguments: siteNumber, parameterCd, startDate, endDate, and interactive. The difference is in parameterCd, in this function multiple parameters can be queried using a \texttt{"};\texttt{"} separator, and setting parameterCd to \texttt{"}\texttt{"} will return all of the measured observations. The raw data can be overwelming, a simplified version of the data can be obtained using getQWData.
+To get USGS water quality data from water samples collected at the streamgage (as distinct from unit values collected through some type of automatic monitor) we can use the Water Quality Data Portal: \url{http://www.waterqualitydata.us/}. The raw data are obtained from the function  getRawQWData, with the similar input arguments: siteNumber, parameterCd, startDate, endDate, and interactive. The difference is in parameterCd, in this function multiple parameters can be queried using a \texttt{"};\texttt{"} separator, and setting parameterCd to \texttt{"}\texttt{"} will return all of the measured observations. The raw data can be overwhelming (see Appendix \ref{sec:appendix2WQP}); a simplified version of the data can be obtained using getQWData. There is a large amount of data returned for each observation. 
 
 
 \begin{Schunk}
 \begin{Sinput}
-> siteNumber <- "01491000" 
 > # Dissolved Nitrate parameter codes:
-> parameterCd <- "00618;71851"  
+> parameterCd <- c("00618","71851")
 > startDate <- "1964-06-11"
 > endDate <- "2012-12-18"
 > dissolvedNitrate <- getRawQWData(siteNumber, parameterCd, 
        startDate, endDate)
-\end{Sinput}
-\end{Schunk}
-
-There is a large amount of data returned for each observation. The column names are listed in Appendix 2 (\ref{sec:appendix2WQP}). To get a simplified dataframe that contains only datetime, value, and qualifier, use the function getQWData:
-
-\begin{Schunk}
-\begin{Sinput}
 > dissolvedNitrateSimple <- getQWData(siteNumber, parameterCd, 
          startDate, endDate)
 > names(dissolvedNitrateSimple)
@@ -440,9 +430,7 @@ There is a large amount of data returned for each observation. The column names
 [5] "value.00618"    
 \end{Soutput}
 \end{Schunk}
-Note that in this dataframe, datetime is imported as Dates (no times are included), and the qualifier is either blank or \texttt{"}\verb@<@\texttt{"} signifying a censored value.
-
-An example of plotting the above data (Figure \ref{fig:nitrate}):
+Note that in this dataframe, datetime is imported as Dates (no times are included), and the qualifier is either blank or \texttt{"}\verb@<@\texttt{"} signifying a censored value. A plotting example is shown in Figure \ref{fig:nitrate}.
 
 \begin{Schunk}
 \begin{Sinput}
@@ -486,6 +474,25 @@ There are additional data sets available on the Water Quality Data Portal (\url{
 \end{Soutput}
 \end{Schunk}
 
+%------------------------------------------------------------
+\subsection{URL Construction}
+\label{sec:usgsURL}
+%------------------------------------------------------------
+There may be times when you might be interested in seeing the URL (web address) that was used to obtain the raw data. The constructNWISURL function returns the URL.  Aside from input variables that have already been described, there is a new argument \texttt{"}service\texttt{"}. The service argument can be \texttt{"}dv\texttt{"} (daily values), \texttt{"}uv\texttt{"} (unit values), \texttt{"}qw\texttt{"} (NWIS water quality values), or \texttt{"}wqp\texttt{"} (general Water Quality Portal values).
+ 
+
+\begin{Schunk}
+\begin{Sinput}
+> # Dissolved Nitrate parameter codes:
+> pCode <- c("00618","71851")
+> startDate <- "1964-06-11"
+> endDate <- "2012-12-18"
+> url_qw <- constructNWISURL(siteNumber,pCode,startDate,endDate,'qw')
+> url_dv <- constructNWISURL(siteNumber,"00060",startDate,endDate,'dv',statCd="00003")
+> url_uv <- constructNWISURL(siteNumber,"00060",startDate,endDate,'uv')
+\end{Sinput}
+\end{Schunk}
+
 
 %------------------------------------------------------------
 \section{Data Retrievals Structured For Use In The EGRET Package}
@@ -501,6 +508,7 @@ The function to obtain metadata, or data about the streamgage and measured param
 
 \begin{Schunk}
 \begin{Sinput}
+> parameterCd <- "00618"
 > INFO <-getMetaData(siteNumber,parameterCd, interactive=FALSE)
 \end{Sinput}
 \end{Schunk}
@@ -526,7 +534,7 @@ The function to obtain the daily values (discharge in this case) is getDVData.
 Details of the Daily dataframe are listed below:
 
 % latex table generated in R 2.15.3 by xtable 1.7-1 package
-% Thu Mar 14 14:54:30 2013
+% Thu Mar 14 16:41:59 2013
 \begin{tabular}{llll}
   \hline
 ColumnName & Type & Description & Units \\ 
@@ -563,7 +571,7 @@ The function to obtain sample data from the water quality portal is getSampleDat
 Details of the Sample dataframe are listed below:
 
 % latex table generated in R 2.15.3 by xtable 1.7-1 package
-% Thu Mar 14 14:54:32 2013
+% Thu Mar 14 16:42:01 2013
 \begin{table}[!ht]
 \centering
 \caption{Sample dataframe} 
@@ -599,7 +607,7 @@ As an example to understand how the dataRetrieval package handles a more complex
 \begin{center}
 
 % latex table generated in R 2.15.3 by xtable 1.7-1 package
-% Thu Mar 14 14:54:32 2013
+% Thu Mar 14 16:42:01 2013
 \begin{tabular}{llrlrlr}
   \hline
 cdate & rdp & dp & rpp & pp & rtp & tp \\ 
@@ -748,8 +756,24 @@ Finally, there is a function called mergeReport that will look at both the Daily
 %------------------------------------------------------------
 \subsection{EGRET Plots}
 %------------------------------------------------------------
-The EGRET package....
+As has been mentioned, the data are specifically formatted to be used with the EGRET package. The EGRET package has powerful modeling capabilities using WRTDS, but also has a variety of graphing and tabular tools to explore the data without using the WRTDS algorithm. See the EGRET vignette, user guide, and/or wiki (\url{https://github.com/USGS-R/EGRET/wiki}) for detailed information. The following figure is an example of one of the plotting functions that can be used directly from the dataRetrieval dataframes.
 
+\begin{Schunk}
+\begin{Sinput}
+> # Continuing Choptank example from the previous sections
+> library(EGRET)
+> multiPlotDataOverview()
+\end{Sinput}
+\end{Schunk}
+
+\begin{figure}[ht]
+\begin{center}
+
+\includegraphics{dataRetrieval-figegretEx}
+\end{center}
+\caption{Default multiPlotDataOverview}
+\label{fig:multiPlotDataOverview}
+\end{figure}
 
 \clearpage
 \appendix
@@ -833,8 +857,6 @@ To then open the library, simply type:
 \end{Sinput}
 \end{Schunk}
 
-\FloatBarrier
-
 %------------------------------------------------------------ 
 \section{Columns Names}
 \label{sec:appendix2}
@@ -846,7 +868,7 @@ To then open the library, simply type:
 %------------------------------------------------------------
 
 % latex table generated in R 2.15.3 by xtable 1.7-1 package
-% Thu Mar 14 14:54:34 2013
+% Thu Mar 14 16:42:05 2013
 \begin{tabular}{l}
   \hline
   \hline
@@ -904,7 +926,7 @@ agency.cd \\
 There are 62 columns returned from the water quality portal. 
 
 % latex table generated in R 2.15.3 by xtable 1.7-1 package
-% Thu Mar 14 14:54:34 2013
+% Thu Mar 14 16:42:05 2013
 \begin{tabular}{l}
   \hline
   \hline
@@ -953,7 +975,7 @@ OrganizationIdentifier \\
 \FloatBarrier
 
 % latex table generated in R 2.15.3 by xtable 1.7-1 package
-% Thu Mar 14 14:54:34 2013
+% Thu Mar 14 16:42:05 2013
 \begin{tabular}{l}
   \hline
   \hline
@@ -987,7 +1009,7 @@ ResultTimeBasisText \\
 \section{Creating tables in Microsoft from R}
 \label{app:createWordTable}
 %------------------------------------------------------------
-There are a few steps that are required in order to create a table in a Microsoft product (Excel, Word, Powerpoint, etc.) from an R dataframe. There are actually a variety of methods, one of which is detailed here. The example we will step through here will be to create the following data.
+There are a few steps that are required in order to create a table in a Microsoft product (Excel, Word, Powerpoint, etc.) from an R dataframe. There are certainly a variety of good methods, one of which is detailed here. The example we will step through here will be to create a table in Microsoft Word based on the dataframe tableData:
 
 \begin{Schunk}
 \begin{Sinput}
@@ -1004,21 +1026,11 @@ There are a few steps that are required in order to create a table in a Microsof
        Count=count,
        Units=parameter_units)
        )
-> tableData
 \end{Sinput}
-\begin{Soutput}
-                               shortName      Start        End Count      Units
-1                     Temperature, water 2010-10-01 2012-06-24   575      deg C
-2               Stream flow, mean. daily 1948-01-01 2013-03-13 23814        cfs
-3                   Specific conductance 2010-10-01 2012-06-24   551 uS/cm @25C
-4 Suspended sediment concentration (SSC) 1980-10-01 1991-09-30  3651       mg/l
-5           Suspended sediment discharge 1980-10-01 1991-09-30  3652   tons/day
-\end{Soutput}
 \end{Schunk}
 
-Our goal now is to get the data from the dataframe tableData to a Microsoft Word table. 
+First, save the dataframe as a tab delimited file (you don't want to use comma delimited because there are commas in some of the data elements):
 
-First, save the dataframe as a tab delimited file:
 
 \begin{Schunk}
 \begin{Sinput}
diff --git a/inst/doc/dataRetrieval.toc b/inst/doc/dataRetrieval.toc
index df3eefd5c4fd23a57ddae6aea05b09b51dd9bd27..3fd8a595d90bed5ee4668f034a640a91d085bd73 100644
--- a/inst/doc/dataRetrieval.toc
+++ b/inst/doc/dataRetrieval.toc
@@ -9,22 +9,23 @@
 \contentsline {subsection}{\numberline {2.4}Daily Values}{7}{subsection.2.4}
 \contentsline {subsection}{\numberline {2.5}Unit Values}{9}{subsection.2.5}
 \contentsline {subsection}{\numberline {2.6}Water Quality Values}{11}{subsection.2.6}
-\contentsline {subsection}{\numberline {2.7}STORET Water Quality Retrievals}{13}{subsection.2.7}
+\contentsline {subsection}{\numberline {2.7}STORET Water Quality Retrievals}{12}{subsection.2.7}
+\contentsline {subsection}{\numberline {2.8}URL Construction}{13}{subsection.2.8}
 \contentsline {section}{\numberline {3}Data Retrievals Structured For Use In The EGRET Package}{13}{section.3}
-\contentsline {subsection}{\numberline {3.1}INFO Data}{13}{subsection.3.1}
+\contentsline {subsection}{\numberline {3.1}INFO Data}{14}{subsection.3.1}
 \contentsline {subsection}{\numberline {3.2}Daily Data}{14}{subsection.3.2}
 \contentsline {subsection}{\numberline {3.3}Sample Data}{15}{subsection.3.3}
 \contentsline {subsection}{\numberline {3.4}Censored Data Evaluation}{15}{subsection.3.4}
 \contentsline {subsection}{\numberline {3.5}User-Generated Data Files}{17}{subsection.3.5}
 \contentsline {subsubsection}{\numberline {3.5.1}getDailyDataFromFile}{17}{subsubsection.3.5.1}
 \contentsline {subsubsection}{\numberline {3.5.2}getSampleDataFromFile}{18}{subsubsection.3.5.2}
-\contentsline {subsection}{\numberline {3.6}Merge Report}{18}{subsection.3.6}
-\contentsline {subsection}{\numberline {3.7}EGRET Plots}{19}{subsection.3.7}
-\contentsline {section}{\numberline {A}Getting Started in R}{20}{appendix.A}
-\contentsline {subsection}{\numberline {A.1}New to R?}{20}{subsection.A.1}
-\contentsline {subsection}{\numberline {A.2}R User: Installing dataRetrieval}{20}{subsection.A.2}
-\contentsline {subsection}{\numberline {A.3}R Developers: Installing dataRetrieval from gitHub}{21}{subsection.A.3}
-\contentsline {section}{\numberline {B}Columns Names}{23}{appendix.B}
-\contentsline {subsection}{\numberline {B.1}INFO dataframe}{23}{subsection.B.1}
-\contentsline {subsection}{\numberline {B.2}Water Quality Portal}{24}{subsection.B.2}
-\contentsline {section}{\numberline {C}Creating tables in Microsoft from R}{26}{appendix.C}
+\contentsline {subsection}{\numberline {3.6}Merge Report}{19}{subsection.3.6}
+\contentsline {subsection}{\numberline {3.7}EGRET Plots}{20}{subsection.3.7}
+\contentsline {section}{\numberline {A}Getting Started in R}{22}{appendix.A}
+\contentsline {subsection}{\numberline {A.1}New to R?}{22}{subsection.A.1}
+\contentsline {subsection}{\numberline {A.2}R User: Installing dataRetrieval}{22}{subsection.A.2}
+\contentsline {subsection}{\numberline {A.3}R Developers: Installing dataRetrieval from gitHub}{23}{subsection.A.3}
+\contentsline {section}{\numberline {B}Columns Names}{25}{appendix.B}
+\contentsline {subsection}{\numberline {B.1}INFO dataframe}{25}{subsection.B.1}
+\contentsline {subsection}{\numberline {B.2}Water Quality Portal}{26}{subsection.B.2}
+\contentsline {section}{\numberline {C}Creating tables in Microsoft from R}{28}{appendix.C}
diff --git a/man/checkStartEndDate.Rd b/man/checkStartEndDate.Rd
index 892f64cca9c0ad84ad4ad691484ae396ed210d2e..aa1aa87150fb5d96f1f4f053fd04972bebad9d9e 100644
--- a/man/checkStartEndDate.Rd
+++ b/man/checkStartEndDate.Rd
@@ -26,6 +26,6 @@ startDate <- '1985-01-01'
 endDate <- '1990-01-01'
 checkStartEndDate(startDate, endDate, interactive = FALSE)
 }
-\keyword{WRTDS}
 \keyword{flow}
+\keyword{WRTDS}
 
diff --git a/man/compressData.Rd b/man/compressData.Rd
index 92d9134d44d9369c41a427c163d614c96601a8d5..17d40a4d71f6d04dd298450d5fc90f0746fd515e 100644
--- a/man/compressData.Rd
+++ b/man/compressData.Rd
@@ -36,6 +36,6 @@ value3 <- c(3,4,5)
 dataInput <- data.frame(dateTime, comment1, value1, comment2, value2, comment3, value3, stringsAsFactors=FALSE)
 compressData(dataInput, interactive=FALSE)
 }
-\keyword{WRTDS}
 \keyword{flow}
+\keyword{WRTDS}
 
diff --git a/man/constructNWISURL.Rd b/man/constructNWISURL.Rd
index fe2096e0ae220a8e227ef02106d805ab2727f6e0..7a4a78ff1b56100e2e5b56229dd29487617f6ee0 100644
--- a/man/constructNWISURL.Rd
+++ b/man/constructNWISURL.Rd
@@ -18,9 +18,9 @@
   \item{endDate}{string ending date for data retrieval in
   the form YYYY-MM-DD.}
 
-  \item{statCd}{string USGS statistic code only used for
-  daily value service. This is usually 5 digits.  Daily
-  mean (00003) is the default.}
+  \item{statCd}{string or vector USGS statistic code only
+  used for daily value service. This is usually 5 digits.
+  Daily mean (00003) is the default.}
 
   \item{service}{string USGS service to call. Possible
   values are "dv" (daily values), "uv" (unit/instantaneous
@@ -44,15 +44,15 @@ siteNumber <- '04085427'
 startDate <- '2012-01-01'
 endDate <- '2012-06-30'
 pCode <- c("00060","00010")
-url_daily <- constructNWISURL(siteNumber,pCode,startDate,endDate,'dv')
+url_daily <- constructNWISURL(siteNumber,pCode,startDate,endDate,'dv',statCd=c("00003","00001"))
 url_unit <- constructNWISURL(siteNumber,pCode,startDate,endDate,'iv')
 url_qw_single <- constructNWISURL(siteNumber,"34220",startDate,endDate,'qwdata')
 url_qw <- constructNWISURL(siteNumber,c('34247','30234','32104','34220'),startDate,endDate,'qwdata')
 url_wqp <- constructNWISURL(siteNumber,"34220",startDate,endDate,'wqp')
 }
-\keyword{USGS}
 \keyword{data}
 \keyword{import}
 \keyword{service}
+\keyword{USGS}
 \keyword{web}
 
diff --git a/man/dataOverview.Rd b/man/dataOverview.Rd
index 6fe56a3ac2930bf546840bcbdb75fea354dadd3f..c99da9b50f9d0142972deef2188cd549ca498cfd 100644
--- a/man/dataOverview.Rd
+++ b/man/dataOverview.Rd
@@ -21,8 +21,8 @@ dataOverview(localDaily = exDaily, localSample = exSample)
 \seealso{
   \code{\link{mergeReport}}
 }
-\keyword{USGS}
-\keyword{WRTDS}
 \keyword{data}
 \keyword{import}
+\keyword{USGS}
+\keyword{WRTDS}
 
diff --git a/man/dateFormatCheck.Rd b/man/dateFormatCheck.Rd
index dd75fee619a69001adad54bba538e9d5980ee496..99532817221093d2a64e3ce0e6ac8e3e3aabd07b 100644
--- a/man/dateFormatCheck.Rd
+++ b/man/dateFormatCheck.Rd
@@ -18,6 +18,6 @@
 date <- '1985-01-01'
 dateFormatCheck(date)
 }
-\keyword{WRTDS}
 \keyword{flow}
+\keyword{WRTDS}
 
diff --git a/man/formatCheckDate.Rd b/man/formatCheckDate.Rd
index 64c54b65061785ecf5741d3f1c4eac8c9ac3af0d..0bf8637101b5ac2a8f016bcc8faccdc3edac4dfe 100644
--- a/man/formatCheckDate.Rd
+++ b/man/formatCheckDate.Rd
@@ -27,6 +27,6 @@ Date <- '1985-01-01'
 dateString <- 'StartDate'
 formatCheckDate(Date, dateString, interactive = FALSE)
 }
-\keyword{WRTDS}
 \keyword{flow}
+\keyword{WRTDS}
 
diff --git a/man/formatCheckParameterCd.Rd b/man/formatCheckParameterCd.Rd
index d1574546ed4940e46494b4c4bbd46bacce0da90e..086bea0aa058edc05bb1775bf7025433c33b96ee 100644
--- a/man/formatCheckParameterCd.Rd
+++ b/man/formatCheckParameterCd.Rd
@@ -23,6 +23,6 @@
 pCode <- '01234'
 formatCheckParameterCd(pCode, interactive = FALSE)
 }
-\keyword{WRTDS}
 \keyword{flow}
+\keyword{WRTDS}
 
diff --git a/man/formatCheckSiteNumber.Rd b/man/formatCheckSiteNumber.Rd
index 5d8b42cd9e9cb3a8d96bdf4d70ac01ec0fbe8a03..60a912a66d584e466306e309483b57e7a1f38d54 100644
--- a/man/formatCheckSiteNumber.Rd
+++ b/man/formatCheckSiteNumber.Rd
@@ -22,6 +22,6 @@
 site<- '01234567'
 formatCheckSiteNumber(site, interactive = FALSE)
 }
-\keyword{WRTDS}
 \keyword{flow}
+\keyword{WRTDS}