diff --git a/DESCRIPTION b/DESCRIPTION
index c1f6aacd09f6520f362c455613f08334a51c24b8..906d77c3fac586ebe1f7c0e6175ce26f2c52dc92 100644
--- a/DESCRIPTION
+++ b/DESCRIPTION
@@ -63,6 +63,8 @@ Imports:
     RCurl
 Suggests:
     xtable,
-    EGRET
+    EGRET,
+    knitr
 LazyLoad: yes
 LazyData: yes
+VignetteBuilder: knitr
diff --git a/inst/doc/dataRetrieval.pdf b/inst/doc/dataRetrieval.pdf
index a318b868a025a593b301ed6a90543c0c360c97ac..207ffeebf0b083755d7cfc647cf009fd7349198f 100644
Binary files a/inst/doc/dataRetrieval.pdf and b/inst/doc/dataRetrieval.pdf differ
diff --git a/vignettes/Rplots.pdf b/vignettes/Rplots.pdf
deleted file mode 100644
index 1ea11ca3db2dea66119dbe12b19dbb2b9040206d..0000000000000000000000000000000000000000
Binary files a/vignettes/Rplots.pdf and /dev/null differ
diff --git a/vignettes/dataRetrieval-concordance.tex b/vignettes/dataRetrieval-concordance.tex
index 6666b7b667561fba80b016bbd472f5700fae6068..6531fec293bf5529c291ee7833f0ef8b878bf68a 100644
--- a/vignettes/dataRetrieval-concordance.tex
+++ b/vignettes/dataRetrieval-concordance.tex
@@ -1,16 +1,9 @@
 \Sconcordance{concordance:dataRetrieval.tex:dataRetrieval.Rnw:%
-1 84 1 1 8 1 1 1 10 16 0 1 2 5 1 1 10 15 0 1 2 13 1 1 2 1 0 1 2 1 0 1 1 %
-3 0 1 2 2 1 1 2 7 0 1 2 7 1 1 3 2 0 1 1 12 0 1 2 5 1 1 4 3 0 1 3 1 0 1 %
-3 7 0 1 3 4 0 1 2 2 1 1 8 7 0 1 4 1 0 1 2 21 0 1 2 7 1 1 3 2 0 2 1 7 0 %
-1 2 1 1 1 2 7 0 1 2 9 1 1 4 2 0 2 1 1 2 3 0 1 2 4 1 1 3 1 0 3 1 1 3 5 0 %
-1 3 4 1 1 2 1 0 1 4 3 0 1 1 1 4 3 0 3 1 3 0 1 2 3 1 1 -5 1 9 14 1 1 3 1 %
-0 1 1 1 2 1 0 1 3 5 0 2 2 10 0 1 2 3 1 1 2 1 0 1 4 3 0 1 1 4 0 1 3 3 1 %
-1 -6 1 10 12 1 1 4 2 0 2 1 1 3 1 0 1 3 1 0 1 1 7 0 1 2 1 1 1 6 5 0 1 1 %
-3 0 1 2 2 1 1 -4 1 8 11 1 1 3 2 0 1 1 12 0 1 2 8 1 1 3 2 0 5 1 3 0 1 2 %
-13 1 1 2 1 0 1 1 3 0 1 2 8 1 1 2 1 0 2 1 1 3 5 0 1 2 2 1 1 11 18 0 1 2 %
-9 1 1 3 5 0 1 2 2 1 1 12 24 0 1 2 11 1 1 14 12 0 1 2 9 1 1 2 17 0 1 3 %
-28 1 1 2 1 0 2 1 3 0 1 2 16 1 1 2 1 0 2 1 3 0 1 2 7 1 1 2 1 0 3 1 1 2 2 %
-1 11 0 1 1 19 0 1 2 6 1 1 3 2 0 1 1 3 0 1 2 3 1 1 -5 1 9 21 1 1 2 4 0 1 %
-2 1 1 1 2 13 0 1 2 6 1 1 2 1 0 1 1 3 0 1 2 3 1 1 2 4 0 1 2 7 1 1 2 1 0 %
-1 1 3 0 1 2 1 1 1 2 4 0 1 2 10 1 1 5 47 0 1 2 9 1 1 6 45 0 1 2 2 1 1 6 %
-27 0 1 2 8 1 1 2 1 0 4 1 1 9 10 0 1 2 3 1 1 3 5 0 1 2 53 1}
+1 49 1 55 0 1 6 11 1 1 5 41 1 10 0 16 1 9 0 21 1 5 0 %
+6 1 8 0 14 1 14 0 24 1 11 0 17 1 20 0 15 1 10 0 5 1 8 %
+0 20 1 5 0 16 1 4 0 21 1 10 0 20 1 5 0 4 1 18 0 13 1 %
+9 0 26 1 9 0 10 1 10 0 14 1 21 0 19 1 5 0 19 1 5 0 17 %
+1 8 0 14 1 15 0 16 1 5 0 61 1 6 0 14 1 17 0 36 1 5 0 %
+24 1 5 0 20 1 38 0 13 1 10 0 22 1 5 0 5 1 14 0 10 1 5 %
+0 7 1 5 0 16 1 51 0 15 1 49 0 7 1 32 0 26 1 25 0 8 1 %
+5 0 56 1}
diff --git a/vignettes/dataRetrieval-fig1.pdf b/vignettes/dataRetrieval-fig1.pdf
deleted file mode 100644
index dbfd9bda6bacf9c84842581d24aad56e85715d44..0000000000000000000000000000000000000000
Binary files a/vignettes/dataRetrieval-fig1.pdf and /dev/null differ
diff --git a/vignettes/dataRetrieval-fig2.pdf b/vignettes/dataRetrieval-fig2.pdf
deleted file mode 100644
index 01ce0beba2e98a3c5754144c36a445407def471c..0000000000000000000000000000000000000000
Binary files a/vignettes/dataRetrieval-fig2.pdf and /dev/null differ
diff --git a/vignettes/dataRetrieval-fig3.pdf b/vignettes/dataRetrieval-fig3.pdf
deleted file mode 100644
index 099e61b857d9b23f054ac3ffdcaaefc505732802..0000000000000000000000000000000000000000
Binary files a/vignettes/dataRetrieval-fig3.pdf and /dev/null differ
diff --git a/vignettes/dataRetrieval-figegretEx.pdf b/vignettes/dataRetrieval-figegretEx.pdf
deleted file mode 100644
index a191efc14b2a5f7f6eab9c68304bf34433cc4a67..0000000000000000000000000000000000000000
Binary files a/vignettes/dataRetrieval-figegretEx.pdf and /dev/null differ
diff --git a/vignettes/dataRetrieval.Rnw b/vignettes/dataRetrieval.Rnw
index a92f5947bfd6a56788a3cc14de45b9d77645a18e..11455595d5c818ae8dc8aa61f0325214f677423a 100644
--- a/vignettes/dataRetrieval.Rnw
+++ b/vignettes/dataRetrieval.Rnw
@@ -1,8 +1,9 @@
 %\VignetteIndexEntry{Introduction to the dataRetrieval package}
+%\VignetteEngine{knitr::knitr}
 %\VignetteDepends{}
-%\VignetteSuggests{}
-%\VignetteImports{}
-%\VignettePackage{}
+%\VignetteSuggests{xtable,EGRET}
+%\VignetteImports{zoo, XML, RCurl}
+%\VignettePackage{dataRetrieval}
 
 \documentclass[a4paper,11pt]{article}
 
@@ -12,10 +13,12 @@
 \usepackage[numbers, round]{natbib}
 \usepackage[american]{babel}
 \usepackage{authblk}
-\usepackage{footnote}
+\usepackage{subfig}
 \usepackage{placeins}
+\usepackage{footnote}
+\usepackage{tabularx}
 \renewcommand\Affilfont{\itshape\small}
-\usepackage{Sweave}
+
 \renewcommand{\topfraction}{0.85}
 \renewcommand{\textfraction}{0.1}
 \usepackage{graphicx}
@@ -42,7 +45,16 @@
 \newcommand{\Rfunarg}[1]{{\texttt{#1}}}
 
 \begin{document}
-\SweaveOpts{concordance=TRUE}
+
+
+<<openLibrary, echo=FALSE>>=
+library(xtable)
+options(continue=" ")
+options(width=60)
+library(knitr)
+
+@
+
 
 %------------------------------------------------------------
 \title{The dataRetrieval R package}
@@ -51,7 +63,14 @@
 \author[1]{Robert Hirsch}
 \affil[1]{United States Geological Survey}
 
-\SweaveOpts{highlight=TRUE, tidy=TRUE, keep.space=TRUE, keep.blank.space=FALSE, keep.comment=TRUE, keep.source=TRUE}
+
+<<include=TRUE, echo=FALSE, eval=TRUE>>=
+opts_chunk$set(highlight=TRUE, tidy=FALSE, keep.space=TRUE, keep.blank.space=FALSE, keep.comment=TRUE)
+knit_hooks$set(inline = function(x) {
+   if (is.numeric(x)) round(x, 3) else x})
+knit_hooks$set(crop = hook_pdfcrop)
+@
+
 
 \maketitle
 \tableofcontents
@@ -77,31 +96,20 @@ In this section, we will run through 5 examples, documenting how to get raw data
 %------------------------------------------------------------
 The United States Geological Survey organizes its hydrological data in a standard structure.  Streamgages are located throughout the United States, and each streamgage has a unique ID.  Often (but not always), these IDs are 8 digits.  The first step to finding data is discovering this 8-digit ID. One potential tool for discovering data is Environmental Data Discovery and Transformation (EnDDaT): \url{http://cida.usgs.gov/enddat/}.  Follow the example on the EnDDaT web page to learn how to discover USGS stations and available data from any location in the United States. 
 
-Once the site-ID is known, the next required input for USGS data retrievals is the 'parameter code'.  This is a 5-digit code that specifies what measured paramater is being requested.  A complete list of possible USGS parameter codes can be found at:
-
-\url{http://nwis.waterdata.usgs.gov/usa/nwis/pmcodes?radio_pm_search=param_group&pm_group=All+--+include+all+parameter+groups&pm_search=&casrn_search=&srsname_search=&format=html_table&show=parameter_group_nm&show=parameter_nm&show=casrn&show=srsname&show=parameter_units}
+Once the site-ID is known, the next required input for USGS data retrievals is the 'parameter code'.  This is a 5-digit code that specifies what measured parameter is being requested.  A complete list of possible USGS parameter codes can be found at \url{http://go.usa.gov/bVDz}.
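+
+If a parameter code is already known, its definition can also be looked up from within R (a minimal sketch using the getParameterInfo function described in section \ref{sec:usgsParams}):
+
+<<pCodeLookupSketch, echo=TRUE, eval=FALSE>>=
+# Look up the definition of parameter code 00060 (discharge):
+getParameterInfo("00060")
+@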
 
 Not every station will measure all parameters. A short list of commonly measured parameters is shown in Table \ref{tab:params}.
 
-<<openLibrary, echo=FALSE>>=
-library(xtable)
-options(continue=" ")
-# options(width=60)
-# options(SweaveHooks=list(fig=function()
-#   par(mar=c(4.1,4.1,1.1,4.1),oma=c(0,0,0,0))))
-@
-
 
-<<label=tableParameterCodes, echo=FALSE,results=tex>>=
+<<tableParameterCodes, echo=FALSE,results='asis'>>=
 pCode <- c('00060', '00065', '00010','00045','00400')
 shortName <- c("Discharge [cfs]","Gage height [ft]","Temperature [C]", "Precipitation [in]", "pH")
 
 data.df <- data.frame(pCode, shortName, stringsAsFactors=FALSE)
 
-data.table <- xtable(data.df,label="tab:params",
+xtable(data.df,label="tab:params",
                      caption="Common USGS Parameter Codes")
-print(data.table,
-      caption.placement="top",include.rownames=FALSE, table.placement="!ht")
+
 @
 
 For real-time data, the parameter code and site ID will suffice.  For most variables that are measured on a continuous basis, the USGS stores the historical data as daily values.  These daily values may be in the form of statistics such as the daily mean values, but they can also include daily maximums, minimums or medians.  These different statistics are specified by a 5-digit \texttt{"}stat code\texttt{"}.  A complete list of stat codes can be found here:
@@ -109,19 +117,19 @@ For real-time data, the parameter code and site ID will suffice.  For most varia
 \url{http://nwis.waterdata.usgs.gov/nwis/help/?read_file=stat&format=table}
 
 Some common stat codes are shown in Table \ref{tab:stat}.
-<<label=tableStatCodes, echo=FALSE,results=tex>>=
+<<tableStatCodes, echo=FALSE,results='asis'>>=
 StatCode <- c('00001', '00002', '00003','00008')
 shortName <- c("Maximum","Minimum","Mean", "Median")
 
 data.df <- data.frame(StatCode, shortName, stringsAsFactors=FALSE)
 
-data.table <- xtable(data.df,label="tab:stat",
+xtable(data.df,label="tab:stat",
                      caption="Commonly found USGS Stat Codes")
-print(data.table, 
-      caption.placement="top",include.rownames=FALSE, table.placement="!ht")
+
 @
 
 \FloatBarrier
+
 %------------------------------------------------------------
 \subsection{Site Information}
 \label{sec:usgsSite}
@@ -134,7 +142,7 @@ print(data.table,
 Use the getSiteFileData function to obtain all of the information available for a particular USGS site such as full station name, drainage area, latitude, and longitude:
 
 
-<<label=getSite, echo=TRUE>>=
+<<getSite, echo=TRUE>>=
 library(dataRetrieval)
 # Site ID for Choptank River near Greensboro, MD
 siteNumber <- "01491000" 
@@ -143,18 +151,19 @@ ChoptankInfo <- getSiteFileData(siteNumber)
 
 A list of the available columns is found in Appendix \ref{sec:appendix2INFO}: INFO dataframe. Pulling out a specific piece of information, in this case the station name, can be done as follows:
 
-<<siteNames, echo=TRUE>>=
+<<siteNames2, echo=TRUE>>=
 ChoptankInfo$station.nm
 @
 Site information is obtained from \url{http://waterservices.usgs.gov/rest/Site-Test-Tool.html}
 \FloatBarrier
+
 %------------------------------------------------------------
 \subsubsection{getDataAvailability}
 \label{sec:usgsDataAvailability}
 %------------------------------------------------------------
 To find out the available data at a particular USGS site, including measured parameters, period of record, and number of samples (count), use the getDataAvailability function:
 
-<<label=getSiteAvailability, echo=TRUE>>=
+<<getSiteAvailability, echo=TRUE>>=
 # Continuing from the previous example:
 ChoptankAvailableData <- getDataAvailability(siteNumber)
 head(ChoptankAvailableData)
@@ -165,24 +174,28 @@ There is an additional argument to the getDataAvailability called longNames, whi
 It is also possible to request parameter information for only a subset of variables. In the following example, we retrieve just the daily mean parameter information from the Choptank data availability dataframe (excluding all unit value and water quality values). getMultipleParameterNames is the function embedded in getDataAvailability, but it can also be used as a standalone function.
 
 
-<<label=getSiteExtended, echo=TRUE>>=
+<<getSiteExtended, echo=TRUE>>=
 # Continuing from the previous example:
 # This pulls out just the daily data:
-ChoptankDailyData <- subset(ChoptankAvailableData,"dv" == service)
+ChoptankDailyData <- subset(ChoptankAvailableData,
+                            "dv" == service)
 
 # This pulls out the mean:
-ChoptankDailyData <- subset(ChoptankDailyData,"00003" == statCd)
+ChoptankDailyData <- subset(ChoptankDailyData,
+                            "00003" == statCd)
 
 #Now, make a call to get all of the parameter information:
-pCodeINFO <- getMultipleParameterNames(ChoptankDailyData$parameter_cd)
+pCodeINFO <- getMultipleParameterNames(
+  ChoptankDailyData$parameter_cd)
 
-#Merge the available dataframe with the parameter information dataframe:
-ChoptankDailyData <- merge(ChoptankDailyData,pCodeINFO,by="parameter_cd")
+#Merge the available dataframes:
+ChoptankDailyData <- merge(ChoptankDailyData,
+                           pCodeINFO,by="parameter_cd")
 @
 
 The daily data at the Choptank River site can be displayed in a \LaTeX table using the xtable package. See Appendix \ref{app:createWordTable} for instructions on converting an R dataframe to a table in Microsoft Excel or Word.
 
-<<label=tablegda, echo=TRUE,results=tex>>=
+<<tablegda, echo=TRUE,results='asis'>>=
 tableData <- with(ChoptankDailyData, 
       data.frame(shortName=srsname, 
       Start=as.character(startDate), 
@@ -192,20 +205,20 @@ tableData <- with(ChoptankDailyData,
       )
 
 
-data.table <- xtable(tableData,label="tab:gda",
+xtable(tableData,label="tab:gda",
     caption="Daily mean data available at the Choptank River")
-print(data.table, 
-      caption.placement="top",include.rownames=FALSE)
+
 @
 
 
 \FloatBarrier
+
 %------------------------------------------------------------
 \subsection{Parameter Information}
 \label{sec:usgsParams}
 %------------------------------------------------------------
 To obtain all of the available information concerning a measured parameter, use the getParameterInfo function:
-<<label=getSite, echo=TRUE>>=
+<<label=getPCodeInfo, echo=TRUE>>=
 # Using defaults:
 parameterCd <- "00618" 
 parameterINFO <- getParameterInfo(parameterCd)
@@ -233,7 +246,8 @@ parameterCd <- "00060"  # Discharge (cfs)
 startDate <- ""  # Will request earliest date
 endDate <- "" # Will request latest date
 
-discharge <- retrieveNWISData(siteNumber, parameterCd, startDate, endDate)
+discharge <- retrieveNWISData(siteNumber, 
+                    parameterCd, startDate, endDate)
 @
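+
+The returned dataframe can be inspected directly to see exactly which columns came back (a minimal sketch using base R, assuming the discharge dataframe from the chunk above):
+
+<<inspectColumns, echo=TRUE, eval=FALSE>>=
+# List the returned columns and preview the data:
+names(discharge)
+head(discharge)
+@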
 
 The variable datetime is automatically imported as a Date. Each requested parameter has a value and remark code column.  The names of these columns depend on the requested parameter and stat code combinations. USGS remark codes are often \texttt{"}A\texttt{"} (approved for publication) or \texttt{"}P\texttt{"} (provisional data subject to revision). A more complete list of remark codes can be found here:
@@ -248,15 +262,16 @@ startDate <- "2012-01-01"
 endDate <- "2012-06-30"
 
 temperatureAndFlow <- retrieveNWISData(siteNumber, parameterCd, 
-        startDate, endDate, StatCd=statCd,interactive=FALSE)
+        startDate, endDate, StatCd=statCd)
 
 @
 
 Daily data is pulled from \url{http://waterservices.usgs.gov/rest/DV-Test-Tool.html}. 
 
-An example of plotting the above data (Figure \ref{fig:TD}):
+An example of plotting the above data (Figure \ref{fig:getNWIStemperaturePlot}):
 
-<<label=getNWIStemperaturePlot, echo=TRUE>>=
+<<getNWIStemperaturePlot, echo=TRUE, fig.cap="Temperature and discharge plot of Choptank River in 2012.">>=
+par(mar=c(5,5,5,5))
 colnames <- names(temperatureAndFlow)
 with(temperatureAndFlow, plot(
   get(colnames[3]), get(colnames[6]),
@@ -273,20 +288,10 @@ title(paste(ChoptankInfo$station.nm,"2012",sep=" "))
 @
 
 
-\begin{figure}
-\begin{center}
-<<label=fig1, fig=TRUE,echo=FALSE,width=6,height=4>>=
-<<getNWIStemperaturePlot>>
-@
-\end{center}
-\caption{Temperature and discharge plot of Choptank River in 2012.}
-\label{fig:TD}
-\end{figure}
-
-
 There are occasions where NWIS values are not reported as numbers; instead, there might be text describing a certain event such as \texttt{"}Ice\texttt{"}.  Any value that cannot be converted to a number will be reported as NA in this package.
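+
+A quick check for any values that were converted to NA (a minimal sketch, assuming the temperatureAndFlow dataframe retrieved above):
+
+<<checkNA, echo=TRUE, eval=FALSE>>=
+# Count NA values in each column of the retrieved daily data:
+colSums(is.na(temperatureAndFlow))
+@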
 
 \FloatBarrier
+
 %------------------------------------------------------------
 \subsection{Unit Values}
 \label{sec:usgsRT}
@@ -297,9 +302,7 @@ Any data that are collected at regular time intervals (such as 15-minute or hour
 
 parameterCd <- "00060"  # Discharge (cfs)
 startDate <- "2012-05-12" 
-# or use (yesterday): startDate <- as.character(Sys.Date()-1)
 endDate <- "2012-05-13" 
-# or use (today):  endDate <- as.character(Sys.Date())
 dischargeToday <- retrieveUnitNWISData(siteNumber, parameterCd, 
         startDate, endDate)
 @
@@ -310,11 +313,11 @@ head(dischargeToday)
 
 Note that time now becomes important, so the variable datetime is a POSIXct, and the time zone is included in a separate column. Data is pulled from \url{http://waterservices.usgs.gov/rest/IV-Test-Tool.html}. There are occasions where NWIS values are not reported as numbers; a common example is \texttt{"}Ice\texttt{"}.  Any value that cannot be converted to a number will be reported as NA in this package.
 
-A simple plotting example is shown in Figure \ref{fig:RT}:
-<<label=getNWISUnit, echo=TRUE>>=
+A simple plotting example is shown in Figure \ref{fig:getNWISUnitPlot}:
+<<getNWISUnitPlot, echo=TRUE, fig.cap="Real-time discharge plot of Choptank River from May 12-13, 2012.">>=
 colnames <- names(dischargeToday)
 with(dischargeToday, plot(
-  get(colnames[3]), get(colnames[4]),
+  dateTime, get(colnames[4]),
   ylab="Discharge [cfs]",xlab=""
   ))
 title(ChoptankInfo$station.nm)
@@ -322,17 +325,9 @@ title(ChoptankInfo$station.nm)
 @
 \newpage
 
-\begin{figure}
-\begin{center}
-<<label=fig2, fig=TRUE,echo=FALSE,width=6,height=4>>=
-<<getNWISUnit>>
-@
-\end{center}
-\caption{Real-time discharge plot of Choptank River from May 12-13, 2012.}
-\label{fig:RT}
-\end{figure}
 
 \FloatBarrier
+
 %------------------------------------------------------------
 \subsection{Water Quality Values}
 \label{sec:usgsWQP}
@@ -354,9 +349,9 @@ dissolvedNitrateSimple <- getQWData(siteNumber, parameterCd,
         startDate, endDate)
 names(dissolvedNitrateSimple)
 @
-Note that in this dataframe, datetime is imported as Dates (no times are included), and the qualifier is either blank or \texttt{"}\verb@<@\texttt{"} signifying a censored value. A plotting example is shown in Figure \ref{fig:nitrate}.
+Note that in this dataframe, datetime is imported as Dates (no times are included), and the qualifier is either blank or \texttt{"}\verb@<@\texttt{"} signifying a censored value. A plotting example is shown in Figure \ref{fig:getQWtemperaturePlot}.
 
-<<label=getQWtemperaturePlot, echo=TRUE>>=
+<<getQWtemperaturePlot, echo=TRUE, fig.cap="Nitrate plot of Choptank River.">>=
 with(dissolvedNitrateSimple, plot(
   dateTime, value.00618,
   xlab="Date",ylab = paste(parameterINFO$srsname,
@@ -365,17 +360,8 @@ with(dissolvedNitrateSimple, plot(
 title(ChoptankInfo$station.nm)
 @
 
-\begin{figure}
-\begin{center}
-<<label=fig3, fig=TRUE,echo=FALSE,width=6,height=4>>=
-<<getQWtemperaturePlot>>
-@
-\end{center}
-\caption{Nitrate plot of Choptank River.}
-\label{fig:nitrate}
-\end{figure}
-
 \FloatBarrier
+
 %------------------------------------------------------------
 \subsection{STORET Water Quality Retrievals}
 \label{sec:usgsSTORET}
@@ -402,11 +388,13 @@ pCode <- c("00618","71851")
 startDate <- "1964-06-11"
 endDate <- "2012-12-18"
 url_qw <- constructNWISURL(siteNumber,pCode,startDate,endDate,'qw')
-url_dv <- constructNWISURL(siteNumber,"00060",startDate,endDate,'dv',statCd="00003")
+url_dv <- constructNWISURL(siteNumber,"00060",startDate,endDate,
+                           'dv',statCd="00003")
 url_uv <- constructNWISURL(siteNumber,"00060",startDate,endDate,'uv')
 @
 
 \FloatBarrier
+
 %------------------------------------------------------------
 \section{Data Retrievals Structured For Use In The EGRET Package}
 %------------------------------------------------------------ 
@@ -427,6 +415,7 @@ INFO <-getMetaData(siteNumber,parameterCd, interactive=FALSE)
 Column names in the INFO dataframe are listed in Appendix 2 (\ref{sec:appendix2INFO}).
 
 \FloatBarrier
+
 %------------------------------------------------------------
 \subsection{Daily Data}
 %------------------------------------------------------------
@@ -436,58 +425,77 @@ The function to obtain the daily values (discharge in this case) is getDVData.
 siteNumber <- "01491000"
 startDate <- "2000-01-01"
 endDate <- "2013-01-01"
-# This call will get NWIS data that is in cfs, and convert it
-# to cms since we didn't override the default in the convert argument:
-Daily <- getDVData(siteNumber, "00060", startDate, endDate,interactive=FALSE)
+# This call will get NWIS data (in cfs) and convert it to cms:
+Daily <- getDVData(siteNumber, "00060", startDate, endDate)
 @
 
 Details of the Daily dataframe are listed below:
 
-<<label=colNamesDaily, echo=FALSE,results=tex>>=
+<<colNamesDaily, echo=FALSE,results='asis'>>=
 ColumnName <- c("Date", "Q", "Julian","Month","Day","DecYear","MonthSeq","Qualifier","i","LogQ","Q7","Q30")
 Type <- c("Date", "number", "number","integer","integer","number","integer","string","integer","number","number","number")
-Description <- c("Date", "Discharge", "Number of days since January 1, 1850", "Month of the year [1-12]", "Day of the year [1-366]", "Decimal year", "Number of months since January 1, 1850", "Qualifing code", "Index of days from the start of the data frame", "Natural logarithm of Q", "7 day running average of Q", "30 running average of Q")
+Description <- c("Date", "Discharge in cms", "Number of days since January 1, 1850", "Month of the year [1-12]", "Day of the year [1-366]", "Decimal year", "Number of months since January 1, 1850", "Qualifying code", "Index of days, starting with 1", "Natural logarithm of Q", "7 day running average of Q", "30 day running average of Q")
 Units <- c("date", "cms","days", "months","days","years","months", "character","days","numeric","cms","cms")
 
 DF <- data.frame(ColumnName,Type,Description,Units)
 
-data.table <- xtable(DF,
-                     caption="Daily dataframe")
-print(data.table, caption.placement="top",floating="FALSE",latex.environments=NULL,include.rownames=FALSE)
+xtable(DF, caption="Daily dataframe")
+
 @
-\\*
+
 
 If there are discharge values of zero, the code will add a small constant to all of the daily discharges.  This constant is 0.001 times the mean discharge.  The code will also report on the number of zero values and the size of the constant.  EGRET should only be used if the number of zero values is a very small fraction of the total days in the record (say less than 0.1\% of the days).  Columns Q7 and Q30 are the 7 and 30 day running averages for the 7 or 30 days ending on this specific date.
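+
+The adjustment described above can be written out as a short sketch (hypothetical values; the package applies this automatically, the sketch only shows the arithmetic):
+
+<<zeroFlowSketch, echo=TRUE, eval=FALSE>>=
+Q <- c(0, 1.2, 3.5, 0, 2.8)    # daily discharge in cms, including zero values
+constant <- 0.001 * mean(Q)    # small constant: 0.001 times the mean discharge
+Qadj <- Q + constant           # constant added to every daily discharge
+sum(Q == 0) / length(Q)        # fraction of zero-flow days in the record
+@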
 
 \FloatBarrier
+
 %------------------------------------------------------------
 \subsection{Sample Data}
 %------------------------------------------------------------
 The function to obtain sample data from the water quality portal is getSampleData. The arguments for this function are also siteNumber, ParameterCd, StartDate, EndDate, interactive. These are the same inputs as getRawQWData or getQWData as described in the previous section.
 
 <<secondExample>>=
+parameterCd <- "00618"
 Sample <-getSampleData(siteNumber,parameterCd,
       startDate, endDate,interactive=FALSE)
 @
 
-Details of the Sample dataframe are listed below:
+\pagebreak
 
-<<label=colNamesQW, echo=FALSE,results=tex>>=
-ColumnName <- c("Date", "ConcLow", "ConcHigh", "Uncen", "ConcAve", "Julian","Month","Day","DecYear","MonthSeq","SinDY","CosDY","Q footnote","LogQ footnote")
-Type <- c("Date", "number","number","integer","number", "number","integer","integer","number","integer","number","number","number","number")
-Description <- c("Date", "Lower limit of concentration", "Upper limit of concentration", "Uncensored data (1=true, 0=false)", "Average of ConcLow and ConcHigh","Number of days since January 1, 1850", "Month of the year [1-12]", "Day of the year [1-366]", "Decimal year", "Number of months since January 1, 1850", "Sine of DecYear", "Cosine of DecYear", "Discharge", "Natural logarithm of flow")
-Units <- c("date","mg/L","mg/L","integer","mg/L","days","months","days","years","months","numeric","numeric","cms", "numeric")
+Details of the Sample dataframe are listed below:
 
-DF <- data.frame(ColumnName,Type,Description,Units)
+\begin{table}[!ht]
+\begin{minipage}{\linewidth}
+\begin{center}
+\caption{Sample dataframe} 
+\begin{tabular}{llll}
+  \hline
+ColumnName & Type & Description & Units \\ 
+  \hline
+Date & Date & Date & date \\ 
+  ConcLow & number & Lower limit of concentration & mg/L \\ 
+  ConcHigh & number & Upper limit of concentration & mg/L \\ 
+  Uncen & integer & Uncensored data (1=true, 0=false) & integer \\ 
+  ConcAve & number & Average of ConcLow and ConcHigh & mg/L \\ 
+  Julian & number & Number of days since January 1, 1850 & days \\ 
+  Month & integer & Month of the year [1-12] & months \\ 
+  Day & integer & Day of the year [1-366] & days \\ 
+  DecYear & number & Decimal year & years \\ 
+  MonthSeq & integer & Number of months since January 1, 1850 & months \\ 
+  SinDY & number & Sine of DecYear & numeric \\ 
+  CosDY & number & Cosine of DecYear & numeric \\ 
+  Q \footnotemark[1] & number & Discharge & cms \\ 
+  LogQ \footnotemark[1] & number & Natural logarithm of flow & numeric \\ 
+   \hline
+\end{tabular}
+\end{center}
+\end{minipage}
+\end{table}
 
-data.table <- xtable(DF,
-                     caption="Sample dataframe")
-print(data.table, caption.placement="top",include.rownames=FALSE,table.placement="!ht",
-      sanitize.text.function=function(str)gsub("footnote","\\footnotemark[1]",str,fixed=TRUE))
-@
 \footnotetext[1]{Flow columns are populated from data in the Daily dataframe after calling the mergeReport function.}
 
 \FloatBarrier
+
+
 %------------------------------------------------------------
 \subsection{Censored Values: Summation Explanation}
 %------------------------------------------------------------
@@ -497,7 +505,7 @@ As an example to understand how the dataRetrieval package handles a more complex
 
 \begin{center}
 
-<<label=exampleComplexQW, echo=FALSE,results=tex>>=
+<<label=exampleComplexQW, echo=FALSE, eval=TRUE,results='asis'>>=
 cdate <- c("2003-02-15","2003-06-30","2004-09-15","2005-01-30","2005-05-30","2005-10-30")
 rdp <- c("", "<","<","","","")
 dp <- c(0.02,0.01,0.005,NA,NA,NA)
@@ -508,9 +516,8 @@ tp <- c(NA,NA,NA,0.43,0.05,0.02)
 
 DF <- data.frame(cdate,rdp,dp,rpp,pp,rtp,tp)
 
-data.table <- xtable(DF,
-                     caption="Example data")
-print(data.table, caption.placement="top",floating="FALSE",latex.environments=NULL,include.rownames=FALSE)
+xtable(DF, caption="Example data")
+
 @
 
 \end{center}
@@ -518,9 +525,9 @@ print(data.table, caption.placement="top",floating="FALSE",latex.environments=NU
 
 The dataRetrieval package will \texttt{"}add up\texttt{"} all the values in a given row to form the total for that sample. Thus, you only want to enter data that should be added together. For example, we might know the value for dp on 5/30/2005, but we don't want to put it in the table because, under the rules of this data set, we are not supposed to add it to the values in 2005.
 
-For every sample, the EGRET package requires a pair of numbers to define an interval in which the true value lies (ConcLow and ConcHigh). In a simple non-censored case (the reported value is above the detection limit), ConcLow equals ConcHigh and the interval collapses down to a single point.In a simple censored case, the value might be reported as <0.2, then ConcLow=NA and ConcHigh=0.2. We use NA instead of 0 as a way to elegantly handle future logarithm calculations.
+For every sample, the EGRET package requires a pair of numbers to define an interval in which the true value lies (ConcLow and ConcHigh). In a simple non-censored case (the reported value is above the detection limit), ConcLow equals ConcHigh and the interval collapses down to a single point. In a simple censored case, the value might be reported as \verb@<@0.2; then ConcLow=NA and ConcHigh=0.2. We use NA instead of 0 as a way to elegantly handle future logarithm calculations.
 
-For the more complex example case, let us say dp is reported as <0.01 and pp is reported as 0.3. We know that the total must be at least 0.3 and could be as much as 0.31. Therefore, ConcLow=0.3 and ConcHigh=0.31. Another case would be if dp is reported as <0.005 and pp is reported <0.2. We know in this case that the true value could be as low as zero, but could be as high as 0.205. Therefore, in this case, ConcLow=NA and ConcHigh=0.205. The Sample dataframe for the example data is therefore:
+For the more complex example case, let us say dp is reported as \verb@<@0.01 and pp is reported as 0.3. We know that the total must be at least 0.3 and could be as much as 0.31. Therefore, ConcLow=0.3 and ConcHigh=0.31. Another case would be if dp is reported as \verb@<@0.005 and pp is reported as \verb@<@0.2. We know in this case that the true value could be as low as zero, but could be as high as 0.205. Therefore, in this case, ConcLow=NA and ConcHigh=0.205. The Sample dataframe for the example data is therefore:
 
 <<thirdExample,echo=FALSE>>=
 getPreLoadedSampleData(DF)
@@ -528,6 +535,7 @@ getPreLoadedSampleData(DF)
 @
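+
+The interval arithmetic described above can be written out directly for the two cases in the text (a minimal sketch with the example numbers; the package computes these bounds for you):
+
+<<censoredIntervalSketch, echo=TRUE, eval=FALSE>>=
+# Case 1: dp reported as <0.01, pp reported as 0.3
+ConcLow1  <- 0.3          # the censored component adds nothing to the lower bound
+ConcHigh1 <- 0.3 + 0.01   # its reporting limit is added to the upper bound
+
+# Case 2: dp reported as <0.005, pp reported as <0.2
+ConcLow2  <- NA           # every component censored: the true value could be as low as zero
+ConcHigh2 <- 0.005 + 0.2  # upper bound is the sum of the reporting limits
+@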
 
 \FloatBarrier
+
 %------------------------------------------------------------ 
 \subsection{User-Generated Data Files}
 %------------------------------------------------------------ 
@@ -546,7 +554,7 @@ So, if you have a file called \texttt{"}ChoptankRiverFlow.txt\texttt{"} located
 \begin{verbatim}
 date  Qdaily
 10/1/1999  107
-10/2/1999	85
+10/2/1999  85
 10/3/1999	76
 10/4/1999	76
 10/5/1999	113
@@ -558,10 +566,12 @@ The call to open this file, convert the flow to cubic meters per second, and pop
 <<openDaily, eval = FALSE>>=
 fileName <- "ChoptankRiverFlow.txt"
 filePath <-  "C:/RData/"
-Daily <- getDailyDataFromFile(filePath,fileName,separator="\t",interactive=FALSE)
+Daily <- getDailyDataFromFile(filePath,fileName,
+                    separator="\t")
 @
 
 \FloatBarrier
+
 %------------------------------------------------------------ 
 \subsubsection{getSampleDataFromFile}
 %------------------------------------------------------------ 
@@ -580,10 +590,12 @@ The call to open this file, and populate the Sample dataframe would be:
 <<openSample, eval = FALSE>>=
 fileName <- "ChoptankRiverNitrate.csv"
 filePath <-  "C:/RData/"
-Sample <- getSampleDataFromFile(filePath,fileName,separator=",",interactive=FALSE)
+Sample <- getSampleDataFromFile(filePath,fileName,
+                                separator=",")
 @
 
 \FloatBarrier
+
 %------------------------------------------------------------
 \subsection{Merge Report}
 %------------------------------------------------------------
@@ -596,37 +608,28 @@ parameterCd <- "00631"  # Nitrate
 startDate <- "2000-01-01"
 endDate <- "2013-01-01"
 
-Daily <- getDVData(siteNumber, "00060", startDate, endDate,interactive=FALSE)
-Sample <- getSampleData(siteNumber,parameterCd, startDate, endDate, interactive=FALSE)
+Daily <- getDVData(siteNumber, "00060", startDate, endDate)
+Sample <- getSampleData(siteNumber,parameterCd, startDate, endDate)
 Sample <- mergeReport()
 head(Sample)
 @
 
 \FloatBarrier
+
 %------------------------------------------------------------
 \subsection{EGRET Plots}
 %------------------------------------------------------------
 As has been mentioned, the data is specifically formatted to be used with the EGRET package. The EGRET package has powerful modeling capabilities using WRTDS, but also has a variety of graphing and tabular tools to explore the data without using the WRTDS algorithm. See the EGRET vignette, user guide, and/or wiki (\url{https://github.com/USGS-R/EGRET/wiki}) for detailed information. The following figure is an example of one of the plotting functions that can be used directly from the dataRetrieval dataframes.
 
-<<egretEx, echo=TRUE, eval=TRUE>>=
+<<egretEx, echo=TRUE, eval=TRUE, fig.cap="Default multiPlotDataOverview">>=
 # Continuing Choptank example from the previous sections
 library(EGRET)
 multiPlotDataOverview()
 @
 
-\begin{figure}[ht]
-\begin{center}
-
-<<label=figegretEx, fig=TRUE,echo=FALSE>>=
-<<egretEx>>
-@
-\end{center}
-\caption{Default multiPlotDataOverview}
-\label{fig:multiPlotDataOverview}
-\end{figure}
-
 \clearpage
 \appendix
+
 %------------------------------------------------------------ 
 \section{Getting Started in R}
 \label{sec:appendix1}
@@ -651,7 +654,6 @@ To see the raw code for a particular code, type the name of the function:
 removeDuplicates
 @
 
-
 %------------------------------------------------------------
 \subsection{R User: Installing dataRetrieval}
 %------------------------------------------------------------ 
@@ -662,29 +664,12 @@ install.packages("zoo")
 install.packages("dataRetrieval", repos="http://usgs-r.github.com", type="source")
 @
 
-It is a good idea to re-start the R enviornment after installing the package, especially if installing an updated version. Some users have found it necessary to delete the previous version's package folder before installing newer version of dataRetrieval. If you are experiencing issues after updating a package, trying deleting the package folder - the default location for Windows is something like this: C:/Users/userA/Documents/R/win-library/2.15/dataRetrieval, and the default for a Mac: /Users/userA/Library/R/2.15/library/dataRetrieval. Then, re-install the package using the directions above. Moving to CRAN should solve this problem.
+It is a good idea to re-start R after installing the package, especially if installing an updated version. Some users have found it necessary to delete the previous version's package folder before installing a newer version of dataRetrieval. If you are experiencing issues after updating a package, try deleting the package folder - the default location for Windows is something like this: C:/Users/userA/Documents/R/win-library/2.15/dataRetrieval, and the default for a Mac: /Users/userA/Library/R/2.15/library/dataRetrieval. Then, re-install the package using the directions above. Moving to CRAN should solve this problem.
 
 After installing the package, you need to open the library each time you re-start R.  This is done with the simple command:
 <<openLibraryTest, eval=FALSE>>=
 library(dataRetrieval)
 @
-Using RStudio, you could alternatively click on the checkbox for dataRetrieval in the Packages window.
-
-%------------------------------------------------------------
-\subsection{R Developers: Installing dataRetrieval from gitHub}
-%------------------------------------------------------------
-Alternatively, R-developers can install the latest working version of dataRetrieval directly from gitHub using the devtools package (available on CRAN).  Rtools (for Windows) and appropriate \LaTeX\ tools are required. Be aware that the version installed using this method isn't necessarily the same as the version in the stable release branch.  
-
-
-<<gitInstal,eval = FALSE>>=
-library(devtools)
-install_github("dataRetrieval", "USGS-R")
-@
-To then open the library, simply type:
-
-<<openLibrary, eval=FALSE>>=
-library(dataRetrieval)
-@
 
 %------------------------------------------------------------ 
 \section{Columns Names}
@@ -696,11 +681,10 @@ library(dataRetrieval)
 \label{sec:appendix2INFO}
 %------------------------------------------------------------
 
-<<label=colNamesQW, echo=FALSE,results=tex>>=
+<<label=colNamesINFO, echo=FALSE,results='asis'>>=
 infoDF <- data.frame(ColumnNames=names(INFO))
-data.table <- xtable(infoDF,
-                     caption="Column names in the INFO dataframe")
-print(data.table, caption.placement="top",floating="FALSE",latex.environments=NULL,include.rownames=FALSE,include.colnames=FALSE)
+xtable(infoDF, caption="Column names in the INFO dataframe")
+
 @
 
 \FloatBarrier
@@ -712,22 +696,17 @@ print(data.table, caption.placement="top",floating="FALSE",latex.environments=NU
 
 There are 62 columns returned from the water quality portal. 
 
-<<label=colNamesQW, echo=FALSE,results=tex>>=
+<<colNamesQW, echo=FALSE,results='asis'>>=
 infoDF <- data.frame(ColumnNames=names(dissolvedNitrate[1:40]))
-data.table <- xtable(infoDF,
-                     caption="Column names in dissolvedNitrate")
-print(data.table, caption.placement="top",floating="FALSE",latex.environments=NULL,
-      include.rownames=FALSE,include.colnames=FALSE)
+xtable(infoDF, caption="Column names in dissolvedNitrate")
+
 @
 
 \FloatBarrier
 
-<<label=colNamesQW2, echo=FALSE,results=tex>>=
+<<colNamesQW2, echo=FALSE,results='asis'>>=
 infoDF <- data.frame(ColumnNames_Continued=names(dissolvedNitrate[41:62]))
-data.table <- xtable(infoDF,
-                     caption="Column names in dissolvedNitrate")
-print(data.table, caption.placement="top",floating="FALSE",latex.environments=NULL,
-      include.rownames=FALSE,include.colnames=FALSE)
+xtable(infoDF, caption="Column names in dissolvedNitrate")
 @
 
 \clearpage
@@ -739,20 +718,21 @@ print(data.table, caption.placement="top",floating="FALSE",latex.environments=NU
 There are a few steps required to create a table in a Microsoft product (Excel, Word, Powerpoint, etc.) from an R dataframe. There are certainly a variety of good methods; one is detailed here. The example here creates a table in Microsoft Word based on the dataframe tableData:
 
 <<label=getSiteApp, echo=TRUE>>=
-ChoptankAvailableData <- getDataAvailability(siteNumber)
-ChoptankDailyData <- ChoptankAvailableData["dv" == ChoptankAvailableData$service,]
-ChoptankDailyData <- ChoptankDailyData["00003" == ChoptankDailyData$statCd,]
-pCodeINFO <- getMultipleParameterNames(ChoptankDailyData$parameter_cd, interactive=FALSE)
-ChoptankDailyData <- merge(ChoptankDailyData,pCodeINFO,by="parameter_cd")
+availableData <- getDataAvailability(siteNumber)
+dailyData <- availableData["dv" == availableData$service,]
+dailyData <- dailyData["00003" == dailyData$statCd,]
+pCodeINFO <- getMultipleParameterNames(dailyData$parameter_cd)
+dailyData <- merge(dailyData,pCodeINFO, by="parameter_cd")
 
-tableData <- with(ChoptankDailyData, 
+tableData <- with(dailyData, 
       data.frame(
-      shortName=srsname, 
-      Start=startDate, 
-      End=endDate, 
-      Count=count,
-      Units=parameter_units)
+        shortName=srsname, 
+        Start=startDate, 
+        End=endDate, 
+        Count=count,
+        Units=parameter_units)
       )
+tableData
 @
 
 First, save the dataframe as a tab delimited file (you don't want to use comma delimited because there are commas in some of the data elements):
@@ -766,7 +746,7 @@ write.table(tableData, file="tableData.tsv",sep="\t",
 This will save a file in your working directory called tableData.tsv.  You can see your working directory by typing getwd() in the R console. Opening the file in a general-purpose text editor, you should see the following:
 
 \begin{verbatim}
-shortName  Start	End	Count	Units
+shortName  Start  End	Count	Units
 Temperature, water	2010-10-01	2012-06-24	575	deg C
 Stream flow, mean. daily	1948-01-01	2013-03-13	23814	cfs
 Specific conductance	2010-10-01	2012-06-24	551	uS/cm @25C
@@ -797,6 +777,7 @@ From Excel, it is simple to copy and paste the tables in other Microsoft product
 \end{figure}
 
 \clearpage
+
 %------------------------------------------------------------
 % BIBLIO
 %------------------------------------------------------------
diff --git a/vignettes/dataRetrieval.log b/vignettes/dataRetrieval.log
index 910ea85734d4b4d6b7164c1816e2976ce2affc1a..44cb50cce9584a679e7239c09aa9183874a946f9 100644
--- a/vignettes/dataRetrieval.log
+++ b/vignettes/dataRetrieval.log
@@ -1,4 +1,4 @@
-This is pdfTeX, Version 3.1415926-2.3-1.40.12 (MiKTeX 2.9) (preloaded format=pdflatex 2012.1.6)  25 APR 2013 12:09
+This is pdfTeX, Version 3.1415926-2.3-1.40.12 (MiKTeX 2.9) (preloaded format=pdflatex 2012.1.6)  17 JUN 2013 15:35
 entering extended mode
 **dataRetrieval.tex
 (D:\LADData\RCode\dataRetrieval\vignettes\dataRetrieval.tex
@@ -30,9 +30,60 @@ File: size11.clo 2007/10/19 v1.4h Standard LaTeX file (size option)
 \belowcaptionskip=\skip42
 \bibindent=\dimen102
 )
+("C:\Program Files (x86)\MiKTeX 2.9\tex\latex\graphics\graphicx.sty"
+Package: graphicx 1999/02/16 v1.0f Enhanced LaTeX Graphics (DPC,SPQR)
+
+("C:\Program Files (x86)\MiKTeX 2.9\tex\latex\graphics\keyval.sty"
+Package: keyval 1999/03/16 v1.13 key=value parser (DPC)
+\KV@toks@=\toks14
+)
+("C:\Program Files (x86)\MiKTeX 2.9\tex\latex\graphics\graphics.sty"
+Package: graphics 2009/02/05 v1.0o Standard LaTeX Graphics (DPC,SPQR)
+
+("C:\Program Files (x86)\MiKTeX 2.9\tex\latex\graphics\trig.sty"
+Package: trig 1999/03/16 v1.09 sin cos tan (DPC)
+)
+("C:\Program Files (x86)\MiKTeX 2.9\tex\latex\00miktex\graphics.cfg"
+File: graphics.cfg 2007/01/18 v1.5 graphics configuration of teTeX/TeXLive
+)
+Package graphics Info: Driver file: pdftex.def on input line 91.
+
+("C:\Program Files (x86)\MiKTeX 2.9\tex\latex\pdftex-def\pdftex.def"
+File: pdftex.def 2011/05/27 v0.06d Graphics/color for pdfTeX
+
+("C:\Program Files (x86)\MiKTeX 2.9\tex\generic\oberdiek\infwarerr.sty"
+Package: infwarerr 2010/04/08 v1.3 Providing info/warning/message (HO)
+)
+("C:\Program Files (x86)\MiKTeX 2.9\tex\generic\oberdiek\ltxcmds.sty"
+Package: ltxcmds 2011/04/18 v1.20 LaTeX kernel commands for general use (HO)
+)
+\Gread@gobject=\count87
+))
+\Gin@req@height=\dimen103
+\Gin@req@width=\dimen104
+)
+("C:\Program Files (x86)\MiKTeX 2.9\tex\latex\graphics\color.sty"
+Package: color 2005/11/14 v1.0j Standard LaTeX Color (DPC)
+
+("C:\Program Files (x86)\MiKTeX 2.9\tex\latex\00miktex\color.cfg"
+File: color.cfg 2007/01/18 v1.5 color configuration of teTeX/TeXLive
+)
+Package color Info: Driver file: pdftex.def on input line 130.
+)
+(C:\Users\ldecicco\AppData\Roaming\MiKTeX\2.9\tex\latex\framed\framed.sty
+Package: framed 2011/10/22 v 0.96: framed or shaded text with page breaks
+\OuterFrameSep=\skip43
+\fb@frw=\dimen105
+\fb@frh=\dimen106
+\FrameRule=\dimen107
+\FrameSep=\dimen108
+)
+("C:\Program Files (x86)\MiKTeX 2.9\tex\latex\base\alltt.sty"
+Package: alltt 1997/06/16 v2.0g defines alltt environment
+)
 (C:\Users\ldecicco\AppData\Roaming\MiKTeX\2.9\tex\latex\amsmath\amsmath.sty
 Package: amsmath 2013/01/14 v2.14 AMS math features
-\@mathmargin=\skip43
+\@mathmargin=\skip44
 
 For additional information on amsmath, use the `?' option.
 (C:\Users\ldecicco\AppData\Roaming\MiKTeX\2.9\tex\latex\amsmath\amstext.sty
@@ -40,51 +91,51 @@ Package: amstext 2000/06/29 v2.01
 
 (C:\Users\ldecicco\AppData\Roaming\MiKTeX\2.9\tex\latex\amsmath\amsgen.sty
 File: amsgen.sty 1999/11/30 v2.0
-\@emptytoks=\toks14
-\ex@=\dimen103
+\@emptytoks=\toks15
+\ex@=\dimen109
 ))
 (C:\Users\ldecicco\AppData\Roaming\MiKTeX\2.9\tex\latex\amsmath\amsbsy.sty
 Package: amsbsy 1999/11/29 v1.2d
-\pmbraise@=\dimen104
+\pmbraise@=\dimen110
 )
 (C:\Users\ldecicco\AppData\Roaming\MiKTeX\2.9\tex\latex\amsmath\amsopn.sty
 Package: amsopn 1999/12/14 v2.01 operator names
 )
-\inf@bad=\count87
+\inf@bad=\count88
 LaTeX Info: Redefining \frac on input line 210.
-\uproot@=\count88
-\leftroot@=\count89
+\uproot@=\count89
+\leftroot@=\count90
 LaTeX Info: Redefining \overline on input line 306.
-\classnum@=\count90
-\DOTSCASE@=\count91
+\classnum@=\count91
+\DOTSCASE@=\count92
 LaTeX Info: Redefining \ldots on input line 378.
 LaTeX Info: Redefining \dots on input line 381.
 LaTeX Info: Redefining \cdots on input line 466.
 \Mathstrutbox@=\box26
 \strutbox@=\box27
-\big@size=\dimen105
+\big@size=\dimen111
 LaTeX Font Info:    Redeclaring font encoding OML on input line 566.
 LaTeX Font Info:    Redeclaring font encoding OMS on input line 567.
-\macc@depth=\count92
-\c@MaxMatrixCols=\count93
+\macc@depth=\count93
+\c@MaxMatrixCols=\count94
 \dotsspace@=\muskip10
-\c@parentequation=\count94
-\dspbrk@lvl=\count95
-\tag@help=\toks15
-\row@=\count96
-\column@=\count97
-\maxfields@=\count98
-\andhelp@=\toks16
-\eqnshift@=\dimen106
-\alignsep@=\dimen107
-\tagshift@=\dimen108
-\tagwidth@=\dimen109
-\totwidth@=\dimen110
-\lineht@=\dimen111
-\@envbody=\toks17
-\multlinegap=\skip44
-\multlinetaggap=\skip45
-\mathdisplay@stack=\toks18
+\c@parentequation=\count95
+\dspbrk@lvl=\count96
+\tag@help=\toks16
+\row@=\count97
+\column@=\count98
+\maxfields@=\count99
+\andhelp@=\toks17
+\eqnshift@=\dimen112
+\alignsep@=\dimen113
+\tagshift@=\dimen114
+\tagwidth@=\dimen115
+\totwidth@=\dimen116
+\lineht@=\dimen117
+\@envbody=\toks18
+\multlinegap=\skip45
+\multlinetaggap=\skip46
+\mathdisplay@stack=\toks19
 LaTeX Info: Redefining \[ on input line 2665.
 LaTeX Info: Redefining \] on input line 2666.
 )
@@ -100,8 +151,8 @@ Package: hobsub-hyperref 2011/04/23 v1.4 Bundle oberdiek, subset hyperref (HO)
 ("C:\Program Files (x86)\MiKTeX 2.9\tex\generic\oberdiek\hobsub-generic.sty"
 Package: hobsub-generic 2011/04/23 v1.4 Bundle oberdiek, subset generic (HO)
 Package: hobsub 2011/04/23 v1.4 Subsetting bundle oberdiek (HO)
-Package: infwarerr 2010/04/08 v1.3 Providing info/warning/message (HO)
-Package: ltxcmds 2011/04/18 v1.20 LaTeX kernel commands for general use (HO)
+Package hobsub Info: Skipping package `infwarerr' (already loaded).
+Package hobsub Info: Skipping package `ltxcmds' (already loaded).
 Package: ifluatex 2010/03/01 v1.3 Provides the ifluatex switch (HO)
 Package ifluatex Info: LuaTeX not detected.
 Package: ifvtex 2010/03/01 v1.5 Switches for detecting VTeX and its modes (HO)
@@ -135,24 +186,20 @@ Package: atbegshi 2011/01/30 v1.15 At begin shipout hook (HO)
 Package: refcount 2010/12/01 v3.2 Data extraction from references (HO)
 Package: hycolor 2011/01/30 v1.7 Color options of hyperref/bookmark (HO)
 )
-("C:\Program Files (x86)\MiKTeX 2.9\tex\latex\graphics\keyval.sty"
-Package: keyval 1999/03/16 v1.13 key=value parser (DPC)
-\KV@toks@=\toks19
-)
 ("C:\Program Files (x86)\MiKTeX 2.9\tex\generic\ifxetex\ifxetex.sty"
 Package: ifxetex 2010/09/12 v0.6 Provides ifxetex conditional
 )
 ("C:\Program Files (x86)\MiKTeX 2.9\tex\latex\oberdiek\kvoptions.sty"
 Package: kvoptions 2010/12/23 v3.10 Keyval support for LaTeX options (HO)
 )
-\@linkdim=\dimen112
-\Hy@linkcounter=\count99
-\Hy@pagecounter=\count100
+\@linkdim=\dimen118
+\Hy@linkcounter=\count100
+\Hy@pagecounter=\count101
 
 ("C:\Program Files (x86)\MiKTeX 2.9\tex\latex\hyperref\pd1enc.def"
 File: pd1enc.def 2011/08/19 v6.82h Hyperref: PDFDocEncoding definition (HO)
 )
-\Hy@SavedSpaceFactor=\count101
+\Hy@SavedSpaceFactor=\count102
 
 ("C:\Program Files (x86)\MiKTeX 2.9\tex\latex\00miktex\hyperref.cfg"
 File: hyperref.cfg 2002/06/06 v1.2 hyperref configuration of TeXLive
@@ -164,16 +211,16 @@ Package hyperref Info: Plain pages OFF on input line 4068.
 Package hyperref Info: Backreferencing OFF on input line 4073.
 Package hyperref Info: Implicit mode ON; LaTeX internals redefined.
 Package hyperref Info: Bookmarks ON on input line 4291.
-\c@Hy@tempcnt=\count102
+\c@Hy@tempcnt=\count103
 
 ("C:\Program Files (x86)\MiKTeX 2.9\tex\latex\ltxmisc\url.sty"
 \Urlmuskip=\muskip11
 Package: url 2006/04/12  ver 3.3  Verb mode for urls, etc.
 )
 LaTeX Info: Redefining \url on input line 4644.
-\Fld@menulength=\count103
-\Field@Width=\dimen113
-\Fld@charsize=\dimen114
+\Fld@menulength=\count104
+\Field@Width=\dimen119
+\Fld@charsize=\dimen120
 Package hyperref Info: Hyper figures OFF on input line 5730.
 Package hyperref Info: Link nesting OFF on input line 5735.
 Package hyperref Info: Hyper index ON on input line 5738.
@@ -183,31 +230,31 @@ Package hyperref Info: Link coloring with OCG OFF on input line 5755.
 Package hyperref Info: PDF/A mode OFF on input line 5760.
 LaTeX Info: Redefining \ref on input line 5800.
 LaTeX Info: Redefining \pageref on input line 5804.
-\Hy@abspage=\count104
-\c@Item=\count105
-\c@Hfootnote=\count106
+\Hy@abspage=\count105
+\c@Item=\count106
+\c@Hfootnote=\count107
 )
 
 Package hyperref Message: Driver (autodetected): hpdftex.
 
 ("C:\Program Files (x86)\MiKTeX 2.9\tex\latex\hyperref\hpdftex.def"
 File: hpdftex.def 2011/08/19 v6.82h Hyperref driver for pdfTeX
-\Fld@listcount=\count107
-\c@bookmark@seq@number=\count108
+\Fld@listcount=\count108
+\c@bookmark@seq@number=\count109
 
 ("C:\Program Files (x86)\MiKTeX 2.9\tex\latex\oberdiek\rerunfilecheck.sty"
 Package: rerunfilecheck 2011/04/15 v1.7 Rerun checks for auxiliary files (HO)
 Package uniquecounter Info: New unique counter `rerunfilecheck' on input line 2
 82.
 )
-\Hy@SectionHShift=\skip46
+\Hy@SectionHShift=\skip47
 )
 (C:\Users\ldecicco\AppData\Roaming\MiKTeX\2.9\tex\latex\natbib\natbib.sty
 Package: natbib 2010/09/13 8.31b (PWD, AO)
-\bibhang=\skip47
-\bibsep=\skip48
+\bibhang=\skip48
+\bibsep=\skip49
 LaTeX Info: Redefining \cite on input line 694.
-\c@NAT@ctr=\count109
+\c@NAT@ctr=\count110
 )
 ("C:\Program Files (x86)\MiKTeX 2.9\tex\generic\babel\babel.sty"
 Package: babel 2008/07/08 v3.8m The Babel package
@@ -223,8 +270,8 @@ Language: english 2005/03/30 v3.3o English support from the babel system
 
 ("C:\Program Files (x86)\MiKTeX 2.9\tex\generic\babel\babel.def"
 File: babel.def 2008/07/08 v3.8m Babel common definitions
-\babel@savecnt=\count110
-\U@D=\dimen115
+\babel@savecnt=\count111
+\U@D=\dimen121
 )
 \l@canadian = a dialect from \language\l@american 
 \l@australian = a dialect from \language\l@british 
@@ -232,58 +279,68 @@ File: babel.def 2008/07/08 v3.8m Babel common definitions
 ))
 (C:\Users\ldecicco\AppData\Roaming\MiKTeX\2.9\tex\latex\preprint\authblk.sty
 Package: authblk 2009/11/18 1.3 (PWD)
-\affilsep=\skip49
-\@affilsep=\skip50
-\c@Maxaffil=\count111
-\c@authors=\count112
-\c@affil=\count113
-)
-(C:\Users\ldecicco\AppData\Roaming\MiKTeX\2.9\tex\latex\mdwtools\footnote.sty
+\affilsep=\skip50
+\@affilsep=\skip51
+\c@Maxaffil=\count112
+\c@authors=\count113
+\c@affil=\count114
+)
+(C:\Users\ldecicco\AppData\Roaming\MiKTeX\2.9\tex\latex\subfig\subfig.sty
+Package: subfig 2005/06/28 ver: 1.3 subfig package
+
+(C:\Users\ldecicco\AppData\Roaming\MiKTeX\2.9\tex\latex\caption\caption.sty
+Package: caption 2013/02/03 v3.3-65 Customizing captions (AR)
+
+(C:\Users\ldecicco\AppData\Roaming\MiKTeX\2.9\tex\latex\caption\caption3.sty
+Package: caption3 2013/02/03 v1.5-62 caption3 kernel (AR)
+\captionmargin=\dimen122
+\captionmargin@=\dimen123
+\captionwidth=\dimen124
+\caption@tempdima=\dimen125
+\caption@indent=\dimen126
+\caption@parindent=\dimen127
+\caption@hangindent=\dimen128
+)
+\c@ContinuedFloat=\count115
+Package caption Info: hyperref package is loaded.
+)
+\c@KVtest=\count116
+\sf@farskip=\skip52
+\sf@captopadj=\dimen129
+\sf@capskip=\skip53
+\sf@nearskip=\skip54
+\c@subfigure=\count117
+\c@subfigure@save=\count118
+\c@lofdepth=\count119
+\c@subtable=\count120
+\c@subtable@save=\count121
+\c@lotdepth=\count122
+\sf@top=\skip55
+\sf@bottom=\skip56
+) (C:\Users\ldecicco\AppData\Roaming\MiKTeX\2.9\tex\latex\placeins\placeins.sty
+Package: placeins 2005/04/18  v 2.2
+) (C:\Users\ldecicco\AppData\Roaming\MiKTeX\2.9\tex\latex\mdwtools\footnote.sty
 Package: footnote 1997/01/28 1.13 Save footnotes around boxes
 \fn@notes=\box28
-\fn@width=\dimen116
-)
-(C:\Users\ldecicco\AppData\Roaming\MiKTeX\2.9\tex\latex\placeins\placeins.sty
-Package: placeins 2005/04/18  v 2.2
-)
-(C:/PROGRA~1/R/R-30~1.0/share/texmf/tex/latex\Sweave.sty
-Package: Sweave 
-
-("C:\Program Files (x86)\MiKTeX 2.9\tex\latex\base\ifthen.sty"
-Package: ifthen 2001/05/26 v1.1c Standard LaTeX ifthen package (DPC)
-)
-("C:\Program Files (x86)\MiKTeX 2.9\tex\latex\graphics\graphicx.sty"
-Package: graphicx 1999/02/16 v1.0f Enhanced LaTeX Graphics (DPC,SPQR)
-
-("C:\Program Files (x86)\MiKTeX 2.9\tex\latex\graphics\graphics.sty"
-Package: graphics 2009/02/05 v1.0o Standard LaTeX Graphics (DPC,SPQR)
-
-("C:\Program Files (x86)\MiKTeX 2.9\tex\latex\graphics\trig.sty"
-Package: trig 1999/03/16 v1.09 sin cos tan (DPC)
-)
-("C:\Program Files (x86)\MiKTeX 2.9\tex\latex\00miktex\graphics.cfg"
-File: graphics.cfg 2007/01/18 v1.5 graphics configuration of teTeX/TeXLive
-)
-Package graphics Info: Driver file: pdftex.def on input line 91.
-
-("C:\Program Files (x86)\MiKTeX 2.9\tex\latex\pdftex-def\pdftex.def"
-File: pdftex.def 2011/05/27 v0.06d Graphics/color for pdfTeX
-\Gread@gobject=\count114
-))
-\Gin@req@height=\dimen117
-\Gin@req@width=\dimen118
-)
-(C:\Users\ldecicco\AppData\Roaming\MiKTeX\2.9\tex\latex\fancyvrb\fancyvrb.sty
-Package: fancyvrb 2008/02/07
-
-Style option: `fancyvrb' v2.7a, with DG/SPQR fixes, and firstline=lastline fix 
-<2008/02/07> (tvz)
-\FV@CodeLineNo=\count115
-\FV@InFile=\read1
-\FV@TabBox=\box29
-\c@FancyVerbLine=\count116
-\FV@StepNumber=\count117
-\FV@OutFile=\write3
+\fn@width=\dimen130
+) ("C:\Program Files (x86)\MiKTeX 2.9\tex\latex\tools\tabularx.sty"
+Package: tabularx 1999/01/07 v2.07 `tabularx' package (DPC)
+
+("C:\Program Files (x86)\MiKTeX 2.9\tex\latex\tools\array.sty"
+Package: array 2008/09/09 v2.4c Tabular extension package (FMi)
+\col@sep=\dimen131
+\extrarowheight=\dimen132
+\NC@list=\toks20
+\extratabsurround=\skip57
+\backup@length=\skip58
+)
+\TX@col@width=\dimen133
+\TX@old@table=\dimen134
+\TX@old@col=\dimen135
+\TX@target=\dimen136
+\TX@delta=\dimen137
+\TX@cols=\count123
+\TX@ftn=\toks21
 )
 (C:\Users\ldecicco\AppData\Roaming\MiKTeX\2.9\tex\latex\upquote\upquote.sty
 Package: upquote 2012/04/19 v1.3 upright-quote and grave-accent glyphs in verba
@@ -370,115 +427,99 @@ Package textcomp Info: Setting pplj sub-encoding to TS1/3 on input line 338.
 Package textcomp Info: Setting ptmx sub-encoding to TS1/4 on input line 339.
 Package textcomp Info: Setting ptmj sub-encoding to TS1/4 on input line 340.
 ))
-("C:\Program Files (x86)\MiKTeX 2.9\tex\latex\base\fontenc.sty"
-Package: fontenc 2005/09/27 v1.99g Standard LaTeX package
-
-("C:\Program Files (x86)\MiKTeX 2.9\tex\latex\base\t1enc.def"
-File: t1enc.def 2005/09/27 v1.99g Standard LaTeX file
-LaTeX Font Info:    Redeclaring font encoding T1 on input line 43.
-))
-("C:\Program Files (x86)\MiKTeX 2.9\tex\latex\ae\ae.sty"
-Package: ae 2001/02/12 1.3 Almost European Computer Modern
+(D:\LADData\RCode\dataRetrieval\vignettes\dataRetrieval.aux)
+LaTeX Font Info:    Checking defaults for OML/cmm/m/it on input line 102.
+LaTeX Font Info:    ... okay on input line 102.
+LaTeX Font Info:    Checking defaults for T1/cmr/m/n on input line 102.
+LaTeX Font Info:    ... okay on input line 102.
+LaTeX Font Info:    Checking defaults for OT1/cmr/m/n on input line 102.
+LaTeX Font Info:    ... okay on input line 102.
+LaTeX Font Info:    Checking defaults for OMS/cmsy/m/n on input line 102.
+LaTeX Font Info:    ... okay on input line 102.
+LaTeX Font Info:    Checking defaults for OMX/cmex/m/n on input line 102.
+LaTeX Font Info:    ... okay on input line 102.
+LaTeX Font Info:    Checking defaults for U/cmr/m/n on input line 102.
+LaTeX Font Info:    ... okay on input line 102.
+LaTeX Font Info:    Checking defaults for PD1/pdf/m/n on input line 102.
+LaTeX Font Info:    ... okay on input line 102.
+LaTeX Font Info:    Checking defaults for TS1/cmr/m/n on input line 102.
+LaTeX Font Info:    Try loading font information for TS1+cmr on input line 102.
 
-("C:\Program Files (x86)\MiKTeX 2.9\tex\latex\base\fontenc.sty"
-Package: fontenc 2005/09/27 v1.99g Standard LaTeX package
 
-("C:\Program Files (x86)\MiKTeX 2.9\tex\latex\base\t1enc.def"
-File: t1enc.def 2005/09/27 v1.99g Standard LaTeX file
-LaTeX Font Info:    Redeclaring font encoding T1 on input line 43.
+("C:\Program Files (x86)\MiKTeX 2.9\tex\latex\base\ts1cmr.fd"
+File: ts1cmr.fd 1999/05/25 v2.5h Standard LaTeX font definitions
 )
-LaTeX Font Info:    Try loading font information for T1+aer on input line 100.
+LaTeX Font Info:    ... okay on input line 102.
+LaTeX Font Info:    Try loading font information for OT1+ptm on input line 102.
 
-("C:\Program Files (x86)\MiKTeX 2.9\tex\latex\ae\t1aer.fd"
-File: t1aer.fd 1997/11/16 Font definitions for T1/aer.
-))))
-(D:\LADData\RCode\dataRetrieval\vignettes\dataRetrieval.aux)
-LaTeX Font Info:    Checking defaults for OML/cmm/m/it on input line 44.
-LaTeX Font Info:    ... okay on input line 44.
-LaTeX Font Info:    Checking defaults for T1/cmr/m/n on input line 44.
-LaTeX Font Info:    ... okay on input line 44.
-LaTeX Font Info:    Checking defaults for OT1/cmr/m/n on input line 44.
-LaTeX Font Info:    ... okay on input line 44.
-LaTeX Font Info:    Checking defaults for OMS/cmsy/m/n on input line 44.
-LaTeX Font Info:    ... okay on input line 44.
-LaTeX Font Info:    Checking defaults for OMX/cmex/m/n on input line 44.
-LaTeX Font Info:    ... okay on input line 44.
-LaTeX Font Info:    Checking defaults for U/cmr/m/n on input line 44.
-LaTeX Font Info:    ... okay on input line 44.
-LaTeX Font Info:    Checking defaults for PD1/pdf/m/n on input line 44.
-LaTeX Font Info:    ... okay on input line 44.
-LaTeX Font Info:    Checking defaults for TS1/cmr/m/n on input line 44.
-LaTeX Font Info:    Try loading font information for TS1+cmr on input line 44.
 
-("C:\Program Files (x86)\MiKTeX 2.9\tex\latex\base\ts1cmr.fd"
-File: ts1cmr.fd 1999/05/25 v2.5h Standard LaTeX font definitions
+("C:\Program Files (x86)\MiKTeX 2.9\tex\latex\psnfss\ot1ptm.fd"
+File: ot1ptm.fd 2001/06/04 font definitions for OT1/ptm.
+)
+(C:\Users\ldecicco\AppData\Roaming\MiKTeX\2.9\tex\context\base\supp-pdf.mkii
+[Loading MPS to PDF converter (version 2006.09.02).]
+\scratchcounter=\count124
+\scratchdimen=\dimen138
+\scratchbox=\box29
+\nofMPsegments=\count125
+\nofMParguments=\count126
+\everyMPshowfont=\toks22
+\MPscratchCnt=\count127
+\MPscratchDim=\dimen139
+\MPnumerator=\count128
+\makeMPintoPDFobject=\count129
+\everyMPtoPDFconversion=\toks23
 )
-LaTeX Font Info:    ... okay on input line 44.
 \AtBeginShipoutBox=\box30
-Package hyperref Info: Link coloring OFF on input line 44.
-
-("C:\Program Files (x86)\MiKTeX 2.9\tex\latex\hyperref\nameref.sty"
+Package hyperref Info: Link coloring OFF on input line 102.
+ ("C:\Program Files (x86)\MiKTeX 2.9\tex\latex\hyperref\nameref.sty"
 Package: nameref 2010/04/30 v2.40 Cross-referencing by name of section
 
 ("C:\Program Files (x86)\MiKTeX 2.9\tex\generic\oberdiek\gettitlestring.sty"
 Package: gettitlestring 2010/12/03 v1.4 Cleanup title references (HO)
 )
-\c@section@level=\count118
+\c@section@level=\count130
 )
-LaTeX Info: Redefining \ref on input line 44.
-LaTeX Info: Redefining \pageref on input line 44.
-LaTeX Info: Redefining \nameref on input line 44.
+LaTeX Info: Redefining \ref on input line 102.
+LaTeX Info: Redefining \pageref on input line 102.
+LaTeX Info: Redefining \nameref on input line 102.
 
 (D:\LADData\RCode\dataRetrieval\vignettes\dataRetrieval.out)
 (D:\LADData\RCode\dataRetrieval\vignettes\dataRetrieval.out)
-\@outlinefile=\write4
+\@outlinefile=\write3
+Package caption Info: Begin \AtBeginDocument code.
+Package caption Info: subfig package v1.3 is loaded.
+Package caption Info: End \AtBeginDocument code.
+LaTeX Font Info:    Font shape `OT1/ptm/bx/n' in size <14.4> not available
+(Font)              Font shape `OT1/ptm/b/n' tried instead on input line 122.
 
-(C:\Users\ldecicco\AppData\Roaming\MiKTeX\2.9\tex\context\base\supp-pdf.mkii
-[Loading MPS to PDF converter (version 2006.09.02).]
-\scratchcounter=\count119
-\scratchdimen=\dimen119
-\scratchbox=\box31
-\nofMPsegments=\count120
-\nofMParguments=\count121
-\everyMPshowfont=\toks20
-\MPscratchCnt=\count122
-\MPscratchDim=\dimen120
-\MPnumerator=\count123
-\makeMPintoPDFobject=\count124
-\everyMPtoPDFconversion=\toks21
-) (D:\LADData\RCode\dataRetrieval\vignettes\dataRetrieval-concordance.tex)
 (D:\LADData\RCode\dataRetrieval\vignettes\dataRetrieval.toc
+LaTeX Font Info:    Font shape `OT1/ptm/bx/n' in size <10.95> not available
+(Font)              Font shape `OT1/ptm/b/n' tried instead on input line 2.
+
 Overfull \vbox (21.68121pt too high) has occurred while \output is active []
 
 
 [1
 
 {C:/Users/ldecicco/AppData/Local/MiKTeX/2.9/pdftex/config/pdftex.map}])
-\tf@toc=\write5
-LaTeX Font Info:    Try loading font information for T1+aett on input line 62.
+\tf@toc=\write4
+LaTeX Font Info:    Try loading font information for OT1+pcr on input line 127.
+
 
-("C:\Program Files (x86)\MiKTeX 2.9\tex\latex\ae\t1aett.fd"
-File: t1aett.fd 1997/11/16 Font definitions for T1/aett.
+("C:\Program Files (x86)\MiKTeX 2.9\tex\latex\psnfss\ot1pcr.fd"
+File: ot1pcr.fd 2001/06/04 font definitions for OT1/pcr.
 )
 Overfull \vbox (21.68121pt too high) has occurred while \output is active []
 
 
 [2]
-Overfull \hbox (22.21066pt too wide) in paragraph at lines 82--83
-[][]$\T1/aett/m/n/10.95 http : / / nwis . waterdata . usgs . gov / usa / nwis /
- pmcodes ? radio _ pm _ search = param _ group&pm _$
- []
-
-
-Overfull \hbox (23.424pt too wide) in paragraph at lines 82--83
-$\T1/aett/m/n/10.95 group = All + -[]-[] + include + all + parameter + groups&p
-m _ search = &casrn _ search = &srsname _ search =$
- []
-
+LaTeX Font Info:    Font shape `OT1/ptm/bx/n' in size <12> not available
+(Font)              Font shape `OT1/ptm/b/n' tried instead on input line 141.
 
-Overfull \hbox (68.32622pt too wide) in paragraph at lines 82--83
-$\T1/aett/m/n/10.95 &format = html _ table&show = parameter _ group _ nm&show =
- parameter _ nm&show = casrn&show = srsname&show =$
+Overfull \hbox (22.25568pt too wide) in paragraph at lines 173--174
+[][]$\OT1/pcr/m/n/10.95 http : / / nwis . waterdata . usgs . gov / nwis / help 
+/ ?read _ file = stat&format = table$[] 
  []
 
 
@@ -486,16 +527,34 @@ Overfull \vbox (21.68121pt too high) has occurred while \output is active []
 
 
 [3]
+Package color Info: Redefining color shadecolor on input line 211.
+LaTeX Font Info:    Font shape `OT1/pcr/bx/n' in size <10.95> not available
+(Font)              Font shape `OT1/pcr/b/n' tried instead on input line 213.
+Package color Info: Redefining color shadecolor on input line 225.
+
+Overfull \hbox (43.87282pt too wide) in paragraph at lines 235--236
+[]\OT1/ptm/m/n/10.95 Site in-for-ma-tion is ob-tained from []$\OT1/pcr/m/n/10.9
+5 http : / / waterservices . usgs . gov / rest / Site-[]Test-[]Tool .$
+ []
+
+Package color Info: Redefining color shadecolor on input line 245.
+
 Overfull \vbox (21.68121pt too high) has occurred while \output is active []
 
 
 [4]
+Package color Info: Redefining color shadecolor on input line 270.
+
 Overfull \vbox (21.68121pt too high) has occurred while \output is active []
 
 
 [5]
-Overfull \hbox (3.57079pt too wide) in paragraph at lines 232--243
- [][] 
+Package color Info: Redefining color shadecolor on input line 345.
+Package color Info: Redefining color shadecolor on input line 363.
+
+Overfull \hbox (30.64148pt too wide) in paragraph at lines 373--374
+[]\OT1/ptm/m/n/10.95 Parameter in-for-ma-tion is ob-tained from []$\OT1/pcr/m/n
+/10.95 http : / / nwis . waterdata . usgs . gov / nwis / pmcodes/$[] 
  []
 
 
@@ -503,86 +562,116 @@ Overfull \vbox (21.68121pt too high) has occurred while \output is active []
 
 
 [6]
+Package color Info: Redefining color shadecolor on input line 384.
+Package color Info: Redefining color shadecolor on input line 404.
+Package color Info: Redefining color shadecolor on input line 424.
+
 Overfull \vbox (21.68121pt too high) has occurred while \output is active []
 
 
-[7] <dataRetrieval-fig1.pdf, id=224, 433.62pt x 289.08pt>
-File: dataRetrieval-fig1.pdf Graphic file (type pdf)
+[7]
 
-<use dataRetrieval-fig1.pdf>
-Package pdftex.def Info: dataRetrieval-fig1.pdf used on input line 335.
-(pdftex.def)             Requested size: 358.46039pt x 238.98355pt.
+LaTeX Warning: No positions in optional float specifier.
+               Default added (so using `tbp') on input line 441.
+
+<figure/getNWIStemperaturePlot.pdf, id=212, 505.89pt x 505.89pt>
+File: figure/getNWIStemperaturePlot.pdf Graphic file (type pdf)
+
+<use figure/getNWIStemperaturePlot.pdf>
+Package pdftex.def Info: figure/getNWIStemperaturePlot.pdf used on input line 4
+43.
+(pdftex.def)             Requested size: 448.07378pt x 448.07928pt.
 
 Overfull \vbox (21.68121pt too high) has occurred while \output is active []
 
 
-[8 <D:/LADData/RCode/dataRetrieval/vignettes/dataRetrieval-fig1.pdf>]
+[8]
 Overfull \vbox (21.68121pt too high) has occurred while \output is active []
 
 
-[9] <dataRetrieval-fig2.pdf, id=243, 433.62pt x 289.08pt>
-File: dataRetrieval-fig2.pdf Graphic file (type pdf)
+[9 <D:/LADData/RCode/dataRetrieval/vignettes/figure/getNWIStemperaturePlot.pdf>
+]
+Package color Info: Redefining color shadecolor on input line 462.
+Package color Info: Redefining color shadecolor on input line 476.
+Package color Info: Redefining color shadecolor on input line 501.
+
+
+LaTeX Warning: No positions in optional float specifier.
+               Default added (so using `tbp') on input line 510.
 
-<use dataRetrieval-fig2.pdf>
-Package pdftex.def Info: dataRetrieval-fig2.pdf used on input line 393.
-(pdftex.def)             Requested size: 358.46039pt x 238.98355pt.
+<figure/getNWISUnitPlot.pdf, id=229, 505.89pt x 505.89pt>
+File: figure/getNWISUnitPlot.pdf Graphic file (type pdf)
+
+<use figure/getNWISUnitPlot.pdf>
+Package pdftex.def Info: figure/getNWISUnitPlot.pdf used on input line 512.
+(pdftex.def)             Requested size: 448.07378pt x 448.07928pt.
 
 Overfull \vbox (21.68121pt too high) has occurred while \output is active []
 
 
-[10 <D:/LADData/RCode/dataRetrieval/vignettes/dataRetrieval-fig2.pdf>]
-<dataRetrieval-fig3.pdf, id=257, 433.62pt x 289.08pt>
-File: dataRetrieval-fig3.pdf Graphic file (type pdf)
+[10]
+Overfull \vbox (21.68121pt too high) has occurred while \output is active []
 
-<use dataRetrieval-fig3.pdf>
-Package pdftex.def Info: dataRetrieval-fig3.pdf used on input line 439.
-(pdftex.def)             Requested size: 358.46039pt x 238.98355pt.
 
-Overfull \hbox (0.79091pt too wide) in paragraph at lines 450--451
-\T1/aer/m/n/10.95 EPA) or NWIS database. Since STORET does not use USGS pa-ram-
-e-ter codes, a \T1/aett/m/n/10.95 "\T1/aer/m/n/10.95 characteristic
- []
+[11 <D:/LADData/RCode/dataRetrieval/vignettes/figure/getNWISUnitPlot.pdf>]
+Package color Info: Redefining color shadecolor on input line 531.
+Package color Info: Redefining color shadecolor on input line 556.
 
-LaTeX Font Info:    Try loading font information for TS1+aett on input line 454
-.
-(C:/PROGRA~1/R/R-30~1.0/share/texmf/tex/latex\ts1aett.fd
-File: ts1aett.fd 
-)
-LaTeX Font Info:    Try loading font information for TS1+cmtt on input line 454
-.
 
-("C:\Program Files (x86)\MiKTeX 2.9\tex\latex\base\ts1cmtt.fd"
-File: ts1cmtt.fd 1999/05/25 v2.5h Standard LaTeX font definitions
-)
-LaTeX Font Info:    Font shape `TS1/aett/m/sl' in size <10.95> not available
-(Font)              Font shape `TS1/cmtt/m/sl' tried instead on input line 454.
+LaTeX Warning: No positions in optional float specifier.
+               Default added (so using `tbp') on input line 565.
 
+<figure/getQWtemperaturePlot.pdf, id=249, 505.89pt x 505.89pt>
+File: figure/getQWtemperaturePlot.pdf Graphic file (type pdf)
+
+<use figure/getQWtemperaturePlot.pdf>
+Package pdftex.def Info: figure/getQWtemperaturePlot.pdf used on input line 567
+.
+(pdftex.def)             Requested size: 448.07378pt x 448.07928pt.
 
 Overfull \vbox (21.68121pt too high) has occurred while \output is active []
 
 
-[11 <D:/LADData/RCode/dataRetrieval/vignettes/dataRetrieval-fig3.pdf>]
+[12]
 Overfull \vbox (21.68121pt too high) has occurred while \output is active []
 
 
-[12]
-Underfull \hbox (badness 10000) in paragraph at lines 532--550
+[13 <D:/LADData/RCode/dataRetrieval/vignettes/figure/getQWtemperaturePlot.pdf>]
+
+Overfull \hbox (50.793pt too wide) in paragraph at lines 580--581
+\OT1/ptm/m/n/10.95 There are ad-di-tional data sets avail-able on the Wa-ter Qu
+al-ity Data Por-tal ([]$\OT1/pcr/m/n/10.95 http : / / www . waterqualitydata .$
 
  []
 
+Package color Info: Redefining color shadecolor on input line 583.
+LaTeX Font Info:    Try loading font information for TS1+pcr on input line 585.
 
-Overfull \vbox (21.68121pt too high) has occurred while \output is active []
+("C:\Program Files (x86)\MiKTeX 2.9\tex\latex\psnfss\ts1pcr.fd"
+File: ts1pcr.fd 2001/06/04 font definitions for TS1/pcr.
+)
+Package color Info: Redefining color shadecolor on input line 618.
+
+Overfull \hbox (5.25568pt too wide) in paragraph at lines 627--627
+[]\OT1/pcr/m/n/10.95 url_uv <- []\OT1/pcr/b/n/10.95 constructNWISURL[]\OT1/pcr/
+m/n/10.95 (siteNumber,[]"00060"[],startDate,endDate,[]\TS1/pcr/m/n/10.95 '\OT1/
+pcr/m/n/10.95 uv\TS1/pcr/m/n/10.95 '[]\OT1/pcr/m/n/10.95 )[] 
+ []
 
 
-[13]
 Overfull \vbox (21.68121pt too high) has occurred while \output is active []
 
 
 [14]
+Package color Info: Redefining color shadecolor on input line 648.
+Package color Info: Redefining color shadecolor on input line 667.
+
 Overfull \vbox (21.68121pt too high) has occurred while \output is active []
 
 
 [15]
+Package color Info: Redefining color shadecolor on input line 722.
+
 Overfull \vbox (21.68121pt too high) has occurred while \output is active []
 
 
@@ -591,40 +680,87 @@ Overfull \vbox (21.68121pt too high) has occurred while \output is active []
 
 
 [17]
+Package color Info: Redefining color shadecolor on input line 809.
+
 Overfull \vbox (21.68121pt too high) has occurred while \output is active []
 
 
-[18] <dataRetrieval-figegretEx.pdf, id=308, 433.62pt x 433.62pt>
-File: dataRetrieval-figegretEx.pdf Graphic file (type pdf)
+[18]
+Package color Info: Redefining color shadecolor on input line 860.
+LaTeX Font Info:    Try loading font information for OMS+pcr on input line 865.
 
-<use dataRetrieval-figegretEx.pdf>
-Package pdftex.def Info: dataRetrieval-figegretEx.pdf used on input line 771.
-(pdftex.def)             Requested size: 358.46039pt x 358.47534pt.
+ ("C:\Program Files (x86)\MiKTeX 2.9\tex\latex\psnfss\omspcr.fd"
+File: omspcr.fd 
+)
+LaTeX Font Info:    Font shape `OMS/pcr/m/n' in size <10.95> not available
+(Font)              Font shape `OMS/cmsy/m/n' tried instead on input line 865.
 
 Overfull \vbox (21.68121pt too high) has occurred while \output is active []
 
 
 [19]
-Overfull \vbox (21.68121pt too high) has occurred while \output is active []
+Package color Info: Redefining color shadecolor on input line 889.
+Package color Info: Redefining color shadecolor on input line 909.
+
+Overfull \hbox (64.3856pt too wide) in paragraph at lines 934--934
+[]\OT1/pcr/m/n/10.95 ##  First day of the discharge record is 2000-01-01 and la
+st day is 2013-01-01[] 
+ []
+
+
+Overfull \hbox (64.3856pt too wide) in paragraph at lines 934--934
+[]\OT1/pcr/m/n/10.95 ##  The first sample is from 2000-01-04 and the last sampl
+e is from 2012-12-18[] 
+ []
 
 
-[20 <D:/LADData/RCode/dataRetrieval/vignettes/dataRetrieval-figegretEx.pdf>]
 Overfull \vbox (21.68121pt too high) has occurred while \output is active []
 
 
-[21
+[20]
+Package color Info: Redefining color shadecolor on input line 966.
+
+
+LaTeX Warning: No positions in optional float specifier.
+               Default added (so using `tbp') on input line 972.
+
+<figure/egretEx.pdf, id=306, 505.89pt x 505.89pt>
+File: figure/egretEx.pdf Graphic file (type pdf)
+ <use figure/egretEx.pdf>
+Package pdftex.def Info: figure/egretEx.pdf used on input line 974.
+(pdftex.def)             Requested size: 448.07378pt x 448.07928pt.
 
-]
 Overfull \vbox (21.68121pt too high) has occurred while \output is active []
 
 
-[22]
+[21]
 Overfull \vbox (21.68121pt too high) has occurred while \output is active []
 
 
-[23]
-Overfull \vbox (15.16835pt too high) has occurred while \output is active []
+[22 <D:/LADData/RCode/dataRetrieval/vignettes/figure/egretEx.pdf>]
+Package color Info: Redefining color shadecolor on input line 1000.
+Package color Info: Redefining color shadecolor on input line 1010.
+Package color Info: Redefining color shadecolor on input line 1033.
 
+Overfull \hbox (90.66557pt too wide) in paragraph at lines 1036--1036
+[][]\OT1/pcr/b/n/10.95 install.packages[]\OT1/pcr/m/n/10.95 ([]"dataRetrieval"[
+], repos=[]"http://usgs-r.github.com"[], type=[]"source"[])[] 
+ []
+
+
+Overfull \hbox (157.60596pt too wide) in paragraph at lines 1042--1043
+\OT1/ptm/m/n/10.95 C:/Users/userA/Documents/R/win-library/2.15/dataRetrieval, a
+nd the de-fault for a Mac: /Users/userA/Library/R/2.15/library/dataRetrieval.
+ []
+
+
+Overfull \vbox (21.68121pt too high) has occurred while \output is active []
+
+
+[23
+
+]
+Package color Info: Redefining color shadecolor on input line 1046.
 
 Overfull \vbox (21.68121pt too high) has occurred while \output is active []
 
@@ -637,63 +773,105 @@ Overfull \vbox (21.68121pt too high) has occurred while \output is active []
 Overfull \vbox (21.68121pt too high) has occurred while \output is active []
 
 
-[26]
+[26
+
+]
+Overfull \vbox (21.68121pt too high) has occurred while \output is active []
+
+
+[27]
 Overfull \vbox (21.68121pt too high) has occurred while \output is active []
 
 
-[27
+[28
+
+]
+Package color Info: Redefining color shadecolor on input line 1233.
+Package color Info: Redefining color shadecolor on input line 1279.
+
+Overfull \vbox (21.68121pt too high) has occurred while \output is active []
+
 
-] <table1.png, id=357, 554.07pt x 125.71968pt>
+[29
+
+]
+Overfull \hbox (11.82567pt too wide) in paragraph at lines 1297--1297
+[]\OT1/pcr/m/n/10.95 Suspended sediment concentration (SSC) 1980-10-01 1991-09-
+30 3651 mg/l[] 
+ []
+
+<table1.png, id=356, 554.07pt x 125.71968pt>
 File: table1.png Graphic file (type png)
  <use table1.png>
-Package pdftex.def Info: table1.png used on input line 1069.
-(pdftex.def)             Requested size: 358.46039pt x 81.33507pt.
+Package pdftex.def Info: table1.png used on input line 1316.
+(pdftex.def)             Requested size: 554.06865pt x 125.71936pt.
 
 Overfull \vbox (21.68121pt too high) has occurred while \output is active []
 
 
-[28 <D:/LADData/RCode/dataRetrieval/vignettes/table1.png>]
-Package atveryend Info: Empty hook `BeforeClearDocument' on input line 1091.
+[30]
+Overfull \vbox (21.68121pt too high) has occurred while \output is active []
+
+
+[31 <D:/LADData/RCode/dataRetrieval/vignettes/table1.png>]
+Underfull \hbox (badness 1983) in paragraph at lines 1332--1333
+[]\OT1/ptm/m/n/10.95 Hirsch, R. M., Moyer, D. L. and Arch-field, S. A. (2010), 
+Weighted Re-gres-sions on
+ []
+
+
+Underfull \hbox (badness 1221) in paragraph at lines 1332--1333
+\OT1/ptm/m/n/10.95 Time, Dis-charge, and Sea-son (WRTDS), with an Ap-pli-ca-tio
+n to Chesa-peake Bay River
+ []
+
+
+Underfull \hbox (badness 2443) in paragraph at lines 1332--1333
+\OT1/ptm/m/n/10.95 In-puts. JAWRA Jour-nal of the Amer-i-can Wa-ter Re-sources 
+As-so-ci-a-tion, 46: 857-880.
+ []
+
+
+Underfull \hbox (badness 3690) in paragraph at lines 1332--1333
+\OT1/ptm/m/n/10.95 doi: 10.1111/j.1752-1688.2010.00482.x []$\OT1/pcr/m/n/10.95 
+http : / / onlinelibrary . wiley . com / doi /$
+ []
+
+Package atveryend Info: Empty hook `BeforeClearDocument' on input line 1339.
 
 Overfull \vbox (21.68121pt too high) has occurred while \output is active []
 
 
-[29
+[32
 
 ]
-Package atveryend Info: Empty hook `AfterLastShipout' on input line 1091.
+Package atveryend Info: Empty hook `AfterLastShipout' on input line 1339.
  (D:\LADData\RCode\dataRetrieval\vignettes\dataRetrieval.aux)
-Package atveryend Info: Executing hook `AtVeryEndDocument' on input line 1091.
-Package atveryend Info: Executing hook `AtEndAfterFileList' on input line 1091.
+Package atveryend Info: Executing hook `AtVeryEndDocument' on input line 1339.
+Package atveryend Info: Executing hook `AtEndAfterFileList' on input line 1339.
 
 Package rerunfilecheck Info: File `dataRetrieval.out' has not changed.
-(rerunfilecheck)             Checksum: 3B1480473EC95CC4090ED48BB3268644;2119.
+(rerunfilecheck)             Checksum: E39EB3526BB75384BBF16131BFA2BB3D;2017.
  ) 
 Here is how much of TeX's memory you used:
- 7545 strings out of 494045
- 108403 string characters out of 3145961
- 194722 words of memory out of 3000000
- 10596 multiletter control sequences out of 15000+200000
- 45443 words of font info for 92 fonts, out of 3000000 for 9000
+ 8263 strings out of 494045
+ 123475 string characters out of 3145961
+ 215648 words of memory out of 3000000
+ 11333 multiletter control sequences out of 15000+200000
+ 30364 words of font info for 66 fonts, out of 3000000 for 9000
  715 hyphenation exceptions out of 8191
- 35i,12n,28p,912b,489s stack positions out of 5000i,500n,10000p,200000b,50000s
- <C:\Users\ldecicco\AppData\Local\MiKTeX\2.9\fonts\pk\ljfour\jknappen\ec\dpi6
-00\tcst1095.pk><C:/Program Files (x86)/MiKTeX 2.9/fonts/type1/public/amsfonts/c
-m/cmbx10.pfb><C:/Program Files (x86)/MiKTeX 2.9/fonts/type1/public/amsfonts/cm/
-cmbx12.pfb><C:/Program Files (x86)/MiKTeX 2.9/fonts/type1/public/amsfonts/cm/cm
-mi10.pfb><C:/Program Files (x86)/MiKTeX 2.9/fonts/type1/public/amsfonts/cm/cmr1
-0.pfb><C:/Program Files (x86)/MiKTeX 2.9/fonts/type1/public/amsfonts/cm/cmr12.p
-fb><C:/Program Files (x86)/MiKTeX 2.9/fonts/type1/public/amsfonts/cm/cmr17.pfb>
-<C:/Program Files (x86)/MiKTeX 2.9/fonts/type1/public/amsfonts/cm/cmr6.pfb><C:/
-Program Files (x86)/MiKTeX 2.9/fonts/type1/public/amsfonts/cm/cmr7.pfb><C:/Prog
-ram Files (x86)/MiKTeX 2.9/fonts/type1/public/amsfonts/cm/cmr8.pfb><C:/Program 
-Files (x86)/MiKTeX 2.9/fonts/type1/public/amsfonts/cm/cmr9.pfb><C:/Program File
-s (x86)/MiKTeX 2.9/fonts/type1/public/amsfonts/cm/cmsltt10.pfb><C:/Program File
-s (x86)/MiKTeX 2.9/fonts/type1/public/amsfonts/cm/cmti10.pfb><C:/Program Files 
-(x86)/MiKTeX 2.9/fonts/type1/public/amsfonts/cm/cmtt10.pfb>
-Output written on dataRetrieval.pdf (29 pages, 394032 bytes).
+ 44i,15n,42p,912b,451s stack positions out of 5000i,500n,10000p,200000b,50000s
+{C:/Program Files (x86)/MiKTeX 2.9/fonts/enc/dvips/fontname/8r.enc}<C:/Progra
+m Files (x86)/MiKTeX 2.9/fonts/type1/public/amsfonts/cm/cmmi10.pfb><C:/Program 
+Files (x86)/MiKTeX 2.9/fonts/type1/public/amsfonts/cm/cmsy10.pfb><C:/Program Fi
+les (x86)/MiKTeX 2.9/fonts/type1/urw/courier/ucrb8a.pfb><C:/Program Files (x86)
+/MiKTeX 2.9/fonts/type1/urw/courier/ucrr8a.pfb><C:/Program Files (x86)/MiKTeX 2
+.9/fonts/type1/urw/times/utmb8a.pfb><C:/Program Files (x86)/MiKTeX 2.9/fonts/ty
+pe1/urw/times/utmr8a.pfb><C:/Program Files (x86)/MiKTeX 2.9/fonts/type1/urw/tim
+es/utmri8a.pfb>
+Output written on dataRetrieval.pdf (32 pages, 290294 bytes).
 PDF statistics:
- 451 PDF objects out of 1000 (max. 8388607)
- 83 named destinations out of 1000 (max. 500000)
- 270 words of extra memory for PDF output out of 10000 (max. 10000000)
+ 437 PDF objects out of 1000 (max. 8388607)
+ 90 named destinations out of 1000 (max. 500000)
+ 258 words of extra memory for PDF output out of 10000 (max. 10000000)
 
diff --git a/vignettes/dataRetrieval.pdf b/vignettes/dataRetrieval.pdf
index 063848efca335a038447cf51e09688af3de226cb..59f740417379ba9e99d594c93e4cf66c0fa04869 100644
Binary files a/vignettes/dataRetrieval.pdf and b/vignettes/dataRetrieval.pdf differ
diff --git a/vignettes/dataRetrieval.synctex.gz b/vignettes/dataRetrieval.synctex.gz
index 52ad2f678d3fda6fa8cf5b0a7f254641143af314..3f2d8cbc483300a977cc61b7bd6b9509cb34a01e 100644
Binary files a/vignettes/dataRetrieval.synctex.gz and b/vignettes/dataRetrieval.synctex.gz differ
diff --git a/vignettes/dataRetrieval.tex b/vignettes/dataRetrieval.tex
index cb9accedf6a83ab368ec8057362768a1c7478ba5..4a885515a876d8e0a71f64c553a79b56f09dda70 100644
--- a/vignettes/dataRetrieval.tex
+++ b/vignettes/dataRetrieval.tex
@@ -1,10 +1,65 @@
 %\VignetteIndexEntry{Introduction to the dataRetrieval package}
+%\VignetteEngine{knitr::knitr}
 %\VignetteDepends{}
-%\VignetteSuggests{}
-%\VignetteImports{}
-%\VignettePackage{}
-
-\documentclass[a4paper,11pt]{article}
+%\VignetteSuggests{xtable,EGRET}
+%\VignetteImports{zoo, XML, RCurl}
+%\VignettePackage{dataRetrieval}
+
+\documentclass[a4paper,11pt]{article}\usepackage{graphicx, color}
+%% maxwidth is the original width if it is less than linewidth
+%% otherwise use linewidth (to make sure the graphics do not exceed the margin)
+\makeatletter
+\def\maxwidth{ %
+  \ifdim\Gin@nat@width>\linewidth
+    \linewidth
+  \else
+    \Gin@nat@width
+  \fi
+}
+\makeatother
+
+\definecolor{fgcolor}{rgb}{0.2, 0.2, 0.2}
+\newcommand{\hlnumber}[1]{\textcolor[rgb]{0,0,0}{#1}}%
+\newcommand{\hlfunctioncall}[1]{\textcolor[rgb]{0.501960784313725,0,0.329411764705882}{\textbf{#1}}}%
+\newcommand{\hlstring}[1]{\textcolor[rgb]{0.6,0.6,1}{#1}}%
+\newcommand{\hlkeyword}[1]{\textcolor[rgb]{0,0,0}{\textbf{#1}}}%
+\newcommand{\hlargument}[1]{\textcolor[rgb]{0.690196078431373,0.250980392156863,0.0196078431372549}{#1}}%
+\newcommand{\hlcomment}[1]{\textcolor[rgb]{0.180392156862745,0.6,0.341176470588235}{#1}}%
+\newcommand{\hlroxygencomment}[1]{\textcolor[rgb]{0.43921568627451,0.47843137254902,0.701960784313725}{#1}}%
+\newcommand{\hlformalargs}[1]{\textcolor[rgb]{0.690196078431373,0.250980392156863,0.0196078431372549}{#1}}%
+\newcommand{\hleqformalargs}[1]{\textcolor[rgb]{0.690196078431373,0.250980392156863,0.0196078431372549}{#1}}%
+\newcommand{\hlassignement}[1]{\textcolor[rgb]{0,0,0}{\textbf{#1}}}%
+\newcommand{\hlpackage}[1]{\textcolor[rgb]{0.588235294117647,0.709803921568627,0.145098039215686}{#1}}%
+\newcommand{\hlslot}[1]{\textit{#1}}%
+\newcommand{\hlsymbol}[1]{\textcolor[rgb]{0,0,0}{#1}}%
+\newcommand{\hlprompt}[1]{\textcolor[rgb]{0.2,0.2,0.2}{#1}}%
+
+\usepackage{framed}
+\makeatletter
+\newenvironment{kframe}{%
+ \def\at@end@of@kframe{}%
+ \ifinner\ifhmode%
+  \def\at@end@of@kframe{\end{minipage}}%
+  \begin{minipage}{\columnwidth}%
+ \fi\fi%
+ \def\FrameCommand##1{\hskip\@totalleftmargin \hskip-\fboxsep
+ \colorbox{shadecolor}{##1}\hskip-\fboxsep
+     % There is no \\@totalrightmargin, so:
+     \hskip-\linewidth \hskip-\@totalleftmargin \hskip\columnwidth}%
+ \MakeFramed {\advance\hsize-\width
+   \@totalleftmargin\z@ \linewidth\hsize
+   \@setminipage}}%
+ {\par\unskip\endMakeFramed%
+ \at@end@of@kframe}
+\makeatother
+
+\definecolor{shadecolor}{rgb}{.97, .97, .97}
+\definecolor{messagecolor}{rgb}{0, 0, 0}
+\definecolor{warningcolor}{rgb}{1, 0, 1}
+\definecolor{errorcolor}{rgb}{1, 0, 0}
+\newenvironment{knitrout}{}{} % an empty environment to be redefined in TeX
+
+\usepackage{alltt}
 
 \usepackage{amsmath}
 \usepackage{times}
@@ -12,10 +67,12 @@
 \usepackage[numbers, round]{natbib}
 \usepackage[american]{babel}
 \usepackage{authblk}
-\usepackage{footnote}
+\usepackage{subfig}
 \usepackage{placeins}
+\usepackage{footnote}
+\usepackage{tabularx}
 \renewcommand\Affilfont{\itshape\small}
-\usepackage{Sweave}
+
 \renewcommand{\topfraction}{0.85}
 \renewcommand{\textfraction}{0.1}
 \usepackage{graphicx}
@@ -40,9 +97,14 @@
 \newcommand{\Rexpression}[1]{\texttt{#1}}
 \newcommand{\Rmethod}[1]{{\texttt{#1}}}
 \newcommand{\Rfunarg}[1]{{\texttt{#1}}}
+\IfFileExists{upquote.sty}{\usepackage{upquote}}{}
 
 \begin{document}
-\input{dataRetrieval-concordance}
+
+
+
+
+
 
 %------------------------------------------------------------
 \title{The dataRetrieval R package}
@@ -53,6 +115,9 @@
 
 
 
+
+
+
 \maketitle
 \tableofcontents
 
@@ -77,55 +142,59 @@ In this section, we will run through 5 examples, documenting how to get raw data
 %------------------------------------------------------------
 The United States Geological Survey organizes its hydrological data in a standard structure.  Streamgages are located throughout the United States, and each streamgage has a unique ID.  Often (but not always), these IDs are 8 digits long.  The first step to finding data is discovering this 8-digit ID. One potential tool for discovering data is Environmental Data Discovery and Transformation (EnDDaT): \url{http://cida.usgs.gov/enddat/}.  Follow the example on the EnDDaT web page to learn how to discover USGS stations and available data from any location in the United States. 
 
-Once the site-ID is known, the next required input for USGS data retrievals is the 'parameter code'.  This is a 5-digit code that specifies what measured paramater is being requested.  A complete list of possible USGS parameter codes can be found at:
-
-\url{http://nwis.waterdata.usgs.gov/usa/nwis/pmcodes?radio_pm_search=param_group&pm_group=All+--+include+all+parameter+groups&pm_search=&casrn_search=&srsname_search=&format=html_table&show=parameter_group_nm&show=parameter_nm&show=casrn&show=srsname&show=parameter_units}
+Once the site-ID is known, the next required input for USGS data retrievals is the 'parameter code'.  This is a 5-digit code that specifies what measured parameter is being requested.  A complete list of possible USGS parameter codes can be found at \url{http://go.usa.gov/bVDz}.
 
 Not every station will measure all parameters. A short list of commonly measured parameters is shown in Table \ref{tab:params}.
 
 
-
-% latex table generated in R 3.0.0 by xtable 1.7-1 package
-% Thu Apr 25 12:08:34 2013
-\begin{table}[!ht]
+% latex table generated in R 3.0.1 by xtable 1.7-1 package
+% Mon Jun 17 15:34:54 2013
+\begin{table}[ht]
 \centering
-\caption{Common USGS Parameter Codes} 
-\label{tab:params}
-\begin{tabular}{ll}
+\begin{tabular}{rll}
   \hline
-pCode & shortName \\ 
+ & pCode & shortName \\ 
   \hline
-00060 & Discharge [cfs] \\ 
-  00065 & Gage height [ft] \\ 
-  00010 & Temperature [C] \\ 
-  00045 & Precipitation [in] \\ 
-  00400 & pH \\ 
+1 & 00060 & Discharge [cfs] \\ 
+  2 & 00065 & Gage height [ft] \\ 
+  3 & 00010 & Temperature [C] \\ 
+  4 & 00045 & Precipitation [in] \\ 
+  5 & 00400 & pH \\ 
    \hline
 \end{tabular}
+\caption{Common USGS Parameter Codes} 
+\label{tab:params}
 \end{table}
+
+
+
 For real-time data, the parameter code and site ID will suffice.  For most variables that are measured on a continuous basis, the USGS stores the historical data as daily values.  These daily values may be in the form of statistics such as the daily mean values, but they can also include daily maximums, minimums or medians.  These different statistics are specified by a 5-digit \texttt{"}stat code\texttt{"}.  A complete list of stat codes can be found here:
 
 \url{http://nwis.waterdata.usgs.gov/nwis/help/?read_file=stat&format=table}
 
 Some common stat codes are shown in Table \ref{tab:stat}.
-% latex table generated in R 3.0.0 by xtable 1.7-1 package
-% Thu Apr 25 12:08:34 2013
-\begin{table}[!ht]
+% latex table generated in R 3.0.1 by xtable 1.7-1 package
+% Mon Jun 17 15:34:54 2013
+\begin{table}[ht]
 \centering
-\caption{Commonly found USGS Stat Codes} 
-\label{tab:stat}
-\begin{tabular}{ll}
+\begin{tabular}{rll}
   \hline
-StatCode & shortName \\ 
+ & StatCode & shortName \\ 
   \hline
-00001 & Maximum \\ 
-  00002 & Minimum \\ 
-  00003 & Mean \\ 
-  00008 & Median \\ 
+1 & 00001 & Maximum \\ 
+  2 & 00002 & Minimum \\ 
+  3 & 00003 & Mean \\ 
+  4 & 00008 & Median \\ 
    \hline
 \end{tabular}
+\caption{Commonly found USGS Stat Codes} 
+\label{tab:stat}
 \end{table}
+
+
+
 \FloatBarrier
+
 %------------------------------------------------------------
 \subsection{Site Information}
 \label{sec:usgsSite}
@@ -138,139 +207,169 @@ StatCode & shortName \\
 Use the getSiteFileData function to obtain all of the information available for a particular USGS site such as full station name, drainage area, latitude, and longitude:
 
 
-\begin{Schunk}
-\begin{Sinput}
-> library(dataRetrieval)
-> # Site ID for Choptank River near Greensboro, MD
-> siteNumber <- "01491000" 
-> ChoptankInfo <- getSiteFileData(siteNumber)
-\end{Sinput}
-\end{Schunk}
+\begin{knitrout}
+\definecolor{shadecolor}{rgb}{0.969, 0.969, 0.969}\color{fgcolor}\begin{kframe}
+\begin{alltt}
+\hlfunctioncall{library}(dataRetrieval)
+\hlcomment{# Site ID for Choptank River near Greensboro, MD}
+siteNumber <- \hlstring{"01491000"} 
+ChoptankInfo <- \hlfunctioncall{getSiteFileData}(siteNumber)
+\end{alltt}
+\end{kframe}
+\end{knitrout}
+
 
 A list of the available columns is found in Appendix \ref{sec:appendix2INFO}: INFO dataframe. Pulling out a specific piece of information, in this case the station name, can be done as follows:
 
-\begin{Schunk}
-\begin{Sinput}
-> ChoptankInfo$station.nm
-\end{Sinput}
-\begin{Soutput}
-[1] "CHOPTANK RIVER NEAR GREENSBORO, MD"
-\end{Soutput}
-\end{Schunk}
+\begin{knitrout}
+\definecolor{shadecolor}{rgb}{0.969, 0.969, 0.969}\color{fgcolor}\begin{kframe}
+\begin{alltt}
+ChoptankInfo$station.nm
+\end{alltt}
+\begin{verbatim}
+## [1] "CHOPTANK RIVER NEAR GREENSBORO, MD"
+\end{verbatim}
+\end{kframe}
+\end{knitrout}
+
 Site information is obtained from \url{http://waterservices.usgs.gov/rest/Site-Test-Tool.html}
 \FloatBarrier
+
 %------------------------------------------------------------
 \subsubsection{getDataAvailability}
 \label{sec:usgsDataAvailability}
 %------------------------------------------------------------
 To find out the available data at a particular USGS site, including measured parameters, period of record, and number of samples (count), use the getDataAvailability function:
 
-\begin{Schunk}
-\begin{Sinput}
-> # Continuing from the previous example:
-> ChoptankAvailableData <- getDataAvailability(siteNumber)
-> head(ChoptankAvailableData)
-\end{Sinput}
-\begin{Soutput}
-  parameter_cd statCd  startDate    endDate count service
-2        00010  00001 1988-10-01 2012-05-09   894      dv
-3        00010  00002 2010-10-01 2012-05-09   529      dv
-4        00010  00003 2010-10-01 2012-05-09   529      dv
-5        00060  00003 1948-01-01 2013-04-24 23856      dv
-6        00095  00001 2010-10-01 2012-05-09   527      dv
-7        00095  00002 2010-10-01 2012-05-09   527      dv
-\end{Soutput}
-\end{Schunk}
+\begin{knitrout}
+\definecolor{shadecolor}{rgb}{0.969, 0.969, 0.969}\color{fgcolor}\begin{kframe}
+\begin{alltt}
+\hlcomment{# Continuing from the previous example:}
+ChoptankAvailableData <- \hlfunctioncall{getDataAvailability}(siteNumber)
+\hlfunctioncall{head}(ChoptankAvailableData)
+\end{alltt}
+\begin{verbatim}
+##   parameter_cd statCd  startDate    endDate count service
+## 2        00010  00001 1988-10-01 2012-05-09   894      dv
+## 3        00010  00002 2010-10-01 2012-05-09   529      dv
+## 4        00010  00003 2010-10-01 2012-05-09   529      dv
+## 5        00060  00003 1948-01-01 2013-06-16 23908      dv
+## 6        00095  00001 2010-10-01 2012-05-09   527      dv
+## 7        00095  00002 2010-10-01 2012-05-09   527      dv
+\end{verbatim}
+\end{kframe}
+\end{knitrout}
+
 
 There is an additional argument to getDataAvailability called longNames, which defaults to FALSE. Setting longNames to TRUE causes the function to make a web service call for each parameter and return expanded information on that parameter. Currently, this is a slow process because each parameter code triggers a separate web service call. If the site does not have many measured parameters, setting longNames to TRUE is reasonable.
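+
+As a minimal sketch of that option (reusing the siteNumber defined above, and assuming you are willing to wait for one web service call per parameter):
+
+\begin{verbatim}
+# Sketch: request expanded parameter information for every
+# measured parameter at the Choptank River site (can be slow).
+ChoptankAvailableLong <- getDataAvailability(siteNumber, longNames=TRUE)
+\end{verbatim}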
 
 It is also possible to request parameter information for only a subset of variables. In the following example, we retrieve just the daily mean parameter information from the Choptank data availability dataframe (excluding all unit value and water quality values). getMultipleParameterNames is the function embedded in getDataAvailability, but it can also be used as a standalone function.
 
 
-\begin{Schunk}
-\begin{Sinput}
-> # Continuing from the previous example:
-> # This pulls out just the daily data:
-> ChoptankDailyData <- subset(ChoptankAvailableData,"dv" == service)
-> # This pulls out the mean:
-> ChoptankDailyData <- subset(ChoptankDailyData,"00003" == statCd)
-> #Now, make a call to get all of the parameter information:
-> pCodeINFO <- getMultipleParameterNames(ChoptankDailyData$parameter_cd)
-\end{Sinput}
-\begin{Soutput}
-Percent complete: 
-20 	40 	60 	80 	100 	
-\end{Soutput}
-\begin{Sinput}
-> #Merge the available dataframe with the parameter information dataframe:
-> ChoptankDailyData <- merge(ChoptankDailyData,pCodeINFO,by="parameter_cd")
-\end{Sinput}
-\end{Schunk}
+\begin{knitrout}
+\definecolor{shadecolor}{rgb}{0.969, 0.969, 0.969}\color{fgcolor}\begin{kframe}
+\begin{alltt}
+\hlcomment{# Continuing from the previous example:}
+\hlcomment{# This pulls out just the daily data:}
+ChoptankDailyData <- \hlfunctioncall{subset}(ChoptankAvailableData,
+                            \hlstring{"dv"} == service)
+
+\hlcomment{# This pulls out the mean:}
+ChoptankDailyData <- \hlfunctioncall{subset}(ChoptankDailyData,
+                            \hlstring{"00003"} == statCd)
+
+\hlcomment{#Now, make a call to get all of the parameter information:}
+pCodeINFO <- \hlfunctioncall{getMultipleParameterNames}(
+  ChoptankDailyData$parameter_cd)
+\end{alltt}
+\begin{verbatim}
+## Percent complete: 
+## 20 	40 	60 	80 	100 	
+\end{verbatim}
+\begin{alltt}
+
+\hlcomment{#Merge the available dataframes:}
+ChoptankDailyData <- \hlfunctioncall{merge}(ChoptankDailyData,
+                           pCodeINFO,by=\hlstring{"parameter_cd"})
+\end{alltt}
+\end{kframe}
+\end{knitrout}
+
 
 The daily data at the Choptank River site can be displayed in a \LaTeX{} table using the xtable package. See Appendix \ref{app:createWordTable} for instructions on converting an R dataframe to a table in Microsoft Excel or Word.
 
-\begin{Schunk}
-\begin{Sinput}
-> tableData <- with(ChoptankDailyData, 
-       data.frame(shortName=srsname, 
-       Start=as.character(startDate), 
-       End=as.character(endDate), 
-       Count=as.character(count),
-       Units=parameter_units)
-       )
-> data.table <- xtable(tableData,label="tab:gda",
-     caption="Daily mean data availabile at the Choptank River")
-> print(data.table, 
-       caption.placement="top",include.rownames=FALSE)
-\end{Sinput}
-% latex table generated in R 3.0.0 by xtable 1.7-1 package
-% Thu Apr 25 12:08:39 2013
+\begin{kframe}
+\begin{alltt}
+tableData <- \hlfunctioncall{with}(ChoptankDailyData, 
+      \hlfunctioncall{data.frame}(shortName=srsname, 
+      Start=\hlfunctioncall{as.character}(startDate), 
+      End=\hlfunctioncall{as.character}(endDate), 
+      Count=\hlfunctioncall{as.character}(count),
+      Units=parameter_units)
+      )
+
+
+\hlfunctioncall{xtable}(tableData,label=\hlstring{"tab:gda"},
+    caption=\hlstring{"Daily mean data available at the Choptank River"})
+\end{alltt}
+\end{kframe}% latex table generated in R 3.0.1 by xtable 1.7-1 package
+% Mon Jun 17 15:35:00 2013
 \begin{table}[ht]
 \centering
-\caption{Daily mean data availabile at the Choptank River} 
-\label{tab:gda}
-\begin{tabular}{lllll}
+\begin{tabular}{rlllll}
   \hline
-shortName & Start & End & Count & Units \\ 
+ & shortName & Start & End & Count & Units \\ 
   \hline
-Temperature, water & 2010-10-01 & 2012-05-09 & 529 & deg C \\ 
-  Stream flow, mean. daily & 1948-01-01 & 2013-04-24 & 23856 & cfs \\ 
-  Specific conductance & 2010-10-01 & 2012-05-09 & 527 & uS/cm @25C \\ 
-  Suspended sediment concentration (SSC) & 1980-10-01 & 1991-09-30 & 3651 & mg/l \\ 
-  Suspended sediment discharge & 1980-10-01 & 1991-09-30 & 3652 & tons/day \\ 
+1 & Temperature, water & 2010-10-01 & 2012-05-09 & 529 & deg C \\ 
+  2 & Stream flow, mean. daily & 1948-01-01 & 2013-06-16 & 23908 & cfs \\ 
+  3 & Specific conductance & 2010-10-01 & 2012-05-09 & 527 & uS/cm @25C \\ 
+  4 & Suspended sediment concentration (SSC) & 1980-10-01 & 1991-09-30 & 3651 & mg/l \\ 
+  5 & Suspended sediment discharge & 1980-10-01 & 1991-09-30 & 3652 & tons/day \\ 
    \hline
 \end{tabular}
-\end{table}\end{Schunk}
+\caption{Daily mean data available at the Choptank River} 
+\label{tab:gda}
+\end{table}
+
+
 
 
 \FloatBarrier
+
 %------------------------------------------------------------
 \subsection{Parameter Information}
 \label{sec:usgsParams}
 %------------------------------------------------------------
 To obtain all of the available information concerning a measured parameter, use the getParameterInfo function:
-\begin{Schunk}
-\begin{Sinput}
-> # Using defaults:
-> parameterCd <- "00618" 
-> parameterINFO <- getParameterInfo(parameterCd)
-> colnames(parameterINFO)
-\end{Sinput}
-\begin{Soutput}
-[1] "parameter_cd"       "parameter_group_nm" "parameter_nm"      
-[4] "casrn"              "srsname"            "parameter_units"   
-\end{Soutput}
-\end{Schunk}
+\begin{knitrout}
+\definecolor{shadecolor}{rgb}{0.969, 0.969, 0.969}\color{fgcolor}\begin{kframe}
+\begin{alltt}
+\hlcomment{# Using defaults:}
+parameterCd <- \hlstring{"00618"} 
+parameterINFO <- \hlfunctioncall{getParameterInfo}(parameterCd)
+\hlfunctioncall{colnames}(parameterINFO)
+\end{alltt}
+\begin{verbatim}
+## [1] "parameter_cd"       "parameter_group_nm"
+## [3] "parameter_nm"       "casrn"             
+## [5] "srsname"            "parameter_units"
+\end{verbatim}
+\end{kframe}
+\end{knitrout}
+
 
 Pulling out a specific piece of information, in this case the parameter name, can be done as follows:
-\begin{Schunk}
-\begin{Sinput}
-> parameterINFO$parameter_nm
-\end{Sinput}
-\begin{Soutput}
-[1] "Nitrate, water, filtered, milligrams per liter as nitrogen"
-\end{Soutput}
-\end{Schunk}
+\begin{knitrout}
+\definecolor{shadecolor}{rgb}{0.969, 0.969, 0.969}\color{fgcolor}\begin{kframe}
+\begin{alltt}
+parameterINFO$parameter_nm
+\end{alltt}
+\begin{verbatim}
+## [1] "Nitrate, water, filtered, milligrams per liter as nitrogen"
+\end{verbatim}
+\end{kframe}
+\end{knitrout}
+
 Parameter information is obtained from \url{http://nwis.waterdata.usgs.gov/nwis/pmcodes/}
 \FloatBarrier
 %------------------------------------------------------------
@@ -281,122 +380,146 @@ To obtain historic daily records of USGS data, use the retrieveNWISData function
 
 The dates (start and end) need to be in the format \texttt{"}YYYY-MM-DD\texttt{"} (note: the user does need to include the quotes).  Setting the start date to \texttt{"}\texttt{"} indicates to the program to ask for the earliest available date; setting the end date to \texttt{"}\texttt{"} asks for the latest available date.
 
-\begin{Schunk}
-\begin{Sinput}
-> # Continuing with our Choptank River example
-> parameterCd <- "00060"  # Discharge (cfs)
-> startDate <- ""  # Will request earliest date
-> endDate <- "" # Will request latest date
-> discharge <- retrieveNWISData(siteNumber, parameterCd, startDate, endDate)
-\end{Sinput}
-\end{Schunk}
+\begin{knitrout}
+\definecolor{shadecolor}{rgb}{0.969, 0.969, 0.969}\color{fgcolor}\begin{kframe}
+\begin{alltt}
+
+\hlcomment{# Continuing with our Choptank River example}
+parameterCd <- \hlstring{"00060"}  # \hlfunctioncall{Discharge} (cfs)
+startDate <- \hlstring{""}  # Will request earliest date
+endDate <- \hlstring{""} # Will request latest date
+
+discharge <- \hlfunctioncall{retrieveNWISData}(siteNumber, 
+                    parameterCd, startDate, endDate)
+\end{alltt}
+\end{kframe}
+\end{knitrout}
+
 
 The variable datetime is automatically imported as a Date. Each requested parameter has a value and remark code column.  The names of these columns depend on the requested parameter and stat code combinations. USGS remark codes are often \texttt{"}A\texttt{"} (approved for publication) or \texttt{"}P\texttt{"} (provisional data subject to revision). A more complete list of remark codes can be found here:
 \url{http://waterdata.usgs.gov/usa/nwis/help?codes_help}
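+
+One quick way to inspect these columns is with base R. The following is a minimal sketch using the discharge dataframe retrieved above; it assumes the remark-code columns end in \verb@_cd@, as in the unit-value output shown later in this vignette:
+
+\begin{verbatim}
+# Sketch: list the returned column names, then tally the remark codes
+# (assumes remark-code columns carry a "_cd" suffix).
+names(discharge)
+remarkCols <- grep("_cd$", names(discharge), value=TRUE)
+table(discharge[[remarkCols[1]]])
+\end{verbatim}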
 
 Another example that doesn't use the defaults would be a request for mean and maximum daily temperature and discharge in early 2012:
-\begin{Schunk}
-\begin{Sinput}
-> parameterCd <- c("00010","00060")  # Temperature and discharge
-> statCd <- c("00001","00003")  # Mean and maximum
-> startDate <- "2012-01-01"
-> endDate <- "2012-06-30"
-> temperatureAndFlow <- retrieveNWISData(siteNumber, parameterCd, 
-         startDate, endDate, StatCd=statCd,interactive=FALSE)
-> 
-\end{Sinput}
-\end{Schunk}
+\begin{knitrout}
+\definecolor{shadecolor}{rgb}{0.969, 0.969, 0.969}\color{fgcolor}\begin{kframe}
+\begin{alltt}
+
+parameterCd <- \hlfunctioncall{c}(\hlstring{"00010"},\hlstring{"00060"})  # Temperature and discharge
+statCd <- \hlfunctioncall{c}(\hlstring{"00001"},\hlstring{"00003"})  # Mean and maximum
+startDate <- \hlstring{"2012-01-01"}
+endDate <- \hlstring{"2012-06-30"}
+
+temperatureAndFlow <- \hlfunctioncall{retrieveNWISData}(siteNumber, parameterCd, 
+        startDate, endDate, StatCd=statCd)
+\end{alltt}
+\end{kframe}
+\end{knitrout}
+
 
 Daily data is pulled from \url{http://waterservices.usgs.gov/rest/DV-Test-Tool.html}. 
 
-An example of plotting the above data (Figure \ref{fig:TD}):
-
-\begin{Schunk}
-\begin{Sinput}
-> colnames <- names(temperatureAndFlow)
-> with(temperatureAndFlow, plot(
-   get(colnames[3]), get(colnames[6]),
-   xlab="Date",ylab="Temperature [C]"
-   ))
-> par(new=TRUE)
-> with(temperatureAndFlow, plot(
-   get(colnames[3]), get(colnames[8]),
-   col="red",type="l",xaxt="n",yaxt="n",xlab="",ylab="",axes=FALSE
-   ))
-> axis(4,col="red",col.axis="red")
-> mtext("Discharge [cfs]",side=4,line=3,col="red")
-> title(paste(ChoptankInfo$station.nm,"2012",sep=" "))
-\end{Sinput}
-\end{Schunk}
-
-
-\begin{figure}
-\begin{center}
-\includegraphics{dataRetrieval-fig1}
-\end{center}
-\caption{Temperature and discharge plot of Choptank River in 2012.}
-\label{fig:TD}
+An example of plotting the above data (Figure \ref{fig:getNWIStemperaturePlot}):
+
+\begin{knitrout}
+\definecolor{shadecolor}{rgb}{0.969, 0.969, 0.969}\color{fgcolor}\begin{kframe}
+\begin{alltt}
+\hlfunctioncall{par}(mar=\hlfunctioncall{c}(5,5,5,5))
+colnames <- \hlfunctioncall{names}(temperatureAndFlow)
+\hlfunctioncall{with}(temperatureAndFlow, \hlfunctioncall{plot}(
+  \hlfunctioncall{get}(colnames[3]), \hlfunctioncall{get}(colnames[6]),
+  xlab=\hlstring{"Date"},ylab=\hlstring{"Temperature [C]"}
+  ))
+\hlfunctioncall{par}(new=TRUE)
+\hlfunctioncall{with}(temperatureAndFlow, \hlfunctioncall{plot}(
+  \hlfunctioncall{get}(colnames[3]), \hlfunctioncall{get}(colnames[8]),
+  col=\hlstring{"red"},type=\hlstring{"l"},xaxt=\hlstring{"n"},yaxt=\hlstring{"n"},xlab=\hlstring{""},ylab=\hlstring{""},axes=FALSE
+  ))
+\hlfunctioncall{axis}(4,col=\hlstring{"red"},col.axis=\hlstring{"red"})
+\hlfunctioncall{mtext}(\hlstring{"Discharge [cfs]"},side=4,line=3,col=\hlstring{"red"})
+\hlfunctioncall{title}(\hlfunctioncall{paste}(ChoptankInfo$station.nm,\hlstring{"2012"},sep=\hlstring{" "}))
+\end{alltt}
+\end{kframe}\begin{figure}[]
+
+\includegraphics[width=\maxwidth]{figure/getNWIStemperaturePlot} \caption[Temperature and discharge plot of Choptank River in 2012]{Temperature and discharge plot of Choptank River in 2012.\label{fig:getNWIStemperaturePlot}}
 \end{figure}
 
 
+\end{knitrout}
+
+
+
+There are occasions where NWIS values are not reported as numbers; instead, there might be text describing a certain event, such as \texttt{"}Ice\texttt{"}.  Any value that cannot be converted to a number will be reported as NA in this package.
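+
+A quick way to check whether any values were dropped this way is to count the resulting NAs. A minimal sketch, using the temperatureAndFlow dataframe from the example above (column 6 holds the temperature values, as in the plotting code):
+
+\begin{verbatim}
+# Sketch: count values that could not be converted to numbers
+# (column 6 is the temperature column used in the plot above).
+sum(is.na(temperatureAndFlow[[6]]))
+\end{verbatim}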
 
 \FloatBarrier
+
 %------------------------------------------------------------
 \subsection{Unit Values}
 \label{sec:usgsRT}
 %------------------------------------------------------------
 Any data that are collected at regular time intervals (such as 15-minute or hourly) are known as \texttt{"}Unit Values\texttt{"}. Many of these are delivered on a real-time basis, and very recent data (often less than an hour old) are available through the function retrieveUnitNWISData.  Some of these Unit Values are available for the past several years, and some are only available for a recent time period, such as 120 days or a year.  Here is an example of a retrieval of such data.  
 
-\begin{Schunk}
-\begin{Sinput}
-> parameterCd <- "00060"  # Discharge (cfs)
-> startDate <- "2012-05-12" 
-> # or use (yesterday): startDate <- as.character(Sys.Date()-1)
-> endDate <- "2012-05-13" 
-> # or use (today):  endDate <- as.character(Sys.Date())
-> dischargeToday <- retrieveUnitNWISData(siteNumber, parameterCd, 
-         startDate, endDate)
-\end{Sinput}
-\end{Schunk}
+\begin{knitrout}
+\definecolor{shadecolor}{rgb}{0.969, 0.969, 0.969}\color{fgcolor}\begin{kframe}
+\begin{alltt}
+
+parameterCd <- \hlstring{"00060"}  # \hlfunctioncall{Discharge} (cfs)
+startDate <- \hlstring{"2012-05-12"} 
+endDate <- \hlstring{"2012-05-13"} 
+dischargeToday <- \hlfunctioncall{retrieveUnitNWISData}(siteNumber, parameterCd, 
+        startDate, endDate)
+\end{alltt}
+\end{kframe}
+\end{knitrout}
+
+This call produces the following dataframe:
-\begin{Schunk}
-\begin{Soutput}
-  agency     site            dateTime X02_00060_00011 X02_00060_00011_cd
-1   USGS 01491000 2012-05-12 00:00:00              83                  A
-2   USGS 01491000 2012-05-12 00:15:00              83                  A
-3   USGS 01491000 2012-05-12 00:30:00              83                  A
-4   USGS 01491000 2012-05-12 00:45:00              83                  A
-5   USGS 01491000 2012-05-12 01:00:00              85                  A
-6   USGS 01491000 2012-05-12 01:15:00              83                  A
-\end{Soutput}
-\end{Schunk}
+\begin{knitrout}
+\definecolor{shadecolor}{rgb}{0.969, 0.969, 0.969}\color{fgcolor}\begin{kframe}
+\begin{verbatim}
+##   agency     site            dateTime X02_00060_00011
+## 1   USGS 01491000 2012-05-12 00:00:00              83
+## 2   USGS 01491000 2012-05-12 00:15:00              83
+## 3   USGS 01491000 2012-05-12 00:30:00              83
+## 4   USGS 01491000 2012-05-12 00:45:00              83
+## 5   USGS 01491000 2012-05-12 01:00:00              85
+## 6   USGS 01491000 2012-05-12 01:15:00              83
+##   X02_00060_00011_cd
+## 1                  A
+## 2                  A
+## 3                  A
+## 4                  A
+## 5                  A
+## 6                  A
+\end{verbatim}
+\end{kframe}
+\end{knitrout}
+
 
 Note that time now becomes important, so the variable datetime is a POSIXct, and the time zone is included in a separate column. Data is pulled from \url{http://waterservices.usgs.gov/rest/IV-Test-Tool.html}. There are occasions where NWIS values are not reported as numbers; a common example is \texttt{"}Ice\texttt{"}.  Any value that cannot be converted to a number will be reported as NA in this package.
 
-A simple plotting example is shown in Figure \ref{fig:RT}:
-\begin{Schunk}
-\begin{Sinput}
-> colnames <- names(dischargeToday)
-> with(dischargeToday, plot(
-   get(colnames[3]), get(colnames[4]),
-   ylab="Discharge [cfs]",xlab=""
-   ))
-> title(ChoptankInfo$station.nm)
-> 
-\end{Sinput}
-\end{Schunk}
+A simple plotting example is shown in Figure \ref{fig:getNWISUnitPlot}:
+\begin{knitrout}
+\definecolor{shadecolor}{rgb}{0.969, 0.969, 0.969}\color{fgcolor}\begin{kframe}
+\begin{alltt}
+colnames <- \hlfunctioncall{names}(dischargeToday)
+\hlfunctioncall{with}(dischargeToday, \hlfunctioncall{plot}(
+  dateTime, \hlfunctioncall{get}(colnames[4]),
+  ylab=\hlstring{"Discharge [cfs]"},xlab=\hlstring{""}
+  ))
+\hlfunctioncall{title}(ChoptankInfo$station.nm)
+\end{alltt}
+\end{kframe}\begin{figure}[]
+
+\includegraphics[width=\maxwidth]{figure/getNWISUnitPlot} \caption[Real-time discharge plot of Choptank River from May 12-13, 2012]{Real-time discharge plot of Choptank River from May 12-13, 2012.\label{fig:getNWISUnitPlot}}
+\end{figure}
+
+
+\end{knitrout}
+
 \newpage
 
-\begin{figure}
-\begin{center}
-\includegraphics{dataRetrieval-fig2}
-\end{center}
-\caption{Real-time discharge plot of Choptank River from May 12-13, 2012.}
-\label{fig:RT}
-\end{figure}
 
 \FloatBarrier
+
 %------------------------------------------------------------
 \subsection{Water Quality Values}
 \label{sec:usgsWQP}
@@ -404,67 +527,84 @@ A simple plotting example is shown in Figure \ref{fig:RT}:
 To get USGS water quality data from water samples collected at the streamgage (as distinct from unit values collected through some type of automatic monitor), we can use the Water Quality Data Portal: \url{http://www.waterqualitydata.us/}. The raw data are obtained from the function getRawQWData, with similar input arguments: siteNumber, parameterCd, startDate, endDate, and interactive. The difference is in parameterCd: in this function, multiple parameters can be queried using a \texttt{"};\texttt{"} separator, and setting parameterCd to \texttt{"}\texttt{"} will return all of the measured observations. The raw data can be overwhelming (see Appendix \ref{sec:appendix2WQP}): there is a large amount of data returned for each observation. A simplified version of the data can be obtained using getQWData. 
 
 
-\begin{Schunk}
-\begin{Sinput}
-> # Dissolved Nitrate parameter codes:
-> parameterCd <- c("00618","71851")
-> startDate <- "1979-10-11"
-> endDate <- "2012-12-18"
-> dissolvedNitrate <- getRawQWData(siteNumber, parameterCd, 
-       startDate, endDate)
-> dissolvedNitrateSimple <- getQWData(siteNumber, parameterCd, 
-         startDate, endDate)
-> names(dissolvedNitrateSimple)
-\end{Sinput}
-\begin{Soutput}
-[1] "dateTime"        "qualifier.71851" "value.71851"     "qualifier.00618"
-[5] "value.00618"    
-\end{Soutput}
-\end{Schunk}
-Note that in this dataframe, datetime is imported as Dates (no times are included), and the qualifier is either blank or \texttt{"}\verb@<@\texttt{"} signifying a censored value. A plotting example is shown in Figure \ref{fig:nitrate}.
-
-\begin{Schunk}
-\begin{Sinput}
-> with(dissolvedNitrateSimple, plot(
-   dateTime, value.00618,
-   xlab="Date",ylab = paste(parameterINFO$srsname,
-       "[",parameterINFO$parameter_units,"]")
-   ))
-> title(ChoptankInfo$station.nm)
-\end{Sinput}
-\end{Schunk}
-
-\begin{figure}
-\begin{center}
-\includegraphics{dataRetrieval-fig3}
-\end{center}
-\caption{Nitrate plot of Choptank River.}
-\label{fig:nitrate}
+\begin{knitrout}
+\definecolor{shadecolor}{rgb}{0.969, 0.969, 0.969}\color{fgcolor}\begin{kframe}
+\begin{alltt}
+ 
+\hlcomment{# Dissolved Nitrate parameter codes:}
+parameterCd <- \hlfunctioncall{c}(\hlstring{"00618"},\hlstring{"71851"})
+startDate <- \hlstring{"1979-10-11"}
+endDate <- \hlstring{"2012-12-18"}
+
+dissolvedNitrate <- \hlfunctioncall{getRawQWData}(siteNumber, parameterCd, 
+      startDate, endDate)
+
+dissolvedNitrateSimple <- \hlfunctioncall{getQWData}(siteNumber, parameterCd, 
+        startDate, endDate)
+\hlfunctioncall{names}(dissolvedNitrateSimple)
+\end{alltt}
+\begin{verbatim}
+## [1] "dateTime"        "qualifier.00618" "value.00618"    
+## [4] "qualifier.71851" "value.71851"
+\end{verbatim}
+\end{kframe}
+\end{knitrout}
+
+Note that in this dataframe, datetime is imported as Dates (no times are included), and the qualifier is either blank or \texttt{"}\verb@<@\texttt{"} signifying a censored value. A plotting example is shown in Figure \ref{fig:getQWtemperaturePlot}.
+
+\begin{knitrout}
+\definecolor{shadecolor}{rgb}{0.969, 0.969, 0.969}\color{fgcolor}\begin{kframe}
+\begin{alltt}
+\hlfunctioncall{with}(dissolvedNitrateSimple, \hlfunctioncall{plot}(
+  dateTime, value.00618,
+  xlab=\hlstring{"Date"},ylab = \hlfunctioncall{paste}(parameterINFO$srsname,
+      \hlstring{"["},parameterINFO$parameter_units,\hlstring{"]"})
+  ))
+\hlfunctioncall{title}(ChoptankInfo$station.nm)
+\end{alltt}
+\end{kframe}\begin{figure}[]
+
+\includegraphics[width=\maxwidth]{figure/getQWtemperaturePlot} \caption[Nitrate plot of Choptank River]{Nitrate plot of Choptank River.\label{fig:getQWtemperaturePlot}}
 \end{figure}
 
+
+\end{knitrout}
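+
+To work with the censored observations directly, the qualifier columns returned by getQWData can be used to flag them. A minimal sketch, using the dissolvedNitrateSimple column names listed above:
+
+\begin{verbatim}
+# Sketch: flag censored nitrate values (qualifier "<") and count them.
+censored <- dissolvedNitrateSimple$qualifier.00618 == "<"
+sum(censored, na.rm=TRUE)
+\end{verbatim}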
+
+
 \FloatBarrier
+
 %------------------------------------------------------------
 \subsection{STORET Water Quality Retrievals}
 \label{sec:usgsSTORET}
 %------------------------------------------------------------
 There are additional data sets available on the Water Quality Data Portal (\url{http://www.waterqualitydata.us/}).  These data sets can be housed in either the STORET (data from EPA) or NWIS database.  Since STORET does not use USGS parameter codes, a \texttt{"}characteristic name\texttt{"} must be supplied.  The following example retrieves specific conductance from a DNR site in Wisconsin.
 
-\begin{Schunk}
-\begin{Sinput}
-> specificCond <- getWQPData('WIDNR_WQX-10032762', 
-         'Specific conductance', '', '')
-> head(specificCond)
-\end{Sinput}
-\begin{Soutput}
-    dateTime qualifier.Specific conductance value.Specific conductance
-1 2011-02-14                                                      1360
-2 2011-02-17                                                      1930
-3 2011-03-03                                                      1240
-4 2011-03-10                                                      1480
-5 2011-03-29                                                      1130
-6 2011-04-07                                                      1200
-\end{Soutput}
-\end{Schunk}
+\begin{knitrout}
+\definecolor{shadecolor}{rgb}{0.969, 0.969, 0.969}\color{fgcolor}\begin{kframe}
+\begin{alltt}
+specificCond <- \hlfunctioncall{getWQPData}(\hlstring{'WIDNR_WQX-10032762'}, 
+        \hlstring{'Specific conductance'}, \hlstring{''}, \hlstring{''})
+\hlfunctioncall{head}(specificCond)
+\end{alltt}
+\begin{verbatim}
+##     dateTime qualifier.Specific conductance
+## 1 2011-02-14                               
+## 2 2011-02-17                               
+## 3 2011-03-03                               
+## 4 2011-03-10                               
+## 5 2011-03-29                               
+## 6 2011-04-07                               
+##   value.Specific conductance
+## 1                       1360
+## 2                       1930
+## 3                       1240
+## 4                       1480
+## 5                       1130
+## 6                       1200
+\end{verbatim}
+\end{kframe}
+\end{knitrout}
+
 
 \FloatBarrier
 %------------------------------------------------------------
@@ -474,19 +614,24 @@ There are additional data sets available on the Water Quality Data Portal (\url{
 There may be times when you are interested in seeing the URL (web address) that was used to obtain the raw data. The constructNWISURL function returns this URL.  Aside from the input variables that have already been described, there is a new argument, \texttt{"}service\texttt{"}. The service argument can be \texttt{"}dv\texttt{"} (daily values), \texttt{"}uv\texttt{"} (unit values), \texttt{"}qw\texttt{"} (NWIS water quality values), or \texttt{"}wqp\texttt{"} (general Water Quality Portal values).
  
 
-\begin{Schunk}
-\begin{Sinput}
-> # Dissolved Nitrate parameter codes:
-> pCode <- c("00618","71851")
-> startDate <- "1964-06-11"
-> endDate <- "2012-12-18"
-> url_qw <- constructNWISURL(siteNumber,pCode,startDate,endDate,'qw')
-> url_dv <- constructNWISURL(siteNumber,"00060",startDate,endDate,'dv',statCd="00003")
-> url_uv <- constructNWISURL(siteNumber,"00060",startDate,endDate,'uv')
-\end{Sinput}
-\end{Schunk}
+\begin{knitrout}
+\definecolor{shadecolor}{rgb}{0.969, 0.969, 0.969}\color{fgcolor}\begin{kframe}
+\begin{alltt}
+\hlcomment{# Dissolved Nitrate parameter codes:}
+pCode <- \hlfunctioncall{c}(\hlstring{"00618"},\hlstring{"71851"})
+startDate <- \hlstring{"1964-06-11"}
+endDate <- \hlstring{"2012-12-18"}
+url_qw <- \hlfunctioncall{constructNWISURL}(siteNumber,pCode,startDate,endDate,\hlstring{'qw'})
+url_dv <- \hlfunctioncall{constructNWISURL}(siteNumber,\hlstring{"00060"},startDate,endDate,
+                           \hlstring{'dv'},statCd=\hlstring{"00003"})
+url_uv <- \hlfunctioncall{constructNWISURL}(siteNumber,\hlstring{"00060"},startDate,endDate,\hlstring{'uv'})
+\end{alltt}
+\end{kframe}
+\end{knitrout}
+
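+To check what was assembled, print one of the returned objects in the console; the resulting web address can also be pasted directly into a browser. The output is not reproduced here because the exact string depends on the web service:
+
+\begin{verbatim}
+# Display the assembled daily-value URL:
+url_dv
+\end{verbatim}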
 
 \FloatBarrier
+
 %------------------------------------------------------------
 \section{Data Retrievals Structured For Use In The EGRET Package}
 %------------------------------------------------------------ 
@@ -499,76 +644,98 @@ In this section, we use 3 dataRetrieval functions to get sufficient data to perf
 %------------------------------------------------------------
 The function to obtain metadata, or data about the streamgage and the measured parameters, is getMetaData. This function combines getSiteFileData and getParameterInfo, producing one dataframe called INFO.
 
-\begin{Schunk}
-\begin{Sinput}
-> parameterCd <- "00618"
-> INFO <-getMetaData(siteNumber,parameterCd, interactive=FALSE)
-\end{Sinput}
-\end{Schunk}
+\begin{knitrout}
+\definecolor{shadecolor}{rgb}{0.969, 0.969, 0.969}\color{fgcolor}\begin{kframe}
+\begin{alltt}
+parameterCd <- \hlstring{"00618"}
+INFO <-\hlfunctioncall{getMetaData}(siteNumber,parameterCd, interactive=FALSE)
+\end{alltt}
+\end{kframe}
+\end{knitrout}
+
 
 Column names in the INFO dataframe are listed in Appendix 2 (\ref{sec:appendix2INFO}).
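+
+To inspect those columns directly from an R session, a minimal sketch (using only base R) would be:
+
+\begin{verbatim}
+# List the column names of INFO and pull out the station name:
+names(INFO)
+INFO$station.nm
+\end{verbatim}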
 
 \FloatBarrier
+
 %------------------------------------------------------------
 \subsection{Daily Data}
 %------------------------------------------------------------
 The function to obtain the daily values (discharge in this case) is getDVData.  It requires the inputs siteNumber, ParameterCd, StartDate, EndDate, interactive, and convert. Most of these arguments are described in the previous section; however, \texttt{"}convert\texttt{"} is a new argument (it defaults to TRUE) that tells the program to convert the values from cubic feet per second (cfs) to cubic meters per second (cms). For EGRET applications with NWIS web retrieval, leave this argument at its default of TRUE, because EGRET assumes that discharge is always in cubic meters per second. If you don't want this conversion and are not using EGRET, set convert=FALSE in the function call. 
 
-\begin{Schunk}
-\begin{Sinput}
-> siteNumber <- "01491000"
-> startDate <- "2000-01-01"
-> endDate <- "2013-01-01"
-> # This call will get NWIS data that is in cfs, and convert it
-> # to cms since we didn't override the default in the convert argument:
-> Daily <- getDVData(siteNumber, "00060", startDate, endDate,interactive=FALSE)
-\end{Sinput}
-\end{Schunk}
+\begin{knitrout}
+\definecolor{shadecolor}{rgb}{0.969, 0.969, 0.969}\color{fgcolor}\begin{kframe}
+\begin{alltt}
+siteNumber <- \hlstring{"01491000"}
+startDate <- \hlstring{"2000-01-01"}
+endDate <- \hlstring{"2013-01-01"}
+\hlcomment{# This call will get NWIS data (in cfs) and convert it to cms:}
+Daily <- \hlfunctioncall{getDVData}(siteNumber, \hlstring{"00060"}, startDate, endDate)
+\end{alltt}
+\begin{verbatim}
+## There are  4750 data points, and  4750 days.
+\end{verbatim}
+\end{kframe}
+\end{knitrout}
+
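+If you are not using EGRET and want to keep the discharge in cubic feet per second, the same call can be made with the conversion switched off (a minimal sketch, assuming only that the convert argument behaves as described above; DailyCFS is just an illustrative object name):
+
+\begin{verbatim}
+# Keep discharge in cfs rather than converting to cms:
+DailyCFS <- getDVData(siteNumber, "00060", startDate, endDate,
+                      convert=FALSE)
+\end{verbatim}
+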
 
 Details of the Daily dataframe are listed below:
 
-% latex table generated in R 3.0.0 by xtable 1.7-1 package
-% Thu Apr 25 12:08:53 2013
-\begin{tabular}{llll}
+% latex table generated in R 3.0.1 by xtable 1.7-1 package
+% Mon Jun 17 15:35:10 2013
+\begin{table}[ht]
+\centering
+\begin{tabular}{rllll}
   \hline
-ColumnName & Type & Description & Units \\ 
+ & ColumnName & Type & Description & Units \\ 
   \hline
-Date & Date & Date & date \\ 
-  Q & number & Discharge & cms \\ 
-  Julian & number & Number of days since January 1, 1850 & days \\ 
-  Month & integer & Month of the year [1-12] & months \\ 
-  Day & integer & Day of the year [1-366] & days \\ 
-  DecYear & number & Decimal year & years \\ 
-  MonthSeq & integer & Number of months since January 1, 1850 & months \\ 
-  Qualifier & string & Qualifing code & character \\ 
-  i & integer & Index of days from the start of the data frame & days \\ 
-  LogQ & number & Natural logarithm of Q & numeric \\ 
-  Q7 & number & 7 day running average of Q & cms \\ 
-  Q30 & number & 30 running average of Q & cms \\ 
+1 & Date & Date & Date & date \\ 
+  2 & Q & number & Discharge in cms & cms \\ 
+  3 & Julian & number & Number of days since January 1, 1850 & days \\ 
+  4 & Month & integer & Month of the year [1-12] & months \\ 
+  5 & Day & integer & Day of the year [1-366] & days \\ 
+  6 & DecYear & number & Decimal year & years \\ 
+  7 & MonthSeq & integer & Number of months since January 1, 1850 & months \\ 
+  8 & Qualifier & string & Qualifying code & character \\ 
+  9 & i & integer & Index of days, starting with 1 & days \\ 
+  10 & LogQ & number & Natural logarithm of Q & numeric \\ 
+  11 & Q7 & number & 7 day running average of Q & cms \\ 
+  12 & Q30 & number & 30 day running average of Q & cms \\ 
    \hline
-\end{tabular}\\*
+\end{tabular}
+\caption{Daily dataframe} 
+\end{table}
+
+
+
 
 If there are discharge values of zero, the code will add a small constant to all of the daily discharges.  This constant is 0.001 times the mean discharge.  The code will also report on the number of zero values and the size of the constant.  EGRET should only be used if the number of zero values is a very small fraction of the total days in the record (say less than 0.1\% of the days).  Columns Q7 and Q30 are the 7 and 30 day running averages for the 7 or 30 days ending on this specific date.
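+
+A minimal sketch of these two adjustments, shown only to make the rules above concrete (it is not necessarily the package's internal code), using the zoo package that dataRetrieval already depends on:
+
+\begin{verbatim}
+library(zoo)
+Q <- Daily$Q
+# If any daily value is zero, shift the whole record up by
+# 0.001 times the mean discharge:
+if (any(Q == 0)) Q <- Q + 0.001 * mean(Q)
+# Right-aligned 7 and 30 day running means, ending on each date:
+Q7  <- rollmean(Q, 7,  fill = NA, align = "right")
+Q30 <- rollmean(Q, 30, fill = NA, align = "right")
+\end{verbatim}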
 
 \FloatBarrier
+
 %------------------------------------------------------------
 \subsection{Sample Data}
 %------------------------------------------------------------
 The function to obtain sample data from the Water Quality Portal is getSampleData. The arguments for this function are also siteNumber, ParameterCd, StartDate, EndDate, and interactive. These are the same inputs as getRawQWData or getQWData, as described in the previous section.
 
-\begin{Schunk}
-\begin{Sinput}
-> Sample <-getSampleData(siteNumber,parameterCd,
-       startDate, endDate,interactive=FALSE)
-\end{Sinput}
-\end{Schunk}
+\begin{knitrout}
+\definecolor{shadecolor}{rgb}{0.969, 0.969, 0.969}\color{fgcolor}\begin{kframe}
+\begin{alltt}
+parameterCd <- \hlstring{"00618"}
+Sample <-\hlfunctioncall{getSampleData}(siteNumber,parameterCd,
+      startDate, endDate,interactive=FALSE)
+\end{alltt}
+\end{kframe}
+\end{knitrout}
+
+
+\pagebreak
 
 Details of the Sample dataframe are listed below:
 
-% latex table generated in R 3.0.0 by xtable 1.7-1 package
-% Thu Apr 25 12:08:55 2013
 \begin{table}[!ht]
-\centering
+\begin{minipage}{\linewidth}
+\begin{center}
 \caption{Sample dataframe} 
 \begin{tabular}{llll}
   \hline
@@ -590,9 +757,15 @@ Date & Date & Date & date \\
   LogQ \footnotemark[1] & number & Natural logarithm of flow & numeric \\ 
    \hline
 \end{tabular}
-\end{table}\footnotetext[1]{Flow columns are populated from data in the Daily dataframe after calling the mergeReport function.}
+\end{center}
+\end{minipage}
+\end{table}
+
+\footnotetext[1]{Flow columns are populated from data in the Daily dataframe after calling the mergeReport function.}
 
 \FloatBarrier
+
+
 %------------------------------------------------------------
 \subsection{Censored Values: Summation Explanation}
 %------------------------------------------------------------
@@ -602,49 +775,60 @@ As an example to understand how the dataRetrieval package handles a more complex
 
 \begin{center}
 
-% latex table generated in R 3.0.0 by xtable 1.7-1 package
-% Thu Apr 25 12:08:55 2013
-\begin{tabular}{llrlrlr}
+% latex table generated in R 3.0.1 by xtable 1.7-1 package
+% Mon Jun 17 15:35:11 2013
+\begin{table}[ht]
+\centering
+\begin{tabular}{rllrlrlr}
   \hline
-cdate & rdp & dp & rpp & pp & rtp & tp \\ 
+ & cdate & rdp & dp & rpp & pp & rtp & tp \\ 
   \hline
-2003-02-15 &  & 0.02 &  & 0.50 &  &  \\ 
-  2003-06-30 & $<$ & 0.01 &  & 0.30 &  &  \\ 
-  2004-09-15 & $<$ & 0.00 & $<$ & 0.20 &  &  \\ 
-  2005-01-30 &  &  &  &  &  & 0.43 \\ 
-  2005-05-30 &  &  &  &  & $<$ & 0.05 \\ 
-  2005-10-30 &  &  &  &  & $<$ & 0.02 \\ 
+1 & 2003-02-15 &  & 0.02 &  & 0.50 &  &  \\ 
+  2 & 2003-06-30 & $<$ & 0.01 &  & 0.30 &  &  \\ 
+  3 & 2004-09-15 & $<$ & 0.00 & $<$ & 0.20 &  &  \\ 
+  4 & 2005-01-30 &  &  &  &  &  & 0.43 \\ 
+  5 & 2005-05-30 &  &  &  &  & $<$ & 0.05 \\ 
+  6 & 2005-10-30 &  &  &  &  & $<$ & 0.02 \\ 
    \hline
 \end{tabular}
+\caption{Example data} 
+\end{table}
+
+
+
 \end{center}
 
 
 The dataRetrieval package will \texttt{"}add up\texttt{"} all the values in a given row to form the total for that sample. Thus, you only want to enter data that should be added together. For example, we might know the value for dp on 5/30/2005, but we don't want to put it in the table because, under the rules of this data set, we are not supposed to add it to the values in 2005.
 
-For every sample, the EGRET package requires a pair of numbers to define an interval in which the true value lies (ConcLow and ConcHigh). In a simple non-censored case (the reported value is above the detection limit), ConcLow equals ConcHigh and the interval collapses down to a single point.In a simple censored case, the value might be reported as <0.2, then ConcLow=NA and ConcHigh=0.2. We use NA instead of 0 as a way to elegantly handle future logarithm calculations.
-
-For the more complex example case, let us say dp is reported as <0.01 and pp is reported as 0.3. We know that the total must be at least 0.3 and could be as much as 0.31. Therefore, ConcLow=0.3 and ConcHigh=0.31. Another case would be if dp is reported as <0.005 and pp is reported <0.2. We know in this case that the true value could be as low as zero, but could be as high as 0.205. Therefore, in this case, ConcLow=NA and ConcHigh=0.205. The Sample dataframe for the example data is therefore:
-
-\begin{Schunk}
-\begin{Soutput}
-        Date ConcLow ConcHigh Uncen ConcAve Julian Month Day  DecYear MonthSeq
-1 2003-02-15   0.520    0.520     1   0.520  55927     2  46 2003.124     1838
-2 2003-06-30   0.310    0.310     1   0.310  56062     6 181 2003.493     1842
-3 2004-09-15   0.205    0.205     1   0.205  56505     9 259 2004.706     1857
-4 2005-01-30   0.430    0.430     1   0.430  56642     1  30 2005.081     1861
-5 2005-05-30   0.050    0.050     1   0.050  56762     5 150 2005.408     1865
-6 2005-10-30   0.020    0.020     1   0.020  56915    10 303 2005.827     1870
-        SinDY      CosDY
-1  0.70406552  0.7101350
-2  0.04290476 -0.9990792
-3 -0.96251346 -0.2712339
-4  0.48505985  0.8744810
-5  0.54391895 -0.8391378
-6 -0.88668032  0.4623830
-\end{Soutput}
-\end{Schunk}
+For every sample, the EGRET package requires a pair of numbers to define an interval in which the true value lies (ConcLow and ConcHigh). In a simple non-censored case (the reported value is above the detection limit), ConcLow equals ConcHigh and the interval collapses down to a single point. In a simple censored case, the value might be reported as \verb@<@0.2; in that case, ConcLow=NA and ConcHigh=0.2. We use NA instead of 0 so that later logarithm calculations can be handled cleanly.
+
+For a more complex case, let us say dp is reported as \verb@<@0.01 and pp is reported as 0.3. We know that the total must be at least 0.3 and could be as much as 0.31. Therefore, ConcLow=0.3 and ConcHigh=0.31. Another case would be if dp is reported as \verb@<@0.005 and pp is reported as \verb@<@0.2. In this case we know that the true value could be as low as zero, but could be as high as 0.205. Therefore, ConcLow=NA and ConcHigh=0.205. The Sample dataframe for the example data is therefore:
+
+\begin{knitrout}
+\definecolor{shadecolor}{rgb}{0.969, 0.969, 0.969}\color{fgcolor}\begin{kframe}
+\begin{verbatim}
+##         Date ConcLow ConcHigh Uncen ConcAve Julian Month
+## 1 2003-02-15   0.520    0.520     1   0.520  55927     2
+## 2 2003-06-30   0.310    0.310     1   0.310  56062     6
+## 3 2004-09-15   0.205    0.205     1   0.205  56505     9
+## 4 2005-01-30   0.430    0.430     1   0.430  56642     1
+## 5 2005-05-30   0.050    0.050     1   0.050  56762     5
+## 6 2005-10-30   0.020    0.020     1   0.020  56915    10
+##   Day DecYear MonthSeq    SinDY   CosDY
+## 1  46    2003     1838  0.70407  0.7101
+## 2 182    2003     1842  0.02575 -0.9997
+## 3 259    2005     1857 -0.96251 -0.2712
+## 4  30    2005     1861  0.48506  0.8745
+## 5 151    2005     1865  0.52943 -0.8484
+## 6 304    2006     1870 -0.87861  0.4775
+\end{verbatim}
+\end{kframe}
+\end{knitrout}
+
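+A minimal sketch of the interval rule for a single row, written here only to make the arithmetic explicit (it is not the package's internal code):
+
+\begin{verbatim}
+# values:   the reported numbers for one sample date
+# censored: TRUE where the value was reported as "less than"
+concInterval <- function(values, censored) {
+  high <- sum(values)             # every value counts toward the maximum
+  low  <- sum(values[!censored])  # only uncensored values are guaranteed
+  if (all(censored)) low <- NA    # the total could be as low as zero
+  c(ConcLow = low, ConcHigh = high)
+}
+# dp reported as <0.01 and pp reported as 0.3:
+concInterval(c(0.01, 0.3), c(TRUE, FALSE))   # ConcLow 0.3, ConcHigh 0.31
+# dp reported as <0.005 and pp reported as <0.2:
+concInterval(c(0.005, 0.2), c(TRUE, TRUE))   # ConcLow NA, ConcHigh 0.205
+\end{verbatim}
+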
 
 \FloatBarrier
+
 %------------------------------------------------------------ 
 \subsection{User-Generated Data Files}
 %------------------------------------------------------------ 
@@ -663,7 +847,7 @@ So, if you have a file called \texttt{"}ChoptankRiverFlow.txt\texttt{"} located
 \begin{verbatim}
 date  Qdaily
 10/1/1999  107
-10/2/1999	85
+10/2/1999  85
 10/3/1999	76
 10/4/1999	76
 10/5/1999	113
@@ -672,15 +856,20 @@ date  Qdaily
 \end{verbatim}
 
 The call to open this file, convert the flow to cubic meters per second, and populate the Daily data frame would be:
-\begin{Schunk}
-\begin{Sinput}
-> fileName <- "ChoptankRiverFlow.txt"
-> filePath <-  "C:/RData/"
-> Daily <- getDailyDataFromFile(filePath,fileName,separator="\t",interactive=FALSE)
-\end{Sinput}
-\end{Schunk}
+\begin{knitrout}
+\definecolor{shadecolor}{rgb}{0.969, 0.969, 0.969}\color{fgcolor}\begin{kframe}
+\begin{alltt}
+fileName <- \hlstring{"ChoptankRiverFlow.txt"}
+filePath <-  \hlstring{"C:/RData/"}
+Daily <- \hlfunctioncall{getDailyDataFromFile}(filePath,fileName,
+                    separator=\hlstring{"\textbackslash{}t"})
+\end{alltt}
+\end{kframe}
+\end{knitrout}
+
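+If the file was read correctly, the resulting Daily dataframe should have the columns described in the table above; a quick check using only base R:
+
+\begin{verbatim}
+# Look at the first few rows and the discharge range of the
+# populated Daily dataframe:
+head(Daily)
+summary(Daily$Q)
+\end{verbatim}
+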
 
 \FloatBarrier
+
 %------------------------------------------------------------ 
 \subsubsection{getSampleDataFromFile}
 %------------------------------------------------------------ 
@@ -696,86 +885,102 @@ cdate;remarkCode;Nitrate
 ...
 \end{verbatim}
 The call to open this file and populate the Sample dataframe would be:
-\begin{Schunk}
-\begin{Sinput}
-> fileName <- "ChoptankRiverNitrate.csv"
-> filePath <-  "C:/RData/"
-> Sample <- getSampleDataFromFile(filePath,fileName,separator=",",interactive=FALSE)
-\end{Sinput}
-\end{Schunk}
+\begin{knitrout}
+\definecolor{shadecolor}{rgb}{0.969, 0.969, 0.969}\color{fgcolor}\begin{kframe}
+\begin{alltt}
+fileName <- \hlstring{"ChoptankRiverNitrate.csv"}
+filePath <-  \hlstring{"C:/RData/"}
+Sample <- \hlfunctioncall{getSampleDataFromFile}(filePath,fileName,
+                                separator=\hlstring{","})
+\end{alltt}
+\end{kframe}
+\end{knitrout}
+
 
 \FloatBarrier
+
 %------------------------------------------------------------
 \subsection{Merge Report}
 %------------------------------------------------------------
 Finally, there is a function called mergeReport that will look at both the Daily and Sample dataframes, and populate the Q and LogQ columns in the Sample dataframe. The default arguments are Daily and Sample; however, if you want to use other similarly structured dataframes, you can specify localDaily or localSample. Once mergeReport has been run, the Sample dataframe will be augmented with the daily discharges for all the days with samples.  None of the water quality functions in EGRET will work without first having run the mergeReport function.
 
 
-\begin{Schunk}
-\begin{Sinput}
-> siteNumber <- "01491000"
-> parameterCd <- "00631"  # Nitrate
-> startDate <- "2000-01-01"
-> endDate <- "2013-01-01"
-> Daily <- getDVData(siteNumber, "00060", startDate, endDate,interactive=FALSE)
-> Sample <- getSampleData(siteNumber,parameterCd, startDate, endDate, interactive=FALSE)
-> Sample <- mergeReport()
-\end{Sinput}
-\begin{Soutput}
- Discharge Record is 4750 days long, which is 13 years
- First day of the discharge record is 2000-01-01 and last day is 2013-01-01
- The water quality record has 220 samples
- The first sample is from 2000-01-04 and the last sample is from 2012-12-18
- Discharge: Minimum, mean and maximum 0.00991 4.55 246
- Concentration: Minimum, mean and maximum 0.2 1.3 2.4
- Percentage of the sample values that are censored is 0 %
-\end{Soutput}
-\begin{Sinput}
-> head(Sample)
-\end{Sinput}
-\begin{Soutput}
-        Date ConcLow ConcHigh Uncen ConcAve Julian Month Day  DecYear MonthSeq
-1 2000-01-04    1.59     1.59     1    1.59  54789     1   4 2000.010     1801
-2 2000-02-03    1.54     1.54     1    1.54  54819     2  34 2000.092     1802
-3 2000-02-15    1.37     1.37     1    1.37  54831     2  46 2000.124     1802
-4 2000-02-19    1.24     1.24     1    1.24  54835     2  50 2000.135     1802
-5 2000-03-23    0.52     0.52     1    0.52  54868     3  83 2000.225     1803
-6 2000-06-05    1.11     1.11     1    1.11  54942     6 157 2000.428     1806
-       SinDY      CosDY         Q      LogQ
-1 0.06004896  0.9981954  2.746734 1.0104126
-2 0.54391895  0.8391378  3.936042 1.3701756
-3 0.70406552  0.7101350 10.845352 2.3837366
-4 0.75113193  0.6601521 15.517632 2.7419769
-5 0.98808790  0.1538906 56.916861 4.0415916
-6 0.43939951 -0.8982918  1.812278 0.5945847
-\end{Soutput}
-\end{Schunk}
+\begin{knitrout}
+\definecolor{shadecolor}{rgb}{0.969, 0.969, 0.969}\color{fgcolor}\begin{kframe}
+\begin{alltt}
+siteNumber <- \hlstring{"01491000"}
+parameterCd <- \hlstring{"00631"}  \hlcomment{# Nitrate}
+startDate <- \hlstring{"2000-01-01"}
+endDate <- \hlstring{"2013-01-01"}
+
+Daily <- \hlfunctioncall{getDVData}(siteNumber, \hlstring{"00060"}, startDate, endDate)
+\end{alltt}
+\begin{verbatim}
+## There are  4750 data points, and  4750 days.
+\end{verbatim}
+\begin{alltt}
+Sample <- \hlfunctioncall{getSampleData}(siteNumber,parameterCd, startDate, endDate)
+Sample <- \hlfunctioncall{mergeReport}()
+\end{alltt}
+\begin{verbatim}
+## 
+##  Discharge Record is 4750 days long, which is 13 years
+##  First day of the discharge record is 2000-01-01 and last day is 2013-01-01
+##  The water quality record has 220 samples
+##  The first sample is from 2000-01-04 and the last sample is from 2012-12-18
+##  Discharge: Minimum, mean and maximum 0.00991 4.55 246
+##  Concentration: Minimum, mean and maximum 0.2 1.3 2.4
+##  Percentage of the sample values that are censored is 0 %
+\end{verbatim}
+\begin{alltt}
+\hlfunctioncall{head}(Sample)
+\end{alltt}
+\begin{verbatim}
+##         Date ConcLow ConcHigh Uncen ConcAve Julian Month
+## 1 2000-01-04    1.59     1.59     1    1.59  54789     1
+## 2 2000-02-03    1.54     1.54     1    1.54  54819     2
+## 3 2000-02-15    1.37     1.37     1    1.37  54831     2
+## 4 2000-02-19    1.24     1.24     1    1.24  54835     2
+## 5 2000-03-23    0.52     0.52     1    0.52  54868     3
+## 6 2000-06-05    1.11     1.11     1    1.11  54942     6
+##   Day DecYear MonthSeq   SinDY   CosDY      Q   LogQ
+## 1   4    2000     1801 0.06005  0.9982  2.747 1.0104
+## 2  34    2000     1802 0.54392  0.8391  3.936 1.3702
+## 3  46    2000     1802 0.70407  0.7101 10.845 2.3837
+## 4  50    2000     1802 0.75113  0.6602 15.518 2.7420
+## 5  83    2000     1803 0.98809  0.1539 56.917 4.0416
+## 6 157    2000     1806 0.43940 -0.8983  1.812 0.5946
+\end{verbatim}
+\end{kframe}
+\end{knitrout}
+
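+If the daily and sample records live in dataframes with other names, the localDaily and localSample arguments mentioned above can be used instead of the defaults; a minimal sketch, where myDaily and mySample are placeholders for your own dataframes:
+
+\begin{verbatim}
+# Merge user-supplied dataframes instead of the default Daily and Sample:
+Sample <- mergeReport(localDaily = myDaily, localSample = mySample)
+\end{verbatim}
+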
 
 \FloatBarrier
+
 %------------------------------------------------------------
 \subsection{EGRET Plots}
 %------------------------------------------------------------
 As has been mentioned, the data are specifically formatted to be used with the EGRET package. The EGRET package has powerful modeling capabilities using WRTDS, but also has a variety of graphing and tabular tools to explore the data without using the WRTDS algorithm. See the EGRET vignette, user guide, and/or wiki (\url{https://github.com/USGS-R/EGRET/wiki}) for detailed information. The following figure is an example of one of the plotting functions that can be used directly from the dataRetrieval dataframes.
 
-\begin{Schunk}
-\begin{Sinput}
-> # Continuing Choptank example from the previous sections
-> library(EGRET)
-> multiPlotDataOverview()
-\end{Sinput}
-\end{Schunk}
+\begin{knitrout}
+\definecolor{shadecolor}{rgb}{0.969, 0.969, 0.969}\color{fgcolor}\begin{kframe}
+\begin{alltt}
+\hlcomment{# Continuing Choptank example from the previous sections}
+\hlfunctioncall{library}(EGRET)
+\hlfunctioncall{multiPlotDataOverview}()
+\end{alltt}
+\end{kframe}\begin{figure}[]
 
-\begin{figure}[ht]
-\begin{center}
-
-\includegraphics{dataRetrieval-figegretEx}
-\end{center}
-\caption{Default multiPlotDataOverview}
-\label{fig:multiPlotDataOverview}
+\includegraphics[width=\maxwidth]{figure/egretEx} \caption[Default multiPlotDataOverview]{Default multiPlotDataOverview\label{fig:egretEx}}
 \end{figure}
 
+
+\end{knitrout}
+
+
 \clearpage
 \appendix
+
 %------------------------------------------------------------ 
 \section{Getting Started in R}
 \label{sec:appendix1}
@@ -791,27 +996,32 @@ There are many options for running and editing R code, one nice environment to l
 
 At any time, you can get information about any function in R by typing a question mark before the function's name.  This will open a help file (in RStudio, in the Help window) that describes the function and its required arguments, and provides working examples.
 
-\begin{Schunk}
-\begin{Sinput}
-> ?removeDuplicates
-\end{Sinput}
-\end{Schunk}
+\begin{knitrout}
+\definecolor{shadecolor}{rgb}{0.969, 0.969, 0.969}\color{fgcolor}\begin{kframe}
+\begin{alltt}
+?removeDuplicates
+\end{alltt}
+\end{kframe}
+\end{knitrout}
+
 
 To see the source code for a particular function, type the name of the function:
-\begin{Schunk}
-\begin{Sinput}
-> removeDuplicates
-\end{Sinput}
-\begin{Soutput}
-function (localSample = Sample) 
-{
-    Sample1 <- localSample[!duplicated(localSample[c("DecYear", 
-        "ConcHigh")]), ]
-    return(Sample1)
-}
-<environment: namespace:dataRetrieval>
-\end{Soutput}
-\end{Schunk}
+\begin{knitrout}
+\definecolor{shadecolor}{rgb}{0.969, 0.969, 0.969}\color{fgcolor}\begin{kframe}
+\begin{alltt}
+removeDuplicates
+\end{alltt}
+\begin{verbatim}
+## function (localSample = Sample) 
+## {
+##     Sample1 <- localSample[!duplicated(localSample[c("DecYear", 
+##         "ConcHigh")]), ]
+##     return(Sample1)
+## }
+## <environment: namespace:dataRetrieval>
+\end{verbatim}
+\end{kframe}
+\end{knitrout}
 
 
 %------------------------------------------------------------
@@ -819,42 +1029,27 @@ function (localSample = Sample)
 %------------------------------------------------------------ 
 Before installing dataRetrieval, the zoo package must be installed from CRAN:
 
-\begin{Schunk}
-\begin{Sinput}
-> install.packages("zoo")
-> install.packages("dataRetrieval", repos="http://usgs-r.github.com", type="source")
-\end{Sinput}
-\end{Schunk}
-
-It is a good idea to re-start the R enviornment after installing the package, especially if installing an updated version. Some users have found it necessary to delete the previous version's package folder before installing newer version of dataRetrieval. If you are experiencing issues after updating a package, trying deleting the package folder - the default location for Windows is something like this: C:/Users/userA/Documents/R/win-library/2.15/dataRetrieval, and the default for a Mac: /Users/userA/Library/R/2.15/library/dataRetrieval. Then, re-install the package using the directions above. Moving to CRAN should solve this problem.
+\begin{knitrout}
+\definecolor{shadecolor}{rgb}{0.969, 0.969, 0.969}\color{fgcolor}\begin{kframe}
+\begin{alltt}
+\hlfunctioncall{install.packages}(\hlstring{"zoo"})
+\hlfunctioncall{install.packages}(\hlstring{"dataRetrieval"}, repos=\hlstring{"http://usgs-r.github.com"}, type=\hlstring{"source"})
+\end{alltt}
+\end{kframe}
+\end{knitrout}
 
-After installing the package, you need to open the library each time you re-start R.  This is done with the simple command:
-\begin{Schunk}
-\begin{Sinput}
-> library(dataRetrieval)
-\end{Sinput}
-\end{Schunk}
-Using RStudio, you could alternatively click on the checkbox for dataRetrieval in the Packages window.
-
-%------------------------------------------------------------
-\subsection{R Developers: Installing dataRetrieval from gitHub}
-%------------------------------------------------------------
-Alternatively, R-developers can install the latest working version of dataRetrieval directly from gitHub using the devtools package (available on CRAN).  Rtools (for Windows) and appropriate \LaTeX\ tools are required. Be aware that the version installed using this method isn't necessarily the same as the version in the stable release branch.  
 
+It is a good idea to re-start R after installing the package, especially if installing an updated version. Some users have found it necessary to delete the previous version's package folder before installing a newer version of dataRetrieval. If you are experiencing issues after updating a package, try deleting the package folder: the default location on Windows is something like C:/Users/userA/Documents/R/win-library/2.15/dataRetrieval, and the default on a Mac is /Users/userA/Library/R/2.15/library/dataRetrieval. Then, re-install the package using the directions above. Moving to CRAN should solve this problem.
 
-\begin{Schunk}
-\begin{Sinput}
-> library(devtools)
-> install_github("dataRetrieval", "USGS-R")
-\end{Sinput}
-\end{Schunk}
-To then open the library, simply type:
+After installing the package, you need to load it each time you re-start R.  This is done with the simple command:
+\begin{knitrout}
+\definecolor{shadecolor}{rgb}{0.969, 0.969, 0.969}\color{fgcolor}\begin{kframe}
+\begin{alltt}
+\hlfunctioncall{library}(dataRetrieval)
+\end{alltt}
+\end{kframe}
+\end{knitrout}
 
-\begin{Schunk}
-\begin{Sinput}
-> library(dataRetrieval)
-\end{Sinput}
-\end{Schunk}
 
 %------------------------------------------------------------ 
 \section{Columns Names}
@@ -866,55 +1061,63 @@ To then open the library, simply type:
 \label{sec:appendix2INFO}
 %------------------------------------------------------------
 
-% latex table generated in R 3.0.0 by xtable 1.7-1 package
-% Thu Apr 25 12:08:57 2013
-\begin{tabular}{l}
+% latex table generated in R 3.0.1 by xtable 1.7-1 package
+% Mon Jun 17 15:35:14 2013
+\begin{table}[ht]
+\centering
+\begin{tabular}{rl}
   \hline
+ & ColumnNames \\ 
   \hline
-agency.cd \\ 
-  site.no \\ 
-  station.nm \\ 
-  site.tp.cd \\ 
-  lat.va \\ 
-  long.va \\ 
-  dec.lat.va \\ 
-  dec.long.va \\ 
-  coord.meth.cd \\ 
-  coord.acy.cd \\ 
-  coord.datum.cd \\ 
-  dec.coord.datum.cd \\ 
-  district.cd \\ 
-  state.cd \\ 
-  county.cd \\ 
-  country.cd \\ 
-  map.nm \\ 
-  map.scale.fc \\ 
-  alt.va \\ 
-  alt.meth.cd \\ 
-  alt.acy.va \\ 
-  alt.datum.cd \\ 
-  huc.cd \\ 
-  basin.cd \\ 
-  topo.cd \\ 
-  construction.dt \\ 
-  inventory.dt \\ 
-  drain.area.va \\ 
-  contrib.drain.area.va \\ 
-  tz.cd \\ 
-  local.time.fg \\ 
-  reliability.cd \\ 
-  project.no \\ 
-  queryTime \\ 
-  drainSqKm \\ 
-  shortName \\ 
-  staAbbrev \\ 
-  param.nm \\ 
-  param.units \\ 
-  paramShortName \\ 
-  paramNumber \\ 
-  constitAbbrev \\ 
+1 & agency.cd \\ 
+  2 & site.no \\ 
+  3 & station.nm \\ 
+  4 & site.tp.cd \\ 
+  5 & lat.va \\ 
+  6 & long.va \\ 
+  7 & dec.lat.va \\ 
+  8 & dec.long.va \\ 
+  9 & coord.meth.cd \\ 
+  10 & coord.acy.cd \\ 
+  11 & coord.datum.cd \\ 
+  12 & dec.coord.datum.cd \\ 
+  13 & district.cd \\ 
+  14 & state.cd \\ 
+  15 & county.cd \\ 
+  16 & country.cd \\ 
+  17 & map.nm \\ 
+  18 & map.scale.fc \\ 
+  19 & alt.va \\ 
+  20 & alt.meth.cd \\ 
+  21 & alt.acy.va \\ 
+  22 & alt.datum.cd \\ 
+  23 & huc.cd \\ 
+  24 & basin.cd \\ 
+  25 & topo.cd \\ 
+  26 & construction.dt \\ 
+  27 & inventory.dt \\ 
+  28 & drain.area.va \\ 
+  29 & contrib.drain.area.va \\ 
+  30 & tz.cd \\ 
+  31 & local.time.fg \\ 
+  32 & reliability.cd \\ 
+  33 & project.no \\ 
+  34 & queryTime \\ 
+  35 & drainSqKm \\ 
+  36 & shortName \\ 
+  37 & staAbbrev \\ 
+  38 & param.nm \\ 
+  39 & param.units \\ 
+  40 & paramShortName \\ 
+  41 & paramNumber \\ 
+  42 & constitAbbrev \\ 
    \hline
 \end{tabular}
+\caption{Column names in the INFO dataframe} 
+\end{table}
+
+
+
 \FloatBarrier
 
 %------------------------------------------------------------
@@ -924,84 +1127,100 @@ agency.cd \\
 
 There are 62 columns returned from the water quality portal. 
 
-% latex table generated in R 3.0.0 by xtable 1.7-1 package
-% Thu Apr 25 12:08:57 2013
-\begin{tabular}{l}
+% latex table generated in R 3.0.1 by xtable 1.7-1 package
+% Mon Jun 17 15:35:14 2013
+\begin{table}[ht]
+\centering
+\begin{tabular}{rl}
   \hline
+ & ColumnNames \\ 
   \hline
-OrganizationIdentifier \\ 
-  OrganizationFormalName \\ 
-  ActivityIdentifier \\ 
-  ActivityTypeCode \\ 
-  ActivityMediaName \\ 
-  ActivityMediaSubdivisionName \\ 
-  ActivityStartDate \\ 
-  ActivityStartTime.Time \\ 
-  ActivityStartTime.TimeZoneCode \\ 
-  ActivityEndDate \\ 
-  ActivityEndTime.Time \\ 
-  ActivityEndTime.TimeZoneCode \\ 
-  ActivityDepthHeightMeasure.MeasureValue \\ 
-  ActivityDepthHeightMeasure.MeasureUnitCode \\ 
-  ActivityDepthAltitudeReferencePointText \\ 
-  ActivityTopDepthHeightMeasure.MeasureValue \\ 
-  ActivityTopDepthHeightMeasure.MeasureUnitCode \\ 
-  ActivityBottomDepthHeightMeasure.MeasureValue \\ 
-  ActivityBottomDepthHeightMeasure.MeasureUnitCode \\ 
-  ProjectIdentifier \\ 
-  ActivityConductingOrganizationText \\ 
-  MonitoringLocationIdentifier \\ 
-  ActivityCommentText \\ 
-  SampleAquifer \\ 
-  HydrologicCondition \\ 
-  HydrologicEvent \\ 
-  SampleCollectionMethod.MethodIdentifier \\ 
-  SampleCollectionMethod.MethodIdentifierContext \\ 
-  SampleCollectionMethod.MethodName \\ 
-  SampleCollectionEquipmentName \\ 
-  ResultDetectionConditionText \\ 
-  CharacteristicName \\ 
-  ResultSampleFractionText \\ 
-  ResultMeasureValue \\ 
-  ResultMeasure.MeasureUnitCode \\ 
-  MeasureQualifierCode \\ 
-  ResultStatusIdentifier \\ 
-  StatisticalBaseCode \\ 
-  ResultValueTypeName \\ 
-  ResultWeightBasisText \\ 
+1 & OrganizationIdentifier \\ 
+  2 & OrganizationFormalName \\ 
+  3 & ActivityIdentifier \\ 
+  4 & ActivityTypeCode \\ 
+  5 & ActivityMediaName \\ 
+  6 & ActivityMediaSubdivisionName \\ 
+  7 & ActivityStartDate \\ 
+  8 & ActivityStartTime.Time \\ 
+  9 & ActivityStartTime.TimeZoneCode \\ 
+  10 & ActivityEndDate \\ 
+  11 & ActivityEndTime.Time \\ 
+  12 & ActivityEndTime.TimeZoneCode \\ 
+  13 & ActivityDepthHeightMeasure.MeasureValue \\ 
+  14 & ActivityDepthHeightMeasure.MeasureUnitCode \\ 
+  15 & ActivityDepthAltitudeReferencePointText \\ 
+  16 & ActivityTopDepthHeightMeasure.MeasureValue \\ 
+  17 & ActivityTopDepthHeightMeasure.MeasureUnitCode \\ 
+  18 & ActivityBottomDepthHeightMeasure.MeasureValue \\ 
+  19 & ActivityBottomDepthHeightMeasure.MeasureUnitCode \\ 
+  20 & ProjectIdentifier \\ 
+  21 & ActivityConductingOrganizationText \\ 
+  22 & MonitoringLocationIdentifier \\ 
+  23 & ActivityCommentText \\ 
+  24 & SampleAquifer \\ 
+  25 & HydrologicCondition \\ 
+  26 & HydrologicEvent \\ 
+  27 & SampleCollectionMethod.MethodIdentifier \\ 
+  28 & SampleCollectionMethod.MethodIdentifierContext \\ 
+  29 & SampleCollectionMethod.MethodName \\ 
+  30 & SampleCollectionEquipmentName \\ 
+  31 & ResultDetectionConditionText \\ 
+  32 & CharacteristicName \\ 
+  33 & ResultSampleFractionText \\ 
+  34 & ResultMeasureValue \\ 
+  35 & ResultMeasure.MeasureUnitCode \\ 
+  36 & MeasureQualifierCode \\ 
+  37 & ResultStatusIdentifier \\ 
+  38 & StatisticalBaseCode \\ 
+  39 & ResultValueTypeName \\ 
+  40 & ResultWeightBasisText \\ 
    \hline
 \end{tabular}
+\caption{Column names in dissolvedNitrate} 
+\end{table}
+
+
+
 \FloatBarrier
 
-% latex table generated in R 3.0.0 by xtable 1.7-1 package
-% Thu Apr 25 12:08:58 2013
-\begin{tabular}{l}
+% latex table generated in R 3.0.1 by xtable 1.7-1 package
+% Mon Jun 17 15:35:14 2013
+\begin{table}[ht]
+\centering
+\begin{tabular}{rl}
   \hline
+ & ColumnNames\_Continued \\ 
   \hline
-ResultTimeBasisText \\ 
-  ResultTemperatureBasisText \\ 
-  ResultParticleSizeBasisText \\ 
-  PrecisionValue \\ 
-  ResultCommentText \\ 
-  USGSPCode \\ 
-  ResultDepthHeightMeasure.MeasureValue \\ 
-  ResultDepthHeightMeasure.MeasureUnitCode \\ 
-  ResultDepthAltitudeReferencePointText \\ 
-  SubjectTaxonomicName \\ 
-  SampleTissueAnatomyName \\ 
-  ResultAnalyticalMethod.MethodIdentifier \\ 
-  ResultAnalyticalMethod.MethodIdentifierContext \\ 
-  ResultAnalyticalMethod.MethodName \\ 
-  MethodDescriptionText \\ 
-  LaboratoryName \\ 
-  AnalysisStartDate \\ 
-  ResultLaboratoryCommentText \\ 
-  DetectionQuantitationLimitTypeName \\ 
-  DetectionQuantitationLimitMeasure.MeasureValue \\ 
-  DetectionQuantitationLimitMeasure.MeasureUnitCode \\ 
-  PreparationStartDate \\ 
+1 & ResultTimeBasisText \\ 
+  2 & ResultTemperatureBasisText \\ 
+  3 & ResultParticleSizeBasisText \\ 
+  4 & PrecisionValue \\ 
+  5 & ResultCommentText \\ 
+  6 & USGSPCode \\ 
+  7 & ResultDepthHeightMeasure.MeasureValue \\ 
+  8 & ResultDepthHeightMeasure.MeasureUnitCode \\ 
+  9 & ResultDepthAltitudeReferencePointText \\ 
+  10 & SubjectTaxonomicName \\ 
+  11 & SampleTissueAnatomyName \\ 
+  12 & ResultAnalyticalMethod.MethodIdentifier \\ 
+  13 & ResultAnalyticalMethod.MethodIdentifierContext \\ 
+  14 & ResultAnalyticalMethod.MethodName \\ 
+  15 & MethodDescriptionText \\ 
+  16 & LaboratoryName \\ 
+  17 & AnalysisStartDate \\ 
+  18 & ResultLaboratoryCommentText \\ 
+  19 & DetectionQuantitationLimitTypeName \\ 
+  20 & DetectionQuantitationLimitMeasure.MeasureValue \\ 
+  21 & DetectionQuantitationLimitMeasure.MeasureUnitCode \\ 
+  22 & PreparationStartDate \\ 
    \hline
 \end{tabular}
+\caption{Column names in dissolvedNitrate} 
+\end{table}
+
+
+
 \clearpage
 
 %------------------------------------------------------------ 
@@ -1010,38 +1229,66 @@ ResultTimeBasisText \\
 %------------------------------------------------------------
 A few steps are required to create a table in a Microsoft product (Excel, Word, PowerPoint, etc.) from an R dataframe. There are certainly a variety of good methods; one of them is detailed here. The example we will step through creates a table in Microsoft Word based on the dataframe tableData:
 
-\begin{Schunk}
-\begin{Sinput}
-> ChoptankAvailableData <- getDataAvailability(siteNumber)
-> ChoptankDailyData <- ChoptankAvailableData["dv" == ChoptankAvailableData$service,]
-> ChoptankDailyData <- ChoptankDailyData["00003" == ChoptankDailyData$statCd,]
-> pCodeINFO <- getMultipleParameterNames(ChoptankDailyData$parameter_cd, interactive=FALSE)
-> ChoptankDailyData <- merge(ChoptankDailyData,pCodeINFO,by="parameter_cd")
-> tableData <- with(ChoptankDailyData, 
-       data.frame(
-       shortName=srsname, 
-       Start=startDate, 
-       End=endDate, 
-       Count=count,
-       Units=parameter_units)
-       )
-\end{Sinput}
-\end{Schunk}
+\begin{knitrout}
+\definecolor{shadecolor}{rgb}{0.969, 0.969, 0.969}\color{fgcolor}\begin{kframe}
+\begin{alltt}
+availableData <- \hlfunctioncall{getDataAvailability}(siteNumber)
+dailyData <- availableData[\hlstring{"dv"} == availableData$service,]
+dailyData <- dailyData[\hlstring{"00003"} == dailyData$statCd,]
+pCodeINFO <- \hlfunctioncall{getMultipleParameterNames}(dailyData$parameter_cd)
+\end{alltt}
+\begin{verbatim}
+## Percent complete: 
+## 20 	40 	60 	80 	100 	
+\end{verbatim}
+\begin{alltt}
+dailyData <- \hlfunctioncall{merge}(dailyData,pCodeINFO, by=\hlstring{"parameter_cd"})
+
+tableData <- \hlfunctioncall{with}(dailyData, 
+      \hlfunctioncall{data.frame}(
+        shortName=srsname, 
+        Start=startDate, 
+        End=endDate, 
+        Count=count,
+        Units=parameter_units)
+      )
+tableData
+\end{alltt}
+\begin{verbatim}
+##                                shortName      Start
+## 1                     Temperature, water 2010-10-01
+## 2               Stream flow, mean. daily 1948-01-01
+## 3                   Specific conductance 2010-10-01
+## 4 Suspended sediment concentration (SSC) 1980-10-01
+## 5           Suspended sediment discharge 1980-10-01
+##          End Count      Units
+## 1 2012-05-09   529      deg C
+## 2 2013-06-16 23908        cfs
+## 3 2012-05-09   527 uS/cm @25C
+## 4 1991-09-30  3651       mg/l
+## 5 1991-09-30  3652   tons/day
+\end{verbatim}
+\end{kframe}
+\end{knitrout}
+
 
 First, save the dataframe as a tab-delimited file (you don't want a comma-delimited file because there are commas in some of the data elements):
 
 
-\begin{Schunk}
-\begin{Sinput}
-> write.table(tableData, file="tableData.tsv",sep="\t",
-             row.names = FALSE,quote=FALSE)
-\end{Sinput}
-\end{Schunk}
+\begin{knitrout}
+\definecolor{shadecolor}{rgb}{0.969, 0.969, 0.969}\color{fgcolor}\begin{kframe}
+\begin{alltt}
+\hlfunctioncall{write.table}(tableData, file=\hlstring{"tableData.tsv"},sep=\hlstring{"\textbackslash{}t"},
+            row.names = FALSE,quote=FALSE)
+\end{alltt}
+\end{kframe}
+\end{knitrout}
+
 
 This will save a file in your working directory called tableData.tsv.  You can see your working directory by typing getwd() in the R console. Opening the file in a general-purpose text editor, you should see the following:
 
 \begin{verbatim}
-shortName  Start	End	Count	Units
+shortName  Start  End	Count	Units
 Temperature, water	2010-10-01	2012-06-24	575	deg C
 Stream flow, mean. daily	1948-01-01	2013-03-13	23814	cfs
 Specific conductance	2010-10-01	2012-06-24	551	uS/cm @25C
@@ -1072,6 +1319,7 @@ From Excel, it is simple to copy and paste the tables in other Microsoft product
 \end{figure}
 
 \clearpage
+
 %------------------------------------------------------------
 % BIBLIO
 %------------------------------------------------------------
diff --git a/vignettes/dataRetrieval.toc b/vignettes/dataRetrieval.toc
index f05553d7aa44793f0a9945381c7f4fe909a895f7..a6398165a491095179565b0083edcdf602e67b5e 100644
--- a/vignettes/dataRetrieval.toc
+++ b/vignettes/dataRetrieval.toc
@@ -4,28 +4,27 @@
 \contentsline {subsection}{\numberline {2.1}Introduction}{3}{subsection.2.1}
 \contentsline {subsection}{\numberline {2.2}Site Information}{4}{subsection.2.2}
 \contentsline {subsubsection}{\numberline {2.2.1}getSiteFileData}{4}{subsubsection.2.2.1}
-\contentsline {subsubsection}{\numberline {2.2.2}getDataAvailability}{5}{subsubsection.2.2.2}
+\contentsline {subsubsection}{\numberline {2.2.2}getDataAvailability}{4}{subsubsection.2.2.2}
 \contentsline {subsection}{\numberline {2.3}Parameter Information}{6}{subsection.2.3}
 \contentsline {subsection}{\numberline {2.4}Daily Values}{7}{subsection.2.4}
-\contentsline {subsection}{\numberline {2.5}Unit Values}{8}{subsection.2.5}
-\contentsline {subsection}{\numberline {2.6}Water Quality Values}{10}{subsection.2.6}
-\contentsline {subsection}{\numberline {2.7}STORET Water Quality Retrievals}{11}{subsection.2.7}
-\contentsline {subsection}{\numberline {2.8}URL Construction}{12}{subsection.2.8}
-\contentsline {section}{\numberline {3}Data Retrievals Structured For Use In The EGRET Package}{12}{section.3}
-\contentsline {subsection}{\numberline {3.1}INFO Data}{13}{subsection.3.1}
-\contentsline {subsection}{\numberline {3.2}Daily Data}{13}{subsection.3.2}
-\contentsline {subsection}{\numberline {3.3}Sample Data}{14}{subsection.3.3}
-\contentsline {subsection}{\numberline {3.4}Censored Values: Summation Explanation}{15}{subsection.3.4}
-\contentsline {subsection}{\numberline {3.5}User-Generated Data Files}{16}{subsection.3.5}
-\contentsline {subsubsection}{\numberline {3.5.1}getDailyDataFromFile}{16}{subsubsection.3.5.1}
-\contentsline {subsubsection}{\numberline {3.5.2}getSampleDataFromFile}{17}{subsubsection.3.5.2}
-\contentsline {subsection}{\numberline {3.6}Merge Report}{18}{subsection.3.6}
-\contentsline {subsection}{\numberline {3.7}EGRET Plots}{19}{subsection.3.7}
-\contentsline {section}{\numberline {A}Getting Started in R}{21}{appendix.A}
-\contentsline {subsection}{\numberline {A.1}New to R?}{21}{subsection.A.1}
-\contentsline {subsection}{\numberline {A.2}R User: Installing dataRetrieval}{21}{subsection.A.2}
-\contentsline {subsection}{\numberline {A.3}R Developers: Installing dataRetrieval from gitHub}{22}{subsection.A.3}
+\contentsline {subsection}{\numberline {2.5}Unit Values}{10}{subsection.2.5}
+\contentsline {subsection}{\numberline {2.6}Water Quality Values}{12}{subsection.2.6}
+\contentsline {subsection}{\numberline {2.7}STORET Water Quality Retrievals}{14}{subsection.2.7}
+\contentsline {subsection}{\numberline {2.8}URL Construction}{14}{subsection.2.8}
+\contentsline {section}{\numberline {3}Data Retrievals Structured For Use In The EGRET Package}{15}{section.3}
+\contentsline {subsection}{\numberline {3.1}INFO Data}{15}{subsection.3.1}
+\contentsline {subsection}{\numberline {3.2}Daily Data}{15}{subsection.3.2}
+\contentsline {subsection}{\numberline {3.3}Sample Data}{16}{subsection.3.3}
+\contentsline {subsection}{\numberline {3.4}Censored Values: Summation Explanation}{17}{subsection.3.4}
+\contentsline {subsection}{\numberline {3.5}User-Generated Data Files}{18}{subsection.3.5}
+\contentsline {subsubsection}{\numberline {3.5.1}getDailyDataFromFile}{18}{subsubsection.3.5.1}
+\contentsline {subsubsection}{\numberline {3.5.2}getSampleDataFromFile}{19}{subsubsection.3.5.2}
+\contentsline {subsection}{\numberline {3.6}Merge Report}{20}{subsection.3.6}
+\contentsline {subsection}{\numberline {3.7}EGRET Plots}{21}{subsection.3.7}
+\contentsline {section}{\numberline {A}Getting Started in R}{23}{appendix.A}
+\contentsline {subsection}{\numberline {A.1}New to R?}{23}{subsection.A.1}
+\contentsline {subsection}{\numberline {A.2}R User: Installing dataRetrieval}{23}{subsection.A.2}
 \contentsline {section}{\numberline {B}Columns Names}{24}{appendix.B}
 \contentsline {subsection}{\numberline {B.1}INFO dataframe}{24}{subsection.B.1}
-\contentsline {subsection}{\numberline {B.2}Water Quality Portal}{25}{subsection.B.2}
-\contentsline {section}{\numberline {C}Creating tables in Microsoft from R}{27}{appendix.C}
+\contentsline {subsection}{\numberline {B.2}Water Quality Portal}{26}{subsection.B.2}
+\contentsline {section}{\numberline {C}Creating tables in Microsoft from R}{29}{appendix.C}
diff --git a/vignettes/figure/egretEx.pdf b/vignettes/figure/egretEx.pdf
new file mode 100644
index 0000000000000000000000000000000000000000..f872ba28d66b8cfa6a9a27767bae898e9fd72c30
Binary files /dev/null and b/vignettes/figure/egretEx.pdf differ
diff --git a/vignettes/figure/getNWISUnitPlot.pdf b/vignettes/figure/getNWISUnitPlot.pdf
new file mode 100644
index 0000000000000000000000000000000000000000..65b4613b662ab6b5f0eabd1e1dbe46df929684f8
Binary files /dev/null and b/vignettes/figure/getNWISUnitPlot.pdf differ
diff --git a/vignettes/figure/getNWIStemperaturePlot.pdf b/vignettes/figure/getNWIStemperaturePlot.pdf
new file mode 100644
index 0000000000000000000000000000000000000000..09c6c67a0cfca61157978698d3a4dca0e98f42aa
Binary files /dev/null and b/vignettes/figure/getNWIStemperaturePlot.pdf differ
diff --git a/vignettes/figure/getQWtemperaturePlot.pdf b/vignettes/figure/getQWtemperaturePlot.pdf
new file mode 100644
index 0000000000000000000000000000000000000000..f959d3ce1add55aa754a8c97844a13f8c60142ae
Binary files /dev/null and b/vignettes/figure/getQWtemperaturePlot.pdf differ