diff --git a/.Rbuildignore b/.Rbuildignore
index bd20a11e8772462c4eb790fb74c39199b758c545..9acafc4c7c3c72e587baea9904aea01521a61cfb 100644
--- a/.Rbuildignore
+++ b/.Rbuildignore
@@ -1,13 +1,13 @@
 ^.*\.Rproj$
 ^\.Rproj\.user$
-^\.Rproj\inst\dataRetrieval.log
-^\.Rproj\inst\dataRetrieval-concordance.tex
-^\.Rproj\inst\dataRetrieval.toc
-^\.Rproj\inst\dataRetrieval.tex
+^\.Rproj\inst\dataRetrievaldemo.log
+^\.Rproj\inst\dataRetrievaldemo-concordance.tex
+^\.Rproj\inst\dataRetrievaldemo.toc
+^\.Rproj\inst\dataRetrievaldemo.tex
 ^\.travis\.yml$
 ^\.Rproj\.gitignore
 ^\.Rproj\.RData
-^\.Rproj\dataRetrieval.Rproj
+^\.Rproj\dataRetrievaldemo.Rproj
 ^\.Rproj\.Rhistory
 ^\.Rproj\.travis.yml
 ^\.Rproj\appveyor.yml
diff --git a/.Rinstignore b/.Rinstignore
index f8c48d1eb281bd099d9f9dd29ff8eaf1656e5bdc..53f981c7a40a8722aa278ce610a175f4efa8b730 100644
--- a/.Rinstignore
+++ b/.Rinstignore
@@ -1,5 +1,5 @@
-inst/doc/dataRetrieval-concordance.tex
-inst/doc/dataRetrieval.tex
-inst/doc/dataRetrieval.log
-inst/doc/dataRetrieval.toc
-inst/doc/dataRetrieval.synctex.gz
\ No newline at end of file
+inst/doc/dataRetrievaldemo-concordance.tex
+inst/doc/dataRetrievaldemo.tex
+inst/doc/dataRetrievaldemo.log
+inst/doc/dataRetrievaldemo.toc
+inst/doc/dataRetrievaldemo.synctex.gz
\ No newline at end of file
diff --git a/DESCRIPTION b/DESCRIPTION
index 8eed721ad0af3fd841b48183a7953b9f6ea4d97c..73570bad48fafca586ec36126bb9c4a948046640 100644
--- a/DESCRIPTION
+++ b/DESCRIPTION
@@ -1,4 +1,4 @@
-Package: dataRetrieval
+Package: dataRetrievaldemo
 Type: Package
 Title: Retrieval functions for USGS and EPA hydrologic and water quality data
 Version: 1.5.0.9000
diff --git a/NEWS b/NEWS
index 455366c1f883e80fb446eeab0a6735bcdffbe5e7..015e3b18e67c507e21d0a5a21568ee3dc1500b15 100644
--- a/NEWS
+++ b/NEWS
@@ -1,4 +1,4 @@
-dataRetrieval 1.4.0-in developement
+dataRetrievaldemo 1.4.0 - in development
 ===========
 * Changed naming convention:
 getDVData -> getNWISDaily
@@ -15,7 +15,7 @@ getDataAvailability <- getNWISDataAvailability
 *
 
 
-dataRetrieval 1.3.3
+dataRetrievaldemo 1.3.3
 ===========
 
 * Updated getNWISSiteInfo to retrieve multiple site file datasets at once using a vector of siteNumbers as input argument.
@@ -24,7 +24,7 @@ dataRetrieval 1.3.3
 * Added very generalized NWIS and WQP retrieval functions (getNWISData, getNWISSites, getGeneralWQPData, and whatWQPsites) which allow the user to use any argument available on the Web service platform.
 
 
-dataRetrieval 1.3.2
+dataRetrievaldemo 1.3.2
 ===========
 
 * Deprecated getQWData, updated readWQPdata to take either parameter code or characteristic name.
@@ -33,7 +33,7 @@ dataRetrieval 1.3.2
 * Updated mergeReport to allow for Sample data with different measurements taken on the same day
 
 
-dataRetrieval 1.2.2
+dataRetrievaldemo 1.2.2
 ===========
 
 * Added getNWISDataAvailability function to find measured parameters and period of record information for a requested station.
@@ -44,7 +44,7 @@ dataRetrieval 1.2.2
 
 * Fixed a small leap year bug by changing day of year by making Feb. 29 always 59, and March 1st always 60 (even in non-leap years).
 
-dataRetrieval 1.2.1
+dataRetrievaldemo 1.2.1
 ===========
 
 * Improved documentation, especially example functions.
diff --git a/R/constructNWISURL.r b/R/constructNWISURL.r
index 97fdbd2bd3d5171eaa17956c5c29f3854cd81e52..4cf5b65aeae542b71e9add07d1c0599dc2b97a00 100644
--- a/R/constructNWISURL.r
+++ b/R/constructNWISURL.r
@@ -195,27 +195,26 @@ constructWQPURL <- function(siteNumber,parameterCd,startDate,endDate){
   if(multiplePcodes){
     parameterCd <- paste(parameterCd, collapse=";")
   }
+
+  baseURL <- "http://www.waterqualitydata.us/Result/search?siteid="
+  url <- paste0(baseURL,
+                siteNumber,
+                ifelse(pCodeLogic,"&pCode=","&characteristicName="),
+                parameterCd)
+  
   if (nzchar(startDate)){
     startDate <- format(as.Date(startDate), format="%m-%d-%Y")
+    url <- paste0(url, "&startDateLo=",startDate)
   }
+  
   if (nzchar(endDate)){
     endDate <- format(as.Date(endDate), format="%m-%d-%Y")
+    url <- paste0(url, "&startDateHi=",endDate)
   }
   
-  baseURL <- "http://www.waterqualitydata.us/Result/search?siteid="
-  url <- paste0(baseURL,
-                siteNumber,
-                ifelse(pCodeLogic,"&pCode=","&characteristicName="),
-                parameterCd,
-                "&startDateLo=",
-                startDate,
-                "&startDateHi=",
-                endDate,
-                "&countrycode=US&mimeType=tsv")
-  if(url.exists(url)){
-    return(url)
-  } else {
-    stop("The following url doesn't seem to exist:\n",url)
-  }
+  url <- paste0(url,"&countrycode=US&mimeType=tsv&zip=yes")
+  return(url)
 
 }
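
For reference, a sketch of the URL the revised constructWQPURL() now builds, assuming pCodeLogic comes out TRUE for a 5-digit parameter code (that test sits above this hunk and is not shown) and using the site, parameter, and date values from the package examples; the query-string order simply follows the paste0() calls above:

    # illustrative inputs only; dates are reformatted to mm-dd-YYYY by the code above
    url <- constructWQPURL("USGS-01594440", "01075", "1985-01-01", "1985-03-31")
    # expected form of the result (zipped tab-separated output requested):
    # http://www.waterqualitydata.us/Result/search?siteid=USGS-01594440&pCode=01075&startDateLo=01-01-1985&startDateHi=03-31-1985&countrycode=US&mimeType=tsv&zip=yes
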
diff --git a/R/importWQP.R b/R/importWQP.R
index ee13dad6bd99698968e159a89fbb3c9f257bbd91..6bc64fb7160510a7e80c2d124e47685551f700af 100644
--- a/R/importWQP.R
+++ b/R/importWQP.R
@@ -7,21 +7,35 @@
 #' start and end times.
 #' @export
 #' @import RCurl
+#' @param zip logical, defaults to TRUE. If TRUE, the Web service request is for a zipped file that is downloaded to a temporary file and unzipped; if FALSE, tab-separated text is read directly from the response.
+#' @import httr
 #' @importFrom lubridate parse_date_time
 #' @examples
 #' # These examples require an internet connection to run
 #' 
 #' ## Examples take longer than 5 seconds:
-#' rawSampleURL <- constructWQPURL('USGS-01594440','01075', '1985-01-01', '1985-03-31')
-#' rawSample <- importWQP(rawSampleURL)
-#' 
-importWQP <- function(url){
+#' rawSampleURL <- constructWQPURL('USGS-01594440','01075', '', '')
+#' rawSample <- importWQP(rawSampleURL, TRUE)
+#' url2 <- paste0("http://www.waterqualitydata.us/Result/search?",
+#' "siteid=USGS-01594440&pCode=01075&mimeType=tsv")
+#' rawSample2 <- importWQP(url2, FALSE)
+importWQP <- function(url, zip=TRUE){
   
   h <- basicHeaderGatherer()
   
-  retval = tryCatch({  
-    doc <- getURL(url, headerfunction = h$update)
+  tryCatch({  
+    if(zip){
+      headerInfo <- HEAD(url)$headers
+      temp <- tempfile()
+      origTimeout <- getOption("timeout")
+      options(timeout = 120)
+      download.file(url,temp, quiet=TRUE, mode='wb')
+      doc <- unzip(temp)
+      options(timeout = origTimeout)
+    } else {
+      doc <- getURL(url, headerfunction = h$update)
+      headerInfo <- h$value()
     
+    }
   }, warning = function(w) {
     message(paste("URL caused a warning:", url))
     message(w)
@@ -31,100 +45,109 @@ importWQP <- function(url){
     return(NA)
   })
   
-  if(h$value()["Content-Type"] == "text/tab-separated-values;charset=UTF-8"){
+  numToBeReturned <- as.numeric(headerInfo["Total-Result-Count"])
+  
+  if (!is.na(numToBeReturned) && numToBeReturned != 0){
+
+    suppressWarnings(namesData <- read.delim(if(zip) doc else textConnection(doc) , header = TRUE, quote="\"",
+                                             dec=".", sep='\t',
+                                             colClasses='character',
+                                             fill = TRUE,nrow=1))
+          
+    classColumns <- setNames(rep('character',ncol(namesData)),names(namesData))
     
-    numToBeReturned <- as.numeric(h$value()["Total-Result-Count"])
+    classColumns[grep("MeasureValue",names(classColumns))] <- NA
     
-    if (!is.na(numToBeReturned) | numToBeReturned != 0){
-      
-      
-      namesData <- read.delim(textConnection(doc), header = TRUE, quote="\"", 
-                              dec=".", sep='\t', 
-                              colClasses='character', 
-                              fill = TRUE,nrow=1)
-      classColumns <- setNames(rep('character',ncol(namesData)),names(namesData))
-      
-      classColumns[grep("MeasureValue",names(classColumns))] <- NA
-      
-      retval <- read.delim(textConnection(doc), header = TRUE, quote="\"", 
-                           dec=".", sep='\t', 
-                           colClasses=as.character(classColumns), 
-                           fill = TRUE)    
-      actualNumReturned <- nrow(retval)
-      
-      retval[,names(which(sapply(retval[,grep("MeasureValue",names(retval))], function(x)all(is.na(x)))))] <- ""
-      
-      if(actualNumReturned != numToBeReturned) warning(numToBeReturned, " sample results were expected, ", actualNumReturned, " were returned")
-      
-      timeZoneLibrary <- setNames(c("America/New_York","America/New_York","America/Chicago","America/Chicago",
-                                    "America/Denver","America/Denver","America/Los_Angeles","America/Los_Angeles",
-                                    "America/Anchorage","America/Anchorage","America/Honolulu","America/Honolulu"),
-                                  c("EST","EDT","CST","CDT","MST","MDT","PST","PDT","AKST","AKDT","HAST","HST"))
-      timeZoneStart <- as.character(timeZoneLibrary[retval$ActivityStartTime.TimeZoneCode])
-      timeZoneEnd <- as.character(timeZoneLibrary[retval$ActivityEndTime.TimeZoneCode])
-      timeZoneStart[is.na(timeZoneStart)] <- ""
-      timeZoneEnd[is.na(timeZoneEnd)] <- ""
-      
-      if("ActivityStartDate" %in% names(retval)){
-        if(any(retval$ActivityStartDate != "")){
-          suppressWarnings(retval$ActivityStartDate <- as.Date(parse_date_time(retval$ActivityStartDate, c("Ymd", "mdY"))))
-        }
+    suppressWarnings(retval <- read.delim(if(zip) doc else textConnection(doc), header = TRUE, quote="\"", 
+                         dec=".", sep='\t', 
+                         colClasses=as.character(classColumns), 
+                         fill = TRUE))
+    
+    if(zip){
+      unlink(temp)
+    } 
+    
+    actualNumReturned <- nrow(retval)
+    
+    retval[,names(which(sapply(retval[,grep("MeasureValue",names(retval))], function(x)all(is.na(x)))))] <- ""
+    
+    if(actualNumReturned != numToBeReturned) warning(numToBeReturned, " sample results were expected, ", actualNumReturned, " were returned")
+    
+    timeZoneLibrary <- setNames(c("America/New_York","America/New_York","America/Chicago","America/Chicago",
+                                  "America/Denver","America/Denver","America/Los_Angeles","America/Los_Angeles",
+                                  "America/Anchorage","America/Anchorage","America/Honolulu","America/Honolulu"),
+                                c("EST","EDT","CST","CDT","MST","MDT","PST","PDT","AKST","AKDT","HAST","HST"))
+    timeZoneStart <- as.character(timeZoneLibrary[retval$ActivityStartTime.TimeZoneCode])
+    timeZoneEnd <- as.character(timeZoneLibrary[retval$ActivityEndTime.TimeZoneCode])
+    timeZoneStart[is.na(timeZoneStart)] <- ""
+    timeZoneEnd[is.na(timeZoneEnd)] <- ""
+    
+    if("ActivityStartDate" %in% names(retval)){
+      if(any(retval$ActivityStartDate != "")){
+        suppressWarnings(retval$ActivityStartDate <- as.Date(parse_date_time(retval$ActivityStartDate, c("Ymd", "mdY"))))
       }
+    }
 
-      if("ActivityEndDate" %in% names(retval)){
-        if(any(retval$ActivityEndDate != "")){
-          suppressWarnings(retval$ActivityEndDate <- as.Date(parse_date_time(retval$ActivityEndDate, c("Ymd", "mdY"))))
-        }        
-      }
+    if("ActivityEndDate" %in% names(retval)){
+      if(any(retval$ActivityEndDate != "")){
+        suppressWarnings(retval$ActivityEndDate <- as.Date(parse_date_time(retval$ActivityEndDate, c("Ymd", "mdY"))))
+      }        
+    }
 
-      if(any(!is.na(timeZoneStart))){
-        if(length(unique(timeZoneStart)) == 1){
-          retval$ActivityStartDateTime <- with(retval, as.POSIXct(paste(ActivityStartDate, ActivityStartTime.Time),format="%Y-%m-%d %H:%M:%S", tz=unique(timeZoneStart)))
-        } else {
-          
-          mostCommonTZ <- names(sort(summary(as.factor(timeZoneStart)),decreasing = TRUE)[1])
+    if(any(!is.na(timeZoneStart))){
+      if(length(unique(timeZoneStart)) == 1){
+        retval$ActivityStartDateTime <- with(retval, as.POSIXct(paste(ActivityStartDate, ActivityStartTime.Time),format="%Y-%m-%d %H:%M:%S", tz=unique(timeZoneStart)))
+      } else {
+        
+        mostCommonTZ <- names(sort(summary(as.factor(timeZoneStart)),decreasing = TRUE)[1])
 
-          retval$ActivityStartDateTime <- with(retval, as.POSIXct(paste(ActivityStartDate, ActivityStartTime.Time),
-                                format="%Y-%m-%d %H:%M:%S", 
-                                tz=mostCommonTZ))
-          additionalTZs <- names(sort(summary(as.factor(timeZoneStart)),decreasing = TRUE)[-1])
-          for(i in additionalTZs){
-            retval$ActivityStartDateTime[timeZoneStart == i] <-  with(retval[timeZoneStart == i,], 
-                               as.POSIXct(paste(ActivityStartDate, ActivityStartTime.Time),
-                               format="%Y-%m-%d %H:%M:%S", 
-                               tz=i))      
-          }
+        retval$ActivityStartDateTime <- with(retval, as.POSIXct(paste(ActivityStartDate, ActivityStartTime.Time),
+                              format="%Y-%m-%d %H:%M:%S", 
+                              tz=mostCommonTZ))
+        additionalTZs <- names(sort(summary(as.factor(timeZoneStart)),decreasing = TRUE)[-1])
+        for(i in additionalTZs){
+          retval$ActivityStartDateTime[timeZoneStart == i] <-  with(retval[timeZoneStart == i,], 
+                             as.POSIXct(paste(ActivityStartDate, ActivityStartTime.Time),
+                             format="%Y-%m-%d %H:%M:%S", 
+                             tz=i))      
         }
       }
-      
-      if(any(!is.na(timeZoneEnd))){
-        if(length(unique(timeZoneEnd)) == 1){
-          retval$ActivityEndDateTime <- with(retval, as.POSIXct(paste(ActivityEndDate, ActivityEndTime.Time), format="%Y-%m-%d %H:%M:%S",tz=unique(timeZoneEnd)))
-        } else {
-          mostCommonTZ <- names(sort(summary(as.factor(timeZoneEnd)),decreasing = TRUE)[1])
-          
-          retval$ActivityEndDateTime <- with(retval, as.POSIXct(paste(ActivityEndDate, ActivityEndTime.Time),
-                                      format="%Y-%m-%d %H:%M:%S", 
-                                      tz=mostCommonTZ))
-          additionalTZs <- names(sort(summary(as.factor(timeZoneEnd)),decreasing = TRUE)[-1])
-          for(i in additionalTZs){
-            retval$ActivityEndDateTime[timeZoneEnd == i] <-  with(retval[timeZoneEnd == i,], 
-                          as.POSIXct(paste(ActivityEndDate, ActivityEndTime.Time),
-                                     format="%Y-%m-%d %H:%M:%S", 
-                                     tz=i))      
-          }
+    }
+    
+    if(any(!is.na(timeZoneEnd))){
+      if(length(unique(timeZoneEnd)) == 1){
+        retval$ActivityEndDateTime <- with(retval, as.POSIXct(paste(ActivityEndDate, ActivityEndTime.Time), format="%Y-%m-%d %H:%M:%S",tz=unique(timeZoneEnd)))
+      } else {
+        mostCommonTZ <- names(sort(summary(as.factor(timeZoneEnd)),decreasing = TRUE)[1])
+        
+        retval$ActivityEndDateTime <- with(retval, as.POSIXct(paste(ActivityEndDate, ActivityEndTime.Time),
+                                    format="%Y-%m-%d %H:%M:%S", 
+                                    tz=mostCommonTZ))
+        additionalTZs <- names(sort(summary(as.factor(timeZoneEnd)),decreasing = TRUE)[-1])
+        for(i in additionalTZs){
+          retval$ActivityEndDateTime[timeZoneEnd == i] <-  with(retval[timeZoneEnd == i,], 
+                        as.POSIXct(paste(ActivityEndDate, ActivityEndTime.Time),
+                                   format="%Y-%m-%d %H:%M:%S", 
+                                   tz=i))      
         }
       }
-          
-      return(retval)
-      
-    } else {
-      warning("No data to retrieve")
-      return(NA)
     }
+        
+    return(retval)
+    
   } else {
-    message(paste("URL caused an error:", url))
-    message("Content-Type=",h$value()["Content-Type"])
+    warning("No data to retrieve")
     return(NA)
   }
 }
\ No newline at end of file
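
A minimal standalone sketch of the zipped-download path importWQP() now follows when zip=TRUE; the URL is one the package itself would build, the variable names are illustrative, and an internet connection is assumed:

    library(httr)
    url <- paste0("http://www.waterqualitydata.us/Result/search?",
                  "siteid=USGS-01594440&pCode=01075&countrycode=US&mimeType=tsv&zip=yes")
    headerInfo <- HEAD(url)$headers               # header-only request; carries Total-Result-Count
    temp <- tempfile()
    download.file(url, temp, quiet = TRUE, mode = "wb")
    tsvFile <- unzip(temp)                        # extract the tsv and keep its path
    retval <- read.delim(tsvFile, sep = "\t", colClasses = "character")
    unlink(temp)                                  # remove the downloaded zip
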
diff --git a/R/readWQPdata.R b/R/readWQPdata.R
index 95a8074ec72fe839d9e1c1be50ee8f21427817ca..3a113a7c225998e3e668d2a20510f3a7debf210c 100644
--- a/R/readWQPdata.R
+++ b/R/readWQPdata.R
@@ -32,11 +32,11 @@ readWQPdata <- function(...){
   
   
   baseURL <- "http://www.waterqualitydata.us/Result/search?"
-  urlCall <- paste(baseURL,
+  urlCall <- paste0(baseURL,
                    urlCall,
-                   "&mimeType=tsv",sep = "")
+                   "&mimeType=tsv&zip=yes")
 
-  retVal <- importWQP(urlCall)
+  retVal <- importWQP(urlCall,TRUE)
   return(retVal)
   
 }
\ No newline at end of file
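
A short usage sketch of readWQPdata() after this change: every named argument is forwarded into the WQP query string, and the zipped tab-separated result is read by importWQP(urlCall, TRUE). The statecode value comes from the vignette; pairing it with characteristicName = "pH" is an assumption for illustration:

    # assumed argument combination; any WQP Web-service argument works here
    dataPH <- readWQPdata(statecode = "US:55", characteristicName = "pH")
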
diff --git a/R/readWQPqw.r b/R/readWQPqw.r
index 1f7fe43d5be9af23c0cdcaa957f30d90f094de19..a6125c86e722bb2bb583c53346e740d4586d5763 100644
--- a/R/readWQPqw.r
+++ b/R/readWQPqw.r
@@ -20,13 +20,13 @@
 #' @seealso \code{\link{readWQPdata}}, \code{\link{whatWQPsites}}, 
 #' \code{\link{readNWISqw}}, and \code{\link{importWQP}}
 #' @examples
-#' rawPcode <- readWQPqw('USGS-01594440','01075', '1985-01-01', '1985-03-31')
+#' rawPcode <- readWQPqw('USGS-01594440','01075', '', '')
 #' rawCharacteristicName <- readWQPqw('WIDNR_WQX-10032762','Specific conductance', '', '')
 #' 
 readWQPqw <- function(siteNumber,parameterCd,startDate,endDate){
 
   url <- constructWQPURL(siteNumber,parameterCd,startDate,endDate)
-  retVal <- importWQP(url)
+  retVal <- importWQP(url,TRUE)
   return(retVal)
   
 }
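
The updated readWQPqw() call chain in one usage sketch, with the site and parameter values taken from the examples above; both forms route through constructWQPURL() and then importWQP(url, TRUE):

    # parameter-code request and characteristic-name request follow the same path
    rawPcode <- readWQPqw("USGS-01594440", "01075", "", "")
    rawName  <- readWQPqw("WIDNR_WQX-10032762", "Specific conductance", "", "")
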
diff --git a/R/tabbedDataRetrievals.R b/R/tabbedDataRetrievals.R
index 361b0c9739838b079899a13adf0e5f5685e3e59b..8933a9ae8fe3b4e0c12087a905cb8cfa69bc563a 100644
--- a/R/tabbedDataRetrievals.R
+++ b/R/tabbedDataRetrievals.R
@@ -1,7 +1,7 @@
 #' Retrieval functions for USGS data
 #'
 #' \tabular{ll}{
-#' Package: \tab dataRetrieval\cr
+#' Package: \tab dataRetrievaldemo\cr
 #' Type: \tab Package\cr
 #' Version: \tab 1.5.0\cr
 #' Date: \tab 2014-11-16\cr
@@ -16,7 +16,7 @@
 #'
 #' Collection of functions to help retrieve USGS data from either web services or user provided data files.
 #'
-#' @name dataRetrieval-package
+#' @name dataRetrievaldemo-package
 #' @docType package
 #' @author Robert M. Hirsch \email{rhirsch@@usgs.gov}, Laura De Cicco \email{ldecicco@@usgs.gov}
 #' @references Hirsch, R. M., Moyer, D. L. and Archfield, S. A. (2010), Weighted Regressions on Time, Discharge, and Season (WRTDS), with an Application to Chesapeake Bay River Inputs. JAWRA Journal of the American Water Resources Association, 46: 857-880. doi: 10.1111/j.1752-1688.2010.00482.x
diff --git a/README.md b/README.md
index 3b3a3449342f1d2f508441f3d656041696bc5bae..8ad02cc77a2f65e88f2a5423239d43a095b71608 100644
--- a/README.md
+++ b/README.md
@@ -1,6 +1,6 @@
-`dataRetrieval`
+`dataRetrievaldemo`
 =============
-Linux: [![travis](https://travis-ci.org/USGS-R/dataRetrieval.svg?branch=master)](https://travis-ci.org/USGS-R/dataRetrieval)
+Linux: [![travis](https://travis-ci.org/USGS-R/dataRetrievaldemo.svg?branch=master)](https://travis-ci.org/USGS-R/dataRetrievaldemo)
 
 Windows: [![Build status](https://ci.appveyor.com/api/projects/status/msanha92b500grr7?svg=true)](https://ci.appveyor.com/project/ldecicco-USGS/dataretrieval-787)
 
@@ -51,9 +51,9 @@ egret_comments@usgs.gov
 Additionally, to subscribe to an email list concerning updates to these R packages, please send a request to egret_comments@usgs.gov.
 
 ##Package Installation
-To install the dataRetrieval package, you must be using R 3.0 or greater and run the following command:
+To install the dataRetrievaldemo package, you must be using R 3.0 or greater and run the following command:
 
-	install.packages("dataRetrieval", 
+	install.packages("dataRetrievaldemo", 
 	  repos=c("http://usgs-r.github.com",
 	           "http://cran.us.r-project.org"),
 	  dependencies=TRUE)
@@ -62,7 +62,7 @@ To install the dataRetrieval package, you must be using R 3.0 or greater and run
 ##Version updates
 ---------------
 
-###dataRetrieval 1.5.0
+###dataRetrievaldemo 1.5.0
 
 * Changing naming convention one last time. Migrating `EGRET` specific retrievals to `EGRET`.
 * Added back WaterML2 parsing tool
@@ -70,7 +70,7 @@ To install the dataRetrieval package, you must be using R 3.0 or greater and run
 
 
 
-###dataRetrieval 1.4.0
+###dataRetrievaldemo 1.4.0
 Changed naming convention:
 
 |Original Name | New Name |
@@ -90,7 +90,7 @@ Changed naming convention:
 *Removing WaterML2 until fixed.
 
 
-###dataRetrieval 1.3.3
+###dataRetrievaldemo 1.3.3
 
 * Updated getNWISSiteInfo to retrieve multiple site file datasets at once using a vector of siteNumbers as input argument.
 * Updated error-handling for Web service calls. More information is returned when errors happen
@@ -98,7 +98,7 @@ Changed naming convention:
 * Added very generalized NWIS and WQP retrieval functions (getNWISData, getNWISSites, getGeneralWQPData, and whatWQPsites) which allow the user to use any argument available on the Web service platform.
 
 
-###dataRetrieval 1.3.2
+###dataRetrievaldemo 1.3.2
 
 * Deprecated getQWData, updated readWQPdata to take either parameter code or characteristic name.
 * Changed the name of raw data retrievals to: readNWISqw, getNWISunitData, getNWISdvData, and getWQPqwData (from: readNWISqw, retrieveUnitNWISData, retrieveNWISData, getRawQWData)
@@ -110,7 +110,7 @@ Changed naming convention:
 
 Load data from web services:
 
-	library(dataRetrieval)
+	library(dataRetrievaldemo)
 	Daily <- getNWISDaily("06934500","00060","1979-10-01","2010-09-30")
 	Sample <-getNWISSample("06934500","00631","1970-10-01","2011-09-30")
 	INFO <-getNWISInfo("06934500","00631", interactive=FALSE)
diff --git a/inst/doc/dataRetrieval.R b/inst/doc/dataRetrieval.R
index 434bdcda9c963177d9c05fcf035a4354c03f6273..015b15f6b6b1db7ca1c5c9429a7e409147640607 100644
--- a/inst/doc/dataRetrieval.R
+++ b/inst/doc/dataRetrieval.R
@@ -20,7 +20,7 @@ bold.colHeaders <- function(x) {
 addSpace <- function(x) ifelse(x != "1", "[5pt]","")
 
 ## ----workflow, echo=TRUE,eval=FALSE-----------------------
-#  library(dataRetrieval)
+#  library(dataRetrievaldemo)
 #  # Choptank River near Greensboro, MD
 #  siteNumber <- "01491000"
 #  ChoptankInfo <- readNWISsite(siteNumber)
@@ -55,7 +55,7 @@ print(xtable(data.df,
 
 
 ## ----tableParameterCodesDataRetrieval---------------------
-library(dataRetrieval)
+library(dataRetrievaldemo)
 parameterCdFile <-  parameterCdFile
 names(parameterCdFile)
 
@@ -256,16 +256,16 @@ nrow(dischargeWI)
 readNWISpCode
 
 ## ----seeVignette,eval = FALSE-----------------------------
-#  vignette(dataRetrieval)
+#  vignette(dataRetrievaldemo)
 
 ## ----installFromCran,eval = FALSE-------------------------
-#  install.packages("dataRetrieval",
+#  install.packages("dataRetrievaldemo",
 #  repos=c("http://usgs-r.github.com","http://cran.us.r-project.org"),
 #  dependencies=TRUE,
 #  type="both")
 
 ## ----openLibraryTest, eval=FALSE--------------------------
-#  library(dataRetrieval)
+#  library(dataRetrievaldemo)
 
 ## ----label=getSiteApp, echo=TRUE--------------------------
 availableData <- whatNWISData(siteNumber, "dv")
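
A brief follow-on sketch for the whatNWISData() call above, assuming (as in the full vignette) the Choptank site number and that the returned data frame carries a parm_cd column identifying each available parameter:

    library(dataRetrievaldemo)
    siteNumber <- "01491000"                          # Choptank River, as in the vignette
    availableData <- whatNWISData(siteNumber, "dv")
    # keep only daily-value records for discharge (00060); parm_cd is an assumed column name
    dischargeAvailable <- availableData[availableData$parm_cd == "00060", ]
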
diff --git a/inst/doc/dataRetrieval.Rnw b/inst/doc/dataRetrieval.Rnw
index a2468f4f8ef13877b20d80db70bc045e0dc7ad88..de316036f0da99203894ba3c3518f2217975b92c 100644
--- a/inst/doc/dataRetrieval.Rnw
+++ b/inst/doc/dataRetrieval.Rnw
@@ -1,9 +1,9 @@
-%\VignetteIndexEntry{Introduction to the dataRetrieval package}
+%\VignetteIndexEntry{Introduction to the dataRetrievaldemo package}
 %\VignetteEngine{knitr::knitr}
 %\VignetteDepends{}
 %\VignetteSuggests{xtable,EGRET}
 %\VignetteImports{zoo, XML, RCurl, reshape2,lubridate}
-%\VignettePackage{dataRetrieval}
+%\VignettePackage{dataRetrievaldemo}
 
 \documentclass[a4paper,11pt]{article}
 
@@ -140,7 +140,7 @@ library(knitr)
 
 
 %------------------------------------------------------------
-\title{The dataRetrieval R package}
+\title{The dataRetrievaldemo R package}
 %------------------------------------------------------------
 \author[1]{Laura De Cicco}
 \author[1]{Robert Hirsch}
@@ -162,7 +162,7 @@ bold.colHeaders <- function(x) {
 addSpace <- function(x) ifelse(x != "1", "[5pt]","")
 @
 
-\noindent{\huge\textsf{\textbf{The dataRetrieval R package}}}
+\noindent{\huge\textsf{\textbf{The dataRetrievaldemo R package}}}
 
 \noindent\textsf{By Laura De Cicco and Robert Hirsch}
 
@@ -179,19 +179,19 @@ addSpace <- function(x) ifelse(x != "1", "[5pt]","")
 \newpage
 
 %------------------------------------------------------------
-\section{Introduction to dataRetrieval}
+\section{Introduction to dataRetrievaldemo}
 %------------------------------------------------------------ 
-The dataRetrieval package was created to simplify the process of loading hydrologic data into the R environment. It has been specifically designed to work seamlessly with the EGRET R package: Exploration and Graphics for RivEr Trends. See: \url{https://github.com/USGS-R/EGRET/wiki} or \url{http://dx.doi.org/10.3133/tm4A10} for information on EGRET. EGRET is designed to provide analysis of water quality data sets using the Weighted Regressions on Time, Discharge and Season (WRTDS) method as well as analysis of discharge trends using robust time-series smoothing techniques.  Both of these capabilities provide both tabular and graphical analyses of long-term data sets.
+The dataRetrievaldemo package was created to simplify the process of loading hydrologic data into the R environment. It has been specifically designed to work seamlessly with the EGRET R package: Exploration and Graphics for RivEr Trends. See: \url{https://github.com/USGS-R/EGRET/wiki} or \url{http://dx.doi.org/10.3133/tm4A10} for information on EGRET. EGRET is designed to provide analysis of water quality data sets using the Weighted Regressions on Time, Discharge and Season (WRTDS) method as well as analysis of discharge trends using robust time-series smoothing techniques.  Both of these capabilities provide both tabular and graphical analyses of long-term data sets.
 
 
-The dataRetrieval package is designed to retrieve many of the major data types of U.S. Geological Survey (USGS) hydrologic data that are available on the Web. Users may also load data from other sources (text files, spreadsheets) using dataRetrieval.  Section \ref{sec:genRetrievals} provides examples of how one can obtain raw data from USGS sources on the Web and load them into dataframes within the R environment.  The functionality described in section \ref{sec:genRetrievals} is for general use and is not tailored for the specific uses of the EGRET package.  The functionality described in section \ref{sec:EGRETdfs} is tailored specifically to obtaining input from the Web and structuring it for use in the EGRET package.  The functionality described in section \ref{sec:userFiles} is for converting hydrologic data from user-supplied files and structuring it specifically for use in the EGRET package.
+The dataRetrievaldemo package is designed to retrieve many of the major data types of U.S. Geological Survey (USGS) hydrologic data that are available on the Web. Users may also load data from other sources (text files, spreadsheets) using dataRetrievaldemo.  Section \ref{sec:genRetrievals} provides examples of how one can obtain raw data from USGS sources on the Web and load them into dataframes within the R environment.  The functionality described in section \ref{sec:genRetrievals} is for general use and is not tailored for the specific uses of the EGRET package.  The functionality described in section \ref{sec:EGRETdfs} is tailored specifically to obtaining input from the Web and structuring it for use in the EGRET package.  The functionality described in section \ref{sec:userFiles} is for converting hydrologic data from user-supplied files and structuring it specifically for use in the EGRET package.
 
 For information on getting started in R and installing the package, see (\ref{sec:appendix1}): Getting Started. Any use of trade, firm, or product names is for descriptive purposes only and does not imply endorsement by the U.S. Government.
 
-A quick workflow for major dataRetrieval functions:
+A quick workflow for major dataRetrievaldemo functions:
 
 <<workflow, echo=TRUE,eval=FALSE>>=
-library(dataRetrieval)
+library(dataRetrievaldemo)
 # Choptank River near Greensboro, MD
 siteNumber <- "01491000" 
 ChoptankInfo <- readNWISsite(siteNumber)
@@ -218,7 +218,7 @@ In this section, five examples of Web retrievals document how to get raw data. T
 % %------------------------------------------------------------
 % \subsection{Introduction}
 % %------------------------------------------------------------
-The USGS organizes hydrologic data in a standard structure.  Streamgages are located throughout the United States, and each streamgage has a unique ID (referred in this document and throughout the dataRetrieval package as \enquote{siteNumber}).  Often (but not always), these ID's are 8 digits.  The first step to finding data is discovering this siteNumber. There are many ways to do this, one is the National Water Information System: Mapper \url{http://maps.waterdata.usgs.gov/mapper/index.html}.
+The USGS organizes hydrologic data in a standard structure.  Streamgages are located throughout the United States, and each streamgage has a unique ID (referred to in this document and throughout the dataRetrievaldemo package as \enquote{siteNumber}).  Often (but not always), these IDs are 8 digits.  The first step to finding data is discovering this siteNumber. There are many ways to do this; one is the National Water Information System: Mapper \url{http://maps.waterdata.usgs.gov/mapper/index.html}.
 
 Once the siteNumber is known, the next required input for USGS data retrievals is the \enquote{parameter code}.  This is a 5-digit code that specifies the measured parameter being requested.  For example, parameter code 00631 represents \enquote{Nitrate plus nitrite, water, filtered, milligrams per liter as nitrogen}, with units of \enquote{mg/l as N}. A complete list of possible USGS parameter codes can be found at \url{http://nwis.waterdata.usgs.gov/usa/nwis/pmcodes?help}.
 
@@ -247,7 +247,7 @@ print(xtable(data.df,
 A complete list (as of September 25, 2013) is available as data attached to the package. It is accessed by the following:
 
 <<tableParameterCodesDataRetrieval>>=
-library(dataRetrieval)
+library(dataRetrievaldemo)
 parameterCdFile <-  parameterCdFile
 names(parameterCdFile)
 @
@@ -566,7 +566,7 @@ to discover many options for searching for NWIS sites. For example, you may want
 
 \url{http://waterservices.usgs.gov/nwis/site/?format=rdb&bBox=-83.0,36.5,-81.0,38.5&parameterCd=00010,00060&hasDataTypeCd=dv}
 
-The following dataRetrieval code can be used to get those sites:
+The following dataRetrievaldemo code can be used to get those sites:
 
 <<siteSearch>>=
 sites <- whatNWISsites(bBox="-83.0,36.5,-81.0,38.5", 
@@ -659,7 +659,7 @@ dataPH <- readWQPdata(statecode="US:55",
 \section{Getting Started in R}
 \label{sec:appendix1}
 %------------------------------------------------------------ 
-This section describes the options for downloading and installing the dataRetrieval package.
+This section describes the options for downloading and installing the dataRetrievaldemo package.
 
 %------------------------------------------------------------
 \subsection{New to R?}
@@ -691,18 +691,18 @@ readNWISpCode
 
 Additionally, many R packages have vignette files attached (such as this paper). To view the vignette:
 <<seeVignette,eval = FALSE>>=
-vignette(dataRetrieval)
+vignette(dataRetrievaldemo)
 @
 
 \FloatBarrier
 \clearpage
 %------------------------------------------------------------
-\subsection{R User: Installing dataRetrieval}
+\subsection{R User: Installing dataRetrievaldemo}
 %------------------------------------------------------------ 
-The following command installs dataRetrieval and subsequent required packages:
+The following command installs dataRetrievaldemo and subsequent required packages:
 
 <<installFromCran,eval = FALSE>>=
-install.packages("dataRetrieval", 
+install.packages("dataRetrievaldemo", 
 repos=c("http://usgs-r.github.com","http://cran.us.r-project.org"),
 dependencies=TRUE,
 type="both")
@@ -710,7 +710,7 @@ type="both")
 
 After installing the package, you need to open the library each time you re-start R.  This is done with the simple command:
 <<openLibraryTest, eval=FALSE>>=
-library(dataRetrieval)
+library(dataRetrievaldemo)
 @
 
 
diff --git a/inst/doc/dataRetrieval.pdf b/inst/doc/dataRetrieval.pdf
index 26f45594a1655c3b20cd71cd78f2b308cdc6e283..7c224c975fa30ab1c0faae9e60cd37659bca922f 100644
Binary files a/inst/doc/dataRetrieval.pdf and b/inst/doc/dataRetrieval.pdf differ
diff --git a/inst/tests/run-all.R b/inst/tests/run-all.R
index 098e186f34c8adbda37f05a62c3b66883ac0bc36..bc5bac7043baaac5b68edcc5fe9409ac35a34ae6 100644
--- a/inst/tests/run-all.R
+++ b/inst/tests/run-all.R
@@ -1,4 +1,4 @@
 library(testthat)
-library(dataRetrieval)
+library(dataRetrievaldemo)
 
-test_package("dataRetrieval")
\ No newline at end of file
+test_package("dataRetrievaldemo")
\ No newline at end of file
diff --git a/man/dataRetrieval-package.Rd b/man/dataRetrieval-package.Rd
index 58cb8dc86251d3d1585d48ed6447c768abd8920d..11322efa3eb16f4d49f28d30083d0f3dd0ab962d 100644
--- a/man/dataRetrieval-package.Rd
+++ b/man/dataRetrieval-package.Rd
@@ -1,11 +1,11 @@
 % Generated by roxygen2 (4.0.2): do not edit by hand
 \docType{package}
-\name{dataRetrieval-package}
-\alias{dataRetrieval-package}
+\name{dataRetrievaldemo-package}
+\alias{dataRetrievaldemo-package}
 \title{Retrieval functions for USGS data}
 \description{
 \tabular{ll}{
-Package: \tab dataRetrieval\cr
+Package: \tab dataRetrievaldemo\cr
 Type: \tab Package\cr
 Version: \tab 1.5.0\cr
 Date: \tab 2014-11-16\cr
diff --git a/vignettes/dataRetrieval.Rnw b/vignettes/dataRetrieval.Rnw
index a2468f4f8ef13877b20d80db70bc045e0dc7ad88..de316036f0da99203894ba3c3518f2217975b92c 100644
--- a/vignettes/dataRetrieval.Rnw
+++ b/vignettes/dataRetrieval.Rnw
@@ -1,9 +1,9 @@
-%\VignetteIndexEntry{Introduction to the dataRetrieval package}
+%\VignetteIndexEntry{Introduction to the dataRetrievaldemo package}
 %\VignetteEngine{knitr::knitr}
 %\VignetteDepends{}
 %\VignetteSuggests{xtable,EGRET}
 %\VignetteImports{zoo, XML, RCurl, reshape2,lubridate}
-%\VignettePackage{dataRetrieval}
+%\VignettePackage{dataRetrievaldemo}
 
 \documentclass[a4paper,11pt]{article}
 
@@ -140,7 +140,7 @@ library(knitr)
 
 
 %------------------------------------------------------------
-\title{The dataRetrieval R package}
+\title{The dataRetrievaldemo R package}
 %------------------------------------------------------------
 \author[1]{Laura De Cicco}
 \author[1]{Robert Hirsch}
@@ -162,7 +162,7 @@ bold.colHeaders <- function(x) {
 addSpace <- function(x) ifelse(x != "1", "[5pt]","")
 @
 
-\noindent{\huge\textsf{\textbf{The dataRetrieval R package}}}
+\noindent{\huge\textsf{\textbf{The dataRetrievaldemo R package}}}
 
 \noindent\textsf{By Laura De Cicco and Robert Hirsch}
 
@@ -179,19 +179,19 @@ addSpace <- function(x) ifelse(x != "1", "[5pt]","")
 \newpage
 
 %------------------------------------------------------------
-\section{Introduction to dataRetrieval}
+\section{Introduction to dataRetrievaldemo}
 %------------------------------------------------------------ 
-The dataRetrieval package was created to simplify the process of loading hydrologic data into the R environment. It has been specifically designed to work seamlessly with the EGRET R package: Exploration and Graphics for RivEr Trends. See: \url{https://github.com/USGS-R/EGRET/wiki} or \url{http://dx.doi.org/10.3133/tm4A10} for information on EGRET. EGRET is designed to provide analysis of water quality data sets using the Weighted Regressions on Time, Discharge and Season (WRTDS) method as well as analysis of discharge trends using robust time-series smoothing techniques.  Both of these capabilities provide both tabular and graphical analyses of long-term data sets.
+The dataRetrievaldemo package was created to simplify the process of loading hydrologic data into the R environment. It has been specifically designed to work seamlessly with the EGRET R package: Exploration and Graphics for RivEr Trends. See: \url{https://github.com/USGS-R/EGRET/wiki} or \url{http://dx.doi.org/10.3133/tm4A10} for information on EGRET. EGRET is designed to provide analysis of water quality data sets using the Weighted Regressions on Time, Discharge and Season (WRTDS) method as well as analysis of discharge trends using robust time-series smoothing techniques.  Both of these capabilities provide both tabular and graphical analyses of long-term data sets.
 
 
-The dataRetrieval package is designed to retrieve many of the major data types of U.S. Geological Survey (USGS) hydrologic data that are available on the Web. Users may also load data from other sources (text files, spreadsheets) using dataRetrieval.  Section \ref{sec:genRetrievals} provides examples of how one can obtain raw data from USGS sources on the Web and load them into dataframes within the R environment.  The functionality described in section \ref{sec:genRetrievals} is for general use and is not tailored for the specific uses of the EGRET package.  The functionality described in section \ref{sec:EGRETdfs} is tailored specifically to obtaining input from the Web and structuring it for use in the EGRET package.  The functionality described in section \ref{sec:userFiles} is for converting hydrologic data from user-supplied files and structuring it specifically for use in the EGRET package.
+The dataRetrievaldemo package is designed to retrieve many of the major data types of U.S. Geological Survey (USGS) hydrologic data that are available on the Web. Users may also load data from other sources (text files, spreadsheets) using dataRetrievaldemo.  Section \ref{sec:genRetrievals} provides examples of how one can obtain raw data from USGS sources on the Web and load them into dataframes within the R environment.  The functionality described in section \ref{sec:genRetrievals} is for general use and is not tailored for the specific uses of the EGRET package.  The functionality described in section \ref{sec:EGRETdfs} is tailored specifically to obtaining input from the Web and structuring it for use in the EGRET package.  The functionality described in section \ref{sec:userFiles} is for converting hydrologic data from user-supplied files and structuring it specifically for use in the EGRET package.
 
 For information on getting started in R and installing the package, see (\ref{sec:appendix1}): Getting Started. Any use of trade, firm, or product names is for descriptive purposes only and does not imply endorsement by the U.S. Government.
 
-A quick workflow for major dataRetrieval functions:
+A quick workflow for major dataRetrievaldemo functions:
 
 <<workflow, echo=TRUE,eval=FALSE>>=
-library(dataRetrieval)
+library(dataRetrievaldemo)
 # Choptank River near Greensboro, MD
 siteNumber <- "01491000" 
 ChoptankInfo <- readNWISsite(siteNumber)
@@ -218,7 +218,7 @@ In this section, five examples of Web retrievals document how to get raw data. T
 % %------------------------------------------------------------
 % \subsection{Introduction}
 % %------------------------------------------------------------
-The USGS organizes hydrologic data in a standard structure.  Streamgages are located throughout the United States, and each streamgage has a unique ID (referred in this document and throughout the dataRetrieval package as \enquote{siteNumber}).  Often (but not always), these ID's are 8 digits.  The first step to finding data is discovering this siteNumber. There are many ways to do this, one is the National Water Information System: Mapper \url{http://maps.waterdata.usgs.gov/mapper/index.html}.
+The USGS organizes hydrologic data in a standard structure.  Streamgages are located throughout the United States, and each streamgage has a unique ID (referred to in this document and throughout the dataRetrievaldemo package as \enquote{siteNumber}).  Often (but not always), these IDs are 8 digits.  The first step to finding data is discovering this siteNumber. There are many ways to do this; one is the National Water Information System: Mapper \url{http://maps.waterdata.usgs.gov/mapper/index.html}.
 
 Once the siteNumber is known, the next required input for USGS data retrievals is the \enquote{parameter code}.  This is a 5-digit code that specifies the measured parameter being requested.  For example, parameter code 00631 represents \enquote{Nitrate plus nitrite, water, filtered, milligrams per liter as nitrogen}, with units of \enquote{mg/l as N}. A complete list of possible USGS parameter codes can be found at \url{http://nwis.waterdata.usgs.gov/usa/nwis/pmcodes?help}.
 
@@ -247,7 +247,7 @@ print(xtable(data.df,
 A complete list (as of September 25, 2013) is available as data attached to the package. It is accessed by the following:
 
 <<tableParameterCodesDataRetrieval>>=
-library(dataRetrieval)
+library(dataRetrievaldemo)
 parameterCdFile <-  parameterCdFile
 names(parameterCdFile)
 @
@@ -566,7 +566,7 @@ to discover many options for searching for NWIS sites. For example, you may want
 
 \url{http://waterservices.usgs.gov/nwis/site/?format=rdb&bBox=-83.0,36.5,-81.0,38.5&parameterCd=00010,00060&hasDataTypeCd=dv}
 
-The following dataRetrieval code can be used to get those sites:
+The following dataRetrievaldemo code can be used to get those sites:
 
 <<siteSearch>>=
 sites <- whatNWISsites(bBox="-83.0,36.5,-81.0,38.5", 
@@ -659,7 +659,7 @@ dataPH <- readWQPdata(statecode="US:55",
 \section{Getting Started in R}
 \label{sec:appendix1}
 %------------------------------------------------------------ 
-This section describes the options for downloading and installing the dataRetrieval package.
+This section describes the options for downloading and installing the dataRetrievaldemo package.
 
 %------------------------------------------------------------
 \subsection{New to R?}
@@ -691,18 +691,18 @@ readNWISpCode
 
 Additionally, many R packages have vignette files attached (such as this paper). To view the vignette:
 <<seeVignette,eval = FALSE>>=
-vignette(dataRetrieval)
+vignette(dataRetrievaldemo)
 @
 
 \FloatBarrier
 \clearpage
 %------------------------------------------------------------
-\subsection{R User: Installing dataRetrieval}
+\subsection{R User: Installing dataRetrievaldemo}
 %------------------------------------------------------------ 
-The following command installs dataRetrieval and subsequent required packages:
+The following command installs dataRetrievaldemo and subsequent required packages:
 
 <<installFromCran,eval = FALSE>>=
-install.packages("dataRetrieval", 
+install.packages("dataRetrievaldemo", 
 repos=c("http://usgs-r.github.com","http://cran.us.r-project.org"),
 dependencies=TRUE,
 type="both")
@@ -710,7 +710,7 @@ type="both")
 
 After installing the package, you need to open the library each time you re-start R.  This is done with the simple command:
 <<openLibraryTest, eval=FALSE>>=
-library(dataRetrieval)
+library(dataRetrievaldemo)
 @
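
To ground the siteNumber/parameter-code discussion in the vignettes above, a small sketch using lookup functions the vignettes themselves reference (readNWISsite and readNWISpCode), with the Choptank example values:

    library(dataRetrievaldemo)
    siteNumber <- "01491000"                  # Choptank River near Greensboro, MD
    parameterCd <- "00631"                    # Nitrate plus nitrite, mg/l as N
    ChoptankInfo <- readNWISsite(siteNumber)  # site-file metadata for the gage
    pCodeInfo <- readNWISpCode(parameterCd)   # parameter-code metadata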