diff --git a/R/compressData.r b/R/compressData.r
index 57cf0bf54095f5f65e8d10240187c60cbbaed52c..8f944da52846869945b42f8565cead6fa3089854 100644
--- a/R/compressData.r
+++ b/R/compressData.r
@@ -19,7 +19,9 @@
 #' value2 <- c(2,3,4)
 #' comment3 <- c("","","<")
 #' value3 <- c(3,4,5)
-#' dataInput <- data.frame(dateTime, comment1, value1, comment2, value2, comment3, value3, stringsAsFactors=FALSE)
+#' dataInput <- data.frame(dateTime, comment1, value1, 
+#'       comment2, value2, 
+#'       comment3, value3, stringsAsFactors=FALSE)
 #' compressData(dataInput)
 compressData <- function(data, interactive=TRUE){  
   
diff --git a/R/constructNWISURL.r b/R/constructNWISURL.r
index 88083fdd77f0a8cf7fa013a72b60cf8cd225eb49..d638f8d5fbc3965c18a0373c99711abd4d362c60 100644
--- a/R/constructNWISURL.r
+++ b/R/constructNWISURL.r
@@ -23,12 +23,16 @@
 #' startDate <- '1985-01-01'
 #' endDate <- ''
 #' pCode <- c("00060","00010")
-#' url_daily <- constructNWISURL(siteNumber,pCode,startDate,endDate,'dv',statCd=c("00003","00001"))
+#' url_daily <- constructNWISURL(siteNumber,pCode,
+#'            startDate,endDate,'dv',statCd=c("00003","00001"))
 #' url_unit <- constructNWISURL(siteNumber,pCode,"2012-06-28","2012-06-30",'iv')
 #' url_qw_single <- constructNWISURL(siteNumber,"01075",startDate,endDate,'qw')
-#' url_qw <- constructNWISURL(siteNumber,c('01075','00029','00453'),startDate,endDate,'qw')
-#' url_wqp <- constructNWISURL(paste("USGS",siteNumber,sep="-"),c('01075','00029','00453'),startDate,endDate,'wqp')
-#' url_daily_tsv <- constructNWISURL(siteNumber,pCode,startDate,endDate,'dv',statCd=c("00003","00001"),format="tsv")
+#' url_qw <- constructNWISURL(siteNumber,c('01075','00029','00453'),
+#'            startDate,endDate,'qw')
+#' url_wqp <- constructNWISURL(paste("USGS",siteNumber,sep="-"),c('01075','00029','00453'),
+#'            startDate,endDate,'wqp')
+#' url_daily_tsv <- constructNWISURL(siteNumber,pCode,startDate,endDate,'dv',
+#'            statCd=c("00003","00001"),format="tsv")
 constructNWISURL <- function(siteNumber,parameterCd,startDate,endDate,service,statCd="00003", format="xml",expanded=FALSE,interactive=TRUE){
 
   startDate <- formatCheckDate(startDate, "StartDate", interactive=interactive)
diff --git a/R/formatCheckSiteNumber.r b/R/formatCheckSiteNumber.r
index c277b045a08c5d9f6b93aa2bdd97017d2dd07ed3..d673a4be2befa8d366bd907db5ab1213c93696bc 100644
--- a/R/formatCheckSiteNumber.r
+++ b/R/formatCheckSiteNumber.r
@@ -9,8 +9,6 @@
 #' @examples
 #' site<- '01234567'
 #' formatCheckSiteNumber(site)
-#' site_incorrect <- '1234567'
-#' formatCheckSiteNumber(site_incorrect)
 formatCheckSiteNumber <- function(siteNumber, interactive=TRUE){  #checks for a 8 digit number
   if (nchar(siteNumber) < 8){
     if (interactive){
diff --git a/R/getDataAvailability.r b/R/getDataAvailability.r
index 5bf6fa99429b005a611e400a4d42ea156d5ed9a4..a54f9651c4e1ac7c300352d405ab29e2985fdcaf 100644
--- a/R/getDataAvailability.r
+++ b/R/getDataAvailability.r
@@ -14,32 +14,51 @@
 #' uvData <- availableData <- getDataAvailability('05114000',type="uv")
 getDataAvailability <- function(siteNumber,type=c("uv","dv","qw")){
   
-  urlSitefile <- paste("http://waterservices.usgs.gov/nwis/site?format=rdb&seriesCatalogOutput=true&sites=",siteNumber,sep = "")
+  urlSitefile <- paste("http://waterservices.usgs.gov/nwis/site/?format=rdb&seriesCatalogOutput=true&sites=",siteNumber,sep = "")
+ 
+  doc <- tryCatch({
+    h <- basicHeaderGatherer()
+    doc <- getURL(urlSitefile, headerfunction = h$update)
+    
+  }, warning = function(w) {
+    message(paste("URL caused a warning:", urlSitefile))
+    message(w)
+  }, error = function(e) {
+    message(paste("URL does not seem to exist:", urlSitefile))
+    message(e)
+    return(NA)
+  }) 
   
-  SiteFile <- read.delim(
-    urlSitefile,
-    header = TRUE,
-    quote="\"",
-    dec=".",
-    sep='\t',
-    colClasses=c('character'),
-    fill = TRUE,
-    comment.char="#")
-  
-  SiteFile <- SiteFile[-1,]
-  
-  SiteFile <- with(SiteFile, data.frame(parameter_cd=parm_cd, statCd=stat_cd, startDate=begin_date,endDate=end_date, count=count_nu,service=data_type_cd,stringsAsFactors = FALSE))
-  
-  SiteFile <- SiteFile[!is.na(SiteFile$parameter_cd),]
-  SiteFile <- SiteFile["" != SiteFile$parameter_cd,]
-  SiteFile$startDate <- as.Date(SiteFile$startDate)
-  SiteFile$endDate <- as.Date(SiteFile$endDate)
-  SiteFile$count <- as.numeric(SiteFile$count)
-  
-  pCodes <- unique(SiteFile$parameter_cd)
-  
-  pcodeINFO <- parameterCdFile[parameterCdFile$parameter_cd %in% pCodes,]
-  SiteFile <- merge(SiteFile,pcodeINFO,by="parameter_cd")
-  SiteFile <- SiteFile[SiteFile$service %in% type,]
-  return(SiteFile)
+  if(identical(as.character(h$value()["Content-Type"]), "text/plain;charset=UTF-8")){
+    SiteFile <- read.delim(
+      textConnection(doc),
+      header = TRUE,
+      quote="\"",
+      dec=".",
+      sep='\t',
+      colClasses=c('character'),
+      fill = TRUE,
+      comment.char="#")
+    
+    SiteFile <- SiteFile[-1,]
+    
+    SiteFile <- with(SiteFile, data.frame(parameter_cd=parm_cd, statCd=stat_cd, startDate=begin_date,endDate=end_date, count=count_nu,service=data_type_cd,stringsAsFactors = FALSE))
+    
+    SiteFile <- SiteFile[!is.na(SiteFile$parameter_cd),]
+    SiteFile <- SiteFile["" != SiteFile$parameter_cd,]
+    SiteFile$startDate <- as.Date(SiteFile$startDate)
+    SiteFile$endDate <- as.Date(SiteFile$endDate)
+    SiteFile$count <- as.numeric(SiteFile$count)
+    
+    pCodes <- unique(SiteFile$parameter_cd)
+    
+    pcodeINFO <- parameterCdFile[parameterCdFile$parameter_cd %in% pCodes,]
+    SiteFile <- merge(SiteFile,pcodeINFO,by="parameter_cd")
+    SiteFile <- SiteFile[SiteFile$service %in% type,]
+    return(SiteFile)
+  } else {
+    message(paste("URL caused an error:", urlSitefile))
+    message("Content-Type=",h$value()["Content-Type"])
+    return(NA)
+  }
 }
\ No newline at end of file
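The guarded-retrieval pattern introduced here, and repeated in the files below, fetches the response once with RCurl and parses the body only when the reported Content-Type matches the expected text format. A minimal standalone sketch of that pattern, assuming RCurl is installed and the NWIS site service is reachable (the site number is illustrative):

library(RCurl)

# Fetch once, keeping the response headers alongside the body:
urlSitefile <- paste("http://waterservices.usgs.gov/nwis/site/",
                     "?format=rdb&seriesCatalogOutput=true&sites=05114000",
                     sep = "")
h <- basicHeaderGatherer()
doc <- tryCatch({
  getURL(urlSitefile, headerfunction = h$update)
}, error = function(e) {
  message(paste("URL does not seem to exist:", urlSitefile))
  NA
})

# Parse only if the service answered with tab-delimited RDB text:
if (identical(as.character(h$value()["Content-Type"]), "text/plain;charset=UTF-8")) {
  SiteFile <- read.delim(textConnection(doc), sep = "\t",
                         colClasses = "character", comment.char = "#")
}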
diff --git a/R/getDataFromFile.r b/R/getDataFromFile.r
index ac85f8829278ec9584efbcd43d52fd0a84b2b751..34da1fee126b6303e9339e2a43dd9fc4453fce77 100644
--- a/R/getDataFromFile.r
+++ b/R/getDataFromFile.r
@@ -17,7 +17,7 @@
 #' filePath <- system.file("extdata", package="dataRetrieval")
 #' filePath <- paste(filePath,"/",sep="")
 #' fileName <- 'ChoptankRiverFlow.txt'
-#' getDataFromFile(filePath,fileName, separator="\t")
+#' ChopData <- getDataFromFile(filePath,fileName, separator="\t")
 getDataFromFile <- function (filePath,fileName,hasHeader=TRUE,separator=","){
   totalPath <- paste(filePath,fileName,sep="");  
   tmp <- read.delim(  
diff --git a/R/getGeneralWQPData.R b/R/getGeneralWQPData.R
index f163f623a7bdd04b8d148b918c04013b14f662de..3e7eb95f47316200c8cf590a0a79a4f1ed01bcf0 100644
--- a/R/getGeneralWQPData.R
+++ b/R/getGeneralWQPData.R
@@ -31,6 +31,44 @@ getGeneralWQPData <- function(...){
                    urlCall,
                    "&mimeType=tsv",sep = "")
   
-  suppressWarnings(retval <- read.delim(urlCall, header = TRUE, quote="\"", dec=".", sep='\t', colClasses=c('character'), fill = TRUE))
-  return(retval)
+  doc <- tryCatch({
+    h <- basicHeaderGatherer()
+    doc <- getURL(urlCall, headerfunction = h$update)
+    
+  }, warning = function(w) {
+    message(paste("URL caused a warning:", urlCall))
+    message(w)
+  }, error = function(e) {
+    message(paste("URL does not seem to exist:", urlCall))
+    message(e)
+    return(NA)
+  })
+  
+  if(identical(as.character(h$value()["Content-Type"]), "text/tab-separated-values;charset=UTF-8")){
+    
+    numToBeReturned <- as.numeric(h$value()["Total-Result-Count"])
+    
+    if (!is.na(numToBeReturned) && numToBeReturned != 0){
+      retval <- read.delim(textConnection(doc), header = TRUE, quote="\"", 
+                           dec=".", sep='\t', 
+                           colClasses=c('character'), 
+                           fill = TRUE)
+      
+      actualNumReturned <- nrow(retval)
+      
+      if(actualNumReturned != numToBeReturned) warning(numToBeReturned, " sample results were expected, ", actualNumReturned, " were returned")
+      
+      return(retval)
+    } else {
+      warning(paste("No data to retrieve from",urlCall))
+      return(NA)
+    }
+    
+  } else {
+    message(paste("URL caused an error:", urlCall))
+    message("Content-Type=",h$value()["Content-Type"])
+    return(NA)
+  }
+
+  
 }
\ No newline at end of file
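The Water Quality Portal reports the expected row count in a Total-Result-Count response header, which the code above compares against the parsed table. A hedged sketch of just that cross-check, with an illustrative WQP query:

library(RCurl)

# Hypothetical query; only the header-versus-rows comparison is the point.
urlCall <- paste("http://www.waterqualitydata.us/Result/search?",
                 "siteid=USGS-01594440&characteristicName=Phosphorus",
                 "&mimeType=tsv", sep = "")
h <- basicHeaderGatherer()
doc <- getURL(urlCall, headerfunction = h$update)

numToBeReturned <- as.numeric(h$value()["Total-Result-Count"])
retval <- read.delim(textConnection(doc), sep = "\t", colClasses = "character")
if (!is.na(numToBeReturned) && nrow(retval) != numToBeReturned) {
  warning(numToBeReturned, " sample results were expected, ",
          nrow(retval), " were returned")
}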
diff --git a/R/getRDB1Data.r b/R/getRDB1Data.r
index 2fe495e7326107c44d91c48f5460f0bf16e4b197..de62aaf106a4a0793ff4e8ea08621ade487018f0 100644
--- a/R/getRDB1Data.r
+++ b/R/getRDB1Data.r
@@ -8,42 +8,81 @@
 #' @return data a data frame containing columns agency, site, dateTime, values, and remark codes for all requested combinations
 #' @export
 #' @examples
-#' sites <- "02177000"
+#' siteNumber <- "02177000"
 #' startDate <- "2012-09-01"
 #' endDate <- "2012-10-01"
 #' offering <- '00003'
 #' property <- '00060'
-#' obs_url <- constructNWISURL(sites,property,startDate,endDate,'dv',format='tsv')
+#' obs_url <- constructNWISURL(siteNumber,property,
+#'          startDate,endDate,'dv',format='tsv')
 #' data <- getRDB1Data(obs_url)
-#' urlMulti <- constructNWISURL("04085427",c("00060","00010"),startDate,endDate,'dv',statCd=c("00003","00001"),'tsv')
+#' urlMulti <- constructNWISURL("04085427",c("00060","00010"),
+#'          startDate,endDate,'dv',statCd=c("00003","00001"),'tsv')
 #' multiData <- getRDB1Data(urlMulti)
+#' unitDataURL <- constructNWISURL(siteNumber,property,
+#'          as.character(Sys.Date()),as.character(Sys.Date()),'uv',format='tsv')
+#' unitData <- getRDB1Data(unitDataURL, asDateTime=TRUE)
 getRDB1Data <- function(obs_url,asDateTime=FALSE){
-  tmp <- read.delim(  
-    obs_url, 
-    header = TRUE, 
-    quote="\"", 
-    dec=".", 
-    sep='\t',
-    colClasses=c('character'),
-    fill = TRUE, 
-    comment.char="#")
   
-  dataType <- tmp[1,]
-  data <- tmp[-1,]
+  retval <- tryCatch({
+    h <- basicHeaderGatherer()
+    doc <- getURL(obs_url, headerfunction = h$update)
+    
+  }, warning = function(w) {
+    message(paste("URL caused a warning:", obs_url))
+    message(w)
+  }, error = function(e) {
+    message(paste("URL does not seem to exist:", obs_url))
+    message(e)
+    return(NA)
+  })   
   
-  if(sum(regexpr('d$', dataType) > 0) > 0){
-    if (asDateTime){
-      data[,regexpr('d$', dataType) > 0] <- as.POSIXct(strptime(data[,regexpr('d$', dataType) > 0], "%Y-%m-%d %H:%M"))
-    } else {
-      data[,regexpr('d$', dataType) > 0] <- as.Date(data[,regexpr('d$', dataType) > 0])
+  if(identical(as.character(h$value()["Content-Type"]), "text/plain;charset=UTF-8")){
+    
+    tmp <- read.delim(  
+      textConnection(doc), 
+      header = TRUE, 
+      quote="\"", 
+      dec=".", 
+      sep='\t',
+      colClasses=c('character'),
+      fill = TRUE, 
+      comment.char="#")
+    
+    dataType <- tmp[1,]
+    data <- tmp[-1,]
+    
+    if(sum(regexpr('d$', dataType) > 0) > 0){
+      if (asDateTime){
+        
+        timeZoneLibrary <- setNames(c("America/New_York","America/New_York","America/Chicago","America/Chicago",
+                                      "America/Denver","America/Denver","America/Los_Angeles","America/Los_Angeles",
+                                      "America/Anchorage","America/Anchorage","America/Honolulu","America/Honolulu"),
+                                    c("EST","EDT","CST","CDT","MST","MDT","PST","PDT","AKST","AKDT","HAST","HST"))
+        timeZone <- as.character(timeZoneLibrary[data$tz_cd])
+        if(length(unique(timeZone)) == 1){
+          data[,regexpr('d$', dataType) > 0] <- as.POSIXct(data[,regexpr('d$', dataType) > 0], format = "%Y-%m-%d %H:%M", tz = unique(timeZone))
+        } else {
+          warning("Mixed time zone information")
+          for(i in seq_along(row.names(data))){
+            data[i,regexpr('d$', dataType) > 0] <- as.POSIXct(data[i,regexpr('d$', dataType) > 0], format = "%Y-%m-%d %H:%M", tz = timeZone[i])
+          }
+        }
+        
+      } else {
+        data[,regexpr('d$', dataType) > 0] <- as.Date(data[,regexpr('d$', dataType) > 0])
+      }
     }
+    
+    if (sum(regexpr('n$', dataType) > 0) > 0){
+      tempDF <- data[,which(regexpr('n$', dataType) > 0)]
+      tempDF <- suppressWarnings(sapply(tempDF, function(x) as.numeric(x)))  
+      data[,which(regexpr('n$', dataType) > 0)] <- tempDF
+    }
+    row.names(data) <- NULL
+    return(data)
+  } else {
+    message(paste("URL caused an error:", obs_url))
+    message("Content-Type=",h$value()["Content-Type"])
   }
-  
-  if (sum(regexpr('n$', dataType) > 0) > 0){
-    tempDF <- data[,which(regexpr('n$', dataType) > 0)]
-    tempDF <- suppressWarnings(sapply(tempDF, function(x) as.numeric(x)))  
-    data[,which(regexpr('n$', dataType) > 0)] <- tempDF
-  }
-  row.names(data) <- NULL
-  return(data)
 }
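The timeZoneLibrary added above is an ordinary named character vector, so translating NWIS tz_cd abbreviations into Olson zone names is plain vectorized indexing. For reference, the lookup in isolation:

# The abbreviation-to-Olson lookup table used in getRDB1Data:
timeZoneLibrary <- setNames(c("America/New_York","America/New_York",
                              "America/Chicago","America/Chicago",
                              "America/Denver","America/Denver",
                              "America/Los_Angeles","America/Los_Angeles",
                              "America/Anchorage","America/Anchorage",
                              "America/Honolulu","America/Honolulu"),
                            c("EST","EDT","CST","CDT","MST","MDT",
                              "PST","PDT","AKST","AKDT","HAST","HST"))

# Indexing by a vector of codes vectorizes the translation:
as.character(timeZoneLibrary[c("EST","EST","CDT")])
# "America/New_York" "America/New_York" "America/Chicago"

# An unrecognized code yields NA, which is why the single-zone branch
# checks unique(timeZone) before converting the whole column:
as.character(timeZoneLibrary["UTC"])
# NA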
diff --git a/R/getRawQWData.r b/R/getRawQWData.r
index b4be17a3d28a82a44bf25f8276224eb9303dc6ac..3b748d0bf2a6cb0b489cc10ed9477c047ba4820d 100644
--- a/R/getRawQWData.r
+++ b/R/getRawQWData.r
@@ -5,7 +5,7 @@
 #' A list of statistic codes can be found here: \url{http://nwis.waterdata.usgs.gov/nwis/help/?read_file=stat&format=table}
 #'
 #' @param siteNumber string USGS site number.  This is usually an 8 digit number
-#' @param parameterCd vector of USGS 5-digit parameter code. Leaving this blank will return all of the measured values during the specified time period.
+#' @param parameterCd vector of USGS 5-digit parameter codes or characteristicName strings. Leaving this blank will return all of the measured values during the specified time period.
 #' @param startDate string starting date for data retrieval in the form YYYY-MM-DD.
 #' @param endDate string ending date for data retrieval in the form YYYY-MM-DD.
 #' @param interactive logical Option for interactive mode.  If true, there is user interaction for error handling and data checks.
@@ -24,17 +24,43 @@ retrieveWQPqwData <- function(siteNumber,parameterCd,startDate,endDate,interacti
 
   url <- constructNWISURL(siteNumber,parameterCd,startDate,endDate,"wqp",interactive=interactive)
 
-  h <- basicHeaderGatherer()
-  doc <- getURI(url, headerfunction = h$update)
-  numToBeReturned <- as.numeric(h$value()["Total-Result-Count"])
-  if (!is.na(numToBeReturned) | numToBeReturned != 0){  
-    suppressWarnings(retval <- read.delim(url, header = TRUE, quote="\"", dec=".", sep='\t', colClasses=c('character'), fill = TRUE))
-    actualNumReturned <- nrow(retval)
+  retval <- tryCatch({
+    h <- basicHeaderGatherer()
+    doc <- getURL(url, headerfunction = h$update)
+
+  }, warning = function(w) {
+    message(paste("URL caused a warning:", url))
+    message(w)
+  }, error = function(e) {
+    message(paste("URL does not seem to exist:", url))
+    message(e)
+    return(NA)
+  })
+  
+  if(identical(as.character(h$value()["Content-Type"]), "text/tab-separated-values;charset=UTF-8")){
+  
+    numToBeReturned <- as.numeric(h$value()["Total-Result-Count"])
     
-    if(actualNumReturned != numToBeReturned) warning(numToBeReturned, " sample results were expected, ", actualNumReturned, " were returned")
+    if (!is.na(numToBeReturned) && numToBeReturned != 0){
     
-    return(retval)
+      retval <- read.delim(textConnection(doc), header = TRUE, quote="\"", 
+                 dec=".", sep='\t', 
+                 colClasses=c('character'), 
+                 fill = TRUE)    
+      actualNumReturned <- nrow(retval)
+      
+      if(actualNumReturned != numToBeReturned) warning(numToBeReturned, " sample results were expected, ", actualNumReturned, " were returned")
+      
+      return(retval)
+  
+    } else {
+      warning("No data to retrieve")
+      return(NA)
+    }
   } else {
-    warning("No data to retrieve")
+    message(paste("URL caused an error:", url))
+    message("Content-Type=",h$value()["Content-Type"])
+    return(NA)
   }
+  
 }
diff --git a/R/getSiteFileData.r b/R/getSiteFileData.r
index 46cf018792e9aa2493f464724d20e00803f71679..228a49c701f56fcdadd9d442482d132b006b9418 100644
--- a/R/getSiteFileData.r
+++ b/R/getSiteFileData.r
@@ -15,25 +15,45 @@ getSiteFileData <- function(siteNumber="",interactive=TRUE){
   # Checking for 8 digit site ID:
   siteNumber <- formatCheckSiteNumber(siteNumber, interactive=interactive)
   
-  urlSitefile <- paste("http://waterservices.usgs.gov/nwis/site?format=rdb&siteOutput=Expanded&sites=",siteNumber,sep = "")
+  urlSitefile <- paste("http://waterservices.usgs.gov/nwis/site/?format=rdb&siteOutput=Expanded&sites=",siteNumber,sep = "")
   
-  SiteFile <- read.delim(
-    urlSitefile,
-    header = TRUE,
-    quote="\"",
-    dec=".",
-    sep='\t',
-    colClasses=c('character'),
-    fill = TRUE,
-    comment.char="#")
+  doc <- tryCatch({
+    h <- basicHeaderGatherer()
+    doc <- getURL(urlSitefile, headerfunction = h$update)
+    
+  }, warning = function(w) {
+    message(paste("URL caused a warning:", urlSitefile))
+    message(w)
+  }, error = function(e) {
+    message(paste("URL does not seem to exist:", urlSitefile))
+    message(e)
+    return(NA)
+  }) 
   
-  INFO <- SiteFile[-1,]
-  names(INFO) <- gsub("_",".",names(INFO))
+  if(identical(as.character(h$value()["Content-Type"]), "text/plain;charset=UTF-8")){
   
-  INFO$queryTime <- Sys.time()
-  INFO$dec.lat.va <- as.numeric(INFO$dec.lat.va)
-  INFO$dec.long.va <- as.numeric(INFO$dec.long.va)
-  INFO$alt.va <- as.numeric(INFO$alt.va)
-  
-  return(INFO)
+    SiteFile <- read.delim(
+      textConnection(doc),
+      header = TRUE,
+      quote="\"",
+      dec=".",
+      sep='\t',
+      colClasses=c('character'),
+      fill = TRUE,
+      comment.char="#")
+    
+    INFO <- SiteFile[-1,]
+    names(INFO) <- gsub("_",".",names(INFO))
+    
+    INFO$queryTime <- Sys.time()
+    INFO$dec.lat.va <- as.numeric(INFO$dec.lat.va)
+    INFO$dec.long.va <- as.numeric(INFO$dec.long.va)
+    INFO$alt.va <- as.numeric(INFO$alt.va)
+    
+    return(INFO)
+  } else {
+    message(paste("URL caused an error:", urlSitefile))
+    message("Content-Type=",h$value()["Content-Type"])
+    return(NA)
+  }
 }
diff --git a/R/getWQPSites.R b/R/getWQPSites.R
index 2cf54c6d36cdf403759ba12d655086152d7d4d09..c0ace34ad8c86456529b1f4b355ce6c7434ec22a 100644
--- a/R/getWQPSites.R
+++ b/R/getWQPSites.R
@@ -30,6 +30,43 @@ getWQPSites <- function(...){
                urlCall,
                "&mimeType=tsv",sep = "")
   
-  retval <- suppressWarnings(read.delim(urlCall, header = TRUE, quote="\"", dec=".", sep='\t', colClasses=c('character'), fill = TRUE))
-  return(retval)
+  retval <- tryCatch({
+    h <- basicHeaderGatherer()
+    doc <- getURL(urlCall, headerfunction = h$update)
+    
+  }, warning = function(w) {
+    message(paste("URL caused a warning:", urlCall))
+    message(w)
+  }, error = function(e) {
+    message(paste("URL does not seem to exist:", urlCall))
+    message(e)
+    return(NA)
+  })
+  
+  if(identical(as.character(h$value()["Content-Type"]), "text/tab-separated-values;charset=UTF-8")){
+  
+    numToBeReturned <- as.numeric(h$value()["Total-Site-Count"])
+    
+    if (!is.na(numToBeReturned) && numToBeReturned != 0){
+   
+      retval <- read.delim(textConnection(doc), header = TRUE, quote="\"", 
+                           dec=".", sep='\t', 
+                           colClasses=c('character'), 
+                           fill = TRUE)    
+      actualNumReturned <- nrow(retval)
+      
+      if(actualNumReturned != numToBeReturned) warning(numToBeReturned, " sites were expected, ", actualNumReturned, " were returned")
+      
+      return(retval)
+      
+    } else {
+      warning(paste("No data to retrieve from",urlCall))
+      return(NA)
+    }
+  } else {
+    message(paste("URL caused an error:", urlCall))
+    message("Content-Type=",h$value()["Content-Type"])
+    return(NA)
+  }
+
 }
diff --git a/R/getWaterML1Data.r b/R/getWaterML1Data.r
index 0957c42fae272ef2b8c7092029a14b36ea9b48ed..5c7acac75634f695214be2b46771f446668ae2ae 100644
--- a/R/getWaterML1Data.r
+++ b/R/getWaterML1Data.r
@@ -8,32 +8,46 @@
 #' @export
 #' @import XML
 #' @examples
-#' sites <- "02177000"
+#' siteNumber <- "02177000"
 #' startDate <- "2012-09-01"
 #' endDate <- "2012-10-01"
 #' offering <- '00003'
 #' property <- '00060'
-#' obs_url <- constructNWISURL(sites,property,startDate,endDate,'dv')
+#' obs_url <- constructNWISURL(siteNumber,property,startDate,endDate,'dv')
 #' data <- getWaterML1Data(obs_url)
-#' urlMulti <- constructNWISURL("04085427",c("00060","00010"),startDate,endDate,'dv',statCd=c("00003","00001"))
+#' urlMulti <- constructNWISURL("04085427",c("00060","00010"),
+#'             startDate,endDate,'dv',statCd=c("00003","00001"))
 #' multiData <- getWaterML1Data(urlMulti)
-#' goundwaterExampleURL <- "http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=431049071324301&startDT=2013-10-01&endDT=2014-06-30"
+#' goundwaterExampleURL <- 
+#'     "http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=431049071324301&startDT=2013-10-01&endDT=2014-06-30"
 #' groundWater <- getWaterML1Data(goundwaterExampleURL)
+#' unitDataURL <- constructNWISURL(siteNumber,property,
+#'          as.character(Sys.Date()),as.character(Sys.Date()),'uv',format='xml')
+#' unitData <- getWaterML1Data(unitDataURL)
 getWaterML1Data <- function(obs_url){
-
-  # This is more elegent, but requires yet another package dependency RCurl...which I now require for wqp
-#   content <- getURLContent(obs_url,.opts=list(timeout.ms=500000))
-#   test <- capture.output(tryCatch(xmlTreeParse(content, getDTD=FALSE, useInternalNodes=TRUE),"XMLParserErrorList" = function(e) {cat("incomplete",e$message)}))
-#   while (length(grep("<?xml",test))==0) {
-#     content <- getURLContent(obs_url,.opts=list(timeout.ms=500000))
-#     test <- capture.output(tryCatch(xmlTreeParse(content, getDTD=FALSE, useInternalNodes=TRUE),"XMLParserErrorList" = function(e) {cat("incomplete",e$message)}))
-#   }
-#   doc <- htmlTreeParse(content, getDTD=TRUE, useInternalNodes=TRUE)
-#   require(XML)
   
-  doc <- xmlTreeParse(obs_url, getDTD = FALSE, useInternalNodes = TRUE)
+  h <- basicHeaderGatherer()
+  doc <- tryCatch({
+    returnedDoc <- getURI(obs_url, headerfunction = h$update)
+    if(h$value()["Content-Type"] == "text/xml;charset=UTF-8"){
+      xmlTreeParse(returnedDoc, getDTD = FALSE, useInternalNodes = TRUE)
+    } else {
+      message(paste("URL caused an error:", obs_url))
+      message("Content-Type=",h$value()["Content-Type"])
+      return(NA)
+    }   
+    
+  }, warning = function(w) {
+    message(paste("URL caused a warning:", obs_url))
+    message(w)
+  }, error = function(e) {
+    message(paste("URL does not seem to exist:", obs_url))
+    message(e)
+    return(NA)
+  }) 
+  
+  
   doc <- xmlRoot(doc)
-
   ns <- xmlNamespaceDefinitions(doc, simplify = TRUE)  
   timeSeries <- xpathApply(doc, "//ns1:timeSeries", namespaces = ns)
   
@@ -62,9 +76,9 @@ getWaterML1Data <- function(obs_url){
       methodID <- padVariable(methodID,2)
       
       value <- as.numeric(xpathSApply(subChunk, "ns1:value",namespaces = chunkNS, xmlValue))  
-      dateTime <- strptime(xpathSApply(subChunk, "ns1:value/@dateTime",namespaces = chunkNS),"%Y-%m-%dT%H:%M:%S")
+      dateTime <- as.POSIXct(strptime(xpathSApply(subChunk, "ns1:value/@dateTime",namespaces = chunkNS),"%Y-%m-%dT%H:%M:%S"))
       tzHours <- substr(xpathSApply(subChunk, "ns1:value/@dateTime",namespaces = chunkNS),
-                        23,
+                        24,
                         nchar(xpathSApply(subChunk, "ns1:value/@dateTime",namespaces = chunkNS)))
       if(mean(nchar(tzHours),rm.na=TRUE) == 6){
         tzAbbriev <- zoneAbbrievs[tzHours]
@@ -72,6 +86,20 @@ getWaterML1Data <- function(obs_url){
         tzAbbriev <- rep(as.character(zoneAbbrievs[1]),length(dateTime))
       }
       
+      timeZoneLibrary <- setNames(c("America/New_York","America/New_York","America/Chicago","America/Chicago",
+                                    "America/Denver","America/Denver","America/Los_Angeles","America/Los_Angeles",
+                                    "America/Anchorage","America/Anchorage","America/Honolulu","America/Honolulu"),
+                                  c("EST","EDT","CST","CDT","MST","MDT","PST","PDT","AKST","AKDT","HAST","HST"))
+      timeZone <- as.character(timeZoneLibrary[tzAbbriev])
+      if(length(unique(timeZone)) == 1){
+        dateTime <- as.POSIXct(as.character(dateTime), tz = unique(timeZone))
+      } else {
+        warning("Mixed time zone information")
+        for(i in seq_along(dateTime)){
+          dateTime[i] <- as.POSIXct(as.character(dateTime[i]), tz = timeZone[i])
+        }
+      }
+      
       qualifier <- as.character(xpathSApply(subChunk, "ns1:value/@qualifiers",namespaces = chunkNS))
 
       valueName <- paste(methodID,pCode,statCd,sep="_")
@@ -86,21 +114,22 @@ getWaterML1Data <- function(obs_url){
         df <- data.frame(dateTime,
                          tzAbbriev,
                          get(valueName),
-                         get(qualName)
-        )
-        names(df) <- c("dateTime","tz_cd",valueName,qualName)
+                         get(qualName),
+                         stringsAsFactors=FALSE)
+        
+        names(df) <- c("datetime","tz_cd",valueName,qualName)
       } else {
         df <- data.frame(dateTime,
                          tzAbbriev,
-                         get(valueName)
-        )
-        names(df) <- c("dateTime","tz_cd",valueName)       
+                         get(valueName),stringsAsFactors=FALSE)
+        
+        names(df) <- c("datetime","tz_cd",valueName)       
       }
  
       if (1 == i & valuesIndex[1] == j){
         mergedDF <- df
       } else {
-        mergedDF <- merge(mergedDF, df,by=c("dateTime","tz_cd"),all=TRUE)
+        mergedDF <- merge(mergedDF, df,by=c("datetime","tz_cd"),all=TRUE)
       }
     }
   }
@@ -108,8 +137,8 @@ getWaterML1Data <- function(obs_url){
   agencyCd <- as.character(xpathSApply(timeSeries[[1]], "ns1:sourceInfo/ns1:siteCode/@agencyCode",namespaces = chunkNS))
   siteNo <- as.character(xpathSApply(timeSeries[[1]], "ns1:sourceInfo/ns1:siteCode",namespaces = chunkNS, xmlValue))
   
-  mergedDF$agency <- rep(agencyCd, nrow(mergedDF))
-  mergedDF$site <- rep(siteNo, nrow(mergedDF))
+  mergedDF$agency_cd <- rep(agencyCd, nrow(mergedDF))
+  mergedDF$site_no <- rep(siteNo, nrow(mergedDF))
   
   reorder <- c(ncol(mergedDF)-1, ncol(mergedDF), 1:(ncol(mergedDF)-2))
   
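Both the WaterML and RDB parsers fall back to an element-wise loop when the returned time zone codes are mixed, because a POSIXct vector carries a single tzone attribute. The loop preserves each instant while the vector keeps one display zone; a small sketch of that behavior with illustrative timestamps:

# The same wall-clock time in two zones is two different instants:
stamps <- c("2014-09-01 12:00", "2014-09-01 12:00")
zones  <- c("America/New_York", "America/Chicago")

dateTime <- as.POSIXct(stamps, tz = zones[1])   # one tzone for the vector
for (i in seq_along(stamps)) {
  dateTime[i] <- as.POSIXct(stamps[i], tz = zones[i])
}

# Both instants are stored correctly (one hour apart), even though the
# vector still prints everything in its single tzone attribute:
diff(as.numeric(dateTime)) / 3600
# 1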
diff --git a/R/getWaterML2Data.r b/R/getWaterML2Data.r
index 60d4c8fad23ce67a92545dfc4789426d083b9770..45137c609a56d88577e10f7f1c15ed68a7e37346 100644
--- a/R/getWaterML2Data.r
+++ b/R/getWaterML2Data.r
@@ -8,11 +8,30 @@
 #' @import XML
 #' @importFrom plyr rbind.fill.matrix
 #' @examples
-#' URL <- "http://webvastage6.er.usgs.gov/ogc-swie/wml2/dv/sos?request=GetObservation&featureID=435601087432701&observedProperty=00045&beginPosition=2012-01-01&offering=Sum"
-#' dataReturned3 <- getWaterML2Data(URL)
+#' URL <- "http://waterservices.usgs.gov/nwis/dv/?format=waterml,2.0&sites=01646500&startDT=2014-09-01&endDT=2014-09-08&statCd=00003&parameterCd=00060"
+#' \dontrun{dataReturned3 <- getWaterML2Data(URL)}
 getWaterML2Data <- function(obs_url){
   
-  doc <- xmlTreeParse(obs_url, getDTD = FALSE, useInternalNodes = TRUE)
+  h <- basicHeaderGatherer()
+  doc <- tryCatch({
+    returnedDoc <- getURL(obs_url, headerfunction = h$update)
+    if(h$value()["Content-Type"] == "text/xml;charset=UTF-8"){
+      xmlTreeParse(returnedDoc, getDTD = FALSE, useInternalNodes = TRUE)
+    } else {
+      message(paste("URL caused an error:", obs_url))
+      message("Content-Type=",h$value()["Content-Type"])
+      return(NA)
+    }   
+    
+  }, warning = function(w) {
+    message(paste("URL caused a warning:", obs_url))
+    message(w)
+  }, error = function(e) {
+    message(paste("URL does not seem to exist:", obs_url))
+    message(e)
+    return(NA)
+  }) 
+  
   doc <- xmlRoot(doc)
   
   ns <- xmlNamespaceDefinitions(doc, simplify = TRUE)  
diff --git a/R/renameColumns.R b/R/renameColumns.R
index 1a9774964a3dd3405d98916b7eb2714ca122be3b..e1a4cf7fb91d2a8f74e68e0a757125d4f3a59a63 100644
--- a/R/renameColumns.R
+++ b/R/renameColumns.R
@@ -9,7 +9,8 @@
 #' @examples
 #' # This example requires an internet connection to run
 #' siteNumber <- '05114000' 
-#' rawData <- retrieveNWISdvData(siteNumber,c("00010","00060","00300"),"2001-01-01","2002-01-01",statCd=c("00001","00003"))
+#' rawData <- retrieveNWISdvData(siteNumber,c("00010","00060","00300"),
+#'           "2001-01-01","2002-01-01",statCd=c("00001","00003"))
 #' rawData <- renameColumns(rawData)
 #' today <- as.character(Sys.Date())
 #' rawData2 <- retrieveNWISunitData(siteNumber,c("00010","00060"),today,today)
diff --git a/R/retrieveNWISData.r b/R/retrieveNWISData.r
index fe1288743015113045733a966901c57f9b7ca35b..1053808dfa521fc07224d1fcdef6d0776e34d685 100644
--- a/R/retrieveNWISData.r
+++ b/R/retrieveNWISData.r
@@ -23,9 +23,12 @@
 #' endDate <- '2012-06-30'
 #' pCode <- '00060'
 #' rawDailyQ <- retrieveNWISdvData(siteNumber,pCode, startDate, endDate)
-#' rawDailyTemperature <- retrieveNWISdvData(siteNumber,'00010', startDate, endDate, statCd='00001')
-#' rawDailyTemperatureTSV <- retrieveNWISdvData(siteNumber,'00010', startDate, endDate, statCd='00001',format='tsv')
-#' rawDailyQAndTempMeanMax <- retrieveNWISdvData(siteNumber,c('00010','00060'), startDate, endDate, statCd=c('00001','00003'))
+#' rawDailyTemperature <- retrieveNWISdvData(siteNumber,'00010', 
+#'        startDate, endDate, statCd='00001')
+#' rawDailyTemperatureTSV <- retrieveNWISdvData(siteNumber,'00010', 
+#'        startDate, endDate, statCd='00001',format='tsv')
+#' rawDailyQAndTempMeanMax <- retrieveNWISdvData(siteNumber,c('00010','00060'),
+#'        startDate, endDate, statCd=c('00001','00003'))
 retrieveNWISdvData <- function (siteNumber,parameterCd,startDate,endDate,statCd="00003",format="tsv",interactive=TRUE){  
   
   url <- constructNWISURL(siteNumber,parameterCd,startDate,endDate,"dv",statCd=statCd,format=format,interactive=interactive)
diff --git a/R/retrieveNWISqwData.r b/R/retrieveNWISqwData.r
index 57bce34c4739d2c84b18daed3e95e5578ebed4f9..4ce5ff5340152f7065febd14fae7975b812f457f 100644
--- a/R/retrieveNWISqwData.r
+++ b/R/retrieveNWISqwData.r
@@ -33,67 +33,87 @@ retrieveNWISqwData <- function (siteNumber,pCodes,startDate,endDate,expanded=FAL
   
   url <- constructNWISURL(siteNumber,pCodes,startDate,endDate,"qw",expanded=expanded,interactive=interactive)
   
-  tmp <- read.delim(  
-    url, 
-    header = TRUE, 
-    quote="\"", 
-    dec=".", 
-    sep='\t',
-    colClasses=c('character'),
-    fill = TRUE, 
-    comment.char="#")
-
-  dataType <- tmp[1,]
-  data <- tmp[-1,]
-  row.names(data) <- NULL
+  retval <- tryCatch({
+    h <- basicHeaderGatherer()
+    doc <- getURL(url, headerfunction = h$update)
+    
+  }, warning = function(w) {
+    message(paste("URL caused a warning:", url))
+    message(w)
+  }, error = function(e) {
+    message(paste("URL does not seem to exist:", url))
+    message(e)
+    return(NA)
+  })   
   
+  if(identical(as.character(h$value()["Content-Type"]), "text/plain")){
   
+    tmp <- read.delim(  
+      textConnection(doc), 
+      header = TRUE, 
+      quote="\"", 
+      dec=".", 
+      sep='\t',
+      colClasses=c('character'),
+      fill = TRUE, 
+      comment.char="#")
   
-  if(expanded){
-    data$site <- with(data,paste(agency_cd,site_no,sep="-"))
-    data$dateTime <- with(data, as.POSIXct(paste(sample_dt,sample_tm,sep=" "),tz="UTC"))
-    data$dateTimeEnd <- rep(as.POSIXct(NA), length(data$sample_end_tm))
-    
-    if (any("" != data[["sample_end_dt"]])){
-      data$sample_end_dt["" == data$sample_end_dt] <- NA
-      data$sample_end_tm["" == data$sample_end_tm] <- NA
-      
-      data$dateTimeEnd[!is.na(data$sample_end_tm) & !is.na(data$sample_end_dt)] <- as.POSIXct(paste(data$sample_end_dt[!is.na(data$sample_end_tm) & !is.na(data$sample_end_dt)],
-                            data$sample_end_tm[!is.na(data$sample_end_tm) & !is.na(data$sample_end_dt)],sep=" "),tz="UTC")
-    } 
+    dataType <- tmp[1,]
+    data <- tmp[-1,]
+    row.names(data) <- NULL
     
-    data$result_va <- as.numeric(data$result_va)
-    data$rpt_lev_va <- as.numeric(data$rpt_lev_va)
-    rmCol <- c("agency_cd","site_no","tm_datum_rlbty_cd",
-               "coll_ent_cd","medium_cd","tu_id","body_part_id",
-               "sample_end_dt","sample_end_tm","sample_dt","sample_tm",
-               "sample_start_time_datum_cd","anl_ent_cd","lab_std_va")
-    data <- data[,!(names(data) %in% rmCol)]
     
-    longDF <- melt(data, c("parm_cd","dateTime","site","dateTimeEnd"))
-    wideDF <- dcast(longDF, ... ~ variable + parm_cd )
-    wideDF[,grep("_va_",names(wideDF))] <- sapply(wideDF[,grep("_va_",names(wideDF))], function(x) as.numeric(x))
     
-    data <- wideDF[,c(1,2,3,(3+order(sapply(strsplit(names(wideDF)[c(-1:-3)],"_"), function(x) x[length(x)]))))]
-    if (all(is.na(data$dateTimeEnd))){
-      data$dateTimeEnd <- NULL
-    }    
+    if(expanded){
+      data$site <- with(data,paste(agency_cd,site_no,sep="-"))
+      data$dateTime <- with(data, as.POSIXct(paste(sample_dt,sample_tm,sep=" "),tz="UTC"))
+      data$dateTimeEnd <- rep(as.POSIXct(NA), length(data$sample_end_tm))
+      
+      if (any("" != data[["sample_end_dt"]])){
+        data$sample_end_dt["" == data$sample_end_dt] <- NA
+        data$sample_end_tm["" == data$sample_end_tm] <- NA
+        
+        data$dateTimeEnd[!is.na(data$sample_end_tm) & !is.na(data$sample_end_dt)] <- as.POSIXct(paste(data$sample_end_dt[!is.na(data$sample_end_tm) & !is.na(data$sample_end_dt)],
+                              data$sample_end_tm[!is.na(data$sample_end_tm) & !is.na(data$sample_end_dt)],sep=" "),tz="UTC")
+      } 
+      
+      data$result_va <- as.numeric(data$result_va)
+      data$rpt_lev_va <- as.numeric(data$rpt_lev_va)
+      rmCol <- c("agency_cd","site_no","tm_datum_rlbty_cd",
+                 "coll_ent_cd","medium_cd","tu_id","body_part_id",
+                 "sample_end_dt","sample_end_tm","sample_dt","sample_tm",
+                 "sample_start_time_datum_cd","anl_ent_cd","lab_std_va")
+      data <- data[,!(names(data) %in% rmCol)]
+      
+      longDF <- melt(data, c("parm_cd","dateTime","site","dateTimeEnd"))
+      wideDF <- dcast(longDF, ... ~ variable + parm_cd )
+      wideDF[,grep("_va_",names(wideDF))] <- sapply(wideDF[,grep("_va_",names(wideDF))], function(x) as.numeric(x))
+      
+      data <- wideDF[,c(1,2,3,(3+order(sapply(strsplit(names(wideDF)[c(-1:-3)],"_"), function(x) x[length(x)]))))]
+      if (all(is.na(data$dateTimeEnd))){
+        data$dateTimeEnd <- NULL
+      }    
+      
+    } else {
+      data$site <- with(data,paste(agency_cd,site_no,sep="-"))
+      data$dateTime <- with(data, as.POSIXct(paste(sample_dt,sample_tm,sep=" "),tz="UTC"))
+      rmCol <- c("agency_cd","site_no","tm_datum_rlbty_cd",
+                 "coll_ent_cd","medium_cd","tu_id","body_part_id",
+                 "sample_end_dt","sample_end_tm","sample_dt","sample_tm","sample_start_time_datum_cd")
+      data <- data[,!(names(data) %in% rmCol)]
+      
+      names(data) <- c(gsub("r", "qualifier_",names(data)[1:(length(names(data))-2)]),names(data)[(length(names(data))-1):length(names(data))])
+      names(data) <- c(gsub("p", "value_",names(data)[1:(length(names(data))-2)]),names(data)[(length(names(data))-1):length(names(data))])
+      
+      data[,grep("value",names(data))] <- sapply( data[,grep("value",names(data))], function(x) as.numeric(x))
+      
+      data <- data[,c(ncol(data):(ncol(data)-1),(1:(ncol(data)-2)))]
+    }
     
+    return (data)
   } else {
-    data$site <- with(data,paste(agency_cd,site_no,sep="-"))
-    data$dateTime <- with(data, as.POSIXct(paste(sample_dt,sample_tm,sep=" "),tz="UTC"))
-    rmCol <- c("agency_cd","site_no","tm_datum_rlbty_cd",
-               "coll_ent_cd","medium_cd","tu_id","body_part_id",
-               "sample_end_dt","sample_end_tm","sample_dt","sample_tm","sample_start_time_datum_cd")
-    data <- data[,!(names(data) %in% rmCol)]
-    
-    names(data) <- c(gsub("r", "qualifier_",names(data)[1:(length(names(data))-2)]),names(data)[(length(names(data))-1):length(names(data))])
-    names(data) <- c(gsub("p", "value_",names(data)[1:(length(names(data))-2)]),names(data)[(length(names(data))-1):length(names(data))])
-    
-    data[,grep("value",names(data))] <- sapply( data[,grep("value",names(data))], function(x) as.numeric(x))
-    
-    data <- data[,c(ncol(data):(ncol(data)-1),(1:(ncol(data)-2)))]
+    message(paste("URL caused an error:", url))
+    message("Content-Type=",h$value()["Content-Type"])
+    return(NA)
   }
-  
-  return (data)
 }
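For orientation, the expanded branch above reshapes the raw long-format NWIS water-quality output into one column per result field and parameter code using reshape2. A toy illustration with hypothetical values:

library(reshape2)

# Two parameters sampled at one site and time (values hypothetical):
data <- data.frame(site = "USGS-01594440",
                   dateTime = as.POSIXct("2012-06-28 10:00", tz = "UTC"),
                   parm_cd = c("00060", "00010"),
                   remark_cd = c("", "<"),
                   result_va = c("150", "4.5"),
                   stringsAsFactors = FALSE)

longDF <- melt(data, c("parm_cd", "dateTime", "site"))
wideDF <- dcast(longDF, ... ~ variable + parm_cd)

names(wideDF)
# "dateTime" "site" "remark_cd_00010" "remark_cd_00060"
# "result_va_00010" "result_va_00060"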
diff --git a/README.md b/README.md
index 05171b0c5d1cd077710c3e4c4fc2670db48c164f..308d3a7850a799483e521294f15ebb4516f71e24 100644
--- a/README.md
+++ b/README.md
@@ -1,6 +1,9 @@
 dataRetrieval
 =============
 
+[![Build status](https://ci.appveyor.com/api/projects/status/luni4ckts7j1u2k8)](https://ci.appveyor.com/project/USGS-R/dataretrieval)
+
+
 R package source for data retrieval specifically for the EGRET R package:
 
 Please visit the EGRET wiki for more information:
diff --git a/appveyor.yml b/appveyor.yml
index 62efd2cd26d2616e6d432b42b91610988f9be644..2129c268144f8a87106397a68dd7b92ec708b501 100644
--- a/appveyor.yml
+++ b/appveyor.yml
@@ -1,26 +1,37 @@
 init:
-ps: |
-$ErrorActionPreference = "Stop"
-Invoke-WebRequest http://raw.github.com/krlmlr/r-appveyor/master/scripts/appveyor-tool.ps1 -OutFile "..\appveyor-tool.ps1"
-Import-Module '..\appveyor-tool.ps1'
+  ps: |
+        $ErrorActionPreference = "Stop"
+        Invoke-WebRequest http://raw.github.com/krlmlr/r-appveyor/master/scripts/appveyor-tool.ps1 -OutFile "..\appveyor-tool.ps1"
+        Import-Module '..\appveyor-tool.ps1'
+
 install:
-ps: Bootstrap
+  ps: Bootstrap
+
 build_script:
-- travis-tool.sh install_deps
+  - travis-tool.sh install_github USGS-R/EGRET
+  - travis-tool.sh install_deps
+
 test_script:
-- travis-tool.sh run_tests
+  - travis-tool.sh run_tests
+
 on_failure:
-- travis-tool.sh dump_logs
+  - travis-tool.sh dump_logs
+
 artifacts:
-- path: '*.Rcheck\**\*.log'
-name: Logs
-- path: '*.Rcheck\**\*.out'
-name: Logs
-- path: '*.Rcheck\**\*.fail'
-name: Logs
-- path: '*.Rcheck\**\*.Rout'
-name: Logs
-- path: '\*_*.tar.gz'
-name: Bits
-- path: '\*_*.zip'
-name: Bits
\ No newline at end of file
+  - path: '*.Rcheck\**\*.log'
+    name: Logs
+
+  - path: '*.Rcheck\**\*.out'
+    name: Logs
+
+  - path: '*.Rcheck\**\*.fail'
+    name: Logs
+
+  - path: '*.Rcheck\**\*.Rout'
+    name: Logs
+
+  - path: '\*_*.tar.gz'
+    name: Bits
+
+  - path: '\*_*.zip'
+    name: Bits
\ No newline at end of file
diff --git a/man/compressData.Rd b/man/compressData.Rd
index 4484178090a3371a53a69bc3f518c2704aaa982b..5782f8d5dc6b13a024a8cfb16222eebba6b25d58 100644
--- a/man/compressData.Rd
+++ b/man/compressData.Rd
@@ -28,7 +28,9 @@ comment2 <- c("","<","")
 value2 <- c(2,3,4)
 comment3 <- c("","","<")
 value3 <- c(3,4,5)
-dataInput <- data.frame(dateTime, comment1, value1, comment2, value2, comment3, value3, stringsAsFactors=FALSE)
+dataInput <- data.frame(dateTime, comment1, value1,
+      comment2, value2,
+      comment3, value3, stringsAsFactors=FALSE)
 compressData(dataInput)
 }
 \keyword{WRTDS}
diff --git a/man/constructNWISURL.Rd b/man/constructNWISURL.Rd
index c0386ebd60a0c250d5cfb0d6c3b495483dbc4c04..bc503ef25d639e879ca3f9ebe3f1de2969bf81a4 100644
--- a/man/constructNWISURL.Rd
+++ b/man/constructNWISURL.Rd
@@ -41,12 +41,16 @@ siteNumber <- '01594440'
 startDate <- '1985-01-01'
 endDate <- ''
 pCode <- c("00060","00010")
-url_daily <- constructNWISURL(siteNumber,pCode,startDate,endDate,'dv',statCd=c("00003","00001"))
+url_daily <- constructNWISURL(siteNumber,pCode,
+           startDate,endDate,'dv',statCd=c("00003","00001"))
 url_unit <- constructNWISURL(siteNumber,pCode,"2012-06-28","2012-06-30",'iv')
 url_qw_single <- constructNWISURL(siteNumber,"01075",startDate,endDate,'qw')
-url_qw <- constructNWISURL(siteNumber,c('01075','00029','00453'),startDate,endDate,'qw')
-url_wqp <- constructNWISURL(paste("USGS",siteNumber,sep="-"),c('01075','00029','00453'),startDate,endDate,'wqp')
-url_daily_tsv <- constructNWISURL(siteNumber,pCode,startDate,endDate,'dv',statCd=c("00003","00001"),format="tsv")
+url_qw <- constructNWISURL(siteNumber,c('01075','00029','00453'),
+           startDate,endDate,'qw')
+url_wqp <- constructNWISURL(paste("USGS",siteNumber,sep="-"),c('01075','00029','00453'),
+           startDate,endDate,'wqp')
+url_daily_tsv <- constructNWISURL(siteNumber,pCode,startDate,endDate,'dv',
+           statCd=c("00003","00001"),format="tsv")
 }
 \keyword{USGS}
 \keyword{data}
diff --git a/man/formatCheckSiteNumber.Rd b/man/formatCheckSiteNumber.Rd
index 744cddc0451d4a46d7919088e9dd26b47f0e50c5..120e99d07e527a64cb0c63f0b01c65324ed438b1 100644
--- a/man/formatCheckSiteNumber.Rd
+++ b/man/formatCheckSiteNumber.Rd
@@ -19,8 +19,6 @@ Checks that the site code is at least 8 digits. If not, it confirms with the use
 \examples{
 site<- '01234567'
 formatCheckSiteNumber(site)
-site_incorrect <- '1234567'
-formatCheckSiteNumber(site_incorrect)
 }
 \keyword{WRTDS}
 \keyword{flow}
diff --git a/man/getDataFromFile.Rd b/man/getDataFromFile.Rd
index ae4d4fe51167b84b781623e1fa5b3c35b36cd932..a138b286029a9940716b129bb12feef972330986 100644
--- a/man/getDataFromFile.Rd
+++ b/man/getDataFromFile.Rd
@@ -28,7 +28,7 @@ The third column is optional, it contains any remark codes.
 filePath <- system.file("extdata", package="dataRetrieval")
 filePath <- paste(filePath,"/",sep="")
 fileName <- 'ChoptankRiverFlow.txt'
-getDataFromFile(filePath,fileName, separator="\\t")
+ChopData <- getDataFromFile(filePath,fileName, separator="\\t")
 }
 \keyword{data}
 \keyword{file}
diff --git a/man/getRDB1Data.Rd b/man/getRDB1Data.Rd
index b4b48d2968ad4ffffd65197400c2fd5bf4527187..4ce65b1483b02db6b5f9530b3122c4bbf5e8ebb0 100644
--- a/man/getRDB1Data.Rd
+++ b/man/getRDB1Data.Rd
@@ -18,14 +18,19 @@ This function accepts a url parameter that already contains the desired
 NWIS site, parameter code, statistic, startdate and enddate.
 }
 \examples{
-sites <- "02177000"
+siteNumber <- "02177000"
 startDate <- "2012-09-01"
 endDate <- "2012-10-01"
 offering <- '00003'
 property <- '00060'
-obs_url <- constructNWISURL(sites,property,startDate,endDate,'dv',format='tsv')
+obs_url <- constructNWISURL(siteNumber,property,
+         startDate,endDate,'dv',format='tsv')
 data <- getRDB1Data(obs_url)
-urlMulti <- constructNWISURL("04085427",c("00060","00010"),startDate,endDate,'dv',statCd=c("00003","00001"),'tsv')
+urlMulti <- constructNWISURL("04085427",c("00060","00010"),
+         startDate,endDate,'dv',statCd=c("00003","00001"),'tsv')
 multiData <- getRDB1Data(urlMulti)
+unitDataURL <- constructNWISURL(siteNumber,property,
+         as.character(Sys.Date()),as.character(Sys.Date()),'uv',format='tsv')
+unitData <- getRDB1Data(unitDataURL, asDateTime=TRUE)
 }
 
diff --git a/man/getWaterML1Data.Rd b/man/getWaterML1Data.Rd
index a0a07f0f50aa8efbfb0239b3fda4e0f3b06d2b7f..575e44579cfa19b48b1cb8447e193342c12f08ca 100644
--- a/man/getWaterML1Data.Rd
+++ b/man/getWaterML1Data.Rd
@@ -16,16 +16,21 @@ This function accepts a url parameter that already contains the desired
 NWIS site, parameter code, statistic, startdate and enddate.
 }
 \examples{
-sites <- "02177000"
+siteNumber <- "02177000"
 startDate <- "2012-09-01"
 endDate <- "2012-10-01"
 offering <- '00003'
 property <- '00060'
-obs_url <- constructNWISURL(sites,property,startDate,endDate,'dv')
+obs_url <- constructNWISURL(siteNumber,property,startDate,endDate,'dv')
 data <- getWaterML1Data(obs_url)
-urlMulti <- constructNWISURL("04085427",c("00060","00010"),startDate,endDate,'dv',statCd=c("00003","00001"))
+urlMulti <- constructNWISURL("04085427",c("00060","00010"),
+            startDate,endDate,'dv',statCd=c("00003","00001"))
 multiData <- getWaterML1Data(urlMulti)
-goundwaterExampleURL <- "http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=431049071324301&startDT=2013-10-01&endDT=2014-06-30"
+goundwaterExampleURL <-
+    "http://waterservices.usgs.gov/nwis/gwlevels/?format=waterml&sites=431049071324301&startDT=2013-10-01&endDT=2014-06-30"
 groundWater <- getWaterML1Data(goundwaterExampleURL)
+unitDataURL <- constructNWISURL(siteNumber,property,
+         as.character(Sys.Date()),as.character(Sys.Date()),'uv',format='xml')
+unitData <- getWaterML1Data(unitDataURL)
 }
 
diff --git a/man/getWaterML2Data.Rd b/man/getWaterML2Data.Rd
index ca210afc93f239d9fc2d663a600b9ceef997ce17..ac84669ef8160e9781b0ab7fb213679ababd6547 100644
--- a/man/getWaterML2Data.Rd
+++ b/man/getWaterML2Data.Rd
@@ -15,7 +15,7 @@ mergedDF a data frame containing columns agency, site, dateTime, values, and rem
 This function accepts a url parameter for a WaterML2 getObservation
 }
 \examples{
-URL <- "http://webvastage6.er.usgs.gov/ogc-swie/wml2/dv/sos?request=GetObservation&featureID=435601087432701&observedProperty=00045&beginPosition=2012-01-01&offering=Sum"
-dataReturned3 <- getWaterML2Data(URL)
+URL <- "http://waterservices.usgs.gov/nwis/dv/?format=waterml,2.0&sites=01646500&startDT=2014-09-01&endDT=2014-09-08&statCd=00003&parameterCd=00060"
+\dontrun{dataReturned3 <- getWaterML2Data(URL)}
 }
 
diff --git a/man/renameColumns.Rd b/man/renameColumns.Rd
index 6442b41c8bd97222b08e4f4fbd2333d7fbdc2cd7..fc26c62cee2b5f3cd3002a3fc7f3286f793aa9f2 100644
--- a/man/renameColumns.Rd
+++ b/man/renameColumns.Rd
@@ -17,7 +17,8 @@ Rename columns coming back from NWIS data retrievals
 \examples{
 # This example requires an internet connection to run
 siteNumber <- '05114000'
-rawData <- retrieveNWISdvData(siteNumber,c("00010","00060","00300"),"2001-01-01","2002-01-01",statCd=c("00001","00003"))
+rawData <- retrieveNWISdvData(siteNumber,c("00010","00060","00300"),
+          "2001-01-01","2002-01-01",statCd=c("00001","00003"))
 rawData <- renameColumns(rawData)
 today <- as.character(Sys.Date())
 rawData2 <- retrieveNWISunitData(siteNumber,c("00010","00060"),today,today)
diff --git a/man/retrieveNWISdvData.Rd b/man/retrieveNWISdvData.Rd
index dc0516c86a312a1056cb47d3aa462dc2d5b45e7c..72badd54d2dcf4a56a9db5a2b73babb5f8e8c1e7 100644
--- a/man/retrieveNWISdvData.Rd
+++ b/man/retrieveNWISdvData.Rd
@@ -38,9 +38,12 @@ startDate <- '2012-01-01'
 endDate <- '2012-06-30'
 pCode <- '00060'
 rawDailyQ <- retrieveNWISdvData(siteNumber,pCode, startDate, endDate)
-rawDailyTemperature <- retrieveNWISdvData(siteNumber,'00010', startDate, endDate, statCd='00001')
-rawDailyTemperatureTSV <- retrieveNWISdvData(siteNumber,'00010', startDate, endDate, statCd='00001',format='tsv')
-rawDailyQAndTempMeanMax <- retrieveNWISdvData(siteNumber,c('00010','00060'), startDate, endDate, statCd=c('00001','00003'))
+rawDailyTemperature <- retrieveNWISdvData(siteNumber,'00010',
+       startDate, endDate, statCd='00001')
+rawDailyTemperatureTSV <- retrieveNWISdvData(siteNumber,'00010',
+       startDate, endDate, statCd='00001',format='tsv')
+rawDailyQAndTempMeanMax <- retrieveNWISdvData(siteNumber,c('00010','00060'),
+       startDate, endDate, statCd=c('00001','00003'))
 }
 \keyword{USGS}
 \keyword{data}
diff --git a/man/retrieveWQPqwData.Rd b/man/retrieveWQPqwData.Rd
index 5067730007e63ffc7406fd0af1a1a31b274ace27..79b0c0ce34dc4d7c02a7b4c9c89a42ba46e16888 100644
--- a/man/retrieveWQPqwData.Rd
+++ b/man/retrieveWQPqwData.Rd
@@ -9,7 +9,7 @@ retrieveWQPqwData(siteNumber, parameterCd, startDate, endDate,
 \arguments{
 \item{siteNumber}{string USGS site number.  This is usually an 8 digit number}
 
-\item{parameterCd}{vector of USGS 5-digit parameter code. Leaving this blank will return all of the measured values during the specified time period.}
+\item{parameterCd}{vector of USGS 5-digit parameter codes or characteristicName strings. Leaving this blank will return all of the measured values during the specified time period.}
 
 \item{startDate}{string starting date for data retrieval in the form YYYY-MM-DD.}