diff --git a/R/importRDB1.r b/R/importRDB1.r
index f5ed78d8ba3cb8443f8382fe3ef9078ea63b95d6..0b5f4547dbafaee9d215ef66e6018ad642b8c35b 100644
--- a/R/importRDB1.r
+++ b/R/importRDB1.r
@@ -67,6 +67,10 @@ importRDB1 <- function(obs_url, asDateTime=FALSE, qw=FALSE, convertType = TRUE,
       h <- basicHeaderGatherer()
       doc <- getURL(obs_url, headerfunction = h$update)
 
+      fileVecChar <- scan(obs_url, what = "", sep = "\n", quiet=TRUE)
+      pndIndx<-regexpr("^#", fileVecChar)
+      hdr <- fileVecChar[pndIndx > 0L]
+      
       if(!(as.character(h$value()["Content-Type"]) == "text/plain;charset=UTF-8" | 
            as.character(h$value()["Content-Type"]) == "text/plain")){
         message(paste("URL caused an error:", obs_url))
@@ -86,7 +90,7 @@ importRDB1 <- function(obs_url, asDateTime=FALSE, qw=FALSE, convertType = TRUE,
     doc <- obs_url
 
   }
-      
+  
   tmp <- read.delim(  
     doc, 
     header = TRUE, 
@@ -125,8 +129,12 @@ importRDB1 <- function(obs_url, asDateTime=FALSE, qw=FALSE, convertType = TRUE,
     data[,numberColumns] <- sapply(data[,numberColumns],as.numeric)
     
     intColumns <- grep("_nu",names(data))
-    data[,intColumns] <- sapply(data[,intColumns],as.integer)
     
+    if("current_rating_nu" %in% names(data)){
+      intColumns <- intColumns[names(data)[intColumns] != "current_rating_nu"]
+      data$current_rating_nu <- gsub(" ", "", data$current_rating_nu)
+    }
+    data[,intColumns] <- sapply(data[,intColumns],as.integer)
     
     if(length(grep('d$', dataType)) > 0){
       if (asDateTime & !qw){
@@ -210,6 +218,9 @@ importRDB1 <- function(obs_url, asDateTime=FALSE, qw=FALSE, convertType = TRUE,
   
     row.names(data) <- NULL
   }
+  
+  if(exists("hdr", inherits=FALSE)) comment(data) <- hdr
+  
   return(data)
 
 }
diff --git a/R/readNWISsite.r b/R/readNWISsite.r
index 8ad90d3872638d4e1f4c8eaa0a6270bb4435e0b9..a3a508a8279c9798a5a9d8d62c71c8e90664244c 100644
--- a/R/readNWISsite.r
+++ b/R/readNWISsite.r
@@ -2,7 +2,7 @@
 #'
 #' Imports data from USGS site file site. This function gets data from here: \url{http://waterservices.usgs.gov/}
 #'
-#' @param siteNumber string USGS site number.  This is usually an 8 digit number
+#' @param siteNumbers string or vector of strings of USGS site numbers.  These are usually 8 digit numbers
 #' @keywords data import USGS web service
 #' @return retval dataframe with all information found in the expanded site file
 #' @export
@@ -10,48 +10,52 @@
 #' # These examples require an internet connection to run
 #' siteINFO <- readNWISsite('05114000')
 #' siteINFOMulti <- readNWISsite(c('05114000','09423350'))
-readNWISsite <- function(siteNumber){
+readNWISsite <- function(siteNumbers){
   
-  siteNumber <- paste(siteNumber,collapse=",")
+  siteNumber <- paste(siteNumbers,collapse=",")
   urlSitefile <- paste("http://waterservices.usgs.gov/nwis/site/?format=rdb&siteOutput=Expanded&sites=",siteNumber,sep = "")
   
-  doc = tryCatch({
-    h <- basicHeaderGatherer()
-    doc <- getURL(urlSitefile, headerfunction = h$update)
+  if(url.exists(urlSitefile)){
+    doc = tryCatch({
+      h <- basicHeaderGatherer()
+      doc <- getURL(urlSitefile, headerfunction = h$update)
+      
+    }, warning = function(w) {
+      message(paste("URL caused a warning:", urlSitefile))
+      message(w)
+    }, error = function(e) {
+      message(paste("URL does not seem to exist:", urlSitefile))
+      message(e)
+      return(NA)
+    }) 
     
-  }, warning = function(w) {
-    message(paste("URL caused a warning:", urlSitefile))
-    message(w)
-  }, error = function(e) {
-    message(paste("URL does not seem to exist:", urlSitefile))
-    message(e)
-    return(NA)
-  }) 
-  
-  if(h$value()["Content-Type"] == "text/plain;charset=UTF-8"){
-  
-    SiteFile <- read.delim(
-      textConnection(doc),
-      header = TRUE,
-      quote="\"",
-      dec=".",
-      sep='\t',
-      colClasses=c('character'),
-      fill = TRUE,
-      comment.char="#")
-    
-    INFO <- SiteFile[-1,]
-    names(INFO) <- gsub("_",".",names(INFO))
-    
-    INFO$queryTime <- Sys.time()
-    INFO$dec.lat.va <- as.numeric(INFO$dec.lat.va)
-    INFO$dec.long.va <- as.numeric(INFO$dec.long.va)
-    INFO$alt.va <- as.numeric(INFO$alt.va)
+    if(h$value()["Content-Type"] == "text/plain;charset=UTF-8"){
     
-    return(INFO)
+      SiteFile <- read.delim(
+        textConnection(doc),
+        header = TRUE,
+        quote="\"",
+        dec=".",
+        sep='\t',
+        colClasses=c('character'),
+        fill = TRUE,
+        comment.char="#")
+      
+      INFO <- SiteFile[-1,]
+      names(INFO) <- gsub("_",".",names(INFO))
+      
+      INFO$queryTime <- Sys.time()
+      INFO$dec.lat.va <- as.numeric(INFO$dec.lat.va)
+      INFO$dec.long.va <- as.numeric(INFO$dec.long.va)
+      INFO$alt.va <- as.numeric(INFO$alt.va)
+      
+      return(INFO)
+    } else {
+      message(paste("URL caused an error:", urlSitefile))
+      message("Content-Type=",h$value()["Content-Type"])
+      return(NA)
+    }
   } else {
-    message(paste("URL caused an error:", urlSitefile))
-    message("Content-Type=",h$value()["Content-Type"])
-    return(NA)
+    message(paste("URL caused an error:", urlSitefile))
   }
 }
diff --git a/R/readNWISunit.r b/R/readNWISunit.r
index 10b8ce9f9ade559e3694c29ba33bdb3b81fefbff..23e997fc9d0640516b5f8e979952a94f6e56ba7c 100644
--- a/R/readNWISunit.r
+++ b/R/readNWISunit.r
@@ -51,7 +51,7 @@ readNWISpeak <- function (siteNumber,startDate="",endDate=""){
   
   # Doesn't seem to be a peak xml service
   url <- constructNWISURL(siteNumber,NA,startDate,endDate,"peak")
-
+  
   data <- importRDB1(url, asDateTime=FALSE)
     
   return (data)
@@ -67,11 +67,21 @@ readNWISpeak <- function (siteNumber,startDate="",endDate=""){
 #' @examples
 #' siteNumber <- '01594440'
 #' data <- readNWISrating(siteNumber, "base")
+#' attr(data, "RATING")
 readNWISrating <- function (siteNumber,type="base"){  
-  # Doesn't seem to be a rating xml service 
+  
+  # No rating xml service 
   url <- constructNWISURL(siteNumber,service="rating",ratingType = type)
+    
   data <- importRDB1(url, asDateTime=FALSE)
   
+  if(type == "base") {
+    Rat <- grep("//RATING ", comment(data), value=TRUE, fixed=TRUE)
+    Rat <- sub("# //RATING ", "", Rat)
+    Rat <- scan(text=Rat, sep=" ", what="")
+    attr(data, "RATING") <- Rat
+  }
+  
   return (data)
 }
 
@@ -94,8 +104,13 @@ readNWISmeas <- function (siteNumber,startDate="",endDate="", tz=""){
   
   # Doesn't seem to be a WaterML1 format option
   url <- constructNWISURL(siteNumber,NA,startDate,endDate,"meas")
+  
   data <- importRDB1(url,asDateTime=FALSE,tz=tz)
   
+  if("diff_from_rating_pc" %in% names(data)){
+    data$diff_from_rating_pc <- as.numeric(data$diff_from_rating_pc)
+  }
+  
   return (data)
 }
 
@@ -115,7 +130,7 @@ readNWISmeas <- function (siteNumber,startDate="",endDate="", tz=""){
 #' data2 <- readNWISgwl(sites, '','')
 readNWISgwl <- function (siteNumbers,startDate="",endDate=""){  
   
-  url <- constructNWISURL(siteNumber,NA,startDate,endDate,"gwlevels",format="wml1")
+  url <- constructNWISURL(siteNumbers,NA,startDate,endDate,"gwlevels",format="wml1")
   data <- importWaterML1(url,asDateTime=FALSE)
   
   return (data)
diff --git a/R/whatNWISsites.R b/R/whatNWISsites.R
index 7d4d1197d8e2d8b963ee973403b99d8be3644a14..1b392f2e19c0f4f884022ce8a3b8f13d4dc6460e 100644
--- a/R/whatNWISsites.R
+++ b/R/whatNWISsites.R
@@ -24,50 +24,54 @@ whatNWISsites <- function(...){
   urlCall <- paste(baseURL,
                    urlCall,sep = "")
   
-  h <- basicHeaderGatherer()
-  doc = tryCatch({
-    returnedDoc <- getURI(urlCall, headerfunction = h$update)
-    if(h$value()["Content-Type"] == "text/xml;charset=UTF-8"){
-      xmlTreeParse(returnedDoc, getDTD = FALSE, useInternalNodes = TRUE)
-    } else {
-      message(paste("URL caused an error:", urlCall))
-      message("Content-Type=",h$value()["Content-Type"])
+  if(url.exists(urlCall)){
+    h <- basicHeaderGatherer()
+    doc = tryCatch({
+      returnedDoc <- getURI(urlCall, headerfunction = h$update)
+      if(h$value()["Content-Type"] == "text/xml;charset=UTF-8"){
+        xmlTreeParse(returnedDoc, getDTD = FALSE, useInternalNodes = TRUE)
+      } else {
+        message(paste("URL caused an error:", urlCall))
+        message("Content-Type=",h$value()["Content-Type"])
+        return(NA)
+      }   
+      
+    }, warning = function(w) {
+      message(paste("URL caused a warning:", urlCall))
+      message(w)
+    }, error = function(e) {
+      message(paste("URL does not seem to exist:", urlCall))
+      message(e)
       return(NA)
-    }   
+    }) 
     
-  }, warning = function(w) {
-    message(paste("URL caused a warning:", urlCall))
-    message(w)
-  }, error = function(e) {
-    message(paste("URL does not seem to exist:", urlCall))
-    message(e)
-    return(NA)
-  }) 
-  
-  doc <- xmlRoot(doc)
-  numChunks <- xmlSize(doc)
-  for(i in 1:numChunks){
-    chunk <- doc[[1]]
-    site_no <- as.character(xpathApply(chunk, "site/@sno"))
-    station_nm <- as.character(xpathApply(chunk, "site/@sna"))
-    site_tp_cd <- as.character(xpathApply(chunk, "site/@cat"))
-    dec_lat_va <- as.numeric(xpathApply(chunk, "site/@lat"))
-    dec_long_va <- as.numeric(xpathApply(chunk, "site/@lng"))
-    agency_cd <- as.character(xpathApply(chunk, "site/@agc"))
+    doc <- xmlRoot(doc)
+    numChunks <- xmlSize(doc)
+    for(i in 1:numChunks){
+      chunk <- doc[[i]]
+      site_no <- as.character(xpathApply(chunk, "site/@sno"))
+      station_nm <- as.character(xpathApply(chunk, "site/@sna"))
+      site_tp_cd <- as.character(xpathApply(chunk, "site/@cat"))
+      dec_lat_va <- as.numeric(xpathApply(chunk, "site/@lat"))
+      dec_long_va <- as.numeric(xpathApply(chunk, "site/@lng"))
+      agency_cd <- as.character(xpathApply(chunk, "site/@agc"))
+      
+      df <- data.frame(agency_cd, site_no, station_nm, site_tp_cd, 
+                       dec_lat_va, dec_long_va, stringsAsFactors=FALSE) 
+      
+      if(1==i){
+        retval <- df
+      } else {
+        retval <- rbind(retval, df)
+      }
+    }
     
-    df <- data.frame(agency_cd, site_no, station_nm, site_tp_cd, 
-                     dec_lat_va, dec_long_va, stringsAsFactors=FALSE) 
+    retval <- retval[!duplicated(retval),]
     
-    if(1==i){
-      retval <- df
-    } else {
-      retval <- rbind(retval, df)
-    }
+    retval$queryTime <- Sys.time()
+    
+    return(retval)
+  } else {
+    message(paste("URL caused an error:", urlCall))
   }
-  
-  retval <- retval[!duplicated(retval),]
-  
-  retval$queryTime <- Sys.time()
-  
-  return(retval)
 }
diff --git a/man/readNWISrating.Rd b/man/readNWISrating.Rd
index 88200fb67e217860371172482c969a23ca52cdb0..f335d06d16be5a93aa3086d22ecc8c1d926821c8 100644
--- a/man/readNWISrating.Rd
+++ b/man/readNWISrating.Rd
@@ -16,5 +16,6 @@ Reads the current rating table for an active USGS streamgage.
 \examples{
 siteNumber <- '01594440'
 data <- readNWISrating(siteNumber, "base")
+attr(data, "RATING")
 }
 
diff --git a/man/readNWISsite.Rd b/man/readNWISsite.Rd
index 9d83a9d80e0a3fcc0ee09da57e3f3e44f4f197f0..16547e44908e6aa27f50507294a00710c5c4aa47 100644
--- a/man/readNWISsite.Rd
+++ b/man/readNWISsite.Rd
@@ -3,10 +3,10 @@
 \alias{readNWISsite}
 \title{USGS Site File Data Retrieval}
 \usage{
-readNWISsite(siteNumber)
+readNWISsite(siteNumbers)
 }
 \arguments{
-\item{siteNumber}{string USGS site number.  This is usually an 8 digit number}
+\item{siteNumbers}{string or vector of strings of USGS site numbers.  These are usually 8 digit numbers}
 }
 \value{
 retval dataframe with all information found in the expanded site file