Commit cc30b968 authored by Laura A DeCicco

Merge branch 'main' into 'master'

Main

See merge request !426
parents 6177e51f c49f88c1
Pipeline #116556 passed in 11 minutes and 28 seconds
@@ -50,3 +50,5 @@ vignettes/tutorial.Rmd
vignettes/phosData.rds
vignettes/waterservices.png
.gitlab-ci.yml
sample_large_pull.R
# Workflow derived from https://github.com/r-lib/actions/tree/master/examples
# Need help debugging build failures? Start at https://github.com/r-lib/actions#where-to-find-help
on:
push:
branches:
- master
branches: main
pull_request:
branches:
- master
branches: main
name: R-CMD-check
@@ -20,68 +20,30 @@ jobs:
config:
- {os: macOS-latest, r: 'release'}
- {os: windows-latest, r: 'release'}
- {os: ubuntu-18.04, r: 'release', rspm: "https://packagemanager.rstudio.com/cran/__linux__/bionic/latest"}
- {os: ubuntu-latest, r: 'devel', http-user-agent: 'release'}
- {os: ubuntu-latest, r: 'release'}
- {os: ubuntu-latest, r: 'oldrel-1'}
env:
R_REMOTES_NO_ERRORS_FROM_WARNINGS: true
RSPM: ${{ matrix.config.rspm }}
GITHUB_PAT: ${{ secrets.GITHUB_TOKEN }}
R_KEEP_PKG_SOURCE: yes
steps:
- uses: actions/checkout@v2
- uses: r-lib/actions/setup-r@master
- uses: r-lib/actions/setup-pandoc@v1
- uses: r-lib/actions/setup-r@v1
with:
r-version: ${{ matrix.config.r }}
http-user-agent: ${{ matrix.config.http-user-agent }}
use-public-rspm: true
- uses: r-lib/actions/setup-pandoc@master
- name: Query dependencies
run: |
install.packages(c('remotes', 'zoo', 'sf'))
saveRDS(remotes::dev_package_deps(dependencies = TRUE), ".github/depends.Rds", version = 2)
shell: Rscript {0}
- name: Cache R packages
if: runner.os != 'Windows'
uses: actions/cache@v1
- uses: r-lib/actions/setup-r-dependencies@v1
with:
path: ${{ env.R_LIBS_USER }}
key: ${{ runner.os }}-r-${{ matrix.config.r }}-1-${{ hashFiles('.github/depends.Rds') }}
restore-keys: ${{ runner.os }}-r-${{ matrix.config.r }}-1-
- name: Install system dependencies
if: runner.os == 'Linux'
env:
RHUB_PLATFORM: linux-x86_64-ubuntu-gcc
run: |
Rscript -e "remotes::install_github('r-hub/sysreqs')"
sysreqs=$(Rscript -e "cat(sysreqs::sysreq_commands('DESCRIPTION'))")
sudo -s eval "$sysreqs"
- name: mac dependencies
if: runner.os == 'macOS'
run: brew install pkg-config gdal proj geos udunits
- name: Install dependencies
run: |
remotes::install_deps(dependencies = TRUE)
install.packages(c('remotes', 'zoo', 'sf'))
remotes::install_cran("rcmdcheck")
shell: Rscript {0}
- name: Session info
run: |
options(width = 100)
pkgs <- installed.packages()[, "Package"]
sessioninfo::session_info(pkgs, include_base = TRUE)
shell: Rscript {0}
extra-packages: rcmdcheck
- name: Check
env:
_R_CHECK_CRAN_INCOMING_: false
run: rcmdcheck::rcmdcheck(args = c("--no-manual", "--as-cran"), error_on = "warning", check_dir = "check")
shell: Rscript {0}
- uses: r-lib/actions/check-r-package@v1
- name: Show testthat output
if: always()
@@ -90,7 +52,7 @@ jobs:
- name: Upload check results
if: failure()
uses: actions/upload-artifact@master
uses: actions/upload-artifact@main
with:
name: ${{ runner.os }}-r${{ matrix.config.r }}-results
path: check
path: check
\ No newline at end of file
# Workflow derived from https://github.com/r-lib/actions/tree/master/examples
# Need help debugging build failures? Start at https://github.com/r-lib/actions#where-to-find-help
on:
push:
branches: master
branches: main
name: pkgdown
jobs:
pkgdown:
runs-on: ubuntu-18.04
runs-on: ubuntu-latest
env:
RSPM: https://packagemanager.rstudio.com/cran/__linux__/bionic/latest
GITHUB_PAT: ${{ secrets.GITHUB_TOKEN }}
steps:
- uses: actions/checkout@v2
- uses: r-lib/actions/setup-pandoc@v1
- uses: r-lib/actions/setup-r@v1
id: install-r
with:
use-public-rspm: true
- uses: r-lib/actions/setup-pandoc@v1
- uses: r-lib/actions/setup-r-dependencies@v1
with:
extra-packages: pkgdown
needs: website
- uses: r-lib/actions/setup-r-dependencies@v1
with:
extra-packages: rcmdcheck
- name: Install pak and query dependencies
run: |
install.packages("pak", repos = "https://r-lib.github.io/p/pak/dev/")
install.packages(c("DT", "leaflet", "zoo", "maps", "patchwork"))
saveRDS(pak::pkg_deps("local::.", dependencies = TRUE), ".github/r-depends.rds")
shell: Rscript {0}
- name: Install system dependencies
if: runner.os == 'Linux'
run: |
pak::local_system_requirements(execute = TRUE)
pak::pkg_system_requirements("pkgdown", execute = TRUE)
shell: Rscript {0}
- name: Install dependencies
run: |
pak::local_install_dev_deps(upgrade = TRUE, dependencies = c("all", "Config/Needs/website"))
pak::pkg_install("pkgdown")
install.packages(c("zoo", "maps", "patchwork", "leaflet"))
shell: Rscript {0}
- name: Install package
run: R CMD INSTALL .
- name: Deploy package
run: |
git config --local user.name "$GITHUB_ACTOR"
git config --local user.email "$GITHUB_ACTOR@users.noreply.github.com"
Rscript -e 'options(rmarkdown.html_vignette.check_title = FALSE); pkgdown::deploy_to_branch(new_process = FALSE)'
git config --local user.email "$GITHUB_ACTOR@users.noreply.github.com"
Rscript -e 'pkgdown::deploy_to_branch(new_process = FALSE)'
on:
push:
branches:
- master
- main
pull_request:
branches:
- master
- main
name: test-coverage
......
Package: dataRetrieval
Type: Package
Title: Retrieval Functions for USGS and EPA Hydrologic and Water Quality Data
Version: 2.7.10.1
Version: 2.7.11
Authors@R: c(
person("Laura", "DeCicco", role = c("aut","cre"),
email = "ldecicco@usgs.gov",
......
@@ -2,6 +2,8 @@ dataRetrieval 2.7.11
==================
* Updated some documentation to improve examples.
* Changed the check for internet to better account for proxies.
* Fixed bug due to new behavior in NWIS parameter code service.
* Updated documentation for readWQPdata and readWQPsummary.
dataRetrieval 2.7.10
==================
......
@@ -157,43 +157,11 @@ importWQP <- function(obs_url, zip=TRUE, tz="UTC",
}
}
retval <- suppressWarnings(readr::read_delim(doc,
col_types = readr::cols(`ActivityStartTime/Time` = readr::col_character(),
`ActivityEndTime/Time` = readr::col_character(),
USGSPCode = readr::col_character(),
ResultCommentText = readr::col_character(),
ResultSampleFractionText = readr::col_character(),
ActivityDepthAltitudeReferencePointText = readr::col_character(),
ActivityConductingOrganizationText = readr::col_character(),
ActivityCommentText = readr::col_character(),
ResultWeightBasisText = readr::col_character(),
ResultTimeBasisText = readr::col_character(),
ResultParticleSizeBasis = readr::col_character(),
ResultDepthAltitudeReferencePointText = readr::col_character(),
ResultLaboratoryCommentText = readr::col_character(),
ResultTemperatureBasisText = readr::col_character(),
ResultDetectionConditionText = readr::col_character(),
ResultParticleSizeBasisText = readr::col_character(),
`ActivityDepthHeightMeasure/MeasureValue` = readr::col_number(),
`DetectionQuantitationLimitMeasure/MeasureValue` = readr::col_number(),
ResultMeasureValue = readr::col_character(),
`WellDepthMeasure/MeasureValue` = readr::col_number(),
`WellHoleDepthMeasure/MeasureValue` = readr::col_number(),
DetectionQuantitationLimitTypeName = readr::col_character(),
LaboratoryName = readr::col_character(),
MethodDescriptionText = readr::col_character(),
`ResultAnalyticalMethod/MethodName` = readr::col_character(),
`ResultAnalyticalMethod/MethodIdentifier` = readr::col_character(),
`ResultAnalyticalMethod/MethodIdentifierContext` = readr::col_character(),
SampleTissueAnatomyName = readr::col_character(),
SubjectTaxonomicName = readr::col_character(),
ResultDepthAltitudeReferencePointText = readr::col_character(),
`ResultDepthHeightMeasure/MeasureUnitCode` = readr::col_character(),
`DetectionQuantitationLimitMeasure/MeasureUnitCode` = readr::col_character(),
`HUCEightDigitCode` = readr::col_character(),
`ActivityEndTime/TimeZoneCode` = readr::col_character()),
retval <- suppressWarnings(readr::read_delim(doc,
col_types = readr::cols(.default = "c"),
quote = ifelse(csv,'\"',""),
delim = ifelse(csv,",","\t")))
delim = ifelse(csv,",","\t"),
guess_max = totalPossible))
if(!file.exists(obs_url)){
actualNumReturned <- nrow(retval)
@@ -202,18 +170,24 @@ importWQP <- function(obs_url, zip=TRUE, tz="UTC",
warning("Number of rows returned not matched in header")
}
}
suppressWarnings({
val <- tryCatch(as.numeric(retval$ResultMeasureValue),
warning = function(w) w)
valueCols <- names(retval)[grep("MeasureValue", names(retval))]
countCols <- names(retval)[grep("Count", names(retval))]
yearCols <- names(retval)[grep("Year", names(retval))]
for(numberCol in unique(c(valueCols, countCols, yearCols))){
suppressWarnings({
val <- tryCatch(as.numeric(retval[[numberCol]]),
warning = function(w) w)
# we don't want to convert it to numeric if there are non-numeric chars
# If we leave it to the user, it will probably break a lot of code
if(!"warning" %in% class(val)){
retval[[numberCol]] <- val
}
})
}
# we don't want to convert it to numeric if there are non-numeric chars
# they often happen after readr has decided the column type if we left it to readr
# If we leave it to the user, it will probably break a lot of code
# If we bump up readr's guess_max...the computational time becomes really really long
if(!"warning" %in% class(val)){
retval$ResultMeasureValue <- val
}
})
if(length(grep("ActivityStartTime",names(retval))) > 0){
......
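A minimal, standalone sketch of the conversion pattern introduced above in importWQP: every column is read as character, and a column is converted to numeric only when the conversion raises no warning. The data frame and column names below are invented for illustration, not part of the commit.

# Hypothetical example data; "<0.01" mimics a censored value that must stay character.
toy <- data.frame(ResultMeasureValue = c("1.5", "<0.01", "2"),
                  ResultCount = c("3", "4", "5"),
                  stringsAsFactors = FALSE)
numeric_candidates <- grep("MeasureValue|Count", names(toy), value = TRUE)
for (col in numeric_candidates) {
  converted <- tryCatch(as.numeric(toy[[col]]),
                        warning = function(w) w)
  # keep the character version if any value fails to parse
  if (!inherits(converted, "warning")) {
    toy[[col]] <- converted
  }
}
str(toy)  # ResultCount becomes numeric; ResultMeasureValue stays character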
@@ -89,7 +89,8 @@
#'
#' # Timezone change with specified local timezone:
#' tzURL <- constructNWISURL("04027000", c("00300","63680"), "2011-11-05", "2011-11-07","uv")
#' tzIssue <- importWaterML1(tzURL, TRUE, "America/Chicago")
#' tzIssue <- importWaterML1(tzURL,
#' asDateTime = TRUE, tz = "America/Chicago")
#'
#' # raw XML
#' url <- constructNWISURL(service = 'dv', siteNumber = '02319300', parameterCd = "00060",
@@ -197,7 +198,8 @@ importWaterML1 <- function(obs_url,asDateTime=FALSE, tz="UTC"){
}
#^^setting tz in as.POSIXct just sets the attribute, does not convert the time!
attr(dateTime, 'tzone') <- tz
# Set to UTC now, convert the whole thing to tz later.
attr(dateTime, 'tzone') <- "UTC"
tzCol <- rep(tz,nObs)
} else {
tzCol <- rep(defaultTZ, nObs)
@@ -241,7 +243,7 @@ importWaterML1 <- function(obs_url,asDateTime=FALSE, tz="UTC"){
obsDF <- data.frame(dateTime=character(0), tz_cd=character(0), stringsAsFactors = FALSE)
if(asDateTime){
obsDF$dateTime <- as.POSIXct(obsDF$dateTime)
attr(obsDF$dateTime, "tzone") <- tz
attr(obsDF$dateTime, "tzone") <- "UTC"
}
}
}
@@ -275,7 +277,8 @@ importWaterML1 <- function(obs_url,asDateTime=FALSE, tz="UTC"){
#rep site no & agency, combine into DF
obsDFrows <- nrow(obsDF)
df <- cbind.data.frame(agency_cd = rep(agency_cd,obsDFrows), site_no = rep(site_no,obsDFrows),
df <- cbind.data.frame(agency_cd = rep(agency_cd,obsDFrows),
site_no = rep(site_no,obsDFrows),
obsDF, stringsAsFactors = FALSE)
#join by site no
@@ -299,7 +302,24 @@ importWaterML1 <- function(obs_url,asDateTime=FALSE, tz="UTC"){
y = df,
by = sameSite_simNames,
all=TRUE)
na.omit.unique <- function(x){
if(all(is.na(x))) NA else stats::na.omit(x)
}
if(any(duplicated(sameSite[,c("agency_cd", "site_no",
"dateTime", "tz_cd")]))){
types <- lapply(sameSite, class)
sameSite <- stats::aggregate(.~ agency_cd + site_no + dateTime + tz_cd,
data = sameSite,
FUN = na.omit.unique, na.action=NULL )
sameSite[,which(types == "numeric")] <- sapply(sameSite[,which(types == "numeric")], as.numeric)
sameSite[,which(types == "character")] <- sapply(sameSite[,which(types == "character")], as.character)
}
sameSite <- sameSite[order(as.Date(sameSite$dateTime)),]
mergedDF <- r_bind_dr(sameSite, diffSite)
} else {
similarNames <- intersect(colnames(mergedDF), colnames(df))
@@ -337,8 +357,9 @@ importWaterML1 <- function(obs_url,asDateTime=FALSE, tz="UTC"){
#move tz column to far right and sort by increasing site number to be consistent with old version
mergedNames <- names(mergedDF)
tzLoc <- grep("tz_cd", names(mergedDF))
attr(mergedDF$dateTime, 'tzone') <- tz
mergedDF <- mergedDF[c(mergedNames[-tzLoc],mergedNames[tzLoc])]
mergedDF <- mergedDF[order(mergedDF$site_no, mergedDF$dateTime),]
###############################################################
names(mergedDF) <- make.names(names(mergedDF))
@@ -358,7 +379,7 @@ importWaterML1 <- function(obs_url,asDateTime=FALSE, tz="UTC"){
r_bind_dr <- function(df1, df2){
# Note...this funciton doesn't retain factors/levels
# Note...this function doesn't retain factors/levels
# That is not a problem with any dataRetrieval function
# but, if this function gets used else-where,
# that should be addressed.
......
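A small sketch of the de-duplication step added to importWaterML1 above: rows sharing the same key columns are collapsed with aggregate(), keeping the non-NA value from each group, and numeric columns are coerced back afterwards. The toy data frame and key columns here are invented for illustration.

na.omit.unique <- function(x){
  if(all(is.na(x))) NA else stats::na.omit(x)
}
toy <- data.frame(site_no  = c("04024315", "04024315"),
                  dateTime = c("2020-01-01", "2020-01-01"),
                  flow     = c(10, NA),
                  temp     = c(NA, 4.5),
                  stringsAsFactors = FALSE)
# na.action = NULL keeps rows that contain NAs so each group can be collapsed
collapsed <- stats::aggregate(. ~ site_no + dateTime, data = toy,
                              FUN = na.omit.unique, na.action = NULL)
# mirror the class restoration above: aggregate() may not preserve column types
collapsed$flow <- as.numeric(collapsed$flow)
collapsed$temp <- as.numeric(collapsed$temp)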
@@ -146,8 +146,10 @@ readNWISdata <- function(..., asDateTime=TRUE,convertType=TRUE,tz="UTC"){
if(any(service %in% c("qw", "qwdata"))){
.Deprecated(old = "readNWISdata", package = "dataRetrieval",
new = "readWQPdata",
msg = "NWIS qw web services are being retired. Please see the vignette
'Changes to NWIS QW services' for more information.")
msg = "NWIS qw web services are being retired.
Please see vignette('qwdata_changes', package = 'dataRetrieval')
for more information.
https://cran.r-project.org/web/packages/dataRetrieval/vignettes/qwdata_changes.html")
}
values <- sapply(valuesList$values, function(x) URLencode(x))
@@ -276,21 +278,22 @@ countyCdLookup <- function(state, county, outputType = "id"){
stop("No county code provided")
}
if(length(state) > 1){
stop("Only one state allowed in countyCdLookup.")
}
#first turn state into stateCd postal name
stateCd <- stateCdLookup(state,outputType = "postal")
state_counties <- countyCd[countyCd$STUSAB == stateCd,]
if(is.numeric(county) | !is.na(suppressWarnings(as.numeric(county)))){
county_i <- which(as.numeric(county) == as.numeric(countyCd$COUNTY) & stateCd == countyCd$STUSAB)
} else {
# if no suffix was added, this will figure out what it should be (or throw a helpful error)
allSuffixes <- unique(tolower(unlist(lapply(strsplit(countyCd$COUNTY_NAME,split=" "), tail, 1))))
county_in_state <- grep(tolower(county), tolower(state_counties$COUNTY_NAME))
county_i <- unlist(lapply(allSuffixes, function(suffix, stateCd, county){
currentSuffixExistsInString <- grepl(paste0('(?i)\\', suffix, '$'), tolower(county))
retCounty <- ifelse(currentSuffixExistsInString, county, paste(county, suffix))
retCounty_i <- which(tolower(retCounty) == tolower(countyCd$COUNTY_NAME) & stateCd == countyCd$STUSAB)
return(retCounty_i)
}, stateCd, county))
county_i <- which(countyCd$STUSAB == stateCd &
countyCd$COUNTY_NAME == state_counties$COUNTY_NAME[county_in_state] )
if(length(county_i) == 0){
stop(paste("Could not find", county, "(county),", stateCd,
......
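An interactive sketch of the simplified lookup above: countyCd (a data set shipped with dataRetrieval) is first subset to one state, then the county name is matched with grep() inside that subset. "WI" and "Dane" are example inputs, not values from the commit.

library(dataRetrieval)
state_counties <- countyCd[countyCd$STUSAB == "WI", ]
hit <- grep("dane", tolower(state_counties$COUNTY_NAME))
state_counties[hit, c("STUSAB", "COUNTY", "COUNTY_NAME")]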
@@ -34,10 +34,10 @@ readNWISpCode <- function(parameterCd){
parameterCd <- parameterCd[!is.na(parameterCd)]
baseURL <- drURL("pCode", Access=pkg.env$access)
fullURL <- paste0(baseURL, "fmt=rdb&group_cd=%")
if(any(parameterCd == "all")){
temp_df <- importRDB1(fullURL, asDateTime = FALSE)
parameterData <- data.frame(
parameter_cd = temp_df$parm_cd,
@@ -54,11 +54,17 @@ readNWISpCode <- function(parameterCd){
parameterData <- parameterCdFile[parameterCdFile$parameter_cd %in% parameterCd,]
if(nrow(parameterData) != length(parameterCd)){
if(length(parameterCd) == 1){
subURL <- paste0(baseURL, "fmt=rdb&parm_nm_cd=", parameterCd)
if(nrow(parameterData) > 0){
parameterCd_lookup <- parameterCd[!parameterCd %in% unique(parameterData$parameter_cd)]
}
if(length(parameterCd_lookup) == 1){
baseURL <- drURL("pCodeSingle", Access=pkg.env$access)
subURL <- paste0(baseURL, "fmt=rdb&parm_nm_cd=", parameterCd_lookup)
temp_df <- importRDB1(subURL, asDateTime = FALSE)
parameterData <- data.frame(
parameter_cd = temp_df$parm_cd,
temp_df <- data.frame(
parameter_cd = temp_df$parameter_cd,
parameter_group_nm = temp_df$group,
parameter_nm = temp_df$parm_nm,
casrn = temp_df$CASRN,
@@ -66,6 +72,8 @@ readNWISpCode <- function(parameterCd){
parameter_units = temp_df$parm_unit,
stringsAsFactors = FALSE
)
parameterData <- rbind(parameterData, temp_df)
attr(parameterData, "url") <- subURL
} else {
temp_df <- importRDB1(fullURL, asDateTime = FALSE)
@@ -84,7 +92,8 @@ readNWISpCode <- function(parameterCd){
if(nrow(parameterData) != length(parameterCd)){
badPcode <- parameterCd[!(parameterCd %in% parameterData$parameter_cd)]
warning("The following pCodes seem mistyped, and no information was returned: ",paste(badPcode,collapse=","))
warning("The following pCodes seem mistyped, and no information was returned: ",
paste(badPcode,collapse=","))
}
}
}
@@ -94,7 +103,9 @@ readNWISpCode <- function(parameterCd){
names(na.params) <- names(parameterData)
parameterData <- rbind(parameterData, na.params)
}
# order by parameterCd.orig
if(!isTRUE(parameterCd.orig == "all")){
parameterData <- parameterData[match(parameterCd.orig, parameterData$parameter_cd),]
}
return(parameterData)
}
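A short usage sketch for the updated lookup path (requires a live NWIS connection): codes found in the built-in parameterCdFile come from the cache, any remainder is fetched from the parameter-code service, and mistyped codes trigger the warning shown above. The codes below are ordinary examples.

pcode_info <- readNWISpCode(c("00060", "00010"))
pcode_info[, c("parameter_cd", "parameter_nm", "parameter_units")]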
@@ -83,8 +83,10 @@ readNWISqw <- function (siteNumbers,parameterCd,startDate="",endDate="",
expanded=TRUE,reshape=FALSE,tz="UTC"){
.Deprecated(new = "readWQPqw", package = "dataRetrieval",
msg = "NWIS qw web services are being retired. Please see the vignette
'Changes to NWIS QW services' for more information.")
msg = "NWIS qw web services are being retired.
Please see vignette('qwdata_changes', package = 'dataRetrieval')
for more information.
https://cran.r-project.org/web/packages/dataRetrieval/vignettes/qwdata_changes.html")
pgrp <- c("INF", "PHY", "INM", "INN", "NUT", "MBI", "BIO", "IMM", "IMN", "TOX",
"OPE", "OPC", "OOT", "RAD", "XXX", "SED", "POP",
......
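A migration sketch to accompany the deprecation message above: the Water Quality Portal replacement for a retired readNWISqw() call. WQP site identifiers carry the "USGS-" prefix; the site number, parameter code, date range, and argument names below are placeholders drawn from the readWQPqw() documentation rather than from this commit.

qw_data <- readWQPqw(siteNumbers = "USGS-04024315",
                     parameterCd = "00665",
                     startDate = "2010-01-01",
                     endDate = "2012-12-31")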
@@ -129,10 +129,54 @@
#' querySummary=TRUE)
#'
#' # querying by county
#' dailyLexingtonVA <- readWQPdata(statecode = "Virginia",
#' countycode="Lexington",
#' parameterCd = "00010")
#' DeWitt <- readWQPdata(statecode = "Illinois",
#' countycode= "DeWitt",
#' characteristicName = "Nitrogen")
#'
#' # Data profiles: "Organization Data"
#' org_data <- readWQPdata(statecode = "WI",
#' countycode = "Dane",
#' service = "Organization")
#'
#' # Data profiles: "Site Data Only"
#' site_data <- readWQPdata(statecode = "WI",
#' countycode = "Dane",
#' service = "Station")
#'
#' # Data profiles: "Project Data"
#' project_data <- readWQPdata(statecode = "WI",
#' countycode = "Dane",
#' service = "Project")
#'
#' # Data profiles: "Project Monitoring Location Weighting Data"
#' proj_mlwd <- readWQPdata(statecode = "WI",
#' countycode = "Dane",
#' service = "ProjectMonitoringLocationWeighting")
#'
#' # Data profiles: "Sample Results (physical/chemical metadata)":
#' samp_data <- readWQPdata(siteid = "USGS-04024315",
#' dataProfile = "resultPhysChem")
#'
#' # Data profiles: "Sample Results (biological metadata)"
#' samp_bio <- readWQPdata(siteid="USGS-04024315",
#' dataProfile = "biological")
#'
#' # Data profiles: "Sample Results (narrow)"
#' samp_narrow <- readWQPdata(siteid="USGS-04024315",
#' dataProfile = "narrowResult")
#'
#' # Data profiles: "Sampling Activity"
#' samp_activity <- readWQPdata(siteid="USGS-04024315",
#' dataProfile = "activityAll")
#'
#' # Data profile: "Sampling Activity Metrics"
#' act_metrics <- readWQPdata(statecode = "WI",
#' countycode = "Dane",
#' service = "ActivityMetric")
#'
#' # Data profile: "Result Detection Quantitation Limit Data"
#' dl_data <- readWQPdata(siteid="USGS-04024315",
#' service = "ResultDetectionQuantitationLimit")
#' }
readWQPdata <- function(..., querySummary=FALSE, tz="UTC",
ignore_attributes = FALSE){
@@ -177,33 +221,24 @@ readWQPdata <- function(..., querySummary=FALSE, tz="UTC",
siteInfo <- cbind(siteInfoCommon, siteInfo)
}
retvalVariableInfo <- retval[,c("CharacteristicName","USGSPCode",
"ResultMeasure.MeasureUnitCode","ResultSampleFractionText")]
retvalVariableInfo <- unique(retvalVariableInfo)
variableInfo <- data.frame(characteristicName=retval$CharacteristicName,
parameterCd=retval$USGSPCode,
param_units=retval$ResultMeasure.MeasureUnitCode,
valueType=retval$ResultSampleFractionText,
stringsAsFactors=FALSE)
attr(retval, "siteInfo") <- siteInfo
if(!anyNA(variableInfo$parameterCd)){
pcodes <- unique(variableInfo$parameterCd[!is.na(variableInfo$parameterCd)])
pcodes <- pcodes["" != pcodes]
paramINFO <- readNWISpCode(pcodes)
names(paramINFO)["parameter_cd" == names(paramINFO)] <- "parameterCd"
if(all(c("CharacteristicName",
"ResultMeasure.MeasureUnitCode",
"ResultSampleFractionText") %in% names(retval))){
retvalVariableInfo <- retval[,c("CharacteristicName",
"ResultMeasure.MeasureUnitCode",
"ResultSampleFractionText")]
retvalVariableInfo <- unique(retvalVariableInfo)
variableInfo <- data.frame(characteristicName=retval$CharacteristicName,
param_units=retval$ResultMeasure.MeasureUnitCode,
valueType=retval$ResultSampleFractionText,
stringsAsFactors=FALSE)
pCodeToName <- pCodeToName
varExtras <- pCodeToName[pCodeToName$parm_cd %in% unique(variableInfo$parameterCd[!is.na(variableInfo$parameterCd)]),]
names(varExtras)[names(varExtras) == "parm_cd"] <- "parameterCd"
variableInfo <- merge(variableInfo, varExtras, by="parameterCd", all = TRUE)
variableInfo <- merge(variableInfo, paramINFO, by="parameterCd", all = TRUE)
variableInfo <- unique(variableInfo)
attr(retval, "variableInfo") <- variableInfo
}
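A usage sketch showing how the metadata attached above can be retrieved from a readWQPdata() result; the query parameters are placeholders modeled on the examples earlier in this diff, and the call requires a live WQP connection.

wqp_data <- readWQPdata(siteid = "USGS-04024315",
                        characteristicName = "Nitrogen")
site_info <- attr(wqp_data, "siteInfo")
variable_info <- attr(wqp_data, "variableInfo")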