Maintenance scheduled for Thursday, October 22nd at 15:00 MDT. Expected downtime <1 hour.

Unverified commit 19a0e0ee authored by Blodgett, David L.; committed by GitHub.

Merge pull request #60 from dblodgett-usgs/master

testing and cran cleanup
parents c356c80e a603254c
......@@ -5,4 +5,5 @@ demo.R
_pkgdown.yml
docs
DISCLAIMER.md
code.json
\ No newline at end of file
code.json
appveyor.yml
\ No newline at end of file
......@@ -9,11 +9,10 @@ Authors@R: c(person("David", "Blodgett", role = c("aut", "cre"),
Description: Tools to create timeseries and geometry NetCDF files.
URL: https://code.usgs.gov/water/ncdfgeom
BugReports: https://github.com/USGS-R/ncdfgeom/issues
Imports: RNetCDF, ncmeta, sf, methods, dplyr
Imports: RNetCDF, ncmeta, sf, dplyr, methods
Depends:
R (>= 3.0)
Suggests: testthat, knitr, rmarkdown, pkgdown, tidyr, sp, geoknife, ncdf4
Remotes: hypertidy/ncmeta
Suggests: testthat, knitr, rmarkdown, pkgdown, tidyverse, sp, geoknife, ncdf4, jsonlite
License: file LICENSE
LazyData: TRUE
Encoding: UTF-8
......
.onAttach <- function(libname, pkgname) {
packageStartupMessage("This information is preliminary or provisional
and is subject to revision. It is being provided
to meet the need for timely best science. The
information has not received final approval by the
U.S. Geological Survey (USGS) and is provided on the
condition that neither the USGS nor the U.S. Government
shall be held liable for any damages resulting from the
authorized or unauthorized use of the information.
****Support Package****
packageStartupMessage("****Support Package****
This package is a USGS-R Support package.
see: https://owi.usgs.gov/R/packages.html#support")
}
......@@ -76,7 +67,7 @@ add_var <- function(nc, name, dim, type, units = NA, missing = NA, long_name = N
dim <- c(char_dim, dim)
}
var.def.nc(nc, name, type, dim)
if(!is.na(units))
if(!any(is.na(units)))
att.put.nc(nc, name, "units", "NC_CHAR", units)
if(!is.na(missing))
att.put.nc(nc, name, "missing_value", type, missing)
......
......@@ -26,8 +26,10 @@
#' example_file <-write_attribute_data(tempfile(), sample_data,
#' units = rep("unknown", ncol(sample_data)))
#'
#' ncdump <- system(paste("ncdump -h", example_file), intern = TRUE)
#' cat(ncdump ,sep = "\n")
#' try({
#' ncdump <- system(paste("ncdump -h", example_file), intern = TRUE)
#' cat(ncdump ,sep = "\n")
#' }, silent = TRUE)
#'
write_attribute_data <- function(nc_file, att_data, instance_dim_name = "instance",
units = rep("unknown", ncol(att_data)), overwrite = FALSE) {
......
......@@ -30,8 +30,10 @@
#'
#' hucPolygons_nc <- ncdfgeom::write_geometry(nc_file=tempfile(),
#' geom_data = hucPolygons)
#' ncdump <- system(paste("ncdump -h", hucPolygons_nc), intern = TRUE)
#' cat(ncdump ,sep = "\n")
#' try({
#' ncdump <- system(paste("ncdump -h", hucPolygons_nc), intern = TRUE)
#' cat(ncdump ,sep = "\n")
#' }, silent = TRUE)
#'
write_geometry = function(nc_file, geom_data, instance_dim_name = NULL, variables = list()) {
......
......@@ -62,10 +62,10 @@ write_timeseries_dsg = function(nc_file, instance_names, lats, lons, times, data
time_units = 'days since 1970-01-01 00:00:00',
attributes=list(), add_to_existing=FALSE, overwrite = FALSE){
if(!overwrite && !add_to_existing && file.exists(nc_file)) stop("File already exists and overwrite is false.")
if(overwrite && !add_to_existing) unlink(file.exists(nc_file))
if(!overwrite & !add_to_existing & file.exists(nc_file)) stop("File already exists and overwrite is false.")
if(overwrite & !add_to_existing) unlink(file.exists(nc_file))
if(add_to_existing && !file.exists(nc_file)) add_to_existing=FALSE
if(add_to_existing & !file.exists(nc_file)) add_to_existing=FALSE
if(!is(times, 'POSIXct')){
times = as.POSIXct(times)
......
init:
ps: |
$ErrorActionPreference = "Stop"
Invoke-WebRequest http://raw.github.com/krlmlr/r-appveyor/master/scripts/appveyor-tool.ps1 -OutFile "..\appveyor-tool.ps1"
Import-Module '..\appveyor-tool.ps1'
install:
ps: Bootstrap
environment:
NOT_CRAN: "true"
build_script:
- travis-tool.sh install_deps
test_script:
- travis-tool.sh run_tests
on_failure:
- travis-tool.sh dump_logs
artifacts:
- path: '*.Rcheck\**\*.log'
name: Logs
- path: '*.Rcheck\**\*.out'
name: Logs
- path: '*.Rcheck\**\*.fail'
name: Logs
- path: '*.Rcheck\**\*.Rout'
name: Logs
- path: '\*_*.tar.gz'
name: Bits
- path: '\*_*.zip'
name: Bits
This diff is collapsed.
......@@ -33,7 +33,9 @@ sample_data <- sf::st_set_geometry(sf::read_sf(system.file("shape/nc.shp",
example_file <-write_attribute_data(tempfile(), sample_data,
units = rep("unknown", ncol(sample_data)))
ncdump <- system(paste("ncdump -h", example_file), intern = TRUE)
cat(ncdump ,sep = "\\n")
try({
ncdump <- system(paste("ncdump -h", example_file), intern = TRUE)
cat(ncdump ,sep = "\\n")
}, silent = TRUE)
}
......@@ -33,8 +33,10 @@ hucPolygons <- sf::read_sf(system.file('extdata','example_huc_eta.json', package
hucPolygons_nc <- ncdfgeom::write_geometry(nc_file=tempfile(),
geom_data = hucPolygons)
ncdump <- system(paste("ncdump -h", hucPolygons_nc), intern = TRUE)
cat(ncdump ,sep = "\\n")
try({
ncdump <- system(paste("ncdump -h", hucPolygons_nc), intern = TRUE)
cat(ncdump ,sep = "\\n")
}, silent = TRUE)
}
\references{
......
......@@ -2,6 +2,9 @@ library(RNetCDF)
library(ncdf4)
library(sf)
library(sp)
nc_file<-tempfile()
compareSP <- function(polygonData, returnPolyData) {
polygonData <- sf::as_Spatial(polygonData)
returnPolyData <- sf::as_Spatial(returnPolyData)
......
......@@ -17,6 +17,7 @@ test_that("create geom_examples.md", {
sink(geom_examples)
cat(paste("# Examples - Contiguous Ragged Arrays \n\n"))
try({
for(geom in 1:length(namesstr)) {
cat(paste0("## ", namesstr[geom]," \nWell Known Text (WKT): ```",fixtureData[["2d"]][order[geom]]),"``` \n")
fileName<-paste0("sample_",order[geom],".nc")
......@@ -26,12 +27,13 @@ test_that("create geom_examples.md", {
write_geometry(fileName, get_fixture_data(order[geom]))
}
cat("Common Data Language (CDL):\n``` \n")
t<-system(paste0("ncdump ", fileName), intern = TRUE)
cat(t,sep = "\n")
t <- system(paste0("ncdump ", fileName), intern = TRUE)
cat(t, sep = "\n")
cat(" \n``` \n\n")
system(paste("rm", fileName))
}
sink()
testthat::skip_on_cran()
expect(file.exists(geom_examples))
}, silent = TRUE)
})
......@@ -211,23 +211,25 @@ test_that("A whole shapefile can be written", {
})
test_that("big roundtrip", {
dir.create("data/temp/", showWarnings = FALSE)
unzip("data/climdiv_prcp.nc.zip", exdir = "data/temp/")
expect_warning(prcp_data <- read_timeseries_dsg("data/temp/climdiv_prcp.nc"),
"no altitude coordinate found")
nc_file <- "data/climdiv_prcp.nc"
expect_warning(
prcp_data <- read_timeseries_dsg(nc_file),
"no altitude coordinate found")
expect_equal(length(prcp_data), 9)
expect_equal(length(prcp_data$time), 1500)
expect_s3_class(prcp_data$time[1], "POSIXct")
expect_equal(nrow(prcp_data$data_frames[[1]]), 1500)
expect_equal(ncol(prcp_data$data_frames[[1]]), 344)
climdiv_poly <- read_geometry("data/temp/climdiv_prcp.nc")
climdiv_poly <- read_geometry(nc_file)
expect_s3_class(climdiv_poly, "sf")
expect_s3_class(climdiv_poly$geom, "sfc_GEOMETRY")
out_nc <- write_timeseries_dsg(nc_file = "data/temp/temp.nc",
out_nc <- write_timeseries_dsg(nc_file = tempfile(),
instance_names = names(prcp_data$data_frames[[1]]),
lats = prcp_data$lats,
lons = prcp_data$lons,
......@@ -239,14 +241,11 @@ test_that("big roundtrip", {
attributes = prcp_data$global_attributes[[1]],
overwrite = TRUE)
expect_error(write_geometry("data/temp/temp.nc", climdiv_poly, variables = "climdiv_prcp_inches"),
expect_error(write_geometry(out_nc, climdiv_poly, variables = "climdiv_prcp_inches"),
"Found multiple geometry types, only one is supported.")
climdiv_poly <- st_sf(st_cast(climdiv_poly, "MULTIPOLYGON"))
expect_warning(out_nc <- write_geometry("data/temp/temp.nc", climdiv_poly, variables = "climdiv_prcp_inches"),
expect_warning(out_nc <- write_geometry(out_nc, climdiv_poly, variables = "climdiv_prcp_inches"),
"no datum information found assuming WGS84")
unlink("data/temp/*")
})
\ No newline at end of file
context("orthogonal netcdf timeseries")
test_that("Create basic DSG file",{
test_that("Create basic DSG file", {
unlink(nc_file)
nc_summary<-'test summary'
nc_date_create<-'2099-01-01'
nc_creator_name='test creator'
......@@ -12,8 +13,6 @@ test_that("Create basic DSG file",{
creator_name=nc_creator_name,creator_email=nc_creator_email,
project=nc_project, processing_level=nc_proc_level)
nc_file<-'data/test_output.nc'
test_data <- get_sample_timeseries_data()
testnc<-write_timeseries_dsg(nc_file,
......@@ -149,28 +148,11 @@ test_that("Create basic DSG file",{
expect_equivalent(ncvar_get(testnc,varid="BCCA_0-125deg_pr_day_ACCESS1-0_rcp45_r1i1p1")[,71],test_data$all_data$`71`)
expect_equivalent(testnc$var$`BCCA_0-125deg_pr_day_ACCESS1-0_rcp45_r1i1p1`$units,"mm/d")
expect_equivalent(ncatt_get(testnc,varid=0,"summary")$value,'test summary')
expect("duplicate" %in% names(testnc$var))
test_data$meta <- list(name = "character", long_name = "test")
char_test <- dplyr::mutate_all(test_data$var_data, as.character)
testnc<-write_timeseries_dsg(nc_file,
names(test_data$var_data),
test_data$lats, test_data$lons,
test_data$time, char_test,
test_data$alts,
data_unit=test_data$units,
data_prec='char',
data_metadata=test_data$meta,
attributes=global_attributes,
add_to_existing = TRUE)
testnc<-nc_open(nc_file)
expect("character" %in% names(testnc$var))
# covers no altitude and iteration to write many rows.
test_dat2 <- dplyr::bind_rows(test_data$var_data, test_data$var_data)
time <- c(test_data$time,test_data$time)
testnc<-write_timeseries_dsg("temp.nc",
testnc<-write_timeseries_dsg(tempfile(),
names(test_data$var_data),
test_data$lats, test_data$lons,
time, test_dat2,
......@@ -178,14 +160,13 @@ test_that("Create basic DSG file",{
data_prec='double',
data_metadata=test_data$meta,
attributes=global_attributes)
testnc<-nc_open("temp.nc")
testnc<-nc_open(testnc)
expect(testnc$dim$time$len == 1460)
unlink("temp.nc")
char_test <- dplyr::mutate_all(test_dat2, as.character)
time <- c(test_data$time,test_data$time)
testnc<-write_timeseries_dsg("temp.nc",
testnc<-write_timeseries_dsg(tempfile(),
names(test_data$var_data),
test_data$lats, test_data$lons,
time, char_test,
......@@ -194,54 +175,35 @@ test_that("Create basic DSG file",{
data_metadata=test_data$meta,
attributes=global_attributes)
testnc<-nc_open("temp.nc")
testnc<-nc_open(testnc)
expect(testnc$dim$time$len == 1460)
unlink("temp.nc")
})
test_that('soilmoisturetools data writes as expected', {
ok<-readRDS("data/soilmoisturetools/ok.rds")
ok_meta<-readRDS("data/soilmoisturetools/ok_meta.rds")
attributes <- list(
title = 'National Soil Moisture Network SOS',
abstract = 'This service provides soil moisture data from the U.S.
National Soil Moisture Network Pilot and serves data from SCAN, CRN,
West Texas and Oklahoma Mesonets. This SOS web service delivers the data
using GML.',
'provider name' = 'U.S. Geological Survey, Office of Water Information,
Center for Integrated Data Analytics, United States Government',
'provider site' = 'http://cida.usgs.gov',
description = 'Percentile of Volumetric Soil Moisture as compared
to the historical distribution. Percentiles are calculated using
cumulative distribution functions and range from 0-100.'
)
nc_file <- write_timeseries_dsg(
nc_file = tempfile(),
instance_names = ok$station,
lats = ok_meta$latitude,
lons = ok_meta$longitude,
alts = ok_meta$elevation,
times = ok$datetime[1],
data = as.data.frame(array(ok$value, dim = c(
1, length(ok$value)
))),
data_unit = "percent",
data_prec = "double",
attributes = attributes
)
expect("duplicate" %in% names(testnc$var), failure_message = names(testnc$var))
nc <- nc_open(nc_file)
nc_close(testnc)
expect(file.exists(nc_file))
test_data$meta <- list(name = "character", long_name = "test")
char_test <- dplyr::mutate_all(test_data$var_data, as.character)
testnc<-write_timeseries_dsg(nc_file,
names(test_data$var_data),
test_data$lats, test_data$lons,
test_data$time, char_test,
test_data$alts,
data_unit=test_data$units,
data_prec='char',
data_metadata=test_data$meta,
attributes=global_attributes,
add_to_existing = TRUE)
testnc<-nc_open(nc_file)
expect("character" %in% names(testnc$var), failure_message = names(testnc$var))
})
test_that("Read basic DSG file",{
test_that("bork the file", {
test_data <- get_sample_timeseries_data()
nc_file<-'data/test_output.nc'
testlist<-read_timeseries_dsg(nc_file)
expect_equivalent(length(testlist$time), length(test_data$time))
......@@ -261,11 +223,7 @@ test_that("Read basic DSG file",{
expect_equivalent(testlist$global_attributes$nc_title,'test title')
expect_equivalent(testlist$data_frames[1][[1]],test_data$var_data) # Plan to have the dataframes work for 1 to many variables.
})
test_that("warnings and edge cases", {
nc_file <- "data/test_output.nc"
nc_file_borked <- "data/test_output_borked.nc"
nc_file_borked <- tempfile()
file.copy(nc_file, nc_file_borked, overwrite = TRUE)
nc <- RNetCDF::open.nc(nc_file_borked, write = TRUE)
att.delete.nc(nc, "BCCA_0-125deg_pr_day_ACCESS1-0_rcp45_r1i1p1", "coordinates")
......@@ -324,3 +282,40 @@ test_that("warnings and edge cases", {
unlink(nc_file)
unlink(nc_file_borked)
})
test_that('soilmoisturetools data writes as expected', {
ok<-readRDS("data/soilmoisturetools/ok.rds")
ok_meta<-readRDS("data/soilmoisturetools/ok_meta.rds")
attributes <- list(
title = 'National Soil Moisture Network SOS',
abstract = 'This service provides soil moisture data from the U.S.
National Soil Moisture Network Pilot and serves data from SCAN, CRN,
West Texas and Oklahoma Mesonets. This SOS web service delivers the data
using GML.',
'provider name' = 'U.S. Geological Survey, Office of Water Information,
Center for Integrated Data Analytics, United States Government',
'provider site' = 'http://cida.usgs.gov',
description = 'Percentile of Volumetric Soil Moisture as compared
to the historical distribution. Percentiles are calculated using
cumulative distribution functions and range from 0-100.'
)
nc_file <- write_timeseries_dsg(
nc_file = tempfile(),
instance_names = ok$station,
lats = ok_meta$latitude,
lons = ok_meta$longitude,
alts = ok_meta$elevation,
times = ok$datetime[1],
data = as.data.frame(array(ok$value, dim = c(
1, length(ok$value)
))),
data_unit = "percent",
data_prec = "double",
attributes = attributes
)
nc <- nc_open(nc_file)
expect(file.exists(nc_file))
unlink(nc_file)
})
......@@ -31,7 +31,7 @@ Below the example, all the geometry types that `ncdfgeom` handles are shown in d
Tables of point, line, or polygon features with associated timeseries are the target for this functionality. Here, we use some sample data from the `ncdfgeom` package.
```{r libs}
example_file <- "example.nc"
example_file <- tempfile()
file.copy(from = system.file('extdata/example_huc_eta.nc', package = 'ncdfgeom'),
to = example_file,
......@@ -45,9 +45,9 @@ plot(sf::st_geometry(polygons))
```
Now we have the polygons as shown above and a NetCDF file with a header that looks like:
```{r dump_polygons, echo=FALSE}
ncdump <- system(paste("ncdump -h", example_file), intern = TRUE)
cat(ncdump ,sep = "\n")
```{r dump_polygons, echo=FALSE, cache=TRUE, eval = FALSE}
try({ncdump <- system(paste("ncdump -h", example_file), intern = TRUE)
cat(ncdump ,sep = "\n")}, silent = TRUE)
```
Now we can use the `write_geometry` function to add the polygon data to the NetCDF file.
```{r demo}
......@@ -61,8 +61,8 @@ ncdfgeom::write_geometry(nc_file=example_file,
Now the NetCDF file looks like:
```{r dump_polygons_ts, echo=FALSE}
ncdump <- system(paste("ncdump -h", example_file), intern = TRUE)
cat(ncdump ,sep = "\n")
try({ncdump <- system(paste("ncdump -h", example_file), intern = TRUE)
cat(ncdump ,sep = "\n")}, silent = TRUE)
```
Read the polygon data from the file and write it out to a geopackage.
......
......@@ -48,11 +48,11 @@ When on CRAN, the package will be installed with the typical install.packages me
install.packages("ncdfgeom")
```
For this demo, we'll use `sf`, `dplyr`, and `ncdfgeom`.
For this demo, we'll use `sf`, `tidyverse`, and `ncdfgeom`.
```{r libs, message=FALSE, warning=FALSE}
library(sf)
library(dplyr)
library(tidyverse)
library(ncdfgeom)
```
......@@ -87,10 +87,6 @@ As shown above, we have two `data.frame`s. One has 344 columns and the other 344
The NetCDF discrete sampling geometries timeseries standard requires point lat/lon coordinate locations for timeseries data. In the code below, we calculate these values and write the timeseries data to a netcdf file.
```{r write_ts, warning = FALSE}
library(tidyverse)
library(sf)
library(ncdfgeom)
climdiv_centroids <- climdiv_poly %>%
st_transform(5070) %>% # Albers Equal Area
st_set_agr("constant") %>%
......@@ -126,8 +122,8 @@ write_geometry(nc_file = "climdiv_prcp.nc",
```
Now we have a file with a structure as shown in the `ncdump` output below.
```{r ncdump}
ncdump <- system(paste("ncdump -h", nc_file), intern = TRUE)
cat(ncdump, sep = "\n")
try({ncdump <- system(paste("ncdump -h", nc_file), intern = TRUE)
cat(ncdump, sep = "\n")}, silent = TRUE)
```
For more information about the polygon and timeseries data structures used here, see the [NetCDF-CF standard.](http://cfconventions.org/cf-conventions/cf-conventions.html)
......@@ -211,8 +207,6 @@ plot(prcp["prcp"], lwd = 0.1, pal = p_colors,
This is the code used to download and prep the precipitation and spatial data. Provided for reproducibility and is not run here.
```{r setup_dontrun, eval = FALSE}
library(tidyverse)
library(sf)
# Description here: ftp://ftp.ncdc.noaa.gov/pub/data/cirs/climdiv/divisional-readme.txt
prcp_url <- "ftp://ftp.ncdc.noaa.gov/pub/data/cirs/climdiv/climdiv-pcpndv-v1.0.0-20190408"
......
......@@ -102,8 +102,8 @@ ncmeta::nc_dims(nc_file)
The header of the resulting NetCDF file looks like:
```{r dump_polygons, echo=FALSE}
ncdump <- system(paste("ncdump -h", nc_file), intern = TRUE)
cat(ncdump ,sep = "\n")
try({ncdump <- system(paste("ncdump -h", nc_file), intern = TRUE)
cat(ncdump ,sep = "\n")}, silent = TRUE)
```
This file can be read back into R with the function `read_timeseries_dsg`. The response is a list of variables as shown below.
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment.