Water Mission Area / nhgf / reference-hydrofabric

Commit 73399b3f, authored 4 months ago by Blodgett, David L.
finish initial refactor into simplified functions for #154
Parent: b7068300
1 merge request: !183 (Refactor - progress)
Showing 2 changed files, with 150 additions and 349 deletions:

workspace/00_get_data.Rmd  (+99 −343)
workspace/R/1_get_data.R   (+51 −6)
workspace/00_get_data.Rmd (+99 −343)
...
...
@@ -281,13 +281,18 @@ waterbodies_path <- file.path(nhdplus_dir, "nhdplus_waterbodies.rds")
if(!file.exists(waterbodies_path)) {
message("formatting NHDPlus watebodies...")
data.table::rbindlist(list(
data.table::rbindlist(
list(
read_sf(out_list$nhdplus_gdb, "NHDWaterbody") |>
st_transform(proj_crs) |>
mutate(layer = "NHDWaterbody"),
read_sf(out_list$nhdplus_gdb, "NHDArea") |>
st_transform(proj_crs) |>
mutate(layer = "NHDArea")), fill = TRUE) |>
mutate(layer = "NHDArea")
), fill = TRUE) |>
st_as_sf() |>
saveRDS(waterbodies_path)
...
...
@@ -305,114 +310,37 @@ when aggregating at points of interest.
fullcat_path <- file.path(nhdplus_dir, "nhdcat_full.rds")
islandcat_path <- file.path(islands_dir, "nhdcat_full.rds")
# Create full cat dataset
if(!file.exists(fullcat_path)){
cat_tab <- cat_rpu(out_list$ref_cat, nhdplus_gdb)
saveRDS(cat_tab, fullcat_path)
island_tab <- cat_rpu(out_list$islands_gdb, islands_gdb)
saveRDS(island_tab, islandcat_path)
}
if(!file.exists(fullcat_path))
saveRDS(cat_rpu(out_list$ref_cat, nhdplus_gdb),
fullcat_path)
if(!file.exists(islandcat_path))
saveRDS(cat_rpu(out_list$islands_gdb, islands_gdb),
islandcat_path)
out_list <- c(out_list, list(fullcats_table = fullcat_path))
out_list <- c(out_list, list(islandcats_table = islandcat_path))
out_list <- c(out_list, list(fullcats_table = fullcat_path
, islandcats_table = islandcat_path
))
```
Download NHDPlusV2 FDR and FAC grids for refactoring and catchment splitting.
```{r NHDPlusV2 FDR_FAC}
# NHDPlus FDR/FAC grids available by raster processing unit
# TODO: set this up for a per-region download for #134
out_list<- c(out_list, make_fdr_fac_list(file.path(data_dir, "fdrfac")))
fdr_fac_dir <- file.path(data_dir, "fdrfac")
if(!dir.exists(fdr_fac_dir)) {
dir.create(fdr_fac_dir, recursive = TRUE, showWarnings = FALSE)
download_elev("FDRFAC", fdr_fac_dir)
}
dirs <- unique(dirname(list.files(fdr_fac_dir, recursive = TRUE,
full.names = TRUE)))
fdr <- dirs[grepl(".*/fdr$", dirs)]
fac <- dirs[grepl(".*/fac$", dirs)]
out <- list(fdr = list(), fac = list())
rpu <- substr(fdr, (nchar(fdr) - 6), (nchar(fdr) - 4))
out$fdr <- as.list(setNames(fdr, paste0("rpu_", rpu)))
out$fac <- as.list(setNames(fac, paste0("rpu_", rpu)))
out_list<- c(out_list, out)
```
Download NHDPlusV2 elevation grids for headwater extensions and splitting
catchments into left and right banks.
```{r NHDPlusV2 elev}
# NHDPlus FDR/FAC grids available by raster processing unit
elev_dir <- file.path(data_dir, "nhdplusv2_elev")
if(!dir.exists(elev_dir)) {
dir.create(elev_dir, recursive = TRUE, showWarnings = FALSE)
# NHDPlus elev grids available by raster processing unit
# TODO: set this up for a per-region download for #134
out_list<- c(out_list, make_nhdplus_elev_list(file.path(data_dir, "nhdplusv2_elev")))
download_elev("DEM", elev_dir)
}
dirs <- unique(dirname(list.files(elev_dir, recursive = TRUE,
full.names = TRUE)))
elev_cm <- dirs[grepl(".*/elev_cm$", dirs)]
out <- list(elev_cm = list())
rpu <- substr(elev_cm, (nchar(elev_cm) - 10), (nchar(elev_cm) - 8))
out$elev_cm <- as.list(setNames(elev_cm, paste0("rpu_", rpu)))
out_list<- c(out_list, out)
```
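Both grid chunks recover the three-character raster processing unit (RPU) code from fixed offsets at the tail of each directory path. A quick illustration with hypothetical paths (the real layout comes from the extracted NHDPlus archives):

```r
# the RPU code sits just before the trailing "/fdr" (or "/fac") component
p <- "data/fdrfac/NHDPlusMS/NHDPlus08/fdrfac08a/fdr"   # hypothetical path
substr(p, nchar(p) - 6, nchar(p) - 4)                  # "08a"

# elevation grids end in "/elev_cm", hence the wider offset
q <- "data/nhdplusv2_elev/NHDPlusMS/NHDPlus08/Ned08a/elev_cm"  # hypothetical path
substr(q, nchar(q) - 10, nchar(q) - 8)                         # "08a"
```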
Download the current WBD snapshot.
```{r WBD}
# Snapshot of National WBD
wbd_dir <- file.path(data_dir, "wbd")
wbd_file <- "WBD_National_GDB"
if(!dir.exists(wbd_dir)) {
dir.create(wbd_dir, recursive = TRUE)
wbd <- download.file(
"https://prd-tnm.s3.amazonaws.com/StagedProducts/Hydrography/WBD/National/GDB/WBD_National_GDB.zip",
destfile = file.path(wbd_dir, "WBD_National_GDB.zip"), mode = "wb")
}
out_gdb <- file.path(wbd_dir, paste0(wbd_file, ".gdb"))
out <- list(latest_wbd = file.path(wbd_dir, "WBD.rds"))
if(!file.exists(out$latest_wbd)) {
wbd <- file.path(wbd_dir, paste0(wbd_file, ".zip"))
system(paste0(sevenz, " x ", wbd, " -o", wbd_dir),
ignore.stdout = TRUE, intern = TRUE)
# Read the feature class
wbdfc <- sf::read_sf(out_gdb,
"WBDHU12") %>%
st_as_sf() %>%
st_transform(crs = proj_crs)
saveRDS(wbdfc, out$latest_wbd)
}
out_rds <- list(latest_wbd_rds = out$latest_wbd)
out_list <- c(out_list, out_rds)
```
MERIT Topographic and Hydrographic data for deriving GIS Features of the
National Hydrologic Modeling, Alaska Domain
...
...
@@ -427,72 +355,35 @@ National Hydrologic Modeling, Alaska Domain
merit_dir <- file.path(data_dir, "merged_AK_MERIT_Hydro")
out <- list(merit_catchments = file.path(merit_dir, "merged_AK_MERIT_Hydro",
"cat_pfaf_78_81_82_MERIT_Hydro_v07_Basins_v01.shp"),
merit_rivers = file.path(merit_dir, "merged_AK_MERIT_Hydro",
"riv_pfaf_78_81_82_MERIT_Hydro_v07_Basins_v01.shp"),
aster_dem = file.path(merit_dir, "dem.tif"),
merit_dem = file.path(merit_dir, "ak_merit_dem.tif"),
merit_fdr = file.path(merit_dir, "ak_merit_fdr.tif"),
merit_fac = file.path(merit_dir, "ak_merit_fac.tif"))
if(!dir.exists(merit_dir)) {
dir.create(merit_dir, recursive = TRUE, showWarnings = FALSE)
m <- "merged_AK_MERIT_Hydro.zip"
check_auth()
sbtools::item_file_download("5dbc53d4e4b06957974eddae", names = m,
destinations = file.path(merit_dir, m))
unzip(file.path(merit_dir, m), exdir = merit_dir)
rm(m)
get_sbfile <- function(f, itm) {
if(!file.exists(o <- gsub("zip", "tif", f))) {
check_auth()
sbtools::item_file_download(itm, names = basename(f),
destinations = f)
}
#unzip(f, exdir = merit_dir)
system(paste0(sevenz, " x ", f, " -o", merit_dir), ignore.stdout = TRUE,
intern = TRUE)
return(o)
}
# Out folders
out <- list(aster_dem = get_sbfile(file.path(merit_dir, "dem.zip"),
"5fbbc6b6d34eb413d5e21378"),
merit_dem = get_sbfile(file.path(merit_dir,
"ak_merit_dem.zip"),
"5fbbc6b6d34eb413d5e21378"),
merit_fdr = get_sbfile(file.path(merit_dir,
"ak_merit_fdr.zip"),
"64ff628ed34ed30c2057b430"),
merit_fac = get_sbfile(file.path(merit_dir,
"ak_merit_fac.zip"),
"64ff628ed34ed30c2057b430"))
out <- list(aster_dem = get_sbfile(file.path(merit_dir, "dem.zip"), "5fbbc6b6d34eb413d5e21378"),
merit_dem = get_sbfile(file.path(merit_dir,
"ak_merit_dem.zip"), "5fc51e65d34e4b9faad8877b"),
merit_fdr = get_sbfile(file.path(merit_dir,
"ak_merit_fdr.zip"), "5fc51e65d34e4b9faad8877b"),
merit_fac = get_sbfile(file.path(merit_dir,
"ak_merit_fac.zip"), "5fc51e65d34e4b9faad8877b"))
}
get_sb_file("5dbc53d4e4b06957974eddae", "merged_AK_MERIT_Hydro.zip", merit_dir)
# TODO: update to use "6644f85ed34e1955f5a42dc4" when released (roughly Dec 10,)
get_sb_file("5fbbc6b6d34eb413d5e21378", "dem.zip", merit_dir)
get_sb_file("64ff628ed34ed30c2057b430",
c("ak_merit_dem.zip", "ak_merit_fdr.zip", "ak_merit_fac.zip"),
merit_dir)
out_list <- c(out_list, out)
```
out_list <- c(
out_list,
list(merit_catchments = file.path(merit_dir,
"merged_AK_MERIT_Hydro",
"cat_pfaf_78_81_82_MERIT_Hydro_v07_Basins_v01.shp"),
merit_rivers = file.path(merit_dir,
"merged_AK_MERIT_Hydro",
"riv_pfaf_78_81_82_MERIT_Hydro_v07_Basins_v01.shp"),
aster_dem = file.path(merit_dir, "dem.tif"),
merit_dem = file.path(merit_dir, "ak_merit_dem.tif"),
merit_fdr = file.path(merit_dir, "ak_merit_fdr.tif"),
merit_fac = file.path(merit_dir, "ak_merit_fac.tif")))
```
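The repeated check_auth() / sbtools::item_file_download() / extract blocks removed above are consolidated into get_sb_file(), whose signature appears in the workspace/R/1_get_data.R hunk below. A minimal sketch of the pattern it replaces, reconstructed from the deleted code (an assumption, not the committed implementation):

```r
get_sb_file <- function(item, item_files, out_destination, unzip = TRUE) {
  dir.create(out_destination, showWarnings = FALSE, recursive = TRUE)

  out_files <- file.path(out_destination, item_files)
  need <- out_files[!file.exists(out_files)]

  if(length(need) > 0) {
    check_auth()  # ensure a ScienceBase session (helper defined elsewhere in workspace/R)
    sbtools::item_file_download(item, names = basename(need), destinations = need)
  }

  if(unzip)
    for(f in out_files[grepl("\\.zip$", out_files)])
      utils::unzip(f, exdir = out_destination)

  invisible(out_files)
}
```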
Source data for deriving GIS Features of the National Hydrologic Modeling,
Alaska Domain
```{r AK GF Source data}
# TODO: fix this citation
# Bock, A.R., Rosa, S.N., McDonald, R.R., Wieczorek, M.E., Santiago, M.,
# Blodgett, D.L., and Norton, P.A., 2024, Geospatial Fabric for National
# Hydrologic Modeling, Hawaii Domain: U.S. Geological Survey data release,
...
...
@@ -501,18 +392,10 @@ Alaska Domain
AK_GF_source <- "ak.7z"
AK_dir <- file.path(data_dir, "AK")
if(!dir.exists(AK_dir)) {
dir.create(AK_dir, recursive = TRUE)
check_auth()
sbtools::item_file_download("5dbc53d4e4b06957974eddae", names = AK_GF_source,
destinations = file.path(AK_dir, AK_GF_source))
system(paste0(sevenz, " e -o", AK_dir, " ", file.path(AK_dir, AK_GF_source)))
} else {
out_ak <- list(ak_source = file.path(AK_dir, "ak.gpkg"))
out_list <- c(out_list, out_ak)
}
get_sb_file("5dbc53d4e4b06957974eddae", AK_GF_source, AK_dir)
out_list <- c(out_list, list(ak_source = file.path(AK_dir, "ak.gpkg")))
```
Source data for deriving GIS Features of the National Hydrologic Modeling,
...
...
@@ -524,80 +407,10 @@ Hawaii Domain
# Hydrologic Modeling, Hawaii Domain: U.S. Geological Survey data release,
# https://doi.org/10.5066/P9HMKOP8
HI_GF_source <- "hi.7z"
out_hi <- list(hi_source = file.path(islands_dir, "hi.gpkg"))
get_sb_file("5dbc53d4e4b06957974eddae", "hi.7z", islands_dir)
if(!file.exists(file.path(islands_dir, "hi.gpkg"))) {
check_auth()
sbtools::item_file_download("5dbc53d4e4b06957974eddae", names = HI_GF_source,
destinations = file.path(islands_dir,
HI_GF_source))
system(paste0(sevenz, " e -o", islands_dir, " ",
file.path(islands_dir, HI_GF_source)))
}
out_list <- c(out_list, list(hi_source = file.path(islands_dir, "hi.gpkg")))
out_list <- c(out_list, out_hi)
```
National Water Model Network Topology
```{r nwm_topology}
nwm_targz_url <-
"https://www.nohrsc.noaa.gov/pub/staff/keicher/NWM_live/NWM_parameters/NWM_parameter_files_v2.1.tar.gz"
nwm_parm_url <-
"https://www.nohrsc.noaa.gov/pub/staff/keicher/NWM_live/web/data_tools/NWM_v2.1_channel_hydrofabric.tar.gz"
targz <- file.path(data_dir, basename(nwm_targz_url))
out <- list(nwm_network = file.path(data_dir, "NWM_parameters_v2.1", "RouteLink_CONUS.nc"))
if(!file.exists(out$nwm_network)) {
options(timeout = 60000)
download.file(nwm_targz_url, destfile = targz)
utils::untar(targz, exdir = data_dir)
}
out_list <- c(out_list, out)
parmtgz <- file.path(data_dir, basename(nwm_parm_url))
out <- list(nwm_parm = file.path(data_dir,
"NWM_v2.1_channel_hydrofabric_10262020",
"nwm_v2_1_hydrofabric.gdb"))
if(!file.exists(out$nwm_parm)) {
download.file(nwm_parm_url, destfile = parmtgz)
utils::untar(parmtgz, exdir = data_dir)
}
out_list <- c(out_list, out)
```
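The options(timeout = 60000) call above matters: download.file() honors the timeout option (60 seconds by default), which is far too short for the multi-gigabyte NWM tarballs. A scoped variant, shown as a sketch with a hypothetical helper name, would restore the default afterwards:

```r
with_long_timeout <- function(expr, seconds = 60000) {
  # temporarily raise R's download timeout; restore the previous value on exit.
  # expr is evaluated lazily, so the download runs with the new option in effect.
  old <- options(timeout = seconds)
  on.exit(options(old), add = TRUE)
  expr
}

# usage sketch (hypothetical helper, not part of this commit):
# with_long_timeout(download.file(nwm_targz_url, destfile = targz))
```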
e2nhd Network Attributes
```{r nhdplus_attributes}
# Blodgett, D.L., 2023, Updated CONUS river network attributes based on the
# E2NHDPlusV2 and NWMv2.1 networks (ver. 2.0, February 2023): U.S. Geological
# Survey data release, https://doi.org/10.5066/P976XCVT.
out <- list(new_nhdp_atts = file.path("cache",
(sb_f <- "enhd_nhdplusatts.csv")))
if(!file.exists(out$new_nhdp_atts)) {
check_auth()
sbtools::item_file_download("63cb311ed34e06fef14f40a3",
names = sb_f,
destinations = out$new_nhdp_atts)
}
out_list <- c(out_list, out)
```
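This chunk still calls check_auth() directly before sbtools::item_file_download(). The helper is defined elsewhere in workspace/R; a plausible minimal sketch of what such a guard does (an assumption, not the repository's code):

```r
check_auth <- function() {
  # ensure an authenticated ScienceBase session exists before downloading
  if(!sbtools::is_logged_in())
    sbtools::initialize_sciencebase_session()
  invisible(TRUE)
}
```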
GIS Features of the Geospatial Fabric for National Hydrologic Modeling,
...
...
@@ -610,65 +423,39 @@ version 1.1, Transboundary Geospatial Fabric
# https://doi.org/10.5066/P971JAGF.
GFv11_dir <- file.path(data_dir, "GFv11")
out <- list(GFv11_gages_lyr = file.path(data_dir, "GFv11/GFv11_gages.rds"),
GFv11_gdb = file.path(GFv11_dir, "GFv1.1.gdb"),
GFv11_tgf = file.path(GFv11_dir, "TGF.gdb"))
# Download the GFv1.1 geodatabase
if(!dir.exists(GFv11_dir)) {
dir.create(GFv11_dir, recursive = TRUE)
check_auth()
sbtools::item_file_download("5e29d1a0e4b0a79317cf7f63",
names = "GFv1.1.gdb.zip",
destinations = file.path(GFv11_dir,
"GFv1.1.gdb.zip"))
tgf_f <- file.path(GFv11_dir, "TGF.gdb.zip")
check_auth()
sbtools::item_file_download("5d967365e4b0c4f70d113923",
names = basename(tgf_f),
destinations = tgf_f)
unzip(file.path(GFv11_dir, "GFv1.1.gdb.zip"), exdir = GFv11_dir)
unzip(tgf_f, exdir = GFv11_dir)
file.remove(tgf_f)
# Extract gages
GFv11_gages <- read_sf(out$GFv11_gdb, "POIs_v1_1") %>%
filter(Type_Gage != 0)
saveRDS(GFv11_gages, out$GFv11_gages_lyr)
file.remove(file.path(GFv11_dir, "GFv1.1.gdb.zip"))
}
get_sb_file("5e29d1a0e4b0a79317cf7f63", "GFv1.1.gdb.zip", GFv11_dir)
get_sb_file("5d967365e4b0c4f70d113923", "TGF.gdb.zip", GFv11_dir)
cat("", file = file.path(GFv11_dir, "GFv1.1.gdb.zip"))
cat("", file = file.path(GFv11_dir, "TGF.gdb.zip"))
# Extract gages
read_sf(out$GFv11_gdb, "POIs_v1_1") |>
filter(Type_Gage != 0) |>
saveRDS(out$GFv11_gages_lyr)
out_list <- c(out_list, out)
if(mapview)(mapview(readRDS(out_list$GFv11_gages_lyr)))
```
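Note the replacement of the old file.remove() calls with cat("", file = ...): rather than deleting the downloaded zips after extraction, the commit truncates them to zero-byte placeholders. Presumably get_sb_file() skips a download when its target file already exists, so the placeholder suppresses a re-download on the next run while still reclaiming the disk space:

```r
# after extraction, keep a zero-byte marker where the archive used to be
zip_path <- file.path(GFv11_dir, "GFv1.1.gdb.zip")
cat("", file = zip_path)  # truncate to 0 bytes
file.exists(zip_path)     # TRUE: the existence check still short-circuits
file.size(zip_path)       # 0
```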
GAGESII dataset
```{r Gages_II}
# https://doi.org/10.3133/70046617
if(!dir.exists(SWIM_points_path))
dir.create(SWIM_points_path, recursive = TRUE)
# Falcone, J., 2011, GAGES-II: Geospatial Attributes of Gages for Evaluating
# Streamflow: U.S. Geological Survey data release, https://doi.org/10.5066/P96CPHOT.
g2_out <- list(gagesii_lyr = file.path(SWIM_points_path,
"gagesII_9322_point_shapefile"))
get_sb_file("631405bbd34e36012efa304a", "gagesII_9322_point_shapefile.zip", SWIM_points_path)
gagesII_url <- "https://water.usgs.gov/GIS/dsdl/gagesII_9322_point_shapefile.zip"
out_list <- c(out_list, list(
gagesii_lyr = file.path(SWIM_points_path, "gagesII_9322_point_shapefile")))
zip <- file.path(SWIM_points_path, basename(gagesII_url))
if(!file.exists(g2_out$gagesii_lyr)) {
download.file(gagesII_url, destfile = zip)
unzip(zip, exdir = g2_out$gagesii_lyr)
}
out_list <- c(out_list, g2_out)
if(mapview)(mapview(read_sf(out_list$gagesii_lyr)))
```
...
...
@@ -684,25 +471,16 @@ locations
hilarri_dir <- file.path(data_dir, "HILARRI")
hilarri_out <- list(hilari_sites = file.path(hilarri_dir, "HILARRI_v2.csv"))
# Download the HILARRI points
if(!dir.exists(hilarri_dir))
dir.create(hilarri_dir, recursive = TRUE)
hilarri_url <- "https://hydrosource.ornl.gov/sites/default/files/2023-03/HILARRI_v2.zip"
download_file("https://hydrosource.ornl.gov/sites/default/files/2023-03/HILARRI_v2.zip",
out_path = hilarri_dir, check_path = hilarri_out$hilari_sites)
if(!file.exists(file.path(hilarri_dir, "HILARRI_v2.csv"))){
download.file(hilarri_url,
dest = file.path(hilarri_dir, basename(hilarri_url)),
mode = "wb")
unzip(file.path(hilarri_dir, basename(hilarri_url)), exdir = hilarri_dir)
}
out_list <- c(out_list, hilarri_out)
if(mapview){
hill_sf <- st_as_sf(read.csv(out_list$hilari_sites),
coords = c("longitude", "latitude"), crs = 4326)
mapview(hill_sf)}
if(mapview) {
mapview(st_as_sf(read.csv(out_list$hilari_sites),
coords = c("longitude", "latitude"),
crs = 4326))
}
```
...
...
@@ -722,51 +500,44 @@ ResOpsUS dataset and indexed locations
# management. Frontiers in Ecology and the Environment 9 (9): 494-502.
# https://ln.sync.com/dl/bd47eb6b0/anhxaikr-62pmrgtq-k44xf84f-pyz4atkm/view/default/447819520013
res_path <- file.path(data_dir,"reservoir_data")
# Set Data download links
res_att_url <- "https://zenodo.org/record/5367383/files/ResOpsUS.zip?download=1"
# ISTARF - Inferred Storage Targets and Release Functions for CONUS large reservoirs
istarf_url <- "https://zenodo.org/record/4602277/files/ISTARF-CONUS.csv?download=1"
# Download GRanD zip from above
GRanD_zip <- "GRanD_Version_1_3.zip"
GRanD_zip <-
file.path(res_path,
"GRanD_Version_1_3.zip"
)
# - ResOpsUS - reservoir operations
res_path <- file.path(data_dir,"reservoir_data")
if(!dir.exists(res_path))
dir.create(res_path, recursive = TRUE
)
download_file(res_att_url, res_path, file_name = "ResOpsUS.zip")
tab_out <- c(out_list, list(res_attributes = file.path(res_path, "ResOpsUS", "attributes",
"reservoir_attributes.csv"))
)
the_ex_dir <- file.path(res_path, "ResOpsUS")
tab_out <- list(res_attributes = file.path(the_ex_dir, "attributes",
"reservoir_attributes.csv"))
# Download istarf data
istarf_csv <- file.path(res_path, "ISTARF-CONUS.csv")
if(!file.exists(istarf_csv)) {
download.file(istarf_url, destfile = istarf_csv, mode = "wb")
}
tab_out <- list(istarf = istarf_csv)
out_list <- c(out_list, tab_out)
download_file(istarf_url, res_path, istarf_csv, file_name = "ISTARF-CONUS.csv")
out_list <- c(out_list, list(istarf = istarf_csv))
grand_dir <- file.path(res_path, "GRanD_Version_1_3")
# Extract GRanD data
if(!file.exists(GRanD_zip)) {
# set mode wb for a file of type zip
unzip(file.path(res_path, GRanD_zip), exdir = res_path)
} else {
print("Download GRanD data")
if(!dir.exists(grand_dir)) {
if(!file.exists(GRanD_zip))
stop("Download GRanD data from https://ln.sync.com/dl/bd47eb6b0/anhxaikr-62pmrgtq-k44xf84f-pyz4atkm/view/default/447819520013 to ",
res_path)
unzip(GRanD_zip, exdir = res_path)
}
out_list <- c(out_list, tab_out)
# crosswalk of selected (fit %in% c("full", "storage_only") 595 ResOpsUS to NID
# - derived by msant and abock; currently hosted at GFv2 POI Source data page
out_list <- c(out_list, list(GRanD = grand_dir))
resops_to_nid_path <- file.path(res_path, "cw_ResOpsUS_NID.csv")
if(!file.exists(resops_to_nid_path)) {
check_auth()
sbtools::item_file_download("5dbc53d4e4b06957974eddae",
names = "cw_ResOpsUS_NID.csv",
destinations = resops_to_nid_path)
}
get_sb_file("5dbc53d4e4b06957974eddae", "cw_ResOpsUS_NID.csv", resops_to_nid_path)
tab_out <- list(resops_NID_CW = resops_to_nid_path)
out_list <- c(out_list, tab_out)
out_list <- c(out_list, list(resops_NID_CW = resops_to_nid_path))
```
All Hydro-linked Network Data Index (NLDI) datasets
...
...
@@ -777,23 +548,8 @@ All Hydro-linked Network Data Index (NLDI) datasets
nldi_dir <- file.path(data_dir, "nldi")
# Download the HILARRI points
if(!dir.exists(nldi_dir))
dir.create(nldi_dir, recursive = TRUE)
nldi_list <- item_list_files("60c7b895d34e86b9389b2a6c", recursive = FALSE,
fetch_cloud_urls = TRUE)
for (fname in nldi_list$fname){
print(fname)
floc <- file.path(nldi_dir, fname)
if(!file.exists(floc)){
#check_auth()
sbtools::item_file_download("60c7b895d34e86b9389b2a6c",
names = fname,
destinations = floc)
}
}
get_sb_file("60c7b895d34e86b9389b2a6c", "all", nldi_dir)
```
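Here get_sb_file() is passed "all" instead of a file list, replacing the removed item_list_files() loop. A minimal sketch of how that sentinel might be handled internally, mirroring the deleted code (an assumption, not the committed implementation):

```r
# inside get_sb_file(): expand the "all" sentinel to every file on the item
if(identical(item_files, "all"))
  item_files <- sbtools::item_list_files("60c7b895d34e86b9389b2a6c")$fname
```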
```{r}
...
...
workspace/R/1_get_data.R (+51 −6)
...
...
@@ -42,14 +42,17 @@ get_sb_file <- function(item, item_files, out_destination, unzip = TRUE) {
#' @param url character url where the file can be retrieved
#' @param out_path character path the directory to save the downloaded file
#' @param check_path character path to check that should exist when done
#' @param file_name character name to use instead of basename of url
#' @param unzip logical whether to try to unzip the file
download_file <- function(url, out_path, check_path = NULL, unzip = TRUE) {
download_file <- function(url, out_path, check_path = NULL, unzip = TRUE,
                          file_name = NULL) {

  if(file.exists(check_path)) return(invisible(check_path))
  if(!is.null(check_path) && file.exists(check_path)) return(invisible(check_path))

  dir.create(out_path, showWarnings = FALSE, recursive = TRUE)

  out <- file.path(out_path, basename(url))
  if(is.null(file_name)) file_name <- basename(url)

  out <- file.path(out_path, file_name)

  if(!file.exists(out)) {

    httr::RETRY("GET", url, httr::write_disk(out))
...
...
@@ -59,7 +62,49 @@ download_file <- function(url, out_path, check_path = NULL, unzip = TRUE) {
}
  if(file.exists(check_path)) return(invisible(check_path))
  if(!is.null(check_path) && file.exists(check_path)) return(invisible(check_path))

  stop("did not create ", check_file)
  if(!is.null(check_path))
    stop("did not create ", check_path)
}
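For context, the chunks in 00_get_data.Rmd above exercise both new behaviors: check_path short-circuits the whole function when the finished artifact already exists, and file_name overrides basename(url) for awkward URLs such as Zenodo's "?download=1" suffix. A usage sketch built from calls shown in this commit:

```r
# skip everything if the extracted CSV is already in place
download_file("https://hydrosource.ornl.gov/sites/default/files/2023-03/HILARRI_v2.zip",
              out_path = hilarri_dir,
              check_path = hilarri_out$hilari_sites)

# Zenodo URLs end in "?download=1", so basename(url) would be mangled
download_file(istarf_url, res_path, istarf_csv, file_name = "ISTARF-CONUS.csv")
```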
#' make flow direction and flow accumulation file list
#' @param fdr_fac_dir directory where flow direction and flow accumulation are
#' @return list containing all flow direction and flow accumulation files
make_fdr_fac_list <- function(fdr_fac_dir) {

  if(!dir.exists(fdr_fac_dir))
    download_elev("FDRFAC", fdr_fac_dir)

  dirs <- unique(dirname(list.files(fdr_fac_dir, recursive = TRUE,
                                    full.names = TRUE)))

  fdr <- dirs[grepl(".*/fdr$", dirs)]
  fac <- dirs[grepl(".*/fac$", dirs)]

  out <- list(fdr = list(), fac = list())

  rpu <- substr(fdr, (nchar(fdr) - 6), (nchar(fdr) - 4))

  out$fdr <- as.list(setNames(fdr, paste0("rpu_", rpu)))
  out$fac <- as.list(setNames(fac, paste0("rpu_", rpu)))

  out
}
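The returned list is keyed by RPU so downstream chunks can address one processing unit at a time, for example (hypothetical RPU code and paths):

```r
fdrfac <- make_fdr_fac_list(file.path(data_dir, "fdrfac"))
fdrfac$fdr$rpu_08a  # ".../fdrfac08a/fdr"
fdrfac$fac$rpu_08a  # ".../fdrfac08a/fac"
```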
make_nhdplus_elev_list <- function(elev_dir) {

  if(!dir.exists(elev_dir))
    download_elev("DEM", elev_dir)

  dirs <- unique(dirname(list.files(elev_dir, recursive = TRUE,
                                    full.names = TRUE)))

  elev_cm <- dirs[grepl(".*/elev_cm$", dirs)]

  out <- list(elev_cm = list())

  rpu <- substr(elev_cm, (nchar(elev_cm) - 10), (nchar(elev_cm) - 8))

  out$elev_cm <- as.list(setNames(elev_cm, paste0("rpu_", rpu)))

  out
}