diff --git a/workspace/00_get_data.Rmd b/workspace/00_get_data.Rmd
index 13ead041b571b399da2ef378cd346cfd38a8295f..d75412ef3799226920a7f3ab204245c1719f951f 100644
--- a/workspace/00_get_data.Rmd
+++ b/workspace/00_get_data.Rmd
@@ -21,6 +21,10 @@ source("R/00_get_data_functions.R")
 
 library(sbtools)
 library(jsonlite)
+library(dplyr)
+library(nhdplusTools)
+library(sf)
+library(hyRefactor)
 
 if(!dir.exists("data")) {dir.create("data")}
 if(!dir.exists("bin")) {dir.create("bin")}
@@ -56,22 +60,24 @@ mapview <- FALSE
 HUC12 (Hydrologic Unit Code, Level 12) outlets derived from the Watershed 
 Boundary Dataset and indexed to the reference fabric form the baseline and 
 extent of national modeling fabrics.
-```{r HUC12 outlets, eval="huc12" %in% POI_types}
+```{r HUC12 outlets}
 
 #  Blodgett, D.L., 2022, Mainstem Rivers of the Conterminous United States: 
 #  U.S. Geological Survey data release, https://doi.org/10.5066/P9BTKP3T. 
 
 wbd_points_file <- "102020wbd_outlets.gpkg"
 
+if("huc12" %in% POI_types) {
 get_sb_file(item = "63cb38b2d34e06fef14f40ad",
                      item_files = wbd_points_file,
                      out_destination = data_dir)
+}
 
 out_list <- c(
   out_list, 
   list(hu12_points_path = file.path(data_dir, wbd_points_file)))
 
-if(mapview)(mapview(read_sf(out_list$hu12_points_path)))
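+# wrapped in try() so a failed interactive preview does not stop the chunk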
+if(mapview) try(mapview(read_sf(out_list$hu12_points_path)))
 
 ```
 
@@ -82,12 +88,12 @@ datasets developed for the Streamgage Watershed InforMation (SWIM) project. This
 dataset, which is referred to as “SWIM streamgage locations,” was created in 
 support of the second dataset of basin characteristics and disturbance indexes. 
 
-```{r SWIM, eval="gages" %in% POI_types}
+```{r SWIM}
 #  Hayes, L., Chase, K.J., Wieczorek, M.E., and Jackson, S.E., 2021, 
 #  USGS streamgages in the conterminous United States indexed to NHDPlus v2.1 
 #  flowlines to support Streamgage Watershed InforMation (SWIM), 2021: U.S. 
 #  Geological Survey data release, https://doi.org/10.5066/P9J5CK2Y.
-
+if("gages" %in% POI_types) {
 out_list <- c(
   out_list, 
   list(SWIM_points_path = 
@@ -100,7 +106,8 @@ out_list <- c(
   out_list, 
   list(SWIM_dbf = file.path(data_dir, "SWIMGFinfo.dbf")))
 
-if(mapview)(mapview(read_sf(out_list$SWIM_points_path)))
+if(mapview) try(mapview(read_sf(out_list$SWIM_points_path)))
+}
 ```
 
 Sites associated with work by the U.S. Geological Survey (USGS) to estimate 
@@ -108,7 +115,7 @@ the amount of water that is withdrawn and consumed by thermoelectric power
 plants (Diehl and others, 2013; Diehl and Harris, 2014; Harris and Diehl, 2019; 
 Galanter and others, 2023). 
 
-```{r Thermoelectric Facilities, eval="thermo_electric" %in% POI_types}
+```{r Thermoelectric Facilities}
 #   Harris, Melissa A. and Diehl, Timothy H., 2017. A Comparison of Three 
 #   Federal Datasets for Thermoelectric Water Withdrawals in the United States 
 #   for 2010. Journal of the American Water Resources Association (JAWRA) 
@@ -121,7 +128,7 @@ Galanter and othes, 2023).
 #   2008-2020 period by power plant, month, and year for the conterminous 
 #   United States: U.S. Geological Survey data release, 
 #   https://doi.org/10.5066/P9ZE2FVM.
-
+if("thermo_electric" %in% POI_types) {
 TE_points_path <- file.path(data_dir, "TE_points")
 
 dir.create(TE_points_path, recursive = TRUE, showWarnings = FALSE)
@@ -136,7 +143,8 @@ get_sb_file("63adc826d34e92aad3ca5af4",
 
 out_list <- c(out_list, list(TE_points_path = TE_points_path))
 
-if(mapview)(mapview(read_sf(out_list$TE_points_path)))
+if(mapview) try(mapview(read_sf(out_list$TE_points_path)))
+}
 ```
 
 Network locations made to improve the routing capabilities 
@@ -145,17 +153,18 @@ hydrologic analyses. The resulting enhanced network is named E2NHDPlusV2_us.
 This includes the network locations associated with some diversions and 
 water use withdrawals.
 
-```{r e2nhd supplemental data - USGS, eval="addition_removal" %in% POI_types}
+```{r e2nhd supplemental data - USGS}
 #   Schwarz, G.E., 2019, E2NHDPlusV2_us: Database of Ancillary Hydrologic 
 #   Attributes and Modified Routing for NHDPlus Version 2.1 Flowlines: U.S. 
 #   Geological Survey data release, https://doi.org/10.5066/P986KZEM.
-
+if("addition_removal" %in% POI_types) {
 out_list <- c(
   out_list, 
   list(USGS_IT_path = 
          get_sb_file("5d16509ee4b0941bde5d8ffe", 
                      "supplemental_files.zip",
                      file.path(data_dir, "USGS_IT"))))
+}
 ```
 
 Two datasets relate hydro location information from the National Inventory of
@@ -163,7 +172,7 @@ Dams to the NHDPlus network.  One effort is related to the SPARROW work
 (Wieczorek and others, 2018), the other related to work quantifying impacts on
 natural flow (Wieczorek and others, 2021).
 
-```{r National Inventory of Dams, eval="dams" %in% POI_types}
+```{r National Inventory of Dams}
 
 #   Wieczorek, M.E., Jackson, S.E., and Schwarz, G.E., 2018, Select Attributes 
 #   for NHDPlus Version 2.1 Reach Catchments and Modified Network Routed 
@@ -175,6 +184,7 @@ natural flow (Wieczorek and others, 2021).
 #  impact/disturbance metrics for the conterminous United States, 1800 to 2018: 
 #  U.S. Geological Survey data release, https://doi.org/10.5066/P92S9ZX6.
 
+if("dams" %in% POI_types) {
 NID_points_path <- file.path(data_dir, "NID_points")
 
 get_sb_file("5dbc53d4e4b06957974eddae",
@@ -187,7 +197,8 @@ get_sb_file("5fb7e483d34eb413d5e14873",
 
 out_list <- c(out_list, list(NID_points_path = NID_points_path))
 
-if(mapview)(mapview(read_sf(out_list$NID_points_path)))
+if(mapview) try(mapview(read_sf(out_list$NID_points_path)))
+}
 ```
 
 This next section retrieves NHDPlus datasets related to national modeling 
@@ -233,17 +244,6 @@ download_file(paste0(epa_data_root, "NationalData/NHDPlusV21_NationalData_Seamle
               out_path = islands_dir, check_path = islands_gdb)
 }
 
-# cache the huc12 layer in rds format
-hu12_rds <- file.path(nhdplus_dir, "HUC12.rds")
-
-if(!file.exists(hu12_rds)) {
-  read_sf(nhdplus_gdb, layer = "HUC12") |>
-    st_make_valid() |>
-    st_transform(crs = proj_crs) |> 
-    # TODO: convert this to gpkg
-    saveRDS(file = hu12_rds)
-}
-
 get_sb_file("5dcd5f96e4b069579760aedb", "GageLocGFinfo.dbf", data_dir)
 
 download_file(paste0(epa_data_root, "GlobalData/NHDPlusV21_NHDPlusGlobalData_03.7z"),
@@ -322,18 +322,22 @@ out_list <- c(out_list, list(fullcats_table = fullcat_path, islandcats_table = i
 
 Download NHDPlusV2 FDR and FAC grids for refactoring and catchment splitting.
 
-```{r NHDPlusV2 FDR_FAC, eval="CONUS" %in% domain | reg}
+```{r NHDPlusV2 FDR_FAC}
+if("CONUS" %in% domain | reg) {
 # NHDPlus FDR/FAC grids available by raster processing unit
 out_list<- c(out_list, make_fdr_fac_list(file.path(data_dir, "fdrfac"), region))
+}
 
 ```
 
 Download NHDPlusV2 elevation grids for headwater extensions and splitting 
 catchments into left and right banks.
 
-```{r NHDPlusV2 elev, eval="CONUS" %in% domain | reg}
+```{r NHDPlusV2 elev}
+if("CONUS" %in% domain | reg) {
 # NHDPlus elev grids available by raster processing unit
 out_list<- c(out_list, make_nhdplus_elev_list(file.path(data_dir, "nhdplusv2_elev"), region))
+}
 ```
 
 Merrit Topographic and Hydrographic data for deriving GIS Features of the 
@@ -480,8 +484,11 @@ hilarri_dir <- file.path(data_dir, "HILARRI")
 hilarri_out <- list(hilarri_sites = file.path(hilarri_dir, "HILARRI_v2.csv"))
 
 if("reservoirs" %in% POI_types) {
-download_file("https://hydrosource.ornl.gov/sites/default/files/2023-03/HILARRI_v2.zip", 
-              out_path = hilarri_dir, check_path = hilarri_out$hilari_sites)
+  # the direct hydrosource.ornl.gov download is now behind user tracking,
+  # so the archive is pulled from ScienceBase below instead
+  # download_file("https://hydrosource.ornl.gov/sites/default/files/2023-03/HILARRI_v2.zip", 
+  #               out_path = hilarri_dir, check_path = hilarri_out$hilari_sites)
+  
+  get_sb_file("5dbc53d4e4b06957974eddae", "HILARRI_v2.zip", hilarri_dir, check_path = hilarri_out$hilarri_sites)
 }
 
 out_list <- c(out_list, hilarri_out)
diff --git a/workspace/R/00_get_data_functions.R b/workspace/R/00_get_data_functions.R
index 2bf98b0ad3d04b26d6a35a493a67d2d84259d572..113beed4e683be19b906ac867b6e21a870766b97 100644
--- a/workspace/R/00_get_data_functions.R
+++ b/workspace/R/00_get_data_functions.R
@@ -6,7 +6,7 @@
 #' @param check_path character path to check, if it exists, it will be returned invisibly
 #' @return path that data was saved to
 get_sb_file <- function(item, item_files, out_destination, unzip = TRUE, check_path = NULL) {
-  
+
   if(!is.null(check_path) && file.exists(check_path)) return(invisible(check_path))
   
   check_auth()
@@ -26,13 +26,24 @@ get_sb_file <- function(item, item_files, out_destination, unzip = TRUE, check_p
                                 names = basename(missing), 
                                 destinations = missing)
   }
+
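+  # if the file named by check_path is still absent after downloading, re-extract the archives already in place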
+  if(!is.null(check_path) && !file.exists(check_path) && unzip) {
+    missing <- out_files[grepl("7z$|zip$", out_files)]
+    warning("check_path file is missing but an archive exists; trying to unzip again")
+  }
   
   if(unzip) {
     
     un7zip_fs <- missing[grepl("7z$|zip$", missing)]
 
     for(f in un7zip_fs) {
-      system(paste0(sevenz, " e -o", out_destination, " ", f))
+      
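+      # .gdb.zip archives are extracted to the geodatabase path given by check_path rather than out_destination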
+      out_destination_use <- out_destination
+      if(!is.null(check_path) && grepl("\\.gdb\\.zip", f)) {
+        out_destination_use <- check_path
+      }
+      
+      system(paste0(sevenz, " e -o", out_destination_use, " ", f))
     }
     
   }
@@ -82,7 +93,7 @@ download_file <- function(url, out_path, check_path = NULL, unzip = TRUE, file_n
 #' @return list containing all flow direction and flow accumulation files
 make_fdr_fac_list <- function(fdr_fac_dir, hu2) {
   if(!dir.exists(fdr_fac_dir))
-    download_elev("FDRFAC", fdr_fac_dir)
+    download_elev("FDRFAC", fdr_fac_dir, hu2)
   
   dirs <- unique(dirname(list.files(fdr_fac_dir, recursive = TRUE, full.names = TRUE)))
   
@@ -99,8 +110,8 @@ make_fdr_fac_list <- function(fdr_fac_dir, hu2) {
   out
 }
 
-make_nhdplus_elev_list <- function(elev_dir, region, hu2) {
-  
+make_nhdplus_elev_list <- function(elev_dir, hu2) {
+
   if(!dir.exists(elev_dir))
     download_elev("DEM", elev_dir, hu2)