diff --git a/catalog/catalog.json b/catalog/catalog.json
index 7c43872b5362fbbe5b830f9ad8a0fbd77031bcff..4807913c57e6045ad7538c3ae3397a1eb82c14d3 100644
--- a/catalog/catalog.json
+++ b/catalog/catalog.json
@@ -203,6 +203,11 @@
       "rel": "child",
       "href": "./slr2d/collection.json",
       "type": "application/json"
+    },
+    {
+      "rel": "child",
+      "href": "./sambi_urb/collection.json",
+      "type": "application/json"
     }
   ]
 }
\ No newline at end of file
diff --git a/catalog/sambi_urb/collection.json b/catalog/sambi_urb/collection.json
new file mode 100644
index 0000000000000000000000000000000000000000..fe6ed2b9cfe52bb98c428abf07b6470a9dae807e
--- /dev/null
+++ b/catalog/sambi_urb/collection.json
@@ -0,0 +1,134 @@
+{
+  "type": "Collection",
+  "id": "sambi_urb",
+  "stac_version": "1.0.0",
+  "description": "Urban Growth Projection for DSL-SAMBI",
+  "links": [
+    {
+      "rel": "root",
+      "href": "../catalog.json",
+      "type": "application/json"
+    },
+    {
+      "rel": "parent",
+      "href": "../catalog.json",
+      "type": "application/json"
+    }
+  ],
+  "stac_extensions": [
+    "https://stac-extensions.github.io/datacube/v2.2.0/schema.json"
+  ],
+  "cube:dimensions": {
+    "time": {
+      "type": "temporal",
+      "description": null,
+      "extent": [
+        "2000-01-01T00:00:00Z",
+        "2090-01-01T00:00:00Z"
+      ],
+      "step": "P10Y0M0DT0H0M0S"
+    },
+    "x": {
+      "type": "spatial",
+      "axis": "x",
+      "description": "x coordinate of projection",
+      "extent": [
+        1018838.7729999996,
+        1840358.7729999996
+      ],
+      "step": 60.0,
+      "reference_system": "{\"$schema\":\"https://proj.org/schemas/v0.5/projjson.schema.json\",\"type\":\"ProjectedCRS\",\"name\":\"undefined\",\"base_crs\":{\"name\":\"undefined\",\"datum\":{\"type\":\"GeodeticReferenceFrame\",\"name\":\"undefined\",\"ellipsoid\":{\"name\":\"undefined\",\"semi_major_axis\":6378137,\"inverse_flattening\":298.257222101},\"prime_meridian\":{\"name\":\"undefined\",\"longitude\":0}},\"coordinate_system\":{\"subtype\":\"ellipsoidal\",\"axis\":[{\"name\":\"Longitude\",\"abbreviation\":\"lon\",\"direction\":\"east\",\"unit\":\"degree\"},{\"name\":\"Latitude\",\"abbreviation\":\"lat\",\"direction\":\"north\",\"unit\":\"degree\"}]}},\"conversion\":{\"name\":\"unknown\",\"method\":{\"name\":\"Albers Equal Area\",\"id\":{\"authority\":\"EPSG\",\"code\":9822}},\"parameters\":[{\"name\":\"Latitude of false origin\",\"value\":23,\"unit\":\"degree\",\"id\":{\"authority\":\"EPSG\",\"code\":8821}},{\"name\":\"Longitude of false origin\",\"value\":-96,\"unit\":\"degree\",\"id\":{\"authority\":\"EPSG\",\"code\":8822}},{\"name\":\"Latitude of 1st standard parallel\",\"value\":29.5,\"unit\":\"degree\",\"id\":{\"authority\":\"EPSG\",\"code\":8823}},{\"name\":\"Latitude of 2nd standard parallel\",\"value\":45.5,\"unit\":\"degree\",\"id\":{\"authority\":\"EPSG\",\"code\":8824}},{\"name\":\"Easting at false origin\",\"value\":0,\"unit\":{\"type\":\"LinearUnit\",\"name\":\"Metre\",\"conversion_factor\":1},\"id\":{\"authority\":\"EPSG\",\"code\":8826}},{\"name\":\"Northing at false origin\",\"value\":0,\"unit\":{\"type\":\"LinearUnit\",\"name\":\"Metre\",\"conversion_factor\":1},\"id\":{\"authority\":\"EPSG\",\"code\":8827}}]},\"coordinate_system\":{\"subtype\":\"Cartesian\",\"axis\":[{\"name\":\"Easting\",\"abbreviation\":\"E\",\"direction\":\"east\",\"unit\":\"metre\"},{\"name\":\"Northing\",\"abbreviation\":\"N\",\"direction\":\"north\",\"unit\":\"metre\"}]}}"
+    },
+    "y": {
+      "type": "spatial",
+      "axis": "y",
+      "description": "y coordinate of projection",
+      "extent": [
+        760892.0780000016,
+        1744172.0780000016
+      ],
+      "step": 60.0,
+      "reference_system": "{\"$schema\":\"https://proj.org/schemas/v0.5/projjson.schema.json\",\"type\":\"ProjectedCRS\",\"name\":\"undefined\",\"base_crs\":{\"name\":\"undefined\",\"datum\":{\"type\":\"GeodeticReferenceFrame\",\"name\":\"undefined\",\"ellipsoid\":{\"name\":\"undefined\",\"semi_major_axis\":6378137,\"inverse_flattening\":298.257222101},\"prime_meridian\":{\"name\":\"undefined\",\"longitude\":0}},\"coordinate_system\":{\"subtype\":\"ellipsoidal\",\"axis\":[{\"name\":\"Longitude\",\"abbreviation\":\"lon\",\"direction\":\"east\",\"unit\":\"degree\"},{\"name\":\"Latitude\",\"abbreviation\":\"lat\",\"direction\":\"north\",\"unit\":\"degree\"}]}},\"conversion\":{\"name\":\"unknown\",\"method\":{\"name\":\"Albers Equal Area\",\"id\":{\"authority\":\"EPSG\",\"code\":9822}},\"parameters\":[{\"name\":\"Latitude of false origin\",\"value\":23,\"unit\":\"degree\",\"id\":{\"authority\":\"EPSG\",\"code\":8821}},{\"name\":\"Longitude of false origin\",\"value\":-96,\"unit\":\"degree\",\"id\":{\"authority\":\"EPSG\",\"code\":8822}},{\"name\":\"Latitude of 1st standard parallel\",\"value\":29.5,\"unit\":\"degree\",\"id\":{\"authority\":\"EPSG\",\"code\":8823}},{\"name\":\"Latitude of 2nd standard parallel\",\"value\":45.5,\"unit\":\"degree\",\"id\":{\"authority\":\"EPSG\",\"code\":8824}},{\"name\":\"Easting at false origin\",\"value\":0,\"unit\":{\"type\":\"LinearUnit\",\"name\":\"Metre\",\"conversion_factor\":1},\"id\":{\"authority\":\"EPSG\",\"code\":8826}},{\"name\":\"Northing at false origin\",\"value\":0,\"unit\":{\"type\":\"LinearUnit\",\"name\":\"Metre\",\"conversion_factor\":1},\"id\":{\"authority\":\"EPSG\",\"code\":8827}}]},\"coordinate_system\":{\"subtype\":\"Cartesian\",\"axis\":[{\"name\":\"Easting\",\"abbreviation\":\"E\",\"direction\":\"east\",\"unit\":\"metre\"},{\"name\":\"Northing\",\"abbreviation\":\"N\",\"direction\":\"north\",\"unit\":\"metre\"}]}}"
+    }
+  },
+  "cube:variables": {
+    "albers_conical_equal_area": {
+      "dimensions": [],
+      "type": "auxiliary",
+      "description": null,
+      "unit": null
+    },
+    "urb": {
+      "dimensions": [
+        "time",
+        "y",
+        "x"
+      ],
+      "type": "data",
+      "description": "Urban Growth SAMBI-DSL",
+      "unit": "percent"
+    }
+  },
+  "extent": {
+    "spatial": {
+      "bbox": [
+        [
+          -85.48217902360058,
+          28.271951434906544,
+          -74.98118991571037,
+          38.16218753090247
+        ]
+      ]
+    },
+    "temporal": {
+      "interval": [
+        [
+          "2000-01-01T00:00:00Z",
+          "2090-01-01T00:00:00Z"
+        ]
+      ]
+    }
+  },
+  "license": "Unlicense",
+  "assets": {
+    "zarr-s3-osn": {
+      "href": "s3://mdmf/gdp/sambi_urb.zarr/",
+      "type": "application/vnd+zarr",
+      "description": "Open Storage Network Pod S3 API access to collection zarr group",
+      "xarray:open_kwargs": {
+        "chunks": {},
+        "engine": "zarr",
+        "consolidated": true
+      },
+      "xarray:storage_options": {
+        "anon": true,
+        "client_kwargs": {
+          "endpoint_url": "https://usgs.osn.mghpcc.org/"
+        }
+      },
+      "roles": [
+        "data",
+        "zarr",
+        "s3"
+      ]
+    },
+    "zarr-s3": {
+      "href": "s3://nhgf-development/workspace/DataConversion/sambi_urb.zarr/",
+      "type": "application/vnd+zarr",
+      "description": "S3 access to collection zarr group",
+      "xarray:open_kwargs": {
+        "chunks": {},
+        "engine": "zarr",
+        "consolidated": true
+      },
+      "xarray:storage_options": {
+        "requester_pays": true
+      },
+      "roles": [
+        "data",
+        "zarr",
+        "s3"
+      ]
+    }
+  }
+}
\ No newline at end of file
diff --git a/workflows/archive/sambi_urb_create_collection_from_zarr.ipynb b/workflows/archive/sambi_urb_create_collection_from_zarr.ipynb
new file mode 100644
index 0000000000000000000000000000000000000000..2436a97d4962d937c0ae15d185eecdff1cd0570c
--- /dev/null
+++ b/workflows/archive/sambi_urb_create_collection_from_zarr.ipynb
@@ -0,0 +1,2756 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "id": "6c10e07b-1e60-4926-af1d-fa75dc78e5d4",
+   "metadata": {
+    "tags": []
+   },
+   "source": [
+    "# sambi_urb Zarr -> Collection Workflow\n",
+    "This is a workflow to build a [STAC collection](https://github.com/radiantearth/stac-spec/blob/master/collection-spec/collection-spec.md) from the zarr asset for the dataset named above. We use the [datacube extension](https://github.com/stac-extensions/datacube) to define the spatial and temporal dimensions of the zarr store, as well as the variables it contains.\n",
+    "\n",
+    "To simplify this workflow so that it can scale to many datasets, a few simplifying suggestions and assumptions are made:\n",
+    "1. For USGS data, we can use the CC0-1.0 license. For all other data we can use Unlicense. Ref: https://spdx.org/licenses/\n",
+    "2. I am assuming all coordinates are from the WGS84 datum if not specified."
+   ]
+  },
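+  {
+   "cell_type": "markdown",
+   "id": "added-open-zarr-note",
+   "metadata": {},
+   "source": [
+    "The next cell is an added, illustrative sketch rather than part of the original workflow: it opens the sambi_urb zarr store with `fsspec` and `xarray`, using the same open kwargs and storage options that are recorded on the `zarr-s3-osn` asset of the resulting collection. The endpoint URL and bucket path are taken from that asset; everything else is an assumption."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "added-open-zarr-sketch",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Added illustrative sketch (assumes fsspec/s3fs and a zarr-enabled xarray are installed).\n",
+    "# The endpoint, bucket path, and open kwargs mirror the zarr-s3-osn asset in\n",
+    "# catalog/sambi_urb/collection.json.\n",
+    "import fsspec\n",
+    "import xarray as xr\n",
+    "\n",
+    "# storage options copied from xarray:storage_options on the asset\n",
+    "fs = fsspec.filesystem(\n",
+    "    \"s3\", anon=True, client_kwargs={\"endpoint_url\": \"https://usgs.osn.mghpcc.org/\"}\n",
+    ")\n",
+    "store = fs.get_mapper(\"s3://mdmf/gdp/sambi_urb.zarr/\")\n",
+    "\n",
+    "# open kwargs copied from xarray:open_kwargs on the asset\n",
+    "ds = xr.open_dataset(store, engine=\"zarr\", chunks={}, consolidated=True)\n",
+    "ds"
+   ]
+  },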
+  {
+   "cell_type": "code",
+   "execution_count": 1,
+   "id": "201e0945-de55-45ff-b095-c2af009a4e62",
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "application/javascript": [
+       "(function(root) {\n",
+       "  function now() {\n",
+       "    return new Date();\n",
+       "  }\n",
+       "\n",
+       "  var force = true;\n",
+       "  var py_version = '3.3.4'.replace('rc', '-rc.').replace('.dev', '-dev.');\n",
+       "  var reloading = false;\n",
+       "  var Bokeh = root.Bokeh;\n",
+       "\n",
+       "  if (typeof (root._bokeh_timeout) === \"undefined\" || force) {\n",
+       "    root._bokeh_timeout = Date.now() + 5000;\n",
+       "    root._bokeh_failed_load = false;\n",
+       "  }\n",
+       "\n",
+       "  function run_callbacks() {\n",
+       "    try {\n",
+       "      root._bokeh_onload_callbacks.forEach(function(callback) {\n",
+       "        if (callback != null)\n",
+       "          callback();\n",
+       "      });\n",
+       "    } finally {\n",
+       "      delete root._bokeh_onload_callbacks;\n",
+       "    }\n",
+       "    console.debug(\"Bokeh: all callbacks have finished\");\n",
+       "  }\n",
+       "\n",
+       "  function load_libs(css_urls, js_urls, js_modules, js_exports, callback) {\n",
+       "    if (css_urls == null) css_urls = [];\n",
+       "    if (js_urls == null) js_urls = [];\n",
+       "    if (js_modules == null) js_modules = [];\n",
+       "    if (js_exports == null) js_exports = {};\n",
+       "\n",
+       "    root._bokeh_onload_callbacks.push(callback);\n",
+       "\n",
+       "    if (root._bokeh_is_loading > 0) {\n",
+       "      console.debug(\"Bokeh: BokehJS is being loaded, scheduling callback at\", now());\n",
+       "      return null;\n",
+       "    }\n",
+       "    if (js_urls.length === 0 && js_modules.length === 0 && Object.keys(js_exports).length === 0) {\n",
+       "      run_callbacks();\n",
+       "      return null;\n",
+       "    }\n",
+       "    if (!reloading) {\n",
+       "      console.debug(\"Bokeh: BokehJS not loaded, scheduling load and callback at\", now());\n",
+       "    }\n",
+       "\n",
+       "    function on_load() {\n",
+       "      root._bokeh_is_loading--;\n",
+       "      if (root._bokeh_is_loading === 0) {\n",
+       "        console.debug(\"Bokeh: all BokehJS libraries/stylesheets loaded\");\n",
+       "        run_callbacks()\n",
+       "      }\n",
+       "    }\n",
+       "    window._bokeh_on_load = on_load\n",
+       "\n",
+       "    function on_error() {\n",
+       "      console.error(\"failed to load \" + url);\n",
+       "    }\n",
+       "\n",
+       "    var skip = [];\n",
+       "    if (window.requirejs) {\n",
+       "      window.requirejs.config({'packages': {}, 'paths': {'jspanel': 'https://cdn.jsdelivr.net/npm/jspanel4@4.12.0/dist/jspanel', 'jspanel-modal': 'https://cdn.jsdelivr.net/npm/jspanel4@4.12.0/dist/extensions/modal/jspanel.modal', 'jspanel-tooltip': 'https://cdn.jsdelivr.net/npm/jspanel4@4.12.0/dist/extensions/tooltip/jspanel.tooltip', 'jspanel-hint': 'https://cdn.jsdelivr.net/npm/jspanel4@4.12.0/dist/extensions/hint/jspanel.hint', 'jspanel-layout': 'https://cdn.jsdelivr.net/npm/jspanel4@4.12.0/dist/extensions/layout/jspanel.layout', 'jspanel-contextmenu': 'https://cdn.jsdelivr.net/npm/jspanel4@4.12.0/dist/extensions/contextmenu/jspanel.contextmenu', 'jspanel-dock': 'https://cdn.jsdelivr.net/npm/jspanel4@4.12.0/dist/extensions/dock/jspanel.dock', 'gridstack': 'https://cdn.jsdelivr.net/npm/gridstack@7.2.3/dist/gridstack-all', 'notyf': 'https://cdn.jsdelivr.net/npm/notyf@3/notyf.min'}, 'shim': {'jspanel': {'exports': 'jsPanel'}, 'gridstack': {'exports': 'GridStack'}}});\n",
+       "      require([\"jspanel\"], function(jsPanel) {\n",
+       "\twindow.jsPanel = jsPanel\n",
+       "\ton_load()\n",
+       "      })\n",
+       "      require([\"jspanel-modal\"], function() {\n",
+       "\ton_load()\n",
+       "      })\n",
+       "      require([\"jspanel-tooltip\"], function() {\n",
+       "\ton_load()\n",
+       "      })\n",
+       "      require([\"jspanel-hint\"], function() {\n",
+       "\ton_load()\n",
+       "      })\n",
+       "      require([\"jspanel-layout\"], function() {\n",
+       "\ton_load()\n",
+       "      })\n",
+       "      require([\"jspanel-contextmenu\"], function() {\n",
+       "\ton_load()\n",
+       "      })\n",
+       "      require([\"jspanel-dock\"], function() {\n",
+       "\ton_load()\n",
+       "      })\n",
+       "      require([\"gridstack\"], function(GridStack) {\n",
+       "\twindow.GridStack = GridStack\n",
+       "\ton_load()\n",
+       "      })\n",
+       "      require([\"notyf\"], function() {\n",
+       "\ton_load()\n",
+       "      })\n",
+       "      root._bokeh_is_loading = css_urls.length + 9;\n",
+       "    } else {\n",
+       "      root._bokeh_is_loading = css_urls.length + js_urls.length + js_modules.length + Object.keys(js_exports).length;\n",
+       "    }\n",
+       "\n",
+       "    var existing_stylesheets = []\n",
+       "    var links = document.getElementsByTagName('link')\n",
+       "    for (var i = 0; i < links.length; i++) {\n",
+       "      var link = links[i]\n",
+       "      if (link.href != null) {\n",
+       "\texisting_stylesheets.push(link.href)\n",
+       "      }\n",
+       "    }\n",
+       "    for (var i = 0; i < css_urls.length; i++) {\n",
+       "      var url = css_urls[i];\n",
+       "      if (existing_stylesheets.indexOf(url) !== -1) {\n",
+       "\ton_load()\n",
+       "\tcontinue;\n",
+       "      }\n",
+       "      const element = document.createElement(\"link\");\n",
+       "      element.onload = on_load;\n",
+       "      element.onerror = on_error;\n",
+       "      element.rel = \"stylesheet\";\n",
+       "      element.type = \"text/css\";\n",
+       "      element.href = url;\n",
+       "      console.debug(\"Bokeh: injecting link tag for BokehJS stylesheet: \", url);\n",
+       "      document.body.appendChild(element);\n",
+       "    }    if (((window['jsPanel'] !== undefined) && (!(window['jsPanel'] instanceof HTMLElement))) || window.requirejs) {\n",
+       "      var urls = ['https://cdn.holoviz.org/panel/1.3.8/dist/bundled/floatpanel/jspanel4@4.12.0/dist/jspanel.js', 'https://cdn.holoviz.org/panel/1.3.8/dist/bundled/floatpanel/jspanel4@4.12.0/dist/extensions/modal/jspanel.modal.js', 'https://cdn.holoviz.org/panel/1.3.8/dist/bundled/floatpanel/jspanel4@4.12.0/dist/extensions/tooltip/jspanel.tooltip.js', 'https://cdn.holoviz.org/panel/1.3.8/dist/bundled/floatpanel/jspanel4@4.12.0/dist/extensions/hint/jspanel.hint.js', 'https://cdn.holoviz.org/panel/1.3.8/dist/bundled/floatpanel/jspanel4@4.12.0/dist/extensions/layout/jspanel.layout.js', 'https://cdn.holoviz.org/panel/1.3.8/dist/bundled/floatpanel/jspanel4@4.12.0/dist/extensions/contextmenu/jspanel.contextmenu.js', 'https://cdn.holoviz.org/panel/1.3.8/dist/bundled/floatpanel/jspanel4@4.12.0/dist/extensions/dock/jspanel.dock.js'];\n",
+       "      for (var i = 0; i < urls.length; i++) {\n",
+       "        skip.push(urls[i])\n",
+       "      }\n",
+       "    }    if (((window['GridStack'] !== undefined) && (!(window['GridStack'] instanceof HTMLElement))) || window.requirejs) {\n",
+       "      var urls = ['https://cdn.holoviz.org/panel/1.3.8/dist/bundled/gridstack/gridstack@7.2.3/dist/gridstack-all.js'];\n",
+       "      for (var i = 0; i < urls.length; i++) {\n",
+       "        skip.push(urls[i])\n",
+       "      }\n",
+       "    }    if (((window['Notyf'] !== undefined) && (!(window['Notyf'] instanceof HTMLElement))) || window.requirejs) {\n",
+       "      var urls = ['https://cdn.holoviz.org/panel/1.3.8/dist/bundled/notificationarea/notyf@3/notyf.min.js'];\n",
+       "      for (var i = 0; i < urls.length; i++) {\n",
+       "        skip.push(urls[i])\n",
+       "      }\n",
+       "    }    var existing_scripts = []\n",
+       "    var scripts = document.getElementsByTagName('script')\n",
+       "    for (var i = 0; i < scripts.length; i++) {\n",
+       "      var script = scripts[i]\n",
+       "      if (script.src != null) {\n",
+       "\texisting_scripts.push(script.src)\n",
+       "      }\n",
+       "    }\n",
+       "    for (var i = 0; i < js_urls.length; i++) {\n",
+       "      var url = js_urls[i];\n",
+       "      if (skip.indexOf(url) !== -1 || existing_scripts.indexOf(url) !== -1) {\n",
+       "\tif (!window.requirejs) {\n",
+       "\t  on_load();\n",
+       "\t}\n",
+       "\tcontinue;\n",
+       "      }\n",
+       "      var element = document.createElement('script');\n",
+       "      element.onload = on_load;\n",
+       "      element.onerror = on_error;\n",
+       "      element.async = false;\n",
+       "      element.src = url;\n",
+       "      console.debug(\"Bokeh: injecting script tag for BokehJS library: \", url);\n",
+       "      document.head.appendChild(element);\n",
+       "    }\n",
+       "    for (var i = 0; i < js_modules.length; i++) {\n",
+       "      var url = js_modules[i];\n",
+       "      if (skip.indexOf(url) !== -1 || existing_scripts.indexOf(url) !== -1) {\n",
+       "\tif (!window.requirejs) {\n",
+       "\t  on_load();\n",
+       "\t}\n",
+       "\tcontinue;\n",
+       "      }\n",
+       "      var element = document.createElement('script');\n",
+       "      element.onload = on_load;\n",
+       "      element.onerror = on_error;\n",
+       "      element.async = false;\n",
+       "      element.src = url;\n",
+       "      element.type = \"module\";\n",
+       "      console.debug(\"Bokeh: injecting script tag for BokehJS library: \", url);\n",
+       "      document.head.appendChild(element);\n",
+       "    }\n",
+       "    for (const name in js_exports) {\n",
+       "      var url = js_exports[name];\n",
+       "      if (skip.indexOf(url) >= 0 || root[name] != null) {\n",
+       "\tif (!window.requirejs) {\n",
+       "\t  on_load();\n",
+       "\t}\n",
+       "\tcontinue;\n",
+       "      }\n",
+       "      var element = document.createElement('script');\n",
+       "      element.onerror = on_error;\n",
+       "      element.async = false;\n",
+       "      element.type = \"module\";\n",
+       "      console.debug(\"Bokeh: injecting script tag for BokehJS library: \", url);\n",
+       "      element.textContent = `\n",
+       "      import ${name} from \"${url}\"\n",
+       "      window.${name} = ${name}\n",
+       "      window._bokeh_on_load()\n",
+       "      `\n",
+       "      document.head.appendChild(element);\n",
+       "    }\n",
+       "    if (!js_urls.length && !js_modules.length) {\n",
+       "      on_load()\n",
+       "    }\n",
+       "  };\n",
+       "\n",
+       "  function inject_raw_css(css) {\n",
+       "    const element = document.createElement(\"style\");\n",
+       "    element.appendChild(document.createTextNode(css));\n",
+       "    document.body.appendChild(element);\n",
+       "  }\n",
+       "\n",
+       "  var js_urls = [\"https://cdn.bokeh.org/bokeh/release/bokeh-3.3.4.min.js\", \"https://cdn.bokeh.org/bokeh/release/bokeh-gl-3.3.4.min.js\", \"https://cdn.bokeh.org/bokeh/release/bokeh-widgets-3.3.4.min.js\", \"https://cdn.bokeh.org/bokeh/release/bokeh-tables-3.3.4.min.js\", \"https://cdn.holoviz.org/panel/1.3.8/dist/panel.min.js\"];\n",
+       "  var js_modules = [];\n",
+       "  var js_exports = {};\n",
+       "  var css_urls = [];\n",
+       "  var inline_js = [    function(Bokeh) {\n",
+       "      Bokeh.set_log_level(\"info\");\n",
+       "    },\n",
+       "function(Bokeh) {} // ensure no trailing comma for IE\n",
+       "  ];\n",
+       "\n",
+       "  function run_inline_js() {\n",
+       "    if ((root.Bokeh !== undefined) || (force === true)) {\n",
+       "      for (var i = 0; i < inline_js.length; i++) {\n",
+       "\ttry {\n",
+       "          inline_js[i].call(root, root.Bokeh);\n",
+       "\t} catch(e) {\n",
+       "\t  if (!reloading) {\n",
+       "\t    throw e;\n",
+       "\t  }\n",
+       "\t}\n",
+       "      }\n",
+       "      // Cache old bokeh versions\n",
+       "      if (Bokeh != undefined && !reloading) {\n",
+       "\tvar NewBokeh = root.Bokeh;\n",
+       "\tif (Bokeh.versions === undefined) {\n",
+       "\t  Bokeh.versions = new Map();\n",
+       "\t}\n",
+       "\tif (NewBokeh.version !== Bokeh.version) {\n",
+       "\t  Bokeh.versions.set(NewBokeh.version, NewBokeh)\n",
+       "\t}\n",
+       "\troot.Bokeh = Bokeh;\n",
+       "      }} else if (Date.now() < root._bokeh_timeout) {\n",
+       "      setTimeout(run_inline_js, 100);\n",
+       "    } else if (!root._bokeh_failed_load) {\n",
+       "      console.log(\"Bokeh: BokehJS failed to load within specified timeout.\");\n",
+       "      root._bokeh_failed_load = true;\n",
+       "    }\n",
+       "    root._bokeh_is_initializing = false\n",
+       "  }\n",
+       "\n",
+       "  function load_or_wait() {\n",
+       "    // Implement a backoff loop that tries to ensure we do not load multiple\n",
+       "    // versions of Bokeh and its dependencies at the same time.\n",
+       "    // In recent versions we use the root._bokeh_is_initializing flag\n",
+       "    // to determine whether there is an ongoing attempt to initialize\n",
+       "    // bokeh, however for backward compatibility we also try to ensure\n",
+       "    // that we do not start loading a newer (Panel>=1.0 and Bokeh>3) version\n",
+       "    // before older versions are fully initialized.\n",
+       "    if (root._bokeh_is_initializing && Date.now() > root._bokeh_timeout) {\n",
+       "      root._bokeh_is_initializing = false;\n",
+       "      root._bokeh_onload_callbacks = undefined;\n",
+       "      console.log(\"Bokeh: BokehJS was loaded multiple times but one version failed to initialize.\");\n",
+       "      load_or_wait();\n",
+       "    } else if (root._bokeh_is_initializing || (typeof root._bokeh_is_initializing === \"undefined\" && root._bokeh_onload_callbacks !== undefined)) {\n",
+       "      setTimeout(load_or_wait, 100);\n",
+       "    } else {\n",
+       "      root._bokeh_is_initializing = true\n",
+       "      root._bokeh_onload_callbacks = []\n",
+       "      var bokeh_loaded = Bokeh != null && (Bokeh.version === py_version || (Bokeh.versions !== undefined && Bokeh.versions.has(py_version)));\n",
+       "      if (!reloading && !bokeh_loaded) {\n",
+       "\troot.Bokeh = undefined;\n",
+       "      }\n",
+       "      load_libs(css_urls, js_urls, js_modules, js_exports, function() {\n",
+       "\tconsole.debug(\"Bokeh: BokehJS plotting callback run at\", now());\n",
+       "\trun_inline_js();\n",
+       "      });\n",
+       "    }\n",
+       "  }\n",
+       "  // Give older versions of the autoload script a head-start to ensure\n",
+       "  // they initialize before we start loading newer version.\n",
+       "  setTimeout(load_or_wait, 100)\n",
+       "}(window));"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    },
+    {
+     "data": {
+      "application/javascript": [
+       "\n",
+       "if ((window.PyViz === undefined) || (window.PyViz instanceof HTMLElement)) {\n",
+       "  window.PyViz = {comms: {}, comm_status:{}, kernels:{}, receivers: {}, plot_index: []}\n",
+       "}\n",
+       "\n",
+       "\n",
+       "    function JupyterCommManager() {\n",
+       "    }\n",
+       "\n",
+       "    JupyterCommManager.prototype.register_target = function(plot_id, comm_id, msg_handler) {\n",
+       "      if (window.comm_manager || ((window.Jupyter !== undefined) && (Jupyter.notebook.kernel != null))) {\n",
+       "        var comm_manager = window.comm_manager || Jupyter.notebook.kernel.comm_manager;\n",
+       "        comm_manager.register_target(comm_id, function(comm) {\n",
+       "          comm.on_msg(msg_handler);\n",
+       "        });\n",
+       "      } else if ((plot_id in window.PyViz.kernels) && (window.PyViz.kernels[plot_id])) {\n",
+       "        window.PyViz.kernels[plot_id].registerCommTarget(comm_id, function(comm) {\n",
+       "          comm.onMsg = msg_handler;\n",
+       "        });\n",
+       "      } else if (typeof google != 'undefined' && google.colab.kernel != null) {\n",
+       "        google.colab.kernel.comms.registerTarget(comm_id, (comm) => {\n",
+       "          var messages = comm.messages[Symbol.asyncIterator]();\n",
+       "          function processIteratorResult(result) {\n",
+       "            var message = result.value;\n",
+       "            console.log(message)\n",
+       "            var content = {data: message.data, comm_id};\n",
+       "            var buffers = []\n",
+       "            for (var buffer of message.buffers || []) {\n",
+       "              buffers.push(new DataView(buffer))\n",
+       "            }\n",
+       "            var metadata = message.metadata || {};\n",
+       "            var msg = {content, buffers, metadata}\n",
+       "            msg_handler(msg);\n",
+       "            return messages.next().then(processIteratorResult);\n",
+       "          }\n",
+       "          return messages.next().then(processIteratorResult);\n",
+       "        })\n",
+       "      }\n",
+       "    }\n",
+       "\n",
+       "    JupyterCommManager.prototype.get_client_comm = function(plot_id, comm_id, msg_handler) {\n",
+       "      if (comm_id in window.PyViz.comms) {\n",
+       "        return window.PyViz.comms[comm_id];\n",
+       "      } else if (window.comm_manager || ((window.Jupyter !== undefined) && (Jupyter.notebook.kernel != null))) {\n",
+       "        var comm_manager = window.comm_manager || Jupyter.notebook.kernel.comm_manager;\n",
+       "        var comm = comm_manager.new_comm(comm_id, {}, {}, {}, comm_id);\n",
+       "        if (msg_handler) {\n",
+       "          comm.on_msg(msg_handler);\n",
+       "        }\n",
+       "      } else if ((plot_id in window.PyViz.kernels) && (window.PyViz.kernels[plot_id])) {\n",
+       "        var comm = window.PyViz.kernels[plot_id].connectToComm(comm_id);\n",
+       "        comm.open();\n",
+       "        if (msg_handler) {\n",
+       "          comm.onMsg = msg_handler;\n",
+       "        }\n",
+       "      } else if (typeof google != 'undefined' && google.colab.kernel != null) {\n",
+       "        var comm_promise = google.colab.kernel.comms.open(comm_id)\n",
+       "        comm_promise.then((comm) => {\n",
+       "          window.PyViz.comms[comm_id] = comm;\n",
+       "          if (msg_handler) {\n",
+       "            var messages = comm.messages[Symbol.asyncIterator]();\n",
+       "            function processIteratorResult(result) {\n",
+       "              var message = result.value;\n",
+       "              var content = {data: message.data};\n",
+       "              var metadata = message.metadata || {comm_id};\n",
+       "              var msg = {content, metadata}\n",
+       "              msg_handler(msg);\n",
+       "              return messages.next().then(processIteratorResult);\n",
+       "            }\n",
+       "            return messages.next().then(processIteratorResult);\n",
+       "          }\n",
+       "        }) \n",
+       "        var sendClosure = (data, metadata, buffers, disposeOnDone) => {\n",
+       "          return comm_promise.then((comm) => {\n",
+       "            comm.send(data, metadata, buffers, disposeOnDone);\n",
+       "          });\n",
+       "        };\n",
+       "        var comm = {\n",
+       "          send: sendClosure\n",
+       "        };\n",
+       "      }\n",
+       "      window.PyViz.comms[comm_id] = comm;\n",
+       "      return comm;\n",
+       "    }\n",
+       "    window.PyViz.comm_manager = new JupyterCommManager();\n",
+       "    \n",
+       "\n",
+       "\n",
+       "var JS_MIME_TYPE = 'application/javascript';\n",
+       "var HTML_MIME_TYPE = 'text/html';\n",
+       "var EXEC_MIME_TYPE = 'application/vnd.holoviews_exec.v0+json';\n",
+       "var CLASS_NAME = 'output';\n",
+       "\n",
+       "/**\n",
+       " * Render data to the DOM node\n",
+       " */\n",
+       "function render(props, node) {\n",
+       "  var div = document.createElement(\"div\");\n",
+       "  var script = document.createElement(\"script\");\n",
+       "  node.appendChild(div);\n",
+       "  node.appendChild(script);\n",
+       "}\n",
+       "\n",
+       "/**\n",
+       " * Handle when a new output is added\n",
+       " */\n",
+       "function handle_add_output(event, handle) {\n",
+       "  var output_area = handle.output_area;\n",
+       "  var output = handle.output;\n",
+       "  if ((output.data == undefined) || (!output.data.hasOwnProperty(EXEC_MIME_TYPE))) {\n",
+       "    return\n",
+       "  }\n",
+       "  var id = output.metadata[EXEC_MIME_TYPE][\"id\"];\n",
+       "  var toinsert = output_area.element.find(\".\" + CLASS_NAME.split(' ')[0]);\n",
+       "  if (id !== undefined) {\n",
+       "    var nchildren = toinsert.length;\n",
+       "    var html_node = toinsert[nchildren-1].children[0];\n",
+       "    html_node.innerHTML = output.data[HTML_MIME_TYPE];\n",
+       "    var scripts = [];\n",
+       "    var nodelist = html_node.querySelectorAll(\"script\");\n",
+       "    for (var i in nodelist) {\n",
+       "      if (nodelist.hasOwnProperty(i)) {\n",
+       "        scripts.push(nodelist[i])\n",
+       "      }\n",
+       "    }\n",
+       "\n",
+       "    scripts.forEach( function (oldScript) {\n",
+       "      var newScript = document.createElement(\"script\");\n",
+       "      var attrs = [];\n",
+       "      var nodemap = oldScript.attributes;\n",
+       "      for (var j in nodemap) {\n",
+       "        if (nodemap.hasOwnProperty(j)) {\n",
+       "          attrs.push(nodemap[j])\n",
+       "        }\n",
+       "      }\n",
+       "      attrs.forEach(function(attr) { newScript.setAttribute(attr.name, attr.value) });\n",
+       "      newScript.appendChild(document.createTextNode(oldScript.innerHTML));\n",
+       "      oldScript.parentNode.replaceChild(newScript, oldScript);\n",
+       "    });\n",
+       "    if (JS_MIME_TYPE in output.data) {\n",
+       "      toinsert[nchildren-1].children[1].textContent = output.data[JS_MIME_TYPE];\n",
+       "    }\n",
+       "    output_area._hv_plot_id = id;\n",
+       "    if ((window.Bokeh !== undefined) && (id in Bokeh.index)) {\n",
+       "      window.PyViz.plot_index[id] = Bokeh.index[id];\n",
+       "    } else {\n",
+       "      window.PyViz.plot_index[id] = null;\n",
+       "    }\n",
+       "  } else if (output.metadata[EXEC_MIME_TYPE][\"server_id\"] !== undefined) {\n",
+       "    var bk_div = document.createElement(\"div\");\n",
+       "    bk_div.innerHTML = output.data[HTML_MIME_TYPE];\n",
+       "    var script_attrs = bk_div.children[0].attributes;\n",
+       "    for (var i = 0; i < script_attrs.length; i++) {\n",
+       "      toinsert[toinsert.length - 1].childNodes[1].setAttribute(script_attrs[i].name, script_attrs[i].value);\n",
+       "    }\n",
+       "    // store reference to server id on output_area\n",
+       "    output_area._bokeh_server_id = output.metadata[EXEC_MIME_TYPE][\"server_id\"];\n",
+       "  }\n",
+       "}\n",
+       "\n",
+       "/**\n",
+       " * Handle when an output is cleared or removed\n",
+       " */\n",
+       "function handle_clear_output(event, handle) {\n",
+       "  var id = handle.cell.output_area._hv_plot_id;\n",
+       "  var server_id = handle.cell.output_area._bokeh_server_id;\n",
+       "  if (((id === undefined) || !(id in PyViz.plot_index)) && (server_id !== undefined)) { return; }\n",
+       "  var comm = window.PyViz.comm_manager.get_client_comm(\"hv-extension-comm\", \"hv-extension-comm\", function () {});\n",
+       "  if (server_id !== null) {\n",
+       "    comm.send({event_type: 'server_delete', 'id': server_id});\n",
+       "    return;\n",
+       "  } else if (comm !== null) {\n",
+       "    comm.send({event_type: 'delete', 'id': id});\n",
+       "  }\n",
+       "  delete PyViz.plot_index[id];\n",
+       "  if ((window.Bokeh !== undefined) & (id in window.Bokeh.index)) {\n",
+       "    var doc = window.Bokeh.index[id].model.document\n",
+       "    doc.clear();\n",
+       "    const i = window.Bokeh.documents.indexOf(doc);\n",
+       "    if (i > -1) {\n",
+       "      window.Bokeh.documents.splice(i, 1);\n",
+       "    }\n",
+       "  }\n",
+       "}\n",
+       "\n",
+       "/**\n",
+       " * Handle kernel restart event\n",
+       " */\n",
+       "function handle_kernel_cleanup(event, handle) {\n",
+       "  delete PyViz.comms[\"hv-extension-comm\"];\n",
+       "  window.PyViz.plot_index = {}\n",
+       "}\n",
+       "\n",
+       "/**\n",
+       " * Handle update_display_data messages\n",
+       " */\n",
+       "function handle_update_output(event, handle) {\n",
+       "  handle_clear_output(event, {cell: {output_area: handle.output_area}})\n",
+       "  handle_add_output(event, handle)\n",
+       "}\n",
+       "\n",
+       "function register_renderer(events, OutputArea) {\n",
+       "  function append_mime(data, metadata, element) {\n",
+       "    // create a DOM node to render to\n",
+       "    var toinsert = this.create_output_subarea(\n",
+       "    metadata,\n",
+       "    CLASS_NAME,\n",
+       "    EXEC_MIME_TYPE\n",
+       "    );\n",
+       "    this.keyboard_manager.register_events(toinsert);\n",
+       "    // Render to node\n",
+       "    var props = {data: data, metadata: metadata[EXEC_MIME_TYPE]};\n",
+       "    render(props, toinsert[0]);\n",
+       "    element.append(toinsert);\n",
+       "    return toinsert\n",
+       "  }\n",
+       "\n",
+       "  events.on('output_added.OutputArea', handle_add_output);\n",
+       "  events.on('output_updated.OutputArea', handle_update_output);\n",
+       "  events.on('clear_output.CodeCell', handle_clear_output);\n",
+       "  events.on('delete.Cell', handle_clear_output);\n",
+       "  events.on('kernel_ready.Kernel', handle_kernel_cleanup);\n",
+       "\n",
+       "  OutputArea.prototype.register_mime_type(EXEC_MIME_TYPE, append_mime, {\n",
+       "    safe: true,\n",
+       "    index: 0\n",
+       "  });\n",
+       "}\n",
+       "\n",
+       "if (window.Jupyter !== undefined) {\n",
+       "  try {\n",
+       "    var events = require('base/js/events');\n",
+       "    var OutputArea = require('notebook/js/outputarea').OutputArea;\n",
+       "    if (OutputArea.prototype.mime_types().indexOf(EXEC_MIME_TYPE) == -1) {\n",
+       "      register_renderer(events, OutputArea);\n",
+       "    }\n",
+       "  } catch(err) {\n",
+       "  }\n",
+       "}\n"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    },
+    {
+     "data": {
+      "text/html": [
+       "<style>*[data-root-id],\n",
+       "*[data-root-id] > * {\n",
+       "  box-sizing: border-box;\n",
+       "  font-family: var(--jp-ui-font-family);\n",
+       "  font-size: var(--jp-ui-font-size1);\n",
+       "  color: var(--vscode-editor-foreground, var(--jp-ui-font-color1));\n",
+       "}\n",
+       "\n",
+       "/* Override VSCode background color */\n",
+       ".cell-output-ipywidget-background:has(\n",
+       "    > .cell-output-ipywidget-background > .lm-Widget > *[data-root-id]\n",
+       "  ),\n",
+       ".cell-output-ipywidget-background:has(> .lm-Widget > *[data-root-id]) {\n",
+       "  background-color: transparent !important;\n",
+       "}\n",
+       "</style>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    },
+    {
+     "data": {
+      "application/vnd.holoviews_exec.v0+json": "",
+      "text/html": [
+       "<div id='p1002'>\n",
+       "  <div id=\"f9275171-ae6a-40bf-985f-ed51c2f18558\" data-root-id=\"p1002\" style=\"display: contents;\"></div>\n",
+       "</div>\n",
+       "<script type=\"application/javascript\">(function(root) {\n",
+       "  var docs_json = {\"afb12bee-cd0e-4bd2-b996-7e103c21c5ee\":{\"version\":\"3.3.4\",\"title\":\"Bokeh Application\",\"roots\":[{\"type\":\"object\",\"name\":\"panel.models.browser.BrowserInfo\",\"id\":\"p1002\"},{\"type\":\"object\",\"name\":\"panel.models.comm_manager.CommManager\",\"id\":\"p1003\",\"attributes\":{\"plot_id\":\"p1002\",\"comm_id\":\"6f0e3457f1ef45e385c9c8d3842eccb0\",\"client_comm_id\":\"1cba4a96cb2c489f9afded28a2ae74e4\"}}],\"defs\":[{\"type\":\"model\",\"name\":\"ReactiveHTML1\"},{\"type\":\"model\",\"name\":\"FlexBox1\",\"properties\":[{\"name\":\"align_content\",\"kind\":\"Any\",\"default\":\"flex-start\"},{\"name\":\"align_items\",\"kind\":\"Any\",\"default\":\"flex-start\"},{\"name\":\"flex_direction\",\"kind\":\"Any\",\"default\":\"row\"},{\"name\":\"flex_wrap\",\"kind\":\"Any\",\"default\":\"wrap\"},{\"name\":\"justify_content\",\"kind\":\"Any\",\"default\":\"flex-start\"}]},{\"type\":\"model\",\"name\":\"FloatPanel1\",\"properties\":[{\"name\":\"config\",\"kind\":\"Any\",\"default\":{\"type\":\"map\"}},{\"name\":\"contained\",\"kind\":\"Any\",\"default\":true},{\"name\":\"position\",\"kind\":\"Any\",\"default\":\"right-top\"},{\"name\":\"offsetx\",\"kind\":\"Any\",\"default\":null},{\"name\":\"offsety\",\"kind\":\"Any\",\"default\":null},{\"name\":\"theme\",\"kind\":\"Any\",\"default\":\"primary\"},{\"name\":\"status\",\"kind\":\"Any\",\"default\":\"normalized\"}]},{\"type\":\"model\",\"name\":\"GridStack1\",\"properties\":[{\"name\":\"mode\",\"kind\":\"Any\",\"default\":\"warn\"},{\"name\":\"ncols\",\"kind\":\"Any\",\"default\":null},{\"name\":\"nrows\",\"kind\":\"Any\",\"default\":null},{\"name\":\"allow_resize\",\"kind\":\"Any\",\"default\":true},{\"name\":\"allow_drag\",\"kind\":\"Any\",\"default\":true},{\"name\":\"state\",\"kind\":\"Any\",\"default\":[]}]},{\"type\":\"model\",\"name\":\"drag1\",\"properties\":[{\"name\":\"slider_width\",\"kind\":\"Any\",\"default\":5},{\"name\":\"slider_color\",\"kind\":\"Any\",\"default\":\"black\"},{\"name\":\"value\",\"kind\":\"Any\",\"default\":50}]},{\"type\":\"model\",\"name\":\"click1\",\"properties\":[{\"name\":\"terminal_output\",\"kind\":\"Any\",\"default\":\"\"},{\"name\":\"debug_name\",\"kind\":\"Any\",\"default\":\"\"},{\"name\":\"clears\",\"kind\":\"Any\",\"default\":0}]},{\"type\":\"model\",\"name\":\"copy_to_clipboard1\",\"properties\":[{\"name\":\"fill\",\"kind\":\"Any\",\"default\":\"none\"},{\"name\":\"value\",\"kind\":\"Any\",\"default\":null}]},{\"type\":\"model\",\"name\":\"FastWrapper1\",\"properties\":[{\"name\":\"object\",\"kind\":\"Any\",\"default\":null},{\"name\":\"style\",\"kind\":\"Any\",\"default\":null}]},{\"type\":\"model\",\"name\":\"NotificationAreaBase1\",\"properties\":[{\"name\":\"js_events\",\"kind\":\"Any\",\"default\":{\"type\":\"map\"}},{\"name\":\"position\",\"kind\":\"Any\",\"default\":\"bottom-right\"},{\"name\":\"_clear\",\"kind\":\"Any\",\"default\":0}]},{\"type\":\"model\",\"name\":\"NotificationArea1\",\"properties\":[{\"name\":\"js_events\",\"kind\":\"Any\",\"default\":{\"type\":\"map\"}},{\"name\":\"notifications\",\"kind\":\"Any\",\"default\":[]},{\"name\":\"position\",\"kind\":\"Any\",\"default\":\"bottom-right\"},{\"name\":\"_clear\",\"kind\":\"Any\",\"default\":0},{\"name\":\"types\",\"kind\":\"Any\",\"default\":[{\"type\":\"map\",\"entries\":[[\"type\",\"warning\"],[\"background\",\"#ffc107\"],[\"icon\",{\"type\":\"map\",\"entries\":[[\"className\",\"fas 
fa-exclamation-triangle\"],[\"tagName\",\"i\"],[\"color\",\"white\"]]}]]},{\"type\":\"map\",\"entries\":[[\"type\",\"info\"],[\"background\",\"#007bff\"],[\"icon\",{\"type\":\"map\",\"entries\":[[\"className\",\"fas fa-info-circle\"],[\"tagName\",\"i\"],[\"color\",\"white\"]]}]]}]}]},{\"type\":\"model\",\"name\":\"Notification\",\"properties\":[{\"name\":\"background\",\"kind\":\"Any\",\"default\":null},{\"name\":\"duration\",\"kind\":\"Any\",\"default\":3000},{\"name\":\"icon\",\"kind\":\"Any\",\"default\":null},{\"name\":\"message\",\"kind\":\"Any\",\"default\":\"\"},{\"name\":\"notification_type\",\"kind\":\"Any\",\"default\":null},{\"name\":\"_destroyed\",\"kind\":\"Any\",\"default\":false}]},{\"type\":\"model\",\"name\":\"TemplateActions1\",\"properties\":[{\"name\":\"open_modal\",\"kind\":\"Any\",\"default\":0},{\"name\":\"close_modal\",\"kind\":\"Any\",\"default\":0}]},{\"type\":\"model\",\"name\":\"BootstrapTemplateActions1\",\"properties\":[{\"name\":\"open_modal\",\"kind\":\"Any\",\"default\":0},{\"name\":\"close_modal\",\"kind\":\"Any\",\"default\":0}]},{\"type\":\"model\",\"name\":\"MaterialTemplateActions1\",\"properties\":[{\"name\":\"open_modal\",\"kind\":\"Any\",\"default\":0},{\"name\":\"close_modal\",\"kind\":\"Any\",\"default\":0}]}]}};\n",
+       "  var render_items = [{\"docid\":\"afb12bee-cd0e-4bd2-b996-7e103c21c5ee\",\"roots\":{\"p1002\":\"f9275171-ae6a-40bf-985f-ed51c2f18558\"},\"root_ids\":[\"p1002\"]}];\n",
+       "  var docs = Object.values(docs_json)\n",
+       "  if (!docs) {\n",
+       "    return\n",
+       "  }\n",
+       "  const py_version = docs[0].version.replace('rc', '-rc.').replace('.dev', '-dev.')\n",
+       "  function embed_document(root) {\n",
+       "    var Bokeh = get_bokeh(root)\n",
+       "    Bokeh.embed.embed_items_notebook(docs_json, render_items);\n",
+       "    for (const render_item of render_items) {\n",
+       "      for (const root_id of render_item.root_ids) {\n",
+       "\tconst id_el = document.getElementById(root_id)\n",
+       "\tif (id_el.children.length && (id_el.children[0].className === 'bk-root')) {\n",
+       "\t  const root_el = id_el.children[0]\n",
+       "\t  root_el.id = root_el.id + '-rendered'\n",
+       "\t}\n",
+       "      }\n",
+       "    }\n",
+       "  }\n",
+       "  function get_bokeh(root) {\n",
+       "    if (root.Bokeh === undefined) {\n",
+       "      return null\n",
+       "    } else if (root.Bokeh.version !== py_version) {\n",
+       "      if (root.Bokeh.versions === undefined || !root.Bokeh.versions.has(py_version)) {\n",
+       "\treturn null\n",
+       "      }\n",
+       "      return root.Bokeh.versions.get(py_version);\n",
+       "    } else if (root.Bokeh.version === py_version) {\n",
+       "      return root.Bokeh\n",
+       "    }\n",
+       "    return null\n",
+       "  }\n",
+       "  function is_loaded(root) {\n",
+       "    var Bokeh = get_bokeh(root)\n",
+       "    return (Bokeh != null && Bokeh.Panel !== undefined)\n",
+       "  }\n",
+       "  if (is_loaded(root)) {\n",
+       "    embed_document(root);\n",
+       "  } else {\n",
+       "    var attempts = 0;\n",
+       "    var timer = setInterval(function(root) {\n",
+       "      if (is_loaded(root)) {\n",
+       "        clearInterval(timer);\n",
+       "        embed_document(root);\n",
+       "      } else if (document.readyState == \"complete\") {\n",
+       "        attempts++;\n",
+       "        if (attempts > 200) {\n",
+       "          clearInterval(timer);\n",
+       "\t  var Bokeh = get_bokeh(root)\n",
+       "\t  if (Bokeh == null || Bokeh.Panel == null) {\n",
+       "            console.warn(\"Panel: ERROR: Unable to run Panel code because Bokeh or Panel library is missing\");\n",
+       "\t  } else {\n",
+       "\t    console.warn(\"Panel: WARNING: Attempting to render but not all required libraries could be resolved.\")\n",
+       "\t    embed_document(root)\n",
+       "\t  }\n",
+       "        }\n",
+       "      }\n",
+       "    }, 25, root)\n",
+       "  }\n",
+       "})(window);</script>"
+      ]
+     },
+     "metadata": {
+      "application/vnd.holoviews_exec.v0+json": {
+       "id": "p1002"
+      }
+     },
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "import pystac\n",
+    "from pystac.extensions.datacube import CollectionDatacubeExtension, AssetDatacubeExtension, AdditionalDimension, DatacubeExtension\n",
+    "import xarray as xr\n",
+    "import cf_xarray\n",
+    "import os\n",
+    "import fsspec\n",
+    "import cf_xarray\n",
+    "import hvplot.xarray\n",
+    "import pandas as pd\n",
+    "import json\n",
+    "import numpy as np\n",
+    "import pyproj\n",
+    "from pyproj import Transformer\n",
+    "import cartopy.crs as ccrs\n",
+    "import cfunits\n",
+    "import json\n",
+    "import sys\n",
+    "sys.path.insert(1, '..')\n",
+    "import stac_helpers"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "a71f9d19-8fb3-4f47-b4c4-447bb80d8dd5",
+   "metadata": {},
+   "source": [
+    "## Collection ID"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 2,
+   "id": "15ee060d-3127-4024-a1ad-6aa0648667e1",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# name for STAC collection - should match name of zarr dataset\n",
+    "collection_id = 'sambi_urb'"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "116b5837-8e85-4ae7-964a-803533ded714",
+   "metadata": {},
+   "source": [
+    "## Asset Metadata Input"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 3,
+   "id": "dd6fa323-132a-4794-8c80-576933f547a0",
+   "metadata": {
+    "tags": []
+   },
+   "outputs": [],
+   "source": [
+    "# url to zarr store that you want to create a collection for\n",
+    "zarr_url = f's3://mdmf/gdp/{collection_id}.zarr/'\n",
+    "\n",
+    "# define keyword arguments needed for opening the dataset with xarray\n",
+    "# ref: https://github.com/stac-extensions/xarray-assets\n",
+    "xarray_opendataset_kwargs = {\"xarray:open_kwargs\":{\"chunks\":{},\"engine\":\"zarr\",\"consolidated\":True},\n",
+    "                          \"xarray:storage_options\": {\"anon\": True, \"client_kwargs\": {\"endpoint_url\":\"https://usgs.osn.mghpcc.org/\"}}}\n",
+    "# description for zarr url asset attached to collection (zarr_url)\n",
+    "asset_description = \"Open Storage Network Pod S3 API access to collection zarr group\"\n",
+    "# roles to tag zarr url asset with\n",
+    "asset_roles = [\"data\",\"zarr\",\"s3\"]"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 4,
+   "id": "e1441cd4-e94c-4902-af46-8f1af470eb6b",
+   "metadata": {
+    "tags": []
+   },
+   "outputs": [],
+   "source": [
+    "# url to zarr store that you want to create a collection for\n",
+    "zarr_url2 = f's3://nhgf-development/workspace/DataConversion/{collection_id}.zarr/'\n",
+    "\n",
+    "# define keyword arguments needed for opening the dataset with xarray\n",
+    "# ref: https://github.com/stac-extensions/xarray-assets\n",
+    "xarray_opendataset_kwargs2 = {\"xarray:open_kwargs\":{\"chunks\":{},\"engine\":\"zarr\",\"consolidated\":True},\n",
+    "                          \"xarray:storage_options\":{\"requester_pays\":True}}\n",
+    "# description for zarr url asset attached to collection (zarr_url)\n",
+    "asset_description2 = \"S3 access to collection zarr group\"\n",
+    "# roles to tag zarr url asset with\n",
+    "asset_roles2 = [\"data\",\"zarr\",\"s3\"]"
+   ]
+  },
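+  {
+   "cell_type": "markdown",
+   "id": "asset-sketch-note",
+   "metadata": {},
+   "source": [
+    "A minimal sketch (hypothetical cell, not from the original workflow) of how inputs like the ones staged above can be packaged as a `pystac.Asset`, with the xarray-assets keyword arguments carried in `extra_fields`; the notebook's own collection-building code consumes these inputs later on:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "asset-sketch-code",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# sketch only: package the staged inputs as a pystac Asset (variable names above are reused as-is)\n",
+    "example_asset = pystac.Asset(\n",
+    "    href=zarr_url,\n",
+    "    description=asset_description,\n",
+    "    roles=asset_roles,\n",
+    "    extra_fields=xarray_opendataset_kwargs,\n",
+    ")\n",
+    "example_asset.to_dict()"
+   ]
+  },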
+  {
+   "cell_type": "markdown",
+   "id": "b213b74f-ad17-4774-93b6-3b62be616b45",
+   "metadata": {
+    "tags": []
+   },
+   "source": [
+    "## Data Exploration"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 5,
+   "id": "708f2cf5-79ab-49af-8067-de31d0d13ee6",
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "text/html": [
+       "<div><svg style=\"position: absolute; width: 0; height: 0; overflow: hidden\">\n",
+       "<defs>\n",
+       "<symbol id=\"icon-database\" viewBox=\"0 0 32 32\">\n",
+       "<path d=\"M16 0c-8.837 0-16 2.239-16 5v4c0 2.761 7.163 5 16 5s16-2.239 16-5v-4c0-2.761-7.163-5-16-5z\"></path>\n",
+       "<path d=\"M16 17c-8.837 0-16-2.239-16-5v6c0 2.761 7.163 5 16 5s16-2.239 16-5v-6c0 2.761-7.163 5-16 5z\"></path>\n",
+       "<path d=\"M16 26c-8.837 0-16-2.239-16-5v6c0 2.761 7.163 5 16 5s16-2.239 16-5v-6c0 2.761-7.163 5-16 5z\"></path>\n",
+       "</symbol>\n",
+       "<symbol id=\"icon-file-text2\" viewBox=\"0 0 32 32\">\n",
+       "<path d=\"M28.681 7.159c-0.694-0.947-1.662-2.053-2.724-3.116s-2.169-2.030-3.116-2.724c-1.612-1.182-2.393-1.319-2.841-1.319h-15.5c-1.378 0-2.5 1.121-2.5 2.5v27c0 1.378 1.122 2.5 2.5 2.5h23c1.378 0 2.5-1.122 2.5-2.5v-19.5c0-0.448-0.137-1.23-1.319-2.841zM24.543 5.457c0.959 0.959 1.712 1.825 2.268 2.543h-4.811v-4.811c0.718 0.556 1.584 1.309 2.543 2.268zM28 29.5c0 0.271-0.229 0.5-0.5 0.5h-23c-0.271 0-0.5-0.229-0.5-0.5v-27c0-0.271 0.229-0.5 0.5-0.5 0 0 15.499-0 15.5 0v7c0 0.552 0.448 1 1 1h7v19.5z\"></path>\n",
+       "<path d=\"M23 26h-14c-0.552 0-1-0.448-1-1s0.448-1 1-1h14c0.552 0 1 0.448 1 1s-0.448 1-1 1z\"></path>\n",
+       "<path d=\"M23 22h-14c-0.552 0-1-0.448-1-1s0.448-1 1-1h14c0.552 0 1 0.448 1 1s-0.448 1-1 1z\"></path>\n",
+       "<path d=\"M23 18h-14c-0.552 0-1-0.448-1-1s0.448-1 1-1h14c0.552 0 1 0.448 1 1s-0.448 1-1 1z\"></path>\n",
+       "</symbol>\n",
+       "</defs>\n",
+       "</svg>\n",
+       "<style>/* CSS stylesheet for displaying xarray objects in jupyterlab.\n",
+       " *\n",
+       " */\n",
+       "\n",
+       ":root {\n",
+       "  --xr-font-color0: var(--jp-content-font-color0, rgba(0, 0, 0, 1));\n",
+       "  --xr-font-color2: var(--jp-content-font-color2, rgba(0, 0, 0, 0.54));\n",
+       "  --xr-font-color3: var(--jp-content-font-color3, rgba(0, 0, 0, 0.38));\n",
+       "  --xr-border-color: var(--jp-border-color2, #e0e0e0);\n",
+       "  --xr-disabled-color: var(--jp-layout-color3, #bdbdbd);\n",
+       "  --xr-background-color: var(--jp-layout-color0, white);\n",
+       "  --xr-background-color-row-even: var(--jp-layout-color1, white);\n",
+       "  --xr-background-color-row-odd: var(--jp-layout-color2, #eeeeee);\n",
+       "}\n",
+       "\n",
+       "html[theme=dark],\n",
+       "body[data-theme=dark],\n",
+       "body.vscode-dark {\n",
+       "  --xr-font-color0: rgba(255, 255, 255, 1);\n",
+       "  --xr-font-color2: rgba(255, 255, 255, 0.54);\n",
+       "  --xr-font-color3: rgba(255, 255, 255, 0.38);\n",
+       "  --xr-border-color: #1F1F1F;\n",
+       "  --xr-disabled-color: #515151;\n",
+       "  --xr-background-color: #111111;\n",
+       "  --xr-background-color-row-even: #111111;\n",
+       "  --xr-background-color-row-odd: #313131;\n",
+       "}\n",
+       "\n",
+       ".xr-wrap {\n",
+       "  display: block !important;\n",
+       "  min-width: 300px;\n",
+       "  max-width: 700px;\n",
+       "}\n",
+       "\n",
+       ".xr-text-repr-fallback {\n",
+       "  /* fallback to plain text repr when CSS is not injected (untrusted notebook) */\n",
+       "  display: none;\n",
+       "}\n",
+       "\n",
+       ".xr-header {\n",
+       "  padding-top: 6px;\n",
+       "  padding-bottom: 6px;\n",
+       "  margin-bottom: 4px;\n",
+       "  border-bottom: solid 1px var(--xr-border-color);\n",
+       "}\n",
+       "\n",
+       ".xr-header > div,\n",
+       ".xr-header > ul {\n",
+       "  display: inline;\n",
+       "  margin-top: 0;\n",
+       "  margin-bottom: 0;\n",
+       "}\n",
+       "\n",
+       ".xr-obj-type,\n",
+       ".xr-array-name {\n",
+       "  margin-left: 2px;\n",
+       "  margin-right: 10px;\n",
+       "}\n",
+       "\n",
+       ".xr-obj-type {\n",
+       "  color: var(--xr-font-color2);\n",
+       "}\n",
+       "\n",
+       ".xr-sections {\n",
+       "  padding-left: 0 !important;\n",
+       "  display: grid;\n",
+       "  grid-template-columns: 150px auto auto 1fr 20px 20px;\n",
+       "}\n",
+       "\n",
+       ".xr-section-item {\n",
+       "  display: contents;\n",
+       "}\n",
+       "\n",
+       ".xr-section-item input {\n",
+       "  display: none;\n",
+       "}\n",
+       "\n",
+       ".xr-section-item input + label {\n",
+       "  color: var(--xr-disabled-color);\n",
+       "}\n",
+       "\n",
+       ".xr-section-item input:enabled + label {\n",
+       "  cursor: pointer;\n",
+       "  color: var(--xr-font-color2);\n",
+       "}\n",
+       "\n",
+       ".xr-section-item input:enabled + label:hover {\n",
+       "  color: var(--xr-font-color0);\n",
+       "}\n",
+       "\n",
+       ".xr-section-summary {\n",
+       "  grid-column: 1;\n",
+       "  color: var(--xr-font-color2);\n",
+       "  font-weight: 500;\n",
+       "}\n",
+       "\n",
+       ".xr-section-summary > span {\n",
+       "  display: inline-block;\n",
+       "  padding-left: 0.5em;\n",
+       "}\n",
+       "\n",
+       ".xr-section-summary-in:disabled + label {\n",
+       "  color: var(--xr-font-color2);\n",
+       "}\n",
+       "\n",
+       ".xr-section-summary-in + label:before {\n",
+       "  display: inline-block;\n",
+       "  content: 'â–º';\n",
+       "  font-size: 11px;\n",
+       "  width: 15px;\n",
+       "  text-align: center;\n",
+       "}\n",
+       "\n",
+       ".xr-section-summary-in:disabled + label:before {\n",
+       "  color: var(--xr-disabled-color);\n",
+       "}\n",
+       "\n",
+       ".xr-section-summary-in:checked + label:before {\n",
+       "  content: 'â–¼';\n",
+       "}\n",
+       "\n",
+       ".xr-section-summary-in:checked + label > span {\n",
+       "  display: none;\n",
+       "}\n",
+       "\n",
+       ".xr-section-summary,\n",
+       ".xr-section-inline-details {\n",
+       "  padding-top: 4px;\n",
+       "  padding-bottom: 4px;\n",
+       "}\n",
+       "\n",
+       ".xr-section-inline-details {\n",
+       "  grid-column: 2 / -1;\n",
+       "}\n",
+       "\n",
+       ".xr-section-details {\n",
+       "  display: none;\n",
+       "  grid-column: 1 / -1;\n",
+       "  margin-bottom: 5px;\n",
+       "}\n",
+       "\n",
+       ".xr-section-summary-in:checked ~ .xr-section-details {\n",
+       "  display: contents;\n",
+       "}\n",
+       "\n",
+       ".xr-array-wrap {\n",
+       "  grid-column: 1 / -1;\n",
+       "  display: grid;\n",
+       "  grid-template-columns: 20px auto;\n",
+       "}\n",
+       "\n",
+       ".xr-array-wrap > label {\n",
+       "  grid-column: 1;\n",
+       "  vertical-align: top;\n",
+       "}\n",
+       "\n",
+       ".xr-preview {\n",
+       "  color: var(--xr-font-color3);\n",
+       "}\n",
+       "\n",
+       ".xr-array-preview,\n",
+       ".xr-array-data {\n",
+       "  padding: 0 5px !important;\n",
+       "  grid-column: 2;\n",
+       "}\n",
+       "\n",
+       ".xr-array-data,\n",
+       ".xr-array-in:checked ~ .xr-array-preview {\n",
+       "  display: none;\n",
+       "}\n",
+       "\n",
+       ".xr-array-in:checked ~ .xr-array-data,\n",
+       ".xr-array-preview {\n",
+       "  display: inline-block;\n",
+       "}\n",
+       "\n",
+       ".xr-dim-list {\n",
+       "  display: inline-block !important;\n",
+       "  list-style: none;\n",
+       "  padding: 0 !important;\n",
+       "  margin: 0;\n",
+       "}\n",
+       "\n",
+       ".xr-dim-list li {\n",
+       "  display: inline-block;\n",
+       "  padding: 0;\n",
+       "  margin: 0;\n",
+       "}\n",
+       "\n",
+       ".xr-dim-list:before {\n",
+       "  content: '(';\n",
+       "}\n",
+       "\n",
+       ".xr-dim-list:after {\n",
+       "  content: ')';\n",
+       "}\n",
+       "\n",
+       ".xr-dim-list li:not(:last-child):after {\n",
+       "  content: ',';\n",
+       "  padding-right: 5px;\n",
+       "}\n",
+       "\n",
+       ".xr-has-index {\n",
+       "  font-weight: bold;\n",
+       "}\n",
+       "\n",
+       ".xr-var-list,\n",
+       ".xr-var-item {\n",
+       "  display: contents;\n",
+       "}\n",
+       "\n",
+       ".xr-var-item > div,\n",
+       ".xr-var-item label,\n",
+       ".xr-var-item > .xr-var-name span {\n",
+       "  background-color: var(--xr-background-color-row-even);\n",
+       "  margin-bottom: 0;\n",
+       "}\n",
+       "\n",
+       ".xr-var-item > .xr-var-name:hover span {\n",
+       "  padding-right: 5px;\n",
+       "}\n",
+       "\n",
+       ".xr-var-list > li:nth-child(odd) > div,\n",
+       ".xr-var-list > li:nth-child(odd) > label,\n",
+       ".xr-var-list > li:nth-child(odd) > .xr-var-name span {\n",
+       "  background-color: var(--xr-background-color-row-odd);\n",
+       "}\n",
+       "\n",
+       ".xr-var-name {\n",
+       "  grid-column: 1;\n",
+       "}\n",
+       "\n",
+       ".xr-var-dims {\n",
+       "  grid-column: 2;\n",
+       "}\n",
+       "\n",
+       ".xr-var-dtype {\n",
+       "  grid-column: 3;\n",
+       "  text-align: right;\n",
+       "  color: var(--xr-font-color2);\n",
+       "}\n",
+       "\n",
+       ".xr-var-preview {\n",
+       "  grid-column: 4;\n",
+       "}\n",
+       "\n",
+       ".xr-index-preview {\n",
+       "  grid-column: 2 / 5;\n",
+       "  color: var(--xr-font-color2);\n",
+       "}\n",
+       "\n",
+       ".xr-var-name,\n",
+       ".xr-var-dims,\n",
+       ".xr-var-dtype,\n",
+       ".xr-preview,\n",
+       ".xr-attrs dt {\n",
+       "  white-space: nowrap;\n",
+       "  overflow: hidden;\n",
+       "  text-overflow: ellipsis;\n",
+       "  padding-right: 10px;\n",
+       "}\n",
+       "\n",
+       ".xr-var-name:hover,\n",
+       ".xr-var-dims:hover,\n",
+       ".xr-var-dtype:hover,\n",
+       ".xr-attrs dt:hover {\n",
+       "  overflow: visible;\n",
+       "  width: auto;\n",
+       "  z-index: 1;\n",
+       "}\n",
+       "\n",
+       ".xr-var-attrs,\n",
+       ".xr-var-data,\n",
+       ".xr-index-data {\n",
+       "  display: none;\n",
+       "  background-color: var(--xr-background-color) !important;\n",
+       "  padding-bottom: 5px !important;\n",
+       "}\n",
+       "\n",
+       ".xr-var-attrs-in:checked ~ .xr-var-attrs,\n",
+       ".xr-var-data-in:checked ~ .xr-var-data,\n",
+       ".xr-index-data-in:checked ~ .xr-index-data {\n",
+       "  display: block;\n",
+       "}\n",
+       "\n",
+       ".xr-var-data > table {\n",
+       "  float: right;\n",
+       "}\n",
+       "\n",
+       ".xr-var-name span,\n",
+       ".xr-var-data,\n",
+       ".xr-index-name div,\n",
+       ".xr-index-data,\n",
+       ".xr-attrs {\n",
+       "  padding-left: 25px !important;\n",
+       "}\n",
+       "\n",
+       ".xr-attrs,\n",
+       ".xr-var-attrs,\n",
+       ".xr-var-data,\n",
+       ".xr-index-data {\n",
+       "  grid-column: 1 / -1;\n",
+       "}\n",
+       "\n",
+       "dl.xr-attrs {\n",
+       "  padding: 0;\n",
+       "  margin: 0;\n",
+       "  display: grid;\n",
+       "  grid-template-columns: 125px auto;\n",
+       "}\n",
+       "\n",
+       ".xr-attrs dt,\n",
+       ".xr-attrs dd {\n",
+       "  padding: 0;\n",
+       "  margin: 0;\n",
+       "  float: left;\n",
+       "  padding-right: 10px;\n",
+       "  width: auto;\n",
+       "}\n",
+       "\n",
+       ".xr-attrs dt {\n",
+       "  font-weight: normal;\n",
+       "  grid-column: 1;\n",
+       "}\n",
+       "\n",
+       ".xr-attrs dt:hover span {\n",
+       "  display: inline-block;\n",
+       "  background: var(--xr-background-color);\n",
+       "  padding-right: 10px;\n",
+       "}\n",
+       "\n",
+       ".xr-attrs dd {\n",
+       "  grid-column: 2;\n",
+       "  white-space: pre-wrap;\n",
+       "  word-break: break-all;\n",
+       "}\n",
+       "\n",
+       ".xr-icon-database,\n",
+       ".xr-icon-file-text2,\n",
+       ".xr-no-icon {\n",
+       "  display: inline-block;\n",
+       "  vertical-align: middle;\n",
+       "  width: 1em;\n",
+       "  height: 1.5em !important;\n",
+       "  stroke-width: 0;\n",
+       "  stroke: currentColor;\n",
+       "  fill: currentColor;\n",
+       "}\n",
+       "</style><pre class='xr-text-repr-fallback'>&lt;xarray.Dataset&gt; Size: 9GB\n",
+       "Dimensions:                    (time: 10, y: 16389, x: 13693)\n",
+       "Coordinates:\n",
+       "  * time                       (time) datetime64[ns] 80B 2000-01-01 ... 2090-...\n",
+       "  * x                          (x) float64 110kB 1.019e+06 ... 1.84e+06\n",
+       "  * y                          (y) float64 131kB 7.609e+05 ... 1.744e+06\n",
+       "Data variables:\n",
+       "    albers_conical_equal_area  |S64 64B ...\n",
+       "    urb                        (time, y, x) float32 9GB dask.array&lt;chunksize=(1, 3962, 3308), meta=np.ndarray&gt;\n",
+       "Attributes: (12/32)\n",
+       "    Conventions:               CF-1.5\n",
+       "    DODS.strlen:               0\n",
+       "    GDAL:                      GDAL 1.10.1, released 2013/08/26\n",
+       "    Metadata_Conventions:      Unidata Dataset Discovery v1.0\n",
+       "    NCO:                       4.3.8\n",
+       "    acknowledgement:           Southeast Regional Assessment Project; Biodive...\n",
+       "    ...                        ...\n",
+       "    publisher_url:             https://www.cida.usgs.gov/\n",
+       "    summary:                   This dataset represents the extent of urbaniza...\n",
+       "    time_coverage_end:         2100-01-01T00:00\n",
+       "    time_coverage_resolution:  P10Y\n",
+       "    time_coverage_start:       2010-01-01T00:00\n",
+       "    title:                     Urban Growth Projection for DSL-SAMBI</pre><div class='xr-wrap' style='display:none'><div class='xr-header'><div class='xr-obj-type'>xarray.Dataset</div></div><ul class='xr-sections'><li class='xr-section-item'><input id='section-d305a879-d8ca-4153-b40a-a1f553d1fd70' class='xr-section-summary-in' type='checkbox' disabled ><label for='section-d305a879-d8ca-4153-b40a-a1f553d1fd70' class='xr-section-summary'  title='Expand/collapse section'>Dimensions:</label><div class='xr-section-inline-details'><ul class='xr-dim-list'><li><span class='xr-has-index'>time</span>: 10</li><li><span class='xr-has-index'>y</span>: 16389</li><li><span class='xr-has-index'>x</span>: 13693</li></ul></div><div class='xr-section-details'></div></li><li class='xr-section-item'><input id='section-d4cc599b-ce84-4cc6-b1fb-f91198b43d24' class='xr-section-summary-in' type='checkbox'  checked><label for='section-d4cc599b-ce84-4cc6-b1fb-f91198b43d24' class='xr-section-summary' >Coordinates: <span>(3)</span></label><div class='xr-section-inline-details'></div><div class='xr-section-details'><ul class='xr-var-list'><li class='xr-var-item'><div class='xr-var-name'><span class='xr-has-index'>time</span></div><div class='xr-var-dims'>(time)</div><div class='xr-var-dtype'>datetime64[ns]</div><div class='xr-var-preview xr-preview'>2000-01-01 ... 2090-01-01</div><input id='attrs-0cb49f8b-1c71-4035-9899-942afcf719a3' class='xr-var-attrs-in' type='checkbox' disabled><label for='attrs-0cb49f8b-1c71-4035-9899-942afcf719a3' title='Show/Hide attributes'><svg class='icon xr-icon-file-text2'><use xlink:href='#icon-file-text2'></use></svg></label><input id='data-17704630-2c3d-44fd-bb69-f851ee75ed4b' class='xr-var-data-in' type='checkbox'><label for='data-17704630-2c3d-44fd-bb69-f851ee75ed4b' title='Show/Hide data repr'><svg class='icon xr-icon-database'><use xlink:href='#icon-database'></use></svg></label><div class='xr-var-attrs'><dl class='xr-attrs'></dl></div><div class='xr-var-data'><pre>array([&#x27;2000-01-01T00:00:00.000000000&#x27;, &#x27;2010-01-01T00:00:00.000000000&#x27;,\n",
+       "       &#x27;2020-01-01T00:00:00.000000000&#x27;, &#x27;2030-01-01T00:00:00.000000000&#x27;,\n",
+       "       &#x27;2040-01-01T00:00:00.000000000&#x27;, &#x27;2050-01-01T00:00:00.000000000&#x27;,\n",
+       "       &#x27;2060-01-01T00:00:00.000000000&#x27;, &#x27;2070-01-01T00:00:00.000000000&#x27;,\n",
+       "       &#x27;2080-01-01T00:00:00.000000000&#x27;, &#x27;2090-01-01T00:00:00.000000000&#x27;],\n",
+       "      dtype=&#x27;datetime64[ns]&#x27;)</pre></div></li><li class='xr-var-item'><div class='xr-var-name'><span class='xr-has-index'>x</span></div><div class='xr-var-dims'>(x)</div><div class='xr-var-dtype'>float64</div><div class='xr-var-preview xr-preview'>1.019e+06 1.019e+06 ... 1.84e+06</div><input id='attrs-3161abe2-e6ab-49bf-a4e9-9fad095a84de' class='xr-var-attrs-in' type='checkbox' ><label for='attrs-3161abe2-e6ab-49bf-a4e9-9fad095a84de' title='Show/Hide attributes'><svg class='icon xr-icon-file-text2'><use xlink:href='#icon-file-text2'></use></svg></label><input id='data-ded99f85-067b-430b-ad22-db8310cfafc1' class='xr-var-data-in' type='checkbox'><label for='data-ded99f85-067b-430b-ad22-db8310cfafc1' title='Show/Hide data repr'><svg class='icon xr-icon-database'><use xlink:href='#icon-database'></use></svg></label><div class='xr-var-attrs'><dl class='xr-attrs'><dt><span>_CoordinateAxisType :</span></dt><dd>GeoX</dd><dt><span>long_name :</span></dt><dd>x coordinate of projection</dd><dt><span>standard_name :</span></dt><dd>projection_x_coordinate</dd><dt><span>units :</span></dt><dd>m</dd></dl></div><div class='xr-var-data'><pre>array([1018838.773, 1018898.773, 1018958.773, ..., 1840238.773, 1840298.773,\n",
+       "       1840358.773])</pre></div></li><li class='xr-var-item'><div class='xr-var-name'><span class='xr-has-index'>y</span></div><div class='xr-var-dims'>(y)</div><div class='xr-var-dtype'>float64</div><div class='xr-var-preview xr-preview'>7.609e+05 7.61e+05 ... 1.744e+06</div><input id='attrs-7cfe9655-6859-40d8-a0c3-f5382633137e' class='xr-var-attrs-in' type='checkbox' ><label for='attrs-7cfe9655-6859-40d8-a0c3-f5382633137e' title='Show/Hide attributes'><svg class='icon xr-icon-file-text2'><use xlink:href='#icon-file-text2'></use></svg></label><input id='data-2948c3a4-6010-4189-9daa-fde99c3478a3' class='xr-var-data-in' type='checkbox'><label for='data-2948c3a4-6010-4189-9daa-fde99c3478a3' title='Show/Hide data repr'><svg class='icon xr-icon-database'><use xlink:href='#icon-database'></use></svg></label><div class='xr-var-attrs'><dl class='xr-attrs'><dt><span>_CoordinateAxisType :</span></dt><dd>GeoY</dd><dt><span>long_name :</span></dt><dd>y coordinate of projection</dd><dt><span>standard_name :</span></dt><dd>projection_y_coordinate</dd><dt><span>units :</span></dt><dd>m</dd></dl></div><div class='xr-var-data'><pre>array([ 760892.078,  760952.078,  761012.078, ..., 1744052.078, 1744112.078,\n",
+       "       1744172.078])</pre></div></li></ul></div></li><li class='xr-section-item'><input id='section-6ef25e84-f8f4-47eb-8049-e71f6495007a' class='xr-section-summary-in' type='checkbox'  checked><label for='section-6ef25e84-f8f4-47eb-8049-e71f6495007a' class='xr-section-summary' >Data variables: <span>(2)</span></label><div class='xr-section-inline-details'></div><div class='xr-section-details'><ul class='xr-var-list'><li class='xr-var-item'><div class='xr-var-name'><span>albers_conical_equal_area</span></div><div class='xr-var-dims'>()</div><div class='xr-var-dtype'>|S64</div><div class='xr-var-preview xr-preview'>...</div><input id='attrs-5d2c312e-3779-423c-9bed-2026169ce61d' class='xr-var-attrs-in' type='checkbox' ><label for='attrs-5d2c312e-3779-423c-9bed-2026169ce61d' title='Show/Hide attributes'><svg class='icon xr-icon-file-text2'><use xlink:href='#icon-file-text2'></use></svg></label><input id='data-55ebdc42-7c8c-4db1-a036-763dbd0a2036' class='xr-var-data-in' type='checkbox'><label for='data-55ebdc42-7c8c-4db1-a036-763dbd0a2036' title='Show/Hide data repr'><svg class='icon xr-icon-database'><use xlink:href='#icon-database'></use></svg></label><div class='xr-var-attrs'><dl class='xr-attrs'><dt><span>GeoTransform :</span></dt><dd>1018808.773 60 0 1744202.078000002 0 -60 </dd><dt><span>false_easting :</span></dt><dd>0.0</dd><dt><span>false_northing :</span></dt><dd>0.0</dd><dt><span>grid_mapping_name :</span></dt><dd>albers_conical_equal_area</dd><dt><span>inverse_flattening :</span></dt><dd>298.2572221010002</dd><dt><span>latitude_of_projection_origin :</span></dt><dd>23.0</dd><dt><span>longitude_of_central_meridian :</span></dt><dd>-96.0</dd><dt><span>longitude_of_prime_meridian :</span></dt><dd>0.0</dd><dt><span>semi_major_axis :</span></dt><dd>6378137.0</dd><dt><span>standard_parallel :</span></dt><dd>[29.5, 45.5]</dd></dl></div><div class='xr-var-data'><pre>[1 values with dtype=|S64]</pre></div></li><li class='xr-var-item'><div class='xr-var-name'><span>urb</span></div><div class='xr-var-dims'>(time, y, x)</div><div class='xr-var-dtype'>float32</div><div class='xr-var-preview xr-preview'>dask.array&lt;chunksize=(1, 3962, 3308), meta=np.ndarray&gt;</div><input id='attrs-3eacb115-6994-437e-88c0-3dc592fceb17' class='xr-var-attrs-in' type='checkbox' ><label for='attrs-3eacb115-6994-437e-88c0-3dc592fceb17' title='Show/Hide attributes'><svg class='icon xr-icon-file-text2'><use xlink:href='#icon-file-text2'></use></svg></label><input id='data-9266eb99-efd5-4cb0-8cc2-684ee811cc1e' class='xr-var-data-in' type='checkbox'><label for='data-9266eb99-efd5-4cb0-8cc2-684ee811cc1e' title='Show/Hide data repr'><svg class='icon xr-icon-database'><use xlink:href='#icon-database'></use></svg></label><div class='xr-var-attrs'><dl class='xr-attrs'><dt><span>grid_mapping :</span></dt><dd>albers_conical_equal_area</dd><dt><span>long_name :</span></dt><dd>Urban Growth SAMBI-DSL</dd><dt><span>name :</span></dt><dd>urb</dd><dt><span>units :</span></dt><dd>percent</dd></dl></div><div class='xr-var-data'><table>\n",
+       "    <tr>\n",
+       "        <td>\n",
+       "            <table style=\"border-collapse: collapse;\">\n",
+       "                <thead>\n",
+       "                    <tr>\n",
+       "                        <td> </td>\n",
+       "                        <th> Array </th>\n",
+       "                        <th> Chunk </th>\n",
+       "                    </tr>\n",
+       "                </thead>\n",
+       "                <tbody>\n",
+       "                    \n",
+       "                    <tr>\n",
+       "                        <th> Bytes </th>\n",
+       "                        <td> 8.36 GiB </td>\n",
+       "                        <td> 50.00 MiB </td>\n",
+       "                    </tr>\n",
+       "                    \n",
+       "                    <tr>\n",
+       "                        <th> Shape </th>\n",
+       "                        <td> (10, 16389, 13693) </td>\n",
+       "                        <td> (1, 3962, 3308) </td>\n",
+       "                    </tr>\n",
+       "                    <tr>\n",
+       "                        <th> Dask graph </th>\n",
+       "                        <td colspan=\"2\"> 250 chunks in 2 graph layers </td>\n",
+       "                    </tr>\n",
+       "                    <tr>\n",
+       "                        <th> Data type </th>\n",
+       "                        <td colspan=\"2\"> float32 numpy.ndarray </td>\n",
+       "                    </tr>\n",
+       "                </tbody>\n",
+       "            </table>\n",
+       "        </td>\n",
+       "        <td>\n",
+       "        <svg width=\"175\" height=\"184\" style=\"stroke:rgb(0,0,0);stroke-width:1\" >\n",
+       "\n",
+       "  <!-- Horizontal lines -->\n",
+       "  <line x1=\"10\" y1=\"0\" x2=\"24\" y2=\"14\" style=\"stroke-width:2\" />\n",
+       "  <line x1=\"10\" y1=\"29\" x2=\"24\" y2=\"43\" />\n",
+       "  <line x1=\"10\" y1=\"58\" x2=\"24\" y2=\"72\" />\n",
+       "  <line x1=\"10\" y1=\"87\" x2=\"24\" y2=\"101\" />\n",
+       "  <line x1=\"10\" y1=\"116\" x2=\"24\" y2=\"130\" />\n",
+       "  <line x1=\"10\" y1=\"120\" x2=\"24\" y2=\"134\" style=\"stroke-width:2\" />\n",
+       "\n",
+       "  <!-- Vertical lines -->\n",
+       "  <line x1=\"10\" y1=\"0\" x2=\"10\" y2=\"120\" style=\"stroke-width:2\" />\n",
+       "  <line x1=\"11\" y1=\"1\" x2=\"11\" y2=\"121\" />\n",
+       "  <line x1=\"12\" y1=\"2\" x2=\"12\" y2=\"122\" />\n",
+       "  <line x1=\"14\" y1=\"4\" x2=\"14\" y2=\"124\" />\n",
+       "  <line x1=\"15\" y1=\"5\" x2=\"15\" y2=\"125\" />\n",
+       "  <line x1=\"17\" y1=\"7\" x2=\"17\" y2=\"127\" />\n",
+       "  <line x1=\"18\" y1=\"8\" x2=\"18\" y2=\"128\" />\n",
+       "  <line x1=\"20\" y1=\"10\" x2=\"20\" y2=\"130\" />\n",
+       "  <line x1=\"21\" y1=\"11\" x2=\"21\" y2=\"131\" />\n",
+       "  <line x1=\"23\" y1=\"13\" x2=\"23\" y2=\"133\" />\n",
+       "  <line x1=\"24\" y1=\"14\" x2=\"24\" y2=\"134\" style=\"stroke-width:2\" />\n",
+       "\n",
+       "  <!-- Colored Rectangle -->\n",
+       "  <polygon points=\"10.0,0.0 24.9485979497544,14.948597949754403 24.9485979497544,134.9485979497544 10.0,120.0\" style=\"fill:#ECB172A0;stroke-width:0\"/>\n",
+       "\n",
+       "  <!-- Horizontal lines -->\n",
+       "  <line x1=\"10\" y1=\"0\" x2=\"110\" y2=\"0\" style=\"stroke-width:2\" />\n",
+       "  <line x1=\"11\" y1=\"1\" x2=\"111\" y2=\"1\" />\n",
+       "  <line x1=\"12\" y1=\"2\" x2=\"113\" y2=\"2\" />\n",
+       "  <line x1=\"14\" y1=\"4\" x2=\"114\" y2=\"4\" />\n",
+       "  <line x1=\"15\" y1=\"5\" x2=\"116\" y2=\"5\" />\n",
+       "  <line x1=\"17\" y1=\"7\" x2=\"117\" y2=\"7\" />\n",
+       "  <line x1=\"18\" y1=\"8\" x2=\"119\" y2=\"8\" />\n",
+       "  <line x1=\"20\" y1=\"10\" x2=\"120\" y2=\"10\" />\n",
+       "  <line x1=\"21\" y1=\"11\" x2=\"122\" y2=\"11\" />\n",
+       "  <line x1=\"23\" y1=\"13\" x2=\"123\" y2=\"13\" />\n",
+       "  <line x1=\"24\" y1=\"14\" x2=\"125\" y2=\"14\" style=\"stroke-width:2\" />\n",
+       "\n",
+       "  <!-- Vertical lines -->\n",
+       "  <line x1=\"10\" y1=\"0\" x2=\"24\" y2=\"14\" style=\"stroke-width:2\" />\n",
+       "  <line x1=\"34\" y1=\"0\" x2=\"49\" y2=\"14\" />\n",
+       "  <line x1=\"58\" y1=\"0\" x2=\"73\" y2=\"14\" />\n",
+       "  <line x1=\"82\" y1=\"0\" x2=\"97\" y2=\"14\" />\n",
+       "  <line x1=\"106\" y1=\"0\" x2=\"121\" y2=\"14\" />\n",
+       "  <line x1=\"110\" y1=\"0\" x2=\"125\" y2=\"14\" style=\"stroke-width:2\" />\n",
+       "\n",
+       "  <!-- Colored Rectangle -->\n",
+       "  <polygon points=\"10.0,0.0 110.25993044114955,0.0 125.20852839090395,14.948597949754403 24.9485979497544,14.948597949754403\" style=\"fill:#ECB172A0;stroke-width:0\"/>\n",
+       "\n",
+       "  <!-- Horizontal lines -->\n",
+       "  <line x1=\"24\" y1=\"14\" x2=\"125\" y2=\"14\" style=\"stroke-width:2\" />\n",
+       "  <line x1=\"24\" y1=\"43\" x2=\"125\" y2=\"43\" />\n",
+       "  <line x1=\"24\" y1=\"72\" x2=\"125\" y2=\"72\" />\n",
+       "  <line x1=\"24\" y1=\"101\" x2=\"125\" y2=\"101\" />\n",
+       "  <line x1=\"24\" y1=\"130\" x2=\"125\" y2=\"130\" />\n",
+       "  <line x1=\"24\" y1=\"134\" x2=\"125\" y2=\"134\" style=\"stroke-width:2\" />\n",
+       "\n",
+       "  <!-- Vertical lines -->\n",
+       "  <line x1=\"24\" y1=\"14\" x2=\"24\" y2=\"134\" style=\"stroke-width:2\" />\n",
+       "  <line x1=\"49\" y1=\"14\" x2=\"49\" y2=\"134\" />\n",
+       "  <line x1=\"73\" y1=\"14\" x2=\"73\" y2=\"134\" />\n",
+       "  <line x1=\"97\" y1=\"14\" x2=\"97\" y2=\"134\" />\n",
+       "  <line x1=\"121\" y1=\"14\" x2=\"121\" y2=\"134\" />\n",
+       "  <line x1=\"125\" y1=\"14\" x2=\"125\" y2=\"134\" style=\"stroke-width:2\" />\n",
+       "\n",
+       "  <!-- Colored Rectangle -->\n",
+       "  <polygon points=\"24.9485979497544,14.948597949754403 125.20852839090395,14.948597949754403 125.20852839090395,134.9485979497544 24.9485979497544,134.9485979497544\" style=\"fill:#ECB172A0;stroke-width:0\"/>\n",
+       "\n",
+       "  <!-- Text -->\n",
+       "  <text x=\"75.078563\" y=\"154.948598\" font-size=\"1.0rem\" font-weight=\"100\" text-anchor=\"middle\" >13693</text>\n",
+       "  <text x=\"145.208528\" y=\"74.948598\" font-size=\"1.0rem\" font-weight=\"100\" text-anchor=\"middle\" transform=\"rotate(-90,145.208528,74.948598)\">16389</text>\n",
+       "  <text x=\"7.474299\" y=\"147.474299\" font-size=\"1.0rem\" font-weight=\"100\" text-anchor=\"middle\" transform=\"rotate(45,7.474299,147.474299)\">10</text>\n",
+       "</svg>\n",
+       "        </td>\n",
+       "    </tr>\n",
+       "</table></div></li></ul></div></li><li class='xr-section-item'><input id='section-946acdfd-e63c-4162-8c43-d0f5a4ceaef1' class='xr-section-summary-in' type='checkbox'  ><label for='section-946acdfd-e63c-4162-8c43-d0f5a4ceaef1' class='xr-section-summary' >Indexes: <span>(3)</span></label><div class='xr-section-inline-details'></div><div class='xr-section-details'><ul class='xr-var-list'><li class='xr-var-item'><div class='xr-index-name'><div>time</div></div><div class='xr-index-preview'>PandasIndex</div><div></div><input id='index-14279840-f629-4dee-a9c5-1e0f3d52d2db' class='xr-index-data-in' type='checkbox'/><label for='index-14279840-f629-4dee-a9c5-1e0f3d52d2db' title='Show/Hide index repr'><svg class='icon xr-icon-database'><use xlink:href='#icon-database'></use></svg></label><div class='xr-index-data'><pre>PandasIndex(DatetimeIndex([&#x27;2000-01-01&#x27;, &#x27;2010-01-01&#x27;, &#x27;2020-01-01&#x27;, &#x27;2030-01-01&#x27;,\n",
+       "               &#x27;2040-01-01&#x27;, &#x27;2050-01-01&#x27;, &#x27;2060-01-01&#x27;, &#x27;2070-01-01&#x27;,\n",
+       "               &#x27;2080-01-01&#x27;, &#x27;2090-01-01&#x27;],\n",
+       "              dtype=&#x27;datetime64[ns]&#x27;, name=&#x27;time&#x27;, freq=None))</pre></div></li><li class='xr-var-item'><div class='xr-index-name'><div>x</div></div><div class='xr-index-preview'>PandasIndex</div><div></div><input id='index-bb3bf31b-2fcc-476a-9966-eaa84c37b5aa' class='xr-index-data-in' type='checkbox'/><label for='index-bb3bf31b-2fcc-476a-9966-eaa84c37b5aa' title='Show/Hide index repr'><svg class='icon xr-icon-database'><use xlink:href='#icon-database'></use></svg></label><div class='xr-index-data'><pre>PandasIndex(Float64Index([1018838.7729999996, 1018898.7729999996, 1018958.7729999996,\n",
+       "              1019018.7729999996, 1019078.7729999996, 1019138.7729999996,\n",
+       "              1019198.7729999996, 1019258.7729999996, 1019318.7729999996,\n",
+       "              1019378.7729999996,\n",
+       "              ...\n",
+       "              1839818.7729999996, 1839878.7729999996, 1839938.7729999996,\n",
+       "              1839998.7729999996, 1840058.7729999996, 1840118.7729999996,\n",
+       "              1840178.7729999996, 1840238.7729999996, 1840298.7729999996,\n",
+       "              1840358.7729999996],\n",
+       "             dtype=&#x27;float64&#x27;, name=&#x27;x&#x27;, length=13693))</pre></div></li><li class='xr-var-item'><div class='xr-index-name'><div>y</div></div><div class='xr-index-preview'>PandasIndex</div><div></div><input id='index-4ef04c40-71db-4c51-a904-8a5f0b7fc01e' class='xr-index-data-in' type='checkbox'/><label for='index-4ef04c40-71db-4c51-a904-8a5f0b7fc01e' title='Show/Hide index repr'><svg class='icon xr-icon-database'><use xlink:href='#icon-database'></use></svg></label><div class='xr-index-data'><pre>PandasIndex(Float64Index([ 760892.0780000016,  760952.0780000016,  761012.0780000016,\n",
+       "               761072.0780000016,  761132.0780000016,  761192.0780000016,\n",
+       "               761252.0780000016,  761312.0780000016,  761372.0780000016,\n",
+       "               761432.0780000016,\n",
+       "              ...\n",
+       "              1743632.0780000016, 1743692.0780000016, 1743752.0780000016,\n",
+       "              1743812.0780000016, 1743872.0780000016, 1743932.0780000016,\n",
+       "              1743992.0780000016, 1744052.0780000016, 1744112.0780000016,\n",
+       "              1744172.0780000016],\n",
+       "             dtype=&#x27;float64&#x27;, name=&#x27;y&#x27;, length=16389))</pre></div></li></ul></div></li><li class='xr-section-item'><input id='section-4d0a186b-0637-4eff-bc3d-6a49c9faa481' class='xr-section-summary-in' type='checkbox'  ><label for='section-4d0a186b-0637-4eff-bc3d-6a49c9faa481' class='xr-section-summary' >Attributes: <span>(32)</span></label><div class='xr-section-inline-details'></div><div class='xr-section-details'><dl class='xr-attrs'><dt><span>Conventions :</span></dt><dd>CF-1.5</dd><dt><span>DODS.strlen :</span></dt><dd>0</dd><dt><span>GDAL :</span></dt><dd>GDAL 1.10.1, released 2013/08/26</dd><dt><span>Metadata_Conventions :</span></dt><dd>Unidata Dataset Discovery v1.0</dd><dt><span>NCO :</span></dt><dd>4.3.8</dd><dt><span>acknowledgement :</span></dt><dd>Southeast Regional Assessment Project; Biodiversity and Spatial Information Center, North Carolina State University, Raleigh, North Carolina 27695, Curtis M. Belyea. Designing Sustainable Landscapes Project; Biodiversity and Spatial Information Center, North Carolina State University, Raleigh, North Carolina 27695, Curtis M. Belyea.</dd><dt><span>cdm_data_type :</span></dt><dd>Grid</dd><dt><span>creator_email :</span></dt><dd>cbelyea@ncsu.edu</dd><dt><span>creator_name :</span></dt><dd>Curtis M. Belyea</dd><dt><span>creator_url :</span></dt><dd>http://www.basic.ncsu.edu/</dd><dt><span>date_created :</span></dt><dd>2012-03-01</dd><dt><span>date_issued :</span></dt><dd>2012-03-01</dd><dt><span>geospatial_lat_max :</span></dt><dd>38.162486</dd><dt><span>geospatial_lat_min :</span></dt><dd>28.271632</dd><dt><span>geospatial_lon_max :</span></dt><dd>-74.980784</dd><dt><span>geospatial_lon_min :</span></dt><dd>-85.48252</dd><dt><span>history :</span></dt><dd>Original ArcGIS grids were converted to NetCDF with gdaltranslate. The data were then changed to make them appropriate for aerial summary statistics calculation; that is, noData was set to zero in the domain of the dataset, a noData mask was applied outside the modeled domain, and the original urbanization extent (which was represented as 1) was set to 100%. For information preceding this modification, see the metadata in the original ArcGIS formatted data here: https://cida.usgs.gov/thredds/catalog.html</dd><dt><span>id :</span></dt><dd>sambi_urb</dd><dt><span>institution :</span></dt><dd>ncsu</dd><dt><span>keywords :</span></dt><dd>SLEUTH, Urbanization, Prediction, Development, Growth, Model, Project Gigalopolis</dd><dt><span>license :</span></dt><dd>Freely available</dd><dt><span>naming_authority :</span></dt><dd>cida.usgs.gov</dd><dt><span>processing_level :</span></dt><dd>Model Result</dd><dt><span>project :</span></dt><dd>Southeast Regional Assessment Project</dd><dt><span>publisher_email :</span></dt><dd>cida@usgs.gov</dd><dt><span>publisher_name :</span></dt><dd>Center for Integrated Data Analytics</dd><dt><span>publisher_url :</span></dt><dd>https://www.cida.usgs.gov/</dd><dt><span>summary :</span></dt><dd>This dataset represents the extent of urbanization (for the year indicated) predicted by the model SLEUTH, developed by Dr. Keith C. Clarke, at the University of California, Santa Barbara, Department of Geography and modified by David I. Donato of the United States Geological Survey (USGS) Eastern Geographic Science Center (EGSC). Further model modification and implementation was performed at the Biodiversity and Spatial Information Center at North Carolina State University. 
Urban growth probability extents throughout the 21st century for the DSL-SAMBI, which encompasses the Atlantic Coastal Plain in the states of Alabama, Florida, Georgia, North Carolina, South Carolina and Virginia. This data set is not intended for site-specific analyses. Interpretations derived from its use are suited for regional and planning purposes only. These data are not intended to be used at scales larger than 1:100,000. Acknowledgment of Biodiversity and Spatial Analysis Center at North Carolina State University is appreciated.</dd><dt><span>time_coverage_end :</span></dt><dd>2100-01-01T00:00</dd><dt><span>time_coverage_resolution :</span></dt><dd>P10Y</dd><dt><span>time_coverage_start :</span></dt><dd>2010-01-01T00:00</dd><dt><span>title :</span></dt><dd>Urban Growth Projection for DSL-SAMBI</dd></dl></div></li></ul></div></div>"
+      ],
+      "text/plain": [
+       "<xarray.Dataset> Size: 9GB\n",
+       "Dimensions:                    (time: 10, y: 16389, x: 13693)\n",
+       "Coordinates:\n",
+       "  * time                       (time) datetime64[ns] 80B 2000-01-01 ... 2090-...\n",
+       "  * x                          (x) float64 110kB 1.019e+06 ... 1.84e+06\n",
+       "  * y                          (y) float64 131kB 7.609e+05 ... 1.744e+06\n",
+       "Data variables:\n",
+       "    albers_conical_equal_area  |S64 64B ...\n",
+       "    urb                        (time, y, x) float32 9GB dask.array<chunksize=(1, 3962, 3308), meta=np.ndarray>\n",
+       "Attributes: (12/32)\n",
+       "    Conventions:               CF-1.5\n",
+       "    DODS.strlen:               0\n",
+       "    GDAL:                      GDAL 1.10.1, released 2013/08/26\n",
+       "    Metadata_Conventions:      Unidata Dataset Discovery v1.0\n",
+       "    NCO:                       4.3.8\n",
+       "    acknowledgement:           Southeast Regional Assessment Project; Biodive...\n",
+       "    ...                        ...\n",
+       "    publisher_url:             https://www.cida.usgs.gov/\n",
+       "    summary:                   This dataset represents the extent of urbaniza...\n",
+       "    time_coverage_end:         2100-01-01T00:00\n",
+       "    time_coverage_resolution:  P10Y\n",
+       "    time_coverage_start:       2010-01-01T00:00\n",
+       "    title:                     Urban Growth Projection for DSL-SAMBI"
+      ]
+     },
+     "execution_count": 5,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "# open and view zarr dataset\n",
+    "fs2 = fsspec.filesystem('s3', anon=True, endpoint_url='https://usgs.osn.mghpcc.org/')\n",
+    "ds = xr.open_dataset(fs2.get_mapper(zarr_url), engine='zarr', \n",
+    "                             backend_kwargs={'consolidated':True}, chunks={})\n",
+    "ds"
+   ]
+  },
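+  {
+   "cell_type": "markdown",
+   "id": "grid-check-note",
+   "metadata": {},
+   "source": [
+    "Optional sanity check (hypothetical cell, not part of the original workflow): confirm the x/y grid spacing is constant and inspect the time step, since the datacube dimension metadata records a single step per axis."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "grid-check-code",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# x/y spacing (should each be a single constant value)\n",
+    "print('x step:', float(ds.x[1] - ds.x[0]), ' y step:', float(ds.y[1] - ds.y[0]))\n",
+    "# time deltas between consecutive slices (roughly decadal; leap years change the exact day counts)\n",
+    "print('time steps:', pd.Series(ds.time.values).diff().dropna().unique())"
+   ]
+  },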
+  {
+   "cell_type": "markdown",
+   "id": "996e60ba-13e4-453a-8534-e62ce747f0fa",
+   "metadata": {},
+   "source": [
+    "## Collection Metadata Input"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 6,
+   "id": "482d204d-b5b6-40e5-ac42-55b459be1097",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "collection description: Urban Growth Projection for DSL-SAMBI\n"
+     ]
+    }
+   ],
+   "source": [
+    "# description of STAC collection\n",
+    "collection_description = ds.attrs['title']\n",
+    "print(f'collection description: {collection_description}')"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 7,
+   "id": "91129d65-a614-4fe4-86b6-105b1f121f55",
+   "metadata": {
+    "tags": []
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "license in dataset attrs: \"Freely available\"\n",
+      "\n",
+      "For USGS data, we can use \"\u001b[1mCC0-1.0\u001b[0m\" as the license. For all other data we can use \"\u001b[1mUnlicense\u001b[0m\".\n",
+      "Ref: https://spdx.org/licenses/\n",
+      "\n",
+      "license automatically chosen: \u001b[1mUnlicense\n"
+     ]
+    }
+   ],
+   "source": [
+    "# license for dataset\n",
+    "collection_license = stac_helpers.license_picker(ds.attrs['license'])"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "0bc7e9b3-ad62-4b10-a18e-66b7ed2d35dc",
+   "metadata": {},
+   "source": [
+    "## Identify x, y, t dimensions of dataset\n",
+    "May require user input if dimensions cannot be auto-detected."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 9,
+   "id": "ab91268f-7200-4cb1-979a-c7d75531d2c0",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Dimension dictionary: {'X': 'x', 'Y': 'y', 'T': 'time'}\n"
+     ]
+    }
+   ],
+   "source": [
+    "# dims_auto_extract = ['X', 'Y', 'T']\n",
+    "# dim_names_dict = {}\n",
+    "# for d in dims_auto_extract:\n",
+    "#     dim_names_dict[d] = stac_helpers.extract_dim(ds, d)\n",
+    "dim_names_dict={'X': 'x', 'Y': 'y', 'T': 'time'}\n",
+    "print(f\"Dimension dictionary: {dim_names_dict}\")"
+   ]
+  },
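+  {
+   "cell_type": "markdown",
+   "id": "cf-axes-note",
+   "metadata": {},
+   "source": [
+    "If the hard-coded dictionary above ever needs to be double-checked, `cf_xarray` (already imported) can usually recover the same mapping from the CF attributes. A minimal sketch:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "cf-axes-check",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# sketch: cf_xarray's axis detection returns a mapping like {'X': ['x'], 'Y': ['y'], 'T': ['time']}\n",
+    "print(ds.cf.axes)"
+   ]
+  },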
+  {
+   "cell_type": "markdown",
+   "id": "810d7480-165d-41c0-bd09-163656a14003",
+   "metadata": {},
+   "source": [
+    "## Get crs info\n",
+    "If there is no crs info that can be automatically extracted from the dataset with pyproj, you will need to manually identify the crs and create a crs object. This reference list of cartopy projections may be a helpful resource: https://scitools.org.uk/cartopy/docs/latest/reference/projections.html"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 10,
+   "id": "239d3b00-77f9-4178-954b-ba81a2b34512",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "crs_var = 'albers_conical_equal_area'"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 11,
+   "id": "b03d52f3-1367-4255-a561-52ee4fc9e92d",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# use pyproj to automatically extract crs info\n",
+    "crs = pyproj.CRS.from_cf(ds[crs_var].attrs)\n",
+    "\n",
+    "# alternatively, create the appropriate cartopy projection\n",
+    "# crs = ccrs.LambertConformal(central_longitude=crs_info.longitude_of_central_meridian, \n",
+    "#                             central_latitude=crs_info.latitude_of_projection_origin,\n",
+    "#                             standard_parallels=crs_info.standard_parallel)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "282c689e-07f0-48ee-8e3d-35876e8c5094",
+   "metadata": {},
+   "source": [
+    "### Compare dataset crs var to generated proj4 string to make sure it looks ok"
+   ]
+  },
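+  {
+   "cell_type": "markdown",
+   "id": "proj4-check-note",
+   "metadata": {},
+   "source": [
+    "One hedged way to do the comparison (hypothetical cell, not from the original workflow) is to print the proj4 string that pyproj derived from the CF grid-mapping attributes and eyeball it against the dataset's `albers_conical_equal_area` grid-mapping attributes:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "proj4-check-code",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# sketch: proj4 representation of the pyproj CRS built above (to_proj4 can be lossy)\n",
+    "print(crs.to_proj4())"
+   ]
+  },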
+  {
+   "cell_type": "code",
+   "execution_count": 12,
+   "id": "4cee13ba-487d-483e-a013-b65685137502",
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "text/html": [
+       "<div><svg style=\"position: absolute; width: 0; height: 0; overflow: hidden\">\n",
+       "<defs>\n",
+       "<symbol id=\"icon-database\" viewBox=\"0 0 32 32\">\n",
+       "<path d=\"M16 0c-8.837 0-16 2.239-16 5v4c0 2.761 7.163 5 16 5s16-2.239 16-5v-4c0-2.761-7.163-5-16-5z\"></path>\n",
+       "<path d=\"M16 17c-8.837 0-16-2.239-16-5v6c0 2.761 7.163 5 16 5s16-2.239 16-5v-6c0 2.761-7.163 5-16 5z\"></path>\n",
+       "<path d=\"M16 26c-8.837 0-16-2.239-16-5v6c0 2.761 7.163 5 16 5s16-2.239 16-5v-6c0 2.761-7.163 5-16 5z\"></path>\n",
+       "</symbol>\n",
+       "<symbol id=\"icon-file-text2\" viewBox=\"0 0 32 32\">\n",
+       "<path d=\"M28.681 7.159c-0.694-0.947-1.662-2.053-2.724-3.116s-2.169-2.030-3.116-2.724c-1.612-1.182-2.393-1.319-2.841-1.319h-15.5c-1.378 0-2.5 1.121-2.5 2.5v27c0 1.378 1.122 2.5 2.5 2.5h23c1.378 0 2.5-1.122 2.5-2.5v-19.5c0-0.448-0.137-1.23-1.319-2.841zM24.543 5.457c0.959 0.959 1.712 1.825 2.268 2.543h-4.811v-4.811c0.718 0.556 1.584 1.309 2.543 2.268zM28 29.5c0 0.271-0.229 0.5-0.5 0.5h-23c-0.271 0-0.5-0.229-0.5-0.5v-27c0-0.271 0.229-0.5 0.5-0.5 0 0 15.499-0 15.5 0v7c0 0.552 0.448 1 1 1h7v19.5z\"></path>\n",
+       "<path d=\"M23 26h-14c-0.552 0-1-0.448-1-1s0.448-1 1-1h14c0.552 0 1 0.448 1 1s-0.448 1-1 1z\"></path>\n",
+       "<path d=\"M23 22h-14c-0.552 0-1-0.448-1-1s0.448-1 1-1h14c0.552 0 1 0.448 1 1s-0.448 1-1 1z\"></path>\n",
+       "<path d=\"M23 18h-14c-0.552 0-1-0.448-1-1s0.448-1 1-1h14c0.552 0 1 0.448 1 1s-0.448 1-1 1z\"></path>\n",
+       "</symbol>\n",
+       "</defs>\n",
+       "</svg>\n",
+       "<style>/* CSS stylesheet for displaying xarray objects in jupyterlab.\n",
+       " *\n",
+       " */\n",
+       "\n",
+       ":root {\n",
+       "  --xr-font-color0: var(--jp-content-font-color0, rgba(0, 0, 0, 1));\n",
+       "  --xr-font-color2: var(--jp-content-font-color2, rgba(0, 0, 0, 0.54));\n",
+       "  --xr-font-color3: var(--jp-content-font-color3, rgba(0, 0, 0, 0.38));\n",
+       "  --xr-border-color: var(--jp-border-color2, #e0e0e0);\n",
+       "  --xr-disabled-color: var(--jp-layout-color3, #bdbdbd);\n",
+       "  --xr-background-color: var(--jp-layout-color0, white);\n",
+       "  --xr-background-color-row-even: var(--jp-layout-color1, white);\n",
+       "  --xr-background-color-row-odd: var(--jp-layout-color2, #eeeeee);\n",
+       "}\n",
+       "\n",
+       "html[theme=dark],\n",
+       "body[data-theme=dark],\n",
+       "body.vscode-dark {\n",
+       "  --xr-font-color0: rgba(255, 255, 255, 1);\n",
+       "  --xr-font-color2: rgba(255, 255, 255, 0.54);\n",
+       "  --xr-font-color3: rgba(255, 255, 255, 0.38);\n",
+       "  --xr-border-color: #1F1F1F;\n",
+       "  --xr-disabled-color: #515151;\n",
+       "  --xr-background-color: #111111;\n",
+       "  --xr-background-color-row-even: #111111;\n",
+       "  --xr-background-color-row-odd: #313131;\n",
+       "}\n",
+       "\n",
+       ".xr-wrap {\n",
+       "  display: block !important;\n",
+       "  min-width: 300px;\n",
+       "  max-width: 700px;\n",
+       "}\n",
+       "\n",
+       ".xr-text-repr-fallback {\n",
+       "  /* fallback to plain text repr when CSS is not injected (untrusted notebook) */\n",
+       "  display: none;\n",
+       "}\n",
+       "\n",
+       ".xr-header {\n",
+       "  padding-top: 6px;\n",
+       "  padding-bottom: 6px;\n",
+       "  margin-bottom: 4px;\n",
+       "  border-bottom: solid 1px var(--xr-border-color);\n",
+       "}\n",
+       "\n",
+       ".xr-header > div,\n",
+       ".xr-header > ul {\n",
+       "  display: inline;\n",
+       "  margin-top: 0;\n",
+       "  margin-bottom: 0;\n",
+       "}\n",
+       "\n",
+       ".xr-obj-type,\n",
+       ".xr-array-name {\n",
+       "  margin-left: 2px;\n",
+       "  margin-right: 10px;\n",
+       "}\n",
+       "\n",
+       ".xr-obj-type {\n",
+       "  color: var(--xr-font-color2);\n",
+       "}\n",
+       "\n",
+       ".xr-sections {\n",
+       "  padding-left: 0 !important;\n",
+       "  display: grid;\n",
+       "  grid-template-columns: 150px auto auto 1fr 20px 20px;\n",
+       "}\n",
+       "\n",
+       ".xr-section-item {\n",
+       "  display: contents;\n",
+       "}\n",
+       "\n",
+       ".xr-section-item input {\n",
+       "  display: none;\n",
+       "}\n",
+       "\n",
+       ".xr-section-item input + label {\n",
+       "  color: var(--xr-disabled-color);\n",
+       "}\n",
+       "\n",
+       ".xr-section-item input:enabled + label {\n",
+       "  cursor: pointer;\n",
+       "  color: var(--xr-font-color2);\n",
+       "}\n",
+       "\n",
+       ".xr-section-item input:enabled + label:hover {\n",
+       "  color: var(--xr-font-color0);\n",
+       "}\n",
+       "\n",
+       ".xr-section-summary {\n",
+       "  grid-column: 1;\n",
+       "  color: var(--xr-font-color2);\n",
+       "  font-weight: 500;\n",
+       "}\n",
+       "\n",
+       ".xr-section-summary > span {\n",
+       "  display: inline-block;\n",
+       "  padding-left: 0.5em;\n",
+       "}\n",
+       "\n",
+       ".xr-section-summary-in:disabled + label {\n",
+       "  color: var(--xr-font-color2);\n",
+       "}\n",
+       "\n",
+       ".xr-section-summary-in + label:before {\n",
+       "  display: inline-block;\n",
+       "  content: 'â–º';\n",
+       "  font-size: 11px;\n",
+       "  width: 15px;\n",
+       "  text-align: center;\n",
+       "}\n",
+       "\n",
+       ".xr-section-summary-in:disabled + label:before {\n",
+       "  color: var(--xr-disabled-color);\n",
+       "}\n",
+       "\n",
+       ".xr-section-summary-in:checked + label:before {\n",
+       "  content: 'â–¼';\n",
+       "}\n",
+       "\n",
+       ".xr-section-summary-in:checked + label > span {\n",
+       "  display: none;\n",
+       "}\n",
+       "\n",
+       ".xr-section-summary,\n",
+       ".xr-section-inline-details {\n",
+       "  padding-top: 4px;\n",
+       "  padding-bottom: 4px;\n",
+       "}\n",
+       "\n",
+       ".xr-section-inline-details {\n",
+       "  grid-column: 2 / -1;\n",
+       "}\n",
+       "\n",
+       ".xr-section-details {\n",
+       "  display: none;\n",
+       "  grid-column: 1 / -1;\n",
+       "  margin-bottom: 5px;\n",
+       "}\n",
+       "\n",
+       ".xr-section-summary-in:checked ~ .xr-section-details {\n",
+       "  display: contents;\n",
+       "}\n",
+       "\n",
+       ".xr-array-wrap {\n",
+       "  grid-column: 1 / -1;\n",
+       "  display: grid;\n",
+       "  grid-template-columns: 20px auto;\n",
+       "}\n",
+       "\n",
+       ".xr-array-wrap > label {\n",
+       "  grid-column: 1;\n",
+       "  vertical-align: top;\n",
+       "}\n",
+       "\n",
+       ".xr-preview {\n",
+       "  color: var(--xr-font-color3);\n",
+       "}\n",
+       "\n",
+       ".xr-array-preview,\n",
+       ".xr-array-data {\n",
+       "  padding: 0 5px !important;\n",
+       "  grid-column: 2;\n",
+       "}\n",
+       "\n",
+       ".xr-array-data,\n",
+       ".xr-array-in:checked ~ .xr-array-preview {\n",
+       "  display: none;\n",
+       "}\n",
+       "\n",
+       ".xr-array-in:checked ~ .xr-array-data,\n",
+       ".xr-array-preview {\n",
+       "  display: inline-block;\n",
+       "}\n",
+       "\n",
+       ".xr-dim-list {\n",
+       "  display: inline-block !important;\n",
+       "  list-style: none;\n",
+       "  padding: 0 !important;\n",
+       "  margin: 0;\n",
+       "}\n",
+       "\n",
+       ".xr-dim-list li {\n",
+       "  display: inline-block;\n",
+       "  padding: 0;\n",
+       "  margin: 0;\n",
+       "}\n",
+       "\n",
+       ".xr-dim-list:before {\n",
+       "  content: '(';\n",
+       "}\n",
+       "\n",
+       ".xr-dim-list:after {\n",
+       "  content: ')';\n",
+       "}\n",
+       "\n",
+       ".xr-dim-list li:not(:last-child):after {\n",
+       "  content: ',';\n",
+       "  padding-right: 5px;\n",
+       "}\n",
+       "\n",
+       ".xr-has-index {\n",
+       "  font-weight: bold;\n",
+       "}\n",
+       "\n",
+       ".xr-var-list,\n",
+       ".xr-var-item {\n",
+       "  display: contents;\n",
+       "}\n",
+       "\n",
+       ".xr-var-item > div,\n",
+       ".xr-var-item label,\n",
+       ".xr-var-item > .xr-var-name span {\n",
+       "  background-color: var(--xr-background-color-row-even);\n",
+       "  margin-bottom: 0;\n",
+       "}\n",
+       "\n",
+       ".xr-var-item > .xr-var-name:hover span {\n",
+       "  padding-right: 5px;\n",
+       "}\n",
+       "\n",
+       ".xr-var-list > li:nth-child(odd) > div,\n",
+       ".xr-var-list > li:nth-child(odd) > label,\n",
+       ".xr-var-list > li:nth-child(odd) > .xr-var-name span {\n",
+       "  background-color: var(--xr-background-color-row-odd);\n",
+       "}\n",
+       "\n",
+       ".xr-var-name {\n",
+       "  grid-column: 1;\n",
+       "}\n",
+       "\n",
+       ".xr-var-dims {\n",
+       "  grid-column: 2;\n",
+       "}\n",
+       "\n",
+       ".xr-var-dtype {\n",
+       "  grid-column: 3;\n",
+       "  text-align: right;\n",
+       "  color: var(--xr-font-color2);\n",
+       "}\n",
+       "\n",
+       ".xr-var-preview {\n",
+       "  grid-column: 4;\n",
+       "}\n",
+       "\n",
+       ".xr-index-preview {\n",
+       "  grid-column: 2 / 5;\n",
+       "  color: var(--xr-font-color2);\n",
+       "}\n",
+       "\n",
+       ".xr-var-name,\n",
+       ".xr-var-dims,\n",
+       ".xr-var-dtype,\n",
+       ".xr-preview,\n",
+       ".xr-attrs dt {\n",
+       "  white-space: nowrap;\n",
+       "  overflow: hidden;\n",
+       "  text-overflow: ellipsis;\n",
+       "  padding-right: 10px;\n",
+       "}\n",
+       "\n",
+       ".xr-var-name:hover,\n",
+       ".xr-var-dims:hover,\n",
+       ".xr-var-dtype:hover,\n",
+       ".xr-attrs dt:hover {\n",
+       "  overflow: visible;\n",
+       "  width: auto;\n",
+       "  z-index: 1;\n",
+       "}\n",
+       "\n",
+       ".xr-var-attrs,\n",
+       ".xr-var-data,\n",
+       ".xr-index-data {\n",
+       "  display: none;\n",
+       "  background-color: var(--xr-background-color) !important;\n",
+       "  padding-bottom: 5px !important;\n",
+       "}\n",
+       "\n",
+       ".xr-var-attrs-in:checked ~ .xr-var-attrs,\n",
+       ".xr-var-data-in:checked ~ .xr-var-data,\n",
+       ".xr-index-data-in:checked ~ .xr-index-data {\n",
+       "  display: block;\n",
+       "}\n",
+       "\n",
+       ".xr-var-data > table {\n",
+       "  float: right;\n",
+       "}\n",
+       "\n",
+       ".xr-var-name span,\n",
+       ".xr-var-data,\n",
+       ".xr-index-name div,\n",
+       ".xr-index-data,\n",
+       ".xr-attrs {\n",
+       "  padding-left: 25px !important;\n",
+       "}\n",
+       "\n",
+       ".xr-attrs,\n",
+       ".xr-var-attrs,\n",
+       ".xr-var-data,\n",
+       ".xr-index-data {\n",
+       "  grid-column: 1 / -1;\n",
+       "}\n",
+       "\n",
+       "dl.xr-attrs {\n",
+       "  padding: 0;\n",
+       "  margin: 0;\n",
+       "  display: grid;\n",
+       "  grid-template-columns: 125px auto;\n",
+       "}\n",
+       "\n",
+       ".xr-attrs dt,\n",
+       ".xr-attrs dd {\n",
+       "  padding: 0;\n",
+       "  margin: 0;\n",
+       "  float: left;\n",
+       "  padding-right: 10px;\n",
+       "  width: auto;\n",
+       "}\n",
+       "\n",
+       ".xr-attrs dt {\n",
+       "  font-weight: normal;\n",
+       "  grid-column: 1;\n",
+       "}\n",
+       "\n",
+       ".xr-attrs dt:hover span {\n",
+       "  display: inline-block;\n",
+       "  background: var(--xr-background-color);\n",
+       "  padding-right: 10px;\n",
+       "}\n",
+       "\n",
+       ".xr-attrs dd {\n",
+       "  grid-column: 2;\n",
+       "  white-space: pre-wrap;\n",
+       "  word-break: break-all;\n",
+       "}\n",
+       "\n",
+       ".xr-icon-database,\n",
+       ".xr-icon-file-text2,\n",
+       ".xr-no-icon {\n",
+       "  display: inline-block;\n",
+       "  vertical-align: middle;\n",
+       "  width: 1em;\n",
+       "  height: 1.5em !important;\n",
+       "  stroke-width: 0;\n",
+       "  stroke: currentColor;\n",
+       "  fill: currentColor;\n",
+       "}\n",
+       "</style><pre class='xr-text-repr-fallback'>&lt;xarray.DataArray &#x27;albers_conical_equal_area&#x27; ()&gt; Size: 64B\n",
+       "[1 values with dtype=|S64]\n",
+       "Attributes:\n",
+       "    GeoTransform:                   1018808.773 60 0 1744202.078000002 0 -60 \n",
+       "    false_easting:                  0.0\n",
+       "    false_northing:                 0.0\n",
+       "    grid_mapping_name:              albers_conical_equal_area\n",
+       "    inverse_flattening:             298.2572221010002\n",
+       "    latitude_of_projection_origin:  23.0\n",
+       "    longitude_of_central_meridian:  -96.0\n",
+       "    longitude_of_prime_meridian:    0.0\n",
+       "    semi_major_axis:                6378137.0\n",
+       "    standard_parallel:              [29.5, 45.5]</pre><div class='xr-wrap' style='display:none'><div class='xr-header'><div class='xr-obj-type'>xarray.DataArray</div><div class='xr-array-name'>'albers_conical_equal_area'</div></div><ul class='xr-sections'><li class='xr-section-item'><div class='xr-array-wrap'><input id='section-7a40f497-2e8a-4158-b3c4-d3b720f56fcf' class='xr-array-in' type='checkbox' checked><label for='section-7a40f497-2e8a-4158-b3c4-d3b720f56fcf' title='Show/hide data repr'><svg class='icon xr-icon-database'><use xlink:href='#icon-database'></use></svg></label><div class='xr-array-preview xr-preview'><span>...</span></div><div class='xr-array-data'><pre>[1 values with dtype=|S64]</pre></div></div></li><li class='xr-section-item'><input id='section-70b5666e-1ac6-412e-9c41-7665d5594b8e' class='xr-section-summary-in' type='checkbox' disabled ><label for='section-70b5666e-1ac6-412e-9c41-7665d5594b8e' class='xr-section-summary'  title='Expand/collapse section'>Coordinates: <span>(0)</span></label><div class='xr-section-inline-details'></div><div class='xr-section-details'><ul class='xr-var-list'></ul></div></li><li class='xr-section-item'><input id='section-ddc0b76b-5f94-4f10-8c93-b6e00667a353' class='xr-section-summary-in' type='checkbox' disabled ><label for='section-ddc0b76b-5f94-4f10-8c93-b6e00667a353' class='xr-section-summary'  title='Expand/collapse section'>Indexes: <span>(0)</span></label><div class='xr-section-inline-details'></div><div class='xr-section-details'><ul class='xr-var-list'></ul></div></li><li class='xr-section-item'><input id='section-06f5a4cd-43ec-4820-aea9-9991c9e4a678' class='xr-section-summary-in' type='checkbox'  ><label for='section-06f5a4cd-43ec-4820-aea9-9991c9e4a678' class='xr-section-summary' >Attributes: <span>(10)</span></label><div class='xr-section-inline-details'></div><div class='xr-section-details'><dl class='xr-attrs'><dt><span>GeoTransform :</span></dt><dd>1018808.773 60 0 1744202.078000002 0 -60 </dd><dt><span>false_easting :</span></dt><dd>0.0</dd><dt><span>false_northing :</span></dt><dd>0.0</dd><dt><span>grid_mapping_name :</span></dt><dd>albers_conical_equal_area</dd><dt><span>inverse_flattening :</span></dt><dd>298.2572221010002</dd><dt><span>latitude_of_projection_origin :</span></dt><dd>23.0</dd><dt><span>longitude_of_central_meridian :</span></dt><dd>-96.0</dd><dt><span>longitude_of_prime_meridian :</span></dt><dd>0.0</dd><dt><span>semi_major_axis :</span></dt><dd>6378137.0</dd><dt><span>standard_parallel :</span></dt><dd>[29.5, 45.5]</dd></dl></div></li></ul></div></div>"
+      ],
+      "text/plain": [
+       "<xarray.DataArray 'albers_conical_equal_area' ()> Size: 64B\n",
+       "[1 values with dtype=|S64]\n",
+       "Attributes:\n",
+       "    GeoTransform:                   1018808.773 60 0 1744202.078000002 0 -60 \n",
+       "    false_easting:                  0.0\n",
+       "    false_northing:                 0.0\n",
+       "    grid_mapping_name:              albers_conical_equal_area\n",
+       "    inverse_flattening:             298.2572221010002\n",
+       "    latitude_of_projection_origin:  23.0\n",
+       "    longitude_of_central_meridian:  -96.0\n",
+       "    longitude_of_prime_meridian:    0.0\n",
+       "    semi_major_axis:                6378137.0\n",
+       "    standard_parallel:              [29.5, 45.5]"
+      ]
+     },
+     "execution_count": 12,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "ds[crs_var]"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 13,
+   "id": "f7bc73db-7717-450e-9679-525f7be0c910",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stderr",
+     "output_type": "stream",
+     "text": [
+      "/home/conda/global/a0dc0d97e6de5faa8c721c8eda15021703d922e6958b7080a97f142a46820491-20240307-130545-757388-99-pangeo/lib/python3.11/site-packages/pyproj/crs/crs.py:1286: UserWarning: You will likely lose important projection information when converting to a PROJ string from another format. See: https://proj.org/faq.html#what-is-the-best-format-for-describing-coordinate-reference-systems\n",
+      "  proj = self._crs.to_proj4(version=version)\n"
+     ]
+    },
+    {
+     "data": {
+      "text/plain": [
+       "'+proj=aea +lat_0=23 +lon_0=-96 +lat_1=29.5 +lat_2=45.5 +x_0=0 +y_0=0 +ellps=GRS80 +units=m +no_defs +type=crs'"
+      ]
+     },
+     "execution_count": 13,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "crs.to_proj4()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "a8c3ed37-8564-400b-a7fb-25bd5e43d21c",
+   "metadata": {},
+   "source": [
+    "## Create Collection Extent"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "69f0d837-68a5-4fed-9a14-5d75cfbb0da4",
+   "metadata": {},
+   "source": [
+    "### Spatial Extent\n",
+    "##### WARNING - make sure data type is **float** NOT **numpy.float64**"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 14,
+   "id": "d46805e0-8e94-4ebe-aa01-d9a2d7051459",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "[1018838.7729999996, 760892.0780000016, 1840358.7729999996, 1744172.0780000016]\n",
+      "\n",
+      "spatial_bounds data type: <class 'float'>\n"
+     ]
+    }
+   ],
+   "source": [
+    "# pull out lat/lon bbox for data\n",
+    "# coordinates must be from WGS 84 datum\n",
+    "# left, bottom, right, top\n",
+    "\n",
+    "# Note: try changing around the commented out lines below to get type float rather than a numpy float\n",
+    "#spatial_bounds = [ds[dim_names_dict['X']].data.min().compute().astype(float), ds[dim_names_dict['Y']].data.min().compute().astype(float), ds[dim_names_dict['X']].data.max().compute().astype(float), ds[dim_names_dict['Y']].data.max().compute().astype(float)]\n",
+    "#spatial_bounds = [ds[dim_names_dict['X']].data.min().compute().astype(float).tolist(), ds[dim_names_dict['Y']].data.min().compute().astype(float).tolist(), ds[dim_names_dict['X']].data.max().compute().astype(float).tolist(), ds[dim_names_dict['Y']].data.max().compute().astype(float).tolist()]\n",
+    "spatial_bounds = [ds[dim_names_dict['X']].data.min().astype(float).item(), ds[dim_names_dict['Y']].data.min().astype(float).item(), ds[dim_names_dict['X']].data.max().astype(float).item(), ds[dim_names_dict['Y']].data.max().astype(float).item()]\n",
+    "print(spatial_bounds)\n",
+    "print(f'\\nspatial_bounds data type: {type(spatial_bounds[0])}')"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 15,
+   "id": "f16fdb9e-7ed8-40fb-a4f1-9ecabdebc0a1",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "XX, YY = np.meshgrid(ds[dim_names_dict['X']].data, ds[dim_names_dict['Y']].data)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 16,
+   "id": "074fc23c-f4d9-4427-80d3-fbf691e6d411",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "transformer = Transformer.from_crs(crs, \"EPSG:4326\", always_xy=True)\n",
+    "lon, lat = transformer.transform(XX.ravel(), YY.ravel())"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 17,
+   "id": "5345c975-9fe3-48e1-a663-0275cdf275dc",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "lower left coordinates (WGS84): -85.48217902360058, 28.271951434906544\n",
+      "upper right coordinates (WGS84): -74.98118991571037, 38.16218753090247\n"
+     ]
+    }
+   ],
+   "source": [
+    "print(f'lower left coordinates (WGS84): {min(lon)}, {min(lat)}')\n",
+    "print(f'upper right coordinates (WGS84): {max(lon)}, {max(lat)}')"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 18,
+   "id": "e0a5a222-743d-403a-9411-2406374803cf",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# create a spatial extent object \n",
+    "spatial_extent = pystac.SpatialExtent(bboxes=[[min(lon).item(), min(lat).item(), max(lon).item(), max(lat).item()]])"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "a04c8fca-1d33-43ac-9e2b-62d7be2887f7",
+   "metadata": {},
+   "source": [
+    "### Temporal Extent"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 19,
+   "id": "41a84995-867c-4152-8c57-85e3758bbb77",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "min: 2000-01-01 00:00:00 \n",
+      "max: 2090-01-01 00:00:00\n"
+     ]
+    }
+   ],
+   "source": [
+    "# pull out first and last timestamps\n",
+    "temporal_extent_lower = pd.Timestamp(ds[dim_names_dict['T']].data.min())\n",
+    "temporal_extent_upper = pd.Timestamp(ds[dim_names_dict['T']].data.max())\n",
+    "# if you get an error:\n",
+    "# Cannot convert input [] of type <class 'cftime._cftime.DatetimeNoLeap'> to Timestamp\n",
+    "# use the following instead:\n",
+    "#temporal_extent_lower = pd.Timestamp(ds.indexes[dim_names_dict['T']].to_datetimeindex().min())\n",
+    "#temporal_extent_upper = pd.Timestamp(ds.indexes[dim_names_dict['T']].to_datetimeindex().max())\n",
+    "\n",
+    "print(f'min: {temporal_extent_lower} \\nmax: {temporal_extent_upper}')\n",
+    "# create a temporal extent object\n",
+    "temporal_extent = pystac.TemporalExtent(intervals=[[temporal_extent_lower, temporal_extent_upper]])"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 20,
+   "id": "1b1e37c4-5348-46ad-abc9-e005b5d6c02b",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "collection_extent = pystac.Extent(spatial=spatial_extent, temporal=temporal_extent)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "20b00e88-5a13-46b3-9787-d9ac2d4e7bd6",
+   "metadata": {},
+   "source": [
+    "## Open up STAC Catalog and create a collection"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 21,
+   "id": "adf6c59d-58cd-48b1-a5fd-3bb205a3ef56",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# define folder location where your STAC catalog json file is\n",
+    "catalog_path = os.path.join('..', '..', 'catalog')\n",
+    "# open catalog\n",
+    "catalog = pystac.Catalog.from_file(os.path.join(catalog_path, 'catalog.json'))"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 22,
+   "id": "7e96811b-95ae-406a-9728-55fc429d4e1f",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "new collection created\n"
+     ]
+    }
+   ],
+   "source": [
+    "if catalog.get_child(collection_id):\n",
+    "    collection = catalog.get_child(collection_id)\n",
+    "    print(\"existing collection opened\")\n",
+    "    collection.extent=collection_extent\n",
+    "    collection.description=collection_description\n",
+    "    collection.license=collection_license\n",
+    "else:\n",
+    "    collection = pystac.Collection(id=collection_id,\n",
+    "                                   description=collection_description,\n",
+    "                                   extent=collection_extent,\n",
+    "                                   license=collection_license)\n",
+    "    print(\"new collection created\")"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "a21c76e8-cd57-4eb5-a33f-7c668a3b3205",
+   "metadata": {},
+   "source": [
+    "## Add zarr url asset to collection"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 23,
+   "id": "094832af-d22b-4359-b0f6-cf687acce5cc",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "asset_id = \"zarr-s3-osn\"\n",
+    "asset = pystac.Asset(href=zarr_url,\n",
+    "                     description=asset_description,\n",
+    "                     media_type=\"application/vnd+zarr\",\n",
+    "                     roles=asset_roles,\n",
+    "                     extra_fields = xarray_opendataset_kwargs)\n",
+    "collection.add_asset(asset_id, asset)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 24,
+   "id": "0c298d07-f234-4a08-986d-87f4a39e9ae6",
+   "metadata": {
+    "tags": []
+   },
+   "outputs": [],
+   "source": [
+    "asset_id2 = \"zarr-s3\"\n",
+    "asset2 = pystac.Asset(href=zarr_url2,\n",
+    "                     description=asset_description2,\n",
+    "                     media_type=\"application/vnd+zarr\",\n",
+    "                     roles=asset_roles2,\n",
+    "                     extra_fields = xarray_opendataset_kwargs2)\n",
+    "collection.add_asset(asset_id2, asset2)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "f67cd5c9-db33-45c2-bc21-480cd67354f4",
+   "metadata": {},
+   "source": [
+    "## Add datacube extension to collection"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 25,
+   "id": "fc00946d-2880-491d-9b3b-3aeeb4414d6c",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# instantiate extention on collection\n",
+    "dc = DatacubeExtension.ext(collection, add_if_missing=True)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "8bdd77a2-7587-485e-afb7-42af3a822241",
+   "metadata": {},
+   "source": [
+    "### Add cube dimensions (required field for extension)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "e7dc357c-91ec-49ae-83e5-400f791f9792",
+   "metadata": {},
+   "source": [
+    "#### user review needed\n",
+    "#### compare crs information to the projjson to make sure it looks correct"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 26,
+   "id": "ea452f62-5644-49b6-8a4e-7dc4f649fd1a",
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "<Projected CRS: {\"$schema\": \"https://proj.org/schemas/v0.2/projjso ...>\n",
+       "Name: undefined\n",
+       "Axis Info [cartesian]:\n",
+       "- E[east]: Easting (metre)\n",
+       "- N[north]: Northing (metre)\n",
+       "Area of Use:\n",
+       "- undefined\n",
+       "Coordinate Operation:\n",
+       "- name: unknown\n",
+       "- method: Albers Equal Area\n",
+       "Datum: undefined\n",
+       "- Ellipsoid: undefined\n",
+       "- Prime Meridian: undefined"
+      ]
+     },
+     "execution_count": 26,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "# print out crs information in dataset\n",
+    "crs"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 27,
+   "id": "1b1d05ff-8e43-44a7-8343-178b112c4ad6",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "{\n",
+      "  \"$schema\": \"https://proj.org/schemas/v0.5/projjson.schema.json\",\n",
+      "  \"type\": \"ProjectedCRS\",\n",
+      "  \"name\": \"undefined\",\n",
+      "  \"base_crs\": {\n",
+      "    \"name\": \"undefined\",\n",
+      "    \"datum\": {\n",
+      "      \"type\": \"GeodeticReferenceFrame\",\n",
+      "      \"name\": \"undefined\",\n",
+      "      \"ellipsoid\": {\n",
+      "        \"name\": \"undefined\",\n",
+      "        \"semi_major_axis\": 6378137,\n",
+      "        \"inverse_flattening\": 298.257222101\n",
+      "      },\n",
+      "      \"prime_meridian\": {\n",
+      "        \"name\": \"undefined\",\n",
+      "        \"longitude\": 0\n",
+      "      }\n",
+      "    },\n",
+      "    \"coordinate_system\": {\n",
+      "      \"subtype\": \"ellipsoidal\",\n",
+      "      \"axis\": [\n",
+      "        {\n",
+      "          \"name\": \"Longitude\",\n",
+      "          \"abbreviation\": \"lon\",\n",
+      "          \"direction\": \"east\",\n",
+      "          \"unit\": \"degree\"\n",
+      "        },\n",
+      "        {\n",
+      "          \"name\": \"Latitude\",\n",
+      "          \"abbreviation\": \"lat\",\n",
+      "          \"direction\": \"north\",\n",
+      "          \"unit\": \"degree\"\n",
+      "        }\n",
+      "      ]\n",
+      "    }\n",
+      "  },\n",
+      "  \"conversion\": {\n",
+      "    \"name\": \"unknown\",\n",
+      "    \"method\": {\n",
+      "      \"name\": \"Albers Equal Area\",\n",
+      "      \"id\": {\n",
+      "        \"authority\": \"EPSG\",\n",
+      "        \"code\": 9822\n",
+      "      }\n",
+      "    },\n",
+      "    \"parameters\": [\n",
+      "      {\n",
+      "        \"name\": \"Latitude of false origin\",\n",
+      "        \"value\": 23,\n",
+      "        \"unit\": \"degree\",\n",
+      "        \"id\": {\n",
+      "          \"authority\": \"EPSG\",\n",
+      "          \"code\": 8821\n",
+      "        }\n",
+      "      },\n",
+      "      {\n",
+      "        \"name\": \"Longitude of false origin\",\n",
+      "        \"value\": -96,\n",
+      "        \"unit\": \"degree\",\n",
+      "        \"id\": {\n",
+      "          \"authority\": \"EPSG\",\n",
+      "          \"code\": 8822\n",
+      "        }\n",
+      "      },\n",
+      "      {\n",
+      "        \"name\": \"Latitude of 1st standard parallel\",\n",
+      "        \"value\": 29.5,\n",
+      "        \"unit\": \"degree\",\n",
+      "        \"id\": {\n",
+      "          \"authority\": \"EPSG\",\n",
+      "          \"code\": 8823\n",
+      "        }\n",
+      "      },\n",
+      "      {\n",
+      "        \"name\": \"Latitude of 2nd standard parallel\",\n",
+      "        \"value\": 45.5,\n",
+      "        \"unit\": \"degree\",\n",
+      "        \"id\": {\n",
+      "          \"authority\": \"EPSG\",\n",
+      "          \"code\": 8824\n",
+      "        }\n",
+      "      },\n",
+      "      {\n",
+      "        \"name\": \"Easting at false origin\",\n",
+      "        \"value\": 0,\n",
+      "        \"unit\": {\n",
+      "          \"type\": \"LinearUnit\",\n",
+      "          \"name\": \"Metre\",\n",
+      "          \"conversion_factor\": 1\n",
+      "        },\n",
+      "        \"id\": {\n",
+      "          \"authority\": \"EPSG\",\n",
+      "          \"code\": 8826\n",
+      "        }\n",
+      "      },\n",
+      "      {\n",
+      "        \"name\": \"Northing at false origin\",\n",
+      "        \"value\": 0,\n",
+      "        \"unit\": {\n",
+      "          \"type\": \"LinearUnit\",\n",
+      "          \"name\": \"Metre\",\n",
+      "          \"conversion_factor\": 1\n",
+      "        },\n",
+      "        \"id\": {\n",
+      "          \"authority\": \"EPSG\",\n",
+      "          \"code\": 8827\n",
+      "        }\n",
+      "      }\n",
+      "    ]\n",
+      "  },\n",
+      "  \"coordinate_system\": {\n",
+      "    \"subtype\": \"Cartesian\",\n",
+      "    \"axis\": [\n",
+      "      {\n",
+      "        \"name\": \"Easting\",\n",
+      "        \"abbreviation\": \"E\",\n",
+      "        \"direction\": \"east\",\n",
+      "        \"unit\": \"metre\"\n",
+      "      },\n",
+      "      {\n",
+      "        \"name\": \"Northing\",\n",
+      "        \"abbreviation\": \"N\",\n",
+      "        \"direction\": \"north\",\n",
+      "        \"unit\": \"metre\"\n",
+      "      }\n",
+      "    ]\n",
+      "  }\n",
+      "}\n"
+     ]
+    }
+   ],
+   "source": [
+    "# # the datacube extension can accept reference_system information as a numerical EPSG code, \n",
+    "# # WKT2 (ISO 19162) string or PROJJSON object.\n",
+    "# # we will use a projjson, as was done by Microsoft Planetary Computer here:\n",
+    "# # https://planetarycomputer.microsoft.com/dataset/daymet-annual-na\n",
+    "# # https://planetarycomputer.microsoft.com/api/stac/v1/collections/daymet-annual-na\n",
+    "# projjson = json.loads(lcc.to_json())\n",
+    "\n",
+    "# alternatively, I think we could do this:\n",
+    "projjson = crs.to_json()\n",
+    "print(crs.to_json(pretty=True))"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "b6b88ee9-60c2-4d91-af74-c1c56b094826",
+   "metadata": {},
+   "source": [
+    "#### user review needed\n",
+    "#### look at the spatial and temporal steps, make sure they are all successfully pulled and they look correct"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "9e2bbcc5-e45a-4b8c-9d60-601f345e8134",
+   "metadata": {},
+   "source": [
+    "**Time**\n",
+    "\n",
+    "If you need to manually construct this field, here is a helpful reference: https://en.wikipedia.org/wiki/ISO_8601#Durations\n",
+    "\n",
+    "**manually constructed time step for 10 year data**"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 31,
+   "id": "82f1e9bd-52ee-46f5-9e95-c2359d95fcf3",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "time step: P10Y0M0DT0H0M0S\n"
+     ]
+    }
+   ],
+   "source": [
+    "#time_step = pd.Timedelta(stac_helpers.get_step(ds, dim_names_dict['T'], time_dim=True)).isoformat()\n",
+    "# if time is yearly or monthly, you will need to manually construct it:\n",
+    "time_step = \"P10Y0M0DT0H0M0S\"\n",
+    "print(f'time step: {time_step}')"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 32,
+   "id": "64be65b2-de20-447a-a9c2-bd8eca3e440e",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# # optional debugging for time steps:\n",
+    "# # check all step sizes (step_list), get number of occurences of each (step_count), and get index locations where each step size occurs in the dataset so you can manually inspect the values, if needed\n",
+    "# # please specify the index of the step in step_list with the step_ix field - this will return the indices in the dataset where this step size occurred\n",
+    "# time_step = stac_helpers.get_step(ds, dim_names_dict['T'], time_dim=True, debug=True, step_ix=4)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 33,
+   "id": "bc8dff39-2a2e-44a0-9b30-987107c2d1e2",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# # debugging for time steps, cont:\n",
+    "# # please choose one of the index locations printed above\n",
+    "# # this will print the time steps adjacent to it\n",
+    "# ix = 11\n",
+    "# ds.isel(time=slice(ix-1,ix+3)).time"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "9aa6c8ff-8d9b-40a7-a281-39b502bd5a3d",
+   "metadata": {},
+   "source": [
+    "**X/lon**"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 36,
+   "id": "a8ba7695-ca45-4db2-bd46-c465f4e37eff",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "x step: 60.0\n"
+     ]
+    }
+   ],
+   "source": [
+    "x_step = stac_helpers.get_step(ds, dim_names_dict['X'])\n",
+    "# a common issue that causes the spatial step not to be identified comes from rounding errors in the step calculation\n",
+    "# use the debugging cells below to identify if this is the issue, if so, use the round_dec argument to round to a higher decimal place:\n",
+    "#x_step = stac_helpers.get_step(ds, dim_names_dict['X'], round_dec=13)\n",
+    "print(f'x step: {x_step}')"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 37,
+   "id": "fac4c9f2-a952-4c7f-aa32-862957372d6f",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# # optional debugging for spatial steps:\n",
+    "# # check all step sizes (step_list), get number of occurences of each (step_count), and get index locations where each step size occurs in the dataset so you can manually inspect the values, if needed\n",
+    "# # please specify the index of the step in step_list with the step_ix field - this will return the indices in the dataset where this step size occurred\n",
+    "# x_dim=dim_names_dict['X']\n",
+    "# x_step = stac_helpers.get_step(ds, x_dim, debug=True, step_ix=0)\n",
+    "# print(f'\\nx dim name (for next cell): {x_dim}')"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "8d0b5a2d-dc58-4ad6-b890-859ce6bb08de",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# # debugging for spatial steps, cont:\n",
+    "# # please choose one of the index locations printed above\n",
+    "# # this will print the time steps adjacent to it\n",
+    "# ix = 5\n",
+    "# ds.isel(x=slice(ix-1,ix+3)).x"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "21b5cca4-8bb4-498d-ae6b-6b8545fffe56",
+   "metadata": {},
+   "source": [
+    "**Y/lat**"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 39,
+   "id": "7405583b-ecb9-44b0-8815-048e42e55a42",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "y step: 60.0\n"
+     ]
+    }
+   ],
+   "source": [
+    "y_step = stac_helpers.get_step(ds, dim_names_dict['Y'])\n",
+    "# a common issue that causes the spatial step not to be identified comes from rounding errors in the step calculation\n",
+    "# use the debugging cells below to identify if this is the issue, if so, use the round_dec argument to round to a higher decimal place:\n",
+    "#y_step = stac_helpers.get_step(ds, dim_names_dict['Y'], round_dec=13)\n",
+    "print(f'y step: {y_step}')"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 40,
+   "id": "ece0fe37-b54c-4721-aa9b-33d2998d191b",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# # optional debugging for spatial steps:\n",
+    "# # check all step sizes (step_list), get number of occurences of each (step_count), and get index locations where each step size occurs in the dataset so you can manually inspect the values, if needed\n",
+    "# # please specify the index of the step in step_list with the step_ix field - this will return the indices in the dataset where this step size occurred\n",
+    "# y_dim=dim_names_dict['Y']\n",
+    "# y_step = stac_helpers.get_step(ds, y_dim, debug=True, step_ix=0)\n",
+    "# print(f'\\nx dim name (for next cell): {x_dim}')"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 41,
+   "id": "abdafb8f-5217-4b82-91b6-eec8183c9128",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# # debugging for spatial steps, cont:\n",
+    "# # please choose one of the index locations printed above\n",
+    "# # this will print the time steps adjacent to it\n",
+    "# ix = 5\n",
+    "# ds.isel(y=slice(ix-1,ix+3)).y"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "00a5e041-081d-428d-ac2e-75d16de205e6",
+   "metadata": {},
+   "source": [
+    "#### user input needed\n",
+    "#### you will need to copy all of the dimensions printed below into the dict and fill in the appropriate attributes (type, axis, extent, etc.):\n",
+    "\n",
+    "Please see [datacube spec](https://github.com/stac-extensions/datacube?tab=readme-ov-file#dimension-object) for details on required fields.\n",
+    "\n",
+    "If you have a dimension like \"bnds\" or \"nv\" that is used on variables like time_bnds, lon_bnds, lat_bnds to choose either the lower or upper bound, you can use and [additional dimension object](https://github.com/stac-extensions/datacube?tab=readme-ov-file#additional-dimension-object). We recommend making the type \"count\" as Microsoft Planetary Computer did [here](https://github.com/stac-extensions/datacube/blob/9e74fa706c9bdd971e01739cf18dcc53bdd3dd4f/examples/daymet-hi-annual.json#L76).\n",
+    "\n",
+    "Here is an example:\n",
+    "\n",
+    "```\n",
+    "dims_dict = {\n",
+    "            'bnds': pystac.extensions.datacube.Dimension({'type': 'count', 'description': stac_helpers.get_long_name(ds, 'bnds'), 'extent': [ds.bnds.min().item(), ds.bnds.max().item()]})\n",
+    "            }\n",
+    "```"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 42,
+   "id": "acd45d3c-7845-47e6-9b7d-e35627a7ca9a",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "['time', 'y', 'x']\n"
+     ]
+    }
+   ],
+   "source": [
+    "dims = list(ds.dims)\n",
+    "print(dims)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 43,
+   "id": "5a443497-67a9-4dce-a8e9-b08d31a88223",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# create a dictionary of datacube dimensions you would like to assign to this dataset\n",
+    "# dimension name should come from the dims printed in above cell\n",
+    "\n",
+    "# x, y, t dimension info is pulled out automatically using the dim dict we created above\n",
+    "# all other dims listed in above cell need to be manually written in\n",
+    "\n",
+    "# we do not recommend including redundant dimensions (do not include x,y if you have lon,lat)\n",
+    "# note that the extent of each dimension should be pulled from the dataset\n",
+    "dims_dict = {dim_names_dict['T']: pystac.extensions.datacube.Dimension({'type': 'temporal', 'description': stac_helpers.get_long_name(ds, dim_names_dict['T']), 'extent': [temporal_extent_lower.strftime('%Y-%m-%dT%XZ'), temporal_extent_upper.strftime('%Y-%m-%dT%XZ')], 'step':time_step}),\n",
+    "             dim_names_dict['X']: pystac.extensions.datacube.Dimension({'type': 'spatial', 'axis': 'x', 'description': stac_helpers.get_long_name(ds, dim_names_dict['X']), 'extent': [spatial_bounds[0], spatial_bounds[2]], 'step': x_step, 'reference_system': projjson}),\n",
+    "             dim_names_dict['Y']: pystac.extensions.datacube.Dimension({'type': 'spatial', 'axis': 'y', 'description': stac_helpers.get_long_name(ds, dim_names_dict['Y']), 'extent': [spatial_bounds[1], spatial_bounds[3]], 'step': y_step, 'reference_system': projjson}),\n",
+    "            }"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 44,
+   "id": "8ab85b09-eb38-404c-910c-13349d5e2234",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# make sure you added all the right dims\n",
+    "assert sorted(list(dims_dict.keys())) == sorted(dims)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "0f277883-a3fd-425f-966a-ca2140d0ef2f",
+   "metadata": {},
+   "source": [
+    "### Add cube variables (optional field for extension)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 45,
+   "id": "e9272931-fc0b-4f2a-9546-283033e9cde8",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# drop metpy_crs coordinate we have added\n",
+    "if 'metpy_crs' in ds.coords:\n",
+    "    ds = ds.drop_vars('metpy_crs')\n",
+    "\n",
+    "# pull list of vars from dataset\n",
+    "vars = list(ds.variables)\n",
+    "\n",
+    "# spec says that the keys of cube:dimensions and cube:variables should be unique together; a key like lat should not be both a dimension and a variable.\n",
+    "# we will drop all values in dims from vars\n",
+    "vars = [v for v in vars if v not in dims]\n",
+    "\n",
+    "# Microsoft Planetary Computer includes coordinates and crs as variables here:\n",
+    "# https://planetarycomputer.microsoft.com/dataset/daymet-annual-na\n",
+    "# https://planetarycomputer.microsoft.com/api/stac/v1/collections/daymet-annual-na\n",
+    "# we will keep those in the var list\n",
+    "\n",
+    "# create dictionary of dataset variables and associated dimensions\n",
+    "vars_dict={}\n",
+    "for v in vars:\n",
+    "    unit = stac_helpers.get_unit(ds, v)\n",
+    "    var_type = stac_helpers.get_var_type(ds, v, crs_var)\n",
+    "    long_name = stac_helpers.get_long_name(ds, v)\n",
+    "    vars_dict[v] = pystac.extensions.datacube.Variable({'dimensions':list(ds[v].dims), 'type': var_type, 'description': long_name, 'unit': unit})"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "11ad5352-884c-4472-8864-4570a96f66e5",
+   "metadata": {},
+   "source": [
+    "### Finalize extension"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 46,
+   "id": "10141fd4-91d6-491d-878b-02653720891d",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# add dimesions and variables to collection extension\n",
+    "dc.apply(dimensions=dims_dict, variables=vars_dict)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "615ca168-75fb-4135-9941-0ef5fe4fd1cb",
+   "metadata": {},
+   "source": [
+    "## Add STAC Collection to Catalog and Save"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 47,
+   "id": "e2120a55-3d04-4122-a93f-29afcdb8cb1b",
+   "metadata": {
+    "tags": []
+   },
+   "outputs": [],
+   "source": [
+    "# # helper to find items of wrong type\n",
+    "# d = collection.to_dict()\n",
+    "# print(*stac_helpers.find_paths(d))"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 48,
+   "id": "4b75791b-6b2d-40be-b7c6-330a60888fb5",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "if catalog.get_child(collection_id):\n",
+    "    collection.normalize_and_save(root_href=os.path.join(catalog_path, collection_id), catalog_type=pystac.CatalogType.SELF_CONTAINED)\n",
+    "else:\n",
+    "    catalog.add_child(collection)\n",
+    "    catalog.normalize_and_save(root_href=catalog_path, catalog_type=pystac.CatalogType.SELF_CONTAINED)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "d6f676b5-e892-4bfb-8d73-2828addd838c",
+   "metadata": {},
+   "outputs": [],
+   "source": []
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "global-global-pangeo",
+   "language": "python",
+   "name": "conda-env-global-global-pangeo-py"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.11.6"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/workflows/archive/serap_slamm_create_collection_from_zarr.ipynb b/workflows/archive/serap_slamm_create_collection_from_zarr.ipynb
new file mode 100644
index 0000000000000000000000000000000000000000..740f54ecd0cb305842bcf5dfed8549e594872248
--- /dev/null
+++ b/workflows/archive/serap_slamm_create_collection_from_zarr.ipynb
@@ -0,0 +1,881 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "id": "6c10e07b-1e60-4926-af1d-fa75dc78e5d4",
+   "metadata": {
+    "tags": []
+   },
+   "source": [
+    "# serap_slamm Zarr -> Collection Workflow\n",
+    "This is a workflow to build a [STAC collection](https://github.com/radiantearth/stac-spec/blob/master/collection-spec/collection-spec.md) from the zarr asset for the dataset named above. We use the [datacube extension](https://github.com/stac-extensions/datacube) to define the spatial and temporal dimensions of the zarr store, as well as the variables it contains.\n",
+    "\n",
+    "To simplify this workflow so that it can scale to many datasets, a few simplifying suggestions and assumptions are made:\n",
+    "1. For USGS data, we can use the CC0-1.0 license. For all other data we can use Unlicense. Ref: https://spdx.org/licenses/\n",
+    "2. I am assuming all coordinates are from the WGS84 datum if not specified."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "201e0945-de55-45ff-b095-c2af009a4e62",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import pystac\n",
+    "from pystac.extensions.datacube import CollectionDatacubeExtension, AssetDatacubeExtension, AdditionalDimension, DatacubeExtension\n",
+    "import xarray as xr\n",
+    "import cf_xarray\n",
+    "import os\n",
+    "import fsspec\n",
+    "import cf_xarray\n",
+    "import hvplot.xarray\n",
+    "import pandas as pd\n",
+    "import json\n",
+    "import numpy as np\n",
+    "import pyproj\n",
+    "from pyproj import Transformer\n",
+    "import cartopy.crs as ccrs\n",
+    "import cfunits\n",
+    "import json\n",
+    "import sys\n",
+    "sys.path.insert(1, '..')\n",
+    "import stac_helpers"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "a71f9d19-8fb3-4f47-b4c4-447bb80d8dd5",
+   "metadata": {},
+   "source": [
+    "## Collection ID"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "15ee060d-3127-4024-a1ad-6aa0648667e1",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# name for STAC collection - should match name of zarr dataset\n",
+    "collection_id = 'serap_slamm'"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "116b5837-8e85-4ae7-964a-803533ded714",
+   "metadata": {},
+   "source": [
+    "## Asset Metadata Input"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "dd6fa323-132a-4794-8c80-576933f547a0",
+   "metadata": {
+    "tags": []
+   },
+   "outputs": [],
+   "source": [
+    "# url to zarr store that you want to create a collection for\n",
+    "zarr_url = f's3://mdmf/gdp/{collection_id}.zarr/'\n",
+    "\n",
+    "# define keyword arguments needed for opening the dataset with xarray\n",
+    "# ref: https://github.com/stac-extensions/xarray-assets\n",
+    "xarray_opendataset_kwargs = {\"xarray:open_kwargs\":{\"chunks\":{},\"engine\":\"zarr\",\"consolidated\":True},\n",
+    "                          \"xarray:storage_options\": {\"anon\": True, \"client_kwargs\": {\"endpoint_url\":\"https://usgs.osn.mghpcc.org/\"}}}\n",
+    "# description for zarr url asset attached to collection (zarr_url)\n",
+    "asset_description = \"Open Storage Network Pod S3 API access to collection zarr group\"\n",
+    "# roles to tag zarr url asset with\n",
+    "asset_roles = [\"data\",\"zarr\",\"s3\"]"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "e1441cd4-e94c-4902-af46-8f1af470eb6b",
+   "metadata": {
+    "tags": []
+   },
+   "outputs": [],
+   "source": [
+    "# url to zarr store that you want to create a collection for\n",
+    "zarr_url2 = f's3://nhgf-development/workspace/DataConversion/{collection_id}.zarr/'\n",
+    "\n",
+    "# define keyword arguments needed for opening the dataset with xarray\n",
+    "# ref: https://github.com/stac-extensions/xarray-assets\n",
+    "xarray_opendataset_kwargs2 = {\"xarray:open_kwargs\":{\"chunks\":{},\"engine\":\"zarr\",\"consolidated\":True},\n",
+    "                          \"xarray:storage_options\":{\"requester_pays\":True}}\n",
+    "# description for zarr url asset attached to collection (zarr_url)\n",
+    "asset_description2 = \"S3 access to collection zarr group\"\n",
+    "# roles to tag zarr url asset with\n",
+    "asset_roles2 = [\"data\",\"zarr\",\"s3\"]"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "b213b74f-ad17-4774-93b6-3b62be616b45",
+   "metadata": {
+    "tags": []
+   },
+   "source": [
+    "## Data Exploration"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "708f2cf5-79ab-49af-8067-de31d0d13ee6",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# open and view zarr dataset\n",
+    "fs2 = fsspec.filesystem('s3', anon=True, endpoint_url='https://usgs.osn.mghpcc.org/')\n",
+    "ds = xr.open_dataset(fs2.get_mapper(zarr_url), engine='zarr', \n",
+    "                             backend_kwargs={'consolidated':True}, chunks={})\n",
+    "ds"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "996e60ba-13e4-453a-8534-e62ce747f0fa",
+   "metadata": {},
+   "source": [
+    "## Collection Metadata Input"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "482d204d-b5b6-40e5-ac42-55b459be1097",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# description of STAC collection\n",
+    "collection_description = ds.attrs['title']\n",
+    "print(f'collection description: {collection_description}')"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "91129d65-a614-4fe4-86b6-105b1f121f55",
+   "metadata": {
+    "tags": []
+   },
+   "outputs": [],
+   "source": [
+    "# license for dataset\n",
+    "collection_license = stac_helpers.license_picker(ds.attrs['license'])"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "0bc7e9b3-ad62-4b10-a18e-66b7ed2d35dc",
+   "metadata": {},
+   "source": [
+    "## Identify x, y, t dimensions of dataset\n",
+    "May require user input if dimensions cannot be auto-detected."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "ab91268f-7200-4cb1-979a-c7d75531d2c0",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# dims_auto_extract = ['X', 'Y', 'T']\n",
+    "# dim_names_dict = {}\n",
+    "# for d in dims_auto_extract:\n",
+    "#     dim_names_dict[d] = stac_helpers.extract_dim(ds, d)\n",
+    "dim_names_dict={'X': 'x', 'Y': 'y', 'T': 'time'}\n",
+    "print(f\"Dimension dictionary: {dim_names_dict}\")"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "810d7480-165d-41c0-bd09-163656a14003",
+   "metadata": {},
+   "source": [
+    "## Get crs info\n",
+    "If there is no crs info that can be automatically extracted from the dataset with pyproj, you will need to manually identify the crs and create a crs object. This reference list of cartopy projections may be a helpful resource: https://scitools.org.uk/cartopy/docs/latest/reference/projections.html"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "239d3b00-77f9-4178-954b-ba81a2b34512",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "crs_var = 'albers_conical_equal_area'"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "b03d52f3-1367-4255-a561-52ee4fc9e92d",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# use pyproj to automatically extract crs info\n",
+    "crs = pyproj.CRS.from_cf(ds[crs_var].attrs)\n",
+    "\n",
+    "# alternatively, create the appropriate cartopy projection\n",
+    "# crs = ccrs.LambertConformal(central_longitude=crs_info.longitude_of_central_meridian, \n",
+    "#                             central_latitude=crs_info.latitude_of_projection_origin,\n",
+    "#                             standard_parallels=crs_info.standard_parallel)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "282c689e-07f0-48ee-8e3d-35876e8c5094",
+   "metadata": {},
+   "source": [
+    "### Compare dataset crs var to generated proj4 string to make sure it looks ok"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "4cee13ba-487d-483e-a013-b65685137502",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "ds[crs_var]"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "f7bc73db-7717-450e-9679-525f7be0c910",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "crs.to_proj4()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "a8c3ed37-8564-400b-a7fb-25bd5e43d21c",
+   "metadata": {},
+   "source": [
+    "## Create Collection Extent"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "69f0d837-68a5-4fed-9a14-5d75cfbb0da4",
+   "metadata": {},
+   "source": [
+    "### Spatial Extent\n",
+    "##### WARNING - make sure data type is **float** NOT **numpy.float64**"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "d46805e0-8e94-4ebe-aa01-d9a2d7051459",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# pull out lat/lon bbox for data\n",
+    "# coordinates must be from WGS 84 datum\n",
+    "# left, bottom, right, top\n",
+    "\n",
+    "# Note: try changing around the commented out lines below to get type float rather than a numpy float\n",
+    "#spatial_bounds = [ds[dim_names_dict['X']].data.min().compute().astype(float), ds[dim_names_dict['Y']].data.min().compute().astype(float), ds[dim_names_dict['X']].data.max().compute().astype(float), ds[dim_names_dict['Y']].data.max().compute().astype(float)]\n",
+    "#spatial_bounds = [ds[dim_names_dict['X']].data.min().compute().astype(float).tolist(), ds[dim_names_dict['Y']].data.min().compute().astype(float).tolist(), ds[dim_names_dict['X']].data.max().compute().astype(float).tolist(), ds[dim_names_dict['Y']].data.max().compute().astype(float).tolist()]\n",
+    "spatial_bounds = [ds[dim_names_dict['X']].data.min().astype(float).item(), ds[dim_names_dict['Y']].data.min().astype(float).item(), ds[dim_names_dict['X']].data.max().astype(float).item(), ds[dim_names_dict['Y']].data.max().astype(float).item()]\n",
+    "print(spatial_bounds)\n",
+    "print(f'\\nspatial_bounds data type: {type(spatial_bounds[0])}')"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "f16fdb9e-7ed8-40fb-a4f1-9ecabdebc0a1",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "XX, YY = np.meshgrid(ds[dim_names_dict['X']].data, ds[dim_names_dict['Y']].data)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "074fc23c-f4d9-4427-80d3-fbf691e6d411",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "transformer = Transformer.from_crs(crs, \"EPSG:4326\", always_xy=True)\n",
+    "lon, lat = transformer.transform(XX.ravel(), YY.ravel())"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "5345c975-9fe3-48e1-a663-0275cdf275dc",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "print(f'lower left coordinates (WGS84): {min(lon)}, {min(lat)}')\n",
+    "print(f'upper right coordinates (WGS84): {max(lon)}, {max(lat)}')"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "e0a5a222-743d-403a-9411-2406374803cf",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# create a spatial extent object \n",
+    "spatial_extent = pystac.SpatialExtent(bboxes=[[min(lon).item(), min(lat).item(), max(lon).item(), max(lat).item()]])"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "a04c8fca-1d33-43ac-9e2b-62d7be2887f7",
+   "metadata": {},
+   "source": [
+    "### Temporal Extent"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "41a84995-867c-4152-8c57-85e3758bbb77",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# pull out first and last timestamps\n",
+    "temporal_extent_lower = pd.Timestamp(ds[dim_names_dict['T']].data.min())\n",
+    "temporal_extent_upper = pd.Timestamp(ds[dim_names_dict['T']].data.max())\n",
+    "# if you get an error:\n",
+    "# Cannot convert input [] of type <class 'cftime._cftime.DatetimeNoLeap'> to Timestamp\n",
+    "# use the following instead:\n",
+    "#temporal_extent_lower = pd.Timestamp(ds.indexes[dim_names_dict['T']].to_datetimeindex().min())\n",
+    "#temporal_extent_upper = pd.Timestamp(ds.indexes[dim_names_dict['T']].to_datetimeindex().max())\n",
+    "\n",
+    "print(f'min: {temporal_extent_lower} \\nmax: {temporal_extent_upper}')\n",
+    "# create a temporal extent object\n",
+    "temporal_extent = pystac.TemporalExtent(intervals=[[temporal_extent_lower, temporal_extent_upper]])"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "1b1e37c4-5348-46ad-abc9-e005b5d6c02b",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "collection_extent = pystac.Extent(spatial=spatial_extent, temporal=temporal_extent)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "20b00e88-5a13-46b3-9787-d9ac2d4e7bd6",
+   "metadata": {},
+   "source": [
+    "## Open up STAC Catalog and create a collection"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "adf6c59d-58cd-48b1-a5fd-3bb205a3ef56",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# define folder location where your STAC catalog json file is\n",
+    "catalog_path = os.path.join('..', '..', 'catalog')\n",
+    "# open catalog\n",
+    "catalog = pystac.Catalog.from_file(os.path.join(catalog_path, 'catalog.json'))"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "7e96811b-95ae-406a-9728-55fc429d4e1f",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "if catalog.get_child(collection_id):\n",
+    "    collection = catalog.get_child(collection_id)\n",
+    "    print(\"existing collection opened\")\n",
+    "    collection.extent=collection_extent\n",
+    "    collection.description=collection_description\n",
+    "    collection.license=collection_license\n",
+    "else:\n",
+    "    collection = pystac.Collection(id=collection_id,\n",
+    "                                   description=collection_description,\n",
+    "                                   extent=collection_extent,\n",
+    "                                   license=collection_license)\n",
+    "    print(\"new collection created\")"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "a21c76e8-cd57-4eb5-a33f-7c668a3b3205",
+   "metadata": {},
+   "source": [
+    "## Add zarr url asset to collection"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "094832af-d22b-4359-b0f6-cf687acce5cc",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "asset_id = \"zarr-s3-osn\"\n",
+    "asset = pystac.Asset(href=zarr_url,\n",
+    "                     description=asset_description,\n",
+    "                     media_type=\"application/vnd+zarr\",\n",
+    "                     roles=asset_roles,\n",
+    "                     extra_fields = xarray_opendataset_kwargs)\n",
+    "collection.add_asset(asset_id, asset)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "0c298d07-f234-4a08-986d-87f4a39e9ae6",
+   "metadata": {
+    "tags": []
+   },
+   "outputs": [],
+   "source": [
+    "asset_id2 = \"zarr-s3\"\n",
+    "asset2 = pystac.Asset(href=zarr_url2,\n",
+    "                     description=asset_description2,\n",
+    "                     media_type=\"application/vnd+zarr\",\n",
+    "                     roles=asset_roles2,\n",
+    "                     extra_fields = xarray_opendataset_kwargs2)\n",
+    "collection.add_asset(asset_id2, asset2)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "f67cd5c9-db33-45c2-bc21-480cd67354f4",
+   "metadata": {},
+   "source": [
+    "## Add datacube extension to collection"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "fc00946d-2880-491d-9b3b-3aeeb4414d6c",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# instantiate extention on collection\n",
+    "dc = DatacubeExtension.ext(collection, add_if_missing=True)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "8bdd77a2-7587-485e-afb7-42af3a822241",
+   "metadata": {},
+   "source": [
+    "### Add cube dimensions (required field for extension)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "e7dc357c-91ec-49ae-83e5-400f791f9792",
+   "metadata": {},
+   "source": [
+    "#### user review needed\n",
+    "#### compare crs information to the projjson to make sure it looks correct"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "ea452f62-5644-49b6-8a4e-7dc4f649fd1a",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# print out crs information in dataset\n",
+    "crs"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "1b1d05ff-8e43-44a7-8343-178b112c4ad6",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# # the datacube extension can accept reference_system information as a numerical EPSG code, \n",
+    "# # WKT2 (ISO 19162) string or PROJJSON object.\n",
+    "# # we will use a projjson, as was done by Microsoft Planetary Computer here:\n",
+    "# # https://planetarycomputer.microsoft.com/dataset/daymet-annual-na\n",
+    "# # https://planetarycomputer.microsoft.com/api/stac/v1/collections/daymet-annual-na\n",
+    "# projjson = json.loads(lcc.to_json())\n",
+    "\n",
+    "# alternatively, I think we could do this:\n",
+    "projjson = crs.to_json()\n",
+    "print(crs.to_json(pretty=True))"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "b6b88ee9-60c2-4d91-af74-c1c56b094826",
+   "metadata": {},
+   "source": [
+    "#### user review needed\n",
+    "#### look at the spatial and temporal steps, make sure they are all successfully pulled and they look correct"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "9e2bbcc5-e45a-4b8c-9d60-601f345e8134",
+   "metadata": {},
+   "source": [
+    "**Time**\n",
+    "\n",
+    "If you need to manually construct this field, here is a helpful reference: https://en.wikipedia.org/wiki/ISO_8601#Durations\n",
+    "\n",
+    "**manually constructed time step for 10 year data**"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "82f1e9bd-52ee-46f5-9e95-c2359d95fcf3",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "#time_step = pd.Timedelta(stac_helpers.get_step(ds, dim_names_dict['T'], time_dim=True)).isoformat()\n",
+    "# if time is yearly or monthly, you will need to manually construct it:\n",
+    "time_step = \"P10Y0M0DT0H0M0S\"\n",
+    "print(f'time step: {time_step}')"
+   ]
+  },
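+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "timestep-sanity-check",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# optional sanity check (a sketch; assumes the time coordinate is numpy datetime64,\n",
+    "# i.e. pd.Timestamp worked above without the cftime workaround):\n",
+    "# confirm the manually constructed 10-year step by differencing the years of the first two timestamps\n",
+    "years = pd.DatetimeIndex(ds[dim_names_dict['T']].data).year\n",
+    "print(f'year spacing of first two time steps: {years[1] - years[0]}')"
+   ]
+  },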
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "64be65b2-de20-447a-a9c2-bd8eca3e440e",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# # optional debugging for time steps:\n",
+    "# # check all step sizes (step_list), get number of occurences of each (step_count), and get index locations where each step size occurs in the dataset so you can manually inspect the values, if needed\n",
+    "# # please specify the index of the step in step_list with the step_ix field - this will return the indices in the dataset where this step size occurred\n",
+    "# time_step = stac_helpers.get_step(ds, dim_names_dict['T'], time_dim=True, debug=True, step_ix=4)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "bc8dff39-2a2e-44a0-9b30-987107c2d1e2",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# # debugging for time steps, cont:\n",
+    "# # please choose one of the index locations printed above\n",
+    "# # this will print the time steps adjacent to it\n",
+    "# ix = 11\n",
+    "# ds.isel(time=slice(ix-1,ix+3)).time"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "9aa6c8ff-8d9b-40a7-a281-39b502bd5a3d",
+   "metadata": {},
+   "source": [
+    "**X/lon**"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "a8ba7695-ca45-4db2-bd46-c465f4e37eff",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "x_step = stac_helpers.get_step(ds, dim_names_dict['X'])\n",
+    "# a common issue that causes the spatial step not to be identified comes from rounding errors in the step calculation\n",
+    "# use the debugging cells below to identify if this is the issue, if so, use the round_dec argument to round to a higher decimal place:\n",
+    "#x_step = stac_helpers.get_step(ds, dim_names_dict['X'], round_dec=13)\n",
+    "print(f'x step: {x_step}')"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "fac4c9f2-a952-4c7f-aa32-862957372d6f",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# # optional debugging for spatial steps:\n",
+    "# # check all step sizes (step_list), get number of occurences of each (step_count), and get index locations where each step size occurs in the dataset so you can manually inspect the values, if needed\n",
+    "# # please specify the index of the step in step_list with the step_ix field - this will return the indices in the dataset where this step size occurred\n",
+    "# x_dim=dim_names_dict['X']\n",
+    "# x_step = stac_helpers.get_step(ds, x_dim, debug=True, step_ix=0)\n",
+    "# print(f'\\nx dim name (for next cell): {x_dim}')"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "8d0b5a2d-dc58-4ad6-b890-859ce6bb08de",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# # debugging for spatial steps, cont:\n",
+    "# # please choose one of the index locations printed above\n",
+    "# # this will print the time steps adjacent to it\n",
+    "# ix = 5\n",
+    "# ds.isel(x=slice(ix-1,ix+3)).x"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "21b5cca4-8bb4-498d-ae6b-6b8545fffe56",
+   "metadata": {},
+   "source": [
+    "**Y/lat**"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "7405583b-ecb9-44b0-8815-048e42e55a42",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "y_step = stac_helpers.get_step(ds, dim_names_dict['Y'])\n",
+    "# a common issue that causes the spatial step not to be identified comes from rounding errors in the step calculation\n",
+    "# use the debugging cells below to identify if this is the issue, if so, use the round_dec argument to round to a higher decimal place:\n",
+    "#y_step = stac_helpers.get_step(ds, dim_names_dict['Y'], round_dec=13)\n",
+    "print(f'y step: {y_step}')"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "ece0fe37-b54c-4721-aa9b-33d2998d191b",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# # optional debugging for spatial steps:\n",
+    "# # check all step sizes (step_list), get number of occurences of each (step_count), and get index locations where each step size occurs in the dataset so you can manually inspect the values, if needed\n",
+    "# # please specify the index of the step in step_list with the step_ix field - this will return the indices in the dataset where this step size occurred\n",
+    "# y_dim=dim_names_dict['Y']\n",
+    "# y_step = stac_helpers.get_step(ds, y_dim, debug=True, step_ix=0)\n",
+    "# print(f'\\nx dim name (for next cell): {x_dim}')"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "abdafb8f-5217-4b82-91b6-eec8183c9128",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# # debugging for spatial steps, cont:\n",
+    "# # please choose one of the index locations printed above\n",
+    "# # this will print the time steps adjacent to it\n",
+    "# ix = 5\n",
+    "# ds.isel(y=slice(ix-1,ix+3)).y"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "00a5e041-081d-428d-ac2e-75d16de205e6",
+   "metadata": {},
+   "source": [
+    "#### user input needed\n",
+    "#### you will need to copy all of the dimensions printed below into the dict and fill in the appropriate attributes (type, axis, extent, etc.):\n",
+    "\n",
+    "Please see [datacube spec](https://github.com/stac-extensions/datacube?tab=readme-ov-file#dimension-object) for details on required fields.\n",
+    "\n",
+    "If you have a dimension like \"bnds\" or \"nv\" that is used on variables like time_bnds, lon_bnds, lat_bnds to choose either the lower or upper bound, you can use and [additional dimension object](https://github.com/stac-extensions/datacube?tab=readme-ov-file#additional-dimension-object). We recommend making the type \"count\" as Microsoft Planetary Computer did [here](https://github.com/stac-extensions/datacube/blob/9e74fa706c9bdd971e01739cf18dcc53bdd3dd4f/examples/daymet-hi-annual.json#L76).\n",
+    "\n",
+    "Here is an example:\n",
+    "\n",
+    "```\n",
+    "dims_dict = {\n",
+    "            'bnds': pystac.extensions.datacube.Dimension({'type': 'count', 'description': stac_helpers.get_long_name(ds, 'bnds'), 'extent': [ds.bnds.min().item(), ds.bnds.max().item()]})\n",
+    "            }\n",
+    "```"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "acd45d3c-7845-47e6-9b7d-e35627a7ca9a",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "dims = list(ds.dims)\n",
+    "print(dims)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "5a443497-67a9-4dce-a8e9-b08d31a88223",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# create a dictionary of datacube dimensions you would like to assign to this dataset\n",
+    "# dimension name should come from the dims printed in above cell\n",
+    "\n",
+    "# x, y, t dimension info is pulled out automatically using the dim dict we created above\n",
+    "# all other dims listed in above cell need to be manually written in\n",
+    "\n",
+    "# we do not recommend including redundant dimensions (do not include x,y if you have lon,lat)\n",
+    "# note that the extent of each dimension should be pulled from the dataset\n",
+    "dims_dict = {dim_names_dict['T']: pystac.extensions.datacube.Dimension({'type': 'temporal', 'description': stac_helpers.get_long_name(ds, dim_names_dict['T']), 'extent': [temporal_extent_lower.strftime('%Y-%m-%dT%XZ'), temporal_extent_upper.strftime('%Y-%m-%dT%XZ')], 'step':time_step}),\n",
+    "             dim_names_dict['X']: pystac.extensions.datacube.Dimension({'type': 'spatial', 'axis': 'x', 'description': stac_helpers.get_long_name(ds, dim_names_dict['X']), 'extent': [spatial_bounds[0], spatial_bounds[2]], 'step': x_step, 'reference_system': projjson}),\n",
+    "             dim_names_dict['Y']: pystac.extensions.datacube.Dimension({'type': 'spatial', 'axis': 'y', 'description': stac_helpers.get_long_name(ds, dim_names_dict['Y']), 'extent': [spatial_bounds[1], spatial_bounds[3]], 'step': y_step, 'reference_system': projjson}),\n",
+    "            }"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "8ab85b09-eb38-404c-910c-13349d5e2234",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# make sure you added all the right dims\n",
+    "assert sorted(list(dims_dict.keys())) == sorted(dims)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "0f277883-a3fd-425f-966a-ca2140d0ef2f",
+   "metadata": {},
+   "source": [
+    "### Add cube variables (optional field for extension)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "e9272931-fc0b-4f2a-9546-283033e9cde8",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# drop metpy_crs coordinate we have added\n",
+    "if 'metpy_crs' in ds.coords:\n",
+    "    ds = ds.drop_vars('metpy_crs')\n",
+    "\n",
+    "# pull list of vars from dataset\n",
+    "vars = list(ds.variables)\n",
+    "\n",
+    "# spec says that the keys of cube:dimensions and cube:variables should be unique together; a key like lat should not be both a dimension and a variable.\n",
+    "# we will drop all values in dims from vars\n",
+    "vars = [v for v in vars if v not in dims]\n",
+    "\n",
+    "# Microsoft Planetary Computer includes coordinates and crs as variables here:\n",
+    "# https://planetarycomputer.microsoft.com/dataset/daymet-annual-na\n",
+    "# https://planetarycomputer.microsoft.com/api/stac/v1/collections/daymet-annual-na\n",
+    "# we will keep those in the var list\n",
+    "\n",
+    "# create dictionary of dataset variables and associated dimensions\n",
+    "vars_dict={}\n",
+    "for v in vars:\n",
+    "    unit = stac_helpers.get_unit(ds, v)\n",
+    "    var_type = stac_helpers.get_var_type(ds, v, crs_var)\n",
+    "    long_name = stac_helpers.get_long_name(ds, v)\n",
+    "    vars_dict[v] = pystac.extensions.datacube.Variable({'dimensions':list(ds[v].dims), 'type': var_type, 'description': long_name, 'unit': unit})"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "11ad5352-884c-4472-8864-4570a96f66e5",
+   "metadata": {},
+   "source": [
+    "### Finalize extension"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "10141fd4-91d6-491d-878b-02653720891d",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# add dimesions and variables to collection extension\n",
+    "dc.apply(dimensions=dims_dict, variables=vars_dict)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "615ca168-75fb-4135-9941-0ef5fe4fd1cb",
+   "metadata": {},
+   "source": [
+    "## Add STAC Collection to Catalog and Save"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "e2120a55-3d04-4122-a93f-29afcdb8cb1b",
+   "metadata": {
+    "tags": []
+   },
+   "outputs": [],
+   "source": [
+    "# # helper to find items of wrong type\n",
+    "# d = collection.to_dict()\n",
+    "# print(*stac_helpers.find_paths(d))"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "4b75791b-6b2d-40be-b7c6-330a60888fb5",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "if catalog.get_child(collection_id):\n",
+    "    collection.normalize_and_save(root_href=os.path.join(catalog_path, collection_id), catalog_type=pystac.CatalogType.SELF_CONTAINED)\n",
+    "else:\n",
+    "    catalog.add_child(collection)\n",
+    "    catalog.normalize_and_save(root_href=catalog_path, catalog_type=pystac.CatalogType.SELF_CONTAINED)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "d6f676b5-e892-4bfb-8d73-2828addd838c",
+   "metadata": {},
+   "outputs": [],
+   "source": []
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "global-global-pangeo",
+   "language": "python",
+   "name": "conda-env-global-global-pangeo-py"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.11.6"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/workflows/archive/serap_urb_create_collection_from_zarr.ipynb b/workflows/archive/serap_urb_create_collection_from_zarr.ipynb
new file mode 100644
index 0000000000000000000000000000000000000000..0a2975c232f0e0ccc07c1930ed059f086b776116
--- /dev/null
+++ b/workflows/archive/serap_urb_create_collection_from_zarr.ipynb
@@ -0,0 +1,881 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "id": "6c10e07b-1e60-4926-af1d-fa75dc78e5d4",
+   "metadata": {
+    "tags": []
+   },
+   "source": [
+    "# serap_urb Zarr -> Collection Workflow\n",
+    "This is a workflow to build a [STAC collection](https://github.com/radiantearth/stac-spec/blob/master/collection-spec/collection-spec.md) from the zarr asset for the dataset named above. We use the [datacube extension](https://github.com/stac-extensions/datacube) to define the spatial and temporal dimensions of the zarr store, as well as the variables it contains.\n",
+    "\n",
+    "To simplify this workflow so that it can scale to many datasets, a few simplifying suggestions and assumptions are made:\n",
+    "1. For USGS data, we can use the CC0-1.0 license. For all other data we can use Unlicense. Ref: https://spdx.org/licenses/\n",
+    "2. I am assuming all coordinates are from the WGS84 datum if not specified."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "201e0945-de55-45ff-b095-c2af009a4e62",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import pystac\n",
+    "from pystac.extensions.datacube import CollectionDatacubeExtension, AssetDatacubeExtension, AdditionalDimension, DatacubeExtension\n",
+    "import xarray as xr\n",
+    "import cf_xarray\n",
+    "import os\n",
+    "import fsspec\n",
+    "import cf_xarray\n",
+    "import hvplot.xarray\n",
+    "import pandas as pd\n",
+    "import json\n",
+    "import numpy as np\n",
+    "import pyproj\n",
+    "from pyproj import Transformer\n",
+    "import cartopy.crs as ccrs\n",
+    "import cfunits\n",
+    "import json\n",
+    "import sys\n",
+    "sys.path.insert(1, '..')\n",
+    "import stac_helpers"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "a71f9d19-8fb3-4f47-b4c4-447bb80d8dd5",
+   "metadata": {},
+   "source": [
+    "## Collection ID"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "15ee060d-3127-4024-a1ad-6aa0648667e1",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# name for STAC collection - should match name of zarr dataset\n",
+    "collection_id = 'serap_urb'"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "116b5837-8e85-4ae7-964a-803533ded714",
+   "metadata": {},
+   "source": [
+    "## Asset Metadata Input"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "dd6fa323-132a-4794-8c80-576933f547a0",
+   "metadata": {
+    "tags": []
+   },
+   "outputs": [],
+   "source": [
+    "# url to zarr store that you want to create a collection for\n",
+    "zarr_url = f's3://mdmf/gdp/{collection_id}.zarr/'\n",
+    "\n",
+    "# define keyword arguments needed for opening the dataset with xarray\n",
+    "# ref: https://github.com/stac-extensions/xarray-assets\n",
+    "xarray_opendataset_kwargs = {\"xarray:open_kwargs\":{\"chunks\":{},\"engine\":\"zarr\",\"consolidated\":True},\n",
+    "                          \"xarray:storage_options\": {\"anon\": True, \"client_kwargs\": {\"endpoint_url\":\"https://usgs.osn.mghpcc.org/\"}}}\n",
+    "# description for zarr url asset attached to collection (zarr_url)\n",
+    "asset_description = \"Open Storage Network Pod S3 API access to collection zarr group\"\n",
+    "# roles to tag zarr url asset with\n",
+    "asset_roles = [\"data\",\"zarr\",\"s3\"]"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "e1441cd4-e94c-4902-af46-8f1af470eb6b",
+   "metadata": {
+    "tags": []
+   },
+   "outputs": [],
+   "source": [
+    "# url to zarr store that you want to create a collection for\n",
+    "zarr_url2 = f's3://nhgf-development/workspace/DataConversion/{collection_id}.zarr/'\n",
+    "\n",
+    "# define keyword arguments needed for opening the dataset with xarray\n",
+    "# ref: https://github.com/stac-extensions/xarray-assets\n",
+    "xarray_opendataset_kwargs2 = {\"xarray:open_kwargs\":{\"chunks\":{},\"engine\":\"zarr\",\"consolidated\":True},\n",
+    "                          \"xarray:storage_options\":{\"requester_pays\":True}}\n",
+    "# description for zarr url asset attached to collection (zarr_url)\n",
+    "asset_description2 = \"S3 access to collection zarr group\"\n",
+    "# roles to tag zarr url asset with\n",
+    "asset_roles2 = [\"data\",\"zarr\",\"s3\"]"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "b213b74f-ad17-4774-93b6-3b62be616b45",
+   "metadata": {
+    "tags": []
+   },
+   "source": [
+    "## Data Exploration"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "708f2cf5-79ab-49af-8067-de31d0d13ee6",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# open and view zarr dataset\n",
+    "fs2 = fsspec.filesystem('s3', anon=True, endpoint_url='https://usgs.osn.mghpcc.org/')\n",
+    "ds = xr.open_dataset(fs2.get_mapper(zarr_url), engine='zarr', \n",
+    "                             backend_kwargs={'consolidated':True}, chunks={})\n",
+    "ds"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "996e60ba-13e4-453a-8534-e62ce747f0fa",
+   "metadata": {},
+   "source": [
+    "## Collection Metadata Input"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "482d204d-b5b6-40e5-ac42-55b459be1097",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# description of STAC collection\n",
+    "collection_description = ds.attrs['title']\n",
+    "print(f'collection description: {collection_description}')"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "91129d65-a614-4fe4-86b6-105b1f121f55",
+   "metadata": {
+    "tags": []
+   },
+   "outputs": [],
+   "source": [
+    "# license for dataset\n",
+    "collection_license = stac_helpers.license_picker(ds.attrs['license'])"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "0bc7e9b3-ad62-4b10-a18e-66b7ed2d35dc",
+   "metadata": {},
+   "source": [
+    "## Identify x, y, t dimensions of dataset\n",
+    "May require user input if dimensions cannot be auto-detected."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "ab91268f-7200-4cb1-979a-c7d75531d2c0",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# dims_auto_extract = ['X', 'Y', 'T']\n",
+    "# dim_names_dict = {}\n",
+    "# for d in dims_auto_extract:\n",
+    "#     dim_names_dict[d] = stac_helpers.extract_dim(ds, d)\n",
+    "dim_names_dict={'X': 'x', 'Y': 'y', 'T': 'time'}\n",
+    "print(f\"Dimension dictionary: {dim_names_dict}\")"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "810d7480-165d-41c0-bd09-163656a14003",
+   "metadata": {},
+   "source": [
+    "## Get crs info\n",
+    "If there is no crs info that can be automatically extracted from the dataset with pyproj, you will need to manually identify the crs and create a crs object. This reference list of cartopy projections may be a helpful resource: https://scitools.org.uk/cartopy/docs/latest/reference/projections.html"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "239d3b00-77f9-4178-954b-ba81a2b34512",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "crs_var = 'albers_conical_equal_area'"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "b03d52f3-1367-4255-a561-52ee4fc9e92d",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# use pyproj to automatically extract crs info\n",
+    "crs = pyproj.CRS.from_cf(ds[crs_var].attrs)\n",
+    "\n",
+    "# alternatively, create the appropriate cartopy projection\n",
+    "# crs = ccrs.LambertConformal(central_longitude=crs_info.longitude_of_central_meridian, \n",
+    "#                             central_latitude=crs_info.latitude_of_projection_origin,\n",
+    "#                             standard_parallels=crs_info.standard_parallel)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "282c689e-07f0-48ee-8e3d-35876e8c5094",
+   "metadata": {},
+   "source": [
+    "### Compare dataset crs var to generated proj4 string to make sure it looks ok"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "4cee13ba-487d-483e-a013-b65685137502",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "ds[crs_var]"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "f7bc73db-7717-450e-9679-525f7be0c910",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "crs.to_proj4()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "a8c3ed37-8564-400b-a7fb-25bd5e43d21c",
+   "metadata": {},
+   "source": [
+    "## Create Collection Extent"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "69f0d837-68a5-4fed-9a14-5d75cfbb0da4",
+   "metadata": {},
+   "source": [
+    "### Spatial Extent\n",
+    "##### WARNING - make sure data type is **float** NOT **numpy.float64**"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "d46805e0-8e94-4ebe-aa01-d9a2d7051459",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# pull out lat/lon bbox for data\n",
+    "# coordinates must be from WGS 84 datum\n",
+    "# left, bottom, right, top\n",
+    "\n",
+    "# Note: try changing around the commented out lines below to get type float rather than a numpy float\n",
+    "#spatial_bounds = [ds[dim_names_dict['X']].data.min().compute().astype(float), ds[dim_names_dict['Y']].data.min().compute().astype(float), ds[dim_names_dict['X']].data.max().compute().astype(float), ds[dim_names_dict['Y']].data.max().compute().astype(float)]\n",
+    "#spatial_bounds = [ds[dim_names_dict['X']].data.min().compute().astype(float).tolist(), ds[dim_names_dict['Y']].data.min().compute().astype(float).tolist(), ds[dim_names_dict['X']].data.max().compute().astype(float).tolist(), ds[dim_names_dict['Y']].data.max().compute().astype(float).tolist()]\n",
+    "spatial_bounds = [ds[dim_names_dict['X']].data.min().astype(float).item(), ds[dim_names_dict['Y']].data.min().astype(float).item(), ds[dim_names_dict['X']].data.max().astype(float).item(), ds[dim_names_dict['Y']].data.max().astype(float).item()]\n",
+    "print(spatial_bounds)\n",
+    "print(f'\\nspatial_bounds data type: {type(spatial_bounds[0])}')"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "f16fdb9e-7ed8-40fb-a4f1-9ecabdebc0a1",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "XX, YY = np.meshgrid(ds[dim_names_dict['X']].data, ds[dim_names_dict['Y']].data)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "074fc23c-f4d9-4427-80d3-fbf691e6d411",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "transformer = Transformer.from_crs(crs, \"EPSG:4326\", always_xy=True)\n",
+    "lon, lat = transformer.transform(XX.ravel(), YY.ravel())"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "5345c975-9fe3-48e1-a663-0275cdf275dc",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "print(f'lower left coordinates (WGS84): {min(lon)}, {min(lat)}')\n",
+    "print(f'upper right coordinates (WGS84): {max(lon)}, {max(lat)}')"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "e0a5a222-743d-403a-9411-2406374803cf",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# create a spatial extent object \n",
+    "spatial_extent = pystac.SpatialExtent(bboxes=[[min(lon).item(), min(lat).item(), max(lon).item(), max(lat).item()]])"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "a04c8fca-1d33-43ac-9e2b-62d7be2887f7",
+   "metadata": {},
+   "source": [
+    "### Temporal Extent"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "41a84995-867c-4152-8c57-85e3758bbb77",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# pull out first and last timestamps\n",
+    "temporal_extent_lower = pd.Timestamp(ds[dim_names_dict['T']].data.min())\n",
+    "temporal_extent_upper = pd.Timestamp(ds[dim_names_dict['T']].data.max())\n",
+    "# if you get an error:\n",
+    "# Cannot convert input [] of type <class 'cftime._cftime.DatetimeNoLeap'> to Timestamp\n",
+    "# use the following instead:\n",
+    "#temporal_extent_lower = pd.Timestamp(ds.indexes[dim_names_dict['T']].to_datetimeindex().min())\n",
+    "#temporal_extent_upper = pd.Timestamp(ds.indexes[dim_names_dict['T']].to_datetimeindex().max())\n",
+    "\n",
+    "print(f'min: {temporal_extent_lower} \\nmax: {temporal_extent_upper}')\n",
+    "# create a temporal extent object\n",
+    "temporal_extent = pystac.TemporalExtent(intervals=[[temporal_extent_lower, temporal_extent_upper]])"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "1b1e37c4-5348-46ad-abc9-e005b5d6c02b",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "collection_extent = pystac.Extent(spatial=spatial_extent, temporal=temporal_extent)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "20b00e88-5a13-46b3-9787-d9ac2d4e7bd6",
+   "metadata": {},
+   "source": [
+    "## Open up STAC Catalog and create a collection"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "adf6c59d-58cd-48b1-a5fd-3bb205a3ef56",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# define folder location where your STAC catalog json file is\n",
+    "catalog_path = os.path.join('..', '..', 'catalog')\n",
+    "# open catalog\n",
+    "catalog = pystac.Catalog.from_file(os.path.join(catalog_path, 'catalog.json'))"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "7e96811b-95ae-406a-9728-55fc429d4e1f",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "if catalog.get_child(collection_id):\n",
+    "    collection = catalog.get_child(collection_id)\n",
+    "    print(\"existing collection opened\")\n",
+    "    collection.extent=collection_extent\n",
+    "    collection.description=collection_description\n",
+    "    collection.license=collection_license\n",
+    "else:\n",
+    "    collection = pystac.Collection(id=collection_id,\n",
+    "                                   description=collection_description,\n",
+    "                                   extent=collection_extent,\n",
+    "                                   license=collection_license)\n",
+    "    print(\"new collection created\")"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "a21c76e8-cd57-4eb5-a33f-7c668a3b3205",
+   "metadata": {},
+   "source": [
+    "## Add zarr url asset to collection"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "094832af-d22b-4359-b0f6-cf687acce5cc",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "asset_id = \"zarr-s3-osn\"\n",
+    "asset = pystac.Asset(href=zarr_url,\n",
+    "                     description=asset_description,\n",
+    "                     media_type=\"application/vnd+zarr\",\n",
+    "                     roles=asset_roles,\n",
+    "                     extra_fields = xarray_opendataset_kwargs)\n",
+    "collection.add_asset(asset_id, asset)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "0c298d07-f234-4a08-986d-87f4a39e9ae6",
+   "metadata": {
+    "tags": []
+   },
+   "outputs": [],
+   "source": [
+    "asset_id2 = \"zarr-s3\"\n",
+    "asset2 = pystac.Asset(href=zarr_url2,\n",
+    "                     description=asset_description2,\n",
+    "                     media_type=\"application/vnd+zarr\",\n",
+    "                     roles=asset_roles2,\n",
+    "                     extra_fields = xarray_opendataset_kwargs2)\n",
+    "collection.add_asset(asset_id2, asset2)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "f67cd5c9-db33-45c2-bc21-480cd67354f4",
+   "metadata": {},
+   "source": [
+    "## Add datacube extension to collection"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "fc00946d-2880-491d-9b3b-3aeeb4414d6c",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# instantiate extention on collection\n",
+    "dc = DatacubeExtension.ext(collection, add_if_missing=True)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "8bdd77a2-7587-485e-afb7-42af3a822241",
+   "metadata": {},
+   "source": [
+    "### Add cube dimensions (required field for extension)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "e7dc357c-91ec-49ae-83e5-400f791f9792",
+   "metadata": {},
+   "source": [
+    "#### user review needed\n",
+    "#### compare crs information to the projjson to make sure it looks correct"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "ea452f62-5644-49b6-8a4e-7dc4f649fd1a",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# print out crs information in dataset\n",
+    "crs"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "1b1d05ff-8e43-44a7-8343-178b112c4ad6",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# # the datacube extension can accept reference_system information as a numerical EPSG code, \n",
+    "# # WKT2 (ISO 19162) string or PROJJSON object.\n",
+    "# # we will use a projjson, as was done by Microsoft Planetary Computer here:\n",
+    "# # https://planetarycomputer.microsoft.com/dataset/daymet-annual-na\n",
+    "# # https://planetarycomputer.microsoft.com/api/stac/v1/collections/daymet-annual-na\n",
+    "# projjson = json.loads(lcc.to_json())\n",
+    "\n",
+    "# alternatively, I think we could do this:\n",
+    "projjson = crs.to_json()\n",
+    "print(crs.to_json(pretty=True))"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "b6b88ee9-60c2-4d91-af74-c1c56b094826",
+   "metadata": {},
+   "source": [
+    "#### user review needed\n",
+    "#### look at the spatial and temporal steps, make sure they are all successfully pulled and they look correct"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "9e2bbcc5-e45a-4b8c-9d60-601f345e8134",
+   "metadata": {},
+   "source": [
+    "**Time**\n",
+    "\n",
+    "If you need to manually construct this field, here is a helpful reference: https://en.wikipedia.org/wiki/ISO_8601#Durations\n",
+    "\n",
+    "**manually constructed time step for 10 year data**"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "82f1e9bd-52ee-46f5-9e95-c2359d95fcf3",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "#time_step = pd.Timedelta(stac_helpers.get_step(ds, dim_names_dict['T'], time_dim=True)).isoformat()\n",
+    "# if time is yearly or monthly, you will need to manually construct it:\n",
+    "time_step = \"P10Y0M0DT0H0M0S\"\n",
+    "print(f'time step: {time_step}')"
+   ]
+  },
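+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "timestep-sanity-check",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# optional sanity check (a sketch; assumes the time coordinate is numpy datetime64,\n",
+    "# i.e. pd.Timestamp worked above without the cftime workaround):\n",
+    "# confirm the manually constructed 10-year step by differencing the years of the first two timestamps\n",
+    "years = pd.DatetimeIndex(ds[dim_names_dict['T']].data).year\n",
+    "print(f'year spacing of first two time steps: {years[1] - years[0]}')"
+   ]
+  },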
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "64be65b2-de20-447a-a9c2-bd8eca3e440e",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# # optional debugging for time steps:\n",
+    "# # check all step sizes (step_list), get number of occurences of each (step_count), and get index locations where each step size occurs in the dataset so you can manually inspect the values, if needed\n",
+    "# # please specify the index of the step in step_list with the step_ix field - this will return the indices in the dataset where this step size occurred\n",
+    "# time_step = stac_helpers.get_step(ds, dim_names_dict['T'], time_dim=True, debug=True, step_ix=4)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "bc8dff39-2a2e-44a0-9b30-987107c2d1e2",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# # debugging for time steps, cont:\n",
+    "# # please choose one of the index locations printed above\n",
+    "# # this will print the time steps adjacent to it\n",
+    "# ix = 11\n",
+    "# ds.isel(time=slice(ix-1,ix+3)).time"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "9aa6c8ff-8d9b-40a7-a281-39b502bd5a3d",
+   "metadata": {},
+   "source": [
+    "**X/lon**"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "a8ba7695-ca45-4db2-bd46-c465f4e37eff",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "x_step = stac_helpers.get_step(ds, dim_names_dict['X'])\n",
+    "# a common issue that causes the spatial step not to be identified comes from rounding errors in the step calculation\n",
+    "# use the debugging cells below to identify if this is the issue, if so, use the round_dec argument to round to a higher decimal place:\n",
+    "#x_step = stac_helpers.get_step(ds, dim_names_dict['X'], round_dec=13)\n",
+    "print(f'x step: {x_step}')"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "fac4c9f2-a952-4c7f-aa32-862957372d6f",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# # optional debugging for spatial steps:\n",
+    "# # check all step sizes (step_list), get number of occurences of each (step_count), and get index locations where each step size occurs in the dataset so you can manually inspect the values, if needed\n",
+    "# # please specify the index of the step in step_list with the step_ix field - this will return the indices in the dataset where this step size occurred\n",
+    "# x_dim=dim_names_dict['X']\n",
+    "# x_step = stac_helpers.get_step(ds, x_dim, debug=True, step_ix=0)\n",
+    "# print(f'\\nx dim name (for next cell): {x_dim}')"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "8d0b5a2d-dc58-4ad6-b890-859ce6bb08de",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# # debugging for spatial steps, cont:\n",
+    "# # please choose one of the index locations printed above\n",
+    "# # this will print the time steps adjacent to it\n",
+    "# ix = 5\n",
+    "# ds.isel(x=slice(ix-1,ix+3)).x"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "21b5cca4-8bb4-498d-ae6b-6b8545fffe56",
+   "metadata": {},
+   "source": [
+    "**Y/lat**"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "7405583b-ecb9-44b0-8815-048e42e55a42",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "y_step = stac_helpers.get_step(ds, dim_names_dict['Y'])\n",
+    "# a common issue that causes the spatial step not to be identified comes from rounding errors in the step calculation\n",
+    "# use the debugging cells below to identify if this is the issue, if so, use the round_dec argument to round to a higher decimal place:\n",
+    "#y_step = stac_helpers.get_step(ds, dim_names_dict['Y'], round_dec=13)\n",
+    "print(f'y step: {y_step}')"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "ece0fe37-b54c-4721-aa9b-33d2998d191b",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# # optional debugging for spatial steps:\n",
+    "# # check all step sizes (step_list), get number of occurences of each (step_count), and get index locations where each step size occurs in the dataset so you can manually inspect the values, if needed\n",
+    "# # please specify the index of the step in step_list with the step_ix field - this will return the indices in the dataset where this step size occurred\n",
+    "# y_dim=dim_names_dict['Y']\n",
+    "# y_step = stac_helpers.get_step(ds, y_dim, debug=True, step_ix=0)\n",
+    "# print(f'\\nx dim name (for next cell): {x_dim}')"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "abdafb8f-5217-4b82-91b6-eec8183c9128",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# # debugging for spatial steps, cont:\n",
+    "# # please choose one of the index locations printed above\n",
+    "# # this will print the time steps adjacent to it\n",
+    "# ix = 5\n",
+    "# ds.isel(y=slice(ix-1,ix+3)).y"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "00a5e041-081d-428d-ac2e-75d16de205e6",
+   "metadata": {},
+   "source": [
+    "#### user input needed\n",
+    "#### you will need to copy all of the dimensions printed below into the dict and fill in the appropriate attributes (type, axis, extent, etc.):\n",
+    "\n",
+    "Please see [datacube spec](https://github.com/stac-extensions/datacube?tab=readme-ov-file#dimension-object) for details on required fields.\n",
+    "\n",
+    "If you have a dimension like \"bnds\" or \"nv\" that is used on variables like time_bnds, lon_bnds, lat_bnds to choose either the lower or upper bound, you can use and [additional dimension object](https://github.com/stac-extensions/datacube?tab=readme-ov-file#additional-dimension-object). We recommend making the type \"count\" as Microsoft Planetary Computer did [here](https://github.com/stac-extensions/datacube/blob/9e74fa706c9bdd971e01739cf18dcc53bdd3dd4f/examples/daymet-hi-annual.json#L76).\n",
+    "\n",
+    "Here is an example:\n",
+    "\n",
+    "```\n",
+    "dims_dict = {\n",
+    "            'bnds': pystac.extensions.datacube.Dimension({'type': 'count', 'description': stac_helpers.get_long_name(ds, 'bnds'), 'extent': [ds.bnds.min().item(), ds.bnds.max().item()]})\n",
+    "            }\n",
+    "```"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "acd45d3c-7845-47e6-9b7d-e35627a7ca9a",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "dims = list(ds.dims)\n",
+    "print(dims)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "5a443497-67a9-4dce-a8e9-b08d31a88223",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# create a dictionary of datacube dimensions you would like to assign to this dataset\n",
+    "# dimension name should come from the dims printed in above cell\n",
+    "\n",
+    "# x, y, t dimension info is pulled out automatically using the dim dict we created above\n",
+    "# all other dims listed in above cell need to be manually written in\n",
+    "\n",
+    "# we do not recommend including redundant dimensions (do not include x,y if you have lon,lat)\n",
+    "# note that the extent of each dimension should be pulled from the dataset\n",
+    "dims_dict = {dim_names_dict['T']: pystac.extensions.datacube.Dimension({'type': 'temporal', 'description': stac_helpers.get_long_name(ds, dim_names_dict['T']), 'extent': [temporal_extent_lower.strftime('%Y-%m-%dT%XZ'), temporal_extent_upper.strftime('%Y-%m-%dT%XZ')], 'step':time_step}),\n",
+    "             dim_names_dict['X']: pystac.extensions.datacube.Dimension({'type': 'spatial', 'axis': 'x', 'description': stac_helpers.get_long_name(ds, dim_names_dict['X']), 'extent': [spatial_bounds[0], spatial_bounds[2]], 'step': x_step, 'reference_system': projjson}),\n",
+    "             dim_names_dict['Y']: pystac.extensions.datacube.Dimension({'type': 'spatial', 'axis': 'y', 'description': stac_helpers.get_long_name(ds, dim_names_dict['Y']), 'extent': [spatial_bounds[1], spatial_bounds[3]], 'step': y_step, 'reference_system': projjson}),\n",
+    "            }"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "8ab85b09-eb38-404c-910c-13349d5e2234",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# make sure you added all the right dims\n",
+    "assert sorted(list(dims_dict.keys())) == sorted(dims)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "0f277883-a3fd-425f-966a-ca2140d0ef2f",
+   "metadata": {},
+   "source": [
+    "### Add cube variables (optional field for extension)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "e9272931-fc0b-4f2a-9546-283033e9cde8",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# drop metpy_crs coordinate we have added\n",
+    "if 'metpy_crs' in ds.coords:\n",
+    "    ds = ds.drop_vars('metpy_crs')\n",
+    "\n",
+    "# pull list of vars from dataset\n",
+    "vars = list(ds.variables)\n",
+    "\n",
+    "# spec says that the keys of cube:dimensions and cube:variables should be unique together; a key like lat should not be both a dimension and a variable.\n",
+    "# we will drop all values in dims from vars\n",
+    "vars = [v for v in vars if v not in dims]\n",
+    "\n",
+    "# Microsoft Planetary Computer includes coordinates and crs as variables here:\n",
+    "# https://planetarycomputer.microsoft.com/dataset/daymet-annual-na\n",
+    "# https://planetarycomputer.microsoft.com/api/stac/v1/collections/daymet-annual-na\n",
+    "# we will keep those in the var list\n",
+    "\n",
+    "# create dictionary of dataset variables and associated dimensions\n",
+    "vars_dict={}\n",
+    "for v in vars:\n",
+    "    unit = stac_helpers.get_unit(ds, v)\n",
+    "    var_type = stac_helpers.get_var_type(ds, v, crs_var)\n",
+    "    long_name = stac_helpers.get_long_name(ds, v)\n",
+    "    vars_dict[v] = pystac.extensions.datacube.Variable({'dimensions':list(ds[v].dims), 'type': var_type, 'description': long_name, 'unit': unit})"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "11ad5352-884c-4472-8864-4570a96f66e5",
+   "metadata": {},
+   "source": [
+    "### Finalize extension"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "10141fd4-91d6-491d-878b-02653720891d",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# add dimesions and variables to collection extension\n",
+    "dc.apply(dimensions=dims_dict, variables=vars_dict)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "615ca168-75fb-4135-9941-0ef5fe4fd1cb",
+   "metadata": {},
+   "source": [
+    "## Add STAC Collection to Catalog and Save"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "e2120a55-3d04-4122-a93f-29afcdb8cb1b",
+   "metadata": {
+    "tags": []
+   },
+   "outputs": [],
+   "source": [
+    "# # helper to find items of wrong type\n",
+    "# d = collection.to_dict()\n",
+    "# print(*stac_helpers.find_paths(d))"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "4b75791b-6b2d-40be-b7c6-330a60888fb5",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "if catalog.get_child(collection_id):\n",
+    "    collection.normalize_and_save(root_href=os.path.join(catalog_path, collection_id), catalog_type=pystac.CatalogType.SELF_CONTAINED)\n",
+    "else:\n",
+    "    catalog.add_child(collection)\n",
+    "    catalog.normalize_and_save(root_href=catalog_path, catalog_type=pystac.CatalogType.SELF_CONTAINED)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "d6f676b5-e892-4bfb-8d73-2828addd838c",
+   "metadata": {},
+   "outputs": [],
+   "source": []
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "global-global-pangeo",
+   "language": "python",
+   "name": "conda-env-global-global-pangeo-py"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.11.6"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}