diff --git a/catalog/TTU_2019_rcp45_station_data/collection.json b/catalog/TTU_2019_rcp45_station_data/collection.json
index 0a546332f16c5af6ea66b4ce70554f07250987ef..66e6647f221c194452f0f4e608c74fb041b8bd43 100644
--- a/catalog/TTU_2019_rcp45_station_data/collection.json
+++ b/catalog/TTU_2019_rcp45_station_data/collection.json
@@ -35,6 +35,16 @@
         0,
         1
       ]
-    }
+    },
+    "stations": {
+      "type": "geometry",
+      "description": null,
+      "bbox": [
+        220.3289031982422,
+        24.55500030517578,
+        293.0080871582031,
+        59.51190185546875
+      ]
+    }
   },
   "cube:variables": {
diff --git a/catalog/TTU_2019_rcp85_station_data/collection.json b/catalog/TTU_2019_rcp85_station_data/collection.json
index 223dcc4725b8f4fa9cd010d01e9fb09b6b9311dc..eb6782d626a5c831ff2b32c1bd52a84154d06839 100644
--- a/catalog/TTU_2019_rcp85_station_data/collection.json
+++ b/catalog/TTU_2019_rcp85_station_data/collection.json
@@ -35,6 +35,16 @@
         0,
         1
       ]
-    }
+    },
+    "stations": {
+      "type": "geometry",
+      "description": null,
+      "bbox": [
+        220.3289031982422,
+        24.55500030517578,
+        293.0080871582031,
+        59.51190185546875
+      ]
+    }
   },
   "cube:variables": {
diff --git a/workflows/archive/TTU_2019_rcp45_station_data_create_collection_from_zarr.ipynb b/workflows/archive/TTU_2019_rcp45_station_data_create_collection_from_zarr.ipynb
index de27ed276aa8df5b98181acfc06249f0c20aee2e..f6c4fd1d85e1b0e3d1609de7f64acf37fa2f5817 100644
--- a/workflows/archive/TTU_2019_rcp45_station_data_create_collection_from_zarr.ipynb
+++ b/workflows/archive/TTU_2019_rcp45_station_data_create_collection_from_zarr.ipynb
@@ -166,7 +166,9 @@
    "metadata": {},
    "source": [
     "## Identify x, y, t dimensions of dataset\n",
-    "May require user input if dimensions cannot be auto-detected."
+    "May require user input if dimensions cannot be auto-detected.\n",
+    "\n",
+    "**WARNING: no x, y dims in this dataset, but we will still include lat/lon as dims in our dict so we can use the same code snippets as usual to get a bbox for the dataset**"
    ]
   },
   {
@@ -710,10 +712,21 @@
     "# note that the extent of each dimension should be pulled from the dataset\n",
     "dims_dict = {dim_names_dict['T']: pystac.extensions.datacube.Dimension({'type': 'temporal', 'description': stac_helpers.get_long_name(ds, dim_names_dict['T']), 'extent': [temporal_extent_lower.strftime('%Y-%m-%dT%XZ'), temporal_extent_upper.strftime('%Y-%m-%dT%XZ')], 'step':time_step}),\n",
     " 'nv': pystac.extensions.datacube.Dimension({'type': 'count', 'description': stac_helpers.get_long_name(ds, 'nv'), 'extent': [ds.nv.min().item(), ds.nv.max().item()]}),\n",
-    "# 'stations': pystac.extensions.datacube.Dimension({'type': 'spatial', 'description': stac_helpers.get_long_name(ds, 'bottom_top')}),\n",
+    " 'stations': pystac.extensions.datacube.Dimension({'type': 'geometry', 'description': stac_helpers.get_long_name(ds, 'stations'), 'bbox': spatial_bounds}),\n",
     " }"
    ]
   },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "15db7bae-e387-4377-852b-5f864c67ee69",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# make sure you added all the right dims\n",
+    "assert list(dims_dict.keys()) == dims"
+   ]
+  },
   {
    "cell_type": "markdown",
    "id": "0f277883-a3fd-425f-966a-ca2140d0ef2f",
diff --git a/workflows/archive/TTU_2019_rcp85_station_data_create_collection_from_zarr.ipynb b/workflows/archive/TTU_2019_rcp85_station_data_create_collection_from_zarr.ipynb
index 1e12d0947b9e20bba3e51029e30cf66bf02e9cf4..ac1d9963f3326d928d3ddb7b51a6ed87175e3dcb 100644
--- a/workflows/archive/TTU_2019_rcp85_station_data_create_collection_from_zarr.ipynb
+++ b/workflows/archive/TTU_2019_rcp85_station_data_create_collection_from_zarr.ipynb
@@ -166,7 +166,9 @@
    "metadata": {},
    "source": [
     "## Identify x, y, t dimensions of dataset\n",
-    "May require user input if dimensions cannot be auto-detected."
+    "May require user input if dimensions cannot be auto-detected.\n",
+    "\n",
+    "**WARNING: no x, y dims in this dataset, but we will still include lat/lon as dims in our dict so we can use the same code snippets as usual to get a bbox for the dataset**"
    ]
   },
   {
@@ -710,10 +712,30 @@
     "# note that the extent of each dimension should be pulled from the dataset\n",
     "dims_dict = {dim_names_dict['T']: pystac.extensions.datacube.Dimension({'type': 'temporal', 'description': stac_helpers.get_long_name(ds, dim_names_dict['T']), 'extent': [temporal_extent_lower.strftime('%Y-%m-%dT%XZ'), temporal_extent_upper.strftime('%Y-%m-%dT%XZ')], 'step':time_step}),\n",
     " 'nv': pystac.extensions.datacube.Dimension({'type': 'count', 'description': stac_helpers.get_long_name(ds, 'nv'), 'extent': [ds.nv.min().item(), ds.nv.max().item()]}),\n",
-    "# 'stations': pystac.extensions.datacube.Dimension({'type': 'count', 'description': stac_helpers.get_long_name(ds, 'stations')}),\n",
+    " 'stations': pystac.extensions.datacube.Dimension({'type': 'geometry', 'description': stac_helpers.get_long_name(ds, 'stations'), 'bbox': spatial_bounds}),\n",
     " }"
    ]
   },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "910cf68e-5d1d-43f9-baad-b8b5709dcbd7",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "dims_dict.keys()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "74cac855-7570-42d6-a8a6-5d233bc06ad7",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "list(dims_dict.keys()) == dims"
+   ]
+  },
   {
    "cell_type": "code",
    "execution_count": null,
@@ -722,7 +744,7 @@
    "outputs": [],
    "source": [
     "# make sure you added all the right dims\n",
-    "assert dims_dict.keys() == dims"
+    "assert list(dims_dict.keys()) == dims"
    ]
   },
   {