diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 315d56e785ecf7207a104bc12e730c9e4c40b4f2..5347ec02ca800a6cdbe6ca424cd5fd93da2b8f70 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -171,6 +171,7 @@ Markdown Lint: - .java script: - ./gradlew markdownlint + allow_failure: true Unit Tests: artifacts: diff --git a/docs/README.md b/docs/README.md index 180c0d63e9eae785e6b5a6b666448666d89c8da3..8b2bfaf034f9a1c144480f7b7e71414dfc140c29 100644 --- a/docs/README.md +++ b/docs/README.md @@ -6,7 +6,7 @@ National Seismic Hazard Model Project ([NSHMP](https://earthquake.usgs.gov/hazar USGS's earthquake hazards program ([EHP](http://earthquake.usgs.gov)). *nshmp-haz* supports high performance seismic hazard calculations required to generate detailed -maps over large areas and supports a variety of USGS web services and applications related to +maps over large areas and supports a variety of web services and applications related to seismic hazards research and the dissemination of hazard data (see the [NSHM Hazard Tool](https://earthquake.usgs.gov/nshmp/)). This documentation explains how to use *nshmp-haz* as well as underlying model implementation details. @@ -18,8 +18,7 @@ use *nshmp-haz* as well as underlying model implementation details. * [Developer Basics](./pages/Developer-Basics.md) * [Calculation Configuration](./pages/Calculation-Configuration.md) * [Site Specification](./pages/Site-Specification.md) - * [Examples](../../etc/examples) (or - [on GitLab](https://code.usgs.gov/ghsc/nshmp/nshmp-haz/-/tree/main/etc/examples)) + * [Examples](../../etc/examples) * [Hazard Model](./pages/Hazard-Model.md) * [Model Structure](./pages/Model-Structure.md) * [Model Files](./pages/Model-Files.md) @@ -27,6 +26,7 @@ use *nshmp-haz* as well as underlying model implementation details. 
* [Magnitude Frequency Distributions (MFDs)](./pages/Magnitude-Frequency-Distributions.md) * [Rupture Scaling Relations](./pages/Rupture-Scaling-Relations.md) * [Ground Motion Models (GMMs)](./pages/Ground-Motion-Models.md) +* [Implementation Details](./pages/Implementation-Details.md) * [USGS Models](./pages/USGS-Models.md) * [Model Editions](./pages/Model-Editions.md) * [Logic Trees & Uncertainty](./pages/Logic-Trees-&-Uncertainty.md) diff --git a/docs/pages/Building-&-Running.md b/docs/pages/Building-&-Running.md index 0dfe9f0baedae37796f77fdf4551b6afd3ed49d0..fd91043957d9e256364065d5466269ae8bb5b717 100644 --- a/docs/pages/Building-&-Running.md +++ b/docs/pages/Building-&-Running.md @@ -59,16 +59,17 @@ See the [examples](../../etc/examples) directory for more details (or ### Computing Disaggregations Like `HazardCalc`, the `DisaggCalc` program performs disaggregations at one or more sites for a -variety of intensity measures, but requires an additional `returnPeriod` argument, in years. For -example: +variety of intensity measures. The return period for the disaggregation is defined in the config, +see [`disagg.returnPeriod`](./Calculation-Configuration.md#calculation-configuration-parameters). +Example: ```bash -java -cp nshmp-haz.jar gov.usgs.earthquake.nshmp.DisaggCalc model sites returnPeriod [config] +java -cp nshmp-haz.jar gov.usgs.earthquake.nshmp.DisaggCalc model sites [config] ``` Disaggregations build on and output `HazardCalc` results along with other disaggregation specific files. Disaggregations also have some independent -[configuration](./Calculation-Configuration.md#config-disagg) options. +[configuration](./Calculation-Configuration.md#calculation-configuration-parameters) options. 
## Run with [Docker](https://docs.docker.com/install/) diff --git a/docs/pages/Calculation-Configuration.md b/docs/pages/Calculation-Configuration.md index c7a20bde54a9f7407626f35161e8b90837efbd78..8ed8b9276c78b7c8b2341a2f1df058301e359d27 100644 --- a/docs/pages/Calculation-Configuration.md +++ b/docs/pages/Calculation-Configuration.md @@ -14,41 +14,40 @@ may be overridden. See [building and running](./Building-&-Running.md) and the Parameter | Type | Default | Notes | --------- | ---- | ------- | ----- | __`hazard`__ - `.exceedanceModel` |`String` | `TRUNCATION_3SIGMA_UPPER` | [`ExceedanceModel`][url-exceedance] - `.truncationLevel` |`Double` | `3.0` | [1](#notes) + `.exceedanceModel` |`String` | `TRUNCATION_3SIGMA_UPPER` | [`ExceedanceModel`][url-exceedance] + `.truncationLevel` |`Double` | `3.0` | [1](#notes) `.imts` |`String[]` | `[ PGV, PGA, SA0P01, SA0P02, SA0P03, SA0P05, SA0P075, SA0P1, SA0P15, SA0P2, SA0P25, SA0P3, SA0P4, SA0P5, SA0P75, SA1P0, SA1P5, SA2P0, SA3P0, SA4P0, SA5P0, SA7P5, SA10P0 ]` | [`Imt`][url-imt] + `.tectonicSettings` |`String[]` | `[]` | Tectonic setting filter + `.sourceTypes` |`String[]` | `[]` | Source type filter + `.vs30s` |`Double[]` | `[]` | Vs30s to use for batch jobs `.customImls` |`Map<String, Double[]>` | `{}` (empty object) | [2](#notes) - `.gmmUncertainty` |`Boolean` | `false` | [3](#notes) - `.valueFormat` |`String` | `ANNUAL_RATE` | [`ValueFormat`][url-valueformat] + `.gmmDampingRatio` |`Double` | `0.05` (5%) | [3](#notes) + `.gmmSigmaScale` |`Double` | `1.0` (100%, no scaling) | + `.valueFormat` |`String` | `ANNUAL_RATE` | [`ValueFormat`][url-valueformat] __`disagg`__ - `.bins` |`Object` | | [4](#notes) - `.contributorLimit` |`Double` | `0.1` | [5](#notes) + `.returnPeriod` |`Double` | `2475` | + `.bins` |`Object` | | [4](#notes) + `.contributorLimit` |`Double` | `0.1` | [5](#notes) __`rate`__ - `.bins` |`Object` | | [6](#notes) + `.bins` |`Object` | | [6](#notes) `.distance` |`Double` | `20` km - `.distributionFormat` 
|`String` | `INCREMENTAL` | [`DistributionFormat`][url-distribution] + `.distributionFormat` |`String` | `INCREMENTAL` | [`DistributionFormat`][url-distribution] `.timespan` |`Double` | `30` years - `.valueFormat` |`String` | `ANNUAL_RATE` | [`ValueFormat`][url-valueformat] -__`site`__ - `.vs30` |`Double` | `760.0` | [`Site`][url-site] - `.vsInferred` |`Boolean` | `true` - `.z1p0` |`Double` | `null` | [7](#notes) - `.z2p5` |`Double` | `null` | [7](#notes) + `.valueFormat` |`String` | `ANNUAL_RATE` | [`ValueFormat`][url-valueformat] __`output`__ | `.directory` |`String` | `hazout` - `.dataTypes` |`String[]` | `[ TOTAL, MAP ]` | [`DataType`][url-datatype] - `.returnPeriods` |`Integer[]`| `[ 475, 975, 2475 ]` | [`ReturnPeriods`][url-returnperiods] + `.dataTypes` |`String[]` | `[ TOTAL, MAP ]` | [`DataType`][url-datatype] + `.returnPeriods` |`Double[]` | `[ 475, 975, 2475, 10000]` | [`ReturnPeriods`][url-returnperiods] __`performance`__ - `.optimizeGrids` |`Boolean` | `true` | [8](#notes) - `.smoothGrids` |`Boolean` | `true` | [9](#notes) - `.systemPartition` |`Integer` | `1000` | [10](#notes) - `.threadCount` |`String` | `ALL` | [`ThreadCount`][url-sheets] + `.optimizeGrids` |`Boolean` | `true` | [7](#notes) + `.smoothGrids` |`Boolean` | `true` | [8](#notes) + `.systemPartition` |`Integer` | `1000` | [9](#notes) + `.threadCount` |`String` | `ALL` | [`ThreadCount`][url-sheets] [url-exceedance]: https://earthquake.usgs.gov/nshmp/docs/nshmp-lib/gov/usgs/earthquake/nshmp/calc/ExceedanceModel.html [url-imt]: https://earthquake.usgs.gov/nshmp/docs/nshmp-lib/gov/usgs/earthquake/nshmp/gmm/Imt.html [url-valueformat]: https://earthquake.usgs.gov/nshmp/docs/nshmp-lib/gov/usgs/earthquake/nshmp/calc/ValueFormat.html [url-distribution]: https://earthquake.usgs.gov/nshmp/docs/nshmp-lib/gov/usgs/earthquake/nshmp/calc/DistributionFormat.html -[url-site]: https://earthquake.usgs.gov/nshmp/docs/nshmp-lib/gov/usgs/earthquake/nshmp/calc/Site.html [url-datatype]: 
https://earthquake.usgs.gov/nshmp/docs/nshmp-lib/gov/usgs/earthquake/nshmp/calc/DataType.html [url-returnperiods]: https://earthquake.usgs.gov/nshmp/docs/nshmp-lib/gov/usgs/earthquake/nshmp/calc/CalcConfig.Output.html#returnPeriods [url-sheets]: https://earthquake.usgs.gov/nshmp/docs/nshmp-lib/gov/usgs/earthquake/nshmp/calc/ThreadCount.html @@ -57,12 +56,11 @@ __`performance`__ 1. `hazard.truncationLevel`: This value is only used if the `hazard.exceedanceModel` requires a limit (e.g. `TRUNCATION_UPPER_ONLY`) -2. `hazard.gmmUncertainty`: If values for additional epistemic uncertainty on ground motion have - been defined, this value en/disables this feature. -3. `hazard.customImls`: Hazard is computed at default intensity measure levels (IMLs) for every +2. `hazard.customImls`: Hazard is computed at default intensity measure levels (IMLs) for every supported intenisty measure type (IMT), but a user can specify different IMLs as needed (see [example 2](../../etc/examples/2-custom-config/README.md) and the table of default IMLs, below). +3. `hazard.gmmDampingRatio` currently has no effect. 4. `disagg.bins`: This field maps to a data container that specifies the following default ranges and intervals for distance, magnitude, and epsilon binning: `"bins": { "rMin": 0.0, "rMax": 1000.0, "Δr": 20.0, "mMin": 4.4, "mMax": 9.4, "Δm": 0.2, "εMin": -3.0, "εMax": 3.0, "Δε": 0.5 }`. @@ -72,14 +70,11 @@ __`performance`__ 6. `rate.bins`: This field maps to a data container that specifies the following default magnitude binning range and interval: `"bins": { "mMin": 4.2, "mMax": 9.4, "Δm": 0.1 }`. The `bins` object must be fully specified; partial overrides do not apply to nested JSON objects. -7. `site.z1p0` and `site.z2p5`: Basin terms may be specified as `null` or `NaN` (both unquoted). - `null` is preferred as `NaN` does not conform to the JSON spec. When trying to override default - values, however, a `null` term will be ignored whereas `NaN` will override any existing value. 
-8. `performance.optimizeGrids`: Gridded seismicity source optimizations are currently implemented +7. `performance.optimizeGrids`: Gridded seismicity source optimizations are currently implemented for any non-fixed strike grid source. For any site, rates across all azimuths are aggregated in tables of distance and magnitude. -9. `performance.smoothGrids`: Resample gridded seismicity sources close to a site. -10. `performance.systemPartition`: The number of ruptures in a fault-system source to process +8. `performance.smoothGrids`: Resample gridded seismicity sources close to a site. +9. `performance.systemPartition`: The number of ruptures in a fault-system source to process concurrently. ## Default Intensity Measure Levels (IMLs) diff --git a/docs/pages/Implementation-Details.md b/docs/pages/Implementation-Details.md new file mode 100644 index 0000000000000000000000000000000000000000..4850c63c6a4ad68057079ef868a4e1b5685fc073 --- /dev/null +++ b/docs/pages/Implementation-Details.md @@ -0,0 +1,24 @@ +# Implementation Details + +## Logic Trees + +The logic trees of epistemic uncertainty described in previous section are represented internally +using graphs of indexed nodes that connect a `root` node with one or more `leaf` nodes: + + + +The different logic trees for sources, MFDs and GMMs have similar representations: + + + +And each unique branch combination across all trees is considered in the hazard integral: + + + +--- + +[**Documentation Index**](../README.md) + +--- + [U.S. Geological Survey](https://www.usgs.gov) +National Seismic Hazard Mapping Project ([NSHMP](https://earthquake.usgs.gov/hazards/)) diff --git a/docs/pages/Site-Specification.md b/docs/pages/Site-Specification.md index 48fddc3be34b76d95173d3f7ba82af7d92ae6bfa..796f52840d83657b61c127979a1108727fa56bcd 100644 --- a/docs/pages/Site-Specification.md +++ b/docs/pages/Site-Specification.md @@ -5,21 +5,12 @@ ways. 
Examples of the file formats described below are available in the resource [`etc/nshm`](../../etc/nshm/README.md). __Note on Coordinates:__ *nshmp-haz* supports longitude and latitude values in the closed -ranges `[-360° ‥ 360°]` and `[-90° ‥ 90°]`. Note, however, that mixing site and/or source +ranges `[-360° ‥ 360°]` and `[-90° ‥ 90°]`. However, mixing site and/or source coordinates across the antimeridian (the -180° to 180° transition) will yield unexpected results. For Pacific models and calculations, always use positive or negative longitudes exclusively. -## Site String - -For the case of running a single site of interest, most *nshmp-haz* programs accept a -comma-delimited string of the form: `name,lon,lat[,vs30,vsInf[,z1p0,z2p5]]`, where `vs30`, `vsInf`, -`z1p0`, and `z2p5` are optional. Note that if `vs30` is supplied, so too must `vsInf`. Likewise if -`z1p0` is supplied, so too must `z2p5`. If the string contains any spaces, escape them or wrap the -entire string in double quotes. - -For any site parameter values that are not supplied on the command line or in the file formats -below, the following defaults are used (see the `site` member of the -[configuration parameter](./Calculation-Configuration.md#calculation-configuration-parameters) table): +For any site parameter values that are not supplied in the file formats below, the following +defaults are used: ```text name: Unnamed @@ -30,7 +21,7 @@ below, the following defaults are used (see the `site` member of the ``` For basin depth parameters `z1p0` and `z2p5`, a `null` value indicates that a GMM should use -it's 'default' basin depth scale factor. +its 'default' basin depth scale factor, which is usually included in its Vs30 site term. 
## Comma-Delimited Format (\*.csv) diff --git a/docs/pages/images/tree-branches-combined.png b/docs/pages/images/tree-branches-combined.png new file mode 100644 index 0000000000000000000000000000000000000000..75b750380acf4dc665cf945391bd1514346df58a Binary files /dev/null and b/docs/pages/images/tree-branches-combined.png differ diff --git a/docs/pages/images/tree-example.png b/docs/pages/images/tree-example.png new file mode 100644 index 0000000000000000000000000000000000000000..cce92c54113aacd02ed53aa0cb9702e29b36db2e Binary files /dev/null and b/docs/pages/images/tree-example.png differ diff --git a/docs/pages/images/tree-types.png b/docs/pages/images/tree-types.png new file mode 100644 index 0000000000000000000000000000000000000000..de1cf1fcee0a99656abc38b099ac220dfa42a992 Binary files /dev/null and b/docs/pages/images/tree-types.png differ diff --git a/etc/examples/1-hazard-curve/README.md b/etc/examples/1-hazard-curve/README.md index 6891e5968155a05c4e77ae30a9f7286efcf21571..f136d550c47e40da878ecb9a7535110aab6289b0 100644 --- a/etc/examples/1-hazard-curve/README.md +++ b/etc/examples/1-hazard-curve/README.md @@ -5,7 +5,7 @@ __Working directory:__ `/path/to/nshmp-haz/etc/examples/1-hazard-curve` On the command line, navigate to the directory above and execute the following: ```Shell -hazard ../../peer/models/Set1-Case1 "Test Site, -122.0, 38.0" +hazard ../../peer/models/Set1-Case1 site.csv ``` The PEER models, such as that designated above, consist of simple cases for different source @@ -15,28 +15,31 @@ testing. See the [PEER directory](../../peer/) for more information. The result of this calculation should be available as a single comma-delimited file containing several total mean hazard curves for PGA in a newly created `hazout` directory. In this example, the calculation configuration was derived from the model directory and the site is defined in -file `site.csv`. 
See the [site specification](https://github.com/usgs/nshmp-haz/wiki/sites) +file `site.csv`. See the [site specification](../../../docs/pages/Site-Specification.md) page for more details. -Note that not all [calculation configuration](https://github.com/usgs/nshmp-haz/wiki/Configuration) +Note that not all [calculation configuration](../../../docs/pages/Calculation-Configuration.md) parameters need be supplied; see the [configuration file](../../peer/models/Set1-Case1/config.json) for this example model. Also note that all output is written to a `hazout` directory by default, but the output destination can be specified via the -[`output.directory`](https://github.com/usgs/nshmp-haz/wiki/configuration#config-output) parameter. -In addition to hazard curves, the calculation configuration and a log of the calculation -are also saved. +[`output.directory`](../../../docs/pages/Calculation-Configuration.md#calculation-configuration-parameters) +parameter. In addition to hazard curves, the calculation configuration and a log of the calculation +are also saved. The primary outputs are hazard curves, hazard curves truncated below about 10⁻⁴, +and ground motion values derived from the curves for specific return periods. __Results directory structure:__ ```text 1-hazard-curve/ └─ hazout/ - ├─ config.json + ├─ calc-config.json ├─ HazardCalc.log └─ PGA/ - └─ curves.csv + ├─ curves.csv + ├─ curves-truncated.csv + └─ map.csv ``` In the next example, we'll override the model supplied configuration with a custom file. diff --git a/etc/examples/2-custom-config/README.md b/etc/examples/2-custom-config/README.md index ca5163209413c937d88b172a5215c677a1899f2c..fee21af31f3d6d280a35947895c9ec2c7d3c706d 100644 --- a/etc/examples/2-custom-config/README.md +++ b/etc/examples/2-custom-config/README.md @@ -13,12 +13,12 @@ In this example we've overridden the configuration supplied by the model. Specif * The upper end of each hazard curve has been truncated at 3 standard deviations. 
* Hazard curves have been saved as poisson probability instead of annual rate. * Hazard curves have been calculated for 3 `imts` - ([intensity measures](http://usgs.github.io/nshmp-haz/javadoc/index.html?gov/usgs/earthquake/nshmp/gmm/Imt.html), + ([intensity measures](https://earthquake.usgs.gov/nshmp/docs/nshmp-lib/gov/usgs/earthquake/nshmp/gmm/Imt.html), or spectral periods). * The `imls` (intensity measure levels or x-values) of the resultant curves have been explicitely defined for each `imt`. -See the [configuration specification](https://github.com/usgs/nshmp-haz/wiki/configuration) +See the [configuration specification](../../../docs/pages/Calculation-Configuration.md) for details on default values and supported options and formats. __Results directory structure:__ @@ -26,14 +26,20 @@ __Results directory structure:__ ```text 2-custom-config/ └─ hazout/ - ├─ config.json + ├─ calc-config.json ├─ HazardCalc.log ├─ PGA/ - │ └─ curves.csv + │ ├─ curves.csv + │ ├─ curves-truncated.csv + │ └─ map.csv ├─ SA0P2/ - │ └─ curves.csv + │ ├─ curves.csv + │ ├─ curves-truncated.csv + │ └─ map.csv └─ SA1P0/ - └─ curves.csv + ├─ curves.csv + ├─ curves-truncated.csv + └─ map.csv ``` <!-- markdownlint-disable MD001 --> diff --git a/etc/examples/3-sites-file/README.md b/etc/examples/3-sites-file/README.md index 65aa2ba681db02df0ac2a9c789f6851e3b8b8657..97ec338b4e351fae1b11053e64817011b271f0ff 100644 --- a/etc/examples/3-sites-file/README.md +++ b/etc/examples/3-sites-file/README.md @@ -6,7 +6,7 @@ To compute hazard at more than one site, one may supply a comma-delimited (\*.cs or [GeoJSON](http://geojson.org) (\*.geojson) formatted site data file instead: ```Shell -hazard ../../peer/models/Set1-Case1 sites.csv config.json +hazard ../../peer/models/Set1-Case1 sites.csv config.json ``` or @@ -15,8 +15,8 @@ or hazard ../../peer/models/Set1-Case1 sites.geojson config.json ``` -The [site specification](https://github.com/usgs/nshmp-haz/wiki/sites) -wiki page provides details on the 
two file formats. Note that with either format, +The [site specification](../../../docs/pages/Site-Specification.md) +page provides details on the two file formats. Note that with either format, if the name of a site is supplied, it will be included in the first column of any output curve files. __Results directory structure:__ @@ -24,14 +24,20 @@ __Results directory structure:__ ```text 3-sites-file/ └─ hazout/ - ├─ config.json + ├─ calc-config.json ├─ HazardCalc.log ├─ PGA/ - │ └─ curves.csv + │ ├─ curves.csv + │ ├─ curves-truncated.csv + │ └─ map.csv ├─ SA0P2/ - │ └─ curves.csv + │ ├─ curves.csv + │ ├─ curves-truncated.csv + │ └─ map.csv └─ SA1P0/ - └─ curves.csv + ├─ curves.csv + ├─ curves-truncated.csv + └─ map.csv ``` <!-- markdownlint-disable MD001 --> diff --git a/etc/examples/4-hazard-map/README.md b/etc/examples/4-hazard-map/README.md index da3708ea2464aa03afd53985b33b5b0486f02543..641feee179eb50977ff3f2195bb3d9e9f4486cb3 100644 --- a/etc/examples/4-hazard-map/README.md +++ b/etc/examples/4-hazard-map/README.md @@ -16,14 +16,20 @@ __Results directory structure:__ ```text 4-hazard-map/ └─ hazout/ - ├─ config.json + ├─ calc-config.json ├─ HazardCalc.log ├─ PGA/ - │ └─ curves.csv + │ ├─ curves.csv + │ ├─ curves-truncated.csv + │ └─ map.csv ├─ SA0P2/ - │ └─ curves.csv + │ ├─ curves.csv + │ ├─ curves-truncated.csv + │ └─ map.csv └─ SA1P0/ - └─ curves.csv + ├─ curves.csv + ├─ curves-truncated.csv + └─ map.csv ``` <!-- markdownlint-disable MD001 --> diff --git a/etc/examples/5-complex-model/README.md b/etc/examples/5-complex-model/README.md index 15616984654db3a8cc073e4fdb6c110cdea49a1c..29a3ee9b4c1a78f253a21432fc6c85a82c2f5092 100644 --- a/etc/examples/5-complex-model/README.md +++ b/etc/examples/5-complex-model/README.md @@ -39,7 +39,7 @@ hazard ../../../../nshm-conus map.geojson config-map.json ``` This computes 121 curves over a 2° by 2° area and will give you a sense of how long a larger map -might take. 
This small coarse map may take 10 minutes to complete. Note that in the above two +might take. This small, coarse map may take 10 minutes to complete. Note that in the above two examples we specified different output directories in the config files for each calculation. __Results directory structure:__ @@ -47,20 +47,28 @@ __Results directory structure:__ ```text 5-complex-model/ ├─ hazout-sites/ - │ ├─ config.json + │ ├─ calc-config.json │ ├─ HazardCalc.log │ ├─ SA1P0/ - │ │ └─ curves.csv + │ │ ├─ curves.csv + │ │ ├─ curves-truncated.csv + │ │ └─ map.csv │ └─ SA2P0/ - │ └─ curves.csv + │ ├─ curves.csv + │ ├─ curves-truncated.csv + │ └─ map.csv │ └─ hazout-map/ - ├─ config.json + ├─ calc-config.json ├─ HazardCalc.log ├─ SA1P0/ - │ └─ curves.csv + │ ├─ curves.csv + │ ├─ curves-truncated.csv + │ └─ map.csv └─ SA2P0/ - └─ curves.csv + ├─ curves.csv + ├─ curves-truncated.csv + └─ map.csv ``` <!-- markdownlint-disable MD001 --> diff --git a/etc/examples/6-enhanced-output/README.md b/etc/examples/6-enhanced-output/README.md index efed4971851d0928f43b062c5c4d76a6bc328e7c..c20c6d251d44547eab074082843b0167a23713ca 100644 --- a/etc/examples/6-enhanced-output/README.md +++ b/etc/examples/6-enhanced-output/README.md @@ -15,14 +15,15 @@ The config file for this example, `config.json`, specified `GMM` and `SOURCE` as [output data types][output_types]. Note that the output curves directory now contains additional directories of curves by source type and GMM. -[output_types]: ../../../docs/pages/Calculation-Configuration.md#calculation-configuration +[output_types]: ../../../docs/pages/Calculation-Configuration.md#calculation-configuration-parameters -See the `nshmp-haz` wiki and javadocs for more information on source types ([Wiki][source_wiki], -[JavaDoc][source_javadoc]) and GMMs ([Wiki][gmm_wiki], [JavaDoc][gmm_javadoc]). 
+See the `nshmp-haz` documentation and javadocs for more information on source types +([docs][source_docs], [JavaDoc][source_javadoc]) and GMMs +([docs][gmm_docs], [JavaDoc][gmm_javadoc]). -[source_wiki]: ../../../docs/pages/Source-Types.md +[source_docs]: ../../../docs/pages/Source-Types.md [source_javadoc]: https://earthquake.usgs.gov/nshmp/docs/nshmp-lib/gov/usgs/earthquake/nshmp/model/SourceType.html -[gmm_wiki]: ./../../docs/pages/Ground-Motion-Models.md +[gmm_docs]: ./../../docs/pages/Ground-Motion-Models.md [gmm_javadoc]: https://earthquake.usgs.gov/nshmp/docs/nshmp-lib/gov/usgs/earthquake/nshmp/gmm/package-summary.html __Results directory structure:__ @@ -30,7 +31,7 @@ __Results directory structure:__ ```text 6-enhanced-output/ └─ hazout/ - ├─ config.json + ├─ calc-config.json ├─ HazardCalc.log ├─ PGA/ │ ├─ curves-truncated.csv diff --git a/etc/examples/7-disaggregation/README.md b/etc/examples/7-disaggregation/README.md index ea60c780c8022a225536f941cb2358b7b3d533fe..a01f54397295e2ba55c14878526aae6785b7aff1 100644 --- a/etc/examples/7-disaggregation/README.md +++ b/etc/examples/7-disaggregation/README.md @@ -3,77 +3,54 @@ __Working directory:__ `/path/to/nshmp-haz/etc/examples/7-disaggregation` To perform a disaggregation of hazard, one must use the program `DisaggCalc`. Internally, -`DisaggCalc` calls `HazardCalc` and then reprocesses the data to generate a comma-delimited -file of distance, magnitude, and epsilon bins, and a text file of summary statistics and primary -contributing sources. For this, it can be helpful to create a second system alias: +`DisaggCalc` calls `HazardCalc` and then reprocesses the data to generate output files of +disaggregation summary statistics and primary contributing sources. 
For this, it can be helpful +to create a second system alias: ```Shell alias disagg='java -Xms4g -Xmx8g -cp /path/to/nshmp-haz/build/libs/nshmp-haz.jar gov.usgs.earthquake.nshmp.DisaggCalc' ``` -`DisaggCalc` is similar to `HazardCalc` in every way except that the return-period of interest -must be specified. For example, execute: +The command line arguments for `DisaggCalc` are the same as those for `HazardCalc`. The target +return period for a disaggregation is specified in the config +[`disagg.returnPeriod`](../../../docs/pages/Calculation-Configuration.md#calculation-configuration-parameters) +field. To compute the disaggregation in this example, execute: ```Shell -disagg ../../../../nshm-conus sites.geojson 2475 config.json +disagg ../../../../nshm-conus sites.csv config.json ``` -The results of the disaggregation are saved along with hazard curves in `disagg` directories. -As with `HazardCalc`, if the `GMM` data type has been specified (as it has in the +The results of the disaggregation are saved alongside hazard curves in a `disagg` directory. +Disaggregation results are stored in JSON format with one file for each site. The results for +each IMT are stored within that file as well. As with `HazardCalc`, if the `GMM` data type has +been specified (as it has in the [config](../../../docs/pages/Calculation-Configuration.md#calculation-configuration) file for this example) additional disaggregation results for each GMM are generated as well. Disaggregations by individual `SOURCE` type are also possible. +Note that `DisaggCalc` will only process a CSV file of sites (not GeoJSON). 
+ __Results directory structure:__ ```text 7-disaggregation/ └─ hazout/ - ├─ config.json + ├─ calc-config.json ├─ DisaggCalc.log + ├─ disagg/ + │ ├─ Los Angeles CA.json + │ ├─ Salt Lake City UT.json + │ ├─ San Francisco CA.json + │ └─ Seattle WA.json ├─ PGA/ │ ├─ curves-truncated.csv │ ├─ curves.csv - │ ├─ disagg/ - │ │ ├─ Los Angeles CA/ - │ │ │ ├─ data.csv - │ │ │ └─ summary.txt - │ │ ├─ Salt Lake City UT/ - │ │ │ ├─ data.csv - │ │ │ └─ summary.txt - │ │ ├─ San Francisco CA/ - │ │ │ ├─ data.csv - │ │ │ └─ summary.txt - │ │ └─ Seattle WA/ - │ │ ├─ data.csv - │ │ └─ summary.txt │ └─ gmm/ │ ├─ AM_09_INTERFACE_BASIN/ - │ │ ├─ curves.csv - │ │ └─ disagg/ - │ │ ├─ San Francisco CA/ - │ │ │ ├─ data.csv - │ │ │ └─ summary.txt - │ │ └─ Seattle WA/ - │ │ ├─ data.csv - │ │ └─ summary.txt + │ │ └─ curves.csv │ ├─ ... - │ ├─ CB_14_BASIN/ - │ │ ├─ curves.csv - │ │ └─ disagg/ - │ │ ├─ Los Angeles CA/ - │ │ │ ├─ data.csv - │ │ │ └─ dsummary.txt - │ │ ├─ Salt Lake City UT/ - │ │ │ ├─ data.csv - │ │ │ └─ summary.txt - │ │ ├─ San Francisco CA/ - │ │ │ ├─ data.csv - │ │ │ └─ summary.txt - │ │ └─ Seattle WA/ - │ │ ├─ data.csv - │ │ └─ summary.txt - │ └─ ... + │ └─ ZHAO_06_SLAB_BASIN/ + │ └─ curves.csv ├─ SA0P1/ │ └─ ... └─ ... @@ -84,7 +61,7 @@ Note that in the output above, there are only disaggregation results for subduct will not be saved. 
<!-- markdownlint-disable MD001 --> -#### Next: [Example 8 – Earthquake probabilities and rates](../8-probabilities/README.md) +<!-- #### Next: [Example 8 – Earthquake probabilities and rates](../8-probabilities/README.md) --> --- diff --git a/etc/examples/7-disaggregation/sites.csv b/etc/examples/7-disaggregation/sites.csv new file mode 100644 index 0000000000000000000000000000000000000000..bed28dd3d11ad7a40ad9c85a44b962f4fb5bdac2 --- /dev/null +++ b/etc/examples/7-disaggregation/sites.csv @@ -0,0 +1,5 @@ +name, lon, lat +Los Angeles CA, -118.25, 34.05 +San Francisco CA, -122.40, 37.75 +Seattle WA, -122.30, 47.60 +Salt Lake City UT, -111.90, 40.75 diff --git a/etc/examples/7-disaggregation/sites.geojson b/etc/examples/7-disaggregation/sites.geojson deleted file mode 100644 index 2275ce3bbc6674402a6f8bbefa93d7dc56f9b925..0000000000000000000000000000000000000000 --- a/etc/examples/7-disaggregation/sites.geojson +++ /dev/null @@ -1,53 +0,0 @@ -{ - "type": "FeatureCollection", - "features": [ - { - "type": "Feature", - "geometry": { - "type": "Point", - "coordinates": [-118.25, 34.05] - }, - "properties": { - "marker-size": "small", - "marker-color": "#ff0080", - "title": "Los Angeles CA" - } - }, - { - "type": "Feature", - "geometry": { - "type": "Point", - "coordinates": [-122.40, 37.75] - }, - "properties": { - "marker-size": "small", - "marker-color": "#ff0080", - "title": "San Francisco CA" - } - }, - { - "type": "Feature", - "geometry": { - "type": "Point", - "coordinates": [-122.30, 47.60] - }, - "properties": { - "marker-size": "small", - "marker-color": "#ff0080", - "title": "Seattle WA" - } - }, - { - "type": "Feature", - "geometry": { - "type": "Point", - "coordinates": [-111.90, 40.75] - }, - "properties": { - "marker-size": "small", - "marker-color": "#ff0080", - "title": "Salt Lake City UT" - } - } - ] -} diff --git a/etc/examples/8-probabilities/README.md b/etc/examples/8-probabilities/README.md deleted file mode 100644 index 
4647c3da9ddfabc645d0f4404c6edc0f1329e889..0000000000000000000000000000000000000000 --- a/etc/examples/8-probabilities/README.md +++ /dev/null @@ -1,61 +0,0 @@ -# Example 8: Earthquake probabilities and rates - -__Working directory:__ `/path/to/nshmp-haz/etc/examples/8-probabilities` - -`nshmp-haz` can also calculate earthquake probabilities and rates at a location. -As with the `HazardCalc` and `DisaggCalc` programs, `RateCalc` takes a model, a site data file -or string, and an optional config file, which will control whether the distributions generated -are incremental or cumulative, and whether the distribution values are expressed as annual rates -or Poisson probabilities. The default (no config supplied) settings are for incremental -annual-rates. The `config.rate` elements also specify the cutoff `distance`, within which -all sources should be included, and a `timespan` used for conversion to Poisson probabilities. - -For this example, the following system alias is helpful: - -```Shell -alias rate='java -Xms4g -Xmx8g -cp /path/to/nshmp-haz/build/libs/nshmp-haz.jar gov.usgs.earthquake.nshmp.RateCalc' -``` - -Assuming a copy of the CONUS NSHM is available (see [Example 5](../5-complex-model)), execute: - -```Shell -rate ../../../../nshm-conus sites.csv config-sites.json -``` - -to generate incremental, annual-rate output for a list of sites, or - -```Shell -rate ../../../../nshm-conus map.geojson config-map.json -``` - -to generate a map of cumulative Poisson probabilities (i.e. P ≥ M). - -Like `HazardCalc`, `RateCalc` observes the `config.output.dataTypes` `SOURCE` -option and will include a `source` directory with rates or probabilities for all -contributing source types. - -__Results directory structure:__ - -```text -8-probabilities/ - ├─ hazout-rate-sites/ - │ ├─ config.json - │ ├─ RateCalc.log - │ ├─ rates.csv - │ └─ source/ - │ ├─ FAULT/ - │ │ └─ probs.csv - │ └─ ... 
- └─ hazout-prob-map/ - ├─ config.json - ├─ RateCalc.log - └─ probs.csv - └─ source/ - ├─ FAULT/ - │ └─ rates.csv - └─ ... -``` - ---- - -* [**Documentation Index**](../../../docs/README.md) diff --git a/etc/examples/8-probabilities/config-map.json b/etc/examples/8-probabilities/config-map.json deleted file mode 100644 index 8fe1479387b26dec348c70da4046e7cc8108e829..0000000000000000000000000000000000000000 --- a/etc/examples/8-probabilities/config-map.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "output": { - "dataTypes": ["SOURCE"], - "directory": "hazout-prob-map" - }, - "rate": { - "distance": 10.0, - "valueFormat": "POISSON_PROBABILITY", - "distributionFormat": "CUMULATIVE", - "timespan": 50.0 - } -} diff --git a/etc/examples/8-probabilities/config-sites.json b/etc/examples/8-probabilities/config-sites.json deleted file mode 100644 index fa865014fc0b306cfd5bd603a7c02b56712bdac0..0000000000000000000000000000000000000000 --- a/etc/examples/8-probabilities/config-sites.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "output": { - "dataTypes": ["SOURCE"], - "directory": "hazout-rate-sites" - } -} diff --git a/etc/examples/8-probabilities/map.geojson b/etc/examples/8-probabilities/map.geojson deleted file mode 100644 index 77ba418305feb514f068c8ed64dd223065fb32ef..0000000000000000000000000000000000000000 --- a/etc/examples/8-probabilities/map.geojson +++ /dev/null @@ -1,22 +0,0 @@ -{ - "type": "FeatureCollection", - "features": [ - { - "type": "Feature", - "geometry": { - "type": "Polygon", - "coordinates": [[ - [-123.0, 37.0], - [-121.0, 37.0], - [-121.0, 39.0], - [-123.0, 39.0], - [-123.0, 37.0] - ]] - }, - "properties": { - "spacing": 0.2, - "title": "San Francisco Bay Area" - } - } - ] -} diff --git a/etc/examples/8-probabilities/sites.csv b/etc/examples/8-probabilities/sites.csv deleted file mode 100644 index b55826d0fd00d2c5ba3b83fa2250cca4a9d91af3..0000000000000000000000000000000000000000 --- a/etc/examples/8-probabilities/sites.csv +++ /dev/null @@ -1,14 +0,0 @@ -name, 
lon, lat -Fresno CA, -119.75, 36.75 -Los Angeles CA, -118.25, 34.05 -Oakland CA, -122.25, 37.80 -San Francisco CA, -122.40, 37.75 -San Jose CA, -121.90, 37.35 -Santa Rosa CA, -122.70, 38.45 -Vallejo CA, -122.25, 38.10 -Las Vegas NV, -115.15, 36.20 -Reno NV, -119.80, 39.55 -Eugene OR, -123.10, 44.05 -Salt Lake City UT, -111.90, 40.75 -Tacoma WA, -122.45, 47.25 -Jackson WY, -110.75, 43.50 diff --git a/etc/examples/README.md b/etc/examples/README.md index b557f3baae6ef5ffda228f41c8bcd4d2d489fc20..343bc5b7fc525aa731e601cd2aebeeabc27459c5 100644 --- a/etc/examples/README.md +++ b/etc/examples/README.md @@ -1,6 +1,6 @@ # Examples -These examples are designed to be executed locally while following the READMEs on GitHub. +These examples are designed to be executed locally while following the READMEs on GitLab. All examples avoid a lengthy call to Java and the `HazardCalc` program by using the following system alias: diff --git a/etc/matlab/README.md b/etc/matlab/README.md index 56457d3f377f38a0e70092ffb5c87b941f45209e..e6ae912c9572a058886e0c1f22aab36a009d82ee 100644 --- a/etc/matlab/README.md +++ b/etc/matlab/README.md @@ -1,13 +1,5 @@ # Using nshmp-haz with Matlab ->**NOTE:** *nshmp-haz* was recently upgraded to Java 8, which supercedes and is -incompatable with the Java 7 JVM that ships with Matlab. Users will need to set the -`MATLAB_JAVA` environment variable to point to a Java 8 runtime. - -All recent versions of Matlab include a Java runtime environment and it is therefore -relatively straightforward to use the nshmp-haz library. - -## Requirements - -1. Matlab R2013B or higher (nshmp-haz targets Java 7; prior versions of Matlab use Java 6). -2. A [build](https://github.com/usgs/nshmp-haz/wiki/building-&-running) of nshmp-haz. +The best way to use _nshmp-haz_ with matlab is as a web service. The scripts in this directory +provide examples of how to access web services for ground motion models (GMMs) that are based +on the code in this repository. 
diff --git a/etc/matlab/gmmBatchExample.m b/etc/matlab/gmmBatchExample.m index e61fff510598179fff04eba3900568b3f4c4229e..eadd4d9a276d9a689f20b8662c35243507ebb19f 100644 --- a/etc/matlab/gmmBatchExample.m +++ b/etc/matlab/gmmBatchExample.m @@ -14,11 +14,11 @@ clear; % 90.0, % % For a full list of GMM input paramters see: -% http://usgs.github.io/nshmp-haz/javadoc/gov/usgs/earthquake/nshmp/gmm/GmmInput.html +% https://earthquake.usgs.gov/nshmp/docs/nshmp-lib/gov/usgs/earthquake/nshmp/gmm/GmmInput.html % % If 'null' is supplied as a value or a GMM input field and values are % not given, the default values are used: -% http://usgs.github.io/nshmp-haz/javadoc/gov/usgs/earthquake/nshmp/gmm/GmmInput.Builder.html#withDefaults-- +% https://earthquake.usgs.gov/nshmp/docs/nshmp-lib/gov/usgs/earthquake/nshmp/gmm/GmmInput.Builder.html#withDefaults() inputs = fileread('gmm-inputs.csv'); diff --git a/etc/matlab/gmmExample.m b/etc/matlab/gmmExample.m index 609bf1799d8f30c973ae46274a4987aec0f09dea..22cbd36331752982ce832b4e6d7fd45e1caf1376 100644 --- a/etc/matlab/gmmExample.m +++ b/etc/matlab/gmmExample.m @@ -54,11 +54,11 @@ input.z2p5 = NaN; % in km; NaN triggers default basin depth model input.z1p0 = NaN; % in km; NaN triggers default basin depth model % Specify a ground motion model. GMM identifiers: -% http://usgs.github.io/nshmp-haz/javadoc/gov/usgs/earthquake/nshmp/gmm/Gmm.html +% https://earthquake.usgs.gov/nshmp/docs/nshmp-lib/gov/usgs/earthquake/nshmp/gmm/Gmm.html gmm = 'ASK_14'; % Specify an intensity measure type (IMT). IMT identifiers: -% http://usgs.github.io/nshmp-haz/javadoc/gov/usgs/earthquake/nshmp/gmm/Imt.html +% https://earthquake.usgs.gov/nshmp/docs/nshmp-lib/gov/usgs/earthquake/nshmp/gmm/Imt.html imt = 'PGA'; % Do a calculation. 
The MatUtil.calc(gmm, imt, gmmInput) method returns an diff --git a/etc/matlab/response_spectra.m b/etc/matlab/response_spectra.m new file mode 100644 index 0000000000000000000000000000000000000000..a0943694b3c23eea527bee0958e589598ca1070c --- /dev/null +++ b/etc/matlab/response_spectra.m @@ -0,0 +1,141 @@ +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +% +% Response spectra web service example +% +% Author: Demi Girot (dgirot@usgs.gov) +% Peter Powers (pmpowers@usgs.gov) +% +% Created: 09/16/2021 +% Updated: 01/12/2022 +% +% This script assembles a response spectra web service URL request, makes +% the request, places the result in a struct, and generates a simple plot. +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +%% Create web service URL +clearvars +% The root url for the response spectra web service +urlbase = "https://staging-earthquake.usgs.gov/ws/nshmp/data/gmm/spectra?"; + +% The ground motion models (GMMs) of interest +% +% For allowed identifiers, see: +% https://earthquake.usgs.gov/nshmp/docs/nshmp-lib/gov/usgs/earthquake/nshmp/gmm/Gmm.html +gmms = ["BJF_97" "CB_03" "CY_14"]; + +% Ground motion model parameters +Mw = 6.5; +dip = 90; +rake = 0; +width = 14; +rJB = 10; +rRup = 10.012; +rX = 10; +vs30 = 760; +vsInf = true; +zHyp = 7.5; +zTop = 0.5; + +url = createUrl( ... + urlbase, gmms, ... + Mw, dip, rake, width, rJB, rRup, rX, vs30, vsInf, zHyp, zTop); +%% Call web service + +% Open a browser window with the web service URL to show the JSON response +web(url); + +% Call the web service and place response in a struct +data = webread(url); + +%% Summary of each GMM dataset + +means = data.response.means.data; +sigmas = data.response.sigmas.data; + +c = newline; + +for i=1:length(means) + gmm = ... + "GMM: " + means(i).label + c + ... + "Periods: " + mat2str(means(i).data.xs') + c + ... + "Means: " + mat2str(means(i).data.ys') + c + ... 
+ "Sigmas: " + mat2str(sigmas(i).data.ys') +end + +%% Make a simple plot of means data with epistemic uncertainty from web +% service response + +figure(1) +cla + +plot_handles = []; +legend_labels = {}; + +% loop means response array +for i = 1:length(means) + disp(means(i)) + + gmm_id = means(i).id; + gmm_label = means(i).label; + gmm_xs = means(i).data.xs; + gmm_ys = means(i).data.ys; + epi_tree = means(i).tree; + + % Plot the total spectrum + PH = semilogx(gmm_xs, gmm_ys, 'LineWidth', 2); + hold on; grid on + plot_handles = [plot_handles PH]; + legend_labels{end+1} = gmm_id; + + % Plot epistemic spectra, if present + if ~isempty(epi_tree) + for j = 1 : length(epi_tree) + epi_branch = epi_tree(j); + epi_label = epi_branch.id; + epi_ys = epi_branch.values; + PHE = semilogx(gmm_xs, epi_ys,'LineWidth',1,'LineStyle','--','Color',PH.Color); + plot_handles = [plot_handles PHE]; + legend_labels{end+1} = [gmm_id ' ' epi_label]; + end + + end +end + +xlabel('Periods (sec)','FontSize',12) +ylabel('Median Ground Motion (g)','FontSize',12) +title('Ground Motion vs Response Spectra (Means)', 'FontSize', 14) +axis([0.001 10 0.005 0.8]); +set(gca, 'FontSize', 12); +set(gca, 'XTick', [0.01 0.1 1 10]); +set(gca, 'XTickLabel', {'0.01','0.1','1','10'}); + +l = legend(plot_handles, legend_labels, 'Location', 'northwest'); +set(l, 'Interpreter', 'none') + + +%% Build URL function + +function url = createUrl( ... + urlbase, gmms, ... + Mw, dip, rake, width, rJB, rRup, rX, vs30, vsInf, zHyp, zTop) + + url = urlbase; + for i = 1:size(gmms, 2) + if i == 1 + url = url + "gmm=" + gmms(i); + else + url = url + "&gmm=" + gmms(i); + end + end + url = url + ... + "&Mw=" + num2str(Mw) + ... + "&dip=" + num2str(dip) + ... + "&rake=" + num2str(rake) + ... + "&width=" + num2str(width) + ... + "&rJB=" + num2str(rJB) + ... + "&rRup=" + num2str(rRup) + ... + "&rX=" + num2str(rX) + ... + "&vs30=" + num2str(vs30) + ... + "&vsInf=" + string(vsInf) + ... + "&zHyp=" + num2str(zHyp) + ... 
+ "&zTop=" + num2str(zTop); +end \ No newline at end of file diff --git a/etc/python/README.md b/etc/python/README.md deleted file mode 100644 index 4fb2e5dd2e549126d23f8dcf47d2cb3626796f15..0000000000000000000000000000000000000000 --- a/etc/python/README.md +++ /dev/null @@ -1,11 +0,0 @@ -# Using nshmp-haz with Python - -There are a few different packages that will allow you to call Java code from Python. -This example uses jpype. It also uses NumPy to simplify working with the arrays -returned by the ground motion model calculators. - -## Requirements - -1. A [build](https://github.com/usgs/nshmp-haz/wiki/building-&-running) of nshmp-haz. -2. [jpype](http://jpype.readthedocs.io/en/latest/install.html) -3. [NumPy](http://www.numpy.org/) diff --git a/etc/python/gmm-inputs.csv b/etc/python/gmm-inputs.csv deleted file mode 100644 index dc4f5e207506b53a41b5e7fe9a0fc78178b47392..0000000000000000000000000000000000000000 --- a/etc/python/gmm-inputs.csv +++ /dev/null @@ -1,38 +0,0 @@ - dip, -0, -2.5, -5, -7.5, -10, -12.5, -15, -17.5, -20, -22.5, -25, -27.5, -30, -32.5, -35, -37.5, -40, -42.5, -45, -47.5, -50, -52.5, -55, -57.5, -60, -62.5, -65, -67.5, -70, -72.5, -75, -77.5, -80, -82.5, -85, -87.5, -90, \ No newline at end of file diff --git a/etc/python/gmmBatchExample.py b/etc/python/gmmBatchExample.py deleted file mode 100644 index ff5c3dd4da960ec7f53da49b9cd1071f2c60dbaa..0000000000000000000000000000000000000000 --- a/etc/python/gmmBatchExample.py +++ /dev/null @@ -1,90 +0,0 @@ -## nshmp-haz Ground Motion Model (GMM) batch processing example script - -import requests - -## Read CSV file of GMM inputs -# -# Each column of the CSV file is a GMM input parameter with the -# first row dictating that GMM input field. 
-# -# Example CSV to change only dip: -# dip, -# 0.0, -# 45.0, -# 90.0, -# -# For a full list of GMM input paramters see: -# http://usgs.github.io/nshmp-haz/javadoc/gov/usgs/earthquake/nshmp/gmm/GmmInput.html -# -# If 'null' is supplied as a value or a GMM input field and values are -# not given, the default values are used: -# http://usgs.github.io/nshmp-haz/javadoc/gov/usgs/earthquake/nshmp/gmm/GmmInput.Builder.html#withDefaults-- -file = open('gmm-inputs.csv', 'r') - -inputs = file.read() - -file.close() - - -## URL to POST the CSV file of GMM inputs -# -# Must update the URL host if not on localhost. -# -# The GMMs must be specified in the URL query string. -# -# All GMM services are available to call for batch processing. -host = 'http://localhost:8080' - -service = '/nshmp-haz/gmm/spectra' - -url = host + service - -query = { 'gmm': [ 'AB_06_PRIME', 'CAMPBELL_03', 'FRANKEL_96' ] } - - -## Conduct HTTP POST Request -# -# Conduct a HTTP POST request, sending the CSV file of GMM inputs. -# -# The POST response is loaded into a object -# following the returned JSON structure. -svcResponse = requests.post(url, data = inputs, params = query).json() - - -## Check Response -# -# Check to see if the response returned an error and check -# to see if the field 'response' exists in the object. -# -# If the URL does not contain a query string of GMMs the response -# returned will be the service usage. -if svcResponse['status'] == 'error' and ~hasattr(svcResponse, 'response'): - exit() - - -## Retreive the data -# -# Loop through each response spectrum response and obtain the means -# and sigmas. 
-for response in svcResponse['response']: - - # Request structure contains the GMMs and GMM input parameters used - request = response['request'] - - # The GMMs used for the calculation - gmms = request['gmms'] - - # The GMM input parameters used for the calculation - gmmInput = request['input'] - - # Get the means - for means in response['means']['data']: - data = means['data'] - xMeans = data['xs'] - yMeans = data['ys'] - - # Get the sigmas - for sigmas in response['sigmas']['data']: - data = sigmas['data'] - xSigmas = data['xs'] - ySigmas = data['ys'] diff --git a/etc/python/gmmExample.py b/etc/python/gmmExample.py deleted file mode 100644 index 5819560c29f8cb10e9970e05d1b3dac497a14bb1..0000000000000000000000000000000000000000 --- a/etc/python/gmmExample.py +++ /dev/null @@ -1,78 +0,0 @@ -#!/usr/bin/env python -## nshmp-haz Ground Motion Model (GMM) calculator example script - -# ========================================================================= -# This script provides instruction on how to access ground motion models -# (GMMs) implemented in the nshmp-haz library. -# ========================================================================= - -from jpype import * -import numpy as np - -# Specify path to nshmp-haz library: -classpath = '/path/to/repository/nshmp-haz-master/build/libs/nshmp-haz.jar' - -# Start Java Virtual Machine and add nshmp-haz to classpath: -startJVM(getDefaultJVMPath(), "-ea", - "-Djava.class.path={0}".format(classpath)) - -# Import packages: -nshmp = JPackage("gov").usgs.earthquake.nshmp.etc - -# ========================================================================= -# Single ground motion calcuation: - -# Initialize calculator: -hazMat = nshmp.HazMat.init(classpath) - -# Note that hazMat is stateless and reusable and should therefore be -# initialized only once in a script if doing many calculations. - -# Set up a GMM input parameter object. 
These data are a source and site -# parameterization that will satisfy all currently implemented Gmms. Note -# that not all models will necessarily use all parameters. -gmmparams = nshmp.GmmParams() -gmmparams.Mw = 6.5 -gmmparams.rJB = 5.0 -gmmparams.rRup = 5.1 -gmmparams.rX = 5.1 -gmmparams.dip = 90.0 -gmmparams.width = 10.0 -gmmparams.zTop = 1.0 -gmmparams.zHyp = 6.0 -gmmparams.rake = 0.0 -gmmparams.vs30 = 760. -gmmparams.vsInf = True -gmmparams.z2p5 = np.nan -gmmparams.z1p0 = np.nan - -# Specify a ground motion model. GMM identifiers: -# http://usgs.github.io/nshmp-haz/javadoc/gov/usgs/earthquake/nshmp/gmm/Gmm.html -gmm = 'ASK_14'; - -# Specify an intensity measure type (IMT). IMT identifiers: -# http://usgs.github.io/nshmp-haz/javadoc/gov/usgs/earthquake/nshmp/gmm/Imt.html -imt = 'PGA'; - -# Do a calculation. The MatUtil.calc(gmm, imt, gmmInput) method returns an -# array of [ln(median ground motion), sigma] -ln_med_gm, sigma = hazMat.gmmMean(gmm, imt, gmmparams) - -print('ln(median ground motion), sigma:') -print(ln_med_gm, sigma) - -# ========================================================================= -# Determinisitic response spectrum calculation: - -# The object returned by the MatUtil.spectrum(gmm, gmmInput) method may -# be converted to NumPy arrays. -# The returned HazMat Spectrum object is not iterable, so do this array -# by array. 
-spectrumResult = hazMat.gmmSpectrum(gmm, gmmparams) -pds = np.array(spectrumResult.periods) -means = np.array(spectrumResult.means) -sigmas = np.array(spectrumResult.sigmas) -print('period, mean, sigma:') -for i in range(len(pds)): - print(pds[i], means[i], sigmas[i]) -# ========================================================================= diff --git a/gradle.properties b/gradle.properties index 7961ce99a669dfb81ce4e8e6e8436fb2e4b3b9d3..5f64b407c17c5874ee42ac159ae4929b48bbedf8 100644 --- a/gradle.properties +++ b/gradle.properties @@ -10,8 +10,8 @@ micronautRxVersion = 2.1.1 micronautPluginVersion = 3.1.1 nodePluginVersion = 3.0.1 nodeVersion = 16.3.0 -nshmpLibVersion = 0.8.2 -nshmpWsUtilsVersion = 0.1.3 +nshmpLibVersion = 0.9.4 +nshmpWsUtilsVersion = 0.1.7 shadowVersion = 7.1.2 spotbugsVersion = 4.7.0 spotlessVersion = 6.0.4 diff --git a/gradle/dependencies.gradle b/gradle/dependencies.gradle index 2a81e3a09e2c2fb9f3dfbaa7a03a4c847efc020d..af19505cd1bef520ab22beb648c802e286e8c3b5 100644 --- a/gradle/dependencies.gradle +++ b/gradle/dependencies.gradle @@ -1,6 +1,8 @@ dependencies { + // NSHMP + // implementation files('../nshmp-lib/build/libs/nshmp-lib.jar') implementation "ghsc:nshmp-lib:${nshmpLibVersion}" implementation "ghsc:nshmp-ws-utils:${nshmpWsUtilsVersion}" diff --git a/settings.gradle b/settings.gradle index 90c2faad1fbd25d62f6b46e92c5d1c0eaf72b53d..0daffad9b55fb3fc714dee47c7498518e07b9de6 100644 --- a/settings.gradle +++ b/settings.gradle @@ -20,6 +20,9 @@ git { fetch("https://code.usgs.gov/ghsc/nshmp/nshms/nshm-hawaii.git", { name "nshmp-haz-dep--nshm-hi-2021" tag "2.0.0" + // fetch("https://code.usgs.gov/ghsc/nshmp/nshms/nshm-conus.git", { + // name "nshmp-haz-dep--nshm-conus-2018" + // tag "main" }) } } diff --git a/src/main/java/gov/usgs/earthquake/nshmp/DisaggCalc.java b/src/main/java/gov/usgs/earthquake/nshmp/DisaggCalc.java index 3719b73010ae7dd9f28c8ac8db3fbb32c28b93a8..2b268e5b92ef29a307470c56d5df8b6680c8c077 100644 --- 
a/src/main/java/gov/usgs/earthquake/nshmp/DisaggCalc.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/DisaggCalc.java @@ -1,21 +1,42 @@ package gov.usgs.earthquake.nshmp; +import static com.google.common.base.Preconditions.checkArgument; +import static com.google.common.base.Preconditions.checkNotNull; import static gov.usgs.earthquake.nshmp.Text.NEWLINE; +import static gov.usgs.earthquake.nshmp.calc.DataType.DISAGG_DATA; +import static gov.usgs.earthquake.nshmp.calc.DataType.GMM; +import static gov.usgs.earthquake.nshmp.calc.DataType.SOURCE; +import static java.util.stream.Collectors.toList; +import static java.util.stream.Collectors.toSet; import java.io.IOException; +import java.io.Writer; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; +import java.util.Arrays; +import java.util.EnumMap; +import java.util.HashSet; +import java.util.LinkedHashMap; import java.util.List; +import java.util.Map; +import java.util.Map.Entry; import java.util.Optional; +import java.util.Set; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.ThreadPoolExecutor; import java.util.logging.FileHandler; import java.util.logging.Logger; +import java.util.stream.Collectors; -import com.google.common.base.Preconditions; +import com.google.common.base.Splitter; +import com.google.common.base.Stopwatch; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableSet; import com.google.common.util.concurrent.MoreExecutors; +import com.google.gson.Gson; +import com.google.gson.GsonBuilder; import gov.usgs.earthquake.nshmp.calc.CalcConfig; import gov.usgs.earthquake.nshmp.calc.Disaggregation; @@ -25,24 +46,45 @@ import gov.usgs.earthquake.nshmp.calc.HazardExport; import gov.usgs.earthquake.nshmp.calc.Site; import gov.usgs.earthquake.nshmp.calc.Sites; import gov.usgs.earthquake.nshmp.calc.ThreadCount; +import gov.usgs.earthquake.nshmp.data.Interpolator; +import 
gov.usgs.earthquake.nshmp.data.XySequence; +import gov.usgs.earthquake.nshmp.gmm.Imt; import gov.usgs.earthquake.nshmp.internal.Logging; import gov.usgs.earthquake.nshmp.model.HazardModel; /** - * Disaggregate probabilisitic seismic hazard at a return period of interest. + * Disaggregate probabilistic seismic hazard at a return period of interest or + * at specific ground motion levels. * * @author U.S. Geological Survey */ public class DisaggCalc { + private static final Gson GSON = new GsonBuilder() + .serializeSpecialFloatingPointValues() + .serializeNulls() + .create(); + /** * Entry point for the disaggregation of probabilisitic seismic hazard. * - * <p>Disaggregating siesmic hazard is largeley identical to a hazard - * calculation except that a return period (in years) must be supplied as an - * additional argument after the 'site(s)' argument. See the - * {@link HazardCalc#main(String[]) HazardCalc program} for more information - * on required parameters. + * <p>Two approaches to disaggregation of seismic hazard are possible with this + * application. In the first approach, the 'sites' file is the same as it + * would be for a hazard calculation, and disaggregation is performed for all + * configured intensity measures at the 'returnPeriod' (in years) of interest + * specified in the config file (default = 2475 years, equivalent to 2% in 50 + * years). + * + * <p>In the second approach, the sites file includes columns for each + * spectral period or other intensity measure and the target ground motion + * level to disaggregate for each. For example, the target values could be + * risk-targeted spectral accelerations, or they could be ground motion levels + * precomputed for a specific return period. + * + * <p>Note that the first approach will do the full hazard calculation and + * compute hazard curves from which the target disaggregation ground motion + * level will be determined. 
In the second approach, the ground motion targets + * are known and the time consuming hazard curve calculation can be avoided. * * <p>Please refer to the nshmp-haz <a * href="https://code.usgs.gov/ghsc/nshmp/nshmp-haz/-/tree/main/docs"> @@ -78,9 +120,10 @@ public class DisaggCalc { Logging.init(); Logger log = Logger.getLogger(DisaggCalc.class.getName()); Path tmpLog = HazardCalc.createTempLog(); + String tmpLogName = checkNotNull(tmpLog.getFileName()).toString(); try { - FileHandler fh = new FileHandler(Preconditions.checkNotNull(tmpLog.getFileName()).toString()); + FileHandler fh = new FileHandler(tmpLogName); fh.setFormatter(new Logging.ConsoleFormatter()); log.getParent().addHandler(fh); @@ -88,6 +131,14 @@ public class DisaggCalc { Path modelPath = Paths.get(args[0]); HazardModel model = HazardModel.load(modelPath); + log.info(""); + Path siteFile = Paths.get(args[1]); + log.info("Site file: " + siteFile.toAbsolutePath().normalize()); + checkArgument( + siteFile.toString().endsWith(".csv"), + "Only *.csv site files supported"); + + /* Calculation configuration, possibly user supplied. */ CalcConfig config = model.config(); if (argCount == 3) { Path userConfigPath = Paths.get(args[2]); @@ -97,13 +148,49 @@ public class DisaggCalc { } log.info(config.toString()); - log.info(""); - List<Site> sites = HazardCalc.readSites(args[1], config, model.siteData(), log); - log.info("Sites: " + Sites.toString(sites)); + /* Column header data. */ + Set<String> allColumns = columns(siteFile); + Set<String> siteColumns = new HashSet<>(allColumns); + siteColumns.retainAll(SITE_KEYS); + int colsToSkip = siteColumns.size(); // needed? 
+ log.info("Site data columns: " + colsToSkip); + + /* Sites */ + List<Site> sites = Sites.fromCsv(siteFile, config, model.siteData()); + log.info("Sites: " + sites.size()); - double returnPeriod = config.disagg.returnPeriod; + Set<Imt> modelImts = model.config().hazard.imts; + + /* + * If no IML columns present, disaggregate at IMTs and return period from + * config, otherwise disaggregate at target IMLs if present. + * + * We've removed support for geojson site files at present. + */ + Path out; + if (siteColumns.size() == allColumns.size()) { + + checkArgument( + modelImts.containsAll(config.hazard.imts), + "Config specifies IMTs not supported by model"); + double returnPeriod = config.disagg.returnPeriod; + out = calcRp(model, config, sites, returnPeriod, log); + + } else { + + List<Imt> imts = readImtList(siteFile, colsToSkip); + checkArgument( + modelImts.containsAll(imts), + "Sites file contains IMTs not supported by model"); + List<Map<Imt, Double>> imls = readSpectra(siteFile, imts, colsToSkip); + checkArgument( + sites.size() == imls.size(), + "Sites and spectra lists different sizes"); + log.info("Spectra: " + imls.size()); // 1:1 with sites + out = calcIml(model, config, sites, imls, log); + + } - Path out = calc(model, config, sites, returnPeriod, log); log.info(PROGRAM + ": finished"); /* Transfer log and write config, windows requires fh.close() */ @@ -118,15 +205,63 @@ } } + private static final Set<String> SITE_KEYS = ImmutableSet.of( + Site.Key.NAME, + Site.Key.LAT, + Site.Key.LON, + Site.Key.VS30, + Site.Key.VS_INF, + Site.Key.Z1P0, + Site.Key.Z2P5); + + private static Set<String> columns(Path path) throws IOException { + String header = Files.lines(path).findFirst().get(); + return Arrays.stream(header.split(",")) + .map(String::trim) + .collect(toSet()); + } + + private static List<Imt> readImtList(Path path, int colsToSkip) throws IOException { + String header = Files.lines(path).findFirst().get(); + return 
Splitter.on(',') + .trimResults() + .splitToList(header) + .stream() + .skip(colsToSkip) + .map(Imt::valueOf) + .collect(ImmutableList.toImmutableList()); + } + + private static List<Map<Imt, Double>> readSpectra(Path path, List<Imt> imts, int colsToSkip) + throws IOException { + return Files.lines(path) + .skip(1) + .map(s -> readSpectra(imts, s, colsToSkip)) + .collect(ImmutableList.toImmutableList()); + } + + private static Map<Imt, Double> readSpectra(List<Imt> imts, String line, int colsToSkip) { + + double[] imls = Splitter.on(',') + .trimResults() + .splitToList(line) + .stream() + .skip(colsToSkip) + .mapToDouble(Double::valueOf) + .toArray(); + + EnumMap<Imt, Double> imtImlMap = new EnumMap<>(Imt.class); + for (int i = 0; i < imts.size(); i++) { + imtImlMap.put(imts.get(i), imls[i]); + } + return imtImlMap; + } + /* * Compute hazard curves using the supplied model, config, and sites. Method * returns the path to the directory where results were written. - * - * TODO consider refactoring to supply an Optional<Double> return period to - * HazardCalc.calc() that will trigger disaggregations if the value is - * present. */ - private static Path calc( + private static Path calcRp( HazardModel model, CalcConfig config, List<Site> sites, @@ -143,26 +278,258 @@ public class DisaggCalc { log.info("Threads: " + ((ThreadPoolExecutor) exec).getCorePoolSize()); } - log.info(PROGRAM + ": calculating ..."); + log.info(PROGRAM + " (return period): calculating ..."); + + HazardExport handler = HazardExport.create(model, config, sites); + Path disaggDir = handler.outputDir().resolve("disagg"); + Files.createDirectory(disaggDir); + + Stopwatch stopwatch = Stopwatch.createStarted(); + int logInterval = sites.size() < 100 ? 1 : sites.size() < 1000 ? 
10 : 100; - HazardExport handler = HazardExport.create(model, config, sites, log); + for (int i = 0; i < sites.size(); i++) { + Site site = sites.get(i); - for (Site site : sites) { Hazard hazard = HazardCalcs.hazard(model, config, site, exec); - Disaggregation disagg = HazardCalcs.disaggReturnPeriod(hazard, returnPeriod, exec); - handler.write(hazard, Optional.of(disagg)); - log.fine(hazard.toString()); + handler.write(hazard); + + Map<Imt, Double> imls = imlsForReturnPeriod(hazard, returnPeriod); + Disaggregation disagg = Disaggregation.atImls(hazard, imls, exec); + + Response response = new Response.Builder() + .config(config) + .site(site) + .returnPeriod(returnPeriod) + .imls(imls) + .disagg(disagg) + .build(); + + String filename = disaggFilename(site); + Path resultPath = disaggDir.resolve(filename); + Writer writer = Files.newBufferedWriter(resultPath); + GSON.toJson(response, writer); + writer.close(); + + int count = i + 1; + if (count % logInterval == 0) { + log.info(String.format( + " %s of %s sites completed in %s", + count, sites.size(), stopwatch)); + } } handler.expire(); log.info(String.format( - PROGRAM + ": %s sites completed in %s", - handler.resultCount(), handler.elapsedTime())); + PROGRAM + " (return period): %s sites completed in %s", + sites.size(), stopwatch.stop())); exec.shutdown(); return handler.outputDir(); } + /* Hazard curves are already in log-x space. */ + private static final Interpolator IML_INTERPOLATER = Interpolator.builder() + .logy() + .decreasingY() + .build(); + + /** Compute the return period intercepts from a hazard result. 
*/ + public static Map<Imt, Double> imlsForReturnPeriod( + Hazard hazard, + double returnPeriod) { + + double rate = 1.0 / returnPeriod; + Map<Imt, Double> imls = new EnumMap<>(Imt.class); + for (Entry<Imt, XySequence> entry : hazard.curves().entrySet()) { + double iml = IML_INTERPOLATER.findX(entry.getValue(), rate); + imls.put(entry.getKey(), Math.exp(iml)); + } + return imls; + } + + /* + * Compute hazard curves using the supplied model, config, and sites. Method + * returns the path to the directory where results were written. + */ + private static Path calcIml( + HazardModel model, + CalcConfig config, + List<Site> sites, + List<Map<Imt, Double>> imls, + Logger log) throws IOException { + + ExecutorService exec = null; + ThreadCount threadCount = config.performance.threadCount; + if (threadCount == ThreadCount.ONE) { + exec = MoreExecutors.newDirectExecutorService(); + log.info("Threads: Running on calling thread"); + } else { + exec = Executors.newFixedThreadPool(threadCount.value()); + log.info("Threads: " + ((ThreadPoolExecutor) exec).getCorePoolSize()); + } + + log.info(PROGRAM + " (IML): calculating ..."); + Path outDir = createOutputDir(config.output.directory); + Path disaggDir = outDir.resolve("disagg"); + Files.createDirectory(disaggDir); + + Stopwatch stopwatch = Stopwatch.createStarted(); + int logInterval = sites.size() < 100 ? 1 : sites.size() < 1000 ? 
10 : 100; + + for (int i = 0; i < sites.size(); i++) { + + Site site = sites.get(i); + Map<Imt, Double> siteImls = imls.get(i); + + Hazard hazard = HazardCalcs.hazard(model, config, site, exec); + Disaggregation disagg = Disaggregation.atImls(hazard, siteImls, exec); + + Response response = new Response.Builder() + .config(config) + .site(site) + .imls(siteImls) + .disagg(disagg) + .build(); + + String filename = disaggFilename(site); + Path resultPath = disaggDir.resolve(filename); + Writer writer = Files.newBufferedWriter(resultPath); + GSON.toJson(response, writer); + writer.close(); + + int count = i + 1; + if (count % logInterval == 0) { + log.info(String.format( + " %s of %s sites completed in %s", + count, sites.size(), stopwatch)); + } + } + + log.info(String.format( + PROGRAM + " (IML): %s sites completed in %s", + sites.size(), stopwatch.stop())); + + exec.shutdown(); + return outDir; + } + + private static final class Response { + + final Response.Metadata metadata; + final Object data; + + Response(Response.Metadata metadata, Object data) { + this.metadata = metadata; + this.data = data; + } + + static final class Metadata { + + final String name; + final double longitude; + final double latitude; + final double vs30; + final Double returnPeriod; + final Map<String, Double> imls; + + Metadata(Site site, Double returnPeriod, Map<Imt, Double> imls) { + this.name = site.name(); + this.longitude = site.location().longitude; + this.latitude = site.location().latitude; + this.vs30 = site.vs30(); + this.returnPeriod = returnPeriod; + this.imls = imls.entrySet().stream() + .collect(Collectors.toMap( + e -> e.getKey().name(), + Entry::getValue, + (x, y) -> y, + () -> new LinkedHashMap<String, Double>())); + } + } + + static final class Builder { + + Site site; + Disaggregation disagg; + Double returnPeriod; // optional + Map<Imt, Double> imls; + CalcConfig config; + + Builder imls(Map<Imt, Double> imls) { + this.imls = imls; + return this; + } + + Builder 
returnPeriod(double returnPeriod) { + this.returnPeriod = returnPeriod; + return this; + } + + Builder site(Site site) { + this.site = site; + return this; + } + + Builder disagg(Disaggregation disagg) { + this.disagg = disagg; + return this; + } + + Builder config(CalcConfig config) { + this.config = config; + return this; + } + + Response build() { + + // default toJson(imt, false, false, false) + List<ImtDisagg> disaggs = imls.keySet().stream() + .map(imt -> new ImtDisagg(imt, disagg.toJson( + imt, + config.output.dataTypes.contains(GMM), + config.output.dataTypes.contains(SOURCE), + config.output.dataTypes.contains(DISAGG_DATA)))) + .collect(toList()); + + return new Response( + new Response.Metadata(site, returnPeriod, imls), + disaggs); + } + } + } + + // this could be consolidated with DisaggService + private static final class ImtDisagg { + final String imt; + final Object data; + + ImtDisagg(Imt imt, Object data) { + this.imt = imt.name(); + this.data = data; + } + } + + // duplicate of that in HazardExport + private static Path createOutputDir(Path dir) throws IOException { + int i = 1; + Path incrementedDir = dir; + while (Files.exists(incrementedDir)) { + incrementedDir = incrementedDir.resolveSibling(dir.getFileName() + "-" + i); + i++; + } + Files.createDirectories(incrementedDir); + return incrementedDir; + } + + private static String disaggFilename(Site site) { + return site.name().equals(Site.NO_NAME) + ? 
String.format( + "%.2f,%.2f.json", + site.location().longitude, + site.location().latitude) + : site.name() + ".json"; + } + private static final String PROGRAM = DisaggCalc.class.getSimpleName(); private static final String USAGE_COMMAND = "java -cp nshmp-haz.jar gov.usgs.earthquake.nshmp.DisaggCalc model sites [config]"; @@ -181,9 +548,10 @@ public class DisaggCalc { .append("Where:").append(NEWLINE) .append(" 'model' is a model directory") .append(NEWLINE) - .append(" 'sites' is a *.csv file or *.geojson file of sites and data") + .append( + " 'sites' is a *.csv file of locations, site parameters and (optional) target ground motion levels") .append(NEWLINE) - .append(" - site class and basin terms are optional") + .append(" - Header: lon,lat,PGA,SA0P01,SA0P02,...") .append(NEWLINE) .append(" 'config' (optional) supplies a calculation configuration") .append(NEWLINE) @@ -191,7 +559,6 @@ public class DisaggCalc { .append("For more information, see:").append(NEWLINE) .append(" ").append(USAGE_URL1).append(NEWLINE) .append(" ").append(USAGE_URL2).append(NEWLINE) - .append(NEWLINE) .toString(); } diff --git a/src/main/java/gov/usgs/earthquake/nshmp/DisaggEpsilon.java b/src/main/java/gov/usgs/earthquake/nshmp/DisaggEpsilon.java deleted file mode 100644 index 066a6fca23ea3a14413174a8f95692d5c76f6067..0000000000000000000000000000000000000000 --- a/src/main/java/gov/usgs/earthquake/nshmp/DisaggEpsilon.java +++ /dev/null @@ -1,362 +0,0 @@ -package gov.usgs.earthquake.nshmp; - -import static com.google.common.base.Preconditions.checkArgument; -import static gov.usgs.earthquake.nshmp.Text.NEWLINE; - -import java.io.IOException; -import java.io.Writer; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.util.ArrayList; -import java.util.EnumMap; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.Set; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; 
-import java.util.concurrent.ThreadPoolExecutor; -import java.util.logging.FileHandler; -import java.util.logging.Logger; - -import com.google.common.base.Preconditions; -import com.google.common.base.Splitter; -import com.google.common.base.Stopwatch; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableSet; -import com.google.common.collect.Sets; -import com.google.common.util.concurrent.MoreExecutors; -import com.google.gson.Gson; -import com.google.gson.GsonBuilder; - -import gov.usgs.earthquake.nshmp.calc.CalcConfig; -import gov.usgs.earthquake.nshmp.calc.Disaggregation; -import gov.usgs.earthquake.nshmp.calc.Hazard; -import gov.usgs.earthquake.nshmp.calc.HazardCalcs; -import gov.usgs.earthquake.nshmp.calc.Site; -import gov.usgs.earthquake.nshmp.calc.Sites; -import gov.usgs.earthquake.nshmp.calc.ThreadCount; -import gov.usgs.earthquake.nshmp.gmm.Imt; -import gov.usgs.earthquake.nshmp.internal.Logging; -import gov.usgs.earthquake.nshmp.model.HazardModel; - -/** - * Disaggregate probabilistic seismic hazard at a return period of interest or - * at specific ground motion levels. - * - * @author U.S. Geological Survey - */ -public class DisaggEpsilon { - - private static final Gson GSON = new GsonBuilder() - .serializeSpecialFloatingPointValues() - .serializeNulls() - .create(); - - /** - * Entry point for the disaggregation of probabilisitic seismic hazard. - * - * <p>Two approaches to disaggregation of seimic hazard are possible with this - * application. In the first approach, the 'sites' file is the same as it - * would be for a hazard calculation, and disaggregation is performed for all - * configured intensity measures at the 'returnPeriod' (in years) of interest - * specified in the config file (default = 2475 years). - * - * <p>In the second approach, the sites file includes columns for each - * spectral period and the target ground motion level to disaggregate for - * each. 
For example, the target values could be a risk-targeted response - * spectrum, or they could be ground motion levels precomputed for a specific - * return period. - * - * <p>It is important to note that the first approach will do the full hazard - * calculation and compute hazard curves from which the target disaggregation - * ground motion level will be determined. In the second approach, the ground - * motion targets are known and the time consuming hazard curve calculation - * can be avoided. - * - * <p>Please refer to the nshmp-haz <a - * href="https://code.usgs.gov/ghsc/nshmp/nshmp-haz/-/tree/main/docs"> - * docs</a> for comprehensive descriptions of source models, configuration - * files, site files, and hazard calculations. - * - * @see <a - * href="https://code.usgs.gov/ghsc/nshmp/nshmp-haz/-/blob/main/docs/pages/Building-&-Running.md"> - * nshmp-haz Building & Running</a> - * @see <a - * href="https://code.usgs.gov/ghsc/nshmp/nshmp-haz/-/tree/main/etc/examples"> - * example calculations</a> - */ - public static void main(String[] args) { - - /* Delegate to run which has a return value for testing. 
*/ - - Optional<String> status = run(args); - if (status.isPresent()) { - System.err.print(status.get()); - System.exit(1); - } - System.exit(0); - } - - static Optional<String> run(String[] args) { - int argCount = args.length; - - if (argCount < 2 || argCount > 3) { - return Optional.of(USAGE); - } - - Logging.init(); - Logger log = Logger.getLogger(DisaggCalc.class.getName()); - Path tmpLog = HazardCalc.createTempLog(); - - try { - FileHandler fh = new FileHandler(Preconditions.checkNotNull(tmpLog.getFileName()).toString()); - fh.setFormatter(new Logging.ConsoleFormatter()); - log.getParent().addHandler(fh); - - log.info(PROGRAM + ": " + HazardCalc.VERSION); - Path modelPath = Paths.get(args[0]); - HazardModel model = HazardModel.load(modelPath); - - log.info(""); - Path siteFile = Paths.get(args[1]); - log.info("Site and spectra file: " + siteFile.toAbsolutePath().normalize()); - checkArgument(siteFile.toString().endsWith(".csv"), "Only *.csv site files supported"); - - int colsToSkip = headerCount(siteFile); - List<Imt> imts = readImtList(siteFile, colsToSkip); - - CalcConfig config = model.config(); - if (argCount == 3) { - Path userConfigPath = Paths.get(args[2]); - config = CalcConfig.copyOf(model.config()) - .extend(CalcConfig.from(userConfigPath)) - .build(); - } - log.info(config.toString()); - - List<Site> sites = ImmutableList.copyOf(Sites.fromCsv(siteFile, config, model.siteData())); - log.info("Sites: " + sites.size()); - - log.info("Site data columns: " + colsToSkip); - List<Map<Imt, Double>> imtImlMaps = readSpectra(siteFile, imts, colsToSkip); - log.info("Spectra: " + imtImlMaps.size()); - - checkArgument(sites.size() == imtImlMaps.size(), "Sites and spectra lists different sizes"); - - Path out = calc(model, config, sites, imtImlMaps, log); - - log.info(PROGRAM + ": finished"); - - /* Transfer log and write config, windows requires fh.close() */ - fh.close(); - Files.move(tmpLog, out.resolve(PROGRAM + ".log")); - config.write(out); - - return 
Optional.empty(); - - } catch (Exception e) { - return HazardCalc.handleError(e, log, tmpLog, args, PROGRAM, USAGE); - } - } - - // TODO removed this set from Site; temp repair - static final Set<String> SITE_KEYS = ImmutableSet.of( - "name", - "lat", - "lon", - "vs30", - "vsInf", - "z1p0", - "z2p5"); - - /* returns the number of site data columns are present. */ - private static int headerCount(Path path) throws IOException { - String header = Files.lines(path).findFirst().get(); - Set<String> columns = ImmutableSet.copyOf(Splitter.on(',').trimResults().split(header)); - return Sets.intersection(columns, SITE_KEYS).size(); - } - - private static List<Imt> readImtList(Path path, int colsToSkip) throws IOException { - String header = Files.lines(path).findFirst().get(); - return Splitter.on(',') - .trimResults() - .splitToList(header) - .stream() - .skip(colsToSkip) - .map(Imt::valueOf) - .collect(ImmutableList.toImmutableList()); - } - - private static List<Map<Imt, Double>> readSpectra(Path path, List<Imt> imts, int colsToSkip) - throws IOException { - return Files.lines(path) - .skip(1) - .map(s -> readSpectra(imts, s, colsToSkip)) - .collect(ImmutableList.toImmutableList()); - } - - private static Map<Imt, Double> readSpectra(List<Imt> imts, String line, int colsToSkip) { - - double[] imls = Splitter.on(',') - .trimResults() - .splitToList(line) - .stream() - .skip(colsToSkip) - .mapToDouble(Double::valueOf) - .toArray(); - - EnumMap<Imt, Double> imtImlMap = new EnumMap<>(Imt.class); - for (int i = 0; i < imts.size(); i++) { - imtImlMap.put(imts.get(i), imls[i]); - } - return imtImlMap; - } - - /* - * Compute hazard curves using the supplied model, config, and sites. Method - * returns the path to the directory where results were written. - * - * TODO consider refactoring to supply an Optional<Double> return period to - * HazardCalc.calc() that will trigger disaggregations if the value is - * present. 
- */ - private static Path calc( - HazardModel model, - CalcConfig config, - List<Site> sites, - List<Map<Imt, Double>> rtrSpectra, - Logger log) throws IOException { - - ExecutorService exec = null; - ThreadCount threadCount = config.performance.threadCount; - if (threadCount == ThreadCount.ONE) { - exec = MoreExecutors.newDirectExecutorService(); - log.info("Threads: Running on calling thread"); - } else { - exec = Executors.newFixedThreadPool(threadCount.value()); - log.info("Threads: " + ((ThreadPoolExecutor) exec).getCorePoolSize()); - } - - log.info(PROGRAM + ": calculating ..."); - Path outDir = createOutputDir(config.output.directory); - Path siteDir = outDir.resolve("vs30-" + (int) sites.get(0).vs30()); - Files.createDirectory(siteDir); - - Stopwatch stopwatch = Stopwatch.createStarted(); - - for (int i = 0; i < sites.size(); i++) { - - Site site = sites.get(i); - Map<Imt, Double> spectrum = rtrSpectra.get(i); - - Hazard hazard = HazardCalcs.hazard(model, config, site, exec); - Disaggregation disagg = Disaggregation.atImls(hazard, spectrum, exec); - - List<Response> responses = new ArrayList<>(spectrum.size()); - for (Imt imt : spectrum.keySet()) { - ResponseData imtMetadata = new ResponseData( - ImmutableList.of(), - site, - imt, - spectrum.get(imt)); - Response response = new Response(imtMetadata, disagg.toJsonCompact(imt)); - responses.add(response); - } - Result result = new Result(responses); - - String filename = String.format( - "edisagg_%.2f_%.2f.json", - site.location().longitude, - site.location().latitude); - - Path resultPath = siteDir.resolve(filename); - Writer writer = Files.newBufferedWriter(resultPath); - GSON.toJson(result, writer); - writer.close(); - log.info(String.format( - " %s of %s sites completed in %s", - i + 1, sites.size(), stopwatch)); - } - - exec.shutdown(); - return siteDir; - } - - private static class Result { - - final List<Response> response; - - Result(List<Response> response) { - this.response = response; - } - } - - 
private static final class ResponseData { - - final List<String> models; - final double longitude; - final double latitude; - final String imt; - final double iml; - final double vs30; - - ResponseData(List<String> models, Site site, Imt imt, double iml) { - this.models = models; - this.longitude = site.location().longitude; - this.latitude = site.location().latitude; - this.imt = imt.toString(); - this.iml = iml; - this.vs30 = site.vs30(); - } - } - - private static final class Response { - - final ResponseData metadata; - final Object data; - - Response(ResponseData metadata, Object data) { - this.metadata = metadata; - this.data = data; - } - } - - static Path createOutputDir(Path dir) throws IOException { - int i = 1; - Path incrementedDir = dir; - while (Files.exists(incrementedDir)) { - incrementedDir = incrementedDir.resolveSibling(dir.getFileName() + "-" + i); - i++; - } - Files.createDirectories(incrementedDir); - return incrementedDir; - } - - private static final String PROGRAM = DisaggEpsilon.class.getSimpleName(); - private static final String USAGE_COMMAND = - "java -cp nshmp-haz.jar gov.usgs.earthquake.nshmp.DisaggEpsilon model sites [config]"; - - private static final String USAGE = new StringBuilder() - .append(NEWLINE) - .append(PROGRAM).append(" [").append(HazardCalc.VERSION).append("]").append(NEWLINE) - .append(NEWLINE) - .append("Usage:").append(NEWLINE) - .append(" ").append(USAGE_COMMAND).append(NEWLINE) - .append(NEWLINE) - .append("Where:").append(NEWLINE) - .append(" 'model' is a model directory") - .append(NEWLINE) - .append( - " 'sites' is a *.csv file of locations, site parameters and (optional) target ground motion levels") - .append(NEWLINE) - .append(" - Header: lon,lat,PGA,SA0P01,SA0P02,...") - .append(NEWLINE) - .append(" (spectral periods must be ascending)") - .append(NEWLINE) - .append(" 'config' (optional) supplies a calculation configuration") - .append(NEWLINE) - .toString(); - -} diff --git 
a/src/main/java/gov/usgs/earthquake/nshmp/HazardCalc.java b/src/main/java/gov/usgs/earthquake/nshmp/HazardCalc.java index 9773931f415fab98e6e6f85bf92bebe5ac51b129..3342bca22cd0ef921c542fb752dd5a1907e1423d 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/HazardCalc.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/HazardCalc.java @@ -21,6 +21,7 @@ import java.util.concurrent.ThreadPoolExecutor; import java.util.logging.FileHandler; import java.util.logging.Logger; +import com.google.common.base.Stopwatch; import com.google.common.base.Throwables; import com.google.common.util.concurrent.MoreExecutors; @@ -49,7 +50,8 @@ public class HazardCalc { * At a minimum, the path to a model directory and a file of site(s) at which * to perform calculations must be specified. Under the 2-argument scenario, * model initialization and calculation configuration settings are drawn from - * the default configuration. Sites may be defined in a CSV or GeoJSON file. + * the default configuration for the model. Sites may be defined in a CSV or + * GeoJSON file. * * <p>To override any default calculation configuration settings, also supply * the path to a configuration file as a third argument. @@ -88,9 +90,10 @@ public class HazardCalc { Logging.init(); Logger log = Logger.getLogger(HazardCalc.class.getName()); Path tmpLog = createTempLog(); + String tmpLogName = checkNotNull(tmpLog.getFileName()).toString(); try { - FileHandler fh = new FileHandler(checkNotNull(tmpLog.getFileName()).toString()); + FileHandler fh = new FileHandler(tmpLogName); fh.setFormatter(new Logging.ConsoleFormatter()); log.getParent().addHandler(fh); @@ -98,6 +101,7 @@ public class HazardCalc { Path modelPath = Paths.get(args[0]); HazardModel model = HazardModel.load(modelPath); + /* Calculation configuration, possibly user supplied. 
*/ CalcConfig config = model.config(); if (argCount == 3) { Path userConfigPath = Paths.get(args[2]); @@ -168,14 +172,24 @@ public class HazardCalc { log.info("Threads: " + ((ThreadPoolExecutor) exec).getCorePoolSize()); log.info(PROGRAM + ": calculating ..."); - HazardExport handler = HazardExport.create(model, config, sites, log); + HazardExport handler = HazardExport.create(model, config, sites); CalcTask.Builder calcTask = new CalcTask.Builder(model, config, exec); WriteTask.Builder writeTask = new WriteTask.Builder(handler); + Stopwatch stopwatch = Stopwatch.createStarted(); + int logInterval = sites.size() < 100 ? 1 : sites.size() < 1000 ? 10 : 100; + Future<Path> out = null; - for (Site site : sites) { + for (int i = 0; i < sites.size(); i++) { + Site site = sites.get(i); Hazard hazard = calcTask.withSite(site).call(); out = exec.submit(writeTask.withResult(hazard)); + int count = i + 1; + if (count % logInterval == 0) { + log.info(String.format( + " %s of %s sites completed in %s", + count, sites.size(), stopwatch)); + } } /* Block shutdown until last task is returned. 
*/ Path outputDir = out.get(); @@ -184,7 +198,7 @@ public class HazardCalc { exec.shutdown(); log.info(String.format( PROGRAM + ": %s sites completed in %s", - handler.resultCount(), handler.elapsedTime())); + handler.resultCount(), stopwatch.stop())); return outputDir; } diff --git a/src/main/java/gov/usgs/earthquake/nshmp/HazardMaps.java b/src/main/java/gov/usgs/earthquake/nshmp/HazardMaps.java index 74cca82232305096990ce79cc9a38200e6b449ef..b4580ef94e478e0733909b9f055b73b29b8a4229 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/HazardMaps.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/HazardMaps.java @@ -53,9 +53,11 @@ public class HazardMaps { */ public static void main(String[] args) { if (args.length < 1) { - System.out.println("Usage: Supply a path to a file of hazard curve results and"); - System.out.println(" optionally a space separated list of return periods (in yr)"); - System.out.println(" default return periods: 475 975 2475"); + System.out.println("Usage: Supply a path to a file of or directory containing hazard"); + System.out.println(" curve results and optionally a space separated list of return"); + System.out.println(" periods (in yr). 
If a directory is specified, nested curve"); + System.out.println(" files are expected to be named 'curves.csv'."); + System.out.println(" Default return periods: 475 975 2475"); return; } @@ -84,9 +86,9 @@ public class HazardMaps { Path curvesPath, List<Integer> returnPeriods, Logger log) throws IOException { - log.info(PROGRAM + ": Creating hazard map dataset:"); - log.info("\tReturn periods: " + returnPeriods.toString()); - log.info("\tPath: " + curvesPath.toAbsolutePath().toString()); + log.info(PROGRAM + ": Creating hazard map datasets..."); + log.info(" Return periods: " + returnPeriods.toString()); + log.info(" Path: " + curvesPath.toAbsolutePath().toString()); if (Files.isDirectory(curvesPath)) { CurvesVisitor curvesFinder = new CurvesVisitor(returnPeriods); diff --git a/src/main/java/gov/usgs/earthquake/nshmp/RateCalc.java b/src/main/java/gov/usgs/earthquake/nshmp/RateCalc.java index 8c59131d9327b57f5e3abd2f40e2d08085f4e4e9..581b40dca6728473e1965fdd02f1bc06070f5b96 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/RateCalc.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/RateCalc.java @@ -54,9 +54,9 @@ public class RateCalc { * argument. * * <p>Please refer to the nshmp-haz <a - * href="https://github.com/usgs/nshmp-haz/wiki">wiki</a> for comprehensive - * descriptions of source models, configuration files, site files, and - * earthquake rate calculations. + * href="https://code.usgs.gov/ghsc/nshmp/nshmp-haz/-/tree/main/docs">documentation</a> + * for comprehensive descriptions of source models, configuration files, site + * files, and earthquake rate calculations. * * @see <a * href="https://code.usgs.gov/ghsc/nshmp/nshmp-haz/-/blob/main/docs/pages/Building-&-Running.md"> @@ -186,9 +186,6 @@ public class RateCalc { * there are one or more longer-running calcs in the batch, processing * batches of locations to a List preserves submission order; as opposed to * using FutureCallbacks, which will reorder sites on export. 
- * - * TODO this is a terrible implementation with batch size 10. resulted from - * refactor to exports not queueing results */ for (Site site : sites) { Callable<EqRate> task = EqRate.callable(model, config, site); diff --git a/src/main/java/gov/usgs/earthquake/nshmp/site/CybershakeSite.java b/src/main/java/gov/usgs/earthquake/nshmp/site/CybershakeSite.java index 712fb6fc69510fc100414f5069edbcce2892dfab..381332a4621cade7348db2ddd2db4142a4ac6242 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/site/CybershakeSite.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/site/CybershakeSite.java @@ -17,9 +17,6 @@ public enum CybershakeSite implements NamedLocation { * that there are scattered inconsistencies between the Wills and CVM vs30 * values (e.g. LAPD) Sites beginning with lowercase s have been capitalized. * - * CVM vs30 values for [S603, S684], [S474, S476], [S644, S646], and [S688, - * S689] are identical; waiting on reponse from Callahan or Milner. - * * Site S603 is repeated (and commented out) in Inland Empire group. */ diff --git a/src/main/java/gov/usgs/earthquake/nshmp/site/NshmpSite.java b/src/main/java/gov/usgs/earthquake/nshmp/site/NshmpSite.java index f8754e83931d8719cb5729a027a7899c69ca75fb..21ef1cd42b72ed2243ae3623d23b1e29c878f496 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/site/NshmpSite.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/site/NshmpSite.java @@ -20,10 +20,6 @@ import gov.usgs.earthquake.nshmp.internal.UsRegion; */ public enum NshmpSite implements NamedLocation { - // TODO move this and other nshmp specific classes to nshmp-haz - // TODO update output files which should also be part of nshmp-haz - // TODO do we need ATC sites? 
- /* Northern CA (16) */ BIG_SUR_CA(-121.75, 36.25), COALINGA_CA(-120.40, 36.15), diff --git a/src/main/java/gov/usgs/earthquake/nshmp/site/NshmpSiteFiles.java b/src/main/java/gov/usgs/earthquake/nshmp/site/NshmpSiteFiles.java index 5d1022116e4f4160f5cb1f90c541cce3b5695338..07badfde449502d54134a1b13b49c34cb3bb004d 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/site/NshmpSiteFiles.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/site/NshmpSiteFiles.java @@ -56,7 +56,7 @@ import gov.usgs.earthquake.nshmp.geo.json.Properties.Style; */ final class NshmpSiteFiles { - // TODO consider removing this to nshm-model-dev + // Consider removing this to nshm-model-dev // keeping the outputs in nshmp-haz /* @@ -82,7 +82,7 @@ final class NshmpSiteFiles { /* * Currently, we're exporting map regions as polygons. Although the GeoJSON - * spec supports polygons with holes (and hence 3-dimensional arrays, we only + * spec supports polygons with holes (and hence 3-dimensional arrays), we only * support singular polygons. Polygons render better than PointStrings in any * event. 
*/ @@ -185,19 +185,24 @@ final class NshmpSiteFiles { static void writeNshmpSummaryPoly() throws IOException { Set<NshmpPolygon> polys = EnumSet.range(LA_BASIN, UCERF3_NSHM14); + + List<String> nameList = polys.stream() + .map(Functions.toStringFunction()) + .collect(Collectors.toList()); + + List<LocationList> coordList = polys.stream() + .map(new Function<NshmpPolygon, LocationList>() { + @Override + public LocationList apply(NshmpPolygon poly) { + return poly.coordinates(); + } + }::apply) + .collect(Collectors.toList()); + writePolysJson( EXPORT_DIR.resolve("map-nshmp-all.geojson"), - polys.stream() - .map(Functions.toStringFunction()) - .collect(Collectors.toList()), - polys.stream() - .map(new Function<NshmpPolygon, LocationList>() { - @Override - public LocationList apply(NshmpPolygon poly) { - return poly.coordinates(); - } - }::apply) - .collect(Collectors.toList())); + nameList, + coordList); } static void writePolysJson( @@ -210,7 +215,6 @@ final class NshmpSiteFiles { .put("spacing", 0.1); int i = 0; - // TODO this incrementer is messed up // can't name and coords come as a map? 
for (LocationList border : coordList) { props.put(Style.TITLE, nameList.get(i++)); @@ -395,7 +399,6 @@ final class NshmpSiteFiles { .put(Style.MARKER_SIZE, "small"); for (NamedLocation loc : sites) { - // TODO test loc vs loc.toString() b.add(Feature.point(loc.location()) .properties(props .put(Style.TITLE, loc.toString()) diff --git a/src/main/java/gov/usgs/earthquake/nshmp/www/HazardController.java b/src/main/java/gov/usgs/earthquake/nshmp/www/HazardController.java deleted file mode 100644 index fdc74747815374a10fb5977cf022ceb16a05319a..0000000000000000000000000000000000000000 --- a/src/main/java/gov/usgs/earthquake/nshmp/www/HazardController.java +++ /dev/null @@ -1,81 +0,0 @@ -package gov.usgs.earthquake.nshmp.www; - -import gov.usgs.earthquake.nshmp.www.services.HazardService; -import gov.usgs.earthquake.nshmp.www.services.HazardService.QueryParameters; - -import io.micronaut.http.HttpRequest; -import io.micronaut.http.HttpResponse; -import io.micronaut.http.annotation.Controller; -import io.micronaut.http.annotation.Get; -import io.micronaut.http.annotation.PathVariable; -import io.micronaut.http.annotation.QueryValue; -import io.swagger.v3.oas.annotations.Operation; -import io.swagger.v3.oas.annotations.media.Schema; -import io.swagger.v3.oas.annotations.responses.ApiResponse; -import io.swagger.v3.oas.annotations.tags.Tag; -import jakarta.inject.Inject; - -/** - * Micronaut controller for probabilistic seismic hazard calculations. - * - * @see HazardService - * @author U.S. 
Geological Survey - */ -@Tag( - name = "Hazard", - description = "USGS NSHMP hazard calculation service") -@Controller("/hazard") -public class HazardController { - - @Inject - private NshmpMicronautServlet servlet; - - @Operation( - summary = "Hazard model and service metadata", - description = "Returns details of the installed model and service request parameters", - operationId = "hazard_doGetMetadata") - @ApiResponse( - description = "Hazard service metadata", - responseCode = "200") - @Get - public HttpResponse<String> doGetMetadata(HttpRequest<?> request) { - return HazardService.handleDoGetMetadata(request); - } - - /** - * @param longitude Longitude in decimal degrees [-360..360] - * @param latitude Latitude in decimal degrees [-90..90] - * @param vs30 Site Vs30 value in m/s [150..3000] - * @param truncate Truncate curves at return periods below ~10,000 years - * @param maxdir Apply max-direction scaling - */ - @Operation( - summary = "Compute probabilisitic hazard at a site", - description = "Returns hazard curves computed from the installed model", - operationId = "hazard_doGetHazard") - @ApiResponse( - description = "Hazard curves", - responseCode = "200") - @Get(uri = "/{longitude}/{latitude}/{vs30}{?truncate,maxdir}") - public HttpResponse<String> doGetHazard( - HttpRequest<?> request, - - @Schema(minimum = "-360", maximum = "360") @PathVariable double longitude, - - @Schema(minimum = "-90", maximum = "90") @PathVariable double latitude, - - @Schema(minimum = "150", maximum = "3000") @PathVariable int vs30, - - @QueryValue(defaultValue = "false") boolean truncate, - - @QueryValue(defaultValue = "false") boolean maxdir) { - - /* - * @Schema annotation parameter constraints only affect Swagger service - * index page behavior; still need to validate against model. 
TODO - */ - - var query = new QueryParameters(longitude, latitude, vs30, truncate, maxdir); - return HazardService.handleDoGetHazard(request, query); - } -} diff --git a/src/main/java/gov/usgs/earthquake/nshmp/www/services/ServicesUtil.java b/src/main/java/gov/usgs/earthquake/nshmp/www/ServicesUtil.java similarity index 71% rename from src/main/java/gov/usgs/earthquake/nshmp/www/services/ServicesUtil.java rename to src/main/java/gov/usgs/earthquake/nshmp/www/ServicesUtil.java index a3e6d6a37efc79187c2629651abfd3abc419bfcb..3bdcab696e214e4d4940244a3ad557070552a54b 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/www/services/ServicesUtil.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/www/ServicesUtil.java @@ -1,37 +1,19 @@ -package gov.usgs.earthquake.nshmp.www.services; +package gov.usgs.earthquake.nshmp.www; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ExecutionException; import java.util.function.Function; -import com.google.gson.GsonBuilder; - import gov.usgs.earthquake.nshmp.calc.CalcConfig; import gov.usgs.earthquake.nshmp.calc.Hazard; import gov.usgs.earthquake.nshmp.calc.HazardCalcs; import gov.usgs.earthquake.nshmp.calc.Site; import gov.usgs.earthquake.nshmp.model.HazardModel; -import gov.usgs.earthquake.nshmp.www.Response; -import gov.usgs.earthquake.nshmp.www.WsUtils; -import gov.usgs.earthquake.nshmp.www.meta.Status; - -import io.micronaut.http.HttpResponse; public class ServicesUtil { - public static HttpResponse<String> handleError( - Throwable e, - String name, - String url) { - var msg = e.getMessage() + " (see logs)"; - var svcResponse = new Response<>(Status.ERROR, name, url, msg, url); - var gson = new GsonBuilder().setPrettyPrinting().create(); - var response = gson.toJson(svcResponse); - e.printStackTrace(); - return HttpResponse.serverError(response); - } - - static Hazard calcHazard( + @Deprecated + public static Hazard calcHazard( Function<HazardModel, CalcConfig> configFunction, Function<CalcConfig, 
Site> siteFunction) throws InterruptedException, ExecutionException { @@ -43,12 +25,12 @@ public class ServicesUtil { } @Deprecated - static class ServiceQueryData implements ServiceQuery { + public static class ServiceQueryData implements ServiceQuery { public final Double longitude; public final Double latitude; - ServiceQueryData(Double longitude, Double latitude) { + public ServiceQueryData(Double longitude, Double latitude) { this.longitude = longitude; this.latitude = latitude; } @@ -66,7 +48,7 @@ public class ServicesUtil { } @Deprecated - static class ServiceRequestData { + public static class ServiceRequestData { public final double longitude; public final double latitude; @@ -77,7 +59,7 @@ public class ServicesUtil { } } - enum Key { + public enum Key { EDITION, REGION, MODEL, @@ -110,6 +92,7 @@ public class ServicesUtil { void checkValues(); } + @Deprecated private static CompletableFuture<Hazard> calcHazard( HazardModel model, CalcConfig config, diff --git a/src/main/java/gov/usgs/earthquake/nshmp/www/services/ServletUtil.java b/src/main/java/gov/usgs/earthquake/nshmp/www/ServletUtil.java similarity index 62% rename from src/main/java/gov/usgs/earthquake/nshmp/www/services/ServletUtil.java rename to src/main/java/gov/usgs/earthquake/nshmp/www/ServletUtil.java index 81651afd7d44ac3d968008892b7e7d1dc03700eb..f4b7b1e98f8ac64783e6eed698dc7d8b60a64beb 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/www/services/ServletUtil.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/www/ServletUtil.java @@ -1,4 +1,4 @@ -package gov.usgs.earthquake.nshmp.www.services; +package gov.usgs.earthquake.nshmp.www; import static java.lang.Runtime.getRuntime; @@ -14,6 +14,10 @@ import java.util.HashMap; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.common.base.Stopwatch; import com.google.common.util.concurrent.ListeningExecutorService; import 
com.google.common.util.concurrent.MoreExecutors; import com.google.gson.Gson; @@ -25,14 +29,16 @@ import com.google.gson.JsonSerializer; import gov.usgs.earthquake.nshmp.calc.Site; import gov.usgs.earthquake.nshmp.calc.ValueFormat; +import gov.usgs.earthquake.nshmp.geo.Location; import gov.usgs.earthquake.nshmp.gmm.Imt; import gov.usgs.earthquake.nshmp.model.HazardModel; -import gov.usgs.earthquake.nshmp.www.WsUtils; +import gov.usgs.earthquake.nshmp.model.SiteData; import gov.usgs.earthquake.nshmp.www.meta.MetaUtil; import io.micronaut.context.annotation.Value; import io.micronaut.context.event.ShutdownEvent; import io.micronaut.context.event.StartupEvent; +import io.micronaut.http.HttpResponse; import io.micronaut.runtime.event.annotation.EventListener; import jakarta.inject.Singleton; @@ -45,11 +51,14 @@ import jakarta.inject.Singleton; public class ServletUtil { public static final Gson GSON; + public static final Gson GSON2; + + public static final ListeningExecutorService CALC_EXECUTOR; + public static final ExecutorService TASK_EXECUTOR; - static final ListeningExecutorService CALC_EXECUTOR; - static final ExecutorService TASK_EXECUTOR; + public static final int THREAD_COUNT; - static final int THREAD_COUNT; + private static final Logger LOGGER = LoggerFactory.getLogger(ServletUtil.class); @Value("${nshmp-haz.model-path}") private Path modelPath; @@ -57,7 +66,6 @@ public class ServletUtil { private static HazardModel HAZARD_MODEL; static { - /* TODO modified for disagg-epsilon branch; should be context var */ THREAD_COUNT = getRuntime().availableProcessors(); CALC_EXECUTOR = MoreExecutors.listeningDecorator(Executors.newFixedThreadPool(THREAD_COUNT)); TASK_EXECUTOR = Executors.newSingleThreadExecutor(); @@ -71,9 +79,20 @@ public class ServletUtil { .serializeNulls() .setPrettyPrinting() .create(); + + // removed old IMT and ValueFormat enum serialization + GSON2 = new GsonBuilder() + .registerTypeAdapter(Double.class, new WsUtils.DoubleSerializer()) + 
.registerTypeAdapter(Site.class, new MetaUtil.SiteSerializer()) + .registerTypeHierarchyAdapter(Path.class, new PathConverter()) + .disableHtmlEscaping() + .serializeNulls() + .setPrettyPrinting() + .create(); + } - static HazardModel model() { + public static HazardModel model() { return HAZARD_MODEL; } @@ -141,4 +160,57 @@ public class ServletUtil { } } + public static HttpResponse<String> error( + Logger logger, + Throwable e, + String name, + String url) { + var msg = e.getMessage() + " (see logs)"; + var svcResponse = ResponseBody.error() + .name(name) + .url(url) + .request(url) + .response(msg) + .build(); + var response = GSON2.toJson(svcResponse); + logger.error("Servlet error", e); + return HttpResponse.serverError(response); + } + + public static String imtShortLabel(Imt imt) { + if (imt.equals(Imt.PGA) || imt.equals(Imt.PGV)) { + return imt.name(); + } else if (imt.isSA()) { + return imt.period() + " s"; + } + return imt.toString(); + } + + public static Object serverData(int threads, Stopwatch timer) { + return new Server(threads, timer); + } + + public static Site createSite(Location location, double vs30, SiteData siteData) { + Site.Builder builder = Site.builder() + .location(location) + .vs30(vs30); + SiteData.Values sdValues = siteData.get(location); + sdValues.z1p0.ifPresent(builder::z1p0); + sdValues.z2p5.ifPresent(builder::z2p5); + return builder.build(); + } + + private static class Server { + + final int threads; + final String timer; + final String version; + + Server(int threads, Stopwatch timer) { + this.threads = threads; + this.timer = timer.toString(); + this.version = "TODO where to get version?"; + } + } + } diff --git a/src/main/java/gov/usgs/earthquake/nshmp/www/SwaggerController.java b/src/main/java/gov/usgs/earthquake/nshmp/www/SwaggerController.java index c2251caba5d3ed3612ed43e1aa8ca14404f7f381..dc4d6fb3cdf414424b09634b25c7d34bdd67f97c 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/www/SwaggerController.java +++ 
b/src/main/java/gov/usgs/earthquake/nshmp/www/SwaggerController.java @@ -3,9 +3,9 @@ package gov.usgs.earthquake.nshmp.www; import java.nio.charset.StandardCharsets; import java.util.stream.Collectors; -import com.google.common.io.Resources; +import org.slf4j.LoggerFactory; -import gov.usgs.earthquake.nshmp.www.services.ServicesUtil; +import com.google.common.io.Resources; import io.micronaut.http.HttpRequest; import io.micronaut.http.HttpResponse; @@ -40,7 +40,9 @@ public class SwaggerController { .collect(Collectors.joining("\n")); return HttpResponse.ok(yml); } catch (Exception e) { - return ServicesUtil.handleError(e, "Swagger", request.getUri().getPath()); + return ServletUtil.error( + LoggerFactory.getLogger("Swagger"), + e, "Swagger", request.getUri().toString()); } } } diff --git a/src/main/java/gov/usgs/earthquake/nshmp/www/hazard/DisaggController.java b/src/main/java/gov/usgs/earthquake/nshmp/www/hazard/DisaggController.java new file mode 100644 index 0000000000000000000000000000000000000000..a525b1b8c1124939e10f9383e5f43409f8d99297 --- /dev/null +++ b/src/main/java/gov/usgs/earthquake/nshmp/www/hazard/DisaggController.java @@ -0,0 +1,169 @@ +package gov.usgs.earthquake.nshmp.www.hazard; + +import static com.google.common.base.Preconditions.checkArgument; + +import java.util.Map; +import java.util.Set; + +import gov.usgs.earthquake.nshmp.calc.DataType; +import gov.usgs.earthquake.nshmp.gmm.Imt; +import gov.usgs.earthquake.nshmp.www.NshmpMicronautServlet; +import gov.usgs.earthquake.nshmp.www.ServletUtil; +import gov.usgs.earthquake.nshmp.www.hazard.DisaggService.DisaggDataType; +import gov.usgs.earthquake.nshmp.www.hazard.HazardService.HazardImt; + +import io.micronaut.core.annotation.Nullable; +import io.micronaut.http.HttpRequest; +import io.micronaut.http.HttpResponse; +import io.micronaut.http.MediaType; +import io.micronaut.http.annotation.Controller; +import io.micronaut.http.annotation.Get; +import io.micronaut.http.annotation.PathVariable; +import 
io.micronaut.http.annotation.QueryValue; +import io.swagger.v3.oas.annotations.Operation; +import io.swagger.v3.oas.annotations.media.Schema; +import io.swagger.v3.oas.annotations.responses.ApiResponse; +import io.swagger.v3.oas.annotations.tags.Tag; +import jakarta.inject.Inject; + +/** + * Micronaut web service controller for disaggregation of probabilistic seismic + * hazard. + * + * @author U.S. Geological Survey + */ +@Tag( + name = "Disaggregation", + description = "USGS NSHMP hazard disaggregation service") +@Controller("/disagg") +public class DisaggController { + + @Inject + private NshmpMicronautServlet servlet; + + @Operation( + summary = "Disaggregation model and service metadata", + description = "Returns details of the installed model and service request parameters") + @ApiResponse( + description = "Disaggregation service metadata", + responseCode = "200") + @Get(produces = MediaType.APPLICATION_JSON) + public HttpResponse<String> doGetMetadata(HttpRequest<?> http) { + try { + return DisaggService.getMetadata(http); + } catch (Exception e) { + return ServletUtil.error( + DisaggService.LOG, e, + DisaggService.NAME, + http.getUri().toString()); + } + } + + /** + * @param longitude Longitude in the range [-360..360]°. + * @param latitude Latitude in the range [-90..90]°. + * @param vs30 Site Vs30 value in the range [150..3000] m/s. + * @param returnPeriod The return period of the target ground motion, or + * intensity measure level (IML), in the range [1..20000] years. + * @param imt Optional IMTs at which to compute hazard. If none are supplied, + * then the supported set for the installed model is used. Responses + * for numerous IMT's are quite large, on the order of MB. 
+ * @param out The data types to output + */ + @Operation( + summary = "Disaggregate hazard at a specified return period", + description = "Returns a hazard disaggregation computed from the installed model") + @ApiResponse( + description = "Disaggregation", + responseCode = "200") + @Get( + uri = "rp/{longitude}/{latitude}/{vs30}/{returnPeriod}{?imt}", + produces = MediaType.APPLICATION_JSON) + public HttpResponse<String> doGetDisaggReturnPeriod( + HttpRequest<?> http, + @Schema( + minimum = "-360", + maximum = "360") @PathVariable double longitude, + @Schema( + minimum = "-90", + maximum = "90") @PathVariable double latitude, + @Schema( + minimum = "150", + maximum = "3000") @PathVariable double vs30, + @Schema( + minimum = "1", + maximum = "20000") @PathVariable double returnPeriod, + @QueryValue @Nullable Set<HazardImt> imt, + @QueryValue @Nullable Set<DisaggDataType> out) { + try { + Set<Imt> imts = HazardService.readImts(http); + Set<DataType> dataTypes = HazardService.readDataTypes(http); + DisaggService.RequestRp request = new DisaggService.RequestRp( + http, + longitude, latitude, vs30, + returnPeriod, + imts, + dataTypes); + return DisaggService.getDisaggRp(request); + } catch (Exception e) { + return ServletUtil.error( + DisaggService.LOG, e, + DisaggService.NAME, + http.getUri().toString()); + } + } + + /** + * @param longitude Longitude in the range [-360..360]°. + * @param latitude Latitude in decimal degrees [-90..90]°. + * @param vs30 Site Vs30 value in the range [150..3000] m/s. 
+ * @param out The data types to output + */ + @Operation( + summary = "Disaggregate hazard at specified IMLs", + description = "Returns a hazard disaggregation computed from the installed model") + @ApiResponse( + description = "Disaggregation", + responseCode = "200") + @Get( + uri = "iml/{longitude}/{latitude}/{vs30}", + produces = MediaType.APPLICATION_JSON) + public HttpResponse<String> doGetDisaggIml( + HttpRequest<?> http, + @Schema( + minimum = "-360", + maximum = "360") @PathVariable double longitude, + @Schema( + minimum = "-90", + maximum = "90") @PathVariable double latitude, + @Schema( + minimum = "150", + maximum = "3000") @PathVariable double vs30, + @QueryValue @Nullable Set<DisaggDataType> out) { + + /* + * Developer notes: + * + * It is awkward to support IMT=#; numerous unique keys that may or may not + * be present yields a clunky swagger interface. The disagg-iml endpoint + * requires one or more IMT=# query arguments. Document in example. + */ + + try { + Map<Imt, Double> imtImlMap = http.getParameters().asMap(Imt.class, Double.class); + checkArgument(!imtImlMap.isEmpty(), "No IMLs supplied"); + Set<DataType> dataTypes = HazardService.readDataTypes(http); + DisaggService.RequestIml request = new DisaggService.RequestIml( + http, + longitude, latitude, vs30, + imtImlMap, + dataTypes); + return DisaggService.getDisaggIml(request); + } catch (Exception e) { + return ServletUtil.error( + DisaggService.LOG, e, + DisaggService.NAME, + http.getUri().toString()); + } + } +} diff --git a/src/main/java/gov/usgs/earthquake/nshmp/www/hazard/DisaggService.java b/src/main/java/gov/usgs/earthquake/nshmp/www/hazard/DisaggService.java new file mode 100644 index 0000000000000000000000000000000000000000..a4e527bf761e48133bd97b69362b0d5b5591f4c9 --- /dev/null +++ b/src/main/java/gov/usgs/earthquake/nshmp/www/hazard/DisaggService.java @@ -0,0 +1,344 @@ +package gov.usgs.earthquake.nshmp.www.hazard; + +import static 
gov.usgs.earthquake.nshmp.calc.DataType.DISAGG_DATA; +import static gov.usgs.earthquake.nshmp.calc.DataType.GMM; +import static gov.usgs.earthquake.nshmp.calc.DataType.SOURCE; +import static java.util.stream.Collectors.toList; + +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.Set; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.ExecutionException; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.common.base.Stopwatch; +import com.google.common.collect.Range; + +import gov.usgs.earthquake.nshmp.DisaggCalc; +import gov.usgs.earthquake.nshmp.calc.CalcConfig; +import gov.usgs.earthquake.nshmp.calc.DataType; +import gov.usgs.earthquake.nshmp.calc.Disaggregation; +import gov.usgs.earthquake.nshmp.calc.Hazard; +import gov.usgs.earthquake.nshmp.calc.HazardCalcs; +import gov.usgs.earthquake.nshmp.calc.Site; +import gov.usgs.earthquake.nshmp.geo.Location; +import gov.usgs.earthquake.nshmp.gmm.Imt; +import gov.usgs.earthquake.nshmp.model.HazardModel; +import gov.usgs.earthquake.nshmp.www.ResponseBody; +import gov.usgs.earthquake.nshmp.www.ServletUtil; +import gov.usgs.earthquake.nshmp.www.hazard.HazardService.Metadata; +import gov.usgs.earthquake.nshmp.www.meta.Parameter; + +import io.micronaut.http.HttpRequest; +import io.micronaut.http.HttpResponse; +import jakarta.inject.Singleton; + +/** + * Disaggregation service. + * + * @see DisaggController + * @author U.S. 
Geological Survey + */ +@Singleton +public final class DisaggService { + + /* + * Developer notes: + * + * Same query structure as hazard service, but either return period and imt(s) + * OR imt=iml pairs + */ + + static final String NAME = "Disaggregation Service"; + static final Logger LOG = LoggerFactory.getLogger(DisaggService.class); + + private static Range<Double> rpRange = Range.closed(1.0, 20000.0); + private static Range<Double> imlRange = Range.closed(0.0001, 8.0); + + /* For Swagger selctions */ + enum DisaggDataType { + GMM, + SOURCE, + DISAGG_DATA; + } + + /** HazardController.doGetMetadata() handler. */ + static HttpResponse<String> getMetadata(HttpRequest<?> request) { + var url = request.getUri().toString(); + var usage = new Metadata(ServletUtil.model()); + var response = ResponseBody.usage() + .name(NAME) + .url(url) + .request(url) + .response(usage) + .build(); + var svcResponse = ServletUtil.GSON.toJson(response); + return HttpResponse.ok(svcResponse); + } + + /** HazardController.doGetDisaggIml() handler. */ + static HttpResponse<String> getDisaggIml(RequestIml request) + throws InterruptedException, ExecutionException { + var stopwatch = Stopwatch.createStarted(); + var disagg = calcDisaggIml(request); + var response = new Response.Builder() + .timer(stopwatch) + .request(request) + .disagg(disagg) + .build(); + var body = ResponseBody.success() + .name(NAME) + .url(request.http.getUri().toString()) + .request(request) + .response(response) + .build(); + String svcResponse = ServletUtil.GSON2.toJson(body); + return HttpResponse.ok(svcResponse); + } + + /** HazardController.doGetDisaggRp() handler. 
*/ + static HttpResponse<String> getDisaggRp(RequestRp request) + throws InterruptedException, ExecutionException { + var stopwatch = Stopwatch.createStarted(); + var disagg = calcDisaggRp(request); + var response = new Response.Builder() + .timer(stopwatch) + .request(request) + .disagg(disagg) + .build(); + var body = ResponseBody.success() + .name(NAME) + .url(request.http.getUri().toString()) + .request(request) + .response(response) + .build(); + String svcResponse = ServletUtil.GSON2.toJson(body); + return HttpResponse.ok(svcResponse); + } + + /* + * Developer notes: + * + * If disaggIml, we need to do the calculation for single XySeqs if disaggRp, + * we don't know the imls so must compute hazard over the full curve + * + */ + + private static Disaggregation calcDisaggIml(RequestIml request) + throws InterruptedException, ExecutionException { + + HazardModel model = ServletUtil.model(); + + // modify config to include service endpoint arguments + CalcConfig config = CalcConfig.copyOf(model.config()) + .imts(request.imls.keySet()) + .build(); + + Location loc = Location.create(request.longitude, request.latitude); + Site site = ServletUtil.createSite(loc, request.vs30, model.siteData()); + + // use HazardService.calcHazard() instead? 
+ CompletableFuture<Hazard> hazFuture = CompletableFuture.supplyAsync( + () -> HazardCalcs.hazard( + model, config, site, + ServletUtil.CALC_EXECUTOR), + ServletUtil.TASK_EXECUTOR); + + Hazard hazard = hazFuture.get(); + + CompletableFuture<Disaggregation> disaggfuture = CompletableFuture.supplyAsync( + () -> Disaggregation.atImls( + hazard, request.imls, + ServletUtil.CALC_EXECUTOR), + ServletUtil.TASK_EXECUTOR); + + Disaggregation disagg = disaggfuture.get(); + + return disagg; + } + + private static Disaggregation calcDisaggRp(RequestRp request) + throws InterruptedException, ExecutionException { + + HazardModel model = ServletUtil.model(); + + // modify config to include service endpoint arguments + CalcConfig config = CalcConfig.copyOf(model.config()) + .imts(request.imts) + .build(); + + Location loc = Location.create(request.longitude, request.latitude); + Site site = ServletUtil.createSite(loc, request.vs30, model.siteData()); + + // could just get from HazardService + CompletableFuture<Hazard> hazFuture = CompletableFuture.supplyAsync( + () -> HazardCalcs.hazard( + model, config, site, + ServletUtil.CALC_EXECUTOR), + ServletUtil.TASK_EXECUTOR); + + Hazard hazard = hazFuture.get(); + Map<Imt, Double> imls = DisaggCalc.imlsForReturnPeriod( + hazard, + request.returnPeriod); + + CompletableFuture<Disaggregation> disaggfuture = CompletableFuture.supplyAsync( + () -> Disaggregation.atImls( + hazard, imls, + ServletUtil.CALC_EXECUTOR), + ServletUtil.TASK_EXECUTOR); + + Disaggregation disagg = disaggfuture.get(); + + return disagg; + } + + static final class RequestIml { + + final transient HttpRequest<?> http; + final double longitude; + final double latitude; + final double vs30; + final Map<Imt, Double> imls; + final Set<DataType> dataTypes; + + RequestIml( + HttpRequest<?> http, + double longitude, + double latitude, + double vs30, + Map<Imt, Double> imls, + Set<DataType> dataTypes) { + + this.http = http; + this.longitude = longitude; + this.latitude = 
latitude; + this.vs30 = vs30; + this.imls = imls; + this.dataTypes = dataTypes; + } + } + + static final class RequestRp { + + final transient HttpRequest<?> http; + final double longitude; + final double latitude; + final double vs30; + final double returnPeriod; + final Set<Imt> imts; + final Set<DataType> dataTypes; + + RequestRp( + HttpRequest<?> http, + double longitude, + double latitude, + double vs30, + double returnPeriod, + Set<Imt> imts, + Set<DataType> dataTypes) { + + this.http = http; + this.longitude = longitude; + this.latitude = latitude; + this.vs30 = vs30; + this.returnPeriod = returnPeriod; + this.imts = imts.isEmpty() + ? ServletUtil.model().config().hazard.imts + : imts; + this.dataTypes = dataTypes; + } + } + + private static final class Response { + final Response.Metadata metadata; + final List<ImtDisagg> disaggs; + + Response(Response.Metadata metadata, List<ImtDisagg> disaggs) { + this.metadata = metadata; + this.disaggs = disaggs; + } + + private static final class Metadata { + final Object server; + final String rlabel = "Closest Distance, rRup (km)"; + final String mlabel = "Magnitude (Mw)"; + final String εlabel = "% Contribution to Hazard"; + final Object εbins; + + Metadata(Object server, Object εbins) { + this.server = server; + this.εbins = εbins; + } + } + + private static final class Builder { + + Stopwatch timer; + Optional<RequestRp> requestRp = Optional.empty(); + Optional<RequestIml> requestIml = Optional.empty(); + Disaggregation disagg; + CalcConfig config; + + Builder timer(Stopwatch timer) { + this.timer = timer; + return this; + } + + Builder request(Object request) { + if (request instanceof RequestRp) { + requestRp = Optional.of((RequestRp) request); + return this; + } + requestIml = Optional.of((RequestIml) request); + return this; + } + + Builder disagg(Disaggregation disagg) { + this.disagg = disagg; + return this; + } + + Response build() { + + Set<Imt> imts = requestRp.isPresent() + ? 
requestRp.orElseThrow().imts + : requestIml.orElseThrow().imls.keySet(); + + Set<DataType> dataTypes = requestRp.isPresent() + ? requestRp.orElseThrow().dataTypes + : requestIml.orElseThrow().dataTypes; + + List<ImtDisagg> disaggs = imts.stream() + .map(imt -> new ImtDisagg(imt, disagg.toJson( + imt, + dataTypes.contains(GMM), + dataTypes.contains(SOURCE), + dataTypes.contains(DISAGG_DATA)))) + .collect(toList()); + + Object server = ServletUtil.serverData(ServletUtil.THREAD_COUNT, timer); + + return new Response( + new Response.Metadata(server, disagg.εBins()), + disaggs); + } + } + } + + private static final class ImtDisagg { + final Parameter imt; + final Object data; + + ImtDisagg(Imt imt, Object data) { + this.imt = new Parameter( + ServletUtil.imtShortLabel(imt), + imt.name()); + this.data = data; + } + } +} diff --git a/src/main/java/gov/usgs/earthquake/nshmp/www/hazard/HazardController.java b/src/main/java/gov/usgs/earthquake/nshmp/www/hazard/HazardController.java new file mode 100644 index 0000000000000000000000000000000000000000..03bc22142d9da072cdd6991f2269d389f1981095 --- /dev/null +++ b/src/main/java/gov/usgs/earthquake/nshmp/www/hazard/HazardController.java @@ -0,0 +1,108 @@ +package gov.usgs.earthquake.nshmp.www.hazard; + +import java.util.Set; + +import gov.usgs.earthquake.nshmp.gmm.Imt; +import gov.usgs.earthquake.nshmp.www.NshmpMicronautServlet; +import gov.usgs.earthquake.nshmp.www.ServletUtil; +import gov.usgs.earthquake.nshmp.www.hazard.HazardService.HazardImt; + +import io.micronaut.core.annotation.Nullable; +import io.micronaut.http.HttpRequest; +import io.micronaut.http.HttpResponse; +import io.micronaut.http.MediaType; +import io.micronaut.http.annotation.Controller; +import io.micronaut.http.annotation.Get; +import io.micronaut.http.annotation.PathVariable; +import io.micronaut.http.annotation.QueryValue; +import io.swagger.v3.oas.annotations.Operation; +import io.swagger.v3.oas.annotations.media.Schema; +import 
io.swagger.v3.oas.annotations.responses.ApiResponse; +import io.swagger.v3.oas.annotations.tags.Tag; +import jakarta.inject.Inject; + +/** + * Micronaut web service controller for probabilistic seismic hazard + * calculations. + * + * @author U.S. Geological Survey + */ +@Tag( + name = "Hazard", + description = "USGS NSHMP hazard calculation service") +@Controller("/hazard") +public class HazardController { + + @Inject + private NshmpMicronautServlet servlet; + + @Operation( + summary = "Hazard model and service metadata", + description = "Returns details of the installed model and service request parameters") + @ApiResponse( + description = "Hazard service metadata", + responseCode = "200") + @Get(produces = MediaType.APPLICATION_JSON) + public HttpResponse<String> doGetMetadata(HttpRequest<?> http) { + try { + return HazardService.getMetadata(http); + } catch (Exception e) { + return ServletUtil.error( + HazardService.LOG, e, + HazardService.NAME, + http.getUri().toString()); + } + } + + /** + * @param longitude Longitude in decimal degrees [-360..360] + * @param latitude Latitude in decimal degrees [-90..90] + * @param vs30 Site Vs30 value in m/s [150..3000] + * @param truncate Truncate curves at return periods below ~10,000 years + * @param maxdir Apply max-direction scaling + * @param imt Optional IMTs at which to compute hazard. If none are supplied, + * then the supported set for the installed model is used. Responses + * for numerous IMT's are quite large, on the order of MB. 
+ * + */ + @Operation( + summary = "Compute probabilisitic hazard at a site", + description = "Returns hazard curves computed from the installed model") + @ApiResponse( + description = "Hazard curves", + responseCode = "200") + @Get( + uri = "/{longitude}/{latitude}/{vs30}{?truncate,maxdir,imt}", + produces = MediaType.APPLICATION_JSON) + public HttpResponse<String> doGetHazard( + HttpRequest<?> http, + @Schema( + minimum = "-360", + maximum = "360") @PathVariable double longitude, + @Schema( + minimum = "-90", + maximum = "90") @PathVariable double latitude, + @Schema( + minimum = "150", + maximum = "3000") @PathVariable int vs30, + @QueryValue( + defaultValue = "false") @Nullable Boolean truncate, + @QueryValue( + defaultValue = "false") @Nullable Boolean maxdir, + @QueryValue @Nullable Set<HazardImt> imt) { + try { + Set<Imt> imts = HazardService.readImts(http); + HazardService.Request request = new HazardService.Request( + http, + longitude, latitude, vs30, + truncate, maxdir, + imts); + return HazardService.getHazard(request); + } catch (Exception e) { + return ServletUtil.error( + HazardService.LOG, e, + HazardService.NAME, + http.getUri().toString()); + } + } +} diff --git a/src/main/java/gov/usgs/earthquake/nshmp/www/hazard/HazardService.java b/src/main/java/gov/usgs/earthquake/nshmp/www/hazard/HazardService.java new file mode 100644 index 0000000000000000000000000000000000000000..e87db5fef77f5f75b96880e89d03e3b3d49ab003 --- /dev/null +++ b/src/main/java/gov/usgs/earthquake/nshmp/www/hazard/HazardService.java @@ -0,0 +1,390 @@ +package gov.usgs.earthquake.nshmp.www.hazard; + +import static com.google.common.base.Preconditions.checkState; +import static gov.usgs.earthquake.nshmp.calc.HazardExport.curvesBySource; +import static gov.usgs.earthquake.nshmp.data.DoubleData.checkInRange; +import static gov.usgs.earthquake.nshmp.geo.Coordinates.checkLatitude; +import static gov.usgs.earthquake.nshmp.geo.Coordinates.checkLongitude; +import static 
java.util.stream.Collectors.toCollection; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.EnumMap; +import java.util.EnumSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.ExecutionException; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.common.base.Stopwatch; + +import gov.usgs.earthquake.nshmp.calc.CalcConfig; +import gov.usgs.earthquake.nshmp.calc.DataType; +import gov.usgs.earthquake.nshmp.calc.Hazard; +import gov.usgs.earthquake.nshmp.calc.HazardCalcs; +import gov.usgs.earthquake.nshmp.calc.Site; +import gov.usgs.earthquake.nshmp.data.MutableXySequence; +import gov.usgs.earthquake.nshmp.data.XySequence; +import gov.usgs.earthquake.nshmp.geo.Coordinates; +import gov.usgs.earthquake.nshmp.geo.Location; +import gov.usgs.earthquake.nshmp.gmm.Imt; +import gov.usgs.earthquake.nshmp.model.HazardModel; +import gov.usgs.earthquake.nshmp.model.SourceType; +import gov.usgs.earthquake.nshmp.www.ResponseBody; +import gov.usgs.earthquake.nshmp.www.ServletUtil; +import gov.usgs.earthquake.nshmp.www.meta.DoubleParameter; +import gov.usgs.earthquake.nshmp.www.meta.Parameter; +import gov.usgs.earthquake.nshmp.www.services.SourceServices.SourceModel; + +import io.micronaut.http.HttpRequest; +import io.micronaut.http.HttpResponse; +import jakarta.inject.Singleton; + +/** + * Hazard service. + * + * @see HazardController + * @author U.S. 
Geological Survey + */ +@Singleton +public final class HazardService { + + static final String NAME = "Hazard Service"; + static final Logger LOG = LoggerFactory.getLogger(HazardService.class); + + private static final String TOTAL_KEY = "Total"; + + /* For Swagger selections; mprs + pgv */ + enum HazardImt { + PGA, + PGV, + SA0P01, + SA0P02, + SA0P03, + SA0P05, + SA0P075, + SA0P1, + SA0P15, + SA0P2, + SA0P25, + SA0P3, + SA0P4, + SA0P5, + SA0P75, + SA1P0, + SA1P5, + SA2P0, + SA3P0, + SA4P0, + SA5P0, + SA7P5, + SA10P0; + } + + /** HazardController.doGetUsage() handler. */ + public static HttpResponse<String> getMetadata(HttpRequest<?> request) { + var url = request.getUri().toString(); + var usage = new Metadata(ServletUtil.model()); + var body = ResponseBody.usage() + .name(NAME) + .url(url) + .request(url) + .response(usage) + .build(); + var json = ServletUtil.GSON2.toJson(body); + return HttpResponse.ok(json); + } + + /** HazardController.doGetHazard() handler. */ + public static HttpResponse<String> getHazard(Request request) + throws InterruptedException, ExecutionException { + var stopwatch = Stopwatch.createStarted(); + var hazard = calcHazard(request); + var response = new Response.Builder() + .timer(stopwatch) + .request(request) + .hazard(hazard) + .build(); + var body = ResponseBody.success() + .name(NAME) + .url(request.http.getUri().toString()) + .request(request) + .response(response) + .build(); + String json = ServletUtil.GSON2.toJson(body); + return HttpResponse.ok(json); + } + + /* + * Developer notes: + * + * Future calculation configuration options: vertical GMs + * + * NSHM Hazard Tool will not pass truncation and maxdir args/flags as the apps + * apply truncation and scaling on the client. 
+ */ + + public static Hazard calcHazard(Request request) + throws InterruptedException, ExecutionException { + + HazardModel model = ServletUtil.model(); + + // modify config to include service endpoint arguments + CalcConfig config = CalcConfig.copyOf(model.config()) + .imts(request.imts) + .build(); + + Location loc = Location.create(request.longitude, request.latitude); + Site site = ServletUtil.createSite(loc, request.vs30, model.siteData()); + + CompletableFuture<Hazard> future = CompletableFuture.supplyAsync( + () -> HazardCalcs.hazard( + model, config, site, + ServletUtil.CALC_EXECUTOR), + ServletUtil.TASK_EXECUTOR); + + return future.get(); + } + + static class Metadata { + + final SourceModel model; + final DoubleParameter longitude; + final DoubleParameter latitude; + final DoubleParameter vs30; + + Metadata(HazardModel model) { + this.model = new SourceModel(model); + // should get min max from model + longitude = new DoubleParameter( + "Longitude", + "°", + Coordinates.LON_RANGE.lowerEndpoint(), + Coordinates.LON_RANGE.upperEndpoint()); + + latitude = new DoubleParameter( + "Latitude", + "°", + Coordinates.LAT_RANGE.lowerEndpoint(), + Coordinates.LAT_RANGE.upperEndpoint()); + + vs30 = new DoubleParameter( + "Vs30", + "m/s", + 150, + 1500); + } + } + + static final class Request { + + final transient HttpRequest<?> http; + final double longitude; + final double latitude; + final double vs30; + final boolean truncate; + final boolean maxdir; + final Set<Imt> imts; + + public Request( + HttpRequest<?> http, + double longitude, + double latitude, + int vs30, + boolean truncate, + boolean maxdir, + Set<Imt> imts) { + + this.http = http; + this.longitude = checkLongitude(longitude); + this.latitude = checkLatitude(latitude); + this.vs30 = checkInRange(Site.VS30_RANGE, Site.Key.VS30, vs30); + this.truncate = truncate; + this.maxdir = maxdir; + this.imts = imts.isEmpty() + ? 
ServletUtil.model().config().hazard.imts + : imts; + } + } + + private static final class Response { + + final Metadata metadata; + final List<ImtCurves> hazardCurves; + + Response(Metadata metadata, List<ImtCurves> hazardCurves) { + this.metadata = metadata; + this.hazardCurves = hazardCurves; + } + + private static final class Metadata { + final Object server; + final String xlabel = "Ground Motion (g)"; + final String ylabel = "Annual Frequency of Exceedence"; + + Metadata(Object server) { + this.server = server; + } + } + + private static final class Builder { + + Stopwatch timer; + Request request; + Map<Imt, Map<SourceType, MutableXySequence>> componentMaps; + Map<Imt, MutableXySequence> totalMap; + + Builder timer(Stopwatch timer) { + this.timer = timer; + return this; + } + + Builder request(Request request) { + this.request = request; + return this; + } + + Builder hazard(Hazard hazard) { + // necessary?? + checkState(totalMap == null, "Hazard has already been added to this builder"); + + componentMaps = new EnumMap<>(Imt.class); + totalMap = new EnumMap<>(Imt.class); + + var typeTotalMaps = curvesBySource(hazard); + + for (var imt : hazard.curves().keySet()) { + + /* Total curve for IMT. */ + XySequence.addToMap(imt, totalMap, hazard.curves().get(imt)); + + /* Source component curves for IMT. 
*/ + var typeTotalMap = typeTotalMaps.get(imt); + var componentMap = componentMaps.get(imt); + + if (componentMap == null) { + componentMap = new EnumMap<>(SourceType.class); + componentMaps.put(imt, componentMap); + } + + for (var type : typeTotalMap.keySet()) { + XySequence.addToMap(type, componentMap, typeTotalMap.get(type)); + } + } + + return this; + } + + Response build() { + var hazards = new ArrayList<ImtCurves>(); + + for (Imt imt : totalMap.keySet()) { + var curves = new ArrayList<Curve>(); + + // total curve + curves.add(new Curve( + TOTAL_KEY, + updateCurve(request, totalMap.get(imt), imt))); + + // component curves + var typeMap = componentMaps.get(imt); + for (SourceType type : typeMap.keySet()) { + curves.add(new Curve( + type.toString(), + updateCurve(request, typeMap.get(type), imt))); + } + + hazards.add(new ImtCurves(imt, curves)); + } + + Object server = ServletUtil.serverData(ServletUtil.THREAD_COUNT, timer); + var response = new Response( + new Response.Metadata(server), + hazards); + + return response; + } + } + + } + + private static final class ImtCurves { + final Parameter imt; + final List<Curve> data; + + ImtCurves(Imt imt, List<Curve> data) { + this.imt = new Parameter(ServletUtil.imtShortLabel(imt), imt.name()); + this.data = data; + } + } + + private static final class Curve { + final String component; + final XySequence values; + + Curve(String component, XySequence values) { + this.component = component; + this.values = values; + } + } + + private static final double TRUNCATION_LIMIT = 1e-4; + + /* Convert to linear and possibly truncate and scale to max-direction. */ + private static XySequence updateCurve( + Request request, + XySequence curve, + Imt imt) { + + /* + * If entire curve is <1e-4, this method will return a curve consisting of + * just the first point in the supplied curve. + * + * Consider moving to config. + */ + + double[] yValues = curve.yValues().toArray(); + int limit = request.truncate ? 
truncationLimit(yValues) : yValues.length; + yValues = Arrays.copyOf(yValues, limit); + + double scale = request.maxdir ? MaxDirection.FACTORS.get(imt) : 1.0; + double[] xValues = curve.xValues() + .limit(yValues.length) + .map(Math::exp) + .map(x -> x * scale) + .toArray(); + + return XySequence.create(xValues, yValues); + } + + private static int truncationLimit(double[] yValues) { + int limit = 1; + double y = yValues[0]; + while (y > TRUNCATION_LIMIT && limit < yValues.length) { + y = yValues[limit++]; + } + return limit; + } + + /* Read the 'imt' query values; can be comma-delimited. */ + static Set<Imt> readImts(HttpRequest<?> http) { + return http.getParameters().getAll("imt").stream() + .map(s -> s.split(",")) + .flatMap(Arrays::stream) + .map(Imt::valueOf) + .collect(toCollection(() -> EnumSet.noneOf(Imt.class))); + } + + /* Read the 'imt' query values; can be comma-delimited. */ + static Set<DataType> readDataTypes(HttpRequest<?> http) { + return http.getParameters().getAll("out").stream() + .map(s -> s.split(",")) + .flatMap(Arrays::stream) + .map(DataType::valueOf) + .collect(toCollection(() -> EnumSet.noneOf(DataType.class))); + } +} diff --git a/src/main/java/gov/usgs/earthquake/nshmp/www/services/MaxDirection.java b/src/main/java/gov/usgs/earthquake/nshmp/www/hazard/MaxDirection.java similarity index 97% rename from src/main/java/gov/usgs/earthquake/nshmp/www/services/MaxDirection.java rename to src/main/java/gov/usgs/earthquake/nshmp/www/hazard/MaxDirection.java index e35c59355037416482937ce865aeee59400af2cf..99c40c42d3127fcca3bb176c6fb837d20d0d6c3d 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/www/services/MaxDirection.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/www/hazard/MaxDirection.java @@ -1,4 +1,4 @@ -package gov.usgs.earthquake.nshmp.www.services; +package gov.usgs.earthquake.nshmp.www.hazard; import static gov.usgs.earthquake.nshmp.gmm.Imt.PGA; import static gov.usgs.earthquake.nshmp.gmm.Imt.SA0P01; diff --git 
a/src/main/java/gov/usgs/earthquake/nshmp/www/meta/MetaUtil.java b/src/main/java/gov/usgs/earthquake/nshmp/www/meta/MetaUtil.java index 74c7c7ab48943d014553be9d53d8046790d38701..a1a2c068400b60b52532e01acb7d912b1dbde6e0 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/www/meta/MetaUtil.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/www/meta/MetaUtil.java @@ -39,7 +39,7 @@ public final class MetaUtil { JsonObject json = new JsonObject(); json.add("location", loc); json.addProperty("vs30", site.vs30()); - json.addProperty("vsInfered", site.vsInferred()); + json.addProperty("vsInferred", site.vsInferred()); json.addProperty("z1p0", Double.isNaN(site.z1p0()) ? null : site.z1p0()); json.addProperty("z2p5", Double.isNaN(site.z2p5()) ? null : site.z2p5()); diff --git a/src/main/java/gov/usgs/earthquake/nshmp/www/meta/Metadata.java b/src/main/java/gov/usgs/earthquake/nshmp/www/meta/Metadata.java index fd844d51d73793c66972b6e11cc5d9e989d01b89..0f2d40b1d6eeb39cd7d7de5d222542b0ef49f266 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/www/meta/Metadata.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/www/meta/Metadata.java @@ -4,7 +4,7 @@ import com.google.common.base.Stopwatch; import com.google.common.base.Throwables; import gov.usgs.earthquake.nshmp.geo.Coordinates; -import gov.usgs.earthquake.nshmp.www.services.ServletUtil; +import gov.usgs.earthquake.nshmp.www.ServletUtil; /** * Service metadata, parameterization, and constraint strings, in JSON format. 
@@ -28,62 +28,18 @@ public final class Metadata { this.status = Status.USAGE.toString(); this.description = description; this.syntax = syntax; - this.server = serverData(1, Stopwatch.createStarted()); + this.server = ServletUtil.serverData(1, Stopwatch.createStarted()); this.parameters = parameters; } } - public static Object serverData(int threads, Stopwatch timer) { - return new Server(threads, timer); - } - - private static class Server { - - final int threads; - final String timer; - final String version; - - Server(int threads, Stopwatch timer) { - this.threads = threads; - this.timer = timer.toString(); - this.version = "TODO where to get version?"; - } - - // static Component NSHMP_HAZ_COMPONENT = new Component( - // NSHMP_HAZ_URL, - // Versions.NSHMP_HAZ_VERSION); - // - // static final class Component { - // - // final String url; - // final String version; - // - // Component(String url, String version) { - // this.url = url; - // this.version = version; - // } - // } - } - public static class DefaultParameters { - // final EnumParameter<Edition> edition; - // final EnumParameter<Region> region; final DoubleParameter longitude; final DoubleParameter latitude; public DefaultParameters() { - // edition = new EnumParameter<>( - // "Model edition", - // ParamType.STRING, - // EnumSet.allOf(Edition.class)); - // - // region = new EnumParameter<>( - // "Model region", - // ParamType.STRING, - // EnumSet.allOf(Region.class)); - longitude = new DoubleParameter( "Longitude", "°", @@ -98,26 +54,6 @@ public final class Metadata { } } - public static String busyMessage(String url, long hits, long misses) { - Busy busy = new Busy(url, hits, misses); - return ServletUtil.GSON.toJson(busy); - } - - static final String BUSY_MESSAGE = "Server busy. Please try again later. 
" + - "We apologize for any inconvenience while we increase capacity."; - - private static class Busy { - - final String status = Status.BUSY.toString(); - final String request; - final String message; - - private Busy(String request, long hits, long misses) { - this.request = request; - this.message = BUSY_MESSAGE + String.format(" (%s,%s)", hits, misses); - } - } - public static String errorMessage(String url, Throwable e, boolean trace) { Error error = new Error(url, e, trace); return ServletUtil.GSON.toJson(error); diff --git a/src/main/java/gov/usgs/earthquake/nshmp/www/services/HazardService.java b/src/main/java/gov/usgs/earthquake/nshmp/www/services/HazardService.java deleted file mode 100644 index 32468d63800dcfd4989430a8b607ecfa65e62ef3..0000000000000000000000000000000000000000 --- a/src/main/java/gov/usgs/earthquake/nshmp/www/services/HazardService.java +++ /dev/null @@ -1,413 +0,0 @@ -package gov.usgs.earthquake.nshmp.www.services; - -import static com.google.common.base.Preconditions.checkState; -import static gov.usgs.earthquake.nshmp.calc.HazardExport.curvesBySource; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.EnumMap; -import java.util.List; -import java.util.Map; -import java.util.concurrent.ExecutionException; -import java.util.function.Function; - -import com.google.common.base.Stopwatch; - -import gov.usgs.earthquake.nshmp.calc.CalcConfig; -import gov.usgs.earthquake.nshmp.calc.Hazard; -import gov.usgs.earthquake.nshmp.calc.Site; -import gov.usgs.earthquake.nshmp.data.MutableXySequence; -import gov.usgs.earthquake.nshmp.data.XySequence; -import gov.usgs.earthquake.nshmp.geo.Coordinates; -import gov.usgs.earthquake.nshmp.geo.Location; -import gov.usgs.earthquake.nshmp.gmm.Imt; -import gov.usgs.earthquake.nshmp.model.HazardModel; -import gov.usgs.earthquake.nshmp.model.SourceType; -import gov.usgs.earthquake.nshmp.www.HazardController; -import gov.usgs.earthquake.nshmp.www.Response; -import 
gov.usgs.earthquake.nshmp.www.WsUtils; -import gov.usgs.earthquake.nshmp.www.meta.DoubleParameter; -import gov.usgs.earthquake.nshmp.www.meta.Metadata; -import gov.usgs.earthquake.nshmp.www.meta.Parameter; -import gov.usgs.earthquake.nshmp.www.meta.Status; -import gov.usgs.earthquake.nshmp.www.services.ServicesUtil.ServiceQueryData; -import gov.usgs.earthquake.nshmp.www.services.ServicesUtil.ServiceRequestData; -import gov.usgs.earthquake.nshmp.www.services.SourceServices.SourceModel; - -import io.micronaut.http.HttpRequest; -import io.micronaut.http.HttpResponse; -import jakarta.inject.Singleton; - -/** - * Probabilistic seismic hazard calculation handler for - * {@link HazardController}. - * - * @author U.S. Geological Survey - */ -@Singleton -public final class HazardService { - - private static final String NAME = "Hazard Service"; - - /** HazardController.doGetUsage() handler. */ - public static HttpResponse<String> handleDoGetMetadata(HttpRequest<?> request) { - var url = request.getUri().getPath(); - try { - var usage = new RequestMetadata(ServletUtil.model());// SourceServices.ResponseData(); - var response = new Response<>(Status.USAGE, NAME, url, usage, url); - var svcResponse = ServletUtil.GSON.toJson(response); - return HttpResponse.ok(svcResponse); - } catch (Exception e) { - return ServicesUtil.handleError(e, NAME, url); - } - } - - /** HazardController.doGetHazard() handler. 
*/ - public static HttpResponse<String> handleDoGetHazard( - HttpRequest<?> request, - QueryParameters query) { - - try { - // TODO still need to validate - // if (query.isEmpty()) { - // return handleDoGetUsage(urlHelper); - // } - // query.checkParameters(); - var data = new RequestData(query); - var response = process(request, data); - var svcResponse = ServletUtil.GSON.toJson(response); - return HttpResponse.ok(svcResponse); - } catch (Exception e) { - return ServicesUtil.handleError(e, NAME, request.getUri().getPath()); - } - } - - static Response<RequestData, ResponseData> process( - HttpRequest<?> request, - RequestData data) - throws InterruptedException, ExecutionException { - - var configFunction = new ConfigFunction(); - var siteFunction = new SiteFunction(data); - var stopwatch = Stopwatch.createStarted(); - var hazard = ServicesUtil.calcHazard(configFunction, siteFunction); - - return new ResultBuilder() - .hazard(hazard) - .requestData(data) - .timer(stopwatch) - .url(request) - .build(); - } - - static class ConfigFunction implements Function<HazardModel, CalcConfig> { - @Override - public CalcConfig apply(HazardModel model) { - var configBuilder = CalcConfig.copyOf(model.config()); - return configBuilder.build(); - } - } - - static class SiteFunction implements Function<CalcConfig, Site> { - final RequestData data; - - private SiteFunction(RequestData data) { - this.data = data; - } - - @Override // TODO this needs to pick up SiteData - public Site apply(CalcConfig config) { - return Site.builder() - .location(Location.create(data.longitude, data.latitude)) - .vs30(data.vs30) - .build(); - } - } - - public static class QueryParameters { - - final double longitude; - final double latitude; - final int vs30; - final boolean truncate; - final boolean maxdir; - - public QueryParameters( - double longitude, - double latitude, - int vs30, - boolean truncate, - boolean maxdir) { - - this.longitude = longitude; - this.latitude = latitude; - this.vs30 = 
vs30; - this.truncate = truncate; - this.maxdir = maxdir; - } - - // void checkParameters() { - // checkParameter(longitude, "longitude"); - // checkParameter(latitude, "latitude"); - // checkParameter(vs30, "vs30"); - // } - } - - // private static void checkParameter(Object param, String id) { - // checkNotNull(param, "Missing parameter: %s", id); - // // TODO check range here - // } - - /* Service request and model metadata */ - static class RequestMetadata { - - final SourceModel model; - final DoubleParameter longitude; - final DoubleParameter latitude; - final DoubleParameter vs30; - - RequestMetadata(HazardModel model) { - this.model = new SourceModel(model); - // TODO need min max from model - longitude = new DoubleParameter( - "Longitude", - "°", - Coordinates.LON_RANGE.lowerEndpoint(), - Coordinates.LON_RANGE.upperEndpoint()); - - latitude = new DoubleParameter( - "Latitude", - "°", - Coordinates.LAT_RANGE.lowerEndpoint(), - Coordinates.LAT_RANGE.upperEndpoint()); - - vs30 = new DoubleParameter( - "Latitude", - "m/s", - 150, - 1500); - } - } - - static class RequestData { - - final double longitude; - final double latitude; - final double vs30; - final boolean truncate; - final boolean maxdir; - - RequestData(QueryParameters query) { - this.longitude = query.longitude; - this.latitude = query.latitude; - this.vs30 = query.vs30; - this.truncate = query.truncate; - this.maxdir = query.maxdir; - } - } - - private static final class ResponseMetadata { - final String xlabel = "Ground Motion (g)"; - final String ylabel = "Annual Frequency of Exceedence"; - final Object server; - - ResponseMetadata(Object server) { - this.server = server; - } - } - - private static String imtShortLabel(Imt imt) { - if (imt.equals(Imt.PGA) || imt.equals(Imt.PGV)) { - return imt.name(); - } else if (imt.isSA()) { - return imt.period() + " s"; - } - return imt.toString(); - } - - @Deprecated - static class RequestDataOld extends ServiceRequestData { - final double vs30; - - 
RequestDataOld(Query query, double vs30) { - super(query); - this.vs30 = vs30; - } - } - - private static final class ResponseData { - final ResponseMetadata metadata; - final List<HazardResponse> hazardCurves; - - ResponseData(ResponseMetadata metadata, List<HazardResponse> hazardCurves) { - this.metadata = metadata; - this.hazardCurves = hazardCurves; - } - } - - private static final class HazardResponse { - final Parameter imt; - final List<Curve> data; - - HazardResponse(Imt imt, List<Curve> data) { - this.imt = new Parameter(imtShortLabel(imt), imt.name()); - this.data = data; - } - } - - private static final class Curve { - final String component; - final XySequence values; - - Curve(String component, XySequence values) { - this.component = component; - this.values = values; - } - } - - private static final String TOTAL_KEY = "Total"; - - private static final class ResultBuilder { - - String url; - Stopwatch timer; - RequestData request; - - Map<Imt, Map<SourceType, MutableXySequence>> componentMaps; - Map<Imt, MutableXySequence> totalMap; - - ResultBuilder hazard(Hazard hazardResult) { - // TODO necessary?? - checkState(totalMap == null, "Hazard has already been added to this builder"); - - componentMaps = new EnumMap<>(Imt.class); - totalMap = new EnumMap<>(Imt.class); - - var typeTotalMaps = curvesBySource(hazardResult); - - for (var imt : hazardResult.curves().keySet()) { - - /* Total curve for IMT. */ - XySequence.addToMap(imt, totalMap, hazardResult.curves().get(imt)); - - /* Source component curves for IMT. 
*/ - var typeTotalMap = typeTotalMaps.get(imt); - var componentMap = componentMaps.get(imt); - - if (componentMap == null) { - componentMap = new EnumMap<>(SourceType.class); - componentMaps.put(imt, componentMap); - } - - for (var type : typeTotalMap.keySet()) { - XySequence.addToMap(type, componentMap, typeTotalMap.get(type)); - } - } - - return this; - } - - ResultBuilder url(HttpRequest<?> request) { - url = request.getUri().getPath(); - return this; - } - - ResultBuilder timer(Stopwatch timer) { - this.timer = timer; - return this; - } - - ResultBuilder requestData(RequestData request) { - this.request = request; - return this; - } - - Response<RequestData, ResponseData> build() { - var hazards = new ArrayList<HazardResponse>(); - - for (Imt imt : totalMap.keySet()) { - var curves = new ArrayList<Curve>(); - - // total curve - curves.add(new Curve( - TOTAL_KEY, - updateCurve(request, totalMap.get(imt), imt))); - - // component curves - var typeMap = componentMaps.get(imt); - for (SourceType type : typeMap.keySet()) { - curves.add(new Curve( - type.toString(), - updateCurve(request, typeMap.get(type), imt))); - } - - hazards.add(new HazardResponse(imt, List.copyOf(curves))); - } - - Object server = Metadata.serverData(ServletUtil.THREAD_COUNT, timer); - var response = new ResponseData(new ResponseMetadata(server), List.copyOf(hazards)); - - return new Response<>(Status.SUCCESS, NAME, request, response, url); - } - } - - private static final double TRUNCATION_LIMIT = 1e-4; - - /* Convert to linear and possibly truncate and scale to max-direction. */ - private static XySequence updateCurve( - RequestData request, - XySequence curve, - Imt imt) { - - /* - * If entire curve is <1e-4, this method will return a curve consisting of - * just the first point in the supplied curve. - * - * TODO We probably want to move the TRUNCATION_LIMIT out to a config. - */ - - double[] yValues = curve.yValues().toArray(); - int limit = request.truncate ? 
truncationLimit(yValues) : yValues.length; - yValues = Arrays.copyOf(yValues, limit); - - double scale = request.maxdir ? MaxDirection.FACTORS.get(imt) : 1.0; - double[] xValues = curve.xValues() - .limit(yValues.length) - .map(Math::exp) - .map(x -> x * scale) - .toArray(); - - return XySequence.create(xValues, yValues); - } - - private static int truncationLimit(double[] yValues) { - int limit = 1; - double y = yValues[0]; - while (y > TRUNCATION_LIMIT && limit < yValues.length) { - y = yValues[limit++]; - } - return limit; - } - - @Deprecated - public static class Query extends ServiceQueryData { - Integer vs30; - - public Query(Double longitude, Double latitude, Integer vs30) { - super(longitude, latitude); - this.vs30 = vs30; - } - - @Override - public boolean isNull() { - return super.isNull() && vs30 == null; - } - - @Override - public void checkValues() { - super.checkValues(); - WsUtils.checkValue(ServicesUtil.Key.VS30, vs30); - } - } - -} diff --git a/src/main/java/gov/usgs/earthquake/nshmp/www/services/HazardService2.java b/src/main/java/gov/usgs/earthquake/nshmp/www/services/HazardService2.java deleted file mode 100644 index 3e58e39dd09e3a90e60e10be299ba559e66e3d04..0000000000000000000000000000000000000000 --- a/src/main/java/gov/usgs/earthquake/nshmp/www/services/HazardService2.java +++ /dev/null @@ -1,443 +0,0 @@ -package gov.usgs.earthquake.nshmp.www.services; - -import static com.google.common.base.Preconditions.checkState; -import static gov.usgs.earthquake.nshmp.calc.HazardExport.curvesBySource; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.EnumMap; -import java.util.List; -import java.util.Map; -import java.util.concurrent.ExecutionException; -import java.util.function.Function; - -import com.google.common.base.Stopwatch; - -import gov.usgs.earthquake.nshmp.calc.CalcConfig; -import gov.usgs.earthquake.nshmp.calc.Hazard; -import gov.usgs.earthquake.nshmp.calc.Site; -import 
gov.usgs.earthquake.nshmp.data.MutableXySequence; -import gov.usgs.earthquake.nshmp.data.XySequence; -import gov.usgs.earthquake.nshmp.geo.Coordinates; -import gov.usgs.earthquake.nshmp.geo.Location; -import gov.usgs.earthquake.nshmp.gmm.Imt; -import gov.usgs.earthquake.nshmp.model.HazardModel; -import gov.usgs.earthquake.nshmp.model.SourceType; -import gov.usgs.earthquake.nshmp.www.HazardController; -import gov.usgs.earthquake.nshmp.www.Response; -import gov.usgs.earthquake.nshmp.www.WsUtils; -import gov.usgs.earthquake.nshmp.www.meta.DoubleParameter; -import gov.usgs.earthquake.nshmp.www.meta.Metadata; -import gov.usgs.earthquake.nshmp.www.meta.Parameter; -import gov.usgs.earthquake.nshmp.www.meta.Status; -import gov.usgs.earthquake.nshmp.www.services.ServicesUtil.ServiceQueryData; -import gov.usgs.earthquake.nshmp.www.services.SourceServices.SourceModel; - -import io.micronaut.http.HttpRequest; -import io.micronaut.http.HttpResponse; -import jakarta.inject.Singleton; - -/** - * Probabilistic seismic hazard calculation handler for - * {@link HazardController}. - * - * @author U.S. Geological Survey - */ -@Singleton -public final class HazardService2 { - - private static final String NAME = "Hazard Service"; - - /** HazardController.doGetUsage() handler. */ - public static HttpResponse<String> handleDoGetMetadata(HttpRequest<?> request) { - var url = request.getUri().getPath(); - try { - var usage = new RequestMetadata(ServletUtil.model());// SourceServices.ResponseData(); - var response = new Response(Status.USAGE, NAME, url, usage, url); - var svcResponse = ServletUtil.GSON.toJson(response); - return HttpResponse.ok(svcResponse); - } catch (Exception e) { - return ServicesUtil.handleError(e, NAME, url); - } - } - - /** HazardController.doGetHazard() handler. 
*/ - public static HttpResponse<String> handleDoGetHazard( - HttpRequest<?> request, - RequestData args) { - - try { - // TODO still need to validate - // if (query.isEmpty()) { - // return handleDoGetUsage(urlHelper); - // } - // query.checkParameters(); - - // var data = new RequestData(query); - - Response<RequestData, ResponseData> response = process(request, args); - String svcResponse = ServletUtil.GSON.toJson(response); - return HttpResponse.ok(svcResponse); - - } catch (Exception e) { - return ServicesUtil.handleError(e, NAME, request.getUri().getPath()); - } - } - - static Response<RequestData, ResponseData> process( - HttpRequest<?> request, - RequestData data) throws InterruptedException, ExecutionException { - - var configFunction = new ConfigFunction(); - var siteFunction = new SiteFunction(data); - var stopwatch = Stopwatch.createStarted(); - var hazard = ServicesUtil.calcHazard(configFunction, siteFunction); - - return new ResultBuilder() - .hazard(hazard) - .requestData(data) - .timer(stopwatch) - .url(request) - .build(); - } - - static class ConfigFunction implements Function<HazardModel, CalcConfig> { - @Override - public CalcConfig apply(HazardModel model) { - var configBuilder = CalcConfig.copyOf(model.config()); - return configBuilder.build(); - } - } - - static class SiteFunction implements Function<CalcConfig, Site> { - final RequestData data; - - private SiteFunction(RequestData data) { - this.data = data; - } - - @Override // TODO this needs to pick up SiteData - public Site apply(CalcConfig config) { - return Site.builder() - .location(Location.create(data.longitude, data.latitude)) - .vs30(data.vs30) - .build(); - } - } - - // public static class QueryParameters { - // - // final double longitude; - // final double latitude; - // final int vs30; - // final boolean truncate; - // final boolean maxdir; - // - // public QueryParameters( - // double longitude, - // double latitude, - // int vs30, - // boolean truncate, - // boolean maxdir) { 
- // - // this.longitude = longitude; - // this.latitude = latitude; - // this.vs30 = vs30; - // this.truncate = truncate; - // this.maxdir = maxdir; - // } - // - // // void checkParameters() { - // // checkParameter(longitude, "longitude"); - // // checkParameter(latitude, "latitude"); - // // checkParameter(vs30, "vs30"); - // // } - // } - - // private static void checkParameter(Object param, String id) { - // checkNotNull(param, "Missing parameter: %s", id); - // // TODO check range here - // } - - /* Service request and model metadata */ - static class RequestMetadata { - - final SourceModel model; - final DoubleParameter longitude; - final DoubleParameter latitude; - final DoubleParameter vs30; - - RequestMetadata(HazardModel model) { - this.model = new SourceModel(model); - // TODO need min max from model - longitude = new DoubleParameter( - "Longitude", - "°", - Coordinates.LON_RANGE.lowerEndpoint(), - Coordinates.LON_RANGE.upperEndpoint()); - - latitude = new DoubleParameter( - "Latitude", - "°", - Coordinates.LAT_RANGE.lowerEndpoint(), - Coordinates.LAT_RANGE.upperEndpoint()); - - vs30 = new DoubleParameter( - "Latitude", - "m/s", - 150, - 1500); - } - } - - // static class RequestData { - // - // final double longitude; - // final double latitude; - // final double vs30; - // final boolean truncate; - // final boolean maxdir; - // - // RequestData(QueryParameters query) { - // this.longitude = query.longitude; - // this.latitude = query.latitude; - // this.vs30 = query.vs30; - // this.truncate = query.truncate; - // this.maxdir = query.maxdir; - // } - // } - - private static final class ResponseMetadata { - final String xlabel = "Ground Motion (g)"; - final String ylabel = "Annual Frequency of Exceedence"; - final Object server; - - ResponseMetadata(Object server) { - this.server = server; - } - } - - private static String imtShortLabel(Imt imt) { - if (imt.equals(Imt.PGA) || imt.equals(Imt.PGV)) { - return imt.name(); - } else if (imt.isSA()) { - 
return imt.period() + " s"; - } - return imt.toString(); - } - - // @Deprecated - // static class RequestDataOld extends ServiceRequestData { - // final double vs30; - // - // RequestDataOld(Query query, double vs30) { - // super(query); - // this.vs30 = vs30; - // } - // } - - private static final class ResponseData { - final ResponseMetadata metadata; - final List<HazardResponse> hazardCurves; - - ResponseData(ResponseMetadata metadata, List<HazardResponse> hazardCurves) { - this.metadata = metadata; - this.hazardCurves = hazardCurves; - } - } - - private static final class HazardResponse { - final Parameter imt; - final List<Curve> data; - - HazardResponse(Imt imt, List<Curve> data) { - this.imt = new Parameter(imtShortLabel(imt), imt.name()); - this.data = data; - } - } - - private static final class Curve { - final String component; - final XySequence values; - - Curve(String component, XySequence values) { - this.component = component; - this.values = values; - } - } - - private static final String TOTAL_KEY = "Total"; - - private static final class ResultBuilder { - - String url; - Stopwatch timer; - RequestData request; - - Map<Imt, Map<SourceType, MutableXySequence>> componentMaps; - Map<Imt, MutableXySequence> totalMap; - - ResultBuilder hazard(Hazard hazardResult) { - // TODO necessary?? - checkState(totalMap == null, "Hazard has already been added to this builder"); - - componentMaps = new EnumMap<>(Imt.class); - totalMap = new EnumMap<>(Imt.class); - - var typeTotalMaps = curvesBySource(hazardResult); - - for (var imt : hazardResult.curves().keySet()) { - - /* Total curve for IMT. */ - XySequence.addToMap(imt, totalMap, hazardResult.curves().get(imt)); - - /* Source component curves for IMT. 
*/ - var typeTotalMap = typeTotalMaps.get(imt); - var componentMap = componentMaps.get(imt); - - if (componentMap == null) { - componentMap = new EnumMap<>(SourceType.class); - componentMaps.put(imt, componentMap); - } - - for (var type : typeTotalMap.keySet()) { - XySequence.addToMap(type, componentMap, typeTotalMap.get(type)); - } - } - - return this; - } - - ResultBuilder url(HttpRequest<?> request) { - url = request.getUri().getPath(); - return this; - } - - ResultBuilder timer(Stopwatch timer) { - this.timer = timer; - return this; - } - - ResultBuilder requestData(RequestData request) { - this.request = request; - return this; - } - - Response<RequestData, ResponseData> build() { - var hazards = new ArrayList<HazardResponse>(); - - for (Imt imt : totalMap.keySet()) { - var curves = new ArrayList<Curve>(); - - // total curve - curves.add(new Curve( - TOTAL_KEY, - updateCurve(request, totalMap.get(imt), imt))); - - // component curves - var typeMap = componentMaps.get(imt); - for (SourceType type : typeMap.keySet()) { - curves.add(new Curve( - type.toString(), - updateCurve(request, typeMap.get(type), imt))); - } - - hazards.add(new HazardResponse(imt, List.copyOf(curves))); - } - - Object server = Metadata.serverData(ServletUtil.THREAD_COUNT, timer); - var response = new ResponseData(new ResponseMetadata(server), List.copyOf(hazards)); - - return new Response<>(Status.SUCCESS, NAME, request, response, url); - } - } - - private static final double TRUNCATION_LIMIT = 1e-4; - - /* Convert to linear and possibly truncate and scale to max-direction. */ - private static XySequence updateCurve( - RequestData request, - XySequence curve, - Imt imt) { - - /* - * If entire curve is <1e-4, this method will return a curve consisting of - * just the first point in the supplied curve. - * - * TODO We probably want to move the TRUNCATION_LIMIT out to a config. - */ - - double[] yValues = curve.yValues().toArray(); - int limit = request.truncate ? 
truncationLimit(yValues) : yValues.length; - yValues = Arrays.copyOf(yValues, limit); - - double scale = request.maxdir ? MaxDirection.FACTORS.get(imt) : 1.0; - double[] xValues = curve.xValues() - .limit(yValues.length) - .map(Math::exp) - .map(x -> x * scale) - .toArray(); - - return XySequence.create(xValues, yValues); - } - - private static int truncationLimit(double[] yValues) { - int limit = 1; - double y = yValues[0]; - while (y > TRUNCATION_LIMIT && limit < yValues.length) { - y = yValues[limit++]; - } - return limit; - } - - @Deprecated - public static class Query extends ServiceQueryData { - Integer vs30; - - public Query(Double longitude, Double latitude, Integer vs30) { - super(longitude, latitude); - this.vs30 = vs30; - } - - @Override - public boolean isNull() { - return super.isNull() && vs30 == null; - } - - @Override - public void checkValues() { - super.checkValues(); - WsUtils.checkValue(ServicesUtil.Key.VS30, vs30); - } - } - - public static final class RequestData { - - final double longitude; - final double latitude; - final int vs30; - final boolean truncate; - final boolean maxdir; - - public RequestData( - double longitude, - double latitude, - int vs30, - boolean truncate, - boolean maxdir) { - - this.longitude = longitude; - this.latitude = latitude; - this.vs30 = vs30; - this.truncate = truncate; - this.maxdir = maxdir; - } - - // void checkParameters() { - // checkParameter(longitude, "longitude"); - // checkParameter(latitude, "latitude"); - // checkParameter(vs30, "vs30"); - // } - } - -} diff --git a/src/main/java/gov/usgs/earthquake/nshmp/www/services/RateService.java b/src/main/java/gov/usgs/earthquake/nshmp/www/services/RateService.java index e0772386ad74e3ec67898e042a095f92423431d7..c0bcb47fd4159c8b8171dabbff526ef1f4c37d7b 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/www/services/RateService.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/www/services/RateService.java @@ -6,6 +6,9 @@ import java.util.Optional; import 
java.util.concurrent.ExecutionException; import java.util.stream.Collectors; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import com.google.common.base.Stopwatch; import com.google.common.util.concurrent.ListenableFuture; @@ -16,15 +19,14 @@ import gov.usgs.earthquake.nshmp.calc.Site; import gov.usgs.earthquake.nshmp.geo.Location; import gov.usgs.earthquake.nshmp.model.HazardModel; import gov.usgs.earthquake.nshmp.www.RateController; -import gov.usgs.earthquake.nshmp.www.Response; +import gov.usgs.earthquake.nshmp.www.ResponseBody; +import gov.usgs.earthquake.nshmp.www.ServicesUtil.Key; +import gov.usgs.earthquake.nshmp.www.ServicesUtil.ServiceQueryData; +import gov.usgs.earthquake.nshmp.www.ServicesUtil.ServiceRequestData; +import gov.usgs.earthquake.nshmp.www.ServletUtil; import gov.usgs.earthquake.nshmp.www.WsUtils; import gov.usgs.earthquake.nshmp.www.meta.DoubleParameter; -import gov.usgs.earthquake.nshmp.www.meta.Metadata; import gov.usgs.earthquake.nshmp.www.meta.Metadata.DefaultParameters; -import gov.usgs.earthquake.nshmp.www.meta.Status; -import gov.usgs.earthquake.nshmp.www.services.ServicesUtil.Key; -import gov.usgs.earthquake.nshmp.www.services.ServicesUtil.ServiceQueryData; -import gov.usgs.earthquake.nshmp.www.services.ServicesUtil.ServiceRequestData; import io.micronaut.http.HttpRequest; import io.micronaut.http.HttpResponse; @@ -39,6 +41,8 @@ import jakarta.inject.Singleton; @Singleton public final class RateService { + static final Logger LOG = LoggerFactory.getLogger(RateService.class); + /* * Developer notes: * @@ -60,10 +64,10 @@ public final class RateService { public static HttpResponse<String> handleDoGetUsage(HttpRequest<?> request, Service service) { try { var response = metadata(request, service); - var svcResponse = ServletUtil.GSON.toJson(response); - return HttpResponse.ok(svcResponse); + var json = ServletUtil.GSON.toJson(response); + return HttpResponse.ok(json); } catch (Exception e) { - return 
ServicesUtil.handleError(e, service.name, request.getUri().getPath()); + return ServletUtil.error(LOG, e, service.name, request.getUri().getPath()); } } @@ -92,26 +96,30 @@ public final class RateService { var svcResponse = ServletUtil.GSON.toJson(response); return HttpResponse.ok(svcResponse); } catch (Exception e) { - return ServicesUtil.handleError(e, service.name, request.getUri().getPath()); + return ServletUtil.error(LOG, e, service.name, request.getUri().getPath()); } } - static Response<String, Usage> metadata(HttpRequest<?> request, Service service) { + static ResponseBody<String, Usage> metadata(HttpRequest<?> request, Service service) { var parameters = service == Service.RATE ? new RateParameters() : new ProbabilityParameters(); var usage = new Usage(service, parameters); var url = request.getUri().getPath(); - return new Response<>(Status.USAGE, service.name, url, usage, url); + return ResponseBody.<String, Usage> usage() + .name(service.name) + .url(url) + .request(url) + .response(usage) + .build(); } - static Response<RequestData, ResponseData> processRequest( + static ResponseBody<RequestData, ResponseData> processRequest( HttpRequest<?> request, Service service, RequestData data) throws InterruptedException, ExecutionException { var timer = Stopwatch.createStarted(); var rates = calc(service, data); var responseData = new ResponseData(new ResponseMetadata(service, data), rates, timer); - return Response.<RequestData, ResponseData> builder() - .success() + return ResponseBody.<RequestData, ResponseData> success() .name(service.name) .request(data) .response(responseData) @@ -131,11 +139,9 @@ public final class RateService { * probability service has been called. 
*/ - // for (var model : ServletUtil.hazardModels()) { var model = ServletUtil.model(); var rate = process(service, model, site, data.distance, data.timespan); futureRates.add(rate); - // } var rates = futureRates.stream() .map((future) -> { @@ -268,7 +274,7 @@ public final class RateService { final List<Sequence> data; ResponseData(ResponseMetadata metadata, EqRate rates, Stopwatch timer) { - server = Metadata.serverData(ServletUtil.THREAD_COUNT, timer); + server = ServletUtil.serverData(ServletUtil.THREAD_COUNT, timer); this.metadata = metadata; this.data = buildSequence(rates); } @@ -303,8 +309,8 @@ public final class RateService { } /* - * TODO would rather use this a general container for mfds and hazard curves. - * See HazardService.Curve + * Would rather use this a general container for mfds and hazard curves. See + * HazardService.Curve */ private static class Sequence { final String component; @@ -327,7 +333,7 @@ public final class RateService { private Usage(Service service, DefaultParameters parameters) { description = service.description; this.syntax = service.syntax; - server = Metadata.serverData(1, Stopwatch.createStarted()); + server = ServletUtil.serverData(1, Stopwatch.createStarted()); this.parameters = parameters; } } diff --git a/src/main/java/gov/usgs/earthquake/nshmp/www/services/SourceLogicTreesService.java b/src/main/java/gov/usgs/earthquake/nshmp/www/services/SourceLogicTreesService.java index 823445be91ee7b52663a7445da3d05412b373a4a..01e2d4c2186ce6f09a74094576a6b9f489a34e11 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/www/services/SourceLogicTreesService.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/www/services/SourceLogicTreesService.java @@ -1,9 +1,12 @@ package gov.usgs.earthquake.nshmp.www.services; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import gov.usgs.earthquake.nshmp.model.Models; -import gov.usgs.earthquake.nshmp.www.Response; +import gov.usgs.earthquake.nshmp.www.ResponseBody; +import 
gov.usgs.earthquake.nshmp.www.ServletUtil; import gov.usgs.earthquake.nshmp.www.SourceLogicTreesController; -import gov.usgs.earthquake.nshmp.www.meta.Status; import io.micronaut.http.HttpRequest; import io.micronaut.http.HttpResponse; @@ -17,6 +20,8 @@ import jakarta.inject.Singleton; @Singleton public class SourceLogicTreesService { + static final Logger LOG = LoggerFactory.getLogger(SourceLogicTreesService.class); + private static final String NAME = "Source Logic Trees"; /** SourceLogicTreesController.doGetMetadata() handler */ @@ -25,10 +30,15 @@ public class SourceLogicTreesService { try { var trees = Models.trees(ServletUtil.model()); - var response = new Response<>(Status.SUCCESS, NAME, url, trees, url); + var response = ResponseBody.success() + .name(NAME) + .url(url) + .request(url) + .response(trees) + .build(); return HttpResponse.ok(ServletUtil.GSON.toJson(response)); } catch (Exception e) { - return ServicesUtil.handleError(e, NAME, url); + return ServletUtil.error(LOG, e, NAME, url); } } @@ -39,10 +49,15 @@ public class SourceLogicTreesService { try { var tree = Models.tree(ServletUtil.model(), id); var requestData = new RequestData(id); - var response = new Response<>(Status.SUCCESS, NAME, requestData, tree, url); + var response = ResponseBody.success() + .name(NAME) + .url(url) + .request(requestData) + .response(tree) + .build(); return HttpResponse.ok(ServletUtil.GSON.toJson(response)); } catch (Exception e) { - return ServicesUtil.handleError(e, NAME, url); + return ServletUtil.error(LOG, e, NAME, url); } } diff --git a/src/main/java/gov/usgs/earthquake/nshmp/www/services/SourceServices.java b/src/main/java/gov/usgs/earthquake/nshmp/www/services/SourceServices.java index 7db7d76b57225955fe04edd5288ee220b1f0d9a6..03704247e5c02916c38503d6303c4516a67b2c63 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/www/services/SourceServices.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/www/services/SourceServices.java @@ -1,8 +1,14 @@ package 
gov.usgs.earthquake.nshmp.www.services; +import static java.util.stream.Collectors.toList; + +import java.util.List; import java.util.Map; import java.util.Set; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import com.google.common.base.Stopwatch; import com.google.gson.Gson; import com.google.gson.GsonBuilder; @@ -11,10 +17,10 @@ import gov.usgs.earthquake.nshmp.gmm.Gmm; import gov.usgs.earthquake.nshmp.gmm.Imt; import gov.usgs.earthquake.nshmp.gmm.NehrpSiteClass; import gov.usgs.earthquake.nshmp.model.HazardModel; -import gov.usgs.earthquake.nshmp.www.Response; +import gov.usgs.earthquake.nshmp.www.ResponseBody; +import gov.usgs.earthquake.nshmp.www.ServletUtil; import gov.usgs.earthquake.nshmp.www.WsUtils; -import gov.usgs.earthquake.nshmp.www.meta.Metadata; -import gov.usgs.earthquake.nshmp.www.meta.Status; +import gov.usgs.earthquake.nshmp.www.meta.Parameter; import io.micronaut.http.HttpRequest; import io.micronaut.http.HttpResponse; @@ -34,6 +40,8 @@ public class SourceServices { private static final String SERVICE_DESCRIPTION = "Utilities for querying earthquake source models"; + static final Logger LOG = LoggerFactory.getLogger(RateService.class); + public static final Gson GSON; static { @@ -48,71 +56,53 @@ public class SourceServices { public static HttpResponse<String> handleDoGetUsage(HttpRequest<?> request) { var url = request.getUri().getPath(); try { - var response = new Response<>( - Status.USAGE, NAME, url, new ResponseData(), url); - var jsonString = GSON.toJson(response); - return HttpResponse.ok(jsonString); + var response = ResponseBody.usage() + .name(NAME) + .url(url) + .request(url) + .response(new ResponseData()) + .build(); + var json = GSON.toJson(response); + return HttpResponse.ok(json); } catch (Exception e) { - return ServicesUtil.handleError(e, NAME, url); + return ServletUtil.error(LOG, e, NAME, url); } } /* - * TODO service metadata should be in same package as services (why + * task... 
service metadata should be in same package as services (why * ResponseData is currently public); rename meta package to */ public static class ResponseData { final String description; final Object server; - // final Parameters parameters; public ResponseData() { this.description = "Installed source model listing"; - this.server = Metadata.serverData(ServletUtil.THREAD_COUNT, Stopwatch.createStarted()); - // this.parameters = new Parameters(); + this.server = ServletUtil.serverData( + ServletUtil.THREAD_COUNT, + Stopwatch.createStarted()); } } - // static class Parameters { - // List<SourceModel> models; - // DoubleParameter returnPeriod; - // DoubleParameter vs30; - // - // Parameters() { - // models = ServletUtil.hazardModels().stream() - // .map(SourceModel::new) - // .collect(Collectors.toList()); - // - // returnPeriod = new DoubleParameter( - // "Return period", - // "years", - // 100.0, - // 1e6); - // - // vs30 = new DoubleParameter( - // "Vs30", - // "m/s", - // 150, - // 1500); - // } - // } - public static class SourceModel { String name; Set<Gmm> gmms; Map<NehrpSiteClass, Double> siteClasses; + List<Parameter> imts; - SourceModel(HazardModel model) { + public SourceModel(HazardModel model) { name = model.name(); gmms = model.gmms(); siteClasses = model.siteClasses(); + imts = model.gmms().stream() + .map(Gmm::supportedImts) + .flatMap(Set::stream) + .distinct() + .sorted() + .map(imt -> new Parameter(ServletUtil.imtShortLabel(imt), imt.name())) + .collect(toList()); } - - // public static List<SourceModel> getList() { - // return ServletUtil.hazardModels().stream() - // .map(SourceModel::new) - // .collect(Collectors.toList()); - // } } enum Attributes { diff --git a/src/main/resources/application.yml b/src/main/resources/application.yml index d4d11ef0c76a2efa5215bacfdf56dc4f1d56d3e9..5ca3350641b37f5b8d35d94bb59f1bea3dc5b00b 100644 --- a/src/main/resources/application.yml +++ b/src/main/resources/application.yml @@ -20,4 +20,6 @@ nshmp-haz: # The path to 
the models. # To specify the model to use: # java -jar build/libs/nshmp-haz.jar --models=<path/to/models> + # model-path: ${models:libs/nshmp-haz-dep--nshm-hi-2021} + # model-path: ${models:libs/nshmp-haz-dep--nshm-conus-2018} diff --git a/src/test/java/gov/usgs/earthquake/nshmp/programs/HazardCurveTest.java b/src/test/java/gov/usgs/earthquake/nshmp/programs/HazardCurveTest.java deleted file mode 100644 index 8ffeefca020c785ec60bcc40a02c1ff11e12e95e..0000000000000000000000000000000000000000 --- a/src/test/java/gov/usgs/earthquake/nshmp/programs/HazardCurveTest.java +++ /dev/null @@ -1,18 +0,0 @@ -package gov.usgs.earthquake.nshmp.programs; - -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -public class HazardCurveTest { - - @BeforeEach - public void setUpBeforeClass() throws Exception {} - - @Test - public final void testRun() { - // String[] args = new String[] {}; - // String status = HazardCalc.run(args); - // assertEquals(HazardCalc.USAGE.substring(0,18), status.substring(0,18)); - } - -}