diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index d2a0e989290c0fe43eb3aec75f5f0ef2bbc27631..1936de2a117e2d7ba18ecad5b4d701c2b3c0c062 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -62,29 +62,6 @@ Build Image WS: DOCKERFILE: ws.Dockerfile UPSTREAM_PATH: ghsc/nshmp/nshmp-haz -Build Lambda: - artifacts: - expire_in: 1 yr - paths: - - build/libs/nshmp-haz.jar - - build/libs/nshmp-haz-dependencies.zip - extends: - - .gradle - needs: - - Init - rules: - - - changes: - - 'src/**' - - '*gradle*' - when: on_success - - - allow_failure: true - when: manual - script: - - ./gradlew assemble - - ./gradlew libs - Build Project: extends: - .gradle @@ -132,7 +109,7 @@ Unit Tests: rules: - changes: - - 'src/**' + - 'src/**/*' - '*gradle*' when: on_success - diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 58cb309d74c891e6e0aef8864ff9c792b14e89b4..13b639a18358d12bbe5ca7633a95d27ea47e0cad 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -3,7 +3,7 @@ Contributions are welcome from the community. Questions can be asked on the [issues page][1]. Before creating a new issue, please take a moment to search and make sure a similar issue does not already exist. If one does exist, you -can comment (most simply even with just a `:+1:`) to show your support for that +can comment (most simply even with just a :+1:) to show your support for that issue. If you have direct contributions you would like considered for incorporation diff --git a/Dockerfile b/Dockerfile index 3a5e5a678a771ac174282fc00fad13727cec4ca9..934d0ee14bd0a25604e703ee66db0861e2744cb5 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,48 +1,46 @@ #### -# Run hazard jar file. +# Run nshmp-haz +# +# Pull Docker Image: +# - Production (stable): docker pull usgs/nshmp-haz:production-latest +# - Staging (latest, main branch of repo): docker pull usgs/nshmp-haz:staging-latest +# - Development (developer forks): docker pull usgs/nshmp-haz:development-latest +# +# Run Docker Image: +# Parameters: +# - CLASS_NAME: The nshmp-haz class name to run (e.g. HazardCalc) +# - IML: The intensity measure level, used in certain programs +# - JAVA_OPTS: Any JVM options (e.g. -Xmx8g) +# - RETURN_PERIOD: The return period, used in certian programs +# +# Volumes: +# - Model: /app/model +# - Output: /app/output # -# Running Hazard: -# docker pull code.chs.usgs.gov:5001/ghsc/nshmp/images/nshmp-haz; # docker run \ -# -e PROGRAM=<deagg | deagg-epsilon | deagg-iml | hazard | hazard-2018 | rate> \ -# -e MODEL=<WUS_20[08|14|18] | CEUS_20[08|14|18] | COUS_20[08|14|18] | AK_2007 | HI_2020> \ -# -v /absolute/path/to/sites/file:/app/sites.<geojson | csv> \ -# -v /absolute/path/to/config/file:/app/config.json \ -# -v /absolute/path/to/output:/app/output \ -# code.chs.usgs.gov:5001/ghsc/nshmp/images/nshmp-haz; +# --env CLASS_NAME="nshmp-haz class name" \ +# --volume "/path/to/model:/app/model" \ +# --volume "/path/to/output:/app/output" \ +# usgs/nshmp-haz:production-latest # # Build locally: -# docker build -# --build-arg gitlab_token=<git-api-token> -# -t nshmp-haz . +# docker build -t nshmp-haz . #### ARG BUILD_IMAGE=usgs/java:11 ARG FROM_IMAGE=usgs/java:11 -ARG project=nshmp-haz -ARG builder_workdir=/app/${project} -ARG libs_dir=${builder_workdir}/build/libs - #### # Builder image: Build jar file. 
#### FROM ${BUILD_IMAGE} as builder -ARG builder_workdir -ARG libs_dir - # TODO # Remove once nshmp-lib is public -ARG git_username -ARG git_password ARG GITLAB_TOKEN=null ARG CI_JOB_TOKEN=null -ENV GIT_NSHMP_USERNAME ${git_username} -ENV GIT_NSHMP_PASSWORD ${git_password} - -WORKDIR ${builder_workdir} +WORKDIR /app COPY . . @@ -55,30 +53,24 @@ FROM ${FROM_IMAGE} LABEL maintainer="Peter Powers <pmpowers@usgs.gov>, Brandon Clayton <bclayton@usgs.gov>" -ARG builder_workdir -ARG libs_dir -ARG project -ARG ws_file - -ENV CONFIG_FILE "" -ENV DEBUG false +# nshmp-haz inputs +ENV CLASS_NAME "HazardCalc" ENV IML "" -ENV JAVA_XMX "8g" -ENV MODEL "" -ENV MOUNT_MODEL false -ENV NSHM_VERSION master -ENV PROGRAM hazard -ENV PROJECT ${project} ENV RETURN_PERIOD "" -VOLUME [ "/app/output" ] +ENV CONFIG_FILE "/app/config.json" +ENV JAVA_MEMORY "8g" +ENV MODEL_PATH "/app/model" +ENV OUTPUT_PATH "/app/output" + +VOLUME [ "${MODEL_PATH}", "${OUTPUT_PATH}" ] WORKDIR /app -COPY --from=builder ${libs_dir}/* ./ +COPY --from=builder /app/build/libs/nshmp-haz.jar . COPY scripts scripts -RUN yum install -y jq +RUN yum install -y jq \ + && echo "{}" > "${CONFIG_FILE}" -EXPOSE 8080 ENTRYPOINT [ "bash", "scripts/docker-entrypoint.sh" ] diff --git a/README.md b/README.md index 203d474f069b82ffc94e95be67fde428a1fe563e..849cd483dd258f64c5082b42c41c606c246cf4bc 100644 --- a/README.md +++ b/README.md @@ -8,4 +8,4 @@ command line applications and web service classes and relies on the [*nshmp-lib*](https://code.usgs.gov/ghsc/nshmp/nshmp-lib) hazard library, among other dependencies. -Please see the [docs](https://code.usgs.gov/ghsc/nshmp/nshmp-haz/-/tree/master/docs) for more information. +Please see the [docs](docs/README.md) for more information. diff --git a/build.gradle b/build.gradle index cf42f93f77e99cb7fa99a5aa61fd76f14545ec18..4e0a5e5b94dd6e2b59d8580bd890145b1a2d9a1b 100644 --- a/build.gradle +++ b/build.gradle @@ -49,7 +49,6 @@ apply from: "${projectDir}/gradle/ext.gradle" apply from: "${projectDir}/gradle/jar.gradle" apply from: "${projectDir}/gradle/javadoc.gradle" apply from: "${projectDir}/gradle/repositories.gradle" -apply from: "${projectDir}/gradle/tasks.gradle" sourceCompatibility = JavaVersion.VERSION_11 compileJava.options.encoding = "UTF-8" diff --git a/code.json b/code.json index ba79cd17229e5b108ff37671089c4e128bd7a0ab..f524431f124cd5bb5c2e260747a357e7d5cf8ff6 100644 --- a/code.json +++ b/code.json @@ -12,15 +12,15 @@ "licenses": [ { "name": "Public Domain, CC0-1.0", - "URL": "https://code.usgs.gov/ghsc/nshmp/nshmp-haz/-/blob/master/LICENSE.md" + "URL": "https://code.usgs.gov/ghsc/nshmp/nshmp-haz/-/blob/main/LICENSE.md" } ] }, "repositoryURL": "https://code.usgs.gov/ghsc/nshmp/nshmp-haz/", - "homepageURL": "https://code.usgs.gov/ghsc/nshmp/nshmp-haz/-/tree/master/docs", + "homepageURL": "https://code.usgs.gov/ghsc/nshmp/nshmp-haz/-/tree/main/docs", "downloadURL": "https://code.usgs.gov/ghsc/nshmp/nshmp-haz/-/releases/tag/v#.#.#", - "disclaimerURL": "https://code.usgs.gov/ghsc/nshmp/nshmp-haz/-/blob/master/DISCLAIMER.md", + "disclaimerURL": "https://code.usgs.gov/ghsc/nshmp/nshmp-haz/-/blob/main/DISCLAIMER.md", "vcs": "git", "laborHours": 1200, diff --git a/docs/README.md b/docs/README.md index 3ef557e3b0a1c7ae9a581ccf271151734bc4857e..180c0d63e9eae785e6b5a6b666448666d89c8da3 100644 --- a/docs/README.md +++ b/docs/README.md @@ -1,42 +1,52 @@ # Documentation: nshmp-haz -***nshmp-haz*** is a USGS developed software stack that supports probabilistic seismic hazard -(PSHA) and related analyses. 
It is maintained by the National Seismic Hazard Model Project -([NSHMP](https://earthquake.usgs.gov/hazards/)) within the U.S. Geological Survey's -([USGS](https://www.usgs.gov)) earthquake hazards program ([EHP](http://earthquake.usgs.gov)). +***nshmp-haz*** is a U.S. Geological Survey ([USGS](https://www.usgs.gov)) developed software stack +that supports probabilistic seismic hazard (PSHA) and related analyses. It is maintained by the +National Seismic Hazard Model Project ([NSHMP](https://earthquake.usgs.gov/hazards/)) within the +USGS's earthquake hazards program ([EHP](http://earthquake.usgs.gov)). *nshmp-haz* supports high performance seismic hazard calculations required to generate detailed maps over large areas and supports a variety of USGS web services and applications related to -seismic hazards research and the dissemination of hazard data. This documentation explains how -to use *nshmp-haz* as well as underlying model implementation details. +seismic hazards research and the dissemination of hazard data (see the +[NSHM Hazard Tool](https://earthquake.usgs.gov/nshmp/)). This documentation explains how to +use *nshmp-haz* as well as underlying model implementation details. ## Table of Contents -* [About the NSHMP](pages/About-the-NSHMP.md) -* [Building & Running](pages/Building-&-Running.md) - * [Developer Basics](pages/Developer-Basics.md) - * [Calculation Configuration](pages/Calculation-Configuration.md) - * [Site Specification](pages/Site-Specification.md) - * [Examples](/ghsc/nshmp/nshmp-haz/-/tree/master/etc/examples) -* [Hazard Model](pages/Hazard-Model.md) - * [Model Structure](pages/Model-Structure.md) - * [Model Files](pages/Model-Files.md) - * [Source Types](pages/Source-Types.md) - * [Magnitude Frequency Distributions (MFDs)](pages/Magnitude-Frequency-Distributions.md) - * [Rupture Scaling Relations](pages/Rupture-Scaling-Relations.md) - * [Ground Motion Models (GMMs)](pages/Ground-Motion-Models.md) -* [USGS Models](pages/USGS-Models.md) - * [Model Editions](pages/Model-Editions.md) - * [Logic Trees & Uncertainty](pages/Logic-Trees-&-Uncertainty.md) - -## Other Pages & References - -* [nshmp-lib](/ghsc/nshmp/nshmp-lib): USGS hazard modeling library -* [Functional PSHA](pages/Functional-PSHA.md) +* [About the NSHMP](./pages/About-the-NSHMP.md) +* [Building & Running](./pages/Building-&-Running.md) + * [Developer Basics](./pages/Developer-Basics.md) + * [Calculation Configuration](./pages/Calculation-Configuration.md) + * [Site Specification](./pages/Site-Specification.md) + * [Examples](../../etc/examples) (or + [on GitLab](https://code.usgs.gov/ghsc/nshmp/nshmp-haz/-/tree/main/etc/examples)) +* [Hazard Model](./pages/Hazard-Model.md) + * [Model Structure](./pages/Model-Structure.md) + * [Model Files](./pages/Model-Files.md) + * [Source Types](./pages/Source-Types.md) + * [Magnitude Frequency Distributions (MFDs)](./pages/Magnitude-Frequency-Distributions.md) + * [Rupture Scaling Relations](./pages/Rupture-Scaling-Relations.md) + * [Ground Motion Models (GMMs)](./pages/Ground-Motion-Models.md) +* [USGS Models](./pages/USGS-Models.md) + * [Model Editions](./pages/Model-Editions.md) + * [Logic Trees & Uncertainty](./pages/Logic-Trees-&-Uncertainty.md) + +## Related Information + +* [nshmp-lib](https://code.usgs.gov/ghsc/nshmp/nshmp-lib): USGS hazard modeling library + * [nshmp-lib JavaDocs](https://earthquake.usgs.gov/nshmp/docs/nshmp-lib/) +* [*nshmp-haz* License](../LICENSE.md) + +## References + +* [Functional PSHA](./pages/Functional-PSHA.md) * [Probabilistic 
Seismic Hazard Analysis, a Primer - [PDF]](http://www.opensha.org/sites/opensha.org/files/PSHA_Primer_v2_0.pdf) + [PDF]](https://opensha.org/resources/PSHA_Primer_v2_0.pdf) by Edward Field * [An Introduction to Probabilistic Seismic Hazard Analysis [PDF]](http://web.stanford.edu/~bakerjw/Publications/Baker_(2015)_Intro_to_PSHA.pdf) by Jack Baker -* [License](../LICENSE.md) + +--- + [U.S. Geological Survey](https://www.usgs.gov) +National Seismic Hazard Mapping Project ([NSHMP](https://earthquake.usgs.gov/hazards/)) diff --git a/docs/pages/About-the-NSHMP.md b/docs/pages/About-the-NSHMP.md index 0e1c508a2b349865f66321b14697f7b35167182f..2ee3a89ea3da645cc77555b5faad65e554e5b928 100644 --- a/docs/pages/About-the-NSHMP.md +++ b/docs/pages/About-the-NSHMP.md @@ -21,3 +21,11 @@ An NSHM defines the set of likely earthquake sources and their rates in a partic parameters of the earhtquake source and a site of interest, ground motion models (GMMs) are used to estimate ground shaking from the set of earthquakes. The NSHMP routinely updates NSHMs for the U.S. and its territories to consider the best available science. + +--- + +[**Documentation Index**](../README.md) + +--- + [U.S. Geological Survey](https://www.usgs.gov) +National Seismic Hazard Mapping Project ([NSHMP](https://earthquake.usgs.gov/hazards/)) diff --git a/docs/pages/Building-&-Running.md b/docs/pages/Building-&-Running.md index 2184c25f133d3b1789c83d9e00905c377765a83d..25622afad6a1651185b1f6800f665fc2e974cda2 100644 --- a/docs/pages/Building-&-Running.md +++ b/docs/pages/Building-&-Running.md @@ -2,7 +2,12 @@ ## Related Pages -TODO +* [Building & Running](./Building-&-Running.md#building-&-running) + * [Developer Basics](./Developer-Basics.md#developer-basics) + * [Calculation Configuration](./Calculation-Configuration.md#calculation-configuration) + * [Site Specification](./Site-Specification.md#site-specification) + * [Examples](../../etc/examples) (or + [on GitLab](https://code.usgs.gov/ghsc/nshmp/nshmp-haz/-/tree/main/etc/examples)) ## Build & Run Options @@ -12,7 +17,7 @@ TODO ## Build and Run Locally Building and running *nshmp-haz* requires prior installation of Git and Java. Please see the -[developer basics](developer-basics) page for system configuration guidance. +[developer basics](./Developer-Basics.md) page for system configuration guidance. ### Building @@ -40,15 +45,16 @@ measures. For example: java -cp path/to/nshmp-haz.jar gov.usgs.earthquake.nshmp.HazardCalc model sites [config] ``` -At a minimum, the hazard source [model](hazard-model) and the [site](site-specification)(s) at -which to perform calculations must be specified. The source model should specified a path to a +At a minimum, the hazard source [model](./Hazard-Model.md) and the [site](./Site-Specification.md)(s) +at which to perform calculations must be specified. The source model should specified a path to a directory. A single site may be specified with a string; multiple sites must be specified using either a comma-delimited (CSV) or [GeoJSON](http://geojson.org) file. The path to a custom -[configuration](calculation-configuration) file containing user-specific settings may optionally +[configuration](./Calculation-Configuration.md) file containing user-specific settings may optionally be supplied as a third argument. It can be used to override any calculation settings; if absent -[default](calculation-configuration) values are used. +[default](./Calculation-Configuration.md) values are used. 
-See the [examples](/ghsc/nshmp/nshmp-haz-v2/-/tree/master/etc/examples) directory for more details. +See the [examples](../../etc/examples) directory for more details (or +[on GitLab](https://code.usgs.gov/ghsc/nshmp/nshmp-haz/-/tree/main/etc/examples)) ### Computing Disaggregations @@ -62,14 +68,32 @@ java -cp nshmp-haz.jar gov.usgs.earthquake.nshmp.DisaggCalc model sites returnPe Disaggregations build on and output `HazardCalc` results along with other disaggregation specific files. Disaggregations also have some independent -[configuration](calculation-configuration#config-disagg) options. +[configuration](./Calculation-Configuration.md#config-disagg) options. ## Run with [Docker](https://docs.docker.com/install/) -To ensure you are have the latest *nshmp-haz* update, always first pull the image from Docker: +nshmp-haz is available as a [public image](https://hub.docker.com/repository/docker/usgs/nshmp-haz) +with tags: + +* `development-latest`: Developer forks +* `staging-latest`: Latest updates associated with the +[main](https://code.usgs.gov/ghsc/nshmp/nshmp-haz/-/tree/main) branch +* `production-latest`: Latest stable release associated with the +[production](https://code.usgs.gov/ghsc/nshmp/nshmp-haz/-/tree/production) branch + +To ensure you have the latest *nshmp-haz* update associated with a specific tag, +always first pull the image from Docker: + +```bash +docker pull usgs/nshmp-haz:<tag> +``` + +> Replace `<tag>` with one of the above tags. + +Example: ```bash -docker pull usgs/nshmp-haz +docker pull usgs/nshmp-haz:production-latest ``` ### Docker Memory on Mac @@ -78,7 +102,7 @@ By default, Docker Desktop for Mac is set to use 2 GB runtime memory. To run *ns memory available to Docker must be [increased](https://docs.docker.com/docker-for-mac/#advanced) to a minimum of 4 GB. -### Running +### Run nshmp-haz in Docker The *nshmp-haz* application may be run as a Docker container which mitigates the need to install Git, Java, or other dependencies besides Docker. A public image is available on @@ -87,60 +111,153 @@ which can be run with: ```bash docker run \ - -e PROGRAM=<disagg | hazard | rate> \ - -e MODEL=<CONUS-2018 | HAWAII-2021> \ - -e RETURN_PERIOD=<RETURN_PERIOD> \ - -v /absolute/path/to/sites/file:/app/sites.<geojson | csv> \ - -v /absolute/path/to/config/file:/app/config.json \ - -v /absolute/path/to/output:/app/output \ - usgs/nshmp-haz - -# Example -docker run \ - -e PROGRAM=hazard \ - -e MODEL=CONUS-2018 \ - -v $(pwd)/sites.geojson:/app/sites.geojson \ - -v $(pwd)/config.json:/app/config.json \ - -v $(pwd)/hazout:/app/output \ + --env CLASS_NAME=<DeaggCalc | DeaggIml | HazardCalc | RateCalc> \ + --env IML=<NUMBER> \ + --env RETURN_PERIOD=<NUMBER> \ + --volume /absolute/path/to/sites/file:/app/sites.<geojson | csv> \ + --volume /absolute/path/to/config/file:/app/config.json \ + --volume /absolute/path/to/output:/app/output \ usgs/nshmp-haz ``` -Where: (TODO links below need checking) - -* `PROGRAM` is the nshmp-haz program to run: - * disagg = `DisaggCalc` - * hazard = `HazardCalc` - * rate = `RateCalc` - -* `MODEL` is the [USGS model (NSHM)](usgs-models) to run: - * CONUS-2018: [Conterminous U.S. 
2018](https://github.com/usgs/nshm-conus) - * HAWAII-2021: [Hawaii 2021](https://code.usgs.gov/ghsc/nshmp/nshm-hawaii) +Where: +* `CLASS_NAME` is the nshmp-haz class to run: + * [DeaggCalc](../../src/main/java/gov/usgs/earthquake/nshmp/DeaggCalc.java) + * [DeaggIml](../../src/main/java/gov/usgs/earthquake/nshmp/DeaggIml.java) + * [HazardCalc](../../src/main/java/gov/usgs/earthquake/nshmp/HazardCalc.java) + * [RateCalc](../../src/main/java/gov/usgs/earthquake/nshmp/RateCalc.java) * `RETURN_PERIOD`, in years, is only required when running a disaggregation - -* Other arguments: - * (required) The absolute path to a GeoJSON or CSV [site(s)](site-specification) file +* `IML`: intensity measure level, only required when running `DeaggIml` +* Other arguments (local files mapped to files within the Docker container with `:/app/...`): + * (required) The absolute path to a [USGS model (NSHM)](./USGS-Models.md) + * Example: `$(pwd)/nshm-hawaii:/app/model` + * (required) The absolute path to a GeoJSON or CSV [site(s)](./Site-Specification.md) file * CSV example: `$(pwd)/my-csv-sites.csv:/app/sites.csv` * GeoJSON example: `$(pwd)/my-geojson-sites.geojson:/app/sites.geojson` - * (optional) The absolute path to a [configuration](calculation-configuration) file - * Example: `$(pwd)/my-custom-config.json:/app/config.json` * (required) The absolute path to an output directory * Example: `$(pwd)/my-hazard-output:/app/output` + * (optional) The absolute path to a [configuration](./Calculation-Configuration.md) file + * Example: `$(pwd)/my-custom-config.json:/app/config.json` + +### Docker Examples + +#### [`DeaggCalc`](../../src/main/java/gov/usgs/earthquake/nshmp/DeaggCalc.java) Example + +The following example runs the `DeaggCalc` program in nshmp-haz with the +[nshm-hawaii](https://code.usgs.gov/ghsc/nshmp/nshms/nshm-hawaii.git) model and the +assumption a GeoJSON [site](./Site-Specification.md) file exists named `sites.geojson`. + +```bash +# Download Hawaii NSHM +git clone https://code.usgs.gov/ghsc/nshmp/nshms/nshm-hawaii.git + +# Pull image +docker pull usgs/nshmp-haz:production-latest + +# Run nshmp-haz DeaggCalc +docker run \ + --env CLASS_NAME="DeaggCalc" \ + --env RETURN_PERIOD=475 \ + --volume "$(pwd)/nshm-hawaii:/app/model" \ + --volume "$(pwd)/sites.geojson" \ + --volume "$(pwd)/hawaii-disagg-output:/app/output" \ + usgs/nshmp-haz:production-latest +``` + +#### [`DeaggIml`](../../src/main/java/gov/usgs/earthquake/nshmp/DeaggIml.java) Example + +The following example runs the `DeaggIml` program in nshmp-haz with the +[nshm-hawaii](https://code.usgs.gov/ghsc/nshmp/nshms/nshm-hawaii.git) model and the +assumption a GeoJSON [site](./Site-Specification.md) file exists named `sites.geojson`. + +```bash +# Download Hawaii NSHM +git clone https://code.usgs.gov/ghsc/nshmp/nshms/nshm-hawaii.git + +# Pull image +docker pull usgs/nshmp-haz:production-latest + +# Run nshmp-haz DeaggIml +docker run \ + --env CLASS_NAME="DeaggCalc" \ + --env IML=1 \ + --volume "$(pwd)/nshm-hawaii:/app/model" \ + --volume "$(pwd)/sites.geojson" \ + --volume "$(pwd)/hawaii-disagg-iml-output:/app/output" \ + usgs/nshmp-haz:production-latest +``` + +#### [`HazardCalc`](../../src/main/java/gov/usgs/earthquake/nshmp/HazardCalc.java) Example + +The following example runs the `HazardCalc` program in nshmp-haz with the +[nshm-hawaii](https://code.usgs.gov/ghsc/nshmp/nshms/nshm-hawaii.git) model and the +assumption a GeoJSON [site](./Site-Specification.md) file exists named `sites.geojson`. 
+ +```bash +# Download Hawaii NSHM +git clone https://code.usgs.gov/ghsc/nshmp/nshms/nshm-hawaii.git + +# Pull image +docker pull usgs/nshmp-haz:production-latest + +# Run nshmp-haz HazardCalc +docker run \ + --env CLASS_NAME="HazardCalc" \ + --volume "$(pwd)/nshm-hawaii:/app/model" \ + --volume "$(pwd)/sites.geojson" \ + --volume "$(pwd)/hawaii-hazard-output:/app/output" \ + usgs/nshmp-haz:production-latest +``` + +#### [`RateCalc`](../../src/main/java/gov/usgs/earthquake/nshmp/RateCalc.java) Example + +The following example runs the `RateCalc` program in nshmp-haz with the +[nshm-hawaii](https://code.usgs.gov/ghsc/nshmp/nshms/nshm-hawaii.git) model and the +assumption a GeoJSON [site](./Site-Specification.md) file exists named `sites.geojson`. + +```bash +# Download Hawaii NSHM +git clone https://code.usgs.gov/ghsc/nshmp/nshms/nshm-hawaii.git + +# Pull image +docker pull usgs/nshmp-haz:production-latest + +# Run nshmp-haz RateCalc +docker run \ + --env CLASS_NAME="RateCalc" \ + --volume "$(pwd)/nshm-hawaii:/app/model" \ + --volume "$(pwd)/sites.geojson" \ + --volume "$(pwd)/hawaii-rate-output:/app/output" \ + usgs/nshmp-haz:production-latest +``` ### Run Customization -When running *nshmp-haz* with Docker the initial (Xms) and maximum (Xmx) JVM memory sizes can +When running *nshmp-haz* with Docker the maximum JVM memory size can be set with the environment flag (-e, -env): ```bash docker run \ - -e JAVA_XMS=<JAVA_XMS> \ - -e JAVA_XMX=<JAVA_XMX> \ + --env JAVA_MEMORY=<MEMORY> \ + ... + usgs/nshmp-haz + +# Example +docker run \ + --env JAVA_MEMORY="12g" \ ... usgs/nshmp-haz ``` Where: -* `JAVA_XMS` is the intial memory for the JVM (default: system) -* `JAVA_XMX` is the maximum memory for the JVM (default: 8g) +* `JAVA_MEMORY` is the maximum memory for the JVM (default: 8g) + +--- + +* [**Documentation Index**](../README.md) + +--- + [U.S. Geological Survey](https://www.usgs.gov) +National Seismic Hazard Mapping Project ([NSHMP](https://earthquake.usgs.gov/hazards/)) diff --git a/docs/pages/Calculation-Configuration.md b/docs/pages/Calculation-Configuration.md index e79851b26cd95b484107fa663ea93ad3c7608b8f..c7a20bde54a9f7407626f35161e8b90837efbd78 100644 --- a/docs/pages/Calculation-Configuration.md +++ b/docs/pages/Calculation-Configuration.md @@ -1,52 +1,59 @@ # Calculation Configuration -A `calc-config.json` file _may_ reside at the root of every [hazard model](hazard-model). This +A `calc-config.json` file _may_ reside at the root of every [hazard model](./Hazard-Model.md). This file, if present, will override any built-in default calculation configuration parameters, as -listed below. See the [examples](/usgs/nshmp-haz/tree/master/etc/examples) directory, or any -[USGS model](usgs-models), for concrete examples (TODO decide if last sentence needed and check -links). +listed below. See the [examples](../../etc/examples/README.md) directory, or any +[USGS model](./Usgs-Models.md), for concrete examples. ## Calculation Configuration Parameters Calculation configuration parameters are optional (i.e. defaults are used for missing values) and -may be overridden. See [building and running](building-&-running) and the -[examples](/usgs/nshmp-haz/tree/master/etc/examples) for details. - -(TODO needs updated javadoc links) +may be overridden. See [building and running](./Building-&-Running.md) and the +[examples](../../etc/examples) for details. 
Parameter | Type | Default | Notes | --------- | ---- | ------- | ----- | __`hazard`__ - `.exceedanceModel` |`String` | `TRUNCATION_3SIGMA_UPPER` | [`ExceedanceModel`](http://usgs.github.io/nshmp-haz/javadoc/index.html?gov/usgs/earthquake/nshmp/calc/ExceedanceModel.html) - `.truncationLevel` |`Double` | `3.0` | [1](notes) - `.imts` |`String[]` | `[ PGV, PGA, SA0P01, SA0P02, SA0P03, SA0P05, SA0P075, SA0P1, SA0P15, SA0P2, SA0P25, SA0P3, SA0P4, SA0P5, SA0P75, SA1P0, SA1P5, SA2P0, SA3P0, SA4P0, SA5P0, SA7P5, SA10P0 ]` | [`Imt`](http://usgs.github.io/nshmp-haz/javadoc/index.html?gov/usgs/earthquake/nshmp/gmm/Imt.html) + `.exceedanceModel` |`String` | `TRUNCATION_3SIGMA_UPPER` | [`ExceedanceModel`][url-exceedance] + `.truncationLevel` |`Double` | `3.0` | [1](#notes) + `.imts` |`String[]` | `[ PGV, PGA, SA0P01, SA0P02, SA0P03, SA0P05, SA0P075, SA0P1, SA0P15, SA0P2, SA0P25, SA0P3, SA0P4, SA0P5, SA0P75, SA1P0, SA1P5, SA2P0, SA3P0, SA4P0, SA5P0, SA7P5, SA10P0 ]` | [`Imt`][url-imt] `.customImls` |`Map<String, Double[]>` | `{}` (empty object) | [2](#notes) `.gmmUncertainty` |`Boolean` | `false` | [3](#notes) - `.valueFormat` |`String` | `ANNUAL_RATE` | [`ValueFormat`](http://usgs.github.io/nshmp-haz/javadoc/index.html?gov/usgs/earthquake/nshmp/calc/ValueFormat.html) -__`deagg`__ + `.valueFormat` |`String` | `ANNUAL_RATE` | [`ValueFormat`][url-valueformat] +__`disagg`__ `.bins` |`Object` | | [4](#notes) `.contributorLimit` |`Double` | `0.1` | [5](#notes) __`rate`__ `.bins` |`Object` | | [6](#notes) `.distance` |`Double` | `20` km - `.distributionFormat` |`String` | `INCREMENTAL` | [`DistributionFormat`](http://usgs.github.io/nshmp-haz/javadoc/index.html?gov/usgs/earthquake/nshmp/calc/DistributionFormat.html) + `.distributionFormat` |`String` | `INCREMENTAL` | [`DistributionFormat`][url-distribution] `.timespan` |`Double` | `30` years - `.valueFormat` |`String` | `ANNUAL_RATE` | [`ValueFormat`](http://usgs.github.io/nshmp-haz/javadoc/index.html?gov/usgs/earthquake/nshmp/calc/ValueFormat.html) + `.valueFormat` |`String` | `ANNUAL_RATE` | [`ValueFormat`][url-valueformat] __`site`__ - `.vs30` |`Double` | `760.0` | [`Site`](http://usgs.github.io/nshmp-haz/javadoc/index.html?gov/usgs/earthquake/nshmp/calc/Site.html) + `.vs30` |`Double` | `760.0` | [`Site`][url-site] `.vsInferred` |`Boolean` | `true` `.z1p0` |`Double` | `null` | [7](#notes) `.z2p5` |`Double` | `null` | [7](#notes) __`output`__ | `.directory` |`String` | `hazout` - `.dataTypes` |`String[]` | `[ TOTAL ]` | [`DataType`](http://usgs.github.io/nshmp-haz/javadoc/index.html?gov/usgs/earthquake/nshmp/calc/DataType.html) + `.dataTypes` |`String[]` | `[ TOTAL, MAP ]` | [`DataType`][url-datatype] + `.returnPeriods` |`Integer[]`| `[ 475, 975, 2475 ]` | [`ReturnPeriods`][url-returnperiods] __`performance`__ `.optimizeGrids` |`Boolean` | `true` | [8](#notes) `.smoothGrids` |`Boolean` | `true` | [9](#notes) `.systemPartition` |`Integer` | `1000` | [10](#notes) - `.threadCount` |`String` | `ALL` | [`ThreadCount`](http://usgs.github.io/nshmp-haz/javadoc/index.html?gov/usgs/earthquake/nshmp/calc/ThreadCount.html) + `.threadCount` |`String` | `ALL` | [`ThreadCount`][url-sheets] + +[url-exceedance]: https://earthquake.usgs.gov/nshmp/docs/nshmp-lib/gov/usgs/earthquake/nshmp/calc/ExceedanceModel.html +[url-imt]: https://earthquake.usgs.gov/nshmp/docs/nshmp-lib/gov/usgs/earthquake/nshmp/gmm/Imt.html +[url-valueformat]: https://earthquake.usgs.gov/nshmp/docs/nshmp-lib/gov/usgs/earthquake/nshmp/calc/ValueFormat.html +[url-distribution]: 
https://earthquake.usgs.gov/nshmp/docs/nshmp-lib/gov/usgs/earthquake/nshmp/calc/DistributionFormat.html +[url-site]: https://earthquake.usgs.gov/nshmp/docs/nshmp-lib/gov/usgs/earthquake/nshmp/calc/Site.html +[url-datatype]: https://earthquake.usgs.gov/nshmp/docs/nshmp-lib/gov/usgs/earthquake/nshmp/calc/DataType.html +[url-returnperiods]: https://earthquake.usgs.gov/nshmp/docs/nshmp-lib/gov/usgs/earthquake/nshmp/calc/CalcConfig.Output.html#returnPeriods +[url-sheets]: https://earthquake.usgs.gov/nshmp/docs/nshmp-lib/gov/usgs/earthquake/nshmp/calc/ThreadCount.html -## Notes +### Notes 1. `hazard.truncationLevel`: This value is only used if the `hazard.exceedanceModel` requires a limit (e.g. `TRUNCATION_UPPER_ONLY`) @@ -54,7 +61,7 @@ __`performance`__ been defined, this value en/disables this feature. 3. `hazard.customImls`: Hazard is computed at default intensity measure levels (IMLs) for every supported intenisty measure type (IMT), but a user can specify different IMLs as needed (see - this [example](/usgs/nshmp-haz/blob/master/etc/examples/2-custom-config/config.json) and the + [example 2](../../etc/examples/2-custom-config/README.md) and the table of default IMLs, below). 4. `disagg.bins`: This field maps to a data container that specifies the following default ranges and intervals for distance, magnitude, and epsilon binning: `"bins": { "rMin": 0.0, "rMax": @@ -82,14 +89,30 @@ not listed use the values of the next highest spectral period. IMT | IMLs -----------|----- -PGV | 0.237, 0.355, 0.532, 0.798, 1.19, 1.80, 2.69, 4.04, 6.06, 9.09, 13.6, 20.5, 30.7, 46.0, 69.0, 103.0, 155.0, 233.0, 349.0, 525.0 -PGA | 0.00233, 0.00350, 0.00524, 0.00786, 0.0118, 0.0177, 0.0265, 0.0398, 0.0597, 0.0896, 0.134, 0.202, 0.302, 0.454, 0.680, 1.02, 1.53, 2.30, 3.44, 5.17 -T ≤ 0.01 s | 0.00233, 0.00350, 0.00524, 0.00786, 0.0118, 0.0177, 0.0265, 0.0398, 0.0597, 0.0896, 0.134, 0.202, 0.302, 0.454, 0.680, 1.02, 1.53, 2.30, 3.44, 5.17 -T ≤ 0.02 s | 0.00283, 0.00424, 0.00637, 0.00955, 0.0143, 0.0215, 0.0322, 0.0483, 0.0725, 0.109, 0.163, 0.245, 0.367, 0.551, 0.826, 1.24, 1.86, 2.79, 4.18, 6.27 -T ≤ 0.05 s | 0.00333, 0.00499, 0.00749, 0.0112, 0.0169, 0.0253, 0.0379, 0.0569, 0.0853, 0.128, 0.192, 0.288, 0.432, 0.648, 0.972, 1.46, 2.19, 3.28, 4.92, 7.38 -T ≤ 2 s | 0.00250, 0.00375, 0.00562, 0.00843, 0.0126, 0.0190, 0.0284, 0.0427, 0.0640, 0.0960, 0.144, 0.216, 0.324, 0.486, 0.729, 1.09, 1.64, 2.46, 3.69, 5.54 -T ≤ 3 s | 0.00200, 0.00300, 0.00449, 0.00674, 0.0101, 0.0152, 0.0228, 0.0341, 0.0512, 0.0768, 0.115, 0.173, 0.259, 0.389, 0.583, 0.875, 1.31, 1.97, 2.95, 4.43 -T ≤ 4 s | 0.00133, 0.00200, 0.00300, 0.00449, 0.00674, 0.0101, 0.0152, 0.0228, 0.0341, 0.0512, 0.0768, 0.115, 0.173, 0.259, 0.389, 0.583, 0.875, 1.31, 1.97, 2.95 -T ≤ 5 s | 0.000999, 0.00150, 0.00225, 0.00337, 0.00506, 0.00758, 0.0114, 0.0171, 0.0256, 0.0384, 0.0576, 0.0864, 0.130, 0.194, 0.292, 0.437, 0.656, 0.984, 1.48, 2.21 -T ≤ 7.5 s | 0.000499, 0.000749, 0.00112, 0.00169, 0.00253, 0.00379, 0.00569, 0.00853, 0.0128, 0.0192, 0.0288, 0.0432, 0.0648, 0.0972, 0.146, 0.219, 0.328, 0.492, 0.738, 1.11 -T ≤ 10 s | 0.000333, 0.000499, 0.000749, 0.00112, 0.00169, 0.00253, 0.00379, 0.00569, 0.00853, 0.0128, 0.0192, 0.0288, 0.0432, 0.0648, 0.0972, 0.146, 0.219, 0.328, 0.492, 0.738 +PGV | 0.237, 0.355, 0.532, 0.798, 1.19, 1.80, 2.69, 4.04, <br>6.06, 9.09, 13.6, 20.5, 30.7, 46.0, 69.0, 103.0, 155.0, <br>233.0, 349.0, 525.0 +PGA | 0.00233, 0.00350, 0.00524, 0.00786, 0.0118, 0.0177, <br>0.0265, 0.0398, 0.0597, 0.0896, 0.134, 0.202, 0.302, 0.454, <br>0.680, 1.02, 
1.53, 2.30, 3.44, 5.17 +T ≤ 0.01 s | 0.00233, 0.00350, 0.00524, 0.00786, 0.0118, 0.0177, <br>0.0265, 0.0398, 0.0597, 0.0896, 0.134, 0.202, 0.302, 0.454, <br>0.680, 1.02, 1.53, 2.30, 3.44, 5.17 +T ≤ 0.02 s | 0.00283, 0.00424, 0.00637, 0.00955, 0.0143, 0.0215, <br>0.0322, 0.0483, 0.0725, 0.109, 0.163, 0.245, 0.367, 0.551, 0.826, <br>1.24, 1.86, 2.79, 4.18, 6.27 +T ≤ 0.05 s | 0.00333, 0.00499, 0.00749, 0.0112, 0.0169, 0.0253, <br>0.0379, 0.0569, 0.0853, 0.128, 0.192, 0.288, 0.432, 0.648, 0.972, <br>1.46, 2.19, 3.28, 4.92, 7.38 +T ≤ 2 s | 0.00250, 0.00375, 0.00562, 0.00843, 0.0126, 0.0190, <br>0.0284, 0.0427, 0.0640, 0.0960, 0.144, 0.216, 0.324, 0.486, <br>0.729, 1.09, 1.64, 2.46, 3.69, 5.54 +T ≤ 3 s | 0.00200, 0.00300, 0.00449, 0.00674, 0.0101, 0.0152, <br>0.0228, 0.0341, 0.0512, 0.0768, 0.115, 0.173, 0.259, 0.389, <br>0.583, 0.875, 1.31, 1.97, 2.95, 4.43 +T ≤ 4 s | 0.00133, 0.00200, 0.00300, 0.00449, 0.00674, 0.0101, <br>0.0152, 0.0228, 0.0341, 0.0512, 0.0768, 0.115, 0.173, 0.259, <br>0.389, 0.583, 0.875, 1.31, 1.97, 2.95 +T ≤ 5 s | 0.000999, 0.00150, 0.00225, 0.00337, 0.00506, 0.00758, <br>0.0114, 0.0171, 0.0256, 0.0384, 0.0576, 0.0864, 0.130, 0.194, <br>0.292, 0.437, 0.656, 0.984, 1.48, 2.21 +T ≤ 7.5 s | 0.000499, 0.000749, 0.00112, 0.00169, 0.00253, 0.00379, <br>0.00569, 0.00853, 0.0128, 0.0192, 0.0288, 0.0432, 0.0648, <br>0.0972, 0.146, 0.219, 0.328, 0.492, 0.738, 1.11 +T ≤ 10 s | 0.000333, 0.000499, 0.000749, 0.00112, 0.00169, 0.00253, <br>0.00379, 0.00569, 0.00853, 0.0128, 0.0192, 0.0288, 0.0432, <br>0.0648, 0.0972, 0.146, 0.219, 0.328, 0.492, 0.738 + +--- + +## Related Pages + +* [Building & Running](./Building-&-Running.md#building-&-running) + * [Developer Basics](./Developer-Basics.md#developer-basics) + * [Calculation Configuration](./Calculation-Configuration.md#calculation-configuration) + * [Site Specification](./Site-Specification.md#site-specification) + * [Examples](../../etc/examples) (or + [on GitLab](https://code.usgs.gov/ghsc/nshmp/nshmp-haz/-/tree/main/etc/examples)) +* [**Documentation Index**](../README.md) + +--- + [U.S. Geological Survey](https://www.usgs.gov) +National Seismic Hazard Mapping Project ([NSHMP](https://earthquake.usgs.gov/hazards/)) diff --git a/docs/pages/Developer-Basics.md b/docs/pages/Developer-Basics.md index 51cf8c2cd7fbc357d0704d57e9afde3c02c9b4c2..17da9de70699c0cb144dd0f8a3c47899975c393a 100644 --- a/docs/pages/Developer-Basics.md +++ b/docs/pages/Developer-Basics.md @@ -7,11 +7,11 @@ The following provides basic guidance on how to set up command-line use of nshmp * Java 11 JDK: [Oracle](https://www.oracle.com/java/technologies/javase-jdk11-downloads.html) or [Amazon Corretto](https://docs.aws.amazon.com/corretto/latest/corretto-11-ug/downloads-list.html) * [Git](https://git-scm.com/downloads) - * Git is a distributed version control system. The USGS uses a [GitLab](https://docs.gitlab.com) - [instance](https://code.usgs.gov/) to host projects and facilitate sharing and collaborative - development of code. Git is included in the macOS - [developer tools](https://developer.apple.com/xcode/). - * Windows users may want to consider [Git for Windows](https://git-for-windows.github.io) or + * Git is a [distributed version control system](https://en.wikipedia.org/wiki/Distributed_version_control). + The USGS uses a [GitLab](https://docs.gitlab.com) [instance](https://code.usgs.gov/) to host + projects and facilitate sharing and collaborative development of code. 
+ * On **macOS**, Git is included in the [developer tools](https://developer.apple.com/xcode/). + * On **Windows**, users may want to consider [Git for Windows](https://git-for-windows.github.io) or [GitHub Desktop](https://desktop.github.com), both of which include a linux-like terminal (Git BASH) in which subsequent commands listed here will work. @@ -58,3 +58,19 @@ install the [Eclipse IDE for Java Developers](https://www.eclipse.org/downloads/ [Eclipse IDE for Enterprise Java and Web Developers](https://www.eclipse.org/downloads/packages/), if you plan on developing web services. Import the project into Eclipse: `File > Import > Gradle > Existing Gradle Project` + +--- + +## Related Pages + +* [Building & Running](./Building-&-Running.md#building-&-running) + * [Developer Basics](./Developer-Basics.md#developer-basics) + * [Calculation Configuration](./Calculation-Configuration.md#calculation-configuration) + * [Site Specification](./Site-Specification.md#site-specification) + * [Examples](../../etc/examples) (or + [on GitLab](https://code.usgs.gov/ghsc/nshmp/nshmp-haz/-/tree/main/etc/examples)) +* [**Documentation Index**](../README.md) + +--- + [U.S. Geological Survey](https://www.usgs.gov) +National Seismic Hazard Mapping Project ([NSHMP](https://earthquake.usgs.gov/hazards/)) diff --git a/docs/pages/Functional-PSHA.md b/docs/pages/Functional-PSHA.md index 726864a1de39589eabe9931a7021604ad2cbdf5d..375098c02a407c9b2ce87ac88fa1506c961c891a 100644 --- a/docs/pages/Functional-PSHA.md +++ b/docs/pages/Functional-PSHA.md @@ -10,14 +10,14 @@ upended the notion of discrete faults as independent sources, and the USGS natio model uses temporally clustered sources. Moreover, as the logic trees typically employed in PSHAs to capture epistemic uncertainty grow larger, so too does the demand for a more complete understanding of uncertainty. At the USGS, there are additional requirements to support source -model mining, deaggregation, and map-making, often through the use of dynamic web-applications. +model mining, disaggregation, and map-making, often through the use of dynamic web-applications. Implementations of the PSHA methodology commonly iterate over all sources that influence the hazard at a site and sequentially build a single hazard curve. Such a linear PSHA computational pipeline, however, proves difficult to maintain and modify to support the additional complexity of new models, hazard products, and analyses. The functional programming paradigm offers some relief. The functional approach breaks calculations down into their component parts or steps, storing intermediate results as immutable objects, making it easier to: chain actions together; preserve -intermediate data or results that may still be relevant (e.g. as in a deaggregation); and leverage +intermediate data or results that may still be relevant (e.g. as in a disaggregation); and leverage the concurrency supported by many modern programming languages. ## Traditional PSHA formulation (after Baker, 2013) @@ -62,8 +62,8 @@ a single site, using a single GMM, and a nominal number of sources, modern PSHAs * Response Spectra, Conditional Mean Spectra – multiple intensity measure types (IMTs; e.g. 
PGA, PGD, PGV, multiple SAs) -* Deaggregation -* Banded deaggregation (multiple deaggregations at varying IMLs) +* Disaggregation +* Banded disaggregation (multiple disaggregations at varying IMLs) * Maps – many thousands of sites * Uncertainty analyses @@ -87,7 +87,7 @@ foreach IMT { } ``` -* Support for secondary analyses, such as deaggregation is supplied by a separate code or codes +* Support for secondary analyses, such as disaggregation is supplied by a separate code or codes and can require repeating many of the steps performed to generate an initial hazard curve. ## What about scaleability, maintenance, and performance? @@ -111,7 +111,7 @@ foreach IMT { may write [sqr = @(x) x.^2;]. * In Matlab, one may pass function ‘handles’ (references) to other functions as arguments. This is also possible in Javascript, where such handles serve as callbacks. Given the rise in - popularity of the functional style, Java 8 recently added constructs in the form of the function + popularity of the functional style, Java 8 added constructs in the form of the function and streaming APIs, and libraries exists for other languages. ## How do PSHA and related calculations leverage such an approach? @@ -158,3 +158,11 @@ The functional pipeline can be processed stepwise: pp. 973-987. * Cornell, C.A., 1968, Engineering seismic risk analysis, *Bulletin of the Seismological Society of America*, Vol. 58, No. 5, pp. 1583-1606. + +--- + +[**Documentation Index**](../README.md) + +--- + [U.S. Geological Survey](https://www.usgs.gov) +National Seismic Hazard Mapping Project ([NSHMP](https://earthquake.usgs.gov/hazards/)) diff --git a/docs/pages/Ground-Motion-Models.md b/docs/pages/Ground-Motion-Models.md index 82f109b620244bdb64d376606c9b01ddb01ecac8..0ffa1e71f03081bb5523a1aed9d25ba283a25447 100644 --- a/docs/pages/Ground-Motion-Models.md +++ b/docs/pages/Ground-Motion-Models.md @@ -7,18 +7,18 @@ GMMs, as published, support both interface and intraslab events. [[_TOC_]] -* How to add links to javadocs? ...not possible in wiki (separate repo)? except with external - http gitlab urls? -* include NSHM that used each model? -* save horizontal space in table by moving notes to table footnotes. this has to be done manually - (see NGA-East model IDs), since markdown footnotes always appear at the bottom of the page - -**gmm-config.json** Required adjacent to any `gmm-tree.json`. This file specifies the applicability -distance of the associated GMM's and any additional epistemic uncertainty model and properties to -apply to median ground motions derived from the GMM's. This uncertainty is distinct from the -built-in aleatory variability (standard deviation or sigma) of the GMM's themselves. Use `null` -values to indicate that no additional uncertainty model should be applied. Supported uncertainty -models are detailed in the [ground motion models](ground-motion-models) section. For example: +## GMM Configuration + +A **gmm-config.json** file governs how GMMs are applied in a NSHM and is required adjacent to any +`gmm-tree.json` file. It specifies a maximum distance at which associated GMMs are applicable. It +may also specify a model of additional epistemic uncertainty and the logic tree used to apply it to +median ground motions derived from the GMMs. If no such model is required, the `epistemic-model` +and `epistemic-tree` members must be `null`. This uncertainty is distinct from the built-in +aleatory variability (standard deviation or sigma) of the GMMs themselves.
See +[GMM Uncertainty Models](#gmm-uncertainty-models) for details on additional epistemic uncertainty +in GMMs. + +A sample `gmm-config.json` file that specifies no additional epistemic uncertainty model: ```json { @@ -28,13 +28,7 @@ models are detailed in the [ground motion models](ground-motion-models) section. } ``` -## GMM Configuration - -A `gmm-config.json` file governs how GMMs are applied in a NSHM. It specifies a maximum distance -at which a GMM is applicable. It may also specify a model of additional epistemic uncertainty and -the logic tree used to apply it. If no such model is required, the `epistemic-model` and -`epistemic-tree` members must be `null`. See [Uncertainties in NSHMs](uncertainties-in-nshms) for -details on additional epistemic uncertainty in GMMs. +The following sample `gmm-config.json` file applies the NGA-West 2 epistemic uncertainty model: ```json { @@ -50,111 +44,128 @@ details on additional epistemic uncertainty in GMMs. ## GMM Uncertainty Models -TODO - -## GMM Post Processors - -TODO - -## Active Crust GMMs - -| Reference | ID | Component | Notes | -|:---------:|:--:|:---------:|:------| -| **NGA-West 2** | | | -| [Abrahamson et al., 2014](http://dx.doi.org/10.1193/070913EQS198M) | ASK_14<br>ASK_14_BASIN | RotD50 | | -| [Boore et al., 2014](http://dx.doi.org/10.1193/070113EQS184M) | BSSA_14<br>BSSA_14_BASIN | RotD50 | | -| [Campbell & Bozorgnia, 2014](http://dx.doi.org/10.1193/062913EQS175M)| CB_14<br>CB_14_BASIN | RotD50 | | -| [Chiou & Youngs, 2014](http://dx.doi.org/10.1193/072813EQS219M) | CY_14<br>CY_14_BASIN | RotD50 | | -| [Idriss, 2014](http://dx.doi.org/10.1193/070613EQS195M) | IDRISS_14 | RotD50 | | -| **NGA-West 1** | | | | -| [Boore & Anderson, 2008](http://dx.doi.org/10.1193/1.2830434) | BA_08 | GMRotI50 | | -| [Campbell & Bozorgnia, 2008](http://dx.doi.org/10.1193/1.2857546) | CB_08 | GMRotI50 | | -| [Chiou & Youngs, 2008](http://dx.doi.org/10.1193/1.2894832) | CY_08 | GMRotI50 | | -| **Other** | | | | -| [Abrahamson & Silva, 1997](http://dx.doi.org/10.1785/gssrl.68.1.94) | AS_97 | Average horizontal | | -| [Boore et al., 1997](http://dx.doi.org/10.1785/gssrl.68.1.128)<br>[Boore, 2005](http://dx.doi.org/10.1785/gssrl.76.3.368) | BJF_97 | Random horizontal | Soft rock sites only (Vs30 760 m/s) | -| [Campbell, 1997](http://dx.doi.org/10.1785/gssrl.68.1.154)<br>[errata, 2000](http://dx.doi.org/10.1785/gssrl.71.3.352)<br>[errata, 2001](http://dx.doi.org/10.1785/gssrl.72.4.474) | CAMPBELL_97 | Geometric mean of two horizontal components | Soft rock sites only (Vs30 760 m/s) | -| [Campbell & Bozorgnia, 2003](http://dx.doi.org/10.1785/0120020029)<br>[errata, 2003](http://dx.doi.org/10.1785/0120030099)<br>[errata, 2003](http://dx.doi.org/10.1785/0120030143) | CB_03 | Average horizontal | Soft rock sites only (Vs30 760 m/s) | -| [McVerry et al., 2000](http://doi.org/10.5459/BNZSEE.39.1.1-58) | MCVERRY_00_CRUSTAL<br>MCVERRY_00_VOLCANIC | Max-horizontal implemented, model also supports geometric mean | New Zealand, does not correspond directly with US site class model | -| [Sadigh et al., 1997](http://dx.doi.org/10.1785/gssrl.68.1.180) | SADIGH_97 | Geometric mean of two horizontal components | Also used for interface sources in 2007 Alaska NSHM | -| [Zhao et al., 2016](http://dx.doi.org/10.1785/0120150063) | ZHAO_16_SHALLOW_CRUST<br>ZHAO_16_UPPER_MANTLE | Geometric mean of two randomly oriented horizontal components | | - -## Stable Crust GMMs - -| Reference | ID | Component | Notes | -|:---------:|:--:|:---------:|:------| -| [Atkinson, 
2008](http://dx.doi.org/10.1785/0120070199)<br>[Atkinson & Boore, 2011](http://dx.doi.org/10.1785/0120100270) | ATKINSON_08_PRIME | horizontal | Mean values clamped | -| [Atkinson & Boore, 2006](http://dx.doi.org/10.1785/0120050245) | AB_06_140BAR<br>AB_06_200BAR<br>AB_06_140BAR_AB<br>AB_06_200BAR_AB<br>AB_06_140BAR_J<br>AB_06_200BAR_J | horizontal | Mean values clamped | -| [Atkinson & Boore, 2006](http://dx.doi.org/10.1785/0120050245)<br>[Atkinson & Boore, 2011](http://dx.doi.org/10.1785/0120100270) | AB_06_PRIME | horizontal | Mean values clamped | -| [Campbell, 2003](http://dx.doi.org/10.1785/0120020002) | CAMPBELL_03<br>CAMPBELL_03_AB<br>CAMPBELL_03_J | Geometric mean of two horizontal components | Mean values clamped | -| [Frankel et al., 1996](https://pubs.usgs.gov/of/1996/532/) | FRANKEL_96<br>FRANKEL_96_AB<br>FRANKEL_96_J | not specified | Mean values clamped | -| [Graizer & Kalkan, 2015](http://dx.doi.org/10.3133/ofr20151009)<br>[Graizer & Kalkan, 2016](http://dx.doi.org/10.1785/0120150194) | GK_15 | Geometric mean of two randomly oriented horizontal components | | -| NGA-East<br>[Goulet et al., 2017](https://peer.berkeley.edu/sites/default/files/christine-a-goulet-yousef-bozorgnia-2017_03_0.pdf) | NGA_EAST_USGS [:one:](#one-nga-east-median-model-ids)<br>NGA_EAST_USGS_SEEDS[:two:](#two-nga-east-seed-model-ids) | RotD50 (average horizontal) | Mean values are not clamped | -| [Pezeshk et al., 2011](http://dx.doi.org/10.1785/0120100144) | PEZESHK_11 | GMRotI50 | Mean values clamped | -| [Shahjouei and Pezeshk, 2016](http://dx.doi.org/10.1785/0120140367) | NGA_EAST_SEED_SP16 | RotD50 | NGA-East Seed | -| [Silva et al., 2002](http://www.pacificengineering.org/CEUS/Development%20of%20Regional%20Hard_ABC.pdf) | SILVA_02<br>SILVA_02_AB<br>SILVA_02_J | average horizontal component | Mean values clamped | -| [Somerville et al., 2001](https://earthquake.usgs.gov/static/lfs/nshm/conterminous/2008/99HQGR0098.pdf) | SOMERVILLE_01 | not specified | Mean values clamped | -| [Tavakoli & Pezeshk, 2005](http://dx.doi.org/10.1785/0120050030) | TP_05<br>TP_05_AB<br>TP_05_J | not specified | Mean values clamped | -| [Toro et al., 1997](http://dx.doi.org/10.1785/gssrl.68.1.41)<br>[Toro, 2002](http://www.ce.memphis.edu/7137/PDFs/attenuations/Toro_2001_(modification_1997).pdf) | TORO_97_MB<br>TORO_97_MW | not specified | Mean values clamped | - -1. NGA-East Median Model IDs: NGA_EAST_USGS_1, NGA_EAST_USGS_2, NGA_EAST_USGS_3, NGA_EAST_USGS_4, - NGA_EAST_USGS_5, NGA_EAST_USGS_6, NGA_EAST_USGS_7, NGA_EAST_USGS_8, NGA_EAST_USGS_9, - NGA_EAST_USGS_10, NGA_EAST_USGS_11, NGA_EAST_USGS_12, NGA_EAST_USGS_13, NGA_EAST_USGS_14, - NGA_EAST_USGS_15, NGA_EAST_USGS_16, NGA_EAST_USGS_17 -2. 
NGA-East Seed Model IDs: NGA_EAST_SEED_1CCSP, NGA_EAST_SEED_1CVSP, NGA_EAST_SEED_2CCSP, - NGA_EAST_SEED_2CVSP, NGA_EAST_SEED_B_A04, NGA_EAST_SEED_B_AB14, NGA_EAST_SEED_B_AB95, - NGA_EAST_SEED_B_BCA10D, NGA_EAST_SEED_B_BS11, NGA_EAST_SEED_B_SGD02, NGA_EAST_SEED_FRANKEL, - NGA_EAST_SEED_GRAIZER, NGA_EAST_SEED_GRAIZER16, NGA_EAST_SEED_GRAIZER17, NGA_EAST_SEED_HA15, - NGA_EAST_SEED_PEER_EX, NGA_EAST_SEED_PEER_GP, NGA_EAST_SEED_PZCT15_M1SS, - NGA_EAST_SEED_PZCT15_M2ES, NGA_EAST_SEED_SP15, NGA_EAST_SEED_SP16, NGA_EAST_SEED_YA15 - -## Subduction GMMs - -| Reference | ID | Component | Notes | -|:---------:|:--:|:---------:|:------| -| [Atkinson & Boore, 2003](http://dx.doi.org/10.1785/0120020156) | AB_03_GLOBAL_INTERFACE<br>AB_03_GLOBAL_SLAB<br>AB_03_GLOBAL_SLAB_LOW_SAT<br>AB_03_CASCADIA_INTERFACE<br>AB_03_CASCADIA_SLAB<br>AB_03_CASCADIA_SLAB_LOW_SAT | horizontal | | -| [Atkinson & Macias, 2009](http://dx.doi.org/10.1785/0120080147) | AM_09_INTERFACE<br>AM_09_INTERFACE_BASIN | Geometric mean of two horizontal components | Interface only | -| BC Hydro<br>[Abrahamson et al., 2016](http://dx.doi.org/10.1193/051712EQS188MR) | BCHYDRO_12_INTERFACE<br>BCHYDRO_12_INTERFACE_BACKARC<br>BCHYDRO_12_INTERFACE_BASIN<br>BCHYDRO_12_INTERFACE_BASIN_BACKARC<br>BCHYDRO_12_SLAB<br>BCHYDRO_12_SLAB_BACKARC<br>BCHYDRO_12_SLAB_BASIN<br>BCHYDRO_12_SLAB_BASIN_BACKARC | Geometric mean of two horizontal components | | -| [McVerry et al., 2000](http://doi.org/10.5459/BNZSEE.39.1.1-58) | MCVERRY_00_INTERFACE<br>MCVERRY_00_SLAB<br>MCVERRY_00_VOLCANIC | Max-horizontal implemented, model also supports geometric mean | New Zealand, does not correspond directly with US site class model | -| NGA-Subduction<br>[Abrahamson et al., 2018](https://peer.berkeley.edu/sites/default/files/2018_02_abrahamson_9.10.18.pdf) | NGA_SUB_USGS_INTERFACE<br>NGA_SUB_USGS_INTERFACE_NO_EPI<br>NGA_SUB_USGS_SLAB<br>NGA_SUB_USGS_SLAB_NO_EPI | Geometric mean of two horizontal components | **Likely to be superseded by final EQS paper**<br>Calibrated for Cascadia use only | -| [Youngs et al., 1997](http://dx.doi.org/10.1785/gssrl.68.1.58) | YOUNGS_97_INTERFACE<br>YOUNGS_97_SLAB | Geometric mean of two horizontal components | | -| [Zhao et al., 2006](http://dx.doi.org/10.1785/0120050122) | ZHAO_06_INTERFACE<br>ZHAO_06_INTERFACE_BASIN<br>ZHAO_06_SLAB<br>ZHAO_06_SLAB_BASIN | Geometric mean of two horizontal components | | -| [Zhao et al., 2016](http://dx.doi.org/10.1785/0120150034)<br>[Zhao et al., 2016](http://dx.doi.org/10.1785/0120150056) | ZHAO_16_INTERFACE<br>ZHAO_16_SLAB<br>*ZHAO_16_UPPER_MANTLE* | Geometric mean of two randomly oriented horizontal components | Subduction Slab and Interface | - -## Regional and Specialized GMMs +*nshmp-haz* supports additional epistemic uncertainty models derived from the PEER NGA-West 1 +and PEER NGA-West 2 projects. These models both have factors for distance (`Rrup`) bins +Rrup < 10 km, 10 km <= Rrup, < 30 km, and 30 km <= Rrup, and for magnitude bins M < 6.0, 6.0 <= +M < 7.0, and 7.0 <= M. These models can be applied within the `gmm-config.json` file as shown in +the [GMM Uncertainty](#gmm-uncertainty) section above. + +## GMMs By Tectonic Setting + +GMMs available in *nshmp-haz* are tabulated by tectonic setting below. See the javadocs for the +[GMM Package](https://earthquake.usgs.gov/nshmp/docs/nshmp-lib/gov/usgs/earthquake/nshmp/gmm/package-summary.html) +for implementation details of each GMM and comprehensive lists of GMM IDs. 
+ +### Active Crust GMMs + +Reference | ID | Component | Notes +:---------|:--:|:---------:|:------: +**NGA-West 2** +[Abrahamson et al., 2014](http://dx.doi.org/10.1193/070913EQS198M) | ASK_14<br>ASK_14_BASIN | RotD50 | +[Boore et al., 2014](http://dx.doi.org/10.1193/070113EQS184M) | BSSA_14<br>BSSA_14_BASIN | RotD50 | +[Campbell & Bozorgnia, 2014](http://dx.doi.org/10.1193/062913EQS175M)| CB_14<br>CB_14_BASIN | RotD50 | +[Chiou & Youngs, 2014](http://dx.doi.org/10.1193/072813EQS219M) | CY_14<br>CY_14_BASIN | RotD50 | +[Idriss, 2014](http://dx.doi.org/10.1193/070613EQS195M) | IDRISS_14 | RotD50 | +**NGA-West 1** +[Boore & Anderson, 2008](http://dx.doi.org/10.1193/1.2830434) | BA_08 | GMRotI50 | +[Campbell & Bozorgnia, 2008](http://dx.doi.org/10.1193/1.2857546) | CB_08 | GMRotI50 | +[Chiou & Youngs, 2008](http://dx.doi.org/10.1193/1.2894832) | CY_08 | GMRotI50 | +**Other** +[Abrahamson & Silva, 1997](http://dx.doi.org/10.1785/gssrl.68.1.94) | AS_97 | Average horizontal | +[Boore et al., 1997](http://dx.doi.org/10.1785/gssrl.68.1.128)<br>[Boore, 2005](http://dx.doi.org/10.1785/gssrl.76.3.368) | BJF_97 | Random horizontal | 1 +[Campbell, 1997](http://dx.doi.org/10.1785/gssrl.68.1.154)<br>[errata, 2000](http://dx.doi.org/10.1785/gssrl.71.3.352)<br>[errata, 2001](http://dx.doi.org/10.1785/gssrl.72.4.474) | CAMPBELL_97 | Geometric mean | 1 +[Campbell & Bozorgnia, 2003](http://dx.doi.org/10.1785/0120020029)<br>[errata, 2003a](http://dx.doi.org/10.1785/0120030099)<br>[errata, 2003b](http://dx.doi.org/10.1785/0120030143) | CB_03 | Average horizontal | 1 +[McVerry et al., 2000](http://doi.org/10.5459/BNZSEE.39.1.1-58) | MCVERRY_00_CRUSTAL<br>MCVERRY_00_VOLCANIC | Max-horizontal implemented,<br>model supports geometric mean | 2 +[Sadigh et al., 1997](http://dx.doi.org/10.1785/gssrl.68.1.180) | SADIGH_97 | Geometric mean | 3 +[Zhao et al., 2016](http://dx.doi.org/10.1785/0120150063) | ZHAO_16_SHALLOW_CRUST<br>ZHAO_16_UPPER_MANTLE | Geometric mean<br>(random orientation) | + +¹ Soft rock sites only (Vs30 760 m/s) +² New Zealand model, does not correspond directly with US site class model +³ Also used for interface sources in 2007 Alaska NSHM + +### Stable Crust GMMs + +Reference | ID | Component | Notes +:---------|:---|:----------|:------: +**NGA-East** +NGA-East<br>[Goulet et al., 2017](https://peer.berkeley.edu/sites/default/files/christine-a-goulet-yousef-bozorgnia-2017_03_0.pdf) | NGA_EAST_USGS¹<br>NGA_EAST_USGS_SEEDS¹ | RotD50 | 2 +[Shahjouei and Pezeshk, 2016](http://dx.doi.org/10.1785/0120140367) | NGA_EAST_SEED_SP16 | RotD50 | 3 +**Other** +[Atkinson, 2008](http://dx.doi.org/10.1785/0120070199)<br>[Atkinson & Boore, 2011](http://dx.doi.org/10.1785/0120100270) | ATKINSON_08_PRIME | horizontal | 4 +[Atkinson & Boore, 2006](http://dx.doi.org/10.1785/0120050245) | *AB_06_\*<br>140BAR\|200BAR<br>none\|_AB\|_J* | horizontal | 4 +[Atkinson & Boore, 2006](http://dx.doi.org/10.1785/0120050245)<br>[Atkinson & Boore, 2011](http://dx.doi.org/10.1785/0120100270) | AB_06_PRIME | horizontal | 4 +[Campbell, 2003](http://dx.doi.org/10.1785/0120020002) | CAMPBELL_03<br>CAMPBELL_03_AB<br>CAMPBELL_03_J | Geometric mean | 4 +[Frankel et al., 1996](https://pubs.usgs.gov/of/1996/532/) | FRANKEL_96<br>FRANKEL_96_AB<br>FRANKEL_96_J | not specified | 4 +[Graizer & Kalkan, 2015](http://dx.doi.org/10.3133/ofr20151009)<br>[Graizer & Kalkan, 2016](http://dx.doi.org/10.1785/0120150194) | GK_15 | Geometric mean<br>(random orientation) | +[Pezeshk et al., 2011](http://dx.doi.org/10.1785/0120100144) | PEZESHK_11 | GMRotI50 | 
4 +[Silva et al., 2002](http://www.pacificengineering.org/CEUS/Development%20of%20Regional%20Hard_ABC.pdf) | SILVA_02<br>SILVA_02_AB<br>SILVA_02_J | Average horizontal | 4 +[Somerville et al., 2001](https://earthquake.usgs.gov/static/lfs/nshm/conterminous/2008/99HQGR0098.pdf) | SOMERVILLE_01 | not specified | 4 +[Tavakoli & Pezeshk, 2005](http://dx.doi.org/10.1785/0120050030) | TP_05<br>TP_05_AB<br>TP_05_J | not specified | 4 +[Toro et al., 1997](http://dx.doi.org/10.1785/gssrl.68.1.41)<br>[Toro, 2002](http://www.ce.memphis.edu/7137/PDFs/attenuations/Toro_2001_(modification_1997).pdf) | TORO_97_MB<br>TORO_97_MW | not specified | 4 + +¹ See [Gmm javadocs](https://earthquake.usgs.gov/nshmp/docs/nshmp-lib/gov/usgs/earthquake/nshmp/gmm/Gmm.html) +for individual NGA-East component model IDs +² Mean values are not clamped +³ Shahjouei and Pezeshk (2016) is a NGA-East seed model +â´ Mean values clamped + +### Subduction GMMs + +*Note: See the [GMM javadocs](https://earthquake.usgs.gov/nshmp/docs/nshmp-lib/gov/usgs/earthquake/nshmp/gmm/Gmm.html) +for a comprehensive list of GMM IDs.* + +Reference | ID | Component | Notes +:---------|:---|:----------|:------: +**NGA-Subduction** +[Abrahamson & Gülerce, 2020](https://peer.berkeley.edu/sites/default/files/2020_25.pdf) | *AG_20_\*<br>GLOBAL\|CASCADIA\|ALASKA<br>INTERFACE\|SLAB<br>no basin\|_BASIN* | RotD50 +[Kuehn et al., 2020](https://peer.berkeley.edu/sites/default/files/2020_04_kuehn_final.pdf) | *KBCG_20_\*<br>GLOBAL\|CASCADIA\|ALASKA<br>INTERFACE\|SLAB<br>no basin\|_BASIN* | RotD50 +[Parker et al., 2020](https://peer.berkeley.edu/sites/default/files/2020_03_parker_final.pdf) | *PSHAB_20_\*<br>GLOBAL\|CASCADIA\|ALASKA<br>INTERFACE\|SLAB<br>no basin\|_BASIN* | RotD50 +**Other** +[Atkinson & Boore, 2003](http://dx.doi.org/10.1785/0120020156) | *AB_03\*<br>GLOBAL\|CASCADIA<br>INTERFACE\|SLAB<br>none\|_LOW_SAT*| horizontal | +[Atkinson & Macias, 2009](http://dx.doi.org/10.1785/0120080147) | AM_09_INTERFACE<br>AM_09_INTERFACE_BASIN | Geometric mean | 1 +BC Hydro<br>[Abrahamson et al., 2016](http://dx.doi.org/10.1193/051712EQS188MR) | *BCHYDRO_12_\*<br>INTERFACE\|SLAB<br>none\|_BASIN<br>none\|_BACKARC* | Geometric mean | +BC Hydro NGA<br>[Abrahamson et al., 2018](https://peer.berkeley.edu/sites/default/files/2018_02_abrahamson_9.10.18.pdf)² | *BCHYDRO_18_NGA_\*<br>INTERFACE\|SLAB<br>none\|_NO_EPI* | Geometric mean | 3 +[McVerry et al., 2000](http://doi.org/10.5459/BNZSEE.39.1.1-58) | MCVERRY_00_INTERFACE<br>MCVERRY_00_SLAB<br>MCVERRY_00_VOLCANIC | Max-horizontal,<br>also supports geometric mean | 4 +[Youngs et al., 1997](http://dx.doi.org/10.1785/gssrl.68.1.58) | YOUNGS_97_INTERFACE<br>YOUNGS_97_SLAB | Geometric mean | +[Zhao et al., 2006](http://dx.doi.org/10.1785/0120050122) | *ZHAO_06_\*<br>INTERFACE\|SLAB<br>none\|_BASIN* | Geometric mean | +[Zhao et al., 2016](http://dx.doi.org/10.1785/0120150034)<br>[Zhao et al., 2016](http://dx.doi.org/10.1785/0120150056) | ZHAO_16_INTERFACE<br>ZHAO_16_SLAB<br>*ZHAO_16_UPPER_MANTLE* | Geometric mean<br>(random orientation) | 5 + +¹ Interface only +² Likely to be superseded by the final EQ Spectra paper +³ Calibrated for Cascadia use only +â´ New Zealand model, does not correspond directly with US site class model +âµ Subduction Slab and Interface + +### Regional and Specialized GMMs | Reference | ID | Component | Notes | -|:---------:|:--:|:---------:|:------| -| **Hawaii** | | | | -| [Atkinson, 2010](http://dx.doi.org/10.1785/0120090098) | ATKINSON_10 | geometric mean of two horizontal components | | -| [Munson 
& Thurber, 1997](https://pubs.geoscienceworld.org/ssa/srl/article-abstract/68/1/41/142160/Model-of-Strong-Ground-Motions-from-Earthquakes-in) | MT_97 | Larger of two horizontal | PGA and 0.2 seconds, additional term applied for M > 7 | -| [Wong et al., 2015](http://doi.org/10.1193/012012EQS015M) | WONG_15 | average horizontal | | -| **New Zealand** | | | | -| [McVerry et al., 2000](http://doi.org/10.5459/BNZSEE.39.1.1-58) | MCVERRY_00_CRUSTAL<br>MCVERRY_00_VOLCANIC<br>MCVERRY_00_INTERFACE<br>MCVERRY_00_SLAB | Max-horizontal implemented, model also supports geometric mean | New Zealand, does not correspond directly with US site class model. | -| **Induced Seismicity** | | | | -| [Atkinson, 2015](http://dx.doi.org/10.1785/0120140142) | ATKINSON_15 | orientation-independent horizontal | | -<!-- -## Hawaii GMMs - -| Reference | ID | Component | Notes | -|:---------:|:--:|:---------:|:------| -| [Atkinson, 2010](http://dx.doi.org/10.1785/0120090098) | ATKINSON_10 | geometric mean of two horizontal components | Hawaii | -| [Munson & Thurber, 1997](https://pubs.geoscienceworld.org/ssa/srl/article-abstract/68/1/41/142160/Model-of-Strong-Ground-Motions-from-Earthquakes-in) | MT_97 | Larger of two horizontal | PGA and 0.2 seconds, additional term applied for M > 7 | -| [Wong et al., 2015](http://doi.org/10.1193/012012EQS015M) | WONG_15 | average horizontal | | - -## Induced Seismicity GMMs - -| Reference | ID | Component | Notes | -|:---------:|:--:|:---------:|:------| -| [Atkinson, 2015](http://dx.doi.org/10.1785/0120140142) | ATKINSON_15 | orientation-independent horizontal | | ---> - -## Auxilliary Models - -Auxilliary models are not used directly, they can be used by concrete implementations of GMMs to -modify model output. - -| Reference | Purpose | Component | Notes | -|:---------:|:-------:|:---------:|:------| -| [Rezaeian et al., 2014](http://dx.doi.org/10.1193/100512EQS298M) | Damping scaling factor | Average horizontal component | No effect if supplied damping ratio is 5% | -| USGS PGV | Conditional PGV for crustal earthquakes | Horizontal component | Conditional model for vertical component not yet implemented | +|:----------|:---|:----------|:-----:| +| **Hawaii** +| [Atkinson, 2010](http://dx.doi.org/10.1785/0120090098) | ATKINSON_10 | geometric mean | +| [Munson & Thurber, 1997](https://pubs.geoscienceworld.org/ssa/srl/article-abstract/68/1/41/142160/Model-of-Strong-Ground-Motions-from-Earthquakes-in) | MT_97 | Max-horizontal | 1 +| [Wong et al., 2015](http://doi.org/10.1193/012012EQS015M) | WONG_15 | Average horizontal | +| **New Zealand** +| [McVerry et al., 2000](http://doi.org/10.5459/BNZSEE.39.1.1-58) | MCVERRY_00_CRUSTAL<br>MCVERRY_00_VOLCANIC<br>MCVERRY_00_INTERFACE<br>MCVERRY_00_SLAB | Max-horizontal <br>(also geometric mean) | 2 +| **Induced Seismicity** +| [Atkinson, 2015](http://dx.doi.org/10.1785/0120140142) | ATKINSON_15 | orientation-independent<br>horizontal | + +¹ Munson & Thurber (1997) supports PGA and 0.2 seconds, with an additional term applied for M > 7. +² McVerry et al. (2000) is a New Zealand model and does not correspond directly with the U.S. site + class model. 
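+
+As a brief illustration of how the IDs in these tables are used (a sketch only, not drawn from
+any published model), a tectonic-setting `gmm-tree.json` file lists GMM IDs as logic tree
+branches. The id/weight branch form below is assumed to match the other logic tree files in this
+documentation, and the weights are illustrative:
+
+```json
+[
+  { "id": "ASK_14", "weight": 0.25 },
+  { "id": "BSSA_14", "weight": 0.25 },
+  { "id": "CB_14", "weight": 0.25 },
+  { "id": "CY_14", "weight": 0.25 }
+]
+```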
+ +--- + +## Related Pages + +* [Hazard Model](./Hazard-Model.md#hazard-model) + * [Model Structure](./Model-Structure.md#model-structure) + * [Model Files](./Model-Files.md#model-files) + * [Source Types](./Source-Types.md#source-types) + * [Magnitude Frequency Distributions (MFDs)](./Magnitude-Frequency-Distributions.md#magnitude-frequency-distributions) + * [Rupture Scaling Relations](./Rupture-Scaling-Relations.md#rupture-scaling-relations) + * [Ground Motion Models (GMMs)](./Ground-Motion-Models.md#ground-motion-models) +* [**Documentation Index**](../README.md) + +--- + [U.S. Geological Survey](https://www.usgs.gov) +National Seismic Hazard Mapping Project ([NSHMP](https://earthquake.usgs.gov/hazards/)) diff --git a/docs/pages/Hazard-Model.md b/docs/pages/Hazard-Model.md index 005e738f1dc657d18a8de313c3d8eb902b78feb3..f9af206c457f55713bf95afb6bfdbaec13fe83fc 100644 --- a/docs/pages/Hazard-Model.md +++ b/docs/pages/Hazard-Model.md @@ -11,15 +11,6 @@ Epistemic uncertainty in source and ground motion models is represented with log file formats and structure adopted here leverage the heirarchical organization of file systems to support modeling of complex logic trees. -## Related Pages - -* [Model Structure](model-structure) -* [Model Files](model-files) -* [Source Types](source-types) -* [Magnitude-Frequency Distributions (MFDs)](magnitude-frequency-distributions-mfds) -* [Rupture-Scaling Relations](magnitude-scaling-relations) -* [Ground Motion Models (GMMs)](ground-motion-models-gmms) - ## Model Applicability * NOTE FOR HAZARD CURVE DATA: While the gridded hazard curve data includes ground motions at long @@ -27,7 +18,24 @@ support modeling of complex logic trees. years), and cautions users using values below 10<sup>-4</sup> (10,000 years). These models were developed for building codes concerned with return periods of 10<sup>-4</sup> and above. -* Important considerations when using NSHMs: NSHMs are only applicable to U.S. and it's +* Important considerations when using NSHMs: NSHMs are only applicable to U.S. and its territories. Although hazard close to the borders can be useful for comparison to other models, the further one drifts from the U.S. border, the more incomplete the underlying earthquake source model will be. + +--- + +## Related Pages + +* [Hazard Model](./Hazard-Model.md#hazard-model) + * [Model Structure](./Model-Structure.md#model-structure) + * [Model Files](./Model-Files.md#model-files) + * [Source Types](./Source-Types.md#source-types) + * [Magnitude Frequency Distributions (MFDs)](./Magnitude-Frequency-Distributions.md#magnitude-frequency-distributions) + * [Rupture Scaling Relations](./Rupture-Scaling-Relations.md#rupture-scaling-relations) + * [Ground Motion Models (GMMs)](./Ground-Motion-Models.md#ground-motion-models) +* [**Documentation Index**](../README.md) + +--- + [U.S. Geological Survey](https://www.usgs.gov) +National Seismic Hazard Mapping Project ([NSHMP](https://earthquake.usgs.gov/hazards/)) diff --git a/docs/pages/Logic-Trees-&-Uncertainty.md b/docs/pages/Logic-Trees-&-Uncertainty.md index 5b51aaec28e62bc8dd34fa0f4491448115fb4877..87a4e0c86136a6f1c633d1f4350ec6e8bf9a0a94 100644 --- a/docs/pages/Logic-Trees-&-Uncertainty.md +++ b/docs/pages/Logic-Trees-&-Uncertainty.md @@ -15,50 +15,42 @@ alternative models. 
| Description | 2008 | 2014 | 2018 | |:----------- |:----- |:---- |:---- | -| CEUS | __0.1__ : AB_06 (140 bar)<br />__0.1__ : AB_06 (200 bar)<br />__0.1__ : CAMPBELL_03<br />__0.1__ : FRANKEL_96<br />__0.1__ : SILVA_02<br />__0.2__ : SOMERVILLE_01<br />__0.2__ : TORO_97<br />__0.1__ : TP_05 | __0.22__ : AB_06'<br />__0.08__ : ATKINSON_08'<br />__0.11__ : CAMPBELL_03<br />__0.06__ : FRANKEL_96<br />__0.15__ : PEZESHK_11<br />__0.06__ : SILVA_02<br />__0.10__ : SOMERVILLE_01<br />__0.11__ : TORO_97<br />__0.11__ : TP_05 | __0.667__ : NGA_EAST_USGS (17)<br />__0.333__ : NGA_EAST_SEEDS (14)<br />(common aleatory variability) | -| Sigma Epistemic | _(none)_ | _(no change)_ | __0.2__ : USGS Panel<br />__0.8__ : EPRI | +| CEUS | __0.1__ : AB_06 (140 bar)<br>__0.1__ : AB_06 (200 bar)<br>__0.1__ : CAMPBELL_03<br>__0.1__ : FRANKEL_96<br>__0.1__ : SILVA_02<br>__0.2__ : SOMERVILLE_01<br>__0.2__ : TORO_97<br>__0.1__ : TP_05 | __0.22__ : AB_06'<br>__0.08__ : ATKINSON_08'<br>__0.11__ : CAMPBELL_03<br>__0.06__ : FRANKEL_96<br>__0.15__ : PEZESHK_11<br>__0.06__ : SILVA_02<br>__0.10__ : SOMERVILLE_01<br>__0.11__ : TORO_97<br>__0.11__ : TP_05 | __0.667__ : NGA_EAST_USGS (17)<br>__0.333__ : NGA_EAST_SEEDS (14)<br>(common aleatory variability) | +| Sigma Epistemic | _(none)_ | _(no change)_ | __0.2__ : USGS Panel<br>__0.8__ : EPRI | | Site Aleatory | _(none)_ | _(no change)_ | __0.185, 0.63, 0.185__ : Site ± σ | ---- - ## Active Crustal Ground Motion Models | Description | 2008 | 2014 | 2018 | |:----------- |:----- |:---- |:---- | -| WUS | __0.3333__ : BA_08<br />__0.3333__ : CB_08<br />__0.3334__ : CY_08 | __0.22__ : ASK_14<br />__0.22__ : BSSA_14<br />__0.22__ : CB_14<br />__0.22__ : CY_14<br />__0.22__ : IDRISS_14 | __0.25__ : ASK_14<br />__0.25__ : BSSA_14<br />__0.25__ : CB_14<br />__0.25__ : CY_14 | -| Mean Epistemic | NGA-West1 (M,R)<br />__0.185__ : epi+<br />__0.630__ : off<br />__0.185__ : epi- | NGA-West2 (M,R)<br />__0.185__ : epi+<br />__0.630__ : off<br />__0.185__ : epi- | _(no change)_ | - ---- +| WUS | __0.3333__ : BA_08<br>__0.3333__ : CB_08<br>__0.3334__ : CY_08 | __0.22__ : ASK_14<br>__0.22__ : BSSA_14<br>__0.22__ : CB_14<br>__0.22__ : CY_14<br>__0.22__ : IDRISS_14 | __0.25__ : ASK_14<br>__0.25__ : BSSA_14<br>__0.25__ : CB_14<br>__0.25__ : CY_14 | +| Mean Epistemic | NGA-West1 (M,R)<br>__0.185__ : epi+<br>__0.630__ : off<br>__0.185__ : epi- | NGA-West2 (M,R)<br>__0.185__ : epi+<br>__0.630__ : off<br>__0.185__ : epi- | _(no change)_ | ## Subduction Ground Motion Models | Description | 2008 | 2014 | 2018 | |:----------- |:----- |:---- |:---- | -| Cascadia<br />(interface) | __0.25__ : AB_03 (global)<br />__0.25__ : YOUNGS_97<br />__0.50__ : ZHAO_06 | __0.1__ : AB_03 (global)<br />__0.3__ : AM_09<br />__0.3__ : BCHYDRO_12<br />__0.3__ : ZHAO_06 | __0.3333__ : AM_09<br />__0.3334__ : BCHYDRO_12<br />__0.3333__ : ZHAO_06 | -| Cascadia<br />(slab) | __0.25__ : AB_03 (global)<br />__0.25__ : AB_03 (cascadia)<br />__0.50__ : YOUNGS_97 | __0.1665__ : AB_03 (global, mod)<br />__0.1665__ : AB_03 (cascadia, mod)<br />__0.3330__ : BCHYDRO_12<br />__0.3340__ : ZHAO_06 | __0.5__ : BCHYDRO_12<br />__0.5__ : ZHAO_06 | - ---- +| Cascadia<br>(interface) | __0.25__ : AB_03 (global)<br>__0.25__ : YOUNGS_97<br>__0.50__ : ZHAO_06 | __0.1__ : AB_03 (global)<br>__0.3__ : AM_09<br>__0.3__ : BCHYDRO_12<br>__0.3__ : ZHAO_06 | __0.3333__ : AM_09<br>__0.3334__ : BCHYDRO_12<br>__0.3333__ : ZHAO_06 | +| Cascadia<br>(slab) | __0.25__ : AB_03 (global)<br>__0.25__ : AB_03 (cascadia)<br>__0.50__ : YOUNGS_97 | __0.1665__ : 
AB_03 (global, mod)<br>__0.1665__ : AB_03 (cascadia, mod)<br>__0.3330__ : BCHYDRO_12<br>__0.3340__ : ZHAO_06 | __0.5__ : BCHYDRO_12<br>__0.5__ : ZHAO_06 | ## Fault Source Model (CEUS) | Model | Description | 2008 | 2014 | 2018 | |:----- |:----------- |:---- |:---- |:---- | -| Deformation | | __1.0__ : GEO | __0.1__ : BIRD<br />__0.8__ : GEO<br />__0.1__ : ZENG | _(no change)_ | -| Rupture | | __0.5__ : Full<br /> __0.5__ : Partial | _(no change)_ | _(no change)_ | +| Deformation | | __1.0__ : GEO | __0.1__ : BIRD<br>__0.8__ : GEO<br>__0.1__ : ZENG | _(no change)_ | +| Rupture | | __0.5__ : Full<br> __0.5__ : Partial | _(no change)_ | _(no change)_ | | Magnitude Scaling | | __1.0__ : Somerville-01 (area) | _(no change)_ | _(no change)_ | | Maximum M¹ | Partial: epistemic | __0.2, 0.6, 0.2__ : M ± 0.2 | _(no change)_ | _(no change)_ | | | Full : epistemic | __0.2, 0.6, 0.2__ : M ± 0.2 | _(no change)_ | _(no change)_ | | | Full : aleatory | M ± 0.24 (±2σ normal PDF) | _(no change)_ | _(no change)_ | -¹ There are a very limited number of fault sources in CEUS - ---- +¹ There are a very limited number of fault sources in CEUS ## Fault Source Model (WUS) | Model | Description | 2008 | 2014 | 2018 | |:----- |:----------- |:---- |:---- |:---- | -| Deformation | | __1.0__ : GEO | __0.1__ : BIRD<br />__0.8__ : GEO<br />__0.1__ : ZENG | _(no change)_ | +| Deformation | | __1.0__ : GEO | __0.1__ : BIRD<br>__0.8__ : GEO<br>__0.1__ : ZENG | _(no change)_ | | Rupture | Partial | __0.333__ : IMW, __0.5__ : PNW | _(no change)_ | _(no change)_ | | | Full | __0.667__ : IMW, __0.5__ : PNW | _(no change)_ | _(no change)_ | | Magnitude Scaling | | __1.0__ : WC_94 (length) | _(no change)_ | _(no change)_ | @@ -68,57 +60,62 @@ alternative models. | Dip | Reverse & Strike-Slip | __1.0__ : assigned | _(no change)_ | _(no change)_ | | | Normal | __0.2, 0.6, 0.2__ : 50 ± 10° | __0.2, 0.6, 0.2__ : 50±15° | _(no change)_ | ---- - ## Grid Source Model | Model | Description | 2008 | 2014 | 2018 | |:----- |:----------- |:---- |:---- |:---- | -| Maximum M | WUS (exceptions) | __1.0__ : 7.0 | __0.9__ : 7.5 (truncated)<br />__0.1__ : 8.0 (tapered) | _(no change)_ | -| | CEUS (craton) | __0.1__ : 6.6<br />__0.2__ : 6.8<br />__0.5__ : 7.0<br />__0.2__ : 7.2 | __0.2__ : 6.5<br />__0.5__ : 7.0<br />__0.2__ : 7.5<br />__0.1__ : 8.0 | _(no change)_ | -| Smoothing | | __1.0__ : Fixed |__0.4__ : Adaptive<br />__0.6__ : Fixed | _(no change)_ | +| Maximum M | WUS (exceptions) | __1.0__ : 7.0 | __0.9__ : 7.5 (truncated)<br>__0.1__ : 8.0 (tapered) | _(no change)_ | +| | CEUS (craton) | __0.1__ : 6.6<br>__0.2__ : 6.8<br>__0.5__ : 7.0<br>__0.2__ : 7.2 | __0.2__ : 6.5<br>__0.5__ : 7.0<br>__0.2__ : 7.5<br>__0.1__ : 8.0 | _(no change)_ | +| Smoothing | | __1.0__ : Fixed |__0.4__ : Adaptive<br>__0.6__ : Fixed | _(no change)_ | | Magnitude Scaling | CEUS & WUS | __1.0__ : WC_94 (length) | _(no change)_ | _(no change)_ | | Focal Mechanisms | Spatially Varying | __1.0__ : assigned | _(no change)_ | _(no change)_ | | Depth (zTor) | WUS, M < 6.5 | __1.0__ : 5.0 km | _(no change)_ | _(no change)_ | | | WUS, M ≥ 6.5 | __1.0__ : 1.0 km | _(no change)_ | _(no change)_ | | | CEUS, All M | __1.0__ : 5.0 km | _(no change)_ | _(no change)_ | ---- - ## Fault Source Model (CA, UCERF3) | Model | 2014 | 2018 | |:----- |:---- |:---- | -| Fault | __0.5__ : FM3.1<br /> __0.5__ : FM3.2 | _(no change)_ | -| Deformation | __0.1__ : ABM<br />__0.3__ : BIRD<br />__0.3__ : GEO<br />__0.3__ : ZENG | _(no change)_ | -| Scaling Relationship<br />(mag-area & slip-length)| 
__0.2__ : ELLS_B<br />__0.2__ : ELLS_B (sqrt-L)<br />__0.2__ : HB_08<br />__0.2__ : SHAW_09<br />__0.2__ : SHAW_09 (csd) | _(no change)_ |
-| Slip Distribution | __0.5__ : Tapered<br />__0.5__ : Boxcar | _(no change)_ |
-| M ≥ 5 rate (yr⁻¹) | __0.1__ : 6.5<br />__0.6__ : 7.9<br />__0.3__ : 9.6 | _(no change)_ |
+| Fault | __0.5__ : FM3.1<br> __0.5__ : FM3.2 | _(no change)_ |
+| Deformation | __0.1__ : ABM<br>__0.3__ : BIRD<br>__0.3__ : GEO<br>__0.3__ : ZENG | _(no change)_ |
+| Scaling Relationship<br>(mag-area & slip-length)| __0.2__ : ELLS_B<br>__0.2__ : ELLS_B (sqrt-L)<br>__0.2__ : HB_08<br>__0.2__ : SHAW_09<br>__0.2__ : SHAW_09 (csd) | _(no change)_ |
+| Slip Distribution | __0.5__ : Tapered<br>__0.5__ : Boxcar | _(no change)_ |
+| M ≥ 5 rate (yr⁻¹) | __0.1__ : 6.5<br>__0.6__ : 7.9<br>__0.3__ : 9.6 | _(no change)_ |
 | Inversion Constraint | __1.0__ : UCERF2 (CH) | _(no change)_ |
 | Fault Mo Rate | __1.0__ : off | _(no change)_ |
 
----
-
 ## Grid Source Model (CA, UCERF3)
 
 | Model | 2014 | 2018 |
 |:----- |:---- |:---- |
-| Grid: Maximum M | __0.1__ : 7.3<br />__0.8__ : 7.6<br />__0.1__ : 7.9 | _(no change)_ |
-| Grid: Smoothing | __0.5__ : Adaptive<br />__0.5__ : Fixed | _(no change)_ |
+| Grid: Maximum M | __0.1__ : 7.3<br>__0.8__ : 7.6<br>__0.1__ : 7.9 | _(no change)_ |
+| Grid: Smoothing | __0.5__ : Adaptive<br>__0.5__ : Fixed | _(no change)_ |
 | Grid: Focal Mechanisms | __1.0__ : assigned | _(no change)_ |
 | Magnitude Scaling | __1.0__ : WC_94 (length) | _(no change)_ |
 
----
-
 ## Subduction seismic source model
 
 | Model | 2008 | 2014 | 2018 |
 |:----- |:---- |:---- |:---- |
-| Rupture | __0.67__ : Full<br />__0.33__ : Partial | __1.0__ : Full<br />__0.5__ : Partial (segmented)<br />__0.5__ : Partial (unsegmented) | _(no change)_ |
-| Magnitude Scaling | __1.0__ : Youngs et al. (length) | __0.334__ : Strasser et al. (2010)<br />__0.333__ : Murotani et al. (2008)<br />__0.333__ : Papazachos et al. (2004) | _(no change)_ |
+| Rupture | __0.67__ : Full<br>__0.33__ : Partial | __1.0__ : Full<br>__0.5__ : Partial (segmented)<br>__0.5__ : Partial (unsegmented) | _(no change)_ |
+| Magnitude Scaling | __1.0__ : Youngs et al. (length) | __0.334__ : Strasser et al. (2010)<br>__0.333__ : Murotani et al. (2008)<br>__0.333__ : Papazachos et al. (2004) | _(no change)_ |
 | Magnitude Uncertainty | __0.2, 0.6, 0.2__ : M±0.2 | _(none)_ | _(no change)_ |
-| Depth | __0.5__ : Base<br />__0.2__ : Bottom<br />__0.2__ : Middle<br />__0.1__ : Top | __0.3__ : Bottom<br />__0.5__ : Middle<br />__0.2__ : Top | _(no change)_ |
-| Slab: Maximum M | __1.0__ : 7.2 | __0.9__ : 7.5<br />__0.1__ : 8.0 | _(no change)_ |
+| Depth | __0.5__ : Base<br>__0.2__ : Bottom<br>__0.2__ : Middle<br>__0.1__ : Top | __0.3__ : Bottom<br>__0.5__ : Middle<br>__0.2__ : Top | _(no change)_ |
+| Slab: Maximum M | __1.0__ : 7.2 | __0.9__ : 7.5<br>__0.1__ : 8.0 | _(no change)_ |
 
 * Full: a.k.a. 'characteristic'
 * Partial: a.k.a. 'Gutenberg-Richter' or 'floating'
+
+---
+
+## Related Pages
+
+* [USGS Models](./USGS-Models.md#usgs-models)
+  * [Model Editions](./Model-Editions.md#model-editions)
+  * [Logic Trees & Uncertainty](./Logic-Trees-&-Uncertainty.md#logic-trees-&-uncertainty)
+* [**Documentation Index**](../README.md)
+
+---
+ [U.S. 
Geological Survey](https://www.usgs.gov)
+National Seismic Hazard Mapping Project ([NSHMP](https://earthquake.usgs.gov/hazards/))
diff --git a/docs/pages/Magnitude-Frequency-Distributions.md b/docs/pages/Magnitude-Frequency-Distributions.md
index 660adb59fca2dae9615e63619014ba7fcca7b2ee..0f69189d50ab75bf754e3c6f1a8839f068e0979b 100644
--- a/docs/pages/Magnitude-Frequency-Distributions.md
+++ b/docs/pages/Magnitude-Frequency-Distributions.md
@@ -7,13 +7,6 @@ members listed in the JSON examples below are required.
 
 [[_TOC_]]
 
-MFD types:
-
-* [Single](#single-magnitude-mfd)
-* [Gutenberg-Richter](#gutenberg-richter-mfd)
-* [Tapered Gutenberg-Richter](#tapered-gutenberg-richter-mfd)
-* [Incremental](#incremental-mfd)
-
 ## Single
 
 A single MFD represents an earthquake of a specific magnitude size and rate. The `rate` member
@@ -48,10 +41,10 @@ tree of rates for a source is also present in a model. Example:
 
 ## Tapered Gutenberg–Richter
 
-A [tapered Gutenberg-Richter](http://scec.ess.ucla.edu/~ykagan/moms_index.html) MFD is similar to
-Gutenberg-Richter, above, but with an exponential taper applied with a corner magnitude of `mCut`.
-The `a`-value member is optional when a logic tree of rates for a source is also present in a
-model. Example:
+A [tapered Gutenberg-Richter](https://academic.oup.com/gji/article/148/3/520/822773) MFD is
+similar to Gutenberg-Richter, above, but with an exponential taper applied with a corner magnitude
+of `mCut`. The `a`-value member is optional when a logic tree of rates for a source is also
+present in a model. Example:
 
 ```json
 {
@@ -79,11 +72,12 @@ A general purpose MFD that represents defined by explicit arrays of magnitudes a
 
 ## MFD Construction
 
-Construction of MFDs from their declaration in a tree _may_ also depend on the following files:
+Construction of MFDs from their declaration in a tree also depends on the following files:
 
-**mfd-map.json:** If an `mfd-tree` value is a string, then that value must map to an actual logic
-tree in a `mfd-map.json` file that is typically located high in the source model heirarchy. For
-example:
+**mfd-map.json:** A mfd-map defines multiple mfd-trees common to multiple branches of a
+source-tree. If an `mfd-tree` value is a string, then that value must map to a logic tree
+defined in a `mfd-map.json` file that is typically located high in the source model hierarchy.
+For example:
 
 ```json
 {
@@ -98,39 +92,16 @@ example:
 }
 ```
 
-**mfd-config.json:**
-
-MFD confguration files:
-
-* mfd-map.json
-* mfd-config.json
-* rate-tree.json
-* Rate files (*.csv)
-
-For instance, the final MFDs used in a hazard may be modified by an epistemic or aleatory
-uncertainty model specified in `mfd-config.json`. Single and Gutenberg-Richter MFDs that do not
-have their `rate` or `a`-value members defined rely on the presence of a `rate-tree.json` file.
-A rate-tree defines a logic tree of rates or pointers to CSV rate files with spatially varying
-rate data.
-
-### `mfd-map.json`
-
-A mfd-map defines multiple mfd-trees common to multiple branches of a source-tree.
-
-### `mfd-config.json`
-
-Additional uncertainty in MFDs is often considered when building hazard models and is defined
-in a `mfd-config.json` file. Application of uncertainty models is MFD type-dependent. The
-`epistemic-tree` member, if non-null, is used to create 3-branches for single and Gutenberg-Richter
-MFDs. For a single MFD, a moment-balanced three-point distribution of magnitudes (± 0.2 magnitude
-units) is created. For a Gutenberg-Richter MFD, three maximum magnidue branches are created, also
-moment-balanced. The `aleatory-properties` member is only applicable to single MFDs and may be
-applied on top of an epistemic-tree. In the example below, `aleatory-properties` defines an
-eleven-point, moment-balanced normal distribution with a width of ±2σ of magnitudes about a
-central magnitude. If no additional uncertainty model is desired, `epistemic-tree` and
-`aleatory-properties` should be set to null.
-
-TODO is aleatory uncertainty in MFD ALWAYS moment-balanced???
+**mfd-config.json:** Additional uncertainty in MFDs is often considered when building hazard
+models and is defined in a `mfd-config.json` file. Application of uncertainty models is MFD
+type-dependent. The `epistemic-tree` member, if non-null, is used to create 3-branches for single
+and Gutenberg-Richter MFDs. For a single MFD, a moment-balanced three-point distribution of
+magnitudes (± 0.2 magnitude units) is created. For a Gutenberg-Richter MFD, three maximum magnitude
+branches are created, also moment-balanced. The `aleatory-properties` member is only applicable
+to single MFDs and may be applied in addition to an epistemic-tree. In the example below,
+`aleatory-properties` defines an eleven-point, moment-balanced normal distribution with a width
+of ±2σ of magnitudes about a central magnitude. If no additional uncertainty model is desired,
+`epistemic-tree` and `aleatory-properties` should be set to null.
 
 ```json
 {
@@ -149,9 +120,9 @@ TODO is aleatory uncertainty in MFD ALWAYS moment-balanced???
 }
 ```
 
-### `rate-tree.json`
-
-A rate-tree defines each branch `value` in years (recurrence or return period):
+**rate-tree.json:** Single and Gutenberg-Richter MFDs that do not have their `rate` or `a`-value
+members defined rely on the presence of a `rate-tree.json` file. A rate-tree defines each branch
+`value` in years (recurrence or return period):
 
 ```json
 [
@@ -168,116 +139,19 @@ A rate-tree defines each branch `value` in years (recurrence or return period):
 ]
 ```
 
-or with pointers to rate files, in the case of grid sources:
-
-```json
-[
-  {
-    "id": "fixed",
-    "weight": 0.6,
-    "value" : "fixed.csv"
-  },
-  {
-    "id": "adaptive",
-    "weight": 0.4,
-    "value" : "adaptive.csv"
-  }
-]
-```
+---
 
-**mfd-config.json:** Controls properties of the MFD and possible additional epistemic or aleatory
-uncertainty. For example:
+## Related Pages
 
-```json
-{
-  "epistemic-tree": [
-    { "id": "+uₑ", "weight": 0.2, "value": -0.2 },
-    { "id": "~uₑ", "weight": 0.6, "value": 0.0 },
-    { "id": "-uₑ", "weight": 0.2, "value": 0.2 }
-  ],
-  "aleatory-properties": {
-    "size": 11,
-    "nσ": 2,
-    "σ": 0.12
-  },
-  "minimum-magnitude": 6.5,
-  "nshm-bin-model": false
-}
-```
+* [Hazard Model](./Hazard-Model.md#hazard-model)
+  * [Model Structure](./Model-Structure.md#model-structure)
+  * [Model Files](./Model-Files.md#model-files)
+  * [Source Types](./Source-Types.md#source-types)
+  * [Magnitude Frequency Distributions (MFDs)](./Magnitude-Frequency-Distributions.md#magnitude-frequency-distributions)
+  * [Rupture Scaling Relations](./Rupture-Scaling-Relations.md#rupture-scaling-relations)
+  * [Ground Motion Models (GMMs)](./Ground-Motion-Models.md#ground-motion-models)
+* [**Documentation Index**](../README.md)
 
-**rate-tree.json:** Defines each branch `value` in annual rate (1 / return period in years). 
-For example: - -```json -[ - { - "id": "R1", - "weight": 0.2, - "value" : 0.002 - }, - { - "id": "R2", - "weight": 0.8, - "value" : 0.05 - } -] -``` - -From Model Files: - -### Magnitude Frequency Distributions (MFDs) - -`mfd-tree`, `mfd-map.json`, `mfd-config.json`, and `rate-tree.json` - -A `mfd-tree` property is common to all source types and defines a logic tree of magnitude -frequency distributions (MFDs). The `mfd-tree` element may be an array of mfd branches defined -inline or a string reference to a top-level member of an `mfd-map.json` that contains one or -more mfd-trees shared across a source-tree. The branches of a mfd-tree commonly have the generic -ID's: `[M1, M2, M3, ...]` to support mfd-tree matching across source-tree branches. - -```json -"mfd-tree": [ - { "id": "M1", "weight": 0.3, "value": { "type": "SINGLE", "m": 6.8, "rate": 0.001 }}, - { "id": "M2", "weight": 0.3, "value": { "type": "SINGLE", "m": 7.0, "rate": 0.001 }}, - { "id": "M3", "weight": 0.3, "value": { "type": "SINGLE", "m": 7.2, "rate": 0.001 }}, - { "id": "M4", "weight": 0.1, "value": { "type": "SINGLE", "m": 7.4, "rate": 0.001 }} -] -``` - -How MFDs are actually built depends on the settings in a `mfd-config.json` file and rates. For more -details on MFDs and their configuration see the -[magnitude frequency distributions](magnitude-frequency-distributions) section. - -An `mfd-config.json` is currently only required for finite fault sources. It can be located -anywhere in the file heirarchy and may be overridden in nested directories. - -Depending on the types of MFDs being modeled, a rate file may contain Gutenberg-Richter a-values -or magnitude-specific rates. The branches of a rate-tree commonly have the generic ID's: -`[R1, R2, R3, ...]` to support matching rate-trees across source-tree branches. - -```xml - <!-- (optional) A magnitude uncertainty model that will be - applied to every source: - - <Epistemic/> varies mMax and scales variant rates by - the supplied weights; it is only ever applied to SINGLE - and GR MFDs. - - 'cutoff' is magnitude below which uncertainty will be - disabled. - - <Aleatory/> applies a (possibly moment-balanced) ±2σ - Gaussian distribution to mMax; it is only ever applied - to SINGLE MFDs (possibly in conjunction with epistemic). - - 'count' is the number of magnitude bins spanned by - the distribution. - - <Aleatory/> or '<Epistemic/>', or the entire block - may be omitted. --> - <MagUncertainty> - <Epistemic cutoff="6.5" - deltas="[-0.2, 0.0, 0.2]" weights="[0.2, 0.6, 0.2]" /> - <Aleatory cutoff="6.5" - moBalance="true" sigma="0.12" count="11" /> - </MagUncertainty> - - <SourceProperties ruptureScaling="NSHM_FAULT_WC94_LENGTH" /> - - </Settings> -``` +--- + [U.S. Geological Survey](https://www.usgs.gov) +National Seismic Hazard Mapping Project ([NSHMP](https://earthquake.usgs.gov/hazards/)) diff --git a/docs/pages/Model-Editions.md b/docs/pages/Model-Editions.md index e4524a35cd4b794875e8006ba0cf361435d4d679..02d123367b4425f21e34385b4a77d9062380a54e 100644 --- a/docs/pages/Model-Editions.md +++ b/docs/pages/Model-Editions.md @@ -7,10 +7,10 @@ data, or modeling procedures that warrant a model update. Every model the USGS p associated with a USGS Open-File report or refereed journal article and supporting datasets. However, layered on top of any given model release are bug-fixes and model improvements. 
-With the update to a new codebase, [nshmp-haz](https://github.com/usgs/nshmp-haz), the NSHMP has -adopted [semantic versioning](http://semver.org) to keep track of different hazard models. For -any given model region, the first number (or major version) corresponds to a particular release -or update year. For example, we consider the initial +With the update to a new codebase, [nshmp-haz](https://code.usgs.gov/ghsc/nshmp/nshmp-haz), +the NSHMP has adopted [semantic versioning](http://semver.org) to keep track of different hazard +models. For any given model region, the first number (or major version) corresponds to a particular +release or update year. For example, we consider the initial [1996 conterminous U.S. NSHM](https://earthquake.usgs.gov/hazards/hazmaps/conterminous/index.php#1996) to be v1.0.0. The second number (or minor version) reflects updates to a particular release that likely causes small changes to hazard values. The third number reflects non-hazard altering @@ -19,33 +19,37 @@ hazard. The USGS NSHMP is committed to supporting current and prior model releases in any given region via web services. The table below provides a summary of all NSHM releases and their corresponding -version numbers. +version numbers. Links to documentation and data for each of the models below are listed on the USGS +[Seismic Hazard Maps and Site-Specific Data](https://www.usgs.gov/natural-hazards/earthquake-hazards/seismic-hazard-maps-and-site-specific-data) +page. Changes between editions in model regions are documented in the release notes of the +individual model repositories. Region | Year | Version | Static | Dynamic | Notes | -------|:----:|:-------:|:------:|:-------:|-------| -Conterminous U.S. | [2014](https://earthquake.usgs.gov/hazards/hazmaps/conterminous/index.php#2014) | [v4.2.0](https://github.com/usgs/nshmp-model-cous-2014/releases/tag/v4.2.0)<sup>†</sup> | |:small_blue_diamond:| | -Conterminous U.S. | [2014](https://earthquake.usgs.gov/hazards/hazmaps/conterminous/index.php#2014) | [v4.1.4](https://github.com/usgs/nshmp-model-cous-2014/releases/tag/v4.1.4)<sup>†</sup> | |:small_blue_diamond:| | -Conterminous U.S. | [2014](https://earthquake.usgs.gov/hazards/hazmaps/conterminous/index.php#2014) | [v4.0.0](https://github.com/usgs/nshmp-haz-fortran/releases/tag/nshm2014r1) |:small_blue_diamond:| | ASCE7-16 | -Conterminous U.S. | [2008](https://earthquake.usgs.gov/hazards/hazmaps/conterminous/index.php#2008) | v3.3.3 | |:small_blue_diamond:| | -Conterminous U.S. | [2008](https://earthquake.usgs.gov/hazards/hazmaps/conterminous/index.php#2008) | [v3.2.0](https://github.com/usgs/nshmp-haz-fortran/releases/tag/nshm2008r3) |:small_blue_diamond:| | | -Conterminous U.S. | [2008](https://earthquake.usgs.gov/hazards/hazmaps/conterminous/index.php#2008) | v3.1.0 |:small_blue_diamond:| | ASCE7-10 | -Conterminous U.S. | [2008](https://earthquake.usgs.gov/hazards/hazmaps/conterminous/index.php#2008) | v3.0.0 | | | | -Conterminous U.S. | [2002](https://earthquake.usgs.gov/hazards/hazmaps/conterminous/index.php#2002) | v2.0.0 | | | | -Conterminous U.S. 
| [1996](https://earthquake.usgs.gov/hazards/hazmaps/conterminous/index.php#1996) | v1.0.0 | | | | -Alaska | [2007](https://earthquake.usgs.gov/hazards/hazmaps/ak/index.php#2007) | v2.1.0 | |:small_blue_diamond:| | -Alaska | [2007](https://earthquake.usgs.gov/hazards/hazmaps/ak/index.php#2007) | v2.0.0 |:small_blue_diamond:| | ASCE7-10 | -Alaska | [1999](https://earthquake.usgs.gov/hazards/hazmaps/ak/index.php#1999) | v1.0.0 | | | | -American Samoa | [2012](https://earthquake.usgs.gov/hazards/hazmaps/islands.php#samoapacific) | v1.0.0 | | | | -Guam | [2012](https://pubs.usgs.gov/of/2012/1015/) | v1.0.0 | | | | -Hawaii | [2018](https://earthquake.usgs.gov/hazards/hazmaps/islands.php#hi) | v2.0.0 | | TBD | | -Hawaii | [1998](https://earthquake.usgs.gov/hazards/hazmaps/islands.php#hi) | v1.1.0 | | TBD | | -Hawaii | [1998](https://earthquake.usgs.gov/hazards/hazmaps/islands.php#hi) | v1.0.0 |:small_blue_diamond:| | ASCE7-10 | -Puerto Rico & <br/> U.S. Virgin Islands | [2003](https://earthquake.usgs.gov/hazards/hazmaps/islands.php#prvi) | v1.0.0 | | | | +Conterminous U.S. | 2018 | [v5.0.0](https://code.usgs.gov/ghsc/nshmp/nshms/nshm-conus)<sup>†</sup> | |:small_blue_diamond:| | +Conterminous U.S. | 2014 | [v4.2.0](https://code.usgs.gov/ghsc/nshmp/nshms/nshm-conus)<sup>†</sup> | |:small_blue_diamond:| | +Conterminous U.S. | 2014 | [v4.1.4](https://code.usgs.gov/ghsc/nshmp/nshms/nshm-conus)<sup>†</sup> | |:small_blue_diamond:| | +Conterminous U.S. | 2014 | [v4.0.0](https://github.com/usgs/nshmp-haz-fortran/releases/tag/nshm2014r1) |:small_blue_diamond:| | ASCE7-16 | +Conterminous U.S. | 2008 | v3.3.3 | |:small_blue_diamond:| | +Conterminous U.S. | 2008 | [v3.2.0](https://github.com/usgs/nshmp-haz-fortran/releases/tag/nshm2008r3) |:small_blue_diamond:| | | +Conterminous U.S. | 2008 | v3.1.0 |:small_blue_diamond:| | ASCE7-10 | +Conterminous U.S. | 2008 | v3.0.0 | | | | +Conterminous U.S. | 2002 | v2.0.0 | | | | +Conterminous U.S. | 1996 | v1.0.0 | | | | +Alaska | 2007 | v2.1.0 | |:small_blue_diamond:| | +Alaska | 2007 | v2.0.0 |:small_blue_diamond:| | ASCE7-10 | +Alaska | 1999 | v1.0.0 | | | | +American Samoa | 2012 | v1.0.0 | | | | +Guam | 2012 | v1.0.0 | | | | +Hawaii | 2018 | v2.0.0 | | TBD | | +Hawaii | 1998 | v1.1.0 | | TBD | | +Hawaii | 1998 | v1.0.0 |:small_blue_diamond:| | ASCE7-10 | +Puerto Rico & <br/> U.S. Virgin Islands | 2003 | v1.0.0 | | | | -<sup>†</sup> __Note on the 2014 Conterminous U.S. NSHM:__ Initial publication of the -[2014 model](https://earthquake.usgs.gov/hazards/hazmaps/conterminous/index.php#2014) included -data to support updates to the U.S. Building Code, specifically hazard curves for peak ground -acceleration (PGA), and 0.2 and 1.0 second spectral accelerations, all at a BC boundary site +<sup>†</sup> __Note on the 2014 conterminous U.S. NSHM:__ Initial publication of the +[2014 model](https://www.usgs.gov/natural-hazards/earthquake-hazards/science/2014-united-states-lower-48-seismic-hazard-long-term) +included data to support updates to the U.S. Building Code, specifically hazard curves for peak +ground acceleration (PGA), and 0.2 and 1.0 second spectral accelerations, all at a BC boundary site class with Vs30 = 760 m/s. Some time later, the model was deployed to the Unified Hazard Tool (UHT) and included support in the Wester U.S. for calculations at sites other than Vs30 = 760 m/s, consistent with dynamic calculations using the 2008 model. 
Subsequently, we updated the 2014
@@ -62,8 +66,8 @@ Moving forward, we will continue to include the original dynamic version of the
 Historically, the USGS NSHMP has produced static datasets of hazard curves that accompany the
 'official' release or update to a model. In the context of providing interactive web services,
 such static datasets can be quickly retreived and provide most users with the data they seek.
-More complex analyses, such as deaggregations, require that a complete hazard calculation be
-performed on demand. Historically, USGS deaggregation services were provided for particular model
+More complex analyses, such as disaggregations, require that a complete hazard calculation be
+performed on demand. Historically, USGS disaggregation services were provided for particular model
 years and regions, each located at a unique web address and supported by a unique codebase.
 However, it has proven too difficult to maintain numerous isolated services, and we therefore
 developed a single codebase that supports all current and prior models.
@@ -71,10 +75,12 @@ developed a single codebase that supports all current and prior models.
 Moreover, as time goes by, there may be more customization options we want to expose to users.
 However, with each additional level of customization, it quickly becomes too difficult to produce
 and version corresponding static datasets. We therefore identify model versions that support
-deaggregations and other calculations as 'dynamic'. At present, only the most current versions
-of a particular model region and year are supported via 'dynamic' calculations.
+disaggregations and other calculations as 'dynamic'. At present, only the most current versions
+of a particular model region and year are supported via 'dynamic' calculations. Dynamic editions
+are supported through web-services provided by `nshmp-haz`. Static editions are supported via a
+separate set of services.
 
-In practice, this leads to results produced by 'dynamic' caluculations being somewhat different
+In practice, this leads to results produced by 'dynamic' calculations being different
 than those stored in static datasets of prior model minor versions, although usually not by much.
 The release notes for each model version detail the changes that give rise to changes in hazard
 between between versions. There are also differences that arise from different modeling
@@ -82,18 +88,18 @@ assumptions between past and current codebases that are detailed below.
 
 It is important for users to know which edition they should be using. For instance if one is
 bound to use those values adopted by the U.S. building code, one of the editions marked `ASCE7-*`
-is probably most appropriate. However, if one is bound to use the most up to date source model,
+is probably most appropriate. However, if one is bound to use the most up-to-date source model,
 one of the dynamic editions is likely better.
 
-Dynamic editions are supported through web-services provided by the `nshmp-haz-ws` library
-(this repository). Static editions are supported via a separate set of services. Both are
-documented on the [web services](web-services) page.
+---
 
-## Region specific changes
+## Related Pages
 
-Changes between editions in model regions are documented in the release notes of the individual
-model repositories.
+* [USGS Models](./USGS-Models.md#usgs-models)
+  * [Model Editions](./Model-Editions.md#model-editions)
+  * [Logic Trees & Uncertainty](./Logic-Trees-&-Uncertainty.md#logic-trees-&-uncertainty)
+* [**Documentation Index**](../README.md)
 
-* [Conterminous US (2014)](/usgs/nshmp-model-cous-2014/wiki)
-* [Conterminous US (2008)](/usgs/nshmp-model-cous-2008/wiki)
-* [Alaska (2007)](/usgs/nshmp-model-ak-2007/wiki)
+---
+ [U.S. Geological Survey](https://www.usgs.gov)
+National Seismic Hazard Mapping Project ([NSHMP](https://earthquake.usgs.gov/hazards/))
diff --git a/docs/pages/Model-Files.md b/docs/pages/Model-Files.md
index f6433e2a2588aa02f62db33d0b86ac31c35a27e2..8101b20686098169a1a8c2095be3af59ed9c5ca5 100644
--- a/docs/pages/Model-Files.md
+++ b/docs/pages/Model-Files.md
@@ -30,7 +30,7 @@ motions from the model.
 
 **calc-config.json:** Optional calculation configuration file specifies the default calculation
 settings used for a NSHM. This file overrides any built in default values. See
-the [calculation configuration](calculation-configuration) page for more details.
+the [calculation configuration](./Calculation-Configuration.md) page for more details.
 
 ## Logic Trees
 
@@ -79,7 +79,7 @@ or its children will be processed; any standalone sources will be ignored. For e
 
 **source-group.json:** A specialized form of logic tree that describes model branches that are
-additivd and therefore does not include weights. Examples from the NSHM for the Conterminous U.S.
+additive and therefore does not include weights. Examples from the NSHM for the conterminous U.S.
 NSHM include the Cascadia segmented partial-rupture models and the New Madrid 1500-yr cluster
 branches. The branch objects in a source group _may_ include an optional `scale` member that can
 be used to impose a probability of occurrence or other scaling requred by a NSHM. If absent, the
@@ -112,12 +112,13 @@ For example:
 ]
 ```
 
-See the [ground motion models](ground-motion-models) page for details on GMMs supported in
+See the [ground motion models](./Ground-Motion-Models.md) page for details on GMMs supported in
 _nshmp-haz_ and the related `gmm-config.json` files that governs GMM behavior.
 
 ### MFD Logic Trees
 
-MFD logic trees are typically defined as JSON members nested in other files. For example:
+MFD logic trees are common to all source types and are defined as JSON members nested in other
+files. For example:
 
 ```json
 "mfd-tree": [
@@ -130,14 +131,16 @@ MFD logic trees are typically defined as JSON members nested in other files. For
 
 An `mfd-tree` may be included as a `properties` member of a GeoJSON feature or as a member of a
 `rupture-set.json` file. In both cases the tree may alternatively be identified with a string, in
-which case the `mfd-tree` will be pulled from the collection of trees defined in a `mfd-map.json`
-file. A `mfd-map.json` file is typicaly located high in the source tree heirarchy and faciltates
-using the same MFDs on multiple branches of a source tree.
+which case the `mfd-tree` will be pulled from the collection of shared trees defined in a
+`mfd-map.json` file. A `mfd-map.json` file is typically located high in the source tree hierarchy
+and facilitates using the same MFDs on multiple branches of a source tree. The branches of a
+mfd-tree commonly have the generic ID's: `[M1, M2, M3, ...]` to support mfd-tree matching across
+source-tree branches.
 
 How MFDs are intialized (or realized) depends on the presence and contents of `mfd-config.json`
 and `rate-tree.json` files. 
See the -[magnitude frequency distributions](magnitude-frequency-distributions) page for details on these -files and the types of MFDs supported in _nshmp-haz_. +[magnitude frequency distributions](./Magnitude-Frequency-Distributions.md) page for details on +these files and the types of MFDs supported in _nshmp-haz_. ## Rupture Sets @@ -183,3 +186,20 @@ is composed of an array of rupture-sets. ] } ``` + +--- + +## Related Pages + +* [Hazard Model](./Hazard-Model.md#hazard-model) + * [Model Structure](./Model-Structure.md#model-structure) + * [Model Files](./Model-Files.md#model-files) + * [Source Types](./Source-Types.md#source-types) + * [Magnitude Frequency Distributions (MFDs)](./Magnitude-Frequency-Distributions.md#magnitude-frequency-distributions) + * [Rupture Scaling Relations](./Rupture-Scaling-Relations.md#rupture-scaling-relations) + * [Ground Motion Models (GMMs)](./Ground-Motion-Models.md#ground-motion-models) +* [**Documentation Index**](../README.md) + +--- + [U.S. Geological Survey](https://www.usgs.gov) +National Seismic Hazard Mapping Project ([NSHMP](https://earthquake.usgs.gov/hazards/)) diff --git a/docs/pages/Model-Structure.md b/docs/pages/Model-Structure.md index c4fdfc6ca4b802f60716c0a1467f171207f5b421..dc7a55de5a11acb278453f7db604f00f5bc5d93b 100644 --- a/docs/pages/Model-Structure.md +++ b/docs/pages/Model-Structure.md @@ -16,7 +16,7 @@ settings supporting the nested source types: `fault-sources`, `grid-sources`, an The `volcanic` tectonic setting also supports `decollement-sources`. The root of a model must include `model-info.json` and _may_ include a `calc-config.json` that -specifies any custom default [calculation configuration](calculation-configuration) settings +specifies any custom default [calculation configuration](./Calculation-Configuration.md) settings for the model. Top level tectonic setting directories must include `gmm-tree.json` and `gmm-config.json` files. Source directories are loaded recursively, permitting configuration files deeper in the heirarchy to override those defined higher in the heirarchy, as needed and as @@ -95,12 +95,10 @@ fault-sources/ └─ branch-4/ ``` -See also: [Finite Fault Source Type](source-types#finite-fault-sources) +See also: [Finite Fault Source Type](./Source-Types.md#fault-sources) ## Crustal Grid Sources -TODO this isn't quite right, needs conus-2018 refactor for verification - Grid sources are based on smoothed seismicity or other spatially varying rate model and may be defined as either single source features, each within its own directory, or as more complex logic trees of source model variants. Grid sources are modeled as point sources of varying complexity. @@ -127,7 +125,7 @@ grid-sources/ └─ ... ``` -See also: [Grid Source Type](source-types#grid-sources) +See also: [Grid Source Type](./Source-Types.md#grid-sources) ## Crustal Zone (Area) Sources @@ -156,7 +154,7 @@ zone-sources/ └─ [rate-tree.json] (optional) tree of rates (future) ``` -See also: [Zone Source Type](source-types#zone-sources) +See also: [Zone Source Type](./Source-Types.md#zone-sources) ## Subduction Interface Sources @@ -174,7 +172,7 @@ subduction-interface └─ ... ``` -See also: [Subduction Interface Source Type](source-types#subduction-interface-sources) +See also: [Subduction Interface Source Type](./Source-Types.md#subduction-interface-sources) ## Subduction Intraslab Sources @@ -192,4 +190,21 @@ subduction-slab └─ ... 
``` -See also: [Subduction Intraslab Source Type](source-types#subduction-intraslab-sources) +See also: [Subduction Intraslab Source Type](./Source-Types.md#subduction-intraslab-sources) + +--- + +## Related Pages + +* [Hazard Model](./Hazard-Model.md#hazard-model) + * [Model Structure](./Model-Structure.md#model-structure) + * [Model Files](./Model-Files.md#model-files) + * [Source Types](./Source-Types.md#source-types) + * [Magnitude Frequency Distributions (MFDs)](./Magnitude-Frequency-Distributions.md#magnitude-frequency-distributions) + * [Rupture Scaling Relations](./Rupture-Scaling-Relations.md#rupture-scaling-relations) + * [Ground Motion Models (GMMs)](./Ground-Motion-Models.md#ground-motion-models) +* [**Documentation Index**](../README.md) + +--- + [U.S. Geological Survey](https://www.usgs.gov) +National Seismic Hazard Mapping Project ([NSHMP](https://earthquake.usgs.gov/hazards/)) diff --git a/docs/pages/Rupture-Scaling-Relations.md b/docs/pages/Rupture-Scaling-Relations.md index 0a79a5e4f6e0abdcd037bf2dfbdc8ce4521cd417..6f86336e90922d1debb48e8029ac719995248588 100644 --- a/docs/pages/Rupture-Scaling-Relations.md +++ b/docs/pages/Rupture-Scaling-Relations.md @@ -67,3 +67,20 @@ the San Francisco Bay region: 2002– 2031, U.S. Geol. Surv. Open-File Report 20 Youngs RR, Chiou B-SJ, Silva WJ, and Humphrey JR (1997) Strong ground motion attenuation relationships for subduction zone earthquakes. Seismological Research Letters 68(?): 58–73. + +--- + +## Related Pages + +* [Hazard Model](./Hazard-Model.md#hazard-model) + * [Model Structure](./Model-Structure.md#model-structure) + * [Model Files](./Model-Files.md#model-files) + * [Source Types](./Source-Types.md#source-types) + * [Magnitude Frequency Distributions (MFDs)](./Magnitude-Frequency-Distributions.md#magnitude-frequency-distributions) + * [Rupture Scaling Relations](./Rupture-Scaling-Relations.md#rupture-scaling-relations) + * [Ground Motion Models (GMMs)](./Ground-Motion-Models.md#ground-motion-models) +* [**Documentation Index**](../README.md) + +--- + [U.S. Geological Survey](https://www.usgs.gov) +National Seismic Hazard Mapping Project ([NSHMP](https://earthquake.usgs.gov/hazards/)) diff --git a/docs/pages/Site-Specification.md b/docs/pages/Site-Specification.md index c2a27fa42b90f59556bd3498d6bca3d04009d293..48fddc3be34b76d95173d3f7ba82af7d92ae6bfa 100644 --- a/docs/pages/Site-Specification.md +++ b/docs/pages/Site-Specification.md @@ -1,10 +1,8 @@ # Site Specification -(TODO update etc and/or javadoc links) - The sites at which to perform hazard and related calculations may be defined in a variety of ways. Examples of the file formats described below are available in the resource directory: -[`etc/nshm`](/usgs/nshmp-haz/tree/master/etc/nshm). +[`etc/nshm`](../../etc/nshm/README.md). __Note on Coordinates:__ *nshmp-haz* supports longitude and latitude values in the closed ranges `[-360° ‥ 360°]` and `[-90° ‥ 90°]`. Note, however, that mixing site and/or source @@ -21,7 +19,7 @@ entire string in double quotes. For any site parameter values that are not supplied on the command line or in the file formats below, the following defaults are used (see the `site` member of the -[configuration](calculation-configuration) file): +[configuration parameter](./Calculation-Configuration.md#calculation-configuration-parameters) table): ```text name: Unnamed @@ -81,15 +79,15 @@ As with the CSV format, the minimum required data is a `geometry` `coordinates` `properties` are optional. 
When using GeoJSON, the `title` property maps to the name of the site. Additional properties, if present, are ignored by *nshmp-haz* but permitted as they may be relevant for other applications. For example, [styling -properties](https://help.github.com/articles/mapping-geojson-files-on-github/#styling-features) +properties](https://github.com/mapbox/simplestyle-spec/tree/master/1.1.0) may be used to improve rendering in mapping applications. For a fully fledged example, see the -[NSHM test sites](/usgs/nshmp-haz/blob/master/etc/nshm/sites-nshmp.geojson) file. +[NSHM test sites](../../etc/nshm/sites-nshmp.geojson) file. ### Map Regions GeoJSON is also used to define *nshmp-haz* map regions. For example, see the file that defines a region commonly used when creating hazard and other maps for the -[Los Angeles basin](/usgs/nshmp-haz/blob/master/etc/nshm/map-la-basin.geojson). +[Los Angeles basin](../../etc/nshm/map-la-basin.geojson). A map region is expected as a `Polygon` `FeatureCollection`. Currently, *nshmp-haz* only supports a `FeatureCollection` with 1 or 2 polygons. When a single polygon is defined, it must consist of a @@ -106,5 +104,20 @@ have a feature `id` of `Extents`, and *must* be rectangular (in a mercator proje parallel to lines of latitude and longitude. Any points in the 'calculation' polygon outside the 'extents' polygon are ignored; hazard values at any points within the 'extents' polygon but outside the 'calculation' polygon are set to zero. For an example, see the -[NSHMP Western US](/usgs/nshmp-haz/blob/master/etc/nshm/map-wus.geojson) map site file. -(TODO This needs updating; link to conus-2018 active crust map-region.geojson). +[NSHMP Western US](../../etc/nshm/map-wus.geojson) map site file. + +--- + +## Related Pages + +* [Building & Running](./Building-&-Running.md#building-&-running) + * [Developer Basics](./Developer-Basics.md#developer-basics) + * [Calculation Configuration](./Calculation-Configuration.md#calculation-configuration) + * [Site Specification](./Site-Specification.md#site-specification) + * [Examples](../../etc/examples) (or + [on GitLab](https://code.usgs.gov/ghsc/nshmp/nshmp-haz/-/tree/main/etc/examples)) +* [**Documentation Index**](../README.md) + +--- + [U.S. Geological Survey](https://www.usgs.gov) +National Seismic Hazard Mapping Project ([NSHMP](https://earthquake.usgs.gov/hazards/)) diff --git a/docs/pages/Source-Types.md b/docs/pages/Source-Types.md index 86278011bd04619138aed3f597341cc61487efb1..595e0a9125a1a1e50597a9a4d70921f8c6032869 100644 --- a/docs/pages/Source-Types.md +++ b/docs/pages/Source-Types.md @@ -344,3 +344,20 @@ using an `interface-config.json` file. Subduction intraslab sources are currently specified the same way as [grid sources](#grid-sources) in curstal tectonic settings. Source model properties are defined using an `slab-config.json` file. + +--- + +## Related Pages + +* [Hazard Model](./Hazard-Model.md#hazard-model) + * [Model Structure](./Model-Structure.md#model-structure) + * [Model Files](./Model-Files.md#model-files) + * [Source Types](./Source-Types.md#source-types) + * [Magnitude Frequency Distributions (MFDs)](./Magnitude-Frequency-Distributions.md#magnitude-frequency-distributions) + * [Rupture Scaling Relations](./Rupture-Scaling-Relations.md#rupture-scaling-relations) + * [Ground Motion Models (GMMs)](./Ground-Motion-Models.md#ground-motion-models) +* [**Documentation Index**](../README.md) + +--- + [U.S. 
Geological Survey](https://www.usgs.gov) +National Seismic Hazard Mapping Project ([NSHMP](https://earthquake.usgs.gov/hazards/)) diff --git a/docs/pages/USGS-Models.md b/docs/pages/USGS-Models.md index b7659a3f5220531438c185ca2bb54c307131cebb..087c0af1389b34de60792bdaebc3b7396b8cda29 100644 --- a/docs/pages/USGS-Models.md +++ b/docs/pages/USGS-Models.md @@ -6,7 +6,7 @@ years. This window is narrowing, however, and updates are also being undertaken models for states and territories outside the conterminous U.S. These pages describe the models that are available and some of the history and versions of each. The models are intended for use with the USGS probabilistic earthquake hazard codebase: -[*nshmp-haz*](https://github.com/usgs/nshmp-haz). +[*nshmp-haz*](https://code.usgs.gov/ghsc/nshmp/nshmp-haz). As specified in this documentation, NSHM files are organized by tectonic setting and then further by source type. For larger models, such as that of the conterminous U.S., sources are commonly @@ -18,20 +18,22 @@ page for links to all published models. Each model region has a dedicated repository with version tags marking different model editions: -* [Conterminous U.S.](/ghsc/nshmp/nshm-conus) -* [Hawaii](/ghsc/nshmp/nshm-hawaii) +* [Conterminous U.S.](https://code.usgs.gov/ghsc/nshmp/nshms/nshm-conus) +* [Hawaii](https://code.usgs.gov/ghsc/nshmp/nshms/nshm-hawaii) * Alaska - _update not yet published_ * Guam & Marianas - _update not yet published_ * Puerto Rico & U.S. Virgin Islands - _update not yet published_ * Samoa & Pacific Islands - _update not yet published_ -## Related Pages +--- -* [Model Editions](model-editions) -* [Logic Trees & Uncertainty](logic-trees-&-uncertainty) +## Related Pages -[**Documentation Index**](docs) +* [USGS Models](./USGS-Models.md#usgs-models) + * [Model Editions](./Model-Editions.md#model-editions) + * [Logic Trees & Uncertainty](./Logic-Trees-&-Uncertainty.md#logic-trees-&-uncertainty) +* [**Documentation Index**](../README.md) --- - [U.S. Geological Survey](https://www.usgs.gov) + [U.S. Geological Survey](https://www.usgs.gov) National Seismic Hazard Mapping Project ([NSHMP](https://earthquake.usgs.gov/hazards/)) diff --git a/etc/examples/1-hazard-curve/README.md b/etc/examples/1-hazard-curve/README.md index 0d19048ccddeefa85b1e19ab92ba8e7951744e57..ed8da976693fdf3b5a0cf561651a8f1efaa79b35 100644 --- a/etc/examples/1-hazard-curve/README.md +++ b/etc/examples/1-hazard-curve/README.md @@ -44,4 +44,8 @@ __Results directory structure:__ In the next example, we'll override the model supplied configuration with a custom file. 
<!-- markdownlint-disable MD001 --> -#### Next: [Example 2 – A custom configuration](../2-custom-config) +#### Next: [Example 2 – A custom configuration](../2-custom-config/README.md) + +--- + +* [**Documentation Index**](../../../docs/README.md) diff --git a/etc/examples/2-custom-config/README.md b/etc/examples/2-custom-config/README.md index e5d05461c5630ad835fe3140c1bcbd4afdefc97e..33c8c381aa8b47f57fbff49085da83b6d064fd7b 100644 --- a/etc/examples/2-custom-config/README.md +++ b/etc/examples/2-custom-config/README.md @@ -37,4 +37,8 @@ __Results directory structure:__ ``` <!-- markdownlint-disable MD001 --> -#### Next: [Example 3 – Using a custom sites file](../3-sites-file) +#### Next: [Example 3 – Using a custom sites file](../3-sites-file/README.md) + +--- + +* [**Documentation Index**](../../../docs/README.md) diff --git a/etc/examples/3-sites-file/README.md b/etc/examples/3-sites-file/README.md index 3f5e64e1d2fca994685b667b93f7910100e41b88..f70f356a496a5177fdbae1418bcdc53b144ab576 100644 --- a/etc/examples/3-sites-file/README.md +++ b/etc/examples/3-sites-file/README.md @@ -38,4 +38,8 @@ __Results directory structure:__ ``` <!-- markdownlint-disable MD001 --> -#### Next: [Example 4 – A simple hazard map](../4-hazard-map) +#### Next: [Example 4 – A simple hazard map](../4-hazard-map/README.md) + +--- + +* [**Documentation Index**](../../../docs/README.md) diff --git a/etc/examples/4-hazard-map/README.md b/etc/examples/4-hazard-map/README.md index 76fa10714e6de6e20db08f1890ae99c323383b67..0c26c4102630b610eba586085ff2cd66b96b7110 100644 --- a/etc/examples/4-hazard-map/README.md +++ b/etc/examples/4-hazard-map/README.md @@ -27,4 +27,8 @@ __Results directory structure:__ ``` <!-- markdownlint-disable MD001 --> -#### Next: [Example 5 – A more complex model](../5-complex-model) +#### Next: [Example 5 – A more complex model](../5-complex-model/README.md) + +--- + +* [**Documentation Index**](../../../docs/README.md) diff --git a/etc/examples/5-complex-model/README.md b/etc/examples/5-complex-model/README.md index dfbaf44441cfdde4c8f4e94ab2d29ddac1dd1862..79f7b17b1dbbb340077ff22e5e16c755461cd98d 100644 --- a/etc/examples/5-complex-model/README.md +++ b/etc/examples/5-complex-model/README.md @@ -66,4 +66,8 @@ __Results directory structure:__ ``` <!-- markdownlint-disable MD001 --> -#### Next: [Example 6 – Enhanced output](../6-enhanced-output) +#### Next: [Example 6 – Enhanced output](../6-enhanced-output/README.md) + +--- + +* [**Documentation Index**](../../../docs/README.md) diff --git a/etc/examples/6-enhanced-output/README.md b/etc/examples/6-enhanced-output/README.md index f4d81c44c5c2f5f2ddb1e14aac339c5be0c1e2e0..61fffb7a1390f0e5893c6db546127f08cfb27335 100644 --- a/etc/examples/6-enhanced-output/README.md +++ b/etc/examples/6-enhanced-output/README.md @@ -52,4 +52,8 @@ __Results directory structure:__ ``` <!-- markdownlint-disable MD001 --> -#### Next: [Example 7 – Deaggregation](../7-deaggregation) +#### Next: [Example 7 – Deaggregation](../7-deaggregation/README.md) + +--- + +* [**Documentation Index**](../../../docs/README.md) diff --git a/etc/examples/7-deaggregation/README.md b/etc/examples/7-deaggregation/README.md index cf1dec38485c1aa46acdab19c7899bf7d9b5ef4e..4750f2affa3c64fe660c3b4c543d2c4bbeb78ea2 100644 --- a/etc/examples/7-deaggregation/README.md +++ b/etc/examples/7-deaggregation/README.md @@ -88,4 +88,8 @@ subduction GMMs (e.g. `AB_03_CASCADIA_SLAB`) for sites closer to the Cascadia su empty results will not be saved. 
<!-- markdownlint-disable MD001 --> -#### Next: [Example 8 – Earthquake probabilities and rates](../8-probabilities) +#### Next: [Example 8 – Earthquake probabilities and rates](../8-probabilities/README.md) + +--- + +* [**Documentation Index**](../../../docs/README.md) diff --git a/etc/examples/8-probabilities/README.md b/etc/examples/8-probabilities/README.md index 4574d2c4342e3ad29f249f1bb8c7198e2f7b23d6..38f3b7a6de77668a70c3187a5cdd9354b8d1f5eb 100644 --- a/etc/examples/8-probabilities/README.md +++ b/etc/examples/8-probabilities/README.md @@ -47,3 +47,7 @@ __Results directory structure:__ ├─ RateCalc.log └─ probs.csv ``` + +--- + +* [**Documentation Index**](../../../docs/README.md) diff --git a/etc/examples/README.md b/etc/examples/README.md index beb390bdb9a693043c3550e0ef6bbd822f97036c..b557f3baae6ef5ffda228f41c8bcd4d2d489fc20 100644 --- a/etc/examples/README.md +++ b/etc/examples/README.md @@ -11,4 +11,8 @@ alias hazard='java -cp /path/to/nshmp-haz/build/libs/nshmp-haz.jar gov.usgs.eart Because each example builds on prior concepts, it is best step through all the examples, however quickly. <!-- markdownlint-disable MD001 --> -#### Start: [Example 1 – A simple hazard calculation](1-hazard-curve) +#### Start: [Example 1 – A simple hazard calculation](1-hazard-curve/README.md) + +--- + +* [**Documentation Index**](../../docs/README.md) diff --git a/etc/nshm/README.md b/etc/nshm/README.md new file mode 100644 index 0000000000000000000000000000000000000000..96c4ee61ad93dacd36ce1a1b84edb4634a9b7194 --- /dev/null +++ b/etc/nshm/README.md @@ -0,0 +1,3 @@ +# NSHM site and map files + +This directory contains the sites, map, and boundary files used in NSHM calculations. diff --git a/gradle.properties b/gradle.properties index 05bbb8d30359538c9280b621f542f5af0487916f..b64478dc1b66d4806b6f5b6bd931a4172002c9da 100644 --- a/gradle.properties +++ b/gradle.properties @@ -8,7 +8,7 @@ junitVersion = 5.5.2 micronautVersion = 2.4.1 mnPluginVersion = 1.4.2 nodeVersion = 3.0.1 -nshmpLibVersion = 0.5.1 +nshmpLibVersion = 0.6.0 nshmpWsUtilsVersion = 0.1.2 shadowVersion = 5.2.0 spotbugsVersion = 4.2.4 diff --git a/gradle/ext.gradle b/gradle/ext.gradle index ddd9896f6c4e2672b3386794d48bcbe606e2bbc4..3cc94336dfbc62250d267f263b530fcc1095c49f 100644 --- a/gradle/ext.gradle +++ b/gradle/ext.gradle @@ -84,7 +84,7 @@ ext { */ gitCommand = 'git --git-dir=../nshmp-haz/.git describe --tags' gitTag = gitCommand.execute().text.replace('\n', '') ?: 'unknown' - gitLink = '<a href="https://github.com/usgs/nshmp-haz">' + gitTag +'</a>' + gitLink = '<a href="https://code.usgs.gov/ghsc/nshmp/nshmp-haz">' + gitTag +'</a>' propsPath = '/resources/main/app.properties' docTitle = projectName + ': ' + gitLink docFooter = '<div style="float: left; font-size: 16px; text-align: right; ' + @@ -92,7 +92,7 @@ ext { '<b><a href="https://www.usgs.gov" target="_top">U.S. 
Geological Survey</a></b> ' + '– National Seismic Hazard Model Project ' + '(<a href="https://earthquake.usgs.gov/hazards/" target="_top">NSHMP</a>) ' + - '– <a href="https://github.com/usgs/nshmp-haz/blob/master/LICENSE.md" ' + + '– <a href="https://code.usgs.gov/ghsc/nshmp/nshmp-haz/-/blob/main/LICENSE.md" ' + 'target="_top">License</a> | <b>' + gitLink + '</b></div>' docOut = findProperty('javadoc_loc') thinJar = false diff --git a/gradle/tasks.gradle b/gradle/tasks.gradle deleted file mode 100644 index 5375745fcc94badeb800e85748a1da883fd964f8..0000000000000000000000000000000000000000 --- a/gradle/tasks.gradle +++ /dev/null @@ -1,16 +0,0 @@ - -/* - * Create a zip file of all dependencies - */ -task libs(type: Zip) { - archiveBaseName = "nshmp-haz-dependencies" - from { - configurations.compileClasspath.collect { - it - } - } - - into("java/lib") - - destinationDirectory.value(libsDirectory) -} diff --git a/scripts/custom.config.sh b/scripts/custom.config.sh deleted file mode 100644 index e0630f30489325d4f6b306c27712b7f76a1271df..0000000000000000000000000000000000000000 --- a/scripts/custom.config.sh +++ /dev/null @@ -1,11 +0,0 @@ -#!/bin/bash -# shellcheck disable=SC2140 - -export SERVICE_MAP=( - "/nshmp/ws/conus-2018":"nshmp-haz-conus-2018" - # "/nshmp/conus-2014":"nshmp-haz-conus-2014" - # "/nshmp/conus-2014b":"nshmp-haz-conus-2014b" - # "/nshmp/conus-2008":"nshmp-haz-conus-2008" - # "/nshmp/hawaii-2020":"nshmp-haz-hi-2020" - # "/nshmp/alaska-2007":"nshmp-haz-ak-2007" -); diff --git a/scripts/docker-config.inc.sh b/scripts/docker-config.inc.sh deleted file mode 100644 index e8cba087878b452c6801999f075281c6f38db485..0000000000000000000000000000000000000000 --- a/scripts/docker-config.inc.sh +++ /dev/null @@ -1,28 +0,0 @@ -#!/bin/bash -# shellcheck disable=SC1090 -# shellcheck disable=SC2034 - -# Prevent configuration from being included multiple times -[ -z "${CONFIGURATION_COMPLETE}" ] || return; -source "$(dirname "$0")/docker-functions.inc.sh"; - -readonly DEBUG="${DEBUG:-false}"; - -# Turn on debugging if desired. 
Do this first so each value is echo'd -if [[ "${DEBUG}" == "true" ]]; then - set -x; -fi - -readonly CEUS="Central & Eastern US"; -readonly CONFIG_FILE="${CONFIG_FILE:-config.json}"; -readonly CONTEXT_PATH="${CONTEXT_PATH:-/}"; -readonly JAVA_XMX="${JAVA_XMX:-8g}"; -readonly MODEL=$(echo "${MODEL:-CONUS_2008}" | awk \{'print toupper($0)'\}); -readonly NSHM_VERSION="${NSHM_VERSION:-master}"; -readonly PROJECT="${PROJECT:-nshmp-haz}"; -readonly PROGRAM=$(echo "${PROGRAM:-hazard}" | awk \{'print tolower($0)'\}); -readonly WUS="Western US"; -readonly VERSION_2014B="v4.1.1"; - -# Include guard to prevent accidental re-configuration -CONFIGURATION_COMPLETE="true"; diff --git a/scripts/docker-entrypoint.sh b/scripts/docker-entrypoint.sh index 3fbe1f8c47f4c07d72eb0bb6b7cf4049ee72313d..ad3f2b852b8676707aad1d972819b0db806e33cd 100644 --- a/scripts/docker-entrypoint.sh +++ b/scripts/docker-entrypoint.sh @@ -1,50 +1,75 @@ #!/bin/bash -# shellcheck disable=SC1090 - -source "$(dirname "${0}")/docker-config.inc.sh"; -exit_status=${?}; -[ "${exit_status}" -eq 0 ] || exit "${exit_status}"; - -# Get nshmp program to call -nshmp_program=$(get_nshmp_program "${PROGRAM}"); -exit_status=${?}; -check_exit_status "${exit_status}"; - -# Get model path to use -if [ "${MOUNT_MODEL}" = true ]; then - nshm_path="model"; -else - nshm_path=$(get_model_path "${MODEL}" "${NSHM_VERSION}"); + +## +# Run nshmp-haz +## +main() { + # Get name of sites file + sites_file=$(ls /app/sites.*); + + # Run nshmp-haz + java "-Xmx${JAVA_MEMORY}" \ + -cp "/app/nshmp-haz.jar" \ + "gov.usgs.earthquake.nshmp.${CLASS_NAME}" \ + "${MODEL_PATH}" \ + "${sites_file}" \ + ${RETURN_PERIOD:+ "${RETURN_PERIOD}"} \ + ${IML:+ "${IML}"} \ + "${CONFIG_FILE}"; + exit_status=${?}; + check_exit_status "${exit_status}"; + + # Move results to container volume + move_to_output_volume; exit_status=${?}; check_exit_status "${exit_status}"; -fi -# Check site file and get site file path -site_file=$(check_sites_file); -exit_status=${?}; -check_exit_status "${exit_status}"; + exit ${exit_status}; +} + +#### +# Check current exit status. +# +# @param $1 exit_status {Integer} +# Current exit status +#### +check_exit_status() { + local exit_status=${1}; + [ "${exit_status}" -eq 0 ] || exit "${exit_status}"; +} + +#### +# Exit with an error message. +# +# @param $1 msg {String} +# The message for exit +# @param $2 exit_status {Integer} +# The exit status +#### +error_exit() { + local msg=${1}; shift; + local exit_status=${1} + echo "Error: ${msg}" >> /dev/stderr; + exit "${exit_status}"; +} + +#### +# Move artifacts to mounted volume. +# +# @status Integer +# The status of moving the files. 
+#### +move_to_output_volume() { + local hazout; + hazout=$(jq -r ".output.directory" "${CONFIG_FILE}"); + + if [ "${hazout}" == null ]; then + hazout="hazout"; + fi -# Check config file -[ -f "${CONFIG_FILE}" ] || echo "{}" > "${CONFIG_FILE}"; -jq empty < "${CONFIG_FILE}"; -exit_status=${?}; -check_exit_status "${exit_status}"; + mv "${hazout}"/* "${OUTPUT_PATH}"/.; + return ${?}; +} # Run nshmp-haz -java -"Xmx${JAVA_XMX}" \ - -cp "/app/${PROJECT}.jar" \ - "gov.usgs.earthquake.nshmp.${nshmp_program}" \ - "${nshm_path}" \ - "${site_file}" \ - ${RETURN_PERIOD:+ "${RETURN_PERIOD}"} \ - ${IML:+ "${IML}"} \ - "${CONFIG_FILE}"; -exit_status=${?}; -check_exit_status "${exit_status}"; - -# Move results to container volume -move_to_output_volume; -exit_status=${?}; -check_exit_status "${exit_status}"; - -exit ${exit_status}; +main "$@"; diff --git a/scripts/docker-functions.inc.sh b/scripts/docker-functions.inc.sh deleted file mode 100644 index ae711e683f6ce28e5d6c1ec21d321bda2e3ec436..0000000000000000000000000000000000000000 --- a/scripts/docker-functions.inc.sh +++ /dev/null @@ -1,292 +0,0 @@ -#!/bin/bash - -#### -# Check current exit status. -# -# @param $1 exit_status {Integer} -# Current exit status -#### -check_exit_status() { - local exit_status=${1}; - [ "${exit_status}" -eq 0 ] || exit "${exit_status}"; -} - -#### -# Check that the sites file is valid. -# -# @return String -# The site file name -# @status Integer -# The exit status -#### -check_sites_file() { - local site_file; - local exit_status; - site_file=$(ls sites*) || error_exit "Site file does not exist." 1; - - # Check if valid JSON or ASCII file - case ${site_file} in - *.geojson) - jq empty < "${site_file}"; - exit_status=${?}; - ;; - *.csv) - if [[ "$(file "${site_file}" -b)" != "ASCII text"* ]]; then - error_exit "Site file [${site_file}] is not valid ASCII" 1; - fi - ;; - *) - error_exit "Bad site file [${site_file}]." 1; - ;; - esac - - echo "${site_file}"; - return "${exit_status}"; -} - -#### -# Download a repository from Github. -# -# @param $1 url {String} -# The url to download -# @param $2 branch {String} -# The branch or tag to checkout -# -# @status Integer -# The status of the curl call -#### -download_repo() { - local url=${1}; - local branch=${2}; - local exit_status; - - git clone --depth 1 -b "${branch}" "${url}"; - exit_status=${?}; - - if [ ${exit_status} -ne 0 ]; then - error_exit "Could not download [${url}]" ${exit_status}; - fi - - return ${exit_status}; -} - -#### -# Exit with an error message. -# -# @param $1 msg {String} -# The message for exit -# @param $2 exit_status {Integer} -# The exit status -#### -error_exit() { - local msg=${1}; - local exit_status=${2} - echo "Error: ${msg}" >> /dev/stderr; - exit "${exit_status}"; -} - -#### -# Returns the model path. -# -# @param $1 nshm {String} -# The NSHM to download. -# @param $1 nshm_version {String} -# The version to download from GitHub. -# -# @return String -# The model path -# @status Integer -# The result of downloading the repository. 
-#### -get_model() { - local nshm=${1}; - local nshm_version=${2}; - local model; - local model_path; - local url; - local exit_status; - - if [ "${nshm_version}" == "null" ]; then - return 0; - fi - - case ${nshm} in - "AK_2007") - model="nshm-ak-2007"; - model_path="${model}"; - url="https://github.com/usgs/${model}.git"; - ;; - "CEUS_2008") - model="nshm-cous-2008"; - model_path="${model}/${CEUS}/"; - url="https://github.com/usgs/${model}.git"; - ;; - "CEUS_2014") - model="nshm-cous-2014"; - model_path="${model}/${CEUS}/"; - url="https://github.com/usgs/${model}.git"; - ;; - "CEUS_2014B") - model="nshm-cous-2014"; - model_path="${model}/${CEUS}/"; - nshm_version="${VERSION_2014B}"; - url="https://github.com/usgs/${model}.git"; - ;; - "CEUS_2018") - model="nshm-cous-2018"; - model_path="${model}/${CEUS}/"; - url="https://github.com/usgs/${model}.git"; - ;; - "CONUS_2008") - model="nshm-cous-2008"; - model_path="${model}"; - url="https://github.com/usgs/${model}.git"; - ;; - "CONUS_2014") - model="nshm-cous-2014"; - model_path="${model}"; - url="https://github.com/usgs/${model}.git"; - ;; - "CONUS_2014B") - model="nshm-cous-2014"; - model_path="${model}"; - nshm_version="${VERSION_2014B}"; - url="https://github.com/usgs/${model}.git"; - ;; - "CONUS_2018") - model="nshm-cous-2018"; - model_path="${model}"; - url="https://github.com/usgs/${model}.git"; - # model="nshm-conus-2018"; - # url="git@code.usgs.gov:ghsc/nshmp/nshm-conus-2018.git"; - ;; - # "CONUS_2023") - # model="nshm-conus-2023"; - # url="git@code.usgs.gov:ghsc/nshmp/nshm-conus-2023.git"; - # ;; - "HI_2020") - model="nshm-hi-2020"; - model_path="${model}"; - url="https://github.com/usgs/${model}.git"; - ;; - "WUS_2008") - model="nshm-cous-2008"; - model_path="${model}/${WUS}/"; - url="https://github.com/usgs/${model}.git"; - ;; - "WUS_2014") - model="nshm-cous-2014"; - model_path="${model}/${WUS}/"; - url="https://github.com/usgs/${model}.git"; - ;; - "WUS_2014B") - model="nshm-cous-2014"; - model_path="${model}/${WUS}/"; - nshm_version="${VERSION_2014B}"; - url="https://github.com/usgs/${model}.git"; - ;; - "WUS_2018") - model="nshm-cous-2018"; - model_path="${model}/${WUS}/"; - url="https://github.com/usgs/${model}.git"; - ;; - - *) - error_exit "Model [${nshm}] not supported" 1; - ;; - esac - - download_repo "${url}" "${nshm_version}"; - rm -rf "${model:?}/.git"; - exit_status=${?}; - - echo "${model_path}"; - return ${exit_status} -} - -#### -# Returns the path to the model. -# -# @param $1 nshm {String} -# The NSHM to download. -# @param $1 nshm_version {String} -# The version to download from GitHub. -# -# @return String -# The path to the model -# @status Integer -# Status of get_model call -#### -get_model_path() { - local nshm=${1}; - local nshm_version=${2}; - local nshmp_model_path; - local exit_status; - nshmp_model_path=$(get_model "${nshm}" "${nshm_version}"); - exit_status=${?}; - - echo "${nshmp_model_path}"; - return ${exit_status}; -} - -#### -# Returns to nshmp-haz Java class to call. 
-# -# @param $1 program {String} -# The program to run -# -# @return String -# The program to call in nshmp-haz -#### -get_nshmp_program() { - local program=${1}; - local nshmp_program; - - case ${program} in - "deagg") - nshmp_program="DeaggCalc"; - ;; - "deagg-epsilon") - nshmp_program="DeaggEpsilon"; - ;; - "deagg-iml") - nshmp_program="DeaggIml"; - ;; - "hazard-2018") - nshmp_program="Hazard2018"; - ;; - "hazard") - nshmp_program="HazardCalc"; - ;; - "rate") - nshmp_program="RateCalc"; - ;; - *) - error_exit "Program [${program}] not supported" 1; - ;; - esac - - echo "${nshmp_program}"; -} - -#### -# Move artifacts to mounted volume. -# -# @param $1 config_file {String} -# The config file -# -# @status Integer -# The status of moving the files. -#### -move_to_output_volume() { - local config_file; - local hazout; - hazout=$(jq -r ".output.directory" "${config_file}"); - - if [ "${hazout}" == null ]; then - hazout="hazout"; - fi - - mv ${hazout}/* output/.; - return ${?}; -} diff --git a/scripts/nshmp-haz.yml b/scripts/nshmp-haz.yml deleted file mode 100644 index b1164d857ebb21d5cacd3aa85d823785317b9260..0000000000000000000000000000000000000000 --- a/scripts/nshmp-haz.yml +++ /dev/null @@ -1,65 +0,0 @@ -version: "3.7" - -# General deployment config -x-app: &app - image: ${REGISTRY}/nshmp-haz-ws:${CI_COMMIT_REF_SLUG} - deploy: - restart_policy: - condition: any - delay: 5s - max_attempts: 3 - window: 120s - replicas: 1 - update_config: - order: start-first - parallelism: 1 - ports: - - 8080 - -services: - # Deploy nshmp-haz with CONUS-2018 - nshmp-haz-conus-2018: - <<: *app - environment: - MODEL: CONUS-2018 - CONTEXT_PATH: /nshmp/ws/conus-2018 - - # # Deploy nshmp-haz with CONUS-2014 - # nshmp-haz-conus-2014: - # <<: *app - # environment: - # RUN_HAZARD: 'false' - # MODEL: CONUS-2014 - # CONTEXT_PATH: nshmp/conus-2014 - - # # Deploy nshmp-haz with CONUS-2014B - # nshmp-haz-conus-2014b: - # <<: *app - # environment: - # RUN_HAZARD: 'false' - # MODEL: CONUS-2014B - # CONTEXT_PATH: nshmp/conus-2014b - - # # Deploy nshmp-haz with CONUS-2008 - # nshmp-haz-conus-2008: - # <<: *app - # environment: - # RUN_HAZARD: 'false' - # MODEL: CONUS-2008 - # CONTEXT_PATH: nshmp/conus-2008 - - # # Deploy nshmp-haz with HI-2020 - # nshmp-haz-hi-2020: - # <<: *app - # environment: - # RUN_HAZARD: 'false' - # MODEL: HI-2020 - # CONTEXT_PATH: nshmp/hawaii-2020 - - # # Deploy nshmp-haz with AK-2007 - # nshmp-haz-ak-2007: - # <<: *app - # environment: - # RUN_HAZARD: 'false' - # MODEL: AK-2007 - # CONTEXT_PATH: nshmp/alaska-2007 diff --git a/src/main/java/gov/usgs/earthquake/nshmp/DeaggCalc.java b/src/main/java/gov/usgs/earthquake/nshmp/DeaggCalc.java index 05ad820447210cfef01a2baf498696096f30ca8f..2ea4f3d51de9b4bec9b29a33cdc1a940138055e1 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/DeaggCalc.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/DeaggCalc.java @@ -44,13 +44,15 @@ public class DeaggCalc { * on required parameters. * * <p>Please refer to the nshmp-haz <a - * href="https://github.com/usgs/nshmp-haz/wiki" target="_top">wiki</a> for - * comprehensive descriptions of source models, configuration files, site - * files, and hazard calculations. + * href="https://code.usgs.gov/ghsc/nshmp/nshmp-haz/-/tree/main/docs" + * target="_top">docs</a> for comprehensive descriptions of source models, + * configuration files, site files, and hazard calculations. 
* - * @see <a href="https://github.com/usgs/nshmp-haz/wiki/Building-&-Running" - * target="_top"> nshmp-haz wiki</a> - * @see <a href="https://github.com/usgs/nshmp-haz/tree/master/etc/examples" + * @see <a + * href="https://code.usgs.gov/ghsc/nshmp/nshmp-haz/-/blob/main/docs/pages/Building-&-Running.md" + * target="_top"> nshmp-haz Building & Running</a> + * @see <a + * href="https://code.usgs.gov/ghsc/nshmp/nshmp-haz/-/tree/main/etc/examples" * target="_top"> example calculations</a> */ public static void main(String[] args) { @@ -162,8 +164,10 @@ public class DeaggCalc { private static final String PROGRAM = DeaggCalc.class.getSimpleName(); private static final String USAGE_COMMAND = "java -cp nshmp-haz.jar gov.usgs.earthquake.nshmp.DeaggCalc model sites returnPeriod [config]"; - private static final String USAGE_URL1 = "https://github.com/usgs/nshmp-haz/wiki"; - private static final String USAGE_URL2 = "https://github.com/usgs/nshmp-haz/tree/master/etc"; + private static final String USAGE_URL1 = + "https://code.usgs.gov/ghsc/nshmp/nshmp-haz/-/tree/main/docs"; + private static final String USAGE_URL2 = + "https://code.usgs.gov/ghsc/nshmp/nshmp-haz/-/tree/main/etc/examples"; private static final String SITE_STRING = "name,lon,lat[,vs30,vsInf[,z1p0,z2p5]]"; private static final String USAGE = new StringBuilder() diff --git a/src/main/java/gov/usgs/earthquake/nshmp/DeaggIml.java b/src/main/java/gov/usgs/earthquake/nshmp/DeaggIml.java index 3cc37d5d06ec4cd7811e75c9e5c04570229504d5..e24795f3374af7ec00fe869ea6b2ddaa5c7359ae 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/DeaggIml.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/DeaggIml.java @@ -49,9 +49,11 @@ public class DeaggIml { * comprehensive descriptions of source models, configuration files, site * files, and hazard calculations. 
* - * @see <a href="https://github.com/usgs/nshmp-haz/wiki/Building-&-Running" - * target="_top"> nshmp-haz wiki</a> - * @see <a href="https://github.com/usgs/nshmp-haz/tree/master/etc/examples" + * @see <a + * href="https://code.usgs.gov/ghsc/nshmp/nshmp-haz/-/blob/main/docs/pages/Building-&-Running.md" + * target="_top"> nshmp-haz Building & Running</a> + * @see <a + * href="https://code.usgs.gov/ghsc/nshmp/nshmp-haz/-/tree/main/etc/examples" * target="_top"> example calculations</a> */ public static void main(String[] args) { @@ -163,8 +165,10 @@ public class DeaggIml { private static final String PROGRAM = DeaggIml.class.getSimpleName(); private static final String USAGE_COMMAND = "java -cp nshmp-haz.jar gov.usgs.earthquake.nshmp.DeaggIml model sites iml [config]"; - private static final String USAGE_URL1 = "https://github.com/usgs/nshmp-haz/wiki"; - private static final String USAGE_URL2 = "https://github.com/usgs/nshmp-haz/tree/master/etc"; + private static final String USAGE_URL1 = + "https://code.usgs.gov/ghsc/nshmp/nshmp-haz/-/tree/main/docs"; + private static final String USAGE_URL2 = + "https://code.usgs.gov/ghsc/nshmp/nshmp-haz/-/tree/main/etc/examples"; private static final String SITE_STRING = "name,lon,lat[,vs30,vsInf[,z1p0,z2p5]]"; private static final String USAGE = new StringBuilder() diff --git a/src/main/java/gov/usgs/earthquake/nshmp/Hazard2018.java b/src/main/java/gov/usgs/earthquake/nshmp/Hazard2018.java index 60df02d9bc6db0ee0cc9213bfbb243044390c040..144a258ad7d9938a6f935804ec0714778ba93b78 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/Hazard2018.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/Hazard2018.java @@ -331,8 +331,10 @@ public class Hazard2018 { private static final String PROGRAM = Hazard2018.class.getSimpleName(); private static final String USAGE_COMMAND = "java -cp nshmp-haz.jar gov.usgs.earthquake.nshmp.Hazard2018 model sites [config]"; - private static final String USAGE_URL1 = "https://github.com/usgs/nshmp-haz/wiki"; - private static final String USAGE_URL2 = "https://github.com/usgs/nshmp-haz/tree/master/etc"; + private static final String USAGE_URL1 = + "https://code.usgs.gov/ghsc/nshmp/nshmp-haz/-/tree/main/docs"; + private static final String USAGE_URL2 = + "https://code.usgs.gov/ghsc/nshmp/nshmp-haz/-/tree/main/etc/examples"; private static final String SITE_STRING = "name,lon,lat[,vs30,vsInf[,z1p0,z2p5]]"; private static String version() { diff --git a/src/main/java/gov/usgs/earthquake/nshmp/HazardCalc.java b/src/main/java/gov/usgs/earthquake/nshmp/HazardCalc.java index dec96fb7781d59e970a839048ccb596ed16a3697..b45944e03b9fc6b1d9731ba0c741df100655fe66 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/HazardCalc.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/HazardCalc.java @@ -23,6 +23,7 @@ import com.google.common.base.Throwables; import com.google.common.util.concurrent.MoreExecutors; import gov.usgs.earthquake.nshmp.calc.CalcConfig; +import gov.usgs.earthquake.nshmp.calc.DataType; import gov.usgs.earthquake.nshmp.calc.Hazard; import gov.usgs.earthquake.nshmp.calc.HazardCalcs; import gov.usgs.earthquake.nshmp.calc.HazardExport; @@ -59,10 +60,10 @@ public class HazardCalc { * configuration files, site files, and hazard calculations. 
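For orientation, a concrete invocation following the `USAGE_COMMAND` pattern below might look like this minimal sketch; the model checkout path, sites file name, and memory setting are placeholder assumptions, not values defined by this change:

```bash
# Sketch only: model and sites paths are assumed; args follow
# "HazardCalc model sites [config]".
java -Xmx8g -cp build/libs/nshmp-haz.jar \
  gov.usgs.earthquake.nshmp.HazardCalc \
  path/to/nshm-conus sites.geojson config.json
```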
* * @see <a - * href="https://code.usgs.gov/ghsc/nshmp/nshmp-haz/-/wikis/Building-&-Running" - * target="_top"> nshmp-haz wiki</a> + * href="https://code.usgs.gov/ghsc/nshmp/nshmp-haz/-/blob/main/docs/pages/Building-&-Running.md" + * target="_top"> nshmp-haz Building & Running</a> * @see <a - * href="https://code.usgs.gov/ghsc/nshmp/nshmp-haz/-/tree/master/etc/examples" + * href="https://code.usgs.gov/ghsc/nshmp/nshmp-haz/-/tree/main/etc/examples" * target="_top"> example calculations</a> */ public static void main(String[] args) { @@ -111,6 +112,11 @@ public class HazardCalc { log.info("Sites: " + sites); Path out = calc(model, config, sites, log); + + if (config.output.dataTypes.contains(DataType.MAP)) { + HazardMaps.createDataSets(out, config.output.returnPeriods, log); + } + log.info(PROGRAM + ": finished"); /* Transfer log and write config, windows requires fh.close() */ @@ -229,8 +235,10 @@ public class HazardCalc { private static final String PROGRAM = HazardCalc.class.getSimpleName(); private static final String USAGE_COMMAND = "java -cp nshmp-haz.jar gov.usgs.earthquake.nshmp.HazardCalc model sites [config]"; - private static final String USAGE_URL1 = "https://github.com/usgs/nshmp-haz/wiki"; - private static final String USAGE_URL2 = "https://github.com/usgs/nshmp-haz/tree/master/etc"; + private static final String USAGE_URL1 = + "https://code.usgs.gov/ghsc/nshmp/nshmp-haz/-/tree/main/docs"; + private static final String USAGE_URL2 = + "https://code.usgs.gov/ghsc/nshmp/nshmp-haz/-/tree/main/etc/examples"; private static final String SITE_STRING = "name,lon,lat[,vs30,vsInf[,z1p0,z2p5]]"; private static String version() { diff --git a/src/main/java/gov/usgs/earthquake/nshmp/HazardMaps.java b/src/main/java/gov/usgs/earthquake/nshmp/HazardMaps.java new file mode 100644 index 0000000000000000000000000000000000000000..cea833653b04ba3b10883e765bfa1ab507f53e6d --- /dev/null +++ b/src/main/java/gov/usgs/earthquake/nshmp/HazardMaps.java @@ -0,0 +1,190 @@ +package gov.usgs.earthquake.nshmp; + +import java.io.IOException; +import java.nio.file.FileVisitResult; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.SimpleFileVisitor; +import java.nio.file.attribute.BasicFileAttributes; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.function.Function; +import java.util.logging.Logger; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import gov.usgs.earthquake.nshmp.data.Interpolator; +import gov.usgs.earthquake.nshmp.internal.Parsing; + +/** + * Utility class to create hazard map datasets from hazard curve results. + * Methods in this class assume *.csv curve files have no comments and have a header + * row that starts with {@code "name,lon,lat,..."} or {@code "lon,lat,..."}. + * + * @author U.S.
Geological Survey + */ +public class HazardMaps { + + private static final String COMMA = ","; + private static final String CURVES_FILE = "curves.csv"; + private static final List<Integer> DEFAULT_RETURN_PERIODS = List.of(475, 975, 2475); + private static final Interpolator INTERPOLATOR = Interpolator.builder() + .logx() + .logy() + .decreasingX() + .build(); + private static final String MAP_FILE = "map.csv"; + private static final String PROGRAM = HazardMaps.class.getSimpleName(); + private static final String VALUE_FMT = "%.8e"; + private static final Function<Double, String> VALUE_FORMATTER = + Parsing.formatDoubleFunction(VALUE_FMT); + + private HazardMaps() {} + + /** + * Command line application to create a file of return period slices through a + * hazard curve dataset. Result of slicing job is saved to a {@code map.csv} + * file in the same directory as the source. + * + * @param args a path to a hazard curve result file or directory. If the + * supplied path is a directory, application will recurse through file + * tree slicing each {@code curves.csv} file encountered. + */ + public static void main(String[] args) { + if (args.length < 1) { + System.out.println("Usage: Supply a path to a file of hazard curve results and"); + System.out.println(" optionally a space separated list of return periods (in yr)"); + System.out.println(" default return periods: 475 975 2475"); + return; + } + + Path curvesPath = Path.of(args[0]); + List<Integer> returnPeriods = DEFAULT_RETURN_PERIODS; + Logger log = Logger.getLogger(HazardMaps.class.getName()); + + if (args.length > 1) { + returnPeriods = Arrays.stream(args) + .skip(1) + .mapToInt(Integer::valueOf) + .boxed() + .collect(Collectors.toList()); + } + + try { + createDataSets(curvesPath, returnPeriods, log); + } catch (Exception e) { + System.out.println("Processing Error"); + System.out.println("Arguments: " + Arrays.toString(args)); + e.printStackTrace(); + } + } + + static void createDataSets( + Path curvesPath, + List<Integer> returnPeriods, + Logger log) throws IOException { + log.info(PROGRAM + ": Creating hazard map dataset:"); + log.info("\tReturn periods: " + returnPeriods.toString()); + log.info("\tPath: " + curvesPath.toAbsolutePath().toString()); + + if (Files.isDirectory(curvesPath)) { + CurvesVisitor curvesFinder = new CurvesVisitor(returnPeriods); + Files.walkFileTree(curvesPath, curvesFinder); + } else { + processCurveFile(curvesPath, returnPeriods); + } + } + + private static List<String> create(List<String> lines, List<Integer> returnPeriods) { + int headerCount = lines.get(0).startsWith("name") ? 
3 : 2; + List<String> header = Arrays.asList(lines.get(0).split(COMMA)); + + String siteStr = header.subList(0, headerCount) + .stream() + .collect(Collectors.joining(COMMA)); + + double[] imls = header.subList(headerCount, header.size()) + .stream() + .mapToDouble(Double::valueOf) + .toArray(); + + StringBuilder mapHeader = new StringBuilder(siteStr); + returnPeriods.forEach(rp -> mapHeader.append(COMMA).append(rp)); + + List<String> linesOut = new ArrayList<>(lines.size()); + linesOut.add(mapHeader.toString()); + + Slicer slicer = new Slicer(returnPeriods, imls, headerCount); + + lines.stream() + .skip(1) + .map(slicer::slice) + .forEach(linesOut::add); + + return linesOut; + } + + private static void processCurveFile(Path curves, List<Integer> returnPeriods) { + try (Stream<String> stream = Files.lines(curves)) { + List<String> linesIn = stream.collect(Collectors.toList()); + List<String> linesOut = create(linesIn, returnPeriods); + Path maps = curves.resolveSibling(MAP_FILE); + Files.write(maps, linesOut); + } catch (IOException ioe) { + throw new RuntimeException(ioe); + } + } + + private static class CurvesVisitor extends SimpleFileVisitor<Path> { + List<Integer> returnPeriods; + + public CurvesVisitor(List<Integer> returnPeriods) { + this.returnPeriods = returnPeriods; + } + + @Override + public FileVisitResult visitFile(Path path, BasicFileAttributes attrs) { + Path fileName = path.getFileName(); + if (fileName != null && fileName.endsWith(CURVES_FILE)) { + processCurveFile(path, returnPeriods); + } + return FileVisitResult.CONTINUE; + } + } + + private static class Slicer { + private final List<Integer> returnPeriods; + private final double[] imls; + private final int headerCount; + + private Slicer(List<Integer> returnPeriods, double imls[], int headerCount) { + this.returnPeriods = returnPeriods; + this.imls = imls; + this.headerCount = headerCount; + } + + private String slice(String line) { + List<String> elements = Arrays.asList(line.split(COMMA)); + String siteStr = elements.subList(0, headerCount) + .stream() + .collect(Collectors.joining(COMMA)); + + StringBuilder lineOut = new StringBuilder(siteStr); + + double[] rates = elements + .stream() + .skip(headerCount) + .mapToDouble(Double::valueOf) + .toArray(); + + for (double returnPeriod : returnPeriods) { + lineOut.append(COMMA); + lineOut.append(VALUE_FORMATTER.apply(INTERPOLATOR.findX(imls, rates, 1 / returnPeriod))); + } + + return lineOut.toString(); + } + } + +} diff --git a/src/main/java/gov/usgs/earthquake/nshmp/RateCalc.java b/src/main/java/gov/usgs/earthquake/nshmp/RateCalc.java index c3afa5fc01814e453bb332d01e951f6843370b72..b0ccce12457576f64524d4248471e8aee85da0bc 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/RateCalc.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/RateCalc.java @@ -58,9 +58,11 @@ public class RateCalc { * comprehensive descriptions of source models, configuration files, site * files, and earthquake rate calculations. 
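Stepping back to the `HazardMaps` utility added above: its `main` method takes a path to a `curves.csv` file, or a directory to walk for such files, plus an optional list of return periods (defaulting to 475, 975, and 2475 yr), and writes a `map.csv` alongside each curve file. A minimal sketch, assuming the jar built by this project and a `hazout` results directory left by an earlier run:

```bash
# Sketch: slice existing curves.csv results at two return periods (475 and 2475 yr).
# The hazout directory is an assumed location from a prior hazard calculation.
java -cp build/libs/nshmp-haz.jar \
  gov.usgs.earthquake.nshmp.HazardMaps \
  hazout 475 2475
```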
* - * @see <a href="https://github.com/usgs/nshmp-haz/wiki/Building-&-Running" - * target="_top"> nshmp-haz wiki</a> - * @see <a href="https://github.com/usgs/nshmp-haz/tree/master/etc/examples" + * @see <a + * href="https://code.usgs.gov/ghsc/nshmp/nshmp-haz/-/blob/main/docs/pages/Building-&-Running.md" + * target="_top"> nshmp-haz Building & Running</a> + * @see <a + * href="https://code.usgs.gov/ghsc/nshmp/nshmp-haz/-/tree/main/etc/examples" * target="_top"> example calculations</a> */ public static void main(String[] args) { @@ -212,8 +214,10 @@ public class RateCalc { private static final String PROGRAM = RateCalc.class.getSimpleName(); private static final String USAGE_COMMAND = "java -cp nshmp-haz.jar gov.usgs.earthquake.nshmp.RateCalc model sites [config]"; - private static final String USAGE_URL1 = "https://github.com/usgs/nshmp-haz/wiki"; - private static final String USAGE_URL2 = "https://github.com/usgs/nshmp-haz/tree/master/etc"; + private static final String USAGE_URL1 = + "https://code.usgs.gov/ghsc/nshmp/nshmp-haz/-/tree/main/docs"; + private static final String USAGE_URL2 = + "https://code.usgs.gov/ghsc/nshmp/nshmp-haz/-/tree/main/etc/examples"; private static final String SITE_STRING = "name,lon,lat"; private static final String USAGE = new StringBuilder() diff --git a/src/main/java/gov/usgs/earthquake/nshmp/aws/HazardResultSliceLambda.java b/src/main/java/gov/usgs/earthquake/nshmp/aws/HazardResultSliceLambda.java deleted file mode 100644 index 1311536a14383509bd5c7be42537b494a53b2833..0000000000000000000000000000000000000000 --- a/src/main/java/gov/usgs/earthquake/nshmp/aws/HazardResultSliceLambda.java +++ /dev/null @@ -1,310 +0,0 @@ -package gov.usgs.earthquake.nshmp.aws; - -import static com.google.common.base.Preconditions.checkState; -import static gov.usgs.earthquake.nshmp.aws.Util.CURVES_FILE; -import static gov.usgs.earthquake.nshmp.aws.Util.MAP_FILE; -import static gov.usgs.earthquake.nshmp.www.services.ServletUtil.GSON; - -import java.io.BufferedReader; -import java.io.ByteArrayInputStream; -import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.io.OutputStream; -import java.time.ZonedDateTime; -import java.util.ArrayList; -import java.util.List; -import java.util.Optional; -import java.util.function.Function; -import java.util.stream.Collectors; - -import com.amazonaws.services.lambda.runtime.Context; -import com.amazonaws.services.lambda.runtime.RequestStreamHandler; -import com.amazonaws.services.s3.AmazonS3; -import com.amazonaws.services.s3.AmazonS3ClientBuilder; -import com.amazonaws.services.s3.model.ObjectMetadata; -import com.amazonaws.services.s3.model.PutObjectRequest; -import com.amazonaws.services.s3.model.S3Object; -import com.amazonaws.services.s3.model.S3ObjectInputStream; -import com.google.common.base.Charsets; -import com.google.common.base.Throwables; -import com.google.common.collect.Iterables; -import com.google.common.collect.Lists; - -import gov.usgs.earthquake.nshmp.aws.Util.LambdaHelper; -import gov.usgs.earthquake.nshmp.calc.Site; -import gov.usgs.earthquake.nshmp.data.Interpolator; -import gov.usgs.earthquake.nshmp.internal.Parsing; -import gov.usgs.earthquake.nshmp.internal.Parsing.Delimiter; -import gov.usgs.earthquake.nshmp.www.meta.Metadata; -import gov.usgs.earthquake.nshmp.www.meta.Status; -import gov.usgs.earthquake.nshmp.www.services.ServletUtil; - -/** - * AWS Lambda function to read in a curves file from AWS S3 and create slices at - * return periods interest. 
<br> - * - * The results are written to S3 as map.csv bucket. - */ -public class HazardResultSliceLambda implements RequestStreamHandler { - - private static final AmazonS3 S3 = AmazonS3ClientBuilder.defaultClient(); - - private static final String RATE_FMT = "%.8e"; - private static final Function<Double, String> FORMATTER = Parsing.formatDoubleFunction(RATE_FMT); - - private static final int NUMBER_OF_HEADERS = 3; - private static final String CONTENT_TYPE = "text/csv"; - - private static final Interpolator INTERPOLATOR = Interpolator.builder() - .logx() - .logy() - .decreasingX() - .build(); - - @Override - public void handleRequest( - InputStream input, - OutputStream output, - Context context) throws IOException { - LambdaHelper lambdaHelper = new LambdaHelper(input, output, context); - String requestBucket = ""; - - try { - RequestData request = GSON.fromJson(lambdaHelper.requestJson, RequestData.class); - lambdaHelper.logger.log("Request Data: " + GSON.toJson(request) + "\n"); - requestBucket = request.bucket + "/" + request.key; - checkRequest(request); - Response response = processRequest(request); - String json = GSON.toJson(response, Response.class); - lambdaHelper.logger.log("Result: " + json + "\n"); - output.write(json.getBytes()); - output.close(); - } catch (Exception e) { - lambdaHelper.logger.log("\nError: " + Throwables.getStackTraceAsString(e) + "\n\n"); - String message = Metadata.errorMessage(requestBucket, e, false); - output.write(message.getBytes()); - } - } - - private static Response processRequest(RequestData request) throws IOException { - List<InterpolatedData> data = readCurveFile(request); - String outputBucket = request.bucket + "/" + request.key; - StringBuilder csv = new StringBuilder(); - createHeaderString(csv, request); - createDataString(csv, data); - writeResults(request, outputBucket, csv.toString().getBytes(Charsets.UTF_8)); - return new Response(request, outputBucket); - } - - private static List<InterpolatedData> readCurveFile(RequestData request) throws IOException { - S3Object object = S3.getObject(request.bucket, request.key + "/" + CURVES_FILE); - S3ObjectInputStream input = object.getObjectContent(); - BufferedReader reader = new BufferedReader(new InputStreamReader(input)); - List<String> lines = reader.lines().collect(Collectors.toList()); - reader.close(); - - Optional<List<String>> header = lines.stream() - .filter(line -> !line.startsWith("#")) - .findFirst() - .map(line -> Parsing.splitToList(line, Delimiter.COMMA)); - - checkState(header.isPresent(), "Curve file is empty"); - - List<String> keys = header.get().subList(0, NUMBER_OF_HEADERS); - List<Double> imls = header.get().subList(NUMBER_OF_HEADERS, header.get().size()) - .stream() - .map(iml -> Double.parseDouble(iml)) - .collect(Collectors.toList()); - - List<InterpolatedData> data = new ArrayList<>(); - lines.stream() - .filter(line -> !line.startsWith("#")) - .skip(1) - .forEach(line -> { - data.add(curveToInterpolatedData(request, line, keys, imls)); - }); - - return data; - } - - private static InterpolatedData curveToInterpolatedData( - RequestData request, - String line, - List<String> keys, - List<Double> imls) { - List<String> values = Parsing.splitToList(line, Delimiter.COMMA); - List<Double> gms = values.subList(NUMBER_OF_HEADERS, values.size()) - .stream() - .map(gm -> Double.parseDouble(gm)) - .collect(Collectors.toList()); - values = values.subList(0, NUMBER_OF_HEADERS); - - Site site = buildSite(keys, values); - List<Double> interpolatedValues = 
request.slices.stream() - .map(returnPeriod -> INTERPOLATOR.findX(imls, gms, returnPeriod)) - .collect(Collectors.toList()); - - return new InterpolatedData(site, interpolatedValues); - } - - private static Site buildSite(List<String> keys, List<String> values) { - Double lat = null; - Double lon = null; - String name = null; - - for (int index = 0; index < keys.size(); index++) { - String key = keys.get(index); - String value = values.get(index); - - switch (key) { - case Keys.LAT: - lat = Double.parseDouble(value); - break; - case Keys.LON: - lon = Double.parseDouble(value); - break; - case Keys.NAME: - name = value; - break; - default: - throw new IllegalStateException("Unsupported site key: " + key); - } - } - - return Site.builder() - .location(lon, lat) - .name(name) - .build(); - } - - private static void checkRequest(RequestData request) { - if (request.bucket == null) { - throw new RuntimeException("Request does not contain a S3 bucket"); - } - - if (request.key == null) { - throw new RuntimeException("Request does not contain a S3 key"); - } - - if (request.slices == null) { - throw new RuntimeException("Request does not contain returnPeriods"); - } - } - - private static void createDataString(StringBuilder builder, List<InterpolatedData> data) { - data.forEach(datum -> { - List<String> locData = Lists.newArrayList( - datum.site.name, - String.format("%.5f", datum.site.location.longitude), - String.format("%.5f", datum.site.location.latitude)); - builder.append(toLine(locData, datum.values) + "\n"); - }); - } - - private static String toLine( - Iterable<String> strings, - Iterable<Double> values) { - return Parsing.join( - Iterables.concat(strings, Iterables.transform(values, FORMATTER::apply)), - Delimiter.COMMA); - } - - private static void createHeaderString(StringBuilder builder, RequestData request) { - List<String> header = Lists.newArrayList(Keys.NAME, Keys.LON, Keys.LAT); - builder.append(toLine(header, request.slices) + "\n"); - } - - private static void writeResults( - RequestData request, - String outputBucket, - byte[] result) throws IOException { - ObjectMetadata metadata = new ObjectMetadata(); - - InputStream input = new ByteArrayInputStream(result); - metadata.setContentType(CONTENT_TYPE); - metadata.setContentLength(result.length); - PutObjectRequest putRequest = new PutObjectRequest( - request.bucket, - request.key + "/" + MAP_FILE, - input, - metadata); - S3.putObject(putRequest); - input.close(); - } - - static class RequestData { - String bucket; - String key; - List<Double> slices; - - private RequestData(Builder builder) { - bucket = builder.bucket; - key = builder.key; - slices = builder.slices; - } - - static Builder builder() { - return new Builder(); - } - - static class Builder { - private String bucket; - private String key; - private List<Double> slices; - - Builder bucket(String bucket) { - this.bucket = bucket; - return this; - } - - Builder key(String key) { - this.key = key; - return this; - } - - Builder slices(List<Double> slices) { - this.slices = slices; - return this; - } - - RequestData build() { - return new RequestData(this); - } - - } - - } - - private static class Response { - final String status; - final String date; - final RequestData request; - final String csv; - - Response(RequestData request, String outputBucket) { - status = Status.SUCCESS.toString(); - date = ZonedDateTime.now().format(ServletUtil.DATE_FMT); - this.request = request; - this.csv = outputBucket + "/" + MAP_FILE; - } - - } - - private static class InterpolatedData 
{ - Site site; - List<Double> values; - - InterpolatedData(Site site, List<Double> values) { - this.site = site; - this.values = values; - } - } - - private static class Keys { - static final String LAT = "lat"; - static final String LON = "lon"; - static final String NAME = "name"; - } - -} diff --git a/src/main/java/gov/usgs/earthquake/nshmp/aws/HazardResultsMetadataLambda.java b/src/main/java/gov/usgs/earthquake/nshmp/aws/HazardResultsMetadataLambda.java deleted file mode 100644 index 88c16e1e6ca63b3824b286f332bf6d65c2ed400e..0000000000000000000000000000000000000000 --- a/src/main/java/gov/usgs/earthquake/nshmp/aws/HazardResultsMetadataLambda.java +++ /dev/null @@ -1,315 +0,0 @@ -package gov.usgs.earthquake.nshmp.aws; - -import static gov.usgs.earthquake.nshmp.aws.Util.CURVES_FILE; -import static gov.usgs.earthquake.nshmp.aws.Util.MAP_FILE; -import static gov.usgs.earthquake.nshmp.www.services.ServletUtil.GSON; - -import java.io.ByteArrayInputStream; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.time.ZonedDateTime; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.TreeSet; -import java.util.stream.Collectors; - -import com.amazonaws.services.lambda.runtime.Context; -import com.amazonaws.services.lambda.runtime.RequestStreamHandler; -import com.amazonaws.services.s3.AmazonS3; -import com.amazonaws.services.s3.AmazonS3ClientBuilder; -import com.amazonaws.services.s3.model.ListObjectsV2Request; -import com.amazonaws.services.s3.model.ListObjectsV2Result; -import com.amazonaws.services.s3.model.ObjectMetadata; -import com.amazonaws.services.s3.model.PutObjectRequest; -import com.google.common.base.Enums; -import com.google.common.base.Throwables; - -import gov.usgs.earthquake.nshmp.aws.Util.LambdaHelper; -import gov.usgs.earthquake.nshmp.calc.DataType; -import gov.usgs.earthquake.nshmp.gmm.Gmm; -import gov.usgs.earthquake.nshmp.gmm.Imt; -import gov.usgs.earthquake.nshmp.internal.Parsing; -import gov.usgs.earthquake.nshmp.internal.Parsing.Delimiter; -import gov.usgs.earthquake.nshmp.model.SourceType; -import gov.usgs.earthquake.nshmp.www.meta.Metadata; -import gov.usgs.earthquake.nshmp.www.meta.Status; -import gov.usgs.earthquake.nshmp.www.services.ServletUtil; - -/** - * AWS Lambda function to list all hazard results in the nshmp-hazout S3 bucket - * that contain a map.csv file. 
- */ -public class HazardResultsMetadataLambda implements RequestStreamHandler { - - private static final AmazonS3 S3 = AmazonS3ClientBuilder.defaultClient(); - - private static final int IMT_DIR_BACK_FROM_TOTAL = 2; - private static final int IMT_DIR_BACK_FROM_SOURCE = 4; - private static final String S3_BUCKET = "nshmp-hazout"; - private static final String RESULT_BUCKET = "nshmp-haz-lambda"; - private static final String RESULT_KEY = "nshmp-haz-aws-results-metadata.json"; - - @Override - public void handleRequest( - InputStream input, - OutputStream output, - Context context) throws IOException { - LambdaHelper lambdaHelper = new LambdaHelper(input, output, context); - - try { - Response response = processRequest(); - String json = GSON.toJson(response, Response.class); - uploadResults(json); - output.write(json.getBytes()); - output.close(); - } catch (Exception e) { - lambdaHelper.logger.log("\nError: " + Throwables.getStackTraceAsString(e) + "\n\n"); - String message = Metadata.errorMessage("", e, false); - output.write(message.getBytes()); - } - } - - private static Response processRequest() { - Map<String, CurvesMapResult> curvesMapResults = new HashMap<>(); - Set<String> users = getUsers(); - - for (String file : new String[] { CURVES_FILE, MAP_FILE }) { - List<HazardResults> hazardResults = listObjects(users, file); - CurvesMapResult result = new CurvesMapResult(users, hazardResults); - curvesMapResults.put(file, result); - } - - Result result = new Result(curvesMapResults.get(CURVES_FILE), curvesMapResults.get(MAP_FILE)); - return new Response(result); - } - - private static List<HazardResults> listObjects(Set<String> users, String file) { - ListObjectsV2Request request = new ListObjectsV2Request() - .withBucketName(S3_BUCKET) - .withDelimiter(file); - ListObjectsV2Result s3Result; - List<S3Listing> s3Listings = new ArrayList<>(); - - do { - s3Result = S3.listObjectsV2(request); - s3Result.getCommonPrefixes() - .stream() - .map(key -> keyToHazardListing(key)) - .forEach(listing -> s3Listings.add(listing)); - - request.setContinuationToken(s3Result.getNextContinuationToken()); - } while (s3Result.isTruncated()); - - return transformS3Listing(users, s3Listings); - } - - private static List<HazardResults> transformS3Listing( - Set<String> users, - List<S3Listing> s3Listings) { - List<HazardResults> hazardResults = new ArrayList<>(); - - users.forEach(user -> { - TreeSet<String> resultDirectories = s3Listings.stream() - .filter(listing -> listing.user.equals(user)) - .map(listing -> listing.resultPrefix) - .collect(Collectors.toCollection(TreeSet::new)); - - resultDirectories.forEach(resultPrefix -> { - List<S3Listing> s3Filteredlistings = s3Listings.parallelStream() - .filter(listing -> listing.user.equals(user)) - .filter(listing -> listing.resultPrefix.equals(resultPrefix)) - .collect(Collectors.toList()); - - List<HazardListing> listings = s3Filteredlistings.parallelStream() - .map(listing -> s3ListingToHazardListing(listing)) - .collect(Collectors.toList()); - - S3Listing s3Listing = s3Filteredlistings.get(0); - String path = s3Listing.path.split(resultPrefix)[0]; - String s3Path = s3Listing.user + "/" + path + resultPrefix; - - hazardResults.add(new HazardResults( - user, - s3Listing.bucket, - resultPrefix, - s3Path, - listings)); - }); - }); - - return hazardResults; - } - - private static HazardListing s3ListingToHazardListing(S3Listing s3Listing) { - return new HazardListing(s3Listing.dataType, s3Listing.path, s3Listing.file); - } - - private static S3Listing 
keyToHazardListing(String key) { - List<String> keys = Parsing.splitToList(key, Delimiter.SLASH); - HazardDataType<?> dataType = getDataType(keys); - String user = keys.get(0); - String file = keys.get(keys.size() - 1); - String path = keys.subList(1, keys.size() - 1) - .stream() - .collect(Collectors.joining("/")); - - return new S3Listing(user, S3_BUCKET, path, file, dataType); - } - - private static Set<String> getUsers() { - ListObjectsV2Request request = new ListObjectsV2Request() - .withBucketName(S3_BUCKET) - .withDelimiter("/"); - - ListObjectsV2Result listing = S3.listObjectsV2(request); - - return listing.getCommonPrefixes().stream() - .map(prefix -> prefix.replace("/", "")) - .collect(Collectors.toCollection(TreeSet::new)); - } - - private static HazardDataType<?> getDataType(List<String> keys) { - String sourceType = keys.get(keys.size() - IMT_DIR_BACK_FROM_TOTAL); - HazardDataType<?> dataType = null; - String resultDirectory = null; - Imt imt = null; - - if (Enums.getIfPresent(SourceType.class, sourceType).isPresent()) { - imt = Imt.valueOf(keys.get(keys.size() - IMT_DIR_BACK_FROM_SOURCE)); - resultDirectory = keys.get(keys.size() - IMT_DIR_BACK_FROM_SOURCE - 1); - SourceType type = SourceType.valueOf(sourceType); - dataType = new HazardDataType<SourceType>(imt, DataType.SOURCE, type, resultDirectory); - } else if (Enums.getIfPresent(Gmm.class, sourceType).isPresent()) { - imt = Imt.valueOf(keys.get(keys.size() - IMT_DIR_BACK_FROM_SOURCE)); - resultDirectory = keys.get(keys.size() - IMT_DIR_BACK_FROM_SOURCE - 1); - Gmm type = Gmm.valueOf(sourceType); - dataType = new HazardDataType<Gmm>(imt, DataType.GMM, type, resultDirectory); - } else if (Enums.getIfPresent(Imt.class, sourceType).isPresent()) { - Imt type = Imt.valueOf(sourceType); - resultDirectory = keys.get(keys.size() - IMT_DIR_BACK_FROM_TOTAL - 1); - imt = type; - dataType = new HazardDataType<Imt>(imt, DataType.TOTAL, type, resultDirectory); - } else { - throw new RuntimeException("Source type [" + sourceType + "] not supported"); - } - - return dataType; - } - - private static void uploadResults(String results) { - byte[] bytes = results.getBytes(); - ByteArrayInputStream input = new ByteArrayInputStream(bytes); - ObjectMetadata metadata = new ObjectMetadata(); - metadata.setContentLength(bytes.length); - metadata.setContentType("application/json"); - - PutObjectRequest request = new PutObjectRequest( - RESULT_BUCKET, - RESULT_KEY, - input, - metadata); - - S3.putObject(request); - } - - static class HazardDataType<E extends Enum<E>> { - final Imt imt; - final DataType type; - final transient String resultPrefix; - final E sourceType; - - HazardDataType(Imt imt, DataType type, E sourceType, String resultPrefix) { - this.imt = imt; - this.type = type; - this.resultPrefix = resultPrefix; - this.sourceType = sourceType; - } - } - - private static class HazardResults { - final String user; - final String bucket; - final String resultPrefix; - final String path; - final List<HazardListing> listings; - - HazardResults( - String user, - String bucket, - String resultPrefix, - String path, - List<HazardListing> listings) { - this.user = user; - this.bucket = bucket; - this.resultPrefix = resultPrefix; - this.path = path; - this.listings = listings; - } - } - - private static class HazardListing { - final HazardDataType<?> dataType; - final String file; - final String path; - - HazardListing(HazardDataType<?> dataType, String path, String file) { - this.dataType = dataType; - this.file = file; - this.path = path; - } - } - - 
private static class S3Listing { - final String user; - final String bucket; - final String path; - final String file; - final String resultPrefix; - final HazardDataType<?> dataType; - - S3Listing(String user, String bucket, String path, String file, HazardDataType<?> dataType) { - this.user = user; - this.bucket = bucket; - this.path = path; - this.file = file; - this.resultPrefix = dataType.resultPrefix; - this.dataType = dataType; - } - } - - private static class CurvesMapResult { - final Set<String> users; - final List<HazardResults> hazardResults; - - CurvesMapResult(Set<String> users, List<HazardResults> hazardResults) { - this.users = users; - this.hazardResults = hazardResults; - } - } - - private static class Result { - final CurvesMapResult curves; - final CurvesMapResult map; - - Result(CurvesMapResult curves, CurvesMapResult map) { - this.curves = curves; - this.map = map; - } - } - - private static class Response { - final String status; - final String date; - final Result result; - - Response(Result result) { - status = Status.SUCCESS.toString(); - date = ZonedDateTime.now().format(ServletUtil.DATE_FMT); - this.result = result; - } - } -} diff --git a/src/main/java/gov/usgs/earthquake/nshmp/aws/HazardResultsSlicerLambda.java b/src/main/java/gov/usgs/earthquake/nshmp/aws/HazardResultsSlicerLambda.java deleted file mode 100644 index a535f4cd8edd86b673d47bae589e06cf151c2e90..0000000000000000000000000000000000000000 --- a/src/main/java/gov/usgs/earthquake/nshmp/aws/HazardResultsSlicerLambda.java +++ /dev/null @@ -1,214 +0,0 @@ -package gov.usgs.earthquake.nshmp.aws; - -import static gov.usgs.earthquake.nshmp.aws.Util.CURVES_FILE; -import static gov.usgs.earthquake.nshmp.www.services.ServletUtil.GSON; - -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.time.ZonedDateTime; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.concurrent.CompletableFuture; -import java.util.stream.Collectors; - -import com.amazonaws.services.ec2.AmazonEC2; -import com.amazonaws.services.ec2.AmazonEC2ClientBuilder; -import com.amazonaws.services.lambda.AWSLambda; -import com.amazonaws.services.lambda.AWSLambdaClientBuilder; -import com.amazonaws.services.lambda.model.InvokeRequest; -import com.amazonaws.services.lambda.model.InvokeResult; -import com.amazonaws.services.lambda.runtime.Context; -import com.amazonaws.services.lambda.runtime.RequestStreamHandler; -import com.amazonaws.services.s3.AmazonS3; -import com.amazonaws.services.s3.AmazonS3ClientBuilder; -import com.amazonaws.services.s3.model.ObjectListing; -import com.google.common.base.Throwables; - -import gov.usgs.earthquake.nshmp.aws.Util.LambdaHelper; -import gov.usgs.earthquake.nshmp.internal.Parsing; -import gov.usgs.earthquake.nshmp.internal.Parsing.Delimiter; -import gov.usgs.earthquake.nshmp.www.meta.Metadata; -import gov.usgs.earthquake.nshmp.www.meta.Status; -import gov.usgs.earthquake.nshmp.www.services.ServletUtil; - -/** - * AWS Lambda function to read in hazard results from S3 and to create slices of - * return periods of interest. 
- * - * @see HazardResultSliceLambda - */ -public class HazardResultsSlicerLambda implements RequestStreamHandler { - - private static final AmazonS3 S3 = AmazonS3ClientBuilder.defaultClient(); - private static final AmazonEC2 EC2 = AmazonEC2ClientBuilder.defaultClient(); - private static final AWSLambda LAMBDA_CLIENT = AWSLambdaClientBuilder.defaultClient(); - - private static final String CURVE_SLICE_LAMBDA = System.getenv("CURVE_SLICE_LAMBDA_NAME"); - private static final String INSTANCE_STATUS = "terminated"; - - private static final int MAX_INSTANCE_CHECK = 100; - private static final int INSTANCE_CHECK_TIMEOUT = 10 * 1000; - - @Override - public void handleRequest( - InputStream input, - OutputStream output, - Context context) throws IOException { - LambdaHelper lambdaHelper = new LambdaHelper(input, output, context); - String requestBucket = ""; - - try { - RequestData request = GSON.fromJson(lambdaHelper.requestJson, RequestData.class); - requestBucket = String.format("%s/%s", request.bucket, request.key); - lambdaHelper.logger.log("Request Data: " + GSON.toJson(request) + "\n\n"); - checkRequest(request); - checkBucket(request); - Response response = processRequest(lambdaHelper, request); - output.write(GSON.toJson(response, Response.class).getBytes()); - } catch (Exception e) { - lambdaHelper.logger.log("\nError: " + Throwables.getStackTraceAsString(e) + "\n\n"); - String message = Metadata.errorMessage(requestBucket, e, false); - output.write(message.getBytes()); - } - } - - private static Response processRequest( - LambdaHelper lambdaHelper, - RequestData request) throws IOException, InterruptedException { - ObjectListing objectListing = S3.listObjects(request.bucket, request.key); - List<CompletableFuture<Void>> futures = new ArrayList<>(); - - objectListing.getObjectSummaries() - .parallelStream() - .filter(summary -> summary.getKey().endsWith(CURVES_FILE)) - .forEach(summary -> { - String name = summary.getKey(); - lambdaHelper.logger.log("Reading: " + name + "\n"); - try { - futures.add(processCurveFile(request, lambdaHelper, name)); - } catch (IOException e) { - throw new RuntimeException(e); - } - }); - - futures.forEach(CompletableFuture::join); - lambdaHelper.logger.log("Zipping results"); - return new Response(request); - } - - private static CompletableFuture<Void> processCurveFile( - RequestData request, - LambdaHelper lambdaHelper, - String curvesPath) throws IOException { - return readCurveFile(request, curvesPath) - .thenAcceptAsync(result -> { - checkLambdaResponse(result); - }); - } - - private static CompletableFuture<InvokeResult> readCurveFile( - RequestData request, - String curvesPath) throws IOException { - List<String> names = Arrays.stream(curvesPath.split("/")) - .collect(Collectors.toList()); - names.remove(names.size() - 1); - String key = Parsing.join(names, Delimiter.SLASH); - - HazardResultSliceLambda.RequestData lambdaRequest = HazardResultSliceLambda.RequestData - .builder() - .bucket(request.bucket) - .key(key) - .slices(request.slices) - .build(); - - InvokeRequest invokeRequest = new InvokeRequest() - .withFunctionName(CURVE_SLICE_LAMBDA) - .withPayload(GSON.toJson(lambdaRequest)); - - return CompletableFuture.supplyAsync(() -> { - return LAMBDA_CLIENT.invoke(invokeRequest); - }); - } - - private static void checkRequest(RequestData request) { - if (request.bucket == null) { - throw new RuntimeException("Request does not contain a S3 bucket"); - } - - if (request.key == null) { - throw new RuntimeException("Request does not contain a S3 key"); 
- } - - if (request.slices == null) { - throw new RuntimeException("Request does not contain slices"); - } - } - - private static void checkBucket(RequestData request) { - if (!S3.doesBucketExistV2(request.bucket)) { - throw new RuntimeException(String.format("S3 bucket [%s] does not exist", request.bucket)); - } - } - - private static void checkLambdaResponse(InvokeResult result) { - try { - LambdaResponse response = GSON.fromJson( - new String(result.getPayload().array()), - LambdaResponse.class); - - if (Status.ERROR.toString().equals(response.status)) { - throw new RuntimeException(response.message); - } - } catch (Exception e) { - throw new RuntimeException(e); - } - } - - private static class LambdaResponse { - String status; - String message; - } - - private static class ZipResultsResponse extends LambdaResponse { - ZipResult result; - ZipRequest request; - - private static class ZipRequest { - String bucket; - String key; - } - - private static class ZipResult { - String path; - String instanceId; - - ZipResult(String path, String instanceId) { - this.path = path; - this.instanceId = instanceId; - } - } - } - - private static class RequestData { - String bucket; - String key; - List<Double> slices; - } - - private static class Response { - final String status; - final String date; - final RequestData request; - final String outputBucket; - - Response(RequestData request) { - status = Status.SUCCESS.toString(); - date = ZonedDateTime.now().format(ServletUtil.DATE_FMT); - this.request = request; - this.outputBucket = String.format("%s/%s", request.bucket, request.key); - } - } - -} diff --git a/src/main/java/gov/usgs/earthquake/nshmp/aws/Util.java b/src/main/java/gov/usgs/earthquake/nshmp/aws/Util.java deleted file mode 100644 index 13db3f37c8f52d9613d85928096e654ba3698da8..0000000000000000000000000000000000000000 --- a/src/main/java/gov/usgs/earthquake/nshmp/aws/Util.java +++ /dev/null @@ -1,41 +0,0 @@ -package gov.usgs.earthquake.nshmp.aws; - -import java.io.BufferedReader; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.io.OutputStream; -import java.io.UnsupportedEncodingException; - -import com.amazonaws.services.lambda.runtime.Context; -import com.amazonaws.services.lambda.runtime.LambdaLogger; -import com.google.gson.JsonObject; -import com.google.gson.JsonParser; - -public class Util { - - static final String CURVES_FILE = "curves.csv"; - static final String MAP_FILE = "map.csv"; - - /** - * Parse the Lambda function {@code InputStream} into an {@code JsonObject}. 
- */ - static class LambdaHelper { - JsonObject requestJson; - Context context; - LambdaLogger logger; - OutputStream output; - - LambdaHelper(InputStream input, OutputStream output, Context context) - throws UnsupportedEncodingException { - logger = context.getLogger(); - this.context = context; - this.output = output; - - BufferedReader reader = new BufferedReader(new InputStreamReader(input)); - JsonParser parser = new JsonParser(); - - requestJson = parser.parse(reader).getAsJsonObject(); - } - } - -} diff --git a/src/main/resources/swagger/index.html b/src/main/resources/swagger/index.html index 8a4003d3a8a88c3fc3ec0d151715f68af8e5acbc..9abcef98e83f25ea79343e6b65a74aa49b80c690 100644 --- a/src/main/resources/swagger/index.html +++ b/src/main/resources/swagger/index.html @@ -39,11 +39,11 @@ <div id="swagger-ui"></div> <footer class="nshmp-template-footer"> - <a href="https://code.usgs.gov/ghsc/nshmp/nshmp-haz/-/blob/master/LICENSE.md"> + <a href="https://code.usgs.gov/ghsc/nshmp/nshmp-haz/-/blob/main/LICENSE.md"> License </a> - <a href="https://code.usgs.gov/ghsc/nshmp/nshmp-haz/-/blob/master/DISCLAIMER.md"> + <a href="https://code.usgs.gov/ghsc/nshmp/nshmp-haz/-/blob/main/DISCLAIMER.md"> Disclaimer </a>