diff --git a/.gitignore b/.gitignore index 2511358d2e49da217a34f684497b5b310da25a78..e1d3dbf31c0100d9a751af6e6c1e8581619c5d0f 100644 --- a/.gitignore +++ b/.gitignore @@ -7,6 +7,7 @@ build classes tmp curves*/ +hazout*/ eq-prob*/ eq-rate*/ Scratch*.java diff --git a/etc/examples/1-hazard-curve/README.md b/etc/examples/1-hazard-curve/README.md index 9226bf0b376ef438a9771020d252abdb52d569ce..ae19cae4e7986e8c2eb858a2d1691a300c26815d 100644 --- a/etc/examples/1-hazard-curve/README.md +++ b/etc/examples/1-hazard-curve/README.md @@ -11,11 +11,21 @@ hazard ../../peer/models/Set1-Case1 "Test Site, -122.0, 38.0" The PEER models, such as that designated above, consist of simple cases for different source types commonly encountered in a PSHA and are included in the nshmp-haz repository to support testing. See the [PEER directory](../../peer/) for more information. -The result of this calculation should be available as a single comma-delimited file containing several total mean hazard curves for PGA in a newly created 'curves' directory. In this example, the calculation configuration was derived from the model directory and the site was specified as a comma-delimited string. The string must have the form: `name,lon,lat[,vs30,vsInf[,z1p0,z2p5]]`, where `vs30`, `vsInf`, `z1p0`, and `z2p5` are optional. See the [site specification](https://github.com/usgs/nshmp-haz/wiki/sites) page for more details. +The result of this calculation should be available as a single comma-delimited file containing several total mean hazard curves for PGA in a newly created `hazout` directory. In this example, the calculation configuration was derived from the model directory and the site was specified as a comma-delimited string. The string must have the form: `name,lon,lat[,vs30,vsInf[,z1p0,z2p5]]`, where `vs30`, `vsInf`, `z1p0`, and `z2p5` are optional. See the [site specification](https://github.com/usgs/nshmp-haz/wiki/sites) page for more details. 
Note that not all [calculation configuration](https://github.com/usgs/nshmp-haz/wiki/Configuration) parameters need be supplied; see the [configuration file](../../peer/models/Set1-Case1/config.json) for this example model. -Also note that all output is written to a `curves` directory by default, but the output destination can be specified via the [`output.directory`](https://github.com/usgs/nshmp-haz/wiki/configuration#config-output) parameter. In addition to hazard curves, the calculation configuration and a log of the calculation are also saved. +Also note that all output is written to a `hazout` directory by default, but the output destination can be specified via the [`output.directory`](https://github.com/usgs/nshmp-haz/wiki/configuration#config-output) parameter. In addition to hazard curves, the calculation configuration and a log of the calculation are also saved. + +__Results directory structure:__ +``` +1-hazard-curve/ + └─ hazout/ + ├─ config.json + ├─ HazardCalc.log + └─ PGA/ + └─ curves.csv +``` In the next example, we'll override the model supplied configuration with a custom file. diff --git a/etc/examples/2-custom-config/README.md b/etc/examples/2-custom-config/README.md index e508d0b335b11e80a6fc18a1f668567b25c19b64..42528d0f5f6993c004dd5eee71309d5403c8d198 100644 --- a/etc/examples/2-custom-config/README.md +++ b/etc/examples/2-custom-config/README.md @@ -18,4 +18,18 @@ In this example we've overridden the configuration supplied by the model. Specif See the [configuration specification](https://github.com/usgs/nshmp-haz/wiki/configuration) for details on default values and supported options and formats. 
+__Results directory structure:__ +``` +2-custom-config/ + └─ hazout/ + ├─ config.json + ├─ HazardCalc.log + ├─ PGA/ + │ └─ curves.csv + ├─ SA0P2/ + │ └─ curves.csv + └─ SA1P0/ + └─ curves.csv +``` + #### Next: [Example 3 – Using a custom sites file](../3-sites-file) diff --git a/etc/examples/3-sites-file/README.md b/etc/examples/3-sites-file/README.md index 3b99f5fce245fd498b0ca9d918ccc940c366f093..a69c53d0fc13ee467146313a0a629f317a3846e6 100644 --- a/etc/examples/3-sites-file/README.md +++ b/etc/examples/3-sites-file/README.md @@ -19,4 +19,18 @@ The [site specification](https://github.com/usgs/nshmp-haz/wiki/sites) wiki page Note that both formats ([CSV](sites.csv) and [GeoJSON](sites.geojson)) are elegantly rendered by GitHub. +__Results directory structure:__ +``` +3-sites-file/ + └─ hazout/ + ├─ config.json + ├─ HazardCalc.log + ├─ PGA/ + │ └─ curves.csv + ├─ SA0P2/ + │ └─ curves.csv + └─ SA1P0/ + └─ curves.csv +``` + #### Next: [Example 4 – A simple hazard map](../4-hazard-map) diff --git a/etc/examples/4-hazard-map/README.md b/etc/examples/4-hazard-map/README.md index c2ce5fb67968e6bb22e51b81b6de94738370f053..f4dd6e5e903fd12ca3ffa694b368dcddc3758a0f 100644 --- a/etc/examples/4-hazard-map/README.md +++ b/etc/examples/4-hazard-map/README.md @@ -9,4 +9,18 @@ A hazard map is just a collection of values plucked from a lot of hazard curves. 
hazard ../../peer/models/Set1-Case1 map.geojson config.json ``` +__Results directory structure:__ +``` +4-hazard-map/ + └─ hazout/ + ├─ config.json + ├─ HazardCalc.log + ├─ PGA/ + │ └─ curves.csv + ├─ SA0P2/ + │ └─ curves.csv + └─ SA1P0/ + └─ curves.csv +``` + #### Next: [Example 5 – A more complex model](../5-complex-model) diff --git a/etc/examples/5-complex-model/README.md b/etc/examples/5-complex-model/README.md index 850a208eca2273d6a8e01fb4f79b25e6f47471c7..2cfd63b050bb64ebf7ef801443c29c9c7d28ef81 100644 --- a/etc/examples/5-complex-model/README.md +++ b/etc/examples/5-complex-model/README.md @@ -33,4 +33,24 @@ hazard ../../../../nshm-cous-2008/Western\ US map.geojson config-map.json This computes 121 curves over a 2° by 2° area and will give you a sense of how long a larger map might take. Note that in the above two examples we specified different output directories in the config files for each calculation. +__Results directory structure:__ +``` +5-complex-model/ + ├─ hazout-sites/ + │ ├─ config.json + │ ├─ HazardCalc.log + │ ├─ SA1P0/ + │ │ └─ curves.csv + │ └─ SA2P0/ + │ └─ curves.csv + │ + └─ hazout-map/ + ├─ config.json + ├─ HazardCalc.log + ├─ SA1P0/ + │ └─ curves.csv + └─ SA2P0/ + └─ curves.csv +``` + #### Next: [Example 6 – Enhanced output](../6-enhanced-output) diff --git a/etc/examples/5-complex-model/config-map.json b/etc/examples/5-complex-model/config-map.json index eb5b93211fd2a1c5ef110a279bc9db610b2b8a17..52b01318b10d35aaa88760fec75ae6dad36d9024 100644 --- a/etc/examples/5-complex-model/config-map.json +++ b/etc/examples/5-complex-model/config-map.json @@ -3,6 +3,6 @@ "imts": ["SA1P0", "SA2P0"] }, "output": { - "directory": "curves-map" + "directory": "hazout-map" } } diff --git a/etc/examples/5-complex-model/config-sites.json b/etc/examples/5-complex-model/config-sites.json index 5db53ac5881be0ca48864cb8f47741ea854e45de..31a32a602930367f4392fe3ace4ea1e025c643d9 100644 --- a/etc/examples/5-complex-model/config-sites.json +++ 
b/etc/examples/5-complex-model/config-sites.json @@ -3,7 +3,7 @@ "imts": ["SA1P0", "SA2P0"] }, "output": { - "directory": "curves-sites", + "directory": "hazout-sites", "flushLimit": 1 } } diff --git a/etc/examples/6-enhanced-output/README.md b/etc/examples/6-enhanced-output/README.md index 040ccb1c25798d474cf8c401f07fd5f3c38ec33a..e0bfe64a7ae80e5c4bb3329a0bb3ac81ff944214 100644 --- a/etc/examples/6-enhanced-output/README.md +++ b/etc/examples/6-enhanced-output/README.md @@ -9,9 +9,35 @@ While mean hazard is of broad interest, it can be useful to preserve individual hazard ../../../../nshm-cous-2008/Western\ US sites.geojson config.json ``` -The [config](https://github.com/usgs/nshmp-haz/blob/master/etc/examples/6-enhanced-output/config.json) file for this example specified `GMM` and `SOURCE` as [output curve types](https://github.com/usgs/nshmp-haz/wiki/configuration#calculation-configuration-parameters). Note that the output curves directory now contains additional directories of curves by source type and GMM. We also specified an [output flush limit](https://github.com/usgs/nshmp-haz/wiki/configuration#calculation-configuration-parameters) of `1`. Doing so gives feedback on how long it takes each site calculation to run on a particular system. +The [config](https://github.com/usgs/nshmp-haz/blob/master/etc/examples/6-enhanced-output/config.json) file for this example specified `GMM` and `SOURCE` as [output data types](https://github.com/usgs/nshmp-haz/wiki/configuration#calculation-configuration-parameters). Note that the output curves directory now contains additional directories of curves by source type and GMM. We also specified an [output flush limit](https://github.com/usgs/nshmp-haz/wiki/configuration#calculation-configuration-parameters) of `1`. Doing so gives feedback on how long it takes each site calculation to run on a particular system. 
-See the `nshmp-haz` wiki and JavDocs for more information on source types ([Wiki](https://github.com/usgs/nshmp-haz/wiki/source-types), [JavaDoc](http://usgs.github.io/nshmp-haz/javadoc/index.html?gov/usgs/earthquake/nshmp/eq/model/SourceType.html)) and GMMs ([Wiki](https://github.com/usgs/nshmp-haz/wiki/ground-motion-models), [JavaDoc](http://usgs.github.io/nshmp-haz/javadoc/index.html?gov/usgs/earthquake/nshmp/gmm/Gmm.html)).
+See the `nshmp-haz` wiki and Javadocs for more information on source types ([Wiki](https://github.com/usgs/nshmp-haz/wiki/source-types), [JavaDoc](http://usgs.github.io/nshmp-haz/javadoc/index.html?gov/usgs/earthquake/nshmp/eq/model/SourceType.html)) and GMMs ([Wiki](https://github.com/usgs/nshmp-haz/wiki/ground-motion-models), [JavaDoc](http://usgs.github.io/nshmp-haz/javadoc/index.html?gov/usgs/earthquake/nshmp/gmm/Gmm.html)).
+
+__Results directory structure:__
+```
+6-enhanced-output/
+  └─ hazout/
+      ├─ config.json
+      ├─ HazardCalc.log
+      ├─ PGA/
+      │   ├─ curves.csv
+      │   ├─ gmm/
+      │   │   ├─ AB_03_CASCADIA_SLAB/
+      │   │   │   └─ curves.csv
+      │   │   ├─ ...
+      │   │   └─ ZHAO_06_INTERFACE/
+      │   │       └─ curves.csv
+      │   └─ source/
+      │       ├─ FAULT/
+      │       │   └─ curves.csv
+      │       ├─ ...
+      │       └─ SLAB/
+      │           └─ curves.csv
+      ├─ SA0P2/
+      │   └─ ...
+      └─ SA1P0/
+          └─ ...
+```
 
 #### Next: [Example 7 – Deaggregation](../7-deaggregation)
diff --git a/etc/examples/7-deaggregation/README.md b/etc/examples/7-deaggregation/README.md
index c2139daf4d2c303eb12b13766b0a1db26d5e7688..d65e35c6844c09d8e877948ceb83b4211d959093 100644
--- a/etc/examples/7-deaggregation/README.md
+++ b/etc/examples/7-deaggregation/README.md
@@ -15,9 +15,64 @@ alias deagg='java -Xms1g -Xmx4g -cp /path/to/nshmp-haz/build/libs/nshmp-haz.jar
 deagg ../../../../nshm-cous-2008/Western\ US sites.geojson 2475 config.json
 ```
-The results of the deaggregation are saved to a `deagg` directory along with hazard curves. 
As with `HazardCalc`, if `GMM` has been specified (as it has in the [config](https://github.com/usgs/nshmp-haz/blob/master/etc/examples/7-deaggregation/config.json) file for this example) additional deaggregation results for each GMM are generated as well.
+The results of the deaggregation are saved along with hazard curves in `deagg` directories. As with `HazardCalc`, if the `GMM` data type has been specified (as it has in the [config](https://github.com/usgs/nshmp-haz/blob/master/etc/examples/7-deaggregation/config.json) file for this example) additional deaggregation results for each GMM are generated as well. Deaggregations by individual `SOURCE` type are also possible.
 
 See the following pages for more information on [deaggregation](https://github.com/usgs/nshmp-haz/wiki/about-deaggregation) and the meaning of [epsilon](https://github.com/usgs/nshmp-haz/wiki/what-is-epsilon%3F).
 
+__Results directory structure:__
+```
+7-deaggregation/
+  └─ hazout/
+      ├─ config.json
+      ├─ DeaggCalc.log
+      ├─ PGA/
+      │   ├─ curves.csv
+      │   ├─ deagg/
+      │   │   ├─ Los Angeles CA/
+      │   │   │   ├─ data.csv
+      │   │   │   └─ summary.txt
+      │   │   ├─ Salt Lake City UT/
+      │   │   │   ├─ data.csv
+      │   │   │   └─ summary.txt
+      │   │   ├─ San Francisco CA/
+      │   │   │   ├─ data.csv
+      │   │   │   └─ summary.txt
+      │   │   └─ Seattle WA/
+      │   │       ├─ data.csv
+      │   │       └─ summary.txt
+      │   └─ gmm/
+      │       ├─ AB_03_CASCADIA_SLAB/
+      │       │   ├─ curves.csv
+      │       │   └─ deagg/
+      │       │       ├─ San Francisco CA/
+      │       │       │   ├─ data.csv
+      │       │       │   └─ summary.txt
+      │       │       └─ Seattle WA/
+      │       │           ├─ data.csv
+      │       │           └─ summary.txt
+      │       ├─ ...
+      │       ├─ CB_08/
+      │       │   ├─ curves.csv
+      │       │   └─ deagg/
+      │       │       ├─ Los Angeles CA/
+      │       │       │   ├─ data.csv
+      │       │       │   └─ summary.txt
+      │       │       ├─ Salt Lake City UT/
+      │       │       │   ├─ data.csv
+      │       │       │   └─ summary.txt
+      │       │       ├─ San Francisco CA/
+      │       │       │   ├─ data.csv
+      │       │       │   └─ summary.txt
+      │       │       └─ Seattle WA/
+      │       │           ├─ data.csv
+      │       │           └─ summary.txt
+      │       └─ ...
+      ├─ SA0P2/
+      │   └─ ...
+      └─ SA1P0/
+          └─ ...
+``` +Note that in the output above, there are only deaggregation results for subduction GMMs (e.g. `AB_03_CASCADIA_SLAB`) for sites closer to the Cascadia subduction zone; empty results will not be saved. + #### Next: [Example 8 – Earthquake probabilities and rates](../8-probabilities) diff --git a/etc/examples/8-probabilities/README.md b/etc/examples/8-probabilities/README.md index 5ba695c875a5b1668f70b2dfb3d7ded1ef21a969..acaca18e4ffa012d6cddac6f046dfcec323595ec 100644 --- a/etc/examples/8-probabilities/README.md +++ b/etc/examples/8-probabilities/README.md @@ -25,4 +25,18 @@ rate ../../../../nshm-cous-2008/Western\ US map.geojson config-map.json to generate a map of cumulative Poisson probabilities (i.e. P ≥ M). -Unless an output directory is specified in a supplied config, output will be placed in either an `eq-rate` or `eq-prob` directory. Like `HazardCalc`, `RateCalc` observes the `config.output.curveTypes` `SOURCE` option and will include a `source` directory with rates or probabilities for all contributing source types. +Like `HazardCalc`, `RateCalc` observes the `config.output.dataTypes` `SOURCE` option and will include a `source` directory with rates or probabilities for all contributing source types. 
+ +__Results directory structure:__ +``` +8-probabilities/ + ├─ hazout-rate-sites/ + │ ├─ config.json + │ ├─ RateCalc.log + │ └─ rates.csv + └─ hazout-prob-map/ + ├─ config.json + ├─ RateCalc.log + └─ probs.csv +``` + diff --git a/etc/examples/8-probabilities/config-map.json b/etc/examples/8-probabilities/config-map.json index 44e663c7e51dcf2768032bf47f90b0c0a1ac5ca9..8fe1479387b26dec348c70da4046e7cc8108e829 100644 --- a/etc/examples/8-probabilities/config-map.json +++ b/etc/examples/8-probabilities/config-map.json @@ -1,6 +1,7 @@ { "output": { - "dataTypes": ["SOURCE"] + "dataTypes": ["SOURCE"], + "directory": "hazout-prob-map" }, "rate": { "distance": 10.0, diff --git a/etc/examples/8-probabilities/config-sites.json b/etc/examples/8-probabilities/config-sites.json index 2c5b56ccf107cfbce37b683de0b8f4bfd6011b32..fa865014fc0b306cfd5bd603a7c02b56712bdac0 100644 --- a/etc/examples/8-probabilities/config-sites.json +++ b/etc/examples/8-probabilities/config-sites.json @@ -1,5 +1,6 @@ { "output": { - "dataTypes": ["SOURCE"] + "dataTypes": ["SOURCE"], + "directory": "hazout-rate-sites" } } diff --git a/src/gov/usgs/earthquake/nshmp/calc/CalcConfig.java b/src/gov/usgs/earthquake/nshmp/calc/CalcConfig.java index bdd48dd59fd02d37360dddb9bb559dcb8f3e205e..483d9d89a5cbdf36792c60f75ef01c0a5ee014df 100644 --- a/src/gov/usgs/earthquake/nshmp/calc/CalcConfig.java +++ b/src/gov/usgs/earthquake/nshmp/calc/CalcConfig.java @@ -63,7 +63,7 @@ public final class CalcConfig { static final String FILE_NAME = "config.json"; private static final String ID = CalcConfig.class.getSimpleName(); private static final String STATE_ERROR = "%s %s not set"; - static final String DEFAULT_OUT = "curves"; + static final String DEFAULT_OUT = "hazout"; /** * The resource from which {@code this} was derived. This field may be empty. @@ -865,7 +865,7 @@ public final class CalcConfig { /** * The directory to write any results to. 
* - * <p><b>Default:</b> {@code "curves"} for hazard and deaggregation + * <p><b>Default:</b> {@code "hazout"} for hazard and deaggregation * calculations; {@code "eq-rate"} or {@code "eq-prob"} for rate * calculations. */ diff --git a/src/gov/usgs/earthquake/nshmp/calc/DataType.java b/src/gov/usgs/earthquake/nshmp/calc/DataType.java index 73929b15f98869dbd15fef9acc274f8791f50a16..c01365c373bd92a009dca6ffd1d9ad266c48abf5 100644 --- a/src/gov/usgs/earthquake/nshmp/calc/DataType.java +++ b/src/gov/usgs/earthquake/nshmp/calc/DataType.java @@ -12,7 +12,7 @@ import gov.usgs.earthquake.nshmp.gmm.Gmm; */ public enum DataType { - /** Total hazard curves or magnitude-frequencey distributions, etc. */ + /** Total hazard curves or magnitude-frequency distributions, etc. */ TOTAL, /** {@linkplain Gmm Ground motion model} specific data. */ diff --git a/src/gov/usgs/earthquake/nshmp/calc/DeaggExport.java b/src/gov/usgs/earthquake/nshmp/calc/DeaggExport.java index 095e3d622335ba44792539c13c4c920344c01ba1..c6e76dc3b6465d567bef74c760eef487ca352b75 100644 --- a/src/gov/usgs/earthquake/nshmp/calc/DeaggExport.java +++ b/src/gov/usgs/earthquake/nshmp/calc/DeaggExport.java @@ -54,6 +54,9 @@ final class DeaggExport { final SummaryElements summary; final List<JsonContributor> sources; + private static final String DEAGG_DATA = "data.csv"; + private static final String DEAGG_SUMMARY = "summary.txt"; + /* * All component DeaggDatasets require data from the final total DeaggDataset * to correctly calculate contributions and represent summary data that is not @@ -78,13 +81,21 @@ final class DeaggExport { } void toFile(Path dir, String site) throws IOException { - Path dataPath = dir.resolve(site + "-data.csv"); - Files.write(dataPath, data.toString().getBytes(UTF_8)); - Path summaryPath = dir.resolve(site + "-summary.txt"); + Path siteDir = dir.resolve(site); + Files.createDirectories(siteDir); + Path dataPath = siteDir.resolve(DEAGG_DATA); + Files.write( + dataPath, + 
data.toString().getBytes(UTF_8), + WRITE); + Path summaryPath = siteDir.resolve(DEAGG_SUMMARY); String summaryString = summaryStringBuilder() .append(DATASET_SEPARATOR) .toString(); - Files.write(summaryPath, summaryString.getBytes(UTF_8), WRITE); + Files.write( + summaryPath, + summaryString.getBytes(UTF_8), + WRITE); } @Override diff --git a/src/gov/usgs/earthquake/nshmp/calc/Deaggregation.java b/src/gov/usgs/earthquake/nshmp/calc/Deaggregation.java index 5eba28f13e9d88daa4d229185e83b6ebbdad266e..9c4b6198f3f805a4fce04f71e2ea2b5e49551c97 100644 --- a/src/gov/usgs/earthquake/nshmp/calc/Deaggregation.java +++ b/src/gov/usgs/earthquake/nshmp/calc/Deaggregation.java @@ -16,6 +16,7 @@ import java.util.Map.Entry; import gov.usgs.earthquake.nshmp.data.Interpolator; import gov.usgs.earthquake.nshmp.data.XySequence; +import gov.usgs.earthquake.nshmp.eq.model.SourceType; import gov.usgs.earthquake.nshmp.gmm.Gmm; import gov.usgs.earthquake.nshmp.gmm.Imt; @@ -38,7 +39,7 @@ public final class Deaggregation { * Deaggregate on probability of occurrence instead of exceedance. * ------------------------------------------------------------------------- * Revisit precision issues associated with integer based return period; - * 2%in50 years os really 0.00040405414, not 1/2475 = 0.0004040404 + * 2%in50 years is really 0.00040405414, not 1/2475 = 0.0004040404 * ------------------------------------------------------------------------- * ------------------------------------------------------------------------- * One of the difficulties with deaggregation is deciding how to specify @@ -181,19 +182,23 @@ public final class Deaggregation { final DeaggConfig config; final DeaggDataset totalDataset; final Map<Gmm, DeaggDataset> gmmDatasets; + final Map<SourceType, DeaggDataset> typeDatasets; ImtDeagg(Hazard hazard, DeaggConfig config) { this.config = config; /* - * Datasets are combined as follows: For each HazardCurveSet/SourceSet - * deaggregation is performed across all relevant Gmms. 
These are - * preserved in a ListMultimap for output of deaggregation by Gmm. It's - * too much work to consolidate the ListMultimap and keep track of all the - * nested DeaggContributors, so a list is maintained of datasets per - * SourceSet, the total across all Gmms that result from each call to + * Datasets are combined as follows: + * + * For each HazardCurveSet (SourceSet), deaggregation is performed across + * all relevant Gmms. These are preserved in ListMultimaps for output of + * deaggregation by Gmm and SourceType. It's too much work to consolidate + * ListMultimaps on the fly and keep track of all the nested + * DeaggContributors, so lists are maintained of Gmm and SourceType + * datasets, and the total across all Gmms that result from each call to * deaggregate(). The combination of multiple datasets for single - * SourceSets is straightforward. + * SourceSets is then straightforward via static consolidators in + * DeaggDataset. */ int sourceSetCount = hazard.sourceSetCurves.size(); @@ -201,7 +206,10 @@ public final class Deaggregation { .enumKeys(Gmm.class) .arrayListValues(sourceSetCount) .build(); - List<DeaggDataset> totalDatasetList = new ArrayList<>(sourceSetCount); + ListMultimap<SourceType, DeaggDataset> typeDatasetLists = MultimapBuilder + .enumKeys(SourceType.class) + .arrayListValues(sourceSetCount) + .build(); for (HazardCurveSet curveSet : hazard.sourceSetCurves.values()) { XySequence sourceSetCurve = curveSet.totalCurves.get(config.imt); @@ -216,7 +224,8 @@ public final class Deaggregation { config, hazard.site); gmmDatasetLists.putAll(Multimaps.forMap(sourceSetDatasets)); - totalDatasetList.add(SOURCE_CONSOLIDATOR.apply(sourceSetDatasets.values())); + DeaggDataset sourceSetTotal = SOURCE_CONSOLIDATOR.apply(sourceSetDatasets.values()); + typeDatasetLists.put(curveSet.sourceSet.type(), sourceSetTotal); } /* Combine SourceSets across Gmms. 
*/ @@ -224,29 +233,24 @@ public final class Deaggregation { Multimaps.asMap(gmmDatasetLists), SOURCE_SET_CONSOLIDATOR)); + /* Combine SourceSets across SourceTypes. */ + typeDatasets = Maps.immutableEnumMap(Maps.transformValues( + Multimaps.asMap(typeDatasetLists), + SOURCE_SET_CONSOLIDATOR)); + /* Combine SourceSet totals. */ - totalDataset = SOURCE_SET_CONSOLIDATOR.apply(totalDatasetList); + totalDataset = SOURCE_SET_CONSOLIDATOR.apply(typeDatasets.values()); } + private static final String TOTAL_COMPONENT = "Total"; + private static final String GMM_COMPONENT = "GMM: "; + private static final String TYPE_COMPONENT = "Source Type: "; + @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append(NEWLINE); - DeaggExport export = new DeaggExport( - totalDataset, - totalDataset, - config, - "Total", - false); - sb.append(export.toString()); - sb.append(NEWLINE); - for (Entry<Gmm, DeaggDataset> ddEntry : gmmDatasets.entrySet()) { - export = new DeaggExport( - totalDataset, - ddEntry.getValue(), - config, - ddEntry.getKey().toString(), - false); + for (DeaggExport export : buildExports(false)) { sb.append(export.toString()); sb.append(NEWLINE); } @@ -259,25 +263,39 @@ public final class Deaggregation { * object prior to serialization. 
*/ Object toJson() { - List<DeaggExport> jsonDeaggs = new ArrayList<>(); + return buildExports(true); + } + + private List<DeaggExport> buildExports(boolean json) { + List<DeaggExport> exports = new ArrayList<>(); DeaggExport total = new DeaggExport( totalDataset, totalDataset, config, - "Total", - true); - jsonDeaggs.add(total); - for (Entry<Gmm, DeaggDataset> ddEntry : gmmDatasets.entrySet()) { + TOTAL_COMPONENT, + json); + exports.add(total); + for (Entry<Gmm, DeaggDataset> gmmEntry : gmmDatasets.entrySet()) { DeaggExport gmm = new DeaggExport( totalDataset, - ddEntry.getValue(), + gmmEntry.getValue(), config, - ddEntry.getKey().toString(), - true); - jsonDeaggs.add(gmm); + GMM_COMPONENT + gmmEntry.getKey().toString(), + json); + exports.add(gmm); } - return jsonDeaggs; + for (Entry<SourceType, DeaggDataset> typeEntry : typeDatasets.entrySet()) { + DeaggExport type = new DeaggExport( + totalDataset, + typeEntry.getValue(), + config, + TYPE_COMPONENT + typeEntry.getKey().toString(), + json); + exports.add(type); + } + return exports; } + } } diff --git a/src/gov/usgs/earthquake/nshmp/calc/EqRateExport.java b/src/gov/usgs/earthquake/nshmp/calc/EqRateExport.java index 8eeffe96994fc06cf346b5dd0f0ce26ed828a16d..be7dc990b0cfe6cfa3160d41d6e304ce4f904dd7 100644 --- a/src/gov/usgs/earthquake/nshmp/calc/EqRateExport.java +++ b/src/gov/usgs/earthquake/nshmp/calc/EqRateExport.java @@ -15,7 +15,6 @@ import java.io.IOException; import java.nio.file.Files; import java.nio.file.OpenOption; import java.nio.file.Path; -import java.nio.file.Paths; import java.util.ArrayList; import java.util.Collection; import java.util.List; @@ -38,9 +37,12 @@ public final class EqRateExport { private static final String RATE_FORMAT = "%.8g"; private static final String PROB_FORMAT = "%.2f"; + private static final String RATE_FILE = "rates.csv"; + private static final String PROB_FILE = "probs.csv"; private final Logger log; private final Path dir; + private final String file; private final 
String valueFormat; private final CalcConfig config; private final boolean exportSource; @@ -62,7 +64,8 @@ public final class EqRateExport { boolean rates = config.rate.valueFormat == ValueFormat.ANNUAL_RATE; this.log = log; - this.dir = HazardExport.createOutputDir(updateOutDir(config.output.directory, rates)); + this.dir = HazardExport.createOutputDir(config.output.directory); + this.file = rates ? RATE_FILE : PROB_FILE; this.valueFormat = rates ? RATE_FORMAT : PROB_FORMAT; this.config = config; this.exportSource = config.output.dataTypes.contains(DataType.SOURCE); @@ -75,14 +78,6 @@ public final class EqRateExport { this.totalWatch = Stopwatch.createStarted(); } - /* If config output is 'curves', change to 'eq-rate' or 'eq-prob'. */ - static Path updateOutDir(Path dir, boolean rates) { - if (dir.toString().equals(CalcConfig.DEFAULT_OUT)) { - return (rates ? Paths.get("eq-rate") : Paths.get("eq-prob")); - } - return dir; - } - /** * Create a new results handler. * @@ -242,15 +237,15 @@ public final class EqRateExport { } /* write/append */ - Path totalFile = dir.resolve("total" + HazardExport.TEXT_SUFFIX); + Path totalFile = dir.resolve(file); Files.write(totalFile, totalLines, US_ASCII, options); if (exportSource) { - Path typeDir = dir.resolve("source"); - Files.createDirectories(typeDir); + Path parentDir = dir.resolve(HazardExport.TYPE_DIR); for (Entry<SourceType, List<String>> typeEntry : typeLines.entrySet()) { SourceType type = typeEntry.getKey(); - String filename = type.toString(); - Path typeFile = typeDir.resolve(filename + HazardExport.TEXT_SUFFIX); + Path typeDir = parentDir.resolve(type.name()); + Files.createDirectories(typeDir); + Path typeFile = typeDir.resolve(file); Files.write(typeFile, typeEntry.getValue(), US_ASCII, options); } } diff --git a/src/gov/usgs/earthquake/nshmp/calc/HazardExport.java b/src/gov/usgs/earthquake/nshmp/calc/HazardExport.java index 52bc1b12a9b1eefa5f57daa091f3929d41986e2b..8b40d42bdedfa9c3b1d8d2f1e68293d0c2effb98 
100644 --- a/src/gov/usgs/earthquake/nshmp/calc/HazardExport.java +++ b/src/gov/usgs/earthquake/nshmp/calc/HazardExport.java @@ -58,8 +58,9 @@ public final class HazardExport { static final String DEAGG_DIR = "deagg"; static final String GMM_DIR = "gmm"; - static final String BINARY_SUFFIX = ".bin"; - static final String TEXT_SUFFIX = ".csv"; + static final String TYPE_DIR = "source"; + static final String CURVE_FILE_ASCII = "curves.csv"; + static final String CURVE_FILE_BINARY = "curves.bin"; static final String RATE_FMT = "%.8e"; static final OpenOption[] WRITE = new OpenOption[] { @@ -396,42 +397,42 @@ public final class HazardExport { Path imtDir = dir.resolve(imt.name()); Files.createDirectories(imtDir); - Path totalFile = imtDir.resolve("total" + TEXT_SUFFIX); + Path totalFile = imtDir.resolve(CURVE_FILE_ASCII); Files.write(totalFile, totalEntry.getValue(), US_ASCII, options); Metadata meta = null; if (exportBinary) { meta = metaMap.get(imt); - Path totalBinFile = imtDir.resolve("total" + BINARY_SUFFIX); + Path totalBinFile = imtDir.resolve(CURVE_FILE_BINARY); writeBinaryBatch(totalBinFile, meta, totalCurves.get(imt)); } if (exportSource) { - Path typeDir = imtDir.resolve("source"); - Files.createDirectories(typeDir); + Path typeParent = imtDir.resolve(TYPE_DIR); for (Entry<SourceType, List<String>> typeEntry : typeLines.get(imt).entrySet()) { SourceType type = typeEntry.getKey(); - String filename = type.toString(); - Path typeFile = typeDir.resolve(filename + TEXT_SUFFIX); + Path typeDir = typeParent.resolve(type.name()); + Files.createDirectories(typeDir); + Path typeFile = typeDir.resolve(CURVE_FILE_ASCII); Files.write(typeFile, typeEntry.getValue(), US_ASCII, options); if (exportBinary) { - Path typeBinFile = typeDir.resolve(filename + BINARY_SUFFIX); + Path typeBinFile = typeDir.resolve(CURVE_FILE_BINARY); writeBinaryBatch(typeBinFile, meta, typeCurves.get(imt).get(type)); } } } if (exportGmm) { - Path gmmDir = imtDir.resolve("gmm"); - 
Files.createDirectories(gmmDir); + Path gmmParent = imtDir.resolve(GMM_DIR); for (Entry<Gmm, List<String>> gmmEntry : gmmLines.get(imt).entrySet()) { Gmm gmm = gmmEntry.getKey(); - String filename = gmm.name(); - Path gmmFile = gmmDir.resolve(filename + TEXT_SUFFIX); + Path gmmDir = gmmParent.resolve(gmm.name()); + Files.createDirectories(gmmDir); + Path gmmFile = gmmDir.resolve(CURVE_FILE_ASCII); Files.write(gmmFile, gmmEntry.getValue(), US_ASCII, options); if (exportBinary) { - Path gmmBinFile = gmmDir.resolve(filename + BINARY_SUFFIX); + Path gmmBinFile = gmmDir.resolve(CURVE_FILE_BINARY); writeBinaryBatch(gmmBinFile, meta, gmmCurves.get(imt).get(gmm)); } } @@ -445,31 +446,43 @@ public final class HazardExport { private void writeDeaggs() throws IOException { /* - * Writing of Hazard results will have already created necessary Imt - * directories. + * Writing of Hazard results will have already created necessary Imt, Gmm, + * and SourceType directories. */ for (Deaggregation deagg : deaggs) { String name = namedSites ? deagg.site.name : lonLatStr(deagg.site.location); for (Entry<Imt, ImtDeagg> imtEntry : deagg.deaggs.entrySet()) { /* Write total dataset. 
*/ - Path imtDir = dir.resolve(imtEntry.getKey().name()); - Path imtDeaggDir = imtDir.resolve(DEAGG_DIR); - Files.createDirectories(imtDeaggDir); ImtDeagg imtDeagg = imtEntry.getValue(); DeaggDataset ddTotal = imtDeagg.totalDataset; DeaggConfig dc = imtDeagg.config; DeaggExport exporter = new DeaggExport(ddTotal, ddTotal, dc, "Total", false); - exporter.toFile(imtDeaggDir, name); + Path imtDir = dir.resolve(imtEntry.getKey().name()); + Path totalDir = imtDir.resolve(DEAGG_DIR); + Files.createDirectories(totalDir); + exporter.toFile(totalDir, name); + + if (exportSource) { + for (Entry<SourceType, DeaggDataset> typeEntry : imtDeagg.typeDatasets.entrySet()) { + SourceType type = typeEntry.getKey(); + Path typeDir = imtDir.resolve(TYPE_DIR) + .resolve(type.name()) + .resolve(DEAGG_DIR); + DeaggDataset ddType = typeEntry.getValue(); + exporter = new DeaggExport(ddTotal, ddType, dc, type.toString(), false); + exporter.toFile(typeDir, name); + } + } if (exportGmm) { for (Entry<Gmm, DeaggDataset> gmmEntry : imtDeagg.gmmDatasets.entrySet()) { + Gmm gmm = gmmEntry.getKey(); Path gmmDir = imtDir.resolve(GMM_DIR) - .resolve(DEAGG_DIR) - .resolve(gmmEntry.getKey().name()); - Files.createDirectories(gmmDir); + .resolve(gmm.name()) + .resolve(DEAGG_DIR); DeaggDataset ddGmm = gmmEntry.getValue(); - exporter = new DeaggExport(ddTotal, ddGmm, dc, gmmEntry.getKey().toString(), false); + exporter = new DeaggExport(ddTotal, ddGmm, dc, gmm.toString(), false); exporter.toFile(gmmDir, name); } } diff --git a/src/gov/usgs/earthquake/nshmp/internal/GeoJson.java b/src/gov/usgs/earthquake/nshmp/internal/GeoJson.java index a5d190256323df4d2b67985e02202e5137e0ca97..8944ecc8a888befce3a9853d42448a83ff6bd1fe 100644 --- a/src/gov/usgs/earthquake/nshmp/internal/GeoJson.java +++ b/src/gov/usgs/earthquake/nshmp/internal/GeoJson.java @@ -95,21 +95,22 @@ public final class GeoJson { key, value, actual); } - /* GeoJSON objectsfor stadard GSON serialization */ + /* GeoJSON objects for standard GSON 
serialization */ - static class FeatureCollection { + public static class FeatureCollection<T> { String type = "FeatureCollection"; - List<Feature> features; + public Object properties; + public List<T> features; } - static class Feature { + public static class Feature { String type = "Feature"; String id; Geometry geometry = new Geometry(); PropertiesObject properties; } - static Feature createPoint(NamedLocation loc) { + public static Feature createPoint(NamedLocation loc) { Feature f = new Feature(); f.geometry.type = "Point"; f.geometry.coordinates = toCoordinates(loc.location()); @@ -118,6 +119,16 @@ public final class GeoJson { return f; } + public static Feature createPoint(NamedLocation loc, String id) { + Feature f = new Feature(); + f.geometry.type = "Point"; + f.geometry.coordinates = toCoordinates(loc.location()); + f.properties = new PropertiesObject(); + f.properties.location = loc.toString(); + f.properties.locationId = id; + return f; + } + private static final String EXTENTS_COLOR = "#AA0078"; static Feature createPolygon( @@ -154,6 +165,8 @@ public final class GeoJson { static class PropertiesObject { String title; + String location; + String locationId; } static class PointProperties extends PropertiesObject { @@ -182,7 +195,7 @@ public final class GeoJson { } /* brute force compaction of coordinate array onto single line */ - static String cleanPoints(String s) { + public static String cleanPoints(String s) { return s.replace(": [\n ", ": [") .replace(",\n ", ", ") .replace("\n ]", "]") + "\n"; @@ -197,5 +210,4 @@ public final class GeoJson { .replace("\n ]", " ]") .replace("\n ]", "]") + "\n"; } - }