From 0775b46bbc5d6bd9334c95bdca5a0c95e7cfbe0c Mon Sep 17 00:00:00 2001
From: "Powers, Peter M" <pmpowers@usgs.gov>
Date: Mon, 24 Jan 2022 21:35:51 +0000
Subject: [PATCH] SourceSet refactor

---
 .../earthquake/nshmp/calc/CalcFactory.java | 78 +-
 .../nshmp/calc/DisaggContributor.java | 52 +-
 .../earthquake/nshmp/calc/DisaggDataset.java | 43 +-
 .../earthquake/nshmp/calc/DisaggExport.java | 42 +-
 .../earthquake/nshmp/calc/Disaggregation.java | 40 +-
 .../earthquake/nshmp/calc/Disaggregator.java | 56 +-
 .../usgs/earthquake/nshmp/calc/EqRate.java | 94 +-
 .../earthquake/nshmp/calc/GroundMotions.java | 2 +-
 .../usgs/earthquake/nshmp/calc/Hazard.java | 27 +-
 .../earthquake/nshmp/calc/HazardCalcs.java | 176 +--
 .../earthquake/nshmp/calc/HazardCurveSet.java | 91 +-
 .../earthquake/nshmp/calc/HazardExport.java | 44 +-
 .../nshmp/calc/SystemInputList.java | 14 +-
 .../earthquake/nshmp/calc/Transforms.java | 85 +-
 ...SourceSet.java => AbstractRuptureSet.java} | 84 +-
 .../nshmp/model/ClusterRuptureSet.java | 187 ++-
 .../earthquake/nshmp/model/ClusterSource.java | 77 +-
 .../nshmp/model/ClusterSourceSet.java | 90 --
 .../earthquake/nshmp/model/Deserialize.java | 30 +-
 .../nshmp/model/FaultRuptureSet.java | 79 +-
 .../earthquake/nshmp/model/FaultSource.java | 27 +-
 .../nshmp/model/FaultSourceSet.java | 114 --
 .../usgs/earthquake/nshmp/model/GmmSet.java | 2 +-
 .../earthquake/nshmp/model/GridLoader.java | 62 +-
 .../nshmp/model/GridRuptureSet.java | 998 ++++++++++++++--
 .../earthquake/nshmp/model/GridSourceSet.java | 1034 -----------------
 .../earthquake/nshmp/model/HazardModel.java | 283 +----
 .../nshmp/model/InterfaceRuptureSet.java | 84 +-
 .../nshmp/model/InterfaceSource.java | 15 +-
 .../nshmp/model/InterfaceSourceSet.java | 95 --
 .../earthquake/nshmp/model/ModelFiles.java | 18 +-
 .../earthquake/nshmp/model/ModelLoader.java | 90 +-
 .../earthquake/nshmp/model/ModelTrees.java | 1 +
 .../usgs/earthquake/nshmp/model/Models.java | 4 +-
 .../earthquake/nshmp/model/PointSource.java | 10 +-
 .../nshmp/model/PointSourceFinite.java | 4 +-
 .../nshmp/model/PointSourceFixedStrike.java | 2 +-
 .../earthquake/nshmp/model/PointSources.java | 4 +-
 .../earthquake/nshmp/model/RuptureSet.java | 68 +-
 .../nshmp/model/SlabRuptureSet.java | 21 +-
 .../earthquake/nshmp/model/SlabSourceSet.java | 88 --
 .../earthquake/nshmp/model/SourceSet.java | 90 --
 .../earthquake/nshmp/model/SourceTree.java | 123 +-
 .../nshmp/model/SystemRuptureSet.java | 684 +++++++++--
 .../nshmp/model/SystemSourceSet.java | 662 -----------
 .../nshmp/model/ZoneRuptureSet.java | 85 +-
 .../earthquake/nshmp/model/LoaderTests.java | 2 +-
 47 files changed, 2636 insertions(+), 3425 deletions(-)
 rename src/main/java/gov/usgs/earthquake/nshmp/model/{AbstractSourceSet.java => AbstractRuptureSet.java} (59%)
 delete mode 100644 src/main/java/gov/usgs/earthquake/nshmp/model/ClusterSourceSet.java
 delete mode 100644 src/main/java/gov/usgs/earthquake/nshmp/model/FaultSourceSet.java
 delete mode 100644 src/main/java/gov/usgs/earthquake/nshmp/model/GridSourceSet.java
 delete mode 100644 src/main/java/gov/usgs/earthquake/nshmp/model/InterfaceSourceSet.java
 delete mode 100644 src/main/java/gov/usgs/earthquake/nshmp/model/SlabSourceSet.java
 delete mode 100644 src/main/java/gov/usgs/earthquake/nshmp/model/SourceSet.java
 delete mode 100644 src/main/java/gov/usgs/earthquake/nshmp/model/SystemSourceSet.java

diff --git a/src/main/java/gov/usgs/earthquake/nshmp/calc/CalcFactory.java b/src/main/java/gov/usgs/earthquake/nshmp/calc/CalcFactory.java
index 80c28f21..8f8d81e0 100644
---
a/src/main/java/gov/usgs/earthquake/nshmp/calc/CalcFactory.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/calc/CalcFactory.java @@ -18,12 +18,12 @@ import gov.usgs.earthquake.nshmp.calc.Transforms.CurveSetConsolidator; import gov.usgs.earthquake.nshmp.calc.Transforms.ParallelSystemToCurves; import gov.usgs.earthquake.nshmp.calc.Transforms.SourceToCurves; import gov.usgs.earthquake.nshmp.calc.Transforms.SystemToCurves; +import gov.usgs.earthquake.nshmp.model.ClusterRuptureSet; import gov.usgs.earthquake.nshmp.model.ClusterSource; -import gov.usgs.earthquake.nshmp.model.ClusterSourceSet; import gov.usgs.earthquake.nshmp.model.HazardModel; +import gov.usgs.earthquake.nshmp.model.RuptureSet; import gov.usgs.earthquake.nshmp.model.Source; -import gov.usgs.earthquake.nshmp.model.SourceSet; -import gov.usgs.earthquake.nshmp.model.SystemSourceSet; +import gov.usgs.earthquake.nshmp.model.SystemRuptureSet; /** * Static utility methods for performing single-threaded and asynchronous hazard @@ -37,100 +37,96 @@ final class CalcFactory { private CalcFactory() {} - /* Compute hazard curves for a SourceSet. */ + /* Compute hazard curves for a RuptureSet. */ static HazardCurveSet sourcesToCurves( - SourceSet<? extends Source> sources, + RuptureSet<? extends Source> ruptures, CalcConfig config, Site site) { - SourceToCurves sourceToCurves = new SourceToCurves(sources, config, site); + SourceToCurves sourceToCurves = new SourceToCurves(ruptures, config, site); List<HazardCurves> curvesList = new ArrayList<>(); - for (Source source : sources.iterableForLocation(site.location())) { + for (Source source : ruptures.iterableForLocation(site.location())) { curvesList.add(sourceToCurves.apply(source)); } - CurveConsolidator consolidateFn = new CurveConsolidator(sources, config); + CurveConsolidator consolidateFn = new CurveConsolidator( + ruptures, config); return consolidateFn.apply(curvesList); } - /* Asynchronously compute hazard curves for a SourceSet. */ + /* Asynchronously compute hazard curves for a RuptureSet. */ static ListenableFuture<HazardCurveSet> sourcesToCurves( - SourceSet<? extends Source> sources, + RuptureSet<? extends Source> ruptures, CalcConfig config, Site site, Executor ex) { - SourceToCurves sourceToCurves = new SourceToCurves(sources, config, site); + SourceToCurves sourceToCurves = new SourceToCurves(ruptures, config, site); AsyncList<HazardCurves> curvesList = AsyncList.create(); - for (Source source : sources.iterableForLocation(site.location())) { + for (Source source : ruptures.iterableForLocation(site.location())) { ListenableFuture<HazardCurves> curves = transform( immediateFuture(source), sourceToCurves::apply, ex); curvesList.add(curves); } - return transform( - allAsList(curvesList), - new CurveConsolidator(sources, config)::apply, - ex); + CurveConsolidator cc = new CurveConsolidator(ruptures, config); + return transform(allAsList(curvesList), cc::apply, ex); } - /* Compute hazard curves for a SystemSourceSet. */ + /* Compute hazard curves for a SystemRuptureSet. */ static HazardCurveSet systemToCurves( - SystemSourceSet sources, + SystemRuptureSet ruptures, CalcConfig config, Site site) { - return new SystemToCurves(config, site).apply(sources); + return new SystemToCurves(config, site).apply(ruptures); } - /* Asynchronously compute hazard curves for a SystemSourceSet. */ + /* Asynchronously compute hazard curves for a SystemRuptureSet. 
*/ static ListenableFuture<HazardCurveSet> systemToCurves( - SystemSourceSet sources, + SystemRuptureSet ruptures, CalcConfig config, Site site, final Executor ex) { - return transform( - immediateFuture(sources), - new ParallelSystemToCurves(site, config, ex)::apply, - ex); + ParallelSystemToCurves psc = new ParallelSystemToCurves(site, config, ex); + return transform(immediateFuture(ruptures), psc::apply, ex); } - /* Compute hazard curves for a ClusterSourceSet. */ + /* Compute hazard curves for a ClusterRuptureSet. */ static HazardCurveSet clustersToCurves( - ClusterSourceSet sources, + ClusterRuptureSet ruptures, CalcConfig config, Site site) { - ClusterToCurves clusterToCurves = new ClusterToCurves(sources, config, site); + ClusterToCurves clusterToCurves = new ClusterToCurves(ruptures, config, site); List<ClusterCurves> curvesList = new ArrayList<>(); - for (ClusterSource source : sources.iterableForLocation(site.location())) { + for (ClusterSource source : ruptures.iterableForLocation(site.location())) { curvesList.add(clusterToCurves.apply(source)); } - ClusterCurveConsolidator consolidateFn = new ClusterCurveConsolidator(sources, config); + ClusterCurveConsolidator consolidateFn = new ClusterCurveConsolidator( + ruptures, config); return consolidateFn.apply(curvesList); } - /* Asynchronously compute hazard curves for a ClusterSourceSet. */ + /* Asynchronously compute hazard curves for a ClusterRuptureSet. */ static ListenableFuture<HazardCurveSet> clustersToCurves( - ClusterSourceSet sources, + ClusterRuptureSet ruptures, CalcConfig config, Site site, Executor ex) { - ClusterToCurves clusterToCurves = new ClusterToCurves(sources, config, site); + ClusterToCurves clusterToCurves = new ClusterToCurves(ruptures, config, site); AsyncList<ClusterCurves> curvesList = AsyncList.create(); - for (ClusterSource source : sources.iterableForLocation(site.location())) { + for (ClusterSource source : ruptures.iterableForLocation(site.location())) { ListenableFuture<ClusterCurves> curves = transform( immediateFuture(source), clusterToCurves::apply, ex); curvesList.add(curves); } - return transform( - allAsList(curvesList), - new ClusterCurveConsolidator(sources, config)::apply, - ex); + ClusterCurveConsolidator ccc = new ClusterCurveConsolidator(ruptures, config); + return transform(allAsList(curvesList), ccc::apply, ex); } /* Reduce hazard curves to a result. 
*/ @@ -151,10 +147,8 @@ final class CalcFactory { AsyncList<HazardCurveSet> curveSets, Executor ex) throws InterruptedException, ExecutionException { - return transform( - allAsList(curveSets), - new CurveSetConsolidator(model, config, site)::apply, - ex).get(); + CurveSetConsolidator csc = new CurveSetConsolidator(model, config, site); + return transform(allAsList(curveSets), csc::apply, ex).get(); } } diff --git a/src/main/java/gov/usgs/earthquake/nshmp/calc/DisaggContributor.java b/src/main/java/gov/usgs/earthquake/nshmp/calc/DisaggContributor.java index 816b8f72..a97b29fd 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/calc/DisaggContributor.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/calc/DisaggContributor.java @@ -3,16 +3,15 @@ package gov.usgs.earthquake.nshmp.calc; import static com.google.common.base.Preconditions.checkState; import static gov.usgs.earthquake.nshmp.Text.NEWLINE; import static gov.usgs.earthquake.nshmp.model.SourceType.FAULT_SYSTEM; +import static java.util.stream.Collectors.toList; import java.util.ArrayList; import java.util.Collection; +import java.util.Comparator; import java.util.Iterator; import java.util.List; import java.util.function.Function; -import com.google.common.collect.Iterables; -import com.google.common.collect.Ordering; - import gov.usgs.earthquake.nshmp.Maths; import gov.usgs.earthquake.nshmp.calc.DisaggExport.ContributionFilter; import gov.usgs.earthquake.nshmp.data.DoubleData; @@ -22,13 +21,13 @@ import gov.usgs.earthquake.nshmp.internal.Parsing; import gov.usgs.earthquake.nshmp.mfd.Mfd; import gov.usgs.earthquake.nshmp.model.ClusterSource; import gov.usgs.earthquake.nshmp.model.Rupture; +import gov.usgs.earthquake.nshmp.model.RuptureSet; import gov.usgs.earthquake.nshmp.model.Source; -import gov.usgs.earthquake.nshmp.model.SourceSet; import gov.usgs.earthquake.nshmp.model.SourceType; /** * Skeletal class and implementations used for contribution to hazard in a - * disaggregation. A contribution may be keyed to a {@code SourceSet}, an + * disaggregation. A contribution may be keyed to a {@code RuptureSet}, an * individual {@code Source}, or some other representation, e.g. optimized * (table-based) {@code GridSource}s or {@code SystemSource} fault sections. * @@ -134,14 +133,14 @@ abstract class DisaggContributor { * in the future. */ - /* SourceSet contributor with references to child contributors. */ - static final class SourceSetContributor extends DisaggContributor { + /* RuptureSet contributor with references to child contributors. */ + static final class RuptureSetContributor extends DisaggContributor { - final SourceSet<? extends Source> sourceSet; + final RuptureSet<? extends Source> ruptures; final List<DisaggContributor> children; - private SourceSetContributor( - SourceSet<? extends Source> sourceSet, + private RuptureSetContributor( + RuptureSet<? 
extends Source> ruptures, List<DisaggContributor> children, double rate, double residual, @@ -150,13 +149,13 @@ abstract class DisaggContributor { double εScaled) { super(rate, residual, rScaled, mScaled, εScaled); - this.sourceSet = sourceSet; + this.ruptures = ruptures; this.children = children; } @Override public String toString() { - return "SourceSet contributor: " + sourceSet.name(); + return "RuptureSet contributor: " + ruptures.name(); } @Override @@ -168,7 +167,7 @@ abstract class DisaggContributor { double contribution = filter.toPercent(total()); sb.append(String.format( DisaggExport.CONTRIB_SOURCE_SET_FMT, - sourceSet.name(), sourceSet.type(), contribution)); + ruptures.name(), ruptures.type(), contribution)); sb.append(NEWLINE); for (DisaggContributor child : children) { if (filter.test(child)) { @@ -188,8 +187,8 @@ abstract class DisaggContributor { double mBar = mScaled / total; double εBar = εScaled / total; JsonContributor jc = JsonContributor.createMulti( - sourceSet.name(), - sourceSet.type(), + ruptures.name(), + ruptures.type(), Maths.round(filter.toPercent(total()), 2), rBar, mBar, @@ -207,11 +206,11 @@ abstract class DisaggContributor { static final class Builder extends DisaggContributor.Builder { - SourceSet<? extends Source> sourceSet; + RuptureSet<? extends Source> ruptures; ArrayList<DisaggContributor.Builder> children = new ArrayList<>(); - Builder sourceSet(SourceSet<? extends Source> sourceSet) { - this.sourceSet = sourceSet; + Builder ruptures(RuptureSet<? extends Source> ruptures) { + this.ruptures = ruptures; return this; } @@ -227,11 +226,11 @@ abstract class DisaggContributor { } @Override - SourceSetContributor build() { + RuptureSetContributor build() { List<DisaggContributor> sortedChildren = buildAndSort(children); super.add(sumChildValues(sortedChildren)); - return new SourceSetContributor( - sourceSet, + return new RuptureSetContributor( + ruptures, sortedChildren, rate, residual, @@ -656,7 +655,7 @@ abstract class DisaggContributor { /* * Custom source that encapsulates the fact that relevant data in the - * disaggregation of SystemSourceSet is associated with individual fault + * disaggregation of SystemRuptureSet is associated with individual fault * sections rather than sources/ruptures. */ static final class SectionSource implements Source { @@ -721,7 +720,7 @@ abstract class DisaggContributor { Double latitude; Double longitude; - /* Use for SourceSets. */ + /* Use for RuptureSets. */ static JsonContributor createMulti( String name, SourceType source, @@ -778,7 +777,10 @@ abstract class DisaggContributor { /* Convert a builder list to immutable sorted contributor list. 
*/ private static List<DisaggContributor> buildAndSort( Collection<DisaggContributor.Builder> builders) { - return SORTER.immutableSortedCopy(Iterables.transform(builders, BUILDER::apply)); + return builders.stream() + .map(BUILDER::apply) + .sorted(SORTER) + .collect(toList()); } static final Function<DisaggContributor.Builder, DisaggContributor> BUILDER = @@ -789,7 +791,7 @@ abstract class DisaggContributor { } }; - static final Ordering<DisaggContributor> SORTER = new Ordering<DisaggContributor>() { + static final Comparator<DisaggContributor> SORTER = new Comparator<DisaggContributor>() { @Override public int compare(DisaggContributor left, DisaggContributor right) { return Double.compare(right.total(), left.total()); diff --git a/src/main/java/gov/usgs/earthquake/nshmp/calc/DisaggDataset.java b/src/main/java/gov/usgs/earthquake/nshmp/calc/DisaggDataset.java index 200b654b..be992d77 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/calc/DisaggDataset.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/calc/DisaggDataset.java @@ -2,6 +2,7 @@ package gov.usgs.earthquake.nshmp.calc; import static com.google.common.base.Preconditions.checkState; import static gov.usgs.earthquake.nshmp.data.DoubleData.checkInRange; +import static java.util.stream.Collectors.toList; import java.util.ArrayList; import java.util.Collection; @@ -16,8 +17,8 @@ import com.google.common.collect.Range; import gov.usgs.earthquake.nshmp.Earthquakes; import gov.usgs.earthquake.nshmp.calc.CalcConfig.Disagg.Bins; import gov.usgs.earthquake.nshmp.calc.DisaggContributor.ClusterContributor; +import gov.usgs.earthquake.nshmp.calc.DisaggContributor.RuptureSetContributor; import gov.usgs.earthquake.nshmp.calc.DisaggContributor.SourceContributor; -import gov.usgs.earthquake.nshmp.calc.DisaggContributor.SourceSetContributor; import gov.usgs.earthquake.nshmp.calc.DisaggContributor.SystemContributor; import gov.usgs.earthquake.nshmp.data.IntervalArray; import gov.usgs.earthquake.nshmp.data.IntervalData; @@ -27,7 +28,7 @@ import gov.usgs.earthquake.nshmp.model.Source; /** * A disaggregation dataset class that provides a variety of builder flavors to - * create initial datasets for specific {@code SourceSet}s and {@code Gmm}s and + * create initial datasets for specific {@code RuptureSet}s and {@code Gmm}s and * then recombine them in different ways. Some of the more complex operations in * this class involve the handling of ClusterSources and the recombining of * DisaggContributors. @@ -402,13 +403,13 @@ final class DisaggDataset { } } - static final SourceSetConsolidator SOURCE_SET_CONSOLIDATOR = new SourceSetConsolidator(); + static final RuptureSetConsolidator SOURCE_SET_CONSOLIDATOR = new RuptureSetConsolidator(); - static final class SourceSetConsolidator implements + static final class RuptureSetConsolidator implements Function<Collection<DisaggDataset>, DisaggDataset> { @Override public DisaggDataset apply(Collection<DisaggDataset> datasets) { - SourceSetCombiner combiner = new SourceSetCombiner(datasets.iterator().next()); + RuptureSetCombiner combiner = new RuptureSetCombiner(datasets.iterator().next()); for (DisaggDataset dataset : datasets) { combiner.add(dataset); } @@ -417,26 +418,29 @@ final class DisaggDataset { } /* - * Specialized builder that combines datasets across multiple SourceSets. + * Specialized builder that combines datasets across multiple RuptureSets. 
*/ - static class SourceSetCombiner extends AbstractCombiner { + static class RuptureSetCombiner extends AbstractCombiner { - ArrayList<DisaggContributor> contributors; + List<DisaggContributor> contributors; - SourceSetCombiner(DisaggDataset model) { + RuptureSetCombiner(DisaggDataset model) { super(model); contributors = new ArrayList<>(); } @Override - SourceSetCombiner add(DisaggDataset other) { + RuptureSetCombiner add(DisaggDataset other) { super.add(other); contributors.addAll(other.contributors); return this; } DisaggDataset build() { - return super.build(DisaggContributor.SORTER.immutableSortedCopy(contributors)); + List<DisaggContributor> sortedList = contributors.stream() + .sorted(DisaggContributor.SORTER) + .collect(toList()); + return super.build(sortedList); } } @@ -456,29 +460,30 @@ final class DisaggDataset { /* * Specialized builder that combines datasets across Gmms for a single - * SourceSet. The model supplied must be one of the datasets to be combined, - * and all additions must reference the same singleton SourceSet of the model. + * RuptureSet. The model supplied must be one of the datasets to be combined, + * and all additions must reference the same singleton RuptureSet of the + * model. */ static class SourceCombiner extends AbstractCombiner { - SourceSetContributor.Builder contributor; + RuptureSetContributor.Builder contributor; Map<Source, DisaggContributor.Builder> childMap; SourceCombiner(DisaggDataset model) { super(model); - SourceSetContributor ssc = (SourceSetContributor) Iterables + RuptureSetContributor ssc = (RuptureSetContributor) Iterables .getOnlyElement(model.contributors); - contributor = new SourceSetContributor.Builder().sourceSet(ssc.sourceSet); + contributor = new RuptureSetContributor.Builder().ruptures(ssc.ruptures); childMap = new HashMap<>(); } @Override SourceCombiner add(DisaggDataset other) { super.add(other); - SourceSetContributor sourceSetContributor = (SourceSetContributor) Iterables + RuptureSetContributor ruptureSetContributor = (RuptureSetContributor) Iterables .getOnlyElement(other.contributors); - for (DisaggContributor disaggContributor : sourceSetContributor.children) { - switch (sourceSetContributor.sourceSet.type()) { + for (DisaggContributor disaggContributor : ruptureSetContributor.children) { + switch (ruptureSetContributor.ruptures.type()) { case FAULT_CLUSTER: putOrAddCluster((ClusterContributor) disaggContributor); break; diff --git a/src/main/java/gov/usgs/earthquake/nshmp/calc/DisaggExport.java b/src/main/java/gov/usgs/earthquake/nshmp/calc/DisaggExport.java index c57fe7db..dcde9284 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/calc/DisaggExport.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/calc/DisaggExport.java @@ -3,12 +3,14 @@ package gov.usgs.earthquake.nshmp.calc; import static gov.usgs.earthquake.nshmp.Text.NEWLINE; import static java.lang.Math.exp; import static java.nio.charset.StandardCharsets.UTF_8; +import static java.util.stream.Collectors.toList; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; import java.util.AbstractList; import java.util.ArrayList; +import java.util.Comparator; import java.util.Iterator; import java.util.List; import java.util.function.Function; @@ -18,14 +20,13 @@ import com.google.common.base.Strings; import com.google.common.collect.ComparisonChain; import com.google.common.collect.FluentIterable; import com.google.common.collect.Iterables; -import com.google.common.collect.Ordering; import com.google.common.primitives.Doubles; 
import com.google.gson.annotations.SerializedName; import gov.usgs.earthquake.nshmp.Maths; import gov.usgs.earthquake.nshmp.calc.CalcConfig.Disagg.Bins; import gov.usgs.earthquake.nshmp.calc.DisaggContributor.JsonContributor; -import gov.usgs.earthquake.nshmp.calc.DisaggContributor.SourceSetContributor; +import gov.usgs.earthquake.nshmp.calc.DisaggContributor.RuptureSetContributor; import gov.usgs.earthquake.nshmp.calc.DisaggContributor.SystemContributor; import gov.usgs.earthquake.nshmp.data.DoubleData; import gov.usgs.earthquake.nshmp.gmm.Imt; @@ -191,7 +192,10 @@ final class DisaggExport { StringBuilder sb = new StringBuilder(DISAGG_DATA_HEADER); sb.append(",").append(epsilonHeader(εSize)); sb.append(NEWLINE); - for (RmBin rmBin : RM_BIN_SORTER.immutableSortedCopy(this)) { + List<RmBin> sortedRmBins = this.stream() + .sorted(RM_BIN_SORTER) + .collect(toList()); + for (RmBin rmBin : sortedRmBins) { sb.append(String.format( "%6.2f, %6.2f, %4.2f, %4.2f, %5.2f,", rmBin.r, rmBin.rBar, rmBin.m, rmBin.mBar, rmBin.εBar)); @@ -299,7 +303,7 @@ final class DisaggExport { EPSILON_FORMATTER::apply)); } - private static Ordering<RmBin> RM_BIN_SORTER = new Ordering<RmBin>() { + private static Comparator<RmBin> RM_BIN_SORTER = new Comparator<RmBin>() { @Override public int compare(RmBin left, RmBin right) { return ComparisonChain.start() @@ -563,20 +567,20 @@ final class DisaggExport { * contributors are sorted descending by total contribution, we short * circuit an iterator instead. */ - List<DisaggContributor> sourceSetContributors = new ArrayList<>(); + List<DisaggContributor> ruptureSetContributors = new ArrayList<>(); for (DisaggContributor contributor : dd.contributors) { if (contributionFilter.test(contributor)) { - sourceSetContributors.add(contributor); + ruptureSetContributors.add(contributor); continue; } break; } sb.append("Disaggregation contributors:"); - if (sourceSetContributors.size() > 0) { + if (ruptureSetContributors.size() > 0) { sb.append(NEWLINE).append(NEWLINE).append(CONTRIBUTION_HEADER); boolean firstContributor = true; - for (DisaggContributor contributor : sourceSetContributors) { + for (DisaggContributor contributor : ruptureSetContributors) { sb.append(firstContributor ? 
"" : NEWLINE); firstContributor = false; contributor.appendTo(sb, "", contributionFilter); @@ -594,19 +598,19 @@ final class DisaggExport { return sb; } - private static Predicate<SourceSetContributor> SYSTEM_FILTER = - new Predicate<SourceSetContributor>() { + private static Predicate<RuptureSetContributor> SYSTEM_FILTER = + new Predicate<RuptureSetContributor>() { @Override - public boolean test(SourceSetContributor contributor) { - return contributor.sourceSet.type() == SourceType.FAULT_SYSTEM; + public boolean test(RuptureSetContributor contributor) { + return contributor.ruptures.type() == SourceType.FAULT_SYSTEM; } }; - private static Function<DisaggContributor, SourceSetContributor> TO_SOURCE_SET_CONTRIBUTOR = - new Function<DisaggContributor, SourceSetContributor>() { + private static Function<DisaggContributor, RuptureSetContributor> TO_SOURCE_SET_CONTRIBUTOR = + new Function<DisaggContributor, RuptureSetContributor>() { @Override - public SourceSetContributor apply(DisaggContributor contributor) { - return (SourceSetContributor) contributor; + public RuptureSetContributor apply(DisaggContributor contributor) { + return (RuptureSetContributor) contributor; } }; @@ -659,7 +663,7 @@ final class DisaggExport { DisaggDataset dd, ContributionFilter contributionFilter) { - List<SourceSetContributor> systemSourceSetContributors = FluentIterable + List<RuptureSetContributor> systemRuptureSetContributors = FluentIterable .from(dd.contributors) .transform(TO_SOURCE_SET_CONTRIBUTOR::apply) .filter(SYSTEM_FILTER::test) @@ -667,13 +671,13 @@ final class DisaggExport { .toList(); boolean first = true; - for (SourceSetContributor ssc : systemSourceSetContributors) { + for (RuptureSetContributor ssc : systemRuptureSetContributors) { SystemContributor model = (SystemContributor) ssc.children.get(0); if (!contributionFilter.test(model)) { continue; } sb.append(first ? SECTION_SEPARATOR : NEWLINE) - .append("System section MFDs: " + ssc.sourceSet.name()) + .append("System section MFDs: " + ssc.ruptures.name()) .append(NEWLINE).append(NEWLINE) .append(String.format(SYSTEM_MFD_FORMAT, "Index", "Section")) .append(Parsing.toString(model.mfd.rows(), "%9.2f", ",", false, true)) diff --git a/src/main/java/gov/usgs/earthquake/nshmp/calc/Disaggregation.java b/src/main/java/gov/usgs/earthquake/nshmp/calc/Disaggregation.java index 2fe56ea0..0d90288c 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/calc/Disaggregation.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/calc/Disaggregation.java @@ -302,55 +302,55 @@ public final class Disaggregation { /* * Datasets are combined as follows: * - * For each HazardCurveSet (SourceSet), disaggregation is performed across - * all relevant Gmms. These are preserved in ListMultimaps for output of - * disaggregation by Gmm and SourceType. It's too much work to consolidate - * ListMultimaps on the fly and keep track of all the nested + * For each HazardCurveSet (RuptureSet), disaggregation is performed + * across all relevant Gmms. These are preserved in ListMultimaps for + * output of disaggregation by Gmm and SourceType. It's too much work to + * consolidate ListMultimaps on the fly and keep track of all the nested * DisaggContributors, so lists are maintained of Gmm and SourceType * datasets, and the total across all Gmms that result from each call to * disaggregate(). The combination of multiple datasets for single - * SourceSets is then straightforward via static consolidators in + * RuptureSets is then straightforward via static consolidators in * DisaggDataset. 
*/ - int sourceSetCount = hazard.sourceSetCurves.size(); + int ruptureSetCount = hazard.ruptureSetCurves.size(); ListMultimap<Gmm, DisaggDataset> gmmDatasetLists = MultimapBuilder .enumKeys(Gmm.class) - .arrayListValues(sourceSetCount) + .arrayListValues(ruptureSetCount) .build(); ListMultimap<SourceType, DisaggDataset> typeDatasetLists = MultimapBuilder .enumKeys(SourceType.class) - .arrayListValues(sourceSetCount) + .arrayListValues(ruptureSetCount) .build(); - for (HazardCurveSet curveSet : hazard.sourceSetCurves.values()) { - XySequence sourceSetCurve = curveSet.totalCurves.get(config.imt); - double sourceSetRate = RATE_INTERPOLATER.findY(sourceSetCurve, config.iml); - if (Double.isNaN(sourceSetRate) || sourceSetRate == 0.0) { + for (HazardCurveSet curveSet : hazard.ruptureSetCurves.values()) { + XySequence ruptureSetCurve = curveSet.totalCurves.get(config.imt); + double ruptureSetRate = RATE_INTERPOLATER.findY(ruptureSetCurve, config.iml); + if (Double.isNaN(ruptureSetRate) || ruptureSetRate == 0.0) { // Consider logging statement below - // System.out.println("Skipping: " + curveSet.sourceSet.name()); + // System.out.println("Skipping: " + curveSet.ruptureSet.name()); continue; } - Map<Gmm, DisaggDataset> sourceSetDatasets = Disaggregator.disaggregate( + Map<Gmm, DisaggDataset> ruptureSetDatasets = Disaggregator.disaggregate( curveSet, config, hazard.site); - gmmDatasetLists.putAll(Multimaps.forMap(sourceSetDatasets)); - DisaggDataset sourceSetTotal = SOURCE_CONSOLIDATOR.apply(sourceSetDatasets.values()); - typeDatasetLists.put(curveSet.sourceSet.type(), sourceSetTotal); + gmmDatasetLists.putAll(Multimaps.forMap(ruptureSetDatasets)); + DisaggDataset ruptureSetTotal = SOURCE_CONSOLIDATOR.apply(ruptureSetDatasets.values()); + typeDatasetLists.put(curveSet.ruptures.type(), ruptureSetTotal); } - /* Combine SourceSets across Gmms. */ + /* Combine RuptureSets across Gmms. */ gmmDatasets = Maps.immutableEnumMap(Maps.transformValues( Multimaps.asMap(gmmDatasetLists), SOURCE_SET_CONSOLIDATOR::apply)); - /* Combine SourceSets across SourceTypes. */ + /* Combine RuptureSets across SourceTypes. */ typeDatasets = Maps.immutableEnumMap(Maps.transformValues( Multimaps.asMap(typeDatasetLists), SOURCE_SET_CONSOLIDATOR::apply)); - /* Combine SourceSet totals. */ + /* Combine RuptureSet totals. 
*/ totalDataset = SOURCE_SET_CONSOLIDATOR.apply(typeDatasets.values()); } diff --git a/src/main/java/gov/usgs/earthquake/nshmp/calc/Disaggregator.java b/src/main/java/gov/usgs/earthquake/nshmp/calc/Disaggregator.java index 94f364de..6fa3be47 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/calc/Disaggregator.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/calc/Disaggregator.java @@ -23,9 +23,9 @@ import com.google.common.primitives.Ints; import gov.usgs.earthquake.nshmp.Maths; import gov.usgs.earthquake.nshmp.calc.DisaggContributor.ClusterContributor; +import gov.usgs.earthquake.nshmp.calc.DisaggContributor.RuptureSetContributor; import gov.usgs.earthquake.nshmp.calc.DisaggContributor.SectionSource; import gov.usgs.earthquake.nshmp.calc.DisaggContributor.SourceContributor; -import gov.usgs.earthquake.nshmp.calc.DisaggContributor.SourceSetContributor; import gov.usgs.earthquake.nshmp.calc.DisaggContributor.SystemContributor; import gov.usgs.earthquake.nshmp.data.Indexing; import gov.usgs.earthquake.nshmp.data.IntervalArray; @@ -37,14 +37,14 @@ import gov.usgs.earthquake.nshmp.gmm.GroundMotion; import gov.usgs.earthquake.nshmp.gmm.Imt; import gov.usgs.earthquake.nshmp.model.ClusterSource; import gov.usgs.earthquake.nshmp.model.GmmSet; +import gov.usgs.earthquake.nshmp.model.RuptureSet; import gov.usgs.earthquake.nshmp.model.Source; -import gov.usgs.earthquake.nshmp.model.SourceSet; -import gov.usgs.earthquake.nshmp.model.SystemSourceSet; +import gov.usgs.earthquake.nshmp.model.SystemRuptureSet; import gov.usgs.earthquake.nshmp.tree.Branch; import gov.usgs.earthquake.nshmp.tree.LogicTree; /** - * Factory class that disaggregates the hazard for a single {@code SourceSet} + * Factory class that disaggregates the hazard for a single {@code RuptureSet} * across all relevant {@code Gmm}s. * * @author U.S. Geological Survey @@ -52,7 +52,7 @@ import gov.usgs.earthquake.nshmp.tree.LogicTree; final class Disaggregator { private final HazardCurveSet curves; - private final SourceSet<? extends Source> sources; + private final RuptureSet<? 
extends Source> ruptures; private final GmmSet gmmSet; private final Imt imt; @@ -65,8 +65,8 @@ final class Disaggregator { private Disaggregator(HazardCurveSet curves, DisaggConfig config, Site site) { this.curves = curves; - this.sources = curves.sourceSet; - this.gmmSet = sources.groundMotionModels(); + this.ruptures = curves.ruptures; + this.gmmSet = ruptures.groundMotionModels(); this.imt = config.imt; this.model = config.model; @@ -87,7 +87,7 @@ final class Disaggregator { } private Map<Gmm, DisaggDataset> run() { - switch (sources.type()) { + switch (ruptures.type()) { case FAULT_CLUSTER: return processClusterSources(); case FAULT_SYSTEM: @@ -97,7 +97,8 @@ final class Disaggregator { } } - private static Map<Gmm, DisaggDataset.Builder> createBuilders(Set<Gmm> gmms, + private static Map<Gmm, DisaggDataset.Builder> createBuilders( + Set<Gmm> gmms, DisaggDataset model) { Map<Gmm, DisaggDataset.Builder> map = Maps.newEnumMap(Gmm.class); for (Gmm gmm : gmms) { @@ -109,8 +110,8 @@ final class Disaggregator { private Map<Gmm, DisaggDataset> processSources() { Map<Gmm, DisaggDataset.Builder> builders = createBuilders(gmmSet.gmms(), model); for (DisaggDataset.Builder builder : builders.values()) { - SourceSetContributor.Builder parent = new SourceSetContributor.Builder(); - builder.setParentContributor(parent.sourceSet(sources)); + RuptureSetContributor.Builder parent = new RuptureSetContributor.Builder(); + builder.setParentContributor(parent.ruptures(ruptures)); } for (GroundMotions gms : curves.hazardGroundMotionsList) { processSource(gms, builders); @@ -154,7 +155,7 @@ final class Disaggregator { /* * Scale builders to the rate/contribution of the cluster and attach - * ClusterContributors to parent SourceSetContributors and swap. + * ClusterContributors to parent RuptureSetContributors and swap. */ Map<Gmm, XySequence> clusterCurves = clusterCurveList.get(i); for (Entry<Gmm, DisaggDataset.Builder> entry : datasetBuilders.entrySet()) { @@ -177,10 +178,10 @@ final class Disaggregator { } /* Swap parents. */ - DisaggContributor.Builder sourceSetContributor = new SourceSetContributor.Builder() - .sourceSet(curves.sourceSet) + DisaggContributor.Builder ruptureSetContributor = new RuptureSetContributor.Builder() + .ruptures(curves.ruptures) .addChild(clusterBuilder.parent); - clusterBuilder.setParentContributor(sourceSetContributor); + clusterBuilder.setParentContributor(ruptureSetContributor); } /* Combine cluster datasets. */ @@ -232,7 +233,11 @@ final class Disaggregator { double ε = Maths.epsilon(μ, σ, iml); double probAtIml = probModel.exceedance(μ, σ, trunc, imt, iml); - double rate = probAtIml * in.rate * sources.weight() * gmmWeight * branch.weight(); + + // TODO need ruptures.weight + // double rate = probAtIml * in.rate * sources.weight() * gmmWeight * + // branch.weight(); + double rate = probAtIml * in.rate * 1.0 * gmmWeight * branch.weight(); double rScaled = rRup * rate; double mScaled = Mw * rate; @@ -308,12 +313,12 @@ final class Disaggregator { private Map<Gmm, DisaggDataset> processSystemSources() { /* Safe covariant cast assuming switch handles variants. 
*/ - SystemSourceSet systemSources = (SystemSourceSet) sources; + SystemRuptureSet systemRuptures = (SystemRuptureSet) ruptures; Map<Gmm, DisaggDataset.Builder> builders = createBuilders(gmmSet.gmms(), model); for (DisaggDataset.Builder builder : builders.values()) { - SourceSetContributor.Builder parent = new SourceSetContributor.Builder(); - builder.setParentContributor(parent.sourceSet(sources)); + RuptureSetContributor.Builder parent = new RuptureSetContributor.Builder(); + builder.setParentContributor(parent.ruptures(ruptures)); } /* @@ -336,8 +341,8 @@ final class Disaggregator { * indexing. */ IntervalArray mfdModel = IntervalArray.Builder.withRows( - Maths.round(systemSources.stats.mMin, 1, RoundingMode.FLOOR), - Maths.round(systemSources.stats.mMax, 1, RoundingMode.CEILING), + Maths.round(systemRuptures.stats.mMin, 1, RoundingMode.FLOOR), + Maths.round(systemRuptures.stats.mMax, 1, RoundingMode.CEILING), 0.1).build(); IntervalArray.Builder mfdIndexer = IntervalArray.Builder.fromModel(mfdModel); @@ -352,10 +357,10 @@ final class Disaggregator { */ SectionSource section = new SectionSource( sectionIndex, - systemSources.sectionName(sectionIndex)); + systemRuptures.sectionName(sectionIndex)); Location location = Locations.closestPoint( site.location(), - systemSources.section(sectionIndex).getUpperEdge()); + systemRuptures.section(sectionIndex).getUpperEdge()); double azimuth = Locations.azimuth(site.location(), location); /* @@ -406,7 +411,10 @@ final class Disaggregator { double ε = Maths.epsilon(μ, σ, iml); double probAtIml = probModel.exceedance(μ, σ, trunc, imt, iml); - double rate = probAtIml * in.rate * sources.weight() * gmmWeight * branch.weight(); + // TODO need ruptures.weight + // double rate = probAtIml * in.rate * sources.weight() * + // gmmWeight * branch.weight(); + double rate = probAtIml * in.rate * 1.0 * gmmWeight * branch.weight(); SystemContributor.Builder contributor = contributors.get(gmm); diff --git a/src/main/java/gov/usgs/earthquake/nshmp/calc/EqRate.java b/src/main/java/gov/usgs/earthquake/nshmp/calc/EqRate.java index 8f811f9d..e0ab21d3 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/calc/EqRate.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/calc/EqRate.java @@ -22,15 +22,17 @@ import gov.usgs.earthquake.nshmp.data.XyPoint; import gov.usgs.earthquake.nshmp.data.XySequence; import gov.usgs.earthquake.nshmp.geo.Location; import gov.usgs.earthquake.nshmp.mfd.Mfd; +import gov.usgs.earthquake.nshmp.model.ClusterRuptureSet; import gov.usgs.earthquake.nshmp.model.ClusterSource; -import gov.usgs.earthquake.nshmp.model.ClusterSourceSet; import gov.usgs.earthquake.nshmp.model.Distance; +import gov.usgs.earthquake.nshmp.model.FaultRuptureSet; import gov.usgs.earthquake.nshmp.model.HazardModel; import gov.usgs.earthquake.nshmp.model.Rupture; +import gov.usgs.earthquake.nshmp.model.RuptureSet; import gov.usgs.earthquake.nshmp.model.Source; -import gov.usgs.earthquake.nshmp.model.SourceSet; +import gov.usgs.earthquake.nshmp.model.SourceTree; import gov.usgs.earthquake.nshmp.model.SourceType; -import gov.usgs.earthquake.nshmp.model.SystemSourceSet; +import gov.usgs.earthquake.nshmp.model.SystemRuptureSet; /** * General purpose earthquake rate and probability data container. This class @@ -125,9 +127,12 @@ public class EqRate { } /* Populate builders. */ - for (SourceSet<? 
extends Source> sourceSet : model) { - IntervalArray sourceSetMfd = mfd(sourceSet, site.location(), distance, modelMfd); - typeMfdBuilders.get(sourceSet.type()).add(sourceSetMfd); + for (SourceTree tree : model) { + for (var branch : tree) { + RuptureSet<? extends Source> ruptures = branch.value(); + IntervalArray rupturesMfd = mfd(ruptures, site.location(), distance, modelMfd); + typeMfdBuilders.get(ruptures.type()).add(rupturesMfd); + } } /* Compute total and convert to sequences. */ @@ -231,104 +236,109 @@ public class EqRate { } private static IntervalArray mfd( - SourceSet<? extends Source> sourceSet, + RuptureSet<? extends Source> ruptures, Location location, double distance, IntervalArray modelMfd) { - switch (sourceSet.type()) { + switch (ruptures.type()) { case GRID: - return gridMfd(sourceSet, location, distance, modelMfd); + return gridMfd(ruptures, location, distance, modelMfd); case SLAB: - return gridMfd(sourceSet, location, distance, modelMfd); + return gridMfd(ruptures, location, distance, modelMfd); case FAULT_CLUSTER: - return clusterMfd((ClusterSourceSet) sourceSet, location, distance, modelMfd); + return clusterMfd((ClusterRuptureSet) ruptures, location, distance, + modelMfd); case FAULT_SYSTEM: - return systemMfd((SystemSourceSet) sourceSet, location, distance, modelMfd); + return systemMfd((SystemRuptureSet) ruptures, location, distance, + modelMfd); default: - return faultMfd(sourceSet, location, distance, modelMfd); + return faultMfd(ruptures, location, distance, modelMfd); } } /* - * Short-circuit GridSourceSet by summing the relevant node mfds. Handles both - * GRID and SLAB types. + * Short-circuit GridRuptureSet by summing the relevant node mfds. Handles + * both GRID and SLAB types. */ private static IntervalArray gridMfd( - SourceSet<? extends Source> sourceSet, + RuptureSet<? extends Source> ruptures, Location location, double distance, IntervalArray modelMfd) { - IntervalArray.Builder sourceSetMfd = IntervalArray.Builder.fromModel(modelMfd); - for (Source source : sourceSet.iterableForLocation(location, distance)) { + IntervalArray.Builder rupturesMfd = IntervalArray.Builder.fromModel(modelMfd); + for (Source source : ruptures.iterableForLocation(location, distance)) { for (Mfd mfd : source.mfds()) { - sourceSetMfd.addEach(mfd.data()); + rupturesMfd.addEach(mfd.data()); } } - return sourceSetMfd.multiply(sourceSet.weight()).build(); + return rupturesMfd.multiply(ruptures.weight()).build(); } /* - * Special case ClusterSourceSet. + * Special case ClusterRuptureSet. * * Nested fault rates are in fact weights that need to be scaled by the * cluster rate. 
*/ private static IntervalArray clusterMfd( - ClusterSourceSet sourceSet, + ClusterRuptureSet ruptures, Location location, double distance, IntervalArray modelMfd) { - IntervalArray.Builder sourceSetMfd = IntervalArray.Builder.fromModel(modelMfd); - for (Source source : sourceSet.iterableForLocation(location, distance)) { + IntervalArray.Builder srcSetMfd = IntervalArray.Builder.fromModel(modelMfd); + for (Source source : ruptures.iterableForLocation(location, distance)) { ClusterSource clusterSource = (ClusterSource) source; - IntervalArray.Builder faultMfd = Builder - .copyOf(faultMfd( - clusterSource.faults(), - location, - distance, - modelMfd)) - .multiply(clusterSource.rate()); - sourceSetMfd.add(faultMfd.build()); + + for (FaultRuptureSet frs : clusterSource.faults()) { + IntervalArray.Builder faultMfd = Builder + .copyOf(faultMfd( + frs, + location, + distance, + modelMfd)) + .multiply(clusterSource.rate()); + srcSetMfd.add(faultMfd.build()); + } } - return sourceSetMfd.multiply(sourceSet.weight()).build(); + return srcSetMfd.multiply(ruptures.weight()).build(); } /* - * Special case and delegate to SystemSourceSet. + * Special case and delegate to SystemRuptureSet. */ static IntervalArray systemMfd( - SystemSourceSet sourceSet, + SystemRuptureSet ruptures, Location location, double distance, IntervalArray modelMfd) { - return SystemSourceSet + return SystemRuptureSet .toRatesFunction(location, distance, modelMfd) - .apply(sourceSet); + .apply(ruptures); } /* * Default approach: distance filter on ruptures. */ private static IntervalArray faultMfd( - SourceSet<? extends Source> sourceSet, + RuptureSet<? extends Source> ruptures, Location location, double distance, IntervalArray modelMfd) { - IntervalArray.Builder sourceSetMfd = IntervalArray.Builder.fromModel(modelMfd); - for (Source source : sourceSet.iterableForLocation(location, distance)) { + IntervalArray.Builder rupturesMfd = IntervalArray.Builder.fromModel(modelMfd); + for (Source source : ruptures.iterableForLocation(location, distance)) { for (Rupture rupture : source) { Distance d = rupture.surface().distanceTo(location); if (d.rJB <= distance) { - sourceSetMfd.add(rupture.mag(), rupture.rate()); + rupturesMfd.add(rupture.mag(), rupture.rate()); } } } - return sourceSetMfd.multiply(sourceSet.weight()).build(); + return rupturesMfd.multiply(ruptures.weight()).build(); } private static final class RateTask implements Callable<EqRate> { diff --git a/src/main/java/gov/usgs/earthquake/nshmp/calc/GroundMotions.java b/src/main/java/gov/usgs/earthquake/nshmp/calc/GroundMotions.java index 8d9d7ab7..d55a7b39 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/calc/GroundMotions.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/calc/GroundMotions.java @@ -19,7 +19,7 @@ import gov.usgs.earthquake.nshmp.tree.LogicTree; /** * Container class for scalar ground motions associated with individual - * {@code Source}s in a {@code SourceSet}. + * {@code Source}s in a {@code RuptureSet}. * * @author U.S. 
Geological Survey */ diff --git a/src/main/java/gov/usgs/earthquake/nshmp/calc/Hazard.java b/src/main/java/gov/usgs/earthquake/nshmp/calc/Hazard.java index d651b634..8062a487 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/calc/Hazard.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/calc/Hazard.java @@ -16,10 +16,10 @@ import com.google.common.collect.SetMultimap; import gov.usgs.earthquake.nshmp.data.MutableXySequence; import gov.usgs.earthquake.nshmp.data.XySequence; import gov.usgs.earthquake.nshmp.gmm.Imt; -import gov.usgs.earthquake.nshmp.model.GridSourceSet; +import gov.usgs.earthquake.nshmp.model.GridRuptureSet; import gov.usgs.earthquake.nshmp.model.HazardModel; +import gov.usgs.earthquake.nshmp.model.RuptureSet; import gov.usgs.earthquake.nshmp.model.Source; -import gov.usgs.earthquake.nshmp.model.SourceSet; import gov.usgs.earthquake.nshmp.model.SourceType; /** @@ -33,20 +33,20 @@ import gov.usgs.earthquake.nshmp.model.SourceType; */ public final class Hazard { - final SetMultimap<SourceType, HazardCurveSet> sourceSetCurves; + final SetMultimap<SourceType, HazardCurveSet> ruptureSetCurves; final Map<Imt, XySequence> totalCurves; final HazardModel model; final Site site; final CalcConfig config; private Hazard( - SetMultimap<SourceType, HazardCurveSet> sourceSetCurves, + SetMultimap<SourceType, HazardCurveSet> ruptureSetCurves, Map<Imt, XySequence> totalCurves, HazardModel model, Site site, CalcConfig config) { - this.sourceSetCurves = sourceSetCurves; + this.ruptureSetCurves = ruptureSetCurves; this.totalCurves = totalCurves; this.model = model; this.site = site; @@ -57,15 +57,15 @@ public final class Hazard { public String toString() { String LF = StandardSystemProperty.LINE_SEPARATOR.value(); StringBuilder sb = new StringBuilder("HazardResult:"); - if (sourceSetCurves.keySet().isEmpty()) { + if (ruptureSetCurves.keySet().isEmpty()) { sb.append(" empty, was the site out of range?"); return sb.toString(); } sb.append(LF); - for (SourceType type : sourceSetCurves.keySet()) { - sb.append(type).append("SourceSet:").append(LF); - for (HazardCurveSet curveSet : sourceSetCurves.get(type)) { - SourceSet<? extends Source> ss = curveSet.sourceSet; + for (SourceType type : ruptureSetCurves.keySet()) { + sb.append(type).append("RuptureSet:").append(LF); + for (HazardCurveSet curveSet : ruptureSetCurves.get(type)) { + RuptureSet<? 
extends Source> ss = curveSet.ruptures; sb.append(" ").append(ss); sb.append("Used: "); switch (type) { @@ -76,8 +76,8 @@ public final class Hazard { sb.append(curveSet.hazardGroundMotionsList.get(0).inputs.size()); break; case GRID: - sb.append(GridSourceSet.sizeString( - curveSet.sourceSet, + sb.append(GridRuptureSet.sizeString( + curveSet.ruptures, curveSet.hazardGroundMotionsList.size())); break; default: @@ -141,7 +141,7 @@ public final class Hazard { } Builder addCurveSet(HazardCurveSet curveSet) { - curveMapBuilder.put(curveSet.sourceSet.type(), curveSet); + curveMapBuilder.put(curveSet.ruptures.type(), curveSet); for (Entry<Imt, XySequence> entry : curveSet.totalCurves.entrySet()) { totalCurves.get(entry.getKey()).add(entry.getValue()); } @@ -164,5 +164,4 @@ public final class Hazard { config); } } - } diff --git a/src/main/java/gov/usgs/earthquake/nshmp/calc/HazardCalcs.java b/src/main/java/gov/usgs/earthquake/nshmp/calc/HazardCalcs.java index 6c1f833f..05ee8950 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/calc/HazardCalcs.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/calc/HazardCalcs.java @@ -28,13 +28,15 @@ import com.google.common.base.Stopwatch; import com.google.common.collect.Range; import gov.usgs.earthquake.nshmp.calc.Transforms.SourceToInputs; -import gov.usgs.earthquake.nshmp.model.ClusterSourceSet; -import gov.usgs.earthquake.nshmp.model.GridSourceSet; +import gov.usgs.earthquake.nshmp.model.ClusterRuptureSet; +import gov.usgs.earthquake.nshmp.model.GridRuptureSet; import gov.usgs.earthquake.nshmp.model.HazardModel; +import gov.usgs.earthquake.nshmp.model.RuptureSet; import gov.usgs.earthquake.nshmp.model.Source; -import gov.usgs.earthquake.nshmp.model.SourceSet; +import gov.usgs.earthquake.nshmp.model.SourceTree; import gov.usgs.earthquake.nshmp.model.SourceType; -import gov.usgs.earthquake.nshmp.model.SystemSourceSet; +import gov.usgs.earthquake.nshmp.model.SystemRuptureSet; +import gov.usgs.earthquake.nshmp.tree.Branch; /** * Static probabilistic seismic hazard analysis calculators. @@ -58,7 +60,7 @@ public class HazardCalcs { * debugging. * ------------------------------------------------------------------------- * Single threaded calcs monitor and log calculation duration of each - * SourceSet. + * RuptureSet. * ------------------------------------------------------------------------- * Although NPEs will be thrown once any method argument in this class is * used, because multiple arguments may be passed on the same line thereby @@ -66,7 +68,7 @@ public class HazardCalcs { * performed. * ------------------------------------------------------------------------- * Notes on empty objects: In the default calculation pipeline, calculation - * tasks are initiated when SourceSet.iterableForLocation() yields relevant + * tasks are initiated when RuptureSet.iterableForLocation() yields relevant * sources for a site. Similarly, optimized (table-based) grid sources, once * created for a site, will not yield sources if the mfd table is empty. * System source sets, on the other hand, create a list of inputs on a @@ -198,57 +200,61 @@ public class HazardCalcs { Executor ex) throws InterruptedException, ExecutionException { AsyncList<HazardCurveSet> curveSets = AsyncList.createWithCapacity(model.size()); - AsyncList<SourceSet<? extends Source>> gridTables = AsyncList.create(); - Predicate<SourceSet<? extends Source>> settingFilter = model.settingFilter( - config, - site.location()); - Predicate<SourceSet<? 
extends Source>> typeFilter = new SourceTypeFilter(config); - - for (SourceSet<? extends Source> sourceSet : model) { - - // check if source is applicable to site or should be skipped - if (settingFilter.test(sourceSet) && typeFilter.test(sourceSet)) { - - switch (sourceSet.type()) { - - case GRID: - GridSourceSet gss = (GridSourceSet) sourceSet; - if (config.performance.optimizeGrids && gss.optimizable()) { - gridTables.add(transform( - immediateFuture(gss), - GridSourceSet.optimizer( - site.location(), - config.performance.smoothGrids)::apply, - ex)); + AsyncList<RuptureSet<? extends Source>> gridTables = AsyncList.create(); + var settingFilter = model.settingFilter(config, site.location()); + var typeFilter = new SourceTypeFilter(config); + + for (SourceTree tree : model) { + + for (Branch<RuptureSet<? extends Source>> branch : tree) { + RuptureSet<? extends Source> ruptures = branch.value(); + + // check if source is applicable to site or should be skipped + if (settingFilter.test(ruptures) && typeFilter.test(ruptures)) { + + switch (ruptures.type()) { + + case GRID: + GridRuptureSet gss = (GridRuptureSet) ruptures; + if (config.performance.optimizeGrids && gss.optimizable()) { + gridTables.add(transform( + immediateFuture(gss), + GridRuptureSet.optimizer( + site.location(), + config.performance.smoothGrids)::apply, + ex)); + break; + } + curveSets.add(sourcesToCurves(ruptures, config, site, ex)); break; - } - curveSets.add(sourcesToCurves(sourceSet, config, site, ex)); - break; - case FAULT_CLUSTER: - curveSets.add(clustersToCurves((ClusterSourceSet) sourceSet, config, site, ex)); - break; + case FAULT_CLUSTER: + curveSets.add(clustersToCurves( + (ClusterRuptureSet) ruptures, config, site, ex)); + break; - case FAULT_SYSTEM: - curveSets.add(systemToCurves((SystemSourceSet) sourceSet, config, site, ex)); - break; + case FAULT_SYSTEM: + curveSets.add(systemToCurves( + (SystemRuptureSet) ruptures, config, site, ex)); + break; - default: - curveSets.add(sourcesToCurves(sourceSet, config, site, ex)); - break; + default: + curveSets.add(sourcesToCurves(ruptures, config, site, ex)); + break; + } } + } - } + } /* * If grid optimization is enabled, grid calculations were deferred (above) * while table based source sets were initialized. Submit once all other * source types have been submitted. */ - for (SourceSet<? extends Source> sourceSet : allAsList(gridTables).get()) { - curveSets.add(sourcesToCurves(sourceSet, config, site, ex)); + for (RuptureSet<? extends Source> ruptures : allAsList(gridTables).get()) { + curveSets.add(sourcesToCurves(ruptures, config, site, ex)); } - return toHazardResult(model, config, site, curveSets, ex); } @@ -262,47 +268,51 @@ public class HazardCalcs { Logger log) { List<HazardCurveSet> curveSets = new ArrayList<>(model.size()); - Predicate<SourceSet<? extends Source>> settingsFilter = model.settingFilter( - config, - site.location()); - Predicate<SourceSet<? extends Source>> typeFilter = new SourceTypeFilter(config); + var settingsFilter = model.settingFilter(config, site.location()); + var typeFilter = new SourceTypeFilter(config); log.info("HazardCurve: (single-threaded)"); Stopwatch swTotal = Stopwatch.createStarted(); Stopwatch swSource = Stopwatch.createStarted(); - for (SourceSet<? 
extends Source> sourceSet : model) { - - // check if source is applicable to site or should be skipped - if (settingsFilter.test(sourceSet) && typeFilter.test(sourceSet)) { - - switch (sourceSet.type()) { - case GRID: - GridSourceSet gss = (GridSourceSet) sourceSet; - if (config.performance.optimizeGrids && gss.optimizable()) { - sourceSet = GridSourceSet.optimizer( - site.location(), - config.performance.smoothGrids).apply(gss); - log(log, MSSG_GRID_INIT, sourceSet.name(), duration(swSource)); - } - curveSets.add(sourcesToCurves(sourceSet, config, site)); - log(log, MSSG_COMPLETED, sourceSet.name(), duration(swSource)); - break; - - case FAULT_CLUSTER: - curveSets.add(clustersToCurves((ClusterSourceSet) sourceSet, config, site)); - log(log, MSSG_COMPLETED, sourceSet.name(), duration(swSource)); - break; - - case FAULT_SYSTEM: - curveSets.add(systemToCurves((SystemSourceSet) sourceSet, config, site)); - log(log, MSSG_COMPLETED, sourceSet.name(), duration(swSource)); - break; - - default: - curveSets.add(sourcesToCurves(sourceSet, config, site)); - log(log, MSSG_COMPLETED, sourceSet.name(), duration(swSource)); - break; + for (SourceTree tree : model) { + + for (Branch<RuptureSet<? extends Source>> branch : tree) { + RuptureSet<? extends Source> ruptures = branch.value(); + + // check if source is applicable to site or should be skipped + if (settingsFilter.test(ruptures) && typeFilter.test(ruptures)) { + + switch (ruptures.type()) { + case GRID: + GridRuptureSet gss = (GridRuptureSet) ruptures; + if (config.performance.optimizeGrids && gss.optimizable()) { + ruptures = GridRuptureSet.optimizer( + site.location(), + config.performance.smoothGrids).apply(gss); + log(log, MSSG_GRID_INIT, ruptures.name(), duration(swSource)); + } + curveSets.add(sourcesToCurves(ruptures, config, site)); + log(log, MSSG_COMPLETED, ruptures.name(), duration(swSource)); + break; + + case FAULT_CLUSTER: + curveSets.add(clustersToCurves( + (ClusterRuptureSet) ruptures, config, site)); + log(log, MSSG_COMPLETED, ruptures.name(), duration(swSource)); + break; + + case FAULT_SYSTEM: + curveSets.add(systemToCurves( + (SystemRuptureSet) ruptures, config, site)); + log(log, MSSG_COMPLETED, ruptures.name(), duration(swSource)); + break; + + default: + curveSets.add(sourcesToCurves(ruptures, config, site)); + log(log, MSSG_COMPLETED, ruptures.name(), duration(swSource)); + break; + } } } } @@ -336,7 +346,7 @@ public class HazardCalcs { * A filter that skips sources in certain tectonic settings if the supplied * location is outside the map-region for the setting. */ - static class SourceTypeFilter implements Predicate<SourceSet<? extends Source>> { + static class SourceTypeFilter implements Predicate<RuptureSet<? extends Source>> { private Set<SourceType> types; @@ -352,8 +362,8 @@ public class HazardCalcs { } @Override - public boolean test(SourceSet<? extends Source> sourceSet) { - return types.contains(sourceSet.type()); + public boolean test(RuptureSet<? 
extends Source> ruptures) { + return types.contains(ruptures.type()); } } diff --git a/src/main/java/gov/usgs/earthquake/nshmp/calc/HazardCurveSet.java b/src/main/java/gov/usgs/earthquake/nshmp/calc/HazardCurveSet.java index 76a849d0..c116ca85 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/calc/HazardCurveSet.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/calc/HazardCurveSet.java @@ -15,32 +15,32 @@ import gov.usgs.earthquake.nshmp.data.MutableXySequence; import gov.usgs.earthquake.nshmp.data.XySequence; import gov.usgs.earthquake.nshmp.gmm.Gmm; import gov.usgs.earthquake.nshmp.gmm.Imt; +import gov.usgs.earthquake.nshmp.model.RuptureSet; import gov.usgs.earthquake.nshmp.model.Source; -import gov.usgs.earthquake.nshmp.model.SourceSet; import gov.usgs.earthquake.nshmp.model.SourceType; /** - * Container class for hazard curves derived from a {@code SourceSet}. Class + * Container class for hazard curves derived from a {@code RuptureSet}. Class * stores the {@code GroundMotions}s associated with each {@code Source} used in * a hazard calculation and the individual curves for each * {@code GroundMotionModel} used. * * <p>The {@code Builder} for this class is used to aggregate the HazardCurves - * associated with each {@code Source} in a {@code SourceSet}, scaled by the - * {@code SourceSet} weight. It also scales curves by their associated + * associated with each {@code Source} in a {@code RuptureSet}, scaled by the + * {@code RuptureSet} weight. It also scales curves by their associated * {@code GroundMotionModel} weight, using distance-dependent weights when * appropriate. Note that this may lead to the dropping of some curves that are * not appropriate at large distances. * - * <p>A HazardCurveSet is compatible with all types of {@code SourceSet}s, - * including {@code ClusterSourceSet}s, which are handled differently in hazard + * <p>A HazardCurveSet is compatible with all types of {@code RuptureSet}s, + * including {@code ClusterRuptureSet}s, which are handled differently in hazard * calculations. This container marks a point in the calculation pipeline where * results from cluster and other sources may be recombined into a single - * {@code Hazard} result, regardless of {@code SourceSet.type()} for all - * relevant {@code SourceSet}s. + * {@code Hazard} result, regardless of {@code RuptureSet.type()} for all + * relevant {@code RuptureSet}s. * * <p>For reasons related to disaggregation, this class also retains the curves - * for each {@code ClusterSource} in a {@code ClusterSourceSet}. Specifically, + * for each {@code ClusterSource} in a {@code ClusterRuptureSet}. Specifically, * disaggregation of cluster sources is not a straightforward decomposition of * contributions from individual sources and its helpful to have the total curve * for each {@code ClusterSource} available. Given the relatively specializaed @@ -51,7 +51,7 @@ import gov.usgs.earthquake.nshmp.model.SourceType; */ final class HazardCurveSet { - final SourceSet<? extends Source> sourceSet; + final RuptureSet<? 
extends Source> ruptures; final List<GroundMotions> hazardGroundMotionsList; final List<ClusterGroundMotions> clusterGroundMotionsList; final Map<Imt, List<Map<Gmm, XySequence>>> clusterCurveLists; @@ -63,33 +63,27 @@ final class HazardCurveSet { // objects created when generating hazard curves private HazardCurveSet(Builder builder) { - sourceSet = builder.sourceSet; - hazardGroundMotionsList = builder.hazardGroundMotionsList; - clusterGroundMotionsList = builder.clusterGroundMotionsList; - clusterCurveLists = builder.clusterCurveLists; - curveMap = builder.curveMap; - totalCurves = builder.totalCurves; + this.ruptures = builder.ruptures; + this.hazardGroundMotionsList = builder.hazardGroundMotionsList; + this.clusterGroundMotionsList = builder.clusterGroundMotionsList; + this.clusterCurveLists = builder.clusterCurveLists; + this.curveMap = builder.curveMap; + this.totalCurves = builder.totalCurves; } - private HazardCurveSet( - SourceSet<? extends Source> sourceSet, - List<GroundMotions> hazardGroundMotionsList, - List<ClusterGroundMotions> clusterGroundMotionsList, - Map<Imt, List<Map<Gmm, XySequence>>> clusterCurveLists, - Map<Imt, Map<Gmm, MutableXySequence>> curveMap, - Map<Imt, XySequence> totalCurves) { - - this.sourceSet = sourceSet; - this.hazardGroundMotionsList = hazardGroundMotionsList; - this.clusterGroundMotionsList = clusterGroundMotionsList; - this.clusterCurveLists = clusterCurveLists; - this.curveMap = curveMap; - this.totalCurves = totalCurves; + private HazardCurveSet(RuptureSet<? extends Source> ruptures) { + this.ruptures = ruptures; + this.hazardGroundMotionsList = null; + this.clusterGroundMotionsList = null; + this.clusterCurveLists = null; + this.curveMap = null; + this.totalCurves = null; } - static Builder builder(SourceSet<? extends Source> sourceSet, + static Builder builder( + RuptureSet<? extends Source> ruptures, Map<Imt, XySequence> modelCurves) { - return new Builder(sourceSet, modelCurves); + return new Builder(ruptures, modelCurves); } /* @@ -99,8 +93,8 @@ final class HazardCurveSet { * were out of range of a site. We only retain a reference to the original * source set. */ - static HazardCurveSet empty(SourceSet<? extends Source> sourceSet) { - return new HazardCurveSet(sourceSet, null, null, null, null, null); + static HazardCurveSet empty(RuptureSet<? extends Source> ruptures) { + return new HazardCurveSet(ruptures); } boolean isEmpty() { @@ -113,8 +107,8 @@ final class HazardCurveSet { private boolean built = false; private final Map<Imt, XySequence> modelCurves; + private final RuptureSet<? extends Source> ruptures; - private final SourceSet<? extends Source> sourceSet; private final List<GroundMotions> hazardGroundMotionsList; private final List<ClusterGroundMotions> clusterGroundMotionsList; private final Map<Imt, List<Map<Gmm, XySequence>>> clusterCurveLists; @@ -122,13 +116,13 @@ final class HazardCurveSet { private final Map<Imt, XySequence> totalCurves; private Builder( - SourceSet<? extends Source> sourceSet, + RuptureSet<? 
extends Source> ruptures, Map<Imt, XySequence> modelCurves) { - this.sourceSet = sourceSet; + this.ruptures = ruptures; this.modelCurves = modelCurves; - boolean cluster = sourceSet.type() == SourceType.FAULT_CLUSTER; + boolean cluster = ruptures.type() == SourceType.FAULT_CLUSTER; if (cluster) { hazardGroundMotionsList = null; @@ -142,7 +136,7 @@ final class HazardCurveSet { curveMap = new EnumMap<>(Imt.class); } - Set<Gmm> gmms = sourceSet.groundMotionModels().gmms(); + Set<Gmm> gmms = ruptures.groundMotionModels().gmms(); Set<Imt> imts = modelCurves.keySet(); for (Imt imt : imts) { @@ -167,7 +161,7 @@ final class HazardCurveSet { checkNotNull(hazardGroundMotionsList, "%s only supports ClusterCurves", ID); hazardGroundMotionsList.add(curvesIn.groundMotions); double distance = curvesIn.groundMotions.inputs.minDistance; - Map<Gmm, Double> gmmWeightMap = sourceSet.groundMotionModels().gmmWeightMap(distance); + Map<Gmm, Double> gmmWeightMap = ruptures.groundMotionModels().gmmWeightMap(distance); // loop Imts based on what's been calculated for (Imt imt : curvesIn.curveMap.keySet()) { Map<Gmm, XySequence> curveMapIn = curvesIn.curveMap.get(imt); @@ -175,9 +169,9 @@ final class HazardCurveSet { // loop Gmms based on what's supported at this distance for (Gmm gmm : gmmWeightMap.keySet()) { - double weight = gmmWeightMap.get(gmm) * sourceSet.weight(); - curveMapBuild.get(gmm) - .add(MutableXySequence.copyOf(curveMapIn.get(gmm)).multiply(weight)); + double weight = gmmWeightMap.get(gmm) * ruptures.weight(); + curveMapBuild.get(gmm).add( + MutableXySequence.copyOf(curveMapIn.get(gmm)).multiply(weight)); } } return this; @@ -186,9 +180,8 @@ final class HazardCurveSet { Builder addCurves(ClusterCurves curvesIn) { checkNotNull(clusterGroundMotionsList, "%s only supports HazardCurves", ID); clusterGroundMotionsList.add(curvesIn.clusterGroundMotions); - double clusterWeight = curvesIn.clusterGroundMotions.parent.weight(); double distance = curvesIn.clusterGroundMotions.minDistance; - Map<Gmm, Double> gmmWeightMap = sourceSet.groundMotionModels().gmmWeightMap(distance); + Map<Gmm, Double> gmmWeightMap = ruptures.groundMotionModels().gmmWeightMap(distance); // loop Imts based on what's been calculated for (Imt imt : curvesIn.curveMap.keySet()) { Map<Gmm, XySequence> curveMapIn = curvesIn.curveMap.get(imt); @@ -198,17 +191,17 @@ final class HazardCurveSet { * Retain references to the total curve for each cluster source for * disaggregation in clusterCurveLists. These lists of maps by GMM will * contain only those curves for the GMMs appropriate for the source-site - * distance. When disaggreagting, the same distance cutoff will be - * considered so retrieval of curves from the maps should never thrwo an + * distance. When disaggregating, the same distance cutoff will be + * considered so retrieval of curves from the maps should never throw an + * NPE.
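[Editor's note] The curve accumulation above composes two scale factors: a distance-dependent GMM weight and the rupture set (source tree leaf) weight. A minimal sketch of that accumulation, with plain arrays standing in for XySequence and strings standing in for the Gmm enum; all names here are illustrative and not part of this patch:

import java.util.Map;

class CurveWeighting {

  /*
   * Accumulate per-GMM curves into running totals, scaling each curve by
   * gmmWeight * setWeight, i.e. the source tree leaf weight.
   */
  static void accumulate(
      Map<String, double[]> totals, // running totals, keyed by GMM name
      Map<String, double[]> curves, // curves for one source, keyed by GMM name
      Map<String, Double> gmmWeights, // distance-dependent GMM weights
      double setWeight) { // rupture set weight

    for (Map.Entry<String, Double> entry : gmmWeights.entrySet()) {
      double weight = entry.getValue() * setWeight;
      double[] curve = curves.get(entry.getKey());
      double[] total = totals.computeIfAbsent(
          entry.getKey(),
          k -> new double[curve.length]);
      for (int i = 0; i < curve.length; i++) {
        total[i] += curve[i] * weight; // all curves assumed same shape
      }
    }
  }
}

The real builder additionally retains each scaled cluster curve in clusterCurveLists for later disaggregation.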
*/ Map<Gmm, XySequence> clusterCurves = new EnumMap<>(Gmm.class); // loop Gmms based on what's supported at this distance for (Gmm gmm : gmmWeightMap.keySet()) { - // gmmWt * geometryWt * rateWt // (mags have already been collapsed, UNDO) - double weight = gmmWeightMap.get(gmm) * sourceSet.weight() * clusterWeight; + double weight = gmmWeightMap.get(gmm) * ruptures.weight(); + XySequence clusterCurve = MutableXySequence.copyOf(curveMapIn.get(gmm)).multiply(weight); curveMapBuild.get(gmm).add(clusterCurve); clusterCurves.put(gmm, clusterCurve); diff --git a/src/main/java/gov/usgs/earthquake/nshmp/calc/HazardExport.java b/src/main/java/gov/usgs/earthquake/nshmp/calc/HazardExport.java index 837d2a49..fc3be340 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/calc/HazardExport.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/calc/HazardExport.java @@ -6,6 +6,7 @@ import static gov.usgs.earthquake.nshmp.data.MutableXySequence.emptyCopyOf; import static java.nio.charset.StandardCharsets.UTF_8; import static java.nio.file.StandardOpenOption.APPEND; import static java.util.stream.Collectors.joining; +import static java.util.stream.Collectors.toCollection; import static java.util.stream.Collectors.toList; import java.io.IOException; @@ -13,7 +14,9 @@ import java.nio.file.Files; import java.nio.file.OpenOption; import java.nio.file.Path; import java.util.ArrayList; +import java.util.Collection; import java.util.EnumMap; +import java.util.EnumSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; @@ -39,8 +42,9 @@ import gov.usgs.earthquake.nshmp.gmm.Imt; import gov.usgs.earthquake.nshmp.internal.Parsing; import gov.usgs.earthquake.nshmp.internal.Parsing.Delimiter; import gov.usgs.earthquake.nshmp.model.HazardModel; +import gov.usgs.earthquake.nshmp.model.RuptureSet; import gov.usgs.earthquake.nshmp.model.Source; -import gov.usgs.earthquake.nshmp.model.SourceSet; +import gov.usgs.earthquake.nshmp.model.SourceTree; import gov.usgs.earthquake.nshmp.model.SourceType; /** @@ -410,12 +414,12 @@ public final class HazardExport { EnumMap<Imt, Map<SourceType, MutableXySequence>> imtMap = Maps.newEnumMap(Imt.class); // initialize receiver - Set<SourceType> types = hazard.sourceSetCurves.keySet(); + Set<SourceType> types = hazard.ruptureSetCurves.keySet(); for (Entry<Imt, XySequence> entry : hazard.curves().entrySet()) { imtMap.put(entry.getKey(), initCurves(types, MutableXySequence.copyOf(entry.getValue()))); } - for (Entry<SourceType, HazardCurveSet> curveSet : hazard.sourceSetCurves.entries()) { + for (Entry<SourceType, HazardCurveSet> curveSet : hazard.ruptureSetCurves.entries()) { for (Entry<Imt, XySequence> typeTotals : curveSet.getValue().totalCurves.entrySet()) { imtMap.get(typeTotals.getKey()) .get(curveSet.getKey()) @@ -453,21 +457,22 @@ public final class HazardExport { EnumMap<Imt, Map<Gmm, MutableXySequence>> imtMap = Maps.newEnumMap(Imt.class); /* Initialize receiver */ - Iterable<SourceSet<? extends Source>> sources = Iterables.transform( - hazard.sourceSetCurves.values(), + Iterable<RuptureSet<? 
extends Source>> sources = Iterables.transform( + hazard.ruptureSetCurves.values(), CURVE_SET_TO_SOURCE_SET::apply); Set<Gmm> gmms = gmmSet(sources); for (Entry<Imt, XySequence> entry : hazard.curves().entrySet()) { imtMap.put(entry.getKey(), initCurves(gmms, MutableXySequence.copyOf(entry.getValue()))); } - for (HazardCurveSet curveSet : hazard.sourceSetCurves.values()) { + for (HazardCurveSet curveSet : hazard.ruptureSetCurves.values()) { for (Entry<Imt, Map<Gmm, MutableXySequence>> imtEntry : curveSet.curveMap.entrySet()) { for (Entry<Gmm, MutableXySequence> gmmEntry : imtEntry.getValue().entrySet()) { imtMap.get(imtEntry.getKey()).get(gmmEntry.getKey()).add(gmmEntry.getValue()); } } } + return Maps.immutableEnumMap(imtMap); } @@ -477,14 +482,21 @@ public final class HazardExport { * Note that this is used when consolidating gmm result curves. Results may * not span the entire range of GMMs in a model so this is commonly a subset */ - // Consider Stream - private static Set<Gmm> gmmSet(Iterable<SourceSet<? extends Source>> sourceSets) { + + private static Set<Gmm> gmmSet(Collection<SourceTree> trees) { + return trees.stream() + .flatMap(tree -> tree.gmms().gmms().stream()) + .collect(toCollection(() -> EnumSet.noneOf(Gmm.class))); + } + + // Consider Stream (above) + private static Set<Gmm> gmmSet(Iterable<RuptureSet<? extends Source>> ruptureSets) { return Sets.immutableEnumSet( - FluentIterable.from(sourceSets).transformAndConcat( - new Function<SourceSet<? extends Source>, Set<Gmm>>() { + FluentIterable.from(ruptureSets).transformAndConcat( + new Function<RuptureSet<? extends Source>, Set<Gmm>>() { @Override - public Set<Gmm> apply(SourceSet<? extends Source> sourceSet) { - return sourceSet.groundMotionModels().gmms(); + public Set<Gmm> apply(RuptureSet<? extends Source> ruptures) { + return ruptures.groundMotionModels().gmms(); } }::apply)); } @@ -503,11 +515,11 @@ public final class HazardExport { }::apply); } - private static final Function<HazardCurveSet, SourceSet<? extends Source>> CURVE_SET_TO_SOURCE_SET = - new Function<HazardCurveSet, SourceSet<? extends Source>>() { + private static final Function<HazardCurveSet, RuptureSet<? extends Source>> CURVE_SET_TO_SOURCE_SET = + new Function<HazardCurveSet, RuptureSet<? extends Source>>() { @Override - public SourceSet<? extends Source> apply(HazardCurveSet curves) { - return curves.sourceSet; + public RuptureSet<? extends Source> apply(HazardCurveSet curves) { + return curves.ruptures; } }; diff --git a/src/main/java/gov/usgs/earthquake/nshmp/calc/SystemInputList.java b/src/main/java/gov/usgs/earthquake/nshmp/calc/SystemInputList.java index e74ff925..bd3c103f 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/calc/SystemInputList.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/calc/SystemInputList.java @@ -7,14 +7,14 @@ import java.util.BitSet; import java.util.List; import java.util.Set; -import gov.usgs.earthquake.nshmp.model.SystemSourceSet; +import gov.usgs.earthquake.nshmp.model.SystemRuptureSet; /** * A {@code List} of {@code HazardInput}s that contains a reference to the - * parent {@code SystemSourceSet} from which the inputs were derived. This + * parent {@code SystemRuptureSet} from which the inputs were derived. This * allows for downstream access to parent source properties. * - * <p>Presently, a {@code SystemSourceSet} consists of sources for which there + * <p>Presently, a {@code SystemRuptureSet} consists of sources for which there * is only a single rupture. 
Note that this could change in the future if some * magnitude variability were imposed on each source. * @@ -27,7 +27,7 @@ public final class SystemInputList extends InputList { * things we'd rather not expose. This isn't really meant to be a public * class. * - * package privacy - or move to SYstemSourceSet how to get back to parent to + * package privacy - or move to SystemRuptureSet how to get back to parent to * mine info; index? need index reference comment bitset array is going to be * reallocating because we don't know its size at creation time; using linked * list @@ -35,12 +35,12 @@ public final class SystemInputList extends InputList { * Well suited for a builder */ - final SystemSourceSet parent; + final SystemRuptureSet parent; final Set<Integer> sectionIndices; // ascending in rRup final List<BitSet> bitsets; // source/rupture bitsets public SystemInputList( - SystemSourceSet parent, + SystemRuptureSet parent, Set<Integer> sectionIndices) { this.parent = checkNotNull(parent); @@ -48,7 +48,7 @@ public final class SystemInputList extends InputList { this.bitsets = new ArrayList<>(); } - public static SystemInputList empty(SystemSourceSet parent) { + public static SystemInputList empty(SystemRuptureSet parent) { return new SystemInputList(parent, null); } diff --git a/src/main/java/gov/usgs/earthquake/nshmp/calc/Transforms.java b/src/main/java/gov/usgs/earthquake/nshmp/calc/Transforms.java index ac4d841b..8e9988b8 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/calc/Transforms.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/calc/Transforms.java @@ -1,5 +1,6 @@ package gov.usgs.earthquake.nshmp.calc; +import static com.google.common.base.Preconditions.checkState; import static com.google.common.util.concurrent.Futures.allAsList; import static com.google.common.util.concurrent.Futures.getUnchecked; import static com.google.common.util.concurrent.Futures.immediateFuture; @@ -28,16 +29,16 @@ import gov.usgs.earthquake.nshmp.gmm.GmmInput; import gov.usgs.earthquake.nshmp.gmm.GroundMotion; import gov.usgs.earthquake.nshmp.gmm.GroundMotionModel; import gov.usgs.earthquake.nshmp.gmm.Imt; +import gov.usgs.earthquake.nshmp.model.ClusterRuptureSet; import gov.usgs.earthquake.nshmp.model.ClusterSource; -import gov.usgs.earthquake.nshmp.model.ClusterSourceSet; import gov.usgs.earthquake.nshmp.model.Distance; -import gov.usgs.earthquake.nshmp.model.FaultSource; +import gov.usgs.earthquake.nshmp.model.FaultRuptureSet; import gov.usgs.earthquake.nshmp.model.GmmSet; import gov.usgs.earthquake.nshmp.model.HazardModel; import gov.usgs.earthquake.nshmp.model.Rupture; +import gov.usgs.earthquake.nshmp.model.RuptureSet; import gov.usgs.earthquake.nshmp.model.Source; -import gov.usgs.earthquake.nshmp.model.SourceSet; -import gov.usgs.earthquake.nshmp.model.SystemSourceSet; +import gov.usgs.earthquake.nshmp.model.SystemRuptureSet; import gov.usgs.earthquake.nshmp.tree.LogicTree; import gov.usgs.earthquake.nshmp.tree.Trees; @@ -218,7 +219,7 @@ final class Transforms { private final Function<GroundMotions, HazardCurves> groundMotionsToCurves; SourceToCurves( - SourceSet<? extends Source> sources, + RuptureSet<? extends Source> sources, CalcConfig config, Site site) { @@ -248,14 +249,14 @@ */ static final class CurveConsolidator implements Function<List<HazardCurves>, HazardCurveSet> { - private final SourceSet<? extends Source> sources; + private final RuptureSet<? extends Source> ruptures; private final Map<Imt, XySequence> modelCurves; CurveConsolidator( - SourceSet<?
extends Source> sources, + RuptureSet<? extends Source> ruptures, CalcConfig config) { - this.sources = sources; + this.ruptures = ruptures; this.modelCurves = config.hazard.logModelCurves(); } @@ -263,11 +264,11 @@ public HazardCurveSet apply(List<HazardCurves> curvesList) { if (curvesList.isEmpty()) { - return HazardCurveSet.empty(sources); + return HazardCurveSet.empty(ruptures); } HazardCurveSet.Builder curveSetBuilder = HazardCurveSet.builder( - sources, + ruptures, modelCurves); for (HazardCurves curves : curvesList) { @@ -280,22 +281,22 @@ /* * A note on system sources: * - * SystemSourceSets contain many single sources and the functions here handle + * SystemRuptureSets contain many single sources and the functions here handle * them collectively. Rather than creating lists of input lists for each * source, one large input list is created. This may change if it is decided * to apply a magnitude distribution on each system source. This motivated - * reordering of the SourceType enum such that SystemSourceSets are processed + * reordering of the SourceType enum such that SystemRuptureSets are processed * first and granted a thread early in the calculation process. */ /* - * SYSTEM: SystemSourceSet --> HazardCurveSet + * SYSTEM: SystemRuptureSet --> HazardCurveSet * * Compute hazard curves for system sources. This function derives all inputs - * for an entire SystemSourceSet before being composing them with standard + * for an entire SystemRuptureSet before composing them with standard * ground motion and hazard curve functions. */ - static final class SystemToCurves implements Function<SystemSourceSet, HazardCurveSet> { + static final class SystemToCurves implements Function<SystemRuptureSet, HazardCurveSet> { private final Site site; private final CalcConfig config; @@ -306,14 +307,14 @@ } @Override - public HazardCurveSet apply(SystemSourceSet sources) { + public HazardCurveSet apply(SystemRuptureSet ruptures) { - InputList inputs = SystemSourceSet.toInputsFunction(site).apply(sources); + InputList inputs = SystemRuptureSet.toInputsFunction(site).apply(ruptures); if (inputs.isEmpty()) { - return HazardCurveSet.empty(sources); + return HazardCurveSet.empty(ruptures); } - GmmSet gmmSet = sources.groundMotionModels(); + GmmSet gmmSet = ruptures.groundMotionModels(); Map<Imt, Map<Gmm, GroundMotionModel>> gmmTable = instances( config.hazard.imts, gmmSet.gmms()); @@ -324,19 +325,21 @@ Function<GroundMotions, HazardCurves> gmToCurves = new GroundMotionsToCurves(config); HazardCurves curves = gmToCurves.apply(gms); - CurveConsolidator consolidator = new CurveConsolidator(sources, config); + CurveConsolidator consolidator = new CurveConsolidator( + ruptures, config); return consolidator.apply(List.of(curves)); } } /* - * SYSTEM: SystemSourceSet --> HazardCurveSet + * SYSTEM: SystemRuptureSet --> HazardCurveSet * * Compute hazard curves for system sources concurrently. This function - * derives all inputs for an entire SystemSourceSet and partitions them before - * composing them with standard ground motion and hazard curve functions. + * derives all inputs for an entire SystemRuptureSet and partitions them + * before composing them with standard ground motion and hazard curve + * functions.
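[Editor's note] The concurrent variant described above derives one master input list, partitions it, fans the partitions out to an executor, and recombines the partial results. A sketch of that partition-and-combine pattern under simplified types (the actual code partitions an InputList and combines HazardCurves; nothing below is from the patch itself):

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;

class PartitionedCalc {

  /* Partition the master list, compute partials concurrently, combine. */
  static double computeAll(
      List<Double> master,
      int partitionSize,
      ExecutorService ex) throws InterruptedException, ExecutionException {

    List<Future<Double>> partials = new ArrayList<>();
    for (int i = 0; i < master.size(); i += partitionSize) {
      List<Double> partition =
          master.subList(i, Math.min(i + partitionSize, master.size()));
      partials.add(ex.submit(() -> partialSum(partition))); // one task per partition
    }
    double total = 0.0;
    for (Future<Double> partial : partials) {
      total += partial.get(); // block and combine
    }
    return total;
  }

  private static double partialSum(List<Double> partition) {
    double sum = 0.0;
    for (double value : partition) {
      sum += value;
    }
    return sum;
  }
}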
*/ - static final class ParallelSystemToCurves implements Function<SystemSourceSet, HazardCurveSet> { + static final class ParallelSystemToCurves implements Function<SystemRuptureSet, HazardCurveSet> { private final Site site; private final Executor ex; @@ -353,16 +356,16 @@ final class Transforms { } @Override - public HazardCurveSet apply(SystemSourceSet sources) { + public HazardCurveSet apply(SystemRuptureSet ruptures) { // create input list - InputList master = SystemSourceSet.toInputsFunction(site).apply(sources); + InputList master = SystemRuptureSet.toInputsFunction(site).apply(ruptures); if (master.isEmpty()) { - return HazardCurveSet.empty(sources); + return HazardCurveSet.empty(ruptures); } // calculate curves from list in parallel - InputsToCurves inputsToCurves = new InputsToCurves(sources, config); + InputsToCurves inputsToCurves = new InputsToCurves(ruptures, config); AsyncList<HazardCurves> asyncCurvesList = AsyncList.create(); int size = config.performance.systemPartition; for (InputList partition : master.partition(size)) { @@ -375,7 +378,8 @@ final class Transforms { // combine and consolidate HazardCurves hazardCurves = HazardCurves.combine(master, curvesList); - CurveConsolidator consolidator = new CurveConsolidator(sources, config); + CurveConsolidator consolidator = new CurveConsolidator( + ruptures, config); return consolidator.apply(List.of(hazardCurves)); } @@ -394,7 +398,7 @@ final class Transforms { private final Function<GroundMotions, HazardCurves> groundMotionsToCurves; InputsToCurves( - SourceSet<? extends Source> sources, + RuptureSet<? extends Source> sources, CalcConfig config) { GmmSet gmmSet = sources.groundMotionModels(); @@ -428,8 +432,9 @@ final class Transforms { @Override public ClusterInputs apply(ClusterSource clusterSource) { ClusterInputs clusterInputs = new ClusterInputs(clusterSource); - for (FaultSource faultSource : clusterSource.faults()) { - clusterInputs.add(transform.apply(faultSource)); + for (FaultRuptureSet frs : clusterSource.faults()) { + checkState(frs.size() == 1); + clusterInputs.add(transform.apply(frs.iterator().next())); } return clusterInputs; } @@ -611,7 +616,7 @@ final class Transforms { private static MutableXySequence collapse(LogicTree<MutableXySequence> tree) { MutableXySequence collapsed = MutableXySequence.emptyCopyOf(tree.get(0).value()); - tree.forEach(b -> collapsed.add(b.value().multiply(b.weight()))); + tree.forEach(br -> collapsed.add(br.value().multiply(br.weight()))); return collapsed; } @@ -628,11 +633,11 @@ final class Transforms { private final Function<ClusterGroundMotions, ClusterCurves> groundMotionsToCurves; ClusterToCurves( - ClusterSourceSet sources, + ClusterRuptureSet ruptures, CalcConfig config, Site site) { - Set<Gmm> gmms = sources.groundMotionModels().gmms(); + Set<Gmm> gmms = ruptures.groundMotionModels().gmms(); Map<Imt, Map<Gmm, GroundMotionModel>> gmmTable = instances(config.hazard.imts, gmms); this.sourceToInputs = new ClusterSourceToInputs(site); @@ -656,14 +661,14 @@ final class Transforms { static final class ClusterCurveConsolidator implements Function<List<ClusterCurves>, HazardCurveSet> { - private final ClusterSourceSet sources; + private final ClusterRuptureSet ruptures; private final Map<Imt, XySequence> modelCurves; ClusterCurveConsolidator( - ClusterSourceSet sources, + ClusterRuptureSet ruptures, CalcConfig config) { - this.sources = sources; + this.ruptures = ruptures; this.modelCurves = config.hazard.logModelCurves(); } @@ -671,11 +676,11 @@ final class Transforms { public 
HazardCurveSet apply(List<ClusterCurves> curvesList) { if (curvesList.isEmpty()) { - return HazardCurveSet.empty(sources); + return HazardCurveSet.empty(ruptures); } HazardCurveSet.Builder curveSetBuilder = HazardCurveSet.builder( - sources, + ruptures, modelCurves); for (ClusterCurves curves : curvesList) { diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/AbstractSourceSet.java b/src/main/java/gov/usgs/earthquake/nshmp/model/AbstractRuptureSet.java similarity index 59% rename from src/main/java/gov/usgs/earthquake/nshmp/model/AbstractSourceSet.java rename to src/main/java/gov/usgs/earthquake/nshmp/model/AbstractRuptureSet.java index ec02ea47..baad4b67 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/AbstractSourceSet.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/model/AbstractRuptureSet.java @@ -1,55 +1,48 @@ package gov.usgs.earthquake.nshmp.model; +import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.Preconditions.checkState; import static com.google.common.base.Strings.padEnd; import static gov.usgs.earthquake.nshmp.Text.checkName; -import static gov.usgs.earthquake.nshmp.data.DoubleData.checkWeight; import java.util.function.Predicate; import com.google.common.collect.FluentIterable; -import com.google.common.collect.Ordering; import gov.usgs.earthquake.nshmp.geo.Location; /** - * Skeletal {@code SourceSet} implementation. + * Skeletal {@code RuptureSet} implementation. * * @author U.S. Geological Survey */ -abstract class AbstractSourceSet<T extends Source> implements SourceSet<T> { +abstract class AbstractRuptureSet<T extends Source> implements RuptureSet<T> { private final String name; private final int id; private final SourceType type; private final TectonicSetting setting; - @Deprecated - private final double weight; private final GmmSet gmmSet; - AbstractSourceSet(Builder builder) { + private Double weight; + + AbstractRuptureSet(Builder builder) { this.name = builder.name; this.id = builder.id; this.type = builder.type; this.setting = builder.setting; - this.weight = builder.weight; this.gmmSet = builder.gmmSet; } - @Override - public int compareTo(SourceSet<T> other) { - return Ordering.natural().compare(this.name(), other.name()); - } - @Override public String name() { return name; } @Override - public SourceType type() { - return type; + public int id() { + return id; } @Override @@ -58,29 +51,39 @@ abstract class AbstractSourceSet<T extends Source> implements SourceSet<T> { } @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append(" Id: ").append(padEnd(Integer.toString(id), 8, ' ')); - sb.append("Name: ").append(padEnd(name(), 44, ' ')); - sb.append("Size: ").append(padEnd(Integer.toString(size()), 8, ' ')); - sb.append("Weight: ").append(padEnd(Double.toString(weight), 12, ' ')); - return sb.toString(); + public SourceType type() { + return type; } @Override - public int id() { - return id; + public GmmSet groundMotionModels() { + return gmmSet; + } + + /* + * Ideally this can be removed in time but with the current calculation + * pipeline we need to be able to carry a source tree branch/leaf weight with + * each rupture set. We only allow the boxed non-final weight value to be set + * once. We don't know the source tree leaf weights until after the full tree + * has been built. 
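[Editor's note] A compact sketch of the set-once idiom this comment describes: the boxed Double distinguishes "unset" (null) from any valid weight value. The state check on read is an editorial addition; the patch assumes the weight has been set during source tree construction before it is read:

import static com.google.common.base.Preconditions.checkState;

class LeafWeight {

  private Double weight; // null until the full source tree has been built

  /* May only be called once. */
  void set(double weight) {
    checkState(this.weight == null, "weight already set");
    this.weight = weight;
  }

  double get() {
    checkState(this.weight != null, "weight not set");
    return weight; // unboxing is safe after the state check
  }
}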
+ */ + void setLeafWeight(double weight) { + // only allow this to be set once + checkState(this.weight == null); + this.weight = weight; } @Override - @Deprecated public double weight() { return weight; } @Override - public GmmSet groundMotionModels() { - return gmmSet; + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(id + " " + padEnd(name(), 56, ' ')); + sb.append(padEnd("size: " + size(), 14, ' ')); + return sb.toString(); } @Override @@ -102,7 +105,6 @@ abstract class AbstractSourceSet<T extends Source> implements SourceSet<T> { Integer id; SourceType type; TectonicSetting setting; - Double weight; GmmSet gmmSet; Builder(SourceType type) { @@ -110,23 +112,18 @@ abstract class AbstractSourceSet<T extends Source> implements SourceSet<T> { } Builder name(String name) { - this.name = checkName(name, "SourceSet"); + this.name = checkName(name, "RuptureSet"); return this; } Builder id(int id) { + checkArgument(id > 0); this.id = id; return this; } Builder setting(TectonicSetting setting) { - this.setting = setting; - return this; - } - - @Deprecated - Builder weight(double weight) { - this.weight = checkWeight(weight); + this.setting = checkNotNull(setting); return this; } @@ -135,14 +132,13 @@ abstract class AbstractSourceSet<T extends Source> implements SourceSet<T> { return this; } - void validateState(String buildId) { - checkState(!built, "This %s instance has already been used", buildId); - checkState(name != null, "%s name not set", buildId); - checkState(id != null, "%s id not set", buildId); - checkState(type != null, "%s source type not set", buildId); - checkState(setting != null, "%s tectonic setting not set", buildId); - checkState(weight != null, "%s weight not set", buildId); - checkState(gmmSet != null, "%s ground motion models not set", buildId); + void validateState(String label) { + checkState(!built, "Single use builder"); + checkNotNull(name, "%s name", label); + checkNotNull(id, "%s id", label); + checkNotNull(type, "%s source type", label); + checkNotNull(setting, "%s tectonic setting", label); + checkNotNull(gmmSet, "%s ground motion models", label); built = true; } } diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/ClusterRuptureSet.java b/src/main/java/gov/usgs/earthquake/nshmp/model/ClusterRuptureSet.java index 264ca9c3..fa5f562c 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/ClusterRuptureSet.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/model/ClusterRuptureSet.java @@ -2,26 +2,21 @@ package gov.usgs.earthquake.nshmp.model; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.Preconditions.checkState; -import static com.google.common.base.StandardSystemProperty.LINE_SEPARATOR; -import static gov.usgs.earthquake.nshmp.Text.checkName; -import static gov.usgs.earthquake.nshmp.model.SourceType.FAULT_CLUSTER; -import java.util.ArrayList; +import java.util.Iterator; import java.util.List; -import java.util.Map; +import java.util.function.Predicate; import gov.usgs.earthquake.nshmp.geo.Location; -import gov.usgs.earthquake.nshmp.geo.LocationList; -import gov.usgs.earthquake.nshmp.geo.Locations; import gov.usgs.earthquake.nshmp.mfd.Mfd; import gov.usgs.earthquake.nshmp.tree.LogicTree; /** * Cluster source representation. Each cluster source wraps a - * {@code FaultSourceSet} containing one or more fault representations that + * {@code FaultRuptureSet} containing one or more fault representations that * rupture as independent events but with a similar rate. 
For example, at New - Madrid, each ClusterSourceSet has 5 ClusterSources, one for each position - variant of the model. For each position variant there is one FaultSourceSet + Madrid, each ClusterRuptureSet has 5 ClusterSources, one for each position + variant of the model. For each position variant there is one FaultRuptureSet * containing the FaultSources in the cluster, each of which may have one, or * more, magnitude or other variants represented by its internal list of * {@code Mfd}s. @@ -29,55 +24,58 @@ import gov.usgs.earthquake.nshmp.tree.LogicTree; * <p>Cluster source hazard is calculated from the joint probabilities of ground * motions from the wrapped faults. * - * <p>Unlike other {@code Source}s whose weights are carried exclusively with - * their associated {@link Mfd}, {@code ClusterSource}s carry an additional - * {@link #weight()} value. - * * <p>A {@code ClusterSource} cannot be created directly; it may only be created * by a private parser. * * @author U.S. Geological Survey */ -public class ClusterRuptureSet implements RuptureSet { - - final String name; - final int id; +public class ClusterRuptureSet extends AbstractRuptureSet<ClusterSource> { - final ModelData data; // holds rate tree (numeric values) - final List<FaultRuptureSet> faultRuptureSets; + final ModelData data; // holds cluster rate tree + private final List<ClusterSource> source; private ClusterRuptureSet(Builder builder) { - this.name = builder.name; - this.id = builder.id; + super(builder); this.data = builder.data; - this.faultRuptureSets = builder.faultRuptureSets; + this.source = List.of(builder.source); } @Override - public String name() { - return name; + public int size() { + return 1; // faultRuptureSets.size(); } @Override - public int id() { - return id; + public LogicTree<Mfd> mfdTree() { + System.err.println("TODO Cluster MFD tree implementation"); + // only returning first cluster source + throw new UnsupportedOperationException(); + // return source.mfdTree; } @Override - public int size() { - return faultRuptureSets.size(); + @Deprecated // this is problematic as we are not getting the rate weight + public Iterator<ClusterSource> iterator() { + return source.iterator(); } @Override - public SourceType type() { - return FAULT_CLUSTER; - } + public Predicate<ClusterSource> distanceFilter( + final Location loc, + final double distance) { - @Override - public LogicTree<Mfd> mfdTree() { - System.err.println("TODO Cluster MFD tree implementation"); - // only returning first cluster source - return faultRuptureSets.get(0).mfdTree; + return new Predicate<ClusterSource>() { + + private final Predicate<FaultSource> filter = + new FaultRuptureSet.DistanceFilter(loc, distance); + + @Override + public boolean test(ClusterSource cs) { + return cs.faults().stream() + .map(frs -> frs.iterator().next()) + .anyMatch(filter); + } + }; } /** @@ -86,45 +84,7 @@ public class ClusterRuptureSet { */ @Override public Location location(Location site) { - LocationList.Builder locs = LocationList.builder(); - for (FaultRuptureSet fault : faultRuptureSets) { - locs.add(fault.location(site)); - } - return Locations.closestPoint(site, locs.build()); - } - - /** - * The weight applicable to this {@code ClusterSource}. - */ - public double weight() { - throw new UnsupportedOperationException(); // was returning 1.0 - } - - /** - * The {@code FaultSourceSet} of all {@code FaultSource}s that participate in - * this cluster.
- */ - public FaultSourceSet faults() { - throw new UnsupportedOperationException(); // no longer needed? - // return null; // needed by rate calculation - } - - @Override - public String toString() { - Map<String, ?> data = Map.of( - "name", name(), - "weight", weight()); - StringBuilder sb = new StringBuilder() - .append(getClass().getSimpleName()) - .append(" ") - .append(data) - .append(LINE_SEPARATOR.value()); - for (FaultRuptureSet frs : faultRuptureSets) { - sb.append(" ") - .append(frs.toString()) - .append(LINE_SEPARATOR.value()); - } - return sb.toString(); + return source.get(0).location(site); } static Builder builder() { @@ -132,55 +92,84 @@ public class ClusterRuptureSet implements RuptureSet { } /* Single use builder */ - static class Builder { - - boolean built = false; + static class Builder extends AbstractRuptureSet.Builder { private ModelData data; // holds cluster rate-tree - private String name; - private Integer id; + private ClusterSource source; - List<FaultRuptureSet> faultRuptureSets = new ArrayList<>(); + Builder() { + super(SourceType.FAULT_CLUSTER); + } + @Override Builder name(String name) { - this.name = checkName(name, "ClusterRuptureSet"); + super.name(name); return this; } + @Override Builder id(int id) { - this.id = id; + super.id(id); return this; } /* Set MFD helper data. */ Builder modelData(ModelData data) { - this.data = data; + this.data = checkNotNull(data); + super.setting(data.tectonicSetting()); + super.gmms(data.gmms()); return this; } - Builder addRuptureSet(FaultRuptureSet ruptureSet) { - if (faultRuptureSets.size() > 1) { - ModelTrees.checkTreeIdsAndWeights( - faultRuptureSets.get(0).mfdTree, - ruptureSet.mfdTree); - } - faultRuptureSets.add(ruptureSet); + Builder setSource(ClusterSource source) { + checkState(this.source == null); + this.source = checkNotNull(source); return this; } - void validateState(String label) { - checkState(!built, "Single use builder"); - checkNotNull(name, "%s name", label); - checkNotNull(id, "%s id", label); + void validateAndInit(String label) { + validateState(label); checkNotNull(data, "%s model data", label); - checkState(faultRuptureSets.size() > 0); - built = true; + checkNotNull(source, "%s cluster source", label); } ClusterRuptureSet build() { - validateState("ClusterSource.Builder"); + validateAndInit("ClusterRuptureSet.Builder"); return new ClusterRuptureSet(this); } } + /* + * Data container class used during deserialization. ModelLoader uses these + * data to initialize multiple cluster sources having identical fault sources + * but scaled to unique rates. 
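[Editor's note] A sketch of the expansion the Data container enables: the loader reuses one set of fault rupture sets and stamps out a cluster source per rate branch, so geometry is shared and only rates differ. Types below are simplified stand-ins; the actual logic lives in ModelLoader and uses Mfd.Builder scaling:

import java.util.ArrayList;
import java.util.List;

class ClusterExpansion {

  /*
   * Scale a shared reference MFD (annual rates) by each branch rate; every
   * branch gets an identically shaped copy with its own unique rate.
   */
  static List<double[]> expand(double[] referenceRates, List<Double> branchRates) {
    List<double[]> scaled = new ArrayList<>();
    for (double rate : branchRates) {
      double[] copy = referenceRates.clone();
      for (int i = 0; i < copy.length; i++) {
        copy[i] *= rate;
      }
      scaled.add(copy);
    }
    return scaled;
  }
}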
+ */ + static class Data { + + final String name; + final int id; + final ModelData data; + final List<FaultRuptureSet> faultRuptureSets; + + Data( + String name, + int id, + ModelData data, + List<FaultRuptureSet> faultRuptureSets) { + + if (faultRuptureSets.size() > 1) { + for (int i = 1; i < faultRuptureSets.size(); i++) { + ModelTrees.checkTreeIdsAndWeights( + faultRuptureSets.get(0).mfdTree, + faultRuptureSets.get(i).mfdTree); + } + } + + this.name = name; + this.id = id; + this.data = data; + this.faultRuptureSets = faultRuptureSets; + } + } + } diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/ClusterSource.java b/src/main/java/gov/usgs/earthquake/nshmp/model/ClusterSource.java index 7b015bf3..91659323 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/ClusterSource.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/model/ClusterSource.java @@ -18,10 +18,10 @@ import gov.usgs.earthquake.nshmp.mfd.Mfd; /** * Cluster source representation. Each cluster source wraps a - * {@code FaultSourceSet} containing one or more fault representations that + * {@code FaultRuptureSet} containing one or more fault representations that * rupture as independent events but with a similar rate. For example, at New - * Madrid, each ClusterSourceSet has 5 ClusterSources, one for each position - * variant of the model. For each position variant there is one FaultSourceSet + * Madrid, each ClusterRuptureSet has 5 ClusterSources, one for each position + * variant of the model. For each position variant there is one FaultRuptureSet * containing the FaultSources in the cluster, each of which may have one, or * more, magnitude or other variants represented by its internal list of * {@code IncrementalMfd}s. @@ -42,19 +42,21 @@ import gov.usgs.earthquake.nshmp.mfd.Mfd; */ public class ClusterSource implements Source { - // the weight of ClusterSource is stored in the nested FSS - + final String name; + final int id; final double rate; - final FaultSourceSet faults; + final List<FaultRuptureSet> faults; - ClusterSource(double rate, FaultSourceSet faults) { - this.rate = rate; - this.faults = faults; + ClusterSource(Builder builder) { + this.name = builder.name; + this.id = builder.id; + this.rate = builder.rate; + this.faults = builder.faults; } @Override public String name() { - return faults.name(); + return name; } @Override @@ -64,7 +66,7 @@ public class ClusterSource implements Source { @Override public int id() { - return faults.id(); + return id; // same as cluster rupture set id } @Override @@ -79,7 +81,7 @@ public class ClusterSource implements Source { @Override public Location location(Location site) { LocationList.Builder locs = LocationList.builder(); - for (FaultSource fault : faults) { + for (FaultRuptureSet fault : faults) { locs.add(fault.location(site)); } return Locations.closestPoint(site, locs.build()); @@ -87,12 +89,20 @@ public class ClusterSource implements Source { @Override public List<Mfd> mfds() { + // TODO this is needed for rate calcs and this + // may be missing application of weights List<Mfd> xyMfds = new ArrayList<>(); - for (FaultSource fault : faults) { - for (Mfd mfd : fault.mfds()) { + for (FaultRuptureSet frs : faults) { + checkState(frs.size() == 1); // TODO bad assumption of single source + for (Mfd mfd : frs.iterator().next().mfds()) { xyMfds.add(Mfd.Builder.from(mfd).scale(rate).build()); } } + // for (FaultSource fault : faults) { + // for (Mfd mfd : fault.mfds()) { + // xyMfds.add(Mfd.Builder.from(mfd).scale(rate).build()); + // } + // } return 
List.copyOf(xyMfds); } @@ -105,17 +115,10 @@ } /** - * The weight applicable to this {@code ClusterSource}. - */ - public double weight() { - return faults.weight(); - } - - /** - * The {@code FaultSourceSet} of all {@code FaultSource}s that participate in + * The {@code FaultRuptureSet}s of all {@code FaultSource}s that participate in * this cluster. */ - public FaultSourceSet faults() { + public List<FaultRuptureSet> faults() { return faults; } @@ -132,16 +135,15 @@ public String toString() { Map<String, ?> data = Map.of( "name", name(), - "rate", rate(), - "weight", weight()); + "rate", rate()); StringBuilder sb = new StringBuilder() .append(getClass().getSimpleName()) .append(" ") .append(data) .append(LINE_SEPARATOR.value()); - for (FaultSource fs : faults) { + for (FaultRuptureSet frs : faults) { sb.append(" ") - .append(fs.toString()) + .append(frs.toString()) .append(LINE_SEPARATOR.value()); } return sb.toString(); } /* Single use builder */ static class Builder { - static final String ID = "ClusterSource.Builder"; boolean built = false; String name; Integer id; Double rate; - FaultSourceSet faults; + List<FaultRuptureSet> faults; Builder name(String name) { this.name = checkName(name, "ClusterSource"); @@ -174,25 +175,25 @@ return this; } - Builder faults(FaultSourceSet faults) { + Builder faults(List<FaultRuptureSet> faults) { checkState(checkNotNull(faults, "Fault source set is null").size() > 0, "Fault source set is empty"); this.faults = faults; return this; } - void validateState(String source) { - checkState(!built, "This %s instance as already been used", source); - checkNotNull(name); - checkNotNull(id); - checkState(rate != null, "%s rate not set", source); - checkState(faults != null, "%s has no fault sources", source); + void validateState(String label) { + checkState(!built, "Single use builder"); + checkNotNull(name, "%s name", label); + checkNotNull(id, "%s id", label); + checkNotNull(rate, "%s rate", label); + checkNotNull(faults, "%s fault rupture sets", label); built = true; } ClusterSource buildClusterSource() { - validateState(ID); - return new ClusterSource(rate, faults); + validateState("ClusterSource.Builder"); + return new ClusterSource(this); } } diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/ClusterSourceSet.java b/src/main/java/gov/usgs/earthquake/nshmp/model/ClusterSourceSet.java deleted file mode 100644 index fc0cef72..00000000 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/ClusterSourceSet.java +++ /dev/null @@ -1,90 +0,0 @@ -package gov.usgs.earthquake.nshmp.model; - -import static com.google.common.base.Preconditions.checkNotNull; - import static com.google.common.base.Preconditions.checkState; - -import java.util.Iterator; -import java.util.List; -import java.util.function.Predicate; - -import com.google.common.collect.Iterables; -import com.google.common.collect.Lists; - -import gov.usgs.earthquake.nshmp.geo.Location; - -/** - * Container class for related {@link ClusterRuptureSet}s. - * - * @author U.S.
Geological Survey - * @see ClusterRuptureSet - */ -public class ClusterSourceSet extends AbstractSourceSet<ClusterSource> { - - private final List<ClusterSource> sources; - - private ClusterSourceSet(Builder builder) { - super(builder); - this.sources = builder.sources; - } - - @Override - public Iterator<ClusterSource> iterator() { - return sources.iterator(); - } - - @Override - public int size() { - return sources.size(); - } - - @Override - public Predicate<ClusterSource> distanceFilter( - final Location loc, - final double distance) { - - return new Predicate<ClusterSource>() { - - private final Predicate<FaultSource> filter = - new FaultSourceSet.DistanceFilter(loc, distance); - - @Override - public boolean test(ClusterSource cs) { - return Iterables.any(cs.faults, filter::test); - } - - @Override - public String toString() { - return "ClusterSourceSet.DistanceFilter [ " + filter.toString() + " ]"; - } - }; - } - - /* Single use builder */ - static class Builder extends AbstractSourceSet.Builder { - - private static final String ID = "ClusterSourceSet.Builder"; - - private final List<ClusterSource> sources = Lists.newArrayList(); - - Builder() { - super(SourceType.FAULT_CLUSTER); - } - - Builder source(ClusterSource source) { - sources.add(checkNotNull(source, "ClusterSource is null")); - return this; - } - - @Override - void validateState(String buildId) { - super.validateState(buildId); - checkState(sources.size() > 0, "%s source list is empty", buildId); - } - - ClusterSourceSet buildClusterSet() { - validateState(ID); - return new ClusterSourceSet(this); - } - } - -} diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/Deserialize.java b/src/main/java/gov/usgs/earthquake/nshmp/model/Deserialize.java index ca4008c3..c227d512 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/Deserialize.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/model/Deserialize.java @@ -278,9 +278,9 @@ class Deserialize { } /* Create a grid rupture set. */ - static List<RuptureSet> gridRuptureSets(Path json, ModelData data) { + static List<GridRuptureSet> gridRuptureSets(Path json, ModelData data) { JsonArray ruptureSets = jsonArray(json); - List<RuptureSet> rsList = new ArrayList<>(); + List<GridRuptureSet> rsList = new ArrayList<>(); for (JsonElement ruptureSet : ruptureSets) { JsonObject obj = ruptureSet.getAsJsonObject(); String gridFile = data.rateTree().isPresent() ? SPATIAL_PDF : GRID_MFDS; @@ -297,9 +297,9 @@ class Deserialize { } /* Create a slab rupture set. */ - static List<RuptureSet> slabRuptureSets(Path json, ModelData data) { + static List<SlabRuptureSet> slabRuptureSets(Path json, ModelData data) { JsonArray ruptureSets = jsonArray(json); - List<RuptureSet> rsList = new ArrayList<>(); + List<SlabRuptureSet> rsList = new ArrayList<>(); for (JsonElement ruptureSet : ruptureSets) { JsonObject obj = ruptureSet.getAsJsonObject(); String gridFile = data.rateTree().isPresent() ? 
SPATIAL_PDF : GRID_MFDS; @@ -344,9 +344,9 @@ class Deserialize { JsonObject obj = jsonObject(json); return SystemRuptureSet.builder() - .modelData(data) .name(obj.get(NAME).getAsString()) .id(obj.get(ID).getAsInt()) + .modelData(data) .sections(readSystemFeatures(parent).orElseThrow()) .ruptures(checkRupturesFile(parent).orElseThrow()) .build(); @@ -369,9 +369,9 @@ class Deserialize { int id = obj.get(ID).getAsInt(); FaultRuptureSet.Builder ruptureSet = FaultRuptureSet.builder() - .modelData(data) .name(obj.get(NAME).getAsString()) .id(id) + .modelData(data) .mfdTree(mfdTree(obj, data)); /* Optional 'sections' array. */ @@ -392,28 +392,28 @@ class Deserialize { return ruptureSet.build(); } - /* Create a rupture set, initialized with data from file. */ - static ClusterRuptureSet clusterRuptureSet( + /* Create a cluster rupture set, initialized with data from file. */ + static ClusterRuptureSet.Data clusterRuptureSetData( Path json, ModelData data) { JsonObject obj = jsonObject(json); - ClusterRuptureSet.Builder clusterSet = ClusterRuptureSet.builder() - .modelData(data); - - clusterSet.name(obj.get(NAME).getAsString()); - clusterSet.id(obj.get(ID).getAsInt()); /* Set cluster flag so faultRuptureSet MFDs are built correctly. */ data.clusterModel(); + List<FaultRuptureSet> faultRuptureSets = new ArrayList<>(); JsonArray ruptures = obj.get(RUPTURE_SETS).getAsJsonArray(); for (JsonElement rupture : ruptures) { FaultRuptureSet ruptureSet = faultRuptureSet(rupture.getAsJsonObject(), data); - clusterSet.addRuptureSet(ruptureSet); + faultRuptureSets.add(ruptureSet); } - return clusterSet.build(); + return new ClusterRuptureSet.Data( + obj.get(NAME).getAsString(), + obj.get(ID).getAsInt(), + data, + faultRuptureSets); } /* Convenience method to use Deserialize.GSON instance external to class. */ diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/FaultRuptureSet.java b/src/main/java/gov/usgs/earthquake/nshmp/model/FaultRuptureSet.java index 4d73dcb3..b51ca5cc 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/FaultRuptureSet.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/model/FaultRuptureSet.java @@ -6,18 +6,19 @@ import static com.google.common.base.Preconditions.checkState; import static gov.usgs.earthquake.nshmp.model.Deserialize.MFD_TREE; import static gov.usgs.earthquake.nshmp.model.Deserialize.RATE_TREE; import static gov.usgs.earthquake.nshmp.model.RateType.RECURRENCE; -import static gov.usgs.earthquake.nshmp.model.SourceType.FAULT; import static java.lang.Math.log10; import static java.util.stream.Collectors.collectingAndThen; import static java.util.stream.Collectors.toList; import java.math.RoundingMode; +import java.util.Iterator; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.OptionalDouble; import java.util.Set; +import java.util.function.Predicate; import java.util.stream.Stream; import gov.usgs.earthquake.nshmp.Earthquakes; @@ -43,7 +44,7 @@ import gov.usgs.earthquake.nshmp.tree.LogicTree; * * @author U.S. Geological Survey */ -public class FaultRuptureSet implements RuptureSet { +public class FaultRuptureSet extends AbstractRuptureSet<FaultSource> { /* * Developer notes: @@ -75,8 +76,10 @@ public class FaultRuptureSet implements RuptureSet { final Mfd mfdTotal; final List<Integer> sectionIds; // reference: actually needed? 
+ private final List<FaultSource> source; // single source list FaultRuptureSet(Builder builder) { + super(builder); this.feature = builder.feature; this.data = builder.data; @@ -84,31 +87,51 @@ public class FaultRuptureSet implements RuptureSet { this.mfdTotal = builder.mfdTotal; this.sectionIds = builder.sectionIds; + this.source = List.of(createSource()); } - @Override - public String name() { - return feature.name; + private FaultSource createSource() { + return new FaultSource.Builder() + .feature(feature) + .config(data.sourceConfig().asFault()) + .mfdTree(mfdTree) + .mfdTotal(mfdTotal) + .build(); } @Override - public int id() { - return feature.id; + public Iterator<FaultSource> iterator() { + return source.iterator(); } @Override public int size() { - throw new UnsupportedOperationException(); + return source.size(); // TODO should this be # ruptures } @Override - public SourceType type() { - return FAULT; + public LogicTree<Mfd> mfdTree() { + return mfdTree; } @Override - public LogicTree<Mfd> mfdTree() { - return mfdTree; + public Predicate<FaultSource> distanceFilter(Location loc, double distance) { + return new DistanceFilter(loc, distance); + } + + /* Not inlined for use by cluster sources */ + static class DistanceFilter implements Predicate<FaultSource> { + private final Predicate<Location> filter; + + DistanceFilter(Location loc, double distance) { + filter = Locations.distanceFilter(loc, distance); + } + + @Override + public boolean test(FaultSource source) { + return filter.test(source.feature.trace.first()) || + filter.test(source.feature.trace.last()); + } } /** @@ -120,22 +143,12 @@ public class FaultRuptureSet implements RuptureSet { return Locations.closestPoint(site, feature.trace); } - @Override - public String toString() { - return getClass().getSimpleName() + " " + Map.of( - "name", name(), - "id", id(), - "feature", feature.source.toJson()); - } - static Builder builder() { return new Builder(); } /* Single use builder */ - static class Builder { - - private boolean built = false; + static class Builder extends AbstractRuptureSet.Builder { private SourceFeature.NshmFault feature; private ModelData data; @@ -146,18 +159,22 @@ public class FaultRuptureSet implements RuptureSet { private LogicTree<Mfd> mfdTree; private Mfd mfdTotal; - private String name; - private Integer id; private List<Integer> sectionIds; private Optional<Map<String, ?>> ruptureProps = Optional.empty(); + Builder() { + super(SourceType.FAULT); + } + + @Override Builder name(String name) { - this.name = name; + super.name(name); return this; } + @Override Builder id(int id) { - this.id = id; + super.id(id); return this; } @@ -181,6 +198,8 @@ public class FaultRuptureSet implements RuptureSet { /* Set MFD helper data. 
*/ Builder modelData(ModelData data) { this.data = checkNotNull(data); + super.setting(data.tectonicSetting()); + super.gmms(data.gmms()); return this; } @@ -190,9 +209,8 @@ public class FaultRuptureSet implements RuptureSet { } private void validateAndInit(String label) { - checkState(!built, "Single use builder"); - checkNotNull(name, "%s name", label); - checkNotNull(id, "%s id", label); + validateState(label); + checkNotNull(data, "%s model data", label); checkNotNull(sectionIds, "%s section id list", label); checkNotNull(mfdPropsTree, "%s MFD logic tree", label); @@ -223,7 +241,6 @@ public class FaultRuptureSet implements RuptureSet { mfdTree = createMfdTree(mfdPropsTree, moRateTree, mfdConfig); mfdTotal = ModelTrees.reduceMfdTree(mfdTree); - built = true; } private SourceFeature.NshmFault createFeature() { diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/FaultSource.java b/src/main/java/gov/usgs/earthquake/nshmp/model/FaultSource.java index a3769b81..60aacf53 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/FaultSource.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/model/FaultSource.java @@ -51,7 +51,6 @@ public class FaultSource implements Source { final GriddedSurface surface; final LogicTree<Mfd> mfdTree; - final double branchWeight; // rupture set branch weight private final List<Mfd> scaledMfds; // includes ruptureSet weight private final List<List<Rupture>> ruptureLists; // 1:1 with Mfds @@ -60,7 +59,6 @@ public class FaultSource implements Source { this.feature = builder.feature; this.config = builder.config; this.mfdTree = builder.mfdTree; - this.branchWeight = builder.branchWeight; this.surface = DefaultGriddedSurface.builder() .trace(feature.trace) @@ -70,7 +68,7 @@ public class FaultSource implements Source { .spacing(config.surfaceSpacing) .build(); - List<Mfd> mfds = ModelTrees.scaledMfdList(mfdTree, branchWeight); + List<Mfd> mfds = ModelTrees.scaledMfdList(mfdTree); this.scaledMfds = mfds; // this.scaledMfds = reduceMfdList(mfds); // checkTypes(mfds, feature.name); @@ -303,14 +301,12 @@ public class FaultSource implements Source { /* Single use builder */ static class Builder { - private static final String ID = "FaultSource.Builder"; private boolean built = false; SourceFeature.NshmFault feature; SourceConfig.Fault config; LogicTree<Mfd> mfdTree; Mfd mfdTotal; - Double branchWeight; Builder feature(SourceFeature.NshmFault feature) { this.feature = feature; @@ -329,24 +325,21 @@ public class FaultSource implements Source { } Builder mfdTotal(Mfd mfdTotal) { - checkNotNull(mfdTotal, "MFD total is null"); - this.mfdTotal = mfdTotal; + this.mfdTotal = checkNotNull(mfdTotal); return this; } - Builder branchWeight(double weight) { - this.branchWeight = weight; - return this; + void validateState(String label) { + checkState(!built, "Single use builder"); + checkNotNull(feature, "%s feature", label); + checkNotNull(config, "%s config", label); + checkNotNull(mfdTree, "%s MFD tree", label); + checkNotNull(mfdTotal, "%s total MFD", label); + built = true; } FaultSource build() { - checkState(!built, "This %s instance has already been used", ID); - checkState(feature != null, "%s feature not set", ID); - checkState(config != null, "%s config not set", ID); - checkState(mfdTree != null, "%s MFD tree not set", ID); - checkState(mfdTotal != null, "%s total MFD not set", ID); - checkState(branchWeight != null, "%s source branch weight not set", ID); - built = true; + validateState("FaultSource.Builder"); return new FaultSource(this); } } diff --git 
a/src/main/java/gov/usgs/earthquake/nshmp/model/FaultSourceSet.java b/src/main/java/gov/usgs/earthquake/nshmp/model/FaultSourceSet.java deleted file mode 100644 index d11c0711..00000000 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/FaultSourceSet.java +++ /dev/null @@ -1,114 +0,0 @@ -package gov.usgs.earthquake.nshmp.model; - -import static com.google.common.base.Preconditions.checkNotNull; -import static com.google.common.base.Preconditions.checkState; - -import java.util.Iterator; -import java.util.List; -import java.util.function.Predicate; - -import com.google.common.collect.Lists; - -import gov.usgs.earthquake.nshmp.geo.Location; -import gov.usgs.earthquake.nshmp.geo.Locations; -import gov.usgs.earthquake.nshmp.tree.LogicGroup; -import gov.usgs.earthquake.nshmp.tree.LogicTree; - -/** - * Container class for related {@link FaultSource}s. - * - * @author U.S. Geological Survey - * @see FaultSource - */ -public class FaultSourceSet extends AbstractSourceSet<FaultSource> { - - private final List<FaultSource> sources; - - /* - * This will merge with SourceTree instance - currently are directly scaling - * source total mfds by the branch weight to make existing calculators work, - * but really want to update calculators to scale the individual RuptureSet - * curves from SourceTree when combining. - */ - private final LogicTree<FaultSource> sourceTree; - - private FaultSourceSet(Builder builder) { - super(builder); - this.sources = builder.sources; - this.sourceTree = builder.sourceTree; - } - - @Override - public Iterator<FaultSource> iterator() { - return sources.iterator(); - } - - @Override - public int size() { - return sources.size(); - } - - @Override - public Predicate<FaultSource> distanceFilter(Location loc, double distance) { - return new DistanceFilter(loc, distance); - } - - /* Not inlined for use by cluster sources */ - static class DistanceFilter implements Predicate<FaultSource> { - private final Predicate<Location> filter; - - DistanceFilter(Location loc, double distance) { - filter = Locations.distanceFilter(loc, distance); - } - - @Override - public boolean test(FaultSource source) { - return filter.test(source.feature.trace.first()) || - filter.test(source.feature.trace.last()); - } - - @Override - public String toString() { - return "FaultSourceSet.DistanceFilter[ " + filter.toString() + " ]"; - } - } - - /* Single use builder. 
*/ - static class Builder extends AbstractSourceSet.Builder { - - static final String ID = "FaultSourceSet.Builder"; - - final List<FaultSource> sources = Lists.newArrayList(); - final LogicGroup.Builder<FaultSource> treeBuilder = LogicGroup.builder(); - LogicTree<FaultSource> sourceTree; - - Builder() { - super(SourceType.FAULT); - } - - Builder(SourceType type) { - super(type); - } - - Builder source(FaultSource source, double weight) { - sources.add(checkNotNull(source, "FaultSource is null")); - treeBuilder.addBranch(source.name(), source, weight); - return this; - } - - @Override - void validateState(String buildId) { - super.validateState(buildId); - } - - FaultSourceSet buildFaultSet() { - validateState(ID); - // this is here because calls up from interface - // will fail in this.validateState() - checkState(sources.size() > 0, "%s source list is empty", ID); - sourceTree = treeBuilder.build(); - return new FaultSourceSet(this); - } - } - -} diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/GmmSet.java b/src/main/java/gov/usgs/earthquake/nshmp/model/GmmSet.java index fe5b031e..c485d6c6 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/GmmSet.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/model/GmmSet.java @@ -20,7 +20,7 @@ import gov.usgs.earthquake.nshmp.tree.LogicTree; /** * Wrapper class for {@link GroundMotionModel}s associated with a - * {@link SourceSet}. In addition to carrying {@code Map}s of + * {@link RuptureSet}. In addition to carrying {@code Map}s of * {@code GroundMotionModel}s and their associated weights, this class * encapsulates any additional epistemic uncertainty that should be applied to * the ground motions computed from these models, as well as model weight diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/GridLoader.java b/src/main/java/gov/usgs/earthquake/nshmp/model/GridLoader.java index 67dde8b3..63b7b056 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/GridLoader.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/model/GridLoader.java @@ -23,8 +23,6 @@ import gov.usgs.earthquake.nshmp.data.DelimitedData.Record; import gov.usgs.earthquake.nshmp.data.XyPoint; import gov.usgs.earthquake.nshmp.fault.FocalMech; import gov.usgs.earthquake.nshmp.geo.Location; -import gov.usgs.earthquake.nshmp.geo.json.Feature; -import gov.usgs.earthquake.nshmp.geo.json.Properties; import gov.usgs.earthquake.nshmp.mfd.Mfd; import gov.usgs.earthquake.nshmp.mfd.Mfd.Properties.GutenbergRichter; import gov.usgs.earthquake.nshmp.mfd.Mfd.Type; @@ -53,7 +51,7 @@ class GridLoader { // TODO deal with GR_TAPER mix in in WUS /* Grid csv file column header and property keys. 
*/ - private static class Key { + static class Key { static final String LON = "lon"; static final String LAT = "lat"; static final String STRIKE = "strike"; @@ -129,18 +127,17 @@ class GridLoader { } private GridRuptureSet createRuptureSet() { - GridRuptureSet grs = GridRuptureSet.builder() - .id(id) + return GridRuptureSet.builder() .name(name) - .feature(feature) + .id(id) .modelData(data) + .feature(feature) .nodeData( featureData.locations, featureData.mfds, featureData.mfdsTree, - featureData.focalMechMaps) + Optional.of(featureData.focalMechMaps)) .build(); - return grs; } private void processNodes() { @@ -242,7 +239,7 @@ class GridLoader { featureData.locations, featureData.mfds, featureData.mfdsTree, - null) + Optional.empty()) .build(); ruptureSets.add(grs); } @@ -261,7 +258,7 @@ class GridLoader { featureData.locations, featureData.mfds, featureData.mfdsTree, - null); + Optional.empty()); ruptureSets.add(srsb.build()); } return ruptureSets; @@ -408,30 +405,30 @@ class GridLoader { private GridRuptureSet createGridRuptureSet() { return GridRuptureSet.builder() - .id(id) .name(name) - .feature(feature) + .id(id) .modelData(data) + .feature(feature) .nodeData( featureData.locations, featureData.mfds, featureData.mfdsTree, - null) + Optional.empty()) .build(); } private SlabRuptureSet createSlabRuptureSet() { - SlabRuptureSet.Builder srsb = SlabRuptureSet.builder(); - srsb.id(id) - .name(name) - .feature(feature) + SlabRuptureSet.Builder b = SlabRuptureSet.builder(); + b.name(name) + .id(id) .modelData(data) + .feature(feature) .nodeData( featureData.locations, featureData.mfds, featureData.mfdsTree, - null); - return srsb.build(); + Optional.empty()); + return b.build(); } private void processNodes() { @@ -540,33 +537,26 @@ class GridLoader { } } - static GridSourceSet createGrid( + static GridRuptureSet createGrid( String name, int id, - TectonicSetting setting, - SourceType type, - SourceConfig.Grid config, - Feature feature, + ModelData data, + SourceType type, // remove for zone specific method instead?? 
+ SourceFeature feature, List<Location> locations, List<Mfd> mfds, LogicTree<List<Mfd>> mfdsTree, Optional<List<Map<FocalMech, Double>>> focalMechMaps, - GmmSet gmms, - double weight) { - - Properties props = feature.properties(); + GmmSet gmms) { - GridSourceSet.Builder builder = new GridSourceSet.Builder(type); - builder.name(name) + GridRuptureSet.Builder builder = new GridRuptureSet.Builder(type) + .name(name) .id(id) - .setting(setting) - .weight(weight) - .gmms(gmms); - builder.gridConfig(config); - props.getDouble(Key.STRIKE).ifPresent(builder::strike); + .feature(feature) + .modelData(data); // System.out.println(mfds.get(0).properties().type()); - builder.locations(locations, mfds, mfdsTree, focalMechMaps); + builder.nodeData(locations, mfds, mfdsTree, focalMechMaps); return builder.build(); } diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/GridRuptureSet.java b/src/main/java/gov/usgs/earthquake/nshmp/model/GridRuptureSet.java index 75223476..79d5b2d8 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/GridRuptureSet.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/model/GridRuptureSet.java @@ -1,16 +1,53 @@ package gov.usgs.earthquake.nshmp.model; +import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.Preconditions.checkState; +import static gov.usgs.earthquake.nshmp.Earthquakes.checkCrustalDepth; +import static gov.usgs.earthquake.nshmp.Earthquakes.checkSlabDepth; +import static gov.usgs.earthquake.nshmp.Faults.checkStrike; +import static gov.usgs.earthquake.nshmp.fault.FocalMech.NORMAL; +import static gov.usgs.earthquake.nshmp.fault.FocalMech.REVERSE; +import static gov.usgs.earthquake.nshmp.fault.FocalMech.STRIKE_SLIP; +import static gov.usgs.earthquake.nshmp.model.PointSourceType.FIXED_STRIKE; +import static gov.usgs.earthquake.nshmp.model.SourceType.GRID; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.Comparator; +import java.util.EnumMap; +import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.NavigableMap; import java.util.Optional; +import java.util.OptionalDouble; +import java.util.function.Function; +import java.util.function.Predicate; +import com.google.common.collect.ImmutableSortedMap; +import com.google.common.collect.Lists; +import com.google.common.collect.Maps; +import com.google.common.primitives.Doubles; + +import gov.usgs.earthquake.nshmp.Earthquakes; +import gov.usgs.earthquake.nshmp.data.DoubleData; +import gov.usgs.earthquake.nshmp.data.IntervalTable; +import gov.usgs.earthquake.nshmp.data.MutableXySequence; +import gov.usgs.earthquake.nshmp.data.Sequences; +import gov.usgs.earthquake.nshmp.data.XySequence; import gov.usgs.earthquake.nshmp.fault.FocalMech; +import gov.usgs.earthquake.nshmp.fault.surface.RuptureScaling; import gov.usgs.earthquake.nshmp.geo.Location; +import gov.usgs.earthquake.nshmp.geo.LocationList; +import gov.usgs.earthquake.nshmp.geo.Locations; +import gov.usgs.earthquake.nshmp.geo.json.Properties; import gov.usgs.earthquake.nshmp.mfd.Mfd; import gov.usgs.earthquake.nshmp.mfd.Mfds; +import gov.usgs.earthquake.nshmp.model.GridLoader.Key; +import gov.usgs.earthquake.nshmp.model.PointSource.DepthModel; +import gov.usgs.earthquake.nshmp.tree.Branch; import gov.usgs.earthquake.nshmp.tree.LogicTree; import gov.usgs.earthquake.nshmp.tree.Trees; @@ -19,26 +56,44 @@ import gov.usgs.earthquake.nshmp.tree.Trees; * * @author 
U.S. Geological Survey */ -class GridRuptureSet implements RuptureSet { +public class GridRuptureSet extends AbstractRuptureSet<PointSource> { private final int id; private final String name; - private final SourceFeature.Grid feature; + private final SourceFeature feature; private final ModelData data; private final List<Location> locations; private final List<Mfd> mfds; private final LogicTree<List<Mfd>> mfdsTree; private final LogicTree<Mfd> mfdTree; - private final Optional<List<Map<FocalMech, Double>>> focalMechMaps; - private GridSourceSet gss; + final List<Map<FocalMech, Double>> mechMaps; // may be nCopies + private final boolean singularMechs; + private final NavigableMap<Double, Map<Double, Double>> magDepthMap; + private final OptionalDouble strike; + + /* optimizable = magMaster.isPresent() */ + private final Optional<double[]> magMaster; + + final RuptureScaling rupScaling; + private final double maxDepth; + private final PointSourceType pointSourceType; + + final DepthModel depthModel; + final boolean optimizable; + + final double gridSpacing; + final int smoothingDensity; + final double smoothingLimit; + final double distanceBin; /* * Enforce expectation that all MFDs are GR* */ GridRuptureSet(Builder builder) { + super(builder); this.id = builder.id; this.name = builder.name; this.feature = builder.feature; @@ -48,88 +103,152 @@ class GridRuptureSet implements RuptureSet { this.mfds = builder.mfds; this.mfdsTree = builder.mfdsTree; this.mfdTree = builder.mfdTree; - this.focalMechMaps = builder.focalMechMaps; - } - // TODO declare this in RuptureSet? Source T issue - SourceSet<PointSource> sourceSet(double weight) { - if (gss == null) { - gss = createSourceSet(weight); - } + this.rupScaling = builder.rupScaling; + this.mechMaps = builder.mechMaps; + this.singularMechs = builder.singularMechs; + this.magDepthMap = builder.magDepthMap; + this.pointSourceType = builder.pointSourceType; + this.strike = builder.strike; + this.magMaster = builder.magMaster; + this.maxDepth = builder.maxDepth; - // System.out.println("--- GSS ---"); - // for (PointSource ptSrc : gss) { - // System.out.println(ptSrc); - // for (Rupture rup : ptSrc) { - // System.out.println(rup); - // break; - // } - // } - // System.out.println(gss.mechMaps); + this.optimizable = + (type() != SourceType.ZONE) && + (pointSourceType() != FIXED_STRIKE) && + this.magMaster.isPresent(); - return gss; - } + double[] depthMags = this.mfds.get(0).data().xValues().toArray(); - GridSourceSet createSourceSet(double weight) { - SourceConfig.Grid config = data.sourceConfig().asGrid(); - GridSourceSet grid = GridLoader.createGrid( - name, - id, - data.tectonicSetting(), - SourceType.GRID, - config, - feature.source, - locations, - mfds, - mfdsTree, - focalMechMaps, - data.gmms(), - weight); + this.depthModel = DepthModel.create( + magDepthMap, + Doubles.asList(depthMags), + maxDepth); - return grid; + this.gridSpacing = builder.gridSpacing; + this.smoothingDensity = builder.smoothingDensity; + this.smoothingLimit = builder.smoothingLimit; + this.distanceBin = builder.distanceBin; } @Override - public String name() { - return name; + public int size() { + return locations.size(); } @Override - public int id() { - return id; + public LogicTree<Mfd> mfdTree() { + return mfdTree; } @Override - public int size() { - return locations.size(); + public Location location(Location site) { + throw new UnsupportedOperationException(); } - @Override - public SourceType type() { - return SourceType.GRID; + /** + * The {@link PointSource} 
representation used by this rupture set. */
+ public PointSourceType pointSourceType() {
+ return pointSourceType;
+ }
+
+ /**
+ * Whether this rupture set is capable of being optimized. There are
+ * circumstances under which a grid optimization table cannot be built.
+ */
+ public boolean optimizable() {
+ return optimizable;
+ }
+
+ /**
+ * For internal use only; public to permit access from outside this package.
+ */
+ public static String sizeString(RuptureSet<? extends Source> ruptures, int size) {
+ if (ruptures instanceof Table) {
+ Table t = (Table) ruptures;
+ return t.parentCount() + " (" + t.rowCount + " of " + t.maximumSize + ")";
+ }
+ return Integer.toString(size); } @Override - public LogicTree<Mfd> mfdTree() { - return mfdTree;
+ public Predicate<PointSource> distanceFilter(Location loc, double distance) {
+ return new DistanceFilter(loc, distance);
+ }
+
+ /* Not inlined for use by area sources */
+ static final class DistanceFilter implements Predicate<PointSource> {
+ private final Predicate<Location> filter;
+
+ DistanceFilter(Location loc, double distance) {
+ filter = Locations.distanceAndRectangleFilter(loc, distance);
+ }
+
+ @Override
+ public boolean test(PointSource source) {
+ return filter.test(source.loc);
+ } } @Override - public Location location(Location site) { - throw new UnsupportedOperationException();
+ public Iterator<PointSource> iterator() {
+ return new Iterator<PointSource>() {
+ int caret = 0;
+
+ @Override
+ public boolean hasNext() {
+ return caret < locations.size();
+ }
+
+ @Override
+ public PointSource next() {
+ return getSource(caret++);
+ }
+
+ @Override
+ public void remove() {
+ throw new UnsupportedOperationException();
+ }
+ };
+ }
+
+ private PointSource getSource(int index) {
+
+ /*
+ * Stricter rules regarding what sorts of default mfds can be used with grid
+ * sources (in an individual grid rupture set) will allow parsers to create
+ * XySequence based MFDs directly using copyOf so as to not create zillions
+ * of mag arrays.
+ */
+ Location loc = locations.get(index);
+ Mfd mfd = mfds.get(index);
+ Map<FocalMech, Double> mechMap = mechMaps.get(index);
+
+ switch (pointSourceType) {
+ case POINT:
+ return new PointSource(GRID, loc, mfd, mechMap, rupScaling, depthModel);
+
+ case FINITE:
+ return new PointSourceFinite(GRID, loc, mfd, mechMap, rupScaling, depthModel);
+
+ case FIXED_STRIKE:
+ return new PointSourceFixedStrike(GRID, loc, mfd, mechMap, rupScaling, depthModel,
+ strike.orElseThrow());
+
+ default:
+ throw new IllegalStateException("Unhandled point source type");
+ } } static Builder builder() { return new Builder(); }
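+
+ /*
+ * Usage sketch (illustrative only; `grs` and `site` are assumed to be a
+ * built GridRuptureSet and a site Location). Sources are iterated lazily
+ * via iterableForLocation(), inherited from AbstractRuptureSet, which
+ * applies the distanceFilter() above:
+ *
+ *   for (PointSource source : grs.iterableForLocation(site)) {
+ *     for (Rupture rupture : source) {
+ *       // pass each rupture on to ground motion model calculations
+ *     }
+ *   }
+ */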
- static class Builder { - - private boolean built = false;
+ /* Single use builder. */
+ static class Builder extends AbstractRuptureSet.Builder { - private Integer id; - private String name; - private SourceFeature.Grid feature;
+ private SourceFeature feature; // Grid or Zone
 private ModelData data; private List<Location> locations; @@ -137,25 +256,121 @@ class GridRuptureSet implements RuptureSet { private LogicTree<List<Mfd>> mfdsTree; private LogicTree<Mfd> mfdTree; - private Optional<List<Map<FocalMech, Double>>> focalMechMaps = Optional.empty();
+ // fields carried over from the former GridSourceSet.Builder
+ private RuptureScaling rupScaling;
+ private List<Map<FocalMech, Double>> mechMaps = Lists.newArrayList();
+ private NavigableMap<Double, Map<Double, Double>> magDepthMap;
+ private PointSourceType pointSourceType;
+ private OptionalDouble strike = OptionalDouble.empty();
+ private Optional<double[]> magMaster;
+ private Double maxDepth;
+ // private Double mMin;
+ // private Double mMax;
+ // private Double Δm;
+
+ private Map<FocalMech, Double> mechMap;
+ private boolean singularMechs = true;
+ private Double gridSpacing;
+ private Integer smoothingDensity;
+ private Double smoothingLimit;
+ private Double distanceBin;
+
+ private Builder() {
+ super(SourceType.GRID);
+ }
+
+ Builder(SourceType type) {
+ super(type);
+ }
+
+ @Override
+ Builder name(String name) {
+ super.name(name);
+ return this;
+ }
+
+ @Override Builder id(int id) { - this.id = id;
+ super.id(id); return this; } - Builder name(String name) { - this.name = name;
+ private Builder gridConfig(SourceConfig.Grid gridConfig) {
+
+ this.maxDepth = gridConfig.maxDepth;
+
+ // makes bad assumption of GRID
+ // reenable but need slab handling
+ // validateDepth(this.maxDepth, SourceType.GRID);
+
+ this.rupScaling = gridConfig.ruptureScaling;
+ if (strike.isEmpty()) { // unless feature(...) already set FIXED_STRIKE
+ this.pointSourceType = gridConfig.pointSourceType;
+ }
+
+ this.mechMap = convertFocalMechTree(gridConfig.focalMechTree);
+ checkArgument(this.mechMap.size() == 3);
+
+ this.magDepthMap = convertMagDepthMap(gridConfig.gridDepthMap);
+
+ validateMagCutoffs(this.magDepthMap);
+
+ // makes bad assumption of GRID
+ // reenable but need slab handling
+ // validateDepthMap(this.magDepthMap, SourceType.GRID);
+
+ this.gridSpacing = gridConfig.gridSpacing;
+ this.smoothingDensity = gridConfig.smoothingDensity;
+ this.smoothingLimit = gridConfig.smoothingLimit;
+ this.distanceBin = gridConfig.distanceBin;
+
+ return this; } - Builder feature(SourceFeature.Grid feature) {
+ private Map<FocalMech, Double> convertFocalMechTree(LogicTree<FocalMech> tree) {
+ Map<FocalMech, Double> fmMap = new EnumMap<FocalMech, Double>(FocalMech.class);
+ Arrays.stream(FocalMech.values()).forEach(v -> fmMap.put(v, 0.0));
+ tree.forEach(branch -> fmMap.put(
+ FocalMech.valueOf(branch.id()),
+ branch.weight()));
+ return Maps.immutableEnumMap(fmMap);
+ }
+
+ private NavigableMap<Double, Map<Double, Double>> convertMagDepthMap(
+ Map<String, GridDepthModel> depthMap) {
+ return depthMap.values().stream()
+ .collect(ImmutableSortedMap.toImmutableSortedMap(
+ Comparator.naturalOrder(),
+ model -> model.mRange.upperEndpoint(),
+ model -> convertDepthTree(model.depthTree)));
+ }
+
+ private Map<Double, Double> convertDepthTree(LogicTree<Double> tree) {
+ return tree.stream()
+ .collect(ImmutableSortedMap.toImmutableSortedMap(
+ Comparator.naturalOrder(),
+ Branch::value,
+ Branch::weight));
+ }
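+
+ /*
+ * Conversion sketch with hypothetical values: a grid-depth-map such as
+ *
+ *   "shallow" : mRange [4.5..6.5], depthTree { 5.0 : 1.0 }
+ *   "deep"    : mRange (6.5..9.5], depthTree { 1.0 : 0.5, 6.0 : 0.5 }
+ *
+ * converts to the magnitude-keyed NavigableMap
+ *
+ *   { 6.5 : { 5.0 : 1.0 }, 9.5 : { 1.0 : 0.5, 6.0 : 0.5 } }
+ *
+ * where each key is the upper magnitude bound of a depth model and each
+ * value maps rupture depth to weight.
+ */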
+ Builder feature(SourceFeature feature) { this.feature = feature;
+ Properties props = feature.source.properties();
+ OptionalDouble strike = props.getDouble(Key.STRIKE);
+ if (strike.isPresent()) {
+ checkStrike(strike.orElseThrow());
+ this.strike = strike;
+ this.pointSourceType = FIXED_STRIKE;
+ } return this; }

 Builder modelData(ModelData data) { this.data = data;
+ super.setting(data.tectonicSetting());
+ super.gmms(data.gmms());
+ gridConfig(data.sourceConfig().asGrid()); return this; } @@ -163,31 +378,153 @@ List<Location> locations, List<Mfd> mfds, LogicTree<List<Mfd>> mfdsTree, - List<Map<FocalMech, Double>> focalMechMaps) {
+ Optional<List<Map<FocalMech, Double>>> mechMaps) { - // check sizes
+ checkNotNull(locations);
+ checkNotNull(mfds);
+ checkNotNull(mfdsTree);
+ checkNotNull(mechMaps);
+ checkArgument(locations.size() > 0);
+ checkArgument(mfds.size() == locations.size());
+ mfdsTree.forEach(branch -> checkArgument(branch.value().size() == locations.size()));
+
+ if (mechMaps.isPresent()) {
+ checkArgument(mechMaps.orElseThrow().size() == locations.size());
+ this.mechMaps = mechMaps.orElseThrow();
+ } this.locations = locations; this.mfds = mfds; this.mfdsTree = mfdsTree; this.mfdTree = Trees.transform(mfdsTree, Mfds::combine, Optional.empty()); - this.focalMechMaps = Optional.ofNullable(focalMechMaps);
+ return this; }
+ static void validateDepthMap(
+ Map<Double, Map<Double, Double>> magDepthMap,
+ SourceType type) {
+ for (Map<Double, Double> magMap : magDepthMap.values()) {
+ for (double depth : magMap.keySet()) {
+ validateDepth(depth, type);
+ }
+ }
+ }
+
+ static void validateDepth(double depth, SourceType type) {
+ switch (type) {
+ case GRID:
+ checkCrustalDepth(depth);
+ break;
+ case SLAB:
+ checkSlabDepth(depth);
+ break;
+ case ZONE:
+ checkCrustalDepth(depth);
+ break;
+ default:
+ throw new IllegalStateException(type + " not a grid or related source type");
+ }
+ }
+
+ static void validateMaxAndMapDepths(Map<Double, Map<Double, Double>> magDepthMap,
+ double maxDepth, String id) {
+ for (Map<Double, Double> magMap : magDepthMap.values()) {
+ for (double depth : magMap.keySet()) {
+ checkState(depth <= maxDepth, "%s mag-depth-weight map depth %s > %s", id,
+ depth, maxDepth);
+ }
+ }
+ }
+
+ static void validateMagCutoffs(Map<Double, Map<Double, Double>> magDepthMap) {
+ double mMax = Earthquakes.MAG_RANGE.upperEndpoint();
+ for (double mag : magDepthMap.keySet()) {
+ if (mag > mMax) {
+ return;
+ }
+ }
+ throw new IllegalStateException("MagDepthMap must contain at least one M > " + mMax);
+ }
+ void validateAndInit(String label) { - checkState(!built, "Single use builder"); - checkNotNull(id, "%s id", label); - checkNotNull(name, "%s name", label);
+
+ validateState(label);
+ checkNotNull(feature, "%s feature", label); checkNotNull(data, "%s MFD reference data", label); checkNotNull(locations, "%s locations", label); checkNotNull(mfds, "%s total MFDs", label);
+ checkState(!mfds.isEmpty(), "%s has no MFDs", label); checkNotNull(mfdsTree, "%s MFDs logic tree", label); checkNotNull(mfdTree, "%s collapsed MFD logic tree", label); - built = true;
+ checkNotNull(pointSourceType, "%s point source type", label);
+ checkNotNull(rupScaling, "%s rupture-scaling relation", label);
+ checkNotNull(magDepthMap, "%s mag-depth-weight map", label);
+ checkNotNull(maxDepth, "%s max depth", label);
+ checkNotNull(mechMap, "%s focal mech map", label);
+
+ checkNotNull(gridSpacing, "%s grid spacing", label);
+ checkNotNull(smoothingDensity, "%s smoothing density", label);
+ checkNotNull(smoothingLimit, "%s smoothing limit", label);
+ checkNotNull(distanceBin, "%s optimization table distance bin", label);
+
+ // checkState(mMin != null, "%s min mag not set", buildId);
+ // checkState(mMax != null, "%s max mag not set", buildId);
+ // checkState(Δm != null, "%s delta mag not set", buildId);
+
+ /*
+ * There are too many assumptions built into this; who's to say one's bin
+ * spacing should only be in the hundredths?
+ *
+ * Where did this come from anyway? Are mag deltas really all that strange?
+ *
+ * We should read the precision of the supplied mMin, mMax, and delta and
+ * use the largest for formatting.
+ *
+ * In the case of single combined/flattened MFDs, mags may not be
+ * uniformly spaced. Can this be refactored?
+ */
+ double[] mfdMags = mfds.get(0).data().xValues().toArray();
+ magMaster = Optional.of(mfdMags);
+
+ /*
+ * Validate size of mechMaps; size could get out of sync if mixed calls to
+ * location(...) were made; one can imagine a future use case where a
+ * default is required with an override in a few locations; for now, if
+ * custom mechMaps are required, there must be one for each node. If no
+ * custom maps are supplied, populate mechMaps with nCopies (a singleton
+ * list with multiple elements).
+ */
+
+ if (mechMaps.isEmpty()) {
+ mechMaps = Collections.nCopies(locations.size(), mechMap);
+ } else {
+ checkState(
+ mechMaps.size() == locations.size(),
+ "%s only %s of %s focal mech maps were added", label,
+ mechMaps.size(), locations.size());
+ singularMechs = false;
+ }
+
+ /*
+ * Validate depths. Depths will already have been checked for consistency
+ * with allowable depths for different source types. Must also ensure that
+ * all depths (zTor) in the magDepthMap are < maxDepth.
+ */
+ validateMaxAndMapDepths(magDepthMap, maxDepth, label);
+
+ /* Validate strike value and type agreement. */
+ if (strike.isPresent()) {
+ checkState(pointSourceType == FIXED_STRIKE,
+ "Source type must be FIXED_STRIKE for strike [%s]", strike.orElseThrow());
+ } else {
+ checkState(pointSourceType != FIXED_STRIKE,
+ "Source type FIXED_STRIKE invalid for strike [%s]", strike);
+ }
+ } /* Create a new rupture set. */ @@ -196,4 +533,527 @@ return new GridRuptureSet(this); } }
+
+ /*
+ * Notes on refactoring GridRuptureSet xml.
+ *
+ * The existing implementation permits one mfd per node and that maps to the
+ * default of the same 'type'. This was fine assuming mfds were collapsed
+ * (CEUS 2008) or MFDs were explicitly one type or another as required for CA
+ * 2008 grids (GR or INCR, the latter to handle the M>6.5 rate reduction).
+ *
+ * However, we want to be able to track mMax and other logic tree branches.
+ *
+ * Given gridded sources where rate or a-value at each node is consistent for
+ * all applicable MFDs:
+ *
+ * - each node will map to all defaults
+ *
+ * - defaults must have unique 'id' (logic tree branch id), but can be of
+ * similar 'type'; throw error if field doesn't match
+ *
+ * - CA 2008 will need to be converted to only INCR MFDs with explicit rates
+ * for all nodes.
+ *
+ * - refactor IncrementalMfd to Mfd in XML
+ *
+ * - refactor Node to Source
+ *
+ * - usually the MFD 'type' is consistent across branches for a given grid
+ * rupture set so collapsing MFDs isn't too problematic.
+ *
+ * Grid optimizations:
+ *
+ * - create GridRuptureSet subclass, one for each default MFD (siblings)
+ *
+ * - also create a subclass that uses a collapsed MFD, or perhaps this is just
+ * the original.
+ */
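+
+ /*
+ * Builder usage sketch, mirroring the call sites in GridLoader; the
+ * `name`, `id`, `data`, `feature`, and node-data variables are assumed to
+ * have been parsed from a model:
+ *
+ *   GridRuptureSet grs = GridRuptureSet.builder()
+ *       .name(name)
+ *       .id(id)
+ *       .modelData(data) // also sets setting, gmms, and grid config
+ *       .feature(feature)
+ *       .nodeData(locations, mfds, mfdsTree, Optional.empty())
+ *       .build();
+ */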
+
+ /*
+ * If, for a basic HazardResult, we want to be able to give a per-rupture-set
+ * decomposition by ground motion model, or just a decomposition of the total
+ * curve, we'll need to have a table of the curves for every model.
+ *
+ * If not necessary, then we can have a table of total curves and a table of
+ * mean (and sigma?) for each model. Just the mean is necessary for
+ * disaggregation epsilon.
+ *
+ * OK... so...
+ *
+ * Preliminary implementations of grid source optimizations modeled after the
+ * NSHMP Fortran codes precomputed median curves in distance and magnitude
+ * (using a weighted combination of Gmms) and then performed lookups for each
+ * source, aggregating a total curve along the way. This approach is lossy in
+ * that data for individual Gmms is lost, and it was never extended to support
+ * disaggregation where ground motion mean and sigma are required.
+ *
+ * Further consideration of these issues suggests that, rather than
+ * aggregating curves along the way, we should build a separate table in
+ * magnitude and distance of rates while looping over sources. In the end,
+ * curves could be computed once for each distance and magnitude bin. Although
+ * full curves for each Gmm could be precomputed, the time to loop over the
+ * rate table may not be significant enough to warrant the memory overhead
+ * (bear in mind that's a lot of curves when considering large logic trees of
+ * Gmms and numerous periods).
+ *
+ * There is also the issue of additional epistemic uncertainty on ground
+ * motions, which does not need to be considered here if building
+ * magnitude-distance rate tables.
+ *
+ * There is the additional issue of different focal mechanisms. For NGAW2 and
+ * the WUS, we would need to have 5 curves per gmm and r/m bin: 2 reverse, 2
+ * normal, 1 strike slip.
+ *
+ * Also assumes uniform vs30 or basin terms; these will likely be spatially
+ * varying in the future; this would also be incompatible with gmm tables.
+ *
+ * Precomputed curves may still be warranted for map calculations where Gmm
+ * specific data and disaggregation are irrelevant.
+ */
+
+ /*
+ * Why, you say?
+ *
+ * Simply put, speed. In the 2014 CEUS NSHM, 1000 km from a site nets about
+ * 30k sources, each of which has an associated MFD with up to 33 values (and
+ * that assumes the different mMax models have been collapsed together). So
+ * 990k curve calculations per GMM. However, if the rates of those sources are
+ * first aggregated into a matrix of distance (300) and magnitude (33) bins,
+ * then only 9900 hazard curve calculations need be performed per GMM. Ha!
+ */
+
+ /*
+ * need to specify magnitude and distance discretization
+ */
+
+ /**
+ * Create a {@code Function} for a location that condenses a
+ * {@code GridRuptureSet} into tabular form (distance, magnitude and azimuth
+ * bins) for speedier iteration.
+ *
+ * @param loc reference point for table
+ * @param smooth whether to smooth the rates of sources near the reference
+ *        point over a denser grid
+ */
+ public static Function<GridRuptureSet, RuptureSet<? extends Source>> optimizer(
+ Location loc, boolean smooth) {
+ return new Optimizer(loc, smooth);
+ }
+
+ private static class Optimizer implements
+ Function<GridRuptureSet, RuptureSet<? extends Source>> {
+
+ private final Location loc;
+ private final boolean smooth;
+
+ Optimizer(Location loc, boolean smooth) {
+ this.loc = loc;
+ this.smooth = smooth;
+ }
+
+ @Override
+ public Table apply(GridRuptureSet sources) {
+ return new Table(sources, loc, smooth);
+ }
+ }
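+
+ /*
+ * Optimizer usage sketch (illustrative; `grs` and `siteLoc` assumed). The
+ * returned function tabulates the rupture set for a single site; iteration
+ * then draws from the condensed magnitude-distance table rather than from
+ * the full grid:
+ *
+ *   RuptureSet<? extends Source> table =
+ *       GridRuptureSet.optimizer(siteLoc, true).apply(grs);
+ *   for (Source source : table.iterableForLocation(siteLoc)) {
+ *     // one source per occupied distance bin (per focal mech, if multi-mech)
+ *   }
+ */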
+
+ /*
+ * Notes on dealing with mixedMech situations (e.g. UC3)
+ *
+ * Need 3 tables (SS, R, N)
+ *
+ * Rate in each R-M bin gets partitioned across three tables.
+ *
+ * When building sources we could normalize the rates to create a mechMap on
+ * the fly and sum back to the total rate.
+ *
+ * Alternatively, and preferably, we reconsolidate partitioned rates into a
+ * list
+ */
+
+ /*
+ * Upgrade this to DataVolume to handle azimuth bins?? split over focal mechs?
+ * required for UC3 grids
+ */
+
+ /**
+ * Tabular implementation of a {@code GridRuptureSet}. This class consolidates
+ * the point sources that influence hazard at a site using a
+ * magnitude-distance-rate {@code DataTable}, from which a list of sources is
+ * generated. A {@code Table} is created on a per-calculation basis and is
+ * unique to a location.
+ *
+ * @see GridRuptureSet#optimizer(Location, boolean)
+ */
+ private static final class Table extends AbstractRuptureSet<PointSource> {
+
+ private final GridRuptureSet parent;
+ private final Location origin;
+ private final List<PointSource> sources;
+
+ /*
+ * Row count reflects the number of rows used in a DataTable when building
+ * sources. Although this will likely be the same as sources.size(), it may
+ * not be. For example, when using multi-mechs many more sources are created
+ * because the different focal mechs cannot be combined given possibly
+ * varying rates across different magnitudes.
+ */
+ private int rowCount;
+ private int maximumSize;
+ private int parentCount;
+
+ private Table(GridRuptureSet parent, Location origin, boolean smoothed) {
+
+ // This is dirty - arose from conversion to a constructor taking a
+ // builder
+ super(new AbstractRuptureSet.Builder(parent.type()) {
+ {
+ this.name = parent.name();
+ this.id = parent.id();
+ this.setting = parent.setting();
+ this.gmmSet = parent.groundMotionModels();
+ }
+ });
+
+ this.parent = parent;
+ setLeafWeight(parent.weight());
+ this.origin = origin;
+ this.sources = parent.singularMechs
+ ? initSources(smoothed)
+ : initMultiMechSources(smoothed);
+ }
+
+ /**
+ * The number of sources drawn from a parent {@code GridRuptureSet} during
+ * initialization.
+ */
+ public int parentCount() {
+ return parentCount;
+ }
+
+ @Override
+ public String name() {
+ return parent.name() + " (opt)";
+ }
+
+ @Override
+ public int size() {
+ return parent.size();
+ }
+
+ @Override
+ public Predicate<PointSource> distanceFilter(Location loc, double distance) {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public Iterator<PointSource> iterator() {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public LogicTree<Mfd> mfdTree() {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public Location location(Location site) {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public Iterable<PointSource> iterableForLocation(Location loc) {
+ /*
+ * Ignore location; simply iterate over the list of sources. Source list
+ * will be empty if mfdTable is empty (all zeros).
+ */
+ return sources;
+ }
+
+ private static final double SRC_TO_SITE_AZIMUTH = 0.0;
+ // private static final double SMOOTHING_LIMIT = 40.0; // km
+
+ /* creates the type of point source specified in the parent */
+ private List<PointSource> initSources(boolean smoothed) {
+ /* For now, should only be getting here for GR MFDs */
+ Mfd modelMfd = parent.mfds.get(0);
+ Mfd.Properties props = modelMfd.properties(); // probably INCR
+ double[] mags = modelMfd.data().xValues().toArray();
+
+ // Can we get this from the mfd.properties?
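+ // (illustrative: a GR MFD sampled at M 5.05, 5.15 ... 7.95 gives
+ // Δm = 0.1 and half-bin-offset table edges mMin = 5.0, mMax = 8.0)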
+ // the original min max won't have offsets + double Δm = mags[1] - mags[0]; + double ΔmBy2 = Δm / 2.0; + double mMin = mags[0] - ΔmBy2; + double mMax = mags[mags.length - 1] + ΔmBy2; + + // table keys are specified as lowermost and uppermost bin edges + // double Δm = parent.Δm; + // double ΔmBy2 = Δm / 2.0; + // double mMin = parent.magMaster[0] - ΔmBy2; + // double mMax = parent.magMaster[parent.magMaster.length - 1] + ΔmBy2; + double rMax = parent.groundMotionModels().maxDistance(); + double[] smoothingOffsets = smoothingOffsets( + parent.smoothingDensity, + parent.gridSpacing); + + IntervalTable.Builder tableBuilder = new IntervalTable.Builder() + .rows(0.0, rMax, parent.distanceBin) + .columns(mMin, mMax, Δm); + + for (PointSource source : parent.iterableForLocation(origin)) { + double r = Locations.horzDistanceFast(origin, source.loc); + /* Experimental smoothing. */ + if (smoothed && r < parent.smoothingLimit) { + addSmoothed(tableBuilder, origin, source.loc, source.mfd, smoothingOffsets); + } else { + tableBuilder.add(r, source.mfd.data()); + } + parentCount++; + } + IntervalTable mfdTable = tableBuilder.build(); + + // System.out.println(parent.name()); + // System.out.println(mfdTable); + // System.out.println("TableSum: " + mfdTable.collapse().sum()); + // System.out.println(mfdTable.rows()); + + List<Double> distances = mfdTable.rows(); + maximumSize = distances.size(); + List<PointSource> b = new ArrayList<>(); + for (double r : distances) { + Mfd mfd = Mfd.create(mfdTable.row(r)); + if (mfd.data().isClear()) { + continue; + } + Location loc = Locations.location(origin, SRC_TO_SITE_AZIMUTH, r); + b.add(PointSources.pointSource( + parent.type(), + parent.pointSourceType, + loc, + mfd, + parent.mechMaps.get(0), + parent.rupScaling, + parent.depthModel)); + rowCount++; + } + return List.copyOf(b); + } + + /* always creates finite point sources */ + private List<PointSource> initMultiMechSources(boolean smoothed) { + + /* For now, should only be getting here for GR MFDs */ + Mfd modelMfd = parent.mfds.get(0); + Mfd.Properties props = modelMfd.properties(); // probably INCR + double[] mags = modelMfd.data().xValues().toArray(); + + double Δm = mags[1] - mags[0]; + double ΔmBy2 = Δm / 2.0; + double mMin = mags[0] - ΔmBy2; + double mMax = mags[mags.length - 1] + ΔmBy2; + + // double Δm = parent.Δm; + // double ΔmBy2 = Δm / 2.0; + // double mMin = parent.magMaster[0] - ΔmBy2; + // double mMax = parent.magMaster[parent.magMaster.length - 1] + ΔmBy2; + double rMax = parent.groundMotionModels().maxDistance(); + double[] smoothingOffsets = smoothingOffsets( + parent.smoothingDensity, + parent.gridSpacing); + + IntervalTable.Builder ssTableBuilder = new IntervalTable.Builder() + .rows(0.0, rMax, parent.distanceBin) + .columns(mMin, mMax, Δm); + + IntervalTable.Builder rTableBuilder = new IntervalTable.Builder() + .rows(0.0, rMax, parent.distanceBin) + .columns(mMin, mMax, Δm); + + IntervalTable.Builder nTableBuilder = new IntervalTable.Builder() + .rows(0.0, rMax, parent.distanceBin) + .columns(mMin, mMax, Δm); + + // XySequence srcMfdSum = null; + + for (PointSource source : parent.iterableForLocation(origin)) { + double r = Locations.horzDistanceFast(origin, source.loc); + + // if (srcMfdSum == null) { + // srcMfdSum = XySequence.emptyCopyOf(source.mfd); + // } + // srcMfdSum.add(source.mfd); + + Mfd ssMfd = Mfd.Builder.from(source.mfd).scale(source.mechWtMap.get(STRIKE_SLIP)).build(); + Mfd rMfd = Mfd.Builder.from(source.mfd).scale(source.mechWtMap.get(REVERSE)).build(); + Mfd nMfd = 
Mfd.Builder.from(source.mfd).scale(source.mechWtMap.get(NORMAL)).build();
+
+ // XySequence ssMfd =
+ // MutableXySequence.copyOf(source.mfd.data()).multiply(
+ // source.mechWtMap.get(STRIKE_SLIP));
+ // XySequence rMfd =
+ // MutableXySequence.copyOf(source.mfd.data()).multiply(
+ // source.mechWtMap.get(REVERSE));
+ // XySequence nMfd =
+ // MutableXySequence.copyOf(source.mfd.data()).multiply(
+ // source.mechWtMap.get(NORMAL));
+
+ if (smoothed && r < parent.smoothingLimit) {
+ addSmoothed(ssTableBuilder, origin, source.loc, ssMfd, smoothingOffsets);
+ addSmoothed(rTableBuilder, origin, source.loc, rMfd, smoothingOffsets);
+ addSmoothed(nTableBuilder, origin, source.loc, nMfd, smoothingOffsets);
+ } else {
+ ssTableBuilder.add(r, ssMfd.data());
+ rTableBuilder.add(r, rMfd.data());
+ nTableBuilder.add(r, nMfd.data());
+ }
+
+ parentCount++;
+ }
+
+ IntervalTable ssTable = ssTableBuilder.build();
+ // System.out.println("SS Table:" + TextUtils.NEWLINE + ssTable);
+ IntervalTable rTable = rTableBuilder.build();
+ // System.out.println("R Table:" + TextUtils.NEWLINE + rTable);
+ IntervalTable nTable = nTableBuilder.build();
+ // System.out.println("N Table:" + TextUtils.NEWLINE + nTable);
+
+ // DataTable tableSum = DataTable.Builder.fromModel(ssTable)
+ // .add(ssTable)
+ // .add(rTable)
+ // .add(nTable)
+ // .build();
+ //
+ // XySequence tableMfdSum =
+ // XySequence.emptyCopyOf(tableSum.row(0.1));
+ // for (double row : tableSum.rows()) {
+ // tableMfdSum.add(tableSum.row(row));
+ // }
+ // System.out.println("sourcesMfd:");
+ // System.out.println(srcMfdSum);
+ //
+ // System.out.println("tableMfd:");
+ // System.out.println(tableMfdSum);
+
+ List<Double> distances = ssTable.rows();
+ maximumSize = distances.size();
+ List<PointSource> b = new ArrayList<>();
+ for (double r : distances) {
+ Location loc = Locations.location(origin, SRC_TO_SITE_AZIMUTH, r);
+ boolean tableRowUsed = false;
+
+ /* Add one source per focal-mech table with rate in this row. */
+ Mfd ssMfd = Mfd.create(ssTable.row(r));
+ if (!ssMfd.data().isClear()) {
+ b.add(PointSources.pointSource(
+ parent.type(),
+ PointSourceType.FINITE,
+ loc,
+ ssMfd,
+ parent.mechMaps.get(0),
+ parent.rupScaling,
+ parent.depthModel));
+ tableRowUsed = true;
+ }
+
+ Mfd rMfd = Mfd.create(rTable.row(r));
+ if (!rMfd.data().isClear()) {
+ b.add(PointSources.pointSource(
+ parent.type(),
+ PointSourceType.FINITE,
+ loc,
+ rMfd,
+ parent.mechMaps.get(0),
+ parent.rupScaling,
+ parent.depthModel));
+ tableRowUsed = true;
+ }
+
+ Mfd nMfd = Mfd.create(nTable.row(r));
+ if (!nMfd.data().isClear()) {
+ b.add(PointSources.pointSource(
+ parent.type(),
+ PointSourceType.FINITE,
+ loc,
+ nMfd,
+ parent.mechMaps.get(0),
+ parent.rupScaling,
+ parent.depthModel));
+ tableRowUsed = true;
+ }
+
+ if (tableRowUsed) {
+ rowCount++;
+ }
+ }
+ return List.copyOf(b);
+ }
+
+ /*
+ * Return a distance dependent discretization. Currently this is fixed at
+ * 1 km for r < 400 km and 5 km for r >= 400 km.
+ */
+ // private static double distanceDiscretization(double r) {
+ // return r < 400.0 ? 1.0 : 5.0;
+ // }
+
+ private static double[] smoothingOffsets(int density, double spacing) {
+ return offsets(spacing, (density == 4) ?
OFFSET_SCALE_4 + : OFFSET_SCALE_10); + } + + } + + static void addSmoothed( + IntervalTable.Builder builder, + Location site, + Location source, + Mfd mfd, + double[] smoothingOffsets) { + + LocationList distributedLocs = distributedLocations(source, smoothingOffsets); + double rateScale = 1.0 / distributedLocs.size(); + XySequence scaledMfd = MutableXySequence.copyOf(mfd.data()).multiply(rateScale); + for (Location loc : distributedLocs) { + double r = Locations.horzDistanceFast(site, loc); + builder.add(r, scaledMfd); + } + } + + static LocationList distributedLocations(Location loc, double[] offsets) { + double lon = loc.longitude; + double lat = loc.latitude; + LocationList.Builder locations = LocationList.builder(); + for (double Δlat : offsets) { + double offsetLat = lat + Δlat; + for (double Δlon : offsets) { + double offsetLon = lon + Δlon; + locations.add(Location.create(offsetLon, offsetLat)); + } + } + return locations.build(); + } + + private static final int PRECISION = 5; + private static final double[] OFFSET_SCALE_10 = Sequences + .arrayBuilder(-0.45, 0.45, 0.1) + .build(); + private static final double[] OFFSET_SCALE_4 = Sequences + .arrayBuilder(-0.375, 0.375, 0.25) + .build(); + + /* + * Create lat-lon offset values for use when building list of distributed + * point source locations. + * + * NOTE DataArray would be cleaner here + */ + private static double[] offsets(double cellScale, double[] offsetArray) { + return DoubleData.round(PRECISION, DoubleData.multiply( + cellScale, + Arrays.copyOf(offsetArray, offsetArray.length))); + } + } diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/GridSourceSet.java b/src/main/java/gov/usgs/earthquake/nshmp/model/GridSourceSet.java deleted file mode 100644 index 87f9680d..00000000 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/GridSourceSet.java +++ /dev/null @@ -1,1034 +0,0 @@ -package gov.usgs.earthquake.nshmp.model; - -import static com.google.common.base.Preconditions.checkArgument; -import static com.google.common.base.Preconditions.checkState; -import static gov.usgs.earthquake.nshmp.Earthquakes.checkCrustalDepth; -import static gov.usgs.earthquake.nshmp.Earthquakes.checkSlabDepth; -import static gov.usgs.earthquake.nshmp.Faults.checkStrike; -import static gov.usgs.earthquake.nshmp.fault.FocalMech.NORMAL; -import static gov.usgs.earthquake.nshmp.fault.FocalMech.REVERSE; -import static gov.usgs.earthquake.nshmp.fault.FocalMech.STRIKE_SLIP; -import static gov.usgs.earthquake.nshmp.model.PointSourceType.FIXED_STRIKE; -import static gov.usgs.earthquake.nshmp.model.SourceType.GRID; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.EnumMap; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.NavigableMap; -import java.util.Optional; -import java.util.OptionalDouble; -import java.util.function.Function; -import java.util.function.Predicate; - -import com.google.common.collect.ImmutableSortedMap; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; -import com.google.common.collect.Ordering; -import com.google.common.primitives.Doubles; - -import gov.usgs.earthquake.nshmp.Earthquakes; -import gov.usgs.earthquake.nshmp.data.DoubleData; -import gov.usgs.earthquake.nshmp.data.IntervalTable; -import gov.usgs.earthquake.nshmp.data.MutableXySequence; -import gov.usgs.earthquake.nshmp.data.Sequences; -import gov.usgs.earthquake.nshmp.data.XySequence; -import gov.usgs.earthquake.nshmp.fault.FocalMech; 
-import gov.usgs.earthquake.nshmp.fault.surface.RuptureScaling; -import gov.usgs.earthquake.nshmp.geo.Location; -import gov.usgs.earthquake.nshmp.geo.LocationList; -import gov.usgs.earthquake.nshmp.geo.Locations; -import gov.usgs.earthquake.nshmp.mfd.Mfd; -import gov.usgs.earthquake.nshmp.model.PointSource.DepthModel; -import gov.usgs.earthquake.nshmp.tree.Branch; -import gov.usgs.earthquake.nshmp.tree.LogicTree; - -/** - * A container class for related, evenly-spaced {@link PointSource}s with - * varying magnitudes and/or rates derived from an {@link Mfd} at each grid - * node. - * - * @author U.S. Geological Survey - */ -public class GridSourceSet extends AbstractSourceSet<PointSource> { - - // final SourceType type; - final List<Location> locations; - final List<Mfd> mfds; - final LogicTree<List<Mfd>> mfdsTree; - // private final Map<FocalMech, Double> mechMap; // default, used for copyOf - final List<Map<FocalMech, Double>> mechMaps; // may be nCopies - private final boolean singularMechs; - private final NavigableMap<Double, Map<Double, Double>> magDepthMap; - private final OptionalDouble strike; - - /* optimizable = magMaster.isPresent() */ - private final Optional<double[]> magMaster; - - final RuptureScaling rupScaling; - private final double maxDepth; - private final PointSourceType sourceType; - - final DepthModel depthModel; - final boolean optimizable; - - final double gridSpacing; - final int smoothingDensity; - final double smoothingLimit; - final double distanceBin; - - /* - * WHether or not a grid can be optimized using rate tables is a function of - * whether magMaster is present, or not. - */ - - /* - * Most grid sources have the same focal mech map everywhere; in these cases, - * mechMaps will have been created using Collections.nCopies() with minimal - * overhead. - */ - - private GridSourceSet(Builder builder) { - super(builder); - // this.type = builder.type; - this.locations = builder.locations; - this.mfds = builder.mfds; - this.mfdsTree = builder.mfdsTree; - this.rupScaling = builder.rupScaling; - this.mechMaps = builder.mechMaps; - this.singularMechs = builder.singularMechs; - this.magDepthMap = builder.magDepthMap; - this.sourceType = builder.sourceType; - this.strike = builder.strike; - this.magMaster = builder.magMaster; - this.maxDepth = builder.maxDepth; - - /* - * skip fixed strike grids and single magnitude MFDs - * - * Should be cleaner way of handling this - */ - // System.out.println(Δm); - // this.optimizable = (sourceType() != FIXED_STRIKE) && !Double.isNaN(Δm); - // magMaster is always present, no? 
- this.optimizable = - (type() != SourceType.ZONE) && - (sourceType() != FIXED_STRIKE) && - this.magMaster.isPresent(); - // System.out.println(Arrays.toString(magMaster.orElseThrow())); - - double[] depthMags = this.mfds.get(0).data().xValues().toArray(); - // System.out.println(mfdsTree); - - // System.out.println(Arrays.toString(depthMags)); - - this.depthModel = DepthModel.create( - magDepthMap, - Doubles.asList(depthMags), - maxDepth); - - // System.out.println("singulearMechs: " + singularMechs); - // System.out.println("ptSrcType: " + sourceType); - // for (int i = 0; i < locations.size(); i++) { - // System.out.println(locations.get(i)); - // System.out.println(mfds.get(i)); - // } - // System.out.println(mfds); - - // System.out.println(Mfds.combine(mfds)); - // - // int i = locations.indexOf(Location.create(-121.9, 45.4)); - // System.out.println(mfds.get(i)); - - this.gridSpacing = builder.gridSpacing; - this.smoothingDensity = builder.smoothingDensity; - this.smoothingLimit = builder.smoothingLimit; - this.distanceBin = builder.distanceBin; - } - - /** - * The {@link PointSource} representation used by this source set. - */ - public PointSourceType sourceType() { - return sourceType; - } - - /** - * Whether this source set is capable of being optimized. There are - * circumstances under which a grid optimization table can not be built. - */ - public boolean optimizable() { - return optimizable; - } - - @Override - public int size() { - return locations.size(); - } - - /** - * For internal use only. Public for access outside of package. - */ - public static String sizeString(SourceSet<? extends Source> sourceSet, int size) { - if (sourceSet instanceof Table) { - Table t = (Table) sourceSet; - return t.parentCount() + " (" + t.rowCount + " of " + t.maximumSize + ")"; - } - return Integer.toString(size); - } - - @Override - public Predicate<PointSource> distanceFilter(Location loc, double distance) { - return new DistanceFilter(loc, distance); - } - - /* Not inlined for use by area sources */ - static final class DistanceFilter implements Predicate<PointSource> { - private final Predicate<Location> filter; - - DistanceFilter(Location loc, double distance) { - filter = Locations.distanceAndRectangleFilter(loc, distance); - } - - @Override - public boolean test(PointSource source) { - return filter.test(source.loc); - } - - @Override - public String toString() { - return "GridSourceSet.DistanceFilter[ " + filter.toString() + " ]"; - } - } - - @Override - public Iterator<PointSource> iterator() { - return new Iterator<PointSource>() { - int caret = 0; - - @Override - public boolean hasNext() { - return caret < locations.size(); - } - - @Override - public PointSource next() { - return getSource(caret++); - } - - @Override - public void remove() { - throw new UnsupportedOperationException(); - } - }; - } - - private PointSource getSource(int index) { - - /* - * Stricter rules regarding what sorts of default mfds can be used with grid - * sources (in an individual grid source set) will allow parsers to create - * XySequence based MFDs directly using copyOf so as to not create zillions - * of mag arrays. 
- */ - Location loc = locations.get(index); - Mfd mfd = mfds.get(index); - Map<FocalMech, Double> mechMap = mechMaps.get(index); - - switch (sourceType) { - case POINT: - return new PointSource(GRID, loc, mfd, mechMap, rupScaling, depthModel); - - case FINITE: - return new PointSourceFinite(GRID, loc, mfd, mechMap, rupScaling, depthModel); - - case FIXED_STRIKE: - return new PointSourceFixedStrike(GRID, loc, mfd, mechMap, rupScaling, depthModel, - strike.orElseThrow()); - - default: - throw new IllegalStateException("Unhandled point source type"); - } - } - - // Builder accomodates overriding a default mechMap to support UC3 - // grid sources; may add others later - - /* Single use builder. */ - static class Builder extends AbstractSourceSet.Builder { - - private static final String ID = "GridSourceSet.Builder"; - - // private SourceType type; - private List<Location> locations = Lists.newArrayList(); - private List<Mfd> mfds = Lists.newArrayList(); - private LogicTree<List<Mfd>> mfdsTree; - private RuptureScaling rupScaling; - private List<Map<FocalMech, Double>> mechMaps = Lists.newArrayList(); - private NavigableMap<Double, Map<Double, Double>> magDepthMap; - private PointSourceType sourceType; - private OptionalDouble strike = OptionalDouble.empty(); - private Optional<double[]> magMaster; - private Double maxDepth; - // private Double mMin; - // private Double mMax; - // private Double Δm; - - private Map<FocalMech, Double> mechMap; - private boolean singularMechs = true; - - private Double gridSpacing; - private Integer smoothingDensity; - private Double smoothingLimit; - private Double distanceBin; - - Builder() { - super(SourceType.GRID); - } - - Builder(SourceType type) { - super(type); - } - - Builder strike(double strike) { - this.strike = OptionalDouble.of(checkStrike(strike)); - this.sourceType = FIXED_STRIKE; - return this; - } - - Builder gridConfig(SourceConfig.Grid gridConfig) { - // only used for zone sources - // this.spacing = gridConfig.spacing; - - this.maxDepth = gridConfig.maxDepth; - - // makes bad assumption of GRID - // reenable but need slab handling - // validateDepth(this.maxDepth, SourceType.GRID); - - this.rupScaling = gridConfig.ruptureScaling; - this.sourceType = gridConfig.pointSourceType; - this.mechMap = convertFocalMechTree(gridConfig.focalMechTree); - checkArgument(this.mechMap.size() == 3); - - this.magDepthMap = convertMagDepthMap(gridConfig.gridDepthMap); - // System.out.println(gridConfig.gridDepthMap); - // System.out.println(this.magDepthMap); - - validateMagCutoffs(this.magDepthMap); - - // makes bad assumption of GRID - // reenable but need slab handling - // validateDepthMap(this.magDepthMap, SourceType.GRID); - - this.gridSpacing = gridConfig.gridSpacing; - this.smoothingDensity = gridConfig.smoothingDensity; - this.smoothingLimit = gridConfig.smoothingLimit; - this.distanceBin = gridConfig.distanceBin; - - return this; - } - - private Map<FocalMech, Double> convertFocalMechTree(LogicTree<FocalMech> tree) { - Map<FocalMech, Double> fmMmap = new EnumMap<FocalMech, Double>(FocalMech.class); - Arrays.stream(FocalMech.values()).forEach(v -> fmMmap.put(v, 0.0)); - tree.forEach(branch -> fmMmap.put( - FocalMech.valueOf(branch.id()), - branch.weight())); - return Maps.immutableEnumMap(fmMmap); - } - - private NavigableMap<Double, Map<Double, Double>> convertMagDepthMap( - Map<String, GridDepthModel> depthMap) { - return depthMap.values().stream() - .collect(ImmutableSortedMap.toImmutableSortedMap( - Ordering.natural(), - model -> 
model.mRange.upperEndpoint(), - model -> convertDepthTree(model.depthTree))); - } - - private Map<Double, Double> convertDepthTree(LogicTree<Double> tree) { - return tree.stream() - .collect(ImmutableSortedMap.toImmutableSortedMap( - Ordering.natural(), - Branch::value, - Branch::weight)); - } - - // locations, total Mfds, underlying mfd-tree - Builder locations( - List<Location> locations, - List<Mfd> mfds, - LogicTree<List<Mfd>> mfdsTree, - Optional<List<Map<FocalMech, Double>>> focalMechMaps) { - - checkArgument(locations.size() == mfds.size()); - // wholesale replacement of arrays - this.locations = locations; - this.mfds = mfds; - this.mfdsTree = mfdsTree; - - if (focalMechMaps.isPresent()) { - mechMaps = focalMechMaps.orElseThrow(); - } - // This assumes (rightly so) that all supplied mfds have the same - // but we need to get rid of the odd dependency on Δm - Mfd model = mfds.get(0); - // this.mMin = model.data().x(0); - // this.mMax = model.data().x(model.data().size() - 1); - // // System.out.println(type); - // this.Δm = (type == Mfd.Type.SINGLE) - // ? Double.NaN - // : model.data().x(1) - this.mMin; - return this; - } - - static void validateDepthMap( - Map<Double, Map<Double, Double>> magDepthMap, - SourceType type) { - for (Map<Double, Double> magMap : magDepthMap.values()) { - for (double depth : magMap.keySet()) { - validateDepth(depth, type); - } - } - } - - static void validateDepth(double depth, SourceType type) { - switch (type) { - case GRID: - checkCrustalDepth(depth); - break; - case SLAB: - checkSlabDepth(depth); - break; - case ZONE: - checkCrustalDepth(depth); - break; - default: - throw new IllegalStateException(type + " not a grid or related source type"); - } - } - - static void validateMaxAndMapDepths(Map<Double, Map<Double, Double>> magDepthMap, - double maxDepth, String id) { - for (Map<Double, Double> magMap : magDepthMap.values()) { - for (double depth : magMap.keySet()) { - checkState(depth <= maxDepth, "%s mag-depth-weight map depth %s > %s", id, - depth, maxDepth); - } - } - } - - static void validateMagCutoffs(Map<Double, Map<Double, Double>> magDepthMap) { - double mMax = Earthquakes.MAG_RANGE.upperEndpoint(); - for (double mag : magDepthMap.keySet()) { - if (mag > mMax) { - return; - } - } - throw new IllegalStateException("MagDepthMap must contain at least one M > " + mMax); - } - - @Override - void validateState(String buildId) { - super.validateState(buildId); - // checkState(type != null, "%s source type not set", buildId); - checkState(strike != null, "%s strike not set", buildId); - checkState(sourceType != null, "%s point source type not set", buildId); - checkState(!locations.isEmpty(), "%s has no locations", buildId); - checkState(!mfds.isEmpty(), "%s has no Mfds", buildId); - checkState(rupScaling != null, "%s has no rupture-scaling relation set", buildId); - checkState(magDepthMap != null, "%s mag-depth-weight map not set", buildId); - checkState(maxDepth != null, "%s max depth not set", buildId); - checkState(mechMap != null, "%s focal mech map not set", buildId); - - checkState(gridSpacing != null, "%s grid spacing not set", buildId); - checkState(smoothingDensity != null, "%s smoothing density not set", buildId); - checkState(smoothingLimit != null, "%s smoothing limit not set", buildId); - checkState(distanceBin != null, "%s optimization table distance bin not set", buildId); - - // checkState(mMin != null, "%s min mag not set", buildId); - // checkState(mMax != null, "%s max mag not set", buildId); - // checkState(Δm != null, "%s 
delta mag not set", buildId); - - /* - * There are too many assumptions built into this; whose to say ones bin - * spacing should be only be in the hundredths? - * - * Where did this come from anyway? Are mag deltas really all that strange - * - * We should read precision of supplied mMin and mMax and delta and use - * largest for formatting - * - * In the case of single combined/flattened MFDs, mags may not be - * uniformly spaced. Can this be refactored - */ - // if (Double.isNaN(Δm)) { - // magMaster = mfds.get(0).data().xValues().toArray(); - // } else { - // double cleanDelta = Double.valueOf(String.format("%.2f", Δm)); - // magMaster = DoubleData.buildCleanSequence(mMin, mMax, cleanDelta, true, - // 2); - // } - double[] mfdMags = mfds.get(0).data().xValues().toArray(); - magMaster = Optional.of(mfdMags); - - /* - * Validate size of mechMaps; size could get out of sync if mixed calls to - * location(...) were made; one can imagine a future use case where a - * default is required with an override in a few locations; for now, if - * custom mechMaps are required, there must be one for each node. If no - * custom maps supplied populate mechMaps with nCopies (singleton list - * with multiple elements) - */ - - if (mechMaps.isEmpty()) { - mechMaps = Collections.nCopies(locations.size(), mechMap); - } else { - checkState( - mechMaps.size() == locations.size(), - "%s only %s of %s focal mech maps were added", ID, - mechMaps.size(), locations.size()); - singularMechs = false; - } - - /* - * Validate depths. depths will already have been checked for consistency - * with allowable depths for different source types. Must also ensure that - * all depths (zTor) in the magDepthMap are < maxDepth. - */ - validateMaxAndMapDepths(magDepthMap, maxDepth, ID); - - /* Validate strike value and type agreement. */ - if (strike.isPresent()) { - checkState(sourceType == FIXED_STRIKE, - "Source type must be FIXED_STRIKE for strike [%s]", strike.orElseThrow()); - } else { - checkState(sourceType != FIXED_STRIKE, - "Source type FIXED_STRIKE invalid for strike [%s]", strike); - } - } - - GridSourceSet build() { - validateState(ID); - return new GridSourceSet(this); - } - - } - - /* - * Notes on refactoring GridSourceSet xml. - * - * The existing implementation permits one mfd per node and that maps to the - * default of the same 'type'. This was fine assuming mfds were collapsed - * (CEUS 2008) or MFDs were explicitly one type or another as required for CA - * 2008 grids (GR or INCR, the latter to handle the M>6.5 rate reduction). - * - * However, we want to be able to track mMax and other logic tree branches. - * - * Given gridded sources where rate or a-value at each node is consistent for - * all applicable MFDs: - * - * - each node will map to all defaults - * - * - defaults must have unique 'id' (logic tree branch id), but can be of - * similar 'type'; throw error if field doesn't match - * - * - CA 2008 will need to be converted to only INCR MFDs with explicit rates - * for all nodes. - * - * - refactor IncrementalMfd to Mfd in XML - * - * - refactor Node to Source - * - * - usually the MFD 'type' is consistent across branches for a given grid - * source set so collapsing MFD's isn't too problematic. - * - * Grid optimizations: - * - * - create GridSourceSet subclass, one for each default MFD (siblings) - * - * - also create a subclass that uses a collapsed MFD, or perhaps this is just - * the original. 
- */ - - /* - * If, for a basic HazardResult, we want to be able to give a per-source-set - * decomposition by ground motion model, or just a decomposition of the total - * curve, we'll need to have a table of the curves for every model. - * - * If not necessary, then we can have a table of total curves and a table of - * mean (and sigma?) for each model. Just mean is necessary for disaggregation - * epsilon. - * - * OK... so... - * - * Preliminary implementations of grid source optimizations modeled after the - * NSHMP Fortran codes precomputed median curves in distance and magnitude - * (using a weighted combination of Gmms) and then performed lookups for each - * source, aggregating a total curve along the way. This approach is lossy in - * that data for individual Gmms is lost, and it was never extended to support - * disaggregation where ground motion mean and sigma are required. - * - * Further consideration of these issues suggests that, rather than - * aggregating curves along the way, we should build a separate table in - * magnitude and distance of rates while looping over sources. In the end, - * curves could be computed once for each distance and magnitude bin. Although - * full curves for each Gmm could be precomputed, the time to loop over the - * rate table may not be significant enough to warrant the memory overhead - * (bear in mind that's a lot of curves when considering large logic trees of - * Gmms and numerous periods). - * - * There is also the issue of additional epistemic uncertainty on - * ground motions, which does not need to be considered here if building - * magnitude-distance rate tables. - * - * There is the additional issue of different focal mechanisms. For NGAW2 and - * the WUS, we would need to have 5 curves per gmm and r/m bin: 2 reverse, 2 - * normal, 1 strike-slip. - * - * This also assumes uniform vs30 or basin terms; these will likely be - * spatially varying in the future; this would also be incompatible with gmm - * tables. - * - * Precomputed curves may still be warranted for map calculations where Gmm - * specific data and disaggregation are irrelevant. - */ - - /* - * Why, you say? - * - * Simply put, speed. In the 2014 CEUS NSHM, 1000 km from a site nets about - * 30k sources, each of which has an associated MFD with up to 33 values (and - * that assumes the different mMax models have been collapsed together). So - * 990k curve calculations per GMM. However, if the rates of those sources are - * first aggregated into a matrix of distance (300) and magnitude (33) bins, - * then only 9900 (300 × 33) hazard curve calculations need be performed per - * GMM. Ha! - */ - - /* - * Need to specify magnitude and distance discretization. - */ - - /** - * Create a {@code Function} for a location that condenses a - * {@code GridSourceSet} into tabular form (distance, magnitude and azimuth - * bins) for speedier iteration. - * - * @param loc reference point for table - */ - public static Function<GridSourceSet, SourceSet<? extends Source>> optimizer( - Location loc, boolean smooth) { - return new Optimizer(loc, smooth); - } - - private static class Optimizer implements - Function<GridSourceSet, SourceSet<? extends Source>> { - - private final Location loc; - private final boolean smooth; - - Optimizer(Location loc, boolean smooth) { - this.loc = loc; - this.smooth = smooth; - } - - @Override - public Table apply(GridSourceSet sources) { - return new Table(sources, loc, smooth); - } - } - - /* - * Notes on dealing with mixedMech situations (e.g.
UC3) - * - * Need 3 tables (SS, R, N). - * - * Rate in each R-M bin gets partitioned across three tables. - * - * When building sources we could normalize the rates to create a mechMap on - * the fly and sum back to the total rate. - * - * Alternatively, and preferably, we reconsolidate partitioned rates into a - * list. - */ - - /* - * Upgrade this to DataVolume to handle azimuth bins?? split over focal mechs? - * required for UC3 grids - */ - - /** - * Tabular implementation of a {@code GridSourceSet}. This class consolidates - * the point sources that influence hazard at a site using a - * magnitude-distance-rate {@code DataTable}, from which a list of sources is - * generated. A {@code Table} is created on a per-calculation basis and is - * unique to a location. - * - * @see GridSourceSet#optimizer(Location) - */ - private static final class Table extends AbstractSourceSet<PointSource> { - - private final GridSourceSet parent; - private final Location origin; - private final List<PointSource> sources; - - /* - * Row count reflects the number of rows used in a DataTable when building - * sources. Although this will likely be the same as sources.size(), it may - * not be. For example, when using multi-mechs many more sources are created - * because the different focal mechs are not (cannot be) combined given - * possibly varying rates across different magnitudes. - */ - private int rowCount; - private int maximumSize; - private int parentCount; - - private Table(GridSourceSet parent, Location origin, boolean smoothed) { - - // This is dirty - arose from conversion to a constructor taking a - // builder - super(new AbstractSourceSet.Builder(null) { - { - this.name = parent.name(); - this.id = parent.id(); - this.type = parent.type(); - this.setting = parent.setting(); - this.weight = parent.weight(); - this.gmmSet = parent.groundMotionModels(); - } - }); - - this.parent = parent; - this.origin = origin; - this.sources = parent.singularMechs - ? initSources(smoothed) - : initMultiMechSources(smoothed); - } - - /** - * The number of sources drawn from a parent {@code GridSourceSet} during - * initialization. - */ - public int parentCount() { - return parentCount; - } - - @Override - public String name() { - return parent.name() + " (opt)"; - } - - @Override - public int size() { - return parent.size(); - } - - @Override - public Predicate<PointSource> distanceFilter(Location loc, double distance) { - throw new UnsupportedOperationException(); - } - - @Override - public Iterator<PointSource> iterator() { - throw new UnsupportedOperationException(); - } - - @Override - public Iterable<PointSource> iterableForLocation(Location loc) { - /* - * Ignore location; simply iterate over the list of sources. Source list - * will be empty if mfdTable is empty (all zeros). - */ - return sources; - } - - private static final double SRC_TO_SITE_AZIMUTH = 0.0; - // private static final double SMOOTHING_LIMIT = 40.0; // km - - /* Creates the type of point source specified in the parent. */ - private List<PointSource> initSources(boolean smoothed) { - /* For now, should only be getting here for GR MFDs. */ - Mfd modelMfd = parent.mfds.get(0); - Mfd.Properties props = modelMfd.properties(); // probably INCR - double[] mags = modelMfd.data().xValues().toArray(); - - // Can we get this from the mfd.properties? 
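- // Illustration (hypothetical values): for a uniformly spaced ladder - // mags = [5.05, 5.15, ..., 7.95], the derivation below yields Δm = 0.1 - // and bin edges mMin = 5.0, mMax = 8.0, centered on the supplied - // magnitudes; it assumes at least two uniformly spaced magnitudes.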
- // the original min max won't have offsets - double Δm = mags[1] - mags[0]; - double ΔmBy2 = Δm / 2.0; - double mMin = mags[0] - ΔmBy2; - double mMax = mags[mags.length - 1] + ΔmBy2; - - // table keys are specified as lowermost and uppermost bin edges - // double Δm = parent.Δm; - // double ΔmBy2 = Δm / 2.0; - // double mMin = parent.magMaster[0] - ΔmBy2; - // double mMax = parent.magMaster[parent.magMaster.length - 1] + ΔmBy2; - double rMax = parent.groundMotionModels().maxDistance(); - double[] smoothingOffsets = smoothingOffsets( - parent.smoothingDensity, - parent.gridSpacing); - - IntervalTable.Builder tableBuilder = new IntervalTable.Builder() - .rows(0.0, rMax, parent.distanceBin) - .columns(mMin, mMax, Δm); - - for (PointSource source : parent.iterableForLocation(origin)) { - double r = Locations.horzDistanceFast(origin, source.loc); - /* Experimental smoothing. */ - if (smoothed && r < parent.smoothingLimit) { - addSmoothed(tableBuilder, origin, source.loc, source.mfd, smoothingOffsets); - } else { - tableBuilder.add(r, source.mfd.data()); - } - parentCount++; - } - IntervalTable mfdTable = tableBuilder.build(); - - // System.out.println(parent.name()); - // System.out.println(mfdTable); - // System.out.println("TableSum: " + mfdTable.collapse().sum()); - // System.out.println(mfdTable.rows()); - - List<Double> distances = mfdTable.rows(); - maximumSize = distances.size(); - List<PointSource> b = new ArrayList<>(); - for (double r : distances) { - Mfd mfd = Mfd.create(mfdTable.row(r)); - if (mfd.data().isClear()) { - continue; - } - Location loc = Locations.location(origin, SRC_TO_SITE_AZIMUTH, r); - b.add(PointSources.pointSource( - parent.type(), - parent.sourceType, - loc, - mfd, - parent.mechMaps.get(0), - parent.rupScaling, - parent.depthModel)); - rowCount++; - } - return List.copyOf(b); - } - - /* always creates finite point sources */ - private List<PointSource> initMultiMechSources(boolean smoothed) { - - /* For now, should only be getting here for GR MFDs */ - Mfd modelMfd = parent.mfds.get(0); - Mfd.Properties props = modelMfd.properties(); // probably INCR - double[] mags = modelMfd.data().xValues().toArray(); - - double Δm = mags[1] - mags[0]; - double ΔmBy2 = Δm / 2.0; - double mMin = mags[0] - ΔmBy2; - double mMax = mags[mags.length - 1] + ΔmBy2; - - // double Δm = parent.Δm; - // double ΔmBy2 = Δm / 2.0; - // double mMin = parent.magMaster[0] - ΔmBy2; - // double mMax = parent.magMaster[parent.magMaster.length - 1] + ΔmBy2; - double rMax = parent.groundMotionModels().maxDistance(); - double[] smoothingOffsets = smoothingOffsets( - parent.smoothingDensity, - parent.gridSpacing); - - IntervalTable.Builder ssTableBuilder = new IntervalTable.Builder() - .rows(0.0, rMax, parent.distanceBin) - .columns(mMin, mMax, Δm); - - IntervalTable.Builder rTableBuilder = new IntervalTable.Builder() - .rows(0.0, rMax, parent.distanceBin) - .columns(mMin, mMax, Δm); - - IntervalTable.Builder nTableBuilder = new IntervalTable.Builder() - .rows(0.0, rMax, parent.distanceBin) - .columns(mMin, mMax, Δm); - - // XySequence srcMfdSum = null; - - for (PointSource source : parent.iterableForLocation(origin)) { - double r = Locations.horzDistanceFast(origin, source.loc); - - // if (srcMfdSum == null) { - // srcMfdSum = XySequence.emptyCopyOf(source.mfd); - // } - // srcMfdSum.add(source.mfd); - - Mfd ssMfd = Mfd.Builder.from(source.mfd).scale(source.mechWtMap.get(STRIKE_SLIP)).build(); - Mfd rMfd = Mfd.Builder.from(source.mfd).scale(source.mechWtMap.get(REVERSE)).build(); - Mfd nMfd = 
Mfd.Builder.from(source.mfd).scale(source.mechWtMap.get(NORMAL)).build(); - - // XySequence ssMfd = - // MutableXySequence.copyOf(source.mfd.data()).multiply( - // source.mechWtMap.get(STRIKE_SLIP)); - // XySequence rMfd = - // MutableXySequence.copyOf(source.mfd.data()).multiply( - // source.mechWtMap.get(REVERSE)); - // XySequence nMfd = - // MutableXySequence.copyOf(source.mfd.data()).multiply( - // source.mechWtMap.get(NORMAL)); - - if (smoothed && r < parent.smoothingLimit) { - addSmoothed(ssTableBuilder, origin, source.loc, ssMfd, smoothingOffsets); - addSmoothed(rTableBuilder, origin, source.loc, rMfd, smoothingOffsets); - addSmoothed(nTableBuilder, origin, source.loc, nMfd, smoothingOffsets); - } else { - ssTableBuilder.add(r, ssMfd.data()); - rTableBuilder.add(r, rMfd.data()); - nTableBuilder.add(r, nMfd.data()); - } - - parentCount++; - } - - IntervalTable ssTable = ssTableBuilder.build(); - // System.out.println("SS Table:" + TextUtils.NEWLINE + ssTable); - IntervalTable rTable = rTableBuilder.build(); - // System.out.println("R Table:" + TextUtils.NEWLINE + rTable); - IntervalTable nTable = nTableBuilder.build(); - // System.out.println("N Table:" + TextUtils.NEWLINE + nTable); - - // DataTable tableSum = DataTable.Builder.fromModel(ssTable) - // .add(ssTable) - // .add(rTable) - // .add(nTable) - // .build(); - // - // XySequence tableMfdSum = - // XySequence.emptyCopyOf(tableSum.row(0.1)); - // for (double row : tableSum.rows()) { - // tableMfdSum.add(tableSum.row(row)); - // } - // System.out.println("sourcesMfd:"); - // System.out.println(srcMfdSum); - // - // System.out.println("tableMfd:"); - // System.out.println(tableMfdSum); - - List<Double> distances = ssTable.rows(); - maximumSize = distances.size(); - List<PointSource> b = new ArrayList<>(); - for (double r : distances) { - Location loc = Locations.location(origin, SRC_TO_SITE_AZIMUTH, r); - boolean tableRowUsed = false; - - Mfd ssMfd = Mfd.create(ssTable.row(r)); - if (ssMfd.data().isClear()) { - continue; - } - b.add(PointSources.pointSource( - parent.type(), - PointSourceType.FINITE, - loc, - ssMfd, - parent.mechMaps.get(0), - parent.rupScaling, - parent.depthModel)); - tableRowUsed = true; - - Mfd rMfd = Mfd.create(rTable.row(r)); - if (rMfd.data().isClear()) { - continue; - } - b.add(PointSources.pointSource( - parent.type(), - PointSourceType.FINITE, - loc, - rMfd, - parent.mechMaps.get(0), - parent.rupScaling, - parent.depthModel)); - tableRowUsed = true; - - Mfd nMfd = Mfd.create(nTable.row(r)); - if (nMfd.data().isClear()) { - continue; - } - b.add(PointSources.pointSource( - parent.type(), - PointSourceType.FINITE, - loc, - nMfd, - parent.mechMaps.get(0), - parent.rupScaling, - parent.depthModel)); - tableRowUsed = true; - - if (tableRowUsed) { - rowCount++; - } - } - return List.copyOf(b); - } - - /* - * Return a distance dependent discretization. Currently this is fixed at - * 1km for r<400km and 5km for r>= 400km - */ - // private static double distanceDiscretization(double r) { - // return r < 400.0 ? 1.0 : 5.0; - // } - - private static double[] smoothingOffsets(int density, double spacing) { - return offsets(spacing, (density == 4) - ? 
OFFSET_SCALE_4 - : OFFSET_SCALE_10); - } - } - - static void addSmoothed( - IntervalTable.Builder builder, - Location site, - Location source, - Mfd mfd, - double[] smoothingOffsets) { - - LocationList distributedLocs = distributedLocations(source, smoothingOffsets); - double rateScale = 1.0 / distributedLocs.size(); - XySequence scaledMfd = MutableXySequence.copyOf(mfd.data()).multiply(rateScale); - for (Location loc : distributedLocs) { - double r = Locations.horzDistanceFast(site, loc); - builder.add(r, scaledMfd); - } - } - - static LocationList distributedLocations(Location loc, double[] offsets) { - double lon = loc.longitude; - double lat = loc.latitude; - LocationList.Builder locations = LocationList.builder(); - for (double Δlat : offsets) { - double offsetLat = lat + Δlat; - for (double Δlon : offsets) { - double offsetLon = lon + Δlon; - locations.add(Location.create(offsetLon, offsetLat)); - } - } - return locations.build(); - } - - private static final int PRECISION = 5; - private static final double[] OFFSET_SCALE_10 = Sequences - .arrayBuilder(-0.45, 0.45, 0.1) - .build(); - private static final double[] OFFSET_SCALE_4 = Sequences - .arrayBuilder(-0.375, 0.375, 0.25) - .build(); - - /* - * Create lat-lon offset values for use when building list of distributed - * point source locations. - * - * NOTE DataArray would be cleaner here - */ - private static double[] offsets(double cellScale, double[] offsetArray) { - return DoubleData.round(PRECISION, DoubleData.multiply( - cellScale, - Arrays.copyOf(offsetArray, offsetArray.length))); - } - -} diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/HazardModel.java b/src/main/java/gov/usgs/earthquake/nshmp/model/HazardModel.java index 77292e46..8634d422 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/HazardModel.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/model/HazardModel.java @@ -2,7 +2,9 @@ package gov.usgs.earthquake.nshmp.model; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.Preconditions.checkState; +import static com.google.common.base.Strings.padEnd; import static gov.usgs.earthquake.nshmp.Text.NEWLINE; +import static java.util.stream.Collectors.toCollection; import static java.util.stream.Collectors.toUnmodifiableMap; import java.awt.geom.Area; @@ -36,30 +38,28 @@ import gov.usgs.earthquake.nshmp.geo.json.Feature; import gov.usgs.earthquake.nshmp.gmm.Gmm; import gov.usgs.earthquake.nshmp.gmm.GroundMotionModel; import gov.usgs.earthquake.nshmp.gmm.NehrpSiteClass; -import gov.usgs.earthquake.nshmp.model.SourceTree.Leaf; -import gov.usgs.earthquake.nshmp.tree.Branch; -import gov.usgs.earthquake.nshmp.tree.LogicTree; /** * A {@code HazardModel} is the top-level wrapper for earthquake {@link Source} * definitions and attendant {@link GroundMotionModel}s used in probabilistic * seismic hazard analyses (PSHAs) and related calculations. A - * {@code HazardModel} contains of a number of {@link SourceSet}s that define - * logical groupings of sources by {@link SourceType}. Each {@code SourceSet} + * {@code HazardModel} contains a number of {@link RuptureSet}s that define + * logical groupings of sources by {@link SourceType}. Each {@code RuptureSet} * carries with it references to the {@code GroundMotionModel}s and associated * weights to be used when evaluating hazard. * * @author U.S. Geological Survey * @see Source - * @see SourceSet + * @see RuptureSet * @see SourceType */ -public final class HazardModel implements Iterable<SourceSet<? 
extends Source>> { +public final class HazardModel implements Iterable<SourceTree> { private final String name; private final CalcConfig config; private final Map<NehrpSiteClass, Double> siteClassMap; - private final Multimap<SourceType, SourceSet<? extends Source>> sourceSetMap; + + private final Multimap<SourceType, RuptureSet<? extends Source>> ruptureSetMap; private final Set<TectonicSetting> settings; private final Map<TectonicSetting, MapRegion> mapRegionMap; private final SiteData siteData; @@ -71,7 +71,7 @@ public final class HazardModel implements Iterable<SourceSet<? extends Source>> this.name = builder.info.name; this.config = builder.config; this.siteClassMap = Maps.immutableEnumMap(builder.info.siteClassMap); - this.sourceSetMap = builder.sourceSetMap; + this.ruptureSetMap = builder.ruptureSetMap; this.settings = Sets.immutableEnumSet(builder.settings); this.mapRegionMap = Maps.immutableEnumMap(builder.mapRegionMap); this.siteData = builder.siteData; @@ -101,19 +101,15 @@ public final class HazardModel implements Iterable<SourceSet<? extends Source>> } /** - * The number of {@code SourceSet}s in this {@code HazardModel}. + * The number of {@code RuptureSet}s in this {@code HazardModel}. */ - public int size() { - return sourceSetMap.size(); + public int size() { // TODO this should be the number of trees + return ruptureSetMap.size(); } @Override - public Iterator<SourceSet<? extends Source>> iterator() { - return sourceSetMap.values().iterator(); - } - - public Iterator<SourceSet<? extends Source>> iterableForLocation(Location location) { - return null; + public Iterator<SourceTree> iterator() { + return treesMap.values().iterator(); } /** @@ -135,15 +131,15 @@ public final class HazardModel implements Iterable<SourceSet<? extends Source>> */ public Set<Gmm> gmms() { return Streams.stream(this) - .flatMap(ss -> ss.groundMotionModels().gmms().stream()) - .collect(Sets.toImmutableEnumSet()); + .flatMap(tree -> tree.gmms().gmms().stream()) + .collect(toCollection(() -> EnumSet.noneOf(Gmm.class))); } /** * The set of {@code SourceType}s included in this model. */ public Set<SourceType> types() { - return sourceSetMap.keySet(); + return ruptureSetMap.keySet(); } /** @@ -191,7 +187,7 @@ public final class HazardModel implements Iterable<SourceSet<? extends Source>> * * @param location of interest */ - public Predicate<SourceSet<? extends Source>> settingFilter( + public Predicate<RuptureSet<? extends Source>> settingFilter( CalcConfig config, Location location) { return new TectonicSettingFilter(this, config, location); @@ -201,11 +197,16 @@ public final class HazardModel implements Iterable<SourceSet<? extends Source>> public String toString() { StringBuilder sb = new StringBuilder("HazardModel: "); sb.append(name).append(NEWLINE); - for (SourceSet<? extends Source> sourceSet : this) { - String typeStr = " " + sourceSet.type() + " Source Set"; - sb.append(Strings.padEnd(typeStr, 26, ' ')); - sb.append(sourceSet); - sb.append(NEWLINE); + for (SourceTree tree : this) { + sb.append(padEnd(tree.type().toString(), 12, ' ')) + .append(tree.id()) + .append(" ") + .append(Strings.padEnd(tree.name(), 56, ' ')) + .append("size: ") + .append(tree.size()) + .append(NEWLINE) + .append(tree) + .append(NEWLINE); } return sb.toString(); } @@ -224,9 +225,9 @@ public final class HazardModel implements Iterable<SourceSet<? extends Source>> private Info info; CalcConfig config; // also accessible during model initialization - private SetMultimap<SourceType, SourceSet<? 
extends Source>> sourceSetMap; + private SetMultimap<SourceType, RuptureSet<? extends Source>> ruptureSetMap; - private ImmutableSetMultimap.Builder<SourceType, SourceSet<? extends Source>> sourceMapBuilder; + private ImmutableSetMultimap.Builder<SourceType, RuptureSet<? extends Source>> ruptureMapBuilder; private Set<TectonicSetting> settings = EnumSet.noneOf(TectonicSetting.class); private Map<TectonicSetting, MapRegion> mapRegionMap = new EnumMap<>(TectonicSetting.class); @@ -240,7 +241,7 @@ public final class HazardModel implements Iterable<SourceSet<? extends Source>> private List<SourceTree> trees = new ArrayList<>(); private Builder() { - sourceMapBuilder = ImmutableSetMultimap.builder(); + ruptureMapBuilder = ImmutableSetMultimap.builder(); treesMapBuilder = ImmutableListMultimap.builder(); } @@ -270,12 +271,12 @@ public final class HazardModel implements Iterable<SourceSet<? extends Source>> trees.add(tree); settings.add(tree.setting()); treesMapBuilder.put(tree.setting(), tree); - addSourceSetsFromTree(tree); + addRuptureSetsFromTree(tree); return this; } - private Builder addSourceSet(SourceSet<? extends Source> source) { - sourceMapBuilder.put(source.type(), source); + private Builder addRuptureSet(RuptureSet<? extends Source> source) { + ruptureMapBuilder.put(source.type(), source); return this; } @@ -283,7 +284,7 @@ public final class HazardModel implements Iterable<SourceSet<? extends Source>> checkState(!built, "This %s instance has already been used", mssgID); checkState(info != null, "%s info not set", mssgID); checkState(config != null, "%s config not set", mssgID); - checkState(sourceSetMap.size() > 0, "%s has no source sets", mssgID); + checkState(ruptureSetMap.size() > 0, "%s has no rupture sets", mssgID); checkState(settings.size() > 0, "%s has no tectonic settings", mssgID); checkState(siteData != null, "%s site data not set", mssgID); checkState(treesMap.size() > 0, "%s has no source trees", mssgID); @@ -292,7 +293,7 @@ public final class HazardModel implements Iterable<SourceSet<? extends Source>> HazardModel build() { - sourceSetMap = sourceMapBuilder.build(); + ruptureSetMap = ruptureMapBuilder.build(); treesMap = treesMapBuilder.build(); idMap = treesMap.values().stream() .collect(toUnmodifiableMap( @@ -302,211 +303,9 @@ public final class HazardModel implements Iterable<SourceSet<? 
extends Source>> return new HazardModel(this); } - private void addSourceSetsFromTree(SourceTree tree) { - switch (tree.type()) { - case ZONE: - zoneSourceSetsFromTree(tree); - break; - case FAULT: - case FAULT_CLUSTER: - case FAULT_SYSTEM: - faultSourceSetsFromTree(tree); - break; - case GRID: - gridSourceSetsFromTree(tree); - break; - case SLAB: - slabSourceSetsFromTree(tree); - break; - case INTERFACE: - case DECOLLEMENT: - interfaceSourceSetsFromTree(tree); - break; - default: - throw new UnsupportedOperationException(); - } - } - - /* - * We really should be able to collapse cluster rate tree as well; never - * really thought about this because the rate tree was formerly isolated in - * individual\source files, but now we've split the top level source-tree - * branches on geometry/position - */ - private void faultSourceSetsFromTree(SourceTree tree) { - - /* SYSTEM: each leaf is a SystemSourceSet */ - if (tree.type() == SourceType.FAULT_SYSTEM) { - for (Leaf leaf : tree.branches().keySet()) { - RuptureSet rs = leaf.ruptureSet; - double branchWeight = tree.leaves().get(leaf); - SystemRuptureSet srs = (SystemRuptureSet) rs; - SystemSourceSet sss = srs.createSourceSet(branchWeight); - addSourceSet(sss); - } - return; - } - - /* - * Check if mixed fault/cluster types - * - * FAULT/CLUSTER: each FAULT leaf is a FaultSource in a FaultSourceSet, - * each CLUSTER leaf is an individual clusterSourceSet - */ - - FaultSourceSet.Builder fss = new FaultSourceSet.Builder(); - fss.name(tree.name()) - .setting(tree.setting()) - .weight(1.0) - .gmms(tree.gmms()); - - for (Leaf leaf : tree.branches().keySet()) { - - RuptureSet rs = leaf.ruptureSet; - double branchWeight = tree.leaves().get(leaf); - - if (rs.type() == SourceType.FAULT) { - - double leafWeight = tree.leaves().get(leaf); - FaultRuptureSet frs = (FaultRuptureSet) rs; - - /* - * Revisit: We're overwriting the id with the id of each rupture set - * leaf for dip variants this is ok, but there are many other sources - * where this WILL NOT hold; rupture-sets commonly have unique IDs - * that are distinct from the (possibly multiple) underlying fault - * sections. - */ - fss.id(frs.id()); - - // Leaf weight is being applied to total MFD - FaultSource faultSource = faultRuptureSetToSource(frs, leafWeight); - // System.out.println("FS size: " + faultSource.size()); - - if (faultSource.size() > 0) { - fss.source(faultSource, leafWeight); - } else { - System.out.println(" No source ruptures: " + faultSource.name()); - } - } else { - - ClusterRuptureSet crs = (ClusterRuptureSet) rs; - ClusterSourceSet.Builder css = new ClusterSourceSet.Builder(); - css.name(crs.name) - .id(crs.id) - .setting(tree.setting()) - .gmms(tree.gmms()) - .weight(branchWeight); // NM geometry branch - - // Consider ability to collapse cluster rate-tree. - LogicTree<Double> rateTree = crs.data.rateTree().orElseThrow(); - - for (Branch<Double> rateBranch : rateTree) { - - double rate = (rateBranch.value() <= 0.0) ? 0.0 : rateBranch.value(); - String rateLabel = (rate == 0.0) ? 
"0" : String.valueOf((int) (1.0 / rate)); - String clusterLabel = crs.name + " : " + rateLabel + "-yr"; - ClusterSource.Builder csBuilder = new ClusterSource.Builder() - .name(clusterLabel) - .id(crs.id) - .rate(rate); // cluster rate - - FaultSourceSet.Builder cfss = new FaultSourceSet.Builder(); - cfss.name(clusterLabel) - .id(crs.id) - .setting(tree.setting()) - .gmms(tree.gmms()); - - // The weight of the cfss gets called by ClusterSource.weight() - // and is used in HazardCurveSet.Builder.addCurves() - cfss.weight(rateBranch.weight()); - - for (FaultRuptureSet frs : crs.faultRuptureSets) { - cfss.source(faultRuptureSetToSource(frs, 1.0), 1.0); - } - csBuilder.faults(cfss.buildFaultSet()); - css.source(csBuilder.buildClusterSource()); - - } - - addSourceSet(css.buildClusterSet()); - } - } - if (fss.sources.size() > 0) { - addSourceSet(fss.buildFaultSet()); - } else { - System.out.println(" Empty Source Set: " + fss.name); - } - } - - private FaultSource faultRuptureSetToSource( - FaultRuptureSet frs, - double leafWeight) { - - return new FaultSource.Builder() - .feature(frs.feature) - .config(frs.data.sourceConfig().asFault()) - .mfdTree(frs.mfdTree) - .mfdTotal(frs.mfdTotal) - .branchWeight(leafWeight) - .build(); + private void addRuptureSetsFromTree(SourceTree tree) { + tree.forEach(branch -> addRuptureSet(branch.value())); } - - private void interfaceSourceSetsFromTree(SourceTree tree) { - - InterfaceSourceSet.Builder builder = new InterfaceSourceSet.Builder(); - builder.name(tree.name()) - .id(-1) - .setting(tree.setting()) - .weight(1.0) - .gmms(tree.gmms()); - - for (Leaf leaf : tree.branches().keySet()) { - double leafWeight = tree.leaves().get(leaf); - InterfaceRuptureSet irs = (InterfaceRuptureSet) leaf.ruptureSet; - InterfaceSource is = interfaceRuptureSetToSource(irs, leafWeight); - builder.source(is, leafWeight); - } - addSourceSet(builder.buildSubductionSet()); - } - - private InterfaceSource interfaceRuptureSetToSource( - InterfaceRuptureSet irs, - double weight) { - - return new InterfaceSource.Builder() - .feature(irs.feature) - .config(irs.data.sourceConfig().asInterface()) - .mfdTree(irs.mfdTree) - .mfdTotal(irs.mfdTotal) - .branchWeight(weight) - .build(); - } - - private void zoneSourceSetsFromTree(SourceTree tree) { - for (Leaf leaf : tree.branches().keySet()) { - double leafWeight = tree.leaves().get(leaf); - ZoneRuptureSet zrs = (ZoneRuptureSet) leaf.ruptureSet; - addSourceSet(zrs.sourceSet(leafWeight)); - } - } - - private void slabSourceSetsFromTree(SourceTree tree) { - for (Leaf leaf : tree.branches().keySet()) { - double leafWeight = tree.leaves().get(leaf); - SlabRuptureSet srs = (SlabRuptureSet) leaf.ruptureSet; - addSourceSet(srs.sourceSet(leafWeight)); - } - } - - private void gridSourceSetsFromTree(SourceTree tree) { - for (Leaf leaf : tree.branches().keySet()) { - double leafWeight = tree.leaves().get(leaf); - GridRuptureSet grs = (GridRuptureSet) leaf.ruptureSet; - addSourceSet(grs.sourceSet(leafWeight)); - } - } - } static final class Info { @@ -525,7 +324,7 @@ public final class HazardModel implements Iterable<SourceSet<? extends Source>> * A source filter that skips those in tectonic settings if the supplied * location is outside the map-region for the setting. */ - static class TectonicSettingFilter implements Predicate<SourceSet<? extends Source>> { + static class TectonicSettingFilter implements Predicate<RuptureSet<? 
extends Source>> { private Set<TectonicSetting> settings; @@ -543,8 +342,8 @@ public final class HazardModel implements Iterable<SourceSet<? extends Source>> } @Override - public boolean test(SourceSet<? extends Source> sourceSet) { - return settings.contains(sourceSet.setting()); + public boolean test(RuptureSet<? extends Source> ruptures) { + return settings.contains(ruptures.setting()); } } diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/InterfaceRuptureSet.java b/src/main/java/gov/usgs/earthquake/nshmp/model/InterfaceRuptureSet.java index d4960b97..052bc3a7 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/InterfaceRuptureSet.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/model/InterfaceRuptureSet.java @@ -4,13 +4,14 @@ import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.Preconditions.checkState; import static gov.usgs.earthquake.nshmp.model.Deserialize.MFD_TREE; -import static gov.usgs.earthquake.nshmp.model.SourceType.INTERFACE; import static java.util.stream.Collectors.collectingAndThen; import static java.util.stream.Collectors.toList; import java.util.ArrayList; +import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.function.Predicate; import gov.usgs.earthquake.nshmp.Earthquakes; import gov.usgs.earthquake.nshmp.fault.surface.ApproxGriddedSurface; @@ -30,7 +31,7 @@ import gov.usgs.earthquake.nshmp.tree.LogicTree; * * @author U.S. Geological Survey */ -public class InterfaceRuptureSet implements RuptureSet { +public class InterfaceRuptureSet extends AbstractRuptureSet<InterfaceSource> { /* May be original interface section or stitched. */ final SourceFeature.Interface feature; @@ -40,10 +41,11 @@ public class InterfaceRuptureSet implements RuptureSet { final Mfd mfdTotal; final List<Integer> sectionIds;// reference: actually needed? 
- final GriddedSurface surface; + private final List<InterfaceSource> source; InterfaceRuptureSet(Builder builder) { + super(builder); this.feature = builder.feature; this.data = builder.data; @@ -52,31 +54,49 @@ public class InterfaceRuptureSet implements RuptureSet { this.sectionIds = builder.sectionIds; this.surface = builder.surface; + this.source = List.of(createSource()); } - @Override - public String name() { - return feature.name; + private InterfaceSource createSource() { + return new InterfaceSource.Builder() + .feature(feature) + .config(data.sourceConfig().asInterface()) + .mfdTree(mfdTree) + .mfdTotal(mfdTotal) + .build(); } @Override - public int id() { - return feature.id; + public int size() { + return source.size(); // TODO should this be # ruptures } @Override - public int size() { - throw new UnsupportedOperationException(); + public LogicTree<Mfd> mfdTree() { + return mfdTree; } @Override - public SourceType type() { - return INTERFACE; + public Iterator<InterfaceSource> iterator() { + return source.iterator(); } @Override - public LogicTree<Mfd> mfdTree() { - return mfdTree; + public Predicate<InterfaceSource> distanceFilter( + Location loc, + double distance) { + + return new Predicate<InterfaceSource>() { + private Predicate<Location> filter = Locations.distanceFilter(loc, distance); + + @Override + public boolean test(InterfaceSource source) { + return filter.test(source.upperTrace.first()) || + filter.test(source.upperTrace.last()) || + filter.test(source.lowerTrace.first()) || + filter.test(source.lowerTrace.last()); + } + }; } /** @@ -93,23 +113,12 @@ public class InterfaceRuptureSet implements RuptureSet { .build()); } - @Override - public String toString() { - Map<Object, Object> data = Map.of( - "name", name(), - "id", id(), - "feature", feature.source.toJson()); - return getClass().getSimpleName() + " " + data; - } - static Builder builder() { return new Builder(); } /* Single use builder */ - static class Builder { - - private boolean built = false; + static class Builder extends AbstractRuptureSet.Builder { private SourceFeature.Interface feature; private ModelData data; @@ -122,17 +131,19 @@ public class InterfaceRuptureSet implements RuptureSet { private Mfd mfdTotal; private GriddedSurface surface; - /* Validated and carried forward in rupture set feature. */ - private String name; - private Integer id; + Builder() { + super(SourceType.INTERFACE); + } + @Override Builder name(String name) { - this.name = name; + super.name(name); return this; } + @Override Builder id(int id) { - this.id = id; + super.id(id); return this; } @@ -151,6 +162,8 @@ public class InterfaceRuptureSet implements RuptureSet { /* Set MFD helper data. 
*/ Builder modelData(ModelData data) { this.data = data; + super.setting(data.tectonicSetting()); + super.gmms(data.gmms()); return this; } @@ -160,9 +173,8 @@ public class InterfaceRuptureSet implements RuptureSet { } private void validateAndInit(String label) { - checkState(!built, "Single use builder"); - checkNotNull(name, "%s name", label); - checkNotNull(id, "%s id", label); + validateState(label); + checkNotNull(data, "%s model data", label); checkNotNull(sectionIds, "%s section id list", label); checkNotNull(mfdPropsTree, "%s MFD logic tree", label); @@ -184,15 +196,11 @@ public class InterfaceRuptureSet implements RuptureSet { feature.traces.get(0), feature.traces.get(1), config.surfaceSpacing); - - built = true; } private SourceFeature.Interface createFeature() { Map<Integer, SourceFeature.Interface> featureMap = data.interfaceFeatureMap().orElseThrow(); - // System.out.println(sectionIds); - // System.out.println(featureMap.keySet()); checkState(featureMap.keySet().containsAll(sectionIds)); Feature f = joinFeatures(sectionIds.stream() diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/InterfaceSource.java b/src/main/java/gov/usgs/earthquake/nshmp/model/InterfaceSource.java index 1c1c9916..8984aacc 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/InterfaceSource.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/model/InterfaceSource.java @@ -49,9 +49,7 @@ public class InterfaceSource implements Source { final LogicTree<Mfd> mfdTree; - final double branchWeight; // rupture set branch weight - - private final List<Mfd> scaledMfds; // includes ruptureSet weight + private final List<Mfd> scaledMfds; // includes ruptures weight private final List<List<Rupture>> ruptureLists; // 1:1 with Mfds private InterfaceSource(Builder builder) { @@ -59,14 +57,13 @@ public class InterfaceSource implements Source { this.config = builder.config; this.mfdTree = builder.mfdTree; - this.branchWeight = builder.branchWeight; upperTrace = feature.traces.get(0); lowerTrace = feature.traces.get(feature.traces.size() - 1); surface = new ApproxGriddedSurface(upperTrace, lowerTrace, config.surfaceSpacing); - this.scaledMfds = ModelTrees.scaledMfdList(mfdTree, branchWeight); + this.scaledMfds = ModelTrees.scaledMfdList(mfdTree); this.ruptureLists = scaledMfds.stream() .map(this::createRuptureList) .collect(toUnmodifiableList()); @@ -160,8 +157,6 @@ public class InterfaceSource implements Source { LogicTree<Mfd> mfdTree; Mfd mfdTotal; - Double branchWeight; - Builder feature(SourceFeature.Interface feature) { this.feature = feature; return this; @@ -184,11 +179,6 @@ public class InterfaceSource implements Source { return this; } - Builder branchWeight(double weight) { - this.branchWeight = weight; - return this; - } - InterfaceSource build() { /* @@ -203,7 +193,6 @@ public class InterfaceSource implements Source { checkState(config != null, "%s config not set", ID); checkState(mfdTree != null, "%s MFD tree not set", ID); checkState(mfdTotal != null, "%s total MFD not set", ID); - checkState(branchWeight != null, "%s source branch weight not set", ID); built = true; return new InterfaceSource(this); diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/InterfaceSourceSet.java b/src/main/java/gov/usgs/earthquake/nshmp/model/InterfaceSourceSet.java deleted file mode 100644 index ccaaf4cd..00000000 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/InterfaceSourceSet.java +++ /dev/null @@ -1,95 +0,0 @@ -package gov.usgs.earthquake.nshmp.model; - -import static 
com.google.common.base.Preconditions.checkNotNull; -import static com.google.common.base.Preconditions.checkState; - -import java.util.Iterator; -import java.util.List; -import java.util.function.Predicate; - -import com.google.common.collect.Lists; - -import gov.usgs.earthquake.nshmp.geo.Location; -import gov.usgs.earthquake.nshmp.geo.Locations; -import gov.usgs.earthquake.nshmp.tree.LogicGroup; -import gov.usgs.earthquake.nshmp.tree.LogicTree; - -/** - * Container class for groups of related {@link InterfaceSource}s. - * - * @author U.S. Geological Survey - * @see InterfaceSource - */ -public class InterfaceSourceSet extends AbstractSourceSet<InterfaceSource> { - - private final List<InterfaceSource> sources; - - /* Not used. Source weight is built into source MFDs */ - private final LogicTree<InterfaceSource> sourceTree; - - private InterfaceSourceSet(Builder builder) { - super(builder); - this.sources = builder.sources; - this.sourceTree = builder.sourceTree; - } - - @Override - public Iterator<InterfaceSource> iterator() { - return sources.iterator(); - } - - @Override - public int size() { - return sources.size(); - } - - @Override - public Predicate<InterfaceSource> distanceFilter(Location loc, - double distance) { - - return new Predicate<InterfaceSource>() { - private Predicate<Location> filter = Locations.distanceFilter(loc, distance); - - @Override - public boolean test(InterfaceSource source) { - return filter.test(source.upperTrace.first()) || - filter.test(source.upperTrace.last()) || - filter.test(source.lowerTrace.first()) || - filter.test(source.lowerTrace.last()); - } - - @Override - public String toString() { - return "InterfaceSourceSet.DistanceFilter[ " + filter.toString() + " ]"; - } - }; - } - - /* Single use builder. */ - static class Builder extends FaultSourceSet.Builder { - - static final String ID = "InterfaceSourceSet.Builder"; - - final List<InterfaceSource> sources = Lists.newArrayList(); - final LogicGroup.Builder<InterfaceSource> treeBuilder = LogicGroup.builder(); - LogicTree<InterfaceSource> sourceTree; - - Builder() { - super(SourceType.INTERFACE); - } - - Builder source(InterfaceSource source, double weight) { - sources.add(checkNotNull(source, "InterfaceSource is null")); - treeBuilder.addBranch(source.name(), source, weight); - return this; - } - - InterfaceSourceSet buildSubductionSet() { - validateState(ID); - sourceTree = treeBuilder.build(); - checkState(sources.size() > 0, "%s source list is empty", id); - return new InterfaceSourceSet(this); - } - } - -} diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/ModelFiles.java b/src/main/java/gov/usgs/earthquake/nshmp/model/ModelFiles.java index 1384698c..ed3b9935 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/ModelFiles.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/model/ModelFiles.java @@ -67,7 +67,7 @@ class ModelFiles { return read(dir, GRID_DATA_DIR, Function.identity()); } - /* Check for grid data directory. */ + /* Check for fault system rupture data file. */ static Optional<Path> checkRupturesFile(Path dir) { return read(dir, RUPTURES, Function.identity()); } @@ -226,32 +226,32 @@ class ModelFiles { } /* Interface rupture-set. */ - static Optional<RuptureSet> readInterfaceRuptureSet(Path dir, ModelData data) { + static Optional<InterfaceRuptureSet> readInterfaceRuptureSet(Path dir, ModelData data) { return read(dir, RUPTURE_SET, data, Deserialize::interfaceRuptureSet); } /* Fault rupture-set. 
*/ - static Optional<RuptureSet> readFaultRuptureSet(Path dir, ModelData data) { + static Optional<FaultRuptureSet> readFaultRuptureSet(Path dir, ModelData data) { return read(dir, RUPTURE_SET, data, Deserialize::faultRuptureSet); } - /* Fault cluster-set. */ - static Optional<RuptureSet> readFaultClusterSet(Path dir, ModelData data) { - return read(dir, CLUSTER_SET, data, Deserialize::clusterRuptureSet); + /* Fault cluster-set data. */ + static Optional<ClusterRuptureSet.Data> readFaultClusterSet(Path dir, ModelData data) { + return read(dir, CLUSTER_SET, data, Deserialize::clusterRuptureSetData); } /* Fault system rupture-set. */ - static Optional<RuptureSet> readSystemRuptureSet(Path dir, ModelData data) { + static Optional<SystemRuptureSet> readSystemRuptureSet(Path dir, ModelData data) { return read(dir, RUPTURE_SET, data, Deserialize::systemRuptureSet); } /* Grid rupture-set. */ - static Optional<List<RuptureSet>> readGridRuptureSets(Path dir, ModelData data) { + static Optional<List<GridRuptureSet>> readGridRuptureSets(Path dir, ModelData data) { return read(dir, RUPTURE_SETS, data, Deserialize::gridRuptureSets); } /* Slab rupture-set. */ - static Optional<List<RuptureSet>> readSlabRuptureSets(Path dir, ModelData data) { + static Optional<List<SlabRuptureSet>> readSlabRuptureSets(Path dir, ModelData data) { return read(dir, RUPTURE_SETS, data, Deserialize::slabRuptureSets); } diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/ModelLoader.java b/src/main/java/gov/usgs/earthquake/nshmp/model/ModelLoader.java index f3f37061..5a26ed12 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/ModelLoader.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/model/ModelLoader.java @@ -412,8 +412,8 @@ abstract class ModelLoader { .build(); feature = new SourceFeature.NshmFault(newFeature); - RuptureSet ruptureSet = createSingleRuptureSet(data, feature); - treeBuilder.addLeaf(branch.orElseThrow(), ruptureSet); + RuptureSet<? extends Source> ruptures = createSingleRuptureSet(data, feature); + treeBuilder.addLeaf(branch.orElseThrow(), ruptures); return treeBuilder; } @@ -458,11 +458,11 @@ abstract class ModelLoader { .build()) .build(); - RuptureSet ruptureSet = createSingleRuptureSet( + RuptureSet<? extends Source> ruptures = createSingleRuptureSet( data, new SourceFeature.NshmFault(dipFeature)); - treeBuilder.addLeaf(dipSourceTree.get(i), ruptureSet); + treeBuilder.addLeaf(dipSourceTree.get(i), ruptures); } return treeBuilder;// .build(); } @@ -564,7 +564,7 @@ abstract class ModelLoader { /* * Don't use loadRateTree() here; nested rate trees on branches are more - * likely numeric recurrence rates than slip rate branches. + * likely numeric recurrence rates than slip or cluster rate branches. */ readRateTree(dir).ifPresent(data::rateTree); @@ -581,10 +581,70 @@ abstract class ModelLoader { } else { - RuptureSet rs = readFaultRuptureSet(dir, data).orElseGet( - () -> readFaultClusterSet(dir, data).orElseThrow()); + /* Could have fault or cluster rupture-set. */ + var frs = readFaultRuptureSet(dir, data); + if (frs.isPresent()) { + treeBuilder.addLeaf(branch, frs.orElseThrow()); + } else { + var crsd = readFaultClusterSet(dir, data).orElseThrow(); + processClusterBranch(branch, treeBuilder, data, crsd); + } + } + } + + /* + * Process fault clusters adding additional cluster rate branches. The + * supplied cluster-set is used as a model to create a tree of rate + * branches, each with the same cluster geometry. 
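+ * + * For example (hypothetical values), a rate branch with id 'R2' and + * value 0.002 yields rateLabel 'R2-500-yr' (1.0 / 0.002 = 500) and one + * ClusterRuptureSet leaf carrying that rate; a branch with a + * non-positive value maps to an id + '-null' label with rate 0.0.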
+ */ + private void processClusterBranch( + Branch<Path> branch, + SourceTree.Builder treeBuilder, + ModelData data, + ClusterRuptureSet.Data clusterData) { + + LogicTree<Double> rateTree = data.rateTree().orElseThrow(); + LogicTree.Builder<Path> pathTreeBuilder = LogicTree.builder(rateTree.name()); + List<ClusterRuptureSet> crsList = new ArrayList<>(); + + for (int i = 0; i < rateTree.size(); i++) { + + Branch<Double> rateBranch = rateTree.get(i); + double rate = (rateBranch.value() <= 0.0) ? 0.0 : rateBranch.value(); + String rateLabel = rateBranch.id() + ((rate == 0.0) + ? "-null" + : "-" + String.valueOf((int) Math.round(1.0 / rate)) + "-yr"); + String clusterLabel = clusterData.name + " : " + rateLabel; + + Path branchPath = root.relativize(branch.value()).resolve(rateLabel); + System.out.println(" branch: " + branchPath); + + ClusterSource clusterSource = new ClusterSource.Builder() + .name(clusterLabel) + .id(clusterData.id) + .rate(rateBranch.value()) // cluster rate + .faults(clusterData.faultRuptureSets) + .buildClusterSource(); + + ClusterRuptureSet crs = ClusterRuptureSet.builder() + .name(clusterLabel) + .id(clusterData.id) + .setSource(clusterSource) + .modelData(data) + .build(); + crsList.add(crs); + + pathTreeBuilder.addBranch( + clusterLabel, + Path.of(clusterLabel), + rateBranch.weight()); + } + + LogicTree<Path> pathTree = pathTreeBuilder.build(); + treeBuilder.addBranches(branch, pathTree); - treeBuilder.addLeaf(branch, rs); + for (int i = 0; i < pathTree.size(); i++) { + treeBuilder.addLeaf(pathTree.get(i), crsList.get(i)); } } @@ -937,7 +997,8 @@ abstract class ModelLoader { /* Expect a rupture-sets file. */ - List<? extends RuptureSet> ruptureSets = readGridRuptureSets(dir, data).orElseThrow(); + List<? extends RuptureSet<? extends Source>> ruptureSets = + readGridRuptureSets(dir, data).orElseThrow(); /* Attach singleton as leaf. */ if (ruptureSets.size() == 1) { @@ -947,8 +1008,8 @@ abstract class ModelLoader { /* Otherwise create logic group of rupture set pseudo paths. */ LogicGroup.Builder<Path> grsGroup = LogicGroup.builder(); - for (RuptureSet rs : ruptureSets) { - grsGroup.addBranch(rs.name(), Path.of(rs.name()), 1.0); + for (RuptureSet<? extends Source> ruptureSet : ruptureSets) { + grsGroup.addBranch(ruptureSet.name(), Path.of(ruptureSet.name()), 1.0); } LogicTree<Path> grsTree = grsGroup.build(); treeBuilder.addBranches(branch, grsTree); @@ -1062,7 +1123,8 @@ abstract class ModelLoader { /* Expect a rupture-sets file. */ - List<? extends RuptureSet> ruptureSets = readSlabRuptureSets(dir, data).orElseThrow(); + List<? extends RuptureSet<? extends Source>> ruptureSets = + readSlabRuptureSets(dir, data).orElseThrow(); /* Attach singleton as leaf. */ if (ruptureSets.size() == 1) { @@ -1072,8 +1134,8 @@ abstract class ModelLoader { /* Otherwise create logic group of rupture set pseudo paths. */ LogicGroup.Builder<Path> grsGroup = LogicGroup.builder(); - for (RuptureSet rs : ruptureSets) { - grsGroup.addBranch(rs.name(), Path.of(rs.name()), 1.0); + for (RuptureSet<? 
extends Source> ruptureSet : ruptureSets) { + grsGroup.addBranch(ruptureSet.name(), Path.of(ruptureSet.name()), 1.0); } LogicTree<Path> grsTree = grsGroup.build(); treeBuilder.addBranches(branch, grsTree); diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/ModelTrees.java b/src/main/java/gov/usgs/earthquake/nshmp/model/ModelTrees.java index 276b9d60..04d1c63c 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/ModelTrees.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/model/ModelTrees.java @@ -209,6 +209,7 @@ class ModelTrees { } /* LogicTree<MFD> --> List<MFD * branchWeight * weight> */ + @Deprecated // this supports weight collapsing which we're trying to avoid static List<Mfd> scaledMfdList(LogicTree<Mfd> tree, double weight) { return tree.stream() .map(branch -> reduceMfdBranch(branch, weight)) diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/Models.java b/src/main/java/gov/usgs/earthquake/nshmp/model/Models.java index 8791f318..a0b97819 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/Models.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/model/Models.java @@ -148,8 +148,8 @@ public class Models { this.branches = tree.branches().entrySet().stream() .map(e -> new SourceBranch( e.getValue().toString(), - tree.leaves().get(e.getKey()), - e.getKey().ruptureSet.mfdTree())) + tree.weights().get(e.getKey()), + e.getKey().ruptures.mfdTree())) .collect(toList()); } } diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/PointSource.java b/src/main/java/gov/usgs/earthquake/nshmp/model/PointSource.java index 945c2c19..6d455af4 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/PointSource.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/model/PointSource.java @@ -66,7 +66,7 @@ class PointSource implements Source { * not simulate finiteness (e.g. rupture rRup values will differ from rJB only * by virtue of the depth of the source). * - * @param type of source, as supplied from a parent {@code SourceSet} + * @param type of source, as supplied from a parent {@code RuptureSet} * @param loc <code>Location</code> of the point source * @param mfd magnitude frequency distribution of the source * @param mechWtMap <code>Map</code> of focal mechanism weights @@ -110,7 +110,7 @@ class PointSource implements Source { /** * Overridden to return {@code -1}. Note that {@code PointSource}s may be - * retrieved by index using {@link GridSourceSet#source(int)}. + * retrieved by index using {@link GridRuptureSet#source(int)}. */ @Override public int id() { @@ -193,7 +193,7 @@ class PointSource implements Source { /* * Get the number of mag-depth iterations required to get to mMax. See - * explanation in GridSourceSet for how magDepthIndices is set up + * explanation in GridRuptureSet for how magDepthIndices is set up */ magDepthSize = depthModel.magDepthIndices.lastIndexOf(mfd.data().size() - 1) + 1; @@ -298,7 +298,7 @@ class PointSource implements Source { * (have more magnitudes) than required by grid or area point source * implementations as it usually spans the [mMin mMax] of some master MFD. * Implementations will only ever reference those indices up to their - * individual mMax so there should only be one per GridSourceSet or + * individual mMax so there should only be one per GridRuptureSet or * AreaSource. * * Given magDepthMap: @@ -330,7 +330,7 @@ class PointSource implements Source { * source type dependent and may be used when computing the maximum width of a * point source. 
* - * All DepthModel validation is currently performed in GridSourceSet.Builder. + * All DepthModel validation is currently performed in GridRuptureSet.Builder. */ static final class DepthModel { diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/PointSourceFinite.java b/src/main/java/gov/usgs/earthquake/nshmp/model/PointSourceFinite.java index 9d8c66c8..b62d0dd3 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/PointSourceFinite.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/model/PointSourceFinite.java @@ -49,7 +49,7 @@ class PointSourceFinite extends PointSource { * Constructs a new point earthquake source that provides ruptures will * simulate finite fault parameterizations such as hanging-wall effects. * - * @param type of source, as supplied from a parent {@code SourceSet} + * @param type of source, as supplied from a parent {@code RuptureSet} * @param loc <code>Location</code> of the point source * @param mfd magnitude frequency distribution of the source * @param mechWtMap <code>Map</code> of focal mechanism weights @@ -151,7 +151,7 @@ class PointSourceFinite extends PointSource { /* * Get the number of mag-depth iterations required to get to mMax. See - * explanation in GridSourceSet for how magDepthIndices is set up + * explanation in GridRuptureSet for how magDepthIndices is set up */ magDepthSize = depthModel.magDepthIndices.lastIndexOf(mfd.data().size() - 1) + 1; diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/PointSourceFixedStrike.java b/src/main/java/gov/usgs/earthquake/nshmp/model/PointSourceFixedStrike.java index 74fc26d1..22ffa27d 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/PointSourceFixedStrike.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/model/PointSourceFixedStrike.java @@ -48,7 +48,7 @@ class PointSourceFixedStrike extends PointSourceFinite { * Constructs a new point earthquake source that provides fixed-strike * ruptures. * - * @param type of source, as supplied from parent {@code SourceSet} + * @param type of source, as supplied from parent {@code RuptureSet} * @param loc <code>Location</code> of the point source * @param mfd magnitude frequency distribution of the source * @param mechWtMap <code>Map</code> of focal mechanism weights diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/PointSources.java b/src/main/java/gov/usgs/earthquake/nshmp/model/PointSources.java index 5817f73b..f7c94deb 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/PointSources.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/model/PointSources.java @@ -96,7 +96,7 @@ public class PointSources { } /** - * Using the supplied {@code Site}s and a GridSourceSet, from which the + * Using the supplied {@code Site}s and a GridRuptureSet, from which the * standard data that is used to build point sources is derived, create and * return a {@code List<InputList>}. 
* @@ -107,7 +107,7 @@ public class PointSources { Location loc, Mfd mfd, Map<FocalMech, Double> mechWtMap, - GridSourceSet grid) { + GridRuptureSet grid) { Source source = pointSource( sourceType, diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/RuptureSet.java b/src/main/java/gov/usgs/earthquake/nshmp/model/RuptureSet.java index 0939a812..b167d52f 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/RuptureSet.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/model/RuptureSet.java @@ -1,6 +1,9 @@ package gov.usgs.earthquake.nshmp.model; +import java.util.function.Predicate; + import gov.usgs.earthquake.nshmp.geo.Location; +import gov.usgs.earthquake.nshmp.gmm.GroundMotionModel; import gov.usgs.earthquake.nshmp.mfd.Mfd; import gov.usgs.earthquake.nshmp.tree.LogicTree; @@ -11,7 +14,7 @@ import gov.usgs.earthquake.nshmp.tree.LogicTree; * * @author U.S. Geological Survey */ -public interface RuptureSet { +public interface RuptureSet<T extends Source> extends Iterable<T> { /** * The display name of the rupture set. @@ -24,15 +27,34 @@ public interface RuptureSet { int id(); /** - * The number of {@link Rupture}s in this rupture set. + * The {@code TectonicSetting} identifier. */ - int size(); + TectonicSetting setting(); /** * The {@code SourceType} identifier. */ SourceType type(); + /** + * Return the {@link GroundMotionModel}s associated with this + * {@code RuptureSet} as a {@link GmmSet}. + * + * @see GmmSet + */ + GmmSet groundMotionModels(); + + /** + * The number of {@link Rupture}s in this rupture set. + */ + int size(); + + /** + * The weight of the {@link SourceTree} branch to which this rupture set is + * attached. + */ + double weight(); + /** * The logic tree of MFDs in this rupture set. */ @@ -43,6 +65,44 @@ public interface RuptureSet { * location. The details of what this method returns are implementation * secific. */ - Location location(Location site); + Location location(Location site); // TODO move to utility ?? + + /** + * Return an {@code Iterable} over those {@code Source}s that are within the + * cutoff distance of the supplied {@code Location}. The cutoff distance is + * derived from the {@code GroundMotionModel}s associated with this + * {@code RuptureSet}. + * + * @param loc {@code Location} of interest + */ + Iterable<T> iterableForLocation(Location loc); + + /** + * Return an {@code Iterable} over those {@code Source}s that are within + * {@code distance} of the supplied {@code Location}. + * + * @param loc {@code Location} of interest + * @param distance limit + */ + Iterable<T> iterableForLocation(Location loc, double distance); + + /** + * Return a {@link Predicate} that evaluates to {@code true} if this source is + * within {@code distance} of the supplied {@link Location}. This + * {@code Predicate} performs a quick distance calculation and is used to + * determine whether this source should be included in a hazard calculation. 
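+ * <p>Usage sketch (hypothetical names): {@code + * ruptures.distanceFilter(site, 300.0).test(source)} evaluates to + * {@code true} when the quick distance check places {@code source} + * within 300 km of {@code site}.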
+ * + * @param loc {@code Location} of interest + * @param distance limit + */ + Predicate<T> distanceFilter(Location loc, double distance); + + /** + * Return the total + * @return + */ + // Mfd mfd(); + // instead of Source + // interface .Model } diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/SlabRuptureSet.java b/src/main/java/gov/usgs/earthquake/nshmp/model/SlabRuptureSet.java index 31d68d2d..9aaeed51 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/SlabRuptureSet.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/model/SlabRuptureSet.java @@ -4,27 +4,12 @@ package gov.usgs.earthquake.nshmp.model; * Subduction intraslab rupture set implemention. * * @author U.S. Geological Survey - * */ class SlabRuptureSet extends GridRuptureSet { - private SlabSourceSet sss; - SlabRuptureSet(Builder builder) { super(builder); - } - - @Override - SourceSet<PointSource> sourceSet(double weight) { - if (sss == null) { - sss = new SlabSourceSet(createSourceSet(weight)); - } - return sss; - } - - @Override - public SourceType type() { - return SourceType.SLAB; + // TODO handle ss weight } static Builder builder() { @@ -33,6 +18,10 @@ class SlabRuptureSet extends GridRuptureSet { static final class Builder extends GridRuptureSet.Builder { + Builder() { + super(SourceType.SLAB); + } + @Override SlabRuptureSet build() { validateAndInit("SlabRuptureSet.Builder"); diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/SlabSourceSet.java b/src/main/java/gov/usgs/earthquake/nshmp/model/SlabSourceSet.java deleted file mode 100644 index 87d7eb6a..00000000 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/SlabSourceSet.java +++ /dev/null @@ -1,88 +0,0 @@ -package gov.usgs.earthquake.nshmp.model; - -import java.util.Iterator; -import java.util.function.Predicate; - -import gov.usgs.earthquake.nshmp.geo.Location; - -/** - * Container class for related, evenly-spaced subduction slab sources. Class - * decorates a {@link GridSourceSet}. - * - * @author U.S.
Geological Survey - * @see GridSourceSet - */ -public final class SlabSourceSet implements SourceSet<PointSource> { - - private final GridSourceSet delegate; - - SlabSourceSet(GridSourceSet delegate) { - this.delegate = delegate; - } - - @Override - public final int compareTo(SourceSet<PointSource> other) { - return delegate.compareTo(other); - } - - @Override - public final String name() { - return delegate.name(); - } - - @Override - public int id() { - return delegate.id(); - } - - @Override - public final String toString() { - return delegate.toString(); - } - - @Override - public final Iterator<PointSource> iterator() { - return delegate.iterator(); - } - - @Override - public final SourceType type() { - return SourceType.SLAB; - } - - @Override - public final TectonicSetting setting() { - return delegate.setting(); - } - - @Override - public final int size() { - return delegate.size(); - } - - @Override - public final double weight() { - return delegate.weight(); - } - - @Override - public final Iterable<PointSource> iterableForLocation(Location loc) { - return delegate.iterableForLocation(loc); - } - - @Override - public final Iterable<PointSource> iterableForLocation(Location loc, double distance) { - return delegate.iterableForLocation(loc, distance); - } - - @Override - public final GmmSet groundMotionModels() { - return delegate.groundMotionModels(); - } - - @Override - public final Predicate<PointSource> distanceFilter(Location loc, double distance) { - return delegate.distanceFilter(loc, distance); - } - -} diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/SourceSet.java b/src/main/java/gov/usgs/earthquake/nshmp/model/SourceSet.java deleted file mode 100644 index 7cf576a4..00000000 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/SourceSet.java +++ /dev/null @@ -1,90 +0,0 @@ -package gov.usgs.earthquake.nshmp.model; - -import java.util.function.Predicate; - -import gov.usgs.earthquake.nshmp.geo.Location; -import gov.usgs.earthquake.nshmp.gmm.GroundMotionModel; - -/** - * Wrapper class for groups of {@link Source}s of similar {@link SourceType}. - * The use of the word 'Set' in the class name implies the {@code Source}s in a - * {@code SourceSet} will be unique, but no steps are taken to enforce this. - * - * @author U.S. Geological Survey - */ -public interface SourceSet<T extends Source> extends Iterable<T>, Comparable<SourceSet<T>> { - - /** - * A display name. - */ - String name(); - - /** - * A unique integer id. - */ - int id(); - - /** - * The {@code TectonicSetting} identifier. - */ - TectonicSetting setting(); - - /** - * The {@code SourceType} identifier. - */ - SourceType type(); - - /** - * The number of {@code Source}s in this {@code SourceSet}. - */ - int size(); - - /** - * The weight applicable to this {@code SourceSet}. - */ - double weight(); - - /** - * Return an {@code Iterable} over those {@code Source}s that are within the - * cutoff distance of the supplied {@code Location}. The cutoff distance is - * derived from the {@code GroundMotionModel}s associated with this - * {@code SourceSet}. - * - * @param loc {@code Location} of interest - */ - Iterable<T> iterableForLocation(Location loc); - - /** - * Return an {@code Iterable} over those {@code Source}s that are within - * {@code distance} of the supplied {@code Location}. 
- * - * @param loc {@code Location} of interest - * @param distance limit - */ - Iterable<T> iterableForLocation(Location loc, double distance); - - /** - * Return a {@link Predicate} that evaluates to {@code true} if this source is - * within {@code distance} of the supplied {@link Location}. This - * {@code Predicate} performs a quick distance calculation and is used to - * determine whether this source should be included in a hazard calculation. - * - * @param loc {@code Location} of interest - * @param distance limit - */ - Predicate<T> distanceFilter(Location loc, double distance); - - /** - * Return the {@link GroundMotionModel}s associated with this - * {@code SourceSet} as a {@link GmmSet}. - * - * @see GmmSet - */ - GmmSet groundMotionModels(); - - /** - * Return the total - * @return - */ - // Mfd mfd(); -} diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/SourceTree.java b/src/main/java/gov/usgs/earthquake/nshmp/model/SourceTree.java index e9b2a2be..c0c7fb1e 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/SourceTree.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/model/SourceTree.java @@ -3,20 +3,24 @@ package gov.usgs.earthquake.nshmp.model; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.Preconditions.checkState; +import static gov.usgs.earthquake.nshmp.Text.NEWLINE; import static gov.usgs.earthquake.nshmp.Text.checkName; import static java.util.stream.Collectors.toList; import static java.util.stream.Collectors.toUnmodifiableMap; import java.nio.file.Path; +import java.util.AbstractList; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Optional; +import java.util.stream.Collectors; +import java.util.stream.StreamSupport; +import com.google.common.base.Strings; import com.google.common.collect.Iterables; -import com.google.common.collect.Lists; import com.google.common.graph.EndpointPair; import com.google.common.graph.Graphs; import com.google.common.graph.ImmutableNetwork; @@ -36,7 +40,9 @@ import gov.usgs.earthquake.nshmp.tree.LogicTree; * * @author U.S. Geological Survey */ -public class SourceTree { +public class SourceTree extends + AbstractList<Branch<RuptureSet<? extends Source>>> implements + LogicTree<RuptureSet<? extends Source>> { /* * Developer notes: @@ -55,9 +61,11 @@ public class SourceTree { private final TectonicSetting setting; private final SourceType type; private final GmmSet gmms; - private final List<List<Node>> nodes; + private final List<List<Node>> nodes; // for string representation private final Map<Leaf, List<Branch<Path>>> branches; - private final Map<Leaf, Double> leaves; + private final Map<Leaf, Double> weights; + private final List<Leaf> leaves; + // private final Map<String, Leaf> leafIds; // not currently used directly private final Network<Node, Branch<Path>> tree; @@ -70,7 +78,9 @@ public class SourceTree { this.gmms = builder.gmms; this.nodes = builder.nodes; this.branches = builder.branches; + this.weights = builder.leafWeights; this.leaves = builder.leaves; + // this.leafIds = builder.leafIds; this.tree = ImmutableNetwork.copyOf(builder.tree); // List<String> branchStrs = branches.keySet().stream() @@ -81,6 +91,7 @@ public class SourceTree { } /** The name of this tree. 
*/ + @Override public String name() { return name; } @@ -105,14 +116,14 @@ public class SourceTree { return gmms; } - /** The leaf nodes of this tree mapped to their branches. */ + /** The leaf nodes of this tree mapped to their branch paths. */ public Map<Leaf, List<Branch<Path>>> branches() { return branches; } /** The leaf nodes of this tree mapped to the total branch weight. */ - public Map<Leaf, Double> leaves() { - return leaves; + public Map<Leaf, Double> weights() { + return weights; } /** @@ -125,6 +136,54 @@ public class SourceTree { .collect(toList()); } + @Override + public Branch<RuptureSet<? extends Source>> sample(double probability) { + throw new UnsupportedOperationException(); + } + + @Override + public List<Branch<RuptureSet<? extends Source>>> sample(double[] probabilities) { + throw new UnsupportedOperationException(); + } + + @Override + public Branch<RuptureSet<? extends Source>> get(int index) { + return new Branch<RuptureSet<? extends Source>>() { + + @Override + public String id() { + return leaves.get(index).toString(); + } + + @Override + public RuptureSet<? extends Source> value() { + return leaves.get(index).ruptures; + } + + @Override + public double weight() { + return weights.get(leaves.get(index)); + } + }; + } + + @Override + public int size() { + return leaves.size(); + } + + @Override + public String toString() { + // nodes().toString() + NEWLINE + + return this.stream() + .sorted((b1, b2) -> b1.value().name().compareTo(b2.value().name())) + .map(branch -> " ↳ " + + branch.value() + + Strings.padEnd(" weight: " + branch.weight(), 20, ' ') + + branch.id()) + .collect(Collectors.joining(NEWLINE)); + } + static Builder builder() { return new Builder(); } @@ -150,7 +209,9 @@ public class SourceTree { /* Created on build. */ private List<List<Node>> nodes; private Map<Leaf, List<Branch<Path>>> branches; - private Map<Leaf, Double> leaves; + private Map<Leaf, Double> leafWeights; + private List<Leaf> leaves; + // private Map<String, Leaf> leafIds; private Builder() {} @@ -219,14 +280,14 @@ public class SourceTree { * Add a leaf to the source tree. This method replaces the target Node * associated with the supplied branch with a terminal Leaf node. */ - Builder addLeaf(Branch<Path> parent, RuptureSet ruptureSet) { + Builder addLeaf(Branch<Path> parent, RuptureSet<? extends Source> ruptures) { checkState(this.tree != null, "Tree root node is not set"); checkArgument(tree.edges().contains(parent), "Missing parent branch"); EndpointPair<Node> endpoints = tree.incidentNodes(parent); Node target = endpoints.target(); Node source = endpoints.source(); tree.removeNode(target); - Leaf leaf = new Leaf(target.index, ruptureSet); // transfer assigned index + Leaf leaf = new Leaf(target.index, ruptures); // transfer assigned index tree.addEdge(source, leaf, parent); leafList.add(leaf); return this; @@ -250,17 +311,19 @@ public class SourceTree { private void buildTreeFields() { /* - * On build: Generate lists of logic tree branches (branch paths) from the - * root of a tree to each of the supplied leaf nodes. Because the tree is - * a directed graph, traversing a transposed view starting with each leaf - * yields the required node path. + * Generate lists of logic tree branches (branch paths) from the root of a + * tree to each of the supplied leaf nodes. Because the tree is a directed + * graph, traversing a transposed view starting with each leaf yields the + * required node path. 
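+ * + * Hypothetical example: a leaf reached via branches root->A (weight 0.5) and A->leaf (weight 0.2) yields the post-order node list [root, A, leaf]; those two branches form the leaf's branch path, and the cumulative leaf weight computed below is 0.5 * 0.2 = 0.1.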
*/ Traverser<Node> traverser = Traverser.forTree(Graphs.transpose(tree)); Map<Leaf, List<Branch<Path>>> branchListsMap = new HashMap<>(); List<List<Node>> nodeLists = new ArrayList<>(); for (Leaf leaf : leafList) { - List<Node> nodeList = Lists.newArrayList(traverser.depthFirstPostOrder(leaf)); + Iterable<Node> it = traverser.depthFirstPostOrder(leaf); + List<Node> nodeList = StreamSupport.stream(it.spliterator(), false) + .collect(Collectors.toList()); checkState(nodeList.size() > 1); // 2 nodes minimum [root -> leaf] nodeLists.add(nodeList); @@ -276,14 +339,28 @@ public class SourceTree { branches = Map.copyOf(branchListsMap); /* - * On build: Create map of leaves and their weights. Note that the values - * of the returned map will not sum to one when the source tree contains - * one or more LogicGroups. + * Create map of leaves and their weights. Note that the values of the + * returned map will not sum to one when the source tree contains one or + * more LogicGroups. */ - leaves = branches.entrySet().stream() - .collect(toUnmodifiableMap( + leafWeights = branches.entrySet().stream().collect( + toUnmodifiableMap( Entry::getKey, e -> leafWeight(e.getValue()))); + leaves = List.copyOf(branches.keySet()); + // leafIds = leaves.stream().collect( + // toUnmodifiableMap( + // Leaf::toString, + // leaf -> leaf)); + + /* + * Assign weights to RuptureSets. It would be nice if a RuptureSet didn't + * know about its source tree weight, but it's much easier in the current + * calculation pipeline if they do. + */ + leaves.stream().forEach(leaf -> { + ((AbstractRuptureSet<?>) leaf.ruptures).setLeafWeight(leafWeights.get(leaf)); + }); } /* Compute cumulative weight of a source branch from root to leaf. */ @@ -323,11 +400,11 @@ static final class Leaf extends Node { - final RuptureSet ruptureSet; + final RuptureSet<? extends Source> ruptures; - Leaf(int index, RuptureSet ruptureSet) { + Leaf(int index, RuptureSet<?
extends Source> ruptures) { super(index); - this.ruptureSet = ruptureSet; + this.ruptures = ruptures; } @Override diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/SystemRuptureSet.java b/src/main/java/gov/usgs/earthquake/nshmp/model/SystemRuptureSet.java index 1ed210c9..8c49c028 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/SystemRuptureSet.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/model/SystemRuptureSet.java @@ -2,7 +2,12 @@ package gov.usgs.earthquake.nshmp.model; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; -import static com.google.common.base.Preconditions.checkState; +import static gov.usgs.earthquake.nshmp.Earthquakes.checkCrustalDepth; +import static gov.usgs.earthquake.nshmp.Earthquakes.checkCrustalWidth; +import static gov.usgs.earthquake.nshmp.Earthquakes.checkMagnitude; +import static gov.usgs.earthquake.nshmp.Faults.checkDip; +import static gov.usgs.earthquake.nshmp.Faults.checkRake; +import static gov.usgs.earthquake.nshmp.geo.Locations.horzDistanceFast; import static gov.usgs.earthquake.nshmp.model.SourceFeature.Key.DEPTH; import static gov.usgs.earthquake.nshmp.model.SourceFeature.Key.DIP; import static gov.usgs.earthquake.nshmp.model.SourceFeature.Key.INDICES; @@ -10,17 +15,33 @@ import static gov.usgs.earthquake.nshmp.model.SourceFeature.Key.M; import static gov.usgs.earthquake.nshmp.model.SourceFeature.Key.RAKE; import static gov.usgs.earthquake.nshmp.model.SourceFeature.Key.RATE; import static gov.usgs.earthquake.nshmp.model.SourceFeature.Key.WIDTH; -import static gov.usgs.earthquake.nshmp.model.SourceType.FAULT_SYSTEM; -import static java.util.stream.Collectors.toUnmodifiableList; +import static java.lang.Math.min; +import static java.util.stream.Collectors.toList; import java.nio.file.Path; +import java.util.BitSet; +import java.util.Comparator; +import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.function.Function; +import java.util.function.Predicate; import java.util.stream.IntStream; import java.util.stream.Stream; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Iterables; +import com.google.common.primitives.Doubles; + +import gov.usgs.earthquake.nshmp.Faults; +import gov.usgs.earthquake.nshmp.calc.HazardInput; +import gov.usgs.earthquake.nshmp.calc.InputList; +import gov.usgs.earthquake.nshmp.calc.Site; +import gov.usgs.earthquake.nshmp.calc.SystemInputList; import gov.usgs.earthquake.nshmp.data.DelimitedData; import gov.usgs.earthquake.nshmp.data.DelimitedData.Record; +import gov.usgs.earthquake.nshmp.data.Indexing; +import gov.usgs.earthquake.nshmp.data.IntervalArray; import gov.usgs.earthquake.nshmp.fault.surface.DefaultGriddedSurface; import gov.usgs.earthquake.nshmp.fault.surface.GriddedSurface; import gov.usgs.earthquake.nshmp.geo.Location; @@ -33,7 +54,7 @@ import gov.usgs.earthquake.nshmp.tree.LogicTree; * * @author U.S. Geological Survey */ -public class SystemRuptureSet implements RuptureSet { +public class SystemRuptureSet extends AbstractRuptureSet<SystemRuptureSet.SystemSource> { /* * Developer notes: @@ -48,73 +69,99 @@ public class SystemRuptureSet implements RuptureSet { * 32**** until we can work on reducing redundancy. There should then be a * mapping in rupture-set between index and relevant fault section. This will * better support more logic tree branches beyond the two fault models. 
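+ * + * Hypothetical record, for orientation only (field keys follow the SourceFeature.Key imports above; actual rupture-set files may differ): m=7.15, rate=1.5e-7, indices=1091-1092-1093, depth=4.0, dip=50.0, rake=180.0, width=11.0. The indices token is split on '-' and expanded to section indices by indexStringToIndices().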
+ * + * Unfiltered UCERF3 rupture counts: FM31 = 253,706 FM32 = 305,709 */ private final ModelData data; - private final String name; - private final Integer id; private final Map<Integer, SystemSection> sectionMap; private final Path ruptures; - private SystemSourceSet sourceSet; + // from SSSb + private final GriddedSurface[] sections; + private final String[] sectionNames; + + private final BitSet[] bitsets; + private final double[] mags; + private final double[] rates; + private final double[] depths; + private final double[] dips; + private final double[] widths; + private final double[] rakes; + + public final Statistics stats; + + /* + * Developer notes: + * + * Revisit the fact that BitSets are mutable and could potentially be altered + * via a SystemSource. + * + * Don't like the fact that original trace data for sections is lost; same for + * other attributes + */ SystemRuptureSet(Builder builder) { + super(builder); this.data = builder.data; - this.name = builder.name; - this.id = builder.id; this.sectionMap = builder.sectionMap; this.ruptures = builder.ruptures; - } - - SystemSourceSet createSourceSet(double branchWeight) { - if (sourceSet == null) { - - SystemSourceSet.Builder builder = new SystemSourceSet.Builder(); - - builder.name(name) - .id(id) - .setting(data.tectonicSetting()) - .weight(branchWeight) - .gmms(data.gmms()); - - List<GriddedSurface> sections = sectionMap.keySet().stream() - .sorted() - .map(sectionMap::get) - .map(SystemRuptureSet::featureToSurface) - .collect(toUnmodifiableList()); - builder.sections(sections); - - List<String> names = sectionMap.keySet().stream() - .sorted() - .map(sectionMap::get) - .map(feature -> feature.name) - .collect(toUnmodifiableList()); - builder.sectionNames(names); - - DelimitedData.comma(ruptures).records() - .forEach(record -> addRupture(builder, record)); - - sourceSet = builder.build(); + this.sections = sectionMap.keySet().stream() + .sorted() + .map(sectionMap::get) + .map(SystemRuptureSet::featureToSurface) + .toArray(GriddedSurface[]::new); + + this.sectionNames = sectionMap.keySet().stream() + .sorted() + .map(sectionMap::get) + .map(feature -> feature.name) + .toArray(String[]::new); + + /* Initialize rupture attribute arrays. */ + List<Record> ruptureRecords; + try (Stream<Record> s = DelimitedData.comma(ruptures).records()) { + ruptureRecords = s.collect(toList()); } - return sourceSet; - } - - private static void addRupture( - SystemSourceSet.Builder builder, - Record record) { - - builder.mag(record.getDouble(M)) - .rate(record.getDouble(RATE)) - .indices(ruptureIndexStringToIndices(record.get(INDICES))) - .depth(record.getDouble(DEPTH)) - .dip(record.getDouble(DIP)) - .rake(record.getDouble(RAKE)) - .width(record.getDouble(WIDTH)); + int nRups = ruptureRecords.size(); + this.bitsets = new BitSet[nRups]; + this.mags = new double[nRups]; + this.rates = new double[nRups]; + this.depths = new double[nRups]; + this.dips = new double[nRups]; + this.widths = new double[nRups]; + this.rakes = new double[nRups]; + + /* Populate rupture attribute arrays. */ + double mMin = Double.POSITIVE_INFINITY; + double mMax = Double.NEGATIVE_INFINITY; + for (int i = 0; i < ruptureRecords.size(); i++) { + Record r = ruptureRecords.get(i); + + int[] indices = indexStringToIndices(r.get(INDICES)); + checkArgument(indices.length > 1, "Ruptures must consist of 2 or more sections"); + this.bitsets[i] = Indexing.indicesToBits(sections.length, indices); + + double m = checkMagnitude(r.getDouble(M)); + mMin = (m < mMin) ?
m : mMin; + mMax = (m > mMax) ? m : mMax; + this.mags[i] = m; + + double rate = r.getDouble(RATE); + checkArgument(Doubles.isFinite(rate), "Non-finite rate value"); + this.rates[i] = rate; + + this.depths[i] = checkCrustalDepth(r.getDouble(DEPTH)); + this.dips[i] = checkDip(r.getDouble(DIP)); + this.widths[i] = checkCrustalWidth(r.getDouble(WIDTH)); + this.rakes[i] = checkRake(r.getDouble(RAKE)); + } + this.stats = new Statistics(mMin, mMax); } - private static int[] ruptureIndexStringToIndices(String s) { + private static int[] indexStringToIndices(String s) { return Stream.of(s.split("-")) .flatMapToInt(SystemRuptureSet::toIndices) .toArray(); @@ -156,28 +203,176 @@ public class SystemRuptureSet implements RuptureSet { } @Override - public String name() { - return name; + public int size() { + return bitsets.length; } @Override - public int id() { - return id; + public LogicTree<Mfd> mfdTree() { + throw new UnsupportedOperationException(); } @Override - public int size() { - return sourceSet.size(); + public Iterator<SystemSource> iterator() { + return new Iterator<SystemSource>() { + int caret = 0; + + @Override + public boolean hasNext() { + return caret < size(); + } + + @Override + public SystemSource next() { + return new SystemSource(caret++); + } + + @Override + public void remove() { + throw new UnsupportedOperationException(); + } + }; } @Override - public SourceType type() { - return FAULT_SYSTEM; + public Predicate<SystemSource> distanceFilter(Location loc, double distance) { + BitSet siteBitset = bitsetForLocation(loc, distance); + return new BitsetFilter(siteBitset); } - @Override - public LogicTree<Mfd> mfdTree() { - throw new UnsupportedOperationException(); + /** + * The fault section surface corresponding to the supplied {@code index}. + * + * <p>This method exists because system source sets are complex and commonly + * encapsulate 100K+ sources. The results of a hazard calculation and + * disaggregation are therefore better represented in the context of + * individual fault sections, rather than on a per-source basis. + * + * @param index of fault section surface to retrieve + */ + public GriddedSurface section(int index) { + return sections[index]; + } + + /** + * The name of the fault section corresponding to the supplied {@code index}. + * + * @param index of fault section name to retrieve + */ + public String sectionName(int index) { + return sectionNames[index]; + } + + /** + * A single source in a fault system. These sources do not currently support + * rupture iteration. + * + * <p>We skip the notion of a {@code Rupture} for now. Aleatory uncertainty on + * magnitude isn't required, but if it is, we'll alter this implementation to + * return List<GmmInput> per source rather than one GmmInput. + */ + public final class SystemSource implements Source { + + private final int index; + + private SystemSource(int index) { + this.index = index; + } + + @Override + public String name() { + // How to create name? RuptureSet will need parent section names + return "Unnamed fault system source"; + } + + @Override + public int size() { + return 1; + } + + @Override + public int id() { + return index; + } + + @Override + public SourceType type() { + return SourceType.FAULT_SYSTEM; + } + + /** + * This method is not required for disaggregation and currently throws an + * {@code UnsupportedOperationException}. 
+ */ + @Override + public Location location(Location location) { + throw new UnsupportedOperationException(); + } + + @Override + public List<Mfd> mfds() { + throw new UnsupportedOperationException(); + } + + @Override + public Iterator<Rupture> iterator() { + /* + * Rupture iterator not currently supported but may be in future if + * aleatory uncertainty is added to or required on source magnitudes. + * Currently system source:rupture is 1:1. + */ + throw new UnsupportedOperationException(); + } + + private final BitSet bitset() { + return bitsets[index]; + } + + private final double magnitude() { + return mags[index]; + } + + private final double rate() { + return rates[index]; + } + + private final double depth() { + return depths[index]; + } + + private final double dip() { + return dips[index]; + } + + private final double width() { + return widths[index]; + } + + private final double rake() { + return rakes[index]; + } + } + + /** + * Container of summary data for this system source set. + */ + public static final class Statistics { + + /* Currently used to build section participation MFDs for disagg. */ + + /** Minimum magnitude over all ruptures. */ + public final double mMin; + + /** Maximum magnitude over all ruptures. */ + public final double mMax; + + Statistics( + double mMin, + double mMax) { + + this.mMin = mMin; + this.mMax = mMax; + } } /** @@ -186,15 +381,8 @@ */ @Override public Location location(Location site) { - return null; // Locations.closestPoint(site, feature.trace); - } - - @Override - public String toString() { - return getClass().getSimpleName() + " " + Map.of( - "name", name(), - "id", id());// update - // "feature", feature.source.toJson()); + throw new UnsupportedOperationException(); + // return null; // Locations.closestPoint(site, feature.trace); } static Builder builder() { @@ -211,30 +399,44 @@ } /* Single use builder */ - static class Builder { - - private boolean built = false; + static class Builder extends AbstractRuptureSet.Builder { private ModelData data; - private String name; - private Integer id; private Map<Integer, SystemSection> sectionMap; private Path ruptures; + /* Unfiltered UCERF3: FM31 = 253,706 FM32 = 305,709 */ + static final int RUP_SET_SIZE = 306000; + + static final String ID = "SystemRuptureSet.Builder"; + + private List<GriddedSurface> sections; + private List<String> sectionNames; + + private double mMin = Double.POSITIVE_INFINITY; + private double mMax = Double.NEGATIVE_INFINITY; + + Builder() { + super(SourceType.FAULT_SYSTEM); + } + + @Override Builder name(String name) { - this.name = name; + super.name(name); return this; } + @Override Builder id(int id) { - this.id = id; + super.id(id); return this; } Builder sections(Map<Integer, SystemSection> sectionMap) { checkArgument(checkNotNull(sectionMap).size() >= 1); this.sectionMap = sectionMap; + return this; } @@ -246,23 +448,327 @@ /* Set MFD helper data.
*/ Builder modelData(ModelData data) { this.data = checkNotNull(data); + super.setting(data.tectonicSetting()); + super.gmms(data.gmms()); return this; } - SystemRuptureSet build() { - validateAndInit("SystemRuptureSet.Builder"); - return new SystemRuptureSet(this); + Builder sections(List<GriddedSurface> sections) { + checkNotNull(sections, "Section surface list is null"); + checkArgument(sections.size() > 0, "Section surface list is empty"); + this.sections = sections; + return this; + } + + Builder sectionNames(List<String> names) { + checkNotNull(names, "Section name list is null"); + checkArgument(names.size() > 0, "Section name list is empty"); + this.sectionNames = names; + return this; } private void validateAndInit(String label) { - checkState(!built, "Single use builder"); - checkNotNull(name, "%s name", label); - checkNotNull(id, "%s id", label); + validateState(label); checkNotNull(data, "%s model data", label); checkNotNull(sectionMap, "%s subsection map", label); checkNotNull(ruptures, "%s rupture file", label); - built = true; } + + SystemRuptureSet build() { + validateAndInit("SystemRuptureSet.Builder"); + return new SystemRuptureSet(this); + } + } + + /* + * Handle rate calculations internally as SystemSource is not fully + * implemented. If/when it is, this should be removed in favor of using + * iterableForLocation and getRupture(0). + */ + + /** + * Return an instance of a {@code Function} that converts a + * {@code SystemRuptureSet} to a magnitude-rate {@code IntervalArray}. + * + * @param location with which to initialize instance. + * @param distance of interest (relevant source radius) + * @param modelMfd MFD to populate + */ + public static Function<SystemRuptureSet, IntervalArray> toRatesFunction( + Location location, + double distance, + IntervalArray modelMfd) { + throw new UnsupportedOperationException("HANDLE WEIGHTS IN RATES"); + // throwing UOE until we know that the SrcSet weight formerly applied + // below is applied to the IntervalArray upstream + // return new ToRates(location, distance, modelMfd); + } + + private static final class ToRates implements Function<SystemRuptureSet, IntervalArray> { + + private final Location location; + private final double distance; + private final IntervalArray modelMfd; + + ToRates( + Location location, + double distance, + IntervalArray modelMfd) { + + this.location = location; + this.distance = distance; + this.modelMfd = modelMfd; + } + + @Override + public IntervalArray apply(SystemRuptureSet ruptures) { + IntervalArray.Builder mfdForLocation = IntervalArray.Builder.fromModel(modelMfd); + BitSet siteBitset = ruptures.bitsetForLocation(location, distance); + if (siteBitset.isEmpty()) { + return modelMfd; + } + Predicate<SystemSource> distanceFilter = new BitsetFilter(siteBitset); + for (SystemSource source : Iterables.filter(ruptures, distanceFilter::test)) { + mfdForLocation.add(source.magnitude(), source.rate()); + } + // was... return mfdForLocation.multiply(srcSet.weight()).build(); TODO ?? + return mfdForLocation.build(); + } + } + + /* + * System source calculation pipeline. + * + * Rather than expose highly mutable bitsets and attendant logic that is used + * to generate HazardInputs from SystemRuptureSets, we opt to locate transform + * Functions and related classes here. + * + * System sources (e.g. UCERF3 ruptures) are composed of multiple small (~7km + * long x ~15km wide) adjacent fault sections in a large and dense fault + * network.
Each source is defined by the indices of the participating fault + * sections, magnitude, average width, average rake, and average dip, among + * other parameters. + * + * In order to filter ruptures and calculate distance parameters when doing a + * hazard calculation, one can treat each source as a finite fault. This + * ultimately requires careful handling of site-to-source calculations when + * trying to reduce redundant calculations because many sources share the same + * fault sections and sources will be handled one-at-a-time, in sequence. + * + * Alternatively, one can approach the problem from an indexing standpoint, + * precomputing that data which will be required, and then mining it on a + * per-source basis, as follows: + * + * 1) For each source, create a BitSet with size = nSections. Set the bits for + * each section that the source uses. [sourceBitSet] + * + * 2) Create another BitSet with size = nSections. Set the bits for each + * section within the distance cutoff for a Site. Do this quickly using only + * the centroid of each fault section. [siteBitSet] + * + * 3) Create and populate a Map<SectionIndex, double[rJB, rRup, rX]> of + * distance metrics for each section in the siteBitSet. This is created + * pre-sorted ascending on rRup (the closest sections to a site come first). + * + * 4) For each sourceBitSet, 'siteBitSet.intersects(sourceBitSet)' returns + * whether a source is close enough to the site to be considered. + * + * 5) For each considered source, 'sourceBitSet AND siteBitSet' yields a + * BitSet that only includes set bits with indices in the distance metric + * table. + * + * 6) For each source, loop the ascending indices, checking whether the bit at + * 'index' is set in the source's bitset. The first hit will be the closest + * section in a source, relative to a site. (the rX value used is keyed to the + * minimum rRup). + * + * 7) Build GmmInputs and proceed with hazard calculation. + * + * Note on the above. Although one could argue that only rRup or rJB be + * calculated first, there are geometries for which min(rRup) != min(rJB); + * e.g. location on hanging wall of dipping fault that abuts a vertical + * fault... vertical might yield min(rRup) but min(rJB) would be 0 (over + * dipping fault). While checking the bits in a source, we therefore look at + * the three closest sections. + */ + + /* + * Concurrency + * + * The use case assumed here is that 1 or 2 fault systems (e.g. UCERF3 + * branch-averaged solutions) will most commonly be run when supporting + * web-services. We therefore compute hazard by first creating a (large) input + * list and then distribute the more time consuming curve calculation. + * However, if many fault systems were to be run, it probably makes sense to + * farm each onto an independent thread. + */ + + /** + * Return an instance of a {@code Function} that converts a + * {@code SystemRuptureSet} to a ground motion model {@code InputList}. + * + * @param site with which to initialize instance. + */ + public static Function<SystemRuptureSet, InputList> toInputsFunction(Site site) { + return new ToInputs(site); + } + + private static final class ToInputs implements Function<SystemRuptureSet, InputList> { + + private final Site site; + + ToInputs(Site site) { + this.site = site; + } + + @Override + public InputList apply(SystemRuptureSet ruptures) { + + /* Create Site BitSet.
*/ double maxDistance = ruptures.groundMotionModels().maxDistance(); + BitSet siteBitset = ruptures.bitsetForLocation(site.location(), maxDistance); + if (siteBitset.isEmpty()) { + return SystemInputList.empty(ruptures); + } + + /* Create and fill distance map. */ + int[] siteIndices = Indexing.bitsToIndices(siteBitset); + ImmutableMap.Builder<Integer, double[]> rMapBuilder = + ImmutableMap.<Integer, double[]> builder() + .orderEntriesByValue(new DistanceTypeSorter(R_RUP_INDEX)); + for (int i : siteIndices) { + Distance r = ruptures.sections[i].distanceTo(site.location()); + rMapBuilder.put(i, new double[] { r.rJB, r.rRup, r.rX }); + } + + /* Create inputs. */ + Map<Integer, double[]> rMap = rMapBuilder.build(); + Function<SystemSource, HazardInput> inputGenerator = new InputGenerator(rMap, site); + Predicate<SystemSource> rFilter = new BitsetFilter(siteBitset); + Iterable<SystemSource> sources = Iterables.filter(ruptures, rFilter::test); + + /* Fill input list. */ + SystemInputList inputs = new SystemInputList(ruptures, rMap.keySet()); + for (SystemSource source : sources) { + inputs.add(inputGenerator.apply(source)); + // for disagg + inputs.addBitset(source.bitset()); + } + + return inputs; + } + } + + private static final class DistanceTypeSorter implements Comparator<double[]> { + + final int rTypeIndex; + + DistanceTypeSorter(int rTypeIndex) { + this.rTypeIndex = rTypeIndex; + } + + @Override + public int compare(double[] left, double[] right) { + if (left != null && right != null) { + return Double.compare(left[rTypeIndex], right[rTypeIndex]); + } else { + throw new IllegalStateException(); + } + } + } + + /* + * Predicate that tests the intersection of a site bitset (fault sections + * within a specified distance of a site) with source bitsets (fault sections + * that participate in a SystemSource/Rupture). + */ + private static class BitsetFilter implements Predicate<SystemSource> { + + private final BitSet siteBitset; + + BitsetFilter(BitSet siteBitset) { + this.siteBitset = siteBitset; + } + + @Override + public boolean test(SystemSource source) { + return siteBitset.intersects(source.bitset()); + } + } + + private static final int R_JB_INDEX = 0; + private static final int R_RUP_INDEX = 1; + private static final int R_X_INDEX = 2; + + private static final int R_HIT_LIMIT = 3; + + private static final class InputGenerator implements Function<SystemSource, HazardInput> { + + private final Map<Integer, double[]> rMap; + private final Site site; + + InputGenerator( + Map<Integer, double[]> rMap, + Site site) { + + this.rMap = rMap; + this.site = site; + } + + @Override + public HazardInput apply(SystemSource source) { + + /* Find r minima.
*/ + BitSet sections = source.bitset(); + double rJB = Double.MAX_VALUE; + double rRup = Double.MAX_VALUE; + double rX = Double.MAX_VALUE; + int hitCount = 0; + for (int sectionIndex : rMap.keySet()) { + if (sections.get(sectionIndex)) { + double[] distances = rMap.get(sectionIndex); + rJB = min(rJB, distances[R_JB_INDEX]); + double rRupNew = distances[R_RUP_INDEX]; + if (rRupNew < rRup) { + rRup = rRupNew; + rX = distances[R_X_INDEX]; + } + if (++hitCount > R_HIT_LIMIT) { + break; + } + } + } + + double dip = source.dip(); + double width = source.width(); + double zTor = source.depth(); + double zHyp = Faults.hypocentralDepth(dip, width, zTor); + + return new HazardInput( + source.rate(), + source.magnitude(), + rJB, + rRup, + rX, + dip, + width, + zTor, + zHyp, + source.rake(), + site.vs30(), + site.vsInferred(), + site.z1p0(), + site.z2p5()); + } + } + + private final BitSet bitsetForLocation(Location loc, double r) { + BitSet bits = new BitSet(sections.length); + int count = 0; + for (GriddedSurface surface : sections) { + bits.set(count++, horzDistanceFast(loc, surface.centroid()) <= r); + } + return bits; } } diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/SystemSourceSet.java b/src/main/java/gov/usgs/earthquake/nshmp/model/SystemSourceSet.java deleted file mode 100644 index 0dacae99..00000000 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/SystemSourceSet.java +++ /dev/null @@ -1,662 +0,0 @@ -package gov.usgs.earthquake.nshmp.model; - -import static com.google.common.base.Preconditions.checkArgument; -import static com.google.common.base.Preconditions.checkNotNull; -import static com.google.common.base.Preconditions.checkState; -import static gov.usgs.earthquake.nshmp.Earthquakes.checkCrustalDepth; -import static gov.usgs.earthquake.nshmp.Earthquakes.checkCrustalWidth; -import static gov.usgs.earthquake.nshmp.Earthquakes.checkMagnitude; -import static gov.usgs.earthquake.nshmp.Faults.checkDip; -import static gov.usgs.earthquake.nshmp.Faults.checkRake; -import static gov.usgs.earthquake.nshmp.geo.Locations.horzDistanceFast; -import static java.lang.Math.min; - -import java.util.ArrayList; -import java.util.BitSet; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.function.Function; -import java.util.function.Predicate; - -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Iterables; -import com.google.common.collect.Ordering; -import com.google.common.primitives.Doubles; - -import gov.usgs.earthquake.nshmp.Faults; -import gov.usgs.earthquake.nshmp.calc.HazardInput; -import gov.usgs.earthquake.nshmp.calc.InputList; -import gov.usgs.earthquake.nshmp.calc.Site; -import gov.usgs.earthquake.nshmp.calc.SystemInputList; -import gov.usgs.earthquake.nshmp.data.Indexing; -import gov.usgs.earthquake.nshmp.data.IntervalArray; -import gov.usgs.earthquake.nshmp.fault.surface.GriddedSurface; -import gov.usgs.earthquake.nshmp.geo.Location; -import gov.usgs.earthquake.nshmp.mfd.Mfd; - -/** - * Wrapper class for related {@link SystemSource}s. - * - * @author U.S. 
Geological Survey - */ -public final class SystemSourceSet extends AbstractSourceSet<SystemSourceSet.SystemSource> { - - private final GriddedSurface[] sections; - private final String[] sectionNames; - private final BitSet[] bitsets; - private final double[] mags; - private final double[] rates; - private final double[] depths; - private final double[] dips; - private final double[] widths; - private final double[] rakes; - - public final Statistics stats; - - /* - * Developer notes: - * - * Revisit the fact that BitSets are mutable and could potentially be altered - * via a SystemSource. - * - * Don't like the fact that original trace data for sections is lost; same for - * other attributes - */ - - private SystemSourceSet(Builder builder) { - super(builder); - this.sections = builder.sections.toArray(new GriddedSurface[] {}); - this.sectionNames = builder.sectionNames.toArray(new String[] {}); - this.bitsets = builder.bitsets.toArray(new BitSet[] {}); - this.mags = Doubles.toArray(builder.mags); - this.rates = Doubles.toArray(builder.rates); - this.depths = Doubles.toArray(builder.depths); - this.dips = Doubles.toArray(builder.dips); - this.widths = Doubles.toArray(builder.widths); - this.rakes = Doubles.toArray(builder.rakes); - this.stats = builder.stats; - } - - @Override - public int size() { - return bitsets.length; - } - - @Override - public Iterator<SystemSource> iterator() { - return new Iterator<SystemSource>() { - int caret = 0; - - @Override - public boolean hasNext() { - return caret < size(); - } - - @Override - public SystemSource next() { - return new SystemSource(caret++); - } - - @Override - public void remove() { - throw new UnsupportedOperationException(); - } - }; - } - - @Override - public Predicate<SystemSource> distanceFilter(Location loc, double distance) { - BitSet siteBitset = bitsetForLocation(loc, distance); - return new BitsetFilter(siteBitset); - } - - /** - * The fault section surface corresponding to the supplied {@code index}. - * - * <p>This method exists because system source sets are complex and commonly - * encapsulate 100K+ sources. The results of a hazard calculation and - * disaggregation are therefore better represented in the context of - * individual fault sections, rather than on a per-source basis. - * - * @param index of fault section surface to retrieve - */ - public GriddedSurface section(int index) { - return sections[index]; - } - - /** - * The name of the fault section corresponding to the supplied {@code index}. - * - * @param index of fault section name to retrieve - */ - public String sectionName(int index) { - return sectionNames[index]; - } - - /** - * A single source in a fault system. These sources do not currently support - * rupture iteration. - * - * <p>We skip the notion of a {@code Rupture} for now. Aleatory uncertainty on - * magnitude isn't required, but if it is, we'll alter this implementation to - * return List<GmmInput> per source rather than one GmmInput. - */ - public final class SystemSource implements Source { - - private final int index; - - private SystemSource(int index) { - this.index = index; - } - - @Override - public String name() { - // How to create name? 
SourceSet will need parent section names - return "Unnamed fault system source"; - } - - @Override - public int size() { - return 1; - } - - @Override - public int id() { - return index; - } - - @Override - public SourceType type() { - return SourceType.FAULT_SYSTEM; - } - - /** - * This method is not required for disaggregation and currently throws an - * {@code UnsupportedOperationException}. - */ - @Override - public Location location(Location location) { - throw new UnsupportedOperationException(); - } - - @Override - public List<Mfd> mfds() { - throw new UnsupportedOperationException(); - } - - @Override - public Iterator<Rupture> iterator() { - /* - * Rupture iterator not currently supported but may be in future if - * aleatory uncertainty is added to or required on source magnitudes. - * Currently system source:rupture is 1:1. - */ - throw new UnsupportedOperationException(); - } - - private final BitSet bitset() { - return bitsets[index]; - } - - private final double magnitude() { - return mags[index]; - } - - private final double rate() { - return rates[index]; - } - - private final double depth() { - return depths[index]; - } - - private final double dip() { - return dips[index]; - } - - private final double width() { - return widths[index]; - } - - private final double rake() { - return rakes[index]; - } - } - - /** - * Container of summary data for this sytem source set. - */ - public static final class Statistics { - - /* Currently used to build section participation MFDs for disagg. */ - - /** Minimum magnitude over all ruptures. */ - public final double mMin; - - /** Maximum magnitude over all ruptures. */ - public final double mMax; - - Statistics( - double mMin, - double mMax) { - - this.mMin = mMin; - this.mMax = mMax; - } - } - - /* - * Single use builder. Quirky behavior: Note that sections() must be called - * before any calls to indices(). All indices and data fields should be - * repeatedly called in order to ensure correctly ordered fields when - * iterating ruptures. 
- */ - static class Builder extends AbstractSourceSet.Builder { - - /* Unfiltered UCERF3: FM31 = 253,706 FM32 = 305,709 */ - static final int RUP_SET_SIZE = 306000; - - static final String ID = "SystemSourceSet.Builder"; - - private List<GriddedSurface> sections; - private List<String> sectionNames; - private final List<BitSet> bitsets = new ArrayList<>(RUP_SET_SIZE); - private final List<Double> mags = new ArrayList<>(RUP_SET_SIZE); - private final List<Double> rates = new ArrayList<>(RUP_SET_SIZE); - private final List<Double> depths = new ArrayList<>(RUP_SET_SIZE); - private final List<Double> dips = new ArrayList<>(RUP_SET_SIZE); - private final List<Double> widths = new ArrayList<>(RUP_SET_SIZE); - private final List<Double> rakes = new ArrayList<>(RUP_SET_SIZE); - - private double mMin = Double.POSITIVE_INFINITY; - private double mMax = Double.NEGATIVE_INFINITY; - - Builder() { - super(SourceType.FAULT_SYSTEM); - } - - Builder sections(List<GriddedSurface> sections) { - checkNotNull(sections, "Section surface list is null"); - checkArgument(sections.size() > 0, "Section surface list is empty"); - this.sections = sections; - return this; - } - - Builder sectionNames(List<String> names) { - checkNotNull(names, "Section name list is null"); - checkArgument(names.size() > 0, "Section name list is empty"); - this.sectionNames = names; - return this; - } - - Builder indices(int[] indices) { - checkState(sections != null, "Indices may only be set after call to sections()"); - checkNotNull(indices, "Rupture index list is null"); - // NOTE we're doublechecking a UCERF3 rule that ruptures be composed - // of at least 2 sections; this may not be the case in the future. - checkArgument(indices.length > 1, "Rupture index list must contain 2 or more values"); - bitsets.add(Indexing.indicesToBits(sections.size(), indices)); - return this; - } - - Builder mag(double mag) { - mags.add(checkMagnitude(mag)); - mMin = (mag < mMin) ? mag : mMin; - mMax = (mag > mMax) ? 
mag : mMax; - return this; - } - - Builder rate(double rate) { - checkArgument(Doubles.isFinite(rate), "Rate is not a finite value"); - rates.add(rate); - return this; - } - - Builder depth(double depth) { - depths.add(checkCrustalDepth(depth)); - return this; - } - - Builder dip(double dip) { - dips.add(checkDip(dip)); - return this; - } - - Builder width(double width) { - widths.add(checkCrustalWidth(width)); - return this; - } - - Builder rake(double rake) { - rakes.add(checkRake(rake)); - return this; - } - - @Override - void validateState(String buildId) { - super.validateState(buildId); - - checkState(sections.size() > 0, "%s no sections added", buildId); - checkState(bitsets.size() > 0, "%s no index lists added", buildId); - checkState( - sections.size() == sectionNames.size(), - "%s section list (%s) and name list (%s) are different sizes", - buildId, sections.size(), sectionNames.size()); - - int target = bitsets.size(); - checkSize(mags.size(), target, buildId, "magnitudes"); - checkSize(rates.size(), target, buildId, "rates"); - checkSize(depths.size(), target, buildId, "depths"); - checkSize(dips.size(), target, buildId, "dips"); - checkSize(widths.size(), target, buildId, "widths"); - checkSize(rakes.size(), target, buildId, "rakes"); - } - - private static void checkSize(int size, int target, String classId, String dataId) { - checkState(size == target, "%s too few %s [%s of %s]", classId, dataId, size, target); - } - - Statistics stats; - - SystemSourceSet build() { - validateState(ID); - stats = new Statistics(mMin, mMax); - return new SystemSourceSet(this); - } - } - - /* - * Handle rate calculations internally as SystemSource is not fully - * implemented. If/when it is, this should be removed in favor using - * iterableForLocation and getRupture(0). - */ - - /** - * Return an instance of a {@code Function} that converts a - * {@code SystemSourceSet} to a ground motion model {@code InputList}. - * - * @param location with which to initialize instance. - * @param distance if interest (relevant source radius) - * @param modelMfd MFD to populate - */ - public static Function<SystemSourceSet, IntervalArray> toRatesFunction( - Location location, - double distance, - IntervalArray modelMfd) { - return new ToRates(location, distance, modelMfd); - } - - private static final class ToRates implements Function<SystemSourceSet, IntervalArray> { - - private final Location location; - private final double distance; - private final IntervalArray modelMfd; - - ToRates( - Location location, - double distance, - IntervalArray modelMfd) { - - this.location = location; - this.distance = distance; - this.modelMfd = modelMfd; - } - - @Override - public IntervalArray apply(SystemSourceSet sourceSet) { - IntervalArray.Builder mfdForLocation = IntervalArray.Builder.fromModel(modelMfd); - BitSet siteBitset = sourceSet.bitsetForLocation(location, distance); - if (siteBitset.isEmpty()) { - return modelMfd; - } - Predicate<SystemSource> distanceFilter = new BitsetFilter(siteBitset); - for (SystemSource source : Iterables.filter(sourceSet, distanceFilter::test)) { - mfdForLocation.add(source.magnitude(), source.rate()); - } - return mfdForLocation.multiply(sourceSet.weight()).build(); - } - } - - /* - * System source calculation pipeline. - * - * Rather than expose highly mutable bitsets and attendant logic that is used - * to generate HazardInputs from SystemSourceSets, we opt to locate transform - * Functions and related classes here. - * - * System sources (e.g. 
UCERF3 ruptures) are composed of multiple small (~7km - * long x ~15km wide) adjacent fault sections in a large and dense fault - * network. Each source is defined by the indices of the participating fault - * sections, magnitude, average width, average rake, and average dip, among - * other parameters. - * - * In order to filter ruptures and calculate distance parameters when doing a - * hazard calculation, one can treat each source as a finite fault. This - * ultimately requires careful handling of site-to-source calculations when - * trying to reduce redundant calculations because many sources share the same - * fault sections and sources will be handled one-at-a-time, in sequence. - * - * Alternatively, one can approach the problem from an indexing standpoint, - * precomuting that data which will be required, and then mining it on a - * per-source basis, as follows: - * - * 1) For each source, create a BitSet with size = nSections. Set the bits for - * each section that the source uses. [sourceBitSet] - * - * 2) Create another BitSet with size = nSections. Set the bits for each - * section within the distance cutoff for a Site. Do this quickly using only - * the centroid of each fault section. [siteBitSet] - * - * 3) Create and populate a Map<SectionIndex, double[rJB, rRup, rX]> of - * distance metrics for each section in the siteBitSet. This is created - * pre-sorted ascending on rRup (the closest sections to a site come first). - * - * 4) For each sourceBitSet, 'siteBitSet.intersects(sourceBitSet)' returns - * whether a source is close enough to the site to be considered. - * - * 5) For each considered source, 'sourceBitSet AND siteBitSet' yields a - * BitSet that only includes set bits with indices in the distance metric - * table. - * - * 6) For each source, loop the ascending indices, checking whether the bit at - * 'index' is set in the sources bitset. The first hit will be the closest - * section in a source, relative to a site. (the rX value used is keyed to the - * minimum rRup). - * - * 7) Build GmmInputs and proceed with hazard calculation. - * - * Note on the above. Although one could argue that only rRup or rJb be - * calculated first, there are geometries for which min(rRup) != min(rJB); - * e.g. location on hanging wall of dipping fault that abuts a vertical - * fault... vertical might yield min(rRup) but min(rJB) would be 0 (over - * dipping fault). While checking the bits in a source, we therefore look at - * the three closest sections. - */ - - /* - * Concurrency - * - * The use case assumed here is that 1 or 2 fault systems (e.g. UCERF3 - * branch-averaged solutions) will most commonly be run when supporting - * web-services. We therefore compute hazard by first creating a (large) input - * list and then distribute the more time consuming curve calculation. - * However, if many fault systems were to be run, it probably makes sense to - * farm each onto an independent thread. - */ - - /** - * Return an instance of a {@code Function} that converts a - * {@code SystemSourceSet} to a ground motion model {@code InputList}. - * - * @param site with which to initialize instance. - */ - public static Function<SystemSourceSet, InputList> toInputsFunction(Site site) { - return new ToInputs(site); - } - - private static final class ToInputs implements Function<SystemSourceSet, InputList> { - - private final Site site; - - ToInputs(Site site) { - this.site = site; - } - - @Override - public InputList apply(SystemSourceSet sourceSet) { - - /* Create Site BitSet. 
*/ - double maxDistance = sourceSet.groundMotionModels().maxDistance(); - BitSet siteBitset = sourceSet.bitsetForLocation(site.location(), maxDistance); - if (siteBitset.isEmpty()) { - return SystemInputList.empty(sourceSet); - } - - /* Create and fill distance map. */ - int[] siteIndices = Indexing.bitsToIndices(siteBitset); - ImmutableMap.Builder<Integer, double[]> rMapBuilder = - ImmutableMap.<Integer, double[]> builder() - .orderEntriesByValue(new DistanceTypeSorter(R_RUP_INDEX)); - for (int i : siteIndices) { - Distance r = sourceSet.sections[i].distanceTo(site.location()); - rMapBuilder.put(i, new double[] { r.rJB, r.rRup, r.rX }); - } - - /* Create inputs. */ - Map<Integer, double[]> rMap = rMapBuilder.build(); - Function<SystemSource, HazardInput> inputGenerator = new InputGenerator(rMap, site); - Predicate<SystemSource> rFilter = new BitsetFilter(siteBitset); - Iterable<SystemSource> sources = Iterables.filter(sourceSet, rFilter::test); - - /* Fill input list. */ - SystemInputList inputs = new SystemInputList(sourceSet, rMap.keySet()); - for (SystemSource source : sources) { - inputs.add(inputGenerator.apply(source)); - // for disagg - inputs.addBitset(source.bitset()); - } - - return inputs; - } - } - - private static final class DistanceTypeSorter extends Ordering<double[]> { - - final int rTypeIndex; - - DistanceTypeSorter(int rTypeIndex) { - this.rTypeIndex = rTypeIndex; - } - - @Override - public int compare(double[] left, double[] right) { - if (left != null && right != null) { - return Double.compare(left[rTypeIndex], right[rTypeIndex]); - } else { - throw new IllegalStateException(); - } - } - } - - /* - * Predicate that tests the intersection of a site bitset (fault sections - * wihtin a specified distance of a site) with source bitsets (fault sections - * that participate in a SystemSource/Rupture). - */ - private static class BitsetFilter implements Predicate<SystemSource> { - - private static final String ID = "BitsetFilter"; - private final BitSet siteBitset; - - BitsetFilter(BitSet siteBitset) { - this.siteBitset = siteBitset; - } - - @Override - public boolean test(SystemSource source) { - return siteBitset.intersects(source.bitset()); - } - - @Override - public String toString() { - return ID + " " + siteBitset; - } - } - - private static final int R_JB_INDEX = 0; - private static final int R_RUP_INDEX = 1; - private static final int R_X_INDEX = 2; - - private static final int R_HIT_LIMIT = 3; - - private static final class InputGenerator implements Function<SystemSource, HazardInput> { - - private final Map<Integer, double[]> rMap; - private final Site site; - - InputGenerator( - Map<Integer, double[]> rMap, - Site site) { - - this.rMap = rMap; - this.site = site; - } - - @Override - public HazardInput apply(SystemSource source) { - - /* Find r minima. 
-
-  private static final class DistanceTypeSorter extends Ordering<double[]> {
-
-    final int rTypeIndex;
-
-    DistanceTypeSorter(int rTypeIndex) {
-      this.rTypeIndex = rTypeIndex;
-    }
-
-    @Override
-    public int compare(double[] left, double[] right) {
-      if (left != null && right != null) {
-        return Double.compare(left[rTypeIndex], right[rTypeIndex]);
-      } else {
-        throw new IllegalStateException();
-      }
-    }
-  }
-
-  /*
-   * Predicate that tests the intersection of a site bitset (fault sections
-   * within a specified distance of a site) with source bitsets (fault sections
-   * that participate in a SystemSource/Rupture).
-   */
-  private static class BitsetFilter implements Predicate<SystemSource> {
-
-    private static final String ID = "BitsetFilter";
-    private final BitSet siteBitset;
-
-    BitsetFilter(BitSet siteBitset) {
-      this.siteBitset = siteBitset;
-    }
-
-    @Override
-    public boolean test(SystemSource source) {
-      return siteBitset.intersects(source.bitset());
-    }
-
-    @Override
-    public String toString() {
-      return ID + " " + siteBitset;
-    }
-  }
-
-  private static final int R_JB_INDEX = 0;
-  private static final int R_RUP_INDEX = 1;
-  private static final int R_X_INDEX = 2;
-
-  private static final int R_HIT_LIMIT = 3;
-
-  private static final class InputGenerator implements Function<SystemSource, HazardInput> {
-
-    private final Map<Integer, double[]> rMap;
-    private final Site site;
-
-    InputGenerator(
-        Map<Integer, double[]> rMap,
-        Site site) {
-
-      this.rMap = rMap;
-      this.site = site;
-    }
-
-    @Override
-    public HazardInput apply(SystemSource source) {
-
-      /* Find r minima. */
-      BitSet sections = source.bitset();
-      double rJB = Double.MAX_VALUE;
-      double rRup = Double.MAX_VALUE;
-      double rX = Double.MAX_VALUE;
-      int hitCount = 0;
-      for (int sectionIndex : rMap.keySet()) {
-        if (sections.get(sectionIndex)) {
-          double[] distances = rMap.get(sectionIndex);
-          rJB = min(rJB, distances[R_JB_INDEX]);
-          double rRupNew = distances[R_RUP_INDEX];
-          if (rRupNew < rRup) {
-            rRup = rRupNew;
-            rX = distances[R_X_INDEX];
-          }
-          if (++hitCount > R_HIT_LIMIT) {
-            break;
-          }
-        }
-      }
-
-      double dip = source.dip();
-      double width = source.width();
-      double zTor = source.depth();
-      double zHyp = Faults.hypocentralDepth(dip, width, zTor);
-
-      return new HazardInput(
-          source.rate(),
-          source.magnitude(),
-          rJB,
-          rRup,
-          rX,
-          dip,
-          width,
-          zTor,
-          zHyp,
-          source.rake(),
-          site.vs30(),
-          site.vsInferred(),
-          site.z1p0(),
-          site.z2p5());
-    }
-  }
-
-  private final BitSet bitsetForLocation(Location loc, double r) {
-    BitSet bits = new BitSet(sections.length);
-    int count = 0;
-    for (GriddedSurface surface : sections) {
-      bits.set(count++, horzDistanceFast(loc, surface.centroid()) <= r);
-    }
-    return bits;
-  }
-
-}
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/ZoneRuptureSet.java b/src/main/java/gov/usgs/earthquake/nshmp/model/ZoneRuptureSet.java
index 8ac414c1..696596a3 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/model/ZoneRuptureSet.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/model/ZoneRuptureSet.java
@@ -2,7 +2,6 @@ package gov.usgs.earthquake.nshmp.model;
 
 import static com.google.common.base.Preconditions.checkArgument;
 import static com.google.common.base.Preconditions.checkNotNull;
-import static com.google.common.base.Preconditions.checkState;
 import static gov.usgs.earthquake.nshmp.mfd.Mfd.Type.GR;
 import static gov.usgs.earthquake.nshmp.mfd.Mfd.Type.SINGLE;
 import static gov.usgs.earthquake.nshmp.model.Deserialize.MFD_TREE;
@@ -12,8 +11,10 @@ import static java.util.stream.Collectors.toUnmodifiableList;
 
 import java.nio.file.Path;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Iterator;
 import java.util.List;
 import java.util.Optional;
+import java.util.function.Predicate;
 
 import gov.usgs.earthquake.nshmp.data.DoubleData;
 import gov.usgs.earthquake.nshmp.geo.Location;
@@ -26,11 +27,11 @@ import gov.usgs.earthquake.nshmp.tree.LogicTree;
 import gov.usgs.earthquake.nshmp.tree.Trees;
 
 /*
- * Areal source zone rupture set implemention.
+ * Areal source zone rupture set implementation.
  *
  * @author U.S. Geological Survey
  */
-class ZoneRuptureSet implements RuptureSet {
+class ZoneRuptureSet extends AbstractRuptureSet<PointSource> {
 
   private final SourceFeature.Zone feature;
   private final ModelData data;
@@ -40,7 +41,7 @@ class ZoneRuptureSet implements RuptureSet {
   private final LogicTree<List<Mfd>> mfdsTree;
   private final LogicTree<Mfd> mfdTree;
 
-  private GridSourceSet gss;
+  private GridRuptureSet delegate;
 
   /*
    * Developer notes:
@@ -55,65 +56,50 @@ class ZoneRuptureSet implements RuptureSet {
    */
 
   ZoneRuptureSet(Builder builder) {
+    super(builder);
     this.feature = builder.feature;
     this.data = builder.data;
     this.locations = builder.locations;
     this.mfdsTree = builder.mfdsTree;
     this.mfdTree = builder.mfdTree;
     this.mfds = builder.mfds;
+    // TODO handle SS weight
+    this.delegate = createRuptureSet();
   }
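With this refactor, ZoneRuptureSet no longer hands out a lazily built GridSourceSet; the constructor above eagerly builds a GridRuptureSet and stores it as delegate, and the distanceFilter and iterator overrides that follow simply forward to it. A minimal sketch of the delegation idiom, using hypothetical pared-down interfaces rather than the actual nshmp types:

import java.util.Iterator;
import java.util.function.Predicate;

/* Hypothetical, pared-down analog of RuptureSet<T extends Source>. */
interface MiniRuptureSet<T> extends Iterable<T> {
  Predicate<T> distanceFilter(double[] siteLocation, double distance);
}

/* A zone-style set that forwards to a grid-style delegate built up front. */
final class MiniZoneSet<T> implements MiniRuptureSet<T> {

  private final MiniRuptureSet<T> delegate;

  MiniZoneSet(MiniRuptureSet<T> gridDelegate) {
    /* The real class assembles its delegate from zone feature data. */
    this.delegate = gridDelegate;
  }

  @Override
  public Iterator<T> iterator() {
    return delegate.iterator(); // iteration order is the delegate's
  }

  @Override
  public Predicate<T> distanceFilter(double[] siteLocation, double distance) {
    return delegate.distanceFilter(siteLocation, distance);
  }
}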
 
-  // Declare this in RuptureSet? Source T issue
-  GridSourceSet sourceSet(double weight) {
-    if (gss == null) {
-      gss = createSourceSet(weight);
-    }
-    return gss;
-  }
-
-  private GridSourceSet createSourceSet(double weight) {
-    SourceConfig.Grid config = data.sourceConfig().asGrid();
-
-    GridSourceSet grid = GridLoader.createGrid(
+  private GridRuptureSet createRuptureSet() {
+    GridRuptureSet grid = GridLoader.createGrid(
         feature.name,
         feature.id,
-        data.tectonicSetting(),
+        data,
         SourceType.ZONE,
-        config,
-        feature.source,
+        feature,
         locations,
         mfds,
         mfdsTree,
         Optional.empty(),
-        data.gmms(),
-        weight);
-
+        data.gmms());
     return grid;
   }
 
   @Override
-  public String name() {
-    return feature.name;
-  }
-
-  @Override
-  public int id() {
-    return feature.id;
+  public int size() {
+    return locations.size();
   }
 
   @Override
-  public int size() {
-    return locations.size();
+  public LogicTree<Mfd> mfdTree() {
+    throw new UnsupportedOperationException();
   }
 
   @Override
-  public SourceType type() {
-    return SourceType.ZONE;
+  public Predicate<PointSource> distanceFilter(Location loc, double distance) {
+    return delegate.distanceFilter(loc, distance);
   }
 
   @Override
-  public LogicTree<Mfd> mfdTree() {
-    throw new UnsupportedOperationException();
+  public Iterator<PointSource> iterator() {
+    return delegate.iterator();
   }
 
   @Override
@@ -126,9 +112,8 @@ class ZoneRuptureSet implements RuptureSet {
     return new Builder();
   }
 
-  static final class Builder {
-
-    private boolean built = false;
+  /* Single use builder. */
+  static final class Builder extends AbstractRuptureSet.Builder {
 
     private SourceFeature.Zone feature;
     private ModelData data;
@@ -144,13 +129,33 @@ class ZoneRuptureSet implements RuptureSet {
 
     private LogicTree<double[]> ratesTree;
 
+    private Builder() {
+      super(SourceType.ZONE);
+    }
+
+    // @Override
+    // Builder name(String name) {
+    //   super.name(name);
+    //   return this;
+    // }
+    //
+    // @Override
+    // Builder id(int id) {
+    //   super.id(id);
+    //   return this;
+    // }
+
     Builder feature(SourceFeature.Zone feature) {
       this.feature = feature;
+      super.name(feature.name);
+      super.id(feature.id);
       return this;
     }
 
     Builder modelData(ModelData data) {
       this.data = data;
+      super.setting(data.tectonicSetting());
+      super.gmms(data.gmms());
       return this;
     }
 
@@ -176,7 +181,7 @@ class ZoneRuptureSet implements RuptureSet {
     }
 
     private void validateAndInit(String label) {
-      checkState(!built, "Single use builder");
+      validateState(label);
       checkNotNull(data, "%s model data", label);
       checkNotNull(feature, "%s feature", label);
       checkNotNull(ratesPath, "%s rates file path", label);
@@ -283,8 +288,8 @@ class ZoneRuptureSet implements RuptureSet {
         }
       }
       return mfdsTree.build();
-    }
   }
+  }
diff --git a/src/test/java/gov/usgs/earthquake/nshmp/model/LoaderTests.java b/src/test/java/gov/usgs/earthquake/nshmp/model/LoaderTests.java
index 20ff5c46..2237cd95 100644
--- a/src/test/java/gov/usgs/earthquake/nshmp/model/LoaderTests.java
+++ b/src/test/java/gov/usgs/earthquake/nshmp/model/LoaderTests.java
@@ -69,7 +69,7 @@ class LoaderTests {
   void emptyTestEclipseBug() {
     /*
      * Silly workaround for junit5 tests in eclipse where no tests are found
-     * unless at least one @Test annotaion exists. Doesn't fix fact that one
+     * unless at least one @Test annotation exists. Doesn't fix fact that one
      * test below cannot be run on its own.
      */
  }
--
GitLab
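A recurring pattern in this refactor is visible in ZoneRuptureSet.Builder above: fields shared by all rupture sets (name, id, tectonic setting, gmms) move up to AbstractRuptureSet.Builder, and subclasses forward to the parent setters from methods such as feature() and modelData() instead of re-exposing name() and id(), as the commented-out overrides hint. A minimal sketch of that idiom under hypothetical names; the real parent builder certainly differs in detail:

import static java.util.Objects.requireNonNull;

/* Hypothetical parent builder holding fields shared by all rupture sets. */
abstract class BaseBuilder {

  String name;
  int id;
  private boolean built = false;

  void name(String name) {
    this.name = requireNonNull(name);
  }

  void id(int id) {
    this.id = id;
  }

  /* Guards against reuse; subclasses call this when validating. */
  void validateState(String label) {
    if (built) {
      throw new IllegalStateException(label + ": single use builder");
    }
    built = true;
  }
}

/* Subclass forwards shared fields rather than overriding parent setters. */
final class ZoneBuilder extends BaseBuilder {

  String featureName;

  ZoneBuilder feature(String featureName) {
    this.featureName = featureName;
    super.name(featureName); // shared field set as a side effect
    super.id(featureName.hashCode()); // hypothetical stand-in for a feature id
    return this; // subclass type retained without covariant overrides
  }
}

Forwarding sidesteps the covariant-return friction of builder inheritance in Java: parent setters returning BaseBuilder would otherwise break subclass method chaining unless every setter were overridden.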