From 5403254e53f43d112616a5139ec37d7916f81ed8 Mon Sep 17 00:00:00 2001 From: Peter Powers <pmpowers@usgs.gov> Date: Sun, 29 Nov 2020 16:07:44 -0700 Subject: [PATCH] lib dev --- .../usgs/earthquake/nshmp/calc/EqRate.java | 4 +- .../earthquake/nshmp/calc/HazardCurveSet.java | 2 + .../earthquake/nshmp/calc/Transforms.java | 5 +- .../gov/usgs/earthquake/nshmp/mfd/Mfds.java | 6 +- .../nshmp/model/ClusterRuptureSet.java | 54 +- .../earthquake/nshmp/model/ClusterSource.java | 4 +- .../earthquake/nshmp/model/Deserialize.java | 82 +- .../nshmp/model/FaultRuptureSet.java | 68 +- .../earthquake/nshmp/model/FaultSource.java | 4 + .../earthquake/nshmp/model/HazardModel.java | 102 ++- .../nshmp/model/InterfaceRuptureSet.java | 5 - .../earthquake/nshmp/model/MfdConfig.java | 5 +- .../earthquake/nshmp/model/ModelFiles.java | 219 +++++- .../earthquake/nshmp/model/ModelLoader.java | 278 ++++--- .../earthquake/nshmp/model/ModelUtil.java | 18 + .../earthquake/nshmp/model/SourceFeature.java | 7 +- .../nshmp/model/SystemRuptureSet.java | 720 ++++++++++++++++++ 17 files changed, 1242 insertions(+), 341 deletions(-) create mode 100644 src/main/java/gov/usgs/earthquake/nshmp/model/SystemRuptureSet.java diff --git a/src/main/java/gov/usgs/earthquake/nshmp/calc/EqRate.java b/src/main/java/gov/usgs/earthquake/nshmp/calc/EqRate.java index 4a5ca5a0..a60c3810 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/calc/EqRate.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/calc/EqRate.java @@ -22,7 +22,7 @@ import gov.usgs.earthquake.nshmp.data.XySequence; import gov.usgs.earthquake.nshmp.geo.Location; import gov.usgs.earthquake.nshmp.mfd.Mfd; import gov.usgs.earthquake.nshmp.mfd.Mfds; -import gov.usgs.earthquake.nshmp.model.ClusterRuptureSet; +import gov.usgs.earthquake.nshmp.model.ClusterSource; import gov.usgs.earthquake.nshmp.model.ClusterSourceSet; import gov.usgs.earthquake.nshmp.model.Distance; import gov.usgs.earthquake.nshmp.model.HazardModel; @@ -284,7 +284,7 @@ public class EqRate { IntervalArray.Builder sourceSetMfd = IntervalArray.Builder.fromModel(modelMfd); for (Source source : sourceSet.iterableForLocation(location, distance)) { - ClusterRuptureSet clusterSource = (ClusterRuptureSet) source; + ClusterSource clusterSource = (ClusterSource) source; IntervalArray.Builder faultMfd = Builder .copyOf(faultMfd( clusterSource.faults(), diff --git a/src/main/java/gov/usgs/earthquake/nshmp/calc/HazardCurveSet.java b/src/main/java/gov/usgs/earthquake/nshmp/calc/HazardCurveSet.java index 31558acf..e523c2b6 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/calc/HazardCurveSet.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/calc/HazardCurveSet.java @@ -207,6 +207,8 @@ final class HazardCurveSet { Map<Gmm, XySequence> clusterCurves = new EnumMap<>(Gmm.class); // loop Gmms based on what's supported at this distance for (Gmm gmm : gmmWeightMap.keySet()) { + // gmmWt * geometryWt * rateWt + // (mags have already been collapsed, UNDO TODO) double weight = gmmWeightMap.get(gmm) * sourceSet.weight() * clusterWeight; XySequence clusterCurve = MutableXySequence.copyOf(curveMapIn.get(gmm)).multiply(weight); curveMapBuild.get(gmm).add(clusterCurve); diff --git a/src/main/java/gov/usgs/earthquake/nshmp/calc/Transforms.java b/src/main/java/gov/usgs/earthquake/nshmp/calc/Transforms.java index 1b3c791d..92120ea7 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/calc/Transforms.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/calc/Transforms.java @@ -612,11 +612,11 @@ final class Transforms { /* * TODO: this is klunky; we 
need to get and store the gmm branch weights * so that they can be applied AFTER the cluster calculations have been - * done for each gmm branch. Is there a better way? SHould Gmms that + * done for each gmm branch. Is there a better way? Should Gmms that * return MultiScalarGroundMotions be able to supply period dependent * weights? * - * ALso, we've presently got to dig down to see if we've got + * Also, we've presently got to dig down to see if we've got * multiScalarGMs; if we do, we can then only process that type. */ ScalarGroundMotion sgmModel = clusterGroundMotions.get(0).gmMap @@ -718,6 +718,7 @@ final class Transforms { truncationLevel, imt, utilCurve); + // mag weight stored as rate; this needs to change utilCurve.multiply(groundMotions.inputs.get(i).rate); magVarCurve.add(utilCurve); } diff --git a/src/main/java/gov/usgs/earthquake/nshmp/mfd/Mfds.java b/src/main/java/gov/usgs/earthquake/nshmp/mfd/Mfds.java index 49c2c997..e3a21019 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/mfd/Mfds.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/mfd/Mfds.java @@ -50,9 +50,9 @@ public final class Mfds { */ public static double cumulativeRate(XySequence xy) { return xy.yValues().reduce(0.0, Double::sum); - // return xy.stream().mapToDouble(XyPoint::y) - // .reduce(Double::sum) - // .getAsDouble(); + // TODO is this method even necessary, really? + // the above can also be xy.yValues().reduce(Double::sum); + // or even shorter, but maybe with more overhead? xy.yValues().sum() } /** diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/ClusterRuptureSet.java b/src/main/java/gov/usgs/earthquake/nshmp/model/ClusterRuptureSet.java index 50b895ff..18441bd0 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/ClusterRuptureSet.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/model/ClusterRuptureSet.java @@ -45,18 +45,14 @@ public class ClusterRuptureSet implements RuptureSet { final String name; final int id; - final double rate; // from the default mfd xml - // final FaultSourceSet faults; - - // final LogicTree<Double> rateTree; + final ModelData data; // holds rate tree (numeric values) final List<FaultRuptureSet> faultRuptureSets; private ClusterRuptureSet(Builder builder) { this.name = builder.name; this.id = builder.id; - // this.rateTree = builder.rateTree; + this.data = builder.data; this.faultRuptureSets = builder.faultRuptureSets; - this.rate = 0.002; // TODO clean/fix } @Override @@ -92,13 +88,13 @@ public class ClusterRuptureSet implements RuptureSet { return Locations.closestPoint(site, locs.build()); } - /** - * {@code (1 / return period)} of this source in years. - * @return the cluster rate - */ - public double rate() { - return rate; - } + // /** + // * {@code (1 / return period)} of this source in years. + // * @return the cluster rate + // */ + // public double rate() { + // return rate; + // } /** * The weight applicable to this {@code ClusterSource}. 
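The Mfds.cumulativeRate() TODO above weighs three equivalent stream reductions. A minimal sketch of the trade-offs, assuming yValues() yields a java.util.stream.DoubleStream (the wrapper class and method names below are illustrative only):

    import java.util.stream.DoubleStream;

    class CumulativeRateSketch {

      // Identity reduce: the current implementation; returns 0.0 for an
      // empty stream without boxing.
      static double viaIdentityReduce(DoubleStream ys) {
        return ys.reduce(0.0, Double::sum);
      }

      // Bare reduce: returns OptionalDouble, so the empty-stream default
      // must be supplied explicitly.
      static double viaBareReduce(DoubleStream ys) {
        return ys.reduce(Double::sum).orElse(0.0);
      }

      // sum(): tersest; the javadoc permits compensated (Kahan) summation,
      // so results may differ from a naive reduction in the last few ulps.
      static double viaSum(DoubleStream ys) {
        return ys.sum();
      }
    }
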
@@ -119,7 +115,6 @@ public class ClusterRuptureSet implements RuptureSet { public String toString() { Map<String, ?> data = Map.of( "name", name(), - "rate", rate(), "weight", weight()); StringBuilder sb = new StringBuilder() .append(getClass().getSimpleName()) @@ -143,24 +138,12 @@ public class ClusterRuptureSet implements RuptureSet { boolean built = false; - // private ModelData data; - - // required + private ModelData data; // holds cluster rate-tree private String name; private Integer id; - // private Map<Integer, SourceFeature.NshmFault> featureMap; - - // private LogicTree<Double> rateTree; - - // List<FaultRuptureSet.Builder> faultRuptureSetBuilders = new - // ArrayList<>(); - List<FaultRuptureSet> faultRuptureSets = new ArrayList<>(); - // Double rate; - // FaultSourceSet faults; - Builder name(String name) { this.name = checkName(name, "ClusterRuptureSet"); return this; } @@ -171,11 +154,11 @@ return this; } - // /* Set MFD helper data. */ - // Builder modelData(ModelData data) { - // this.data = data; - // return this; - // } + /* Set MFD helper data. */ + Builder modelData(ModelData data) { + this.data = data; + return this; + } // @Deprecated // Builder rate(double rate) { @@ -202,6 +185,11 @@ // } Builder addRuptureSet(FaultRuptureSet ruptureSet) { + if (faultRuptureSets.size() > 0) { + ModelUtil.checkTreeIdsAndWeights( + faultRuptureSets.get(0).mfdTree, + ruptureSet.mfdTree); + } faultRuptureSets.add(ruptureSet); return this; } @@ -217,7 +205,7 @@ checkState(!built, "Single use builder"); checkNotNull(name, "%s name", label); checkNotNull(id, "%s id", label); - // checkNotNull(data, "%s model data", label); + checkNotNull(data, "%s model data", label); // checkState(rate != null, "%s rate not set", source); // checkState(faults != null, "%s has no fault sources", source); diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/ClusterSource.java b/src/main/java/gov/usgs/earthquake/nshmp/model/ClusterSource.java index 50bf28a8..da26025a 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/ClusterSource.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/model/ClusterSource.java @@ -42,7 +42,9 @@ import gov.usgs.earthquake.nshmp.mfd.Mfd; */ public class ClusterSource implements Source { - final double rate; // from the default mfd xml + // the weight of ClusterSource is stored in the nested FSS + + final double rate; final FaultSourceSet faults; ClusterSource(double rate, FaultSourceSet faults) { diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/Deserialize.java b/src/main/java/gov/usgs/earthquake/nshmp/model/Deserialize.java index 3be25e6d..ffae5a8a 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/Deserialize.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/model/Deserialize.java @@ -27,6 +27,8 @@ import com.google.gson.JsonParser; import gov.usgs.earthquake.nshmp.fault.FocalMech; import gov.usgs.earthquake.nshmp.fault.surface.RuptureFloating; import gov.usgs.earthquake.nshmp.fault.surface.RuptureScaling; +import gov.usgs.earthquake.nshmp.geo.json.Feature; +import gov.usgs.earthquake.nshmp.geo.json.GeoJson; import gov.usgs.earthquake.nshmp.gmm.Gmm; import gov.usgs.earthquake.nshmp.gmm.UncertaintyModel; import gov.usgs.earthquake.nshmp.mfd.Mfd; @@ -98,44 +100,44 @@ class Deserialize { /* Read a MFD configuration. 
*/ static MfdConfig mfdConfig(Path json) { - var config = jsonObject(json); - var builder = MfdConfig.builder(); + JsonObject config = jsonObject(json); + MfdConfig.Builder builder = MfdConfig.builder(); - var mMin = config.get(MfdConfig.MIN_MAGNITUDE_KEY); - builder = mMin.isJsonNull() - ? builder - : builder.minimumMagnitude(mMin.getAsDouble()); + JsonElement mMin = config.get(MfdConfig.MIN_MAGNITUDE_KEY); + if (!mMin.isJsonNull()) { + builder.minimumMagnitude(mMin.getAsDouble()); + } - var epiTree = config.get(MfdConfig.EPISTEMIC_TREE_KEY); - builder = epiTree.isJsonNull() - ? builder - : builder.epistemicTree(doubleTree(epiTree, MfdConfig.EPISTEMIC_TREE_KEY)); + JsonElement epiTree = config.get(MfdConfig.EPISTEMIC_TREE_KEY); + if (!epiTree.isJsonNull()) { + builder.epistemicTree(doubleTree(epiTree, MfdConfig.EPISTEMIC_TREE_KEY)); + } - var aleaProps = config.get(MfdConfig.ALEATORY_PROPS_KEY); - builder = aleaProps.isJsonNull() - ? builder - : builder.aleatoryProperties(GSON.fromJson(aleaProps, AleatoryProperties.class)); + JsonElement aleaProps = config.get(MfdConfig.ALEATORY_PROPS_KEY); + if (!aleaProps.isJsonNull()) { + builder.aleatoryProperties(GSON.fromJson(aleaProps, AleatoryProperties.class)); + } return builder.build(); } /* Read a GMM configuration. */ static GmmConfig gmmConfig(Path json) { - var config = jsonObject(json); - var builder = GmmConfig.builder(); + JsonObject config = jsonObject(json); + GmmConfig.Builder builder = GmmConfig.builder(); double rMax = config.get(GmmConfig.MAX_DISTANCE_KEY).getAsDouble(); builder.maximumDistance(rMax); - var epiTree = config.get(GmmConfig.EPISTEMIC_TREE_KEY); - builder = epiTree.isJsonNull() - ? builder - : builder.epistemicTree(doubleTree(epiTree, GmmConfig.EPISTEMIC_TREE_KEY)); + JsonElement epiTree = config.get(GmmConfig.EPISTEMIC_TREE_KEY); + if (!epiTree.isJsonNull()) { + builder.epistemicTree(doubleTree(epiTree, GmmConfig.EPISTEMIC_TREE_KEY)); + } - var epiModel = config.get(GmmConfig.EPISTEMIC_MODEL_KEY); - builder = epiModel.isJsonNull() - ? builder - : builder.epistemicModel(UncertaintyModel.valueOf(epiModel.getAsString())); + JsonElement epiModel = config.get(GmmConfig.EPISTEMIC_MODEL_KEY); + if (!epiModel.isJsonNull()) { + builder.epistemicModel(UncertaintyModel.valueOf(epiModel.getAsString())); + } return builder.build(); } @@ -232,6 +234,13 @@ class Deserialize { return Map.copyOf(sectionMap); } + /* Create an id:section (system) map from a fault-sections.geojson. */ + static Map<Integer, SourceFeature.NshmFault> systemSections(Path json) { + List<Feature> features = GeoJson.from(json).toFeatureCollection().features(); + // SourceFeature.NshmFault.newNshmFault(geojson) + return null; + } + /* Create an interface rupture set. */ static InterfaceRuptureSet interfaceRuptureSet(Path json, ModelData data) { @@ -266,19 +275,16 @@ class Deserialize { ModelData data) { JsonObject obj = jsonObject(json); - return faultRuptureSet(obj, data.mfdMap()) - .modelData(data) - .build(); + return faultRuptureSet(obj, data); } /* Create a fault rupture set builder. 
*/ - private static FaultRuptureSet.Builder faultRuptureSet( + private static FaultRuptureSet faultRuptureSet( JsonObject obj, - Optional<Map<String, LogicTree<Mfd.Properties>>> mfdMap) { - - // TODO can this return FRS instead of builder + ModelData data) { - FaultRuptureSet.Builder ruptureSet = FaultRuptureSet.builder(); + FaultRuptureSet.Builder ruptureSet = FaultRuptureSet.builder() + .modelData(data); ruptureSet.name(obj.get(NAME).getAsString()); @@ -300,9 +306,9 @@ class Deserialize { ruptureSet.properties(props); } - ruptureSet.mfdTree(mfdTree(obj, mfdMap)); + ruptureSet.mfdTree(mfdTree(obj, data.mfdMap())); - return ruptureSet; + return ruptureSet.build(); } /* Create a rupture set builder, initialized with data from file. */ @@ -311,7 +317,8 @@ class Deserialize { ModelData data) { JsonObject obj = jsonObject(json); - ClusterRuptureSet.Builder clusterSet = ClusterRuptureSet.builder(); + ClusterRuptureSet.Builder clusterSet = ClusterRuptureSet.builder() + .modelData(data); clusterSet.name(obj.get(NAME).getAsString()); clusterSet.id(obj.get(ID).getAsInt()); @@ -321,9 +328,7 @@ class Deserialize { JsonArray ruptures = obj.get(RUPTURE_SETS).getAsJsonArray(); for (JsonElement rupture : ruptures) { - FaultRuptureSet ruptureSet = faultRuptureSet(rupture.getAsJsonObject(), data.mfdMap()) - .modelData(data) - .build(); + FaultRuptureSet ruptureSet = faultRuptureSet(rupture.getAsJsonObject(), data); clusterSet.addRuptureSet(ruptureSet); } @@ -385,7 +390,6 @@ class Deserialize { default: throw new IllegalArgumentException("Unknown MFD type: " + type); } - System.out.println(properties); return properties; } diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/FaultRuptureSet.java b/src/main/java/gov/usgs/earthquake/nshmp/model/FaultRuptureSet.java index a3530f62..f574a664 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/FaultRuptureSet.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/model/FaultRuptureSet.java @@ -200,7 +200,6 @@ public class FaultRuptureSet implements RuptureSet { feature = createFeature(); - // System.out.println(feature.source.toJson()); // TODO test length property rounding /* Filter of unlikely combination of SINGLEs epistemic uncertainty. */ @@ -214,10 +213,6 @@ public class FaultRuptureSet implements RuptureSet { MfdConfig mfdConfig = data.mfdConfig().orElseThrow(); Optional<LogicTree<Double>> moRateTree = Optional.empty(); - if (data.isClusterModel()) { - System.out.println("YOYO"); - } - /* Build mfd-tree from fault slip or recurrence rates. 
*/ if (data.faultRateTree().isPresent()) { @@ -248,14 +243,15 @@ // // feature.magnitude // } - System.out.println(mfdTree); + // System.out.println(mfdTree); // if (name.contains("Grand")) { + // if (data.isClusterModel()) { // System.out.println(mfdTree); - // System.out.println(mfdTotal.data()); - for (Branch<Mfd> b : mfdTree) { - System.out.println(b.id() + " " + b.value().data()); - } + // // System.out.println(mfdTotal.data()); + // for (Branch<Mfd> b : mfdTree) { + // System.out.println(b.id() + " " + b.value().data()); + // } // } SourceConfig.Fault config = data.sourceConfig().asFault(); @@ -317,7 +313,6 @@ .properties(props.build()) .build(); - System.out.println(feature.toJson()); return new SourceFeature.NshmFault(feature); } @@ -327,7 +322,6 @@ LogicTree.Builder<Mfd.Properties> propsTree = LogicTree.builder(mfdPropsTree.name()); LogicTree<Double> rateTree = data.rateTree().orElseThrow(); for (Branch<Double> rBranch : rateTree) { - System.out.println(rBranch.value()); /* * TODO need to be careful here; this catch is for additive New Madrid * branches; null rates have been converted to 0; perhaps we should use @@ -378,41 +372,9 @@ branch.value().type() + " MFD not supported"); } } - LogicTree<Mfd.Properties> pt = propsTree.build(); - // System.out.println(pt); - // for (Branch<Mfd.Properties> b : pt) { - // if (b.value().type == Mfd.Type.SINGLE) { - // System.out.println(" " + b.value().type + " " + - // b.value().getAsSingle().m); - // } else { - // Mfd.Properties.GutenbergRichter grPr = b.value().getAsGr(); - // System.out.println( - // " " + b.value().type + " " + grPr.mMin + " " + grPr.mMax + " " + - // grPr.Δm); - // } - // } - return pt; + return propsTree.build(); } - // TODO clean - // public static void main(String[] args) { - // - // /* - // * This is for the Bever Creek fault in Oregon; it's not clear how two - // * different magnitudes exist, yes it is - // */ - // double mo1 = Earthquakes.magToMoment(6.51); - // double mo2 = Earthquakes.magToMoment(6.52); - // - // double moRate = 2.677288582552237E15; - // - // System.out.println("rate 6.51: " + moRate / mo1); - // System.out.println("rate 6.52: " + moRate / mo2); - // - // System.out.println(Mfds.gutenbergRichterRate(1.82066, 0.8, 6.51)); - // - // } - /* * Update GR properties for fault 'characteristic' magnitude, or return * SINGLE properties instead if mMax < mCutoff or nm = 1. @@ -453,13 +415,8 @@ * TODO: consider short citcuiting singletons if feature as represented in * model is consistent with rupture set requirements * - * TODO: Split into 3 features for normal faults; this may happen prior to - * getting here - * - * TODO: Copy properties from multi segment ruptures (mostly 2008 model) - * * TODO: For documentation; there are many options avilable when defining - * both sfautl and grid based sources that may necessarily be able to be + * both fault and grid based sources that may not necessarily be able to be * combined. For example, normal fault dip variants are difficult to handle * in the context of a cluster model. */ @@ -552,8 +509,6 @@ ? mo * rate : Earthquakes.moment(area, rate / 1000.0); - // System.out.println(branch.id() + " " + moRate); /* * 2014 model consistency. 
Convert to annual rate and set scale to seven * decimal places to match the MFD values in fortran and XML input files @@ -566,9 +521,6 @@ public class FaultRuptureSet implements RuptureSet { */ double eqRate = Maths.round(moRate / mo, 7); moRate = eqRate * mo; - - // System.out.println(branch.id() + " " + moRate); - moRateTree.addBranch(branch.id(), moRate, branch.weight()); } return moRateTree.build(); @@ -630,10 +582,7 @@ public class FaultRuptureSet implements RuptureSet { double mMaxEpi = gr.mMax() + epiBranch.value() + gr.Δm() / 2; double weightEpi = mfdWt * epiBranch.weight(); - // System.out.println(grData.mMin + " " + grData.mMax); - // checkEpiLow(grData.mMax + epiBranch.value()) // TODO check if ever thrown - // boolean grData.mMin + (grData.Δm / 2) int nMagEpi = Mfds.magCount(gr.mMin(), mMaxEpi, gr.Δm()); checkState(nMagEpi > 0, "GR MFD epistemic branch with no magnitudes"); boolean epiOk = checkEpi(gr.mMin(), gr.mMax(), gr.Δm(), epiBranch.value()); @@ -675,7 +624,6 @@ public class FaultRuptureSet implements RuptureSet { String mfdId = mfdBranch.id(); double mfdWt = mfdBranch.weight(); - System.out.println(single.rate()); if (moBranch.isPresent()) { mfdId = mfdBranchId(moBranch.orElseThrow().id(), mfdId); diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/FaultSource.java b/src/main/java/gov/usgs/earthquake/nshmp/model/FaultSource.java index 1ad50342..56daab8e 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/FaultSource.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/model/FaultSource.java @@ -88,6 +88,10 @@ public class FaultSource implements Source { .map(this::createRuptureList) .collect(toUnmodifiableList()); + // for (Mfd mfd : scaledMfds) { + // System.out.println(mfd.data()); + // } + // System.out.println(scaledMfds); // System.out.println(ruptureLists.size()); // TODO clean diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/HazardModel.java b/src/main/java/gov/usgs/earthquake/nshmp/model/HazardModel.java index 05be8680..83114150 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/HazardModel.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/model/HazardModel.java @@ -18,11 +18,14 @@ import com.google.common.collect.Sets; import com.google.common.collect.Streams; import gov.usgs.earthquake.nshmp.Faults; +import gov.usgs.earthquake.nshmp.Maths; import gov.usgs.earthquake.nshmp.calc.CalcConfig; import gov.usgs.earthquake.nshmp.geo.LocationList; import gov.usgs.earthquake.nshmp.gmm.Gmm; import gov.usgs.earthquake.nshmp.gmm.GroundMotionModel; import gov.usgs.earthquake.nshmp.model.SourceTree.Leaf; +import gov.usgs.earthquake.nshmp.tree.Branch; +import gov.usgs.earthquake.nshmp.tree.LogicTree; /** * A {@code HazardModel} is the top-level wrapper for earthquake {@link Source} @@ -213,10 +216,6 @@ public final class HazardModel implements Iterable<SourceSet<? extends Source>> } private void addSourceSetsFromTree(SourceTree tree) { - // System.out.println(tree.leafBranches.values().size()); - // System.out.println(tree.type); - // System.out.println(tree.name); - switch (tree.type()) { case ZONE: zoneSourceSetsFromTree(tree); @@ -238,61 +237,90 @@ public final class HazardModel implements Iterable<SourceSet<? 
extends Source>> } } + /* + * TODO We really should be able to collapse cluster rate tree as well; + * never really thought about this because the rate tree was formerly + * isolated in individual source files, but now we've split the top level + * source-tree branches on geometry/position + */ private void faultSourceSetsFromTree(SourceTree tree) { - FaultSourceSet.Builder faults = new FaultSourceSet.Builder(); - faults.name(tree.name()); - faults.id(-1); - faults.weight(1.0); - faults.gmms(tree.gmms()); - - ClusterSourceSet.Builder clusters = new ClusterSourceSet.Builder(); - clusters.name(tree.name()); - clusters.id(-1); - clusters.weight(1.0); - clusters.gmms(tree.gmms()); - int clusterCount = 0; + // ClusterSourceSet.Builder clusters = new ClusterSourceSet.Builder(); + // clusters.name(tree.name()); + // clusters.id(-1); + // clusters.weight(1.0); + // clusters.gmms(tree.gmms()); + // int clusterCount = 0; for (Leaf leaf : tree.branchMap().keySet()) { RuptureSet rs = leaf.ruptureSet; + double branchWeight = tree.branchWeights().get(leaf); + if (leaf.ruptureSet.type() == SourceType.CLUSTER) { System.out.println("Flt to ClusterSrcSet: " + rs.type() + " " + rs.name()); ClusterRuptureSet crs = (ClusterRuptureSet) rs; - ClusterSource.Builder csBuilder = new ClusterSource.Builder() - .name(crs.name) - .id(crs.id) - .rate(0.002); // TODO needs rate-tree handling - - FaultSourceSet.Builder fss = new FaultSourceSet.Builder(); - fss.name(crs.name); - fss.id(crs.id); - fss.weight(crs.weight()); - fss.gmms(tree.gmms()); - - for (FaultRuptureSet frs : crs.faultRuptureSets) { - fss.source(faultRuptureSetToSource(frs, 1.0), 1.0); + + ClusterSourceSet.Builder css = new ClusterSourceSet.Builder(); + css.name(crs.name); + css.id(crs.id); + css.gmms(tree.gmms()); + css.weight(branchWeight); // NM geometry branch + + // TODO consider ability to collapse cluster rate-tree. + LogicTree<Double> rateTree = crs.data.rateTree().orElseThrow(); + + for (Branch<Double> rateBranch : rateTree) { + + double returnPeriod = rateBranch.value(); + double rate = (returnPeriod <= 0.0) + ? 
0.0 + : Maths.roundToDigits(1.0 / returnPeriod, 4); + + String clusterLabel = crs.name + " : " + ((int) returnPeriod) + "-yr"; + ClusterSource.Builder csBuilder = new ClusterSource.Builder() + .name(clusterLabel) + .id(crs.id) + .rate(rate); // cluster rate + + FaultSourceSet.Builder fss = new FaultSourceSet.Builder(); + fss.name(clusterLabel); + fss.id(crs.id); + fss.gmms(tree.gmms()); + + // The weight of the fss gets called by ClusterSource.weight() + // and is used in HazardCurveSet.Builder.addCurves() + fss.weight(rateBranch.weight()); + + for (FaultRuptureSet frs : crs.faultRuptureSets) { + fss.source(faultRuptureSetToSource(frs, 1.0), 1.0); + } + csBuilder.faults(fss.buildFaultSet()); + css.source(csBuilder.buildClusterSource()); + } - csBuilder.faults(fss.buildFaultSet()); - clusters.source(csBuilder.buildClusterSource()); - clusterCount++; + + addSourceSet(css.buildClusterSet()); } else { System.out.println("Flt to FaultSrcSet: " + rs.type() + " " + rs.name()); + FaultSourceSet.Builder faults = new FaultSourceSet.Builder(); + faults.name(tree.name()); + faults.id(-1); + faults.weight(1.0); + faults.gmms(tree.gmms()); + double leafWeight = tree.branchWeights().get(leaf); FaultRuptureSet frs = (FaultRuptureSet) rs; faults.source(faultRuptureSetToSource(frs, leafWeight), leafWeight); - } - } - addSourceSet(faults.buildFaultSet()); - if (clusterCount > 0) { - addSourceSet(clusters.buildClusterSet()); + addSourceSet(faults.buildFaultSet()); + } } } diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/InterfaceRuptureSet.java b/src/main/java/gov/usgs/earthquake/nshmp/model/InterfaceRuptureSet.java index 66eae12c..f895947d 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/InterfaceRuptureSet.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/model/InterfaceRuptureSet.java @@ -241,11 +241,6 @@ public class InterfaceRuptureSet implements RuptureSet { ModelData mfdData, LogicTree<Mfd.Properties> mfdPropertiesTree) { - /* Filter of unlikely combination. 
*/ - if (mfdData.mfdConfig().isPresent()) { - FaultRuptureSet.checkEpistemic(mfdData.mfdConfig().get(), mfdPropertiesTree); - } - LogicTree.Builder<Mfd> mfdTree = LogicTree.builder(MFD_TREE); for (Branch<Mfd.Properties> branch : mfdPropertiesTree) { diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/MfdConfig.java b/src/main/java/gov/usgs/earthquake/nshmp/model/MfdConfig.java index b4f54605..3b417424 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/MfdConfig.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/model/MfdConfig.java @@ -1,5 +1,6 @@ package gov.usgs.earthquake.nshmp.model; +import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.Preconditions.checkState; import java.util.Optional; @@ -52,7 +53,7 @@ class MfdConfig { private boolean built = false; Builder epistemicTree(LogicTree<Double> tree) { - this.epistemicTree = Optional.ofNullable(tree); + this.epistemicTree = Optional.of(checkNotNull(tree)); if (this.epistemicTree.isPresent()) { this.minEpiOffset = OptionalDouble.of( this.epistemicTree.get().branches().stream() @@ -64,7 +65,7 @@ class MfdConfig { } Builder aleatoryProperties(AleatoryProperties props) { - this.aleatoryProperties = Optional.ofNullable(props); + this.aleatoryProperties = Optional.ofNullable(checkNotNull(props)); return this; } diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/ModelFiles.java b/src/main/java/gov/usgs/earthquake/nshmp/model/ModelFiles.java index 6b97a839..0ee54f75 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/ModelFiles.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/model/ModelFiles.java @@ -1,10 +1,19 @@ package gov.usgs.earthquake.nshmp.model; +import static com.google.common.base.CaseFormat.LOWER_HYPHEN; +import static com.google.common.base.CaseFormat.UPPER_UNDERSCORE; +import static com.google.common.base.Preconditions.checkNotNull; + +import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; +import java.util.List; import java.util.Map; import java.util.Optional; +import java.util.function.BiFunction; +import java.util.function.Consumer; import java.util.function.Function; +import java.util.stream.Stream; import gov.usgs.earthquake.nshmp.calc.CalcConfig; import gov.usgs.earthquake.nshmp.gmm.Gmm; @@ -12,49 +21,142 @@ import gov.usgs.earthquake.nshmp.mfd.Mfd; import gov.usgs.earthquake.nshmp.tree.LogicTree; /** - * Factory class with methods to read individual files in a NSHM. All read*() - * methods delegate file parsing to Deserialize and return an Optional that may - * be empty if the requested file is not required or found. All methods take a - * parent directory as an argument. + * Factory class for processing NSHM model files and directories. Class provides + * methods to check for, process, or read all file types and directory contents + * in a NSHM. Most read*() methods delegate file parsing to Deserialize and + * return an Optional that may be empty if the requested file is not found. All + * such methods take a parent directory as an argument. * * @author U.S. 
Geological Survey */ class ModelFiles { - static final String MODEL_INFO = "model-info.json"; - static final String CALC_CONFIG = CalcConfig.FILE_NAME; static final String FAULT_SOURCES = "fault-sources"; static final String GRID_SOURCES = "grid-sources"; static final String ZONE_SOURCES = "zone-sources"; - static final String GMM_TREE = "gmm-tree.json"; - static final String GMM_CONFIG = "gmm-config.json"; - static final String FAULT_CONFIG = "fault-config.json"; - static final String GRID_CONFIG = "grid-config.json"; - static final String INTERFACE_CONFIG = "interface-config.json"; - static final String SLAB_CONFIG = "slab-config.json"; - static final String ZONE_CONFIG = "zone-config.json"; - static final String MFD_CONFIG = "mfd-config.json"; - static final String MFD_MAP = "mfd-map.json"; - static final String RATE_TREE = "rate-tree.json"; - static final String RUPTURE_SET = "rupture-set.json"; - static final String CLUSTER_SET = "cluster-set.json"; - static final String SECTION_DIR = "sections"; - static final String GRID_DATA_DIR = "grid-data"; static final String SOURCE_TREE = "source-tree.json"; - static final String SOURCE_GROUP = "source-group.json"; - - /* Return a source-tree or source-group path if present. */ - static Optional<Path> lookForSourceTree(Path dir) { - Path sourceTree = dir.resolve(SOURCE_TREE); - Path sourceGroup = dir.resolve(SOURCE_GROUP); - if (Files.exists(sourceTree)) { - return Optional.of(sourceTree); + + private static final String CALC_CONFIG = CalcConfig.FILE_NAME; + private static final String MODEL_INFO = "model-info.json"; + private static final String GMM_TREE = "gmm-tree.json"; + private static final String GMM_CONFIG = "gmm-config.json"; + private static final String FAULT_CONFIG = "fault-config.json"; + private static final String GRID_CONFIG = "grid-config.json"; + private static final String INTERFACE_CONFIG = "interface-config.json"; + private static final String SLAB_CONFIG = "slab-config.json"; + private static final String ZONE_CONFIG = "zone-config.json"; + private static final String MFD_CONFIG = "mfd-config.json"; + private static final String MFD_MAP = "mfd-map.json"; + private static final String RATE_TREE = "rate-tree.json"; + private static final String RUPTURE_SET = "rupture-set.json"; + private static final String CLUSTER_SET = "cluster-set.json"; + private static final String SECTION_DIR = "sections"; + private static final String FAULT_SECTIONS = "fault-sections.geojson"; + private static final String GRID_DATA_DIR = "grid-data"; + private static final String SOURCE_GROUP = "source-group.json"; + + /* Check for source-tree or source-group logic tree. */ + static Optional<Path> checkSourceTree(Path dir) { + return read(dir, SOURCE_TREE, dir::resolve) + .or(() -> read(dir, SOURCE_GROUP, dir::resolve)); + } + + /* Check for grid data directory. */ + static Optional<Path> checkGridDataDirectory(Path dir) { + return read(dir, GRID_DATA_DIR, dir::resolve); + } + + /* Check for tectonic setting. */ + static Optional<TectonicSetting> checkTectonicSetting(Path dir) { + try { + String dirName = checkNotNull(dir.getFileName()).toString(); + String casedName = LOWER_HYPHEN.to(UPPER_UNDERSCORE, dirName); + return Optional.of(TectonicSetting.valueOf(casedName)); + } catch (IllegalArgumentException iae) { + return Optional.empty(); } - if (Files.exists(sourceGroup)) { - return Optional.of(sourceGroup); + } + + /* Process calc config that may accompany model. 
*/ + static void readCalcConfig( + Path dir, + Consumer<CalcConfig> consumer) { + + try { + CalcConfig.Builder calcConfigBuilder = CalcConfig.withDefaults(); + Path customCalcConfig = dir.resolve(CALC_CONFIG); + if (Files.exists(customCalcConfig)) { + calcConfigBuilder.extend(CalcConfig.fromFile(customCalcConfig)); + } + consumer.accept(calcConfigBuilder.build()); + } catch (IOException ioe) { + throw new RuntimeException(ioe); + } + } + + /* Process top level directories. */ + static void readDirectories( + Path dir, + Function<Path, List<SourceTree>> creator, + Consumer<List<SourceTree>> consumer) { + + try (Stream<Path> paths = Files.list(dir)) { + paths.filter(ModelFiles::isValidDirectory) + .map(creator::apply) + .forEach(consumer::accept); + } catch (IOException ioe) { + throw new RuntimeException(ioe); + } + } + + /* Process GeoJSON files in supplied directory. */ + static void readGeoJsons( + Path dir, + ModelData data, + BiFunction<Path, ModelData, SourceTree> creator, + Consumer<SourceTree> consumer) { + + try (Stream<Path> paths = Files.list(dir)) { + paths.filter(path -> path.toString().endsWith(".geojson")) + .map(path -> creator.apply(path, data)) + .forEach(consumer::accept); + } catch (IOException ioe) { + throw new RuntimeException(ioe); + } + } + + /* Process nested directories in supplied directory. */ + static void readNestedDirectories( + Path dir, + ModelData data, + BiFunction<Path, ModelData, List<SourceTree>> creator, + Consumer<List<SourceTree>> consumer) { + + try (Stream<Path> paths = Files.list(dir)) { + paths.filter(ModelFiles::isValidDirectory) + .map(path -> creator.apply(path, data)) + .forEach(consumer::accept); + } catch (IOException ioe) { + throw new RuntimeException(ioe); + } + } + + private static boolean isValidDirectory(Path path) { + try { + String filename = path.getFileName().toString(); + return Files.isDirectory(path) && + !Files.isHidden(path) && + !filename.startsWith("~") && + !filename.equals(GRID_DATA_DIR); + } catch (IOException ioe) { + throw new RuntimeException(ioe); + } + } + + /* Model info. */ + static Optional<HazardModel.Info> readModelInfo(Path dir) { + return read(dir, MODEL_INFO, Deserialize::modelInfo); } /* A GMM tree and adjacent config, relative to parent. */ @@ -69,53 +171,75 @@ return Optional.empty(); } - /* Return a source-tree or source-group logic tree if present. */ + /* A source-tree or source-group logic tree. */ static Optional<LogicTree<Path>> readSourceTree(Path dir) { return read(dir, SOURCE_TREE, Deserialize::sourceTree) .or(() -> read(dir, SOURCE_GROUP, Deserialize::sourceGroup)); } - /* Finite fault config, relative to parent. */ + /* Finite fault config. */ static Optional<SourceConfig> readFaultConfig(Path dir) { return read(dir, FAULT_CONFIG, Deserialize::faultConfig); } - /* Grid config, relative to parent. */ + /* Grid config. */ static Optional<SourceConfig> readGridConfig(Path dir) { return read(dir, GRID_CONFIG, Deserialize::gridConfig); } - /* Interface config (same as fault), relative to parent. */ + /* Interface config (same as fault). */ static Optional<SourceConfig> readInterfaceConfig(Path dir) { return read(dir, INTERFACE_CONFIG, Deserialize::interfaceConfig); } - /* Intraslab config (same as grid), relative to parent. */ + /* Intraslab config (same as grid). */ static Optional<SourceConfig.Grid> readSlabConfig(Path dir) { return read(dir, SLAB_CONFIG, Deserialize::gridConfig); } - /* Zone config (same as grid), relative to parent. 
*/ + /* Zone config (same as grid). */ static Optional<SourceConfig.Grid> readZoneConfig(Path dir) { return read(dir, ZONE_CONFIG, Deserialize::gridConfig); } - /* Read a fault section directory. */ + /* Fault system section file. */ // TODO needs to return subsection + // features + // static Optional<Map<Integer, SourceFeature.NshmFault>> + // readSystemSections(Path dir) { + // return read(dir, FAULT_SECTIONS, Deserialize::systemSections); + // } + + /* Fault section directory. */ static Optional<Map<Integer, SourceFeature.NshmFault>> readFaultSections(Path dir) { return read(dir, SECTION_DIR, Deserialize::faultSections); } - /* Read an interface section directory. */ + /* Interface section directory. */ static Optional<Map<Integer, SourceFeature.Interface>> readInterfaceSections(Path dir) { return read(dir, SECTION_DIR, Deserialize::interfaceSections); } - /* Read a mfd-map of MFD property logic trees, relative to source tree. */ + /* Interface rupture-set. */ + static Optional<RuptureSet> readInterfaceRuptureSet(Path dir, ModelData data) { + return read(dir, RUPTURE_SET, data, Deserialize::interfaceRuptureSet); + } + + /* Fault rupture-set. */ + static Optional<RuptureSet> readFaultRuptureSet(Path dir, ModelData data) { + return read(dir, RUPTURE_SET, data, Deserialize::faultRuptureSet); + } + + /* Fault cluster-set. */ + static Optional<RuptureSet> readFaultClusterSet(Path dir, ModelData data) { + return read(dir, CLUSTER_SET, data, Deserialize::clusterRuptureSet); + } + + /* Map of MFD property logic trees. */ static Optional<Map<String, LogicTree<Mfd.Properties>>> readMfdMap(Path dir) { return read(dir, MFD_MAP, Deserialize::mfdMap); } - /* Read a mfd-config, relative to parent. */ + /* MFD config. */ static Optional<MfdConfig> readMfdConfig(Path dir) { return read(dir, MFD_CONFIG, Deserialize::mfdConfig); } @@ -147,4 +271,17 @@ return Optional.empty(); } + private static <R> Optional<R> read( + Path dir, + String file, + ModelData data, + BiFunction<Path, ModelData, R> deserializer) { + + Path path = dir.resolve(file); + if (Files.exists(path)) { + return Optional.of(deserializer.apply(path, data)); + } + return Optional.empty(); + } + } diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/ModelLoader.java b/src/main/java/gov/usgs/earthquake/nshmp/model/ModelLoader.java index 1ab59866..b66dd4c4 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/ModelLoader.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/model/ModelLoader.java @@ -5,23 +5,25 @@ import static com.google.common.base.CaseFormat.UPPER_UNDERSCORE; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.Preconditions.checkState; import static gov.usgs.earthquake.nshmp.model.DipSlipModel.FIXED; -import static gov.usgs.earthquake.nshmp.model.ModelFiles.CALC_CONFIG; -import static gov.usgs.earthquake.nshmp.model.ModelFiles.CLUSTER_SET; import static gov.usgs.earthquake.nshmp.model.ModelFiles.FAULT_SOURCES; -import static gov.usgs.earthquake.nshmp.model.ModelFiles.GRID_DATA_DIR; -import static gov.usgs.earthquake.nshmp.model.ModelFiles.MODEL_INFO; -import static gov.usgs.earthquake.nshmp.model.ModelFiles.RUPTURE_SET; import static gov.usgs.earthquake.nshmp.model.ModelFiles.SOURCE_TREE; +import static gov.usgs.earthquake.nshmp.model.ModelFiles.checkGridDataDirectory; +import static gov.usgs.earthquake.nshmp.model.ModelFiles.checkSourceTree; +import static gov.usgs.earthquake.nshmp.model.ModelFiles.checkTectonicSetting; +import static 
gov.usgs.earthquake.nshmp.model.ModelFiles.readFaultClusterSet; import static gov.usgs.earthquake.nshmp.model.ModelFiles.readFaultConfig; import static gov.usgs.earthquake.nshmp.model.ModelFiles.readFaultRateTree; +import static gov.usgs.earthquake.nshmp.model.ModelFiles.readFaultRuptureSet; import static gov.usgs.earthquake.nshmp.model.ModelFiles.readFaultSections; import static gov.usgs.earthquake.nshmp.model.ModelFiles.readGmms; import static gov.usgs.earthquake.nshmp.model.ModelFiles.readGridConfig; import static gov.usgs.earthquake.nshmp.model.ModelFiles.readGridRateTree; import static gov.usgs.earthquake.nshmp.model.ModelFiles.readInterfaceConfig; +import static gov.usgs.earthquake.nshmp.model.ModelFiles.readInterfaceRuptureSet; import static gov.usgs.earthquake.nshmp.model.ModelFiles.readInterfaceSections; import static gov.usgs.earthquake.nshmp.model.ModelFiles.readMfdConfig; import static gov.usgs.earthquake.nshmp.model.ModelFiles.readMfdMap; +import static gov.usgs.earthquake.nshmp.model.ModelFiles.readModelInfo; import static gov.usgs.earthquake.nshmp.model.ModelFiles.readRateTree; import static gov.usgs.earthquake.nshmp.model.ModelFiles.readSlabConfig; import static gov.usgs.earthquake.nshmp.model.ModelFiles.readSourceTree; @@ -49,7 +51,6 @@ import com.google.common.base.Preconditions; import com.google.common.collect.Iterables; import com.google.gson.JsonElement; -import gov.usgs.earthquake.nshmp.calc.CalcConfig; import gov.usgs.earthquake.nshmp.fault.FocalMech; import gov.usgs.earthquake.nshmp.geo.json.Feature; import gov.usgs.earthquake.nshmp.geo.json.Properties; @@ -60,10 +61,12 @@ import gov.usgs.earthquake.nshmp.tree.LogicGroup; import gov.usgs.earthquake.nshmp.tree.LogicTree; /** - * Utility class with methods for loading different source types. + * Earthquake source model loading class. * * Although source types are broadly similar, differences in model structure - * necessitate source type specific loaders. + * necessitate source type specific loaders. Models are loaded recursively and + * much of the heavy lifting of checking for the presence of, reading, and + * deserializing is handled by the factory classes ModelFiles and Deserialize. * * @author U.S. Geological Survey */ @@ -84,33 +87,39 @@ abstract class ModelLoader { static HazardModel load(Path model) throws IOException { try { + System.out.println(model); - HazardModel.Builder builder = HazardModel.builder(); - - /* Load required model info. */ - Path infoPath = model.resolve(MODEL_INFO); - HazardModel.Info info = Deserialize.modelInfo(infoPath); + HazardModel.Builder builder = HazardModel.builder() + .info(readModelInfo(model).orElseThrow()); + // HazardModel.Info info = readModelInfo(model).orElseThrow(); // log.info("Model: " + info.name); - System.out.println(info.name); - builder.info(info); + // System.out.println(info.name); + // builder.info(info); /* Look for optional calc config. 
*/ - CalcConfig.Builder calcConfigBuilder = CalcConfig.withDefaults(); - Path customCalcConfig = model.resolve(CALC_CONFIG); - if (Files.exists(customCalcConfig)) { - calcConfigBuilder.extend(CalcConfig.fromFile(customCalcConfig)); - } - builder.config(calcConfigBuilder.build()); + ModelFiles.readCalcConfig(model, builder::config); + // CalcConfig.Builder calcConfigBuilder = CalcConfig.withDefaults(); + // Path customCalcConfig = model.resolve(CALC_CONFIG); + // if (Files.exists(customCalcConfig)) { + // calcConfigBuilder.extend(CalcConfig.fromFile(customCalcConfig)); + // } + // builder.config(calcConfigBuilder.build()); /* Process tectonic setting source directories. */ - try (DirectoryStream<Path> paths = newDirectoryStream(model, SETTINGS_FILTER)) { - for (Path path : paths) { - TectonicSetting setting = - parseTectonicSetting(checkNotNull(path.getFileName()).toString()); - List<SourceTree> trees = loadTectonicSetting(setting, path); - builder.addSourceTrees(trees); - } - } + ModelFiles.readDirectories( + model, + ModelLoader::loadTectonicSetting, + builder::addSourceTrees); + + // try (DirectoryStream<Path> paths = newDirectoryStream(model, + // SETTINGS_FILTER)) { + // for (Path path : paths) { + // TectonicSetting setting = + // parseTectonicSetting(checkNotNull(path.getFileName()).toString()); + // List<SourceTree> trees = loadTectonicSetting(path); + // builder.addSourceTrees(trees); + // } + // } return builder.build(); } catch (Exception e) { @@ -123,25 +132,28 @@ } } - private static List<SourceTree> loadTectonicSetting( - TectonicSetting setting, - Path dir) { - - switch (setting) { - // case ACTIVE_CRUST: - case STABLE_CRUST: - return loadCrustalSources(dir); + /* Process tectonic setting directories. */ + private static List<SourceTree> loadTectonicSetting(Path dir) { + Optional<TectonicSetting> setting = checkTectonicSetting(dir); + /* Return empty tree list for non-matching directories. */ + if (setting.isEmpty()) { + return List.of(); + } + switch (setting.orElseThrow()) { + case ACTIVE_CRUST: + // case STABLE_CRUST: + return crustalSources(dir); case SUBDUCTION_INTERFACE: - return ModelLoader.interfaceSources(dir); + return interfaceSources(dir); // case SUBDUCTION_SLAB: - // return ModelLoader.slabSources(dir); - default: + // return slabSources(dir); + default: // TODO this can go away if all settings present return List.of(); // throw new UnsupportedOperationException("Unsupported source type"); } } - private static List<SourceTree> loadCrustalSources(Path dir) { + private static List<SourceTree> crustalSources(Path dir) { List<SourceTree> trees = new ArrayList<>(); @@ -153,13 +165,13 @@ String filename = checkNotNull(path.getFileName()).toString(); switch (filename) { case FAULT_SOURCES: - trees.addAll(ModelLoader.faultSources(path, gmms)); + trees.addAll(faultSources(path, gmms)); break; // case GRID_SOURCES: - // trees.addAll(ModelLoader.gridSources(path, gmms)); + // trees.addAll(gridSources(path, gmms)); // break; // case ZONE_SOURCES: - // trees.addAll(ModelLoader.zoneSources(path, gmms)); + // trees.addAll(zoneSources(path, gmms)); // break; // default: // throw new UnsupportedOperationException("Unsupported source type"); @@ -216,35 +228,38 @@ loadRateTree(dir, data); /* (1) Process source tree, if present. 
*/ + checkSourceTree(dir); + if (Files.exists(dir.resolve(SOURCE_TREE))) { - SourceTree tree = loadSourceTree(dir, data); - return List.of(tree); + return List.of(loadSourceTree(dir, data)); } List<SourceTree> trees = new ArrayList<>(); /* (2) Process any GeoJSON source files. */ - try (DirectoryStream<Path> sourceStream = newDirectoryStream(dir, "*.geojson")) { - for (Path path : sourceStream) { - SourceTree tree = loadSingleSource(path, data); - trees.add(tree); - } - } catch (IOException ioe) { - throw new RuntimeException(ioe); - } + ModelFiles.readGeoJsons(dir, data, this::loadSingleSource, trees::add); + + // try (DirectoryStream<Path> sourceStream = newDirectoryStream(dir, + // "*.geojson")) { + // for (Path path : sourceStream) { + // SourceTree tree = loadSingleSource(path, data); + // trees.add(tree); + // } + // } catch (IOException ioe) { + // throw new RuntimeException(ioe); + // } /* (3) Lastly, recurse into any nested directories. */ - try (DirectoryStream<Path> nestedDirStream = newDirectoryStream(dir, NESTED_DIR_FILTER)) { - for (Path path : nestedDirStream) { - // TODO temp clean - if (checkNotNull(path.getFileName()).toString().equals("ucerf3")) { - continue; - } - trees.addAll(loadSourceDirectory(path, data)); - } - } catch (IOException ioe) { - throw new RuntimeException(ioe); - } + ModelFiles.readNestedDirectories(dir, data, this::loadSourceDirectory, trees::addAll); + + // try (DirectoryStream<Path> nestedDirStream = newDirectoryStream(dir, + // NESTED_DIR_FILTER)) { + // for (Path path : nestedDirStream) { + // trees.addAll(loadSourceDirectory(path, data)); + // } + // } catch (IOException ioe) { + // throw new RuntimeException(ioe); + // } return trees; } @@ -303,7 +318,6 @@ abstract class ModelLoader { /* Possibly split on normal fault dip variants. */ SourceConfig.Fault config = data.sourceConfig().asFault(); - boolean isNormal = FocalMech.fromRake(feature.rake) == FocalMech.NORMAL; if (config.dipTree.isPresent() && isNormal) { System.out.println("dip tree: " + root.relativize(geojson)); @@ -413,7 +427,12 @@ abstract class ModelLoader { System.out.println(" tree: " + root.relativize(dir)); LogicTree<Path> tree = readSourceTree(dir).orElseThrow(); - data.faultFeatureMap(readFaultSections(dir).orElseThrow()); + + /* Set flag to process a logic-tree of fault-system solutions. */ + var faultSections = readFaultSections(dir); + boolean faultSystemTree = faultSections.isEmpty(); + + faultSections.ifPresent(data::faultFeatureMap); SourceTree.Builder treeBuilder = SourceTree.builder() .name(checkNotNull(dir.getFileName()).toString()) @@ -422,13 +441,16 @@ abstract class ModelLoader { .root(tree); for (Branch<Path> branch : tree) { - processBranch(branch, treeBuilder, data); + if (faultSystemTree) { + processSystemBranch(branch, treeBuilder, data); + } else { + processBranch(branch, treeBuilder, data); + } } - return treeBuilder.build(); } - /* Process source tree branches. */ + /* Process fault source tree branches. */ private void processBranch( Branch<Path> branch, SourceTree.Builder treeBuilder, @@ -438,6 +460,7 @@ abstract class ModelLoader { System.out.println(" branch: " + root.relativize(dir)); if (checkNotNull(dir.getFileName()).toString().equals("null")) { + System.out.println("TODO null branch"); return; } @@ -463,25 +486,58 @@ abstract class ModelLoader { } else { - Path ruptureSetPath = dir.resolve(RUPTURE_SET); - boolean isFaultRuptureSet = Files.exists(ruptureSetPath); - - Path setPath = isFaultRuptureSet - ? 
ruptureSetPath - : dir.resolve(CLUSTER_SET); - - if (isFaultRuptureSet) { - - /* All required properties set in deserializer. */ - FaultRuptureSet frs = Deserialize.faultRuptureSet(setPath, data); - treeBuilder.addLeaf(branch, frs); + RuptureSet rs = readFaultRuptureSet(dir, data) + .or(() -> readFaultClusterSet(dir, data)) + .orElseThrow(); + treeBuilder.addLeaf(branch, rs); + + // TODO clean + // Path ruptureSetPath = dir.resolve(RUPTURE_SET); + // boolean isFaultRuptureSet = Files.exists(ruptureSetPath); + // + // Path setPath = isFaultRuptureSet + // ? ruptureSetPath + // : dir.resolve(CLUSTER_SET); + // + // if (isFaultRuptureSet) { + // + // System.out.println("--fault--"); + // /* All required properties set in deserializer. */ + // FaultRuptureSet frs = Deserialize.faultRuptureSet(setPath, data); + // treeBuilder.addLeaf(branch, frs); + // + // } else { + // + // System.out.println("--cluster--"); + // /* All required properties set in deserializer. */ + // ClusterRuptureSet crs = Deserialize.clusterRuptureSet(setPath, data); + // treeBuilder.addLeaf(branch, crs); + // } + } } - } else { + /* Process fault system source tree branches. */ + private void processSystemBranch( + Branch<Path> branch, + SourceTree.Builder treeBuilder, + ModelData data) { - /* All required properties set in deserializer. */ - ClusterRuptureSet crs = Deserialize.clusterRuptureSet(setPath, data); - treeBuilder.addLeaf(branch, crs); + /* + * No configuration overrides are currently expected for + * fault-system-solutions so this is simpler than above. + */ + Path dir = branch.value(); + System.out.println(" system: " + root.relativize(dir)); + Optional<LogicTree<Path>> tree = readSourceTree(dir); + if (tree.isPresent()) { + LogicTree<Path> children = tree.get(); + treeBuilder.addBranches(branch, children); + for (Branch<Path> child : children) { + processSystemBranch(child, treeBuilder, data); } + } else { + // readSystemSections(dir).orElseThrow(); + SystemRuptureSet.Builder sys = SystemRuptureSet.builder(); + } } } @@ -572,7 +628,7 @@ return treeBuilder.build(); } - /* Process source tree branches. */ + /* Process interface source tree branches. */ private void processBranch( Branch<Path> branch, SourceTree.Builder treeBuilder, @@ -586,7 +642,6 @@ } ModelData data = ModelData.copyOf(dataIn); - readMfdConfig(dir).ifPresent(data::mfdConfig); readRateTree(dir).ifPresent(data::rateTree); /* Can be source-tree or source-group. */ @@ -601,11 +656,9 @@ } } else { - - Path ruptureSetPath = dir.resolve(RUPTURE_SET); - - InterfaceRuptureSet irs = Deserialize.interfaceRuptureSet(ruptureSetPath, data); - treeBuilder.addLeaf(branch, irs); + treeBuilder.addLeaf( + branch, + readInterfaceRuptureSet(dir, data).orElseThrow()); } } } @@ -677,8 +730,7 @@ LogicTree<Path> tree = readSourceTree(dir).orElseThrow(); /* Top-level source trees require a grid-data directory. */ - Path gridDataDir = root.resolve(GRID_DATA_DIR); - checkState(Files.exists(gridDataDir)); + Path gridDataDir = checkGridDataDirectory(dir).orElseThrow(); data.gridDataDirectory(gridDataDir); SourceTree.Builder treeBuilder = SourceTree.builder() @@ -694,7 +746,7 @@ return treeBuilder.build(); } - /* Process source tree branches. */ + /* Process grid source tree branches. 
*/ private void processBranch( Branch<Path> branch, SourceTree.Builder treeBuilder, @@ -708,7 +760,6 @@ abstract class ModelLoader { } ModelData data = ModelData.copyOf(dataIn); - readMfdConfig(dir).ifPresent(data::mfdConfig); loadRateTree(dir, data); Optional<LogicTree<Path>> tree = readSourceTree(dir); @@ -858,7 +909,7 @@ abstract class ModelLoader { return treeBuilder.build(); } - /* Process source tree branches. */ + /* Process intraslab source tree branches. */ private void processBranch( Branch<Path> branch, SourceTree.Builder treeBuilder, @@ -872,7 +923,6 @@ abstract class ModelLoader { } ModelData data = ModelData.copyOf(dataIn); - readMfdConfig(dir).ifPresent(data::mfdConfig); readRateTree(dir).ifPresent(data::rateTree); Optional<LogicTree<Path>> tree = readSourceTree(dir); @@ -1002,7 +1052,7 @@ abstract class ModelLoader { return treeBuilder.build(); } - /* Process source tree branches. */ + /* Process zone (area) source tree branches. */ private void processBranch( Branch<Path> branch, SourceTree.Builder treeBuilder, @@ -1016,7 +1066,6 @@ abstract class ModelLoader { } ModelData data = ModelData.copyOf(dataIn); - readMfdConfig(dir).ifPresent(data::mfdConfig); readRateTree(dir).ifPresent(data::rateTree); Optional<LogicTree<Path>> tree = readSourceTree(dir); @@ -1061,8 +1110,9 @@ abstract class ModelLoader { } } - private static TectonicSetting parseTectonicSetting(String lowerHyphen) { - return TectonicSetting.valueOf(LOWER_HYPHEN.to(UPPER_UNDERSCORE, lowerHyphen)); + @Deprecated + private static TectonicSetting parseTectonicSetting(String s) { + return TectonicSetting.valueOf(LOWER_HYPHEN.to(UPPER_UNDERSCORE, s)); } /* @@ -1089,15 +1139,17 @@ abstract class ModelLoader { * start with a tilde (~), or that are named 'sources' that may exist in grid, * area, or slab type directories. */ - private static DirectoryStream.Filter<Path> NESTED_DIR_FILTER = new DirectoryStream.Filter<>() { - @Override - public boolean accept(Path path) throws IOException { - Preconditions.checkNotNull(path); - var fileName = Preconditions.checkNotNull(path.getFileName()); - String s = fileName.toString(); - return Files.isDirectory(path) && !Files.isHidden(path) && !s.startsWith("~") && - !s.equals(GRID_DATA_DIR); // TODO needed? - } - }; + // private static DirectoryStream.Filter<Path> NESTED_DIR_FILTER = new + // DirectoryStream.Filter<>() { + // @Override + // public boolean accept(Path path) throws IOException { + // Preconditions.checkNotNull(path); + // var fileName = Preconditions.checkNotNull(path.getFileName()); + // String s = fileName.toString(); + // return Files.isDirectory(path) && !Files.isHidden(path) && + // !s.startsWith("~") && + // !s.equals(ModelFiles.GRID_DATA_DIR); // TODO needed? 
+ // } + // }; } diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/ModelUtil.java b/src/main/java/gov/usgs/earthquake/nshmp/model/ModelUtil.java index b6b85305..683ed37c 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/ModelUtil.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/model/ModelUtil.java @@ -1,11 +1,14 @@ package gov.usgs.earthquake.nshmp.model; +import static com.google.common.base.Preconditions.checkArgument; import static java.lang.Math.log10; +import static java.util.stream.Collectors.toMap; import static java.util.stream.Collectors.toUnmodifiableList; import java.nio.file.Path; import java.util.ArrayList; import java.util.List; +import java.util.Map; import gov.usgs.earthquake.nshmp.Maths; import gov.usgs.earthquake.nshmp.data.MutableXySequence; @@ -18,6 +21,8 @@ import gov.usgs.earthquake.nshmp.tree.LogicTree; class ModelUtil { + // TODO TreeUtils or MfdTrees class?? + /* Convert a logic tree of mfd properties to builders. */ static LogicTree<Mfd.Builder> mfdPropsToBuilders(LogicTree<Mfd.Properties> propsTree) { LogicTree.Builder<Mfd.Builder> mfdTree = LogicTree.builder(propsTree.name()); @@ -69,6 +74,19 @@ .allMatch(type::equals); } + /* Check if the IDs and weights of two mfd-trees are the same. */ + static void checkTreeIdsAndWeights(LogicTree<Mfd> tree1, LogicTree<Mfd> tree2) { + checkArgument( + mfdTreeWeightMap(tree1).equals(mfdTreeWeightMap(tree2)), + "mfd-trees do not have identical IDs and weights\n Tree1: %s \nTree2: %s", + tree1, tree2); + } + + /* Create map of mfd-tree ID:weight. */ + private static Map<String, Double> mfdTreeWeightMap(LogicTree<Mfd> tree) { + return tree.branches().stream().collect(toMap(Branch::id, Branch::weight)); + } + /* * Note: if we create custom MFDs for each M* branch, we'll have the same * reference x-values for each model, but when added, a new set of x-vlues diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/SourceFeature.java b/src/main/java/gov/usgs/earthquake/nshmp/model/SourceFeature.java index 89b75f67..c22255cd 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/SourceFeature.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/model/SourceFeature.java @@ -188,13 +188,13 @@ public abstract class SourceFeature { static final String LENGTH = "length"; static final String WIDTH = "width"; static final String MAGNITUDE = "magnitude"; - static final String UPPER_DEPTH = "upperDepth"; - static final String LOWER_DEPTH = "lowerDepth"; + static final String UPPER_DEPTH = "upper-depth"; + static final String LOWER_DEPTH = "lower-depth"; static final String RAKE = "rake"; static final String RATE = "rate"; static final String RATES = "rates"; static final String RATE_MAP = "rate-map"; - static final String RATE_TYPE = "rateType"; + static final String RATE_TYPE = "rate-type"; static final String STRIKE = "strike"; static final String MFD_TREE = "mfd-tree"; } @@ -232,6 +232,7 @@ public abstract class SourceFeature { Properties props = section.properties(); dip = props.getDouble(Key.DIP).orElseThrow(); checkDip(dip); + System.out.println(props); upperDepth = props.getDouble(Key.UPPER_DEPTH).orElseThrow(); checkCrustalDepth(upperDepth); lowerDepth = props.getDouble(Key.LOWER_DEPTH).orElseThrow(); diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/SystemRuptureSet.java b/src/main/java/gov/usgs/earthquake/nshmp/model/SystemRuptureSet.java new file mode 100644 index 00000000..c2e92808 --- /dev/null +++ b/src/main/java/gov/usgs/earthquake/nshmp/model/SystemRuptureSet.java @@ -0,0 
+
   /*
    * Note: if we create custom MFDs for each M* branch, we'll have the same
    * reference x-values for each model, but when added, a new set of x-values
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/SourceFeature.java b/src/main/java/gov/usgs/earthquake/nshmp/model/SourceFeature.java
index 89b75f67..c22255cd 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/model/SourceFeature.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/model/SourceFeature.java
@@ -188,13 +188,13 @@ public abstract class SourceFeature {
     static final String LENGTH = "length";
     static final String WIDTH = "width";
     static final String MAGNITUDE = "magnitude";
-    static final String UPPER_DEPTH = "upperDepth";
-    static final String LOWER_DEPTH = "lowerDepth";
+    static final String UPPER_DEPTH = "upper-depth";
+    static final String LOWER_DEPTH = "lower-depth";
     static final String RAKE = "rake";
     static final String RATE = "rate";
     static final String RATES = "rates";
     static final String RATE_MAP = "rate-map";
-    static final String RATE_TYPE = "rateType";
+    static final String RATE_TYPE = "rate-type";
     static final String STRIKE = "strike";
     static final String MFD_TREE = "mfd-tree";
   }
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/SystemRuptureSet.java b/src/main/java/gov/usgs/earthquake/nshmp/model/SystemRuptureSet.java
new file mode 100644
index 00000000..c2e92808
--- /dev/null
+++ b/src/main/java/gov/usgs/earthquake/nshmp/model/SystemRuptureSet.java
@@ -0,0 +1,720 @@
+package gov.usgs.earthquake.nshmp.model;
+
+import static com.google.common.base.Preconditions.checkArgument;
+import static com.google.common.base.Preconditions.checkNotNull;
+import static com.google.common.base.Preconditions.checkState;
+import static gov.usgs.earthquake.nshmp.model.Deserialize.MFD_TREE;
+import static gov.usgs.earthquake.nshmp.model.Deserialize.RATE_TREE;
+import static gov.usgs.earthquake.nshmp.model.RateType.RECURRENCE;
+import static gov.usgs.earthquake.nshmp.model.SourceType.FAULT;
+import static java.util.stream.Collectors.collectingAndThen;
+import static java.util.stream.Collectors.toList;
+
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+
+import gov.usgs.earthquake.nshmp.Earthquakes;
+import gov.usgs.earthquake.nshmp.Faults;
+import gov.usgs.earthquake.nshmp.Maths;
+import gov.usgs.earthquake.nshmp.fault.surface.DefaultGriddedSurface;
+import gov.usgs.earthquake.nshmp.fault.surface.GriddedSurface;
+import gov.usgs.earthquake.nshmp.geo.Location;
+import gov.usgs.earthquake.nshmp.geo.LocationList;
+import gov.usgs.earthquake.nshmp.geo.json.Feature;
+import gov.usgs.earthquake.nshmp.geo.json.Properties;
+import gov.usgs.earthquake.nshmp.mfd.Mfd;
+import gov.usgs.earthquake.nshmp.mfd.Mfd.Properties.Single;
+import gov.usgs.earthquake.nshmp.mfd.Mfds;
+import gov.usgs.earthquake.nshmp.model.SourceFeature.Key;
+import gov.usgs.earthquake.nshmp.tree.Branch;
+import gov.usgs.earthquake.nshmp.tree.LogicTree;
+
+/**
+ * Crustal fault-system rupture set.
+ *
+ * @author U.S. Geological Survey
+ */
+public class SystemRuptureSet implements RuptureSet {
+
+  /*
+   * TODO figure out if ModelData is really needed beyond RuptureSet.Builder;
+   * the copying of ModelData when processing single branches is likely not
+   * necessary as the feature map is all that's being used, and in this
+   * instance it's ok to overwrite it each time.
+   */
+
+  /*
+   * General comments on fault earthquake rates. The majority of faults in
+   * NSHMs are constrained by slip rates, but some are governed by explicit
+   * logic trees of slip and/or recurrence rates with predefined MFD
+   * parameters. In both cases we require a moment rate in order to correctly
+   * balance MFD logic tree branches of epistemic and aleatory uncertainty.
+   *
+   * This ends up being a little circular for GR with no epistemic
+   * uncertainty, in that we construct an MFD, scale it to its incremental
+   * rate, and compute its moment rate, only to then build the MFD again,
+   * scaling directly to moment rate.
+   */
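+
+  /*
+   * For example (sketch; assumes Earthquakes.magToMoment implements the
+   * Hanks & Kanamori (1979) relation, log10(Mo) = 1.5 * Mw + 9.05 in N·m):
+   *
+   *   double mo = Earthquakes.magToMoment(7.0); // ~3.55e19 N·m
+   *
+   * Scaling every mfd-tree branch to a common moment rate, rather than to
+   * incremental rates, is what keeps epistemic and aleatory branches
+   * moment-balanced relative to one another.
+   */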
+
+  /*
+   * TODO we removed aleaPops - MoBalance; 2008 CA a-priori a-faults do not
+   * moment balance, so we'll need to figure out how to handle that; it's
+   * consistent with SSCn SINGLE mag trees; slip-rate based mfds mo-balance,
+   * others don't.
+   */
+
+  // final SourceFeature.NshmFault feature;
+  // final ModelData data;
+  //
+  // final LogicTree<Mfd> mfdTree;
+  // final Mfd mfdTotal;
+  //
+  // final List<Integer> sectionIds; // reference: actually needed?
+  // final GriddedSurface surface;
+
+  SystemRuptureSet(Builder builder) {
+    // this.feature = builder.feature;
+    // this.data = builder.data;
+    //
+    // this.mfdTree = builder.mfdTree;
+    // this.mfdTotal = builder.mfdTotal;
+    //
+    // this.sectionIds = builder.sectionIds;
+    // this.surface = builder.surface;
+  }
+
+  @Override
+  public String name() {
+    return null; // feature.name;
+  }
+
+  @Override
+  public int id() {
+    return -1; // feature.id;
+  }
+
+  @Override
+  public int size() {
+    // TODO what should this be ??
+    // a value based on the size of the Mfds??
+    return 1;
+  }
+
+  @Override
+  public SourceType type() {
+    return FAULT;
+  }
+
+  /**
+   * The closest point on the fault trace, relative to the supplied site
+   * {@code Location}.
+   */
+  @Override
+  public Location location(Location site) {
+    return null; // Locations.closestPoint(site, feature.trace);
+  }
+
+  @Override
+  public String toString() {
+    return getClass().getSimpleName() + " " + Map.of(
+        "name", name(),
+        "id", id()); // TODO update
+    // "feature", feature.source.toJson());
+  }
+
+  static Builder builder() {
+    return new Builder();
+  }
+
+  /* Single use builder. */
+  static class Builder {
+
+    private boolean built = false;
+
+    private SourceFeature.NshmFault feature;
+    private ModelData data;
+
+    private LogicTree<Mfd.Properties> mfdPropsTree;
+
+    /* created on build */
+    private LogicTree<Mfd> mfdTree;
+    private Mfd mfdTotal;
+    private GriddedSurface surface;
+
+    /*
+     * TODO clean up name, id, and sectionIds; validated and carried forward
+     * in the rupture set feature.
+     */
+    private String name;
+    private Integer id;
+    private List<Integer> sectionIds;
+    private Optional<Map<String, ?>> ruptureProps = Optional.empty();
+
+    Builder name(String name) {
+      this.name = name;
+      return this;
+    }
+
+    Builder id(int id) {
+      this.id = id;
+      return this;
+    }
+
+    Builder sections(List<Integer> sections) {
+      checkArgument(sections.size() >= 1);
+      this.sectionIds = List.copyOf(sections);
+      return this;
+    }
+
+    Builder properties(Map<String, ?> props) {
+      this.ruptureProps = Optional.of(checkNotNull(props));
+      return this;
+    }
+
+    /* Set the MFD tree. */
+    Builder mfdTree(LogicTree<Mfd.Properties> mfdTree) {
+      this.mfdPropsTree = mfdTree;
+      return this;
+    }
+
+    /* Set MFD helper data. */
+    Builder modelData(ModelData data) {
+      this.data = checkNotNull(data);
+      return this;
+    }
+
+    SystemRuptureSet build() {
+      validateAndInit("SystemRuptureSet.Builder");
+      return new SystemRuptureSet(this);
+    }
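+
+    /*
+     * Example usage (sketch; the IDs, section indices, and helper objects
+     * are hypothetical):
+     *
+     *   SystemRuptureSet srs = SystemRuptureSet.builder()
+     *       .name("Example system rupture")
+     *       .id(9999)
+     *       .sections(List.of(101, 102, 103))
+     *       .mfdTree(mfdPropsTree)
+     *       .modelData(data)
+     *       .build();
+     *
+     * All section IDs must be present in the ModelData fault feature map;
+     * multiple sections are stitched into a single feature on build.
+     */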
+
+    private void validateAndInit(String label) {
+      checkState(!built, "Single use builder");
+      checkNotNull(name, "%s name", label);
+      checkNotNull(id, "%s id", label);
+      checkNotNull(data, "%s model data", label);
+      checkNotNull(sectionIds, "%s section id list", label);
+      checkNotNull(mfdPropsTree, "%s MFD logic tree", label);
+
+      feature = createFeature();
+
+      // TODO test length property rounding
+
+      /*
+       * Filter out the unlikely combination of multiple SINGLE MFDs and
+       * epistemic uncertainty.
+       */
+      checkEpistemic(data.mfdConfig().get(), mfdPropsTree);
+
+      /*
+       * Fault MFDs are constructed in a variety of ways. Single fault MFDs
+       * may be created from a logic tree of slip rates.
+       */
+
+      MfdConfig mfdConfig = data.mfdConfig().orElseThrow();
+      Optional<LogicTree<Double>> moRateTree = Optional.empty();
+
+      /* Build mfd-tree from fault slip or recurrence rates. */
+      if (data.faultRateTree().isPresent()) {
+
+        /* Create Mo-rate tree. */
+        moRateTree = Optional.of(createMoRateTree(
+            feature,
+            data.faultRateTree().orElseThrow()));
+
+        /* Update MFD properties tree based on source magnitude. */
+        mfdPropsTree = updateMfdsForSlip();
+
+      } else if (data.rateTree().isPresent() && !data.isClusterModel()) {
+
+        /* Update MFD properties tree to include M and R branches. */
+        mfdPropsTree = updateMfdsForRecurrence();
+      }
+
+      mfdTree = createMfdTree(mfdPropsTree, moRateTree, mfdConfig);
+      mfdTotal = ModelUtil.reduceMfdTree(mfdTree);
+
+      SourceConfig.Fault config = data.sourceConfig().asFault();
+      surface = DefaultGriddedSurface.builder()
+          .trace(feature.trace)
+          .depth(feature.upperDepth)
+          .dip(feature.dip)
+          .width(Faults.width(
+              feature.dip,
+              feature.upperDepth,
+              feature.lowerDepth))
+          .spacing(config.surfaceSpacing)
+          .build();
+
+      built = true;
+    }
+
+    private SourceFeature.NshmFault createFeature() {
+
+      Map<Integer, SourceFeature.NshmFault> featureMap =
+          data.faultFeatureMap().orElseThrow();
+      checkState(featureMap.keySet().containsAll(sectionIds));
+
+      if (sectionIds.size() == 1) {
+        return featureMap.get(sectionIds.get(0));
+      }
+
+      /* Collect participating features. */
+      List<SourceFeature.NshmFault> features = sectionIds.stream()
+          .map(featureMap::get)
+          .collect(toList());
+
+      /* Consolidate states. */
+      List<String> states = features.stream()
+          .map(f -> f.states)
+          .flatMap(List::stream)
+          .distinct()
+          .collect(toList());
+
+      /* Join section traces. */
+      LocationList trace = features.stream()
+          .map(f -> f.source)
+          .map(Feature::asLineString)
+          .flatMap(LocationList::stream)
+          .distinct()
+          .collect(collectingAndThen(
+              toList(),
+              LocationList::copyOf));
+
+      /* Update properties. */
+      Properties.Builder props = Properties.fromFeature(features.get(0).source)
+          .put(Key.NAME, name)
+          .put(Key.STATES, states);
+      ruptureProps.ifPresent(p -> p.forEach(props::put));
+
+      /* Build new feature from stitched traces and props of 1st section. */
+      Feature feature = Feature.lineString(trace)
+          .id(id)
+          .properties(props.build())
+          .build();
+
+      return new SourceFeature.NshmFault(feature);
+    }
+
+    /* Create mfd-tree from a logic tree of SINGLE MFDs and a rate-tree. */
+    private LogicTree<Mfd.Properties> updateMfdsForRecurrence() {
+      checkState(ModelUtil.mfdsAreType(mfdPropsTree, Mfd.Type.SINGLE));
+      LogicTree.Builder<Mfd.Properties> propsTree = LogicTree.builder(mfdPropsTree.name());
+      LogicTree<Double> rateTree = data.rateTree().orElseThrow();
+      for (Branch<Double> rBranch : rateTree) {
+        /*
+         * TODO need to be careful here; this catch is for additive New
+         * Madrid branches; null rates have been converted to 0; perhaps we
+         * should use 0 in JSON and still have this 0 check here; we also use
+         * 0 rates to deal with imbalanced geodetic logic tree branches, so
+         * maybe just document these cases.
+         */
+        double rate = (rBranch.value() <= 0.0)
+            ? 0.0
+            : Maths.roundToDigits(1.0 / rBranch.value(), 4);
+        for (Branch<Mfd.Properties> mBranch : mfdPropsTree) {
+          Single props = mBranch.value().getAsSingle();
+          String id = String.join(":", props.type().name(), mBranch.id(), rBranch.id());
+          double weight = mBranch.weight() * rBranch.weight();
+          propsTree.addBranch(id, new Single(props.m(), rate), weight);
+        }
+      }
+      return propsTree.build();
+    }
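+
+    /*
+     * Example (sketch; branch IDs are hypothetical, and roundToDigits is
+     * assumed to round to significant digits): a rate-tree branch "R1500"
+     * holding a 1500 yr recurrence interval combined with a SINGLE mfd-tree
+     * branch "M7.2" yields:
+     *
+     *   rate   = Maths.roundToDigits(1.0 / 1500, 4)  // ~6.667e-4
+     *   id     = "SINGLE:M7.2:R1500"
+     *   weight = mBranch.weight() * rBranch.weight()
+     */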
+
+    /*
+     * The properties picked up from an mfd-tree for faults with slip rates
+     * are only used to set GR mMin and b-value; all other fields are just
+     * required placeholders. This method uses the 'characteristic' magnitude
+     * of the feature to set the SINGLE magnitude and GR Δm and mMax.
+     */
+    private LogicTree<Mfd.Properties> updateMfdsForSlip() {
+      LogicTree.Builder<Mfd.Properties> propsTree = LogicTree.builder(mfdPropsTree.name());
+      double Mw = feature.magnitude.orElseThrow();
+      for (Branch<Mfd.Properties> branch : mfdPropsTree) {
+        switch (branch.value().type()) {
+          case GR:
+            double mCutoff = data.mfdConfig().orElseThrow().minMagnitude.orElseThrow();
+            propsTree.addBranch(
+                branch.id(),
+                updateGr(branch.value(), Mw, mCutoff),
+                branch.weight());
+            break;
+          case SINGLE:
+            propsTree.addBranch(
+                branch.id(),
+                new Mfd.Properties.Single(Mw),
+                branch.weight());
+            break;
+          default:
+            throw new UnsupportedOperationException(
+                branch.value().type() + " MFD not supported");
+        }
+      }
+      return propsTree.build();
+    }
+
+    /*
+     * Update GR properties for the fault 'characteristic' magnitude, or
+     * return SINGLE properties instead if mMax < mCutoff or nm = 1.
+     */
+    private static Mfd.Properties updateGr(
+        Mfd.Properties props,
+        double mMax,
+        double mCutoff) {
+
+      Mfd.Properties.GutenbergRichter grProps = props.getAsGr();
+
+      /* Return SINGLE for m <= GR minimum. */
+      if (mMax <= mCutoff) {
+        return new Mfd.Properties.Single(mMax);
+      }
+
+      double Rm = Maths.round(mMax - grProps.mMin(), 6);
+
+      /* Return SINGLE if GR only has one magnitude bin. */
+      if (Rm <= 0.1) {
+        double m = Maths.round(grProps.mMin() + Rm / 2, 6);
+        return new Mfd.Properties.Single(m);
+      }
+
+      /*
+       * USGS NSHM Gutenberg-Richter discretization algorithm. The cutoff for
+       * nm = 4 in fltrate*.f is >0.3, but in reality M6.8 events have 4 GR
+       * magnitudes in input files.
+       */
+      int nm = Rm > 0.7 ? 8 : Rm > 0.5 ? 6 : Rm >= 0.3 ? 4 : 2;
+      double Δm = Maths.round(Rm / nm, 6);
+
+      return new Mfd.Properties.GutenbergRichter(grProps.b(), Δm, grProps.mMin(), mMax);
+    }
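+
+    /*
+     * Worked example: for grProps.mMin() = 6.5 and mMax = 7.2, Rm = 0.7, so
+     * nm = 6 and Δm = Maths.round(0.7 / 6, 6) = 0.116667. An mMax of 6.55
+     * (Rm = 0.05 <= 0.1) would instead collapse to a SINGLE at M 6.525.
+     */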
+
+    /*
+     * TODO: consider short circuiting singletons if the feature as
+     * represented in the model is consistent with rupture set requirements.
+     *
+     * TODO: For documentation; there are many options available when
+     * defining both fault and grid based sources that may not necessarily be
+     * combinable. For example, normal fault dip variants are difficult to
+     * handle in the context of a cluster model.
+     */
+
+  }
+
+  /* Create a logic tree of MFDs, scaled to moment rates when present. */
+  private static LogicTree<Mfd> createMfdTree(
+      LogicTree<Mfd.Properties> mfdPropsTree,
+      Optional<LogicTree<Double>> moRateTree,
+      MfdConfig mfdConfig) {
+
+    LogicTree.Builder<Mfd> mfdTree = LogicTree.builder(MFD_TREE);
+    if (moRateTree.isPresent()) {
+      LogicTree<Double> moTree = moRateTree.orElseThrow();
+      moTree.forEach(branch -> addMfds(mfdTree, mfdPropsTree, Optional.of(branch), mfdConfig));
+    } else {
+      addMfds(mfdTree, mfdPropsTree, Optional.empty(), mfdConfig);
+    }
+    return mfdTree.build();
+  }
+
+  private static void addMfds(
+      LogicTree.Builder<Mfd> mfdTree, // receiver tree
+      LogicTree<Mfd.Properties> mfdPropsTree,
+      Optional<Branch<Double>> moBranch,
+      MfdConfig mfdConfig) {
+
+    for (Branch<Mfd.Properties> mfdBranch : mfdPropsTree) {
+      switch (mfdBranch.value().type()) {
+        case SINGLE:
+          addSingleMfds(mfdTree, mfdBranch, moBranch, mfdConfig);
+          break;
+        case GR:
+          addGrMfds(mfdTree, mfdBranch, moBranch, mfdConfig);
+          break;
+        default:
+          throw new UnsupportedOperationException(
+              mfdBranch.value().type() + " MFD not supported");
+      }
+    }
+  }
+
+  /*
+   * Create a logic tree of moment rates for a fault source given a feature
+   * and rate data. If present, a rate-dip may be used to compute a
+   * constraining moment rate (e.g. WUS normal faults).
+   */
+  private static LogicTree<Double> createMoRateTree(
+      SourceFeature.NshmFault feature,
+      LogicTree<String> faultRateTree) {
+
+    double dip = feature.rateDip.isPresent()
+        ? feature.rateDip.getAsDouble()
+        : feature.dip;
+    double width = Faults.width(dip, feature.upperDepth, feature.lowerDepth);
+    double length = feature.length.orElseThrow();
+    double area = length * width * 1e6;
+    double Mw = feature.magnitude.getAsDouble();
+    double mo = Earthquakes.magToMoment(Mw);
+
+    Map<String, Double> rateMap = feature.rateMap;
+    RateType rateType = feature.rateType;
+    LogicTree.Builder<Double> moRateTree = LogicTree.builder(RATE_TREE);
+
+    for (Branch<String> branch : faultRateTree.branches()) {
+
+      /*
+       * TODO Verify missing fault rates and imbalanced logic trees. In some
+       * cases (e.g. TX) we need to give the GEO branch a weight of 1.0. To
+       * do this we should actually put the geo rate in the BIRD and ZENG
+       * branches to preserve the logic tree. In other cases (e.g. OR
+       * offshore) we need empty branches because the GEO weight is 0.8 and
+       * imbalanced; maybe we just handle this by setting the rate to 0, as
+       * below.
+       */
+      Double rateCheck = rateMap.get(branch.id());
+      double rate = (rateCheck != null) ? rateCheck : 0.0;
+
+      /* CONUS: 2008 slip rates are vertical, 2014 are on-plane. */
+      if (rateType == RateType.VERTICAL_SLIP) {
+        rate = Faults.projectVerticalSlip(dip, rate);
+      }
+
+      double moRate = (rateType == RECURRENCE)
+          ? mo * rate
+          : Earthquakes.moment(area, rate / 1000.0);
+
+      /*
+       * 2014 model consistency. Convert to annual rate, scale to seven
+       * decimal places to match the MFD values in the fortran and XML input
+       * files, and then convert back to moment rate. The steps of going from
+       * slip rate to explicit MFD parameters and then back to build MFDs are
+       * no longer necessary, but the cumulative effects of rounding at
+       * various steps in legacy codes are difficult to replicate without
+       * doing things like this. TODO consider describing different
+       * rounding/precision characteristics in a model defined in a source
+       * config.
+       */
+      double eqRate = Maths.round(moRate / mo, 7);
+      moRate = eqRate * mo;
+      moRateTree.addBranch(branch.id(), moRate, branch.weight());
+    }
+    return moRateTree.build();
+  }
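+
+  /*
+   * Example (sketch; feature values are hypothetical, and Earthquakes.moment
+   * is assumed to use a crustal rigidity of 3e10 Pa): a 40 km trace dipping
+   * 50° from 0 to 15 km depth with a 1 mm/yr slip rate gives
+   *
+   *   width  = Faults.width(50.0, 0.0, 15.0);    // ~19.58 km
+   *   area   = 40.0 * 19.58 * 1e6;               // ~7.83e8 m²
+   *   moRate = Earthquakes.moment(area, 0.001);  // ~2.35e16 N·m/yr
+   */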
+
+  /* Fault GR */
+  private static void addGrMfds(
+      LogicTree.Builder<Mfd> mfdTree, // receiver tree
+      Branch<Mfd.Properties> mfdBranch, // properties may include rate
+      Optional<Branch<Double>> moBranch, // or rate supplied separately
+      MfdConfig mfdConfig) {
+
+    Mfd.Properties.GutenbergRichter gr = mfdBranch.value().getAsGr();
+
+    String mfdId = mfdBranch.id();
+    double mfdWt = mfdBranch.weight();
+
+    if (moBranch.isPresent()) {
+      mfdId = mfdBranchId(moBranch.orElseThrow().id(), mfdId);
+      mfdWt *= moBranch.orElseThrow().weight();
+    }
+
+    // TODO does this ever throw?
+    int nMag = Mfds.magCount(gr.mMin(), gr.mMax(), gr.Δm());
+    checkState(nMag > 0, "GR MFD with no magnitudes [%s]", mfdBranch.id());
+
+    double moRate = moBranch.isPresent()
+        ? moBranch.orElseThrow().value()
+        : ModelUtil.grMomentRate(gr);
+
+    /*
+     * Optional epistemic uncertainty; no aleatory with GR. Epistemic
+     * uncertainty is allowed here because the cutoff is based on mMin (6.5),
+     * but in reality a few faults end up with no magnitudes in their logic
+     * trees.
+     */
+    boolean epistemic = hasEpistemic(mfdConfig, gr.mMax() - gr.Δm() / 2);
+
+    /*
+     * TODO edge cases (Rush Peak in brange.3dip.gr.in) suggest data.mMin
+     * should be used instead of unc.epiCutoff
+     *
+     * TODO note in documentation that epi ±0.2 is applied to mMax-Δm/2, not
+     * mMax
+     *
+     * TODO conduct sensitivity study for epi being applied to actual mMax,
+     * not centered mMax for GR MFD
+     */
+
+    if (epistemic) {
+
+      for (Branch<Double> epiBranch : mfdConfig.epistemicTree.get()) {
+
+        /*
+         * Given changes to how GR MFD sequences are built (centered and
+         * bounded by the upper and lower bin edges), M + epi - Δm/2 commonly
+         * cuts off the upper bin. As a result we add Δm/2 knowing it will be
+         * removed (e.g. for M7, M + epi = 7.2 as the upper limit of the
+         * sequence).
+         */
+        double mMaxEpi = gr.mMax() + epiBranch.value() + gr.Δm() / 2;
+        double weightEpi = mfdWt * epiBranch.weight();
+
+        // TODO check if ever thrown
+        int nMagEpi = Mfds.magCount(gr.mMin(), mMaxEpi, gr.Δm());
+        checkState(nMagEpi > 0, "GR MFD epistemic branch with no magnitudes");
+        boolean epiOk = checkEpi(gr.mMin(), gr.mMax(), gr.Δm(), epiBranch.value());
+        if (!epiOk) {
+          /*
+           * Formerly handled by skipping the mfd/rupture; we can't do that
+           * here because we need to preserve the entire logic tree; set the
+           * rate to 0 instead.
+           */
+          System.out.println("GR MFD epistemic branch with no magnitudes");
+        }
+
+        String id = mfdBranchId(mfdId, epiBranch.id());
+        Mfd mfd = ModelUtil.newGrBuilder(gr, mMaxEpi)
+            .scaleToMomentRate(epiOk ? moRate : 0)
+            .build();
+        mfdTree.addBranch(id, mfd, weightEpi);
+      }
+    } else {
+
+      Mfd mfd = gr.toBuilder()
+          .scaleToMomentRate(moRate)
+          .build();
+      mfdTree.addBranch(mfdId, mfd, mfdWt);
+    }
+  }
+
+  private static boolean checkEpi(double mMin, double mMax, double Δm, double Δepi) {
+    return (mMax - Δm / 2 + Δepi) > (mMin + Δm / 2);
+  }
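+
+  /*
+   * Example (sketch; assumes an epistemic tree of {-0.2, 0.0, +0.2} with
+   * weights {0.2, 0.6, 0.2}): a GR branch with mMax = 7.0 and Δm = 0.1
+   * expands to three MFDs with mMaxEpi = 6.85, 7.05, and 7.25
+   * (mMax + epi + Δm/2), each scaled to the same moment rate and carrying
+   * weight mfdWt * epiWt; a branch failing checkEpi keeps its weight but is
+   * scaled to a rate of 0.
+   */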
+
+  /* Fault SINGLE */
+  private static void addSingleMfds(
+      LogicTree.Builder<Mfd> mfdTree, // receiver tree
+      Branch<Mfd.Properties> mfdBranch, // properties may include rate
+      Optional<Branch<Double>> moBranch, // or rate supplied separately
+      MfdConfig mfdConfig) {
+
+    Mfd.Properties.Single single = mfdBranch.value().getAsSingle();
+
+    String mfdId = mfdBranch.id();
+    double mfdWt = mfdBranch.weight();
+
+    if (moBranch.isPresent()) {
+      mfdId = mfdBranchId(moBranch.orElseThrow().id(), mfdId);
+      mfdWt *= moBranch.orElseThrow().weight();
+    }
+
+    double moRate = moBranch.isPresent()
+        ? moBranch.orElseThrow().value()
+        : single.rate() * Earthquakes.magToMoment(single.m());
+
+    /* Optional epistemic uncertainty. */
+    boolean epistemic = hasEpistemic(mfdConfig, single.m());
+
+    /* Optional aleatory variability. */
+    boolean aleatory = mfdConfig.aleatoryProperties.isPresent();
+
+    /* mMax epistemic uncertainty branches */
+    if (epistemic) {
+
+      for (Branch<Double> epiBranch : mfdConfig.epistemicTree.get()) {
+
+        double mEpi = single.m() + epiBranch.value();
+        double weightEpi = mfdWt * epiBranch.weight();
+        String id = mfdBranchId(mfdId, epiBranch.id());
+
+        /* Possibly apply aleatory variability. */
+        if (aleatory) {
+
+          MfdConfig.AleatoryProperties aleaProps = mfdConfig.aleatoryProperties.get();
+          Mfd mfd = ModelUtil.newGaussianBuilder(mEpi, aleaProps)
+              .scaleToMomentRate(moRate)
+              .build();
+          mfdTree.addBranch(id, mfd, weightEpi);
+
+        } else {
+
+          Mfd mfd = Mfd.newSingleBuilder(mEpi)
+              .scaleToMomentRate(moRate)
+              .build();
+          mfdTree.addBranch(id, mfd, weightEpi);
+        }
+      }
+
+    } else { /* No epistemic uncertainty */
+
+      /* Possibly apply aleatory variability. */
+      if (aleatory) {
+
+        MfdConfig.AleatoryProperties aleaProps = mfdConfig.aleatoryProperties.get();
+        Mfd mfd = ModelUtil.newGaussianBuilder(single.m(), aleaProps)
+            .scaleToMomentRate(moRate)
+            .build();
+        mfdTree.addBranch(mfdId, mfd, mfdWt);
+
+      } else {
+
+        Mfd mfd = Mfd.newSingleBuilder(single.m())
+            .scaleToMomentRate(moRate)
+            .build();
+        mfdTree.addBranch(mfdId, mfd, mfdWt);
+      }
+    }
+  }
+
+  private static String mfdBranchId(String... ids) {
+    return String.join(" : ", ids);
+  }
+
+  private static boolean hasEpistemic(MfdConfig mfdConfig, double magnitude) {
+    boolean epistemic = mfdConfig.epistemicTree.isPresent();
+
+    /* No epistemic uncertainty if: m - Δm < mfdConfig.minMagnitude */
+    if (epistemic) {
+      double mEpiCutoff = mfdConfig.minMagnitude.getAsDouble();
+      double mEpiOffset = mfdConfig.minEpiOffset.getAsDouble();
+      epistemic = (magnitude + mEpiOffset) > mEpiCutoff;
+    }
+    return epistemic;
+  }
+
+  /*
+   * It is unlikely that a logic tree of SINGLE MFDs will be combined with
+   * MFD epistemic uncertainty on magnitude; usually one or the other
+   * representation is preferred.
+   */
+  static void checkEpistemic(MfdConfig mfdConfig, LogicTree<Mfd.Properties> mfdTree) {
+    boolean multipleSingleBranches = (mfdTree.size() > 1) &&
+        ModelUtil.mfdsAreType(mfdTree, Mfd.Type.SINGLE);
+    boolean hasEpistemic = mfdConfig.epistemicTree.isPresent();
+    checkState(
+        !(multipleSingleBranches && hasEpistemic),
+        "Multiple SINGLE MFDs incompatible with M epistemic uncertainty");
+  }
+
+}
--
GitLab