From 777c4e35bbe36ca62e5f057fd9398aef5443debc Mon Sep 17 00:00:00 2001 From: Peter Powers <pmpowers@usgs.gov> Date: Mon, 23 Nov 2020 07:11:50 -0700 Subject: [PATCH] mfd updates; cluster dev --- .../usgs/earthquake/nshmp/data/Sequences.java | 44 +- .../earthquake/nshmp/geo/json/Properties.java | 34 +- .../gov/usgs/earthquake/nshmp/mfd/Mfd.java | 197 ++++++-- .../gov/usgs/earthquake/nshmp/mfd/Mfds.java | 44 +- .../nshmp/model/ClusterRuptureSet.java | 90 ++-- .../earthquake/nshmp/model/Deserialize.java | 63 ++- .../nshmp/model/FaultRuptureSet.java | 474 +++++++++++------- .../earthquake/nshmp/model/FaultSource.java | 28 +- .../earthquake/nshmp/model/GridLoader.java | 11 +- .../earthquake/nshmp/model/HazardModel.java | 10 +- .../nshmp/model/InterfaceRuptureSet.java | 15 +- .../nshmp/model/InterfaceSource.java | 2 +- .../earthquake/nshmp/model/MfdConfig.java | 15 +- .../earthquake/nshmp/model/ModelData.java | 55 +- .../earthquake/nshmp/model/ModelFiles.java | 2 +- .../earthquake/nshmp/model/ModelLoader.java | 28 +- .../earthquake/nshmp/model/ModelUtil.java | 50 +- .../earthquake/nshmp/model/SourceFeature.java | 20 +- .../usgs/earthquake/nshmp/package-info.java | 7 + .../usgs/earthquake/nshmp/tree/LogicTree.java | 21 + 20 files changed, 790 insertions(+), 420 deletions(-) create mode 100644 src/main/java/gov/usgs/earthquake/nshmp/package-info.java diff --git a/src/main/java/gov/usgs/earthquake/nshmp/data/Sequences.java b/src/main/java/gov/usgs/earthquake/nshmp/data/Sequences.java index 75054b49..ea1df501 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/data/Sequences.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/data/Sequences.java @@ -72,6 +72,27 @@ public class Sequences { return -1; } + /** + * Computes the expected size of a sequence. + * + * This method rounds {@code (max-min)/Δ} to 1e-6 before casting to an integer + * to determine the number of elements in the sequence. If (max-min)/Δ is + * reasonably close to an integer but max is off by some double-precision + * error (e.g. 6.7999999999), the returned size would be large enough to + * include a maximum value of 6.8. + * + * @param min the first value in the sequence + * @param max the last value in the sequence + * @param Δ sequence step size + * @throws IllegalArgumentException if (1) {@code min}, {@code max}, and + * {@code Δ} are not finite (2) {@code max <= min}, or (3) + * {@code Δ <= 0} + */ + public static int size(double min, double max, double Δ) { + checkSequenceParameters(min, max, Δ); + return ((int) Maths.round(((max - min) / Δ), 6)) + 1; + } + /* * Ensure validity of sequence discretization parameters. Confirms that for a * specified range [min..max] and Δ that (1) min, max, and Δ are finite, (2) @@ -100,7 +121,6 @@ public class Sequences { * @param max value for sequence; if sequence is 'centered', {@code max} will * be the last value, otherwise last value will be {@code max - Δ/2} * @param Δ sequence step size - * @return a new array sequence builder * @throws IllegalArgumentException if (1) {@code min}, {@code max}, and * {@code Δ} are not finite (2) {@code max <= min}, or (3) * {@code Δ <= 0} @@ -175,29 +195,41 @@ public class Sequences { public double[] build() { double min = centered ? this.min : this.min + Δ / 2.0; double max = centered ? this.max : this.max - Δ / 2.0; + int size = size(min, max, Δ); double[] sequence = descending - ? DoubleData.flip(buildSequence(-max, -min, Δ)) - : buildSequence(min, max, Δ); + ? 
DoubleData.flip(buildSequence(-max, Δ, size)) + : buildSequence(min, Δ, size); return (scale >= 0) ? DoubleData.round(scale, sequence) : sequence; } /* + * TODO clean + * * Method rounds (max-min)/Δ to 1e-6 before casting to an integer to * determine the number of elements in the sequence. If (max-min)/Δ is * reasonably close to an integer but max is off by some double-precision * error (e.g. 6.7999999999), then max of 6.8 will be the upper end of the * sequence. */ - private static double[] buildSequence(double min, double max, double Δ) { - checkSequenceParameters(min, max, Δ); - int size = ((int) Maths.round(((max - min) / Δ), 6)) + 1; + // private static double[] buildSequence(double min, double max, double Δ) { + // checkSequenceParameters(min, max, Δ); + // int size = ((int) Maths.round(((max - min) / Δ), 6)) + 1; + // checkArgument(Range.openClosed(0, 100000).contains(size), "sequence + // size"); + // return IntStream.range(0, size) + // .mapToDouble(i -> min + Δ * i) + // .toArray(); + // } + + private static double[] buildSequence(double min, double Δ, int size) { checkArgument(Range.openClosed(0, 100000).contains(size), "sequence size"); return IntStream.range(0, size) .mapToDouble(i -> min + Δ * i) .toArray(); } + } } diff --git a/src/main/java/gov/usgs/earthquake/nshmp/geo/json/Properties.java b/src/main/java/gov/usgs/earthquake/nshmp/geo/json/Properties.java index ddabcc2d..fddead02 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/geo/json/Properties.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/geo/json/Properties.java @@ -3,6 +3,7 @@ package gov.usgs.earthquake.nshmp.geo.json; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; +import java.lang.reflect.Type; import java.util.LinkedHashMap; import java.util.Map; import java.util.Optional; @@ -59,7 +60,7 @@ public final class Properties { * Return an {@code Optional} describing a value of the specified class for * the specified key, or an empty {@code Optional} if the key is absent, its * value is {@code null} or its value cannot be interpreted as an object of - * type {@code T}. + * class {@code T}. Use this method for non-generic objects. * * @param <T> the type of the desired object * @param key to get value for @@ -75,6 +76,32 @@ public final class Properties { } } + /** + * Return an {@code Optional} describing a value of the specified type for the + * specified key, or an empty {@code Optional} if the key is absent, its value + * is {@code null} or its value cannot be interpreted as an object of type + * {@code T}. Use this method for generic objects. + * + * @param <T> the type of the desired object + * @param key to get value for + * @param typeOfT the genericized type of src. You can obtain this type by + * using the {@link TypeToken} class. 
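The value is deserialized with the default GeoJSON Gson instance, so any type that Gson can map may be requested.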
For example, to get the type for + * {@code Collection<Foo>}, you should use: + * + * <pre> + * Type typeOfT = new TypeToken<Collection<Foo>>() {}.getType(); + * </pre> + */ + public <T> Optional<T> get(String key, Type typeOfT) { + try { + JsonElement element = getJsonElement(key).get(); + T obj = GeoJson.GSON_DEFAULT.fromJson(element, typeOfT); + return Optional.of(obj); + } catch (Exception e) { + return Optional.empty(); + } + } + /** * Return an {@code Optional} describing the value for the specified key, or * an empty {@code Optional} if the key is absent, its value is {@code null} @@ -142,6 +169,11 @@ public final class Properties { return new LinkedHashMap<>(source); } + @Override + public String toString() { + return "Properties: " + source.toString(); + } + /** * Return a reusable property map builder that preserves the order in which * properties are added. Note that {@code build()} returns a copy of the diff --git a/src/main/java/gov/usgs/earthquake/nshmp/mfd/Mfd.java b/src/main/java/gov/usgs/earthquake/nshmp/mfd/Mfd.java index 8acc1dd9..375cdc8d 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/mfd/Mfd.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/mfd/Mfd.java @@ -9,6 +9,7 @@ import static gov.usgs.earthquake.nshmp.mfd.Mfd.Type.SINGLE; import static gov.usgs.earthquake.nshmp.mfd.Mfds.gutenbergRichterRate; import java.util.Arrays; +import java.util.Map; import java.util.Optional; import java.util.function.Consumer; @@ -25,9 +26,9 @@ import gov.usgs.earthquake.nshmp.data.XySequence; * values are backed by an immutable {@link XySequence}, however their * construction is varied. * - * <p>Factory methods are provided to create incremental MFDs directly and - * obtain builders for different MFD {@link Type types}. The {@link Builder} - * class includes factory methods to initialize builders from data or a copy of + * <p>Factory methods are provided to directly create incremental MFD builders + * for different MFD {@link Type types}. Note that the {@link Builder} class + * also includes factory methods to initialize builders from data or a copy of * an existing MFD. Once obtained, a builder can be used to scale and shape the * MFD prior to calling {@code build()}. Example: * @@ -53,6 +54,11 @@ import gov.usgs.earthquake.nshmp.data.XySequence; * calls to {@link Builder#transform(Consumer)} can irrevocably change the state * of the builder between {@code build()} calls. * + * <p>MFD {@link Properties} hold references to the parameters used to + * initialize an MFD, including its {@link Type}. Properties objects may be + * constructed directly and support a {@link Properties#toBuilder()} method. + * NSHM + * * @author U.S. Geological Survey * @see Mfds * @see XySequence @@ -104,7 +110,7 @@ public final class Mfd { } /** - * Create an {@code Mfd.Builder} for a single magnitude. + * Create an {@code Mfd.Builder} for a single magnitude with a rate of one. * * @param magnitude * @throws IllegalArgumentException if magnitude is outside the range @@ -120,7 +126,8 @@ public final class Mfd { * When building the underlying magnitude distribtion, this implementation * rounds each magnitude to 5 decimal places. This implementation constructs a * distribution that is {@code μ ± 2σ} wide and represented by {@code nm} - * evenly distributed magnitude values. + * evenly distributed magnitude values. The initial distribution integrates to + * one. 
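Scale the returned builder to a target rate or moment rate (e.g. {@code scaleToCumulativeRate} or {@code scaleToMomentRate}) before calling {@code build()}.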
* * @param μ the mean magnitude * @param nm the number of magnitudes in the resultant distribution @@ -142,7 +149,7 @@ public final class Mfd { * Create an {@code Mfd.Builder} for a truncated <a * href="https://en.wikipedia.org/wiki/Gutenberg–Richter_law" * target="_top">Gutenberg–Richter</a> MFD. When building the underlying - * magnitude distribtion, this implementation rounds each magnitude to 3 + * magnitude distribtion, this implementation rounds each magnitude to 4 * decimal places. * * @param mMin the minimum truncation magnitude @@ -174,7 +181,7 @@ public final class Mfd { * {@link Mfds#paretoRate(double, double, double, double)} for reference) to * compute scale factors for each magnitude bin that are then applied to a * standard Gutenberg–Richter distribution. When building the underlying - * magnitude distribtion, this implementation rounds each magnitude to 3 + * magnitude distribtion, this implementation rounds each magnitude to 4 * decimal places. * * @param mMin the minimum truncation magnitude @@ -235,9 +242,11 @@ public final class Mfd { /** * Magnitude frequency distribution (MFD) builder. A builder may be obtained - * via the factory methods in the {@link Mfd} class. An {@code Mfd.Builder} is - * an intermediate class that can modify and shape an MFD prior to calling - * {@link #build()}. + * via the factory methods in the {@link Mfd} class, or via factory methods + * herein that copy existing data. An {@code Mfd.Builder} is an intermediate + * class that can modify and shape an MFD prior to calling {@link #build()}. + * + * @see Mfd */ public static class Builder { @@ -273,6 +282,14 @@ public final class Mfd { * shared MFD backing arrays with mMin == mMax over all grid sources, we * reduce the rates of the uppermost magnitude bins to zero rather than * truncating the entire MFD. + * + * TODO note MFD types that get created from from() methods below + * + * TODO note what happens when combining MFDs wrt to resultant type. + * + * TODO developer notes: use of no-arg constructors ensure correct + * deserialization behavior where existing fields such as GutenbergRichter.a + * and Single.rate will not be overridden if absent from JSON. */ /** @@ -330,6 +347,7 @@ public final class Mfd { props, new double[] { props.m }, Optional.of(new double[] { props.rate == 0.0 ? 1.0 : props.rate })); + // TODO check rate above } /* Incremental MFD */ @@ -374,16 +392,16 @@ public final class Mfd { /* Gutenberg–Richter with exponential taper */ static Builder initTaperedGutenbergRichter(Properties.GrTaper props) { - double mMin = checkMagnitude(props.mMin); - double mMax = checkMagnitude(props.mMax); + double mMin = checkMagnitude(props.mMin()); + double mMax = checkMagnitude(props.mMax()); double mc = checkMagnitude(props.mc); - double[] magnitudes = Sequences.arrayBuilder(mMin, mMax, props.Δm) + double[] magnitudes = Sequences.arrayBuilder(mMin, mMax, props.Δm()) .scale(4) .build(); Builder builder = new Builder(props, magnitudes, Optional.empty()); - GrTaper grTaper = new GrTaper(props.Δm, props.b, mc); + GrTaper grTaper = new GrTaper(props.Δm(), props.b(), mc); builder.mfd.forEach(p -> p.set( - gutenbergRichterRate(0, props.b, p.x()) * grTaper.scale(p.x()))); + gutenbergRichterRate(0, props.b(), p.x()) * grTaper.scale(p.x()))); return builder; } @@ -448,6 +466,7 @@ public final class Mfd { return mfd.x(mfd.size() - 1); } + @Deprecated public Properties properties() { return props; } @@ -517,8 +536,12 @@ public final class Mfd { */ public static class Properties { - /** The MFD type. 
*/ - public final Type type; + private final Type type; + + /* No-arg constructor for deserialization. */ + private Properties() { + this.type = SINGLE; + } /** * Sole constructor. @@ -528,9 +551,14 @@ public final class Mfd { this.type = type; } + /** The MFD type. */ + public Type type() { + return type; + } + /** * Properties for a single MFD. - * @throws ClassCastException if properties are for other type of MFD + * @throws ClassCastException if properties are for other MFD type */ public Single getAsSingle() { return (Single) this; @@ -538,15 +566,15 @@ public final class Mfd { /** * Properties for a Gutenberg-Richter MFD. - * @throws ClassCastException if properties are for other type of MFD + * @throws ClassCastException if properties are for other MFD type */ public GutenbergRichter getAsGr() { return (GutenbergRichter) this; } /** - * Properties for a taperd Gutenberg-Richter MFD. - * @throws ClassCastException if properties are for other type of MFD + * Properties for a tapered Gutenberg-Richter MFD. + * @throws ClassCastException if properties are for other MFD type */ public GrTaper getAsGrTaper() { return (GrTaper) this; @@ -560,22 +588,45 @@ public final class Mfd { /** Properties of a single magnitude MFD. */ public static final class Single extends Properties { - /** The sole magnitude of the MFD */ - public final double m; + private final double m; + private final double rate; - /** The rate of the sole magnitude; may be 1. */ - public final double rate; + /* No-arg constructor for deserialization. */ + private Single() { + m = Double.NaN; + rate = 1.0; + }; - Single(double m) { + public Single(double m, double rate) { super(SINGLE); this.m = m; - this.rate = 1.0; + this.rate = rate; } + public Single(double m) { + this(m, 1.0); + } + + /** The sole magnitude of the MFD */ + public double m() { + return m; + } + + /** The rate of the sole magnitude. */ + public double rate() { + return rate; + } + + // TODO note in documentation default a and rate = 1 for correct scaling + // behavior; and no-arg constructor + // TODO use case for createSingle(m,r) and createGr(...) where // rate or a-value is known in advance, e.g. when crateing model fault // MFDs that will likely be subject to uncertainty modifications + // TODO It seems we also want to just be able to create properties + // directly + // TODO public toMfd(); instead of toBuilder() // lets prefer just initializing builders with copies from() // we shouldn't be dealing in LogicTree<Mfd.Properties>, just make copies @@ -589,6 +640,16 @@ public final class Mfd { return Builder.initGaussian(this, nm, σ, nσ); } + @Override + public String toString() { + return new StringBuilder() + .append(type()) + .append(": ") + .append(Map.of("m", m, "rate", rate)) + .toString(); + } + + @Deprecated public static Single dummy() { return new Single(7.0); } @@ -597,50 +658,94 @@ public final class Mfd { /** Properties of a Gutenberg–Richter MFD. */ public static class GutenbergRichter extends Properties { - public final double a; - - /** The Gutenberg-Richter {@code b}-value. */ - public final double b; + private final double a; + private final double b; + private final double Δm; + private final double mMin; + private final double mMax; - /** The magnitude bin width. */ - public final double Δm; - - /** The minimum magnitude. */ - public final double mMin; - - /** The maximum magnitude. */ - public final double mMax; + /* Dummy, no-arg constructor for deserialization. 
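Gson invokes this constructor reflectively; fields absent from the JSON retain the defaults assigned here (e.g. a branch that omits an a-value deserializes with a = 1.0).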
*/ + private GutenbergRichter() { + this.a = 1.0; + this.b = Double.NaN; + this.Δm = Double.NaN; + this.mMin = Double.NaN; + this.mMax = Double.NaN; + } - private GutenbergRichter(Type type, double b, double Δm, double mMin, double mMax) { + private GutenbergRichter(Type type, double a, double b, double Δm, double mMin, double mMax) { super(type); + this.a = a; this.b = b; this.Δm = Δm; this.mMin = mMin; this.mMax = mMax; - this.a = 1.0; } - GutenbergRichter(double b, double Δm, double mMin, double mMax) { - this(GR, b, Δm, mMin, mMax); + public GutenbergRichter(double b, double Δm, double mMin, double mMax) { + this(GR, 1.0, b, Δm, mMin, mMax); + } + + /** The Gutenberg-Richter {@code a}-value. */ + public double a() { + return a; } + /** The Gutenberg-Richter {@code b}-value. */ + public double b() { + return b; + } + + /** The magnitude bin width. */ + public double Δm() { + return Δm; + }; + + /** The minimum magnitude. */ + public double mMin() { + return mMin; + }; + + /** The maximum magnitude. */ + public double mMax() { + return mMax; + }; + @Override public Builder toBuilder() { return Builder.initGutenbergRichter(this); } + + @Override + public String toString() { + return new StringBuilder() + .append(type()) + .append(": ") + .append(Map.of("a", a, "b", b, "Δm", Δm, "mMin", mMin, "mMax", mMax)) + .toString(); + } } /** Tapered Gutenberg–Richter MFD properties. */ public static final class GrTaper extends GutenbergRichter { - /** The corner magnitude. */ - public final double mc; + private final double mc; + + /* Dummy, no-arg constructor for deserialization. */ + private GrTaper() { + this.mc = Double.NaN; + } GrTaper(double b, double Δm, double mMin, double mMax, double mc) { - super(GR_TAPER, b, Δm, mMin, mMax); + super(GR_TAPER, 1.0, b, Δm, mMin, mMax); this.mc = mc; } + /** The corner magnitude. */ + public final double mc() { + return mc; + } + @Override public Builder toBuilder() { return Builder.initTaperedGutenbergRichter(this); diff --git a/src/main/java/gov/usgs/earthquake/nshmp/mfd/Mfds.java b/src/main/java/gov/usgs/earthquake/nshmp/mfd/Mfds.java index 74af2b60..49c2c997 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/mfd/Mfds.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/mfd/Mfds.java @@ -12,34 +12,15 @@ import com.google.common.base.Converter; import com.google.common.collect.Range; import gov.usgs.earthquake.nshmp.Earthquakes; +import gov.usgs.earthquake.nshmp.Maths; import gov.usgs.earthquake.nshmp.data.MutableXySequence; import gov.usgs.earthquake.nshmp.data.XySequence; /** * Utility methods for working with magnitude frequency distributions (MFDs). - * - * <pre> - * Mfd mfd = Mfds.newGutenbergRighterMfd(5.05, 7.25, 0.1, 1.0) - * .scaleToCumulativeRate(0.02) - * .transform(p -> p.set((p.x() > 6.5) ? p.y() * 0.667 : p.y())) - * .build() - * </pre> - * - * In the example above, an incremental MFD with a <a - * href="https://en.wikipedia.org/wiki/Gutenberg–Richter_law" - * target="_top">Gutenberg–Richter</a> distribution of rates is initialized from - * {@code M=5.05} to {@code M=7.25} with a step size of {@code 0.1} magnitude - * units. The resultant magnitude array, - * {@code [5.05, 5.15, 5.25, ... , 7.15, 7.25]}, is typically used to define the - * centers of 0.1 magnitude intervals spanning the magnitude range - * {@code [5.0..7.3]}. The MFD is then scaled to the cumulative rate of 0.02 - * events/year, and lastly those rates above {@code M=6.5} are scaled to ⅔ of - * their present value. 
Note that a {@code MfdBuilder} executes methods in the - * order defined, so reversing {@code scaleToCumulativeRate()} and - * {@code transform()} in the example above results in two different MFDs. + * See the {@link Mfd} class for details on the creation of MFDs. * * @author U.S. Geological Survey - * @see MfdBuilder * @see Mfd */ public final class Mfds { @@ -113,10 +94,10 @@ public final class Mfds { return a * Math.pow(10, -b * mMin); } - public static void main(String[] args) { - System.out.println(incrRate(0.01, -1, 5.05)); - System.out.println(gutenbergRichterRate(Math.log10(0.01), -1, 5.05)); - } + // public static void main(String[] args) { + // System.out.println(incrRate(0.01, -1, 5.05)); + // System.out.println(gutenbergRichterRate(Math.log10(0.01), -1, 5.05)); + // } /** * Computes total moment rate as done by NSHMP code from supplied magnitude @@ -205,6 +186,19 @@ public final class Mfds { return (int) ((mMax - mMin) / dMag + 1.4); } + static int size(double min, double max, double Δ) { + return ((int) Maths.round(((max - min) / Δ), 6)) + 1; + } + + public static void main(String[] args) { + System.out.println(magCount(6.57, 6.51, 0.14)); + System.out.println(magCount(6.55, 6.9501, 0.1)); + + System.out.println(size(6.57, 6.51, 0.14)); + System.out.println(size(6.55, 6.9501, 0.1)); + + } + // TODO docs; consider moving to cumulate method in data/sequence package; // not necessarily MFD specific public static XySequence toCumulative(XySequence incremental) { diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/ClusterRuptureSet.java b/src/main/java/gov/usgs/earthquake/nshmp/model/ClusterRuptureSet.java index a7576d80..50b895ff 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/ClusterRuptureSet.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/model/ClusterRuptureSet.java @@ -13,7 +13,6 @@ import java.util.Map; import gov.usgs.earthquake.nshmp.geo.Location; import gov.usgs.earthquake.nshmp.geo.LocationList; import gov.usgs.earthquake.nshmp.geo.Locations; -import gov.usgs.earthquake.nshmp.tree.LogicTree; /** * Cluster source representation. Each cluster source wraps a @@ -41,19 +40,21 @@ import gov.usgs.earthquake.nshmp.tree.LogicTree; */ public class ClusterRuptureSet implements RuptureSet { + // for documentation: a cluster model can only be applied to + // logic trees of SINGLE MFDs with parallel (?) 
weights final String name; final int id; final double rate; // from the default mfd xml // final FaultSourceSet faults; - final LogicTree<Double> rateTree; + // final LogicTree<Double> rateTree; final List<FaultRuptureSet> faultRuptureSets; private ClusterRuptureSet(Builder builder) { this.name = builder.name; this.id = builder.id; - this.rateTree = builder.rateTree; + // this.rateTree = builder.rateTree; this.faultRuptureSets = builder.faultRuptureSets; this.rate = 0.002; // TODO clean/fix } @@ -142,20 +143,22 @@ public class ClusterRuptureSet implements RuptureSet { boolean built = false; - private ModelData data; + // private ModelData data; // required private String name; private Integer id; - private Map<Integer, SourceFeature.NshmFault> featureMap; + // private Map<Integer, SourceFeature.NshmFault> featureMap; - private LogicTree<Double> rateTree; + // private LogicTree<Double> rateTree; - List<FaultRuptureSet.Builder> faultRuptureSetBuilders = new ArrayList<>(); - List<FaultRuptureSet> faultRuptureSets; + // List<FaultRuptureSet.Builder> faultRuptureSetBuilders = new + // ArrayList<>(); - Double rate; + List<FaultRuptureSet> faultRuptureSets = new ArrayList<>(); + + // Double rate; // FaultSourceSet faults; Builder name(String name) { @@ -168,32 +171,38 @@ public class ClusterRuptureSet implements RuptureSet { return this; } - /* Set MFD helper data. */ - Builder modelData(ModelData data) { - this.data = data; - return this; - } + // /* Set MFD helper data. */ + // Builder modelData(ModelData data) { + // this.data = data; + // return this; + // } - @Deprecated - Builder rate(double rate) { - // TODO what sort of value checking should be done for rate (<1 ??) - this.rate = rate; - return this; - } + // @Deprecated + // Builder rate(double rate) { + // // TODO what sort of value checking should be done for rate (<1 ??) + // this.rate = rate; + // return this; + // } - Builder rateTree(LogicTree<Double> rateTree) { - // TODO what sort of value checking should be done for rate (<1 ??) - this.rateTree = rateTree; - return this; - } + // Builder rateTree(LogicTree<Double> rateTree) { + // // TODO what sort of value checking should be done for rate (<1 ??) 
+ // this.rateTree = rateTree; + // return this; + // } - Builder featureMap(Map<Integer, SourceFeature.NshmFault> featureMap) { - this.featureMap = Map.copyOf(featureMap); - return this; - } + // Builder featureMap(Map<Integer, SourceFeature.NshmFault> featureMap) { + // this.featureMap = Map.copyOf(featureMap); + // return this; + // } - Builder addRuptureSet(FaultRuptureSet.Builder ruptureSet) { - faultRuptureSetBuilders.add(ruptureSet); + // @Deprecated + // Builder addRuptureSet(FaultRuptureSet.Builder ruptureSet) { + // faultRuptureSetBuilders.add(ruptureSet); + // return this; + // } + + Builder addRuptureSet(FaultRuptureSet ruptureSet) { + faultRuptureSets.add(ruptureSet); return this; } @@ -207,7 +216,8 @@ public class ClusterRuptureSet implements RuptureSet { void validateState(String label) { checkState(!built, "Single use builder"); checkNotNull(name, "%s name", label); - checkNotNull(id, "%s geometry ID", label); + checkNotNull(id, "%s id", label); + // checkNotNull(data, "%s model data", label); // checkState(rate != null, "%s rate not set", source); // checkState(faults != null, "%s has no fault sources", source); @@ -216,14 +226,14 @@ public class ClusterRuptureSet implements RuptureSet { // .map(frsb -> frsb.config(config)) // .map(FaultSource.Builder::build) // .collect(Collectors.toList()); - - checkNotNull(featureMap, "%s feature map", label); - - faultRuptureSets = new ArrayList<>(); - for (FaultRuptureSet.Builder frsb : faultRuptureSetBuilders) { - frsb.modelData(data); - faultRuptureSets.add(frsb.build()); - } + // System.out.println(data.faultFeatureMap().isPresent()); + // checkNotNull(featureMap, "%s feature map", label); + + // faultRuptureSets = new ArrayList<>(); + // for (FaultRuptureSet.Builder frsb : faultRuptureSetBuilders) { + // frsb.modelData(data); + // faultRuptureSets.add(frsb.build()); + // } checkState(faultRuptureSets.size() > 0); built = true; diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/Deserialize.java b/src/main/java/gov/usgs/earthquake/nshmp/model/Deserialize.java index 56cc938c..3be25e6d 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/Deserialize.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/model/Deserialize.java @@ -4,6 +4,7 @@ import static com.google.common.base.Preconditions.checkState; import java.io.BufferedReader; import java.io.IOException; +import java.lang.reflect.Type; import java.nio.file.DirectoryStream; import java.nio.file.Files; import java.nio.file.Path; @@ -11,9 +12,11 @@ import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; +import java.util.Optional; import com.google.common.collect.Range; import com.google.common.primitives.Ints; +import com.google.common.reflect.TypeToken; import com.google.gson.Gson; import com.google.gson.GsonBuilder; import com.google.gson.JsonArray; @@ -229,13 +232,10 @@ class Deserialize { return Map.copyOf(sectionMap); } - // TODO ?? test that mfdMap.get() throws NoSuchElement when mfd map missing - // but required by mfd-tree:string - // - // TODO by the time wer down deserializing rupture sets, + /* Create an interface rupture set. */ + static InterfaceRuptureSet interfaceRuptureSet(Path json, ModelData data) { - /* Create a rupture set builder, initialized with data from file. 
*/ - static InterfaceRuptureSet.Builder interfaceRuptureSet(Path json, ModelData data) { + // TODO compress JsonObject obj = jsonObject(json); @@ -255,24 +255,28 @@ class Deserialize { ruptureSet.sections(List.of(id)); } - ruptureSet.mfdTree(mfdTree(obj, data)); + ruptureSet.mfdTree(mfdTree(obj, data.mfdMap())); - return ruptureSet; + return ruptureSet.build(); } - /* Create a rupture set builder, initialized with data from file. */ - static FaultRuptureSet.Builder faultRuptureSet( + /* Create a fault rupture set. */ + static FaultRuptureSet faultRuptureSet( Path json, ModelData data) { JsonObject obj = jsonObject(json); - return faultRuptureSet(obj, data); + return faultRuptureSet(obj, data.mfdMap()) + .modelData(data) + .build(); } - /* Create a rupture set builder, initialized with data from file. */ + /* Create a fault rupture set builder. */ private static FaultRuptureSet.Builder faultRuptureSet( JsonObject obj, - ModelData data) { + Optional<Map<String, LogicTree<Mfd.Properties>>> mfdMap) { + + // TODO can this return FRS instead of builder FaultRuptureSet.Builder ruptureSet = FaultRuptureSet.builder(); @@ -289,13 +293,20 @@ class Deserialize { ruptureSet.sections(List.of(id)); } - ruptureSet.mfdTree(mfdTree(obj, data)); + /* Optional additional or overriding 'properties'. */ + if (obj.has(PROPERTIES)) { + Type propsType = new TypeToken<Map<String, ?>>() {}.getType(); + Map<String, ?> props = GSON.fromJson(obj.get(PROPERTIES), propsType); + ruptureSet.properties(props); + } + + ruptureSet.mfdTree(mfdTree(obj, mfdMap)); return ruptureSet; } /* Create a rupture set builder, initialized with data from file. */ - static ClusterRuptureSet.Builder clusterRuptureSet( + static ClusterRuptureSet clusterRuptureSet( Path json, ModelData data) { @@ -305,20 +316,27 @@ class Deserialize { clusterSet.name(obj.get(NAME).getAsString()); clusterSet.id(obj.get(ID).getAsInt()); + /* Set cluster flag so faultRuptureSet MFDs are built correctly. */ + data.clusterModel(); + JsonArray ruptures = obj.get(RUPTURE_SETS).getAsJsonArray(); for (JsonElement rupture : ruptures) { - FaultRuptureSet.Builder ruptureSet = faultRuptureSet(rupture.getAsJsonObject(), data); + FaultRuptureSet ruptureSet = faultRuptureSet(rupture.getAsJsonObject(), data.mfdMap()) + .modelData(data) + .build(); clusterSet.addRuptureSet(ruptureSet); } - return clusterSet; + return clusterSet.build(); } - /* Get a logic tree of MFD properties via parsing of map lookup. */ - private static LogicTree<Mfd.Properties> mfdTree(JsonObject obj, ModelData data) { + /* Create an mfd-tree defined inline or via map lookup. */ + private static LogicTree<Mfd.Properties> mfdTree( + JsonObject obj, + Optional<Map<String, LogicTree<Mfd.Properties>>> mfdMap) { JsonElement mfdElement = obj.get(MFD_TREE); return mfdElement.isJsonPrimitive() - ? data.mfdMap().orElseThrow().get(mfdElement.getAsString()) + ? mfdMap.orElseThrow().get(mfdElement.getAsString()) : mfdPropertiesTree(mfdElement); } @@ -331,7 +349,7 @@ class Deserialize { return Map.copyOf(mfdMap); } - /* Create a logic tree of MFD properties. */ + /* Create an mfd-tree from in inline element. 
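Each element of the JSON array is expected to define one branch: an id, a weight, and type-specific MFD properties selected by the branch type.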
*/ static LogicTree<Mfd.Properties> mfdPropertiesTree(JsonElement json) { LogicTree.Builder<Mfd.Properties> builder = LogicTree.builder(MFD_TREE); for (JsonElement branch : json.getAsJsonArray()) { @@ -367,6 +385,7 @@ class Deserialize { default: throw new IllegalArgumentException("Unknown MFD type: " + type); } + System.out.println(properties); return properties; } @@ -526,7 +545,7 @@ class Deserialize { for (JsonElement branch : json.getAsJsonArray()) { JsonObject obj = branch.getAsJsonObject(); JsonElement valueElement = obj.get(VALUE); - double value = valueElement.isJsonNull() ? Double.NaN : valueElement.getAsDouble(); + double value = valueElement.isJsonNull() ? 0.0 : valueElement.getAsDouble(); builder.addBranch( obj.get(ID).getAsString(), value, diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/FaultRuptureSet.java b/src/main/java/gov/usgs/earthquake/nshmp/model/FaultRuptureSet.java index f45f6dae..a3530f62 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/FaultRuptureSet.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/model/FaultRuptureSet.java @@ -5,18 +5,18 @@ import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.Preconditions.checkState; import static gov.usgs.earthquake.nshmp.model.Deserialize.MFD_TREE; import static gov.usgs.earthquake.nshmp.model.Deserialize.RATE_TREE; +import static gov.usgs.earthquake.nshmp.model.RateType.RECURRENCE; import static gov.usgs.earthquake.nshmp.model.SourceType.FAULT; import static java.util.stream.Collectors.collectingAndThen; import static java.util.stream.Collectors.toList; import java.util.List; import java.util.Map; -import java.util.stream.Collectors; +import java.util.Optional; import gov.usgs.earthquake.nshmp.Earthquakes; import gov.usgs.earthquake.nshmp.Faults; import gov.usgs.earthquake.nshmp.Maths; -import gov.usgs.earthquake.nshmp.data.XySequence; import gov.usgs.earthquake.nshmp.fault.surface.DefaultGriddedSurface; import gov.usgs.earthquake.nshmp.fault.surface.GriddedSurface; import gov.usgs.earthquake.nshmp.geo.Location; @@ -25,7 +25,9 @@ import gov.usgs.earthquake.nshmp.geo.Locations; import gov.usgs.earthquake.nshmp.geo.json.Feature; import gov.usgs.earthquake.nshmp.geo.json.Properties; import gov.usgs.earthquake.nshmp.mfd.Mfd; +import gov.usgs.earthquake.nshmp.mfd.Mfd.Properties.Single; import gov.usgs.earthquake.nshmp.mfd.Mfds; +import gov.usgs.earthquake.nshmp.model.SourceFeature.Key; import gov.usgs.earthquake.nshmp.tree.Branch; import gov.usgs.earthquake.nshmp.tree.LogicTree; @@ -43,6 +45,19 @@ public class FaultRuptureSet implements RuptureSet { * it's ok to overwrite it each time. */ + /* + * General comments on fault earthquake rates. The majority of faults in NSHMs + * are constrained by slip rates but some are governed by explicit logic trees + * of slip and/or recurrence rates with pre-defined MFD parameters. In both + * cases we require moment rate in order to correctly balance MFD logic tree + * branches of epistemic and aleatory uncertainty + * + * This ends up being a little circular for GR with no epistemic uncertainty + * in that we consrtuct an MFD, scale to it's incremental rate, compute its + * moment rate, only to then build the MFD again scaling directly to moment + * rate. 
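In code terms (simplified from createMoRateTree and the MFD builders below): moRate = (rateType == RECURRENCE) ? rate * Earthquakes.magToMoment(Mw) : Earthquakes.moment(area, rate / 1000.0), and each branch MFD is then built with scaleToMomentRate(moRate).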
+ */ + /* * TODO we removed aleaPops - MoBalance; 2008 CA a-priori a-faults do not * moment balance so we'll need to figure out how to handle that; it's @@ -122,7 +137,6 @@ public class FaultRuptureSet implements RuptureSet { private ModelData data; private LogicTree<Mfd.Properties> mfdPropsTree; - // private LogicTree<Mfd.Properties> mfdPropsTreeOut; /* created on build */ private LogicTree<Mfd> mfdTree; @@ -130,12 +144,13 @@ public class FaultRuptureSet implements RuptureSet { private GriddedSurface surface; /* - * Name, id, and and sectionIds Validated and carried forward in rupture set - * feature. + * TODO clean Name, id, and and sectionIds Validated and carried forward in + * rupture set feature. */ private String name; private Integer id; private List<Integer> sectionIds; + private Optional<Map<String, ?>> ruptureProps = Optional.empty(); Builder name(String name) { this.name = name; @@ -153,6 +168,11 @@ public class FaultRuptureSet implements RuptureSet { return this; } + Builder properties(Map<String, ?> props) { + this.ruptureProps = Optional.of(checkNotNull(props)); + return this; + } + /* Set the MFD tree. */ Builder mfdTree(LogicTree<Mfd.Properties> mfdTree) { this.mfdPropsTree = mfdTree; @@ -172,17 +192,54 @@ public class FaultRuptureSet implements RuptureSet { private void validateAndInit(String label) { checkState(!built, "Single use builder"); - checkNotNull(sectionIds, "%s section ID list", label); - checkNotNull(data, "%s model data", label); - checkNotNull(name, "%s name", label); checkNotNull(id, "%s id", label); + checkNotNull(data, "%s model data", label); + checkNotNull(sectionIds, "%s section id list", label); checkNotNull(mfdPropsTree, "%s MFD logic tree", label); feature = createFeature(); - /* Create Mo-rate tree. */ - LogicTree<Double> moRateTree = createMoRateTree(feature, data); + // System.out.println(feature.source.toJson()); + // TODO test length property rounding + + /* Filter of unlikely combination of SINGLEs epistemic uncertainty. */ + checkEpistemic(data.mfdConfig().get(), mfdPropsTree); + + /* + * Fault MFDs are constructed in a variety of ways. Single fault MFDs may + * be created from a logic tree of slip rates. + */ + + MfdConfig mfdConfig = data.mfdConfig().orElseThrow(); + Optional<LogicTree<Double>> moRateTree = Optional.empty(); + + if (data.isClusterModel()) { + System.out.println("YOYO"); + } + + /* Build mfd-tree from fault slip or recurrence rates. */ + if (data.faultRateTree().isPresent()) { + + // System.out.println(data.faultRateTree().get()); + + /* Create Mo-rate tree. */ + moRateTree = Optional.of(createMoRateTree( + feature, + data.faultRateTree().orElseThrow())); + + /* Update MFD properties tree based on source magnitude. */ + mfdPropsTree = updateMfdsForSlip(); + + } else if (data.rateTree().isPresent() && !data.isClusterModel()) { + + /* Update MFD properties tree to include M and R branches. */ + mfdPropsTree = updateMfdsForRecurrence(); + } + + mfdTree = createMfdTree(mfdPropsTree, moRateTree, mfdConfig); + mfdTotal = ModelUtil.reduceMfdTree(mfdTree); + // for (Branch<Double> b : moRateTree) { // System.out.println(b.id() + " " + b.value()); // double eqMo = Earthquakes.magToMoment(feature.magnitude.getAsDouble()); @@ -191,20 +248,11 @@ public class FaultRuptureSet implements RuptureSet { // // feature.magnitude // } - /* Filter of unlikely combination of SINGLEs epistemic uncertainty. */ - checkEpistemic(data.mfdConfig().get(), mfdPropsTree); - - /* Update MFD properties based on source magnitude. 
*/ - mfdPropsTree = updateMfdProperties(); - - /* Combine rate-tree, mfd-props-tree, and uncertainty. */ - mfdTree = buildMfdTree(moRateTree, mfdPropsTree, data); - - mfdTotal = ModelUtil.reduceMfdTree(mfdTree); + System.out.println(mfdTree); + // if (name.contains("Grand")) { // System.out.println(mfdTree); - - // if (name.contains("Angel")) { + // System.out.println(mfdTotal.data()); for (Branch<Mfd> b : mfdTree) { System.out.println(b.id() + " " + b.value().data()); } @@ -226,37 +274,92 @@ public class FaultRuptureSet implements RuptureSet { } private SourceFeature.NshmFault createFeature() { + Map<Integer, SourceFeature.NshmFault> featureMap = data.faultFeatureMap().orElseThrow(); checkState(featureMap.keySet().containsAll(sectionIds)); - /* - * Most (all?) single fault features will have been joined already. - */ if (sectionIds.size() == 1) { return featureMap.get(sectionIds.get(0)); } - Feature f = joinFeatures(sectionIds.stream() + /* Collect participating features */ + List<SourceFeature.NshmFault> features = sectionIds.stream() .map(featureMap::get) - .map(feature -> feature.source) - .collect(toList())); - return new SourceFeature.NshmFault(f); + .collect(toList()); + + /* Consolidate states */ + List<String> states = features.stream() + .map(f -> f.states) + .flatMap(List::stream) + .distinct() + .collect(toList()); + + /* Join section traces */ + LocationList trace = features.stream() + .map(f -> f.source) + .map(Feature::asLineString) + .flatMap(LocationList::stream) + .distinct() + .collect(collectingAndThen( + toList(), + LocationList::copyOf)); + + /* Update properties */ + Properties.Builder props = Properties.fromFeature(features.get(0).source) + .put(Key.NAME, name) + .put(Key.STATES, states); + ruptureProps.ifPresent(p -> p.forEach(props::put)); + + /* Build new feature from stitched traces and props of 1st section */ + Feature feature = Feature.lineString(trace) + .id(id) + .properties(props.build()) + .build(); + + System.out.println(feature.toJson()); + return new SourceFeature.NshmFault(feature); + } + + /* Create mfd-tree from a logic tree of SINGLE MFDs and a rate-tree. */ + private LogicTree<Mfd.Properties> updateMfdsForRecurrence() { + checkState(ModelUtil.mfdsAreType(mfdPropsTree, Mfd.Type.SINGLE)); + LogicTree.Builder<Mfd.Properties> propsTree = LogicTree.builder(mfdPropsTree.name()); + LogicTree<Double> rateTree = data.rateTree().orElseThrow(); + for (Branch<Double> rBranch : rateTree) { + System.out.println(rBranch.value()); + /* + * TODO need to be careful here; this catch is for additive New Madrid + * branches; null rates have been converted to 0; perhaps we should use + * 0 in JSON and still have this 0 check here; we also use 0 rates to + * deal with imbalanced geodetic logic tree branches so maybe just + * document these cases + * + */ + double rate = (rBranch.value() <= 0.0) + ? 0.0 + : Maths.roundToDigits(1.0 / rBranch.value(), 4); + for (Branch<Mfd.Properties> mBranch : mfdPropsTree) { + Single props = mBranch.value().getAsSingle(); + String id = String.join(":", props.type().name(), mBranch.id(), rBranch.id()); + double weight = mBranch.weight() * rBranch.weight(); + propsTree.addBranch(id, new Single(props.m(), rate), weight); + } + } + return propsTree.build(); } /* - * The properties picked up from a mfd-tree for faults are only used to set - * GR mMin and b-value; all other fields are just required placeholders. - * This method uses the 'characteristic' magnitude of the feature to set the - * SINGLE magnitude and GR Δm and mMax. 
+ * The properties picked up from a mfd-tree for faults with slip rates are + * only used to set GR mMin and b-value; all other fields are just required + * placeholders. This method uses the 'characteristic' magnitude of the + * feature to set the SINGLE magnitude and GR Δm and mMax. */ - private LogicTree<Mfd.Properties> updateMfdProperties() { + private LogicTree<Mfd.Properties> updateMfdsForSlip() { LogicTree.Builder<Mfd.Properties> propsTree = LogicTree.builder(mfdPropsTree.name()); double Mw = feature.magnitude.orElseThrow(); for (Branch<Mfd.Properties> branch : mfdPropsTree) { - Mfd.Type type = branch.value().type; - // System.out.println(Mw + " " + type); - switch (type) { + switch (branch.value().type()) { case GR: double mCutoff = data.mfdConfig().orElseThrow().minMagnitude.orElseThrow(); propsTree.addBranch( @@ -267,11 +370,12 @@ public class FaultRuptureSet implements RuptureSet { case SINGLE: propsTree.addBranch( branch.id(), - Mfd.newSingleBuilder(Mw).properties(), + new Mfd.Properties.Single(Mw), branch.weight()); break; default: - throw new UnsupportedOperationException(type + " MFD not supported"); + throw new UnsupportedOperationException( + branch.value().type() + " MFD not supported"); } } LogicTree<Mfd.Properties> pt = propsTree.build(); @@ -322,15 +426,15 @@ public class FaultRuptureSet implements RuptureSet { /* Return SINGLE for m <= GR minimum. */ if (mMax <= mCutoff) { - return Mfd.newSingleBuilder(mMax).properties(); + return new Mfd.Properties.Single(mMax); } - double Rm = Maths.round(mMax - grProps.mMin, 6); + double Rm = Maths.round(mMax - grProps.mMin(), 6); /* Return SINGLE if GR only has one magnitude bin. */ if (Rm <= 0.1) { - double m = Maths.round(grProps.mMin + Rm / 2, 6); - return Mfd.newSingleBuilder(m).properties(); + double m = Maths.round(grProps.mMin() + Rm / 2, 6); + return new Mfd.Properties.Single(m); } /* @@ -342,11 +446,7 @@ public class FaultRuptureSet implements RuptureSet { double Δm = Maths.round(Rm / nm, 6); // System.out.println(Rm + " " + grProps.mMin + " " + mMax + " " + Δm); - return Mfd.newGutenbergRichterBuilder( - grProps.mMin, - mMax, - Δm, - grProps.b).properties(); + return new Mfd.Properties.GutenbergRichter(grProps.b(), Δm, grProps.mMin(), mMax); } /* @@ -364,65 +464,43 @@ public class FaultRuptureSet implements RuptureSet { * in the context of a cluster model. */ - /* - * Joins the traces in the supplied features into a new Feature with the - * properties of the first feature supplied. - */ - private Feature joinFeatures(List<Feature> features) { - Feature.Builder feature = (features.size() == 1) - ? Feature.copyOf(features.get(0)) - : joinFaultSections(features); - if (features.size() > 1) { - - } - Feature last = features.get(features.size() - 1); - Map<String, ?> props = Properties.fromFeature(last).build(); - return joinFaultSections(features) - .id(id) - .properties(props) - .build(); - } - - /* - * Joins the traces of the supplied fault sections, in order, reducing any - * identical points at the ends of sections. - */ - private static Feature.Builder joinFaultSections(List<Feature> features) { - // TODO copy properties of first feature into builder - LocationList joined = features.stream() - .map(Feature::asLineString) - .flatMap(LocationList::stream) - .distinct() - .collect(collectingAndThen( - toList(), - LocationList::copyOf)); - return Feature.lineString(joined); - } } - private static LogicTree<Mfd> buildMfdTree( - LogicTree<Double> moRateTree, + /* Logic tree of moment rates. 
*/ + private static LogicTree<Mfd> createMfdTree( LogicTree<Mfd.Properties> mfdPropsTree, - ModelData mfdData) { + Optional<LogicTree<Double>> moRateTree, + MfdConfig mfdConfig) { LogicTree.Builder<Mfd> mfdTree = LogicTree.builder(MFD_TREE); + if (moRateTree.isPresent()) { + LogicTree<Double> moTree = moRateTree.orElseThrow(); + moTree.forEach(branch -> addMfds(mfdTree, mfdPropsTree, Optional.of(branch), mfdConfig)); + } else { + addMfds(mfdTree, mfdPropsTree, Optional.empty(), mfdConfig); + } + return mfdTree.build(); + } - for (Branch<Double> rateBranch : moRateTree) { - for (Branch<Mfd.Properties> mfdBranch : mfdPropsTree) { - Mfd.Type type = mfdBranch.value().type; - switch (type) { - case SINGLE: - createSingleMfds(mfdTree, mfdData, rateBranch, mfdBranch); - break; - case GR: - createGrMfds(mfdTree, mfdData, rateBranch, mfdBranch); - break; - default: - throw new UnsupportedOperationException(type + " MFD not supported"); - } + private static void addMfds( + LogicTree.Builder<Mfd> mfdTree, // receiver tree + LogicTree<Mfd.Properties> mfdPropsTree, + Optional<Branch<Double>> moBranch, + MfdConfig mfdConfig) { + + for (Branch<Mfd.Properties> mfdBranch : mfdPropsTree) { + switch (mfdBranch.value().type()) { + case SINGLE: + addSingleMfds(mfdTree, mfdBranch, moBranch, mfdConfig); + break; + case GR: + addGrMfds(mfdTree, mfdBranch, moBranch, mfdConfig); + break; + default: + throw new UnsupportedOperationException( + mfdBranch.value().type() + " MFD not supported"); } } - return mfdTree.build(); } /* @@ -432,7 +510,7 @@ public class FaultRuptureSet implements RuptureSet { */ private static LogicTree<Double> createMoRateTree( SourceFeature.NshmFault feature, - ModelData data) { + LogicTree<String> faultRateTree) { double dip = feature.rateDip.isPresent() ? feature.rateDip.getAsDouble() @@ -440,8 +518,11 @@ public class FaultRuptureSet implements RuptureSet { double width = Faults.width(dip, feature.upperDepth, feature.lowerDepth); double length = feature.length.orElseThrow(); double area = length * width * 1e6; + double Mw = feature.magnitude.getAsDouble(); + double mo = Earthquakes.magToMoment(Mw); + Map<String, Double> rateMap = feature.rateMap; - LogicTree<String> slipRateTree = data.slipRateTree().orElseThrow(); + RateType rateType = feature.rateType; LogicTree.Builder<Double> moRateTree = LogicTree.builder(RATE_TREE); // TODO clean @@ -449,23 +530,27 @@ public class FaultRuptureSet implements RuptureSet { System.out.println(" " + rateMap); } - for (Branch<String> branch : slipRateTree.branches()) { + for (Branch<String> branch : faultRateTree.branches()) { /* - * TODO Verify missing slip rates and imbalanced logic trees. In some + * TODO Verify missing fault rates and imbalanced logic trees. In some * cases (e.g. TX) need to give GEO branch weight of 1.0. To do this we * should actually put geo rate in BIRD and ZENG branches to preserve * logic tree. In other cases (e.g. OR offshore) we need empty branches * because GEO weight is 0.8 and imbalanced; maybe we just handle this * with setting rate to 0, as below. */ - Double slipCheck = rateMap.get(branch.id()); - double slipRate = (slipCheck != null) ? slipCheck : 0.0; + Double rateCheck = rateMap.get(branch.id()); + double rate = (rateCheck != null) ? rateCheck : 0.0; + + /* CONUS: 2008 are vertical, 2014 are on-plane. 
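Vertical slip rates are first projected onto the fault plane (Faults.projectVerticalSlip); recurrence-type branches instead scale the characteristic moment directly.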
*/ + if (rateType == RateType.VERTICAL_SLIP) { + rate = Faults.projectVerticalSlip(dip, rate); + } - /* TODO 2008 will need vertical to on-fault conversions */ - // slipRate = Faults.projectVerticalSlip(dip, slipRate) / 1000.0; - slipRate = slipRate / 1000.0; - double moRate = Earthquakes.moment(area, slipRate); + double moRate = (rateType == RECURRENCE) + ? mo * rate + : Earthquakes.moment(area, rate / 1000.0); // System.out.println(branch.id() + " " + moRate); @@ -479,10 +564,8 @@ public class FaultRuptureSet implements RuptureSet { * TODO consider describing different rounding/precision characteristics * in a model defined in a source config. */ - double Mw = feature.magnitude.getAsDouble(); - double mo = Earthquakes.magToMoment(Mw); - double rate = Maths.round(moRate / mo, 7); - moRate = rate * mo; + double eqRate = Maths.round(moRate / mo, 7); + moRate = eqRate * mo; // System.out.println(branch.id() + " " + moRate); @@ -491,43 +574,47 @@ public class FaultRuptureSet implements RuptureSet { return moRateTree.build(); } - private static XySequence buildTotalMfd(LogicTree<XySequence> mfdTree) { - List<XySequence> mfds = mfdTree.branches() - .stream() - .map(Branch::value) - .collect(Collectors.toList()); - return XySequence.combine(mfds); - } - /* Fault GR */ - private static void createGrMfds( - LogicTree.Builder<Mfd> mfdTree, - ModelData mfdData, - Branch<Double> moRateBranch, - Branch<Mfd.Properties> mfdPropsBranch) { + private static void addGrMfds( + LogicTree.Builder<Mfd> mfdTree, // receiver tree + Branch<Mfd.Properties> mfdBranch, // properties may include rate + Optional<Branch<Double>> moBranch, // or rate supplied separately + MfdConfig mfdConfig) { - MfdConfig mfdConfig = mfdData.mfdConfig().get(); - Mfd.Properties.GutenbergRichter grData = mfdPropsBranch.value().getAsGr(); + Mfd.Properties.GutenbergRichter gr = mfdBranch.value().getAsGr(); - double mfdWeight = mfdPropsBranch.weight(); - String mfdId = grData.type.name(); // mfdPropsBranch.id(); + String mfdId = mfdBranch.id(); + double mfdWt = mfdBranch.weight(); - double rateWeight = moRateBranch.weight(); - String rateId = moRateBranch.id(); - double tmr = moRateBranch.value(); + if (moBranch.isPresent()) { + mfdId = mfdBranchId(moBranch.orElseThrow().id(), mfdId); + mfdWt *= moBranch.orElseThrow().weight(); + } - double mfdRateWeight = mfdWeight * rateWeight; + // TODO does this ever throw? + int nMag = Mfds.magCount(gr.mMin(), gr.mMax(), gr.Δm()); + checkState(nMag > 0, "GR MFD with no magnitudes [%s]", mfdBranch.id()); - /* Optional epistemic uncertainty; no aleatory with GR. */ - boolean epistemic = hasEpistemic(mfdConfig, grData.mMax - grData.Δm / 2); + double moRate = moBranch.isPresent() + ? moBranch.orElseThrow().value() + : ModelUtil.grMomentRate(gr); - // TODO does this ever throw? (as well as epi check below) - int nMag = Mfds.magCount(grData.mMin, grData.mMax, grData.Δm); - checkState(nMag > 0, "GR MFD with no magnitudes [%s]", mfdPropsBranch.id()); + /* + * Optional epistemic uncertainty; no aleatory with GR. 
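The check below keys on the centered upper magnitude, mMax - Δm/2. For example, with mMin = 6.5, mMax = 6.7, Δm = 0.1 and an epistemic branch of -0.2, the shifted upper edge (6.7 - 0.05 - 0.2 = 6.45) falls below mMin + Δm/2 (6.55), so that branch has no magnitudes and its rate is set to zero.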
This allows + * epistemic uncertainty because the cutoff is based on mMin (6.5), but in + * reality a few faults end up with no magnitudes in their loe + */ + boolean epistemic = hasEpistemic(mfdConfig, gr.mMax() - gr.Δm() / 2); /* * TODO edge cases (Rush Peak in brange.3dip.gr.in) suggest data.mMin should * be used instead of unc.epiCutoff + * + * TODO note in documentation that epi ±0.2 is applied to mMax-Δm/2, not + * mMax + * + * TODO conduct sensitivity study for epi being applied to actual mMax, not + * centered mMax for GR MFD */ if (epistemic) { @@ -540,60 +627,67 @@ public class FaultRuptureSet implements RuptureSet { * upper bin. As a result we add Δm/2 knowing it will be removed (e.g. * for M7, M + epi 7.2 as upper limit of sequence). */ - double mMaxEpi = grData.mMax + epiBranch.value() + grData.Δm / 2; - double weightEpi = mfdRateWeight * epiBranch.weight(); - - /* - * For mMax magnitudes close to 6.5, ((mMax-epi)-mMin) can be smaller - * than Δm. If this is the case adjust mMax back up to mMin + Δm. - */ - // System.out.println("epi: " + grData.mMin + " " + mMaxEpi + " " + - // grData.Δm); - // if (mMaxEpi - grData.mMin < grData.Δm) { - // // mMaxEpi = grData.mMin + - // } + double mMaxEpi = gr.mMax() + epiBranch.value() + gr.Δm() / 2; + double weightEpi = mfdWt * epiBranch.weight(); + // System.out.println(grData.mMin + " " + grData.mMax); + // checkEpiLow(grData.mMax + epiBranch.value()) // TODO check if ever thrown - int nMagEpi = Mfds.magCount(grData.mMin, mMaxEpi, grData.Δm); + // boolean grData.mMin + (grData.Δm / 2) + int nMagEpi = Mfds.magCount(gr.mMin(), mMaxEpi, gr.Δm()); checkState(nMagEpi > 0, "GR MFD epistemic branch with no magnitudes"); + boolean epiOk = checkEpi(gr.mMin(), gr.mMax(), gr.Δm(), epiBranch.value()); + if (!epiOk) { + /* + * Formerly handled by skipping mfd/rupture;can't do that here because + * we need to preserve the entire logic tree; set rate to 0 instead + */ + System.out.println("GR MFD epistemic branch with no magnitudes"); + } - String id = mfdBranchName(rateId, mfdId, epiBranch.id()); - Mfd mfd = ModelUtil.newGrBuilder(grData, mMaxEpi) - .scaleToMomentRate(tmr) + String id = mfdBranchId(mfdId, epiBranch.id()); + Mfd mfd = ModelUtil.newGrBuilder(gr, mMaxEpi) + .scaleToMomentRate(epiOk ? 
moRate : 0) .build(); mfdTree.addBranch(id, mfd, weightEpi); } } else { - String id = mfdBranchName(rateId, mfdId); - Mfd mfd = grData.toBuilder() - .scaleToMomentRate(tmr) + Mfd mfd = gr.toBuilder() + .scaleToMomentRate(moRate) .build(); - mfdTree.addBranch(id, mfd, mfdRateWeight); + mfdTree.addBranch(mfdId, mfd, mfdWt); } } + private static boolean checkEpi(double mMin, double mMax, double Δm, double Δepi) { + return (mMax - Δm / 2 + Δepi) > (mMin + Δm / 2); + } + /* Fault SINGLE */ - private static void createSingleMfds( - LogicTree.Builder<Mfd> mfdTree, - ModelData mfdData, - Branch<Double> moRateBranch, - Branch<Mfd.Properties> mfdPropsBranch) { + private static void addSingleMfds( + LogicTree.Builder<Mfd> mfdTree, // receiver tree + Branch<Mfd.Properties> mfdBranch, // properties may include rate + Optional<Branch<Double>> moBranch, // or rate supplied separately + MfdConfig mfdConfig) { - MfdConfig mfdConfig = mfdData.mfdConfig().get(); - Mfd.Properties.Single singleData = mfdPropsBranch.value().getAsSingle(); + Mfd.Properties.Single single = mfdBranch.value().getAsSingle(); - double mfdWeight = mfdPropsBranch.weight(); - String mfdId = singleData.type.name(); // mfdPropsBranch.id(); + String mfdId = mfdBranch.id(); + double mfdWt = mfdBranch.weight(); + System.out.println(single.rate()); - double rateWeight = moRateBranch.weight(); - String rateId = moRateBranch.id(); - double tmr = moRateBranch.value(); + if (moBranch.isPresent()) { + mfdId = mfdBranchId(moBranch.orElseThrow().id(), mfdId); + mfdWt *= moBranch.orElseThrow().weight(); + } - double mfdRateWeight = mfdWeight * rateWeight; + double moRate = moBranch.isPresent() + ? moBranch.orElseThrow().value() + : single.rate() * Earthquakes.magToMoment(single.m()); /* Optional epistemic uncertainty. */ - boolean epistemic = hasEpistemic(mfdConfig, singleData.m); + boolean epistemic = hasEpistemic(mfdConfig, single.m()); /* Optional aleatory variability. 
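If aleatory properties are configured, a Gaussian MFD is substituted for the single magnitude; otherwise a SINGLE MFD is built. In both cases the result is scaled to moRate.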
*/ boolean aleatory = mfdConfig.aleatoryProperties.isPresent(); @@ -603,23 +697,23 @@ public class FaultRuptureSet implements RuptureSet { for (Branch<Double> epiBranch : mfdConfig.epistemicTree.get()) { - double mEpi = singleData.m + epiBranch.value(); - double weightEpi = mfdRateWeight * epiBranch.weight(); - String id = mfdBranchName(rateId, mfdId, epiBranch.id()); + double mEpi = single.m() + epiBranch.value(); + double weightEpi = mfdWt * epiBranch.weight(); + String id = mfdBranchId(mfdId, epiBranch.id()); /* Possibly apply aleatory variability */ if (aleatory) { MfdConfig.AleatoryProperties aleaProps = mfdConfig.aleatoryProperties.get(); Mfd mfd = ModelUtil.newGaussianBuilder(mEpi, aleaProps) - .scaleToMomentRate(tmr) + .scaleToMomentRate(moRate) .build(); mfdTree.addBranch(id, mfd, weightEpi); } else { Mfd mfd = Mfd.newSingleBuilder(mEpi) - .scaleToMomentRate(tmr) + .scaleToMomentRate(moRate) .build(); mfdTree.addBranch(id, mfd, weightEpi); } @@ -627,29 +721,27 @@ public class FaultRuptureSet implements RuptureSet { } else { /* No epistemic uncertainty */ - String id = mfdBranchName(rateId, mfdId); - /* Possibly apply aleatory variability */ if (aleatory) { MfdConfig.AleatoryProperties aleaProps = mfdConfig.aleatoryProperties.get(); - Mfd mfd = ModelUtil.newGaussianBuilder(singleData.m, aleaProps) - .scaleToMomentRate(tmr) + Mfd mfd = ModelUtil.newGaussianBuilder(single.m(), aleaProps) + .scaleToMomentRate(moRate) .build(); - mfdTree.addBranch(id, mfd, mfdRateWeight); + mfdTree.addBranch(mfdId, mfd, mfdWt); } else { - Mfd mfd = Mfd.newSingleBuilder(singleData.m) - .scaleToMomentRate(tmr) + Mfd mfd = Mfd.newSingleBuilder(single.m()) + .scaleToMomentRate(moRate) .build(); - mfdTree.addBranch(id, mfd, mfdRateWeight); + mfdTree.addBranch(mfdId, mfd, mfdWt); } } } - private static String mfdBranchName(String... args) { - return String.join(" : ", args); + private static String mfdBranchId(String... ids) { + return String.join(" : ", ids); } private static boolean hasEpistemic(MfdConfig mfdConfig, double magnitude) { @@ -670,10 +762,8 @@ public class FaultRuptureSet implements RuptureSet { * is preferred. */ static void checkEpistemic(MfdConfig mfdConfig, LogicTree<Mfd.Properties> mfdTree) { - boolean multipleSingleBranches = (mfdTree.size() > 1) && mfdTree.branches() - .stream() - .map(branch -> branch.value().type) - .allMatch(Mfd.Type.SINGLE::equals); + boolean multipleSingleBranches = (mfdTree.size() > 1) && + ModelUtil.mfdsAreType(mfdTree, Mfd.Type.SINGLE); boolean hasEpistemic = mfdConfig.epistemicTree.isPresent(); checkState( !(multipleSingleBranches && hasEpistemic), diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/FaultSource.java b/src/main/java/gov/usgs/earthquake/nshmp/model/FaultSource.java index 8f123de4..1ad50342 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/FaultSource.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/model/FaultSource.java @@ -83,14 +83,20 @@ public class FaultSource implements Source { this.branchWeight = builder.branchWeight; this.scaledMfds = ModelUtil.scaledMfdList(mfdTree, branchWeight); - System.out.println(mfdTree); + // System.out.println(mfdTree); this.ruptureLists = scaledMfds.stream() .map(this::createRuptureList) .collect(toUnmodifiableList()); + // System.out.println(ruptureLists.size()); + // TODO clean // scaledMfds.stream().map(Mfd::data).forEach(System.out::println); + // TODO fault source total MFD not yet scaled by source weight (e.g. 
dip + // branch) TODO at least for 2014 the MFD tree and total MFD are identical + // across dip branches + checkState(Iterables.size(Iterables.concat(ruptureLists)) > 0, "FaultSource has no ruptures"); } @@ -238,8 +244,11 @@ public class FaultSource implements Source { } } - checkState(ruptureList.size() > 0, "Rupture list for %s is empty", name); - return List.copyOf(ruptureList); + // We now have rates of zero for null branches that preserve + // logic tree structure but that result in empty rupture lists + // checkState(ruptureList.size() > 0, "Rupture list for %s is empty", name); + + return ruptureList; // ends up in collected unmodifiable list } /* Single use builder */ @@ -259,6 +268,7 @@ public class FaultSource implements Source { Double depth; Double rake; LogicTree<Mfd> mfdTree; + Mfd mfdTotal; Double spacing; RuptureScaling rupScaling; RuptureFloating rupFloating; @@ -301,12 +311,17 @@ public class FaultSource implements Source { } Builder mfdTree(LogicTree<Mfd> mfdTree) { - checkNotNull(mfdTree, "MFD list is null"); - checkArgument(mfdTree.size() > 0, "MFD list is empty"); + checkArgument(checkNotNull(mfdTree).size() > 0, "MFD list is empty"); this.mfdTree = mfdTree; return this; } + Builder mfdTotal(Mfd mfdTotal) { + checkNotNull(mfdTotal, "MFD total is null"); + this.mfdTotal = mfdTotal; + return this; + } + Builder surfaceSpacing(double spacing) { this.spacing = checkInRange(SURFACE_GRID_SPACING_RANGE, "Floater Offset", spacing); return this; @@ -341,7 +356,8 @@ public class FaultSource implements Source { checkState(width != null, "%s width not set", buildId); checkState(depth != null, "%s depth not set", buildId); checkState(rake != null, "%s rake not set", buildId); - checkState(mfdTree.size() > 0, "%s has no MFDs", buildId); + checkState(mfdTree != null, "%s MFD tree not set", buildId); + checkState(mfdTotal != null, "%s total MFD not set", buildId); checkState(spacing != null, "%s surface grid spacing not set", buildId); checkState(rupScaling != null, "%s rupture-scaling relation not set", buildId); checkState(rupFloating != null, "%s rupture-floating model not set", buildId); diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/GridLoader.java b/src/main/java/gov/usgs/earthquake/nshmp/model/GridLoader.java index 6a23c14a..4981064b 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/GridLoader.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/model/GridLoader.java @@ -176,7 +176,7 @@ class GridLoader { for (Branch<Mfd> branch : modelTree) { Mfd.Properties.GutenbergRichter grProps = branch.value().properties().getAsGr(); - double rate = Mfds.gutenbergRichterRate(a, grProps.b, grProps.mMin); + double rate = Mfds.gutenbergRichterRate(a, grProps.b(), grProps.mMin()); Mfd.Builder nodeMfd = Mfd.Builder.from(branch.value()) .scaleToIncrementalRate(rate); mMax.ifPresent(m -> truncate(nodeMfd, m)); @@ -239,7 +239,7 @@ class GridLoader { private static Mfd.Type checkType(LogicTree<Mfd.Properties> mfdTree) { Set<Mfd.Type> types = mfdTree.branches().stream() .map(Branch::value) - .map(p -> p.type) + .map(Mfd.Properties::type) .collect(toSet()); checkArgument(types.size() == 1, "Grid mfd-tree has multiple mfd types: %s", types); return types.iterator().next(); @@ -248,7 +248,7 @@ class GridLoader { private static double checkMinMagnitude(LogicTree<Mfd.Properties> mfdTree) { Set<Double> mins = mfdTree.branches().stream() .map(Branch::value) - .map(p -> p.getAsGr().mMin) + .map(p -> p.getAsGr().mMin()) .collect(toSet()); checkArgument(mins.size() == 1, "Grid mfd-tree 
has different mfd mMin: %s", mins); return mins.iterator().next(); @@ -257,7 +257,7 @@ class GridLoader { private static double checkMagnitudeDelta(LogicTree<Mfd.Properties> mfdTree) { Set<Double> deltas = mfdTree.branches().stream() .map(Branch::value) - .map(p -> p.getAsGr().Δm) + .map(p -> p.getAsGr().Δm()) .collect(toSet()); checkArgument(deltas.size() == 1, "Grid mfd-tree has different mfd Δm: %s", deltas); return deltas.iterator().next(); @@ -292,7 +292,8 @@ class GridLoader { /* Truncate special GR cases; e.g. WUS double counting. */ private static void truncate(Mfd.Builder builder, double mMax) { - Type type = builder.properties().type; + Type type = builder.properties().type(); + // TODO 6.5 should be obtained from config double m = (type == Type.GR_MMAX_GR) ? 6.5 : mMax; builder.transform(xy -> zeroAboveM(xy, m)); } diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/HazardModel.java b/src/main/java/gov/usgs/earthquake/nshmp/model/HazardModel.java index cc89f723..05be8680 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/HazardModel.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/model/HazardModel.java @@ -256,9 +256,11 @@ public final class HazardModel implements Iterable<SourceSet<? extends Source>> for (Leaf leaf : tree.branchMap().keySet()) { RuptureSet rs = leaf.ruptureSet; - System.out.println("Flt to SrcSet: " + rs.type() + " " + rs.name()); if (leaf.ruptureSet.type() == SourceType.CLUSTER) { + + System.out.println("Flt to ClusterSrcSet: " + rs.type() + " " + rs.name()); + ClusterRuptureSet crs = (ClusterRuptureSet) rs; ClusterSource.Builder csBuilder = new ClusterSource.Builder() .name(crs.name) @@ -279,12 +281,16 @@ public final class HazardModel implements Iterable<SourceSet<? extends Source>> clusterCount++; } else { + + System.out.println("Flt to FaultSrcSet: " + rs.type() + " " + rs.name()); + double leafWeight = tree.branchWeights().get(leaf); FaultRuptureSet frs = (FaultRuptureSet) rs; faults.source(faultRuptureSetToSource(frs, leafWeight), leafWeight); } } addSourceSet(faults.buildFaultSet()); + if (clusterCount > 0) { addSourceSet(clusters.buildClusterSet()); } @@ -309,11 +315,11 @@ public final class HazardModel implements Iterable<SourceSet<? 
extends Source>> .depth(feature.upperDepth) .rake(feature.rake) .mfdTree(frs.mfdTree) + .mfdTotal(frs.mfdTotal) .surfaceSpacing(config.surfaceSpacing) .ruptureScaling(config.ruptureScaling) .ruptureFloating(config.ruptureFloating) .branchWeight(leafWeight) - // List.of(Mfd.copyOf(frs.mfdTotal).scale(leafWeight).build())) .buildFaultSource(); } diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/InterfaceRuptureSet.java b/src/main/java/gov/usgs/earthquake/nshmp/model/InterfaceRuptureSet.java index 995fee58..66eae12c 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/InterfaceRuptureSet.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/model/InterfaceRuptureSet.java @@ -151,18 +151,18 @@ public class InterfaceRuptureSet implements RuptureSet { private void validateAndInit(String label) { checkState(!built, "Single use builder"); - checkNotNull(sectionIds, "%s section ID list", label); - checkNotNull(data, "%s model data", label); - checkNotNull(name, "%s name", label); checkNotNull(id, "%s id", label); + + checkNotNull(data, "%s model data", label); + checkNotNull(sectionIds, "%s section id list", label); checkNotNull(mfdPropsTree, "%s MFD logic tree", label); // TODO temp checking props are always the same; // don't think this is always true at least for faults checkState(mfdPropsTree.branches().stream() .map(Branch::value) - .map(props -> props.type) + .map(Mfd.Properties::type) .distinct().limit(2).count() <= 1); mfdTree = buildMfdTree(data, mfdPropsTree); @@ -249,9 +249,7 @@ public class InterfaceRuptureSet implements RuptureSet { LogicTree.Builder<Mfd> mfdTree = LogicTree.builder(MFD_TREE); for (Branch<Mfd.Properties> branch : mfdPropertiesTree) { - - Mfd.Type type = branch.value().type; - switch (type) { + switch (branch.value().type()) { case SINGLE: createSingleMfds(mfdTree, mfdData, branch); break; @@ -259,7 +257,8 @@ public class InterfaceRuptureSet implements RuptureSet { createGrMfds(mfdTree, mfdData, branch); break; default: - throw new UnsupportedOperationException(type + " MFD not supported"); + throw new UnsupportedOperationException( + branch.value().type() + " MFD not supported"); } } return mfdTree.build(); diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/InterfaceSource.java b/src/main/java/gov/usgs/earthquake/nshmp/model/InterfaceSource.java index c1137381..6edec9af 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/InterfaceSource.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/model/InterfaceSource.java @@ -160,7 +160,7 @@ public class InterfaceSource extends FaultSource { List<Rupture> createRuptureList(Mfd mfd) { List<Rupture> ruptureList = new ArrayList<>(); // TODO be careful when working with combined MFDs - RuptureFloating floatModel = (mfd.properties().type == Mfd.Type.SINGLE) + RuptureFloating floatModel = (mfd.properties().type() == Mfd.Type.SINGLE) ? 
RuptureFloating.OFF : rupFloating; for (XyPoint xy : mfd.data()) { diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/MfdConfig.java b/src/main/java/gov/usgs/earthquake/nshmp/model/MfdConfig.java index 661f2a1c..b4f54605 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/MfdConfig.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/model/MfdConfig.java @@ -86,20 +86,17 @@ class MfdConfig { static class AleatoryProperties { - final int count; - final boolean momentBalanced; - final int σSize; + final int size; + final int nσ; final double σ; AleatoryProperties( - int count, - boolean momentBalanced, - int σSize, + int size, + int nσ, double σ) { - this.count = count; - this.momentBalanced = momentBalanced; - this.σSize = σSize; + this.size = size; + this.nσ = nσ; this.σ = σ; } } diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/ModelData.java b/src/main/java/gov/usgs/earthquake/nshmp/model/ModelData.java index 9eea29ce..19b8e556 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/ModelData.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/model/ModelData.java @@ -23,13 +23,17 @@ class ModelData { private GmmSet gmms; private SourceConfig srcConfig; + private boolean clusterModel = false; - // TODO shouldn't we reuire this + // TODO shouldn't we require this private Optional<MfdConfig> mfdConfig = Optional.empty(); private Optional<Map<String, LogicTree<Mfd.Properties>>> mfdMap = Optional.empty(); + + // TODO should make rateTree more specific private Optional<LogicTree<Double>> rateTree = Optional.empty(); private Optional<LogicTree<Path>> gridRateTree = Optional.empty(); - private Optional<LogicTree<String>> slipRateTree = Optional.empty(); + // used for SLIP or RECURRENCE + private Optional<LogicTree<String>> faultRateTree = Optional.empty(); private Optional<Map<Integer, SourceFeature.NshmFault>> faultFeatureMap = Optional.empty(); private Optional<Map<Integer, SourceFeature.Interface>> interfaceFeatureMap = Optional.empty(); @@ -49,11 +53,12 @@ class ModelData { ModelData copy = new ModelData(); copy.gmms = original.gmms; copy.srcConfig = original.srcConfig; + copy.clusterModel = original.clusterModel; copy.mfdConfig = original.mfdConfig; copy.mfdMap = original.mfdMap; copy.rateTree = original.rateTree; copy.gridRateTree = original.gridRateTree; - copy.slipRateTree = original.slipRateTree; + copy.faultRateTree = original.faultRateTree; copy.gridDataDir = original.gridDataDir; copy.faultFeatureMap = original.faultFeatureMap; copy.interfaceFeatureMap = original.interfaceFeatureMap; @@ -78,6 +83,14 @@ class ModelData { return srcConfig; } + void clusterModel() { + this.clusterModel = true; + } + + boolean isClusterModel() { + return clusterModel; + } + void mfdConfig(MfdConfig mfdConfig) { this.mfdConfig = Optional.of(checkNotNull(mfdConfig)); } @@ -94,37 +107,37 @@ class ModelData { return mfdMap; } - void rateTree(LogicTree<Double> rateTree) { - this.rateTree = Optional.of(checkNotNull(rateTree)); + void gridDataDirectory(Path gridDataDir) { + checkState(this.gridDataDir.isEmpty()); + this.gridDataDir = Optional.of(checkNotNull(gridDataDir)); } - Optional<LogicTree<Double>> rateTree() { - return rateTree; + Optional<Path> gridDataDirectory() { + return gridDataDir; } - void gridRateTree(LogicTree<Path> gridRateTree) { - this.gridRateTree = Optional.of(checkNotNull(gridRateTree)); + void rateTree(LogicTree<Double> rateTree) { + this.rateTree = Optional.of(checkNotNull(rateTree)); } - Optional<LogicTree<Path>> gridRateTree() { - return gridRateTree; + 
Optional<LogicTree<Double>> rateTree() { + return rateTree; } - void slipRateTree(LogicTree<String> slipRateTree) { - this.slipRateTree = Optional.of(checkNotNull(slipRateTree)); + void faultRateTree(LogicTree<String> faultRateTree) { + this.faultRateTree = Optional.of(checkNotNull(faultRateTree)); } - Optional<LogicTree<String>> slipRateTree() { - return slipRateTree; + Optional<LogicTree<String>> faultRateTree() { + return faultRateTree; } - void gridDataDirectory(Path gridDataDir) { - checkState(this.gridDataDir.isEmpty()); - this.gridDataDir = Optional.of(checkNotNull(gridDataDir)); + void gridRateTree(LogicTree<Path> gridRateTree) { + this.gridRateTree = Optional.of(checkNotNull(gridRateTree)); } - Optional<Path> gridDataDirectory() { - return gridDataDir; + Optional<LogicTree<Path>> gridRateTree() { + return gridRateTree; } void interfaceFeatureMap(Map<Integer, SourceFeature.Interface> interfaceFeatureMap) { @@ -145,6 +158,8 @@ class ModelData { /* ↑ stateful - static ↓ */ + // TODO clean ? + enum MagnitudeBranch { M1, M2, diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/ModelFiles.java b/src/main/java/gov/usgs/earthquake/nshmp/model/ModelFiles.java index 35d6147b..6b97a839 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/ModelFiles.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/model/ModelFiles.java @@ -131,7 +131,7 @@ class ModelFiles { } /* Rate-tree containing deformation model ids. */ - static Optional<LogicTree<String>> readSlipRateTree(Path dir) { + static Optional<LogicTree<String>> readFaultRateTree(Path dir) { return read(dir, RATE_TREE, Deserialize::slipRateTree); } diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/ModelLoader.java b/src/main/java/gov/usgs/earthquake/nshmp/model/ModelLoader.java index 1db85e57..1ab59866 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/ModelLoader.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/model/ModelLoader.java @@ -13,6 +13,7 @@ import static gov.usgs.earthquake.nshmp.model.ModelFiles.MODEL_INFO; import static gov.usgs.earthquake.nshmp.model.ModelFiles.RUPTURE_SET; import static gov.usgs.earthquake.nshmp.model.ModelFiles.SOURCE_TREE; import static gov.usgs.earthquake.nshmp.model.ModelFiles.readFaultConfig; +import static gov.usgs.earthquake.nshmp.model.ModelFiles.readFaultRateTree; import static gov.usgs.earthquake.nshmp.model.ModelFiles.readFaultSections; import static gov.usgs.earthquake.nshmp.model.ModelFiles.readGmms; import static gov.usgs.earthquake.nshmp.model.ModelFiles.readGridConfig; @@ -23,7 +24,6 @@ import static gov.usgs.earthquake.nshmp.model.ModelFiles.readMfdConfig; import static gov.usgs.earthquake.nshmp.model.ModelFiles.readMfdMap; import static gov.usgs.earthquake.nshmp.model.ModelFiles.readRateTree; import static gov.usgs.earthquake.nshmp.model.ModelFiles.readSlabConfig; -import static gov.usgs.earthquake.nshmp.model.ModelFiles.readSlipRateTree; import static gov.usgs.earthquake.nshmp.model.ModelFiles.readSourceTree; import static gov.usgs.earthquake.nshmp.model.ModelFiles.readZoneConfig; import static gov.usgs.earthquake.nshmp.model.SourceType.FAULT; @@ -128,8 +128,8 @@ abstract class ModelLoader { Path dir) { switch (setting) { - case ACTIVE_CRUST: - // case STABLE_CRUST: + // case ACTIVE_CRUST: + case STABLE_CRUST: return loadCrustalSources(dir); case SUBDUCTION_INTERFACE: return ModelLoader.interfaceSources(dir); @@ -237,7 +237,7 @@ abstract class ModelLoader { try (DirectoryStream<Path> nestedDirStream = newDirectoryStream(dir, NESTED_DIR_FILTER)) { for (Path path 
: nestedDirStream) { // TODO temp clean - if (path.getFileName().toString().equals("ucerf3")) { + if (checkNotNull(path.getFileName()).toString().equals("ucerf3")) { continue; } trees.addAll(loadSourceDirectory(path, data)); @@ -288,7 +288,7 @@ abstract class ModelLoader { @Override void loadRateTree(Path dir, ModelData data) { - readSlipRateTree(dir).ifPresent(data::slipRateTree); + readFaultRateTree(dir).ifPresent(data::faultRateTree); } @Override @@ -443,7 +443,12 @@ abstract class ModelLoader { ModelData data = ModelData.copyOf(dataIn); readMfdConfig(dir).ifPresent(data::mfdConfig); - loadRateTree(dir, data); + + /* + * Don't use loadRateTree() here; nested rate trees on branches are more + * likely numeric recurrence rates than slip rate branches. + */ + readRateTree(dir).ifPresent(data::rateTree); /* Can be source-tree or source-group. */ Optional<LogicTree<Path>> tree = readSourceTree(dir); @@ -467,14 +472,14 @@ abstract class ModelLoader { if (isFaultRuptureSet) { - FaultRuptureSet frs = Deserialize.faultRuptureSet(setPath, data) - .build(); + /* All required properties set in deserializer. */ + FaultRuptureSet frs = Deserialize.faultRuptureSet(setPath, data); treeBuilder.addLeaf(branch, frs); } else { - ClusterRuptureSet crs = Deserialize.clusterRuptureSet(setPath, data) - .build(); + /* All required properties set in deserializer. */ + ClusterRuptureSet crs = Deserialize.clusterRuptureSet(setPath, data); treeBuilder.addLeaf(branch, crs); } } @@ -599,8 +604,7 @@ abstract class ModelLoader { Path ruptureSetPath = dir.resolve(RUPTURE_SET); - InterfaceRuptureSet irs = Deserialize.interfaceRuptureSet(ruptureSetPath, data) - .build(); + InterfaceRuptureSet irs = Deserialize.interfaceRuptureSet(ruptureSetPath, data); treeBuilder.addLeaf(branch, irs); } } diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/ModelUtil.java b/src/main/java/gov/usgs/earthquake/nshmp/model/ModelUtil.java index 88ea9a99..b6b85305 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/ModelUtil.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/model/ModelUtil.java @@ -56,11 +56,19 @@ class ModelUtil { /* Find the maximum mMax, assuming a tree of GR MFDs. */ static double mfdTreeMaxMagnitude(LogicTree<Mfd.Properties> propsTree) { return propsTree.branches().stream() - .mapToDouble(b -> b.value().getAsGr().mMax) + .mapToDouble(b -> b.value().getAsGr().mMax()) .max() .getAsDouble(); } + /* Check if all MFDs in a tree are the same type. */ + static boolean mfdsAreType(LogicTree<Mfd.Properties> mfdTree, Mfd.Type type) { + return mfdTree.branches() + .stream() + .map(branch -> branch.value().type()) + .allMatch(type::equals); + } + /* * Note: if we create custom MFDs for each M* branch, we'll have the same * reference x-values for each model, but when added, a new set of x-vlues @@ -75,8 +83,8 @@ class ModelUtil { */ private static Mfd mMaxGrMfd(Mfd.Properties props, double mMax) { GutenbergRichter gr = props.getAsGr(); - return Mfd.newGutenbergRichterBuilder(gr.mMin, mMax, gr.Δm, gr.b) - .transform(p -> p.set(p.x() > gr.mMax ? 0.0 : p.y())) + return Mfd.newGutenbergRichterBuilder(gr.mMin(), mMax, gr.Δm(), gr.b()) + .transform(p -> p.set(p.x() > gr.mMax() ? 
0.0 : p.y())) .build(); } @@ -97,7 +105,7 @@ class ModelUtil { double[] weights = new double[mfdTree.size()]; for (int i = 0; i < mfdTree.size(); i++) { Branch<Mfd.Properties> mfd = mfdTree.branches().get(i); - magnitudes[i] = mfd.value().getAsSingle().m; + magnitudes[i] = mfd.value().getAsSingle().m(); weights[i] = mfd.weight(); } return Mfd.create(magnitudes, weights); @@ -212,31 +220,37 @@ class ModelUtil { } /* - * Returns the expected magnitude for the supplied length rounded to two - * decimal places. Note that the function used here is not the inverted form - * of that used in RuptureScaling.NSHM_FAULT_WC94_LENGTH.dimensions() See - * Wells & Coppersmith (1994) Table 2A. + * The expected magnitude for the supplied length rounded to two decimal + * places. Note that the function used here is not the inverted form of that + * used in RuptureScaling.NSHM_FAULT_WC94_LENGTH.dimensions() See Wells & + * Coppersmith (1994) Table 2A. */ static double magnitudeWc94(double length) { return Maths.round(5.08 + 1.16 * log10(length), 2); } - /* Convenience method to pass aleatory properties. */ + /* Gaussian MFD builder. */ static Mfd.Builder newGaussianBuilder(double m, MfdConfig.AleatoryProperties aleaProps) { return Mfd.newGaussianBuilder( m, - aleaProps.count, + aleaProps.size, aleaProps.σ, - aleaProps.σSize); + aleaProps.nσ); + } + + /* GR MFD builder with mMax override. */ + static Mfd.Builder newGrBuilder(Mfd.Properties.GutenbergRichter gr, double mMax) { + return Mfd.newGutenbergRichterBuilder(gr.mMin(), mMax, gr.Δm(), gr.b()); } - /* Convenience method to override mMax. */ - static Mfd.Builder newGrBuilder(Mfd.Properties.GutenbergRichter grData, double mMax) { - return Mfd.newGutenbergRichterBuilder( - grData.mMin, - mMax, - grData.Δm, - grData.b); + /* Expected moment rate of a GR MFD. */ + static double grMomentRate(Mfd.Properties.GutenbergRichter gr) { + double mMin = gr.mMin() + gr.Δm() / 2; + double incrRate = Mfds.gutenbergRichterRate(gr.a(), gr.b(), mMin); + return Mfds.momentRate(gr.toBuilder() + .scaleToIncrementalRate(incrRate) + .build() + .data()); } } diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/SourceFeature.java b/src/main/java/gov/usgs/earthquake/nshmp/model/SourceFeature.java index 7e20819b..89b75f67 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/SourceFeature.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/model/SourceFeature.java @@ -8,6 +8,7 @@ import static gov.usgs.earthquake.nshmp.Faults.checkSlipRate; import static gov.usgs.earthquake.nshmp.Text.checkName; import java.awt.geom.Area; +import java.lang.reflect.Type; import java.net.URL; import java.nio.file.Path; import java.util.ArrayList; @@ -17,6 +18,8 @@ import java.util.NoSuchElementException; import java.util.Optional; import java.util.OptionalDouble; +import com.google.common.reflect.TypeToken; + import gov.usgs.earthquake.nshmp.Earthquakes; import gov.usgs.earthquake.nshmp.geo.Location; import gov.usgs.earthquake.nshmp.geo.LocationList; @@ -59,8 +62,8 @@ public abstract class SourceFeature { /** * An immutable list of all states in which the feature is located. If - * {@code states().size() > 1}, the states are ordered relative to decreasing - * percentage of the feature (trace or area) in each state. + * {@code states().size() > 1}, the states are generally ordered relative to + * decreasing percentage of the feature (trace or area) in each state. 
*/ public final List<String> states; @@ -309,17 +312,22 @@ public abstract class SourceFeature { } /* - * A NSHM can have either single rate, which maps to "GEO", or a rate-map with + * A NSHM can have either single rate, which maps to "GEO", no rate (becuase + * type is RECURRENCE), which maps to GEO with a rate of 0, or a rate-map with * multiple rates. */ private static Map<String, Double> nshmRateMap(Properties props) { OptionalDouble rate = props.getDouble(Key.RATE); + RateType type = props.get(Key.RATE_TYPE, RateType.class).orElseThrow(); if (rate.isPresent()) { return Map.of("GEO", rate.getAsDouble()); } - @SuppressWarnings("unchecked") - Map<String, Double> rateMap = props.get(Key.RATE_MAP, Map.class).orElseThrow(); - return rateMap; + if (rate.isEmpty() && type == RateType.RECURRENCE) { + return Map.of("GEO", 0.0); + } + Type mapType = new TypeToken<Map<String, Double>>() {}.getType(); + Optional<Map<String, Double>> rateMap = props.get(Key.RATE_MAP, mapType); + return rateMap.orElseThrow(); } /** A section of a subduction interface. */ diff --git a/src/main/java/gov/usgs/earthquake/nshmp/package-info.java b/src/main/java/gov/usgs/earthquake/nshmp/package-info.java new file mode 100644 index 00000000..466b8d84 --- /dev/null +++ b/src/main/java/gov/usgs/earthquake/nshmp/package-info.java @@ -0,0 +1,7 @@ +/** + * <i>nshmp-lib</i> is a product of the U.S. Geologic Survey National Seismic + * Hazard Model Project (NSHMP). It contains classes used to parse and + * initialize National Seismic Hazard Models (NSHMs) and compute earthquake + * hazard (and related calculations) therefrom. + */ +package gov.usgs.earthquake.nshmp; diff --git a/src/main/java/gov/usgs/earthquake/nshmp/tree/LogicTree.java b/src/main/java/gov/usgs/earthquake/nshmp/tree/LogicTree.java index 2a846bf0..f56da83c 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/tree/LogicTree.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/tree/LogicTree.java @@ -1,9 +1,12 @@ package gov.usgs.earthquake.nshmp.tree; +import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkState; import java.util.ArrayList; +import java.util.HashSet; import java.util.List; +import java.util.Set; import gov.usgs.earthquake.nshmp.Text; import gov.usgs.earthquake.nshmp.data.DoubleData; @@ -116,6 +119,7 @@ public interface LogicTree<T> extends Iterable<Branch<T>> { final String name; List<Branch<T>> branches = new ArrayList<>(); + Set<String> ids = new HashSet<>(); double[] cumulativeWeights; boolean built = false; @@ -136,11 +140,14 @@ public interface LogicTree<T> extends Iterable<Branch<T>> { * @return this {@code Builder} object * @throws NullPointerException if {@code id} or {@code value} is null * @throws IllegalArgumentException if {@code id} is empty or whitespace + * @throws IllegalArgumentException if a branch with {@code id} has already + * been added * @throws IllegalArgumentException if {@code weight} is outside the range * (0..1] */ public Builder<T> addBranch(String id, T value, double weight) { branches.add(new RegularBranch<>(id, value, weight)); + checkId(id); return this; } @@ -171,6 +178,11 @@ public interface LogicTree<T> extends Iterable<Branch<T>> { .mapToDouble(Branch::weight) .toArray(); } + + void checkId(String id) { + checkArgument(!ids.contains(id), "Duplicate branch [%s]", id); + ids.add(id); + } } /** @@ -192,11 +204,14 @@ public interface LogicTree<T> extends Iterable<Branch<T>> { * @param weight of the branch * @return this {@code Builder} object * 
@throws NullPointerException if {@code id} or {@code value} is null + * @throws IllegalArgumentException if a branch with {@code id} has already + * been added * @throws IllegalArgumentException if {@code weight} is outside the range * (0..1] */ public EnumBuilder<E, T> addBranch(E id, T value, double weight) { branches.add(new EnumBranch<E, T>(id, value, weight)); + checkId(id.name()); return this; } } @@ -219,11 +234,14 @@ public interface LogicTree<T> extends Iterable<Branch<T>> { * @param weight of the branch * @return this {@code Builder} object * @throws NullPointerException if {@code id} is null + * @throws IllegalArgumentException if a branch with {@code id} has already + * been added * @throws IllegalArgumentException if {@code weight} is outside the range * (0..1] */ public EnumValueBuilder<E> addBranch(E id, double weight) { branches.add(new EnumValueBranch<E>(id, weight)); + checkId(id.name()); return this; } } @@ -246,11 +264,14 @@ public interface LogicTree<T> extends Iterable<Branch<T>> { * @return this {@code Builder} object * @throws NullPointerException if {@code id} is null * @throws IllegalArgumentException if {@code id} is empty or whitespace + * @throws IllegalArgumentException if a branch with {@code id} has already + * been added * @throws IllegalArgumentException if {@code weight} is outside the range * (0..1] */ public StringValueBuilder addBranch(String id, double weight) { branches.add(new StringValueBranch(id, weight)); + checkId(id); return this; } } -- GitLab
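
In FaultRuptureSet, the reworked addSingleMfds() resolves one total moment rate per branch: it is either taken from a separately supplied moment-rate branch or derived from the annual rate carried on the SINGLE properties. The sketch below isolates that arithmetic, reusing only calls that appear in this patch (Earthquakes.magToMoment, Mfd.newSingleBuilder, scaleToMomentRate); the helper name and parameters are illustrative, not part of the change.

    /* Sketch: total moment rate for one SINGLE branch. */
    static Mfd singleMfd(Optional<Branch<Double>> moBranch, double m, double annualRate) {
      double moRate = moBranch.isPresent()
          ? moBranch.orElseThrow().value()           // moment rate supplied separately
          : annualRate * Earthquakes.magToMoment(m); // rate carried on the properties
      return Mfd.newSingleBuilder(m)
          .scaleToMomentRate(moRate)
          .build();
    }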
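
Where an epistemic tree is configured, each SINGLE branch fans out into one sub-branch per magnitude offset, with ids joined the way mfdBranchId() joins them and weights multiplied through. A sketch of that expansion with a hypothetical three-point tree; the offsets, weights, tree name, and moment rate are assumptions for illustration, and LogicTree.builder is assumed to take the tree name as a string, as it appears to elsewhere in this patch.

    /* Sketch: hypothetical ±0.2 epistemic expansion of a single magnitude. */
    double m = 7.0;
    double moRate = 1.0e17;                  // N·m/yr, illustrative
    double[] dM = { -0.2, 0.0, 0.2 };
    double[] wt = { 0.25, 0.5, 0.25 };       // sums to 1
    LogicTree.Builder<Mfd> mfdTree = LogicTree.builder("mfd-tree");
    for (int i = 0; i < dM.length; i++) {
      String id = String.join(" : ", "single", "epi" + dM[i]); // mirrors mfdBranchId()
      Mfd mfd = Mfd.newSingleBuilder(m + dM[i])
          .scaleToMomentRate(moRate)
          .build();
      mfdTree.addBranch(id, mfd, wt[i]);
    }
    LogicTree<Mfd> tree = mfdTree.build();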
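
MfdConfig.AleatoryProperties is trimmed to size, nσ, and σ, which map one-to-one onto the Gaussian MFD builder through ModelUtil.newGaussianBuilder(). The direct call looks like the sketch below; the specific values (11 bins spanning ±2σ of 0.12 magnitude units) are assumptions chosen only to show the argument order used in this patch.

    /* Sketch: aleatory (Gaussian) MFD about a characteristic magnitude. */
    double m = 7.0;
    int size = 11;                           // AleatoryProperties.size
    int nσ = 2;                              // AleatoryProperties.nσ
    double σ = 0.12;                         // AleatoryProperties.σ
    Mfd gaussian = Mfd.newGaussianBuilder(m, size, σ, nσ)
        .scaleToMomentRate(1.0e17)           // illustrative target moment rate
        .build();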
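
In GridLoader, each node's a-value is converted to the incremental rate of the lowest magnitude bin with Mfds.gutenbergRichterRate(a, b, mMin) before the branch MFD is rescaled. The sketch below isolates that step as a small helper, assuming the method evaluates the standard Gutenberg-Richter relation 10^(a − b·m); gridMfd stands in for the model-tree branch value, and the mMax truncation is omitted.

    /* Sketch: rescale one grid-node GR MFD to its a-value derived rate. */
    static Mfd scaleNodeMfd(Mfd gridMfd, double a, double b, double mMin) {
      double rate = Mfds.gutenbergRichterRate(a, b, mMin); // e.g. a=2.5, b=0.8, mMin=4.7 → ≈0.055
      return Mfd.Builder.from(gridMfd)
          .scaleToIncrementalRate(rate)
          .build();
    }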
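
The special-case truncation in GridLoader zeroes all bins above a cutoff magnitude via a point transform. Below is a sketch of the same pattern on a free-standing builder, assuming XyPoint exposes x(), y(), and set(double) as it is used in ModelUtil; the GR parameters and the 6.5 cutoff are illustrative (the patch itself notes the cutoff should come from config).

    /* Sketch: zero rates above a cutoff magnitude (cf. GridLoader.truncate). */
    double mCut = 6.5;
    Mfd truncated = Mfd.newGutenbergRichterBuilder(4.7, 7.5, 0.1, 0.8) // mMin, mMax, Δm, b
        .transform(p -> p.set(p.x() > mCut ? 0.0 : p.y()))
        .build();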
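
Several places now require that every branch of an mfd-tree agree on a property (type, mMin, Δm); GridLoader collects the property into a set and checks its size, and ModelUtil.mfdsAreType() is the boolean variant used by checkEpistemic(). A sketch of the set-based pattern, using only the LogicTree and Branch accessors shown in this patch:

    /* Sketch: true if every branch reports the same MFD type. */
    static boolean sameMfdType(LogicTree<Mfd.Properties> mfdTree) {
      Set<Mfd.Type> types = mfdTree.branches().stream()
          .map(Branch::value)
          .map(Mfd.Properties::type)
          .collect(Collectors.toSet());
      return types.size() == 1;
    }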
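
ModelUtil gains grMomentRate(), which scales a GR distribution to the incremental rate at its first bin center and sums moment over the result, and keeps magnitudeWc94() for the Wells & Coppersmith (1994) length-magnitude relation. The sketch below spells out both calculations; the GR parameters and the 40 km rupture length are assumptions used only to make the numbers concrete.

    /* Sketch: expected moment rate of a GR MFD (cf. ModelUtil.grMomentRate). */
    double a = 2.0, b = 0.9, mMin = 6.5, mMax = 7.5, Δm = 0.1;
    double incrRate = Mfds.gutenbergRichterRate(a, b, mMin + Δm / 2);
    double moRate = Mfds.momentRate(
        Mfd.newGutenbergRichterBuilder(mMin, mMax, Δm, b)
            .scaleToIncrementalRate(incrRate)
            .build()
            .data());

    /* Wells & Coppersmith (1994): magnitude for a 40 km rupture length. */
    double mWc94 = Maths.round(5.08 + 1.16 * Math.log10(40.0), 2); // 6.94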
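
SourceFeature.nshmRateMap() now reads the rate map through the generic Properties.get(key, Type) overload with a Guava TypeToken instead of an unchecked cast, and RECURRENCE-type features with no rate fall back to a zero GEO rate. A compact sketch of the TypeToken mechanics; props and Key.RATE_MAP are the feature properties and key used in SourceFeature, and the zero-rate default simply illustrates the fallback.

    /* Sketch: generic property retrieval without an unchecked cast. */
    Type mapType = new TypeToken<Map<String, Double>>() {}.getType();
    Optional<Map<String, Double>> rateMap = props.get(Key.RATE_MAP, mapType);
    Map<String, Double> rates = rateMap.orElse(Map.of("GEO", 0.0));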
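
The LogicTree builders now track branch ids and reject duplicates through checkId(). A short usage sketch; the tree name, ids, values, and weights are illustrative, and the factory call assumes the static LogicTree.builder(name) method used elsewhere in this patch.

    /* Sketch: duplicate branch ids are now rejected. */
    LogicTree.Builder<Double> rates = LogicTree.builder("slip-rate");
    rates.addBranch("geodetic", 1.0, 0.5);
    rates.addBranch("geologic", 2.0, 0.5);
    // rates.addBranch("geodetic", 3.0, 0.5); // would throw: "Duplicate branch [geodetic]"
    LogicTree<Double> tree = rates.build();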