diff --git a/src/main/java/gov/usgs/earthquake/nshmp/Faults.java b/src/main/java/gov/usgs/earthquake/nshmp/Faults.java index 2373376ac9ba8da91fc22f3af9fe71da16a228fd..c1b3d23547bc15806fbe834bfa1aab2cafda1f9e 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/Faults.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/Faults.java @@ -104,7 +104,15 @@ public final class Faults { } public static double width(double dip, double zUpper, double zLower) { - return (zLower - zUpper) / Math.sin(dip * Maths.TO_RADIANS); + return sinProjection(dip, zLower - zUpper); + } + + public static double projectVerticalSlip(double dip, double slip) { + return sinProjection(dip, slip); + } + + private static double sinProjection(double dip, double value) { + return value / Math.sin(dip * Maths.TO_RADIANS); } /** diff --git a/src/main/java/gov/usgs/earthquake/nshmp/Maths.java b/src/main/java/gov/usgs/earthquake/nshmp/Maths.java index 1b38fd79849aa915e152ce63f63e8f578f4e7129..b794ce31b65cbe5f6d5d28579b191c9ec2b93980 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/Maths.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/Maths.java @@ -4,6 +4,7 @@ import static gov.usgs.earthquake.nshmp.data.DoubleData.multiply; import static gov.usgs.earthquake.nshmp.data.DoubleData.sum; import java.math.BigDecimal; +import java.math.MathContext; import java.math.RoundingMode; import java.util.Arrays; @@ -158,6 +159,19 @@ public final class Maths { return x < μ ? 1.0 : 0.0; } + /** + * TODO document; change other round methods to roundToScale (breaking change) + * + * @param value + * @param digits + * @return + */ + public static double roundToDigits(double value, int digits) { + return BigDecimal.valueOf(value) + .round(new MathContext(digits, RoundingMode.HALF_UP)) + .doubleValue(); + } + /** * Round a double to a specified number of decimal places according to * {@link RoundingMode#HALF_UP}. Internally this method uses the scaling and diff --git a/src/main/java/gov/usgs/earthquake/nshmp/data/Sequences.java b/src/main/java/gov/usgs/earthquake/nshmp/data/Sequences.java index 03af602d5e93204975dec6ca85b3d02a534c91ba..75054b49ef737642762ea2a590566f8665540faa 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/data/Sequences.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/data/Sequences.java @@ -72,53 +72,48 @@ public class Sequences { return -1; } - /** + /* * Ensure validity of sequence discretization parameters. Confirms that for a - * specified range {@code [min..max]} and {@code Δ} that: - * - * <ul><li>{@code min}, {@code max}, and {@code Δ} are finite</li> - * - * <li>{@code max > min}</li> - * - * <li>{@code Δ > 0}</li> - * - * <li>{@code Δ ≤ max - min}</li></ul> - * - * @param min value - * @param max value - * @param Δ discretization delta - * @return the supplied {@code Δ} for use inline + * specified range [min..max] and Δ that (1) min, max, and Δ are finite, (2) + * max > min, and (3) Δ > 0. Returns the supplied Δ for use inline. 
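
The new Maths.roundToDigits above still carries a TODO for documentation; the practical distinction from the existing Maths.round is significant digits versus decimal places. A minimal sketch with illustrative values:

    Maths.roundToDigits(123.456, 4); // 123.5  (4 significant digits, HALF_UP)
    Maths.round(123.456, 2);         // 123.46 (2 decimal places, HALF_UP)
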
*/ - static double checkDelta(double min, double max, double Δ) { + static double checkSequenceParameters(double min, double max, double Δ) { DoubleData.checkFinite("Sequence minimum", min); DoubleData.checkFinite("Sequence maximum", max); DoubleData.checkFinite("Sequence Δ", Δ); - checkArgument(max > min, "max [%s] <= min [%s]", max, min); - checkArgument(Δ > 0.0, "Invalid Δ [%s]", Δ); - checkArgument(Δ <= max - min, "Δ [%s] > max - min [%s]", Δ, max - min); + checkArgument(max > min, "Sequence max [%s] <= min [%s]", max, min); + checkArgument(Δ > 0.0, "Sequence Δ [%s] < 0", Δ); return Δ; } /** - * Create a new array sequence builder. + * Create a new sequential value array builder. If {@code ((max - min) / Δ)} + * is reasonable close to an integer (i.e. the value may be off by some + * expected double precision error), {@code max} will be included in the + * sequence. If supplied {@code min} and {@code max} values are 'centered' and + * {@code (max - min) < Δ}, or they are bin edges and not 'centered' and + * {@code ((max - Δ/2) - (min + Δ/2)) < Δ}, the sequence will contain only one + * value. * * @param min value for sequence; if sequence is 'centered', {@code min} will - * be the first sequence value, otherwise first value will be - * {@code min + Δ/2}. + * be the first value, otherwise first value will be {@code min + Δ/2} * @param max value for sequence; if sequence is 'centered', {@code max} will - * be the last sequence value, otherwise last value will be - * {@code max - Δ/2}. + * be the last value, otherwise last value will be {@code max - Δ/2} * @param Δ sequence step size * @return a new array sequence builder + * @throws IllegalArgumentException if (1) {@code min}, {@code max}, and + * {@code Δ} are not finite (2) {@code max <= min}, or (3) + * {@code Δ <= 0} */ public static ArrayBuilder arrayBuilder(double min, double max, double Δ) { return new ArrayBuilder(min, max, Δ); } /** - * Array sequence builder. Sequences are often used to mark the bin centers of - * a histogram, for example the x-values or magnitudes in a - * magnitude-frequency Distribution (MFD). + * Sequential value array builder. Monotonically increasing or decreasing + * arrays with uniformely spaced values are commonly used to mark the bin + * centers of a histogram, for example the x-values or magnitudes in a + * magnitude-frequency distribution (MFD). */ public static class ArrayBuilder { @@ -158,18 +153,6 @@ public class Sequences { return this; } - // TODO clean - public static void main(String[] args) { - double[] test = arrayBuilder(5.05, 7.83, 0.1).centered().build(); - System.out.println(Arrays.toString(test)); - test = arrayBuilder(5.05, 7.85, 0.1).scale(4).centered().build(); - System.out.println(Arrays.toString(test)); - test = arrayBuilder(6.55625, 7.34385, 0.1125).scale(5).centered().build(); - System.out.println(Arrays.toString(test)); - test = arrayBuilder(6.5, 7.4, 0.1125).scale(5).build(); - System.out.println(Arrays.toString(test)); - } - /** * Sets the precision (or {@link BigDecimal#scale()}) of the values in the * sequence. @@ -190,8 +173,8 @@ public class Sequences { * decreasing sequence of values. */ public double[] build() { - double min = centered ? this.min : this.min + (Δ / 2.0); - double max = centered ? this.max : this.max - (Δ / 2.0); + double min = centered ? this.min : this.min + Δ / 2.0; + double max = centered ? this.max : this.max - Δ / 2.0; double[] sequence = descending ? 
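
The removed main() above served as informal usage documentation for the builder; a sketch assembled from those calls:

    // bin centers from 5.05 to 7.85 at Δ = 0.1, values rounded to 4 decimal places
    double[] centers = Sequences.arrayBuilder(5.05, 7.85, 0.1)
        .scale(4)
        .centered()
        .build();

    // bin edges from 6.5 to 7.4 at Δ = 0.1125: because the sequence is not
    // 'centered', the resulting values span min + Δ/2 to max - Δ/2
    double[] fromEdges = Sequences.arrayBuilder(6.5, 7.4, 0.1125)
        .scale(5)
        .build();
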
DoubleData.flip(buildSequence(-max, -min, Δ)) : buildSequence(min, max, Δ); @@ -201,15 +184,18 @@ public class Sequences { } /* - * MFD (mMax - mMin) / Δm should be reasonably close to an integer; method - * rounds values at 1e-6 before casting to skip double precision errors. + * Method rounds (max-min)/Δ to 1e-6 before casting to an integer to + * determine the number of elements in the sequence. If (max-min)/Δ is + * reasonably close to an integer but max is off by some double-precision + * error (e.g. 6.7999999999), then max of 6.8 will be the upper end of the + * sequence. */ - private static double[] buildSequence(double min, double max, double step) { - checkDelta(min, max, step); - int size = ((int) Maths.round(((max - min) / step), 6)) + 1; + private static double[] buildSequence(double min, double max, double Δ) { + checkSequenceParameters(min, max, Δ); + int size = ((int) Maths.round(((max - min) / Δ), 6)) + 1; checkArgument(Range.openClosed(0, 100000).contains(size), "sequence size"); return IntStream.range(0, size) - .mapToDouble(i -> min + step * i) + .mapToDouble(i -> min + Δ * i) .toArray(); } } diff --git a/src/main/java/gov/usgs/earthquake/nshmp/fault/surface/RuptureScaling.java b/src/main/java/gov/usgs/earthquake/nshmp/fault/surface/RuptureScaling.java index 6fb8c7bc902214d247af8a3a9d96f2435570e85b..44ca3b67fe8154a6fba9881cb49b4bbcfaf2c8ed 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/fault/surface/RuptureScaling.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/fault/surface/RuptureScaling.java @@ -56,21 +56,18 @@ public enum RuptureScaling { */ NSHM_FAULT_WC94_LENGTH { @Override - public Dimensions dimensions(double mag, - double maxWidth) { + public Dimensions dimensions(double mag, double maxWidth) { double length = lengthWc94(mag); return new Dimensions(length, min(maxWidth, length)); } @Override - public Map<Dimensions, Double> dimensionsDistribution(double mag, - double maxWidth) { + public Map<Dimensions, Double> dimensionsDistribution(double mag, double maxWidth) { throw new UnsupportedOperationException(); } @Override - public double pointSourceDistance(double mag, - double distance) { + public double pointSourceDistance(double mag, double distance) { throw new UnsupportedOperationException(); } }, @@ -84,21 +81,18 @@ public enum RuptureScaling { */ NSHM_FAULT_CA_ELLB_WC94_AREA { @Override - public Dimensions dimensions(double mag, - double maxWidth) { + public Dimensions dimensions(double mag, double maxWidth) { double length = lengthCa08(mag, maxWidth); return new Dimensions(length, min(maxWidth, length)); } @Override - public Map<Dimensions, Double> dimensionsDistribution(double mag, - double maxWidth) { + public Map<Dimensions, Double> dimensionsDistribution(double mag, double maxWidth) { throw new UnsupportedOperationException(); } @Override - public double pointSourceDistance(double mag, - double distance) { + public double pointSourceDistance(double mag, double distance) { throw new UnsupportedOperationException(); } }, @@ -111,21 +105,18 @@ public enum RuptureScaling { NSHM_POINT_WC94_LENGTH { /* Steve Harmsen likened 1.5 to the Golden Ratio, 1.618... 
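
For the size computation in Sequences.buildSequence above, rounding at 1e-6 absorbs the usual floating-point error in (max - min) / Δ; for example, assuming the quotient is only off by a few ulps:

    double steps = (7.85 - 5.05) / 0.1;          // nominally 28, e.g. 28.000000000000004
    int size = (int) Maths.round(steps, 6) + 1;  // 29 values: 5.05, 5.15, ..., 7.85
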
*/ @Override - public Dimensions dimensions(double mag, - double maxWidth) { + public Dimensions dimensions(double mag, double maxWidth) { double length = lengthWc94(mag); return new Dimensions(length, min(maxWidth, length / 1.5)); } @Override - public Map<Dimensions, Double> dimensionsDistribution(double mag, - double maxWidth) { + public Map<Dimensions, Double> dimensionsDistribution(double mag, double maxWidth) { throw new UnsupportedOperationException(); } @Override - public double pointSourceDistance(double mag, - double distance) { + public double pointSourceDistance(double mag, double distance) { return correctedRjb(mag, distance, RJB_WC94LENGTH); } }, @@ -139,20 +130,17 @@ public enum RuptureScaling { NSHM_SUB_GEOMAT_LENGTH { @Override - public Dimensions dimensions(double mag, - double maxWidth) { + public Dimensions dimensions(double mag, double maxWidth) { return new Dimensions(pow(10.0, (mag - 4.94) / 1.39), maxWidth); } @Override - public Map<Dimensions, Double> dimensionsDistribution(double mag, - double maxWidth) { + public Map<Dimensions, Double> dimensionsDistribution(double mag, double maxWidth) { throw new UnsupportedOperationException(); } @Override - public double pointSourceDistance(double mag, - double distance) { + public double pointSourceDistance(double mag, double distance) { return correctedRjb(mag, distance, RJB_GEOMATRIX); } }, @@ -175,15 +163,13 @@ public enum RuptureScaling { .build(); @Override - public Dimensions dimensions(double mag, - double maxWidth) { + public Dimensions dimensions(double mag, double maxWidth) { double area = pow(10, (mag - 4.0)); return dimensionCalc(area, maxWidth); } @Override - public Map<Dimensions, Double> dimensionsDistribution(double mag, - double maxWidth) { + public Map<Dimensions, Double> dimensionsDistribution(double mag, double maxWidth) { double area = pow(10, (mag - 4.0)); Map<Dimensions, Double> dimensionsMap = new LinkedHashMap<>(); for (int i = 0; i < normal2s.data().size(); i++) { @@ -216,8 +202,7 @@ public enum RuptureScaling { */ NSHM_SOMERVILLE { @Override - public Dimensions dimensions(double mag, - double maxWidth) { + public Dimensions dimensions(double mag, double maxWidth) { double area = pow(10, mag - 4.366); double width = sqrt(area); return (width < maxWidth) ? new Dimensions(width, width) @@ -246,8 +231,7 @@ public enum RuptureScaling { */ NONE { @Override - public Dimensions dimensions(double mag, - double maxWidth) { + public Dimensions dimensions(double mag, double maxWidth) { throw new UnsupportedOperationException(); } @@ -367,9 +351,15 @@ public enum RuptureScaling { return area / width; } + /** + * Container class for computed rupture dimensions. + */ public final static class Dimensions { + /** Rupture or fault length. */ public final double length; + + /** Rupture or fault width. */ public final double width; private Dimensions(double length, double width) { diff --git a/src/main/java/gov/usgs/earthquake/nshmp/geo/Location.java b/src/main/java/gov/usgs/earthquake/nshmp/geo/Location.java index 8adad0fe13fb64e9c55b27cc96310d81e3bf18bd..a77ec25a941891e93531c8ad14c170ec0a6ab003 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/geo/Location.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/geo/Location.java @@ -14,7 +14,7 @@ import gov.usgs.earthquake.nshmp.Maths; * expressed in terms of longitude and latitude in decimal degrees, and depth in * km. Location depth is always positive-down, consistent with seismological * convention. 
Locations may be defined using longitude values in the range: - * [-180°..360°]. Location instances are immutable. + * (-360°..360°). Location instances are immutable. * * <p>Note that constructors and static factory methods take arguments in the * order: {@code [lon, lat, depth]}, which is consistent with {@code String} diff --git a/src/main/java/gov/usgs/earthquake/nshmp/geo/json/Properties.java b/src/main/java/gov/usgs/earthquake/nshmp/geo/json/Properties.java index 4b934895366b819960446ae1e0dd143d19801ef6..ddabcc2d90b39cf0940d0e0b98066fd2af765bde 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/geo/json/Properties.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/geo/json/Properties.java @@ -187,11 +187,11 @@ public final class Properties { * Add a property to this builder. * * @param key for value - * @param value for key + * @param value for key; may be {@code null} * @return this builder */ public Builder put(String key, Object value) { - map.put(checkNotNull(key), checkNotNull(value)); + map.put(checkNotNull(key), value); return this; } diff --git a/src/main/java/gov/usgs/earthquake/nshmp/mfd/Mfd.java b/src/main/java/gov/usgs/earthquake/nshmp/mfd/Mfd.java index 02e856d0dde52532736452de8269a0a87b33709c..8acc1dd9fd49113432862c06c7fcd547ade241d1 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/mfd/Mfd.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/mfd/Mfd.java @@ -352,6 +352,7 @@ public final class Mfd { double mMax = checkMagnitude(μ + nσ * σ); double Δm = (mMax - mMin) / (nm - 1); double[] magnitudes = Sequences.arrayBuilder(mMin, mMax, Δm) + .centered() .scale(5) .build(); Builder builder = new Builder(props, magnitudes, Optional.empty()); @@ -364,7 +365,7 @@ public final class Mfd { double mMin = checkMagnitude(props.mMin); double mMax = checkMagnitude(props.mMax); double[] magnitudes = Sequences.arrayBuilder(mMin, mMax, props.Δm) - .scale(3) + .scale(4) .build(); Builder builder = new Builder(props, magnitudes, Optional.empty()); builder.mfd.forEach(p -> p.set(gutenbergRichterRate(props.a, props.b, p.x()))); @@ -377,7 +378,7 @@ public final class Mfd { double mMax = checkMagnitude(props.mMax); double mc = checkMagnitude(props.mc); double[] magnitudes = Sequences.arrayBuilder(mMin, mMax, props.Δm) - .scale(3) + .scale(4) .build(); Builder builder = new Builder(props, magnitudes, Optional.empty()); GrTaper grTaper = new GrTaper(props.Δm, props.b, mc); @@ -571,8 +572,13 @@ public final class Mfd { this.rate = 1.0; } + // TODO use case for createSingle(m,r) and createGr(...) where + // rate or a-value is known in advance, e.g. 
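
The .centered() call added to the Gaussian MFD builder above matters because mMin and mMax are computed as bin centers (μ ± nσ·σ) with Δm = (mMax - mMin) / (nm - 1); treating them as centers keeps mMax (and μ) in the sequence. A sketch with hypothetical properties μ = 6.75, σ = 0.12, nσ = 2, nm = 5:

    // mMin = 6.75 - 2*0.12 = 6.51, mMax = 6.75 + 2*0.12 = 6.99, Δm = 0.48 / 4 = 0.12
    double[] magnitudes = Sequences.arrayBuilder(6.51, 6.99, 0.12)
        .centered() // min and max are bin centers, so 6.99 is the last value
        .scale(5)
        .build();   // [6.51, 6.63, 6.75, 6.87, 6.99]
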
when crateing model fault + // MFDs that will likely be subject to uncertainty modifications + // TODO public toMfd(); instead of toBuilder() // lets prefer just initializing builders with copies from() + // we shouldn't be dealing in LogicTree<Mfd.Properties>, just make copies @Override public Builder toBuilder() { diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/Deserialize.java b/src/main/java/gov/usgs/earthquake/nshmp/model/Deserialize.java index 6cca5901ae314d2057a714c4619350fb9b344711..56cc938cc3295b42a4c30ce4d254f0e900ed3816 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/Deserialize.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/model/Deserialize.java @@ -68,6 +68,7 @@ class Deserialize { static final String RUPTURE_SETS = "rupture-sets"; static final String MFD_TREE = "mfd-tree"; static final String GMM_TREE = "gmm-tree"; + static final String SLIP_RATE_TREE = "slip-rate-tree"; static final String GRID_RATE_TREE = "grid-rate-tree"; static final String RATE_TREE = "rate-tree"; static final String SOURCE_TREE = "source-tree"; @@ -170,7 +171,7 @@ class Deserialize { return builder.build(); } - /* Create a rate logic tree. */ + /* Create a numeric rate logic tree. */ static LogicTree<Double> rateTree(Path json) { return doubleTreeWithNulls(jsonArray(json), RATE_TREE); } @@ -188,6 +189,18 @@ class Deserialize { return builder.build(); } + /* Create a fault source rate logic tree; IDs of rate models in a rate-map. */ + static LogicTree<String> slipRateTree(Path json) { + LogicTree.StringValueBuilder builder = LogicTree.stringValueBuilder(SLIP_RATE_TREE); + for (JsonElement branch : jsonArray(json)) { + JsonObject obj = branch.getAsJsonObject(); + builder.addBranch( + obj.get(ID).getAsString(), + obj.get(WEIGHT).getAsDouble()); + } + return builder.build(); + } + /* Create an id:section map from a fault-sections directory. */ static Map<Integer, SourceFeature.NshmFault> faultSections(Path dir) { Map<Integer, SourceFeature.NshmFault> sectionMap = new HashMap<>(); @@ -395,7 +408,7 @@ class Deserialize { return obj; } - /* Create a grid source configuration. */ + /* Create a fault source configuration. */ static SourceConfig.Fault faultConfig(Path json) { JsonObject obj = jsonObject(json); double spacing = obj.get(SURFACE_SPACING).getAsDouble(); @@ -412,9 +425,27 @@ class Deserialize { config.dipTree(Deserialize.doubleTree(dipTree, DIP_TREE)); } + JsonElement dipSlipModel = obj.get(DIP_SLIP_MODEL); + if (!dipSlipModel.isJsonNull()) { + config.dipSlipModel(DipSlipModel.valueOf(dipSlipModel.getAsString())); + } + return config.build(); } + /* Create an interface source configuration. */ + static SourceConfig.Interface interfaceConfig(Path json) { + JsonObject obj = jsonObject(json); + double spacing = obj.get(SURFACE_SPACING).getAsDouble(); + String floatingStr = obj.get(RUPTURE_FLOATING).getAsString(); + String scalingStr = obj.get(RUPTURE_SCALING).getAsString(); + return SourceConfig.Interface.builder(json) + .spacing(spacing) + .floating(RuptureFloating.valueOf(floatingStr)) + .scaling(RuptureScaling.valueOf(scalingStr)) + .build(); + } + /* Create a grid source configuration. 
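
The slip-rate-tree above stores deformation model ids and weights rather than numeric rates; the ids are later looked up in each feature's rate-map. A sketch of the expected JSON (field names assumed to follow the ID and WEIGHT constants; model ids, weights, and the file path are illustrative):

    // [
    //   { "id": "GEO",  "weight": 0.8 },
    //   { "id": "BIRD", "weight": 0.1 },
    //   { "id": "ZENG", "weight": 0.1 }
    // ]
    LogicTree<String> slipRates = Deserialize.slipRateTree(Path.of("rate-tree.json"));
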
*/ static SourceConfig.Grid gridConfig(Path json) { JsonObject obj = jsonObject(json); diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/FaultRuptureSet.java b/src/main/java/gov/usgs/earthquake/nshmp/model/FaultRuptureSet.java index 968a868344506cb5c85706d79eb173e81b46d7a7..f45f6dae2de65e62eb52d7f7a4546df232dce5d9 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/FaultRuptureSet.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/model/FaultRuptureSet.java @@ -15,6 +15,7 @@ import java.util.stream.Collectors; import gov.usgs.earthquake.nshmp.Earthquakes; import gov.usgs.earthquake.nshmp.Faults; +import gov.usgs.earthquake.nshmp.Maths; import gov.usgs.earthquake.nshmp.data.XySequence; import gov.usgs.earthquake.nshmp.fault.surface.DefaultGriddedSurface; import gov.usgs.earthquake.nshmp.fault.surface.GriddedSurface; @@ -41,14 +42,21 @@ public class FaultRuptureSet implements RuptureSet { * necessary as the feature map is all that's being used and in this instnace * it's ok to overwrite it each time. */ + + /* + * TODO we removed aleaPops - MoBalance; 2008 CA a-priori a-faults do not + * moment balance so we'll need to figure out how to handle that; it's + * consistent with SSCn SINGLE mag trees; slip-rate based mfds mo-balance, + * others don't + * + */ final SourceFeature.NshmFault feature; final ModelData data; final LogicTree<Mfd> mfdTree; - // final Mfd mfdTotal; + final Mfd mfdTotal; final List<Integer> sectionIds; // reference: actually needed? - final GriddedSurface surface; FaultRuptureSet(Builder builder) { @@ -56,6 +64,8 @@ public class FaultRuptureSet implements RuptureSet { this.data = builder.data; this.mfdTree = builder.mfdTree; + this.mfdTotal = builder.mfdTotal; + this.sectionIds = builder.sectionIds; this.surface = builder.surface; } @@ -112,10 +122,11 @@ public class FaultRuptureSet implements RuptureSet { private ModelData data; private LogicTree<Mfd.Properties> mfdPropsTree; - private LogicTree<Mfd.Properties> mfdPropsTreeOut; + // private LogicTree<Mfd.Properties> mfdPropsTreeOut; /* created on build */ private LogicTree<Mfd> mfdTree; + private Mfd mfdTotal; private GriddedSurface surface; /* @@ -168,20 +179,36 @@ public class FaultRuptureSet implements RuptureSet { checkNotNull(id, "%s id", label); checkNotNull(mfdPropsTree, "%s MFD logic tree", label); - mfdTree = buildMfdTree(data, mfdPropsTree); - // mfdTotal = buildTotalMfd(mfdTree); + feature = createFeature(); - mfdPropsTreeOut = buildMfdPropsTree(data, mfdPropsTree); + /* Create Mo-rate tree. */ + LogicTree<Double> moRateTree = createMoRateTree(feature, data); + // for (Branch<Double> b : moRateTree) { + // System.out.println(b.id() + " " + b.value()); + // double eqMo = Earthquakes.magToMoment(feature.magnitude.getAsDouble()); + // System.out.println(eqMo); + // System.out.println(b.value() / eqMo); + // // feature.magnitude + // } - // if feature.rateMap - feature = createFeature(); + /* Filter of unlikely combination of SINGLEs epistemic uncertainty. */ + checkEpistemic(data.mfdConfig().get(), mfdPropsTree); - System.out.println("---"); - System.out.println(mfdPropsTree); - System.out.println(feature); - System.out.println(feature.rateMap); - System.out.println("---"); - System.out.println(); + /* Update MFD properties based on source magnitude. */ + mfdPropsTree = updateMfdProperties(); + + /* Combine rate-tree, mfd-props-tree, and uncertainty. 
*/ + mfdTree = buildMfdTree(moRateTree, mfdPropsTree, data); + + mfdTotal = ModelUtil.reduceMfdTree(mfdTree); + + // System.out.println(mfdTree); + + // if (name.contains("Angel")) { + for (Branch<Mfd> b : mfdTree) { + System.out.println(b.id() + " " + b.value().data()); + } + // } SourceConfig.Fault config = data.sourceConfig().asFault(); surface = DefaultGriddedSurface.builder() @@ -203,6 +230,13 @@ public class FaultRuptureSet implements RuptureSet { data.faultFeatureMap().orElseThrow(); checkState(featureMap.keySet().containsAll(sectionIds)); + /* + * Most (all?) single fault features will have been joined already. + */ + if (sectionIds.size() == 1) { + return featureMap.get(sectionIds.get(0)); + } + Feature f = joinFeatures(sectionIds.stream() .map(featureMap::get) .map(feature -> feature.source) @@ -210,6 +244,111 @@ public class FaultRuptureSet implements RuptureSet { return new SourceFeature.NshmFault(f); } + /* + * The properties picked up from a mfd-tree for faults are only used to set + * GR mMin and b-value; all other fields are just required placeholders. + * This method uses the 'characteristic' magnitude of the feature to set the + * SINGLE magnitude and GR Δm and mMax. + */ + private LogicTree<Mfd.Properties> updateMfdProperties() { + LogicTree.Builder<Mfd.Properties> propsTree = LogicTree.builder(mfdPropsTree.name()); + double Mw = feature.magnitude.orElseThrow(); + for (Branch<Mfd.Properties> branch : mfdPropsTree) { + Mfd.Type type = branch.value().type; + // System.out.println(Mw + " " + type); + switch (type) { + case GR: + double mCutoff = data.mfdConfig().orElseThrow().minMagnitude.orElseThrow(); + propsTree.addBranch( + branch.id(), + updateGr(branch.value(), Mw, mCutoff), + branch.weight()); + break; + case SINGLE: + propsTree.addBranch( + branch.id(), + Mfd.newSingleBuilder(Mw).properties(), + branch.weight()); + break; + default: + throw new UnsupportedOperationException(type + " MFD not supported"); + } + } + LogicTree<Mfd.Properties> pt = propsTree.build(); + // System.out.println(pt); + // for (Branch<Mfd.Properties> b : pt) { + // if (b.value().type == Mfd.Type.SINGLE) { + // System.out.println(" " + b.value().type + " " + + // b.value().getAsSingle().m); + // } else { + // Mfd.Properties.GutenbergRichter grPr = b.value().getAsGr(); + // System.out.println( + // " " + b.value().type + " " + grPr.mMin + " " + grPr.mMax + " " + + // grPr.Δm); + // } + // } + return pt; + } + + // TODO clean + // public static void main(String[] args) { + // + // /* + // * This is for the Bever Creek fault in Oregon; it's not clear how two + // * different magnitudes exist, yes it is + // */ + // double mo1 = Earthquakes.magToMoment(6.51); + // double mo2 = Earthquakes.magToMoment(6.52); + // + // double moRate = 2.677288582552237E15; + // + // System.out.println("rate 6.51: " + moRate / mo1); + // System.out.println("rate 6.52: " + moRate / mo2); + // + // System.out.println(Mfds.gutenbergRichterRate(1.82066, 0.8, 6.51)); + // + // } + + /* + * Update GR properties for fault 'characteristic' magnitude, or return + * SINGLE properties instead if mMax < mCutoff or nm = 1. + */ + private static Mfd.Properties updateGr( + Mfd.Properties props, + double mMax, + double mCutoff) { + + Mfd.Properties.GutenbergRichter grProps = props.getAsGr(); + + /* Return SINGLE for m <= GR minimum. */ + if (mMax <= mCutoff) { + return Mfd.newSingleBuilder(mMax).properties(); + } + + double Rm = Maths.round(mMax - grProps.mMin, 6); + + /* Return SINGLE if GR only has one magnitude bin. 
*/ + if (Rm <= 0.1) { + double m = Maths.round(grProps.mMin + Rm / 2, 6); + return Mfd.newSingleBuilder(m).properties(); + } + + /* + * USGS NSHM Gutenberg-Richter discretization algorithm. Cutoff for nm = 4 + * in fltrate*.f is >0.3, but in reality M6.8 events have 4 GR magnitudes + * in input files. + */ + int nm = Rm > 0.7 ? 8 : Rm > 0.5 ? 6 : Rm >= 0.3 ? 4 : 2; + double Δm = Maths.round(Rm / nm, 6); + + // System.out.println(Rm + " " + grProps.mMin + " " + mMax + " " + Δm); + return Mfd.newGutenbergRichterBuilder( + grProps.mMin, + mMax, + Δm, + grProps.b).properties(); + } + /* * TODO: consider short citcuiting singletons if feature as represented in * model is consistent with rupture set requirements @@ -219,7 +358,7 @@ public class FaultRuptureSet implements RuptureSet { * * TODO: Copy properties from multi segment ruptures (mostly 2008 model) * - * TODO: For documentaiton; there are many options avilable when defining + * TODO: For documentation; there are many options avilable when defining * both sfautl and grid based sources that may necessarily be able to be * combined. For example, normal fault dip variants are difficult to handle * in the context of a cluster model. @@ -261,74 +400,95 @@ public class FaultRuptureSet implements RuptureSet { } } - private static LogicTree<Mfd.Properties> buildMfdPropsTree( - ModelData mfdData, - LogicTree<Mfd.Properties> mfdPropertiesTree) { - - // Not true, def model branches have epitemic uncertainty applied - // shouldn't have rate-tree AND epistemic uncertainty, both - // of which increase the number of branches - if (mfdData.rateTree().isPresent()) { - LogicTree.Builder<Mfd.Properties> newPropsTree = - LogicTree.builder(MFD_TREE); - LogicTree<Double> rateTree = mfdData.rateTree().get(); - for (Branch<Mfd.Properties> props : mfdPropertiesTree) { - for (Branch<Double> rate : rateTree) { - newPropsTree.addBranch( - props.id(), - props.value(), - props.weight() * rate.weight()); - } - } - return newPropsTree.build(); - } + private static LogicTree<Mfd> buildMfdTree( + LogicTree<Double> moRateTree, + LogicTree<Mfd.Properties> mfdPropsTree, + ModelData mfdData) { - if (mfdData.mfdConfig().isPresent()) { - MfdConfig mfdConfig = mfdData.mfdConfig().get(); - if (mfdConfig.epistemicTree.isPresent()) { - LogicTree.Builder<Mfd.Properties> newPropsTree = - LogicTree.builder(MFD_TREE); - LogicTree<Double> epiTree = mfdConfig.epistemicTree.get(); - for (Branch<Mfd.Properties> props : mfdPropertiesTree) { - for (Branch<Double> epi : epiTree) { - newPropsTree.addBranch( - props.id(), - props.value(), - props.weight() * epi.weight()); - } + LogicTree.Builder<Mfd> mfdTree = LogicTree.builder(MFD_TREE); + + for (Branch<Double> rateBranch : moRateTree) { + for (Branch<Mfd.Properties> mfdBranch : mfdPropsTree) { + Mfd.Type type = mfdBranch.value().type; + switch (type) { + case SINGLE: + createSingleMfds(mfdTree, mfdData, rateBranch, mfdBranch); + break; + case GR: + createGrMfds(mfdTree, mfdData, rateBranch, mfdBranch); + break; + default: + throw new UnsupportedOperationException(type + " MFD not supported"); } - return newPropsTree.build(); } } - - return mfdPropertiesTree; + return mfdTree.build(); } - private static LogicTree<Mfd> buildMfdTree( - ModelData mfdData, - LogicTree<Mfd.Properties> mfdPropertiesTree) { - - /* Filter of unlikely combination. 
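
A worked instance of the discretization above, assuming a GR branch with mMin = 6.5 and a feature 'characteristic' magnitude of 7.4:

    double Rm = Maths.round(7.4 - 6.5, 6);                     // 0.9
    int nm = Rm > 0.7 ? 8 : Rm > 0.5 ? 6 : Rm >= 0.3 ? 4 : 2;  // 8 magnitude bins
    double Δm = Maths.round(Rm / nm, 6);                       // 0.1125
    // → Mfd.newGutenbergRichterBuilder(6.5, 7.4, 0.1125, b).properties()
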
*/ - checkEpistemic(mfdData.mfdConfig().get(), mfdPropertiesTree); - - LogicTree.Builder<Mfd> mfdTree = LogicTree.builder(MFD_TREE); + /* + * Create a logic tree of moment rates for a fault source given a feature and + * rate data. If present, a rate-dip may be used to compute a constraining + * moment rate (e.g. WUS normal faults) + */ + private static LogicTree<Double> createMoRateTree( + SourceFeature.NshmFault feature, + ModelData data) { + + double dip = feature.rateDip.isPresent() + ? feature.rateDip.getAsDouble() + : feature.dip; + double width = Faults.width(dip, feature.upperDepth, feature.lowerDepth); + double length = feature.length.orElseThrow(); + double area = length * width * 1e6; + Map<String, Double> rateMap = feature.rateMap; + LogicTree<String> slipRateTree = data.slipRateTree().orElseThrow(); + LogicTree.Builder<Double> moRateTree = LogicTree.builder(RATE_TREE); + + // TODO clean + if (rateMap.containsValue(null)) { + System.out.println(" " + rateMap); + } - for (Branch<Mfd.Properties> branch : mfdPropertiesTree) { - - Mfd.Type type = branch.value().type; - // System.out.println(branch.value().getClass()); - switch (type) { - case SINGLE: - createSingleMfds(mfdTree, mfdData, branch); - break; - case GR: - createGrMfds(mfdTree, mfdData, branch); - break; - default: - throw new UnsupportedOperationException(type + " MFD not supported"); - } + for (Branch<String> branch : slipRateTree.branches()) { + + /* + * TODO Verify missing slip rates and imbalanced logic trees. In some + * cases (e.g. TX) need to give GEO branch weight of 1.0. To do this we + * should actually put geo rate in BIRD and ZENG branches to preserve + * logic tree. In other cases (e.g. OR offshore) we need empty branches + * because GEO weight is 0.8 and imbalanced; maybe we just handle this + * with setting rate to 0, as below. + */ + Double slipCheck = rateMap.get(branch.id()); + double slipRate = (slipCheck != null) ? slipCheck : 0.0; + + /* TODO 2008 will need vertical to on-fault conversions */ + // slipRate = Faults.projectVerticalSlip(dip, slipRate) / 1000.0; + slipRate = slipRate / 1000.0; + double moRate = Earthquakes.moment(area, slipRate); + + // System.out.println(branch.id() + " " + moRate); + + /* + * 2014 model consistency. Convert to annual rate and set scale to seven + * decimal places to match the MFD values in fortran and XML input files + * and then convert back to moment rate. The steps of going from slip rate + * to explicit MFD parameters and then back to build MFDs is no longer + * necessary, but the cumulative effects of rounding at various steps in + * legacy codes are difficult to replicate without doing things like this. + * TODO consider describing different rounding/precision characteristics + * in a model defined in a source config. 
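
A sketch of the moment-rate computation above with illustrative geometry (a 20 km long fault dipping 50° from 0 to 12 km depth, slipping 1 mm/yr), and assuming Earthquakes.moment(area, slip) implements the standard μ·A·s product:

    double width = Faults.width(50.0, 0.0, 12.0);   // down-dip width, ≈ 15.7 km
    double area = 20.0 * width * 1e6;               // km² → m²
    double slip = 1.0 / 1000.0;                     // mm/yr → m/yr
    double moRate = Earthquakes.moment(area, slip); // N·m/yr
    // a 2008-style vertical slip rate would first be projected onto the
    // fault plane: Faults.projectVerticalSlip(50.0, slip)
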
+ */ + double Mw = feature.magnitude.getAsDouble(); + double mo = Earthquakes.magToMoment(Mw); + double rate = Maths.round(moRate / mo, 7); + moRate = rate * mo; + + // System.out.println(branch.id() + " " + moRate); + + moRateTree.addBranch(branch.id(), moRate, branch.weight()); } - return mfdTree.build(); + return moRateTree.build(); } private static XySequence buildTotalMfd(LogicTree<XySequence> mfdTree) { @@ -339,94 +499,98 @@ public class FaultRuptureSet implements RuptureSet { return XySequence.combine(mfds); } - /* fault GR */ + /* Fault GR */ private static void createGrMfds( LogicTree.Builder<Mfd> mfdTree, ModelData mfdData, - Branch<Mfd.Properties> branch) { + Branch<Double> moRateBranch, + Branch<Mfd.Properties> mfdPropsBranch) { + + MfdConfig mfdConfig = mfdData.mfdConfig().get(); + Mfd.Properties.GutenbergRichter grData = mfdPropsBranch.value().getAsGr(); + + double mfdWeight = mfdPropsBranch.weight(); + String mfdId = grData.type.name(); // mfdPropsBranch.id(); + + double rateWeight = moRateBranch.weight(); + String rateId = moRateBranch.id(); + double tmr = moRateBranch.value(); - var mfdConfig = mfdData.mfdConfig().get(); - var grData = branch.value().getAsGr(); - var mfdWeight = branch.weight(); - var mfdId = branch.id(); + double mfdRateWeight = mfdWeight * rateWeight; /* Optional epistemic uncertainty; no aleatory with GR. */ - boolean epistemic = hasEpistemic(mfdConfig, grData.mMax); + boolean epistemic = hasEpistemic(mfdConfig, grData.mMax - grData.Δm / 2); + // TODO does this ever throw? (as well as epi check below) int nMag = Mfds.magCount(grData.mMin, grData.mMax, grData.Δm); - - // TODO does this ever throw? - checkState(nMag > 0, "GR MFD with no magnitudes [%s]", branch.id()); + checkState(nMag > 0, "GR MFD with no magnitudes [%s]", mfdPropsBranch.id()); /* - * TODO clean; this was handled previously by GR_Data.hasMagExceptions() - * * TODO edge cases (Rush Peak in brange.3dip.gr.in) suggest data.mMin should * be used instead of unc.epiCutoff */ - // boolean uncertAllowed = unc.hasEpistemic && (data.mMax + - // unc.epiDeltas[0]) >= unc.epiCutoff; - - double tmr = Mfds.totalMoRate(grData.mMin, nMag, grData.Δm, grData.a, grData.b); if (epistemic) { for (Branch<Double> epiBranch : mfdConfig.epistemicTree.get()) { - double mMaxEpi = grData.mMax + epiBranch.value(); - double weightEpi = mfdWeight * epiBranch.weight(); - int nMagEpi = Mfds.magCount(grData.mMin, mMaxEpi, grData.Δm); - - // TODO check if ever thrown - checkState(nMagEpi > 0, "GR MFD epistemic branch with no magnitudes"); - - String id = String.join("_", mfdId, epiBranch.id()); + /* + * Given changes to how GR MFD sequences are built (centered and bounded + * by the upper a lower bin edges), M + epi - Δm/2 commonly cuts off the + * upper bin. As a result we add Δm/2 knowing it will be removed (e.g. + * for M7, M + epi 7.2 as upper limit of sequence). + */ + double mMaxEpi = grData.mMax + epiBranch.value() + grData.Δm / 2; + double weightEpi = mfdRateWeight * epiBranch.weight(); /* - * epi branches preserve Mo between mMin and dMag(nMag-1), not mMax to - * ensure that Mo is 'spent' on earthquakes represented by the epi GR - * distribution with adj. mMax. + * For mMax magnitudes close to 6.5, ((mMax-epi)-mMin) can be smaller + * than Δm. If this is the case adjust mMax back up to mMin + Δm. 
*/ + // System.out.println("epi: " + grData.mMin + " " + mMaxEpi + " " + + // grData.Δm); + // if (mMaxEpi - grData.mMin < grData.Δm) { + // // mMaxEpi = grData.mMin + + // } - Mfd mfd = ModelData.newGrBuilder(grData) - .scaleToMomentRate(tmr * weightEpi) + // TODO check if ever thrown + int nMagEpi = Mfds.magCount(grData.mMin, mMaxEpi, grData.Δm); + checkState(nMagEpi > 0, "GR MFD epistemic branch with no magnitudes"); + + String id = mfdBranchName(rateId, mfdId, epiBranch.id()); + Mfd mfd = ModelUtil.newGrBuilder(grData, mMaxEpi) + .scaleToMomentRate(tmr) .build(); mfdTree.addBranch(id, mfd, weightEpi); } } else { - Mfd mfd = ModelData.newGrBuilder(grData) - .scaleToMomentRate(tmr * mfdWeight) + + String id = mfdBranchName(rateId, mfdId); + Mfd mfd = grData.toBuilder() + .scaleToMomentRate(tmr) .build(); - mfdTree.addBranch(mfdId, mfd, mfdWeight); + mfdTree.addBranch(id, mfd, mfdRateWeight); } } - /* fault SINGLE */ + /* Fault SINGLE */ private static void createSingleMfds( LogicTree.Builder<Mfd> mfdTree, ModelData mfdData, - Branch<Mfd.Properties> branch) { + Branch<Double> moRateBranch, + Branch<Mfd.Properties> mfdPropsBranch) { - // System.out.println(branch.id() + " " + branch.weight() + " " + - // branch.value()); - // - // System.out.println(branch.value().properties); - // System.out.println(((MfdSupport.Single) - // branch.value().properties).getClass()); + MfdConfig mfdConfig = mfdData.mfdConfig().get(); + Mfd.Properties.Single singleData = mfdPropsBranch.value().getAsSingle(); - var mfdConfig = mfdData.mfdConfig().get(); - var singleData = branch.value().getAsSingle(); - var mfdWeight = branch.weight(); - var mfdId = branch.id(); + double mfdWeight = mfdPropsBranch.weight(); + String mfdId = singleData.type.name(); // mfdPropsBranch.id(); - LogicTree<Double> rateTree = mfdData.rateTree() - .orElse(LogicTree.<Double> builder(RATE_TREE) - .addBranch("single", singleData.rate, 1.0) - .build()); + double rateWeight = moRateBranch.weight(); + String rateId = moRateBranch.id(); + double tmr = moRateBranch.value(); - // TODO this was handled previously by GR_Data.hasMagExceptions() - // need to catch the single floaters that are less than 6.5 - // note: unc.epiDeltas[0] may be null + double mfdRateWeight = mfdWeight * rateWeight; /* Optional epistemic uncertainty. */ boolean epistemic = hasEpistemic(mfdConfig, singleData.m); @@ -434,122 +598,68 @@ public class FaultRuptureSet implements RuptureSet { /* Optional aleatory variability. */ boolean aleatory = mfdConfig.aleatoryProperties.isPresent(); - /* - * TODO clean - * - * Developer note: fault sources with M<6.5 && that float not sure what the - * implications of this prior rule are. - */ - // double minUncertMag = data.m + (hasEpistemic ? unc.epiDeltas[0] : 0.0); - // boolean uncertAllowed = !((minUncertMag < unc.epiCutoff) && data.floats); - - /* Loop over magnitude epistemic uncertainty. */ + /* mMax epistemic uncertainty branches */ if (epistemic) { - for (Branch<Double> rateBranch : rateTree) { - - /* Total moment rate */ - double tmr = rateBranch.value() * Earthquakes.magToMoment(singleData.m); - - /* Total event rate */ - double tcr = rateBranch.value(); - - for (Branch<Double> epiBranch : mfdConfig.epistemicTree.get()) { - - double mEpi = singleData.m + epiBranch.value(); - double weightEpi = mfdWeight * rateBranch.weight() * epiBranch.weight(); - - String id = String.join("_", mfdId, rateBranch.id(), epiBranch.id()); - - /* Possibly apply aleatory variability. 
*/ - if (aleatory) { + for (Branch<Double> epiBranch : mfdConfig.epistemicTree.get()) { - MfdConfig.AleatoryProperties aleaProps = mfdConfig.aleatoryProperties.get(); - Mfd.Builder mfd = ModelData.newGaussianBuilder(mEpi, aleaProps); + double mEpi = singleData.m + epiBranch.value(); + double weightEpi = mfdRateWeight * epiBranch.weight(); + String id = mfdBranchName(rateId, mfdId, epiBranch.id()); - if (aleaProps.momentBalanced) { - mfd.scaleToMomentRate(tmr * weightEpi); - } else { - mfd.scaleToCumulativeRate(tcr * weightEpi); - } + /* Possibly apply aleatory variability */ + if (aleatory) { - mfdTree.addBranch(id, mfd.build(), weightEpi); + MfdConfig.AleatoryProperties aleaProps = mfdConfig.aleatoryProperties.get(); + Mfd mfd = ModelUtil.newGaussianBuilder(mEpi, aleaProps) + .scaleToMomentRate(tmr) + .build(); + mfdTree.addBranch(id, mfd, weightEpi); - } else { + } else { - /* TODO note prior codes all moment balanced epi branches */ - Mfd mfd = Mfd.newSingleBuilder(mEpi) - .scaleToMomentRate(tmr * weightEpi) - .build(); - mfdTree.addBranch(id, mfd, weightEpi); - } + Mfd mfd = Mfd.newSingleBuilder(mEpi) + .scaleToMomentRate(tmr) + .build(); + mfdTree.addBranch(id, mfd, weightEpi); } } - } else { // no epistemic uncertainty + } else { /* No epistemic uncertainty */ + + String id = mfdBranchName(rateId, mfdId); - /* Possibly apply aleatory variability. */ + /* Possibly apply aleatory variability */ if (aleatory) { MfdConfig.AleatoryProperties aleaProps = mfdConfig.aleatoryProperties.get(); - Mfd.Builder mfd = ModelData.newGaussianBuilder(singleData.m, aleaProps); - - /* Nest rate loop to reuse MfdBuilder */ - for (Branch<Double> rateBranch : rateTree) { - - /* Total moment rate */ - double tmr = rateBranch.value() * Earthquakes.magToMoment(singleData.m); - - /* Total event rate */ - double tcr = rateBranch.value(); - - double rateWeight = mfdWeight * rateBranch.weight(); - - if (aleaProps.momentBalanced) { - mfd.scaleToMomentRate(tmr * rateWeight); - } else { - mfd.scaleToCumulativeRate(tcr * rateWeight); - } - - String id = String.join("_", mfdId, rateBranch.id()); - - mfdTree.addBranch(id, mfd.build(), rateWeight); - } + Mfd mfd = ModelUtil.newGaussianBuilder(singleData.m, aleaProps) + .scaleToMomentRate(tmr) + .build(); + mfdTree.addBranch(id, mfd, mfdRateWeight); } else { - Mfd.Builder mfdBase = Mfd.newSingleBuilder(singleData.m); - - for (Branch<Double> rateBranch : rateTree) { - double rateWeight = mfdWeight * rateBranch.weight(); - - String id = String.join("_", mfdId, rateBranch.id()); - - Mfd mfd = mfdBase - .scaleToCumulativeRate(rateBranch.value()) - .build(); - - mfdTree.addBranch(id, mfd, rateWeight); - } + Mfd mfd = Mfd.newSingleBuilder(singleData.m) + .scaleToMomentRate(tmr) + .build(); + mfdTree.addBranch(id, mfd, mfdRateWeight); } } } + private static String mfdBranchName(String... 
args) { + return String.join(" : ", args); + } + private static boolean hasEpistemic(MfdConfig mfdConfig, double magnitude) { boolean epistemic = mfdConfig.epistemicTree.isPresent(); - /* - * Short-circuit application of epistemic uncertainty if: - * - * m - Δm < mfdConfig.minMagnitude - */ + /* No epistemic uncertainty if: m - Δm < mfdConfig.minMagnitude */ if (epistemic) { double mEpiCutoff = mfdConfig.minMagnitude.getAsDouble(); - double mEpiMin = mfdConfig.epistemicTree.get().branches().stream() - .mapToDouble(b -> b.value()) - .min() - .getAsDouble(); - epistemic = (magnitude - mEpiMin) > mEpiCutoff; + double mEpiOffset = mfdConfig.minEpiOffset.getAsDouble(); + epistemic = (magnitude + mEpiOffset) > mEpiCutoff; } return epistemic; } diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/FaultSource.java b/src/main/java/gov/usgs/earthquake/nshmp/model/FaultSource.java index 78e0bcf765a80d4cac1ffaee6f86227128149bf4..8f123de43d621ef64d9e4dc66add21def935b956 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/FaultSource.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/model/FaultSource.java @@ -83,6 +83,7 @@ public class FaultSource implements Source { this.branchWeight = builder.branchWeight; this.scaledMfds = ModelUtil.scaledMfdList(mfdTree, branchWeight); + System.out.println(mfdTree); this.ruptureLists = scaledMfds.stream() .map(this::createRuptureList) .collect(toUnmodifiableList()); @@ -236,7 +237,8 @@ public class FaultSource implements Source { ruptureList.add(rup); } } - checkState(ruptureList.size() > 0, "Rupture list is empty"); + + checkState(ruptureList.size() > 0, "Rupture list for %s is empty", name); return List.copyOf(ruptureList); } diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/HazardModel.java b/src/main/java/gov/usgs/earthquake/nshmp/model/HazardModel.java index be364f8acd0b6d92858ba06fbdcb963118b2d532..cc89f723eb0076fe072e44f75ce907425df40f56 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/HazardModel.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/model/HazardModel.java @@ -256,7 +256,7 @@ public final class HazardModel implements Iterable<SourceSet<? 
extends Source>> for (Leaf leaf : tree.branchMap().keySet()) { RuptureSet rs = leaf.ruptureSet; - System.out.println("Flt toSrcSet: " + rs.type() + " " + rs.name()); + System.out.println("Flt to SrcSet: " + rs.type() + " " + rs.name()); if (leaf.ruptureSet.type() == SourceType.CLUSTER) { ClusterRuptureSet crs = (ClusterRuptureSet) rs; diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/InterfaceRuptureSet.java b/src/main/java/gov/usgs/earthquake/nshmp/model/InterfaceRuptureSet.java index bdd9943a8a41027a13ef9bcd09501643686597ad..995fee5804dd16e3ded0fbbe8192f3602590bc42 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/InterfaceRuptureSet.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/model/InterfaceRuptureSet.java @@ -265,7 +265,7 @@ public class InterfaceRuptureSet implements RuptureSet { return mfdTree.build(); } - /* fault GR */ + /* Interface GR */ private static void createGrMfds( LogicTree.Builder<Mfd> mfdTree, ModelData mfdData, @@ -277,7 +277,7 @@ public class InterfaceRuptureSet implements RuptureSet { mfdTree.addBranch(branch.id(), mfd, branch.weight()); } - /* fault SINGLE */ + /* Interface SINGLE */ private static void createSingleMfds( LogicTree.Builder<Mfd> mfdTree, ModelData mfdData, diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/MfdConfig.java b/src/main/java/gov/usgs/earthquake/nshmp/model/MfdConfig.java index 8eadd5b271d21e6a4269484c2336752d858d6ff5..661f2a1cd3c3f473456e83ed34f4f3d3b20e307b 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/MfdConfig.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/model/MfdConfig.java @@ -22,11 +22,13 @@ class MfdConfig { static final String MIN_MAGNITUDE_KEY = "minimum-magnitude"; final Optional<LogicTree<Double>> epistemicTree; + final OptionalDouble minEpiOffset; final Optional<AleatoryProperties> aleatoryProperties; final OptionalDouble minMagnitude; private MfdConfig(Builder builder) { this.epistemicTree = builder.epistemicTree; + this.minEpiOffset = builder.minEpiOffset; this.aleatoryProperties = builder.aleatoryProperties; this.minMagnitude = builder.minMagnitude; } @@ -44,12 +46,20 @@ class MfdConfig { static class Builder { private Optional<LogicTree<Double>> epistemicTree = Optional.empty(); + private OptionalDouble minEpiOffset = OptionalDouble.empty(); private Optional<AleatoryProperties> aleatoryProperties = Optional.empty(); private OptionalDouble minMagnitude = OptionalDouble.empty(); private boolean built = false; Builder epistemicTree(LogicTree<Double> tree) { this.epistemicTree = Optional.ofNullable(tree); + if (this.epistemicTree.isPresent()) { + this.minEpiOffset = OptionalDouble.of( + this.epistemicTree.get().branches().stream() + .mapToDouble(b -> b.value()) + .min() + .getAsDouble()); + } return this; } diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/ModelData.java b/src/main/java/gov/usgs/earthquake/nshmp/model/ModelData.java index 56875ddeef4871d3ffc76bfad8f0f9295fecc2ff..9eea29ce3537d4586deb4639105c02d923460a33 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/ModelData.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/model/ModelData.java @@ -24,10 +24,12 @@ class ModelData { private GmmSet gmms; private SourceConfig srcConfig; + // TODO shouldn't we reuire this private Optional<MfdConfig> mfdConfig = Optional.empty(); private Optional<Map<String, LogicTree<Mfd.Properties>>> mfdMap = Optional.empty(); private Optional<LogicTree<Double>> rateTree = Optional.empty(); private Optional<LogicTree<Path>> gridRateTree = Optional.empty(); + private 
Optional<LogicTree<String>> slipRateTree = Optional.empty(); private Optional<Map<Integer, SourceFeature.NshmFault>> faultFeatureMap = Optional.empty(); private Optional<Map<Integer, SourceFeature.Interface>> interfaceFeatureMap = Optional.empty(); @@ -51,6 +53,7 @@ class ModelData { copy.mfdMap = original.mfdMap; copy.rateTree = original.rateTree; copy.gridRateTree = original.gridRateTree; + copy.slipRateTree = original.slipRateTree; copy.gridDataDir = original.gridDataDir; copy.faultFeatureMap = original.faultFeatureMap; copy.interfaceFeatureMap = original.interfaceFeatureMap; @@ -107,6 +110,14 @@ class ModelData { return gridRateTree; } + void slipRateTree(LogicTree<String> slipRateTree) { + this.slipRateTree = Optional.of(checkNotNull(slipRateTree)); + } + + Optional<LogicTree<String>> slipRateTree() { + return slipRateTree; + } + void gridDataDirectory(Path gridDataDir) { checkState(this.gridDataDir.isEmpty()); this.gridDataDir = Optional.of(checkNotNull(gridDataDir)); @@ -156,20 +167,4 @@ class ModelData { R8; } - static Mfd.Builder newGaussianBuilder(double m, MfdConfig.AleatoryProperties aleaProps) { - return Mfd.newGaussianBuilder( - m, - aleaProps.count, - aleaProps.σ, - aleaProps.σSize); - } - - static Mfd.Builder newGrBuilder(Mfd.Properties.GutenbergRichter grData) { - return Mfd.newGutenbergRichterBuilder( - grData.mMin, - grData.mMax, - grData.Δm, - grData.b); - } - } diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/ModelFiles.java b/src/main/java/gov/usgs/earthquake/nshmp/model/ModelFiles.java index 9a0b019665bf81a3e0f8a29006a16042e0bce256..35d6147b4d74f597ceaa00a74594172d78365107 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/ModelFiles.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/model/ModelFiles.java @@ -87,7 +87,7 @@ class ModelFiles { /* Interface config (same as fault), relative to parent. */ static Optional<SourceConfig> readInterfaceConfig(Path dir) { - return read(dir, INTERFACE_CONFIG, Deserialize::faultConfig); + return read(dir, INTERFACE_CONFIG, Deserialize::interfaceConfig); } /* Intraslab config (same as grid), relative to parent. */ @@ -130,6 +130,11 @@ class ModelFiles { return read(dir, RATE_TREE, Deserialize::gridRateTree); } + /* Rate-tree containing deformation model ids. 
*/ + static Optional<LogicTree<String>> readSlipRateTree(Path dir) { + return read(dir, RATE_TREE, Deserialize::slipRateTree); + } + private static <R> Optional<R> read( Path dir, String file, diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/ModelLoader.java b/src/main/java/gov/usgs/earthquake/nshmp/model/ModelLoader.java index c1623c7b6c908eab0c2bba461925bb0f3d4b1464..1db85e5727ebf1fc7cbff36e6baa663112f7d10a 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/ModelLoader.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/model/ModelLoader.java @@ -4,6 +4,7 @@ import static com.google.common.base.CaseFormat.LOWER_HYPHEN; import static com.google.common.base.CaseFormat.UPPER_UNDERSCORE; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.Preconditions.checkState; +import static gov.usgs.earthquake.nshmp.model.DipSlipModel.FIXED; import static gov.usgs.earthquake.nshmp.model.ModelFiles.CALC_CONFIG; import static gov.usgs.earthquake.nshmp.model.ModelFiles.CLUSTER_SET; import static gov.usgs.earthquake.nshmp.model.ModelFiles.FAULT_SOURCES; @@ -22,6 +23,7 @@ import static gov.usgs.earthquake.nshmp.model.ModelFiles.readMfdConfig; import static gov.usgs.earthquake.nshmp.model.ModelFiles.readMfdMap; import static gov.usgs.earthquake.nshmp.model.ModelFiles.readRateTree; import static gov.usgs.earthquake.nshmp.model.ModelFiles.readSlabConfig; +import static gov.usgs.earthquake.nshmp.model.ModelFiles.readSlipRateTree; import static gov.usgs.earthquake.nshmp.model.ModelFiles.readSourceTree; import static gov.usgs.earthquake.nshmp.model.ModelFiles.readZoneConfig; import static gov.usgs.earthquake.nshmp.model.SourceType.FAULT; @@ -185,9 +187,7 @@ abstract class ModelLoader { } /* Load source type specific rate tree, if present. */ - void loadRateTree(Path dir, ModelData data) { - readRateTree(dir).ifPresent(data::rateTree); - } + abstract void loadRateTree(Path dir, ModelData data); /* Load source type specific config, if present. */ abstract void loadSourceConfig(Path dir, ModelData data); @@ -286,11 +286,19 @@ abstract class ModelLoader { readFaultConfig(dir).ifPresent(data::sourceConfig); } + @Override + void loadRateTree(Path dir, ModelData data) { + readSlipRateTree(dir).ifPresent(data::slipRateTree); + } + @Override SourceTree loadSingleSource( Path geojson, ModelData data) { + /* Ensure singleton mfd-map. */ + checkState(data.mfdMap().orElseThrow().size() == 1); + SourceFeature.NshmFault feature = SourceFeature.newNshmFault(geojson); /* Possibly split on normal fault dip variants. */ @@ -304,10 +312,22 @@ abstract class ModelLoader { System.out.println(" source: " + root.relativize(geojson)); + /* Create new feature with updated properties. 
*/ + double length = feature.trace.length(); + double Mw = ModelUtil.magnitudeWc94(length); + Feature nshmFeature = Feature.copyOf(feature.source) + .properties(Properties.fromFeature(feature.source) + .put(Key.LENGTH, length) + .put(Key.MAGNITUDE, Mw) + .build()) + .build(); + feature = new SourceFeature.NshmFault(nshmFeature); + LogicTree<Path> root = LogicTree.singleton( Deserialize.SOURCE_TREE, feature.name, Path.of(feature.name)); + SourceTree.Builder treeBuilder = SourceTree.builder() .name(feature.name) .type(FAULT) @@ -330,29 +350,28 @@ abstract class ModelLoader { SourceConfig.Fault config = data.sourceConfig().asFault(); LogicTree<Double> dipTree = config.dipTree.orElseThrow(); LogicTree<Path> dipSourceTree = ModelUtil.toPathTree(dipTree); - - System.out.println(dipTree); + DipSlipModel dipSlipModel = config.dipSlipModel.orElseThrow(); SourceTree.Builder treeBuilder = SourceTree.builder() .name(feature.name) .type(FAULT) .gmms(data.gmms()) .root(dipSourceTree); - // TODO how do we split down different rate-tree options ?? - double slipRate = feature.rateMap.get("GEO"); - RateType rateType = feature.rateType; - // DipSlipModel slipModel = config. - - System.out.println(feature.rateMap.get("GEO")); + double length = feature.trace.length(); + double Mw = ModelUtil.magnitudeWc94(length); for (int i = 0; i < dipTree.size(); i++) { Branch<Double> dipBranch = dipTree.branches().get(i); int branchDip = (int) (feature.dip + dipBranch.value()); + int rateDip = (int) ((dipSlipModel == FIXED) ? feature.dip : branchDip); String branchName = String.format("%s [%s°]", feature.name, branchDip); Feature dipFeature = Feature.copyOf(feature.source) .properties(Properties.fromFeature(feature.source) .put(Key.NAME, branchName) .put(Key.DIP, branchDip) + .put(Key.RATE_DIP, rateDip) + .put(Key.LENGTH, length) + .put(Key.MAGNITUDE, Mw) .build()) .build(); @@ -371,12 +390,8 @@ abstract class ModelLoader { ModelData data, SourceFeature.NshmFault feature) { - // TODO dummy mfds for now - Mfd.Properties.Single mfdProps = Mfd.Properties.Single.dummy(); - LogicTree<Mfd.Properties> mfdTree = LogicTree.singleton( - Deserialize.MFD_TREE, - "M1", - mfdProps); + Map<String, LogicTree<Mfd.Properties>> mfdMap = data.mfdMap().orElseThrow(); + LogicTree<Mfd.Properties> mfdTree = mfdMap.values().iterator().next(); data.faultFeatureMap(Map.of(feature.id, feature)); @@ -429,7 +444,6 @@ abstract class ModelLoader { ModelData data = ModelData.copyOf(dataIn); readMfdConfig(dir).ifPresent(data::mfdConfig); loadRateTree(dir, data); - // readRateTree(dir).ifPresent(data::rateTree); /* Can be source-tree or source-group. 
*/ Optional<LogicTree<Path>> tree = readSourceTree(dir); @@ -461,7 +475,6 @@ abstract class ModelLoader { ClusterRuptureSet crs = Deserialize.clusterRuptureSet(setPath, data) .build(); - treeBuilder.addLeaf(branch, crs); } } @@ -487,6 +500,11 @@ abstract class ModelLoader { readInterfaceConfig(dir).ifPresent(data::sourceConfig); } + @Override + void loadRateTree(Path dir, ModelData data) { + readRateTree(dir).ifPresent(data::rateTree); + } + @Override SourceTree loadSingleSource( Path geojson, @@ -769,6 +787,11 @@ abstract class ModelLoader { readSlabConfig(dir).ifPresent(data::sourceConfig); } + @Override + void loadRateTree(Path dir, ModelData data) { + readRateTree(dir).ifPresent(data::rateTree); + } + @Override SourceTree loadSingleSource( Path geojson, @@ -908,6 +931,11 @@ abstract class ModelLoader { readZoneConfig(dir).ifPresent(data::sourceConfig); } + @Override + void loadRateTree(Path dir, ModelData data) { + readRateTree(dir).ifPresent(data::rateTree); + } + @Override SourceTree loadSingleSource( Path geojson, diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/ModelUtil.java b/src/main/java/gov/usgs/earthquake/nshmp/model/ModelUtil.java index 61c2847e3b31353a49d7431aa0ce5c1cff519f65..88ea9a996f6791cef98f1f7bc64fa9dc54aea2a3 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/ModelUtil.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/model/ModelUtil.java @@ -1,11 +1,13 @@ package gov.usgs.earthquake.nshmp.model; +import static java.lang.Math.log10; import static java.util.stream.Collectors.toUnmodifiableList; import java.nio.file.Path; import java.util.ArrayList; import java.util.List; +import gov.usgs.earthquake.nshmp.Maths; import gov.usgs.earthquake.nshmp.data.MutableXySequence; import gov.usgs.earthquake.nshmp.data.XySequence; import gov.usgs.earthquake.nshmp.mfd.Mfd; @@ -208,4 +210,33 @@ class ModelUtil { } return treeOfLists.build(); } + + /* + * Returns the expected magnitude for the supplied length rounded to two + * decimal places. Note that the function used here is not the inverted form + * of that used in RuptureScaling.NSHM_FAULT_WC94_LENGTH.dimensions() See + * Wells & Coppersmith (1994) Table 2A. + */ + static double magnitudeWc94(double length) { + return Maths.round(5.08 + 1.16 * log10(length), 2); + } + + /* Convenience method to pass aleatory properties. */ + static Mfd.Builder newGaussianBuilder(double m, MfdConfig.AleatoryProperties aleaProps) { + return Mfd.newGaussianBuilder( + m, + aleaProps.count, + aleaProps.σ, + aleaProps.σSize); + } + + /* Convenience method to override mMax. 
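
For reference, a quick check of magnitudeWc94 above with an illustrative 27 km trace length:

    double Mw = ModelUtil.magnitudeWc94(27.0);
    // 5.08 + 1.16 * log10(27.0) ≈ 5.08 + 1.16 * 1.4314 ≈ 6.74
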
*/ + static Mfd.Builder newGrBuilder(Mfd.Properties.GutenbergRichter grData, double mMax) { + return Mfd.newGutenbergRichterBuilder( + grData.mMin, + mMax, + grData.Δm, + grData.b); + } + } diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/SourceConfig.java b/src/main/java/gov/usgs/earthquake/nshmp/model/SourceConfig.java index ad7ec827568cd2f9de5daded5c4c34787ea67c2e..fe24c957a8c72c404a27b4cda0afd9852e331a33 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/SourceConfig.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/model/SourceConfig.java @@ -34,7 +34,9 @@ abstract class SourceConfig { return (Grid) this; } - // static abstract Finite + Interface asInterface() { + return (Interface) this; + } static class Fault extends SourceConfig { @@ -110,6 +112,64 @@ abstract class SourceConfig { } } + static class Interface extends SourceConfig { + + final double surfaceSpacing; + final RuptureFloating ruptureFloating; + final RuptureScaling ruptureScaling; + + private Interface(Builder builder) { + super(builder.resource); + this.surfaceSpacing = builder.surfaceSpacing; + this.ruptureFloating = builder.ruptureFloating; + this.ruptureScaling = builder.ruptureScaling; + } + + static Builder builder(Path resource) { + return new Builder(resource); + } + + static class Builder { + + private boolean built = false; + + private Path resource; + private Double surfaceSpacing; + private RuptureFloating ruptureFloating; + private RuptureScaling ruptureScaling; + + private Builder(Path resource) { + this.resource = resource; + } + + Builder spacing(double spacing) { + checkArgument(spacing > 0.0); + this.surfaceSpacing = spacing; + return this; + } + + Builder floating(RuptureFloating floating) { + this.ruptureFloating = floating; + return this; + } + + Builder scaling(RuptureScaling scaling) { + this.ruptureScaling = scaling; + return this; + } + + Interface build() { + checkState(!built, "This builder has already been used"); + checkNotNull(resource); + checkNotNull(surfaceSpacing); + checkNotNull(ruptureFloating); + checkNotNull(ruptureScaling); + this.built = true; + return new Interface(this); + } + } + } + static class Grid extends SourceConfig { final double gridSpacing; diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/SourceFeature.java b/src/main/java/gov/usgs/earthquake/nshmp/model/SourceFeature.java index 0440985ff230c24335fb0abfcf3ef1ee509406cc..7e20819b33f5c7190576ef3f666cfb0e212c9034 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/SourceFeature.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/model/SourceFeature.java @@ -181,11 +181,16 @@ public abstract class SourceFeature { static final String STATE = "state"; static final String STATES = "states"; static final String DIP = "dip"; + static final String RATE_DIP = "rate-dip"; + static final String LENGTH = "length"; + static final String WIDTH = "width"; + static final String MAGNITUDE = "magnitude"; static final String UPPER_DEPTH = "upperDepth"; static final String LOWER_DEPTH = "lowerDepth"; static final String RAKE = "rake"; static final String RATE = "rate"; static final String RATES = "rates"; + static final String RATE_MAP = "rate-map"; static final String RATE_TYPE = "rateType"; static final String STRIKE = "strike"; static final String MFD_TREE = "mfd-tree"; @@ -212,8 +217,6 @@ public abstract class SourceFeature { /** The fault section rate. */ public final double rate; - // for RECURRENCE, rate set to 0.0; consider OptionalDouble - /** The fault section {@code RateType}. 
*/ public final RateType rateType; @@ -262,8 +265,20 @@ public abstract class SourceFeature { /** The fault section {@code RateType}. */ final RateType rateType; + /** The mapping of rates from different deformation models. */ final Map<String, Double> rateMap; + // TODO clean + // these will be populated in new features derived from model + // rateDip will only be present for normal faults with a dip-tree + // length and width will be provided so users can see source + // parameterization + + final OptionalDouble rateDip; + final OptionalDouble length; + final OptionalDouble width; + final OptionalDouble magnitude; + NshmFault(Feature section) { super(section); trace = section.asLineString(); @@ -279,6 +294,14 @@ public abstract class SourceFeature { rateType = RateType.valueOf(props.getString(Key.RATE_TYPE).orElseThrow()); rateMap = nshmRateMap(props); + + // optional fields for use in FaultRuptureSet.Builder + // putting these in feature for easy reference for Gmm parameterization + rateDip = props.getDouble(Key.RATE_DIP); + length = props.getDouble(Key.LENGTH); + width = props.getDouble(Key.WIDTH); + magnitude = props.getDouble(Key.MAGNITUDE); + // rate = props.getDouble(Key.RATE).orElseThrow(); // checkRate(rateType, rate); props.getString(Key.STATE).orElseThrow(); @@ -294,17 +317,9 @@ public abstract class SourceFeature { if (rate.isPresent()) { return Map.of("GEO", rate.getAsDouble()); } - - Map<String, Double> test = props.get("rate-map", Map.class).orElseThrow(); - // System.out.println(test.getClass()); - // - // System.out.println(test.keySet().iterator().next().getClass()); - // System.out.println(test.values().iterator().next().getClass()); - // System.out.println(test.entrySet().iterator().next()); - - // JsonObject map = - // props.getJsonElement("rate-map").orElseThrow().getAsJsonObject(); - return test; // Map.copyOf(test); + @SuppressWarnings("unchecked") + Map<String, Double> rateMap = props.get(Key.RATE_MAP, Map.class).orElseThrow(); + return rateMap; } /** A section of a subduction interface. 
*/ diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/SourceTree.java b/src/main/java/gov/usgs/earthquake/nshmp/model/SourceTree.java index f74fafbad238a24c7bf2c4b465a092baadd27f7d..54c2a8bfd0834154895f54d84b9f0d620a49cbc5 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/model/SourceTree.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/model/SourceTree.java @@ -106,9 +106,9 @@ public class SourceTree { this.leafWeights = builder.leafWeights; // TODO clean - for (Leaf leaf : leafBranches.keySet()) { - System.out.println(leafBranches.get(leaf) + " " + leafWeights.get(leaf)); - } + // for (Leaf leaf : leafBranches.keySet()) { + // System.out.println(leafBranches.get(leaf) + " " + leafWeights.get(leaf)); + // } } static Builder builder() { diff --git a/src/main/java/gov/usgs/earthquake/nshmp/tree/LogicTree.java b/src/main/java/gov/usgs/earthquake/nshmp/tree/LogicTree.java index 48cf93f12ba59cd5807a6b58915870edbc18f9aa..2a846bf055767891243af8cede44a3406c2c1514 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/tree/LogicTree.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/tree/LogicTree.java @@ -159,7 +159,9 @@ public interface LogicTree<T> extends Iterable<Branch<T>> { checkState(!branches.isEmpty(), "Empty logic tree"); double[] weights = weightArray(branches); DoubleData.checkWeightSum(weights); - cumulativeWeights = DoubleData.round(12, DoubleData.cumulate(weights)); + cumulativeWeights = DoubleData.round( + RegularTree.WEIGHT_SCALE, + DoubleData.cumulate(weights)); branches = List.copyOf(branches); return new RegularTree<T>(this); } diff --git a/src/main/java/gov/usgs/earthquake/nshmp/tree/RegularBranch.java b/src/main/java/gov/usgs/earthquake/nshmp/tree/RegularBranch.java index 65f846dbb3ead354a7e23e6b51b81eefad883b49..1aa2cacb956611356641fcbc39e2e9bba0abc297 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/tree/RegularBranch.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/tree/RegularBranch.java @@ -4,6 +4,8 @@ import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import static gov.usgs.earthquake.nshmp.data.DoubleData.checkWeight; +import gov.usgs.earthquake.nshmp.Maths; + /* Basic logic tree branch. */ class RegularBranch<T> implements Branch<T> { @@ -14,7 +16,7 @@ class RegularBranch<T> implements Branch<T> { RegularBranch(String id, T value, double weight) { this.id = checkId(id); this.value = checkNotNull(value); - this.weight = checkWeight(weight); + this.weight = checkWeight(Maths.round(weight, RegularTree.WEIGHT_SCALE)); } @Override @@ -43,6 +45,6 @@ class RegularBranch<T> implements Branch<T> { } static String toString(String id, double weight) { - return id + '[' + weight + ']'; + return id + " [" + weight + "]"; } } diff --git a/src/main/java/gov/usgs/earthquake/nshmp/tree/RegularTree.java b/src/main/java/gov/usgs/earthquake/nshmp/tree/RegularTree.java index 1ce917f07dcf4c9eeb1404a4bde382b98c9a227c..a28fcf6331dbd58dc59471c8c76c75d783d8e87e 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/tree/RegularTree.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/tree/RegularTree.java @@ -9,6 +9,9 @@ import gov.usgs.earthquake.nshmp.Text; /* Basic logic tree implementation. 
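Branch weights (see RegularBranch) and the cumulative weights computed in LogicTree.Builder.build() are rounded to WEIGHT_SCALE decimal places; for example, a weight supplied as 0.1 + 0.2 (0.30000000000000004 in double arithmetic) is stored as 0.3.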
*/ class RegularTree<T> implements LogicTree<T> { + // TODO document in addBranch and cumulate + static final int WEIGHT_SCALE = 12; + private final String name; private final List<Branch<T>> branches; private final double[] cumulativeWeights; diff --git a/src/test/java/gov/usgs/earthquake/nshmp/data/SequencesTests.java b/src/test/java/gov/usgs/earthquake/nshmp/data/SequencesTests.java index 1fb16a2faf0682e3be0e58920c26abb0a7fbaa51..185e9137a355b0beb40833886953475274dc4972 100644 --- a/src/test/java/gov/usgs/earthquake/nshmp/data/SequencesTests.java +++ b/src/test/java/gov/usgs/earthquake/nshmp/data/SequencesTests.java @@ -1,13 +1,19 @@ package gov.usgs.earthquake.nshmp.data; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; + +import java.util.Arrays; import org.junit.jupiter.api.Test; +import com.google.common.primitives.Doubles; + class SequencesTests { @Test - final void testNonZeroIndex() { + final void nonZeroIndexTests() { double[] values = new double[] { 0.0, 0.0, 1.0, 1.0, 0.0, 0.0 }; double[] zeros = new double[] { 0.0 }; int first = 2; @@ -19,4 +25,83 @@ class SequencesTests { assertEquals(-1, Sequences.lastNonZeroIndex(zeros), 0.0); } + @Test + final void arrayBuilderTests() { + + double[] expected = + new double[] { 5.05, 5.1499999999999995, 5.25, 5.35, 5.45, 5.55, 5.65, 5.75 }; + double[] actual = Sequences.arrayBuilder(5.05, 5.83, 0.1).centered().build(); + assertArrayEquals(expected, actual); + + expected = + new double[] { 5.05, 5.15, 5.25, 5.35, 5.45, 5.55, 5.65, 5.75, 5.85 }; + actual = Sequences.arrayBuilder(5.05, 5.85, 0.1).scale(4).centered().build(); + assertArrayEquals(expected, actual); + + expected = + new double[] { 6.55625, 6.66875, 6.78125, 6.89375, 7.00625, 7.11875, 7.23125, 7.34375 }; + actual = Sequences.arrayBuilder(6.55625, 7.34385, 0.1125).scale(5).centered().build(); + assertArrayEquals(expected, actual); + + expected = + new double[] { 6.55625, 6.66875, 6.78125, 6.89375, 7.00625, 7.11875, 7.23125, 7.34375 }; + actual = Sequences.arrayBuilder(6.5, 7.4, 0.1125).scale(5).build(); + assertArrayEquals(expected, actual); + + expected = Arrays.copyOf(expected, expected.length); + Doubles.reverse(expected); + actual = Sequences.arrayBuilder(6.5, 7.4, 0.1125).descending().scale(5).build(); + assertArrayEquals(expected, actual); + + expected = new double[] { 6.575, 6.725 }; + actual = Sequences.arrayBuilder(6.5, 6.8, 0.15).scale(5).build(); + assertArrayEquals(expected, actual); + + double maxNoRound = 6.7999999; // 2 value array expected + double[] expectNoRound = new double[] { 6.5, 6.65 }; + double[] actualNoRound = Sequences.arrayBuilder(6.5, maxNoRound, 0.15) + .scale(5) + .centered() + .build(); + assertArrayEquals(expectNoRound, actualNoRound); + + double maxRound = 6.79999999; // 3 value array expected + double[] expectRound = new double[] { 6.5, 6.65, 6.8 }; + double[] actualRound = Sequences.arrayBuilder(6.5, maxRound, 0.15) + .scale(5) + .centered() + .build(); + assertArrayEquals(expectRound, actualRound); + + } + + @Test + final void arrayBuilderExceptions() { + + /* finiteness */ + assertThrows(IllegalArgumentException.class, () -> { + Sequences.arrayBuilder(Double.NaN, 6.5, 0.1).build(); + }); + assertThrows(IllegalArgumentException.class, () -> { + Sequences.arrayBuilder(6.5, Double.NaN, 0.1).build(); + }); + assertThrows(IllegalArgumentException.class, () -> { + Sequences.arrayBuilder(6.0, 6.5, 
Double.NaN).build(); + }); + + /* max <= min */ + assertThrows(IllegalArgumentException.class, () -> { + Sequences.arrayBuilder(6.5, 6.5, 0.15).centered().build(); + }); + assertThrows(IllegalArgumentException.class, () -> { + Sequences.arrayBuilder(6.5, 6.6, 0.15).build(); + }); + + /* Δ <= 0 */ + assertThrows(IllegalArgumentException.class, () -> { + Sequences.arrayBuilder(6.5, 6.6, 0.0).centered().build(); + }); + + } + } diff --git a/src/test/java/gov/usgs/earthquake/nshmp/tree/LogicTreeTests.java b/src/test/java/gov/usgs/earthquake/nshmp/tree/LogicTreeTests.java index 3e2a836893efa644d9f739a26ec2beb904d103c8..1c96d456a2542da4ae82b0ffd62435cd4b336ced 100644 --- a/src/test/java/gov/usgs/earthquake/nshmp/tree/LogicTreeTests.java +++ b/src/test/java/gov/usgs/earthquake/nshmp/tree/LogicTreeTests.java @@ -108,7 +108,7 @@ class LogicTreeTests { } private static String branchString(String id, double weight) { - return id + '[' + weight + ']'; + return id + " [" + weight + "]"; } final static String TEST_TREE = "test-tree"; @@ -126,21 +126,17 @@ class LogicTreeTests { .addBranch(id[2], value[2], weight[2]) .build(); String treeString = new StringBuilder() - .append(TEST_TREE).append(" ┬ ").append(id[0]) - .append("[").append(weight[0]).append("]") + .append(TEST_TREE).append(" ┬ ").append(branchString(id[0], weight[0])) .append(Text.NEWLINE) - .append(" ├ ").append(id[1]) - .append("[").append(weight[1]).append("]") + .append(" ├ ").append(branchString(id[1], weight[1])) .append(Text.NEWLINE) - .append(" └ ").append(id[2]) - .append("[").append(weight[2]).append("]") + .append(" └ ").append(branchString(id[2], weight[2])) .toString(); assertEquals(treeString, tree.toString()); LogicTree<String> singletonTree = LogicTree.singleton(TEST_TREE, id[0], value[0]); String singletonTreeString = new StringBuilder() - .append(TEST_TREE).append(" ─ ").append(id[0]) - .append("[").append(1.0).append("]") + .append(TEST_TREE).append(" ─ ").append(branchString(id[0], 1.0)) .toString(); assertEquals(singletonTreeString, singletonTree.toString()); }
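For reference, a minimal sketch of the Sequences.arrayBuilder usage exercised in SequencesTests above; the inputs and printed values are copied from those assertions, while the class name and package placement are illustrative only (the builder's centered()/scale() methods may be package-private, so the sketch sits in the same package as the tests):

package gov.usgs.earthquake.nshmp.data;

import java.util.Arrays;

class SequencesSketch {
  public static void main(String[] args) {
    // Edge mode: bin edges in, bin centers out; first value is min + Δ/2, last is max - Δ/2.
    double[] centers = Sequences.arrayBuilder(6.5, 7.4, 0.1125).scale(5).build();
    System.out.println(Arrays.toString(centers));
    // [6.55625, 6.66875, 6.78125, 6.89375, 7.00625, 7.11875, 7.23125, 7.34375]

    // Centered mode: min and max are themselves the first and last sequence values.
    double[] centered = Sequences.arrayBuilder(5.05, 5.85, 0.1).scale(4).centered().build();
    System.out.println(Arrays.toString(centered));
    // [5.05, 5.15, 5.25, 5.35, 5.45, 5.55, 5.65, 5.75, 5.85]
  }
}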