diff --git a/src/main/java/gov/usgs/earthquake/nshmp/calc/EqRate.java b/src/main/java/gov/usgs/earthquake/nshmp/calc/EqRate.java
index 9fae6ba2fa9cdb373afb403e0830d937951d405b..b645e813a099bc84e163cf4388a3da0ae011688a 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/calc/EqRate.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/calc/EqRate.java
@@ -25,7 +25,6 @@ import gov.usgs.earthquake.nshmp.mfd.Mfd;
 import gov.usgs.earthquake.nshmp.model.ClusterRuptureSet;
 import gov.usgs.earthquake.nshmp.model.ClusterSource;
 import gov.usgs.earthquake.nshmp.model.Distance;
-import gov.usgs.earthquake.nshmp.model.FaultRuptureSet;
 import gov.usgs.earthquake.nshmp.model.HazardModel;
 import gov.usgs.earthquake.nshmp.model.Rupture;
 import gov.usgs.earthquake.nshmp.model.RuptureSet;
@@ -282,6 +281,7 @@ public class EqRate {
    * Nested fault rates are in fact weights that need to be scaled by the
    * cluster rate.
    */
+
   private static IntervalArray clusterMfd(
       ClusterRuptureSet ruptures,
       Location location,
@@ -291,11 +291,10 @@ public class EqRate {
     IntervalArray.Builder srcSetMfd = IntervalArray.Builder.fromModel(modelMfd);
     for (Source source : ruptures.iterableForLocation(location, distance)) {
       ClusterSource clusterSource = (ClusterSource) source;
-
-      for (FaultRuptureSet frs : clusterSource.faults()) {
+      for (RuptureSet<? extends Source> rs : clusterSource.ruptureSets()) {
         IntervalArray.Builder faultMfd = Builder
             .copyOf(faultMfd(
-                frs,
+                rs,
                 location,
                 distance,
                 modelMfd))
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/calc/Transforms.java b/src/main/java/gov/usgs/earthquake/nshmp/calc/Transforms.java
index b0b69821f10e65d513d4de866388ec6d645c2698..bddac1394698b9937273df5d3c8488be4f2eb91e 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/calc/Transforms.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/calc/Transforms.java
@@ -32,7 +32,6 @@ import gov.usgs.earthquake.nshmp.gmm.Imt;
 import gov.usgs.earthquake.nshmp.model.ClusterRuptureSet;
 import gov.usgs.earthquake.nshmp.model.ClusterSource;
 import gov.usgs.earthquake.nshmp.model.Distance;
-import gov.usgs.earthquake.nshmp.model.FaultRuptureSet;
 import gov.usgs.earthquake.nshmp.model.GmmSet;
 import gov.usgs.earthquake.nshmp.model.HazardModel;
 import gov.usgs.earthquake.nshmp.model.Rupture;
@@ -430,9 +429,9 @@ final class Transforms {
     @Override
     public ClusterInputs apply(ClusterSource clusterSource) {
       ClusterInputs clusterInputs = new ClusterInputs(clusterSource);
-      for (FaultRuptureSet frs : clusterSource.faults()) {
-        checkState(frs.size() == 1);
-        clusterInputs.add(transform.apply(frs.iterator().next()));
+      for (RuptureSet<? extends Source> ruptureSet : clusterSource.ruptureSets()) {
+        checkState(ruptureSet.size() == 1);
+        clusterInputs.add(transform.apply(ruptureSet.iterator().next()));
       }
       return clusterInputs;
     }
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/ClusterRuptureSet.java b/src/main/java/gov/usgs/earthquake/nshmp/model/ClusterRuptureSet.java
index 102502b978c942fe338a784f17a023ea285ea9a2..1013c45e17d6cb8b971f997cdaf2122cec0a8d8a 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/model/ClusterRuptureSet.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/model/ClusterRuptureSet.java
@@ -2,6 +2,8 @@ package gov.usgs.earthquake.nshmp.model;
 
 import static com.google.common.base.Preconditions.checkNotNull;
 import static com.google.common.base.Preconditions.checkState;
+import static gov.usgs.earthquake.nshmp.model.SourceType.FAULT;
+import static gov.usgs.earthquake.nshmp.model.SourceType.INTERFACE;
 
 import java.util.Iterator;
 import java.util.List;
@@ -13,17 +15,17 @@ import gov.usgs.earthquake.nshmp.tree.LogicGroup;
 import gov.usgs.earthquake.nshmp.tree.LogicTree;
 
 /**
- * Cluster source representation. Each cluster source wraps a
- * {@code FaultRuptureSet} containing one or more fault representations that
- * rupture as independent events but with a similar rate. For example, at New
- * Madrid, each ClusterRuptureSet has 5 ClusterSources, one for each position
- * variant of the model. For each position variant there is one FaultRuptureSet
- * containing the FaultSources in the cluster, each of which may have one, or
- * more, magnitude or other variants represented by its internal list of
- * {@code Mfd}s.
+ * Cluster source representation. Each cluster source wraps a {@code RuptureSet}
+ * containing one or more fault representations that rupture as independent
+ * events but with a similar rate. For example, at New Madrid, each
+ * ClusterRuptureSet has 5 ClusterSources, one for each position variant of the
+ * model. For each position variant there is one FaultRuptureSet containing the
+ * FaultSources in the cluster, each of which may have one, or more, magnitude
+ * or other variants represented by its internal list of {@code Mfd}s.
  *
  * <p>Cluster source hazard is calculated from the joint probabilities of ground
- * motions from the wrapped faults.
+ * motions from the wrapped faults. It is only used for finite fault and
+ * interface sources.
  *
  * <p>A {@code ClusterSource} cannot be created directly; it may only be created
  * by a private parser.
@@ -60,9 +62,9 @@ public class ClusterRuptureSet extends AbstractRuptureSet<ClusterSource> {
     double rate = clusterSource.rate();
 
     LogicGroup.Builder<Mfd> builder = LogicGroup.builder();
-    for (FaultRuptureSet frs : clusterSource.faults()) {
-      Mfd mfd = ModelTrees.reduceMfdTree(frs.mfdTree());
-      builder.addBranch(frs.name(), mfd, rate);
+    for (RuptureSet<? extends Source> ruptureSet : clusterSource.ruptureSets()) {
+      Mfd mfd = ModelTrees.reduceMfdTree(ruptureSet.mfdTree());
+      builder.addBranch(ruptureSet.name(), mfd, rate);
     }
     return builder.build();
   }
@@ -79,14 +81,39 @@ public class ClusterRuptureSet extends AbstractRuptureSet<ClusterSource> {
 
     return new Predicate<ClusterSource>() {
 
-      private final Predicate<FaultSource> filter =
+      private final Predicate<FaultSource> faultFilter =
           new FaultRuptureSet.DistanceFilter(loc, distance);
+      private final Predicate<InterfaceSource> interfaceFilter =
+          new InterfaceRuptureSet.DistanceFilter(loc, distance);
 
       @Override
       public boolean test(ClusterSource cs) {
-        return cs.faults().stream()
-            .map(frs -> frs.iterator().next())
-            .anyMatch(filter);
+        SourceType type = cs.ruptureSets().get(0).type();
+        if (type == FAULT) {
+          return cs.ruptureSets().stream()
+              .map(rs -> rs.iterator().next())
+              .map(source -> (FaultSource) source)
+              .anyMatch(faultFilter);
+
+        } else if (type == INTERFACE) {
+          return cs.ruptureSets().stream()
+              .map(rs -> rs.iterator().next())
+              .map(source -> (InterfaceSource) source)
+              .anyMatch(interfaceFilter);
+
+        } else {
+          throw new IllegalStateException("Invalid cluster sources type: " + type);
+        }
+        // Predicate<? extends Source> filter = (type == FAULT)
+        //     ? faultFilter
+        //     : interfaceFilter;
+        //
+        // // this knows that there shouldn't be floating ruptures and so
+        // // taking the first rupture will do an adequate geometry test
+        // return cs.ruptureSets().stream()
+        //     .map(rs -> rs.iterator().next())
+        //     .map(source -> (FaultSource) source)
+        //     .anyMatch(faultFilter);
       }
     };
   }
@@ -162,26 +189,26 @@ public class ClusterRuptureSet extends AbstractRuptureSet<ClusterSource> {
     final String name;
     final int id;
     final ModelData data;
-    final List<FaultRuptureSet> faultRuptureSets;
+    final List<? extends RuptureSet<? extends Source>> ruptureSets;
 
     Data(
         String name,
         int id,
         ModelData data,
-        List<FaultRuptureSet> faultRuptureSets) {
+        List<? extends RuptureSet<? extends Source>> ruptureSets) {
 
-      if (faultRuptureSets.size() > 1) {
-        for (int i = 1; i < faultRuptureSets.size(); i++) {
+      if (ruptureSets.size() > 1) {
+        for (int i = 1; i < ruptureSets.size(); i++) {
           ModelTrees.checkTreeIdsAndWeights(
-              faultRuptureSets.get(0).mfdTree,
-              faultRuptureSets.get(i).mfdTree);
+              ruptureSets.get(0).mfdTree(),
+              ruptureSets.get(i).mfdTree());
         }
       }
 
       this.name = name;
       this.id = id;
       this.data = data;
-      this.faultRuptureSets = faultRuptureSets;
+      this.ruptureSets = ruptureSets;
     }
   }
 
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/ClusterSource.java b/src/main/java/gov/usgs/earthquake/nshmp/model/ClusterSource.java
index c40688b6351fc794de5f26629d4dd3d4b3bb09da..4eaab9a8958774d04306306c7530c1f35cefe089 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/model/ClusterSource.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/model/ClusterSource.java
@@ -41,13 +41,13 @@ public class ClusterSource implements Source {
   final String name;
   final int id;
   final double rate;
-  final List<FaultRuptureSet> faults;
+  final List<? extends RuptureSet<? extends Source>> ruptureSets;
 
   ClusterSource(Builder builder) {
     this.name = builder.name;
     this.id = builder.id;
     this.rate = builder.rate;
-    this.faults = builder.faults;
+    this.ruptureSets = builder.ruptureSets;
   }
 
   @Override
@@ -57,7 +57,7 @@ public class ClusterSource implements Source {
 
   @Override
   public int size() {
-    return faults.size();
+    return ruptureSets.size();
   }
 
   @Override
@@ -77,8 +77,8 @@ public class ClusterSource implements Source {
   @Override
   public Location location(Location site) {
     LocationList.Builder locs = LocationList.builder();
-    for (FaultRuptureSet fault : faults) {
-      locs.add(fault.location(site));
+    for (RuptureSet<? extends Source> ruptureSet : ruptureSets) {
+      locs.add(ruptureSet.location(site));
     }
     return Locations.closestPoint(site, locs.build());
   }
@@ -87,9 +87,10 @@
   public List<Mfd> mfds() {
     // TODO this should be consistent with what is done in Models
     List<Mfd> xyMfds = new ArrayList<>();
-    for (FaultRuptureSet frs : faults) {
-      checkState(frs.size() == 1); // TODO bad assumption of single source
-      for (Mfd mfd : frs.iterator().next().mfds()) {
+    for (RuptureSet<? extends Source> ruptureSet : ruptureSets) {
+      // TODO bad assumption of single source
+      checkState(ruptureSet.size() == 1);
+      for (Mfd mfd : ruptureSet.iterator().next().mfds()) {
         xyMfds.add(Mfd.Builder.from(mfd).scale(rate).build());
       }
     }
@@ -105,11 +106,11 @@
   }
 
   /**
-   * The {@code FaultRuptureSet} of all {@code FaultSource}s that participate in
-   * this cluster.
+   * The {@code RuptureSet} of all {@code Source}s that participate in this
+   * cluster.
    */
-  public List<FaultRuptureSet> faults() {
-    return faults;
+  public List<? extends RuptureSet<? extends Source>> ruptureSets() {
+    return ruptureSets;
   }
 
   /**
@@ -131,9 +132,9 @@
         .append(" ")
         .append(data)
         .append(LINE_SEPARATOR.value());
-    for (FaultRuptureSet frs : faults) {
+    for (RuptureSet<? extends Source> ruptureSet : ruptureSets) {
       sb.append(" ")
-          .append(frs.toString())
+          .append(ruptureSet.toString())
           .append(LINE_SEPARATOR.value());
     }
     return sb.toString();
   }
@@ -147,7 +148,7 @@
     String name;
     Integer id;
     Double rate;
-    List<FaultRuptureSet> faults;
+    List<? extends RuptureSet<? extends Source>> ruptureSets;
 
     Builder name(String name) {
       this.name = checkName(name, "ClusterSource");
@@ -165,10 +166,11 @@
       return this;
     }
 
-    Builder faults(List<FaultRuptureSet> faults) {
-      checkState(checkNotNull(faults, "Fault source set is null").size() > 0,
-          "Fault source set is empty");
-      this.faults = faults;
+    Builder ruptureSets(List<? extends RuptureSet<? extends Source>> ruptureSets) {
+      checkNotNull(ruptureSets);
+      checkState(ruptureSets.size() > 0, "Fault source set is empty");
+      // check either interface or fault?
+      this.ruptureSets = ruptureSets;
       return this;
     }
 
@@ -177,7 +179,7 @@
       checkNotNull(name, "%s name", label);
       checkNotNull(id, "%s id", label);
       checkNotNull(rate, "%s rate", label);
-      checkNotNull(faults, "%s fault rupture sets", label);
+      checkNotNull(ruptureSets, "%s rupture sets", label);
 
       built = true;
     }
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/Deserialize.java b/src/main/java/gov/usgs/earthquake/nshmp/model/Deserialize.java
index a8255a1df695ed11d4ff80efd35dbb802b307fc9..e2b3276d01bbdc4a68f6d9cf4edd954d3a6a5f35 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/model/Deserialize.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/model/Deserialize.java
@@ -317,9 +317,19 @@
   }
 
   /* Create an interface rupture set. */
-  static InterfaceRuptureSet interfaceRuptureSet(Path json, ModelData data) {
+  static InterfaceRuptureSet interfaceRuptureSet(
+      Path json,
+      ModelData data) {
 
     JsonObject obj = jsonObject(json);
+    return interfaceRuptureSet(obj, data);
+  }
+
+  /* Create an interface rupture set. */
+  private static InterfaceRuptureSet interfaceRuptureSet(
+      JsonObject obj,
+      ModelData data) {
+
     int id = obj.get(ID).getAsInt();
 
     InterfaceRuptureSet.Builder ruptureSet = InterfaceRuptureSet.builder()
@@ -338,6 +348,33 @@
     return ruptureSet.build();
   }
 
+  /* Create a cluster rupture set, initialized with data from file. */
+  static ClusterRuptureSet.Data interfaceClusterSet(
+      Path json,
+      ModelData data) {
+
+    JsonObject obj = jsonObject(json);
+
+    /* Set cluster flag so faultRuptureSet MFDs are built correctly. */
+    data.clusterModel();
+
+    List<InterfaceRuptureSet> interfaceRuptureSets = new ArrayList<>();
+    JsonArray ruptures = obj.get(RUPTURE_SETS).getAsJsonArray();
+    for (JsonElement rupture : ruptures) {
+      InterfaceRuptureSet ruptureSet = interfaceRuptureSet(rupture.getAsJsonObject(), data);
+      // set leaf weight set here for nested FaultRuptureSets as they
+      // will not be set by source tree on build.
+      ruptureSet.setLeafWeight(1.0);
+      interfaceRuptureSets.add(ruptureSet);
+    }
+
+    return new ClusterRuptureSet.Data(
+        obj.get(NAME).getAsString(),
+        obj.get(ID).getAsInt(),
+        data,
+        interfaceRuptureSets);
+  }
+
   /* Create a fault system rupture set. */
   static SystemRuptureSet systemRuptureSet(Path json, ModelData data) {
 
@@ -394,7 +431,7 @@
   }
 
   /* Create a cluster rupture set, initialized with data from file. */
-  static ClusterRuptureSet.Data clusterRuptureSetData(
+  static ClusterRuptureSet.Data faultClusterSet(
       Path json,
       ModelData data) {
 
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/InterfaceRuptureSet.java b/src/main/java/gov/usgs/earthquake/nshmp/model/InterfaceRuptureSet.java
index a0211f848e70b0af088ea499179066b2a457373b..c596a17246945be31d6fe28148322e293e352bb8 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/model/InterfaceRuptureSet.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/model/InterfaceRuptureSet.java
@@ -82,21 +82,25 @@ public class InterfaceRuptureSet extends AbstractRuptureSet<InterfaceSource> {
   }
 
   @Override
-  public Predicate<InterfaceSource> distanceFilter(
-      Location loc,
-      double distance) {
-
-    return new Predicate<InterfaceSource>() {
-      private Predicate<Location> filter = Locations.distanceFilter(loc, distance);
-
-      @Override
-      public boolean test(InterfaceSource source) {
-        return filter.test(source.upperTrace.first()) ||
-            filter.test(source.upperTrace.last()) ||
-            filter.test(source.lowerTrace.first()) ||
-            filter.test(source.lowerTrace.last());
-      }
-    };
+  public Predicate<InterfaceSource> distanceFilter(Location loc, double distance) {
+    return new DistanceFilter(loc, distance);
+  }
+
+  /* Not inlined for use by cluster sources */
+  static class DistanceFilter implements Predicate<InterfaceSource> {
+    private final Predicate<Location> filter;
+
+    DistanceFilter(Location loc, double distance) {
+      filter = Locations.distanceFilter(loc, distance);
+    }
+
+    @Override
+    public boolean test(InterfaceSource source) {
+      return filter.test(source.upperTrace.first()) ||
+          filter.test(source.upperTrace.last()) ||
+          filter.test(source.lowerTrace.first()) ||
+          filter.test(source.lowerTrace.last());
+    }
   }
 
   /**
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/ModelFiles.java b/src/main/java/gov/usgs/earthquake/nshmp/model/ModelFiles.java
index ed3b99350f7e0cf7de0106a07f9bfc958d46961b..16cbcb033cde884fd471d163e58f3b3e77401255 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/model/ModelFiles.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/model/ModelFiles.java
@@ -230,6 +230,11 @@ class ModelFiles {
     return read(dir, RUPTURE_SET, data, Deserialize::interfaceRuptureSet);
   }
 
+  /* Interface cluster-set data. */
+  static Optional<ClusterRuptureSet.Data> readInterfaceClusterSet(Path dir, ModelData data) {
+    return read(dir, CLUSTER_SET, data, Deserialize::interfaceClusterSet);
+  }
+
   /* Fault rupture-set. */
   static Optional<FaultRuptureSet> readFaultRuptureSet(Path dir, ModelData data) {
     return read(dir, RUPTURE_SET, data, Deserialize::faultRuptureSet);
@@ -237,7 +242,7 @@
 
   /* Fault cluster-set data. */
   static Optional<ClusterRuptureSet.Data> readFaultClusterSet(Path dir, ModelData data) {
-    return read(dir, CLUSTER_SET, data, Deserialize::clusterRuptureSetData);
+    return read(dir, CLUSTER_SET, data, Deserialize::faultClusterSet);
   }
 
   /* Fault system rupture-set. */
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/ModelLoader.java b/src/main/java/gov/usgs/earthquake/nshmp/model/ModelLoader.java
index 07e175f375f46106a32baa38f45571ff56a195ba..f980cb380b9b61fe98df6eb43237c127b6c696e6 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/model/ModelLoader.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/model/ModelLoader.java
@@ -19,6 +19,7 @@ import static gov.usgs.earthquake.nshmp.model.ModelFiles.readGmms;
 import static gov.usgs.earthquake.nshmp.model.ModelFiles.readGridConfig;
 import static gov.usgs.earthquake.nshmp.model.ModelFiles.readGridFeatures;
 import static gov.usgs.earthquake.nshmp.model.ModelFiles.readGridRuptureSets;
+import static gov.usgs.earthquake.nshmp.model.ModelFiles.readInterfaceClusterSet;
 import static gov.usgs.earthquake.nshmp.model.ModelFiles.readInterfaceConfig;
 import static gov.usgs.earthquake.nshmp.model.ModelFiles.readInterfaceFeatures;
 import static gov.usgs.earthquake.nshmp.model.ModelFiles.readInterfaceRuptureSet;
@@ -594,8 +595,8 @@ abstract class ModelLoader {
       if (frs.isPresent()) {
         treeBuilder.addLeaf(branch, frs.orElseThrow());
       } else {
-        var crsd = readFaultClusterSet(dir, data).orElseThrow();
-        processClusterBranch(branch, treeBuilder, data, crsd);
+        var fcs = readFaultClusterSet(dir, data).orElseThrow();
+        processClusterBranch(branch, treeBuilder, data, fcs);
       }
     }
   }
@@ -632,7 +633,7 @@
           .name(clusterLabel)
           .id(clusterData.id)
           .rate(rateBranch.value()) // cluster rate
-          .faults(clusterData.faultRuptureSets)
+          .ruptureSets(clusterData.ruptureSets)
           .buildClusterSource();
 
       ClusterRuptureSet crs = ClusterRuptureSet.builder()
@@ -900,9 +901,76 @@ abstract class ModelLoader {
         }
 
       } else {
-        treeBuilder.addLeaf(
-            branch,
-            readInterfaceRuptureSet(dir, data).orElseThrow());
+
+        /* Could have fault or cluster rupture-set. */
+        var irs = readInterfaceRuptureSet(dir, data);
+        if (irs.isPresent()) {
+          treeBuilder.addLeaf(branch, irs.orElseThrow());
+        } else {
+          var ics = readInterfaceClusterSet(dir, data).orElseThrow();
+          processClusterBranch(branch, treeBuilder, data, ics);
+        }
+
+        // treeBuilder.addLeaf(
+        //     branch,
+        //     readInterfaceRuptureSet(dir, data).orElseThrow());
       }
     }
+
+    /*
+     * Process fault clusters adding additional cluster rate branches. The
+     * supplied cluster-set is used as a model to create a tree of rate
+     * branches, each with the same cluster geometry.
+     */
+    private void processClusterBranch(
+        Branch<Path> branch,
+        SourceTree.Builder treeBuilder,
+        ModelData data,
+        ClusterRuptureSet.Data clusterData) {
+
+      LogicTree<Double> rateTree = data.rateTree().orElseThrow();
+      LogicTree.Builder<Path> pathTreeBuilder = LogicTree.builder(rateTree.name());
+      List<ClusterRuptureSet> crsList = new ArrayList<>();
+
+      for (int i = 0; i < rateTree.size(); i++) {
+
+        Branch<Double> rateBranch = rateTree.get(i);
+        double rate = (rateBranch.value() <= 0.0) ? 0.0 : rateBranch.value();
+        String rateLabel = rateBranch.id() + ((rate == 0.0)
+            ? "-null"
+            : "-" + String.valueOf((int) Math.round(1.0 / rate)) + "-yr");
+        String clusterLabel = clusterData.name + " : " + rateLabel;
+
+        Path clusterPath = branch.value().resolve(rateLabel);
+        Path branchPath = root.relativize(clusterPath);
+        System.out.println(" branch: " + branchPath);
+
+        ClusterSource clusterSource = new ClusterSource.Builder()
+            .name(clusterLabel)
+            .id(clusterData.id)
+            .rate(rateBranch.value()) // cluster rate
+            .ruptureSets(clusterData.ruptureSets)
+            .buildClusterSource();
+
+        ClusterRuptureSet crs = ClusterRuptureSet.builder()
+            .name(clusterLabel)
+            .id(clusterData.id)
+            .setSource(clusterSource)
+            .modelData(data)
+            .build();
+        crsList.add(crs);
+
+        pathTreeBuilder.addBranch(
+            clusterLabel,
+            clusterPath,
+            rateBranch.weight());
+      }
+
+      LogicTree<Path> pathTree = pathTreeBuilder.build();
+      treeBuilder.addBranches(branch, pathTree);
+
+      for (int i = 0; i < pathTree.size(); i++) {
+        treeBuilder.addLeaf(pathTree.get(i), crsList.get(i));
+      }
+    }
   }