From 093cc9b65dfd1b134e52d6bf9d314eb59638e2d8 Mon Sep 17 00:00:00 2001
From: Peter Powers <pmpowers@usgs.gov>
Date: Tue, 8 Dec 2015 12:46:02 -0700
Subject: [PATCH] deaggregation nested class cleanup

---
 src/org/opensha2/calc/Calcs.java         |   2 +-
 src/org/opensha2/calc/Deaggregation.java | 416 ++++++++++++++---------
 src/org/opensha2/calc/Hazard.java        |  20 +-
 src/org/opensha2/calc/Results.java       |   6 +-
 4 files changed, 266 insertions(+), 178 deletions(-)

diff --git a/src/org/opensha2/calc/Calcs.java b/src/org/opensha2/calc/Calcs.java
index 1fbfc8709..06f804d27 100644
--- a/src/org/opensha2/calc/Calcs.java
+++ b/src/org/opensha2/calc/Calcs.java
@@ -102,7 +102,7 @@ public class Calcs {
 		checkNotNull(hazard);
 		checkInRange(rpRange, "Return period", returnPeriod);
 
-		return Deaggregation.create(hazard, returnPeriod);
+		return Deaggregation.atReturnPeriod(hazard, returnPeriod);
 	}
 
 	/**
diff --git a/src/org/opensha2/calc/Deaggregation.java b/src/org/opensha2/calc/Deaggregation.java
index a3e7c0463..250ec43aa 100644
--- a/src/org/opensha2/calc/Deaggregation.java
+++ b/src/org/opensha2/calc/Deaggregation.java
@@ -2,8 +2,10 @@ package org.opensha2.calc;
 
 import static com.google.common.base.Preconditions.checkNotNull;
 import static com.google.common.base.Preconditions.checkState;
+import static com.google.common.base.Strings.padEnd;
 import static org.opensha2.data.Data.checkInRange;
 import static org.opensha2.util.TextUtils.NEWLINE;
+import static org.opensha2.util.TextUtils.format;
 import static org.opensha2.data.Data.multiply;
 import static com.google.common.primitives.Doubles.toArray;
 
@@ -16,7 +18,6 @@ import java.util.Map.Entry;
 import java.util.Set;
 
 import org.opensha2.calc.CalcConfig.DeaggData;
-import org.opensha2.calc.Deaggregation.Dataset.Builder;
 import org.opensha2.data.Data;
 import org.opensha2.data.DataTable;
 import org.opensha2.data.DataTables;
@@ -42,134 +43,222 @@ import com.google.common.collect.Multimaps;
 import com.google.common.collect.Range;
 
 /**
- * For one (or each Imt) One Deagg per source set and ground motion model these
- * are then combined for total deagg and also combined across each unique gmm
+ * Hazard deaggregation. Given a {@link Hazard} result, this class
+ * deaggregates it at all spectral periods present in the result, at an
+ * intensity measure level or return period of interest.
 *
 * @author Peter Powers
 */
 public final class Deaggregation {
 
-	// TODO get us from CalcConfig
-	private static final ExceedanceModel EXCEEDANCE = ExceedanceModel.TRUNCATION_UPPER_ONLY;
-	private static final double trunc = 3.0;
-
 	/*
-	 * TODO should we preflight against model; specifically, a hazard results
-	 * will include sources out to whatever distance is specified by gmm.xml and
-	 * mfds will be whatever has been specified; does it make sense to disallow
-	 * deaggregation if M and R exceed that which the deagg is capable of
-	 * handling?
-	 *
-	 * really should have auto deagg scaling; how best to prescan contributing
-	 * sources? Could loop source set curves, determine if total rate for ss is
-	 * greater than some min cutoff and use remaining source set s to set r and
-	 * m binning - also set some minimum?
-	 * although this could be set on client
-	 *
-	 * ss should be able to indicate a magnitude range; r range comes from gmms
-	 *
-	 * I think we should screen contributing source sets and log warning if
-	 * specified deagg config does not span hazard result range
-	 *
-	 * Deagg is going to operate on all relevant sources, if a source is out of
-	 * range, that's ok, contributing source lists will still have the total
-	 * contribution deagg limits are strange and really should just be used to
-	 * limit plot dimensions In addition to logging, results should come with a
-	 * warning
+	 * Developer notes and TODO
+	 * -------------------------------------------------------------------------
+	 * Consider auto-scaling of results (dataset bounds).
+	 * -------------------------------------------------------------------------
+	 * Consider warnings if config does not span source set range.
+	 * -------------------------------------------------------------------------
+	 * Deaggregate on probability of occurrence instead of exceedance.
+	 * -------------------------------------------------------------------------
+	 * Revisit precision issues associated with an integer-based return period;
+	 * 2% in 50 years is really 0.00040405414, not 1/2475 = 0.0004040404
 	 */
 
-	private final Hazard hazard;
-	private final Dataset model;
-	private final double returnPeriod;
-	private final Map<Imt, Deagg> deaggs;
-
-	private Deaggregation(
-			Hazard hazard,
-			Dataset model,
-			double returnPeriod,
-			Map<Imt, Deagg> deaggs) {
+	private final Map<Imt, ImtDeagg> deaggs;
 
-		this.hazard = hazard;
-		this.model = model;
-		this.returnPeriod = returnPeriod;
+	private Deaggregation(Map<Imt, ImtDeagg> deaggs) {
 		this.deaggs = deaggs;
 	}
 
-	public Exporter export(Imt imt) {
-		return new Exporter(deaggs.get(imt).totalDataset, "Total");
-	}
+	/**
+	 * Deaggregate {@code hazard} at the intensity measure level corresponding
+	 * to the supplied {@code returnPeriod}.
+	 *
+	 * @param hazard to deaggregate.
+	 * @param returnPeriod at which to deaggregate {@code hazard}
+	 */
+	public static Deaggregation atReturnPeriod(Hazard hazard, double returnPeriod) {
+		Map<Imt, ImtDeagg> imtDeaggMap = Maps.newEnumMap(Imt.class);
+		Config.Builder cb = Config.builder(hazard);
+		double rate = 1.0 / returnPeriod;
 
-	public Exporter export(Imt imt, Gmm gmm) {
-		return new Exporter(deaggs.get(imt).gmmDatasets.get(gmm), gmm.toString());
+		for (Entry<Imt, XySequence> entry : hazard.totalCurves.entrySet()) {
+			Imt imt = entry.getKey();
+			double iml = IML_INTERPOLATE.findX(entry.getValue(), rate);
+			Config config = cb.imt(imt).iml(iml, rate, returnPeriod).build();
+			System.out.println(config);
+			ImtDeagg imtDeagg = ImtDeagg.create(hazard, config);
+			imtDeaggMap.put(imt, imtDeagg);
+		}
 
+		return new Deaggregation(Maps.immutableEnumMap(imtDeaggMap));
 	}
 
-	// all HazardResult curves are in x-log space already
+	/* Hazard curves are already in log-x space. */
 	private static final Interpolator IML_INTERPOLATE = Interpolator.builder()
 		.logy()
 		.decreasingX()
 		.build();
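For orientation, the IML_INTERPOLATE lookup above reduces to linear interpolation
of log(rate) over curve x-values that are already log(IML). A minimal standalone
sketch of the same idea, assuming plain arrays; the method and variable names
below are illustrative and are not part of the opensha2 API:

	// Find the x (log-IML) at which a hazard curve with decreasing y
	// (annual exceedance rate) crosses a target rate. The y-values are
	// interpolated in log space, mirroring what the configured
	// Interpolator.builder().logy().decreasingX() instance does above.
	static double xAtY(double[] xs, double[] ys, double targetY) {
		// assumes all rates are positive and strictly decreasing
		for (int i = 0; i < ys.length - 1; i++) {
			if (ys[i] >= targetY && targetY >= ys[i + 1]) {
				double f = (Math.log(targetY) - Math.log(ys[i])) /
					(Math.log(ys[i + 1]) - Math.log(ys[i]));
				return xs[i] + f * (xs[i + 1] - xs[i]);
			}
		}
		throw new IllegalArgumentException("target rate not spanned by curve");
	}

For a 2475-year return period the target rate is 1.0 / 2475 ≈ 4.0404e-4; as the
developer note above observes, the true 2%-in-50-year rate, -ln(1 - 0.02) / 50 ≈
4.0405e-4, differs slightly from that value.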
-	// TODO return period is problematic if defined by an integer
-	// number of years; for instance we'll not get the true 2%in50
-	// for a return period of 2475, or whatever other, rate recovered
-	public static Deaggregation create(Hazard hazard, double returnPeriod) {
+	/**
+	 * Deaggregate {@code hazard} at the supplied intensity measure level.
+	 *
+	 * @param hazard to deaggregate.
+	 * @param iml intensity measure level at which to deaggregate {@code hazard}
+	 */
+	public static Deaggregation atIml(Hazard hazard, double iml) {
 
-		Map<Imt, Deagg> imtDeaggMap = Maps.newEnumMap(Imt.class);
-		Dataset model = Dataset.builder(hazard.config).build();
+		Map<Imt, ImtDeagg> imtDeaggMap = Maps.newEnumMap(Imt.class);
+		Config.Builder cb = Config.builder(hazard);
 
 		for (Entry<Imt, XySequence> entry : hazard.totalCurves.entrySet()) {
 			Imt imt = entry.getKey();
-			double rate = 1.0 / returnPeriod;
-			double iml = IML_INTERPOLATE.findX(entry.getValue(), rate);
-
-			Deagg deagg = new Deagg(hazard, model, imt, rate, iml);
-			imtDeaggMap.put(imt, deagg);
+			double rate = RATE_INTERPOLATE.findY(entry.getValue(), iml);
+			double returnPeriod = 1.0 / rate;
+			Config config = cb.imt(imt).iml(iml, rate, returnPeriod).build();
+			ImtDeagg imtDeagg = ImtDeagg.create(hazard, config);
+			imtDeaggMap.put(imt, imtDeagg);
 		}
 
-		return new Deaggregation(
-			hazard,
-			model,
-			returnPeriod,
-			Maps.immutableEnumMap(imtDeaggMap));
+		return new Deaggregation(Maps.immutableEnumMap(imtDeaggMap));
 	}
 
-	// TODO need to provide string summary
-	@Override public String toString() {
-		// Entry<Imt, Deagg> entry = deaggs.entrySet().iterator().next();
-		StringBuilder sb = new StringBuilder();
-		for (Entry<Imt, Deagg> entry : deaggs.entrySet()) {
-			sb.append("Deagg for IMT: ").append(entry.getKey()).append(NEWLINE);
-			sb.append(entry.getValue());
+	/* Hazard curves are already in log-x space. */
+	private static final Interpolator RATE_INTERPOLATE = Interpolator.builder()
+		.logy()
+		.build();
+
+	/*
+	 * A deaggregation configuration container. This class provides a reusable
+	 * builder that comes in handy when iterating over IMTs, where only the
+	 * return period and IML require updating. A unique config is required for
+	 * each deaggregation performed.
+	 *
+	 * Note that this is a convenience class that assumes the return period
+	 * and IML are in agreement for the IMT of interest, i.e. one or the other
+	 * has been correctly derived from the total curve in a hazard object.
+ */ + static class Config { + + final Imt imt; + final Dataset model; + final double iml; + final double rate; + final double returnPeriod; + final ExceedanceModel probabilityModel; + final double truncation; + + private Config( + Imt imt, + Dataset model, + double iml, + double rate, + double returnPeriod, + ExceedanceModel probabilityModel, + double truncation) { + + this.imt = imt; + this.model = model; + this.iml = iml; + this.rate = rate; + this.returnPeriod = returnPeriod; + this.probabilityModel = probabilityModel; + this.truncation = truncation; } - return sb.toString(); + + @Override public String toString() { + return new StringBuilder("Deagg calc config:") + .append(format("imt")).append(imt).append(" [").append(imt.name()).append("]") + .append(format("iml")).append(iml) + .append(format("rate")).append(rate) + .append(format("returnPeriod")).append(returnPeriod).append(" years") + .append(format("probabilityModel")).append(probabilityModel) + .append(" [trunc = ").append(truncation).append("]") + .toString(); + } + + static Builder builder(Hazard hazard) { + return new Builder() + .dataModel( + Dataset.builder(hazard.config).build()) + .probabilityModel( + hazard.config.exceedanceModel, + hazard.config.truncationLevel); + } + + /* Reusable builder */ + static class Builder { + + private Imt imt; + private Dataset model; + private Double iml; + private Double rate; + private Double returnPeriod; + private ExceedanceModel probabilityModel; + private Double truncation; + + Builder imt(Imt imt) { + this.imt = imt; + return this; + } + + Builder dataModel(Dataset model) { + this.model = model; + return this; + } + + /* + * Supply the target iml along with corresponding annual rate and + * return period for the IMT of interest. + */ + Builder iml(double iml, double rate, double returnPeriod) { + this.iml = iml; + this.rate = rate; + this.returnPeriod = returnPeriod; + return this; + } + + Builder probabilityModel(ExceedanceModel probabilityModel, double truncation) { + this.probabilityModel = probabilityModel; + this.truncation = truncation; + return this; + } + + Config build() { + return new Config( + checkNotNull(imt), + checkNotNull(model), + checkNotNull(iml), + checkNotNull(rate), + checkNotNull(returnPeriod), + checkNotNull(probabilityModel), + checkNotNull(truncation)); + } + } + } /* One per Imt in supplied Hazard. */ - static class Deagg { - - // final HazardResult hazard; - // final Imt imt; + static class ImtDeagg { + final Config config; final Dataset totalDataset; - - /* Reduction to Gmms. 
*/ final Map<Gmm, Dataset> gmmDatasets; - Deagg(Hazard hazard, Dataset model, Imt imt, double rate, double iml) { - // this.hazard = hazard; - // this.imt = imt; + static ImtDeagg create(Hazard hazard, Config config) { + return new ImtDeagg(hazard, config); + } + + private ImtDeagg(Hazard hazard, Config config) { + this.config = config; ListMultimap<Gmm, Dataset> datasets = MultimapBuilder .enumKeys(Gmm.class) .arrayListValues() .build(); - for (HazardCurveSet curveSet : hazard.sourceSetMap.values()) { - - Map<Gmm, Dataset> sourceSetDatasets = Deaggregator.of(curveSet) - .withDataModel(model) - .forImt(imt) - .atIml(rate, iml) - .deaggregate(); + for (HazardCurveSet curveSet : hazard.sourceSetCurves.values()) { + Map<Gmm, Dataset> sourceSetDatasets = Deaggregator.deaggregate(curveSet, config); /* * Each dataset (above) contains the contributing sources (rate @@ -177,7 +266,7 @@ public final class Deaggregation { * * barWeight = sourceSet rate */ - + // TODO clean // for (Entry<Gmm, Dataset> entry : // sourceSetDatasets.entrySet()) { // Dataset d = entry.getValue(); @@ -216,11 +305,27 @@ public final class Deaggregation { // sb.append(index++).append(": ").append(source).append(NEWLINE); totalRate += (source.rate + source.skipRate); } - sb.append("TOTAL via sources: " + totalRate).append(NEWLINE); - sb.append("TOTAL via barWt : " + totalDataset.barWeight).append(NEWLINE); - sb.append(NEWLINE); - sb.append(totalDataset.rmε); - sb.append(NEWLINE); + // sb.append("TOTAL via sources: " + totalRate).append(NEWLINE); + // sb.append("TOTAL via barWt : " + + // totalDataset.barWeight).append(NEWLINE); + for (Entry<SourceSet<? extends Source>, Double> entry : totalDataset.sourceSets + .entrySet()) { + + SourceSet<? extends Source> sourceSet = entry.getKey(); + double contribution = entry.getValue(); + sb.append(sourceSet); + sb.append("Contrib: ").append(contribution); + sb.append(NEWLINE); +// sb.append(" Id: ").append(padEnd(Integer.toString(sourceSet.id()), 8, ' ')); +// sb.append("Name: ").append(padEnd(name(), 38, ' ')); +// sb.append("Size: ").append(padEnd(Integer.toString(size()), 8, ' ')); +// sb.append("Weight: ").append(padEnd(Double.toString(weight), 12, ' ')); + } +// sb.append(totalDataset.sourceSets).append(NEWLINE).append(NEWLINE); + // sb.append(totalDataset.sources).append(NEWLINE).append(NEWLINE); +// sb.append(NEWLINE); +// sb.append(totalDataset.rmε); +// sb.append(NEWLINE); return sb.toString(); } @@ -238,9 +343,10 @@ public final class Deaggregation { }; /* - * TODO track and report ranked source set contributions TODO track and - * report ranked sources; may have source with same name in different - * sourceSets + * TODO track and report ranked source set contributions + * + * TODO track and report ranked sources; may have source with same name in + * different sourceSets */ // do we want to track the relative location in each distance bin: @@ -283,39 +389,38 @@ public final class Deaggregation { } } - /* Builder pattern; one per source set. */ + /* One deaggregator per source set. */ private static class Deaggregator { - private final HazardCurveSet hazard; + private final HazardCurveSet curves; private final SourceSet<? 
extends Source> sources; private final GmmSet gmmSet; - /* - * Empty 'model' is used to initialize all builders; also used for - * distance and magnitude index lookups in processSource() - */ - private Dataset model; - private Map<Gmm, Dataset.Builder> datasetBuilders; + private final Imt imt; + private final Dataset model; + private final double iml; + private final ExceedanceModel probModel; + private final double trunc; - private Imt imt; - private Double rate; - private Double iml; + private final Map<Gmm, Dataset.Builder> datasetBuilders; - private Deaggregator(HazardCurveSet hazard) { - this.hazard = hazard; - this.sources = hazard.sourceSet; + private Deaggregator(HazardCurveSet curves, Config config) { + this.curves = curves; + this.sources = curves.sourceSet; this.gmmSet = sources.groundMotionModels(); - } - /* Create a new deaggregator. */ - static Deaggregator of(HazardCurveSet hazard) { - return new Deaggregator(checkNotNull(hazard)); + this.imt = config.imt; + this.model = config.model; + this.iml = config.iml; + this.probModel = config.probabilityModel; + this.trunc = config.truncation; + + this.datasetBuilders = initDataBuilders(gmmSet.gmms(), config.model); } - Deaggregator withDataModel(Dataset model) { - this.model = model; - this.datasetBuilders = initDataBuilders(gmmSet.gmms(), model); - return this; + private static Map<Gmm, Dataset> deaggregate(HazardCurveSet curves, Config config) { + Deaggregator deaggregator = new Deaggregator(curves, config); + return deaggregator.deaggregate(); } private static Map<Gmm, Dataset.Builder> initDataBuilders(Set<Gmm> gmms, Dataset model) { @@ -326,51 +431,27 @@ public final class Deaggregation { return map; } - Deaggregator forImt(Imt imt) { - // TODO check valid imt agains HCS - this.imt = imt; - return this; - } - - Deaggregator atIml(double rate, double iml) { - // TODO check valid iml agains curve x-range for imt?? - this.rate = rate; - this.iml = iml; - return this; - } - - Map<Gmm, Dataset> deaggregate() { - checkState(); - - for (GroundMotions gms : hazard.hazardGroundMotionsList) { + private Map<Gmm, Dataset> deaggregate() { + for (GroundMotions gms : curves.hazardGroundMotionsList) { InputList inputs = gms.inputs; double minDistance = inputs.minDistance; Map<Gmm, List<Double>> μLists = gms.means.get(imt); Map<Gmm, List<Double>> σLists = gms.sigmas.get(imt); Map<Gmm, Double> gmms = gmmSet.gmmWeightMap(minDistance); - processSource(inputs, gmms, μLists, σLists, EXCEEDANCE); + processSource(inputs, gmms, μLists, σLists); } - for (Dataset.Builder builder : datasetBuilders.values()) { builder.sourceSet(sources); } - return createDataMap(); } - private void checkState() { - String clazz = getClass().getSimpleName(); - checkNotNull(model, "%s: data model not set", clazz); - checkNotNull(imt, "%s: IMT not set", clazz); - checkNotNull(iml, "%s: target IML not set", clazz); - } - private Map<Gmm, Dataset> createDataMap() { return Maps.immutableEnumMap( Maps.transformValues( datasetBuilders, new Function<Dataset.Builder, Dataset>() { - @Override public Dataset apply(Builder builder) { + @Override public Dataset apply(Dataset.Builder builder) { return builder.build(); } })); @@ -380,8 +461,7 @@ public final class Deaggregation { InputList inputs, Map<Gmm, Double> gmms, Map<Gmm, List<Double>> μLists, - Map<Gmm, List<Double>> σLists, - ExceedanceModel exceedanceModel) { + Map<Gmm, List<Double>> σLists) { /* Local EnumSet based keys. 
 */
 			final Set<Gmm> gmmKeys = EnumSet.copyOf(gmms.keySet());
@@ -409,7 +489,7 @@ public final class Deaggregation {
 					double σ = σLists.get(gmm).get(i);
 					double ε = epsilon(μ, σ, iml);
 
-					double probAtIml = exceedanceModel.exceedance(μ, σ, trunc, imt, iml);
+					double probAtIml = probModel.exceedance(μ, σ, trunc, imt, iml);
 
 					double rate = probAtIml * in.rate * sources.weight() * gmmWeight;
 					if (skipRupture) {
@@ -417,9 +497,6 @@ public final class Deaggregation {
 						continue;
 					}
 					gmmSourceRates.put(gmm, gmmSourceRates.get(gmm) + rate);
-
-					// System.out.println(μ + " " + σ + " " + iml);
-					// System.out.println("ε: " + ε);
 
 					int εIndex = model.epsilonIndex(ε);
 					datasetBuilders.get(gmm).add(
@@ -437,7 +514,6 @@ public final class Deaggregation {
 					gmmSkipRates.get(gmm));
 				datasetBuilders.get(gmm).add(source);
 			}
-
 		}
 
 		private static Map<Gmm, Double> createRateMap(Set<Gmm> gmms) {
@@ -447,15 +523,30 @@ public final class Deaggregation {
 			}
 			return rateMap;
 		}
-
 	}
 
 	private static double epsilon(double μ, double σ, double iml) {
 		return (μ - iml) / σ;
 	}
 
-	private static final Range<Double> rRange = Range.closed(0.0, 1000.0);
-	private static final Range<Double> εRange = Range.closed(-3.0, 3.0);
+	@Override public String toString() {
+		// Entry<Imt, Deagg> entry = deaggs.entrySet().iterator().next();
+		StringBuilder sb = new StringBuilder();
+		for (Entry<Imt, ImtDeagg> entry : deaggs.entrySet()) {
+			sb.append("Deagg for IMT: ").append(entry.getKey()).append(NEWLINE);
+			sb.append(entry.getValue());
+		}
+		return sb.toString();
+	}
+
+	public Exporter export(Imt imt) {
+		System.out.println(this);
+		return new Exporter(deaggs.get(imt).totalDataset, "Total");
+	}
+
+	public Exporter export(Imt imt, Gmm gmm) {
+		return new Exporter(deaggs.get(imt).gmmDatasets.get(gmm), gmm.toString());
+	}
 
 	public static class Exporter {
 
@@ -525,11 +616,11 @@ public final class Deaggregation {
 			this.summary = ImmutableList.of(
 				element("Deaggregation targets", true, ImmutableList.of(
 					item("Return period", 2475, "yrs"),
-					item("Exceedance rate", 4.0404e-4, "yrs⁻¹"),
+					item("Exceedance rate", 4.0404e-4, "yr⁻¹"),
 					item("Exceedance IML", 0.6085, "g"))),
 				element("Recovered targets", false, ImmutableList.of(
 					item("Return period", 2521, "yrs"),
-					item("Exceedance rate", 3.9315e-4, "yrs⁻¹"),
+					item("Exceedance rate", 3.9315e-4, "yr⁻¹"),
 					item("Exceedance IML", 0.6085, "g"))),
 				element("Mean", true, ImmutableList.of(
 					item("r", 11.2, "km"),
@@ -583,6 +674,9 @@ public final class Deaggregation {
 
 	}
 
+	private static final Range<Double> rRange = Range.closed(0.0, 1000.0);
+	private static final Range<Double> εRange = Range.closed(-3.0, 3.0);
+
 	/*
 	 * Deaggregation dataset that stores deaggregation results of individual
 	 * SourceSets and Gmms. Datasets may be recombined via add().
@@ -738,8 +832,6 @@ public final class Deaggregation {
 
 		static class Builder {
 
-			private static final String ID = "Deaggregation.Dataset.Builder";
-
 			private DataVolume.Builder rmε;
 
 			/* Weighted mean contributions */
@@ -879,17 +971,15 @@ public final class Deaggregation {
 					sources.build());
 			}
 		}
-	}
 
-	/* Serialization helpers. */
-
 	/**
 	 * Returns a list of objects that define the ε bins used in this
 	 * deaggregation when serialized to JSON.
	 */
 	public List<?> εBins() {
 		ImmutableList.Builder<εBin> bins = ImmutableList.builder();
+		Dataset model = deaggs.values().iterator().next().config.model;
 		List<Double> εs = model.rmε.levels();
 		for (int i = 0; i < εs.size() - 1; i++) {
 			Double min = (i == 0) ?
null : εs.get(i); diff --git a/src/org/opensha2/calc/Hazard.java b/src/org/opensha2/calc/Hazard.java index 3f8cfdd7c..8a71da65f 100644 --- a/src/org/opensha2/calc/Hazard.java +++ b/src/org/opensha2/calc/Hazard.java @@ -34,22 +34,20 @@ import com.google.common.collect.SetMultimap; */ public final class Hazard { - // TODO refactor to just Hazard because that's what it is - - final SetMultimap<SourceType, HazardCurveSet> sourceSetMap; // TODO refactor to sourceSetCurves + final SetMultimap<SourceType, HazardCurveSet> sourceSetCurves; final Map<Imt, XySequence> totalCurves; final HazardModel model; final Site site; final CalcConfig config; private Hazard( - SetMultimap<SourceType, HazardCurveSet> sourceSetMap, + SetMultimap<SourceType, HazardCurveSet> sourceSetCurves, Map<Imt, XySequence> totalCurves, HazardModel model, Site site, CalcConfig config) { - this.sourceSetMap = sourceSetMap; + this.sourceSetCurves = sourceSetCurves; this.totalCurves = totalCurves; this.model = model; this.site = site; @@ -60,9 +58,9 @@ public final class Hazard { String LF = StandardSystemProperty.LINE_SEPARATOR.value(); StringBuilder sb = new StringBuilder("HazardResult:"); sb.append(LF); - for (SourceType type : EnumSet.copyOf(sourceSetMap.keySet())) { + for (SourceType type : EnumSet.copyOf(sourceSetCurves.keySet())) { sb.append(type).append("SourceSet:").append(LF); - for (HazardCurveSet curveSet : sourceSetMap.get(type)) { + for (HazardCurveSet curveSet : sourceSetCurves.get(type)) { SourceSet<? extends Source> ss = curveSet.sourceSet; sb.append(" ").append(ss); sb.append("Used: "); @@ -139,7 +137,7 @@ public final class Hazard { private Site site; private CalcConfig config; - private ImmutableSetMultimap.Builder<SourceType, HazardCurveSet> resultMapBuilder; + private ImmutableSetMultimap.Builder<SourceType, HazardCurveSet> curveMapBuilder; private Map<Imt, XySequence> totalCurves; private Builder(CalcConfig config) { @@ -148,7 +146,7 @@ public final class Hazard { for (Entry<Imt, XySequence> entry : config.logModelCurves.entrySet()) { totalCurves.put(entry.getKey(), emptyCopyOf(entry.getValue())); } - resultMapBuilder = ImmutableSetMultimap.builder(); + curveMapBuilder = ImmutableSetMultimap.builder(); } Builder site(Site site) { @@ -164,7 +162,7 @@ public final class Hazard { } Builder addCurveSet(HazardCurveSet curveSet) { - resultMapBuilder.put(curveSet.sourceSet.type(), curveSet); + curveMapBuilder.put(curveSet.sourceSet.type(), curveSet); for (Entry<Imt, XySequence> entry : curveSet.totalCurves.entrySet()) { totalCurves.get(entry.getKey()).add(entry.getValue()); } @@ -180,7 +178,7 @@ public final class Hazard { Hazard build() { validateState(ID); return new Hazard( - resultMapBuilder.build(), + curveMapBuilder.build(), Maps.immutableEnumMap(totalCurves), model, site, diff --git a/src/org/opensha2/calc/Results.java b/src/org/opensha2/calc/Results.java index cf57c6b6b..ccac36d06 100644 --- a/src/org/opensha2/calc/Results.java +++ b/src/org/opensha2/calc/Results.java @@ -121,12 +121,12 @@ public class Results { } } - public static Map<Imt, Map<SourceType, XySequence>> totalsByType(Hazard result) { + public static Map<Imt, Map<SourceType, XySequence>> totalsByType(Hazard hazard) { ImmutableMap.Builder<Imt, Map<SourceType, XySequence>> imtMapBuilder = ImmutableMap.builder(); - Map<Imt, XySequence> curves = result.curves(); + Map<Imt, XySequence> curves = hazard.curves(); Set<Imt> imts = curves.keySet(); for (Imt imt : imts) { @@ -134,7 +134,7 @@ public class Results { XySequence modelCurve = 
emptyCopyOf(curves.get(imt)); Map<SourceType, XySequence> typeCurves = new EnumMap<>(SourceType.class); - Multimap<SourceType, HazardCurveSet> curveSets = result.sourceSetMap; + Multimap<SourceType, HazardCurveSet> curveSets = hazard.sourceSetCurves; for (SourceType type : curveSets.keySet()) { XySequence typeCurve = copyOf(modelCurve); for (HazardCurveSet curveSet : curveSets.get(type)) { -- GitLab
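A sketch of how the renamed entry points in this patch compose, assuming a
Hazard has already been computed; the Imt and Gmm constants are illustrative
only, and because the hazard curves are kept in log-x space (see the
interpolator comments above), a directly supplied level is assumed to be in
natural-log units:

	// Illustrative use of the refactored API; not part of this patch.
	static void deaggDemo(Hazard hazard) {
		// Deaggregate at the IML corresponding to a 2475-year return period.
		Deaggregation byRp = Deaggregation.atReturnPeriod(hazard, 2475.0);

		// Or deaggregate directly at an intensity measure level (ln g assumed).
		Deaggregation byIml = Deaggregation.atIml(hazard, Math.log(0.6085));

		// Export the total dataset, or the contribution from a single GMM.
		Deaggregation.Exporter total = byRp.export(Imt.PGA);
		Deaggregation.Exporter single = byIml.export(Imt.PGA, Gmm.ASK_14);
	}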