From 16b83b7287f6c7a0012c4310937d77b39ff0a009 Mon Sep 17 00:00:00 2001
From: Peter Powers <pmpowers@usgs.gov>
Date: Thu, 17 Dec 2020 14:06:13 -0700
Subject: [PATCH] grid dev

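Consolidate grid mfd-tree validation in MfdTrees and rework how a
GridSourceSet decides whether it can be optimized with rate tables:

- Move the mfd type, mMin, and Δm checks from GridLoader to MfdTrees
  (checkType, checkTreeTypes, checkGrTypes, checkGrMinAndDelta) and
  update FaultRuptureSet to use them.
- Store magMaster in GridSourceSet as Optional<double[]>; a grid is
  optimizable when magMaster is present (replacing the Δm and
  FIXED_STRIKE based check).
- Remove builder methods now handled by gridConfig() and locations(),
  including the deprecated mfdData(mMin, mMax, Δm); magnitude bin
  edges are derived from the x-values of a node MFD instead.
- Add GridDepthModel.toString(), use "mc" rather than "cMag" for the
  GR taper magnitude key in Deserialize, and point ModelLoader's test
  main() at ../nshm-conus-2018-tmp.

The grid mfd-tree validation in GridLoader now reads:

    LogicTree<Mfd.Properties> propsTree = Deserialize.mfdTree(
        grid.source.properties(), data);
    MfdTrees.checkGrMinAndDelta(propsTree);
    LogicTree<Mfd> mfdTree = MfdTrees.propsTreeToMmaxMfdTree(propsTree);

Debug System.out.println calls are left in place while grid handling
is under development.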
---
 .../earthquake/nshmp/model/Deserialize.java   |   2 +-
 .../nshmp/model/FaultRuptureSet.java          |   4 +-
 .../nshmp/model/GridDepthModel.java           |   7 +
 .../earthquake/nshmp/model/GridLoader.java    |  41 +---
 .../earthquake/nshmp/model/GridSourceSet.java | 207 ++++++++----------
 .../usgs/earthquake/nshmp/model/MfdTrees.java | 118 +++++++---
 .../earthquake/nshmp/model/ModelLoader.java   |   2 +-
 7 files changed, 197 insertions(+), 184 deletions(-)

diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/Deserialize.java b/src/main/java/gov/usgs/earthquake/nshmp/model/Deserialize.java
index 1b5bb487..9f3243c5 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/model/Deserialize.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/model/Deserialize.java
@@ -456,7 +456,7 @@ class Deserialize {
 
   private static JsonElement validateGrTaper(JsonObject o) {
     validateGr(o);
-    return checkValue(o, "cMag");
+    return checkValue(o, "mc");
   }
 
   private static JsonElement validateIncr(JsonObject o) {
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/FaultRuptureSet.java b/src/main/java/gov/usgs/earthquake/nshmp/model/FaultRuptureSet.java
index 14a8bf48..28fa26e0 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/model/FaultRuptureSet.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/model/FaultRuptureSet.java
@@ -328,7 +328,7 @@ public class FaultRuptureSet implements RuptureSet {
 
     /* Create mfd-tree from a logic tree of SINGLE MFDs and a rate-tree. */
     private LogicTree<Mfd.Properties> updateMfdsForRecurrence() {
-      checkState(MfdTrees.mfdsAreType(mfdPropsTree, Mfd.Type.SINGLE));
+      MfdTrees.checkType(mfdPropsTree, Mfd.Type.SINGLE);
       LogicTree.Builder<Mfd.Properties> propsTree = LogicTree.builder(mfdPropsTree.name());
       LogicTree<Double> rateTree = data.rateTree().orElseThrow();
       for (Branch<Double> rBranch : rateTree) {
@@ -741,7 +741,7 @@ public class FaultRuptureSet implements RuptureSet {
    */
   static void checkEpistemic(MfdConfig mfdConfig, LogicTree<Mfd.Properties> mfdTree) {
     boolean multipleSingleBranches = (mfdTree.size() > 1) &&
-        MfdTrees.mfdsAreType(mfdTree, Mfd.Type.SINGLE);
+        MfdTrees.checkTreeTypes(mfdTree, Mfd.Type.SINGLE);
     boolean hasEpistemic = mfdConfig.epistemicTree.isPresent();
     checkState(
         !(multipleSingleBranches && hasEpistemic),
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/GridDepthModel.java b/src/main/java/gov/usgs/earthquake/nshmp/model/GridDepthModel.java
index e2e385a5..cda1427f 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/model/GridDepthModel.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/model/GridDepthModel.java
@@ -2,6 +2,7 @@ package gov.usgs.earthquake.nshmp.model;
 
 import com.google.common.collect.Range;
 
+import gov.usgs.earthquake.nshmp.Text;
 import gov.usgs.earthquake.nshmp.tree.LogicTree;
 
 /*
@@ -20,4 +21,10 @@ class GridDepthModel {
     this.mRange = mRange;
     this.depthTree = depthTree;
   }
+
+  @Override
+  public String toString() {
+    return mRange + Text.NEWLINE + depthTree + Text.NEWLINE;
+  }
+
 }
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/GridLoader.java b/src/main/java/gov/usgs/earthquake/nshmp/model/GridLoader.java
index f9bc1e85..37c48725 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/model/GridLoader.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/model/GridLoader.java
@@ -1,10 +1,7 @@
 package gov.usgs.earthquake.nshmp.model;
 
-import static com.google.common.base.Preconditions.checkArgument;
 import static com.google.common.base.Preconditions.checkState;
-import static gov.usgs.earthquake.nshmp.mfd.Mfd.Type.GR_TYPES;
 import static gov.usgs.earthquake.nshmp.model.Deserialize.MFD_TREE;
-import static java.util.stream.Collectors.toSet;
 import static java.util.stream.Collectors.toUnmodifiableList;
 import static java.util.stream.Collectors.toUnmodifiableMap;
 
@@ -17,7 +14,6 @@ import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Optional;
 import java.util.OptionalDouble;
-import java.util.Set;
 import java.util.stream.Stream;
 
 import gov.usgs.earthquake.nshmp.data.DelimitedData;
@@ -349,9 +345,7 @@ class GridLoader {
         LogicTree<Mfd.Properties> propsTree = Deserialize.mfdTree(
             grid.source.properties(),
             data);
-        checkType(propsTree);
-        checkMinMagnitude(propsTree);
-        checkMagnitudeDelta(propsTree);
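+        // validate that branch MFDs are GR-type with matching mMin and Δm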
+        MfdTrees.checkGrMinAndDelta(propsTree);
         LogicTree<Mfd> mfdTree = MfdTrees.propsTreeToMmaxMfdTree(propsTree);
         modelMfds.put(entry.getKey(), mfdTree);
       }
@@ -367,35 +361,6 @@ class GridLoader {
     }
   }
 
-  private static Mfd.Type checkType(LogicTree<Mfd.Properties> mfdTree) {
-    Set<Mfd.Type> types = mfdTree.stream()
-        .map(Branch::value)
-        .map(Mfd.Properties::type)
-        .collect(toSet());
-    checkArgument(
-        types.size() == 1 || GR_TYPES.containsAll(types),
-        "Grid mfd-tree has multiple mfd types: %s", types);
-    return types.iterator().next();
-  }
-
-  private static double checkMinMagnitude(LogicTree<Mfd.Properties> mfdTree) {
-    Set<Double> mins = mfdTree.stream()
-        .map(Branch::value)
-        .map(p -> p.getAsGr().mMin())
-        .collect(toSet());
-    checkArgument(mins.size() == 1, "Grid mfd-tree has different mfd mMin: %s", mins);
-    return mins.iterator().next();
-  }
-
-  private static double checkMagnitudeDelta(LogicTree<Mfd.Properties> mfdTree) {
-    Set<Double> deltas = mfdTree.stream()
-        .map(Branch::value)
-        .map(p -> p.getAsGr().Δm())
-        .collect(toSet());
-    checkArgument(deltas.size() == 1, "Grid mfd-tree has different mfd Δm: %s", deltas);
-    return deltas.iterator().next();
-  }
-
   /*
    * Data container for feature polygon.
    *
@@ -420,7 +385,6 @@ class GridLoader {
     builder.transform(xy -> zeroAboveM(xy, m));
   }
 
-  // zone
   static GridSourceSet createGrid(
       SourceConfig.Grid config,
       Feature feature,
@@ -441,7 +405,8 @@ class GridLoader {
     builder.gridConfig(config);
     props.getDouble(Key.STRIKE).ifPresent(builder::strike);
 
-    builder.locations(locations, mfds, mfdsTree, focalMechMaps, Mfd.Type.SINGLE);
+    System.out.println(mfds.get(0).properties().type());
+    builder.locations(locations, mfds, mfdsTree, focalMechMaps);
 
     return builder.build();
   }
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/GridSourceSet.java b/src/main/java/gov/usgs/earthquake/nshmp/model/GridSourceSet.java
index a8173f76..6056c845 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/model/GridSourceSet.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/model/GridSourceSet.java
@@ -1,10 +1,8 @@
 package gov.usgs.earthquake.nshmp.model;
 
 import static com.google.common.base.Preconditions.checkArgument;
-import static com.google.common.base.Preconditions.checkNotNull;
 import static com.google.common.base.Preconditions.checkState;
 import static gov.usgs.earthquake.nshmp.Earthquakes.checkCrustalDepth;
-import static gov.usgs.earthquake.nshmp.Earthquakes.checkMagnitude;
 import static gov.usgs.earthquake.nshmp.Earthquakes.checkSlabDepth;
 import static gov.usgs.earthquake.nshmp.Faults.checkStrike;
 import static gov.usgs.earthquake.nshmp.fault.FocalMech.NORMAL;
@@ -59,23 +57,27 @@ public class GridSourceSet extends AbstractSourceSet<PointSource> {
   final List<Location> locations;
   final List<Mfd> mfds;
   final LogicTree<List<Mfd>> mfdsTree;
-  final RuptureScaling rupScaling;
-  private final Map<FocalMech, Double> mechMap; // default, used for copyOf
-  private final List<Map<FocalMech, Double>> mechMaps;
+  // private final Map<FocalMech, Double> mechMap; // default, used for copyOf
+  private final List<Map<FocalMech, Double>> mechMaps; // may be nCopies
   private final boolean singularMechs;
   private final NavigableMap<Double, Map<Double, Double>> magDepthMap;
-  private final PointSourceType sourceType;
   private final OptionalDouble strike;
-  private final double[] magMaster;
-  private final Double mMin;
-  private final Double mMax;
-  private final double Δm;
 
+  /* optimizable = magMaster.isPresent() */
+  private final Optional<double[]> magMaster;
+
+  final RuptureScaling rupScaling;
   private final double maxDepth;
+  private final PointSourceType sourceType;
 
   final DepthModel depthModel;
   final boolean optimizable;
 
+  /*
+   * Whether a grid can be optimized using rate tables depends on whether
+   * magMaster is present.
+   */
+
   /*
    * Most grid sources have the same focal mech map everywhere; in these cases,
    * mechMaps will have been created using Collections.nCopies() with minimal
@@ -88,16 +90,12 @@ public class GridSourceSet extends AbstractSourceSet<PointSource> {
     this.mfds = builder.mfds;
     this.mfdsTree = builder.mfdsTree;
     this.rupScaling = builder.rupScaling;
-    this.mechMap = builder.mechMap;
     this.mechMaps = builder.mechMaps;
     this.singularMechs = builder.singularMechs;
     this.magDepthMap = builder.magDepthMap;
     this.sourceType = builder.sourceType;
     this.strike = builder.strike;
     this.magMaster = builder.magMaster;
-    this.mMin = builder.mMin;
-    this.mMax = builder.mMax;
-    this.Δm = builder.Δm;
     this.maxDepth = builder.maxDepth;
 
     /*
@@ -105,11 +103,20 @@ public class GridSourceSet extends AbstractSourceSet<PointSource> {
      *
      * TODO should be cleaner way of handling this
      */
-    this.optimizable = (sourceType() != FIXED_STRIKE) && !Double.isNaN(Δm);
+    // System.out.println(Δm);
+    // this.optimizable = (sourceType() != FIXED_STRIKE) && !Double.isNaN(Δm);
+    this.optimizable = this.magMaster.isPresent();
+
+    System.out.println(Arrays.toString(magMaster.orElseThrow()));
+
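+    /* Magnitudes of the first node MFD initialize the depth model below. */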
+    double[] depthMags = this.mfds.get(0).data().xValues().toArray();
+    // System.out.println(mfdsTree);
+
+    System.out.println(Arrays.toString(depthMags));
 
     this.depthModel = DepthModel.create(
         magDepthMap,
-        Doubles.asList(magMaster),
+        Doubles.asList(depthMags),
         maxDepth);
   }
 
@@ -239,11 +246,11 @@ public class GridSourceSet extends AbstractSourceSet<PointSource> {
     private NavigableMap<Double, Map<Double, Double>> magDepthMap;
     private PointSourceType sourceType;
     private OptionalDouble strike = OptionalDouble.empty();
-    private double[] magMaster;
+    private Optional<double[]> magMaster;
     private Double maxDepth;
-    private Double mMin;
-    private Double mMax;
-    private Double Δm;
+    // private Double mMin;
+    // private Double mMax;
+    // private Double Δm;
 
     private Map<FocalMech, Double> mechMap;
     private boolean singularMechs = true;
@@ -254,24 +261,28 @@ public class GridSourceSet extends AbstractSourceSet<PointSource> {
       return this;
     }
 
-    Builder sourceType(PointSourceType sourceType) {
-      this.sourceType = checkNotNull(sourceType);
-      return this;
-    }
-
-    Builder ruptureScaling(RuptureScaling rupScaling) {
-      this.rupScaling = checkNotNull(rupScaling, "RupScaling is null");
-      return this;
-    }
-
     Builder gridConfig(SourceConfig.Grid gridConfig) {
-      // this.spacing = gridConfig.spacing; // TODO would only be used for area
-      // sources
+      // TODO would only be used for zone
+      // this.spacing = gridConfig.spacing;
+
       this.maxDepth = gridConfig.maxDepth;
+      // TODO makes bad assumption of GRID
+      validateDepth(this.maxDepth, SourceType.GRID);
+
       this.rupScaling = gridConfig.ruptureScaling;
       this.sourceType = gridConfig.pointSourceType;
       this.mechMap = convertFocalMechTree(gridConfig.focalMechTree);
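+      // expect a weight for each of the three focal mechs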
+      checkArgument(this.mechMap.size() == 3);
+
       this.magDepthMap = convertMagDepthMap(gridConfig.gridDepthMap);
+      // System.out.println(gridConfig.gridDepthMap);
+      // System.out.println(this.magDepthMap);
+
+      validateMagCutoffs(this.magDepthMap);
+
+      // TODO makes bad assumption of GRID
+      validateDepthMap(this.magDepthMap, SourceType.GRID);
+
       return this;
     }
 
@@ -301,51 +312,9 @@ public class GridSourceSet extends AbstractSourceSet<PointSource> {
               Branch::weight));
     }
 
-    Builder depthMap(NavigableMap<Double, Map<Double, Double>> magDepthMap, SourceType type) {
-      checkNotNull(magDepthMap, "MagDepthMap is null");
-      checkArgument(magDepthMap.size() > 0, "MagDepthMap must have at least one entry");
-      // the structure of the map and its weights will have been fully
-      // validated by parser; still need to check that depths are
-      // appropriate; 'type' indicates how to validate depths across
-      // wrapper classes
-      validateDepthMap(magDepthMap, type);
-      // there must be at least one mag key that is >= MAX_MAG
-      validateMagCutoffs(magDepthMap);
-      this.magDepthMap = magDepthMap;
-      return this;
-    }
-
-    Builder maxDepth(Double maxDepth, SourceType type) {
-      this.maxDepth = checkNotNull(maxDepth, "Maximum depth is null");
-      validateDepth(maxDepth, type);
-      return this;
-    }
-
-    Builder mechs(Map<FocalMech, Double> mechMap) {
-      // weights will have already been checked
-      checkArgument(!checkNotNull(mechMap).isEmpty());
-      checkArgument(mechMap.size() == 3);
-      this.mechMap = mechMap;
-      return this;
-    }
-
     /*
-     * magMaster mfd data
-     *
-     * we could require that this be set first and then all node mfds are
-     * checked against this.
-     *
-     * should be better way to get master/total mfd
+     * TODO How/where do we ensure that all Mfds in a grid are the same type??
      */
-    @Deprecated
-    Builder mfdData(double mMin, double mMax, double Δm) {
-      // TODO need better validation here
-      checkArgument(checkMagnitude(mMin) <= checkMagnitude(mMax));
-      this.mMin = mMin;
-      this.mMax = mMax;
-      this.Δm = Δm;
-      return this;
-    }
 
     // locations, total Mfds, underlying mfd-tree
     // TODO test consistency, size etc ?
@@ -353,8 +322,7 @@ public class GridSourceSet extends AbstractSourceSet<PointSource> {
         List<Location> locations,
         List<Mfd> mfds,
         LogicTree<List<Mfd>> mfdsTree,
-        Optional<List<Map<FocalMech, Double>>> focalMechMaps,
-        Mfd.Type type) {
+        Optional<List<Map<FocalMech, Double>>> focalMechMaps) {
 
       checkArgument(locations.size() == mfds.size());
       // wholesale replacement of arrays
@@ -368,30 +336,17 @@ public class GridSourceSet extends AbstractSourceSet<PointSource> {
       // TODO this assumes (rightly so) that all supplied mfds have the same
       // Δm, but we need to get rid of the odd dependency on Δm
       Mfd model = mfds.get(0);
-      this.mMin = model.data().x(0);
-      this.mMax = model.data().x(model.data().size() - 1);
-      this.Δm = (type == Mfd.Type.SINGLE)
-          ? Double.NaN
-          : model.data().x(1) - this.mMin;
-      return this;
-    }
-
-    Builder location(Location loc, Mfd mfd) {
-      this.mfds.add(checkNotNull(mfd, "MFD is null"));
-      this.locations.add(checkNotNull(loc, "Location is null"));
+      // this.mMin = model.data().x(0);
+      // this.mMax = model.data().x(model.data().size() - 1);
+      // // System.out.println(type);
+      // this.Δm = (type == Mfd.Type.SINGLE)
+      // ? Double.NaN
+      // : model.data().x(1) - this.mMin;
       return this;
     }
 
-    Builder location(Location loc, Mfd mfd, Map<FocalMech, Double> mechMap) {
-      this.mfds.add(checkNotNull(mfd, "MFD is null"));
-      this.locations.add(checkNotNull(loc, "Location is null"));
-      checkArgument(!checkNotNull(mechMap).isEmpty());
-      checkArgument(mechMap.size() == 3);
-      this.mechMaps.add(mechMap);
-      return this;
-    }
-
-    static void validateDepthMap(Map<Double, Map<Double, Double>> magDepthMap,
+    static void validateDepthMap(
+        Map<Double, Map<Double, Double>> magDepthMap,
         SourceType type) {
       for (Map<Double, Double> magMap : magDepthMap.values()) {
         for (double depth : magMap.keySet()) {
@@ -448,9 +403,9 @@ public class GridSourceSet extends AbstractSourceSet<PointSource> {
       checkState(maxDepth != null, "%s max depth not set", buildId);
       checkState(mechMap != null, "%s focal mech map not set", buildId);
 
-      checkState(mMin != null, "%s min mag not set", buildId);
-      checkState(mMax != null, "%s max mag not set", buildId);
-      checkState(Δm != null, "%s delta mag not set", buildId);
+      // checkState(mMin != null, "%s min mag not set", buildId);
+      // checkState(mMax != null, "%s max mag not set", buildId);
+      // checkState(Δm != null, "%s delta mag not set", buildId);
 
       /*
        * TODO there are too many assumptions built into this; who's to say ones
@@ -464,12 +419,15 @@ public class GridSourceSet extends AbstractSourceSet<PointSource> {
        * TODO in the case of single combined/flattened MFDs, mags may not be
        * uniformly spaced. Can this be refactored
        */
-      if (Double.isNaN(Δm)) {
-        magMaster = mfds.get(0).data().xValues().toArray();
-      } else {
-        double cleanDelta = Double.valueOf(String.format("%.2f", Δm));
-        magMaster = DoubleData.buildCleanSequence(mMin, mMax, cleanDelta, true, 2);
-      }
+      // if (Double.isNaN(Δm)) {
+      // magMaster = mfds.get(0).data().xValues().toArray();
+      // } else {
+      // double cleanDelta = Double.valueOf(String.format("%.2f", Δm));
+      // magMaster = DoubleData.buildCleanSequence(mMin, mMax, cleanDelta, true,
+      // 2);
+      // }
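+      /* Master magnitudes are taken from the x-values of the first node MFD. */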
+      double[] mfdMags = mfds.get(0).data().xValues().toArray();
+      magMaster = Optional.of(mfdMags);
 
       /*
        * Validate size of mechMaps; size could get out of sync if mixed calls to
@@ -755,11 +713,21 @@ public class GridSourceSet extends AbstractSourceSet<PointSource> {
     /* creates the type of point source specified in the parent */
     private List<PointSource> initSources(boolean smoothed) {
 
-      // table keys are specified as lowermost and uppermost bin edges
-      double Δm = parent.Δm;
+      /* For now, should only be getting here for GR MFDs */
+      Mfd modelMfd = parent.mfds.get(0);
+      Mfd.Properties props = modelMfd.properties(); // probably INCR
+      double[] mags = modelMfd.data().xValues().toArray();
+
+      double Δm = mags[1] - mags[0];
       double ΔmBy2 = Δm / 2.0;
-      double mMin = parent.magMaster[0] - ΔmBy2;
-      double mMax = parent.magMaster[parent.magMaster.length - 1] + ΔmBy2;
+      double mMin = mags[0] - ΔmBy2;
+      double mMax = mags[mags.length - 1] + ΔmBy2;
+
+      // table keys are specified as lowermost and uppermost bin edges
+      // double Δm = parent.Δm;
+      // double ΔmBy2 = Δm / 2.0;
+      // double mMin = parent.magMaster[0] - ΔmBy2;
+      // double mMax = parent.magMaster[parent.magMaster.length - 1] + ΔmBy2;
       double rMax = parent.groundMotionModels().maxDistance();
       double[] smoothingOffsets = smoothingOffsets(rMax, 0.1);
 
@@ -807,10 +775,21 @@ public class GridSourceSet extends AbstractSourceSet<PointSource> {
 
     /* always creates finite point sources */
     private List<PointSource> initMultiMechSources(boolean smoothed) {
-      double Δm = parent.Δm;
+
+      /* For now, should only be getting here for GR MFDs */
+      Mfd modelMfd = parent.mfds.get(0);
+      Mfd.Properties props = modelMfd.properties(); // probably INCR
+      double[] mags = modelMfd.data().xValues().toArray();
+
+      double Δm = mags[1] - mags[0];
       double ΔmBy2 = Δm / 2.0;
-      double mMin = parent.magMaster[0] - ΔmBy2;
-      double mMax = parent.magMaster[parent.magMaster.length - 1] + ΔmBy2;
+      double mMin = mags[0] - ΔmBy2;
+      double mMax = mags[mags.length - 1] + ΔmBy2;
+
+      // double Δm = parent.Δm;
+      // double ΔmBy2 = Δm / 2.0;
+      // double mMin = parent.magMaster[0] - ΔmBy2;
+      // double mMax = parent.magMaster[parent.magMaster.length - 1] + ΔmBy2;
       double rMax = parent.groundMotionModels().maxDistance();
       double[] smoothingOffsets = smoothingOffsets(rMax, 0.1);
 
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/MfdTrees.java b/src/main/java/gov/usgs/earthquake/nshmp/model/MfdTrees.java
index 5f765e57..441ce5fb 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/model/MfdTrees.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/model/MfdTrees.java
@@ -1,17 +1,21 @@
 package gov.usgs.earthquake.nshmp.model;
 
 import static com.google.common.base.Preconditions.checkArgument;
+import static gov.usgs.earthquake.nshmp.mfd.Mfd.Type.GR_TYPES;
 import static java.util.stream.Collectors.toMap;
+import static java.util.stream.Collectors.toSet;
 import static java.util.stream.Collectors.toUnmodifiableList;
 
 import java.nio.file.Path;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
 
 import gov.usgs.earthquake.nshmp.data.MutableXySequence;
 import gov.usgs.earthquake.nshmp.data.XySequence;
 import gov.usgs.earthquake.nshmp.mfd.Mfd;
+import gov.usgs.earthquake.nshmp.mfd.Mfd.Properties;
 import gov.usgs.earthquake.nshmp.mfd.Mfd.Properties.GutenbergRichter;
 import gov.usgs.earthquake.nshmp.mfd.Mfds;
 import gov.usgs.earthquake.nshmp.tree.Branch;
@@ -25,9 +29,9 @@ import gov.usgs.earthquake.nshmp.tree.LogicTree;
 class MfdTrees {
 
   /* Convert a logic tree of mfd properties to builders. */
-  static LogicTree<Mfd.Builder> mfdPropsToBuilders(LogicTree<Mfd.Properties> propsTree) {
-    LogicTree.Builder<Mfd.Builder> mfdTree = LogicTree.builder(propsTree.name());
-    for (Branch<Mfd.Properties> branch : propsTree) {
+  static LogicTree<Mfd.Builder> mfdPropsToBuilders(LogicTree<Mfd.Properties> tree) {
+    LogicTree.Builder<Mfd.Builder> mfdTree = LogicTree.builder(tree.name());
+    for (Branch<Mfd.Properties> branch : tree) {
       mfdTree.addBranch(branch.id(), branch.value().toBuilder(), branch.weight());
     }
     return mfdTree.build();
@@ -38,10 +42,10 @@ class MfdTrees {
    * b-values possible) to a tree of model MFDs with identical x-values. This
    * supports optimizations in XySequence.combine().
    */
-  static LogicTree<Mfd> propsTreeToMmaxMfdTree(LogicTree<Mfd.Properties> propsTree) {
-    double mMax = mfdTreeMaxMagnitude(propsTree);
-    LogicTree.Builder<Mfd> mfdTree = LogicTree.builder(propsTree.name());
-    propsTree.stream()
+  static LogicTree<Mfd> propsTreeToMmaxMfdTree(LogicTree<Mfd.Properties> tree) {
+    double mMax = mfdTreeMaxMagnitude(tree);
+    LogicTree.Builder<Mfd> mfdTree = LogicTree.builder(tree.name());
+    tree.stream()
         .forEach(branch -> mfdTree.addBranch(
             branch.id(),
             mMaxGrMfd(branch.value(), mMax),
@@ -49,21 +53,79 @@ class MfdTrees {
     return mfdTree.build();
   }
 
-  /* Find the maximum mMax, assuming a tree of GR MFDs. */
-  static double mfdTreeMaxMagnitude(LogicTree<Mfd.Properties> propsTree) {
-    return propsTree.stream()
+  /* Find the maximum mMax, checking that the tree is all GR MFDs. */
+  static double mfdTreeMaxMagnitude(LogicTree<Mfd.Properties> tree) {
+    return tree.stream()
         .mapToDouble(b -> b.value().getAsGr().mMax())
         .max()
         .getAsDouble();
   }
 
-  /* Check if all MFDs in a tree are the same type. */
-  static boolean mfdsAreType(LogicTree<Mfd.Properties> mfdTree, Mfd.Type type) {
-    return mfdTree.stream()
-        .map(branch -> branch.value().type())
+  /* Ensure all MFDs in a tree are of the specified type. */
+  static void checkType(LogicTree<Mfd.Properties> tree, Mfd.Type type) {
+    Mfd.Type refType = tree.get(0).value().type();
+    checkArgument(
+        checkTreeTypes(tree, refType) && refType == type,
+        "mfd-tree MFDs are not all of type: %s", type);
+  }
+
+  /* Check if all MFDs in a tree are the specified type. */
+  static boolean checkTreeTypes(LogicTree<Mfd.Properties> tree, Mfd.Type type) {
+    return tree.stream()
+        .map(Branch::value)
+        .map(Mfd.Properties::type)
         .allMatch(type::equals);
   }
 
+  /* Ensure all MFDs in a tree are some Gutenberg-Richter type. */
+  static void checkGrTypes(LogicTree<Mfd.Properties> tree) {
+    Set<Mfd.Type> types = tree.stream()
+        .map(Branch::value)
+        .map(Mfd.Properties::type)
+        .collect(toSet());
+    checkArgument(
+        types.size() == 1 || GR_TYPES.containsAll(types),
+        "mfd-tree types are not all Gutenberg-Richter: %s", types);
+  }
+
+  /* Ensure all MFDs are Gutenberg-Richter and that mMin and Δm match. */
+  static void checkGrMinAndDelta(LogicTree<Mfd.Properties> tree) {
+    checkGrTypes(tree);
+    GutenbergRichter refProps = tree.get(0).value().getAsGr();
+    boolean minAndDeltaEqual = tree.stream()
+        .skip(1)
+        .map(Branch::value)
+        .map(Properties::getAsGr)
+        .allMatch(props -> checkMinAndDelta(refProps, props));
+    checkArgument(
+        minAndDeltaEqual,
+        "Gutenberg-Richter mfd-tree branches have different mMin or Δm");
+  }
+
+  private static boolean checkMinAndDelta(
+      GutenbergRichter props1,
+      GutenbergRichter props2) {
+    return (props1.mMin() == props2.mMin() && props1.Δm() == props2.Δm());
+  }
+
+  private static double checkMinMagnitude(LogicTree<Mfd.Properties> mfdTree) {
+    Set<Double> mins = mfdTree.stream()
+        .map(Branch::value)
+        .map(p -> p.getAsGr().mMin())
+        .collect(toSet());
+    checkArgument(mins.size() == 1, "Grid mfd-tree has different mfd mMin: %s", mins);
+    return mins.iterator().next();
+  }
+
+  private static double checkMagnitudeDelta(LogicTree<Mfd.Properties> mfdTree) {
+    Set<Double> deltas = mfdTree.stream()
+        .map(Branch::value)
+        .map(p -> p.getAsGr().Δm())
+        .collect(toSet());
+    checkArgument(deltas.size() == 1, "Grid mfd-tree has different mfd Δm: %s", deltas);
+    return deltas.iterator().next();
+  }
+
   /* Check if the IDs and weights of two mfd-trees are the same. */
   static void checkTreeIdsAndWeights(LogicTree<Mfd> tree1, LogicTree<Mfd> tree2) {
     checkArgument(
@@ -108,11 +170,11 @@ class MfdTrees {
    * set to its weight. This MFD can then be used as a model for a builder that
    * can be scaled by rate.
    */
-  static Mfd reduceSingleMfdTree(LogicTree<Mfd.Properties> mfdTree) {
-    double[] magnitudes = new double[mfdTree.size()];
-    double[] weights = new double[mfdTree.size()];
-    for (int i = 0; i < mfdTree.size(); i++) {
-      Branch<Mfd.Properties> mfd = mfdTree.get(i);
+  static Mfd reduceSingleMfdTree(LogicTree<Mfd.Properties> tree) {
+    double[] magnitudes = new double[tree.size()];
+    double[] weights = new double[tree.size()];
+    for (int i = 0; i < tree.size(); i++) {
+      Branch<Mfd.Properties> mfd = tree.get(i);
       magnitudes[i] = mfd.value().getAsSingle().magnitude();
       weights[i] = mfd.weight();
     }
@@ -120,20 +182,20 @@ class MfdTrees {
   }
 
   /* Reduce a MFD logic tree; weighted branch combiner. */
-  static Mfd reduceMfdTree(LogicTree<Mfd> mfdTree) {
-    return Mfds.combine(scaledMfdList(mfdTree));
+  static Mfd reduceMfdTree(LogicTree<Mfd> tree) {
+    return Mfds.combine(scaledMfdList(tree));
   }
 
   /* LogicTree<MFD> --> List<MFD * branchWeight> */
-  static List<Mfd> scaledMfdList(LogicTree<Mfd> mfdTree) {
-    return mfdTree.stream()
+  static List<Mfd> scaledMfdList(LogicTree<Mfd> tree) {
+    return tree.stream()
         .map(MfdTrees::reduceMfdBranch)
         .collect(toUnmodifiableList());
   }
 
   /* LogicTree<MFD> --> List<MFD * branchWeight * weight> */
-  static List<Mfd> scaledMfdList(LogicTree<Mfd> mfdTree, double weight) {
-    return mfdTree.stream()
+  static List<Mfd> scaledMfdList(LogicTree<Mfd> tree, double weight) {
+    return tree.stream()
         .map(branch -> reduceMfdBranch(branch, weight))
         .collect(toUnmodifiableList());
   }
@@ -172,15 +234,15 @@ class MfdTrees {
    * Transpose a list of logic trees to a logic tree of immutable lists.
    * Supplied logic trees are assumed to have same branch names and IDs.
    */
-  static <T> LogicTree<List<T>> transposeTree(List<LogicTree<T>> listOfTrees) {
-    LogicTree<T> model = listOfTrees.get(0);
+  static <T> LogicTree<List<T>> transposeTree(List<LogicTree<T>> treeList) {
+    LogicTree<T> model = treeList.get(0);
 
     /* Init branch value lists. */
     List<List<T>> valueLists = new ArrayList<>(model.size());
     model.forEach(b -> valueLists.add(new ArrayList<T>()));
 
     /* Populate value lists. */
-    for (LogicTree<T> tree : listOfTrees) {
+    for (LogicTree<T> tree : treeList) {
       for (int i = 0; i < tree.size(); i++) {
         valueLists.get(i).add(tree.get(i).value());
       }
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/ModelLoader.java b/src/main/java/gov/usgs/earthquake/nshmp/model/ModelLoader.java
index 7096ceee..cbd3a01f 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/model/ModelLoader.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/model/ModelLoader.java
@@ -73,7 +73,7 @@ import gov.usgs.earthquake.nshmp.tree.LogicTree;
 abstract class ModelLoader {
 
   public static void main(String[] args) {
-    Path testModel = Paths.get("../nshm-conus-2018");
+    Path testModel = Paths.get("../nshm-conus-2018-tmp");
     HazardModel model = ModelLoader.load(testModel);
     System.out.println(model);
   }
-- 
GitLab