diff --git a/build.gradle b/build.gradle
index 7a84e4125fea32647bb1ccdc25463656075fadcd..fc1b7d45e37e9fe2f6086687fe872e3ea843b285 100644
--- a/build.gradle
+++ b/build.gradle
@@ -128,8 +128,8 @@ javadoc {
   options.encoding("UTF-8")
   options.docEncoding("UTF-8")
   options.charSet("UTF-8")
-  //options.addBooleanOption("Xold", true)
   options.addBooleanOption("Xdoclint:none", true)
+  //options.addStringOption("Xmaxwarns", "1")
   options.addBooleanOption("-no-module-directories", true)
   options.links(
       "https://docs.oracle.com/en/java/javase/11/docs/api/",
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/Maths.java b/src/main/java/gov/usgs/earthquake/nshmp/Maths.java
index b794ce31b65cbe5f6d5d28579b191c9ec2b93980..d46c99fec38c74bced6948b2078cfb1b6db75da5 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/Maths.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/Maths.java
@@ -1,7 +1,10 @@
 package gov.usgs.earthquake.nshmp;
 
+import static gov.usgs.earthquake.nshmp.data.DoubleData.checkInRange;
 import static gov.usgs.earthquake.nshmp.data.DoubleData.multiply;
 import static gov.usgs.earthquake.nshmp.data.DoubleData.sum;
+import static java.lang.Math.exp;
+import static java.lang.Math.log;
 
 import java.math.BigDecimal;
 import java.math.MathContext;
@@ -9,6 +12,7 @@ import java.math.RoundingMode;
 import java.util.Arrays;
 
 import com.google.common.base.Converter;
+import com.google.common.collect.Range;
 
 /**
  * Miscellaneous math utilities.
@@ -23,7 +27,7 @@ public final class Maths {
   public static final double PI_BY_2 = Math.PI / 2;
 
   /** Constant for 2Ï€. */
-  public static final double TWO_PI = 2 * Math.PI;
+  public static final double TWO_PI = 2.0 * Math.PI;
 
   /** Conversion multiplier for degrees to radians. */
   public static final double TO_RADIANS = Math.toRadians(1.0);
@@ -35,7 +39,7 @@ public final class Maths {
    * The precomputed √<span style="border-top:1px solid; padding:0 0.1em;"
    * >2</span>.
    */
-  public static final double SQRT_2 = Math.sqrt(2);
+  public static final double SQRT_2 = Math.sqrt(2.0);
 
   /**
    * The precomputed √<span style="border-top:1px solid; padding:0 0.1em;"
@@ -236,4 +240,72 @@ public final class Maths {
     }
   }
 
+  /**
+   * Convert an annual rate of occurrence to a Poisson probability of occurrence
+   * over the specified time period.
+   *
+   * @param rate (annual) of occurrence
+   * @param timespan of interest
+   * @return the Poisson probability of occurrence over the specified
+   *         {@code timespan}
+   */
+  public static double rateToProbability(double rate, double timespan) {
+    return 1 - exp(-rate * timespan);
+  }
+
+  /**
+   * Convert a Poisson probability of occurrence of some event over a specified
+   * time period to the equivalent annual rate.
+   *
+   * @param P the Poisson probability
+   * @param timespan of interest
+   * @return the annual rate of occurrence
+   */
+  public static double probabilityToRate(double P, double timespan) {
+    return -log(1 - P) / timespan;
+  }
+
+  /**
+   * Return a converter between annual rate and Poisson probability over a
+   * 1-year time span. The converter expects probabilities to be represented as
+   * fractions of 1.0.
+   */
+  public static Converter<Double, Double> annualRateToProbabilityConverter() {
+    return new AnnRateToPoissProbConverter(1.0);
+  }
+
+  /**
+   * Return a converter between annual rate and Poisson probability over the
+   * specified time span. The converter expects probabilities to be represented
+   * as fractions of 1.0.
+   */
+  public static Converter<Double, Double> annualRateToProbabilityConverter(double timespan) {
+    return new AnnRateToPoissProbConverter(timespan);
+  }
+
+  /**
+   * Supported timespans for Poisson probabilities: {@code [1..10000] years}.
+   */
+  public static final Range<Double> TIMESPAN_RANGE = Range.closed(1.0, 10000.0);
+
+  private static final class AnnRateToPoissProbConverter extends Converter<Double, Double> {
+
+    private final double timespan;
+
+    AnnRateToPoissProbConverter(double timespan) {
+      checkInRange(TIMESPAN_RANGE, "Timespan", timespan);
+      this.timespan = timespan;
+    }
+
+    @Override
+    protected Double doForward(Double rate) {
+      return rateToProbability(rate, timespan);
+    }
+
+    @Override
+    protected Double doBackward(Double prob) {
+      return probabilityToRate(prob, timespan);
+    }
+  }
+
 }
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/calc/CalcConfig.java b/src/main/java/gov/usgs/earthquake/nshmp/calc/CalcConfig.java
index 8487b71dd5d36e9adf7ac91e4d0368f841a7af6e..356e8933f46822c18e2f813e7942dd3b1658abd3 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/calc/CalcConfig.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/calc/CalcConfig.java
@@ -111,7 +111,7 @@ public final class CalcConfig {
   public static Builder copyOf(CalcConfig config) {
     Builder b = new Builder();
     if (config.resource.isPresent()) {
-      b.resource = config.resource.get();
+      b.resource = config.resource.orElseThrow();
     }
     b.hazard.copy(config.hazard);
     b.siteData.copy(config.siteData);
@@ -1237,7 +1237,7 @@ public final class CalcConfig {
   public String toString() {
     return new StringBuilder("Calc Config: ")
         .append(resource.isPresent()
-            ? resource.get().toAbsolutePath().normalize()
+            ? resource.orElseThrow().toAbsolutePath().normalize()
             : "(from defaults)")
         .append(hazard.asString())
         .append(deagg.asString())
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/calc/EqRate.java b/src/main/java/gov/usgs/earthquake/nshmp/calc/EqRate.java
index 0e4016206a83e14f93521b8deda3c6bc7c67c443..178a6058798e947be66839c24334ebd340919620 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/calc/EqRate.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/calc/EqRate.java
@@ -17,11 +17,11 @@ import gov.usgs.earthquake.nshmp.calc.CalcConfig.Rate.Bins;
 import gov.usgs.earthquake.nshmp.data.IntervalArray;
 import gov.usgs.earthquake.nshmp.data.IntervalArray.Builder;
 import gov.usgs.earthquake.nshmp.data.MutableXySequence;
+import gov.usgs.earthquake.nshmp.data.Sequences;
 import gov.usgs.earthquake.nshmp.data.XyPoint;
 import gov.usgs.earthquake.nshmp.data.XySequence;
 import gov.usgs.earthquake.nshmp.geo.Location;
 import gov.usgs.earthquake.nshmp.mfd.Mfd;
-import gov.usgs.earthquake.nshmp.mfd.Mfds;
 import gov.usgs.earthquake.nshmp.model.ClusterSource;
 import gov.usgs.earthquake.nshmp.model.ClusterSourceSet;
 import gov.usgs.earthquake.nshmp.model.Distance;
@@ -152,12 +152,12 @@ public class EqRate {
    */
   public static EqRate toCumulative(EqRate incremental) {
 
-    XySequence cumulativeTotal = Mfds.toCumulative(incremental.totalMfd);
+    XySequence cumulativeTotal = Sequences.toCumulative(incremental.totalMfd);
     ImmutableMap.Builder<SourceType, XySequence> cumulativeTypes = ImmutableMap.builder();
     for (Entry<SourceType, XySequence> entry : incremental.typeMfds.entrySet()) {
       cumulativeTypes.put(
           entry.getKey(),
-          Mfds.toCumulative(entry.getValue()));
+          Sequences.toCumulative(entry.getValue()));
     }
     return new EqRate(
         incremental.site,
@@ -174,7 +174,7 @@ public class EqRate {
    */
   public static EqRate toPoissonProbability(EqRate annualRates, double timespan) {
     Converter<Double, Double> converter =
-        Mfds.annualRateToProbabilityConverter(timespan)
+        Maths.annualRateToProbabilityConverter(timespan)
             .andThen(Maths.decimalToProbabilityConverter(2));
     Consumer<XyPoint> pointConvert = p -> p.set(converter.convert(p.y()));
 
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/calc/HazardExport.java b/src/main/java/gov/usgs/earthquake/nshmp/calc/HazardExport.java
index efde3513d1d3a0baa096b0ad94f9ca0fbf583c0f..ebc8aeb0d761d71bff7fa7a24bf0e8fa75f90adc 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/calc/HazardExport.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/calc/HazardExport.java
@@ -38,6 +38,7 @@ import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
 import com.google.common.primitives.Doubles;
 
+import gov.usgs.earthquake.nshmp.Maths;
 import gov.usgs.earthquake.nshmp.calc.Deaggregation.ImtDeagg;
 import gov.usgs.earthquake.nshmp.calc.HazardExport.Metadata.Builder;
 import gov.usgs.earthquake.nshmp.data.MutableXySequence;
@@ -48,7 +49,6 @@ import gov.usgs.earthquake.nshmp.gmm.Gmm;
 import gov.usgs.earthquake.nshmp.gmm.Imt;
 import gov.usgs.earthquake.nshmp.internal.Parsing;
 import gov.usgs.earthquake.nshmp.internal.Parsing.Delimiter;
-import gov.usgs.earthquake.nshmp.mfd.Mfds;
 import gov.usgs.earthquake.nshmp.model.HazardModel;
 import gov.usgs.earthquake.nshmp.model.Source;
 import gov.usgs.earthquake.nshmp.model.SourceSet;
@@ -113,7 +113,7 @@ public final class HazardExport {
 
     Function<Double, String> formatter = Parsing.formatDoubleFunction(VALUE_FMT);
     this.valueFormatter = (config.hazard.valueFormat == POISSON_PROBABILITY)
-        ? Mfds.annualRateToProbabilityConverter().andThen(formatter)
+        ? Maths.annualRateToProbabilityConverter().andThen(formatter)
         : formatter;
 
     Site demoSite = sites.iterator().next();
@@ -152,8 +152,8 @@ public final class HazardExport {
     if (exportBinary) {
       checkState(exportBinary && sites.mapBounds().isPresent(), BINARY_EXTENTS_REQUIRED_MSSG);
       Builder metaBuilder = Metadata.builder()
-          .bounds(sites.mapBounds().get())
-          .spacing(sites.mapSpacing().get())
+          .bounds(sites.mapBounds().orElseThrow())
+          .spacing(sites.mapSpacing().orElseThrow())
           .description("nshmp-haz generated curves")
           .timestamp(new Timestamp(System.currentTimeMillis()).toString())
           .vs30(sites.iterator().next().vs30);
@@ -250,7 +250,7 @@ public final class HazardExport {
     checkState(!used, "This result handler is expired");
     writeHazard(hazard);
     if (deagg.isPresent()) {
-      writeDeagg(deagg.get());
+      writeDeagg(deagg.orElseThrow());
     }
     resultCount++;
     if (resultCount % 10 == 0) {
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/calc/ReturnPeriod.java b/src/main/java/gov/usgs/earthquake/nshmp/calc/ReturnPeriod.java
index 6b840039827189bb60135cb5763d171a98d36e41..60130f96754e7dee7cd8589b16154a70f5a8967e 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/calc/ReturnPeriod.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/calc/ReturnPeriod.java
@@ -2,6 +2,7 @@ package gov.usgs.earthquake.nshmp.calc;
 
 import com.google.common.base.MoreObjects;
 
+import gov.usgs.earthquake.nshmp.Maths;
 import gov.usgs.earthquake.nshmp.mfd.Mfds;
 
 /**
@@ -88,6 +89,6 @@ public enum ReturnPeriod {
     String[] values = name.substring(2).split("IN");
     double prob = Double.parseDouble(values[0]) / 100.0;
     double time = Double.parseDouble(values[1]);
-    return Mfds.probToRate(prob, time);
+    return Maths.probabilityToRate(prob, time);
   }
 }
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/calc/Site.java b/src/main/java/gov/usgs/earthquake/nshmp/calc/Site.java
index 93c7c17ef94c730595b1e4a18e31542e15421061..c316c8a3651a7ee0d1f3afa935478035a13a4770 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/calc/Site.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/calc/Site.java
@@ -348,7 +348,8 @@ public class Site {
       try {
         double lon = Maths.round(location.longitude, 2);
         double lat = Maths.round(location.latitude, 2);
-        URL siteUrl = new URL(basinDataProvider.get() + String.format(BASIN_QUERY, lon, lat));
+        URL siteUrl =
+            new URL(basinDataProvider.orElseThrow() + String.format(BASIN_QUERY, lon, lat));
         HttpURLConnection connection = (HttpURLConnection) siteUrl.openConnection();
         try (Reader reader = new InputStreamReader(connection.getInputStream(), Charsets.UTF_8)) {
           return GSON.fromJson(reader, BasinTerms.class);
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/calc/Sites.java b/src/main/java/gov/usgs/earthquake/nshmp/calc/Sites.java
index dc400ae35779b0f7278d2afbae4fff1c185da515..baf3d2e629933357c9a5b5c047cb3bd33329d9b2 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/calc/Sites.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/calc/Sites.java
@@ -384,7 +384,7 @@ public abstract class Sites implements Iterable<Site> {
 
     Region mapRegion = calcRegion;
     if (mapBounds.isPresent()) {
-      Bounds b = mapBounds.get();
+      Bounds b = mapBounds.orElseThrow();
       Region r = Regions.createRectangular(boundsName, b.min, b.max);
       mapRegion = Regions.intersectionOf(mapName, r, calcRegion);
     }
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/data/ArrayXySequence.java b/src/main/java/gov/usgs/earthquake/nshmp/data/ArrayXySequence.java
index 5fdfdb7b6ae747d706ba8c114ca8058e1ac6f259..6e531a07bdd5d3048a92533379f96f59db88ac0f 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/data/ArrayXySequence.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/data/ArrayXySequence.java
@@ -149,6 +149,11 @@ class ArrayXySequence implements XySequence {
       return yUnchecked(index);
     }
 
+    @Override
+    public int index() {
+      return index;
+    }
+
     @Override
     public String toString() {
       return "XyPoint: [" + x() + ", " + y() + "]";
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/data/DelimitedData.java b/src/main/java/gov/usgs/earthquake/nshmp/data/DelimitedData.java
index aa50fc4b8b741c4913e7d49d25a5854731db0312..d2054a34a8e2a8c30b01dd2befdf2c6850670efc 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/data/DelimitedData.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/data/DelimitedData.java
@@ -195,7 +195,7 @@ public class DelimitedData {
         .filter(DelimitedData::filterComments)
         .findFirst();
     checkState(header.isPresent(), "Header row missing");
-    List<String> keyList = splitter.splitToList(header.get());
+    List<String> keyList = splitter.splitToList(header.orElseThrow());
     Set<String> keySet = Set.copyOf(keyList);
     checkState(
         keyList.size() == keySet.size(),
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/data/Indexing.java b/src/main/java/gov/usgs/earthquake/nshmp/data/Indexing.java
index bc57f13d13f6b72d11e1d9f890ff7b3cc4d977ae..686b36dfd6eb438b39efc3399a1b9ca523c9731f 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/data/Indexing.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/data/Indexing.java
@@ -80,7 +80,7 @@ public final class Indexing {
    *
    * <li>{@code NaN} is considered to be equal to itself and greater than all
    * other double values (including Double.POSITIVE_INFINITY) per the behavior
-   * of {@link Double#compareTo(Double)}.</li> <ul>
+   * of {@link Double#compareTo(Double)}.</li> </ul>
    *
    * @param data for which to compute sort indices
    * @param ascending sort order if {@code true}, descending if {@code false}
@@ -361,7 +361,7 @@ public final class Indexing {
    * @return the supplied {@code value}
    * @throws IllegalArgumentException
    */
-  public static double checkInRange(Range<Integer> range, String label, int value) {
+  public static int checkInRange(Range<Integer> range, String label, int value) {
     checkArgument(range.contains(value), "%s [%s] not in range %s", label, value, range);
     return value;
   }
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/data/MutableXySequence.java b/src/main/java/gov/usgs/earthquake/nshmp/data/MutableXySequence.java
index 67fc7036d435735e79fad1105a00653c10053ee0..3f68cb22f04eb0ba8dfc7aac4dbc39794c3b35f6 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/data/MutableXySequence.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/data/MutableXySequence.java
@@ -49,7 +49,7 @@ public interface MutableXySequence extends XySequence {
       Optional<double[]> ys) {
 
     return construct(Arrays.copyOf(xs, xs.length), (ys.isPresent())
-        ? Arrays.copyOf(ys.get(), ys.get().length)
+        ? Arrays.copyOf(ys.orElseThrow(), ys.orElseThrow().length)
         : new double[xs.length]);
   }
 
@@ -71,7 +71,7 @@ public interface MutableXySequence extends XySequence {
       Optional<Collection<? extends Number>> ys) {
 
     return construct(Doubles.toArray(xs), (ys.isPresent())
-        ? Doubles.toArray(ys.get())
+        ? Doubles.toArray(ys.orElseThrow())
         : new double[xs.size()]);
   }
 
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/data/Sequences.java b/src/main/java/gov/usgs/earthquake/nshmp/data/Sequences.java
index ea1df5014d6caefb304c2bde44cb904ef9732f46..a3e1c4f86ead255505d301a17292ae57f2f7aa0b 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/data/Sequences.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/data/Sequences.java
@@ -107,6 +107,18 @@ public class Sequences {
     return Δ;
   }
 
+  // TODO docs; consider moving to cumulate method in data/sequence package;
+  // not necessarily MFD specific
+  public static XySequence toCumulative(XySequence incremental) {
+    MutableXySequence cumulative = MutableXySequence.copyOf(incremental);
+    double sum = 0.0;
+    for (int i = incremental.size() - 1; i >= 0; i--) {
+      sum += incremental.y(i);
+      cumulative.set(i, sum);
+    }
+    return XySequence.copyOf(cumulative);
+  }
+
   /**
    * Create a new sequential value array builder. If {@code ((max - min) / Δ)}
    * is reasonable close to an integer (i.e. the value may be off by some
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/data/XyPoint.java b/src/main/java/gov/usgs/earthquake/nshmp/data/XyPoint.java
index 97a778c95ba023b4520af4c6f51148acc9dfa3d4..2e8d75fa261f92b588ff35d586abef70eebd6e36 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/data/XyPoint.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/data/XyPoint.java
@@ -20,6 +20,12 @@ public interface XyPoint {
    */
   double y();
 
+  /**
+   * Return the index of this point.
+   * @return index
+   */
+  int index();
+
   /**
    * Set the y-value of this point.
    *
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/data/XySequence.java b/src/main/java/gov/usgs/earthquake/nshmp/data/XySequence.java
index 7d8852fd2ab62dd0cbc1c6bfa2779b481e9245c1..9677a0a58c17a5d660c5929357ad9caae31386fc 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/data/XySequence.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/data/XySequence.java
@@ -5,6 +5,8 @@ import static com.google.common.base.Preconditions.checkArgument;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.Map;
+import java.util.Spliterator;
+import java.util.Spliterators;
 import java.util.TreeMap;
 import java.util.function.Predicate;
 import java.util.stream.Collectors;
@@ -36,10 +38,6 @@ import com.google.common.primitives.Doubles;
  */
 public interface XySequence extends Iterable<XyPoint> {
 
-  // TODO better spliterator?
-  // TODO add index to XyPoint?
-  // TODO singleton?
-
   /**
    * Create a new, immutable sequence from the supplied value arrays.
    *
@@ -203,6 +201,11 @@ public interface XySequence extends Iterable<XyPoint> {
     return StreamSupport.stream(spliterator(), false);
   }
 
+  @Override
+  default Spliterator<XyPoint> spliterator() {
+    return Spliterators.spliterator(iterator(), size(), Spliterator.SIZED);
+  }
+
   /**
    * Returns the number of points in this sequence.
    */
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/geo/Coordinates.java b/src/main/java/gov/usgs/earthquake/nshmp/geo/Coordinates.java
index 0654bdad7ede4bac9d04f462ed2b5870c229bb09..b816f88f3eb761229e1f8a33d8322dbb596fd184 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/geo/Coordinates.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/geo/Coordinates.java
@@ -14,7 +14,7 @@ import gov.usgs.earthquake.nshmp.Maths;
 public class Coordinates {
 
   /**
-   * The Authalic mean radius (A<subscript>r</subscript>) of the earth:
+   * The Authalic mean radius (A<sub>r</sub>) of the earth:
    * {@code 6371.0072 km}.
    *
    * @see <a href="http://en.wikipedia.org/wiki/Earth_radius#Authalic_radius"
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/geo/Locations.java b/src/main/java/gov/usgs/earthquake/nshmp/geo/Locations.java
index 55a3f6263425fd0b57a1c1dbb9c53d98d8a869a7..b70365cbea0ce893a1e8736301591814a53e9153 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/geo/Locations.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/geo/Locations.java
@@ -108,10 +108,10 @@ public final class Locations {
    * differences between the points as the sides of a right triangle. The
    * longitudinal distance is scaled by the cosine of the mean latitude.
    *
-   * <p><b>Note:</b> This method does <i>NOT</i> support values spanning
-   * #177;180° and fails where the numeric angle exceeds 180°. Convert data to
-   * the 0-360° interval or use {@link #horzDistance(Location, Location)} in
-   * such instances.
+   * <p><b>Note:</b> This method does <i>NOT</i> support values spanning ±180°
+   * and fails where the numeric angle exceeds 180°. Convert data to the 0-360°
+   * interval or use {@link #horzDistance(Location, Location)} in such
+   * instances.
    *
    * @param p1 the first {@code Location} point
    * @param p2 the second {@code Location} point
@@ -168,7 +168,7 @@ public final class Locations {
    * orthogonal.
    *
    * <p><b>Note:</b> This method is very imprecise at large separations and
-   * should not be used for points >200km apart. If an estimate of separation
+   * should not be used for points &gt;200km apart. If an estimate of separation
    * distance is not known in advance use
    * {@link #linearDistance(Location, Location)} for more reliable results.
    *
@@ -201,7 +201,7 @@ public final class Locations {
    * <p>This method, though more accurate over longer distances and line
    * lengths, is up to 20x slower than
    * {@link #distanceToLineFast(Location, Location, Location)}. However, this
-   * method returns accurate results for values spanning #177;180°.
+   * method returns accurate results for values spanning ±180°.
    *
    * <p>If the line should instead be treated as a segment such that the result
    * will be a distance to an endpoint if {@code p3} does not project onto the
@@ -231,8 +231,8 @@ public final class Locations {
    * each {@code Location} is ignored. This is a fast, geometric, cartesion
    * (flat-earth approximation) solution in which longitude of the line points
    * are scaled by the cosine of latitude; it is only appropriate for use over
-   * short distances (e.g. <200 km). The sign of the result indicates which side
-   * of the supplied line {@code p3} is on (right:[+] left:[-]).
+   * short distances (e.g. &lt;200 km). The sign of the result indicates which
+   * side of the supplied line {@code p3} is on (right:[+] left:[-]).
    *
    * <p><b>Note:</b> This method does <i>NOT</i> support values spanning ±180°
    * and results for such input values are not guaranteed. Convert data to the
@@ -366,7 +366,7 @@ public final class Locations {
    * component of each {@code Location} is ignored. This is a fast, geometric,
    * cartesion (flat-earth approximation) solution in which longitude of the
    * line points are scaled by the cosine of latitude; it is only appropriate
-   * for use over short distances (e.g. <200 km).
+   * for use over short distances (e.g. &lt;200 km).
    *
    * <p><b>Note:</b> This method fails for values spanning ±180°; see
    * {@link #distanceToSegment(Location, Location, Location)}.
@@ -468,8 +468,20 @@ public final class Locations {
   }
 
   /*
-   * Helper assumes supplied values in radians and kilometers. Returned location
-   * is rounded to 6 decimal places, sub-meter scale precision.
+   * Helper assumes supplied values in radians and kilometers.
+   *
+   * TODO revisit. Introduced rounding of result in refactoring for JSON. This
+   * changes hazard, most significantly when creating pseudo-sources for grid
+   * source optimization tables. I think it would be nice to have locations be
+   * lower precision when resampling traces or splitting location lists.
+   * Consider introducing a rounding flag.
+   *
+   * Interestingly, the Locations tests did not fail when the rounding was
+   * removed.
+   *
+   * Location.create( Maths.round(lon2 * Maths.TO_DEGREES, 5), Maths.round(lat2
+   * * Maths.TO_DEGREES, 5), Maths.round(depth + Δv, 5));
+   *
    */
   private static Location location(
       double lon,
@@ -486,10 +498,7 @@ public final class Locations {
     double cosD = cos(ad);
     double lat2 = asin(sinLat1 * cosD + cosLat1 * sinD * cos(az));
     double lon2 = lon + atan2(sin(az) * sinD * cosLat1, cosD - sinLat1 * sin(lat2));
-    return Location.create(
-        Maths.round(lon2 * Maths.TO_DEGREES, 5),
-        Maths.round(lat2 * Maths.TO_DEGREES, 5),
-        Maths.round(depth + Δv, 5));
+    return Location.create(lon2 * Maths.TO_DEGREES, lat2 * Maths.TO_DEGREES, depth + Δv);
   }
 
   /**
@@ -545,8 +554,8 @@ public final class Locations {
 
   /**
    * Returns {@code true} if the supplied {@code Location}s are very, very close
-   * to one another. Internally, lat, lon, and depth values must be within <1mm
-   * of each other.
+   * to one another. Internally, lat, lon, and depth values must be within
+   * &lt;1mm of each other.
    *
    * @param p1 the first {@code Location} to compare
    * @param p2 the second {@code Location} to compare
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/geo/json/Feature.java b/src/main/java/gov/usgs/earthquake/nshmp/geo/json/Feature.java
index eb1f0a14845a56bcb5521450629bab35179909e4..97929726465d18f6ef1df4e655d4ffbb8f4a2cb3 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/geo/json/Feature.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/geo/json/Feature.java
@@ -316,7 +316,7 @@ public class Feature {
 
     /**
      * Set the optional {@code bbox} (bounding box) field of this feature. See
-     * the GeoJSON <a href="http://geojson.org" target="_top">specification</a
+     * the GeoJSON <a href="http://geojson.org" target="_top">specification</a>
      * for details on bounding boxes.
      *
      * @throws IllegalArgument
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/geo/json/Properties.java b/src/main/java/gov/usgs/earthquake/nshmp/geo/json/Properties.java
index fddead0244d566a00a101c8241a4254766d059a1..2418d652a28d3ef692a8820402fb3b851f2262de 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/geo/json/Properties.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/geo/json/Properties.java
@@ -68,7 +68,7 @@ public final class Properties {
    */
   public <T> Optional<T> get(String key, Class<T> classOfT) {
     try {
-      JsonElement element = getJsonElement(key).get();
+      JsonElement element = getJsonElement(key).orElseThrow();
       T obj = GeoJson.GSON_DEFAULT.fromJson(element, classOfT);
       return Optional.of(obj);
     } catch (Exception e) {
@@ -94,7 +94,7 @@ public final class Properties {
    */
   public <T> Optional<T> get(String key, Type typeOfT) {
     try {
-      JsonElement element = getJsonElement(key).get();
+      JsonElement element = getJsonElement(key).orElseThrow();
       T obj = GeoJson.GSON_DEFAULT.fromJson(element, typeOfT);
       return Optional.of(obj);
     } catch (Exception e) {
@@ -111,7 +111,7 @@ public final class Properties {
    */
   public Optional<Boolean> getBoolean(String key) {
     try {
-      return Optional.of(getJsonElement(key).get().getAsBoolean());
+      return Optional.of(getJsonElement(key).orElseThrow().getAsBoolean());
     } catch (Exception e) {
       return Optional.empty();
     }
@@ -126,7 +126,7 @@ public final class Properties {
    */
   public OptionalInt getInt(String key) {
     try {
-      return OptionalInt.of(getJsonElement(key).get().getAsInt());
+      return OptionalInt.of(getJsonElement(key).orElseThrow().getAsInt());
     } catch (Exception e) {
       return OptionalInt.empty();
     }
@@ -141,7 +141,7 @@ public final class Properties {
    */
   public OptionalDouble getDouble(String key) {
     try {
-      return OptionalDouble.of(getJsonElement(key).get().getAsDouble());
+      return OptionalDouble.of(getJsonElement(key).orElseThrow().getAsDouble());
     } catch (Exception e) {
       return OptionalDouble.empty();
     }
@@ -156,7 +156,7 @@ public final class Properties {
    */
   public Optional<String> getString(String key) {
     try {
-      return Optional.of(getJsonElement(key).get().getAsString());
+      return Optional.of(getJsonElement(key).orElseThrow().getAsString());
     } catch (Exception e) {
       return Optional.empty();
     }
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/gmm/AbrahamsonEtAl_2014.java b/src/main/java/gov/usgs/earthquake/nshmp/gmm/AbrahamsonEtAl_2014.java
index d6d07e5d7568c7ab924794f99165708be4604ef7..31d05140612b8bcaece68680690b0efc7347a690 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/gmm/AbrahamsonEtAl_2014.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/gmm/AbrahamsonEtAl_2014.java
@@ -26,8 +26,8 @@ import gov.usgs.earthquake.nshmp.data.Interpolator;
 import gov.usgs.earthquake.nshmp.gmm.GmmInput.Constraints;
 
 /**
- * Implementation of the Abrahamson, Silva & Kamai (2014) next generation ground
- * motion model for active crustal regions developed as part of <a
+ * Implementation of the Abrahamson, Silva &amp; Kamai (2014) next generation
+ * ground motion model for active crustal regions developed as part of <a
  * href="http://peer.berkeley.edu/ngawest2">NGA West II</a>.
  *
  * <p><b>Note:</b> Direct instantiation of {@code GroundMotionModel}s is
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/gmm/AbrahamsonSilva_1997.java b/src/main/java/gov/usgs/earthquake/nshmp/gmm/AbrahamsonSilva_1997.java
index d7051df63b5c68d4d5ce253cf3af8d2917e15344..218e3f46323a877cbd1edf4e4a6d3c1a75cde32e 100755
--- a/src/main/java/gov/usgs/earthquake/nshmp/gmm/AbrahamsonSilva_1997.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/gmm/AbrahamsonSilva_1997.java
@@ -17,7 +17,7 @@ import gov.usgs.earthquake.nshmp.Faults;
 import gov.usgs.earthquake.nshmp.gmm.GmmInput.Constraints;
 
 /**
- * Implementation of the Abrahamson & Silva (1997) ground motion model for
+ * Implementation of the Abrahamson &amp; Silva (1997) ground motion model for
  * shallow earthquakes in active continental crust. In keeping with prior NSHMP
  * implementations of this older model, only soft rock sites are supported (Vs30
  * = 760 m/s).
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/gmm/AtkinsonBoore_2006.java b/src/main/java/gov/usgs/earthquake/nshmp/gmm/AtkinsonBoore_2006.java
index 74ec5749f4c7b4b133c13c4d1fcd3211413d5ea1..4f8013aeb12847cc98652422aa5844e2022b4346 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/gmm/AtkinsonBoore_2006.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/gmm/AtkinsonBoore_2006.java
@@ -22,8 +22,8 @@ import gov.usgs.earthquake.nshmp.gmm.GmmUtils.CeusSiteClass;
 
 /**
  * Abstract implementation of the ground motion model for stable continental
- * regions by Atkinson & Boore (2006). This implementation matches that used in
- * the 2008 USGS NSHMP. In addition to have two stress-drop scaling variants,
+ * regions by Atkinson &amp; Boore (2006). This implementation matches that used
+ * in the 2008 USGS NSHMP. In addition to have two stress-drop scaling variants,
  * this model also comes in magnitude converting (mb to Mw) flavors to support
  * the 2008 central and eastern US model.
  *
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/gmm/AtkinsonBoore_2006p.java b/src/main/java/gov/usgs/earthquake/nshmp/gmm/AtkinsonBoore_2006p.java
index 44ddf7c06854755992822dfa80183eb74448374d..c1681e7200b6bfd2aa21ed2087879eb4afed7925 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/gmm/AtkinsonBoore_2006p.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/gmm/AtkinsonBoore_2006p.java
@@ -13,10 +13,10 @@ import gov.usgs.earthquake.nshmp.gmm.GroundMotionTables.GroundMotionTable;
 
 /**
  * Modified form of the relationship for the Central and Eastern US by Atkinson
- * & Boore (2006). This implementation matches that used in the 2014 USGS NSHMP,
- * incorporates a new, magnitude-dependent stress parameter, and uses table
- * lookups instead of functional forms to compute ground motions. This relation
- * is commonly referred to as AB06 Prime (AB06').
+ * &amp; Boore (2006). This implementation matches that used in the 2014 USGS
+ * NSHMP, incorporates a new, magnitude-dependent stress parameter, and uses
+ * table lookups instead of functional forms to compute ground motions. This
+ * relation is commonly referred to as AB06 Prime (AB06').
  *
  * <p><b>Note:</b> Direct instantiation of {@code GroundMotionModel}s is
  * prohibited. Use {@link Gmm#instance(Imt)} to retrieve an instance for a
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/gmm/BcHydro_2012.java b/src/main/java/gov/usgs/earthquake/nshmp/gmm/BcHydro_2012.java
index 0f444e6f171cc2ef92baaa9bbe6761f4442c9acf..9931f55c3971b907c38b9358e659731c1ca07379 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/gmm/BcHydro_2012.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/gmm/BcHydro_2012.java
@@ -27,9 +27,9 @@ import gov.usgs.earthquake.nshmp.gmm.GmmInput.Constraints;
 // TODO add reference to UsgsPgvSupport
 /**
  * Abstract implementation of the subduction ground motion model created for BC
- * Hydro, Canada, by Addo, Abrahamson, & Youngs (2012). An update to this
- * implementation is documented in Abrahamson, Gregor, & Addo (2016). This model
- * supports both slab and interface type events.
+ * Hydro, Canada, by Addo, Abrahamson, &amp; Youngs (2012). An update to this
+ * implementation is documented in Abrahamson, Gregor, &amp; Addo (2016). This
+ * model supports both slab and interface type events.
  *
  * <p><b>Note:</b> Direct instantiation of {@code GroundMotionModel}s is
  * prohibited. Use {@link Gmm#instance(Imt)} to retrieve an instance for a
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/gmm/BooreAtkinson_2008.java b/src/main/java/gov/usgs/earthquake/nshmp/gmm/BooreAtkinson_2008.java
index 2eef94b745b24a6ad46da0b13e663ce918d39adf..623a39b2b99e40c2ce6a466067cedb8c4d1dba37 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/gmm/BooreAtkinson_2008.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/gmm/BooreAtkinson_2008.java
@@ -23,7 +23,7 @@ import gov.usgs.earthquake.nshmp.Faults;
 import gov.usgs.earthquake.nshmp.gmm.GmmInput.Constraints;
 
 /**
- * Implementation of the Boore & Atkinson (2008) next generation attenuation
+ * Implementation of the Boore &amp; Atkinson (2008) next generation attenuation
  * relationship for active crustal regions developed as part of <a
  * href="http://peer.berkeley.edu/ngawest/" target="_top">NGA West I</a>.
  *
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/gmm/BooreEtAl_1997.java b/src/main/java/gov/usgs/earthquake/nshmp/gmm/BooreEtAl_1997.java
index afbd2df2a0d89e5342f93d26f9c13643689d4888..b09523003a872fe63ca52efa35b56e197046cf96 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/gmm/BooreEtAl_1997.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/gmm/BooreEtAl_1997.java
@@ -17,10 +17,10 @@ import gov.usgs.earthquake.nshmp.Faults;
 import gov.usgs.earthquake.nshmp.gmm.GmmInput.Constraints;
 
 /**
- * Implementation of the Boore, Joyner & Fumal (1997) ground motion model for
- * shallow earthquakes in active continental crust. In keeping with prior NSHMP
- * implementations of this older model, only soft rock sites are supported (Vs30
- * = 760 m/s).
+ * Implementation of the Boore, Joyner &amp; Fumal (1997) ground motion model
+ * for shallow earthquakes in active continental crust. In keeping with prior
+ * NSHMP implementations of this older model, only soft rock sites are supported
+ * (Vs30 = 760 m/s).
  *
  * <p><b>Note:</b> Direct instantiation of {@code GroundMotionModel}s is
  * prohibited. Use {@link Gmm#instance(Imt)} to retrieve an instance for a
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/gmm/BooreEtAl_2014.java b/src/main/java/gov/usgs/earthquake/nshmp/gmm/BooreEtAl_2014.java
index 50f7ca6f3c99f31b9256e2a89ff01d2adbd17908..a592e4fbcbfb18d213d989ba103df26d71b0a667 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/gmm/BooreEtAl_2014.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/gmm/BooreEtAl_2014.java
@@ -23,7 +23,7 @@ import gov.usgs.earthquake.nshmp.Faults;
 import gov.usgs.earthquake.nshmp.gmm.GmmInput.Constraints;
 
 /**
- * Implementation of the Boore, Stewart, Seyhan, & Atkinson (2014) next
+ * Implementation of the Boore, Stewart, Seyhan, &amp; Atkinson (2014) next
  * generation ground motion model for active crustal regions developed as part
  * of<a href="http://peer.berkeley.edu/ngawest2">NGA West II</a>.
  *
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/gmm/CampbellBozorgnia_2003.java b/src/main/java/gov/usgs/earthquake/nshmp/gmm/CampbellBozorgnia_2003.java
index 63c96478f1db8b887f1144e5d0e92e325b0e8adc..9b35e467e4f1966ed34d73144482cd8b0438a0e5 100755
--- a/src/main/java/gov/usgs/earthquake/nshmp/gmm/CampbellBozorgnia_2003.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/gmm/CampbellBozorgnia_2003.java
@@ -19,14 +19,14 @@ import gov.usgs.earthquake.nshmp.Faults;
 import gov.usgs.earthquake.nshmp.gmm.GmmInput.Constraints;
 
 /**
- * Implementation of the Campbell & Bozorgnia (2003) ground motion model for
+ * Implementation of the Campbell &amp; Bozorgnia (2003) ground motion model for
  * shallow earthquakes in active continental crust. In keeping with prior NSHMP
  * implementations of this older model, only soft rock sites are supported (Vs30
  * = 760 m/s) following specific guidance by the model authors. Longer periods
- * are also not implemented here due to a dependency on Boore, Joyner & Fumal
- * (1997) whose longest period is 2 sec. This implementation also ignores the
- * author's suggestion to use 'uncorrected' PGA values when computing PGA and
- * uses 'corrected' values for both PGA and spectral acceleration.
+ * are also not implemented here due to a dependency on Boore, Joyner &amp;
+ * Fumal (1997) whose longest period is 2 sec. This implementation also ignores
+ * the author's suggestion to use 'uncorrected' PGA values when computing PGA
+ * and uses 'corrected' values for both PGA and spectral acceleration.
  *
  * <p><b>Note:</b> Direct instantiation of {@code GroundMotionModel}s is
  * prohibited. Use {@link Gmm#instance(Imt)} to retrieve an instance for a
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/gmm/CampbellBozorgnia_2008.java b/src/main/java/gov/usgs/earthquake/nshmp/gmm/CampbellBozorgnia_2008.java
index 649839fc85d23ecf59268b8c97ae90020d768294..394f5d1967f217dc74100427c5f431ba2e1fb084 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/gmm/CampbellBozorgnia_2008.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/gmm/CampbellBozorgnia_2008.java
@@ -29,8 +29,8 @@ import gov.usgs.earthquake.nshmp.Faults;
 import gov.usgs.earthquake.nshmp.gmm.GmmInput.Constraints;
 
 /**
- * Implementation of the Campbell & Bozorgnia (2008) next generation attenuation
- * for active crustal regions relationship developed as part of <a
+ * Implementation of the Campbell &amp; Bozorgnia (2008) next generation
+ * attenuation for active crustal regions relationship developed as part of <a
  * href="http://peer.berkeley.edu/ngawest/" target="_top">NGA West I</a>.
  *
  * <p><b>Note:</b> Direct instantiation of {@code GroundMotionModel}s is
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/gmm/CampbellBozorgnia_2014.java b/src/main/java/gov/usgs/earthquake/nshmp/gmm/CampbellBozorgnia_2014.java
index 2f32c749a0632b2d77ea402a781b35982643ddc3..0fa965c1412740e8ab8cbf468dd6d15e2942fdd1 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/gmm/CampbellBozorgnia_2014.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/gmm/CampbellBozorgnia_2014.java
@@ -31,7 +31,7 @@ import gov.usgs.earthquake.nshmp.Maths;
 import gov.usgs.earthquake.nshmp.gmm.GmmInput.Constraints;
 
 /**
- * Implementation of the Campbell & Bozorgnia (2014) next generation ground
+ * Implementation of the Campbell &amp; Bozorgnia (2014) next generation ground
  * motion model for active crustal regions developed as part of <a
  * href="http://peer.berkeley.edu/ngawest2" target="_top">NGA West II</a>.
  *
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/gmm/ChiouYoungs_2008.java b/src/main/java/gov/usgs/earthquake/nshmp/gmm/ChiouYoungs_2008.java
index 8f5b671faed52cba239c291a5e7f6039517ae8ba..11b6bce8e068a73c68124a821d033ba94ce8adcd 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/gmm/ChiouYoungs_2008.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/gmm/ChiouYoungs_2008.java
@@ -28,7 +28,7 @@ import gov.usgs.earthquake.nshmp.Maths;
 import gov.usgs.earthquake.nshmp.gmm.GmmInput.Constraints;
 
 /**
- * Implementation of the Chiou & Youngs (2008) next generation attenuation
+ * Implementation of the Chiou &amp; Youngs (2008) next generation attenuation
  * relationship for active crustal regions developed as part of <a
  * href="http://peer.berkeley.edu/ngawest/" target="_top">NGA West I</a>.
  *
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/gmm/ChiouYoungs_2014.java b/src/main/java/gov/usgs/earthquake/nshmp/gmm/ChiouYoungs_2014.java
index 0d4c00f99b599ae928e44ddc69dcae33d90c9ba8..8a092e73e9a0e45251dd1e1edc1c9f545829e33a 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/gmm/ChiouYoungs_2014.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/gmm/ChiouYoungs_2014.java
@@ -27,7 +27,7 @@ import gov.usgs.earthquake.nshmp.Maths;
 import gov.usgs.earthquake.nshmp.gmm.GmmInput.Constraints;
 
 /**
- * Implementation of the Chiou & Youngs (2014) next generation attenuation
+ * Implementation of the Chiou &amp; Youngs (2014) next generation attenuation
  * relationship for active crustal regions developed as part of <a
  * href="http://peer.berkeley.edu/ngawest2" target="_top">NGA West II</a>.
  *
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/gmm/Gmm.java b/src/main/java/gov/usgs/earthquake/nshmp/gmm/Gmm.java
index ba78f7a1c51dcf4a6d788939ee08495334989084..74fc8ae09adb1f970f33f3096229e55bbfde6cbc 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/gmm/Gmm.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/gmm/Gmm.java
@@ -969,7 +969,7 @@ public enum Gmm {
       NgaEastUsgs_2017.COEFFS_SIGMA_PANEL,
       NgaEastUsgs_2017.CONSTRAINTS),
 
-  /** NGA-East for USGS Seed Models with Guo & Chapman Gulf CPA **/
+  /** NGA-East for USGS Seed Models with Guo &amp; Chapman Gulf CPA **/
   NGA_EAST_USGS_SEEDS_CPA(
       NgaEastUsgs_2017.UsgsSeedsCpa.class,
       NgaEastUsgs_2017.UsgsSeedsCpa.NAME,
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/gmm/GmmInput.java b/src/main/java/gov/usgs/earthquake/nshmp/gmm/GmmInput.java
index 81ce1cefadb22cb531e718378a71983c36996d05..d7b443f286da917e12d8b0ee2e9963a80fe368ea 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/gmm/GmmInput.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/gmm/GmmInput.java
@@ -714,7 +714,7 @@ public class GmmInput {
       for (Entry<Field, Optional<?>> entry : constraintMap.entrySet()) {
         Optional<?> opt = entry.getValue();
         String optStr = Strings.padEnd(
-            opt.isPresent() ? opt.get().toString() : "", CONSTRAINT_STR_COL_WIDTH, ' ');
+            opt.isPresent() ? opt.orElseThrow().toString() : "", CONSTRAINT_STR_COL_WIDTH, ' ');
         sb.append(optStr);
       }
       return sb;
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/gmm/GraizerKalkan_2015.java b/src/main/java/gov/usgs/earthquake/nshmp/gmm/GraizerKalkan_2015.java
index cf0b0bd37eabb4548c78b7e74d6ca38940681df4..40ef8e65f9a716d1696d16cb99eb9a8c7b636e1a 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/gmm/GraizerKalkan_2015.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/gmm/GraizerKalkan_2015.java
@@ -22,7 +22,7 @@ import gov.usgs.earthquake.nshmp.data.Interpolator;
 import gov.usgs.earthquake.nshmp.gmm.GmmInput.Constraints;
 
 /**
- * Implementation of the updated Graizer & Kalkan (2015, 2016) ground motion
+ * Implementation of the updated Graizer &amp; Kalkan (2015, 2016) ground motion
  * model. The model computes spectral accelerations as a continuous function of
  * spectral period. For consistency with the NGA-West2, this implementation
  * supports the common set of (22) spectral periods supported by the NGA-West2
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/gmm/MagConverter.java b/src/main/java/gov/usgs/earthquake/nshmp/gmm/MagConverter.java
index 228a14fde2f026f45eabcc5924361aac92fd7efd..46f30bdbad7ff57f6f7cad64df92a581fd61a328 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/gmm/MagConverter.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/gmm/MagConverter.java
@@ -25,7 +25,7 @@ public enum MagConverter {
   },
 
   /**
-   * m<sub>b</sub> to M<sub>w</sub> conversion of Atkinson & Boore (1995).
+   * m<sub>b</sub> to M<sub>w</sub> conversion of Atkinson &amp; Boore (1995).
    *
    * <p><b>Reference:</b> Atkinson, G.M., and Boore, D.M., 1995, Ground motion
    * relations for eastern North America: Bulletin of the Seismological Society
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/gmm/MunsonThurber_1997.java b/src/main/java/gov/usgs/earthquake/nshmp/gmm/MunsonThurber_1997.java
index d90c06f2e5e856292cdaa1021a384bb2d3163f0e..2b8668cb9447e75328cf1491c4b35c561bcf4b54 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/gmm/MunsonThurber_1997.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/gmm/MunsonThurber_1997.java
@@ -12,7 +12,7 @@ import com.google.common.collect.Range;
 import gov.usgs.earthquake.nshmp.gmm.GmmInput.Constraints;
 
 /**
- * Implementation of the Munson & Thurber (1997) ground motion model for
+ * Implementation of the Munson &amp; Thurber (1997) ground motion model for
  * horizontal peak ground acceleration (PGA) for the island of Hawaii. In
  * keeping with prior NSHMP implementations of this older model, only lava sites
  * are supported (Vs30 = 650 m/s).
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/gmm/PezeshkEtAl_2011.java b/src/main/java/gov/usgs/earthquake/nshmp/gmm/PezeshkEtAl_2011.java
index fe7cc8ff5e475af6da9507d4c4c5dd5c1a1f4f9b..160f33c9c699a081b88d7ac42c5d74c9fe5cf2e5 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/gmm/PezeshkEtAl_2011.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/gmm/PezeshkEtAl_2011.java
@@ -14,10 +14,10 @@ import gov.usgs.earthquake.nshmp.gmm.GmmInput.Constraints;
 import gov.usgs.earthquake.nshmp.gmm.GroundMotionTables.GroundMotionTable;
 
 /**
- * Implementation of the Pezeshk, Zandieh, & Tavakoli (2011) ground motion model
- * for stable continental regions. This implementation matches that used in the
- * 2014 USGS NSHMP and uses table lookups (median) and functional forms (sigma)
- * to compute ground motions.
+ * Implementation of the Pezeshk, Zandieh, &amp; Tavakoli (2011) ground motion
+ * model for stable continental regions. This implementation matches that used
+ * in the 2014 USGS NSHMP and uses table lookups (median) and functional forms
+ * (sigma) to compute ground motions.
  *
  * <p><b>Note:</b> Direct instantiation of {@code GroundMotionModel}s is
  * prohibited. Use {@link Gmm#instance(Imt)} to retrieve an instance for a
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/gmm/TavakoliPezeshk_2005.java b/src/main/java/gov/usgs/earthquake/nshmp/gmm/TavakoliPezeshk_2005.java
index 14f14697fc924fea4560f25c8026968fd1f3488e..c62a3be142f1b117fc4700b1b16520a743813559 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/gmm/TavakoliPezeshk_2005.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/gmm/TavakoliPezeshk_2005.java
@@ -15,7 +15,7 @@ import com.google.common.collect.Range;
 import gov.usgs.earthquake.nshmp.gmm.GmmInput.Constraints;
 
 /**
- * Implementation of the Tavakoli & Pezeshk (2005) ground motion model for
+ * Implementation of the Tavakoli &amp; Pezeshk (2005) ground motion model for
  * stable continental regions. This implementation matches that used in the 2008
  * USGS NSHMP and comes in two additional magnitude converting (mb to Mw)
  * flavors to support the 2008 central and eastern US model.
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/gmm/UsgsPgvSupport.java b/src/main/java/gov/usgs/earthquake/nshmp/gmm/UsgsPgvSupport.java
index 8aa1acf3dfc7d625b270e7e18688f041dcfcd895..2a1179fa2d50ebc1794aa44f6ca18ac2333dc944 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/gmm/UsgsPgvSupport.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/gmm/UsgsPgvSupport.java
@@ -155,8 +155,8 @@ class UsgsPgvSupport {
     double targetPeriod = computeTPgv(in.Mw);
 
     // GMM sa Imt bounds
-    Imt minImt = saImts.stream().findFirst().get();
-    Imt maxImt = Streams.findLast(saImts.stream()).get();
+    Imt minImt = saImts.stream().findFirst().orElseThrow();
+    Imt maxImt = Streams.findLast(saImts.stream()).orElseThrow();
     double minPeriod = minImt.period();
     double maxPeriod = maxImt.period();
 
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/gmm/YoungsEtAl_1997.java b/src/main/java/gov/usgs/earthquake/nshmp/gmm/YoungsEtAl_1997.java
index b20183467cf6165e07b3b72695c3f816639d8894..b8a39135c8ca3bed1e1aacf1f79223ce19d9b44d 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/gmm/YoungsEtAl_1997.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/gmm/YoungsEtAl_1997.java
@@ -24,8 +24,8 @@ import gov.usgs.earthquake.nshmp.gmm.GmmInput.Constraints;
  * implementation has been modified from its original form to an NGA style (S.
  * Harmsen 7/13/2009) wherein mean ground motion varies continuously with Vs30
  * (sigma remains the same as original). This is acheived through use of a
- * period-dependent site amplification function modified from Boore & Atkinson
- * (2008).
+ * period-dependent site amplification function modified from Boore &amp;
+ * Atkinson (2008).
  *
  * <p>This model supports both slab and interface type events. In the 2008
  * NSHMP, the 'interface' form is used with the Cascadia subduction zone models
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/internal/www/WsUtils.java b/src/main/java/gov/usgs/earthquake/nshmp/internal/www/WsUtils.java
index 88a240ed9fa1f92e14ef451fa5ad474f6d6b65b0..b76a1c8586fc93c6ffe79531bb666aeb461358b1 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/internal/www/WsUtils.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/internal/www/WsUtils.java
@@ -71,7 +71,7 @@ public class WsUtils {
       for (Field field : Field.values()) {
         Optional<?> opt = constraints.get(field);
         if (opt.isPresent()) {
-          Range<?> value = (Range<?>) opt.get();
+          Range<?> value = (Range<?>) opt.orElseThrow();
           Constraint constraint = new Constraint(
               field.id,
               value.lowerEndpoint(),
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/mfd/Mfd.java b/src/main/java/gov/usgs/earthquake/nshmp/mfd/Mfd.java
index ae89763211edcb0599d6df63d2eda9129b96b6b1..b0340803b76435a7233f48f5b85d37160370462e 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/mfd/Mfd.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/mfd/Mfd.java
@@ -1,22 +1,28 @@
 package gov.usgs.earthquake.nshmp.mfd;
 
+import static com.google.common.base.Preconditions.checkNotNull;
+import static com.google.common.base.Preconditions.checkState;
 import static gov.usgs.earthquake.nshmp.Earthquakes.checkMagnitude;
-import static gov.usgs.earthquake.nshmp.Earthquakes.magToMoment;
 import static gov.usgs.earthquake.nshmp.mfd.Mfd.Type.GR;
+import static gov.usgs.earthquake.nshmp.mfd.Mfd.Type.GR_MMAX_GR;
+import static gov.usgs.earthquake.nshmp.mfd.Mfd.Type.GR_MMAX_SINGLE;
 import static gov.usgs.earthquake.nshmp.mfd.Mfd.Type.GR_TAPER;
-import static gov.usgs.earthquake.nshmp.mfd.Mfd.Type.INCR;
 import static gov.usgs.earthquake.nshmp.mfd.Mfd.Type.SINGLE;
-import static gov.usgs.earthquake.nshmp.mfd.Mfds.gutenbergRichterRate;
+import static gov.usgs.earthquake.nshmp.mfd.Mfds.checkGaussianMagnitudeCount;
+import static gov.usgs.earthquake.nshmp.mfd.Mfds.checkGaussianSigma;
+import static gov.usgs.earthquake.nshmp.mfd.Mfds.checkGaussianSigmaCount;
+import static gov.usgs.earthquake.nshmp.mfd.Mfds.checkGrBvalue;
+import static gov.usgs.earthquake.nshmp.mfd.Mfds.checkRate;
 
 import java.util.Arrays;
 import java.util.EnumSet;
-import java.util.Map;
 import java.util.Optional;
 import java.util.Set;
 import java.util.function.Consumer;
 
 import gov.usgs.earthquake.nshmp.Earthquakes;
 import gov.usgs.earthquake.nshmp.Maths;
+import gov.usgs.earthquake.nshmp.Text;
 import gov.usgs.earthquake.nshmp.data.MutableXySequence;
 import gov.usgs.earthquake.nshmp.data.Sequences;
 import gov.usgs.earthquake.nshmp.data.XyPoint;
@@ -58,8 +64,20 @@ import gov.usgs.earthquake.nshmp.data.XySequence;
  *
  * <p>MFD {@link Properties} hold references to the parameters used to
  * initialize an MFD, including its {@link Type}. Properties objects may be
- * constructed directly and support a {@link Properties#toBuilder()} method.
- * NSHM
+ * constructed directly and support a {@link Properties#toBuilder()} method.
+ * The {@code Mfd.Type} will be {@code INCREMENTAL} if the MFD was created
+ * directly from magnitude and rate data or is the result of combining more than
+ * one MFD via {@link Mfds#combine(java.util.Collection)}. Similarly, properties
+ * <i>may</i> not align with an associated MFD downstream if a builder
+ * initialized with a set of properties has been used to create multiple MFDs
+ * with variably scaled rates or transformed in various ways. Note also that
+ * when MFDs are {@link Mfds#combine(java.util.Collection) combined}, the
+ * resulting combined MFD will be of type: {@link Type#INCR}.
+ *
+ * <p>Users of MFDs are encouraged to take advantage of the conveniences of the
+ * underlying {@code XySequence}, especially streaming support. For example, to
+ * compute the cumulative rate of events in an MFD, one should use
+ * {@code mfd.data().yValues().sum()}.
  *
  * @author U.S. Geological Survey
  * @see Mfds
@@ -67,6 +85,24 @@ import gov.usgs.earthquake.nshmp.data.XySequence;
  */
 public final class Mfd {
 
+  /*
+   * Developer notes:
+   *
+   * Builders: use of no-arg constructors ensure correct deserialization
+   * behavior where existing fields such as GutenbergRichter.a and Single.rate
+   * will not be overridden if absent from JSON.
+   *
+   * Builders: MFD properties are checked in toBuilder() methods allowing us to
+   * verify values set via JSON deserialization. toBuilder() methods
+   * additionally check that type is correctly set to defend against
+   * deserialization errors.
+   *
+   * TODO equals/hashCode?
+   *
+   * TODO note in documentation default a and rate = 1 for correct scaling
+   * behavior
+   */
+
   private final XySequence data;
   private final Properties props;
 
@@ -75,16 +111,25 @@ public final class Mfd {
     this.props = props;
   }
 
-  /** The immutable values representing this MFD. */
+  /**
+   * The immutable values representing this MFD.
+   */
   public XySequence data() {
     return data;
   }
 
-  /** The properties used to initialize the MFD. */
+  /**
+   * The properties used to initialize the MFD.
+   */
   public Properties properties() {
     return props;
   }
 
+  @Override
+  public String toString() {
+    return "MFD:" + props + Text.NEWLINE + data;
+  }
+
   /**
    * Create an MFD from copies of the supplied arrays. The returned MFD will be
    * of type: {@link Type#INCR}.
@@ -96,6 +141,10 @@ public final class Mfd {
    *         are not the same size
    * @throws IllegalArgumentException if {@code magnitudes} does not increase
    *         monotonically or contains repeated values
+   * @throws IllegalArgumentException if {@code magnitudes} or {@code rates}
+   *         contain invalid values per
+   *         {@link Earthquakes#checkMagnitude(double)} and
+   *         {@link Mfds#checkRate(double)}
    */
   public static Mfd create(double[] magnitudes, double[] rates) {
     return create(XySequence.create(magnitudes, rates));
@@ -106,9 +155,14 @@ public final class Mfd {
    * {@link Type#INCR}. The supplied sequence will only be copied if necessary.
    *
    * @param xy data to wrap in an MFD
+   * @throws IllegalArgumentException if sequence contains invalid magnitudes
+   *         (x-values) or rates (y-values) per
+   *         {@link Earthquakes#checkMagnitude(double)} and
+   *         {@link Mfds#checkRate(double)}
    */
   public static Mfd create(XySequence xy) {
-    return new Mfd(XySequence.copyOf(xy), new Properties(INCR));
+    Mfds.checkValues(xy.xValues(), xy.yValues());
+    return new Mfd(XySequence.copyOf(xy), new Properties());
   }
 
   /**
@@ -119,7 +173,7 @@ public final class Mfd {
    *         {@code [-2.0..9.7]}
    */
   public static Mfd.Builder newSingleBuilder(double magnitude) {
-    return Mfd.Builder.initSingle(new Properties.Single(magnitude));
+    return new Properties.Single(magnitude).toBuilder();
   }
 
   /**
@@ -129,22 +183,28 @@ public final class Mfd {
    * rounds each magnitude to 5 decimal places. This implementation constructs a
    * distribution that is {@code μ ± 2σ} wide and represented by {@code nm}
    * evenly distributed magnitude values. The initial distribution integrates to
-   * one.
+   * one. The MFD, once built, will be of type: {@link Type#SINGLE} as it
+   * represents a single magnitude combined with a model of uncertainty. In the
+   * context of a hazard model, a normal distribution of magnitudes is typically
+   * used to represent aleatory variability.
    *
-   * @param μ the mean magnitude
+   * @param mu (μ) the mean magnitude
    * @param nm the number of magnitudes in the resultant distribution
-   * @param σ the standard deviation
-   * @param nσ the number of standard deviations about {@code μ}
+   * @param sigma (σ) the standard deviation
+   * @param nsigma (nσ) the number of standard deviations about {@code μ}
    * @throws IllegalArgumentException if {@code μ} is outside the range
    *         {@code [-2.0..9.7]}
    * @throws IllegalArgumentException if {@code σ} is outside the range
    *         {@code (0..0.5]}
+   * @throws IllegalArgumentException if {@code nσ} is outside the range
+   *         {@code [0..5]}
+   * @throws IllegalArgumentException if {@code nm} is outside the range
+   *         {@code [1..21]}
    */
   public static Mfd.Builder newGaussianBuilder(
       double μ, int nm,
       double σ, int nσ) {
-
-    return Mfd.Builder.initGaussian(new Properties.Single(μ), nm, σ, nσ);
+    return new Properties.Single(μ).toGaussianBuilder(nm, σ, nσ);
   }
 
   /**
@@ -152,26 +212,30 @@ public final class Mfd {
    * href="https://en.wikipedia.org/wiki/Gutenberg–Richter_law"
    * target="_top">Gutenberg–Richter</a> MFD. When building the underlying
    * magnitude distribtion, this implementation rounds each magnitude to 4
-   * decimal places.
+   * decimal places. Note that {@code mMin} and {@code mMax} define the
+   * lowermost and uppermost magnitude bin edges in a magnitude distribution,
+   * respectively, and that magnitude values in a Gutenberg-Richter MFD are
+   * defined as bin centers. That is, if {@code mMin = 5.0} and
+   * {@code Δm = 0.1}, the first magnitude in the distribution will be
+   * {@code mMin + Δm / 2 = 5.05}.
    *
+   * @param b the Gutenberg-Richter b-value
+   * @param deltam (Δm) the magnitude step of the distribution
    * @param mMin the minimum truncation magnitude
    * @param mMax the maximum truncation magnitude
-   * @param Δm the magnitude step of the distribtion
-   * @param b the Gutenberg-Richter b-value
    * @throws IllegalArgumentException if {@code mMin} or {@code mMax} is outside
    *         the range {@code [-2.0..9.7]}
    * @throws IllegalArgumentException if {@code mMin > mMax}
    * @throws IllegalArgumentException if {@code Δm > mMax - mMin}
    * @throws IllegalArgumentException if {@code b} is outside the range
-   *         {@code [0..2]}
+   *         {@code [0..3]}
    */
   public static Mfd.Builder newGutenbergRichterBuilder(
-      double mMin,
-      double mMax,
+      double b,
       double Δm,
-      double b) {
-
-    return Mfd.Builder.initGutenbergRichter(new Properties.GutenbergRichter(b, Δm, mMin, mMax));
+      double mMin,
+      double mMax) {
+    return new Properties.GutenbergRichter(1.0, b, Δm, mMin, mMax).toBuilder();
   }
 
   /**
@@ -184,11 +248,16 @@ public final class Mfd {
    * compute scale factors for each magnitude bin that are then applied to a
    * standard Gutenberg–Richter distribution. When building the underlying
    * magnitude distribtion, this implementation rounds each magnitude to 4
-   * decimal places.
+   * decimal places. Note that {@code mMin} and {@code mMax} define the
+   * lowermost and uppermost magnitude bin edges in a magnitude distribution,
+   * respectively, and that magnitude values in a Gutenberg-Richter MFD are
+   * defined as bin centers. That is, if {@code mMin = 5.0} and
+   * {@code Δm = 0.1}, the first magnitude in the distribution will be
+   * {@code mMin + Δm / 2 = 5.05}.
    *
    * @param mMin the minimum truncation magnitude
    * @param mMax the maximum truncation magnitude
-   * @param Δm the magnitude step of the distribtion
+   * @param deltam (Δm) the magnitude step of the distribution
    * @param b the Gutenberg-Richter b-value
    * @param mc the corner magnitude of the distribution
    * @throws IllegalArgumentException if {@code mMin}, {@code mMax}, or
@@ -197,7 +266,7 @@ public final class Mfd {
    *         not {@code mMin < mc < mMax}
    * @throws IllegalArgumentException if {@code Δm > mMax – mMin}
    * @throws IllegalArgumentException if {@code b} is outside the range
-   *         {@code [0..2]}
+   *         {@code [0..3]}
    */
   public static Mfd.Builder newTaperedGutenbergRichterBuilder(
       double b,
@@ -205,28 +274,40 @@ public final class Mfd {
       double mMin,
       double mMax,
       double mc) {
-
-    return Mfd.Builder.initTaperedGutenbergRichter(
-        new Properties.GrTaper(b, Δm, mMin, mMax, mc));
+    return new Properties.TaperedGr(1.0, b, Δm, mMin, mMax, mc).toBuilder();
   }
 
   /** Magnitude-frequency distribution (MFD) type identifier. */
   public enum Type {
 
-    /** An MFD with a single magnitude and rate. */
+    /**
+     * An MFD with a single magnitude and rate.
+     *
+     * @see Mfd#newSingleBuilder(double)
+     */
     SINGLE,
 
-    /** An MFD defining multiple magnitudes with varying rates. */
+    /**
+     * An MFD defining multiple magnitudes with varying rates.
+     *
+     * @see Mfd#create(double[], double[])
+     * @see Mfd#create(XySequence)
+     */
     INCR,
 
-    /** An incremental Gutenberg-Richter MFD. */
+    /**
+     * An incremental Gutenberg-Richter MFD.
+     *
+     * @see Mfd#newGutenbergRichterBuilder(double, double, double, double)
+     */
     GR,
 
     /**
      * An incremental Gutenberg-Richter MFD with a rate scaling hint. This type
      * of MFD is used with NSHM grid sources to indicate that rates above
      * {@code M=6.5} (the minimum magnitude for finite faults) should be set to
-     * zero if certain conditions are met.
+     * zero if certain conditions are met. This type of MFD cannot be created
+     * directly.
      */
     GR_MMAX_GR,
 
@@ -234,15 +315,17 @@ public final class Mfd {
      * An incremental Gutenberg-Richter MFD with a rate scaling hint. This type
      * of MFD is used with NSHM grid sources to indicate rates above some
      * 'characterisitic' fault magnitude should be set to zero if certain
-     * conditions are met.
+     * conditions are met. This type of MFD cannot be created directly.
      */
     GR_MMAX_SINGLE,
 
-    /** A Gutenberg-Richter MFD with a tapered upper tail. */
+    /**
+     * A Gutenberg-Richter MFD with a tapered upper tail.
+     *
+     * @see Mfd#newTaperedGutenbergRichterBuilder(double, double, double,
+     *      double, double)
+     */
     GR_TAPER;
-
-    /** A set containing all Gutenberg-Richter MFD types. */
-    public static final Set<Type> GR_TYPES = EnumSet.of(GR, GR_MMAX_GR, GR_MMAX_SINGLE, GR_TAPER);
   }
 
   /**
@@ -258,67 +341,31 @@ public final class Mfd {
     private final Properties props;
     private final MutableXySequence mfd;
 
-    /*
-     * TODO clean Developer notes:
-     *
-     * add constraints preconditions for params other than magnitude
-     *
-     * doublecheck preconditions and tests thereof
-     *
-     * Double counting: a long-present feature in the NSHMs is the capping of
-     * mMax in grid cells that also host a finite fault source. Because the
-     * finite fault source MFDs are composed of GR and SINGLE branches, there
-     * are corresponding caps on the grid source side, 6.5 for GR, and whatever
-     * the 'characteristic' magnitude of the local fault is for SINGLE.
-     *
-     * Builder provides access to type() to handle MFD subtypes such as
-     * GR_MMAX_GR that indicate special rate scaling, but there are significant
-     * benefits (shared backing arrays) to the repeated use of logic trees of
-     * builders when generating logic trees of MFDs.
-     *
-     * For documentation:
-     *
-     * Generally, we want MFDs that feed into the hazard integral to be clean.
-     * By clean, we mean free of leading or trailing magnitude bins with rates
-     * equal to zero. However, there are circumstances where it makes more sense
-     * to allow zero rate bins. One such case arises when addressing double
-     * counting in grid source cells where a fault is present. In these cells,
-     * MFDs are truncated at fault-GR-mMin or fault-CH. In order to support a
-     * shared MFD backing arrays with mMin == mMax over all grid sources, we
-     * reduce the rates of the uppermost magnitude bins to zero rather than
-     * truncating the entire MFD.
-     *
-     * TODO note MFD types that get created from from() methods below
-     *
-     * TODO note what happens when combining MFDs wrt to resultant type.
-     *
-     * TODO developer notes: use of no-arg constructors ensure correct
-     * deserialization behavior where existing fields such as GutenbergRichter.a
-     * and Single.rate will not be overridden if absent from JSON.
-     */
-
     /**
      * Create an {@code Mfd.Builder} initialized with copies of the supplied
-     * arrays.
+     * arrays. MFDs created from this builder will be of type:
+     * {@link Type#INCR}.
      *
      * @param magnitudes to initialize builder with
      * @param rates to initialize builder with
      */
     public static Builder from(double[] magnitudes, double[] rates) {
-      return new Builder(new Properties(INCR), magnitudes, Optional.of(rates));
+      return new Builder(new Properties(), magnitudes, rates);
     }
 
     /**
      * Create an {@code Mfd.Builder} initialized with the supplied sequence.
+     * MFDs created from this builder will be of type: {@link Type#INCR}.
      *
-     * @param mfd to initialize builder with
+     * @param xy sequence to initialize builder with
      */
-    public static Builder from(XySequence mfd) {
-      return new Builder(mfd);
+    public static Builder from(XySequence xy) {
+      return new Builder(xy);
     }
 
     /**
-     * Create an {@code Mfd.Builder} initialized with the supplied MFD.
+     * Create an {@code Mfd.Builder} initialized with the supplied MFD. MFDs
+     * created from this builder will be of the same type as the supplied MFD.
      *
      * @param mfd to initialize builder with
      */
@@ -326,156 +373,32 @@ public final class Mfd {
       return new Builder(mfd);
     }
 
-    private Builder(XySequence src) {
-      this.props = new Properties(INCR);
-      this.mfd = MutableXySequence.copyOf(src);
-    }
-
-    private Builder(Mfd src) {
-      this.props = src.props;
-      this.mfd = MutableXySequence.copyOf(src.data);
-    }
-
-    private Builder(
-        Properties props,
-        double[] magnitudes,
-        Optional<double[]> rates) {
-
-      this.props = props;
-      this.mfd = MutableXySequence.create(magnitudes, rates);
-    }
-
-    /* Single MFD */
-    static Builder initSingle(Properties.Single props) {
-      checkMagnitude(props.m);
-      return new Builder(
-          props,
-          new double[] { props.m },
-          Optional.of(new double[] { props.rate == 0.0 ? 1.0 : props.rate }));
-      // TODO check rate above
-    }
-
-    /* Incremental MFD */
-    static Builder initIncremental(
-        double[] magnitudes,
-        double[] rates) {
-
-      Arrays.stream(magnitudes).forEach(Earthquakes::checkMagnitude);
-
-      return new Builder(
-          new Properties(INCR),
-          magnitudes,
-          Optional.of(rates));
-    }
-
-    /* Gaussian MFD */
-    static Builder initGaussian(Properties.Single props, int nm, double σ, int nσ) {
-      double μ = checkMagnitude(props.m);
-      double mMin = checkMagnitude(μ - nσ * σ);
-      double mMax = checkMagnitude(μ + nσ * σ);
-      double Δm = (mMax - mMin) / (nm - 1);
-      double[] magnitudes = Sequences.arrayBuilder(mMin, mMax, Δm)
-          .centered()
-          .scale(5)
-          .build();
-      Builder builder = new Builder(props, magnitudes, Optional.empty());
-      builder.mfd.forEach(p -> p.set(Maths.normalPdf(μ, σ, p.x())));
-      return builder;
+    private Builder(XySequence xy) {
+      Mfds.checkValues(xy.xValues(), xy.yValues());
+      this.props = new Properties();
+      this.mfd = MutableXySequence.copyOf(xy);
     }
 
-    /* Gutenberg–Richter MFD */
-    static Builder initGutenbergRichter(Properties.GutenbergRichter props) {
-      double mMin = checkMagnitude(props.mMin);
-      double mMax = checkMagnitude(props.mMax);
-      double[] magnitudes = Sequences.arrayBuilder(mMin, mMax, props.Δm)
-          .scale(4)
-          .build();
-      Builder builder = new Builder(props, magnitudes, Optional.empty());
-      builder.mfd.forEach(p -> p.set(gutenbergRichterRate(props.a, props.b, p.x())));
-      return builder;
+    private Builder(Mfd mfd) {
+      this.props = mfd.props;
+      this.mfd = MutableXySequence.copyOf(mfd.data);
     }
 
-    /* Gutenberg–Richter with exponential taper */
-    static Builder initTaperedGutenbergRichter(Properties.GrTaper props) {
-      double mMin = checkMagnitude(props.mMin());
-      double mMax = checkMagnitude(props.mMax());
-      double mc = checkMagnitude(props.mc);
-      double[] magnitudes = Sequences.arrayBuilder(mMin, mMax, props.Δm())
-          .scale(4)
-          .build();
-      Builder builder = new Builder(props, magnitudes, Optional.empty());
-      GrTaper grTaper = new GrTaper(props.Δm(), props.b(), mc);
-      builder.mfd.forEach(p -> p.set(
-          gutenbergRichterRate(0, props.b(), p.x()) * grTaper.scale(p.x())));
-      return builder;
+    private Builder(Properties props, double[] magnitudes) {
+      this(props, magnitudes, new double[magnitudes.length]);
     }
 
-    private static final double M_MIN_MOMENT = magToMoment(4.0);
-    private static final double M_MAX_MOMENT = magToMoment(9.05);
-
-    /* Gutenberg–Richter tapering scale function */
-    private static class GrTaper {
-
-      final double ΔmBy2;
-      final double β;
-      final double Mcm;
-
-      GrTaper(double Δm, double b, double mc) {
-        ΔmBy2 = Δm / 2.0;
-        β = b / 1.5;
-        Mcm = magToMoment(mc);
-      }
-
-      /* Resolve bin edges and return scale factor */
-      double scale(double m) {
-        double Mlo = magToMoment(m - ΔmBy2);
-        double Mhi = magToMoment(m + ΔmBy2);
-        double binRateTapered = paretoΔ(M_MIN_MOMENT, Mlo, Mhi, β, Mcm);
-        double binRateUntapered = paretoΔ(M_MIN_MOMENT, Mlo, Mhi, β, M_MAX_MOMENT);
-        return binRateTapered / binRateUntapered;
-      }
-    }
-
-    /*
-     * Computes the number of events in a tapered GR magnitude bin. All 'M'
-     * values are scalar seismic moment.
-     */
-    private static double paretoΔ(
-        double Mt,
-        double Mlo,
-        double Mhi,
-        double β,
-        double Mcm) {
-
-      return Mfds.paretoRate(Mt, Mlo, β, Mcm) - Mfds.paretoRate(Mt, Mhi, β, Mcm);
+    private Builder(Properties props, double[] magnitudes, double[] rates) {
+      Mfds.checkValues(Arrays.stream(magnitudes), Arrays.stream(rates));
+      this.props = props;
+      this.mfd = MutableXySequence.create(magnitudes, Optional.of(rates));
     }
 
-    /**
-     * Return a newly created {@code Mfd}.
-     */
+    /** Return a newly created {@code Mfd}. */
     public Mfd build() {
       return new Mfd(XySequence.copyOf(mfd), props);
     }
 
-    /**
-     * The minimum magnitude being considered in this MFD.
-     */
-    public double min() {
-      return mfd.x(0);
-    }
-
-    /**
-     * The maximum magnitude being considered in this MFD.
-     */
-    public double max() {
-      return mfd.x(mfd.size() - 1);
-    }
-
-    @Deprecated
-    public Properties properties() {
-      return props;
-    }
-
     /**
      * Scales the entire MFD by the supplied value, for example, a weight.
      *
@@ -494,7 +417,7 @@ public final class Mfd {
      * @return this {@code Builder} object
      */
     public Builder scaleToCumulativeRate(double cumulativeRate) {
-      return scale(cumulativeRate / Mfds.cumulativeRate(mfd));
+      return scale(checkRate(cumulativeRate) / mfd.yValues().sum());
     }
 
     /**
@@ -506,7 +429,7 @@ public final class Mfd {
      * @return this {@code Builder} object
      */
     public Builder scaleToIncrementalRate(double incrementalRate) {
-      return scale(incrementalRate / mfd.y(0));
+      return scale(checkRate(incrementalRate) / mfd.y(0));
     }
 
     /**
@@ -516,7 +439,7 @@ public final class Mfd {
      * @return this {@code Builder} object
      */
     public Builder scaleToMomentRate(double momentRate) {
-      return scale(momentRate / Mfds.momentRate(mfd));
+      return scale(checkRate(momentRate) / Mfds.momentRate(mfd));
     }
 
     /**
@@ -527,7 +450,7 @@ public final class Mfd {
      * @return this {@code Builder} object
      */
     public Builder transform(Consumer<XyPoint> action) {
-      mfd.transform(action);
+      mfd.transform(checkNotNull(action));
       return this;
     }
   }
@@ -535,24 +458,20 @@ public final class Mfd {
   /**
    * Properties object associated with a MFD. A properties object wraps the
    * original parameters required to initialize a {@link Mfd.Builder}.
-   * Properties <i>may</i> not align with an associated MFD downstream include
-   * specific not include rate or uncertainty model information that may have
-   * been used to scale and/or create multiple MFDs.
    */
   public static class Properties {
 
     private final Type type;
 
-    /* No-arg constructor for deserialization. */
-    private Properties() {
-      this.type = SINGLE;
-    }
-
     /**
-     * Sole constructor.
-     * @param type of the associated MFD
+     * General purpose properties object for use with MFDs of type:
+     * {@link Type#INCR}
      */
-    protected Properties(Type type) {
+    public Properties() {
+      this.type = Type.INCR;
+    }
+
+    Properties(Type type) {
       this.type = type;
     }
 
@@ -563,6 +482,7 @@ public final class Mfd {
 
     /**
      * Properties for a single MFD.
+     *
      * @throws ClassCastException if properties are for other MFD type
      */
     public Single getAsSingle() {
@@ -571,6 +491,7 @@ public final class Mfd {
 
     /**
      * Properties for a Gutenberg-Richter MFD.
+     *
      * @throws ClassCastException if properties are for other MFD type
      */
     public GutenbergRichter getAsGr() {
@@ -579,10 +500,11 @@ public final class Mfd {
 
     /**
      * Properties for a tapered Gutenberg-Richter MFD.
+     *
      * @throws ClassCastException if properties are for other MFD type
      */
-    public GrTaper getAsGrTaper() {
-      return (GrTaper) this;
+    public TaperedGr getAsGrTaper() {
+      return (TaperedGr) this;
     }
 
     /** Return a MFD builder initialized with this properties object. */
@@ -590,6 +512,11 @@ public final class Mfd {
       throw new UnsupportedOperationException();
     }
 
+    @Override
+    public String toString() {
+      return type().toString();
+    }
+
     /** Properties of a single magnitude MFD. */
     public static final class Single extends Properties {
 
@@ -602,18 +529,29 @@ public final class Mfd {
         rate = 1.0;
       };
 
-      public Single(double m, double rate) {
+      /**
+       * Create a new properties object for a single MFD.
+       *
+       * @param magnitude of the distribution
+       * @param rate of the sole magnitude in the distribution
+       */
+      public Single(double magnitude, double rate) {
         super(SINGLE);
-        this.m = m;
+        this.m = magnitude;
         this.rate = rate;
       }
 
-      public Single(double m) {
-        this(m, 1.0);
+      /**
+       * Create a new properties object for a single MFD with a rate of one.
+       *
+       * @param magnitude of the distribution
+       */
+      public Single(double magnitude) {
+        this(magnitude, 1.0);
       }
 
       /** The sole magnitude of the MFD */
-      public double m() {
+      public double magnitude() {
         return m;
       }
 
@@ -622,42 +560,44 @@ public final class Mfd {
         return rate;
       }
 
-      // TODO note in documentation default a and rate = 1 for correct scaling
-      // behavior; and no-arg constructor
-
-      // TODO use case for createSingle(m,r) and createGr(...) where
-      // rate or a-value is known in advance, e.g. when crateing model fault
-      // MFDs that will likely be subject to uncertainty modifications
-
-      // TODO It seems we also want to just be able to create properties
-      // directly
-
-      // TODO public toMfd(); instead of toBuilder()
-      // lets prefer just initializing builders with copies from()
-      // we shouldn't be dealing in LogicTree<Mfd.Properties>, just make
-      // copies
-
       @Override
       public Builder toBuilder() {
-        return Builder.initSingle(this);
+        checkState(type().equals(SINGLE));
+        /* Values checked in builder */
+        return new Builder(
+            this,
+            new double[] { this.m },
+            // TODO this is bad
+            new double[] { this.rate == 0.0 ? 1.0 : this.rate });
       }
 
       public Builder toGaussianBuilder(int nm, double σ, int nσ) {
-        return Builder.initGaussian(this, nm, σ, nσ);
+        checkState(type().equals(SINGLE));
+        checkGaussianMagnitudeCount(nm);
+        checkGaussianSigma(σ);
+        checkGaussianSigmaCount(nσ);
+        /* Pre-check magnitudes; final arrays are checked in builder */
+        double μ = checkMagnitude(this.m);
+        double mMin = checkMagnitude(μ - nσ * σ);
+        double mMax = checkMagnitude(μ + nσ * σ);
+        double Δm = (mMax - mMin) / (nm - 1);
+        double[] magnitudes = Sequences.arrayBuilder(mMin, mMax, Δm)
+            .centered()
+            .scale(5)
+            .build();
+        Builder builder = new Builder(this, magnitudes);
+        builder.mfd.transform(p -> p.set(Maths.normalPdf(μ, σ, p.x())));
+        builder.scaleToCumulativeRate(rate);
+        return builder;
       }
 
       @Override
       public String toString() {
-        return new StringBuilder()
-            .append(type())
-            .append(": ")
-            .append(Map.of("m", m, "rate", rate))
+        String props = new StringBuilder()
+            .append("magnitude=").append(m)
+            .append(", rate=").append(rate)
             .toString();
-      }
-
-      @Deprecated
-      public static Single dummy() {
-        return new Single(7.0);
+        return Mfds.propsToString(type(), props);
       }
     }
 
@@ -679,8 +619,10 @@ public final class Mfd {
         this.mMax = Double.NaN;
       }
 
-      private GutenbergRichter(Type type, double a, double b, double Δm, double mMin,
-          double mMax) {
+      private GutenbergRichter(
+          Type type, double a, double b,
+          double Δm, double mMin, double mMax) {
+
         super(type);
         this.a = a;
         this.b = b;
@@ -689,8 +631,28 @@ public final class Mfd {
         this.mMax = mMax;
       }
 
-      public GutenbergRichter(double b, double Δm, double mMin, double mMax) {
-        this(GR, 1.0, b, Δm, mMin, mMax);
+      /**
+       * Create a new properties object for a Gutenberg-Richter MFD.
+       *
+       * @param a value of the distribution
+       * @param b value of the distribution
+       * @param Δm magnitude bin width of the distribution
+       * @param mMin minimum magnitude of the distribution
+       */
+      public GutenbergRichter(double a, double b, double Δm, double mMin, double mMax) {
+        this(GR, a, b, Δm, mMin, mMax);
+      }
+
+      /**
+       * Create a new properties object for a Gutenberg-Richter MFD with a copy
+       * of the supplied properties and an updated maximum magnitude.
+       *
+       * @param gr properties to copy
+       * @param mMax updated maximum magnitude
+       */
+      public GutenbergRichter(GutenbergRichter gr, double mMax) {
+        /* Supports common magnitude arrays for variable mMax MFDs */
+        this(gr.type(), gr.a, gr.b, gr.Δm, gr.mMin, mMax);
       }
 
       /** The Gutenberg-Richter {@code a}-value. */
@@ -718,36 +680,78 @@ public final class Mfd {
         return mMax;
       };
 
+      private static final Set<Type> GR_TYPES = EnumSet.of(
+          GR,
+          GR_MMAX_GR,
+          GR_MMAX_SINGLE);
+
       @Override
       public Builder toBuilder() {
-        return Builder.initGutenbergRichter(this);
+        checkState(GR_TYPES.contains(type()));
+        checkGrBvalue(this.b);
+        /* Pre-check magnitudes; final arrays are checked in builder */
+        double mMin = checkMagnitude(this.mMin);
+        double mMax = checkMagnitude(this.mMax);
+        double[] magnitudes = Sequences.arrayBuilder(mMin, mMax, this.Δm)
+            .scale(4)
+            .build();
+        Builder builder = new Builder(this, magnitudes);
+        builder.transform(p -> p.set(Mfds.gutenbergRichterRate(a, b, p.x())));
+        return builder;
       }
 
       @Override
       public String toString() {
+        return Mfds.propsToString(type(), propsString());
+      }
+
+      private String propsString() {
         return new StringBuilder()
-            .append(type())
-            .append(": ")
-            .append(Map.of("a", a, "b", b, "Δm", Δm, "mMin", mMin, "mMax", mMax))
+            .append("a=").append(a)
+            .append(", b=").append(b)
+            .append(", Δm=").append(Δm)
+            .append(", mMin=").append(mMin)
+            .append(", mMax=").append(mMax)
             .toString();
       }
     }
 
     /** Tapered Gutenberg–Richter MFD properties. */
-    public static final class GrTaper extends GutenbergRichter {
+    public static final class TaperedGr extends GutenbergRichter {
 
       private final double mc;
 
       /* Dummy, no-arg constructor for deserialization. */
-      private GrTaper() {
+      private TaperedGr() {
         this.mc = Double.NaN;
       }
 
-      GrTaper(double b, double Δm, double mMin, double mMax, double mc) {
-        super(GR_TAPER, 1.0, b, Δm, mMin, mMax);
+      /**
+       * Create a new properties object for a tapered Gutenberg-Richter MFD.
+       *
+       * @param a value of the distribution
+       * @param b value of the distribution
+       * @param Δm magnitude bin width of the distribution
+       * @param mMin minimum magnitude of the distribution
+       * @param mc corner magnitude of the distribution
+       */
+      public TaperedGr(double a, double b, double Δm, double mMin, double mMax, double mc) {
+        super(GR_TAPER, a, b, Δm, mMin, mMax);
         this.mc = mc;
       }
 
+      /**
+       * Create a new properties object for a tapered Gutenberg-Richter MFD with
+       * a copy of the supplied properties and an updated maximum magnitude.
+       *
+       * @param gr properties to copy
+       * @param mMax updated maximum magnitude
+       */
+      public TaperedGr(TaperedGr gr, double mMax) {
+        /* Supports common magnitude arrays for variable mMax MFDs */
+        this(gr.a(), gr.b(), gr.Δm(), gr.mMin(), mMax, gr.mc);
+      }
+
       /** The corner magnitude. */
       public final double mc() {
         return mc;
@@ -755,9 +759,85 @@ public final class Mfd {
 
       @Override
       public Builder toBuilder() {
-        return Builder.initTaperedGutenbergRichter(this);
+        checkState(type().equals(GR_TAPER));
+        checkGrBvalue(b());
+        /* Pre-check magnitudes; final arrays are checked in builder */
+        double mMin = checkMagnitude(mMin());
+        double mMax = checkMagnitude(mMax());
+        double mc = checkMagnitude(this.mc);
+        double[] magnitudes = Sequences.arrayBuilder(mMin, mMax, Δm())
+            .scale(4)
+            .build();
+        Builder builder = new Builder(this, magnitudes);
+        TaperFunction grTaper = new TaperFunction(Δm(), b(), mc);
+        builder.transform(p -> p.set(taperedRate(p, grTaper)));
+
+        /*
+         * The tapering function actually does not recover the un-tapered rates;
+         * it's close, but not exact. The difference, however, is uniform across
+         * all m-bins. In order to get the same result by creating a tapered GR
+         * directly with a given a-value and by creating a generic tapered MFD
+         * that we then scaleToIncrementalRate() using the given a-value, we
+         * correct the tapered MFD by the offset.
+         */
+        double grRate = Mfds.gutenbergRichterRate(a(), b(), builder.mfd.x(0));
+        double tgrRate = builder.mfd.y(0);
+        double aCorrection = grRate / tgrRate;
+        builder.scale(aCorrection);
+
+        return builder;
+      }
+
+      private double taperedRate(XyPoint p, TaperFunction fn) {
+        return Mfds.gutenbergRichterRate(a(), b(), p.x()) * fn.scale(p.x());
+      }
+
+      @Override
+      public String toString() {
+        return Mfds.propsToString(
+            type(),
+            super.propsString() + ", mc=" + mc);
       }
     }
-  }
 
+    private static final double M_MIN_MOMENT = Earthquakes.magToMoment(4.0);
+    private static final double M_MAX_MOMENT = Earthquakes.magToMoment(9.05);
+
+    /* Gutenberg–Richter tapering scale function */
+    private static class TaperFunction {
+
+      final double ΔmBy2;
+      final double β;
+      final double Mcm;
+
+      TaperFunction(double Δm, double b, double mc) {
+        ΔmBy2 = Δm / 2.0;
+        β = b / 1.5;
+        Mcm = Earthquakes.magToMoment(mc);
+      }
+
+      /* Resolve bin edges and return scale factor */
+      double scale(double m) {
+        double Mlo = Earthquakes.magToMoment(m - ΔmBy2);
+        double Mhi = Earthquakes.magToMoment(m + ΔmBy2);
+        double binRateTapered = paretoΔ(M_MIN_MOMENT, Mlo, Mhi, β, Mcm);
+        double binRateUntapered = paretoΔ(M_MIN_MOMENT, Mlo, Mhi, β, M_MAX_MOMENT);
+        return binRateTapered / binRateUntapered;
+      }
+    }
+
+    /*
+     * Compute the number of events in a tapered GR magnitude bin. All 'M'
+     * values are scalar seismic moment.
+     */
+    private static double paretoΔ(
+        double Mt,
+        double Mlo,
+        double Mhi,
+        double β,
+        double Mcm) {
+
+      return Mfds.paretoRate(Mt, Mlo, β, Mcm) - Mfds.paretoRate(Mt, Mhi, β, Mcm);
+    }
+  }
 }
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/mfd/Mfds.java b/src/main/java/gov/usgs/earthquake/nshmp/mfd/Mfds.java
index e3a2101979c892f0bdbb730e90f4b2b7908f05b0..74efdd3df2690571da42a18b3e8aefabab0640cd 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/mfd/Mfds.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/mfd/Mfds.java
@@ -1,20 +1,17 @@
 package gov.usgs.earthquake.nshmp.mfd;
 
-import static com.google.common.base.Preconditions.checkArgument;
 import static gov.usgs.earthquake.nshmp.data.DoubleData.checkInRange;
-import static java.lang.Math.exp;
-import static java.lang.Math.log;
 import static java.util.stream.Collectors.toList;
 
 import java.util.Collection;
+import java.util.stream.DoubleStream;
 
-import com.google.common.base.Converter;
 import com.google.common.collect.Range;
 
 import gov.usgs.earthquake.nshmp.Earthquakes;
-import gov.usgs.earthquake.nshmp.Maths;
-import gov.usgs.earthquake.nshmp.data.MutableXySequence;
+import gov.usgs.earthquake.nshmp.data.Indexing;
 import gov.usgs.earthquake.nshmp.data.XySequence;
+import gov.usgs.earthquake.nshmp.mfd.Mfd.Type;
 
 /**
  * Utility methods for working with magnitude frequency distributions (MFDs).
@@ -27,9 +24,53 @@ public final class Mfds {
 
   private Mfds() {}
 
+  /**
+   * Ensure {@code rate ≥ 0.0}.
+   *
+   * @param rate to validate
+   * @return the validated rate
+   * @throws IllegalArgumentException if {@code rate} value is less than 0.0
+   */
+  public static double checkRate(double rate) {
+    return checkInRange(Range.atLeast(0.0), "Rate", rate);
+  }
+
+  static void checkValues(DoubleStream magnitudes, DoubleStream rates) {
+    magnitudes.forEach(Earthquakes::checkMagnitude);
+    rates.forEach(Mfds::checkRate);
+  }
+
+  /* Check Gaussian sigma. */
+  static double checkGaussianSigma(double σ) {
+    return checkInRange(Range.openClosed(0.0, 0.5), "Gaussian σ", σ);
+  }
+
+  /* Check Gaussian sigma count. */
+  static int checkGaussianSigmaCount(int nσ) {
+    return Indexing.checkInRange(Range.closed(0, 5), "Gaussian nσ", nσ);
+  }
+
+  /* Check Gaussian magnitude count. */
+  static int checkGaussianMagnitudeCount(int nm) {
+    return Indexing.checkInRange(Range.closed(1, 21), "Gaussian nm", nm);
+  }
+
+  /* Check Gutenberg-Richter b-value. */
+  static double checkGrBvalue(double b) {
+    return checkInRange(Range.closed(0.0, 3.0), "Gutenberg-Richter b-value", b);
+  }
+
+  static String propsToString(Type type, String propsString) {
+    return new StringBuilder(type.toString())
+        .append(" {")
+        .append(propsString)
+        .append("}")
+        .toString();
+  }
+
   /**
    * Combine the supplied MFDs into a single MFD, summing the rates of duplicate
-   * magnitudes.
+   * magnitudes. The resulting combined MFD will be of type: {@link Type#INCR}.
    *
    * @param mfds sequences to combine
    * @throws IllegalArgumentException if {@code mfds} is empty
@@ -41,20 +82,6 @@ public final class Mfds {
     return Mfd.create(combined);
   }
 
-  /**
-   * Computes the cumulative rate of an MFD defined by the magnitudes (x-vlaues)
-   * and incremental rates (y-values) in a XySquence.
-   *
-   * @param xy sequence for which to compute cumulative rate
-   * @return
-   */
-  public static double cumulativeRate(XySequence xy) {
-    return xy.yValues().reduce(0.0, Double::sum);
-    // TODO is this method even necessary, really?
-    // the above can also be xy.yValues().reduce(Double::sum);
-    // or even shorter, but maybe ith more overhead? xy.yValues().sum()
-  }
-
   /**
    * Returns the <a href="https://en.wikipedia.org/wiki/Gutenberg–Richter_law"
    * target="_top">Gutenberg–Richter</a> event rate for the supplied a- and
@@ -65,8 +92,8 @@ public final class Mfds {
    * {@code log10(A)} instead.
    *
    * <p><b>Reference:</b> Gutenberg, B., Richter, C. F., 1956, Magnitude and
-   * Energy of Earthquakes, Annali di Geofisica, v. 9, p. 1–15, doi:<a
-   * href="https://doi.org/10.4401/ag-5590" target=_top">10.4401/ag-5590<a>
+   * Energy of Earthquakes, Annali di Geofisica, v. 9, p. 1–15, doi:<a
+   * href="https://doi.org/10.4401/ag-5590" target="_top">10.4401/ag-5590</a>
    *
    * @param a value (log10 rate of M=0 events)
    * @param b value
@@ -78,68 +105,18 @@ public final class Mfds {
   }
 
   /**
-   * Computes the Gutenberg-Richter incremental rate at the supplied magnitude.
-   * Convenience method for {@code N(M) = a * 10<sup>-bm</sup>}.
-   *
-   * TODO is this confusing? the NSHMP stores a-values in different ways [a A]
-   * where a = log10(A); should users just supply grRate() with
-   *
-   * @param a value (incremental and defined wrt {@code dMag} for M0)
-   * @param b value
-   * @param mMin minimum magnitude of distribution
-   * @return the rate at the supplied magnitude
-   */
-  @Deprecated
-  public static double incrRate(double a, double b, double mMin) {
-    return a * Math.pow(10, -b * mMin);
-  }
-
-  // public static void main(String[] args) {
-  // System.out.println(incrRate(0.01, -1, 5.05));
-  // System.out.println(gutenbergRichterRate(Math.log10(0.01), -1, 5.05));
-  // }
-
-  /**
-   * Computes total moment rate as done by NSHMP code from supplied magnitude
-   * info and the Gutenberg-Richter a- and b-values. <b>Note:</b> the a- and
-   * b-values assume an incremental distribution.
+   * Returns the total moment rate of an incremental MFD.
    *
-   * @param mMin minimum magnitude (after adding {@code dMag/2})
-   * @param nMag number of magnitudes
-   * @param dMag magnitude bin width
-   * @param a value (incremental and defined wrt {@code dMag} for M0)
-   * @param b value
-   * @return the total moment rate
+   * @param mfd for which to compute total moment rate
    */
-  @Deprecated
-  public static double totalMoRate(
-      double mMin,
-      int nMag,
-      double dMag,
-      double a,
-      double b) {
-
-    double moRate = 1e-10; // start with small, non-zero rate
-    double M;
-    for (int i = 0; i < nMag; i++) {
-      M = mMin + i * dMag;
-      moRate += gutenbergRichterRate(a, b, M) * Earthquakes.magToMoment(M);
-    }
-    return moRate;
+  public static double momentRate(Mfd mfd) {
+    return momentRate(mfd.data());
   }
 
-  /**
-   * Returns the total moment rate of an MFD defined by the magnitudes
-   * (x-vlaues) and incremental rates (y-values) in the supplied XySquence.
-   *
-   * @param xy sequence for which to compute total moment rate
-   * @return
-   */
-  public static double momentRate(XySequence xy) {
+  static double momentRate(XySequence xy) {
     return xy.stream()
         .mapToDouble(p -> p.y() * Earthquakes.magToMoment(p.x()))
-        .reduce(Double::sum)
-        .getAsDouble();
+        .sum();
   }
 
   /**
@@ -155,7 +132,7 @@ public final class Mfds {
    *
    * @param Mt the scalar seismic moment of catalog completeness threshold
    * @param M the scalar seismic moment of the target magnitude
-   * @param β the index parameter of the distribution; {@code β = ⅔b}
+   * @param β the index parameter of the distribution; {@code β = ⅔b}
    *        (Gutenberg–Richter {@code b}-value)
    * @param Mcm the scalar seismic moment of the upper bound corner magnitude
    */
@@ -168,114 +145,4 @@ public final class Mfds {
     return Math.pow(Mt / M, β) * Math.exp((Mt - M) / Mcm);
   }
 
-  /**
-   * Determines the number of magnitude bins for the supplied arguments. If dMag
-   * does not divide evenly into {@code mMax - mMin}, and the result of this
-   * method is used to build a Gutenberg-Richter MFD, the maximum magnitude of
-   * the MFD may not equal the {@code mMax} supplied here.
-   *
-   * @param mMin minimum magnitude to consider
-   * @param mMax maximum magnitude to consider
-   * @param dMag magnitude delta
-   * @return the number of magnitude bins
-   */
-  @Deprecated
-  public static int magCount(double mMin, double mMax, double dMag) {
-    checkArgument(dMag > 0.0);
-    // TODO marked as deprecated to revisit
-    return (int) ((mMax - mMin) / dMag + 1.4);
-  }
-
-  static int size(double min, double max, double Δ) {
-    return ((int) Maths.round(((max - min) / Δ), 6)) + 1;
-  }
-
-  public static void main(String[] args) {
-    System.out.println(magCount(6.57, 6.51, 0.14));
-    System.out.println(magCount(6.55, 6.9501, 0.1));
-
-    System.out.println(size(6.57, 6.51, 0.14));
-    System.out.println(size(6.55, 6.9501, 0.1));
-
-  }
-
-  // TODO docs; consider moving to cumulate method in data/sequence package;
-  // not necessarily MFD specific
-  public static XySequence toCumulative(XySequence incremental) {
-    MutableXySequence cumulative = MutableXySequence.copyOf(incremental);
-    double sum = 0.0;
-    for (int i = incremental.size() - 1; i >= 0; i--) {
-      sum += incremental.y(i);
-      cumulative.set(i, sum);
-    }
-    return XySequence.copyOf(cumulative);
-  }
-
-  /**
-   * Given an observed annual rate of occurrence of some event (in num/yr),
-   * method returns the Poisson probability of occurence over the specified time
-   * period.
-   * @param rate (annual) of occurence of some event
-   * @param timespan of interest
-   * @return the Poisson probability of occurrence in the specified {@code time}
-   */
-  public static double rateToProb(double rate, double timespan) {
-    return 1 - exp(-rate * timespan);
-  }
-
-  /**
-   * Given the Poisson probability of the occurence of some event over a
-   * specified time period, method returns the annual rate of occurrence of that
-   * event.
-   * @param P the Poisson probability of an event's occurrence
-   * @param timespan of interest
-   * @return the annnual rate of occurrence of the event
-   */
-  public static double probToRate(double P, double timespan) {
-    return -log(1 - P) / timespan;
-  }
-
-  /**
-   * Return a converter between annual rate and Poisson probability over a
-   * 1-year time span. The converter expects probabilities to be represented as
-   * fractions of 1.0.
-   */
-  public static Converter<Double, Double> annualRateToProbabilityConverter() {
-    return new AnnRateToPoissProbConverter(1.0);
-  }
-
-  /**
-   * Return a converter between annual rate and Poisson probability over the
-   * specified time span. The converter expects probabilities to be represented
-   * as fractions of 1.0.
-   */
-  public static Converter<Double, Double> annualRateToProbabilityConverter(double timespan) {
-    return new AnnRateToPoissProbConverter(timespan);
-  }
-
-  /**
-   * Supported timespans for Poisson probabilities: {@code [1..10000] years}.
-   */
-  public static final Range<Double> TIMESPAN_RANGE = Range.closed(1.0, 10000.0);
-
-  private static final class AnnRateToPoissProbConverter extends Converter<Double, Double> {
-
-    private final double timespan;
-
-    AnnRateToPoissProbConverter(double timespan) {
-      checkInRange(TIMESPAN_RANGE, "Timespan", timespan);
-      this.timespan = timespan;
-    }
-
-    @Override
-    protected Double doForward(Double rate) {
-      return rateToProb(rate, timespan);
-    }
-
-    @Override
-    protected Double doBackward(Double prob) {
-      return probToRate(prob, timespan);
-    }
-  }
-
 }
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/mfd/package-info.java b/src/main/java/gov/usgs/earthquake/nshmp/mfd/package-info.java
index f8aba763636def7cd8c2706ee4cd4d1f67ce36c5..dc64f801befcbf200ebe7eb85a835aa360f08faf 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/mfd/package-info.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/mfd/package-info.java
@@ -1,4 +1,6 @@
 /**
  * Magnitude frequency distribution (MFD) classes and utilties.
+ *
+ * @author U.S. Geological Survey
  */
 package gov.usgs.earthquake.nshmp.mfd;
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/ClusterRuptureSet.java b/src/main/java/gov/usgs/earthquake/nshmp/model/ClusterRuptureSet.java
index 3e3b80e43da8a66c9f3c0212d47afeba04d25399..702d247d79e493a89e3d272c181586726e6a5eee 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/model/ClusterRuptureSet.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/model/ClusterRuptureSet.java
@@ -40,8 +40,6 @@ import gov.usgs.earthquake.nshmp.geo.Locations;
  */
 public class ClusterRuptureSet implements RuptureSet {
 
-  // for documentation: a cluster model can only be applied to
-  // logic trees of SINGLE MFDs with parallel (?) weights
   final String name;
   final int id;
 
@@ -88,14 +86,6 @@ public class ClusterRuptureSet implements RuptureSet {
     return Locations.closestPoint(site, locs.build());
   }
 
-  // /**
-  // * {@code (1 / return period)} of this source in years.
-  // * @return the cluster rate
-  // */
-  // public double rate() {
-  // return rate;
-  // }
-
   /**
    * The weight applicable to this {@code ClusterSource}.
    */
@@ -160,30 +150,6 @@ public class ClusterRuptureSet implements RuptureSet {
       return this;
     }
 
-    // @Deprecated
-    // Builder rate(double rate) {
-    // // TODO what sort of value checking should be done for rate (<1 ??)
-    // this.rate = rate;
-    // return this;
-    // }
-
-    // Builder rateTree(LogicTree<Double> rateTree) {
-    // // TODO what sort of value checking should be done for rate (<1 ??)
-    // this.rateTree = rateTree;
-    // return this;
-    // }
-
-    // Builder featureMap(Map<Integer, SourceFeature.NshmFault> featureMap) {
-    // this.featureMap = Map.copyOf(featureMap);
-    // return this;
-    // }
-
-    // @Deprecated
-    // Builder addRuptureSet(FaultRuptureSet.Builder ruptureSet) {
-    // faultRuptureSetBuilders.add(ruptureSet);
-    // return this;
-    // }
-
     Builder addRuptureSet(FaultRuptureSet ruptureSet) {
       if (faultRuptureSets.size() > 1) {
         MfdTrees.checkTreeIdsAndWeights(
@@ -194,35 +160,11 @@ public class ClusterRuptureSet implements RuptureSet {
       return this;
     }
 
-    // Builder faults(FaultSourceSet faults) {
-    // checkState(checkNotNull(faults, "Fault source set is null").size() > 0,
-    // "Fault source set is empty");
-    // this.faults = faults;
-    // return this;
-    // }
-
     void validateState(String label) {
       checkState(!built, "Single use builder");
       checkNotNull(name, "%s name", label);
       checkNotNull(id, "%s id", label);
       checkNotNull(data, "%s model data", label);
-
-      // checkState(rate != null, "%s rate not set", source);
-      // checkState(faults != null, "%s has no fault sources", source);
-
-      // faultRuptureSets = faultRuptureSetBuilders.stream()
-      // .map(frsb -> frsb.config(config))
-      // .map(FaultSource.Builder::build)
-      // .collect(Collectors.toList());
-      // System.out.println(data.faultFeatureMap().isPresent());
-      // checkNotNull(featureMap, "%s feature map", label);
-
-      // faultRuptureSets = new ArrayList<>();
-      // for (FaultRuptureSet.Builder frsb : faultRuptureSetBuilders) {
-      // frsb.modelData(data);
-      // faultRuptureSets.add(frsb.build());
-      // }
-
       checkState(faultRuptureSets.size() > 0);
       built = true;
     }
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/Deserialize.java b/src/main/java/gov/usgs/earthquake/nshmp/model/Deserialize.java
index 4fe96e938ddfe9d20c994a6bdcfac87244e645e8..9f3243c56b468bad872723f834abc4b9449b25bf 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/model/Deserialize.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/model/Deserialize.java
@@ -36,9 +36,9 @@ import gov.usgs.earthquake.nshmp.geo.json.Properties;
 import gov.usgs.earthquake.nshmp.gmm.Gmm;
 import gov.usgs.earthquake.nshmp.gmm.UncertaintyModel;
 import gov.usgs.earthquake.nshmp.mfd.Mfd;
-import gov.usgs.earthquake.nshmp.mfd.Mfd.Properties.GrTaper;
 import gov.usgs.earthquake.nshmp.mfd.Mfd.Properties.GutenbergRichter;
 import gov.usgs.earthquake.nshmp.mfd.Mfd.Properties.Single;
+import gov.usgs.earthquake.nshmp.mfd.Mfd.Properties.TaperedGr;
 import gov.usgs.earthquake.nshmp.model.MfdConfig.AleatoryProperties;
 import gov.usgs.earthquake.nshmp.tree.LogicGroup;
 import gov.usgs.earthquake.nshmp.tree.LogicTree;
@@ -415,7 +415,7 @@ class Deserialize {
         properties = GSON.fromJson(validateGr(obj), GutenbergRichter.class);
         break;
       case GR_TAPER:
-        properties = GSON.fromJson(validateGrTaper(obj), GrTaper.class);
+        properties = GSON.fromJson(validateGrTaper(obj), TaperedGr.class);
         break;
       case INCR:
         // TODO custom props object that we'll replace once MFDs built
@@ -432,7 +432,6 @@ class Deserialize {
     double[] magnitudes;
 
     Incremental(double[] magnitudes, double[] rates) {
-      super(Mfd.Type.INCR);
       this.magnitudes = magnitudes;
     }
 
@@ -443,10 +442,12 @@ class Deserialize {
   }
 
   private static JsonElement validateSingle(JsonObject o) {
+    checkValue(o, "type");
     return checkValue(o, "m");
   }
 
   private static JsonElement validateGr(JsonObject o) {
+    checkValue(o, "type");
     checkValue(o, "b");
     checkValue(o, "Δm");
     checkValue(o, "mMin");
@@ -455,10 +456,11 @@ class Deserialize {
 
   private static JsonElement validateGrTaper(JsonObject o) {
     validateGr(o);
-    return checkValue(o, "cMag");
+    return checkValue(o, "mc");
   }
 
   private static JsonElement validateIncr(JsonObject o) {
+    checkValue(o, "type");
     return checkValue(o, "magnitudes");
   }
 
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/FaultRuptureSet.java b/src/main/java/gov/usgs/earthquake/nshmp/model/FaultRuptureSet.java
index 39ac429a3fe9fcbc1a11f5a067dfcacfe0e1b58b..28fa26e0f32cf64650b45465c7a9172cdeec5dda 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/model/FaultRuptureSet.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/model/FaultRuptureSet.java
@@ -205,7 +205,7 @@ public class FaultRuptureSet implements RuptureSet {
       // TODO test length property rounding
 
       /* Filter of unlikely combination of SINGLEs epistemic uncertainty. */
-      checkEpistemic(data.mfdConfig().get(), mfdPropsTree);
+      checkEpistemic(data.mfdConfig().orElseThrow(), mfdPropsTree);
 
       /*
        * Fault MFDs are constructed in a variety of ways. Single fault MFDs may
@@ -328,7 +328,7 @@ public class FaultRuptureSet implements RuptureSet {
 
     /* Create mfd-tree from a logic tree of SINGLE MFDs and a rate-tree. */
     private LogicTree<Mfd.Properties> updateMfdsForRecurrence() {
-      checkState(MfdTrees.mfdsAreType(mfdPropsTree, Mfd.Type.SINGLE));
+      MfdTrees.checkType(mfdPropsTree, Mfd.Type.SINGLE);
       LogicTree.Builder<Mfd.Properties> propsTree = LogicTree.builder(mfdPropsTree.name());
       LogicTree<Double> rateTree = data.rateTree().orElseThrow();
       for (Branch<Double> rBranch : rateTree) {
@@ -347,7 +347,7 @@ public class FaultRuptureSet implements RuptureSet {
           Single props = mBranch.value().getAsSingle();
           String id = String.join(":", props.type().name(), mBranch.id(), rBranch.id());
           double weight = mBranch.weight() * rBranch.weight();
-          propsTree.addBranch(id, new Single(props.m(), rate), weight);
+          propsTree.addBranch(id, new Single(props.magnitude(), rate), weight);
         }
       }
       return propsTree.build();
@@ -418,7 +418,7 @@ public class FaultRuptureSet implements RuptureSet {
       double Δm = Maths.round(Rm / nm, 6);
 
       // System.out.println(Rm + " " + grProps.mMin + " " + mMax + " " + Δm);
-      return new Mfd.Properties.GutenbergRichter(grProps.b(), Δm, grProps.mMin(), mMax);
+      return new Mfd.Properties.GutenbergRichter(1.0, grProps.b(), Δm, grProps.mMin(), mMax);
     }
 
     /*
@@ -499,7 +499,7 @@ public class FaultRuptureSet implements RuptureSet {
       System.out.println("          " + rateMap);
     }
 
-    for (Branch<String> branch : faultRateTree.branches()) {
+    for (Branch<String> branch : faultRateTree) {
 
       /*
        * TODO Verify missing fault rates and imbalanced logic trees. In some
@@ -556,7 +556,7 @@ public class FaultRuptureSet implements RuptureSet {
     }
 
     // TODO does this ever throw?
-    int nMag = Mfds.magCount(gr.mMin(), gr.mMax(), gr.Δm());
+    int nMag = magCount(gr.mMin(), gr.mMax(), gr.Δm());
     checkState(nMag > 0, "GR MFD with no magnitudes [%s]", mfdBranch.id());
 
     double moRate = moBranch.isPresent()
@@ -583,7 +583,7 @@ public class FaultRuptureSet implements RuptureSet {
 
     if (epistemic) {
 
-      for (Branch<Double> epiBranch : mfdConfig.epistemicTree.get()) {
+      for (Branch<Double> epiBranch : mfdConfig.epistemicTree.orElseThrow()) {
 
         /*
          * Given changes to how GR MFD sequences are built (centered and bounded
@@ -595,7 +595,7 @@ public class FaultRuptureSet implements RuptureSet {
         double weightEpi = mfdWt * epiBranch.weight();
 
         // TODO check if ever thrown
-        int nMagEpi = Mfds.magCount(gr.mMin(), mMaxEpi, gr.Δm());
+        int nMagEpi = magCount(gr.mMin(), mMaxEpi, gr.Δm());
         checkState(nMagEpi > 0, "GR MFD epistemic branch with no magnitudes");
         boolean epiOk = checkEpi(gr.mMin(), gr.mMax(), gr.Δm(), epiBranch.value());
         if (!epiOk) {
@@ -621,6 +621,24 @@ public class FaultRuptureSet implements RuptureSet {
     }
   }
 
+  /**
+   * Determines the number of magnitude bins for the supplied arguments. If dMag
+   * does not divide evenly into {@code mMax - mMin}, and the result of this
+   * method is used to build a Gutenberg-Richter MFD, the maximum magnitude of
+   * the MFD may not equal the {@code mMax} supplied here.
+   *
+   * @param mMin minimum magnitude to consider
+   * @param mMax maximum magnitude to consider
+   * @param dMag magnitude delta
+   * @return the number of magnitude bins
+   */
+  @Deprecated
+  public static int magCount(double mMin, double mMax, double dMag) {
+    checkArgument(dMag > 0.0);
+    // TODO marked as deprecated; revisit and clean up
+    return (int) ((mMax - mMin) / dMag + 1.4);
+  }
+
   private static boolean checkEpi(double mMin, double mMax, double Δm, double Δepi) {
     return (mMax - Δm / 2 + Δepi) > (mMin + Δm / 2);
   }
@@ -644,10 +662,10 @@ public class FaultRuptureSet implements RuptureSet {
 
     double moRate = moBranch.isPresent()
         ? moBranch.orElseThrow().value()
-        : single.rate() * Earthquakes.magToMoment(single.m());
+        : single.rate() * Earthquakes.magToMoment(single.magnitude());
 
     /* Optional epistemic uncertainty. */
-    boolean epistemic = hasEpistemic(mfdConfig, single.m());
+    boolean epistemic = hasEpistemic(mfdConfig, single.magnitude());
 
     /* Optional aleatory variability. */
     boolean aleatory = mfdConfig.aleatoryProperties.isPresent();
@@ -655,16 +673,16 @@ public class FaultRuptureSet implements RuptureSet {
     /* mMax epistemic uncertainty branches */
     if (epistemic) {
 
-      for (Branch<Double> epiBranch : mfdConfig.epistemicTree.get()) {
+      for (Branch<Double> epiBranch : mfdConfig.epistemicTree.orElseThrow()) {
 
-        double mEpi = single.m() + epiBranch.value();
+        double mEpi = single.magnitude() + epiBranch.value();
         double weightEpi = mfdWt * epiBranch.weight();
         String id = mfdBranchId(mfdId, epiBranch.id());
 
         /* Possibly apply aleatory variability */
         if (aleatory) {
 
-          MfdConfig.AleatoryProperties aleaProps = mfdConfig.aleatoryProperties.get();
+          MfdConfig.AleatoryProperties aleaProps = mfdConfig.aleatoryProperties.orElseThrow();
           Mfd mfd = newGaussianBuilder(mEpi, aleaProps)
               .scaleToMomentRate(moRate)
               .build();
@@ -684,15 +702,15 @@ public class FaultRuptureSet implements RuptureSet {
       /* Possibly apply aleatory variability */
       if (aleatory) {
 
-        MfdConfig.AleatoryProperties aleaProps = mfdConfig.aleatoryProperties.get();
-        Mfd mfd = newGaussianBuilder(single.m(), aleaProps)
+        MfdConfig.AleatoryProperties aleaProps = mfdConfig.aleatoryProperties.orElseThrow();
+        Mfd mfd = newGaussianBuilder(single.magnitude(), aleaProps)
             .scaleToMomentRate(moRate)
             .build();
         mfdTree.addBranch(mfdId, mfd, mfdWt);
 
       } else {
 
-        Mfd mfd = Mfd.newSingleBuilder(single.m())
+        Mfd mfd = Mfd.newSingleBuilder(single.magnitude())
             .scaleToMomentRate(moRate)
             .build();
         mfdTree.addBranch(mfdId, mfd, mfdWt);
@@ -723,7 +741,7 @@ public class FaultRuptureSet implements RuptureSet {
    */
   static void checkEpistemic(MfdConfig mfdConfig, LogicTree<Mfd.Properties> mfdTree) {
     boolean multipleSingleBranches = (mfdTree.size() > 1) &&
-        MfdTrees.mfdsAreType(mfdTree, Mfd.Type.SINGLE);
+        MfdTrees.checkTreeTypes(mfdTree, Mfd.Type.SINGLE);
     boolean hasEpistemic = mfdConfig.epistemicTree.isPresent();
     checkState(
         !(multipleSingleBranches && hasEpistemic),
@@ -751,7 +769,7 @@ public class FaultRuptureSet implements RuptureSet {
 
   /* GR MFD builder with mMax override. */
   static Mfd.Builder newGrBuilder(Mfd.Properties.GutenbergRichter gr, double mMax) {
-    return Mfd.newGutenbergRichterBuilder(gr.mMin(), mMax, gr.Δm(), gr.b());
+    return Mfd.newGutenbergRichterBuilder(gr.b(), gr.Δm(), gr.mMin(), mMax);
   }
 
   /* Expected moment rate of a GR MFD. */
@@ -760,7 +778,6 @@ public class FaultRuptureSet implements RuptureSet {
     double incrRate = Mfds.gutenbergRichterRate(gr.a(), gr.b(), mMin);
     return Mfds.momentRate(gr.toBuilder()
         .scaleToIncrementalRate(incrRate)
-        .build()
-        .data());
+        .build());
   }
 }
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/FaultSource.java b/src/main/java/gov/usgs/earthquake/nshmp/model/FaultSource.java
index aee2ab8440c066bd43233eae66374fe2fd127fc9..c1047556f4e2ca6ed47b13b035159c4b0b4090c2 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/model/FaultSource.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/model/FaultSource.java
@@ -136,7 +136,7 @@ public class FaultSource implements Source {
 
   @Override
   public List<Mfd> mfds() {
-    return mfdTree.branches().stream()
+    return mfdTree.stream()
         .map(Branch::value)
         .collect(toUnmodifiableList());
   }
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/GmmSet.java b/src/main/java/gov/usgs/earthquake/nshmp/model/GmmSet.java
index 8cb60e98f6483e260cc1b359c80f47f33141a1c4..d0128579ba98ef0208638c9ec5b67ea4dcb1562e 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/model/GmmSet.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/model/GmmSet.java
@@ -8,14 +8,11 @@ import static gov.usgs.earthquake.nshmp.data.DoubleData.checkWeights;
 
 import java.util.Map;
 import java.util.Objects;
-import java.util.Optional;
 import java.util.Set;
-import java.util.stream.StreamSupport;
 
 import com.google.common.collect.Maps;
 import com.google.common.collect.Range;
 
-import gov.usgs.earthquake.nshmp.data.DoubleData;
 import gov.usgs.earthquake.nshmp.gmm.Gmm;
 import gov.usgs.earthquake.nshmp.gmm.GroundMotionModel;
 import gov.usgs.earthquake.nshmp.gmm.UncertaintyModel;
@@ -51,47 +48,44 @@ public final class GmmSet {
   private final int hashCode;
 
   private final UncertType uncertainty;
-  private final double epiValue;
-  private final double[][] epiValues;
-  private final double[] epiWeights; // TODO DataArray (vs. Table, Volume)
-
-  private final Optional<UncertaintyModel> epiModel;
-
-  GmmSet(
-      Map<Gmm, Double> weightMapLo,
-      double maxDistLo,
-      Map<Gmm, Double> weightMapHi,
-      double maxDistHi,
-      double[] epiValues,
-      double[] epiWeights,
-      Optional<UncertaintyModel> epiModel) {
-
-    this.weightMapLo = weightMapLo;
-    this.maxDistLo = maxDistLo;
-    this.weightMapHi = weightMapHi;
-    this.maxDistHi = (weightMapHi != null) ? maxDistHi : maxDistLo;
-    this.singular = weightMapHi == null;
+  private final double[] epiWeights;
+
+  private final UncertaintyModel epiModel;
+
+  GmmSet(Builder builder) {
+
+    this.weightMapLo = builder.gmmWtMapLo;
+    this.maxDistLo = builder.maxDistanceLo;
+    this.weightMapHi = builder.gmmWtMapHi;
+    this.maxDistHi = (this.weightMapHi != null)
+        ? builder.maxDistanceHi
+        : maxDistLo;
+    this.singular = this.weightMapHi == null;
 
     this.hashCode = Objects.hash(
         this.weightMapLo, this.maxDistLo,
         this.weightMapHi, this.maxDistHi);
 
-    uncertainty = (epiValues == null) ? UncertType.NONE : (epiValues.length == 1)
-        ? UncertType.SINGLE : UncertType.MULTI;
-
-    this.epiWeights = epiWeights;
-    if (uncertainty == UncertType.NONE) {
-      this.epiValue = Double.NaN;
-      this.epiValues = null;
-    } else if (uncertainty == UncertType.SINGLE) {
-      this.epiValue = epiValues[0];
-      this.epiValues = null;
-    } else {
-      this.epiValue = Double.NaN;
-      this.epiValues = initEpiValues(epiValues);
-    }
+    this.uncertainty = (builder.epiModel == null)
+        ? UncertType.NONE
+        : UncertType.MULTI;
+
+    this.epiWeights = builder.uncWeights;
+
+    // if (uncertainty == UncertType.NONE) {
+    // this.epiValue = Double.NaN;
+    // this.epiValues = null;
+    // } else {
+    // this.epiValue = Double.NaN;
+    // this.epiValues = initEpiValues(builder.uncValues);
+    // }
 
-    this.epiModel = epiModel;
+    this.epiModel = builder.epiModel;
+
+    // System.out.println(epiModel);
+    // System.out.println(epiValues);
+    // System.out.println(epiWeights);
+    // System.out.println(uncertainty);
   }
 
   /**
@@ -142,13 +136,13 @@ public final class GmmSet {
         this.maxDistHi == that.maxDistHi;
   }
 
-  private static double[][] initEpiValues(double[] v) {
-    return new double[][] {
-        { v[0], v[1], v[2] },
-        { v[3], v[4], v[5] },
-        { v[6], v[7], v[8] }
-    };
-  }
+  // private static double[][] initEpiValues(double[] v) {
+  // return new double[][] {
+  // { v[0], v[1], v[2] },
+  // { v[3], v[4], v[5] },
+  // { v[6], v[7], v[8] }
+  // };
+  // }
 
   public boolean epiUncertainty() {
     return uncertainty != UncertType.NONE;
@@ -163,7 +157,7 @@ public final class GmmSet {
   public double epiValue(double m, double r) {
     switch (uncertainty) {
       case MULTI:
-        return epiModel.get().value(m, r);
+        return epiModel.value(m, r);
       default:
         return 0.0;
     }
@@ -189,9 +183,9 @@ public final class GmmSet {
     return epiWeights;
   }
 
+  // TODO put NONE in UncertaintyModel
   private static enum UncertType {
     NONE,
-    SINGLE,
     MULTI;
   }
 
@@ -209,63 +203,69 @@ public final class GmmSet {
     private double maxDistanceHi;
 
     // optional
-    private double[] uncValues;
+    // private double[] uncValues;
+    private UncertaintyModel epiModel;
     private double[] uncWeights;
 
-    private Optional<UncertaintyModel> epiModel;
-
     // leave maxDistanceHi as primitive unless validation required
     // at some later date; GmmSet throws NPE if Double useds
 
     // refactor uncertainty only set with primary gmms
 
     Builder primaryModels(LogicTree<Gmm> tree, GmmConfig config) {
-      // checkArgument(checkNotNull(tree, "Map is null").size() > 0, "Map is
-      // empty");
+      checkNotNull(tree);
+      checkNotNull(config);
 
       // setBuilder.uncertainty(uncValues, uncWeights); TODO from gmm-config
-      gmmWtMapLo = StreamSupport.stream(tree.spliterator(), false)
+      gmmWtMapLo = tree.stream()
           .collect(Maps.toImmutableEnumMap(
               Branch::value,
               Branch::weight));
 
-      primaryMaxDistance(config.maxDistance);
-
-      epiModel = config.epistemicModel;
+      maxDistanceLo = config.maxDistance;
 
+      // TODO use tree instead of extracting weights
+      if (config.epistemicModel.isPresent()) {
+        epiModel = config.epistemicModel.orElseThrow();
+        uncWeights = config.epistemicTree.orElseThrow().stream()
+            .mapToDouble(Branch::weight)
+            .toArray();
+      }
       return this;
     }
 
-    Builder primaryModels(Map<Gmm, Double> gmmWtMap, GmmConfig config) {
-      checkArgument(checkNotNull(gmmWtMap, "Map is null").size() > 0, "Map is empty");
-      checkWeights(gmmWtMap.values());
-      gmmWtMapLo = Maps.immutableEnumMap(gmmWtMap);
-
-      primaryMaxDistance(config.maxDistance);
-
-      // TODO just pass config to GmmSet
-      // if (config.epistemicTree.isPresent()) {
-      // double[] values = new double[3];
-      // double[] weights = new double[3];
-      // List<Branch<Double>> epiBranches =
-      // config.epistemicTree.get().branches();
-      // checkState(epiBranches.size() == 3);
-      // for (int i = 0; i < 3; i++) {
-      // Branch<Double> branch = epiBranches.get(0);
-      // values[i] = branch.value();
-      // weights[i] = branch.weight();
-      // }
-      // uncertainty(values, weights);
-      // }
-
-      return this;
-    }
+    // Builder primaryModels(Map<Gmm, Double> gmmWtMap, GmmConfig config) {
+    // checkArgument(checkNotNull(gmmWtMap, "Map is null").size() > 0, "Map is
+    // empty");
+    // checkWeights(gmmWtMap.values());
+    // gmmWtMapLo = Maps.immutableEnumMap(gmmWtMap);
+    //
+    // primaryMaxDistance(config.maxDistance);
+    //
+    // // TODO just pass config to GmmSet
+    // // if (config.epistemicTree.isPresent()) {
+    // // double[] values = new double[3];
+    // // double[] weights = new double[3];
+    // // List<Branch<Double>> epiBranches =
+    // // config.epistemicTree.get().branches();
+    // // checkState(epiBranches.size() == 3);
+    // // for (int i = 0; i < 3; i++) {
+    // // Branch<Double> branch = epiBranches.get(0);
+    // // values[i] = branch.value();
+    // // weights[i] = branch.weight();
+    // // }
+    // // uncertainty(values, weights);
+    // // }
+    //
+    // return this;
+    // }
 
-    @Deprecated
-    Builder primaryMaxDistance(double maxDistance) {
-      maxDistanceLo = checkInRange(MAX_DIST_RANGE, "Max distance", maxDistance);
-      return this;
-    }
+    // @Deprecated
+    // Builder primaryMaxDistance(double maxDistance) {
+    // maxDistanceLo = checkInRange(MAX_DIST_RANGE, "Max distance",
+    // maxDistance);
+    // return this;
+    // }
 
     Builder secondaryModels(Map<Gmm, Double> gmmWtMap, GmmConfig config) {
       checkArgument(checkNotNull(gmmWtMap, "Map is null").size() > 0, "Map is empty");
@@ -283,17 +283,20 @@ public final class GmmSet {
       return this;
     }
 
-    Builder uncertainty(double[] values, double[] weights) {
-      checkNotNull(values, "Values is null");
-      checkArgument(values.length == 9 || values.length == 1,
-          "Values must contain 1 or 9 values");
-      checkArgument(checkNotNull(weights, "Weights are null").length == 3,
-          "Weights must contain 3 values");
-      checkArgument(DoubleData.sum(weights) == 1.0, "%s uncertainty weights must sum to 1");
-      uncValues = values;
-      uncWeights = weights;
-      return this;
-    }
+    // TODO need to checkArgument that values has length 9
+
+    // Builder uncertainty(double[] values, double[] weights) {
+    // checkNotNull(values, "Values is null");
+    // checkArgument(values.length == 9 || values.length == 1,
+    // "Values must contain 1 or 9 values");
+    // checkArgument(checkNotNull(weights, "Weights are null").length == 3,
+    // "Weights must contain 3 values");
+    // checkArgument(DoubleData.sum(weights) == 1.0, "%s uncertainty weights
+    // must sum to 1");
+    // uncValues = values;
+    // uncWeights = weights;
+    // return this;
+    // }
 
     void validateState(String id) {
 
@@ -314,11 +317,11 @@ public final class GmmSet {
             maxDistanceLo);
       }
 
-      if (uncValues != null) {
+      if (epiModel != null) {
         checkNotNull(uncWeights, "%s uncertainty weights not set", id);
       }
       if (uncWeights != null) {
-        checkNotNull(uncValues, "%s uncertainty values not set", id);
+        checkNotNull(epiModel, "%s uncertainty values not set", id);
       }
 
       built = true;
@@ -328,11 +331,7 @@ public final class GmmSet {
 
       validateState(ID);
       try {
-        GmmSet gmmSet = new GmmSet(
-            gmmWtMapLo, maxDistanceLo,
-            gmmWtMapHi, maxDistanceHi,
-            uncValues, uncWeights,
-            epiModel);
+        GmmSet gmmSet = new GmmSet(this);
         return gmmSet;
       } catch (Exception e) {
         e.printStackTrace();
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/GridDepthModel.java b/src/main/java/gov/usgs/earthquake/nshmp/model/GridDepthModel.java
index e2e385a565b9bc07da73fe2b4161e79cf14c6c00..cda1427f69b82d2e5521088d3558213bf2f971b3 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/model/GridDepthModel.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/model/GridDepthModel.java
@@ -2,6 +2,7 @@ package gov.usgs.earthquake.nshmp.model;
 
 import com.google.common.collect.Range;
 
+import gov.usgs.earthquake.nshmp.Text;
 import gov.usgs.earthquake.nshmp.tree.LogicTree;
 
 /*
@@ -20,4 +21,10 @@ class GridDepthModel {
     this.mRange = mRange;
     this.depthTree = depthTree;
   }
+
+  @Override
+  public String toString() {
+    return mRange + Text.NEWLINE + depthTree + Text.NEWLINE;
+  }
+
 }
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/GridLoader.java b/src/main/java/gov/usgs/earthquake/nshmp/model/GridLoader.java
index 9fdfd4448760a8a3b0fb6c5095ab9cb7e18fe365..0751493ae567f1ffa5484db8e48e20962e8c6884 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/model/GridLoader.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/model/GridLoader.java
@@ -1,10 +1,7 @@
 package gov.usgs.earthquake.nshmp.model;
 
-import static com.google.common.base.Preconditions.checkArgument;
 import static com.google.common.base.Preconditions.checkState;
-import static gov.usgs.earthquake.nshmp.mfd.Mfd.Type.GR_TYPES;
 import static gov.usgs.earthquake.nshmp.model.Deserialize.MFD_TREE;
-import static java.util.stream.Collectors.toSet;
 import static java.util.stream.Collectors.toUnmodifiableList;
 import static java.util.stream.Collectors.toUnmodifiableMap;
 
@@ -17,9 +14,9 @@ import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Optional;
 import java.util.OptionalDouble;
-import java.util.Set;
 import java.util.stream.Stream;
 
+import gov.usgs.earthquake.nshmp.Maths;
 import gov.usgs.earthquake.nshmp.data.DelimitedData;
 import gov.usgs.earthquake.nshmp.data.DelimitedData.Record;
 import gov.usgs.earthquake.nshmp.data.XyPoint;
@@ -28,6 +25,7 @@ import gov.usgs.earthquake.nshmp.geo.Location;
 import gov.usgs.earthquake.nshmp.geo.json.Feature;
 import gov.usgs.earthquake.nshmp.geo.json.Properties;
 import gov.usgs.earthquake.nshmp.mfd.Mfd;
+import gov.usgs.earthquake.nshmp.mfd.Mfd.Properties.GutenbergRichter;
 import gov.usgs.earthquake.nshmp.mfd.Mfd.Type;
 import gov.usgs.earthquake.nshmp.mfd.Mfds;
 import gov.usgs.earthquake.nshmp.tree.Branch;
@@ -105,7 +103,7 @@ class GridLoader {
       return Optional.empty();
     }
 
-    Mfd.Type type = mfdTree.branches().get(0).value().type();
+    Mfd.Type type = mfdTree.get(0).value().type();
     return (type == Mfd.Type.INCR)
         ? Optional.of(feature)
         : Optional.empty();
@@ -156,8 +154,8 @@ class GridLoader {
        * TODO improve this (bad single-branch assumption): get only branch of
        * mfd logic tree
        */
-      checkState(mfdTree.branches().size() == 1);
-      Branch<Mfd.Properties> propsBranch = mfdTree.branches().get(0);
+      checkState(mfdTree.size() == 1);
+      Branch<Mfd.Properties> propsBranch = mfdTree.get(0);
       Mfd.Builder mfdBuilder = propsBranch.value().toBuilder();
 
       try (Stream<Record> s = ratefile.records()) {
@@ -296,6 +294,8 @@ class GridLoader {
           .collect(toUnmodifiableList());
     }
 
+    // private boolean temp = true;
+
     private void processRecord(Record record, boolean mMaxPresent) {
 
       Location loc = Location.create(
@@ -311,9 +311,19 @@ class GridLoader {
       FeatureData featureData = dataMap.get(gridId);
       featureData.locations.add(loc);
 
-      LogicTree<Mfd> modelMfd = modelMfdTrees.get(gridId);
-      LogicTree<Mfd> nodeMfd = createNodeMfdTree(modelMfd, a, mMax);
-      featureData.mfdTrees.add(nodeMfd);
+      LogicTree<Mfd> modelMfds = modelMfdTrees.get(gridId);
+      LogicTree<Mfd> nodeMfds = createNodeMfdTree(modelMfds, a, mMax);
+
+      // if (temp) {
+      // System.out.println(nodeMfds);
+      // System.out.println(a + " " + mMax);
+      // for (Branch<Mfd> branch : nodeMfds) {
+      // System.out.println(branch.value().properties().getAsGr().mMin());
+      // System.out.println(branch.value());
+      // }
+      // temp = false;
+      // }
+      featureData.mfdTrees.add(nodeMfds);
     }
 
     private LogicTree<Mfd> createNodeMfdTree(
@@ -325,10 +335,11 @@ class GridLoader {
       for (Branch<Mfd> branch : modelTree) {
 
         Mfd.Properties.GutenbergRichter grProps = branch.value().properties().getAsGr();
-        double rate = Mfds.gutenbergRichterRate(a, grProps.b(), grProps.mMin());
+        double mMin = grProps.mMin() + grProps.Δm() * 0.5;
+        double rate = Mfds.gutenbergRichterRate(a, grProps.b(), mMin);
         Mfd.Builder nodeMfd = Mfd.Builder.from(branch.value())
             .scaleToIncrementalRate(rate);
-        mMax.ifPresent(m -> truncate(nodeMfd, m));
+        mMax.ifPresent(m -> truncate(grProps, nodeMfd, m));
         nodeMfdTree.addBranch(
             branch.id(),
             nodeMfd.build(),
@@ -349,10 +360,7 @@ class GridLoader {
         LogicTree<Mfd.Properties> propsTree = Deserialize.mfdTree(
             grid.source.properties(),
             data);
-        checkType(propsTree);
-        checkMinMagnitude(propsTree);
-        checkMagnitudeDelta(propsTree);
-        LogicTree<Mfd> mfdTree = MfdTrees.propsTreeToMmaxMfdTree(propsTree);
+        LogicTree<Mfd> mfdTree = MfdTrees.grPropsTreeToMmaxMfdTree(propsTree);
         modelMfds.put(entry.getKey(), mfdTree);
       }
       return Map.copyOf(modelMfds);
@@ -367,35 +375,6 @@ class GridLoader {
     }
   }
 
-  private static Mfd.Type checkType(LogicTree<Mfd.Properties> mfdTree) {
-    Set<Mfd.Type> types = mfdTree.branches().stream()
-        .map(Branch::value)
-        .map(Mfd.Properties::type)
-        .collect(toSet());
-    checkArgument(
-        types.size() == 1 || GR_TYPES.containsAll(types),
-        "Grid mfd-tree has multiple mfd types: %s", types);
-    return types.iterator().next();
-  }
-
-  private static double checkMinMagnitude(LogicTree<Mfd.Properties> mfdTree) {
-    Set<Double> mins = mfdTree.branches().stream()
-        .map(Branch::value)
-        .map(p -> p.getAsGr().mMin())
-        .collect(toSet());
-    checkArgument(mins.size() == 1, "Grid mfd-tree has different mfd mMin: %s", mins);
-    return mins.iterator().next();
-  }
-
-  private static double checkMagnitudeDelta(LogicTree<Mfd.Properties> mfdTree) {
-    Set<Double> deltas = mfdTree.branches().stream()
-        .map(Branch::value)
-        .map(p -> p.getAsGr().Δm())
-        .collect(toSet());
-    checkArgument(deltas.size() == 1, "Grid mfd-tree has different mfd Δm: %s", deltas);
-    return deltas.iterator().next();
-  }
-
   /*
    * Data container for feature polygon.
    *
@@ -414,14 +393,28 @@ class GridLoader {
   }
 
   /* Truncate special GR cases; e.g. WUS double counting. */
-  private static void truncate(Mfd.Builder builder, double mMax) {
-    Type type = builder.properties().type();
-    // TODO 6.5 should be obtained from config
-    double m = (type == Type.GR_MMAX_GR) ? 6.5 : mMax;
-    builder.transform(xy -> zeroAboveM(xy, m));
+  private static void truncate(
+      GutenbergRichter grProps,
+      Mfd.Builder builder,
+      double mMax) {
+    Mfd.Type type = grProps.type();
+    if (type == Type.GR_MMAX_GR) {
+      builder.transform(xy -> zeroAboveM(xy, 6.5));
+    } else if (type == Type.GR_MMAX_SINGLE) {
+      /*
+       * Keeps the magnitude bin just above mMax if mMax > m_gr + Δm/2. Rounding
+       * to 5 decimal places ensures that mMax/Δm doesn't equal *.00...001,
+       * which ceil() would take up to *.1
+       *
+       * We really should just be cutting out the uppermost bin with overlap but
+       * this is consistent with what was being done in legacy fortran codes.
+       */
+      double Δm = grProps.Δm();
+      double m = Math.ceil(Maths.round(mMax / Δm, 5)) * Δm;
+      builder.transform(xy -> zeroAboveM(xy, m));
+    }
   }
 
-  // zone
   static GridSourceSet createGrid(
       SourceConfig.Grid config,
       Feature feature,
@@ -442,7 +435,8 @@ class GridLoader {
     builder.gridConfig(config);
     props.getDouble(Key.STRIKE).ifPresent(builder::strike);
 
-    builder.locations(locations, mfds, mfdsTree, focalMechMaps, Mfd.Type.SINGLE);
+    // System.out.println(mfds.get(0).properties().type());
+    builder.locations(locations, mfds, mfdsTree, focalMechMaps);
 
     return builder.build();
   }
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/GridRuptureSet.java b/src/main/java/gov/usgs/earthquake/nshmp/model/GridRuptureSet.java
index 6fd95b1baf66397329d9f1b8ea990777abd486ef..1a263b5b046ce60b1d8160b794ca04df4f7155e9 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/model/GridRuptureSet.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/model/GridRuptureSet.java
@@ -48,6 +48,17 @@ class GridRuptureSet implements RuptureSet {
     if (gss == null) {
       gss = createSourceSet(weight);
     }
+
+    // System.out.println("--- GSS ---");
+    // for (PointSource ptSrc : gss) {
+    // System.out.println(ptSrc);
+    // for (Rupture rup : ptSrc) {
+    // System.out.println(rup);
+    // break;
+    // }
+    // }
+    // System.out.println(gss.mechMaps);
+
     return gss;
   }
 
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/GridSourceSet.java b/src/main/java/gov/usgs/earthquake/nshmp/model/GridSourceSet.java
index 11c1c515afa65982c79d4772a87caea57b90f8bd..ab326580b2bd1419472557f8b0fa8a483ab1775b 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/model/GridSourceSet.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/model/GridSourceSet.java
@@ -1,10 +1,8 @@
 package gov.usgs.earthquake.nshmp.model;
 
 import static com.google.common.base.Preconditions.checkArgument;
-import static com.google.common.base.Preconditions.checkNotNull;
 import static com.google.common.base.Preconditions.checkState;
 import static gov.usgs.earthquake.nshmp.Earthquakes.checkCrustalDepth;
-import static gov.usgs.earthquake.nshmp.Earthquakes.checkMagnitude;
 import static gov.usgs.earthquake.nshmp.Earthquakes.checkSlabDepth;
 import static gov.usgs.earthquake.nshmp.Faults.checkStrike;
 import static gov.usgs.earthquake.nshmp.fault.FocalMech.NORMAL;
@@ -59,23 +57,27 @@ public class GridSourceSet extends AbstractSourceSet<PointSource> {
   final List<Location> locations;
   final List<Mfd> mfds;
   final LogicTree<List<Mfd>> mfdsTree;
-  final RuptureScaling rupScaling;
-  private final Map<FocalMech, Double> mechMap; // default, used for copyOf
-  private final List<Map<FocalMech, Double>> mechMaps;
+  // private final Map<FocalMech, Double> mechMap; // default, used for copyOf
+  final List<Map<FocalMech, Double>> mechMaps; // may be nCopies
   private final boolean singularMechs;
   private final NavigableMap<Double, Map<Double, Double>> magDepthMap;
-  private final PointSourceType sourceType;
   private final OptionalDouble strike;
-  private final double[] magMaster;
-  private final Double mMin;
-  private final Double mMax;
-  private final double Δm;
 
+  /* optimizable = magMaster.isPresent() */
+  private final Optional<double[]> magMaster;
+
+  final RuptureScaling rupScaling;
   private final double maxDepth;
+  private final PointSourceType sourceType;
 
   final DepthModel depthModel;
   final boolean optimizable;
 
+  /*
+   * Whether or not a grid can be optimized using rate tables is a function of
+   * whether magMaster is present, or not.
+   */
+
   /*
    * Most grid sources have the same focal mech map everywhere; in these cases,
    * mechMaps will have been created using Collections.nCopies() with minimal
@@ -88,16 +90,12 @@ public class GridSourceSet extends AbstractSourceSet<PointSource> {
     this.mfds = builder.mfds;
     this.mfdsTree = builder.mfdsTree;
     this.rupScaling = builder.rupScaling;
-    this.mechMap = builder.mechMap;
     this.mechMaps = builder.mechMaps;
     this.singularMechs = builder.singularMechs;
     this.magDepthMap = builder.magDepthMap;
     this.sourceType = builder.sourceType;
     this.strike = builder.strike;
     this.magMaster = builder.magMaster;
-    this.mMin = builder.mMin;
-    this.mMax = builder.mMax;
-    this.Δm = builder.Δm;
     this.maxDepth = builder.maxDepth;
 
     /*
@@ -105,12 +103,29 @@ public class GridSourceSet extends AbstractSourceSet<PointSource> {
      *
      * TODO should be cleaner way of handling this
      */
-    this.optimizable = (sourceType() != FIXED_STRIKE) && !Double.isNaN(Δm);
+    // System.out.println(Δm);
+    // this.optimizable = (sourceType() != FIXED_STRIKE) && !Double.isNaN(Δm);
+    this.optimizable = this.magMaster.isPresent();
+
+    // System.out.println(Arrays.toString(magMaster.orElseThrow()));
+
+    double[] depthMags = this.mfds.get(0).data().xValues().toArray();
+    // System.out.println(mfdsTree);
+
+    // System.out.println(Arrays.toString(depthMags));
 
     this.depthModel = DepthModel.create(
         magDepthMap,
-        Doubles.asList(magMaster),
+        Doubles.asList(depthMags),
         maxDepth);
+
+    // System.out.println("singulearMechs: " + singularMechs);
+    // System.out.println("ptSrcType: " + sourceType);
+    // for (int i = 0; i < locations.size(); i++) {
+    // System.out.println(locations.get(i));
+    // System.out.println(mfds.get(i));
+    // }
+    // System.out.println(mfds);
   }
 
   @Override
@@ -239,11 +254,11 @@ public class GridSourceSet extends AbstractSourceSet<PointSource> {
     private NavigableMap<Double, Map<Double, Double>> magDepthMap;
     private PointSourceType sourceType;
     private OptionalDouble strike = OptionalDouble.empty();
-    private double[] magMaster;
+    private Optional<double[]> magMaster;
     private Double maxDepth;
-    private Double mMin;
-    private Double mMax;
-    private Double Δm;
+    // private Double mMin;
+    // private Double mMax;
+    // private Double Δm;
 
     private Map<FocalMech, Double> mechMap;
     private boolean singularMechs = true;
@@ -254,24 +269,31 @@ public class GridSourceSet extends AbstractSourceSet<PointSource> {
       return this;
     }
 
-    Builder sourceType(PointSourceType sourceType) {
-      this.sourceType = checkNotNull(sourceType);
-      return this;
-    }
-
-    Builder ruptureScaling(RuptureScaling rupScaling) {
-      this.rupScaling = checkNotNull(rupScaling, "RupScaling is null");
-      return this;
-    }
-
     Builder gridConfig(SourceConfig.Grid gridConfig) {
-      // this.spacing = gridConfig.spacing; // TODO would only be used for area
-      // sources
+      // TODO would only be used for zone
+      // this.spacing = gridConfig.spacing;
+
       this.maxDepth = gridConfig.maxDepth;
+
+      // TODO makes bad assumption of GRID
+      // TODO reenable but need slab handling
+      // validateDepth(this.maxDepth, SourceType.GRID);
+
       this.rupScaling = gridConfig.ruptureScaling;
       this.sourceType = gridConfig.pointSourceType;
       this.mechMap = convertFocalMechTree(gridConfig.focalMechTree);
+      checkArgument(this.mechMap.size() == 3);
+
       this.magDepthMap = convertMagDepthMap(gridConfig.gridDepthMap);
+      // System.out.println(gridConfig.gridDepthMap);
+      // System.out.println(this.magDepthMap);
+
+      validateMagCutoffs(this.magDepthMap);
+
+      // TODO makes bad assumption of GRID
+      // TODO reenable but need slab handling
+      // validateDepthMap(this.magDepthMap, SourceType.GRID);
+
       return this;
     }
 
@@ -294,58 +316,16 @@ public class GridSourceSet extends AbstractSourceSet<PointSource> {
     }
 
     private Map<Double, Double> convertDepthTree(LogicTree<Double> tree) {
-      return tree.branches().stream()
+      return tree.stream()
           .collect(ImmutableSortedMap.toImmutableSortedMap(
               Ordering.natural(),
               Branch::value,
               Branch::weight));
     }
 
-    Builder depthMap(NavigableMap<Double, Map<Double, Double>> magDepthMap, SourceType type) {
-      checkNotNull(magDepthMap, "MagDepthMap is null");
-      checkArgument(magDepthMap.size() > 0, "MagDepthMap must have at least one entry");
-      // the structure of the map and its weights will have been fully
-      // validated by parser; still need to check that depths are
-      // appropriate; 'type' indicates how to validate depths across
-      // wrapper classes
-      validateDepthMap(magDepthMap, type);
-      // there must be at least one mag key that is >= MAX_MAG
-      validateMagCutoffs(magDepthMap);
-      this.magDepthMap = magDepthMap;
-      return this;
-    }
-
-    Builder maxDepth(Double maxDepth, SourceType type) {
-      this.maxDepth = checkNotNull(maxDepth, "Maximum depth is null");
-      validateDepth(maxDepth, type);
-      return this;
-    }
-
-    Builder mechs(Map<FocalMech, Double> mechMap) {
-      // weights will have already been checked
-      checkArgument(!checkNotNull(mechMap).isEmpty());
-      checkArgument(mechMap.size() == 3);
-      this.mechMap = mechMap;
-      return this;
-    }
-
     /*
-     * magMaster mfd data
-     *
-     * we could require that this be set first and then all node mfds are
-     * checked against this.
-     *
-     * should be better way to get master/total mfd
+     * TODO How/where do we ensure that all Mfds in a grid are the same type??
      */
-    @Deprecated
-    Builder mfdData(double mMin, double mMax, double Δm) {
-      // TODO need better validation here
-      checkArgument(checkMagnitude(mMin) <= checkMagnitude(mMax));
-      this.mMin = mMin;
-      this.mMax = mMax;
-      this.Δm = Δm;
-      return this;
-    }
 
     // locations, total Mfds, underlying mfd-tree
     // TODO test consistency, size etc ?
@@ -353,8 +333,7 @@ public class GridSourceSet extends AbstractSourceSet<PointSource> {
         List<Location> locations,
         List<Mfd> mfds,
         LogicTree<List<Mfd>> mfdsTree,
-        Optional<List<Map<FocalMech, Double>>> focalMechMaps,
-        Mfd.Type type) {
+        Optional<List<Map<FocalMech, Double>>> focalMechMaps) {
 
       checkArgument(locations.size() == mfds.size());
       // wholesale replacement of arrays
@@ -368,30 +347,17 @@ public class GridSourceSet extends AbstractSourceSet<PointSource> {
       // TODO this assumes (rightly so) that all supplied mfds have the same
       // but we need to get rid of the odd dependency on Δm
       Mfd model = mfds.get(0);
-      this.mMin = model.data().x(0);
-      this.mMax = model.data().x(model.data().size() - 1);
-      this.Δm = (type == Mfd.Type.SINGLE)
-          ? Double.NaN
-          : model.data().x(1) - this.mMin;
+      // this.mMin = model.data().x(0);
+      // this.mMax = model.data().x(model.data().size() - 1);
+      // // System.out.println(type);
+      // this.Δm = (type == Mfd.Type.SINGLE)
+      // ? Double.NaN
+      // : model.data().x(1) - this.mMin;
       return this;
     }
 
-    Builder location(Location loc, Mfd mfd) {
-      this.mfds.add(checkNotNull(mfd, "MFD is null"));
-      this.locations.add(checkNotNull(loc, "Location is null"));
-      return this;
-    }
-
-    Builder location(Location loc, Mfd mfd, Map<FocalMech, Double> mechMap) {
-      this.mfds.add(checkNotNull(mfd, "MFD is null"));
-      this.locations.add(checkNotNull(loc, "Location is null"));
-      checkArgument(!checkNotNull(mechMap).isEmpty());
-      checkArgument(mechMap.size() == 3);
-      this.mechMaps.add(mechMap);
-      return this;
-    }
-
-    static void validateDepthMap(Map<Double, Map<Double, Double>> magDepthMap,
+    static void validateDepthMap(
+        Map<Double, Map<Double, Double>> magDepthMap,
         SourceType type) {
       for (Map<Double, Double> magMap : magDepthMap.values()) {
         for (double depth : magMap.keySet()) {
@@ -448,9 +414,9 @@ public class GridSourceSet extends AbstractSourceSet<PointSource> {
       checkState(maxDepth != null, "%s max depth not set", buildId);
       checkState(mechMap != null, "%s focal mech map not set", buildId);
 
-      checkState(mMin != null, "%s min mag not set", buildId);
-      checkState(mMax != null, "%s max mag not set", buildId);
-      checkState(Δm != null, "%s delta mag not set", buildId);
+      // checkState(mMin != null, "%s min mag not set", buildId);
+      // checkState(mMax != null, "%s max mag not set", buildId);
+      // checkState(Δm != null, "%s delta mag not set", buildId);
 
       /*
        * TODO there are too many assumptions built into this; whose to say ones
@@ -464,12 +430,15 @@ public class GridSourceSet extends AbstractSourceSet<PointSource> {
        * TODO in the case of single combined/flattened MFDs, mags may not be
        * uniformly spaced. Can this be refactored
        */
-      if (Double.isNaN(Δm)) {
-        magMaster = mfds.get(0).data().xValues().toArray();
-      } else {
-        double cleanDelta = Double.valueOf(String.format("%.2f", Δm));
-        magMaster = DoubleData.buildCleanSequence(mMin, mMax, cleanDelta, true, 2);
-      }
+      // if (Double.isNaN(Δm)) {
+      // magMaster = mfds.get(0).data().xValues().toArray();
+      // } else {
+      // double cleanDelta = Double.valueOf(String.format("%.2f", Δm));
+      // magMaster = DoubleData.buildCleanSequence(mMin, mMax, cleanDelta, true,
+      // 2);
+      // }
+      double[] mfdMags = mfds.get(0).data().xValues().toArray();
+      magMaster = Optional.of(mfdMags);
 
       /*
        * Validate size of mechMaps; size could get out of sync if mixed calls to
@@ -755,11 +724,23 @@ public class GridSourceSet extends AbstractSourceSet<PointSource> {
     /* creates the type of point source specified in the parent */
     private List<PointSource> initSources(boolean smoothed) {
 
-      // table keys are specified as lowermost and uppermost bin edges
-      double Δm = parent.Δm;
+      /* For now, should only be getting here for GR MFDs */
+      Mfd modelMfd = parent.mfds.get(0);
+      Mfd.Properties props = modelMfd.properties(); // probably INCR
+      double[] mags = modelMfd.data().xValues().toArray();
+
+      // TODO can we get this from the mfd.properties?
+      // the original min max won't have offsets
+      double Δm = mags[1] - mags[0];
       double ΔmBy2 = Δm / 2.0;
-      double mMin = parent.magMaster[0] - ΔmBy2;
-      double mMax = parent.magMaster[parent.magMaster.length - 1] + ΔmBy2;
+      double mMin = mags[0] - ΔmBy2;
+      double mMax = mags[mags.length - 1] + ΔmBy2;
+
+      // table keys are specified as lowermost and uppermost bin edges
+      // double Δm = parent.Δm;
+      // double ΔmBy2 = Δm / 2.0;
+      // double mMin = parent.magMaster[0] - ΔmBy2;
+      // double mMax = parent.magMaster[parent.magMaster.length - 1] + ΔmBy2;
       double rMax = parent.groundMotionModels().maxDistance();
       double[] smoothingOffsets = smoothingOffsets(rMax, 0.1);
 
@@ -781,6 +762,8 @@ public class GridSourceSet extends AbstractSourceSet<PointSource> {
 
       // System.out.println(parent.name());
       // System.out.println(mfdTable);
+      // System.out.println("TableSum: " + mfdTable.collapse().sum());
+      // System.out.println(mfdTable.rows());
 
       List<Double> distances = mfdTable.rows();
       maximumSize = distances.size();
@@ -791,7 +774,6 @@ public class GridSourceSet extends AbstractSourceSet<PointSource> {
           continue;
         }
         Location loc = Locations.location(origin, SRC_TO_SITE_AZIMUTH, r);
-
         b.add(PointSources.pointSource(
             parent.type(),
             parent.sourceType,
@@ -807,10 +789,21 @@ public class GridSourceSet extends AbstractSourceSet<PointSource> {
 
     /* always creates finite point sources */
     private List<PointSource> initMultiMechSources(boolean smoothed) {
-      double Δm = parent.Δm;
+
+      /* For now, should only be getting here for GR MFDs */
+      Mfd modelMfd = parent.mfds.get(0);
+      Mfd.Properties props = modelMfd.properties(); // probably INCR
+      double[] mags = modelMfd.data().xValues().toArray();
+
+      double Δm = mags[1] - mags[0];
       double ΔmBy2 = Δm / 2.0;
-      double mMin = parent.magMaster[0] - ΔmBy2;
-      double mMax = parent.magMaster[parent.magMaster.length - 1] + ΔmBy2;
+      double mMin = mags[0] - ΔmBy2;
+      double mMax = mags[mags.length - 1] + ΔmBy2;
+
+      // double Δm = parent.Δm;
+      // double ΔmBy2 = Δm / 2.0;
+      // double mMin = parent.magMaster[0] - ΔmBy2;
+      // double mMax = parent.magMaster[parent.magMaster.length - 1] + ΔmBy2;
       double rMax = parent.groundMotionModels().maxDistance();
       double[] smoothingOffsets = smoothingOffsets(rMax, 0.1);
 
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/InterfaceRuptureSet.java b/src/main/java/gov/usgs/earthquake/nshmp/model/InterfaceRuptureSet.java
index bb370de4681780d0c30a81493a187551d85b2915..579a5f6b60734fa5039adf2ca83fc053e3d4aa10 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/model/InterfaceRuptureSet.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/model/InterfaceRuptureSet.java
@@ -164,7 +164,7 @@ public class InterfaceRuptureSet implements RuptureSet {
 
       // TODO temp checking props are always the same;
       // don't think this is always true at least for faults
-      checkState(mfdPropsTree.branches().stream()
+      checkState(mfdPropsTree.stream()
           .map(Branch::value)
           .map(Mfd.Properties::type)
           .distinct().limit(2).count() <= 1);
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/MfdConfig.java b/src/main/java/gov/usgs/earthquake/nshmp/model/MfdConfig.java
index 3b41742470345347f6c6940de23de226b693b49a..8b40849086498cf3eab21733b1b62d36f5cdb90b 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/model/MfdConfig.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/model/MfdConfig.java
@@ -56,7 +56,7 @@ class MfdConfig {
       this.epistemicTree = Optional.of(checkNotNull(tree));
       if (this.epistemicTree.isPresent()) {
         this.minEpiOffset = OptionalDouble.of(
-            this.epistemicTree.get().branches().stream()
+            this.epistemicTree.orElseThrow().stream()
                 .mapToDouble(b -> b.value())
                 .min()
                 .getAsDouble());
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/MfdTrees.java b/src/main/java/gov/usgs/earthquake/nshmp/model/MfdTrees.java
index 61d2695477a65a14f6e14501b47ec1ba1f60cf68..ad3421e52a8ca8ae6baa963138b9e141042e1d02 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/model/MfdTrees.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/model/MfdTrees.java
@@ -1,18 +1,28 @@
 package gov.usgs.earthquake.nshmp.model;
 
 import static com.google.common.base.Preconditions.checkArgument;
+import static gov.usgs.earthquake.nshmp.mfd.Mfd.Type.GR;
+import static gov.usgs.earthquake.nshmp.mfd.Mfd.Type.GR_MMAX_GR;
+import static gov.usgs.earthquake.nshmp.mfd.Mfd.Type.GR_MMAX_SINGLE;
+import static gov.usgs.earthquake.nshmp.mfd.Mfd.Type.GR_TAPER;
 import static java.util.stream.Collectors.toMap;
+import static java.util.stream.Collectors.toSet;
 import static java.util.stream.Collectors.toUnmodifiableList;
 
 import java.nio.file.Path;
 import java.util.ArrayList;
+import java.util.EnumSet;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
 
 import gov.usgs.earthquake.nshmp.data.MutableXySequence;
 import gov.usgs.earthquake.nshmp.data.XySequence;
 import gov.usgs.earthquake.nshmp.mfd.Mfd;
+import gov.usgs.earthquake.nshmp.mfd.Mfd.Properties;
 import gov.usgs.earthquake.nshmp.mfd.Mfd.Properties.GutenbergRichter;
+import gov.usgs.earthquake.nshmp.mfd.Mfd.Properties.TaperedGr;
+import gov.usgs.earthquake.nshmp.mfd.Mfd.Type;
 import gov.usgs.earthquake.nshmp.mfd.Mfds;
 import gov.usgs.earthquake.nshmp.tree.Branch;
 import gov.usgs.earthquake.nshmp.tree.LogicTree;
@@ -24,10 +34,18 @@ import gov.usgs.earthquake.nshmp.tree.LogicTree;
  */
 class MfdTrees {
 
+  /* All Gutenberg-Richter MFD types. */
+  private static final Set<Type> GR_TYPES = EnumSet.of(
+      GR,
+      GR_MMAX_GR,
+      GR_MMAX_SINGLE,
+      GR_TAPER);
+
   /* Convert a logic tree of mfd properties to builders. */
-  static LogicTree<Mfd.Builder> mfdPropsToBuilders(LogicTree<Mfd.Properties> propsTree) {
-    LogicTree.Builder<Mfd.Builder> mfdTree = LogicTree.builder(propsTree.name());
-    for (Branch<Mfd.Properties> branch : propsTree.branches()) {
+  @Deprecated // TODO clean
+  static LogicTree<Mfd.Builder> mfdPropsToBuilders(LogicTree<Mfd.Properties> tree) {
+    LogicTree.Builder<Mfd.Builder> mfdTree = LogicTree.builder(tree.name());
+    for (Branch<Mfd.Properties> branch : tree) {
       mfdTree.addBranch(branch.id(), branch.value().toBuilder(), branch.weight());
     }
     return mfdTree.build();
@@ -38,47 +56,28 @@ class MfdTrees {
    * b-values possible) to a tree of model MFDs with identical x-values. This
    * supports optimizations in XySequence.combine().
    */
-  static LogicTree<Mfd> propsTreeToMmaxMfdTree(LogicTree<Mfd.Properties> propsTree) {
-    double mMax = mfdTreeMaxMagnitude(propsTree);
-    LogicTree.Builder<Mfd> mfdTree = LogicTree.builder(propsTree.name());
-    propsTree.branches().stream()
-        .forEach(branch -> mfdTree.addBranch(
-            branch.id(),
-            mMaxGrMfd(branch.value(), mMax),
-            branch.weight()));
+  static LogicTree<Mfd> grPropsTreeToMmaxMfdTree(LogicTree<Mfd.Properties> tree) {
+    checkGrMinAndDelta(tree);
+    double mMax = mfdTreeMaxMagnitude(tree);
+    LogicTree.Builder<Mfd> mfdTree = LogicTree.builder(tree.name());
+    tree.stream().forEach(branch -> mfdTree.addBranch(
+        branch.id(),
+        mMaxGrMfd(branch.value(), mMax),
+        branch.weight()));
     return mfdTree.build();
   }
 
-  /* Find the maximum mMax, assuming a tree of GR MFDs. */
-  static double mfdTreeMaxMagnitude(LogicTree<Mfd.Properties> propsTree) {
-    return propsTree.branches().stream()
+  /* Find the maximum mMax, checking that tree is all GR MFDs. */
+  private static double mfdTreeMaxMagnitude(LogicTree<Mfd.Properties> tree) {
+    return tree.stream()
         .mapToDouble(b -> b.value().getAsGr().mMax())
         .max()
         .getAsDouble();
   }
 
-  /* Check if all MFDs in a tree are the same type. */
-  static boolean mfdsAreType(LogicTree<Mfd.Properties> mfdTree, Mfd.Type type) {
-    return mfdTree.branches()
-        .stream()
-        .map(branch -> branch.value().type())
-        .allMatch(type::equals);
-  }
-
-  /* Check if the IDs and weights of two mfd-trees are the same. */
-  static void checkTreeIdsAndWeights(LogicTree<Mfd> tree1, LogicTree<Mfd> tree2) {
-    checkArgument(
-        mfdTreeWeightMap(tree1).equals(mfdTreeWeightMap(tree2)),
-        "mfd-trees do not have identical IDs and wieghts\n Tree1: %s \nTree2: %s",
-        tree1, tree2);
-  }
-
-  /* Create map of mfd-tree ID:weight. */
-  private static Map<String, Double> mfdTreeWeightMap(LogicTree<Mfd> tree) {
-    return tree.branches().stream().collect(toMap(Branch::id, Branch::weight));
-  }
-
   /*
+   * TODO clean/consolidate these notes
+   *
    * Note: if we create custom MFDs for each M* branch, we'll have the same
    * reference x-values for each model, but when added, a new set of x-vlues
    * will be created each time by XySequence.combine(). We therefore create MFDs
@@ -89,17 +88,93 @@ class MfdTrees {
    * mMax sets the upper limit of the actual MFD. Zero rate bins will not scale
    * in new builder.from(); method is for the combination of GR logic trees of
    * MFDs with similar binning.
+   *
+   * The rate zeroing done below permits combining of GR_TAPER and GR branches,
+   * where GR_TAPER might go as high as M8. It should not be confused with the
+   * rate zeroing that occurrs later on as individual node MFDs are being
+   * created for GR_MMAX_GR and GR_MMAX_SINGLE to avoid double counting.
+   *
+   * once transforming, is OK to cast GR_TAPER as GR to get original mMax
    */
   private static Mfd mMaxGrMfd(Mfd.Properties props, double mMax) {
-    GutenbergRichter gr = props.getAsGr();
-    return Mfd.newGutenbergRichterBuilder(gr.mMin(), mMax, gr.Δm(), gr.b())
-        .transform(p -> p.set(p.x() > gr.mMax() ? 0.0 : p.y()))
+    Mfd.Properties newProps = (props.type() == Type.GR_TAPER)
+        ? new TaperedGr(props.getAsGrTaper(), mMax)
+        : new GutenbergRichter(props.getAsGr(), mMax);
+    return newProps.toBuilder()
+        .transform(p -> p.set((p.x() > props.getAsGr().mMax()) ? 0.0 : p.y()))
         .build();
   }
 
+  /* Ensure all MFDs in a tree are the same as specified type. */
+  static void checkType(LogicTree<Mfd.Properties> tree, Mfd.Type type) {
+    Mfd.Type refType = tree.get(0).value().type();
+    checkArgument(
+        checkTreeTypes(tree, refType) && refType == type,
+        "mfd-tree types are not type: %s", type);
+  }
+
+  /* Check if all MFDs in a tree are the specified type. */
+  static boolean checkTreeTypes(LogicTree<Mfd.Properties> tree, Mfd.Type type) {
+    return tree.stream()
+        .map(Branch::value)
+        .map(Mfd.Properties::type)
+        .allMatch(type::equals);
+  }
+
+  /* Ensure all MFDs in a tree are some Gutenberg-Richter type. */
+  private static void checkGrTypes(LogicTree<Mfd.Properties> tree) {
+    Set<Mfd.Type> types = tree.stream()
+        .map(Branch::value)
+        .map(Mfd.Properties::type)
+        .collect(toSet());
+    checkArgument(
+        types.size() == 1 || GR_TYPES.containsAll(types),
+        "mfd-tree types are not all Gutenberg-Richter: %s", types);
+  }
+
+  /* Ensure all MFDs are Gutenberg-Richter and that mMin and Δm match. */
+  private static void checkGrMinAndDelta(LogicTree<Mfd.Properties> tree) {
+
+    // TODO consider condensing
+    // Properties::getAsGr will fail as checkGrTypes would
+    // (GR_TAPER. is subclass so getAsGr works)
+    // checkMinAndDelta as predicate could throw exception
+    // and then map to max(mMax)
+
+    checkGrTypes(tree);
+    GutenbergRichter refProps = tree.get(0).value().getAsGr();
+    boolean minAndDeltaEqual = tree.stream()
+        .skip(1)
+        .map(Branch::value)
+        .map(Properties::getAsGr)
+        .allMatch(props -> checkMinAndDelta(refProps, props));
+    checkArgument(
+        minAndDeltaEqual,
+        "Gutenberg-Richter mfd-tree has different mMin and Δm");
+  }
+
+  private static boolean checkMinAndDelta(
+      GutenbergRichter props1,
+      GutenbergRichter props2) {
+    return (props1.mMin() == props2.mMin() && props1.Δm() == props2.Δm());
+  }
+
+  /* Check if the IDs and weights of two mfd-trees are the same. */
+  static void checkTreeIdsAndWeights(LogicTree<Mfd> tree1, LogicTree<Mfd> tree2) {
+    checkArgument(
+        mfdTreeWeightMap(tree1).equals(mfdTreeWeightMap(tree2)),
+        "mfd-trees do not have identical IDs and wieghts\n Tree1: %s \nTree2: %s",
+        tree1, tree2);
+  }
+
+  /* Create map of mfd-tree ID:weight. */
+  private static Map<String, Double> mfdTreeWeightMap(LogicTree<Mfd> tree) {
+    return tree.stream().collect(toMap(Branch::id, Branch::weight));
+  }
+
   /* Get a list of branch IDs from a tree. */
   static List<String> treeIds(LogicTree<?> tree) {
-    return tree.branches().stream()
+    return tree.stream()
         .map(Branch::id)
         .collect(toUnmodifiableList());
   }
@@ -109,32 +184,32 @@ class MfdTrees {
    * set to it's weight. This MFD can then be used as a model for a builder that
    * can be scaled by rate.
    */
-  static Mfd reduceSingleMfdTree(LogicTree<Mfd.Properties> mfdTree) {
-    double[] magnitudes = new double[mfdTree.size()];
-    double[] weights = new double[mfdTree.size()];
-    for (int i = 0; i < mfdTree.size(); i++) {
-      Branch<Mfd.Properties> mfd = mfdTree.branches().get(i);
-      magnitudes[i] = mfd.value().getAsSingle().m();
+  static Mfd reduceSingleMfdTree(LogicTree<Mfd.Properties> tree) {
+    double[] magnitudes = new double[tree.size()];
+    double[] weights = new double[tree.size()];
+    for (int i = 0; i < tree.size(); i++) {
+      Branch<Mfd.Properties> mfd = tree.get(i);
+      magnitudes[i] = mfd.value().getAsSingle().magnitude();
       weights[i] = mfd.weight();
     }
     return Mfd.create(magnitudes, weights);
   }
 
   /* Reduce a MFD logic tree; weighted branch combiner. */
-  static Mfd reduceMfdTree(LogicTree<Mfd> mfdTree) {
-    return Mfds.combine(scaledMfdList(mfdTree));
+  static Mfd reduceMfdTree(LogicTree<Mfd> tree) {
+    return Mfds.combine(scaledMfdList(tree));
   }
 
   /* LogicTree<MFD> --> List<MFD * branchWeight> */
-  static List<Mfd> scaledMfdList(LogicTree<Mfd> mfdTree) {
-    return mfdTree.branches().stream()
+  static List<Mfd> scaledMfdList(LogicTree<Mfd> tree) {
+    return tree.stream()
         .map(MfdTrees::reduceMfdBranch)
         .collect(toUnmodifiableList());
   }
 
   /* LogicTree<MFD> --> List<MFD * branchWeight * weight> */
-  static List<Mfd> scaledMfdList(LogicTree<Mfd> mfdTree, double weight) {
-    return mfdTree.branches().stream()
+  static List<Mfd> scaledMfdList(LogicTree<Mfd> tree, double weight) {
+    return tree.stream()
         .map(branch -> reduceMfdBranch(branch, weight))
         .collect(toUnmodifiableList());
   }
@@ -173,24 +248,24 @@ class MfdTrees {
    * Transpose a list of logic trees to a logic tree of immutable lists.
    * Supplied logic trees are assumed to have same branch names and IDs.
    */
-  static <T> LogicTree<List<T>> transposeTree(List<LogicTree<T>> listOfTrees) {
-    LogicTree<T> model = listOfTrees.get(0);
+  static <T> LogicTree<List<T>> transposeTree(List<LogicTree<T>> treeList) {
+    LogicTree<T> model = treeList.get(0);
 
     /* Init branch value lists. */
     List<List<T>> valueLists = new ArrayList<>(model.size());
     model.forEach(b -> valueLists.add(new ArrayList<T>()));
 
     /* Populate value lists. */
-    for (LogicTree<T> tree : listOfTrees) {
+    for (LogicTree<T> tree : treeList) {
       for (int i = 0; i < tree.size(); i++) {
-        valueLists.get(i).add(tree.branches().get(i).value());
+        valueLists.get(i).add(tree.get(i).value());
       }
     }
 
     /* Create tree of lists. */
     LogicTree.Builder<List<T>> treeOfLists = LogicTree.builder(model.name());
     for (int i = 0; i < model.size(); i++) {
-      Branch<T> branch = model.branches().get(i);
+      Branch<T> branch = model.get(i);
       treeOfLists.addBranch(
           branch.id(),
           List.copyOf(valueLists.get(i)),
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/ModelLoader.java b/src/main/java/gov/usgs/earthquake/nshmp/model/ModelLoader.java
index aaced98d58edf5354ed4e4c35153f3c63dc690e6..7096ceeef5248f6b4a26b165f7363be5e861c291 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/model/ModelLoader.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/model/ModelLoader.java
@@ -175,9 +175,6 @@ abstract class ModelLoader {
   /*
    * TODO In process*Branch methods we are exiting for null ("do nothing")
    * branches (so not adding them to source tree) and therefore can't sample
-   *
-   * TODO set ignore checkNotNull(path) for class
-   *
    */
   private ModelLoader(Path root) {
     this.root = root;
@@ -302,7 +299,7 @@ abstract class ModelLoader {
           .root(root);
 
       loadSingleRuptureSet(
-          root.branches().get(0),
+          root.get(0),
           treeBuilder,
           data,
           feature);
@@ -328,7 +325,7 @@ abstract class ModelLoader {
       double Mw = FaultRuptureSet.magnitudeWc94(length);
 
       for (int i = 0; i < dipTree.size(); i++) {
-        Branch<Double> dipBranch = dipTree.branches().get(i);
+        Branch<Double> dipBranch = dipTree.get(i);
         int branchDip = (int) (feature.dip + dipBranch.value());
         int rateDip = (int) ((dipSlipModel == FIXED) ? feature.dip : branchDip);
         String branchName = String.format("%s [%s°]", feature.name, branchDip);
@@ -343,7 +340,7 @@ abstract class ModelLoader {
             .build();
 
         loadSingleRuptureSet(
-            dipSourceTree.branches().get(i),
+            dipSourceTree.get(i),
             treeBuilder,
             data,
             new SourceFeature.NshmFault(dipFeature));
@@ -430,7 +427,7 @@ abstract class ModelLoader {
       Optional<LogicTree<Path>> tree = readSourceTree(dir);
       if (tree.isPresent()) {
 
-        LogicTree<Path> children = tree.get();
+        LogicTree<Path> children = tree.orElseThrow();
         treeBuilder.addBranches(branch, children);
 
         for (Branch<Path> child : children) {
@@ -459,7 +456,7 @@ abstract class ModelLoader {
       System.out.println("  system: " + root.relativize(dir));
       Optional<LogicTree<Path>> tree = readSourceTree(dir);
       if (tree.isPresent()) {
-        LogicTree<Path> children = tree.get();
+        LogicTree<Path> children = tree.orElseThrow();
         treeBuilder.addBranches(branch, children);
         for (Branch<Path> child : children) {
           processSystemBranch(child, treeBuilder, data);
@@ -506,7 +503,7 @@ abstract class ModelLoader {
 
       // TODO dummy mfds for now TODO TODO TODO
       System.out.println(" -- STOP -- this isnt working correctly");
-      Mfd.Properties.Single mfdProps = Mfd.Properties.Single.dummy();
+      Mfd.Properties.Single mfdProps = new Mfd.Properties.Single(7.0);
 
       LogicTree<Mfd.Properties> mfdTree = LogicTree.singleton(
           Deserialize.MFD_TREE,
@@ -530,7 +527,7 @@ abstract class ModelLoader {
           .type(INTERFACE)
           .gmms(data.gmms())
           .root(root)
-          .addLeaf(root.branches().get(0), ruptureSet)
+          .addLeaf(root.get(0), ruptureSet)
           .build();
 
       return tree;
@@ -578,7 +575,7 @@ abstract class ModelLoader {
       Optional<LogicTree<Path>> tree = readSourceTree(dir);
       if (tree.isPresent()) {
 
-        LogicTree<Path> children = tree.get();
+        LogicTree<Path> children = tree.orElseThrow();
         treeBuilder.addBranches(branch, children);
 
         for (Branch<Path> child : children) {
@@ -638,7 +635,7 @@ abstract class ModelLoader {
           .gmms(data.gmms())
           .root(rateTree);
 
-      for (Branch<Path> rateBranch : rateTree.branches()) {
+      for (Branch<Path> rateBranch : rateTree) {
 
         /* Set rates file. */
         data.gridRateFile(rateBranch.value());
@@ -692,7 +689,7 @@ abstract class ModelLoader {
       Optional<LogicTree<Path>> tree = readSourceTree(dir);
       if (tree.isPresent()) {
 
-        LogicTree<Path> children = tree.get();
+        LogicTree<Path> children = tree.orElseThrow();
         treeBuilder.addBranches(branch, children);
 
         for (Branch<Path> child : children) {
@@ -706,7 +703,7 @@ abstract class ModelLoader {
         LogicTree<Path> rateTree = data.gridRateTree().orElseThrow();
         treeBuilder.addBranches(branch, rateTree);
 
-        for (Branch<Path> rateBranch : rateTree.branches()) {
+        for (Branch<Path> rateBranch : rateTree) {
 
           /* Set rates file. */
           data.gridRateFile(rateBranch.value());
@@ -729,7 +726,7 @@ abstract class ModelLoader {
           treeBuilder.addBranches(rateBranch, grsTree);
 
           for (int j = 0; j < grsTree.size(); j++) {
-            treeBuilder.addLeaf(grsTree.branches().get(j), ruptureSets.get(j));
+            treeBuilder.addLeaf(grsTree.get(j), ruptureSets.get(j));
           }
         }
       }
@@ -777,7 +774,7 @@ abstract class ModelLoader {
           .gmms(data.gmms())
           .root(rateTree);
 
-      for (Branch<Path> rateBranch : rateTree.branches()) {
+      for (Branch<Path> rateBranch : rateTree) {
 
         /* Set rates file; slab needs to be resolved to local dir. */
         data.gridRateFile(dir.resolve(rateBranch.value()));
@@ -831,7 +828,7 @@ abstract class ModelLoader {
       Optional<LogicTree<Path>> tree = readSourceTree(dir);
       if (tree.isPresent()) {
 
-        LogicTree<Path> children = tree.get();
+        LogicTree<Path> children = tree.orElseThrow();
         treeBuilder.addBranches(branch, children);
 
         for (Branch<Path> child : children) {
@@ -845,7 +842,7 @@ abstract class ModelLoader {
         LogicTree<Path> rateTree = data.gridRateTree().orElseThrow();
         treeBuilder.addBranches(branch, rateTree);
 
-        for (Branch<Path> rateBranch : rateTree.branches()) {
+        for (Branch<Path> rateBranch : rateTree) {
 
           /* Set rates file; slab needs to be resolved to local dir. */
           data.gridRateFile(dir.resolve(rateBranch.value()));
@@ -869,7 +866,7 @@ abstract class ModelLoader {
           treeBuilder.addBranches(rateBranch, srsTree);
 
           for (int j = 0; j < srsTree.size(); j++) {
-            treeBuilder.addLeaf(srsTree.branches().get(j), ruptureSets.get(j));
+            treeBuilder.addLeaf(srsTree.get(j), ruptureSets.get(j));
           }
         }
       }
@@ -935,7 +932,7 @@ abstract class ModelLoader {
           .type(ZONE)
           .gmms(data.gmms())
           .root(root)
-          .addLeaf(root.branches().get(0), ruptureSet)
+          .addLeaf(root.get(0), ruptureSet)
           .build();
 
       return tree;
@@ -981,7 +978,7 @@ abstract class ModelLoader {
       Optional<LogicTree<Path>> tree = readSourceTree(dir);
       if (tree.isPresent()) {
 
-        LogicTree<Path> children = tree.get();
+        LogicTree<Path> children = tree.orElseThrow();
         treeBuilder.addBranches(branch, children);
 
         for (Branch<Path> child : children) {
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/SourceFeature.java b/src/main/java/gov/usgs/earthquake/nshmp/model/SourceFeature.java
index e628b49184be841c46cc72e693d6fd58c910083b..f1ed93eab7ace43aa14f07bc9156cf2886e75455 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/model/SourceFeature.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/model/SourceFeature.java
@@ -81,9 +81,9 @@ public abstract class SourceFeature {
     country = props.getString(Key.COUNTRY).orElse("US");
     state = props.getString(Key.STATE);
     states = List.of(props.get(Key.STATES, String[].class).orElse(
-        state.isPresent() ? new String[] { state.get() } : new String[0]));
+        state.isPresent() ? new String[] { state.orElseThrow() } : new String[0]));
     checkArgument(
-        state.isEmpty() || states.get(0).equals(state.get()),
+        state.isEmpty() || states.get(0).equals(state.orElseThrow()),
         "Inconsistent 'state' and 'states' fields");
     this.source = feature;
   }
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/ZoneRuptureSet.java b/src/main/java/gov/usgs/earthquake/nshmp/model/ZoneRuptureSet.java
index c6260128ff9031d3a354f981d59e6867a0f60c01..493fa0890168c11f95193c0bb5542b3cdc5a24b0 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/model/ZoneRuptureSet.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/model/ZoneRuptureSet.java
@@ -156,8 +156,8 @@ class ZoneRuptureSet implements RuptureSet {
       checkNotNull(feature, "%s feature", label);
       checkNotNull(ratesPath, "%s rates file path", label);
       if (mfdTreeKey.isPresent()) {
-        String key = mfdTreeKey.get();
-        mfdPropertiesTree = data.mfdMap().get().get(key);
+        String key = mfdTreeKey.orElseThrow();
+        mfdPropertiesTree = data.mfdMap().orElseThrow().get(key);
       }
       checkNotNull(mfdPropertiesTree, "%s MFD tree not set", label);
       initLocationAndRateLists();
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/tree/Branch.java b/src/main/java/gov/usgs/earthquake/nshmp/tree/Branch.java
index 81c75936cb0d77fea31206395f98d80df63df85c..67f586ea90332400d6fdbd134ee45a84da21c0a4 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/tree/Branch.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/tree/Branch.java
@@ -2,6 +2,9 @@ package gov.usgs.earthquake.nshmp.tree;
 
 /**
  * A logic tree branch.
+ *
+ * @param <T> the type of value stored in a branch
+ * @author U.S. Geological Survey
  */
 public interface Branch<T> {
 
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/tree/EnumBranch.java b/src/main/java/gov/usgs/earthquake/nshmp/tree/EnumBranch.java
index deb8bce9a2854cb6cfc6f06ca72887b575b12229..18cdca10ed3102f684fc58b36cee62877a411491 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/tree/EnumBranch.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/tree/EnumBranch.java
@@ -3,7 +3,11 @@ package gov.usgs.earthquake.nshmp.tree;
 import static com.google.common.base.Preconditions.checkNotNull;
 import static gov.usgs.earthquake.nshmp.data.DoubleData.checkWeight;
 
-/* Branch with id and value derived from an enum. */
+/**
+ * Branch with id and value derived from an enum.
+ *
+ * @author U.S. Geological Survey
+ */
 class EnumBranch<E extends Enum<E>, T> implements Branch<T> {
 
   private final E id;
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/tree/EnumValueBranch.java b/src/main/java/gov/usgs/earthquake/nshmp/tree/EnumValueBranch.java
index 375576ff1908b8d39306976c2a1ef1a43f417614..6515c2e4848069954ead477a92af21279a6ff09a 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/tree/EnumValueBranch.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/tree/EnumValueBranch.java
@@ -3,7 +3,11 @@ package gov.usgs.earthquake.nshmp.tree;
 import static com.google.common.base.Preconditions.checkNotNull;
 import static gov.usgs.earthquake.nshmp.data.DoubleData.checkWeight;
 
-/* Branch with id and value backed by an enum. */
+/**
+ * Branch with id and value backed by an enum.
+ *
+ * @author U.S. Geological Survey
+ */
 class EnumValueBranch<E extends Enum<E>> implements Branch<E> {
 
   private final E id;
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/tree/GroupBranch.java b/src/main/java/gov/usgs/earthquake/nshmp/tree/GroupBranch.java
index cb886fa62accd04a57c995ee5270987803cfaa10..7c429648d8f62ef4dca8d09605acead6eb29b256 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/tree/GroupBranch.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/tree/GroupBranch.java
@@ -4,7 +4,11 @@ import static com.google.common.base.Preconditions.checkNotNull;
 import static gov.usgs.earthquake.nshmp.data.DoubleData.checkIsPositiveAndReal;
 import static gov.usgs.earthquake.nshmp.tree.RegularBranch.checkId;
 
-/* Logic group branch. */
+/**
+ * Logic group branch.
+ *
+ * @author U.S. Geological Survey
+ */
 class GroupBranch<T> implements Branch<T> {
 
   private final String id;
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/tree/LogicGroup.java b/src/main/java/gov/usgs/earthquake/nshmp/tree/LogicGroup.java
index 844177ca1b43d8acea7d16e2ef726b261a7662ad..94ab66b19e9efb13fca8827e147c1e452affd8eb 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/tree/LogicGroup.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/tree/LogicGroup.java
@@ -2,6 +2,7 @@ package gov.usgs.earthquake.nshmp.tree;
 
 import static com.google.common.base.Preconditions.checkState;
 
+import java.util.AbstractList;
 import java.util.Iterator;
 import java.util.List;
 
@@ -15,20 +16,22 @@ import java.util.List;
  * {@code UnsupportedOperationException}. A logic group branch <i>may</i>
  * include a scale factor that is used in lieu of a branch weight; in most
  * real-world cases, the scale value is one.
+ *
+ * @param <T> the type of value stored in a branch
+ * @author U.S. Geological Survey
  */
-public final class LogicGroup<T> implements LogicTree<T> {
+public final class LogicGroup<T> extends AbstractList<Branch<T>> implements LogicTree<T> {
 
   private static final String NAME = "logic-group";
   private final List<Branch<T>> branches;
 
   LogicGroup(List<Branch<T>> branches) {
-    this.branches = branches;
+    this.branches = List.copyOf(branches);
   }
 
-  /** Return an immutable list of the branches in this group. */
   @Override
-  public List<Branch<T>> branches() {
-    return branches;
+  public Branch<T> get(int index) {
+    return branches.get(index);
   }
 
   /** Return the name of this group. */
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/tree/LogicTree.java b/src/main/java/gov/usgs/earthquake/nshmp/tree/LogicTree.java
index f56da83c46d56b5f1d75b31c72fa8a839cfb045f..9889585d53cc6a46b6d86111085910f96604e089 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/tree/LogicTree.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/tree/LogicTree.java
@@ -14,21 +14,20 @@ import gov.usgs.earthquake.nshmp.data.DoubleData;
 /**
  * A logic tree.
  *
- * <p>Factory methods are provided that return builders for specialized branch
- * implementations.
+ * <p>Logic trees are implemented as immutable lists of branches, where each
+ * branch has an ID, value and weight. Factory methods are provided that return
+ * builders for specialized branch implementations.
  *
  * @param <T> the type of value stored in the branches of the tree
  * @author U.S. Geological Survey
  */
-public interface LogicTree<T> extends Iterable<Branch<T>> {
-
-  /** Return an immutable list of the branches in this tree. */
-  List<Branch<T>> branches();
+public interface LogicTree<T> extends List<Branch<T>> {
 
   /** Return the name of this tree. */
   String name();
 
   /** Return the number of branches in this tree. */
+  @Override
   int size();
 
   /**
@@ -169,7 +168,6 @@ public interface LogicTree<T> extends Iterable<Branch<T>> {
       cumulativeWeights = DoubleData.round(
           RegularTree.WEIGHT_SCALE,
           DoubleData.cumulate(weights));
-      branches = List.copyOf(branches);
       return new RegularTree<T>(this);
     }
 
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/tree/RegularBranch.java b/src/main/java/gov/usgs/earthquake/nshmp/tree/RegularBranch.java
index 1aa2cacb956611356641fcbc39e2e9bba0abc297..cd903fbf81c05b130976e8bba232a46b172ed9aa 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/tree/RegularBranch.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/tree/RegularBranch.java
@@ -6,7 +6,11 @@ import static gov.usgs.earthquake.nshmp.data.DoubleData.checkWeight;
 
 import gov.usgs.earthquake.nshmp.Maths;
 
-/* Basic logic tree branch. */
+/**
+ * Basic logic tree branch.
+ *
+ * @author U.S. Geological Survey
+ */
 class RegularBranch<T> implements Branch<T> {
 
   private final String id;
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/tree/RegularTree.java b/src/main/java/gov/usgs/earthquake/nshmp/tree/RegularTree.java
index 18ea0aaacbb659d3a1610d9e7bc1893a4ab84f67..18233545bc2fb7bfa924669002d4fb54836cc32f 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/tree/RegularTree.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/tree/RegularTree.java
@@ -1,15 +1,23 @@
 package gov.usgs.earthquake.nshmp.tree;
 
+import java.util.AbstractList;
 import java.util.ArrayList;
-import java.util.Iterator;
+import java.util.Arrays;
 import java.util.List;
 
 import gov.usgs.earthquake.nshmp.Text;
 
-/* Basic logic tree implementation. */
-class RegularTree<T> implements LogicTree<T> {
+/**
+ * Basic logic tree implementation.
+ *
+ * @author U.S. Geological Survey
+ */
+class RegularTree<T> extends AbstractList<Branch<T>> implements LogicTree<T> {
 
-  // TODO document in addBranch and cumulate
+  /*
+   * Scale used for rounding cumulative weights to eliminate double precision
+   * rounding errors when summing weights.
+   */
   static final int WEIGHT_SCALE = 12;
 
   private final String name;
@@ -18,8 +26,25 @@ class RegularTree<T> implements LogicTree<T> {
 
   RegularTree(Builder<T> builder) {
     this.name = builder.name;
-    this.branches = builder.branches;
-    this.cumulativeWeights = builder.cumulativeWeights;
+    this.branches = List.copyOf(builder.branches);
+    this.cumulativeWeights = Arrays.copyOf(
+        builder.cumulativeWeights,
+        builder.cumulativeWeights.length);
+  }
+
+  @Override
+  public Branch<T> get(int index) {
+    return branches.get(index);
+  }
+
+  @Override
+  public String name() {
+    return name;
+  }
+
+  @Override
+  public int size() {
+    return branches.size();
   }
 
   @Override
@@ -46,26 +71,6 @@ class RegularTree<T> implements LogicTree<T> {
     return toString(name, branches);
   }
 
-  @Override
-  public Iterator<Branch<T>> iterator() {
-    return branches.iterator();
-  }
-
-  @Override
-  public List<Branch<T>> branches() {
-    return branches;
-  }
-
-  @Override
-  public String name() {
-    return name;
-  }
-
-  @Override
-  public int size() {
-    return branches.size();
-  }
-
   static <T> String toString(String name, List<Branch<T>> branches) {
     StringBuilder sb = new StringBuilder(name);
     for (int i = 0; i < branches.size(); i++) {
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/tree/StringValueBranch.java b/src/main/java/gov/usgs/earthquake/nshmp/tree/StringValueBranch.java
index 5f4c882d0cb238854c7aa9be26267f0fdcced404..0b7baac604a5e5631a99ffd15238d87a962019b0 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/tree/StringValueBranch.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/tree/StringValueBranch.java
@@ -3,7 +3,11 @@ package gov.usgs.earthquake.nshmp.tree;
 import static gov.usgs.earthquake.nshmp.data.DoubleData.checkWeight;
 import static gov.usgs.earthquake.nshmp.tree.RegularBranch.checkId;
 
-/* Branch with identical id and value string. */
+/**
+ * Branch with identical id and value string.
+ *
+ * @author U.S. Geological Survey
+ */
 class StringValueBranch implements Branch<String> {
 
   private final String id;
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/tree/package-info.java b/src/main/java/gov/usgs/earthquake/nshmp/tree/package-info.java
index f0298098f1be4f549164030b6626a089432917b3..cd5d09c7e0dc09c0f0cc410f5b0c6492305179c0 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/tree/package-info.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/tree/package-info.java
@@ -1,4 +1,6 @@
 /**
- * Logic tree classes and utilties.
+ * Logic tree representations.
+ *
+ * @author U.S. Geological Survey
  */
 package gov.usgs.earthquake.nshmp.tree;
diff --git a/src/test/java/gov/usgs/earthquake/nshmp/data/MutableXySequenceTests.java b/src/test/java/gov/usgs/earthquake/nshmp/data/MutableXySequenceTests.java
index 2b1543c33d2140f54ec022b1a1bf58de4a79caac..52cd9205cbee31e5a8f738121ff5378b54effa15 100644
--- a/src/test/java/gov/usgs/earthquake/nshmp/data/MutableXySequenceTests.java
+++ b/src/test/java/gov/usgs/earthquake/nshmp/data/MutableXySequenceTests.java
@@ -101,7 +101,7 @@ class MutableXySequenceTests {
   final void streamTest() {
     // check that stream consists of mutable points
     assertEquals(
-        xy.stream().findFirst().get().getClass(),
+        xy.stream().findFirst().orElseThrow().getClass(),
         MutableArrayXySequence.MutablePoint.class);
   }
 
diff --git a/src/test/java/gov/usgs/earthquake/nshmp/geo/LocationListTests.java b/src/test/java/gov/usgs/earthquake/nshmp/geo/LocationListTests.java
index 829fc63d554ec23ec65f0598685b47f387bc8ca3..a8b17c315b2c36bc3190b284ed6a77ab98ddb75b 100644
--- a/src/test/java/gov/usgs/earthquake/nshmp/geo/LocationListTests.java
+++ b/src/test/java/gov/usgs/earthquake/nshmp/geo/LocationListTests.java
@@ -134,18 +134,29 @@ class LocationListTests {
 
     LocationList sl0 = subLists.get(0);
     assertEquals(sl0.first(), loc1);
-    assertEquals(sl0.last(), Location.create(0.1, 0.0));
+
+    // TODO revisit: this changed when we removed
+    // 5 decimal rounding in Locations
+
+    // assertEquals(sl0.last(), Location.create(0.1, 0.0));
+    assertEquals(sl0.last(), Location.create(0.09999999999999999, 6.1232308869976715E-18));
 
     LocationList sl3 = subLists.get(3);
-    assertEquals(sl3.first(), Location.create(0.3, 0.0));
-    assertEquals(sl3.last(), Location.create(0.4, 0.0));
+    // assertEquals(sl3.first(), Location.create(0.3, 0.0));
+    assertEquals(sl3.first(), Location.create(0.3, 1.8369664682372418E-17));
+
+    // assertEquals(sl3.last(), Location.create(0.4, 0.0));
+    assertEquals(sl3.last(), Location.create(0.39999999999999997, 2.4492867590777903E-17));
 
     LocationList sl6 = subLists.get(6);
-    assertEquals(sl6.first(), Location.create(0.6, 0.0));
-    assertEquals(sl6.last(), Location.create(0.7, 0.0));
+    // assertEquals(sl6.first(), Location.create(0.6, 0.0));
+    assertEquals(sl6.first(), Location.create(0.6000000000000001, 6.123230886997674E-18));
+    // assertEquals(sl6.last(), Location.create(0.7, 0.0));
+    assertEquals(sl6.last(), Location.create(0.7, 1.2246452447783745E-17));
 
     LocationList sl9 = subLists.get(9);
-    assertEquals(sl9.first(), Location.create(0.9, 0.0));
+    // assertEquals(sl9.first(), Location.create(0.9, 0.0));
+    assertEquals(sl9.first(), Location.create(0.8999999999999999, 2.449286759077791E-17));
     assertEquals(sl9.last(), loc3);
 
     // length greater than list
@@ -176,8 +187,11 @@ class LocationListTests {
     assertSame(resampled.first(), pp1);
     assertSame(resampled.last(), pp2);
     Location mid = resampled.get(7);
-    assertEquals(mid.longitude, 0.49997, 0.0);
-    assertEquals(mid.latitude, 0.50002, 0.0);
+
+    // TODO revisit: this changed when we removed
+    // 5 decimal rounding in Locations
+    assertEquals(0.49996509384838933, mid.longitude, 0.0); // was 0.49997
+    assertEquals(0.5000222122727477, mid.latitude, 0.0); // 0.50002
 
     // singleton
     locs = LocationList.of(pp1);
@@ -282,9 +296,14 @@ class LocationListTests {
     LocationList locs = LocationList.of(pp1, pp2);
     LocationVector v = LocationVector.create(135 * Maths.TO_RADIANS, 5.0, 5.0);
     LocationList transLoc = locs.translate(v);
-    Location pp1trans = Location.create(0.0318, -0.0318, 5.0);
-    Location pp2trans = Location.create(1.0318, 0.9682, 5.0);
+    // Location pp1trans = Location.create(0.0318, -0.0318, 5.0);
+    // Location pp2trans = Location.create(1.0318, 0.9682, 5.0);
+
+    Location pp1trans = Location.create(0.031795787631496104, -0.03179578273558637, 5.0);
+    Location pp2trans = Location.create(1.031800322985746, 0.9682040632704144, 5.0);
 
+    // TODO revisit: this changed when we removed
+    // 5 decimal rounding in Locations
     assertEquals(pp1trans, transLoc.get(0));
     assertEquals(pp2trans, transLoc.get(1));
   }
diff --git a/src/test/java/gov/usgs/earthquake/nshmp/geo/LocationsTests.java b/src/test/java/gov/usgs/earthquake/nshmp/geo/LocationsTests.java
index ffcb3868d6b6c58ccb357fee3d2297792f3f68a4..4846ce7e69ac5be5818e94df50edb05fef170a0c 100644
--- a/src/test/java/gov/usgs/earthquake/nshmp/geo/LocationsTests.java
+++ b/src/test/java/gov/usgs/earthquake/nshmp/geo/LocationsTests.java
@@ -1424,8 +1424,8 @@ class LocationsTests {
    * (flat-earth approximation) solution in which longitude is scaled by the
    * cosine of latitude; it is only appropriate for use over short distances
    * (e.g. &lt;200 km).<br/> <br/> <b>Note:</b> This method does <i>NOT</i>
-   * support values spanning &#177;180&#176; and results for such input values
-   * are not guaranteed.
+   * support values spanning ±180° and results for such input values are not
+   * guaranteed.
    *
    * @param p1 the first <code>Location</code> point on the line
    * @param p2 the second <code>Location</code> point on the line
diff --git a/src/test/java/gov/usgs/earthquake/nshmp/geo/json/GeoJsonTest.java b/src/test/java/gov/usgs/earthquake/nshmp/geo/json/GeoJsonTest.java
index cd7076521c2dfa549ff24889c9d915633cffffe4..467dee4a9c744a1108f63127754b96ac0b10ca58 100644
--- a/src/test/java/gov/usgs/earthquake/nshmp/geo/json/GeoJsonTest.java
+++ b/src/test/java/gov/usgs/earthquake/nshmp/geo/json/GeoJsonTest.java
@@ -144,20 +144,20 @@ class GeoJsonTest {
 
     List<Feature> features = fc.features();
     assertEquals(3, features.size());
-    checkBboxIsEqualCopy(BBOX, fc.bbox().get());
+    checkBboxIsEqualCopy(BBOX, fc.bbox().orElseThrow());
 
     Feature f1 = features.get(0);
-    assertEquals("featureId", f1.idAsString().get());
+    assertEquals("featureId", f1.idAsString().orElseThrow());
     assertEquals(Type.POINT, f1.type());
     checkBboxIsOptionalEmpty(f1.bbox());
     Properties f1Props = f1.properties();
-    assertEquals("#ff0080", f1Props.getString("color").get());
+    assertEquals("#ff0080", f1Props.getString("color").orElseThrow());
     assertEquals(1.0, f1Props.getDouble("id").getAsDouble(), 0.0);
     assertEquals(1, f1Props.getInt("id").getAsInt());
 
     Feature f3 = features.get(2);
     assertEquals(3, f3.idAsInt().getAsInt());
-    checkBboxIsEqualCopy(BBOX, f3.bbox().get());
+    checkBboxIsEqualCopy(BBOX, f3.bbox().orElseThrow());
   }
 
   private void checkBboxIsOptionalEmpty(Optional<double[]> actualBbox) {
@@ -314,13 +314,13 @@ class GeoJsonTest {
       assertEquals(Optional.empty(), f.idAsString());
       f = GeoJson.from(Feature.point(TEST_POINT).id("3").build().toJson()).toFeature();
       assertEquals(3, f.idAsInt().getAsInt());
-      assertEquals("3", f.idAsString().get());
+      assertEquals("3", f.idAsString().orElseThrow());
     }
 
     @Test
     void testBbox_shouldReturnEqualCopyOrOptionalEmpty() {
       // Feature bbox
-      checkBboxIsEqualCopy(BBOX, TEST_LINE_STRING_FEATURE.bbox().get());
+      checkBboxIsEqualCopy(BBOX, TEST_LINE_STRING_FEATURE.bbox().orElseThrow());
       // Missing bbox is Optional.empty
       checkBboxIsOptionalEmpty(dummyFeatureLineString().bbox());
     }
@@ -339,7 +339,7 @@ class GeoJsonTest {
 
       Feature copy = Feature.copyOf(FEATURE_FOR_COPY).build();
 
-      assertEquals("featureId", copy.idAsString().get());
+      assertEquals("featureId", copy.idAsString().orElseThrow());
       assertEquals(TEST_POINT, copy.asPoint());
       assertTrue(copy.bbox().isEmpty());
       assertTrue(copy.properties().map().isEmpty());
@@ -386,7 +386,7 @@ class GeoJsonTest {
       // make a modified copy
       Feature targetCopyMod = Feature.copyOf(FEATURE_FOR_COPY)
           .properties(Properties.fromFeature(FEATURE_FOR_COPY).build())
-          .id(FEATURE_FOR_COPY.idAsString().get() + "2")
+          .id(FEATURE_FOR_COPY.idAsString().orElseThrow() + "2")
           .build();
 
       // test logic branches
@@ -540,7 +540,7 @@ class GeoJsonTest {
     @Test
     void testBbox_shouldReturnEqualCopyOrOptionalEmpty() {
       // Feature Collection bbox
-      checkBboxIsEqualCopy(BBOX, TEST_FEATURE_COLLECTION.bbox().get());
+      checkBboxIsEqualCopy(BBOX, TEST_FEATURE_COLLECTION.bbox().orElseThrow());
       // Missing bbox is Optional.empty
       checkBboxIsOptionalEmpty(TEST_FEATURE_COLLECTION.features().get(0).bbox());
     }
diff --git a/src/test/java/gov/usgs/earthquake/nshmp/geo/json/PropertiesTests.java b/src/test/java/gov/usgs/earthquake/nshmp/geo/json/PropertiesTests.java
index 4073cd242ad19fb3284abe4b43fb5ee36052aeef..5c0aad267c304a884eb7555a281c5db8e7d44a50 100644
--- a/src/test/java/gov/usgs/earthquake/nshmp/geo/json/PropertiesTests.java
+++ b/src/test/java/gov/usgs/earthquake/nshmp/geo/json/PropertiesTests.java
@@ -89,7 +89,7 @@ class PropertiesTests {
     assertTrue(propAbsent.getString(key).isEmpty());
     assertTrue(propNull.getString(key).isEmpty());
     assertTrue(propObject.getString(key).isEmpty());
-    assertEquals("test1234", propString.getString(key).get());
+    assertEquals("test1234", propString.getString(key).orElseThrow());
   }
 
   @Test
@@ -98,11 +98,11 @@ class PropertiesTests {
     Properties props = TEST_FEATURE_PROPERTIES.properties();
 
     assertTrue(props.getString(DESCRIPTION.toString()).isPresent());
-    assertTrue(props.getBoolean("boolean").get());
+    assertTrue(props.getBoolean("boolean").orElseThrow());
     assertEquals(42, props.getInt("id").getAsInt());
     assertEquals(0.5, props.getDouble(FILL_OPACITY.toString()).getAsDouble(), 0.0);
-    assertEquals("description", props.getString(DESCRIPTION.toString()).get());
-    TestObject actual = props.get("object", TestObject.class).get();
+    assertEquals("description", props.getString(DESCRIPTION.toString()).orElseThrow());
+    TestObject actual = props.get("object", TestObject.class).orElseThrow();
     TestObject expected = new TestObject("test", ImmutableList.of(1.0, 2.0));
     assertEquals(expected.getClass(), actual.getClass());
     assertEquals(expected, actual);
diff --git a/src/test/java/gov/usgs/earthquake/nshmp/mfd/IncrementalMfdBuilderTest.java b/src/test/java/gov/usgs/earthquake/nshmp/mfd/IncrementalMfdBuilderTest.java
deleted file mode 100644
index 894f499aa2b6f311782ad612b7186fad91ec4a89..0000000000000000000000000000000000000000
--- a/src/test/java/gov/usgs/earthquake/nshmp/mfd/IncrementalMfdBuilderTest.java
+++ /dev/null
@@ -1,141 +0,0 @@
-/**
- *
- */
-package gov.usgs.earthquake.nshmp.mfd;
-
-/**
- * @author U.S. Geological Survey
- *
- */
-class IncrementalMfdBuilderTest {
-
-  // /**
-  // * @throws java.lang.Exception
-  // */
-  // @BeforeAll
-  // public static void setUpBeforeClass() throws Exception {}
-  //
-  // /**
-  // * @throws java.lang.Exception
-  // */
-  // @AfterAll
-  // public static void tearDownAfterClass() throws Exception {}
-  //
-  // /**
-  // * @throws java.lang.Exception
-  // */
-  // @Before
-  // public void setUp() throws Exception {}
-  //
-  // /**
-  // * @throws java.lang.Exception
-  // */
-  // @After
-  // public void tearDown() throws Exception {}
-  //
-  // /**
-  // * Test method for {@link
-  // gov.usgs.earthquake.nshmp.mfd.IncrementalMfdBuilder#builder()}.
-  // */
-  // @Test
-  // public final void testBuilder() {
-  // fail("Not yet implemented"); // TODO
-  // }
-  //
-  // /**
-  // * Test method for {@link java.lang.Object#Object()}.
-  // */
-  // @Test
-  // public final void testObject() {
-  // fail("Not yet implemented"); // TODO
-  // }
-  //
-  // /**
-  // * Test method for {@link java.lang.Object#getClass()}.
-  // */
-  // @Test
-  // public final void testGetClass() {
-  // fail("Not yet implemented"); // TODO
-  // }
-  //
-  // /**
-  // * Test method for {@link java.lang.Object#hashCode()}.
-  // */
-  // @Test
-  // public final void testHashCode() {
-  // fail("Not yet implemented"); // TODO
-  // }
-  //
-  // /**
-  // * Test method for {@link java.lang.Object#equals(java.lang.Object)}.
-  // */
-  // @Test
-  // public final void testEquals() {
-  // fail("Not yet implemented"); // TODO
-  // }
-  //
-  // /**
-  // * Test method for {@link java.lang.Object#clone()}.
-  // */
-  // @Test
-  // public final void testClone() {
-  // fail("Not yet implemented"); // TODO
-  // }
-  //
-  // /**
-  // * Test method for {@link java.lang.Object#toString()}.
-  // */
-  // @Test
-  // public final void testToString() {
-  // fail("Not yet implemented"); // TODO
-  // }
-  //
-  // /**
-  // * Test method for {@link java.lang.Object#notify()}.
-  // */
-  // @Test
-  // public final void testNotify() {
-  // fail("Not yet implemented"); // TODO
-  // }
-  //
-  // /**
-  // * Test method for {@link java.lang.Object#notifyAll()}.
-  // */
-  // @Test
-  // public final void testNotifyAll() {
-  // fail("Not yet implemented"); // TODO
-  // }
-  //
-  // /**
-  // * Test method for {@link java.lang.Object#wait(long)}.
-  // */
-  // @Test
-  // public final void testWaitLong() {
-  // fail("Not yet implemented"); // TODO
-  // }
-  //
-  // /**
-  // * Test method for {@link java.lang.Object#wait(long, int)}.
-  // */
-  // @Test
-  // public final void testWaitLongInt() {
-  // fail("Not yet implemented"); // TODO
-  // }
-  //
-  // /**
-  // * Test method for {@link java.lang.Object#wait()}.
-  // */
-  // @Test
-  // public final void testWait() {
-  // fail("Not yet implemented"); // TODO
-  // }
-  //
-  // /**
-  // * Test method for {@link java.lang.Object#finalize()}.
-  // */
-  // @Test
-  // public final void testFinalize() {
-  // fail("Not yet implemented"); // TODO
-  // }
-
-}
diff --git a/src/test/java/gov/usgs/earthquake/nshmp/mfd/MfdTests.java b/src/test/java/gov/usgs/earthquake/nshmp/mfd/MfdTests.java
new file mode 100644
index 0000000000000000000000000000000000000000..6c501842245f6e20e32fca3c0a8a9032c78bac6d
--- /dev/null
+++ b/src/test/java/gov/usgs/earthquake/nshmp/mfd/MfdTests.java
@@ -0,0 +1,393 @@
+package gov.usgs.earthquake.nshmp.mfd;
+
+import static org.junit.jupiter.api.Assertions.assertArrayEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertThrows;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+
+import org.junit.jupiter.api.Test;
+
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
+import com.google.gson.JsonObject;
+import com.google.gson.JsonParser;
+
+import gov.usgs.earthquake.nshmp.Text;
+import gov.usgs.earthquake.nshmp.data.XySequence;
+import gov.usgs.earthquake.nshmp.mfd.Mfd.Builder;
+import gov.usgs.earthquake.nshmp.mfd.Mfd.Properties;
+import gov.usgs.earthquake.nshmp.mfd.Mfd.Properties.GutenbergRichter;
+import gov.usgs.earthquake.nshmp.mfd.Mfd.Properties.Single;
+import gov.usgs.earthquake.nshmp.mfd.Mfd.Properties.TaperedGr;
+import gov.usgs.earthquake.nshmp.mfd.Mfd.Type;
+
+class MfdTests {
+
+  // TODO add test to see if we can slip in a negative a-value
+  // via GR properties
+
+  static final double[] M = { 5.05, 5.15, 5.25, 5.35, 5.45 };
+  static final double[] R = { 0.1, 0.08, 0.06, 0.04, 0.02 };
+
+  @Test
+  void testCreate() {
+    /* Covers array and xy constructors. */
+    Mfd mfd = Mfd.create(M, R);
+    assertArrayEquals(M, mfd.data().xValues().toArray());
+    assertArrayEquals(R, mfd.data().yValues().toArray());
+    assertEquals(Type.INCR, mfd.properties().type());
+  }
+
+  @Test
+  void testBuilderFrom() {
+    Mfd mfd = Builder.from(M, R).build();
+    assertEquals(Type.INCR, mfd.properties().type());
+    assertArrayEquals(M, mfd.data().xValues().toArray());
+    assertArrayEquals(R, mfd.data().yValues().toArray());
+
+    mfd = Builder.from(XySequence.create(M, R)).build();
+    assertEquals(Type.INCR, mfd.properties().type());
+    assertArrayEquals(M, mfd.data().xValues().toArray());
+    assertArrayEquals(R, mfd.data().yValues().toArray());
+
+    Mfd gr = Mfd.newGutenbergRichterBuilder(1.0, 0.1, 5.0, 5.5).build();
+    mfd = Builder.from(gr).build();
+    assertEquals(Type.GR, mfd.properties().type());
+    assertArrayEquals(GR_M, mfd.data().xValues().toArray());
+    assertArrayEquals(GR_R, mfd.data().yValues().toArray());
+  }
+
+  @Test
+  void mfdToString() {
+    Mfd mfd = Builder.from(M, R).build();
+    assertEquals("MFD:" + mfd.properties() + Text.NEWLINE + mfd.data(), mfd.toString());
+  }
+
+  @Test
+  void testPropsToBuilderException() {
+    /* Base INCR builder may not support toBuilder */
+    Properties props = new Properties();
+    assertThrows(UnsupportedOperationException.class, () -> {
+      props.toBuilder();
+    });
+  }
+
+  @Test
+  void testPropsToString() {
+
+  }
+
+  static final double[] GAUSS_M = {
+      6.76,
+      6.808,
+      6.856,
+      6.904,
+      6.952,
+      7.0,
+      7.048,
+      7.096,
+      7.144,
+      7.192,
+      7.24 };
+
+  static final double[] GAUSS_R = {
+      0.022190548492442928,
+      0.04558899978527847,
+      0.07981140824009195,
+      0.11906462996609925,
+      0.1513608096777362,
+      0.16396720767670236,
+      0.1513608096777362,
+      0.11906462996609925,
+      0.07981140824009195,
+      0.04558899978527847,
+      0.022190548492442928 };
+
+  @Test
+  void testSingle() {
+
+    /* Factory builder; covers props.toBuilder() */
+    Mfd mfd = Mfd.newSingleBuilder(6.0).build();
+    XySequence xy = mfd.data();
+    assertTrue(xy.size() == 1);
+    assertEquals(6.0, xy.min().x());
+    assertEquals(1.0, xy.min().y());
+
+    /* Factory builder; covers props.toGaussianBuilder() */
+    mfd = Mfd.newGaussianBuilder(7.0, 11, 0.12, 2).build();
+    xy = mfd.data();
+    assertTrue(xy.size() == 11);
+    assertArrayEquals(GAUSS_M, xy.xValues().toArray());
+    assertArrayEquals(GAUSS_R, xy.yValues().toArray());
+    assertEquals(1.0, xy.yValues().sum());
+
+    /* Properties */
+    Properties props = new Single(5.0, 2.0);
+    Single singleProps = props.getAsSingle(); // cover getAs
+    assertEquals(5.0, singleProps.magnitude());
+    assertEquals(2.0, singleProps.rate());
+    singleProps = new Single(7.0);
+    assertEquals(7.0, singleProps.magnitude());
+    assertEquals(1.0, singleProps.rate());
+
+    /* props.toString() */
+    assertEquals(
+        singleProps.type() +
+            " {magnitude=" + singleProps.magnitude() +
+            ", rate=" + singleProps.rate() + "}",
+        singleProps.toString());
+
+    // TODO changing rate=0 to 1 is probably not correct
+    // and this check should/will go away
+    singleProps = new Single(7.0, 0.0);
+    mfd = singleProps.toBuilder().build();
+    xy = mfd.data();
+    assertEquals(7.0, xy.min().x());
+    assertEquals(1.0, xy.min().y());
+  }
+
+  static final double[] GR_M = {
+      5.05,
+      5.15,
+      5.25,
+      5.35,
+      5.45 };
+
+  static final double[] GR_R = {
+      8.912509381337459E-5,
+      7.079457843841373E-5,
+      5.623413251903491E-5,
+      4.466835921509635E-5,
+      3.5481338923357534E-5 };
+
+  @Test
+  void testGutenbergRichter() {
+
+    /* Factory builder; covers props.toBuilder() */
+    Mfd mfd = Mfd.newGutenbergRichterBuilder(1.0, 0.1, 5.0, 5.5).build();
+    XySequence xy = mfd.data();
+    assertTrue(xy.size() == 5);
+    assertArrayEquals(GR_M, xy.xValues().toArray());
+    assertArrayEquals(GR_R, xy.yValues().toArray());
+
+    /* Properties */
+    Properties props = new GutenbergRichter(1.0, 1.0, 0.1, 5.0, 5.5);
+    GutenbergRichter grProps = props.getAsGr(); // cover getAs
+    assertEquals(1.0, grProps.a());
+    assertEquals(1.0, grProps.b());
+    assertEquals(0.1, grProps.Δm());
+    assertEquals(5.0, grProps.mMin());
+    assertEquals(5.5, grProps.mMax());
+
+    grProps = new GutenbergRichter(grProps, 7.5);
+    assertEquals(1.0, grProps.a());
+    assertEquals(1.0, grProps.b());
+    assertEquals(0.1, grProps.Δm());
+    assertEquals(5.0, grProps.mMin());
+    assertEquals(7.5, grProps.mMax());
+
+    /* props.toString() */
+    assertEquals(
+        grProps.type() + " {a=" + grProps.a() +
+            ", b=" + grProps.b() +
+            ", Δm=" + grProps.Δm() +
+            ", mMin=" + grProps.mMin() +
+            ", mMax=" + grProps.mMax() + "}",
+        grProps.toString());
+  }
+
+  private static final double[] TAPERED_GR_M = {
+      5.05, 5.15, 5.25, 5.35, 5.45,
+      5.55, 5.65, 5.75, 5.85, 5.95,
+      6.05, 6.15, 6.25, 6.35, 6.45,
+      6.55, 6.65, 6.75, 6.85, 6.95,
+      7.05, 7.15, 7.25, 7.35, 7.45,
+      7.55, 7.65, 7.75, 7.85, 7.95 };
+
+  /* Generated using props.toBuilder().build() */
+  private static final double[] TAPERED_GR_R = {
+      3.6480433574236396E-4,
+      3.0345403881799134E-4,
+      2.5242909958337484E-4,
+      2.099931150361607E-4,
+      1.7470192588897718E-4,
+      1.4535446859752395E-4,
+      1.2095189730212227E-4,
+      1.0066358352203966E-4,
+      8.37988347826301E-5,
+      6.978336683158799E-5,
+      5.813972403773392E-5,
+      4.8470974011098916E-5,
+      4.04471093306309E-5,
+      3.37936743657631E-5,
+      2.8282200517277988E-5,
+      2.3722080373146463E-5,
+      1.9953542732997602E-5,
+      1.6841412644916558E-5,
+      1.4269370950942086E-5,
+      1.213450863835374E-5,
+      1.0342192289241769E-5,
+      8.80177763108668E-6,
+      7.4247305061395814E-6,
+      6.1282556531428345E-6,
+      4.848713278025381E-6,
+      3.56685822196953E-6,
+      2.3358516405761174E-6,
+      1.2823409762916184E-6,
+      5.437829885686743E-7,
+      1.5949675112240326E-7 };
+
+  @Test
+  void testTaperedGr() {
+
+    double aVal = Math.log10(4.0);
+    double bVal = 0.8;
+    double Δm = 0.1;
+    double mMin = 5.0;
+    double mMax = 8.0;
+    double mc = 7.5;
+
+    /* Factory builder; covers props.toBuilder() */
+    double incrRate = Mfds.gutenbergRichterRate(aVal, bVal, 5.05);
+    Mfd mfd = Mfd.newTaperedGutenbergRichterBuilder(bVal, Δm, mMin, mMax, mc)
+        .scaleToIncrementalRate(incrRate)
+        .build();
+    XySequence xy = mfd.data();
+    assertTrue(xy.size() == 30);
+    assertArrayEquals(TAPERED_GR_M, xy.xValues().toArray());
+    /* These end up being very slightly different due to scaling */
+    assertArrayEquals(TAPERED_GR_R, xy.yValues().toArray(), 1e-17);
+
+    /* Properties */
+    Properties props = new TaperedGr(aVal, bVal, Δm, mMin, mMax, mc);
+    TaperedGr grProps = props.getAsGrTaper(); // cover getAs
+    assertEquals(aVal, grProps.a());
+    assertEquals(bVal, grProps.b());
+    assertEquals(Δm, grProps.Δm());
+    assertEquals(mMin, grProps.mMin());
+    assertEquals(mMax, grProps.mMax());
+    assertEquals(mc, grProps.mc());
+    assertTrue(xy.size() == 30);
+    mfd = props.toBuilder().build();
+    xy = mfd.data();
+    assertArrayEquals(TAPERED_GR_M, xy.xValues().toArray());
+    assertArrayEquals(TAPERED_GR_R, xy.yValues().toArray());
+
+    grProps = new TaperedGr(grProps, 8.5);
+    assertEquals(aVal, grProps.a());
+    assertEquals(bVal, grProps.b());
+    assertEquals(Δm, grProps.Δm());
+    assertEquals(mMin, grProps.mMin());
+    assertEquals(8.5, grProps.mMax());
+    assertEquals(mc, grProps.mc());
+
+    /* props.toString() */
+    assertEquals(
+        grProps.type() + " {a=" + grProps.a() +
+            ", b=" + grProps.b() +
+            ", Δm=" + grProps.Δm() +
+            ", mMin=" + grProps.mMin() +
+            ", mMax=" + grProps.mMax() +
+            ", mc=" + grProps.mc() + "}",
+        grProps.toString());
+  }
+
+  @Test
+  void testBuilderTransforms() {
+
+    /* tolerances set based on experimentation with each transform */
+
+    double bVal = 1.0;
+    double aVal = Math.log10(150.0);
+    double Δm = 0.1;
+    double mMin = 5.0;
+    double mMax = 8.0;
+
+    Mfd mfd = new GutenbergRichter(aVal, bVal, Δm, mMin, mMax)
+        .toBuilder()
+        .build();
+
+    /* Scale to incremental */
+    double incrRate = Mfds.gutenbergRichterRate(aVal, bVal, 5.05);
+    Mfd scaleIncrMfd = Mfd.newGutenbergRichterBuilder(bVal, Δm, mMin, mMax)
+        .scaleToIncrementalRate(incrRate)
+        .build();
+
+    assertArrayEquals(
+        mfd.data().yValues().toArray(),
+        scaleIncrMfd.data().yValues().toArray(),
+        1e-18);
+
+    /* Scale to cumulative */
+    double cumulativeRateExpected = mfd.data().yValues().sum() * 2.0;
+    Mfd scaleCumulativeMfd = Mfd.newGutenbergRichterBuilder(bVal, Δm, mMin, mMax)
+        .scaleToCumulativeRate(cumulativeRateExpected)
+        .build();
+    double cumulativeRateActual = scaleCumulativeMfd.data().yValues().sum();
+    assertEquals(cumulativeRateExpected, cumulativeRateActual, 1e-17);
+
+    /* Scale to moment */
+    double momentRateExpected = Mfds.momentRate(mfd.data()) * 2.0;
+    Mfd scaleMomentMfd = Mfd.newGutenbergRichterBuilder(bVal, Δm, mMin, mMax)
+        .scaleToMomentRate(momentRateExpected)
+        .build();
+    double momentRateActual = Mfds.momentRate(scaleMomentMfd.data());
+    /* very small delta relative to 1e16 moment rate values */
+    assertEquals(momentRateExpected, momentRateActual, 10);
+
+    /* Transform */
+    double transformRateExpected = cumulativeRateExpected;
+    Mfd transformMfd = new GutenbergRichter(aVal, bVal, Δm, mMin, mMax)
+        .toBuilder()
+        .transform(xy -> xy.set(xy.y() * 2.0))
+        .build();
+    double transformRateActual = transformMfd.data().yValues().sum();
+    /* these values happen to match exactly */
+    assertEquals(transformRateExpected, transformRateActual);
+  }
+
+  /* No-arg constructor tests */
+  static final Path MFDS_JSON = Paths.get("src/test/resources/mfd/mfd-map.json");
+  static final Gson GSON = new GsonBuilder().create();
+
+  @Test
+  void testNoArgConstructors() {
+    JsonObject obj = readJson(MFDS_JSON);
+    Single singleProps = GSON.fromJson(obj.get("mfd-single"), Single.class);
+    assertEquals(Type.SINGLE, singleProps.type());
+    assertEquals(6.75, singleProps.magnitude());
+    assertEquals(1.0, singleProps.rate());
+    GutenbergRichter grProps = GSON.fromJson(obj.get("mfd-gr"), GutenbergRichter.class);
+    assertEquals(Type.GR, grProps.type());
+    assertEquals(1.0, grProps.a());
+    assertEquals(1.0, grProps.b());
+    assertEquals(0.1, grProps.Δm());
+    assertEquals(4.7, grProps.mMin());
+    assertEquals(6.5, grProps.mMax());
+    TaperedGr grTaperProps = GSON.fromJson(obj.get("mfd-gr-taper"), TaperedGr.class);
+    assertEquals(Type.GR_TAPER, grTaperProps.type());
+    assertEquals(1.0, grTaperProps.a());
+    assertEquals(0.8, grTaperProps.b());
+    assertEquals(0.1, grTaperProps.Δm());
+    assertEquals(5.0, grTaperProps.mMin());
+    assertEquals(8.0, grTaperProps.mMax());
+    assertEquals(7.5, grTaperProps.mc());
+    /* Check default behavior */
+    Properties incrProps = GSON.fromJson(obj.get("mfd-empty"), Properties.class);
+    assertEquals(Type.INCR, incrProps.type());
+  }
+
+  private static JsonObject readJson(Path path) {
+    try (BufferedReader br = Files.newBufferedReader(path)) {
+      return JsonParser.parseReader(br).getAsJsonObject();
+    } catch (IOException ioe) {
+      throw new RuntimeException(ioe);
+    }
+  }
+}
diff --git a/src/test/java/gov/usgs/earthquake/nshmp/mfd/Mfds2Test.java b/src/test/java/gov/usgs/earthquake/nshmp/mfd/Mfds2Test.java
deleted file mode 100644
index 50e6a8a231a298cb792dd3a9651a8bee7d565b35..0000000000000000000000000000000000000000
--- a/src/test/java/gov/usgs/earthquake/nshmp/mfd/Mfds2Test.java
+++ /dev/null
@@ -1,44 +0,0 @@
-/**
- *
- */
-package gov.usgs.earthquake.nshmp.mfd;
-
-/**
- * @author U.S. Geological Survey
- *
- */
-class Mfds2Test {
-
-  // /**
-  // * @throws java.lang.Exception
-  // */
-  // @BeforeAll
-  // public static void setUpBeforeClass() throws Exception {}
-  //
-  // /**
-  // * @throws java.lang.Exception
-  // */
-  // @AfterAll
-  // public static void tearDownAfterClass() throws Exception {}
-  //
-  // /**
-  // * @throws java.lang.Exception
-  // */
-  // @Before
-  // public void setUp() throws Exception {}
-  //
-  // /**
-  // * @throws java.lang.Exception
-  // */
-  // @After
-  // public void tearDown() throws Exception {}
-  //
-  // /**
-  // * Test method for {@link gov.usgs.earthquake.nshmp.mfd.Mfds2#builder()}.
-  // */
-  // @Test
-  // public final void testBuilder() {
-  // fail("Not yet implemented"); // TODO
-  // }
-
-}
diff --git a/src/test/java/gov/usgs/earthquake/nshmp/mfd/MfdsTests.java b/src/test/java/gov/usgs/earthquake/nshmp/mfd/MfdsTests.java
index cbcb65f6635f46eaa58940b7cbcaafe87aca24c8..14b991a056667e93807ae4ff1246528120f38da5 100644
--- a/src/test/java/gov/usgs/earthquake/nshmp/mfd/MfdsTests.java
+++ b/src/test/java/gov/usgs/earthquake/nshmp/mfd/MfdsTests.java
@@ -1,68 +1,198 @@
 package gov.usgs.earthquake.nshmp.mfd;
 
+import static org.junit.jupiter.api.Assertions.assertArrayEquals;
+import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertThrows;
+
+import java.util.Arrays;
+import java.util.List;
+import java.util.stream.DoubleStream;
+
+import org.junit.jupiter.api.Test;
+
+import gov.usgs.earthquake.nshmp.Earthquakes;
+import gov.usgs.earthquake.nshmp.data.XySequence;
+import gov.usgs.earthquake.nshmp.mfd.Mfd.Properties.GutenbergRichter;
+import gov.usgs.earthquake.nshmp.mfd.Mfd.Properties.Single;
+import gov.usgs.earthquake.nshmp.mfd.Mfd.Type;
+
 class MfdsTests {
 
-  private static final double MFD_TOL = 1e-10;
-
-  // @Test
-  // final void testTaperedGR() {
-  // // IncrementalMfd tGR = Mfds.newTaperedGutenbergRichterMFD(5.05, 0.1, 30,
-  // // 4.0, 0.8, 7.5, 1.0);
-  //
-  // double incrRate = Mfds.incrRate(4.0, 0.8, 5.05);
-  // XySequence tGR = Mfd.newTaperedGutenbergRichterMfd(5.05, 7.95, 0.1, 0.8,
-  // 7.5)
-  // .scaleToIncrementalRate(incrRate)
-  // .build();
-  // assertArrayEquals(TAPERED_GR_MAGS, tGR.xValues().toArray(), MFD_TOL);
-  // assertArrayEquals(TAPERED_GR_RATES, tGR.yValues().toArray(), MFD_TOL);
-  // }
-
-  public static void main(String[] args) {
-    // TODO clean
-    // IncrementalMfd tGR = Mfd.newTaperedGutenbergRichterMFD(5.05, 0.1, 30,
-    // 4.0, 0.8, 7.5, 1.0);
-    // System.out.println(tGR.xValues());
-    // System.out.println(tGR.yValues());
-    // for (Point2D p : tGR) {
-    // System.out.println(p.getX() + " " + p.getY());
-    // }
+  @Test
+  void testCheckRate() {
+    assertEquals(1.0, Mfds.checkRate(1.0));
+    assertThrows(IllegalArgumentException.class, () -> {
+      Mfds.checkRate(-1.0);
+    });
+  }
+
+  @Test
+  void testCheckValues() {
+    assertThrows(IllegalArgumentException.class, () -> {
+      Mfds.checkValues(DoubleStream.of(-5.0), DoubleStream.of(1.0));
+    });
+    assertThrows(IllegalArgumentException.class, () -> {
+      Mfds.checkValues(DoubleStream.of(5.0), DoubleStream.of(-1.0));
+    });
+    assertDoesNotThrow(() -> {
+      Mfds.checkValues(DoubleStream.of(5.0), DoubleStream.of(1.0));
+    });
   }
 
-  private static final double[] TAPERED_GR_MAGS = {
-      5.05, 5.15, 5.25, 5.35, 5.45, 5.55, 5.65,
-      5.75, 5.85, 5.95, 6.05, 6.15, 6.25, 6.35, 6.45, 6.55, 6.65, 6.75, 6.85, 6.95, 7.05, 7.15,
-      7.25, 7.35, 7.45, 7.55, 7.65, 7.75, 7.85, 7.95 };
-
-  private static final double[] TAPERED_GR_RATES = {
-      3.6487347712647455E-4,
-      3.0351155247723856E-4,
-      2.524769424833213E-4,
-      2.1003291504182055E-4,
-      1.747350371537821E-4,
-      1.4538201763727163E-4,
-      1.2097482132130435E-4,
-      1.0068266229609008E-4,
-      8.38147171800033E-5,
-      6.979659287661515E-5,
-      5.815074326255645E-5,
-      4.848016071724235E-5,
-      4.0454775273297684E-5,
-      3.380007928256618E-5,
-      2.828756084416522E-5,
-      2.37265764202333E-5,
-      1.995732452895587E-5,
-      1.6844604598702625E-5,
-      1.4272075425536466E-5,
-      1.2136808492386587E-5,
-      1.0344152445466779E-5,
-      8.803445832444012E-6,
-      7.426137715686029E-6,
-      6.129417141745154E-6,
-      4.849632254896957E-6,
-      3.5675342487878273E-6,
-      2.3362943546550987E-6,
-      1.2825840184413836E-6,
-      5.438860517858615E-7,
-      1.5952698054966954E-7 };
+  @Test
+  void testCheckMfdProperty() {
+    assertThrows(IllegalArgumentException.class, () -> {
+      Mfds.checkGaussianMagnitudeCount(0);
+    });
+    assertThrows(IllegalArgumentException.class, () -> {
+      Mfds.checkGaussianMagnitudeCount(22);
+    });
+    assertThrows(IllegalArgumentException.class, () -> {
+      Mfds.checkGaussianSigma(0.0);
+    });
+    assertThrows(IllegalArgumentException.class, () -> {
+      Mfds.checkGaussianSigma(0.51);
+    });
+    assertThrows(IllegalArgumentException.class, () -> {
+      Mfds.checkGaussianSigmaCount(-1);
+    });
+    assertThrows(IllegalArgumentException.class, () -> {
+      Mfds.checkGaussianSigmaCount(6);
+    });
+  }
+
+  @Test
+  void testPropsToString() {
+    assertEquals(
+        Type.SINGLE.name() + " {dummy}",
+        Mfds.propsToString(Type.SINGLE, "dummy"));
+  }
+
+  private static final double[] COMBINE_M = {
+      5.05, 5.15, 5.25, 5.35, 5.45, 5.55, 5.65, 5.75, 5.85, 5.95, // GR
+      6.05, 6.15, 6.25, 6.35, 6.45, 6.55, 6.65, 6.75, // GR
+      6.76, 6.808, // GAUSS
+      6.85, // GR
+      6.856, 6.904,
+      6.95, // GR
+      6.952, 6.96,
+      7.0, // GR GAUSS
+      7.008, 7.048, // GAUSS
+      7.05, // GR
+      7.056, 7.096, 7.104, 7.144, // GAUSS
+      7.15, // GR
+      7.152, 7.192, 7.2, 7.24, 7.248, // GAUSS
+      7.25, // GR
+      7.296, 7.344, // GAUSS
+      7.35, // GR
+      7.392, 7.44, // GAUSS
+      7.45 }; // GR
+
+  private static final double[] COMBINE_R = {
+      0.9803760319471205,
+      0.7787403628225511,
+      0.6185754577093839,
+      0.4913519513660598,
+      0.39029472815693284,
+      0.31002212243909005,
+      0.24625932524251715,
+      0.1956107351042815,
+      0.15537912990850308,
+      0.12342202997321594,
+      0.09803760319471205,
+      0.0778740362822551,
+      0.0618575457709384,
+      0.04913519513660598,
+      0.039029472815693286,
+      0.031002212243909005,
+      0.024625932524251715,
+      0.019561073510428153,
+      4.438109698488586E-5,
+      9.117799957055695E-5,
+      0.015537912990850309,
+      1.5962281648018392E-4,
+      2.3812925993219852E-4,
+      0.012342202997321593,
+      3.0272161935547244E-4,
+      2.219054849244293E-5,
+      3.2793441535340477E-4,
+      4.5588999785278476E-5,
+      3.0272161935547244E-4,
+      0.009803760319471205,
+      7.981140824009196E-5,
+      2.3812925993219852E-4,
+      1.1906462996609926E-4,
+      1.5962281648018392E-4,
+      0.007787403628225511,
+      1.5136080967773622E-4,
+      9.117799957055695E-5,
+      1.6396720767670238E-4,
+      4.438109698488586E-5,
+      1.5136080967773622E-4,
+      5.623413251903491E-4,
+      1.1906462996609926E-4,
+      7.981140824009196E-5,
+      4.466835921509635E-4,
+      4.5588999785278476E-5,
+      2.219054849244293E-5,
+      3.548133892335753E-4 };
+
+  @Test
+  void testCombine() {
+    Mfd mfd1 = new Single(7.0, 0.002).toGaussianBuilder(11, 0.12, 2).build();
+    Mfd mfd2 = new Single(7.2, 0.001).toGaussianBuilder(11, 0.12, 2).build();
+    Mfd mfd3 = new GutenbergRichter(5.0, 1.0, 0.1, 5.0, 7.2).toBuilder().build();
+    Mfd mfd4 = new GutenbergRichter(4.0, 1.0, 0.1, 5.0, 7.5).toBuilder().build();
+    XySequence actual = Mfds.combine(List.of(mfd1, mfd2, mfd3, mfd4)).data();
+    XySequence expected = XySequence.create(COMBINE_M, COMBINE_R);
+    assertEquals(expected, actual);
+  }
+
+  private static final double[] GR_RATE_M = {
+      5.05, 5.15, 5.25, 5.35, 5.45,
+      5.55, 5.65, 5.75, 5.85, 5.95 };
+
+  private static final double[] GR_RATE_R = {
+      0.009120108393559097,
+      0.007585775750291836,
+      0.00630957344480193,
+      0.005248074602497723,
+      0.004365158322401657,
+      0.00363078054770101,
+      0.003019951720402013,
+      0.002511886431509577,
+      0.0020892961308540407,
+      0.0017378008287493728 };
+
+  @Test
+  void testGutenbergRichterRate() {
+    double a = 2;
+    double b = 0.8;
+    double[] actual = Arrays.stream(GR_RATE_M)
+        .map(m -> Mfds.gutenbergRichterRate(a, b, m))
+        .toArray();
+    assertArrayEquals(GR_RATE_R, actual);
+  }
+
+  private static final double COMBINE_MO_RATE = 4.2389375463986755E18;
+
+  @Test
+  void testMomentRate() {
+    Mfd mfd = Mfd.create(XySequence.create(COMBINE_M, COMBINE_R));
+    assertEquals(COMBINE_MO_RATE, Mfds.momentRate(mfd));
+  }
+
+  private static final double PARETO_RATE = 3.6159044829897224E-5;
+
+  @Test
+  void testParetoRate() {
+    double Mt = Earthquakes.magToMoment(4.5);
+    double M = Earthquakes.magToMoment(7.0);
+    double Mcm = Earthquakes.magToMoment(6.5);
+    double b = 0.8;
+    double β = b / 1.5;
+    double actual = Mfds.paretoRate(Mt, M, β, Mcm);
+    assertEquals(PARETO_RATE, actual);
+  }
 }
diff --git a/src/test/java/gov/usgs/earthquake/nshmp/model/SourceFeatureTests.java b/src/test/java/gov/usgs/earthquake/nshmp/model/SourceFeatureTests.java
index 2d9a3009477bddb4767e865d3b4d99bd40f75bb4..0acc3b84f7c250546eb6b667409da9f72fe34d67 100644
--- a/src/test/java/gov/usgs/earthquake/nshmp/model/SourceFeatureTests.java
+++ b/src/test/java/gov/usgs/earthquake/nshmp/model/SourceFeatureTests.java
@@ -28,7 +28,7 @@ class SourceFeatureTests {
 
     assertEquals(100000, section.id);
     assertEquals("Test Fault Section", section.name);
-    assertEquals("NV", section.state.get());
+    assertEquals("NV", section.state.orElseThrow());
     assertEquals(List.of("NV", "CA"), section.states);
 
     /* Test no states array */
@@ -47,7 +47,7 @@ class SourceFeatureTests {
 
     assertEquals(100000, section.id);
     assertEquals("Test Fault Section", section.name);
-    assertEquals("NV", section.state.get());
+    assertEquals("NV", section.state.orElseThrow());
     assertEquals(List.of("NV", "CA"), section.states);
     assertEquals(0.0, section.upperDepth);
     assertEquals(15.0, section.lowerDepth);
@@ -71,7 +71,7 @@ class SourceFeatureTests {
         Location.create(-119.7, 39.1),
         Location.create(-119.9, 39.1),
         Location.create(-119.9, 38.6));
-    assertEquals(zoneBorder, zoneSection.zone.get());
+    assertEquals(zoneBorder, zoneSection.zone.orElseThrow());
 
   }
 
@@ -85,7 +85,7 @@ class SourceFeatureTests {
     assertEquals(100001, section.id);
     assertEquals("Test Interface Section",
         section.name);
-    assertEquals("WA", section.state.get());
+    assertEquals("WA", section.state.orElseThrow());
     assertEquals(List.of("WA", "OR"), section.states);
 
     LocationList upperTrace = LocationList.of(
diff --git a/src/test/java/gov/usgs/earthquake/nshmp/model/peer/PeerTest.java b/src/test/java/gov/usgs/earthquake/nshmp/model/peer/PeerTest.java
index 647dc02ce8d5a15f336dca7887211578954d5d1f..11c07a9d6f9c6e867f41c8de2dc07aad500b342a 100644
--- a/src/test/java/gov/usgs/earthquake/nshmp/model/peer/PeerTest.java
+++ b/src/test/java/gov/usgs/earthquake/nshmp/model/peer/PeerTest.java
@@ -30,6 +30,7 @@ import com.google.common.primitives.Doubles;
 import com.google.gson.Gson;
 import com.google.gson.GsonBuilder;
 
+import gov.usgs.earthquake.nshmp.Maths;
 import gov.usgs.earthquake.nshmp.calc.CalcConfig;
 import gov.usgs.earthquake.nshmp.calc.Hazard;
 import gov.usgs.earthquake.nshmp.calc.HazardCalcs;
@@ -37,7 +38,6 @@ import gov.usgs.earthquake.nshmp.calc.Site;
 import gov.usgs.earthquake.nshmp.calc.Sites;
 import gov.usgs.earthquake.nshmp.gmm.Imt;
 import gov.usgs.earthquake.nshmp.internal.Parsing;
-import gov.usgs.earthquake.nshmp.mfd.Mfds;
 import gov.usgs.earthquake.nshmp.model.HazardModel;
 
 public class PeerTest {
@@ -114,7 +114,7 @@ public class PeerTest {
     double[] actual = Doubles.toArray(
         FluentIterable
             .from(result.curves().get(Imt.PGA).yValues().boxed().collect(Collectors.toList()))
-            .transform(Mfds.annualRateToProbabilityConverter())
+            .transform(Maths.annualRateToProbabilityConverter())
             .toList());
     checkArgument(actual.length == expected.length);
 
diff --git a/src/test/java/gov/usgs/earthquake/nshmp/tree/LogicTreeTests.java b/src/test/java/gov/usgs/earthquake/nshmp/tree/LogicTreeTests.java
index 1c96d456a2542da4ae82b0ffd62435cd4b336ced..607875af17f18c61d96278f5f38ea9166a4d66c0 100644
--- a/src/test/java/gov/usgs/earthquake/nshmp/tree/LogicTreeTests.java
+++ b/src/test/java/gov/usgs/earthquake/nshmp/tree/LogicTreeTests.java
@@ -37,12 +37,12 @@ class LogicTreeTests {
     assertEquals(3, tree.size());
     assertEquals("basic-tree", tree.name());
 
-    Branch<String> branch = tree.branches().get(1);
+    Branch<String> branch = tree.get(1);
     assertEquals(id[1], branch.id());
     assertEquals(value[1], branch.value());
     assertEquals(weight[1], branch.weight());
     assertEquals(branchString(id[1], weight[1]), branch.toString());
-    assertEquals(tree.branches().get(0), tree.iterator().next()); // iterator
+    assertEquals(tree.get(0), tree.iterator().next()); // iterator
 
     /* StringValueTree */
     LogicTree.StringValueBuilder stringValueTreeBuilder =
@@ -53,7 +53,7 @@ class LogicTreeTests {
         .addBranch(id[2], weight[2])
         .build();
 
-    branch = tree.branches().get(2);
+    branch = tree.get(2);
     assertEquals(id[2], branch.id());
     assertEquals(id[2], branch.value());
     assertEquals(0.1, branch.weight());
@@ -68,7 +68,7 @@ class LogicTreeTests {
         .addBranch(EnumKey.ID_2, value[2], weight[2])
         .build();
 
-    branch = tree.branches().get(2);
+    branch = tree.get(2);
     assertEquals(EnumKey.ID_2.name(), branch.id());
     assertEquals(value[2], branch.value());
     assertEquals(0.1, branch.weight());
@@ -83,7 +83,7 @@ class LogicTreeTests {
         .addBranch(EnumKey.ID_2, weight[2])
         .build();
 
-    Branch<EnumKey> enumBranch = enumValueTree.branches().get(2);
+    Branch<EnumKey> enumBranch = enumValueTree.get(2);
     assertEquals(EnumKey.ID_2.name(), enumBranch.id());
     assertEquals(EnumKey.ID_2, enumBranch.value());
     assertEquals(0.1, enumBranch.weight());
@@ -97,13 +97,13 @@ class LogicTreeTests {
         .build();
 
     assertEquals("logic-group", group.name());
-    branch = group.branches().get(0);
+    branch = group.get(0);
     assertEquals(id[0], branch.id());
     assertEquals(value[0], branch.value());
     assertEquals(weight[0], branch.weight());
     assertEquals(branchString(id[0], weight[0]), branch.toString());
     assertEquals(2, group.size());
-    assertEquals(group.branches().get(0), group.iterator().next()); // iterator
+    assertEquals(group.get(0), group.iterator().next()); // iterator
 
   }
 
@@ -139,6 +139,18 @@ class LogicTreeTests {
         .append(TEST_TREE).append(" ─ ").append(branchString(id[0], 1.0))
         .toString();
     assertEquals(singletonTreeString, singletonTree.toString());
+
+    /* Logic Group */
+    LogicGroup<String> group = LogicGroup.<String> builder()
+        .addBranch(id[0], value[0], 1.0)
+        .addBranch(id[1], value[1], 1.0)
+        .build();
+    String groupString = new StringBuilder()
+        .append("logic-group").append(" ┬ ").append(branchString(id[0], 1.0))
+        .append(Text.NEWLINE)
+        .append("            └ ").append(branchString(id[1], 1.0))
+        .toString();
+    assertEquals(groupString, group.toString());
   }
 
   @Test
@@ -166,6 +178,13 @@ class LogicTreeTests {
           .addBranch(" ", 1.0);
     });
 
+    /* Duplicate branch id. */
+    assertThrows(IllegalArgumentException.class, () -> {
+      LogicTree.builder(TEST_TREE)
+          .addBranch("id", "value1", 0.4)
+          .addBranch("id", "value2", 0.6);
+    });
+
     /* Used builder */
     assertThrows(IllegalStateException.class, () -> {
       LogicTree.Builder<String> builder = LogicTree.<String> builder(TEST_TREE)
@@ -223,24 +242,22 @@ class LogicTreeTests {
         .addBranch(id[3], value[3], weight[3])
         .build();
 
-    List<Branch<String>> branches = tree.branches();
-
     List<Branch<String>> expected = List.of(
-        branches.get(0),
-        branches.get(1),
-        branches.get(1),
-        branches.get(2),
-        branches.get(2),
-        branches.get(3),
-        branches.get(3),
-        branches.get(3));
+        tree.get(0),
+        tree.get(1),
+        tree.get(1),
+        tree.get(2),
+        tree.get(2),
+        tree.get(3),
+        tree.get(3),
+        tree.get(3));
 
     List<Branch<String>> samples = tree.sample(probabilities);
     assertEquals(expected, samples);
 
     /* Test P<0 and P>1 */
-    assertEquals(branches.get(0), tree.sample(-0.5));
-    assertEquals(branches.get(3), tree.sample(1.5));
+    assertEquals(tree.get(0), tree.sample(-0.5));
+    assertEquals(tree.get(3), tree.sample(1.5));
 
     /*
      * Creating the cumulative weight array introduces double precision math
@@ -250,7 +267,7 @@ class LogicTreeTests {
      * values. For example, if we sample at 0.8999999999999999 and had not
      * rounded then we would get back branch 3 instead of 2.
      */
-    assertEquals(branches.get(2), tree.sample(0.8999999999999999));
+    assertEquals(tree.get(2), tree.sample(0.8999999999999999));
   }
 
   private static final String SINGLETON_VALUE = "singleton-value";
@@ -266,7 +283,7 @@ class LogicTreeTests {
         TEST_TREE,
         SINGLETON_ID,
         SINGLETON_VALUE);
-    assertEquals(tree.branches().get(0).id(), SINGLETON_ID);
+    assertEquals(tree.get(0).id(), SINGLETON_ID);
   }
 
   @Test
@@ -277,7 +294,7 @@ class LogicTreeTests {
           TEST_TREE,
           SINGLETON_ID,
           SINGLETON_VALUE);
-      tree.branches().remove(0);
+      tree.remove(0);
     });
   }
 
diff --git a/src/test/resources/mfd/mfd-map.json b/src/test/resources/mfd/mfd-map.json
new file mode 100644
index 0000000000000000000000000000000000000000..de96430df78af340d2123c11b41541b69f835c6c
--- /dev/null
+++ b/src/test/resources/mfd/mfd-map.json
@@ -0,0 +1,22 @@
+{
+  "mfd-single" : {
+    "type": "SINGLE",
+    "m": 6.75 
+  },
+  "mfd-gr": {
+    "type": "GR", 
+    "mMax": 6.5, 
+    "mMin": 4.7, 
+    "Δm": 0.1, 
+    "b": 1.0
+  },
+  "mfd-gr-taper": { 
+    "type": "GR_TAPER",
+    "b": 0.8,
+    "Δm": 0.1,
+    "mMin": 5.0,
+    "mMax": 8.0,
+    "mc": 7.5
+  },
+  "mfd-empty": {}
+}