From 36df97d8a784eaf4e784a65cb43bedf508de987c Mon Sep 17 00:00:00 2001
From: Peter Powers <pmpowers@usgs.gov>
Date: Fri, 27 Mar 2015 12:27:23 -0600
Subject: [PATCH] calc refactor: process multiple Imts per calculation

---
 src/org/opensha/calc/AsyncCalc.java           |  61 ++--
 src/org/opensha/calc/CalcConfig.java          |  61 +++-
 src/org/opensha/calc/Calcs.java               |  39 +-
 src/org/opensha/calc/ClusterCurves.java       |  22 +-
 src/org/opensha/calc/Deagg.java               |  20 +-
 src/org/opensha/calc/HazardCurveSet.java      |  87 +++--
 src/org/opensha/calc/HazardCurves.java        |  24 +-
 src/org/opensha/calc/HazardGroundMotions.java |  68 ++--
 src/org/opensha/calc/HazardResult.java        |  38 +-
 src/org/opensha/calc/SigmaModel.java          |   2 +-
 src/org/opensha/calc/SiteSet.java             |   1 -
 src/org/opensha/calc/Transforms.java          | 208 ++++++-----
 src/org/opensha/calc/Utils.java               |   2 +
 src/org/opensha/data/ArrayXY_Sequence.java    |  14 +-
 src/org/opensha/data/DataUtils.java           |   4 +
 .../fault/surface/DefaultGriddedSurface.java  |   2 +-
 .../eq/fault/surface/RuptureFloating.java     |   4 +-
 src/org/opensha/eq/model/FaultParser.java     |   1 +
 src/org/opensha/eq/model/FaultSource.java     |  17 +-
 src/org/opensha/eq/model/HazardModel.java     |  13 +-
 src/org/opensha/eq/model/InterfaceParser.java |   1 +
 src/org/opensha/eq/model/InterfaceSource.java |  23 +-
 src/org/opensha/eq/model/Loader.java          |  17 +-
 src/org/opensha/gmm/Gmm.java                  |  47 ++-
 src/org/opensha/mfd/Mfds.java                 |  51 ++-
 src/org/opensha/programs/HazardCurve.java     | 345 +++++++++++-------
 src/org/opensha/programs/HazardMap.java       |   2 +-
 src/org/opensha/util/Logging.java             |   6 +-
 28 files changed, 750 insertions(+), 430 deletions(-)
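
The central change in this refactor: hazard is now computed for multiple
intensity measure types (Imts) in a single pass. The former
Map<Gmm, GroundMotionModel>, keyed for one Imt at a time, becomes a Guava
Table<Gmm, Imt, GroundMotionModel>, and single model curves become
Map<Imt, ArrayXY_Sequence> throughout. A minimal sketch of the table pattern,
using hypothetical stand-in enums rather than the real org.opensha.gmm types:

    import java.util.EnumSet;
    import java.util.Set;

    import com.google.common.collect.ArrayTable;
    import com.google.common.collect.Table;

    class TablePatternSketch {

        // Stand-ins for org.opensha.gmm.Gmm and Imt (hypothetical).
        enum Gmm { GMM_A, GMM_B }
        enum Imt { PGA, SA1P0 }

        // Mirrors the shape of the new Gmm.instances(Set<Gmm>, Set<Imt>):
        // one instance per (Gmm, Imt) cell instead of one per Gmm.
        static Table<Gmm, Imt, String> instances(Set<Gmm> gmms, Set<Imt> imts) {
            Table<Gmm, Imt, String> table = ArrayTable.create(gmms, imts);
            for (Gmm gmm : gmms) {
                for (Imt imt : imts) {
                    table.put(gmm, imt, gmm + "/" + imt);
                }
            }
            return table;
        }

        public static void main(String[] args) {
            Table<Gmm, Imt, String> t = instances(
                EnumSet.allOf(Gmm.class), EnumSet.allOf(Imt.class));
            System.out.println(t.row(Gmm.GMM_A)); // all Imt flavors of one model
        }
    }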

diff --git a/src/org/opensha/calc/AsyncCalc.java b/src/org/opensha/calc/AsyncCalc.java
index a33341cd9..3793deb80 100644
--- a/src/org/opensha/calc/AsyncCalc.java
+++ b/src/org/opensha/calc/AsyncCalc.java
@@ -16,6 +16,7 @@ import static org.opensha.calc.Transforms.sourceToInputs;
 
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 
@@ -29,6 +30,7 @@ import org.opensha.gmm.GroundMotionModel;
 import org.opensha.gmm.Imt;
 
 import com.google.common.base.Function;
+import com.google.common.collect.Table;
 import com.google.common.util.concurrent.ListenableFuture;
 
 /**
@@ -67,9 +69,9 @@ final class AsyncCalc {
 	 * HazardGroundMotions.
 	 */
 	static AsyncList<HazardGroundMotions> toGroundMotions(AsyncList<HazardInputs> inputsList,
-			SourceSet<? extends Source> sourceSet, Imt imt) {
-		Map<Gmm, GroundMotionModel> gmmInstances = Gmm.instances(sourceSet.groundMotionModels()
-			.gmms(), imt);
+			SourceSet<? extends Source> sourceSet, Set<Imt> imts) {
+		Set<Gmm> gmms = sourceSet.groundMotionModels().gmms();
+		Table<Gmm, Imt, GroundMotionModel> gmmInstances = Gmm.instances(gmms, imts);
 		Function<HazardInputs, HazardGroundMotions> function = inputsToGroundMotions(gmmInstances);
 		AsyncList<HazardGroundMotions> result = createWithCapacity(inputsList.size());
 		for (ListenableFuture<HazardInputs> hazardInputs : inputsList) {
@@ -83,8 +85,9 @@ final class AsyncCalc {
 	 * HazardCurves.
 	 */
 	static AsyncList<HazardCurves> toHazardCurves(AsyncList<HazardGroundMotions> groundMotionsList,
-			ArrayXY_Sequence modelCurve) {
-		Function<HazardGroundMotions, HazardCurves> function = groundMotionsToCurves(modelCurve);
+			Map<Imt, ArrayXY_Sequence> modelCurves, SigmaModel sigmaModel, double truncLevel) {
+		Function<HazardGroundMotions, HazardCurves> function = groundMotionsToCurves(modelCurves,
+			sigmaModel, truncLevel);
 		AsyncList<HazardCurves> result = createWithCapacity(groundMotionsList.size());
 		for (ListenableFuture<HazardGroundMotions> groundMotions : groundMotionsList) {
 			result.add(transform(groundMotions, function, EX));
@@ -96,9 +99,9 @@ final class AsyncCalc {
 	 * Reduce a List of future HazardCurves to a future HazardCurveSet.
 	 */
 	static ListenableFuture<HazardCurveSet> toHazardCurveSet(AsyncList<HazardCurves> curves,
-			SourceSet<? extends Source> sourceSet, ArrayXY_Sequence modelCurve) {
+			SourceSet<? extends Source> sourceSet, Map<Imt, ArrayXY_Sequence> modelCurves) {
 		Function<List<HazardCurves>, HazardCurveSet> function = curveConsolidator(sourceSet,
-			modelCurve);
+			modelCurves);
 		return transform(allAsList(curves), function, EX);
 	}
 
@@ -106,23 +109,11 @@ final class AsyncCalc {
 	 * Reduce a List of future HazardCurveSets into a future HazardResult.
 	 */
 	static ListenableFuture<HazardResult> toHazardResult(AsyncList<HazardCurveSet> curveSets,
-			ArrayXY_Sequence modelCurve) {
-		Function<List<HazardCurveSet>, HazardResult> function = curveSetConsolidator(modelCurve);
+			Map<Imt, ArrayXY_Sequence> modelCurves) {
+		Function<List<HazardCurveSet>, HazardResult> function = curveSetConsolidator(modelCurves);
 		return transform(allAsList(curveSets), function, EX);
 	}
 
-	/**
-	 * Convert a ClusterSourceSet to a List of future HazardInputs Lists.
-	 */
-	static AsyncList<ClusterInputs> toClusterInputs(ClusterSourceSet sourceSet, Site site) {
-		Function<ClusterSource, ClusterInputs> function = clusterSourceToInputs(site);
-		AsyncList<ClusterInputs> result = AsyncList.create();
-		for (ClusterSource source : sourceSet.locationIterable(site.location)) {
-			result.add(transform(immediateFuture(source), function, EX));
-		}
-		return result;
-	}
-
 	/*
 	 * Cluster sources below...
 	 * 
@@ -137,14 +128,26 @@ final class AsyncCalc {
 	 * its internal List of IncrementalMfds.
 	 */
 
+	/**
+	 * Convert a ClusterSourceSet to a List of future HazardInputs Lists.
+	 */
+	static AsyncList<ClusterInputs> toClusterInputs(ClusterSourceSet sourceSet, Site site) {
+		Function<ClusterSource, ClusterInputs> function = clusterSourceToInputs(site);
+		AsyncList<ClusterInputs> result = AsyncList.create();
+		for (ClusterSource source : sourceSet.locationIterable(site.location)) {
+			result.add(transform(immediateFuture(source), function, EX));
+		}
+		return result;
+	}
+
 	/**
 	 * Convert a List of future HazardInputs Lists to a List of future
 	 * HazardGroundMotions Lists.
 	 */
 	static AsyncList<ClusterGroundMotions> toClusterGroundMotions(
-			AsyncList<ClusterInputs> inputsList, ClusterSourceSet sourceSet, Imt imt) {
-		Map<Gmm, GroundMotionModel> gmmInstances = Gmm.instances(sourceSet.groundMotionModels()
-			.gmms(), imt);
+			AsyncList<ClusterInputs> inputsList, ClusterSourceSet sourceSet, Set<Imt> imts) {
+		Set<Gmm> gmms = sourceSet.groundMotionModels().gmms();
+		Table<Gmm, Imt, GroundMotionModel> gmmInstances = Gmm.instances(gmms, imts);
 		Function<ClusterInputs, ClusterGroundMotions> function = clusterInputsToGroundMotions(gmmInstances);
 		AsyncList<ClusterGroundMotions> result = createWithCapacity(inputsList.size());
 		for (ListenableFuture<ClusterInputs> inputs : inputsList) {
@@ -158,8 +161,10 @@ final class AsyncCalc {
 	 * ClusterCurves.
 	 */
 	static AsyncList<ClusterCurves> toClusterCurves(
-			AsyncList<ClusterGroundMotions> clusterGroundMotions, ArrayXY_Sequence model) {
-		Function<ClusterGroundMotions, ClusterCurves> function = clusterGroundMotionsToCurves(model);
+			AsyncList<ClusterGroundMotions> clusterGroundMotions,
+			Map<Imt, ArrayXY_Sequence> modelCurves, SigmaModel sigmaModel, double truncLevel) {
+		Function<ClusterGroundMotions, ClusterCurves> function = clusterGroundMotionsToCurves(
+			modelCurves, sigmaModel, truncLevel);
 		AsyncList<ClusterCurves> result = createWithCapacity(clusterGroundMotions.size());
 		for (ListenableFuture<ClusterGroundMotions> groundMotions : clusterGroundMotions) {
 			result.add(transform(groundMotions, function, EX));
@@ -171,9 +176,9 @@ final class AsyncCalc {
 	 * Reduce a List of future ClusterCurves to a future HazardCurveSet.
 	 */
 	static ListenableFuture<HazardCurveSet> toHazardCurveSet(AsyncList<ClusterCurves> curvesList,
-			ClusterSourceSet clusterSourceSet, ArrayXY_Sequence model) {
+			ClusterSourceSet clusterSourceSet, Map<Imt, ArrayXY_Sequence> modelCurves) {
 		Function<List<ClusterCurves>, HazardCurveSet> function = clusterCurveConsolidator(
-			clusterSourceSet, model);
+			clusterSourceSet, modelCurves);
 		return transform(allAsList(curvesList), function, EX);
 	}
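
Every method above follows the same pattern: fetch a Function from Transforms,
map it over the entries of an AsyncList via Futures.transform, then reduce with
allAsList. A minimal sketch of that pattern, assuming the two-argument
Futures.transform(future, function) overload in the Guava version of the day
(newer Guava requires an explicit Executor):

    import java.util.ArrayList;
    import java.util.List;
    import java.util.concurrent.Callable;
    import java.util.concurrent.Executors;

    import com.google.common.base.Function;
    import com.google.common.util.concurrent.Futures;
    import com.google.common.util.concurrent.ListenableFuture;
    import com.google.common.util.concurrent.ListeningExecutorService;
    import com.google.common.util.concurrent.MoreExecutors;

    class AsyncPatternSketch {

        public static void main(String[] args) throws Exception {
            ListeningExecutorService ex = MoreExecutors.listeningDecorator(
                Executors.newFixedThreadPool(2));

            // Stand-in for an AsyncList<HazardInputs>: a list of pending values.
            List<ListenableFuture<Integer>> inputs = new ArrayList<>();
            for (int i = 0; i < 4; i++) {
                final int id = i;
                inputs.add(ex.submit(new Callable<Integer>() {
                    @Override public Integer call() { return id; }
                }));
            }

            // Stand-in for a Transforms function, e.g. inputsToGroundMotions.
            Function<Integer, String> toResult = new Function<Integer, String>() {
                @Override public String apply(Integer in) { return "result-" + in; }
            };

            // Map each future, as toGroundMotions()/toHazardCurves() do...
            List<ListenableFuture<String>> results = new ArrayList<>();
            for (ListenableFuture<Integer> input : inputs) {
                results.add(Futures.transform(input, toResult));
            }

            // ...and reduce, as toHazardCurveSet()/toHazardResult() do.
            System.out.println(Futures.allAsList(results).get());
            ex.shutdown();
        }
    }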
 
diff --git a/src/org/opensha/calc/CalcConfig.java b/src/org/opensha/calc/CalcConfig.java
index e55ad4f61..a57838488 100644
--- a/src/org/opensha/calc/CalcConfig.java
+++ b/src/org/opensha/calc/CalcConfig.java
@@ -16,9 +16,13 @@ import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Set;
 
+import org.opensha.data.ArrayXY_Sequence;
+import org.opensha.data.DataUtils;
+import org.opensha.gmm.GroundMotionModel;
 import org.opensha.gmm.Imt;
 
 import com.google.common.base.Strings;
+import com.google.common.cache.LoadingCache;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import com.google.gson.Gson;
@@ -30,6 +34,8 @@ import com.google.gson.GsonBuilder;
  */
 public final class CalcConfig {
 
+	static final String FILE_NAME = "config.json";
+
 	public final SigmaModel sigmaModel;
 	public final double truncationLevel;
 
@@ -37,7 +43,7 @@ public final class CalcConfig {
 	
 	public final double[] defaultImls;
 	public final Map<Imt, double[]> customImls;
-
+	
 	public final Deagg deagg;
 
 	public final SiteSet sites;
@@ -55,6 +61,9 @@ public final class CalcConfig {
 		 * Default values. These are initialized here because gson will not
 		 * deserialize field initialized final primitives and Strings.
 		 */
+		
+		// TODO consider adding a TypeAdapter for enums that will throw an
+		// exception if an invalid enum value is supplied in config.json
 
 		sigmaModel = SigmaModel.TRUNCATION_UPPER_ONLY;
 		truncationLevel = 3.0;
@@ -83,45 +92,73 @@ public final class CalcConfig {
 		}
 
 		StringBuilder sb = new StringBuilder("Calculation config:").append(NEWLINE)
-			.append("       Sigma model: ")
+			.append("          Sigma model: ")
 			.append("type=").append(sigmaModel).append(", ")
-			.append("level=").append(truncationLevel)
+			.append("truncLevel=").append(truncationLevel)
 			.append(NEWLINE)
-			.append("              IMTs: ").append(imts)
+			.append("                 IMTs: ").append(imts)
 			.append(NEWLINE)
-			.append("      Default IMLs: ")
+			.append("         Default IMLs: ")
 			.append(Arrays.toString(defaultImls))
 			.append(NEWLINE)
 			.append(customImlStr)
-			.append("   Deaggregation R: ")
+			.append("      Deaggregation R: ")
 			.append("min=").append(deagg.rMin).append(", ")
 			.append("max=").append(deagg.rMax).append(", ")
 			.append("Δ=").append(deagg.Δr)
 			.append(NEWLINE)
-			.append("   Deaggregation M: ")
+			.append("      Deaggregation M: ")
 			.append("min=").append(deagg.mMin).append(", ")
 			.append("max=").append(deagg.mMax).append(", ")
 			.append("Δ=").append(deagg.Δm)
 			.append(NEWLINE)
-			.append("   Deaggregation ε: ")
+			.append("      Deaggregation ε: ")
 			.append("min=").append(deagg.εMin).append(", ")
 			.append("max=").append(deagg.εMax).append(", ")
 			.append("Δ=").append(deagg.Δε)
 			.append(NEWLINE);
 
 		for (Site site : sites) {
-			sb.append("              ").append(site.toString()).append(NEWLINE);
+			sb.append("                 ").append(site.toString()).append(NEWLINE);
 		}
 
 		return sb.toString();
 	}
 
-	double[] imlsForImt(Imt imt) {
+	public double[] imlsForImt(Imt imt) {
 		return customImls.containsKey(imt) ? customImls.get(imt) : defaultImls;
 	}
+	
+	/**
+	 * Returns models of the intensity measure levels for each {@code Imt} addressed
+	 * by this calculation. Note that the x-values in each sequence are in natural
+	 * log space.
+	 */
+	public Map<Imt, ArrayXY_Sequence> logModelCurves() {
+		Map<Imt, ArrayXY_Sequence> curveMap = Maps.newEnumMap(Imt.class);
+		for (Imt imt : imts) {
+			double[] imls = imlsForImt(imt);
+			imls = Arrays.copyOf(imls, imls.length);
+			DataUtils.ln(imls);
+			curveMap.put(imt, ArrayXY_Sequence.create(imls, null));
+		}
+		return curveMap;
+	}
+	
+	public Map<Imt, ArrayXY_Sequence> modelCurves() {
+		Map<Imt, ArrayXY_Sequence> curveMap = Maps.newEnumMap(Imt.class);
+		for (Imt imt : imts) {
+			double[] imls = imlsForImt(imt);
+			imls = Arrays.copyOf(imls, imls.length);
+			curveMap.put(imt, ArrayXY_Sequence.create(imls, null));
+		}
+		return curveMap;
+	}
+
 
-	static CalcConfig load(Path path) throws IOException {
-		Reader reader = Files.newBufferedReader(path, UTF_8);
+	public static CalcConfig load(Path path) throws IOException {
+		Path configPath = path.resolve(FILE_NAME);
+		Reader reader = Files.newBufferedReader(configPath, UTF_8);
 		CalcConfig config = GSON.fromJson(reader, CalcConfig.class);
 		reader.close();
 		return config;
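
With load(Path) now resolving config.json itself and the curve-model accessors
public, client setup reduces to the sketch below. The model directory is a
placeholder and, per the javadoc above, logModelCurves() returns sequences
whose x-values are ln(IML):

    import java.nio.file.Paths;
    import java.util.Map;

    import org.opensha.calc.CalcConfig;
    import org.opensha.data.ArrayXY_Sequence;
    import org.opensha.gmm.Imt;

    class ConfigSketch {

        public static void main(String[] args) throws Exception {
            // load() appends CalcConfig.FILE_NAME ("config.json") to the
            // supplied directory; "path/to/model" is a placeholder.
            CalcConfig config = CalcConfig.load(Paths.get("path/to/model"));

            // One model curve per Imt, x-values in natural-log space for
            // use when computing exceedance probabilities.
            Map<Imt, ArrayXY_Sequence> logCurves = config.logModelCurves();

            // Linear-space twins, e.g. for reporting results.
            Map<Imt, ArrayXY_Sequence> curves = config.modelCurves();

            System.out.println(logCurves.keySet().equals(curves.keySet())); // true
        }
    }
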
diff --git a/src/org/opensha/calc/Calcs.java b/src/org/opensha/calc/Calcs.java
index 3e2f86fa8..b5d6ad338 100644
--- a/src/org/opensha/calc/Calcs.java
+++ b/src/org/opensha/calc/Calcs.java
@@ -9,6 +9,7 @@ import static org.opensha.calc.AsyncCalc.toHazardCurves;
 import static org.opensha.calc.AsyncCalc.toHazardResult;
 import static org.opensha.calc.AsyncCalc.toInputs;
 
+import java.util.Map;
 import java.util.concurrent.ExecutionException;
 
 import org.opensha.data.ArrayXY_Sequence;
@@ -28,20 +29,24 @@ import com.google.common.util.concurrent.ListenableFuture;
  */
 public class Calcs {
 
+	// TODO (below) in situations where multiple Imts are being processed,
+	// we should short-circuit toInputs() as this step is independent
+	// of Imt
+
 	/**
 	 * Compute a hazard curve.
 	 * 
 	 * @param model to use
-	 * @param imt intensity measure type
+	 * @param config calculation configuration to use
 	 * @param site of interest
-	 * @param imls sequence of intensity measure levels (x-values) to populate
 	 * @throws InterruptedException
 	 * @throws ExecutionException
 	 */
-	public static HazardResult hazardCurve(HazardModel model, Imt imt, Site site,
-			ArrayXY_Sequence imls) throws InterruptedException, ExecutionException {
+	public static HazardResult hazardCurve(HazardModel model, CalcConfig config, Site site)
+			throws InterruptedException, ExecutionException {
 
 		AsyncList<HazardCurveSet> curveSetCollector = AsyncList.createWithCapacity(model.size());
+		Map<Imt, ArrayXY_Sequence> modelCurves = config.logModelCurves();
 
 		for (SourceSet<? extends Source> sourceSet : model) {
 
@@ -53,12 +58,13 @@ public class Calcs {
 				if (inputs.isEmpty()) continue; // all sources out of range
 
 				AsyncList<ClusterGroundMotions> groundMotions = toClusterGroundMotions(inputs,
-					clusterSourceSet, imt);
+					clusterSourceSet, config.imts);
 
-				AsyncList<ClusterCurves> clusterCurves = toClusterCurves(groundMotions, imls);
+				AsyncList<ClusterCurves> clusterCurves = toClusterCurves(groundMotions,
+					modelCurves, config.sigmaModel, config.truncationLevel);
 
 				ListenableFuture<HazardCurveSet> curveSet = toHazardCurveSet(clusterCurves,
-					clusterSourceSet, imls);
+					clusterSourceSet, modelCurves);
 
 				curveSetCollector.add(curveSet);
 
@@ -68,25 +74,26 @@ public class Calcs {
 				if (inputs.isEmpty()) continue; // all sources out of range
 
 				AsyncList<HazardGroundMotions> groundMotions = toGroundMotions(inputs, sourceSet,
-					imt);
+					config.imts);
 
-				AsyncList<HazardCurves> hazardCurves = toHazardCurves(groundMotions, imls);
+				AsyncList<HazardCurves> hazardCurves = toHazardCurves(groundMotions, modelCurves,
+					config.sigmaModel, config.truncationLevel);
 
 				ListenableFuture<HazardCurveSet> curveSet = toHazardCurveSet(hazardCurves,
-					sourceSet, imls);
+					sourceSet, modelCurves);
 
 				curveSetCollector.add(curveSet);
 
 			}
 		}
-
-		ListenableFuture<HazardResult> futureResult = toHazardResult(curveSetCollector, imls);
-
+		
+		ListenableFuture<HazardResult> futureResult = toHazardResult(curveSetCollector, modelCurves);
+		
 		return futureResult.get();
 
-//		System.out.println(sw.stop().elapsed(TimeUnit.MILLISECONDS));
-//
-//		return result;
+		// System.out.println(sw.stop().elapsed(TimeUnit.MILLISECONDS));
+		//
+		// return result;
 
 		// TODO move timers
 		// } catch (Exception e) {
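
The entry point accordingly narrows to (model, config, site): Imts, IMLs, the
sigma model, and truncation level all travel inside CalcConfig, and the result
carries one total curve per Imt. A sketch against the patched signatures, with
package locations assumed from the diffs; model and site construction lie
outside this patch, so both are left as parameters:

    import java.util.Map;

    import org.opensha.calc.CalcConfig;
    import org.opensha.calc.Calcs;
    import org.opensha.calc.HazardResult;
    import org.opensha.calc.Site;
    import org.opensha.data.ArrayXY_Sequence;
    import org.opensha.eq.model.HazardModel;
    import org.opensha.gmm.Imt;

    class HazardCurveSketch {

        // Compile-time sketch only: callers supply a loaded model and site.
        static void printTotalCurves(HazardModel model, CalcConfig config, Site site)
                throws Exception {
            HazardResult result = Calcs.hazardCurve(model, config, site);
            for (Map.Entry<Imt, ArrayXY_Sequence> entry : result.curves().entrySet()) {
                System.out.println(entry.getKey() + ": " + entry.getValue());
            }
        }
    }
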
diff --git a/src/org/opensha/calc/ClusterCurves.java b/src/org/opensha/calc/ClusterCurves.java
index 76294ccc8..df572ed07 100644
--- a/src/org/opensha/calc/ClusterCurves.java
+++ b/src/org/opensha/calc/ClusterCurves.java
@@ -7,6 +7,10 @@ import java.util.Map;
 
 import org.opensha.data.ArrayXY_Sequence;
 import org.opensha.gmm.Gmm;
+import org.opensha.gmm.Imt;
+
+import com.google.common.collect.ArrayTable;
+import com.google.common.collect.Table;
 
 /**
  * Container class for the combined hazard curves derived from the individual
@@ -19,10 +23,10 @@ import org.opensha.gmm.Gmm;
 final class ClusterCurves {
 
 	final ClusterGroundMotions clusterGroundMotions;
-	final Map<Gmm, ArrayXY_Sequence> curveMap;
+	final Map<Imt, Map<Gmm, ArrayXY_Sequence>> curveMap;
 
 	private ClusterCurves(ClusterGroundMotions clusterGroundMotions,
-		Map<Gmm, ArrayXY_Sequence> curveMap) {
+			Map<Imt, Map<Gmm, ArrayXY_Sequence>> curveMap) {
 		this.clusterGroundMotions = clusterGroundMotions;
 		this.curveMap = curveMap;
 	}
@@ -37,15 +41,21 @@ final class ClusterCurves {
 		private boolean built = false;
 
 		private final ClusterGroundMotions clusterGroundMotions;
-		private final Map<Gmm, ArrayXY_Sequence> curveMap;
+		private final Map<Imt, Map<Gmm, ArrayXY_Sequence>> curveMap;
 
 		private Builder(ClusterGroundMotions clusterGroundMotions) {
 			this.clusterGroundMotions = clusterGroundMotions;
-			curveMap = new EnumMap<>(Gmm.class);
+			// look at first HazardGM to determine curve table dimensions
+			HazardGroundMotions model = clusterGroundMotions.get(0);
+			curveMap = new EnumMap<>(Imt.class);
+			for (Imt imt : model.means.keySet()) {
+				Map<Gmm, ArrayXY_Sequence> gmmMap = new EnumMap<>(Gmm.class);
+				curveMap.put(imt, gmmMap);
+			}
 		}
 
-		Builder addCurve(Gmm gmm, ArrayXY_Sequence curve) {
-			curveMap.put(gmm, curve);
+		Builder addCurve(Imt imt, Gmm gmm, ArrayXY_Sequence curve) {
+			curveMap.get(imt).put(gmm, curve);
 			return this;
 		}
 
diff --git a/src/org/opensha/calc/Deagg.java b/src/org/opensha/calc/Deagg.java
index c248695d0..140614446 100644
--- a/src/org/opensha/calc/Deagg.java
+++ b/src/org/opensha/calc/Deagg.java
@@ -115,7 +115,7 @@ class Deagg {
 				Set<HazardCurveSet> hazardCurveSets = hazard.sourceSetMap.get(type);
 				switch (type) {
 					case FAULT:
-						processFaultSources(hazardCurveSets);
+//						processFaultSources(hazardCurveSets);
 				}
 			}
 		}
@@ -125,15 +125,15 @@ class Deagg {
 		 * gross contribution of each source set - the gross contribution of
 		 * each Gmm (could be further subdivision of above) -
 		 */
-		private void processFaultSources(Set<HazardCurveSet> curveSets) {
-			for (HazardCurveSet curveSet : curveSets) {
-				for (HazardGroundMotions groundMotions : curveSet.hazardGroundMotionsList) {
-					for (Gmm gmm : groundMotions.means.keySet()) {
-
-					}
-				}
-			}
-		}
+//		private void processFaultSources(Set<HazardCurveSet> curveSets) {
+//			for (HazardCurveSet curveSet : curveSets) {
+//				for (HazardGroundMotions groundMotions : curveSet.hazardGroundMotionsList) {
+//					for (Gmm gmm : groundMotions.means.keySet()) {
+//
+//					}
+//				}
+//			}
+//		}
 
 	}
 
diff --git a/src/org/opensha/calc/HazardCurveSet.java b/src/org/opensha/calc/HazardCurveSet.java
index f9cafd2c5..e686257ea 100644
--- a/src/org/opensha/calc/HazardCurveSet.java
+++ b/src/org/opensha/calc/HazardCurveSet.java
@@ -9,12 +9,14 @@ import java.util.EnumMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
+import java.util.Set;
 
 import org.opensha.data.ArrayXY_Sequence;
 import org.opensha.eq.model.Source;
 import org.opensha.eq.model.SourceSet;
 import org.opensha.eq.model.SourceType;
 import org.opensha.gmm.Gmm;
+import org.opensha.gmm.Imt;
 
 /**
  * Container class for hazard curves derived from a {@code SourceSet}. Class
@@ -43,23 +45,24 @@ final class HazardCurveSet {
 	final SourceSet<? extends Source> sourceSet;
 	final List<HazardGroundMotions> hazardGroundMotionsList;
 	final List<ClusterGroundMotions> clusterGroundMotionsList;
-	final Map<Gmm, ArrayXY_Sequence> gmmCurveMap;
-	final ArrayXY_Sequence totalCurve;
+	final Map<Imt, Map<Gmm, ArrayXY_Sequence>> curveMap;
+	final Map<Imt, ArrayXY_Sequence> totalCurves;
 
 	private HazardCurveSet(SourceSet<? extends Source> sourceSet,
 		List<HazardGroundMotions> hazardGroundMotionsList,
 		List<ClusterGroundMotions> clusterGroundMotionsList,
-		Map<Gmm, ArrayXY_Sequence> gmmCurveMap, ArrayXY_Sequence totalCurve) {
+		Map<Imt, Map<Gmm, ArrayXY_Sequence>> curveMap, Map<Imt, ArrayXY_Sequence> totalCurves) {
 
 		this.sourceSet = sourceSet;
 		this.hazardGroundMotionsList = hazardGroundMotionsList;
 		this.clusterGroundMotionsList = clusterGroundMotionsList;
-		this.gmmCurveMap = gmmCurveMap;
-		this.totalCurve = totalCurve;
+		this.curveMap = curveMap;
+		this.totalCurves = totalCurves;
 	}
 
-	static Builder builder(SourceSet<? extends Source> sourceSet, ArrayXY_Sequence modelCurve) {
-		return new Builder(sourceSet, modelCurve);
+	static Builder builder(SourceSet<? extends Source> sourceSet,
+			Map<Imt, ArrayXY_Sequence> modelCurves) {
+		return new Builder(sourceSet, modelCurves);
 	}
 
 	static class Builder {
@@ -67,17 +70,19 @@ final class HazardCurveSet {
 		private static final String ID = "HazardCurveSet.Builder";
 		private boolean built = false;
 
-		private final ArrayXY_Sequence modelCurve;
+		private final Map<Imt, ArrayXY_Sequence> modelCurves;
 
 		private final SourceSet<? extends Source> sourceSet;
 		private final List<HazardGroundMotions> hazardGroundMotionsList;
 		private final List<ClusterGroundMotions> clusterGroundMotionsList;
-		private final Map<Gmm, ArrayXY_Sequence> gmmCurveMap;
-		private ArrayXY_Sequence totalCurve;
+		private final Map<Imt, Map<Gmm, ArrayXY_Sequence>> curveMap;
+		private final Map<Imt, ArrayXY_Sequence> totalCurves;
+
+		private Builder(SourceSet<? extends Source> sourceSet,
+			Map<Imt, ArrayXY_Sequence> modelCurves) {
 
-		private Builder(SourceSet<? extends Source> sourceSet, ArrayXY_Sequence modelCurve) {
 			this.sourceSet = sourceSet;
-			this.modelCurve = modelCurve;
+			this.modelCurves = modelCurves;
 			if (sourceSet.type() == SourceType.CLUSTER) {
 				clusterGroundMotionsList = new ArrayList<>();
 				hazardGroundMotionsList = null;
@@ -85,10 +90,18 @@ final class HazardCurveSet {
 				hazardGroundMotionsList = new ArrayList<>();
 				clusterGroundMotionsList = null;
 			}
-			gmmCurveMap = new EnumMap<>(Gmm.class);
-			for (Gmm gmm : sourceSet.groundMotionModels().gmms()) {
-				gmmCurveMap.put(gmm, ArrayXY_Sequence.copyOf(modelCurve).clear());
+			Set<Gmm> gmms = sourceSet.groundMotionModels().gmms();
+			Set<Imt> imts = modelCurves.keySet();
+			curveMap = new EnumMap<>(Imt.class);
+			for (Imt imt : imts) {
+				Map<Gmm, ArrayXY_Sequence> gmmMap = new EnumMap<>(Gmm.class);
+				curveMap.put(imt, gmmMap);
+				for (Gmm gmm : gmms) {
+					ArrayXY_Sequence emptyCurve = copyOf(modelCurves.get(imt)).clear();
+					gmmMap.put(gmm, emptyCurve);
+				}
 			}
+			totalCurves = new EnumMap<>(Imt.class);
 		}
 
 		Builder addCurves(HazardCurves hazardCurves) {
@@ -96,11 +109,13 @@ final class HazardCurveSet {
 			hazardGroundMotionsList.add(hazardCurves.groundMotions);
 			double distance = hazardCurves.groundMotions.inputs.minDistance;
 			Map<Gmm, Double> gmmWeightMap = sourceSet.groundMotionModels().gmmWeightMap(distance);
-			for (Entry<Gmm, Double> entry : gmmWeightMap.entrySet()) {
-				// copy so as to not mutate incoming curves
-				ArrayXY_Sequence copy = copyOf(hazardCurves.curveMap.get(entry.getKey()));
-				copy.multiply(entry.getValue());
-				gmmCurveMap.get(entry.getKey()).add(copy);
+			for (Entry<Imt, Map<Gmm, ArrayXY_Sequence>> imtEntry : hazardCurves.curveMap.entrySet()) {
+				Map<Gmm, ArrayXY_Sequence> builderCurveMap = curveMap.get(imtEntry.getKey());
+				for (Entry<Gmm, ArrayXY_Sequence> gmmEntry : imtEntry.getValue().entrySet()) {
+					Gmm gmm = gmmEntry.getKey();
+					double gmmWeight = gmmWeightMap.get(gmm);
+					builderCurveMap.get(gmm).add(copyOf(gmmEntry.getValue()).multiply(gmmWeight));
+				}
 			}
 			return this;
 		}
@@ -109,15 +124,17 @@ final class HazardCurveSet {
 			checkNotNull(clusterGroundMotionsList, "%s was not initialized with a ClusterSourceSet",
 				ID);
 			clusterGroundMotionsList.add(clusterCurves.clusterGroundMotions);
-			double weight = clusterCurves.clusterGroundMotions.parent.weight();
+			double clusterWeight = clusterCurves.clusterGroundMotions.parent.weight();
 			double distance = clusterCurves.clusterGroundMotions.minDistance;
 			Map<Gmm, Double> gmmWeightMap = sourceSet.groundMotionModels().gmmWeightMap(distance);
-			for (Entry<Gmm, Double> entry : gmmWeightMap.entrySet()) {
-				// copy so as to not mutate incoming curves
-				ArrayXY_Sequence copy = copyOf(clusterCurves.curveMap.get(entry.getKey()));
-				// scale by cluster and gmm wieght
-				copy.multiply(weight).multiply(entry.getValue());
-				gmmCurveMap.get(entry.getKey()).add(copy);
+			for (Entry<Imt, Map<Gmm, ArrayXY_Sequence>> imtEntry : clusterCurves.curveMap
+				.entrySet()) {
+				Map<Gmm, ArrayXY_Sequence> builderCurveMap = curveMap.get(imtEntry.getKey());
+				for (Entry<Gmm, ArrayXY_Sequence> gmmEntry : imtEntry.getValue().entrySet()) {
+					Gmm gmm = gmmEntry.getKey();
+					double totalWeight = gmmWeightMap.get(gmm) * clusterWeight;
+					builderCurveMap.get(gmm).add(copyOf(gmmEntry.getValue()).multiply(totalWeight));
+				}
 			}
 			return this;
 		}
@@ -127,20 +144,24 @@ final class HazardCurveSet {
 			built = true;
 			computeFinal();
 			return new HazardCurveSet(sourceSet, hazardGroundMotionsList, clusterGroundMotionsList,
-				gmmCurveMap, totalCurve);
+				curveMap, totalCurves);
 		}
 
 		/*
 		 * Create the final weighted (Gmm) combined curve. The Gmm curves were
-		 * scaled by their weights in an earlier step (TODO not sure this is true).
+		 * scaled by their weights while building (above).
 		 */
 		private void computeFinal() {
-			ArrayXY_Sequence totalCurve = ArrayXY_Sequence.copyOf(modelCurve).clear();
 			double sourceSetWeight = sourceSet.weight();
-			for (ArrayXY_Sequence curve : gmmCurveMap.values()) {
-				totalCurve.add(copyOf(curve).multiply(sourceSetWeight));
+			for (Entry<Imt, Map<Gmm, ArrayXY_Sequence>> entry : curveMap.entrySet()) {
+				Imt imt = entry.getKey();
+				ArrayXY_Sequence totalCurve = copyOf(modelCurves.get(imt)).clear();
+				for (ArrayXY_Sequence curve : entry.getValue().values()) {
+					totalCurve.add(curve);
+				}
+				totalCurve.multiply(sourceSetWeight);
+				totalCurves.put(imt, totalCurve);
 			}
-			this.totalCurve = totalCurve;
 		}
 	}
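
Note that the reworked computeFinal() moves the SourceSet weight outside the
sum: because scaling distributes over addition, summing the (already
Gmm-weighted) curves and scaling once is equivalent to scaling each curve
before adding. A toy check of the identity with plain arrays:

    import java.util.Arrays;

    class WeightSketch {

        public static void main(String[] args) {
            double sourceSetWeight = 0.8;
            double[] gmmA = { 0.10, 0.05 }; // already Gmm-weighted curves
            double[] gmmB = { 0.20, 0.08 };

            // Sum first, scale once (new computeFinal() behavior)...
            double[] total = new double[2];
            for (int i = 0; i < total.length; i++) {
                total[i] = (gmmA[i] + gmmB[i]) * sourceSetWeight;
            }

            // ...equals scaling each curve, then summing (old behavior).
            double[] check = new double[2];
            for (int i = 0; i < check.length; i++) {
                check[i] = gmmA[i] * sourceSetWeight + gmmB[i] * sourceSetWeight;
            }

            System.out.println(Arrays.toString(total)); // ~[0.24, 0.104]
            System.out.println(Arrays.toString(check)); // ~[0.24, 0.104]
        }
    }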
 
diff --git a/src/org/opensha/calc/HazardCurves.java b/src/org/opensha/calc/HazardCurves.java
index 9fb1cc0d4..bc33695a6 100644
--- a/src/org/opensha/calc/HazardCurves.java
+++ b/src/org/opensha/calc/HazardCurves.java
@@ -7,22 +7,24 @@ import java.util.Map;
 
 import org.opensha.data.ArrayXY_Sequence;
 import org.opensha.gmm.Gmm;
+import org.opensha.gmm.Imt;
 
 /**
  * Container class for the combined hazard curves derived from the
  * {@code Rupture}s in an individual {@code Source}, one for each
- * {@code GroundMotionModel} used. The curves will have been scaled by the
- * associated Mfd or rupture weights, but not by {@code GroundMotionModel}
- * weights.
+ * {@code GroundMotionModel} and {@code Imt} of interest. The curves will have
+ * been scaled by the associated Mfd or rupture weights, but not by
+ * {@code GroundMotionModel} weights.
  * 
  * @author Peter Powers
  */
 final class HazardCurves {
 
 	final HazardGroundMotions groundMotions;
-	final Map<Gmm, ArrayXY_Sequence> curveMap;
+	final Map<Imt, Map<Gmm, ArrayXY_Sequence>> curveMap;
 
-	private HazardCurves(HazardGroundMotions groundMotions, Map<Gmm, ArrayXY_Sequence> curveMap) {
+	private HazardCurves(HazardGroundMotions groundMotions,
+		Map<Imt, Map<Gmm, ArrayXY_Sequence>> curveMap) {
 		this.groundMotions = groundMotions;
 		this.curveMap = curveMap;
 	}
@@ -37,15 +39,19 @@ final class HazardCurves {
 		private boolean built = false;
 
 		private final HazardGroundMotions groundMotions;
-		private final Map<Gmm, ArrayXY_Sequence> curveMap;
+		private final Map<Imt, Map<Gmm, ArrayXY_Sequence>> curveMap;
 
 		private Builder(HazardGroundMotions groundMotions) {
 			this.groundMotions = groundMotions;
-			curveMap = new EnumMap<>(Gmm.class);
+			curveMap = new EnumMap<>(Imt.class);
+			for (Imt imt : groundMotions.means.keySet()) {
+				Map<Gmm, ArrayXY_Sequence> gmmMap = new EnumMap<>(Gmm.class);
+				curveMap.put(imt, gmmMap);
+			}
 		}
 
-		Builder addCurve(Gmm gmm, ArrayXY_Sequence curve) {
-			curveMap.put(gmm, curve);
+		Builder addCurve(Imt imt, Gmm gmm, ArrayXY_Sequence curve) {
+			curveMap.get(imt).put(gmm, curve);
 			return this;
 		}
 
diff --git a/src/org/opensha/calc/HazardGroundMotions.java b/src/org/opensha/calc/HazardGroundMotions.java
index eb71bb0d1..1ae02afa5 100644
--- a/src/org/opensha/calc/HazardGroundMotions.java
+++ b/src/org/opensha/calc/HazardGroundMotions.java
@@ -5,14 +5,16 @@ import static com.google.common.base.Preconditions.checkNotNull;
 import static com.google.common.base.Preconditions.checkState;
 import static com.google.common.base.StandardSystemProperty.LINE_SEPARATOR;
 
+import java.util.EnumMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Map.Entry;
 import java.util.Set;
 
 import org.opensha.gmm.Gmm;
+import org.opensha.gmm.Imt;
 import org.opensha.gmm.ScalarGroundMotion;
 
-import com.google.common.collect.Maps;
 import com.google.common.primitives.Doubles;
 
 /**
@@ -25,22 +27,22 @@ final class HazardGroundMotions {
 
 	/*
 	 * NOTE the inputList supplied to Builder will be immutable but the mean and
-	 * sigma list maps are not; builder backs mean and sigma lists with
+	 * sigma list tables are not; builder backs mean and sigma lists with
 	 * double[].
 	 * 
 	 * Can't use Multimaps.newListMultimap(map, factory) backed with
-	 * Doubles.asList(double[]) because list must be empty to start with
-	 * and growable.
+	 * Doubles.asList(double[]) because list must be empty to start with and
+	 * growable.
 	 * 
 	 * http://code.google.com/p/guava-libraries/issues/detail?id=1827
 	 */
 
 	final HazardInputs inputs;
-	final Map<Gmm, List<Double>> means;
-	final Map<Gmm, List<Double>> sigmas;
+	final Map<Imt, Map<Gmm, List<Double>>> means;
+	final Map<Imt, Map<Gmm, List<Double>>> sigmas;
 
-	private HazardGroundMotions(HazardInputs inputs, Map<Gmm, List<Double>> means,
-		Map<Gmm, List<Double>> sigmas) {
+	private HazardGroundMotions(HazardInputs inputs, Map<Imt, Map<Gmm, List<Double>>>  means,
+			Map<Imt, Map<Gmm, List<Double>>> sigmas) {
 		this.inputs = inputs;
 		this.means = means;
 		this.sigmas = sigmas;
@@ -53,18 +55,24 @@ final class HazardGroundMotions {
 		for (int i = 0; i < inputs.size(); i++) {
 			sb.append(inputs.get(i));
 			sb.append(" ");
-			for (Gmm gmm : means.keySet()) {
-				sb.append(gmm.name()).append(" ");
-				sb.append(String.format("%.3f", means.get(gmm).get(i))).append(" ");
-				sb.append(String.format("%.3f", sigmas.get(gmm).get(i))).append(" ");
+			for (Entry<Imt, Map<Gmm, List<Double>>> imtEntry : means.entrySet()) {
+				Imt imt = imtEntry.getKey();
+				sb.append(imt.name()).append(" [");
+				for (Entry<Gmm, List<Double>> gmmEntry : imtEntry.getValue().entrySet()) {
+					Gmm gmm = gmmEntry.getKey();
+					sb.append(gmm.name()).append(" ");
+					sb.append(String.format("μ=%.3f", gmmEntry.getValue().get(i))).append(" ");
+					sb.append(String.format("σ=%.3f", sigmas.get(imt).get(gmm).get(i))).append(" ");
+				}
+				sb.append("] ");
 			}
 			sb.append(LINE_SEPARATOR.value());
 		}
 		return sb.toString();
 	}
 
-	static Builder builder(HazardInputs inputs, Set<Gmm> gmms) {
-		return new Builder(inputs, gmms);
+	static Builder builder(HazardInputs inputs, Set<Gmm> gmms, Set<Imt> imts) {
+		return new Builder(inputs, gmms, imts);
 	}
 
 	static class Builder {
@@ -75,22 +83,22 @@ final class HazardGroundMotions {
 		private int addCount = 0;
 
 		private final HazardInputs inputs;
-		private final Map<Gmm, List<Double>> means;
-		private final Map<Gmm, List<Double>> sigmas;
+		private final Map<Imt, Map<Gmm, List<Double>>> means;
+		private final Map<Imt, Map<Gmm, List<Double>>> sigmas;
 
-		private Builder(HazardInputs inputs, Set<Gmm> gmms) {
+		private Builder(HazardInputs inputs, Set<Gmm> gmms, Set<Imt> imts) {
 			checkArgument(checkNotNull(inputs).size() > 0);
 			checkArgument(checkNotNull(gmms).size() > 0);
 			this.inputs = inputs;
-			means = initValueMap(gmms, inputs.size());
-			sigmas = initValueMap(gmms, inputs.size());
-			size = gmms.size() * inputs.size();
+			means = initValueTable(gmms, imts, inputs.size());
+			sigmas = initValueTable(gmms, imts, inputs.size());
+			size = gmms.size() * imts.size() * inputs.size();
 		}
 
-		Builder add(Gmm gmm, ScalarGroundMotion sgm, int index) {
+		Builder add(Gmm gmm, Imt imt, ScalarGroundMotion sgm, int index) {
 			checkState(addCount < size, "This %s instance is already full", ID);
-			means.get(gmm).set(index, sgm.mean());
-			sigmas.get(gmm).set(index, sgm.sigma());
+			means.get(imt).get(gmm).set(index, sgm.mean());
+			sigmas.get(imt).get(gmm).set(index, sgm.sigma());
 			addCount++;
 			return this;
 		}
@@ -102,12 +110,16 @@ final class HazardGroundMotions {
 			return new HazardGroundMotions(inputs, means, sigmas);
 		}
 
-		static Map<Gmm, List<Double>> initValueMap(Set<Gmm> gmms, int size) {
-			Map<Gmm, List<Double>> map = Maps.newEnumMap(Gmm.class);
-			for (Gmm gmm : gmms) {
-				map.put(gmm, Doubles.asList(new double[size]));
+		static Map<Imt, Map<Gmm, List<Double>>> initValueTable(Set<Gmm> gmms, Set<Imt> imts, int size) {
+			Map<Imt, Map<Gmm, List<Double>>> imtMap = new EnumMap<>(Imt.class);
+			for (Imt imt : imts) {
+				Map<Gmm, List<Double>> gmmMap = new EnumMap<>(Gmm.class);
+				for (Gmm gmm : gmms) {
+					gmmMap.put(gmm, Doubles.asList(new double[size]));
+				}
+				imtMap.put(imt, gmmMap);
 			}
-			return map;
+			return imtMap;
 		}
 
 	}
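
The note at the top of this class explains the choice of backing store:
Doubles.asList wraps a double[] in a fixed-size, writable List view, so
Builder.add() can set() values at any index but the lists can never grow,
which is what rules out Multimaps.newListMultimap here. A minimal
demonstration:

    import java.util.List;

    import com.google.common.primitives.Doubles;

    class BackingListSketch {

        public static void main(String[] args) {
            double[] backing = new double[3];
            List<Double> view = Doubles.asList(backing); // fixed-size, writable

            view.set(1, 5.5);               // writes through to the array
            System.out.println(backing[1]); // 5.5

            // view.add(1.0) would throw UnsupportedOperationException;
            // the view is not growable, hence the builder presizes the
            // arrays to inputs.size() and uses set() rather than add().
        }
    }
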
diff --git a/src/org/opensha/calc/HazardResult.java b/src/org/opensha/calc/HazardResult.java
index 336088165..67039957e 100644
--- a/src/org/opensha/calc/HazardResult.java
+++ b/src/org/opensha/calc/HazardResult.java
@@ -4,8 +4,13 @@ import static com.google.common.base.Preconditions.checkState;
 import static org.opensha.data.ArrayXY_Sequence.copyOf;
 import static org.opensha.eq.model.SourceType.CLUSTER;
 
+import java.util.EnumMap;
+import java.util.Map;
+import java.util.Map.Entry;
+
 import org.opensha.data.ArrayXY_Sequence;
 import org.opensha.eq.model.SourceType;
+import org.opensha.gmm.Imt;
 
 import com.google.common.base.StandardSystemProperty;
 import com.google.common.collect.ImmutableSetMultimap;
@@ -19,12 +24,12 @@ import com.google.common.collect.SetMultimap;
 public final class HazardResult {
 
 	final SetMultimap<SourceType, HazardCurveSet> sourceSetMap;
-	final ArrayXY_Sequence totalCurve;
+	final Map<Imt, ArrayXY_Sequence> totalCurves;
 
 	private HazardResult(SetMultimap<SourceType, HazardCurveSet> sourceSetMap,
-		ArrayXY_Sequence totalCurve) {
+		Map<Imt, ArrayXY_Sequence> totalCurves) {
 		this.sourceSetMap = sourceSetMap;
-		this.totalCurve = totalCurve;
+		this.totalCurves = totalCurves;
 	}
 
 	@Override public String toString() {
@@ -59,7 +64,7 @@ public final class HazardResult {
 					// sb.append(curveSet.clusterGroundMotionsList);
 
 				} else {
-//					 sb.append(curveSet.hazardGroundMotionsList);
+					// sb.append(curveSet.hazardGroundMotionsList);
 				}
 			}
 		}
@@ -69,12 +74,12 @@ public final class HazardResult {
 	/**
 	 * The total mean hazard curve.
 	 */
-	public ArrayXY_Sequence curve() {
-		return totalCurve;
+	public Map<Imt, ArrayXY_Sequence> curves() {
+		return totalCurves;
 	}
-	
-	static Builder builder(ArrayXY_Sequence modelCurve) {
-		return new Builder(modelCurve);
+
+	static Builder builder(Map<Imt, ArrayXY_Sequence> modelCurves) {
+		return new Builder(modelCurves);
 	}
 
 	static class Builder {
@@ -83,22 +88,27 @@ public final class HazardResult {
 		private boolean built = false;
 
 		private ImmutableSetMultimap.Builder<SourceType, HazardCurveSet> resultMapBuilder;
-		private ArrayXY_Sequence totalCurve;
+		private Map<Imt, ArrayXY_Sequence> totalCurves;
 
-		private Builder(ArrayXY_Sequence modelCurve) {
-			totalCurve = copyOf(modelCurve).clear();
+		private Builder(Map<Imt, ArrayXY_Sequence> modelCurves) {
+			totalCurves = new EnumMap<>(Imt.class);
+			for (Entry<Imt, ArrayXY_Sequence> entry : modelCurves.entrySet()) {
+				totalCurves.put(entry.getKey(), copyOf(entry.getValue()).clear());
+			}
 			resultMapBuilder = ImmutableSetMultimap.builder();
 		}
 
 		Builder addCurveSet(HazardCurveSet curveSet) {
 			resultMapBuilder.put(curveSet.sourceSet.type(), curveSet);
-			totalCurve.add(curveSet.totalCurve);
+			for (Entry<Imt, ArrayXY_Sequence> entry : curveSet.totalCurves.entrySet()) {
+				totalCurves.get(entry.getKey()).add(entry.getValue());
+			}
 			return this;
 		}
 
 		HazardResult build() {
 			checkState(!built, "This %s instance has already been used", ID);
-			return new HazardResult(resultMapBuilder.build(), totalCurve);
+			return new HazardResult(resultMapBuilder.build(), totalCurves);
 		}
 
 	}
diff --git a/src/org/opensha/calc/SigmaModel.java b/src/org/opensha/calc/SigmaModel.java
index 91b77798a..8733d285f 100644
--- a/src/org/opensha/calc/SigmaModel.java
+++ b/src/org/opensha/calc/SigmaModel.java
@@ -120,7 +120,7 @@ public enum SigmaModel {
 	 * 'clamps' on ground motions that have historically been applied in the
 	 * CEUS NSHM due to sometimes unreasonably high ground motions implied by
 	 * {@code μ + 3σ}. Model imposes one-sided (upper) truncation at
-	 * {@code μ + 3σ} if clamp is not exceeded.
+	 * {@code μ + nσ} if clamp is not exceeded.
 	 */
 	NSHM_CEUS_MAX_INTENSITY {
 		@Override double exceedance(double μ, double σ, double n, Imt imt, double value) {
diff --git a/src/org/opensha/calc/SiteSet.java b/src/org/opensha/calc/SiteSet.java
index a1471dca9..9953ae4b8 100644
--- a/src/org/opensha/calc/SiteSet.java
+++ b/src/org/opensha/calc/SiteSet.java
@@ -49,7 +49,6 @@ import com.google.gson.annotations.Expose;
  */
 public final class SiteSet implements Iterable<Site> {
 
-	@Expose(deserialize = false)
 	final private GriddedRegion region;
 	final private Builder builder;
 	final private List<Site> sites;
diff --git a/src/org/opensha/calc/Transforms.java b/src/org/opensha/calc/Transforms.java
index 0f9cb81a9..e06ae5f24 100644
--- a/src/org/opensha/calc/Transforms.java
+++ b/src/org/opensha/calc/Transforms.java
@@ -23,10 +23,13 @@ import org.opensha.eq.model.SourceSet;
 import org.opensha.gmm.Gmm;
 import org.opensha.gmm.GmmInput;
 import org.opensha.gmm.GroundMotionModel;
+import org.opensha.gmm.Imt;
 
 import com.google.common.base.Function;
 import com.google.common.collect.ListMultimap;
 import com.google.common.collect.MultimapBuilder;
+import com.google.common.collect.Table;
+import com.google.common.collect.Table.Cell;
 
 /**
  * Factory class for creating data transforms.
@@ -48,7 +51,7 @@ final class Transforms {
 	 * Return a Function that transforms HazardInputs to HazardGroundMotions.
 	 */
 	static Function<HazardInputs, HazardGroundMotions> inputsToGroundMotions(
-			Map<Gmm, GroundMotionModel> models) {
+			Table<Gmm, Imt, GroundMotionModel> models) {
 		return new InputsToGroundMotions(models);
 	}
 
@@ -56,8 +59,8 @@ final class Transforms {
 	 * Return a Function that transforms HazardGroundMotions to HazardCurves.
 	 */
 	static Function<HazardGroundMotions, HazardCurves> groundMotionsToCurves(
-			ArrayXY_Sequence modelCurve) {
-		return new GroundMotionsToCurves(modelCurve);
+			Map<Imt, ArrayXY_Sequence> modelCurves, SigmaModel sigmaModel, double truncLevel) {
+		return new GroundMotionsToCurves(modelCurves, sigmaModel, truncLevel);
 	}
 
 	/**
@@ -65,8 +68,8 @@ final class Transforms {
 	 * HazardCurveSet.
 	 */
 	static Function<List<HazardCurves>, HazardCurveSet> curveConsolidator(
-			SourceSet<? extends Source> sourceSet, ArrayXY_Sequence modelCurve) {
-		return new CurveConsolidator(sourceSet, modelCurve);
+			SourceSet<? extends Source> sourceSet, Map<Imt, ArrayXY_Sequence> modelCurves) {
+		return new CurveConsolidator(sourceSet, modelCurves);
 	}
 
 	/**
@@ -74,8 +77,8 @@ final class Transforms {
 	 * HazardResult.
 	 */
 	static Function<List<HazardCurveSet>, HazardResult> curveSetConsolidator(
-			ArrayXY_Sequence modelCurve) {
-		return new CurveSetConsolidator(modelCurve);
+			Map<Imt, ArrayXY_Sequence> modelCurves) {
+		return new CurveSetConsolidator(modelCurves);
 	}
 
 	/**
@@ -91,7 +94,7 @@ final class Transforms {
 	 * in a ClusterSource to a List of HazardGroundMotions.
 	 */
 	static Function<ClusterInputs, ClusterGroundMotions> clusterInputsToGroundMotions(
-			Map<Gmm, GroundMotionModel> models) {
+			Table<Gmm, Imt, GroundMotionModel> models) {
 		return new ClusterInputsToGroundMotions(models);
 	}
 
@@ -100,8 +103,8 @@ final class Transforms {
 	 * ClusterCurves.
 	 */
 	static Function<ClusterGroundMotions, ClusterCurves> clusterGroundMotionsToCurves(
-			ArrayXY_Sequence modelCurve) {
-		return new ClusterGroundMotionsToCurves(modelCurve);
+			Map<Imt, ArrayXY_Sequence> modelCurves, SigmaModel sigmaModel, double truncLevel) {
+		return new ClusterGroundMotionsToCurves(modelCurves, sigmaModel, truncLevel);
 	}
 
 	/**
@@ -109,8 +112,8 @@ final class Transforms {
 	 * HazardCurveSet.
 	 */
 	static Function<List<ClusterCurves>, HazardCurveSet> clusterCurveConsolidator(
-			ClusterSourceSet clusterSourceSet, ArrayXY_Sequence modelCurve) {
-		return new ClusterCurveConsolidator(clusterSourceSet, modelCurve);
+			ClusterSourceSet clusterSourceSet, Map<Imt, ArrayXY_Sequence> modelCurves) {
+		return new ClusterCurveConsolidator(clusterSourceSet, modelCurves);
 	}
 
 	private static class SourceToInputs implements Function<Source, HazardInputs> {
@@ -159,61 +162,80 @@ final class Transforms {
 	private static class InputsToGroundMotions implements
 			Function<HazardInputs, HazardGroundMotions> {
 
-		private final Map<Gmm, GroundMotionModel> gmmInstances;
+		private final Table<Gmm, Imt, GroundMotionModel> gmmInstances;
 
-		InputsToGroundMotions(Map<Gmm, GroundMotionModel> gmmInstances) {
+		InputsToGroundMotions(Table<Gmm, Imt, GroundMotionModel> gmmInstances) {
 			this.gmmInstances = gmmInstances;
 		}
 
 		@Override public HazardGroundMotions apply(HazardInputs hazardInputs) {
 
-			HazardGroundMotions.Builder gmBuilder = HazardGroundMotions.builder(hazardInputs,
-				gmmInstances.keySet());
+			HazardGroundMotions.Builder builder = HazardGroundMotions.builder(hazardInputs,
+				gmmInstances.rowKeySet(), gmmInstances.columnKeySet());
 
-			for (Entry<Gmm, GroundMotionModel> entry : gmmInstances.entrySet()) {
+			for (Cell<Gmm, Imt, GroundMotionModel> cell : gmmInstances.cellSet()) {
+				Gmm rowKey = cell.getRowKey();
+				Imt colKey = cell.getColumnKey();
+				GroundMotionModel gmm = cell.getValue();
 				int inputIndex = 0;
 				for (GmmInput gmmInput : hazardInputs) {
-					gmBuilder.add(entry.getKey(), entry.getValue().calc(gmmInput), inputIndex++);
+					builder.add(rowKey, colKey, gmm.calc(gmmInput), inputIndex++);
 				}
 			}
-			HazardGroundMotions results = gmBuilder.build();
-			return results;
+			return builder.build();
 		}
 	}
 
-	/*
-	 * Transforms HazardGroundMotions to HazardCurves that contains one curve
-	 * per gmm.
-	 */
 	private static class GroundMotionsToCurves implements
 			Function<HazardGroundMotions, HazardCurves> {
 
-		private final ArrayXY_Sequence modelCurve;
+		private final Map<Imt, ArrayXY_Sequence> modelCurves;
+		private final SigmaModel sigmaModel;
+		private final double truncLevel;
 
-		GroundMotionsToCurves(ArrayXY_Sequence modelCurve) {
-			this.modelCurve = modelCurve;
+		GroundMotionsToCurves(Map<Imt, ArrayXY_Sequence> modelCurves, SigmaModel sigmaModel,
+			double truncLevel) {
+			this.modelCurves = modelCurves;
+			this.sigmaModel = sigmaModel;
+			this.truncLevel = truncLevel;
 		}
 
 		@Override public HazardCurves apply(HazardGroundMotions groundMotions) {
 
 			HazardCurves.Builder curveBuilder = HazardCurves.builder(groundMotions);
-			ArrayXY_Sequence utilCurve = ArrayXY_Sequence.copyOf(modelCurve);
 
-			for (Gmm gmm : groundMotions.means.keySet()) {
+			for (Entry<Imt, ArrayXY_Sequence> entry : modelCurves.entrySet()) {
+
+				ArrayXY_Sequence modelCurve = entry.getValue();
+				Imt imt = entry.getKey();
+
+				ArrayXY_Sequence utilCurve = ArrayXY_Sequence.copyOf(modelCurve);
+
+				Map<Gmm, List<Double>> gmmMeans = groundMotions.means.get(imt);
+				Map<Gmm, List<Double>> gmmSigmas = groundMotions.sigmas.get(imt);
+
+				for (Gmm gmm : gmmMeans.keySet()) {
+
+					ArrayXY_Sequence gmmCurve = ArrayXY_Sequence.copyOf(modelCurve);
 
-				ArrayXY_Sequence gmmCurve = ArrayXY_Sequence.copyOf(modelCurve);
+					List<Double> means = gmmMeans.get(gmm);
+					List<Double> sigmas = gmmSigmas.get(gmm);
 
-				List<Double> means = groundMotions.means.get(gmm);
-				List<Double> sigmas = groundMotions.sigmas.get(gmm);
+					for (int i = 0; i < means.size(); i++) {
+						// TODO the model curve is passed in in linear space but
+						// for
+						// lognormal we need x-values to be ln(x)
+						sigmaModel.exceedance(means.get(i), sigmas.get(i), truncLevel, imt,
+							utilCurve);
 
-				for (int i = 0; i < means.size(); i++) {
-					// TODO the model curve is passed in in linear space but for
-					// lognormal we need x-values to be ln(x)
-					setProbExceed(means.get(i), sigmas.get(i), utilCurve, TRUNCATION_UPPER_ONLY, 3.0);
-					utilCurve.multiply(groundMotions.inputs.get(i).rate);
-					gmmCurve.add(utilCurve);
+						// TODO clean
+						// setProbExceed(means.get(i), sigmas.get(i), utilCurve,
+						// TRUNCATION_UPPER_ONLY, 3.0);
+						utilCurve.multiply(groundMotions.inputs.get(i).rate);
+						gmmCurve.add(utilCurve);
+					}
+					curveBuilder.addCurve(imt, gmm, gmmCurve);
 				}
-				curveBuilder.addCurve(gmm, gmmCurve);
 			}
 			return curveBuilder.build();
 		}
@@ -221,17 +243,18 @@ final class Transforms {
 
 	private static class CurveConsolidator implements Function<List<HazardCurves>, HazardCurveSet> {
 
-		private final ArrayXY_Sequence modelCurve;
+		private final Map<Imt, ArrayXY_Sequence> modelCurves;
 		private final SourceSet<? extends Source> sourceSet;
 
-		CurveConsolidator(SourceSet<? extends Source> sourceSet, ArrayXY_Sequence modelCurve) {
+		CurveConsolidator(SourceSet<? extends Source> sourceSet,
+			Map<Imt, ArrayXY_Sequence> modelCurves) {
 			this.sourceSet = sourceSet;
-			this.modelCurve = modelCurve;
+			this.modelCurves = modelCurves;
 		}
 
 		@Override public HazardCurveSet apply(List<HazardCurves> curvesList) {
 
-			HazardCurveSet.Builder curveSetBuilder = HazardCurveSet.builder(sourceSet, modelCurve);
+			HazardCurveSet.Builder curveSetBuilder = HazardCurveSet.builder(sourceSet, modelCurves);
 
 			for (HazardCurves curves : curvesList) {
 				curveSetBuilder.addCurves(curves);
@@ -243,15 +266,15 @@ final class Transforms {
 	private static class CurveSetConsolidator implements
 			Function<List<HazardCurveSet>, HazardResult> {
 
-		private final ArrayXY_Sequence modelCurve;
+		private final Map<Imt, ArrayXY_Sequence> modelCurves;
 
-		CurveSetConsolidator(ArrayXY_Sequence modelCurve) {
-			this.modelCurve = modelCurve;
+		CurveSetConsolidator(Map<Imt, ArrayXY_Sequence> modelCurves) {
+			this.modelCurves = modelCurves;
 		}
 
 		@Override public HazardResult apply(List<HazardCurveSet> curveSetList) {
 
-			HazardResult.Builder resultBuilder = HazardResult.builder(modelCurve);
+			HazardResult.Builder resultBuilder = HazardResult.builder(modelCurves);
 
 			for (HazardCurveSet curves : curveSetList) {
 				resultBuilder.addCurveSet(curves);
@@ -282,7 +305,7 @@ final class Transforms {
 
 		private final InputsToGroundMotions transform;
 
-		ClusterInputsToGroundMotions(Map<Gmm, GroundMotionModel> gmmInstances) {
+		ClusterInputsToGroundMotions(Table<Gmm, Imt, GroundMotionModel> gmmInstances) {
 			transform = new InputsToGroundMotions(gmmInstances);
 		}
 
@@ -305,45 +328,65 @@ final class Transforms {
 	private static class ClusterGroundMotionsToCurves implements
 			Function<ClusterGroundMotions, ClusterCurves> {
 
-		private final ArrayXY_Sequence modelCurve;
+		private final Map<Imt, ArrayXY_Sequence> modelCurves;
+		private final SigmaModel sigmaModel;
+		private final double truncLevel;
 
-		ClusterGroundMotionsToCurves(ArrayXY_Sequence modelCurve) {
-			this.modelCurve = modelCurve;
+		ClusterGroundMotionsToCurves(Map<Imt, ArrayXY_Sequence> modelCurves, SigmaModel sigmaModel,
+			double truncLevel) {
+			
+			this.modelCurves = modelCurves;
+			this.sigmaModel = sigmaModel;
+			this.truncLevel = truncLevel;
 		}
 
-		// TODO we're not doing any checking to see if Gmm keys are identical;
-		// internally, we know they should be, so perhaps it's not necessary
-		// verify this; is this referring to the builders used baing able to
-		// accept multiple, overriding calls to addCurve ??
-
 		@Override public ClusterCurves apply(ClusterGroundMotions clusterGroundMotions) {
 
-			// aggregator of curves for each fault in a cluster
-			ListMultimap<Gmm, ArrayXY_Sequence> faultCurves = MultimapBuilder.enumKeys(Gmm.class)
-				.arrayListValues(clusterGroundMotions.size()).build();
-			ArrayXY_Sequence utilCurve = ArrayXY_Sequence.copyOf(modelCurve);
-
-			for (HazardGroundMotions hazardGroundMotions : clusterGroundMotions) {
-				for (Gmm gmm : hazardGroundMotions.means.keySet()) {
-					ArrayXY_Sequence magVarCurve = ArrayXY_Sequence.copyOf(modelCurve);
-					List<Double> means = hazardGroundMotions.means.get(gmm);
-					List<Double> sigmas = hazardGroundMotions.sigmas.get(gmm);
-					for (int i = 0; i < hazardGroundMotions.inputs.size(); i++) {
-						// TODO needs ln(x-values)
-						setProbExceed(means.get(i), sigmas.get(i), utilCurve, TRUNCATION_UPPER_ONLY, 3.0);
-						utilCurve.multiply(hazardGroundMotions.inputs.get(i).rate);
-						magVarCurve.add(utilCurve);
+			Builder builder = ClusterCurves.builder(clusterGroundMotions);
+
+			for (Entry<Imt, ArrayXY_Sequence> entry : modelCurves.entrySet()) {
+
+				ArrayXY_Sequence modelCurve = entry.getValue();
+				Imt imt = entry.getKey();
+
+				// aggregator of curves for each fault in a cluster
+				ListMultimap<Gmm, ArrayXY_Sequence> faultCurves = MultimapBuilder
+					.enumKeys(Gmm.class)
+					.arrayListValues(clusterGroundMotions.size())
+					.build();
+				ArrayXY_Sequence utilCurve = ArrayXY_Sequence.copyOf(modelCurve);
+
+				for (HazardGroundMotions hazardGroundMotions : clusterGroundMotions) {
+
+					Map<Gmm, List<Double>> gmmMeans = hazardGroundMotions.means.get(imt);
+					Map<Gmm, List<Double>> gmmSigmas = hazardGroundMotions.sigmas.get(imt);
+
+					for (Gmm gmm : gmmMeans.keySet()) {
+						ArrayXY_Sequence magVarCurve = ArrayXY_Sequence.copyOf(modelCurve);
+						List<Double> means = gmmMeans.get(gmm);
+						List<Double> sigmas = gmmSigmas.get(gmm);
+						for (int i = 0; i < hazardGroundMotions.inputs.size(); i++) {
+							sigmaModel.exceedance(means.get(i), sigmas.get(i), truncLevel, imt,
+								utilCurve);
+
+							// TODO needs ln(x-values)
+//							setProbExceed(means.get(i), sigmas.get(i), utilCurve,
+//								TRUNCATION_UPPER_ONLY, 3.0);
+							utilCurve.multiply(hazardGroundMotions.inputs.get(i).rate);
+							magVarCurve.add(utilCurve);
+						}
+						faultCurves.put(gmm, magVarCurve);
 					}
-					faultCurves.put(gmm, magVarCurve);
 				}
-			}
 
-			Builder builder = ClusterCurves.builder(clusterGroundMotions);
-			double rate = clusterGroundMotions.parent.rate();
-			for (Gmm gmm : faultCurves.keySet()) {
-				ArrayXY_Sequence clusterCurve = Utils.calcClusterExceedProb(faultCurves.get(gmm));
-				builder.addCurve(gmm, clusterCurve.multiply(rate));
+				double rate = clusterGroundMotions.parent.rate();
+				for (Gmm gmm : faultCurves.keySet()) {
+					ArrayXY_Sequence clusterCurve = Utils.calcClusterExceedProb(faultCurves
+						.get(gmm));
+					builder.addCurve(imt, gmm, clusterCurve.multiply(rate));
+				}
 			}
+
 			return builder.build();
 		}
 	}
@@ -351,18 +394,19 @@ final class Transforms {
 	private static class ClusterCurveConsolidator implements
 			Function<List<ClusterCurves>, HazardCurveSet> {
 
-		private final ArrayXY_Sequence modelCurve;
+		private final Map<Imt, ArrayXY_Sequence> modelCurves;
 		private final ClusterSourceSet clusterSourceSet;
 
-		ClusterCurveConsolidator(ClusterSourceSet clusterSourceSet, ArrayXY_Sequence modelCurve) {
+		ClusterCurveConsolidator(ClusterSourceSet clusterSourceSet,
+			Map<Imt, ArrayXY_Sequence> modelCurves) {
 			this.clusterSourceSet = clusterSourceSet;
-			this.modelCurve = modelCurve;
+			this.modelCurves = modelCurves;
 		}
 
 		@Override public HazardCurveSet apply(List<ClusterCurves> curvesList) {
 
 			HazardCurveSet.Builder curveSetBuilder = HazardCurveSet.builder(clusterSourceSet,
-				modelCurve);
+				modelCurves);
 
 			for (ClusterCurves curves : curvesList) {
 				curveSetBuilder.addCurves(curves);
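
The curve-building transforms above share one inner loop: for each rupture,
fill a utility curve with exceedance probabilities, scale it by the rupture
rate, and accumulate it into the per-Gmm curve. A toy version of that
accumulation, with a step function standing in for the truncated-normal
exceedance model:

    import java.util.Arrays;

    class AccumulationSketch {

        public static void main(String[] args) {
            double[] imls = { 0.01, 0.1, 1.0 }; // IMLs; ln-space in the real code
            double[] medians = { 0.37, 0.61 };  // toy per-rupture ground motions
            double[] rates = { 1e-3, 5e-4 };    // annual rupture rates

            double[] gmmCurve = new double[imls.length];
            double[] util = new double[imls.length];

            for (int r = 0; r < rates.length; r++) {
                // Stand-in for sigmaModel.exceedance(μ, σ, n, imt, curve):
                // P(exceed) = 1 below the median, 0 above (no aleatory spread).
                for (int i = 0; i < imls.length; i++) {
                    util[i] = medians[r] > imls[i] ? 1.0 : 0.0;
                }
                // Rate-weighted accumulation into the per-Gmm curve.
                for (int i = 0; i < imls.length; i++) {
                    gmmCurve[i] += util[i] * rates[r];
                }
            }
            System.out.println(Arrays.toString(gmmCurve)); // ~[0.0015, 0.0015, 0.0]
        }
    }
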
diff --git a/src/org/opensha/calc/Utils.java b/src/org/opensha/calc/Utils.java
index e20aac564..398067953 100644
--- a/src/org/opensha/calc/Utils.java
+++ b/src/org/opensha/calc/Utils.java
@@ -32,6 +32,7 @@ public class Utils {
 	 * @param truncType none, one-sided, or two-sided truncation
 	 * @param truncLevel in number of standard deviations
 	 */
+	@Deprecated
 	public static double calcProbExceed(double μ, double σ, double value,
 			SigmaModel truncType, double truncLevel) {
 
@@ -94,6 +95,7 @@ public class Utils {
 	 * @param truncLevel in number of standard deviations
 	 * @return a reference to the supplied sequence
 	 */
+	@Deprecated
 	public static XY_Sequence setProbExceed(double μ, double σ, XY_Sequence values,
 			SigmaModel truncType, double truncLevel) {
 
diff --git a/src/org/opensha/data/ArrayXY_Sequence.java b/src/org/opensha/data/ArrayXY_Sequence.java
index dfcab4390..d8fd1084a 100644
--- a/src/org/opensha/data/ArrayXY_Sequence.java
+++ b/src/org/opensha/data/ArrayXY_Sequence.java
@@ -12,6 +12,7 @@ import java.util.Arrays;
 import java.util.Collection;
 import java.util.Objects;
 
+import com.google.common.base.Function;
 import com.google.common.primitives.Doubles;
 
 /**
@@ -239,7 +240,7 @@ public class ArrayXY_Sequence extends AbstractXY_Sequence {
 	}
 	
 	/**
-	 * Sets all y-value to 0.
+	 * Sets all y-values to 0.
 	 * 
 	 * @return {@code this} sequence, for use inline
 	 */
@@ -247,6 +248,17 @@ public class ArrayXY_Sequence extends AbstractXY_Sequence {
 		Arrays.fill(ys, 0.0);
 		return this;
 	}
+	
+	/**
+	 * Transforms all y-values in place using the supplied {@link Function}.
+	 * 
+	 * @param function to apply to each y-value
+	 * @return {@code this} sequence, for use inline
+	 */
+	public ArrayXY_Sequence transform(Function<Double, Double> function) {
+		DataUtils.uncheckedTransform(function, ys);
+		return this;
+	}
 
 	private ArrayXY_Sequence validateSequence(ArrayXY_Sequence sequence) {
 		checkArgument(checkNotNull(sequence).xHash == xHash);
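
The new transform(Function) mutates y-values in place via
DataUtils.uncheckedTransform, giving callers a one-liner for moving curves
between linear and natural-log space. A usage sketch, assuming
create(double[], double[]) builds a sequence from parallel arrays as it is
used in CalcConfig above:

    import com.google.common.base.Function;

    import org.opensha.data.ArrayXY_Sequence;

    class InPlaceTransformSketch {

        public static void main(String[] args) {
            double[] xs = { 0.1, 0.2, 0.4 };
            double[] ys = { 1.0, 2.0, 4.0 };
            ArrayXY_Sequence seq = ArrayXY_Sequence.create(xs, ys);

            // In-place y-transform; returns this for chaining.
            seq.transform(new Function<Double, Double>() {
                @Override public Double apply(Double y) {
                    return Math.log(y); // ys become {0.0, ~0.69, ~1.39}
                }
            });
            System.out.println(seq);
        }
    }
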
diff --git a/src/org/opensha/data/DataUtils.java b/src/org/opensha/data/DataUtils.java
index 18e9431e5..54731d840 100644
--- a/src/org/opensha/data/DataUtils.java
+++ b/src/org/opensha/data/DataUtils.java
@@ -506,6 +506,10 @@ public final class DataUtils {
 	public static double[] transform(Function<Double, Double> function, double... data) {
 		checkNotNull(function);
 		validateDataArray(data);
+		return uncheckedTransform(function, data);
+	}
+	
+	static double[] uncheckedTransform(Function<Double, Double> function, double... data) {
 		for (int i = 0; i < data.length; i++) {
 			data[i] = function.apply(data[i]);
 		}
diff --git a/src/org/opensha/eq/fault/surface/DefaultGriddedSurface.java b/src/org/opensha/eq/fault/surface/DefaultGriddedSurface.java
index a4c1f57f0..7de298f42 100644
--- a/src/org/opensha/eq/fault/surface/DefaultGriddedSurface.java
+++ b/src/org/opensha/eq/fault/surface/DefaultGriddedSurface.java
@@ -142,7 +142,7 @@ public class DefaultGriddedSurface extends AbstractGriddedSurface {
 		// build() may only be called once
 		// use Doubles to ensure fields are initially null
 
-		private static final Range<Double> SPACING_RANGE = Range.closed(0.1, 20.0);
+		private static final Range<Double> SPACING_RANGE = Range.closed(0.01, 20.0);
 
 		private static final String ID = "DefaultGriddedSurface.Builder";
 		private boolean built = false;
diff --git a/src/org/opensha/eq/fault/surface/RuptureFloating.java b/src/org/opensha/eq/fault/surface/RuptureFloating.java
index ebc6dd002..b5a7f75c3 100644
--- a/src/org/opensha/eq/fault/surface/RuptureFloating.java
+++ b/src/org/opensha/eq/fault/surface/RuptureFloating.java
@@ -71,11 +71,10 @@ public enum RuptureFloating {
 					double scaledRate = rate * entry.getValue();
 					floaters.addAll(createFloaters(surfaces, mag, scaledRate, rake));
 				}
 				return floaters;
 			}
-
 			Dimensions d = scaling.dimensions(mag, maxWidth);
 			List<GriddedSurface> surfaces = createFloatingSurfaces(surface, d.length, d.width);
 			return createFloaters(surfaces, mag, rate, rake);
 		}
 	},
diff --git a/src/org/opensha/eq/model/FaultParser.java b/src/org/opensha/eq/model/FaultParser.java
index b03ff8fa0..23dc12709 100644
--- a/src/org/opensha/eq/model/FaultParser.java
+++ b/src/org/opensha/eq/model/FaultParser.java
@@ -156,6 +156,7 @@ class FaultParser extends DefaultHandler {
 						.name(srcName)
 						.ruptureScaling(rupScaling)
 						.ruptureFloating(config.ruptureFloating)
+						.ruptureVariability(config.ruptureVariability)
 						.surfaceSpacing(config.surfaceSpacing);
 					log.fine("     Source: " + srcName);
 					break;
diff --git a/src/org/opensha/eq/model/FaultSource.java b/src/org/opensha/eq/model/FaultSource.java
index 34b74104b..73a676eee 100644
--- a/src/org/opensha/eq/model/FaultSource.java
+++ b/src/org/opensha/eq/model/FaultSource.java
@@ -57,6 +57,7 @@ public class FaultSource implements Source {
 	final double spacing;
 	final RuptureScaling rupScaling;
 	final RuptureFloating rupFloating;
+	final boolean rupVariability;
 	final GriddedSurface surface;
 
 	private final List<List<Rupture>> ruptureLists; // 1:1 with Mfds
@@ -64,7 +65,7 @@ public class FaultSource implements Source {
 	// package privacy for subduction subclass
 	FaultSource(String name, LocationList trace, double dip, double width, GriddedSurface surface,
 		double rake, List<IncrementalMfd> mfds, double spacing, RuptureScaling rupScaling,
-		RuptureFloating rupFloating) {
+		RuptureFloating rupFloating, boolean rupVariability) {
 
 		this.name = name;
 		this.trace = trace;
@@ -76,6 +77,7 @@ public class FaultSource implements Source {
 		this.spacing = spacing;
 		this.rupScaling = rupScaling;
 		this.rupFloating = rupFloating;
+		this.rupVariability = rupVariability;
 
 		ruptureLists = initRuptureLists();
 		checkState(Iterables.size(Iterables.concat(ruptureLists)) > 0,
@@ -137,7 +139,7 @@ public class FaultSource implements Source {
 				DefaultGriddedSurface surf = (DefaultGriddedSurface) surface;
 				
 				List<Rupture> floaters = rupFloating.createFloatingRuptures(surf, rupScaling, mag,
-					rate, rake, false);
+					rate, rake, rupVariability);
 				rupListbuilder.addAll(floaters);
 				
 			} else {
@@ -155,7 +157,7 @@ public class FaultSource implements Source {
 		private static final String ID = "FaultSource.Builder";
 		private boolean built = false;
 
-		private static final Range<Double> SURFACE_GRID_SPACING_RANGE = Range.closed(0.1, 20.0);
+		private static final Range<Double> SURFACE_GRID_SPACING_RANGE = Range.closed(0.01, 20.0);
 
 		// required
 		String name;
@@ -169,6 +171,7 @@ public class FaultSource implements Source {
 		Double spacing;
 		RuptureScaling rupScaling;
 		RuptureFloating rupFloating;
+		Boolean rupVariability;
 
 		Builder name(String name) {
 			this.name = validateName(name);
@@ -231,6 +234,11 @@ public class FaultSource implements Source {
 			return this;
 		}
 
+		Builder ruptureVariability(boolean rupVariability) {
+			this.rupVariability = rupVariability;
+			return this;
+		}
+
 		void validateState(String id) {
 			checkState(!built, "This %s instance has already been used", id);
 			checkState(name != null, "%s name not set", id);
@@ -243,6 +251,7 @@ public class FaultSource implements Source {
 			checkState(spacing != null, "%s surface grid spacing not set", id);
 			checkState(rupScaling != null, "%s rupture-scaling relation not set", id);
 			checkState(rupFloating != null, "%s rupture-floating model not set", id);
+			checkState(rupVariability != null, "%s rupture-area variability flag not set", id);
 			built = true;
 		}
 
@@ -257,7 +266,7 @@ public class FaultSource implements Source {
 				.depth(depth).dip(dip).width(width).spacing(spacing).build();
 
 			return new FaultSource(name, trace, dip, width, surface, rake,
-				ImmutableList.copyOf(mfds), spacing, rupScaling, rupFloating);
+				ImmutableList.copyOf(mfds), spacing, rupScaling, rupFloating, rupVariability);
 		}
 	}
 
diff --git a/src/org/opensha/eq/model/HazardModel.java b/src/org/opensha/eq/model/HazardModel.java
index 7598405bb..01d671a93 100644
--- a/src/org/opensha/eq/model/HazardModel.java
+++ b/src/org/opensha/eq/model/HazardModel.java
@@ -15,6 +15,7 @@ import java.nio.file.Path;
 import java.util.Iterator;
 import java.util.Properties;
 
+import org.opensha.calc.CalcConfig;
 import org.opensha.calc.SigmaModel;
 import org.opensha.eq.fault.surface.RuptureFloating;
 import org.opensha.eq.model.AreaSource.GridScaling;
@@ -79,12 +80,12 @@ public final class HazardModel implements Iterable<SourceSet<? extends Source>>,
 
 	private final String name;
 	private final SetMultimap<SourceType, SourceSet<? extends Source>> sourceSetMap;
-	private final ModelConfig config;
+	private final CalcConfig config;
 	
 	// TODO do we really need config here; calc config properties will likely be accessed from the 
 	// source set or source level; should probably push config to SourceSets, possibly overriding default
 
-	private HazardModel(String name, ModelConfig config,
+	private HazardModel(String name, CalcConfig config,
 		SetMultimap<SourceType, SourceSet<? extends Source>> sourceSetMap) {
 		this.name = name;
 		this.config = config;
@@ -127,6 +128,10 @@ public final class HazardModel implements Iterable<SourceSet<? extends Source>>,
 	@Override public String name() {
 		return name;
 	}
+	
+	public CalcConfig config() {
+		return config;
+	}
 
 	@Override public String toString() {
 		return "HazardModel: " + name + NEWLINE + sourceSetMap.toString();
@@ -147,14 +152,14 @@ public final class HazardModel implements Iterable<SourceSet<? extends Source>>,
 		// ImmutableSetMultimap.Builder preserves value addition order
 		private ImmutableSetMultimap.Builder<SourceType, SourceSet<? extends Source>> sourceMapBuilder;
 		private SetMultimap<SourceType, SourceSet<? extends Source>> sourceSetMap;
-		private ModelConfig config;
+		private CalcConfig config;
 		private String name;
 
 		private Builder() {
 			sourceMapBuilder = ImmutableSetMultimap.builder();
 		}
 
-		Builder config(ModelConfig config) {
+		Builder config(CalcConfig config) {
 			this.config = checkNotNull(config);
 			return this;
 		}
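
With the accessor added above, calculation settings can now be read directly off
a loaded model; a short sketch (the load call mirrors usage elsewhere in this patch):

    HazardModel model = HazardModel.load(modelPath, modelPath.getFileName().toString());
    CalcConfig config = model.config();
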
diff --git a/src/org/opensha/eq/model/InterfaceParser.java b/src/org/opensha/eq/model/InterfaceParser.java
index 776ab3b49..3c2f37044 100644
--- a/src/org/opensha/eq/model/InterfaceParser.java
+++ b/src/org/opensha/eq/model/InterfaceParser.java
@@ -132,6 +132,7 @@ class InterfaceParser extends DefaultHandler {
 					sourceBuilder.name(srcName);
 					sourceBuilder.ruptureScaling(rupScaling);
 					sourceBuilder.ruptureFloating(config.ruptureFloating);
+					sourceBuilder.ruptureVariability(config.ruptureVariability);
 					sourceBuilder.surfaceSpacing(config.surfaceSpacing);
 					log.fine("     Source: " + srcName);
 					break;
diff --git a/src/org/opensha/eq/model/InterfaceSource.java b/src/org/opensha/eq/model/InterfaceSource.java
index 5b746c3fd..008c43cc8 100644
--- a/src/org/opensha/eq/model/InterfaceSource.java
+++ b/src/org/opensha/eq/model/InterfaceSource.java
@@ -40,17 +40,18 @@ public class InterfaceSource extends FaultSource {
 
 	private InterfaceSource(String name, LocationList upperTrace, LocationList lowerTrace,
 		double dip, double width, GriddedSurface surface, double rake, List<IncrementalMfd> mfds,
-		double spacing, RuptureScaling rupScaling, RuptureFloating rupFloating) {
-		
+		double spacing, RuptureScaling rupScaling, RuptureFloating rupFloating,
+		boolean rupVariability) {
 
-		super(name, upperTrace, dip, width, surface, rake, mfds, spacing, rupScaling, rupFloating);
+		super(name, upperTrace, dip, width, surface, rake, mfds, spacing, rupScaling, rupFloating,
+			rupVariability);
 
 		this.lowerTrace = (lowerTrace == null) ? surface.getEvenlyDiscritizedLowerEdge()
 			: lowerTrace;
-		
+
 		// TODO lowerTrace may be null and this is bad bad; lowerTrace
 		// is referenced in InterfaceSourceSet distanceFilter and
-		// we should populate this even if the original source only 
+		// we should populate this even if the original source only
 		// specified an upper trace. This highlights another shortcoming
 		// of Container2D and GriddedSurface: why is there no getRow(int)
 		// or getBottomRow() given that there is a getUpperEdge(),
@@ -58,7 +59,7 @@ public class InterfaceSource extends FaultSource {
 		// due to seismogenic depth constraints. For now, we are ignoring
 		// lower trace in distance filter, but given large width of interface
 		// sources TODO clean up Container2D methods
-		
+
 	}
 
 	@Override public String toString() {
@@ -90,14 +91,12 @@ public class InterfaceSource extends FaultSource {
 		// required
 		private LocationList lowerTrace;
 
-		@Override
-		Builder depth(double depth) {
+		@Override Builder depth(double depth) {
 			this.depth = validateInterfaceDepth(depth);
 			return this;
 		}
-		
-		@Override
-		Builder width(double width) {
+
+		@Override Builder width(double width) {
 			this.width = validateInterfaceWidth(width);
 			return this;
 		}
@@ -149,7 +148,7 @@ public class InterfaceSource extends FaultSource {
 			}
 
 			return new InterfaceSource(name, trace, lowerTrace, dip, width, surface, rake,
-				ImmutableList.copyOf(mfds), spacing, rupScaling, rupFloating);
+				ImmutableList.copyOf(mfds), spacing, rupScaling, rupFloating, rupVariability);
 		}
 
 	}
diff --git a/src/org/opensha/eq/model/Loader.java b/src/org/opensha/eq/model/Loader.java
index 4300cba8f..ce45b095f 100644
--- a/src/org/opensha/eq/model/Loader.java
+++ b/src/org/opensha/eq/model/Loader.java
@@ -28,6 +28,7 @@ import javax.xml.parsers.ParserConfigurationException;
 import javax.xml.parsers.SAXParser;
 import javax.xml.parsers.SAXParserFactory;
 
+import org.opensha.calc.CalcConfig;
 import org.opensha.eq.model.HazardModel.Builder;
 import org.xml.sax.SAXException;
 import org.xml.sax.SAXParseException;
@@ -86,10 +87,12 @@ class Loader {
 			checkArgument(Files.exists(modelPath), "Path does not exist: %s", path);
 			Path typeDirPath = typeDirectory(modelPath);
 			
-			ModelConfig baseConfig = ModelConfig.load(typeDirPath);
-			log.info(baseConfig.toString());
+			ModelConfig modelConfig = ModelConfig.load(typeDirPath);
+			log.info(modelConfig.toString());
 			log.info("");
-			builder.config(baseConfig);
+			
+			CalcConfig calcConfig = CalcConfig.load(typeDirPath);
+			builder.config(calcConfig);
 			
 			typePaths = typeDirectoryList(typeDirPath);
 			checkState(typePaths.size() > 0, "Empty model: %s", modelPath.getFileName());
@@ -101,7 +104,7 @@ class Loader {
 				String typeName = cleanZipName(typePath.getFileName().toString());
 				log.info("");
 				log.info("========  " + typeName + " Sources  ========");
-				processTypeDir(typePath, builder, baseConfig);
+				processTypeDir(typePath, builder, modelConfig);
 			}
 
 		} catch (IOException | URISyntaxException e) {
@@ -166,7 +169,7 @@ class Loader {
 		}
 	}
 
-	private static void processTypeDir(Path typeDir, Builder builder, ModelConfig baseConfig) throws IOException {
+	private static void processTypeDir(Path typeDir, Builder builder, ModelConfig modelConfig) throws IOException {
 
 		String typeName = cleanZipName(typeDir.getFileName().toString());
 		SourceType type = SourceType.fromString(typeName);
@@ -184,7 +187,7 @@ class Loader {
 		}
 
 		// load alternate config if such exists
-		ModelConfig config = baseConfig;
+		ModelConfig config = modelConfig;
 		Path configPath = typeDir.resolve(ModelConfig.FILE_NAME);
 		if (Files.exists(configPath)) {
 			config = ModelConfig.load(typeDir);
@@ -355,7 +358,7 @@ class Loader {
 	/* This method will exit runtime environment */
 	private static void handleConfigException(Exception e) {
 		StringBuilder sb = new StringBuilder(LF);
-		sb.append("** ModelConfig error: ").append(e.getMessage());
+		sb.append("** Configuration error: ").append(e.getMessage());
 		log.log(SEVERE, sb.toString(), e);
 		System.exit(1);
 	}
diff --git a/src/org/opensha/gmm/Gmm.java b/src/org/opensha/gmm/Gmm.java
index b642a713e..6dbf87a1a 100644
--- a/src/org/opensha/gmm/Gmm.java
+++ b/src/org/opensha/gmm/Gmm.java
@@ -11,8 +11,10 @@ import org.opensha.gmm.CeusMb.*;
 import com.google.common.cache.CacheBuilder;
 import com.google.common.cache.CacheLoader;
 import com.google.common.cache.LoadingCache;
+import com.google.common.collect.ArrayTable;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
+import com.google.common.collect.Table;
 import com.google.common.util.concurrent.UncheckedExecutionException;
 
 /**
@@ -164,11 +166,13 @@ public enum Gmm {
 	AB_06_PRIME(AtkinsonBoore_2006p.class, AtkinsonBoore_2006p.NAME, AtkinsonBoore_2006p.CC),
 
 	/** @see AtkinsonBoore_2006 */
-	AB_06_140BAR(AtkinsonBoore_2006.StressDrop_140bar.class, AtkinsonBoore_2006.StressDrop_140bar.NAME,
+	AB_06_140BAR(AtkinsonBoore_2006.StressDrop_140bar.class,
+			AtkinsonBoore_2006.StressDrop_140bar.NAME,
 			AtkinsonBoore_2006.CC_A),
 
 	/** @see AtkinsonBoore_2006 */
-	AB_06_200BAR(AtkinsonBoore_2006.StressDrop_200bar.class, AtkinsonBoore_2006.StressDrop_200bar.NAME,
+	AB_06_200BAR(AtkinsonBoore_2006.StressDrop_200bar.class,
+			AtkinsonBoore_2006.StressDrop_200bar.NAME,
 			AtkinsonBoore_2006.CC_A),
 
 	/** @see Atkinson_2008p */
@@ -247,22 +251,24 @@ public enum Gmm {
 
 	/** @see SadighEtAl_1997 */
 	SADIGH_97(SadighEtAl_1997.class, SadighEtAl_1997.NAME, SadighEtAl_1997.CC_BC_HI),
-	
+
 	/** @see McVerryEtAl_2000 */
-	MCVERRY_00_CRUSTAL(McVerryEtAl_2000.Crustal.class, McVerryEtAl_2000.Crustal.NAME, McVerryEtAl_2000.CC),
+	MCVERRY_00_CRUSTAL(McVerryEtAl_2000.Crustal.class, McVerryEtAl_2000.Crustal.NAME,
+			McVerryEtAl_2000.CC),
 
 	/** @see McVerryEtAl_2000 */
-	MCVERRY_00_INTERFACE(McVerryEtAl_2000.Interface.class, McVerryEtAl_2000.Interface.NAME, McVerryEtAl_2000.CC),
-	
+	MCVERRY_00_INTERFACE(McVerryEtAl_2000.Interface.class, McVerryEtAl_2000.Interface.NAME,
+			McVerryEtAl_2000.CC),
+
 	/** @see McVerryEtAl_2000 */
 	MCVERRY_00_SLAB(McVerryEtAl_2000.Slab.class, McVerryEtAl_2000.Slab.NAME, McVerryEtAl_2000.CC),
-	
+
 	/** @see McVerryEtAl_2000 */
-	MCVERRY_00_VOLCANIC(McVerryEtAl_2000.Volcanic.class, McVerryEtAl_2000.Volcanic.NAME, McVerryEtAl_2000.CC);
+	MCVERRY_00_VOLCANIC(McVerryEtAl_2000.Volcanic.class, McVerryEtAl_2000.Volcanic.NAME,
+			McVerryEtAl_2000.CC);
 
 	// TODO clean?
 	// GK_2013(GraizerKalkan_2013.class);
-	
 
 	// TODO all the methods of this class need argument checking and unit tests
 
@@ -302,7 +308,7 @@ public enum Gmm {
 	/**
 	 * Retrieves multiple {@code GroundMotionModel} instances, either by
 	 * creating new ones, or fetching them from a cache.
-	 * @param gmms to retieve
+	 * @param gmms to retrieve
 	 * @param imt
 	 * @return a {@code Map} of {@code GroundMotionModel} instances
 	 * @throws UncheckedExecutionException if there is an instantiation problem
@@ -315,6 +321,27 @@ public enum Gmm {
 		return instances;
 	}
 
+	// TODO deprecate/delete above??
+	
+	/**
+	 * Retrieves a {@code Table} of {@code GroundMotionModel} instances for a
+	 * range of {@code Imt}s, either by creating new ones, or fetching them from
+	 * a cache.
+	 * @param gmms to retrieve
+	 * @param imts to retrieve instances for
+	 * @return a {@code Table} of {@code GroundMotionModel} instances
+	 * @throws UncheckedExecutionException if there is an instantiation problem
+	 */
+	public static Table<Gmm, Imt, GroundMotionModel> instances(Set<Gmm> gmms, Set<Imt> imts) {
+		Table<Gmm, Imt, GroundMotionModel> instances = ArrayTable.create(gmms, imts);
+		for (Gmm gmm : gmms) {
+			for (Imt imt : imts) {
+				instances.put(gmm, imt, gmm.instance(imt));
+			}
+		}
+		return instances;
+	}
+
 	@Override public String toString() {
 		return name;
 	}
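
A hedged usage sketch for the new table-based lookup (the gmms and imts sets are
assumed to come from a SourceSet and a calculation config, respectively); one
instance is created or fetched per (Gmm, Imt) pair:

    Table<Gmm, Imt, GroundMotionModel> instances = Gmm.instances(gmms, imts);
    for (Gmm gmm : gmms) {
        for (Imt imt : imts) {
            GroundMotionModel model = instances.get(gmm, imt);
            // ... compute ground motions with each instance
        }
    }
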
diff --git a/src/org/opensha/mfd/Mfds.java b/src/org/opensha/mfd/Mfds.java
index 9c4eaf42c..17c088676 100644
--- a/src/org/opensha/mfd/Mfds.java
+++ b/src/org/opensha/mfd/Mfds.java
@@ -15,6 +15,7 @@ import org.opensha.data.DataUtils;
 import org.opensha.data.XY_Sequence;
 import org.opensha.eq.Magnitudes;
 
+import com.google.common.base.Converter;
 import com.google.common.primitives.Doubles;
 
 /**
@@ -33,7 +34,7 @@ public final class Mfds {
 	private static final int DEFAULT_TRUNC_LEVEL = 2;
 
 	private Mfds() {}
-	
+
 	/**
 	 * Creates a new single magnitude {@code IncrementalMfd}.
 	 * 
@@ -158,14 +159,14 @@ public final class Mfds {
 		mfd.setAllButTotCumRate(min, min + (size - 1) * delta, moRate, b);
 		return mfd;
 	}
-	
+
 	/*
 	 * A Tapered GR distribution is difficult to make as a child of GR because
-	 * to fully initialize a GR requires multiple steps (e.g. scaleTo...)
-	 * Could do it independently; would require calculateRelativeRates. We'll
-	 * just create a factory method for now until MFD TODO Builders are impl.
+	 * to fully initialize a GR requires multiple steps (e.g. scaleTo...) Could
+	 * do it independently; would require calculateRelativeRates. We'll just
+	 * create a factory method for now until MFD TODO Builders are impl.
 	 */
-	
+
 	public static IncrementalMfd newTaperedGutenbergRichterMFD(double min, double delta, int size,
 			double a, double b, double corner, double weight) {
 		GutenbergRichterMfd mfd = newGutenbergRichterMFD(min, delta, size, b, 1.0);
@@ -174,10 +175,10 @@ public final class Mfds {
 		taper(mfd, corner);
 		return mfd;
 	}
-	
+
 	private static final double TAPERED_LARGE_MAG = 9.05;
 	private static final double SMALL_MO_MAG = 4.0;
-	
+
 	/*
 	 * This Tapered-GR implementation maintains consistency with NSHM but should
 	 * probably be revisited because scaling varies with choice of
@@ -186,25 +187,25 @@ public final class Mfds {
 	 * supplied MFD and use Magnitudes.MAX_MAG for TAPERED_LARGE_MAG instead.
 	 */
 	private static void taper(GutenbergRichterMfd mfd, double mCorner) {
-		
+
 		double minMo = magToMoment_N_m(SMALL_MO_MAG);
 		double cornerMo = magToMoment_N_m(mCorner);
 		double largeMo = magToMoment_N_m(TAPERED_LARGE_MAG);
 		double beta = mfd.get_bValue() / 1.5;
 		double binHalfWidth = mfd.getDelta() / 2.0;
-		
-		for (int i=0; i<mfd.getNum(); i++) {
+
+		for (int i = 0; i < mfd.getNum(); i++) {
 			double mag = mfd.getX(i);
 			double magMoLo = magToMoment_N_m(mag - binHalfWidth);
 			double magMoHi = magToMoment_N_m(mag + binHalfWidth);
-			
+
 			double magBinCountTapered = magBinCount(minMo, magMoLo, magMoHi, beta, cornerMo);
 			double magBinCount = magBinCount(minMo, magMoLo, magMoHi, beta, largeMo);
 			double scale = magBinCountTapered / magBinCount;
 			mfd.set(i, mfd.getY(i) * scale);
 		}
 	}
-	
+
 	/*
 	 * Convenience method for computing the number of events in a tapered GR
 	 * magnitude bin.
@@ -213,7 +214,7 @@ public final class Mfds {
 			double cornerMo) {
 		return pareto(minMo, magMoLo, beta, cornerMo) - pareto(minMo, magMoHi, beta, cornerMo);
 	}
-	
+
 	/*
 	 * Complementary Pareto distribution: cumulative number of events with
 	 * seismic moment greater than magMo with an exponential taper
@@ -326,7 +327,27 @@ public final class Mfds {
 	public static double probToRate(double P, double time) {
 		return -log(1 - P) / time;
 	}
-	
+
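+	/**
+	 * Returns a reusable {@code Converter} between annual rate and 1-year
+	 * Poisson probability, backed by {@code rateToProb} and {@code probToRate}.
+	 */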
+	public static Converter<Double, Double> annRateToPoissProbConverter() {
+		return AnnualRateToPoissonProbConverter.INSTANCE;
+	}
+
+	private static final class AnnualRateToPoissonProbConverter extends Converter<Double, Double> {
+		static final AnnualRateToPoissonProbConverter INSTANCE = new AnnualRateToPoissonProbConverter();
+
+		@Override protected Double doForward(Double rate) {
+			return rateToProb(rate, 1.0);
+		}
+
+		@Override protected Double doBackward(Double prob) {
+			return probToRate(prob, 1.0);
+		}
+	}
+
 	/**
 	 * Convert an {@code IncrementalMfd} to an {@code ArrayXY_Sequence}.
 	 * 
@@ -343,8 +360,7 @@ public final class Mfds {
 	 * Combine all {@code mfds} into a single sequence.
 	 * @param mfds
 	 */
-	@Deprecated
-	public static XY_Sequence combine(IncrementalMfd... mfds) {
+	@Deprecated public static XY_Sequence combine(IncrementalMfd... mfds) {
 		// TODO slated for removal once MFDs descend from XY_Sequence
 		checkArgument(checkNotNull(mfds).length > 0);
 		List<XY_Sequence> sequences = new ArrayList<>();
@@ -353,6 +369,5 @@ public final class Mfds {
 		}
 		return DataUtils.combine(sequences);
 	}
-	
 
 }
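
The converter added above wraps the standard Poisson relations P = 1 - exp(-λt)
and λ = -ln(1 - P) / t with t fixed at one year; a round-trip sketch:

    Converter<Double, Double> converter = Mfds.annRateToPoissProbConverter();
    double prob = converter.convert(0.01);           // 1 - exp(-0.01) ≈ 0.00995
    double rate = converter.reverse().convert(prob); // recovers 0.01
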
diff --git a/src/org/opensha/programs/HazardCurve.java b/src/org/opensha/programs/HazardCurve.java
index 500e61d21..0eb84b0b3 100644
--- a/src/org/opensha/programs/HazardCurve.java
+++ b/src/org/opensha/programs/HazardCurve.java
@@ -6,6 +6,7 @@ import static java.util.concurrent.TimeUnit.MILLISECONDS;
 import static java.util.logging.Level.SEVERE;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.Paths;
@@ -14,6 +15,7 @@ import java.util.Arrays;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Map.Entry;
 import java.util.concurrent.ExecutionException;
 import java.util.logging.Logger;
 
@@ -23,17 +25,25 @@ import org.opensha.calc.HazardResult;
 import org.opensha.calc.Site;
 import org.opensha.calc.Utils;
 import org.opensha.data.ArrayXY_Sequence;
+import org.opensha.data.XY_Sequence;
 import org.opensha.eq.model.HazardModel;
 import org.opensha.geo.Location;
 import org.opensha.gmm.Imt;
+import org.opensha.mfd.Mfds;
 import org.opensha.util.Logging;
 import org.opensha.util.Parsing;
 import org.opensha.util.Parsing.Delimiter;
 
+import com.google.common.base.Converter;
 import com.google.common.base.Stopwatch;
 import com.google.common.base.Throwables;
+import com.google.common.collect.ArrayTable;
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.ImmutableMap.Builder;
+import com.google.common.collect.Iterables;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
+import com.google.common.collect.Table;
 
 /**
  * Entry point for computing a hazard curve at a {@link Site} from a
@@ -52,12 +62,8 @@ public class HazardCurve {
 
 	private static final double DEFAULT_VS_30 = 760.0;
 	private static final ArrayXY_Sequence modelCurve = Utils.nshmpCurve();
-	private static final String HEADER1 = "# HazardCurve results";
-	private static final String HEADER2;
-
-	static {
-		HEADER2 = "lon,lat," + Parsing.join(modelCurve.xValues(), Delimiter.COMMA);
-	}
+	private static final String FILE_NAME = "curves.csv";
+	private static final String HEADER_COMMENTS = "# HazardCurve results";
 
 	/**
 	 * Calculate a hazard curve at a {@link Site}.
@@ -65,25 +71,106 @@ public class HazardCurve {
 	 * @param imt intensity measure type
 	 * @param site of interest
 	 */
-	public static HazardResult calc(HazardModel model, Imt imt, Site site) {
+//	public static HazardResult calc(HazardModel model, Imt imt, Site site) {
+//		try {
+//			return Calcs.hazardCurve(model, imt, site, modelCurve);
+//		} catch (ExecutionException | InterruptedException e) {
+//			Throwables.propagate(e);
+//			return null;
+//		}
+//	}
+	
+	public static Map<Site, HazardResult> calc(HazardModel model) {
+		return calc(model, model.config());
+	}
+	
+	// the config associated with the model is ignored by this variant
+	// TODO should this variant have the SiteSet replaced by data from sites.csv,
+	// if supplied upstream?
+	public static Map<Site, HazardResult> calc(HazardModel model, CalcConfig config) {
+		
+//		Logger log = Logger.getLogger(HazardCurve.class.getName());
+//
+//		log.info("");
+//		log.info(config.toString());
+
+		Builder<Site, HazardResult> resultTable = ImmutableMap.builder();
+		for (Site site : config.sites) {
+			HazardResult result = calc(model, config, site);
+			resultTable.put(site, result);
+		}
+
+//		log.info("HazardCurve: loading model ...");
+//		log.info("");
+
+		return resultTable.build();
+	}
+	
+	private static HazardResult calc(HazardModel model, CalcConfig config, Site site) {
+		
+		Logger log = Logger.getLogger(HazardCurve.class.getName());
+
+		log.info("");
+		log.info(config.toString());
+		
 		try {
-			return Calcs.hazardCurve(model, imt, site, modelCurve);
+			return Calcs.hazardCurve(model, config, site);
 		} catch (ExecutionException | InterruptedException e) {
 			Throwables.propagate(e);
 			return null;
 		}
 	}
 	
-//	public static Map<Site, HazardResult> calc(HazardModel model, CalcConfig config) {
-//		Map<Site, HazardResult> resultMap = new HashMap<>();
-//		
-//		
-//	}
+	/*
+	 * Note that calculations are performed entirely in log space. We convert
+	 * back here to linear space for output. Calculations are also performed 
+	 * in rate space and the option is provided here to convert to Poisson
+	 * probability.
+	 */
+	public static void writeResults(Path dir, Map<Imt, ArrayXY_Sequence> modelCurves,
+			Map<Site, HazardResult> results, boolean poisson) throws IOException {
+		for (Imt imt : modelCurves.keySet()) {
+			List<String> lines = new ArrayList<>();
+			lines.add(HEADER_COMMENTS);
+			// x-values in linear space
+			ArrayXY_Sequence modelCurve = modelCurves.get(imt);
+			lines.add(createHeaderRow(modelCurve));
+			for (Entry<Site, HazardResult> entry : results.entrySet()) {
+				Site site = entry.getKey();
+				HazardResult result = entry.getValue();
+				// x-values in log space
+				ArrayXY_Sequence calcCurve = result.curves().get(imt);
+				if (poisson) {
+					calcCurve = ArrayXY_Sequence.copyOf(calcCurve);
+					calcCurve.transform(Mfds.annRateToPoissProbConverter());
+				}
+				List<Double> locData = Lists.newArrayList(
+					site.location.lon(),
+					site.location.lat());
+				Iterable<Double> lineDat = Iterables.concat(locData, calcCurve.yValues());
+				String line = Parsing.join(lineDat, Delimiter.COMMA);
+				lines.add(line);
+			}
+			
+			Files.createDirectories(dir);
+			String filename = imt + "-" + FILE_NAME;
+			Path outPath = dir.resolve(filename);
+			Files.write(outPath, lines, StandardCharsets.UTF_8);
+		}
+	}
 	
+	/*
+	 * Creates a header row for csv files that will contain data/curves matching
+	 * the supplied sequence.
+	 */
+	private static String createHeaderRow(XY_Sequence sequence) {
+		return "lon,lat," + Parsing.join(sequence.xValues(), Delimiter.COMMA);
+	}
 	
-
 	// TODO rename to HazardCurves (already have this object as data container)
 	//   or HazardCalc or just Hazard and handle single and map based calculations
+	//
+	// config override could perhaps have sites.csv set as a SiteSet
 	
 	// TODO always output to current directory?
 	
@@ -140,123 +229,123 @@ public class HazardCurve {
 	 * 
 	 * @param args
 	 */
-	public static void main(String[] args) {
-		if (args.length < 4 || args.length > 5) {
-			System.err.println(USAGE);
-			System.exit(1);
-		}
-
-		Logging.init();
-		Logger log = Logger.getLogger(HazardCurve.class.getName());
-
-		try {
-			try {
-				// check if lon value is parseable and route to single calc
-				Double.valueOf(args[2]);
-				runSingle(args, log);
-			} catch (NumberFormatException nfe) {
-				// otherwise route to multi calc
-				runMulti(args, log);
-			}
-		} catch (Exception e) {
-			System.err.println("Error processing request; arguments: " + Arrays.toString(args));
-			System.err.println();
-			e.printStackTrace();
-			System.err.println();
-			System.err.println(USAGE);
-		}
-	}
-
-	/*
-	 * test args: ../nshmp-model-dev/models/2008/Western\ US PGA -118.25 34.05
-	 */
-	private static void runSingle(String[] args, Logger log) {
-		Path modelPath = Paths.get(args[0]);
-		Imt imt = Imt.valueOf(args[1]);
-		double lon = Double.valueOf(args[2]);
-		double lat = Double.valueOf(args[3]);
-		double vs30 = (args.length > 4) ? Double.valueOf(args[4]) : DEFAULT_VS_30;
-
-		Location loc = Location.create(lat, lon);
-		Site site = Site.builder().location(loc).vs30(vs30).build();
-
-		try {
-			log.info("");
-			log.info("HazardCurve: loading model ...");
-			log.info("");
-			HazardModel model = HazardModel.load(modelPath, modelPath.getFileName().toString());
-
-			log.info("");
-			log.info("HazardCurve: calculating curve ...");
-			Stopwatch sw = Stopwatch.createStarted();
-			HazardResult result = Calcs.hazardCurve(model, imt, site, modelCurve);
-			sw.stop();
-			log.info("HazardCurve: complete (" + sw.elapsed(MILLISECONDS) + "ms)");
-			System.out.println(Parsing.join(result.curve().yValues(), Delimiter.COMMA));
-			System.exit(0);
-		} catch (Exception e) {
-			StringBuilder sb = new StringBuilder(LF);
-			sb.append("** Calculation error: ").append(e.getMessage()).append(LF);
-			sb.append("** Exiting **").append(LF).append(LF);
-			log.log(SEVERE, sb.toString(), e);
-			System.exit(1);
-		}
-	}
-
-	/*
-	 * test args: ../nshmp-model-dev/models/2008/Western\ US PGA tmp/test/sites.csv tmp/test/curves.csv
-	 */
-	private static void runMulti(String[] args, Logger log) throws IOException {
-		Path modelPath = Paths.get(args[0]);
-		Imt imt = Imt.valueOf(args[1]);
-		Path sitesPath = Paths.get(args[2]);
-		Path outPath = Paths.get(args[3]);
-
-		List<Site> sites = readSitesFile(sitesPath);
+//	public static void main(String[] args) {
+//		if (args.length < 4 || args.length > 5) {
+//			System.err.println(USAGE);
+//			System.exit(1);
+//		}
+//
+//		Logging.init();
+//		Logger log = Logger.getLogger(HazardCurve.class.getName());
+//
+//		try {
+//			try {
+//				// check if lon value is parseable and route to single calc
+//				Double.valueOf(args[2]);
+//				runSingle(args, log);
+//			} catch (NumberFormatException nfe) {
+//				// otherwise route to multi calc
+//				runMulti(args, log);
+//			}
+//		} catch (Exception e) {
+//			System.err.println("Error processing request; arguments: " + Arrays.toString(args));
+//			System.err.println();
+//			e.printStackTrace();
+//			System.err.println();
+//			System.err.println(USAGE);
+//		}
+//	}
 
-		log.info("");
-		log.info("HazardCurve: loading model ...");
-		log.info("");
-		HazardModel model = HazardModel.load(modelPath, modelPath.getFileName().toString());
-		List<HazardResult> results = new ArrayList<>();
+//	/*
+//	 * test args: ../nshmp-model-dev/models/2008/Western\ US PGA -118.25 34.05
+//	 */
+//	private static void runSingle(String[] args, Logger log) {
+//		Path modelPath = Paths.get(args[0]);
+//		Imt imt = Imt.valueOf(args[1]);
+//		double lon = Double.valueOf(args[2]);
+//		double lat = Double.valueOf(args[3]);
+//		double vs30 = (args.length > 4) ? Double.valueOf(args[4]) : DEFAULT_VS_30;
+//
+//		Location loc = Location.create(lat, lon);
+//		Site site = Site.builder().location(loc).vs30(vs30).build();
+//
+//		try {
+//			log.info("");
+//			log.info("HazardCurve: loading model ...");
+//			log.info("");
+//			HazardModel model = HazardModel.load(modelPath, modelPath.getFileName().toString());
+//
+//			log.info("");
+//			log.info("HazardCurve: calculating curve ...");
+//			Stopwatch sw = Stopwatch.createStarted();
+//			HazardResult result = Calcs.hazardCurve(model, imt, site, modelCurve);
+//			sw.stop();
+//			log.info("HazardCurve: complete (" + sw.elapsed(MILLISECONDS) + "ms)");
+//			System.out.println(Parsing.join(result.curve().yValues(), Delimiter.COMMA));
+//			System.exit(0);
+//		} catch (Exception e) {
+//			StringBuilder sb = new StringBuilder(LF);
+//			sb.append("** Calculation error: ").append(e.getMessage()).append(LF);
+//			sb.append("** Exiting **").append(LF).append(LF);
+//			log.log(SEVERE, sb.toString(), e);
+//			System.exit(1);
+//		}
+//	}
 
-		try {
-			log.info("");
-			log.info("HazardCurve: calculating curves ...");
-			int count = 0;
-			Stopwatch sw = Stopwatch.createUnstarted();
-			for (Site site : sites) {
-				sw.start();
-				results.add(Calcs.hazardCurve(model, imt, site, modelCurve));
-				log.info("  " + count + " complete (" + sw.elapsed(MILLISECONDS) + "ms)");
-				sw.reset();
-				count++;
-			}
-			log.info("HazardCurve: writing curves ...");
-			writeCurvesFile(outPath, sites, results);
-			log.info("HazardCurve: complete");
-			System.exit(0);
-		} catch (Exception e) {
-			StringBuilder sb = new StringBuilder(LF);
-			sb.append("** Calculation error: ").append(e.getMessage()).append(LF);
-			sb.append("** Exiting **").append(LF).append(LF);
-			log.log(SEVERE, sb.toString(), e);
-			System.exit(1);
-		}
-	}
+//	/*
+//	 * test args: ../nshmp-model-dev/models/2008/Western\ US PGA tmp/test/sites.csv tmp/test/curves.csv
+//	 */
+//	private static void runMulti(String[] args, Logger log) throws IOException {
+//		Path modelPath = Paths.get(args[0]);
+//		Imt imt = Imt.valueOf(args[1]);
+//		Path sitesPath = Paths.get(args[2]);
+//		Path outPath = Paths.get(args[3]);
+//
+//		List<Site> sites = readSitesFile(sitesPath);
+//
+//		log.info("");
+//		log.info("HazardCurve: loading model ...");
+//		log.info("");
+//		HazardModel model = HazardModel.load(modelPath, modelPath.getFileName().toString());
+//		List<HazardResult> results = new ArrayList<>();
+//
+//		try {
+//			log.info("");
+//			log.info("HazardCurve: calculating curves ...");
+//			int count = 0;
+//			Stopwatch sw = Stopwatch.createUnstarted();
+//			for (Site site : sites) {
+//				sw.start();
+//				results.add(Calcs.hazardCurve(model, imt, site, modelCurve));
+//				log.info("  " + count + " complete (" + sw.elapsed(MILLISECONDS) + "ms)");
+//				sw.reset();
+//				count++;
+//			}
+//			log.info("HazardCurve: writing curves ...");
+//			writeCurvesFile(outPath, sites, results);
+//			log.info("HazardCurve: complete");
+//			System.exit(0);
+//		} catch (Exception e) {
+//			StringBuilder sb = new StringBuilder(LF);
+//			sb.append("** Calculation error: ").append(e.getMessage()).append(LF);
+//			sb.append("** Exiting **").append(LF).append(LF);
+//			log.log(SEVERE, sb.toString(), e);
+//			System.exit(1);
+//		}
+//	}
 
-	private static void writeCurvesFile(Path out, List<Site> sites, List<HazardResult> results)
-			throws IOException {
-		List<String> lines = Lists.newArrayList(HEADER1, HEADER2);
-		for (int i = 0; i < sites.size(); i++) {
-			StringBuilder sb = new StringBuilder();
-			Location loc = sites.get(i).location;
-			sb.append(loc.lon()).append(',').append(loc.lat()).append(',');
-			sb.append(Parsing.join(results.get(i).curve().yValues(), Delimiter.COMMA));
-			lines.add(sb.toString());
-		}
-		Files.write(out, lines, US_ASCII);
-	}
+//	private static void writeCurvesFile(Path out, List<Site> sites, List<HazardResult> results)
+//			throws IOException {
+//		List<String> lines = Lists.newArrayList(HEADER1, HEADER2);
+//		for (int i = 0; i < sites.size(); i++) {
+//			StringBuilder sb = new StringBuilder();
+//			Location loc = sites.get(i).location;
+//			sb.append(loc.lon()).append(',').append(loc.lat()).append(',');
+//			sb.append(Parsing.join(results.get(i).curve().yValues(), Delimiter.COMMA));
+//			lines.add(sb.toString());
+//		}
+//		Files.write(out, lines, US_ASCII);
+//	}
 
 	private static List<Site> readSitesFile(Path path) throws IOException {
 		List<String> lines = Files.readAllLines(path, US_ASCII);
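
Taken together, the refactored entry points suggest a driver along these lines (a
sketch only, run inside a method declaring IOException: the output directory is
arbitrary, and the single PGA model curve is a stand-in pending exposure of
per-Imt model curves via CalcConfig):

    HazardModel model = HazardModel.load(modelPath, modelPath.getFileName().toString());
    Map<Site, HazardResult> results = HazardCurve.calc(model);
    Map<Imt, ArrayXY_Sequence> modelCurves = ImmutableMap.of(Imt.PGA, Utils.nshmpCurve());
    HazardCurve.writeResults(Paths.get("output"), modelCurves, results, true);
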
diff --git a/src/org/opensha/programs/HazardMap.java b/src/org/opensha/programs/HazardMap.java
index 644da3bab..f26343bd0 100644
--- a/src/org/opensha/programs/HazardMap.java
+++ b/src/org/opensha/programs/HazardMap.java
@@ -66,7 +66,7 @@ public class HazardMap {
 		Builder<ArrayXY_Sequence> curveBuilder = ImmutableList.builder();
 		for (Site site : sites) {
 			siteBuilder.add(site);
-			curveBuilder.add(HazardCurve.calc(model, imt, site).curve());
+//			curveBuilder.add(HazardCurve.calc(model, imt, site).curve()); // TODO restore once per-Imt results are exposed
 		}
 		return new Result(siteBuilder.build(), curveBuilder.build());
 	}
diff --git a/src/org/opensha/util/Logging.java b/src/org/opensha/util/Logging.java
index 1a02e04a9..7546c2a6d 100644
--- a/src/org/opensha/util/Logging.java
+++ b/src/org/opensha/util/Logging.java
@@ -24,12 +24,13 @@ public class Logging {
 	private static final String LF = LINE_SEPARATOR.value();
 
 	/**
-	 * Initialize logging from {@code lib/logging.properties}.
+	 * Initialize logging from {@code logging.properties} on the classpath,
+	 * falling back to {@code lib/logging.properties}.
 	 */
 	public static void init() {
 		try {
-			InputStream is = new FileInputStream("lib/logging.properties");
-//			InputStream is = Logging.class.getResourceAsStream("/lib/logging.properties");
+			InputStream is = Logging.class.getResourceAsStream("/logging.properties");
+			if (is == null) is = new FileInputStream("lib/logging.properties");
 			LogManager.getLogManager().readConfiguration(is);
 		} catch (IOException ioe) {
 			ioe.printStackTrace();
-- 
GitLab