From d1b8c2f4f580e8fd516ff73d32a0534985320896 Mon Sep 17 00:00:00 2001
From: Peter Powers <pmpowers@usgs.gov>
Date: Tue, 27 Oct 2015 13:11:03 -0600
Subject: [PATCH] deagg dev

---
 src/org/opensha2/calc/Calcs.java              |   2 +-
 .../opensha2/calc/DeaggResultPrototype.java   |   1 +
 src/org/opensha2/calc/Deaggregation.java      | 293 ++++++++++++++----
 src/org/opensha2/data/Data.java               |  33 +-
 src/org/opensha2/programs/DeaggCalc.java      |  95 +++++-
 src/org/opensha2/programs/HazardCalc.java     |  20 +-
 6 files changed, 350 insertions(+), 94 deletions(-)

diff --git a/src/org/opensha2/calc/Calcs.java b/src/org/opensha2/calc/Calcs.java
index e2efe86f7..be9736c42 100644
--- a/src/org/opensha2/calc/Calcs.java
+++ b/src/org/opensha2/calc/Calcs.java
@@ -41,7 +41,7 @@ public class Calcs {
 	// TODO refactor method names to be consistent with refactored hazard/deagg class names
 
 	/*
-	 * Implementation notes:
+	 * Developer notes:
 	 * 
 	 * -------------------------------------------------------------------------
 	 * Method argument order in this class, CalcFactory, and Transforms follow
diff --git a/src/org/opensha2/calc/DeaggResultPrototype.java b/src/org/opensha2/calc/DeaggResultPrototype.java
index ec201c07c..942d0895a 100644
--- a/src/org/opensha2/calc/DeaggResultPrototype.java
+++ b/src/org/opensha2/calc/DeaggResultPrototype.java
@@ -20,6 +20,7 @@ import com.google.gson.GsonBuilder;
  *
  * @author Peter Powers
  */
+@Deprecated
 public class DeaggResultPrototype {
 
 	private String id;
diff --git a/src/org/opensha2/calc/Deaggregation.java b/src/org/opensha2/calc/Deaggregation.java
index b666df3ce..ca2de827e 100644
--- a/src/org/opensha2/calc/Deaggregation.java
+++ b/src/org/opensha2/calc/Deaggregation.java
@@ -1,8 +1,13 @@
 package org.opensha2.calc;
 
 import static com.google.common.base.Preconditions.checkNotNull;
+import static com.google.common.base.Preconditions.checkState;
 import static org.opensha2.data.Data.checkInRange;
 import static org.opensha2.eq.Magnitudes.checkMagnitude;
+import static org.opensha2.util.TextUtils.NEWLINE;
+import static org.opensha2.data.Data.multiply;
+import static org.opensha2.data.Data.clean;
+import static com.google.common.primitives.Doubles.toArray;
 
 import java.util.ArrayList;
 import java.util.Collection;
@@ -10,10 +15,11 @@ import java.util.EnumSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
-import java.util.Queue;
 import java.util.Set;
 
 import org.opensha2.calc.CalcConfig.DeaggData;
+import org.opensha2.calc.DeaggResultPrototype.SourceContribution;
+import org.opensha2.calc.DeaggResultPrototype.SourceTypeContribution;
 import org.opensha2.calc.Deaggregation.Dataset.Builder;
 import org.opensha2.data.Data;
 import org.opensha2.data.DataTable;
@@ -29,15 +35,16 @@ import org.opensha2.gmm.Gmm;
 import org.opensha2.gmm.Imt;
 
 import com.google.common.base.Function;
-import com.google.common.base.StandardSystemProperty;
 import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.Iterables;
 import com.google.common.collect.ListMultimap;
+import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
-import com.google.common.collect.MinMaxPriorityQueue;
 import com.google.common.collect.MultimapBuilder;
 import com.google.common.collect.Multimaps;
-import com.google.common.collect.Ordering;
 import com.google.common.collect.Range;
+import com.google.common.primitives.Doubles;
 
 /**
  * For one (or each Imt) One Deagg per source set and ground motion model these
@@ -67,6 +74,12 @@ public final class Deaggregation {
 	 * 
 	 * I think we should screen contributing source sets and log warning if
 	 * specified deagg config does not span hazard result range
+	 * 
+	 * Deagg is going to operate on all relevant sources; if a source is out of
+	 * range, that's ok, contributing source lists will still have the total
+	 * contribution. Deagg limits are strange and really should just be used to
+	 * limit plot dimensions. In addition to logging, results should come with a
+	 * warning.
 	 */
 
 	private final HazardResult hazard;
@@ -79,6 +92,14 @@ public final class Deaggregation {
 		this.deaggs = deaggs;
 	}
 
+	public Exporter export(Imt imt) {
+		return new Exporter(deaggs.get(imt).totalDataset, "Total");
+	}
+
+	public Exporter export(Imt imt, Gmm gmm) {
+		return new Exporter(deaggs.get(imt).gmmDatasets.get(gmm), gmm.toString());
+	}
+
 	// all HazardResult curves are in x-log space already
 	private static final Interpolator IML_INTERPOLATE = Interpolator.builder()
 		.logy()
@@ -125,13 +146,14 @@ public final class Deaggregation {
 		// final Imt imt;
 
 		final Dataset totalDataset;
+
+		/* Reduction to Gmms. */
 		final Map<Gmm, Dataset> gmmDatasets;
 
 		Deagg(HazardResult hazard, Dataset model, Imt imt, double rate, double iml) {
 			// this.hazard = hazard;
 			// this.imt = imt;
 
-			// may want to consider SetMultimap
 			ListMultimap<Gmm, Dataset> datasets = MultimapBuilder
 				.enumKeys(Gmm.class)
 				.arrayListValues()
@@ -139,42 +161,68 @@ public final class Deaggregation {
 
 			for (HazardCurveSet curveSet : hazard.sourceSetMap.values()) {
 
-				System.out.println(curveSet.sourceSet.name());
-
 				Map<Gmm, Dataset> sourceSetDatasets = Deaggregator.of(curveSet)
 					.withDataModel(model)
 					.forImt(imt)
 					.atIml(rate, iml)
 					.deaggregate();
+
+				/*
+				 * Each dataset (above) contains the contributing sources (rate
+				 * and skipped rate)
+				 * 
+				 * barWeight = sourceSet rate
+				 */
+
+				// for (Entry<Gmm, Dataset> entry :
+				// sourceSetDatasets.entrySet()) {
+				// Dataset d = entry.getValue();
+				// String g = entry.getKey().name();
+				//
+				// double srcSetRate = 0.0;
+				// for (SourceContribution c : d.sources) {
+				// srcSetRate += c.rate;
+				// }
+				// System.out.println(g + " " + d);
+				// System.out.println(d.barWeight + " " + srcSetRate);
+				//
+				// }
+
 				datasets.putAll(Multimaps.forMap(sourceSetDatasets));
 			}
 
-			gmmDatasets = Maps.immutableEnumMap(Maps.transformValues(
-				Multimaps.asMap(datasets),
-				DATASET_CONSOLIDATOR));
+			
+			gmmDatasets = Maps.immutableEnumMap(
+				Maps.transformValues(
+					Multimaps.asMap(datasets),
+					DATASET_CONSOLIDATOR));
 
 			totalDataset = DATASET_CONSOLIDATOR.apply(gmmDatasets.values());
+
+//			for (Dataset d : gmmDatasets.values()) {
+//				System.out.println("BarWt: " + d.barWeight);
+//
+//			}
 		}
 
 		@Override public String toString() {
 			StringBuilder sb = new StringBuilder();
-			int index = 0;
+//			int index = 0;
 			double totalRate = 0.0;
-			for (Contribution source : totalDataset.sources) {
+			for (SourceContribution source : totalDataset.sources) {
 				// sb.append(index++).append(":  ").append(source).append(NEWLINE);
-				totalRate += (source.sourceRate + source.skipRate);
+				totalRate += (source.rate + source.skipRate);
 			}
-			sb.append(NEWLINE);
-			sb.append("TOTAL: " + totalRate).append(NEWLINE);
+			sb.append("TOTAL via sources: " + totalRate).append(NEWLINE);
+			sb.append("TOTAL via barWt  : " + totalDataset.barWeight).append(NEWLINE);
 			sb.append(NEWLINE);
 			sb.append(totalDataset.rmε);
+			sb.append(NEWLINE);
 			return sb.toString();
 		}
 
 	}
 
-	private static final String NEWLINE = StandardSystemProperty.LINE_SEPARATOR.value();
-
 	private static final Function<Collection<Dataset>, Dataset> DATASET_CONSOLIDATOR =
 		new Function<Collection<Dataset>, Dataset>() {
 			@Override public Dataset apply(Collection<Dataset> datasets) {
@@ -196,39 +244,43 @@ public final class Deaggregation {
 	// i.e. the bin plots at the contribution weighted distance
 	// private Comparator<ContributingRupture> comparator = Ordering.natural();
 
-	private Queue<Contribution> contribQueue = MinMaxPriorityQueue
-		.orderedBy(Ordering.natural())
-		.maximumSize(20)
-		.create();
+	// private Queue<Contribution> contribQueue = MinMaxPriorityQueue
+	// .orderedBy(Ordering.natural())
+	// .maximumSize(20)
+	// .create();
 
 	/* Wrapper class for a Source and it's contribution to hazard. */
-	static class Contribution implements Comparable<Contribution> {
+	static class SourceContribution implements Comparable<SourceContribution> {
+
+		// TODO need better way to identify source
+		// point source are created on the fly so they would need to be
+		// compared/summed by location
 
 		final String source;
-		final double sourceRate;
+		final double rate;
 		final double skipRate;
 
-		private Contribution(String source, double sourceRate, double skipRate) {
+		private SourceContribution(String source, double sourceRate, double skipRate) {
 			this.source = source;
-			this.sourceRate = sourceRate;
+			this.rate = sourceRate;
 			this.skipRate = skipRate;
 		}
 
-		@Override public int compareTo(Contribution other) {
-			return Double.compare(sourceRate, other.sourceRate);
+		@Override public int compareTo(SourceContribution other) {
+			return Double.compare(rate, other.rate);
 		}
 
 		@Override public String toString() {
 			StringBuilder sb = new StringBuilder();
-			sb.append(sourceRate).append(" ");
+			sb.append(rate).append(" ");
 			sb.append(skipRate).append(" ");
-			sb.append(sourceRate + skipRate).append(" ");
+			sb.append(rate + skipRate).append(" ");
 			sb.append(source);
 			return sb.toString();
 		}
 	}
 
-	/* Builder pattern */
+	/* Builder pattern; one per source set. */
 	private static class Deaggregator {
 
 		private final HazardCurveSet hazard;
@@ -287,16 +339,17 @@ public final class Deaggregation {
 		Map<Gmm, Dataset> deaggregate() {
 			checkState();
 
-			List<Contribution> sources = new ArrayList<>();
-
 			for (GroundMotions gms : hazard.hazardGroundMotionsList) {
 				InputList inputs = gms.inputs;
 				double minDistance = inputs.minDistance;
 				Map<Gmm, List<Double>> μLists = gms.means.get(imt);
 				Map<Gmm, List<Double>> σLists = gms.sigmas.get(imt);
 				Map<Gmm, Double> gmms = gmmSet.gmmWeightMap(minDistance);
-				Contribution source = processSource(inputs, gmms, μLists, σLists, EXCEEDANCE);
-				sources.add(source);
+				processSource(inputs, gmms, μLists, σLists, EXCEEDANCE);
+			}
+
+			for (Dataset.Builder builder : datasetBuilders.values()) {
+				builder.sourceSet(sources);
 			}
 
 			return createDataMap();
@@ -320,7 +373,7 @@ public final class Deaggregation {
 					}));
 		}
 
-		private Contribution processSource(
+		private void processSource(
 				InputList inputs,
 				Map<Gmm, Double> gmms,
 				Map<Gmm, List<Double>> μLists,
@@ -334,12 +387,6 @@ public final class Deaggregation {
 			Map<Gmm, Double> gmmSourceRates = createRateMap(gmmKeys);
 			Map<Gmm, Double> gmmSkipRates = createRateMap(gmmKeys);
 
-			/* Rate across all gmms. */
-			double sourceRate = 0.0;
-
-			/* Possibly skipped rate portion. */
-			double skipRate = 0.0;
-
 			/* Add rupture data to builders */
 			for (int i = 0; i < inputs.size(); i++) {
 
@@ -364,11 +411,9 @@ public final class Deaggregation {
 
 					if (skipRupture) {
 						gmmSkipRates.put(gmm, gmmSkipRates.get(gmm) + rate);
-						skipRate += rate;
 						continue;
 					}
 					gmmSourceRates.put(gmm, gmmSourceRates.get(gmm) + rate);
-					sourceRate += rate;
 
 					// System.out.println(μ + " " + σ + " " + iml);
 					// System.out.println("ε: " + ε);
@@ -383,14 +428,13 @@ public final class Deaggregation {
 
 			/* Add sources/contributors to builders. */
 			for (Gmm gmm : gmmKeys) {
-				Contribution source = new Contribution(
+				SourceContribution source = new SourceContribution(
 					inputs.parentName(),
 					gmmSourceRates.get(gmm),
 					gmmSkipRates.get(gmm));
 				datasetBuilders.get(gmm).add(source);
 			}
 
-			return new Contribution(inputs.parentName(), sourceRate, skipRate);
 		}
 
 		private static Map<Gmm, Double> createRateMap(Set<Gmm> gmms) {
@@ -410,6 +454,66 @@ public final class Deaggregation {
 	private static final Range<Double> rRange = Range.closed(0.0, 1000.0);
 	private static final Range<Double> εRange = Range.closed(-3.0, 3.0);
 
+	public static class Exporter {
+
+		final String component;
+		final List<Bin> data;
+//		final double sum;
+		final List<SourceTypeContribution> primarySourceSets;
+		final List<SourceContributionTmp> primarySources;
+
+
+		Exporter(Dataset data, String component) {
+			this.component = component;
+			List<Bin> binList = new ArrayList<>();
+			
+//			double sumTmp = 0.0;
+			
+			// iterate magnitudes descending, distances ascending
+			DataVolume binData = data.rmε;
+			List<Double> magnitudes = Lists.reverse(binData.columns());
+			List<Double> distances = binData.rows();
+			double toPercent = 100.0 / data.barWeight;
+//			System.out.println(data.barWeight);
+			for (double r : distances) {
+				for (double m : magnitudes) {
+					XySequence εColumn = binData.column(r, m);
+					if (εColumn.isEmpty()) continue;
+					double[] εValues = clean(2, multiply(toPercent, toArray(εColumn.yValues())));
+//					sumTmp += Data.sum(εValues);
+					binList.add(new Bin(r, m, εValues));
+				}
+			}
+			this.data = binList;
+//			this.sum = sumTmp;
+			
+			this.primarySourceSets = ImmutableList.of(
+				new SourceTypeContribution("California B-Faults CH", 28.5, -1, 5.0, 7.4, 0.4),
+				new SourceTypeContribution("California B-Faults GR", 22.0, -1, 6.2, 6.7, 0.15),
+				new SourceTypeContribution("CA Crustal Gridded", 15.0, -1, 7.0, 6.7, -0.2));
+
+			this.primarySources = ImmutableList.of(
+				new SourceContributionTmp("Puente Hills", 5.2, 521, 3.2, 7.6, 0.5, 160.1),
+				new SourceContributionTmp("Elysian Park", 4.0, 431, 5.6, 6.8, 0.7, 340.0),
+				new SourceContributionTmp("San Andreas (Mojave)", 1.2, 44, 32.1, 8.2, 1.5, 22.3));
+
+		}
+
+		static class Bin {
+
+			double distance;
+			double magnitude;
+			double[] εvalues;
+
+			Bin(double distance, double magnitude, double[] εvalues) {
+				this.distance = distance;
+				this.magnitude = magnitude;
+				this.εvalues = εvalues;
+			}
+		}
+
+	}
+
 	/*
 	 * Deaggregation dataset that stores deaggregation results of individual
 	 * SourceSets and Gmms. Datasets may be recombined via add().
@@ -430,7 +534,8 @@ public final class Deaggregation {
 		private final DataTable positionWeights;
 
 		/* Contributors */
-		private final List<Contribution> sources;
+		private final Map<SourceSet<? extends Source>, Double> sourceSets;
+		private final List<SourceContribution> sources;
 
 		private Dataset(
 				DataVolume rmε,
@@ -439,7 +544,8 @@ public final class Deaggregation {
 				DataTable rPositions,
 				DataTable mPositions,
 				DataTable positionWeights,
-				List<Contribution> sources) {
+				Map<SourceSet<? extends Source>, Double> sourceSets,
+				List<SourceContribution> sources) {
 
 			this.rmε = rmε;
 
@@ -453,6 +559,7 @@ public final class Deaggregation {
 			this.positionWeights = positionWeights;
 
 			this.sources = sources;
+			this.sourceSets = sourceSets;
 		}
 
 		/*
@@ -519,9 +626,7 @@ public final class Deaggregation {
 
 		/**
 		 * Initialize a deaggregation dataset builder from the settings in a
-		 * calculation configuration. Method delegates to
-		 * {@link #builder(double, double, double, double, double, double, double, double, double)}
-		 * .
+		 * calculation configuration.
 		 * 
 		 * @param config to process
 		 * @see CalcConfig
@@ -557,15 +662,9 @@ public final class Deaggregation {
 			 * to min and max supplied; we only check ranges here.
 			 */
 			return new Builder(
-				checkInRange(rRange, "Min distance", rMin),
-				checkInRange(rRange, "Min distance", rMax),
-				Δr,
-				checkMagnitude(mMin),
-				checkMagnitude(mMax),
-				Δm,
-				checkInRange(εRange, "Min epsilon", εMin),
-				checkInRange(εRange, "Max epsilon", εMax),
-				Δε);
+				rMin, rMax, Δr,
+				mMin, mMax, Δm,
+				εMin, εMax, Δε);
 		}
 
 		static class Builder {
@@ -583,7 +682,8 @@ public final class Deaggregation {
 			private DataTable.Builder mPositions;
 			private DataTable.Builder positionWeights;
 
-			private List<Contribution> sources;
+			private Map<SourceSet<? extends Source>, Double> sourceSets;
+			private ImmutableList.Builder<SourceContribution> sources;
 
 			private Builder(
 					double rMin, double rMax, double Δr,
@@ -614,7 +714,8 @@ public final class Deaggregation {
 					.rows(rMin, rMax, Δr)
 					.columns(mMin, mMax, Δm);
 
-				sources = new ArrayList<>();
+				sourceSets = Maps.newHashMap();
+				sources = ImmutableList.builder();
 			}
 
 			private Builder(Dataset model) {
@@ -622,7 +723,8 @@ public final class Deaggregation {
 				rPositions = DataTable.Builder.fromModel(model.rPositions);
 				mPositions = DataTable.Builder.fromModel(model.mPositions);
 				positionWeights = DataTable.Builder.fromModel(model.positionWeights);
-				sources = new ArrayList<>();
+				sourceSets = Maps.newHashMap();
+				sources = ImmutableList.builder();
 			}
 
 			/*
@@ -640,7 +742,7 @@ public final class Deaggregation {
 					double rw, double mw, double εw,
 					double rate) {
 
-				rmε.set(ri, mi, εi, rate);
+				rmε.add(ri, mi, εi, rate);
 
 				rBar += rw;
 				mBar += mw;
@@ -654,8 +756,17 @@ public final class Deaggregation {
 				return this;
 			}
 
+			// TODO check that this has been set on final validation; size>1
+			// check if singleton? once reducing individual field will not have
+			// been set
+			Builder sourceSet(SourceSet<? extends Source> sourceSet) {
+				checkState(sourceSets.isEmpty(), "SourceSet for dataset has already been set");
+				sourceSets.put(sourceSet, 0.0);
+				return this;
+			}
+
 			/* Add a contributing source to a dataset. */
-			Builder add(Contribution source) {
+			Builder add(SourceContribution source) {
 				sources.add(source);
 				return this;
 			}
@@ -668,18 +779,26 @@ public final class Deaggregation {
 				rBar += other.rBar;
 				mBar += other.mBar;
 				εBar += other.εBar;
-				barWeight += barWeight;
+				barWeight += other.barWeight;
 
 				rPositions.add(other.rPositions);
 				mPositions.add(other.mPositions);
 				positionWeights.add(other.positionWeights);
 
 				sources.addAll(other.sources);
+				Data.add(sourceSets, other.sourceSets);
 
 				return this;
 			}
 
 			Dataset build() {
+
+				if (sourceSets.size() == 1) {
+					Entry<SourceSet<? extends Source>, Double> entry =
+						Iterables.getOnlyElement(sourceSets.entrySet());
+					sourceSets.put(entry.getKey(), barWeight);
+				}
+
 				return new Dataset(
 					rmε.build(),
 					rBar, mBar, εBar,
@@ -687,10 +806,52 @@ public final class Deaggregation {
 					rPositions.build(),
 					mPositions.build(),
 					positionWeights.build(),
-					ImmutableList.copyOf(sources));
+					ImmutableMap.copyOf(sourceSets),
+					sources.build());
 			}
 		}
 
 	}
+	
+	static class SourceTypeContribution {
+		String name;
+		double contribution;
+		int id;
+		double rBar;
+		double mBar;
+		double εBar;
+
+		SourceTypeContribution(String name, double contribution, int id, double rBar, double mBar,
+				double εBar) {
+			this.name = name;
+			this.contribution = contribution;
+			this.id = id;
+			this.mBar = mBar;
+			this.rBar = rBar;
+			this.εBar = εBar;
+		}
+	}
+
+	static class SourceContributionTmp {
+		String name;
+		double contribution;
+		int id;
+		double r;
+		double m;
+		double ε;
+		double azimuth;
+
+		SourceContributionTmp(String name, double contribution, int id, double r, double m, double ε,
+				double azimuth) {
+			this.name = name;
+			this.contribution = contribution;
+			this.id = id;
+			this.m = m;
+			this.r = r;
+			this.ε = ε;
+			this.azimuth = azimuth;
+		}
+	}
+
 
 }
diff --git a/src/org/opensha2/data/Data.java b/src/org/opensha2/data/Data.java
index 3cae0492e..197cbd64c 100644
--- a/src/org/opensha2/data/Data.java
+++ b/src/org/opensha2/data/Data.java
@@ -20,6 +20,7 @@ import java.util.Collections;
 import java.util.Comparator;
 import java.util.List;
 import java.util.Map;
+import java.util.Map.Entry;
 import java.util.Random;
 
 import org.opensha2.util.MathUtils;
@@ -79,21 +80,21 @@ import com.google.common.primitives.Ints;
 public final class Data {
 
 	/*
-	 * 
-	 * TODO refactor to just Data.*
-	 * 
 	 * TODO verify that 'unchecked' variants actually improve performance; in
 	 * most cases all that's being done is an array.length comparison
 	 */
 
 	/*
-	 * Developer note: Transform Functions vs Pure Iteration
+	 * Developer notes:
+	 * -------------------------------------------------------------------------
+	 * Transform Functions vs Pure Iteration
 	 * 
 	 * The original implementation of this class used the built-in transform()
 	 * methods and math Functions to operate on data arrays. Tests showed the
 	 * Function approach to be only marginally slower, but much more processor
 	 * intensive suggesting there would be a performance penalty in
 	 * multi-threaded applications.
+	 * -------------------------------------------------------------------------
 	 */
 
 	private Data() {}
@@ -246,6 +247,26 @@ public final class Data {
 		return data1;
 	}
 
+	/**
+	 * Adds the entries of {@code map2} to {@code map1} in place. If a key from
+	 * {@code map2} exists in {@code map1}, then the value for that key is added
+	 * to the corresponding value in {@code map1}. If no such key exists in map
+	 * 1, then the key and value from map2 are transferred as is. Note that this
+	 * method is <i>not</i> synchronized.
+	 * 
+	 * @param map1
+	 * @param map2
+	 * @return a reference to {@code map1}
+	 */
+	public static <T> Map<T, Double> add(Map<T, Double> map1, Map<T, Double> map2) {
+		for (T key : map2.keySet()) {
+			Double v2 = map2.get(key);
+			Double v1 = (map1.containsKey(key)) ? map1.get(key) + v2 : v2;
+			map1.put(key, v1);
+		}
+		return map1;
+	}
+
 	/**
 	 * Subtract the values of {@code data2} from {@code data1} in place. To
 	 * subtract a term from every value of a dataset, use
@@ -847,8 +868,8 @@ public final class Data {
 
 	/**
 	 * Verify that the domain of a {@code double[]} does not exceed that of the
-	 * supplied {@link Range}. Method returns the supplied values for
-	 * use inline.
+	 * supplied {@link Range}. Method returns the supplied values for use
+	 * inline.
 	 * 
 	 * @param range of allowable values
 	 * @param values to validate
diff --git a/src/org/opensha2/programs/DeaggCalc.java b/src/org/opensha2/programs/DeaggCalc.java
index a37bcef42..b92d06006 100644
--- a/src/org/opensha2/programs/DeaggCalc.java
+++ b/src/org/opensha2/programs/DeaggCalc.java
@@ -1,25 +1,96 @@
 package org.opensha2.programs;
 
+import static java.lang.Runtime.getRuntime;
+import static java.util.concurrent.Executors.newFixedThreadPool;
+
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.Executor;
+import java.util.concurrent.ExecutorService;
+
+import org.opensha2.calc.CalcConfig;
+import org.opensha2.calc.Calcs;
+import org.opensha2.calc.Deaggregation;
 import org.opensha2.calc.HazardResult;
 import org.opensha2.calc.Site;
 import org.opensha2.eq.model.HazardModel;
 import org.opensha2.gmm.Imt;
 
+import com.google.common.base.Optional;
+import com.google.common.base.Throwables;
+
 /**
- * Add comments here
- *
+ * Entry point for deaggregating probabilistic seismic hazard.
+ * 
+ * Computes a hazard curve at a {@link Site} from a {@link HazardModel}. The
+ * {@code main()} method of this class returns mean hazard curves for the model
+ * and {@link Imt} specified. For more detailed results at a {@code Site},
+ * consider programmatically using the {@code calc()} methods of this class.
+ * 
  * @author Peter Powers
  */
 public class DeaggCalc {
 
-//	public Deagg calc(HazardResult model, Imt imt, Site site, double rate) {
-//		
-//		
-//		// HazardResult should reference model??
-//		
-//		// compute targetIml from totalCurve
-//		
-//	}
-	
-	// 
+	public static void main(String[] args) {
+
+		/* Delegate to run which has a return value for testing. */
+
+		String status = run(args);
+		if (status != null) {
+			System.err.print(status);
+			System.exit(1);
+		}
+		System.exit(0);
+	}
+
+	static String run(String[] args) {
+		throw new UnsupportedOperationException(
+			"Not yet implemented: how to specify return period");
+	}
+
+	// TODO when doing deagg as a program, what is output?
+	// Do we skip curve output? probably not
+	// - csv files of total and each gmm, deagg table
+	// - metadatafile about mean, mode (json??)
+	// - file of contributions?
+	// - one folder per site
+
+	/**
+	 * Perform a hazard deaggregation at a {@code site} for a {@code model},
+	 * {@code config}, and return period. If an {@code executor} is supplied, it
+	 * will be used to distribute hazard calculation tasks; otherwise, one will
+	 * be created.
+	 * 
+	 * <p><b>Note:</b> any model initialization settings in {@code config} will
+	 * be ignored as the supplied model will already have been initialized.</p>
+	 * 
+	 * @param model to use
+	 * @param config calculation configuration
+	 * @param site of interest
+	 * @param returnPeriod at which to deaggregate
+	 * @param executor to use ({@link Optional})
+	 * @return a Deaggregation
+	 */
+	public static Deaggregation calc(
+			HazardModel model,
+			CalcConfig config,
+			Site site,
+			double returnPeriod,
+			Optional<Executor> executor) {
+
+		Optional<Executor> execLocal = executor.or(Optional.of(createExecutor()));
+
+		try {
+			HazardResult result = Calcs.hazardCurve(model, config, site, execLocal);
+			if (!executor.isPresent()) ((ExecutorService) executor).shutdown();
+			return Calcs.deaggregation(result, returnPeriod);
+		} catch (ExecutionException | InterruptedException e) {
+			Throwables.propagate(e);
+			return null;
+		}
+	}
+
+	private static ExecutorService createExecutor() {
+		return newFixedThreadPool(getRuntime().availableProcessors());
+	}
+
 }
diff --git a/src/org/opensha2/programs/HazardCalc.java b/src/org/opensha2/programs/HazardCalc.java
index 294ccb9eb..83baa1ca0 100644
--- a/src/org/opensha2/programs/HazardCalc.java
+++ b/src/org/opensha2/programs/HazardCalc.java
@@ -34,11 +34,11 @@ import com.google.common.base.Stopwatch;
 import com.google.common.base.Throwables;
 
 /**
- * Entry point for computing a hazard curve at a {@link Site} from a
- * {@link HazardModel}. The {@code main()} method of this class returns mean
- * hazard curves for the model and {@link Imt} specified. For more detailed
- * results at a {@code Site}, consider programmatically using the {@code calc()}
- * methods of this class.
+ * Entry point for computing probabilistic seismic hazard at a {@link Site}
+ * from a {@link HazardModel}. The main method of this class outputs mean hazard
+ * curves for the model and {@link Imt}s specified per the calculation
+ * configuration. For more detailed results, consider programmatically using the
+ * {@code calc()} method of this class.
  * 
  * @author Peter Powers
  */
@@ -47,7 +47,7 @@ public class HazardCalc {
 	private static final int FLUSH_LIMIT = 2;
 
 	/**
-	 * Entry point for a hazard curve calculation.
+	 * Entry point for a hazard calculation.
 	 * 
 	 * <p>Computing hazard curves requires at least 1, and at most 3, arguments.
 	 * At a minimum, the path to a model zip file or directory must be
@@ -77,13 +77,15 @@ public class HazardCalc {
 	 *      example calculations</a>
 	 */
 	public static void main(String[] args) {
-		// delegate to run which has a return value for testing
+
+		/* Delegate to run which has a return value for testing. */
+
 		String status = run(args);
 		if (status != null) {
 			System.err.print(status);
-			// System.exit(1); TODO clean?
+			System.exit(1);
 		}
-		// System.exit(0);
+		System.exit(0);
 	}
 
 	static String run(String[] args) {
-- 
GitLab