From 3dbd672c6f33f3298566a3699902f5dca3918134 Mon Sep 17 00:00:00 2001
From: Peter Powers <pmpowers@usgs.gov>
Date: Thu, 3 Feb 2022 16:56:18 -0700
Subject: [PATCH] consolidated disagg cli apps; json out only

---
 .../gov/usgs/earthquake/nshmp/DisaggCalc.java | 423 +++++++++++++++++-
 .../usgs/earthquake/nshmp/DisaggEpsilon.java  | 371 ---------------
 .../gov/usgs/earthquake/nshmp/HazardCalc.java |   7 +-
 .../nshmp/www/hazard/DisaggController.java    |  12 +-
 .../nshmp/www/hazard/DisaggService.java       |   2 +-
 .../nshmp/www/hazard/HazardController.java    |   8 +-
 6 files changed, 419 insertions(+), 404 deletions(-)
 delete mode 100644 src/main/java/gov/usgs/earthquake/nshmp/DisaggEpsilon.java

diff --git a/src/main/java/gov/usgs/earthquake/nshmp/DisaggCalc.java b/src/main/java/gov/usgs/earthquake/nshmp/DisaggCalc.java
index 3719b7301..59387fcd4 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/DisaggCalc.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/DisaggCalc.java
@@ -1,21 +1,39 @@
 package gov.usgs.earthquake.nshmp;
 
+import static com.google.common.base.Preconditions.checkArgument;
+import static com.google.common.base.Preconditions.checkNotNull;
 import static gov.usgs.earthquake.nshmp.Text.NEWLINE;
+import static java.util.stream.Collectors.toList;
+import static java.util.stream.Collectors.toSet;
 
 import java.io.IOException;
+import java.io.Writer;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.Paths;
+import java.util.Arrays;
+import java.util.EnumMap;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
 import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
 import java.util.Optional;
+import java.util.Set;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.ThreadPoolExecutor;
 import java.util.logging.FileHandler;
 import java.util.logging.Logger;
+import java.util.stream.Collectors;
 
-import com.google.common.base.Preconditions;
+import com.google.common.base.Splitter;
+import com.google.common.base.Stopwatch;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableSet;
 import com.google.common.util.concurrent.MoreExecutors;
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
 
 import gov.usgs.earthquake.nshmp.calc.CalcConfig;
 import gov.usgs.earthquake.nshmp.calc.Disaggregation;
@@ -25,24 +43,45 @@ import gov.usgs.earthquake.nshmp.calc.HazardExport;
 import gov.usgs.earthquake.nshmp.calc.Site;
 import gov.usgs.earthquake.nshmp.calc.Sites;
 import gov.usgs.earthquake.nshmp.calc.ThreadCount;
+import gov.usgs.earthquake.nshmp.data.Interpolator;
+import gov.usgs.earthquake.nshmp.data.XySequence;
+import gov.usgs.earthquake.nshmp.gmm.Imt;
 import gov.usgs.earthquake.nshmp.internal.Logging;
 import gov.usgs.earthquake.nshmp.model.HazardModel;
 
 /**
- * Disaggregate probabilisitic seismic hazard at a return period of interest.
+ * Disaggregate probabilistic seismic hazard at a return period of interest or
+ * at specific ground motion levels.
  *
  * @author U.S. Geological Survey
  */
 public class DisaggCalc {
 
+  private static final Gson GSON = new GsonBuilder()
+      .serializeSpecialFloatingPointValues()
+      .serializeNulls()
+      .create();
+
   /**
    * Entry point for the disaggregation of probabilisitic seismic hazard.
    *
-   * <p>Disaggregating siesmic hazard is largeley identical to a hazard
-   * calculation except that a return period (in years) must be supplied as an
-   * additional argument after the 'site(s)' argument. See the
-   * {@link HazardCalc#main(String[]) HazardCalc program} for more information
-   * on required parameters.
+   * <p>Two approaches to disaggregation of seismic hazard are possible with this
+   * application. In the first approach, the 'sites' file is the same as it
+   * would be for a hazard calculation, and disaggregation is performed for all
+   * configured intensity measures at the 'returnPeriod' (in years) of interest
+   * specified in the config file (default = 2475 years, equivalent to 2% in 50
+   * years).
+   *
+   * <p>In the second approach, the sites file includes columns for each
+   * spectral period or other intensity measure and the target ground motion
+   * level to disaggregate for each. For example, the target values could be a
+   * risk-targeted spectral accelerations, or they could be ground motion levels
+   * precomputed for a specific return period.
+   *
+   * <p>Note that the first approach will do the full hazard calculation and
+   * compute hazard curves from which the target disaggregation ground motion
+   * level will be determined. In the second approach, the ground motion targets
+   * are known and the time consuming hazard curve calculation can be avoided.
    *
    * <p>Please refer to the nshmp-haz <a
    * href="https://code.usgs.gov/ghsc/nshmp/nshmp-haz/-/tree/main/docs">
@@ -78,9 +117,10 @@ public class DisaggCalc {
     Logging.init();
     Logger log = Logger.getLogger(DisaggCalc.class.getName());
     Path tmpLog = HazardCalc.createTempLog();
+    String tmpLogName = checkNotNull(tmpLog.getFileName()).toString();
 
     try {
-      FileHandler fh = new FileHandler(Preconditions.checkNotNull(tmpLog.getFileName()).toString());
+      FileHandler fh = new FileHandler(tmpLogName);
       fh.setFormatter(new Logging.ConsoleFormatter());
       log.getParent().addHandler(fh);
 
@@ -88,6 +128,14 @@ public class DisaggCalc {
       Path modelPath = Paths.get(args[0]);
       HazardModel model = HazardModel.load(modelPath);
 
+      log.info("");
+      Path siteFile = Paths.get(args[1]);
+      log.info("Site file: " + siteFile.toAbsolutePath().normalize());
+      checkArgument(
+          siteFile.toString().endsWith(".csv"),
+          "Only *.csv site files supported");
+
+      /* Calculation configuration, possibly user supplied. */
       CalcConfig config = model.config();
       if (argCount == 3) {
         Path userConfigPath = Paths.get(args[2]);
@@ -97,13 +145,64 @@ public class DisaggCalc {
       }
       log.info(config.toString());
 
-      log.info("");
-      List<Site> sites = HazardCalc.readSites(args[1], config, model.siteData(), log);
-      log.info("Sites: " + Sites.toString(sites));
+      /* Column header data. */
+      Set<String> allColumns = columns(siteFile);
+      Set<String> siteColumns = new HashSet<>(allColumns);
+      siteColumns.retainAll(SITE_KEYS);
+      int colsToSkip = siteColumns.size(); // needed?
+      log.info("Site data columns: " + colsToSkip);
+
+      /* Sites */
+      List<Site> sites = Sites.fromCsv(siteFile, config, model.siteData());
+      log.info("Sites: " + sites.size());
+
+      Set<Imt> modelImts = model.config().hazard.imts;
+
+      /*
+       * If no IML columns present, disaggregate at IMTs and return period from
+       * config; otherwise disaggregate at the target IMLs that are present.
+       *
+       * We've removed support for geojson site files at present.
+       */
+      Path out;
+      if (siteColumns.size() == allColumns.size()) {
+        checkArgument(
+            modelImts.containsAll(config.hazard.imts),
+            "Config specifies IMTs not supported by model");
+
+        // List<Imt> imts = config.imts;
+
+        // Path out = calc(model, config, sites, imtImlMaps, log);
+
+        double returnPeriod = config.disagg.returnPeriod;
+
+        out = calcRp(model, config, sites, returnPeriod, log);
+
+      } else {
+
+        List<Imt> imts = readImtList(siteFile, colsToSkip);
+        checkArgument(
+            modelImts.containsAll(imts),
+            "Sites file contains IMTs not supported by model");
+        List<Map<Imt, Double>> imls = readSpectra(siteFile, imts, colsToSkip);
+        checkArgument(
+            sites.size() == imls.size(),
+            "Sites and spectra lists different sizes");
+        log.info("Spectra: " + imls.size()); // 1:1 with sites
 
-      double returnPeriod = config.disagg.returnPeriod;
+        out = calcIml(model, config, sites, imls, log);
+      }
+
+      // List<Map<Imt, Double>> imtImlMaps = readSpectra(siteFile, imts,
+      // colsToSkip);
+      // log.info("Spectra: " + imtImlMaps.size());
+
+      // checkArgument(sites.size() == imtImlMaps.size(), "Sites and spectra
+      // lists different sizes");
+      // Spectra should be checked against IMTs supported by model GMMs
+
+      // Path out = calc(model, config, sites, imls, log);
 
-      Path out = calc(model, config, sites, returnPeriod, log);
       log.info(PROGRAM + ": finished");
 
       /* Transfer log and write config, windows requires fh.close() */
@@ -118,6 +217,58 @@ public class DisaggCalc {
     }
   }
 
+  private static final Set<String> SITE_KEYS = ImmutableSet.of(
+      Site.Key.NAME,
+      Site.Key.LAT,
+      Site.Key.LON,
+      Site.Key.VS30,
+      Site.Key.VS_INF,
+      Site.Key.Z1P0,
+      Site.Key.Z2P5);
+
+  private static Set<String> columns(Path path) throws IOException {
+    String header = Files.lines(path).findFirst().get();
+    return Arrays.stream(header.split(","))
+        .map(String::trim)
+        .collect(toSet());
+  }
+
+  private static List<Imt> readImtList(Path path, int colsToSkip) throws IOException {
+    String header = Files.lines(path).findFirst().get();
+    return Splitter.on(',')
+        .trimResults()
+        .splitToList(header)
+        .stream()
+        .skip(colsToSkip)
+        .map(Imt::valueOf)
+        .collect(ImmutableList.toImmutableList());
+  }
+
+  private static List<Map<Imt, Double>> readSpectra(Path path, List<Imt> imts, int colsToSkip)
+      throws IOException {
+    return Files.lines(path)
+        .skip(1)
+        .map(s -> readSpectra(imts, s, colsToSkip))
+        .collect(ImmutableList.toImmutableList());
+  }
+
+  private static Map<Imt, Double> readSpectra(List<Imt> imts, String line, int colsToSkip) {
+
+    double[] imls = Splitter.on(',')
+        .trimResults()
+        .splitToList(line)
+        .stream()
+        .skip(colsToSkip)
+        .mapToDouble(Double::valueOf)
+        .toArray();
+
+    EnumMap<Imt, Double> imtImlMap = new EnumMap<>(Imt.class);
+    for (int i = 0; i < imts.size(); i++) {
+      imtImlMap.put(imts.get(i), imls[i]);
+    }
+    return imtImlMap;
+  }
+
   /*
    * Compute hazard curves using the supplied model, config, and sites. Method
    * returns the path to the directory where results were written.
@@ -126,7 +277,7 @@ public class DisaggCalc {
    * HazardCalc.calc() that will trigger disaggregations if the value is
    * present.
    */
-  private static Path calc(
+  private static Path calcRp(
       HazardModel model,
       CalcConfig config,
       List<Site> sites,
@@ -143,26 +294,252 @@ public class DisaggCalc {
       log.info("Threads: " + ((ThreadPoolExecutor) exec).getCorePoolSize());
     }
 
-    log.info(PROGRAM + ": calculating ...");
+    log.info(PROGRAM + " (return period): calculating ...");
 
     HazardExport handler = HazardExport.create(model, config, sites, log);
+    Path disaggDir = handler.outputDir().resolve("disagg");
+    Files.createDirectory(disaggDir);
+
+    Stopwatch stopwatch = Stopwatch.createStarted();
+    int logInterval = sites.size() > 1000 ? 100 : sites.size() > 100 ? 10 : 1;
+
+    for (int i = 0; i < sites.size(); i++) {
+      Site site = sites.get(i);
 
-    for (Site site : sites) {
       Hazard hazard = HazardCalcs.hazard(model, config, site, exec);
-      Disaggregation disagg = HazardCalcs.disaggReturnPeriod(hazard, returnPeriod, exec);
-      handler.write(hazard, Optional.of(disagg));
-      log.fine(hazard.toString());
+
+      Map<Imt, Double> imls = imlsForReturnPeriod(hazard, returnPeriod);
+
+      Disaggregation disagg = Disaggregation.atImls(hazard, imls, exec);
+
+      // needs to handle disagg same way as iml
+      // handler.write(hazard, Optional.of(disagg));
+      handler.write(hazard, Optional.empty());
+
+      Response response = new Response.Builder()
+          .site(site)
+          .returnPeriod(returnPeriod)
+          .imls(imls)
+          .disagg(disagg)
+          .build();
+
+      String filename = disaggFilename(site);
+      Path resultPath = disaggDir.resolve(filename);
+      Writer writer = Files.newBufferedWriter(resultPath);
+      GSON.toJson(response, writer);
+      writer.close();
+
+      if (i % logInterval == 0) {
+        log.info(String.format(
+            "     %s of %s sites completed in %s",
+            i + 1, sites.size(), stopwatch));
+      }
     }
     handler.expire();
 
     log.info(String.format(
-        PROGRAM + ": %s sites completed in %s",
+        PROGRAM + " (return period): %s sites completed in %s",
         handler.resultCount(), handler.elapsedTime()));
 
     exec.shutdown();
     return handler.outputDir();
   }
 
+  /* Hazard curves are already in log-x space. */
+  static final Interpolator IML_INTERPOLATER = Interpolator.builder()
+      .logy()
+      .decreasingY()
+      .build();
+
+  // this should be in a factory
+  private static Map<Imt, Double> imlsForReturnPeriod(
+      Hazard hazard,
+      double returnPeriod) {
+
+    double rate = 1.0 / returnPeriod;
+    Map<Imt, Double> imls = new EnumMap<>(Imt.class);
+    for (Entry<Imt, XySequence> entry : hazard.curves().entrySet()) {
+      double iml = IML_INTERPOLATER.findX(entry.getValue(), rate);
+      // remove exp below by transforming disagg-epsilon to log earlier
+      imls.put(entry.getKey(), Math.exp(iml));
+    }
+    return imls;
+  }
+
+  /*
+   * Compute hazard curves using the supplied model, config, and sites. Method
+   * returns the path to the directory where results were written.
+   *
+   * TODO consider refactoring to supply an Optional<Double> return period to
+   * HazardCalc.calc() that will trigger disaggregations if the value is
+   * present.
+   */
+  private static Path calcIml(
+      HazardModel model,
+      CalcConfig config,
+      List<Site> sites,
+      List<Map<Imt, Double>> imls,
+      Logger log) throws IOException {
+
+    ExecutorService exec = null;
+    ThreadCount threadCount = config.performance.threadCount;
+    if (threadCount == ThreadCount.ONE) {
+      exec = MoreExecutors.newDirectExecutorService();
+      log.info("Threads: Running on calling thread");
+    } else {
+      exec = Executors.newFixedThreadPool(threadCount.value());
+      log.info("Threads: " + ((ThreadPoolExecutor) exec).getCorePoolSize());
+    }
+
+    log.info(PROGRAM + " (IML): calculating ...");
+    Path outDir = createOutputDir(config.output.directory);
+    Path disaggDir = outDir.resolve("disagg");
+    Files.createDirectory(disaggDir);
+
+    Stopwatch stopwatch = Stopwatch.createStarted();
+    int logInterval = sites.size() > 1000 ? 100 : sites.size() > 100 ? 10 : 1;
+
+    for (int i = 0; i < sites.size(); i++) {
+
+      Site site = sites.get(i);
+      Map<Imt, Double> siteImls = imls.get(i);
+
+      Hazard hazard = HazardCalcs.hazard(model, config, site, exec);
+      Disaggregation disagg = Disaggregation.atImls(hazard, siteImls, exec);
+
+      Response response = new Response.Builder()
+          .site(site)
+          .imls(siteImls)
+          .disagg(disagg)
+          .build();
+
+      String filename = disaggFilename(site);
+      Path resultPath = disaggDir.resolve(filename);
+      Writer writer = Files.newBufferedWriter(resultPath);
+      GSON.toJson(response, writer);
+      writer.close();
+
+      if (i % logInterval == 0) {
+        log.info(String.format(
+            "     %s of %s sites completed in %s",
+            i + 1, sites.size(), stopwatch));
+      }
+    }
+
+    log.info(String.format(
+        PROGRAM + " (IML): %s sites completed in %s",
+        sites.size(), stopwatch));
+
+    exec.shutdown();
+    return outDir;
+  }
+
+  private static final class Response {
+
+    final Response.Metadata metadata;
+    final Object data;
+
+    Response(Response.Metadata metadata, Object data) {
+      this.metadata = metadata;
+      this.data = data;
+    }
+
+    static final class Metadata {
+
+      final String name;
+      final double longitude;
+      final double latitude;
+      final double vs30;
+      final Double returnPeriod;
+      final Map<String, Double> imls;
+
+      Metadata(Site site, Double returnPeriod, Map<Imt, Double> imls) {
+        this.name = site.name();
+        this.longitude = site.location().longitude;
+        this.latitude = site.location().latitude;
+        this.vs30 = site.vs30();
+        this.returnPeriod = returnPeriod;
+        this.imls = imls.entrySet().stream()
+            .collect(Collectors.toMap(
+                e -> e.getKey().name(),
+                Entry::getValue,
+                (x, y) -> y,
+                () -> new LinkedHashMap<String, Double>()));
+      }
+    }
+
+    static final class Builder {
+
+      Disaggregation disagg;
+      Site site;
+      Double returnPeriod; // optional
+      Map<Imt, Double> imls;
+
+      Builder imls(Map<Imt, Double> imls) {
+        this.imls = imls;
+        return this;
+      }
+
+      Builder returnPeriod(double returnPeriod) {
+        this.returnPeriod = returnPeriod;
+        return this;
+      }
+
+      Builder site(Site site) {
+        this.site = site;
+        return this;
+      }
+
+      Builder disagg(Disaggregation disagg) {
+        this.disagg = disagg;
+        return this;
+      }
+
+      Response build() {
+
+        List<ImtDisagg> disaggs = imls.keySet().stream()
+            .map(imt -> new ImtDisagg(imt, disagg.toJson(imt)))
+            .collect(toList());
+
+        return new Response(
+            new Response.Metadata(site, returnPeriod, imls),
+            disaggs);
+      }
+    }
+  }
+
+  // this could be consolidated with DisaggService
+  private static final class ImtDisagg {
+    final String imt;
+    final Object data;
+
+    ImtDisagg(Imt imt, Object data) {
+      this.imt = imt.name();
+      this.data = data;
+    }
+  }
+
+  // duplicate of that in HazardExport
+  private static Path createOutputDir(Path dir) throws IOException {
+    int i = 1;
+    Path incrementedDir = dir;
+    while (Files.exists(incrementedDir)) {
+      incrementedDir = incrementedDir.resolveSibling(dir.getFileName() + "-" + i);
+      i++;
+    }
+    Files.createDirectories(incrementedDir);
+    return incrementedDir;
+  }
+
+  private static String disaggFilename(Site site) {
+    return site.name().equals(Site.NO_NAME)
+        ? String.format(
+            "%.2f,%.2f.json",
+            site.location().longitude,
+            site.location().latitude)
+        : site.name() + ".json";
+  }
+
   private static final String PROGRAM = DisaggCalc.class.getSimpleName();
   private static final String USAGE_COMMAND =
       "java -cp nshmp-haz.jar gov.usgs.earthquake.nshmp.DisaggCalc model sites [config]";
@@ -181,9 +558,10 @@ public class DisaggCalc {
       .append("Where:").append(NEWLINE)
       .append("  'model' is a model directory")
       .append(NEWLINE)
-      .append("  'sites' is a *.csv file or *.geojson file of sites and data")
+      .append(
+          "  'sites' is a *.csv file of locations, site parameters and (optional) target ground motion levels")
       .append(NEWLINE)
-      .append("     - site class and basin terms are optional")
+      .append("     - Header: lon,lat,PGA,SA0P01,SA0P02,...")
       .append(NEWLINE)
       .append("  'config' (optional) supplies a calculation configuration")
       .append(NEWLINE)
@@ -191,7 +569,6 @@ public class DisaggCalc {
       .append("For more information, see:").append(NEWLINE)
       .append("  ").append(USAGE_URL1).append(NEWLINE)
       .append("  ").append(USAGE_URL2).append(NEWLINE)
-      .append(NEWLINE)
       .toString();
 
 }
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/DisaggEpsilon.java b/src/main/java/gov/usgs/earthquake/nshmp/DisaggEpsilon.java
deleted file mode 100644
index e4d039601..000000000
--- a/src/main/java/gov/usgs/earthquake/nshmp/DisaggEpsilon.java
+++ /dev/null
@@ -1,371 +0,0 @@
-package gov.usgs.earthquake.nshmp;
-
-import static com.google.common.base.Preconditions.checkArgument;
-import static gov.usgs.earthquake.nshmp.Text.NEWLINE;
-import static gov.usgs.earthquake.nshmp.calc.DataType.GMM;
-import static gov.usgs.earthquake.nshmp.calc.DataType.SOURCE;
-
-import java.io.IOException;
-import java.io.Writer;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.util.ArrayList;
-import java.util.EnumMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Optional;
-import java.util.Set;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.ThreadPoolExecutor;
-import java.util.logging.FileHandler;
-import java.util.logging.Logger;
-
-import com.google.common.base.Preconditions;
-import com.google.common.base.Splitter;
-import com.google.common.base.Stopwatch;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableSet;
-import com.google.common.collect.Sets;
-import com.google.common.util.concurrent.MoreExecutors;
-import com.google.gson.Gson;
-import com.google.gson.GsonBuilder;
-
-import gov.usgs.earthquake.nshmp.calc.CalcConfig;
-import gov.usgs.earthquake.nshmp.calc.Disaggregation;
-import gov.usgs.earthquake.nshmp.calc.Hazard;
-import gov.usgs.earthquake.nshmp.calc.HazardCalcs;
-import gov.usgs.earthquake.nshmp.calc.Site;
-import gov.usgs.earthquake.nshmp.calc.Sites;
-import gov.usgs.earthquake.nshmp.calc.ThreadCount;
-import gov.usgs.earthquake.nshmp.gmm.Imt;
-import gov.usgs.earthquake.nshmp.internal.Logging;
-import gov.usgs.earthquake.nshmp.model.HazardModel;
-
-/**
- * Disaggregate probabilistic seismic hazard at a return period of interest or
- * at specific ground motion levels.
- *
- * @author U.S. Geological Survey
- */
-public class DisaggEpsilon {
-
-  private static final Gson GSON = new GsonBuilder()
-      .serializeSpecialFloatingPointValues()
-      .serializeNulls()
-      .create();
-
-  /**
-   * Entry point for the disaggregation of probabilisitic seismic hazard.
-   *
-   * <p>Two approaches to disaggregation of seimic hazard are possible with this
-   * application. In the first approach, the 'sites' file is the same as it
-   * would be for a hazard calculation, and disaggregation is performed for all
-   * configured intensity measures at the 'returnPeriod' (in years) of interest
-   * specified in the config file (default = 2475 years).
-   *
-   * <p>In the second approach, the sites file includes columns for each
-   * spectral period and the target ground motion level to disaggregate for
-   * each. For example, the target values could be a risk-targeted response
-   * spectrum, or they could be ground motion levels precomputed for a specific
-   * return period.
-   *
-   * <p>It is important to note that the first approach will do the full hazard
-   * calculation and compute hazard curves from which the target disaggregation
-   * ground motion level will be determined. In the second approach, the ground
-   * motion targets are known and the time consuming hazard curve calculation
-   * can be avoided.
-   *
-   * <p>Please refer to the nshmp-haz <a
-   * href="https://code.usgs.gov/ghsc/nshmp/nshmp-haz/-/tree/main/docs">
-   * docs</a> for comprehensive descriptions of source models, configuration
-   * files, site files, and hazard calculations.
-   *
-   * @see <a
-   *      href="https://code.usgs.gov/ghsc/nshmp/nshmp-haz/-/blob/main/docs/pages/Building-&-Running.md">
-   *      nshmp-haz Building & Running</a>
-   * @see <a
-   *      href="https://code.usgs.gov/ghsc/nshmp/nshmp-haz/-/tree/main/etc/examples">
-   *      example calculations</a>
-   */
-  public static void main(String[] args) {
-
-    /* Delegate to run which has a return value for testing. */
-
-    Optional<String> status = run(args);
-    if (status.isPresent()) {
-      System.err.print(status.get());
-      System.exit(1);
-    }
-    System.exit(0);
-  }
-
-  static Optional<String> run(String[] args) {
-    int argCount = args.length;
-
-    if (argCount < 2 || argCount > 3) {
-      return Optional.of(USAGE);
-    }
-
-    Logging.init();
-    Logger log = Logger.getLogger(DisaggCalc.class.getName());
-    Path tmpLog = HazardCalc.createTempLog();
-
-    try {
-      FileHandler fh = new FileHandler(Preconditions.checkNotNull(tmpLog.getFileName()).toString());
-      fh.setFormatter(new Logging.ConsoleFormatter());
-      log.getParent().addHandler(fh);
-
-      log.info(PROGRAM + ": " + HazardCalc.VERSION);
-      Path modelPath = Paths.get(args[0]);
-      HazardModel model = HazardModel.load(modelPath);
-
-      log.info("");
-      Path siteFile = Paths.get(args[1]);
-      log.info("Site and spectra file: " + siteFile.toAbsolutePath().normalize());
-      checkArgument(siteFile.toString().endsWith(".csv"), "Only *.csv site files supported");
-
-      int colsToSkip = headerCount(siteFile);
-      List<Imt> imts = readImtList(siteFile, colsToSkip);
-
-      CalcConfig config = model.config();
-      if (argCount == 3) {
-        Path userConfigPath = Paths.get(args[2]);
-        config = CalcConfig.copyOf(model.config())
-            .extend(CalcConfig.from(userConfigPath))
-            .build();
-      }
-      log.info(config.toString());
-
-      List<Site> sites = ImmutableList.copyOf(Sites.fromCsv(siteFile, config, model.siteData()));
-      log.info("Sites: " + sites.size());
-
-      log.info("Site data columns: " + colsToSkip);
-      List<Map<Imt, Double>> imtImlMaps = readSpectra(siteFile, imts, colsToSkip);
-      log.info("Spectra: " + imtImlMaps.size());
-
-      checkArgument(sites.size() == imtImlMaps.size(), "Sites and spectra lists different sizes");
-      // Spectra should be checked against IMTs supported by model GMMs
-
-      Path out = calc(model, config, sites, imtImlMaps, log);
-
-      log.info(PROGRAM + ": finished");
-
-      /* Transfer log and write config, windows requires fh.close() */
-      fh.close();
-      Files.move(tmpLog, out.resolve(PROGRAM + ".log"));
-      config.write(out);
-
-      return Optional.empty();
-
-    } catch (Exception e) {
-      return HazardCalc.handleError(e, log, tmpLog, args, PROGRAM, USAGE);
-    }
-  }
-
-  // TODO removed this set from Site; temp repair
-  static final Set<String> SITE_KEYS = ImmutableSet.of(
-      "name",
-      "lat",
-      "lon",
-      "vs30",
-      "vsInf",
-      "z1p0",
-      "z2p5");
-
-  /* returns the number of site data columns are present. */
-  private static int headerCount(Path path) throws IOException {
-    String header = Files.lines(path).findFirst().get();
-    Set<String> columns = ImmutableSet.copyOf(Splitter.on(',').trimResults().split(header));
-    return Sets.intersection(columns, SITE_KEYS).size();
-  }
-
-  private static List<Imt> readImtList(Path path, int colsToSkip) throws IOException {
-    String header = Files.lines(path).findFirst().get();
-    return Splitter.on(',')
-        .trimResults()
-        .splitToList(header)
-        .stream()
-        .skip(colsToSkip)
-        .map(Imt::valueOf)
-        .collect(ImmutableList.toImmutableList());
-  }
-
-  private static List<Map<Imt, Double>> readSpectra(Path path, List<Imt> imts, int colsToSkip)
-      throws IOException {
-    return Files.lines(path)
-        .skip(1)
-        .map(s -> readSpectra(imts, s, colsToSkip))
-        .collect(ImmutableList.toImmutableList());
-  }
-
-  private static Map<Imt, Double> readSpectra(List<Imt> imts, String line, int colsToSkip) {
-
-    double[] imls = Splitter.on(',')
-        .trimResults()
-        .splitToList(line)
-        .stream()
-        .skip(colsToSkip)
-        .mapToDouble(Double::valueOf)
-        .toArray();
-
-    EnumMap<Imt, Double> imtImlMap = new EnumMap<>(Imt.class);
-    for (int i = 0; i < imts.size(); i++) {
-      imtImlMap.put(imts.get(i), imls[i]);
-    }
-    return imtImlMap;
-  }
-
-  /*
-   * Compute hazard curves using the supplied model, config, and sites. Method
-   * returns the path to the directory where results were written.
-   *
-   * TODO consider refactoring to supply an Optional<Double> return period to
-   * HazardCalc.calc() that will trigger disaggregations if the value is
-   * present.
-   */
-  private static Path calc(
-      HazardModel model,
-      CalcConfig config,
-      List<Site> sites,
-      List<Map<Imt, Double>> rtrSpectra,
-      Logger log) throws IOException {
-
-    ExecutorService exec = null;
-    ThreadCount threadCount = config.performance.threadCount;
-    if (threadCount == ThreadCount.ONE) {
-      exec = MoreExecutors.newDirectExecutorService();
-      log.info("Threads: Running on calling thread");
-    } else {
-      exec = Executors.newFixedThreadPool(threadCount.value());
-      log.info("Threads: " + ((ThreadPoolExecutor) exec).getCorePoolSize());
-    }
-
-    log.info(PROGRAM + ": calculating ...");
-    Path outDir = createOutputDir(config.output.directory);
-    Path siteDir = outDir.resolve("vs30-" + (int) sites.get(0).vs30());
-    Files.createDirectory(siteDir);
-
-    Stopwatch stopwatch = Stopwatch.createStarted();
-
-    for (int i = 0; i < sites.size(); i++) {
-
-      Site site = sites.get(i);
-      Map<Imt, Double> spectrum = rtrSpectra.get(i);
-
-      // task: use IMLs from site spectra
-      Hazard hazard = HazardCalcs.hazard(model, config, site, exec);
-      Disaggregation disagg = Disaggregation.atImls(hazard, spectrum, exec);
-
-      boolean gmmsOut = config.output.dataTypes.contains(GMM);
-      boolean typesOut = config.output.dataTypes.contains(SOURCE);
-
-      List<Response> responses = new ArrayList<>(spectrum.size());
-      for (Imt imt : spectrum.keySet()) {
-        ResponseData imtMetadata = new ResponseData(
-            ImmutableList.of(),
-            site,
-            imt,
-            spectrum.get(imt));
-        Response response = new Response(
-            imtMetadata,
-            disagg.toJson(imt, false, gmmsOut, typesOut, false));
-        responses.add(response);
-      }
-      Result result = new Result(responses);
-
-      String filename = String.format(
-          "edisagg_%.2f_%.2f.json",
-          site.location().longitude,
-          site.location().latitude);
-
-      Path resultPath = siteDir.resolve(filename);
-      Writer writer = Files.newBufferedWriter(resultPath);
-      GSON.toJson(result, writer);
-      writer.close();
-      log.info(String.format(
-          "     %s of %s sites completed in %s",
-          i + 1, sites.size(), stopwatch));
-    }
-
-    exec.shutdown();
-    return siteDir;
-  }
-
-  private static class Result {
-
-    final List<Response> response;
-
-    Result(List<Response> response) {
-      this.response = response;
-    }
-  }
-
-  private static final class ResponseData {
-
-    final List<String> models;
-    final double longitude;
-    final double latitude;
-    final String imt;
-    final double iml;
-    final double vs30;
-
-    ResponseData(List<String> models, Site site, Imt imt, double iml) {
-      this.models = models;
-      this.longitude = site.location().longitude;
-      this.latitude = site.location().latitude;
-      this.imt = imt.toString();
-      this.iml = iml;
-      this.vs30 = site.vs30();
-    }
-  }
-
-  private static final class Response {
-
-    final ResponseData metadata;
-    final Object data;
-
-    Response(ResponseData metadata, Object data) {
-      this.metadata = metadata;
-      this.data = data;
-    }
-  }
-
-  static Path createOutputDir(Path dir) throws IOException {
-    int i = 1;
-    Path incrementedDir = dir;
-    while (Files.exists(incrementedDir)) {
-      incrementedDir = incrementedDir.resolveSibling(dir.getFileName() + "-" + i);
-      i++;
-    }
-    Files.createDirectories(incrementedDir);
-    return incrementedDir;
-  }
-
-  private static final String PROGRAM = DisaggEpsilon.class.getSimpleName();
-  private static final String USAGE_COMMAND =
-      "java -cp nshmp-haz.jar gov.usgs.earthquake.nshmp.DisaggEpsilon model sites [config]";
-
-  private static final String USAGE = new StringBuilder()
-      .append(NEWLINE)
-      .append(PROGRAM).append(" [").append(HazardCalc.VERSION).append("]").append(NEWLINE)
-      .append(NEWLINE)
-      .append("Usage:").append(NEWLINE)
-      .append("  ").append(USAGE_COMMAND).append(NEWLINE)
-      .append(NEWLINE)
-      .append("Where:").append(NEWLINE)
-      .append("  'model' is a model directory")
-      .append(NEWLINE)
-      .append(
-          "  'sites' is a *.csv file of locations, site parameters and (optional) target ground motion levels")
-      .append(NEWLINE)
-      .append("     - Header: lon,lat,PGA,SA0P01,SA0P02,...")
-      .append(NEWLINE)
-      .append("       (spectral periods must be ascending)")
-      .append(NEWLINE)
-      .append("  'config' (optional) supplies a calculation configuration")
-      .append(NEWLINE)
-      .toString();
-
-}
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/HazardCalc.java b/src/main/java/gov/usgs/earthquake/nshmp/HazardCalc.java
index 9773931f4..94b501ffb 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/HazardCalc.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/HazardCalc.java
@@ -49,7 +49,8 @@ public class HazardCalc {
    * At a minimum, the path to a model directory and a file of site(s) at which
    * to perform calculations must be specified. Under the 2-argument scenario,
    * model initialization and calculation configuration settings are drawn from
-   * the default configuration. Sites may be defined in a CSV or GeoJSON file.
+   * the default configuration for the model. Sites may be defined in a CSV or
+   * GeoJSON file.
    *
    * <p>To override any default calculation configuration settings, also supply
    * the path to a configuration file as a third argument.
@@ -88,9 +89,10 @@ public class HazardCalc {
     Logging.init();
     Logger log = Logger.getLogger(HazardCalc.class.getName());
     Path tmpLog = createTempLog();
+    String tmpLogName = checkNotNull(tmpLog.getFileName()).toString();
 
     try {
-      FileHandler fh = new FileHandler(checkNotNull(tmpLog.getFileName()).toString());
+      FileHandler fh = new FileHandler(tmpLogName);
       fh.setFormatter(new Logging.ConsoleFormatter());
       log.getParent().addHandler(fh);
 
@@ -98,6 +100,7 @@ public class HazardCalc {
       Path modelPath = Paths.get(args[0]);
       HazardModel model = HazardModel.load(modelPath);
 
+      /* Calculation configuration, possibly user supplied. */
       CalcConfig config = model.config();
       if (argCount == 3) {
         Path userConfigPath = Paths.get(args[2]);
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/www/hazard/DisaggController.java b/src/main/java/gov/usgs/earthquake/nshmp/www/hazard/DisaggController.java
index 680d946ad..c18aaf088 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/www/hazard/DisaggController.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/www/hazard/DisaggController.java
@@ -8,10 +8,10 @@ import java.util.Set;
 import gov.usgs.earthquake.nshmp.gmm.Imt;
 import gov.usgs.earthquake.nshmp.www.NshmpMicronautServlet;
 import gov.usgs.earthquake.nshmp.www.ServletUtil;
-
 import io.micronaut.core.annotation.Nullable;
 import io.micronaut.http.HttpRequest;
 import io.micronaut.http.HttpResponse;
+import io.micronaut.http.MediaType;
 import io.micronaut.http.annotation.Controller;
 import io.micronaut.http.annotation.Get;
 import io.micronaut.http.annotation.PathVariable;
@@ -43,7 +43,7 @@ public class DisaggController {
   @ApiResponse(
       description = "Disaggregation service metadata",
       responseCode = "200")
-  @Get
+  @Get(produces = MediaType.APPLICATION_JSON)
   public HttpResponse<String> doGetMetadata(HttpRequest<?> http) {
     try {
       return DisaggService.getMetadata(http);
@@ -74,7 +74,9 @@ public class DisaggController {
   @ApiResponse(
       description = "Disaggregation",
       responseCode = "200")
-  @Get(uri = "rp/{longitude}/{latitude}/{vs30}/{returnPeriod}{?imt}")
+  @Get(
+      uri = "rp/{longitude}/{latitude}/{vs30}/{returnPeriod}{?imt}",
+      produces = MediaType.APPLICATION_JSON)
   public HttpResponse<String> doGetDisaggReturnPeriod(
       HttpRequest<?> http,
       @Schema(
@@ -117,7 +119,9 @@ public class DisaggController {
   @ApiResponse(
       description = "Disaggregation",
       responseCode = "200")
-  @Get(uri = "iml/{longitude}/{latitude}/{vs30}")
+  @Get(
+      uri = "iml/{longitude}/{latitude}/{vs30}",
+      produces = MediaType.APPLICATION_JSON)
   public HttpResponse<String> doGetDisaggIml(
       HttpRequest<?> http,
       @Schema(
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/www/hazard/DisaggService.java b/src/main/java/gov/usgs/earthquake/nshmp/www/hazard/DisaggService.java
index ac428621e..aa92f52c8 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/www/hazard/DisaggService.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/www/hazard/DisaggService.java
@@ -27,7 +27,6 @@ import gov.usgs.earthquake.nshmp.www.ResponseBody;
 import gov.usgs.earthquake.nshmp.www.ServletUtil;
 import gov.usgs.earthquake.nshmp.www.hazard.HazardService.Metadata;
 import gov.usgs.earthquake.nshmp.www.meta.Parameter;
-
 import io.micronaut.http.HttpRequest;
 import io.micronaut.http.HttpResponse;
 import jakarta.inject.Singleton;
@@ -168,6 +167,7 @@ public final class DisaggService {
         .vs30(request.vs30)
         .build();
 
+    // could just get from HazardService
     CompletableFuture<Hazard> hazFuture = CompletableFuture.supplyAsync(
         () -> HazardCalcs.hazard(
             model, config, site,
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/www/hazard/HazardController.java b/src/main/java/gov/usgs/earthquake/nshmp/www/hazard/HazardController.java
index 0fe36de8a..72990d21e 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/www/hazard/HazardController.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/www/hazard/HazardController.java
@@ -5,10 +5,10 @@ import java.util.Set;
 import gov.usgs.earthquake.nshmp.gmm.Imt;
 import gov.usgs.earthquake.nshmp.www.NshmpMicronautServlet;
 import gov.usgs.earthquake.nshmp.www.ServletUtil;
-
 import io.micronaut.core.annotation.Nullable;
 import io.micronaut.http.HttpRequest;
 import io.micronaut.http.HttpResponse;
+import io.micronaut.http.MediaType;
 import io.micronaut.http.annotation.Controller;
 import io.micronaut.http.annotation.Get;
 import io.micronaut.http.annotation.PathVariable;
@@ -40,7 +40,7 @@ public class HazardController {
   @ApiResponse(
       description = "Hazard service metadata",
       responseCode = "200")
-  @Get
+  @Get(produces = MediaType.APPLICATION_JSON)
   public HttpResponse<String> doGetMetadata(HttpRequest<?> http) {
     try {
       return HazardService.getMetadata(http);
@@ -72,7 +72,9 @@ public class HazardController {
   @ApiResponse(
       description = "Hazard curves",
       responseCode = "200")
-  @Get(uri = "/{longitude}/{latitude}/{vs30}{?truncate,maxdir,imt}")
+  @Get(
+      uri = "/{longitude}/{latitude}/{vs30}{?truncate,maxdir,imt}",
+      produces = MediaType.APPLICATION_JSON)
   public HttpResponse<String> doGetHazard(
       HttpRequest<?> http,
       @Schema(
-- 
GitLab