diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index ca77c43ee01dd599bca1620d38d2cb4936451b3d..7438e952026b0e53bde8ace15a9707d786403747 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -211,7 +211,7 @@ Trigger AWS Deployment:
         --form ref=${REF} \
         --form "variables[description]=Triggered by nshmp-ws-static" \
         --form "variables[ENVIRONMENT]=${ENVIRONMENT}" \
-        --form "variables[NSHMP_NETCDF_CONUS_2018_IMAGE]=${IMAGE_NAME}" \
+        --form "variables[NSHMP_WS_STATIC_IMAGE]=${IMAGE_NAME}" \
         --form "variables[TRIGGER_PASSCODE]=${NSHMP_WS_STATIC_CDK_TRIGGER_TOKEN}" \
         "https://${PRIVATE_GITLAB}/api/v4/projects/${NSHMP_WS_STATIC_CDK_PROJECT_ID}/trigger/pipeline"
   stage: trigger
diff --git a/Dockerfile b/Dockerfile
index 18dcb4524bfd113c55ff23e8b1cf0c924b1ca0fb..6e0c8c43211e4f533bbc4a4f5d73f0acad640ea7 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -29,20 +29,20 @@ RUN ./gradlew assemble
 FROM ${FROM_IMAGE}
 
 # Path to the NetCDF file to use
-ENV NETCDF_FILE src/main/resources/default.nc
+ENV NETCDF_FILE hazard-example.nc
 ENV CONTEXT_PATH "/"
 ENV JAVA_OPTS=""
 
 WORKDIR /app
 
 COPY --from=builder /app/build/libs/nshmp-ws-static-all.jar nshmp-ws-static.jar
+COPY --from=builder /app/src/main/resources/hazard-example.nc .
 
 ENTRYPOINT /usr/bin/java \
     ${JAVA_OPTS} \
     -jar \
     nshmp-ws-static.jar \
     "-Dmicronaut.server.context-path=${CONTEXT_PATH}" \
-    -nshm=${NSHM} \
     -netcdf=${NETCDF_FILE}
 
 EXPOSE 8080
diff --git a/gradle.properties b/gradle.properties
index 6e69a119387b179bc044d6f830b3041c159abe31..4c9854e9cc11223fbfb14bc8ffefa5c59798535d 100644
--- a/gradle.properties
+++ b/gradle.properties
@@ -1,10 +1,9 @@
-cdmVersion = 5.1.0
 githooksVersion = 1.2.0
 jacksonVersion = 2.9.0
 junitVersion = 5.8.2
 micronautVersion = 3.2.3
 micronautRxVersion = 2.1.1
-netcdfVersion = 5.1.0
+netcdfVersion = 5.5.2
 nodePluginVersion = 3.0.1
 nodeVersion = 16.3.0
 nshmpLibVersion = 0.8.2
diff --git a/gradle/dependencies.gradle b/gradle/dependencies.gradle
index 5d0bbfcfa842838020e95c04571dd5f92be0e8bb..3a35167eb637aca8feb638a6133808166d0b158c 100644
--- a/gradle/dependencies.gradle
+++ b/gradle/dependencies.gradle
@@ -5,7 +5,7 @@ dependencies {
   implementation "ghsc:nshmp-ws-utils:${nshmpWsUtilsVersion}"
 
   // NetCDF
-  implementation "edu.ucar:cdm:${cdmVersion}"
+  implementation "edu.ucar:cdm-core:${netcdfVersion}"
   implementation "edu.ucar:netcdf4:${netcdfVersion}"
   implementation "org.slf4j:slf4j-jdk14:${slfVersion}"
 
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/Netcdf.java b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/Netcdf.java
new file mode 100644
index 0000000000000000000000000000000000000000..7dd22b3c01317235e45b06ccab50e7773d29c2e7
--- /dev/null
+++ b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/Netcdf.java
@@ -0,0 +1,97 @@
+package gov.usgs.earthquake.nshmp.netcdf;
+
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import gov.usgs.earthquake.nshmp.geo.Location;
+import gov.usgs.earthquake.nshmp.gmm.NehrpSiteClass;
+import gov.usgs.earthquake.nshmp.netcdf.data.BoundingData;
+import gov.usgs.earthquake.nshmp.netcdf.data.NetcdfData;
+import gov.usgs.earthquake.nshmp.netcdf.data.NetcdfShape;
+import gov.usgs.earthquake.nshmp.netcdf.data.StaticData;
+
+/**
+ * Abstract class for NetCDF types: hazard curves and ground motions.
+ *
+ * @author U.S. Geological Survey
+ */
+public abstract class Netcdf<T> {
+
+  protected final Path netcdfPath;
+  protected final NetcdfDataType dataType;
+  protected final NetcdfData netcdfData;
+  protected NetcdfShape netcdfShape;
+
+  private static final Logger LOGGER = Logger.getLogger("ucar");
+
+  static {
+    /* Update ucar logger */
+    LOGGER.setLevel(Level.SEVERE);
+  }
+
+  public Netcdf(Path netcdfPath) {
+    this.netcdfPath = netcdfPath;
+
+    if (Files.notExists(netcdfPath)) {
+      throw new IllegalArgumentException("Path to Netcdf file [" + netcdfPath + "] does not exist");
+    }
+
+    dataType = NetcdfDataType.getDataType(netcdfPath);
+    netcdfData = getNetcdfData(netcdfPath);
+    netcdfShape = buildNetcdfShape();
+  }
+
+  /**
+   * Returns the bounding data for a specific site.
+   *
+   * @param site The site for which to get the bounding data
+   */
+  public abstract BoundingData<T> boundingData(Location site);
+
+  /**
+   * Returns the data type.
+   */
+  public NetcdfDataType dataType() {
+    return dataType;
+  }
+
+  /**
+   * Returns the NetCDF data.
+   */
+  public abstract NetcdfData netcdfData();
+
+  /**
+   * Returns the NetCDF path.
+   */
+  public Path netcdfPath() {
+    return netcdfPath;
+  }
+
+  /**
+   * Returns the NetCDF shape.
+   */
+  public NetcdfShape netcdfShape() {
+    return netcdfShape;
+  }
+
+  /**
+   * Returns the static data for a specific site.
+   *
+   * @param site The site for which to get the static data
+   */
+  public abstract StaticData<T> staticData(Location site);
+
+  /**
+   * Returns the static data for a site and site class.
+   *
+   * @param site The site for which to get the static data
+   * @param siteClass The site class for which to get the static data
+   */
+  public abstract T staticData(Location site, NehrpSiteClass siteClass);
+
+  abstract NetcdfShape buildNetcdfShape();
+
+  abstract NetcdfData getNetcdfData(Path netcdfPath);
+}
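
Note: the Netcdf constructor does all of the loading up front (path check,
"dataType" attribute, then the getNetcdfData and buildNetcdfShape hooks), so
the hooks must not depend on subclass state. A minimal usage sketch against
the abstract type (the file name below is hypothetical):

    // A fully initialized object comes back from the constructor.
    Netcdf<StaticDataHazardCurves> netcdf =
        new NetcdfHazardCurves(Path.of("hazard-example.nc"));
    netcdf.dataType();                 // -> HAZARD_CURVES
    netcdf.netcdfData().siteClasses(); // site classes present in the file
    netcdf.netcdfShape();              // axis layout used for reads
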
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/NetcdfDataType.java b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/NetcdfDataType.java
new file mode 100644
index 0000000000000000000000000000000000000000..d7bb13c55cb393d6fad60b864e903487580fee39
--- /dev/null
+++ b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/NetcdfDataType.java
@@ -0,0 +1,32 @@
+package gov.usgs.earthquake.nshmp.netcdf;
+
+import java.io.IOException;
+import java.nio.file.Path;
+
+import gov.usgs.earthquake.nshmp.netcdf.reader.NetcdfUtils.Key;
+
+import ucar.nc2.dataset.NetcdfDatasets;
+
+/**
+ * Supported NetCDF data types.
+ */
+public enum NetcdfDataType {
+
+  GROUND_MOTIONS,
+  HAZARD_CURVES;
+
+  /**
+   * Returns the data type read from the "dataType" attribute of a NetCDF file.
+   *
+   * @param netcdfPath Path to NetCDF file
+   */
+  public static NetcdfDataType getDataType(Path netcdfPath) {
+    try (var ncd = NetcdfDatasets.openDataset(netcdfPath.toString())) {
+      var group = ncd.getRootGroup();
+      var vDataType = group.attributes().findAttribute(Key.DATA_TYPE);
+      return NetcdfDataType.valueOf(vDataType.getStringValue());
+    } catch (IOException e) {
+      throw new RuntimeException("Could not read Netcdf file [" + netcdfPath + " ]");
+    }
+  }
+}
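
Because the file declares its own type, callers can dispatch on the attribute
instead of hard-coding a subclass. A sketch of such a factory (a hypothetical
helper, not part of this change):

    // Pick the Netcdf subclass from the file's "dataType" attribute.
    static Netcdf<?> open(Path netcdfPath) {
      switch (NetcdfDataType.getDataType(netcdfPath)) {
        case GROUND_MOTIONS:
          return new NetcdfGroundMotions(netcdfPath);
        case HAZARD_CURVES:
          return new NetcdfHazardCurves(netcdfPath);
        default:
          throw new IllegalArgumentException("Unsupported data type");
      }
    }
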
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/NetcdfGroundMotions.java b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/NetcdfGroundMotions.java
new file mode 100644
index 0000000000000000000000000000000000000000..c8ce2480e36d1979b7c7508f94f45a85c8ae2408
--- /dev/null
+++ b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/NetcdfGroundMotions.java
@@ -0,0 +1,75 @@
+package gov.usgs.earthquake.nshmp.netcdf;
+
+import static com.google.common.base.Preconditions.checkArgument;
+
+import java.io.IOException;
+import java.nio.file.Path;
+
+import gov.usgs.earthquake.nshmp.data.XySequence;
+import gov.usgs.earthquake.nshmp.geo.Location;
+import gov.usgs.earthquake.nshmp.gmm.NehrpSiteClass;
+import gov.usgs.earthquake.nshmp.netcdf.data.BoundingData;
+import gov.usgs.earthquake.nshmp.netcdf.data.NetcdfData;
+import gov.usgs.earthquake.nshmp.netcdf.data.NetcdfShape;
+import gov.usgs.earthquake.nshmp.netcdf.data.NetcdfShape.IndexKey;
+import gov.usgs.earthquake.nshmp.netcdf.data.StaticData;
+import gov.usgs.earthquake.nshmp.netcdf.reader.BoundingReaderGroundMotions;
+import gov.usgs.earthquake.nshmp.netcdf.reader.Reader;
+
+import ucar.nc2.dataset.NetcdfDatasets;
+
+/**
+ * NetCDF data for ground motions.
+ *
+ * @author U.S. Geological Survey
+ */
+public class NetcdfGroundMotions extends Netcdf<XySequence> {
+
+  public NetcdfGroundMotions(Path netcdfPath) {
+    super(netcdfPath);
+  }
+
+  @Override
+  public BoundingData<XySequence> boundingData(Location site) {
+    return new BoundingReaderGroundMotions(this, site).boundingData();
+  }
+
+  @Override
+  public NetcdfData netcdfData() {
+    return netcdfData;
+  }
+
+  @Override
+  public StaticData<XySequence> staticData(Location site) {
+    return boundingData(site).get(site);
+  }
+
+  @Override
+  public XySequence staticData(Location site, NehrpSiteClass siteClass) {
+    checkArgument(
+        netcdfData.siteClasses().contains(siteClass),
+        "Site class [" + siteClass + "] not supported");
+    return staticData(site).get(siteClass);
+  }
+
+  @Override
+  NetcdfShape buildNetcdfShape() {
+    return NetcdfShape.builder()
+        .add(IndexKey.SITE_CLASS, 0)
+        .add(IndexKey.LATITUDE, 1)
+        .add(IndexKey.LONGITUDE, 2)
+        .add(IndexKey.IMT, 3)
+        .build();
+  }
+
+  @Override
+  NetcdfData getNetcdfData(Path netcdfPath) {
+    try (var ncd = NetcdfDatasets.openDataset(netcdfPath.toString())) {
+      var group = ncd.getRootGroup();
+      var reader = new Reader(group);
+      return reader.readData();
+    } catch (IOException e) {
+      throw new RuntimeException("Could not read Netcdf file [" + netcdfPath + " ]");
+    }
+  }
+}
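
Here the per-site result is a single XySequence: x values are spectral
periods (with PGA and PGV mapped to the placeholder constants in
BoundingReaderGroundMotions), y values the corresponding ground motions. A
usage sketch (file name, coordinates, and site class are hypothetical):

    NetcdfGroundMotions netcdf =
        new NetcdfGroundMotions(Path.of("ground-motions.nc"));
    Location site = Location.create(-120.0, 38.0); // (longitude, latitude)
    // Response-spectrum-like sequence, interpolated to the site
    XySequence groundMotions = netcdf.staticData(site, NehrpSiteClass.C);
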
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/NetcdfHazardCurves.java b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/NetcdfHazardCurves.java
new file mode 100644
index 0000000000000000000000000000000000000000..aa94e7cc2d53a2ff23e856347744ee6b36aadea5
--- /dev/null
+++ b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/NetcdfHazardCurves.java
@@ -0,0 +1,76 @@
+package gov.usgs.earthquake.nshmp.netcdf;
+
+import static com.google.common.base.Preconditions.checkArgument;
+
+import java.io.IOException;
+import java.nio.file.Path;
+
+import gov.usgs.earthquake.nshmp.geo.Location;
+import gov.usgs.earthquake.nshmp.gmm.NehrpSiteClass;
+import gov.usgs.earthquake.nshmp.netcdf.data.BoundingData;
+import gov.usgs.earthquake.nshmp.netcdf.data.NetcdfDataHazardCurves;
+import gov.usgs.earthquake.nshmp.netcdf.data.NetcdfShape;
+import gov.usgs.earthquake.nshmp.netcdf.data.NetcdfShape.IndexKey;
+import gov.usgs.earthquake.nshmp.netcdf.data.StaticData;
+import gov.usgs.earthquake.nshmp.netcdf.data.StaticDataHazardCurves;
+import gov.usgs.earthquake.nshmp.netcdf.reader.BoundingReaderHazardCurves;
+import gov.usgs.earthquake.nshmp.netcdf.reader.ReaderHazardCurves;
+
+import ucar.nc2.dataset.NetcdfDatasets;
+
+/**
+ * NetCDF data for hazard curves.
+ *
+ * @author U.S. Geological Survey
+ */
+public class NetcdfHazardCurves extends Netcdf<StaticDataHazardCurves> {
+
+  public NetcdfHazardCurves(Path netcdfPath) {
+    super(netcdfPath);
+  }
+
+  @Override
+  public BoundingData<StaticDataHazardCurves> boundingData(Location site) {
+    return new BoundingReaderHazardCurves(this, site).boundingData();
+  }
+
+  @Override
+  public NetcdfDataHazardCurves netcdfData() {
+    return (NetcdfDataHazardCurves) netcdfData;
+  }
+
+  @Override
+  public StaticData<StaticDataHazardCurves> staticData(Location site) {
+    return boundingData(site).get(site);
+  }
+
+  @Override
+  public StaticDataHazardCurves staticData(Location site, NehrpSiteClass siteClass) {
+    checkArgument(
+        netcdfData.siteClasses().contains(siteClass),
+        "Site class [" + siteClass + "] not supported");
+    return staticData(site).get(siteClass);
+  }
+
+  @Override
+  NetcdfShape buildNetcdfShape() {
+    return NetcdfShape.builder()
+        .add(IndexKey.SITE_CLASS, 0)
+        .add(IndexKey.IMT, 1)
+        .add(IndexKey.LATITUDE, 2)
+        .add(IndexKey.LONGITUDE, 3)
+        .add(IndexKey.IML, 4)
+        .build();
+  }
+
+  @Override
+  NetcdfDataHazardCurves getNetcdfData(Path netcdfPath) {
+    try (var ncd = NetcdfDatasets.openDataset(netcdfPath.toString())) {
+      var group = ncd.getRootGroup();
+      var reader = new ReaderHazardCurves(group);
+      return reader.readData();
+    } catch (IOException e) {
+      throw new RuntimeException("Could not read Netcdf file [" + netcdfPath + " ]");
+    }
+  }
+}
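
And the hazard-curve counterpart, where each site class maps to a set of
curves keyed by Imt (again with hypothetical inputs):

    NetcdfHazardCurves netcdf =
        new NetcdfHazardCurves(Path.of("hazard-example.nc"));
    Location site = Location.create(-105.25, 39.75); // (longitude, latitude)
    StaticDataHazardCurves curves = netcdf.staticData(site, NehrpSiteClass.BC);
    XySequence pgaCurve = curves.get(Imt.PGA); // x = Imls, y = hazard values
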
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/NshmNetcdfReader.java b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/NshmNetcdfReader.java
deleted file mode 100644
index 98600bf0d64d6187309f8461d920b2c830ae113b..0000000000000000000000000000000000000000
--- a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/NshmNetcdfReader.java
+++ /dev/null
@@ -1,130 +0,0 @@
-package gov.usgs.earthquake.nshmp.netcdf;
-
-import static com.google.common.base.Preconditions.checkArgument;
-
-import java.io.IOException;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import gov.usgs.earthquake.nshmp.data.XySequence;
-import gov.usgs.earthquake.nshmp.geo.Location;
-import gov.usgs.earthquake.nshmp.gmm.Imt;
-import gov.usgs.earthquake.nshmp.gmm.NehrpSiteClass;
-import gov.usgs.earthquake.nshmp.netcdf.reader.BoundingHazards;
-import gov.usgs.earthquake.nshmp.netcdf.reader.BoundingHazardsReader;
-import gov.usgs.earthquake.nshmp.netcdf.reader.NetcdfCoordinates;
-import gov.usgs.earthquake.nshmp.netcdf.reader.StaticHazard;
-import gov.usgs.earthquake.nshmp.netcdf.reader.StaticHazards;
-
-import ucar.nc2.dataset.NetcdfDataset;
-
-/**
- *
- * Hazard curves stored in netCDF
- *
- * This class is currently specific to NSHMP 2018a data release.
- *
- * TODO: generalize for any NSHMP model data release with subclasses per data
- * release.
- *
- * TODO: Replace loops with streams
- *
- * TODO: Utilize XySequence.copyOf()
- *
- * TODO: Update netCDF file format, rename variables, implement enums,
- * standardize attributes, remove Vs30 dimension from Imls
- *
- * TODO: attempt to calculate index, possibly as
- * NshmpNetcdfCoordinates.calcIdxLTEQ()
- *
- * @author U.S. Geological Survey
- */
-public class NshmNetcdfReader {
-
-  private static final Logger LOGGER = Logger.getLogger("ucar");
-
-  private final Path path;
-  private final NetcdfCoordinates coords;
-
-  static {
-    /* Update ucar logger */
-    LOGGER.setLevel(Level.SEVERE);
-  }
-
-  public NshmNetcdfReader(Path path) {
-    this.path = path;
-
-    if (Files.notExists(path)) {
-      throw new IllegalArgumentException("Path to Netcdf file [" + path + "] does not exist");
-    }
-
-    try (var ncd = NetcdfDataset.openDataset(path.toString())) {
-      coords = new NetcdfCoordinates(ncd.getRootGroup());
-    } catch (IOException e) {
-      throw new RuntimeException("Could not read Netcdf file [" + path + " ]");
-    }
-  }
-
-  /**
-   * Returns the NetCDF dimensions and coordinate variables.
-   */
-  public NetcdfCoordinates coordinates() {
-    return coords;
-  }
-
-  /**
-   * Returns the path to the NetCDF file.
-   */
-  public Path path() {
-    return path;
-  }
-
-  /**
-   * Return the full set of hazard curves at the bounding grid points and the
-   * target Location. Intended for validation uses.
-   *
-   * @param site The location to get bounding hazards
-   */
-  public BoundingHazards boundingHazards(Location site) {
-    return BoundingHazardsReader.boundingHazards(this, site);
-  }
-
-  /**
-   * Return a {@code Map<SiteClass, Map<Imt, XySequence>>} with hazard curves at
-   * the specified Location.
-   *
-   * @param site The site to get the hazard curves
-   */
-  public StaticHazards hazard(Location site) {
-    return boundingHazards(site).get(site);
-  }
-
-  /**
-   * Return a hazard curve for each Imt at a specific site class.
-   *
-   * @param site The site to get hazard curves
-   * @param siteClass The site class
-   */
-  public StaticHazard hazard(Location site, NehrpSiteClass siteClass) {
-    checkArgument(
-        coords.siteClasses().contains(siteClass),
-        "Site class [" + siteClass + "] not supported");
-    return hazard(site).get(siteClass);
-  }
-
-  /**
-   * Return a single hazard curve for the specified Location, SiteClass, and
-   * Imt.
-   *
-   * @param site The site to get the hazard curves
-   * @param siteClass The site class
-   * @param imt The IMT
-   */
-  public XySequence hazard(Location site, NehrpSiteClass siteClass, Imt imt) {
-    checkArgument(coords.imts().contains(imt), "Imt [" + imt + "] not supported");
-    return hazard(site, siteClass).get(imt);
-  }
-
-}
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/SiteClass.java b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/SiteClass.java
index 96eb4d3cf7060c6e61b1da01a8df9a7567595349..3d38dc3a675277445b9a50ab902750290320b34b 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/SiteClass.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/SiteClass.java
@@ -101,5 +101,4 @@ public enum SiteClass {
   public String toString() {
     return display;
   }
-
 }
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/data/BoundingData.java b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/data/BoundingData.java
new file mode 100644
index 0000000000000000000000000000000000000000..6fcda9d66b1a82164c270f380531c26bc3a8c709
--- /dev/null
+++ b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/data/BoundingData.java
@@ -0,0 +1,45 @@
+package gov.usgs.earthquake.nshmp.netcdf.data;
+
+import static com.google.common.base.Preconditions.checkState;
+
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+import gov.usgs.earthquake.nshmp.geo.Location;
+
+/**
+ * Location to static data mapper.
+ *
+ * @author U.S. Geological Survey
+ */
+public class BoundingData<T> extends LinkedHashMap<Location, StaticData<T>> {
+
+  public BoundingData() {}
+
+  private BoundingData(Map<Location, StaticData<T>> boundingHazards) {
+    putAll(boundingHazards);
+  }
+
+  public static <T> Builder<T> builder() {
+    return new Builder<T>();
+  }
+
+  public static class Builder<T> {
+    Map<Location, StaticData<T>> boundingData;
+
+    private Builder() {
+      /* LinkedHashMap so entries iterate in insertion order */
+      boundingData = new LinkedHashMap<>();
+    }
+
+    public Builder<T> put(Location location, StaticData<T> staticData) {
+      boundingData.put(location, staticData);
+      return this;
+    }
+
+    public BoundingData<T> build() {
+      checkState(!boundingData.isEmpty(), "Must add static data");
+      return new BoundingData<>(boundingData);
+    }
+  }
+}
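
BoundingData extends LinkedHashMap, so the readers can put the four grid
corners first and the interpolated target site last and get that order back
when iterating. Builder usage in miniature (cornerData and targetData are
hypothetical StaticData values):

    BoundingData<XySequence> bounding = BoundingData.<XySequence> builder()
        .put(Location.create(-105.3, 39.7), cornerData)   // a grid corner
        .put(Location.create(-105.25, 39.75), targetData) // the target site
        .build();
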
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/data/NetcdfData.java b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/data/NetcdfData.java
new file mode 100644
index 0000000000000000000000000000000000000000..7accd3e2863583e7d2b1e25f37efad29195f161b
--- /dev/null
+++ b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/data/NetcdfData.java
@@ -0,0 +1,118 @@
+package gov.usgs.earthquake.nshmp.netcdf.data;
+
+import static com.google.common.base.Preconditions.checkState;
+
+import java.util.List;
+import java.util.Map;
+
+import gov.usgs.earthquake.nshmp.gmm.Imt;
+import gov.usgs.earthquake.nshmp.gmm.NehrpSiteClass;
+
+/**
+ * Data (grid coordinates, site classes, IMTs, vs30s) read from the NetCDF file.
+ *
+ * @author U.S. Geological Survey
+ */
+public class NetcdfData {
+  private final List<Imt> imts;
+  private final double[] latitudes;
+  private final double[] longitudes;
+  private final List<NehrpSiteClass> siteClasses;
+  private final Map<NehrpSiteClass, Double> vs30Map;
+
+  protected NetcdfData(Builder builder) {
+    imts = builder.imts;
+    latitudes = builder.latitudes;
+    longitudes = builder.longitudes;
+    siteClasses = builder.siteClasses;
+    vs30Map = builder.vs30Map;
+  }
+
+  /**
+   * Returns the Imts.
+   */
+  public List<Imt> imts() {
+    return List.copyOf(imts);
+  }
+
+  /**
+   * Returns the latitudes.
+   */
+  public double[] latitudes() {
+    return latitudes.clone();
+  }
+
+  /**
+   * Returns the longitudes.
+   */
+  public double[] longitudes() {
+    return longitudes.clone();
+  }
+
+  /**
+   * Returns the site classes.
+   */
+  public List<NehrpSiteClass> siteClasses() {
+    return List.copyOf(siteClasses);
+  }
+
+  /**
+   * Returns the VS30 map.
+   */
+  public Map<NehrpSiteClass, Double> vs30Map() {
+    return Map.copyOf(vs30Map);
+  }
+
+  public static Builder builder() {
+    return new Builder();
+  }
+
+  public static class Builder {
+    List<Imt> imts;
+    double[] latitudes;
+    double[] longitudes;
+    List<NehrpSiteClass> siteClasses;
+    Map<NehrpSiteClass, Double> vs30Map;
+
+    Builder() {}
+
+    public Builder imts(List<Imt> imts) {
+      this.imts = imts;
+      return this;
+    }
+
+    public Builder latitudes(double[] latitudes) {
+      this.latitudes = latitudes;
+      return this;
+    }
+
+    public Builder longitudes(double[] longitudes) {
+      this.longitudes = longitudes;
+      return this;
+    }
+
+    public Builder siteClasses(List<NehrpSiteClass> siteClasses) {
+      this.siteClasses = siteClasses;
+      return this;
+    }
+
+    public Builder vs30Map(Map<NehrpSiteClass, Double> vs30Map) {
+      this.vs30Map = vs30Map;
+      return this;
+    }
+
+    public NetcdfData build() {
+      checkBuildState();
+      return new NetcdfData(this);
+    }
+
+    void checkBuildState() {
+      checkState(!imts.isEmpty(), "Must add imts");
+      checkState(latitudes.length != 0, "Must add latitudes");
+      checkState(longitudes.length != 0, "Must add longitudes");
+      checkState(!siteClasses.isEmpty(), "Must add site classes");
+      checkState(!vs30Map.isEmpty(), "Must add vs30s");
+    }
+  }
+
+}
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/data/NetcdfDataHazardCurves.java b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/data/NetcdfDataHazardCurves.java
new file mode 100644
index 0000000000000000000000000000000000000000..e678fa34a7819c544efbd48e9152976b23bfea4d
--- /dev/null
+++ b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/data/NetcdfDataHazardCurves.java
@@ -0,0 +1,96 @@
+package gov.usgs.earthquake.nshmp.netcdf.data;
+
+import static com.google.common.base.Preconditions.checkState;
+
+import java.util.List;
+import java.util.Map;
+
+import gov.usgs.earthquake.nshmp.gmm.Imt;
+import gov.usgs.earthquake.nshmp.gmm.NehrpSiteClass;
+
+/**
+ * NetCDF data for hazard curves.
+ *
+ * @author U.S. Geological Survey
+ */
+public class NetcdfDataHazardCurves extends NetcdfData {
+
+  private final Map<Imt, double[]> imls;
+
+  NetcdfDataHazardCurves(Builder builder) {
+    super(builder);
+    imls = builder.imls;
+  }
+
+  /**
+   * Returns the Imls.
+   */
+  public Map<Imt, double[]> imls() {
+    /* Defensive copy, as with the other accessors */
+    return Map.copyOf(imls);
+  }
+
+  /**
+   * Returns the number of Imls.
+   */
+  public int nIml() {
+    return imls.values().iterator().next().length;
+  }
+
+  public static Builder builder() {
+    return new Builder();
+  }
+
+  public static class Builder extends NetcdfData.Builder {
+    Map<Imt, double[]> imls;
+
+    Builder() {
+      super();
+    }
+
+    public Builder imls(Map<Imt, double[]> imls) {
+      this.imls = imls;
+      return this;
+    }
+
+    @Override
+    public Builder imts(List<Imt> imts) {
+      super.imts(imts);
+      return this;
+    }
+
+    @Override
+    public Builder latitudes(double[] latitudes) {
+      super.latitudes(latitudes);
+      return this;
+    }
+
+    @Override
+    public Builder longitudes(double[] longitudes) {
+      super.longitudes(longitudes);
+      return this;
+    }
+
+    @Override
+    public Builder siteClasses(List<NehrpSiteClass> siteClasses) {
+      super.siteClasses(siteClasses);
+      return this;
+    }
+
+    @Override
+    public Builder vs30Map(Map<NehrpSiteClass, Double> vs30Map) {
+      super.vs30Map(vs30Map);
+      return this;
+    }
+
+    public NetcdfDataHazardCurves build() {
+      checkBuildState();
+      return new NetcdfDataHazardCurves(this);
+    }
+
+    void checkBuildState() {
+      super.checkBuildState();
+      checkState(!imls.isEmpty(), "Must add imls");
+    }
+  }
+
+}
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/data/NetcdfShape.java b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/data/NetcdfShape.java
new file mode 100644
index 0000000000000000000000000000000000000000..632de5c87c400353264b900a85162704e9258afb
--- /dev/null
+++ b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/data/NetcdfShape.java
@@ -0,0 +1,69 @@
+package gov.usgs.earthquake.nshmp.netcdf.data;
+
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Creates NetCDF array shapes and tracks the axis position of each dimension.
+ *
+ * @author U.S. Geological Survey
+ */
+public class NetcdfShape {
+
+  private final Map<IndexKey, Integer> indexMap;
+
+  private NetcdfShape(Builder builder) {
+    indexMap = builder.indexMap;
+  }
+
+  public int[] buildShape(IndexMap... sizes) {
+    /* New arrays are zero-filled; set only the supplied dimensions */
+    int[] shape = new int[indexMap.size()];
+
+    Arrays.stream(sizes).forEach(size -> {
+      shape[indexMap.get(size.indexKey)] = size.size;
+    });
+
+    return shape;
+  }
+
+  public static Builder builder() {
+    return new Builder();
+  }
+
+  public static class IndexMap {
+    public final IndexKey indexKey;
+    public final int size;
+
+    public IndexMap(IndexKey indexKey, int size) {
+      this.indexKey = indexKey;
+      this.size = size;
+    }
+  }
+
+  public enum IndexKey {
+    IML,
+    IMT,
+    LATITUDE,
+    LONGITUDE,
+    SITE_CLASS;
+  }
+
+  public static class Builder {
+    Map<IndexKey, Integer> indexMap;
+
+    private Builder() {
+      indexMap = new HashMap<>();
+    }
+
+    public Builder add(IndexKey key, int index) {
+      indexMap.put(key, index);
+      return this;
+    }
+
+    public NetcdfShape build() {
+      return new NetcdfShape(this);
+    }
+  }
+}
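
buildShape returns an int array ordered by the registered axis positions,
with unspecified dimensions left at zero, so one call site can build both
read origins and read extents. A worked example using the hazard layout (the
index values 14 and 27 are arbitrary):

    NetcdfShape netcdfShape = NetcdfShape.builder()
        .add(IndexKey.SITE_CLASS, 0)
        .add(IndexKey.IMT, 1)
        .add(IndexKey.LATITUDE, 2)
        .add(IndexKey.LONGITUDE, 3)
        .add(IndexKey.IML, 4)
        .build();

    // Origin of a read at grid indices (lat=14, lon=27) -> [0, 0, 14, 27, 0]
    int[] origin = netcdfShape.buildShape(
        new NetcdfShape.IndexMap(IndexKey.LATITUDE, 14),
        new NetcdfShape.IndexMap(IndexKey.LONGITUDE, 27));
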
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/data/StaticData.java b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/data/StaticData.java
new file mode 100644
index 0000000000000000000000000000000000000000..21642ae253a2c6c1974a48a8a8779e7aeb2505e6
--- /dev/null
+++ b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/data/StaticData.java
@@ -0,0 +1,46 @@
+package gov.usgs.earthquake.nshmp.netcdf.data;
+
+import static com.google.common.base.Preconditions.checkState;
+
+import java.util.EnumMap;
+
+import gov.usgs.earthquake.nshmp.gmm.NehrpSiteClass;
+
+/**
+ * NEHRP site class to data mapper.
+ *
+ * @author U.S. Geological Survey
+ */
+public class StaticData<T> extends EnumMap<NehrpSiteClass, T> {
+
+  public StaticData() {
+    super(NehrpSiteClass.class);
+  }
+
+  StaticData(EnumMap<NehrpSiteClass, T> data) {
+    super(NehrpSiteClass.class);
+    putAll(data);
+  }
+
+  public static <T> Builder<T> builder() {
+    return new Builder<T>();
+  }
+
+  public static class Builder<T> {
+    EnumMap<NehrpSiteClass, T> data;
+
+    private Builder() {
+      data = new EnumMap<>(NehrpSiteClass.class);
+    }
+
+    public Builder<T> put(NehrpSiteClass siteClass, T data) {
+      this.data.put(siteClass, data);
+      return this;
+    }
+
+    public StaticData<T> build() {
+      checkState(!data.isEmpty(), "Must add data");
+      return new StaticData<T>(data);
+    }
+  }
+}
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/StaticHazard.java b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/data/StaticDataHazardCurves.java
similarity index 61%
rename from src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/StaticHazard.java
rename to src/main/java/gov/usgs/earthquake/nshmp/netcdf/data/StaticDataHazardCurves.java
index 5ee1f26dd08af76d6968f48b29821438a4d82884..e630a1eed18bc099d7265af88932976432754eb9 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/StaticHazard.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/data/StaticDataHazardCurves.java
@@ -1,4 +1,4 @@
-package gov.usgs.earthquake.nshmp.netcdf.reader;
+package gov.usgs.earthquake.nshmp.netcdf.data;
 
 import static com.google.common.base.Preconditions.checkState;
 
@@ -7,23 +7,22 @@ import java.util.EnumMap;
 import gov.usgs.earthquake.nshmp.data.XySequence;
 import gov.usgs.earthquake.nshmp.gmm.Imt;
 
-@SuppressWarnings("serial")
-public class StaticHazard extends EnumMap<Imt, XySequence> {
+/**
+ * IMT to XySequence mapper.
+ *
+ * @author U.S. Geological Survey
+ */
+public class StaticDataHazardCurves extends EnumMap<Imt, XySequence> {
 
-  public StaticHazard() {
+  public StaticDataHazardCurves() {
     super(Imt.class);
   }
 
-  private StaticHazard(EnumMap<Imt, XySequence> staticHazard) {
+  private StaticDataHazardCurves(EnumMap<Imt, XySequence> staticHazard) {
     super(Imt.class);
     putAll(staticHazard);
   }
 
-  // unnecessary method?
-  // public Map<Imt, XySequence> staticHazard() {
-  // return Map.copyOf(this);
-  // }
-
   public static Builder builder() {
     return new Builder();
   }
@@ -40,10 +39,9 @@ public class StaticHazard extends EnumMap<Imt, XySequence> {
       return this;
     }
 
-    public StaticHazard build() {
+    public StaticDataHazardCurves build() {
       checkState(!staticHazard.isEmpty(), "Must add hazards");
-      return new StaticHazard(staticHazard);
+      return new StaticDataHazardCurves(staticHazard);
     }
   }
-
 }
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/BoundingHazards.java b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/BoundingHazards.java
deleted file mode 100644
index 76a772327e2ea05b6c4a93c6b8b1f10d4e05adba..0000000000000000000000000000000000000000
--- a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/BoundingHazards.java
+++ /dev/null
@@ -1,43 +0,0 @@
-package gov.usgs.earthquake.nshmp.netcdf.reader;
-
-import static com.google.common.base.Preconditions.checkState;
-
-import java.util.HashMap;
-import java.util.LinkedHashMap;
-import java.util.Map;
-
-import gov.usgs.earthquake.nshmp.geo.Location;
-
-@SuppressWarnings("serial")
-public class BoundingHazards extends LinkedHashMap<Location, StaticHazards> {
-
-  public BoundingHazards() {}
-
-  private BoundingHazards(Map<Location, StaticHazards> boundingHazards) {
-    putAll(boundingHazards);
-  }
-
-  public static Builder builder() {
-    return new Builder();
-  }
-
-  public static class Builder {
-    Map<Location, StaticHazards> boundingHazards;
-
-    private Builder() {
-      boundingHazards = new HashMap<>();
-    }
-
-    public Builder put(Location location, StaticHazards staticHazards) {
-      boundingHazards.put(location, staticHazards);
-      return this;
-    }
-
-    public BoundingHazards build() {
-      checkState(!boundingHazards.isEmpty(), "Must add hazards");
-      return new BoundingHazards(boundingHazards);
-    }
-
-  }
-
-}
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/BoundingHazardsReader.java b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/BoundingHazardsReader.java
deleted file mode 100644
index 4953f9eeb796ab25afd1eb4161f51b791a2531d0..0000000000000000000000000000000000000000
--- a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/BoundingHazardsReader.java
+++ /dev/null
@@ -1,208 +0,0 @@
-package gov.usgs.earthquake.nshmp.netcdf.reader;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.stream.Collectors;
-
-import gov.usgs.earthquake.nshmp.data.XySequence;
-import gov.usgs.earthquake.nshmp.geo.Location;
-import gov.usgs.earthquake.nshmp.geo.LocationList;
-import gov.usgs.earthquake.nshmp.netcdf.NshmNetcdfReader;
-import gov.usgs.earthquake.nshmp.netcdf.reader.NetcdfUtils.Key;
-
-import ucar.ma2.Array;
-import ucar.ma2.DataType;
-import ucar.ma2.InvalidRangeException;
-import ucar.nc2.dataset.NetcdfDataset;
-
-/**
- * Container for gridded hazard curves at four closest grid points to target
- *
- * @author U.S. Geological Survey
- */
-public class BoundingHazardsReader {
-
-  private final NshmNetcdfReader netcdf;
-  private final NetcdfCoordinates coords;
-  private BoundingHazards boundingHazards;
-  private List<BoundingLocation> boundingLocations = new ArrayList<>();
-
-  BoundingHazardsReader(NshmNetcdfReader netcdf, Location site) {
-    this.netcdf = netcdf;
-    this.coords = netcdf.coordinates();
-    coords.contains(site);
-    setBoundingHazards(site);
-  }
-
-  /**
-   * Returns the bounding hazards at four closet grid points to target.
-   *
-   * @param netcdf The {@code Netcdf}
-   * @param site The site to get bounding hazards
-   */
-  public static BoundingHazards boundingHazards(
-      NshmNetcdfReader netcdf,
-      Location site) {
-    return new BoundingHazardsReader(netcdf, site).boundingHazards;
-  }
-
-  LocationList boundingLocations() {
-    var locations = boundingLocations.stream()
-        .map(boundingLocation -> boundingLocation.location)
-        .collect(Collectors.toList());
-
-    return LocationList.copyOf(locations);
-  }
-
-  /**
-   * Get data for target point
-   *
-   * @param d1 data at first point (p1)
-   * @param d2 data at second point (p2)
-   * @param frac fractional distance between p1 and p2 to target point
-   */
-  static StaticHazards getTargetData(
-      StaticHazards d1,
-      StaticHazards d2,
-      double frac) {
-    NetcdfUtils.checkBoundingHazard(d1, d2);
-    return frac == 0.0 ? d1 : frac == 1.0 ? d2 : NetcdfUtils.linearInterpolate(d1, d2, frac);
-  }
-
-  private void setBoundingHazards(Location site) {
-    var longitudes = coords.longitudes();
-    var latitudes = coords.latitudes();
-
-    var idxLonLL = NetcdfUtils.getIdxLTEQ(longitudes, site.longitude);
-    var idxLatLL = NetcdfUtils.getIdxLTEQ(latitudes, site.latitude);
-
-    var lonLeft = longitudes[idxLonLL];
-    var lonRight = longitudes[idxLonLL + 1];
-    var latLower = latitudes[idxLatLL];
-    var latUpper = latitudes[idxLatLL + 1];
-
-    boundingLocations.add(new BoundingLocation(lonLeft, latLower, 0, 0));
-    boundingLocations.add(new BoundingLocation(lonLeft, latUpper, 0, 1));
-    boundingLocations.add(new BoundingLocation(lonRight, latUpper, 1, 1));
-    boundingLocations.add(new BoundingLocation(lonRight, latLower, 1, 0));
-
-    var hazards = extractHazardsAt(idxLonLL, idxLatLL);
-
-    var fracLon = NetcdfUtils.calcGridFrac(longitudes, idxLonLL, site.longitude);
-    var fracLat = NetcdfUtils.calcGridFrac(latitudes, idxLatLL, site.latitude);
-
-    var builder = BoundingHazards.builder();
-    hazards.forEach((key, value) -> builder.put(key, value));
-    builder.put(
-        site,
-        calcTargetHazards(hazards, fracLon, fracLat))
-        .build();
-    boundingHazards = builder.build();
-
-    NetcdfUtils.checkBoundingHazards(boundingHazards, boundingLocations.get(0).location);
-  }
-
-  private StaticHazards calcTargetHazards(BoundingHazards hazards, double fracLon, double fracLat) {
-    var westTarget = getTargetData(
-        hazards.get(boundingLocations.get(0).location),
-        hazards.get(boundingLocations.get(1).location),
-        fracLat);
-
-    var eastTarget = getTargetData(
-        hazards.get(boundingLocations.get(3).location),
-        hazards.get(boundingLocations.get(2).location),
-        fracLat);
-
-    return getTargetData(westTarget, eastTarget, fracLon);
-  }
-
-  /*
-   * Return hazard curves for four closest grid points by SiteClass and Imt
-   */
-  private BoundingHazards extractHazardsAt(
-      int idxLonLL,
-      int idxLatLL) {
-    try (NetcdfDataset ncd = NetcdfDataset.openDataset(netcdf.path().toString())) {
-      var boundingHazardMaps = BoundingHazards.builder();
-      var targetGroup = ncd.getRootGroup();
-
-      var targetOrigin = new int[] { 0, 0, idxLatLL, idxLonLL, 0 };
-      var targetShape = new int[] {
-          coords.siteClasses().size(),
-          coords.imts().size(),
-          2,
-          2,
-          coords.nIml()
-      };
-
-      /*
-       * Array aHazards now has shape [nVs,nImt,2,2,nIml] ...so origin will now
-       * be [0,0,0,0,0] for LL grid point ...and shape of requested array is
-       * [nVs,nImt,1,1,nIml]
-       */
-      var aHazards = targetGroup.findVariable(Key.HAZARD).read(targetOrigin, targetShape);
-
-      var shape = targetShape.clone();
-      shape[2] = 1;
-      shape[3] = 1;
-
-      for (var boundingLocation : boundingLocations) {
-        boundingHazardMaps.put(
-            boundingLocation.location,
-            mapHazardsFromArray(aHazards.section(boundingLocation.origin, shape)));
-      }
-
-      return boundingHazardMaps.build();
-    } catch (IOException | InvalidRangeException e) {
-      throw new RuntimeException("Could not read Netcdf file [" + netcdf.path() + "]");
-    }
-  }
-
-  /*
-   * Read hazard curves from netCDF variable into map of hazards by SiteClass
-   * and Imt
-   *
-   * TODO: if target is on a grid point (or on a grid lat or lon), no need to
-   * read 4 bounding points ?
-   */
-  private StaticHazards mapHazardsFromArray(Array hazards) {
-    var vsImtHazardMap = StaticHazards.builder();
-
-    for (int iSiteClass = 0; iSiteClass < coords.siteClasses().size(); iSiteClass++) {
-      var siteClass = coords.siteClasses().get(iSiteClass);
-
-      var imtHazardMap = StaticHazard.builder();
-      for (int iImt = 0; iImt < coords.imts().size(); iImt++) {
-        var imt = coords.imts().get(iImt);
-        var origin = new int[] { iSiteClass, iImt, 0 };
-        var shape = new int[] { 1, 1, coords.nIml() };
-
-        try {
-          var xySequence = XySequence.create(
-              coords.imls().get(imt),
-              (double[]) hazards.section(origin, shape).reduce().get1DJavaArray(DataType.DOUBLE));
-
-          imtHazardMap.put(imt, xySequence);
-        } catch (InvalidRangeException e) {
-          throw new RuntimeException(e.getMessage());
-        }
-      }
-
-      vsImtHazardMap.put(siteClass, imtHazardMap.build());
-    }
-
-    return vsImtHazardMap.build();
-  }
-
-  static class BoundingLocation {
-    final Location location;
-    final int[] origin;
-
-    BoundingLocation(double longitude, double latitude, int longitudeIndex, int latitudeIndex) {
-      location = Location.create(longitude, latitude);
-      origin = new int[] { 0, 0, latitudeIndex, longitudeIndex, 0 };
-    }
-  }
-
-}
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/BoundingReader.java b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/BoundingReader.java
new file mode 100644
index 0000000000000000000000000000000000000000..d750ea5ba47d6582c1436aaf541269f8aedddece
--- /dev/null
+++ b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/BoundingReader.java
@@ -0,0 +1,134 @@
+package gov.usgs.earthquake.nshmp.netcdf.reader;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.stream.Collectors;
+
+import gov.usgs.earthquake.nshmp.geo.Location;
+import gov.usgs.earthquake.nshmp.geo.LocationList;
+import gov.usgs.earthquake.nshmp.netcdf.Netcdf;
+import gov.usgs.earthquake.nshmp.netcdf.data.BoundingData;
+import gov.usgs.earthquake.nshmp.netcdf.data.NetcdfData;
+import gov.usgs.earthquake.nshmp.netcdf.data.NetcdfShape;
+import gov.usgs.earthquake.nshmp.netcdf.data.NetcdfShape.IndexKey;
+import gov.usgs.earthquake.nshmp.netcdf.data.StaticData;
+
+import ucar.ma2.Array;
+
+/**
+ * Abstract class to read a NetCDF file and create the bounding locations and
+ * associated data.
+ *
+ * @author U.S. Geological Survey
+ */
+public abstract class BoundingReader<T> {
+
+  private final NetcdfData netcdfData;
+  private final BoundingData<T> boundingData;
+  private final List<BoundingLocation> boundingLocations;
+
+  BoundingReader(Netcdf<T> netcdf, Location site) {
+    this.netcdfData = netcdf.netcdfData();
+    boundingLocations = setBoundingLocations(netcdf, site);
+    boundingData = setBoundingData(netcdf, site, boundingLocations);
+  }
+
+  /**
+   * Returns the bounding data associated with a location.
+   */
+  public BoundingData<T> boundingData() {
+    return boundingData;
+  }
+
+  /**
+   * Returns the bounding locations.
+   */
+  LocationList boundingLocations() {
+    var locations = boundingLocations.stream()
+        .map(boundingLocation -> boundingLocation.location)
+        .collect(Collectors.toList());
+
+    return LocationList.copyOf(locations);
+  }
+
+  abstract StaticData<T> calculateTargetData(
+      List<BoundingLocation> boundingLocations,
+      BoundingData<T> boundingData,
+      double fracLon,
+      double fracLat);
+
+  abstract BoundingData<T> extractDataAt(
+      Netcdf<T> netcdf,
+      List<BoundingLocation> boundingLocations,
+      int idxLonLL,
+      int idxLatLL);
+
+  /**
+   * Get data for target point
+   *
+   * @param d1 data at first point (p1)
+   * @param d2 data at second point (p2)
+   * @param frac fractional distance between p1 and p2 to target point
+   */
+  abstract StaticData<T> getTargetData(StaticData<T> d1, StaticData<T> d2, double frac);
+
+  /*
+   * Read data from a netCDF variable into a map of static data keyed by site
+   * class.
+   *
+   * TODO: if target is on a grid point (or on a grid lat or lon), no need to
+   * read 4 bounding points ?
+   */
+  abstract StaticData<T> mapDataFromArray(
+      Netcdf<T> netcdf,
+      Array array);
+
+  /**
+   * Set the bounding data
+   */
+  abstract BoundingData<T> setBoundingData(
+      Netcdf<T> netcdf,
+      Location site,
+      List<BoundingLocation> boundingLocations);
+
+  static class BoundingLocation {
+    final Location location;
+    final int[] origin;
+
+    BoundingLocation(
+        NetcdfShape netcdfShape,
+        Location location,
+        int longitudeIndex,
+        int latitudeIndex) {
+      this.location = location;
+      origin = netcdfShape.buildShape(
+          new NetcdfShape.IndexMap(IndexKey.LATITUDE, latitudeIndex),
+          new NetcdfShape.IndexMap(IndexKey.LONGITUDE, longitudeIndex));
+    }
+  }
+
+  private List<BoundingLocation> setBoundingLocations(Netcdf<T> netcdf, Location site) {
+    var boundingLocations = new ArrayList<BoundingLocation>();
+    var longitudes = netcdfData.longitudes();
+    var latitudes = netcdfData.latitudes();
+
+    var idxLonLL = NetcdfUtils.getIdxLTEQ(longitudes, site.longitude);
+    var idxLatLL = NetcdfUtils.getIdxLTEQ(latitudes, site.latitude);
+
+    var lonLeft = longitudes[idxLonLL];
+    var lonRight = longitudes[idxLonLL + 1];
+    var latLower = latitudes[idxLatLL];
+    var latUpper = latitudes[idxLatLL + 1];
+
+    boundingLocations.add(
+        new BoundingLocation(netcdf.netcdfShape(), Location.create(lonLeft, latLower), 0, 0));
+    boundingLocations.add(
+        new BoundingLocation(netcdf.netcdfShape(), Location.create(lonLeft, latUpper), 0, 1));
+    boundingLocations.add(
+        new BoundingLocation(netcdf.netcdfShape(), Location.create(lonRight, latUpper), 1, 1));
+    boundingLocations.add(
+        new BoundingLocation(netcdf.netcdfShape(), Location.create(lonRight, latLower), 1, 0));
+
+    return boundingLocations;
+  }
+}
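
The two abstract interpolation steps implement standard bilinear
interpolation over the grid cell: interpolate in latitude along the west edge
(locations 0 and 1) and the east edge (locations 3 and 2), then in longitude
between the two results. In scalar form (a sketch of the arithmetic only; the
readers apply the same scheme curve-by-curve via
NetcdfUtils.linearInterpolate*):

    // f(lon,lat) at fractional offsets within the cell; corners are
    // LL = lower-left, UL = upper-left, UR = upper-right, LR = lower-right.
    static double bilinear(
        double fLL, double fUL, double fUR, double fLR,
        double fracLon, double fracLat) {
      double west = fLL + fracLat * (fUL - fLL); // along the west edge
      double east = fLR + fracLat * (fUR - fLR); // along the east edge
      return west + fracLon * (east - west);     // between the edges
    }
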
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/BoundingReaderGroundMotions.java b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/BoundingReaderGroundMotions.java
new file mode 100644
index 0000000000000000000000000000000000000000..fc4b2f3fb958f5cbc8e86d5d60a66b8030ff31b8
--- /dev/null
+++ b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/BoundingReaderGroundMotions.java
@@ -0,0 +1,182 @@
+package gov.usgs.earthquake.nshmp.netcdf.reader;
+
+import java.io.IOException;
+import java.util.List;
+
+import gov.usgs.earthquake.nshmp.data.XySequence;
+import gov.usgs.earthquake.nshmp.geo.Location;
+import gov.usgs.earthquake.nshmp.gmm.Imt;
+import gov.usgs.earthquake.nshmp.netcdf.Netcdf;
+import gov.usgs.earthquake.nshmp.netcdf.NetcdfGroundMotions;
+import gov.usgs.earthquake.nshmp.netcdf.data.BoundingData;
+import gov.usgs.earthquake.nshmp.netcdf.data.NetcdfShape;
+import gov.usgs.earthquake.nshmp.netcdf.data.NetcdfShape.IndexKey;
+import gov.usgs.earthquake.nshmp.netcdf.data.StaticData;
+import gov.usgs.earthquake.nshmp.netcdf.reader.NetcdfUtils.Key;
+
+import ucar.ma2.Array;
+import ucar.ma2.DataType;
+import ucar.ma2.InvalidRangeException;
+import ucar.nc2.dataset.NetcdfDatasets;
+
+/**
+ * Creates the bounding locations and data associated with a specific site for
+ * ground motions.
+ *
+ * @author U.S. Geological Survey
+ */
+public class BoundingReaderGroundMotions extends BoundingReader<XySequence> {
+
+  /* Placeholder x-values for IMTs that have no spectral period */
+  public static final double PGA_VALUE = 0.001;
+  public static final double PGV_VALUE = 0.0001;
+
+  public BoundingReaderGroundMotions(NetcdfGroundMotions netcdf, Location site) {
+    super(netcdf, site);
+  }
+
+  @Override
+  StaticData<XySequence> calculateTargetData(
+      List<BoundingLocation> boundingLocations,
+      BoundingData<XySequence> boundingData,
+      double fracLon,
+      double fracLat) {
+    var westTarget = getTargetData(
+        boundingData.get(boundingLocations.get(0).location),
+        boundingData.get(boundingLocations.get(1).location),
+        fracLat);
+
+    var eastTarget = getTargetData(
+        boundingData.get(boundingLocations.get(3).location),
+        boundingData.get(boundingLocations.get(2).location),
+        fracLat);
+
+    return getTargetData(westTarget, eastTarget, fracLon);
+  }
+
+  @Override
+  BoundingData<XySequence> extractDataAt(
+      Netcdf<XySequence> netcdf,
+      List<BoundingLocation> boundingLocations,
+      int idxLonLL,
+      int idxLatLL) {
+    try (var ncd = NetcdfDatasets.openDataset(netcdf.netcdfPath().toString())) {
+      var netcdfData = netcdf.netcdfData();
+      var boundingData = BoundingData.<XySequence> builder();
+      var targetGroup = ncd.getRootGroup();
+      var netcdfShape = netcdf.netcdfShape();
+
+      var targetOrigin = netcdfShape.buildShape(
+          new NetcdfShape.IndexMap(IndexKey.LATITUDE, idxLatLL),
+          new NetcdfShape.IndexMap(IndexKey.LONGITUDE, idxLonLL));
+      var targetShape = netcdfShape.buildShape(
+          new NetcdfShape.IndexMap(IndexKey.SITE_CLASS, netcdfData.siteClasses().size()),
+          new NetcdfShape.IndexMap(IndexKey.LATITUDE, 2),
+          new NetcdfShape.IndexMap(IndexKey.LONGITUDE, 2),
+          new NetcdfShape.IndexMap(IndexKey.IMT, netcdfData.imts().size()));
+
+      /*
+       * Array groundMotionArray now has shape [nVs,2,2,nImt] ...so origin
+       * will now be [0,0,0,0] for LL grid point ...and shape of requested
+       * array is [nVs,1,1,nImt]
+       */
+      var groundMotionArray = targetGroup
+          .findVariableLocal(Key.GROUND_MOTION)
+          .read(targetOrigin, targetShape);
+
+      var shape = netcdfShape.buildShape(
+          new NetcdfShape.IndexMap(IndexKey.SITE_CLASS, netcdfData.siteClasses().size()),
+          new NetcdfShape.IndexMap(IndexKey.LATITUDE, 1),
+          new NetcdfShape.IndexMap(IndexKey.LONGITUDE, 1),
+          new NetcdfShape.IndexMap(IndexKey.IMT, netcdfData.imts().size()));
+
+      for (var boundingLocation : boundingLocations) {
+        boundingData.put(
+            boundingLocation.location,
+            mapDataFromArray(netcdf, groundMotionArray.section(boundingLocation.origin, shape)));
+      }
+
+      return boundingData.build();
+    } catch (IOException | InvalidRangeException e) {
+      throw new RuntimeException(
+          "Could not read Netcdf file [" + netcdf.netcdfPath() + "]. " + e.getMessage());
+    }
+  }
+
+  @Override
+  StaticData<XySequence> getTargetData(
+      StaticData<XySequence> d1,
+      StaticData<XySequence> d2,
+      double frac) {
+    NetcdfUtils.checkBoundingGroundMotion(d1, d2);
+    return frac == 0.0 ? d1
+        : frac == 1.0 ? d2 : NetcdfUtils.linearInterpolateGroundMotions(d1, d2, frac);
+  }
+
+  @Override
+  StaticData<XySequence> mapDataFromArray(
+      Netcdf<XySequence> netcdf,
+      Array array) {
+    var netcdfData = netcdf.netcdfData();
+    var staticData = StaticData.<XySequence> builder();
+
+    for (int iSiteClass = 0; iSiteClass < netcdfData.siteClasses().size(); iSiteClass++) {
+      var siteClass = netcdfData.siteClasses().get(iSiteClass);
+
+      var imts = netcdfData.imts();
+      var periods = imts.stream()
+          .mapToDouble(imt -> {
+            if (imt == Imt.PGA) {
+              return PGA_VALUE;
+            } else if (imt == Imt.PGV) {
+              return PGV_VALUE;
+            }
+            return imt.period();
+          })
+          .toArray();
+
+      var origin = new int[] { iSiteClass, 0 };
+      var shape = new int[] { 1, imts.size() };
+
+      try {
+        var xySequence = XySequence.create(
+            periods,
+            (double[]) array.section(origin, shape).reduce().get1DJavaArray(DataType.DOUBLE));
+
+        staticData.put(siteClass, xySequence);
+      } catch (InvalidRangeException e) {
+        throw new RuntimeException(e.getMessage(), e);
+      }
+    }
+
+    return staticData.build();
+  }
+
+  @Override
+  BoundingData<XySequence> setBoundingData(
+      Netcdf<XySequence> netcdf,
+      Location site,
+      List<BoundingLocation> boundingLocations) {
+    var netcdfGroundMotions = (NetcdfGroundMotions) netcdf;
+    var netcdfData = netcdfGroundMotions.netcdfData();
+    var longitudes = netcdfData.longitudes();
+    var latitudes = netcdfData.latitudes();
+    var idxLonLL = NetcdfUtils.getIdxLTEQ(longitudes, site.longitude);
+    var idxLatLL = NetcdfUtils.getIdxLTEQ(latitudes, site.latitude);
+
+    var groundMotions =
+        extractDataAt(netcdfGroundMotions, boundingLocations, idxLonLL, idxLatLL);
+    var fracLon = NetcdfUtils.calcGridFrac(longitudes, idxLonLL, site.longitude);
+    var fracLat = NetcdfUtils.calcGridFrac(latitudes, idxLatLL, site.latitude);
+
+    var builder = BoundingData.<XySequence> builder();
+    groundMotions.forEach((key, value) -> builder.put(key, value));
+    builder.put(
+        site,
+        calculateTargetData(boundingLocations, groundMotions, fracLon, fracLat));
+    var boundingData = builder.build();
+
+    NetcdfUtils.checkBoundingGroundMotions(boundingData, boundingLocations.get(0).location);
+    return boundingData;
+  }
+}
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/BoundingReaderHazardCurves.java b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/BoundingReaderHazardCurves.java
new file mode 100644
index 0000000000000000000000000000000000000000..f48287c2a954d1eedb06e966c29720b487bedbc4
--- /dev/null
+++ b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/BoundingReaderHazardCurves.java
@@ -0,0 +1,171 @@
+package gov.usgs.earthquake.nshmp.netcdf.reader;
+
+import java.io.IOException;
+import java.util.List;
+
+import gov.usgs.earthquake.nshmp.data.XySequence;
+import gov.usgs.earthquake.nshmp.geo.Location;
+import gov.usgs.earthquake.nshmp.netcdf.Netcdf;
+import gov.usgs.earthquake.nshmp.netcdf.NetcdfHazardCurves;
+import gov.usgs.earthquake.nshmp.netcdf.data.BoundingData;
+import gov.usgs.earthquake.nshmp.netcdf.data.NetcdfShape;
+import gov.usgs.earthquake.nshmp.netcdf.data.NetcdfShape.IndexKey;
+import gov.usgs.earthquake.nshmp.netcdf.data.StaticData;
+import gov.usgs.earthquake.nshmp.netcdf.data.StaticDataHazardCurves;
+import gov.usgs.earthquake.nshmp.netcdf.reader.NetcdfUtils.Key;
+
+import ucar.ma2.Array;
+import ucar.ma2.DataType;
+import ucar.ma2.InvalidRangeException;
+import ucar.nc2.dataset.NetcdfDatasets;
+
+/**
+ * Creates the bounding locations and data associated with a specific site for
+ * hazard curves.
+ *
+ * @author U.S. Geological Survey
+ */
+public class BoundingReaderHazardCurves extends BoundingReader<StaticDataHazardCurves> {
+
+  public BoundingReaderHazardCurves(NetcdfHazardCurves netcdf, Location site) {
+    super(netcdf, site);
+  }
+
+  @Override
+  StaticData<StaticDataHazardCurves> calculateTargetData(
+      List<BoundingLocation> boundingLocations,
+      BoundingData<StaticDataHazardCurves> hazards,
+      double fracLon,
+      double fracLat) {
+    var westTarget = getTargetData(
+        hazards.get(boundingLocations.get(0).location),
+        hazards.get(boundingLocations.get(1).location),
+        fracLat);
+
+    var eastTarget = getTargetData(
+        hazards.get(boundingLocations.get(3).location),
+        hazards.get(boundingLocations.get(2).location),
+        fracLat);
+
+    return getTargetData(westTarget, eastTarget, fracLon);
+  }
+
+  @Override
+  BoundingData<StaticDataHazardCurves> extractDataAt(
+      Netcdf<StaticDataHazardCurves> netcdf,
+      List<BoundingLocation> boundingLocations,
+      int idxLonLL,
+      int idxLatLL) {
+    try (var ncd = NetcdfDatasets.openDataset(netcdf.netcdfPath().toString())) {
+      var netcdfData = ((NetcdfHazardCurves) netcdf).netcdfData();
+      var boundingData = BoundingData.<StaticDataHazardCurves> builder();
+      var targetGroup = ncd.getRootGroup();
+      var netcdfShape = netcdf.netcdfShape();
+
+      var targetOrigin = netcdfShape.buildShape(
+          new NetcdfShape.IndexMap(IndexKey.LATITUDE, idxLatLL),
+          new NetcdfShape.IndexMap(IndexKey.LONGITUDE, idxLonLL));
+      var targetShape = netcdfShape.buildShape(
+          new NetcdfShape.IndexMap(IndexKey.SITE_CLASS, netcdfData.siteClasses().size()),
+          new NetcdfShape.IndexMap(IndexKey.LATITUDE, 2),
+          new NetcdfShape.IndexMap(IndexKey.LONGITUDE, 2),
+          new NetcdfShape.IndexMap(IndexKey.IMT, netcdfData.imts().size()),
+          new NetcdfShape.IndexMap(IndexKey.IML, netcdfData.nIml()));
+
+      /*
+       * Array aHazards now has shape [nVs,nImt,2,2,nIml] ...so origin will now
+       * be [0,0,0,0,0] for LL grid point ...and shape of requested array is
+       * [nVs,nImt,1,1,nIml]
+       */
+      var aHazards = targetGroup.findVariableLocal(Key.HAZARD).read(targetOrigin, targetShape);
+
+      var shape = netcdfShape.buildShape(
+          new NetcdfShape.IndexMap(IndexKey.SITE_CLASS, netcdfData.siteClasses().size()),
+          new NetcdfShape.IndexMap(IndexKey.LATITUDE, 1),
+          new NetcdfShape.IndexMap(IndexKey.LONGITUDE, 1),
+          new NetcdfShape.IndexMap(IndexKey.IMT, netcdfData.imts().size()),
+          new NetcdfShape.IndexMap(IndexKey.IML, netcdfData.nIml()));
+
+      for (var boundingLocation : boundingLocations) {
+        boundingData.put(
+            boundingLocation.location,
+            mapDataFromArray(netcdf, aHazards.section(boundingLocation.origin, shape)));
+      }
+
+      return boundingData.build();
+    } catch (IOException | InvalidRangeException e) {
+      throw new RuntimeException("Could not read Netcdf file [" + netcdf.netcdfPath() + "]");
+    }
+  }
+
+  @Override
+  StaticData<StaticDataHazardCurves> getTargetData(
+      StaticData<StaticDataHazardCurves> d1,
+      StaticData<StaticDataHazardCurves> d2,
+      double frac) {
+    NetcdfUtils.checkBoundingHazard(d1, d2);
+    return frac == 0.0 ? d1
+        : frac == 1.0 ? d2 : NetcdfUtils.linearInterpolateHazardCurves(d1, d2, frac);
+  }
+
+  @Override
+  StaticData<StaticDataHazardCurves> mapDataFromArray(
+      Netcdf<StaticDataHazardCurves> netcdf,
+      Array hazards) {
+    var netcdfData = ((NetcdfHazardCurves) netcdf).netcdfData();
+    var vsImtHazardMap = StaticData.<StaticDataHazardCurves> builder();
+
+    for (int iSiteClass = 0; iSiteClass < netcdfData.siteClasses().size(); iSiteClass++) {
+      var siteClass = netcdfData.siteClasses().get(iSiteClass);
+
+      var imtHazardMap = StaticDataHazardCurves.builder();
+      for (int iImt = 0; iImt < netcdfData.imts().size(); iImt++) {
+        var imt = netcdfData.imts().get(iImt);
+        var origin = new int[] { iSiteClass, iImt, 0 };
+        var shape = new int[] { 1, 1, netcdfData.nIml() };
+
+        try {
+          var xySequence = XySequence.create(
+              netcdfData.imls().get(imt),
+              (double[]) hazards.section(origin, shape).reduce().get1DJavaArray(DataType.DOUBLE));
+
+          imtHazardMap.put(imt, xySequence);
+        } catch (InvalidRangeException e) {
+          throw new RuntimeException(e.getMessage(), e);
+        }
+      }
+
+      vsImtHazardMap.put(siteClass, imtHazardMap.build());
+    }
+
+    return vsImtHazardMap.build();
+  }
+
+  @Override
+  BoundingData<StaticDataHazardCurves> setBoundingData(
+      Netcdf<StaticDataHazardCurves> netcdf,
+      Location site,
+      List<BoundingLocation> boundingLocations) {
+    var netcdfHazard = (NetcdfHazardCurves) netcdf;
+    var netcdfData = netcdfHazard.netcdfData();
+    var longitudes = netcdfData.longitudes();
+    var latitudes = netcdfData.latitudes();
+    var idxLonLL = NetcdfUtils.getIdxLTEQ(longitudes, site.longitude);
+    var idxLatLL = NetcdfUtils.getIdxLTEQ(latitudes, site.latitude);
+
+    var hazards = extractDataAt(netcdfHazard, boundingLocations, idxLonLL, idxLatLL);
+    var fracLon = NetcdfUtils.calcGridFrac(longitudes, idxLonLL, site.longitude);
+    var fracLat = NetcdfUtils.calcGridFrac(latitudes, idxLatLL, site.latitude);
+
+    var builder = BoundingData.<StaticDataHazardCurves> builder();
+    hazards.forEach((key, value) -> builder.put(key, value));
+    builder.put(
+        site,
+        calculateTargetData(boundingLocations, hazards, fracLon, fracLat));
+    var boundingData = builder.build();
+
+    NetcdfUtils.checkBoundingHazards(boundingData, boundingLocations.get(0).location);
+    return boundingData;
+  }
+}
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/NetcdfCoordinates.java b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/NetcdfCoordinates.java
deleted file mode 100644
index 079d597712fcae71bf26e49af4f6cb64cdf1c0e2..0000000000000000000000000000000000000000
--- a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/NetcdfCoordinates.java
+++ /dev/null
@@ -1,254 +0,0 @@
-package gov.usgs.earthquake.nshmp.netcdf.reader;
-
-import static com.google.common.base.Preconditions.checkArgument;
-import static com.google.common.base.Preconditions.checkState;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.EnumMap;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import com.google.common.reflect.TypeToken;
-import com.google.gson.Gson;
-
-import gov.usgs.earthquake.nshmp.Maths;
-import gov.usgs.earthquake.nshmp.geo.BorderType;
-import gov.usgs.earthquake.nshmp.geo.Location;
-import gov.usgs.earthquake.nshmp.geo.LocationList;
-import gov.usgs.earthquake.nshmp.geo.Region;
-import gov.usgs.earthquake.nshmp.geo.Regions;
-import gov.usgs.earthquake.nshmp.geo.json.Feature;
-import gov.usgs.earthquake.nshmp.geo.json.FeatureCollection;
-import gov.usgs.earthquake.nshmp.geo.json.GeoJson;
-import gov.usgs.earthquake.nshmp.geo.json.Properties;
-import gov.usgs.earthquake.nshmp.gmm.Imt;
-import gov.usgs.earthquake.nshmp.gmm.NehrpSiteClass;
-import gov.usgs.earthquake.nshmp.netcdf.reader.NetcdfUtils.Key;
-
-import ucar.ma2.DataType;
-import ucar.ma2.InvalidRangeException;
-import ucar.nc2.Group;
-import ucar.nc2.Variable;
-
-/*
- * Container for dimensions and coordinate variables of NSHMP NetCDF file to
- * facilitate indexing operations prior to data retrieval.
- */
-public class NetcdfCoordinates {
-
-  private final List<NehrpSiteClass> siteClasses;
-  private final Map<NehrpSiteClass, Double> vs30Map;
-  private final List<Imt> imts;
-  private final Map<Imt, double[]> imls;
-  private final int nIml;
-  private final Region region;
-  private final double[] longitudes;
-  private final double[] latitudes;
-
-  private static final String FLAG_KEY = "flag_key";
-  private static final Gson GSON = new Gson();
-
-  public NetcdfCoordinates(Group targetGroup) throws IOException {
-    // This bypasses the netCDF dimensions, but since we know what the
-    // variables and their dimensions should be, this is OK(???)
-    // TODO: probably better (proper) to access netCDF dimensions
-
-    var vSiteClass = targetGroup.findVariable(Key.SITE_CLASS);
-    var vImls = targetGroup.findVariable(Key.IMLS);
-    var vImts = targetGroup.findVariable(Key.IMT);
-    var vBounds = targetGroup.findVariable(Key.BOUNDS);
-
-    var vs30s = NetcdfUtils.getDoubleArray(targetGroup, Key.VS30);
-    var lats = NetcdfUtils.getDoubleArray(targetGroup, Key.LAT);
-    var lons = NetcdfUtils.getDoubleArray(targetGroup, Key.LON);
-
-    var mapTypeToken = new TypeToken<Map<String, String>>() {}.getType();
-
-    // get list of SiteClass enums and build vs30 map
-    List<NehrpSiteClass> scList = new ArrayList<NehrpSiteClass>();
-    Map<NehrpSiteClass, Double> vsMap = new HashMap<NehrpSiteClass, Double>();
-    var siteClassArray = vSiteClass.read();
-    Map<String, String> siteClassFlagKeys = GSON
-        .fromJson(vSiteClass.findAttribute(FLAG_KEY)
-            .getValues().toString(), mapTypeToken);
-
-    for (int i = 0; i < vSiteClass.getSize(); i++) {
-      var scInt = siteClassArray.getInt(i);
-      var scString = siteClassFlagKeys.get(Integer.toString(scInt)).toString();
-      NehrpSiteClass siteClass = NehrpSiteClass.valueOf(scString);
-      scList.add(siteClass);
-      vsMap.put(siteClass, vs30s[i]);
-    }
-    siteClasses = List.copyOf(scList);
-    vs30Map = Map.copyOf(vsMap);
-
-    // get list of IMT enums
-    List<Imt> imtList = new ArrayList<Imt>();
-    Map<String, String> imtFlagKeys = GSON
-        .fromJson(vImts.findAttribute(FLAG_KEY)
-            .getValues().toString(), mapTypeToken);
-
-    var imtArray = vImts.read();
-    for (int i = 0; i < vImts.getSize(); i++) {
-      var imtInt = imtArray.getInt(i);
-      var imtString = imtFlagKeys.get(Integer.toString(imtInt)).toString();
-      imtList.add(Imt.valueOf(imtString));
-    }
-    imts = List.copyOf(imtList);
-
-    // vImls has dimensions (Imt, Iml)
-    // alternatively get nIml from Dimension Iml
-    nIml = targetGroup.findDimension(Key.IMLS).getLength();
-
-    // get map of IMLs
-    imls = mapImls(vImls);
-
-    latitudes = Arrays.stream(lats)
-        // TODO: Dynamic set location precision from NetCDF
-        .map(lat -> Maths.round(lat, 3))
-        .toArray();
-
-    longitudes = Arrays.stream(lons)
-        // TODO: Dynamic set location precision from NetCDF
-        .map(lon -> Maths.round(lon, 3))
-        .toArray();
-
-    /*
-     * build region from GeoJSON string - follow logic in {@code
-     * Sites.createSiteRegion()}, which is private, without converting to
-     * GriddedRegion. {@code Sites.createSiteRegion()} requires that if an
-     * extents Feature (rectangular bounds) is present, it MUST be the first
-     * feature
-     *
-     * I think we just want the polygon border and don't need the gridded sites?
-     * Unless these could be used to more efficiently look up site index?
-     */
-    // TODO: Error handling? how much, if any, error checking? Can we assume
-    // it's a valid GeoJSON file here if we make sure we only load valid files
-    // into the netCDF?
-
-    // Read from netCDF
-    String bounds = vBounds.readScalarString();
-    // Convert string to FeatureCollection
-    FeatureCollection dataRegions = GeoJson.from(bounds).toFeatureCollection();
-    // Extract Features
-    List<Feature> features = dataRegions.features();
-
-    // From {@code Sites.createSiteRegion()}
-    checkState(features.size() <= 2, "Only 2 polygon features may be defined");
-    int mapRegionIndex = 0;
-    if (features.size() > 1) {
-      // don't need to process the rectangular map extents, if present
-      mapRegionIndex = 1;
-    }
-    Feature sitesPoly = features.get(mapRegionIndex);
-    LocationList sitesPolyBorder = sitesPoly.asPolygonBorder();
-    Properties properties = sitesPoly.properties();
-    // get region name - either "title" or "name" property???
-    String mapName =
-        properties.getString("title").orElse(properties.getString("name").orElse("Unnamed Map"));
-    region = Regions.create(mapName, sitesPolyBorder, BorderType.MERCATOR_LINEAR);
-  }
-
-  /**
-   * Returns the Imls
-   */
-  public Map<Imt, double[]> imls() {
-    return imls;
-  }
-
-  /**
-   * Return the Imts
-   */
-  public List<Imt> imts() {
-    return List.copyOf(imts);
-  }
-
-  /**
-   * Returns the latitudes.
-   */
-  public double[] latitudes() {
-    return latitudes.clone();
-  }
-
-  /**
-   * Returns the longitudes.
-   */
-  public double[] longitudes() {
-    return longitudes.clone();
-  }
-
-  /**
-   * Returns the number of Imls
-   */
-  public int nIml() {
-    return nIml;
-  }
-
-  /**
-   * Returns the region
-   */
-  public Region region() {
-    return Regions.copyOf(region);
-  }
-
-  /**
-   * Return the site classes
-   */
-  public List<NehrpSiteClass> siteClasses() {
-    return List.copyOf(siteClasses);
-  }
-
-  /**
-   * Validate a target site is contained with in the bounds.
-   *
-   * @param site The site to test
-   */
-  public void contains(Location site) {
-    checkArgument(
-        region.contains(site),
-        String.format("Target site [%s] is not contained within %s calculation region",
-            site.toString(), region.name()));
-    // No longer necessary? region is now the calculation polygon, rather than a
-    // rectangular boundary
-    var bounds = region.bounds();
-    checkArgument(
-        region.contains(site),
-        String.format("Target site [%s] out of range %s", site.toString(), bounds.toString()));
-  }
-
-  /*
-   * convert 2D Iml variable (dimensions Imt, Iml) to map of Imls by Imt
-   *
-   * TODO: use MultiMap or SetMultiMap (etc.) to store unique IML sets? Could
-   * then also initialize the underlying XySequence objects for reading in the
-   * hazard curves...
-   */
-  private Map<Imt, double[]> mapImls(Variable vImls) {
-    Map<Imt, double[]> imtImlMap = new EnumMap<Imt, double[]>(Imt.class);
-    for (int j = 0; j < imts.size(); j++) {
-      var imt = imts.get(j);
-
-      // set origin and shape of double[] Imls to read
-      var origin = new int[] { j, 0 };
-      var shape = new int[] { 1, nIml };
-
-      try {
-        imtImlMap.put(
-            imt,
-            (double[]) vImls.read(origin, shape).reduce().get1DJavaArray(DataType.DOUBLE));
-      } catch (IOException | InvalidRangeException e) {
-        var msg = "Failed read attempt for vImls with origin: " +
-            Arrays.toString(origin) + ", shape: " + Arrays.toString(shape);
-        throw new RuntimeException(msg);
-      }
-    }
-
-    return Collections.unmodifiableMap(imtImlMap);
-  }
-
-}
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/NetcdfUtils.java b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/NetcdfUtils.java
index f1c2e7737de5ee7da705d6afc4fcba3dd933bd40..e52269c1e8e9830f5408cf35dcb37a64ef1fb029 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/NetcdfUtils.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/NetcdfUtils.java
@@ -12,6 +12,9 @@ import com.google.common.math.DoubleMath;
 import gov.usgs.earthquake.nshmp.data.XySequence;
 import gov.usgs.earthquake.nshmp.geo.Location;
 import gov.usgs.earthquake.nshmp.geo.LocationList;
+import gov.usgs.earthquake.nshmp.netcdf.data.BoundingData;
+import gov.usgs.earthquake.nshmp.netcdf.data.StaticData;
+import gov.usgs.earthquake.nshmp.netcdf.data.StaticDataHazardCurves;
 
 import ucar.ma2.DataType;
 import ucar.nc2.Group;
@@ -51,37 +54,118 @@ public class NetcdfUtils {
     return builder.build();
   }
 
+  /*
+   * Calculate fractional distance from a1 to t, between a1 and a2
+   */
+  static double calcFrac(double a1, double a2, double t) {
+    if (Math.abs(t - a1) < LOCATION_TOLERANCE) {
+      // target value == a1
+      return 0.0;
+    } else if (Math.abs(t - a2) < LOCATION_TOLERANCE) {
+      // target value == a2
+      return 1.0;
+    } else {
+      // calculate fractional distance to t between a1 and a2
+      return (t - a1) / (a2 - a1);
+    }
+  }
+
+  /*
+   * Calculate fractional distance from a[i] to t, between a[i] and a[i+1]
+   */
+  static double calcGridFrac(double[] a, int i, double t) {
+    return calcFrac(a[i], a[i + 1], t);
+  }
+
   /**
-   * Returns a {@code double[]} from a netCDF group
+   * Check whether bounding ground motions contain the same site classes and
+   * ground motion values.
    *
-   * @param group The netCDF group
-   * @param key The key to read from the group
-   * @throws IOException
+   * @param a Static data A
+   * @param b Static data B
    */
-  static double[] getDoubleArray(Group group, String key) throws IOException {
-    return (double[]) get1DArray(group, key, DataType.DOUBLE);
+  static void checkBoundingGroundMotion(
+      StaticData<XySequence> a,
+      StaticData<XySequence> b) {
+    checkState(a.size() == b.size(), "Maps are not the same size");
+    checkState(a.keySet().containsAll(b.keySet()), "Site classes do not match");
+    a.keySet().forEach(key -> checkXySequence(a.get(key), b.get(key)));
   }
 
   /**
-   * Returns a {@code int[]} from a netCDF group
+   * Checks that all bounding ground motion maps contain the same site classes
+   * and ground motion values.
    *
-   * @param group The netCDF group
-   * @param key The key to read from the group
-   * @throws IOException
+   * @param boundingData The bounding ground motions
+   * @param location The reference location to compare against
    */
-  static int[] getIntArray(Group group, String key) throws IOException {
-    return (int[]) get1DArray(group, key, DataType.INT);
+  static void checkBoundingGroundMotions(
+      BoundingData<XySequence> boundingData,
+      Location location) {
+    checkArgument(boundingData.containsKey(location), "Location not in bounding ground motions");
+    // Compare every other bounding location against the reference location
+    boundingData.keySet().stream()
+        .filter(loc -> !loc.equals(location))
+        .forEach(key -> {
+          checkBoundingGroundMotion(boundingData.get(location), boundingData.get(key));
+        });
   }
 
   /**
-   * Returns a {@code String[]} from a netCDF group
+   * Check whether bounding hazards contain the same site classes, IMTs for
+   * each site class, and ground motions for each IMT.
    *
-   * @param group The netCDF group
-   * @param key The key to read from the group
-   * @throws IOException
+   * @param a Bounding hazard map A
+   * @param b Bounding hazard map B
    */
-  static String[] getStringArray(Group group, String key) throws IOException {
-    return (String[]) get1DArray(group, key, DataType.STRING);
+  static void checkBoundingHazard(
+      StaticData<StaticDataHazardCurves> a,
+      StaticData<StaticDataHazardCurves> b) {
+    checkState(a.size() == b.size(), "Maps are not the same size");
+    checkState(a.keySet().containsAll(b.keySet()), "Site classes do not match");
+    a.keySet().forEach(key -> checkHazards(a.get(key), b.get(key)));
+  }
+
+  /**
+   * Checks that all bounding hazard maps contain the same site classes, IMTs
+   * for each site class, and ground motions for each IMT.
+   *
+   * @param boundingData The bounding hazards
+   * @param location The reference location to compare against
+   */
+  static void checkBoundingHazards(
+      BoundingData<StaticDataHazardCurves> boundingData,
+      Location location) {
+    checkArgument(boundingData.containsKey(location), "Location not in bounding hazards");
+    // Compare every other bounding location against the reference location
+    boundingData.keySet().stream()
+        .filter(loc -> !loc.equals(location))
+        .forEach(key -> {
+          checkBoundingHazard(boundingData.get(location), boundingData.get(key));
+        });
+  }
+
+  /**
+   * Check whether hazards contain the same IMTs and ground motions for each
+   * IMT.
+   *
+   * @param a Hazard A
+   * @param b Hazard B
+   */
+  static void checkHazards(
+      StaticDataHazardCurves a,
+      StaticDataHazardCurves b) {
+    checkState(a.size() == b.size(), "Maps are not the same size");
+    checkState(a.keySet().containsAll(b.keySet()), "IMTs do not match");
+    a.keySet().forEach(key -> checkXySequence(a.get(key), b.get(key)));
+  }
+
+  /**
+   * Check that the X values are identical.
+   *
+   * @param a Sequence A
+   * @param b Sequence B
+   */
+  static void checkXySequence(XySequence a, XySequence b) {
+    checkState(
+        Arrays.equals(a.xValues().toArray(), b.xValues().toArray()),
+        "Hazard curves xValues are not the same");
   }
 
   /**
@@ -93,7 +177,7 @@ public class NetcdfUtils {
    * @throws IOException
    */
   static Object get1DArray(Group group, String key, DataType dataType) throws IOException {
-    var var = group.findVariable(key);
+    var var = group.findVariableLocal(key);
     checkNotNull(
         var,
         String.format("Could not find variable [%s] in group [%s]", key, group.getFullName()));
@@ -101,6 +185,17 @@ public class NetcdfUtils {
     return var.read().get1DJavaArray(dataType);
   }
 
+  /**
+   * Returns a {@code double[]} from a netCDF group
+   *
+   * @param group The netCDF group
+   * @param key The key to read from the group
+   * @throws IOException
+   */
+  static double[] getDoubleArray(Group group, String key) throws IOException {
+    return (double[]) get1DArray(group, key, DataType.DOUBLE);
+  }
+
   /*
    * find index of first element in a (sorted ascending) that is less than or
    * equal to target value t. If target value is equal to the maximum value in a
@@ -139,39 +234,41 @@ public class NetcdfUtils {
     return idx;
   }
 
-  /*
-   * Calculate fractional distance from a1 to t, between a1 and a2
+  /**
+   * Returns an {@code int[]} from a netCDF group
+   *
+   * @param group The netCDF group
+   * @param key The key to read from the group
+   * @throws IOException
    */
-  static double calcFrac(double a1, double a2, double t) {
-    if (Math.abs(t - a1) < LOCATION_TOLERANCE) {
-      // target value == a1
-      return 0.0;
-    } else if (Math.abs(t - a2) < LOCATION_TOLERANCE) {
-      // target value == a2
-      return 1.0;
-    } else {
-      // calculate fractional distance to t between a[i] and a[i+1]
-      return (t - a1) / (a2 - a1);
-    }
+  static int[] getIntArray(Group group, String key) throws IOException {
+    return (int[]) get1DArray(group, key, DataType.INT);
   }
 
-  /*
-   * Calculate fractional distance from a[i] to t, between a[i] and a[i+1]
+  /**
+   * Returns a {@code String[]} from a netCDF group
+   *
+   * @param group The netCDF group
+   * @param key The key to read from the group
+   * @throws IOException
    */
-  static double calcGridFrac(double[] a, int i, double t) {
-    return calcFrac(a[i], a[i + 1], t);
+  static String[] getStringArray(Group group, String key) throws IOException {
+    return (String[]) get1DArray(group, key, DataType.STRING);
   }
 
   /*
    * Linear interpolation of data values to a target point
    */
-  static StaticHazards linearInterpolate(StaticHazards v1, StaticHazards v2, double frac) {
+  static StaticData<StaticDataHazardCurves> linearInterpolateHazardCurves(
+      StaticData<StaticDataHazardCurves> v1,
+      StaticData<StaticDataHazardCurves> v2,
+      double frac) {
     checkBoundingHazard(v1, v2);
 
-    var targetMap = StaticHazards.builder();
+    var targetMap = StaticData.<StaticDataHazardCurves> builder();
 
     v1.keySet().forEach(siteClass -> {
-      var imtHazards = StaticHazard.builder();
+      var imtHazards = StaticDataHazardCurves.builder();
       var v1StaticHazards = v1.get(siteClass);
       var v2StaticHazards = v2.get(siteClass);
 
@@ -194,73 +291,43 @@ public class NetcdfUtils {
     return targetMap.build();
   }
 
-  /**
-   * Checks bounding hazard maps contain the same: Site classes, IMTs per each
-   * site class, and ground motions per each IMT
-   *
-   *
-   * @param boundingHazards The bounding hazards
+  /*
+   * Linear interpolation of ground motion values to a target point
    */
-  static void checkBoundingHazards(
-      BoundingHazards boundingHazards,
-      Location location) {
-    checkArgument(boundingHazards.containsKey(location), "Location not in bounding hazards");
-    boundingHazards.keySet().stream()
-        .filter(loc -> loc.equals(location))
-        .forEach(key -> {
-          checkBoundingHazard(boundingHazards.get(location), boundingHazards.get(key));
-        });
-  }
+  static StaticData<XySequence> linearInterpolateGroundMotions(
+      StaticData<XySequence> v1,
+      StaticData<XySequence> v2,
+      double frac) {
+    checkBoundingGroundMotion(v1, v2);
 
-  /**
-   * Check whether bounding hazards contain the same: Site classes, IMTs per
-   * each site class, and ground motions per each IMT
-   *
-   * @param a Bounding hazard map A
-   * @param b Bounding hazard map B
-   */
-  static void checkBoundingHazard(
-      StaticHazards a,
-      StaticHazards b) {
-    checkState(a.size() == b.size(), "Maps are not the same size");
-    checkState(a.keySet().containsAll(b.keySet()), "Site classes do not match");
-    a.keySet().forEach(key -> checkHazards(a.get(key), b.get(key)));
-  }
+    var targetMap = StaticData.<XySequence> builder();
 
-  /**
-   * Check whether hazards contain the same: IMTs and ground motions per each
-   * IMT
-   *
-   * @param a Hazard A
-   * @param b Hazard B
-   */
-  static void checkHazards(StaticHazard a, StaticHazard b) {
-    checkState(a.size() == b.size(), "Maps are not the same size");
-    checkState(a.keySet().containsAll(b.keySet()), "IMTs do not match");
-    a.keySet().forEach(key -> checkGroundMotions(a.get(key), b.get(key)));
-  }
+    v1.keySet().forEach(siteClass -> {
+      var v1Data = v1.get(siteClass).yValues().toArray();
+      var v2Data = v2.get(siteClass).yValues().toArray();
+      var target = new double[v1Data.length];
 
-  /**
-   * Check that the X values are identical.
-   *
-   * @param a Sequence A
-   * @param b Sequence B
-   */
-  static void checkGroundMotions(XySequence a, XySequence b) {
-    checkState(
-        Arrays.equals(a.xValues().toArray(), b.xValues().toArray()),
-        "Hazard curves xValues are not the same");
+      for (int i = 0; i < v1Data.length; i++) {
+        target[i] = v1Data[i] * (1 - frac) + v2Data[i] * frac;
+      }
+
+      var xValues = v1.get(siteClass).xValues().toArray();
+      targetMap.put(siteClass, XySequence.create(xValues, target));
+    });
+
+    return targetMap.build();
   }
 
-  static class Key {
-    static final String BOUNDS = "dataBounds";
-    static final String GRID_MASK = "gridMask";
-    static final String HAZARD = "hazard";
-    static final String IMLS = "iml";
-    static final String IMT = "imt";
-    static final String LAT = "lat";
-    static final String LON = "lon";
-    static final String SITE_CLASS = "siteClass";
-    static final String VS30 = "vs30";
+  public static class Key {
+    public static final String DATA_TYPE = "dataType";
+    public static final String GRID_MASK = "gridMask";
+    public static final String GROUND_MOTION = "groundMotion";
+    public static final String HAZARD = "hazard";
+    public static final String IMLS = "iml";
+    public static final String IMT = "imt";
+    public static final String LAT = "lat";
+    public static final String LON = "lon";
+    public static final String SITE_CLASS = "siteClass";
+    public static final String VS30 = "vs30";
   }
 }
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/Reader.java b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/Reader.java
new file mode 100644
index 0000000000000000000000000000000000000000..1596d6b4133a0da6269cc510abedc870dc122033
--- /dev/null
+++ b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/Reader.java
@@ -0,0 +1,97 @@
+package gov.usgs.earthquake.nshmp.netcdf.reader;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import com.google.common.reflect.TypeToken;
+import com.google.gson.Gson;
+
+import gov.usgs.earthquake.nshmp.Maths;
+import gov.usgs.earthquake.nshmp.gmm.Imt;
+import gov.usgs.earthquake.nshmp.gmm.NehrpSiteClass;
+import gov.usgs.earthquake.nshmp.netcdf.data.NetcdfData;
+import gov.usgs.earthquake.nshmp.netcdf.reader.NetcdfUtils.Key;
+
+import ucar.nc2.Group;
+
+/**
+ * Read in a NetCDF file.
+ *
+ * @author U.S. Geological Survey
+ */
+public class Reader {
+
+  Group targetGroup;
+
+  static final String FLAG_KEY = "flag_key";
+  static final Gson GSON = new Gson();
+
+  public Reader(Group targetGroup) {
+    this.targetGroup = targetGroup;
+  }
+
+  /**
+   * Returns the NetCDF data.
+   */
+  public NetcdfData readData() throws IOException {
+    var vSiteClass = targetGroup.findVariableLocal(Key.SITE_CLASS);
+    var vs30s = NetcdfUtils.getDoubleArray(targetGroup, Key.VS30);
+    var vImts = targetGroup.findVariableLocal(Key.IMT);
+
+    var mapTypeToken = new TypeToken<Map<String, String>>() {}.getType();
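+    // The "flag_key" variable attribute holds a JSON map from integer flag
+    // values to enum names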
+
+    // get list of IMT enums
+    List<Imt> imtList = new ArrayList<Imt>();
+    Map<String, String> imtFlagKeys = GSON
+        .fromJson(vImts.findAttribute(FLAG_KEY)
+            .getValues().toString(), mapTypeToken);
+
+    var imtArray = vImts.read();
+    for (int i = 0; i < vImts.getSize(); i++) {
+      var imtInt = imtArray.getInt(i);
+      var imtString = imtFlagKeys.get(Integer.toString(imtInt)).toString();
+      imtList.add(Imt.valueOf(imtString));
+    }
+    var imts = List.copyOf(imtList);
+
+    // get list of SiteClass enums and build vs30 map
+    var scList = new ArrayList<NehrpSiteClass>();
+    var vsMap = new HashMap<NehrpSiteClass, Double>();
+    var siteClassArray = vSiteClass.read();
+    Map<String, String> siteClassFlagKeys = GSON
+        .fromJson(vSiteClass.findAttribute(FLAG_KEY)
+            .getValues().toString(), mapTypeToken);
+
+    for (int i = 0; i < vSiteClass.getSize(); i++) {
+      var scInt = siteClassArray.getInt(i);
+      var scString = siteClassFlagKeys.get(Integer.toString(scInt)).toString();
+      var siteClass = NehrpSiteClass.valueOf(scString);
+      scList.add(siteClass);
+      vsMap.put(siteClass, vs30s[i]);
+    }
+    var siteClasses = List.copyOf(scList);
+    var vs30Map = Map.copyOf(vsMap);
+
+    var latitudes = Arrays.stream(NetcdfUtils.getDoubleArray(targetGroup, Key.LAT))
+        // TODO: Dynamic set location precision from NetCDF
+        .map(lat -> Maths.round(lat, 3))
+        .toArray();
+
+    var longitudes = Arrays.stream(NetcdfUtils.getDoubleArray(targetGroup, Key.LON))
+        // TODO: Dynamic set location precision from NetCDF
+        .map(lon -> Maths.round(lon, 3))
+        .toArray();
+
+    return NetcdfData.builder()
+        .imts(imts)
+        .latitudes(latitudes)
+        .longitudes(longitudes)
+        .siteClasses(siteClasses)
+        .vs30Map(vs30Map)
+        .build();
+  }
+}
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/ReaderHazardCurves.java b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/ReaderHazardCurves.java
new file mode 100644
index 0000000000000000000000000000000000000000..b1c74eeefeec0e36a52de65acc031802f5ff1e7c
--- /dev/null
+++ b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/ReaderHazardCurves.java
@@ -0,0 +1,99 @@
+package gov.usgs.earthquake.nshmp.netcdf.reader;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.EnumMap;
+import java.util.List;
+import java.util.Map;
+
+import gov.usgs.earthquake.nshmp.gmm.Imt;
+import gov.usgs.earthquake.nshmp.netcdf.data.NetcdfDataHazardCurves;
+import gov.usgs.earthquake.nshmp.netcdf.reader.NetcdfUtils.Key;
+
+import ucar.ma2.DataType;
+import ucar.ma2.InvalidRangeException;
+import ucar.nc2.Group;
+import ucar.nc2.Variable;
+
+/**
+ * Read in hazard curve NetCDF files.
+ *
+ * @author U.S. Geological Survey
+ */
+public class ReaderHazardCurves extends Reader {
+
+  public ReaderHazardCurves(Group targetGroup) {
+    super(targetGroup);
+  }
+
+  @Override
+  public NetcdfDataHazardCurves readData() throws IOException {
+    var coords = super.readData();
+    var vImls = targetGroup.findVariableLocal(Key.IMLS);
+
+    // The IMT list has already been read by the base reader
+    var imts = coords.imts();
+
+    // get map of IMLs
+    var imls = mapImls(vImls, imts);
+
+    return NetcdfDataHazardCurves.builder()
+        .imls(imls)
+        .imts(imts)
+        .latitudes(coords.latitudes())
+        .longitudes(coords.longitudes())
+        .siteClasses(coords.siteClasses())
+        .vs30Map(coords.vs30Map())
+        .build();
+  }
+
+  /*
+   * convert 2D Iml variable (dimensions Imt, Iml) to map of Imls by Imt
+   *
+   * TODO: use MultiMap or SetMultiMap (etc.) to store unique IML sets? Could
+   * then also initialize the underlying XySequence objects for reading in the
+   * hazard curves...
+   */
+  private Map<Imt, double[]> mapImls(Variable vImls, List<Imt> imts) {
+    var nIml = vImls.getShape(1);
+    Map<Imt, double[]> imtImlMap = new EnumMap<Imt, double[]>(Imt.class);
+
+    for (int j = 0; j < imts.size(); j++) {
+      var imt = imts.get(j);
+
+      // set origin and shape of double[] Imls to read
+      var origin = new int[] { j, 0 };
+      var shape = new int[] { 1, nIml };
+
+      try {
+        imtImlMap.put(
+            imt,
+            (double[]) vImls.read(origin, shape).reduce().get1DJavaArray(DataType.DOUBLE));
+      } catch (IOException | InvalidRangeException e) {
+        var msg = "Failed read attempt for vImls with origin: " +
+            Arrays.toString(origin) + ", shape: " + Arrays.toString(shape);
+        throw new RuntimeException(msg, e);
+      }
+    }
+
+    return Collections.unmodifiableMap(imtImlMap);
+  }
+}
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/StaticHazards.java b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/StaticHazards.java
deleted file mode 100644
index 5be53f16c3b076cb67511eda468bbc99d832dc8a..0000000000000000000000000000000000000000
--- a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/StaticHazards.java
+++ /dev/null
@@ -1,44 +0,0 @@
-package gov.usgs.earthquake.nshmp.netcdf.reader;
-
-import static com.google.common.base.Preconditions.checkState;
-
-import java.util.EnumMap;
-
-import gov.usgs.earthquake.nshmp.gmm.NehrpSiteClass;
-
-@SuppressWarnings("serial")
-public class StaticHazards extends EnumMap<NehrpSiteClass, StaticHazard> {
-
-  public StaticHazards() {
-    super(NehrpSiteClass.class);
-  }
-
-  private StaticHazards(EnumMap<NehrpSiteClass, StaticHazard> staticHazards) {
-    super(NehrpSiteClass.class);
-    this.putAll(staticHazards);
-  }
-
-  public static Builder builder() {
-    return new Builder();
-  }
-
-  public static class Builder {
-    EnumMap<NehrpSiteClass, StaticHazard> staticHazards;
-
-    private Builder() {
-      staticHazards = new EnumMap<>(NehrpSiteClass.class);
-    }
-
-    public Builder put(NehrpSiteClass siteClass, StaticHazard staticHazard) {
-      staticHazards.put(siteClass, staticHazard);
-      return this;
-    }
-
-    public StaticHazards build() {
-      checkState(!staticHazards.isEmpty(), "Must add hazards");
-      return new StaticHazards(staticHazards);
-    }
-
-  }
-
-}
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/www/Application.java b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/www/Application.java
index 396bc14e9d2c2fd4abc44ad61ac64e9c89a22dec..f17109b94566a6f0b44329ee9e9bd604e2ddbec7 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/www/Application.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/www/Application.java
@@ -7,9 +7,9 @@ import io.swagger.v3.oas.annotations.info.Info;
 @OpenAPIDefinition(
     info = @Info(
         title = "NSHMP Static Data Services",
-        description = "### Get static hazard curves\n" +
-            "See the service usage for current NSHM with supported longitudes, " +
-            "latitudes, site classes, and IMTS"))
+        description = "### Get static curves\n" +
+            "See the service usage for supported longitudes, " +
+            "latitudes, and site classes"))
 public class Application {
 
   public static void main(String[] args) {
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/www/NetcdfController.java b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/www/NetcdfController.java
index 5a00fc8a2a557d07197e391ff8b2b36ec7f06eca..81ac818c3b102ab400864839c54d5a8454d4e19b 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/www/NetcdfController.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/www/NetcdfController.java
@@ -2,11 +2,14 @@ package gov.usgs.earthquake.nshmp.netcdf.www;
 
 import java.nio.file.Path;
 
-import gov.usgs.earthquake.nshmp.gmm.Imt;
 import gov.usgs.earthquake.nshmp.gmm.NehrpSiteClass;
+import gov.usgs.earthquake.nshmp.netcdf.NetcdfDataType;
+import gov.usgs.earthquake.nshmp.netcdf.NetcdfGroundMotions;
+import gov.usgs.earthquake.nshmp.netcdf.NetcdfHazardCurves;
 import gov.usgs.earthquake.nshmp.www.NshmpMicronautServlet;
 
 import io.micronaut.context.annotation.Value;
+import io.micronaut.context.event.StartupEvent;
 import io.micronaut.core.annotation.Nullable;
 import io.micronaut.http.HttpRequest;
 import io.micronaut.http.HttpResponse;
@@ -15,7 +18,7 @@ import io.micronaut.http.annotation.Controller;
 import io.micronaut.http.annotation.Get;
 import io.micronaut.http.annotation.PathVariable;
 import io.micronaut.http.annotation.QueryValue;
-import io.swagger.v3.oas.annotations.Hidden;
+import io.micronaut.runtime.event.annotation.EventListener;
 import io.swagger.v3.oas.annotations.Operation;
 import io.swagger.v3.oas.annotations.media.Content;
 import io.swagger.v3.oas.annotations.media.Schema;
@@ -24,83 +27,76 @@ import io.swagger.v3.oas.annotations.tags.Tag;
 import jakarta.inject.Inject;
 
 /**
- * Micronaut controller for getting static hazards form a NetCDF file.
+ * Micronaut controller for getting static hazards or ground motions from a
+ * NetCDF file.
  *
  * @see NetcdfService
  *
  * @author U.S. Geological Survey
  */
-@Tag(name = "Hazard Data")
-@Controller("/hazard")
+@Tag(name = "Static Data")
+@Controller("/curves")
 public class NetcdfController {
 
   @Inject
   private NshmpMicronautServlet servlet;
 
   @Value("${nshmp-ws-static.netcdf-file}")
-  Path path;
+  Path netcdfPath;
+
+  NetcdfService service;
 
   /**
-   * GET method to return a hazard curve using URL query.
-   *
-   * @param request The HTTP request
-   * @param longitude The longitude of the site
-   * @param latitude Latitude of the site
-   * @param siteClass The site class (optional)
-   * @param imt The IMT (optional)
+   * Reads the data type from the NetCDF file at startup and sets the
+   * appropriate service to use.
    */
-  @Operation(
-      summary = "Returns hazard curve(s) given a longitude, latitude, and/or a site class, and imt",
-      description = "Retrieve hazard curve(s) from a NSHM NetCDF file.\n\n" +
-          "For supported longitudes, latitudes, site classes, and IMTs see the usage information.",
-      operationId = "netcdf_data_doGetHazard")
-  @ApiResponse(
-      description = "Returns a hazard curve from the NSHM NetCDF file",
-      responseCode = "200",
-      content = @Content(
-          schema = @Schema(type = "string")))
-  @Get(uri = "{?longitude,latitude,siteClass,imt}", produces = MediaType.APPLICATION_JSON)
-  public HttpResponse<String> doGetHazard(
-      HttpRequest<?> request,
-      @Schema(required = true) @QueryValue @Nullable Double longitude,
-      @Schema(required = true) @QueryValue @Nullable Double latitude,
-      @QueryValue @Nullable NehrpSiteClass siteClass,
-      @QueryValue @Nullable Imt imt) {
-    var query = new Query(longitude, latitude, siteClass, imt);
-    return NetcdfService.handleDoGet(request, path, query);
+  @EventListener
+  void startup(StartupEvent event) {
+    var dataType = NetcdfDataType.getDataType(netcdfPath);
+
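+    // Select the service implementation that matches the file's data type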
+    switch (dataType) {
+      case GROUND_MOTIONS:
+        var netcdfGroundMotions = new NetcdfGroundMotions(netcdfPath);
+        service = new NetcdfServiceGroundMotions(netcdfGroundMotions);
+        break;
+      case HAZARD_CURVES:
+        var netcdfHazard = new NetcdfHazardCurves(netcdfPath);
+        service = new NetcdfServiceHazardCurves(netcdfHazard);
+        break;
+      default:
+        throw new RuntimeException("Data type [" + dataType + "] not supported");
+    }
   }
 
   /**
-   * GET method to return a hazard curve using slash delimited.
+   * GET method to return a static curve using a URL query.
    *
    * @param request The HTTP request
    * @param longitude The longitude of the site
    * @param latitude Latitude of the site
-   * @param siteClass The site class
-   * @param imt The IMT
+   * @param siteClass The site class (optional)
    */
   @Operation(
-      summary = "Returns a hazard curve given a longitude, latitude, site class, and imt",
-      description = "Retrieve a hazard curve from a NSHM NetCDF file.\n\n" +
-          "For supported longitudes, latitudes, site classes, and IMTs see the usage information.",
-      operationId = "netcdf_data_doGetHazardSlashWithSiteClassAndImt")
+      summary = "Returns curve(s) given a longitude, latitude, and/or a site class",
+      description = "Retrieve static curve(s) from a NSHM NetCDF file.\n\n" +
+          "For supported longitudes, latitudes, and site classes see the usage information.",
+      operationId = "netcdf_data_doGetHazard")
   @ApiResponse(
-      description = "Returns a hazard curve from the NSHM NetCDF file",
+      description = "Returns a static curve from the NetCDF file",
       responseCode = "200",
       content = @Content(
           schema = @Schema(type = "string")))
-  @Get(uri = "/{longitude}/{latitude}/{siteClass}/{imt}")
-  public HttpResponse<String> doGetHazardSlashBySiteImt(
+  @Get(uri = "{?longitude,latitude,siteClass}", produces = MediaType.APPLICATION_JSON)
+  public HttpResponse<String> doGet(
       HttpRequest<?> request,
-      @Schema(required = true) @PathVariable @Nullable Double longitude,
-      @Schema(required = true) @PathVariable @Nullable Double latitude,
-      @Schema(required = true) @PathVariable @Nullable NehrpSiteClass siteClass,
-      @Schema(required = true) @PathVariable @Nullable Imt imt) {
-    return doGetHazard(request, longitude, latitude, siteClass, imt);
+      @Schema(required = true) @QueryValue @Nullable Double longitude,
+      @Schema(required = true) @QueryValue @Nullable Double latitude,
+      @QueryValue @Nullable NehrpSiteClass siteClass) {
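+    // Delegate to the data-type specific service selected at startup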
+    var query = new Query(longitude, latitude, siteClass);
+    return service.handleServiceCall(request, query);
   }
 
   /**
-   * GET method to return hazard curves using slash delimited.
+   * GET method to return static curves using slash-delimited parameters.
    *
    * @param request The HTTP request
    * @param longitude The longitude of the site
@@ -108,22 +104,22 @@ public class NetcdfController {
    * @param siteClass The site class
    */
   @Operation(
-      summary = "Returns hazard curves given a longitude, latitude, and site class.",
-      description = "Retrieve hazard curves from a NSHM NetCDF file.\n\n" +
+      summary = "Returns static curves given a longitude, latitude, and site class.",
+      description = "Retrieve static curves from a NetCDF file.\n\n" +
           "For supported longitudes, latitudes, and site classes see the usage information.",
       operationId = "netcdf_data_doGetHazardSlashWithSiteClass")
   @ApiResponse(
-      description = "Returns hazard curves from the NSHM NetCDF file",
+      description = "Returns static curves from the NetCDF file",
       responseCode = "200",
       content = @Content(
           schema = @Schema(type = "string")))
   @Get(uri = "/{longitude}/{latitude}/{siteClass}", produces = MediaType.APPLICATION_JSON)
-  public HttpResponse<String> doGetHazardSlashBySite(
+  public HttpResponse<String> doGetSlashBySite(
       HttpRequest<?> request,
       @Schema(required = true) @PathVariable @Nullable Double longitude,
       @Schema(required = true) @PathVariable @Nullable Double latitude,
       @Schema(required = true) @PathVariable @Nullable NehrpSiteClass siteClass) {
-    return doGetHazard(request, longitude, latitude, siteClass, null);
+    return doGet(request, longitude, latitude, siteClass);
   }
 
   /**
@@ -134,86 +130,32 @@ public class NetcdfController {
    * @param latitude Latitude of the site
    */
   @Operation(
-      summary = "Returns hazard curves given a longitude and latitude.",
-      description = "Retrieve hazard curves from a NSHM NetCDF file.\n\n" +
+      summary = "Returns static curves given a longitude and latitude.",
+      description = "Retrieve static curves from a NetCDF file.\n\n" +
           "For supported longitudes and latitudes see the usage information.",
       operationId = "netcdf_data_doGetHazardSlash")
   @ApiResponse(
-      description = "Returns hazard curves from the NSHM NetCDF file",
+      description = "Returns static curves from the NetCDF file",
       responseCode = "200",
       content = @Content(
           schema = @Schema(type = "string")))
   @Get(uri = "/{longitude}/{latitude}", produces = MediaType.APPLICATION_JSON)
-  public HttpResponse<String> doGetHazardSlash(
+  public HttpResponse<String> doGetSlash(
       HttpRequest<?> request,
       @Schema(required = true) @PathVariable @Nullable Double longitude,
       @Schema(required = true) @PathVariable @Nullable Double latitude) {
-    return doGetHazard(request, longitude, latitude, null, null);
-  }
-
-  /**
-   * Returns the border as a GeoJSON feature collections.
-   *
-   * @param request The HTTP request
-   */
-  @Operation(
-      summary = "Returns the border as a GeoJSON feature collection.",
-      description = "Returns a GeoJSON feature collections of the border defining the data.",
-      operationId = "netcdf_data_region_doGetBorder")
-  @ApiResponse(
-      description = "GeoJSON feature collection",
-      responseCode = "200",
-      content = @Content(
-          schema = @Schema(type = "string")))
-  @Get(uri = "/border", produces = MediaType.APPLICATION_JSON)
-  public HttpResponse<String> doGetBorder(HttpRequest<?> request) {
-    return NetcdfService.handleDoGetBorder(request, path);
-  }
-
-  /**
-   * GET method to return the bounding hazards using URL query.
-   *
-   * @param request The HTTP request
-   * @param longitude The longitude
-   * @param latitude The latitude
-   */
-  @Hidden
-  @Get(uri = "/bounding{?longitude,latitude}", produces = MediaType.APPLICATION_JSON)
-  public HttpResponse<String> doGetHazardBounding(
-      HttpRequest<?> request,
-      @Schema(required = true) @QueryValue @Nullable Double longitude,
-      @Schema(required = true) @QueryValue @Nullable Double latitude) {
-    var query = new Query(longitude, latitude, null, null);
-    return NetcdfService.handleDoGetBounding(request, path, query);
-  }
-
-  /**
-   * GET method to return the bounding hazards using slash delimited.
-   *
-   * @param request The HTTP request
-   * @param longitude The longitude
-   * @param latitude The latitude
-   */
-  @Hidden
-  @Get(uri = "/bounding/{longitude}/{latitude}", produces = MediaType.APPLICATION_JSON)
-  public HttpResponse<String> doGetHazardBoundingSlash(
-      HttpRequest<?> request,
-      @Schema(required = true) @QueryValue @Nullable Double longitude,
-      @Schema(required = true) @QueryValue @Nullable Double latitude) {
-    return doGetHazardBounding(request, longitude, latitude);
+    return doGet(request, longitude, latitude, null);
   }
 
   static class Query {
     final Double longitude;
     final Double latitude;
     final NehrpSiteClass siteClass;
-    final Imt imt;
 
-    Query(Double longitude, Double latitude, NehrpSiteClass siteClass, Imt imt) {
+    Query(Double longitude, Double latitude, NehrpSiteClass siteClass) {
       this.longitude = longitude;
       this.latitude = latitude;
       this.siteClass = siteClass;
-      this.imt = imt;
     }
   }
 
@@ -221,7 +163,5 @@ public class NetcdfController {
     BOUNDING,
     CURVES,
     CURVES_BY_SITE_CLASS,
-    CURVES_BY_SITE_CLASS_IMT;
   }
-
 }
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/www/NetcdfService.java b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/www/NetcdfService.java
index 4f229cd69d297ffe361c90a998ae27595b282a6b..cbd80556ecc57d8b803ad18016d3ca151b790e43 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/www/NetcdfService.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/www/NetcdfService.java
@@ -2,347 +2,193 @@ package gov.usgs.earthquake.nshmp.netcdf.www;
 
 import static gov.usgs.earthquake.nshmp.netcdf.www.NetcdfWsUtils.GSON;
 
-import java.nio.file.Path;
 import java.util.List;
 import java.util.logging.Logger;
-import java.util.stream.Collectors;
 
 import gov.usgs.earthquake.nshmp.data.XySequence;
-import gov.usgs.earthquake.nshmp.geo.Bounds;
 import gov.usgs.earthquake.nshmp.geo.Location;
-import gov.usgs.earthquake.nshmp.geo.json.Feature;
-import gov.usgs.earthquake.nshmp.geo.json.GeoJson;
-import gov.usgs.earthquake.nshmp.gmm.Imt;
 import gov.usgs.earthquake.nshmp.gmm.NehrpSiteClass;
-import gov.usgs.earthquake.nshmp.netcdf.NshmNetcdfReader;
-import gov.usgs.earthquake.nshmp.netcdf.reader.BoundingHazards;
-import gov.usgs.earthquake.nshmp.netcdf.reader.StaticHazard;
-import gov.usgs.earthquake.nshmp.netcdf.reader.StaticHazards;
+import gov.usgs.earthquake.nshmp.netcdf.Netcdf;
 import gov.usgs.earthquake.nshmp.netcdf.www.NetcdfController.Query;
 import gov.usgs.earthquake.nshmp.netcdf.www.NetcdfController.Service;
-import gov.usgs.earthquake.nshmp.netcdf.www.NetcdfWsUtils.Key;
 import gov.usgs.earthquake.nshmp.www.Response;
-import gov.usgs.earthquake.nshmp.www.WsUtils;
-import gov.usgs.earthquake.nshmp.www.meta.Status;
 
 import io.micronaut.http.HttpRequest;
 import io.micronaut.http.HttpResponse;
 
 /**
- * Service handler for {@code NetcdfController}.
+ * Abstract service handler for {@code NetcdfController}.
  *
  * @see NetcdfController
  *
  * @author U.S. Geological Survey
  */
-public class NetcdfService {
-
-  private static NshmNetcdfReader netcdf;
-
-  private static String SERVICE_NAME = "Netcdf NSHM Service for the";
-  private static String SERVICE_DESCRIPTION = "Get static hazard curves for";
-
-  private static final Logger LOGGER = Logger.getLogger(NetcdfService.class.getName());
-
-  static HttpResponse<String> handleDoGet(
-      HttpRequest<?> request,
-      Path path,
-      Query query) {
-    SERVICE_NAME = String.format(SERVICE_NAME);
+public abstract class NetcdfService {
+
+  protected static final Logger LOGGER = Logger.getLogger(NetcdfService.class.getName());
+
+  Netcdf<?> netcdf;
+
+  protected NetcdfService(Netcdf<?> netcdf) {
+    this.netcdf = netcdf;
+  }
+
+  /**
+   * Returns the metadata response.
+   *
+   * @param httpRequest The HTTP request
+   */
+  abstract Response<String, Metadata> getMetadataResponse(HttpRequest<?> httpRequest);
+
+  /**
+   * Returns the service name.
+   */
+  abstract String getServiceName();
+
+  /**
+   * Returns the netcdf object associated with the specific data type.
+   */
+  abstract Netcdf<?> netcdf();
+
+  /**
+   * Returns the static curves at a specific location.
+   *
+   * @param <T> The response type
+   * @param request The request data
+   * @param url The URL for the service call
+   */
+  abstract <T> Response<RequestData, T> processCurves(RequestData request, String url);
+
+  /**
+   * Returns the static curves at a specific location and site class.
+   *
+   * @param <T> The response type
+   * @param request The request data
+   * @param url The URL for the service call
+   */
+  abstract <T> Response<RequestDataSiteClass, T> processCurvesSiteClass(
+      RequestDataSiteClass request,
+      String url);
+
+  /**
+   * Processes the service request and returns the response.
+   *
+   * @param httpRequest The HTTP request
+   * @param query The HTTP query
+   * @param service The NetCDF service
+   */
+  abstract Response<?, ?> processRequest(HttpRequest<?> httpRequest, Query query, Service service);
+
+  /**
+   * Handles the HTTP request and returns the service response.
+   *
+   * @param httpRequest The HTTP request
+   * @param query The service query
+   */
+  HttpResponse<String> handleServiceCall(HttpRequest<?> httpRequest, Query query) {
     try {
-      setNetcdf(path);
-      var url = request.getUri().toString();
-      LOGGER.info(SERVICE_NAME + " - Request: " + url);
-      LOGGER.fine(SERVICE_NAME + " - Query:\n" + GSON.toJson(query));
+      var url = httpRequest.getUri().toString();
+      LOGGER.info("Request: " + url);
+      LOGGER.fine("Query:\n" + GSON.toJson(query));
 
       if (query.longitude == null && query.latitude == null) {
-        return metadata(request, path);
-      }
-      var service = getService(path, query);
-      Response<?, ?> svcResponse = processRequest(request, netcdf, service, query);
-      var response = GSON.toJson(svcResponse);
-      LOGGER.fine(SERVICE_NAME + " - Result:\n" + response);
-      return HttpResponse.ok(response);
-    } catch (Exception e) {
-      var url = request.getUri().toString();
-      return NetcdfWsUtils.handleError(e, SERVICE_NAME, url);
-    }
-  }
-
-  static HttpResponse<String> handleDoGetBounding(
-      HttpRequest<?> request,
-      Path path,
-      Query query) {
-    SERVICE_NAME = String.format(SERVICE_NAME);
-    try {
-      setNetcdf(path);
-      LOGGER.fine(SERVICE_NAME + " - Query:\n" + GSON.toJson(query));
-
-      if (query.longitude == null && query.latitude == null) {
-        return metadata(request, path);
-      }
-      Response<?, ?> svcResponse = processRequest(request, netcdf, Service.BOUNDING, query);
-      var response = GSON.toJson(svcResponse);
-      LOGGER.fine(SERVICE_NAME + " - Result:\n" + response);
-      return HttpResponse.ok(response);
-    } catch (Exception e) {
-      var url = request.getUri().toString();
-      return NetcdfWsUtils.handleError(e, SERVICE_NAME, url);
-    }
-
-  }
-
-  static HttpResponse<String> handleDoGetBorder(
-      HttpRequest<?> request,
-      Path path) {
-    SERVICE_NAME = String.format(SERVICE_NAME);
-    try {
-      setNetcdf(path);
-      var geojson = GeoJson.builder();
-      var border = netcdf.coordinates().region().border();
-
-      var id = 0;
-      for (var location : border) {
-        var feature = Feature.point(location)
-            .id(id++)
-            .build();
-
-        geojson.add(feature);
+        return metadata(httpRequest);
       }
-
-      return HttpResponse.ok(geojson.build().toJson());
-    } catch (Exception e) {
-      var url = request.getUri().toString();
-      return NetcdfWsUtils.handleError(e, SERVICE_NAME, url);
-    }
-  }
-
-  static HttpResponse<String> metadata(HttpRequest<?> request, Path path) {
-    try {
-      setNetcdf(path);
-      String netcdfFile = path == null ? "" : path.toString();
-      var metadata = new Metadata(request, netcdfFile);
-      var url = request.getUri().toString();
-      var svcResponse =
-          new Response<>(Status.USAGE, SERVICE_NAME, url, metadata, url);
+      var service = getService(query);
+      Response<?, ?> svcResponse = processRequest(httpRequest, query, service);
       var response = GSON.toJson(svcResponse);
-      LOGGER.fine(SERVICE_NAME + " - Result:\n" + response);
+      LOGGER.fine("Result:\n" + response);
       return HttpResponse.ok(response);
     } catch (Exception e) {
-      var url = request.getUri().toString();
-      return NetcdfWsUtils.handleError(e, SERVICE_NAME, url);
+      var url = httpRequest.getUri().toString();
+      return NetcdfWsUtils.handleError(e, getServiceName(), url);
     }
   }
 
-  static Service getService(Path path, Query query) {
-    if (query.siteClass != null && query.imt != null) {
-      return Service.CURVES_BY_SITE_CLASS_IMT;
-    } else if (query.siteClass != null && query.imt == null) {
+  private Service getService(Query query) {
+    if (query.siteClass != null) {
       return Service.CURVES_BY_SITE_CLASS;
     } else {
       return Service.CURVES;
     }
   }
 
-  static Response<?, ?> processRequest(
-      HttpRequest<?> request,
-      NshmNetcdfReader netcdf,
-      Service service,
-      Query query) {
-    var requestData = new RequestData(query.longitude, query.latitude);
-    var url = request.getUri().toString();
-
-    switch (service) {
-      case BOUNDING:
-        return processBounding(requestData, url);
-      case CURVES:
-        return processCurves(requestData, url);
-      case CURVES_BY_SITE_CLASS:
-        requestData = new RequestDataCurves(query.longitude, query.latitude, query.siteClass);
-        return processCurvesSiteClass(
-            (RequestDataCurves) requestData, url);
-      case CURVES_BY_SITE_CLASS_IMT:
-        requestData = new RequestDataCurve(
-            query.longitude, query.latitude, query.siteClass, query.imt);
-        return processCurve((RequestDataCurve) requestData, url);
-      default:
-        throw new RuntimeException("Netcdf service [" + service + "] not found");
-    }
-  }
-
-  static Response<RequestData, List<List<List<ResponseData>>>> processBounding(
-      RequestData request,
-      String url) {
-    WsUtils.checkValue(Key.LATITUDE, request.site.latitude);
-    WsUtils.checkValue(Key.LONGITUDE, request.site.longitude);
-    var bounding = netcdf.boundingHazards(request.site);
-    var boundingAsList = toLists(request.site, bounding);
-    return new Response<>(Status.SUCCESS, SERVICE_NAME, request, boundingAsList, url);
-  }
-
-  static Response<RequestData, List<List<ResponseData>>> processCurves(
-      RequestData request,
-      String url) {
-    WsUtils.checkValue(Key.LATITUDE, request.site.latitude);
-    WsUtils.checkValue(Key.LONGITUDE, request.site.longitude);
-    var curves = netcdf.hazard(request.site);
-    var curvesAsList = toList(request.site, curves);
-    return new Response<>(Status.SUCCESS, SERVICE_NAME, request, curvesAsList, url);
-  }
-
-  static Response<RequestDataCurves, List<ResponseData>> processCurvesSiteClass(
-      RequestDataCurves request,
-      String url) {
-    WsUtils.checkValue(Key.LATITUDE, request.site.latitude);
-    WsUtils.checkValue(Key.LONGITUDE, request.site.longitude);
-    WsUtils.checkValue(Key.SITE_CLASS, request.siteClass);
-    var curves = netcdf.hazard(request.site, request.siteClass);
-    var curvesAsList = toList(request, curves);
-    return new Response<>(Status.SUCCESS, SERVICE_NAME, request, curvesAsList, url);
-  }
-
-  static Response<RequestDataCurve, ResponseData> processCurve(
-      RequestDataCurve request,
-      String url) {
-    WsUtils.checkValue(Key.LATITUDE, request.site.latitude);
-    WsUtils.checkValue(Key.LONGITUDE, request.site.longitude);
-    WsUtils.checkValue(Key.SITE_CLASS, request.siteClass);
-    WsUtils.checkValue(Key.IMT, request.imt);
-
-    var site = request.site;
-    var metadata = new ResponseMetadata(
-        site.longitude, site.latitude, request.siteClass, request.imt);
-    var curve = netcdf.hazard(request.site, request.siteClass, request.imt);
-    var responseData = new ResponseData(metadata, curve);
-    return new Response<>(Status.SUCCESS, SERVICE_NAME, request, responseData, url);
+  private HttpResponse<String> metadata(HttpRequest<?> httpRequest) {
+    var svcResponse = getMetadataResponse(httpRequest);
+    var response = GSON.toJson(svcResponse);
+    LOGGER.fine("Result:\n" + response);
+    return HttpResponse.ok(response);
   }
 
-  static List<List<List<ResponseData>>> toLists(
-      Location site,
-      BoundingHazards bounding) {
-
-    return bounding.entrySet().stream()
-        .map(entry -> toList(site, entry.getValue()))
-        .collect(Collectors.toList());
-  }
-
-  static List<List<ResponseData>> toList(
-      Location site,
-      StaticHazards curves) {
-    return curves.entrySet().stream()
-        .map(entry -> {
-          var request = new RequestDataCurves(site.longitude, site.latitude, entry.getKey());
-          return toList(request, entry.getValue());
-        })
-        .collect(Collectors.toList());
-  }
-
-  static List<ResponseData> toList(
-      RequestDataCurves request,
-      StaticHazard curves) {
-    return curves.entrySet().stream()
-        .map((entry) -> {
-          var site = request.site;
-          var metadata = new ResponseMetadata(
-              site.longitude, site.latitude, request.siteClass, entry.getKey());
-          return new ResponseData(metadata, entry.getValue());
-        })
-        .collect(Collectors.toList());
-  }
-
-  static class RequestData {
-    Location site;
-
-    RequestData(double longitude, double latitude) {
-      site = Location.create(longitude, latitude);
-    }
-  }
-
-  static class RequestDataCurves extends RequestData {
-    NehrpSiteClass siteClass;
+  class Metadata {
+    final String description;
+    final String[] syntax;
+    final Parameters parameters;
 
-    RequestDataCurves(double longitude, double latitude, NehrpSiteClass siteClass) {
-      super(longitude, latitude);
-      this.siteClass = siteClass;
+    Metadata(HttpRequest<?> request, String netcdfFile, String description) {
+      var url = request.getUri().toString();
+      url = url.endsWith("/") ? url.substring(0, url.length() - 1) : url;
+      this.description = description;
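+      // Each endpoint form is listed twice: with path parameters and with query parameters.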
+      syntax = new String[] {
+          url + "/{longitude:number}/{latitude:number}",
+          url + "?longitude={number}&latitude={number}",
+          url + "/{longitude:number}/{latitude:number}/{siteClass:NehrpSiteClass}",
+          url + "?longitude={number}&latitude={number}&siteClass={NehrpSiteClass}",
+      };
+      parameters = new Parameters();
     }
   }
 
-  static class RequestDataCurve extends RequestDataCurves {
-    Imt imt;
+  class Parameters {
+    final String netcdfFile;
+    final List<NehrpSiteClass> siteClasses;
 
-    RequestDataCurve(double longitude, double latitude, NehrpSiteClass siteClass, Imt imt) {
-      super(longitude, latitude, siteClass);
-      this.imt = imt;
+    Parameters() {
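+      // Advertise the loaded NetCDF file and the site classes it contains.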
+      this.netcdfFile = netcdf().netcdfPath().toString();
+      var data = netcdf().netcdfData();
+      siteClasses = data.siteClasses();
     }
   }
 
-  static class ResponseMetadata extends RequestDataCurve {
+  static class ResponseMetadata extends RequestDataSiteClass {
     final String xLabel;
     final String yLabel;
 
     ResponseMetadata(
-        double longitude,
-        double latitude,
-        NehrpSiteClass siteClass,
-        Imt imt) {
-      super(longitude, latitude, siteClass, imt);
-      xLabel = "Ground Motion (g)";
-      yLabel = "Annual Frequency of Exceedence";
+        String xLabel,
+        String yLabel,
+        Location site,
+        NehrpSiteClass siteClass) {
+      super(site, siteClass);
+      this.xLabel = xLabel;
+      this.yLabel = yLabel;
     }
   }
 
-  static class ResponseData {
-    final ResponseMetadata metadata;
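+  // Generic response body: service-specific metadata paired with an x-y data series.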
+  static class ResponseData<T extends ResponseMetadata> {
+    final T metadata;
     final XySequence data;
 
-    ResponseData(ResponseMetadata metadata, XySequence data) {
+    ResponseData(T metadata, XySequence data) {
       this.metadata = metadata;
       this.data = data;
     }
   }
 
-  static class Metadata {
-
-    final String description;
-    final String[] syntax;
-    final Parameters parameters;
+  static class RequestData {
+    Location site;
 
-    Metadata(HttpRequest<?> request, String netcdfFile) {
-      var url = request.getUri().toString();
-      description = String.format(SERVICE_DESCRIPTION);
-      syntax = new String[] {
-          url + "/{longitude:number}/{latitude:number}",
-          url + "?longitude={number}&latitude={number}",
-          url + "/{longitude:number}/{latitude:number}/{siteClass:NehrpSiteClass}",
-          url + "?longitude={number}&latitude={number}&siteClass={NehrpSiteClass}",
-          url + "/{longitude:number}/{latitude:number}/{siteClass:NehrpSiteClass}/{imt:Imt}",
-          url + "?longitude={number}&latitude={number}&siteClass={NehrpSiteClass}&imt={imt}",
-          url + "/border"
-      };
-      parameters = new Parameters(netcdfFile);
+    RequestData(Location site) {
+      this.site = site;
     }
   }
 
-  static class Parameters {
-    final String netcdfFile;
-    final Bounds bounds;
-    final List<NehrpSiteClass> siteClasses;
-    final List<Imt> imts;
-
-    Parameters(String netcdfFile) {
-      this.netcdfFile = netcdfFile;
-      var coordinates = netcdf.coordinates();
-      bounds = coordinates.region().bounds();
-      siteClasses = coordinates.siteClasses();
-      imts = coordinates.imts();
-    }
-
-  }
+  static class RequestDataSiteClass extends RequestData {
+    NehrpSiteClass siteClass;
 
-  private static void setNetcdf(Path path) {
-    if (netcdf == null) {
-      netcdf = new NshmNetcdfReader(path);
+    RequestDataSiteClass(Location site, NehrpSiteClass siteClass) {
+      super(site);
+      this.siteClass = siteClass;
     }
   }
-
 }
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/www/NetcdfServiceGroundMotions.java b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/www/NetcdfServiceGroundMotions.java
new file mode 100644
index 0000000000000000000000000000000000000000..bf4bd4e69fc38d9bbb4986de3403cd271731d1d3
--- /dev/null
+++ b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/www/NetcdfServiceGroundMotions.java
@@ -0,0 +1,136 @@
+package gov.usgs.earthquake.nshmp.netcdf.www;
+
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+import gov.usgs.earthquake.nshmp.data.XySequence;
+import gov.usgs.earthquake.nshmp.geo.Location;
+import gov.usgs.earthquake.nshmp.gmm.Imt;
+import gov.usgs.earthquake.nshmp.gmm.NehrpSiteClass;
+import gov.usgs.earthquake.nshmp.netcdf.NetcdfGroundMotions;
+import gov.usgs.earthquake.nshmp.netcdf.data.StaticData;
+import gov.usgs.earthquake.nshmp.netcdf.reader.BoundingReaderGroundMotions;
+import gov.usgs.earthquake.nshmp.netcdf.www.NetcdfController.Query;
+import gov.usgs.earthquake.nshmp.netcdf.www.NetcdfController.Service;
+import gov.usgs.earthquake.nshmp.netcdf.www.NetcdfWsUtils.Key;
+import gov.usgs.earthquake.nshmp.www.Response;
+import gov.usgs.earthquake.nshmp.www.WsUtils;
+import gov.usgs.earthquake.nshmp.www.meta.Status;
+
+import io.micronaut.http.HttpRequest;
+
+/**
+ * Handles service calls for ground motions.
+ *
+ * @see NetcdfController
+ *
+ * @author U.S. Geological Survey
+ */
+public class NetcdfServiceGroundMotions extends NetcdfService {
+
+  static final String SERVICE_DESCRIPTION = "Get static ground motions from a NetCDF file";
+  static final String SERVICE_NAME = "Static Ground Motions";
+  static final String X_LABEL = "Period (s)";
+  static final String Y_LABEL = "Median Ground Motion (g)";
+
+  protected NetcdfServiceGroundMotions(NetcdfGroundMotions netcdf) {
+    super(netcdf);
+  }
+
+  @Override
+  Response<String, Metadata> getMetadataResponse(HttpRequest<?> request) {
+    var metadata = new Metadata(request, SERVICE_DESCRIPTION);
+    var url = request.getUri().toString();
+    return new Response<>(Status.USAGE, SERVICE_NAME, url, metadata, url);
+  }
+
+  @Override
+  String getServiceName() {
+    return SERVICE_NAME;
+  }
+
+  @Override
+  NetcdfGroundMotions netcdf() {
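+    // Narrow the shared reader reference to the ground-motion subtype.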
+    return (NetcdfGroundMotions) netcdf;
+  }
+
+  @Override
+  Response<?, ?> processRequest(HttpRequest<?> httpRequest, Query query, Service service) {
+    var site = Location.create(query.longitude, query.latitude);
+    var requestData = new RequestData(site);
+    var url = httpRequest.getUri().toString();
+
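+    // Dispatch on the endpoint variant: all site classes, or a single site class.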
+    switch (service) {
+      case CURVES:
+        return processCurves(requestData, url);
+      case CURVES_BY_SITE_CLASS:
+        requestData = new RequestDataSiteClass(site, query.siteClass);
+        return processCurvesSiteClass((RequestDataSiteClass) requestData, url);
+      default:
+        throw new RuntimeException("Netcdf service [" + service + "] not found");
+    }
+  }
+
+  @Override
+  Response<RequestDataSiteClass, ResponseData<ResponseMetadata>> processCurvesSiteClass(
+      RequestDataSiteClass request,
+      String url) {
+    WsUtils.checkValue(Key.LATITUDE, request.site.latitude);
+    WsUtils.checkValue(Key.LONGITUDE, request.site.longitude);
+    WsUtils.checkValue(Key.SITE_CLASS, request.siteClass);
+    var curves = netcdf().staticData(request.site, request.siteClass);
+    var responseData = toResponseData(request, curves);
+    return new Response<>(Status.SUCCESS, SERVICE_NAME, request, responseData, url);
+  }
+
+  @Override
+  Response<RequestData, List<ResponseData<ResponseMetadata>>> processCurves(
+      RequestData request,
+      String url) {
+    WsUtils.checkValue(Key.LATITUDE, request.site.latitude);
+    WsUtils.checkValue(Key.LONGITUDE, request.site.longitude);
+    var curves = netcdf().staticData(request.site);
+    var responseData = toList(request.site, curves);
+    return new Response<>(Status.SUCCESS, SERVICE_NAME, request, responseData, url);
+  }
+
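+  // Expands a map keyed by site class into one response entry per site class.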
+  List<ResponseData<ResponseMetadata>> toList(
+      Location site,
+      StaticData<XySequence> curves) {
+    return curves.entrySet().stream()
+        .map(entry -> {
+          var request = new RequestDataSiteClass(site, entry.getKey());
+          return toResponseData(request, entry.getValue());
+        })
+        .collect(Collectors.toList());
+  }
+
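+  // Wraps a single curve with axis labels and the request metadata.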
+  ResponseData<ResponseMetadata> toResponseData(
+      RequestDataSiteClass request,
+      XySequence curves) {
+    var metadata = new ResponseMetadataGroundMotions(
+        X_LABEL,
+        Y_LABEL,
+        request.site,
+        request.siteClass);
+    return new ResponseData<>(metadata, curves);
+  }
+
+  static class ResponseMetadataGroundMotions extends ResponseMetadata {
+    final Map<String, Double> imtValues;
+
+    ResponseMetadataGroundMotions(
+        String xLabel,
+        String yLabel,
+        Location site,
+        NehrpSiteClass siteClass) {
+      super(xLabel, yLabel, site, siteClass);
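+      // Constants from BoundingReaderGroundMotions supply the PGA and PGV entries.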
+      imtValues = Map.of(
+          Imt.PGA.name(),
+          BoundingReaderGroundMotions.PGA_VALUE,
+          Imt.PGV.name(),
+          BoundingReaderGroundMotions.PGV_VALUE);
+    }
+  }
+}
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/www/NetcdfServiceHazardCurves.java b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/www/NetcdfServiceHazardCurves.java
new file mode 100644
index 0000000000000000000000000000000000000000..38db9f6f1e65da8333a209d70fe5122bd93c3686
--- /dev/null
+++ b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/www/NetcdfServiceHazardCurves.java
@@ -0,0 +1,141 @@
+package gov.usgs.earthquake.nshmp.netcdf.www;
+
+import java.util.List;
+import java.util.stream.Collectors;
+
+import gov.usgs.earthquake.nshmp.data.XySequence;
+import gov.usgs.earthquake.nshmp.geo.Location;
+import gov.usgs.earthquake.nshmp.gmm.Imt;
+import gov.usgs.earthquake.nshmp.gmm.NehrpSiteClass;
+import gov.usgs.earthquake.nshmp.netcdf.NetcdfHazardCurves;
+import gov.usgs.earthquake.nshmp.netcdf.data.StaticData;
+import gov.usgs.earthquake.nshmp.netcdf.data.StaticDataHazardCurves;
+import gov.usgs.earthquake.nshmp.netcdf.www.NetcdfController.Query;
+import gov.usgs.earthquake.nshmp.netcdf.www.NetcdfController.Service;
+import gov.usgs.earthquake.nshmp.netcdf.www.NetcdfWsUtils.Key;
+import gov.usgs.earthquake.nshmp.www.Response;
+import gov.usgs.earthquake.nshmp.www.WsUtils;
+import gov.usgs.earthquake.nshmp.www.meta.Status;
+
+import io.micronaut.http.HttpRequest;
+
+/**
+ * Handles service calls for hazard curves.
+ *
+ * @see NetcdfController
+ *
+ * @author U.S. Geological Survey
+ */
+public class NetcdfServiceHazardCurves extends NetcdfService {
+
+  static final String SERVICE_DESCRIPTION = "Get static hazard curves from a NetCDF file";
+  static final String SERVICE_NAME = "Static Hazard Curves";
+  static final String X_LABEL = "Ground Motion (g)";
+  static final String Y_LABEL = "Annual Frequency of Exceedance";
+
+  NetcdfServiceHazardCurves(NetcdfHazardCurves netcdf) {
+    super(netcdf);
+  }
+
+  @Override
+  Response<String, Metadata> getMetadataResponse(HttpRequest<?> request) {
+    var metadata = new Metadata(request, SERVICE_DESCRIPTION);
+    var url = request.getUri().toString();
+    return new Response<>(Status.USAGE, SERVICE_NAME, url, metadata, url);
+  }
+
+  @Override
+  String getServiceName() {
+    return SERVICE_NAME;
+  }
+
+  @Override
+  NetcdfHazardCurves netcdf() {
+    return (NetcdfHazardCurves) netcdf;
+  }
+
+  @Override
+  Response<RequestData, List<List<ResponseData<ResponseMetadataHazard>>>> processCurves(
+      RequestData request,
+      String url) {
+    WsUtils.checkValue(Key.LATITUDE, request.site.latitude);
+    WsUtils.checkValue(Key.LONGITUDE, request.site.longitude);
+    var curves = netcdf().staticData(request.site);
+    var curvesAsList = toList(request.site, curves);
+    return new Response<>(Status.SUCCESS, SERVICE_NAME, request, curvesAsList, url);
+  }
+
+  @Override
+  Response<RequestDataSiteClass, List<ResponseData<ResponseMetadataHazard>>> processCurvesSiteClass(
+      RequestDataSiteClass request,
+      String url) {
+    WsUtils.checkValue(Key.LATITUDE, request.site.latitude);
+    WsUtils.checkValue(Key.LONGITUDE, request.site.longitude);
+    WsUtils.checkValue(Key.SITE_CLASS, request.siteClass);
+    var curves = netcdf().staticData(request.site, request.siteClass);
+    var curvesAsList = toList(request, curves);
+    return new Response<>(Status.SUCCESS, SERVICE_NAME, request, curvesAsList, url);
+  }
+
+  @Override
+  Response<?, ?> processRequest(HttpRequest<?> httpRequest, Query query, Service service) {
+    var site = Location.create(query.longitude, query.latitude);
+    var requestData = new RequestData(site);
+    var url = httpRequest.getUri().toString();
+
+    switch (service) {
+      case CURVES:
+        return processCurves(requestData, url);
+      case CURVES_BY_SITE_CLASS:
+        requestData = new RequestDataSiteClass(site, query.siteClass);
+        return processCurvesSiteClass((RequestDataSiteClass) requestData, url);
+      default:
+        throw new RuntimeException("Netcdf service [" + service + "] not found");
+    }
+  }
+
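+  // One response entry per IMT for a fixed site class.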
+  List<ResponseData<ResponseMetadataHazard>> toList(
+      RequestDataSiteClass request,
+      StaticDataHazardCurves curves) {
+    return curves.entrySet().stream()
+        .map(entry -> toResponseData(request, entry.getKey(), entry.getValue()))
+        .collect(Collectors.toList());
+  }
+
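+  // Outer list: one entry per site class; inner list: one entry per IMT.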
+  List<List<ResponseData<ResponseMetadataHazard>>> toList(
+      Location site,
+      StaticData<StaticDataHazardCurves> curves) {
+    return curves.entrySet().stream()
+        .map(entry -> {
+          var request = new RequestDataSiteClass(site, entry.getKey());
+          return toList(request, entry.getValue());
+        })
+        .collect(Collectors.toList());
+  }
+
+  ResponseData<ResponseMetadataHazard> toResponseData(
+      RequestDataSiteClass request,
+      Imt imt,
+      XySequence curves) {
+    var metadata =
+        new ResponseMetadataHazard(X_LABEL, Y_LABEL, request.site, request.siteClass, imt);
+    return new ResponseData<>(metadata, curves);
+  }
+
+  static class ResponseMetadataHazard extends ResponseMetadata {
+    final Imt imt;
+
+    ResponseMetadataHazard(
+        String xLabel,
+        String yLabel,
+        Location site,
+        NehrpSiteClass siteClass,
+        Imt imt) {
+      super(xLabel, yLabel, site, siteClass);
+      this.imt = imt;
+    }
+  }
+}
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/www/NetcdfWsUtils.java b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/www/NetcdfWsUtils.java
index fa3d3eb73ea9640ec9a8e565c730a484650b1bf2..be973c511e50b720dae084d523f25a59d5e50101 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/www/NetcdfWsUtils.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/www/NetcdfWsUtils.java
@@ -18,7 +18,6 @@ import gov.usgs.earthquake.nshmp.www.meta.Status;
 import io.micronaut.http.HttpResponse;
 
 public class NetcdfWsUtils {
-
   static final Gson GSON;
 
   private static final Logger LOGGER = Logger.getLogger(NetcdfWsUtils.class.getName());
diff --git a/src/main/resources/application.yml b/src/main/resources/application.yml
index db526f3a7c18686e5fe3f42abf3a45eada8fa589..448a7d535a1b583240dcdbd0dacac534c08ad192 100644
--- a/src/main/resources/application.yml
+++ b/src/main/resources/application.yml
@@ -11,5 +11,7 @@ micronaut:
         mapping: /**
 
 nshmp-ws-static:
-  netcdf-file: ${netcdf:src/main/resources/default.nc}
-  nshm-group: ${nshm:EXAMPLE}
+  # Hazard example
+  netcdf-file: ${netcdf:src/main/resources/hazard-example.nc}
+  # Ground motions example
+  # netcdf-file: ${netcdf:src/main/resources/rtsa-example.nc}
diff --git a/src/main/resources/default.nc b/src/main/resources/hazard-example.nc
similarity index 99%
rename from src/main/resources/default.nc
rename to src/main/resources/hazard-example.nc
index 3199b654b5f9a1679f668787db3810870b0a2769..3820079c56974cd1881312454536edb277ad5af2 100644
Binary files a/src/main/resources/default.nc and b/src/main/resources/hazard-example.nc differ
diff --git a/src/main/resources/rtsa-example.nc b/src/main/resources/rtsa-example.nc
new file mode 100644
index 0000000000000000000000000000000000000000..f8cce277c3d44c040806c416d8b3d5a5e903c99d
Binary files /dev/null and b/src/main/resources/rtsa-example.nc differ
diff --git a/src/test/java/gov/usgs/earthquake/nshmp/netcdf/NshmNetcdfReaderTests.java b/src/test/java/gov/usgs/earthquake/nshmp/netcdf/NshmNetcdfReaderTests.java
deleted file mode 100644
index 0148c8cce851f21547d86835b382db73b7e529b7..0000000000000000000000000000000000000000
--- a/src/test/java/gov/usgs/earthquake/nshmp/netcdf/NshmNetcdfReaderTests.java
+++ /dev/null
@@ -1,295 +0,0 @@
-package gov.usgs.earthquake.nshmp.netcdf;
-
-import static com.google.common.base.Preconditions.checkState;
-import static org.junit.jupiter.api.Assertions.assertArrayEquals;
-import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.junit.jupiter.api.Assertions.assertThrows;
-import static org.junit.jupiter.api.Assertions.assertTrue;
-
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.util.Collections;
-import java.util.EnumMap;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.stream.Stream;
-
-import org.junit.jupiter.api.Test;
-import org.junit.jupiter.params.ParameterizedTest;
-import org.junit.jupiter.params.provider.Arguments;
-import org.junit.jupiter.params.provider.MethodSource;
-
-import com.google.common.io.Resources;
-
-import gov.usgs.earthquake.nshmp.data.XySequence;
-import gov.usgs.earthquake.nshmp.geo.BorderType;
-import gov.usgs.earthquake.nshmp.geo.Location;
-import gov.usgs.earthquake.nshmp.geo.LocationList;
-import gov.usgs.earthquake.nshmp.geo.Region;
-import gov.usgs.earthquake.nshmp.geo.Regions;
-import gov.usgs.earthquake.nshmp.geo.json.Feature;
-import gov.usgs.earthquake.nshmp.geo.json.FeatureCollection;
-import gov.usgs.earthquake.nshmp.geo.json.GeoJson;
-import gov.usgs.earthquake.nshmp.geo.json.Properties;
-import gov.usgs.earthquake.nshmp.gmm.Imt;
-import gov.usgs.earthquake.nshmp.gmm.NehrpSiteClass;
-import gov.usgs.earthquake.nshmp.netcdf.reader.BoundingHazards;
-import gov.usgs.earthquake.nshmp.netcdf.reader.StaticHazard;
-import gov.usgs.earthquake.nshmp.netcdf.reader.StaticHazards;
-
-public class NshmNetcdfReaderTests {
-
-  static final String CONUS_TEST_FILE = "nshmp-conus-test-fv.0.3.nc";
-  static final Path NETCDF_PATH = Paths.get(Resources.getResource(CONUS_TEST_FILE).getPath());
-
-  static final String TEST_INVALID_NC_FILE = "invalid-netcdf-file.nc";
-  static final Path NETCDF_INVALID_FILE = Paths.get(
-      Resources.getResource(TEST_INVALID_NC_FILE).getPath());
-
-  static final String CONUS_TEST_MAP_FILE = "map-netcdf-test-0p05.geojson";
-  static final Path CONUS_TEST_MAP_PATH =
-      Paths.get(Resources.getResource(CONUS_TEST_MAP_FILE).getPath());
-
-  // difference tolerance, until we can incorporate precision into
-  // NshmNetcdfReader
-  public static final double IML_TOL = 1e-6;
-  static final double HAZ_TOL = 1e-8;
-
-  static final Location TARGET_LOCATION = Location.create(-105.234, 39.213);
-
-  public static final double[] EXPECTED_LONGITUDES =
-      new double[] { -105.3, -105.25, -105.2, -105.15 };
-  public static final double[] EXPECTED_LATITUDES = new double[] { 39.15, 39.2, 39.25, 39.3 };
-
-  public static final Region EXPECTED_REGION;
-
-  static final LocationList BOUNDING_LOCATIONS = LocationList.builder()
-      .add(EXPECTED_LONGITUDES[1], EXPECTED_LATITUDES[1])
-      .add(EXPECTED_LONGITUDES[1], EXPECTED_LATITUDES[2])
-      .add(EXPECTED_LONGITUDES[2], EXPECTED_LATITUDES[2])
-      .add(EXPECTED_LONGITUDES[2], EXPECTED_LATITUDES[1])
-      .build();
-
-  static final LocationList LOCATIONS = LocationList.builder()
-      .addAll(BOUNDING_LOCATIONS)
-      .add(TARGET_LOCATION)
-      .build();
-
-  static final List<double[]> XS = List.of(
-      new double[] { 0.00233, 0.0265, 0.302, 3.44 },
-      new double[] { 0.00333, 0.0379, 0.432, 4.92 });
-
-  static final List<double[]> HAZARDS = List.of( // [idxSc,idxImt,idxLon,idxLat]
-      // lower left bounding hazard
-      new double[] { 4.387208E-02, 4.250093E-03, 1.192809E-04, 2.201278E-07 }, // [0,0,1,1]
-      new double[] { 6.567874E-02, 5.199003E-03, 6.540930E-05, 6.705638E-08 }, // [0,1,1,1]
-      new double[] { 4.127881E-02, 3.905505E-03, 1.221450E-04, 3.597946E-07 }, // [1,0,1,1]
-      new double[] { 5.262404E-02, 3.470381E-03, 4.079298E-05, 4.619109E-08 }, // [1,1,1,1]
-      // upper left bounding hazard
-      new double[] { 4.388691E-02, 4.249376E-03, 1.185702E-04, 2.198896E-07 }, // [0,0,1,2]
-      new double[] { 6.564134E-02, 5.175665E-03, 6.480199E-05, 6.684925E-08 }, // [0,1,1,2]
-      new double[] { 4.130831E-02, 3.907337E-03, 1.214354E-04, 3.593860E-07 }, // [1,0,1,2]
-      new double[] { 5.258384E-02, 3.454936E-03, 4.047655E-05, 4.606202E-08 }, // [1,1,1,2]
-      // upper right bounding hazard
-      new double[] { 4.346677E-02, 4.125130E-03, 1.135500E-04, 2.171376E-07 }, // [0,0,2,2]
-      new double[] { 6.530787E-02, 5.078100E-03, 6.258566E-05, 6.466215E-08 }, // [0,1,2,2]
-      new double[] { 4.089805E-02, 3.789999E-03, 1.163111E-04, 3.544692E-07 }, // [1,0,2,2]
-      new double[] { 5.224910E-02, 3.379782E-03, 3.912059E-05, 4.462088E-08 }, // [1,1,2,2]
-      // lower right bounding hazard
-      new double[] { 4.344459E-02, 4.124759E-03, 1.141019E-04, 2.173907E-07 }, // [0,0,2,1]
-      new double[] { 6.532912E-02, 5.098603E-03, 6.312435E-05, 6.484549E-08 }, // [0,1,2,1]
-      new double[] { 4.086227E-02, 3.787166E-03, 1.168667E-04, 3.549025E-07 }, // [1,0,2,1]
-      new double[] { 5.227672E-02, 3.393233E-03, 3.939733E-05, 4.473717E-08 }, // [1,1,2,1]
-      // target site hazard
-      new double[] { 4.373975E-02, 4.209890E-03, 1.174520E-04, 2.191888E-07 }, // [0,0,t,t]
-      new double[] { 6.555848E-02, 5.161043E-03, 6.452592E-05, 6.629702E-08 }, // [0,1,t,t]
-      new double[] { 4.115371E-02, 3.868196E-03, 1.202843E-04, 3.581208E-07 }, // [1,0,t,t]
-      new double[] { 5.250349E-02, 3.441844E-03, 4.026740E-05, 4.569334E-08 });// [1,1,t,t]
-
-  public static final List<NehrpSiteClass> SITE_CLASSES =
-      List.of(NehrpSiteClass.CD, NehrpSiteClass.C);
-  public static final List<Imt> IMTS = List.of(Imt.PGA, Imt.SA0P4);
-
-  static final int TARGET_LOWER_LEFT_LONGITUDE_IDX = 1;
-  static final int TARGET_LOWER_LEFT_LATITUDE_IDX = 1;
-
-  static final double TARGET_LONGITUDE_FRAC;
-  static final double TARGET_LATITUDE_FRAC;
-
-  // public static Map<NehrpSiteClass, Map<Imt, double[]>> IMLS = new
-  // HashMap<>();
-  public static Map<Imt, double[]> IMLS = new HashMap<>();
-
-  static BoundingHazards BOUNDING_HAZARDS;
-
-  public static final NshmNetcdfReader NETCDF =
-      new NshmNetcdfReader(NETCDF_PATH);
-
-  static {
-    var builder = BoundingHazards.builder();
-
-    var iHaz = 0;
-    for (var location : LOCATIONS) {
-      var siteClassMap = StaticHazards.builder();
-
-      for (var siteClass : SITE_CLASSES) {
-        var imtMap = StaticHazard.builder();
-
-        for (var iImt = 0; iImt < IMTS.size(); iImt++) {
-          var imt = IMTS.get(iImt);
-          var xy = XySequence.create(XS.get(iImt), HAZARDS.get(iHaz++));
-          imtMap.put(imt, xy);
-        }
-
-        siteClassMap.put(siteClass, imtMap.build());
-      }
-
-      builder.put(location, siteClassMap.build());
-    }
-
-    BOUNDING_HAZARDS = builder.build();
-
-    // Extract test region from CONUS_TEST_MAP_FILE
-    FeatureCollection dataRegions = GeoJson.from(CONUS_TEST_MAP_PATH).toFeatureCollection();
-    List<Feature> features = dataRegions.features();
-
-    // From {@code Sites.createSiteRegion()}
-    checkState(features.size() <= 2, "Only 2 polygon features may be defined");
-    int mapRegionIndex = 0;
-    if (features.size() > 1) {
-      // don't need to process the rectangular map extents, if present
-      mapRegionIndex = 1;
-    }
-    Feature sitesPoly = features.get(mapRegionIndex);
-    LocationList sitesPolyBorder = sitesPoly.asPolygonBorder();
-    Properties properties = sitesPoly.properties();
-    // get region name - either "title" or "name" property???
-    String mapName =
-        properties.getString("title").orElse(properties.getString("name").orElse("Unnamed Map"));
-    EXPECTED_REGION = Regions.create(mapName, sitesPolyBorder, BorderType.MERCATOR_LINEAR);
-
-    var imls = new EnumMap<Imt, double[]>(Imt.class);
-    for (var i = 0; i < IMTS.size(); i++) {
-      imls.put(IMTS.get(i), XS.get(i));
-    }
-    IMLS = Collections.unmodifiableMap(imls);
-
-    // calculate interpolation fraction for longitude and latitude
-    int i = TARGET_LOWER_LEFT_LONGITUDE_IDX;
-    var locationA = LOCATIONS.get(i);
-    var locationB = LOCATIONS.get(i + 1);
-    TARGET_LONGITUDE_FRAC = (TARGET_LOCATION.longitude - locationA.longitude) /
-        (locationB.longitude - locationA.longitude);
-
-    i = TARGET_LOWER_LEFT_LATITUDE_IDX;
-    TARGET_LATITUDE_FRAC = (TARGET_LOCATION.latitude - locationA.latitude) /
-        (locationB.latitude - locationA.latitude);
-  }
-
-  @Test
-  final void pathTests() {
-    assertEquals(NETCDF_PATH, NETCDF.path());
-    assertTrue(NETCDF_PATH.equals(NETCDF.path()));
-  }
-
-  @Test
-  final void boundingHazardsTests() {
-    var boundingHazards = NETCDF.boundingHazards(TARGET_LOCATION);
-
-    assertEquals(BOUNDING_HAZARDS.size(), boundingHazards.size());
-
-    for (var location : LOCATIONS) {
-      assertTrue(boundingHazards.containsKey(location));
-      assertTrue(BOUNDING_HAZARDS.containsKey(location));
-
-      var expected = BOUNDING_HAZARDS.get(location);
-      var actual = boundingHazards.get(location);
-      testHazards(expected, actual);
-    }
-  }
-
-  @ParameterizedTest(name = "{index} ==> Site: {0}")
-  @MethodSource("byLocation")
-  final void hazardTests(Location site) {
-    var expected = BOUNDING_HAZARDS.get(site);
-    var actual = NETCDF.hazard(site);
-    testHazards(expected, actual);
-  }
-
-  @ParameterizedTest(name = "{index} ==> Site: {0}, {1}")
-  @MethodSource("bySiteClass")
-  final void hazardWithSiteClassTests(Location site, NehrpSiteClass siteClass) {
-    var expected = BOUNDING_HAZARDS.get(site).get(siteClass);
-    var actual = NETCDF.hazard(site, siteClass);
-    testHazard(expected, actual);
-  }
-
-  @ParameterizedTest(name = "{index} ==> {0}, {1}, {2}")
-  @MethodSource("bySiteClassImt")
-  final void hazardWithSiteClassImtTests(Location site, NehrpSiteClass siteClass, Imt imt) {
-    var expected = BOUNDING_HAZARDS.get(site).get(siteClass).get(imt);
-    var actual = NETCDF.hazard(site, siteClass, imt);
-    testSequence(expected, actual);
-  }
-
-  @Test
-  final void invalidNetcdfFileTest() {
-    assertThrows(IllegalArgumentException.class, () -> {
-      new NshmNetcdfReader(Paths.get("fileDoesNotExist"));
-    });
-
-    assertThrows(RuntimeException.class, () -> {
-      new NshmNetcdfReader(NETCDF_INVALID_FILE);
-    });
-  }
-
-  private static Stream<Location> byLocation() {
-    return LOCATIONS.stream();
-  }
-
-  private static Stream<? extends Arguments> bySiteClass() {
-    return byLocation().flatMap(location -> {
-      return SITE_CLASSES.stream().map(siteClass -> new Object[] { location, siteClass });
-    }).map(Arguments::of);
-  }
-
-  private static Stream<? extends Arguments> bySiteClassImt() {
-    return bySiteClass().map(args -> args.get())
-        .flatMap(obj -> IMTS.stream().map(imt -> new Object[] { obj[0], obj[1], imt }))
-        .map(Arguments::of);
-  }
-
-  private void testHazards(
-      StaticHazards expected,
-      StaticHazards actual) {
-    for (var siteEntry : expected.entrySet()) {
-      var siteClass = siteEntry.getKey();
-      assertTrue(actual.containsKey(siteClass));
-      assertTrue(expected.containsKey(siteClass));
-      testHazard(expected.get(siteClass), actual.get(siteClass));
-    }
-  }
-
-  private void testHazard(StaticHazard expected, StaticHazard actual) {
-    for (var imtEntry : expected.entrySet()) {
-      var imt = imtEntry.getKey();
-      assertTrue(actual.containsKey(imt));
-      assertTrue(expected.containsKey(imt));
-      testSequence(expected.get(imt), actual.get(imt));
-    }
-  }
-
-  private void testSequence(XySequence expected, XySequence actual) {
-    assertArrayEquals(
-        expected.xValues().toArray(),
-        actual.xValues().toArray(),
-        IML_TOL);
-
-    assertArrayEquals(
-        expected.yValues().toArray(),
-        actual.yValues().toArray(),
-        HAZ_TOL);
-  }
-
-}
diff --git a/src/test/java/gov/usgs/earthquake/nshmp/netcdf/reader/BoundingHazardsTests.java b/src/test/java/gov/usgs/earthquake/nshmp/netcdf/reader/BoundingHazardsTests.java
deleted file mode 100644
index ea5864ed9e6bea5e774a517fad767b5ac8b744ec..0000000000000000000000000000000000000000
--- a/src/test/java/gov/usgs/earthquake/nshmp/netcdf/reader/BoundingHazardsTests.java
+++ /dev/null
@@ -1,17 +0,0 @@
-package gov.usgs.earthquake.nshmp.netcdf.reader;
-
-import static org.junit.jupiter.api.Assertions.assertThrows;
-
-import org.junit.jupiter.api.Test;
-
-class BoundingHazardsTests {
-
-  @Test
-  final void builderTest() {
-    // calling build() on empty builder should fail
-    assertThrows(IllegalStateException.class, () -> {
-      var boundingHazards = BoundingHazards.builder().build();
-    });
-  }
-
-}
diff --git a/src/test/java/gov/usgs/earthquake/nshmp/netcdf/reader/NetcdfCoordinatesTest.java b/src/test/java/gov/usgs/earthquake/nshmp/netcdf/reader/NetcdfCoordinatesTest.java
deleted file mode 100644
index 0a07f175997bb5b86a60ec43bf8d2cd8fe0b72ec..0000000000000000000000000000000000000000
--- a/src/test/java/gov/usgs/earthquake/nshmp/netcdf/reader/NetcdfCoordinatesTest.java
+++ /dev/null
@@ -1,59 +0,0 @@
-package gov.usgs.earthquake.nshmp.netcdf.reader;
-
-import static org.junit.jupiter.api.Assertions.assertArrayEquals;
-import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.junit.jupiter.api.Assertions.assertTrue;
-
-import org.junit.jupiter.api.Test;
-
-import gov.usgs.earthquake.nshmp.netcdf.NshmNetcdfReaderTests;
-
-class NetcdfCoordinatesTest {
-
-  @Test
-  final void coordinatesTests() {
-    var coords = NshmNetcdfReaderTests.NETCDF.coordinates();
-
-    // Check locations
-    assertArrayEquals(NshmNetcdfReaderTests.EXPECTED_LATITUDES, coords.latitudes());
-    assertArrayEquals(NshmNetcdfReaderTests.EXPECTED_LONGITUDES, coords.longitudes());
-
-    // Check IMTs
-    assertEquals(NshmNetcdfReaderTests.IMTS, coords.imts());
-    assertEquals(NshmNetcdfReaderTests.IMTS.size(), coords.imts().size());
-
-    // Check site classes
-    assertEquals(NshmNetcdfReaderTests.SITE_CLASSES, coords.siteClasses());
-
-    // Check region
-    var expectedRegion = NshmNetcdfReaderTests.EXPECTED_REGION;
-    var expectedBorder = expectedRegion.border();
-
-    var actualRegion = coords.region();
-    var actualBorder = actualRegion.border();
-
-    assertEquals(expectedBorder.size(), actualBorder.size());
-    assertEquals(expectedBorder.bounds().max, actualBorder.bounds().max);
-    assertEquals(expectedBorder.bounds().min, actualBorder.bounds().min);
-
-    for (var i = 0; i < expectedBorder.size(); i++) {
-      assertEquals(expectedBorder.get(i), actualBorder.get(i));
-    }
-
-    // Check IMLs
-    var actualImls = coords.imls();
-    assertEquals(NshmNetcdfReaderTests.IMLS.size(), actualImls.size());
-
-    for (var expectedImlEntry : NshmNetcdfReaderTests.IMLS.entrySet()) {
-      var expectedImt = expectedImlEntry.getKey();
-      assertTrue(actualImls.containsKey(expectedImt));
-      assertEquals(expectedImlEntry.getValue().length, actualImls.get(expectedImt).length);
-
-      var expectedValue = expectedImlEntry.getValue();
-      var actualValue = actualImls.get(expectedImt);
-
-      assertArrayEquals(expectedValue, actualValue, NshmNetcdfReaderTests.IML_TOL);
-    }
-  }
-
-}
diff --git a/src/test/java/gov/usgs/earthquake/nshmp/netcdf/reader/NetcdfUtilsTests.java b/src/test/java/gov/usgs/earthquake/nshmp/netcdf/reader/NetcdfUtilsTests.java
deleted file mode 100644
index 05b33abaf0ed62a5a56b6ff395a09301c2a53a8e..0000000000000000000000000000000000000000
--- a/src/test/java/gov/usgs/earthquake/nshmp/netcdf/reader/NetcdfUtilsTests.java
+++ /dev/null
@@ -1,238 +0,0 @@
-package gov.usgs.earthquake.nshmp.netcdf.reader;
-
-import static org.junit.jupiter.api.Assertions.assertArrayEquals;
-import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
-import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.junit.jupiter.api.Assertions.assertThrows;
-import static org.junit.jupiter.api.Assertions.assertTrue;
-
-import java.util.List;
-
-import org.junit.jupiter.api.Test;
-
-import gov.usgs.earthquake.nshmp.data.XySequence;
-import gov.usgs.earthquake.nshmp.geo.LocationList;
-import gov.usgs.earthquake.nshmp.gmm.Imt;
-import gov.usgs.earthquake.nshmp.gmm.NehrpSiteClass;
-
-class NetcdfUtilsTests {
-
-  private static final double[] LONGITUDES = new double[] {
-      -106.00, -105.95, -105.90, -105.85, -105.80, -105.75, -105.70, -105.65, -105.60, -105.55,
-      -105.50, -105.45, -105.40, -105.35, -105.30, -105.25, -105.20, -105.15, -105.10, -105.05,
-      -105.00 };
-  private static final double[] LATITUDES = new double[] {
-      39.05, 39.06, 39.07, 39.08, 39.09, 39.10, 39.11, 39.12,
-      39.13, 39.14, 39.15, 39.16, 39.17, 39.18, 39.19, 39.20, 39.21, 39.22, 39.23, 39.24, 39.25,
-      39.26, 39.27, 39.28, 39.29, 39.30, 39.31, 39.32, 39.33, 39.34, 39.35 };
-
-  private static final double[] BORDER_LONGITUDES = new double[] { 5.0, 6.0 };
-  private static final double[] BORDER_LATITUDES = new double[] { 8.0, 9.0 };
-  private static final LocationList BORDER_LOCATIONS;
-
-  private static final double TOL = 1e-7;
-
-  static StaticHazards mapHaz0;
-  static StaticHazards mapHaz1;
-  static StaticHazards mapHazTarget;
-  static StaticHazards mapDiffImtSize;
-  static StaticHazards mapDiffScSize;
-  static StaticHazards mapDiffImlValue;
-
-  private static final int N_IML = 3;
-  private static final double FRAC = 0.5;
-
-  static {
-
-    BORDER_LOCATIONS = LocationList.builder()
-        .add(BORDER_LONGITUDES[0], BORDER_LATITUDES[0])
-        .add(BORDER_LONGITUDES[0], BORDER_LATITUDES[1])
-        .add(BORDER_LONGITUDES[1], BORDER_LATITUDES[1])
-        .add(BORDER_LONGITUDES[1], BORDER_LATITUDES[0])
-        .add(BORDER_LONGITUDES[0], BORDER_LATITUDES[0])
-        .build();
-
-    var siteClasses = List.of(NehrpSiteClass.B, NehrpSiteClass.C, NehrpSiteClass.D);
-    var imts = List.of(Imt.PGA, Imt.SA0P1, Imt.SA1P5);
-    var imlValues = new double[] { 0.1, 0.5, 0.75 };
-
-    var mapHaz0Builder = StaticHazards.builder();
-    var mapHaz1Builder = StaticHazards.builder();
-    var mapHazTargetBuilder = StaticHazards.builder();
-    var mapDiffImlValueBuilder = StaticHazards.builder();
-
-    for (NehrpSiteClass sc : siteClasses) {
-      var imtMap0 = StaticHazard.builder();
-      var imtMap1 = StaticHazard.builder();
-      var imtMapTarget = StaticHazard.builder();
-      var imtMapDiffIml = StaticHazard.builder();
-
-      for (Imt imt : imts) {
-        double[] zeros = new double[N_IML];
-        double[] ones = new double[N_IML];
-        double[] half = new double[N_IML];
-        for (int i = 0; i < N_IML; i++) {
-          ones[i] = 1.0;
-          half[i] = FRAC;
-        }
-        imtMap0.put(imt, XySequence.create(imlValues, zeros));
-        imtMap1.put(imt, XySequence.create(imlValues, ones));
-        imtMapTarget.put(imt, XySequence.create(imlValues, half));
-
-        // insert different Iml value
-        if (sc == siteClasses.get(siteClasses.size() - 1) && imt == imts.get(imts.size() - 1)) {
-          double[] imlValuesAlt = imlValues.clone();
-          imlValuesAlt[imlValuesAlt.length - 1] += 0.1;
-          imtMapDiffIml.put(imt, XySequence.create(imlValuesAlt, ones));
-        } else {
-          imtMapDiffIml.put(imt, XySequence.create(imlValues, ones));
-        }
-      }
-      mapHaz0Builder.put(sc, imtMap0.build());
-      mapHaz1Builder.put(sc, imtMap1.build());
-      mapHazTargetBuilder.put(sc, imtMapTarget.build());
-      mapDiffImlValueBuilder.put(sc, imtMapDiffIml.build());
-    }
-
-    mapHaz0 = mapHaz0Builder.build();
-    mapHaz1 = mapHaz1Builder.build();
-    mapHazTarget = mapHazTargetBuilder.build();
-    mapDiffImlValue = mapDiffImlValueBuilder.build();
-
-    // Add extra site class
-    var mapDiffScSizeBuilder = StaticHazards.builder();
-    mapHaz0.forEach((key, value) -> mapDiffScSizeBuilder.put(key, value));
-    mapDiffScSizeBuilder.put(NehrpSiteClass.A, mapHaz0.get(siteClasses.get(0)));
-    mapDiffScSize = mapDiffScSizeBuilder.build();
-
-    // Add extra IMT
-    var mapDiffImtSizeBuilder = StaticHazards.builder();
-    mapHaz0.forEach((siteClass, staticHazard) -> {
-      var builder = StaticHazard.builder();
-      staticHazard.forEach((imt, xy) -> {
-        builder.put(imt, xy);
-      });
-      builder.put(Imt.SA10P0, XySequence.create(imlValues, new double[N_IML]));
-      mapDiffImtSizeBuilder.put(siteClass, builder.build());
-    });
-
-    mapDiffImtSize = mapDiffImtSizeBuilder.build();
-  }
-
-  @Test
-  final void buildBorderTest() {
-    assertEquals(BORDER_LOCATIONS, NetcdfUtils.buildBorder(BORDER_LONGITUDES, BORDER_LATITUDES));
-  }
-
-  @Test
-  final void getIdxLTEQTest() {
-    // target is out of range, expect IAE
-    assertThrows(IllegalArgumentException.class, () -> {
-      NetcdfUtils.getIdxLTEQ(LONGITUDES, -100.0);
-    });
-
-    for (int i = 0; i < LONGITUDES.length - 1; i++) {
-      assertEquals(i, NetcdfUtils.getIdxLTEQ(LONGITUDES, LONGITUDES[i]));
-      assertEquals(i, NetcdfUtils.getIdxLTEQ(LONGITUDES, LONGITUDES[i] + 0.025));
-    }
-
-    assertEquals(
-        LONGITUDES.length - 2,
-        NetcdfUtils.getIdxLTEQ(LONGITUDES, LONGITUDES[LONGITUDES.length - 1]));
-
-    for (int i = 0; i < LATITUDES.length - 1; i++) {
-      assertEquals(i, NetcdfUtils.getIdxLTEQ(LATITUDES, LATITUDES[i]));
-      assertEquals(i, NetcdfUtils.getIdxLTEQ(LATITUDES, LATITUDES[i] + 0.005));
-    }
-
-    // if target is equal to last element of array, return index of second to
-    // last element in array
-    assertEquals(LATITUDES.length - 2,
-        NetcdfUtils.getIdxLTEQ(LATITUDES, LATITUDES[LATITUDES.length - 1]));
-
-  }
-
-  @Test
-  final void calcFracTest() {
-    double bottom = 5.0;
-    double top = bottom + 1.0;
-    double frac = 0.36;
-    assertEquals(0.0, NetcdfUtils.calcFrac(bottom, top, bottom));
-    assertEquals(1.0, NetcdfUtils.calcFrac(bottom, top, top));
-    assertEquals(0.0, NetcdfUtils.calcFrac(
-        bottom,
-        top,
-        bottom + NetcdfUtils.LOCATION_TOLERANCE * 0.9));
-    assertEquals(1.0, NetcdfUtils.calcFrac(
-        bottom,
-        top,
-        top - NetcdfUtils.LOCATION_TOLERANCE * 0.9));
-    assertEquals(frac, NetcdfUtils.calcFrac(bottom, top, bottom + frac), 1e-4);
-  }
-
-  @Test
-  final void calcGridFracTest() {
-    double f = 0.13;
-    int i = 4;
-    assertEquals(f,
-        NetcdfUtils.calcGridFrac(LONGITUDES, i,
-            (LONGITUDES[i] + f * (LONGITUDES[i + 1] - LONGITUDES[i]))),
-        TOL);
-
-    assertEquals(0, NetcdfUtils.calcGridFrac(LONGITUDES, i, LONGITUDES[i]));
-    assertEquals(1, NetcdfUtils.calcGridFrac(LONGITUDES, i, LONGITUDES[i + 1]));
-  }
-
-  @Test
-  final void linearInterpolateTest() {
-    var actual = NetcdfUtils.linearInterpolate(mapHaz0, mapHaz1, FRAC);
-    assertTrue(mapHazTarget.keySet().containsAll(actual.keySet()));
-
-    mapHazTarget.forEach((siteClass, staticHazard) -> {
-      assertTrue(staticHazard.keySet().containsAll(actual.get(siteClass).keySet()));
-      staticHazard.forEach((imt, xy) -> {
-        var actualXy = actual.get(siteClass).get(imt);
-        assertArrayEquals(xy.xValues().toArray(), actualXy.xValues().toArray(), 0);
-      });
-    });
-
-    // attempt to interpolate maps of difference sizes
-    assertThrows(IllegalStateException.class, () -> {
-      NetcdfUtils.linearInterpolate(mapHaz0, mapDiffImtSize, FRAC);
-    });
-
-    assertThrows(IllegalStateException.class, () -> {
-      NetcdfUtils.linearInterpolate(mapHaz0, mapDiffScSize, FRAC);
-    });
-  }
-
-  @Test
-  final void checkMapConsistencyTests() {
-    assertDoesNotThrow(() -> {
-      NetcdfUtils.checkBoundingHazard(mapHaz0, mapHaz0);
-    });
-
-    assertDoesNotThrow(() -> {
-      NetcdfUtils.checkBoundingHazard(mapHaz0, mapHaz1);
-    });
-
-    assertDoesNotThrow(() -> {
-      NetcdfUtils.checkBoundingHazard(mapHaz0, mapHazTarget);
-    });
-
-    // compare maps with different size at first level (SiteClass)
-    assertThrows(IllegalStateException.class, () -> {
-      NetcdfUtils.checkBoundingHazard(mapHaz0, mapDiffScSize);
-    });
-
-    // compare maps with different size at second level (Imt)
-    assertThrows(IllegalStateException.class, () -> {
-      NetcdfUtils.checkBoundingHazard(mapHaz0, mapDiffImtSize);
-    });
-
-    // compare maps with a single different Iml value
-    assertThrows(IllegalStateException.class, () -> {
-      NetcdfUtils.checkBoundingHazard(mapHaz0, mapDiffImlValue);
-    });
-  }
-}
diff --git a/src/test/java/gov/usgs/earthquake/nshmp/netcdf/reader/StaticHazardTests.java b/src/test/java/gov/usgs/earthquake/nshmp/netcdf/reader/StaticHazardTests.java
deleted file mode 100644
index aaba2e59cb25ec881f44d3b00fe533ee66ce368b..0000000000000000000000000000000000000000
--- a/src/test/java/gov/usgs/earthquake/nshmp/netcdf/reader/StaticHazardTests.java
+++ /dev/null
@@ -1,17 +0,0 @@
-package gov.usgs.earthquake.nshmp.netcdf.reader;
-
-import static org.junit.jupiter.api.Assertions.assertThrows;
-
-import org.junit.jupiter.api.Test;
-
-class StaticHazardTests {
-
-  @Test
-  final void builderTest() {
-    // calling build() on empty builder should fail
-    assertThrows(IllegalStateException.class, () -> {
-      var staticHazard = StaticHazard.builder().build();
-    });
-  }
-
-}
diff --git a/src/test/java/gov/usgs/earthquake/nshmp/netcdf/reader/StaticHazardsTests.java b/src/test/java/gov/usgs/earthquake/nshmp/netcdf/reader/StaticHazardsTests.java
deleted file mode 100644
index 7bedfabc839ab4112ca93dcfcc38ff0f227d3bc6..0000000000000000000000000000000000000000
--- a/src/test/java/gov/usgs/earthquake/nshmp/netcdf/reader/StaticHazardsTests.java
+++ /dev/null
@@ -1,17 +0,0 @@
-package gov.usgs.earthquake.nshmp.netcdf.reader;
-
-import static org.junit.jupiter.api.Assertions.assertThrows;
-
-import org.junit.jupiter.api.Test;
-
-class StaticHazardsTests {
-
-  @Test
-  final void builderTest() {
-    // calling build() on empty builder should fail
-    assertThrows(IllegalStateException.class, () -> {
-      var staticHazards = StaticHazards.builder().build();
-    });
-  }
-
-}
diff --git a/src/test/resources/nshmp-conus-test-fv.1.0.nc b/src/test/resources/nshmp-conus-test-fv.1.0.nc
new file mode 100644
index 0000000000000000000000000000000000000000..ac2e5d506110070fbba0ac34edb9748d38df4c15
Binary files /dev/null and b/src/test/resources/nshmp-conus-test-fv.1.0.nc differ