diff --git a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/NshmNetcdfReader.java b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/NshmNetcdfReader.java
index f0c55623f5c08610999ddb6d19cd6b034369977e..a324ab65d6a86902f0ee52e9c338515a058e6abb 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/NshmNetcdfReader.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/NshmNetcdfReader.java
@@ -5,7 +5,6 @@ import static com.google.common.base.Preconditions.checkArgument;
 import java.io.IOException;
 import java.nio.file.Files;
 import java.nio.file.Path;
-import java.util.Map;
 import java.util.logging.Level;
 import java.util.logging.Logger;
 
@@ -13,7 +12,10 @@ import gov.usgs.earthquake.nshmp.data.XySequence;
 import gov.usgs.earthquake.nshmp.geo.Location;
 import gov.usgs.earthquake.nshmp.gmm.Imt;
 import gov.usgs.earthquake.nshmp.netcdf.reader.BoundingHazards;
+import gov.usgs.earthquake.nshmp.netcdf.reader.BoundingHazardsReader;
 import gov.usgs.earthquake.nshmp.netcdf.reader.NetcdfCoordinates;
+import gov.usgs.earthquake.nshmp.netcdf.reader.StaticHazard;
+import gov.usgs.earthquake.nshmp.netcdf.reader.StaticHazards;
 
 import ucar.nc2.dataset.NetcdfDataset;
 
@@ -98,8 +100,8 @@ public class NshmNetcdfReader {
    *
    * @param site The location to get bounding hazards
    */
-  public Map<Location, Map<SiteClass, Map<Imt, XySequence>>> boundingHazards(Location site) {
-    return BoundingHazards.boundingHazards(this, site);
+  public BoundingHazards boundingHazards(Location site) {
+    return BoundingHazardsReader.boundingHazards(this, site);
   }
 
   /**
@@ -108,7 +110,7 @@ public class NshmNetcdfReader {
    *
    * @param site The site to get the hazard curves
    */
-  public Map<SiteClass, Map<Imt, XySequence>> hazard(Location site) {
+  public StaticHazards hazard(Location site) {
     return boundingHazards(site).get(site);
   }
 
@@ -118,7 +120,7 @@ public class NshmNetcdfReader {
    * @param site The site to get hazard curves
    * @param siteClass The site class
    */
-  public Map<Imt, XySequence> hazard(Location site, SiteClass siteClass) {
+  public StaticHazard hazard(Location site, SiteClass siteClass) {
     checkArgument(
         coords.siteClasses().contains(siteClass),
         "Site class [" + siteClass + "] not supported");
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/BoundingHazards.java b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/BoundingHazards.java
index 2275031f372b4991393a564bd2ea2397ed761eb8..447eb749f47ca612e18205572bd479d1d3e29689 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/BoundingHazards.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/BoundingHazards.java
@@ -1,254 +1,75 @@
 package gov.usgs.earthquake.nshmp.netcdf.reader;
 
-import java.io.IOException;
-import java.util.EnumMap;
+import static com.google.common.base.Preconditions.checkArgument;
+import static com.google.common.base.Preconditions.checkState;
+
 import java.util.HashMap;
+import java.util.Iterator;
 import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
 
-import com.google.common.collect.Maps;
-
-import gov.usgs.earthquake.nshmp.data.XySequence;
 import gov.usgs.earthquake.nshmp.geo.Location;
-import gov.usgs.earthquake.nshmp.geo.LocationList;
-import gov.usgs.earthquake.nshmp.gmm.Imt;
-import gov.usgs.earthquake.nshmp.netcdf.NshmNetcdfReader;
-import gov.usgs.earthquake.nshmp.netcdf.SiteClass;
-import gov.usgs.earthquake.nshmp.netcdf.reader.NetcdfUtils.Key;
-
-import ucar.ma2.Array;
-import ucar.ma2.DataType;
-import ucar.ma2.InvalidRangeException;
-import ucar.nc2.Group;
-import ucar.nc2.Variable;
-import ucar.nc2.dataset.NetcdfDataset;
-
-/*
- * Container for gridded hazard curves at four closest grid points to target
- */
-public class BoundingHazards {
-
-  private final NshmNetcdfReader netcdf;
-  private final NetcdfCoordinates coords;
-  private Map<Location, Map<SiteClass, Map<Imt, XySequence>>> boundingHazards;
-  private LocationList boundingLocations;
-
-  BoundingHazards(NshmNetcdfReader netcdf, Location site) {
-    this.netcdf = netcdf;
-    this.coords = netcdf.coordinates();
-    coords.contains(site);
-    setBoundingHazards(site);
-  }
-
-  /**
-   * Returns the bounding hazards at four closet grid points to target.
-   *
-   * @param netcdf The {@code Netcdf}
-   * @param site The site to get bounding hazards
-   * @param baseGroup The Netcdf base group
-   */
-  public static Map<Location, Map<SiteClass, Map<Imt, XySequence>>> boundingHazards(
-      NshmNetcdfReader netcdf,
-      Location site) {
-    return new BoundingHazards(netcdf, site).boundingHazards;
-  }
 
-  LocationList boundingLocations() {
-    return LocationList.copyOf(boundingLocations);
-  }
-
-  private void setBoundingHazards(Location site) {
-    var longitudes = coords.longitudes();
-    var latitudes = coords.latitudes();
-
-    var idxLonLL = NetcdfUtils.getIdxLTEQ(longitudes, site.longitude);
-    var idxLatLL = NetcdfUtils.getIdxLTEQ(latitudes, site.latitude);
+public class BoundingHazards implements Iterable<Entry<Location, StaticHazards>> {
 
-    var lonLeft = longitudes[idxLonLL];
-    var lonRight = longitudes[idxLonLL + 1];
-    var latLower = latitudes[idxLatLL];
-    var latUpper = latitudes[idxLatLL + 1];
+  private final Map<Location, StaticHazards> boundingHazards;
 
-    boundingLocations = LocationList.builder()
-        .add(latLower, lonLeft)
-        .add(latUpper, lonLeft)
-        .add(latUpper, lonRight)
-        .add(latLower, lonRight)
-        .build();
-
-    boundingHazards = extractHazardsAt(idxLonLL, idxLatLL);
+  BoundingHazards(Map<Location, StaticHazards> boundingHazards) {
+    this.boundingHazards = boundingHazards;
+  }
 
-    var fracLon = NetcdfUtils.calcGridFrac(longitudes, idxLonLL, site.longitude);
-    var fracLat = NetcdfUtils.calcGridFrac(latitudes, idxLatLL, site.latitude);
-    boundingHazards.put(
-        site,
-        calcTargetHazards(fracLon, fracLat));
+  public Map<Location, StaticHazards> boundingHazards() {
+    return Map.copyOf(boundingHazards);
+  }
 
-    // validate boundingHazards
-    NetcdfUtils.checkBoundingHazards(boundingHazards, boundingLocations.first());
+  public boolean containsKey(Location location) {
+    return boundingHazards.containsKey(location);
   }
 
-  private Map<SiteClass, Map<Imt, XySequence>> calcTargetHazards(double fracLon, double fracLat) {
-    var westTarget = getTargetData(
-        boundingHazards.get(boundingLocations.get(0)),
-        boundingHazards.get(boundingLocations.get(1)), fracLat);
+  public Set<Entry<Location, StaticHazards>> entrySet() {
+    return boundingHazards.entrySet();
+  }
 
-    var eastTarget = getTargetData(
-        boundingHazards.get(boundingLocations.get(3)),
-        boundingHazards.get(boundingLocations.get(2)), fracLat);
+  public StaticHazards get(Location location) {
+    checkArgument(boundingHazards.containsKey(location), "Location [" + location + "] not found");
+    return boundingHazards.get(location);
+  }
 
-    return getTargetData(westTarget, eastTarget, fracLon);
+  public Set<Location> keySet() {
+    return boundingHazards.keySet();
   }
 
-  /*
-   * Return hazard curves for four closest grid points as a List of:
-   * Map<SiteClass, Map<Imt, XySequence(iml,hazard)>>
-   *
-   * List order is clockwise from lower left corner: LL, UL, UR, LR, [T] with an
-   * empty slot for the interpolated target hazards
-   */
-  private Map<Location, Map<SiteClass, Map<Imt, XySequence>>> extractHazardsAt(
-      int idxLonLL,
-      int idxLatLL) {
-    var boundingHazardMaps = new HashMap<Location, Map<SiteClass, Map<Imt, XySequence>>>(5);
-
-    try (NetcdfDataset ncd = NetcdfDataset.openDataset(netcdf.path().toString())) {
-      Group targetGroup = ncd.findGroup(netcdf.nshmGroup().baseGroup());
-
-      // TODO: rename variable in netCDF
-      Variable vHazards = targetGroup.findVariable(Key.AEPS);
-
-      // set up origin and shape arrays for reading hazard curves at four
-      // bounding
-      // (lon,lat) grid points
-      int[] origin = new int[] { 0, 0, idxLatLL, idxLonLL, 0 };
-      int[] shape = new int[] {
-          coords.siteClasses().size(),
-          coords.imts().size(),
-          2,
-          2,
-          coords.nIml()
-      };
-
-      // read data into Array for bounding grid points
-      var aHazards = vHazards.read(origin, shape);
-
-      // Array aHazards now has shape [nVs,nImt,2,2,nIml]
-      // ...so origin will now be [0,0,0,0,0] for LL grid point
-      // ...and shape of requested array is [nVs,nImt,1,1,nIml]
-
-      // Array aHazards is size 2 on the lon and lat dimensions
-      shape[2] = 1;
-      shape[3] = 1;
-
-      // TODO: XySequence instead of double[]
-
-      // Put data into hazard map for LL point
-      origin[2] = 0; // lat index position
-      origin[3] = 0; // lon index position
-      boundingHazardMaps.put(
-          boundingLocations.get(0),
-          mapHazardsFromArray(aHazards.section(origin, shape)));
-      // Put data into hazard map for UL point
-      origin[2] = 1; // lat index position
-      origin[3] = 0; // lon index position
-      boundingHazardMaps.put(
-          boundingLocations.get(1),
-          mapHazardsFromArray(aHazards.section(origin, shape)));
-      // Put data into hazard map for UR point
-      origin[2] = 1; // lat index position
-      origin[3] = 1; // lon index position
-      boundingHazardMaps.put(
-          boundingLocations.get(2),
-          mapHazardsFromArray(aHazards.section(origin, shape)));
-      // Put data into hazard map for LR point
-      origin[2] = 0; // lat index position
-      origin[3] = 1; // lon index position
-
-      boundingHazardMaps.put(
-          boundingLocations.get(3),
-          mapHazardsFromArray(aHazards.section(origin, shape)));
-
-    } catch (IOException | InvalidRangeException e) {
-      // shouldn't get here because the reader was initialized with a valid and
-      // existing netCDF file. Is the only way to trigger this error is to
-      // remove or corrupt the original netCDF file used to initialize the
-      // reader after initialization?
-      throw new RuntimeException("Could not read Netcdf file [" + netcdf.path() + "]");
-    }
+  public int size() {
+    return boundingHazards.size();
+  }
 
-    // interpolate hazards to target point
-    return boundingHazardMaps;
+  @Override
+  public Iterator<Entry<Location, StaticHazards>> iterator() {
+    return entrySet().iterator();
+  }
 
+  public static Builder builder() {
+    return new Builder();
   }
 
-  /*
-   * Read hazard curves from netCDF variable into map of hazards by SiteClass
-   * and Imt
-   *
-   * TODO: if target is on a grid point (or on a grid lat or lon), no need to
-   * read 4 bounding points ?
-   */
-  private Map<SiteClass, Map<Imt, XySequence>> mapHazardsFromArray(Array hazards) {
-    // hazards has had its lat and lon dimensions reduced (so it now is rank 3)
-    // hazards[SiteClass, Imt, Iml]
-
-    EnumMap<SiteClass, Map<Imt, XySequence>> vsImtHazardMap = Maps.newEnumMap(SiteClass.class);
-
-    for (int iSc = 0; iSc < coords.siteClasses().size(); iSc++) {
-      var sc = coords.siteClasses().get(iSc);
-
-      Map<Imt, XySequence> imtHazardMap = Maps.newEnumMap(Imt.class);
-      for (int iImt = 0; iImt < coords.imts().size(); iImt++) {
-        Imt imt = coords.imts().get(iImt);
-
-        // set origin and shape of double[] hazards to read
-        var origin = new int[] { iSc, iImt, 0 };
-        var shape = new int[] { 1, 1, coords.nIml() };
-
-        // try block needed for sectioning Array
-        try {
-          imtHazardMap.put(imt,
-              XySequence.create(
-                  coords.imls().get(sc).get(imt),
-                  (double[]) hazards.section(origin, shape).reduce()
-                      .get1DJavaArray(DataType.DOUBLE)));
-        } catch (InvalidRangeException e) {
-          throw new RuntimeException(e.getMessage());
-        }
-      }
-
-      vsImtHazardMap.put(sc, imtHazardMap);
+  public static class Builder {
+    Map<Location, StaticHazards> boundingHazards;
+
+    private Builder() {
+      boundingHazards = new HashMap<>();
     }
 
-    return Maps.immutableEnumMap(vsImtHazardMap);
-  }
+    public Builder put(Location location, StaticHazards staticHazards) {
+      boundingHazards.put(location, staticHazards);
+      return this;
+    }
 
-  /*
-   * Get data for target point
-   *
-   * @param d1 data at first point (p1)
-   *
-   * @param d2 data at second point (p2)
-   *
-   * @param frac fractional distance between p1 and p2 to target point
-   */
-  static Map<SiteClass, Map<Imt, XySequence>> getTargetData(
-      Map<SiteClass, Map<Imt, XySequence>> d1,
-      Map<SiteClass, Map<Imt, XySequence>> d2,
-      double frac) {
-    // do we need better checking here? or is it safe to assume that every
-    // Map<SiteClass, Map<Imt,double[]>> passed in is consistent?
-    NetcdfUtils.checkBoundingHazard(d1, d2);
-
-    if (frac == 0.0) {
-      // target is the same as d1
-      return d1;
-    } else if (frac == 1.0) {
-      // target is the same as d2
-      return d2;
-    } else {
-      return NetcdfUtils.linearInterpolate(d1, d2, frac);
+    public BoundingHazards build() {
+      checkState(!boundingHazards.isEmpty(), "Must add hazards");
+      return new BoundingHazards(boundingHazards);
     }
+
   }
 
 }
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/BoundingHazardsReader.java b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/BoundingHazardsReader.java
new file mode 100644
index 0000000000000000000000000000000000000000..c8f1b4ef7a08a28983434aa06739bc523e4977fb
--- /dev/null
+++ b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/BoundingHazardsReader.java
@@ -0,0 +1,212 @@
+package gov.usgs.earthquake.nshmp.netcdf.reader;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.stream.Collectors;
+
+import gov.usgs.earthquake.nshmp.data.XySequence;
+import gov.usgs.earthquake.nshmp.geo.Location;
+import gov.usgs.earthquake.nshmp.geo.LocationList;
+import gov.usgs.earthquake.nshmp.netcdf.NshmNetcdfReader;
+import gov.usgs.earthquake.nshmp.netcdf.reader.NetcdfUtils.Key;
+
+import ucar.ma2.Array;
+import ucar.ma2.DataType;
+import ucar.ma2.InvalidRangeException;
+import ucar.nc2.dataset.NetcdfDataset;
+
+/**
+ * Container for gridded hazard curves at four closest grid points to target
+ * 
+ * @author U.S. Geological Survey
+ */
+public class BoundingHazardsReader {
+
+  private final NshmNetcdfReader netcdf;
+  private final NetcdfCoordinates coords;
+  private BoundingHazards boundingHazards;
+  private List<BoundingLocation> boundingLocations = new ArrayList<>();
+
+  BoundingHazardsReader(NshmNetcdfReader netcdf, Location site) {
+    this.netcdf = netcdf;
+    this.coords = netcdf.coordinates();
+    coords.contains(site);
+    setBoundingHazards(site);
+  }
+
+  /**
+   * Returns the bounding hazards at four closest grid points to target.
+   *
+   * @param netcdf The {@code Netcdf}
+   * @param site The site to get bounding hazards
+   */
+  public static BoundingHazards boundingHazards(
+      NshmNetcdfReader netcdf,
+      Location site) {
+    return new BoundingHazardsReader(netcdf, site).boundingHazards;
+  }
+
+  LocationList boundingLocations() {
+    var locations = boundingLocations.stream()
+        .map(boundingLocation -> boundingLocation.location)
+        .collect(Collectors.toList());
+
+    return LocationList.copyOf(locations);
+  }
+
+  /**
+   * Get data for target point
+   *
+   * @param d1 data at first point (p1)
+   * @param d2 data at second point (p2)
+   * @param frac fractional distance between p1 and p2 to target point
+   */
+  static StaticHazards getTargetData(
+      StaticHazards d1,
+      StaticHazards d2,
+      double frac) {
+    NetcdfUtils.checkBoundingHazard(d1, d2);
+    return frac == 0.0 ? d1 : frac == 1.0 ? d2 : NetcdfUtils.linearInterpolate(d1, d2, frac);
+  }
+
+  private void setBoundingHazards(Location site) {
+    var longitudes = coords.longitudes();
+    var latitudes = coords.latitudes();
+
+    var idxLonLL = NetcdfUtils.getIdxLTEQ(longitudes, site.longitude);
+    var idxLatLL = NetcdfUtils.getIdxLTEQ(latitudes, site.latitude);
+
+    var lonLeft = longitudes[idxLonLL];
+    var lonRight = longitudes[idxLonLL + 1];
+    var latLower = latitudes[idxLatLL];
+    var latUpper = latitudes[idxLatLL + 1];
+
+    boundingLocations.add(new BoundingLocation(lonLeft, latLower, 0, 0));
+    boundingLocations.add(new BoundingLocation(lonLeft, latUpper, 0, 1));
+    boundingLocations.add(new BoundingLocation(lonRight, latUpper, 1, 1));
+    boundingLocations.add(new BoundingLocation(lonRight, latLower, 1, 0));
+
+    var boundingHazardsBuilder = extractHazardsAt(idxLonLL, idxLatLL);
+
+    var fracLon = NetcdfUtils.calcGridFrac(longitudes, idxLonLL, site.longitude);
+    var fracLat = NetcdfUtils.calcGridFrac(latitudes, idxLatLL, site.latitude);
+
+    boundingHazards = boundingHazardsBuilder.put(
+        site,
+        calcTargetHazards(fracLon, fracLat))
+        .build();
+
+    NetcdfUtils.checkBoundingHazards(boundingHazards, boundingLocations.get(0).location);
+  }
+
+  private StaticHazards calcTargetHazards(double fracLon, double fracLat) {
+    var westTarget = getTargetData(
+        boundingHazards.get(boundingLocations.get(0).location),
+        boundingHazards.get(boundingLocations.get(1).location),
+        fracLat);
+
+    var eastTarget = getTargetData(
+        boundingHazards.get(boundingLocations.get(3).location),
+        boundingHazards.get(boundingLocations.get(2).location),
+        fracLat);
+
+    return getTargetData(westTarget, eastTarget, fracLon);
+  }
+
+  /*
+   * Return hazard curves for four closest grid points as a List of:
+   * Map<SiteClass, Map<Imt, XySequence(iml,hazard)>>
+   *
+   * List order is clockwise from lower left corner: LL, UL, UR, LR, [T] with an
+   * empty slot for the interpolated target hazards
+   */
+  private BoundingHazards.Builder extractHazardsAt(
+      int idxLonLL,
+      int idxLatLL) {
+
+    try (NetcdfDataset ncd = NetcdfDataset.openDataset(netcdf.path().toString())) {
+      var boundingHazardMaps = BoundingHazards.builder();
+      var targetGroup = ncd.findGroup(netcdf.nshmGroup().baseGroup());
+
+      var targetOrigin = new int[] { 0, 0, idxLatLL, idxLonLL, 0 };
+      var targetShape = new int[] {
+          coords.siteClasses().size(),
+          coords.imts().size(),
+          2,
+          2,
+          coords.nIml()
+      };
+
+      // TODO: rename variable in netCDF
+      /*
+       * Array aHazards now has shape [nVs,nImt,2,2,nIml] ...so origin will now
+       * be [0,0,0,0,0] for LL grid point ...and shape of requested array is
+       * [nVs,nImt,1,1,nIml]
+       */
+      var aHazards = targetGroup.findVariable(Key.AEPS).read(targetOrigin, targetShape);
+
+      var shape = targetShape.clone();
+      shape[2] = 1;
+      shape[3] = 1;
+
+      for (var boundingLocation : boundingLocations) {
+        boundingHazardMaps.put(
+            boundingLocation.location,
+            mapHazardsFromArray(aHazards.section(boundingLocation.origin, shape)));
+      }
+
+      return boundingHazardMaps;
+    } catch (IOException | InvalidRangeException e) {
+      throw new RuntimeException("Could not read Netcdf file [" + netcdf.path() + "]", e);
+    }
+  }
+
+  /*
+   * Read hazard curves from netCDF variable into map of hazards by SiteClass
+   * and Imt
+   *
+   * TODO: if target is on a grid point (or on a grid lat or lon), no need to
+   * read 4 bounding points ?
+   */
+  private StaticHazards mapHazardsFromArray(Array hazards) {
+    var vsImtHazardMap = StaticHazards.builder();
+
+    for (int iSiteClass = 0; iSiteClass < coords.siteClasses().size(); iSiteClass++) {
+      var siteClass = coords.siteClasses().get(iSiteClass);
+
+      var imtHazardMap = StaticHazard.builder();
+      for (int iImt = 0; iImt < coords.imts().size(); iImt++) {
+        var imt = coords.imts().get(iImt);
+
+        var origin = new int[] { iSiteClass, iImt, 0 };
+        var shape = new int[] { 1, 1, coords.nIml() };
+
+        try {
+          var xySequence = XySequence.create(
+              coords.imls().get(siteClass).get(imt),
+              (double[]) hazards.section(origin, shape).reduce().get1DJavaArray(DataType.DOUBLE));
+
+          imtHazardMap.put(imt, xySequence);
+        } catch (InvalidRangeException e) {
+          throw new RuntimeException(e);
+        }
+      }
+
+      vsImtHazardMap.put(siteClass, imtHazardMap.build());
+    }
+
+    return vsImtHazardMap.build();
+  }
+
+  static class BoundingLocation {
+    final Location location;
+    final int[] origin;
+
+    BoundingLocation(double longitude, double latitude, int longitudeIndex, int latitudeIndex) {
+      location = Location.create(latitude, longitude);
+      origin = new int[] { 0, 0, latitudeIndex, longitudeIndex, 0 };
+    }
+  }
+
+}
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/NetcdfUtils.java b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/NetcdfUtils.java
index 0424989edb43d16b57a297123904843e591bf666..945557ca5b6bae9a54f60431981b66d9c2f5a225 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/NetcdfUtils.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/NetcdfUtils.java
@@ -6,16 +6,12 @@ import static com.google.common.base.Preconditions.checkState;
 
 import java.io.IOException;
 import java.util.Arrays;
-import java.util.Map;
 
-import com.google.common.collect.Maps;
 import com.google.common.math.DoubleMath;
 
 import gov.usgs.earthquake.nshmp.data.XySequence;
 import gov.usgs.earthquake.nshmp.geo.Location;
 import gov.usgs.earthquake.nshmp.geo.LocationList;
-import gov.usgs.earthquake.nshmp.gmm.Imt;
-import gov.usgs.earthquake.nshmp.netcdf.SiteClass;
 
 import ucar.ma2.DataType;
 import ucar.nc2.Group;
@@ -156,41 +152,34 @@ public class NetcdfUtils {
 
   /*
    * Linear interpolation of data values to a target point
-   *
-   * TODO: use XySequence.copyOf(), streams
    */
-  static Map<SiteClass, Map<Imt, XySequence>> linearInterpolate(
-      Map<SiteClass, Map<Imt, XySequence>> v1,
-      Map<SiteClass, Map<Imt, XySequence>> v2,
-      double frac) {
-    // return v1 * (1 - frac) + v2 * frac
-    // DoubleData routines add and multiply in place, we don't want to modify
-    // the original data here
-    if (v1.size() != v2.size()) {
-      throw new IllegalArgumentException("Map size disagreement, cannot interpolate");
-    }
+  static StaticHazards linearInterpolate(StaticHazards v1, StaticHazards v2, double frac) {
+    checkBoundingHazard(v1, v2);
 
-    Map<SiteClass, Map<Imt, XySequence>> tMap = Maps.newEnumMap(SiteClass.class);
+    var targetMap = StaticHazards.builder();
 
-    for (SiteClass sc : v1.keySet()) {
-      if (v1.get(sc).size() != v2.get(sc).size()) {
-        throw new IllegalArgumentException("Array size disagreement, cannot interpolate");
-      }
-      Map<Imt, XySequence> imtHazards = Maps.newEnumMap(Imt.class);
+    v1.keySet().forEach(siteClass -> {
+      var imtHazards = StaticHazard.builder();
+      var v1StaticHazards = v1.get(siteClass);
+      var v2StaticHazards = v2.get(siteClass);
 
-      for (Imt imt : v1.get(sc).keySet()) {
-        var v1Haz = v1.get(sc).get(imt).yValues().toArray();
-        var v2Haz = v2.get(sc).get(imt).yValues().toArray();
-        var t = new double[v1Haz.length];
+      v1StaticHazards.keySet().forEach(imt -> {
+        var v1Haz = v1StaticHazards.get(imt).yValues().toArray();
+        var v2Haz = v2StaticHazards.get(imt).yValues().toArray();
+        var target = new double[v1Haz.length];
 
         for (int i = 0; i < v1Haz.length; i++) {
-          t[i] = v1Haz[i] * (1 - frac) + v2Haz[i] * frac;
+          target[i] = v1Haz[i] * (1 - frac) + v2Haz[i] * frac;
         }
-        imtHazards.put(imt, XySequence.create(v1.get(sc).get(imt).xValues().toArray(), t));
-      }
-      tMap.put(sc, imtHazards);
-    }
-    return tMap;
+
+        var xValues = v1StaticHazards.get(imt).xValues().toArray();
+        imtHazards.put(imt, XySequence.create(xValues, target));
+      });
+
+      targetMap.put(siteClass, imtHazards.build());
+    });
+
+    return targetMap.build();
   }
 
   /**
@@ -201,7 +190,7 @@ public class NetcdfUtils {
    * @param boundingHazards The bounding hazards
    */
   static void checkBoundingHazards(
-      Map<Location, Map<SiteClass, Map<Imt, XySequence>>> boundingHazards,
+      BoundingHazards boundingHazards,
       Location location) {
     checkArgument(boundingHazards.containsKey(location), "Location not in bounding hazards");
     boundingHazards.keySet().stream()
@@ -219,10 +208,10 @@ public class NetcdfUtils {
    * @param b Bounding hazard map B
    */
   static void checkBoundingHazard(
-      Map<SiteClass, Map<Imt, XySequence>> a,
-      Map<SiteClass, Map<Imt, XySequence>> b) {
+      StaticHazards a,
+      StaticHazards b) {
     checkState(a.size() == b.size(), "Maps are not the same size");
-    checkState(a.keySet().containsAll(a.keySet()), "Site classes do not match");
+    checkState(a.keySet().containsAll(b.keySet()), "Site classes do not match");
     a.keySet().forEach(key -> checkHazards(a.get(key), b.get(key)));
   }
 
@@ -233,9 +222,9 @@ public class NetcdfUtils {
    * @param a Hazard A
    * @param b Hazard B
    */
-  static void checkHazards(Map<Imt, XySequence> a, Map<Imt, XySequence> b) {
+  static void checkHazards(StaticHazard a, StaticHazard b) {
     checkState(a.size() == b.size(), "Maps are not the same size");
-    checkState(a.keySet().containsAll(a.keySet()), "IMTs do not match");
+    checkState(a.keySet().containsAll(b.keySet()), "IMTs do not match");
     a.keySet().forEach(key -> checkGroundMotions(a.get(key), b.get(key)));
   }
 
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/StaticHazard.java b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/StaticHazard.java
new file mode 100644
index 0000000000000000000000000000000000000000..a52652fdf382155fa31df1c9c4129a3c93e2d8dc
--- /dev/null
+++ b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/StaticHazard.java
@@ -0,0 +1,75 @@
+package gov.usgs.earthquake.nshmp.netcdf.reader;
+
+import static com.google.common.base.Preconditions.checkArgument;
+import static com.google.common.base.Preconditions.checkState;
+
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+
+import gov.usgs.earthquake.nshmp.data.XySequence;
+import gov.usgs.earthquake.nshmp.gmm.Imt;
+
+public class StaticHazard implements Iterable<Entry<Imt, XySequence>> {
+
+  private final Map<Imt, XySequence> staticHazard;
+
+  StaticHazard(Map<Imt, XySequence> staticHazard) {
+    this.staticHazard = staticHazard;
+  }
+
+  public Set<Entry<Imt, XySequence>> entrySet() {
+    return staticHazard.entrySet();
+  }
+
+  public boolean containsKey(Imt imt) {
+    return staticHazard.containsKey(imt);
+  }
+
+  public XySequence get(Imt imt) {
+    checkArgument(staticHazard.containsKey(imt), "Imt [" + imt + "] not found");
+    return staticHazard.get(imt);
+  }
+
+  public Set<Imt> keySet() {
+    return staticHazard.keySet();
+  }
+
+  public int size() {
+    return staticHazard.size();
+  }
+
+  public Map<Imt, XySequence> staticHazard() {
+    return Map.copyOf(staticHazard);
+  }
+
+  @Override
+  public Iterator<Entry<Imt, XySequence>> iterator() {
+    return entrySet().iterator();
+  }
+
+  public static Builder builder() {
+    return new Builder();
+  }
+
+  public static class Builder {
+    Map<Imt, XySequence> staticHazard;
+
+    private Builder() {
+      staticHazard = new HashMap<>();
+    }
+
+    public Builder put(Imt imt, XySequence xySequence) {
+      staticHazard.put(imt, xySequence);
+      return this;
+    }
+
+    public StaticHazard build() {
+      checkState(!staticHazard.isEmpty(), "Must add hazards");
+      return new StaticHazard(staticHazard);
+    }
+  }
+
+}
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/StaticHazards.java b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/StaticHazards.java
new file mode 100644
index 0000000000000000000000000000000000000000..f95cdbfdb568308b60b2cd39045476d0e7ae60f9
--- /dev/null
+++ b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/StaticHazards.java
@@ -0,0 +1,75 @@
+package gov.usgs.earthquake.nshmp.netcdf.reader;
+
+import static com.google.common.base.Preconditions.checkArgument;
+import static com.google.common.base.Preconditions.checkState;
+
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+
+import gov.usgs.earthquake.nshmp.netcdf.SiteClass;
+
+public class StaticHazards implements Iterable<Entry<SiteClass, StaticHazard>> {
+
+  private final Map<SiteClass, StaticHazard> staticHazards;
+
+  StaticHazards(Map<SiteClass, StaticHazard> staticHazards) {
+    this.staticHazards = staticHazards;
+  }
+
+  public boolean containsKey(SiteClass siteClass) {
+    return staticHazards.containsKey(siteClass);
+  }
+
+  public Set<Entry<SiteClass, StaticHazard>> entrySet() {
+    return staticHazards.entrySet();
+  }
+
+  public StaticHazard get(SiteClass siteClass) {
+    checkArgument(staticHazards.containsKey(siteClass), "Site class [" + siteClass + "] not found");
+    return staticHazards.get(siteClass);
+  }
+
+  public Set<SiteClass> keySet() {
+    return staticHazards.keySet();
+  }
+
+  public int size() {
+    return staticHazards.size();
+  }
+
+  public Map<SiteClass, StaticHazard> staticHazards() {
+    return Map.copyOf(staticHazards);
+  }
+
+  @Override
+  public Iterator<Entry<SiteClass, StaticHazard>> iterator() {
+    return entrySet().iterator();
+  }
+
+  public static Builder builder() {
+    return new Builder();
+  }
+
+  public static class Builder {
+    Map<SiteClass, StaticHazard> staticHazards;
+
+    private Builder() {
+      staticHazards = new HashMap<>();
+    }
+
+    public Builder put(SiteClass siteClass, StaticHazard staticHazard) {
+      staticHazards.put(siteClass, staticHazard);
+      return this;
+    }
+
+    public StaticHazards build() {
+      checkState(!staticHazards.isEmpty(), "Must add hazards");
+      return new StaticHazards(staticHazards);
+    }
+
+  }
+
+}
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/www/NetcdfService.java b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/www/NetcdfService.java
index aa0d91657f207c02a178de71e2a42a71c0e099b5..003f2c3d07204b05218f97cabd7b769fd0f802ef 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/www/NetcdfService.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/www/NetcdfService.java
@@ -4,7 +4,6 @@ import static gov.usgs.earthquake.nshmp.netcdf.www.NetcdfWsUtils.GSON;
 
 import java.nio.file.Path;
 import java.util.List;
-import java.util.Map;
 import java.util.logging.Logger;
 import java.util.stream.Collectors;
 
@@ -21,6 +20,9 @@ import gov.usgs.earthquake.nshmp.internal.www.meta.Status;
 import gov.usgs.earthquake.nshmp.netcdf.NshmGroup;
 import gov.usgs.earthquake.nshmp.netcdf.NshmNetcdfReader;
 import gov.usgs.earthquake.nshmp.netcdf.SiteClass;
+import gov.usgs.earthquake.nshmp.netcdf.reader.BoundingHazards;
+import gov.usgs.earthquake.nshmp.netcdf.reader.StaticHazard;
+import gov.usgs.earthquake.nshmp.netcdf.reader.StaticHazards;
 import gov.usgs.earthquake.nshmp.netcdf.www.NetcdfController.Query;
 import gov.usgs.earthquake.nshmp.netcdf.www.NetcdfController.Service;
 import gov.usgs.earthquake.nshmp.netcdf.www.NetcdfWsUtils.Key;
@@ -215,7 +217,7 @@ public class NetcdfService {
   static List<List<List<ResponseData>>> toLists(
       NshmGroup nshmGroup,
       Location site,
-      Map<Location, Map<SiteClass, Map<Imt, XySequence>>> bounding) {
+      BoundingHazards bounding) {
 
     return bounding.entrySet().stream()
         .map(entry -> toList(nshmGroup, site, entry.getValue()))
@@ -225,7 +227,7 @@ public class NetcdfService {
   static List<List<ResponseData>> toList(
       NshmGroup nshmGroup,
       Location site,
-      Map<SiteClass, Map<Imt, XySequence>> curves) {
+      StaticHazards curves) {
     return curves.entrySet().stream()
         .map(entry -> {
           var request = new RequestDataCurves(site.longitude, site.latitude, entry.getKey());
@@ -237,7 +239,7 @@ public class NetcdfService {
   static List<ResponseData> toList(
       NshmGroup nshmGroup,
       RequestDataCurves request,
-      Map<Imt, XySequence> curves) {
+      StaticHazard curves) {
     return curves.entrySet().stream()
         .map((entry) -> {
           var site = request.site;
diff --git a/src/test/java/gov/usgs/earthquake/nshmp/netcdf/NshmNetcdfReaderTests.java b/src/test/java/gov/usgs/earthquake/nshmp/netcdf/NshmNetcdfReaderTests.java
index 748fd5f87e8cbf55cefd0ab3d68349af5d837ce4..0584f365eab948ee73ac7d531077811c6c5cac09 100644
--- a/src/test/java/gov/usgs/earthquake/nshmp/netcdf/NshmNetcdfReaderTests.java
+++ b/src/test/java/gov/usgs/earthquake/nshmp/netcdf/NshmNetcdfReaderTests.java
@@ -25,7 +25,10 @@ import gov.usgs.earthquake.nshmp.geo.Location;
 import gov.usgs.earthquake.nshmp.geo.LocationList;
 import gov.usgs.earthquake.nshmp.geo.Regions;
 import gov.usgs.earthquake.nshmp.gmm.Imt;
+import gov.usgs.earthquake.nshmp.netcdf.reader.BoundingHazards;
 import gov.usgs.earthquake.nshmp.netcdf.reader.NetcdfUtils;
+import gov.usgs.earthquake.nshmp.netcdf.reader.StaticHazard;
+import gov.usgs.earthquake.nshmp.netcdf.reader.StaticHazards;
 
 class NshmNetcdfReaderTests {
 
@@ -95,27 +98,19 @@ class NshmNetcdfReaderTests {
 
   static Map<SiteClass, Map<Imt, double[]>> IMLS = new HashMap<>();
 
-  static Map<Location, Map<SiteClass, Map<Imt, XySequence>>> BOUNDING_HAZARDS = new HashMap<>();
+  static BoundingHazards BOUNDING_HAZARDS;
 
   static final NshmNetcdfReader NETCDF = new NshmNetcdfReader(NshmGroup.CONUS_2018, NETCDF_PATH);
 
-  static class NetcdfHazard {
-    Location location;
-    Map<SiteClass, Map<Imt, XySequence>> hazard;
-
-    NetcdfHazard(Location location, Map<SiteClass, Map<Imt, XySequence>> hazard) {
-      this.location = location;
-      this.hazard = hazard;
-    }
-  }
-
   static {
+    var builder = BoundingHazards.builder();
+
     var iHaz = 0;
     for (var location : LOCATIONS) {
-      var scMap = new HashMap<SiteClass, Map<Imt, XySequence>>();
+      var siteClassMap = StaticHazards.builder();
 
       for (var siteClass : SITE_CLASSES) {
-        var imtMap = new HashMap<Imt, XySequence>();
+        var imtMap = StaticHazard.builder();
 
         for (var iImt = 0; iImt < IMTS.size(); iImt++) {
           var imt = IMTS.get(iImt);
@@ -123,12 +118,14 @@ class NshmNetcdfReaderTests {
           imtMap.put(imt, xy);
         }
 
-        scMap.put(siteClass, imtMap);
+        siteClassMap.put(siteClass, imtMap.build());
       }
 
-      BOUNDING_HAZARDS.put(location, scMap);
+      builder.put(location, siteClassMap.build());
     }
 
+    BOUNDING_HAZARDS = builder.build();
+
     SITE_CLASSES.forEach(siteClass -> {
       var haz = new HashMap<Imt, double[]>();
 
@@ -282,8 +279,8 @@ class NshmNetcdfReaderTests {
   }
 
   private void testHazards(
-      Map<SiteClass, Map<Imt, XySequence>> expected,
-      Map<SiteClass, Map<Imt, XySequence>> actual) {
+      StaticHazards expected,
+      StaticHazards actual) {
     for (var siteEntry : expected.entrySet()) {
       var siteClass = siteEntry.getKey();
       assertTrue(actual.containsKey(siteClass));
@@ -292,7 +289,7 @@ class NshmNetcdfReaderTests {
     }
   }
 
-  private void testHazard(Map<Imt, XySequence> expected, Map<Imt, XySequence> actual) {
+  private void testHazard(StaticHazard expected, StaticHazard actual) {
     for (var imtEntry : expected.entrySet()) {
       var imt = imtEntry.getKey();
       assertTrue(actual.containsKey(imt));
diff --git a/src/test/java/gov/usgs/earthquake/nshmp/netcdf/reader/NetcdfUtilsTests.java b/src/test/java/gov/usgs/earthquake/nshmp/netcdf/reader/NetcdfUtilsTests.java
index 2eaacb20513527badeb611197c85a9d46747e7c2..00f90792ef6b978b0290105d701675a805022459 100644
--- a/src/test/java/gov/usgs/earthquake/nshmp/netcdf/reader/NetcdfUtilsTests.java
+++ b/src/test/java/gov/usgs/earthquake/nshmp/netcdf/reader/NetcdfUtilsTests.java
@@ -1,17 +1,15 @@
 package gov.usgs.earthquake.nshmp.netcdf.reader;
 
+import static org.junit.jupiter.api.Assertions.assertArrayEquals;
 import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
 import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertThrows;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
-import java.util.ArrayList;
 import java.util.List;
-import java.util.Map;
 
 import org.junit.jupiter.api.Test;
 
-import com.google.common.collect.Maps;
-
 import gov.usgs.earthquake.nshmp.data.XySequence;
 import gov.usgs.earthquake.nshmp.gmm.Imt;
 import gov.usgs.earthquake.nshmp.netcdf.SiteClass;
@@ -29,35 +27,32 @@ class NetcdfUtilsTests {
 
   private static final double TOL = 1e-7;
 
-  static Map<SiteClass, Map<Imt, XySequence>> mapHaz0 = Maps.newEnumMap(SiteClass.class);
-  static Map<SiteClass, Map<Imt, XySequence>> mapHaz1 = Maps.newEnumMap(SiteClass.class);
-  static Map<SiteClass, Map<Imt, XySequence>> mapHazTarget = Maps.newEnumMap(SiteClass.class);
-  static Map<SiteClass, Map<Imt, XySequence>> mapDiffImtSize = Maps.newEnumMap(SiteClass.class);
-  static Map<SiteClass, Map<Imt, XySequence>> mapDiffScSize = Maps.newEnumMap(SiteClass.class);
-  static Map<SiteClass, Map<Imt, XySequence>> mapDiffImlValue = Maps.newEnumMap(SiteClass.class);
+  static StaticHazards mapHaz0;
+  static StaticHazards mapHaz1;
+  static StaticHazards mapHazTarget;
+  static StaticHazards mapDiffImtSize;
+  static StaticHazards mapDiffScSize;
+  static StaticHazards mapDiffImlValue;
 
-  private static List<SiteClass> siteClasses = new ArrayList<SiteClass>();
-  private static List<Imt> imts = new ArrayList<Imt>();
   private static final int N_IML = 3;
   private static final double FRAC = 0.5;
 
   static {
-    siteClasses.add(SiteClass.B);
-    siteClasses.add(SiteClass.C);
-    siteClasses.add(SiteClass.D);
-
-    imts.add(Imt.PGA);
-    imts.add(Imt.SA0P1);
-    imts.add(Imt.SA1P5);
+    var siteClasses = List.of(SiteClass.B, SiteClass.C, SiteClass.D);
+    var imts = List.of(Imt.PGA, Imt.SA0P1, Imt.SA1P5);
+    var imlValues = new double[] { 0.1, 0.5, 0.75 };
 
-    double[] imlValues = new double[] { 0.1, 0.5, 0.75 };
+    var mapHaz0Builder = StaticHazards.builder();
+    var mapHaz1Builder = StaticHazards.builder();
+    var mapHazTargetBuilder = StaticHazards.builder();
+    var mapDiffImlValueBuilder = StaticHazards.builder();
 
     for (SiteClass sc : siteClasses) {
-      Map<Imt, XySequence> imtMap0 = Maps.newEnumMap(Imt.class);
-      Map<Imt, XySequence> imtMap1 = Maps.newEnumMap(Imt.class);
-      Map<Imt, XySequence> imtMapTarget = Maps.newEnumMap(Imt.class);
-      Map<Imt, XySequence> imtMapBiggerErr = Maps.newEnumMap(Imt.class);
-      Map<Imt, XySequence> imtMapDiffIml = Maps.newEnumMap(Imt.class);
+      var imtMap0 = StaticHazard.builder();
+      var imtMap1 = StaticHazard.builder();
+      var imtMapTarget = StaticHazard.builder();
+      var imtMapDiffIml = StaticHazard.builder();
+
       for (Imt imt : imts) {
         double[] zeros = new double[N_IML];
         double[] ones = new double[N_IML];
@@ -69,7 +64,6 @@ class NetcdfUtilsTests {
         imtMap0.put(imt, XySequence.create(imlValues, zeros));
         imtMap1.put(imt, XySequence.create(imlValues, ones));
         imtMapTarget.put(imt, XySequence.create(imlValues, half));
-        imtMapBiggerErr.put(imt, XySequence.create(imlValues, ones));
 
         // insert different Iml value
         if (sc == siteClasses.get(siteClasses.size() - 1) && imt == imts.get(imts.size() - 1)) {
@@ -80,19 +74,35 @@ class NetcdfUtilsTests {
           imtMapDiffIml.put(imt, XySequence.create(imlValues, ones));
         }
       }
-      mapHaz0.put(sc, imtMap0);
-      mapHaz1.put(sc, imtMap1);
-      mapHazTarget.put(sc, imtMapTarget);
-      mapDiffImtSize.put(sc, imtMapBiggerErr);
-      mapDiffImlValue.put(sc, imtMapDiffIml);
+      mapHaz0Builder.put(sc, imtMap0.build());
+      mapHaz1Builder.put(sc, imtMap1.build());
+      mapHazTargetBuilder.put(sc, imtMapTarget.build());
+      mapDiffImlValueBuilder.put(sc, imtMapDiffIml.build());
     }
 
-    // add another map
-    mapDiffScSize.putAll(mapDiffImtSize);
-    mapDiffScSize.put(SiteClass.A, mapDiffScSize.get(siteClasses.get(0)));
-    mapDiffImtSize.get(siteClasses.get(0))
-        .put(Imt.SA10P0, XySequence.create(imlValues, new double[N_IML]));
+    mapHaz0 = mapHaz0Builder.build();
+    mapHaz1 = mapHaz1Builder.build();
+    mapHazTarget = mapHazTargetBuilder.build();
+    mapDiffImlValue = mapDiffImlValueBuilder.build();
+
+    // Add extra site class
+    var mapDiffScSizeBuilder = StaticHazards.builder();
+    mapHaz0.forEach(entry -> mapDiffScSizeBuilder.put(entry.getKey(), entry.getValue()));
+    mapDiffScSizeBuilder.put(SiteClass.A, mapHaz0.get(siteClasses.get(0)));
+    mapDiffScSize = mapDiffScSizeBuilder.build();
+
+    // Add extra IMT
+    var mapDiffImtSizeBuilder = StaticHazards.builder();
+    mapHaz0.forEach(entry -> {
+      var builder = StaticHazard.builder();
+      entry.getValue().forEach(imtEntry -> {
+        builder.put(imtEntry.getKey(), imtEntry.getValue());
+      });
+      builder.put(Imt.SA10P0, XySequence.create(imlValues, new double[N_IML]));
+      mapDiffImtSizeBuilder.put(entry.getKey(), builder.build());
+    });
 
+    mapDiffImtSize = mapDiffImtSizeBuilder.build();
   }
 
   @Test
@@ -138,12 +148,23 @@ class NetcdfUtilsTests {
 
   @Test
   final void testLinearInterpolate() {
-    assertEquals(mapHazTarget, NetcdfUtils.linearInterpolate(mapHaz0, mapHaz1, FRAC));
+    var actual = NetcdfUtils.linearInterpolate(mapHaz0, mapHaz1, FRAC);
+    assertTrue(mapHazTarget.keySet().containsAll(actual.keySet()));
+
+    mapHazTarget.forEach(entry -> {
+      assertTrue(entry.getValue().keySet().containsAll(actual.get(entry.getKey()).keySet()));
+      entry.getValue().forEach(imtEntry -> {
+        var actualXy = actual.get(entry.getKey()).get(imtEntry.getKey());
+        assertArrayEquals(imtEntry.getValue().xValues().toArray(), actualXy.xValues().toArray(), 0);
+      });
+    });
+
     // attempt to interpolate maps of difference sizes
-    assertThrows(IllegalArgumentException.class, () -> {
+    assertThrows(IllegalStateException.class, () -> {
       NetcdfUtils.linearInterpolate(mapHaz0, mapDiffImtSize, FRAC);
     });
-    assertThrows(IllegalArgumentException.class, () -> {
+
+    assertThrows(IllegalStateException.class, () -> {
       NetcdfUtils.linearInterpolate(mapHaz0, mapDiffScSize, FRAC);
     });
   }