diff --git a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/BoundingHazards.java b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/BoundingHazards.java
index 971b6b305116d7270df1428530e5ec42afe40643..2275031f372b4991393a564bd2ea2397ed761eb8 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/BoundingHazards.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/BoundingHazards.java
@@ -82,6 +82,9 @@ public class BoundingHazards {
     boundingHazards.put(
         site,
         calcTargetHazards(fracLon, fracLat));
+
+    // validate that all bounding hazards share site classes, IMTs, and ground motions
+    NetcdfUtils.checkBoundingHazards(boundingHazards, boundingLocations.first());
   }
 
   private Map<SiteClass, Map<Imt, XySequence>> calcTargetHazards(double fracLon, double fracLat) {
@@ -164,6 +167,7 @@ public class BoundingHazards {
       boundingHazardMaps.put(
           boundingLocations.get(3),
           mapHazardsFromArray(aHazards.section(origin, shape)));
+
     } catch (IOException | InvalidRangeException e) {
       // shouldn't get here because the reader was initialized with a valid and
       // existing netCDF file. Is the only way to trigger this error is to
@@ -228,15 +232,13 @@ public class BoundingHazards {
    *
    * @param frac fractional distance between p1 and p2 to target point
    */
-  private static Map<SiteClass, Map<Imt, XySequence>> getTargetData(
+  static Map<SiteClass, Map<Imt, XySequence>> getTargetData(
       Map<SiteClass, Map<Imt, XySequence>> d1,
       Map<SiteClass, Map<Imt, XySequence>> d2,
       double frac) {
     // do we need better checking here? or is it safe to assume that every
     // Map<SiteClass, Map<Imt,double[]>> passed in is consistent?
-    if (d1.size() != d2.size()) {
-      throw new IllegalArgumentException("Array size disagreement, cannot interpolate");
-    }
+    NetcdfUtils.checkBoundingHazard(d1, d2);
 
     if (frac == 0.0) {
       // target is the same as d1
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/NetcdfUtils.java b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/NetcdfUtils.java
index cbfa77a7292e5ef629ba586dac12a8660c1b5161..0424989edb43d16b57a297123904843e591bf666 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/NetcdfUtils.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/NetcdfUtils.java
@@ -1,6 +1,8 @@
 package gov.usgs.earthquake.nshmp.netcdf.reader;
 
+import static com.google.common.base.Preconditions.checkArgument;
 import static com.google.common.base.Preconditions.checkNotNull;
+import static com.google.common.base.Preconditions.checkState;
 
 import java.io.IOException;
 import java.util.Arrays;
@@ -10,6 +12,7 @@ import com.google.common.collect.Maps;
 import com.google.common.math.DoubleMath;
 
 import gov.usgs.earthquake.nshmp.data.XySequence;
+import gov.usgs.earthquake.nshmp.geo.Location;
 import gov.usgs.earthquake.nshmp.geo.LocationList;
 import gov.usgs.earthquake.nshmp.gmm.Imt;
 import gov.usgs.earthquake.nshmp.netcdf.SiteClass;
@@ -20,7 +23,7 @@ import ucar.nc2.Group;
 public class NetcdfUtils {
 
   /**
-   * Creates a border geoing clockwise of the given longitudes and latitudes.
+   * Creates a border going clockwise of the given longitudes and latitudes.
    * 
    * @param longitudes The longitudes
    * @param latitudes The latitudes
@@ -49,9 +52,9 @@ public class NetcdfUtils {
   }
 
   /**
-   * Returns a {@code double[]} from a NetCDF group
+   * Returns a {@code double[]} from a netCDF group
    * 
-   * @param group The NetCDF group
+   * @param group The netCDF group
    * @param key The key to read from the group
    * @throws IOException
    */
@@ -60,9 +63,9 @@ public class NetcdfUtils {
   }
 
   /**
-   * Returns a {@code int[]} from a NetCDF group
+   * Returns a {@code int[]} from a netCDF group
    * 
-   * @param group The NetCDF group
+   * @param group The netCDF group
    * @param key The key to read from the group
    * @throws IOException
    */
@@ -71,9 +74,9 @@ public class NetcdfUtils {
   }
 
   /**
-   * Get a 1D array from a NetCDF group.
+   * Get a 1D array from a netCDF group.
    * 
-   * @param group The NetCDF group
+   * @param group The netCDF group
    * @param key The key to read from the group
    * @param dataType The data type to read
    * @throws IOException
@@ -190,6 +193,64 @@ public class NetcdfUtils {
     return tMap;
   }
 
+  /**
+   * Checks that the hazard map at each bounding location contains the same
+   * site classes, the same IMTs for each site class, and the same ground
+   * motion (X) values for each IMT as the map at the reference location.
+   *
+   * @param boundingHazards The bounding hazards
+   * @param location The reference location to check against
+   */
+  static void checkBoundingHazards(
+      Map<Location, Map<SiteClass, Map<Imt, XySequence>>> boundingHazards,
+      Location location) {
+    checkArgument(boundingHazards.containsKey(location), "Location not in bounding hazards");
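+    // compare the hazard map at every other bounding location against the
+    // map at the reference location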
+    boundingHazards.keySet().stream()
+        .filter(loc -> !loc.equals(location))
+        .forEach(key -> {
+          checkBoundingHazard(boundingHazards.get(location), boundingHazards.get(key));
+        });
+  }
+
+  /**
+   * Checks that two bounding hazard maps contain the same site classes, the
+   * same IMTs for each site class, and the same ground motion (X) values for
+   * each IMT.
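+   *
+   * <p>Illustrative sketch (hypothetical single-entry maps; real maps are
+   * built from the netCDF data):
+   *
+   * <pre>{@code
+   * double[] imls = { 0.01, 0.02 };
+   * Map<Imt, XySequence> imtMap = Maps.newEnumMap(Imt.class);
+   * imtMap.put(Imt.PGA, XySequence.create(imls, new double[] { 0.1, 0.2 }));
+   * Map<SiteClass, Map<Imt, XySequence>> a = Maps.newEnumMap(SiteClass.class);
+   * a.put(SiteClass.A, imtMap);
+   * // passes: identical site classes, IMTs, and X values
+   * checkBoundingHazard(a, a);
+   * }</pre>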
+   * 
+   * @param a Bounding hazard map A
+   * @param b Bounding hazard map B
+   */
+  static void checkBoundingHazard(
+      Map<SiteClass, Map<Imt, XySequence>> a,
+      Map<SiteClass, Map<Imt, XySequence>> b) {
+    checkState(a.size() == b.size(),
+        "Bounding hazard maps do not have the same number of site classes");
+    checkState(a.keySet().containsAll(b.keySet()), "Site classes do not match");
+    a.keySet().forEach(key -> checkHazards(a.get(key), b.get(key)));
+  }
+
+  /**
+   * Checks that two hazard maps contain the same IMTs and the same ground
+   * motion (X) values for each IMT.
+   * 
+   * @param a Hazard A
+   * @param b Hazard B
+   */
+  static void checkHazards(Map<Imt, XySequence> a, Map<Imt, XySequence> b) {
+    checkState(a.size() == b.size(),
+        "Hazard maps do not have the same number of IMTs");
+    checkState(a.keySet().containsAll(b.keySet()), "IMTs do not match");
+    a.keySet().forEach(key -> checkGroundMotions(a.get(key), b.get(key)));
+  }
+
+  /**
+   * Checks that the X (ground motion) values of two hazard curves are
+   * identical.
+   * 
+   * @param a Sequence A
+   * @param b Sequence B
+   */
+  static void checkGroundMotions(XySequence a, XySequence b) {
+    checkState(
+        Arrays.equals(a.xValues().toArray(), b.xValues().toArray()),
+        "Hazard curves xValues are not the same");
+  }
+
   static class Key {
     static final String AEPS = "AEPs";
     static final String IMLS = "Imls";
diff --git a/src/test/java/gov/usgs/earthquake/nshmp/netcdf/reader/NetcdfUtilsTests.java b/src/test/java/gov/usgs/earthquake/nshmp/netcdf/reader/NetcdfUtilsTests.java
index 923911f8565e65c60429953e9eb8f296384a7af7..2eaacb20513527badeb611197c85a9d46747e7c2 100644
--- a/src/test/java/gov/usgs/earthquake/nshmp/netcdf/reader/NetcdfUtilsTests.java
+++ b/src/test/java/gov/usgs/earthquake/nshmp/netcdf/reader/NetcdfUtilsTests.java
@@ -1,5 +1,6 @@
 package gov.usgs.earthquake.nshmp.netcdf.reader;
 
+import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
 import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertThrows;
 
@@ -15,7 +16,7 @@ import gov.usgs.earthquake.nshmp.data.XySequence;
 import gov.usgs.earthquake.nshmp.gmm.Imt;
 import gov.usgs.earthquake.nshmp.netcdf.SiteClass;
 
-public class NetcdfUtilsTests {
+class NetcdfUtilsTests {
 
   private static final double[] LONGITUDES = new double[] {
       -106.00, -105.95, -105.90, -105.85, -105.80, -105.75, -105.70, -105.65, -105.60, -105.55,
@@ -33,6 +34,7 @@ public class NetcdfUtilsTests {
   static Map<SiteClass, Map<Imt, XySequence>> mapHazTarget = Maps.newEnumMap(SiteClass.class);
   static Map<SiteClass, Map<Imt, XySequence>> mapDiffImtSize = Maps.newEnumMap(SiteClass.class);
   static Map<SiteClass, Map<Imt, XySequence>> mapDiffScSize = Maps.newEnumMap(SiteClass.class);
+  static Map<SiteClass, Map<Imt, XySequence>> mapDiffImlValue = Maps.newEnumMap(SiteClass.class);
 
   private static List<SiteClass> siteClasses = new ArrayList<SiteClass>();
   private static List<Imt> imts = new ArrayList<Imt>();
@@ -55,6 +57,7 @@ public class NetcdfUtilsTests {
       Map<Imt, XySequence> imtMap1 = Maps.newEnumMap(Imt.class);
       Map<Imt, XySequence> imtMapTarget = Maps.newEnumMap(Imt.class);
       Map<Imt, XySequence> imtMapBiggerErr = Maps.newEnumMap(Imt.class);
+      Map<Imt, XySequence> imtMapDiffIml = Maps.newEnumMap(Imt.class);
       for (Imt imt : imts) {
         double[] zeros = new double[N_IML];
         double[] ones = new double[N_IML];
@@ -67,23 +70,33 @@ public class NetcdfUtilsTests {
         imtMap1.put(imt, XySequence.create(imlValues, ones));
         imtMapTarget.put(imt, XySequence.create(imlValues, half));
         imtMapBiggerErr.put(imt, XySequence.create(imlValues, ones));
+
+        // insert different Iml value
+        if (sc == siteClasses.get(siteClasses.size() - 1) && imt == imts.get(imts.size() - 1)) {
+          double[] imlValuesAlt = imlValues.clone();
+          imlValuesAlt[imlValuesAlt.length - 1] += 0.1;
+          imtMapDiffIml.put(imt, XySequence.create(imlValuesAlt, ones));
+        } else {
+          imtMapDiffIml.put(imt, XySequence.create(imlValues, ones));
+        }
       }
       mapHaz0.put(sc, imtMap0);
       mapHaz1.put(sc, imtMap1);
       mapHazTarget.put(sc, imtMapTarget);
       mapDiffImtSize.put(sc, imtMapBiggerErr);
+      mapDiffImlValue.put(sc, imtMapDiffIml);
     }
 
     // add another map
     mapDiffScSize.putAll(mapDiffImtSize);
     mapDiffScSize.put(SiteClass.A, mapDiffScSize.get(siteClasses.get(0)));
-    mapDiffImtSize.get(siteClasses.get(0)).put(Imt.SA10P0,
-        XySequence.create(imlValues, new double[N_IML]));
+    mapDiffImtSize.get(siteClasses.get(0))
+        .put(Imt.SA10P0, XySequence.create(imlValues, new double[N_IML]));
 
   }
 
   @Test
-  public final void testGetIdxLTEQ() {
+  final void testGetIdxLTEQ() {
     // target is out of range, expect IAE
     assertThrows(IllegalArgumentException.class, () -> {
       NetcdfUtils.getIdxLTEQ(LONGITUDES, -100.0);
@@ -94,7 +107,8 @@ public class NetcdfUtilsTests {
       assertEquals(i, NetcdfUtils.getIdxLTEQ(LONGITUDES, LONGITUDES[i] + 0.025));
     }
 
-    assertEquals(LONGITUDES.length - 2,
+    assertEquals(
+        LONGITUDES.length - 2,
         NetcdfUtils.getIdxLTEQ(LONGITUDES, LONGITUDES[LONGITUDES.length - 1]));
 
     for (int i = 0; i < LATITUDES.length - 1; i++) {
@@ -110,7 +124,7 @@ public class NetcdfUtilsTests {
   }
 
   @Test
-  public final void testCalcGridFrac() {
+  final void testCalcGridFrac() {
     double f = 0.13;
     int i = 4;
     assertEquals(f,
@@ -123,7 +137,7 @@ public class NetcdfUtilsTests {
   }
 
   @Test
-  public final void testLinearInterpolate() {
+  final void testLinearInterpolate() {
     assertEquals(mapHazTarget, NetcdfUtils.linearInterpolate(mapHaz0, mapHaz1, FRAC));
     // attempt to interpolate maps of difference sizes
     assertThrows(IllegalArgumentException.class, () -> {
@@ -134,4 +148,33 @@ public class NetcdfUtilsTests {
     });
   }
 
+  @Test
+  final void checkMapConsistencyTests() {
+    assertDoesNotThrow(() -> {
+      NetcdfUtils.checkBoundingHazard(mapHaz0, mapHaz0);
+    });
+
+    assertDoesNotThrow(() -> {
+      NetcdfUtils.checkBoundingHazard(mapHaz0, mapHaz1);
+    });
+
+    assertDoesNotThrow(() -> {
+      NetcdfUtils.checkBoundingHazard(mapHaz0, mapHazTarget);
+    });
+
+    // compare maps with different size at first level (SiteClass)
+    assertThrows(IllegalStateException.class, () -> {
+      NetcdfUtils.checkBoundingHazard(mapHaz0, mapDiffScSize);
+    });
+
+    // compare maps with different size at second level (Imt)
+    assertThrows(IllegalStateException.class, () -> {
+      NetcdfUtils.checkBoundingHazard(mapHaz0, mapDiffImtSize);
+    });
+
+    // compare maps with a single different Iml value
+    assertThrows(IllegalStateException.class, () -> {
+      NetcdfUtils.checkBoundingHazard(mapHaz0, mapDiffImlValue);
+    });
+  }
 }