diff --git a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/BoundingHazards.java b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/BoundingHazards.java
index 9214148f5eb5b4b2f776a3a09e5110d3260e0871..2275031f372b4991393a564bd2ea2397ed761eb8 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/BoundingHazards.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/BoundingHazards.java
@@ -82,6 +82,9 @@ public class BoundingHazards {
     boundingHazards.put(
         site,
         calcTargetHazards(fracLon, fracLat));
+
+    // validate boundingHazards
+    NetcdfUtils.checkBoundingHazards(boundingHazards, boundingLocations.first());
   }
 
   private Map<SiteClass, Map<Imt, XySequence>> calcTargetHazards(double fracLon, double fracLat) {
@@ -165,14 +168,6 @@ public class BoundingHazards {
           boundingLocations.get(3),
           mapHazardsFromArray(aHazards.section(origin, shape)));
 
-      // validate boundingHazardMaps
-      NetcdfUtils.checkHazardMapConsistency(boundingHazardMaps.get(boundingLocations.get(0)),
-          boundingHazardMaps.get(boundingLocations.get(1)));
-      NetcdfUtils.checkHazardMapConsistency(boundingHazardMaps.get(boundingLocations.get(0)),
-          boundingHazardMaps.get(boundingLocations.get(2)));
-      NetcdfUtils.checkHazardMapConsistency(boundingHazardMaps.get(boundingLocations.get(0)),
-          boundingHazardMaps.get(boundingLocations.get(3)));
-
     } catch (IOException | InvalidRangeException e) {
       // shouldn't get here because the reader was initialized with a valid and
       // existing netCDF file. Is the only way to trigger this error is to
@@ -243,7 +238,7 @@ public class BoundingHazards {
       double frac) {
     // do we need better checking here? or is it safe to assume that every
     // Map<SiteClass, Map<Imt,double[]>> passed in is consistent?
-    NetcdfUtils.checkHazardMapConsistency(d1, d2);
+    NetcdfUtils.checkBoundingHazard(d1, d2);
 
     if (frac == 0.0) {
       // target is the same as d1
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/NetcdfUtils.java b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/NetcdfUtils.java
index e60dad5d79154c8c994715bbee9039072f50ac5b..0424989edb43d16b57a297123904843e591bf666 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/NetcdfUtils.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/NetcdfUtils.java
@@ -1,6 +1,8 @@
 package gov.usgs.earthquake.nshmp.netcdf.reader;
 
+import static com.google.common.base.Preconditions.checkArgument;
 import static com.google.common.base.Preconditions.checkNotNull;
+import static com.google.common.base.Preconditions.checkState;
 
 import java.io.IOException;
 import java.util.Arrays;
@@ -10,6 +12,7 @@ import com.google.common.collect.Maps;
 import com.google.common.math.DoubleMath;
 
 import gov.usgs.earthquake.nshmp.data.XySequence;
+import gov.usgs.earthquake.nshmp.geo.Location;
 import gov.usgs.earthquake.nshmp.geo.LocationList;
 import gov.usgs.earthquake.nshmp.gmm.Imt;
 import gov.usgs.earthquake.nshmp.netcdf.SiteClass;
@@ -191,58 +194,61 @@ public class NetcdfUtils {
   }
 
   /**
-   * Confirm that two maps are consistent. In other words, confirm that they
-   * have the same key-sets and value types and, if values are of XySequence,
-   * that they have the same x-values. Recurses into nested maps (e.g.
-   * Map<SiteClass, Map<Imt, XySequence>)
+   * Checks that every bounding hazard map contains the same site classes,
+   * IMTs per site class, and ground motion values per IMT as the reference.
    * 
-   * @param map1
-   * @param map2
+   * @param boundingHazards The bounding hazards to validate
+   * @param location The reference location to validate against
    */
-  static void checkHazardMapConsistency(Map<?, ?> map1, Map<?, ?> map2) {
+  static void checkBoundingHazards(
+      Map<Location, Map<SiteClass, Map<Imt, XySequence>>> boundingHazards,
+      Location location) {
+    checkArgument(boundingHazards.containsKey(location), "Location not in bounding hazards");
+    boundingHazards.keySet().stream()
+        .filter(loc -> !loc.equals(location))
+        .forEach(loc -> {
+          checkBoundingHazard(boundingHazards.get(location), boundingHazards.get(loc));
+        });
+  }
 
-    // Is IllegalArgumentException the appropriate exception here?
+  /**
+   * Checks that two bounding hazard maps contain the same site classes, IMTs
+   * per site class, and ground motion values per IMT.
+   * 
+   * @param a Bounding hazard map A
+   * @param b Bounding hazard map B
+   */
+  static void checkBoundingHazard(
+      Map<SiteClass, Map<Imt, XySequence>> a,
+      Map<SiteClass, Map<Imt, XySequence>> b) {
+    checkState(a.size() == b.size(), "Maps are not the same size");
+    checkState(a.keySet().containsAll(b.keySet()), "Site classes do not match");
+    a.keySet().forEach(key -> checkHazards(a.get(key), b.get(key)));
+  }
 
-    // if (!Arrays.equals(map1.keySet().toArray(), map2.keySet().toArray())) {
-    if (map1.keySet().size() != map2.keySet().size()) {
-      throw new IllegalArgumentException("Maps do not have the same number of keys");
-    }
-    for (var key : map1.keySet()) {
-      if (!map2.containsKey(key)) {
-        throw new IllegalArgumentException("Maps do not share the same key set");
-      }
-      var value1 = map1.get(key);
-      var value2 = map2.get(key);
-      if (value1.getClass() != value2.getClass()) {
-        throw new IllegalArgumentException("Classes of map values are not consistent");
-      }
-      // try {
-      // @SuppressWarnings("unused")
-      // var value1Keys = ((Map<?,?>) value1).keySet();
-      // // no error getting keySet, so this is a map, check it
-      // checkHazardMapConsistency((Map<?, ?>) value1, (Map<?, ?>) value2);
-      // } catch (ClassCastException e) {
-      // // do nothing, value1 and value2 are not maps, continue
-      // System.err.println(e.getMessage());
-      // System.err.println(" "+e.getClass().getSimpleName());
-      // }
-      if (value1 instanceof Map) {
-        checkHazardMapConsistency((Map<?, ?>) value1, (Map<?, ?>) value2);
-      } else if (value1 instanceof XySequence) {
-        // We could directly compare memory location if XySequences are built
-        // using XySequence.copyOf()
-        if (!Arrays.equals(
-            ((XySequence) value1).xValues().toArray(),
-            ((XySequence) value2).xValues().toArray())) {
-          throw new IllegalArgumentException("Hazard curves xValues are not the same");
-        }
-      } else {
-        // we shouldn't get here for hazard maps
-        throw new IllegalArgumentException(
-            "Unexpected value type: " + value1.getClass().getSimpleName());
-      }
+  /**
+   * Checks that two hazard maps contain the same IMTs and the same ground
+   * motion values per IMT.
+   * 
+   * @param a Hazard A
+   * @param b Hazard B
+   */
+  static void checkHazards(Map<Imt, XySequence> a, Map<Imt, XySequence> b) {
+    checkState(a.size() == b.size(), "Maps are not the same size");
+    checkState(a.keySet().containsAll(b.keySet()), "IMTs do not match");
+    a.keySet().forEach(key -> checkGroundMotions(a.get(key), b.get(key)));
+  }
 
-    }
+  /**
+   * Checks that two sequences have identical x-values.
+   * 
+   * @param a Sequence A
+   * @param b Sequence B
+   */
+  static void checkGroundMotions(XySequence a, XySequence b) {
+    checkState(
+        Arrays.equals(a.xValues().toArray(), b.xValues().toArray()),
+        "Hazard curves xValues are not the same");
   }
 
   static class Key {
diff --git a/src/test/java/gov/usgs/earthquake/nshmp/netcdf/reader/NetcdfUtilsTests.java b/src/test/java/gov/usgs/earthquake/nshmp/netcdf/reader/NetcdfUtilsTests.java
index 20fab302911e82d8635d1475e6164ebc0b413fd7..2eaacb20513527badeb611197c85a9d46747e7c2 100644
--- a/src/test/java/gov/usgs/earthquake/nshmp/netcdf/reader/NetcdfUtilsTests.java
+++ b/src/test/java/gov/usgs/earthquake/nshmp/netcdf/reader/NetcdfUtilsTests.java
@@ -90,8 +90,8 @@ class NetcdfUtilsTests {
     // add another map
     mapDiffScSize.putAll(mapDiffImtSize);
     mapDiffScSize.put(SiteClass.A, mapDiffScSize.get(siteClasses.get(0)));
-    mapDiffImtSize.get(siteClasses.get(0)).put(Imt.SA10P0,
-        XySequence.create(imlValues, new double[N_IML]));
+    mapDiffImtSize.get(siteClasses.get(0))
+        .put(Imt.SA10P0, XySequence.create(imlValues, new double[N_IML]));
 
   }
 
@@ -107,7 +107,8 @@ class NetcdfUtilsTests {
       assertEquals(i, NetcdfUtils.getIdxLTEQ(LONGITUDES, LONGITUDES[i] + 0.025));
     }
 
-    assertEquals(LONGITUDES.length - 2,
+    assertEquals(
+        LONGITUDES.length - 2,
         NetcdfUtils.getIdxLTEQ(LONGITUDES, LONGITUDES[LONGITUDES.length - 1]));
 
     for (int i = 0; i < LATITUDES.length - 1; i++) {
@@ -150,25 +151,30 @@ class NetcdfUtilsTests {
   @Test
   final void checkMapConsistencyTests() {
     assertDoesNotThrow(() -> {
-      NetcdfUtils.checkHazardMapConsistency(mapHaz0, mapHaz0);
+      NetcdfUtils.checkBoundingHazard(mapHaz0, mapHaz0);
     });
+
     assertDoesNotThrow(() -> {
-      NetcdfUtils.checkHazardMapConsistency(mapHaz0, mapHaz1);
+      NetcdfUtils.checkBoundingHazard(mapHaz0, mapHaz1);
     });
+
     assertDoesNotThrow(() -> {
-      NetcdfUtils.checkHazardMapConsistency(mapHaz0, mapHazTarget);
+      NetcdfUtils.checkBoundingHazard(mapHaz0, mapHazTarget);
     });
+
     // compare maps with different size at first level (SiteClass)
-    assertThrows(IllegalArgumentException.class, () -> {
-      NetcdfUtils.checkHazardMapConsistency(mapHaz0, mapDiffScSize);
+    assertThrows(IllegalStateException.class, () -> {
+      NetcdfUtils.checkBoundingHazard(mapHaz0, mapDiffScSize);
     });
+
     // compare maps with different size at second level (Imt)
-    assertThrows(IllegalArgumentException.class, () -> {
-      NetcdfUtils.checkHazardMapConsistency(mapHaz0, mapDiffImtSize);
+    assertThrows(IllegalStateException.class, () -> {
+      NetcdfUtils.checkBoundingHazard(mapHaz0, mapDiffImtSize);
     });
+
     // compare maps with a single different Iml value
-    assertThrows(IllegalArgumentException.class, () -> {
-      NetcdfUtils.checkHazardMapConsistency(mapHaz0, mapDiffImlValue);
+    assertThrows(IllegalStateException.class, () -> {
+      NetcdfUtils.checkBoundingHazard(mapHaz0, mapDiffImlValue);
     });
   }
 }
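
For reference, a minimal, hypothetical sketch (not part of the diff) of how the renamed validation can be exercised directly. It uses only the types already referenced above (SiteClass, Imt, XySequence) and must live in the reader package to reach the package-private NetcdfUtils.checkBoundingHazard; the class name and data values are illustrative.

package gov.usgs.earthquake.nshmp.netcdf.reader;

import java.util.HashMap;
import java.util.Map;

import gov.usgs.earthquake.nshmp.data.XySequence;
import gov.usgs.earthquake.nshmp.gmm.Imt;
import gov.usgs.earthquake.nshmp.netcdf.SiteClass;

// Hypothetical sketch: builds two small bounding hazard maps and validates them.
class BoundingHazardCheckSketch {

  public static void main(String[] args) {
    double[] imls = { 0.01, 0.05, 0.1 };

    // Two hazard maps that share site classes, IMTs, and IML (x) values;
    // only the y-values differ, so validation passes.
    Map<SiteClass, Map<Imt, XySequence>> a = new HashMap<>();
    Map<SiteClass, Map<Imt, XySequence>> b = new HashMap<>();

    Map<Imt, XySequence> imtsA = new HashMap<>();
    imtsA.put(Imt.PGA, XySequence.create(imls, new double[] { 0.2, 0.1, 0.01 }));
    a.put(SiteClass.A, imtsA);

    Map<Imt, XySequence> imtsB = new HashMap<>();
    imtsB.put(Imt.PGA, XySequence.create(imls, new double[] { 0.3, 0.15, 0.02 }));
    b.put(SiteClass.A, imtsB);

    // No exception: the maps are consistent
    NetcdfUtils.checkBoundingHazard(a, b);

    // Adding an IMT to only one map makes the nested map sizes differ, so the
    // same call would now throw an IllegalStateException.
    imtsB.put(Imt.SA10P0, XySequence.create(imls, new double[3]));
    // NetcdfUtils.checkBoundingHazard(a, b); // throws IllegalStateException
  }
}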