From 0b03885256cbbfcdce2706cc4555b453631f955e Mon Sep 17 00:00:00 2001
From: Jason Altekruse <jaltekruse@contractor.usgs.gov>
Date: Wed, 11 Mar 2020 10:48:39 -0600
Subject: [PATCH] validate hazard maps, with tests

---
 .../nshmp/netcdf/reader/BoundingHazards.java  | 21 ++++--
 .../nshmp/netcdf/reader/NetcdfUtils.java      | 67 +++++++++++++++++--
 .../nshmp/netcdf/reader/NetcdfUtilsTests.java | 45 +++++++++++--
 3 files changed, 119 insertions(+), 14 deletions(-)

diff --git a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/BoundingHazards.java b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/BoundingHazards.java
index 971b6b3..85cc6ae 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/BoundingHazards.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/BoundingHazards.java
@@ -164,6 +164,15 @@ public class BoundingHazards {
       boundingHazardMaps.put(
           boundingLocations.get(3),
           mapHazardsFromArray(aHazards.section(origin, shape)));
+
+      // validate boundingHazardMaps
+      NetcdfUtils.checkHazardMapConsistency(boundingHazardMaps.get(boundingLocations.get(0)),
+          boundingHazardMaps.get(boundingLocations.get(1)));
+      NetcdfUtils.checkHazardMapConsistency(boundingHazardMaps.get(boundingLocations.get(0)),
+          boundingHazardMaps.get(boundingLocations.get(2)));
+      NetcdfUtils.checkHazardMapConsistency(boundingHazardMaps.get(boundingLocations.get(0)),
+          boundingHazardMaps.get(boundingLocations.get(3)));
+
     } catch (IOException | InvalidRangeException e) {
       // shouldn't get here because the reader was initialized with a valid and
       // existing netCDF file. Is the only way to trigger this error is to
@@ -228,15 +237,19 @@ public class BoundingHazards {
    *
    * @param frac fractional distance between p1 and p2 to target point
    */
-  private static Map<SiteClass, Map<Imt, XySequence>> getTargetData(
+  static Map<SiteClass, Map<Imt, XySequence>> getTargetData(
       Map<SiteClass, Map<Imt, XySequence>> d1,
       Map<SiteClass, Map<Imt, XySequence>> d2,
       double frac) {
 
     // do we need better checking here? or is it safe to assume that every
     // Map<SiteClass, Map<Imt,double[]>> passed in is consistent?
-    if (d1.size() != d2.size()) {
-      throw new IllegalArgumentException("Array size disagreement, cannot interpolate");
-    }
+    // Check that Maps d1 and d2 are the same size (i.e. have the same number of
+    // SiteClass-Map pairs)
+    // if (d1.size() != d2.size()) {
+    //   throw new IllegalArgumentException("Map size disagreement, cannot
+    //   interpolate");
+    // }
+    NetcdfUtils.checkHazardMapConsistency(d1, d2);
 
     if (frac == 0.0) {
       // target is the same as d1
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/NetcdfUtils.java b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/NetcdfUtils.java
index c8eff0d..e60dad5 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/NetcdfUtils.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/NetcdfUtils.java
@@ -49,9 +49,9 @@ public class NetcdfUtils {
   }
 
   /**
-   * Returns a {@code double[]} from a NetCDF group
+   * Returns a {@code double[]} from a netCDF group
    *
-   * @param group The NetCDF group
+   * @param group The netCDF group
    * @param key The key to read from the group
    * @throws IOException
    */
@@ -60,9 +60,9 @@ public class NetcdfUtils {
   }
 
   /**
-   * Returns a {@code int[]} from a NetCDF group
+   * Returns a {@code int[]} from a netCDF group
    *
-   * @param group The NetCDF group
+   * @param group The netCDF group
    * @param key The key to read from the group
    * @throws IOException
    */
@@ -71,9 +71,9 @@ public class NetcdfUtils {
   }
 
   /**
-   * Get a 1D array from a NetCDF group.
+   * Get a 1D array from a netCDF group.
    *
-   * @param group The NetCDF group
+   * @param group The netCDF group
    * @param key The key to read from the group
    * @param dataType The data type to read
    * @throws IOException
    */
@@ -190,6 +190,61 @@ public class NetcdfUtils {
     return tMap;
   }
 
+  /**
+   * Confirm that two maps are consistent. In other words, confirm that they
+   * have the same key-sets and value types and, if values are of XySequence,
+   * that they have the same x-values. Recurses into nested maps (e.g.
+   * Map<SiteClass, Map<Imt, XySequence>)
+   *
+   * @param map1
+   * @param map2
+   */
+  static void checkHazardMapConsistency(Map<?, ?> map1, Map<?, ?> map2) {
+
+    // Is IllegalArgumentException the appropriate exception here?
+
+    // if (!Arrays.equals(map1.keySet().toArray(), map2.keySet().toArray())) {
+    if (map1.keySet().size() != map2.keySet().size()) {
+      throw new IllegalArgumentException("Maps do not have the same number of keys");
+    }
+    for (var key : map1.keySet()) {
+      if (!map2.containsKey(key)) {
+        throw new IllegalArgumentException("Maps do not share the same key set");
+      }
+      var value1 = map1.get(key);
+      var value2 = map2.get(key);
+      if (value1.getClass() != value2.getClass()) {
+        throw new IllegalArgumentException("Classes of map values are not consistent");
+      }
+      // try {
+      // @SuppressWarnings("unused")
+      // var value1Keys = ((Map<?,?>) value1).keySet();
+      // // no error getting keySet, so this is a map, check it
+      // checkHazardMapConsistency((Map<?, ?>) value1, (Map<?, ?>) value2);
+      // } catch (ClassCastException e) {
+      // // do nothing, value1 and value2 are not maps, continue
+      // System.err.println(e.getMessage());
+      // System.err.println(" "+e.getClass().getSimpleName());
+      // }
+      if (value1 instanceof Map) {
+        checkHazardMapConsistency((Map<?, ?>) value1, (Map<?, ?>) value2);
+      } else if (value1 instanceof XySequence) {
+        // We could directly compare memory location if XySequences are built
+        // using XySequence.copyOf()
+        if (!Arrays.equals(
+            ((XySequence) value1).xValues().toArray(),
+            ((XySequence) value2).xValues().toArray())) {
+          throw new IllegalArgumentException("Hazard curves xValues are not the same");
+        }
+      } else {
+        // we shouldn't get here for hazard maps
+        throw new IllegalArgumentException(
+            "Unexpected value type: " + value1.getClass().getSimpleName());
+      }
+
+    }
+  }
+
   static class Key {
     static final String AEPS = "AEPs";
     static final String IMLS = "Imls";
diff --git a/src/test/java/gov/usgs/earthquake/nshmp/netcdf/reader/NetcdfUtilsTests.java b/src/test/java/gov/usgs/earthquake/nshmp/netcdf/reader/NetcdfUtilsTests.java
index 923911f..20fab30 100644
--- a/src/test/java/gov/usgs/earthquake/nshmp/netcdf/reader/NetcdfUtilsTests.java
+++ b/src/test/java/gov/usgs/earthquake/nshmp/netcdf/reader/NetcdfUtilsTests.java
@@ -1,5 +1,6 @@
 package gov.usgs.earthquake.nshmp.netcdf.reader;
 
+import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
 import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertThrows;
 
@@ -15,7 +16,7 @@ import gov.usgs.earthquake.nshmp.data.XySequence;
 import gov.usgs.earthquake.nshmp.gmm.Imt;
 import gov.usgs.earthquake.nshmp.netcdf.SiteClass;
 
-public class NetcdfUtilsTests {
+class NetcdfUtilsTests {
 
   private static final double[] LONGITUDES = new double[] {
       -106.00, -105.95, -105.90, -105.85, -105.80, -105.75, -105.70, -105.65, -105.60, -105.55,
@@ -33,6 +34,7 @@ public class NetcdfUtilsTests {
   static Map<SiteClass, Map<Imt, XySequence>> mapHazTarget = Maps.newEnumMap(SiteClass.class);
   static Map<SiteClass, Map<Imt, XySequence>> mapDiffImtSize = Maps.newEnumMap(SiteClass.class);
   static Map<SiteClass, Map<Imt, XySequence>> mapDiffScSize = Maps.newEnumMap(SiteClass.class);
+  static Map<SiteClass, Map<Imt, XySequence>> mapDiffImlValue = Maps.newEnumMap(SiteClass.class);
 
   private static List<SiteClass> siteClasses = new ArrayList<SiteClass>();
   private static List<Imt> imts = new ArrayList<Imt>();
@@ -55,6 +57,7 @@ public class NetcdfUtilsTests {
       Map<Imt, XySequence> imtMap1 = Maps.newEnumMap(Imt.class);
       Map<Imt, XySequence> imtMapTarget = Maps.newEnumMap(Imt.class);
       Map<Imt, XySequence> imtMapBiggerErr = Maps.newEnumMap(Imt.class);
+      Map<Imt, XySequence> imtMapDiffIml = Maps.newEnumMap(Imt.class);
 
       for (Imt imt : imts) {
         double[] zeros = new double[N_IML];
         double[] ones = new double[N_IML];
@@ -67,11 +70,21 @@ public class NetcdfUtilsTests {
         imtMap1.put(imt, XySequence.create(imlValues, ones));
         imtMapTarget.put(imt, XySequence.create(imlValues, half));
         imtMapBiggerErr.put(imt, XySequence.create(imlValues, ones));
+
+        // insert different Iml value
+        if (sc == siteClasses.get(siteClasses.size() - 1) && imt == imts.get(imts.size() - 1)) {
+          double[] imlValuesAlt = imlValues.clone();
+          imlValuesAlt[imlValuesAlt.length - 1] += 0.1;
+          imtMapDiffIml.put(imt, XySequence.create(imlValuesAlt, ones));
+        } else {
+          imtMapDiffIml.put(imt, XySequence.create(imlValues, ones));
+        }
       }
       mapHaz0.put(sc, imtMap0);
       mapHaz1.put(sc, imtMap1);
       mapHazTarget.put(sc, imtMapTarget);
       mapDiffImtSize.put(sc, imtMapBiggerErr);
+      mapDiffImlValue.put(sc, imtMapDiffIml);
     }
 
     // add another map
@@ -83,7 +96,7 @@ public class NetcdfUtilsTests {
   }
 
   @Test
-  public final void testGetIdxLTEQ() {
+  final void testGetIdxLTEQ() {
     // target is out of range, expect IAE
     assertThrows(IllegalArgumentException.class, () -> {
       NetcdfUtils.getIdxLTEQ(LONGITUDES, -100.0);
     });
@@ -110,7 +123,7 @@ public class NetcdfUtilsTests {
   }
 
   @Test
-  public final void testCalcGridFrac() {
+  final void testCalcGridFrac() {
     double f = 0.13;
     int i = 4;
     assertEquals(f,
@@ -123,7 +136,7 @@ public class NetcdfUtilsTests {
   }
 
   @Test
-  public final void testLinearInterpolate() {
+  final void testLinearInterpolate() {
     assertEquals(mapHazTarget, NetcdfUtils.linearInterpolate(mapHaz0, mapHaz1, FRAC));
     // attempt to interpolate maps of difference sizes
     assertThrows(IllegalArgumentException.class, () -> {
@@ -134,4 +147,28 @@ public class NetcdfUtilsTests {
     });
   }
 
+  @Test
+  final void checkMapConsistencyTests() {
+    assertDoesNotThrow(() -> {
+      NetcdfUtils.checkHazardMapConsistency(mapHaz0, mapHaz0);
+    });
+    assertDoesNotThrow(() -> {
+      NetcdfUtils.checkHazardMapConsistency(mapHaz0, mapHaz1);
+    });
+    assertDoesNotThrow(() -> {
+      NetcdfUtils.checkHazardMapConsistency(mapHaz0, mapHazTarget);
+    });
+    // compare maps with different size at first level (SiteClass)
+    assertThrows(IllegalArgumentException.class, () -> {
+      NetcdfUtils.checkHazardMapConsistency(mapHaz0, mapDiffScSize);
+    });
+    // compare maps with different size at second level (Imt)
+    assertThrows(IllegalArgumentException.class, () -> {
+      NetcdfUtils.checkHazardMapConsistency(mapHaz0, mapDiffImtSize);
+    });
+    // compare maps with a single different Iml value
+    assertThrows(IllegalArgumentException.class, () -> {
+      NetcdfUtils.checkHazardMapConsistency(mapHaz0, mapDiffImlValue);
+    });
+  }
 }
-- 
GitLab