Commit 737debbe authored by Clayton, Brandon Scott

update map checking

parent a06102bb
1 merge request: !22 More tests
BoundingHazards.java

@@ -166,12 +166,7 @@ public class BoundingHazards {
           mapHazardsFromArray(aHazards.section(origin, shape)));

       // validate boundingHazardMaps
-      NetcdfUtils.checkHazardMapConsistency(boundingHazardMaps.get(boundingLocations.get(0)),
-          boundingHazardMaps.get(boundingLocations.get(1)));
-      NetcdfUtils.checkHazardMapConsistency(boundingHazardMaps.get(boundingLocations.get(0)),
-          boundingHazardMaps.get(boundingLocations.get(2)));
-      NetcdfUtils.checkHazardMapConsistency(boundingHazardMaps.get(boundingLocations.get(0)),
-          boundingHazardMaps.get(boundingLocations.get(3)));
+      NetcdfUtils.checkBoundingHazards(boundingHazards);
     } catch (IOException | InvalidRangeException e) {
       // shouldn't get here because the reader was initialized with a valid and

@@ -243,7 +238,7 @@ public class BoundingHazards {
         double frac) {
     // do we need better checking here? or is it safe to assume that every
     // Map<SiteClass, Map<Imt,double[]>> passed in is consistent?
-    NetcdfUtils.checkHazardMapConsistency(d1, d2);
+    NetcdfUtils.checkBoundingHazard(d1, d2);
     if (frac == 0.0) {
       // target is the same as d1
...
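For reference, the single call above validates every bounding hazard map against the first one instead of three explicit pairwise checks. The second hunk sits in the path that blends two bounding hazard maps by a fractional distance frac; a minimal sketch of what that blend could look like for one pair of already-checked curves is shown below. The helper name interpolateCurve is hypothetical and not part of this commit; only XySequence.create and xValues() appear elsewhere in the diff, and yValues() is assumed to mirror xValues().

  // Hypothetical sketch, not part of this commit: linear blend of two
  // consistent hazard curves at fractional distance frac in [0, 1].
  static XySequence interpolateCurve(XySequence d1, XySequence d2, double frac) {
    double[] xs = d1.xValues().toArray(); // identical in d1 and d2 after the check
    double[] y1 = d1.yValues().toArray();
    double[] y2 = d2.yValues().toArray();
    double[] y = new double[y1.length];
    for (int i = 0; i < y.length; i++) {
      y[i] = y1[i] + frac * (y2[i] - y1[i]);
    }
    return XySequence.create(xs, y);
  }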
NetcdfUtils.java

 package gov.usgs.earthquake.nshmp.netcdf.reader;

+import static com.google.common.base.Preconditions.checkArgument;
 import static com.google.common.base.Preconditions.checkNotNull;

 import java.io.IOException;
 import java.util.Arrays;
 import java.util.Map;
+import java.util.stream.Collectors;

 import com.google.common.collect.Maps;
 import com.google.common.math.DoubleMath;

 import gov.usgs.earthquake.nshmp.data.XySequence;
+import gov.usgs.earthquake.nshmp.geo.Location;
 import gov.usgs.earthquake.nshmp.geo.LocationList;
 import gov.usgs.earthquake.nshmp.gmm.Imt;
 import gov.usgs.earthquake.nshmp.netcdf.SiteClass;
@@ -191,58 +194,58 @@ public class NetcdfUtils {
   }

   /**
-   * Confirm that two maps are consistent. In other words, confirm that they
-   * have the same key-sets and value types and, if values are of XySequence,
-   * that they have the same x-values. Recurses into nested maps (e.g.
-   * Map<SiteClass, Map<Imt, XySequence>>)
+   * Checks that all bounding hazard maps contain the same site classes, the
+   * same IMTs per site class, and the same ground motion values per IMT.
    *
-   * @param map1
-   * @param map2
+   * @param boundingHazards The bounding hazards
    */
-  static void checkHazardMapConsistency(Map<?, ?> map1, Map<?, ?> map2) {
-
-    // Is IllegalArgumentException the appropriate exception here?
-
-    // if (!Arrays.equals(map1.keySet().toArray(), map2.keySet().toArray())) {
-    if (map1.keySet().size() != map2.keySet().size()) {
-      throw new IllegalArgumentException("Maps do not have the same number of keys");
-    }
-    for (var key : map1.keySet()) {
-      if (!map2.containsKey(key)) {
-        throw new IllegalArgumentException("Maps do not share the same key set");
-      }
-      var value1 = map1.get(key);
-      var value2 = map2.get(key);
-      if (value1.getClass() != value2.getClass()) {
-        throw new IllegalArgumentException("Classes of map values are not consistent");
-      }
-      // try {
-      // @SuppressWarnings("unused")
-      // var value1Keys = ((Map<?,?>) value1).keySet();
-      // // no error getting keySet, so this is a map, check it
-      // checkHazardMapConsistency((Map<?, ?>) value1, (Map<?, ?>) value2);
-      // } catch (ClassCastException e) {
-      // // do nothing, value1 and value2 are not maps, continue
-      // System.err.println(e.getMessage());
-      // System.err.println("  " + e.getClass().getSimpleName());
-      // }
-      if (value1 instanceof Map) {
-        checkHazardMapConsistency((Map<?, ?>) value1, (Map<?, ?>) value2);
-      } else if (value1 instanceof XySequence) {
-        // We could directly compare memory location if XySequences are built
-        // using XySequence.copyOf()
-        if (!Arrays.equals(
-            ((XySequence) value1).xValues().toArray(),
-            ((XySequence) value2).xValues().toArray())) {
-          throw new IllegalArgumentException("Hazard curves xValues are not the same");
-        }
-      } else {
-        // we shouldn't get here for hazard maps
-        throw new IllegalArgumentException(
-            "Unexpected value type: " + value1.getClass().getSimpleName());
-      }
-    }
-  }
+  static void checkBoundingHazards(
+      Map<Location, Map<SiteClass, Map<Imt, XySequence>>> boundingHazards) {
+    var location = boundingHazards.keySet().stream().collect(Collectors.toList()).get(0);
+    boundingHazards.keySet().forEach(key -> {
+      checkBoundingHazard(boundingHazards.get(location), boundingHazards.get(key));
+    });
+  }
+
+  /**
+   * Checks that two bounding hazard maps contain the same site classes, the
+   * same IMTs per site class, and the same ground motion values per IMT.
+   *
+   * @param a Bounding hazard map A
+   * @param b Bounding hazard map B
+   */
+  static void checkBoundingHazard(
+      Map<SiteClass, Map<Imt, XySequence>> a,
+      Map<SiteClass, Map<Imt, XySequence>> b) {
+    checkArgument(a.size() == b.size(), "Maps are not the same size");
+    checkArgument(a.keySet().containsAll(b.keySet()), "Site classes do not match");
+    a.keySet().forEach(key -> checkHazards(a.get(key), b.get(key)));
+  }
+
+  /**
+   * Checks that two hazard maps contain the same IMTs and the same ground
+   * motion values per IMT.
+   *
+   * @param a Hazard A
+   * @param b Hazard B
+   */
+  static void checkHazards(Map<Imt, XySequence> a, Map<Imt, XySequence> b) {
+    checkArgument(a.size() == b.size(), "Maps are not the same size");
+    checkArgument(a.keySet().containsAll(b.keySet()), "IMTs do not match");
+    a.keySet().forEach(key -> checkGroundMotions(a.get(key), b.get(key)));
+  }
+
+  /**
+   * Checks that the x-values of two hazard curves are identical.
+   *
+   * @param a Sequence A
+   * @param b Sequence B
+   */
+  static void checkGroundMotions(XySequence a, XySequence b) {
+    checkArgument(
+        Arrays.equals(a.xValues().toArray(), b.xValues().toArray()),
+        "Hazard curves xValues are not the same");
+  }

   static class Key {
...
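As a usage note, Guava's checkArgument throws IllegalArgumentException on failure, so callers of the replacement checks see the same exception type the hand-rolled checks threw. Below is a minimal, hedged sketch of exercising checkHazards from the same package, reusing only the XySequence.create factory and Imt constants that already appear in this diff; the values are illustrative and java.util.EnumMap would need to be imported.

  // Hypothetical, same-package usage sketch; not part of this commit.
  double[] imls = { 0.01, 0.02, 0.04 };
  Map<Imt, XySequence> a = new EnumMap<>(Imt.class);
  Map<Imt, XySequence> b = new EnumMap<>(Imt.class);
  a.put(Imt.PGA, XySequence.create(imls, new double[imls.length]));
  b.put(Imt.PGA, XySequence.create(imls, new double[imls.length]));
  NetcdfUtils.checkHazards(a, b);  // same IMTs, same x-values: passes
  b.put(Imt.SA1P0, XySequence.create(imls, new double[imls.length]));
  NetcdfUtils.checkHazards(a, b);  // sizes now differ: throws IllegalArgumentException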
NetcdfUtilsTests.java

@@ -90,8 +90,8 @@ class NetcdfUtilsTests {
     // add another map
     mapDiffScSize.putAll(mapDiffImtSize);
     mapDiffScSize.put(SiteClass.A, mapDiffScSize.get(siteClasses.get(0)));
-    mapDiffImtSize.get(siteClasses.get(0)).put(Imt.SA10P0,
-        XySequence.create(imlValues, new double[N_IML]));
+    mapDiffImtSize.get(siteClasses.get(0))
+        .put(Imt.SA10P0, XySequence.create(imlValues, new double[N_IML]));
   }

@@ -107,7 +107,8 @@ class NetcdfUtilsTests {
       assertEquals(i, NetcdfUtils.getIdxLTEQ(LONGITUDES, LONGITUDES[i] + 0.025));
     }
-    assertEquals(LONGITUDES.length - 2,
+    assertEquals(
+        LONGITUDES.length - 2,
         NetcdfUtils.getIdxLTEQ(LONGITUDES, LONGITUDES[LONGITUDES.length - 1]));

     for (int i = 0; i < LATITUDES.length - 1; i++) {

@@ -150,25 +151,30 @@ class NetcdfUtilsTests {
   @Test
   final void checkMapConsistencyTests() {
     assertDoesNotThrow(() -> {
-      NetcdfUtils.checkHazardMapConsistency(mapHaz0, mapHaz0);
+      NetcdfUtils.checkBoundingHazard(mapHaz0, mapHaz0);
     });

     assertDoesNotThrow(() -> {
-      NetcdfUtils.checkHazardMapConsistency(mapHaz0, mapHaz1);
+      NetcdfUtils.checkBoundingHazard(mapHaz0, mapHaz1);
     });

     assertDoesNotThrow(() -> {
-      NetcdfUtils.checkHazardMapConsistency(mapHaz0, mapHazTarget);
+      NetcdfUtils.checkBoundingHazard(mapHaz0, mapHazTarget);
     });

     // compare maps with different size at first level (SiteClass)
     assertThrows(IllegalArgumentException.class, () -> {
-      NetcdfUtils.checkHazardMapConsistency(mapHaz0, mapDiffScSize);
+      NetcdfUtils.checkBoundingHazard(mapHaz0, mapDiffScSize);
     });

     // compare maps with different size at second level (Imt)
     assertThrows(IllegalArgumentException.class, () -> {
-      NetcdfUtils.checkHazardMapConsistency(mapHaz0, mapDiffImtSize);
+      NetcdfUtils.checkBoundingHazard(mapHaz0, mapDiffImtSize);
     });

     // compare maps with a single different Iml value
     assertThrows(IllegalArgumentException.class, () -> {
-      NetcdfUtils.checkHazardMapConsistency(mapHaz0, mapDiffImlValue);
+      NetcdfUtils.checkBoundingHazard(mapHaz0, mapDiffImlValue);
     });
   }
 }
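The updated tests cover checkBoundingHazard and the levels below it; the Location-keyed checkBoundingHazards entry point is not exercised here. A hedged sketch of what a follow-on test could look like, reusing the fixtures above, is shown below. The Location.create factory and its argument order are assumed from nshmp-lib, the coordinates are arbitrary, and java.util.HashMap would need to be imported.

  // Hypothetical follow-on test; not part of this commit.
  @Test
  final void checkBoundingHazardsTests() {
    var bounding = new HashMap<Location, Map<SiteClass, Map<Imt, XySequence>>>();
    bounding.put(Location.create(39.0, -105.0), mapHaz0);  // factory and argument order assumed
    bounding.put(Location.create(39.1, -105.0), mapHaz1);
    assertDoesNotThrow(() -> NetcdfUtils.checkBoundingHazards(bounding));

    // an entry with a single different IML value should fail the check
    bounding.put(Location.create(39.2, -105.0), mapDiffImlValue);
    assertThrows(IllegalArgumentException.class,
        () -> NetcdfUtils.checkBoundingHazards(bounding));
  }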