Skip to content
Snippets Groups Projects
Commit 425a8a82 authored by Clayton, Brandon Scott's avatar Clayton, Brandon Scott
Browse files

add wrapper classes

parent b344d9b1
No related branches found
No related tags found
1 merge request!24Wrapper classes
Showing
with 524 additions and 330 deletions
...@@ -5,7 +5,6 @@ import static com.google.common.base.Preconditions.checkArgument; ...@@ -5,7 +5,6 @@ import static com.google.common.base.Preconditions.checkArgument;
import java.io.IOException; import java.io.IOException;
import java.nio.file.Files; import java.nio.file.Files;
import java.nio.file.Path; import java.nio.file.Path;
import java.util.Map;
import java.util.logging.Level; import java.util.logging.Level;
import java.util.logging.Logger; import java.util.logging.Logger;
...@@ -13,7 +12,10 @@ import gov.usgs.earthquake.nshmp.data.XySequence; ...@@ -13,7 +12,10 @@ import gov.usgs.earthquake.nshmp.data.XySequence;
import gov.usgs.earthquake.nshmp.geo.Location; import gov.usgs.earthquake.nshmp.geo.Location;
import gov.usgs.earthquake.nshmp.gmm.Imt; import gov.usgs.earthquake.nshmp.gmm.Imt;
import gov.usgs.earthquake.nshmp.netcdf.reader.BoundingHazards; import gov.usgs.earthquake.nshmp.netcdf.reader.BoundingHazards;
import gov.usgs.earthquake.nshmp.netcdf.reader.BoundingHazardsReader;
import gov.usgs.earthquake.nshmp.netcdf.reader.NetcdfCoordinates; import gov.usgs.earthquake.nshmp.netcdf.reader.NetcdfCoordinates;
import gov.usgs.earthquake.nshmp.netcdf.reader.StaticHazard;
import gov.usgs.earthquake.nshmp.netcdf.reader.StaticHazards;
import ucar.nc2.dataset.NetcdfDataset; import ucar.nc2.dataset.NetcdfDataset;
...@@ -98,8 +100,8 @@ public class NshmNetcdfReader { ...@@ -98,8 +100,8 @@ public class NshmNetcdfReader {
* *
* @param site The location to get bounding hazards * @param site The location to get bounding hazards
*/ */
public Map<Location, Map<SiteClass, Map<Imt, XySequence>>> boundingHazards(Location site) { public BoundingHazards boundingHazards(Location site) {
return BoundingHazards.boundingHazards(this, site); return BoundingHazardsReader.boundingHazards(this, site);
} }
/** /**
...@@ -108,7 +110,7 @@ public class NshmNetcdfReader { ...@@ -108,7 +110,7 @@ public class NshmNetcdfReader {
* *
* @param site The site to get the hazard curves * @param site The site to get the hazard curves
*/ */
public Map<SiteClass, Map<Imt, XySequence>> hazard(Location site) { public StaticHazards hazard(Location site) {
return boundingHazards(site).get(site); return boundingHazards(site).get(site);
} }
...@@ -118,7 +120,7 @@ public class NshmNetcdfReader { ...@@ -118,7 +120,7 @@ public class NshmNetcdfReader {
* @param site The site to get hazard curves * @param site The site to get hazard curves
* @param siteClass The site class * @param siteClass The site class
*/ */
public Map<Imt, XySequence> hazard(Location site, SiteClass siteClass) { public StaticHazard hazard(Location site, SiteClass siteClass) {
checkArgument( checkArgument(
coords.siteClasses().contains(siteClass), coords.siteClasses().contains(siteClass),
"Site class [" + siteClass + "] not supported"); "Site class [" + siteClass + "] not supported");
......
package gov.usgs.earthquake.nshmp.netcdf.reader; package gov.usgs.earthquake.nshmp.netcdf.reader;
import java.io.IOException; import static com.google.common.base.Preconditions.checkArgument;
import java.util.EnumMap; import static com.google.common.base.Preconditions.checkState;
import java.util.HashMap; import java.util.HashMap;
import java.util.Iterator;
import java.util.Map; import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import com.google.common.collect.Maps;
import gov.usgs.earthquake.nshmp.data.XySequence;
import gov.usgs.earthquake.nshmp.geo.Location; import gov.usgs.earthquake.nshmp.geo.Location;
import gov.usgs.earthquake.nshmp.geo.LocationList;
import gov.usgs.earthquake.nshmp.gmm.Imt;
import gov.usgs.earthquake.nshmp.netcdf.NshmNetcdfReader;
import gov.usgs.earthquake.nshmp.netcdf.SiteClass;
import gov.usgs.earthquake.nshmp.netcdf.reader.NetcdfUtils.Key;
import ucar.ma2.Array;
import ucar.ma2.DataType;
import ucar.ma2.InvalidRangeException;
import ucar.nc2.Group;
import ucar.nc2.Variable;
import ucar.nc2.dataset.NetcdfDataset;
/*
* Container for gridded hazard curves at four closest grid points to target
*/
public class BoundingHazards {
private final NshmNetcdfReader netcdf;
private final NetcdfCoordinates coords;
private Map<Location, Map<SiteClass, Map<Imt, XySequence>>> boundingHazards;
private LocationList boundingLocations;
BoundingHazards(NshmNetcdfReader netcdf, Location site) {
this.netcdf = netcdf;
this.coords = netcdf.coordinates();
coords.contains(site);
setBoundingHazards(site);
}
/**
   * Returns the bounding hazards at four closest grid points to target.
*
* @param netcdf The {@code Netcdf}
* @param site The site to get bounding hazards
* @param baseGroup The Netcdf base group
*/
public static Map<Location, Map<SiteClass, Map<Imt, XySequence>>> boundingHazards(
NshmNetcdfReader netcdf,
Location site) {
return new BoundingHazards(netcdf, site).boundingHazards;
}
LocationList boundingLocations() { public class BoundingHazards implements Iterable<Entry<Location, StaticHazards>> {
return LocationList.copyOf(boundingLocations);
}
private void setBoundingHazards(Location site) {
var longitudes = coords.longitudes();
var latitudes = coords.latitudes();
var idxLonLL = NetcdfUtils.getIdxLTEQ(longitudes, site.longitude);
var idxLatLL = NetcdfUtils.getIdxLTEQ(latitudes, site.latitude);
var lonLeft = longitudes[idxLonLL]; private final Map<Location, StaticHazards> boundingHazards;
var lonRight = longitudes[idxLonLL + 1];
var latLower = latitudes[idxLatLL];
var latUpper = latitudes[idxLatLL + 1];
boundingLocations = LocationList.builder() BoundingHazards(Map<Location, StaticHazards> boundingHazards) {
.add(latLower, lonLeft) this.boundingHazards = boundingHazards;
.add(latUpper, lonLeft) }
.add(latUpper, lonRight)
.add(latLower, lonRight)
.build();
boundingHazards = extractHazardsAt(idxLonLL, idxLatLL);
var fracLon = NetcdfUtils.calcGridFrac(longitudes, idxLonLL, site.longitude); public Map<Location, StaticHazards> boundingHazards() {
var fracLat = NetcdfUtils.calcGridFrac(latitudes, idxLatLL, site.latitude); return Map.copyOf(boundingHazards);
boundingHazards.put( }
site,
calcTargetHazards(fracLon, fracLat));
// validate boundingHazards public boolean containsKey(Location location) {
NetcdfUtils.checkBoundingHazards(boundingHazards, boundingLocations.first()); return boundingHazards.containsKey(location);
} }
private Map<SiteClass, Map<Imt, XySequence>> calcTargetHazards(double fracLon, double fracLat) { public Set<Entry<Location, StaticHazards>> entrySet() {
var westTarget = getTargetData( return boundingHazards.entrySet();
boundingHazards.get(boundingLocations.get(0)), }
boundingHazards.get(boundingLocations.get(1)), fracLat);
var eastTarget = getTargetData( public StaticHazards get(Location location) {
boundingHazards.get(boundingLocations.get(3)), checkArgument(boundingHazards.containsKey(location), "Location [" + location + "] not found");
boundingHazards.get(boundingLocations.get(2)), fracLat); return boundingHazards.get(location);
}
return getTargetData(westTarget, eastTarget, fracLon); public Set<Location> keySet() {
return boundingHazards.keySet();
} }
/* public int size() {
* Return hazard curves for four closest grid points as a List of: return boundingHazards.size();
* Map<SiteClass, Map<Imt, XySequence(iml,hazard)>> }
*
* List order is clockwise from lower left corner: LL, UL, UR, LR, [T] with an
* empty slot for the interpolated target hazards
*/
private Map<Location, Map<SiteClass, Map<Imt, XySequence>>> extractHazardsAt(
int idxLonLL,
int idxLatLL) {
var boundingHazardMaps = new HashMap<Location, Map<SiteClass, Map<Imt, XySequence>>>(5);
try (NetcdfDataset ncd = NetcdfDataset.openDataset(netcdf.path().toString())) {
Group targetGroup = ncd.findGroup(netcdf.nshmGroup().baseGroup());
// TODO: rename variable in netCDF
Variable vHazards = targetGroup.findVariable(Key.AEPS);
// set up origin and shape arrays for reading hazard curves at four
// bounding
// (lon,lat) grid points
int[] origin = new int[] { 0, 0, idxLatLL, idxLonLL, 0 };
int[] shape = new int[] {
coords.siteClasses().size(),
coords.imts().size(),
2,
2,
coords.nIml()
};
// read data into Array for bounding grid points
var aHazards = vHazards.read(origin, shape);
// Array aHazards now has shape [nVs,nImt,2,2,nIml]
// ...so origin will now be [0,0,0,0,0] for LL grid point
// ...and shape of requested array is [nVs,nImt,1,1,nIml]
// Array aHazards is size 2 on the lon and lat dimensions
shape[2] = 1;
shape[3] = 1;
// TODO: XySequence instead of double[]
// Put data into hazard map for LL point
origin[2] = 0; // lat index position
origin[3] = 0; // lon index position
boundingHazardMaps.put(
boundingLocations.get(0),
mapHazardsFromArray(aHazards.section(origin, shape)));
// Put data into hazard map for UL point
origin[2] = 1; // lat index position
origin[3] = 0; // lon index position
boundingHazardMaps.put(
boundingLocations.get(1),
mapHazardsFromArray(aHazards.section(origin, shape)));
// Put data into hazard map for UR point
origin[2] = 1; // lat index position
origin[3] = 1; // lon index position
boundingHazardMaps.put(
boundingLocations.get(2),
mapHazardsFromArray(aHazards.section(origin, shape)));
// Put data into hazard map for LR point
origin[2] = 0; // lat index position
origin[3] = 1; // lon index position
boundingHazardMaps.put(
boundingLocations.get(3),
mapHazardsFromArray(aHazards.section(origin, shape)));
} catch (IOException | InvalidRangeException e) {
// shouldn't get here because the reader was initialized with a valid and
// existing netCDF file. Is the only way to trigger this error is to
// remove or corrupt the original netCDF file used to initialize the
// reader after initialization?
throw new RuntimeException("Could not read Netcdf file [" + netcdf.path() + "]");
}
// interpolate hazards to target point @Override
return boundingHazardMaps; public Iterator<Entry<Location, StaticHazards>> iterator() {
return entrySet().iterator();
}
public static Builder builder() {
return new Builder();
} }
/* public static class Builder {
* Read hazard curves from netCDF variable into map of hazards by SiteClass Map<Location, StaticHazards> boundingHazards;
* and Imt
* private Builder() {
* TODO: if target is on a grid point (or on a grid lat or lon), no need to boundingHazards = new HashMap<>();
* read 4 bounding points ?
*/
private Map<SiteClass, Map<Imt, XySequence>> mapHazardsFromArray(Array hazards) {
// hazards has had its lat and lon dimensions reduced (so it now is rank 3)
// hazards[SiteClass, Imt, Iml]
EnumMap<SiteClass, Map<Imt, XySequence>> vsImtHazardMap = Maps.newEnumMap(SiteClass.class);
for (int iSc = 0; iSc < coords.siteClasses().size(); iSc++) {
var sc = coords.siteClasses().get(iSc);
Map<Imt, XySequence> imtHazardMap = Maps.newEnumMap(Imt.class);
for (int iImt = 0; iImt < coords.imts().size(); iImt++) {
Imt imt = coords.imts().get(iImt);
// set origin and shape of double[] hazards to read
var origin = new int[] { iSc, iImt, 0 };
var shape = new int[] { 1, 1, coords.nIml() };
// try block needed for sectioning Array
try {
imtHazardMap.put(imt,
XySequence.create(
coords.imls().get(sc).get(imt),
(double[]) hazards.section(origin, shape).reduce()
.get1DJavaArray(DataType.DOUBLE)));
} catch (InvalidRangeException e) {
throw new RuntimeException(e.getMessage());
}
}
vsImtHazardMap.put(sc, imtHazardMap);
} }
return Maps.immutableEnumMap(vsImtHazardMap); public Builder put(Location location, StaticHazards staticHazards) {
} boundingHazards.put(location, staticHazards);
return this;
}
/* public BoundingHazards build() {
* Get data for target point checkState(!boundingHazards.isEmpty(), "Must add hazards");
* return new BoundingHazards(boundingHazards);
* @param d1 data at first point (p1)
*
* @param d2 data at second point (p2)
*
* @param frac fractional distance between p1 and p2 to target point
*/
static Map<SiteClass, Map<Imt, XySequence>> getTargetData(
Map<SiteClass, Map<Imt, XySequence>> d1,
Map<SiteClass, Map<Imt, XySequence>> d2,
double frac) {
// do we need better checking here? or is it safe to assume that every
// Map<SiteClass, Map<Imt,double[]>> passed in is consistent?
NetcdfUtils.checkBoundingHazard(d1, d2);
if (frac == 0.0) {
// target is the same as d1
return d1;
} else if (frac == 1.0) {
// target is the same as d2
return d2;
} else {
return NetcdfUtils.linearInterpolate(d1, d2, frac);
} }
} }
} }
package gov.usgs.earthquake.nshmp.netcdf.reader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
import gov.usgs.earthquake.nshmp.data.XySequence;
import gov.usgs.earthquake.nshmp.geo.Location;
import gov.usgs.earthquake.nshmp.geo.LocationList;
import gov.usgs.earthquake.nshmp.netcdf.NshmNetcdfReader;
import gov.usgs.earthquake.nshmp.netcdf.reader.NetcdfUtils.Key;
import ucar.ma2.Array;
import ucar.ma2.DataType;
import ucar.ma2.InvalidRangeException;
import ucar.nc2.dataset.NetcdfDataset;
/**
* Container for gridded hazard curves at four closest grid points to target
*
* @author U.S. Geological Survey
*/
public class BoundingHazardsReader {

  private final NshmNetcdfReader netcdf;
  private final NetcdfCoordinates coords;

  /* Hazards at the four bounding grid points plus the interpolated target. */
  private BoundingHazards boundingHazards;

  /* Bounding grid locations, clockwise from the lower left: LL, UL, UR, LR. */
  private final List<BoundingLocation> boundingLocations = new ArrayList<>();

  BoundingHazardsReader(NshmNetcdfReader netcdf, Location site) {
    this.netcdf = netcdf;
    this.coords = netcdf.coordinates();
    // NOTE(review): result ignored — presumably contains() validates the site
    // and throws when out of range; confirm, otherwise check the result here.
    coords.contains(site);
    setBoundingHazards(site);
  }

  /**
   * Returns the bounding hazards at the four closest grid points to a target
   * site, including the hazard interpolated at the site itself.
   *
   * @param netcdf The {@code NshmNetcdfReader}
   * @param site The site to get bounding hazards
   */
  public static BoundingHazards boundingHazards(
      NshmNetcdfReader netcdf,
      Location site) {
    return new BoundingHazardsReader(netcdf, site).boundingHazards;
  }

  /* Returns the bounding grid locations, clockwise from lower left. */
  LocationList boundingLocations() {
    var locations = boundingLocations.stream()
        .map(boundingLocation -> boundingLocation.location)
        .collect(Collectors.toList());
    return LocationList.copyOf(locations);
  }

  /**
   * Get data for target point, linearly interpolated between two points.
   *
   * @param d1 data at first point (p1)
   * @param d2 data at second point (p2)
   * @param frac fractional distance between p1 and p2 to target point
   */
  static StaticHazards getTargetData(
      StaticHazards d1,
      StaticHazards d2,
      double frac) {
    NetcdfUtils.checkBoundingHazard(d1, d2);
    // Exact endpoint hits return the grid data directly, avoiding any
    // floating-point drift from interpolating.
    return frac == 0.0 ? d1 : frac == 1.0 ? d2 : NetcdfUtils.linearInterpolate(d1, d2, frac);
  }

  /* Locate the bounding grid cell, read its corner hazards, and interpolate. */
  private void setBoundingHazards(Location site) {
    var longitudes = coords.longitudes();
    var latitudes = coords.latitudes();

    // Indices of the grid point at or just below the site (lower-left corner)
    var idxLonLL = NetcdfUtils.getIdxLTEQ(longitudes, site.longitude);
    var idxLatLL = NetcdfUtils.getIdxLTEQ(latitudes, site.latitude);

    var lonLeft = longitudes[idxLonLL];
    var lonRight = longitudes[idxLonLL + 1];
    var latLower = latitudes[idxLatLL];
    var latUpper = latitudes[idxLatLL + 1];

    // Clockwise from lower left: (lat, lon, latIndexOffset, lonIndexOffset)
    boundingLocations.add(new BoundingLocation(latLower, lonLeft, 0, 0));
    boundingLocations.add(new BoundingLocation(latUpper, lonLeft, 1, 0));
    boundingLocations.add(new BoundingLocation(latUpper, lonRight, 1, 1));
    boundingLocations.add(new BoundingLocation(latLower, lonRight, 0, 1));

    var boundingHazardsBuilder = extractHazardsAt(idxLonLL, idxLatLL);

    var fracLon = NetcdfUtils.calcGridFrac(longitudes, idxLonLL, site.longitude);
    var fracLat = NetcdfUtils.calcGridFrac(latitudes, idxLatLL, site.latitude);

    /*
     * Build the corner hazards BEFORE computing the target: the previous code
     * evaluated calcTargetHazards() as an argument to put() while the
     * boundingHazards field was still null, throwing NPE on every call.
     */
    var cornerHazards = boundingHazardsBuilder.build();
    var targetHazard = calcTargetHazards(cornerHazards, fracLon, fracLat);

    boundingHazards = boundingHazardsBuilder
        .put(site, targetHazard)
        .build();

    // validate bounding hazards
    NetcdfUtils.checkBoundingHazards(boundingHazards, boundingLocations.get(0).location);
  }

  /*
   * Bilinear interpolation: interpolate along latitude on the west and east
   * edges, then along longitude between the two edge results.
   */
  private StaticHazards calcTargetHazards(
      BoundingHazards cornerHazards,
      double fracLon,
      double fracLat) {
    var westTarget = getTargetData(
        cornerHazards.get(boundingLocations.get(0).location),
        cornerHazards.get(boundingLocations.get(1).location),
        fracLat);
    var eastTarget = getTargetData(
        cornerHazards.get(boundingLocations.get(3).location),
        cornerHazards.get(boundingLocations.get(2).location),
        fracLat);
    return getTargetData(westTarget, eastTarget, fracLon);
  }

  /*
   * Read hazard curves at the four bounding grid points into a
   * BoundingHazards.Builder, keyed by grid-point location.
   *
   * The netCDF hazard variable has shape [nSiteClass, nImt, nLat, nLon, nIml];
   * the 2x2 lat/lon window at the lower-left indices is read in one call and
   * then sectioned per corner.
   */
  private BoundingHazards.Builder extractHazardsAt(
      int idxLonLL,
      int idxLatLL) {
    try (NetcdfDataset ncd = NetcdfDataset.openDataset(netcdf.path().toString())) {
      var boundingHazardMaps = BoundingHazards.builder();
      var targetGroup = ncd.findGroup(netcdf.nshmGroup().baseGroup());

      var targetOrigin = new int[] { 0, 0, idxLatLL, idxLonLL, 0 };
      var targetShape = new int[] {
          coords.siteClasses().size(),
          coords.imts().size(),
          2,
          2,
          coords.nIml()
      };

      // TODO: rename variable in netCDF
      /*
       * Array aHazards has shape [nVs,nImt,2,2,nIml]; each corner section is
       * [nVs,nImt,1,1,nIml] at that corner's lat/lon offset.
       */
      var aHazards = targetGroup.findVariable(Key.AEPS).read(targetOrigin, targetShape);
      var shape = targetShape.clone();
      shape[2] = 1;
      shape[3] = 1;

      for (var boundingLocation : boundingLocations) {
        boundingHazardMaps.put(
            boundingLocation.location,
            mapHazardsFromArray(aHazards.section(boundingLocation.origin, shape)));
      }

      return boundingHazardMaps;
    } catch (IOException | InvalidRangeException e) {
      // Reader was initialized with a valid netCDF file, so this only triggers
      // if the file was removed or corrupted after initialization. Preserve
      // the cause for diagnosis.
      throw new RuntimeException("Could not read Netcdf file [" + netcdf.path() + "]", e);
    }
  }

  /*
   * Read hazard curves from a [nSiteClass, nImt, nIml] array section into
   * StaticHazards, pairing each curve with its site-class/IMT IMLs.
   *
   * TODO: if target is on a grid point (or on a grid lat or lon), no need to
   * read 4 bounding points ?
   */
  private StaticHazards mapHazardsFromArray(Array hazards) {
    var vsImtHazardMap = StaticHazards.builder();

    for (int iSiteClass = 0; iSiteClass < coords.siteClasses().size(); iSiteClass++) {
      var siteClass = coords.siteClasses().get(iSiteClass);
      var imtHazardMap = StaticHazard.builder();

      for (int iImt = 0; iImt < coords.imts().size(); iImt++) {
        var imt = coords.imts().get(iImt);
        var origin = new int[] { iSiteClass, iImt, 0 };
        var shape = new int[] { 1, 1, coords.nIml() };

        try {
          var xySequence = XySequence.create(
              coords.imls().get(siteClass).get(imt),
              (double[]) hazards.section(origin, shape).reduce().get1DJavaArray(DataType.DOUBLE));
          imtHazardMap.put(imt, xySequence);
        } catch (InvalidRangeException e) {
          // preserve the cause instead of flattening to the message only
          throw new RuntimeException(e.getMessage(), e);
        }
      }

      vsImtHazardMap.put(siteClass, imtHazardMap.build());
    }

    return vsImtHazardMap.build();
  }

  /*
   * A bounding grid point and its read origin into the 5-D hazard array
   * [siteClass, imt, lat, lon, iml].
   *
   * Parameter order now matches the call sites: (lat, lon, latIndex,
   * lonIndex). The previous declaration named them in (lon, lat) order, which
   * crossed the lat/lon offsets in the read origin and swapped the UL and LR
   * hazard data. Location.create still receives the same runtime argument
   * order as before (lon value first, lat value second).
   */
  static class BoundingLocation {
    final Location location;
    final int[] origin;

    BoundingLocation(double latitude, double longitude, int latitudeIndex, int longitudeIndex) {
      location = Location.create(longitude, latitude);
      origin = new int[] { 0, 0, latitudeIndex, longitudeIndex, 0 };
    }
  }
}
...@@ -6,16 +6,12 @@ import static com.google.common.base.Preconditions.checkState; ...@@ -6,16 +6,12 @@ import static com.google.common.base.Preconditions.checkState;
import java.io.IOException; import java.io.IOException;
import java.util.Arrays; import java.util.Arrays;
import java.util.Map;
import com.google.common.collect.Maps;
import com.google.common.math.DoubleMath; import com.google.common.math.DoubleMath;
import gov.usgs.earthquake.nshmp.data.XySequence; import gov.usgs.earthquake.nshmp.data.XySequence;
import gov.usgs.earthquake.nshmp.geo.Location; import gov.usgs.earthquake.nshmp.geo.Location;
import gov.usgs.earthquake.nshmp.geo.LocationList; import gov.usgs.earthquake.nshmp.geo.LocationList;
import gov.usgs.earthquake.nshmp.gmm.Imt;
import gov.usgs.earthquake.nshmp.netcdf.SiteClass;
import ucar.ma2.DataType; import ucar.ma2.DataType;
import ucar.nc2.Group; import ucar.nc2.Group;
...@@ -156,41 +152,34 @@ public class NetcdfUtils { ...@@ -156,41 +152,34 @@ public class NetcdfUtils {
/* /*
* Linear interpolation of data values to a target point * Linear interpolation of data values to a target point
*
* TODO: use XySequence.copyOf(), streams
*/ */
static Map<SiteClass, Map<Imt, XySequence>> linearInterpolate( static StaticHazards linearInterpolate(StaticHazards v1, StaticHazards v2, double frac) {
Map<SiteClass, Map<Imt, XySequence>> v1, checkBoundingHazard(v1, v2);
Map<SiteClass, Map<Imt, XySequence>> v2,
double frac) {
// return v1 * (1 - frac) + v2 * frac
// DoubleData routines add and multiply in place, we don't want to modify
// the original data here
if (v1.size() != v2.size()) {
throw new IllegalArgumentException("Map size disagreement, cannot interpolate");
}
Map<SiteClass, Map<Imt, XySequence>> tMap = Maps.newEnumMap(SiteClass.class); var targetMap = StaticHazards.builder();
for (SiteClass sc : v1.keySet()) { v1.keySet().forEach(siteClass -> {
if (v1.get(sc).size() != v2.get(sc).size()) { var imtHazards = StaticHazard.builder();
throw new IllegalArgumentException("Array size disagreement, cannot interpolate"); var v1StaticHazards = v1.get(siteClass);
} var v2StaticHazards = v2.get(siteClass);
Map<Imt, XySequence> imtHazards = Maps.newEnumMap(Imt.class);
for (Imt imt : v1.get(sc).keySet()) { v1StaticHazards.keySet().forEach(imt -> {
var v1Haz = v1.get(sc).get(imt).yValues().toArray(); var v1Haz = v1StaticHazards.get(imt).yValues().toArray();
var v2Haz = v2.get(sc).get(imt).yValues().toArray(); var v2Haz = v2StaticHazards.get(imt).yValues().toArray();
var t = new double[v1Haz.length]; var target = new double[v1Haz.length];
for (int i = 0; i < v1Haz.length; i++) { for (int i = 0; i < v1Haz.length; i++) {
t[i] = v1Haz[i] * (1 - frac) + v2Haz[i] * frac; target[i] = v1Haz[i] * (1 - frac) + v2Haz[i] * frac;
} }
imtHazards.put(imt, XySequence.create(v1.get(sc).get(imt).xValues().toArray(), t));
} var xValues = v1StaticHazards.get(imt).xValues().toArray();
tMap.put(sc, imtHazards); imtHazards.put(imt, XySequence.create(xValues, target));
} });
return tMap;
targetMap.put(siteClass, imtHazards.build());
});
return targetMap.build();
} }
/** /**
...@@ -201,7 +190,7 @@ public class NetcdfUtils { ...@@ -201,7 +190,7 @@ public class NetcdfUtils {
* @param boundingHazards The bounding hazards * @param boundingHazards The bounding hazards
*/ */
static void checkBoundingHazards( static void checkBoundingHazards(
Map<Location, Map<SiteClass, Map<Imt, XySequence>>> boundingHazards, BoundingHazards boundingHazards,
Location location) { Location location) {
checkArgument(boundingHazards.containsKey(location), "Location not in bounding hazards"); checkArgument(boundingHazards.containsKey(location), "Location not in bounding hazards");
boundingHazards.keySet().stream() boundingHazards.keySet().stream()
...@@ -219,10 +208,10 @@ public class NetcdfUtils { ...@@ -219,10 +208,10 @@ public class NetcdfUtils {
* @param b Bounding hazard map B * @param b Bounding hazard map B
*/ */
static void checkBoundingHazard( static void checkBoundingHazard(
Map<SiteClass, Map<Imt, XySequence>> a, StaticHazards a,
Map<SiteClass, Map<Imt, XySequence>> b) { StaticHazards b) {
checkState(a.size() == b.size(), "Maps are not the same size"); checkState(a.size() == b.size(), "Maps are not the same size");
checkState(a.keySet().containsAll(a.keySet()), "Site classes do not match"); checkState(a.keySet().containsAll(b.keySet()), "Site classes do not match");
a.keySet().forEach(key -> checkHazards(a.get(key), b.get(key))); a.keySet().forEach(key -> checkHazards(a.get(key), b.get(key)));
} }
...@@ -233,9 +222,9 @@ public class NetcdfUtils { ...@@ -233,9 +222,9 @@ public class NetcdfUtils {
* @param a Hazard A * @param a Hazard A
* @param b Hazard B * @param b Hazard B
*/ */
static void checkHazards(Map<Imt, XySequence> a, Map<Imt, XySequence> b) { static void checkHazards(StaticHazard a, StaticHazard b) {
checkState(a.size() == b.size(), "Maps are not the same size"); checkState(a.size() == b.size(), "Maps are not the same size");
checkState(a.keySet().containsAll(a.keySet()), "IMTs do not match"); checkState(a.keySet().containsAll(b.keySet()), "IMTs do not match");
a.keySet().forEach(key -> checkGroundMotions(a.get(key), b.get(key))); a.keySet().forEach(key -> checkGroundMotions(a.get(key), b.get(key)));
} }
......
package gov.usgs.earthquake.nshmp.netcdf.reader;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import gov.usgs.earthquake.nshmp.data.XySequence;
import gov.usgs.earthquake.nshmp.gmm.Imt;
/**
 * Immutable wrapper of static hazard curves ({@code XySequence}) keyed by
 * {@code Imt}. Build with {@link #builder()}.
 */
public class StaticHazard implements Iterable<Entry<Imt, XySequence>> {

  private final Map<Imt, XySequence> staticHazard;

  StaticHazard(Map<Imt, XySequence> staticHazard) {
    this.staticHazard = staticHazard;
  }

  /** Returns the entry set of the underlying map. */
  public Set<Entry<Imt, XySequence>> entrySet() {
    return staticHazard.entrySet();
  }

  /** Returns whether a hazard curve exists for {@code imt}. */
  public boolean containsKey(Imt imt) {
    return staticHazard.containsKey(imt);
  }

  /**
   * Returns the hazard curve for {@code imt}.
   *
   * @throws IllegalArgumentException if no curve exists for {@code imt}
   */
  public XySequence get(Imt imt) {
    checkArgument(staticHazard.containsKey(imt), "Imt [" + imt + "] not found");
    return staticHazard.get(imt);
  }

  /** Returns the IMTs with hazard curves. */
  public Set<Imt> keySet() {
    return staticHazard.keySet();
  }

  /** Returns the number of hazard curves. */
  public int size() {
    return staticHazard.size();
  }

  /** Returns an immutable copy of the curves, keyed by IMT. */
  public Map<Imt, XySequence> staticHazard() {
    return Map.copyOf(staticHazard);
  }

  @Override
  public Iterator<Entry<Imt, XySequence>> iterator() {
    return entrySet().iterator();
  }

  /** Returns a new, empty builder. */
  public static Builder builder() {
    return new Builder();
  }

  /** Builder of {@code StaticHazard}; at least one curve must be added. */
  public static class Builder {
    final Map<Imt, XySequence> staticHazard;

    private Builder() {
      staticHazard = new HashMap<>();
    }

    /** Adds (or replaces) the hazard curve for {@code imt}. */
    public Builder put(Imt imt, XySequence xySequence) {
      staticHazard.put(imt, xySequence);
      return this;
    }

    /**
     * Returns a new {@code StaticHazard}.
     *
     * @throws IllegalStateException if no curves were added
     */
    public StaticHazard build() {
      checkState(!staticHazard.isEmpty(), "Must add hazards");
      // Snapshot the map so later builder mutations cannot alter the
      // supposedly-immutable built instance.
      return new StaticHazard(new HashMap<>(staticHazard));
    }
  }
}
package gov.usgs.earthquake.nshmp.netcdf.reader;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import gov.usgs.earthquake.nshmp.netcdf.SiteClass;
/**
 * Immutable wrapper of {@code StaticHazard} keyed by {@code SiteClass}. Build
 * with {@link #builder()}.
 */
public class StaticHazards implements Iterable<Entry<SiteClass, StaticHazard>> {

  private final Map<SiteClass, StaticHazard> staticHazards;

  StaticHazards(Map<SiteClass, StaticHazard> staticHazards) {
    this.staticHazards = staticHazards;
  }

  /** Returns whether a hazard exists for {@code siteClass}. */
  public boolean containsKey(SiteClass siteClass) {
    return staticHazards.containsKey(siteClass);
  }

  /** Returns the entry set of the underlying map. */
  public Set<Entry<SiteClass, StaticHazard>> entrySet() {
    return staticHazards.entrySet();
  }

  /**
   * Returns the hazard for {@code siteClass}.
   *
   * @throws IllegalArgumentException if no hazard exists for {@code siteClass}
   */
  public StaticHazard get(SiteClass siteClass) {
    checkArgument(staticHazards.containsKey(siteClass), "Site class [" + siteClass + "] not found");
    return staticHazards.get(siteClass);
  }

  /** Returns the site classes with hazards. */
  public Set<SiteClass> keySet() {
    return staticHazards.keySet();
  }

  /** Returns the number of site-class hazards. */
  public int size() {
    return staticHazards.size();
  }

  /** Returns an immutable copy of the hazards, keyed by site class. */
  public Map<SiteClass, StaticHazard> staticHazards() {
    return Map.copyOf(staticHazards);
  }

  @Override
  public Iterator<Entry<SiteClass, StaticHazard>> iterator() {
    return entrySet().iterator();
  }

  /** Returns a new, empty builder. */
  public static Builder builder() {
    return new Builder();
  }

  /** Builder of {@code StaticHazards}; at least one hazard must be added. */
  public static class Builder {
    final Map<SiteClass, StaticHazard> staticHazards;

    private Builder() {
      staticHazards = new HashMap<>();
    }

    /** Adds (or replaces) the hazard for {@code siteClass}. */
    public Builder put(SiteClass siteClass, StaticHazard staticHazard) {
      staticHazards.put(siteClass, staticHazard);
      return this;
    }

    /**
     * Returns a new {@code StaticHazards}.
     *
     * @throws IllegalStateException if no hazards were added
     */
    public StaticHazards build() {
      checkState(!staticHazards.isEmpty(), "Must add hazards");
      // Snapshot the map so later builder mutations cannot alter the
      // supposedly-immutable built instance.
      return new StaticHazards(new HashMap<>(staticHazards));
    }
  }
}
...@@ -4,7 +4,6 @@ import static gov.usgs.earthquake.nshmp.netcdf.www.NetcdfWsUtils.GSON; ...@@ -4,7 +4,6 @@ import static gov.usgs.earthquake.nshmp.netcdf.www.NetcdfWsUtils.GSON;
import java.nio.file.Path; import java.nio.file.Path;
import java.util.List; import java.util.List;
import java.util.Map;
import java.util.logging.Logger; import java.util.logging.Logger;
import java.util.stream.Collectors; import java.util.stream.Collectors;
...@@ -21,6 +20,9 @@ import gov.usgs.earthquake.nshmp.internal.www.meta.Status; ...@@ -21,6 +20,9 @@ import gov.usgs.earthquake.nshmp.internal.www.meta.Status;
import gov.usgs.earthquake.nshmp.netcdf.NshmGroup; import gov.usgs.earthquake.nshmp.netcdf.NshmGroup;
import gov.usgs.earthquake.nshmp.netcdf.NshmNetcdfReader; import gov.usgs.earthquake.nshmp.netcdf.NshmNetcdfReader;
import gov.usgs.earthquake.nshmp.netcdf.SiteClass; import gov.usgs.earthquake.nshmp.netcdf.SiteClass;
import gov.usgs.earthquake.nshmp.netcdf.reader.BoundingHazards;
import gov.usgs.earthquake.nshmp.netcdf.reader.StaticHazard;
import gov.usgs.earthquake.nshmp.netcdf.reader.StaticHazards;
import gov.usgs.earthquake.nshmp.netcdf.www.NetcdfController.Query; import gov.usgs.earthquake.nshmp.netcdf.www.NetcdfController.Query;
import gov.usgs.earthquake.nshmp.netcdf.www.NetcdfController.Service; import gov.usgs.earthquake.nshmp.netcdf.www.NetcdfController.Service;
import gov.usgs.earthquake.nshmp.netcdf.www.NetcdfWsUtils.Key; import gov.usgs.earthquake.nshmp.netcdf.www.NetcdfWsUtils.Key;
...@@ -215,7 +217,7 @@ public class NetcdfService { ...@@ -215,7 +217,7 @@ public class NetcdfService {
static List<List<List<ResponseData>>> toLists( static List<List<List<ResponseData>>> toLists(
NshmGroup nshmGroup, NshmGroup nshmGroup,
Location site, Location site,
Map<Location, Map<SiteClass, Map<Imt, XySequence>>> bounding) { BoundingHazards bounding) {
return bounding.entrySet().stream() return bounding.entrySet().stream()
.map(entry -> toList(nshmGroup, site, entry.getValue())) .map(entry -> toList(nshmGroup, site, entry.getValue()))
...@@ -225,7 +227,7 @@ public class NetcdfService { ...@@ -225,7 +227,7 @@ public class NetcdfService {
static List<List<ResponseData>> toList( static List<List<ResponseData>> toList(
NshmGroup nshmGroup, NshmGroup nshmGroup,
Location site, Location site,
Map<SiteClass, Map<Imt, XySequence>> curves) { StaticHazards curves) {
return curves.entrySet().stream() return curves.entrySet().stream()
.map(entry -> { .map(entry -> {
var request = new RequestDataCurves(site.longitude, site.latitude, entry.getKey()); var request = new RequestDataCurves(site.longitude, site.latitude, entry.getKey());
...@@ -237,7 +239,7 @@ public class NetcdfService { ...@@ -237,7 +239,7 @@ public class NetcdfService {
static List<ResponseData> toList( static List<ResponseData> toList(
NshmGroup nshmGroup, NshmGroup nshmGroup,
RequestDataCurves request, RequestDataCurves request,
Map<Imt, XySequence> curves) { StaticHazard curves) {
return curves.entrySet().stream() return curves.entrySet().stream()
.map((entry) -> { .map((entry) -> {
var site = request.site; var site = request.site;
......
...@@ -25,7 +25,10 @@ import gov.usgs.earthquake.nshmp.geo.Location; ...@@ -25,7 +25,10 @@ import gov.usgs.earthquake.nshmp.geo.Location;
import gov.usgs.earthquake.nshmp.geo.LocationList; import gov.usgs.earthquake.nshmp.geo.LocationList;
import gov.usgs.earthquake.nshmp.geo.Regions; import gov.usgs.earthquake.nshmp.geo.Regions;
import gov.usgs.earthquake.nshmp.gmm.Imt; import gov.usgs.earthquake.nshmp.gmm.Imt;
import gov.usgs.earthquake.nshmp.netcdf.reader.BoundingHazards;
import gov.usgs.earthquake.nshmp.netcdf.reader.NetcdfUtils; import gov.usgs.earthquake.nshmp.netcdf.reader.NetcdfUtils;
import gov.usgs.earthquake.nshmp.netcdf.reader.StaticHazard;
import gov.usgs.earthquake.nshmp.netcdf.reader.StaticHazards;
class NshmNetcdfReaderTests { class NshmNetcdfReaderTests {
...@@ -95,27 +98,19 @@ class NshmNetcdfReaderTests { ...@@ -95,27 +98,19 @@ class NshmNetcdfReaderTests {
static Map<SiteClass, Map<Imt, double[]>> IMLS = new HashMap<>(); static Map<SiteClass, Map<Imt, double[]>> IMLS = new HashMap<>();
static Map<Location, Map<SiteClass, Map<Imt, XySequence>>> BOUNDING_HAZARDS = new HashMap<>(); static BoundingHazards BOUNDING_HAZARDS;
static final NshmNetcdfReader NETCDF = new NshmNetcdfReader(NshmGroup.CONUS_2018, NETCDF_PATH); static final NshmNetcdfReader NETCDF = new NshmNetcdfReader(NshmGroup.CONUS_2018, NETCDF_PATH);
static class NetcdfHazard {
Location location;
Map<SiteClass, Map<Imt, XySequence>> hazard;
NetcdfHazard(Location location, Map<SiteClass, Map<Imt, XySequence>> hazard) {
this.location = location;
this.hazard = hazard;
}
}
static { static {
var builder = BoundingHazards.builder();
var iHaz = 0; var iHaz = 0;
for (var location : LOCATIONS) { for (var location : LOCATIONS) {
var scMap = new HashMap<SiteClass, Map<Imt, XySequence>>(); var siteClassMap = StaticHazards.builder();
for (var siteClass : SITE_CLASSES) { for (var siteClass : SITE_CLASSES) {
var imtMap = new HashMap<Imt, XySequence>(); var imtMap = StaticHazard.builder();
for (var iImt = 0; iImt < IMTS.size(); iImt++) { for (var iImt = 0; iImt < IMTS.size(); iImt++) {
var imt = IMTS.get(iImt); var imt = IMTS.get(iImt);
...@@ -123,12 +118,14 @@ class NshmNetcdfReaderTests { ...@@ -123,12 +118,14 @@ class NshmNetcdfReaderTests {
imtMap.put(imt, xy); imtMap.put(imt, xy);
} }
scMap.put(siteClass, imtMap); siteClassMap.put(siteClass, imtMap.build());
} }
BOUNDING_HAZARDS.put(location, scMap); builder.put(location, siteClassMap.build());
} }
BOUNDING_HAZARDS = builder.build();
SITE_CLASSES.forEach(siteClass -> { SITE_CLASSES.forEach(siteClass -> {
var haz = new HashMap<Imt, double[]>(); var haz = new HashMap<Imt, double[]>();
...@@ -282,8 +279,8 @@ class NshmNetcdfReaderTests { ...@@ -282,8 +279,8 @@ class NshmNetcdfReaderTests {
} }
private void testHazards( private void testHazards(
Map<SiteClass, Map<Imt, XySequence>> expected, StaticHazards expected,
Map<SiteClass, Map<Imt, XySequence>> actual) { StaticHazards actual) {
for (var siteEntry : expected.entrySet()) { for (var siteEntry : expected.entrySet()) {
var siteClass = siteEntry.getKey(); var siteClass = siteEntry.getKey();
assertTrue(actual.containsKey(siteClass)); assertTrue(actual.containsKey(siteClass));
...@@ -292,7 +289,7 @@ class NshmNetcdfReaderTests { ...@@ -292,7 +289,7 @@ class NshmNetcdfReaderTests {
} }
} }
private void testHazard(Map<Imt, XySequence> expected, Map<Imt, XySequence> actual) { private void testHazard(StaticHazard expected, StaticHazard actual) {
for (var imtEntry : expected.entrySet()) { for (var imtEntry : expected.entrySet()) {
var imt = imtEntry.getKey(); var imt = imtEntry.getKey();
assertTrue(actual.containsKey(imt)); assertTrue(actual.containsKey(imt));
......
package gov.usgs.earthquake.nshmp.netcdf.reader; package gov.usgs.earthquake.nshmp.netcdf.reader;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.Map;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import com.google.common.collect.Maps;
import gov.usgs.earthquake.nshmp.data.XySequence; import gov.usgs.earthquake.nshmp.data.XySequence;
import gov.usgs.earthquake.nshmp.gmm.Imt; import gov.usgs.earthquake.nshmp.gmm.Imt;
import gov.usgs.earthquake.nshmp.netcdf.SiteClass; import gov.usgs.earthquake.nshmp.netcdf.SiteClass;
...@@ -29,35 +27,32 @@ class NetcdfUtilsTests { ...@@ -29,35 +27,32 @@ class NetcdfUtilsTests {
private static final double TOL = 1e-7; private static final double TOL = 1e-7;
static Map<SiteClass, Map<Imt, XySequence>> mapHaz0 = Maps.newEnumMap(SiteClass.class); static StaticHazards mapHaz0;
static Map<SiteClass, Map<Imt, XySequence>> mapHaz1 = Maps.newEnumMap(SiteClass.class); static StaticHazards mapHaz1;
static Map<SiteClass, Map<Imt, XySequence>> mapHazTarget = Maps.newEnumMap(SiteClass.class); static StaticHazards mapHazTarget;
static Map<SiteClass, Map<Imt, XySequence>> mapDiffImtSize = Maps.newEnumMap(SiteClass.class); static StaticHazards mapDiffImtSize;
static Map<SiteClass, Map<Imt, XySequence>> mapDiffScSize = Maps.newEnumMap(SiteClass.class); static StaticHazards mapDiffScSize;
static Map<SiteClass, Map<Imt, XySequence>> mapDiffImlValue = Maps.newEnumMap(SiteClass.class); static StaticHazards mapDiffImlValue;
private static List<SiteClass> siteClasses = new ArrayList<SiteClass>();
private static List<Imt> imts = new ArrayList<Imt>();
private static final int N_IML = 3; private static final int N_IML = 3;
private static final double FRAC = 0.5; private static final double FRAC = 0.5;
static { static {
siteClasses.add(SiteClass.B); var siteClasses = List.of(SiteClass.B, SiteClass.C, SiteClass.D);
siteClasses.add(SiteClass.C); var imts = List.of(Imt.PGA, Imt.SA0P1, Imt.SA1P5);
siteClasses.add(SiteClass.D); var imlValues = new double[] { 0.1, 0.5, 0.75 };
imts.add(Imt.PGA);
imts.add(Imt.SA0P1);
imts.add(Imt.SA1P5);
double[] imlValues = new double[] { 0.1, 0.5, 0.75 }; var mapHaz0Builder = StaticHazards.builder();
var mapHaz1Builder = StaticHazards.builder();
var mapHazTargetBuilder = StaticHazards.builder();
var mapDiffImlValueBuilder = StaticHazards.builder();
for (SiteClass sc : siteClasses) { for (SiteClass sc : siteClasses) {
Map<Imt, XySequence> imtMap0 = Maps.newEnumMap(Imt.class); var imtMap0 = StaticHazard.builder();
Map<Imt, XySequence> imtMap1 = Maps.newEnumMap(Imt.class); var imtMap1 = StaticHazard.builder();
Map<Imt, XySequence> imtMapTarget = Maps.newEnumMap(Imt.class); var imtMapTarget = StaticHazard.builder();
Map<Imt, XySequence> imtMapBiggerErr = Maps.newEnumMap(Imt.class); var imtMapDiffIml = StaticHazard.builder();
Map<Imt, XySequence> imtMapDiffIml = Maps.newEnumMap(Imt.class);
for (Imt imt : imts) { for (Imt imt : imts) {
double[] zeros = new double[N_IML]; double[] zeros = new double[N_IML];
double[] ones = new double[N_IML]; double[] ones = new double[N_IML];
...@@ -69,7 +64,6 @@ class NetcdfUtilsTests { ...@@ -69,7 +64,6 @@ class NetcdfUtilsTests {
imtMap0.put(imt, XySequence.create(imlValues, zeros)); imtMap0.put(imt, XySequence.create(imlValues, zeros));
imtMap1.put(imt, XySequence.create(imlValues, ones)); imtMap1.put(imt, XySequence.create(imlValues, ones));
imtMapTarget.put(imt, XySequence.create(imlValues, half)); imtMapTarget.put(imt, XySequence.create(imlValues, half));
imtMapBiggerErr.put(imt, XySequence.create(imlValues, ones));
// insert different Iml value // insert different Iml value
if (sc == siteClasses.get(siteClasses.size() - 1) && imt == imts.get(imts.size() - 1)) { if (sc == siteClasses.get(siteClasses.size() - 1) && imt == imts.get(imts.size() - 1)) {
...@@ -80,19 +74,35 @@ class NetcdfUtilsTests { ...@@ -80,19 +74,35 @@ class NetcdfUtilsTests {
imtMapDiffIml.put(imt, XySequence.create(imlValues, ones)); imtMapDiffIml.put(imt, XySequence.create(imlValues, ones));
} }
} }
mapHaz0.put(sc, imtMap0); mapHaz0Builder.put(sc, imtMap0.build());
mapHaz1.put(sc, imtMap1); mapHaz1Builder.put(sc, imtMap1.build());
mapHazTarget.put(sc, imtMapTarget); mapHazTargetBuilder.put(sc, imtMapTarget.build());
mapDiffImtSize.put(sc, imtMapBiggerErr); mapDiffImlValueBuilder.put(sc, imtMapDiffIml.build());
mapDiffImlValue.put(sc, imtMapDiffIml);
} }
// add another map mapHaz0 = mapHaz0Builder.build();
mapDiffScSize.putAll(mapDiffImtSize); mapHaz1 = mapHaz1Builder.build();
mapDiffScSize.put(SiteClass.A, mapDiffScSize.get(siteClasses.get(0))); mapHazTarget = mapHazTargetBuilder.build();
mapDiffImtSize.get(siteClasses.get(0)) mapDiffImlValue = mapDiffImlValueBuilder.build();
.put(Imt.SA10P0, XySequence.create(imlValues, new double[N_IML]));
// Add extra site class
var mapDiffScSizeBuilder = StaticHazards.builder();
mapHaz0.forEach(entry -> mapDiffScSizeBuilder.put(entry.getKey(), entry.getValue()));
mapDiffScSizeBuilder.put(SiteClass.A, mapHaz0.get(siteClasses.get(0)));
mapDiffScSize = mapDiffScSizeBuilder.build();
// Add extra IMT
var mapDiffImtSizeBuilder = StaticHazards.builder();
mapHaz0.forEach(entry -> {
var builder = StaticHazard.builder();
entry.getValue().forEach(imtEntry -> {
builder.put(imtEntry.getKey(), imtEntry.getValue());
});
builder.put(Imt.SA10P0, XySequence.create(imlValues, new double[N_IML]));
mapDiffImtSizeBuilder.put(entry.getKey(), builder.build());
});
mapDiffImtSize = mapDiffImtSizeBuilder.build();
} }
@Test @Test
...@@ -138,12 +148,23 @@ class NetcdfUtilsTests { ...@@ -138,12 +148,23 @@ class NetcdfUtilsTests {
@Test @Test
final void testLinearInterpolate() { final void testLinearInterpolate() {
assertEquals(mapHazTarget, NetcdfUtils.linearInterpolate(mapHaz0, mapHaz1, FRAC)); var actual = NetcdfUtils.linearInterpolate(mapHaz0, mapHaz1, FRAC);
assertTrue(mapHazTarget.keySet().containsAll(actual.keySet()));
mapHazTarget.forEach(entry -> {
assertTrue(entry.getValue().keySet().containsAll(actual.get(entry.getKey()).keySet()));
entry.getValue().forEach(imtEntry -> {
var actualXy = actual.get(entry.getKey()).get(imtEntry.getKey());
assertArrayEquals(imtEntry.getValue().xValues().toArray(), actualXy.xValues().toArray(), 0);
});
});
// attempt to interpolate maps of difference sizes // attempt to interpolate maps of difference sizes
assertThrows(IllegalArgumentException.class, () -> { assertThrows(IllegalStateException.class, () -> {
NetcdfUtils.linearInterpolate(mapHaz0, mapDiffImtSize, FRAC); NetcdfUtils.linearInterpolate(mapHaz0, mapDiffImtSize, FRAC);
}); });
assertThrows(IllegalArgumentException.class, () -> {
assertThrows(IllegalStateException.class, () -> {
NetcdfUtils.linearInterpolate(mapHaz0, mapDiffScSize, FRAC); NetcdfUtils.linearInterpolate(mapHaz0, mapDiffScSize, FRAC);
}); });
} }
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment