diff --git a/gradle.properties b/gradle.properties index a64e3cab3d10f65d99bf76bac75d9076a534202f..858f636991b51c616b5b8403fca7016ed2d3f5cb 100644 --- a/gradle.properties +++ b/gradle.properties @@ -7,7 +7,7 @@ logbackVersion = 1.2.3 mnOpenAPIVersion = 1.4.0 mnVersion = 1.3.2 netcdfVersion = 5.1.0 -nshmpLibVersion = 0.2.4 +nshmpLibVersion = 0.2.9 shadowVersion = 5.2.0 slfVersion = 1.7.30 spotbugsVersion = 4.2.4 diff --git a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/NshmNetcdfReader.java b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/NshmNetcdfReader.java index a324ab65d6a86902f0ee52e9c338515a058e6abb..fc9df599fc2b6347f75bff6dbe45dce6eec84060 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/NshmNetcdfReader.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/NshmNetcdfReader.java @@ -11,6 +11,7 @@ import java.util.logging.Logger; import gov.usgs.earthquake.nshmp.data.XySequence; import gov.usgs.earthquake.nshmp.geo.Location; import gov.usgs.earthquake.nshmp.gmm.Imt; +import gov.usgs.earthquake.nshmp.gmm.NehrpSiteClass; import gov.usgs.earthquake.nshmp.netcdf.reader.BoundingHazards; import gov.usgs.earthquake.nshmp.netcdf.reader.BoundingHazardsReader; import gov.usgs.earthquake.nshmp.netcdf.reader.NetcdfCoordinates; @@ -120,7 +121,7 @@ public class NshmNetcdfReader { * @param site The site to get hazard curves * @param siteClass The site class */ - public StaticHazard hazard(Location site, SiteClass siteClass) { + public StaticHazard hazard(Location site, NehrpSiteClass siteClass) { checkArgument( coords.siteClasses().contains(siteClass), "Site class [" + siteClass + "] not supported"); @@ -135,7 +136,7 @@ public class NshmNetcdfReader { * @param siteClass The site class * @param imt The IMT */ - public XySequence hazard(Location site, SiteClass siteClass, Imt imt) { + public XySequence hazard(Location site, NehrpSiteClass siteClass, Imt imt) { checkArgument(coords.imts().contains(imt), "Imt [" + imt + "] not supported"); return hazard(site, siteClass).get(imt); } diff --git a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/BoundingHazardsReader.java b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/BoundingHazardsReader.java index cdf0c9d4a0c5e189a4692fb525cd8a34614c79cf..20e1c38344cb1bb62723cbc7b87bb5887c8b987d 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/BoundingHazardsReader.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/BoundingHazardsReader.java @@ -136,13 +136,12 @@ public class BoundingHazardsReader { coords.nIml() }; - // TODO: rename variable in netCDF /* * Array aHazards now has shape [nVs,nImt,2,2,nIml] ...so origin will now * be [0,0,0,0,0] for LL grid point ...and shape of requested array is * [nVs,nImt,1,1,nIml] */ - var aHazards = targetGroup.findVariable(Key.AEPS).read(targetOrigin, targetShape); + var aHazards = targetGroup.findVariable(Key.HAZARD).read(targetOrigin, targetShape); var shape = targetShape.clone(); shape[2] = 1; @@ -182,7 +181,7 @@ public class BoundingHazardsReader { try { var xySequence = XySequence.create( - coords.imls().get(siteClass).get(imt), + coords.imls().get(imt), (double[]) hazards.section(origin, shape).reduce().get1DJavaArray(DataType.DOUBLE)); imtHazardMap.put(imt, xySequence); diff --git a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/NetcdfCoordinates.java b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/NetcdfCoordinates.java index c1e6c6f2d80cbbc126494544e872732db95ba8e3..c0620aec4b51d82030eceb3e17f02c70189a14c5 100644 --- 
a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/NetcdfCoordinates.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/NetcdfCoordinates.java @@ -1,24 +1,30 @@ package gov.usgs.earthquake.nshmp.netcdf.reader; import static com.google.common.base.Preconditions.checkArgument; +import static com.google.common.base.Preconditions.checkState; import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.EnumMap; +import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.stream.Collectors; - -import com.google.common.collect.Maps; import gov.usgs.earthquake.nshmp.Maths; import gov.usgs.earthquake.nshmp.geo.BorderType; import gov.usgs.earthquake.nshmp.geo.Location; +import gov.usgs.earthquake.nshmp.geo.LocationList; import gov.usgs.earthquake.nshmp.geo.Region; import gov.usgs.earthquake.nshmp.geo.Regions; +import gov.usgs.earthquake.nshmp.geo.json.Feature; +import gov.usgs.earthquake.nshmp.geo.json.FeatureCollection; +import gov.usgs.earthquake.nshmp.geo.json.GeoJson; +import gov.usgs.earthquake.nshmp.geo.json.Properties; import gov.usgs.earthquake.nshmp.gmm.Imt; +import gov.usgs.earthquake.nshmp.gmm.NehrpSiteClass; import gov.usgs.earthquake.nshmp.netcdf.NshmGroup; -import gov.usgs.earthquake.nshmp.netcdf.SiteClass; import gov.usgs.earthquake.nshmp.netcdf.reader.NetcdfUtils.Key; import ucar.ma2.DataType; @@ -32,9 +38,10 @@ import ucar.nc2.Variable; */ public class NetcdfCoordinates { - private final List<SiteClass> siteClasses; + private final List<NehrpSiteClass> siteClasses; + private final Map<NehrpSiteClass, Double> vs30Map; private final List<Imt> imts; - private final Map<SiteClass, Map<Imt, double[]>> imls; + private final Map<Imt, double[]> imls; private final int nIml; private final Region region; private final double[] longitudes; @@ -45,12 +52,43 @@ public class NetcdfCoordinates { // variables and their dimensions should be, this is OK(???) 
// TODO: probably better (proper) to access netCDF dimensions + var vSiteClass = targetGroup.findVariable(Key.SITE_CLASS); var vImls = targetGroup.findVariable(Key.IMLS); - var vs30s = NetcdfUtils.getIntArray(targetGroup, Key.VS30); - var imtVals = NetcdfUtils.getDoubleArray(targetGroup, Key.IMT); + var vImts = targetGroup.findVariable(Key.IMT); + var vBounds = targetGroup.findVariable(Key.BOUNDS); + + var vs30s = NetcdfUtils.getDoubleArray(targetGroup, Key.VS30); var lats = NetcdfUtils.getDoubleArray(targetGroup, Key.LAT); var lons = NetcdfUtils.getDoubleArray(targetGroup, Key.LON); + // get list of SiteClass enums and build vs30 map + List<NehrpSiteClass> scList = new ArrayList<NehrpSiteClass>(); + Map<NehrpSiteClass, Double> vsMap = new HashMap<NehrpSiteClass, Double>(); + var siteClassArray = vSiteClass.read(); + for (int i = 0; i < vSiteClass.getSize(); i++) { + String scString = siteClassArray.getObject(i).toString(); + NehrpSiteClass siteClass = NehrpSiteClass.valueOf(scString); + scList.add(siteClass); + vsMap.put(siteClass, vs30s[i]); + } + siteClasses = List.copyOf(scList); + vs30Map = Collections.unmodifiableMap(vsMap); + + // get list of IMT enums + List<Imt> imtList = new ArrayList<Imt>(); + var imtArray = vImts.read(); + for (int i = 0; i < vImts.getSize(); i++) { + imtList.add(Imt.valueOf(imtArray.getObject(i).toString())); + } + imts = List.copyOf(imtList); + + // vImls has dimensions (Imt, Iml) + // alternatively get nIml from Dimension Iml + nIml = targetGroup.findDimension(Key.IMLS).getLength(); + + // get map of IMLs + imls = mapImls(vImls); + latitudes = Arrays.stream(lats) .map(lat -> Maths.round(lat, nshmGroup.locationPrecision())) .toArray(); @@ -59,33 +97,47 @@ public class NetcdfCoordinates { .map(lon -> Maths.round(lon, nshmGroup.locationPrecision())) .toArray(); - var border = NetcdfUtils.buildBorder(longitudes, latitudes); - - region = Regions.create("region", border, BorderType.MERCATOR_LINEAR); - - // vImls has dimensions (vs30, Imt, Iml) - // alternatively get nIml from Dimension Iml - nIml = vImls.getDimension(2).getLength(); // Imls.length; - - // convert Vs30<int> to SiteClass Enum - // TODO: site class should be stored as enum in netCDF file rather than - // vs30 value, so this will break: - siteClasses = Arrays.stream(vs30s) - .mapToObj(vs30 -> SiteClass.ofValue(vs30)) - .collect(Collectors.toUnmodifiableList()); - - // TODO: Imt should be stored in netCDF as enum, so this will break: - imts = Arrays.stream(imtVals) - .mapToObj(x -> (x < 0.009) ? Imt.PGA : Imt.fromPeriod(x)) - .collect(Collectors.toUnmodifiableList()); - - imls = mapImls(vImls); + /* + * build region from GeoJSON string - follow logic in {@code + * Sites.createSiteRegion()}, which is private, without converting to + * GriddedRegion. {@code Sites.createSiteRegion()} requires that if an + * extents Feature (rectangular bounds) is present, it MUST be the first + * feature + * + * I think we just want the polygon border and don't need the gridded sites? + * Unless these could be used to more efficiently look up site index? + */ + // TODO: Error handling? how much, if any, error checking? Can we assume + // it's a valid GeoJSON file here if we make sure we only load valid files + // into the netCDF? 
+ + // Read from netCDF + String bounds = vBounds.readScalarString(); + // Convert string to FeatureCollection + FeatureCollection dataRegions = GeoJson.from(bounds).toFeatureCollection(); + // Extract Features + List<Feature> features = dataRegions.features(); + + // From {@code Sites.createSiteRegion()} + checkState(features.size() <= 2, "Only 2 polygon features may be defined"); + int mapRegionIndex = 0; + if (features.size() > 1) { + // don't need to process the rectangular map extents, if present + mapRegionIndex = 1; + } + Feature sitesPoly = features.get(mapRegionIndex); + LocationList sitesPolyBorder = sitesPoly.asPolygonBorder(); + Properties properties = sitesPoly.properties(); + // get region name - either "title" or "name" property??? + String mapName = + properties.getString("title").orElse(properties.getString("name").orElse("Unnamed Map")); + region = Regions.create(mapName, sitesPolyBorder, BorderType.MERCATOR_LINEAR); } /** * Returns the Imls associated with a {@code NshmGroup}. */ - public Map<SiteClass, Map<Imt, double[]>> imls() { + public Map<Imt, double[]> imls() { return imls; } @@ -127,7 +179,7 @@ public class NetcdfCoordinates { /** * Return the site classes associated with a {@code NshmGroup}. */ - public List<SiteClass> siteClasses() { + public List<NehrpSiteClass> siteClasses() { return List.copyOf(siteClasses); } @@ -137,6 +189,12 @@ public class NetcdfCoordinates { * @param site The site to test */ public void contains(Location site) { + checkArgument( + region.contains(site), + String.format("Target site [%s] is not contained within %s calculation region", + site.toString(), region.name())); + // No longer necessary? region is now the calculation polygon, rather than a + // rectangular boundary var bounds = region.bounds(); checkArgument( region.contains(site), @@ -144,42 +202,33 @@ public class NetcdfCoordinates { } /* - * convert 3D Iml variable (dimensions vs30, Imt, Iml) to map of Imls by - * SiteClass and Imt + * convert 2D Iml variable (dimensions Imt, Iml) to map of Imls by Imt * * TODO: use MultiMap or SetMultiMap (etc.) to store unique IML sets? Could * then also initialize the underlying XySequence objects for reading in the * hazard curves... 
*/ - private Map<SiteClass, Map<Imt, double[]>> mapImls(Variable vImls) { - EnumMap<SiteClass, Map<Imt, double[]>> vsImtImlMap = Maps.newEnumMap(SiteClass.class); - - for (int i = 0; i < siteClasses.size(); i++) { - var sc = siteClasses.get(i); - - Map<Imt, double[]> imtImlMap = Maps.newEnumMap(Imt.class); - for (int j = 0; j < imts.size(); j++) { - var imt = imts.get(j); - - // set origin and shape of double[] Imls to read - var origin = new int[] { i, j, 0 }; - var shape = new int[] { 1, 1, nIml }; - - try { - imtImlMap.put( - imt, - (double[]) vImls.read(origin, shape).reduce().get1DJavaArray(DataType.DOUBLE)); - } catch (IOException | InvalidRangeException e) { - var msg = "Failed read attempt for vImls with origin: " + - Arrays.toString(origin) + ", shape: " + Arrays.toString(shape); - throw new RuntimeException(msg); - } + private Map<Imt, double[]> mapImls(Variable vImls) { + Map<Imt, double[]> imtImlMap = new EnumMap<Imt, double[]>(Imt.class); + for (int j = 0; j < imts.size(); j++) { + var imt = imts.get(j); + + // set origin and shape of double[] Imls to read + var origin = new int[] { j, 0 }; + var shape = new int[] { 1, nIml }; + + try { + imtImlMap.put( + imt, + (double[]) vImls.read(origin, shape).reduce().get1DJavaArray(DataType.DOUBLE)); + } catch (IOException | InvalidRangeException e) { + var msg = "Failed read attempt for vImls with origin: " + + Arrays.toString(origin) + ", shape: " + Arrays.toString(shape); + throw new RuntimeException(msg); } - - vsImtImlMap.put(sc, imtImlMap); } - return Maps.immutableEnumMap(vsImtImlMap); + return Collections.unmodifiableMap(imtImlMap); } } diff --git a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/NetcdfUtils.java b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/NetcdfUtils.java index 503b8f742abd39a27d27d9f30e4d2144909b0541..f1c2e7737de5ee7da705d6afc4fcba3dd933bd40 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/NetcdfUtils.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/NetcdfUtils.java @@ -73,6 +73,17 @@ public class NetcdfUtils { return (int[]) get1DArray(group, key, DataType.INT); } + /** + * Returns a {@code String[]} from a netCDF group + * + * @param group The netCDF group + * @param key The key to read from the group + * @throws IOException + */ + static String[] getStringArray(Group group, String key) throws IOException { + return (String[]) get1DArray(group, key, DataType.STRING); + } + /** * Get a 1D array from a netCDF group. 
* @@ -242,13 +253,14 @@ public class NetcdfUtils { } static class Key { - static final String AEPS = "AEPs"; - static final String IMLS = "Imls"; - static final String IMT = "Imt"; + static final String BOUNDS = "dataBounds"; + static final String GRID_MASK = "gridMask"; + static final String HAZARD = "hazard"; + static final String IMLS = "iml"; + static final String IMT = "imt"; static final String LAT = "lat"; static final String LON = "lon"; - static final String POE = "Poe"; - static final String SPACING = "spacing"; + static final String SITE_CLASS = "siteClass"; static final String VS30 = "vs30"; } } diff --git a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/StaticHazards.java b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/StaticHazards.java index a634c22d60a5f53656ec858718273ba9ceec69f5..5be53f16c3b076cb67511eda468bbc99d832dc8a 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/StaticHazards.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/reader/StaticHazards.java @@ -4,17 +4,17 @@ import static com.google.common.base.Preconditions.checkState; import java.util.EnumMap; -import gov.usgs.earthquake.nshmp.netcdf.SiteClass; +import gov.usgs.earthquake.nshmp.gmm.NehrpSiteClass; @SuppressWarnings("serial") -public class StaticHazards extends EnumMap<SiteClass, StaticHazard> { +public class StaticHazards extends EnumMap<NehrpSiteClass, StaticHazard> { public StaticHazards() { - super(SiteClass.class); + super(NehrpSiteClass.class); } - private StaticHazards(EnumMap<SiteClass, StaticHazard> staticHazards) { - super(SiteClass.class); + private StaticHazards(EnumMap<NehrpSiteClass, StaticHazard> staticHazards) { + super(NehrpSiteClass.class); this.putAll(staticHazards); } @@ -23,13 +23,13 @@ public class StaticHazards extends EnumMap<SiteClass, StaticHazard> { } public static class Builder { - EnumMap<SiteClass, StaticHazard> staticHazards; + EnumMap<NehrpSiteClass, StaticHazard> staticHazards; private Builder() { - staticHazards = new EnumMap<>(SiteClass.class); + staticHazards = new EnumMap<>(NehrpSiteClass.class); } - public Builder put(SiteClass siteClass, StaticHazard staticHazard) { + public Builder put(NehrpSiteClass siteClass, StaticHazard staticHazard) { staticHazards.put(siteClass, staticHazard); return this; } diff --git a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/www/NetcdfController.java b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/www/NetcdfController.java index c5625d1f7b25e9aba44df840bb97f7d5b85c23f9..fa76a7816b991d591770fa27cba3ae6b8d698a00 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/www/NetcdfController.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/www/NetcdfController.java @@ -6,9 +6,9 @@ import javax.annotation.Nullable; import javax.inject.Inject; import gov.usgs.earthquake.nshmp.gmm.Imt; +import gov.usgs.earthquake.nshmp.gmm.NehrpSiteClass; import gov.usgs.earthquake.nshmp.internal.www.NshmpMicronautServlet; import gov.usgs.earthquake.nshmp.netcdf.NshmGroup; -import gov.usgs.earthquake.nshmp.netcdf.SiteClass; import io.micronaut.context.annotation.Value; import io.micronaut.http.HttpRequest; @@ -89,7 +89,7 @@ public class NetcdfController { HttpRequest<?> request, @Schema(required = true) @QueryValue @Nullable Double longitude, @Schema(required = true) @QueryValue @Nullable Double latitude, - @QueryValue @Nullable SiteClass siteClass, + @QueryValue @Nullable NehrpSiteClass siteClass, @QueryValue @Nullable Imt imt) { var urlHelper = servlet.urlHelper(request); var query = new Query(longitude, 
latitude, siteClass, imt); @@ -120,7 +120,7 @@ public class NetcdfController { HttpRequest<?> request, @Schema(required = true) @PathVariable @Nullable Double longitude, @Schema(required = true) @PathVariable @Nullable Double latitude, - @Schema(required = true) @PathVariable @Nullable SiteClass siteClass, + @Schema(required = true) @PathVariable @Nullable NehrpSiteClass siteClass, @Schema(required = true) @PathVariable @Nullable Imt imt) { return doGetHazard(request, longitude, latitude, siteClass, imt); } @@ -148,7 +148,7 @@ public class NetcdfController { HttpRequest<?> request, @Schema(required = true) @PathVariable @Nullable Double longitude, @Schema(required = true) @PathVariable @Nullable Double latitude, - @Schema(required = true) @PathVariable @Nullable SiteClass siteClass) { + @Schema(required = true) @PathVariable @Nullable NehrpSiteClass siteClass) { return doGetHazard(request, longitude, latitude, siteClass, null); } @@ -234,10 +234,10 @@ public class NetcdfController { static class Query { final Double longitude; final Double latitude; - final SiteClass siteClass; + final NehrpSiteClass siteClass; final Imt imt; - Query(Double longitude, Double latitude, SiteClass siteClass, Imt imt) { + Query(Double longitude, Double latitude, NehrpSiteClass siteClass, Imt imt) { this.longitude = longitude; this.latitude = latitude; this.siteClass = siteClass; diff --git a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/www/NetcdfService.java b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/www/NetcdfService.java index d00e8320822c0c6c9fd01f6b23b85b52cbe50b00..7df22e0d72417a606c1f131e6efd7a98d54e3709 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/www/NetcdfService.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/www/NetcdfService.java @@ -13,13 +13,13 @@ import gov.usgs.earthquake.nshmp.geo.Location; import gov.usgs.earthquake.nshmp.geo.json.Feature; import gov.usgs.earthquake.nshmp.geo.json.GeoJson; import gov.usgs.earthquake.nshmp.gmm.Imt; +import gov.usgs.earthquake.nshmp.gmm.NehrpSiteClass; import gov.usgs.earthquake.nshmp.internal.www.NshmpMicronautServlet.UrlHelper; import gov.usgs.earthquake.nshmp.internal.www.Response; import gov.usgs.earthquake.nshmp.internal.www.WsUtils; import gov.usgs.earthquake.nshmp.internal.www.meta.Status; import gov.usgs.earthquake.nshmp.netcdf.NshmGroup; import gov.usgs.earthquake.nshmp.netcdf.NshmNetcdfReader; -import gov.usgs.earthquake.nshmp.netcdf.SiteClass; import gov.usgs.earthquake.nshmp.netcdf.reader.BoundingHazards; import gov.usgs.earthquake.nshmp.netcdf.reader.StaticHazard; import gov.usgs.earthquake.nshmp.netcdf.reader.StaticHazards; @@ -259,9 +259,9 @@ public class NetcdfService { } static class RequestDataCurves extends RequestData { - SiteClass siteClass; + NehrpSiteClass siteClass; - RequestDataCurves(double longitude, double latitude, SiteClass siteClass) { + RequestDataCurves(double longitude, double latitude, NehrpSiteClass siteClass) { super(longitude, latitude); this.siteClass = siteClass; } @@ -270,7 +270,7 @@ public class NetcdfService { static class RequestDataCurve extends RequestDataCurves { Imt imt; - RequestDataCurve(double longitude, double latitude, SiteClass siteClass, Imt imt) { + RequestDataCurve(double longitude, double latitude, NehrpSiteClass siteClass, Imt imt) { super(longitude, latitude, siteClass); this.imt = imt; } @@ -285,7 +285,7 @@ public class NetcdfService { NshmGroup nshmGroup, double longitude, double latitude, - SiteClass siteClass, + NehrpSiteClass siteClass, Imt imt) { super(longitude, 
latitude, siteClass, imt); nshm = nshmGroup; @@ -321,7 +321,7 @@ public class NetcdfService { final String netcdfFile; final NshmGroup nshm; final Bounds bounds; - final List<SiteClass> siteClasses; + final List<NehrpSiteClass> siteClasses; final List<Imt> imts; Parameters(String netcdfFile) { diff --git a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/www/NetcdfWsUtils.java b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/www/NetcdfWsUtils.java index f2bc38e7f65ebc061ac55ba220e390ae731aa981..0bc1ebd36ef1a6ed1370172273912987b9220662 100644 --- a/src/main/java/gov/usgs/earthquake/nshmp/netcdf/www/NetcdfWsUtils.java +++ b/src/main/java/gov/usgs/earthquake/nshmp/netcdf/www/NetcdfWsUtils.java @@ -2,8 +2,6 @@ package gov.usgs.earthquake.nshmp.netcdf.www; import static com.google.common.base.CaseFormat.UPPER_CAMEL; import static com.google.common.base.CaseFormat.UPPER_UNDERSCORE; -import static gov.usgs.earthquake.nshmp.internal.www.WsUtils.EnumSerializer; -import static gov.usgs.earthquake.nshmp.internal.www.WsUtils.NaNSerializer; import java.util.logging.Logger; @@ -11,11 +9,13 @@ import com.google.gson.Gson; import com.google.gson.GsonBuilder; import gov.usgs.earthquake.nshmp.gmm.Imt; +import gov.usgs.earthquake.nshmp.gmm.NehrpSiteClass; import gov.usgs.earthquake.nshmp.internal.www.NshmpMicronautServlet.UrlHelper; import gov.usgs.earthquake.nshmp.internal.www.Response; +import gov.usgs.earthquake.nshmp.internal.www.WsUtils.EnumSerializer; +import gov.usgs.earthquake.nshmp.internal.www.WsUtils.NaNSerializer; import gov.usgs.earthquake.nshmp.internal.www.meta.Status; import gov.usgs.earthquake.nshmp.netcdf.NshmGroup; -import gov.usgs.earthquake.nshmp.netcdf.SiteClass; import io.micronaut.http.HttpResponse; @@ -27,7 +27,7 @@ public class NetcdfWsUtils { GSON = new GsonBuilder() .registerTypeAdapter(Imt.class, new EnumSerializer<Imt>()) .registerTypeAdapter(NshmGroup.class, new EnumSerializer<NshmGroup>()) - .registerTypeAdapter(SiteClass.class, new EnumSerializer<SiteClass>()) + .registerTypeAdapter(NehrpSiteClass.class, new EnumSerializer<NehrpSiteClass>()) .registerTypeAdapter(Double.class, new NaNSerializer()) .disableHtmlEscaping() .serializeNulls() diff --git a/src/main/resources/default.nc b/src/main/resources/default.nc index 3d0fd9d8d719f7ab51b9c9a9d0fb1f114b8fda0f..20eaa6b5a1b4a7804eb545b230707f82769a1b27 100644 Binary files a/src/main/resources/default.nc and b/src/main/resources/default.nc differ diff --git a/src/test/java/gov/usgs/earthquake/nshmp/netcdf/NshmNetcdfReaderTests.java b/src/test/java/gov/usgs/earthquake/nshmp/netcdf/NshmNetcdfReaderTests.java index 1f07aafe4570bd9a08ea166a0e15aae4260bf401..b8437482ef885d7330e36fbe0f682d29d9de9f61 100644 --- a/src/test/java/gov/usgs/earthquake/nshmp/netcdf/NshmNetcdfReaderTests.java +++ b/src/test/java/gov/usgs/earthquake/nshmp/netcdf/NshmNetcdfReaderTests.java @@ -1,5 +1,6 @@ package gov.usgs.earthquake.nshmp.netcdf; +import static com.google.common.base.Preconditions.checkState; import static org.junit.jupiter.api.Assertions.assertArrayEquals; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertThrows; @@ -7,6 +8,8 @@ import static org.junit.jupiter.api.Assertions.assertTrue; import java.nio.file.Path; import java.nio.file.Paths; +import java.util.Collections; +import java.util.EnumMap; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -20,22 +23,34 @@ import org.junit.jupiter.params.provider.MethodSource; import com.google.common.io.Resources; import 
gov.usgs.earthquake.nshmp.data.XySequence; +import gov.usgs.earthquake.nshmp.geo.BorderType; import gov.usgs.earthquake.nshmp.geo.Location; import gov.usgs.earthquake.nshmp.geo.LocationList; +import gov.usgs.earthquake.nshmp.geo.Region; +import gov.usgs.earthquake.nshmp.geo.Regions; +import gov.usgs.earthquake.nshmp.geo.json.Feature; +import gov.usgs.earthquake.nshmp.geo.json.FeatureCollection; +import gov.usgs.earthquake.nshmp.geo.json.GeoJson; +import gov.usgs.earthquake.nshmp.geo.json.Properties; import gov.usgs.earthquake.nshmp.gmm.Imt; +import gov.usgs.earthquake.nshmp.gmm.NehrpSiteClass; import gov.usgs.earthquake.nshmp.netcdf.reader.BoundingHazards; import gov.usgs.earthquake.nshmp.netcdf.reader.StaticHazard; import gov.usgs.earthquake.nshmp.netcdf.reader.StaticHazards; public class NshmNetcdfReaderTests { - static final String CONUS_TEST_FILE = "conus-test.nc"; + static final String CONUS_TEST_FILE = "netcdf-conus-test-fv0.3.nc"; static final Path NETCDF_PATH = Paths.get(Resources.getResource(CONUS_TEST_FILE).getPath()); static final String TEST_INVALID_NC_FILE = "invalid-netcdf-file.nc"; static final Path NETCDF_INVALID_FILE = Paths.get( Resources.getResource(TEST_INVALID_NC_FILE).getPath()); + static final String CONUS_TEST_MAP_FILE = "map-netcdf-test-0p05.geojson"; + static final Path CONUS_TEST_MAP_PATH = + Paths.get(Resources.getResource(CONUS_TEST_MAP_FILE).getPath()); + // difference tolerance, until we can incorporate precision into // NshmNetcdfReader public static final double IML_TOL = 1e-6; @@ -47,6 +62,8 @@ public class NshmNetcdfReaderTests { new double[] { -105.3, -105.25, -105.2, -105.15 }; public static final double[] EXPECTED_LATITUDES = new double[] { 39.15, 39.2, 39.25, 39.3 }; + public static final Region EXPECTED_REGION; + static final LocationList BOUNDING_LOCATIONS = LocationList.builder() .add(EXPECTED_LONGITUDES[1], EXPECTED_LATITUDES[1]) .add(EXPECTED_LONGITUDES[1], EXPECTED_LATITUDES[2]) @@ -63,29 +80,35 @@ public class NshmNetcdfReaderTests { new double[] { 0.00233, 0.0265, 0.302, 3.44 }, new double[] { 0.00333, 0.0379, 0.432, 4.92 }); - static final List<double[]> HAZARDS = List.of( - new double[] { 0.04387208, 0.004250093, 0.0001192809, 2.201278e-07 }, - new double[] { 0.06567874, 0.005199003, 6.54093e-05, 6.705638e-08 }, - new double[] { 0.04127881, 0.003905505, 0.000122145, 3.597946e-07 }, - new double[] { 0.05262404, 0.003470381, 4.079298e-05, 4.619109e-08 }, - new double[] { 0.04388691, 0.004249376, 0.0001185702, 2.198896e-07 }, - new double[] { 0.06564134, 0.005175665, 6.480198e-05, 6.684925e-08 }, - new double[] { 0.04130831, 0.003907337, 0.0001214354, 3.59386e-07 }, - new double[] { 0.05258384, 0.003454936, 4.047655e-05, 4.606202e-08 }, - new double[] { 0.04346677, 0.00412513, 0.00011355, 2.171376e-07 }, - new double[] { 0.06530787, 0.0050781, 6.258566e-05, 6.466215e-08 }, - new double[] { 0.04089805, 0.003789999, 0.0001163111, 3.544692e-07 }, - new double[] { 0.0522491, 0.003379781, 3.912059e-05, 4.462088e-08 }, - new double[] { 0.04344459, 0.004124759, 0.0001141019, 2.173907e-07 }, - new double[] { 0.06532912, 0.005098603, 6.312435e-05, 6.484549e-08 }, - new double[] { 0.04086227, 0.003787166, 0.0001168667, 3.549025e-07 }, - new double[] { 0.05227672, 0.003393233, 3.939733e-05, 4.473717e-08 }, - new double[] { 4.3739751E-02, 4.2098902E-03, 1.1745205E-04, 2.1918876E-07 }, - new double[] { 6.5558481E-02, 5.1610430E-03, 6.4525923E-05, 6.6297021E-08 }, - new double[] { 4.1153710E-02, 3.8681961E-03, 1.2028426E-04, 3.5812084E-07 }, - new double[] { 
5.2503492E-02, 3.4418438E-03, 4.0267402E-05, 4.5693341E-08 }); - - public static final List<SiteClass> SITE_CLASSES = List.of(SiteClass.CD, SiteClass.C); + static final List<double[]> HAZARDS = List.of( // [idxSc,idxImt,idxLon,idxLat] + // lower left bounding hazard + new double[] { 4.387208E-02, 4.250093E-03, 1.192809E-04, 2.201278E-07 }, // [0,0,1,1] + new double[] { 6.567874E-02, 5.199003E-03, 6.540930E-05, 6.705638E-08 }, // [0,1,1,1] + new double[] { 4.127881E-02, 3.905505E-03, 1.221450E-04, 3.597946E-07 }, // [1,0,1,1] + new double[] { 5.262404E-02, 3.470381E-03, 4.079298E-05, 4.619109E-08 }, // [1,1,1,1] + // upper left bounding hazard + new double[] { 4.388691E-02, 4.249376E-03, 1.185702E-04, 2.198896E-07 }, // [0,0,1,2] + new double[] { 6.564134E-02, 5.175665E-03, 6.480199E-05, 6.684925E-08 }, // [0,1,1,2] + new double[] { 4.130831E-02, 3.907337E-03, 1.214354E-04, 3.593860E-07 }, // [1,0,1,2] + new double[] { 5.258384E-02, 3.454936E-03, 4.047655E-05, 4.606202E-08 }, // [1,1,1,2] + // upper right bounding hazard + new double[] { 4.346677E-02, 4.125130E-03, 1.135500E-04, 2.171376E-07 }, // [0,0,2,2] + new double[] { 6.530787E-02, 5.078100E-03, 6.258566E-05, 6.466215E-08 }, // [0,1,2,2] + new double[] { 4.089805E-02, 3.789999E-03, 1.163111E-04, 3.544692E-07 }, // [1,0,2,2] + new double[] { 5.224910E-02, 3.379782E-03, 3.912059E-05, 4.462088E-08 }, // [1,1,2,2] + // lower right bounding hazard + new double[] { 4.344459E-02, 4.124759E-03, 1.141019E-04, 2.173907E-07 }, // [0,0,2,1] + new double[] { 6.532912E-02, 5.098603E-03, 6.312435E-05, 6.484549E-08 }, // [0,1,2,1] + new double[] { 4.086227E-02, 3.787166E-03, 1.168667E-04, 3.549025E-07 }, // [1,0,2,1] + new double[] { 5.227672E-02, 3.393233E-03, 3.939733E-05, 4.473717E-08 }, // [1,1,2,1] + // target site hazard + new double[] { 4.373975E-02, 4.209890E-03, 1.174520E-04, 2.191888E-07 }, // [0,0,t,t] + new double[] { 6.555848E-02, 5.161043E-03, 6.452592E-05, 6.629702E-08 }, // [0,1,t,t] + new double[] { 4.115371E-02, 3.868196E-03, 1.202843E-04, 3.581208E-07 }, // [1,0,t,t] + new double[] { 5.250349E-02, 3.441844E-03, 4.026740E-05, 4.569334E-08 });// [1,1,t,t] + + public static final List<NehrpSiteClass> SITE_CLASSES = + List.of(NehrpSiteClass.CD, NehrpSiteClass.C); public static final List<Imt> IMTS = List.of(Imt.PGA, Imt.SA0P4); static final int TARGET_LOWER_LEFT_LONGITUDE_IDX = 1; @@ -94,7 +117,9 @@ public class NshmNetcdfReaderTests { static final double TARGET_LONGITUDE_FRAC; static final double TARGET_LATITUDE_FRAC; - public static Map<SiteClass, Map<Imt, double[]>> IMLS = new HashMap<>(); + // public static Map<NehrpSiteClass, Map<Imt, double[]>> IMLS = new + // HashMap<>(); + public static Map<Imt, double[]> IMLS = new HashMap<>(); static BoundingHazards BOUNDING_HAZARDS; @@ -125,15 +150,30 @@ public class NshmNetcdfReaderTests { BOUNDING_HAZARDS = builder.build(); - SITE_CLASSES.forEach(siteClass -> { - var haz = new HashMap<Imt, double[]>(); - - for (var i = 0; i < IMTS.size(); i++) { - haz.put(IMTS.get(i), XS.get(i)); - } + // Extract test region from CONUS_TEST_MAP_FILE + FeatureCollection dataRegions = GeoJson.from(CONUS_TEST_MAP_PATH).toFeatureCollection(); + List<Feature> features = dataRegions.features(); - IMLS.put(siteClass, haz); - }); + // From {@code Sites.createSiteRegion()} + checkState(features.size() <= 2, "Only 2 polygon features may be defined"); + int mapRegionIndex = 0; + if (features.size() > 1) { + // don't need to process the rectangular map extents, if present + mapRegionIndex = 1; + } + Feature sitesPoly 
= features.get(mapRegionIndex); + LocationList sitesPolyBorder = sitesPoly.asPolygonBorder(); + Properties properties = sitesPoly.properties(); + // get region name - either "title" or "name" property??? + String mapName = + properties.getString("title").orElse(properties.getString("name").orElse("Unnamed Map")); + EXPECTED_REGION = Regions.create(mapName, sitesPolyBorder, BorderType.MERCATOR_LINEAR); + + var imls = new EnumMap<Imt, double[]>(Imt.class); + for (var i = 0; i < IMTS.size(); i++) { + imls.put(IMTS.get(i), XS.get(i)); + } + IMLS = Collections.unmodifiableMap(imls); // calculate interpolation fraction for longitude and latitude int i = TARGET_LOWER_LEFT_LONGITUDE_IDX; @@ -185,7 +225,7 @@ public class NshmNetcdfReaderTests { @ParameterizedTest(name = "{index} ==> Site: {0}, {1}") @MethodSource("bySiteClass") - final void hazardWithSiteClassTests(Location site, SiteClass siteClass) { + final void hazardWithSiteClassTests(Location site, NehrpSiteClass siteClass) { var expected = BOUNDING_HAZARDS.get(site).get(siteClass); var actual = NETCDF.hazard(site, siteClass); testHazard(expected, actual); @@ -193,7 +233,7 @@ public class NshmNetcdfReaderTests { @ParameterizedTest(name = "{index} ==> {0}, {1}, {2}") @MethodSource("bySiteClassImt") - final void hazardWithSiteClassImtTests(Location site, SiteClass siteClass, Imt imt) { + final void hazardWithSiteClassImtTests(Location site, NehrpSiteClass siteClass, Imt imt) { var expected = BOUNDING_HAZARDS.get(site).get(siteClass).get(imt); var actual = NETCDF.hazard(site, siteClass, imt); testSequence(expected, actual); diff --git a/src/test/java/gov/usgs/earthquake/nshmp/netcdf/reader/NetcdfCoordinatesTest.java b/src/test/java/gov/usgs/earthquake/nshmp/netcdf/reader/NetcdfCoordinatesTest.java index bde5ef19762bd22cdc898679c9d40f515671f08b..83e107aa664d0c5f26ac4eb6c562ac95495df579 100644 --- a/src/test/java/gov/usgs/earthquake/nshmp/netcdf/reader/NetcdfCoordinatesTest.java +++ b/src/test/java/gov/usgs/earthquake/nshmp/netcdf/reader/NetcdfCoordinatesTest.java @@ -6,8 +6,6 @@ import static org.junit.jupiter.api.Assertions.assertTrue; import org.junit.jupiter.api.Test; -import gov.usgs.earthquake.nshmp.geo.BorderType; -import gov.usgs.earthquake.nshmp.geo.Regions; import gov.usgs.earthquake.nshmp.netcdf.NshmNetcdfReaderTests; class NetcdfCoordinatesTest { @@ -31,7 +29,7 @@ class NetcdfCoordinatesTest { var border = NetcdfUtils.buildBorder( NshmNetcdfReaderTests.EXPECTED_LONGITUDES, NshmNetcdfReaderTests.EXPECTED_LATITUDES); - var expectedRegion = Regions.create("test-region", border, BorderType.MERCATOR_LINEAR); + var expectedRegion = NshmNetcdfReaderTests.EXPECTED_REGION; var expectedBorder = expectedRegion.border(); var actualRegion = coords.region(); @@ -49,19 +47,15 @@ class NetcdfCoordinatesTest { var actualImls = coords.imls(); assertEquals(NshmNetcdfReaderTests.IMLS.size(), actualImls.size()); - for (var imlEntry : NshmNetcdfReaderTests.IMLS.entrySet()) { - var siteClass = imlEntry.getKey(); - assertTrue(actualImls.containsKey(siteClass)); - assertEquals(imlEntry.getValue().size(), actualImls.get(siteClass).size()); + for (var expectedImlEntry : NshmNetcdfReaderTests.IMLS.entrySet()) { + var expectedImt = expectedImlEntry.getKey(); + assertTrue(actualImls.containsKey(expectedImt)); + assertEquals(expectedImlEntry.getValue().length, actualImls.get(expectedImt).length); - for (var siteEntry : imlEntry.getValue().entrySet()) { - var imt = siteEntry.getKey(); - var expectedValue = siteEntry.getValue(); - var actualValue = 
actualImls.get(siteClass).get(imt); + var expectedValue = expectedImlEntry.getValue(); + var actualValue = actualImls.get(expectedImt); - assertTrue(actualImls.get(siteClass).containsKey(imt)); - assertArrayEquals(expectedValue, actualValue, NshmNetcdfReaderTests.IML_TOL); - } + assertArrayEquals(expectedValue, actualValue, NshmNetcdfReaderTests.IML_TOL); } } diff --git a/src/test/java/gov/usgs/earthquake/nshmp/netcdf/reader/NetcdfUtilsTests.java b/src/test/java/gov/usgs/earthquake/nshmp/netcdf/reader/NetcdfUtilsTests.java index 1a21d02b6829e6390993b71edd647f257633f0d0..05b33abaf0ed62a5a56b6ff395a09301c2a53a8e 100644 --- a/src/test/java/gov/usgs/earthquake/nshmp/netcdf/reader/NetcdfUtilsTests.java +++ b/src/test/java/gov/usgs/earthquake/nshmp/netcdf/reader/NetcdfUtilsTests.java @@ -13,7 +13,7 @@ import org.junit.jupiter.api.Test; import gov.usgs.earthquake.nshmp.data.XySequence; import gov.usgs.earthquake.nshmp.geo.LocationList; import gov.usgs.earthquake.nshmp.gmm.Imt; -import gov.usgs.earthquake.nshmp.netcdf.SiteClass; +import gov.usgs.earthquake.nshmp.gmm.NehrpSiteClass; class NetcdfUtilsTests { @@ -52,7 +52,7 @@ class NetcdfUtilsTests { .add(BORDER_LONGITUDES[0], BORDER_LATITUDES[0]) .build(); - var siteClasses = List.of(SiteClass.B, SiteClass.C, SiteClass.D); + var siteClasses = List.of(NehrpSiteClass.B, NehrpSiteClass.C, NehrpSiteClass.D); var imts = List.of(Imt.PGA, Imt.SA0P1, Imt.SA1P5); var imlValues = new double[] { 0.1, 0.5, 0.75 }; @@ -61,7 +61,7 @@ class NetcdfUtilsTests { var mapHazTargetBuilder = StaticHazards.builder(); var mapDiffImlValueBuilder = StaticHazards.builder(); - for (SiteClass sc : siteClasses) { + for (NehrpSiteClass sc : siteClasses) { var imtMap0 = StaticHazard.builder(); var imtMap1 = StaticHazard.builder(); var imtMapTarget = StaticHazard.builder(); @@ -102,7 +102,7 @@ class NetcdfUtilsTests { // Add extra site class var mapDiffScSizeBuilder = StaticHazards.builder(); mapHaz0.forEach((key, value) -> mapDiffScSizeBuilder.put(key, value)); - mapDiffScSizeBuilder.put(SiteClass.A, mapHaz0.get(siteClasses.get(0))); + mapDiffScSizeBuilder.put(NehrpSiteClass.A, mapHaz0.get(siteClasses.get(0))); mapDiffScSize = mapDiffScSizeBuilder.build(); // Add extra IMT diff --git a/src/test/resources/conus-test.nc b/src/test/resources/conus-test.nc deleted file mode 100644 index 5b9f03a642e02d783787af86312ed2fb8d07f1fc..0000000000000000000000000000000000000000 Binary files a/src/test/resources/conus-test.nc and /dev/null differ diff --git a/src/test/resources/map-netcdf-test-0p05.geojson b/src/test/resources/map-netcdf-test-0p05.geojson new file mode 100644 index 0000000000000000000000000000000000000000..819addfaddbb0da515d7f47c0bf94e657394d4c7 --- /dev/null +++ b/src/test/resources/map-netcdf-test-0p05.geojson @@ -0,0 +1,50 @@ +{ + "type": "FeatureCollection", + "features": [ + { + "type": "Feature", + "id": "Extents", + "geometry": { + "type": "Polygon", + "coordinates": [ + [ + [-105.31, 39.15], + [-105.31, 39.31], + [-105.10, 39.31], + [-105.10, 39.15], + [-105.31, 39.15] + ] + ] + }, + "properties": { + "fill": "#AA0078", + "stroke": "#AA0078", + "title": "Conterminous US Map Extents" + } + }, + { + "type": "Feature", + "geometry": { + "type": "Polygon", + "coordinates": [ + [ + [-105.31, 39.15], + [-105.25, 39.25], + [-105.30, 39.25], + [-105.30, 39.31], + [-105.22, 39.30], + [-105.15, 39.30], + [-105.15, 39.25], + [-105.10, 39.25], + [-105.13, 39.15], + [-105.31, 39.15] + ] + ] + }, + "properties": { + "spacing": 0.05, + "title": "Conterminous US netCDF Test Region" + 
} + } + ] +} diff --git a/src/test/resources/netcdf-conus-test-fv0.3.nc b/src/test/resources/netcdf-conus-test-fv0.3.nc new file mode 100644 index 0000000000000000000000000000000000000000..1cff28e4aabc1888e673b28fe86f0a8c4444e7af Binary files /dev/null and b/src/test/resources/netcdf-conus-test-fv0.3.nc differ
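Reviewer note (not part of the patch): the change set above replaces the project-local SiteClass enum with gov.usgs.earthquake.nshmp.gmm.NehrpSiteClass throughout the public API. The snippet below is a minimal usage sketch of the updated NshmNetcdfReader methods, offered only as orientation for reviewers. The helper class and method names are placeholders, and the reader instance is assumed to be constructed elsewhere (this change set does not touch its constructor); only NshmNetcdfReader.hazard(...), NehrpSiteClass, Imt, Location, StaticHazard, and XySequence are taken from the sources above.

import gov.usgs.earthquake.nshmp.data.XySequence;
import gov.usgs.earthquake.nshmp.geo.Location;
import gov.usgs.earthquake.nshmp.gmm.Imt;
import gov.usgs.earthquake.nshmp.gmm.NehrpSiteClass;
import gov.usgs.earthquake.nshmp.netcdf.NshmNetcdfReader;
import gov.usgs.earthquake.nshmp.netcdf.reader.StaticHazard;

/* Illustrative helper; names here are placeholders, not part of this change set. */
class ReaderUsageSketch {

  static void printHazard(NshmNetcdfReader reader, Location site) {

    /*
     * All hazard curves at a site for one NEHRP site class. The reader throws
     * IllegalArgumentException if the site class is not among those read from
     * the netCDF siteClass variable.
     */
    StaticHazard curvesBySiteClass = reader.hazard(site, NehrpSiteClass.CD);

    /* A single curve (IMLs vs. exceedance values) for one IMT. */
    XySequence pgaCurve = reader.hazard(site, NehrpSiteClass.CD, Imt.PGA);

    System.out.println(curvesBySiteClass.get(Imt.PGA));
    System.out.println(pgaCurve);
  }
}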
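A related migration note for callers of NetcdfCoordinates.imls(): the IML table is now keyed by IMT only (the iml variable has dimensions (Imt, Iml)), so the intermediate site-class lookup disappears, as the BoundingHazardsReader hunk above already reflects. A before/after sketch with illustrative variable names:

// before: Map<SiteClass, Map<Imt, double[]>>, IMLs looked up per site class
// double[] imls = coords.imls().get(siteClass).get(imt);

// after: Map<Imt, double[]>, a single IML array per IMT
double[] imls = coords.imls().get(imt);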