From 694c6c83a903731ae032a576f986f760ef23d48b Mon Sep 17 00:00:00 2001
From: Peter Powers <pmpowers@usgs.gov>
Date: Tue, 1 Mar 2022 07:55:14 -0700
Subject: [PATCH] updated siteData with margin

---
 .../earthquake/nshmp/model/ModelLoader.java   |   2 +
 .../usgs/earthquake/nshmp/model/SiteData.java | 156 ++++++++++++++----
 .../earthquake/nshmp/gmm/GmmInputTest.java    |   2 +-
 3 files changed, 127 insertions(+), 33 deletions(-)

diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/ModelLoader.java b/src/main/java/gov/usgs/earthquake/nshmp/model/ModelLoader.java
index 5a26ed12..b3e1dcc2 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/model/ModelLoader.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/model/ModelLoader.java
@@ -98,12 +98,14 @@ abstract class ModelLoader {
    */
   public static void main(String[] args) {
 
+    // Path testModel = Paths.get("../nshm-conus-2018-tmp");
     Path testModel = Paths.get("../nshm-conus");
     // Path testModel = Paths.get("../nshm-hawaii");
 
     HazardModel model = ModelLoader.load(testModel);
     System.out.println();
     System.out.println(model);
+
   }
 
   private static Logger log;
diff --git a/src/main/java/gov/usgs/earthquake/nshmp/model/SiteData.java b/src/main/java/gov/usgs/earthquake/nshmp/model/SiteData.java
index d0aaadf4..d34cca8e 100644
--- a/src/main/java/gov/usgs/earthquake/nshmp/model/SiteData.java
+++ b/src/main/java/gov/usgs/earthquake/nshmp/model/SiteData.java
@@ -8,13 +8,18 @@ import static java.util.stream.Collectors.toUnmodifiableMap;
 import static java.util.stream.Collectors.toUnmodifiableSet;
 
 import java.awt.geom.Area;
+import java.io.IOException;
 import java.math.BigDecimal;
+import java.math.RoundingMode;
+import java.nio.file.DirectoryStream;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.util.Map;
 import java.util.OptionalDouble;
 import java.util.Set;
+import java.util.function.Function;
 import java.util.stream.Stream;
+import java.util.stream.StreamSupport;
 
 import gov.usgs.earthquake.nshmp.Maths;
 import gov.usgs.earthquake.nshmp.data.DelimitedData;
@@ -22,15 +27,13 @@ import gov.usgs.earthquake.nshmp.data.DelimitedData.Record;
 import gov.usgs.earthquake.nshmp.geo.Location;
 import gov.usgs.earthquake.nshmp.geo.Locations;
 import gov.usgs.earthquake.nshmp.geo.json.Feature;
-import gov.usgs.earthquake.nshmp.geo.json.FeatureCollection;
 import gov.usgs.earthquake.nshmp.geo.json.GeoJson;
 import gov.usgs.earthquake.nshmp.geo.json.Properties;
 
 /**
  * Site data wrapper class. This class provides one method {@code get(Location)}
  * that returns the superset of all site data parameters required by the current
- * suite of supported NSHMs. Currently this only includes the {@code z1.0} and
- * {@code z2.5} basin depth values.
+ * suite of supported NSHMs.
  *
  * This class assumes that if a location 'hit test' succeeds on an area for
 * which there are site data, then non null values will exist for the location.
@@ -39,34 +42,29 @@ import gov.usgs.earthquake.nshmp.geo.json.Properties;
  */
 public class SiteData {
 
-  /*
-   * Developer notes: This class will soon be expanded to support a variety of
-   * site data such as sediment thickness. Web services should map into the
-   * SiteData.Values class. We're only working with the two basin depth values
-   * at present, but ultimately we'll probably want
-   *
-   * Consider having this class deliver defaults.
-   */
-
   /** Empty site data instance. */
-  public static final SiteData EMPTY = new SiteData(null) {
+  public static final SiteData EMPTY = new SiteData(null, null) {
     @Override
     public Values get(Location loc) {
       return Values.EMPTY;
     }
   };
 
-  private static Path BASIN_FILE = Path.of("basin/basins.geojson");
+  private static Path BASIN_DIR = Path.of("basin");
+  private static Path MARGIN_DIR = Path.of("margin");
 
   private Set<Basin> basins;
+  private Set<Margin> margins;
 
-  private SiteData(Set<Basin> basins) {
+  private SiteData(Set<Basin> basins, Set<Margin> margins) {
     this.basins = basins;
+    this.margins = margins;
   }
 
   static SiteData create(Path dir) {
-    Set<Basin> basins = Basin.init(dir);
-    return new SiteData(basins);
+    Set<Basin> basins = init(dir.resolve(BASIN_DIR), Basin::new);
+    Set<Margin> margins = init(dir.resolve(MARGIN_DIR), Margin::new);
+    return new SiteData(basins, margins);
   }
 
   /**
@@ -82,9 +80,29 @@ public class SiteData {
         break;
       }
     }
+    for (Margin margin : margins) {
+      if (margin.contains(location)) {
+        builder.marginValues(margin.get(location));
+        break;
+      }
+    }
     return builder.build();
   }
 
+  /*
+   * Snap value to closest multiple of spacing.
+   *
+   * Double precision rounding errors result in grid midpoints rounding both up
+   * and down in the initial Math.round() call.
+   */
+  private static double snapToGrid(double value, double spacing, int scale) {
+    return Maths.round(
+        Math.round(value / spacing) * spacing,
+        scale, RoundingMode.HALF_UP);
+  }
+
+  // @Override
+
   /** Optional site data values associated with a location. */
   public static class Values {
 
@@ -96,15 +114,20 @@ public class SiteData {
     /** Depth to the shear-wave velocity horizon of 2.5 km/s, in km. */
     public final OptionalDouble z2p5;
 
+    /** Continental margin sediment thickness, in km. */
+    public final OptionalDouble zSed;
+
     private Values(Builder builder) {
       this.z1p0 = builder.z1p0;
       this.z2p5 = builder.z2p5;
+      this.zSed = builder.zSed;
     }
 
     private static class Builder {
 
       OptionalDouble z1p0 = OptionalDouble.empty();
       OptionalDouble z2p5 = OptionalDouble.empty();
+      OptionalDouble zSed = OptionalDouble.empty();
 
       Builder basinValues(Basin.Values values) {
         this.z1p0 = OptionalDouble.of(values.z1p0);
@@ -112,6 +135,11 @@ public class SiteData {
         return this;
       }
 
+      Builder marginValues(Margin.Values values) {
+        this.zSed = OptionalDouble.of(values.zSed);
+        return this;
+      }
+
       Values build() {
         return new Values(this);
       }
@@ -124,7 +152,20 @@ public class SiteData {
         record.getDouble("lat"));
   }
 
-  /* Container for a basin region. */
+  private static <T> Set<T> init(Path dir, Function<Path, T> mapper) {
+    if (Files.exists(dir)) {
+      try (DirectoryStream<Path> ds = Files.newDirectoryStream(dir, "*.geojson")) {
+        return StreamSupport.stream(ds.spliterator(), false)
+            .map(mapper)
+            .collect(toUnmodifiableSet());
+      } catch (IOException ioe) {
+        throw new RuntimeException(ioe);
+      }
+    }
+    return Set.of();
+  }
+
+  /* Container for a basin region and depth data. */
   static final class Basin {
 
     final String name;
     final String id;
     final String model;
     final double spacing;
     final int scale;
     final Feature feature;
     final Area area;
     final Map<Location, Values> dataMap;
 
-    private Basin(Path file, Feature feature) {
+    private Basin(Path geojson) {
+      Feature feature = GeoJson.from(geojson).toFeature();
       Properties properties = feature.properties();
       this.name = properties.getString(NAME).orElseThrow();
       this.id = properties.getString(ID).orElseThrow();
@@ -144,7 +186,7 @@
       this.scale = BigDecimal.valueOf(spacing).scale();
       this.feature = feature;
       this.area = new Area(Locations.toPath(feature.asPolygonBorder()));
-      Path dataFile = file.resolveSibling(id + ".csv");
+      Path dataFile = geojson.resolveSibling(id + ".csv");
       this.dataMap = loadData(dataFile);
     }
 
@@ -154,24 +196,13 @@
     Values get(Location loc) {
+
       Location roundedLoc = Location.create(
           Maths.round(loc.longitude, scale),
           Maths.round(loc.latitude, scale));
       return dataMap.get(roundedLoc);
     }
 
-    static Set<Basin> init(Path dir) {
-      Path file = dir.resolve(BASIN_FILE);
-      if (Files.exists(dir)) {
-        FeatureCollection fc = GeoJson.from(file).toFeatureCollection();
-        Set<Basin> basins = fc.features().stream()
-            .map(feature -> new Basin(file, feature))
-            .collect(toUnmodifiableSet());
-        return basins;
-      }
-      return Set.of();
-    }
-
     private Map<Location, Values> loadData(Path file) {
       DelimitedData csv = DelimitedData.comma(file);
       try (Stream<Record> stream = csv.records()) {
@@ -199,4 +230,65 @@
     }
   }
 
+  /* Container for continental margin sediment thickness data. */
+  static final class Margin {
+
+    final String name;
+    final String id;
+    final String model;
+    final double spacing;
+    final int scale;
+    final Feature feature;
+    final Area area;
+    final Map<Location, Values> dataMap;
+
+    private Margin(Path geojson) {
+      Feature feature = GeoJson.from(geojson).toFeature();
+      Properties properties = feature.properties();
+      this.name = properties.getString(NAME).orElseThrow();
+      this.id = properties.getString(ID).orElseThrow();
+      this.model = properties.getString(MODEL).orElseThrow();
+      this.spacing = properties.getDouble(SPACING).orElseThrow();
+      this.scale = BigDecimal.valueOf(spacing).scale();
+      this.feature = feature;
+      this.area = new Area(Locations.toPath(feature.asPolygonBorder()));
+      Path dataFile = geojson.resolveSibling(id + ".csv");
+      this.dataMap = loadData(dataFile);
+    }
+
+    /* Return whether the margin contains the supplied location. */
+    boolean contains(Location loc) {
+      return area.contains(loc.longitude, loc.latitude);
+    }
+
+    Values get(Location loc) {
+      Location snappedLoc = Location.create(
+          snapToGrid(loc.longitude, spacing, scale),
+          snapToGrid(loc.latitude, spacing, scale));
+      return dataMap.get(snappedLoc);
+    }
+
+    private Map<Location, Values> loadData(Path file) {
+      DelimitedData csv = DelimitedData.comma(file);
+      try (Stream<Record> stream = csv.records()) {
+        return stream.collect(toUnmodifiableMap(
+            SiteData::toLocation,
+            Margin::toValues));
+      }
+    }
+
+    private static Values toValues(Record record) {
+      return new Values(record.getDouble("zsed"));
+    }
+
+    static final class Values {
+
+      final double zSed;
+
+      Values(double zSed) {
+        this.zSed = zSed;
+      }
+    }
+  }
+
 }
diff --git a/src/test/java/gov/usgs/earthquake/nshmp/gmm/GmmInputTest.java b/src/test/java/gov/usgs/earthquake/nshmp/gmm/GmmInputTest.java
index 6d7fa401..74a4d0ce 100644
--- a/src/test/java/gov/usgs/earthquake/nshmp/gmm/GmmInputTest.java
+++ b/src/test/java/gov/usgs/earthquake/nshmp/gmm/GmmInputTest.java
@@ -257,7 +257,7 @@ class GmmInputTest {
     GmmInput gmm = GmmInput.builder().withDefaults().build();
     GmmInput gmmCopy = GmmInput.builder().fromCopy(gmm).build();
     GmmInput gmmDiff = GmmInput.builder().withDefaults()
-        .mag(10.0).z1p0(10.0).z2p5(10.0).zSed(10.0).build();
+        .mag(10.0).z1p0(10.0).z2p5(10.0).build();
 
     assertEquals(gmm.hashCode(), gmmCopy.hashCode());
     assertNotEquals(gmm.hashCode(), gmmDiff.hashCode());
-- 
GitLab
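
A minimal caller-side sketch (not part of the patch) of how the new continental margin value surfaces next to the existing basin depths through SiteData.get(Location). It assumes a SiteData instance is already in hand, since SiteData.create(Path) above is package-private; the helper name siteDepths and the NaN fallbacks are illustrative assumptions only, not anything this change adds.

  import gov.usgs.earthquake.nshmp.geo.Location;
  import gov.usgs.earthquake.nshmp.model.SiteData;

  // Hypothetical helper: pull the superset of site values for one location.
  // Locations outside every basin and margin polygon yield empty optionals.
  static double[] siteDepths(SiteData siteData, Location loc) {
    SiteData.Values values = siteData.get(loc);
    double z1p0 = values.z1p0.orElse(Double.NaN); // basin depth to 1.0 km/s horizon, in km
    double z2p5 = values.z2p5.orElse(Double.NaN); // basin depth to 2.5 km/s horizon, in km
    double zSed = values.zSed.orElse(Double.NaN); // margin sediment thickness, in km
    return new double[] { z1p0, z2p5, zSed };
  }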