Skip to content
Snippets Groups Projects
Commit c3978a98 authored by Clayton, Brandon Scott's avatar Clayton, Brandon Scott
Browse files

Merge branch 'cleanup' into 'main'

Cleanup

See merge request !219
parents 40380406 f9f2b133
No related branches found
Tags 0.3.0
1 merge request!219Cleanup
Pipeline #370714 passed
Showing
with 7 additions and 1766 deletions
......@@ -45,7 +45,6 @@ RUN mkdir /asset-output \
USER usgs-user
COPY --from=builder --chown=usgs-user:usgs-user /app/build/libs/nshmp-ws-all.jar .
COPY --from=builder --chown=usgs-user:usgs-user /app/fault-sections /app/fault-sections
EXPOSE 8080
......
......@@ -73,11 +73,9 @@ can then be accessed from:
```bash
http://localhost:PORT/nshmp/data
http://localhost:PORT/nshmp/data/fault-sections
# Example
http://localhost:8080/nshmp/data
http://localhost:8080/nshmp/data/fault-sections
```
The `PORT` should be replaced with the same value used to start the container.
......
......@@ -24,7 +24,6 @@ java {
apply from: "${projectDir}/gradle/app-version.gradle"
apply from: "${projectDir}/gradle/dependencies.gradle"
apply from: "${projectDir}/gradle/fault-sections.gradle"
apply from: "${projectDir}/gradle/git-hooks.gradle"
apply from: "${projectDir}/gradle/node.gradle"
apply from: "${projectDir}/gradle/repositories.gradle"
......@@ -53,7 +52,6 @@ shadowJar {
}
tasks.withType(JavaCompile) {
dependsOn downloadFaultSections
options.encoding = "UTF-8"
options.compilerArgs.add("-parameters")
}
......
apply plugin: "de.undercouch.download"

ext {
  archiveUrl = "https://code.usgs.gov/ghsc/nshmp/nshm-fault-sections/-/archive"
  faultsDir = "fault-sections";
}

/*
 * Downloads and unzips the nshm-fault-sections release archives into
 * faultsDir. The zip is deleted after extraction. Add new release tags to
 * the list below. (The original duplicated this logic per tag, and the
 * copied comment for 1.2 still said "tag 1.1".)
 */
task downloadFaultSections() {
  doLast {
    ["1.1", "1.2"].each { tag ->
      // Download and unzip nshm-fault-sections for this tag
      def zipFile = new File(faultsDir, "nshm-fault-sections-${tag}.zip")
      download.run {
        src "${archiveUrl}/${tag}/nshm-fault-sections-${tag}.zip"
        dest zipFile
      }
      copy {
        from zipTree(zipFile)
        into faultsDir
      }
      delete {
        delete zipFile
      }
    }
  }
}

// Removes the downloaded fault sections; wired into the standard clean task.
task faultSectionsClean(type: Delete) {
  delete faultsDir
}
clean.dependsOn faultSectionsClean
package gov.usgs.earthquake.nshmp.www;
import gov.usgs.earthquake.nshmp.www.services.FaultSectionsController;
import gov.usgs.earthquake.nshmp.www.services.GpsController;
import gov.usgs.earthquake.nshmp.www.services.GulfController;
import io.micronaut.context.ApplicationContext;
import io.micronaut.http.HttpRequest;
import io.micronaut.runtime.Micronaut;
import io.swagger.v3.oas.annotations.OpenAPIDefinition;
import io.swagger.v3.oas.annotations.info.Info;
......@@ -14,38 +8,16 @@ import io.swagger.v3.oas.annotations.info.Info;
info = @Info(
title = "NSHMP Non-NSHM Services",
description = "### Services not related to a National Seismic Hazard Model:\n" +
"* Fault Sections: Get fault sections in GeoJSON form\n" +
"* Geodesy: Get GPS data sets for NSHM\n" +
"* Ground Motion Model: Get response spectra and ground motion Vs. distance, " +
"and ground motion Vs. magnitude\n" +
"* Gulf: Get gulf coast sediment depth data"))
"* Ground Motion Models: \n" +
"\t* Response spectra \n" +
"\t* Hanging Wall effects \n" +
"\t* Ground motion Vs. Distance \n" +
"\t* Ground motion Vs. magnitude\n"))
public class Application {
public static void main(String[] args) {
ApplicationContext app = Micronaut.build(args)
Micronaut.build(args)
.mainClass(Application.class)
.start();
initFaultSections(app);
initGps(app);
initGulf(app);
}
/* Call usage to load nshm-fault-sections */
private static void initFaultSections(ApplicationContext app) {
FaultSectionsController cont = app.createBean(FaultSectionsController.class);
cont.doGetFaults(HttpRequest.GET(""), null, null);
}
/* Call usage to load GPS data */
private static void initGps(ApplicationContext app) {
GpsController cont = app.createBean(GpsController.class);
cont.doGetGpsData(HttpRequest.GET(""), null, null);
}
/* Call usage to load Gulf data */
private static void initGulf(ApplicationContext app) {
GulfController cont = app.createBean(GulfController.class);
cont.doGetGulf(HttpRequest.GET(""), null, null);
}
}
package gov.usgs.earthquake.nshmp.www;
/**
 * The NSHM regions.
 *
 * @author U.S. Geological Survey
 */
public enum NshmRegion {

  ALASKA("Alaska"),
  // Fixed typo: was "American Somoa"
  AMSAM("American Samoa"),
  CONUS("Conterminous US"),
  // Fixed: territory name is "Northern Mariana Islands"
  GNMI("Guam and Northern Mariana Islands"),
  HAWAII("Hawaii"),
  PRVI("Puerto Rico and U.S. Virgin Islands");

  /** Human-readable display name for this region. */
  public final String display;

  private NshmRegion(String display) {
    this.display = display;
  }
}
package gov.usgs.earthquake.nshmp.www.services;
package gov.usgs.earthquake.nshmp.www;
import java.nio.charset.StandardCharsets;
import java.util.stream.Collectors;
import com.google.common.io.Resources;
import gov.usgs.earthquake.nshmp.www.NshmpMicronautServlet;
import gov.usgs.earthquake.nshmp.www.Utils;
import io.micronaut.http.HttpRequest;
import io.micronaut.http.HttpResponse;
import io.micronaut.http.MediaType;
......
......@@ -9,9 +9,6 @@ import com.google.gson.GsonBuilder;
import io.micronaut.http.HttpResponse;
public class Utils {
public static final String FAULT_SECTIONS_RESOURCES = "fault-sections";
public static final String GPS_RESOURCES = "/gps/";
public static final String GULF_RESOURCES = "/gulf/";
public static final Gson GSON;
static {
......
package gov.usgs.earthquake.nshmp.www.fault;
import static com.google.common.base.Preconditions.checkState;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import gov.usgs.earthquake.nshmp.geo.json.FeatureCollection;
import gov.usgs.earthquake.nshmp.geo.json.GeoJson;
import gov.usgs.earthquake.nshmp.model.SourceFeature;
import gov.usgs.earthquake.nshmp.www.Utils;
/**
 * Loader and container for NSHM fault-section GeoJSON files.
 *
 * <p>Fault sections are read from the {@code fault-sections} resource
 * directory ({@link Utils#FAULT_SECTIONS_RESOURCES}), one subdirectory per
 * {@link NshmFaultSection}.
 *
 * @author U.S. Geological Survey
 */
public class FaultSections {

  private static final String GEOJSON_SUFFIX = ".geojson";
  private static final Path FAULT_SECTIONS = Path.of(Utils.FAULT_SECTIONS_RESOURCES);

  private final List<SourceFeature> features;

  private FaultSections(List<SourceFeature> features) {
    this.features = features;
  }

  /**
   * Load all the fault sections, keyed by NSHM fault section.
   *
   * @throws RuntimeException (with cause) if any section fails to load
   */
  public static Map<NshmFaultSection, FaultSections> getFaultSections() {
    try {
      var faultSectionsMap = new HashMap<NshmFaultSection, FaultSections>();

      for (var nshmFaultSection : NshmFaultSection.values()) {
        Path path = FAULT_SECTIONS.resolve(nshmFaultSection.directory());
        faultSectionsMap.put(
            nshmFaultSection,
            loadFaultSections(nshmFaultSection, path));
      }

      return Map.copyOf(faultSectionsMap);
    } catch (Exception e) {
      throw new RuntimeException(
          String.format("Failed to load fault sections [%s]", FAULT_SECTIONS.toAbsolutePath()), e);
    }
  }

  /**
   * Returns the feature collection.
   */
  public FeatureCollection toFeatureCollection() {
    var builder = GeoJson.builder();
    features.forEach(feature -> builder.add(feature.source));
    return builder.build();
  }

  /**
   * Returns the list of source features.
   */
  public List<SourceFeature> features() {
    return features;
  }

  /* Resolves and validates the section directory, then loads its features. */
  private static FaultSections loadFaultSections(
      NshmFaultSection nshmFaultSection,
      Path faultSectionPath) {
    faultSectionPath = faultSectionPath.toAbsolutePath().normalize();

    if (!Files.exists(faultSectionPath)) {
      throw new IllegalArgumentException(
          "Fault sections path [" + faultSectionPath + "] does not exist");
    }

    try {
      return new FaultSections(getFeatures(nshmFaultSection, faultSectionPath));
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
  }

  /*
   * Reads every GeoJSON file for the section, classifying each file as an
   * interface, decollement, or plain fault section by filename, and checking
   * that feature ids are unique.
   */
  private static List<SourceFeature> getFeatures(
      NshmFaultSection nshmFaultSection,
      Path faultSectionPath)
      throws IOException {
    List<Path> dirs = filterDirectories(nshmFaultSection, faultSectionPath);
    List<Path> files = filterGeoJson(dirs);
    List<SourceFeature> features = new ArrayList<>();
    List<Integer> ids = new ArrayList<>();

    for (Path geoJsonFile : files) {
      boolean isInterface = geoJsonFile.toString().contains("(interface)");
      boolean isDecollement = geoJsonFile.toString().contains("Decollement");
      SourceFeature feature = isInterface
          ? SourceFeature.newInterfaceSection(geoJsonFile)
          : isDecollement
              ? SourceFeature.newDecollementSection(geoJsonFile)
              : SourceFeature.newFaultSection(geoJsonFile);
      int id = feature.id;
      checkState(
          !ids.contains(id),
          "Duplicate id: [%s] %s : %s ",
          id, feature.state.orElseThrow(), feature.name);
      // BUG FIX: the original never added ids to this list, so the duplicate
      // check above could never fire.
      ids.add(id);
      features.add(feature);
    }

    return features;
  }

  /*
   * Returns the two-letter state/territory directories relevant to the
   * section's region. The Files.walk stream is closed via try-with-resources;
   * the original leaked the stream's file handles.
   */
  private static List<Path> filterDirectories(NshmFaultSection nshmFaultSection, Path path)
      throws IOException {
    try (var paths = Files.walk(path)) {
      var dirs = paths.filter(Files::isDirectory);

      switch (nshmFaultSection.region()) {
        case ALASKA:
          return dirs
              .filter(file -> file.getFileName().toString().equals("AK"))
              .collect(Collectors.toList());
        case AMSAM:
          return dirs
              .filter(file -> file.getFileName().toString().equals("AS"))
              .collect(Collectors.toList());
        case CONUS:
          // All two-letter directories except the non-conterminous regions
          return dirs
              .filter(file -> file.getFileName().toString().length() == 2)
              .filter(file -> !file.getFileName().toString().equals("AK"))
              .filter(file -> !file.getFileName().toString().equals("AS"))
              .filter(file -> !file.getFileName().toString().equals("GU"))
              .filter(file -> !file.getFileName().toString().equals("HI"))
              .filter(file -> !file.getFileName().toString().equals("PR"))
              .collect(Collectors.toList());
        case GNMI:
          return dirs
              .filter(file -> file.getFileName().toString().equals("GU"))
              .collect(Collectors.toList());
        case HAWAII:
          return dirs
              .filter(file -> file.getFileName().toString().equals("HI"))
              .collect(Collectors.toList());
        case PRVI:
          return dirs
              .filter(file -> file.getFileName().toString().equals("PR"))
              .collect(Collectors.toList());
        default:
          return dirs
              .filter(file -> file.getFileName().toString().length() == 2)
              .collect(Collectors.toList());
      }
    }
  }

  /* Returns all GeoJSON files found under the supplied directories. */
  private static List<Path> filterGeoJson(List<Path> dirs) throws IOException {
    List<Path> files = new ArrayList<>();

    for (Path dir : dirs) {
      try (var paths = Files.walk(dir)) {
        files.addAll(paths
            .filter(Files::isRegularFile)
            .filter(file -> file.getFileName().toString().endsWith(GEOJSON_SUFFIX))
            .collect(Collectors.toList()));
      }
    }

    return files;
  }
}
package gov.usgs.earthquake.nshmp.www.fault;
import gov.usgs.earthquake.nshmp.www.NshmRegion;
import io.micronaut.core.annotation.Introspected;
/**
 * NSHM fault sections.
 *
 * <p>Note: the Javadoc must precede the annotation, otherwise it does not
 * attach to the enum.
 *
 * @author U.S. Geological Survey
 */
@Introspected
public enum NshmFaultSection {

  // NOTE(review): every entry points at the nshm-fault-sections-1.1 archive
  // even though the build also downloads 1.2 — confirm the 2023 entries
  // should not reference nshm-fault-sections-1.2.
  ALASKA_2007(
      "2007 Alaska Fault Sections",
      NshmRegion.ALASKA,
      "nshm-fault-sections-1.1"),

  ALASKA_2023(
      "2023 Alaska Fault Sections",
      NshmRegion.ALASKA,
      "nshm-fault-sections-1.1"),

  // Fixed typo: was "American Somoa"
  AMSAM_2012(
      "2012 American Samoa Fault Sections",
      NshmRegion.AMSAM,
      "nshm-fault-sections-1.1"),

  CONUS_2018(
      "2018 Conterminous U.S. Fault Sections",
      NshmRegion.CONUS,
      "nshm-fault-sections-1.1"),

  CONUS_2023(
      "2023 Conterminous U.S. Fault Sections",
      NshmRegion.CONUS,
      "nshm-fault-sections-1.1"),

  GNMI_2012(
      "2012 Guam & Northern Mariana Islands Fault Sections",
      NshmRegion.GNMI,
      "nshm-fault-sections-1.1"),

  HAWAII_2021(
      "2021 Hawaii Fault Sections",
      NshmRegion.HAWAII,
      "nshm-fault-sections-1.1"),

  PRVI_2003(
      "2003 Puerto Rico & U.S. Virgin Islands Fault Sections",
      NshmRegion.PRVI,
      "nshm-fault-sections-1.1");

  // Immutable per-constant state (now final; was mutable).
  private final String label;
  private final NshmRegion region;
  private final String directory;

  private NshmFaultSection(String label, NshmRegion region, String directory) {
    this.label = label;
    this.region = region;
    this.directory = directory;
  }

  /** Returns the resource directory name holding this section's files. */
  public String directory() {
    return directory;
  }

  /** Returns the display label. */
  public String label() {
    return label;
  }

  /** Returns the NSHM region this section belongs to. */
  public NshmRegion region() {
    return region;
  }
}
package gov.usgs.earthquake.nshmp.www.gps;
import java.io.IOException;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import com.google.common.collect.ImmutableList;
import com.google.common.io.Resources;
import gov.usgs.earthquake.nshmp.data.DelimitedData;
import gov.usgs.earthquake.nshmp.data.DelimitedData.Record;
import gov.usgs.earthquake.nshmp.geo.Location;
import gov.usgs.earthquake.nshmp.geo.json.Feature;
import gov.usgs.earthquake.nshmp.geo.json.FeatureCollection;
import gov.usgs.earthquake.nshmp.geo.json.GeoJson;
import gov.usgs.earthquake.nshmp.geo.json.GeoJson.Builder;
import gov.usgs.earthquake.nshmp.www.Utils;
/**
 * Container class holding the GPS data for a single {@code GpsDataSet}.
 * (The original had two stacked Javadoc comments; only the second attaches,
 * so they are merged here.)
 *
 * <p>GPS data files must have recognized column headers. Some datasets may not
 * have all fields (e.g. vertical component, site ID and type, correlation).
 * Files may have comment lines starting with "#", these lines are skipped.
 * First three data lines of a sample file, space delimited here for ease of
 * reading:
 *
 * <pre>
 * longitude latitude east north eastErr northErr correlation
 * -118.1460 34.7540 -12.6980 15.8890 0.9600 0.8000 0.0010
 * -119.9830 43.5900 -1.3320 2.0340 0.5900 0.6100 -0.0450
 * </pre>
 *
 * <p>Other fields that may be included in the future: correction
 *
 * <p>Target GeoJSON format:
 *
 * <pre>
 * {
 *   "type": "FeatureCollection",
 *   "features": [
 *     {
 *       "type": "Feature",
 *       "id": 0, // record number or store stationID here
 *       "geometry": {
 *         "type": "Point",
 *         "coordinates": [-117.0, 34.0]
 *       },
 *       "properties": {
 *         "stationID": STATION_ID_STRING, // stationID string, record number if field omitted
 *         "eastVelocity": 0.0000,
 *         "northVelocity": 0.0000,
 *         "verticalVelocity": 0.0000,
 *         "eastError": 0.0000,
 *         "northError": 0.0000,
 *         "verticalError": 0.0000,
 *         "correlation": 0.0000,
 *         "verticalPositiveIsDown": BOOLEAN,
 *         "velocityUnits": STRING // e.g. cm/yr, mm/yr
 *       }
 *     }
 *   ]
 * }
 * </pre>
 *
 * @author U.S. Geological Survey
 */
public class GpsData {

  // Map stationId to GpsRecord
  private final Map<String, GpsRecord> gpsData;

  // Set of GPS fields
  private final Set<String> gpsFields;

  // List of original lines in CSV file
  private final List<String> originalLines;

  // Resource path of the source CSV file
  private final String dataSource;

  private GpsData(
      Map<String, GpsRecord> gpsData,
      Set<String> gpsFields,
      List<String> originalLines,
      String dataSource) {
    this.gpsData = gpsData;
    this.gpsFields = gpsFields;
    this.originalLines = originalLines;
    this.dataSource = dataSource;
  }

  /**
   * Read {@code GpsData} from a CSV resource, converting ASCII records to
   * {@code GpsRecord}s.
   *
   * @param gpsDataSet The data set whose CSV resource to read
   * @return the parsed GPS data
   * @throws RuntimeException (with cause) if the resource cannot be read or
   *         parsed
   */
  public static GpsData readGpsData(GpsDataSet gpsDataSet) {
    String dataFile = Utils.GPS_RESOURCES + gpsDataSet.filename;
    URL url = Resources.getResource(GpsDataSet.class, dataFile);

    // Read lines from resource
    List<String> originalLines;
    try {
      originalLines = Resources.readLines(url, StandardCharsets.UTF_8);
    } catch (IOException e) {
      // Chain the cause instead of printStackTrace() + message-only rethrow
      throw new RuntimeException("Could not read file [" + gpsDataSet.filename + "]", e);
    }

    // Create DelimitedData
    DelimitedData delimitedData = DelimitedData.comma(originalLines);
    Set<String> keys = delimitedData.columnKeys();
    Map<String, GpsRecord> gpsData;
    try {
      gpsData = buildGpsData(delimitedData, keys);
    } catch (IOException e) {
      throw new RuntimeException("Problem reading delimited data in file [" + dataFile + "]", e);
    }

    return new GpsData(gpsData, keys, originalLines, dataFile);
  }

  /*
   * Builds the stationId -> GpsRecord map from delimited records. The record
   * number is used as the key when no stationId field is present.
   */
  private static Map<String, GpsRecord> buildGpsData(
      DelimitedData delimitedData,
      Set<String> fieldMap) throws IOException {
    // get list of gpsRecords
    List<Record> gpsRecords = delimitedData.records().collect(Collectors.toList());

    /*
     * Pass in record number for use as stationId, in case a stationId field is
     * not present. If stationId field does exist, use it as the gpsData key.
     *
     * Use LinkedHashMap to preserve record order
     */
    Map<String, GpsRecord> gpsData = new LinkedHashMap<>();
    IntStream.range(0, gpsRecords.size()).boxed()
        .forEach(i -> {
          String id = (fieldMap.contains(Field.STATIONID))
              ? gpsRecords.get(i).get(Field.STATIONID) : String.valueOf(i);
          GpsRecord rec =
              GpsRecord.fromDelimitedData(i, gpsRecords.get(i), fieldMap);
          gpsData.put(id, rec);
        });

    return Collections.unmodifiableMap(gpsData);
  }

  /**
   * Get list of {@code Field} of ASCII input file.
   *
   * @return {@code List<String>} of column keys
   */
  List<String> getFields() {
    return new ArrayList<>(this.gpsFields);
  }

  /** Returns the resource path of the source CSV file. */
  String getDataSource() {
    // Strings are immutable; the original's new String(dataSource) copy was
    // unnecessary.
    return dataSource;
  }

  /**
   * Return copy of {@code List<String>} containing original lines from CSV
   * file.
   */
  List<String> originalLines() {
    return ImmutableList.copyOf(originalLines);
  }

  /**
   * Create FeatureCollection containing contents of {@code GpsData}
   *
   * @return {@code FeatureCollection}
   */
  FeatureCollection asFeatureCollection() {
    Builder fcBuilder = GeoJson.builder();
    gpsData.entrySet().stream()
        .forEach(
            e -> fcBuilder.add(e.getValue().toFeature().id(e.getKey()).build()));
    return fcBuilder.build();
  }

  /*
   * GPS Field Strings
   */
  static class Field {
    static final String STATIONID = "stationId";
    static final String SITETYPE = "siteType";
    static final String LONGITUDE = "lon";
    static final String LATITUDE = "lat";
    static final String VEL_E = "vel_E";
    static final String VEL_N = "vel_N";
    static final String VEL_V = "vel_V";
    static final String SIG_E = "sig_E";
    static final String SIG_N = "sig_N";
    static final String SIG_V = "sig_V";
    static final String CORRELATION = "correlation";
  }

  /**
   * Container class for a single {@code GpsRecord}
   */
  private static class GpsRecord {
    Location loc;
    // NOTE(review): stationId is set but never read within this class; kept
    // for potential future use (see the commented-out Feature id below).
    String stationId;
    Map<String, Object> data;

    GpsRecord(Location loc, Map<String, Object> data) {
      this.loc = loc;
      this.data = data;
      this.stationId = (String) this.data.get(Field.STATIONID);
    }

    /**
     * Generate a {@code GpsRecord} from a delimited data {@code Record}.
     *
     * @param recNumber Record number in input file, used as {@code stationId}
     *        if this field is not present in {@code Record}
     * @param record Delimited data {@code Record}
     * @param fieldMap {@code Set<String>} of {@code Record} field keys
     * @return {@code GpsRecord}
     */
    static GpsRecord fromDelimitedData(
        int recNumber,
        Record record,
        Set<String> fieldMap) {
      double lat = Double.NaN;
      double lon = Double.NaN;
      boolean hasStationId = false;

      // Preserving Field order in record for now
      Map<String, Object> recordData = new LinkedHashMap<>();
      for (String field : fieldMap) {
        switch (field) {
          case Field.STATIONID:
            recordData.put(field, record.get(field));
            hasStationId = true;
            break;
          case Field.SITETYPE:
            recordData.put(field, record.get(field));
            break;
          case Field.LONGITUDE:
            lon = record.getDouble(field);
            break;
          case Field.LATITUDE:
            lat = record.getDouble(field);
            break;
          default:
            // All remaining recognized fields are numeric
            recordData.put(field, record.getDouble(field));
        }
      }

      if (!hasStationId) {
        recordData.put(Field.STATIONID, String.valueOf(recNumber));
      }

      return new GpsRecord(
          Location.create(lon, lat),
          Collections.unmodifiableMap(recordData));
    }

    @Override
    public String toString() {
      return String.format("<%s: %s>", this.loc.toString(), this.data.toString());
    }

    /**
     * Convert {@code GpsRecord} to GeoJSON feature
     *
     * @return GeoJSON feature builder
     */
    Feature.Builder toFeature() {
      Map<String, Object> props = new LinkedHashMap<>();
      data.keySet().stream()
          .filter(k -> !k.equals(Field.LATITUDE))
          .filter(k -> !k.equals(Field.LONGITUDE))
          .forEach(k -> props.put(k.toString(), data.get(k)));

      return Feature.point(this.loc)
          // TODO: stationId as Feature ID? Otherwise buried in properties
          // .id((String) props.get(Field.STATIONID))
          .properties(Collections.unmodifiableMap(props));
    }
  }
}
package gov.usgs.earthquake.nshmp.www.gps;
import static com.google.common.base.CaseFormat.LOWER_HYPHEN;
import static com.google.common.base.CaseFormat.UPPER_UNDERSCORE;
import java.util.Arrays;
import io.micronaut.core.annotation.Introspected;
/**
 * NSHM GPS data sets, each backed by a CSV resource file.
 *
 * @author U.S. Geological Survey
 */
@Introspected
public enum GpsDataSet {

  AK_2020(
      "Alaska 2020",
      "ak_2020.csv"),

  HI_2020(
      "Hawaii 2020",
      "hi_2020.csv"),

  CONUS_2014_WUS(
      "CONUS 2014 WUS",
      "conus_2014_wus.csv"),

  CONUS_2014_UCERF(
      "CONUS 2014 UCERF",
      "conus_2014_ucerf.csv"),

  CONUS_2023(
      "CONUS 2023",
      "conus_2023.csv");

  /** Display label. */
  public final String label;

  /** Hyphenated id derived from the constant name, e.g. {@code ak-2020}. */
  public final String id;

  /** CSV resource filename. Now final; was mutable public state. */
  public final String filename;

  private GpsDataSet(String label, String filename) {
    this.label = label;
    this.filename = filename;
    id = toId();
  }

  @Override
  public String toString() {
    return label;
  }

  /** Returns the id form of the constant name (UPPER_UNDERSCORE to lower-hyphen). */
  public String toId() {
    return UPPER_UNDERSCORE.to(LOWER_HYPHEN, name());
  }

  /**
   * Returns a GpsDataSet with specified id.
   *
   * @param id The id of the dataSet
   * @throws IllegalArgumentException if no data set has the supplied id
   */
  public static GpsDataSet fromId(String id) {
    return Arrays.stream(values())
        .filter(dataSet -> dataSet.id.equals(id))
        .findFirst()
        .orElseThrow(() -> new IllegalArgumentException("GpsDataSet [" + id + "] not found"));
  }
}
package gov.usgs.earthquake.nshmp.www.gps;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import com.google.common.base.Joiner;
import com.google.gson.JsonElement;
import gov.usgs.earthquake.nshmp.Text;
import gov.usgs.earthquake.nshmp.geo.json.FeatureCollection;
/**
 * Read in all GPS data sets from data directory and store CSV and JSON string
 * representations.
 *
 * @author U.S. Geological Survey
 */
public class GpsDataSets {

  // CSV string representation per data set (unmodifiable; now final)
  private final Map<GpsDataSet, String> gpsDataSetsCsv;

  // GeoJSON representation per data set (unmodifiable; now final)
  private final Map<GpsDataSet, FeatureCollection> gpsDataSetsJson;

  private GpsDataSets(
      Map<GpsDataSet, String> gpsDataSetsCsv,
      Map<GpsDataSet, FeatureCollection> gpsDataSetsJson) {
    this.gpsDataSetsCsv = gpsDataSetsCsv;
    this.gpsDataSetsJson = gpsDataSetsJson;
  }

  /**
   * Reads every {@link GpsDataSet} resource and caches both the raw CSV text
   * and the GeoJSON feature collection.
   */
  public static GpsDataSets loadGpsDataSets() {
    Map<GpsDataSet, String> mapCsv = new HashMap<>();
    Map<GpsDataSet, FeatureCollection> mapFc = new HashMap<>();

    for (GpsDataSet gpsDataSet : GpsDataSet.values()) {
      GpsData gpsData = GpsData.readGpsData(gpsDataSet);
      // build string of lines, append newline at EOF
      String csv = Joiner.on(Text.NEWLINE).skipNulls().join(gpsData.originalLines()) + Text.NEWLINE;
      mapCsv.put(gpsDataSet, csv);
      mapFc.put(gpsDataSet, gpsData.asFeatureCollection());
    }

    return new GpsDataSets(Collections.unmodifiableMap(mapCsv), Collections.unmodifiableMap(mapFc));
  }

  /** Returns the CSV text for a data set. */
  public String getCsv(GpsDataSet gpsDataSet) {
    return gpsDataSetsCsv.get(gpsDataSet);
  }

  /** Returns the GeoJSON for a data set as a JSON string. */
  public String getJsonString(GpsDataSet gpsDataSet) {
    return gpsDataSetsJson.get(gpsDataSet).toJson();
  }

  /** Returns the GeoJSON for a data set as a JSON tree. */
  public JsonElement getJsonTree(GpsDataSet gpsDataSet) {
    return gpsDataSetsJson.get(gpsDataSet).toJsonTree();
  }
}
package gov.usgs.earthquake.nshmp.www.gulf;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
import java.io.IOException;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.Map;
import java.util.Set;
import com.google.common.collect.ImmutableMap;
import com.google.common.io.Resources;
import gov.usgs.earthquake.nshmp.Maths;
import gov.usgs.earthquake.nshmp.data.DelimitedData;
import gov.usgs.earthquake.nshmp.data.DelimitedData.Record;
import gov.usgs.earthquake.nshmp.geo.BorderType;
import gov.usgs.earthquake.nshmp.geo.Location;
import gov.usgs.earthquake.nshmp.geo.Region;
import gov.usgs.earthquake.nshmp.geo.Regions;
import gov.usgs.earthquake.nshmp.geo.json.Feature;
import gov.usgs.earthquake.nshmp.geo.json.GeoJson;
import gov.usgs.earthquake.nshmp.www.Utils;
/**
 * Read in Gulf Coast sediment depth data.
 *
 * @author U.S. Geological Survey
 */
public class GulfData {

  /** Grid spacing (degrees) of the sediment depth data. */
  public static final double GULF_DATA_SPACING = 0.05;

  static final String GULF_REGION_FILENAME = "sediment-depth-region.geojson";
  static final String GULF_DATA_FILENAME = "CPADepthNSHM.csv";

  private final Region gulfRegion;
  private final ImmutableMap<Location, Double> gulfData;

  private GulfData(Region gulfRegion, ImmutableMap<Location, Double> gulfData) {
    this.gulfRegion = gulfRegion;
    this.gulfData = gulfData;
  }

  /**
   * Returns the {@code GulfData} associated with all files in the data
   * directory.
   *
   * @throws RuntimeException (with cause) if the data resource cannot be read
   */
  public static GulfData readGulfData() {
    Feature feature = readGulfRegion();
    Region region = Regions.create(
        feature.properties().getString("title").orElseThrow(),
        feature.asPolygonBorder(),
        BorderType.MERCATOR_LINEAR);

    String dataFile = Utils.GULF_RESOURCES + GULF_DATA_FILENAME;
    ImmutableMap<Location, Double> gulfData;
    try {
      List<String> lines = Resources.readLines(
          Resources.getResource(GulfData.class, dataFile),
          StandardCharsets.UTF_8);
      DelimitedData data = DelimitedData.comma(lines);
      Set<String> keys = data.columnKeys();
      gulfData = data.records()
          .map(record -> GulfDataRecord.fromDelimitedRecord(record, keys))
          .collect(ImmutableMap.toImmutableMap(
              GulfDataRecord::getLocation,
              GulfDataRecord::getSedimentDepth));
    } catch (IOException e) {
      // Chain the cause instead of printStackTrace() + message-only rethrow
      throw new RuntimeException("Could not read [" + dataFile + "]", e);
    }

    return new GulfData(region, gulfData);
  }

  /*
   * Read Feature from GeoJSON
   */
  private static Feature readGulfRegion() {
    URL url = Resources.getResource(GulfData.class, Utils.GULF_RESOURCES + GULF_REGION_FILENAME);
    return GeoJson.from(url).toFeature();
  }

  /**
   * @return the gulfRegion
   */
  public Region getGulfRegion() {
    return gulfRegion;
  }

  /*
   * Return sediment depth at target {@code Location}
   */
  public double getGulfData(Location loc) {
    loc = processLocation(loc);
    return checkNotNull(gulfData.get(loc));
  }

  /*
   * Return object containing grid point {@code Location} and {@code
   * sedimentDepth}
   */
  public GulfDataResponse getGulfDataResponse(Location loc) {
    loc = processLocation(loc);
    return new GulfDataResponse(loc, checkNotNull(gulfData.get(loc)));
  }

  /*
   * Return {@code sedimentDepth} as a {@code Feature}. processLocation has
   * already verified the snapped location is a key of gulfData.
   */
  public Feature getGulfDataFeature(Location loc) {
    loc = processLocation(loc);
    return Feature.point(loc)
        .properties(Map.of("sedimentDepth", gulfData.get(loc)))
        .build();
  }

  /*
   * Preprocess requested {@code Location}, validate against {@code gulfRegion},
   * round to grid, and validate against {@code gulfData}
   */
  private Location processLocation(Location loc) {
    checkArgument(gulfRegion.contains(loc),
        "Location [" + loc.toString() + "] is outside of Gulf data region");
    double lon = roundToGrid(loc.longitude, GULF_DATA_SPACING);
    double lat = roundToGrid(loc.latitude, GULF_DATA_SPACING);
    loc = Location.create(lon, lat);
    checkState(gulfData.containsKey(loc), "Location [%s] not found in Gulf dataset", loc);
    return loc;
  }

  /**
   * Round {@code value} to closest multiple of {@code spacing}
   *
   * TODO: clarify midpoint cases. It appears as if this case is handled
   * differently for negative (longitude) and positive (latitude) numbers (see
   * test for more details)
   *
   * TODO: move to www.Utils?
   *
   * @param value double
   * @param spacing double
   */
  public static double roundToGrid(double value, double spacing) {
    return Maths.round(
        Math.round(value / spacing) * spacing,
        BigDecimal.valueOf(spacing).scale(),
        RoundingMode.HALF_UP);
  }

  /**
   * Container class for a {@code Location} and {@code sedimentDepth}
   */
  static class GulfDataRecord {
    Location loc;
    double sedimentDepth;

    GulfDataRecord(Location loc, double depth) {
      this.loc = loc;
      this.sedimentDepth = depth;
    }

    /**
     * Returns the {@code Location}
     */
    Location getLocation() {
      return loc;
    }

    /**
     * Returns the {@code sedimentDepth}
     */
    double getSedimentDepth() {
      return sedimentDepth;
    }

    /*
     * Parses one CSV record; only longitude, latitude and depth columns are
     * supported.
     */
    static GulfDataRecord fromDelimitedRecord(Record record, Set<String> keys) {
      double lon = Double.NaN;
      double lat = Double.NaN;
      double sedimentDepth = Double.NaN;

      for (String key : keys) {
        switch (key) {
          case "longitude":
            lon = record.getDouble(key);
            break;
          case "latitude":
            lat = record.getDouble(key);
            break;
          case "depth":
            sedimentDepth = record.getDouble(key);
            break;
          default:
            throw new RuntimeException("Key [" + key + "] not supported");
        }
      }

      return new GulfDataRecord(Location.create(lon, lat), sedimentDepth);
    }
  }

  /*
   * Container for gulf data snapped to grid
   */
  public static class GulfDataResponse {
    final double longitude;
    final double latitude;
    final double sedimentDepth;

    public GulfDataResponse(Location loc, double sedimentDepth) {
      this.longitude = loc.longitude;
      this.latitude = loc.latitude;
      this.sedimentDepth = sedimentDepth;
    }

    public double longitude() {
      return longitude;
    }

    public double latitude() {
      return latitude;
    }

    public double sedimentDepth() {
      return sedimentDepth;
    }
  }
}
package gov.usgs.earthquake.nshmp.www.services;
import gov.usgs.earthquake.nshmp.geo.json.FeatureCollection;
import gov.usgs.earthquake.nshmp.www.NshmpMicronautServlet;
import gov.usgs.earthquake.nshmp.www.ResponseBody;
import gov.usgs.earthquake.nshmp.www.fault.NshmFaultSection;
import gov.usgs.earthquake.nshmp.www.services.FaultSectionsService.RequestData;
import io.micronaut.core.annotation.Nullable;
import io.micronaut.http.HttpRequest;
import io.micronaut.http.HttpResponse;
import io.micronaut.http.MediaType;
import io.micronaut.http.annotation.Controller;
import io.micronaut.http.annotation.Get;
import io.micronaut.http.annotation.PathVariable;
import io.micronaut.http.annotation.QueryValue;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.media.Content;
import io.swagger.v3.oas.annotations.media.Schema;
import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.tags.Tag;
import jakarta.inject.Inject;
@Tag(name = "Fault Sections",
    description = "Fault section services")
@Controller("/fault-sections")
public class FaultSectionsController {

  @Inject
  private NshmpMicronautServlet servlet;

  /**
   * GET method that returns the fault section based on a NSHM.
   *
   * @param request The HTTP request
   * @param nshm The NSHM fault section (region and year)
   * @param raw Whether to return raw GeoJSON feature collection
   */
  @Operation(
      summary = "Returns the fault section based on a NSHM region and year.",
      description = "Returns a fault section based on a NSHM region and year.\n\n" +
          "For supported regions and years usage information.\n\n" +
          "Given no query parameters the usage information is returned.",
      operationId = "fault-sections_doGetFaults")
  @ApiResponse(
      description = "Fault section",
      responseCode = "200",
      content = @Content(
          schema = @Schema(implementation = Response.class)))
  @Get(uri = "{?nshm, raw}", produces = MediaType.APPLICATION_JSON)
  public HttpResponse<String> doGetFaults(
      HttpRequest<?> request,
      @QueryValue @Nullable NshmFaultSection nshm,
      @QueryValue(defaultValue = "false") @Nullable Boolean raw) {
    return FaultSectionsService.handleDoGetFaults(request, nshm, raw);
  }

  /**
   * Slash delimited GET method that returns the fault section based on NSHM.
   * Always returns the standard (non-raw) response body.
   *
   * @param request The HTTP request
   * @param nshm The NSHM fault section (region and year)
   */
  @Operation(
      summary = "Returns the fault section based on a NSHM region and year.",
      description = "Returns a fault section.\n\n" +
          "For supported NSHM regions and years see the usage information.\n\n" +
          "Given no query parameters the usage information is returned.",
      operationId = "fault-sections_doGetFaultsSlash")
  @ApiResponse(
      description = "Fault section",
      responseCode = "200",
      content = @Content(
          schema = @Schema(implementation = Response.class)))
  @Get(uri = "/{nshm}", produces = MediaType.APPLICATION_JSON)
  public HttpResponse<String> doGetFaultsSlash(
      HttpRequest<?> request,
      @PathVariable @Nullable NshmFaultSection nshm) {
    return FaultSectionsService.handleDoGetFaults(request, nshm, false);
  }

  /**
   * Slash delimited GET method that returns the fault section based on NSHM.
   *
   * @param request The HTTP request
   * @param nshm The NSHM fault section (region and year)
   * @param raw Whether to return raw GeoJSON feature collection
   */
  @Operation(
      summary = "Returns the fault section based on a NSHM region and year.",
      description = "Returns a fault section.\n\n" +
          "For supported NSHM regions and years see the usage information.\n\n" +
          "Given no query parameters the usage information is returned.",
      operationId = "fault-sections_doGetFaultsSlashRaw")
  @ApiResponse(
      description = "Fault section",
      responseCode = "200",
      content = @Content(
          schema = @Schema(implementation = FeatureCollection.class)))
  @Get(uri = "/{nshm}/{raw}", produces = MediaType.APPLICATION_JSON)
  public HttpResponse<String> doGetFaultsSlashRaw(
      HttpRequest<?> request,
      @PathVariable @Nullable NshmFaultSection nshm,
      @PathVariable(defaultValue = "true") @Nullable Boolean raw) {
    return FaultSectionsService.handleDoGetFaults(request, nshm, raw);
  }

  // Swagger schema
  static class Response extends ResponseBody<RequestData, FeatureCollection> {}
}
package gov.usgs.earthquake.nshmp.www.services;
import static gov.usgs.earthquake.nshmp.www.Utils.GSON;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import gov.usgs.earthquake.nshmp.Text;
import gov.usgs.earthquake.nshmp.geo.json.FeatureCollection;
import gov.usgs.earthquake.nshmp.www.ResponseBody;
import gov.usgs.earthquake.nshmp.www.ResponseMetadata;
import gov.usgs.earthquake.nshmp.www.Utils;
import gov.usgs.earthquake.nshmp.www.WsVersion;
import gov.usgs.earthquake.nshmp.www.fault.FaultSections;
import gov.usgs.earthquake.nshmp.www.fault.NshmFaultSection;
import gov.usgs.earthquake.nshmp.www.meta.EnumParameter;
import io.micronaut.http.HttpRequest;
import io.micronaut.http.HttpResponse;
import jakarta.inject.Singleton;
/**
 * Service handler for {@code FaultSectionsController}
 *
 * @author U.S. Geological Survey
 */
@Singleton
public class FaultSectionsService {

  private static final String SERVICE_NAME = "Fault Sections Service";
  private static final String SERVICE_DESCRIPTION = "Get fault sections as GeoJson";

  private static final List<String> SERVICE_SYNTAXES = List.of(
      "?nshm={NshmFaultSection}&raw={boolean}",
      "{nshm:NshmFaultSection}/{raw:boolean}");

  /* Fault sections loaded once at class initialization; never reassigned. */
  private static final Map<NshmFaultSection, FaultSections> FAULT_SECTIONS =
      FaultSections.getFaultSections();

  /**
   * Handle the fault sections GET request.
   *
   * <p> Returns the usage information when {@code nshmFaultSection} is null;
   * otherwise returns the fault sections as GeoJSON, raw or wrapped in a
   * response body. Errors are converted to an error response.
   *
   * @param request The HTTP request
   * @param nshmFaultSection The NSHM fault sections of interest, may be null
   * @param raw Whether to return raw GeoJSON, may be null (treated as false)
   */
  static HttpResponse<String> handleDoGetFaults(
      HttpRequest<?> request,
      NshmFaultSection nshmFaultSection,
      Boolean raw) {
    try {
      if (nshmFaultSection == null) {
        return metadata(request);
      }
      var requestData = new RequestData(nshmFaultSection, raw);
      var response = Text.cleanCoordinates(processRequest(request, requestData));
      return HttpResponse.ok(response);
    } catch (Exception e) {
      return Utils.handleError(e, SERVICE_NAME, request.getUri().getPath());
    }
  }

  /** Build the usage (metadata) response body. */
  private static ResponseBody<String, MetadataResponse> getMetadata(HttpRequest<?> request) {
    var url = request.getUri().getPath();
    return ResponseBody.<String, MetadataResponse> usage()
        .name(SERVICE_NAME)
        .url(url)
        .metadata(new ResponseMetadata(WsVersion.appVersions()))
        .request(url)
        .response(new MetadataResponse(request))
        .build();
  }

  /** Return the usage information as a JSON HTTP response. */
  private static HttpResponse<String> metadata(HttpRequest<?> request) {
    var metadata = GSON.toJson(getMetadata(request));
    return HttpResponse.ok(metadata);
  }

  /** Return the fault sections as raw GeoJSON or wrapped in a response body. */
  private static String processRequest(HttpRequest<?> request, RequestData requestData) {
    FeatureCollection featureCollection =
        FAULT_SECTIONS.get(requestData.nshmFaultSection).toFeatureCollection();

    if (requestData.raw) {
      return featureCollection.toJson();
    } else {
      var response = ResponseBody.success()
          .name(SERVICE_NAME)
          .url(request.getUri().getPath())
          .metadata(new ResponseMetadata(WsVersion.appVersions()))
          .request(requestData)
          .response(featureCollection.toJsonTree())
          .build();
      return GSON.toJson(response);
    }
  }

  /* The request parameters echoed back in the response body. */
  static class RequestData {
    final NshmFaultSection nshmFaultSection;
    final boolean raw;

    RequestData(
        NshmFaultSection nshmFaultSection,
        Boolean raw) {
      this.nshmFaultSection = nshmFaultSection;
      // Treat a missing raw flag as false
      this.raw = raw == null ? false : raw;
    }

    public NshmFaultSection nshm() {
      return nshmFaultSection;
    }

    public boolean raw() {
      return raw;
    }
  }

  /* Usage information returned when no NSHM is supplied. */
  @SuppressWarnings("unused")
  static class MetadataResponse {
    final String description;
    final List<String> syntax;
    final Parameters parameters;

    MetadataResponse(HttpRequest<?> request) {
      var url = request.getUri().getPath();
      description = SERVICE_DESCRIPTION;
      syntax = SERVICE_SYNTAXES.stream()
          .map(syntax -> url + syntax)
          .collect(Collectors.toList());
      parameters = new Parameters();
    }
  }

  /* Supported query parameters and their allowed values. */
  @SuppressWarnings("unused")
  private static class Parameters {
    final EnumParameter<NshmFaultSection> nshm;
    final ObjectParameter<Boolean> raw;

    Parameters() {
      nshm = new EnumParameter<>("NSHM Fault Sections", Set.of(NshmFaultSection.values()));
      raw = new ObjectParameter<Boolean>(
          "Whether to return raw GeoJSON",
          Set.of(true, false));
    }
  }

  /* Generic parameter description: a label and the set of allowed values. */
  @SuppressWarnings("unused")
  private static class ObjectParameter<T> {
    final String label;
    final Set<T> values;

    ObjectParameter(String label, Set<T> values) {
      this.label = label;
      this.values = values;
    }
  }
}
package gov.usgs.earthquake.nshmp.www.services;
import gov.usgs.earthquake.nshmp.geo.json.FeatureCollection;
import gov.usgs.earthquake.nshmp.www.NshmpMicronautServlet;
import gov.usgs.earthquake.nshmp.www.ResponseBody;
import gov.usgs.earthquake.nshmp.www.gps.GpsDataSet;
import gov.usgs.earthquake.nshmp.www.services.GpsService.GpsFormat;
import gov.usgs.earthquake.nshmp.www.services.GpsService.RequestData;
import io.micronaut.core.annotation.Nullable;
import io.micronaut.http.HttpRequest;
import io.micronaut.http.HttpResponse;
import io.micronaut.http.MediaType;
import io.micronaut.http.annotation.Controller;
import io.micronaut.http.annotation.Get;
import io.micronaut.http.annotation.PathVariable;
import io.micronaut.http.annotation.QueryValue;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.media.Content;
import io.swagger.v3.oas.annotations.media.Schema;
import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.tags.Tag;
import jakarta.inject.Inject;
/**
 * GPS data service to return GPS data for NSHM.
 *
 * <p> Model GPS data can be returned as CSV or GeoJSON strings.
 *
 * @author U.S. Geological Survey
 */
@Tag(name = "GPS Services")
@Controller("/gps")
public class GpsController {

  /* Injected to initialize the shared Micronaut servlet context; not referenced directly. */
  @Inject
  private NshmpMicronautServlet servlet;

  /**
   * GET method that returns the GPS results for the specified model.
   *
   * <p> Given no query parameters the usage information is returned.
   *
   * @param request The HTTP request
   * @param model The NSHM GPS dataset, may be null
   * @param format The requested data format, may be null (defaults to CSV)
   */
  @Operation(
      summary = "Returns usage information or " +
          "GPS dataset for the specified NSHM model",
      description = "Returns a NSHM GPS dataset with velocities in mm/yr and, if applicable, " +
          "vertical components are positive UP.\n\n" +
          "For supported datasets and formats see the usage information\n\n" +
          "Given no query parameters the usage information is returned",
      operationId = "gps_doGetGps")
  @ApiResponse(
      description = "NSHM GPS datasets",
      responseCode = "200",
      content = @Content(
          schema = @Schema(implementation = Response.class)))
  @Get(uri = "{?model,format}", produces = { MediaType.APPLICATION_JSON, MediaType.TEXT_CSV })
  public HttpResponse<String> doGetGpsData(
      HttpRequest<?> request,
      @QueryValue @Nullable GpsDataSet model,
      @QueryValue @Nullable GpsFormat format) {
    return GpsService.handleDoGetGps(request, model, format);
  }

  /**
   * Slash delimited get method.
   *
   * <p> No format path segment: the service default format (CSV) is used.
   *
   * @param request The HTTP request
   * @param model The GPS dataset
   */
  @Operation(
      summary = "Returns all GPS data for a specific dataset",
      description = "Returns a NSHM GPS dataset with velocities in mm/yr and, if applicable, " +
          "vertical components are positive UP.\n\n" +
          "For supported datasets and formats see the usage information\n\n",
      operationId = "gps_doGetGpsSlash")
  @ApiResponse(
      description = "NSHM GPS datasets",
      responseCode = "200")
  @Get(uri = "/{model}", produces = MediaType.TEXT_CSV)
  public HttpResponse<String> doGetGpsDataSlash(
      HttpRequest<?> request,
      @PathVariable @Nullable GpsDataSet model) {
    return doGetGpsData(request, model, null);
  }

  /**
   * Slash delimited get method.
   *
   * @param request The HTTP request
   * @param model The GPS dataset
   * @param format The requested data format
   */
  @Operation(
      summary = "Returns all GPS data for a specific dataset",
      description = "Returns a NSHM GPS dataset with velocities in mm/yr and, if applicable, " +
          "vertical components are positive UP.\n\n" +
          "For supported datasets and formats see the usage information\n\n",
      operationId = "gps_doGetGpsSlashFormat")
  @ApiResponse(
      description = "NSHM GPS datasets",
      responseCode = "200",
      content = @Content(
          schema = @Schema(implementation = Response.class)))
  @Get(uri = "/{model}/{format}", produces = { MediaType.APPLICATION_JSON, MediaType.TEXT_CSV })
  public HttpResponse<String> doGetGpsDataSlashFormat(
      HttpRequest<?> request,
      @PathVariable @Nullable GpsDataSet model,
      @PathVariable @Nullable GpsFormat format) {
    return doGetGpsData(request, model, format);
  }

  // Swagger schema
  private static class Response extends ResponseBody<RequestData, FeatureCollection> {}
}
package gov.usgs.earthquake.nshmp.www.services;
import static gov.usgs.earthquake.nshmp.www.Utils.GSON;
import static gov.usgs.earthquake.nshmp.www.WsUtils.checkValue;
import java.util.EnumSet;
import gov.usgs.earthquake.nshmp.www.ResponseBody;
import gov.usgs.earthquake.nshmp.www.ResponseMetadata;
import gov.usgs.earthquake.nshmp.www.Utils;
import gov.usgs.earthquake.nshmp.www.Utils.Key;
import gov.usgs.earthquake.nshmp.www.WsVersion;
import gov.usgs.earthquake.nshmp.www.gps.GpsDataSet;
import gov.usgs.earthquake.nshmp.www.gps.GpsDataSets;
import gov.usgs.earthquake.nshmp.www.meta.EnumParameter;
import io.micronaut.http.HttpRequest;
import io.micronaut.http.HttpResponse;
import jakarta.inject.Singleton;
/**
 * Service handler for {@code GpsController}
 *
 * @author U.S. Geological Survey
 */
@Singleton
public class GpsService {

  /* GPS datasets loaded once at class initialization; never reassigned. */
  private static final GpsDataSets GPS_DATASETS = GpsDataSets.loadGpsDataSets();

  private static final String SERVICE_NAME = "GPS Data Service";
  private static final String SERVICE_DESCRIPTION =
      "Get GPS NSHM data, velocity units are mm/yr and positive up";
  private static final String SERVICE_SYNTAX = "?model={model}&format={format}";

  /**
   * Handle the GPS data GET request.
   *
   * <p> Returns the usage information when {@code model} is null; otherwise
   * returns the dataset in the requested format (CSV when {@code format} is
   * null). Errors are converted to an error response.
   *
   * @param request The HTTP request
   * @param model The GPS dataset, may be null
   * @param format The requested data format, may be null
   */
  static HttpResponse<String> handleDoGetGps(
      HttpRequest<?> request,
      GpsDataSet model,
      GpsFormat format) {
    try {
      if (model == null) {
        var metadata = GSON.toJson(getMetadata(request));
        return HttpResponse.ok(metadata);
      }
      var query = new Query(model, format);
      checkValue(Key.GPSDATASET, model);
      var response = processGpsRequest(request, query);
      return HttpResponse.ok(response);
    } catch (Exception e) {
      return Utils.handleError(e, SERVICE_NAME, request.getUri().getPath());
    }
  }

  /** Build the usage (metadata) response body. */
  private static ResponseBody<String, MetadataResponse> getMetadata(HttpRequest<?> request) {
    var url = request.getUri().getPath();
    return ResponseBody.<String, MetadataResponse> usage()
        .name(SERVICE_NAME)
        .url(url)
        .metadata(new ResponseMetadata(WsVersion.appVersions()))
        .request(url)
        .response(new MetadataResponse(request))
        .build();
  }

  /** Return the dataset as raw JSON, CSV, or a wrapped JSON response body. */
  private static String processGpsRequest(HttpRequest<?> request, Query query) {
    RequestData requestData = new RequestData(query.model, query.format);

    if (query.format == GpsFormat.RAW_JSON) {
      return GPS_DATASETS.getJsonString(query.model);
    }

    if (query.format == GpsFormat.CSV) {
      return GPS_DATASETS.getCsv(requestData.model);
    }

    var values = GPS_DATASETS.getJsonTree(requestData.model);
    var response = ResponseBody.success()
        .name(SERVICE_NAME)
        .url(request.getUri().getPath())
        .metadata(new ResponseMetadata(WsVersion.appVersions()))
        .request(requestData)
        .response(values)
        .build();
    return GSON.toJson(response, ResponseBody.class);
  }

  /* The request parameters echoed back in the response body. */
  static class RequestData {
    final GpsDataSet model;
    final GpsFormat format;

    RequestData(GpsDataSet model, GpsFormat format) {
      this.model = model;
      this.format = format;
    }

    GpsDataSet model() {
      return model;
    }

    GpsFormat format() {
      return format;
    }
  }

  /* Usage information returned when no model is supplied. */
  static class MetadataResponse {
    final String description;
    final String syntax;
    final EnumParameter<GpsDataSet> datasets;
    final EnumParameter<GpsFormat> formats;

    public MetadataResponse(HttpRequest<?> request) {
      description = SERVICE_DESCRIPTION;
      syntax = request.getUri().getPath() + SERVICE_SYNTAX;
      datasets = new EnumParameter<>(
          "GPS Dataset",
          EnumSet.allOf(GpsDataSet.class));
      formats = new EnumParameter<>(
          "Formats",
          EnumSet.allOf(GpsFormat.class));
    }
  }

  /* Query parameters with the format defaulted to CSV when absent. */
  static class Query {
    public final GpsDataSet model;
    public final GpsService.GpsFormat format;

    public Query(GpsDataSet model, GpsService.GpsFormat format) {
      this.model = model;
      this.format = format == null ? GpsFormat.CSV : format;
    }
  }

  /** Supported output formats. */
  enum GpsFormat {
    CSV,
    JSON,
    RAW_JSON;
  }
}
package gov.usgs.earthquake.nshmp.www.services;
import gov.usgs.earthquake.nshmp.www.NshmpMicronautServlet;
import gov.usgs.earthquake.nshmp.www.ResponseBody;
import gov.usgs.earthquake.nshmp.www.gulf.GulfData.GulfDataResponse;
import gov.usgs.earthquake.nshmp.www.services.GulfService.RequestData;
import io.micronaut.core.annotation.Nullable;
import io.micronaut.http.HttpRequest;
import io.micronaut.http.HttpResponse;
import io.micronaut.http.MediaType;
import io.micronaut.http.annotation.Controller;
import io.micronaut.http.annotation.Get;
import io.micronaut.http.annotation.PathVariable;
import io.micronaut.http.annotation.QueryValue;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.media.Content;
import io.swagger.v3.oas.annotations.media.Schema;
import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.tags.Tag;
import jakarta.inject.Inject;
/**
 * Gulf sediment depth data service.
 *
 * <p> Note: If the longitude and latitude supplied in the query is not
 * contained in the dataset region, the resulting depth value is null.
 *
 * <p> Note: The supplied longitude and latitude values are rounded to the
 * nearest grid point ({@code 0.05} degrees).
 *
 * @author U.S. Geological Survey
 */
@Tag(name = "Gulf Services")
@Controller("/gulf")
public class GulfController {

  /* Injected to initialize the shared Micronaut servlet context; not referenced directly. */
  @Inject
  private NshmpMicronautServlet servlet;

  /**
   * GET method that returns the gulf sediment depth results based on longitude
   * and latitude. Returns usage if query is empty.
   *
   * @param request The HTTP request
   * @param longitude The longitude of interest, in degrees
   * @param latitude The latitude of interest, in degrees
   */
  @Operation(
      summary = "Returns usage information or " +
          "sediment depth (m) based on a longitude and latitude.",
      description = "Returns sediment depth (m) for a given site (longitude, latitude).\n\n" +
          "For supported dataset region see the usage information.\n\n" +
          "Given no query parameters the usage information is returned.",
      operationId = "gulf_doGetGulf")
  @ApiResponse(
      description = "Gulf coast sediment depth",
      responseCode = "200",
      content = @Content(
          schema = @Schema(implementation = Response.class)))
  @Get(uri = "{?longitude,latitude}", produces = MediaType.APPLICATION_JSON)
  public HttpResponse<String> doGetGulf(
      HttpRequest<?> request,
      @QueryValue @Nullable Double longitude,
      @QueryValue @Nullable Double latitude) {
    return GulfService.handleDoGetGulf(request, longitude, latitude);
  }

  /**
   * Slash delimited get method.
   *
   * @param request The HTTP request
   * @param longitude The longitude in degrees
   * @param latitude The latitude in degrees
   */
  @Operation(
      summary = "Returns sediment depth (m) based on a longitude and latitude",
      description = "Returns the sediment depth (m) for specified longitude and latitude.\n\n",
      operationId = "gulf_doGetGulfSlash")
  @ApiResponse(
      description = "Gulf coast sediment depth",
      responseCode = "200",
      content = @Content(
          schema = @Schema(implementation = Response.class)))
  @Get(uri = "/{longitude}/{latitude}", produces = MediaType.APPLICATION_JSON)
  public HttpResponse<String> doGetGulfSlash(HttpRequest<?> request,
      @PathVariable @Nullable Double longitude,
      @PathVariable @Nullable Double latitude) {
    return doGetGulf(request, longitude, latitude);
  }

  // Swagger schema
  static class Response extends ResponseBody<RequestData, GulfDataResponse> {}
}
package gov.usgs.earthquake.nshmp.www.services;
import static gov.usgs.earthquake.nshmp.www.Utils.GSON;
import static gov.usgs.earthquake.nshmp.www.WsUtils.checkValue;
import gov.usgs.earthquake.nshmp.geo.Location;
import gov.usgs.earthquake.nshmp.geo.Region;
import gov.usgs.earthquake.nshmp.www.ResponseBody;
import gov.usgs.earthquake.nshmp.www.ResponseMetadata;
import gov.usgs.earthquake.nshmp.www.Utils;
import gov.usgs.earthquake.nshmp.www.Utils.Key;
import gov.usgs.earthquake.nshmp.www.WsVersion;
import gov.usgs.earthquake.nshmp.www.gulf.GulfData;
import gov.usgs.earthquake.nshmp.www.gulf.GulfData.GulfDataResponse;
import io.micronaut.http.HttpRequest;
import io.micronaut.http.HttpResponse;
import jakarta.inject.Singleton;
/**
 * Service handler for {@code GulfController}
 *
 * @author U.S. Geological Survey
 */
@Singleton
public class GulfService {

  /* Gridded sediment depth data read once at class initialization; never reassigned. */
  private static final GulfData GULF_DATA = GulfData.readGulfData();

  private static final String SERVICE_NAME = "Gulf Coast Sediment Depth Service";
  private static final String SERVICE_DESCRIPTION = "Get gulf coast sediment depth";
  private static final String SERVICE_SYNTAX =
      "?longitude={longitude}&latitude={latitude}";

  /**
   * Handle the gulf sediment depth GET request.
   *
   * <p> Returns the usage information when either coordinate is null;
   * otherwise returns the sediment depth at the supplied site. Errors are
   * converted to an error response.
   *
   * @param request The HTTP request
   * @param longitude The longitude of interest in degrees, may be null
   * @param latitude The latitude of interest in degrees, may be null
   */
  static HttpResponse<String> handleDoGetGulf(
      HttpRequest<?> request,
      Double longitude,
      Double latitude) {
    try {
      if (longitude == null || latitude == null) {
        var metadata = GSON.toJson(getMetadata(request));
        return HttpResponse.ok(metadata);
      }
      // Validate coordinate ranges before building the response
      checkValue(Key.LONGITUDE, longitude);
      checkValue(Key.LATITUDE, latitude);
      var requestData = new RequestData(longitude, latitude);
      String response = GSON.toJson(processGulf(request, requestData), ResponseBody.class);
      return HttpResponse.ok(response);
    } catch (Exception e) {
      return Utils.handleError(e, SERVICE_NAME, request.getUri().getPath());
    }
  }

  /** Build the usage (metadata) response body. */
  private static ResponseBody<String, MetadataResponse> getMetadata(HttpRequest<?> request) {
    var url = request.getUri().getPath();
    return ResponseBody.<String, MetadataResponse> usage()
        .name(SERVICE_NAME)
        .url(url)
        .metadata(new ResponseMetadata(WsVersion.appVersions()))
        .request(url)
        .response(new MetadataResponse(request))
        .build();
  }

  /** Build the success response with the sediment depth at the requested site. */
  private static ResponseBody<RequestData, GulfDataResponse> processGulf(
      HttpRequest<?> request,
      RequestData requestData) {
    var loc = Location.create(requestData.longitude, requestData.latitude);

    return ResponseBody.<RequestData, GulfDataResponse> success()
        .name(SERVICE_NAME)
        .url(request.getUri().getPath())
        .metadata(new ResponseMetadata(WsVersion.appVersions()))
        .request(requestData)
        .response(GULF_DATA.getGulfDataResponse(loc))
        .build();
  }

  /* The request parameters echoed back in the response body. */
  static class RequestData {
    final double longitude;
    final double latitude;

    RequestData(double longitude, double latitude) {
      this.longitude = longitude;
      this.latitude = latitude;
    }

    public double longitude() {
      return longitude;
    }

    public double latitude() {
      return latitude;
    }
  }

  /* Usage information returned when coordinates are not supplied. */
  private static class MetadataResponse {
    final String description;
    final String syntax;
    final Region gulfRegion;

    MetadataResponse(HttpRequest<?> request) {
      description = SERVICE_DESCRIPTION;
      syntax = request.getUri().getPath() + SERVICE_SYNTAX;
      gulfRegion = GULF_DATA.getGulfRegion();
    }
  }
}
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment