nshmp-lib commit 9d5dc516, authored 6 months ago by Powers, Peter M.
updated model loader tests to cover decomposed curves
parent 83e84e4d
1 merge request: !421 Magnitude curves
Showing 1 changed file with 158 additions and 67 deletions:
src/test/java/gov/usgs/earthquake/nshmp/model/LoaderTests.java (+158, −67)
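In brief: the expected test results now include decomposed curves (by GMM, by source type, and by magnitude bin) alongside the total hazard curve, and the test compares each against the computed Hazard. The following is a distilled, self-contained sketch of the comparison pattern used in the updated test; the String keys and double[] values here stand in for the project's Gmm and XySequence types and are illustrative only, not the committed API.

// Sketch only: loop the *actual* decomposed curves rather than the expecteds,
// because the expected set can carry extra zero-valued curves for GMMs,
// source types, or magnitude bins a model never produces (per the comment in
// the diff below). Types and names here are hypothetical.
import java.util.Map;

class DecomposedCurveCheckSketch {

  static void compareByGmm(
      Map<String, double[]> expected, // hypothetical: GMM name -> expected y-values
      Map<String, double[]> actual,   // hypothetical: GMM name -> computed y-values
      double tol) {

    for (String gmm : actual.keySet()) { // loop actuals, not expecteds
      double[] e = expected.get(gmm);
      double[] a = actual.get(gmm);
      for (int i = 0; i < e.length; i++) {
        if (Math.abs(e[i] - a[i]) > tol) {
          throw new AssertionError("curves differ for " + gmm + " at [" + i + "]");
        }
      }
    }
  }
}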
src/test/java/gov/usgs/earthquake/nshmp/model/LoaderTests.java @ 9d5dc516 (+158, −67)
 package gov.usgs.earthquake.nshmp.model;

+import static gov.usgs.earthquake.nshmp.calc.HazardExport.CURVE_FILE;
+import static gov.usgs.earthquake.nshmp.calc.HazardExport.GMM_DIR;
+import static gov.usgs.earthquake.nshmp.calc.HazardExport.MAG_DIR;
+import static gov.usgs.earthquake.nshmp.calc.HazardExport.TYPE_DIR;
 import static java.lang.Math.abs;
 import static java.util.stream.Collectors.toMap;
 import static org.junit.jupiter.api.Assertions.assertArrayEquals;
@@ -11,10 +15,8 @@ import java.nio.file.Path;
 import java.nio.file.Paths;
 import java.util.Arrays;
 import java.util.EnumMap;
-import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.Map.Entry;
 import java.util.OptionalDouble;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
@@ -31,8 +33,10 @@ import gov.usgs.earthquake.nshmp.calc.HazardCalcs;
 import gov.usgs.earthquake.nshmp.calc.HazardExport;
 import gov.usgs.earthquake.nshmp.calc.Site;
 import gov.usgs.earthquake.nshmp.calc.Sites;
+import gov.usgs.earthquake.nshmp.data.MutableXySequence;
 import gov.usgs.earthquake.nshmp.data.XySequence;
 import gov.usgs.earthquake.nshmp.geo.Location;
+import gov.usgs.earthquake.nshmp.gmm.Gmm;
 import gov.usgs.earthquake.nshmp.gmm.Imt;

 class LoaderTests {
@@ -51,14 +55,14 @@ class LoaderTests {

   static HazardModel model;
   static List<Site> sites;
-  static Map<Location, Map<Imt, XySequence>> expecteds;
+  static Expecteds expecteds;
   static ExecutorService exec;

   @BeforeAll
   static void setUpBeforeClass() throws IOException {
     model = ModelLoader.load(MODEL_PATH);
     sites = Sites.fromCsv(SITES_PATH, model.siteData(), OptionalDouble.empty());
-    expecteds = loadExpecteds();
+    expecteds = new Expecteds();
     int cores = Runtime.getRuntime().availableProcessors();
     exec = Executors.newFixedThreadPool(cores);
   }
@@ -72,21 +76,67 @@ class LoaderTests {
   @MethodSource("siteStream")
   final void testLocation(Site site) {
     Location loc = site.location();
-    Map<Imt, XySequence> expected = expecteds.get(loc);
+    // Map<Imt, XySequence> totalExpected = expecteds.totalCurves.get(loc);
     Hazard actual = HazardCalcs.hazard(model, model.config(), site, exec);
-    assertCurvesEqual(expected, actual, TOLERANCE);
+    assertCurvesEqual(loc, expecteds, actual, TOLERANCE);
   }

   private static Stream<Site> siteStream() {
     return sites.stream();
   }

-  private static void assertCurvesEqual(Map<Imt, XySequence> expected, Hazard actual, double tol) {
-    expected.entrySet().forEach(
-        e -> assertCurveEquals(
-            e.getValue(),
-            actual.curves().get(e.getKey()),
-            tol));
+  private static void assertCurvesEqual(
+      Location loc,
+      Expecteds expecteds,
+      Hazard actuals,
+      double tol) {
+
+    var gmmActualsImtMap = HazardExport.curvesByGmm(actuals);
+    var typeActualsImtMap = HazardExport.curvesBySource(actuals);
+
+    for (Imt imt : expecteds.totalCurves.keySet()) {
+
+      // total curve
+      XySequence totalExpected = expecteds.totalCurves.get(imt).get(loc);
+      XySequence totalActual = actuals.curves().get(imt);
+      assertCurveEquals(totalExpected, totalActual, tol);
+
+      /*
+       * For decomposed curves we need to loop actuals because expecteds will
+       * have additional zero-values curves for missing GMMs, source types and
+       * magnitudes.
+       */
+
+      // GMM curves
+      Map<Gmm, Map<Location, XySequence>> gmmExpecteds = expecteds.gmmCurves.get(imt);
+      Map<Gmm, ? extends XySequence> gmmActuals = gmmActualsImtMap.get(imt);
+      for (Gmm gmm : gmmActuals.keySet()) {
+        XySequence gmmExpected = gmmExpecteds.get(gmm).get(loc);
+        XySequence gmmActual = gmmActuals.get(gmm);
+        assertCurveEquals(gmmExpected, gmmActual, tol);
+      }
+
+      // Source type curves
+      Map<SourceType, Map<Location, XySequence>> typeExpecteds = expecteds.typeCurves.get(imt);
+      Map<SourceType, ? extends XySequence> typeActuals = typeActualsImtMap.get(imt);
+      for (SourceType type : typeActuals.keySet()) {
+        XySequence typeExpected = typeExpecteds.get(type).get(loc);
+        XySequence typeActual = typeActuals.get(type);
+        assertCurveEquals(typeExpected, typeActual, tol);
+      }
+
+      // Magnitude curves
+      Map<Double, XySequence> magExpecteds = expecteds.magCurves.get(imt).get(loc);
+      Map<Double, MutableXySequence> magActuals = actuals.magCurves().get(imt);
+      for (Double m : magActuals.keySet()) {
+        // if mag bin not used actuals will be null
+        // and expecteds will be array of zeros
+        if (magActuals.get(m) == null) {
+          continue;
+        }
+        assertCurveEquals(magExpecteds.get(m), magActuals.get(m), tol);
+      }
+    }
   }

   private static void assertCurveEquals(XySequence expected, XySequence actual, double tol) {
@@ -103,7 +153,7 @@ class LoaderTests {
     // y-value difference relative to tolerance
     assertArrayEquals(expectedYs, actualYs, tol);

-    // y-value difference relative to tolerance
+    // y-value ratio relative to tolerance
     for (int i = 0; i < expectedYs.length; i++) {
       String message = String.format(
           "arrays differ at [%s] expected:<[%s]> but was:<[%s]>",
@@ -117,84 +167,125 @@ class LoaderTests {
     Double.valueOf(expected).equals(Double.valueOf(actual));
   }

-  /* read curves then transpose */
-  private static Map<Location, Map<Imt, XySequence>> loadExpecteds() throws IOException {
-    // consider centralized curve file processing
-    Map<Imt, Map<Location, XySequence>> curves = Files.walk(RESULTS_PATH)
-        .filter(LoaderTests::isCurveFile)
-        .collect(toMap(
-            LoaderTests::imtFromPath,
-            LoaderTests::readCurves));
-    return transpose(curves);
-  }
-
-  private static boolean isCurveFile(Path path) {
-    return path.getFileName().toString().equals("curves.csv");
-  }
-
-  private static Imt imtFromPath(Path path) {
-    return Imt.valueOf(path.getParent().getFileName().toString());
-  }
-
-  private static Map<Location, XySequence> readCurves(Path path) {
-    try {
-      List<String> lines = Files.readAllLines(path);
-      double[] imls = toValues(lines.get(0));
-      return lines.stream()
-          .skip(1)
-          .map(line -> toCurve(line, imls))
-          .collect(toMap(
-              Entry::getKey,
-              Entry::getValue));
-    } catch (IOException ioe) {
-      throw new RuntimeException(ioe);
-    }
-  }
-
-  private static Entry<Location, XySequence> toCurve(String line, double[] xs) {
-    Site site = toSite(line);
-    double[] ys = toValues(line);
-    return Map.entry(site.location(), XySequence.create(xs, ys));
-  }
-
-  private static Site toSite(String line) {
-    String[] s = Arrays.stream(line.split(","))
-        .map(String::trim)
-        .limit(3)
-        .toArray(String[]::new);
-    String name = s[0];
-    Location loc = Location.create(
-        Double.valueOf(s[1]),
-        Double.valueOf(s[2]));
-    return Site.builder()
-        .name(name)
-        .location(loc)
-        .build();
-  }
-
-  private static double[] toValues(String line) {
-    return Arrays.stream(line.split(","))
-        .map(String::trim)
-        .skip(3)
-        .mapToDouble(Double::parseDouble)
-        .toArray();
-  }
-
-  private static Map<Location, Map<Imt, XySequence>> transpose(
-      Map<Imt, Map<Location, XySequence>> mapIn) {
-    Map<Location, Map<Imt, XySequence>> mapOut = new HashMap<>();
-    for (Entry<Imt, Map<Location, XySequence>> imtEntry : mapIn.entrySet()) {
-      Imt imt = imtEntry.getKey();
-      for (Entry<Location, XySequence> locEntry : imtEntry.getValue().entrySet()) {
-        Location loc = locEntry.getKey();
-        XySequence xy = locEntry.getValue();
-        mapOut.computeIfAbsent(loc, k -> new EnumMap<>(Imt.class)).put(imt, xy);
-      }
-    }
-    return mapOut;
-  }
+  private static class Expecteds {
+
+    Map<Imt, Map<Location, XySequence>> totalCurves = new EnumMap<>(Imt.class);
+    Map<Imt, Map<Gmm, Map<Location, XySequence>>> gmmCurves = new EnumMap<>(Imt.class);
+    Map<Imt, Map<SourceType, Map<Location, XySequence>>> typeCurves = new EnumMap<>(Imt.class);
+    Map<Imt, Map<Location, Map<Double, XySequence>>> magCurves = new EnumMap<>(Imt.class);
+
+    Expecteds() {
+      try {
+        Map<Imt, Path> imtDirs = Files.list(RESULTS_PATH)
+            .filter(Files::isDirectory)
+            .collect(toMap(
+                p -> Imt.valueOf(p.getFileName().toString()),
+                p -> p));
+        imtDirs.forEach(this::loadImtDir);
+      } catch (IOException ioe) {
+        throw new RuntimeException(ioe);
+      }
+    }
+
+    void loadImtDir(Imt imt, Path path) {
+      try {
+        totalCurves.put(imt, readLocationCurves(path.resolve(CURVE_FILE)));
+
+        var imtGmmCurves = Files.list(path.resolve(GMM_DIR))
+            .filter(Files::isDirectory)
+            .collect(toMap(
+                p -> Gmm.valueOf(p.getFileName().toString()),
+                p -> readLocationCurves(p.resolve(CURVE_FILE))));
+        gmmCurves.put(imt, imtGmmCurves);
+
+        var imtTypeCurves = Files.list(path.resolve(TYPE_DIR))
+            .filter(Files::isDirectory)
+            .collect(toMap(
+                p -> SourceType.valueOf(p.getFileName().toString()),
+                p -> readLocationCurves(p.resolve(CURVE_FILE))));
+        typeCurves.put(imt, imtTypeCurves);
+
+        var imtMagCurves = Files.list(path.resolve(MAG_DIR))
+            .filter(p -> p.getFileName().toString().endsWith(".csv"))
+            .collect(toMap(
+                p -> readLocation(p),
+                p -> readMagnitudeCurves(p)));
+        magCurves.put(imt, imtMagCurves);
+
+      } catch (IOException ioe) {
+        throw new RuntimeException(ioe);
+      }
+    }
+  }
+
+  private static Map<Location, XySequence> readLocationCurves(Path path) {
+    int offset = 3;
+    try {
+      List<String> lines = Files.readAllLines(path);
+      double[] imls = readValues(lines.get(0), offset);
+      return lines.stream()
+          .skip(1)
+          .collect(toMap(
+              line -> readLocation(line),
+              line -> XySequence.create(imls, readValues(line, 3))));
+    } catch (IOException ioe) {
+      throw new RuntimeException(ioe);
+    }
+  }
+
+  /* Read a location from a line string. */
+  private static Location readLocation(String line) {
+    String[] s = line.split(",");
+    return Location.create(
+        Double.parseDouble(s[1]),
+        Double.valueOf(s[2]));
+  }
+
+  /* Read a location from a CSV filename. */
+  private static Location readLocation(Path path) {
+    String f = path.getFileName().toString();
+    String[] s = f.substring(0, f.length() - 4).split(",");
+    return Location.create(
+        Double.parseDouble(s[1]),
+        Double.parseDouble(s[2]));
+  }
+
+  private static Map<Double, XySequence> readMagnitudeCurves(Path path) {
+    int offset = 1;
+    try {
+      List<String> lines = Files.readAllLines(path);
+      double[] imls = readValues(lines.get(0), offset);
+      Map<Double, XySequence> pp = lines.stream()
+          .skip(1)
+          .collect(toMap(
+              line -> Double.valueOf(line.substring(0, line.indexOf(","))),
+              line -> readMagnitudeCurve(line, imls, offset)));
+      return pp;
+    } catch (IOException ioe) {
+      throw new RuntimeException(ioe);
+    }
+  }
+
+  private static XySequence readMagnitudeCurve(String line, double[] xs, int offset) {
+    double[] ys = readValues(line, offset);
+    return XySequence.create(xs, ys);
+  }
+
+  /* Read values from a comma-delimited string. */
+  private static double[] readValues(String line, int offset) {
+    return Arrays.stream(line.split(","))
+        .map(String::trim)
+        .skip(offset)
+        .mapToDouble(Double::parseDouble)
+        .toArray();
+  }

   public static void main(String[] args) throws IOException {
     model = ModelLoader.load(MODEL_PATH);
     List<Site> sites = Sites.fromCsv(SITES_PATH, model.siteData(), OptionalDouble.empty());
… (remainder of diff collapsed)
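A note on the CSV conventions the new loader relies on, as far as the diff shows: site curve files carry three leading metadata columns (the site name and its two coordinates) before the y-values, so readLocationCurves parses rows with an offset of 3, while magnitude curve files key each row by the magnitude bin in the first column, so the offset is 1. Below is a self-contained sketch of that offset convention; the data rows are made up for illustration and do not come from the test fixtures.

import java.util.Arrays;

class CurveLineSketch {

  /* Skip 'offset' leading metadata columns, then parse the remaining numeric values. */
  static double[] readValues(String line, int offset) {
    return Arrays.stream(line.split(","))
        .map(String::trim)
        .skip(offset)
        .mapToDouble(Double::parseDouble)
        .toArray();
  }

  public static void main(String[] args) {
    // hypothetical rows: a site curve row (name + 2 coordinates + values)
    // and a magnitude curve row (magnitude bin + values)
    double[] siteYs = readValues("site0, -122.25, 37.80, 1.2e-2, 3.4e-3, 5.6e-4", 3);
    double[] magYs = readValues("6.55, 1.2e-4, 3.4e-5, 8.9e-6", 1);
    System.out.println(Arrays.toString(siteYs));
    System.out.println(Arrays.toString(magYs));
  }
}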