diff --git a/Pipfile b/Pipfile
index 5f5bb1b6fcbb57618cfd5b1e63f291792c9b1756..b0976d84f49fc125d4148a160608122ee5973ba2 100644
--- a/Pipfile
+++ b/Pipfile
@@ -15,19 +15,18 @@ numpy = "*"
 scipy = "*"
 obspy = ">1.2.0"
 pycurl = "*"
-authlib = "*"
 
+authlib = "*"
+cryptography = "*"
+databases = {extras = ["postgresql", "sqlite"],version = "*"}
 fastapi = "*"
-pydantic = "*"
+httpx = "==0.11.1"
+openpyxl = "*"
+orm = "*"
+pydantic = "==1.4"
+sqlalchemy = "*"
 uvicorn = "*"
-
-flask = "*"
-flask-login = "*"
-flask-migrate = "*"
-flask-session = "*"
-flask-sqlalchemy = "*"
-psycopg2-binary = "*"
-gunicorn = "*"
+typesystem = "==0.2.4"
 
 [pipenv]
 allow_prereleases = true
diff --git a/Pipfile.lock b/Pipfile.lock
index 34e447006e6bb32e1251574089c68a8ae8516c32..732b95e0b49c84ae03702f9fe97cabdd01db160a 100644
--- a/Pipfile.lock
+++ b/Pipfile.lock
@@ -1,7 +1,7 @@
 {
     "_meta": {
         "hash": {
-            "sha256": "3d833d0fc478d001ec8e8485176a5578cffb063bc741e7d91d280f5180e13e78"
+            "sha256": "54200d2b34430d8ae6e6ff03ae3fac3aa0e70bdf65149f8b9208271915fe4c6e"
         },
         "pipfile-spec": 6,
         "requires": {},
@@ -14,11 +14,37 @@
         ]
     },
     "default": {
-        "alembic": {
-            "hashes": [
-                "sha256:035ab00497217628bf5d0be82d664d8713ab13d37b630084da8e1f98facf4dbf"
-            ],
-            "version": "==1.4.2"
+        "aiosqlite": {
+            "hashes": [
+                "sha256:4f02314a42db6722dc26f2a6119c64e3f05f141f57bbf2b1e1f9fd741b6d7fb8"
+            ],
+            "version": "==0.11.0"
+        },
+        "asyncpg": {
+            "hashes": [
+                "sha256:058baec9d6b75612412baa872a1aa47317d0ff88c318a49f9c4a2389043d5a8d",
+                "sha256:0c336903c3b08e970f8af2f606332f1738dba156bca83ed0467dc2f5c70da796",
+                "sha256:1388caa456070dab102be874205e3ae8fd1de2577d5de9fa22e65ba5c0f8b110",
+                "sha256:25edb0b947eb632b6b53e5a4b36cba5677297bb34cbaba270019714d0a5fed76",
+                "sha256:2af6a5a705accd36e13292ea43d08c20b15e52d684beb522cb3a7d3c9c8f3f48",
+                "sha256:391aea89871df8c1560750af6c7170f2772c2d133b34772acf3637e3cf4db93e",
+                "sha256:394bf19bdddbba07a38cd6fb526ebf66e120444d6b3097332b78efd5b26495b0",
+                "sha256:5664d1bd8abe64fc60a0e701eb85fa1d8c9a4a8018a5a59164d27238f2caf395",
+                "sha256:57666dfae38f4dbf84ffbf0c5c0f78733fef0e8e083230275dcb9ccad1d5ee09",
+                "sha256:74510234c294c6a6767089ba9c938f09a491426c24405634eb357bd91dffd734",
+                "sha256:95cd2df61ee00b789bdcd04a080e6d9188693b841db2bf9a87ebaed9e53147e0",
+                "sha256:a981500bf6947926e53c48f4d60ae080af1b4ad7fa78e363465a5b5ad4f2b65e",
+                "sha256:a9e6fd6f0f9e8bd77e9a4e1ef9a4f83a80674d9136a754ae3603e915da96b627",
+                "sha256:ad5ba062e09673b1a4b8d0facaf5a6d9719bf7b337440d10b07fe994d90a9552",
+                "sha256:ba90d3578bc6dddcbce461875672fd9bdb34f0b8215b68612dd3b65a956ff51c",
+                "sha256:c773c7dbe2f4d3ebc9e3030e94303e45d6742e6c2fc25da0c46a56ea3d83caeb",
+                "sha256:da238592235717419a6a7b5edc8564da410ebfd056ca4ecc41e70b1b5df86fba",
+                "sha256:e39aac2b3a2f839ce65aa255ce416de899c58b7d38d601d24ca35558e13b48e3",
+                "sha256:ec6e7046c98730cb2ba4df41387e10cb8963a3ac2918f69ae416f8aab9ca7b1b",
+                "sha256:f0c9719ac00615f097fe91082b785bce36dbf02a5ec4115ede0ebfd2cd9500cb",
+                "sha256:f7184689177eeb5a11fa1b2baf3f6f2e26bfd7a85acf4de1a3adbd0867d7c0e2"
+            ],
+            "version": "==0.20.1"
         },
         "authlib": {
             "hashes": [
@@ -30,10 +56,10 @@
         },
         "certifi": {
             "hashes": [
-                "sha256:017c25db2a153ce562900032d5bc68e9f191e44e9a0f762f373977de9df1fbb3",
-                "sha256:25b64c7da4cd7479594d035c08c2d809eb4aab3a26e5a990ea98cc450c320f1f"
+                "sha256:1d987a998c75633c40847cc966fcf5904906c920a7f17ef374f5aa4282abd304",
+                "sha256:51fcb31174be6e6664c5f69e3e1691a2d72a1a12e90f872cbdb1567eb47b6519"
             ],
-            "version": "==2019.11.28"
+            "version": "==2020.4.5.1"
         },
         "cffi": {
             "hashes": [
@@ -104,6 +130,7 @@
                 "sha256:ed1d0760c7e46436ec90834d6f10477ff09475c692ed1695329d324b2c5cd547",
                 "sha256:ef9a55013676907df6c9d7dd943eb1770d014f68beaa7e73250fb43c759f4585"
             ],
+            "index": "pypi",
             "version": "==2.9"
         },
         "cycler": {
@@ -113,81 +140,71 @@
             ],
             "version": "==0.10.0"
         },
-        "decorator": {
-            "hashes": [
-                "sha256:41fa54c2a0cc4ba648be4fd43cff00aedf5b9465c9bf18d64325bc225f08f760",
-                "sha256:e3a62f0520172440ca0dcc823749319382e377f37f140a0b99ef45fecb84bfe7"
+        "databases": {
+            "extras": [
+                "postgresql",
+                "sqlite"
             ],
-            "version": "==4.4.2"
-        },
-        "fastapi": {
             "hashes": [
-                "sha256:a5cb9100d5f2b5dd82addbc2cdf8009258bce45b03ba21d3f5eecc88c7b5a716",
-                "sha256:cf26d47ede6bc6e179df951312f55fea7d4005dd53370245e216436ca4e22f22"
+                "sha256:a04db1d158a91db7bd49db16e14266e8e6c7336f06f88c700147690683c769a3"
             ],
             "index": "pypi",
-            "version": "==0.53.2"
+            "version": "==0.2.6"
         },
-        "flask": {
+        "decorator": {
             "hashes": [
-                "sha256:4efa1ae2d7c9865af48986de8aeb8504bf32c7f3d6fdc9353d34b21f4b127060",
-                "sha256:8a4fdd8936eba2512e9c85df320a37e694c93945b33ef33c89946a340a238557"
+                "sha256:41fa54c2a0cc4ba648be4fd43cff00aedf5b9465c9bf18d64325bc225f08f760",
+                "sha256:e3a62f0520172440ca0dcc823749319382e377f37f140a0b99ef45fecb84bfe7"
             ],
-            "index": "pypi",
-            "version": "==1.1.2"
+            "version": "==4.4.2"
         },
-        "flask-login": {
+        "et-xmlfile": {
             "hashes": [
-                "sha256:6d33aef15b5bcead780acc339464aae8a6e28f13c90d8b1cf9de8b549d1c0b4b",
-                "sha256:7451b5001e17837ba58945aead261ba425fdf7b4f0448777e597ddab39f4fba0"
+                "sha256:614d9722d572f6246302c4491846d2c393c199cfa4edc9af593437691683335b"
             ],
-            "index": "pypi",
-            "version": "==0.5.0"
+            "version": "==1.0.1"
         },
-        "flask-migrate": {
+        "fastapi": {
             "hashes": [
-                "sha256:4dc4a5cce8cbbb06b8dc963fd86cf8136bd7d875aabe2d840302ea739b243732",
-                "sha256:a69d508c2e09d289f6e55a417b3b8c7bfe70e640f53d2d9deb0d056a384f37ee"
+                "sha256:33188cc5abe96fb93a9e01bd953c72194f04536eb3d7b87d880d434efd283268",
+                "sha256:c7ec84df59addd5d013c39b5b762b6a4afa4766fb344380abd2674f93e726d6a"
             ],
             "index": "pypi",
-            "version": "==2.5.3"
+            "version": "==0.54.0"
         },
-        "flask-session": {
+        "future": {
             "hashes": [
-                "sha256:a31c27e0c3287f00c825b3d9625aba585f4df4cccedb1e7dd5a69a215881a731",
-                "sha256:b9b32126bfc52c3169089f2ed9a40e34b589527bda48b633428e07d39d9c8792"
+                "sha256:b1bead90b70cf6ec3f0710ae53a525360fa360d306a86583adc6bf83a4db537d"
             ],
-            "index": "pypi",
-            "version": "==0.3.1"
+            "version": "==0.18.2"
         },
-        "flask-sqlalchemy": {
+        "h11": {
             "hashes": [
-                "sha256:0078d8663330dc05a74bc72b3b6ddc441b9a744e2f56fe60af1a5bfc81334327",
-                "sha256:6974785d913666587949f7c2946f7001e4fa2cb2d19f4e69ead02e4b8f50b33d"
+                "sha256:33d4bca7be0fa039f4e84d50ab00531047e53d6ee8ffbc83501ea602c169cae1",
+                "sha256:4bc6d6a1238b7615b266ada57e0618568066f57dd6fa967d1290ec9309b2f2f1"
             ],
-            "index": "pypi",
-            "version": "==2.4.1"
+            "version": "==0.9.0"
         },
-        "future": {
+        "h2": {
             "hashes": [
-                "sha256:b1bead90b70cf6ec3f0710ae53a525360fa360d306a86583adc6bf83a4db537d"
+                "sha256:61e0f6601fa709f35cdb730863b4e5ec7ad449792add80d1410d4174ed139af5",
+                "sha256:875f41ebd6f2c44781259005b157faed1a5031df3ae5aa7bcb4628a6c0782f14"
             ],
-            "version": "==0.18.2"
+            "version": "==3.2.0"
         },
-        "gunicorn": {
+        "hpack": {
             "hashes": [
-                "sha256:1904bb2b8a43658807108d59c3f3d56c2b6121a701161de0ddf9ad140073c626",
-                "sha256:cd4a810dd51bf497552cf3f863b575dabd73d6ad6a91075b65936b151cbf4f9c"
+                "sha256:0edd79eda27a53ba5be2dfabf3b15780928a0dff6eb0c60a3d6767720e970c89",
+                "sha256:8eec9c1f4bfae3408a3f30500261f7e6a65912dc138526ea054f9ad98892e9d2"
             ],
-            "index": "pypi",
-            "version": "==20.0.4"
+            "version": "==3.0.0"
         },
-        "h11": {
+        "hstspreload": {
             "hashes": [
-                "sha256:33d4bca7be0fa039f4e84d50ab00531047e53d6ee8ffbc83501ea602c169cae1",
-                "sha256:4bc6d6a1238b7615b266ada57e0618568066f57dd6fa967d1290ec9309b2f2f1"
+                "sha256:0aa4c2ebb768a11109f4d2008b3fce987adaef2de584b93a48756847ec84403c",
+                "sha256:e6b87847b1250c30e67bf68925d5e78b987d4be3fb61f921cdcd8ccea7dd4342"
             ],
-            "version": "==0.9.0"
+            "version": "==2020.3.31"
         },
         "httptools": {
             "hashes": [
@@ -207,6 +224,21 @@
             "markers": "sys_platform != 'win32' and sys_platform != 'cygwin' and platform_python_implementation != 'PyPy'",
             "version": "==0.1.1"
         },
+        "httpx": {
+            "hashes": [
+                "sha256:1d3893d3e4244c569764a6bae5c5a9fbbc4a6ec3825450b5696602af7a275576",
+                "sha256:7d2bfb726eeed717953d15dddb22da9c2fcf48a4d70ba1456aa0a7faeda33cf7"
+            ],
+            "index": "pypi",
+            "version": "==0.11.1"
+        },
+        "hyperframe": {
+            "hashes": [
+                "sha256:5187962cb16dcc078f23cb5a4b110098d546c3f41ff2d4038a9896893bbd0b40",
+                "sha256:a9f5c17f2cc3c719b917c4f33ed1c61bd1f8dfac4b1bd23b7c80b3400971b41f"
+            ],
+            "version": "==5.2.0"
+        },
         "idna": {
             "hashes": [
                 "sha256:7588d1c14ae4c77d74036e8c22ff447b26d0fde8f007354fd48a7814db15b7cb",
@@ -214,19 +246,12 @@
             ],
             "version": "==2.9"
         },
-        "itsdangerous": {
-            "hashes": [
-                "sha256:321b033d07f2a4136d3ec762eac9f16a10ccd60f53c0c91af90217ace7ba1f19",
-                "sha256:b12271b2047cb23eeb98c8b5622e2e5c5e9abd9784a153e9d8ef9cb4dd09d749"
-            ],
-            "version": "==1.1.0"
-        },
-        "jinja2": {
+        "jdcal": {
             "hashes": [
-                "sha256:c10142f819c2d22bdcd17548c46fa9b77cf4fda45097854c689666bf425e7484",
-                "sha256:c922560ac46888d47384de1dbdc3daaa2ea993af4b26a436dec31fa2c19ec668"
+                "sha256:1abf1305fce18b4e8aa248cf8fe0c56ce2032392bc64bbd61b5dff2a19ec8bba",
+                "sha256:472872e096eb8df219c23f2689fc336668bdb43d194094b5cc1707e1640acfc8"
             ],
-            "version": "==3.0.0a1"
+            "version": "==1.4.1"
         },
         "kiwisolver": {
             "hashes": [
@@ -281,51 +306,6 @@
             ],
             "version": "==4.5.0"
         },
-        "mako": {
-            "hashes": [
-                "sha256:3139c5d64aa5d175dbafb95027057128b5fbd05a40c53999f3905ceb53366d9d",
-                "sha256:8e8b53c71c7e59f3de716b6832c4e401d903af574f6962edbbbf6ecc2a5fe6c9"
-            ],
-            "version": "==1.1.2"
-        },
-        "markupsafe": {
-            "hashes": [
-                "sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473",
-                "sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161",
-                "sha256:09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235",
-                "sha256:1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5",
-                "sha256:13d3144e1e340870b25e7b10b98d779608c02016d5184cfb9927a9f10c689f42",
-                "sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff",
-                "sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b",
-                "sha256:43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1",
-                "sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e",
-                "sha256:500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183",
-                "sha256:535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66",
-                "sha256:596510de112c685489095da617b5bcbbac7dd6384aeebeda4df6025d0256a81b",
-                "sha256:62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1",
-                "sha256:6788b695d50a51edb699cb55e35487e430fa21f1ed838122d722e0ff0ac5ba15",
-                "sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1",
-                "sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e",
-                "sha256:79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b",
-                "sha256:7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905",
-                "sha256:88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735",
-                "sha256:8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d",
-                "sha256:98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e",
-                "sha256:9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d",
-                "sha256:9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c",
-                "sha256:ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21",
-                "sha256:b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2",
-                "sha256:b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5",
-                "sha256:b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b",
-                "sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6",
-                "sha256:c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f",
-                "sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f",
-                "sha256:cdb132fc825c38e1aeec2c8aa9338310d29d337bebbd7baa06889d09a60a1fa2",
-                "sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7",
-                "sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be"
-            ],
-            "version": "==1.1.1"
-        },
         "matplotlib": {
             "hashes": [
                 "sha256:2466d4dddeb0f5666fd1e6736cc5287a4f9f7ae6c1a9e0779deff798b28e1d35",
@@ -389,6 +369,20 @@
             "index": "pypi",
             "version": "==1.2.1"
         },
+        "openpyxl": {
+            "hashes": [
+                "sha256:547a9fc6aafcf44abe358b89ed4438d077e9d92e4f182c87e2dc294186dc4b64"
+            ],
+            "index": "pypi",
+            "version": "==3.0.3"
+        },
+        "orm": {
+            "hashes": [
+                "sha256:37cb4757b670c1713f4e0d65874c5afe819acbd712abb9743c97e1d4b00d511c"
+            ],
+            "index": "pypi",
+            "version": "==0.1.5"
+        },
         "psycopg2-binary": {
             "hashes": [
                 "sha256:040234f8a4a8dfd692662a8308d78f63f31a97e1c42d2480e5e6810c48966a29",
@@ -424,7 +418,6 @@
                 "sha256:eac8a3499754790187bb00574ab980df13e754777d346f85e0ff6df929bcd964",
                 "sha256:eaed1c65f461a959284649e37b5051224f4db6ebdc84e40b5e65f2986f101a08"
             ],
-            "index": "pypi",
             "version": "==2.8.4"
         },
         "pycparser": {
@@ -471,10 +464,10 @@
         },
         "pyparsing": {
             "hashes": [
-                "sha256:4c830582a84fb022400b85429791bc551f1f4871c33f23e44f353119e92f969f",
-                "sha256:c342dccb5250c08d45fd6f8b4a559613ca603b57498511740e65cd11a2e7dcec"
+                "sha256:67199f0c41a9c702154efb0e7a8cc08accf830eb003b4d9fa42c4059002e2492",
+                "sha256:700d17888d441604b0bd51535908dcb297561b040819cccde647a92439db5a2a"
             ],
-            "version": "==2.4.6"
+            "version": "==3.0.0a1"
         },
         "python-dateutil": {
             "hashes": [
@@ -483,14 +476,6 @@
             ],
             "version": "==2.8.1"
         },
-        "python-editor": {
-            "hashes": [
-                "sha256:1bf6e860a8ad52a14c3ee1252d5dc25b2030618ed80c022598f00176adc8367d",
-                "sha256:51fda6bcc5ddbbb7063b2af7509e43bd84bfc32a4ff71349ec7847713882327b",
-                "sha256:5f98b069316ea1c2ed3f67e7f5df6c0d8f10b689964a4a811ff64f0106819ec8"
-            ],
-            "version": "==1.0.4"
-        },
         "requests": {
             "hashes": [
                 "sha256:43999036bfa82904b6af1d99e4882b560e5e2c68e5c4b0aa03b655f3d7d73fee",
@@ -498,6 +483,13 @@
             ],
             "version": "==2.23.0"
         },
+        "rfc3986": {
+            "hashes": [
+                "sha256:0344d0bd428126ce554e7ca2b61787b6a28d2bbd19fc70ed2dd85efe31176405",
+                "sha256:df4eba676077cefb86450c8f60121b9ae04b94f65f85b69f3f731af0516b7b18"
+            ],
+            "version": "==1.3.2"
+        },
         "scipy": {
             "hashes": [
                 "sha256:00af72998a46c25bdb5824d2b729e7dabec0c765f9deb0b504f928591f5ff9d4",
@@ -532,10 +524,18 @@
             ],
             "version": "==1.14.0"
         },
+        "sniffio": {
+            "hashes": [
+                "sha256:20ed6d5b46f8ae136d00b9dcb807615d83ed82ceea6b2058cecb696765246da5",
+                "sha256:8e3810100f69fe0edd463d02ad407112542a11ffdc29f67db2bf3771afb87a21"
+            ],
+            "version": "==1.1.0"
+        },
         "sqlalchemy": {
             "hashes": [
                 "sha256:c4cca4aed606297afbe90d4306b49ad3a4cd36feb3f87e4bfd655c57fd9ef445"
             ],
+            "index": "pypi",
             "version": "==1.3.15"
         },
         "starlette": {
@@ -545,6 +545,13 @@
             ],
             "version": "==0.13.2"
         },
+        "typesystem": {
+            "hashes": [
+                "sha256:ba2bd10f1c5844d08dd8841e777bdee55bfca569bf21cb96cd0f91e0a4f66cd8"
+            ],
+            "index": "pypi",
+            "version": "==0.2.4"
+        },
         "urllib3": {
             "hashes": [
                 "sha256:2f3db8b19923a873b3e5256dc9c2dedfa883e33d87c690d9c7913e1f40673cdc",
@@ -601,13 +608,6 @@
                 "sha256:f8a7bff6e8664afc4e6c28b983845c5bc14965030e3fb98789734d416af77c4b"
             ],
             "version": "==8.1"
-        },
-        "werkzeug": {
-            "hashes": [
-                "sha256:2de2a5db0baeae7b2d2664949077c2ac63fbd16d98da0ff71837f7d1dea3fd43",
-                "sha256:6c80b1e5ad3665290ea39320b91e1be1e0d5f60652b964a3070216de83d2e47c"
-            ],
-            "version": "==1.0.1"
         }
     },
     "develop": {
@@ -627,11 +627,11 @@
         },
         "beautifulsoup4": {
             "hashes": [
-                "sha256:05fd825eb01c290877657a56df4c6e4c311b3965bda790c613a3d6fb01a5462a",
-                "sha256:9fbb4d6e48ecd30bcacc5b63b94088192dcda178513b2ae3c394229f8911b887",
-                "sha256:e1505eeed31b0f4ce2dbb3bc8eb256c04cc2b3b72af7d551a4ab6efd5cbe5dae"
+                "sha256:594ca51a10d2b3443cbac41214e12dbb2a1cd57e1a7344659849e2e20ba6a8d8",
+                "sha256:a4bbe77fd30670455c5296242967a123ec28c37e9702a8a81bd2f20a4baf0368",
+                "sha256:d4e96ac9b0c3a6d3f0caae2e4124e6055c5dcafde8e2f831ff194c104f0775a0"
             ],
-            "version": "==4.8.2"
+            "version": "==4.9.0"
         },
         "black": {
             "hashes": [
@@ -770,10 +770,10 @@
         },
         "pyparsing": {
             "hashes": [
-                "sha256:4c830582a84fb022400b85429791bc551f1f4871c33f23e44f353119e92f969f",
-                "sha256:c342dccb5250c08d45fd6f8b4a559613ca603b57498511740e65cd11a2e7dcec"
+                "sha256:67199f0c41a9c702154efb0e7a8cc08accf830eb003b4d9fa42c4059002e2492",
+                "sha256:700d17888d441604b0bd51535908dcb297561b040819cccde647a92439db5a2a"
             ],
-            "version": "==2.4.6"
+            "version": "==3.0.0a1"
         },
         "pytest": {
             "hashes": [
@@ -809,29 +809,29 @@
         },
         "regex": {
             "hashes": [
-                "sha256:01b2d70cbaed11f72e57c1cfbaca71b02e3b98f739ce33f5f26f71859ad90431",
-                "sha256:046e83a8b160aff37e7034139a336b660b01dbfe58706f9d73f5cdc6b3460242",
-                "sha256:113309e819634f499d0006f6200700c8209a2a8bf6bd1bdc863a4d9d6776a5d1",
-                "sha256:200539b5124bc4721247a823a47d116a7a23e62cc6695744e3eb5454a8888e6d",
-                "sha256:25f4ce26b68425b80a233ce7b6218743c71cf7297dbe02feab1d711a2bf90045",
-                "sha256:269f0c5ff23639316b29f31df199f401e4cb87529eafff0c76828071635d417b",
-                "sha256:5de40649d4f88a15c9489ed37f88f053c15400257eeb18425ac7ed0a4e119400",
-                "sha256:7f78f963e62a61e294adb6ff5db901b629ef78cb2a1cfce3cf4eeba80c1c67aa",
-                "sha256:82469a0c1330a4beb3d42568f82dffa32226ced006e0b063719468dcd40ffdf0",
-                "sha256:8c2b7fa4d72781577ac45ab658da44c7518e6d96e2a50d04ecb0fd8f28b21d69",
-                "sha256:974535648f31c2b712a6b2595969f8ab370834080e00ab24e5dbb9d19b8bfb74",
-                "sha256:99272d6b6a68c7ae4391908fc15f6b8c9a6c345a46b632d7fdb7ef6c883a2bbb",
-                "sha256:9b64a4cc825ec4df262050c17e18f60252cdd94742b4ba1286bcfe481f1c0f26",
-                "sha256:9e9624440d754733eddbcd4614378c18713d2d9d0dc647cf9c72f64e39671be5",
-                "sha256:9ff16d994309b26a1cdf666a6309c1ef51ad4f72f99d3392bcd7b7139577a1f2",
-                "sha256:b33ebcd0222c1d77e61dbcd04a9fd139359bded86803063d3d2d197b796c63ce",
-                "sha256:bba52d72e16a554d1894a0cc74041da50eea99a8483e591a9edf1025a66843ab",
-                "sha256:bed7986547ce54d230fd8721aba6fd19459cdc6d315497b98686d0416efaff4e",
-                "sha256:c7f58a0e0e13fb44623b65b01052dae8e820ed9b8b654bb6296bc9c41f571b70",
-                "sha256:d58a4fa7910102500722defbde6e2816b0372a4fcc85c7e239323767c74f5cbc",
-                "sha256:f1ac2dc65105a53c1c2d72b1d3e98c2464a133b4067a51a3d2477b28449709a0"
-            ],
-            "version": "==2020.2.20"
+                "sha256:08119f707f0ebf2da60d2f24c2f39ca616277bb67ef6c92b72cbf90cbe3a556b",
+                "sha256:0ce9537396d8f556bcfc317c65b6a0705320701e5ce511f05fc04421ba05b8a8",
+                "sha256:1cbe0fa0b7f673400eb29e9ef41d4f53638f65f9a2143854de6b1ce2899185c3",
+                "sha256:2294f8b70e058a2553cd009df003a20802ef75b3c629506be20687df0908177e",
+                "sha256:23069d9c07e115537f37270d1d5faea3e0bdded8279081c4d4d607a2ad393683",
+                "sha256:24f4f4062eb16c5bbfff6a22312e8eab92c2c99c51a02e39b4eae54ce8255cd1",
+                "sha256:295badf61a51add2d428a46b8580309c520d8b26e769868b922750cf3ce67142",
+                "sha256:2a3bf8b48f8e37c3a40bb3f854bf0121c194e69a650b209628d951190b862de3",
+                "sha256:4385f12aa289d79419fede43f979e372f527892ac44a541b5446617e4406c468",
+                "sha256:5635cd1ed0a12b4c42cce18a8d2fb53ff13ff537f09de5fd791e97de27b6400e",
+                "sha256:5bfed051dbff32fd8945eccca70f5e22b55e4148d2a8a45141a3b053d6455ae3",
+                "sha256:7e1037073b1b7053ee74c3c6c0ada80f3501ec29d5f46e42669378eae6d4405a",
+                "sha256:90742c6ff121a9c5b261b9b215cb476eea97df98ea82037ec8ac95d1be7a034f",
+                "sha256:a58dd45cb865be0ce1d5ecc4cfc85cd8c6867bea66733623e54bd95131f473b6",
+                "sha256:c087bff162158536387c53647411db09b6ee3f9603c334c90943e97b1052a156",
+                "sha256:c162a21e0da33eb3d31a3ac17a51db5e634fc347f650d271f0305d96601dc15b",
+                "sha256:c9423a150d3a4fc0f3f2aae897a59919acd293f4cb397429b120a5fcd96ea3db",
+                "sha256:ccccdd84912875e34c5ad2d06e1989d890d43af6c2242c6fcfa51556997af6cd",
+                "sha256:e91ba11da11cf770f389e47c3f5c30473e6d85e06d7fd9dcba0017d2867aab4a",
+                "sha256:ea4adf02d23b437684cd388d557bf76e3afa72f7fed5bbc013482cc00c816948",
+                "sha256:fb95debbd1a824b2c4376932f2216cc186912e389bdb0e27147778cf6acb3f89"
+            ],
+            "version": "==2020.4.4"
         },
         "six": {
             "hashes": [
@@ -882,10 +882,10 @@
         },
         "virtualenv": {
             "hashes": [
-                "sha256:4e399f48c6b71228bf79f5febd27e3bbb753d9d5905776a86667bc61ab628a25",
-                "sha256:9e81279f4a9d16d1c0654a127c2c86e5bca2073585341691882c1e66e31ef8a5"
+                "sha256:6ea131d41c477f6c4b7863948a9a54f7fa196854dbef73efbdff32b509f4d8bf",
+                "sha256:94f647e12d1e6ced2541b93215e51752aecbd1bbb18eb1816e2867f7532b1fe1"
             ],
-            "version": "==20.0.15"
+            "version": "==20.0.16"
         },
         "waitress": {
             "hashes": [
diff --git a/create_db.py b/create_db.py
new file mode 100644
index 0000000000000000000000000000000000000000..1c34c2c21e877940c4e5aed50a654a354d9a5dec
--- /dev/null
+++ b/create_db.py
@@ -0,0 +1,3 @@
+from geomagio.api.db.create import create_db
+
+create_db()
diff --git a/geomagio/api/__init__.py b/geomagio/api/__init__.py
index 34f275ed3c9d7894766f1b188d9d5e13327f24bb..ee21241f2ddd1b9529d7ae775a3e96d3b1e267ee 100644
--- a/geomagio/api/__init__.py
+++ b/geomagio/api/__init__.py
@@ -1,3 +1,4 @@
 from .app import app
+from .db.create import create_db
 
-__all__ = ["app"]
+__all__ = ["app", "create_db"]
diff --git a/geomagio/api/app.py b/geomagio/api/app.py
index 8e4725cdc0a46e4d2e3fa6fd07928e0ef10de862..e206d680ed0f66f1149962b17e0c2a97c2720f05 100644
--- a/geomagio/api/app.py
+++ b/geomagio/api/app.py
@@ -9,13 +9,26 @@ and can be run using uvicorn, or any other ASGI server:
 from fastapi import FastAPI
 from starlette.responses import RedirectResponse
 
+from .db import database
+from . import secure
 from . import ws
 
 
 app = FastAPI()
+app.mount("/ws/secure", secure.app)
 app.mount("/ws", ws.app)
 
 
+@app.on_event("startup")
+async def on_startup():
+    await database.connect()
+
+
+@app.on_event("shutdown")
+async def on_shutdown():
+    await database.disconnect()
+
+
 @app.get("/", include_in_schema=False)
 async def redirect_to_ws():
     return RedirectResponse("/ws")
diff --git a/geomagio/api/db/__init__.py b/geomagio/api/db/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..50e414c7396b72a2aa4d0b7849ba42b801ab417e
--- /dev/null
+++ b/geomagio/api/db/__init__.py
@@ -0,0 +1,11 @@
+"""geomagio.api.db package.
+
+This package manages the database connection, data models,
+and provides methods for data access from other parts of the api.
+
+Modules outside the api should not access the database directly.
+"""
+
+from .common import database, sqlalchemy_metadata
+
+__all__ = ["database", "sqlalchemy_metadata"]
diff --git a/geomagio/api/db/common.py b/geomagio/api/db/common.py
new file mode 100644
index 0000000000000000000000000000000000000000..317861787e2ce3c2691955d9b1344a16d5c768ae
--- /dev/null
+++ b/geomagio/api/db/common.py
@@ -0,0 +1,45 @@
+"""
+Define the database connection and sqlalchemy metadata objects.
+
+
+Configuration:
+    uses environment variables:
+
+    DATABASE_URL  - url to connect to database.
+                    Default is "sqlite:///./api_database.db"
+
+
+Database models:
+
+    Register with metadata.
+
+        class DatabaseModel(orm.Model):
+            __database__ = database
+            __metadata__ = sqlalchemy_metadata
+
+    And import in create.py, so scripts can manage the database schema.
+
+
+Applications must manage the database connections:
+
+    @app.on_event("startup")
+    async def on_startup():
+        await database.connect()
+
+
+    @app.on_event("shutdown")
+    async def on_shutdown():
+        await database.disconnect()
+"""
+
+import os
+
+from databases import Database
+from sqlalchemy import MetaData
+
+
+# database connection
+database = Database(os.getenv("DATABASE_URL", "sqlite:///./api_database.db"))
+
+# metadata used to manage database schema
+sqlalchemy_metadata = MetaData()
diff --git a/geomagio/api/db/create.py b/geomagio/api/db/create.py
new file mode 100644
index 0000000000000000000000000000000000000000..dbabda419503e88df557422a6c09b54d65da3db2
--- /dev/null
+++ b/geomagio/api/db/create.py
@@ -0,0 +1,17 @@
+import sqlalchemy
+
+from .common import database, sqlalchemy_metadata
+
+# register models with sqlalchemy_metadata by importing
+from .metadata import Metadata; from .session import Session  # noqa: E702,F401
+
+
+def create_db():
+    """Create the database using sqlalchemy.
+    """
+    engine = sqlalchemy.create_engine(str(database.url))
+    sqlalchemy_metadata.create_all(engine)
+
+
+if __name__ == "__main__":
+    create_db()
diff --git a/geomagio/api/db/metadata.py b/geomagio/api/db/metadata.py
new file mode 100644
index 0000000000000000000000000000000000000000..384d970e0a7ccf5d8bb7c89e7cdd39e0cdc943cf
--- /dev/null
+++ b/geomagio/api/db/metadata.py
@@ -0,0 +1,93 @@
+import datetime
+import enum
+
+import orm
+
+
+from .common import database, sqlalchemy_metadata
+
+
+# known category values as enumeration
+class MetadataCategory(str, enum.Enum):
+    ADJUSTED_MATRIX = "adjusted-matrix"
+    FLAG = "flag"
+    READING = "reading"
+
+
+class Metadata(orm.Model):
+    """Metadata database model.
+
+    This class is used for Data flagging and other Metadata.
+
+    Flag example:
+    ```
+    automatic_flag = Metadata(
+        created_by = 'algorithm/version',
+        starttime = UTCDateTime('2020-01-02T00:17:00.1Z'),
+        endtime = UTCDateTime('2020-01-02T00:17:00.1Z'),
+        network = 'NT',
+        station = 'BOU',
+        channel = 'BEU',
+        category = MetadataCategory.FLAG,
+        comment = "spike detected",
+        priority = 1,
+        data_valid = False)
+    ```
+
+    Adjusted Matrix example:
+    ```
+    adjusted_matrix = Metadata(
+        created_by = 'algorithm/version',
+        starttime = UTCDateTime('2020-01-02T00:17:00Z'),
+        endtime = None,
+        network = 'NT',
+        station = 'BOU',
+        category = MetadataCategory.ADJUSTED_MATRIX,
+        comment = 'automatic adjusted matrix',
+        priority = 1,
+        metadata = {
+            'parameters': {'x': 1, 'y': 2, 'z': 3},
+            'matrix': [ ... ]
+        }
+    )
+    ```
+    """
+
+    __tablename__ = "metadata"
+    __database__ = database
+    __metadata__ = sqlalchemy_metadata
+
+    id = orm.Integer(primary_key=True)
+
+    # author
+    created_by = orm.Text(index=True)
+    created_time = orm.DateTime(default=datetime.datetime.utcnow, index=True)
+    # reviewer
+    reviewed_by = orm.Text(allow_null=True, index=True)
+    reviewed_time = orm.DateTime(allow_null=True, index=True)
+
+    # time range
+    starttime = orm.DateTime(allow_null=True, index=True)
+    endtime = orm.DateTime(allow_null=True, index=True)
+    # what metadata applies to
+    # channel/location allow_null for wildcard
+    network = orm.String(index=True, max_length=255)
+    station = orm.String(index=True, max_length=255)
+    channel = orm.String(allow_null=True, index=True, max_length=255)
+    location = orm.String(allow_null=True, index=True, max_length=255)
+
+    # category (flag, matrix, etc)
+    category = orm.String(index=True, max_length=255)
+    # higher priority overrides lower priority
+    priority = orm.Integer(default=1, index=True)
+    # whether data is valid (primarily for flags)
+    data_valid = orm.Boolean(default=True, index=True)
+    # value
+    metadata = orm.JSON(allow_null=True)
+    # whether metadata is valid (based on review)
+    metadata_valid = orm.Boolean(default=True, index=True)
+
+    # general comment
+    comment = orm.Text(allow_null=True)
+    # review specific comment
+    review_comment = orm.Text(allow_null=True)
diff --git a/geomagio/api/db/session.py b/geomagio/api/db/session.py
new file mode 100644
index 0000000000000000000000000000000000000000..f5a94fcaea153df3f40d109695975236bdd9d215
--- /dev/null
+++ b/geomagio/api/db/session.py
@@ -0,0 +1,51 @@
+import datetime
+import json
+from typing import Dict, Optional
+
+
+import orm
+from .common import database, sqlalchemy_metadata
+
+
class Session(orm.Model):
    """Model for database sessions.

    Persists session payloads keyed by a session_id, with an updated
    timestamp used by remove_expired_sessions for cleanup.
    """

    # orm wiring: table name, database connection, sqlalchemy metadata
    __tablename__ = "session"
    __database__ = database
    __metadata__ = sqlalchemy_metadata

    # surrogate primary key
    id = orm.Integer(primary_key=True)
    # opaque identifier, stored in the client cookie by the middleware
    session_id = orm.String(index=True, max_length=100)
    # serialized (and possibly encrypted) session payload
    data = orm.Text()
    # last write time; rows older than max_age are removed by remove_expired_sessions
    updated = orm.DateTime(index=True)
+
+
async def delete_session(session_id: str) -> None:
    """Delete stored session data for session_id.

    Missing sessions are ignored, so deletion is idempotent.

    Parameters
    ----------
    session_id: identifier of the session to delete.
    """
    try:
        session = await Session.objects.get(session_id=session_id)
        await session.delete()
    except orm.exceptions.NoMatch:
        # nothing to delete; previously returned {} here (and implicitly
        # None on success), which was inconsistent and unused by callers
        pass
+
+
async def get_session(session_id: str) -> Optional[str]:
    """Get stored session data for session_id.

    Parameters
    ----------
    session_id: identifier of the session to load.

    Returns
    -------
    stored session data string, or None if no session exists.
    (Previously returned {} on the not-found path despite the -> str
    annotation; callers treat any non-string as an empty session.)
    """
    try:
        session = await Session.objects.get(session_id=session_id)
        return session.data
    except orm.exceptions.NoMatch:
        return None
+
+
async def remove_expired_sessions(max_age: datetime.timedelta) -> None:
    """Delete all sessions last updated more than max_age ago."""
    cutoff = datetime.datetime.now(tz=datetime.timezone.utc) - max_age
    await Session.objects.delete(updated__lt=cutoff)
+
+
async def save_session(session_id: str, data: str) -> None:
    """Create or update stored session data for session_id."""
    now = datetime.datetime.now(tz=datetime.timezone.utc)
    try:
        existing = await Session.objects.get(session_id=session_id)
    except orm.exceptions.NoMatch:
        # first write for this session id
        await Session.objects.create(session_id=session_id, data=data, updated=now)
    else:
        existing_update = await existing.update(data=data, updated=now)
diff --git a/geomagio/api/secure/SessionMiddleware.py b/geomagio/api/secure/SessionMiddleware.py
new file mode 100644
index 0000000000000000000000000000000000000000..ed978a4a2ffab76d1d465a69f71766c6331ad594
--- /dev/null
+++ b/geomagio/api/secure/SessionMiddleware.py
@@ -0,0 +1,122 @@
+import base64
+import json
+from typing import Callable, Dict, Mapping
+import uuid
+
+from cryptography.fernet import Fernet
+from starlette.datastructures import MutableHeaders, Secret
+from starlette.requests import HTTPConnection
+from starlette.types import ASGIApp, Message, Receive, Scope, Send
+
+
class SessionMiddleware:
    """Based on Starlette SessionMiddleware.
    https://github.com/encode/starlette/blob/0.13.2/starlette/middleware/sessions.py

    Updated to store session id in cookie, and keep session data elsewhere
    (via the delete/get/save callbacks).

    Usage:
        app.add_middleware(SessionMiddleware, **params)

    Parameters
    ----------
    app: the ASGI application

    delete_session_callback(session_id): callback to delete stored session data.
    get_session_callback(session_id): callback to get stored session data.
    save_session_callback(session_id): callback to update stored session data.
    encryption: encrypt session data before storage if provided

    session_cookie: name of session cookie
    path: path for session cookie
    max_age: how long session cookies last, in seconds
    same_site: cookie same site policy
    https_only: whether to require https for cookies
    """

    def __init__(
        self,
        app: ASGIApp,
        delete_session_callback: Callable[[str], None],
        get_session_callback: Callable[[str], str],
        save_session_callback: Callable[[str, str], None],
        encryption: Fernet = None,
        session_cookie: str = "session",
        path: str = "/",
        max_age: int = 14 * 24 * 60 * 60,  # 14 days, in seconds
        same_site: str = "lax",
        https_only: bool = False,
    ) -> None:
        self.app = app
        self.encryption = encryption
        self.delete_session_callback = delete_session_callback
        self.get_session_callback = get_session_callback
        self.save_session_callback = save_session_callback
        self.session_cookie = session_cookie
        self.path = path
        self.max_age = max_age
        self.security_flags = "httponly; samesite=" + same_site
        if https_only:  # Secure flag can be used with HTTPS only
            self.security_flags += "; secure"

    async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
        if scope["type"] not in ("http", "websocket"):  # pragma: no cover
            await self.app(scope, receive, send)
            return

        connection = HTTPConnection(scope)
        initial_session_was_empty = True
        session_id = None

        # load existing session (if any) into scope["session"]
        if self.session_cookie in connection.cookies:
            session_id = connection.cookies[self.session_cookie]
            try:
                scope["session"] = await self.get_session(session_id)
                initial_session_was_empty = False
            except Exception:
                # missing/corrupt/undecryptable data: start a fresh session
                scope["session"] = {}
        else:
            scope["session"] = {}

        async def send_wrapper(message: Message) -> None:
            nonlocal session_id
            if message["type"] == "http.response.start":
                if scope["session"]:
                    # session has data: persist it and (re)issue the cookie
                    session_id = session_id or uuid.uuid4().hex
                    # Persist session
                    await self.save_session(session_id, scope["session"])
                    self.set_cookie(message=message, value=session_id)
                elif not initial_session_was_empty:
                    # session existed but is now empty: clear storage and cookie
                    await self.delete_session(session_id)
                    self.set_cookie(message=message, value="null", max_age=-1)
            await send(message)

        await self.app(scope, receive, send_wrapper)

    async def delete_session(self, session_id: str):
        """Remove stored session data for session_id."""
        await self.delete_session_callback(session_id)

    async def get_session(self, session_id: str) -> Dict:
        """Load, decrypt (if configured), and deserialize session data."""
        data = await self.get_session_callback(session_id)
        if self.encryption:
            data = self.encryption.decrypt(data.encode("utf8"))
        return json.loads(data)

    async def save_session(self, session_id: str, data: Mapping):
        """Serialize, encrypt (if configured), and store session data."""
        data = json.dumps(data)
        if self.encryption:
            data = self.encryption.encrypt(data.encode("utf8")).decode("utf8")
        await self.save_session_callback(session_id, data)

    def set_cookie(
        self, message: Message, value: str, max_age: int = None,
    ):
        """Append a Set-Cookie header to an http.response.start message.

        Parameters
        ----------
        message: response start message being sent.
        value: cookie value (a session id, or "null" when clearing).
        max_age: cookie lifetime in seconds; None uses self.max_age.
            Negative values expire the cookie immediately.
        """
        # test "is None" so an explicit max_age of 0 is honored
        # (the previous "max_age or self.max_age" silently ignored 0)
        cookie_max_age = self.max_age if max_age is None else max_age
        headers = MutableHeaders(scope=message)
        headers.append(
            "Set-Cookie",
            f"{self.session_cookie}={value};"
            f" path={self.path};"
            f" Max-Age={cookie_max_age};"
            f" {self.security_flags}",
        )
diff --git a/geomagio/api/secure/__init__.py b/geomagio/api/secure/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..2c745c5c16a686bae548edd004bf48ff4e4d1d33
--- /dev/null
+++ b/geomagio/api/secure/__init__.py
@@ -0,0 +1,5 @@
+"""Module with application for "/ws/secure" endpoints.
+"""
+from .app import app
+
+__all__ = ["app"]
diff --git a/geomagio/api/secure/app.py b/geomagio/api/secure/app.py
new file mode 100644
index 0000000000000000000000000000000000000000..e6508cef732e3d445f2540593fb82a257d233828
--- /dev/null
+++ b/geomagio/api/secure/app.py
@@ -0,0 +1,54 @@
+import json
+import os
+import uuid
+
+from fastapi import Depends, FastAPI, Request, Response
+
+from ..db.session import delete_session, get_session, save_session
+from .encryption import get_fernet
+from .login import current_user, router as login_router, User
+from .SessionMiddleware import SessionMiddleware
+
+
app = FastAPI()

# NOTE: database used for sessions is started by ..app.app,
# which mounts this application at /ws/secure
app.add_middleware(
    middleware_class=SessionMiddleware,
    delete_session_callback=delete_session,
    get_session_callback=get_session,
    save_session_callback=save_session,
    # NOTE(review): when SECRET_KEY is unset, the fallback is a random
    # per-process value, so sessions cannot be decrypted after a restart
    # or shared across processes — confirm this is intended for dev only
    encryption=get_fernet(
        os.getenv("SECRET_KEY", uuid.uuid4().hex),
        os.getenv("SECRET_SALT", "secret_salt"),
    ),
    path="/ws/secure",
    # PHP-style cookie name, presumably for compatibility with an
    # existing deployment — TODO confirm
    session_cookie="PHPSESSID",
)

# include login routes to manage user
app.include_router(login_router)
+
+
+@app.get("/")
+async def index(request: Request, user: User = Depends(current_user)):
+    """Route to demo user login.
+    """
+    if user:
+        link = f"""
+            Logged in as {user.email}<br/>
+            <a href="{request.url_for("logout")}">Logout</a>
+        """
+    else:
+        link = f'<a href="{request.url_for("login")}">Login</a>'
+    return Response(
+        f"""<!doctype html>
+<html>
+<body>
+    {link}
+    <pre>{json.dumps(request.session, indent=2)}</pre>
+</body>
+</html>""",
+        media_type="text/html",
+    )
diff --git a/geomagio/api/secure/encryption.py b/geomagio/api/secure/encryption.py
new file mode 100644
index 0000000000000000000000000000000000000000..8f405df6f3a52782739f30a6a538d7658b80c8eb
--- /dev/null
+++ b/geomagio/api/secure/encryption.py
@@ -0,0 +1,21 @@
+import base64
+from cryptography.fernet import Fernet
+from cryptography.hazmat.backends import default_backend
+from cryptography.hazmat.primitives import hashes
+from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC
+
+
def generate_key(password: str, salt: str) -> bytes:
    """Derive a urlsafe-base64-encoded 32-byte key for Fernet.

    Uses PBKDF2-HMAC-SHA256 with 100000 iterations.

    Parameters
    ----------
    password: secret the key is derived from.
    salt: salt mixed into the derivation.

    Returns
    -------
    urlsafe base64-encoded key bytes, suitable for Fernet().
    """
    kdf = PBKDF2HMAC(
        algorithm=hashes.SHA256(),
        length=32,
        salt=salt.encode("utf8"),
        iterations=100000,
        backend=default_backend(),
    )
    # urlsafe_b64encode returns bytes; the previous "-> str" annotation was wrong
    key = base64.urlsafe_b64encode(kdf.derive(password.encode()))
    return key
+
+
def get_fernet(password: str, salt: str):
    """Build a Fernet instance from a password and salt."""
    key = generate_key(password, salt)
    return Fernet(key)
diff --git a/geomagio/api/secure/login.py b/geomagio/api/secure/login.py
new file mode 100644
index 0000000000000000000000000000000000000000..2b7aafdeba4d2e4de1118b4127796737a8d18010
--- /dev/null
+++ b/geomagio/api/secure/login.py
@@ -0,0 +1,158 @@
+"""Module for authentication.
+
+Requires sessions to store user information (recommend ..db.SessionMiddleware)
+
+
+Configuration:
+    uses environment variables:
+
+    OPENID_CLIENT_ID      - application id, assigned by OIDC provider
+    OPENID_CLIENT_SECRET  - application secret, assigned by OIDC provider
+    OPENID_METADATA_URL   - url for OIDC provider information
+
+
+Usage:
+
+    current_user() - get user information if logged in
+
+        def login_optional(user: Optional[User] = Depends(current_user))
+
+    require_user() - require user login and group membership
+                     NOTE: this is a function generator that must be called.
+
+        def any_logged_in_user(user: User = Depends(require_user()))
+
+        def admin_users_only(user: User = Depends(require_user(allowed_groups=["admin"])))
+
+    router - APIRouter to be registered with FastAPI application:
+        app.include_router(login.router)
+
+        creates routes:
+            /authorize  - callback from OpenIDConnect provider after authentication
+            /login      - redirect to OpenIDConnect provider to authenticate
+            /logout     - logout current user
+            /user       - access current user information as json
+"""
+import logging
+import os
+from typing import Callable, List, Optional
+
+from authlib.integrations.starlette_client import OAuth
+from fastapi import APIRouter, Depends, HTTPException
+from pydantic import BaseModel
+from starlette.requests import Request
+from starlette.responses import RedirectResponse
+
+
class User(BaseModel):
    """Information about a logged in user.

    Attributes
    ----------
    email: user email address.
    sub: unique OAuth subject id for the user.
    groups: group memberships, used by require_user for authorization.
    name: full name, when provided by the OIDC provider.
    nickname: nickname, when provided by the OIDC provider.
    picture: profile picture url, when provided by the OIDC provider.
    """

    email: str
    sub: str  # unique OAuth subject id
    groups: List[str] = []
    name: Optional[str] = None
    nickname: Optional[str] = None
    picture: Optional[str] = None
+
+
async def current_user(request: Request) -> Optional[User]:
    """Get logged in user, or None if not logged in.

    Usage example:
        user: Optional[User] = Depends(current_user)
    """
    # user information is stored in the session by the /authorize route
    if "user" not in request.session:
        return None
    return User(**request.session["user"])
+
+
def require_user(allowed_groups: List[str] = None,) -> Callable[[Request, User], User]:
    """Create a dependency that verifies the user is in allowed_groups.

    Usage example:
        user: User = Depends(require_user(["admin"]))

    Parameters
    ----------
    allowed_groups: require user to be member of any group in list.
        None (the default) allows any logged in user.
    """

    async def verify_groups(
        request: Request, user: Optional[User] = Depends(current_user)
    ) -> User:
        if not user:
            # not logged in, redirect
            # NOTE(review): a value returned from a FastAPI dependency is
            # passed as the parameter value, not sent as the response —
            # confirm this actually redirects rather than handing a
            # RedirectResponse to the endpoint as "user"
            return RedirectResponse(request.url_for("login"))
        # when allowed_groups is set, require membership in at least one
        if allowed_groups is not None and not any(
            g in user.groups for g in allowed_groups
        ):
            logging.info(
                f"user ({user.email}, sub={user.sub})"
                f" not member of any allowed group ({allowed_groups})"
            )
            raise HTTPException(401, "Not Authorized")
        return user

    return verify_groups
+
+
oauth = OAuth()
# creates provider "oauth.openid";
# client id/secret and metadata url come from the environment
# (see OPENID_* variables in the module docstring)
oauth.register(
    name="openid",
    client_id=os.getenv("OPENID_CLIENT_ID"),
    client_secret=os.getenv("OPENID_CLIENT_SECRET"),
    server_metadata_url=os.getenv("OPENID_METADATA_URL"),
    client_kwargs={"scope": "openid email profile"},
)
# routes for login/logout
router = APIRouter()
+
+
+@router.get("/authorize")
+async def authorize(request: Request):
+    """Authorize callback after authenticating using OpenID
+    """
+    # finish login
+    token = await oauth.openid.authorize_access_token(request)
+    request.session["token"] = token
+    # add user to session
+    userinfo = await oauth.openid.userinfo(token=token)
+    request.session["user"] = dict(userinfo)
+    # redirect to original location
+    url = request.session.pop(
+        "after_authorize_redirect",
+        # or fall back to index
+        request.url_for("index"),
+    )
+    return RedirectResponse(url=url)
+
+
+@router.get("/login")
+async def login(request: Request):
+    """Redirect to OpenID provider.
+    """
+    redirect_uri = request.url_for("authorize")
+    # save original location
+    if "Referer" in request.headers:
+        request.session["after_authorize_redirect"] = request.headers["Referer"]
+    # redirect to openid login
+    return await oauth.openid.authorize_redirect(request, redirect_uri)
+
+
+@router.get("/logout")
+async def logout(request: Request):
+    """Clear session and redirect to index page.
+    """
+    request.session.pop("token", None)
+    request.session.pop("user", None)
+    return RedirectResponse(request.url_for("index"))
+
+
+@router.get("/user")
+async def user(request: Request, user: User = Depends(require_user())) -> User:
+    """Get currently logged in user.
+    """
+    return user
diff --git a/geomagio/api/ws/DataApiQuery.py b/geomagio/api/ws/DataApiQuery.py
index 7d89d73815ded285275c6e32ee39107a24689f6f..5d7cb3b2eabbf3867681bff082c4a6ed7d09cd86 100644
--- a/geomagio/api/ws/DataApiQuery.py
+++ b/geomagio/api/ws/DataApiQuery.py
@@ -5,6 +5,7 @@ from typing import Any, Dict, List, Optional, Union
 from obspy import UTCDateTime
 from pydantic import BaseModel, root_validator, validator
 
+from ... import pydantic_utcdatetime
 from .Element import ELEMENTS, ELEMENT_INDEX
 
 DEFAULT_ELEMENTS = ["X", "Y", "Z", "F"]
@@ -59,8 +60,8 @@ class SamplingPeriod(float, enum.Enum):
 
 class DataApiQuery(BaseModel):
     id: str
-    starttime: datetime.datetime = None
-    endtime: datetime.datetime = None
+    starttime: UTCDateTime = None
+    endtime: UTCDateTime = None
     elements: List[str] = DEFAULT_ELEMENTS
     sampling_period: SamplingPeriod = SamplingPeriod.MINUTE
     data_type: Union[DataType, str] = DataType.VARIATION
@@ -100,32 +101,18 @@ class DataApiQuery(BaseModel):
             )
         return id
 
-    @validator("starttime", pre=True, always=True)
-    def validate_starttime(
-        cls, starttime: Union[datetime.datetime, datetime.date]
-    ) -> datetime.datetime:
+    @validator("starttime", always=True)
+    def validate_starttime(cls, starttime: UTCDateTime) -> UTCDateTime:
         if not starttime:
             # default to start of current day
             now = datetime.datetime.now(tz=datetime.timezone.utc)
-            return datetime.datetime(
-                year=now.year,
-                month=now.month,
-                day=now.day,
-                tzinfo=datetime.timezone.utc,
-            )
-        elif isinstance(starttime, datetime.date):
-            return datetime.datetime(
-                year=starttime.year,
-                month=starttime.month,
-                day=starttime.day,
-                tzinfo=datetime.timezone.utc,
-            )
+            return UTCDateTime(year=now.year, month=now.month, day=now.day)
         return starttime
 
-    @validator("endtime", always=True, pre=True)
+    @validator("endtime", always=True)
     def validate_endtime(
-        cls, endtime: Union[datetime.datetime, datetime.date], *, values: Dict, **kwargs
-    ) -> datetime.datetime:
+        cls, endtime: UTCDateTime, *, values: Dict, **kwargs
+    ) -> UTCDateTime:
         """Default endtime is based on starttime.
 
         This method needs to be after validate_starttime.
@@ -133,14 +120,7 @@ class DataApiQuery(BaseModel):
         if not endtime:
             # endtime defaults to 1 day after startime
             starttime = values.get("starttime")
-            endtime = starttime + datetime.timedelta(seconds=86400 - 0.001)
-        elif isinstance(endtime, datetime.date):
-            return datetime.datetime(
-                year=endtime.year,
-                month=endtime.month,
-                day=endtime.day,
-                tzinfo=datetime.timezone.utc,
-            )
+            endtime = starttime + (86400 - 0.001)
         return endtime
 
     @root_validator
@@ -157,9 +137,7 @@ class DataApiQuery(BaseModel):
         if starttime > endtime:
             raise ValueError("Starttime must be before endtime.")
         # check data volume
-        samples = int(
-            len(elements) * (endtime - starttime).total_seconds() / sampling_period
-        )
+        samples = int(len(elements) * (endtime - starttime) / sampling_period)
         if samples > REQUEST_LIMIT:
             raise ValueError(f"Request exceeds limit ({samples} > {REQUEST_LIMIT})")
         # otherwise okay
diff --git a/geomagio/api/ws/app.py b/geomagio/api/ws/app.py
index 04fcebcf456f2d2ab3c57d041487d795006900e6..2ab28a600ed3a543e5d40c35fd2c5becaed26dd6 100644
--- a/geomagio/api/ws/app.py
+++ b/geomagio/api/ws/app.py
@@ -1,9 +1,9 @@
-import datetime
 from typing import Dict, Union
 
 from fastapi import FastAPI, Request, Response
 from fastapi.exceptions import RequestValidationError
-from starlette.responses import RedirectResponse
+from fastapi.responses import JSONResponse, PlainTextResponse, RedirectResponse
+from obspy import UTCDateTime
 
 from . import data, elements
 
@@ -31,37 +31,42 @@ async def redirect_to_docs():
     return RedirectResponse("/ws/docs")
 
 
-@app.exception_handler(ValueError)
+@app.exception_handler(RequestValidationError)
 async def validation_exception_handler(request: Request, exc: RequestValidationError):
     """Value errors are user errors.
     """
-    if "format" in request.query_params:
-        data_format = str(request.query_params["format"])
+    data_format = (
+        "format" in request.query_params
+        and str(request.query_params["format"])
+        or "text"
+    )
     return format_error(400, str(exc), data_format, request)
 
 
 @app.exception_handler(Exception)
-async def validation_exception_handler(request: Request, exc: RequestValidationError):
+async def server_exception_handler(request: Request, exc: Exception):
     """Other exceptions are server errors.
     """
-    if "format" in request.query_params:
-        data_format = str(request.query_params["format"])
+    data_format = (
+        "format" in request.query_params
+        and str(request.query_params["format"])
+        or "text"
+    )
     return format_error(500, str(exc), data_format, request)
 
 
 def format_error(
     status_code: int, exception: str, format: str, request: Request
 ) -> Response:
-    """Assign error_body value based on error format."""
+    """Assign error_body value based on error format.
+    """
     if format == "json":
         return json_error(status_code, exception, request.url)
     else:
-        return Response(
-            text_error(status_code, exception, request.url), media_type="text/plain"
-        )
+        return text_error(status_code, exception, request.url)
 
 
-def json_error(code: int, error: Exception, url: str) -> Dict:
+def json_error(code: int, error: Exception, url: str) -> Response:
     """Format json error message.
 
     Returns
@@ -69,26 +74,31 @@ def json_error(code: int, error: Exception, url: str) -> Dict:
     error_body : str
         body of json error message.
     """
-    return {
-        "type": "Error",
-        "metadata": {
-            "title": ERROR_CODE_MESSAGES[code],
-            "status": code,
-            "error": str(error),
-            "generated": datetime.datetime.utcnow(),
-            "url": str(url),
+    return JSONResponse(
+        content={
+            "type": "Error",
+            "metadata": {
+                "title": ERROR_CODE_MESSAGES[code],
+                "status": code,
+                "error": str(error),
+                "generated": f"{UTCDateTime().isoformat()}Z",
+                "url": str(url),
+                "version": VERSION,
+            },
         },
-    }
+        status_code=code,
+    )
 
 
-def text_error(code: int, error: Exception, url: str) -> str:
+def text_error(code: int, error: Exception, url: str) -> Response:
     """Format error message as plain text
 
     Returns
     -------
     error message formatted as plain text.
     """
-    return f"""Error {code}: {ERROR_CODE_MESSAGES[code]}
+    return PlainTextResponse(
+        content=f"""Error {code}: {ERROR_CODE_MESSAGES[code]}
 
 {error}
 
@@ -98,8 +108,10 @@ Request:
 {url}
 
 Request Submitted:
-{datetime.datetime.utcnow().isoformat()}
+{UTCDateTime().isoformat()}Z
 
 Service Version:
 {VERSION}
-"""
+""",
+        status_code=code,
+    )
diff --git a/geomagio/api/ws/data.py b/geomagio/api/ws/data.py
index 128126eac3235639aa4b370c85816a141ece29ad..16638de4a17e943aee01612a488aff16a16a505e 100644
--- a/geomagio/api/ws/data.py
+++ b/geomagio/api/ws/data.py
@@ -1,4 +1,3 @@
-import datetime
 import os
 from typing import Any, Dict, List, Union
 
@@ -67,8 +66,8 @@ def get_timeseries(data_factory: TimeseriesFactory, query: DataApiQuery) -> Stre
     """
     # get data
     timeseries = data_factory.get_timeseries(
-        starttime=UTCDateTime(query.starttime),
-        endtime=UTCDateTime(query.endtime),
+        starttime=query.starttime,
+        endtime=query.endtime,
         observatory=query.id,
         channels=query.elements,
         type=query.data_type,
@@ -83,8 +82,8 @@ router = APIRouter()
 @router.get("/data/")
 def get_data(
     id: str,
-    starttime: Union[datetime.datetime, datetime.date] = Query(None),
-    endtime: Union[datetime.datetime, datetime.date] = Query(None),
+    starttime: UTCDateTime = Query(None),
+    endtime: UTCDateTime = Query(None),
     elements: List[str] = Query(DEFAULT_ELEMENTS),
     sampling_period: Union[SamplingPeriod, float] = Query(SamplingPeriod.MINUTE),
     data_type: Union[DataType, str] = Query(DataType.ADJUSTED),
diff --git a/geomagio/pydantic_utcdatetime.py b/geomagio/pydantic_utcdatetime.py
new file mode 100644
index 0000000000000000000000000000000000000000..31a2bb39b79ffe6c45d5ec4d81508f8d0b16bf14
--- /dev/null
+++ b/geomagio/pydantic_utcdatetime.py
@@ -0,0 +1,62 @@
+"""Configure pydantic to allow UTCDateTime attributes on models.
+"""
+from datetime import datetime
+from typing import Any, Callable, Dict, List, Tuple, TypeVar, Union
+
+from obspy import UTCDateTime
+from pydantic.errors import PydanticValueError
+import pydantic.json
+import pydantic.schema
+import pydantic.validators
+
+
+# placeholder type for register_custom_pydantic_type method
+CustomType = TypeVar("CustomType")
+
+
def register_custom_pydantic_type(
    custom_type: CustomType,
    encoder: Callable[[CustomType], Any],
    json_schema: Dict,
    parsers: List[Callable[[Any], CustomType]],
):
    """Register encoder, schema, and parsers for a type with pydantic.

    Idempotent: marks the type with __custom_pydantic_type__ and returns
    early on repeat calls.

    NOTE(review): mutates pydantic module-level registries, including the
    private validators._VALIDATORS — these internals may change between
    pydantic versions (the Pipfile pins pydantic==1.4).

    Parameters
    ----------
    custom_type: the type to register.
    encoder: converts a custom_type value for JSON serialization.
    json_schema: openapi schema fragment describing the type.
    parsers: callables that convert input values to custom_type.
    """
    try:
        if custom_type.__custom_pydantic_type__:
            # already registered
            return
    except AttributeError:
        # not registered yet
        pass
    # add encoder used when serializing models to JSON
    pydantic.json.ENCODERS_BY_TYPE[custom_type] = encoder
    # add openapi mapping
    pydantic.schema.field_class_to_schema += ((custom_type, json_schema),)
    # add validator
    pydantic.validators._VALIDATORS.append((custom_type, parsers))
    # mark as installed
    custom_type.__custom_pydantic_type__ = True
+
+
class UTCDateTimeError(PydanticValueError):
    """Validation error raised when a value cannot be parsed as UTCDateTime."""

    msg_template = "invalid date-time format"
+
+
def format_utcdatetime(o: UTCDateTime) -> str:
    """Encode a UTCDateTime as an ISO 8601 string (pydantic JSON encoder)."""
    formatted = o.isoformat()
    return formatted
+
+
def parse_utcdatetime(
    value: Union[datetime, float, int, str, UTCDateTime]
) -> UTCDateTime:
    """Parse a value into a UTCDateTime (pydantic validator).

    Raises
    ------
    UTCDateTimeError
        if the value cannot be parsed.
    """
    try:
        return UTCDateTime(value)
    except Exception as e:
        # was a bare "except:", which would also swallow
        # KeyboardInterrupt/SystemExit; chain the cause for debugging
        raise UTCDateTimeError() from e
+
+
# install UTCDateTime support into pydantic's registries at import time,
# so models importing this module can declare UTCDateTime fields
register_custom_pydantic_type(
    UTCDateTime,
    encoder=format_utcdatetime,
    json_schema={"type": "string", "format": "date-time"},
    parsers=[parse_utcdatetime],
)
diff --git a/geomagio/residual/Absolute.py b/geomagio/residual/Absolute.py
new file mode 100644
index 0000000000000000000000000000000000000000..d927757af2167ffc771d6bf7252580ff7a49dae3
--- /dev/null
+++ b/geomagio/residual/Absolute.py
@@ -0,0 +1,41 @@
+from typing import Optional
+
+from obspy import UTCDateTime
+from pydantic import BaseModel
+
+from .. import pydantic_utcdatetime
+
+
class Absolute(BaseModel):
    """Computed absolute and baseline measurement.

    Attributes
    ----------
    element: the absolute and baseline component.
    absolute: absolute measurement.
        nT or ?radians?
    baseline: baseline measurement.
        nT or ?radians?
    starttime: time of first measurement used.
    endtime: time of last measurement used.
    shift: used to correct polarity.
    valid: whether values are considered valid.
    """

    element: str
    absolute: Optional[float] = None
    baseline: Optional[float] = None
    starttime: Optional[UTCDateTime] = None
    endtime: Optional[UTCDateTime] = None
    shift: float = 0
    valid: bool = True

    def is_valid(self) -> bool:
        """Return True when marked valid and all measured fields are set."""
        required = (
            self.absolute,
            self.baseline,
            self.element,
            self.endtime,
            self.starttime,
        )
        return self.valid and all(value is not None for value in required)
diff --git a/geomagio/residual/Angle.py b/geomagio/residual/Angle.py
new file mode 100644
index 0000000000000000000000000000000000000000..06ce1c89354bd7e4726dcc9677a118ec084c3c68
--- /dev/null
+++ b/geomagio/residual/Angle.py
@@ -0,0 +1,13 @@
+from typing import List
+
+
def from_dms(degrees: float = 0, minutes: float = 0, seconds: float = 0) -> float:
    """Convert degrees, minutes, seconds to decimal degrees"""
    return sum((degrees, minutes / 60.0, seconds / 3600.0))
+
+
def to_dms(degrees: float) -> List[float]:
    """Convert decimal degrees to degrees, minutes, seconds"""
    # NOTE(review): int() truncates toward zero, so negative inputs yield
    # mixed-sign components — confirm callers only pass positive angles
    whole_degrees = int(degrees)
    minutes = (degrees - whole_degrees) * 60
    whole_minutes = int(minutes)
    return [whole_degrees, whole_minutes, (minutes - whole_minutes) * 60]
diff --git a/geomagio/residual/CalFileFactory.py b/geomagio/residual/CalFileFactory.py
new file mode 100644
index 0000000000000000000000000000000000000000..baf8f87243be6ef9b4513d535a39cae26b3263d7
--- /dev/null
+++ b/geomagio/residual/CalFileFactory.py
@@ -0,0 +1,90 @@
+"""Factory for CalFiles, used by MagProc.
+"""
+from __future__ import print_function
+
+from typing import List
+import itertools
+from io import StringIO
+
+from .Absolute import Absolute
+from .Reading import Reading
+
+
class CalFileFactory(object):
    """Formats absolute measurements as CAL files (used by MagProc)."""

    def format_readings(self, readings: List[Reading]) -> str:
        """Format the absolutes from all readings as one CAL file."""
        # flatten absolutes from every reading
        all_absolutes = [a for reading in readings for a in reading.absolutes]
        return self._format_absolutes(all_absolutes)

    def _format_absolutes(self, absolutes: List[Absolute]) -> str:
        """Format valid absolutes, grouped by date and ordered H, D, Z."""
        buf = StringIO()
        # keep only valid absolutes, in time order
        usable = sorted(
            (a for a in absolutes if a.is_valid()), key=lambda a: a.starttime
        )
        # sorted by starttime, so same-date entries are consecutive
        for date, day_group in itertools.groupby(
            usable, key=lambda a: a.starttime.date
        ):
            day_absolutes = list(day_group)
            # within each day, order by H, then D, then Z
            for element in ("H", "D", "Z"):
                matching = [a for a in day_absolutes if a.element == element]
                if not matching:
                    # no matching values
                    continue
                # channel header
                buf.write(f"--{date:%Y %m %d} ({element})\n")
                for a in matching:
                    absolute, baseline = a.absolute, a.baseline
                    if element == "D":  # convert to minutes
                        absolute, baseline = absolute * 60, baseline * 60
                    buf.write(  # this is one line...
                        f"{a.starttime.datetime:%H%M}-{a.endtime.datetime:%H%M}"
                        f" c{baseline:9.1f}{absolute:9.1f}\n"
                    )
        # add new line to end
        buf.write("\n")
        return buf.getvalue()
+
+
+"""
+CAL format example:
+- ordered by date
+- within date, order by H, then D, then Z component
+- component values order by start time
+- D component values in minutes.
+
+
+--2015 03 30 (H)
+2140-2143 c    175.0  12531.3
+2152-2156 c    174.9  12533.3
+2205-2210 c    174.8  12533.1
+2220-2223 c    174.9  12520.7
+--2015 03 30 (D)
+2133-2137 c   1128.3   1118.5
+2145-2149 c   1128.4   1116.4
+2159-2203 c   1128.3   1113.1
+2212-2216 c   1128.4   1113.5
+--2015 03 30 (Z)
+2140-2143 c    -52.9  55403.4
+2152-2156 c    -52.8  55403.8
+2205-2210 c    -52.8  55404.0
+2220-2223 c    -52.8  55410.5
+--2015 07 27 (H)
+2146-2151 c    173.5  12542.5
+2204-2210 c    173.8  12542.5
+2225-2229 c    173.8  12547.2
+2240-2246 c    173.6  12538.7
+--2015 07 27 (D)
+2137-2142 c   1127.8   1109.2
+2154-2158 c   1128.3   1106.3
+2213-2220 c   1128.0   1106.3
+2232-2237 c   1128.3   1104.7
+--2015 07 27 (Z)
+2146-2151 c    -53.9  55382.7
+2204-2210 c    -54.0  55382.5
+2225-2229 c    -54.1  55383.7
+2240-2246 c    -54.1  55389.0
+"""
diff --git a/geomagio/residual/Measurement.py b/geomagio/residual/Measurement.py
new file mode 100644
index 0000000000000000000000000000000000000000..03d968261b321b7a6953b689b74ab12153970d6b
--- /dev/null
+++ b/geomagio/residual/Measurement.py
@@ -0,0 +1,24 @@
+from typing import Optional
+
+from obspy.core import UTCDateTime
+from pydantic import BaseModel
+
+from .. import pydantic_utcdatetime
+from .MeasurementType import MeasurementType
+
+
class Measurement(BaseModel):
    """One angle and time measurement with optional residual.

    Attributes
    ----------
    measurement_type: type of measurement (see MeasurementType).
    angle: measured angle, decimal degrees.
    residual: residual at time of measurement.
    time: when measurement was taken; None when not recorded.
    """

    measurement_type: MeasurementType
    angle: float = 0
    residual: float = 0
    time: Optional[UTCDateTime] = None
diff --git a/geomagio/residual/MeasurementType.py b/geomagio/residual/MeasurementType.py
new file mode 100644
index 0000000000000000000000000000000000000000..5a00d90179d785abc26c77a9c9d654f5534efbcb
--- /dev/null
+++ b/geomagio/residual/MeasurementType.py
@@ -0,0 +1,26 @@
+import enum
+
+
class MeasurementType(str, enum.Enum):
    """Measurement types used during absolutes.

    Subclasses ``str`` so members compare equal to (and serialize as) their
    string values; these values match the "type" field returned by the web
    absolutes service (see WebAbsolutesFactory._parse_measurement).
    """

    # declination
    FIRST_MARK_UP = "FirstMarkUp"
    FIRST_MARK_DOWN = "FirstMarkDown"
    WEST_DOWN = "WestDown"
    EAST_DOWN = "EastDown"
    WEST_UP = "WestUp"
    EAST_UP = "EastUp"
    SECOND_MARK_UP = "SecondMarkUp"
    SECOND_MARK_DOWN = "SecondMarkDown"

    # meridian
    # meridian is the average of declination measurements
    # but recorded because calculated and used during inclination measurements.
    MERIDIAN = "Meridian"

    # inclination
    SOUTH_DOWN = "SouthDown"
    NORTH_UP = "NorthUp"
    SOUTH_UP = "SouthUp"
    NORTH_DOWN = "NorthDown"
diff --git a/geomagio/residual/Reading.py b/geomagio/residual/Reading.py
new file mode 100644
index 0000000000000000000000000000000000000000..d324f147467ffa15f9056ce59e9ab55dfb92233d
--- /dev/null
+++ b/geomagio/residual/Reading.py
@@ -0,0 +1,50 @@
+import collections
+from typing import Dict, List, Optional
+
+from pydantic import BaseModel
+
+from .Absolute import Absolute
+from .Measurement import Measurement
+from .MeasurementType import MeasurementType
+
+
class Reading(BaseModel):
    """A collection of absolute measurements.

    Attributes
    ----------
    absolutes: absolutes computed from measurements.
    azimuth: azimuth angle to mark used for measurements, decimal degrees.
    hemisphere: 1 for northern hemisphere, -1 for southern hemisphere
    measurements: raw measurements used to compute absolutes.
    metadata: metadata used during absolute calculations.
    pier_correction: pier correction value, nT.
    """

    absolutes: Optional[List[Absolute]] = None
    azimuth: float = 0
    hemisphere: float = 1  # maybe hemisphere should be calculated from latitude
    measurements: Optional[List[Measurement]] = []
    # default was previously `[]`, which is a list and not a valid value for a
    # Dict-typed field; None matches the Optional annotation
    metadata: Optional[Dict] = None
    pier_correction: float = 0

    def absolutes_index(self) -> Dict[str, Absolute]:
        """Generate index of absolutes keyed by element.

        Expects ``absolutes`` to be populated; raises TypeError when it is
        still None.
        """
        return {a.element: a for a in self.absolutes}

    def calculate_absolutes(self) -> List[Absolute]:
        """Use measurements and metadata to (re)calculate absolutes.
        """
        raise NotImplementedError("TODO: implement this")

    def measurement_index(self) -> Dict[MeasurementType, List[Measurement]]:
        """Generate index of measurements keyed by MeasurementType.

        Any missing MeasurementType returns an empty list.
        There may be multiple measurements of each MeasurementType.
        """
        index = collections.defaultdict(list)
        for m in self.measurements:
            index[m.measurement_type].append(m)
        return index
diff --git a/geomagio/residual/SpreadsheetAbsolutesFactory.py b/geomagio/residual/SpreadsheetAbsolutesFactory.py
new file mode 100644
index 0000000000000000000000000000000000000000..fbbccd92017a9a5e820d48fdb413e7b8c7de79a4
--- /dev/null
+++ b/geomagio/residual/SpreadsheetAbsolutesFactory.py
@@ -0,0 +1,219 @@
+import os
+from typing import Dict, IO, List, Mapping, Optional, Union
+
+from obspy.core import UTCDateTime
+import openpyxl
+
+from .Absolute import Absolute
+from .Measurement import Measurement
+from .MeasurementType import MeasurementType as mt
+from .Reading import Reading
+from . import Angle
+
+
# Cell locations of each measurement within a residual "measurement" sheet.
# Each entry maps a MeasurementType to spreadsheet cell references:
#   "angle":    cell holding the measured angle (always present)
#   "residual": cell holding the residual (declination/inclination only)
#   "time":     cell holding the relative time on the observation date
SPREADSHEET_MEASUREMENTS = [
    # first mark
    {"type": mt.FIRST_MARK_UP, "angle": "A13"},
    {"type": mt.FIRST_MARK_UP, "angle": "B13"},
    {"type": mt.FIRST_MARK_DOWN, "angle": "C13"},
    {"type": mt.FIRST_MARK_DOWN, "angle": "D13"},
    # declination
    {"type": mt.WEST_DOWN, "angle": "C19", "residual": "E19", "time": "B19"},
    {"type": mt.WEST_DOWN, "angle": "C20", "residual": "E20", "time": "B20"},
    {"type": mt.EAST_DOWN, "angle": "C21", "residual": "E21", "time": "B21"},
    {"type": mt.EAST_DOWN, "angle": "C22", "residual": "E22", "time": "B22"},
    {"type": mt.WEST_UP, "angle": "C23", "residual": "E23", "time": "B23"},
    {"type": mt.WEST_UP, "angle": "C24", "residual": "E24", "time": "B24"},
    {"type": mt.EAST_UP, "angle": "C25", "residual": "E25", "time": "B25"},
    {"type": mt.EAST_UP, "angle": "C26", "residual": "E26", "time": "B26"},
    # second mark
    {"type": mt.SECOND_MARK_UP, "angle": "A31"},
    {"type": mt.SECOND_MARK_UP, "angle": "B31"},
    {"type": mt.SECOND_MARK_DOWN, "angle": "C31"},
    {"type": mt.SECOND_MARK_DOWN, "angle": "D31"},
    # meridian
    {"type": mt.MERIDIAN, "angle": "C37"},
    # inclination
    {"type": mt.SOUTH_DOWN, "angle": "D37", "residual": "E37", "time": "B37"},
    {"type": mt.SOUTH_DOWN, "angle": "D38", "residual": "E38", "time": "B38"},
    {"type": mt.NORTH_UP, "angle": "D39", "residual": "E39", "time": "B39"},
    {"type": mt.NORTH_UP, "angle": "D40", "residual": "E40", "time": "B40"},
    {"type": mt.SOUTH_UP, "angle": "D41", "residual": "E41", "time": "B41"},
    {"type": mt.SOUTH_UP, "angle": "D42", "residual": "E42", "time": "B42"},
    {"type": mt.NORTH_DOWN, "angle": "D43", "residual": "E43", "time": "B43"},
    {"type": mt.NORTH_DOWN, "angle": "D44", "residual": "E44", "time": "B44"},
    {"type": mt.NORTH_DOWN, "angle": "D45", "residual": "E45", "time": "B45"},
]
+
+
def parse_relative_time(base_date: str, time: str) -> Optional[UTCDateTime]:
    """Parse a time relative to a base date.

    Arguments
    ---------
    base_date: date when time occurs (YYYYMMDD)
    time: time on base_date (HHMMSS)
        left padded with zeros to 6 characters

    Returns
    -------
    parsed UTCDateTime, or None when parsing fails (the error is
    printed; callers treat None as "no time").
    """
    try:
        # :06 zero-pads numeric cell values (e.g. 93000 -> "093000")
        return UTCDateTime(f"{base_date}T{time:06}")
    except Exception as e:
        # do not re-apply the :06 format spec here: if formatting `time`
        # is what raised, the original message would raise again itself
        print(f"error parsing relative date '{base_date}T{time}': {e}")
        return None
+
+
class SpreadsheetAbsolutesFactory(object):
    """Read absolutes from residual spreadsheets.

    Attributes
    ----------
    base_directory: directory where spreadsheets exist.
        Assumed structure is base/OBS/YEAR/OBS/*.xlsm
        Where each xlsm file is named OBS-YEARJULHHMM.xlsm
    """

    def __init__(self, base_directory="/Volumes/geomag/pub/observatories"):
        self.base_directory = base_directory

    def get_readings(
        self,
        observatory: str,
        starttime: UTCDateTime,
        endtime: UTCDateTime,
        include_measurements: bool = True,
    ) -> List[Reading]:
        """Read spreadsheet files between starttime/endtime.

        Relies on OBS-YEARJULHHMM.xlsm filenames sorting chronologically
        to select files in range.
        """
        readings = []
        start_filename = f"{observatory}-{starttime.datetime:%Y%j%H%M}.xlsm"
        end_filename = f"{observatory}-{endtime.datetime:%Y%j%H%M}.xlsm"
        for year in range(starttime.year, endtime.year + 1):
            # start in observatory year directory to scan fewer files
            observatory_directory = os.path.join(
                self.base_directory, observatory, f"{year}", observatory
            )
            for (dirpath, dirnames, filenames) in os.walk(observatory_directory):
                for filename in filenames:
                    if start_filename <= filename < end_filename:
                        readings.append(
                            self.parse_spreadsheet(
                                os.path.join(dirpath, filename),
                                # forward flag (was previously ignored)
                                include_measurements=include_measurements,
                            )
                        )
        return readings

    def parse_spreadsheet(self, path: str, include_measurements=True) -> Reading:
        """Parse a residual spreadsheet file.

        Be sure to check Reading metadata for errors.
        """
        workbook = openpyxl.load_workbook(path, data_only=True)
        constants_sheet = workbook["constants"]
        measurement_sheet = workbook["measurement"]
        summary_sheet = workbook["Summary"]
        metadata = self._parse_metadata(
            constants_sheet, measurement_sheet, summary_sheet
        )
        absolutes = self._parse_absolutes(summary_sheet, metadata["date"])
        measurements = (
            self._parse_measurements(measurement_sheet, metadata["date"])
            if include_measurements
            else None
        )
        return Reading(
            absolutes=absolutes,
            azimuth=metadata["mark_azimuth"],
            hemisphere=metadata["hemisphere"],
            measurements=measurements,
            metadata=metadata,
            pier_correction=metadata["pier_correction"],
        )

    def _parse_absolutes(
        self, sheet: openpyxl.worksheet, base_date: str
    ) -> List[Absolute]:
        """Parse D, H, and Z absolutes from a summary sheet.

        D is stored as degrees+minutes cells; H and Z cells hold values
        directly. Start and end times use the same summary cell.
        """
        absolutes = [
            Absolute(
                element="D",
                absolute=Angle.from_dms(
                    degrees=sheet["C12"].value, minutes=sheet["D12"].value
                ),
                baseline=Angle.from_dms(minutes=sheet["F12"].value),
                endtime=parse_relative_time(base_date, sheet["B12"].value),
                starttime=parse_relative_time(base_date, sheet["B12"].value),
            ),
            Absolute(
                element="H",
                absolute=sheet["C17"].value,
                baseline=sheet["F17"].value,
                endtime=parse_relative_time(base_date, sheet["B17"].value),
                starttime=parse_relative_time(base_date, sheet["B17"].value),
            ),
            Absolute(
                element="Z",
                absolute=sheet["C22"].value,
                baseline=sheet["F22"].value,
                endtime=parse_relative_time(base_date, sheet["B22"].value),
                starttime=parse_relative_time(base_date, sheet["B22"].value),
            ),
        ]
        return absolutes

    def _parse_measurements(
        self, sheet: openpyxl.worksheet, base_date: str
    ) -> List[Measurement]:
        """Parse measurements from a measurement sheet.

        Cells to read are defined by SPREADSHEET_MEASUREMENTS.
        """
        measurements = []
        for m in SPREADSHEET_MEASUREMENTS:
            measurement_type = m["type"]
            # explicit conditionals (not `x and y or None`) so that a
            # legitimate 0.0 angle/residual is not coerced to None
            angle = sheet[m["angle"]].value if "angle" in m else None
            residual = sheet[m["residual"]].value if "residual" in m else None
            time = (
                parse_relative_time(base_date, sheet[m["time"]].value)
                if "time" in m
                else None
            )
            measurements.append(
                Measurement(
                    measurement_type=measurement_type,
                    angle=angle,
                    residual=residual,
                    time=time,
                )
            )
        return measurements

    def _parse_metadata(
        self,
        constants_sheet: openpyxl.worksheet,
        measurement_sheet: openpyxl.worksheet,
        summary_sheet: openpyxl.worksheet,
    ) -> Dict:
        """Parse metadata from various sheets.

        Parsing problems are collected in the returned "errors" list
        rather than raised.
        """
        errors = []
        mark_azimuth = None
        try:
            azimuth_number = measurement_sheet["F8"].value
            mark_azimuth = Angle.from_dms(
                minutes=constants_sheet[f"F{azimuth_number + 5}"].value
            )
        except Exception:
            # narrowed from bare `except:` so KeyboardInterrupt/SystemExit
            # still propagate; any sheet/parse error means no azimuth
            errors.append("Unable to read mark azimuth")
        year = measurement_sheet["B8"].value
        return {
            # pad in case month starts with zero (which is trimmed)
            "date": f"{year}{measurement_sheet['C8'].value:04}",
            "di_scale": measurement_sheet["K8"].value,
            "errors": errors,
            "hemisphere": measurement_sheet["J8"].value,
            "instrument": f"{summary_sheet['B4'].value}",
            "mark_azimuth": mark_azimuth,
            "observer": measurement_sheet["E8"].value,
            "pier_correction": constants_sheet["H6"].value,
            "pier_name": summary_sheet["B5"].value,
            "station": measurement_sheet["A8"].value,
            "temperature": constants_sheet["J58"].value,
            "year": year,
        }
diff --git a/geomagio/residual/WebAbsolutesFactory.py b/geomagio/residual/WebAbsolutesFactory.py
new file mode 100644
index 0000000000000000000000000000000000000000..f3f062756abeec805911b12c4e12b50efa481027
--- /dev/null
+++ b/geomagio/residual/WebAbsolutesFactory.py
@@ -0,0 +1,108 @@
import json
import urllib
import urllib.parse
import urllib.request
from typing import Dict, IO, List, Mapping, Optional, Union
+
+from obspy.core import UTCDateTime
+
+from .Absolute import Absolute
+from .Measurement import Measurement
+from .MeasurementType import MeasurementType
+from .Reading import Reading
+
+
class WebAbsolutesFactory(object):
    """Read absolutes from the web absolutes service.

    Attributes
    ----------
    url: base url of the observation.json.php endpoint.
    """

    def __init__(
        self, url: str = "https://geomag.usgs.gov/baselines/observation.json.php"
    ):
        self.url = url

    def get_readings(
        self,
        observatory: str,
        starttime: UTCDateTime,
        endtime: UTCDateTime,
        include_measurements: bool = True,
    ) -> List[Reading]:
        """Get readings from the Web Absolutes Service."""
        query = urllib.parse.urlencode(
            {
                "observatory": observatory,
                "starttime": starttime.isoformat(),
                "endtime": endtime.isoformat(),
                "includemeasurements": "true" if include_measurements else "false",
            }
        )
        with urllib.request.urlopen(f"{self.url}?{query}") as response:
            return self.parse_json(response)

    def parse_json(self, jsonstr: IO[str]) -> List[Reading]:
        """Parse readings from the web absolutes JSON format.
        """
        response = json.load(jsonstr)
        readings = []
        for observation in response["data"]:
            # observation-level metadata is shared by all of its readings
            metadata = self._parse_metadata(observation)
            for reading_json in observation["readings"]:
                readings.append(self._parse_reading(metadata, reading_json))
        return readings

    def _parse_absolute(self, element: str, data: Mapping) -> Absolute:
        """Build one element's Absolute from its json object."""
        start = data["start"]
        end = data["end"]
        return Absolute(
            element=element,
            absolute=data["absolute"],
            baseline=data["baseline"],
            starttime=UTCDateTime(start) if start else None,
            endtime=UTCDateTime(end) if end else None,
            shift=data.get("shift") or 0,
            valid=data["valid"],
        )

    def _parse_measurement(self, data: Mapping) -> Measurement:
        """Build a Measurement from its json object."""
        time = data["time"]
        return Measurement(
            measurement_type=MeasurementType(data["type"]),
            # the web service does not report residuals
            residual=0,
            angle=data["angle"],
            time=UTCDateTime(time) if time else None,
        )

    def _parse_metadata(self, data: Mapping) -> Dict:
        """Extract observation-level metadata from observation json."""
        return {
            "time": data["time"],
            "reviewed": data["reviewed"],
            "electronics": data["electronics"]["serial"],
            "theodolite": data["theodolite"]["serial"],
            "mark_name": data["mark"]["name"],
            "mark_azimuth": data["mark"]["azimuth"],
            "pier_name": data["pier"]["name"],
            "pier_correction": data["pier"]["correction"],
            "observer": data["observer"],
            "reviewer": data["reviewer"],
        }

    def _parse_reading(self, metadata: Mapping, data: Mapping) -> Reading:
        """Parse absolutes and measurements from Reading json.
        """
        absolutes = []
        for element in ["D", "H", "Z"]:
            if element in data:
                absolutes.append(self._parse_absolute(element, data[element]))
        if "measurements" in data:
            measurements = [self._parse_measurement(m) for m in data["measurements"]]
        else:
            measurements = []
        return Reading(
            absolutes=absolutes,
            azimuth=metadata.get("mark_azimuth") or 0,
            hemisphere=1,
            measurements=measurements,
            metadata=metadata,
            pier_correction=metadata.get("pier_correction") or 0,
        )
diff --git a/geomagio/residual/__init__.py b/geomagio/residual/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..9ec21abb70a7b1603e4008d45ce4b09ddf9aa3d8
--- /dev/null
+++ b/geomagio/residual/__init__.py
@@ -0,0 +1,22 @@
+# residual module
+from __future__ import absolute_import
+
+from .Absolute import Absolute
+from . import Angle
+from .CalFileFactory import CalFileFactory
+from .Measurement import Measurement
+from .MeasurementType import MeasurementType
+from .Reading import Reading
+from .SpreadsheetAbsolutesFactory import SpreadsheetAbsolutesFactory
+from .WebAbsolutesFactory import WebAbsolutesFactory
+
+__all__ = [
+    "Absolute",
+    "Angle",
+    "CalFileFactory",
+    "Measurement",
+    "MeasurementType",
+    "Reading",
+    "SpreadsheetAbsolutesFactory",
+    "WebAbsolutesFactory",
+]
diff --git a/geomagio/webservice/__init__.py b/geomagio/webservice/__init__.py
deleted file mode 100644
index c101e5f89e0dca0c4aa692825d4ac07628fd32ce..0000000000000000000000000000000000000000
--- a/geomagio/webservice/__init__.py
+++ /dev/null
@@ -1,6 +0,0 @@
-from __future__ import absolute_import
-
-from .app import create_app
-
-
-__all__ = ["create_app"]
diff --git a/geomagio/webservice/app.py b/geomagio/webservice/app.py
deleted file mode 100644
index 10d2cc305e38cefab52ac1edf4e7ea6401fe2e68..0000000000000000000000000000000000000000
--- a/geomagio/webservice/app.py
+++ /dev/null
@@ -1,26 +0,0 @@
-from werkzeug.middleware.dispatcher import DispatcherMiddleware
-from werkzeug.serving import run_simple
-
-from .public_app import create_app as create_public_app
-from .restricted_app import create_app as create_restricted_app
-
-
-def create_app():
-    public_app = create_public_app()
-    restricted_app = create_restricted_app()
-    application = DispatcherMiddleware(public_app, {"/restricted": restricted_app})
-
-    return application
-
-
-application = create_app()
-
-if __name__ == "__main__":
-    run_simple(
-        hostname="localhost",
-        port=5000,
-        application=application,
-        use_reloader=True,
-        use_debugger=True,
-        use_evalex=True,
-    )
diff --git a/geomagio/webservice/data.py b/geomagio/webservice/data.py
deleted file mode 100644
index 8abc9805a7854149e98359fdeed05d83f8c65e65..0000000000000000000000000000000000000000
--- a/geomagio/webservice/data.py
+++ /dev/null
@@ -1,405 +0,0 @@
-from collections import OrderedDict
-from datetime import datetime
-from flask import Blueprint, Flask, jsonify, render_template, request, Response
-from json import dumps
-from obspy import UTCDateTime
-import os
-
-from ..edge import EdgeFactory
-from ..iaga2002 import IAGA2002Writer
-from ..imfjson import IMFJSONWriter
-from ..TimeseriesUtility import get_interval_from_delta
-
-
-DEFAULT_DATA_TYPE = "variation"
-DEFAULT_ELEMENTS = ["X", "Y", "Z", "F"]
-DEFAULT_OUTPUT_FORMAT = "iaga2002"
-DEFAULT_SAMPLING_PERIOD = "60"
-ERROR_CODE_MESSAGES = {
-    204: "No Data",
-    400: "Bad Request",
-    404: "Not Found",
-    409: "Conflict",
-    500: "Internal Server Error",
-    501: "Not Implemented",
-    503: "Service Unavailable",
-}
-REQUEST_LIMIT = 345600
-VALID_DATA_TYPES = ["variation", "adjusted", "quasi-definitive", "definitive"]
-VALID_ELEMENTS = [
-    "D",
-    "DIST",
-    "DST",
-    "E",
-    "E-E",
-    "E-N",
-    "F",
-    "G",
-    "H",
-    "SQ",
-    "SV",
-    "UK1",
-    "UK2",
-    "UK3",
-    "UK4",
-    "X",
-    "Y",
-    "Z",
-]
-VALID_OBSERVATORIES = [
-    "BDT",
-    "BOU",
-    "BRT",
-    "BRW",
-    "BSL",
-    "CMO",
-    "CMT",
-    "DED",
-    "DHT",
-    "FDT",
-    "FRD",
-    "FRN",
-    "GUA",
-    "HON",
-    "NEW",
-    "SHU",
-    "SIT",
-    "SJG",
-    "SJT",
-    "TST",
-    "TUC",
-    "USGS",
-]
-VALID_OUTPUT_FORMATS = ["iaga2002", "json"]
-VALID_SAMPLING_PERIODS = [0.1, 1, 60, 3600, 86400]
-
-
-blueprint = Blueprint("data", __name__)
-data_factory = None
-VERSION = "version"
-
-
-def init_app(app: Flask):
-    global blueprint
-    global data_factory
-    # set up data factory
-    data_factory = get_data_factory()
-    app.register_blueprint(blueprint)
-
-
-@blueprint.route("/data/", methods=["GET"])
-def get_data():
-    query_params = request.args
-    if not query_params:
-        return render_template(
-            "data/usage.html",
-            valid_data_types=VALID_DATA_TYPES,
-            valid_elements=VALID_ELEMENTS,
-            valid_observatories=VALID_OBSERVATORIES,
-            valid_sampling_periods=VALID_SAMPLING_PERIODS,
-        )
-    try:
-        parsed_query = parse_query(query_params)
-        validate_query(parsed_query)
-    except Exception as e:
-        return format_error(400, e)
-    try:
-        timeseries = get_timeseries(parsed_query)
-        return format_timeseries(timeseries, parsed_query)
-    except Exception as e:
-        return format_error(500, e)
-
-
-class WebServiceException(Exception):
-    """Base class for exceptions thrown by web services."""
-
-    pass
-
-
-class WebServiceQuery(object):
-    """Query parameters for a web service request.
-    Parameters
-    ----------
-    observatory_id : str
-        observatory
-    starttime : obspy.core.UTCDateTime
-        time of first requested sample
-    endtime : obspy.core.UTCDateTime
-        time of last requested sample
-    elements : array_like
-        list of requested elements
-    sampling_period : int
-        period between samples in seconds
-        default 60.
-    data_type : {'variation', 'adjusted', 'quasi-definitive', 'definitive'}
-        data type
-        default 'variation'.
-    output_format : {'iaga2002', 'json'}
-        output format.
-        default 'iaga2002'.
-    """
-
-    def __init__(
-        self,
-        observatory_id=None,
-        starttime=None,
-        endtime=None,
-        elements=("X", "Y", "Z", "F"),
-        sampling_period=60,
-        data_type="variation",
-        output_format="iaga2002",
-    ):
-        self.observatory_id = observatory_id
-        self.starttime = starttime
-        self.endtime = endtime
-        self.elements = elements
-        self.sampling_period = sampling_period
-        self.data_type = data_type
-        self.output_format = output_format
-
-
-def format_error(status_code, exception):
-    """Assign error_body value based on error format."""
-    if request.args.get("format") == "json":
-        return Response(json_error(status_code, exception), mimetype="application/json")
-    else:
-        return Response(iaga2002_error(status_code, exception), mimetype="text/plain")
-
-
-def format_timeseries(timeseries, query):
-    """Formats timeseries into JSON or IAGA data
-
-    Parameters
-    ----------
-    obspy.core.Stream
-        timeseries object with requested data
-
-    WebServiceQuery
-        parsed query object
-
-    Returns
-    -------
-    unicode
-        IAGA2002 or JSON formatted string.
-    """
-    if query.output_format == "json":
-        return Response(
-            IMFJSONWriter.format(timeseries, query.elements),
-            mimetype="application/json",
-        )
-    else:
-        return Response(
-            IAGA2002Writer.format(timeseries, query.elements), mimetype="text/plain",
-        )
-
-
-def get_data_factory():
-    """Reads environment variable to determine the factory to be used
-
-    Returns
-    -------
-    data_factory
-        Edge or miniseed factory object
-    """
-    data_type = os.getenv("DATA_TYPE", "edge")
-    data_host = os.getenv("DATA_HOST", "cwbpub.cr.usgs.gov")
-    data_port = os.getenv("DATA_PORT", 2060)
-
-    if data_type == "edge":
-        data_factory = EdgeFactory(host=data_host, port=data_port)
-        return data_factory
-    else:
-        return None
-
-
-def get_timeseries(query):
-    """Get timeseries data
-
-    Parameters
-    ----------
-     WebServiceQuery
-        parsed query object
-
-    Returns
-    -------
-    obspy.core.Stream
-        timeseries object with requested data
-    """
-    timeseries = data_factory.get_timeseries(
-        query.starttime,
-        query.endtime,
-        query.observatory_id,
-        query.elements,
-        query.data_type,
-        get_interval_from_delta(query.sampling_period),
-    )
-    return timeseries
-
-
-def iaga2002_error(code: int, error: Exception):
-    """Format iaga2002 error message.
-
-    Returns
-    -------
-    error_body : str
-        body of iaga2002 error message.
-    """
-    status_message = ERROR_CODE_MESSAGES[code]
-    error_body = f"""Error {code}: {status_message}
-
-{error}
-
-Usage details are available from {request.base_url}
-
-Request:
-{request.url}
-
-Request Submitted:
-{UTCDateTime().isoformat()}Z
-
-Service Version:
-{VERSION}
-"""
-    return error_body
-
-
-def json_error(code: int, error: Exception):
-    """Format json error message.
-
-    Returns
-    -------
-    error_body : str
-        body of json error message.
-    """
-    status_message = ERROR_CODE_MESSAGES[code]
-    error_dict = {
-        "type": "Error",
-        "metadata": {
-            "status": code,
-            "generated": UTCDateTime().isoformat() + "Z",
-            "url": request.url,
-            "title": status_message,
-            "error": str(error),
-        },
-    }
-    return dumps(error_dict, sort_keys=True).encode("utf8")
-
-
-def parse_query(query):
-    """Parse request arguments into a set of parameters
-
-    Parameters
-    ----------
-    query: Immutable Dict
-        request.args object
-
-    Returns
-    -------
-    WebServiceQuery
-        parsed query object
-
-    Raises
-    ------
-    WebServiceException
-        if any parameters are not supported.
-    """
-    # Get values
-    observatory_id = query.get("id")
-    starttime = query.get("starttime")
-    endtime = query.get("endtime")
-    elements = query.getlist("elements")
-    sampling_period = query.get("sampling_period", DEFAULT_SAMPLING_PERIOD)
-    data_type = query.get("type", DEFAULT_DATA_TYPE)
-    output_format = query.get("format", DEFAULT_OUTPUT_FORMAT)
-    # Parse values and set defaults
-    if len(elements) == 0:
-        elements = DEFAULT_ELEMENTS
-    if len(elements) == 1 and "," in elements[0]:
-        elements = [e.strip() for e in elements[0].split(",")]
-    if not starttime:
-        now = datetime.now()
-        starttime = UTCDateTime(year=now.year, month=now.month, day=now.day)
-    else:
-        try:
-            starttime = UTCDateTime(starttime)
-        except Exception as e:
-            raise WebServiceException(
-                f"Bad starttime value '{starttime}'."
-                " Valid values are ISO-8601 timestamps."
-            ) from e
-    if not endtime:
-        endtime = starttime + (24 * 60 * 60 - 1)
-    else:
-        try:
-            endtime = UTCDateTime(endtime)
-        except Exception as e:
-            raise WebServiceException(
-                f"Bad endtime value '{endtime}'."
-                " Valid values are ISO-8601 timestamps."
-            ) from e
-    try:
-        sampling_period = float(sampling_period)
-    except ValueError as e:
-        raise WebServiceException(
-            f"Bad sampling_period {sampling_period}"
-            ", valid values are {','.join(VALID_SAMPLING_PERIODS)}"
-        ) from e
-    # Create WebServiceQuery object and set properties
-    params = WebServiceQuery()
-    params.observatory_id = observatory_id
-    params.starttime = starttime
-    params.endtime = endtime
-    params.elements = elements
-    params.sampling_period = sampling_period
-    params.data_type = data_type
-    params.output_format = output_format
-    return params
-
-
-def validate_query(query):
-    """Verify that parameters are valid.
-
-    Parameters
-    ----------
-    query: Immutable Dict
-        request.args object
-
-    Raises
-    ------
-    WebServiceException
-        if any parameters are not supported.
-    """
-    # validate enumerated
-    if query.data_type not in VALID_DATA_TYPES:
-        raise WebServiceException(
-            f"Bad data type value '{query.data_type}'."
-            f" Valid values are: {', '.join(VALID_DATA_TYPES)}."
-        )
-    if query.observatory_id not in VALID_OBSERVATORIES:
-        raise WebServiceException(
-            f"Bad observatory id '{query.observatory_id}'."
-            f" Valid values are: {', '.join(VALID_OBSERVATORIES)}."
-        )
-    if query.output_format not in VALID_OUTPUT_FORMATS:
-        raise WebServiceException(
-            f"Bad format value '{query.output_format}'."
-            f" Valid values are: {', '.join(VALID_OUTPUT_FORMATS)}."
-        )
-    if query.sampling_period not in VALID_SAMPLING_PERIODS:
-        raise WebServiceException(
-            f"Bad sampling_period value '{query.sampling_period}'."
-            f" Valid values are: {', '.join(VALID_SAMPLING_PERIODS)}."
-        )
-    # validate combinations
-    if len(query.elements) > 4 and query.output_format == "iaga2002":
-        raise WebServiceException(
-            "No more than four elements allowed for iaga2002 format."
-        )
-    if query.starttime > query.endtime:
-        raise WebServiceException("starttime must be before endtime.")
-    # check data volume
-    samples = int(
-        len(query.elements) * (query.endtime - query.starttime) / query.sampling_period
-    )
-    if samples > REQUEST_LIMIT:
-        raise WebServiceException(f"Query exceeds request limit ({samples} > 345600)")
diff --git a/geomagio/webservice/database.py b/geomagio/webservice/database.py
deleted file mode 100644
index b156d3a157b74b4e47c8f5ffdb7b5704154d995a..0000000000000000000000000000000000000000
--- a/geomagio/webservice/database.py
+++ /dev/null
@@ -1,12 +0,0 @@
-import flask
-import flask_migrate
-import flask_sqlalchemy
-
-
-# database object
-db = flask_sqlalchemy.SQLAlchemy()
-
-
-def init_app(app: flask.Flask):
-    db.init_app(app)
-    flask_migrate.Migrate(app, db)
diff --git a/geomagio/webservice/login.py b/geomagio/webservice/login.py
deleted file mode 100644
index 8d593209fb1b1a069da0d23c31c20b0c08c9e819..0000000000000000000000000000000000000000
--- a/geomagio/webservice/login.py
+++ /dev/null
@@ -1,99 +0,0 @@
-from __future__ import absolute_import, unicode_literals, print_function
-from builtins import str
-
-import flask
-import flask_login
-import os
-from authlib.integrations.flask_client import OAuth
-
-from .database import db
-
-
-# Blueprint for auth routes
-blueprint = flask.Blueprint("login", __name__)
-login_manager = flask_login.LoginManager()
-oauth = OAuth()
-
-
-def init_app(app: flask.Flask):
-    """Flask app configuration method.
-    """
-    global blueprint
-    global login_manager
-    global oauth
-    # LoginManager
-    login_manager.init_app(app)
-    login_manager.login_view = "login.login"
-    # OpenID client
-    oauth.init_app(app)
-    # register oauth client (needs to happen after init_app)
-    # creates property "oauth.openid"
-    oauth.register(
-        name="openid",
-        client_id=os.getenv("OPENID_CLIENT_ID"),
-        client_secret=os.getenv("OPENID_CLIENT_SECRET"),
-        server_metadata_url=os.getenv("OPENID_METADATA_URL"),
-        client_kwargs={"scope": "openid email"},
-    )
-    # register blueprint routes
-    app.register_blueprint(blueprint)
-
-
-class User(db.Model, flask_login.UserMixin):
-    """User database model.
-    """
-
-    __tablename__ = "user"
-    id = db.Column(db.Integer, primary_key=True)
-    openid = db.Column(db.Text, unique=True, nullable=False)
-    email = db.Column(db.Text, unique=True, nullable=False)
-    groups = db.Column(db.Text)
-
-    def get_id(self) -> str:
-        return str(self.openid)
-
-    def to_dict(self):
-        return {
-            "id": self.id,
-            "openid": self.openid,
-            "email": self.email,
-            "groups": self.groups,
-        }
-
-
-@login_manager.user_loader
-def _load_user(user_id: str):
-    return User.query.filter_by(openid=user_id).first()
-
-
-@blueprint.route("/hello")
-@flask_login.login_required
-def hello():
-    return flask.render_template("hello.html")
-
-
-@blueprint.route("/login")
-def login():
-    redirect_uri = flask.url_for("login.authorize", _external=True)
-    return oauth.openid.authorize_redirect(redirect_uri)
-
-
-@blueprint.route("/login/callback")
-def authorize():
-    oauth.openid.authorize_access_token()
-    userinfo = oauth.openid.userinfo()
-    # check if existing user
-    user = User.query.filter_by(openid=userinfo.sub).first()
-    if not user:
-        user = User(openid=userinfo.sub, email=userinfo.email)
-        db.session.add(user)
-        db.session.commit()
-    flask_login.login_user(user)
-    return flask.redirect(flask.url_for("login.hello"))
-
-
-@blueprint.route("/logout")
-@flask_login.login_required
-def logout():
-    flask_login.logout_user()
-    return flask.redirect(flask.url_for("index"))
diff --git a/geomagio/webservice/metadata.py b/geomagio/webservice/metadata.py
deleted file mode 100644
index a87c444a8d62c5fabdead446b113033264ea3884..0000000000000000000000000000000000000000
--- a/geomagio/webservice/metadata.py
+++ /dev/null
@@ -1,89 +0,0 @@
-from __future__ import absolute_import
-
-import datetime
-from .database import db
-
-
-# known category values as constants
-CATEGORY_FLAG = "flag"
-CATEGORY_ADJUSTED_MATRIX = "adjusted-matrix"
-
-
-class Metadata(db.Model):
-    """Metadata database model.
-
-    This class is used for Data flagging and other Metadata.
-
-    Flag example:
-    ```
-    automatic_flag = Metadata(
-        created_by = 'algorithm/version',
-        start_time = UTCDateTime('2020-01-02T00:17:00.1Z'),
-        end_time = UTCDateTime('2020-01-02T00:17:00.1Z'),
-        network = 'NT',
-        station = 'BOU',
-        channel = 'BEU',
-        category = CATEGORY_FLAG,
-        comment = "spike detected",
-        priority = 1,
-        data_valid = False)
-    ```
-
-    Adjusted Matrix example:
-    ```
-    adjusted_matrix = Metadata(
-        created_by = 'algorithm/version',
-        start_time = UTCDateTime('2020-01-02T00:17:00Z'),
-        end_time = None,
-        network = 'NT',
-        station = 'BOU',
-        category = CATEGORY_ADJUSTED_MATRIX,
-        comment = 'automatic adjusted matrix',
-        priority = 1,
-        value = {
-            'parameters': {'x': 1, 'y': 2, 'z': 3}
-            'matrix': [ ... ]
-        }
-    )
-    ```
-    """
-
-    # table and primary key
-    __tablename__ = "metadata"
-    id = db.Column(db.Integer, primary_key=True)
-
-    # author
-    created_by = db.Column(db.Text, index=True)  # email/program id
-    created_time = db.Column(db.DateTime, default=datetime.datetime.utcnow)
-
-    # time range
-    start_time = db.Column(db.DateTime, index=True, nullable=True)
-    end_time = db.Column(db.DateTime, index=True, nullable=True)
-    # data this metadata applies to
-    # channel/location nullable for wildcard
-    network = db.Column(db.Text, index=True, nullable=False)
-    station = db.Column(db.Text, index=True, nullable=False)
-    channel = db.Column(db.Text, index=True, nullable=True)
-    location = db.Column(db.Text, index=True, nullable=True)
-
-    # metadata
-    # category (flag, matrix, etc)
-    category = db.Column(db.Text, index=True, nullable=False)
-    # comment
-    comment = db.Column(db.Text, nullable=True)
-    # higher priority overrides lower priority
-    priority = db.Column(db.Integer, default=1, index=True)
-    # whether data is valid during
-    data_valid = db.Column(db.Boolean, default=True, index=True)
-    # json encoded value
-    value = db.Column(db.JSON, nullable=False)
-
-    # reviewer
-    # email
-    reviewed_by = db.Column(db.Text)
-    # when reviewed
-    reviewed_time = db.Column(db.DateTime)
-    # comments by reviewer
-    review_comment = db.Column(db.Text)
-    # whether data rejected during review
-    review_reject = db.Column(db.Boolean, default=False, index=True)
diff --git a/geomagio/webservice/public_app.py b/geomagio/webservice/public_app.py
deleted file mode 100644
index ba56a46e5531ed3bbedb7908d426abe4139b29bc..0000000000000000000000000000000000000000
--- a/geomagio/webservice/public_app.py
+++ /dev/null
@@ -1,22 +0,0 @@
-from __future__ import absolute_import, unicode_literals
-
-import os
-import flask
-
-from . import data
-
-
-def create_app():
-    app = flask.Flask(__name__, instance_relative_config=True)
-    # configure using environment variables
-    app.config.update(os.environ)
-
-    # connect modules
-    data.init_app(app)
-
-    # add default route
-    @app.route("/")
-    def index():
-        return flask.render_template("index.html")
-
-    return app
diff --git a/geomagio/webservice/restricted_app.py b/geomagio/webservice/restricted_app.py
deleted file mode 100644
index 27fcf880b187aae0c9f1160539df8f7f7ab0b4a8..0000000000000000000000000000000000000000
--- a/geomagio/webservice/restricted_app.py
+++ /dev/null
@@ -1,24 +0,0 @@
-from __future__ import absolute_import, unicode_literals
-
-import os
-import flask
-
-from . import database, login, session
-
-
-def create_app():
-    app = flask.Flask(__name__, instance_relative_config=True)
-    # configure using environment variables
-    app.config.update(os.environ)
-
-    # connect modules
-    database.init_app(app)
-    login.init_app(app)
-    session.init_app(app)
-
-    # add default route
-    @app.route("/")
-    def index():
-        return "<h1>Restricted Page</h1>"
-
-    return app
diff --git a/geomagio/webservice/session.py b/geomagio/webservice/session.py
deleted file mode 100644
index 5d05014dc202651059fd28ad3bf8d9215d747156..0000000000000000000000000000000000000000
--- a/geomagio/webservice/session.py
+++ /dev/null
@@ -1,12 +0,0 @@
-from __future__ import absolute_import
-
-import flask
-import flask_session
-
-from .database import db
-
-
-def init_app(app: flask.Flask):
-    app.config["SESSION_TYPE"] = "sqlalchemy"
-    app.config["SESSION_SQLALCHEMY"] = db
-    flask_session.Session(app)
diff --git a/geomagio/webservice/static/usage.css b/geomagio/webservice/static/usage.css
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/geomagio/webservice/static/usgs-logo.svg b/geomagio/webservice/static/usgs-logo.svg
deleted file mode 100644
index e8a7548dda3f5fd4fe2e22534116ed3aea37997c..0000000000000000000000000000000000000000
--- a/geomagio/webservice/static/usgs-logo.svg
+++ /dev/null
@@ -1,136 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<!-- Generator: Adobe Illustrator 16.0.0, SVG Export Plug-In . SVG Version: 6.00 Build 0)  -->
-<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
-<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
-	 width="2353.326px" height="869.157px" viewBox="40.233 28.698 2353.326 869.157"
-	 enable-background="new 40.233 28.698 2353.326 869.157" xml:space="preserve">
-<g>
-	<g>
-		<path fill="#FFFFFF" d="M1135.755,38.678v425.73c0,89.8-50.726,184.595-203.721,184.595c-138.031,0-204.552-71.51-204.552-184.595
-			V38.678h125.561v417.414c0,74.001,31.595,103.109,77.328,103.109c54.881,0,79.823-36.592,79.823-103.109V38.678H1135.755z"/>
-		<path fill="#FFFFFF" d="M1339.936,649.003c-155.496,0-184.599-91.467-177.949-195.406h125.556
-			c0,57.382,2.496,105.603,71.52,105.603c42.403,0,63.19-27.445,63.19-68.182c0-108.098-252.779-114.749-252.779-293.523
-			c0-93.96,44.903-168.798,197.898-168.798c122.23,0,182.931,54.881,174.619,180.441h-122.234c0-44.905-7.483-90.638-58.209-90.638
-			c-40.734,0-64.849,22.452-64.849,64.027c0,113.918,252.772,103.938,252.772,292.686
-			C1549.471,630.71,1435.551,649.003,1339.936,649.003z"/>
-		<path fill="#FFFFFF" d="M2184.02,649.003c-155.49,0-184.587-91.467-177.94-195.406h125.56c0,57.382,2.493,105.603,71.508,105.603
-			c42.403,0,63.196-27.445,63.196-68.182c0-108.098-252.775-114.749-252.775-293.523c0-93.96,44.896-168.798,197.896-168.798
-			c122.225,0,182.93,54.881,174.618,180.441h-122.233c0-44.905-7.483-90.638-58.207-90.638c-40.744,0-64.859,22.452-64.859,64.027
-			c0,113.918,252.778,103.938,252.778,292.686C2393.559,630.71,2279.643,649.003,2184.02,649.003z"/>
-		<path fill="#FFFFFF" d="M1784.048,415.29h73.913v132.473l-6.15,2.289c-19.133,6.652-41.58,11.645-62.363,11.645
-			c-71.511,0-89.805-33.264-89.805-221.18c0-116.414,0-222.015,78.154-222.015c60.824,0,73.641,41.034,74.764,90.638h125.063
-			c1.013-116.497-74.022-180.441-192.34-180.441c-205.374,0-212.865,153.831-212.865,305.165
-			c0,227.834,24.701,316.231,235.906,316.231c48.979,0,126.957-11.053,162.979-20.906c2.161-0.592,6.061-2.982,6.061-6.088
-			c0-16.369,0-298.385,0-298.385h-193.316V415.29L1784.048,415.29z"/>
-	</g>
-	<g>
-		<path fill="#FFFFFF" d="M71.261,823.148c-1.235,4.83-2.007,9.451-1.25,12.805c0.962,3.355,3.755,5.039,9.63,5.039
-			s11.714-3.779,13.231-10.91c4.774-22.457-48.337-17.414-41.154-51.201c4.862-22.871,28.995-29.793,48.089-29.793
-			c20.142,0,36.345,9.65,29.805,32.516h-28.956c1.562-7.344,1.661-11.742-0.034-13.633c-1.491-1.891-3.916-2.305-7.066-2.305
-			c-6.503,0-10.964,4.193-12.483,11.322c-3.567,16.793,48.65,15.945,41.426,49.939c-3.928,18.465-21.98,32.316-46.53,32.316
-			c-25.808,0-41.631-6.508-33.665-36.096H71.261L71.261,823.148z"/>
-		<path fill="#FFFFFF" d="M197.855,788.951c3.344-15.732,0.475-20.98-8.758-20.98c-12.589,0-16.003,11.113-20.684,33.154
-			c-6.873,32.309-5.617,39.232,5.294,39.232c9.234,0,15.424-8.387,18.367-22.242h29.587c-6.112,28.746-24.271,41.131-51.967,41.131
-			c-38.189,0-38.274-23.293-31.226-56.449c6.153-28.949,15.616-53.709,54.645-53.709c27.277,0,40.218,12.172,34.329,39.863H197.855z
-			"/>
-		<path fill="#FFFFFF" d="M258.718,857.146h-29.586l22.528-105.961h29.584L258.718,857.146z M289.914,710.39l-4.951,23.289H255.38
-			l4.95-23.289H289.914z"/>
-		<path fill="#FFFFFF" d="M1643.192,857.146h-29.579l22.526-105.961h29.582L1643.192,857.146z M1674.395,710.39l-4.951,23.289
-			h-29.583l4.949-23.289H1674.395z"/>
-		<path fill="#FFFFFF" d="M315.804,808.888c-2.677,12.586-6.271,31.469,6.735,31.469c10.491,0,14.729-10.068,16.605-18.883h30.003
-			c-2.87,11.545-8.656,20.98-17.172,27.492c-8.306,6.5-19.6,10.279-33.451,10.279c-38.186,0-38.271-23.293-31.223-56.449
-			c6.154-28.949,15.615-53.709,54.643-53.709c39.869,0,38.785,25.805,30.509,59.801H315.804z M346.394,792.306
-			c2.185-10.281,5.729-24.963-8.332-24.963c-13.636,0-16.819,15.945-18.739,24.963H346.394z"/>
-		<path fill="#FFFFFF" d="M428.255,761.886h0.419c7.884-9.449,16.569-12.799,27.693-12.799c14.054,0,24.651,8.391,21.53,23.076
-			l-18.066,84.982h-29.583l15.523-73.02c2.003-9.447,1.887-16.785-7.554-16.785c-9.442,0-12.683,7.338-14.688,16.785l-15.523,73.02
-			H378.42l22.526-105.961h29.584L428.255,761.886z"/>
-		<path fill="#FFFFFF" d="M545.704,788.951c3.343-15.732,0.473-20.98-8.761-20.98c-12.592,0-16.004,11.113-20.688,33.154
-			c-6.868,32.309-5.612,39.232,5.298,39.232c9.236,0,15.425-8.387,18.366-22.242h29.587c-6.113,28.746-24.269,41.131-51.968,41.131
-			c-38.188,0-38.275-23.293-31.228-56.449c6.157-28.949,15.618-53.709,54.645-53.709c27.277,0,40.219,12.172,34.329,39.863H545.704z
-			"/>
-		<path fill="#FFFFFF" d="M613.292,808.888c-2.677,12.586-6.273,31.469,6.736,31.469c10.489,0,14.73-10.068,16.604-18.883h30.005
-			c-2.87,11.545-8.655,20.98-17.173,27.492c-8.307,6.5-19.603,10.279-33.451,10.279c-38.188,0-38.273-23.293-31.224-56.449
-			c6.153-28.949,15.613-53.709,54.64-53.709c39.868,0,38.789,25.805,30.513,59.801H613.292z M643.884,792.306
-			c2.184-10.281,5.727-24.963-8.333-24.963c-13.639,0-16.816,15.945-18.734,24.963H643.884z"/>
-		<path fill="#FFFFFF" d="M794.142,729.271c-11.779-0.836-15.277,3.777-17.556,14.482l-2.103,7.432h13.848l-3.879,18.252h-13.849
-			l-18.646,87.709h-29.583l18.644-87.709h-13.221l3.882-18.252h13.431c5.94-26.502,11.266-41.219,42.646-41.219
-			c3.567,0,6.876,0.207,10.402,0.424L794.142,729.271z"/>
-		<path fill="#FFFFFF" d="M784.778,802.796c6.154-28.949,15.614-53.709,54.642-53.709c39.028,0,37.961,24.76,31.807,53.709
-			c-7.048,33.156-17.035,56.449-55.225,56.449C777.815,859.246,777.731,835.953,784.778,802.796z M841.994,801.125
-			c4.687-22.041,6-33.154-6.586-33.154c-12.591,0-16.006,11.113-20.688,33.154c-6.868,32.309-5.612,39.232,5.298,39.232
-			C830.926,840.357,835.126,833.433,841.994,801.125z"/>
-		<path fill="#FFFFFF" d="M924.263,751.185l-2.897,13.637h0.421c8.043-12.17,18.872-15.734,30.832-15.734l-5.62,26.434
-			c-25.664-1.672-29.295,13.432-31.523,23.92l-12.269,57.707h-29.583l22.527-105.961h28.112V751.185z"/>
-		<path fill="#FFFFFF" d="M1065.27,837.421c-1.389,6.506-1.977,13.221-2.729,19.725h-27.484l1.729-14.059h-0.421
-			c-8.615,10.91-18.33,16.158-31.131,16.158c-20.563,0-24.859-15.318-20.938-33.787c7.45-35.035,34.831-36.508,62.229-36.09
-			l1.739-8.178c1.916-9.027,2.042-15.525-8.661-15.525c-10.28,0-12.77,7.756-14.552,16.154h-28.959
-			c2.723-12.803,8.447-20.984,16.399-25.811c7.784-5.033,17.839-6.924,29.169-6.924c37.558,0,38.95,16.16,34.845,35.459
-			L1065.27,837.421z M1013.521,824.199c-1.606,7.561-2.312,16.793,7.344,16.793c17.414,0,19.683-23.504,22.136-35.041
-			C1028.179,806.578,1017.537,805.32,1013.521,824.199z"/>
-		<path fill="#FFFFFF" d="M1182.38,788.951c3.346-15.732,0.476-20.98-8.756-20.98c-12.592,0-16.006,11.113-20.688,33.154
-			c-6.868,32.309-5.611,39.232,5.302,39.232c9.231,0,15.424-8.387,18.364-22.242h29.588c-6.112,28.746-24.269,41.131-51.968,41.131
-			c-38.189,0-38.272-23.293-31.227-56.449c6.155-28.949,15.613-53.709,54.643-53.709c27.276,0,40.218,12.172,34.331,39.863H1182.38z
-			"/>
-		<path fill="#FFFFFF" d="M1396.655,837.421c-1.382,6.506-1.968,13.221-2.724,19.725h-27.482l1.729-14.059h-0.421
-			c-8.619,10.91-18.337,16.158-31.135,16.158c-20.562,0-24.862-15.318-20.938-33.787c7.454-35.035,34.837-36.508,62.23-36.09
-			l1.743-8.178c1.911-9.027,2.036-15.525-8.66-15.525c-10.285,0-12.771,7.756-14.559,16.154h-28.961
-			c2.727-12.803,8.452-20.984,16.402-25.811c7.785-5.033,17.834-6.924,29.173-6.924c37.554,0,38.944,16.16,34.846,35.459
-			L1396.655,837.421z M1344.912,824.199c-1.6,7.561-2.312,16.793,7.348,16.793c17.416,0,19.681-23.504,22.137-35.041
-			C1359.57,806.578,1348.927,805.32,1344.912,824.199z"/>
-		<path fill="#FFFFFF" d="M1461.602,761.886h0.422c7.875-9.449,16.568-12.799,27.691-12.799c14.053,0,24.65,8.391,21.531,23.076
-			l-18.064,84.982h-29.598l15.536-73.02c2.003-9.447,1.878-16.785-7.557-16.785c-9.448,0-12.688,7.338-14.688,16.785l-15.527,73.02
-			h-29.584l22.526-105.961h29.589L1461.602,761.886z"/>
-		<path fill="#FFFFFF" d="M1614.947,751.185l-23.456,110.365c-1.567,7.344-7.304,36.305-48.009,36.305
-			c-22.035,0-39.715-5.67-34.857-31.482h28.962c-0.938,4.408-1.113,8.188,0.239,10.705c1.307,2.73,4.354,4.201,8.974,4.201
-			c7.344,0,12.384-6.93,14.658-17.631l4.323-20.352h-0.422c-6.145,8.182-15.429,12.379-25.084,12.379
-			c-32.52,0-24.719-29.791-19.635-53.717c4.959-23.287,11.661-52.871,42.506-52.871c10.494,0,18.534,4.611,20.64,14.477h0.415
-			l2.63-12.379H1614.947z M1554.86,837.421c10.702,0,14.536-11.121,19.31-33.57c4.949-23.295,6.708-36.508-4.197-36.508
-			c-11.127,0-14.66,7.758-21.533,40.076C1546.342,817.283,1540.589,837.421,1554.86,837.421z"/>
-		<path fill="#FFFFFF" d="M1713.985,761.886h0.415c7.884-9.449,16.57-12.799,27.691-12.799c14.059,0,24.653,8.391,21.533,23.076
-			l-18.062,84.982h-29.594l15.526-73.02c2.007-9.447,1.887-16.785-7.553-16.785c-9.446,0-12.686,7.338-14.694,16.785l-15.515,73.02
-			h-29.589l22.527-105.961h29.582L1713.985,761.886z"/>
-		<path fill="#FFFFFF" d="M1867.33,751.185l-23.466,110.365c-1.556,7.344-7.293,36.305-48.005,36.305
-			c-22.026,0-39.709-5.67-34.85-31.482h28.958c-0.942,4.408-1.109,8.188,0.239,10.705c1.308,2.73,4.358,4.201,8.975,4.201
-			c7.345,0,12.374-6.93,14.655-17.631l4.319-20.352h-0.415c-6.145,8.182-15.429,12.379-25.08,12.379
-			c-32.521,0-24.726-29.791-19.638-53.717c4.948-23.287,11.654-52.871,42.506-52.871c10.486,0,18.529,4.611,20.632,14.477h0.412
-			l2.637-12.379H1867.33z M1807.24,837.421c10.699,0,14.531-11.121,19.303-33.57c4.953-23.295,6.721-36.508-4.191-36.508
-			c-11.123,0-14.664,7.758-21.529,40.076C1798.725,817.283,1792.972,837.421,1807.24,837.421z"/>
-		<path fill="#FFFFFF" d="M1913.507,751.185h29.39l-2.429,79.732h0.412l35.265-79.732h31.428l0.398,79.732h0.412l32.242-79.732
-			h28.357l-50.256,105.961h-31.032l-3.033-70.287h-0.415l-35.464,70.287h-31.439L1913.507,751.185z"/>
-		<path fill="#FFFFFF" d="M2063.021,802.796c6.162-28.949,15.621-53.709,54.647-53.709c39.028,0,37.963,24.76,31.801,53.709
-			c-7.036,33.156-17.028,56.449-55.224,56.449C2056.062,859.246,2055.975,835.953,2063.021,802.796z M2120.247,801.125
-			c4.678-22.041,5.997-33.154-6.592-33.154c-12.593,0-15.999,11.113-20.684,33.154c-6.873,32.309-5.618,39.232,5.296,39.232
-			C2109.179,840.357,2113.374,833.433,2120.247,801.125z"/>
-		<path fill="#FFFFFF" d="M2204.189,751.185l-2.898,13.637h0.427c8.037-12.17,18.871-15.734,30.824-15.734l-5.618,26.434
-			c-25.658-1.672-29.296,13.432-31.52,23.92l-12.271,57.707h-29.582l22.528-105.961h28.11V751.185z"/>
-		<path fill="#FFFFFF" d="M1290.979,749.087c-11.125,0-19.81,3.35-27.689,12.799h-0.418l10.944-51.496h-29.589l-31.194,146.756
-			h29.583l15.522-73.02c2.007-9.445,5.248-16.783,14.688-16.783c9.441,0,9.562,7.338,7.553,16.783l-15.523,73.02h29.586
-			l18.063-84.98C1315.631,757.478,1305.036,749.087,1290.979,749.087z"/>
-		<polygon fill="#FFFFFF" points="2253.523,710.39 2222.329,857.146 2251.908,857.146 2283.104,710.39 		"/>
-		<path fill="#FFFFFF" d="M2352.381,710.39l-10.812,50.869h-0.415c-4.168-9.029-10.846-12.172-21.553-12.172
-			c-29.791,0-35.952,32.938-40.5,54.342c-4.646,21.824-12.918,55.816,17.72,55.816c11.329,0,20.271-3.57,27.913-13.854h0.424
-			l-2.498,11.754h28.113l31.193-146.756H2352.381L2352.381,710.39z M2332.645,803.22c-5.662,26.648-9.907,37.771-19.978,37.771
-			c-10.709,0-10.237-11.123-4.568-37.771c4.505-21.195,7.419-35.877,20.222-35.877
-			C2340.488,767.343,2337.154,782.025,2332.645,803.22z"/>
-	</g>
-	<path fill="#FFFFFF" d="M203.474,239.716l2.103,2.02c58.821,49.265,122.312,44.486,170.67,12.332
-		c30.496-20.284,168.388-111.981,246.33-163.811V38.492H41.683v136.705C77.088,169.069,134.151,175.078,203.474,239.716z"/>
-	<path fill="#FFFFFF" d="M422.01,467.153l-68.618-65.809c-9.63-8.786-17.884-15.225-19.66-16.594
-		c-56.077-39.381-108.36-27.417-130.901-19.028c-6.828,2.74-13.343,6.453-17.959,9.351L41.683,470.858v173.543h580.893V519.509
-		C609.828,527.613,531.131,570.189,422.01,467.153z"/>
-	<path fill="#FFFFFF" d="M198.344,255.241c-3.101-2.883-6.533-5.966-9.585-8.481c-68.605-53.08-133.416-27.105-147.077-20.62v59.741
-		l33.604-22.646c0,0,51.801-38.424,132.407,0.976L198.344,255.241z"/>
-	<path fill="#FFFFFF" d="M278.565,332.388c-3.016-2.668-6.053-5.421-9.121-8.32c-60.854-51.499-119.679-38.412-144.087-29.36
-		c-6.906,2.756-13.505,6.521-18.16,9.444l-6.592,4.411h-0.003l-58.919,39.414v61.521l109.59-73.846c0,0,51.196-37.977,130.99,0.282
-		L278.565,332.388z"/>
-	<path fill="#FFFFFF" d="M622.658,396.804c-22.844,15.398-46.074,31.052-55.317,37.243c-20.657,13.84-68.479,48.14-148.067,10.101
-		l9.354,8.981c3.5,3.175,7.758,6.828,12.424,10.406c57.164,42.714,118.246,36.576,164.04,6.231
-		c6.344-4.201,12.135-8.06,17.567-11.688V396.804z"/>
-	<path fill="#FFFFFF" d="M622.658,273.587c-50.169,33.741-117.878,79.276-130.598,87.796c-20.655,13.84-69.187,48.59-147.077,11.382
-		l9.015,8.662c1.025,0.945,2.047,1.873,3.076,2.847l-0.051,0.06l0,0l0.051-0.057c60.461,51.168,127.048,43.335,172.731,12.829
-		c25.76-17.2,62.381-41.961,92.851-62.614L622.658,273.587L622.658,273.587z"/>
-	<path fill="#FFFFFF" d="M622.658,151.031c-72.346,48.476-191.05,128.009-205.424,137.641
-		c-20.849,13.967-69.966,49.24-146.875,12.516l9.349,8.985c5.564,5.038,13.049,11.317,20.978,16.504
-		c53.228,34,108.948,27.997,154.298-2.287c32.594-21.765,112.771-75.06,167.675-111.541L622.658,151.031L622.658,151.031z"/>
-</g>
-</svg>
diff --git a/geomagio/webservice/templates/_template.html b/geomagio/webservice/templates/_template.html
deleted file mode 100644
index 845a27a1fccbb21ca06ebea50c3650123f12ec3b..0000000000000000000000000000000000000000
--- a/geomagio/webservice/templates/_template.html
+++ /dev/null
@@ -1,31 +0,0 @@
-<!DOCTYPE html>
-<html>
-  <head>
-    <title>{% block title %}{% endblock %}</title>
-    <meta charset="utf-8" />
-    <link
-      rel="stylesheet"
-      href="{{ url_for('static', filename='base.css') }}"
-    />
-  </head>
-  <body>
-    <nav>
-      {% if current_user & if current_user.is_authenticated %} Logged in as {{
-      current_user.email }}
-      <a href="{{ url_for('login.logout') }}">Log Out</a>
-      {% else %}
-      <a href="{{ url_for('login.login') }}">Log In</a>
-      {% endif %}
-    </nav>
-
-    <main>
-      {% for message in get_flashed_messages() %}
-      <div class="flash">{{ message }}</div>
-      {% endfor %}
-
-      <header>{% block header %}{% endblock %}</header>
-
-      {% block content %}{% endblock %}
-    </main>
-  </body>
-</html>
diff --git a/geomagio/webservice/templates/data/usage.html b/geomagio/webservice/templates/data/usage.html
deleted file mode 100644
index 08fd9694b626e12d38eb03bb42d6f3abae7b9ab1..0000000000000000000000000000000000000000
--- a/geomagio/webservice/templates/data/usage.html
+++ /dev/null
@@ -1,164 +0,0 @@
-{% block header %}
-<link
-  rel="stylesheet"
-  type="text/css"
-  href="{{ url_for('static', filename='usage.css') }}"
-/>
-{% endblock %} {% block content %}
-
-<h2>Example Requests</h2>
-
-<dl>
-  <dt>BOU observatory data for current UTC day in IAGA2002 format</dt>
-  <dd>
-    <a href="{{ request.url }}?id=BOU">
-      {{ request.url }}?id=BOU
-    </a>
-  </dd>
-
-  <dt>BOU observatory data for current UTC day in JSON format</dt>
-  <dd>
-    <a href="{{ request.url }}?id=BOU&format=json">
-      {{ request.url }}?id=BOU&format=json
-    </a>
-  </dd>
-
-  <dt>BOU electric field data for current UTC day in IAGA2002 format</dt>
-  <dd>
-    <a href="{{ request.url }}?id=BOU&elements=E-N,E-E">
-      {{ request.url }}?id=BOU&elements=E-N,E-E
-    </a>
-  </dd>
-</dl>
-<p>
-  <a href="examples.php">See more examples</a>
-</p>
-
-<h2>Request Limits</h2>
-
-<p>
-  To ensure availablility for users, the web service restricts the amount of
-  data that can be retrieved in one request to
-  <strong>345600 samples</strong>
-  . The amount of data requested is computed as follows, where interval is the
-  number of seconds between starttime and endtime:
-</p>
-
-<pre>
-  samples = count(elements) * interval / sampling_period
-</pre>
-
-<p>
-  This is equivalent to one day of second data in iaga2002 format (
-  <code>345600 samples = 4 elements * 24 hours * 3600 samples/hour</code>
-  ).
-</p>
-
-<h2>Parameters</h2>
-<dl>
-  <dt>id</dt>
-  <dd>
-    Observatory code. Required.
-    <br />
-    Valid values: {% for observatory in valid_observatories %}
-    <code>{{ observatory }}</code>
-    {% if not loop.last %} , {% endif %} {% endfor %}
-  </dd>
-
-  <dt>starttime</dt>
-  <dd>
-    Time of first requested data.
-    <br />
-    Default: start of current UTC day
-    <br />
-    Format: ISO8601 (
-    <code>YYYY-MM-DDTHH:MM:SSZ</code>
-    )
-    <br />
-    Example:
-    <code>2020-03-06T16:59:53Z</code>
-  </dd>
-
-  <dt>endtime</dt>
-  <dd>
-    Time of last requested data.
-    <br />
-    Default: starttime + 24 hours
-    <br />
-    Format: ISO8601 (
-    <code>YYYY-MM-DDTHH:MM:SSZ</code>
-    )
-    <br />
-    Example:
-    <code>2020-03-06T16:59:53Z</code>
-  </dd>
-
-  <dt>elements</dt>
-  <dd>
-    Comma separated list of requested elements.
-    <br />
-    Default:
-    <code>X,Y,Z,F</code>
-    <br />
-    Valid values: {% for element in valid_elements %}
-    <code>{{ element }}</code>
-    {% if not loop.last %} , {% endif %} {% endfor %}
-
-    <br />
-    <small>
-      NOTE: the USGS web service also supports specific EDGE channel codes. For
-      example:
-      <code>UK1</code>
-      is "electronics temperature"
-    </small>
-  </dd>
-
-  <dt>sampling_period</dt>
-  <dd>
-    Interval in seconds between values.
-    <br />
-    Default:
-    <code>60</code>
-    <br />
-    Valid values: {% for sampling_period in valid_sampling_periods %}
-    <code>{{ sampling_period }}</code>
-    {% if not loop.last %} , {% endif %} {% endfor %}
-  </dd>
-
-  <dt>type</dt>
-  <dd>
-    Type of data.
-    <br />
-    Default:
-    <code>variation</code>
-    Valid values: {% for data_type in valid_data_types %}
-    <code>{{ data_type }}</code>
-    {% if not loop.last %} , {% endif %} {% endfor %}
-
-    <br />
-    <small>
-      NOTE: the USGS web service also supports specific EDGE location codes. For
-      example:
-      <code>R0</code>
-      is "internet variation",
-      <code>R1</code>
-      is "satellite variation".
-    </small>
-  </dd>
-
-  <dt>format</dt>
-  <dd>
-    Output format.
-    <br />
-    Default:
-    <code>iaga2002</code>
-    <br />
-    Valid values:
-    <code>iaga2002</code>
-    ,
-    <code>json</code>
-    .
-  </dd>
-</dl>
-
-{% endblock %}
diff --git a/geomagio/webservice/templates/hello.html b/geomagio/webservice/templates/hello.html
deleted file mode 100644
index 8963d649b2853bae9fa49e4a2e7ae9afb88a153c..0000000000000000000000000000000000000000
--- a/geomagio/webservice/templates/hello.html
+++ /dev/null
@@ -1,5 +0,0 @@
-{% extends '_template.html' %} {% block header %}
-<h1>{% block title %}Hello{% endblock %}</h1>
-{% endblock %} {% block content %}
-<p>This is the page when logged in</p>
-{{ current_user.to_dict() | tojson }} {% endblock %}
diff --git a/geomagio/webservice/templates/index.html b/geomagio/webservice/templates/index.html
deleted file mode 100644
index e01d2fb50c54647918c23149706f4bfb1a278f37..0000000000000000000000000000000000000000
--- a/geomagio/webservice/templates/index.html
+++ /dev/null
@@ -1,5 +0,0 @@
-{% block header %}
-<h1>{% block title %}Index{% endblock %}</h1>
-{% endblock %} {% block content %}
-<p>This is the default page, when logged out</p>
-{% endblock %}
diff --git a/test/edge_test/MiniSeedFactory_test.py b/test/edge_test/MiniSeedFactory_test.py
index 1483489f03303b7367c2c1625251f21e25ab379e..18f2cee5eddd387804379a6aef8d6c0139c4e5ee 100644
--- a/test/edge_test/MiniSeedFactory_test.py
+++ b/test/edge_test/MiniSeedFactory_test.py
@@ -162,6 +162,7 @@ def __create_trace(
     stats.starttime = UTCDateTime("2019-12-01")
     stats.delta = TimeseriesUtility.get_delta_from_interval(data_interval)
     stats.channel = channel
+    stats.station = station
     stats.npts = len(data)
     stats.data_interval = data_interval
     stats.data_type = data_type
diff --git a/test_residual.py b/test_residual.py
new file mode 100644
index 0000000000000000000000000000000000000000..686dc53001ab4fb2002b961ad9e97dd98bbab56f
--- /dev/null
+++ b/test_residual.py
@@ -0,0 +1,27 @@
+import json
+from obspy import UTCDateTime
+
+from geomagio.residual import (
+    CalFileFactory,
+    SpreadsheetAbsolutesFactory,
+    WebAbsolutesFactory,
+)
+
+
+input_factory = SpreadsheetAbsolutesFactory()
+readings = input_factory.get_readings(
+    observatory="CMO",
+    starttime=UTCDateTime(2020, 1, 1),
+    endtime=UTCDateTime(2020, 1, 8),
+)
+print(
+    json.dumps(
+        readings,
+        default=lambda x: isinstance(x, UTCDateTime) and str(x) or x.__dict__,
+        indent=2,
+    )
+)
+
+output_factory = CalFileFactory()
+out = output_factory.format_readings(readings)
+print(out)