diff --git a/Pipfile b/Pipfile
index 0b6c797572887983771813753c4b8c71b29d143c..36de0183662f30ae87d95d6fe7e3b2eddb8aec59 100644
--- a/Pipfile
+++ b/Pipfile
@@ -17,13 +17,15 @@ obspy = ">1.2.0"
 pycurl = "*"
 authlib = "*"
 cryptography = "*"
-databases = {extras = ["sqlite"],version = "*"}
+databases = {extras = ["postgresql", "sqlite"], version = "*"}
 fastapi = "*"
 httpx = "==0.11.1"
 openpyxl = "*"
 pydantic = "==1.4"
 sqlalchemy = "*"
 sqlalchemy-utc = "*"
+typing = "*"  # NOTE(review): PyPI 'typing' is a py2/py3.4 backport that shadows stdlib typing on py3 and breaks on 3.7+ — confirm this is actually needed
+typing-extensions = "*"
 uvicorn = "*"
 
 [pipenv]
diff --git a/Pipfile.lock b/Pipfile.lock
index c93459f431ad3df1384ac9fa0487a5db35a2a651..a8cdfa11a3ad8bcf09d150d98468d2bc8885190c 100644
--- a/Pipfile.lock
+++ b/Pipfile.lock
@@ -1,7 +1,7 @@
 {
     "_meta": {
         "hash": {
-            "sha256": "feb945aefc3117570ac55b98510d96243c861469466ee798fa8b49416693f89d"
+            "sha256": "809e5e04e4d1957e08e475b75c4c425c1b63b7d1fdaee30e79b077e84e115504"
         },
         "pipfile-spec": 6,
         "requires": {},
@@ -103,35 +103,35 @@
         },
         "click": {
             "hashes": [
-                "sha256:8a18b4ea89d8820c5d0c7da8a64b2c324b4dabb695804dbfea19b9be9d88c0cc",
-                "sha256:e345d143d80bf5ee7534056164e5e112ea5e22716bbb1ce727941f4c8b471b9a"
+                "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a",
+                "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc"
             ],
-            "version": "==7.1.1"
+            "version": "==7.1.2"
         },
         "cryptography": {
             "hashes": [
-                "sha256:0cacd3ef5c604b8e5f59bf2582c076c98a37fe206b31430d0cd08138aff0986e",
-                "sha256:192ca04a36852a994ef21df13cca4d822adbbdc9d5009c0f96f1d2929e375d4f",
-                "sha256:19ae795137682a9778892fb4390c07811828b173741bce91e30f899424b3934d",
-                "sha256:1b9b535d6b55936a79dbe4990b64bb16048f48747c76c29713fea8c50eca2acf",
-                "sha256:2a2ad24d43398d89f92209289f15265107928f22a8d10385f70def7a698d6a02",
-                "sha256:3be7a5722d5bfe69894d3f7bbed15547b17619f3a88a318aab2e37f457524164",
-                "sha256:49870684da168b90110bbaf86140d4681032c5e6a2461adc7afdd93be5634216",
-                "sha256:587f98ce27ac4547177a0c6fe0986b8736058daffe9160dcf5f1bd411b7fbaa1",
-                "sha256:5aca6f00b2f42546b9bdf11a69f248d1881212ce5b9e2618b04935b87f6f82a1",
-                "sha256:6b744039b55988519cc183149cceb573189b3e46e16ccf6f8c46798bb767c9dc",
-                "sha256:6b91cab3841b4c7cb70e4db1697c69f036c8bc0a253edc0baa6783154f1301e4",
-                "sha256:7598974f6879a338c785c513e7c5a4329fbc58b9f6b9a6305035fca5b1076552",
-                "sha256:7a279f33a081d436e90e91d1a7c338553c04e464de1c9302311a5e7e4b746088",
-                "sha256:95e1296e0157361fe2f5f0ed307fd31f94b0ca13372e3673fa95095a627636a1",
-                "sha256:9fc9da390e98cb6975eadf251b6e5fa088820141061bf041cd5c72deba1dc526",
-                "sha256:cc20316e3f5a6b582fc3b029d8dc03aabeb645acfcb7fc1d9848841a33265748",
-                "sha256:d1bf5a1a0d60c7f9a78e448adcb99aa101f3f9588b16708044638881be15d6bc",
-                "sha256:ed1d0760c7e46436ec90834d6f10477ff09475c692ed1695329d324b2c5cd547",
-                "sha256:ef9a55013676907df6c9d7dd943eb1770d014f68beaa7e73250fb43c759f4585"
+                "sha256:091d31c42f444c6f519485ed528d8b451d1a0c7bf30e8ca583a0cac44b8a0df6",
+                "sha256:18452582a3c85b96014b45686af264563e3e5d99d226589f057ace56196ec78b",
+                "sha256:1dfa985f62b137909496e7fc182dac687206d8d089dd03eaeb28ae16eec8e7d5",
+                "sha256:1e4014639d3d73fbc5ceff206049c5a9a849cefd106a49fa7aaaa25cc0ce35cf",
+                "sha256:22e91636a51170df0ae4dcbd250d318fd28c9f491c4e50b625a49964b24fe46e",
+                "sha256:3b3eba865ea2754738616f87292b7f29448aec342a7c720956f8083d252bf28b",
+                "sha256:651448cd2e3a6bc2bb76c3663785133c40d5e1a8c1a9c5429e4354201c6024ae",
+                "sha256:726086c17f94747cedbee6efa77e99ae170caebeb1116353c6cf0ab67ea6829b",
+                "sha256:844a76bc04472e5135b909da6aed84360f522ff5dfa47f93e3dd2a0b84a89fa0",
+                "sha256:88c881dd5a147e08d1bdcf2315c04972381d026cdb803325c03fe2b4a8ed858b",
+                "sha256:96c080ae7118c10fcbe6229ab43eb8b090fccd31a09ef55f83f690d1ef619a1d",
+                "sha256:a0c30272fb4ddda5f5ffc1089d7405b7a71b0b0f51993cb4e5dbb4590b2fc229",
+                "sha256:bb1f0281887d89617b4c68e8db9a2c42b9efebf2702a3c5bf70599421a8623e3",
+                "sha256:c447cf087cf2dbddc1add6987bbe2f767ed5317adb2d08af940db517dd704365",
+                "sha256:c4fd17d92e9d55b84707f4fd09992081ba872d1a0c610c109c18e062e06a2e55",
+                "sha256:d0d5aeaedd29be304848f1c5059074a740fa9f6f26b84c5b63e8b29e73dfc270",
+                "sha256:daf54a4b07d67ad437ff239c8a4080cfd1cc7213df57d33c97de7b4738048d5e",
+                "sha256:e993468c859d084d5579e2ebee101de8f5a27ce8e2159959b6673b418fd8c785",
+                "sha256:f118a95c7480f5be0df8afeb9a11bd199aa20afab7a96bcf20409b411a3a85f0"
             ],
             "index": "pypi",
-            "version": "==2.9"
+            "version": "==2.9.2"
         },
         "cycler": {
             "hashes": [
@@ -146,10 +146,10 @@
                 "sqlite"
             ],
             "hashes": [
-                "sha256:a04db1d158a91db7bd49db16e14266e8e6c7336f06f88c700147690683c769a3"
+                "sha256:92b7f66c74d21eba54899ba88352311019131fb01ceaf076452949429e8d1b12"
             ],
             "index": "pypi",
-            "version": "==0.2.6"
+            "version": "==0.3.1"
         },
         "decorator": {
             "hashes": [
@@ -201,10 +201,10 @@
         },
         "hstspreload": {
             "hashes": [
-                "sha256:acbf4d6fd362b363ce567db56a8667c4f0e43073001add9c4e43f449c11e1d81",
-                "sha256:ca0af58f803598a737d5f325212e488b24de1ac9f0cf6fa12da1a3f4651914e6"
+                "sha256:5e42c9c9b925d793d1732e20f59da7c9d6d340bde82ead7d49031bde59991219",
+                "sha256:f2571debd824aa5ec34816a2e08badda043181e908710317b9826b5c30d3bc17"
             ],
-            "version": "==2020.4.14"
+            "version": "==2020.4.28"
         },
         "httptools": {
             "hashes": [
@@ -327,30 +327,30 @@
         },
         "numpy": {
             "hashes": [
-                "sha256:1598a6de323508cfeed6b7cd6c4efb43324f4692e20d1f76e1feec7f59013448",
-                "sha256:1b0ece94018ae21163d1f651b527156e1f03943b986188dd81bc7e066eae9d1c",
-                "sha256:2e40be731ad618cb4974d5ba60d373cdf4f1b8dcbf1dcf4d9dff5e212baf69c5",
-                "sha256:4ba59db1fcc27ea31368af524dcf874d9277f21fd2e1f7f1e2e0c75ee61419ed",
-                "sha256:59ca9c6592da581a03d42cc4e270732552243dc45e87248aa8d636d53812f6a5",
-                "sha256:5e0feb76849ca3e83dd396254e47c7dba65b3fa9ed3df67c2556293ae3e16de3",
-                "sha256:6d205249a0293e62bbb3898c4c2e1ff8a22f98375a34775a259a0523111a8f6c",
-                "sha256:6fcc5a3990e269f86d388f165a089259893851437b904f422d301cdce4ff25c8",
-                "sha256:82847f2765835c8e5308f136bc34018d09b49037ec23ecc42b246424c767056b",
-                "sha256:87902e5c03355335fc5992a74ba0247a70d937f326d852fc613b7f53516c0963",
-                "sha256:9ab21d1cb156a620d3999dd92f7d1c86824c622873841d6b080ca5495fa10fef",
-                "sha256:a1baa1dc8ecd88fb2d2a651671a84b9938461e8a8eed13e2f0a812a94084d1fa",
-                "sha256:a244f7af80dacf21054386539699ce29bcc64796ed9850c99a34b41305630286",
-                "sha256:a35af656a7ba1d3decdd4fae5322b87277de8ac98b7d9da657d9e212ece76a61",
-                "sha256:b1fe1a6f3a6f355f6c29789b5927f8bd4f134a4bd9a781099a7c4f66af8850f5",
-                "sha256:b5ad0adb51b2dee7d0ee75a69e9871e2ddfb061c73ea8bc439376298141f77f5",
-                "sha256:ba3c7a2814ec8a176bb71f91478293d633c08582119e713a0c5351c0f77698da",
-                "sha256:cd77d58fb2acf57c1d1ee2835567cd70e6f1835e32090538f17f8a3a99e5e34b",
-                "sha256:cdb3a70285e8220875e4d2bc394e49b4988bdb1298ffa4e0bd81b2f613be397c",
-                "sha256:deb529c40c3f1e38d53d5ae6cd077c21f1d49e13afc7936f7f868455e16b64a0",
-                "sha256:e7894793e6e8540dbeac77c87b489e331947813511108ae097f1715c018b8f3d"
+                "sha256:0aa2b318cf81eb1693fcfcbb8007e95e231d7e1aa24288137f3b19905736c3ee",
+                "sha256:163c78c04f47f26ca1b21068cea25ed7c5ecafe5f5ab2ea4895656a750582b56",
+                "sha256:1e37626bcb8895c4b3873fcfd54e9bfc5ffec8d0f525651d6985fcc5c6b6003c",
+                "sha256:264fd15590b3f02a1fbc095e7e1f37cdac698ff3829e12ffdcffdce3772f9d44",
+                "sha256:3d9e1554cd9b5999070c467b18e5ae3ebd7369f02706a8850816f576a954295f",
+                "sha256:40c24960cd5cec55222963f255858a1c47c6fa50a65a5b03fd7de75e3700eaaa",
+                "sha256:46f404314dbec78cb342904f9596f25f9b16e7cf304030f1339e553c8e77f51c",
+                "sha256:4847f0c993298b82fad809ea2916d857d0073dc17b0510fbbced663b3265929d",
+                "sha256:48e15612a8357393d176638c8f68a19273676877caea983f8baf188bad430379",
+                "sha256:6725d2797c65598778409aba8cd67077bb089d5b7d3d87c2719b206dc84ec05e",
+                "sha256:99f0ba97e369f02a21bb95faa3a0de55991fd5f0ece2e30a9e2eaebeac238921",
+                "sha256:a41f303b3f9157a31ce7203e3ca757a0c40c96669e72d9b6ee1bce8507638970",
+                "sha256:a4305564e93f5c4584f6758149fd446df39fd1e0a8c89ca0deb3cce56106a027",
+                "sha256:a551d8cc267c634774830086da42e4ba157fa41dd3b93982bc9501b284b0c689",
+                "sha256:a6bc9432c2640b008d5f29bad737714eb3e14bb8854878eacf3d7955c4e91c36",
+                "sha256:c60175d011a2e551a2f74c84e21e7c982489b96b6a5e4b030ecdeacf2914da68",
+                "sha256:e46e2384209c91996d5ec16744234d1c906ab79a701ce1a26155c9ec890b8dc8",
+                "sha256:e607b8cdc2ae5d5a63cd1bec30a15b5ed583ac6a39f04b7ba0f03fcfbf29c05b",
+                "sha256:e94a39d5c40fffe7696009dbd11bc14a349b377e03a384ed011e03d698787dd3",
+                "sha256:eb2286249ebfe8fcb5b425e5ec77e4736d53ee56d3ad296f8947f67150f495e3",
+                "sha256:fdee7540d12519865b423af411bd60ddb513d2eb2cd921149b732854995bbf8b"
             ],
             "index": "pypi",
-            "version": "==1.18.2"
+            "version": "==1.18.3"
         },
         "obspy": {
             "hashes": [
@@ -562,20 +562,38 @@
             ],
             "version": "==0.13.2"
         },
+        "typing": {
+            "hashes": [
+                "sha256:91dfe6f3f706ee8cc32d38edbbf304e9b7583fb37108fef38229617f8b3eba23",
+                "sha256:c8cabb5ab8945cd2f54917be357d134db9cc1eb039e59d1606dc1e60cb1d9d36",
+                "sha256:f38d83c5a7a7086543a0f649564d661859c5146a85775ab90c0d2f93ffaa9714"
+            ],
+            "index": "pypi",
+            "version": "==3.7.4.1"
+        },
+        "typing-extensions": {
+            "hashes": [
+                "sha256:6e95524d8a547a91e08f404ae485bbb71962de46967e1b71a0cb89af24e761c5",
+                "sha256:79ee589a3caca649a9bfd2a8de4709837400dfa00b6cc81962a1e6a1815969ae",
+                "sha256:f8d2bd89d25bc39dabe7d23df520442fa1d8969b82544370e03d88b5a591c392"
+            ],
+            "index": "pypi",
+            "version": "==3.7.4.2"
+        },
         "urllib3": {
             "hashes": [
-                "sha256:2f3db8b19923a873b3e5256dc9c2dedfa883e33d87c690d9c7913e1f40673cdc",
-                "sha256:87716c2d2a7121198ebcb7ce7cccf6ce5e9ba539041cfbaeecfb641dc0bf6acc"
+                "sha256:3018294ebefce6572a474f0604c2021e33b3fd8006ecd11d62107a5d2a963527",
+                "sha256:88206b0eb87e6d677d424843ac5209e3fb9d0190d0ee169599165ec25e9d9115"
             ],
-            "version": "==1.25.8"
+            "version": "==1.25.9"
         },
         "uvicorn": {
             "hashes": [
-                "sha256:0f58170165c4495f563d8224b2f415a0829af0412baa034d6f777904613087fd",
-                "sha256:6fdaf8e53bf1b2ddf0fe9ed06079b5348d7d1d87b3365fe2549e6de0d49e631c"
+                "sha256:a19de5b2f8dec56ec95bbbf3ce334b168efca6a238b2ba7c6cb020e3a42ce71a",
+                "sha256:aa6db2200cff18f8c550c869fab01f78ca987464d499f654e7aeb166fe0c0616"
             ],
             "index": "pypi",
-            "version": "==0.11.3"
+            "version": "==0.11.4"
         },
         "uvloop": {
             "hashes": [
@@ -660,10 +678,10 @@
         },
         "click": {
             "hashes": [
-                "sha256:8a18b4ea89d8820c5d0c7da8a64b2c324b4dabb695804dbfea19b9be9d88c0cc",
-                "sha256:e345d143d80bf5ee7534056164e5e112ea5e22716bbb1ce727941f4c8b471b9a"
+                "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a",
+                "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc"
             ],
-            "version": "==7.1.1"
+            "version": "==7.1.2"
         },
         "coverage": {
             "hashes": [
@@ -716,10 +734,10 @@
         },
         "identify": {
             "hashes": [
-                "sha256:2bb8760d97d8df4408f4e805883dad26a2d076f04be92a10a3e43f09c6060742",
-                "sha256:faffea0fd8ec86bb146ac538ac350ed0c73908326426d387eded0bcc9d077522"
+                "sha256:23c18d97bb50e05be1a54917ee45cc61d57cb96aedc06aabb2b02331edf0dbf0",
+                "sha256:88ed90632023e52a6495749c6732e61e08ec9f4f04e95484a5c37b9caf40283c"
             ],
-            "version": "==1.4.14"
+            "version": "==1.4.15"
         },
         "importlib-metadata": {
             "hashes": [
@@ -765,11 +783,11 @@
         },
         "pre-commit": {
             "hashes": [
-                "sha256:487c675916e6f99d355ec5595ad77b325689d423ef4839db1ed2f02f639c9522",
-                "sha256:c0aa11bce04a7b46c5544723aedf4e81a4d5f64ad1205a30a9ea12d5e81969e1"
+                "sha256:979b53dab1af35063a483bfe13b0fcbbf1a2cf8c46b60e0a9a8d08e8269647a1",
+                "sha256:f3e85e68c6d1cbe7828d3471896f1b192cfcf1c4d83bf26e26beeb5941855257"
             ],
             "index": "pypi",
-            "version": "==2.2.0"
+            "version": "==2.3.0"
         },
         "py": {
             "hashes": [
@@ -892,10 +910,10 @@
         },
         "virtualenv": {
             "hashes": [
-                "sha256:00cfe8605fb97f5a59d52baab78e6070e72c12ca64f51151695407cc0eb8a431",
-                "sha256:c8364ec469084046c779c9a11ae6340094e8a0bf1d844330fc55c1cefe67c172"
+                "sha256:5021396e8f03d0d002a770da90e31e61159684db2859d0ba4850fbea752aa675",
+                "sha256:ac53ade75ca189bc97b6c1d9ec0f1a50efe33cbf178ae09452dcd9fd309013c1"
             ],
-            "version": "==20.0.17"
+            "version": "==20.0.18"
         },
         "waitress": {
             "hashes": [
@@ -920,11 +938,11 @@
         },
         "webtest": {
             "hashes": [
-                "sha256:71114cd778a7d7b237ec5c8a5c32084f447d869ae62e48bcd5b73af211133e74",
-                "sha256:da9cf14c103ff51a40dee4cac7657840d1317456eb8f0ca81289b5cbff175f4b"
+                "sha256:44ddfe99b5eca4cf07675e7222c81dd624d22f9a26035d2b93dc8862dc1153c6",
+                "sha256:aac168b5b2b4f200af4e35867cf316712210e3d5db81c1cbdff38722647bb087"
             ],
             "index": "pypi",
-            "version": "==2.0.34"
+            "version": "==2.0.35"
         },
         "zipp": {
             "hashes": [
@@ -934,5 +952,4 @@
             "version": "==3.1.0"
         }
     }
-  }
 }
diff --git a/geomagio/TimeseriesUtility.py b/geomagio/TimeseriesUtility.py
index 8b0d3ee49d1fbcf8fbdd5f6da1fd5fe08a661056..34358454cae2b3820ab39d0e5d04426db85b380e 100644
--- a/geomagio/TimeseriesUtility.py
+++ b/geomagio/TimeseriesUtility.py
@@ -276,6 +276,41 @@ def get_channels(stream):
     return [ch for ch in channels]
 
 
+def get_trace_value(traces, time, default=None):
+    """Get a value at a specific time.
+
+    Parameters
+    ----------
+    traces : list of obspy.core.Trace
+        traces to search for a sample at ``time``.
+    time : obspy.core.UTCDateTime
+        time of the sample to find.
+    default : optional
+        value to return when no trace has a usable sample at ``time``.
+
+    Returns
+    -------
+    value from the first trace with a sample exactly at ``time``,
+    or ``default`` when there is no such sample or the sample is NaN.
+    """
+    for trace in traces:
+        # array of UTCDateTime values corresponding to trace.data
+        times = trace.times("utcdatetime")
+        # index of first sample time >= requested time
+        index = times.searchsorted(time)
+        if index >= len(times):
+            # requested time is after this trace's last sample;
+            # without this check times[index] raises IndexError
+            continue
+        trace_time = times[index]
+        trace_value = trace.data[index]
+        if trace_time == time:
+            if numpy.isnan(trace_value):
+                return default
+            else:
+                return trace_value
+    return default
+
+
 def has_all_channels(stream, channels, starttime, endtime):
     """Check whether all channels have any data within time range.
 
diff --git a/geomagio/residual/Calculation.py b/geomagio/residual/Calculation.py
index 2ea803ab878f5cccb6fdbb7a149921a1550bf268..0155df530f508a89322d40597194bfc9a84ea1c8 100644
--- a/geomagio/residual/Calculation.py
+++ b/geomagio/residual/Calculation.py
@@ -1,382 +1,257 @@
+from typing import List, Tuple
+from typing_extensions import Literal
+
 import numpy as np
-from .Ordinate import Ordinate
-from .Absolute import Absolute
-from .Angle import from_dms
-from .Angle import to_dms
-from .MeasurementType import MeasurementType as mt
 from pydantic import BaseModel
 
-
-# specify mark measurement types
-MARK_TYPES = [
-    mt.FIRST_MARK_DOWN,
-    mt.FIRST_MARK_UP,
-    mt.SECOND_MARK_DOWN,
-    mt.SECOND_MARK_UP,
-]
-
-# define measurement types used to calculate inclination
-INCLINATION_TYPES = [mt.NORTH_DOWN, mt.NORTH_UP, mt.SOUTH_DOWN, mt.SOUTH_UP]
-
-# define measurement types used to calculate declination
-DECLINATION_TYPES = [mt.EAST_UP, mt.EAST_DOWN, mt.WEST_UP, mt.WEST_DOWN]
-
-
-class Calculate(BaseModel):
-    """
-    Class object for performing calculations.
-    Contains the following:
-    angle: average angle across a measurement type
-    residual: average residual across a measurement type
-    hs: Multiplier for inclination claculations. +1 if measurment was taken in northern hemisphere, -1 if measurement was taken in the southern hemishpere.
-    ordinate: Variometer data. Ordinate object(contains a datapoint for H, E, Z, and F)
-    direction: Multiplier for inclination calculations. +1 if instrument is oriented upward, -1 if instrument if oriented downward.
-    shift: Degree shift in inclination measurements.
-    """
-
-    angle: float = None
-    residual: float = None
-    ordinate: Ordinate = None
-    f: float = None
-    direction: int = None
-    meridian: int = None
-    shift: int = None
-
-
-def calculate(reading):
-    """
-    Calculate/recalculate absolute from a Reading object's
-    ordinates, measurements, and metadata.
-    Outputs a list of absolutes containing baseline, absolute,
-    and element name. Also reutrns the scale value.
+from .Absolute import Absolute
+from .Angle import from_dms, to_dms
+from .MeasurementType import (
+    MeasurementType as mt,
+    DECLINATION_TYPES,
+    INCLINATION_TYPES,
+    MARK_TYPES,
+)
+from .Measurement import AverageMeasurement, Measurement, average_measurement
+from .Reading import Reading
+
+
+def calculate(reading: Reading) -> Reading:
+    """Calculate absolutes and scale value using residual method.
+
+    Parameters
+    -------
+    reading: reading to calculate absolutes from.
+
+    Returns
+    -------
+    new reading object with calculated absolutes and scale_value.
+    NOTE: rest of reading object is shallow copy.
     """
-    # gather oridinates, measuremenets, and metadata objects from reading
-    metadata = reading.metadata
-    ordinates = reading.ordinates
-    ordinate_index = reading.ordinate_index()
-    measurements = reading.measurements
-    measurement_index = reading.measurement_index()
-    # get ordinate values across h, e, z, and f for inclination measurement types
-    inclination_ordinates = [
-        o for o in ordinates if o.measurement_type in INCLINATION_TYPES
-    ]
-    # get average ordinate values across h, e, z, and f
-    i_mean = average_ordinate(inclination_ordinates, None)
+    # reference measurement, used to adjust absolutes
+    reference = reading[mt.WEST_DOWN][0]
     # calculate inclination
-    inclination, f = calculate_I(
-        measurement_index, inclination_ordinates, ordinate_index, i_mean, metadata,
+    inclination, f, mean = calculate_I(
+        hemisphere=reading.hemisphere, measurements=reading.measurements
     )
+    corrected_f = f + reading.pier_correction  # TODO: should this be returned?
     # calculate absolutes
-    h_abs, z_abs = calculate_absolutes(f, inclination)
-    b_mean = average_ordinate(
-        inclination_ordinates[:-1], None
-    )  # excludes scaling measurement
-    # calculate baselines for H and Z
-    h_b, z_b = calculate_baselines(h_abs, z_abs, b_mean)
-    # gather first measurement's ordinates
-    wd_ord = ordinate_index[mt.WEST_DOWN][0]
-    wd_h = wd_ord.h
-    wd_e = wd_ord.e
-    wd_z = wd_ord.z
-    # recalculate absolute value for H
-    h_abs = np.sqrt((h_b + wd_h) ** 2 + (wd_e) ** 2)
-    # recalculate absolute value for Z
-    z_abs = z_b + wd_z
-    # calculate scale value
-    scale_ordinates = ordinate_index[mt.NORTH_DOWN_SCALE]
-    scale_measurements = measurement_index[mt.NORTH_DOWN_SCALE]
-    scale = calculate_scale(f, scale_ordinates, scale_measurements, inclination,)
-    # calculate declination absolute and baseline
-    d_b, d_abs = calculate_D(
-        ordinate_index, measurements, measurement_index, metadata, h_b,
+    absoluteH, absoluteZ = calculate_HZ_absolutes(
+        corrected_f=corrected_f, inclination=inclination, mean=mean, reference=reference
     )
-
-    # return results as a set of Absolute objects along with the calculated scale value
-    resultD = Absolute(element="D", baseline=d_b, absolute=d_abs)
-    resultH = Absolute(element="H", baseline=h_b, absolute=h_abs)
-    resultZ = Absolute(element="Z", baseline=z_b, absolute=z_abs)
-
-    result = [resultD, resultH, resultZ]
-
-    return result
-
-
-def calculate_I(measurements, ordinates, ordinates_index, mean, metadata):
-    """
-    Calculate inclination angles from measurements, ordinates,
-    average ordinates from every measurement, and metadata.
-    Returns inclination angle and calculated average f
-    """
-    # get first inclination angle, assumed to be the southdown angle
-    Iprime = average_angle(measurements, mt.NORTH_UP, metadata)
-    if Iprime >= 90:
-        Iprime -= 180
-    # get multiplier for hempisphere the observatory is located in
-    # 1 if observatory is in northern hemisphere
-    # -1 if observatory is in southern hemisphere
-    hs = metadata["hemisphere"]
-    # gather calculation objects for each measurement type
-    inclination_measurements = {
-        m: Calculate(
-            angle=average_angle(measurements, m, metadata),
-            residual=average_residual(measurements, m),
-            ordinate=average_ordinate(ordinates_index, m),
-            direction=m.direction,
-            shift=m.shift,
-            meridian=m.meridian,
-        )
-        for m in INCLINATION_TYPES
-    }
-
-    # set inclination value for looping = Iprime
-    inclination = Iprime
-    # add one to inclination value to enter the loop
-    Inclination = inclination + 1
-    # loop condition
-    while abs(Inclination - inclination) > 0.0001:
-        # set temporary inclination variable to hold previous step's inclination
-        Inclination = inclination
-        inclination *= 180 / np.pi
-        # calculate f for inclination measurement types
-        for m in INCLINATION_TYPES:
-            inclination_measurements[m].f = calculate_f(
-                inclination_measurements[m].ordinate, mean, inclination
-            )
-        # average f values for inclination measurement types
-        f = np.average([inclination_measurements[m].f for m in INCLINATION_TYPES])
-        # calculation inclination for each inclination measurement type and average
-        inclination = np.average(
-            [
-                calculate_measurement_inclination(inclination_measurements[m], hs)
-                for m in INCLINATION_TYPES
-            ]
-        )
-        inclination *= np.pi / 180
-    # loop exits once the difference in average inclination between steps is lower than 0.0001
-    inclination *= 180 / np.pi
-    return inclination, f + metadata["pier_correction"]
-
-
-def calculate_D(ordinates_index, measurements, measurements_index, metadata, h_b):
-    """
-    Calculate declination absolute and declination baseline from
-    ordinates, measurements, measurement_index(dictionary), azimuth and H baseline
-    Returns absolute and baseline for declination.
-    """
-
-    # average mark angles
-    average_mark = np.average(
-        [
-            convert_precision(m.angle, precision=metadata["precision"])
-            for m in measurements
-            if m.measurement_type in MARK_TYPES
-        ]
+    absoluteD = calculate_D_absolute(
+        azimuth=reading.azimuth,
+        h_baseline=absoluteH.baseline,
+        measurements=reading.measurements,
+        reference=reference,
+    )
+    # calculate scale
+    scale_value = calculate_scale_value(
+        corrected_f=corrected_f,
+        inclination=inclination,
+        measurements=reading[mt.NORTH_DOWN_SCALE],
     )
-    # add 100 if mark up is greater than mark down
-    # subtract 100 otherwise
-    if (
-        measurements_index[mt.FIRST_MARK_UP][0].angle
-        < measurements_index[mt.FIRST_MARK_DOWN][0].angle
-    ):
+    # create new reading object
+    calculated = Reading(
+        absolutes=[absoluteD, absoluteH, absoluteZ],
+        scale_value=scale_value,
+        # copy other attributes
+        **reading.dict(exclude={"absolutes", "scale_value"}),
+    )
+    return calculated
+
+
+def calculate_D_absolute(
+    measurements: List[Measurement],
+    azimuth: float,
+    h_baseline: float,
+    reference: Measurement,
+) -> Absolute:
+    """Calculate D absolute.
+
+    Parameters
+    ----------
+    measurements: list with at least declination and mark measurements.
+    azimuth: azimuth to mark in decimal degrees.
+    h_baseline: calculated H baseline value.
+    reference: reference measurement used to adjust absolute.
+
+    Returns
+    -------
+    D Absolute
+    """
+    # mean across all declination measurements
+    mean = average_measurement(measurements, DECLINATION_TYPES)
+    # average mark
+    average_mark = average_measurement(measurements, MARK_TYPES).angle
+    # adjust based on which is larger
+    mark_up = average_measurement(measurements, [mt.FIRST_MARK_UP]).angle
+    mark_down = average_measurement(measurements, [mt.FIRST_MARK_DOWN]).angle
+    if mark_up < mark_down:
         average_mark += 90
     else:
         average_mark -= 90
-
-    # gather calculation objects for each declination measurement type
-    declination_measurements = {
-        m: Calculate(
-            angle=average_angle(measurements_index, m, metadata),
-            residual=average_residual(measurements_index, m),
-            ordinate=average_ordinate(ordinates_index, m),
-            meridian=m.meridian,
-        )
-        for m in DECLINATION_TYPES
-    }
-
-    # convert azimuth to decimal degrees
-    azimuth = (
-        int(metadata["mark_azimuth"] / 100) + (metadata["mark_azimuth"] % 100) / 60
-    )
-    # average meridian terms calculated from each declination measurements
+    # declination measurements
+    declination_measurements = [
+        average_measurement(measurements, [t]) for t in DECLINATION_TYPES
+    ]
+    # average declination meridian
     meridian = np.average(
         [
-            calculate_meridian_term(declination_measurements[m], h_b)
-            for m in DECLINATION_TYPES
+            m.angle
+            + np.degrees(
+                m.measurement_type.meridian
+                * (np.arcsin(m.residual / np.sqrt((m.h + h_baseline) ** 2 + m.e ** 2)))
+            )
+            - np.degrees(np.arctan(m.e / (m.h + h_baseline)))
+            for m in declination_measurements
         ]
     )
     # add subtract average mark angle from average meridian angle and add
     # azimuth to get the declination baseline
-    D_b = (meridian - average_mark) + azimuth
-    d_b = round(D_b * 60, 2)
-    # convert baseline into decimal degrees
-    d_b_dec = from_dms(minutes=d_b)
-    # gather first declination measurements' H ans E ordinates
-    wd_E_1 = ordinates_index[mt.WEST_DOWN][0].e
-    wd_H_1 = ordinates_index[mt.WEST_DOWN][0].h
-    # calculate Declination baseline
-    d_abs = D_b + np.arctan(wd_E_1 / (h_b + wd_H_1)) * (180 / np.pi)
-    d_abs = round(d_abs * 60, 1)
-    # convert absolute into dms
-    d_abs_dms = int(d_abs / 60) * 100 + ((d_abs / 60) % 1) * 60
-    # convert absolute into decimal degrees
-    d_abs_dec = from_dms(
-        degrees=int(d_abs_dms / 100), minutes=float(str(d_abs_dms)[2::])
-    )
-
-    return d_b_dec, d_abs_dec
-
-
-def calculate_absolutes(f, inclination):
-    """
-    Calculate absolutes for H, Z and F from computed
-    average f value(from inclination computations) and
-    calculated inclination angle.
-    Returns baselines for H and Z
-    """
-    # convert inclination to radians
-    i = (np.pi / 180) * (inclination)
-    h_abs = f * np.cos(i)
-    z_abs = f * np.sin(i)
-
-    return h_abs, z_abs
-
-
-def calculate_baselines(h_abs, z_abs, mean):
-    """
-    Calculate baselines with H and Z absolutes,
-    average ordinates across all measurements,
-    and pier correction(metadata).
-    Returns H and Z baselines
+    d_b = (meridian - average_mark) + azimuth
+    # calculate absolute
+    d_abs = d_b + np.degrees(np.arctan(reference.e / (reference.h + h_baseline)))
+    return Absolute(element="D", absolute=d_abs, baseline=d_b)
+
+
+def calculate_HZ_absolutes(
+    inclination: float,
+    corrected_f: float,
+    mean: AverageMeasurement,
+    reference: Measurement,
+) -> Tuple[Absolute, Absolute]:
+    """Calculate H and Z absolutes.
+
+    Parameters
+    ----------
+    inclination: calculated inclination.
+    corrected_f: calculated f with pier correction.
+    mean: mean of inclination ordinates.
+    reference:  reference measurement used to adjust absolutes.
+
+    Returns
+    -------
+    Tuple
+        - H Absolute
+        - Z Absolute
     """
+    inclination_radians = np.radians(inclination)
+    h_abs = corrected_f * np.cos(inclination_radians)
+    z_abs = corrected_f * np.sin(inclination_radians)
     h_b = round(np.sqrt(h_abs ** 2 - mean.e ** 2) - mean.h, 1)
     z_b = round(z_abs - mean.z, 1)
+    # adjust absolutes to reference measurement
+    h_abs = np.sqrt((h_b + reference.h) ** 2 + (reference.e) ** 2)
+    z_abs = z_b + reference.z
+    # return absolutes
+    return (
+        Absolute(
+            element="H",
+            baseline=h_b,
+            absolute=h_abs,
+            starttime=mean.time,
+            endtime=mean.endtime,
+        ),
+        Absolute(
+            element="Z",
+            baseline=z_b,
+            absolute=z_abs,
+            starttime=mean.time,
+            endtime=mean.endtime,
+        ),
+    )
+
 
-    return h_b, z_b
+def calculate_I(
+    measurements: List[Measurement], hemisphere: Literal[-1, 1] = 1
+) -> Tuple[float, float, Measurement]:
+    """Calculate inclination and f from measurements.
 
+    Parameters
+    ----------
+    measurements: list with at least inclination measurements.
+    hemisphere: +1 for northern hemisphere (default), -1 for southern hemisphere.
 
-def calculate_scale(f, ordinates, measurements, inclination):
+    Returns
+    -------
+    Tuple
+        - inclination angle in decimal degrees
+        - uncorrected calculated f
+        - mean of inclination measurements
     """
-    Calculate scale value from calulated f(from inclination computations),
-    inclination, and the measurements/ordinates taken for scaling purposes.
-    Such measurements usually present themselves as a set of three North Down
-    measurements, where the final two measuremets(and ordinates) are used for scaling.
+    # mean across all inclination measurements
+    mean = average_measurement(measurements, INCLINATION_TYPES)
+    # mean within each type
+    inclination_measurements = [
+        average_measurement(measurements, [t]) for t in INCLINATION_TYPES
+    ]
+    # get initial inclination angle, assumed to be the southdown angle
+    inclination = average_measurement(measurements, [mt.NORTH_DOWN]).angle
+    if inclination >= 90:
+        inclination -= 180
+    # loop until inclination converges
+    last_inclination = inclination + 1
+    while abs(last_inclination - inclination) > 0.0001:
+        # set temporary inclination variable to hold previous step's inclination
+        last_inclination = inclination
+        # Update measurement f based on inclination
+        inclination_radians = np.radians(inclination)
+        for m in inclination_measurements:
+            m.f = (
+                mean.f
+                + (m.h - mean.h) * np.cos(inclination_radians)
+                + (m.z - mean.z) * np.sin(inclination_radians)
+                + ((m.e) ** 2 - (mean.e) ** 2) / (2 * mean.f)
+            )
+        # calculate average inclination
+        inclination = np.average(
+            [
+                (
+                    m.measurement_type.shift
+                    + m.measurement_type.meridian
+                    * (
+                        +m.angle
+                        + m.measurement_type.direction
+                        * (hemisphere * np.degrees(np.arcsin(m.residual / m.f)))
+                    )
+                )
+                for m in inclination_measurements
+            ]
+        )
+    # calculate uncorrected f
+    f = np.average([m.f for m in inclination_measurements])
+    return inclination, f, mean
+
+
+def calculate_scale_value(
+    measurements: List[Measurement], inclination: float, corrected_f: float
+) -> float:
+    """Calculate scale value.
+
+    Parameters
+    ----------
+    measurements: measurements to be used for scale calculation.
+        should have type NORTH_DOWN_SCALE.
+    inclination: calculated inclination.
+    corrected_f: calculated f with pier correction.
+
+    Returns
+    -------
+    Calculated scale value.
     """
-    first_ord = ordinates[0]
-    second_ord = ordinates[1]
-    first_measurement = measurements[0]
-    second_measurement = measurements[1]
-
+    inclination_radians = np.radians(inclination)
+    m1, m2 = measurements[0], measurements[-1]
     field_change = (
-        (
-            -np.sin(inclination * np.pi / 180) * (second_ord.h - first_ord.h) / f
-            + np.cos(inclination * np.pi / 180) * (second_ord.z - first_ord.z) / f
+        np.degrees(
+            (
+                -np.sin(inclination_radians) * (m2.h - m1.h)
+                + np.cos(inclination_radians) * (m2.z - m1.z)
+            )
+            / corrected_f
         )
-        * 180
-        / np.pi
+        + 0.1668
     )
-
-    field_change += 0.1668
-
-    residual_change = abs(second_measurement.residual - first_measurement.residual)
-
-    scale_value = (f * field_change / residual_change) * np.pi / 180
-
+    residual_change = m2.residual - m1.residual
+    scale_value = corrected_f * field_change / np.abs(residual_change)
     return scale_value
-
-
-def average_angle(measurements, type, metadata):
-    """
-    Compute average angle from a dictionary of
-    measurements and specified measurement type.
-    """
-    return np.average(
-        [
-            convert_precision(m.angle, metadata["precision"])
-            for m in measurements[type]
-            if not m.mask
-        ]
-    )
-
-
-def average_residual(measurements, type):
-    """
-    Compute average residual from a dictionary
-    of measurements and specified measurement type.
-    """
-    return np.average([m.residual for m in measurements[type] if not m.mask])
-
-
-def average_ordinate(ordinates, type):
-    """
-    Compute average ordinate from a dictionary
-    of ordinates and specified measurement type.
-    """
-    if type is not None:
-        ordinates = ordinates[type]
-        if type is mt.NORTH_DOWN:
-            ordinates = ordinates[0:2]
-    o = Ordinate(measurement_type=type)
-    avgs = np.average([[o.h, o.e, o.z, o.f] for o in ordinates], axis=0,)
-    o.h, o.e, o.z, o.f = avgs
-    return o
-
-
-def calculate_f(ordinate, mean, inclination):
-    """
-    calculate f for a measurement type using a measurement's
-    average ordinates, average ordinate across all measurements,
-    and calculated inclination.
-    """
-    # get channel means form all ordinates
-    # calculate f using current step's inclination angle
-    f = (
-        mean.f
-        + (ordinate.h - mean.h) * np.cos(inclination * np.pi / 180)
-        + (ordinate.z - mean.z) * np.sin(inclination * np.pi / 180)
-        + ((ordinate.e) ** 2 - (mean.e) ** 2) / (2 * mean.f)
-    )
-    return f
-
-
-def calculate_measurement_inclination(calculation, hs):
-    """
-    Calculate a measurement's inclination value using
-    Calculate items' elements.
-    """
-    return calculation.shift + calculation.meridian * (
-        +calculation.angle
-        + calculation.direction
-        * (hs * np.arcsin(calculation.residual / calculation.f) * 180 / np.pi)
-    )
-
-
-def calculate_meridian_term(calculation, h_b):
-    """
-    Calculate meridian value from a measurement type
-    using a Calculate object and H's baseline value.
-    """
-    A1 = np.arcsin(
-        calculation.residual
-        / np.sqrt((calculation.ordinate.h + h_b) ** 2 + (calculation.ordinate.e) ** 2)
-    )
-    A2 = np.arctan(calculation.ordinate.e / (calculation.ordinate.h + h_b))
-    A1 = (180 / np.pi) * (A1)
-    A2 = (180 / np.pi) * (A2)
-    meridian_term = calculation.angle + (calculation.meridian * A1) - A2
-    return meridian_term
-
-
-def convert_precision(angle, precision="DMS"):
-    """
-    Account for precision of instrument in decimal degrees
-    """
-    degrees = int(angle)
-    if precision == "DMS":
-        minutes = int((angle % 1) * 100) / 60
-        seconds = ((angle * 100) % 1) / 36
-    else:
-        minutes = (angle % 1) * 100 / 60
-        seconds = 0
-    dms = degrees + minutes + seconds
-
-    return dms
diff --git a/geomagio/residual/Measurement.py b/geomagio/residual/Measurement.py
index e1f98fa5b3065c70eaf3634930809880755dfe4d..27d98ed5c93837b2203521c29ef1d76be3870128 100644
--- a/geomagio/residual/Measurement.py
+++ b/geomagio/residual/Measurement.py
@@ -1,5 +1,7 @@
-from typing import Optional
+import collections
+from typing import Dict, List, Optional
 
+import numpy
 from obspy.core import UTCDateTime
 from pydantic import BaseModel
 
@@ -20,6 +22,79 @@ class Measurement(BaseModel):
 
     measurement_type: MeasurementType
     angle: float = 0
-    residual: float = None
+    residual: Optional[float] = None
     time: Optional[UTCDateTime] = None
-    mask: bool = False
+    h: Optional[float] = None
+    e: Optional[float] = None
+    z: Optional[float] = None
+    f: Optional[float] = None
+
+
+class AverageMeasurement(Measurement):
+    endtime: Optional[UTCDateTime] = None
+
+
+def average_measurement(
+    measurements: List[Measurement], types: List[MeasurementType] = None
+) -> AverageMeasurement:
+    """Calculate average from multiple measurements.
+
+    Parameters
+    ----------
+    measurements - source measurements for average
+    types - optional list of types to include, default all
+
+    Returns
+    -------
+    None - if no measurements
+    Otherwise, average of matching measurements.
+        Type is copied from first measurement.
+    """
+    if types:
+        measurements = [m for m in measurements if m.measurement_type in types]
+    if len(measurements) == 0:
+        # no measurements to average
+        return None
+    starttime = safe_min([m.time.timestamp for m in measurements if m.time])
+    endtime = safe_max([m.time.timestamp for m in measurements if m.time])
+    measurement = AverageMeasurement(
+        measurement_type=measurements[0].measurement_type,
+        angle=safe_average([m.angle for m in measurements]),
+        residual=safe_average([m.residual for m in measurements]),
+        time=starttime and UTCDateTime(starttime) or None,
+        endtime=endtime and UTCDateTime(endtime) or None,
+        h=safe_average([m.h for m in measurements]),
+        e=safe_average([m.e for m in measurements]),
+        z=safe_average([m.z for m in measurements]),
+        f=safe_average([m.f for m in measurements]),
+    )
+    return measurement
+
+
+def measurement_index(
+    measurements: List[Measurement],
+) -> Dict[MeasurementType, List[Measurement]]:
+    """Generate index of measurements keyed by MeasurementType.
+
+    Any missing MeasurementType returns an empty list.
+    There may be multiple measurements of each MeasurementType.
+    """
+    index = collections.defaultdict(list)
+    for m in measurements:
+        index[m.measurement_type].append(m)
+    return index
+
+
+def safe_average(l: List[Optional[float]]):
+    values = l and [f for f in l if f] or None
+    return values and numpy.nanmean(values) or None
+
+
+def safe_max(l: List[Optional[float]]):
+    values = l and [f for f in l if f] or None
+    return values and numpy.nanmax(values) or None
+
+
+def safe_min(l: List[Optional[float]]):
+    values = l and [f for f in l if f] or None
+    return values and numpy.nanmin(values) or None
diff --git a/geomagio/residual/MeasurementType.py b/geomagio/residual/MeasurementType.py
index 2b43b3b14c5f51d3ffbd41b104436cc71e481d50..f8e1216ae2e231632be66fcbf34af27cd9818d1a 100644
--- a/geomagio/residual/MeasurementType.py
+++ b/geomagio/residual/MeasurementType.py
@@ -1,6 +1,6 @@
 import enum
 
-# FIXME: add another measurement type that is a scaling measurement type
+
 class MeasurementType(str, enum.Enum):
     """Measurement types used during absolutes."""
 
@@ -35,17 +35,6 @@ class MeasurementType(str, enum.Enum):
         else:
             return -1
 
-    @property
-    def shift(self):
-        if self == MeasurementType.SOUTH_DOWN:
-            return -180
-        if self == MeasurementType.SOUTH_UP:
-            return 180
-        if self == MeasurementType.NORTH_UP:
-            return 0
-        if self == MeasurementType.NORTH_DOWN:
-            return 360
-
     @property
     def meridian(self):
         if self in [
@@ -57,3 +46,41 @@ class MeasurementType(str, enum.Enum):
             return 1
         else:
             return -1
+
+    @property
+    def shift(self):
+        if self == MeasurementType.SOUTH_DOWN:
+            return -180
+        if self == MeasurementType.SOUTH_UP:
+            return 180
+        if self == MeasurementType.NORTH_UP:
+            return 0
+        if self == MeasurementType.NORTH_DOWN:
+            return 360
+
+
+# define measurement types used to calculate declination
+DECLINATION_TYPES = [
+    MeasurementType.EAST_UP,
+    MeasurementType.EAST_DOWN,
+    MeasurementType.WEST_UP,
+    MeasurementType.WEST_DOWN,
+]
+
+
+# define measurement types used to calculate inclination
+INCLINATION_TYPES = [
+    MeasurementType.NORTH_DOWN,
+    MeasurementType.NORTH_UP,
+    MeasurementType.SOUTH_DOWN,
+    MeasurementType.SOUTH_UP,
+]
+
+
+# specify mark measurement types
+MARK_TYPES = [
+    MeasurementType.FIRST_MARK_DOWN,
+    MeasurementType.FIRST_MARK_UP,
+    MeasurementType.SECOND_MARK_DOWN,
+    MeasurementType.SECOND_MARK_UP,
+]
diff --git a/geomagio/residual/Ordinate.py b/geomagio/residual/Ordinate.py
deleted file mode 100644
index e42359f5dda73ea2bbb08845a7db876d07f1341f..0000000000000000000000000000000000000000
--- a/geomagio/residual/Ordinate.py
+++ /dev/null
@@ -1,22 +0,0 @@
-from .MeasurementType import MeasurementType
-from obspy.core import UTCDateTime
-from typing import Optional
-from pydantic import BaseModel
-from .. import pydantic_utcdatetime
-
-
-class Ordinate(BaseModel):
-    """One variometer reading per channel. Gathered from each measurement time within a Reading.
-
-    Attributes
-    ----------
-    measurement_type: type of measurement.
-    h, e, z, f: one variometer reading for each channel per measurement
-    """
-
-    measurement_type: MeasurementType = None
-    time: Optional[UTCDateTime] = None
-    h: float = 0
-    e: float = 0
-    z: float = 0
-    f: float = 0
diff --git a/geomagio/residual/Reading.py b/geomagio/residual/Reading.py
index 531eb6a46178d060e801127340f7fe9c4bd0a79c..098cef7667040d74c7d51e1e1153a4c72b4714a3 100644
--- a/geomagio/residual/Reading.py
+++ b/geomagio/residual/Reading.py
@@ -1,13 +1,15 @@
 import collections
 from typing import Dict, List, Optional
+from typing_extensions import Literal
 
+from obspy import Stream
 from pydantic import BaseModel
 
+from .. import TimeseriesUtility
+from ..TimeseriesFactory import TimeseriesFactory
 from .Absolute import Absolute
-from .Measurement import Measurement
+from .Measurement import AverageMeasurement, Measurement, average_measurement
 from .MeasurementType import MeasurementType
-from .Ordinate import Ordinate
-from .Calculation import calculate
 
 
 class Reading(BaseModel):
@@ -21,43 +23,77 @@ class Reading(BaseModel):
     measurements: raw measurements used to compute absolutes.
     metadata: metadata used during absolute calculations.
     pier_correction: pier correction value, nT.
+    scale_value: scale value in decimal degrees.
     """
 
-    absolutes: Optional[List[Absolute]] = None
+    absolutes: List[Absolute] = []
     azimuth: float = 0
-    hemisphere: float = 1
-    measurements: Optional[List[Measurement]] = []
-    ordinates: Optional[List[Ordinate]] = []
-    metadata: Optional[Dict] = []
+    hemisphere: Literal[-1, 1] = 1
+    measurements: List[Measurement] = []
+    metadata: Dict = {}
     pier_correction: float = 0
+    scale_value: float = None
 
-    def absolutes_index(self) -> Dict[str, Absolute]:
-        """Generate index of absolutes keyed by element.
-        """
-        return {a.element: a for a in self.absolutes}
+    def __getitem__(self, measurement_type: MeasurementType):
+        """Provide access to measurements by type.
 
-    def update_absolutes(self):
-        self.absolutes = calculate(self)
-        return self.absolutes
+        Example: reading[MeasurementType.WEST_DOWN]
+        """
+        return [m for m in self.measurements if m.measurement_type == measurement_type]
 
-    def measurement_index(self) -> Dict[MeasurementType, List[Measurement]]:
-        """Generate index of measurements keyed by MeasurementType.
+    def load_ordinates(
+        self,
+        observatory: str,
+        timeseries_factory: TimeseriesFactory,
+        default_existing: bool = True,
+    ):
+        """Load ordinates from a timeseries factory.
 
-        Any missing MeasurementType returns an empty list.
-        There may be multiple measurements of each MeasurementType.
+        Parameters
+        ----------
+        observatory: the observatory to load.
+        timeseries_factory: source of data.
+        default_existing: keep existing values if data not found.
         """
-        index = collections.defaultdict(list)
-        for m in self.measurements:
-            index[m.measurement_type].append(m)
-        return index
+        mean = average_measurement(self.measurements)
+        data = timeseries_factory.get_timeseries(
+            observatory=observatory,
+            channels=("H", "E", "Z", "F"),
+            interval="second",
+            type="variation",
+            starttime=mean.time,
+            endtime=mean.endtime,
+        )
+        self.update_measurement_ordinates(data, default_existing)
 
-    def ordinate_index(self) -> Dict[MeasurementType, List[Ordinate]]:
-        """Generate index of ordinates keyed by MeasurementType.
+    def update_measurement_ordinates(self, data: Stream, default_existing: bool = True):
+        """Update ordinates.
 
-        Any missing MeasurementType returns an empty list.
-        There may be multiple ordinates of each MeasurementType.
+        Parameters
+        ----------
+        data: source of data.
+        default_existing: keep existing values if data not found.
         """
-        index = collections.defaultdict(list)
-        for o in self.ordinates:
-            index[o.measurement_type].append(o)
-        return index
+        for measurement in self.measurements:
+            if not measurement.time:
+                continue
+            measurement.h = TimeseriesUtility.get_trace_value(
+                traces=data.select(channel="H"),
+                time=measurement.time,
+                default=default_existing and measurement.h or None,
+            )
+            measurement.e = TimeseriesUtility.get_trace_value(
+                traces=data.select(channel="E"),
+                time=measurement.time,
+                default=default_existing and measurement.e or None,
+            )
+            measurement.z = TimeseriesUtility.get_trace_value(
+                traces=data.select(channel="Z"),
+                time=measurement.time,
+                default=default_existing and measurement.z or None,
+            )
+            measurement.f = TimeseriesUtility.get_trace_value(
+                traces=data.select(channel="F"),
+                time=measurement.time,
+                default=default_existing and measurement.f or None,
+            )
diff --git a/geomagio/residual/SpreadsheetAbsolutesFactory.py b/geomagio/residual/SpreadsheetAbsolutesFactory.py
index 7631fc2ae229296124bffdec6546e7d2e910b594..889753e03311acb172dce54f52c6be23f50e903f 100644
--- a/geomagio/residual/SpreadsheetAbsolutesFactory.py
+++ b/geomagio/residual/SpreadsheetAbsolutesFactory.py
@@ -1,6 +1,7 @@
 import os
 from typing import Dict, List
 
+import numpy
 from obspy.core import UTCDateTime
 import openpyxl
 
@@ -9,7 +10,6 @@ from .Measurement import Measurement
 from .MeasurementType import MeasurementType as mt
 from .Reading import Reading
 from . import Angle
-from .Ordinate import Ordinate
 
 
 SPREADSHEET_MEASUREMENTS = [
@@ -187,17 +187,6 @@ SPREADSHEET_MEASUREMENTS = [
         "z": "E57",
         "f": "B57",
     },
-    {
-        "type": mt.NORTH_DOWN,
-        "angle": "D45",
-        "residual": "E45",
-        "time": "B45",
-        "h": "C58",
-        "e": "D58",
-        "z": "E58",
-        "f": "B58",
-        "mask": True,
-    },
     # scaling
     {
         "type": mt.NORTH_DOWN_SCALE,
@@ -292,22 +281,22 @@ class SpreadsheetAbsolutesFactory(object):
         absolutes = self._parse_absolutes(summary_sheet, metadata["date"])
         measurements = (
             include_measurements
-            and self._parse_measurements(measurement_sheet, metadata["date"],)
-            or None
-        )
-        ordinates = (
-            include_measurements
-            and self._parse_ordinates(measurement_sheet, metadata["date"])
+            and self._parse_measurements(
+                measurement_sheet, metadata["date"], metadata["precision"]
+            )
             or None
         )
+        mark_azimuth = metadata["mark_azimuth"]
         return Reading(
             absolutes=absolutes,
-            azimuth=metadata["mark_azimuth"],
+            azimuth=Angle.from_dms(
+                degrees=int(mark_azimuth / 100.0), minutes=mark_azimuth % 100,
+            ),
             hemisphere=metadata["hemisphere"],
             measurements=measurements,
-            ordinates=ordinates,
             metadata=metadata,
             pier_correction=metadata["pier_correction"],
+            scale_value=numpy.degrees(metadata["scale_value"]),
         )
 
     def _parse_absolutes(
@@ -343,56 +332,42 @@ class SpreadsheetAbsolutesFactory(object):
         return absolutes
 
     def _parse_measurements(
-        self, sheet: openpyxl.worksheet, base_date: str
+        self, sheet: openpyxl.worksheet, base_date: str, precision: str
     ) -> List[Measurement]:
         """Parse measurements from a measurement sheet.
         """
         measurements = []
         for m in SPREADSHEET_MEASUREMENTS:
             measurement_type = m["type"]
-            angle = "angle" in m and sheet[m["angle"]].value or None
+            angle = (
+                "angle" in m
+                and convert_precision(sheet[m["angle"]].value, precision)
+                or None
+            )
             residual = "residual" in m and sheet[m["residual"]].value or None
             time = (
                 "time" in m
                 and parse_relative_time(base_date, sheet[m["time"]].value)
                 or None
             )
-            mask = "mask" in m or False
+            h = "h" in m and sheet[m["h"]].value or None
+            e = "e" in m and sheet[m["e"]].value or None
+            z = "z" in m and sheet[m["z"]].value or None
+            f = "f" in m and sheet[m["f"]].value or None
             measurements.append(
                 Measurement(
                     measurement_type=measurement_type,
                     angle=angle,
                     residual=residual,
                     time=time,
-                    mask=mask,
+                    h=h,
+                    e=e,
+                    z=z,
+                    f=f,
                 )
             )
         return measurements
 
-    def _parse_ordinates(
-        self, sheet: openpyxl.worksheet, base_date: str
-    ) -> List[Ordinate]:
-        """Parse ordinates from a measurement sheet.
-        """
-        ordinates = []
-        for m in SPREADSHEET_MEASUREMENTS:
-            measurement_type = m["type"]
-            h = "h" in m and sheet[m["h"]].value or 0.0
-            e = "e" in m and sheet[m["e"]].value or 0.0
-            z = "z" in m and sheet[m["z"]].value or 0.0
-            f = "f" in m and sheet[m["f"]].value or 0.0
-            time = (
-                "time" in m
-                and parse_relative_time(base_date, sheet[m["time"]].value)
-                or None
-            )
-            ordinates.append(
-                Ordinate(
-                    measurement_type=measurement_type, h=h, e=e, z=z, f=f, time=time,
-                )
-            )
-        return ordinates
-
     def _parse_metadata(
         self,
         constants_sheet: openpyxl.worksheet,
@@ -421,8 +396,23 @@ class SpreadsheetAbsolutesFactory(object):
             "observer": measurement_sheet["E8"].value,
             "pier_correction": calculation_sheet["I24"].value,
             "pier_name": summary_sheet["B5"].value,
+            "scale_value": summary_sheet["D33"].value,
             "station": measurement_sheet["A8"].value,
             "temperature": constants_sheet["J58"].value,
             "year": year,
             "precision": measurement_sheet["H8"].value,
         }
+
+
+def convert_precision(angle, precision="DMS"):
+    """
+    Account for precision of instrument in decimal degrees
+    """
+    degrees = int(angle)
+    if precision == "DMS":
+        minutes = int((angle % 1) * 100)
+        seconds = ((angle * 100) % 1) * 100
+    else:
+        minutes = (angle % 1) * 100
+        seconds = 0
+    return Angle.from_dms(degrees, minutes, seconds)
diff --git a/geomagio/residual/WebAbsolutesFactory.py b/geomagio/residual/WebAbsolutesFactory.py
index 0a9e1673218a31cea9a9f4cd3e78109dd322ec3a..120734293ff682c225d7d850f387bf6ab5d8c9fd 100644
--- a/geomagio/residual/WebAbsolutesFactory.py
+++ b/geomagio/residual/WebAbsolutesFactory.py
@@ -67,6 +67,10 @@ class WebAbsolutesFactory(object):
             angle=data["angle"],
             residual=0,
             time=data["time"] and UTCDateTime(data["time"]) or None,
+            h=data["h"],
+            e=data["e"],
+            z=data["z"],
+            f=data["f"],
         )
 
     def _parse_metadata(self, data: Mapping) -> Dict:
diff --git a/geomagio/residual/__init__.py b/geomagio/residual/__init__.py
index 9ec21abb70a7b1603e4008d45ce4b09ddf9aa3d8..59e03297abe1d279223f1a63e34a096e1322ebf6 100644
--- a/geomagio/residual/__init__.py
+++ b/geomagio/residual/__init__.py
@@ -1,11 +1,23 @@
 # residual module
 from __future__ import absolute_import
 
-from .Absolute import Absolute
 from . import Angle
+from .Absolute import Absolute
+from .Calculation import (
+    calculate,
+    calculate_D_absolute,
+    calculate_HZ_absolutes,
+    calculate_I,
+    calculate_scale_value,
+)
 from .CalFileFactory import CalFileFactory
-from .Measurement import Measurement
-from .MeasurementType import MeasurementType
+from .Measurement import Measurement, AverageMeasurement, average_measurement
+from .MeasurementType import (
+    MeasurementType,
+    DECLINATION_TYPES,
+    INCLINATION_TYPES,
+    MARK_TYPES,
+)
 from .Reading import Reading
 from .SpreadsheetAbsolutesFactory import SpreadsheetAbsolutesFactory
 from .WebAbsolutesFactory import WebAbsolutesFactory
@@ -13,7 +25,17 @@ from .WebAbsolutesFactory import WebAbsolutesFactory
 __all__ = [
     "Absolute",
     "Angle",
-    "CalFileFactory",
+    "AverageMeasurement",
+    "average_measurement",
+    "CalFileFactory",
+    "calculate",
+    "calculate_D_absolute",
+    "calculate_HZ_absolutes",
+    "calculate_I",
+    "calculate_scale_value",
+    "DECLINATION_TYPES",
+    "INCLINATION_TYPES",
+    "MARK_TYPES",
     "Measurement",
     "MeasurementType",
     "Reading",
diff --git a/test/TimeseriesUtility_test.py b/test/TimeseriesUtility_test.py
index 25a93f404f220d7dfad9faf9b0c2e1d006d47819..8c036f7758412d61063e61f371cb539d44679b87 100644
--- a/test/TimeseriesUtility_test.py
+++ b/test/TimeseriesUtility_test.py
@@ -170,6 +170,48 @@ def test_get_merged_gaps():
     assert_equal(gap[1], UTCDateTime("2015-01-01T00:00:07Z"))
 
 
+def test_get_trace_values():
+    """TimeseriesUtility_test.test_get_trace_values()
+    """
+    stream = Stream(
+        [
+            __create_trace("H", [numpy.nan, 1, 1, numpy.nan, numpy.nan]),
+            __create_trace("Z", [0, 0, 0, 1, 1, 1]),
+        ]
+    )
+    for trace in stream:
+        # set time of first sample
+        trace.stats.starttime = UTCDateTime("2015-01-01T00:00:00Z")
+        # set sample rate to 1 second
+        trace.stats.delta = 1
+        trace.stats.npts = len(trace.data)
+    print(stream)
+    print(stream.select(channel="H")[0].times("utcdatetime"))
+    # value that doesn't exist
+    assert_equal(
+        TimeseriesUtility.get_trace_value(
+            traces=stream.select(channel="H"), time=UTCDateTime("2015-01-01T00:00:00Z")
+        ),
+        None,
+    )
+    # value that exists
+    assert_equal(
+        TimeseriesUtility.get_trace_value(
+            traces=stream.select(channel="Z"), time=UTCDateTime("2015-01-01T00:00:00Z")
+        ),
+        0,
+    )
+    # default for value that doesn't exist
+    assert_equal(
+        TimeseriesUtility.get_trace_value(
+            traces=stream.select(channel="H"),
+            time=UTCDateTime("2015-01-01T00:00:03Z"),
+            default=4,
+        ),
+        4,
+    )
+
+
 def test_has_all_channels():
     """TimeseriesUtility_test.test_has_all_channels():
     """
diff --git a/test/residual_test/residual_test.py b/test/residual_test/residual_test.py
index 726b7bbf8f41b8d3aea0f6b75eb0d684dadf7c3c..77fa0c9a5b6ce6f5821766ba626cdceca65ed933 100644
--- a/test/residual_test/residual_test.py
+++ b/test/residual_test/residual_test.py
@@ -1,79 +1,68 @@
-from geomagio.residual import SpreadsheetAbsolutesFactory
 from numpy.testing import assert_almost_equal
 import pytest
 
+from geomagio.residual import calculate, Reading, SpreadsheetAbsolutesFactory
 
-class test_functions:
-    @staticmethod
-    def get_absolutes(tmp_path):
-        """
-        Tests functionality of SpreadsheetAbsolutesFactory and recalculation of absolutes
-        """
-        # establish SpreadsheetAbsolutesFactory for reading test data from Excel
-        saf = SpreadsheetAbsolutesFactory()
-        # Read spreadsheet containing test data
-        reading = saf.parse_spreadsheet(path=tmp_path)
-        # establish original absolute object
-        original = reading.absolutes_index()
-        # recalculate absolute object using Calculation.py
-        reading.update_absolutes()
-        # establish recalculated absolute object
-        result = reading.absolutes_index()
-        return original, result
 
-    @staticmethod
-    def assert_absolutes(original, result):
-        """
-        Compares calculation results to original absolutes from spreadsheet
-        """
-        assert_almost_equal(
-            [original["H"].absolute, original["H"].baseline],
-            [result["H"].absolute, result["H"].baseline],
-            decimal=4,
-            verbose=True,
-        )
-        assert_almost_equal(
-            [original["D"].absolute, original["D"].baseline],
-            [result["D"].absolute, result["D"].baseline],
-            decimal=4,
-            verbose=True,
-        )
-        assert_almost_equal(
-            [original["Z"].absolute, original["Z"].baseline],
-            [result["Z"].absolute, result["Z"].baseline],
-            decimal=4,
-            verbose=True,
-        )
+def assert_readings_equal(expected: Reading, actual: Reading):
+    """
+    Compares recalculated (actual) absolutes against the expected absolutes from the spreadsheet
+    """
+    expected_absolutes = {a.element: a for a in expected.absolutes}
+    actual_absolutes = {a.element: a for a in actual.absolutes}
+    assert_almost_equal(
+        [expected_absolutes["H"].absolute, expected_absolutes["H"].baseline],
+        [actual_absolutes["H"].absolute, actual_absolutes["H"].baseline],
+        decimal=4,
+        verbose=True,
+    )
+    assert_almost_equal(
+        [expected_absolutes["D"].absolute, expected_absolutes["D"].baseline],
+        [actual_absolutes["D"].absolute, actual_absolutes["D"].baseline],
+        decimal=3,
+        verbose=True,
+    )
+    assert_almost_equal(
+        [expected_absolutes["Z"].absolute, expected_absolutes["Z"].baseline],
+        [actual_absolutes["Z"].absolute, actual_absolutes["Z"].baseline],
+        decimal=4,
+        verbose=True,
+    )
+    assert_almost_equal(
+        expected.scale_value, actual.scale_value, decimal=1, verbose=True
+    )
 
 
-@pytest.fixture
-def test_session():
-    return test_functions
+def compare_spreadsheet_absolutes(path):
+    """
+    Parses the spreadsheet at *path* with SpreadsheetAbsolutesFactory and returns the Reading
+    """
+    # establish SpreadsheetAbsolutesFactory for reading test data from Excel
+    saf = SpreadsheetAbsolutesFactory()
+    # Read spreadsheet containing test data
+    reading = saf.parse_spreadsheet(path=path)
+    return reading
 
 
-def test_DED_20140952332(test_session):
+def test_DED_20140952332():
     """
     Compare calulations to original absolutes obejct from Spreadsheet.
     Tests gathering of Dedhorse's metadata for use by calculations.
     Tests calculations for measurements in units of DMS.
     """
     # gather absolute from DED test data and recalculate
-    original, result = test_session.get_absolutes(
-        tmp_path="etc/residual/DED-20140952332.xlsm"
-    )
+    reading = compare_spreadsheet_absolutes(path="etc/residual/DED-20140952332.xlsm")
     # test results with original spreadsheet values
-    test_session.assert_absolutes(original, result)
+    assert_readings_equal(reading, calculate(reading))
 
 
-def test_BRW_20133650000(test_session):
+def test_BRW_20133650000():
     """
     Compare calulations to original absolutes obejct from Spreadsheet.
     Tests gathering of BRW's metadata for use by calculations.
     Tests calculations for measurements in units of DM.
     """
-    # gather absolute from BRW test data and recalculate
-    original, result = test_session.get_absolutes(
-        tmp_path="etc/residual/BRW-20133650000.xlsm"
-    )
+    # gather absolute from BRW test data and recalculate
+    reading = compare_spreadsheet_absolutes(path="etc/residual/BRW-20133650000.xlsm")
     # test results with original spreadsheet values
-    test_session.assert_absolutes(original, result)
+    assert_readings_equal(reading, calculate(reading))