diff --git a/code.json b/code.json
index f995592be9d66da73e0b375a7e0e72e9dd8d5209..4d18977a99b6e7910522140cd439aa33e5ac1014 100644
--- a/code.json
+++ b/code.json
@@ -3,7 +3,7 @@
     "name": "geomag-algorithms",
     "organization": "U.S. Geological Survey",
     "description": "Library for processing Geomagnetic timeseries data.",
-    "version": "1.6.5",
+    "version": "1.7.0",
     "status": "Development",
     "permissions": {
       "usageType": "openSource",
@@ -35,7 +35,7 @@
       "email": "gs-haz_dev_team_group@usgs.gov"
     },
     "date": {
-      "metadataLastUpdated": "2023-03-29"
+      "metadataLastUpdated": "2023-04-11"
     }
   }
 ]
diff --git a/geomagio/Metadata.py b/geomagio/Metadata.py
index 99486667e5bacd36419ca0d01dbe032e50f36b60..1f4b4019d137a65acea7e085862eb1c5ee816025 100644
--- a/geomagio/Metadata.py
+++ b/geomagio/Metadata.py
@@ -357,9 +357,9 @@ _INSTRUMENT_METADATA = [
             "channels": {
                 # each channel maps to a list of components to calculate nT
                 # TODO: calculate these lists based on "FGE" type
-                "U": [{"channel": "U_Volt", "offset": 0, "scale": 314.28}],
+                "U": [{"channel": "U_Volt", "offset": 35989, "scale": 314.28}],
                 "V": [{"channel": "V_Volt", "offset": 0, "scale": 310.48}],
-                "W": [{"channel": "W_Volt", "offset": 0, "scale": 317.5}],
+                "W": [{"channel": "W_Volt", "offset": 7654, "scale": 317.5}],
             },
             # this info should get updated when available
             "electronics": {
@@ -405,25 +405,25 @@ _INSTRUMENT_METADATA = [
     {
         "network": "NT",
         "station": "HOT",
-        "start_time": None,
+        "start_time": "2022-07-14T16:07:30.000Z",
         "end_time": None,
         "instrument": {
             "type": "FGE",
             "channels": {
                 # each channel maps to a list of components to calculate nT
                 # TODO: calculate these lists based on "FGE" type
-                "U": [{"channel": "U_Volt", "offset": 0, "scale": 971.8}],
-                "V": [{"channel": "V_Volt", "offset": 0, "scale": 970.6}],
-                "W": [{"channel": "W_Volt", "offset": 0, "scale": 960.2}],
+                "U": [{"channel": "U_Volt", "offset": 27123, "scale": 315.4}],
+                "V": [{"channel": "V_Volt", "offset": 0, "scale": 315.0}],
+                "W": [{"channel": "W_Volt", "offset": 21158, "scale": 311.7}],
             },
             # this info should get updated when available
             "electronics": {
                 "serial": "E558",
                 # these scale values are used to convert voltage
                 # these are calculated using Ohm's law, given scaling resistor value, and given nT/mA values.
-                "x-scale": 971.8,  # nT/V
-                "y-scale": 970.6,  # nT/V
-                "z-scale": 960.2,  # nT/V
+                "x-scale": 315.4,  # nT/V
+                "y-scale": 315.0,  # nT/V
+                "z-scale": 311.7,  # nT/V
                 "temperature-scale": 0.01,  # V/K
             },
             "sensor": {
@@ -560,9 +560,9 @@ _INSTRUMENT_METADATA = [
             "channels": {
                 # each channel maps to a list of components to calculate nT
                 # TODO: calculate these lists based on "FGE" type
-                "U": [{"channel": "U_Volt", "offset": 0, "scale": 313.2034}],
+                "U": [{"channel": "U_Volt", "offset": 27047, "scale": 313.2034}],
                 "V": [{"channel": "V_Volt", "offset": 0, "scale": 312.2797}],
-                "W": [{"channel": "W_Volt", "offset": 0, "scale": 311.9576}],
+                "W": [{"channel": "W_Volt", "offset": 24288, "scale": 311.9576}],
             },
             # this info should get updated when available
             "electronics": {
@@ -585,24 +585,90 @@ _INSTRUMENT_METADATA = [
     {
         "network": "NT",
         "station": "TUC",
-        "start_time": None,
+        "start_time": "2023-03-29T00:40:00.000Z",
         "end_time": None,
         "instrument": {
-            "type": "Narod",
+            "type": "FGE",
             "channels": {
-                "U": [
-                    {"channel": "U_Volt", "offset": 0, "scale": 100},
-                    {"channel": "U_Bin", "offset": 0, "scale": 500},
-                ],
-                "V": [
-                    {"channel": "V_Volt", "offset": 0, "scale": 100},
-                    {"channel": "V_Bin", "offset": 0, "scale": 500},
-                ],
-                "W": [
-                    {"channel": "W_Volt", "offset": 0, "scale": 100},
-                    {"channel": "W_Bin", "offset": 0, "scale": 500},
-                ],
+                # each channel maps to a list of components to calculate nT
+                # TODO: calculate these lists based on "FGE" type
+                "U": [{"channel": "U_Volt", "offset": 24024, "scale": 978.355}],
+                "V": [{"channel": "V_Volt", "offset": 0, "scale": 965.901}],
+                "W": [{"channel": "W_Volt", "offset": 40040, "scale": 954.543}],
+            },
+            # this info should get updated when available
+            "electronics": {
+                "serial": "E571",
+                # these scale values are calculated manually
+                "x-scale": 978.355,  # nT/V
+                "y-scale": 965.901,  # nT/V
+                "z-scale": 954.543,  # nT/V
+                "temperature-scale": 0.01,  # V/K
+            },
+            "sensor": {
+                "serial": "S0446",
+                # these constants combine with instrument setting for offset
+                "x-constant": 37471,  # nT/mA
+                "y-constant": 36994,  # nT/mA
+                "z-constant": 36559,  # nT/mA
             },
         },
     },
+    ## Leaving these old objects in comments in case we want to save them in the actual database as historical data
+    # {
+    #     "network": "NT",
+    #     "station": "TUC",
+    #     "start_time": None,
+    #     "end_time": "2023-03-29T00:00:00.000Z",
+    #     "instrument": {
+    #         "type": "Narod",
+    #         "channels": {
+    #             "U": [
+    #                 {"channel": "U_Volt", "offset": 0, "scale": 100},
+    #                 {"channel": "U_Bin", "offset": 0, "scale": 500},
+    #             ],
+    #             "V": [
+    #                 {"channel": "V_Volt", "offset": 0, "scale": 100},
+    #                 {"channel": "V_Bin", "offset": 0, "scale": 500},
+    #             ],
+    #             "W": [
+    #                 {"channel": "W_Volt", "offset": 0, "scale": 100},
+    #                 {"channel": "W_Bin", "offset": 0, "scale": 500},
+    #             ],
+    #         },
+    #     },
+    # },
+    # {
+    #     "network": "NT",
+    #     "station": "HOT",
+    #     "start_time": None,
+    #     "end_time": "2022-07-14T16:07:30.000Z",
+    #     "instrument": {
+    #         "type": "FGE",
+    #         "channels": {
+    #             # each channel maps to a list of components to calculate nT
+    #             # TODO: calculate these lists based on "FGE" type
+    #             "U": [{"channel": "U_Volt", "offset": 0, "scale": 971.8}],
+    #             "V": [{"channel": "V_Volt", "offset": 0, "scale": 970.6}],
+    #             "W": [{"channel": "W_Volt", "offset": 0, "scale": 960.2}],
+    #         },
+    #         # this info should get updated when available
+    #         "electronics": {
+    #             "serial": "E558",
+    #             # these scale values are used to convert voltage
+    #             # these are calculated using Ohm's law, given scaling resistor value, and given nT/mA values.
+    #             "x-scale": 971.8,  # nT/V
+    #             "y-scale": 970.6,  # nT/V
+    #             "z-scale": 960.2,  # nT/V
+    #             "temperature-scale": 0.01,  # V/K
+    #         },
+    #         "sensor": {
+    #             "serial": "S0428",
+    #             # these constants combine with instrument setting for offset
+    #             "x-constant": 37220,  # nT/mA
+    #             "y-constant": 37175,  # nT/mA
+    #             "z-constant": 36775,  # nT/mA
+    #         },
+    #     },
+    # },
 ]
diff --git a/geomagio/edge/EdgeFactory.py b/geomagio/edge/EdgeFactory.py
index 525999834e0e0dd383e6a9d33fe310cd83454db7..d07f3fa5e194ea9792360bca048773171792ddd8 100644
--- a/geomagio/edge/EdgeFactory.py
+++ b/geomagio/edge/EdgeFactory.py
@@ -333,6 +333,10 @@ class EdgeFactory(TimeseriesFactory):
             element=channel,
             location=self.locationCode,
         )
+        # geomag-algorithms *should* treat starttime/endtime as inclusive everywhere;
+        # according to its author, EdgeCWB is inclusive of starttime, but exclusive of
+        # endtime, to satisfy seismic standards/requirements, to precision delta/2;
+        half_delta = TimeseriesUtility.get_delta_from_interval(interval) / 2
         try:
             data = self.client.get_waveforms(
                 sncl.network,
@@ -340,7 +344,7 @@ class EdgeFactory(TimeseriesFactory):
                 sncl.location,
                 sncl.channel,
                 starttime,
-                endtime,
+                endtime + half_delta,
             )
         except TypeError:
             # get_waveforms() fails if no data is returned from Edge
diff --git a/geomagio/edge/MiniSeedFactory.py b/geomagio/edge/MiniSeedFactory.py
index ff03c0406497c2a5bfab0c2b5f1dccdf2f279d42..daf02420c82e3fd0f21b14d41d4f9b6c1e5428ed 100644
--- a/geomagio/edge/MiniSeedFactory.py
+++ b/geomagio/edge/MiniSeedFactory.py
@@ -358,9 +358,20 @@ class MiniSeedFactory(TimeseriesFactory):
             element=channel,
             location=self.locationCode,
         )
+        # geomag-algorithms *should* treat starttime/endtime as inclusive everywhere;
+        # according to its author, EdgeCWB is inclusive of starttime, but exclusive of
+        # endtime, to satisfy seismic standards/requirements, to precision delta/2;
+        half_delta = TimeseriesUtility.get_delta_from_interval(interval) / 2
         data = self.client.get_waveforms(
-            sncl.network, sncl.station, sncl.location, sncl.channel, starttime, endtime
+            sncl.network,
+            sncl.station,
+            sncl.location,
+            sncl.channel,
+            starttime,
+            endtime + half_delta,
         )
+        for trace in data:
+            trace.data = trace.data.astype(data[0].data.dtype)
         data.merge()
         if data.count() == 0 and add_empty_channels:
             data += self._get_empty_trace(
diff --git a/geomagio/iaga2002/IAGA2002Writer.py b/geomagio/iaga2002/IAGA2002Writer.py
index bd215ef98deae916d2cbdf83610b25b4d5d1adc2..4aac8db92a27426ec88ef4bc48d9dd7e9ab8f60a 100644
--- a/geomagio/iaga2002/IAGA2002Writer.py
+++ b/geomagio/iaga2002/IAGA2002Writer.py
@@ -87,11 +87,15 @@ class IAGA2002Writer(object):
                 self._format_header("Sensor Orientation", stats.sensor_orientation)
             )
         if "sensor_sampling_rate" in stats:
-            buf.append(
-                self._format_header(
-                    "Digital Sampling", str(1 / stats.sensor_sampling_rate) + " second"
+            try:
+                buf.append(
+                    self._format_header(
+                        "Digital Sampling",
+                        str(1 / stats.sensor_sampling_rate) + " second",
+                    )
                 )
-            )
+            except TypeError:
+                buf.append(self._format_header("Digital Sampling", ""))
         if "data_interval_type" in stats:
             buf.append(
                 self._format_header("Data Interval Type", stats.data_interval_type)
diff --git a/geomagio/residual/Angle.py b/geomagio/residual/Angle.py
index 06ce1c89354bd7e4726dcc9677a118ec084c3c68..57ae4bcd91af648c7da367d25f21d4b53a728655 100644
--- a/geomagio/residual/Angle.py
+++ b/geomagio/residual/Angle.py
@@ -3,7 +3,10 @@ from typing import List
 
 def from_dms(degrees: float = 0, minutes: float = 0, seconds: float = 0) -> float:
     """Convert degrees, minutes, seconds to decimal degrees"""
-    return degrees + (minutes / 60.0) + (seconds / 3600.0)
+    try:
+        return degrees + (minutes / 60.0) + (seconds / 3600.0)
+    except:
+        pass
 
 
 def to_dms(degrees: float) -> List[float]:
diff --git a/geomagio/residual/SpreadsheetAbsolutesFactory.py b/geomagio/residual/SpreadsheetAbsolutesFactory.py
index 00e3d4c4e4f117c7c9b259febd4d4d94fdf92e61..3facd71000769ba0676a097cc14afe3f0351c135 100644
--- a/geomagio/residual/SpreadsheetAbsolutesFactory.py
+++ b/geomagio/residual/SpreadsheetAbsolutesFactory.py
@@ -17,7 +17,7 @@ from .Measurement import Measurement
 from .MeasurementType import MeasurementType as mt
 from .Reading import Reading
 from . import Angle
-
+from .SpreadsheetSummaryFactory import SpreadsheetSummaryFactory
 
 SPREADSHEET_MEASUREMENTS = [
     # first mark
@@ -233,6 +233,44 @@ def parse_relative_time(base_date: str, time: str) -> UTCDateTime:
         return None
 
 
+def get_summary_flags(
+    factory: str,
+    observatory: str,
+    starttime: UTCDateTime,
+    endtime: UTCDateTime,
+    absolute_time: UTCDateTime,
+) -> List[Reading]:
+    """Get valid flags from SpreadsheetSummaryFactory.
+
+    Parameters
+    ----------
+    factory
+        configured SpreadsheetSummaryFactory
+    observatory
+        search observatory
+    starttime
+        search start time
+    endtime
+        search end time
+    absolute_time
+        time of the absolute
+
+    Returns
+    -------
+    Array of valid flags for absolutes from summary sheets
+    """
+    readings = factory.get_readings(
+        observatory=observatory,
+        starttime=UTCDateTime(starttime),
+        endtime=UTCDateTime(endtime),
+    )
+
+    for reading in readings:
+        if reading.time == absolute_time:
+            summary_flags = [v.valid for v in reading.absolutes]
+    return summary_flags
+
+
 class SpreadsheetAbsolutesFactory(object):
     """Read absolutes from residual spreadsheets.
 
@@ -268,12 +306,17 @@ class SpreadsheetAbsolutesFactory(object):
                         readings.append(
                             self.parse_spreadsheet(
                                 path=os.path.join(dirpath, filename),
+                                observatory=observatory,
+                                starttime=starttime,
+                                endtime=endtime,
                                 include_measurements=include_measurements,
                             )
                         )
         return readings
 
-    def parse_spreadsheet(self, path: str, include_measurements=True) -> Reading:
+    def parse_spreadsheet(
+        self, observatory, starttime, endtime, path: str, include_measurements=True
+    ) -> Reading:
         """Parse a residual spreadsheet file.
 
         Be sure to check Reading metadata for errors.
@@ -286,7 +329,9 @@ class SpreadsheetAbsolutesFactory(object):
         metadata = self._parse_metadata(
             constants_sheet, measurement_sheet, calculation_sheet, summary_sheet
         )
-        absolutes = self._parse_absolutes(summary_sheet, metadata["date"])
+        absolutes = self._parse_absolutes(
+            summary_sheet, observatory, starttime, endtime, metadata["date"]
+        )
         measurements = (
             include_measurements
             and self._parse_measurements(
@@ -310,9 +355,26 @@ class SpreadsheetAbsolutesFactory(object):
         )
 
     def _parse_absolutes(
-        self, sheet: openpyxl.worksheet, base_date: str
+        self,
+        sheet: openpyxl.worksheet,
+        observatory,
+        starttime,
+        endtime,
+        base_date: str,
     ) -> List[Absolute]:
         """Parse absolutes from a summary sheet."""
+        # absolute time should be the same in each spreadsheet
+        absolute_time = parse_relative_time(
+            base_date, "{:04d}".format(sheet["B12"].value)
+        )
+        # pull valid flags from summary spreadsheets and match with absolute_time
+        summary_flags = get_summary_flags(
+            factory=SpreadsheetSummaryFactory(self.base_directory),
+            observatory=observatory,
+            starttime=UTCDateTime(starttime),
+            endtime=UTCDateTime(endtime),
+            absolute_time=absolute_time,
+        )
         absolutes = [
             Absolute(
                 element="D",
@@ -330,6 +392,7 @@ class SpreadsheetAbsolutesFactory(object):
                     base_date,
                     "{:04d}".format(sheet["B12"].value),
                 ),
+                valid=summary_flags[0],
             ),
             Absolute(
                 element="H",
@@ -345,6 +408,7 @@ class SpreadsheetAbsolutesFactory(object):
                     base_date,
                     "{:04d}".format(sheet["B17"].value),
                 ),
+                valid=summary_flags[1],
             ),
             Absolute(
                 element="Z",
@@ -360,6 +424,7 @@ class SpreadsheetAbsolutesFactory(object):
                     base_date,
                     "{:04d}".format(sheet["B22"].value),
                 ),
+                valid=summary_flags[2],
             ),
         ]
         return absolutes
diff --git a/geomagio/residual/SpreadsheetSummaryFactory.py b/geomagio/residual/SpreadsheetSummaryFactory.py
index 7a1f5082c11e88c3fd26f5637eb3251e25f049e9..98ee84d4724b42b88ce59bd42bc0836ee2572a13 100644
--- a/geomagio/residual/SpreadsheetSummaryFactory.py
+++ b/geomagio/residual/SpreadsheetSummaryFactory.py
@@ -7,7 +7,32 @@ from typing import List
 from .Absolute import Absolute
 from . import Angle
 from .Reading import Reading
-from .SpreadsheetAbsolutesFactory import parse_relative_time
+
+
+def parse_relative_time(base_date: str, time: str) -> UTCDateTime:
+    """Parse a relative date.
+
+    Arguments
+    ---------
+    base_date: date when time occurs (YYYYMMDD)
+    time: time on base_date (HHMMSS) or (HHMM)
+    """
+    try:
+        time = "{0:04d}".format(time)
+        return UTCDateTime(f"{base_date}T{time}")
+    except Exception as e:
+        print(f"error parsing relative date '{base_date}T{time}': {e}")
+        return None
+
+
+def convert_baseline(
+    baseline: float = 0,
+) -> float:
+    """Convert baseline from seconds to minutes"""
+    try:
+        return baseline / 60
+    except:
+        pass
 
 
 class SpreadsheetSummaryFactory(object):
@@ -78,14 +103,23 @@ class SpreadsheetSummaryFactory(object):
 
         Outputs
         -------
-        List of valid readings from spreadsheet.
-        If all readings are valid, 4 readings are returned
+        List of readings from spreadsheet.
+        Readings are returned regardless of whether they are valid.
         """
         metadata = self._parse_metadata(sheet)
         date = sheet["I1"].value
         base_date = f"{date.year}{date.month:02}{date.day:02}"
         readings = []
-        for d_n in range(10, 14):
+        # count the measurement sets present (non-empty cells in column C, rows 10-13):
+        sets_len = len(
+            [
+                sheet.cell(row=i, column=3).value
+                for i in range(10, 14)
+                if sheet.cell(row=i, column=3).value is not None
+            ]
+        )
+
+        for d_n in range(10, 10 + sets_len):
             h_n = d_n + 14
             v_n = d_n + 28
             absolutes = [
@@ -94,48 +128,45 @@ class SpreadsheetSummaryFactory(object):
                     absolute=Angle.from_dms(
                         degrees=sheet[f"C{d_n}"].value, minutes=sheet[f"D{d_n}"].value
                     ),
-                    baseline=sheet[f"H{d_n}"].value / 60,
+                    baseline=convert_baseline(sheet[f"H{d_n}"].value),
                     starttime=parse_relative_time(
-                        base_date, "{0:04d}".format(sheet[f"B{v_n}"].value)
+                        base_date, time=(sheet[f"B{v_n}"].value)
                     ),
                     endtime=parse_relative_time(
-                        base_date, "{0:04d}".format(sheet[f"B{d_n}"].value)
+                        base_date, time=(sheet[f"B{d_n}"].value)
                     ),
+                    valid=not bool(sheet[f"J{d_n}"].value),
                 ),
                 Absolute(
                     element="H",
                     absolute=sheet[f"D{h_n}"].value,
                     baseline=sheet[f"H{h_n}"].value,
                     starttime=parse_relative_time(
-                        base_date, "{0:04d}".format(sheet[f"B{v_n}"].value)
+                        base_date, time=(sheet[f"B{v_n}"].value)
                     ),
                     endtime=parse_relative_time(
-                        base_date, "{0:04d}".format(sheet[f"B{h_n}"].value)
+                        base_date, time=(sheet[f"B{h_n}"].value)
                     ),
+                    valid=not bool(sheet[f"J{h_n}"].value),
                 ),
                 Absolute(
                     element="Z",
                     absolute=sheet[f"D{v_n}"].value,
                     baseline=sheet[f"H{v_n}"].value,
                     starttime=parse_relative_time(
-                        base_date, "{0:04d}".format(sheet[f"B{v_n}"].value)
+                        base_date, time=(sheet[f"B{v_n}"].value)
                     ),
                     endtime=parse_relative_time(
-                        base_date, "{0:04d}".format(sheet[f"B{v_n}"].value)
+                        base_date, time=(sheet[f"B{v_n}"].value)
                     ),
+                    valid=not bool(sheet[f"J{v_n}"].value),
                 ),
             ]
-            valid = [
-                sheet[f"J{d_n}"].value,
-                sheet[f"J{h_n}"].value,
-                sheet[f"J{d_n}"].value,
-            ]
-            if valid == [None, None, None]:
-                readings.append(
-                    Reading(
-                        metadata=metadata,
-                        absolutes=absolutes,
-                        pier_correction=metadata["pier_correction"],
-                    ),
-                )
+            readings.append(
+                Reading(
+                    metadata=metadata,
+                    absolutes=absolutes,
+                    pier_correction=metadata["pier_correction"],
+                ),
+            )
         return readings
diff --git a/pyproject.toml b/pyproject.toml
index 1f9f2922b4e0af42209e02750db227e1441c9b2a..47df0cb3db471b75b65b292fbd2c9a457995d4a0 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -17,7 +17,7 @@ packages = [
   {include = "geomagio" }
 ]
 repository="https://code.usgs.gov/ghsc/geomag/geomag-algorithms"
-version = "1.6.5"
+version = "1.7.0"
 
 
 [tool.poetry.dependencies]
diff --git a/test/adjusted_test/adjusted_test.py b/test/adjusted_test/adjusted_test.py
index 2ac3747660af93332bdfc4cd17c6fe29e106d666..7d81f9cbb308b914f5db2dc7620a1ccfc3754bbb 100644
--- a/test/adjusted_test/adjusted_test.py
+++ b/test/adjusted_test/adjusted_test.py
@@ -210,7 +210,7 @@ def test_CMO2015_infinite_one_interval():
         path="etc/residual/Caldata/",
     )
 
-    assert len(readings) == 146
+    assert len(readings) == 168
 
     result = Affine(
         observatory="CMO",
@@ -246,7 +246,7 @@ def test_CMO2015_infinite_weekly():
         endtime=UTCDateTime("2015-12-31T23:59:00Z"),
         path="etc/residual/Caldata/",
     )
-    assert len(readings) == 146
+    assert len(readings) == 168
 
     starttime = UTCDateTime("2015-02-01T00:00:00Z")
     endtime = UTCDateTime("2015-11-27T23:59:00Z")
@@ -286,7 +286,7 @@ def test_CMO2015_short_acausal():
         endtime=UTCDateTime("2015-12-31T23:59:00Z"),
         path="etc/residual/Caldata/",
     )
-    assert len(readings) == 146
+    assert len(readings) == 168
 
     starttime = UTCDateTime("2015-02-01T00:00:00Z")
     endtime = UTCDateTime("2015-11-27T23:59:00Z")
@@ -325,7 +325,7 @@ def test_CMO2015_short_causal():
         endtime=UTCDateTime("2015-12-31T23:59:00Z"),
         path="etc/residual/Caldata/",
     )
-    assert len(readings) == 146
+    assert len(readings) == 168
 
     starttime = UTCDateTime("2015-02-01T00:00:00Z")
     endtime = UTCDateTime("2015-11-27T23:59:00Z")
diff --git a/test/residual_test/residual_test.py b/test/residual_test/residual_test.py
index e1fe36f2b6708130d5bd2c3ef6a139bc2061b795..8b156125bd3696e86b1a2dd60df4674ddff6ab62 100644
--- a/test/residual_test/residual_test.py
+++ b/test/residual_test/residual_test.py
@@ -55,15 +55,21 @@ def get_json_readings(filename: str):
     return readings
 
 
-def get_spreadsheet_absolutes(path):
+def get_spreadsheet_absolutes(
+    observatory: str, starttime: UTCDateTime, endtime: UTCDateTime, path
+):
     """
     Tests functionality of SpreadsheetAbsolutesFactory and recalculation of absolutes
     """
     # establish SpreadsheetAbsolutesFactory for reading test data from Excel
-    saf = SpreadsheetAbsolutesFactory()
+    saf = SpreadsheetAbsolutesFactory(base_directory=path)
     # Read spreadsheet containing test data
-    reading = saf.parse_spreadsheet(path=path)
-    return reading
+    readings = saf.get_readings(
+        observatory=observatory,
+        starttime=UTCDateTime(starttime),
+        endtime=UTCDateTime(endtime),
+    )
+    return readings
 
 
 def get_spreadsheet_directory_readings(path, observatory, starttime, endtime):
@@ -87,7 +93,7 @@ def test_CMO_summaries():
         assert_equal(reading.metadata["station"], "CMO")
         assert_equal(reading.metadata["instrument"], 200803)
         assert_equal(reading.pier_correction, 10.5)
-    assert_equal(len(readings), 26)
+    assert_equal(len(readings), 28)
 
     assert readings[0].time > starttime
     assert readings[-1].time < endtime
@@ -100,11 +106,17 @@ def test_DED_20140952332():
     Tests calculations for measurements in units of DMS.
     """
     # gather absolute from DED test data and recalculate
-    reading = get_spreadsheet_absolutes(path="etc/residual/DED-20140952332.xlsm")
-    # test results with original spreadsheet values
-    assert_readings_equal(
-        expected=reading, actual=calculate(reading=reading), decimal=2
+    readings = get_spreadsheet_absolutes(
+        observatory="DED",
+        starttime="2014-01-01",
+        endtime="2014-05-01",
+        path="etc/residual/DED-20140952332.xlsm",
     )
+    # test results with original spreadsheet values
+    for reading in readings:
+        assert_readings_equal(
+            expected=reading, actual=calculate(reading=reading), decimal=2
+        )
 
 
 def test_BRW_20133650000():
@@ -114,13 +126,19 @@ def test_BRW_20133650000():
     Tests calculations for measurements in units of DM.
     """
     # gather absolute from DED test data and recalculate
-    reading = get_spreadsheet_absolutes(path="etc/residual/BRW-20133650000.xlsm")
-    # test results with original spreadsheet values
-    assert_readings_equal(
-        expected=reading,
-        actual=calculate(reading=reading),
-        decimal=1,  # change due to no longer rounding
+    readings = get_spreadsheet_absolutes(
+        observatory="BRW",
+        starttime="2013-01-01",
+        endtime="2013-12-31",
+        path="etc/residual/BRW-20133650000.xlsm",
     )
+    # test results with original spreadsheet values
+    for reading in readings:
+        assert_readings_equal(
+            expected=reading,
+            actual=calculate(reading=reading),
+            decimal=1,  # change due to no longer rounding
+        )
 
 
 def test_BOU_20190702():