From b02a423f120d622d45cac7b3922f423409de2b59 Mon Sep 17 00:00:00 2001
From: Alexandra Hobbs <ahobbs@contractor.usgs.gov>
Date: Mon, 13 Jan 2025 16:04:03 -0700
Subject: [PATCH] Replace the databases library with the sqlmodel library.
 Reorganize code under /db into /factories and /models, moving the metadata
 models under /api. Empty the __init__.py files to follow the working
 agreement and clean up imports.

---
 bin/__init__.py                               |    8 -
 create_db.py                                  |    5 +-
 docs/working_agreements.md                    |    9 +
 geomagio/Controller.py                        |  123 +-
 geomagio/DerivedTimeseriesFactory.py          |    6 +-
 geomagio/__init__.py                          |   31 -
 geomagio/adjusted/AdjustedMatrix.py           |    4 +-
 geomagio/adjusted/Affine.py                   |    8 +-
 geomagio/adjusted/__init__.py                 |    9 -
 geomagio/adjusted/transform/__init__.py       |   26 -
 geomagio/algorithm/AdjustedAlgorithm.py       |    2 +-
 geomagio/algorithm/__init__.py                |   46 -
 geomagio/api/__init__.py                      |    4 -
 geomagio/api/app.py                           |   22 +-
 geomagio/api/db/MetadataDatabaseFactory.py    |  132 --
 geomagio/api/db/__init__.py                   |   16 -
 geomagio/api/db/common.py                     |   47 -
 geomagio/api/db/create.py                     |   19 -
 geomagio/api/db/database.md                   |    3 +
 geomagio/api/db/database.py                   |   16 +
 .../db/factories/MetadataDatabaseFactory.py   |  153 ++
 .../MetadataHistoryDatabaseFactory.py         |  102 ++
 .../db/factories/SessionDatabaseFactory.py    |   62 +
 geomagio/api/db/factories/__init__.py         |    0
 geomagio/api/db/factories/db_helpers.py       |   80 +
 geomagio/api/db/metadata_history_table.py     |   18 -
 geomagio/api/db/metadata_table.py             |   74 -
 geomagio/api/db/models/__init__.py            |    0
 geomagio/api/db/models/metadata.py            |  147 ++
 geomagio/api/db/models/metadata_history.py    |   70 +
 geomagio/api/db/models/session.py             |   10 +
 geomagio/api/db/session_table.py              |   50 -
 geomagio/api/secure/SessionMiddleware.py      |   47 +-
 geomagio/api/secure/__init__.py               |    6 -
 geomagio/api/secure/encryption.py             |   10 +-
 .../api/secure/{login.py => login_routes.py}  |    0
 .../{metadata.py => metadata_routes.py}       |   90 +-
 geomagio/api/secure/{app.py => secure_app.py} |   24 +-
 geomagio/api/ws/__init__.py                   |    6 -
 geomagio/api/ws/algorithms.py                 |    9 +-
 geomagio/api/ws/metadata.py                   |   45 -
 geomagio/api/ws/{app.py => ws_app.py}         |    5 +-
 geomagio/binlog/__init__.py                   |   11 -
 geomagio/covjson/__init__.py                  |    7 -
 geomagio/edge/__init__.py                     |   30 -
 geomagio/iaga2002/__init__.py                 |   20 -
 geomagio/imagcdf/__init__.py                  |   10 -
 geomagio/imfjson/__init__.py                  |   10 -
 geomagio/imfv122/__init__.py                  |   14 -
 geomagio/imfv283/__init__.py                  |   20 -
 geomagio/metadata/Metadata.py                 |  111 --
 geomagio/metadata/MetadataCategory.py         |   10 -
 geomagio/metadata/MetadataFactory.py          |    3 +-
 geomagio/metadata/MetadataQuery.py            |   50 -
 geomagio/metadata/__init__.py                 |   13 -
 .../instrument/InstrumentCalibrations.py      |   11 +-
 geomagio/metadata/instrument/__init__.py      |    6 -
 geomagio/metadata/main.py                     |    8 +-
 geomagio/netcdf/NetCDFFactory.py              |    5 +-
 geomagio/netcdf/__init__.py                   |    8 -
 geomagio/pcdcp/__init__.py                    |   18 -
 geomagio/processing/__init__.py               |   20 -
 geomagio/residual/__init__.py                 |   46 -
 geomagio/temperature/__init__.py              |   11 -
 geomagio/vbf/__init__.py                      |   11 -
 geomagio/xml/__init__.py                      |    8 -
 localdev/docker-compose.yml                   |    2 +-
 .../2280fe551e60_initialize_database.py       |    5 +-
 poetry.lock                                   |  920 ++++++------
 pyproject.toml                                |   10 +-
 test/Controller_test.py                       |    7 +-
 test/DerivedTimeseriesFactory_test.py         |    9 +-
 test/adjusted_test/adjusted_test.py           |   23 +-
 test/algorithm_test/AdjustedAlgorithm_test.py |   25 +-
 test/algorithm_test/Algorithm_test.py         |    3 +-
 test/algorithm_test/AverageAlgorithm_test.py  |    5 +-
 test/algorithm_test/DbDtAlgorithm_test.py     |    6 +-
 test/algorithm_test/FilterAlgorithm_test.py   |   18 +-
 test/algorithm_test/SQDistAlgorithm_test.py   |    3 +-
 test/algorithm_test/XYZAlgorithm_test.py      |    6 +-
 test/api_test/conftest.py                     |  715 +++++++++
 .../factories/MetadataDatabaseFactory_test.py | 1307 +++++++++++++++++
 .../MetadataHistoryDatabaseFactory_test.py    |  385 +++++
 .../factories/SessionDatabaseFactory_test.py  |  146 ++
 test/api_test/secure_test/conftest.py         |  229 ---
 .../{login_test.py => login_routes_test.py}   |   82 +-
 .../secure_test/metadata_routes_test.py       |  931 ++++++++++++
 test/api_test/secure_test/metadata_test.py    |  867 -----------
 test/api_test/ws_test/data_test.py            |    2 +-
 test/api_test/ws_test/elements_test.py        |    2 +-
 test/api_test/ws_test/filter_test.py          |    2 +-
 test/api_test/ws_test/observatories_test.py   |    2 +-
 test/api_test/ws_test/variometers_test.py     |    2 +-
 test/conftest.py                              |   16 +
 test/db/MetadataDatabaseFactory_test.py       |  443 ------
 test/edge_test/EdgeFactory_test.py            |    3 +-
 test/edge_test/FDSNFactory_test.py            |    2 +-
 test/edge_test/FDSNSNCL_test.py               |    3 +-
 test/edge_test/IRISSNCL_test.py               |    3 +-
 test/edge_test/MiniSeedFactory_test.py        |    5 +-
 test/edge_test/RawInputClient_test.py         |    4 +-
 test/edge_test/mseed_test_clients.py          |    2 +-
 test/iaga2002_test/IAGA2002Factory_test.py    |    2 +-
 test/iaga2002_test/IAGA2002Parser_test.py     |    2 +-
 test/imfjson_test/IMFJSONWriter_test.py       |    4 +-
 test/imfv122_test/IMFV122Parser_test.py       |    2 +-
 test/imfv283_test/IMFV283Parser_test.py       |    7 +-
 test/metadata/MetadataFactory_test.py         |   10 +-
 test/pcdcp_test/PCDCPFactory_test.py          |    2 +-
 test/pcdcp_test/PCDCPParser_test.py           |    2 +-
 test/residual_test/residual_test.py           |   12 +-
 test_metadata.py                              |   20 +-
 112 files changed, 4953 insertions(+), 3310 deletions(-)
 create mode 100644 docs/working_agreements.md
 delete mode 100644 geomagio/api/db/MetadataDatabaseFactory.py
 delete mode 100644 geomagio/api/db/common.py
 delete mode 100644 geomagio/api/db/create.py
 create mode 100644 geomagio/api/db/database.md
 create mode 100644 geomagio/api/db/database.py
 create mode 100644 geomagio/api/db/factories/MetadataDatabaseFactory.py
 create mode 100644 geomagio/api/db/factories/MetadataHistoryDatabaseFactory.py
 create mode 100644 geomagio/api/db/factories/SessionDatabaseFactory.py
 create mode 100644 geomagio/api/db/factories/__init__.py
 create mode 100644 geomagio/api/db/factories/db_helpers.py
 delete mode 100644 geomagio/api/db/metadata_history_table.py
 delete mode 100644 geomagio/api/db/metadata_table.py
 create mode 100644 geomagio/api/db/models/__init__.py
 create mode 100644 geomagio/api/db/models/metadata.py
 create mode 100644 geomagio/api/db/models/metadata_history.py
 create mode 100644 geomagio/api/db/models/session.py
 delete mode 100644 geomagio/api/db/session_table.py
 rename geomagio/api/secure/{login.py => login_routes.py} (100%)
 rename geomagio/api/secure/{metadata.py => metadata_routes.py} (54%)
 rename geomagio/api/secure/{app.py => secure_app.py} (53%)
 delete mode 100644 geomagio/api/ws/metadata.py
 rename geomagio/api/ws/{app.py => ws_app.py} (96%)
 delete mode 100644 geomagio/metadata/Metadata.py
 delete mode 100644 geomagio/metadata/MetadataCategory.py
 delete mode 100644 geomagio/metadata/MetadataQuery.py
 create mode 100644 test/api_test/conftest.py
 create mode 100644 test/api_test/db/factories/MetadataDatabaseFactory_test.py
 create mode 100644 test/api_test/db/factories/MetadataHistoryDatabaseFactory_test.py
 create mode 100644 test/api_test/db/factories/SessionDatabaseFactory_test.py
 delete mode 100644 test/api_test/secure_test/conftest.py
 rename test/api_test/secure_test/{login_test.py => login_routes_test.py} (64%)
 create mode 100644 test/api_test/secure_test/metadata_routes_test.py
 delete mode 100644 test/api_test/secure_test/metadata_test.py
 create mode 100644 test/conftest.py
 delete mode 100644 test/db/MetadataDatabaseFactory_test.py

diff --git a/bin/__init__.py b/bin/__init__.py
index 0db276a14..e69de29bb 100644
--- a/bin/__init__.py
+++ b/bin/__init__.py
@@ -1,8 +0,0 @@
-"""Module for bin
-"""
-
-from __future__ import absolute_import
-
-from . import geomag
-
-__all__ = ["geomag"]
diff --git a/create_db.py b/create_db.py
index eed829480..9d2c1873c 100644
--- a/create_db.py
+++ b/create_db.py
@@ -1,3 +1,4 @@
-from geomagio.api.db.create import create_db
+from sqlmodel import SQLModel
+from geomagio.api.db.database import engine
 
-create_db()
+SQLModel.metadata.create_all(engine)
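
The new `create_db.py` works because sqlmodel table classes register themselves on `SQLModel.metadata` when their modules are imported, and `geomagio.api.db.database` imports the model modules. A minimal sketch of the same pattern against a throwaway SQLite database (the `Widget` model is hypothetical, for illustration only):

```python
from sqlmodel import Field, SQLModel, create_engine


class Widget(SQLModel, table=True):
    # defining the class registers the "widget" table on SQLModel.metadata
    id: int | None = Field(default=None, primary_key=True)
    name: str


engine = create_engine("sqlite:///./scratch.db")

# creates every registered table, skipping any that already exist
SQLModel.metadata.create_all(engine)
```
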
diff --git a/docs/working_agreements.md b/docs/working_agreements.md
new file mode 100644
index 000000000..6841cbc2e
--- /dev/null
+++ b/docs/working_agreements.md
@@ -0,0 +1,9 @@
+Developer Working Agreements
+=====================
+
+These are agreements among the development team to facilitate smooth workflows and readable, efficient code. This is a living document that should be revisited regularly; the items below are guidelines, with the understanding that exceptions can and do happen.
+
+- We will do our best not to push to production on Friday afternoons.
+- We try not to push commented-out code to production.
+- Each directory should have an empty `__init__.py` file so that Python treats the directory as a package. We will avoid adding initialization code to these files, including the `__all__` variable.
+- Imports belong at the top of Python files and should be specific (i.e. don't use `from package import *`). We will do our best to use absolute imports for external directories and relative imports for modules in the same directory.
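
As an illustration of the import agreement (the module and helper names here are hypothetical):

```python
# geomagio/example/ExampleFactory.py (hypothetical module)

# absolute imports for code in other directories
from geomagio.api.db.database import engine
from geomagio.api.db.models.metadata import Metadata

# relative import for a module in the same directory
from .example_helpers import format_example
```
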
diff --git a/geomagio/Controller.py b/geomagio/Controller.py
index 817598fed..09935932e 100644
--- a/geomagio/Controller.py
+++ b/geomagio/Controller.py
@@ -1,32 +1,57 @@
 """Controller class for geomag algorithms"""
 
+import sys
 import argparse
+
 from io import StringIO
-import sys
 from typing import List, Optional, Tuple, Union
-
 from obspy.core import Stream, UTCDateTime
 
-from .algorithm import Algorithm, algorithms, AlgorithmException, FilterAlgorithm
-from .DerivedTimeseriesFactory import DerivedTimeseriesFactory
-from .PlotTimeseriesFactory import PlotTimeseriesFactory
-from .StreamTimeseriesFactory import StreamTimeseriesFactory
-from . import TimeseriesUtility, Util
-
-# factory packages
-from . import binlog
-from . import edge
-from . import iaga2002
-from . import imfjson
-from . import pcdcp
-from . import imfv122
-from . import imfv283
-from . import temperature
-from . import vbf
-from . import xml
-from . import covjson
-from . import netcdf
-from . import imagcdf
+from geomagio import TimeseriesUtility, Util
+from geomagio.algorithm.Algorithm import Algorithm
+from geomagio.algorithm.AlgorithmException import AlgorithmException
+from geomagio.algorithm.FilterAlgorithm import FilterAlgorithm
+from geomagio.algorithm.AdjustedAlgorithm import AdjustedAlgorithm
+from geomagio.algorithm.AverageAlgorithm import AverageAlgorithm
+from geomagio.algorithm.DbDtAlgorithm import DbDtAlgorithm
+from geomagio.algorithm.DeltaFAlgorithm import DeltaFAlgorithm
+from geomagio.algorithm.SqDistAlgorithm import SqDistAlgorithm
+from geomagio.algorithm.XYZAlgorithm import XYZAlgorithm
+
+from geomagio.DerivedTimeseriesFactory import DerivedTimeseriesFactory
+from geomagio.PlotTimeseriesFactory import PlotTimeseriesFactory
+from geomagio.StreamTimeseriesFactory import StreamTimeseriesFactory
+
+from geomagio.binlog.BinLogFactory import BinLogFactory
+from geomagio.edge.EdgeFactory import EdgeFactory
+from geomagio.edge.IRISFactory import IRISFactory
+from geomagio.edge.MiniSeedFactory import MiniSeedFactory
+from geomagio.edge.FDSNFactory import FDSNFactory
+from geomagio.edge.LocationCode import LocationCode
+from geomagio.iaga2002.IAGA2002Factory import IAGA2002Factory
+from geomagio.imfjson.IMFJSONFactory import IMFJSONFactory
+from geomagio.pcdcp.PCDCPFactory import PCDCPFactory
+from geomagio.imfv122.IMFV122Factory import IMFV122Factory
+from geomagio.imfv283.IMFV283Factory import IMFV283Factory
+from geomagio.imfv283.GOESIMFV283Factory import GOESIMFV283Factory
+from geomagio.temperature.TEMPFactory import TEMPFactory
+from geomagio.vbf.VBFFactory import VBFFactory
+from geomagio.xml.XMLFactory import XMLFactory
+from geomagio.covjson.CovJSONFactory import CovJSONFactory
+from geomagio.netcdf.NetCDFFactory import NetCDFFactory
+from geomagio.imagcdf.ImagCDFFactory import ImagCDFFactory
+
+
+algorithms = {
+    "identity": Algorithm,
+    "adjusted": AdjustedAlgorithm,
+    "average": AverageAlgorithm,
+    "dbdt": DbDtAlgorithm,
+    "deltaf": DeltaFAlgorithm,
+    "filter": FilterAlgorithm,
+    "sqdist": SqDistAlgorithm,
+    "xyz": XYZAlgorithm,
+}
 
 
 class Controller(object):
@@ -516,7 +541,7 @@ def get_input_factory(args):
         else:
             input_stream = StringIO(Util.read_url(args.input_url))
     if input_type == "edge":
-        input_factory = edge.EdgeFactory(
+        input_factory = EdgeFactory(
             host=args.input_host,
             port=args.input_port,
             locationCode=args.locationcode,
@@ -526,7 +551,7 @@ def get_input_factory(args):
         )
     elif input_type == "goes":
         # TODO: deal with other goes arguments
-        input_factory = imfv283.GOESIMFV283Factory(
+        input_factory = GOESIMFV283Factory(
             directory=args.input_goes_directory,
             getdcpmessages=args.input_goes_getdcpmessages,
             password=args.input_goes_password,
@@ -535,7 +560,7 @@ def get_input_factory(args):
             **input_factory_args,
         )
     elif input_type == "iris":
-        input_factory = edge.IRISFactory(
+        input_factory = IRISFactory(
             base_url=args.iris_url,
             network=args.iris_network,
             locationCode=args.locationcode,
@@ -543,7 +568,7 @@ def get_input_factory(args):
             **input_factory_args,
         )
     elif input_type == "fdsn":
-        input_factory = edge.FDSNFactory(
+        input_factory = FDSNFactory(
             base_url=args.fdsn_url,
             network=args.network,
             locationCode=args.locationcode,
@@ -552,17 +577,17 @@ def get_input_factory(args):
     else:
         # stream compatible factories
         if input_type == "iaga2002":
-            input_factory = iaga2002.IAGA2002Factory(**input_factory_args)
+            input_factory = IAGA2002Factory(**input_factory_args)
         if input_type == "netcdf":
-            input_factory = netcdf.NetCDFFactory(**input_factory_args)
+            input_factory = NetCDFFactory(**input_factory_args)
         elif input_type == "imfv122":
-            input_factory = imfv122.IMFV122Factory(**input_factory_args)
+            input_factory = IMFV122Factory(**input_factory_args)
         elif input_type == "imfv283":
-            input_factory = imfv283.IMFV283Factory(**input_factory_args)
+            input_factory = IMFV283Factory(**input_factory_args)
         elif input_type == "pcdcp":
-            input_factory = pcdcp.PCDCPFactory(**input_factory_args)
+            input_factory = PCDCPFactory(**input_factory_args)
         elif input_type == "miniseed":
-            input_factory = edge.MiniSeedFactory(
+            input_factory = MiniSeedFactory(
                 host=args.input_host,
                 port=args.input_port,
                 locationCode=args.locationcode,
@@ -572,12 +597,12 @@ def get_input_factory(args):
                 **input_factory_args,
             )
         elif input_type == "xml":
-            input_factory = xml.XMLFactory(**input_factory_args)
+            input_factory = XMLFactory(**input_factory_args)
         elif input_type == "covjson":
-            input_factory = covjson.CovJSONFactory(**input_factory_args)
+            input_factory = CovJSONFactory(**input_factory_args)
         elif input_type == "imagcdf":
             input_factory_args["output"] = args.output
-            input_factory = imagcdf.ImagCDFFactory(**input_factory_args)
+            input_factory = ImagCDFFactory(**input_factory_args)
         # wrap stream
         if input_stream is not None:
             input_factory = StreamTimeseriesFactory(
@@ -628,7 +653,7 @@ def get_output_factory(args):
     if output_type == "edge":
         # TODO: deal with other edge arguments
         locationcode = args.outlocationcode or args.locationcode or None
-        output_factory = edge.EdgeFactory(
+        output_factory = EdgeFactory(
             host=args.output_host,
             port=args.output_read_port,
             write_port=args.output_port,
@@ -644,25 +669,25 @@ def get_output_factory(args):
     else:
         # stream compatible factories
         if output_type == "binlog":
-            output_factory = binlog.BinLogFactory(**output_factory_args)
+            output_factory = BinLogFactory(**output_factory_args)
         elif output_type == "iaga2002":
-            output_factory = iaga2002.IAGA2002Factory(**output_factory_args)
+            output_factory = IAGA2002Factory(**output_factory_args)
         elif output_type == "netcdf":
-            output_factory = netcdf.NetCDFFactory(**output_factory_args)
+            output_factory = NetCDFFactory(**output_factory_args)
         elif output_type == "imfjson":
-            output_factory = imfjson.IMFJSONFactory(**output_factory_args)
+            output_factory = IMFJSONFactory(**output_factory_args)
         elif output_type == "covjson":
-            output_factory = covjson.CovJSONFactory(**output_factory_args)
+            output_factory = CovJSONFactory(**output_factory_args)
         elif output_type == "pcdcp":
-            output_factory = pcdcp.PCDCPFactory(**output_factory_args)
+            output_factory = PCDCPFactory(**output_factory_args)
         elif output_type == "temperature":
-            output_factory = temperature.TEMPFactory(**output_factory_args)
+            output_factory = TEMPFactory(**output_factory_args)
         elif output_type == "vbf":
-            output_factory = vbf.VBFFactory(**output_factory_args)
+            output_factory = VBFFactory(**output_factory_args)
         elif output_type == "miniseed":
             # TODO: deal with other miniseed arguments
             locationcode = args.outlocationcode or args.locationcode or None
-            output_factory = edge.MiniSeedFactory(
+            output_factory = MiniSeedFactory(
                 host=args.output_host,
                 port=args.output_read_port,
                 write_port=args.output_port,
@@ -672,9 +697,9 @@ def get_output_factory(args):
                 **output_factory_args,
             )
         elif output_type == "xml":
-            output_factory = xml.XMLFactory(**output_factory_args)
+            output_factory = XMLFactory(**output_factory_args)
         elif output_type == "imagcdf":
-            output_factory = imagcdf.ImagCDFFactory(**output_factory_args)
+            output_factory = ImagCDFFactory(**output_factory_args)
         # wrap stream
         if output_stream is not None:
             output_factory = StreamTimeseriesFactory(
@@ -958,7 +983,7 @@ def parse_args(args):
                 instead of "--type"
                 """,
         metavar="CODE",
-        type=edge.LocationCode,
+        type=LocationCode,
     )
     input_group.add_argument(
         "--observatory",
@@ -1173,7 +1198,7 @@ def parse_args(args):
         "--outlocationcode",
         help="Defaults to --locationcode",
         metavar="CODE",
-        type=edge.LocationCode,
+        type=LocationCode,
     )
     output_group.add_argument(
         "--output-edge-forceout",
diff --git a/geomagio/DerivedTimeseriesFactory.py b/geomagio/DerivedTimeseriesFactory.py
index 273b18dd2..a2c6990b2 100644
--- a/geomagio/DerivedTimeseriesFactory.py
+++ b/geomagio/DerivedTimeseriesFactory.py
@@ -2,8 +2,10 @@ from typing import List, Optional
 
 from obspy import Stream, Trace, UTCDateTime
 
-from .algorithm import Algorithm, DeltaFAlgorithm, XYZAlgorithm
-from .TimeseriesFactory import TimeseriesFactory, TimeseriesUtility
+from geomagio.algorithm.DeltaFAlgorithm import DeltaFAlgorithm
+from geomagio.algorithm.XYZAlgorithm import XYZAlgorithm
+from geomagio.TimeseriesFactory import TimeseriesFactory
+from geomagio import TimeseriesUtility
 
 
 class DerivedTimeseriesFactory(TimeseriesFactory):
diff --git a/geomagio/__init__.py b/geomagio/__init__.py
index 1887216cb..e69de29bb 100644
--- a/geomagio/__init__.py
+++ b/geomagio/__init__.py
@@ -1,31 +0,0 @@
-"""
-Geomag Algorithm Module
-"""
-
-from . import ChannelConverter
-from . import StreamConverter
-from . import TimeseriesUtility
-from . import Util
-
-from .Controller import Controller
-from .DerivedTimeseriesFactory import DerivedTimeseriesFactory
-from .ObservatoryMetadata import ObservatoryMetadata
-from .VariometerMetadata import VariometerMetadata
-from .PlotTimeseriesFactory import PlotTimeseriesFactory
-from .TimeseriesFactory import TimeseriesFactory
-from .TimeseriesFactoryException import TimeseriesFactoryException
-
-__all__ = [
-    "ChannelConverter",
-    "Controller",
-    "DeltaFAlgorithm",
-    "DerivedTimeseriesFactory",
-    "ObservatoryMetadata",
-    "VariometerMetadata",
-    "PlotTimeseriesFactory",
-    "StreamConverter",
-    "TimeseriesFactory",
-    "TimeseriesFactoryException",
-    "TimeseriesUtility",
-    "Util",
-]
diff --git a/geomagio/adjusted/AdjustedMatrix.py b/geomagio/adjusted/AdjustedMatrix.py
index 7b35f7655..41eecd800 100644
--- a/geomagio/adjusted/AdjustedMatrix.py
+++ b/geomagio/adjusted/AdjustedMatrix.py
@@ -3,8 +3,8 @@ from obspy import Stream, UTCDateTime
 from pydantic import BaseModel
 from typing import Any, List, Optional
 
-from ..pydantic_utcdatetime import CustomUTCDateTimeType
-from ..residual.Reading import Reading, get_absolutes_xyz, get_ordinates
+from geomagio.pydantic_utcdatetime import CustomUTCDateTimeType
+from geomagio.residual.Reading import Reading, get_absolutes_xyz, get_ordinates
 from .. import ChannelConverter
 from .Metric import Metric, get_metric
 
diff --git a/geomagio/adjusted/Affine.py b/geomagio/adjusted/Affine.py
index a4157d3e1..33bbf04b4 100644
--- a/geomagio/adjusted/Affine.py
+++ b/geomagio/adjusted/Affine.py
@@ -4,14 +4,16 @@ from obspy import UTCDateTime
 from pydantic import BaseModel, Field
 from typing import List, Optional, Tuple
 
-from ..residual.Reading import (
+from geomagio.residual.Reading import (
     Reading,
     get_absolutes_xyz,
     get_ordinates,
 )
 from .AdjustedMatrix import AdjustedMatrix
-from .transform import RotationTranslationXY, TranslateOrigins, Transform
-from ..pydantic_utcdatetime import CustomUTCDateTimeType
+from .transform.RotationTranslationXY import RotationTranslationXY
+from .transform.TranslateOrigins import TranslateOrigins
+from .transform.Transform import Transform
+from geomagio.pydantic_utcdatetime import CustomUTCDateTimeType
 
 
 class Affine(BaseModel):
diff --git a/geomagio/adjusted/__init__.py b/geomagio/adjusted/__init__.py
index b86d5d927..e69de29bb 100644
--- a/geomagio/adjusted/__init__.py
+++ b/geomagio/adjusted/__init__.py
@@ -1,9 +0,0 @@
-from .AdjustedMatrix import AdjustedMatrix
-from .Affine import Affine
-from .Metric import Metric
-
-__all__ = [
-    "AdjustedMatrix",
-    "Affine",
-    "Metric",
-]
diff --git a/geomagio/adjusted/transform/__init__.py b/geomagio/adjusted/transform/__init__.py
index 0fa99ab9a..e69de29bb 100644
--- a/geomagio/adjusted/transform/__init__.py
+++ b/geomagio/adjusted/transform/__init__.py
@@ -1,26 +0,0 @@
-from .LeastSq import LeastSq
-from .QRFactorization import QRFactorization
-from .Rescale3D import Rescale3D
-from .RotationTranslationXY import RotationTranslationXY
-from .ShearYZ import ShearYZ
-from .Transform import Transform
-from .TranslateOrigins import TranslateOrigins
-from .SVD import SVD
-from .ZRotationHScale import ZRotationHscale
-from .ZRotationHScaleZBaseline import ZRotationHscaleZbaseline
-from .ZRotationShear import ZRotationShear
-
-__all__ = [
-    "LeastSq",
-    "QRFactorization",
-    "Rescale3D",
-    "RotationTranslation3D",
-    "RotationTranslationXY",
-    "ShearYZ",
-    "Transform",
-    "TranslateOrigins",
-    "SVD",
-    "ZRotationHscale",
-    "ZRotationHscaleZbaseline",
-    "ZRotationShear",
-]
diff --git a/geomagio/algorithm/AdjustedAlgorithm.py b/geomagio/algorithm/AdjustedAlgorithm.py
index 3e2d1c2c7..f835fdbbd 100644
--- a/geomagio/algorithm/AdjustedAlgorithm.py
+++ b/geomagio/algorithm/AdjustedAlgorithm.py
@@ -4,7 +4,7 @@ import json
 import numpy as np
 from obspy.core import Stream, Stats
 
-from ..adjusted import AdjustedMatrix
+from geomagio.adjusted.AdjustedMatrix import AdjustedMatrix
 from .Algorithm import Algorithm
 
 
diff --git a/geomagio/algorithm/__init__.py b/geomagio/algorithm/__init__.py
index 0d56bbcdd..e69de29bb 100644
--- a/geomagio/algorithm/__init__.py
+++ b/geomagio/algorithm/__init__.py
@@ -1,46 +0,0 @@
-"""
-Geomag Algorithms module
-"""
-
-from __future__ import absolute_import
-
-# base classes
-from .Algorithm import Algorithm
-from .AlgorithmException import AlgorithmException
-
-# algorithms
-from .AdjustedAlgorithm import AdjustedAlgorithm
-from .AverageAlgorithm import AverageAlgorithm
-from .DbDtAlgorithm import DbDtAlgorithm
-from .DeltaFAlgorithm import DeltaFAlgorithm
-from .FilterAlgorithm import FilterAlgorithm
-from .SqDistAlgorithm import SqDistAlgorithm
-from .XYZAlgorithm import XYZAlgorithm
-
-
-# algorithms is used by Controller to auto generate arguments
-algorithms = {
-    "identity": Algorithm,
-    "adjusted": AdjustedAlgorithm,
-    "average": AverageAlgorithm,
-    "dbdt": DbDtAlgorithm,
-    "deltaf": DeltaFAlgorithm,
-    "filter": FilterAlgorithm,
-    "sqdist": SqDistAlgorithm,
-    "xyz": XYZAlgorithm,
-}
-
-
-__all__ = [
-    # base classes
-    "Algorithm",
-    "AlgorithmException",
-    # algorithms
-    "AdjustedAlgorithm",
-    "AverageAlgorithm",
-    "DbDtAlgorithm",
-    "DeltaFAlgorithm",
-    "FilterAlgorithm",
-    "SqDistAlgorithm",
-    "XYZAlgorithm",
-]
diff --git a/geomagio/api/__init__.py b/geomagio/api/__init__.py
index ee21241f2..e69de29bb 100644
--- a/geomagio/api/__init__.py
+++ b/geomagio/api/__init__.py
@@ -1,4 +0,0 @@
-from .app import app
-from .db.create import create_db
-
-__all__ = ["app", "create_db"]
diff --git a/geomagio/api/app.py b/geomagio/api/app.py
index d13f8b92b..07ee0c349 100644
--- a/geomagio/api/app.py
+++ b/geomagio/api/app.py
@@ -10,12 +10,11 @@ and can be run using uvicorn, or any other ASGI server:
 import logging
 import os
 
-from contextlib import asynccontextmanager
 from fastapi import FastAPI
 from starlette.responses import RedirectResponse
 
-from . import secure, ws
-from .db import database
+from geomagio.api.secure.secure_app import app as secure_app
+from geomagio.api.ws.ws_app import app as ws_app
 
 LOG_BASIC_CONFIG = os.getenv("LOG_BASIC_CONFIG", "false")
 LOG_LEVEL = os.getenv("LOG_LEVEL", "INFO")
@@ -23,21 +22,12 @@ if LOG_BASIC_CONFIG == "true":
     logging.basicConfig(level=LOG_LEVEL)
 
 
-@asynccontextmanager
-async def lifespan(app: FastAPI):
-    # on startup
-    await database.connect()
-    yield
-    # on shutdown
-    await database.disconnect()
+app = FastAPI()
 
+ws_app.mount("/ws/secure", secure_app)
+ws_app.mount("/ws", ws_app)
 
-app = FastAPI(lifespan=lifespan)
 
-app.mount("/ws/secure", secure.app)
-app.mount("/ws", ws.app)
-
-
-@app.get("/", include_in_schema=False)
+@app.get("/", include_in_schema=False)
 async def redirect_to_ws():
     return RedirectResponse("/ws")
diff --git a/geomagio/api/db/MetadataDatabaseFactory.py b/geomagio/api/db/MetadataDatabaseFactory.py
deleted file mode 100644
index 7a92a91d5..000000000
--- a/geomagio/api/db/MetadataDatabaseFactory.py
+++ /dev/null
@@ -1,132 +0,0 @@
-from datetime import datetime
-from typing import List, Optional
-
-from databases import Database
-from obspy import UTCDateTime
-from sqlalchemy import or_
-
-from ...metadata import Metadata, MetadataQuery
-from .metadata_history_table import metadata_history
-from .metadata_table import metadata as metadata_table
-
-
-class MetadataDatabaseFactory(object):
-    def __init__(self, database: Database):
-        self.database = database
-
-    async def create_metadata(self, meta: Metadata) -> Metadata:
-        query = metadata_table.insert()
-        meta.status = meta.status or "new"
-        values = meta.model_dump(exclude={"id", "metadata_id"}, exclude_none=True)
-        query = query.values(**values)
-        meta.id = await self.database.execute(query)
-        return meta
-
-    async def get_metadata(
-        self,
-        params: MetadataQuery,
-        history: bool = False,
-    ) -> List[Metadata]:
-        table = metadata_table
-        if history:
-            table = metadata_history
-        query = table.select()
-        (
-            id,
-            category,
-            starttime,
-            endtime,
-            created_after,
-            created_before,
-            network,
-            station,
-            channel,
-            location,
-            data_valid,
-            metadata,
-            status,
-        ) = params.model_dump().values()
-        if id:
-            query = query.where(table.c.id == id)
-        if category:
-            query = query.where(table.c.category == category)
-        if network:
-            query = query.where(table.c.network == network)
-        if station:
-            query = query.where(table.c.station == station)
-        if channel:
-            query = query.where(table.c.channel.like(channel))
-        if location:
-            query = query.where(table.c.location.like(location))
-        if starttime:
-            query = query.where(
-                or_(
-                    table.c.endtime == None,
-                    table.c.endtime > starttime,
-                )
-            )
-        if endtime:
-            query = query.where(
-                or_(
-                    table.c.starttime == None,
-                    table.c.starttime < endtime,
-                )
-            )
-        if created_after:
-            query = query.where(table.c.created_time > created_after)
-        if created_before:
-            query = query.where(table.c.created_time < created_before)
-        if data_valid is not None:
-            query = query.where(table.c.data_valid == data_valid)
-        if status is not None:
-            query = query.where(table.c.status.in_(status))
-        rows = await self.database.fetch_all(query)
-        return [Metadata(**row) for row in rows]
-
-    async def get_metadata_by_id(self, id: int):
-        meta = await self.get_metadata(MetadataQuery(id=id))
-        if len(meta) != 1:
-            raise ValueError(f"{len(meta)} records found")
-        return meta[0]
-
-    async def get_metadata_history_by_id(self, id: int) -> Optional[Metadata]:
-        query = metadata_history.select()
-        query = query.where(metadata_history.c.id == id)
-        meta = await self.database.fetch_one(query)
-        if meta is None:
-            return meta
-        return Metadata(**meta)
-
-    async def get_metadata_history_by_metadata_id(
-        self, metadata_id: int
-    ) -> List[Metadata]:
-        async with self.database.transaction() as transaction:
-            query = metadata_history.select()
-            query = query.where(metadata_history.c.metadata_id == metadata_id).order_by(
-                metadata_history.c.updated_time
-            )
-            rows = await self.database.fetch_all(query)
-            metadata = [Metadata(**row) for row in rows]
-            current_metadata = await self.get_metadata_by_id(id=metadata_id)
-            metadata.append(current_metadata)
-            # return records in order of age(newest first)
-            metadata.reverse()
-            return metadata
-
-    async def update_metadata(self, meta: Metadata, updated_by: str) -> Metadata:
-        async with self.database.transaction() as transaction:
-            # write current record to metadata history table
-            original_metadata = await self.get_metadata_by_id(id=meta.id)
-            original_metadata.metadata_id = original_metadata.id
-            values = original_metadata.model_dump(exclude={"id"}, exclude_none=True)
-            query = metadata_history.insert()
-            query = query.values(**values)
-            original_metadata.id = await self.database.execute(query)
-            # update record in metadata table
-            meta.updated_by = updated_by
-            meta.updated_time = UTCDateTime()
-            query = metadata_table.update().where(metadata_table.c.id == meta.id)
-            values = meta.model_dump(exclude={"id", "metadata_id"})
-            query = query.values(**values)
-            await self.database.execute(query)
-            return await self.get_metadata_by_id(id=meta.id)
diff --git a/geomagio/api/db/__init__.py b/geomagio/api/db/__init__.py
index dfb22968e..e69de29bb 100644
--- a/geomagio/api/db/__init__.py
+++ b/geomagio/api/db/__init__.py
@@ -1,16 +0,0 @@
-"""geomagio.api.db package.
-
-This package manages the database connection, data models,
-and provides methods for data access from other parts of the api.
-
-Modules outside the api should not access the database directly.
-"""
-
-from .common import database, sqlalchemy_metadata
-from .MetadataDatabaseFactory import MetadataDatabaseFactory
-
-__all__ = [
-    "database",
-    "sqlalchemy_metadata",
-    "MetadataDatabaseFactory",
-]
diff --git a/geomagio/api/db/common.py b/geomagio/api/db/common.py
deleted file mode 100644
index d5646f1cc..000000000
--- a/geomagio/api/db/common.py
+++ /dev/null
@@ -1,47 +0,0 @@
-"""
-Define the database connection and sqlalchemy metadata objects.
-
-
-Configuration:
-    uses environment variables:
-
-    DATABASE_URL  - url to connect to database.
-                    Default is "sqlite:///./api_database.db"
-
-
-Database models:
-
-    Register with metadata.
-
-        class DatabaseModel(orm.Model):
-            __database__ = database
-            __metadata__ = sqlalchemy_metadata
-
-    And import in create.py, so scripts can manage the database schema.
-
-
-Applications must manage the database connections:
-
-    @app.on_event("startup")
-    async def on_startup():
-        await database.connect()
-
-
-    @app.on_event("shutdown")
-    async def on_shutdown():
-        await database.disconnect()
-"""
-
-import os
-
-from databases import Database
-from sqlalchemy import MetaData
-
-
-# database connection
-database_url = os.getenv("DATABASE_URL", None)
-database_url = database_url or "sqlite:///./api_database.db"
-database = Database(database_url)
-
-# metadata used to manage database schema
-sqlalchemy_metadata = MetaData()
diff --git a/geomagio/api/db/create.py b/geomagio/api/db/create.py
deleted file mode 100644
index 531b79a9b..000000000
--- a/geomagio/api/db/create.py
+++ /dev/null
@@ -1,19 +0,0 @@
-import sqlalchemy
-
-from .common import database, sqlalchemy_metadata
-
-# register models with sqlalchemy_metadata by importing
-from .metadata_history_table import metadata_history
-from .metadata_table import metadata
-from .session_table import session
-
-
-def create_db():
-    """Create the database using sqlalchemy."""
-    database_url = str(database.url).replace("mysql://", "mysql+pymysql://")
-    engine = sqlalchemy.create_engine(database_url)
-    sqlalchemy_metadata.create_all(engine)
-
-
-if __name__ == "__main__":
-    create_db()
diff --git a/geomagio/api/db/database.md b/geomagio/api/db/database.md
new file mode 100644
index 000000000..ebfb7024a
--- /dev/null
+++ b/geomagio/api/db/database.md
@@ -0,0 +1,3 @@
+# Database
+
+All interactions with the metadata database should go through this db package. The db package consists of the engine, which manages the database connections; the models, which define the sqlmodel table classes and pydantic models; and the factories, which handle the actual reading and writing. Because sqlmodel table classes do not allow "metadata" to be a field, as noted [here](https://github.com/fastapi/sqlmodel/issues/290), we have to translate pydantic models to sqlmodel classes and back. This confines the issue to the factories, so the rest of the code base and its users don't need to be aware of the translation.
\ No newline at end of file
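
The translation described above shows up throughout the factories as paired `model_dump(by_alias=True)` calls. A condensed sketch of both directions (it assumes, per the linked issue, that the table class stores the JSON blob under a renamed attribute that is aliased back to `metadata`):

```python
from geomagio.api.db.models.metadata import Metadata, MetadataTable


def table_row_to_metadata(row: MetadataTable) -> Metadata:
    # by_alias=True emits "metadata" instead of the renamed internal
    # attribute, so the pydantic model sees the field name users expect
    return Metadata(**row.model_dump(by_alias=True))


def metadata_to_table_row(meta: Metadata) -> MetadataTable:
    return MetadataTable(**meta.model_dump(by_alias=True))
```
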
diff --git a/geomagio/api/db/database.py b/geomagio/api/db/database.py
new file mode 100644
index 000000000..74fb3211b
--- /dev/null
+++ b/geomagio/api/db/database.py
@@ -0,0 +1,16 @@
+import os
+
+from enum import Enum
+from sqlmodel import create_engine
+
+from geomagio.api.db.models.metadata import MetadataTable
+from geomagio.api.db.models.metadata_history import MetadataHistoryTable
+
+
+class Tables(Enum):
+    METADATA = MetadataTable
+    METADATA_HISTORY = MetadataHistoryTable
+
+
+database_url = os.getenv("DATABASE_URL", "sqlite:///./api_database.db")
+engine = create_engine(database_url)
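
Note that the deleted `create.py` used to rewrite `mysql://` URLs to `mysql+pymysql://`, and `database.py` no longer does, so `DATABASE_URL` must now be a complete SQLAlchemy URL including the driver. For example (credentials and database name are made up):

```python
import os

from sqlmodel import create_engine

# the driver must be spelled out; "mysql://..." alone is no longer rewritten
os.environ["DATABASE_URL"] = "mysql+pymysql://user:password@localhost/geomag"

engine = create_engine(os.getenv("DATABASE_URL", "sqlite:///./api_database.db"))
```
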
diff --git a/geomagio/api/db/factories/MetadataDatabaseFactory.py b/geomagio/api/db/factories/MetadataDatabaseFactory.py
new file mode 100644
index 000000000..d5816eb7d
--- /dev/null
+++ b/geomagio/api/db/factories/MetadataDatabaseFactory.py
@@ -0,0 +1,153 @@
+from typing import List
+from sqlmodel import Session
+from datetime import datetime, timezone
+from obspy import UTCDateTime
+
+from geomagio.api.db.models.metadata import (
+    MetadataTable,
+    Metadata,
+    MetadataQuery,
+    MetadataUpdate,
+)
+from geomagio.api.db.models.metadata_history import MetadataHistoryTable
+from geomagio.api.db.factories import db_helpers
+
+
+class MetadataDatabaseFactory:
+    def __init__(self, engine):
+        self.engine = engine
+
+    async def create_metadata(self, new_metadata: Metadata) -> Metadata:
+        with Session(self.engine) as db_session:
+            # convert Metadata input into MetadataTable object
+            new_metadata_values = new_metadata.model_dump(by_alias=True)
+            new_metadata_table_model = MetadataTable(**new_metadata_values)
+
+            new_metadata_table_model = MetadataTable.model_validate(
+                new_metadata_table_model
+            )
+            new_metadata_table_model.id = None
+            if new_metadata_table_model.created_time is None:
+                new_metadata_table_model.created_time = UTCDateTime.now()
+
+            # convert UTCDateTime to aware datetime for sqlalchemy
+            new_metadata_table_model = db_helpers.convert_utcdatetime_to_datetime(
+                metadata=new_metadata_table_model
+            )
+
+            db_session.add(new_metadata_table_model)
+            db_session.commit()
+            db_session.refresh(new_metadata_table_model)
+
+            # convert newly created metadata row into Metadata object
+            newly_created_metadata_values = new_metadata_table_model.model_dump(
+                by_alias=True
+            )
+            returned_metadata = Metadata(**newly_created_metadata_values)
+
+            return returned_metadata
+
+    async def get_metadata(self, query: MetadataQuery) -> List[Metadata]:
+        with Session(self.engine) as db_session:
+            query = MetadataQuery.model_validate(query)
+            statement = db_helpers.build_metadata_search_query(
+                query=query, table=MetadataTable
+            )
+
+            results = db_session.exec(statement)
+
+            returned_metadata = []
+
+            for result in results:
+                found_metadata_values = result.model_dump(by_alias=True)
+                found_metadata = Metadata(**found_metadata_values)
+                returned_metadata.append(found_metadata)
+
+            return returned_metadata
+
+    async def get_metadata_by_id(self, id: int) -> Metadata | None:
+        if not isinstance(id, int):
+            raise ValueError("expected an int")
+
+        with Session(self.engine) as db_session:
+            result = db_session.get(MetadataTable, id)
+            if result is not None:
+                result_values = result.model_dump(by_alias=True)
+                return Metadata(**result_values)
+            else:
+                return None
+
+    async def update_metadata(
+        self, updated_metadata: MetadataUpdate, updated_by: str
+    ) -> Metadata:
+        with Session(self.engine) as db_session:
+            # convert any times on updated metadata to aware datetime for sqlalchemy
+            updated_metadata = db_helpers.convert_utcdatetime_to_datetime(
+                updated_metadata
+            )
+
+            # get original metadata
+            original_metadata = db_session.get(MetadataTable, updated_metadata.id)
+            if original_metadata is None:
+                raise ValueError("metadata not found")
+
+            # save original metadata as a new metadata_history record
+            new_metadata_history_values = original_metadata.model_dump()
+            new_metadata_history = MetadataHistoryTable(**new_metadata_history_values)
+
+            # remove the id and set metadata_id to the original_metadata id
+            new_metadata_history.id = None
+            new_metadata_history.metadata_id = original_metadata.id
+
+            # copy over the time fields by hand to prevent them from being serialized into strings
+            new_metadata_history.created_time = original_metadata.created_time
+            if original_metadata.updated_time:
+                new_metadata_history.updated_time = original_metadata.updated_time
+            if original_metadata.starttime:
+                new_metadata_history.starttime = original_metadata.starttime
+            if original_metadata.endtime:
+                new_metadata_history.endtime = original_metadata.endtime
+
+            db_session.add(new_metadata_history)
+            db_session.flush()
+
+            # set updated values on original_metadata
+            # do not model_validate the inputs otherwise the defaults will all be set to None.
+            # sqlmodel_update removes any values set to None
+            update_values = updated_metadata.model_dump(
+                exclude_unset=True, by_alias=True
+            )
+            original_metadata.sqlmodel_update(update_values)
+
+            original_metadata.updated_by = updated_by
+            original_metadata.updated_time = datetime.now(timezone.utc)
+            db_session.add(original_metadata)
+            db_session.commit()
+            db_session.refresh(original_metadata)
+
+            # convert newly updated metadata row to Metadata type
+            return_metadata_values = original_metadata.model_dump(by_alias=True)
+            return Metadata(**return_metadata_values)
+
+    async def batch_create_metadata(self, new_metadatas: List[Metadata]):
+        with Session(self.engine) as db_session:
+            for new_metadata in new_metadatas:
+                new_metadata_values = new_metadata.model_dump(by_alias=True)
+                new_metadata_table_model = MetadataTable(**new_metadata_values)
+
+                new_metadata_table_model = MetadataTable.model_validate(
+                    new_metadata_table_model
+                )
+                new_metadata_table_model.id = None
+                if new_metadata_table_model.created_time is None:
+                    new_metadata_table_model.created_time = UTCDateTime.now()
+
+                # convert UTCDateTime to aware datetime for sqlalchemy
+                new_metadata_table_model = db_helpers.convert_utcdatetime_to_datetime(
+                    metadata=new_metadata_table_model
+                )
+
+                db_session.add(new_metadata_table_model)
+            db_session.commit()
+
+        return new_metadatas
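
A usage sketch for the new factory against an in-memory database (the station, category, and author values are made up, and it assumes the remaining `Metadata` fields are optional):

```python
import asyncio

from sqlmodel import SQLModel, create_engine

from geomagio.api.db.factories.MetadataDatabaseFactory import MetadataDatabaseFactory
from geomagio.api.db.models.metadata import Metadata, MetadataCategory


async def main() -> None:
    engine = create_engine("sqlite://")
    SQLModel.metadata.create_all(engine)
    factory = MetadataDatabaseFactory(engine=engine)

    # create a row, then read it back by its generated id
    created = await factory.create_metadata(
        Metadata(category=MetadataCategory.FLAG, station="BOU", created_by="example")
    )
    print(await factory.get_metadata_by_id(created.id))


asyncio.run(main())
```
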
diff --git a/geomagio/api/db/factories/MetadataHistoryDatabaseFactory.py b/geomagio/api/db/factories/MetadataHistoryDatabaseFactory.py
new file mode 100644
index 000000000..fdb5b725f
--- /dev/null
+++ b/geomagio/api/db/factories/MetadataHistoryDatabaseFactory.py
@@ -0,0 +1,102 @@
+from typing import List
+from sqlmodel import Session, select
+
+from geomagio.api.db.factories import db_helpers
+from geomagio.api.db.models.metadata import Metadata, MetadataTable, MetadataQuery
+from geomagio.api.db.models.metadata_history import (
+    MetadataHistoryTable,
+    MetadataHistory,
+)
+
+
+class MetadataHistoryDatabaseFactory:
+    def __init__(self, engine):
+        self.engine = engine
+
+    async def get_metadata_history_by_id(self, id: int) -> MetadataHistory | None:
+        if not isinstance(id, int):
+            raise ValueError("expected an int")
+
+        with Session(self.engine) as db_session:
+            result = db_session.get(MetadataHistoryTable, id)
+            if result is not None:
+                result_values = result.model_dump(by_alias=True)
+                return MetadataHistory(**result_values)
+            else:
+                return None
+
+    async def get_metadata_history(self, query: MetadataQuery) -> List[MetadataHistory]:
+        with Session(self.engine) as db_session:
+            query = MetadataQuery.model_validate(query)
+            statement = db_helpers.build_metadata_search_query(
+                query=query, table=MetadataHistoryTable
+            )
+
+            results = db_session.exec(statement)
+
+            returned_results = []
+            for result in results:
+                result_values = result.model_dump(by_alias=True)
+                returned_results.append(MetadataHistory(**result_values))
+
+            return returned_results
+
+    async def get_metadata_history_by_metadata_id(self, metadata_id: int):
+        if not isinstance(metadata_id, int):
+            raise ValueError("expected an int")
+
+        returned_list = []
+        with Session(self.engine) as db_session:
+            statement = (
+                select(MetadataHistoryTable, MetadataTable)
+                .join(MetadataTable)
+                .where(MetadataTable.id == metadata_id)
+            )
+            results = db_session.exec(statement)
+            history = results.all()
+
+            # if there's no metadata_history associated with the metadata object, just return the metadata object
+            if len(history) == 0:
+                metadata = db_session.get(MetadataTable, metadata_id)
+                if metadata is None:
+                    return None
+                else:
+                    result_values = metadata.model_dump(by_alias=True)
+                    return [Metadata(**result_values)]
+
+            metadata_list = []
+            for metadata_history, metadata in history:
+                metadata_history_values = metadata_history.model_dump(by_alias=True)
+                formatted_metadata_history = MetadataHistory(**metadata_history_values)
+                returned_list.insert(0, formatted_metadata_history)
+
+                # make sure only one metadata row is associated; if not, raise an error
+                if len(metadata_list) == 0:
+                    metadata_list.append(metadata)
+                else:
+                    if metadata_list[0] != metadata:
+                        raise SystemError(
+                            "get_metadata_history_by_metadata_id returned more than one metadata for id: ",
+                            metadata_id,
+                        )
+
+            # insert Metadata object to the front of the list
+            formatted_metadata_values = metadata_list[0].model_dump(by_alias=True)
+            formatted_metadata = Metadata(**formatted_metadata_values)
+            returned_list.insert(0, formatted_metadata)
+
+            return returned_list
+
+    async def batch_create_metadata_history(self, new_metadatas: List[MetadataHistory]):
+        with Session(self.engine) as db_session:
+            for new_metadata in new_metadatas:
+                new_metadata = MetadataHistoryTable.model_validate(new_metadata)
+                new_metadata.id = None
+
+                # convert UTCDateTime to aware datetime for sqlalchemy
+                new_metadata = db_helpers.convert_utcdatetime_to_datetime(
+                    metadata=new_metadata
+                )
+
+                db_session.add(new_metadata)
+            db_session.commit()
diff --git a/geomagio/api/db/factories/SessionDatabaseFactory.py b/geomagio/api/db/factories/SessionDatabaseFactory.py
new file mode 100644
index 000000000..bd83645f4
--- /dev/null
+++ b/geomagio/api/db/factories/SessionDatabaseFactory.py
@@ -0,0 +1,62 @@
+import os
+import json
+import uuid
+
+from typing import Mapping
+from datetime import datetime, timezone
+from sqlmodel import Session, select
+
+from geomagio.api.db.models.session import session
+from geomagio.api.secure.encryption import get_fernet
+
+
+class SessionDatabaseFactory:
+    def __init__(self, engine):
+        self.encryption = get_fernet()
+        self.engine = engine
+
+    async def get_session(self, session_id: str) -> Mapping | None:
+        with Session(self.engine) as db_session:
+            statement = select(session).where(session.session_id == session_id)
+            results = db_session.exec(statement)
+            existing_session = results.first()
+
+            if existing_session is None:
+                return None
+
+            data = self.encryption.decrypt(existing_session.data.encode("utf8"))
+            return json.loads(data)
+
+    async def save_session(self, session_id: str, data: Mapping) -> None:
+        updated = datetime.now(timezone.utc)
+
+        data = json.dumps(data)
+        data = self.encryption.encrypt(data.encode("utf8")).decode("utf8")
+
+        with Session(self.engine) as db_session:
+            # check if session exists before updating
+            statement = select(session).where(session.session_id == session_id)
+            results = db_session.exec(statement)
+            existing_session = results.first()
+
+            if not existing_session:
+                new_session = session(session_id=session_id, data=data, updated=updated)
+                db_session.add(new_session)
+            else:
+                existing_session.updated = updated
+                existing_session.data = data
+                db_session.add(existing_session)
+
+            db_session.commit()
+
+    async def delete_session(self, session_id: str) -> None:
+        with Session(self.engine) as db_session:
+            statement = select(session).where(session.session_id == session_id)
+            results = db_session.exec(statement)
+            existing_session = results.first()
+
+            if existing_session is None:
+                return None
+
+            db_session.delete(existing_session)
+            db_session.commit()
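
A round-trip sketch for the session factory; it assumes `get_fernet()` can build a key from the environment (see `geomagio/api/secure/encryption.py` for the actual variables) and that the session table already exists:

```python
import asyncio
import uuid

from geomagio.api.db.database import engine
from geomagio.api.db.factories.SessionDatabaseFactory import SessionDatabaseFactory


async def main() -> None:
    factory = SessionDatabaseFactory(engine=engine)
    session_id = uuid.uuid4().hex

    # data is JSON-serialized and Fernet-encrypted before being stored
    await factory.save_session(session_id, {"user": "example"})
    print(await factory.get_session(session_id))
    await factory.delete_session(session_id)


asyncio.run(main())
```
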
diff --git a/geomagio/api/db/factories/__init__.py b/geomagio/api/db/factories/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/geomagio/api/db/factories/db_helpers.py b/geomagio/api/db/factories/db_helpers.py
new file mode 100644
index 000000000..e45caf93c
--- /dev/null
+++ b/geomagio/api/db/factories/db_helpers.py
@@ -0,0 +1,80 @@
+from datetime import timezone
+from typing import Any
+
+from sqlmodel import SQLModel, select, or_
+
+from geomagio.api.db.models.metadata import MetadataQuery
+
+
+def convert_utcdatetime_to_datetime(metadata: Any):
+    if metadata.created_time is not None:
+        metadata.created_time = metadata.created_time.datetime.replace(
+            tzinfo=timezone.utc
+        )
+    if metadata.updated_time is not None:
+        metadata.updated_time = metadata.updated_time.datetime.replace(
+            tzinfo=timezone.utc
+        )
+    if metadata.starttime is not None:
+        metadata.starttime = metadata.starttime.datetime.replace(tzinfo=timezone.utc)
+    if metadata.endtime is not None:
+        metadata.endtime = metadata.endtime.datetime.replace(tzinfo=timezone.utc)
+    return metadata
+
+
+def build_metadata_search_query(query: MetadataQuery, table: type[SQLModel]):
+    # convert UTCDateTime to aware datetime for sqlalchemy
+    if query.created_before is not None:
+        query.created_before = query.created_before.datetime.replace(
+            tzinfo=timezone.utc
+        )
+    if query.created_after is not None:
+        query.created_after = query.created_after.datetime.replace(tzinfo=timezone.utc)
+    if query.starttime is not None:
+        query.starttime = query.starttime.datetime.replace(tzinfo=timezone.utc)
+    if query.endtime is not None:
+        query.endtime = query.endtime.datetime.replace(tzinfo=timezone.utc)
+
+    # build select statement
+    statement = select(table)
+    if query.id:
+        statement = statement.where(table.id == query.id)
+    if query.category:
+        statement = statement.where(table.category == query.category)
+    if query.network:
+        statement = statement.where(table.network == query.network)
+    if query.station:
+        statement = statement.where(table.station == query.station)
+
+    # using "like" to search for channels and locations to account for potential inconsistencies over time
+    if query.channel:
+        statement = statement.where(table.channel.like(query.channel))
+    if query.location:
+        statement = statement.where(table.location.like(query.location))
+
+    if query.starttime:
+        statement = statement.where(
+            or_(
+                table.endtime == None,
+                table.endtime > query.starttime,
+            )
+        )
+    if query.endtime:
+        statement = statement.where(
+            or_(
+                table.starttime == None,
+                table.starttime < query.endtime,
+            )
+        )
+    if query.created_after:
+        statement = statement.where(table.created_time > query.created_after)
+    if query.created_before:
+        statement = statement.where(table.created_time < query.created_before)
+    if query.data_valid is not None:
+        statement = statement.where(table.data_valid == query.data_valid)
+
+    # users can search for multiple statuses
+    if query.status is not None:
+        statement = statement.where(table.status.in_(query.status))
+
+    return statement
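
A sketch of how the factories drive this helper (the query values are illustrative):

```python
from obspy import UTCDateTime
from sqlmodel import Session

from geomagio.api.db.database import engine
from geomagio.api.db.factories.db_helpers import build_metadata_search_query
from geomagio.api.db.models.metadata import MetadataQuery, MetadataTable

# channel accepts SQL "like" wildcards; status accepts multiple values
query = MetadataQuery(
    category="flag",
    station="BOU",
    channel="H%",
    starttime=UTCDateTime(2024, 1, 1),
    status=["new", "updated"],
)
statement = build_metadata_search_query(query=query, table=MetadataTable)

with Session(engine) as db_session:
    rows = db_session.exec(statement).all()
```
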
diff --git a/geomagio/api/db/metadata_history_table.py b/geomagio/api/db/metadata_history_table.py
deleted file mode 100644
index 8fbd785c1..000000000
--- a/geomagio/api/db/metadata_history_table.py
+++ /dev/null
@@ -1,18 +0,0 @@
-from sqlalchemy import Column, ForeignKey, Integer
-
-from .common import sqlalchemy_metadata
-from .metadata_table import metadata
-
-# create copy of original metadata table and add to sqlalchemy metadata
-metadata_history = metadata.to_metadata(
-    metadata=sqlalchemy_metadata, name="metadata_history"
-)
-metadata_history.indexes.clear()
-metadata_history.append_column(
-    Column(
-        "metadata_id",
-        Integer,
-        ForeignKey("metadata.id"),
-        nullable=False,
-    ),
-)
diff --git a/geomagio/api/db/metadata_table.py b/geomagio/api/db/metadata_table.py
deleted file mode 100644
index 6b5716db5..000000000
--- a/geomagio/api/db/metadata_table.py
+++ /dev/null
@@ -1,74 +0,0 @@
-from sqlalchemy import Boolean, Column, Index, Integer, JSON, String, Table, Text
-import sqlalchemy_utc
-
-from .common import sqlalchemy_metadata
-
-
-"""Metadata database model.
-
-See pydantic model geomagio.metadata.Metadata
-"""
-metadata = Table(
-    "metadata",
-    sqlalchemy_metadata,
-    ## COLUMNS
-    Column("id", Integer, primary_key=True),
-    # author
-    Column("created_by", String(length=255), index=True),
-    Column(
-        "created_time",
-        sqlalchemy_utc.UtcDateTime,
-        default=sqlalchemy_utc.utcnow(),
-        index=True,
-    ),
-    # editor
-    Column("updated_by", String(length=255), index=True, nullable=True),
-    Column("updated_time", sqlalchemy_utc.UtcDateTime, index=True, nullable=True),
-    # time range
-    Column("starttime", sqlalchemy_utc.UtcDateTime, index=True, nullable=True),
-    Column("endtime", sqlalchemy_utc.UtcDateTime, index=True, nullable=True),
-    # what data metadata references, null for wildcard
-    Column("network", String(length=255), nullable=True),  # indexed below
-    Column("station", String(length=255), nullable=True),  # indexed below
-    Column("channel", String(length=255), nullable=True),  # indexed below
-    Column("location", String(length=255), nullable=True),  # indexed below
-    # category (flag, matrix, etc)
-    Column("category", String(length=255)),  # indexed below
-    # higher priority overrides lower priority
-    Column("priority", Integer, default=1),
-    # whether data is valid (primarily for flags)
-    Column("data_valid", Boolean, default=True, index=True),
-    # deletion status indicator
-    Column("status", String(length=255), nullable=True),
-    # metadata json blob
-    Column("metadata", JSON, nullable=True),
-    # general comment
-    Column("comment", Text, nullable=True),
-    # review specific comment
-    Column("review_comment", Text, nullable=True),
-    ## INDICES
-    Index(
-        "index_station_metadata",
-        # sncl
-        "network",
-        "station",
-        "channel",
-        "location",
-        # type
-        "category",
-        # date
-        "starttime",
-        "endtime",
-        # valid
-        "data_valid",
-        "status",
-    ),
-    Index(
-        "index_category_time",
-        # type
-        "category",
-        # date
-        "starttime",
-        "endtime",
-    ),
-)
diff --git a/geomagio/api/db/models/__init__.py b/geomagio/api/db/models/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/geomagio/api/db/models/metadata.py b/geomagio/api/db/models/metadata.py
new file mode 100644
index 000000000..2bfa3b4be
--- /dev/null
+++ b/geomagio/api/db/models/metadata.py
@@ -0,0 +1,147 @@
+import sqlalchemy_utc
+
+from enum import Enum
+from obspy import UTCDateTime
+from typing import Dict, List, Any
+from pydantic import BaseModel
+from sqlmodel import SQLModel, Field, Column, Text, JSON, Index
+
+from geomagio.pydantic_utcdatetime import CustomUTCDateTimeType
+
+
+class MetadataCategory(str, Enum):
+    ADJUSTED_MATRIX = "adjusted-matrix"
+    FLAG = "flag"
+    INSTRUMENT = "instrument"
+    OBSERVATORY = "observatory"
+    READING = "reading"
+
+
+class MetadataTable(SQLModel, table=True):
+    __tablename__ = "metadata"
+
+    id: int | None = Field(default=None, primary_key=True)
+    created_by: str | None = Field(index=True)
+    created_time: CustomUTCDateTimeType | None = Field(
+        # default_factory: a plain default would be evaluated once at import
+        # time and stamp every row with the same creation time
+        default_factory=UTCDateTime.now,
+        sa_column=Column(sqlalchemy_utc.UtcDateTime, index=True),
+    )
+    updated_by: str | None = Field(index=True)
+    updated_time: CustomUTCDateTimeType | None = Field(
+        sa_column=Column(sqlalchemy_utc.UtcDateTime, index=True)
+    )
+    starttime: CustomUTCDateTimeType | None = Field(
+        sa_column=Column(sqlalchemy_utc.UtcDateTime, index=True)
+    )
+    endtime: CustomUTCDateTimeType | None = Field(
+        sa_column=Column(sqlalchemy_utc.UtcDateTime, index=True)
+    )
+    network: str | None = None
+    station: str | None = None
+    channel: str | None = None
+    location: str | None = None
+    category: str | None = None
+    priority: int | None = Field(default=1)
+    data_valid: bool | None = Field(default=True, index=True)
+    status: str | None = "new"
+    # "metadata" is reserved by sqlmodel (it holds the sqlalchemy MetaData), so this field needs a different name plus an alias
+    metadata_: Dict[str, Any] | None = Field(
+        sa_column=Column("metadata", JSON),
+        alias="metadata",
+        schema_extra={"serialization_alias": "metadata"},
+    )
+    comment: str | None = Field(sa_column=Column(Text))
+    review_comment: str | None = Field(sa_column=Column(Text))
+
+    __table_args__ = (
+        Index(
+            "index_station_metadata",
+            # sncl
+            "network",
+            "station",
+            "channel",
+            "location",
+            # type
+            "category",
+            # date
+            "starttime",
+            "endtime",
+            # valid
+            "data_valid",
+            "status",
+        ),
+        Index(
+            "index_category_time",
+            # type
+            "category",
+            # date
+            "starttime",
+            "endtime",
+        ),
+    )
+
+
+# this pydantic model is an abstraction layer between the code and the database for two reasons:
+# 1. sqlmodel models with table=True skip full pydantic validation
+# 2. it confines the metadata_ naming workaround to the factories that talk directly to the database
+# (see the round-trip sketch at the end of this module)
+class Metadata(BaseModel):
+    id: int | None = None
+    created_by: str | None = None
+    created_time: CustomUTCDateTimeType | None = None
+    updated_by: str | None = None
+    updated_time: CustomUTCDateTimeType | None = None
+    starttime: CustomUTCDateTimeType | None = None
+    endtime: CustomUTCDateTimeType | None = None
+    network: str | None = None
+    station: str | None = None
+    channel: str | None = None
+    location: str | None = None
+    category: MetadataCategory | None = None
+    priority: int | None = 1
+    data_valid: bool | None = True
+    status: str | None = "new"
+    metadata: Dict[str, Any] | None = Field(default=None, alias="metadata_")
+    comment: str | None = None
+    review_comment: str | None = None
+
+
+class MetadataQuery(BaseModel):
+    id: int | None = None
+    category: MetadataCategory | None = None
+    starttime: CustomUTCDateTimeType | None = None
+    endtime: CustomUTCDateTimeType | None = None
+    created_after: CustomUTCDateTimeType | None = None
+    created_before: CustomUTCDateTimeType | None = None
+    network: str | None = None
+    station: str | None = None
+    channel: str | None = None
+    location: str | None = None
+    data_valid: bool | None = None
+    metadata: Dict[str, Any] | None = None
+    status: List[str] | None = None
+
+
+# separate update model: id is required here, while all other fields remain optional
+class MetadataUpdate(BaseModel):
+    id: int
+    created_by: str | None = None
+    created_time: CustomUTCDateTimeType | None = None
+    updated_by: str | None = None
+    updated_time: CustomUTCDateTimeType | None = None
+    starttime: CustomUTCDateTimeType | None = None
+    endtime: CustomUTCDateTimeType | None = None
+    network: str | None = None
+    station: str | None = None
+    channel: str | None = None
+    location: str | None = None
+    category: MetadataCategory | None = None
+    priority: int | None = None
+    data_valid: bool | None = None
+    status: str | None = None
+    metadata: Dict[str, Any] | None = Field(
+        default=None,
+        alias="metadata_",
+        schema_extra={"serialization_alias": "metadata_"},
+    )
+    comment: str | None = None
+    review_comment: str | None = None
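+
+
+# round-trip sketch (illustrative): MetadataTable.model_dump() emits the key
+# "metadata_", which the Metadata model accepts through its alias, so a row
+# loaded by a factory can be converted with:
+#
+#     api_metadata = Metadata(**row.model_dump())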
diff --git a/geomagio/api/db/models/metadata_history.py b/geomagio/api/db/models/metadata_history.py
new file mode 100644
index 000000000..86eb9dd92
--- /dev/null
+++ b/geomagio/api/db/models/metadata_history.py
@@ -0,0 +1,70 @@
+import sqlalchemy_utc
+
+from datetime import datetime, timezone
+from typing import Dict, Any
+from sqlmodel import SQLModel, Field, Column, Text, JSON
+from pydantic import BaseModel
+
+from geomagio.pydantic_utcdatetime import CustomUTCDateTimeType
+from geomagio.api.db.models.metadata import MetadataCategory
+
+
+class MetadataHistoryTable(SQLModel, table=True):
+    __tablename__ = "metadata_history"
+
+    id: int | None = Field(default=None, primary_key=True)
+    metadata_id: int = Field(foreign_key="metadata.id")
+    created_by: str | None = Field(index=True)
+    created_time: CustomUTCDateTimeType | None = Field(
+        # default_factory: evaluate at row-creation time, not once at import
+        default_factory=lambda: datetime.now(timezone.utc),
+        sa_column=Column(sqlalchemy_utc.UtcDateTime, index=True),
+    )
+    updated_by: str | None = Field(index=True)
+    updated_time: CustomUTCDateTimeType | None = Field(
+        sa_column=Column(sqlalchemy_utc.UtcDateTime, index=True)
+    )
+    starttime: CustomUTCDateTimeType | None = Field(
+        sa_column=Column(sqlalchemy_utc.UtcDateTime, index=True)
+    )
+    endtime: CustomUTCDateTimeType | None = Field(
+        sa_column=Column(sqlalchemy_utc.UtcDateTime, index=True)
+    )
+    network: str | None = None
+    station: str | None = None
+    channel: str | None = None
+    location: str | None = None
+    category: str | None = None
+    priority: int | None = Field(default=1)
+    data_valid: bool | None = Field(default=True, index=True)
+    status: str | None = None
+    metadata_: Dict[str, Any] | None = Field(
+        sa_column=Column("metadata", JSON),
+        alias="metadata",
+        schema_extra={"serialization_alias": "metadata"},
+    )
+    comment: str | None = Field(sa_column=Column(Text))
+    review_comment: str | None = Field(sa_column=Column(Text))
+
+
+class MetadataHistory(BaseModel):
+    id: int | None = None
+    metadata_id: int
+    created_by: str | None = None
+    created_time: CustomUTCDateTimeType | None = None
+    updated_by: str | None = None
+    updated_time: CustomUTCDateTimeType | None = None
+    starttime: CustomUTCDateTimeType | None = None
+    endtime: CustomUTCDateTimeType | None = None
+    network: str | None = None
+    station: str | None = None
+    channel: str | None = None
+    location: str | None = None
+    category: MetadataCategory | None = None
+    priority: int | None = None
+    data_valid: bool | None = None
+    status: str | None = None
+    metadata: Dict[str, Any] | None = Field(
+        default=None,
+        alias="metadata_",
+    )
+    comment: str | None = None
+    review_comment: str | None = None
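+
+
+# as with Metadata, a MetadataHistoryTable row converts through the alias:
+#
+#     history = MetadataHistory(**row.model_dump())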
diff --git a/geomagio/api/db/models/session.py b/geomagio/api/db/models/session.py
new file mode 100644
index 000000000..5b71910af
--- /dev/null
+++ b/geomagio/api/db/models/session.py
@@ -0,0 +1,10 @@
+import sqlalchemy_utc
+
+from datetime import datetime
+from sqlmodel import SQLModel, Field, Column, String, Text
+
+
+class session(SQLModel, table=True):
+    id: int | None = Field(default=None, primary_key=True)
+    session_id: str = Field(sa_column=Column(String(100), index=True))
+    data: str = Field(sa_column=Column(Text))
+    updated: datetime = Field(sa_column=Column(sqlalchemy_utc.UtcDateTime, index=True))
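+
+
+# note: SessionDatabaseFactory (added in this patch) persists the serialized
+# session payload in "data" and uses "updated" to expire stale sessions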
diff --git a/geomagio/api/db/session_table.py b/geomagio/api/db/session_table.py
deleted file mode 100644
index 61f4b589a..000000000
--- a/geomagio/api/db/session_table.py
+++ /dev/null
@@ -1,50 +0,0 @@
-from datetime import datetime, timedelta, timezone
-
-import sqlalchemy
-import sqlalchemy_utc
-
-from .common import database, sqlalchemy_metadata
-
-
-session = sqlalchemy.Table(
-    "session",
-    sqlalchemy_metadata,
-    sqlalchemy.Column("id", sqlalchemy.Integer, primary_key=True),
-    sqlalchemy.Column("session_id", sqlalchemy.String(length=100), index=True),
-    sqlalchemy.Column("data", sqlalchemy.Text),
-    sqlalchemy.Column("updated", sqlalchemy_utc.UtcDateTime, index=True),
-)
-
-
-async def delete_session(session_id: str) -> None:
-    query = session.delete().where(session.c.session_id == session_id)
-    await database.execute(query)
-
-
-async def get_session(session_id: str) -> str:
-    query = session.select().where(session.c.session_id == session_id)
-    row = await database.fetch_one(query)
-    return row.data
-
-
-async def remove_expired_sessions(max_age: timedelta) -> None:
-    threshold = datetime.now(tz=timezone.utc) - max_age
-    query = session.delete().where(session.c.updated < threshold)
-    await database.execute(query)
-
-
-async def save_session(session_id: str, data: str) -> None:
-    updated = datetime.now(tz=timezone.utc)
-    # try update first
-    query = (
-        session.update()
-        .where(session.c.session_id == session_id)
-        .values(data=data, updated=updated)
-    )
-    count = await database.execute(query)
-    if count == 0:
-        # no matching session, insert
-        query = session.insert().values(
-            session_id=session_id, data=data, updated=updated
-        )
-        await database.execute(query)
diff --git a/geomagio/api/secure/SessionMiddleware.py b/geomagio/api/secure/SessionMiddleware.py
index 3d455bad5..84715e7c2 100644
--- a/geomagio/api/secure/SessionMiddleware.py
+++ b/geomagio/api/secure/SessionMiddleware.py
@@ -1,13 +1,13 @@
-import json
 import logging
 import uuid
-from typing import Callable, Dict, Mapping
 
-from cryptography.fernet import Fernet
 from starlette.datastructures import MutableHeaders
 from starlette.requests import HTTPConnection
 from starlette.types import ASGIApp, Message, Receive, Scope, Send
 
+from geomagio.api.db.database import engine
+from geomagio.api.db.factories.SessionDatabaseFactory import SessionDatabaseFactory
+
 
 class SessionMiddleware:
     """Based on Starlette SessionMiddleware.
@@ -22,11 +22,6 @@ class SessionMiddleware:
     ----------
     app: the ASGI application
 
-    delete_session_callback(session_id): callback to delete stored session data.
-    get_session_callback(session_id): callback to get stored session data.
-    save_session_callback(session_id): callback to update stored session data.
-    encryption: encrypt session data before storage if provided
-
     session_cookie: name of session cookie
     path: path for session cookie
     max_age: how long session cookies last
@@ -37,10 +32,6 @@ class SessionMiddleware:
     def __init__(
         self,
         app: ASGIApp,
-        delete_session_callback: Callable[[str], None],
-        get_session_callback: Callable[[str], str],
-        save_session_callback: Callable[[str, str], None],
-        encryption: Fernet = None,
         session_cookie: str = "session",
         path: str = "/",
         max_age: int = 14 * 24 * 60 * 60,  # 14 days, in seconds
@@ -48,10 +39,6 @@ class SessionMiddleware:
         https_only: bool = False,
     ) -> None:
         self.app = app
-        self.encryption = encryption
-        self.delete_session_callback = delete_session_callback
-        self.get_session_callback = get_session_callback
-        self.save_session_callback = save_session_callback
         self.session_cookie = session_cookie
         self.path = path
         self.max_age = max_age
@@ -71,10 +58,11 @@ class SessionMiddleware:
         if self.session_cookie in connection.cookies:
             session_id = connection.cookies[self.session_cookie]
             try:
-                scope["session"] = await self.get_session(session_id)
+                scope["session"] = await SessionDatabaseFactory(
+                    engine=engine
+                ).get_session(session_id=session_id)
                 initial_session_was_empty = False
             except Exception:
-                logging.exception(f"Error loading session {session_id}")
                 scope["session"] = {}
         else:
             scope["session"] = {}
@@ -85,31 +73,20 @@ class SessionMiddleware:
                 if scope["session"]:
                     session_id = session_id or uuid.uuid4().hex
                     # Persist session
-                    await self.save_session(session_id, scope["session"])
+                    await SessionDatabaseFactory(engine=engine).save_session(
+                        session_id=session_id, data=scope["session"]
+                    )
                     self.set_cookie(message=message, value=session_id)
                 elif not initial_session_was_empty:
                     # Clear session
-                    await self.delete_session(session_id)
+                    await SessionDatabaseFactory(engine=engine).delete_session(
+                        session_id=session_id
+                    )
                     self.set_cookie(message=message, value="null", max_age=-1)
             await send(message)
 
         await self.app(scope, receive, send_wrapper)
 
-    async def delete_session(self, session_id: str):
-        await self.delete_session_callback(session_id)
-
-    async def get_session(self, session_id: str) -> Dict:
-        data = await self.get_session_callback(session_id)
-        if self.encryption:
-            data = self.encryption.decrypt(data.encode("utf8"))
-        return json.loads(data)
-
-    async def save_session(self, session_id: str, data: Mapping):
-        data = json.dumps(data)
-        if self.encryption:
-            data = self.encryption.encrypt(data.encode("utf8")).decode("utf8")
-        await self.save_session_callback(session_id, data)
-
     def set_cookie(
         self,
         message: Message,
diff --git a/geomagio/api/secure/__init__.py b/geomagio/api/secure/__init__.py
index 6684194ce..e69de29bb 100644
--- a/geomagio/api/secure/__init__.py
+++ b/geomagio/api/secure/__init__.py
@@ -1,6 +0,0 @@
-"""Module with application for "/ws/secure" endpoints.
-"""
-
-from .app import app
-
-__all__ = ["app"]
diff --git a/geomagio/api/secure/encryption.py b/geomagio/api/secure/encryption.py
index 8f405df6f..30eb19f3c 100644
--- a/geomagio/api/secure/encryption.py
+++ b/geomagio/api/secure/encryption.py
@@ -1,11 +1,15 @@
+import os
 import base64
+
 from cryptography.fernet import Fernet
 from cryptography.hazmat.backends import default_backend
 from cryptography.hazmat.primitives import hashes
 from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC
 
 
-def generate_key(password: str, salt: str) -> str:
+def generate_key() -> str:
+    password = os.getenv("SECRET_KEY", "secret_key")
+    salt = os.getenv("SECRET_SALT", "secret_salt")
     kdf = PBKDF2HMAC(
         algorithm=hashes.SHA256(),
         length=32,
@@ -17,5 +21,5 @@ def generate_key(password: str, salt: str) -> str:
     return key
 
 
-def get_fernet(password: str, salt: str):
-    return Fernet(generate_key(password, salt))
+def get_fernet():
+    return Fernet(generate_key())
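+
+
+# usage sketch (assumes SECRET_KEY and SECRET_SALT are set in the environment;
+# the fallback values above are only suitable for local development):
+#
+#     fernet = get_fernet()
+#     token = fernet.encrypt(b"session data")
+#     assert fernet.decrypt(token) == b"session data"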
diff --git a/geomagio/api/secure/login.py b/geomagio/api/secure/login_routes.py
similarity index 100%
rename from geomagio/api/secure/login.py
rename to geomagio/api/secure/login_routes.py
diff --git a/geomagio/api/secure/metadata.py b/geomagio/api/secure/metadata_routes.py
similarity index 54%
rename from geomagio/api/secure/metadata.py
rename to geomagio/api/secure/metadata_routes.py
index ee5a8328a..2fbe11c13 100644
--- a/geomagio/api/secure/metadata.py
+++ b/geomagio/api/secure/metadata_routes.py
@@ -16,17 +16,24 @@ Configuration:
 import os
 from typing import List
 
-from fastapi import APIRouter, Body, Depends, Request, Response, Query
-from obspy import UTCDateTime
+from fastapi import APIRouter, Body, Depends, Response, Request, Query, HTTPException
 
-from ...metadata import Metadata, MetadataCategory, MetadataQuery
-from ..db.common import database
-from ..db import MetadataDatabaseFactory
-from .login import require_user, User
+from geomagio.api.db.models.metadata import (
+    Metadata,
+    MetadataUpdate,
+    MetadataCategory,
+    MetadataQuery,
+)
+from geomagio.api.db.models.metadata_history import MetadataHistory
+from geomagio.api.db.factories.MetadataDatabaseFactory import MetadataDatabaseFactory
+from geomagio.api.db.factories.MetadataHistoryDatabaseFactory import (
+    MetadataHistoryDatabaseFactory,
+)
+from geomagio.api.db.database import engine
+from .login_routes import require_user, User
 from ...pydantic_utcdatetime import CustomUTCDateTimeType
 
 
-# routes for login/logout
 router = APIRouter()
 
 
@@ -62,17 +69,15 @@ def get_metadata_query(
     "/metadata",
     description="Save metadata in database",
     response_model=Metadata,
+    status_code=201,
 )
 async def create_metadata(
     request: Request,
     metadata: Metadata,
     user: User = Depends(require_user()),
 ):
-    metadata = await MetadataDatabaseFactory(database=database).create_metadata(
-        meta=metadata
-    )
-    return Response(
-        metadata.model_dump_json(), status_code=201, media_type="application/json"
+    return await MetadataDatabaseFactory(engine=engine).create_metadata(
+        new_metadata=metadata
     )
 
 
@@ -82,21 +87,25 @@ async def create_metadata(
     name="Request metadata",
     response_model=List[Metadata],
 )
-async def get_metadata(query: MetadataQuery = Depends(get_metadata_query)):
-    metas = await MetadataDatabaseFactory(database=database).get_metadata(params=query)
-    return metas
+async def get_metadata(
+    # fields are parsed individually in get_metadata_query; otherwise FastAPI treats the query model as a request body
+    query: MetadataQuery = Depends(get_metadata_query),
+):
+    return await MetadataDatabaseFactory(engine=engine).get_metadata(query=query)
 
 
 @router.get(
     "/metadata/history",
     description="Search historical metadata by query parameters",
-    response_model=List[Metadata],
+    response_model=List[MetadataHistory],
 )
-async def get_metadata_history(query: MetadataQuery = Depends(get_metadata_query)):
-    metas = await MetadataDatabaseFactory(database=database).get_metadata(
-        params=query, history=True
+async def get_metadata_history(
+    # fields are parsed individually in get_metadata_query; otherwise FastAPI treats the query model as a request body
+    query: MetadataQuery = Depends(get_metadata_query),
+):
+    return await MetadataHistoryDatabaseFactory(engine=engine).get_metadata_history(
+        query=query
     )
-    return metas
 
 
 @router.get(
@@ -105,36 +114,43 @@ async def get_metadata_history(query: MetadataQuery = Depends(get_metadata_query
     response_model=Metadata,
 )
 async def get_metadata_by_id(id: int):
-    return await MetadataDatabaseFactory(database=database).get_metadata_by_id(id=id)
+    metadata = await MetadataDatabaseFactory(engine=engine).get_metadata_by_id(id=id)
+    if metadata is None:
+        raise HTTPException(status_code=404, detail="metadata not found")
+    else:
+        return metadata
 
 
 @router.get(
     "/metadata/{metadata_id}/history",
     description="Search metadata version history by database id",
-    response_model=List[Metadata],
 )
 async def get_metadata_history_by_metadata_id(
     metadata_id: int,
 ):
-    return await MetadataDatabaseFactory(
-        database=database
+    metadata_history = await MetadataHistoryDatabaseFactory(
+        engine=engine
     ).get_metadata_history_by_metadata_id(
         metadata_id=metadata_id,
     )
+    if metadata_history is None:
+        raise HTTPException(status_code=404, detail="metadata not found")
+    else:
+        return metadata_history
 
 
 @router.get(
     "/metadata/history/{id}",
     description="Search historical metadata by database id",
-    response_model=Metadata,
+    response_model=MetadataHistory,
 )
 async def get_metadata_history_by_id(id: int):
-    metadata = await MetadataDatabaseFactory(
-        database=database
+    metadata_history = await MetadataHistoryDatabaseFactory(
+        engine=engine
     ).get_metadata_history_by_id(id=id)
-    if metadata is None:
-        return Response(status_code=404)
-    return metadata
+    if metadata_history is None:
+        raise HTTPException(status_code=404, detail="metadata_history not found")
+    return metadata_history
 
 
 @router.put(
@@ -144,14 +160,18 @@ async def get_metadata_history_by_id(id: int):
 )
 async def update_metadata(
     id: int,
-    metadata: Metadata = Body(...),
+    metadata: MetadataUpdate,
     user: User = Depends(
         require_user(
             [os.getenv("REVIEWER_GROUP", "ghsc/geomag/operations/roles/reviewer")]
         )
     ),
 ):
-    return await MetadataDatabaseFactory(database=database).update_metadata(
-        meta=metadata,
-        updated_by=user.nickname,
-    )
+    try:
+        return await MetadataDatabaseFactory(engine=engine).update_metadata(
+            updated_metadata=metadata,
+            updated_by=user.nickname,
+        )
+    except Exception as e:
+        if str(e) == "metadata not found":
+            raise HTTPException(status_code=404, detail="metadata not found")
+        # propagate anything unexpected instead of silently returning None
+        raise
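+
+
+# sketch (testing assumption, names illustrative): the routes can be exercised
+# with fastapi.testclient.TestClient, e.g.
+#     client.put("/metadata/1", json={"id": 1, "review_comment": "approved"})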
diff --git a/geomagio/api/secure/app.py b/geomagio/api/secure/secure_app.py
similarity index 53%
rename from geomagio/api/secure/app.py
rename to geomagio/api/secure/secure_app.py
index 79e3f8c6a..e8e52a3d8 100644
--- a/geomagio/api/secure/app.py
+++ b/geomagio/api/secure/secure_app.py
@@ -1,14 +1,10 @@
 import json
-import os
-import uuid
 
-from fastapi import Depends, FastAPI, Request, Response
+from fastapi import FastAPI, Request, Response, Depends
 
-from ..db.session_table import delete_session, get_session, save_session
-from .encryption import get_fernet
-from .login import current_user, router as login_router, User
-from .metadata import router as metadata_router
-from .SessionMiddleware import SessionMiddleware
+from geomagio.api.secure.login_routes import current_user, router as login_router, User
+from geomagio.api.secure.metadata_routes import router as metadata_router
+from geomagio.api.secure.SessionMiddleware import SessionMiddleware
 
 
 app = FastAPI(
@@ -17,22 +13,12 @@ app = FastAPI(
     title="Geomagnetism Metadata Web Service",
 )
 
-# NOTE: database used for sessions is started by ..app.app,
-# which mounts this application at /ws/secure
 app.add_middleware(
-    middleware_class=SessionMiddleware,
-    delete_session_callback=delete_session,
-    get_session_callback=get_session,
-    save_session_callback=save_session,
-    encryption=get_fernet(
-        os.getenv("SECRET_KEY", uuid.uuid4().hex),
-        os.getenv("SECRET_SALT", "secret_salt"),
-    ),
+    SessionMiddleware,
     path="/ws/secure",
     session_cookie="PHPSESSID",
 )
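+# session persistence and encryption are handled inside SessionMiddleware via
+# SessionDatabaseFactory, so no callbacks or Fernet instance are passed here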
 
-# include login routes to manage user
 app.include_router(login_router)
 app.include_router(metadata_router)
 
diff --git a/geomagio/api/ws/__init__.py b/geomagio/api/ws/__init__.py
index cd87bf0b8..e69de29bb 100644
--- a/geomagio/api/ws/__init__.py
+++ b/geomagio/api/ws/__init__.py
@@ -1,6 +0,0 @@
-"""Module with application for "/ws" endpoints.
-"""
-
-from .app import app
-
-__all__ = ["app"]
diff --git a/geomagio/api/ws/algorithms.py b/geomagio/api/ws/algorithms.py
index c4696a4c5..53fef06cb 100644
--- a/geomagio/api/ws/algorithms.py
+++ b/geomagio/api/ws/algorithms.py
@@ -4,11 +4,10 @@ from fastapi import APIRouter, Depends, HTTPException, Query
 from starlette.responses import Response
 
 
-from ...algorithm import DbDtAlgorithm
-from ...residual import (
-    calculate,
-    Reading,
-)
+from geomagio.algorithm.DbDtAlgorithm import DbDtAlgorithm
+from geomagio.residual.Calculation import calculate
+from geomagio.residual.Reading import Reading
+
 from .DataApiQuery import DataApiQuery
 from .FilterApiQuery import FilterApiQuery
 from .data import format_timeseries, get_data_factory, get_data_query, get_timeseries
diff --git a/geomagio/api/ws/metadata.py b/geomagio/api/ws/metadata.py
deleted file mode 100644
index cb265e469..000000000
--- a/geomagio/api/ws/metadata.py
+++ /dev/null
@@ -1,45 +0,0 @@
-from typing import List
-
-from fastapi import APIRouter, Query
-from obspy import UTCDateTime
-
-from ...metadata import Metadata, MetadataCategory, MetadataQuery
-from ..db.common import database
-from ..db import MetadataDatabaseFactory
-from ...pydantic_utcdatetime import CustomUTCDateTimeType
-
-
-router = APIRouter()
-
-
-@router.get(
-    "/metadata",
-    description="Search metadata records with query parameters(excludes id and metadata id)",
-    response_model=List[Metadata],
-)
-async def get_metadata(
-    category: MetadataCategory = None,
-    starttime: CustomUTCDateTimeType = None,
-    endtime: CustomUTCDateTimeType = None,
-    network: str = None,
-    station: str = None,
-    channel: str = None,
-    location: str = None,
-    data_valid: bool = None,
-    status: List[str] = Query(None),
-):
-    query = MetadataQuery(
-        category=category,
-        starttime=starttime,
-        endtime=endtime,
-        network=network,
-        station=station,
-        channel=channel,
-        location=location,
-        data_valid=data_valid,
-        status=status,
-    )
-    metas = await MetadataDatabaseFactory(database=database).get_metadata(
-        **query.datetime_dict(exclude={"id", "metadata_id"})
-    )
-    return metas
diff --git a/geomagio/api/ws/app.py b/geomagio/api/ws/ws_app.py
similarity index 96%
rename from geomagio/api/ws/app.py
rename to geomagio/api/ws/ws_app.py
index 3018c39e1..85a5cab32 100644
--- a/geomagio/api/ws/app.py
+++ b/geomagio/api/ws/ws_app.py
@@ -5,7 +5,7 @@ from fastapi.exceptions import RequestValidationError
 from fastapi.responses import JSONResponse, PlainTextResponse, RedirectResponse
 from obspy import UTCDateTime
 
-from . import algorithms, data, elements, metadata, observatories, variometers
+from . import algorithms, data, elements, observatories, variometers
 
 
 ERROR_CODE_MESSAGES = {
@@ -39,9 +39,6 @@ app.include_router(elements.router)
 app.include_router(observatories.router)
 app.include_router(variometers.router)
 
-if METADATA_ENDPOINT:
-    app.include_router(metadata.router)
-
 
 @app.middleware("http")
 async def add_headers(request: Request, call_next):
diff --git a/geomagio/binlog/__init__.py b/geomagio/binlog/__init__.py
index 5928f63f9..e69de29bb 100644
--- a/geomagio/binlog/__init__.py
+++ b/geomagio/binlog/__init__.py
@@ -1,11 +0,0 @@
-"""IO Module for BinLog Format
-"""
-
-from __future__ import absolute_import
-
-from .BinLogFactory import BinLogFactory
-from .StreamBinLogFactory import StreamBinLogFactory
-from .BinLogWriter import BinLogWriter
-
-
-__all__ = ["BinLogFactory", "StreamBinLogFactory", "BinLogWriter"]
diff --git a/geomagio/covjson/__init__.py b/geomagio/covjson/__init__.py
index 11211bd39..e69de29bb 100644
--- a/geomagio/covjson/__init__.py
+++ b/geomagio/covjson/__init__.py
@@ -1,7 +0,0 @@
-"""IO Module for CovJSON Format"""
-
-from __future__ import absolute_import
-
-from .CovJSONFactory import CovJSONFactory
-
-__all__ = ["CovJSONFactory"]
diff --git a/geomagio/edge/__init__.py b/geomagio/edge/__init__.py
index e8a4117e3..e69de29bb 100644
--- a/geomagio/edge/__init__.py
+++ b/geomagio/edge/__init__.py
@@ -1,30 +0,0 @@
-"""IO Module for Edge Format
-"""
-
-from __future__ import absolute_import
-
-from .EdgeFactory import EdgeFactory
-from .IRISFactory import IRISFactory
-from .FDSNFactory import FDSNFactory
-from .LocationCode import LocationCode
-from .MiniSeedFactory import MiniSeedFactory
-from .MiniSeedInputClient import MiniSeedInputClient
-from .RawInputClient import RawInputClient
-from .IRISSNCL import IRISSNCL
-from .FDSNSNCL import FDSNSNCL
-from .SNCL import SNCL
-from .LegacySNCL import LegacySNCL
-
-__all__ = [
-    "EdgeFactory",
-    "IRISFactory",
-    "FDSNFactory",
-    "LocationCode",
-    "MiniSeedFactory",
-    "MiniSeedInputClient",
-    "RawInputClient",
-    "IRISSNCL",
-    "FDSNSNCL",
-    "SNCL",
-    "LegacySNCL",
-]
diff --git a/geomagio/iaga2002/__init__.py b/geomagio/iaga2002/__init__.py
index 0a9617eff..e69de29bb 100644
--- a/geomagio/iaga2002/__init__.py
+++ b/geomagio/iaga2002/__init__.py
@@ -1,20 +0,0 @@
-"""IO Module for IAGA 2002 Format
-
-Based on documentation at:
-  https://www.ngdc.noaa.gov/IAGA/vdat/IAGA2002/iaga2002format.html
-"""
-
-from __future__ import absolute_import
-
-from .IAGA2002Factory import IAGA2002Factory
-from .StreamIAGA2002Factory import StreamIAGA2002Factory
-from .IAGA2002Parser import IAGA2002Parser
-from .IAGA2002Writer import IAGA2002Writer
-
-
-__all__ = [
-    "IAGA2002Factory",
-    "StreamIAGA2002Factory",
-    "IAGA2002Parser",
-    "IAGA2002Writer",
-]
diff --git a/geomagio/imagcdf/__init__.py b/geomagio/imagcdf/__init__.py
index e7611ab4c..e69de29bb 100644
--- a/geomagio/imagcdf/__init__.py
+++ b/geomagio/imagcdf/__init__.py
@@ -1,10 +0,0 @@
-"""IO Module for ImagCDF Format
-"""
-
-from __future__ import absolute_import
-
-from .ImagCDFFactory import ImagCDFFactory
-from .IMCDFPublication import IMCDFPublicationLevel
-
-
-__all__ = ["ImagCDFFactory", "IMCDFPublicationLevel"]
diff --git a/geomagio/imfjson/__init__.py b/geomagio/imfjson/__init__.py
index 8cec78d55..e69de29bb 100644
--- a/geomagio/imfjson/__init__.py
+++ b/geomagio/imfjson/__init__.py
@@ -1,10 +0,0 @@
-"""IO Module for IMFSONFactory Format
-"""
-
-from __future__ import absolute_import
-
-from .IMFJSONFactory import IMFJSONFactory
-from .IMFJSONWriter import IMFJSONWriter
-
-
-__all__ = ["IMFJSONWriter", "IMFJSONFactory"]
diff --git a/geomagio/imfv122/__init__.py b/geomagio/imfv122/__init__.py
index dc6129d4f..e69de29bb 100644
--- a/geomagio/imfv122/__init__.py
+++ b/geomagio/imfv122/__init__.py
@@ -1,14 +0,0 @@
-"""IO Module for IMFV122 Format
-
-Based on documentation at:
-  http://www.intermagnet.org/data-donnee/formats/imfv122-eng.php
-"""
-
-from __future__ import absolute_import
-
-from .IMFV122Factory import IMFV122Factory
-from .IMFV122Parser import IMFV122Parser
-from .StreamIMFV122Factory import StreamIMFV122Factory
-
-
-__all__ = ["IMFV122Factory", "IMFV122Parser", "StreamIMFV122Factory"]
diff --git a/geomagio/imfv283/__init__.py b/geomagio/imfv283/__init__.py
index 9a38dcfda..e69de29bb 100644
--- a/geomagio/imfv283/__init__.py
+++ b/geomagio/imfv283/__init__.py
@@ -1,20 +0,0 @@
-"""IO Module for IMFV283Factory Format
-
-Based on documentation at:
-  http://http://www.intermagnet.org/data-donnee/formats/imfv283e-eng.php
-"""
-
-from __future__ import absolute_import
-
-from .GOESIMFV283Factory import GOESIMFV283Factory
-from .IMFV283Factory import IMFV283Factory
-from .StreamIMFV283Factory import StreamIMFV283Factory
-from .IMFV283Parser import IMFV283Parser
-
-
-__all__ = [
-    "GOESIMFV283Factory",
-    "IMFV283Factory",
-    "StreamIMFV283Factory",
-    "IMFV283Parser",
-]
diff --git a/geomagio/metadata/Metadata.py b/geomagio/metadata/Metadata.py
deleted file mode 100644
index bfc6a35f9..000000000
--- a/geomagio/metadata/Metadata.py
+++ /dev/null
@@ -1,111 +0,0 @@
-from datetime import timezone
-from typing import Dict, Optional
-
-from obspy import UTCDateTime
-from pydantic import field_validator, field_serializer, BaseModel, Field
-
-from .MetadataCategory import MetadataCategory
-from ..pydantic_utcdatetime import CustomUTCDateTimeType
-
-
-class Metadata(BaseModel):
-    """
-    This class is used for Data flagging and other Metadata.
-
-    Flag example:
-    ```
-    automatic_flag = Metadata(
-        created_by = 'algorithm/version',
-        start_time = UTCDateTime('2020-01-02T00:17:00.1Z'),
-        end_time = UTCDateTime('2020-01-02T00:17:00.1Z'),
-        network = 'NT',
-        station = 'BOU',
-        channel = 'BEU',
-        category = MetadataCategory.FLAG,
-        comment = "spike detected",
-        priority = 1,
-        data_valid = False)
-    ```
-
-    Adjusted Matrix example:
-    ```
-    adjusted_matrix = Metadata(
-        created_by = 'algorithm/version',
-        start_time = UTCDateTime('2020-01-02T00:17:00Z'),
-        end_time = None,
-        network = 'NT',
-        station = 'BOU',
-        category = MetadataCategory.ADJUSTED_MATRIX,
-        comment = 'automatic adjusted matrix',
-        priority = 1,
-        value = {
-            'parameters': {'x': 1, 'y': 2, 'z': 3}
-            'matrix': [ ... ]
-        }
-    )
-    ```
-    """
-
-    # database id
-    id: Optional[int] = None
-    # metadata history id referencing database id
-    metadata_id: Optional[int] = None
-    # author
-    created_by: Optional[str] = None
-    created_time: Optional[CustomUTCDateTimeType] = None
-    # editor
-    updated_by: Optional[str] = None
-    updated_time: Optional[CustomUTCDateTimeType] = None
-    # time range
-    starttime: Optional[CustomUTCDateTimeType] = None
-    endtime: Optional[CustomUTCDateTimeType] = None
-    # what data metadata references, null for wildcard
-    network: Optional[str] = None
-    station: Optional[str] = None
-    channel: Optional[str] = None
-    location: Optional[str] = None
-    # category (flag, matrix, etc)
-    category: Optional[MetadataCategory] = None
-    # higher priority overrides lower priority
-    priority: Optional[int] = 1
-    # whether data is valid (primarily for flags)
-    data_valid: Optional[bool] = True
-    # metadata json blob
-    metadata: Optional[Dict] = None
-    # general comment
-    comment: Optional[str] = None
-    # review specific comment
-    review_comment: Optional[str] = None
-    # metadata status indicator
-    status: Optional[str] = None
-
-    # instructions for model_dump() to serialize pydantic CustomUTCDateTimeType into aware datetime.datetime type
-    # sqlalchemy is expecting aware datetime.datetime, not the string model_dump() creates by default
-    @field_serializer("created_time")
-    def serialize_created_time(self, created_time: UTCDateTime):
-        if created_time is not None:
-            created_time = created_time.datetime.replace(tzinfo=timezone.utc)
-        return created_time
-
-    @field_serializer("updated_time")
-    def serialize_updated_time(self, updated_time: UTCDateTime):
-        if updated_time is not None:
-            updated_time = updated_time.datetime.replace(tzinfo=timezone.utc)
-        return updated_time
-
-    @field_serializer("starttime")
-    def serialize_starttime(self, starttime: UTCDateTime):
-        if starttime is not None:
-            starttime = starttime.datetime.replace(tzinfo=timezone.utc)
-        return starttime
-
-    @field_serializer("endtime")
-    def serialize_endtime(self, endtime: UTCDateTime):
-        if endtime is not None:
-            endtime = endtime.datetime.replace(tzinfo=timezone.utc)
-        return endtime
-
-    @field_validator("created_time")
-    @classmethod
-    def set_default_created_time(cls, created_time: UTCDateTime = None) -> UTCDateTime:
-        return created_time or UTCDateTime()
diff --git a/geomagio/metadata/MetadataCategory.py b/geomagio/metadata/MetadataCategory.py
deleted file mode 100644
index bb33eda18..000000000
--- a/geomagio/metadata/MetadataCategory.py
+++ /dev/null
@@ -1,10 +0,0 @@
-from enum import Enum
-
-
-# known category values as enumeration
-class MetadataCategory(str, Enum):
-    ADJUSTED_MATRIX = "adjusted-matrix"
-    FLAG = "flag"
-    INSTRUMENT = "instrument"
-    OBSERVATORY = "observatory"
-    READING = "reading"
diff --git a/geomagio/metadata/MetadataFactory.py b/geomagio/metadata/MetadataFactory.py
index 3e3dce3f7..f8efd5a27 100644
--- a/geomagio/metadata/MetadataFactory.py
+++ b/geomagio/metadata/MetadataFactory.py
@@ -7,8 +7,7 @@ import json
 from obspy import UTCDateTime
 from pydantic import TypeAdapter
 
-from .Metadata import Metadata
-from .MetadataQuery import MetadataQuery
+from geomagio.api.db.models.metadata import Metadata, MetadataQuery
 
 # Force IPV4 in requests package
 requests.packages.urllib3.util.connection.HAS_IPV6 = False
diff --git a/geomagio/metadata/MetadataQuery.py b/geomagio/metadata/MetadataQuery.py
deleted file mode 100644
index fbf74a008..000000000
--- a/geomagio/metadata/MetadataQuery.py
+++ /dev/null
@@ -1,50 +0,0 @@
-from datetime import timezone
-
-from obspy import UTCDateTime
-from pydantic import field_serializer, BaseModel
-from typing import List, Optional, Dict, Any
-
-from .MetadataCategory import MetadataCategory
-from ..pydantic_utcdatetime import CustomUTCDateTimeType
-
-
-class MetadataQuery(BaseModel):
-    id: Optional[int] = None
-    category: Optional[MetadataCategory] = None
-    starttime: Optional[CustomUTCDateTimeType] = None
-    endtime: Optional[CustomUTCDateTimeType] = None
-    created_after: Optional[CustomUTCDateTimeType] = None
-    created_before: Optional[CustomUTCDateTimeType] = None
-    network: Optional[str] = None
-    station: Optional[str] = None
-    channel: Optional[str] = None
-    location: Optional[str] = None
-    data_valid: Optional[bool] = None
-    metadata: Optional[Dict[str, Any]] = None
-    status: Optional[List[str]] = None
-
-    # instructions for model_dump() to serialize pydantic CustomUTCDateTimeType into aware datetime.datetime type
-    # sqlalchemy is expecting aware datetime.datetime, not the string model_dump() creates by default
-    @field_serializer("created_after")
-    def serialize_created_after(self, created_after: UTCDateTime):
-        if created_after is not None:
-            created_after = created_after.datetime.replace(tzinfo=timezone.utc)
-        return created_after
-
-    @field_serializer("created_before")
-    def serialize_created_before(self, created_before: UTCDateTime):
-        if created_before is not None:
-            created_before = created_before.datetime.replace(tzinfo=timezone.utc)
-        return created_before
-
-    @field_serializer("starttime")
-    def serialize_starttime(self, starttime: UTCDateTime):
-        if starttime is not None:
-            starttime = starttime.datetime.replace(tzinfo=timezone.utc)
-        return starttime
-
-    @field_serializer("endtime")
-    def serialize_endtime(self, endtime: UTCDateTime):
-        if endtime is not None:
-            endtime = endtime.datetime.replace(tzinfo=timezone.utc)
-        return endtime
diff --git a/geomagio/metadata/__init__.py b/geomagio/metadata/__init__.py
index 4a348bb47..e69de29bb 100644
--- a/geomagio/metadata/__init__.py
+++ b/geomagio/metadata/__init__.py
@@ -1,13 +0,0 @@
-from .Metadata import Metadata
-from .MetadataCategory import MetadataCategory
-from .MetadataFactory import MetadataFactory, GEOMAG_API_URL
-from .MetadataQuery import MetadataQuery
-
-
-__all__ = [
-    "GEOMAG_API_URL",
-    "Metadata",
-    "MetadataCategory",
-    "MetadataFactory",
-    "MetadataQuery",
-]
diff --git a/geomagio/metadata/instrument/InstrumentCalibrations.py b/geomagio/metadata/instrument/InstrumentCalibrations.py
index 7a2374cd8..c016e1d4e 100644
--- a/geomagio/metadata/instrument/InstrumentCalibrations.py
+++ b/geomagio/metadata/instrument/InstrumentCalibrations.py
@@ -2,7 +2,8 @@ from obspy import UTCDateTime
 
 from requests.exceptions import JSONDecodeError, ConnectTimeout
 
-from geomagio.metadata import Metadata, MetadataFactory, MetadataCategory
+from geomagio.api.db.models.metadata import Metadata, MetadataCategory, MetadataQuery
+from geomagio.metadata.MetadataFactory import MetadataFactory
 from geomagio.Util import write_state_file, read_state_file
 
 
@@ -320,11 +321,10 @@ def get_instrument_calibrations(
     if not calibrations:
         state_filename = f"{observatory}_instrument_cals.json"
         metadata = []
-
         factory = MetadataFactory(
             url=metadata_url or "https://geomag.usgs.gov/ws/secure/metadata",
         )
-        query = Metadata(
+        query = MetadataQuery(
             category=MetadataCategory.INSTRUMENT,
             starttime=start_time,
             endtime=end_time,
@@ -333,9 +333,10 @@ def get_instrument_calibrations(
         )
         try:
             metadata = factory.get_metadata(query=query, timeout=28)
-        except:
+        except Exception as e:
             print(
-                "Warning: An error occurred while trying to pull metadata from the metadata server!"
+                "Warning: An error occurred while trying to pull metadata from the metadata server: e",
+                e,
             )
 
         if not metadata:
diff --git a/geomagio/metadata/instrument/__init__.py b/geomagio/metadata/instrument/__init__.py
index c470473d1..e69de29bb 100644
--- a/geomagio/metadata/instrument/__init__.py
+++ b/geomagio/metadata/instrument/__init__.py
@@ -1,6 +0,0 @@
-from .InstrumentCalibrations import InstrumentCalibrations
-
-
-__all__ = [
-    "InstrumentCalibrations",
-]
diff --git a/geomagio/metadata/main.py b/geomagio/metadata/main.py
index cf1cd9adc..9304c1d6a 100644
--- a/geomagio/metadata/main.py
+++ b/geomagio/metadata/main.py
@@ -1,15 +1,13 @@
 import sys
 import json
 import os
-from typing import Dict, List, Optional
+import typer
 
+from typing import Dict, List, Optional
 from obspy import UTCDateTime
-import typer
 
-from .Metadata import Metadata
-from .MetadataCategory import MetadataCategory
+from geomagio.api.db.models.metadata import Metadata, MetadataCategory, MetadataQuery
 from .MetadataFactory import MetadataFactory
-from .MetadataQuery import MetadataQuery
 
 
 GEOMAG_API_HOST = os.getenv("GEOMAG_API_HOST", "geomag.usgs.gov")
diff --git a/geomagio/netcdf/NetCDFFactory.py b/geomagio/netcdf/NetCDFFactory.py
index 3a92d580e..21eff7d5a 100644
--- a/geomagio/netcdf/NetCDFFactory.py
+++ b/geomagio/netcdf/NetCDFFactory.py
@@ -1,11 +1,12 @@
 import netCDF4
 import numpy as np
-from obspy import Stream, Trace, UTCDateTime
-from datetime import datetime, timezone
 import tempfile
 import shutil
 import os
 
+from obspy import Stream, Trace, UTCDateTime
+from datetime import datetime, timezone
+
 from geomagio import ChannelConverter
 from geomagio.TimeseriesFactory import TimeseriesFactory
 from geomagio.api.ws.Element import ELEMENT_INDEX
diff --git a/geomagio/netcdf/__init__.py b/geomagio/netcdf/__init__.py
index f2ccd864e..e69de29bb 100644
--- a/geomagio/netcdf/__init__.py
+++ b/geomagio/netcdf/__init__.py
@@ -1,8 +0,0 @@
-"""IO Module for NetCDF Format
-"""
-
-from __future__ import absolute_import
-
-from .NetCDFFactory import NetCDFFactory
-
-__all__ = ["NetCDFFactory"]
diff --git a/geomagio/pcdcp/__init__.py b/geomagio/pcdcp/__init__.py
index 308618e29..e69de29bb 100644
--- a/geomagio/pcdcp/__init__.py
+++ b/geomagio/pcdcp/__init__.py
@@ -1,18 +0,0 @@
-"""IO Module for PCDCP Format
-"""
-
-from __future__ import absolute_import
-
-from .PCDCPFactory import PCDCPFactory, PCDCP_FILE_PATTERN
-from .StreamPCDCPFactory import StreamPCDCPFactory
-from .PCDCPParser import PCDCPParser
-from .PCDCPWriter import PCDCPWriter
-
-
-__all__ = [
-    "PCDCPFactory",
-    "PCDCP_FILE_PATTERN",
-    "PCDCPParser",
-    "PCDCPWriter",
-    "StreamPCDCPFactory",
-]
diff --git a/geomagio/processing/__init__.py b/geomagio/processing/__init__.py
index a31e13c3b..e69de29bb 100644
--- a/geomagio/processing/__init__.py
+++ b/geomagio/processing/__init__.py
@@ -1,20 +0,0 @@
-"""Package with near-real time processing configurations.
-
-Note that these implementations are subject to change,
-and should be considered less stable than other packages in the library.
-"""
-
-from .factory import get_edge_factory, get_miniseed_factory
-from .derived import adjusted, average, sqdist_minute
-from .filters import minute_filter, second_filter
-
-
-__all__ = [
-    "adjusted",
-    "average",
-    "get_edge_factory",
-    "get_miniseed_factory",
-    "minute_filter",
-    "second_filter",
-    "sqdist_minute",
-]
diff --git a/geomagio/residual/__init__.py b/geomagio/residual/__init__.py
index ec2de5e8a..e69de29bb 100644
--- a/geomagio/residual/__init__.py
+++ b/geomagio/residual/__init__.py
@@ -1,46 +0,0 @@
-# residual module
-from __future__ import absolute_import
-
-from . import Angle
-from .Absolute import Absolute
-from .Calculation import (
-    calculate,
-    calculate_D_absolute,
-    calculate_HZ_absolutes,
-    calculate_I,
-    calculate_scale_value,
-)
-from .CalFileFactory import CalFileFactory
-from .Measurement import Measurement, AverageMeasurement, average_measurement
-from .MeasurementType import (
-    MeasurementType,
-    DECLINATION_TYPES,
-    INCLINATION_TYPES,
-    MARK_TYPES,
-)
-from .Reading import Reading
-from .SpreadsheetAbsolutesFactory import SpreadsheetAbsolutesFactory
-from .SpreadsheetSummaryFactory import SpreadsheetSummaryFactory
-from .WebAbsolutesFactory import WebAbsolutesFactory
-
-__all__ = [
-    "Absolute",
-    "Angle",
-    "AverageMeasurement",
-    "average_measurement",
-    "CalFileFactory",
-    "calculate",
-    "calculate_D_absolute",
-    "calculate_HZ_absolutes",
-    "calculate_I",
-    "calculate_scale_value",
-    "DECLINATION_TYPES",
-    "INCLINATION_TYPES",
-    "MARK_TYPES",
-    "Measurement",
-    "MeasurementType",
-    "Reading",
-    "SpreadsheetAbsolutesFactory",
-    "SpreadsheetSummaryFactory",
-    "WebAbsolutesFactory",
-]
diff --git a/geomagio/temperature/__init__.py b/geomagio/temperature/__init__.py
index 7b64a57f7..e69de29bb 100644
--- a/geomagio/temperature/__init__.py
+++ b/geomagio/temperature/__init__.py
@@ -1,11 +0,0 @@
-"""IO Module for TEMP Format
-"""
-
-from __future__ import absolute_import
-
-from .TEMPFactory import TEMPFactory
-from .StreamTEMPFactory import StreamTEMPFactory
-from .TEMPWriter import TEMPWriter
-
-
-__all__ = ["TEMPFactory", "StreamTEMPFactory", "TEMPWriter"]
diff --git a/geomagio/vbf/__init__.py b/geomagio/vbf/__init__.py
index f0b694de1..e69de29bb 100644
--- a/geomagio/vbf/__init__.py
+++ b/geomagio/vbf/__init__.py
@@ -1,11 +0,0 @@
-"""IO Module for VBF Format
-"""
-
-from __future__ import absolute_import
-
-from .VBFFactory import VBFFactory
-from .StreamVBFFactory import StreamVBFFactory
-from .VBFWriter import VBFWriter
-
-
-__all__ = ["VBFFactory", "StreamVBFFactory", "VBFWriter"]
diff --git a/geomagio/xml/__init__.py b/geomagio/xml/__init__.py
index 5253c8cab..e69de29bb 100644
--- a/geomagio/xml/__init__.py
+++ b/geomagio/xml/__init__.py
@@ -1,8 +0,0 @@
-"""IO Module for Edge Format
-"""
-
-from __future__ import absolute_import
-
-from .XMLFactory import XMLFactory
-
-__all__ = ["XMLFactory"]
diff --git a/localdev/docker-compose.yml b/localdev/docker-compose.yml
index d68cee8c9..4957687ff 100644
--- a/localdev/docker-compose.yml
+++ b/localdev/docker-compose.yml
@@ -34,7 +34,7 @@ services:
       - DATA_HOST=host.docker.internal
       - OPENID_CLIENT_ID=$OPENID_CLIENT_ID
       - OPENID_CLIENT_SECRET=$OPENID_CLIENT_SECRET
-      - DATABASE_URL=mysql://root:password@host.docker.internal:3306/geomag_operations
+      - DATABASE_URL=mysql+pymysql://root:password@host.docker.internal:3306/geomag_operations
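+      # pymysql: sqlmodel/SQLAlchemy connects synchronously, replacing the async driver used by the removed "databases" dependency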
       - OPENID_METADATA_URL=https://code.usgs.gov/.well-known/openid-configuration
       - SECRET_KEY=$SECRET_KEY
       - SECRET_SALT=$SECRET_SALT
diff --git a/migrations/versions/2280fe551e60_initialize_database.py b/migrations/versions/2280fe551e60_initialize_database.py
index 0e2fe978e..89d25141a 100644
--- a/migrations/versions/2280fe551e60_initialize_database.py
+++ b/migrations/versions/2280fe551e60_initialize_database.py
@@ -7,8 +7,9 @@ Create Date: 2021-04-22 13:06:28.852803
 """
 
 from alembic import op
+from sqlmodel import SQLModel
 
-from geomagio.api.db.create import create_db
+from geomagio.api.db.database import engine
 
 
 # revision identifiers, used by Alembic.
@@ -19,7 +20,7 @@ depends_on = None
 
 
 def upgrade():
-    create_db()
+    SQLModel.metadata.create_all(engine)
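+    # NOTE (assumption): create_all only creates tables for models that have
+    # been imported, so geomagio.api.db.database (or the models package) must
+    # register MetadataTable, MetadataHistoryTable, and session first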
 
 
 def downgrade():
diff --git a/poetry.lock b/poetry.lock
index f97cf23eb..b50927e30 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,59 +1,23 @@
-# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand.
-
-[[package]]
-name = "aiomysql"
-version = "0.2.0"
-description = "MySQL driver for asyncio."
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "aiomysql-0.2.0-py3-none-any.whl", hash = "sha256:b7c26da0daf23a5ec5e0b133c03d20657276e4eae9b73e040b72787f6f6ade0a"},
-    {file = "aiomysql-0.2.0.tar.gz", hash = "sha256:558b9c26d580d08b8c5fd1be23c5231ce3aeff2dadad989540fee740253deb67"},
-]
-
-[package.dependencies]
-PyMySQL = ">=1.0"
-
-[package.extras]
-rsa = ["PyMySQL[rsa] (>=1.0)"]
-sa = ["sqlalchemy (>=1.3,<1.4)"]
-
-[[package]]
-name = "aiosqlite"
-version = "0.21.0"
-description = "asyncio bridge to the standard sqlite3 module"
-optional = false
-python-versions = ">=3.9"
-files = [
-    {file = "aiosqlite-0.21.0-py3-none-any.whl", hash = "sha256:2549cf4057f95f53dcba16f2b64e8e2791d7e1adedb13197dd8ed77bb226d7d0"},
-    {file = "aiosqlite-0.21.0.tar.gz", hash = "sha256:131bb8056daa3bc875608c631c678cda73922a2d4ba8aec373b19f18c17e7aa3"},
-]
-
-[package.dependencies]
-typing_extensions = ">=4.0"
-
-[package.extras]
-dev = ["attribution (==1.7.1)", "black (==24.3.0)", "build (>=1.2)", "coverage[toml] (==7.6.10)", "flake8 (==7.0.0)", "flake8-bugbear (==24.12.12)", "flit (==3.10.1)", "mypy (==1.14.1)", "ufmt (==2.5.1)", "usort (==1.0.8.post1)"]
-docs = ["sphinx (==8.1.3)", "sphinx-mdinclude (==0.6.1)"]
+# This file is automatically @generated by Poetry 1.8.5 and should not be changed by hand.
 
 [[package]]
 name = "alembic"
-version = "1.14.1"
+version = "1.15.1"
 description = "A database migration tool for SQLAlchemy."
 optional = false
-python-versions = ">=3.8"
+python-versions = ">=3.9"
 files = [
-    {file = "alembic-1.14.1-py3-none-any.whl", hash = "sha256:1acdd7a3a478e208b0503cd73614d5e4c6efafa4e73518bb60e4f2846a37b1c5"},
-    {file = "alembic-1.14.1.tar.gz", hash = "sha256:496e888245a53adf1498fcab31713a469c65836f8de76e01399aa1c3e90dd213"},
+    {file = "alembic-1.15.1-py3-none-any.whl", hash = "sha256:197de710da4b3e91cf66a826a5b31b5d59a127ab41bd0fc42863e2902ce2bbbe"},
+    {file = "alembic-1.15.1.tar.gz", hash = "sha256:e1a1c738577bca1f27e68728c910cd389b9a92152ff91d902da649c192e30c49"},
 ]
 
 [package.dependencies]
 Mako = "*"
-SQLAlchemy = ">=1.3.0"
-typing-extensions = ">=4"
+SQLAlchemy = ">=1.4.0"
+typing-extensions = ">=4.12"
 
 [package.extras]
-tz = ["backports.zoneinfo", "tzdata"]
+tz = ["tzdata"]
 
 [[package]]
 name = "annotated-types"
@@ -68,13 +32,13 @@ files = [
 
 [[package]]
 name = "anyio"
-version = "4.8.0"
+version = "4.9.0"
 description = "High level compatibility layer for multiple asynchronous event loop implementations"
 optional = false
 python-versions = ">=3.9"
 files = [
-    {file = "anyio-4.8.0-py3-none-any.whl", hash = "sha256:b5011f270ab5eb0abf13385f851315585cc37ef330dd88e27ec3d34d651fd47a"},
-    {file = "anyio-4.8.0.tar.gz", hash = "sha256:1d9fe889df5212298c0c0723fa20479d1b94883a2df44bd3897aa91083316f7a"},
+    {file = "anyio-4.9.0-py3-none-any.whl", hash = "sha256:9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c"},
+    {file = "anyio-4.9.0.tar.gz", hash = "sha256:673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028"},
 ]
 
 [package.dependencies]
@@ -84,19 +48,19 @@ sniffio = ">=1.1"
 typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""}
 
 [package.extras]
-doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"]
-test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21)"]
+doc = ["Sphinx (>=8.2,<9.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"]
+test = ["anyio[trio]", "blockbuster (>=1.5.23)", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21)"]
 trio = ["trio (>=0.26.1)"]
 
 [[package]]
 name = "authlib"
-version = "1.4.1"
+version = "1.5.1"
 description = "The ultimate Python library in building OAuth and OpenID Connect servers and clients."
 optional = false
 python-versions = ">=3.9"
 files = [
-    {file = "Authlib-1.4.1-py2.py3-none-any.whl", hash = "sha256:edc29c3f6a3e72cd9e9f45fff67fc663a2c364022eb0371c003f22d5405915c1"},
-    {file = "authlib-1.4.1.tar.gz", hash = "sha256:30ead9ea4993cdbab821dc6e01e818362f92da290c04c7f6a1940f86507a790d"},
+    {file = "authlib-1.5.1-py2.py3-none-any.whl", hash = "sha256:8408861cbd9b4ea2ff759b00b6f02fd7d81ac5a56d0b2b22c08606c6049aae11"},
+    {file = "authlib-1.5.1.tar.gz", hash = "sha256:5cbc85ecb0667312c1cdc2f9095680bb735883b123fb509fde1e65b1c5df972e"},
 ]
 
 [package.dependencies]
@@ -520,74 +484,74 @@ test-no-images = ["pytest", "pytest-cov", "pytest-rerunfailures", "pytest-xdist"
 
 [[package]]
 name = "coverage"
-version = "7.6.12"
+version = "7.7.0"
 description = "Code coverage measurement for Python"
 optional = false
 python-versions = ">=3.9"
 files = [
-    {file = "coverage-7.6.12-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:704c8c8c6ce6569286ae9622e534b4f5b9759b6f2cd643f1c1a61f666d534fe8"},
-    {file = "coverage-7.6.12-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ad7525bf0241e5502168ae9c643a2f6c219fa0a283001cee4cf23a9b7da75879"},
-    {file = "coverage-7.6.12-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06097c7abfa611c91edb9e6920264e5be1d6ceb374efb4986f38b09eed4cb2fe"},
-    {file = "coverage-7.6.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:220fa6c0ad7d9caef57f2c8771918324563ef0d8272c94974717c3909664e674"},
-    {file = "coverage-7.6.12-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3688b99604a24492bcfe1c106278c45586eb819bf66a654d8a9a1433022fb2eb"},
-    {file = "coverage-7.6.12-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d1a987778b9c71da2fc8948e6f2656da6ef68f59298b7e9786849634c35d2c3c"},
-    {file = "coverage-7.6.12-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:cec6b9ce3bd2b7853d4a4563801292bfee40b030c05a3d29555fd2a8ee9bd68c"},
-    {file = "coverage-7.6.12-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ace9048de91293e467b44bce0f0381345078389814ff6e18dbac8fdbf896360e"},
-    {file = "coverage-7.6.12-cp310-cp310-win32.whl", hash = "sha256:ea31689f05043d520113e0552f039603c4dd71fa4c287b64cb3606140c66f425"},
-    {file = "coverage-7.6.12-cp310-cp310-win_amd64.whl", hash = "sha256:676f92141e3c5492d2a1596d52287d0d963df21bf5e55c8b03075a60e1ddf8aa"},
-    {file = "coverage-7.6.12-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e18aafdfb3e9ec0d261c942d35bd7c28d031c5855dadb491d2723ba54f4c3015"},
-    {file = "coverage-7.6.12-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:66fe626fd7aa5982cdebad23e49e78ef7dbb3e3c2a5960a2b53632f1f703ea45"},
-    {file = "coverage-7.6.12-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ef01d70198431719af0b1f5dcbefc557d44a190e749004042927b2a3fed0702"},
-    {file = "coverage-7.6.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e92ae5a289a4bc4c0aae710c0948d3c7892e20fd3588224ebe242039573bf0"},
-    {file = "coverage-7.6.12-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e695df2c58ce526eeab11a2e915448d3eb76f75dffe338ea613c1201b33bab2f"},
-    {file = "coverage-7.6.12-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d74c08e9aaef995f8c4ef6d202dbd219c318450fe2a76da624f2ebb9c8ec5d9f"},
-    {file = "coverage-7.6.12-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e995b3b76ccedc27fe4f477b349b7d64597e53a43fc2961db9d3fbace085d69d"},
-    {file = "coverage-7.6.12-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b1f097878d74fe51e1ddd1be62d8e3682748875b461232cf4b52ddc6e6db0bba"},
-    {file = "coverage-7.6.12-cp311-cp311-win32.whl", hash = "sha256:1f7ffa05da41754e20512202c866d0ebfc440bba3b0ed15133070e20bf5aeb5f"},
-    {file = "coverage-7.6.12-cp311-cp311-win_amd64.whl", hash = "sha256:e216c5c45f89ef8971373fd1c5d8d1164b81f7f5f06bbf23c37e7908d19e8558"},
-    {file = "coverage-7.6.12-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b172f8e030e8ef247b3104902cc671e20df80163b60a203653150d2fc204d1ad"},
-    {file = "coverage-7.6.12-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:641dfe0ab73deb7069fb972d4d9725bf11c239c309ce694dd50b1473c0f641c3"},
-    {file = "coverage-7.6.12-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e549f54ac5f301e8e04c569dfdb907f7be71b06b88b5063ce9d6953d2d58574"},
-    {file = "coverage-7.6.12-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:959244a17184515f8c52dcb65fb662808767c0bd233c1d8a166e7cf74c9ea985"},
-    {file = "coverage-7.6.12-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bda1c5f347550c359f841d6614fb8ca42ae5cb0b74d39f8a1e204815ebe25750"},
-    {file = "coverage-7.6.12-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1ceeb90c3eda1f2d8c4c578c14167dbd8c674ecd7d38e45647543f19839dd6ea"},
-    {file = "coverage-7.6.12-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f16f44025c06792e0fb09571ae454bcc7a3ec75eeb3c36b025eccf501b1a4c3"},
-    {file = "coverage-7.6.12-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b076e625396e787448d27a411aefff867db2bffac8ed04e8f7056b07024eed5a"},
-    {file = "coverage-7.6.12-cp312-cp312-win32.whl", hash = "sha256:00b2086892cf06c7c2d74983c9595dc511acca00665480b3ddff749ec4fb2a95"},
-    {file = "coverage-7.6.12-cp312-cp312-win_amd64.whl", hash = "sha256:7ae6eabf519bc7871ce117fb18bf14e0e343eeb96c377667e3e5dd12095e0288"},
-    {file = "coverage-7.6.12-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:488c27b3db0ebee97a830e6b5a3ea930c4a6e2c07f27a5e67e1b3532e76b9ef1"},
-    {file = "coverage-7.6.12-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5d1095bbee1851269f79fd8e0c9b5544e4c00c0c24965e66d8cba2eb5bb535fd"},
-    {file = "coverage-7.6.12-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0533adc29adf6a69c1baa88c3d7dbcaadcffa21afbed3ca7a225a440e4744bf9"},
-    {file = "coverage-7.6.12-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53c56358d470fa507a2b6e67a68fd002364d23c83741dbc4c2e0680d80ca227e"},
-    {file = "coverage-7.6.12-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64cbb1a3027c79ca6310bf101014614f6e6e18c226474606cf725238cf5bc2d4"},
-    {file = "coverage-7.6.12-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:79cac3390bfa9836bb795be377395f28410811c9066bc4eefd8015258a7578c6"},
-    {file = "coverage-7.6.12-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:9b148068e881faa26d878ff63e79650e208e95cf1c22bd3f77c3ca7b1d9821a3"},
-    {file = "coverage-7.6.12-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8bec2ac5da793c2685ce5319ca9bcf4eee683b8a1679051f8e6ec04c4f2fd7dc"},
-    {file = "coverage-7.6.12-cp313-cp313-win32.whl", hash = "sha256:200e10beb6ddd7c3ded322a4186313d5ca9e63e33d8fab4faa67ef46d3460af3"},
-    {file = "coverage-7.6.12-cp313-cp313-win_amd64.whl", hash = "sha256:2b996819ced9f7dbb812c701485d58f261bef08f9b85304d41219b1496b591ef"},
-    {file = "coverage-7.6.12-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:299cf973a7abff87a30609879c10df0b3bfc33d021e1adabc29138a48888841e"},
-    {file = "coverage-7.6.12-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4b467a8c56974bf06e543e69ad803c6865249d7a5ccf6980457ed2bc50312703"},
-    {file = "coverage-7.6.12-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2458f275944db8129f95d91aee32c828a408481ecde3b30af31d552c2ce284a0"},
-    {file = "coverage-7.6.12-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a9d8be07fb0832636a0f72b80d2a652fe665e80e720301fb22b191c3434d924"},
-    {file = "coverage-7.6.12-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14d47376a4f445e9743f6c83291e60adb1b127607a3618e3185bbc8091f0467b"},
-    {file = "coverage-7.6.12-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b95574d06aa9d2bd6e5cc35a5bbe35696342c96760b69dc4287dbd5abd4ad51d"},
-    {file = "coverage-7.6.12-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:ecea0c38c9079570163d663c0433a9af4094a60aafdca491c6a3d248c7432827"},
-    {file = "coverage-7.6.12-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:2251fabcfee0a55a8578a9d29cecfee5f2de02f11530e7d5c5a05859aa85aee9"},
-    {file = "coverage-7.6.12-cp313-cp313t-win32.whl", hash = "sha256:eb5507795caabd9b2ae3f1adc95f67b1104971c22c624bb354232d65c4fc90b3"},
-    {file = "coverage-7.6.12-cp313-cp313t-win_amd64.whl", hash = "sha256:f60a297c3987c6c02ffb29effc70eadcbb412fe76947d394a1091a3615948e2f"},
-    {file = "coverage-7.6.12-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e7575ab65ca8399c8c4f9a7d61bbd2d204c8b8e447aab9d355682205c9dd948d"},
-    {file = "coverage-7.6.12-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8161d9fbc7e9fe2326de89cd0abb9f3599bccc1287db0aba285cb68d204ce929"},
-    {file = "coverage-7.6.12-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a1e465f398c713f1b212400b4e79a09829cd42aebd360362cd89c5bdc44eb87"},
-    {file = "coverage-7.6.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f25d8b92a4e31ff1bd873654ec367ae811b3a943583e05432ea29264782dc32c"},
-    {file = "coverage-7.6.12-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a936309a65cc5ca80fa9f20a442ff9e2d06927ec9a4f54bcba9c14c066323f2"},
-    {file = "coverage-7.6.12-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:aa6f302a3a0b5f240ee201297fff0bbfe2fa0d415a94aeb257d8b461032389bd"},
-    {file = "coverage-7.6.12-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f973643ef532d4f9be71dd88cf7588936685fdb576d93a79fe9f65bc337d9d73"},
-    {file = "coverage-7.6.12-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:78f5243bb6b1060aed6213d5107744c19f9571ec76d54c99cc15938eb69e0e86"},
-    {file = "coverage-7.6.12-cp39-cp39-win32.whl", hash = "sha256:69e62c5034291c845fc4df7f8155e8544178b6c774f97a99e2734b05eb5bed31"},
-    {file = "coverage-7.6.12-cp39-cp39-win_amd64.whl", hash = "sha256:b01a840ecc25dce235ae4c1b6a0daefb2a203dba0e6e980637ee9c2f6ee0df57"},
-    {file = "coverage-7.6.12-pp39.pp310-none-any.whl", hash = "sha256:7e39e845c4d764208e7b8f6a21c541ade741e2c41afabdfa1caa28687a3c98cf"},
-    {file = "coverage-7.6.12-py3-none-any.whl", hash = "sha256:eb8668cfbc279a536c633137deeb9435d2962caec279c3f8cf8b91fff6ff8953"},
-    {file = "coverage-7.6.12.tar.gz", hash = "sha256:48cfc4641d95d34766ad41d9573cc0f22a48aa88d22657a1fe01dca0dbae4de2"},
+    {file = "coverage-7.7.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a538a23119d1e2e2ce077e902d02ea3d8e0641786ef6e0faf11ce82324743944"},
+    {file = "coverage-7.7.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1586ad158523f4133499a4f322b230e2cfef9cc724820dbd58595a5a236186f4"},
+    {file = "coverage-7.7.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b6c96d69928a3a6767fab8dc1ce8a02cf0156836ccb1e820c7f45a423570d98"},
+    {file = "coverage-7.7.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f18d47641282664276977c604b5a261e51fefc2980f5271d547d706b06a837f"},
+    {file = "coverage-7.7.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2a1e18a85bd066c7c556d85277a7adf4651f259b2579113844835ba1a74aafd"},
+    {file = "coverage-7.7.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:70f0925c4e2bfc965369f417e7cc72538fd1ba91639cf1e4ef4b1a6b50439b3b"},
+    {file = "coverage-7.7.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b0fac2088ec4aaeb5468b814bd3ff5e5978364bfbce5e567c44c9e2854469f6c"},
+    {file = "coverage-7.7.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b3e212a894d8ae07fde2ca8b43d666a6d49bbbddb10da0f6a74ca7bd31f20054"},
+    {file = "coverage-7.7.0-cp310-cp310-win32.whl", hash = "sha256:f32b165bf6dfea0846a9c9c38b7e1d68f313956d60a15cde5d1709fddcaf3bee"},
+    {file = "coverage-7.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:a2454b12a3f12cc4698f3508912e6225ec63682e2ca5a96f80a2b93cef9e63f3"},
+    {file = "coverage-7.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a0a207c87a9f743c8072d059b4711f8d13c456eb42dac778a7d2e5d4f3c253a7"},
+    {file = "coverage-7.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2d673e3add00048215c2cc507f1228a7523fd8bf34f279ac98334c9b07bd2656"},
+    {file = "coverage-7.7.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f81fe93dc1b8e5673f33443c0786c14b77e36f1025973b85e07c70353e46882b"},
+    {file = "coverage-7.7.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8c7524779003d59948c51b4fcbf1ca4e27c26a7d75984f63488f3625c328b9b"},
+    {file = "coverage-7.7.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c124025430249118d018dcedc8b7426f39373527c845093132196f2a483b6dd"},
+    {file = "coverage-7.7.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e7f559c36d5cdc448ee13e7e56ed7b6b5d44a40a511d584d388a0f5d940977ba"},
+    {file = "coverage-7.7.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:37cbc7b0d93dfd133e33c7ec01123fbb90401dce174c3b6661d8d36fb1e30608"},
+    {file = "coverage-7.7.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:7d2a65876274acf544703e943c010b60bd79404e3623a1e5d52b64a6e2728de5"},
+    {file = "coverage-7.7.0-cp311-cp311-win32.whl", hash = "sha256:f5a2f71d6a91238e7628f23538c26aa464d390cbdedf12ee2a7a0fb92a24482a"},
+    {file = "coverage-7.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:ae8006772c6b0fa53c33747913473e064985dac4d65f77fd2fdc6474e7cd54e4"},
+    {file = "coverage-7.7.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:056d3017ed67e7ddf266e6f57378ece543755a4c9231e997789ab3bd11392c94"},
+    {file = "coverage-7.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:33c1394d8407e2771547583b66a85d07ed441ff8fae5a4adb4237ad39ece60db"},
+    {file = "coverage-7.7.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4fbb7a0c3c21908520149d7751cf5b74eb9b38b54d62997b1e9b3ac19a8ee2fe"},
+    {file = "coverage-7.7.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bb356e7ae7c2da13f404bf8f75be90f743c6df8d4607022e759f5d7d89fe83f8"},
+    {file = "coverage-7.7.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bce730d484038e97f27ea2dbe5d392ec5c2261f28c319a3bb266f6b213650135"},
+    {file = "coverage-7.7.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:aa4dff57fc21a575672176d5ab0ef15a927199e775c5e8a3d75162ab2b0c7705"},
+    {file = "coverage-7.7.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b667b91f4f714b17af2a18e220015c941d1cf8b07c17f2160033dbe1e64149f0"},
+    {file = "coverage-7.7.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:693d921621a0c8043bfdc61f7d4df5ea6d22165fe8b807cac21eb80dd94e4bbd"},
+    {file = "coverage-7.7.0-cp312-cp312-win32.whl", hash = "sha256:52fc89602cde411a4196c8c6894afb384f2125f34c031774f82a4f2608c59d7d"},
+    {file = "coverage-7.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:0ce8cf59e09d31a4915ff4c3b94c6514af4c84b22c4cc8ad7c3c546a86150a92"},
+    {file = "coverage-7.7.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4545485fef7a8a2d8f30e6f79ce719eb154aab7e44217eb444c1d38239af2072"},
+    {file = "coverage-7.7.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1393e5aa9441dafb0162c36c8506c648b89aea9565b31f6bfa351e66c11bcd82"},
+    {file = "coverage-7.7.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:316f29cc3392fa3912493ee4c83afa4a0e2db04ff69600711f8c03997c39baaa"},
+    {file = "coverage-7.7.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1ffde1d6bc2a92f9c9207d1ad808550873748ac2d4d923c815b866baa343b3f"},
+    {file = "coverage-7.7.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:416e2a8845eaff288f97eaf76ab40367deafb9073ffc47bf2a583f26b05e5265"},
+    {file = "coverage-7.7.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5efdeff5f353ed3352c04e6b318ab05c6ce9249c25ed3c2090c6e9cadda1e3b2"},
+    {file = "coverage-7.7.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:57f3bd0d29bf2bd9325c0ff9cc532a175110c4bf8f412c05b2405fd35745266d"},
+    {file = "coverage-7.7.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:3ab7090f04b12dc6469882ce81244572779d3a4b67eea1c96fb9ecc8c607ef39"},
+    {file = "coverage-7.7.0-cp313-cp313-win32.whl", hash = "sha256:180e3fc68ee4dc5af8b33b6ca4e3bb8aa1abe25eedcb958ba5cff7123071af68"},
+    {file = "coverage-7.7.0-cp313-cp313-win_amd64.whl", hash = "sha256:55143aa13c49491f5606f05b49ed88663446dce3a4d3c5d77baa4e36a16d3573"},
+    {file = "coverage-7.7.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:cc41374d2f27d81d6558f8a24e5c114580ffefc197fd43eabd7058182f743322"},
+    {file = "coverage-7.7.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:89078312f06237417adda7c021c33f80f7a6d2db8572a5f6c330d89b080061ce"},
+    {file = "coverage-7.7.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b2f144444879363ea8834cd7b6869d79ac796cb8f864b0cfdde50296cd95816"},
+    {file = "coverage-7.7.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:60e6347d1ed882b1159ffea172cb8466ee46c665af4ca397edbf10ff53e9ffaf"},
+    {file = "coverage-7.7.0-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb203c0afffaf1a8f5b9659a013f8f16a1b2cad3a80a8733ceedc968c0cf4c57"},
+    {file = "coverage-7.7.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:ad0edaa97cb983d9f2ff48cadddc3e1fb09f24aa558abeb4dc9a0dbacd12cbb4"},
+    {file = "coverage-7.7.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:c5f8a5364fc37b2f172c26a038bc7ec4885f429de4a05fc10fdcb53fb5834c5c"},
+    {file = "coverage-7.7.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c4e09534037933bf6eb31d804e72c52ec23219b32c1730f9152feabbd7499463"},
+    {file = "coverage-7.7.0-cp313-cp313t-win32.whl", hash = "sha256:1b336d06af14f8da5b1f391e8dec03634daf54dfcb4d1c4fb6d04c09d83cef90"},
+    {file = "coverage-7.7.0-cp313-cp313t-win_amd64.whl", hash = "sha256:b54a1ee4c6f1905a436cbaa04b26626d27925a41cbc3a337e2d3ff7038187f07"},
+    {file = "coverage-7.7.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1c8fbce80b2b8bf135d105aa8f5b36eae0c57d702a1cc3ebdea2a6f03f6cdde5"},
+    {file = "coverage-7.7.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d9710521f07f526de30ccdead67e6b236fe996d214e1a7fba8b36e2ba2cd8261"},
+    {file = "coverage-7.7.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7789e700f33f2b133adae582c9f437523cd5db8de845774988a58c360fc88253"},
+    {file = "coverage-7.7.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b8c36093aca722db73633cf2359026ed7782a239eb1c6db2abcff876012dc4cf"},
+    {file = "coverage-7.7.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c075d167a6ec99b798c1fdf6e391a1d5a2d054caffe9593ba0f97e3df2c04f0e"},
+    {file = "coverage-7.7.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:d013c07061751ae81861cae6ec3a4fe04e84781b11fd4b6b4201590234b25c7b"},
+    {file = "coverage-7.7.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:104bf640f408f4e115b85110047c7f27377e1a8b7ba86f7db4fa47aa49dc9a8e"},
+    {file = "coverage-7.7.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:39abcacd1ed54e2c33c54bdc488b310e8ef6705833f7148b6eb9a547199d375d"},
+    {file = "coverage-7.7.0-cp39-cp39-win32.whl", hash = "sha256:8e336b56301774ace6be0017ff85c3566c556d938359b61b840796a0202f805c"},
+    {file = "coverage-7.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:8c938c6ae59be67ac19a7204e079efc94b38222cd7d0269f96e45e18cddeaa59"},
+    {file = "coverage-7.7.0-pp39.pp310.pp311-none-any.whl", hash = "sha256:3b0e6e54591ae0d7427def8a4d40fca99df6b899d10354bab73cd5609807261c"},
+    {file = "coverage-7.7.0-py3-none-any.whl", hash = "sha256:708f0a1105ef2b11c79ed54ed31f17e6325ac936501fc373f24be3e6a578146a"},
+    {file = "coverage-7.7.0.tar.gz", hash = "sha256:cd879d4646055a573775a1cec863d00c9ff8c55860f8b17f6d8eee9140c06166"},
 ]
 
 [package.dependencies]
@@ -598,42 +562,46 @@ toml = ["tomli"]
 
 [[package]]
 name = "cryptography"
-version = "44.0.1"
+version = "44.0.2"
 description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
 optional = false
 python-versions = "!=3.9.0,!=3.9.1,>=3.7"
 files = [
-    {file = "cryptography-44.0.1-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bf688f615c29bfe9dfc44312ca470989279f0e94bb9f631f85e3459af8efc009"},
-    {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd7c7e2d71d908dc0f8d2027e1604102140d84b155e658c20e8ad1304317691f"},
-    {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:887143b9ff6bad2b7570da75a7fe8bbf5f65276365ac259a5d2d5147a73775f2"},
-    {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:322eb03ecc62784536bc173f1483e76747aafeb69c8728df48537eb431cd1911"},
-    {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:21377472ca4ada2906bc313168c9dc7b1d7ca417b63c1c3011d0c74b7de9ae69"},
-    {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:df978682c1504fc93b3209de21aeabf2375cb1571d4e61907b3e7a2540e83026"},
-    {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:eb3889330f2a4a148abead555399ec9a32b13b7c8ba969b72d8e500eb7ef84cd"},
-    {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:8e6a85a93d0642bd774460a86513c5d9d80b5c002ca9693e63f6e540f1815ed0"},
-    {file = "cryptography-44.0.1-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:6f76fdd6fd048576a04c5210d53aa04ca34d2ed63336d4abd306d0cbe298fddf"},
-    {file = "cryptography-44.0.1-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6c8acf6f3d1f47acb2248ec3ea261171a671f3d9428e34ad0357148d492c7864"},
-    {file = "cryptography-44.0.1-cp37-abi3-win32.whl", hash = "sha256:24979e9f2040c953a94bf3c6782e67795a4c260734e5264dceea65c8f4bae64a"},
-    {file = "cryptography-44.0.1-cp37-abi3-win_amd64.whl", hash = "sha256:fd0ee90072861e276b0ff08bd627abec29e32a53b2be44e41dbcdf87cbee2b00"},
-    {file = "cryptography-44.0.1-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:a2d8a7045e1ab9b9f803f0d9531ead85f90c5f2859e653b61497228b18452008"},
-    {file = "cryptography-44.0.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b8272f257cf1cbd3f2e120f14c68bff2b6bdfcc157fafdee84a1b795efd72862"},
-    {file = "cryptography-44.0.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e8d181e90a777b63f3f0caa836844a1182f1f265687fac2115fcf245f5fbec3"},
-    {file = "cryptography-44.0.1-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:436df4f203482f41aad60ed1813811ac4ab102765ecae7a2bbb1dbb66dcff5a7"},
-    {file = "cryptography-44.0.1-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:4f422e8c6a28cf8b7f883eb790695d6d45b0c385a2583073f3cec434cc705e1a"},
-    {file = "cryptography-44.0.1-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:72198e2b5925155497a5a3e8c216c7fb3e64c16ccee11f0e7da272fa93b35c4c"},
-    {file = "cryptography-44.0.1-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:2a46a89ad3e6176223b632056f321bc7de36b9f9b93b2cc1cccf935a3849dc62"},
-    {file = "cryptography-44.0.1-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:53f23339864b617a3dfc2b0ac8d5c432625c80014c25caac9082314e9de56f41"},
-    {file = "cryptography-44.0.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:888fcc3fce0c888785a4876ca55f9f43787f4c5c1cc1e2e0da71ad481ff82c5b"},
-    {file = "cryptography-44.0.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:00918d859aa4e57db8299607086f793fa7813ae2ff5a4637e318a25ef82730f7"},
-    {file = "cryptography-44.0.1-cp39-abi3-win32.whl", hash = "sha256:9b336599e2cb77b1008cb2ac264b290803ec5e8e89d618a5e978ff5eb6f715d9"},
-    {file = "cryptography-44.0.1-cp39-abi3-win_amd64.whl", hash = "sha256:e403f7f766ded778ecdb790da786b418a9f2394f36e8cc8b796cc056ab05f44f"},
-    {file = "cryptography-44.0.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1f9a92144fa0c877117e9748c74501bea842f93d21ee00b0cf922846d9d0b183"},
-    {file = "cryptography-44.0.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:610a83540765a8d8ce0f351ce42e26e53e1f774a6efb71eb1b41eb01d01c3d12"},
-    {file = "cryptography-44.0.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:5fed5cd6102bb4eb843e3315d2bf25fede494509bddadb81e03a859c1bc17b83"},
-    {file = "cryptography-44.0.1-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:f4daefc971c2d1f82f03097dc6f216744a6cd2ac0f04c68fb935ea2ba2a0d420"},
-    {file = "cryptography-44.0.1-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:94f99f2b943b354a5b6307d7e8d19f5c423a794462bde2bf310c770ba052b1c4"},
-    {file = "cryptography-44.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d9c5b9f698a83c8bd71e0f4d3f9f839ef244798e5ffe96febfa9714717db7af7"},
-    {file = "cryptography-44.0.1.tar.gz", hash = "sha256:f51f5705ab27898afda1aaa430f34ad90dc117421057782022edf0600bec5f14"},
+    {file = "cryptography-44.0.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:efcfe97d1b3c79e486554efddeb8f6f53a4cdd4cf6086642784fa31fc384e1d7"},
+    {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29ecec49f3ba3f3849362854b7253a9f59799e3763b0c9d0826259a88efa02f1"},
+    {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc821e161ae88bfe8088d11bb39caf2916562e0a2dc7b6d56714a48b784ef0bb"},
+    {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:3c00b6b757b32ce0f62c574b78b939afab9eecaf597c4d624caca4f9e71e7843"},
+    {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7bdcd82189759aba3816d1f729ce42ffded1ac304c151d0a8e89b9996ab863d5"},
+    {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:4973da6ca3db4405c54cd0b26d328be54c7747e89e284fcff166132eb7bccc9c"},
+    {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:4e389622b6927d8133f314949a9812972711a111d577a5d1f4bee5e58736b80a"},
+    {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:f514ef4cd14bb6fb484b4a60203e912cfcb64f2ab139e88c2274511514bf7308"},
+    {file = "cryptography-44.0.2-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:1bc312dfb7a6e5d66082c87c34c8a62176e684b6fe3d90fcfe1568de675e6688"},
+    {file = "cryptography-44.0.2-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3b721b8b4d948b218c88cb8c45a01793483821e709afe5f622861fc6182b20a7"},
+    {file = "cryptography-44.0.2-cp37-abi3-win32.whl", hash = "sha256:51e4de3af4ec3899d6d178a8c005226491c27c4ba84101bfb59c901e10ca9f79"},
+    {file = "cryptography-44.0.2-cp37-abi3-win_amd64.whl", hash = "sha256:c505d61b6176aaf982c5717ce04e87da5abc9a36a5b39ac03905c4aafe8de7aa"},
+    {file = "cryptography-44.0.2-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:8e0ddd63e6bf1161800592c71ac794d3fb8001f2caebe0966e77c5234fa9efc3"},
+    {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81276f0ea79a208d961c433a947029e1a15948966658cf6710bbabb60fcc2639"},
+    {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a1e657c0f4ea2a23304ee3f964db058c9e9e635cc7019c4aa21c330755ef6fd"},
+    {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:6210c05941994290f3f7f175a4a57dbbb2afd9273657614c506d5976db061181"},
+    {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d1c3572526997b36f245a96a2b1713bf79ce99b271bbcf084beb6b9b075f29ea"},
+    {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:b042d2a275c8cee83a4b7ae30c45a15e6a4baa65a179a0ec2d78ebb90e4f6699"},
+    {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:d03806036b4f89e3b13b6218fefea8d5312e450935b1a2d55f0524e2ed7c59d9"},
+    {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:c7362add18b416b69d58c910caa217f980c5ef39b23a38a0880dfd87bdf8cd23"},
+    {file = "cryptography-44.0.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:8cadc6e3b5a1f144a039ea08a0bdb03a2a92e19c46be3285123d32029f40a922"},
+    {file = "cryptography-44.0.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6f101b1f780f7fc613d040ca4bdf835c6ef3b00e9bd7125a4255ec574c7916e4"},
+    {file = "cryptography-44.0.2-cp39-abi3-win32.whl", hash = "sha256:3dc62975e31617badc19a906481deacdeb80b4bb454394b4098e3f2525a488c5"},
+    {file = "cryptography-44.0.2-cp39-abi3-win_amd64.whl", hash = "sha256:5f6f90b72d8ccadb9c6e311c775c8305381db88374c65fa1a68250aa8a9cb3a6"},
+    {file = "cryptography-44.0.2-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:af4ff3e388f2fa7bff9f7f2b31b87d5651c45731d3e8cfa0944be43dff5cfbdb"},
+    {file = "cryptography-44.0.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:0529b1d5a0105dd3731fa65680b45ce49da4d8115ea76e9da77a875396727b41"},
+    {file = "cryptography-44.0.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:7ca25849404be2f8e4b3c59483d9d3c51298a22c1c61a0e84415104dacaf5562"},
+    {file = "cryptography-44.0.2-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:268e4e9b177c76d569e8a145a6939eca9a5fec658c932348598818acf31ae9a5"},
+    {file = "cryptography-44.0.2-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:9eb9d22b0a5d8fd9925a7764a054dca914000607dff201a24c791ff5c799e1fa"},
+    {file = "cryptography-44.0.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2bf7bf75f7df9715f810d1b038870309342bff3069c5bd8c6b96128cb158668d"},
+    {file = "cryptography-44.0.2-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:909c97ab43a9c0c0b0ada7a1281430e4e5ec0458e6d9244c0e821bbf152f061d"},
+    {file = "cryptography-44.0.2-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:96e7a5e9d6e71f9f4fca8eebfd603f8e86c5225bb18eb621b2c1e50b290a9471"},
+    {file = "cryptography-44.0.2-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:d1b3031093a366ac767b3feb8bcddb596671b3aaff82d4050f984da0c248b615"},
+    {file = "cryptography-44.0.2-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:04abd71114848aa25edb28e225ab5f268096f44cf0127f3d36975bdf1bdf3390"},
+    {file = "cryptography-44.0.2.tar.gz", hash = "sha256:c63454aa261a0cf0c5b4718349629793e9e634993538db841165b3df74f37ec0"},
 ]
 
 [package.dependencies]
@@ -646,7 +614,7 @@ nox = ["nox (>=2024.4.15)", "nox[uv] (>=2024.3.2)"]
 pep8test = ["check-sdist", "click (>=8.0.1)", "mypy (>=1.4)", "ruff (>=0.3.6)"]
 sdist = ["build (>=1.0.0)"]
 ssh = ["bcrypt (>=3.1.5)"]
-test = ["certifi (>=2024)", "cryptography-vectors (==44.0.1)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"]
+test = ["certifi (>=2024)", "cryptography-vectors (==44.0.2)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"]
 test-randomorder = ["pytest-randomly"]
 
 [[package]]
@@ -678,41 +646,15 @@ files = [
 [package.extras]
 dev = ["black", "flake8", "flake8-pyi", "matplotlib", "mypy (==0.770)", "numpy", "pandas", "pytest"]
 
-[[package]]
-name = "databases"
-version = "0.6.2"
-description = "Async database support for Python."
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "databases-0.6.2-py3-none-any.whl", hash = "sha256:ff4010136ac2bb9da2322a2ffda4ef9185ae1c365e5891e52924dd9499d33dc4"},
-    {file = "databases-0.6.2.tar.gz", hash = "sha256:b09c370ad7c2f64c7f4316c096e265dc2e28304732639889272390decda2f893"},
-]
-
-[package.dependencies]
-aiomysql = {version = "*", optional = true, markers = "extra == \"mysql\""}
-aiosqlite = {version = "*", optional = true, markers = "extra == \"sqlite\""}
-sqlalchemy = ">=1.4,<=1.4.41"
-
-[package.extras]
-aiomysql = ["aiomysql"]
-aiopg = ["aiopg"]
-aiosqlite = ["aiosqlite"]
-asyncmy = ["asyncmy"]
-asyncpg = ["asyncpg"]
-mysql = ["aiomysql"]
-postgresql = ["asyncpg"]
-sqlite = ["aiosqlite"]
-
 [[package]]
 name = "decorator"
-version = "5.1.1"
+version = "5.2.1"
 description = "Decorators for Humans"
 optional = false
-python-versions = ">=3.5"
+python-versions = ">=3.8"
 files = [
-    {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"},
-    {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"},
+    {file = "decorator-5.2.1-py3-none-any.whl", hash = "sha256:d316bb415a2d9e2d2b3abcc4084c6502fc09240e292cd76a76afc106a1c8e04a"},
+    {file = "decorator-5.2.1.tar.gz", hash = "sha256:65f266143752f734b0a7cc83c46f4618af75b8c5911b00ccb61d0ac9b6da0360"},
 ]
 
 [[package]]
@@ -763,18 +705,18 @@ test = ["pytest (>=6)"]
 
 [[package]]
 name = "fastapi"
-version = "0.115.8"
+version = "0.115.11"
 description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "fastapi-0.115.8-py3-none-any.whl", hash = "sha256:753a96dd7e036b34eeef8babdfcfe3f28ff79648f86551eb36bfc1b0bf4a8cbf"},
-    {file = "fastapi-0.115.8.tar.gz", hash = "sha256:0ce9111231720190473e222cdf0f07f7206ad7e53ea02beb1d2dc36e2f0741e9"},
+    {file = "fastapi-0.115.11-py3-none-any.whl", hash = "sha256:32e1541b7b74602e4ef4a0260ecaf3aadf9d4f19590bba3e1bf2ac4666aa2c64"},
+    {file = "fastapi-0.115.11.tar.gz", hash = "sha256:cc81f03f688678b92600a65a5e618b93592c65005db37157147204d8924bf94f"},
 ]
 
 [package.dependencies]
 pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0"
-starlette = ">=0.40.0,<0.46.0"
+starlette = ">=0.40.0,<0.47.0"
 typing-extensions = ">=4.8.0"
 
 [package.extras]
@@ -1712,27 +1654,24 @@ files = [
 
 [[package]]
 name = "obspy"
-version = "1.4.1"
+version = "1.4.0"
 description = "ObsPy - a Python framework for seismological observatories."
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "obspy-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d3d2e53224654711e32a6f808d6a7343a5277cba7ad423b94417874c6f3e80f7"},
-    {file = "obspy-1.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:09e0faecb206231d7395e975371ac66f00d564e2fea3e957274181b8d1c06548"},
-    {file = "obspy-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:2355082a07b2539005364e12fe6a3ed030c7da6a4ddad11e6031eb0d549e80e4"},
-    {file = "obspy-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2275962307055bfd0d6c875b12edb810a731f9ea4943e0c9d4c2eb26d45ada39"},
-    {file = "obspy-1.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:affc60e6c93fc9d66907dab07e7e7261135e173b36324fe2dc89731adf75cf4d"},
-    {file = "obspy-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:3176fdda86968478a6e9d9ffa8a3bb979356fbee29b58396cb0490b7eed02fbb"},
-    {file = "obspy-1.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3f133ce01caa0910cc445f83c7cafa23881ae8c3dd536e0b8f07adb4cf2cc40d"},
-    {file = "obspy-1.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20fcbeb061d03f17b73235518f3372f400058245fe9a7ecfa325eb166d263076"},
-    {file = "obspy-1.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:eeb01ffc7b15ffc028155128a060a8fdb191cb63e3ae3b7392ddc4b0951f134b"},
-    {file = "obspy-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2de4b92baf97e20792c27a0e3663219512b3048dd9acc329370c196241c3b93a"},
-    {file = "obspy-1.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c8befbcf01f230e2102794629d1e4b43fa3950f2830d81dab62ac0f8a819abf"},
-    {file = "obspy-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:05256048d3df93936992370fdd6f1d6c1e55fbb52604e47481a42a0dde1d77c0"},
-    {file = "obspy-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9ac3eb3e106bfa9669c9bcfa1e57b17b7444e3d4cfb7c399ec57e919703d7bfd"},
-    {file = "obspy-1.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:407922924b88c922f2ed411cbe33ef5bbfb657919d6cabb445eb49b12ec3185e"},
-    {file = "obspy-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:770b8318f0f7c3245fcd3aac645140a170dffae984c046ede7958e8865a613f1"},
-    {file = "obspy-1.4.1.tar.gz", hash = "sha256:9cf37b0ce03de43d80398703c006bfddbd709f32e8460a9404b27df998d3f747"},
+    {file = "obspy-1.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ab33aef9cb16059e229b5a64205c0f40a982056c2a680f78ad732207347d4308"},
+    {file = "obspy-1.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c60fc347efe27b04aa4528a873f11d9d62c3708a93b4d4b7a2aeb5751b798b2d"},
+    {file = "obspy-1.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:307a2f6293b29d325e67c3cd80c4eef5df8e7aa1c87154c531f373d6f3c6682d"},
+    {file = "obspy-1.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:86321501112b2ab0de2deaf1d8229fda6d09654bd9a4cbb36d06bbbd0d6387a2"},
+    {file = "obspy-1.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9de41809bd391f36b763e4eada2dab398de9af35b0c1c3c643a042943ee2956"},
+    {file = "obspy-1.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:e8b2f9c93f1321af851505a65e881b090d68c0974c03cb3b19d74bb361c56e52"},
+    {file = "obspy-1.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:28ea33704863b4859a7264255f1b6f520cde002f975d9af7dcbfdf506a563c52"},
+    {file = "obspy-1.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcbc566a2da7d3db4faa17662f25796274e1c55a715a2e6f480e95469776da0b"},
+    {file = "obspy-1.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:a6baa3610d40442da25d619776568d5a4e48530a42c63a1eb391c91a420baffe"},
+    {file = "obspy-1.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d70488ae7d9eb9ef4173e2b7ef9a535cbeede9776fdf8d7be185b3381da570bf"},
+    {file = "obspy-1.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84a87c2cf01728262020b7836c3e66ef231de0739c3c3bb288fbf7510bec6eae"},
+    {file = "obspy-1.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:2090a95b08b214575892c3d99bb3362b13a3b0f4689d4ee55f95ea4d8a2cbc26"},
+    {file = "obspy-1.4.0.tar.gz", hash = "sha256:336a6e1d9a485732b08173cb5dc1dd720a8e53f3b54c180a62bb8ceaa5fe5c06"},
 ]
 
 [package.dependencies]
@@ -1743,7 +1682,7 @@ numpy = ">=1.20"
 requests = "*"
 scipy = ">=1.7"
 setuptools = "*"
-sqlalchemy = "<2"
+sqlalchemy = "*"
 
 [package.extras]
 all = ["cartopy", "geographiclib", "packaging", "pyproj", "pyshp", "pytest", "pytest-json-report"]
@@ -1965,59 +1904,59 @@ files = [
 
 [[package]]
 name = "pycurl"
-version = "7.45.4"
+version = "7.45.6"
 description = "PycURL -- A Python Interface To The cURL library"
 optional = true
 python-versions = ">=3.5"
 files = [
-    {file = "pycurl-7.45.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:247b4af8eab7d04137a7f1a98391930e04ea93dc669b64db5625070fe15f80a3"},
-    {file = "pycurl-7.45.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:561f88697f7540634b1c750146f37bdc0da367b15f6b4ab2bb780871ee6ab005"},
-    {file = "pycurl-7.45.4-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:b485fdaf78553f0b8e1c2803bb7dcbe47a7b47594f846fc7e9d3b94d794cfc89"},
-    {file = "pycurl-7.45.4-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:e7ae49b88a5d57485fbabef004534225dfe04dc15716a61fae1a0c7f46f2279e"},
-    {file = "pycurl-7.45.4-cp310-cp310-win32.whl", hash = "sha256:d14f954ecd21a070038d65ef1c6d1d3ab220f952ff703d48313123222097615c"},
-    {file = "pycurl-7.45.4-cp310-cp310-win_amd64.whl", hash = "sha256:2548c3291a33c821f0f80bf9989fc43b5d90fb78b534a7015c8419b83c6f5803"},
-    {file = "pycurl-7.45.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f6c0e22052946bbfa25be67f9d1d6639eff10781c89f0cf6f3ff2099273d1bad"},
-    {file = "pycurl-7.45.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:acf25cfdaf914db21a2a6e9e274b6d95e3fa2b6018c38f2c58c94b5d8ac3d1b7"},
-    {file = "pycurl-7.45.4-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:a39f28f031885485325034918386be352036c220ca45625c7e286d3938eb579d"},
-    {file = "pycurl-7.45.4-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:9940e3234c1ca3d30f27a2202d325dbc25291605c98e9585100a351cacd935e8"},
-    {file = "pycurl-7.45.4-cp311-cp311-win32.whl", hash = "sha256:ffd3262f98b8997ad04940061d5ebd8bab2362169b9440939c397e24a4a135b0"},
-    {file = "pycurl-7.45.4-cp311-cp311-win_amd64.whl", hash = "sha256:1324a859b50bdb0abdbd5620e42f74240d0b7daf2d5925fa303695d9fc3ece18"},
-    {file = "pycurl-7.45.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:731c46e7c0acffaab19f7c2ecc3d9e7ee337500e87b260b4e0b9fae2d90fa133"},
-    {file = "pycurl-7.45.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:13eb1643ab0bf4fdc539a2cdf1021029b07095d3196c5cee5a4271af268d3d31"},
-    {file = "pycurl-7.45.4-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:df5f94c051c5a163fa85064559ca94979575e2da26740ff91c078c50c541c465"},
-    {file = "pycurl-7.45.4-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:688d09ba2c6a0d4a749d192c43422839d73c40c85143c50cc65c944258fe0ba8"},
-    {file = "pycurl-7.45.4-cp312-cp312-win32.whl", hash = "sha256:236600bfe2cd72efe47333add621286667e8fa027dadf1247349afbf30333e95"},
-    {file = "pycurl-7.45.4-cp312-cp312-win_amd64.whl", hash = "sha256:26745c6c5ebdccfe8a828ac3fd4e6da6f5d2245696604f04529eb7894a02f4db"},
-    {file = "pycurl-7.45.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9bd493ce598f1dc76c8e50043c47debec27c583fa313a836b2d3667640f875d5"},
-    {file = "pycurl-7.45.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4f25d52c97dbca6ebea786f0961b49c1998fa05178abf1964a977c825b3d8ae6"},
-    {file = "pycurl-7.45.4-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:13c4b18f44637859f34639493efd297a08670f45e4eec34ab2dcba724e3cb5fc"},
-    {file = "pycurl-7.45.4-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:0470bff6cc24d8c2f63c80931aa239463800871609dafc6bcc9ca10f5a12a04e"},
-    {file = "pycurl-7.45.4-cp313-cp313-win32.whl", hash = "sha256:3452459668bd01d646385482362b021834a31c036aa1c02acd88924ddeff7d0d"},
-    {file = "pycurl-7.45.4-cp313-cp313-win_amd64.whl", hash = "sha256:fd167f73d34beb0cb8064334aee76d9bdd13167b30be6d5d36fb07d0c8223b71"},
-    {file = "pycurl-7.45.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b0e38e3eb83b0c891f391853f798fc6a97cb5a86a4a731df0b6320e539ae54ae"},
-    {file = "pycurl-7.45.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d192a48b3cec2e13ad432196b65c22e99620db92feae39c0476635354eff68c6"},
-    {file = "pycurl-7.45.4-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:57971d7215fc6fdedcfc092f880a59f04f52fcaf2fd329151b931623d7b59a9c"},
-    {file = "pycurl-7.45.4-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:73df3eb5940a7fbf4cf62f7271e9f23a8e9f80e352c838ee9a8448a70c01d3f5"},
-    {file = "pycurl-7.45.4-cp39-cp39-win32.whl", hash = "sha256:587a4891039803b5f48392066f97b7cd5e7e9a166187abb5cb4b4806fdb8fbef"},
-    {file = "pycurl-7.45.4-cp39-cp39-win_amd64.whl", hash = "sha256:caec8b634763351dd4e1b729a71542b1e2de885d39710ba8e7202817a381b453"},
-    {file = "pycurl-7.45.4.tar.gz", hash = "sha256:32c8e237069273f4260b6ae13d1e0f99daae938977016021565dc6e11050e803"},
+    {file = "pycurl-7.45.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c31b390f1e2cd4525828f1bb78c1f825c0aab5d1588228ed71b22c4784bdb593"},
+    {file = "pycurl-7.45.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:942b352b69184cb26920db48e0c5cb95af39874b57dbe27318e60f1e68564e37"},
+    {file = "pycurl-7.45.6-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:3441ee77e830267aa6e2bb43b29fd5f8a6bd6122010c76a6f0bf84462e9ea9c7"},
+    {file = "pycurl-7.45.6-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:2a21e13278d7553a04b421676c458449f6c10509bebf04993f35154b06ee2b20"},
+    {file = "pycurl-7.45.6-cp310-cp310-win32.whl", hash = "sha256:d0b5501d527901369aba307354530050f56cd102410f2a3bacd192dc12c645e3"},
+    {file = "pycurl-7.45.6-cp310-cp310-win_amd64.whl", hash = "sha256:abe1b204a2f96f2eebeaf93411f03505b46d151ef6d9d89326e6dece7b3a008a"},
+    {file = "pycurl-7.45.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6f57ad26d6ab390391ad5030790e3f1a831c1ee54ad3bf969eb378f5957eeb0a"},
+    {file = "pycurl-7.45.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c6fd295f03c928da33a00f56c91765195155d2ac6f12878f6e467830b5dce5f5"},
+    {file = "pycurl-7.45.6-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:334721ce1ccd71ff8e405470768b3d221b4393570ccc493fcbdbef4cd62e91ed"},
+    {file = "pycurl-7.45.6-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:0cd6b7794268c17f3c660162ed6381769ce0ad260331ef49191418dfc3a2d61a"},
+    {file = "pycurl-7.45.6-cp311-cp311-win32.whl", hash = "sha256:357ea634395310085b9d5116226ac5ec218a6ceebf367c2451ebc8d63a6e9939"},
+    {file = "pycurl-7.45.6-cp311-cp311-win_amd64.whl", hash = "sha256:878ae64484db18f8f10ba99bffc83fefb4fe8f5686448754f93ec32fa4e4ee93"},
+    {file = "pycurl-7.45.6-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c872d4074360964697c39c1544fe8c91bfecbff27c1cdda1fee5498e5fdadcda"},
+    {file = "pycurl-7.45.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:56d1197eadd5774582b259cde4364357da71542758d8e917f91cc6ed7ed5b262"},
+    {file = "pycurl-7.45.6-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:8a99e56d2575aa74c48c0cd08852a65d5fc952798f76a34236256d5589bf5aa0"},
+    {file = "pycurl-7.45.6-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:c04230b9e9cfdca9cf3eb09a0bec6cf2f084640f1f1ca1929cca51411af85de2"},
+    {file = "pycurl-7.45.6-cp312-cp312-win32.whl", hash = "sha256:ae893144b82d72d95c932ebdeb81fc7e9fde758e5ecd5dd10ad5b67f34a8b8ee"},
+    {file = "pycurl-7.45.6-cp312-cp312-win_amd64.whl", hash = "sha256:56f841b6f2f7a8b2d3051b9ceebd478599dbea3c8d1de8fb9333c895d0c1eea5"},
+    {file = "pycurl-7.45.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7c09b7180799af70fc1d4eed580cfb1b9f34fda9081f73a3e3bc9a0e5a4c0e9b"},
+    {file = "pycurl-7.45.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:361bf94b2a057c7290f9ab84e935793ca515121fc012f4b6bef6c3b5e4ea4397"},
+    {file = "pycurl-7.45.6-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:bb9eff0c7794af972da769a887c87729f1bcd8869297b1c01a2732febbb75876"},
+    {file = "pycurl-7.45.6-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:26839d43dc7fff6b80e0067f185cc1d0e9be2ae6e2e2361ae8488cead5901c04"},
+    {file = "pycurl-7.45.6-cp313-cp313-win32.whl", hash = "sha256:a721c2696a71b1aa5ecf82e6d0ade64bc7211b7317f1c9c66e82f82e2264d8b4"},
+    {file = "pycurl-7.45.6-cp313-cp313-win_amd64.whl", hash = "sha256:f0198ebcda8686b3a0c66d490a687fa5fd466f8ecc2f20a0ed0931579538ae3d"},
+    {file = "pycurl-7.45.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a554a2813d415a7bb9a996a6298f3829f57e987635dcab9f1197b2dccd0ab3b2"},
+    {file = "pycurl-7.45.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9f721e3394e5bd7079802ec1819b19c5be4842012268cc45afcb3884efb31cf0"},
+    {file = "pycurl-7.45.6-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:81005c0f681d31d5af694d1d3c18bbf1bed0bc8b2bb10fb7388cb1378ba9bd6a"},
+    {file = "pycurl-7.45.6-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:3fc0b505c37c7c54d88ced27e1d9e3241130987c24bf1611d9bbd9a3e499e07c"},
+    {file = "pycurl-7.45.6-cp39-cp39-win32.whl", hash = "sha256:1309fc0f558a80ca444a3a5b0bdb1572a4d72b195233f0e65413b4d4dd78809b"},
+    {file = "pycurl-7.45.6-cp39-cp39-win_amd64.whl", hash = "sha256:2d1a49418b8b4c61f52e06d97b9c16142b425077bd997a123a2ba9ef82553203"},
+    {file = "pycurl-7.45.6.tar.gz", hash = "sha256:2b73e66b22719ea48ac08a93fc88e57ef36d46d03cb09d972063c9aa86bb74e6"},
 ]
 
 [[package]]
 name = "pydantic"
-version = "2.9.2"
+version = "2.10.6"
 description = "Data validation using Python type hints"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "pydantic-2.9.2-py3-none-any.whl", hash = "sha256:f048cec7b26778210e28a0459867920654d48e5e62db0958433636cde4254f12"},
-    {file = "pydantic-2.9.2.tar.gz", hash = "sha256:d155cef71265d1e9807ed1c32b4c8deec042a44a50a4188b25ac67ecd81a9c0f"},
+    {file = "pydantic-2.10.6-py3-none-any.whl", hash = "sha256:427d664bf0b8a2b34ff5dd0f5a18df00591adcee7198fbd71981054cef37b584"},
+    {file = "pydantic-2.10.6.tar.gz", hash = "sha256:ca5daa827cce33de7a42be142548b0096bf05a7e7b365aebfa5f8eeec7128236"},
 ]
 
 [package.dependencies]
 annotated-types = ">=0.6.0"
-pydantic-core = "2.23.4"
-typing-extensions = {version = ">=4.6.1", markers = "python_version < \"3.13\""}
+pydantic-core = "2.27.2"
+typing-extensions = ">=4.12.2"
 
 [package.extras]
 email = ["email-validator (>=2.0.0)"]
@@ -2025,100 +1964,111 @@ timezone = ["tzdata"]
 
 [[package]]
 name = "pydantic-core"
-version = "2.23.4"
+version = "2.27.2"
 description = "Core functionality for Pydantic validation and serialization"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "pydantic_core-2.23.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:b10bd51f823d891193d4717448fab065733958bdb6a6b351967bd349d48d5c9b"},
-    {file = "pydantic_core-2.23.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4fc714bdbfb534f94034efaa6eadd74e5b93c8fa6315565a222f7b6f42ca1166"},
-    {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63e46b3169866bd62849936de036f901a9356e36376079b05efa83caeaa02ceb"},
-    {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed1a53de42fbe34853ba90513cea21673481cd81ed1be739f7f2efb931b24916"},
-    {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cfdd16ab5e59fc31b5e906d1a3f666571abc367598e3e02c83403acabc092e07"},
-    {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255a8ef062cbf6674450e668482456abac99a5583bbafb73f9ad469540a3a232"},
-    {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a7cd62e831afe623fbb7aabbb4fe583212115b3ef38a9f6b71869ba644624a2"},
-    {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f09e2ff1f17c2b51f2bc76d1cc33da96298f0a036a137f5440ab3ec5360b624f"},
-    {file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e38e63e6f3d1cec5a27e0afe90a085af8b6806ee208b33030e65b6516353f1a3"},
-    {file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0dbd8dbed2085ed23b5c04afa29d8fd2771674223135dc9bc937f3c09284d071"},
-    {file = "pydantic_core-2.23.4-cp310-none-win32.whl", hash = "sha256:6531b7ca5f951d663c339002e91aaebda765ec7d61b7d1e3991051906ddde119"},
-    {file = "pydantic_core-2.23.4-cp310-none-win_amd64.whl", hash = "sha256:7c9129eb40958b3d4500fa2467e6a83356b3b61bfff1b414c7361d9220f9ae8f"},
-    {file = "pydantic_core-2.23.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:77733e3892bb0a7fa797826361ce8a9184d25c8dffaec60b7ffe928153680ba8"},
-    {file = "pydantic_core-2.23.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b84d168f6c48fabd1f2027a3d1bdfe62f92cade1fb273a5d68e621da0e44e6d"},
-    {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df49e7a0861a8c36d089c1ed57d308623d60416dab2647a4a17fe050ba85de0e"},
-    {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ff02b6d461a6de369f07ec15e465a88895f3223eb75073ffea56b84d9331f607"},
-    {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:996a38a83508c54c78a5f41456b0103c30508fed9abcad0a59b876d7398f25fd"},
-    {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d97683ddee4723ae8c95d1eddac7c192e8c552da0c73a925a89fa8649bf13eea"},
-    {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:216f9b2d7713eb98cb83c80b9c794de1f6b7e3145eef40400c62e86cee5f4e1e"},
-    {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6f783e0ec4803c787bcea93e13e9932edab72068f68ecffdf86a99fd5918878b"},
-    {file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d0776dea117cf5272382634bd2a5c1b6eb16767c223c6a5317cd3e2a757c61a0"},
-    {file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d5f7a395a8cf1621939692dba2a6b6a830efa6b3cee787d82c7de1ad2930de64"},
-    {file = "pydantic_core-2.23.4-cp311-none-win32.whl", hash = "sha256:74b9127ffea03643e998e0c5ad9bd3811d3dac8c676e47db17b0ee7c3c3bf35f"},
-    {file = "pydantic_core-2.23.4-cp311-none-win_amd64.whl", hash = "sha256:98d134c954828488b153d88ba1f34e14259284f256180ce659e8d83e9c05eaa3"},
-    {file = "pydantic_core-2.23.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f3e0da4ebaef65158d4dfd7d3678aad692f7666877df0002b8a522cdf088f231"},
-    {file = "pydantic_core-2.23.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f69a8e0b033b747bb3e36a44e7732f0c99f7edd5cea723d45bc0d6e95377ffee"},
-    {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:723314c1d51722ab28bfcd5240d858512ffd3116449c557a1336cbe3919beb87"},
-    {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bb2802e667b7051a1bebbfe93684841cc9351004e2badbd6411bf357ab8d5ac8"},
-    {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d18ca8148bebe1b0a382a27a8ee60350091a6ddaf475fa05ef50dc35b5df6327"},
-    {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33e3d65a85a2a4a0dc3b092b938a4062b1a05f3a9abde65ea93b233bca0e03f2"},
-    {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:128585782e5bfa515c590ccee4b727fb76925dd04a98864182b22e89a4e6ed36"},
-    {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:68665f4c17edcceecc112dfed5dbe6f92261fb9d6054b47d01bf6371a6196126"},
-    {file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:20152074317d9bed6b7a95ade3b7d6054845d70584216160860425f4fbd5ee9e"},
-    {file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9261d3ce84fa1d38ed649c3638feefeae23d32ba9182963e465d58d62203bd24"},
-    {file = "pydantic_core-2.23.4-cp312-none-win32.whl", hash = "sha256:4ba762ed58e8d68657fc1281e9bb72e1c3e79cc5d464be146e260c541ec12d84"},
-    {file = "pydantic_core-2.23.4-cp312-none-win_amd64.whl", hash = "sha256:97df63000f4fea395b2824da80e169731088656d1818a11b95f3b173747b6cd9"},
-    {file = "pydantic_core-2.23.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7530e201d10d7d14abce4fb54cfe5b94a0aefc87da539d0346a484ead376c3cc"},
-    {file = "pydantic_core-2.23.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:df933278128ea1cd77772673c73954e53a1c95a4fdf41eef97c2b779271bd0bd"},
-    {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cb3da3fd1b6a5d0279a01877713dbda118a2a4fc6f0d821a57da2e464793f05"},
-    {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c6dcb030aefb668a2b7009c85b27f90e51e6a3b4d5c9bc4c57631292015b0d"},
-    {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:696dd8d674d6ce621ab9d45b205df149399e4bb9aa34102c970b721554828510"},
-    {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2971bb5ffe72cc0f555c13e19b23c85b654dd2a8f7ab493c262071377bfce9f6"},
-    {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8394d940e5d400d04cad4f75c0598665cbb81aecefaca82ca85bd28264af7f9b"},
-    {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0dff76e0602ca7d4cdaacc1ac4c005e0ce0dcfe095d5b5259163a80d3a10d327"},
-    {file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7d32706badfe136888bdea71c0def994644e09fff0bfe47441deaed8e96fdbc6"},
-    {file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ed541d70698978a20eb63d8c5d72f2cc6d7079d9d90f6b50bad07826f1320f5f"},
-    {file = "pydantic_core-2.23.4-cp313-none-win32.whl", hash = "sha256:3d5639516376dce1940ea36edf408c554475369f5da2abd45d44621cb616f769"},
-    {file = "pydantic_core-2.23.4-cp313-none-win_amd64.whl", hash = "sha256:5a1504ad17ba4210df3a045132a7baeeba5a200e930f57512ee02909fc5c4cb5"},
-    {file = "pydantic_core-2.23.4-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d4488a93b071c04dc20f5cecc3631fc78b9789dd72483ba15d423b5b3689b555"},
-    {file = "pydantic_core-2.23.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:81965a16b675b35e1d09dd14df53f190f9129c0202356ed44ab2728b1c905658"},
-    {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ffa2ebd4c8530079140dd2d7f794a9d9a73cbb8e9d59ffe24c63436efa8f271"},
-    {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:61817945f2fe7d166e75fbfb28004034b48e44878177fc54d81688e7b85a3665"},
-    {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:29d2c342c4bc01b88402d60189f3df065fb0dda3654744d5a165a5288a657368"},
-    {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5e11661ce0fd30a6790e8bcdf263b9ec5988e95e63cf901972107efc49218b13"},
-    {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d18368b137c6295db49ce7218b1a9ba15c5bc254c96d7c9f9e924a9bc7825ad"},
-    {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ec4e55f79b1c4ffb2eecd8a0cfba9955a2588497d96851f4c8f99aa4a1d39b12"},
-    {file = "pydantic_core-2.23.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:374a5e5049eda9e0a44c696c7ade3ff355f06b1fe0bb945ea3cac2bc336478a2"},
-    {file = "pydantic_core-2.23.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5c364564d17da23db1106787675fc7af45f2f7b58b4173bfdd105564e132e6fb"},
-    {file = "pydantic_core-2.23.4-cp38-none-win32.whl", hash = "sha256:d7a80d21d613eec45e3d41eb22f8f94ddc758a6c4720842dc74c0581f54993d6"},
-    {file = "pydantic_core-2.23.4-cp38-none-win_amd64.whl", hash = "sha256:5f5ff8d839f4566a474a969508fe1c5e59c31c80d9e140566f9a37bba7b8d556"},
-    {file = "pydantic_core-2.23.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a4fa4fc04dff799089689f4fd502ce7d59de529fc2f40a2c8836886c03e0175a"},
-    {file = "pydantic_core-2.23.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0a7df63886be5e270da67e0966cf4afbae86069501d35c8c1b3b6c168f42cb36"},
-    {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcedcd19a557e182628afa1d553c3895a9f825b936415d0dbd3cd0bbcfd29b4b"},
-    {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f54b118ce5de9ac21c363d9b3caa6c800341e8c47a508787e5868c6b79c9323"},
-    {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86d2f57d3e1379a9525c5ab067b27dbb8a0642fb5d454e17a9ac434f9ce523e3"},
-    {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de6d1d1b9e5101508cb37ab0d972357cac5235f5c6533d1071964c47139257df"},
-    {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1278e0d324f6908e872730c9102b0112477a7f7cf88b308e4fc36ce1bdb6d58c"},
-    {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9a6b5099eeec78827553827f4c6b8615978bb4b6a88e5d9b93eddf8bb6790f55"},
-    {file = "pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e55541f756f9b3ee346b840103f32779c695a19826a4c442b7954550a0972040"},
-    {file = "pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a5c7ba8ffb6d6f8f2ab08743be203654bb1aaa8c9dcb09f82ddd34eadb695605"},
-    {file = "pydantic_core-2.23.4-cp39-none-win32.whl", hash = "sha256:37b0fe330e4a58d3c58b24d91d1eb102aeec675a3db4c292ec3928ecd892a9a6"},
-    {file = "pydantic_core-2.23.4-cp39-none-win_amd64.whl", hash = "sha256:1498bec4c05c9c787bde9125cfdcc63a41004ff167f495063191b863399b1a29"},
-    {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f455ee30a9d61d3e1a15abd5068827773d6e4dc513e795f380cdd59932c782d5"},
-    {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1e90d2e3bd2c3863d48525d297cd143fe541be8bbf6f579504b9712cb6b643ec"},
-    {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e203fdf807ac7e12ab59ca2bfcabb38c7cf0b33c41efeb00f8e5da1d86af480"},
-    {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e08277a400de01bc72436a0ccd02bdf596631411f592ad985dcee21445bd0068"},
-    {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f220b0eea5965dec25480b6333c788fb72ce5f9129e8759ef876a1d805d00801"},
-    {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d06b0c8da4f16d1d1e352134427cb194a0a6e19ad5db9161bf32b2113409e728"},
-    {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ba1a0996f6c2773bd83e63f18914c1de3c9dd26d55f4ac302a7efe93fb8e7433"},
-    {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:9a5bce9d23aac8f0cf0836ecfc033896aa8443b501c58d0602dbfd5bd5b37753"},
-    {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:78ddaaa81421a29574a682b3179d4cf9e6d405a09b99d93ddcf7e5239c742e21"},
-    {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:883a91b5dd7d26492ff2f04f40fbb652de40fcc0afe07e8129e8ae779c2110eb"},
-    {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88ad334a15b32a791ea935af224b9de1bf99bcd62fabf745d5f3442199d86d59"},
-    {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:233710f069d251feb12a56da21e14cca67994eab08362207785cf8c598e74577"},
-    {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:19442362866a753485ba5e4be408964644dd6a09123d9416c54cd49171f50744"},
-    {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:624e278a7d29b6445e4e813af92af37820fafb6dcc55c012c834f9e26f9aaaef"},
-    {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f5ef8f42bec47f21d07668a043f077d507e5bf4e668d5c6dfe6aaba89de1a5b8"},
-    {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:aea443fffa9fbe3af1a9ba721a87f926fe548d32cab71d188a6ede77d0ff244e"},
-    {file = "pydantic_core-2.23.4.tar.gz", hash = "sha256:2584f7cf844ac4d970fba483a717dbe10c1c1c96a969bf65d61ffe94df1b2863"},
+    {file = "pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa"},
+    {file = "pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c"},
+    {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7969e133a6f183be60e9f6f56bfae753585680f3b7307a8e555a948d443cc05a"},
+    {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3de9961f2a346257caf0aa508a4da705467f53778e9ef6fe744c038119737ef5"},
+    {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e2bb4d3e5873c37bb3dd58714d4cd0b0e6238cebc4177ac8fe878f8b3aa8e74c"},
+    {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:280d219beebb0752699480fe8f1dc61ab6615c2046d76b7ab7ee38858de0a4e7"},
+    {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47956ae78b6422cbd46f772f1746799cbb862de838fd8d1fbd34a82e05b0983a"},
+    {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:14d4a5c49d2f009d62a2a7140d3064f686d17a5d1a268bc641954ba181880236"},
+    {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:337b443af21d488716f8d0b6164de833e788aa6bd7e3a39c005febc1284f4962"},
+    {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:03d0f86ea3184a12f41a2d23f7ccb79cdb5a18e06993f8a45baa8dfec746f0e9"},
+    {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7041c36f5680c6e0f08d922aed302e98b3745d97fe1589db0a3eebf6624523af"},
+    {file = "pydantic_core-2.27.2-cp310-cp310-win32.whl", hash = "sha256:50a68f3e3819077be2c98110c1f9dcb3817e93f267ba80a2c05bb4f8799e2ff4"},
+    {file = "pydantic_core-2.27.2-cp310-cp310-win_amd64.whl", hash = "sha256:e0fd26b16394ead34a424eecf8a31a1f5137094cabe84a1bcb10fa6ba39d3d31"},
+    {file = "pydantic_core-2.27.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:8e10c99ef58cfdf2a66fc15d66b16c4a04f62bca39db589ae8cba08bc55331bc"},
+    {file = "pydantic_core-2.27.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:26f32e0adf166a84d0cb63be85c562ca8a6fa8de28e5f0d92250c6b7e9e2aff7"},
+    {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c19d1ea0673cd13cc2f872f6c9ab42acc4e4f492a7ca9d3795ce2b112dd7e15"},
+    {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e68c4446fe0810e959cdff46ab0a41ce2f2c86d227d96dc3847af0ba7def306"},
+    {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d9640b0059ff4f14d1f37321b94061c6db164fbe49b334b31643e0528d100d99"},
+    {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:40d02e7d45c9f8af700f3452f329ead92da4c5f4317ca9b896de7ce7199ea459"},
+    {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c1fd185014191700554795c99b347d64f2bb637966c4cfc16998a0ca700d048"},
+    {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d81d2068e1c1228a565af076598f9e7451712700b673de8f502f0334f281387d"},
+    {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1a4207639fb02ec2dbb76227d7c751a20b1a6b4bc52850568e52260cae64ca3b"},
+    {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:3de3ce3c9ddc8bbd88f6e0e304dea0e66d843ec9de1b0042b0911c1663ffd474"},
+    {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:30c5f68ded0c36466acede341551106821043e9afaad516adfb6e8fa80a4e6a6"},
+    {file = "pydantic_core-2.27.2-cp311-cp311-win32.whl", hash = "sha256:c70c26d2c99f78b125a3459f8afe1aed4d9687c24fd677c6a4436bc042e50d6c"},
+    {file = "pydantic_core-2.27.2-cp311-cp311-win_amd64.whl", hash = "sha256:08e125dbdc505fa69ca7d9c499639ab6407cfa909214d500897d02afb816e7cc"},
+    {file = "pydantic_core-2.27.2-cp311-cp311-win_arm64.whl", hash = "sha256:26f0d68d4b235a2bae0c3fc585c585b4ecc51382db0e3ba402a22cbc440915e4"},
+    {file = "pydantic_core-2.27.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9e0c8cfefa0ef83b4da9588448b6d8d2a2bf1a53c3f1ae5fca39eb3061e2f0b0"},
+    {file = "pydantic_core-2.27.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:83097677b8e3bd7eaa6775720ec8e0405f1575015a463285a92bfdfe254529ef"},
+    {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:172fce187655fece0c90d90a678424b013f8fbb0ca8b036ac266749c09438cb7"},
+    {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:519f29f5213271eeeeb3093f662ba2fd512b91c5f188f3bb7b27bc5973816934"},
+    {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05e3a55d124407fffba0dd6b0c0cd056d10e983ceb4e5dbd10dda135c31071d6"},
+    {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c3ed807c7b91de05e63930188f19e921d1fe90de6b4f5cd43ee7fcc3525cb8c"},
+    {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fb4aadc0b9a0c063206846d603b92030eb6f03069151a625667f982887153e2"},
+    {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:28ccb213807e037460326424ceb8b5245acb88f32f3d2777427476e1b32c48c4"},
+    {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:de3cd1899e2c279b140adde9357c4495ed9d47131b4a4eaff9052f23398076b3"},
+    {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:220f892729375e2d736b97d0e51466252ad84c51857d4d15f5e9692f9ef12be4"},
+    {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a0fcd29cd6b4e74fe8ddd2c90330fd8edf2e30cb52acda47f06dd615ae72da57"},
+    {file = "pydantic_core-2.27.2-cp312-cp312-win32.whl", hash = "sha256:1e2cb691ed9834cd6a8be61228471d0a503731abfb42f82458ff27be7b2186fc"},
+    {file = "pydantic_core-2.27.2-cp312-cp312-win_amd64.whl", hash = "sha256:cc3f1a99a4f4f9dd1de4fe0312c114e740b5ddead65bb4102884b384c15d8bc9"},
+    {file = "pydantic_core-2.27.2-cp312-cp312-win_arm64.whl", hash = "sha256:3911ac9284cd8a1792d3cb26a2da18f3ca26c6908cc434a18f730dc0db7bfa3b"},
+    {file = "pydantic_core-2.27.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7d14bd329640e63852364c306f4d23eb744e0f8193148d4044dd3dacdaacbd8b"},
+    {file = "pydantic_core-2.27.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82f91663004eb8ed30ff478d77c4d1179b3563df6cdb15c0817cd1cdaf34d154"},
+    {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71b24c7d61131bb83df10cc7e687433609963a944ccf45190cfc21e0887b08c9"},
+    {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fa8e459d4954f608fa26116118bb67f56b93b209c39b008277ace29937453dc9"},
+    {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce8918cbebc8da707ba805b7fd0b382816858728ae7fe19a942080c24e5b7cd1"},
+    {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eda3f5c2a021bbc5d976107bb302e0131351c2ba54343f8a496dc8783d3d3a6a"},
+    {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8086fa684c4775c27f03f062cbb9eaa6e17f064307e86b21b9e0abc9c0f02e"},
+    {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8d9b3388db186ba0c099a6d20f0604a44eabdeef1777ddd94786cdae158729e4"},
+    {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7a66efda2387de898c8f38c0cf7f14fca0b51a8ef0b24bfea5849f1b3c95af27"},
+    {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:18a101c168e4e092ab40dbc2503bdc0f62010e95d292b27827871dc85450d7ee"},
+    {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ba5dd002f88b78a4215ed2f8ddbdf85e8513382820ba15ad5ad8955ce0ca19a1"},
+    {file = "pydantic_core-2.27.2-cp313-cp313-win32.whl", hash = "sha256:1ebaf1d0481914d004a573394f4be3a7616334be70261007e47c2a6fe7e50130"},
+    {file = "pydantic_core-2.27.2-cp313-cp313-win_amd64.whl", hash = "sha256:953101387ecf2f5652883208769a79e48db18c6df442568a0b5ccd8c2723abee"},
+    {file = "pydantic_core-2.27.2-cp313-cp313-win_arm64.whl", hash = "sha256:ac4dbfd1691affb8f48c2c13241a2e3b60ff23247cbcf981759c768b6633cf8b"},
+    {file = "pydantic_core-2.27.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d3e8d504bdd3f10835468f29008d72fc8359d95c9c415ce6e767203db6127506"},
+    {file = "pydantic_core-2.27.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:521eb9b7f036c9b6187f0b47318ab0d7ca14bd87f776240b90b21c1f4f149320"},
+    {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85210c4d99a0114f5a9481b44560d7d1e35e32cc5634c656bc48e590b669b145"},
+    {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d716e2e30c6f140d7560ef1538953a5cd1a87264c737643d481f2779fc247fe1"},
+    {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f66d89ba397d92f840f8654756196d93804278457b5fbede59598a1f9f90b228"},
+    {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:669e193c1c576a58f132e3158f9dfa9662969edb1a250c54d8fa52590045f046"},
+    {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdbe7629b996647b99c01b37f11170a57ae675375b14b8c13b8518b8320ced5"},
+    {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d262606bf386a5ba0b0af3b97f37c83d7011439e3dc1a9298f21efb292e42f1a"},
+    {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:cabb9bcb7e0d97f74df8646f34fc76fbf793b7f6dc2438517d7a9e50eee4f14d"},
+    {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_armv7l.whl", hash = "sha256:d2d63f1215638d28221f664596b1ccb3944f6e25dd18cd3b86b0a4c408d5ebb9"},
+    {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bca101c00bff0adb45a833f8451b9105d9df18accb8743b08107d7ada14bd7da"},
+    {file = "pydantic_core-2.27.2-cp38-cp38-win32.whl", hash = "sha256:f6f8e111843bbb0dee4cb6594cdc73e79b3329b526037ec242a3e49012495b3b"},
+    {file = "pydantic_core-2.27.2-cp38-cp38-win_amd64.whl", hash = "sha256:fd1aea04935a508f62e0d0ef1f5ae968774a32afc306fb8545e06f5ff5cdf3ad"},
+    {file = "pydantic_core-2.27.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:c10eb4f1659290b523af58fa7cffb452a61ad6ae5613404519aee4bfbf1df993"},
+    {file = "pydantic_core-2.27.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef592d4bad47296fb11f96cd7dc898b92e795032b4894dfb4076cfccd43a9308"},
+    {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c61709a844acc6bf0b7dce7daae75195a10aac96a596ea1b776996414791ede4"},
+    {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c5f762659e47fdb7b16956c71598292f60a03aa92f8b6351504359dbdba6cf"},
+    {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c9775e339e42e79ec99c441d9730fccf07414af63eac2f0e48e08fd38a64d76"},
+    {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57762139821c31847cfb2df63c12f725788bd9f04bc2fb392790959b8f70f118"},
+    {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d1e85068e818c73e048fe28cfc769040bb1f475524f4745a5dc621f75ac7630"},
+    {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:097830ed52fd9e427942ff3b9bc17fab52913b2f50f2880dc4a5611446606a54"},
+    {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:044a50963a614ecfae59bb1eaf7ea7efc4bc62f49ed594e18fa1e5d953c40e9f"},
+    {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:4e0b4220ba5b40d727c7f879eac379b822eee5d8fff418e9d3381ee45b3b0362"},
+    {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5e4f4bb20d75e9325cc9696c6802657b58bc1dbbe3022f32cc2b2b632c3fbb96"},
+    {file = "pydantic_core-2.27.2-cp39-cp39-win32.whl", hash = "sha256:cca63613e90d001b9f2f9a9ceb276c308bfa2a43fafb75c8031c4f66039e8c6e"},
+    {file = "pydantic_core-2.27.2-cp39-cp39-win_amd64.whl", hash = "sha256:77d1bca19b0f7021b3a982e6f903dcd5b2b06076def36a652e3907f596e29f67"},
+    {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2bf14caea37e91198329b828eae1618c068dfb8ef17bb33287a7ad4b61ac314e"},
+    {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b0cb791f5b45307caae8810c2023a184c74605ec3bcbb67d13846c28ff731ff8"},
+    {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:688d3fd9fcb71f41c4c015c023d12a79d1c4c0732ec9eb35d96e3388a120dcf3"},
+    {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d591580c34f4d731592f0e9fe40f9cc1b430d297eecc70b962e93c5c668f15f"},
+    {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:82f986faf4e644ffc189a7f1aafc86e46ef70372bb153e7001e8afccc6e54133"},
+    {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:bec317a27290e2537f922639cafd54990551725fc844249e64c523301d0822fc"},
+    {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:0296abcb83a797db256b773f45773da397da75a08f5fcaef41f2044adec05f50"},
+    {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:0d75070718e369e452075a6017fbf187f788e17ed67a3abd47fa934d001863d9"},
+    {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7e17b560be3c98a8e3aa66ce828bdebb9e9ac6ad5466fba92eb74c4c95cb1151"},
+    {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c33939a82924da9ed65dab5a65d427205a73181d8098e79b6b426bdf8ad4e656"},
+    {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:00bad2484fa6bda1e216e7345a798bd37c68fb2d97558edd584942aa41b7d278"},
+    {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c817e2b40aba42bac6f457498dacabc568c3b7a986fc9ba7c8d9d260b71485fb"},
+    {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:251136cdad0cb722e93732cb45ca5299fb56e1344a833640bf93b2803f8d1bfd"},
+    {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d2088237af596f0a524d3afc39ab3b036e8adb054ee57cbb1dcf8e09da5b29cc"},
+    {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d4041c0b966a84b4ae7a09832eb691a35aec90910cd2dbe7a208de59be77965b"},
+    {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:8083d4e875ebe0b864ffef72a4304827015cff328a1be6e22cc850753bfb122b"},
+    {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f141ee28a0ad2123b6611b6ceff018039df17f32ada8b534e6aa039545a3efb2"},
+    {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7d0c8399fcc1848491f00e0314bd59fb34a9c008761bcb422a057670c3f65e35"},
+    {file = "pydantic_core-2.27.2.tar.gz", hash = "sha256:eb026e5a4c1fee05726072337ff51d1efb6f59090b7da90d30ea58625b1ffb39"},
 ]
 
 [package.dependencies]
@@ -2155,13 +2105,13 @@ diagrams = ["jinja2", "railroad-diagrams"]
 
 [[package]]
 name = "pytest"
-version = "8.3.4"
+version = "8.3.5"
 description = "pytest: simple powerful testing with Python"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "pytest-8.3.4-py3-none-any.whl", hash = "sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6"},
-    {file = "pytest-8.3.4.tar.gz", hash = "sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761"},
+    {file = "pytest-8.3.5-py3-none-any.whl", hash = "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820"},
+    {file = "pytest-8.3.5.tar.gz", hash = "sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845"},
 ]
 
 [package.dependencies]
@@ -2479,13 +2429,13 @@ test = ["array-api-strict", "asv", "gmpy2", "hypothesis (>=6.30)", "mpmath", "po
 
 [[package]]
 name = "setuptools"
-version = "75.8.0"
+version = "76.1.0"
 description = "Easily download, build, install, upgrade, and uninstall Python packages"
 optional = false
 python-versions = ">=3.9"
 files = [
-    {file = "setuptools-75.8.0-py3-none-any.whl", hash = "sha256:e3982f444617239225d675215d51f6ba05f845d4eec313da4418fdbb56fb27e3"},
-    {file = "setuptools-75.8.0.tar.gz", hash = "sha256:c5afc8f407c626b8313a86e10311dd3f661c6cd9c09d4bf8c15c0e11f9f2b0e6"},
+    {file = "setuptools-76.1.0-py3-none-any.whl", hash = "sha256:34750dcb17d046929f545dec9b8349fe42bf4ba13ddffee78428aec422dbfb73"},
+    {file = "setuptools-76.1.0.tar.gz", hash = "sha256:4959b9ad482ada2ba2320c8f1a8d8481d4d8d668908a7a1b84d987375cd7f5bd"},
 ]
 
 [package.extras]
@@ -2521,77 +2471,98 @@ files = [
 
 [[package]]
 name = "sqlalchemy"
-version = "1.4.41"
+version = "2.0.39"
 description = "Database Abstraction Library"
 optional = false
-python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7"
-files = [
-    {file = "SQLAlchemy-1.4.41-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:13e397a9371ecd25573a7b90bd037db604331cf403f5318038c46ee44908c44d"},
-    {file = "SQLAlchemy-1.4.41-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:2d6495f84c4fd11584f34e62f9feec81bf373787b3942270487074e35cbe5330"},
-    {file = "SQLAlchemy-1.4.41-cp27-cp27m-win32.whl", hash = "sha256:e570cfc40a29d6ad46c9aeaddbdcee687880940a3a327f2c668dd0e4ef0a441d"},
-    {file = "SQLAlchemy-1.4.41-cp27-cp27m-win_amd64.whl", hash = "sha256:5facb7fd6fa8a7353bbe88b95695e555338fb038ad19ceb29c82d94f62775a05"},
-    {file = "SQLAlchemy-1.4.41-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:f37fa70d95658763254941ddd30ecb23fc4ec0c5a788a7c21034fc2305dab7cc"},
-    {file = "SQLAlchemy-1.4.41-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:361f6b5e3f659e3c56ea3518cf85fbdae1b9e788ade0219a67eeaaea8a4e4d2a"},
-    {file = "SQLAlchemy-1.4.41-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0990932f7cca97fece8017414f57fdd80db506a045869d7ddf2dda1d7cf69ecc"},
-    {file = "SQLAlchemy-1.4.41-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cd767cf5d7252b1c88fcfb58426a32d7bd14a7e4942497e15b68ff5d822b41ad"},
-    {file = "SQLAlchemy-1.4.41-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5102fb9ee2c258a2218281adcb3e1918b793c51d6c2b4666ce38c35101bb940e"},
-    {file = "SQLAlchemy-1.4.41-cp310-cp310-win32.whl", hash = "sha256:2082a2d2fca363a3ce21cfa3d068c5a1ce4bf720cf6497fb3a9fc643a8ee4ddd"},
-    {file = "SQLAlchemy-1.4.41-cp310-cp310-win_amd64.whl", hash = "sha256:e4b12e3d88a8fffd0b4ca559f6d4957ed91bd4c0613a4e13846ab8729dc5c251"},
-    {file = "SQLAlchemy-1.4.41-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:90484a2b00baedad361402c257895b13faa3f01780f18f4a104a2f5c413e4536"},
-    {file = "SQLAlchemy-1.4.41-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b67fc780cfe2b306180e56daaa411dd3186bf979d50a6a7c2a5b5036575cbdbb"},
-    {file = "SQLAlchemy-1.4.41-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ad2b727fc41c7f8757098903f85fafb4bf587ca6605f82d9bf5604bd9c7cded"},
-    {file = "SQLAlchemy-1.4.41-cp311-cp311-win32.whl", hash = "sha256:59bdc291165b6119fc6cdbc287c36f7f2859e6051dd923bdf47b4c55fd2f8bd0"},
-    {file = "SQLAlchemy-1.4.41-cp311-cp311-win_amd64.whl", hash = "sha256:d2e054aed4645f9b755db85bc69fc4ed2c9020c19c8027976f66576b906a74f1"},
-    {file = "SQLAlchemy-1.4.41-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:4ba7e122510bbc07258dc42be6ed45997efdf38129bde3e3f12649be70683546"},
-    {file = "SQLAlchemy-1.4.41-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c0dcf127bb99458a9d211e6e1f0f3edb96c874dd12f2503d4d8e4f1fd103790b"},
-    {file = "SQLAlchemy-1.4.41-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e16c2be5cb19e2c08da7bd3a87fed2a0d4e90065ee553a940c4fc1a0fb1ab72b"},
-    {file = "SQLAlchemy-1.4.41-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5ebeeec5c14533221eb30bad716bc1fd32f509196318fb9caa7002c4a364e4c"},
-    {file = "SQLAlchemy-1.4.41-cp36-cp36m-win32.whl", hash = "sha256:3e2ef592ac3693c65210f8b53d0edcf9f4405925adcfc031ff495e8d18169682"},
-    {file = "SQLAlchemy-1.4.41-cp36-cp36m-win_amd64.whl", hash = "sha256:eb30cf008850c0a26b72bd1b9be6730830165ce049d239cfdccd906f2685f892"},
-    {file = "SQLAlchemy-1.4.41-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:c23d64a0b28fc78c96289ffbd0d9d1abd48d267269b27f2d34e430ea73ce4b26"},
-    {file = "SQLAlchemy-1.4.41-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8eb8897367a21b578b26f5713833836f886817ee2ffba1177d446fa3f77e67c8"},
-    {file = "SQLAlchemy-1.4.41-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:14576238a5f89bcf504c5f0a388d0ca78df61fb42cb2af0efe239dc965d4f5c9"},
-    {file = "SQLAlchemy-1.4.41-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:639e1ae8d48b3c86ffe59c0daa9a02e2bfe17ca3d2b41611b30a0073937d4497"},
-    {file = "SQLAlchemy-1.4.41-cp37-cp37m-win32.whl", hash = "sha256:0005bd73026cd239fc1e8ccdf54db58b6193be9a02b3f0c5983808f84862c767"},
-    {file = "SQLAlchemy-1.4.41-cp37-cp37m-win_amd64.whl", hash = "sha256:5323252be2bd261e0aa3f33cb3a64c45d76829989fa3ce90652838397d84197d"},
-    {file = "SQLAlchemy-1.4.41-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:05f0de3a1dc3810a776275763764bb0015a02ae0f698a794646ebc5fb06fad33"},
-    {file = "SQLAlchemy-1.4.41-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0002e829142b2af00b4eaa26c51728f3ea68235f232a2e72a9508a3116bd6ed0"},
-    {file = "SQLAlchemy-1.4.41-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:22ff16cedab5b16a0db79f1bc99e46a6ddececb60c396562e50aab58ddb2871c"},
-    {file = "SQLAlchemy-1.4.41-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ccfd238f766a5bb5ee5545a62dd03f316ac67966a6a658efb63eeff8158a4bbf"},
-    {file = "SQLAlchemy-1.4.41-cp38-cp38-win32.whl", hash = "sha256:58bb65b3274b0c8a02cea9f91d6f44d0da79abc993b33bdedbfec98c8440175a"},
-    {file = "SQLAlchemy-1.4.41-cp38-cp38-win_amd64.whl", hash = "sha256:ce8feaa52c1640de9541eeaaa8b5fb632d9d66249c947bb0d89dd01f87c7c288"},
-    {file = "SQLAlchemy-1.4.41-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:199a73c31ac8ea59937cc0bf3dfc04392e81afe2ec8a74f26f489d268867846c"},
-    {file = "SQLAlchemy-1.4.41-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4676d51c9f6f6226ae8f26dc83ec291c088fe7633269757d333978df78d931ab"},
-    {file = "SQLAlchemy-1.4.41-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:036d8472356e1d5f096c5e0e1a7e0f9182140ada3602f8fff6b7329e9e7cfbcd"},
-    {file = "SQLAlchemy-1.4.41-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2307495d9e0ea00d0c726be97a5b96615035854972cc538f6e7eaed23a35886c"},
-    {file = "SQLAlchemy-1.4.41-cp39-cp39-win32.whl", hash = "sha256:9c56e19780cd1344fcd362fd6265a15f48aa8d365996a37fab1495cae8fcd97d"},
-    {file = "SQLAlchemy-1.4.41-cp39-cp39-win_amd64.whl", hash = "sha256:f5fa526d027d804b1f85cdda1eb091f70bde6fb7d87892f6dd5a48925bc88898"},
-    {file = "SQLAlchemy-1.4.41.tar.gz", hash = "sha256:0292f70d1797e3c54e862e6f30ae474014648bc9c723e14a2fda730adb0a9791"},
+python-versions = ">=3.7"
+files = [
+    {file = "SQLAlchemy-2.0.39-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:66a40003bc244e4ad86b72abb9965d304726d05a939e8c09ce844d27af9e6d37"},
+    {file = "SQLAlchemy-2.0.39-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67de057fbcb04a066171bd9ee6bcb58738d89378ee3cabff0bffbf343ae1c787"},
+    {file = "SQLAlchemy-2.0.39-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:533e0f66c32093a987a30df3ad6ed21170db9d581d0b38e71396c49718fbb1ca"},
+    {file = "SQLAlchemy-2.0.39-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:7399d45b62d755e9ebba94eb89437f80512c08edde8c63716552a3aade61eb42"},
+    {file = "SQLAlchemy-2.0.39-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:788b6ff6728072b313802be13e88113c33696a9a1f2f6d634a97c20f7ef5ccce"},
+    {file = "SQLAlchemy-2.0.39-cp37-cp37m-win32.whl", hash = "sha256:01da15490c9df352fbc29859d3c7ba9cd1377791faeeb47c100832004c99472c"},
+    {file = "SQLAlchemy-2.0.39-cp37-cp37m-win_amd64.whl", hash = "sha256:f2bcb085faffcacf9319b1b1445a7e1cfdc6fb46c03f2dce7bc2d9a4b3c1cdc5"},
+    {file = "SQLAlchemy-2.0.39-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b761a6847f96fdc2d002e29e9e9ac2439c13b919adfd64e8ef49e75f6355c548"},
+    {file = "SQLAlchemy-2.0.39-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0d7e3866eb52d914aea50c9be74184a0feb86f9af8aaaa4daefe52b69378db0b"},
+    {file = "SQLAlchemy-2.0.39-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:995c2bacdddcb640c2ca558e6760383dcdd68830160af92b5c6e6928ffd259b4"},
+    {file = "SQLAlchemy-2.0.39-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:344cd1ec2b3c6bdd5dfde7ba7e3b879e0f8dd44181f16b895940be9b842fd2b6"},
+    {file = "SQLAlchemy-2.0.39-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:5dfbc543578058c340360f851ddcecd7a1e26b0d9b5b69259b526da9edfa8875"},
+    {file = "SQLAlchemy-2.0.39-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:3395e7ed89c6d264d38bea3bfb22ffe868f906a7985d03546ec7dc30221ea980"},
+    {file = "SQLAlchemy-2.0.39-cp38-cp38-win32.whl", hash = "sha256:bf555f3e25ac3a70c67807b2949bfe15f377a40df84b71ab2c58d8593a1e036e"},
+    {file = "SQLAlchemy-2.0.39-cp38-cp38-win_amd64.whl", hash = "sha256:463ecfb907b256e94bfe7bcb31a6d8c7bc96eca7cbe39803e448a58bb9fcad02"},
+    {file = "sqlalchemy-2.0.39-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6827f8c1b2f13f1420545bd6d5b3f9e0b85fe750388425be53d23c760dcf176b"},
+    {file = "sqlalchemy-2.0.39-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d9f119e7736967c0ea03aff91ac7d04555ee038caf89bb855d93bbd04ae85b41"},
+    {file = "sqlalchemy-2.0.39-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4600c7a659d381146e1160235918826c50c80994e07c5b26946a3e7ec6c99249"},
+    {file = "sqlalchemy-2.0.39-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a06e6c8e31c98ddc770734c63903e39f1947c9e3e5e4bef515c5491b7737dde"},
+    {file = "sqlalchemy-2.0.39-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c4c433f78c2908ae352848f56589c02b982d0e741b7905228fad628999799de4"},
+    {file = "sqlalchemy-2.0.39-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7bd5c5ee1448b6408734eaa29c0d820d061ae18cb17232ce37848376dcfa3e92"},
+    {file = "sqlalchemy-2.0.39-cp310-cp310-win32.whl", hash = "sha256:87a1ce1f5e5dc4b6f4e0aac34e7bb535cb23bd4f5d9c799ed1633b65c2bcad8c"},
+    {file = "sqlalchemy-2.0.39-cp310-cp310-win_amd64.whl", hash = "sha256:871f55e478b5a648c08dd24af44345406d0e636ffe021d64c9b57a4a11518304"},
+    {file = "sqlalchemy-2.0.39-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a28f9c238f1e143ff42ab3ba27990dfb964e5d413c0eb001b88794c5c4a528a9"},
+    {file = "sqlalchemy-2.0.39-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:08cf721bbd4391a0e765fe0fe8816e81d9f43cece54fdb5ac465c56efafecb3d"},
+    {file = "sqlalchemy-2.0.39-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7a8517b6d4005facdbd7eb4e8cf54797dbca100a7df459fdaff4c5123265c1cd"},
+    {file = "sqlalchemy-2.0.39-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b2de1523d46e7016afc7e42db239bd41f2163316935de7c84d0e19af7e69538"},
+    {file = "sqlalchemy-2.0.39-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:412c6c126369ddae171c13987b38df5122cb92015cba6f9ee1193b867f3f1530"},
+    {file = "sqlalchemy-2.0.39-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6b35e07f1d57b79b86a7de8ecdcefb78485dab9851b9638c2c793c50203b2ae8"},
+    {file = "sqlalchemy-2.0.39-cp311-cp311-win32.whl", hash = "sha256:3eb14ba1a9d07c88669b7faf8f589be67871d6409305e73e036321d89f1d904e"},
+    {file = "sqlalchemy-2.0.39-cp311-cp311-win_amd64.whl", hash = "sha256:78f1b79132a69fe8bd6b5d91ef433c8eb40688ba782b26f8c9f3d2d9ca23626f"},
+    {file = "sqlalchemy-2.0.39-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c457a38351fb6234781d054260c60e531047e4d07beca1889b558ff73dc2014b"},
+    {file = "sqlalchemy-2.0.39-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:018ee97c558b499b58935c5a152aeabf6d36b3d55d91656abeb6d93d663c0c4c"},
+    {file = "sqlalchemy-2.0.39-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5493a8120d6fc185f60e7254fc056a6742f1db68c0f849cfc9ab46163c21df47"},
+    {file = "sqlalchemy-2.0.39-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2cf5b5ddb69142511d5559c427ff00ec8c0919a1e6c09486e9c32636ea2b9dd"},
+    {file = "sqlalchemy-2.0.39-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9f03143f8f851dd8de6b0c10784363712058f38209e926723c80654c1b40327a"},
+    {file = "sqlalchemy-2.0.39-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:06205eb98cb3dd52133ca6818bf5542397f1dd1b69f7ea28aa84413897380b06"},
+    {file = "sqlalchemy-2.0.39-cp312-cp312-win32.whl", hash = "sha256:7f5243357e6da9a90c56282f64b50d29cba2ee1f745381174caacc50d501b109"},
+    {file = "sqlalchemy-2.0.39-cp312-cp312-win_amd64.whl", hash = "sha256:2ed107331d188a286611cea9022de0afc437dd2d3c168e368169f27aa0f61338"},
+    {file = "sqlalchemy-2.0.39-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:fe193d3ae297c423e0e567e240b4324d6b6c280a048e64c77a3ea6886cc2aa87"},
+    {file = "sqlalchemy-2.0.39-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:79f4f502125a41b1b3b34449e747a6abfd52a709d539ea7769101696bdca6716"},
+    {file = "sqlalchemy-2.0.39-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a10ca7f8a1ea0fd5630f02feb055b0f5cdfcd07bb3715fc1b6f8cb72bf114e4"},
+    {file = "sqlalchemy-2.0.39-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e6b0a1c7ed54a5361aaebb910c1fa864bae34273662bb4ff788a527eafd6e14d"},
+    {file = "sqlalchemy-2.0.39-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:52607d0ebea43cf214e2ee84a6a76bc774176f97c5a774ce33277514875a718e"},
+    {file = "sqlalchemy-2.0.39-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c08a972cbac2a14810463aec3a47ff218bb00c1a607e6689b531a7c589c50723"},
+    {file = "sqlalchemy-2.0.39-cp313-cp313-win32.whl", hash = "sha256:23c5aa33c01bd898f879db158537d7e7568b503b15aad60ea0c8da8109adf3e7"},
+    {file = "sqlalchemy-2.0.39-cp313-cp313-win_amd64.whl", hash = "sha256:4dabd775fd66cf17f31f8625fc0e4cfc5765f7982f94dc09b9e5868182cb71c0"},
+    {file = "sqlalchemy-2.0.39-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2600a50d590c22d99c424c394236899ba72f849a02b10e65b4c70149606408b5"},
+    {file = "sqlalchemy-2.0.39-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4eff9c270afd23e2746e921e80182872058a7a592017b2713f33f96cc5f82e32"},
+    {file = "sqlalchemy-2.0.39-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d7332868ce891eda48896131991f7f2be572d65b41a4050957242f8e935d5d7"},
+    {file = "sqlalchemy-2.0.39-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:125a7763b263218a80759ad9ae2f3610aaf2c2fbbd78fff088d584edf81f3782"},
+    {file = "sqlalchemy-2.0.39-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:04545042969833cb92e13b0a3019549d284fd2423f318b6ba10e7aa687690a3c"},
+    {file = "sqlalchemy-2.0.39-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:805cb481474e111ee3687c9047c5f3286e62496f09c0e82e8853338aaaa348f8"},
+    {file = "sqlalchemy-2.0.39-cp39-cp39-win32.whl", hash = "sha256:34d5c49f18778a3665d707e6286545a30339ad545950773d43977e504815fa70"},
+    {file = "sqlalchemy-2.0.39-cp39-cp39-win_amd64.whl", hash = "sha256:35e72518615aa5384ef4fae828e3af1b43102458b74a8c481f69af8abf7e802a"},
+    {file = "sqlalchemy-2.0.39-py3-none-any.whl", hash = "sha256:a1c6b0a5e3e326a466d809b651c63f278b1256146a377a528b6938a279da334f"},
+    {file = "sqlalchemy-2.0.39.tar.gz", hash = "sha256:5d2d1fe548def3267b4c70a8568f108d1fed7cbbeccb9cc166e05af2abc25c22"},
 ]
 
 [package.dependencies]
-greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"}
+greenlet = {version = "!=0.4.17", markers = "python_version < \"3.14\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"}
+typing-extensions = ">=4.6.0"
 
 [package.extras]
-aiomysql = ["aiomysql", "greenlet (!=0.4.17)"]
-aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing-extensions (!=3.10.0.1)"]
+aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"]
+aioodbc = ["aioodbc", "greenlet (!=0.4.17)"]
+aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"]
 asyncio = ["greenlet (!=0.4.17)"]
-asyncmy = ["asyncmy (>=0.2.3,!=0.2.4)", "greenlet (!=0.4.17)"]
-mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2)"]
+asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"]
+mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5,!=1.1.10)"]
 mssql = ["pyodbc"]
 mssql-pymssql = ["pymssql"]
 mssql-pyodbc = ["pyodbc"]
-mypy = ["mypy (>=0.910)", "sqlalchemy2-stubs"]
-mysql = ["mysqlclient (>=1.4.0)", "mysqlclient (>=1.4.0,<2)"]
+mypy = ["mypy (>=0.910)"]
+mysql = ["mysqlclient (>=1.4.0)"]
 mysql-connector = ["mysql-connector-python"]
-oracle = ["cx-oracle (>=7)", "cx-oracle (>=7,<8)"]
+oracle = ["cx_oracle (>=8)"]
+oracle-oracledb = ["oracledb (>=1.0.1)"]
 postgresql = ["psycopg2 (>=2.7)"]
 postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"]
-postgresql-pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"]
+postgresql-pg8000 = ["pg8000 (>=1.29.1)"]
+postgresql-psycopg = ["psycopg (>=3.0.7)"]
 postgresql-psycopg2binary = ["psycopg2-binary"]
 postgresql-psycopg2cffi = ["psycopg2cffi"]
-pymysql = ["pymysql", "pymysql (<1)"]
-sqlcipher = ["sqlcipher3-binary"]
+postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"]
+pymysql = ["pymysql"]
+sqlcipher = ["sqlcipher3_binary"]
 
 [[package]]
 name = "sqlalchemy-stubs"
@@ -2623,15 +2594,30 @@ files = [
 setuptools = "*"
 SQLAlchemy = ">=0.9.0"
 
+[[package]]
+name = "sqlmodel"
+version = "0.0.24"
+description = "SQLModel, SQL databases in Python, designed for simplicity, compatibility, and robustness."
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "sqlmodel-0.0.24-py3-none-any.whl", hash = "sha256:6778852f09370908985b667d6a3ab92910d0d5ec88adcaf23dbc242715ff7193"},
+    {file = "sqlmodel-0.0.24.tar.gz", hash = "sha256:cc5c7613c1a5533c9c7867e1aab2fd489a76c9e8a061984da11b4e613c182423"},
+]
+
+[package.dependencies]
+pydantic = ">=1.10.13,<3.0.0"
+SQLAlchemy = ">=2.0.14,<2.1.0"
+
 [[package]]
 name = "starlette"
-version = "0.45.3"
+version = "0.46.1"
 description = "The little ASGI library that shines."
 optional = false
 python-versions = ">=3.9"
 files = [
-    {file = "starlette-0.45.3-py3-none-any.whl", hash = "sha256:dfb6d332576f136ec740296c7e8bb8c8a7125044e7c6da30744718880cdd059d"},
-    {file = "starlette-0.45.3.tar.gz", hash = "sha256:2cbcba2a75806f8a41c722141486f37c28e30a0921c5f6fe4346cb0dcee1302f"},
+    {file = "starlette-0.46.1-py3-none-any.whl", hash = "sha256:77c74ed9d2720138b25875133f3a2dae6d854af2ec37dceb56aef370c1d8a227"},
+    {file = "starlette-0.46.1.tar.gz", hash = "sha256:3c88d58ee4bd1bb807c0d1acb381838afc7752f9ddaec81bbe4383611d833230"},
 ]
 
 [package.dependencies]
@@ -2715,13 +2701,13 @@ files = [
 
 [[package]]
 name = "types-requests"
-version = "2.32.0.20241016"
+version = "2.32.0.20250306"
 description = "Typing stubs for requests"
 optional = false
-python-versions = ">=3.8"
+python-versions = ">=3.9"
 files = [
-    {file = "types-requests-2.32.0.20241016.tar.gz", hash = "sha256:0d9cad2f27515d0e3e3da7134a1b6f28fb97129d86b867f24d9c726452634d95"},
-    {file = "types_requests-2.32.0.20241016-py3-none-any.whl", hash = "sha256:4195d62d6d3e043a4eaaf08ff8a62184584d2e8684e9d2aa178c7915a7da3747"},
+    {file = "types_requests-2.32.0.20250306-py3-none-any.whl", hash = "sha256:25f2cbb5c8710b2022f8bbee7b2b66f319ef14aeea2f35d80f18c9dbf3b60a0b"},
+    {file = "types_requests-2.32.0.20250306.tar.gz", hash = "sha256:0962352694ec5b2f95fda877ee60a159abdf84a0fc6fdace599f20acb41a03d1"},
 ]
 
 [package.dependencies]
@@ -2916,80 +2902,80 @@ anyio = ">=3.0.0"
 
 [[package]]
 name = "websockets"
-version = "14.2"
+version = "15.0.1"
 description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)"
 optional = false
 python-versions = ">=3.9"
 files = [
-    {file = "websockets-14.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e8179f95323b9ab1c11723e5d91a89403903f7b001828161b480a7810b334885"},
-    {file = "websockets-14.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0d8c3e2cdb38f31d8bd7d9d28908005f6fa9def3324edb9bf336d7e4266fd397"},
-    {file = "websockets-14.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:714a9b682deb4339d39ffa674f7b674230227d981a37d5d174a4a83e3978a610"},
-    {file = "websockets-14.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f2e53c72052f2596fb792a7acd9704cbc549bf70fcde8a99e899311455974ca3"},
-    {file = "websockets-14.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e3fbd68850c837e57373d95c8fe352203a512b6e49eaae4c2f4088ef8cf21980"},
-    {file = "websockets-14.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b27ece32f63150c268593d5fdb82819584831a83a3f5809b7521df0685cd5d8"},
-    {file = "websockets-14.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:4daa0faea5424d8713142b33825fff03c736f781690d90652d2c8b053345b0e7"},
-    {file = "websockets-14.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:bc63cee8596a6ec84d9753fd0fcfa0452ee12f317afe4beae6b157f0070c6c7f"},
-    {file = "websockets-14.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7a570862c325af2111343cc9b0257b7119b904823c675b22d4ac547163088d0d"},
-    {file = "websockets-14.2-cp310-cp310-win32.whl", hash = "sha256:75862126b3d2d505e895893e3deac0a9339ce750bd27b4ba515f008b5acf832d"},
-    {file = "websockets-14.2-cp310-cp310-win_amd64.whl", hash = "sha256:cc45afb9c9b2dc0852d5c8b5321759cf825f82a31bfaf506b65bf4668c96f8b2"},
-    {file = "websockets-14.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3bdc8c692c866ce5fefcaf07d2b55c91d6922ac397e031ef9b774e5b9ea42166"},
-    {file = "websockets-14.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c93215fac5dadc63e51bcc6dceca72e72267c11def401d6668622b47675b097f"},
-    {file = "websockets-14.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1c9b6535c0e2cf8a6bf938064fb754aaceb1e6a4a51a80d884cd5db569886910"},
-    {file = "websockets-14.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a52a6d7cf6938e04e9dceb949d35fbdf58ac14deea26e685ab6368e73744e4c"},
-    {file = "websockets-14.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9f05702e93203a6ff5226e21d9b40c037761b2cfb637187c9802c10f58e40473"},
-    {file = "websockets-14.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22441c81a6748a53bfcb98951d58d1af0661ab47a536af08920d129b4d1c3473"},
-    {file = "websockets-14.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:efd9b868d78b194790e6236d9cbc46d68aba4b75b22497eb4ab64fa640c3af56"},
-    {file = "websockets-14.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1a5a20d5843886d34ff8c57424cc65a1deda4375729cbca4cb6b3353f3ce4142"},
-    {file = "websockets-14.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:34277a29f5303d54ec6468fb525d99c99938607bc96b8d72d675dee2b9f5bf1d"},
-    {file = "websockets-14.2-cp311-cp311-win32.whl", hash = "sha256:02687db35dbc7d25fd541a602b5f8e451a238ffa033030b172ff86a93cb5dc2a"},
-    {file = "websockets-14.2-cp311-cp311-win_amd64.whl", hash = "sha256:862e9967b46c07d4dcd2532e9e8e3c2825e004ffbf91a5ef9dde519ee2effb0b"},
-    {file = "websockets-14.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1f20522e624d7ffbdbe259c6b6a65d73c895045f76a93719aa10cd93b3de100c"},
-    {file = "websockets-14.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:647b573f7d3ada919fd60e64d533409a79dcf1ea21daeb4542d1d996519ca967"},
-    {file = "websockets-14.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6af99a38e49f66be5a64b1e890208ad026cda49355661549c507152113049990"},
-    {file = "websockets-14.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:091ab63dfc8cea748cc22c1db2814eadb77ccbf82829bac6b2fbe3401d548eda"},
-    {file = "websockets-14.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b374e8953ad477d17e4851cdc66d83fdc2db88d9e73abf755c94510ebddceb95"},
-    {file = "websockets-14.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a39d7eceeea35db85b85e1169011bb4321c32e673920ae9c1b6e0978590012a3"},
-    {file = "websockets-14.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0a6f3efd47ffd0d12080594f434faf1cd2549b31e54870b8470b28cc1d3817d9"},
-    {file = "websockets-14.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:065ce275e7c4ffb42cb738dd6b20726ac26ac9ad0a2a48e33ca632351a737267"},
-    {file = "websockets-14.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e9d0e53530ba7b8b5e389c02282f9d2aa47581514bd6049d3a7cffe1385cf5fe"},
-    {file = "websockets-14.2-cp312-cp312-win32.whl", hash = "sha256:20e6dd0984d7ca3037afcb4494e48c74ffb51e8013cac71cf607fffe11df7205"},
-    {file = "websockets-14.2-cp312-cp312-win_amd64.whl", hash = "sha256:44bba1a956c2c9d268bdcdf234d5e5ff4c9b6dc3e300545cbe99af59dda9dcce"},
-    {file = "websockets-14.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:6f1372e511c7409a542291bce92d6c83320e02c9cf392223272287ce55bc224e"},
-    {file = "websockets-14.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4da98b72009836179bb596a92297b1a61bb5a830c0e483a7d0766d45070a08ad"},
-    {file = "websockets-14.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8a86a269759026d2bde227652b87be79f8a734e582debf64c9d302faa1e9f03"},
-    {file = "websockets-14.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:86cf1aaeca909bf6815ea714d5c5736c8d6dd3a13770e885aafe062ecbd04f1f"},
-    {file = "websockets-14.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9b0f6c3ba3b1240f602ebb3971d45b02cc12bd1845466dd783496b3b05783a5"},
-    {file = "websockets-14.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:669c3e101c246aa85bc8534e495952e2ca208bd87994650b90a23d745902db9a"},
-    {file = "websockets-14.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:eabdb28b972f3729348e632ab08f2a7b616c7e53d5414c12108c29972e655b20"},
-    {file = "websockets-14.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2066dc4cbcc19f32c12a5a0e8cc1b7ac734e5b64ac0a325ff8353451c4b15ef2"},
-    {file = "websockets-14.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ab95d357cd471df61873dadf66dd05dd4709cae001dd6342edafc8dc6382f307"},
-    {file = "websockets-14.2-cp313-cp313-win32.whl", hash = "sha256:a9e72fb63e5f3feacdcf5b4ff53199ec8c18d66e325c34ee4c551ca748623bbc"},
-    {file = "websockets-14.2-cp313-cp313-win_amd64.whl", hash = "sha256:b439ea828c4ba99bb3176dc8d9b933392a2413c0f6b149fdcba48393f573377f"},
-    {file = "websockets-14.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7cd5706caec1686c5d233bc76243ff64b1c0dc445339bd538f30547e787c11fe"},
-    {file = "websockets-14.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ec607328ce95a2f12b595f7ae4c5d71bf502212bddcea528290b35c286932b12"},
-    {file = "websockets-14.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:da85651270c6bfb630136423037dd4975199e5d4114cae6d3066641adcc9d1c7"},
-    {file = "websockets-14.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3ecadc7ce90accf39903815697917643f5b7cfb73c96702318a096c00aa71f5"},
-    {file = "websockets-14.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1979bee04af6a78608024bad6dfcc0cc930ce819f9e10342a29a05b5320355d0"},
-    {file = "websockets-14.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2dddacad58e2614a24938a50b85969d56f88e620e3f897b7d80ac0d8a5800258"},
-    {file = "websockets-14.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:89a71173caaf75fa71a09a5f614f450ba3ec84ad9fca47cb2422a860676716f0"},
-    {file = "websockets-14.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:6af6a4b26eea4fc06c6818a6b962a952441e0e39548b44773502761ded8cc1d4"},
-    {file = "websockets-14.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:80c8efa38957f20bba0117b48737993643204645e9ec45512579132508477cfc"},
-    {file = "websockets-14.2-cp39-cp39-win32.whl", hash = "sha256:2e20c5f517e2163d76e2729104abc42639c41cf91f7b1839295be43302713661"},
-    {file = "websockets-14.2-cp39-cp39-win_amd64.whl", hash = "sha256:b4c8cef610e8d7c70dea92e62b6814a8cd24fbd01d7103cc89308d2bfe1659ef"},
-    {file = "websockets-14.2-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:d7d9cafbccba46e768be8a8ad4635fa3eae1ffac4c6e7cb4eb276ba41297ed29"},
-    {file = "websockets-14.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:c76193c1c044bd1e9b3316dcc34b174bbf9664598791e6fb606d8d29000e070c"},
-    {file = "websockets-14.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd475a974d5352390baf865309fe37dec6831aafc3014ffac1eea99e84e83fc2"},
-    {file = "websockets-14.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2c6c0097a41968b2e2b54ed3424739aab0b762ca92af2379f152c1aef0187e1c"},
-    {file = "websockets-14.2-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d7ff794c8b36bc402f2e07c0b2ceb4a2424147ed4785ff03e2a7af03711d60a"},
-    {file = "websockets-14.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:dec254fcabc7bd488dab64846f588fc5b6fe0d78f641180030f8ea27b76d72c3"},
-    {file = "websockets-14.2-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:bbe03eb853e17fd5b15448328b4ec7fb2407d45fb0245036d06a3af251f8e48f"},
-    {file = "websockets-14.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a3c4aa3428b904d5404a0ed85f3644d37e2cb25996b7f096d77caeb0e96a3b42"},
-    {file = "websockets-14.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:577a4cebf1ceaf0b65ffc42c54856214165fb8ceeba3935852fc33f6b0c55e7f"},
-    {file = "websockets-14.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ad1c1d02357b7665e700eca43a31d52814ad9ad9b89b58118bdabc365454b574"},
-    {file = "websockets-14.2-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f390024a47d904613577df83ba700bd189eedc09c57af0a904e5c39624621270"},
-    {file = "websockets-14.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:3c1426c021c38cf92b453cdf371228d3430acd775edee6bac5a4d577efc72365"},
-    {file = "websockets-14.2-py3-none-any.whl", hash = "sha256:7a6ceec4ea84469f15cf15807a747e9efe57e369c384fa86e022b3bea679b79b"},
-    {file = "websockets-14.2.tar.gz", hash = "sha256:5059ed9c54945efb321f097084b4c7e52c246f2c869815876a69d1efc4ad6eb5"},
+    {file = "websockets-15.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d63efaa0cd96cf0c5fe4d581521d9fa87744540d4bc999ae6e08595a1014b45b"},
+    {file = "websockets-15.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ac60e3b188ec7574cb761b08d50fcedf9d77f1530352db4eef1707fe9dee7205"},
+    {file = "websockets-15.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5756779642579d902eed757b21b0164cd6fe338506a8083eb58af5c372e39d9a"},
+    {file = "websockets-15.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fdfe3e2a29e4db3659dbd5bbf04560cea53dd9610273917799f1cde46aa725e"},
+    {file = "websockets-15.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c2529b320eb9e35af0fa3016c187dffb84a3ecc572bcee7c3ce302bfeba52bf"},
+    {file = "websockets-15.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac1e5c9054fe23226fb11e05a6e630837f074174c4c2f0fe442996112a6de4fb"},
+    {file = "websockets-15.0.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5df592cd503496351d6dc14f7cdad49f268d8e618f80dce0cd5a36b93c3fc08d"},
+    {file = "websockets-15.0.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0a34631031a8f05657e8e90903e656959234f3a04552259458aac0b0f9ae6fd9"},
+    {file = "websockets-15.0.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3d00075aa65772e7ce9e990cab3ff1de702aa09be3940d1dc88d5abf1ab8a09c"},
+    {file = "websockets-15.0.1-cp310-cp310-win32.whl", hash = "sha256:1234d4ef35db82f5446dca8e35a7da7964d02c127b095e172e54397fb6a6c256"},
+    {file = "websockets-15.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:39c1fec2c11dc8d89bba6b2bf1556af381611a173ac2b511cf7231622058af41"},
+    {file = "websockets-15.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:823c248b690b2fd9303ba00c4f66cd5e2d8c3ba4aa968b2779be9532a4dad431"},
+    {file = "websockets-15.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678999709e68425ae2593acf2e3ebcbcf2e69885a5ee78f9eb80e6e371f1bf57"},
+    {file = "websockets-15.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d50fd1ee42388dcfb2b3676132c78116490976f1300da28eb629272d5d93e905"},
+    {file = "websockets-15.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d99e5546bf73dbad5bf3547174cd6cb8ba7273062a23808ffea025ecb1cf8562"},
+    {file = "websockets-15.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66dd88c918e3287efc22409d426c8f729688d89a0c587c88971a0faa2c2f3792"},
+    {file = "websockets-15.0.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8dd8327c795b3e3f219760fa603dcae1dcc148172290a8ab15158cf85a953413"},
+    {file = "websockets-15.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8fdc51055e6ff4adeb88d58a11042ec9a5eae317a0a53d12c062c8a8865909e8"},
+    {file = "websockets-15.0.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:693f0192126df6c2327cce3baa7c06f2a117575e32ab2308f7f8216c29d9e2e3"},
+    {file = "websockets-15.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:54479983bd5fb469c38f2f5c7e3a24f9a4e70594cd68cd1fa6b9340dadaff7cf"},
+    {file = "websockets-15.0.1-cp311-cp311-win32.whl", hash = "sha256:16b6c1b3e57799b9d38427dda63edcbe4926352c47cf88588c0be4ace18dac85"},
+    {file = "websockets-15.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:27ccee0071a0e75d22cb35849b1db43f2ecd3e161041ac1ee9d2352ddf72f065"},
+    {file = "websockets-15.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3e90baa811a5d73f3ca0bcbf32064d663ed81318ab225ee4f427ad4e26e5aff3"},
+    {file = "websockets-15.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:592f1a9fe869c778694f0aa806ba0374e97648ab57936f092fd9d87f8bc03665"},
+    {file = "websockets-15.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0701bc3cfcb9164d04a14b149fd74be7347a530ad3bbf15ab2c678a2cd3dd9a2"},
+    {file = "websockets-15.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8b56bdcdb4505c8078cb6c7157d9811a85790f2f2b3632c7d1462ab5783d215"},
+    {file = "websockets-15.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0af68c55afbd5f07986df82831c7bff04846928ea8d1fd7f30052638788bc9b5"},
+    {file = "websockets-15.0.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64dee438fed052b52e4f98f76c5790513235efaa1ef7f3f2192c392cd7c91b65"},
+    {file = "websockets-15.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d5f6b181bb38171a8ad1d6aa58a67a6aa9d4b38d0f8c5f496b9e42561dfc62fe"},
+    {file = "websockets-15.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5d54b09eba2bada6011aea5375542a157637b91029687eb4fdb2dab11059c1b4"},
+    {file = "websockets-15.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3be571a8b5afed347da347bfcf27ba12b069d9d7f42cb8c7028b5e98bbb12597"},
+    {file = "websockets-15.0.1-cp312-cp312-win32.whl", hash = "sha256:c338ffa0520bdb12fbc527265235639fb76e7bc7faafbb93f6ba80d9c06578a9"},
+    {file = "websockets-15.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:fcd5cf9e305d7b8338754470cf69cf81f420459dbae8a3b40cee57417f4614a7"},
+    {file = "websockets-15.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ee443ef070bb3b6ed74514f5efaa37a252af57c90eb33b956d35c8e9c10a1931"},
+    {file = "websockets-15.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5a939de6b7b4e18ca683218320fc67ea886038265fd1ed30173f5ce3f8e85675"},
+    {file = "websockets-15.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:746ee8dba912cd6fc889a8147168991d50ed70447bf18bcda7039f7d2e3d9151"},
+    {file = "websockets-15.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:595b6c3969023ecf9041b2936ac3827e4623bfa3ccf007575f04c5a6aa318c22"},
+    {file = "websockets-15.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c714d2fc58b5ca3e285461a4cc0c9a66bd0e24c5da9911e30158286c9b5be7f"},
+    {file = "websockets-15.0.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f3c1e2ab208db911594ae5b4f79addeb3501604a165019dd221c0bdcabe4db8"},
+    {file = "websockets-15.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:229cf1d3ca6c1804400b0a9790dc66528e08a6a1feec0d5040e8b9eb14422375"},
+    {file = "websockets-15.0.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:756c56e867a90fb00177d530dca4b097dd753cde348448a1012ed6c5131f8b7d"},
+    {file = "websockets-15.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:558d023b3df0bffe50a04e710bc87742de35060580a293c2a984299ed83bc4e4"},
+    {file = "websockets-15.0.1-cp313-cp313-win32.whl", hash = "sha256:ba9e56e8ceeeedb2e080147ba85ffcd5cd0711b89576b83784d8605a7df455fa"},
+    {file = "websockets-15.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:e09473f095a819042ecb2ab9465aee615bd9c2028e4ef7d933600a8401c79561"},
+    {file = "websockets-15.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5f4c04ead5aed67c8a1a20491d54cdfba5884507a48dd798ecaf13c74c4489f5"},
+    {file = "websockets-15.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:abdc0c6c8c648b4805c5eacd131910d2a7f6455dfd3becab248ef108e89ab16a"},
+    {file = "websockets-15.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a625e06551975f4b7ea7102bc43895b90742746797e2e14b70ed61c43a90f09b"},
+    {file = "websockets-15.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d591f8de75824cbb7acad4e05d2d710484f15f29d4a915092675ad3456f11770"},
+    {file = "websockets-15.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:47819cea040f31d670cc8d324bb6435c6f133b8c7a19ec3d61634e62f8d8f9eb"},
+    {file = "websockets-15.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac017dd64572e5c3bd01939121e4d16cf30e5d7e110a119399cf3133b63ad054"},
+    {file = "websockets-15.0.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4a9fac8e469d04ce6c25bb2610dc535235bd4aa14996b4e6dbebf5e007eba5ee"},
+    {file = "websockets-15.0.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:363c6f671b761efcb30608d24925a382497c12c506b51661883c3e22337265ed"},
+    {file = "websockets-15.0.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2034693ad3097d5355bfdacfffcbd3ef5694f9718ab7f29c29689a9eae841880"},
+    {file = "websockets-15.0.1-cp39-cp39-win32.whl", hash = "sha256:3b1ac0d3e594bf121308112697cf4b32be538fb1444468fb0a6ae4feebc83411"},
+    {file = "websockets-15.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:b7643a03db5c95c799b89b31c036d5f27eeb4d259c798e878d6937d71832b1e4"},
+    {file = "websockets-15.0.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:0c9e74d766f2818bb95f84c25be4dea09841ac0f734d1966f415e4edfc4ef1c3"},
+    {file = "websockets-15.0.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1009ee0c7739c08a0cd59de430d6de452a55e42d6b522de7aa15e6f67db0b8e1"},
+    {file = "websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76d1f20b1c7a2fa82367e04982e708723ba0e7b8d43aa643d3dcd404d74f1475"},
+    {file = "websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f29d80eb9a9263b8d109135351caf568cc3f80b9928bccde535c235de55c22d9"},
+    {file = "websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b359ed09954d7c18bbc1680f380c7301f92c60bf924171629c5db97febb12f04"},
+    {file = "websockets-15.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:cad21560da69f4ce7658ca2cb83138fb4cf695a2ba3e475e0559e05991aa8122"},
+    {file = "websockets-15.0.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7f493881579c90fc262d9cdbaa05a6b54b3811c2f300766748db79f098db9940"},
+    {file = "websockets-15.0.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:47b099e1f4fbc95b701b6e85768e1fcdaf1630f3cbe4765fa216596f12310e2e"},
+    {file = "websockets-15.0.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67f2b6de947f8c757db2db9c71527933ad0019737ec374a8a6be9a956786aaf9"},
+    {file = "websockets-15.0.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d08eb4c2b7d6c41da6ca0600c077e93f5adcfd979cd777d747e9ee624556da4b"},
+    {file = "websockets-15.0.1-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b826973a4a2ae47ba357e4e82fa44a463b8f168e1ca775ac64521442b19e87f"},
+    {file = "websockets-15.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:21c1fa28a6a7e3cbdc171c694398b6df4744613ce9b36b1a498e816787e28123"},
+    {file = "websockets-15.0.1-py3-none-any.whl", hash = "sha256:f7a866fbc1e97b5c617ee4116daaa09b722101d4a3c170c787450ba409f9736f"},
+    {file = "websockets-15.0.1.tar.gz", hash = "sha256:82544de02076bafba038ce055ee6412d68da13ab47f0c60cab827346de828dee"},
 ]
 
 [[package]]
@@ -3017,4 +3003,4 @@ pycurl = ["pycurl"]
 [metadata]
 lock-version = "2.0"
 python-versions = ">3.9.1,<3.13"
-content-hash = "69cdd5c32d76c7b949db62d26677507f03af27a81ce79c0df8c2c107ddd318b1"
+content-hash = "dea6769b0117c7b226b39a1d011b3acf3abeb2e2cbed5d109aa11099c5ef5aaa"
diff --git a/pyproject.toml b/pyproject.toml
index 48d96e6c0..4e6d09c73 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -27,7 +27,7 @@ numpy = "^1.25.0"
 obspy = "^1.3.1"
 openpyxl = "^3.0.10"
 pycurl = {version = "^7.45.1", optional = true}
-pydantic = "2.9.2"
+pydantic = "2.10.6"
 requests = "^2.28.1"
 scipy = "^1.9.3"
 typer = "^0.6.1"
@@ -35,18 +35,20 @@ typer = "^0.6.1"
 alembic = "^1.8.1"
 Authlib = "^1.1.0"
 cryptography = "^44.0.1"
-databases = {extras = ["mysql", "sqlite"], version = "^0.6.1"}
 fastapi = ">=0.103.0"
 gunicorn = "^23.0.0"
 httpx = "0.28.1"
-# 2022-10-24 pin sqlalchemy to 1.4.41 for now
-# 1.4.42 seems to cause issues with databases (https://stackoverflow.com/a/74102692)
-SQLAlchemy = "1.4.41"
+SQLAlchemy = "2.0.39"
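+# SQLAlchemy can track 2.x now that the databases compatibility pin is gone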
 SQLAlchemy-Utc = "^0.14.0"
 uvicorn = {extras = ["standard"], version = "^0.22.0"}
 netcdf4 = "^1.7.2"
 cdflib = "^1.3.2"
-
+sqlmodel = "^0.0.24"
+PyMySQL = "^1.1.1"
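+# sqlmodel layers pydantic validation over SQLAlchemy 2.x models; PyMySQL
+# presumably replaces the MySQL driver previously provided by the removed
+# databases[mysql] extra.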
 
 [tool.poetry.dev-dependencies]
 black = "^24.3.0"
diff --git a/test/Controller_test.py b/test/Controller_test.py
index bff8a8fdb..209f44b03 100644
--- a/test/Controller_test.py
+++ b/test/Controller_test.py
@@ -1,9 +1,10 @@
 #! /usr/bin/env python
-from geomagio import Controller, TimeseriesFactory
-from geomagio.algorithm import Algorithm
+from geomagio.Controller import Controller
+from geomagio.TimeseriesFactory import TimeseriesFactory
+from geomagio.algorithm.Algorithm import Algorithm
 
 # needed to read outputs generated by Controller and test data
-from geomagio.iaga2002 import IAGA2002Factory
+from geomagio.iaga2002.IAGA2002Factory import IAGA2002Factory
 
 # needed to emulate geomag.py script
 from geomagio.Controller import _main, get_previous_interval, parse_args
diff --git a/test/DerivedTimeseriesFactory_test.py b/test/DerivedTimeseriesFactory_test.py
index debb0b213..94de45831 100644
--- a/test/DerivedTimeseriesFactory_test.py
+++ b/test/DerivedTimeseriesFactory_test.py
@@ -2,11 +2,12 @@ from typing import List
 
 from obspy import Stream
 
-from geomagio import TimeseriesUtility
-from geomagio.algorithm import Algorithm, DeltaFAlgorithm, XYZAlgorithm
+from geomagio import TimeseriesUtility
+from geomagio.algorithm.DeltaFAlgorithm import DeltaFAlgorithm
+from geomagio.algorithm.XYZAlgorithm import XYZAlgorithm
 from geomagio.DerivedTimeseriesFactory import DerivedTimeseriesFactory, get_missing
-from geomagio.iaga2002 import StreamIAGA2002Factory
-from geomagio.edge import EdgeFactory
+from geomagio.iaga2002.StreamIAGA2002Factory import StreamIAGA2002Factory
+from geomagio.edge.EdgeFactory import EdgeFactory
 
 
 def test_derive_trace():
diff --git a/test/adjusted_test/adjusted_test.py b/test/adjusted_test/adjusted_test.py
index 1395e2c61..aed7d5e3c 100644
--- a/test/adjusted_test/adjusted_test.py
+++ b/test/adjusted_test/adjusted_test.py
@@ -5,21 +5,19 @@ from numpy.testing import assert_equal, assert_array_almost_equal
 from obspy.core import UTCDateTime
 import pytest
 
-from geomagio.adjusted import AdjustedMatrix
 from geomagio.adjusted.Affine import Affine, get_epochs
-from geomagio.adjusted.transform import (
-    LeastSq,
-    QRFactorization,
-    Rescale3D,
-    RotationTranslationXY,
-    SVD,
-    ShearYZ,
-    TranslateOrigins,
-    ZRotationHscale,
-    TranslateOrigins,
+from geomagio.adjusted.transform.LeastSq import LeastSq
+from geomagio.adjusted.transform.QRFactorization import QRFactorization
+from geomagio.adjusted.transform.Rescale3D import Rescale3D
+from geomagio.adjusted.transform.RotationTranslationXY import RotationTranslationXY
+from geomagio.adjusted.transform.SVD import SVD
+from geomagio.adjusted.transform.ShearYZ import ShearYZ
+from geomagio.adjusted.transform.TranslateOrigins import TranslateOrigins
+from geomagio.adjusted.transform.ZRotationHScale import ZRotationHscale
+from geomagio.adjusted.transform.ZRotationHScaleZBaseline import (
     ZRotationHscaleZbaseline,
-    ZRotationShear,
 )
+from geomagio.adjusted.transform.ZRotationShear import ZRotationShear
 from test.residual_test.residual_test import (
     get_json_readings,
     get_spreadsheet_directory_readings,
diff --git a/test/algorithm_test/AdjustedAlgorithm_test.py b/test/algorithm_test/AdjustedAlgorithm_test.py
index 38af1ffae..1b66ae953 100644
--- a/test/algorithm_test/AdjustedAlgorithm_test.py
+++ b/test/algorithm_test/AdjustedAlgorithm_test.py
@@ -1,8 +1,9 @@
-from geomagio.adjusted import AdjustedMatrix
-from geomagio.algorithm import AdjustedAlgorithm
-import geomagio.iaga2002 as i2
 from numpy.testing import assert_almost_equal, assert_array_equal, assert_equal
 
+from geomagio.adjusted.AdjustedMatrix import AdjustedMatrix
+from geomagio.algorithm.AdjustedAlgorithm import AdjustedAlgorithm
+from geomagio.iaga2002.IAGA2002Factory import IAGA2002Factory
+
 
 def test_construct():
     """algorithm_test.AdjustedAlgorithm_test.test_construct()"""
@@ -61,9 +62,9 @@ def test_process_XYZF_AdjustedMatrix():
 
     # load boulder Jan 16 files from /etc/ directory
     with open("etc/adjusted/BOU201601vmin.min") as f:
-        raw = i2.IAGA2002Factory().parse_string(f.read())
+        raw = IAGA2002Factory().parse_string(f.read())
     with open("etc/adjusted/BOU201601adj.min") as f:
-        expected = i2.IAGA2002Factory().parse_string(f.read())
+        expected = IAGA2002Factory().parse_string(f.read())
 
     # process hezf (raw) channels with loaded transform
     adjusted = a.process(raw)
@@ -95,9 +96,9 @@ def test_process_reverse_polarity_AdjustedMatrix():
 
     # load boulder May 20 files from /etc/ directory
     with open("etc/adjusted/BOU202005vmin.min") as f:
-        raw = i2.IAGA2002Factory().parse_string(f.read())
+        raw = IAGA2002Factory().parse_string(f.read())
     with open("etc/adjusted/BOU202005adj.min") as f:
-        expected = i2.IAGA2002Factory().parse_string(f.read())
+        expected = IAGA2002Factory().parse_string(f.read())
 
     # process he(raw) channels with loaded transform
     adjusted = a.process(raw)
@@ -124,9 +125,9 @@ def test_process_XYZF_statefile():
 
     # load boulder Jan 16 files from /etc/ directory
     with open("etc/adjusted/BOU201601vmin.min") as f:
-        raw = i2.IAGA2002Factory().parse_string(f.read())
+        raw = IAGA2002Factory().parse_string(f.read())
     with open("etc/adjusted/BOU201601adj.min") as f:
-        expected = i2.IAGA2002Factory().parse_string(f.read())
+        expected = IAGA2002Factory().parse_string(f.read())
 
     # process hezf (raw) channels with loaded transform
     adjusted = a.process(raw)
@@ -153,9 +154,9 @@ def test_process_reverse_polarity_statefile():
 
     # load boulder May 20 files from /etc/ directory
     with open("etc/adjusted/BOU202005vmin.min") as f:
-        raw = i2.IAGA2002Factory().parse_string(f.read())
+        raw = IAGA2002Factory().parse_string(f.read())
     with open("etc/adjusted/BOU202005adj.min") as f:
-        expected = i2.IAGA2002Factory().parse_string(f.read())
+        expected = IAGA2002Factory().parse_string(f.read())
 
     # process he(raw) channels with loaded transform
     adjusted = a.process(raw)
@@ -176,7 +177,7 @@ def test_process_no_statefile():
     a = AdjustedAlgorithm(inchannels=["H", "E", "Z", "F"])
     # load boulder Jan 16 files from /etc/ directory
     with open("etc/adjusted/BOU201601vmin.min") as f:
-        raw = i2.IAGA2002Factory().parse_string(f.read())
+        raw = IAGA2002Factory().parse_string(f.read())
     # process hezf (raw) channels with identity transform
     adjusted = a.process(raw)
     for i in range(len(adjusted)):
diff --git a/test/algorithm_test/Algorithm_test.py b/test/algorithm_test/Algorithm_test.py
index 986a1512a..8b89c325c 100644
--- a/test/algorithm_test/Algorithm_test.py
+++ b/test/algorithm_test/Algorithm_test.py
@@ -1,7 +1,8 @@
 #! /usr/bin/env python
 from obspy.core.stream import Stream
 from numpy.testing import assert_equal
-from geomagio.algorithm import Algorithm
+
+from geomagio.algorithm.Algorithm import Algorithm
 
 
 def test_algorithm_process():
diff --git a/test/algorithm_test/AverageAlgorithm_test.py b/test/algorithm_test/AverageAlgorithm_test.py
index bbe377e70..75a723e70 100644
--- a/test/algorithm_test/AverageAlgorithm_test.py
+++ b/test/algorithm_test/AverageAlgorithm_test.py
@@ -1,10 +1,11 @@
-from geomagio.algorithm import AverageAlgorithm
 from obspy.core.stream import Stream
-from ..StreamConverter_test import __create_trace
 from obspy.core import UTCDateTime
 import numpy as np
 from numpy.testing import assert_array_equal, assert_equal
 
+from geomagio.algorithm.AverageAlgorithm import AverageAlgorithm
+from ..StreamConverter_test import __create_trace
+
 
 def test_process():
     """AverageAlgorithm_test.test_process()
diff --git a/test/algorithm_test/DbDtAlgorithm_test.py b/test/algorithm_test/DbDtAlgorithm_test.py
index df9fc08ef..64f9cef70 100644
--- a/test/algorithm_test/DbDtAlgorithm_test.py
+++ b/test/algorithm_test/DbDtAlgorithm_test.py
@@ -1,5 +1,5 @@
-from geomagio.algorithm import DbDtAlgorithm
-import geomagio.iaga2002 as i2
+from geomagio.algorithm.DbDtAlgorithm import DbDtAlgorithm
+from geomagio.iaga2002.IAGA2002Factory import IAGA2002Factory
 from numpy.testing import assert_almost_equal, assert_equal
 
 
@@ -17,7 +17,7 @@ def test_process():
     hez_iaga2002_string = hez_iaga2002_file.read()
     hez_dbdt_iaga2002_file = open("etc/dbdt/BOU202005dbdt.min")
     hez_dbdt_iaga2002_string = hez_dbdt_iaga2002_file.read()
-    factory = i2.IAGA2002Factory()
+    factory = IAGA2002Factory()
     hez = factory.parse_string(hez_iaga2002_string)
     hez_dbdt = factory.parse_string(hez_dbdt_iaga2002_string)
 
diff --git a/test/algorithm_test/FilterAlgorithm_test.py b/test/algorithm_test/FilterAlgorithm_test.py
index cd7357e60..8d0e7d8ec 100644
--- a/test/algorithm_test/FilterAlgorithm_test.py
+++ b/test/algorithm_test/FilterAlgorithm_test.py
@@ -1,12 +1,12 @@
 import json
+import pytest
+import numpy as np
 
 from numpy.testing import assert_almost_equal, assert_equal
-import numpy as np
 from obspy import read, UTCDateTime
-import pytest
 
 from geomagio.algorithm.FilterAlgorithm import FilterAlgorithm, get_nearest_time
-import geomagio.iaga2002 as i2
+from geomagio.iaga2002.StreamIAGA2002Factory import StreamIAGA2002Factory
 
 
 def test_second():
@@ -35,7 +35,7 @@ def test_second():
     filtered = f.process(llo)
 
     with open("etc/filter/LLO20200106vsec.sec", "r") as f:
-        iaga = i2.StreamIAGA2002Factory(stream=f)
+        iaga = StreamIAGA2002Factory(stream=f)
         LLO = iaga.get_timeseries(starttime=None, endtime=None, observatory="LLO")
 
     u = LLO.select(channel="U")[0]
@@ -80,7 +80,7 @@ def test_minute():
     filtered = f.process(llo)
 
     with open("etc/filter/LLO20200106vmin.min", "r") as f:
-        iaga = i2.StreamIAGA2002Factory(stream=f)
+        iaga = StreamIAGA2002Factory(stream=f)
         LLO = iaga.get_timeseries(starttime=None, endtime=None, observatory="LLO")
 
     u = LLO.select(channel="U")[0]
@@ -123,7 +123,7 @@ def test_hour():
     filtered = f.process(bou)
 
     with open("etc/filter/BOU20200831vhor.hor", "r") as f:
-        iaga = i2.StreamIAGA2002Factory(stream=f)
+        iaga = StreamIAGA2002Factory(stream=f)
         BOU = iaga.get_timeseries(starttime=None, endtime=None, observatory="BOU")
 
     h = BOU.select(channel="H")[0]
@@ -169,7 +169,7 @@ def test_day():
     filtered = f.process(bou)
 
     with open("etc/filter/BOU20200831vday.day", "r") as f:
-        iaga = i2.StreamIAGA2002Factory(stream=f)
+        iaga = StreamIAGA2002Factory(stream=f)
         BOU = iaga.get_timeseries(starttime=None, endtime=None, observatory="BOU")
 
     h = BOU.select(channel="H")[0]
@@ -221,7 +221,7 @@ def test_custom():
     filtered = f.process(llo)
 
     with open("etc/filter/LLO20200106_custom_vsec.sec", "r") as f:
-        iaga = i2.StreamIAGA2002Factory(stream=f)
+        iaga = StreamIAGA2002Factory(stream=f)
         LLO = iaga.get_timeseries(starttime=None, endtime=None, observatory="LLO")
 
     u = LLO.select(channel="U")[0]
@@ -253,7 +253,7 @@ def test_starttime_shift():
     # with open('BOU20200101vsec.sec','wb') as file:
     #     i2w.write(out=file,timeseries=bou,channels=["H","E","Z","F"])
     with open("etc/filter/BOU20200101vsec.sec", "r") as file:
-        iaga = i2.StreamIAGA2002Factory(stream=file)
+        iaga = StreamIAGA2002Factory(stream=file)
         bou = iaga.get_timeseries(starttime=None, endtime=None, observatory="BOU")
     # check initial assumptions
     assert_equal(bou[0].stats.starttime, UTCDateTime("2020-01-01T00:00:00Z"))
diff --git a/test/algorithm_test/SQDistAlgorithm_test.py b/test/algorithm_test/SQDistAlgorithm_test.py
index 767f71c12..d3b801453 100644
--- a/test/algorithm_test/SQDistAlgorithm_test.py
+++ b/test/algorithm_test/SQDistAlgorithm_test.py
@@ -1,4 +1,3 @@
-from geomagio.algorithm import SqDistAlgorithm as sq
 import numpy as np
 from numpy.testing import (
     assert_allclose,
@@ -7,6 +6,8 @@ from numpy.testing import (
     assert_equal,
 )
 
+from geomagio.algorithm.SqDistAlgorithm import SqDistAlgorithm as sq
+
 
 def test_sqdistalgorithm_additive1():
     """SqDistAlgorithm_test.test_sqdistalgorithm_additive1()
diff --git a/test/algorithm_test/XYZAlgorithm_test.py b/test/algorithm_test/XYZAlgorithm_test.py
index 67be143a8..b34cbcaff 100644
--- a/test/algorithm_test/XYZAlgorithm_test.py
+++ b/test/algorithm_test/XYZAlgorithm_test.py
@@ -1,9 +1,11 @@
 #! /usr/bin/env python
+import numpy as np
+
 from obspy.core.stream import Stream
 from numpy.testing import assert_equal
-from geomagio.algorithm import XYZAlgorithm
+
+from geomagio.algorithm.XYZAlgorithm import XYZAlgorithm
 from ..StreamConverter_test import __create_trace
-import numpy as np
 
 
 def test_xyzalgorithm_process():
diff --git a/test/api_test/conftest.py b/test/api_test/conftest.py
new file mode 100644
index 000000000..556849c67
--- /dev/null
+++ b/test/api_test/conftest.py
@@ -0,0 +1,722 @@
+import pytest
+
+from datetime import datetime, timezone
+from obspy import UTCDateTime
+
+from geomagio.api.db.models.metadata import (
+    Metadata,
+    MetadataTable,
+    MetadataCategory,
+)
+from geomagio.api.db.models.metadata_history import (
+    MetadataHistoryTable,
+    MetadataHistory,
+)
+
+
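+# Shared fixtures for the api_test suite: canned GitLab/OIDC-style session
+# payloads plus Metadata and MetadataHistory model instances.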
+@pytest.fixture()
+def valid_token():
+    return {
+        "access_token": "test_access_token",
+        "token_type": "Bearer",
+        "expires_in": 7200,
+        "refresh_token": "test_refresh_toekn",
+        "scope": "openid email profile",
+        "created_at": 1733244369,
+        "id_token": "test_id_token",
+        "expires_at": 1733251569,
+        "userinfo": {
+            "iss": "http://test_url",
+            "sub": "1234",
+            "aud": "test_aud",
+            "exp": 1733244489,
+            "iat": 1733244369,
+            "nonce": "test_nonce",
+            "auth_time": 1733244368,
+            "sub_legacy": "test_sub_legacy",
+            "name": "test_user",
+            "nickname": "test_user_nickname",
+            "preferred_username": "test_preferred_username",
+            "email": "test_email",
+            "email_verified": True,
+            "profile": "http://test_url/user",
+            "picture": "http://picture_url",
+            "groups_direct": ["group1", "group2"],
+        },
+    }
+
+
+@pytest.fixture()
+def valid_userinfo():
+    return {
+        "sub": "1234",
+        "sub_legacy": "test_sub_legacy",
+        "name": "test_user",
+        "nickname": "test_user_nickname",
+        "preferred_username": "test_preferred_username",
+        "email": "test_email",
+        "email_verified": True,
+        "profile": "http://test_url/user",
+        "picture": "http://picture_url",
+        "groups": ["group1", "group2"],
+    }
+
+
+@pytest.fixture()
+def valid_gitlab_user():
+    return {
+        "id": 1234,
+        "email": "test_email",
+        "name": "test_user",
+        "username": "test_user_nickname",
+        "avatar_url": "http://picture_url",
+    }
+
+
+@pytest.fixture()
+def valid_session(valid_token, valid_userinfo):
+    return {"token": valid_token, "user": valid_userinfo}
+
+
+@pytest.fixture()
+def valid_reading_metadata():
+    return {
+        "created_by": "test_user",
+        "starttime": "2024-11-07T12:12:12Z",
+        "endtime": "2024-11-07T20:20:20Z",
+        "network": "NT",
+        "station": "BOU",
+        "category": "reading",
+        "data_valid": True,
+        "metadata": {
+            "azimuth": 199.1383,
+            "metadata": {
+                "time": "2020-01-03T17:12:47Z",
+                "station": "BOU",
+                "observer": "Test Observer",
+                "reviewed": True,
+                "reviewer": "Test Reviewer",
+                "mark_name": "AZ",
+                "pier_name": "MainPCDCP",
+                "theodolite": "108449",
+                "electronics": "0110",
+                "mark_azimuth": 199.1383,
+                "pier_correction": -22,
+            },
+            "absolutes": [
+                {
+                    "shift": 0,
+                    "valid": True,
+                    "element": "D",
+                    "endtime": "2020-01-03T17:16:21.000000Z",
+                    "absolute": 8.3851056,
+                    "baseline": 8.58571,
+                    "starttime": "2020-01-03T17:12:47.000000Z",
+                },
+                {
+                    "shift": 0,
+                    "valid": True,
+                    "element": "H",
+                    "endtime": "2020-01-03T17:24:40.000000Z",
+                    "absolute": 20728.0650365,
+                    "baseline": -71.7177135,
+                    "starttime": "2020-01-03T17:20:48.000000Z",
+                },
+                {
+                    "shift": 0,
+                    "valid": True,
+                    "element": "Z",
+                    "endtime": "2020-01-03T17:24:40.000000Z",
+                    "absolute": 47450.1529433,
+                    "baseline": 578.2041933,
+                    "starttime": "2020-01-03T17:20:48.000000Z",
+                },
+            ],
+            "hemisphere": 1,
+            "diagnostics": None,
+            "scale_value": None,
+            "measurements": [
+                {
+                    "e": None,
+                    "f": None,
+                    "h": None,
+                    "z": None,
+                    "angle": 191.56666666666666,
+                    "measurement_type": "FirstMarkDown",
+                },
+                {
+                    "e": None,
+                    "f": None,
+                    "h": None,
+                    "z": None,
+                    "angle": 11.566666666666666,
+                    "measurement_type": "FirstMarkUp",
+                },
+                {
+                    "e": None,
+                    "f": None,
+                    "h": None,
+                    "z": None,
+                    "angle": 191.56666666666666,
+                    "measurement_type": "SecondMarkDown",
+                },
+                {
+                    "e": None,
+                    "f": None,
+                    "h": None,
+                    "z": None,
+                    "angle": 11.566666666666666,
+                    "measurement_type": "SecondMarkUp",
+                },
+                {
+                    "e": -72.242,
+                    "f": 51801.81,
+                    "h": 20800.329,
+                    "z": 46871.49,
+                    "angle": 270.71666666666664,
+                    "measurement_type": "WestDown",
+                    "time": "2024-11-07T12:12:12Z",
+                },
+                {
+                    "e": -72.636,
+                    "f": 51801.92,
+                    "h": 20800.259,
+                    "z": 46871.641,
+                    "angle": 90.66666666666667,
+                    "measurement_type": "EastDown",
+                    "time": "2024-11-07T12:13:14Z",
+                },
+                {
+                    "e": -72.657,
+                    "f": 51801.82,
+                    "h": 20800.259,
+                    "z": 46871.521,
+                    "angle": 90.93333333333334,
+                    "measurement_type": "WestUp",
+                    "time": "2024-11-07T13:13:11Z",
+                },
+                {
+                    "e": -72.758,
+                    "f": 51801.92,
+                    "h": 20800.086,
+                    "z": 46871.707,
+                    "angle": 270.96666666666664,
+                    "measurement_type": "EastUp",
+                    "time": "2024-11-07T13:15:12Z",
+                },
+                {
+                    "e": -72.898,
+                    "f": 51801.89,
+                    "h": 20799.796,
+                    "z": 46871.802,
+                    "angle": 246.38333333333333,
+                    "measurement_type": "SouthDown",
+                    "time": "2024-11-07T15:12:12Z",
+                },
+                {
+                    "e": -72.8,
+                    "f": 51802.01,
+                    "h": 20799.852,
+                    "z": 46871.919,
+                    "angle": 66.38333333333334,
+                    "measurement_type": "NorthUp",
+                    "time": "2024-11-07T18:12:12Z",
+                },
+                {
+                    "e": -72.775,
+                    "f": 51802.01,
+                    "h": 20799.668,
+                    "z": 46871.997,
+                    "angle": 113.58333333333333,
+                    "measurement_type": "SouthUp",
+                    "time": "2024-11-07T19:12:19Z",
+                },
+                {
+                    "e": -72.813,
+                    "f": 51802.14,
+                    "h": 20799.815,
+                    "z": 46872.077,
+                    "angle": 293.5833333333333,
+                    "measurement_type": "NorthDown",
+                    "time": "2024-11-07T20:20:20Z",
+                },
+                {"measurement_type": "Meridian", "angle": 21},
+            ],
+            "pier_correction": -22,
+        },
+    }
+
+
+@pytest.fixture()
+def instrument_metadata():
+    return Metadata(
+        created_by="test_metadata.py",
+        category=MetadataCategory.INSTRUMENT,
+        network="NT",
+        station="BDT",
+        location="R0",
+        channel="F",
+        metadata={
+            "type": "FGE",
+            "channels": {
+                "U": [{"channel": "U_Volt", "offset": 0, "scale": 313.2}],
+                "V": [{"channel": "V_Volt", "offset": 0, "scale": 312.3}],
+                "W": [{"channel": "W_Volt", "offset": 0, "scale": 312.0}],
+            },
+            "electronics": {
+                "serial": "E0542",
+                "x-scale": 313.2,
+                "y-scale": 312.3,
+                "z-scale": 312.0,
+                "temperature-scale": 0.01,
+            },
+            "sensor": {
+                "serial": "S0419",
+                "x-constant": 36958,
+                "y-constant": 36849,
+                "z-constant": 36811,
+            },
+        },
+        status="new",
+    )
+
+
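+# The next four fixtures build the same Narod instrument metadata with times
+# supplied as UTCDateTime, aware datetime, naive datetime, and ISO strings, to
+# exercise each time format the Metadata model accepts.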
+@pytest.fixture()
+def metadata_with_times():
+    return Metadata(
+        created_by="test_metadata.py",
+        created_time=UTCDateTime(2021, 1, 3, 17, 24, 40),
+        updated_time=UTCDateTime(2021, 2, 3, 17, 24, 40),
+        starttime=UTCDateTime(2020, 12, 3, 22, 43, 27),
+        endtime=UTCDateTime(2020, 12, 3, 23, 43, 27),
+        category=MetadataCategory.INSTRUMENT,
+        network="NT",
+        station="BOU",
+        channel="U",
+        metadata={
+            "type": "Narod",
+            "channels": {
+                "U": [
+                    {"channel": "U_Volt", "offset": 0, "scale": 100},
+                    {"channel": "U_Bin", "offset": 0, "scale": 500},
+                ],
+                "V": [
+                    {"channel": "V_Volt", "offset": 0, "scale": 100},
+                    {"channel": "V_Bin", "offset": 0, "scale": 500},
+                ],
+                "W": [
+                    {"channel": "W_Volt", "offset": 0, "scale": 100},
+                    {"channel": "W_Bin", "offset": 0, "scale": 500},
+                ],
+            },
+        },
+        status="updated",
+    )
+
+
+@pytest.fixture()
+def metadata_with_datetimes():
+    return Metadata(
+        created_by="test_metadata.py",
+        created_time=datetime(2021, 1, 3, 17, 24, 40, tzinfo=timezone.utc),
+        updated_time=datetime(2021, 2, 3, 17, 24, 40, tzinfo=timezone.utc),
+        starttime=datetime(2020, 12, 4, 22, 43, 27, tzinfo=timezone.utc),
+        endtime=datetime(2020, 12, 4, 23, 43, 27, tzinfo=timezone.utc),
+        category=MetadataCategory.INSTRUMENT,
+        network="NT",
+        station="BOU",
+        location="R0",
+        metadata={
+            "type": "Narod",
+            "channels": {
+                "U": [
+                    {"channel": "U_Volt", "offset": 0, "scale": 100},
+                    {"channel": "U_Bin", "offset": 0, "scale": 500},
+                ],
+                "V": [
+                    {"channel": "V_Volt", "offset": 0, "scale": 100},
+                    {"channel": "V_Bin", "offset": 0, "scale": 500},
+                ],
+                "W": [
+                    {"channel": "W_Volt", "offset": 0, "scale": 100},
+                    {"channel": "W_Bin", "offset": 0, "scale": 500},
+                ],
+            },
+        },
+        status="updated",
+    )
+
+
+@pytest.fixture()
+def metadata_with_datetime_naive():
+    return Metadata(
+        created_by="test_metadata.py",
+        created_time=datetime(2021, 1, 3, 17, 24, 40),
+        updated_time=datetime(2021, 2, 3, 17, 24, 40),
+        starttime=datetime(2020, 12, 5, 22, 43, 27),
+        endtime=datetime(2020, 12, 5, 23, 43, 27),
+        category=MetadataCategory.INSTRUMENT,
+        network="NT",
+        station="BOU",
+        metadata={
+            "type": "Narod",
+            "channels": {
+                "U": [
+                    {"channel": "U_Volt", "offset": 0, "scale": 100},
+                    {"channel": "U_Bin", "offset": 0, "scale": 500},
+                ],
+                "V": [
+                    {"channel": "V_Volt", "offset": 0, "scale": 100},
+                    {"channel": "V_Bin", "offset": 0, "scale": 500},
+                ],
+                "W": [
+                    {"channel": "W_Volt", "offset": 0, "scale": 100},
+                    {"channel": "W_Bin", "offset": 0, "scale": 500},
+                ],
+            },
+        },
+        status="updated",
+    )
+
+
+@pytest.fixture()
+def metadata_with_time_strings():
+    return Metadata(
+        created_by="test_metadata.py",
+        created_time="2021-01-03T17:24:40",
+        starttime="2020-12-03T22:43:27",
+        endtime="2020-12-03T23:43:27",
+        category=MetadataCategory.INSTRUMENT,
+        network="NT",
+        station="BOU",
+        metadata={
+            "type": "Narod",
+            "channels": {
+                "U": [
+                    {"channel": "U_Volt", "offset": 0, "scale": 100},
+                    {"channel": "U_Bin", "offset": 0, "scale": 500},
+                ],
+                "V": [
+                    {"channel": "V_Volt", "offset": 0, "scale": 100},
+                    {"channel": "V_Bin", "offset": 0, "scale": 500},
+                ],
+                "W": [
+                    {"channel": "W_Volt", "offset": 0, "scale": 100},
+                    {"channel": "W_Bin", "offset": 0, "scale": 500},
+                ],
+            },
+        },
+    )
+
+
+@pytest.fixture()
+def observatory_metadata():
+    return Metadata(
+        created_by="test_metadata.py",
+        starttime="2022-02-03T22:43:27",
+        endtime="2022-02-03T23:43:27",
+        category=MetadataCategory.OBSERVATORY,
+        network="NT",
+        station="BRW",
+        metadata={
+            "id": "BRW",
+            "name": "Barrow",
+            "marks": [{"name": "m", "azimuth": "1"}],
+            "piers": [{"name": "p", "correction": "12"}],
+            "agency": "USGS",
+            "latitude": 71.322,
+            "elevation": 10,
+            "longitude": 203.378,
+            "agency_name": "United States Geological Survey (USGS)",
+            "electronics": ["1"],
+            "theodolites": ["12"],
+            "declination_base": 10589,
+            "sensor_orientation": "HDZF",
+        },
+    )
+
+
+@pytest.fixture()
+def get_all_metadata(
+    instrument_metadata,
+    metadata_with_times,
+    metadata_with_datetimes,
+    metadata_with_datetime_naive,
+    metadata_with_time_strings,
+    observatory_metadata,
+):
+    return [
+        instrument_metadata,
+        metadata_with_times,
+        metadata_with_datetimes,
+        metadata_with_datetime_naive,
+        metadata_with_time_strings,
+        observatory_metadata,
+    ]
+
+
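+# Three MetadataHistoryTable rows sharing metadata_id=38742 emulate successive
+# edits of one observatory record.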
+@pytest.fixture()
+def metadata_history_1():
+    return MetadataHistoryTable(
+        metadata_id=38742,
+        created_by="test_metadata.py",
+        starttime="2022-02-03T22:43:27",
+        endtime="2022-02-03T23:43:27",
+        category=MetadataCategory.OBSERVATORY,
+        network="NT",
+        station="BRW",
+        metadata={
+            "id": "BRW",
+            "name": "Barrow",
+            "marks": [{"name": "m", "azimuth": "1"}],
+            "piers": [{"name": "p", "correction": "12"}],
+            "agency": "USGS",
+            "latitude": 71.322,
+            "elevation": 10,
+            "longitude": 203.378,
+            "agency_name": "United States Geological Survey (USGS)",
+            "electronics": ["1"],
+            "theodolites": ["12"],
+            "declination_base": 10589,
+            "sensor_orientation": "HDZF",
+        },
+    )
+
+
+@pytest.fixture()
+def metadata_history_2():
+    return MetadataHistoryTable(
+        metadata_id=38742,
+        created_by="test_metadata.py",
+        updated_by="tester",
+        updated_time="2023-01-21T03:30:43",
+        starttime="2022-02-03T22:43:27",
+        endtime="2022-02-03T23:43:27",
+        category=MetadataCategory.OBSERVATORY,
+        network="NT",
+        station="BRW",
+        metadata={
+            "id": "BRW",
+            "name": "Barrow",
+            "marks": [{"name": "m", "azimuth": "1"}],
+            "piers": [{"name": "p", "correction": "22"}],
+            "agency": "USGS",
+            "latitude": 71.322,
+            "elevation": 10,
+            "longitude": 203.378,
+            "agency_name": "United States Geological Survey (USGS)",
+            "electronics": ["1"],
+            "theodolites": ["12"],
+            "declination_base": 10589,
+            "sensor_orientation": "HDZF",
+        },
+    )
+
+
+@pytest.fixture()
+def metadata_history_3():
+    return MetadataHistoryTable(
+        metadata_id=38742,
+        created_by="test_metadata.py",
+        updated_by="another_tester",
+        updated_time="2023-01-22T13:30:43",
+        starttime="2022-02-03T22:43:27",
+        endtime="2022-02-03T23:43:27",
+        category=MetadataCategory.OBSERVATORY,
+        network="NT",
+        station="BRW",
+        metadata={
+            "id": "BRW",
+            "name": "Barrow",
+            "marks": [{"name": "m", "azimuth": "1"}],
+            "piers": [{"name": "p", "correction": "22"}],
+            "agency": "USGS",
+            "latitude": 87.32,
+            "elevation": 10,
+            "longitude": 203.378,
+            "agency_name": "United States Geological Survey (USGS)",
+            "electronics": ["1"],
+            "theodolites": ["12"],
+            "declination_base": 10589,
+            "sensor_orientation": "HDZF",
+        },
+    )
+
+
+@pytest.fixture()
+def current_metadata_for_related_metadata_history():
+    return MetadataTable(
+        id=38742,
+        created_by="test_metadata.py",
+        created_time=datetime(2025, 1, 22, 13, 45, 27, tzinfo=timezone.utc),
+        updated_by="another_tester",
+        updated_time=datetime(2023, 6, 1, 13, 45, 27, tzinfo=timezone.utc),
+        starttime=datetime(2022, 2, 3, 22, 43, 27, tzinfo=timezone.utc),
+        endtime=datetime(2022, 2, 3, 23, 43, 27, tzinfo=timezone.utc),
+        category=MetadataCategory.OBSERVATORY,
+        network="NT",
+        station="BRW",
+        metadata_={
+            "id": "BRW",
+            "name": "Barrow",
+            "marks": [{"name": "m", "azimuth": "1"}],
+            "piers": [{"name": "p", "correction": "22"}],
+            "agency": "USGS",
+            "latitude": 87.32,
+            "elevation": 10,
+            "longitude": 202.234,
+            "agency_name": "United States Geological Survey (USGS)",
+            "electronics": ["1"],
+            "theodolites": ["12"],
+            "declination_base": 10589,
+            "sensor_orientation": "HDZF",
+        },
+    )
+
+
+@pytest.fixture()
+def get_all_related_metadata_history(
+    metadata_history_1, metadata_history_2, metadata_history_3
+):
+    return [metadata_history_1, metadata_history_2, metadata_history_3]
+
+
+@pytest.fixture()
+def formatted_metadata_history_1():
+    return MetadataHistory(
+        metadata_id=38746,
+        created_by="test_metadata.py",
+        starttime=datetime(2022, 2, 3, 22, 43, 27, tzinfo=timezone.utc),
+        endtime=datetime(2022, 2, 3, 23, 43, 27, tzinfo=timezone.utc),
+        category=MetadataCategory.OBSERVATORY,
+        network="NT",
+        station="BRW",
+        metadata={
+            "id": "BRW",
+            "name": "Barrow",
+            "marks": [{"name": "m", "azimuth": "1"}],
+            "piers": [{"name": "p", "correction": "12"}],
+            "agency": "USGS",
+            "latitude": 71.322,
+            "elevation": 10,
+            "longitude": 203.378,
+            "agency_name": "United States Geological Survey (USGS)",
+            "electronics": ["1"],
+            "theodolites": ["12"],
+            "declination_base": 10589,
+            "sensor_orientation": "HDZF",
+        },
+    )
+
+
+@pytest.fixture()
+def formatted_metadata_history_2():
+    return MetadataHistory(
+        metadata_id=38746,
+        created_by="test_metadata.py",
+        updated_by="tester",
+        updated_time=datetime(2023, 1, 21, 3, 30, 43, tzinfo=timezone.utc),
+        starttime=datetime(2022, 2, 3, 22, 43, 27, tzinfo=timezone.utc),
+        endtime=datetime(2022, 2, 3, 23, 43, 27, tzinfo=timezone.utc),
+        category=MetadataCategory.OBSERVATORY,
+        network="NT",
+        station="BRW",
+        metadata={
+            "id": "BRW",
+            "name": "Barrow",
+            "marks": [{"name": "m", "azimuth": "1"}],
+            "piers": [{"name": "p", "correction": "22"}],
+            "agency": "USGS",
+            "latitude": 71.322,
+            "elevation": 10,
+            "longitude": 203.378,
+            "agency_name": "United States Geological Survey (USGS)",
+            "electronics": ["1"],
+            "theodolites": ["12"],
+            "declination_base": 10589,
+            "sensor_orientation": "HDZF",
+        },
+    )
+
+
+@pytest.fixture()
+def formatted_metadata_history_3():
+    return MetadataHistory(
+        metadata_id=38746,
+        created_by="test_metadata.py",
+        updated_by="another_tester",
+        updated_time=datetime(2023, 1, 22, 3, 30, 43, tzinfo=timezone.utc),
+        starttime=datetime(2022, 2, 3, 22, 43, 27, tzinfo=timezone.utc),
+        endtime=datetime(2022, 2, 3, 23, 43, 27, tzinfo=timezone.utc),
+        category=MetadataCategory.OBSERVATORY,
+        network="NT",
+        station="BRW",
+        metadata={
+            "id": "BRW",
+            "name": "Barrow",
+            "marks": [{"name": "m", "azimuth": "1"}],
+            "piers": [{"name": "p", "correction": "22"}],
+            "agency": "USGS",
+            "latitude": 87.32,
+            "elevation": 10,
+            "longitude": 203.378,
+            "agency_name": "United States Geological Survey (USGS)",
+            "electronics": ["1"],
+            "theodolites": ["12"],
+            "declination_base": 10589,
+            "sensor_orientation": "HDZF",
+        },
+    )
+
+
+@pytest.fixture()
+def formatted_metadata_for_formatted_metadata_history():
+    return Metadata(
+        id=38746,
+        created_by="test_metadata.py",
+        created_time=datetime(2025, 1, 22, 13, 45, 27, tzinfo=timezone.utc),
+        updated_by="another_tester",
+        updated_time=datetime(2023, 6, 1, 13, 45, 27, tzinfo=timezone.utc),
+        starttime=datetime(2022, 2, 3, 22, 43, 27, tzinfo=timezone.utc),
+        endtime=datetime(2022, 2, 3, 23, 43, 27, tzinfo=timezone.utc),
+        category=MetadataCategory.OBSERVATORY,
+        network="NT",
+        station="BRW",
+        metadata={
+            "id": "BRW",
+            "name": "Barrow",
+            "marks": [{"name": "m", "azimuth": "1"}],
+            "piers": [{"name": "p", "correction": "22"}],
+            "agency": "USGS",
+            "latitude": 87.32,
+            "elevation": 10,
+            "longitude": 202.234,
+            "agency_name": "United States Geological Survey (USGS)",
+            "electronics": ["1"],
+            "theodolites": ["12"],
+            "declination_base": 10589,
+            "sensor_orientation": "HDZF",
+        },
+    )
+
+
+@pytest.fixture()
+def get_all_formatted_metadata_history(
+    formatted_metadata_for_formatted_metadata_history,
+    formatted_metadata_history_3,
+    formatted_metadata_history_2,
+    formatted_metadata_history_1,
+):
+    return [
+        formatted_metadata_for_formatted_metadata_history,
+        formatted_metadata_history_3,
+        formatted_metadata_history_2,
+        formatted_metadata_history_1,
+    ]
diff --git a/test/api_test/db/factories/MetadataDatabaseFactory_test.py b/test/api_test/db/factories/MetadataDatabaseFactory_test.py
new file mode 100644
index 000000000..8e1dc23a3
--- /dev/null
+++ b/test/api_test/db/factories/MetadataDatabaseFactory_test.py
@@ -0,0 +1,1229 @@
+import pytest
+import pydantic_core
+
+from obspy import UTCDateTime
+from datetime import datetime, timedelta, timezone
+from sqlmodel import Session, delete, select
+
+from geomagio.api.db.models.metadata import (
+    Metadata,
+    MetadataTable,
+    MetadataCategory,
+    MetadataQuery,
+    MetadataUpdate,
+)
+from geomagio.api.db.models.metadata_history import MetadataHistoryTable
+from geomagio.api.db.factories.MetadataDatabaseFactory import MetadataDatabaseFactory
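+
+# These tests rely on a `test_engine` fixture that is not defined in the shared
+# conftest above, so it presumably lives in a nested conftest. A minimal
+# sketch, assuming sqlmodel and an in-memory sqlite database:
+#
+#     from sqlmodel import SQLModel, create_engine
+#
+#     @pytest.fixture()
+#     def test_engine():
+#         engine = create_engine("sqlite://")
+#         SQLModel.metadata.create_all(engine)
+#         return engine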
+
+
+def compare_metadata_objects(a: MetadataTable, b: Metadata) -> bool:
+    # compare each shared field, raising a descriptive error on the first
+    # mismatch; f-strings format non-string fields (datetimes, ints, bools,
+    # dicts) safely
+    for field in (
+        "id",
+        "created_by",
+        "created_time",
+        "updated_by",
+        "updated_time",
+        "starttime",
+        "endtime",
+        "network",
+        "station",
+        "channel",
+        "location",
+        "category",
+        "priority",
+        "data_valid",
+        "status",
+        "comment",
+        "review_comment",
+    ):
+        if getattr(a, field) != getattr(b, field):
+            raise ValueError(
+                f"{field} does not match. a.{field} = {getattr(a, field)!r}"
+                f" and b.{field} = {getattr(b, field)!r}"
+            )
+    # the table model stores the metadata payload as metadata_ because metadata
+    # is reserved by SQLAlchemy declarative models
+    if a.metadata_ != b.metadata:
+        raise ValueError(
+            f"metadata does not match. a.metadata_ = {a.metadata_!r}"
+            f" and b.metadata = {b.metadata!r}"
+        )
+    return True
+
+
+@pytest.mark.asyncio
+async def test_create_metadata_defaults(test_engine, instrument_metadata):
+    instrument_metadata.id = 123456
+    returned_metadata = await MetadataDatabaseFactory(
+        engine=test_engine
+    ).create_metadata(new_metadata=instrument_metadata)
+
+    # get the newly saved record to verify it was created correctly
+    with Session(test_engine) as test_db_session:
+        saved_metadata = test_db_session.get(MetadataTable, returned_metadata.id)
+
+        # assert the saved metadata fields are set correctly
+        assert saved_metadata.created_by == "test_metadata.py"
+        assert saved_metadata.category == MetadataCategory.INSTRUMENT
+        assert saved_metadata.network == "NT"
+        assert saved_metadata.station == "BDT"
+        assert saved_metadata.metadata_ == {
+            "type": "FGE",
+            "channels": {
+                "U": [{"channel": "U_Volt", "offset": 0, "scale": 313.2}],
+                "V": [{"channel": "V_Volt", "offset": 0, "scale": 312.3}],
+                "W": [{"channel": "W_Volt", "offset": 0, "scale": 312.0}],
+            },
+            "electronics": {
+                "serial": "E0542",
+                "x-scale": 313.2,
+                "y-scale": 312.3,
+                "z-scale": 312.0,
+                "temperature-scale": 0.01,
+            },
+            "sensor": {
+                "serial": "S0419",
+                "x-constant": 36958,
+                "y-constant": 36849,
+                "z-constant": 36811,
+            },
+        }
+
+        # assert created_time was just assigned: truncate both times to the hour and compare
+        discard = timedelta(
+            minutes=saved_metadata.created_time.minute,
+            seconds=saved_metadata.created_time.second,
+            microseconds=saved_metadata.created_time.microsecond,
+        )
+        saved_time = saved_metadata.created_time - discard
+
+        now = UTCDateTime.now()
+        current_time = datetime(
+            year=now.year,
+            month=now.month,
+            day=now.day,
+            hour=now.hour,
+            tzinfo=timezone.utc,
+        )
+
+        assert current_time == saved_time
+
+        # assert defaults are set correctly
+        assert saved_metadata.priority == 1
+        assert saved_metadata.data_valid == True
+        assert saved_metadata.status == "new"
+
+        # assert id is the database assigned id. not the input id
+        assert saved_metadata.id != instrument_metadata.id
+
+        # assert the metadata returned is the same as the saved metadata
+        # print("returned_metadata:", returned_metadata)
+        assert compare_metadata_objects(a=saved_metadata, b=returned_metadata)
+
+
+@pytest.mark.asyncio
+async def test_create_metadata_with_starttime_and_endtime(
+    test_engine, metadata_with_times
+):
+    returned_metadata = await MetadataDatabaseFactory(
+        engine=test_engine
+    ).create_metadata(new_metadata=metadata_with_times)
+
+    # get the newly saved record to verify it was created correctly
+    with Session(test_engine) as test_db_session:
+        saved_metadata = test_db_session.get(MetadataTable, returned_metadata.id)
+
+        # assert the saved metadata fields are set correctly
+        assert saved_metadata.created_by == "test_metadata.py"
+        assert saved_metadata.created_time == datetime(
+            2021, 1, 3, 17, 24, 40, tzinfo=timezone.utc
+        )
+        assert saved_metadata.updated_time == datetime(
+            2021, 2, 3, 17, 24, 40, tzinfo=timezone.utc
+        )
+        assert saved_metadata.starttime == datetime(
+            2020, 12, 3, 22, 43, 27, tzinfo=timezone.utc
+        )
+        assert saved_metadata.endtime == datetime(
+            2020, 12, 3, 23, 43, 27, tzinfo=timezone.utc
+        )
+        assert saved_metadata.category == MetadataCategory.INSTRUMENT
+        assert saved_metadata.network == "NT"
+        assert saved_metadata.station == "BOU"
+        assert saved_metadata.metadata_ == {
+            "type": "Narod",
+            "channels": {
+                "U": [
+                    {"channel": "U_Volt", "offset": 0, "scale": 100},
+                    {"channel": "U_Bin", "offset": 0, "scale": 500},
+                ],
+                "V": [
+                    {"channel": "V_Volt", "offset": 0, "scale": 100},
+                    {"channel": "V_Bin", "offset": 0, "scale": 500},
+                ],
+                "W": [
+                    {"channel": "W_Volt", "offset": 0, "scale": 100},
+                    {"channel": "W_Bin", "offset": 0, "scale": 500},
+                ],
+            },
+        }
+
+        # assert defaults are set correctly
+        assert saved_metadata.priority == 1
+        assert saved_metadata.data_valid is True
+        assert saved_metadata.status == "updated"
+
+        # assert the metadata returned is the same as the saved metadata
+        assert compare_metadata_objects(a=saved_metadata, b=returned_metadata)
+
+
+@pytest.mark.asyncio
+async def test_create_metadata_with_times_as_datetime(
+    test_engine, metadata_with_datetimes
+):
+    returned_metadata = await MetadataDatabaseFactory(
+        engine=test_engine
+    ).create_metadata(new_metadata=metadata_with_datetimes)
+
+    # get the newly saved record to verify it was created correctly
+    with Session(test_engine) as test_db_session:
+        saved_metadata = test_db_session.get(MetadataTable, returned_metadata.id)
+
+        # assert the saved metadata fields are set correctly
+        assert saved_metadata.created_by == "test_metadata.py"
+        assert saved_metadata.created_time == datetime(
+            2021, 1, 3, 17, 24, 40, tzinfo=timezone.utc
+        )
+        assert saved_metadata.updated_time == datetime(
+            2021, 2, 3, 17, 24, 40, tzinfo=timezone.utc
+        )
+        assert saved_metadata.starttime == datetime(
+            2020, 12, 4, 22, 43, 27, tzinfo=timezone.utc
+        )
+        assert saved_metadata.endtime == datetime(
+            2020, 12, 4, 23, 43, 27, tzinfo=timezone.utc
+        )
+        assert saved_metadata.category == MetadataCategory.INSTRUMENT
+        assert saved_metadata.network == "NT"
+        assert saved_metadata.station == "BOU"
+        assert saved_metadata.metadata_ == {
+            "type": "Narod",
+            "channels": {
+                "U": [
+                    {"channel": "U_Volt", "offset": 0, "scale": 100},
+                    {"channel": "U_Bin", "offset": 0, "scale": 500},
+                ],
+                "V": [
+                    {"channel": "V_Volt", "offset": 0, "scale": 100},
+                    {"channel": "V_Bin", "offset": 0, "scale": 500},
+                ],
+                "W": [
+                    {"channel": "W_Volt", "offset": 0, "scale": 100},
+                    {"channel": "W_Bin", "offset": 0, "scale": 500},
+                ],
+            },
+        }
+
+        # assert defaults are set correctly
+        assert saved_metadata.priority == 1
+        assert saved_metadata.data_valid is True
+        assert saved_metadata.status == "updated"
+
+        # assert the metadata returned is the same as the saved metadata
+        assert compare_metadata_objects(a=saved_metadata, b=returned_metadata)
+
+
+@pytest.mark.asyncio
+async def test_create_metadata_with_times_as_naive_datetime(
+    test_engine, metadata_with_datetime_naive
+):
+    returned_metadata = await MetadataDatabaseFactory(
+        engine=test_engine
+    ).create_metadata(new_metadata=metadata_with_datetime_naive)
+
+    # get the newly saved record to verify it was created correctly
+    with Session(test_engine) as test_db_session:
+        saved_metadata = test_db_session.get(MetadataTable, returned_metadata.id)
+
+        # assert the saved metadata fields are set correctly
+        assert saved_metadata.created_by == "test_metadata.py"
+
+        # assert naive datetimes are converted to aware datetimes by model_validate
+        assert saved_metadata.created_time == datetime(
+            2021, 1, 3, 17, 24, 40, tzinfo=timezone.utc
+        )
+        assert saved_metadata.updated_time == datetime(
+            2021, 2, 3, 17, 24, 40, tzinfo=timezone.utc
+        )
+        assert saved_metadata.starttime == datetime(
+            2020, 12, 5, 22, 43, 27, tzinfo=timezone.utc
+        )
+        assert saved_metadata.endtime == datetime(
+            2020, 12, 5, 23, 43, 27, tzinfo=timezone.utc
+        )
+        assert saved_metadata.category == MetadataCategory.INSTRUMENT
+        assert saved_metadata.network == "NT"
+        assert saved_metadata.station == "BOU"
+        assert saved_metadata.metadata_ == {
+            "type": "Narod",
+            "channels": {
+                "U": [
+                    {"channel": "U_Volt", "offset": 0, "scale": 100},
+                    {"channel": "U_Bin", "offset": 0, "scale": 500},
+                ],
+                "V": [
+                    {"channel": "V_Volt", "offset": 0, "scale": 100},
+                    {"channel": "V_Bin", "offset": 0, "scale": 500},
+                ],
+                "W": [
+                    {"channel": "W_Volt", "offset": 0, "scale": 100},
+                    {"channel": "W_Bin", "offset": 0, "scale": 500},
+                ],
+            },
+        }
+
+        # assert defaults are set correctly
+        assert saved_metadata.priority == 1
+        assert saved_metadata.data_valid is True
+        assert saved_metadata.status == "updated"
+
+        # assert the metadata returned is the same as the saved metadata
+        assert compare_metadata_objects(a=saved_metadata, b=returned_metadata)
+
+
+@pytest.mark.asyncio
+async def test_create_metadata_with_times_as_strings(
+    test_engine, metadata_with_time_strings
+):
+    returned_metadata = await MetadataDatabaseFactory(
+        engine=test_engine
+    ).create_metadata(new_metadata=metadata_with_time_strings)
+
+    # get the newly saved record to verify it was created correctly
+    with Session(test_engine) as test_db_session:
+        saved_metadata = test_db_session.get(MetadataTable, returned_metadata.id)
+
+        # assert the saved metadata fields are set correctly
+        assert saved_metadata.created_by == "test_metadata.py"
+
+        # assert time strings are parsed into aware datetimes by model_validate
+        assert saved_metadata.created_time == datetime(
+            2021, 1, 3, 17, 24, 40, tzinfo=timezone.utc
+        )
+        assert saved_metadata.starttime == datetime(
+            2020, 12, 3, 22, 43, 27, tzinfo=timezone.utc
+        )
+        assert saved_metadata.endtime == datetime(
+            2020, 12, 3, 23, 43, 27, tzinfo=timezone.utc
+        )
+        assert saved_metadata.updated_time is None
+        assert saved_metadata.category == MetadataCategory.INSTRUMENT
+        assert saved_metadata.network == "NT"
+        assert saved_metadata.station == "BOU"
+        assert saved_metadata.metadata_ == {
+            "type": "Narod",
+            "channels": {
+                "U": [
+                    {"channel": "U_Volt", "offset": 0, "scale": 100},
+                    {"channel": "U_Bin", "offset": 0, "scale": 500},
+                ],
+                "V": [
+                    {"channel": "V_Volt", "offset": 0, "scale": 100},
+                    {"channel": "V_Bin", "offset": 0, "scale": 500},
+                ],
+                "W": [
+                    {"channel": "W_Volt", "offset": 0, "scale": 100},
+                    {"channel": "W_Bin", "offset": 0, "scale": 500},
+                ],
+            },
+        }
+
+        # assert defaults are set correctly
+        assert saved_metadata.priority == 1
+        assert saved_metadata.data_valid is True
+        assert saved_metadata.status == "new"
+
+        # assert the metadata returned is the same as the saved metadata
+        assert compare_metadata_objects(a=saved_metadata, b=returned_metadata)
+
+
+@pytest.mark.asyncio
+async def test_create_metadata_invalid_metadata(test_engine, instrument_metadata):
+    instrument_metadata.starttime = "invalid starttime"
+
+    # creating metadata with an unparseable starttime should raise a validation error
+    with pytest.raises(Exception):
+        await MetadataDatabaseFactory(engine=test_engine).create_metadata(
+            new_metadata=instrument_metadata
+        )
+
+
+@pytest.mark.asyncio
+async def test_get_metadata_with_starttime(test_engine, get_all_metadata):
+    # clear in memory Metadata table
+    with Session(test_engine) as test_db_session:
+        statement = delete(MetadataTable)
+        test_db_session.exec(statement)
+        test_db_session.commit()
+
+    # save a bunch of metadata to the test database
+    await MetadataDatabaseFactory(engine=test_engine).batch_create_metadata(
+        get_all_metadata
+    )
+
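+    # note: the ISO time string below is presumably parsed into a datetime during MetadataQuery validation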
+    query = MetadataQuery(
+        category=MetadataCategory.INSTRUMENT, starttime="2019-01-01T00:00:00"
+    )
+
+    results = await MetadataDatabaseFactory(engine=test_engine).get_metadata(
+        query=query
+    )
+
+    # expect 5 metadata rows to be returned: 4 with an endtime after the query starttime and 1 with no endtime
+    assert len(results) == 5
+
+    for result in results:
+        assert result.category == MetadataCategory.INSTRUMENT
+        # assert metadata is exposed as metadata and not as metadata_
+        assert result.metadata["type"] is not None
+
+
+@pytest.mark.asyncio
+async def test_get_metadata_with_endtime(test_engine, get_all_metadata):
+    # clear in memory Metadata table
+    with Session(test_engine) as test_db_session:
+        statement = delete(MetadataTable)
+        test_db_session.exec(statement)
+        test_db_session.commit()
+
+    # save a bunch of metadata to the test database
+    await MetadataDatabaseFactory(engine=test_engine).batch_create_metadata(
+        get_all_metadata
+    )
+
+    query = MetadataQuery(
+        category=MetadataCategory.INSTRUMENT, endtime="2020-12-04T00:00:00"
+    )
+
+    results = await MetadataDatabaseFactory(engine=test_engine).get_metadata(
+        query=query
+    )
+
+    # expect 3 metadata rows to be returned: 2 with a starttime before the query endtime and 1 with no starttime
+    assert len(results) == 3
+
+    for result in results:
+        assert result.category == MetadataCategory.INSTRUMENT
+        # assert metadata is exposed as metadata and not as metadata_
+        assert result.metadata["type"] is not None
+
+
+@pytest.mark.asyncio
+async def test_get_metadata_with_channel(test_engine, get_all_metadata):
+    # clear in memory Metadata table
+    with Session(test_engine) as test_db_session:
+        statement = delete(MetadataTable)
+        test_db_session.exec(statement)
+        test_db_session.commit()
+
+    # save a bunch of metadata to the test database
+    await MetadataDatabaseFactory(engine=test_engine).batch_create_metadata(
+        get_all_metadata
+    )
+
+    query = MetadataQuery(
+        category=MetadataCategory.INSTRUMENT, endtime="2020-12-04T00:00:00", channel="F"
+    )
+
+    results = await MetadataDatabaseFactory(engine=test_engine).get_metadata(
+        query=query
+    )
+
+    assert len(results) == 1
+    assert results[0].channel == "F"
+    assert results[0].metadata["type"] == "FGE"
+
+
+@pytest.mark.asyncio
+async def test_get_metadata_with_location(test_engine, get_all_metadata):
+    # clear in memory Metadata table
+    with Session(test_engine) as test_db_session:
+        statement = delete(MetadataTable)
+        test_db_session.exec(statement)
+        test_db_session.commit()
+
+    # save a bunch of metadata to the test database
+    await MetadataDatabaseFactory(engine=test_engine).batch_create_metadata(
+        get_all_metadata
+    )
+
+    query = MetadataQuery(
+        category=MetadataCategory.INSTRUMENT,
+        endtime="2020-12-04T00:00:00",
+        location="R0",
+    )
+
+    results = await MetadataDatabaseFactory(engine=test_engine).get_metadata(
+        query=query
+    )
+
+    assert len(results) == 1
+    assert results[0].location == "R0"
+    assert results[0].metadata["type"] == "FGE"
+
+
+@pytest.mark.asyncio
+async def test_get_metadata_with_single_status(test_engine, get_all_metadata):
+    # clear in memory Metadata table
+    with Session(test_engine) as test_db_session:
+        statement = delete(MetadataTable)
+        test_db_session.exec(statement)
+        test_db_session.commit()
+
+    # save a bunch of metadata to the test database
+    await MetadataDatabaseFactory(engine=test_engine).batch_create_metadata(
+        get_all_metadata
+    )
+
+    query = MetadataQuery(
+        category=MetadataCategory.INSTRUMENT,
+        starttime="2019-01-01T00:00:00",
+        status=["new"],
+    )
+
+    results = await MetadataDatabaseFactory(engine=test_engine).get_metadata(
+        query=query
+    )
+
+    # expect 2 metadata rows to be returned: the 1st object and the 5th, where status defaults to new
+    assert len(results) == 2
+
+    for result in results:
+        assert result.category == MetadataCategory.INSTRUMENT
+        assert result.status == "new"
+        assert result.metadata["type"] != None
+
+
+@pytest.mark.asyncio
+async def test_get_metadata_with_multiple_statuses(test_engine, get_all_metadata):
+    # clear in memory Metadata table
+    with Session(test_engine) as test_db_session:
+        statement = delete(MetadataTable)
+        test_db_session.exec(statement)
+        test_db_session.commit()
+
+    # save a bunch of metadata to the test database
+    await MetadataDatabaseFactory(engine=test_engine).batch_create_metadata(
+        get_all_metadata
+    )
+
+    query = MetadataQuery(
+        category=MetadataCategory.INSTRUMENT,
+        starttime="2019-01-01T00:00:00",
+        status=["new", "updated"],
+    )
+
+    results = await MetadataDatabaseFactory(engine=test_engine).get_metadata(
+        query=query
+    )
+
+    # expect 5 metadata rows to be returned: all of the instrument metadata has a "new" or "updated" status and satisfies the starttime condition
+    assert len(results) == 5
+
+    for result in results:
+        assert result.category == MetadataCategory.INSTRUMENT
+        assert result.status == "new" or result.status == "updated"
+        assert result.metadata["type"] != None
+
+
+@pytest.mark.asyncio
+async def test_get_metadata_with_created_before(test_engine, get_all_metadata):
+    # clear in memory Metadata table
+    with Session(test_engine) as test_db_session:
+        statement = delete(MetadataTable)
+        test_db_session.exec(statement)
+        test_db_session.commit()
+
+    # save a bunch of metadata to the test database
+    await MetadataDatabaseFactory(engine=test_engine).batch_create_metadata(
+        get_all_metadata
+    )
+
+    query = MetadataQuery(
+        created_before=UTCDateTime(2025, 1, 1),
+        category=MetadataCategory.INSTRUMENT,
+        starttime="2019-01-01T00:00:00",
+        status=["new", "updated"],
+    )
+
+    results = await MetadataDatabaseFactory(engine=test_engine).get_metadata(
+        query=query
+    )
+
+    # expect 4 metadata rows to be returned: all of the instrument metadata except the first
+    assert len(results) == 4
+
+    for result in results:
+        assert result.category == MetadataCategory.INSTRUMENT
+        assert result.status == "new" or result.status == "updated"
+        assert result.metadata["type"] == "Narod"
+
+
+@pytest.mark.asyncio
+async def test_get_metadata_with_created_after(test_engine, get_all_metadata):
+    # clear in memory Metadata table
+    with Session(test_engine) as test_db_session:
+        statement = delete(MetadataTable)
+        test_db_session.exec(statement)
+        test_db_session.commit()
+
+    # save a bunch of metadata to the test database
+    await MetadataDatabaseFactory(engine=test_engine).batch_create_metadata(
+        get_all_metadata
+    )
+
+    query = MetadataQuery(
+        created_after=UTCDateTime(2019, 1, 1),
+        category=MetadataCategory.INSTRUMENT,
+        starttime="2019-01-01T00:00:00",
+        status=["new", "updated"],
+    )
+
+    results = await MetadataDatabaseFactory(engine=test_engine).get_metadata(
+        query=query
+    )
+
+    # expect all 5 of the instrument metadata rows to be returned
+    assert len(results) == 5
+
+    for result in results:
+        assert result.category == MetadataCategory.INSTRUMENT
+        assert result.status == "new" or result.status == "updated"
+        assert result.metadata["type"] != None
+
+
+@pytest.mark.asyncio
+async def test_get_metadata_returns_one_in_list(test_engine, get_all_metadata):
+    # clear in memory Metadata table
+    with Session(test_engine) as test_db_session:
+        statement = delete(MetadataTable)
+        test_db_session.exec(statement)
+        test_db_session.commit()
+
+    # save a bunch of metadata to the test database
+    await MetadataDatabaseFactory(engine=test_engine).batch_create_metadata(
+        get_all_metadata
+    )
+
+    query = MetadataQuery(
+        created_after=UTCDateTime(2019, 1, 1), category=MetadataCategory.OBSERVATORY
+    )
+
+    results = await MetadataDatabaseFactory(engine=test_engine).get_metadata(
+        query=query
+    )
+
+    # expect 1 observatory metadata object to be returned in a list
+    assert len(results) == 1
+    assert results[0].category == MetadataCategory.OBSERVATORY
+    assert results[0].metadata == {
+        "id": "BRW",
+        "name": "Barrow",
+        "marks": [{"name": "m", "azimuth": "1"}],
+        "piers": [{"name": "p", "correction": "12"}],
+        "agency": "USGS",
+        "latitude": 71.322,
+        "elevation": 10,
+        "longitude": 203.378,
+        "agency_name": "United States Geological Survey (USGS)",
+        "electronics": ["1"],
+        "theodolites": ["12"],
+        "declination_base": 10589,
+        "sensor_orientation": "HDZF",
+    }
+
+
+@pytest.mark.asyncio
+async def test_get_metadata_returns_empty_list(test_engine, get_all_metadata):
+    # clear in memory Metadata table
+    with Session(test_engine) as test_db_session:
+        statement = delete(MetadataTable)
+        test_db_session.exec(statement)
+        test_db_session.commit()
+
+    # save a bunch of metadata to the test database
+    await MetadataDatabaseFactory(engine=test_engine).batch_create_metadata(
+        get_all_metadata
+    )
+
+    query = MetadataQuery(category=MetadataCategory.READING)
+
+    results = await MetadataDatabaseFactory(engine=test_engine).get_metadata(
+        query=query
+    )
+
+    # assert that it returns an empty list and does not error
+    assert len(results) == 0
+
+
+@pytest.mark.asyncio
+async def test_get_metadata_by_id(test_engine, get_all_metadata):
+    # clear in memory Metadata table
+    with Session(test_engine) as test_db_session:
+        statement = delete(MetadataTable)
+        test_db_session.exec(statement)
+        test_db_session.commit()
+
+    # save a bunch of metadata to the test database
+    await MetadataDatabaseFactory(engine=test_engine).batch_create_metadata(
+        get_all_metadata
+    )
+
+    result = await MetadataDatabaseFactory(engine=test_engine).get_metadata_by_id(3)
+    assert result.id == 3
+    assert result.created_time == datetime(2021, 1, 3, 17, 24, 40, tzinfo=timezone.utc)
+    assert result.updated_time == datetime(2021, 2, 3, 17, 24, 40, tzinfo=timezone.utc)
+    assert result.starttime == datetime(2020, 12, 4, 22, 43, 27, tzinfo=timezone.utc)
+    assert result.endtime == datetime(2020, 12, 4, 23, 43, 27, tzinfo=timezone.utc)
+    assert result.category == MetadataCategory.INSTRUMENT
+    assert result.network == "NT"
+    assert result.station == "BOU"
+    assert result.location == "R0"
+    assert result.metadata == {
+        "type": "Narod",
+        "channels": {
+            "U": [
+                {"channel": "U_Volt", "offset": 0, "scale": 100},
+                {"channel": "U_Bin", "offset": 0, "scale": 500},
+            ],
+            "V": [
+                {"channel": "V_Volt", "offset": 0, "scale": 100},
+                {"channel": "V_Bin", "offset": 0, "scale": 500},
+            ],
+            "W": [
+                {"channel": "W_Volt", "offset": 0, "scale": 100},
+                {"channel": "W_Bin", "offset": 0, "scale": 500},
+            ],
+        },
+    }
+
+
+@pytest.mark.asyncio
+async def test_get_metadata_by_id_no_metadata_returned(test_engine):
+    result = await MetadataDatabaseFactory(engine=test_engine).get_metadata_by_id(12345)
+
+    # assert the result is None and not an error
+    assert result is None
+
+
+@pytest.mark.asyncio
+async def test_get_metadata_by_id_invalid(test_engine):
+    with pytest.raises(Exception, match="expected an int"):
+        await MetadataDatabaseFactory(engine=test_engine).get_metadata_by_id("3")
+
+
+def compare_metadata_and_metadatahistory_objects(
+    a: Metadata, b: MetadataHistoryTable
+) -> bool:
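+    """Compare a Metadata object to its MetadataHistoryTable row.
+
+    a.id is compared to b.metadata_id and a.metadata to b.metadata_; all other
+    shared fields are compared by name. Raises ValueError naming the first
+    mismatch and returns True when everything matches.
+    """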
+    if a.id != b.metadata_id:
+        raise ValueError(
+            f"ids do not match. a.id = {a.id} and b.metadata_id = {b.metadata_id}"
+        )
+    if a.metadata != b.metadata_:
+        raise ValueError(
+            f"metadata does not match. a.metadata = {a.metadata}"
+            f" and b.metadata_ = {b.metadata_}"
+        )
+    # the remaining fields share the same names on both models. building the
+    # messages with f-strings keeps non-string fields (datetimes, ints, dicts)
+    # from raising a TypeError during concatenation
+    for field in (
+        "created_by",
+        "created_time",
+        "updated_by",
+        "updated_time",
+        "starttime",
+        "endtime",
+        "network",
+        "station",
+        "channel",
+        "location",
+        "category",
+        "priority",
+        "data_valid",
+        "status",
+        "comment",
+        "review_comment",
+    ):
+        a_value = getattr(a, field)
+        b_value = getattr(b, field)
+        if a_value != b_value:
+            raise ValueError(
+                f"{field} does not match. a.{field} = {a_value}"
+                f" and b.{field} = {b_value}"
+            )
+    return True
+
+
+@pytest.mark.asyncio
+async def test_update_metadata(test_engine, observatory_metadata):
+    with Session(test_engine) as test_db_session:
+        # clear in memory MetadataHistoryTable table
+        statement = delete(MetadataHistoryTable)
+        test_db_session.exec(statement)
+        test_db_session.commit()
+
+        # create a metadata row to update
+        created_metadata = await MetadataDatabaseFactory(
+            engine=test_engine
+        ).create_metadata(observatory_metadata)
+
+        updated_metadata = MetadataUpdate(
+            id=created_metadata.id, comment="Adding a comment", status="reviewed"
+        )
+
+        returned_metadata = await MetadataDatabaseFactory(
+            engine=test_engine
+        ).update_metadata(updated_metadata=updated_metadata, updated_by="tester")
+
+        # get metadatahistory row
+        statement = select(MetadataHistoryTable).where(
+            MetadataHistoryTable.metadata_id == created_metadata.id
+        )
+        results = test_db_session.exec(statement)
+        metadatahistory = results.one()
+
+        # assert the row saved to the metadatahistory table matches the originally created metadata aside from id and metadata_id
+        assert compare_metadata_and_metadatahistory_objects(
+            a=created_metadata, b=metadatahistory
+        )
+
+        # get metadata row to assert it was updated with the new values
+        newly_updated_metadata = test_db_session.get(MetadataTable, created_metadata.id)
+
+        # assert values not included in the MetadataUpdate object are not changed
+        assert newly_updated_metadata.id == created_metadata.id
+        assert newly_updated_metadata.created_by == created_metadata.created_by
+        assert newly_updated_metadata.created_time == created_metadata.created_time
+        assert newly_updated_metadata.starttime == created_metadata.starttime
+        assert newly_updated_metadata.endtime == created_metadata.endtime
+        assert newly_updated_metadata.network == created_metadata.network
+        assert newly_updated_metadata.station == created_metadata.station
+        assert newly_updated_metadata.channel == created_metadata.channel
+        assert newly_updated_metadata.location == created_metadata.location
+        assert newly_updated_metadata.category == created_metadata.category
+        assert newly_updated_metadata.priority == created_metadata.priority
+        assert newly_updated_metadata.data_valid == created_metadata.data_valid
+        assert newly_updated_metadata.metadata_ == created_metadata.metadata
+        assert newly_updated_metadata.review_comment == created_metadata.review_comment
+
+        # assert updated_by is set correctly
+        assert newly_updated_metadata.updated_by == "tester"
+        discard = timedelta(
+            minutes=newly_updated_metadata.updated_time.minute,
+            seconds=newly_updated_metadata.updated_time.second,
+            microseconds=newly_updated_metadata.updated_time.microsecond,
+        )
+        saved_time = newly_updated_metadata.updated_time - discard
+
+        now = UTCDateTime.now()
+        current_time = datetime(
+            year=now.year,
+            month=now.month,
+            day=now.day,
+            hour=now.hour,
+            tzinfo=timezone.utc,
+        )
+
+        # assert updated_time matches the current time when both are truncated to the hour
+        assert current_time == saved_time
+
+        # assert the values included in the MetadataUpdate object are changed to the new values
+        assert newly_updated_metadata.status == "reviewed"
+        assert newly_updated_metadata.comment == "Adding a comment"
+
+        # assert the returned updated metadata is the same as the metadata table row
+        assert compare_metadata_objects(a=newly_updated_metadata, b=returned_metadata)
+
+
+@pytest.mark.asyncio
+async def test_update_metadata_remove_field(test_engine, observatory_metadata):
+    with Session(test_engine) as test_db_session:
+        # clear in memory MetadataHistoryTable table
+        statement = delete(MetadataHistoryTable)
+        test_db_session.exec(statement)
+        test_db_session.commit()
+
+        # create a metadata row to update
+        observatory_metadata.comment = "to be deleted"
+        created_metadata = await MetadataDatabaseFactory(
+            engine=test_engine
+        ).create_metadata(observatory_metadata)
+
+        updated_metadata = MetadataUpdate(
+            id=created_metadata.id, comment=None, status="reviewed"
+        )
+
+        returned_metadata = await MetadataDatabaseFactory(
+            engine=test_engine
+        ).update_metadata(updated_metadata=updated_metadata, updated_by="tester")
+
+        # get metadatahistory row
+        statement = select(MetadataHistoryTable).where(
+            MetadataHistoryTable.metadata_id == created_metadata.id
+        )
+        results = test_db_session.exec(statement)
+        metadatahistory = results.one()
+
+        # assert the row saved to the metadatahistory table matches the originally created metadata aside from id and metadata_id
+        assert compare_metadata_and_metadatahistory_objects(
+            a=created_metadata, b=metadatahistory
+        )
+
+        # get metadata row to assert it was updated with the new values
+        newly_updated_metadata = test_db_session.get(MetadataTable, created_metadata.id)
+
+        # assert values not included in the MetadataUpdate object are not changed
+        assert newly_updated_metadata.id == created_metadata.id
+        assert newly_updated_metadata.created_by == created_metadata.created_by
+        assert newly_updated_metadata.created_time == created_metadata.created_time
+        assert newly_updated_metadata.starttime == created_metadata.starttime
+        assert newly_updated_metadata.endtime == created_metadata.endtime
+        assert newly_updated_metadata.network == created_metadata.network
+        assert newly_updated_metadata.station == created_metadata.station
+        assert newly_updated_metadata.channel == created_metadata.channel
+        assert newly_updated_metadata.location == created_metadata.location
+        assert newly_updated_metadata.category == created_metadata.category
+        assert newly_updated_metadata.priority == created_metadata.priority
+        assert newly_updated_metadata.data_valid == created_metadata.data_valid
+        assert newly_updated_metadata.metadata_ == created_metadata.metadata
+        assert newly_updated_metadata.review_comment == created_metadata.review_comment
+
+        # assert updated_by is set correctly
+        assert newly_updated_metadata.updated_by == "tester"
+        discard = timedelta(
+            minutes=newly_updated_metadata.updated_time.minute,
+            seconds=newly_updated_metadata.updated_time.second,
+            microseconds=newly_updated_metadata.updated_time.microsecond,
+        )
+        saved_time = newly_updated_metadata.updated_time - discard
+
+        now = UTCDateTime.now()
+        current_time = datetime(
+            year=now.year,
+            month=now.month,
+            day=now.day,
+            hour=now.hour,
+            tzinfo=timezone.utc,
+        )
+
+        # assert updated_time matches the current time when both are truncated to the hour
+        assert current_time == saved_time
+
+        # assert the comment is removed
+        assert newly_updated_metadata.status == "reviewed"
+        assert newly_updated_metadata.comment is None
+
+        # assert the returned updated metadata is the same as the metadata table row
+        assert compare_metadata_objects(a=newly_updated_metadata, b=returned_metadata)
+
+
+@pytest.mark.asyncio
+async def test_update_metadata_multiple_updates(test_engine, observatory_metadata):
+    with Session(test_engine) as test_db_session:
+        # clear in memory MetadataHistoryTable table
+        statement = delete(MetadataHistoryTable)
+        test_db_session.exec(statement)
+        test_db_session.commit()
+
+        # create a metadata row to update
+        created_metadata = await MetadataDatabaseFactory(
+            engine=test_engine
+        ).create_metadata(observatory_metadata)
+
+        updated_metadata = MetadataUpdate(
+            id=created_metadata.id,
+            metadata={
+                "id": "BRW",
+                "name": "Barrow",
+                "marks": [{"name": "m", "azimuth": "1"}],
+                "piers": [{"name": "p", "correction": "12"}],
+                "agency": "USGS",
+                "latitude": 100.00,
+                "elevation": 10,
+                "longitude": 203.378,
+                "agency_name": "United States Geological Survey (USGS)",
+                "electronics": ["1"],
+                "theodolites": ["12"],
+                "declination_base": 10589,
+                "sensor_orientation": "HDZF",
+            },
+            status="updated",
+        )
+
+        returned_metadata = await MetadataDatabaseFactory(
+            engine=test_engine
+        ).update_metadata(updated_metadata=updated_metadata, updated_by="tester")
+
+        # get metadatahistory row
+        statement = select(MetadataHistoryTable).where(
+            MetadataHistoryTable.metadata_id == created_metadata.id
+        )
+        results = test_db_session.exec(statement)
+        metadatahistory = results.one()
+
+        # assert the row saved to the metadatahistory table matches the originally created metadata aside from id and metadata_id
+        assert compare_metadata_and_metadatahistory_objects(
+            a=created_metadata, b=metadatahistory
+        )
+
+        # get metadata row to assert it was updated with the new values
+        newly_updated_metadata = test_db_session.get(MetadataTable, created_metadata.id)
+
+        # assert values not included in the MetadataUpdate object are not changed
+        assert newly_updated_metadata.id == created_metadata.id
+        assert newly_updated_metadata.created_by == created_metadata.created_by
+        assert newly_updated_metadata.created_time == created_metadata.created_time
+        assert newly_updated_metadata.starttime == created_metadata.starttime
+        assert newly_updated_metadata.endtime == created_metadata.endtime
+        assert newly_updated_metadata.network == created_metadata.network
+        assert newly_updated_metadata.station == created_metadata.station
+        assert newly_updated_metadata.channel == created_metadata.channel
+        assert newly_updated_metadata.location == created_metadata.location
+        assert newly_updated_metadata.category == created_metadata.category
+        assert newly_updated_metadata.priority == created_metadata.priority
+        assert newly_updated_metadata.data_valid == created_metadata.data_valid
+        assert newly_updated_metadata.comment == created_metadata.comment
+        assert newly_updated_metadata.review_comment == created_metadata.review_comment
+
+        # assert updated_by is set correctly
+        assert newly_updated_metadata.updated_by == "tester"
+        discard = timedelta(
+            minutes=newly_updated_metadata.updated_time.minute,
+            seconds=newly_updated_metadata.updated_time.second,
+            microseconds=newly_updated_metadata.updated_time.microsecond,
+        )
+        saved_time = newly_updated_metadata.updated_time - discard
+
+        now = UTCDateTime.now()
+        current_time = datetime(
+            year=now.year,
+            month=now.month,
+            day=now.day,
+            hour=now.hour,
+            tzinfo=timezone.utc,
+        )
+
+        # assert updated_time matches the current time when both are truncated to the hour
+        assert current_time == saved_time
+
+        # assert the values included in the MetadataUpdate object are changed to the new values
+        assert newly_updated_metadata.status == "updated"
+        assert newly_updated_metadata.metadata_ == {
+            "id": "BRW",
+            "name": "Barrow",
+            "marks": [{"name": "m", "azimuth": "1"}],
+            "piers": [{"name": "p", "correction": "12"}],
+            "agency": "USGS",
+            "latitude": 100.00,
+            "elevation": 10,
+            "longitude": 203.378,
+            "agency_name": "United States Geological Survey (USGS)",
+            "electronics": ["1"],
+            "theodolites": ["12"],
+            "declination_base": 10589,
+            "sensor_orientation": "HDZF",
+        }
+
+        # assert the returned updated metadata is the same as the metadata table row
+        assert compare_metadata_objects(a=newly_updated_metadata, b=returned_metadata)
+
+        # update the same metadata again
+        second_update = MetadataUpdate(id=created_metadata.id, status="reviewed")
+
+        second_returned = await MetadataDatabaseFactory(
+            engine=test_engine
+        ).update_metadata(updated_metadata=second_update, updated_by="different tester")
+
+        # get the associated metadatahistory rows
+        second_statement = select(MetadataHistoryTable).where(
+            MetadataHistoryTable.metadata_id == created_metadata.id
+        )
+        second_results = test_db_session.exec(second_statement)
+        metadatahistory_rows = second_results.all()
+
+        assert len(metadatahistory_rows) == 2
+        for row in metadatahistory_rows:
+            assert row.metadata_id == created_metadata.id
+            # assert the values that weren't updated didn't change
+            assert row.created_by == created_metadata.created_by
+            assert row.created_time == created_metadata.created_time
+            assert row.starttime == created_metadata.starttime
+            assert row.endtime == created_metadata.endtime
+            assert row.network == created_metadata.network
+            assert row.station == created_metadata.station
+            assert row.channel == created_metadata.channel
+            assert row.location == created_metadata.location
+            assert row.category == created_metadata.category
+            assert row.priority == created_metadata.priority
+            assert row.data_valid == created_metadata.data_valid
+            assert row.review_comment == created_metadata.review_comment
+
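+        # the history rows are expected to come back in insertion order: the
+        # original "new" snapshot first, then the snapshot from the first update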
+        assert metadatahistory_rows[0].status == "new"
+        assert metadatahistory_rows[1].status == "updated"
+
+        assert metadatahistory_rows[0].metadata_ == {
+            "id": "BRW",
+            "name": "Barrow",
+            "marks": [{"name": "m", "azimuth": "1"}],
+            "piers": [{"name": "p", "correction": "12"}],
+            "agency": "USGS",
+            "latitude": 71.322,
+            "elevation": 10,
+            "longitude": 203.378,
+            "agency_name": "United States Geological Survey (USGS)",
+            "electronics": ["1"],
+            "theodolites": ["12"],
+            "declination_base": 10589,
+            "sensor_orientation": "HDZF",
+        }
+        assert metadatahistory_rows[1].metadata_ == {
+            "id": "BRW",
+            "name": "Barrow",
+            "marks": [{"name": "m", "azimuth": "1"}],
+            "piers": [{"name": "p", "correction": "12"}],
+            "agency": "USGS",
+            "latitude": 100.00,
+            "elevation": 10,
+            "longitude": 203.378,
+            "agency_name": "United States Geological Survey (USGS)",
+            "electronics": ["1"],
+            "theodolites": ["12"],
+            "declination_base": 10589,
+            "sensor_orientation": "HDZF",
+        }
+
+        assert metadatahistory_rows[1].updated_by == "tester"
+
+        assert second_returned.updated_by == "different tester"
+        assert second_returned.status == "reviewed"
+
+
+@pytest.mark.asyncio
+async def test_update_metadata_invalid_id(test_engine):
+    updated_metadata = MetadataUpdate(
+        id=476487562, comment="Adding a comment", status="reviewed"
+    )
+
+    with pytest.raises(Exception, match="metadata not found"):
+        await MetadataDatabaseFactory(engine=test_engine).update_metadata(
+            updated_metadata=updated_metadata, updated_by="tester"
+        )
diff --git a/test/api_test/db/factories/MetadataHistoryDatabaseFactory_test.py b/test/api_test/db/factories/MetadataHistoryDatabaseFactory_test.py
new file mode 100644
index 000000000..542d04f99
--- /dev/null
+++ b/test/api_test/db/factories/MetadataHistoryDatabaseFactory_test.py
@@ -0,0 +1,385 @@
+import pytest
+
+from datetime import datetime, timezone
+from sqlmodel import Session, delete
+
+from geomagio.api.db.models.metadata import (
+    MetadataTable,
+    MetadataQuery,
+    MetadataCategory,
+)
+from geomagio.api.db.models.metadata_history import MetadataHistoryTable
+from geomagio.api.db.factories.MetadataHistoryDatabaseFactory import (
+    MetadataHistoryDatabaseFactory,
+)
+
+
+@pytest.mark.asyncio
+async def test_get_metadata_history(test_engine, get_all_related_metadata_history):
+    # clear in memory metadata_history table
+    with Session(test_engine) as test_db_session:
+        statement = delete(MetadataHistoryTable)
+        test_db_session.exec(statement)
+        test_db_session.commit()
+
+    # save a bunch of metadata_history objects to the test database
+    await MetadataHistoryDatabaseFactory(
+        engine=test_engine
+    ).batch_create_metadata_history(get_all_related_metadata_history)
+
+    query = MetadataQuery(
+        category=MetadataCategory.OBSERVATORY, starttime="2019-01-01T00:00:00"
+    )
+
+    results = await MetadataHistoryDatabaseFactory(
+        engine=test_engine
+    ).get_metadata_history(query=query)
+    assert len(results) == 3
+
+    for result in results:
+        assert result.category == MetadataCategory.OBSERVATORY
+        assert result.metadata["name"] == "Barrow"
+
+
+@pytest.mark.asyncio
+async def test_get_metadata_history_returns_one(test_engine, metadata_history_1):
+    # clear in memory metadata_history table
+    with Session(test_engine) as test_db_session:
+        statement = delete(MetadataHistoryTable)
+        test_db_session.exec(statement)
+        test_db_session.commit()
+
+    # save a metadata_history object to the test database
+    await MetadataHistoryDatabaseFactory(
+        engine=test_engine
+    ).batch_create_metadata_history([metadata_history_1])
+
+    query = MetadataQuery(
+        category=MetadataCategory.OBSERVATORY, starttime="2019-01-01T00:00:00"
+    )
+
+    results = await MetadataHistoryDatabaseFactory(
+        engine=test_engine
+    ).get_metadata_history(query=query)
+    assert len(results) == 1
+    assert results[0].category == MetadataCategory.OBSERVATORY
+    assert results[0].metadata["name"] == "Barrow"
+
+
+@pytest.mark.asyncio
+async def test_get_metadata_history_returns_none(test_engine):
+    # clear in memory metadata_history table
+    with Session(test_engine) as test_db_session:
+        statement = delete(MetadataHistoryTable)
+        test_db_session.exec(statement)
+        test_db_session.commit()
+
+    query = MetadataQuery(
+        category=MetadataCategory.OBSERVATORY, starttime="2019-01-01T00:00:00"
+    )
+
+    results = await MetadataHistoryDatabaseFactory(
+        engine=test_engine
+    ).get_metadata_history(query=query)
+
+    # assert this returns an empty list and does not error
+    assert len(results) == 0
+
+
+@pytest.mark.asyncio
+async def test_get_metadata_history_by_id(
+    test_engine, get_all_related_metadata_history
+):
+    # clear in memory metadata_history table
+    with Session(test_engine) as test_db_session:
+        statement = delete(MetadataHistoryTable)
+        test_db_session.exec(statement)
+        test_db_session.commit()
+
+    # save a bunch of metadata to the test database
+    await MetadataHistoryDatabaseFactory(
+        engine=test_engine
+    ).batch_create_metadata_history(get_all_related_metadata_history)
+
+    result = await MetadataHistoryDatabaseFactory(
+        engine=test_engine
+    ).get_metadata_history_by_id(2)
+    assert result.id == 2
+    assert result.metadata_id == 38742
+    assert result.created_by == "test_metadata.py"
+    assert result.updated_by == "tester"
+    assert result.updated_time == datetime(2023, 1, 21, 3, 30, 43, tzinfo=timezone.utc)
+    assert result.starttime == datetime(2022, 2, 3, 22, 43, 27, tzinfo=timezone.utc)
+    assert result.endtime == datetime(2022, 2, 3, 23, 43, 27, tzinfo=timezone.utc)
+    assert result.category == MetadataCategory.OBSERVATORY
+    assert result.network == "NT"
+    assert result.station == "BRW"
+    assert result.metadata == {
+        "id": "BRW",
+        "name": "Barrow",
+        "marks": [{"name": "m", "azimuth": "1"}],
+        "piers": [{"name": "p", "correction": "22"}],
+        "agency": "USGS",
+        "latitude": 71.322,
+        "elevation": 10,
+        "longitude": 203.378,
+        "agency_name": "United States Geological Survey (USGS)",
+        "electronics": ["1"],
+        "theodolites": ["12"],
+        "declination_base": 10589,
+        "sensor_orientation": "HDZF",
+    }
+
+
+@pytest.mark.asyncio
+async def test_get_metadata_history_by_id_no_metadata_returned(test_engine):
+    result = await MetadataHistoryDatabaseFactory(
+        engine=test_engine
+    ).get_metadata_history_by_id(12345)
+
+    # assert the result is None and not an error
+    assert result is None
+
+
+@pytest.mark.asyncio
+async def test_get_metadata_history_by_id_invalid(test_engine):
+    with pytest.raises(Exception, match="expected an int"):
+        await MetadataHistoryDatabaseFactory(
+            engine=test_engine
+        ).get_metadata_history_by_id("3")
+
+
+@pytest.mark.asyncio
+async def test_get_metadata_history_by_metadata_id(
+    test_engine,
+    current_metadata_for_related_metadata_history,
+    get_all_related_metadata_history,
+):
+    # clear metadata and metadata_history tables
+    with Session(test_engine) as test_db_session:
+        statement = delete(MetadataTable)
+        history_statement = delete(MetadataHistoryTable)
+        test_db_session.exec(statement)
+        test_db_session.exec(history_statement)
+        test_db_session.commit()
+
+        # save related metadata directly to db to make sure we're setting the right id
+        test_db_session.add(current_metadata_for_related_metadata_history)
+        test_db_session.commit()
+
+        # save related metadata_history
+        await MetadataHistoryDatabaseFactory(
+            engine=test_engine
+        ).batch_create_metadata_history(get_all_related_metadata_history)
+
+        returned = await MetadataHistoryDatabaseFactory(
+            engine=test_engine
+        ).get_metadata_history_by_metadata_id(metadata_id=38742)
+        assert len(returned) == 4
+
+        # assert the first object is the metadata object
+        assert returned[0].id == 38742
+        assert returned[0].created_by == "test_metadata.py"
+        assert returned[0].created_time == datetime(
+            2025, 1, 22, 13, 45, 27, tzinfo=timezone.utc
+        )
+        assert returned[0].updated_by == "another_tester"
+        assert returned[0].updated_time == datetime(
+            2023, 6, 1, 13, 45, 27, tzinfo=timezone.utc
+        )
+        assert returned[0].starttime == datetime(
+            2022, 2, 3, 22, 43, 27, tzinfo=timezone.utc
+        )
+        assert returned[0].endtime == datetime(
+            2022, 2, 3, 23, 43, 27, tzinfo=timezone.utc
+        )
+        assert returned[0].category == MetadataCategory.OBSERVATORY
+        assert returned[0].network == "NT"
+        assert returned[0].station == "BRW"
+        assert returned[0].metadata == {
+            "id": "BRW",
+            "name": "Barrow",
+            "marks": [{"name": "m", "azimuth": "1"}],
+            "piers": [{"name": "p", "correction": "22"}],
+            "agency": "USGS",
+            "latitude": 87.32,
+            "elevation": 10,
+            "longitude": 202.234,
+            "agency_name": "United States Geological Survey (USGS)",
+            "electronics": ["1"],
+            "theodolites": ["12"],
+            "declination_base": 10589,
+            "sensor_orientation": "HDZF",
+        }
+
+        # assert the metadata_history objects are in order of most recent updated_time
+        assert returned[1].id == 3
+        assert returned[1].metadata_id == 38742
+        assert returned[1].created_by == "test_metadata.py"
+        assert returned[1].updated_by == "another_tester"
+        assert returned[1].updated_time == datetime(
+            2023, 1, 22, 13, 30, 43, tzinfo=timezone.utc
+        )
+        assert returned[1].starttime == datetime(
+            2022, 2, 3, 22, 43, 27, tzinfo=timezone.utc
+        )
+        assert returned[1].endtime == datetime(
+            2022, 2, 3, 23, 43, 27, tzinfo=timezone.utc
+        )
+        assert returned[1].category == MetadataCategory.OBSERVATORY
+        assert returned[1].network == "NT"
+        assert returned[1].station == "BRW"
+        assert returned[1].metadata == {
+            "id": "BRW",
+            "name": "Barrow",
+            "marks": [{"name": "m", "azimuth": "1"}],
+            "piers": [{"name": "p", "correction": "22"}],
+            "agency": "USGS",
+            "latitude": 87.32,
+            "elevation": 10,
+            "longitude": 203.378,
+            "agency_name": "United States Geological Survey (USGS)",
+            "electronics": ["1"],
+            "theodolites": ["12"],
+            "declination_base": 10589,
+            "sensor_orientation": "HDZF",
+        }
+
+        assert returned[2].id == 2
+        assert returned[2].metadata_id == 38742
+        assert returned[2].created_by == "test_metadata.py"
+        assert returned[2].updated_by == "tester"
+        assert returned[2].updated_time == datetime(
+            2023, 1, 21, 3, 30, 43, tzinfo=timezone.utc
+        )
+        assert returned[2].starttime == datetime(
+            2022, 2, 3, 22, 43, 27, tzinfo=timezone.utc
+        )
+        assert returned[2].endtime == datetime(
+            2022, 2, 3, 23, 43, 27, tzinfo=timezone.utc
+        )
+        assert returned[2].category == MetadataCategory.OBSERVATORY
+        assert returned[2].network == "NT"
+        assert returned[2].station == "BRW"
+        assert returned[2].metadata == {
+            "id": "BRW",
+            "name": "Barrow",
+            "marks": [{"name": "m", "azimuth": "1"}],
+            "piers": [{"name": "p", "correction": "22"}],
+            "agency": "USGS",
+            "latitude": 71.322,
+            "elevation": 10,
+            "longitude": 203.378,
+            "agency_name": "United States Geological Survey (USGS)",
+            "electronics": ["1"],
+            "theodolites": ["12"],
+            "declination_base": 10589,
+            "sensor_orientation": "HDZF",
+        }
+
+        assert returned[3].id == 1
+        assert returned[3].metadata_id == 38742
+        assert returned[3].created_by == "test_metadata.py"
+        assert returned[3].updated_by is None
+        assert returned[3].updated_time is None
+        assert returned[3].starttime == datetime(
+            2022, 2, 3, 22, 43, 27, tzinfo=timezone.utc
+        )
+        assert returned[3].endtime == datetime(
+            2022, 2, 3, 23, 43, 27, tzinfo=timezone.utc
+        )
+        assert returned[3].category == MetadataCategory.OBSERVATORY
+        assert returned[3].network == "NT"
+        assert returned[3].station == "BRW"
+        assert returned[3].metadata == {
+            "id": "BRW",
+            "name": "Barrow",
+            "marks": [{"name": "m", "azimuth": "1"}],
+            "piers": [{"name": "p", "correction": "12"}],
+            "agency": "USGS",
+            "latitude": 71.322,
+            "elevation": 10,
+            "longitude": 203.378,
+            "agency_name": "United States Geological Survey (USGS)",
+            "electronics": ["1"],
+            "theodolites": ["12"],
+            "declination_base": 10589,
+            "sensor_orientation": "HDZF",
+        }
+
+
+@pytest.mark.asyncio
+async def test_get_metadata_history_by_metadata_id_no_history(
+    test_engine, current_metadata_for_related_metadata_history
+):
+    # clear metadata and MetadataHistoryTable tables
+    with Session(test_engine) as test_db_session:
+        statement = delete(MetadataTable)
+        history_statement = delete(MetadataHistoryTable)
+        test_db_session.exec(statement)
+        test_db_session.exec(history_statement)
+        test_db_session.commit()
+
+        # save metadata directly to db to have control over the id
+        test_db_session.add(current_metadata_for_related_metadata_history)
+        test_db_session.commit()
+
+        returned = await MetadataHistoryDatabaseFactory(
+            engine=test_engine
+        ).get_metadata_history_by_metadata_id(metadata_id=38742)
+        assert len(returned) == 1
+
+        # assert the metadata object was returned
+        assert returned[0].id == 38742
+        assert returned[0].created_by == "test_metadata.py"
+        assert returned[0].created_time == datetime(
+            2025, 1, 22, 13, 45, 27, tzinfo=timezone.utc
+        )
+        assert returned[0].updated_by == "another_tester"
+        assert returned[0].updated_time == datetime(
+            2023, 6, 1, 13, 45, 27, tzinfo=timezone.utc
+        )
+        assert returned[0].starttime == datetime(
+            2022, 2, 3, 22, 43, 27, tzinfo=timezone.utc
+        )
+        assert returned[0].endtime == datetime(
+            2022, 2, 3, 23, 43, 27, tzinfo=timezone.utc
+        )
+        assert returned[0].category == MetadataCategory.OBSERVATORY
+        assert returned[0].network == "NT"
+        assert returned[0].station == "BRW"
+        assert returned[0].metadata == {
+            "id": "BRW",
+            "name": "Barrow",
+            "marks": [{"name": "m", "azimuth": "1"}],
+            "piers": [{"name": "p", "correction": "22"}],
+            "agency": "USGS",
+            "latitude": 87.32,
+            "elevation": 10,
+            "longitude": 202.234,
+            "agency_name": "United States Geological Survey (USGS)",
+            "electronics": ["1"],
+            "theodolites": ["12"],
+            "declination_base": 10589,
+            "sensor_orientation": "HDZF",
+        }
+
+
+@pytest.mark.asyncio
+async def test_get_metadata_history_by_metadata_id_no_metadata(test_engine):
+    # clear metadata and metadata_history tables
+    with Session(test_engine) as test_db_session:
+        statement = delete(MetadataTable)
+        history_statement = delete(MetadataHistoryTable)
+        test_db_session.exec(statement)
+        test_db_session.exec(history_statement)
+        test_db_session.commit()
+
+        with pytest.raises(Exception, match="metadata not found"):
+            await MetadataHistoryDatabaseFactory(
+                engine=test_engine
+            ).get_metadata_history_by_metadata_id(metadata_id=38742)
diff --git a/test/api_test/db/factories/SessionDatabaseFactory_test.py b/test/api_test/db/factories/SessionDatabaseFactory_test.py
new file mode 100644
index 000000000..4d21c2f24
--- /dev/null
+++ b/test/api_test/db/factories/SessionDatabaseFactory_test.py
@@ -0,0 +1,146 @@
+import os
+import uuid
+import json
+import pytest
+
+from sqlmodel import Session, delete, select
+
+from geomagio.api.db.models.session import session
+from geomagio.api.db.factories.SessionDatabaseFactory import SessionDatabaseFactory
+from geomagio.api.secure.encryption import get_fernet
+
+
+@pytest.mark.asyncio
+async def test_encryption(test_engine, valid_session):
+    with Session(test_engine) as test_db_session:
+        # clear test session table
+        statement = delete(session)
+        test_db_session.exec(statement)
+        test_db_session.commit()
+
+        session_id = "randomstring"
+        await SessionDatabaseFactory(engine=test_engine).save_session(
+            session_id=session_id, data=valid_session
+        )
+
+        # get session directly from database to make sure it's encrypted
+        statement = select(session).where(session.session_id == session_id)
+        results = test_db_session.exec(statement)
+        user_session = results.one()
+
+        # decrypt the data
+        encryption = get_fernet()
+        decrypted_data = encryption.decrypt(user_session.data.encode("utf8"))
+
+        # assert encryption is working as expected
+        returned_session = json.loads(decrypted_data)
+        assert returned_session == valid_session
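+
+
+# A minimal sketch (not part of the factory under test) of the Fernet
+# round-trip asserted above; it assumes get_fernet() returns a
+# cryptography.fernet.Fernet instance, as the decrypt call above suggests.
+def _fernet_roundtrip_sketch(data: dict) -> dict:
+    encryption = get_fernet()
+    token = encryption.encrypt(json.dumps(data).encode("utf8"))
+    return json.loads(encryption.decrypt(token))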
+
+
+@pytest.mark.asyncio
+async def test_get_session_no_session(test_engine):
+    with pytest.raises(Exception):
+        await SessionDatabaseFactory(engine=test_engine).get_session(
+            session_id="idonotexist"
+        )
+
+
+@pytest.mark.asyncio
+async def test_save_new_session(
+    test_engine, valid_session, valid_token, valid_userinfo
+):
+    with Session(test_engine) as test_db_session:
+        # clear test session table
+        statement = delete(session)
+        test_db_session.exec(statement)
+        test_db_session.commit()
+
+    session_id = "randomstring"
+    await SessionDatabaseFactory(engine=test_engine).save_session(
+        session_id=session_id, data=valid_session
+    )
+
+    # get the newly created session to make sure it's saved correctly and encryption is working as expected
+    user_session = await SessionDatabaseFactory(engine=test_engine).get_session(
+        session_id=session_id
+    )
+    assert user_session["token"] == valid_token
+    assert user_session["user"] == valid_userinfo
+
+
+@pytest.mark.asyncio
+async def test_update_current_session(
+    test_engine, valid_session, valid_token, valid_userinfo
+):
+    with Session(test_engine) as test_db_session:
+        # clear test session table
+        statement = delete(session)
+        test_db_session.exec(statement)
+        test_db_session.commit()
+
+    # create an initial session to later update
+    session_id = "sessiontoupdate"
+    await SessionDatabaseFactory(engine=test_engine).save_session(
+        session_id=session_id, data=valid_session
+    )
+
+    # get the newly created session to make sure it's saved correctly and encryption is working as expected
+    user_session = await SessionDatabaseFactory(engine=test_engine).get_session(
+        session_id=session_id
+    )
+    assert user_session["token"] == valid_token
+    assert user_session["user"] == valid_userinfo
+    assert user_session["user"]["groups"] == ["group1", "group2"]
+
+    # change the userinfo and save it
+    updated_session_data = valid_session
+    updated_session_data["user"]["groups"] = []
+    await SessionDatabaseFactory(engine=test_engine).save_session(
+        session_id=session_id, data=updated_session_data
+    )
+
+    # get the updated session and assert the group info was changed as expected
+    updated_session = await SessionDatabaseFactory(engine=test_engine).get_session(
+        session_id=session_id
+    )
+    assert updated_session["token"] == valid_token
+    assert updated_session["user"]["email"] == "test_email"
+    assert updated_session["user"]["groups"] == []
+
+
+@pytest.mark.asyncio
+async def test_delete_session(test_engine, valid_session):
+    with Session(test_engine) as test_db_session:
+        # clear test session table
+        statement = delete(session)
+        test_db_session.exec(statement)
+        test_db_session.commit()
+
+        # create an initial session to later delete
+        session_id = "sessiontodelete"
+        await SessionDatabaseFactory(engine=test_engine).save_session(
+            session_id=session_id, data=valid_session
+        )
+
+        # delete session
+        await SessionDatabaseFactory(engine=test_engine).delete_session(
+            session_id=session_id
+        )
+
+        # try to get session to verify
+        second_statement = select(session).where(session.session_id == session_id)
+        results = test_db_session.exec(second_statement)
+        user_session = results.all()
+
+        assert len(user_session) == 0
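+
+
+# Hypothetical sketch of the delete path: a DELETE filtered by session_id
+# commits whether or not a row matched, which is why test_delete_non_existing
+# below can expect no error.
+def _delete_session_sketch(engine, session_id: str) -> None:
+    with Session(engine) as db:
+        db.exec(delete(session).where(session.session_id == session_id))
+        db.commit()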
+
+
+@pytest.mark.asyncio
+async def test_delete_non_existing(test_engine):
+    session_id = "idonotexist"
+    # this should not error
+    await SessionDatabaseFactory(engine=test_engine).delete_session(
+        session_id=session_id
+    )
diff --git a/test/api_test/secure_test/conftest.py b/test/api_test/secure_test/conftest.py
deleted file mode 100644
index 0df413243..000000000
--- a/test/api_test/secure_test/conftest.py
+++ /dev/null
@@ -1,229 +0,0 @@
-import pytest
-
-
-@pytest.fixture()
-def valid_token():
-    return {
-        "access_token": "test_access_token",
-        "token_type": "Bearer",
-        "expires_in": 7200,
-        "refresh_token": "test_refresh_toekn",
-        "scope": "openid email profile",
-        "created_at": 1733244369,
-        "id_token": "test_id_token",
-        "expires_at": 1733251569,
-        "userinfo": {
-            "iss": "http://test_url",
-            "sub": "1234",
-            "aud": "test_aud",
-            "exp": 1733244489,
-            "iat": 1733244369,
-            "nonce": "test_nonce",
-            "auth_time": 1733244368,
-            "sub_legacy": "test_sub_legacy",
-            "name": "test_user",
-            "nickname": "test_user_nickname",
-            "preferred_username": "test_preferred_username",
-            "email": "test_email",
-            "email_verified": True,
-            "profile": "http://test_url/user",
-            "picture": "http://picture_url",
-            "groups_direct": ["group1", "group2"],
-        },
-    }
-
-
-@pytest.fixture()
-def valid_userinfo():
-    return {
-        "sub": "1234",
-        "sub_legacy": "test_sub_legacy",
-        "name": "test_user",
-        "nickname": "test_user_nickname",
-        "preferred_username": "test_preferred_username",
-        "email": "test_email",
-        "email_verified": True,
-        "profile": "http://test_url/user",
-        "picture": "http://picture_url",
-        "groups": ["group1", "group2"],
-    }
-
-
-@pytest.fixture()
-def valid_gitlab_user():
-    return {
-        "id": 1234,
-        "email": "test_email",
-        "name": "test_user",
-        "username": "test_user_nickname",
-        "avatar_url": "http://picture_url",
-    }
-
-
-@pytest.fixture()
-def valid_reading_metadata():
-    return {
-        "created_by": "test_user",
-        "starttime": "2024-11-07T12:12:12Z",
-        "endtime": "2024-11-07T20:20:20Z",
-        "network": "NT",
-        "station": "BOU",
-        "category": "reading",
-        "data_valid": True,
-        "metadata": {
-            "azimuth": 199.1383,
-            "metadata": {
-                "time": "2020-01-03T17:12:47Z",
-                "station": "BOU",
-                "observer": "Test Observer",
-                "reviewed": True,
-                "reviewer": "Test Reviewer",
-                "mark_name": "AZ",
-                "pier_name": "MainPCDCP",
-                "theodolite": "108449",
-                "electronics": "0110",
-                "mark_azimuth": 199.1383,
-                "pier_correction": -22,
-            },
-            "absolutes": [
-                {
-                    "shift": 0,
-                    "valid": True,
-                    "element": "D",
-                    "endtime": "2020-01-03T17:16:21.000000Z",
-                    "absolute": 8.3851056,
-                    "baseline": 8.58571,
-                    "starttime": "2020-01-03T17:12:47.000000Z",
-                },
-                {
-                    "shift": 0,
-                    "valid": True,
-                    "element": "H",
-                    "endtime": "2020-01-03T17:24:40.000000Z",
-                    "absolute": 20728.0650365,
-                    "baseline": -71.7177135,
-                    "starttime": "2020-01-03T17:20:48.000000Z",
-                },
-                {
-                    "shift": 0,
-                    "valid": True,
-                    "element": "Z",
-                    "endtime": "2020-01-03T17:24:40.000000Z",
-                    "absolute": 47450.1529433,
-                    "baseline": 578.2041933,
-                    "starttime": "2020-01-03T17:20:48.000000Z",
-                },
-            ],
-            "hemisphere": 1,
-            "diagnostics": None,
-            "scale_value": None,
-            "measurements": [
-                {
-                    "e": None,
-                    "f": None,
-                    "h": None,
-                    "z": None,
-                    "angle": 191.56666666666666,
-                    "measurement_type": "FirstMarkDown",
-                },
-                {
-                    "e": None,
-                    "f": None,
-                    "h": None,
-                    "z": None,
-                    "angle": 11.566666666666666,
-                    "measurement_type": "FirstMarkUp",
-                },
-                {
-                    "e": None,
-                    "f": None,
-                    "h": None,
-                    "z": None,
-                    "angle": 191.56666666666666,
-                    "measurement_type": "SecondMarkDown",
-                },
-                {
-                    "e": None,
-                    "f": None,
-                    "h": None,
-                    "z": None,
-                    "angle": 11.566666666666666,
-                    "measurement_type": "SecondMarkUp",
-                },
-                {
-                    "e": -72.242,
-                    "f": 51801.81,
-                    "h": 20800.329,
-                    "z": 46871.49,
-                    "angle": 270.71666666666664,
-                    "measurement_type": "WestDown",
-                    "time": "2024-11-07T12:12:12Z",
-                },
-                {
-                    "e": -72.636,
-                    "f": 51801.92,
-                    "h": 20800.259,
-                    "z": 46871.641,
-                    "angle": 90.66666666666667,
-                    "measurement_type": "EastDown",
-                    "time": "2024-11-07T12:13:14Z",
-                },
-                {
-                    "e": -72.657,
-                    "f": 51801.82,
-                    "h": 20800.259,
-                    "z": 46871.521,
-                    "angle": 90.93333333333334,
-                    "measurement_type": "WestUp",
-                    "time": "2024-11-07T13:13:11Z",
-                },
-                {
-                    "e": -72.758,
-                    "f": 51801.92,
-                    "h": 20800.086,
-                    "z": 46871.707,
-                    "angle": 270.96666666666664,
-                    "measurement_type": "EastUp",
-                    "time": "2024-11-07T13:15:12Z",
-                },
-                {
-                    "e": -72.898,
-                    "f": 51801.89,
-                    "h": 20799.796,
-                    "z": 46871.802,
-                    "angle": 246.38333333333333,
-                    "measurement_type": "SouthDown",
-                    "time": "2024-11-07T15:12:12Z",
-                },
-                {
-                    "e": -72.8,
-                    "f": 51802.01,
-                    "h": 20799.852,
-                    "z": 46871.919,
-                    "angle": 66.38333333333334,
-                    "measurement_type": "NorthUp",
-                    "time": "2024-11-07T18:12:12Z",
-                },
-                {
-                    "e": -72.775,
-                    "f": 51802.01,
-                    "h": 20799.668,
-                    "z": 46871.997,
-                    "angle": 113.58333333333333,
-                    "measurement_type": "SouthUp",
-                    "time": "2024-11-07T19:12:19Z",
-                },
-                {
-                    "e": -72.813,
-                    "f": 51802.14,
-                    "h": 20799.815,
-                    "z": 46872.077,
-                    "angle": 293.5833333333333,
-                    "measurement_type": "NorthDown",
-                    "time": "2024-11-07T20:20:20Z",
-                },
-                {"measurement_type": "Meridian", "angle": 21},
-            ],
-            "pier_correction": -22,
-        },
-    }
diff --git a/test/api_test/secure_test/login_test.py b/test/api_test/secure_test/login_routes_test.py
similarity index 64%
rename from test/api_test/secure_test/login_test.py
rename to test/api_test/secure_test/login_routes_test.py
index 389796565..fcb3dc185 100644
--- a/test/api_test/secure_test/login_test.py
+++ b/test/api_test/secure_test/login_routes_test.py
@@ -1,40 +1,44 @@
 import pytest
 import httpx
+
 from unittest.mock import ANY
 from fastapi.testclient import TestClient
 from authlib.integrations.base_client.errors import MismatchingStateError
 
-from geomagio.api.secure import app
-from geomagio.api.secure.SessionMiddleware import SessionMiddleware
-from geomagio.api.secure.login import User
+from geomagio.api.secure.secure_app import app
+from geomagio.api.secure.login_routes import User
+from geomagio.api.db.factories.SessionDatabaseFactory import SessionDatabaseFactory
 
 client = TestClient(app)
 
 
 @pytest.mark.asyncio
-async def test_authorization_valid(valid_token, valid_userinfo, mocker):
-
+async def test_authorization_valid(valid_session, valid_token, valid_userinfo, mocker):
     mock_authorize = mocker.AsyncMock(return_value=valid_token)
     mocker.patch(
-        "geomagio.api.secure.login.oauth.openid.authorize_access_token", mock_authorize
+        "geomagio.api.secure.login_routes.oauth.openid.authorize_access_token",
+        mock_authorize,
     )
 
     mock_userinfo = mocker.AsyncMock(return_value=valid_userinfo)
-    mocker.patch("geomagio.api.secure.login.oauth.openid.userinfo", mock_userinfo)
+    mocker.patch(
+        "geomagio.api.secure.login_routes.oauth.openid.userinfo", mock_userinfo
+    )
 
     mock_save_session = mocker.AsyncMock()
-    mocker.patch.object(SessionMiddleware, "save_session", mock_save_session)
+    mocker.patch.object(SessionDatabaseFactory, "save_session", mock_save_session)
 
     response = client.get("/authorize")
     assert response.status_code == 200
 
-    valid_session = {
-        "token": valid_token,
-        "user": valid_userinfo,
-    }
+    mock_save_session.assert_called_once_with(session_id=ANY, data=valid_session)
 
-    # assert the session data is correct
-    mock_save_session.assert_called_once_with(ANY, valid_session)
+
+@pytest.mark.asyncio
+async def test_authorization_invalid_authorize_access_token():
+    # assert that requests directly to GET /authorize cause an exception
+    with pytest.raises(MismatchingStateError):
+        client.get("/authorize")
 
 
 @pytest.mark.asyncio
@@ -52,11 +56,12 @@ async def test_authorization_invalid_authorize_access_token():
 @pytest.mark.asyncio
 async def test_login_redirects(mocker):
     mock_save_session = mocker.AsyncMock()
-    mocker.patch.object(SessionMiddleware, "save_session", mock_save_session)
+    mocker.patch.object(SessionDatabaseFactory, "save_session", mock_save_session)
 
     mock_redirect = mocker.AsyncMock(return_value={"status_code": 302})
     mocker.patch(
-        "geomagio.api.secure.login.oauth.openid.authorize_redirect", mock_redirect
+        "geomagio.api.secure.login_routes.oauth.openid.authorize_redirect",
+        mock_redirect,
     )
 
     header = "https://testserver/ws/secure/metadata/1"
@@ -68,37 +73,38 @@ async def test_login_redirects(mocker):
     )
 
     # assert session is saved with correct referrer header
-    mock_save_session.assert_called_once_with(ANY, {"after_authorize_redirect": header})
+    mock_save_session.assert_called_once_with(
+        session_id=ANY, data={"after_authorize_redirect": header}
+    )
 
 
 @pytest.mark.asyncio
-async def test_logout(valid_token, valid_userinfo, mocker):
-    valid_session = {"token": valid_token, "user": valid_userinfo}
+async def test_logout(valid_session, mocker):
     mock_get_session = mocker.AsyncMock(return_value=valid_session)
-    mocker.patch.object(SessionMiddleware, "get_session", mock_get_session)
+    mocker.patch.object(SessionDatabaseFactory, "get_session", mock_get_session)
 
     mock_delete_session = mocker.AsyncMock()
-    mocker.patch.object(SessionMiddleware, "delete_session", mock_delete_session)
+    mocker.patch.object(SessionDatabaseFactory, "delete_session", mock_delete_session)
 
     client.get(url="/logout", headers={"Cookie": "PHPSESSID=valid_session_id"})
 
-    mock_get_session.assert_called_once_with("valid_session_id")
-    mock_delete_session.assert_called_once_with("valid_session_id")
+    mock_get_session.assert_called_once_with(session_id="valid_session_id")
+    mock_delete_session.assert_called_once_with(session_id="valid_session_id")
 
 
 @pytest.mark.asyncio
 async def test_user_with_valid_session(valid_token, valid_userinfo, mocker):
     valid_session = {"token": valid_token, "user": valid_userinfo}
     mock_get_session = mocker.AsyncMock(return_value=valid_session)
-    mocker.patch.object(SessionMiddleware, "get_session", mock_get_session)
+    mocker.patch.object(SessionDatabaseFactory, "get_session", mock_get_session)
 
-    mock_save_session = mocker.AsyncMock(return_value=valid_session)
-    mocker.patch.object(SessionMiddleware, "save_session", mock_save_session)
+    mock_save_session = mocker.AsyncMock()
+    mocker.patch.object(SessionDatabaseFactory, "save_session", mock_save_session)
 
     response = client.get(url="/user", headers={"Cookie": "PHPSESSID=valid_session_id"})
 
     assert response.status_code == 200
-    mock_get_session.assert_called_once_with("valid_session_id")
+    mock_get_session.assert_called_once_with(session_id="valid_session_id")
 
     user = User(**response.json())
     assert user.email == "test_email"
@@ -133,8 +139,8 @@ async def test_user_with_no_session_and_valid_token(
 
     valid_session = {"token": valid_token, "user": valid_userinfo}
 
-    mock_save_session = mocker.AsyncMock(return_value=valid_session)
-    mocker.patch.object(SessionMiddleware, "save_session", mock_save_session)
+    mock_save_session = mocker.AsyncMock()
+    mocker.patch.object(SessionDatabaseFactory, "save_session", mock_save_session)
 
     response = client.get(url="/user", headers={"Authorization": "valid_gitlab_token"})
 
@@ -148,7 +154,9 @@ async def test_user_with_no_session_and_valid_token(
     }
 
     assert response.status_code == 200
-    mock_save_session.assert_called_once_with(ANY, {"user": session_user})
+    mock_save_session.assert_called_once_with(
+        session_id=ANY, data={"user": session_user}
+    )
 
     user = User(**response.json())
     assert user.email == "test_email"
@@ -159,7 +167,7 @@ async def test_user_with_no_session_and_valid_token(
 @pytest.mark.asyncio
 async def test_user_with_no_session(mocker):
     mock_get_session = mocker.AsyncMock()
-    mocker.patch.object(SessionMiddleware, "get_session", mock_get_session)
+    mocker.patch.object(SessionDatabaseFactory, "get_session", mock_get_session)
 
     # do not pass in cookie
     response = client.get(url="/user")
@@ -171,10 +179,12 @@ async def test_user_with_no_session(mocker):
 @pytest.mark.asyncio
 async def test_user_with_no_session_and_invalid_token(mocker):
     mock_get_session = mocker.AsyncMock()
-    mocker.patch.object(SessionMiddleware, "get_session", mock_get_session)
+    mocker.patch.object(SessionDatabaseFactory, "get_session", mock_get_session)
 
     mock_gitlab_request = mocker.AsyncMock(return_value=None)
-    mocker.patch("geomagio.api.secure.login.get_gitlab_user", mock_gitlab_request)
+    mocker.patch(
+        "geomagio.api.secure.login_routes.get_gitlab_user", mock_gitlab_request
+    )
 
     # do not pass in cookie
     response = client.get(
@@ -195,14 +205,14 @@ async def test_user_invalid_session(mocker):
         "url": "test_url",
     }
     mock_get_session = mocker.AsyncMock(return_value=invalid_session)
-    mocker.patch.object(SessionMiddleware, "get_session", mock_get_session)
+    mocker.patch.object(SessionDatabaseFactory, "get_session", mock_get_session)
 
     mock_save_session = mocker.AsyncMock()
-    mocker.patch.object(SessionMiddleware, "save_session", mock_save_session)
+    mocker.patch.object(SessionDatabaseFactory, "save_session", mock_save_session)
 
     response = client.get(
         url="/user", headers={"Cookie": "PHPSESSID=invalid_session_id"}
     )
 
     assert response.status_code == 401
-    mock_get_session.assert_called_once_with("invalid_session_id")
+    mock_get_session.assert_called_once_with(session_id="invalid_session_id")
diff --git a/test/api_test/secure_test/metadata_routes_test.py b/test/api_test/secure_test/metadata_routes_test.py
new file mode 100644
index 000000000..cc97e394d
--- /dev/null
+++ b/test/api_test/secure_test/metadata_routes_test.py
@@ -0,0 +1,931 @@
+import pytest
+import json
+import httpx
+
+from unittest.mock import ANY
+from fastapi.testclient import TestClient
+from obspy import UTCDateTime
+
+from geomagio.api.secure.secure_app import app
+from geomagio.api.db.models.metadata import Metadata
+from geomagio.api.db.factories.MetadataDatabaseFactory import MetadataDatabaseFactory
+from geomagio.api.db.factories.MetadataHistoryDatabaseFactory import (
+    MetadataHistoryDatabaseFactory,
+)
+from geomagio.api.db.factories.SessionDatabaseFactory import SessionDatabaseFactory
+
+client = TestClient(app)
+
+
+@pytest.mark.asyncio
+async def test_create_metadata_valid_session(
+    valid_session, valid_reading_metadata, mocker
+):
+    mock_get_session = mocker.AsyncMock(return_value=valid_session)
+    mocker.patch.object(SessionDatabaseFactory, "get_session", mock_get_session)
+
+    mock_save_session = mocker.AsyncMock()
+    mocker.patch.object(SessionDatabaseFactory, "save_session", mock_save_session)
+
+    # create mock response for MetadataDatabaseFactory().create_metadata
+    valid_reading_metadata["id"] = 7204
+    created_metadata = Metadata(**valid_reading_metadata)
+    created_metadata = Metadata.model_validate(created_metadata)
+
+    mock_save_metadata = mocker.AsyncMock(return_value=created_metadata)
+    mocker.patch.object(MetadataDatabaseFactory, "create_metadata", mock_save_metadata)
+
+    body = json.dumps(valid_reading_metadata)
+
+    response = client.post(
+        url="/metadata", headers={"Cookie": "PHPSESSID=valid_session_id"}, content=body
+    )
+
+    assert response.status_code == 201
+    mock_get_session.assert_called_once_with(session_id="valid_session_id")
+
+    # assert the response matches the object returned by MetadataDatabaseFactory().create_metadata
+    response_body = response.json()
+    assert response_body["id"] == 7204
+    assert response_body["category"] == "reading"
+    assert response_body["starttime"] == UTCDateTime("2024-11-07T12:12:12Z")
+    assert response_body["endtime"] == UTCDateTime("2024-11-07T20:20:20Z")
+    assert response_body["network"] == "NT"
+    assert response_body["station"] == "BOU"
+    assert response_body["status"] == "new"
+    assert response_body["priority"] == 1
+    assert response_body["metadata"] == created_metadata.metadata
+
+
+@pytest.mark.asyncio
+async def test_create_metadata_valid_session_invalid_user_input(valid_session, mocker):
+    mock_get_session = mocker.AsyncMock(return_value=valid_session)
+    mocker.patch.object(SessionDatabaseFactory, "get_session", mock_get_session)
+
+    mock_save_session = mocker.AsyncMock()
+    mocker.patch.object(SessionDatabaseFactory, "save_session", mock_save_session)
+
+    invalid_user_input = {
+        "created_by": "test_user",
+        "starttime": "January 28th, 2025 08:00:00",
+        "endtime": "2024-11-07T20:20:20Z",
+        "network": "NT",
+        "station": "BOU",
+        "category": "reading",
+        "data_valid": True,
+    }
+
+    body = json.dumps(invalid_user_input)
+
+    response = client.post(
+        url="/metadata", headers={"Cookie": "PHPSESSID=valid_session_id"}, content=body
+    )
+
+    response_body = response.json()
+
+    assert response.status_code == 422
+    mock_get_session.assert_called_once_with(session_id="valid_session_id")
+
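+    # two messages are expected because starttime appears to be validated as
+    # a union: an isinstance check against UTCDateTime plus a string-parsing
+    # validator, each contributing one error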
+    assert (
+        response_body["detail"][0]["msg"]
+        == "Input should be an instance of UTCDateTime"
+    )
+    assert (
+        response_body["detail"][1]["msg"]
+        == "Value error, Invalid time type. See obspy UTCDateTime for more information."
+    )
+
+
+@pytest.mark.asyncio
+async def test_create_metadata_valid_auth_token(
+    valid_token, valid_userinfo, valid_gitlab_user, valid_reading_metadata, mocker
+):
+    valid_user_response = httpx.Response(status_code=200, json=valid_gitlab_user)
+
+    valid_groups = [
+        {
+            "id": 1,
+            "full_path": "group1",
+        },
+        {"id": 2, "full_path": "group2"},
+    ]
+
+    valid_groups_response = httpx.Response(status_code=200, json=valid_groups)
+
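+    # the three mocked responses below presumably mirror get_gitlab_user's
+    # request sequence: user lookup, first page of groups, then an empty page
+    # that ends group pagination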
+    mock_gitlab_request = mocker.AsyncMock(
+        side_effect=[
+            valid_user_response,
+            valid_groups_response,
+            httpx.Response(status_code=200, json={}),
+        ]
+    )
+    mocker.patch("httpx.AsyncClient.get", mock_gitlab_request)
+
+    valid_session = {"token": valid_token, "user": valid_userinfo}
+
+    mock_save_session = mocker.AsyncMock(return_value=valid_session)
+    mocker.patch.object(SessionDatabaseFactory, "save_session", mock_save_session)
+
+    # create mock response for MetadataDatabaseFactory().create_metadata
+    valid_reading_metadata["id"] = 7204
+    created_metadata = Metadata(**valid_reading_metadata)
+    created_metadata = Metadata.model_validate(created_metadata)
+    mock_save_metadata = mocker.AsyncMock(return_value=created_metadata)
+    mocker.patch.object(MetadataDatabaseFactory, "create_metadata", mock_save_metadata)
+
+    body = json.dumps(valid_reading_metadata)
+
+    response = client.post(
+        url="/metadata", headers={"Authorization": "auth_token"}, content=body
+    )
+
+    session_user = {
+        "email": "test_email",
+        "sub": 1234,
+        "groups": ["group1", "group2"],
+        "name": "test_user",
+        "nickname": "test_user_nickname",
+        "picture": "http://picture_url",
+    }
+
+    assert response.status_code == 201
+    mock_save_session.assert_called_once_with(
+        session_id=ANY, data={"user": session_user}
+    )
+
+    returned_metadata = Metadata(**response.json())
+    assert returned_metadata.id == 7204
+    assert returned_metadata.category == "reading"
+    assert returned_metadata.starttime == UTCDateTime("2024-11-07T12:12:12Z")
+    assert returned_metadata.endtime == UTCDateTime("2024-11-07T20:20:20Z")
+    assert returned_metadata.network == "NT"
+    assert returned_metadata.station == "BOU"
+    assert returned_metadata.status == "new"
+    assert returned_metadata.priority == 1
+
+
+@pytest.mark.asyncio
+async def test_create_metadata_no_session_or_token(valid_reading_metadata, mocker):
+    mock_get_session = mocker.AsyncMock()
+    mocker.patch.object(SessionDatabaseFactory, "get_session", mock_get_session)
+
+    body = json.dumps(valid_reading_metadata)
+
+    # do not pass in cookie or auth
+    response = client.post(url="/metadata", content=body)
+    assert response.status_code == 401
+
+    mock_get_session.assert_not_called()
+
+
+@pytest.mark.asyncio
+async def test_create_metadata_with_no_session_and_invalid_token(mocker):
+    mock_get_session = mocker.AsyncMock()
+    mocker.patch.object(SessionDatabaseFactory, "get_session", mock_get_session)
+
+    mock_gitlab_request = mocker.AsyncMock(return_value=None)
+    mocker.patch(
+        "geomagio.api.secure.login_routes.get_gitlab_user", mock_gitlab_request
+    )
+
+    response = client.post(
+        url="/metadata", headers={"Authorization": "invalid_gitlab_token"}
+    )
+    assert response.status_code == 401
+
+    mock_get_session.assert_not_called()
+    mock_gitlab_request.assert_called_once_with(token="invalid_gitlab_token")
+
+
+@pytest.mark.asyncio
+async def test_create_metadata_invalid_session(mocker):
+    # mock invalid session. this is created when users GET /metadata without logging in
+    invalid_session = {
+        "redirect_uri": "redirect_uri",
+        "nonce": "nonce_str",
+        "url": "test_url",
+    }
+    mock_get_session = mocker.AsyncMock(return_value=invalid_session)
+    mocker.patch.object(SessionDatabaseFactory, "get_session", mock_get_session)
+
+    mock_save_session = mocker.AsyncMock()
+    mocker.patch.object(SessionDatabaseFactory, "save_session", mock_save_session)
+
+    response = client.post(
+        url="/metadata", headers={"Cookie": "PHPSESSID=invalid_session_id"}
+    )
+
+    assert response.status_code == 401
+    mock_get_session.assert_called_once_with(session_id="invalid_session_id")
+
+
+@pytest.mark.asyncio
+async def test_update_metadata(
+    valid_token, valid_userinfo, instrument_metadata, mocker
+):
+    valid_userinfo["groups"] = ["ghsc/geomag/operations/roles/reviewer"]
+    valid_session = {"token": valid_token, "user": valid_userinfo}
+    mock_get_session = mocker.AsyncMock(return_value=valid_session)
+    mocker.patch.object(SessionDatabaseFactory, "get_session", mock_get_session)
+
+    mock_save_session = mocker.AsyncMock(return_value=valid_session)
+    mocker.patch.object(SessionDatabaseFactory, "save_session", mock_save_session)
+
+    # mock response for MetadataDatabaseFactory().update_metadata
+    instrument_metadata.id = 7204
+    instrument_metadata.updated_by = "test_user"
+    instrument_metadata.updated_time = UTCDateTime.now()
+    instrument_metadata.metadata = {
+        "type": "FGE",
+        "channels": {
+            "U": [{"channel": "U_Volt", "offset": 0, "scale": 313.2}],
+            "V": [{"channel": "V_Volt", "offset": 0, "scale": 312.3}],
+            "W": [{"channel": "W_Volt", "offset": 0, "scale": 312.0}],
+        },
+        "electronics": {
+            "serial": "E0542",
+            "x-scale": 313.2,
+            "y-scale": 312.3,
+            "z-scale": 312.0,
+            "temperature-scale": 0.01,
+        },
+        "sensor": {
+            "serial": "S0419",
+            "x-constant": 36958,
+            "y-constant": 36849,
+            "z-constant": 36810,
+        },
+    }
+    instrument_metadata = Metadata.model_validate(instrument_metadata)
+    mock_update = mocker.AsyncMock(return_value=instrument_metadata)
+    mocker.patch.object(MetadataDatabaseFactory, "update_metadata", mock_update)
+
+    valid_user_input = {
+        "id": 7204,
+        "metadata": {
+            "type": "FGE",
+            "channels": {
+                "U": [{"channel": "U_Volt", "offset": 0, "scale": 313.2}],
+                "V": [{"channel": "V_Volt", "offset": 0, "scale": 312.3}],
+                "W": [{"channel": "W_Volt", "offset": 0, "scale": 312.0}],
+            },
+            "electronics": {
+                "serial": "E0542",
+                "x-scale": 313.2,
+                "y-scale": 312.3,
+                "z-scale": 312.0,
+                "temperature-scale": 0.01,
+            },
+            "sensor": {
+                "serial": "S0419",
+                "x-constant": 36958,
+                "y-constant": 36849,
+                "z-constant": 36810,
+            },
+        },
+    }
+
+    body = json.dumps(valid_user_input)
+
+    response = client.put(
+        url="/metadata/1234",
+        headers={"Cookie": "PHPSESSID=valid_session_id"},
+        content=body,
+    )
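+    # note the path id (1234) and the body id (7204) differ here; because
+    # update_metadata is mocked, the response simply mirrors the mock's
+    # return value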
+    assert response.status_code == 200
+    mock_get_session.assert_called_once_with(session_id="valid_session_id")
+
+    # assert the response matches the object returned by MetadataDatabaseFactory().update_metadata
+    response_body = response.json()
+    assert response_body["id"] == 7204
+    assert response_body["updated_by"] == "test_user"
+    assert response_body["category"] == "instrument"
+    assert response_body["network"] == "NT"
+    assert response_body["station"] == "BDT"
+    assert response_body["status"] == "new"
+    assert response_body["priority"] == 1
+    assert response_body["metadata"] == instrument_metadata.metadata
+
+
+@pytest.mark.asyncio
+async def test_update_metadata_invalid_input(valid_token, valid_userinfo, mocker):
+    valid_userinfo["groups"] = ["ghsc/geomag/operations/roles/reviewer"]
+    valid_session = {"token": valid_token, "user": valid_userinfo}
+    mock_get_session = mocker.AsyncMock(return_value=valid_session)
+    mocker.patch.object(SessionDatabaseFactory, "get_session", mock_get_session)
+
+    mock_save_session = mocker.AsyncMock(return_value=valid_session)
+    mocker.patch.object(SessionDatabaseFactory, "save_session", mock_save_session)
+
+    invalid_user_input = {
+        "id": 7204,
+        "starttime": "12:20:32",
+    }
+
+    body = json.dumps(invalid_user_input)
+
+    response = client.put(
+        url="/metadata/1234",
+        headers={"Cookie": "PHPSESSID=valid_session_id"},
+        content=body,
+    )
+    assert response.status_code == 422
+    mock_get_session.assert_called_once_with(session_id="valid_session_id")
+
+    response_body = response.json()
+
+    assert (
+        response_body["detail"][0]["msg"]
+        == "Input should be an instance of UTCDateTime"
+    )
+    assert (
+        response_body["detail"][1]["msg"]
+        == "Value error, Invalid time type. See obspy UTCDateTime for more information."
+    )
+
+
+@pytest.mark.asyncio
+async def test_update_metadata_valid_auth_token(
+    valid_token,
+    valid_userinfo,
+    valid_gitlab_user,
+    instrument_metadata,
+    mocker,
+):
+    valid_user_response = httpx.Response(status_code=200, json=valid_gitlab_user)
+
+    valid_groups = [
+        {
+            "id": 1,
+            "full_path": "group1",
+        },
+        {"id": 2, "full_path": "ghsc/geomag/operations/roles/reviewer"},
+    ]
+
+    valid_groups_response = httpx.Response(status_code=200, json=valid_groups)
+
+    mock_gitlab_request = mocker.AsyncMock(
+        side_effect=[
+            valid_user_response,
+            valid_groups_response,
+            httpx.Response(status_code=200, json={}),
+        ]
+    )
+    mocker.patch("httpx.AsyncClient.get", mock_gitlab_request)
+
+    valid_session = {"token": valid_token, "user": valid_userinfo}
+
+    mock_save_session = mocker.AsyncMock(return_value=valid_session)
+    mocker.patch.object(SessionDatabaseFactory, "save_session", mock_save_session)
+
+    # mock response for MetadataDatabaseFactory().update_metadata
+    instrument_metadata.id = 7204
+    instrument_metadata.updated_by = "test_user"
+    instrument_metadata.updated_time = UTCDateTime.now()
+    instrument_metadata.metadata = {
+        "type": "FGE",
+        "channels": {
+            "U": [{"channel": "U_Volt", "offset": 0, "scale": 313.2}],
+            "V": [{"channel": "V_Volt", "offset": 0, "scale": 312.3}],
+            "W": [{"channel": "W_Volt", "offset": 0, "scale": 312.0}],
+        },
+        "electronics": {
+            "serial": "E0542",
+            "x-scale": 313.2,
+            "y-scale": 312.3,
+            "z-scale": 312.0,
+            "temperature-scale": 0.01,
+        },
+        "sensor": {
+            "serial": "S0419",
+            "x-constant": 36958,
+            "y-constant": 36849,
+            "z-constant": 36810,
+        },
+    }
+    instrument_metadata = Metadata.model_validate(instrument_metadata)
+    mock_update = mocker.AsyncMock(return_value=instrument_metadata)
+    mocker.patch.object(MetadataDatabaseFactory, "update_metadata", mock_update)
+
+    valid_user_input = {
+        "id": 7204,
+        "metadata": {
+            "type": "FGE",
+            "channels": {
+                "U": [{"channel": "U_Volt", "offset": 0, "scale": 313.2}],
+                "V": [{"channel": "V_Volt", "offset": 0, "scale": 312.3}],
+                "W": [{"channel": "W_Volt", "offset": 0, "scale": 312.0}],
+            },
+            "electronics": {
+                "serial": "E0542",
+                "x-scale": 313.2,
+                "y-scale": 312.3,
+                "z-scale": 312.0,
+                "temperature-scale": 0.01,
+            },
+            "sensor": {
+                "serial": "S0419",
+                "x-constant": 36958,
+                "y-constant": 36849,
+                "z-constant": 36810,
+            },
+        },
+    }
+
+    body = json.dumps(valid_user_input)
+
+    response = client.put(
+        url="/metadata/1234", headers={"Authorization": "auth_token"}, content=body
+    )
+
+    session_user = {
+        "email": "test_email",
+        "sub": 1234,
+        "groups": ["group1", "ghsc/geomag/operations/roles/reviewer"],
+        "name": "test_user",
+        "nickname": "test_user_nickname",
+        "picture": "http://picture_url",
+    }
+
+    assert response.status_code == 200
+    mock_save_session.assert_called_once_with(
+        session_id=ANY, data={"user": session_user}
+    )
+
+    # assert the response matches the object returned by MetadataDatabaseFactory().update_metadata
+    response_body = response.json()
+    assert response_body["id"] == 7204
+    assert response_body["updated_by"] == "test_user"
+    assert response_body["category"] == "instrument"
+    assert response_body["network"] == "NT"
+    assert response_body["station"] == "BDT"
+    assert response_body["status"] == "new"
+    assert response_body["priority"] == 1
+    assert response_body["metadata"] == instrument_metadata.metadata
+
+
+@pytest.mark.asyncio
+async def test_update_metadata_no_valid_group(
+    valid_session, valid_reading_metadata, mocker
+):
+    mock_get_session = mocker.AsyncMock(return_value=valid_session)
+    mocker.patch.object(SessionDatabaseFactory, "get_session", mock_get_session)
+
+    mock_save_session = mocker.AsyncMock(return_value=valid_session)
+    mocker.patch.object(SessionDatabaseFactory, "save_session", mock_save_session)
+
+    body = json.dumps(valid_reading_metadata)
+
+    response = client.put(
+        url="/metadata/1234",
+        headers={"Cookie": "PHPSESSID=valid_session_id"},
+        content=body,
+    )
+    assert response.status_code == 403
+    mock_get_session.assert_called_once_with(session_id="valid_session_id")
+
+
+@pytest.mark.asyncio
+async def test_update_metadata_no_session(
+    valid_session, valid_reading_metadata, mocker
+):
+    mock_get_session = mocker.AsyncMock(return_value=valid_session)
+    mocker.patch.object(SessionDatabaseFactory, "get_session", mock_get_session)
+
+    mock_save_session = mocker.AsyncMock(return_value=valid_session)
+    mocker.patch.object(SessionDatabaseFactory, "save_session", mock_save_session)
+
+    body = json.dumps(valid_reading_metadata)
+
+    response = client.put(url="/metadata/1234", content=body)
+    assert response.status_code == 401
+
+    mock_get_session.assert_not_called()
+
+
+@pytest.mark.asyncio
+async def test_update_metadata_invalid_session(mocker, valid_reading_metadata):
+    # mock invalid session. this is created when users GET /metadata without logging in
+    invalid_session = {
+        "redirect_uri": "redirect_uri",
+        "nonce": "nonce_str",
+        "url": "test_url",
+    }
+    mock_get_session = mocker.AsyncMock(return_value=invalid_session)
+    mocker.patch.object(SessionDatabaseFactory, "get_session", mock_get_session)
+
+    mock_save_session = mocker.AsyncMock()
+    mocker.patch.object(SessionDatabaseFactory, "save_session", mock_save_session)
+
+    body = json.dumps(valid_reading_metadata)
+
+    response = client.put(
+        url="/metadata/1234",
+        headers={"Cookie": "PHPSESSID=invalid_session_id"},
+        content=body,
+    )
+
+    assert response.status_code == 401
+    mock_get_session.assert_called_once_with(session_id="invalid_session_id")
+
+
+@pytest.mark.asyncio
+async def test_update_metadata_with_no_session_and_invalid_token(
+    mocker, valid_reading_metadata
+):
+    mock_get_session = mocker.AsyncMock()
+    mocker.patch.object(SessionDatabaseFactory, "get_session", mock_get_session)
+
+    mock_gitlab_request = mocker.AsyncMock(return_value=None)
+    mocker.patch(
+        "geomagio.api.secure.login_routes.get_gitlab_user", mock_gitlab_request
+    )
+
+    body = json.dumps(valid_reading_metadata)
+
+    response = client.put(
+        url="/metadata/1234",
+        headers={"Authorization": "invalid_gitlab_token"},
+        content=body,
+    )
+    assert response.status_code == 401
+
+    mock_get_session.assert_not_called()
+    mock_gitlab_request.assert_called_once_with(token="invalid_gitlab_token")
+
+
+@pytest.mark.asyncio
+async def test_update_metadata_no_existing_metadata(
+    valid_userinfo, valid_token, mocker
+):
+    valid_userinfo["groups"] = ["ghsc/geomag/operations/roles/reviewer"]
+    valid_session = {"token": valid_token, "user": valid_userinfo}
+    mock_get_session = mocker.AsyncMock(return_value=valid_session)
+    mocker.patch.object(SessionDatabaseFactory, "get_session", mock_get_session)
+
+    mock_save_session = mocker.AsyncMock(return_value=valid_session)
+    mocker.patch.object(SessionDatabaseFactory, "save_session", mock_save_session)
+
+    mock_update = mocker.AsyncMock(side_effect=ValueError("metadata not found"))
+    mocker.patch.object(MetadataDatabaseFactory, "update_metadata", mock_update)
+
+    body = json.dumps({"id": "1234", "category": "instrument"})
+
+    response = client.put(
+        url="/metadata/1234",
+        headers={"Cookie": "PHPSESSID=valid_session_id"},
+        content=body,
+    )
+    assert response.status_code == 404
+    mock_get_session.assert_called_once_with(session_id="valid_session_id")
+
+
+@pytest.mark.asyncio
+async def test_get_metadata(instrument_metadata, metadata_with_datetimes, mocker):
+    # mock return multiple metadata
+    instrument_metadata.id = 1242
+    instrument_metadata.station = "BOU"
+    metadata_with_datetimes.id = 821
+    all_instrument_metadata = [instrument_metadata, metadata_with_datetimes]
+    mock_get = mocker.AsyncMock(return_value=all_instrument_metadata)
+    mocker.patch.object(MetadataDatabaseFactory, "get_metadata", mock_get)
+
+    response = client.get(url="/metadata?station=BOU&category=instrument&network=NT")
+    assert response.status_code == 200
+
+    response_body = response.json()
+    assert len(response_body) == 2
+
+    for metadata in response_body:
+        assert metadata["station"] == "BOU"
+        assert metadata["category"] == "instrument"
+        assert metadata["network"] == "NT"
+
+        # assert nested metadata is returned as "metadata" and not "metadata_" to the user
+        assert metadata["metadata"] != None
+        assert metadata.get("metadata_") == None
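+
+
+# The "metadata" vs "metadata_" assertions above rely on the Metadata model
+# aliasing its JSON column: SQLModel reserves the .metadata attribute on table
+# models, so the field is presumably declared under another name and serialized
+# back out as "metadata", roughly like this sketch (names here are assumptions,
+# not the model's actual declaration):
+#
+#     metadata_: dict | None = Field(
+#         default=None, alias="metadata", sa_column=Column("metadata", JSON)
+#     )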
+
+
+@pytest.mark.asyncio
+async def test_get_metadata_return_single(instrument_metadata, mocker):
+    # mock return single metadata
+    instrument_metadata.id = 1242
+    all_instrument_metadata = [instrument_metadata]
+    mock_get = mocker.AsyncMock(return_value=all_instrument_metadata)
+    mocker.patch.object(MetadataDatabaseFactory, "get_metadata", mock_get)
+
+    response = client.get(url="/metadata?station=BDT&category=instrument&network=NT")
+    assert response.status_code == 200
+
+    response_body = response.json()
+    assert len(response_body) == 1
+    assert response_body[0]["station"] == "BDT"
+    assert response_body[0]["category"] == "instrument"
+    assert response_body[0]["network"] == "NT"
+
+    # assert nested metadata is returned as "metadata" and not "metadata_" to the user
+    assert response_body[0]["metadata"] != None
+    assert response_body[0].get("metadata_") == None
+
+
+@pytest.mark.asyncio
+async def test_get_metadata_return_empty(mocker):
+    # mock return empty list
+    mock_get = mocker.AsyncMock(return_value=[])
+    mocker.patch.object(MetadataDatabaseFactory, "get_metadata", mock_get)
+
+    response = client.get(url="/metadata?station=BDT&category=instrument&network=NT")
+    assert response.status_code == 200
+
+    response_body = response.json()
+    assert len(response_body) == 0
+
+
+@pytest.mark.asyncio
+async def test_get_metadata_invalid_input():
+    response = client.get(
+        url="/metadata?station=BDT&category=instrument&starttime=1:12:24T8:8:43"
+    )
+    assert response.status_code == 422
+
+    response_body = response.json()
+    assert (
+        response_body["detail"][0]["msg"]
+        == "Input should be an instance of UTCDateTime"
+    )
+    assert (
+        response_body["detail"][1]["msg"]
+        == "Value error, Invalid time type. See obspy UTCDateTime for more information."
+    )
+
+
+@pytest.mark.asyncio
+async def test_get_metadata_by_id(instrument_metadata, mocker):
+    instrument_metadata.id = 1242
+    mock_get = mocker.AsyncMock(return_value=instrument_metadata)
+    mocker.patch.object(MetadataDatabaseFactory, "get_metadata_by_id", mock_get)
+
+    response = client.get(url="/metadata/1242")
+    assert response.status_code == 200
+
+    response_body = response.json()
+    assert response_body["station"] == "BDT"
+    assert response_body["category"] == "instrument"
+    assert response_body["network"] == "NT"
+
+    # assert nested metadata is returned as "metadata" and not "metadata_" to the user
+    assert response_body["metadata"] == {
+        "type": "FGE",
+        "channels": {
+            "U": [{"channel": "U_Volt", "offset": 0, "scale": 313.2}],
+            "V": [{"channel": "V_Volt", "offset": 0, "scale": 312.3}],
+            "W": [{"channel": "W_Volt", "offset": 0, "scale": 312.0}],
+        },
+        "electronics": {
+            "serial": "E0542",
+            "x-scale": 313.2,
+            "y-scale": 312.3,
+            "z-scale": 312.0,
+            "temperature-scale": 0.01,
+        },
+        "sensor": {
+            "serial": "S0419",
+            "x-constant": 36958,
+            "y-constant": 36849,
+            "z-constant": 36811,
+        },
+    }
+    assert response_body.get("metadata_") is None
+
+
+@pytest.mark.asyncio
+async def test_get_metadata_by_id_no_metadata_found(mocker):
+    mock_get = mocker.AsyncMock(return_value=None)
+    mocker.patch.object(MetadataDatabaseFactory, "get_metadata_by_id", mock_get)
+
+    response = client.get(url="/metadata/1242")
+    assert response.status_code == 404
+    response_body = response.json()
+    assert response_body["detail"] == "metadata not found"
+
+
+@pytest.mark.asyncio
+async def test_get_metadata_by_id_invalid_id():
+    response = client.get(url="/metadata/828cskje9w8e")
+    assert response.status_code == 422
+
+    response_body = response.json()
+    assert (
+        response_body["detail"][0]["msg"]
+        == "Input should be a valid integer, unable to parse string as an integer"
+    )
+
+
+@pytest.mark.asyncio
+async def test_get_metadata_history(
+    formatted_metadata_history_1, formatted_metadata_history_2, mocker
+):
+    # mock return multiple metadata_history
+    mock_get = mocker.AsyncMock(
+        return_value=[formatted_metadata_history_1, formatted_metadata_history_2]
+    )
+    mocker.patch.object(
+        MetadataHistoryDatabaseFactory, "get_metadata_history", mock_get
+    )
+
+    response = client.get(
+        url="/metadata/history?station=BRW&category=observatory&network=NT"
+    )
+    assert response.status_code == 200
+
+    response_body = response.json()
+    assert len(response_body) == 2
+
+    for metadata_history in response_body:
+        assert metadata_history["metadata_id"] == 38746
+        assert metadata_history["station"] == "BRW"
+        assert metadata_history["category"] == "observatory"
+        assert metadata_history["network"] == "NT"
+
+        # assert nested metadata is returned as "metadata" and not "metadata_" to the user
+        assert metadata_history["metadata"] is not None
+        assert metadata_history.get("metadata_") is None
+
+
+@pytest.mark.asyncio
+async def test_get_metadata_history_return_single(formatted_metadata_history_1, mocker):
+    # mock return single MetadataHistoryTable
+    mock_get = mocker.AsyncMock(return_value=[formatted_metadata_history_1])
+    mocker.patch.object(
+        MetadataHistoryDatabaseFactory, "get_metadata_history", mock_get
+    )
+
+    response = client.get(
+        url="/metadata/history?station=BRW&category=observatory&network=NT"
+    )
+    assert response.status_code == 200
+
+    response_body = response.json()
+    assert len(response_body) == 1
+
+    assert response_body[0]["metadata_id"] == 38746
+    assert response_body[0]["station"] == "BRW"
+    assert response_body[0]["category"] == "observatory"
+    assert response_body[0]["network"] == "NT"
+
+    # assert nested metadata is returned as "metadata" and not "metadata_" to the user
+    assert response_body[0]["metadata"] is not None
+    assert response_body[0].get("metadata_") is None
+
+
+@pytest.mark.asyncio
+async def test_get_metadata_history_return_empty(mocker):
+    # mock return empty list
+    mock_get = mocker.AsyncMock(return_value=[])
+    mocker.patch.object(
+        MetadataHistoryDatabaseFactory, "get_metadata_history", mock_get
+    )
+
+    response = client.get(
+        url="/metadata/history?station=BRW&category=observatory&network=NT"
+    )
+    assert response.status_code == 200
+
+    response_body = response.json()
+    assert len(response_body) == 0
+
+
+@pytest.mark.asyncio
+async def test_get_metadata_history_invalid_input():
+    response = client.get(
+        url="/metadata/history?station=BRW&category=observatory&network=NT&starttime=today"
+    )
+    assert response.status_code == 422
+
+    response_body = response.json()
+    assert (
+        response_body["detail"][0]["msg"]
+        == "Input should be an instance of UTCDateTime"
+    )
+    assert (
+        response_body["detail"][1]["msg"]
+        == "Value error, Invalid time type. See obspy UTCDateTime for more information."
+    )
+
+
+@pytest.mark.asyncio
+async def test_get_metadata_history_by_id(formatted_metadata_history_1, mocker):
+    mock_get = mocker.AsyncMock(return_value=formatted_metadata_history_1)
+    mocker.patch.object(
+        MetadataHistoryDatabaseFactory, "get_metadata_history_by_id", mock_get
+    )
+
+    response = client.get(url="/metadata/history/124221")
+    assert response.status_code == 200
+
+    response_body = response.json()
+    assert response_body["metadata_id"] == 38746
+    assert response_body["station"] == "BRW"
+    assert response_body["category"] == "observatory"
+    assert response_body["network"] == "NT"
+
+    # assert nested metadata is returned as "metadata" and not "metadata_" to the user
+    assert response_body["metadata"] == {
+        "id": "BRW",
+        "name": "Barrow",
+        "marks": [{"name": "m", "azimuth": "1"}],
+        "piers": [{"name": "p", "correction": "12"}],
+        "agency": "USGS",
+        "latitude": 71.322,
+        "elevation": 10,
+        "longitude": 203.378,
+        "agency_name": "United States Geological Survey (USGS)",
+        "electronics": ["1"],
+        "theodolites": ["12"],
+        "declination_base": 10589,
+        "sensor_orientation": "HDZF",
+    }
+    assert response_body.get("metadata_") is None
+
+
+@pytest.mark.asyncio
+async def test_get_metadata_history_by_id_no_metadata_history_found(mocker):
+    mock_get = mocker.AsyncMock(return_value=None)
+    mocker.patch.object(
+        MetadataHistoryDatabaseFactory, "get_metadata_history_by_id", mock_get
+    )
+
+    response = client.get(url="/metadata/history/34253")
+    assert response.status_code == 404
+    response_body = response.json()
+    assert response_body["detail"] == "metadata_history not found"
+
+
+@pytest.mark.asyncio
+async def test_get_metadata_history_by_id_invalid_id():
+    response = client.get(url="/metadata/history/8239453knsoi23yr")
+    assert response.status_code == 422
+
+    response_body = response.json()
+    assert (
+        response_body["detail"][0]["msg"]
+        == "Input should be a valid integer, unable to parse string as an integer"
+    )
+
+
+@pytest.mark.asyncio
+async def test_get_metadata_history_by_metadata_id(
+    get_all_formatted_metadata_history, mocker
+):
+    mock_get = mocker.AsyncMock(return_value=get_all_formatted_metadata_history)
+    mocker.patch.object(
+        MetadataHistoryDatabaseFactory, "get_metadata_history_by_metadata_id", mock_get
+    )
+
+    response = client.get(url="/metadata/12532/history")
+
+    assert response.status_code == 200
+
+    response_body = response.json()
+    assert len(response_body) == 4
+    assert response_body[0]["id"] == 38746
+    assert response_body[1]["metadata_id"] == 38746
+    assert response_body[2]["metadata_id"] == 38746
+    assert response_body[3]["metadata_id"] == 38746
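+    # by fixture convention the first element is the current Metadata row
+    # (hence "id"); the remaining entries are MetadataHistory rows keyed by
+    # "metadata_id"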
+
+
+@pytest.mark.asyncio
+async def test_get_metadata_history_by_metadata_id_only_metadata(
+    formatted_metadata_for_formatted_metadata_history, mocker
+):
+    mock_get = mocker.AsyncMock(
+        return_value=[formatted_metadata_for_formatted_metadata_history]
+    )
+    mocker.patch.object(
+        MetadataHistoryDatabaseFactory, "get_metadata_history_by_metadata_id", mock_get
+    )
+
+    response = client.get(url="/metadata/12532/history")
+
+    assert response.status_code == 200
+
+    response_body = response.json()
+    assert len(response_body) == 1
+    assert response_body[0]["id"] == 38746
+
+
+@pytest.mark.asyncio
+async def test_get_metadata_history_by_metadata_id_not_found(mocker):
+    mock_get = mocker.AsyncMock(return_value=None)
+    mocker.patch.object(
+        MetadataHistoryDatabaseFactory, "get_metadata_history_by_metadata_id", mock_get
+    )
+
+    response = client.get(url="/metadata/3824598235973/history")
+
+    assert response.status_code == 404
+
+    response_body = response.json()
+    assert response_body["detail"] == "metadata not found"
diff --git a/test/api_test/secure_test/metadata_test.py b/test/api_test/secure_test/metadata_test.py
deleted file mode 100644
index b2c50090e..000000000
--- a/test/api_test/secure_test/metadata_test.py
+++ /dev/null
@@ -1,867 +0,0 @@
-import pytest
-import json
-import httpx
-import unittest
-import datetime
-from unittest.mock import ANY, AsyncMock, patch
-from fastapi.testclient import TestClient
-from obspy import UTCDateTime
-
-from geomagio.api.secure import app
-from geomagio.api.secure.SessionMiddleware import SessionMiddleware
-from geomagio.metadata.Metadata import Metadata
-from geomagio.api.db.MetadataDatabaseFactory import MetadataDatabaseFactory
-
-client = TestClient(app)
-
-
-@pytest.mark.asyncio
-@patch("databases.Database.execute", new_callable=AsyncMock)
-async def test_create_metadata_valid_session(
-    valid_token, valid_userinfo, valid_reading_metadata, mocker
-):
-    valid_session = {"token": valid_token, "user": valid_userinfo}
-    mock_get_session = mocker.AsyncMock(return_value=valid_session)
-    mocker.patch.object(SessionMiddleware, "get_session", mock_get_session)
-
-    mock_save_session = mocker.AsyncMock(return_value=valid_session)
-    mocker.patch.object(SessionMiddleware, "save_session", mock_save_session)
-
-    body = json.dumps(valid_reading_metadata)
-
-    response = client.post(
-        url="/metadata", headers={"Cookie": "PHPSESSID=valid_session_id"}, content=body
-    )
-
-    assert response.status_code == 201
-    mock_get_session.assert_called_once_with("valid_session_id")
-
-    returned_metadata = Metadata(**response.json())
-    assert returned_metadata.id == 1
-    assert returned_metadata.category == "reading"
-    assert returned_metadata.starttime == UTCDateTime("2024-11-07T12:12:12Z")
-    assert returned_metadata.endtime == UTCDateTime("2024-11-07T20:20:20Z")
-    assert returned_metadata.network == "NT"
-    assert returned_metadata.station == "BOU"
-    assert returned_metadata.status == "new"
-    assert returned_metadata.priority == 1
-
-
-@pytest.mark.asyncio
-@patch("databases.Database.execute", new_callable=AsyncMock)
-async def test_create_metadata_with_id(
-    valid_token, valid_userinfo, valid_reading_metadata, mocker
-):
-    # input id is removed and replaced with new db id
-    valid_session = {"token": valid_token, "user": valid_userinfo}
-    mock_get_session = mocker.AsyncMock(return_value=valid_session)
-    mocker.patch.object(SessionMiddleware, "get_session", mock_get_session)
-
-    mock_save_session = mocker.AsyncMock(return_value=valid_session)
-    mocker.patch.object(SessionMiddleware, "save_session", mock_save_session)
-
-    valid_reading_metadata["id"] = 1234
-    body = json.dumps(valid_reading_metadata)
-
-    response = client.post(
-        url="/metadata", headers={"Cookie": "PHPSESSID=valid_session_id"}, content=body
-    )
-
-    assert response.status_code == 201
-    mock_get_session.assert_called_once_with("valid_session_id")
-
-    returned_metadata = Metadata(**response.json())
-    assert returned_metadata.id == 1
-    assert returned_metadata.category == "reading"
-    assert returned_metadata.starttime == UTCDateTime("2024-11-07T12:12:12Z")
-    assert returned_metadata.endtime == UTCDateTime("2024-11-07T20:20:20Z")
-    assert returned_metadata.network == "NT"
-    assert returned_metadata.station == "BOU"
-    assert returned_metadata.status == "new"
-    assert returned_metadata.priority == 1
-
-
-@pytest.mark.asyncio
-@patch("databases.Database.execute", new_callable=AsyncMock)
-async def test_create_metadata_valid_auth_token(
-    valid_token, valid_userinfo, valid_gitlab_user, valid_reading_metadata, mocker
-):
-    valid_user_response = httpx.Response(status_code=200, json=valid_gitlab_user)
-
-    valid_groups = [
-        {
-            "id": 1,
-            "full_path": "group1",
-        },
-        {"id": 2, "full_path": "group2"},
-    ]
-
-    valid_groups_response = httpx.Response(status_code=200, json=valid_groups)
-
-    mock_gitlab_request = mocker.AsyncMock(
-        side_effect=[
-            valid_user_response,
-            valid_groups_response,
-            httpx.Response(status_code=200, json={}),
-        ]
-    )
-    mocker.patch("httpx.AsyncClient.get", mock_gitlab_request)
-
-    valid_session = {"token": valid_token, "user": valid_userinfo}
-
-    mock_save_session = mocker.AsyncMock(return_value=valid_session)
-    mocker.patch.object(SessionMiddleware, "save_session", mock_save_session)
-
-    body = json.dumps(valid_reading_metadata)
-
-    response = client.post(
-        url="/metadata", headers={"Authorization": "auth_token"}, content=body
-    )
-
-    session_user = {
-        "email": "test_email",
-        "sub": 1234,
-        "groups": ["group1", "group2"],
-        "name": "test_user",
-        "nickname": "test_user_nickname",
-        "picture": "http://picture_url",
-    }
-
-    assert response.status_code == 201
-    mock_save_session.assert_called_once_with(ANY, {"user": session_user})
-
-    returned_metadata = Metadata(**response.json())
-    assert returned_metadata.id == 1
-    assert returned_metadata.category == "reading"
-    assert returned_metadata.starttime == UTCDateTime("2024-11-07T12:12:12Z")
-    assert returned_metadata.endtime == UTCDateTime("2024-11-07T20:20:20Z")
-    assert returned_metadata.network == "NT"
-    assert returned_metadata.station == "BOU"
-    assert returned_metadata.status == "new"
-    assert returned_metadata.priority == 1
-
-
-@pytest.mark.asyncio
-async def test_create_metadata_no_session_or_token(valid_reading_metadata, mocker):
-    mock_get_session = mocker.AsyncMock()
-    mocker.patch.object(SessionMiddleware, "get_session", mock_get_session)
-
-    body = json.dumps(valid_reading_metadata)
-
-    # do not pass in cookie or auth
-    response = client.post(url="/metadata", content=body)
-    assert response.status_code == 401
-
-    mock_get_session.assert_not_called()
-
-
-@pytest.mark.asyncio
-async def test_create_metadata_with_no_session_and_invalid_token(mocker):
-    mock_get_session = mocker.AsyncMock()
-    mocker.patch.object(SessionMiddleware, "get_session", mock_get_session)
-
-    mock_gitlab_request = mocker.AsyncMock(return_value=None)
-    mocker.patch("geomagio.api.secure.login.get_gitlab_user", mock_gitlab_request)
-
-    response = client.post(
-        url="/metadata", headers={"Authorization": "invalid_gitlab_token"}
-    )
-    assert response.status_code == 401
-
-    mock_get_session.assert_not_called()
-    mock_gitlab_request.assert_called_once_with(token="invalid_gitlab_token")
-
-
-@pytest.mark.asyncio
-async def test_create_metadata_invalid_session(mocker):
-    # mock invalid session. this is created when users GET /metadata without logging in
-    invalid_session = {
-        "redirect_uri": "redirect_uri",
-        "nonce": "nonce_str",
-        "url": "test_url",
-    }
-    mock_get_session = mocker.AsyncMock(return_value=invalid_session)
-    mocker.patch.object(SessionMiddleware, "get_session", mock_get_session)
-
-    mock_save_session = mocker.AsyncMock()
-    mocker.patch.object(SessionMiddleware, "save_session", mock_save_session)
-
-    response = client.post(
-        url="/metadata", headers={"Cookie": "PHPSESSID=invalid_session_id"}
-    )
-
-    assert response.status_code == 401
-    mock_get_session.assert_called_once_with("invalid_session_id")
-
-
-@pytest.mark.asyncio
-@patch("databases.Database.transaction")
-@patch("databases.Database.fetch_all", new_callable=AsyncMock)
-@patch("databases.Database.execute", new_callable=AsyncMock)
-async def test_update_metadata(
-    mock_execute,
-    mock_fetch,
-    mock_transaction,
-    valid_token,
-    valid_userinfo,
-    valid_reading_metadata,
-    mocker,
-):
-    valid_userinfo["groups"] = ["ghsc/geomag/operations/roles/reviewer"]
-    valid_session = {"token": valid_token, "user": valid_userinfo}
-    mock_get_session = mocker.AsyncMock(return_value=valid_session)
-    mocker.patch.object(SessionMiddleware, "get_session", mock_get_session)
-
-    mock_save_session = mocker.AsyncMock(return_value=valid_session)
-    mocker.patch.object(SessionMiddleware, "save_session", mock_save_session)
-
-    valid_reading_metadata["id"] = 1234
-    mock_fetch.side_effect = [
-        [
-            {
-                "id": 1234,
-                "category": "instrument",
-                "data_valid": True,
-                "priority": 1,
-            }
-        ],
-        [valid_reading_metadata],
-    ]
-    mock_execute.side_effect = [1, None]
-
-    body = json.dumps(valid_reading_metadata)
-
-    response = client.put(
-        url="/metadata/1234",
-        headers={"Cookie": "PHPSESSID=valid_session_id"},
-        content=body,
-    )
-    assert response.status_code == 200
-
-    returned_metadata = Metadata(**response.json())
-    returned_metadata.id = 1234
-    returned_metadata.category = "reading"
-
-    assert mock_fetch.call_count == 2
-    assert mock_execute.call_count == 2
-
-    # the query for both fetch_all requests should be the same
-    fetch_called_params = mock_fetch.call_args.args[0].compile().params
-    assert fetch_called_params["id_1"] == 1234
-
-    # assert save to metadata history is correct
-    insert_called_params = mock_execute.call_args_list[0].args[0].compile().params
-    assert insert_called_params["metadata_id"] == 1234
-    assert insert_called_params["category"] == "instrument"
-    assert insert_called_params["data_valid"] == True
-    assert insert_called_params["priority"] == 1
-
-    # assert update to metadata table is correct
-    update_called_params = mock_execute.call_args_list[1].args[0].compile().params
-    assert update_called_params["id_1"] == 1234
-    assert update_called_params["category"] == "reading"
-
-
-@pytest.mark.asyncio
-@patch("databases.Database.transaction")
-@patch("databases.Database.fetch_all", new_callable=AsyncMock)
-@patch("databases.Database.execute", new_callable=AsyncMock)
-async def test_update_metadata_valid_auth_token(
-    mock_execute,
-    mock_fetch,
-    mock_transaction,
-    valid_token,
-    valid_userinfo,
-    valid_gitlab_user,
-    valid_reading_metadata,
-    mocker,
-):
-    valid_user_response = httpx.Response(status_code=200, json=valid_gitlab_user)
-
-    valid_groups = [
-        {
-            "id": 1,
-            "full_path": "group1",
-        },
-        {"id": 2, "full_path": "ghsc/geomag/operations/roles/reviewer"},
-    ]
-
-    valid_groups_response = httpx.Response(status_code=200, json=valid_groups)
-
-    mock_gitlab_request = mocker.AsyncMock(
-        side_effect=[
-            valid_user_response,
-            valid_groups_response,
-            httpx.Response(status_code=200, json={}),
-        ]
-    )
-    mocker.patch("httpx.AsyncClient.get", mock_gitlab_request)
-
-    valid_session = {"token": valid_token, "user": valid_userinfo}
-
-    mock_save_session = mocker.AsyncMock(return_value=valid_session)
-    mocker.patch.object(SessionMiddleware, "save_session", mock_save_session)
-
-    valid_reading_metadata["id"] = 1234
-    mock_fetch.side_effect = [
-        [
-            {
-                "id": 1234,
-                "category": "instrument",
-                "data_valid": True,
-                "priority": 1,
-            }
-        ],
-        [valid_reading_metadata],
-    ]
-    mock_execute.side_effect = [1, None]
-
-    body = json.dumps(valid_reading_metadata)
-
-    response = client.put(
-        url="/metadata/1234", headers={"Authorization": "auth_token"}, content=body
-    )
-
-    session_user = {
-        "email": "test_email",
-        "sub": 1234,
-        "groups": ["group1", "ghsc/geomag/operations/roles/reviewer"],
-        "name": "test_user",
-        "nickname": "test_user_nickname",
-        "picture": "http://picture_url",
-    }
-
-    assert response.status_code == 200
-    mock_save_session.assert_called_once_with(ANY, {"user": session_user})
-
-    returned_metadata = Metadata(**response.json())
-    returned_metadata.id = 1234
-    returned_metadata.category = "reading"
-
-    assert mock_fetch.call_count == 2
-    assert mock_execute.call_count == 2
-
-    # the query for both fetch_all requests should be the same
-    fetch_called_params = mock_fetch.call_args.args[0].compile().params
-    assert fetch_called_params["id_1"] == 1234
-
-    # assert save to metadata history is correct
-    insert_called_params = mock_execute.call_args_list[0].args[0].compile().params
-    assert insert_called_params["metadata_id"] == 1234
-    assert insert_called_params["category"] == "instrument"
-    assert insert_called_params["data_valid"] == True
-    assert insert_called_params["priority"] == 1
-
-    # assert update to metadata table is correct
-    update_called_params = mock_execute.call_args_list[1].args[0].compile().params
-    assert update_called_params["id_1"] == 1234
-    assert update_called_params["category"] == "reading"
-
-
-@pytest.mark.asyncio
-@patch("databases.Database.transaction")
-@patch("databases.Database.fetch_all", new_callable=AsyncMock)
-@patch("databases.Database.execute", new_callable=AsyncMock)
-async def test_update_metadata_no_existing_metadata(
-    mock_execute,
-    mock_fetch,
-    mock_transaction,
-    valid_token,
-    valid_userinfo,
-    valid_reading_metadata,
-    mocker,
-):
-    valid_userinfo["groups"] = ["ghsc/geomag/operations/roles/reviewer"]
-    valid_session = {"token": valid_token, "user": valid_userinfo}
-    mock_get_session = mocker.AsyncMock(return_value=valid_session)
-    mocker.patch.object(SessionMiddleware, "get_session", mock_get_session)
-
-    mock_save_session = mocker.AsyncMock(return_value=valid_session)
-    mocker.patch.object(SessionMiddleware, "save_session", mock_save_session)
-
-    valid_reading_metadata["id"] = 1234
-    mock_fetch.return_value = []
-
-    body = json.dumps(valid_reading_metadata)
-
-    # expect the update to raise because no existing metadata matches the id
-    with pytest.raises(ValueError):
-        client.put(
-            url="/metadata/1234",
-            headers={"Cookie": "PHPSESSID=valid_session_id"},
-            content=body,
-        )
-
-
-@pytest.mark.asyncio
-async def test_update_metadata_no_valid_group(
-    valid_token, valid_userinfo, valid_reading_metadata, mocker
-):
-    valid_session = {"token": valid_token, "user": valid_userinfo}
-    mock_get_session = mocker.AsyncMock(return_value=valid_session)
-    mocker.patch.object(SessionMiddleware, "get_session", mock_get_session)
-
-    mock_save_session = mocker.AsyncMock(return_value=valid_session)
-    mocker.patch.object(SessionMiddleware, "save_session", mock_save_session)
-
-    body = json.dumps(valid_reading_metadata)
-
-    response = client.put(
-        url="/metadata/1234",
-        headers={"Cookie": "PHPSESSID=valid_session_id"},
-        content=body,
-    )
-    assert response.status_code == 403
-
-
-@pytest.mark.asyncio
-async def test_update_metadata_no_session(
-    valid_token, valid_userinfo, valid_reading_metadata, mocker
-):
-    valid_session = {"token": valid_token, "user": valid_userinfo}
-    mock_get_session = mocker.AsyncMock(return_value=valid_session)
-    mocker.patch.object(SessionMiddleware, "get_session", mock_get_session)
-
-    mock_save_session = mocker.AsyncMock(return_value=valid_session)
-    mocker.patch.object(SessionMiddleware, "save_session", mock_save_session)
-
-    body = json.dumps(valid_reading_metadata)
-
-    response = client.put(url="/metadata/1234", content=body)
-    assert response.status_code == 401
-
-    mock_get_session.assert_not_called()
-
-
-@pytest.mark.asyncio
-async def test_update_metadata_invalid_session(mocker, valid_reading_metadata):
-    # mock invalid session. this is created when users GET /metadata without logging in
-    invalid_session = {
-        "redirect_uri": "redirect_uri",
-        "nonce": "nonce_str",
-        "url": "test_url",
-    }
-    mock_get_session = mocker.AsyncMock(return_value=invalid_session)
-    mocker.patch.object(SessionMiddleware, "get_session", mock_get_session)
-
-    mock_save_session = mocker.AsyncMock()
-    mocker.patch.object(SessionMiddleware, "save_session", mock_save_session)
-
-    body = json.dumps(valid_reading_metadata)
-
-    response = client.put(
-        url="/metadata/1234",
-        headers={"Cookie": "PHPSESSID=invalid_session_id"},
-        content=body,
-    )
-
-    assert response.status_code == 401
-    mock_get_session.assert_called_once_with("invalid_session_id")
-
-
-@pytest.mark.asyncio
-async def test_update_metadata_with_no_session_and_invalid_token(
-    mocker, valid_reading_metadata
-):
-    mock_get_session = mocker.AsyncMock()
-    mocker.patch.object(SessionMiddleware, "get_session", mock_get_session)
-
-    mock_gitlab_request = mocker.AsyncMock(return_value=None)
-    mocker.patch("geomagio.api.secure.login.get_gitlab_user", mock_gitlab_request)
-
-    body = json.dumps(valid_reading_metadata)
-
-    response = client.put(
-        url="/metadata/1234",
-        headers={"Authorization": "invalid_gitlab_token"},
-        content=body,
-    )
-    assert response.status_code == 401
-
-    mock_get_session.assert_not_called()
-    mock_gitlab_request.assert_called_once_with(token="invalid_gitlab_token")
-
-
-# the way the database class is set up prevents us from using pytest-mock here. unittest is a bit more
-# powerful but less intuitive and allows us to inspect the parameters to mocked database calls
-class TestMetadata(unittest.IsolatedAsyncioTestCase):
-    @patch("databases.Database.fetch_all", new_callable=AsyncMock)
-    async def test_get_metadata(self, mock_fetch):
-        mock_fetch.return_value = [
-            {
-                "id": 8372,
-                "station": "BOU",
-                "category": "instrument",
-                "network": "NT",
-                "channel": "F",
-                "location": "R0",
-                "data_valid": True,
-                "status": "new",
-                "metadata": {"metadata1": "metadata1"},
-            },
-            {
-                "id": 8376,
-                "station": "BOU",
-                "category": "instrument",
-                "network": "NT",
-                "channel": "F",
-                "location": "R0",
-                "data_valid": True,
-                "status": "new",
-                "metadata": {"metadata2": "metadata2"},
-            },
-        ]
-
-        response = client.get(
-            url="/metadata?station=BOU&category=instrument&network=NT&channel=F&location=R0&data_valid=true&status=new"
-        )
-        assert response.status_code == 200
-
-        mock_fetch.assert_called_once()
-        called_params = mock_fetch.call_args.args[0].compile().params
-
-        assert called_params["station_1"] == "BOU"
-        assert called_params["category_1"] == "instrument"
-        assert called_params["network_1"] == "NT"
-        assert called_params["channel_1"] == "F"
-        assert called_params["location_1"] == "R0"
-        assert called_params["status_1"] == ["new"]
-
-        returned_metadata = []
-
-        rows = response.json()
-        for metadata in rows:
-            returned_metadata.append(Metadata(**metadata))
-
-        assert returned_metadata[0].id == 8372
-        assert returned_metadata[1].id == 8376
-
-    @patch("databases.Database.fetch_all", new_callable=AsyncMock)
-    async def test_get_metadata_with_starttime(self, mock_fetch):
-        mock_fetch.return_value = [
-            {
-                "id": 8372,
-                "station": "BOU",
-                "category": "instrument",
-                "network": "NT",
-                "channel": "F",
-                "location": "R0",
-                "data_valid": True,
-                "status": "new",
-                "metadata": {"metadata1": "metadata1"},
-            },
-            {
-                "id": 8376,
-                "station": "BOU",
-                "category": "instrument",
-                "network": "NT",
-                "channel": "F",
-                "location": "R0",
-                "data_valid": True,
-                "status": "new",
-                "metadata": {"metadata2": "metadata2"},
-            },
-        ]
-        response = client.get(
-            url="/metadata?station=BOU&category=instrument&starttime=2024-11-06T04:27:40Z"
-        )
-        assert response.status_code == 200
-
-        mock_fetch.assert_called_once()
-        called_params = mock_fetch.call_args.args[0].compile().params
-
-        assert called_params["station_1"] == "BOU"
-        assert called_params["category_1"] == "instrument"
-        # assert endtime is set to starttime because the query matches rows where endtime is None or greater than starttime
-        assert called_params["endtime_1"] == datetime.datetime(
-            2024, 11, 6, 4, 27, 40, tzinfo=datetime.timezone.utc
-        )
-
-        returned_metadata = []
-
-        rows = response.json()
-        for metadata in rows:
-            returned_metadata.append(Metadata(**metadata))
-
-        assert returned_metadata[0].id == 8372
-        assert returned_metadata[1].id == 8376
-
-    @patch("databases.Database.fetch_all", new_callable=AsyncMock)
-    async def test_get_metadata_with_endtime(self, mock_fetch):
-        mock_fetch.return_value = [
-            {
-                "id": 8372,
-                "station": "BOU",
-                "category": "instrument",
-                "network": "NT",
-                "channel": "F",
-                "location": "R0",
-                "data_valid": True,
-                "status": "new",
-                "metadata": {"metadata1": "metadata1"},
-            },
-            {
-                "id": 8376,
-                "station": "BOU",
-                "category": "instrument",
-                "network": "NT",
-                "channel": "F",
-                "location": "R0",
-                "data_valid": True,
-                "status": "new",
-                "metadata": {"metadata2": "metadata2"},
-            },
-        ]
-        response = client.get(
-            url="/metadata?station=BOU&category=instrument&endtime=2024-12-06T04:27:40Z"
-        )
-        assert response.status_code == 200
-
-        mock_fetch.assert_called_once()
-        called_params = mock_fetch.call_args.args[0].compile().params
-
-        assert called_params["station_1"] == "BOU"
-        assert called_params["category_1"] == "instrument"
-        # assert starttime is set to endtime because the query matches rows where starttime is None or less than endtime
-        assert called_params["starttime_1"] == datetime.datetime(
-            2024, 12, 6, 4, 27, 40, tzinfo=datetime.timezone.utc
-        )
-
-        returned_metadata = []
-
-        rows = response.json()
-        for metadata in rows:
-            returned_metadata.append(Metadata(**metadata))
-
-        assert returned_metadata[0].id == 8372
-        assert returned_metadata[1].id == 8376
-
-    @patch("databases.Database.fetch_all", new_callable=AsyncMock)
-    async def test_get_metadata_with_created_after(self, mock_fetch):
-        mock_fetch.return_value = [
-            {
-                "id": 8372,
-                "station": "BOU",
-                "category": "instrument",
-                "network": "NT",
-                "channel": "F",
-                "location": "R0",
-                "data_valid": True,
-                "status": "new",
-                "metadata": {"metadata1": "metadata1"},
-            },
-            {
-                "id": 8376,
-                "station": "BOU",
-                "category": "instrument",
-                "network": "NT",
-                "channel": "F",
-                "location": "R0",
-                "data_valid": True,
-                "status": "new",
-                "metadata": {"metadata2": "metadata2"},
-            },
-        ]
-        response = client.get(
-            url="/metadata?station=BOU&category=instrument&created_after=2024-10-06T04:27:40Z"
-        )
-        assert response.status_code == 200
-
-        mock_fetch.assert_called_once()
-        called_params = mock_fetch.call_args.args[0].compile().params
-
-        assert called_params["station_1"] == "BOU"
-        assert called_params["category_1"] == "instrument"
-        # assert created_time is set to created_after because the query matches rows where created_time is greater than created_after
-        assert called_params["created_time_1"] == datetime.datetime(
-            2024, 10, 6, 4, 27, 40, tzinfo=datetime.timezone.utc
-        )
-
-        returned_metadata = []
-
-        rows = response.json()
-        for metadata in rows:
-            returned_metadata.append(Metadata(**metadata))
-
-        assert returned_metadata[0].id == 8372
-        assert returned_metadata[1].id == 8376
-
-    @patch("databases.Database.fetch_all", new_callable=AsyncMock)
-    async def test_get_metadata_with_created_before(self, mock_fetch):
-        mock_fetch.return_value = [
-            {
-                "id": 8372,
-                "station": "BOU",
-                "category": "instrument",
-                "network": "NT",
-                "channel": "F",
-                "location": "R0",
-                "data_valid": True,
-                "status": "new",
-                "metadata": {"metadata1": "metadata1"},
-            },
-            {
-                "id": 8376,
-                "station": "BOU",
-                "category": "instrument",
-                "network": "NT",
-                "channel": "F",
-                "location": "R0",
-                "data_valid": True,
-                "status": "new",
-                "metadata": {"metadata2": "metadata2"},
-            },
-        ]
-        response = client.get(
-            url="/metadata?station=BOU&category=instrument&created_before=2024-09-06T04:27:40Z"
-        )
-        assert response.status_code == 200
-
-        mock_fetch.assert_called_once()
-        called_params = mock_fetch.call_args.args[0].compile().params
-
-        assert called_params["station_1"] == "BOU"
-        assert called_params["category_1"] == "instrument"
-        # assert created_time is set to created_before because the query matches rows where created_time is less than created_before
-        assert called_params["created_time_1"] == datetime.datetime(
-            2024, 9, 6, 4, 27, 40, tzinfo=datetime.timezone.utc
-        )
-
-        returned_metadata = []
-
-        rows = response.json()
-        for metadata in rows:
-            returned_metadata.append(Metadata(**metadata))
-
-        assert returned_metadata[0].id == 8372
-        assert returned_metadata[1].id == 8376
-
-    @patch("databases.Database.fetch_all", new_callable=AsyncMock)
-    async def test_get_metadata_returns_empty(self, mock_fetch):
-        mock_fetch.return_value = []
-
-        response = client.get(
-            url="/metadata?station=BOU&category=instrument&created_before=2024-09-06T04:27:40Z"
-        )
-        assert response.status_code == 200
-
-        mock_fetch.assert_called_once()
-
-        # assert response is an empty list
-        assert response.json() == []
-
-    @patch("databases.Database.fetch_all", new_callable=AsyncMock)
-    async def test_get_metadata_by_id(self, mock_fetch):
-        mock_fetch.return_value = [{"id": 1234, "category": "instrument"}]
-        response = client.get(url="/metadata/1234")
-        assert response.status_code == 200
-
-        mock_fetch.assert_called_once()
-        called_params = mock_fetch.call_args.args[0].compile().params
-
-        assert called_params["id_1"] == 1234
-
-        returned_metadata = Metadata(**response.json())
-        assert returned_metadata.id == 1234
-        assert returned_metadata.category == "instrument"
-
-    @patch("databases.Database.fetch_all", new_callable=AsyncMock)
-    async def test_get_metadata_by_id_returns_empty(self, mock_fetch):
-        mock_fetch.return_value = []
-        with pytest.raises(ValueError):
-            client.get(url="/metadata/1234")
-
-        mock_fetch.assert_called_once()
-
-    @patch("databases.Database.fetch_one", new_callable=AsyncMock)
-    async def test_get_metadata_history_by_id(self, mock_fetch):
-        mock_fetch.return_value = {"id": 1234, "category": "instrument"}
-        response = client.get(url="/metadata/history/1234")
-        assert response.status_code == 200
-
-        mock_fetch.assert_called_once()
-        called_params = mock_fetch.call_args.args[0].compile().params
-
-        assert called_params["id_1"] == 1234
-
-        returned_metadata = Metadata(**response.json())
-        assert returned_metadata.id == 1234
-        assert returned_metadata.category == "instrument"
-
-    @patch("databases.Database.fetch_one", new_callable=AsyncMock)
-    async def test_get_metadata_history_by_id_returns_empty(self, mock_fetch):
-        mock_fetch.return_value = None
-
-        response = client.get(url="/metadata/history/1234")
-        assert response.status_code == 404
-
-        mock_fetch.assert_called_once()
-        called_params = mock_fetch.call_args.args[0].compile().params
-
-        assert called_params["id_1"] == 1234
-
-    @patch("databases.Database.transaction")
-    @patch("databases.Database.fetch_all", new_callable=AsyncMock)
-    async def test_get_metadata_history_by_metadata_id(
-        self, mock_fetch, mock_transaction
-    ):
-        mock_fetch.side_effect = [
-            [{"id": 242, "metadata_id": 1234, "category": "instrument"}],
-            [{"id": 1234, "category": "reading"}],
-        ]
-        response = client.get(url="/metadata/1234/history")
-        assert response.status_code == 200
-
-        assert mock_fetch.call_count == 2
-        called_params = mock_fetch.call_args.args[0].compile().params
-
-        assert called_params["id_1"] == 1234
-
-        returned_metadata = []
-
-        rows = response.json()
-        for metadata in rows:
-            returned_metadata.append(Metadata(**metadata))
-
-        # expect the metadata from the metadata table to be first
-        assert returned_metadata[0].id == 1234
-        assert returned_metadata[1].id == 242
-
-    @patch("databases.Database.transaction")
-    @patch("databases.Database.fetch_all", new_callable=AsyncMock)
-    async def test_get_metadata_history_by_metadata_id_no_history(
-        self, mock_fetch, mock_transaction
-    ):
-        mock_fetch.side_effect = [[], [{"id": 1234, "category": "reading"}]]
-        response = client.get(url="/metadata/1234/history")
-        assert response.status_code == 200
-
-        assert mock_fetch.call_count == 2
-        called_params = mock_fetch.call_args.args[0].compile().params
-
-        assert called_params["id_1"] == 1234
-
-        returned_metadata = []
-
-        rows = response.json()
-        for metadata in rows:
-            returned_metadata.append(Metadata(**metadata))
-
-        assert returned_metadata[0].id == 1234
-
-    @patch("databases.Database.transaction")
-    @patch("databases.Database.fetch_all", new_callable=AsyncMock)
-    async def test_get_metadata_history_by_metadata_id_no_history_or_metadata(
-        self, mock_fetch, mock_transaction
-    ):
-        mock_fetch.side_effect = [[], []]
-
-        with pytest.raises(ValueError):
-            client.get(url="/metadata/1234/history")
-
-        assert mock_fetch.call_count == 2
diff --git a/test/api_test/ws_test/data_test.py b/test/api_test/ws_test/data_test.py
index 765fa407b..dd7a24bde 100644
--- a/test/api_test/ws_test/data_test.py
+++ b/test/api_test/ws_test/data_test.py
@@ -5,7 +5,7 @@ from numpy.testing import assert_equal
 from obspy import UTCDateTime
 import pytest
 
-from geomagio.api.ws import app
+from geomagio.api.ws.ws_app import app
 from geomagio.api.ws.data import get_data_query
 from geomagio.api.ws.DataApiQuery import (
     DataApiQuery,
diff --git a/test/api_test/ws_test/elements_test.py b/test/api_test/ws_test/elements_test.py
index 5549fb76f..6f0ba79f3 100644
--- a/test/api_test/ws_test/elements_test.py
+++ b/test/api_test/ws_test/elements_test.py
@@ -1,6 +1,6 @@
 from fastapi.testclient import TestClient
 
-from geomagio.api.ws import app
+from geomagio.api.ws.ws_app import app
 
 client = TestClient(app)
 
diff --git a/test/api_test/ws_test/filter_test.py b/test/api_test/ws_test/filter_test.py
index 67a09a421..48cf0da56 100644
--- a/test/api_test/ws_test/filter_test.py
+++ b/test/api_test/ws_test/filter_test.py
@@ -5,7 +5,7 @@ from numpy.testing import assert_equal
 from obspy import UTCDateTime
 import pytest
 
-from geomagio.api.ws import app
+from geomagio.api.ws.ws_app import app
 from geomagio.api.ws.filter import get_filter_data_query
 from geomagio.api.ws.FilterApiQuery import FilterApiQuery
 from geomagio.api.ws.DataApiQuery import SamplingPeriod, OutputFormat, DataType
diff --git a/test/api_test/ws_test/observatories_test.py b/test/api_test/ws_test/observatories_test.py
index dbe95cda1..cdc556294 100644
--- a/test/api_test/ws_test/observatories_test.py
+++ b/test/api_test/ws_test/observatories_test.py
@@ -1,6 +1,6 @@
 from fastapi.testclient import TestClient
 
-from geomagio.api.ws import app
+from geomagio.api.ws.ws_app import app
 
 client = TestClient(app)
 
diff --git a/test/api_test/ws_test/variometers_test.py b/test/api_test/ws_test/variometers_test.py
index 163411efa..5d7ee4744 100644
--- a/test/api_test/ws_test/variometers_test.py
+++ b/test/api_test/ws_test/variometers_test.py
@@ -1,6 +1,6 @@
 from fastapi.testclient import TestClient
 
-from geomagio.api.ws import app
+from geomagio.api.ws.ws_app import app
 
 client = TestClient(app)
 
diff --git a/test/conftest.py b/test/conftest.py
new file mode 100644
index 000000000..b7a28d113
--- /dev/null
+++ b/test/conftest.py
@@ -0,0 +1,16 @@
+import pytest
+
+from sqlmodel import SQLModel, create_engine
+
+from geomagio.api.db.models.metadata import MetadataTable
+from geomagio.api.db.models.metadata_history import MetadataHistoryTable
+from geomagio.api.db.models.session import session
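+# NOTE: these imports look unused, but importing the model modules registers
+# their tables on SQLModel.metadata so create_all below can create them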
+
+# create a test engine that connects to an in-memory test database
+engine = create_engine("sqlite://")
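+# note: SQLAlchemy reuses a single connection per thread for in-memory SQLite,
+# so the schema created below remains available to tests using this engine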
+SQLModel.metadata.create_all(engine)
+
+
+@pytest.fixture()
+def test_engine():
+    return engine
diff --git a/test/db/MetadataDatabaseFactory_test.py b/test/db/MetadataDatabaseFactory_test.py
deleted file mode 100644
index 61e60299d..000000000
--- a/test/db/MetadataDatabaseFactory_test.py
+++ /dev/null
@@ -1,443 +0,0 @@
-import datetime
-import unittest
-from dateutil import tz
-from unittest.mock import AsyncMock, patch
-from databases import Database
-
-from obspy import UTCDateTime
-
-from geomagio.api.db import MetadataDatabaseFactory
-from geomagio.metadata import Metadata, MetadataCategory, MetadataQuery
-
-
-class TestMetadataDatabaseFactory(unittest.IsolatedAsyncioTestCase):
-
-    @patch("databases.Database.execute", new_callable=AsyncMock)
-    async def test_create_metadata_defaults(self, mock_execute):
-        test_data = Metadata(
-            category=MetadataCategory.INSTRUMENT,
-            created_by="test_metadata.py",
-            network="NT",
-            station="BDT",
-            metadata={
-                "type": "FGE",
-                "channels": {
-                    "U": [{"channel": "U_Volt", "offset": 0, "scale": 313.2}],
-                    "V": [{"channel": "V_Volt", "offset": 0, "scale": 312.3}],
-                    "W": [{"channel": "W_Volt", "offset": 0, "scale": 312.0}],
-                },
-                "electronics": {
-                    "serial": "E0542",
-                    "x-scale": 313.2,
-                    "y-scale": 312.3,
-                    "z-scale": 312.0,
-                    "temperature-scale": 0.01,
-                },
-                "sensor": {
-                    "serial": "S0419",
-                    "x-constant": 36958,
-                    "y-constant": 36849,
-                    "z-constant": 36811,
-                },
-            },
-        )
-
-        db = Database("sqlite:///:memory:")
-
-        await MetadataDatabaseFactory(database=db).create_metadata(test_data)
-
-        # assert data_valid, priority, and status are set to the correct defaults
-        expected_values = {
-            "category": "instrument",
-            "created_by": "test_metadata.py",
-            "network": "NT",
-            "station": "BDT",
-            "metadata": {
-                "type": "FGE",
-                "channels": {
-                    "U": [{"channel": "U_Volt", "offset": 0, "scale": 313.2}],
-                    "V": [{"channel": "V_Volt", "offset": 0, "scale": 312.3}],
-                    "W": [{"channel": "W_Volt", "offset": 0, "scale": 312.0}],
-                },
-                "electronics": {
-                    "serial": "E0542",
-                    "x-scale": 313.2,
-                    "y-scale": 312.3,
-                    "z-scale": 312.0,
-                    "temperature-scale": 0.01,
-                },
-                "sensor": {
-                    "serial": "S0419",
-                    "x-constant": 36958,
-                    "y-constant": 36849,
-                    "z-constant": 36811,
-                },
-            },
-            "data_valid": True,
-            "priority": 1,
-            "status": "new",
-        }
-
-        mock_execute.assert_called_once()
-        called_params = mock_execute.call_args.args[0].compile().params
-
-        assert called_params == expected_values
-
-    @patch("databases.Database.execute", new_callable=AsyncMock)
-    async def test_create_metadata_created_time(self, mock_execute):
-        now = UTCDateTime()
-        test_data = Metadata(
-            created_time=now,
-            category=MetadataCategory.INSTRUMENT,
-            created_by="test_metadata.py",
-            network="NT",
-            station="BDT",
-            metadata={
-                "type": "FGE",
-                "channels": {
-                    "U": [{"channel": "U_Volt", "offset": 0, "scale": 313.2}],
-                    "V": [{"channel": "V_Volt", "offset": 0, "scale": 312.3}],
-                    "W": [{"channel": "W_Volt", "offset": 0, "scale": 312.0}],
-                },
-                "electronics": {
-                    "serial": "E0542",
-                    "x-scale": 313.2,
-                    "y-scale": 312.3,
-                    "z-scale": 312.0,
-                    "temperature-scale": 0.01,
-                },
-                "sensor": {
-                    "serial": "S0419",
-                    "x-constant": 36958,
-                    "y-constant": 36849,
-                    "z-constant": 36811,
-                },
-            },
-        )
-
-        db = Database("sqlite:///:memory:")
-
-        await MetadataDatabaseFactory(database=db).create_metadata(test_data)
-
-        # assert the provided created_time is preserved and data_valid, priority, and status still default
-        expected_values = {
-            "created_time": datetime.datetime(
-                year=now.year,
-                month=now.month,
-                day=now.day,
-                hour=now.hour,
-                minute=now.minute,
-                second=now.second,
-                microsecond=now.microsecond,
-                tzinfo=tz.tzutc(),
-            ),
-            "category": "instrument",
-            "created_by": "test_metadata.py",
-            "network": "NT",
-            "station": "BDT",
-            "metadata": {
-                "type": "FGE",
-                "channels": {
-                    "U": [{"channel": "U_Volt", "offset": 0, "scale": 313.2}],
-                    "V": [{"channel": "V_Volt", "offset": 0, "scale": 312.3}],
-                    "W": [{"channel": "W_Volt", "offset": 0, "scale": 312.0}],
-                },
-                "electronics": {
-                    "serial": "E0542",
-                    "x-scale": 313.2,
-                    "y-scale": 312.3,
-                    "z-scale": 312.0,
-                    "temperature-scale": 0.01,
-                },
-                "sensor": {
-                    "serial": "S0419",
-                    "x-constant": 36958,
-                    "y-constant": 36849,
-                    "z-constant": 36811,
-                },
-            },
-            "data_valid": True,
-            "priority": 1,
-            "status": "new",
-        }
-
-        mock_execute.assert_called_once()
-        called_params = mock_execute.call_args.args[0].compile().params
-
-        assert called_params == expected_values
-
-    @patch("databases.Database.execute", new_callable=AsyncMock)
-    async def test_create_metadata_with_ids(self, mock_execute):
-        test_data = Metadata(
-            id=1234,
-            metadata_id=5678,
-            category=MetadataCategory.INSTRUMENT,
-            created_by="test_metadata.py",
-            network="NT",
-            station="BDT",
-            metadata={
-                "type": "FGE",
-                "channels": {
-                    "U": [{"channel": "U_Volt", "offset": 0, "scale": 313.2}],
-                    "V": [{"channel": "V_Volt", "offset": 0, "scale": 312.3}],
-                    "W": [{"channel": "W_Volt", "offset": 0, "scale": 312.0}],
-                },
-                "electronics": {
-                    "serial": "E0542",
-                    "x-scale": 313.2,
-                    "y-scale": 312.3,
-                    "z-scale": 312.0,
-                    "temperature-scale": 0.01,
-                },
-                "sensor": {
-                    "serial": "S0419",
-                    "x-constant": 36958,
-                    "y-constant": 36849,
-                    "z-constant": 36811,
-                },
-            },
-        )
-
-        db = Database("sqlite:///:memory:")
-
-        await MetadataDatabaseFactory(database=db).create_metadata(test_data)
-
-        # assert id and metadata_id are removed
-        expected_values = {
-            "category": "instrument",
-            "created_by": "test_metadata.py",
-            "network": "NT",
-            "station": "BDT",
-            "metadata": {
-                "type": "FGE",
-                "channels": {
-                    "U": [{"channel": "U_Volt", "offset": 0, "scale": 313.2}],
-                    "V": [{"channel": "V_Volt", "offset": 0, "scale": 312.3}],
-                    "W": [{"channel": "W_Volt", "offset": 0, "scale": 312.0}],
-                },
-                "electronics": {
-                    "serial": "E0542",
-                    "x-scale": 313.2,
-                    "y-scale": 312.3,
-                    "z-scale": 312.0,
-                    "temperature-scale": 0.01,
-                },
-                "sensor": {
-                    "serial": "S0419",
-                    "x-constant": 36958,
-                    "y-constant": 36849,
-                    "z-constant": 36811,
-                },
-            },
-            "data_valid": True,
-            "priority": 1,
-            "status": "new",
-        }
-
-        mock_execute.assert_called_once()
-        called_params = mock_execute.call_args.args[0].compile().params
-
-        assert called_params == expected_values
-
-    @patch("databases.Database.execute", new_callable=AsyncMock)
-    async def test_create_metadata_with_starttime_and_endtime(self, mock_execute):
-        now = UTCDateTime()
-        t = UTCDateTime(2020, 1, 3, 17, 24, 40)
-        test_data = Metadata(
-            created_by="test_metadata.py",
-            created_time=now,
-            starttime=t,
-            endtime=t,
-            network="NT",
-            station="BOU",
-            channel=None,
-            location=None,
-            category=MetadataCategory.READING,
-            priority=1,
-            data_valid=True,
-            metadata={},
-        )
-
-        db = Database("sqlite:///:memory:")
-
-        await MetadataDatabaseFactory(database=db).create_metadata(test_data)
-
-        # assert starttime and endtime are converted to timezone-aware datetimes
-        expected_values = {
-            "category": "reading",
-            "created_time": datetime.datetime(
-                year=now.year,
-                month=now.month,
-                day=now.day,
-                hour=now.hour,
-                minute=now.minute,
-                second=now.second,
-                microsecond=now.microsecond,
-                tzinfo=tz.tzutc(),
-            ),
-            "created_by": "test_metadata.py",
-            "starttime": datetime.datetime(
-                year=t.year,
-                month=t.month,
-                day=t.day,
-                hour=t.hour,
-                minute=t.minute,
-                second=t.second,
-                microsecond=t.microsecond,
-                tzinfo=tz.tzutc(),
-            ),
-            "endtime": datetime.datetime(
-                year=t.year,
-                month=t.month,
-                day=t.day,
-                hour=t.hour,
-                minute=t.minute,
-                second=t.second,
-                microsecond=t.microsecond,
-                tzinfo=tz.tzutc(),
-            ),
-            "network": "NT",
-            "station": "BOU",
-            "metadata": {},
-            "data_valid": True,
-            "priority": 1,
-            "status": "new",
-        }
-
-        mock_execute.assert_called_once()
-        called_params = mock_execute.call_args.args[0].compile().params
-
-        assert called_params == expected_values
-
-    @patch("databases.Database.execute", new_callable=AsyncMock)
-    async def test_create_metadata_with_times_as_datetime(self, mock_execute):
-        # assert naive datetimes are made timezone-aware when tzinfo is not explicitly set by the user
-        s = datetime.datetime(2020, 1, 3, 17, 24, 40)
-        e = datetime.datetime(2020, 1, 3, 17, 24, 40, tzinfo=tz.tzutc())
-        test_data = Metadata(
-            created_by="test_metadata.py",
-            starttime=s,
-            endtime=e,
-            network="NT",
-            station="BOU",
-            channel=None,
-            location=None,
-            category=MetadataCategory.READING,
-            priority=1,
-            data_valid=True,
-            metadata={},
-        )
-
-        db = Database("sqlite:///:memory:")
-
-        await MetadataDatabaseFactory(database=db).create_metadata(test_data)
-
-        mock_execute.assert_called_once()
-        called_params = mock_execute.call_args.args[0].compile().params
-
-        assert called_params["starttime"] == datetime.datetime(
-            year=s.year,
-            month=s.month,
-            day=s.day,
-            hour=s.hour,
-            minute=s.minute,
-            second=s.second,
-            microsecond=s.microsecond,
-            tzinfo=tz.tzutc(),
-        )
-        assert called_params["endtime"] == datetime.datetime(
-            year=e.year,
-            month=e.month,
-            day=e.day,
-            hour=e.hour,
-            minute=e.minute,
-            second=e.second,
-            microsecond=e.microsecond,
-            tzinfo=tz.tzutc(),
-        )
-
-    @patch("databases.Database.execute", new_callable=AsyncMock)
-    @patch("databases.Database.fetch_all", new_callable=AsyncMock)
-    async def test_update_metadata_defaults(self, mock_fetch_all, mock_execute):
-        test_data = Metadata(
-            category=MetadataCategory.INSTRUMENT,
-            network="NT",
-            station="BDT",
-            metadata={
-                "type": "FGE",
-                "channels": {
-                    "U": [{"channel": "U_Volt", "offset": 0, "scale": 313.2}],
-                    "V": [{"channel": "V_Volt", "offset": 0, "scale": 312.3}],
-                    "W": [{"channel": "W_Volt", "offset": 0, "scale": 312.0}],
-                },
-                "electronics": {
-                    "serial": "E0542",
-                    "x-scale": 313.2,
-                    "y-scale": 312.3,
-                    "z-scale": 312.0,
-                    "temperature-scale": 0.01,
-                },
-                "sensor": {
-                    "serial": "S0419",
-                    "x-constant": 36958,
-                    "y-constant": 36849,
-                    "z-constant": 36811,
-                },
-            },
-        )
-
-        db = Database("sqlite:///:memory:")
-        yesterday = datetime.datetime(2024, 11, 1, 8, 15, tzinfo=tz.tzutc())
-
-        mock_fetch_all.return_value = (
-            {
-                "id": 1234,
-                "created_time": yesterday,
-                "category": "instrument",
-                "network": "NT",
-                "station": "BDT",
-                "metadata": {
-                    "foo": "bar",
-                },
-            },
-        )
-
-        await MetadataDatabaseFactory(database=db).update_metadata(
-            meta=test_data, updated_by="test_user"
-        )
-
-        assert mock_fetch_all.call_count == 2
-        assert mock_execute.call_count == 2
-
-        first_called_params = mock_execute.call_args_list[0].args[0].compile().params
-        second_called_params = mock_execute.call_args_list[1].args[0].compile().params
-
-        assert first_called_params["metadata_id"] == 1234
-        assert first_called_params["created_time"] == yesterday
-        assert first_called_params["metadata"] == {"foo": "bar"}
-
-        assert second_called_params["updated_by"] == "test_user"
-        assert second_called_params["updated_time"] is not None
-        assert second_called_params["metadata"] == test_data.metadata
-
-    @patch("databases.Database.fetch_all", new_callable=AsyncMock)
-    async def test_get_metadata(self, mock_fetch_all):
-        test_query = MetadataQuery(
-            category=MetadataCategory.INSTRUMENT,
-            station="BSL",
-            starttime=UTCDateTime(2020, 1, 20),
-        )
-
-        db = Database("sqlite:///:memory:")
-
-        await MetadataDatabaseFactory(database=db).get_metadata(params=test_query)
-
-        mock_fetch_all.assert_called_once()
-
-        called_params = mock_fetch_all.call_args.args[0].compile().params
-
-        assert called_params["category_1"] == "instrument"
-        assert called_params["station_1"] == "BSL"
-        assert called_params["endtime_1"] == datetime.datetime(
-            2020, 1, 20, tzinfo=tz.tzutc()
-        )
diff --git a/test/edge_test/EdgeFactory_test.py b/test/edge_test/EdgeFactory_test.py
index 52a4f662f..c836c1429 100644
--- a/test/edge_test/EdgeFactory_test.py
+++ b/test/edge_test/EdgeFactory_test.py
@@ -1,9 +1,10 @@
 """Tests for EdgeFactory.py"""
 
 from obspy.core import Stream, Trace, UTCDateTime
-from geomagio.edge import EdgeFactory
 from numpy.testing import assert_equal
 
+from geomagio.edge.EdgeFactory import EdgeFactory
+
 
 def test_get_timeseries():
     """edge_test.EdgeFactory_test.test_get_timeseries()"""
diff --git a/test/edge_test/FDSNFactory_test.py b/test/edge_test/FDSNFactory_test.py
index f409674a5..0eb79c28d 100644
--- a/test/edge_test/FDSNFactory_test.py
+++ b/test/edge_test/FDSNFactory_test.py
@@ -8,7 +8,7 @@ from obspy.core import Stream, Trace, UTCDateTime
 from obspy.core.inventory import Inventory, Network, Station, Channel, Site
 import pytest
 
-from geomagio.edge import FDSNFactory
+from geomagio.edge.FDSNFactory import FDSNFactory
 from geomagio.metadata.instrument.InstrumentCalibrations import (
     get_instrument_calibrations,
 )
diff --git a/test/edge_test/FDSNSNCL_test.py b/test/edge_test/FDSNSNCL_test.py
index 4d66363ab..22e13a8b4 100644
--- a/test/edge_test/FDSNSNCL_test.py
+++ b/test/edge_test/FDSNSNCL_test.py
@@ -1,7 +1,6 @@
 import pytest
 
-from geomagio.edge import FDSNSNCL
-from geomagio.edge.FDSNSNCL import get_FDSN_channel, get_location
+from geomagio.edge.FDSNSNCL import FDSNSNCL, get_FDSN_channel, get_location
 
 
 def test_data_type():
diff --git a/test/edge_test/IRISSNCL_test.py b/test/edge_test/IRISSNCL_test.py
index 06021877a..60f9949ee 100644
--- a/test/edge_test/IRISSNCL_test.py
+++ b/test/edge_test/IRISSNCL_test.py
@@ -1,7 +1,6 @@
 import pytest
 
-from geomagio.edge import IRISSNCL
-from geomagio.edge.IRISSNCL import get_iris_channel, get_location
+from geomagio.edge.IRISSNCL import IRISSNCL, get_iris_channel, get_location
 
 
 def test_data_type():
diff --git a/test/edge_test/MiniSeedFactory_test.py b/test/edge_test/MiniSeedFactory_test.py
index 75f895e5c..d0e187348 100644
--- a/test/edge_test/MiniSeedFactory_test.py
+++ b/test/edge_test/MiniSeedFactory_test.py
@@ -8,8 +8,9 @@ from numpy.testing import assert_equal, assert_array_equal
 from obspy.core import read, Stats, Stream, Trace, UTCDateTime
 import pytest
 
-from geomagio import TimeseriesUtility
-from geomagio.edge import MiniSeedFactory, MiniSeedInputClient
+from geomagio import TimeseriesUtility
+from geomagio.edge.MiniSeedFactory import MiniSeedFactory
+from geomagio.edge.MiniSeedInputClient import MiniSeedInputClient
 from geomagio.metadata.instrument.InstrumentCalibrations import (
     get_instrument_calibrations,
 )
diff --git a/test/edge_test/RawInputClient_test.py b/test/edge_test/RawInputClient_test.py
index 876bc5694..d2e3c67e9 100644
--- a/test/edge_test/RawInputClient_test.py
+++ b/test/edge_test/RawInputClient_test.py
@@ -1,10 +1,8 @@
 """Tests for RawInputClient.py"""
 
 import numpy
-from datetime import datetime
-import logging
 from obspy.core import Stats, Trace, UTCDateTime
-from geomagio.edge import EdgeFactory, RawInputClient
+from geomagio.edge.RawInputClient import RawInputClient
 from numpy.testing import assert_equal
 
 
diff --git a/test/edge_test/mseed_test_clients.py b/test/edge_test/mseed_test_clients.py
index d706113d2..5d42627ad 100644
--- a/test/edge_test/mseed_test_clients.py
+++ b/test/edge_test/mseed_test_clients.py
@@ -3,7 +3,7 @@ from obspy import Stream, UTCDateTime
 from obspy.clients.neic.client import Client
 
 from geomagio import TimeseriesUtility
-from geomagio.edge import SNCL
+from geomagio.edge.SNCL import SNCL
 
 
 class MockMiniSeedClient(Client):
diff --git a/test/iaga2002_test/IAGA2002Factory_test.py b/test/iaga2002_test/IAGA2002Factory_test.py
index 7c1531dbf..189dd6399 100644
--- a/test/iaga2002_test/IAGA2002Factory_test.py
+++ b/test/iaga2002_test/IAGA2002Factory_test.py
@@ -1,7 +1,7 @@
 """Tests for IAGA2002Factory class"""
 
 from numpy.testing import assert_equal
-from geomagio.iaga2002 import IAGA2002Factory
+from geomagio.iaga2002.IAGA2002Factory import IAGA2002Factory
 
 
 def test_parse_empty():
diff --git a/test/iaga2002_test/IAGA2002Parser_test.py b/test/iaga2002_test/IAGA2002Parser_test.py
index 04ccc9c3a..17e9b40e3 100644
--- a/test/iaga2002_test/IAGA2002Parser_test.py
+++ b/test/iaga2002_test/IAGA2002Parser_test.py
@@ -1,7 +1,7 @@
 """Tests for the IAGA2002 Parser class."""
 
 from numpy.testing import assert_equal
-from geomagio.iaga2002 import IAGA2002Parser
+from geomagio.iaga2002.IAGA2002Parser import IAGA2002Parser
 
 
 IAGA2002_EXAMPLE = """ Format                 IAGA-2002                                    |
diff --git a/test/imfjson_test/IMFJSONWriter_test.py b/test/imfjson_test/IMFJSONWriter_test.py
index 7792ded45..c412bddba 100644
--- a/test/imfjson_test/IMFJSONWriter_test.py
+++ b/test/imfjson_test/IMFJSONWriter_test.py
@@ -1,8 +1,8 @@
 """Tests for the IMFJSON Writer class."""
 
 from numpy.testing import assert_equal
-from geomagio.iaga2002 import IAGA2002Factory
-from geomagio.imfjson import IMFJSONWriter
+from geomagio.iaga2002.IAGA2002Factory import IAGA2002Factory
+from geomagio.imfjson.IMFJSONWriter import IMFJSONWriter
 import numpy as np
 
 
diff --git a/test/imfv122_test/IMFV122Parser_test.py b/test/imfv122_test/IMFV122Parser_test.py
index 2a3d87a1a..d92848cd2 100644
--- a/test/imfv122_test/IMFV122Parser_test.py
+++ b/test/imfv122_test/IMFV122Parser_test.py
@@ -1,7 +1,7 @@
 """Tests for the IMFV122 Parser class."""
 
 from numpy.testing import assert_equal
-from geomagio.imfv122 import IMFV122Parser
+from geomagio.imfv122.IMFV122Parser import IMFV122Parser
 from obspy.core import UTCDateTime
 
 
diff --git a/test/imfv283_test/IMFV283Parser_test.py b/test/imfv283_test/IMFV283Parser_test.py
index fb7e5395a..f6cd01e83 100644
--- a/test/imfv283_test/IMFV283Parser_test.py
+++ b/test/imfv283_test/IMFV283Parser_test.py
@@ -5,7 +5,8 @@ from __future__ import unicode_literals
 from numpy.testing import assert_equal
 from obspy import UTCDateTime
 
-from geomagio.imfv283 import IMFV283Parser, imfv283_codes
+from geomagio.imfv283.IMFV283Parser import IMFV283Parser
+from geomagio.imfv283.imfv283_codes import OBSERVATORIES
 
 
 IMFV283_EXAMPLE_VIC = (
@@ -45,7 +46,7 @@ def test_parse_msg_header():
 def test_parse_goes_header_VIC():
     """imfv283_test.IMFV283Parser_test.test_parse_goes_header_VIC()"""
     goes_data = IMFV283Parser()._process_ness_block(
-        IMFV283_EXAMPLE_VIC, imfv283_codes.OBSERVATORIES["VIC"], 191
+        IMFV283_EXAMPLE_VIC, OBSERVATORIES["VIC"], 191
     )
     actual_goes_header = IMFV283Parser()._parse_goes_header(goes_data)
 
@@ -64,7 +65,7 @@ def test_parse_goes_header_VIC():
 def test_parse_goes_header_STJ():
     """imfv283_test.IMFV283Parser_test.test_parse_goes_header_STJ()"""
     goes_data = IMFV283Parser()._process_ness_block(
-        IMFV283_EXAMPLE_STJ, imfv283_codes.OBSERVATORIES["STJ"], 191
+        IMFV283_EXAMPLE_STJ, OBSERVATORIES["STJ"], 191
     )
     actual_goes_header = IMFV283Parser()._parse_goes_header(goes_data)
 
diff --git a/test/metadata/MetadataFactory_test.py b/test/metadata/MetadataFactory_test.py
index cde42da53..268d98b93 100644
--- a/test/metadata/MetadataFactory_test.py
+++ b/test/metadata/MetadataFactory_test.py
@@ -1,7 +1,7 @@
 import datetime
 from obspy import UTCDateTime
 
-from geomagio.metadata import MetadataQuery
+from geomagio.api.db.models.metadata import MetadataQuery
 from geomagio.metadata.MetadataFactory import parse_params
 
 
@@ -14,8 +14,8 @@ def test_parse_params_with_UTCDateTime():
 
     params = parse_params(query)
 
-    assert params["starttime"] == "2024-11-07T00:00:00+00:00"
-    assert params["endtime"] == "2024-11-08T00:00:00+00:00"
+    assert params["starttime"] == "2024-11-07T00:00:00.000000Z"
+    assert params["endtime"] == "2024-11-08T00:00:00.000000Z"
 
 
 def test_parse_params_with_datetime():
@@ -27,5 +27,5 @@ def test_parse_params_with_datetime():
 
     params = parse_params(query)
 
-    assert params["starttime"] == "2024-11-07T00:00:00+00:00"
-    assert params["endtime"] == "2024-11-08T00:00:00+00:00"
+    assert params["starttime"] == "2024-11-07T00:00:00.000000Z"
+    assert params["endtime"] == "2024-11-08T00:00:00.000000Z"
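
The expected strings change because the `+00:00` suffix is what `datetime.isoformat()` produces, while `.000000Z` is obspy's default `UTCDateTime` string form; presumably `parse_params` now formats times through `UTCDateTime`. The two formats, for reference (standard-library and obspy behavior, not code from this patch):

```python
from datetime import datetime, timezone
from obspy import UTCDateTime

t = datetime(2024, 11, 7, tzinfo=timezone.utc)
t.isoformat()        # '2024-11-07T00:00:00+00:00'   -> old expected value
str(UTCDateTime(t))  # '2024-11-07T00:00:00.000000Z' -> new expected value
```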
diff --git a/test/pcdcp_test/PCDCPFactory_test.py b/test/pcdcp_test/PCDCPFactory_test.py
index 08a92e85e..cdb9ff79e 100644
--- a/test/pcdcp_test/PCDCPFactory_test.py
+++ b/test/pcdcp_test/PCDCPFactory_test.py
@@ -1,6 +1,6 @@
 """Tests for PCDCPFactory."""
 
-from geomagio.pcdcp import PCDCPFactory
+from geomagio.pcdcp.PCDCPFactory import PCDCPFactory
 from obspy.core.utcdatetime import UTCDateTime
 from obspy.core.stream import Stream
 from numpy.testing import assert_equal
diff --git a/test/pcdcp_test/PCDCPParser_test.py b/test/pcdcp_test/PCDCPParser_test.py
index 62ef09918..c1aaaaa57 100644
--- a/test/pcdcp_test/PCDCPParser_test.py
+++ b/test/pcdcp_test/PCDCPParser_test.py
@@ -1,7 +1,7 @@
 """Tests for the PCDCP Parser class."""
 
 from numpy.testing import assert_equal
-from geomagio.pcdcp import PCDCPParser
+from geomagio.pcdcp.PCDCPParser import PCDCPParser
 
 
 PCDCP_EXAMPLE = """
diff --git a/test/residual_test/residual_test.py b/test/residual_test/residual_test.py
index 2b03f2227..6aef906d3 100644
--- a/test/residual_test/residual_test.py
+++ b/test/residual_test/residual_test.py
@@ -2,17 +2,13 @@ import json
 
 from numpy.testing import assert_almost_equal, assert_equal
 from pydantic import TypeAdapter
-import pytest
 from typing import List
 
 from obspy.core import UTCDateTime
-from geomagio.residual import (
-    calculate,
-    Reading,
-    SpreadsheetAbsolutesFactory,
-    SpreadsheetSummaryFactory,
-    WebAbsolutesFactory,
-)
+from geomagio.residual.Calculation import calculate
+from geomagio.residual.Reading import Reading
+from geomagio.residual.SpreadsheetAbsolutesFactory import SpreadsheetAbsolutesFactory
+from geomagio.residual.SpreadsheetSummaryFactory import SpreadsheetSummaryFactory
 
 
 def assert_readings_equal(expected: Reading, actual: Reading, decimal: int):
diff --git a/test_metadata.py b/test_metadata.py
index 4436aa0a6..477ca2d0e 100644
--- a/test_metadata.py
+++ b/test_metadata.py
@@ -3,9 +3,10 @@ import json
 from obspy import UTCDateTime
 
 from geomagio.adjusted import AdjustedMatrix, Metric
-from geomagio.api.db import database, MetadataDatabaseFactory
+from geomagio.api.db.models.metadata import Metadata, MetadataCategory
+from geomagio.api.db.factories.MetadataDatabaseFactory import MetadataDatabaseFactory
+from geomagio.api.db.database import engine
 from geomagio.api.ws.Observatory import OBSERVATORIES
-from geomagio.metadata import Metadata, MetadataCategory
 from geomagio.residual import SpreadsheetAbsolutesFactory, WebAbsolutesFactory
 
 
@@ -109,7 +110,7 @@ for observatory in OBSERVATORIES:
             created_by="test_metadata.py",
             network=network,
             station=observatory.id,
-            metadata=observatory.dict(),
+            metadata=observatory.model_dump(),
         )
     )
 
@@ -126,7 +127,7 @@ reading = SpreadsheetAbsolutesFactory().parse_spreadsheet(
 readings.append(reading)
 
 for reading in readings:
-    json_string = reading.json()
+    json_string = reading.model_dump_json()
     reading_dict = json.loads(json_string)
     try:
         reviewer = reading.metadata["reviewer"]
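
The `.dict()` -> `.model_dump()` and `.json()` -> `.model_dump_json()` renames are the pydantic v2 replacements for the deprecated v1 methods. A minimal illustration with a throwaway model (`Sample` is hypothetical, not a geomagio class):

```python
from pydantic import BaseModel

class Sample(BaseModel):  # hypothetical stand-in for the real models
    station: str
    value: float

s = Sample(station="FRD", value=1.5)
s.model_dump()       # {'station': 'FRD', 'value': 1.5}  (v1: s.dict())
s.model_dump_json()  # '{"station":"FRD","value":1.5}'   (v1: s.json())
```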
@@ -167,16 +168,17 @@ test_metadata.append(
         category="adjusted-matrix",
         station="FRD",
         network="NT",
-        metadata=adjusted_matrix.dict(),
+        metadata=adjusted_matrix.model_dump(),
     )
 )
 
 
 async def load_test_metadata():
-    await database.connect()
-    for meta in test_metadata:
-        await MetadataDatabaseFactory(database=database).create_metadata(meta)
-    await database.disconnect()
+    # validate before saving; model_validate raises if an entry is invalid
+    for m in test_metadata:
+        Metadata.model_validate(m)
+
+    await MetadataDatabaseFactory(engine=engine).batch_create_metadata(test_metadata)
 
 
 if __name__ == "__main__":
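
With the `databases` instance gone, the loader hands a SQLAlchemy engine to the factory and no longer manages connect/disconnect itself. Since `batch_create_metadata` is awaited, the engine is presumably async; a sketch of what `geomagio/api/db/database.py` could export (the URL and driver are assumptions, only the `engine` name comes from this patch):

```python
# Hypothetical sketch of geomagio/api/db/database.py after the sqlmodel move.
from sqlalchemy.ext.asyncio import create_async_engine

# Assumed connection URL; the real module may build it from configuration.
engine = create_async_engine("sqlite+aiosqlite:///./api_database.db")
```

The factory presumably opens and closes its own sessions from this engine, which is why the explicit `database.connect()` / `database.disconnect()` calls disappear from the loader.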
-- 
GitLab