diff --git a/geomagio/api/secure/metadata.py b/geomagio/api/secure/metadata.py
index 404b262b98a8e8a6b4a6d4e22c8851ef556e0593..ee5a8328aee818777b9dd86f58d1144943bd63c2 100644
--- a/geomagio/api/secure/metadata.py
+++ b/geomagio/api/secure/metadata.py
@@ -4,13 +4,12 @@ Uses login.py for user management.
 
 Anyone can access metadata.
 Logged in users can create new metadata.
-Update and delete are restricted based on group membership.
+Update is restricted based on group membership.
 
 
 Configuration:
     uses environment variables:
 
-    ADMIN_GROUP           - delete is restricted the admin group.
     REVIEWER_GROUP        - update is restricted the reviewer group.
 """
 
diff --git a/poetry.lock b/poetry.lock
index 2976e14e6e53dcd1d5b5e1c3b59ca3891840cc92..5946b52da5e31c6b97862e02ff98f9b2cc62304e 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.8.5 and should not be changed by hand.
 
 [[package]]
 name = "aiomysql"
@@ -73,24 +73,25 @@ typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.9\""}
 
 [[package]]
 name = "anyio"
-version = "3.7.1"
+version = "4.5.2"
 description = "High level compatibility layer for multiple asynchronous event loop implementations"
 optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
 files = [
-    {file = "anyio-3.7.1-py3-none-any.whl", hash = "sha256:91dee416e570e92c64041bd18b900d1d6fa78dff7048769ce5ac5ddad004fbb5"},
-    {file = "anyio-3.7.1.tar.gz", hash = "sha256:44a3c9aba0f5defa43261a8b3efb97891f2bd7d804e0e1f56419befa1adfc780"},
+    {file = "anyio-4.5.2-py3-none-any.whl", hash = "sha256:c011ee36bc1e8ba40e5a81cb9df91925c218fe9b778554e0b56a21e1b5d4716f"},
+    {file = "anyio-4.5.2.tar.gz", hash = "sha256:23009af4ed04ce05991845451e11ef02fc7c5ed29179ac9a420e5ad0ac7ddc5b"},
 ]
 
 [package.dependencies]
-exceptiongroup = {version = "*", markers = "python_version < \"3.11\""}
+exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""}
 idna = ">=2.8"
 sniffio = ">=1.1"
+typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""}
 
 [package.extras]
-doc = ["Sphinx", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme (>=1.2.2)", "sphinxcontrib-jquery"]
-test = ["anyio[trio]", "coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "mock (>=4)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"]
-trio = ["trio (<0.22)"]
+doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"]
+test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21.0b1)"]
+trio = ["trio (>=0.26.1)"]
 
 [[package]]
 name = "authlib"
@@ -889,35 +890,35 @@ tornado = ["tornado (>=0.2)"]
 
 [[package]]
 name = "h11"
-version = "0.12.0"
+version = "0.14.0"
 description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1"
 optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.7"
 files = [
-    {file = "h11-0.12.0-py3-none-any.whl", hash = "sha256:36a3cb8c0a032f56e2da7084577878a035d3b61d104230d4bd49c0c6b555a9c6"},
-    {file = "h11-0.12.0.tar.gz", hash = "sha256:47222cb6067e4a307d535814917cd98fd0a57b6788ce715755fa2b6c28b56042"},
+    {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"},
+    {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"},
 ]
 
 [[package]]
 name = "httpcore"
-version = "0.15.0"
+version = "1.0.7"
 description = "A minimal low-level HTTP client."
 optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
 files = [
-    {file = "httpcore-0.15.0-py3-none-any.whl", hash = "sha256:1105b8b73c025f23ff7c36468e4432226cbb959176eab66864b8e31c4ee27fa6"},
-    {file = "httpcore-0.15.0.tar.gz", hash = "sha256:18b68ab86a3ccf3e7dc0f43598eaddcf472b602aba29f9aa6ab85fe2ada3980b"},
+    {file = "httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd"},
+    {file = "httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c"},
 ]
 
 [package.dependencies]
-anyio = "==3.*"
 certifi = "*"
-h11 = ">=0.11,<0.13"
-sniffio = "==1.*"
+h11 = ">=0.13,<0.15"
 
 [package.extras]
+asyncio = ["anyio (>=4.0,<5.0)"]
 http2 = ["h2 (>=3,<5)"]
 socks = ["socksio (==1.*)"]
+trio = ["trio (>=0.22.0,<1.0)"]
 
 [[package]]
 name = "httptools"
@@ -976,26 +977,27 @@ test = ["Cython (>=0.29.24)"]
 
 [[package]]
 name = "httpx"
-version = "0.23.0"
+version = "0.28.1"
 description = "The next generation HTTP client."
 optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
 files = [
-    {file = "httpx-0.23.0-py3-none-any.whl", hash = "sha256:42974f577483e1e932c3cdc3cd2303e883cbfba17fe228b0f63589764d7b9c4b"},
-    {file = "httpx-0.23.0.tar.gz", hash = "sha256:f28eac771ec9eb4866d3fb4ab65abd42d38c424739e80c08d8d20570de60b0ef"},
+    {file = "httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad"},
+    {file = "httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc"},
 ]
 
 [package.dependencies]
+anyio = "*"
 certifi = "*"
-httpcore = ">=0.15.0,<0.16.0"
-rfc3986 = {version = ">=1.3,<2", extras = ["idna2008"]}
-sniffio = "*"
+httpcore = "==1.*"
+idna = "*"
 
 [package.extras]
 brotli = ["brotli", "brotlicffi"]
-cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<13)"]
+cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"]
 http2 = ["h2 (>=3,<5)"]
 socks = ["socksio (==1.*)"]
+zstd = ["zstandard (>=0.18.0)"]
 
 [[package]]
 name = "idna"
@@ -2243,23 +2245,6 @@ urllib3 = ">=1.21.1,<3"
 socks = ["PySocks (>=1.5.6,!=1.5.7)"]
 use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
 
-[[package]]
-name = "rfc3986"
-version = "1.5.0"
-description = "Validating URI References per RFC 3986"
-optional = false
-python-versions = "*"
-files = [
-    {file = "rfc3986-1.5.0-py2.py3-none-any.whl", hash = "sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97"},
-    {file = "rfc3986-1.5.0.tar.gz", hash = "sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835"},
-]
-
-[package.dependencies]
-idna = {version = "*", optional = true, markers = "extra == \"idna2008\""}
-
-[package.extras]
-idna2008 = ["idna"]
-
 [[package]]
 name = "ruamel-yaml"
 version = "0.18.10"
@@ -2967,4 +2952,4 @@ pycurl = ["pycurl"]
 [metadata]
 lock-version = "2.0"
 python-versions = ">=3.8,<3.12"
-content-hash = "5d6aca195743faf8f75339d35cda6989c5cafeaa07beb5c26511411f4dd77236"
+content-hash = "e74f5314dc78dc4ab24fd3afab056baa724eb022c689df183a91854cc9463ad8"
diff --git a/pyproject.toml b/pyproject.toml
index 130a69e02edf034d71dbb0c6b33ff071a1238c94..ec3207c2b50bbb23e8f31b713e075977785f757b 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -38,7 +38,7 @@ cryptography = "^43.0.1"
 databases = {extras = ["mysql", "sqlite"], version = "^0.6.1"}
 fastapi = ">=0.103.0"
 gunicorn = "^23.0.0"
-httpx = "0.23.0"
+httpx = "0.28.1"
 # 2022-10-24 pin sqlalchemy to 1.4.41 for now
 # 1.4.42 seems to cause issues with databases (https://stackoverflow.com/a/74102692)
 SQLAlchemy = "1.4.41"
diff --git a/test/api_test/secure_test/conftest.py b/test/api_test/secure_test/conftest.py
new file mode 100644
index 0000000000000000000000000000000000000000..0df413243c229a44d02c4c9caf120064af32a7ee
--- /dev/null
+++ b/test/api_test/secure_test/conftest.py
@@ -0,0 +1,229 @@
+import pytest
+
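+# shared fixtures for the secure API tests (login_test.py and metadata_test.py)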
+
+@pytest.fixture()
+def valid_token():
+    return {
+        "access_token": "test_access_token",
+        "token_type": "Bearer",
+        "expires_in": 7200,
+        "refresh_token": "test_refresh_toekn",
+        "scope": "openid email profile",
+        "created_at": 1733244369,
+        "id_token": "test_id_token",
+        "expires_at": 1733251569,
+        "userinfo": {
+            "iss": "http://test_url",
+            "sub": "1234",
+            "aud": "test_aud",
+            "exp": 1733244489,
+            "iat": 1733244369,
+            "nonce": "test_nonce",
+            "auth_time": 1733244368,
+            "sub_legacy": "test_sub_legacy",
+            "name": "test_user",
+            "nickname": "test_user_nickname",
+            "preferred_username": "test_preferred_username",
+            "email": "test_email",
+            "email_verified": True,
+            "profile": "http://test_url/user",
+            "picture": "http://picture_url",
+            "groups_direct": ["group1", "group2"],
+        },
+    }
+
+
+@pytest.fixture()
+def valid_userinfo():
+    return {
+        "sub": "1234",
+        "sub_legacy": "test_sub_legacy",
+        "name": "test_user",
+        "nickname": "test_user_nickname",
+        "preferred_username": "test_preferred_username",
+        "email": "test_email",
+        "email_verified": True,
+        "profile": "http://test_url/user",
+        "picture": "http://picture_url",
+        "groups": ["group1", "group2"],
+    }
+
+
+@pytest.fixture()
+def valid_gitlab_user():
+    return {
+        "id": 1234,
+        "email": "test_email",
+        "name": "test_user",
+        "username": "test_user_nickname",
+        "avatar_url": "http://picture_url",
+    }
+
+
+@pytest.fixture()
+def valid_reading_metadata():
+    return {
+        "created_by": "test_user",
+        "starttime": "2024-11-07T12:12:12Z",
+        "endtime": "2024-11-07T20:20:20Z",
+        "network": "NT",
+        "station": "BOU",
+        "category": "reading",
+        "data_valid": True,
+        "metadata": {
+            "azimuth": 199.1383,
+            "metadata": {
+                "time": "2020-01-03T17:12:47Z",
+                "station": "BOU",
+                "observer": "Test Observer",
+                "reviewed": True,
+                "reviewer": "Test Reviewer",
+                "mark_name": "AZ",
+                "pier_name": "MainPCDCP",
+                "theodolite": "108449",
+                "electronics": "0110",
+                "mark_azimuth": 199.1383,
+                "pier_correction": -22,
+            },
+            "absolutes": [
+                {
+                    "shift": 0,
+                    "valid": True,
+                    "element": "D",
+                    "endtime": "2020-01-03T17:16:21.000000Z",
+                    "absolute": 8.3851056,
+                    "baseline": 8.58571,
+                    "starttime": "2020-01-03T17:12:47.000000Z",
+                },
+                {
+                    "shift": 0,
+                    "valid": True,
+                    "element": "H",
+                    "endtime": "2020-01-03T17:24:40.000000Z",
+                    "absolute": 20728.0650365,
+                    "baseline": -71.7177135,
+                    "starttime": "2020-01-03T17:20:48.000000Z",
+                },
+                {
+                    "shift": 0,
+                    "valid": True,
+                    "element": "Z",
+                    "endtime": "2020-01-03T17:24:40.000000Z",
+                    "absolute": 47450.1529433,
+                    "baseline": 578.2041933,
+                    "starttime": "2020-01-03T17:20:48.000000Z",
+                },
+            ],
+            "hemisphere": 1,
+            "diagnostics": None,
+            "scale_value": None,
+            "measurements": [
+                {
+                    "e": None,
+                    "f": None,
+                    "h": None,
+                    "z": None,
+                    "angle": 191.56666666666666,
+                    "measurement_type": "FirstMarkDown",
+                },
+                {
+                    "e": None,
+                    "f": None,
+                    "h": None,
+                    "z": None,
+                    "angle": 11.566666666666666,
+                    "measurement_type": "FirstMarkUp",
+                },
+                {
+                    "e": None,
+                    "f": None,
+                    "h": None,
+                    "z": None,
+                    "angle": 191.56666666666666,
+                    "measurement_type": "SecondMarkDown",
+                },
+                {
+                    "e": None,
+                    "f": None,
+                    "h": None,
+                    "z": None,
+                    "angle": 11.566666666666666,
+                    "measurement_type": "SecondMarkUp",
+                },
+                {
+                    "e": -72.242,
+                    "f": 51801.81,
+                    "h": 20800.329,
+                    "z": 46871.49,
+                    "angle": 270.71666666666664,
+                    "measurement_type": "WestDown",
+                    "time": "2024-11-07T12:12:12Z",
+                },
+                {
+                    "e": -72.636,
+                    "f": 51801.92,
+                    "h": 20800.259,
+                    "z": 46871.641,
+                    "angle": 90.66666666666667,
+                    "measurement_type": "EastDown",
+                    "time": "2024-11-07T12:13:14Z",
+                },
+                {
+                    "e": -72.657,
+                    "f": 51801.82,
+                    "h": 20800.259,
+                    "z": 46871.521,
+                    "angle": 90.93333333333334,
+                    "measurement_type": "WestUp",
+                    "time": "2024-11-07T13:13:11Z",
+                },
+                {
+                    "e": -72.758,
+                    "f": 51801.92,
+                    "h": 20800.086,
+                    "z": 46871.707,
+                    "angle": 270.96666666666664,
+                    "measurement_type": "EastUp",
+                    "time": "2024-11-07T13:15:12Z",
+                },
+                {
+                    "e": -72.898,
+                    "f": 51801.89,
+                    "h": 20799.796,
+                    "z": 46871.802,
+                    "angle": 246.38333333333333,
+                    "measurement_type": "SouthDown",
+                    "time": "2024-11-07T15:12:12Z",
+                },
+                {
+                    "e": -72.8,
+                    "f": 51802.01,
+                    "h": 20799.852,
+                    "z": 46871.919,
+                    "angle": 66.38333333333334,
+                    "measurement_type": "NorthUp",
+                    "time": "2024-11-07T18:12:12Z",
+                },
+                {
+                    "e": -72.775,
+                    "f": 51802.01,
+                    "h": 20799.668,
+                    "z": 46871.997,
+                    "angle": 113.58333333333333,
+                    "measurement_type": "SouthUp",
+                    "time": "2024-11-07T19:12:19Z",
+                },
+                {
+                    "e": -72.813,
+                    "f": 51802.14,
+                    "h": 20799.815,
+                    "z": 46872.077,
+                    "angle": 293.5833333333333,
+                    "measurement_type": "NorthDown",
+                    "time": "2024-11-07T20:20:20Z",
+                },
+                {"measurement_type": "Meridian", "angle": 21},
+            ],
+            "pier_correction": -22,
+        },
+    }
diff --git a/test/api_test/secure_test/login_test.py b/test/api_test/secure_test/login_test.py
index 6097219b51dcd50d2116893b4e63e06a0034f27d..389796565cebb401741ec9679719f1fc2224f90c 100644
--- a/test/api_test/secure_test/login_test.py
+++ b/test/api_test/secure_test/login_test.py
@@ -3,7 +3,6 @@ import httpx
 from unittest.mock import ANY
 from fastapi.testclient import TestClient
 from authlib.integrations.base_client.errors import MismatchingStateError
-from requests import Response
 
 from geomagio.api.secure import app
 from geomagio.api.secure.SessionMiddleware import SessionMiddleware
@@ -12,65 +11,6 @@ from geomagio.api.secure.login import User
 client = TestClient(app)
 
 
-@pytest.fixture()
-def valid_token():
-    return {
-        "access_token": "test_access_token",
-        "token_type": "Bearer",
-        "expires_in": 7200,
-        "refresh_token": "test_refresh_toekn",
-        "scope": "openid email profile",
-        "created_at": 1733244369,
-        "id_token": "test_id_token",
-        "expires_at": 1733251569,
-        "userinfo": {
-            "iss": "http://test_url",
-            "sub": "1234",
-            "aud": "test_aud",
-            "exp": 1733244489,
-            "iat": 1733244369,
-            "nonce": "test_nonce",
-            "auth_time": 1733244368,
-            "sub_legacy": "test_sub_legacy",
-            "name": "test_user",
-            "nickname": "test_user_nickname",
-            "preferred_username": "test_preferred_username",
-            "email": "test_email",
-            "email_verified": True,
-            "profile": "http://test_url/user",
-            "picture": "http://picture_url",
-            "groups_direct": ["group1", "group2"],
-        },
-    }
-
-
-@pytest.fixture()
-def valid_userinfo():
-    return {
-        "sub": "1234",
-        "sub_legacy": "test_sub_legacy",
-        "name": "test_user",
-        "nickname": "test_user_nickname",
-        "preferred_username": "test_preferred_username",
-        "email": "test_email",
-        "email_verified": True,
-        "profile": "http://test_url/user",
-        "picture": "http://picture_url",
-        "groups": ["group1", "group2"],
-    }
-
-
-@pytest.fixture()
-def valid_gitlab_user():
-    return {
-        "id": 1234,
-        "email": "test_email",
-        "name": "test_user",
-        "username": "test_user_nickname",
-        "avatar_url": "http://picture_url",
-    }
-
-
 @pytest.mark.asyncio
 async def test_authorization_valid(valid_token, valid_userinfo, mocker):
 
diff --git a/test/api_test/secure_test/metadata_test.py b/test/api_test/secure_test/metadata_test.py
new file mode 100644
index 0000000000000000000000000000000000000000..b2c50090e8e682f2f36ddb3fb2dbf043c6be391a
--- /dev/null
+++ b/test/api_test/secure_test/metadata_test.py
@@ -0,0 +1,867 @@
+import pytest
+import json
+import httpx
+import unittest
+import datetime
+from unittest.mock import ANY, AsyncMock, patch
+from fastapi.testclient import TestClient
+from obspy import UTCDateTime
+
+from geomagio.api.secure import app
+from geomagio.api.secure.SessionMiddleware import SessionMiddleware
+from geomagio.metadata.Metadata import Metadata
+from geomagio.api.db.MetadataDatabaseFactory import MetadataDatabaseFactory
+
+client = TestClient(app)
+
+
+@pytest.mark.asyncio
+@patch("databases.Database.execute", new_callable=AsyncMock)
+async def test_create_metadata_valid_session(
+    valid_token, valid_userinfo, valid_reading_metadata, mocker
+):
+    valid_session = {"token": valid_token, "user": valid_userinfo}
+    mock_get_session = mocker.AsyncMock(return_value=valid_session)
+    mocker.patch.object(SessionMiddleware, "get_session", mock_get_session)
+
+    mock_save_session = mocker.AsyncMock(return_value=valid_session)
+    mocker.patch.object(SessionMiddleware, "save_session", mock_save_session)
+
+    body = json.dumps(valid_reading_metadata)
+
+    response = client.post(
+        url="/metadata", headers={"Cookie": "PHPSESSID=valid_session_id"}, content=body
+    )
+
+    assert response.status_code == 201
+    mock_get_session.assert_called_once_with("valid_session_id")
+
+    returned_metadata = Metadata(**response.json())
+    assert returned_metadata.id == 1
+    assert returned_metadata.category == "reading"
+    assert returned_metadata.starttime == UTCDateTime("2024-11-07T12:12:12Z")
+    assert returned_metadata.endtime == UTCDateTime("2024-11-07T20:20:20Z")
+    assert returned_metadata.network == "NT"
+    assert returned_metadata.station == "BOU"
+    assert returned_metadata.status == "new"
+    assert returned_metadata.priority == 1
+
+
+@pytest.mark.asyncio
+@patch("databases.Database.execute", new_callable=AsyncMock)
+async def test_create_metadata_with_id(
+    valid_token, valid_userinfo, valid_reading_metadata, mocker
+):
+    # input id is removed and replaced with new db id
+    valid_session = {"token": valid_token, "user": valid_userinfo}
+    mock_get_session = mocker.AsyncMock(return_value=valid_session)
+    mocker.patch.object(SessionMiddleware, "get_session", mock_get_session)
+
+    mock_save_session = mocker.AsyncMock(return_value=valid_session)
+    mocker.patch.object(SessionMiddleware, "save_session", mock_save_session)
+
+    valid_reading_metadata["id"] = 1234
+    body = json.dumps(valid_reading_metadata)
+
+    response = client.post(
+        url="/metadata", headers={"Cookie": "PHPSESSID=valid_session_id"}, content=body
+    )
+
+    assert response.status_code == 201
+    mock_get_session.assert_called_once_with("valid_session_id")
+
+    returned_metadata = Metadata(**response.json())
+    assert returned_metadata.id == 1
+    assert returned_metadata.category == "reading"
+    assert returned_metadata.starttime == UTCDateTime("2024-11-07T12:12:12Z")
+    assert returned_metadata.endtime == UTCDateTime("2024-11-07T20:20:20Z")
+    assert returned_metadata.network == "NT"
+    assert returned_metadata.station == "BOU"
+    assert returned_metadata.status == "new"
+    assert returned_metadata.priority == 1
+
+
+@pytest.mark.asyncio
+@patch("databases.Database.execute", new_callable=AsyncMock)
+async def test_create_metadata_valid_auth_token(
+    valid_token, valid_userinfo, valid_gitlab_user, valid_reading_metadata, mocker
+):
+    valid_user_response = httpx.Response(status_code=200, json=valid_gitlab_user)
+
+    valid_groups = [
+        {
+            "id": 1,
+            "full_path": "group1",
+        },
+        {"id": 2, "full_path": "group2"},
+    ]
+
+    valid_groups_response = httpx.Response(status_code=200, json=valid_groups)
+
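+    # queue three responses for httpx.AsyncClient.get: the GitLab user, the user's groups, and a final empty response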
+    mock_gitlab_request = mocker.AsyncMock(
+        side_effect=[
+            valid_user_response,
+            valid_groups_response,
+            httpx.Response(status_code=200, json={}),
+        ]
+    )
+    mocker.patch("httpx.AsyncClient.get", mock_gitlab_request)
+
+    valid_session = {"token": valid_token, "user": valid_userinfo}
+
+    mock_save_session = mocker.AsyncMock(return_value=valid_session)
+    mocker.patch.object(SessionMiddleware, "save_session", mock_save_session)
+
+    body = json.dumps(valid_reading_metadata)
+
+    response = client.post(
+        url="/metadata", headers={"Authorization": "auth_token"}, content=body
+    )
+
+    session_user = {
+        "email": "test_email",
+        "sub": 1234,
+        "groups": ["group1", "group2"],
+        "name": "test_user",
+        "nickname": "test_user_nickname",
+        "picture": "http://picture_url",
+    }
+
+    assert response.status_code == 201
+    mock_save_session.assert_called_once_with(ANY, {"user": session_user})
+
+    returned_metadata = Metadata(**response.json())
+    assert returned_metadata.id == 1
+    assert returned_metadata.category == "reading"
+    assert returned_metadata.starttime == UTCDateTime("2024-11-07T12:12:12Z")
+    assert returned_metadata.endtime == UTCDateTime("2024-11-07T20:20:20Z")
+    assert returned_metadata.network == "NT"
+    assert returned_metadata.station == "BOU"
+    assert returned_metadata.status == "new"
+    assert returned_metadata.priority == 1
+
+
+@pytest.mark.asyncio
+async def test_create_metadata_no_session_or_token(valid_reading_metadata, mocker):
+    mock_get_session = mocker.AsyncMock()
+    mocker.patch.object(SessionMiddleware, "get_session", mock_get_session)
+
+    body = json.dumps(valid_reading_metadata)
+
+    # do not pass in cookie or auth
+    response = client.post(url="/metadata", content=body)
+    assert response.status_code == 401
+
+    mock_get_session.assert_not_called()
+
+
+@pytest.mark.asyncio
+async def test_create_metadata_with_no_session_and_invalid_token(mocker):
+    mock_get_session = mocker.AsyncMock()
+    mocker.patch.object(SessionMiddleware, "get_session", mock_get_session)
+
+    mock_gitlab_request = mocker.AsyncMock(return_value=None)
+    mocker.patch("geomagio.api.secure.login.get_gitlab_user", mock_gitlab_request)
+
+    response = client.post(
+        url="/metadata", headers={"Authorization": "invalid_gitlab_token"}
+    )
+    assert response.status_code == 401
+
+    mock_get_session.assert_not_called()
+    mock_gitlab_request.assert_called_once_with(token="invalid_gitlab_token")
+
+
+@pytest.mark.asyncio
+async def test_create_metadata_invalid_session(mocker):
+    # mock invalid session. this is created when users GET /metadata without logging in
+    invalid_session = {
+        "redirect_uri": "redirect_uri",
+        "nonce": "nonce_str",
+        "url": "test_url",
+    }
+    mock_get_session = mocker.AsyncMock(return_value=invalid_session)
+    mocker.patch.object(SessionMiddleware, "get_session", mock_get_session)
+
+    mock_save_session = mocker.AsyncMock()
+    mocker.patch.object(SessionMiddleware, "save_session", mock_save_session)
+
+    response = client.post(
+        url="/metadata", headers={"Cookie": "PHPSESSID=invalid_session_id"}
+    )
+
+    assert response.status_code == 401
+    mock_get_session.assert_called_once_with("invalid_session_id")
+
+
+@pytest.mark.asyncio
+@patch("databases.Database.transaction")
+@patch("databases.Database.fetch_all", new_callable=AsyncMock)
+@patch("databases.Database.execute", new_callable=AsyncMock)
+async def test_update_metadata(
+    mock_execute,
+    mock_fetch,
+    mock_transaction,
+    valid_token,
+    valid_userinfo,
+    valid_reading_metadata,
+    mocker,
+):
+    valid_userinfo["groups"] = ["ghsc/geomag/operations/roles/reviewer"]
+    valid_session = {"token": valid_token, "user": valid_userinfo}
+    mock_get_session = mocker.AsyncMock(return_value=valid_session)
+    mocker.patch.object(SessionMiddleware, "get_session", mock_get_session)
+
+    mock_save_session = mocker.AsyncMock(return_value=valid_session)
+    mocker.patch.object(SessionMiddleware, "save_session", mock_save_session)
+
+    valid_reading_metadata["id"] = 1234
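+    # first fetch returns the existing row, second fetch returns the updated metadata;
+    # execute is called once for the history insert and once for the update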
+    mock_fetch.side_effect = [
+        [
+            {
+                "id": 1234,
+                "category": "instrument",
+                "data_valid": True,
+                "priority": 1,
+            }
+        ],
+        [valid_reading_metadata],
+    ]
+    mock_execute.side_effect = [1, None]
+
+    body = json.dumps(valid_reading_metadata)
+
+    response = client.put(
+        url="/metadata/1234",
+        headers={"Cookie": "PHPSESSID=valid_session_id"},
+        content=body,
+    )
+    assert response.status_code == 200
+
+    returned_metadata = Metadata(**response.json())
+    assert returned_metadata.id == 1234
+    assert returned_metadata.category == "reading"
+
+    assert mock_fetch.call_count == 2
+    assert mock_execute.call_count == 2
+
+    # the query for both fetch_all requests should be the same
+    fetch_called_params = mock_fetch.call_args.args[0].compile().params
+    assert fetch_called_params["id_1"] == 1234
+
+    # assert save to metadata history is correct
+    insert_called_params = mock_execute.call_args_list[0].args[0].compile().params
+    assert insert_called_params["metadata_id"] == 1234
+    assert insert_called_params["category"] == "instrument"
+    assert insert_called_params["data_valid"] == True
+    assert insert_called_params["priority"] == 1
+
+    # assert update to metadata table is correct
+    update_called_params = mock_execute.call_args_list[1].args[0].compile().params
+    assert update_called_params["id_1"] == 1234
+    assert update_called_params["category"] == "reading"
+
+
+@pytest.mark.asyncio
+@patch("databases.Database.transaction")
+@patch("databases.Database.fetch_all", new_callable=AsyncMock)
+@patch("databases.Database.execute", new_callable=AsyncMock)
+async def test_update_metadata_valid_auth_token(
+    mock_execute,
+    mock_fetch,
+    mock_transaction,
+    valid_token,
+    valid_userinfo,
+    valid_gitlab_user,
+    valid_reading_metadata,
+    mocker,
+):
+    valid_user_response = httpx.Response(status_code=200, json=valid_gitlab_user)
+
+    valid_groups = [
+        {
+            "id": 1,
+            "full_path": "group1",
+        },
+        {"id": 2, "full_path": "ghsc/geomag/operations/roles/reviewer"},
+    ]
+
+    valid_groups_response = httpx.Response(status_code=200, json=valid_groups)
+
+    mock_gitlab_request = mocker.AsyncMock(
+        side_effect=[
+            valid_user_response,
+            valid_groups_response,
+            httpx.Response(status_code=200, json={}),
+        ]
+    )
+    mocker.patch("httpx.AsyncClient.get", mock_gitlab_request)
+
+    valid_session = {"token": valid_token, "user": valid_userinfo}
+
+    mock_save_session = mocker.AsyncMock(return_value=valid_session)
+    mocker.patch.object(SessionMiddleware, "save_session", mock_save_session)
+
+    valid_reading_metadata["id"] = 1234
+    mock_fetch.side_effect = [
+        [
+            {
+                "id": 1234,
+                "category": "instrument",
+                "data_valid": True,
+                "priority": 1,
+            }
+        ],
+        [valid_reading_metadata],
+    ]
+    mock_execute.side_effect = [1, None]
+
+    body = json.dumps(valid_reading_metadata)
+
+    response = client.put(
+        url="/metadata/1234", headers={"Authorization": "auth_token"}, content=body
+    )
+
+    session_user = {
+        "email": "test_email",
+        "sub": 1234,
+        "groups": ["group1", "ghsc/geomag/operations/roles/reviewer"],
+        "name": "test_user",
+        "nickname": "test_user_nickname",
+        "picture": "http://picture_url",
+    }
+
+    assert response.status_code == 200
+    mock_save_session.assert_called_once_with(ANY, {"user": session_user})
+
+    returned_metadata = Metadata(**response.json())
+    assert returned_metadata.id == 1234
+    assert returned_metadata.category == "reading"
+
+    assert mock_fetch.call_count == 2
+    assert mock_execute.call_count == 2
+
+    # the query for both fetch_all requests should be the same
+    fetch_called_params = mock_fetch.call_args.args[0].compile().params
+    assert fetch_called_params["id_1"] == 1234
+
+    # assert save to metadata history is correct
+    insert_called_params = mock_execute.call_args_list[0].args[0].compile().params
+    assert insert_called_params["metadata_id"] == 1234
+    assert insert_called_params["category"] == "instrument"
+    assert insert_called_params["data_valid"] == True
+    assert insert_called_params["priority"] == 1
+
+    # assert update to metadata table is correct
+    update_called_params = mock_execute.call_args_list[1].args[0].compile().params
+    assert update_called_params["id_1"] == 1234
+    assert update_called_params["category"] == "reading"
+
+
+@pytest.mark.asyncio
+@patch("databases.Database.transaction")
+@patch("databases.Database.fetch_all", new_callable=AsyncMock)
+@patch("databases.Database.execute", new_callable=AsyncMock)
+async def test_update_metadata_no_existing_metadata(
+    mock_execute,
+    mock_fetch,
+    mock_transaction,
+    valid_token,
+    valid_userinfo,
+    valid_reading_metadata,
+    mocker,
+):
+    valid_userinfo["groups"] = ["ghsc/geomag/operations/roles/reviewer"]
+    valid_session = {"token": valid_token, "user": valid_userinfo}
+    mock_get_session = mocker.AsyncMock(return_value=valid_session)
+    mocker.patch.object(SessionMiddleware, "get_session", mock_get_session)
+
+    mock_save_session = mocker.AsyncMock(return_value=valid_session)
+    mocker.patch.object(SessionMiddleware, "save_session", mock_save_session)
+
+    valid_reading_metadata["id"] = 1234
+    mock_fetch.return_value = []
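+    # no existing row for this id, so the update is expected to raise a ValueError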
+
+    body = json.dumps(valid_reading_metadata)
+
+    try:
+        response = client.put(
+            url="/metadata/1234",
+            headers={"Cookie": "PHPSESSID=valid_session_id"},
+            content=body,
+        )
+    except Exception as e:
+        assert type(e) == ValueError
+
+
+@pytest.mark.asyncio
+async def test_update_metadata_no_valid_group(
+    valid_token, valid_userinfo, valid_reading_metadata, mocker
+):
+    valid_session = {"token": valid_token, "user": valid_userinfo}
+    mock_get_session = mocker.AsyncMock(return_value=valid_session)
+    mocker.patch.object(SessionMiddleware, "get_session", mock_get_session)
+
+    mock_save_session = mocker.AsyncMock(return_value=valid_session)
+    mocker.patch.object(SessionMiddleware, "save_session", mock_save_session)
+
+    body = json.dumps(valid_reading_metadata)
+
+    response = client.put(
+        url="/metadata/1234",
+        headers={"Cookie": "PHPSESSID=valid_session_id"},
+        content=body,
+    )
+    assert response.status_code == 403
+
+
+@pytest.mark.asyncio
+async def test_update_metadata_no_session(
+    valid_token, valid_userinfo, valid_reading_metadata, mocker
+):
+    valid_session = {"token": valid_token, "user": valid_userinfo}
+    mock_get_session = mocker.AsyncMock(return_value=valid_session)
+    mocker.patch.object(SessionMiddleware, "get_session", mock_get_session)
+
+    mock_save_session = mocker.AsyncMock(return_value=valid_session)
+    mocker.patch.object(SessionMiddleware, "save_session", mock_save_session)
+
+    body = json.dumps(valid_reading_metadata)
+
+    response = client.put(url="/metadata/1234", content=body)
+    assert response.status_code == 401
+
+    mock_get_session.assert_not_called()
+
+
+@pytest.mark.asyncio
+async def test_update_metadata_invalid_session(mocker, valid_reading_metadata):
+    # mock invalid session. this is created when users GET /metadata without logging in
+    invalid_session = {
+        "redirect_uri": "redirect_uri",
+        "nonce": "nonce_str",
+        "url": "test_url",
+    }
+    mock_get_session = mocker.AsyncMock(return_value=invalid_session)
+    mocker.patch.object(SessionMiddleware, "get_session", mock_get_session)
+
+    mock_save_session = mocker.AsyncMock()
+    mocker.patch.object(SessionMiddleware, "save_session", mock_save_session)
+
+    body = json.dumps(valid_reading_metadata)
+
+    response = client.put(
+        url="/metadata/1234",
+        headers={"Cookie": "PHPSESSID=invalid_session_id"},
+        content=body,
+    )
+
+    assert response.status_code == 401
+    mock_get_session.assert_called_once_with("invalid_session_id")
+
+
+@pytest.mark.asyncio
+async def test_update_metadata_with_no_session_and_invalid_token(
+    mocker, valid_reading_metadata
+):
+    mock_get_session = mocker.AsyncMock()
+    mocker.patch.object(SessionMiddleware, "get_session", mock_get_session)
+
+    mock_gitlab_request = mocker.AsyncMock(return_value=None)
+    mocker.patch("geomagio.api.secure.login.get_gitlab_user", mock_gitlab_request)
+
+    body = json.dumps(valid_reading_metadata)
+
+    response = client.put(
+        url="/metadata/1234",
+        headers={"Authorization": "invalid_gitlab_token"},
+        content=body,
+    )
+    assert response.status_code == 401
+
+    mock_get_session.assert_not_called()
+    mock_gitlab_request.assert_called_once_with(token="invalid_gitlab_token")
+
+
+# the way the database class is set up prevents us from using pytest-mock here. unittest is a bit more
+# powerful but less intuitive, and it allows us to inspect the parameters of mocked database calls
+class TestMetadata(unittest.IsolatedAsyncioTestCase):
+    @patch("databases.Database.fetch_all", new_callable=AsyncMock)
+    async def test_get_metadata(self, mock_fetch):
+        mock_fetch.return_value = [
+            {
+                "id": 8372,
+                "station": "BOU",
+                "category": "instrument",
+                "network": "NT",
+                "channel": "F",
+                "location": "R0",
+                "data_valid": True,
+                "status": "new",
+                "metadata": {"metadata1": "metadata1"},
+            },
+            {
+                "id": 8376,
+                "station": "BOU",
+                "category": "instrument",
+                "network": "NT",
+                "channel": "F",
+                "location": "R0",
+                "data_valid": True,
+                "status": "new",
+                "metadata": {"metadata2": "metadata2"},
+            },
+        ]
+
+        response = client.get(
+            url="/metadata?station=BOU&category=instrument&network=NT&channel=F&location=R0&data_valid=true&status=new"
+        )
+        assert response.status_code == 200
+
+        mock_fetch.assert_called_once()
+        called_params = mock_fetch.call_args.args[0].compile().params
+
+        assert called_params["station_1"] == "BOU"
+        assert called_params["category_1"] == "instrument"
+        assert called_params["network_1"] == "NT"
+        assert called_params["channel_1"] == "F"
+        assert called_params["location_1"] == "R0"
+        assert called_params["status_1"] == ["new"]
+
+        returned_metadata = []
+
+        rows = response.json()
+        for metadata in rows:
+            returned_metadata.append(Metadata(**metadata))
+
+        assert returned_metadata[0].id == 8372
+        assert returned_metadata[1].id == 8376
+
+    @patch("databases.Database.fetch_all", new_callable=AsyncMock)
+    async def test_get_metadata_with_starttime(self, mock_fetch):
+        mock_fetch.return_value = [
+            {
+                "id": 8372,
+                "station": "BOU",
+                "category": "instrument",
+                "network": "NT",
+                "channel": "F",
+                "location": "R0",
+                "data_valid": True,
+                "status": "new",
+                "metadata": {"metadata1": "metadata1"},
+            },
+            {
+                "id": 8376,
+                "station": "BOU",
+                "category": "instrument",
+                "network": "NT",
+                "channel": "F",
+                "location": "R0",
+                "data_valid": True,
+                "status": "new",
+                "metadata": {"metadata2": "metadata2"},
+            },
+        ]
+        response = client.get(
+            url="/metadata?station=BOU&category=instrument&starttime=2024-11-06T04:27:40Z"
+        )
+        assert response.status_code == 200
+
+        mock_fetch.assert_called_once()
+        called_params = mock_fetch.call_args.args[0].compile().params
+
+        assert called_params["station_1"] == "BOU"
+        assert called_params["category_1"] == "instrument"
+        # the endtime param is set to the requested starttime because the query matches rows where endtime is None or greater than starttime
+        assert called_params["endtime_1"] == datetime.datetime(
+            2024, 11, 6, 4, 27, 40, tzinfo=datetime.timezone.utc
+        )
+
+        returned_metadata = []
+
+        rows = response.json()
+        for metadata in rows:
+            returned_metadata.append(Metadata(**metadata))
+
+        assert returned_metadata[0].id == 8372
+        assert returned_metadata[1].id == 8376
+
+    @patch("databases.Database.fetch_all", new_callable=AsyncMock)
+    async def test_get_metadata_with_endtime(self, mock_fetch):
+        mock_fetch.return_value = [
+            {
+                "id": 8372,
+                "station": "BOU",
+                "category": "instrument",
+                "network": "NT",
+                "channel": "F",
+                "location": "R0",
+                "data_valid": True,
+                "status": "new",
+                "metadata": {"metadata1": "metadata1"},
+            },
+            {
+                "id": 8376,
+                "station": "BOU",
+                "category": "instrument",
+                "network": "NT",
+                "channel": "F",
+                "location": "R0",
+                "data_valid": True,
+                "status": "new",
+                "metadata": {"metadata2": "metadata2"},
+            },
+        ]
+        response = client.get(
+            url="/metadata?station=BOU&category=instrument&endtime=2024-12-06T04:27:40Z"
+        )
+        assert response.status_code == 200
+
+        mock_fetch.assert_called_once()
+        called_params = mock_fetch.call_args.args[0].compile().params
+
+        assert called_params["station_1"] == "BOU"
+        assert called_params["category_1"] == "instrument"
+        # the starttime param is set to the requested endtime because the query matches rows where starttime is None or less than endtime
+        assert called_params["starttime_1"] == datetime.datetime(
+            2024, 12, 6, 4, 27, 40, tzinfo=datetime.timezone.utc
+        )
+
+        returned_metadata = []
+
+        rows = response.json()
+        for metadata in rows:
+            returned_metadata.append(Metadata(**metadata))
+
+        assert returned_metadata[0].id == 8372
+        assert returned_metadata[1].id == 8376
+
+    @patch("databases.Database.fetch_all", new_callable=AsyncMock)
+    async def test_get_metadata_with_created_after(self, mock_fetch):
+        mock_fetch.return_value = [
+            {
+                "id": 8372,
+                "station": "BOU",
+                "category": "instrument",
+                "network": "NT",
+                "channel": "F",
+                "location": "R0",
+                "data_valid": True,
+                "status": "new",
+                "metadata": {"metadata1": "metadata1"},
+            },
+            {
+                "id": 8376,
+                "station": "BOU",
+                "category": "instrument",
+                "network": "NT",
+                "channel": "F",
+                "location": "R0",
+                "data_valid": True,
+                "status": "new",
+                "metadata": {"metadata2": "metadata2"},
+            },
+        ]
+        response = client.get(
+            url="/metadata?station=BOU&category=instrument&created_after=2024-10-06T04:27:40Z"
+        )
+        assert response.status_code == 200
+
+        mock_fetch.assert_called_once()
+        called_params = mock_fetch.call_args.args[0].compile().params
+
+        assert called_params["station_1"] == "BOU"
+        assert called_params["category_1"] == "instrument"
+        # the created_time param is set to created_after because the query matches rows where created_time is greater than created_after
+        assert called_params["created_time_1"] == datetime.datetime(
+            2024, 10, 6, 4, 27, 40, tzinfo=datetime.timezone.utc
+        )
+
+        returned_metadata = []
+
+        rows = response.json()
+        for metadata in rows:
+            returned_metadata.append(Metadata(**metadata))
+
+        assert returned_metadata[0].id == 8372
+        assert returned_metadata[1].id == 8376
+
+    @patch("databases.Database.fetch_all", new_callable=AsyncMock)
+    async def test_get_metadata_with_created_before(self, mock_fetch):
+        mock_fetch.return_value = [
+            {
+                "id": 8372,
+                "station": "BOU",
+                "category": "instrument",
+                "network": "NT",
+                "channel": "F",
+                "location": "R0",
+                "data_valid": True,
+                "status": "new",
+                "metadata": {"metadata1": "metadata1"},
+            },
+            {
+                "id": 8376,
+                "station": "BOU",
+                "category": "instrument",
+                "network": "NT",
+                "channel": "F",
+                "location": "R0",
+                "data_valid": True,
+                "status": "new",
+                "metadata": {"metadata2": "metadata2"},
+            },
+        ]
+        response = client.get(
+            url="/metadata?station=BOU&category=instrument&created_before=2024-09-06T04:27:40Z"
+        )
+        assert response.status_code == 200
+
+        mock_fetch.assert_called_once()
+        called_params = mock_fetch.call_args.args[0].compile().params
+
+        assert called_params["station_1"] == "BOU"
+        assert called_params["category_1"] == "instrument"
+        # the created_time param is set to created_before because the query matches rows where created_time is less than created_before
+        assert called_params["created_time_1"] == datetime.datetime(
+            2024, 9, 6, 4, 27, 40, tzinfo=datetime.timezone.utc
+        )
+
+        returned_metadata = []
+
+        rows = response.json()
+        for metadata in rows:
+            returned_metadata.append(Metadata(**metadata))
+
+        assert returned_metadata[0].id == 8372
+        assert returned_metadata[1].id == 8376
+
+    @patch("databases.Database.fetch_all", new_callable=AsyncMock)
+    async def test_get_metadata_returns_empty(self, mock_fetch):
+        mock_fetch.return_value = []
+
+        response = client.get(
+            url="/metadata?station=BOU&category=instrument&created_before=2024-09-06T04:27:40Z"
+        )
+        assert response.status_code == 200
+
+        mock_fetch.assert_called_once()
+
+        # assert response is an empty list
+        assert response.json() == []
+
+    @patch("databases.Database.fetch_all", new_callable=AsyncMock)
+    async def test_get_metadata_by_id(self, mock_fetch):
+        mock_fetch.return_value = [{"id": 1234, "category": "instrument"}]
+        response = client.get(url="/metadata/1234")
+        assert response.status_code == 200
+
+        mock_fetch.assert_called_once()
+        called_params = mock_fetch.call_args.args[0].compile().params
+
+        assert called_params["id_1"] == 1234
+
+        returned_metadata = Metadata(**response.json())
+        assert returned_metadata.id == 1234
+        assert returned_metadata.category == "instrument"
+
+    @patch("databases.Database.fetch_all", new_callable=AsyncMock)
+    async def test_get_metadata_by_id_returns_empty(self, mock_fetch):
+        mock_fetch.return_value = []
+        try:
+            client.get(url="/metadata/1234")
+        except Exception as e:
+            assert type(e) == ValueError
+
+        mock_fetch.assert_called_once()
+
+    @patch("databases.Database.fetch_one", new_callable=AsyncMock)
+    async def test_get_metadata_history_by_id(self, mock_fetch):
+        mock_fetch.return_value = {"id": 1234, "category": "instrument"}
+        response = client.get(url="/metadata/history/1234")
+        assert response.status_code == 200
+
+        mock_fetch.assert_called_once()
+        called_params = mock_fetch.call_args.args[0].compile().params
+
+        assert called_params["id_1"] == 1234
+
+        returned_metadata = Metadata(**response.json())
+        assert returned_metadata.id == 1234
+        assert returned_metadata.category == "instrument"
+
+    @patch("databases.Database.fetch_one", new_callable=AsyncMock)
+    async def test_get_metadata_history_by_id_returns_empty(self, mock_fetch):
+        mock_fetch.return_value = None
+
+        response = client.get(url="/metadata/history/1234")
+        assert response.status_code == 404
+
+        mock_fetch.assert_called_once()
+        called_params = mock_fetch.call_args.args[0].compile().params
+
+        assert called_params["id_1"] == 1234
+
+    @patch("databases.Database.transaction")
+    @patch("databases.Database.fetch_all", new_callable=AsyncMock)
+    async def test_get_metadata_history_by_metadata_id(
+        self, mock_fetch, mock_transaction
+    ):
+        mock_fetch.side_effect = [
+            [{"id": 242, "metadata_id": 1234, "category": "instrument"}],
+            [{"id": 1234, "category": "reading"}],
+        ]
+        response = client.get(url="/metadata/1234/history")
+        assert response.status_code == 200
+
+        assert mock_fetch.call_count == 2
+        called_params = mock_fetch.call_args.args[0].compile().params
+
+        assert called_params["id_1"] == 1234
+
+        returned_metadata = []
+
+        rows = response.json()
+        for metadata in rows:
+            returned_metadata.append(Metadata(**metadata))
+
+        # expect the metadata from the metadata table to be first
+        assert returned_metadata[0].id == 1234
+        assert returned_metadata[1].id == 242
+
+    @patch("databases.Database.transaction")
+    @patch("databases.Database.fetch_all", new_callable=AsyncMock)
+    async def test_get_metadata_history_by_metadata_id_no_history(
+        self, mock_fetch, mock_transaction
+    ):
+        mock_fetch.side_effect = [[], [{"id": 1234, "category": "reading"}]]
+        response = client.get(url="/metadata/1234/history")
+        assert response.status_code == 200
+
+        assert mock_fetch.call_count == 2
+        called_params = mock_fetch.call_args.args[0].compile().params
+
+        assert called_params["id_1"] == 1234
+
+        returned_metadata = []
+
+        rows = response.json()
+        for metadata in rows:
+            returned_metadata.append(Metadata(**metadata))
+
+        assert returned_metadata[0].id == 1234
+
+    @patch("databases.Database.transaction")
+    @patch("databases.Database.fetch_all", new_callable=AsyncMock)
+    async def test_get_metadata_history_by_metadata_id_no_history_or_metadata(
+        self, mock_fetch, mock_transaction
+    ):
+        mock_fetch.side_effect = [[], []]
+
+        try:
+            client.get(url="/metadata/1234/history")
+        except Exception as e:
+            assert type(e) == ValueError
+
+        assert mock_fetch.call_count == 2