diff --git a/geomagio/adjusted/AdjustedMatrix.py b/geomagio/adjusted/AdjustedMatrix.py
index bd36001a1cb1e3abbb77165261593e09a9084a66..7b35f765588a5f82142d869d6af0d49d4a0b113b 100644
--- a/geomagio/adjusted/AdjustedMatrix.py
+++ b/geomagio/adjusted/AdjustedMatrix.py
@@ -3,9 +3,9 @@ from obspy import Stream, UTCDateTime
 from pydantic import BaseModel
 from typing import Any, List, Optional
 
+from ..pydantic_utcdatetime import CustomUTCDateTimeType
 from ..residual.Reading import Reading, get_absolutes_xyz, get_ordinates
 from .. import ChannelConverter
-from .. import pydantic_utcdatetime
 from .Metric import Metric, get_metric
 
 
@@ -25,9 +25,9 @@ class AdjustedMatrix(BaseModel):
     matrix: Optional[List[List[float]]] = None
     pier_correction: float = 0
     metrics: Optional[List[Metric]] = None
-    starttime: Optional[UTCDateTime] = None
-    endtime: Optional[UTCDateTime] = None
-    time: Optional[UTCDateTime] = None
+    starttime: Optional[CustomUTCDateTimeType] = None
+    endtime: Optional[CustomUTCDateTimeType] = None
+    time: Optional[CustomUTCDateTimeType] = None
 
     def process(
         self,
diff --git a/geomagio/adjusted/Affine.py b/geomagio/adjusted/Affine.py
index 28e14c2413fc5bebd318bd04cced4d0fe3563bb8..a4157d3e193cf667bf406b1f5c691eae70886f2b 100644
--- a/geomagio/adjusted/Affine.py
+++ b/geomagio/adjusted/Affine.py
@@ -9,9 +9,9 @@ from ..residual.Reading import (
     get_absolutes_xyz,
     get_ordinates,
 )
-from .. import pydantic_utcdatetime
 from .AdjustedMatrix import AdjustedMatrix
 from .transform import RotationTranslationXY, TranslateOrigins, Transform
+from ..pydantic_utcdatetime import CustomUTCDateTimeType
 
 
 class Affine(BaseModel):
@@ -27,8 +27,10 @@ class Affine(BaseModel):
     """
 
     observatory: str = None
-    starttime: UTCDateTime = Field(default_factory=lambda: UTCDateTime() - (86400 * 7))
-    endtime: UTCDateTime = Field(default_factory=lambda: UTCDateTime())
+    starttime: CustomUTCDateTimeType = Field(
+        default_factory=lambda: UTCDateTime() - (86400 * 7)
+    )
+    endtime: CustomUTCDateTimeType = Field(default_factory=lambda: UTCDateTime())
     update_interval: Optional[int] = 86400 * 7
     transforms: List[Transform] = [
         RotationTranslationXY(memory=(86400 * 100), acausal=True),
diff --git a/geomagio/api/secure/metadata.py b/geomagio/api/secure/metadata.py
index 7330a15809b43b9ea82b1d5cf7355930836ddbff..235ca4421ecbe3bc4a18327b408d48e385d5af66 100644
--- a/geomagio/api/secure/metadata.py
+++ b/geomagio/api/secure/metadata.py
@@ -21,10 +21,11 @@ from fastapi import APIRouter, Body, Depends, Request, Response, Query
 from obspy import UTCDateTime
 
 from ...metadata import Metadata, MetadataCategory, MetadataQuery
-from ... import pydantic_utcdatetime
 from ..db.common import database
 from ..db import MetadataDatabaseFactory
 from .login import require_user, User
+from ...pydantic_utcdatetime import CustomUTCDateTimeType
+
 
 # routes for login/logout
 router = APIRouter()
@@ -32,10 +33,10 @@ router = APIRouter()
 
 def get_metadata_query(
     category: MetadataCategory = None,
-    starttime: UTCDateTime = None,
-    endtime: UTCDateTime = None,
-    created_after: UTCDateTime = None,
-    created_before: UTCDateTime = None,
+    starttime: CustomUTCDateTimeType = None,
+    endtime: CustomUTCDateTimeType = None,
+    created_after: CustomUTCDateTimeType = None,
+    created_before: CustomUTCDateTimeType = None,
     network: str = None,
     station: str = None,
     channel: str = None,
diff --git a/geomagio/api/ws/DataApiQuery.py b/geomagio/api/ws/DataApiQuery.py
index 6cba83df81b4320223daf7551105e3bdf4609c7e..a0cfea71fc456bf40efd5a3b1052e8181d23f1b4 100644
--- a/geomagio/api/ws/DataApiQuery.py
+++ b/geomagio/api/ws/DataApiQuery.py
@@ -1,14 +1,14 @@
 import datetime
 import enum
 import os
-from typing import Dict, List, Union
+from typing import List, Optional
 
 from obspy import UTCDateTime
-from pydantic import BaseModel, root_validator, validator
+from pydantic import ConfigDict, field_validator, model_validator, Field, BaseModel
 
-from ... import pydantic_utcdatetime
 from .Element import ELEMENTS
 from .Observatory import OBSERVATORY_INDEX, ASL_OBSERVATORY_INDEX
+from ...pydantic_utcdatetime import CustomUTCDateTimeType
 
 
 DEFAULT_ELEMENTS = ["X", "Y", "Z", "F"]
@@ -51,43 +51,39 @@ class DataHost(str, enum.Enum):
     def values(cls) -> List[str]:
         return [t.value for t in cls]
 
+def default_starttime() -> UTCDateTime:
+    # default to start of current day
+    now = datetime.datetime.now(tz=datetime.timezone.utc)
+    return UTCDateTime(year=now.year, month=now.month, day=now.day)
 
 class DataApiQuery(BaseModel):
+    model_config = ConfigDict(extra="forbid")
+
     id: str
-    starttime: UTCDateTime = None
-    endtime: UTCDateTime = None
+    starttime: CustomUTCDateTimeType = Field(
+        default_factory=lambda: default_starttime())
+    # endtime default is dependent on start time, so it's handled after validation in the model_validator
+    endtime: Optional[CustomUTCDateTimeType] = None
     elements: List[str] = DEFAULT_ELEMENTS
     sampling_period: SamplingPeriod = SamplingPeriod.MINUTE
-    data_type: Union[DataType, str] = DataType.VARIATION
-    format: Union[OutputFormat, str] = OutputFormat.IAGA2002
-    data_host: Union[DataHost, str] = DataHost.DEFAULT
-
-    @validator("data_type")
-    def validate_data_type(
-        cls, data_type: Union[DataType, str]
-    ) -> Union[DataType, str]:
-        if data_type not in DataType.values() and len(data_type) != 2:
-            raise ValueError(
-                f"Bad data type value '{data_type}'."
-                f" Valid values are: {', '.join(DataType.values())}"
-            )
-        return data_type
+    data_type: DataType = DataType.VARIATION
+    format: OutputFormat = OutputFormat.IAGA2002
+    data_host: DataHost = DataHost.DEFAULT
 
-    @validator("data_host")
+    @field_validator("data_host", mode="before")
     def validate_data_host(
-        cls, data_host: Union[DataHost, str]
-    ) -> Union[DataHost, str]:
+        cls, data_host: DataHost
+    ) -> DataHost:
         if data_host not in DataHost.values():
             raise ValueError(
                 # don't advertise acceptable hosts
                 f"Bad data_host value '{data_host}'."
             )
         return data_host
-
-    @validator("elements", pre=True, always=True)
+
+    @field_validator("elements", mode="before")
+    @classmethod
     def validate_elements(cls, elements: List[str]) -> List[str]:
-        if not elements:
-            return DEFAULT_ELEMENTS
         if len(elements) == 1 and "," in elements[0]:
             elements = [e.strip() for e in elements[0].split(",")]
         for element in elements:
@@ -98,7 +94,7 @@ class DataApiQuery(BaseModel):
                 )
         return elements
 
-    @validator("id")
+    @field_validator("id", mode="before")
     def validate_id(cls, id: str) -> str:
         complete_observatory_index = {**OBSERVATORY_INDEX, **ASL_OBSERVATORY_INDEX}
         if id not in complete_observatory_index:
@@ -108,44 +104,20 @@ class DataApiQuery(BaseModel):
             )
         return id
 
-    @validator("starttime", always=True)
-    def validate_starttime(cls, starttime: UTCDateTime) -> UTCDateTime:
-        if not starttime:
-            # default to start of current day
-            now = datetime.datetime.now(tz=datetime.timezone.utc)
-            return UTCDateTime(year=now.year, month=now.month, day=now.day)
-        return starttime
-
-    @validator("endtime", always=True)
-    def validate_endtime(
-        cls, endtime: UTCDateTime, *, values: Dict, **kwargs
-    ) -> UTCDateTime:
-        """Default endtime is based on starttime.
-
-        This method needs to be after validate_starttime.
-        """
-        if not endtime:
-            # endtime defaults to 1 day after startime
-            starttime = values.get("starttime")
-            endtime = starttime + (86400 - 0.001)
-        return endtime
-
-    @root_validator
-    def validate_combinations(cls, values):
-        starttime, endtime, elements, format, sampling_period = (
-            values.get("starttime"),
-            values.get("endtime"),
-            values.get("elements"),
-            values.get("format"),
-            values.get("sampling_period"),
-        )
-        if len(elements) > 4 and format == "iaga2002":
+    @model_validator(mode="after")
+    def validate_combinations(self):
+        if len(self.elements) > 4 and self.format == "iaga2002":
             raise ValueError("No more than four elements allowed for iaga2002 format.")
-        if starttime > endtime:
+        if self.endtime is None:
+            # end time defaults to 1 day after start time
+            self.endtime = self.starttime + (86400 - 0.001)
+        if self.starttime > self.endtime:
             raise ValueError("Starttime must be before endtime.")
         # check data volume
-        samples = int(len(elements) * (endtime - starttime) / sampling_period)
+        samples = int(
+            len(self.elements) * (self.endtime - self.starttime) / self.sampling_period
+        )
         if samples > REQUEST_LIMIT:
             raise ValueError(f"Request exceeds limit ({samples} > {REQUEST_LIMIT})")
         # otherwise okay
-        return values
+        return self
diff --git a/geomagio/api/ws/Element.py b/geomagio/api/ws/Element.py
index b0df40d1da3f54d4c7d5d3d734bde9a18e6e80d4..4ef567fd6edab34b67dafb430f010a7f45643fe5 100644
--- a/geomagio/api/ws/Element.py
+++ b/geomagio/api/ws/Element.py
@@ -4,7 +4,7 @@ from pydantic import BaseModel
 
 class Element(BaseModel):
     id: str
-    abbreviation: Optional[str]
+    abbreviation: Optional[str] = None
     name: str
     units: str
 
diff --git a/geomagio/api/ws/FilterApiQuery.py b/geomagio/api/ws/FilterApiQuery.py
index dcae69ac9ba3519945d90f2be83ec98494888dfa..e21ddb36d6d44f3237399bf464bf8b981e1526e2 100644
--- a/geomagio/api/ws/FilterApiQuery.py
+++ b/geomagio/api/ws/FilterApiQuery.py
@@ -1,38 +1,27 @@
 from .DataApiQuery import DataApiQuery, SamplingPeriod, REQUEST_LIMIT
-from pydantic import root_validator
+from pydantic import ConfigDict, model_validator
 
-"""This script contains the class inheriting everything except input/output_sampling 
-period from the DataApiQuery class. This is where more specific functionailty 
-should be added for the FilterApiQuery endpoint if/when changes need to be made"""
+"""This class inherits all the fields and validation on DataApiQuery and adds
+the fields input_sampling_period and output_sampling_period."""
 
 
 class FilterApiQuery(DataApiQuery):
+    model_config = ConfigDict(extra="forbid")
+
     input_sampling_period: SamplingPeriod = SamplingPeriod.SECOND
     output_sampling_period: SamplingPeriod = SamplingPeriod.MINUTE
 
-    # Remove inherited fields that we don't need for this specific endpoint
-    class Config:
-        fields = {"sampling_period": {"exclude": True}}
-
-    @root_validator
-    def validate_combinations(cls, values):
-        starttime, endtime, elements, format, input_sampling_period = (
-            values.get("starttime"),
-            values.get("endtime"),
-            values.get("elements"),
-            values.get("format"),
-            values.get("input_sampling_period"),
-        )
-        if len(elements) > 4 and format == "iaga2002":
-            raise ValueError("No more than four elements allowed for iaga2002 format.")
-        if starttime > endtime:
-            raise ValueError("Starttime must be before endtime.")
-
+    @model_validator(mode="after")
+    def validate_sample_size(self):
         # Calculate the number of samples based on the input sampling period
-        samples = int(len(elements) * (endtime - starttime) / input_sampling_period)
+        samples = int(
+            len(self.elements)
+            * (self.endtime - self.starttime)
+            / self.input_sampling_period
+        )
 
         # Validate the request size
         if samples > REQUEST_LIMIT:
             raise ValueError(f"Request exceeds limit ({samples} > {REQUEST_LIMIT})")
 
-        return values
+        return self
diff --git a/geomagio/api/ws/Observatory.py b/geomagio/api/ws/Observatory.py
index a2dcd672f6480c588d139f9f5abb0fd709b83353..bfd6bb07420a015e6aed69b617987ed1262ca1fe 100644
--- a/geomagio/api/ws/Observatory.py
+++ b/geomagio/api/ws/Observatory.py
@@ -1,4 +1,4 @@
-from pydantic import BaseModel, validator
+from pydantic import BaseModel, field_validator
 from geomagio.ObservatoryMetadata import DEFAULT_METADATA
 from geomagio.VariometerMetadata import DEFAULT_ASL_METADATA
 from typing import Dict, Optional
@@ -10,8 +10,8 @@ class Observatory(BaseModel):
     name: str
     agency: str = None
     agency_name: str
-    declination_base: int = None
-    sensor_orientation: str = None
+    declination_base: Optional[int] = None
+    sensor_orientation: Optional[str] = None
     elevation: Optional[int] = None
     latitude: Optional[float] = None
     longitude: Optional[float] = None
@@ -36,7 +36,8 @@ class Observatory(BaseModel):
 
         super().__init__(**data)
 
-    @validator("latitude")
+    @field_validator("latitude")
+    @classmethod
     def validate_latitude(cls, latitude: Optional[float] = None) -> float:
         if latitude is None:
             return latitude
@@ -44,7 +45,8 @@ class Observatory(BaseModel):
             raise ValueError(f"latitude ({latitude}) out of range [-90, 90]")
         return latitude
 
-    @validator("longitude")
+    @field_validator("longitude")
+    @classmethod
     def validate_longitude(cls, longitude: Optional[float] = None) -> float:
         if longitude is None:
             return longitude
@@ -52,9 +54,10 @@
             raise ValueError(f"longitude ({longitude}) out of range [-360, 360]")
         return longitude
 
-    @validator("sensor_orientation", always=True)
-    def validate_sensor_orientation(cls, sensor_orientation: str, values: Dict) -> str:
-        agency = values.get("agency")
+    @field_validator("sensor_orientation")
+    @classmethod
+    def validate_sensor_orientation(cls, sensor_orientation: str, info) -> str:
+        agency = info.data.get("agency")
         if not sensor_orientation:
             if agency == "GSC":
                 sensor_orientation = "XYZF"
diff --git a/geomagio/api/ws/data.py b/geomagio/api/ws/data.py
index 853a63e8dde1c359f9cd4ebebc67997d9be5ced2..3a77dbb8a51b4cfe4641b728789c2c7e77f9f514 100644
--- a/geomagio/api/ws/data.py
+++ b/geomagio/api/ws/data.py
@@ -1,5 +1,5 @@
 import os
-from typing import List, Union
+from typing import List, Union, Optional
 
 from fastapi import APIRouter, Depends, Query, Request
 from obspy import UTCDateTime, Stream
@@ -19,6 +19,8 @@ from .DataApiQuery import (
     DataHost,
 )
 
+from ...pydantic_utcdatetime import CustomUTCDateTimeType
+
 
 def get_data_factory(
     query: DataApiQuery,
@@ -55,12 +57,12 @@ def get_data_factory(
 def get_data_query(
     request: Request,
     id: str = Query(..., title="Observatory code"),
-    starttime: UTCDateTime = Query(
+    starttime: Optional[CustomUTCDateTimeType] = Query(
         None,
         title="Start Time",
         description="Time of first requested data. Default is start of current UTC day.",
     ),
-    endtime: UTCDateTime = Query(
+    endtime: Optional[CustomUTCDateTimeType] = Query(
         None,
         title="End Time",
         description="Time of last requested data. Default is starttime plus 24 hours.",
diff --git a/geomagio/api/ws/filter.py b/geomagio/api/ws/filter.py
index 43c00602679604cdaeeabdbd3ce17bc578f99e48..54c5f6825a9922f74dfa55c4ba7776909a9d76f8 100644
--- a/geomagio/api/ws/filter.py
+++ b/geomagio/api/ws/filter.py
@@ -1,4 +1,4 @@
-from typing import List, Union
+from typing import List, Union, Optional
 from fastapi import Query
 from obspy import UTCDateTime, Stream
 from ... import TimeseriesFactory, TimeseriesUtility
@@ -10,16 +10,17 @@ from .DataApiQuery import (
     SamplingPeriod,
 )
 from .FilterApiQuery import FilterApiQuery
+from ...pydantic_utcdatetime import CustomUTCDateTimeType
 
 
 def get_filter_data_query(
     id: str = Query(..., title="Observatory code"),
-    starttime: UTCDateTime = Query(
+    starttime: Optional[CustomUTCDateTimeType] = Query(
         None,
         title="Start Time",
         description="Time of first requested data. Default is start of current UTC day.",
     ),
-    endtime: UTCDateTime = Query(None, title="End Time"),
+    endtime: Optional[CustomUTCDateTimeType] = Query(None, title="End Time"),
     elements: List[str] = Query(DEFAULT_ELEMENTS, title="Geomagnetic Elements"),
     data_type: Union[DataType, str] = Query(DataType.VARIATION, alias="type"),
     format: Union[OutputFormat, str] = Query(
diff --git a/geomagio/api/ws/metadata.py b/geomagio/api/ws/metadata.py
index 1775569a434dda86c04a731494ea0060db1980d0..cb265e4695c03125724a547992a220b2951e6c67 100644
--- a/geomagio/api/ws/metadata.py
+++ b/geomagio/api/ws/metadata.py
@@ -6,6 +6,8 @@ from obspy import UTCDateTime
 from ...metadata import Metadata, MetadataCategory, MetadataQuery
 from ..db.common import database
 from ..db import MetadataDatabaseFactory
+from ...pydantic_utcdatetime import CustomUTCDateTimeType
+
 
 router = APIRouter()
 
@@ -17,8 +19,8 @@ router = APIRouter()
 )
 async def get_metadata(
     category: MetadataCategory = None,
-    starttime: UTCDateTime = None,
-    endtime: UTCDateTime = None,
+    starttime: CustomUTCDateTimeType = None,
+    endtime: CustomUTCDateTimeType = None,
     network: str = None,
     station: str = None,
     channel: str = None,
diff --git a/geomagio/metadata/Metadata.py b/geomagio/metadata/Metadata.py
index 7c073317776baef7d8a08280aaeeaaa5989936f3..5f61483afd94a2bc9d6ec3db037fb9f6cf0a94f0 100644
--- a/geomagio/metadata/Metadata.py
+++ b/geomagio/metadata/Metadata.py
@@ -1,11 +1,11 @@
 from datetime import timezone
-from typing import Dict
+from typing import Dict, Optional
 
 from obspy import UTCDateTime
-from pydantic import BaseModel, validator
+from pydantic import field_validator, BaseModel
 
-from .. import pydantic_utcdatetime
 from .MetadataCategory import MetadataCategory
+from ..pydantic_utcdatetime import CustomUTCDateTimeType
 
 
 class Metadata(BaseModel):
@@ -51,33 +51,33 @@ class Metadata(BaseModel):
     # metadata history id referencing database id
     metadata_id: int = None
     # author
-    created_by: str = None
-    created_time: UTCDateTime = None
+    created_by: Optional[str] = None
+    created_time: Optional[CustomUTCDateTimeType] = None
     # editor
-    updated_by: str = None
-    updated_time: UTCDateTime = None
+    updated_by: Optional[str] = None
+    updated_time: Optional[CustomUTCDateTimeType] = None
     # time range
-    starttime: UTCDateTime = None
-    endtime: UTCDateTime = None
+    starttime: Optional[CustomUTCDateTimeType] = None
+    endtime: Optional[CustomUTCDateTimeType] = None
     # what data metadata references, null for wildcard
-    network: str = None
-    station: str = None
-    channel: str = None
-    location: str = None
+    network: Optional[str] = None
+    station: Optional[str] = None
+    channel: Optional[str] = None
+    location: Optional[str] = None
     # category (flag, matrix, etc)
-    category: MetadataCategory = None
+    category: Optional[MetadataCategory] = None
     # higher priority overrides lower priority
-    priority: int = 1
+    priority: Optional[int] = 1
     # whether data is valid (primarily for flags)
-    data_valid: bool = True
+    data_valid: Optional[bool] = True
     # metadata json blob
-    metadata: Dict = None
+    metadata: Optional[Dict] = None
     # general comment
-    comment: str = None
+    comment: Optional[str] = None
     # review specific comment
-    review_comment: str = None
+    review_comment: Optional[str] = None
     # metadata status indicator
-    status: str = None
+    status: Optional[str] = None
 
     def datetime_dict(self, **kwargs):
         values = self.dict(**kwargs)
@@ -86,6 +86,7 @@ class Metadata(BaseModel):
                 values[key] = values[key].datetime.replace(tzinfo=timezone.utc)
         return values
 
-    @validator("created_time")
+    @field_validator("created_time")
+    @classmethod
     def set_default_created_time(cls, created_time: UTCDateTime = None) -> UTCDateTime:
         return created_time or UTCDateTime()
diff --git a/geomagio/metadata/MetadataQuery.py b/geomagio/metadata/MetadataQuery.py
index 2a2e5638f103ee3699ca7fdf12aa83e4ea61e89b..7d4ac301e375838070d33ab341fbfee1496840b2 100644
--- a/geomagio/metadata/MetadataQuery.py
+++ b/geomagio/metadata/MetadataQuery.py
@@ -4,21 +4,21 @@ from obspy import UTCDateTime
 from pydantic import BaseModel
 from typing import List, Optional, Dict, Any
 
-from .. import pydantic_utcdatetime
 from .MetadataCategory import MetadataCategory
+from ..pydantic_utcdatetime import CustomUTCDateTimeType
 
 
 class MetadataQuery(BaseModel):
     id: int = None
-    category: MetadataCategory = None
-    starttime: UTCDateTime = None
-    endtime: UTCDateTime = None
-    created_after: UTCDateTime = None
-    created_before: UTCDateTime = None
-    network: str = None
-    station: str = None
-    channel: str = None
-    location: str = None
+    category: Optional[MetadataCategory] = None
+    starttime: Optional[CustomUTCDateTimeType] = None
+    endtime: Optional[CustomUTCDateTimeType] = None
+    created_after: Optional[CustomUTCDateTimeType] = None
+    created_before: Optional[CustomUTCDateTimeType] = None
+    network: Optional[str] = None
+    station: Optional[str] = None
+    channel: Optional[str] = None
+    location: Optional[str] = None
     data_valid: Optional[bool] = None
     metadata: Optional[Dict[str, Any]] = None
     status: Optional[List[str]] = None
diff --git a/geomagio/pydantic_utcdatetime.py b/geomagio/pydantic_utcdatetime.py
index e2d029938fd608b6cfab451278b9286b9a103850..6d6b56b1ebd8bf12fc3d90f420a5305743e95394 100644
--- a/geomagio/pydantic_utcdatetime.py
+++ b/geomagio/pydantic_utcdatetime.py
@@ -1,72 +1,64 @@
 """Configure pydantic to allow UTCDateTime attributes on models.
 """
 
-from datetime import datetime
-from typing import Any, Callable, Dict, List, Tuple, TypeVar, Union
-
-# FIXME: migrating to pydantic v2 should make this unnecessary
-#        (other changes will be needed though)
-from fastapi.encoders import ENCODERS_BY_TYPE
-
 from obspy import UTCDateTime
-from pydantic.errors import PydanticValueError
-import pydantic.json
-import pydantic.schema
-import pydantic.validators
-
-
-# placeholder type for register_custom_pydantic_type method
-CustomType = TypeVar("CustomType")
-
-
-def register_custom_pydantic_type(
-    custom_type: CustomType,
-    encoder: Callable[[CustomType], Any],
-    json_schema: Dict,
-    parsers: List[Callable[[Any], CustomType]],
-):
-    try:
-        if custom_type.__custom_pydantic_type__:
-            # already registered
-            return
-    except AttributeError:
-        # not registered yet
-        pass
-    # add encoder
-    pydantic.json.ENCODERS_BY_TYPE[custom_type] = encoder
-
-    # FIXME: migrating to pydantic v2 should make this unnecessary
-    #        (other changes will be needed though)
-    ENCODERS_BY_TYPE[custom_type] = encoder
-
-    # add openapi mapping
-    pydantic.schema.field_class_to_schema += ((custom_type, json_schema),)
-    # add validator
-    pydantic.validators._VALIDATORS.append((custom_type, parsers))
-    # mark as installed
-    custom_type.__custom_pydantic_type__ = True
-
-
-class UTCDateTimeError(PydanticValueError):
-    msg_template = "invalid date-time format"
-
-
-def format_utcdatetime(o: UTCDateTime) -> str:
-    return o.strftime("%Y-%m-%dT%H:%M:%S.%fZ")
-
-
-def parse_utcdatetime(
-    value: Union[datetime, float, int, str, UTCDateTime]
-) -> UTCDateTime:
-    try:
-        return UTCDateTime(value)
-    except:
-        raise UTCDateTimeError()
-
-
-register_custom_pydantic_type(
-    UTCDateTime,
-    encoder=format_utcdatetime,
-    json_schema={"type": "string", "format": "date-time"},
-    parsers=[parse_utcdatetime],
+from pydantic_core import CoreSchema, core_schema
+from typing import Annotated, Any
+
+from pydantic import (
+    GetCoreSchemaHandler,
+    GetJsonSchemaHandler,
+    PydanticUserError,
+    TypeAdapter,
+    ValidationError,
 )
+
+from pydantic.json_schema import JsonSchemaValue
+
+
+class CustomUTCDateTimeValidator:
+    @classmethod
+    def __get_pydantic_core_schema__(
+        cls,
+        _source_type: Any,
+        _handler: GetCoreSchemaHandler,
+    ) -> CoreSchema:
+        def UTCDateTime_validator(value: Any):
+            try:
+                time = UTCDateTime(value)
+            except Exception:
+                raise ValueError(
+                    "Invalid time type. Expected format is '%Y-%m-%dT%H:%M:%S.%fZ'"
+                )
+            return time
+
+        schema = core_schema.chain_schema(
+            [
+                core_schema.any_schema(),
+                core_schema.no_info_plain_validator_function(UTCDateTime_validator),
+            ],
+        )
+
+        schema = core_schema.json_or_python_schema(
+            json_schema=schema,
+            python_schema=core_schema.union_schema(
+                [
+                    # check if it's an instance first before doing any further work
+                    core_schema.is_instance_schema(UTCDateTime),
+                    schema,
+                ]
+            ),
+            serialization=core_schema.plain_serializer_function_ser_schema(
+                lambda instance: instance.strftime("%Y-%m-%dT%H:%M:%S.%fZ")
+            ),
+        )
+        return schema
+
+    @classmethod
+    def __get_pydantic_json_schema__(
+        cls, _core_schema: core_schema.CoreSchema, handler: GetJsonSchemaHandler
+    ) -> JsonSchemaValue:
+        return handler(core_schema.str_schema())
+
+
+CustomUTCDateTimeType = Annotated[UTCDateTime, CustomUTCDateTimeValidator]
diff --git a/geomagio/residual/Absolute.py b/geomagio/residual/Absolute.py
index d927757af2167ffc771d6bf7252580ff7a49dae3..1339ad5f6b8f8c29acaab411ca5320d63a442a71 100644
--- a/geomagio/residual/Absolute.py
+++ b/geomagio/residual/Absolute.py
@@ -2,8 +2,7 @@ from typing import Optional
 
 from obspy import UTCDateTime
 from pydantic import BaseModel
-
-from .. import pydantic_utcdatetime
+from ..pydantic_utcdatetime import CustomUTCDateTimeType
 
 
 class Absolute(BaseModel):
@@ -25,8 +24,9 @@ class Absolute(BaseModel):
     element: str
     absolute: Optional[float] = None
     baseline: Optional[float] = None
-    starttime: Optional[UTCDateTime] = None
-    endtime: Optional[UTCDateTime] = None
+    starttime: Optional[CustomUTCDateTimeType] = None
+    endtime: Optional[CustomUTCDateTimeType] = None
+
     shift: float = 0
     valid: bool = True
 
diff --git a/geomagio/residual/Measurement.py b/geomagio/residual/Measurement.py
index f8be194375c5a57327770eb1a1d39ff2ec7cad1e..e7454fa318e0c69a6d1b5a2950bea492b7580407 100644
--- a/geomagio/residual/Measurement.py
+++ b/geomagio/residual/Measurement.py
@@ -5,8 +5,8 @@ import numpy
 from obspy.core import UTCDateTime
 from pydantic import BaseModel
 
-from .. import pydantic_utcdatetime
 from .MeasurementType import MeasurementType
+from ..pydantic_utcdatetime import CustomUTCDateTimeType
 
 
 class Measurement(BaseModel):
@@ -23,7 +23,7 @@ class Measurement(BaseModel):
     measurement_type: MeasurementType
     angle: float = 0
     residual: Optional[float] = None
-    time: Optional[UTCDateTime] = None
+    time: Optional[CustomUTCDateTimeType] = None
     h: Optional[float] = None
     e: Optional[float] = None
     z: Optional[float] = None
@@ -31,7 +31,7 @@ class Measurement(BaseModel):
 
 
 class AverageMeasurement(Measurement):
-    endtime: Optional[UTCDateTime] = None
+    endtime: Optional[CustomUTCDateTimeType] = None
 
 
 def average_measurement(
diff --git a/geomagio/residual/Reading.py b/geomagio/residual/Reading.py
index 75648d9b4882f96bb22cf5ac4f2c9c708a884787..b6292f1b42a2487965a4d6241df9e8e6099b1b96 100644
--- a/geomagio/residual/Reading.py
+++ b/geomagio/residual/Reading.py
@@ -38,7 +38,7 @@ class Reading(BaseModel):
     measurements: List[Measurement] = []
     metadata: Dict = {}
     pier_correction: float = 0
-    scale_value: float = None
+    scale_value: Optional[float] = None
 
     def __getitem__(self, measurement_type: MeasurementType):
         """Provide access to measurements by type.
diff --git a/poetry.lock b/poetry.lock
index 59d31bf88d16f78f19d4f818ec4f674905b1e16c..74cc612c809120f424e93dbb51e1c5fb26fda85a 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand.
 
 [[package]]
 name = "aiomysql"
@@ -57,6 +57,20 @@ typing-extensions = ">=4"
 [package.extras]
 tz = ["backports.zoneinfo"]
 
+[[package]]
+name = "annotated-types"
+version = "0.7.0"
+description = "Reusable constraint types to use with typing.Annotated"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"},
+    {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"},
+]
+
+[package.dependencies]
+typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.9\""}
+
 [[package]]
 name = "anyio"
 version = "3.7.1"
@@ -1909,62 +1923,124 @@ files = [
 
 [[package]]
 name = "pydantic"
-version = "1.10.18"
-description = "Data validation and settings management using python type hints"
+version = "2.9.2"
+description = "Data validation using Python type hints"
 optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
 files = [
-    {file = "pydantic-1.10.18-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e405ffcc1254d76bb0e760db101ee8916b620893e6edfbfee563b3c6f7a67c02"},
-    {file = "pydantic-1.10.18-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e306e280ebebc65040034bff1a0a81fd86b2f4f05daac0131f29541cafd80b80"},
-    {file = "pydantic-1.10.18-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11d9d9b87b50338b1b7de4ebf34fd29fdb0d219dc07ade29effc74d3d2609c62"},
-    {file = "pydantic-1.10.18-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b661ce52c7b5e5f600c0c3c5839e71918346af2ef20062705ae76b5c16914cab"},
-    {file = "pydantic-1.10.18-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c20f682defc9ef81cd7eaa485879ab29a86a0ba58acf669a78ed868e72bb89e0"},
-    {file = "pydantic-1.10.18-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c5ae6b7c8483b1e0bf59e5f1843e4fd8fd405e11df7de217ee65b98eb5462861"},
-    {file = "pydantic-1.10.18-cp310-cp310-win_amd64.whl", hash = "sha256:74fe19dda960b193b0eb82c1f4d2c8e5e26918d9cda858cbf3f41dd28549cb70"},
-    {file = "pydantic-1.10.18-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:72fa46abace0a7743cc697dbb830a41ee84c9db8456e8d77a46d79b537efd7ec"},
-    {file = "pydantic-1.10.18-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ef0fe7ad7cbdb5f372463d42e6ed4ca9c443a52ce544472d8842a0576d830da5"},
-    {file = "pydantic-1.10.18-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a00e63104346145389b8e8f500bc6a241e729feaf0559b88b8aa513dd2065481"},
-    {file = "pydantic-1.10.18-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae6fa2008e1443c46b7b3a5eb03800121868d5ab6bc7cda20b5df3e133cde8b3"},
-    {file = "pydantic-1.10.18-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:9f463abafdc92635da4b38807f5b9972276be7c8c5121989768549fceb8d2588"},
-    {file = "pydantic-1.10.18-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3445426da503c7e40baccefb2b2989a0c5ce6b163679dd75f55493b460f05a8f"},
-    {file = "pydantic-1.10.18-cp311-cp311-win_amd64.whl", hash = "sha256:467a14ee2183bc9c902579bb2f04c3d3dac00eff52e252850509a562255b2a33"},
-    {file = "pydantic-1.10.18-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:efbc8a7f9cb5fe26122acba1852d8dcd1e125e723727c59dcd244da7bdaa54f2"},
-    {file = "pydantic-1.10.18-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:24a4a159d0f7a8e26bf6463b0d3d60871d6a52eac5bb6a07a7df85c806f4c048"},
-    {file = "pydantic-1.10.18-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b74be007703547dc52e3c37344d130a7bfacca7df112a9e5ceeb840a9ce195c7"},
-    {file = "pydantic-1.10.18-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fcb20d4cb355195c75000a49bb4a31d75e4295200df620f454bbc6bdf60ca890"},
-    {file = "pydantic-1.10.18-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:46f379b8cb8a3585e3f61bf9ae7d606c70d133943f339d38b76e041ec234953f"},
-    {file = "pydantic-1.10.18-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:cbfbca662ed3729204090c4d09ee4beeecc1a7ecba5a159a94b5a4eb24e3759a"},
-    {file = "pydantic-1.10.18-cp312-cp312-win_amd64.whl", hash = "sha256:c6d0a9f9eccaf7f438671a64acf654ef0d045466e63f9f68a579e2383b63f357"},
-    {file = "pydantic-1.10.18-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3d5492dbf953d7d849751917e3b2433fb26010d977aa7a0765c37425a4026ff1"},
-    {file = "pydantic-1.10.18-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe734914977eed33033b70bfc097e1baaffb589517863955430bf2e0846ac30f"},
-    {file = "pydantic-1.10.18-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:15fdbe568beaca9aacfccd5ceadfb5f1a235087a127e8af5e48df9d8a45ae85c"},
-    {file = "pydantic-1.10.18-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c3e742f62198c9eb9201781fbebe64533a3bbf6a76a91b8d438d62b813079dbc"},
-    {file = "pydantic-1.10.18-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:19a3bd00b9dafc2cd7250d94d5b578edf7a0bd7daf102617153ff9a8fa37871c"},
-    {file = "pydantic-1.10.18-cp37-cp37m-win_amd64.whl", hash = "sha256:2ce3fcf75b2bae99aa31bd4968de0474ebe8c8258a0110903478bd83dfee4e3b"},
-    {file = "pydantic-1.10.18-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:335a32d72c51a313b33fa3a9b0fe283503272ef6467910338e123f90925f0f03"},
-    {file = "pydantic-1.10.18-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:34a3613c7edb8c6fa578e58e9abe3c0f5e7430e0fc34a65a415a1683b9c32d9a"},
-    {file = "pydantic-1.10.18-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9ee4e6ca1d9616797fa2e9c0bfb8815912c7d67aca96f77428e316741082a1b"},
-    {file = "pydantic-1.10.18-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:23e8ec1ce4e57b4f441fc91e3c12adba023fedd06868445a5b5f1d48f0ab3682"},
-    {file = "pydantic-1.10.18-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:44ae8a3e35a54d2e8fa88ed65e1b08967a9ef8c320819a969bfa09ce5528fafe"},
-    {file = "pydantic-1.10.18-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5389eb3b48a72da28c6e061a247ab224381435256eb541e175798483368fdd3"},
-    {file = "pydantic-1.10.18-cp38-cp38-win_amd64.whl", hash = "sha256:069b9c9fc645474d5ea3653788b544a9e0ccd3dca3ad8c900c4c6eac844b4620"},
-    {file = "pydantic-1.10.18-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:80b982d42515632eb51f60fa1d217dfe0729f008e81a82d1544cc392e0a50ddf"},
-    {file = "pydantic-1.10.18-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:aad8771ec8dbf9139b01b56f66386537c6fe4e76c8f7a47c10261b69ad25c2c9"},
-    {file = "pydantic-1.10.18-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941a2eb0a1509bd7f31e355912eb33b698eb0051730b2eaf9e70e2e1589cae1d"},
-    {file = "pydantic-1.10.18-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:65f7361a09b07915a98efd17fdec23103307a54db2000bb92095457ca758d485"},
-    {file = "pydantic-1.10.18-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6951f3f47cb5ca4da536ab161ac0163cab31417d20c54c6de5ddcab8bc813c3f"},
-    {file = "pydantic-1.10.18-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7a4c5eec138a9b52c67f664c7d51d4c7234c5ad65dd8aacd919fb47445a62c86"},
-    {file = "pydantic-1.10.18-cp39-cp39-win_amd64.whl", hash = "sha256:49e26c51ca854286bffc22b69787a8d4063a62bf7d83dc21d44d2ff426108518"},
-    {file = "pydantic-1.10.18-py3-none-any.whl", hash = "sha256:06a189b81ffc52746ec9c8c007f16e5167c8b0a696e1a726369327e3db7b2a82"},
-    {file = "pydantic-1.10.18.tar.gz", hash = "sha256:baebdff1907d1d96a139c25136a9bb7d17e118f133a76a2ef3b845e831e3403a"},
+    {file = "pydantic-2.9.2-py3-none-any.whl", hash = "sha256:f048cec7b26778210e28a0459867920654d48e5e62db0958433636cde4254f12"},
+    {file = "pydantic-2.9.2.tar.gz", hash = "sha256:d155cef71265d1e9807ed1c32b4c8deec042a44a50a4188b25ac67ecd81a9c0f"},
 ]
 
 [package.dependencies]
-typing-extensions = ">=4.2.0"
+annotated-types = ">=0.6.0"
+pydantic-core = "2.23.4"
+typing-extensions = {version = ">=4.6.1", markers = "python_version < \"3.13\""}
 
 [package.extras]
-dotenv = ["python-dotenv (>=0.10.4)"]
-email = ["email-validator (>=1.0.3)"]
+email = ["email-validator (>=2.0.0)"]
+timezone = ["tzdata"]
+
+[[package]]
+name = "pydantic-core"
+version = "2.23.4"
+description = "Core functionality for Pydantic validation and serialization"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "pydantic_core-2.23.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:b10bd51f823d891193d4717448fab065733958bdb6a6b351967bd349d48d5c9b"},
+    {file = "pydantic_core-2.23.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4fc714bdbfb534f94034efaa6eadd74e5b93c8fa6315565a222f7b6f42ca1166"},
+    {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63e46b3169866bd62849936de036f901a9356e36376079b05efa83caeaa02ceb"},
+    {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed1a53de42fbe34853ba90513cea21673481cd81ed1be739f7f2efb931b24916"},
+    {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cfdd16ab5e59fc31b5e906d1a3f666571abc367598e3e02c83403acabc092e07"},
+    {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255a8ef062cbf6674450e668482456abac99a5583bbafb73f9ad469540a3a232"},
+    {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a7cd62e831afe623fbb7aabbb4fe583212115b3ef38a9f6b71869ba644624a2"},
+    {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f09e2ff1f17c2b51f2bc76d1cc33da96298f0a036a137f5440ab3ec5360b624f"},
+    {file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e38e63e6f3d1cec5a27e0afe90a085af8b6806ee208b33030e65b6516353f1a3"},
+    {file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0dbd8dbed2085ed23b5c04afa29d8fd2771674223135dc9bc937f3c09284d071"},
+    {file = "pydantic_core-2.23.4-cp310-none-win32.whl", hash = "sha256:6531b7ca5f951d663c339002e91aaebda765ec7d61b7d1e3991051906ddde119"},
+    {file = "pydantic_core-2.23.4-cp310-none-win_amd64.whl", hash = "sha256:7c9129eb40958b3d4500fa2467e6a83356b3b61bfff1b414c7361d9220f9ae8f"},
+    {file = "pydantic_core-2.23.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:77733e3892bb0a7fa797826361ce8a9184d25c8dffaec60b7ffe928153680ba8"},
+    {file = "pydantic_core-2.23.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b84d168f6c48fabd1f2027a3d1bdfe62f92cade1fb273a5d68e621da0e44e6d"},
+    {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df49e7a0861a8c36d089c1ed57d308623d60416dab2647a4a17fe050ba85de0e"},
+    {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ff02b6d461a6de369f07ec15e465a88895f3223eb75073ffea56b84d9331f607"},
+    {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:996a38a83508c54c78a5f41456b0103c30508fed9abcad0a59b876d7398f25fd"},
+    {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d97683ddee4723ae8c95d1eddac7c192e8c552da0c73a925a89fa8649bf13eea"},
+    {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:216f9b2d7713eb98cb83c80b9c794de1f6b7e3145eef40400c62e86cee5f4e1e"},
+    {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6f783e0ec4803c787bcea93e13e9932edab72068f68ecffdf86a99fd5918878b"},
+    {file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d0776dea117cf5272382634bd2a5c1b6eb16767c223c6a5317cd3e2a757c61a0"},
+    {file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d5f7a395a8cf1621939692dba2a6b6a830efa6b3cee787d82c7de1ad2930de64"},
+    {file = "pydantic_core-2.23.4-cp311-none-win32.whl", hash = "sha256:74b9127ffea03643e998e0c5ad9bd3811d3dac8c676e47db17b0ee7c3c3bf35f"},
+    {file = "pydantic_core-2.23.4-cp311-none-win_amd64.whl", hash = "sha256:98d134c954828488b153d88ba1f34e14259284f256180ce659e8d83e9c05eaa3"},
+    {file = "pydantic_core-2.23.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f3e0da4ebaef65158d4dfd7d3678aad692f7666877df0002b8a522cdf088f231"},
+    {file = "pydantic_core-2.23.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f69a8e0b033b747bb3e36a44e7732f0c99f7edd5cea723d45bc0d6e95377ffee"},
+    {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:723314c1d51722ab28bfcd5240d858512ffd3116449c557a1336cbe3919beb87"},
+    {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bb2802e667b7051a1bebbfe93684841cc9351004e2badbd6411bf357ab8d5ac8"},
+    {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d18ca8148bebe1b0a382a27a8ee60350091a6ddaf475fa05ef50dc35b5df6327"},
+    {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33e3d65a85a2a4a0dc3b092b938a4062b1a05f3a9abde65ea93b233bca0e03f2"},
+    {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:128585782e5bfa515c590ccee4b727fb76925dd04a98864182b22e89a4e6ed36"},
+    {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:68665f4c17edcceecc112dfed5dbe6f92261fb9d6054b47d01bf6371a6196126"},
+    {file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:20152074317d9bed6b7a95ade3b7d6054845d70584216160860425f4fbd5ee9e"},
+    {file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9261d3ce84fa1d38ed649c3638feefeae23d32ba9182963e465d58d62203bd24"},
+    {file = "pydantic_core-2.23.4-cp312-none-win32.whl", hash = "sha256:4ba762ed58e8d68657fc1281e9bb72e1c3e79cc5d464be146e260c541ec12d84"},
+    {file = "pydantic_core-2.23.4-cp312-none-win_amd64.whl", hash = "sha256:97df63000f4fea395b2824da80e169731088656d1818a11b95f3b173747b6cd9"},
+    {file = "pydantic_core-2.23.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7530e201d10d7d14abce4fb54cfe5b94a0aefc87da539d0346a484ead376c3cc"},
+    {file = "pydantic_core-2.23.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:df933278128ea1cd77772673c73954e53a1c95a4fdf41eef97c2b779271bd0bd"},
+    {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cb3da3fd1b6a5d0279a01877713dbda118a2a4fc6f0d821a57da2e464793f05"},
+    {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c6dcb030aefb668a2b7009c85b27f90e51e6a3b4d5c9bc4c57631292015b0d"},
+    {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:696dd8d674d6ce621ab9d45b205df149399e4bb9aa34102c970b721554828510"},
+    {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2971bb5ffe72cc0f555c13e19b23c85b654dd2a8f7ab493c262071377bfce9f6"},
+    {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8394d940e5d400d04cad4f75c0598665cbb81aecefaca82ca85bd28264af7f9b"},
+    {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0dff76e0602ca7d4cdaacc1ac4c005e0ce0dcfe095d5b5259163a80d3a10d327"},
+    {file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7d32706badfe136888bdea71c0def994644e09fff0bfe47441deaed8e96fdbc6"},
+    {file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ed541d70698978a20eb63d8c5d72f2cc6d7079d9d90f6b50bad07826f1320f5f"},
+    {file = "pydantic_core-2.23.4-cp313-none-win32.whl", hash = "sha256:3d5639516376dce1940ea36edf408c554475369f5da2abd45d44621cb616f769"},
+    {file = "pydantic_core-2.23.4-cp313-none-win_amd64.whl", hash = "sha256:5a1504ad17ba4210df3a045132a7baeeba5a200e930f57512ee02909fc5c4cb5"},
+    {file = "pydantic_core-2.23.4-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d4488a93b071c04dc20f5cecc3631fc78b9789dd72483ba15d423b5b3689b555"},
+    {file = "pydantic_core-2.23.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:81965a16b675b35e1d09dd14df53f190f9129c0202356ed44ab2728b1c905658"},
+    {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ffa2ebd4c8530079140dd2d7f794a9d9a73cbb8e9d59ffe24c63436efa8f271"},
+    {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:61817945f2fe7d166e75fbfb28004034b48e44878177fc54d81688e7b85a3665"},
+    {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:29d2c342c4bc01b88402d60189f3df065fb0dda3654744d5a165a5288a657368"},
+    {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5e11661ce0fd30a6790e8bcdf263b9ec5988e95e63cf901972107efc49218b13"},
+    {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d18368b137c6295db49ce7218b1a9ba15c5bc254c96d7c9f9e924a9bc7825ad"},
+    {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ec4e55f79b1c4ffb2eecd8a0cfba9955a2588497d96851f4c8f99aa4a1d39b12"},
+    {file = "pydantic_core-2.23.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:374a5e5049eda9e0a44c696c7ade3ff355f06b1fe0bb945ea3cac2bc336478a2"},
+    {file = "pydantic_core-2.23.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5c364564d17da23db1106787675fc7af45f2f7b58b4173bfdd105564e132e6fb"},
+    {file = "pydantic_core-2.23.4-cp38-none-win32.whl", hash = "sha256:d7a80d21d613eec45e3d41eb22f8f94ddc758a6c4720842dc74c0581f54993d6"},
+    {file = "pydantic_core-2.23.4-cp38-none-win_amd64.whl", hash = "sha256:5f5ff8d839f4566a474a969508fe1c5e59c31c80d9e140566f9a37bba7b8d556"},
+    {file = "pydantic_core-2.23.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a4fa4fc04dff799089689f4fd502ce7d59de529fc2f40a2c8836886c03e0175a"},
+    {file = "pydantic_core-2.23.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0a7df63886be5e270da67e0966cf4afbae86069501d35c8c1b3b6c168f42cb36"},
+    {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcedcd19a557e182628afa1d553c3895a9f825b936415d0dbd3cd0bbcfd29b4b"},
+    {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f54b118ce5de9ac21c363d9b3caa6c800341e8c47a508787e5868c6b79c9323"},
+    {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86d2f57d3e1379a9525c5ab067b27dbb8a0642fb5d454e17a9ac434f9ce523e3"},
+    {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de6d1d1b9e5101508cb37ab0d972357cac5235f5c6533d1071964c47139257df"},
+    {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1278e0d324f6908e872730c9102b0112477a7f7cf88b308e4fc36ce1bdb6d58c"},
+    {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9a6b5099eeec78827553827f4c6b8615978bb4b6a88e5d9b93eddf8bb6790f55"},
+    {file = "pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e55541f756f9b3ee346b840103f32779c695a19826a4c442b7954550a0972040"},
+    {file = "pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a5c7ba8ffb6d6f8f2ab08743be203654bb1aaa8c9dcb09f82ddd34eadb695605"},
+    {file = "pydantic_core-2.23.4-cp39-none-win32.whl", hash = "sha256:37b0fe330e4a58d3c58b24d91d1eb102aeec675a3db4c292ec3928ecd892a9a6"},
+    {file = "pydantic_core-2.23.4-cp39-none-win_amd64.whl", hash = "sha256:1498bec4c05c9c787bde9125cfdcc63a41004ff167f495063191b863399b1a29"},
+    {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f455ee30a9d61d3e1a15abd5068827773d6e4dc513e795f380cdd59932c782d5"},
+    {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1e90d2e3bd2c3863d48525d297cd143fe541be8bbf6f579504b9712cb6b643ec"},
+    {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e203fdf807ac7e12ab59ca2bfcabb38c7cf0b33c41efeb00f8e5da1d86af480"},
+    {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e08277a400de01bc72436a0ccd02bdf596631411f592ad985dcee21445bd0068"},
+    {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f220b0eea5965dec25480b6333c788fb72ce5f9129e8759ef876a1d805d00801"},
+    {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d06b0c8da4f16d1d1e352134427cb194a0a6e19ad5db9161bf32b2113409e728"},
+    {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ba1a0996f6c2773bd83e63f18914c1de3c9dd26d55f4ac302a7efe93fb8e7433"},
+    {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:9a5bce9d23aac8f0cf0836ecfc033896aa8443b501c58d0602dbfd5bd5b37753"},
+    {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:78ddaaa81421a29574a682b3179d4cf9e6d405a09b99d93ddcf7e5239c742e21"},
+    {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:883a91b5dd7d26492ff2f04f40fbb652de40fcc0afe07e8129e8ae779c2110eb"},
+    {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88ad334a15b32a791ea935af224b9de1bf99bcd62fabf745d5f3442199d86d59"},
+    {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:233710f069d251feb12a56da21e14cca67994eab08362207785cf8c598e74577"},
+    {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:19442362866a753485ba5e4be408964644dd6a09123d9416c54cd49171f50744"},
+    {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:624e278a7d29b6445e4e813af92af37820fafb6dcc55c012c834f9e26f9aaaef"},
+    {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f5ef8f42bec47f21d07668a043f077d507e5bf4e668d5c6dfe6aaba89de1a5b8"},
+    {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:aea443fffa9fbe3af1a9ba721a87f926fe548d32cab71d188a6ede77d0ff244e"},
+    {file = "pydantic_core-2.23.4.tar.gz", hash = "sha256:2584f7cf844ac4d970fba483a717dbe10c1c1c96a969bf65d61ffe94df1b2863"},
+]
+
+[package.dependencies]
+typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0"
 
 [[package]]
 name = "pymysql"
@@ -2840,4 +2916,4 @@ pycurl = ["pycurl"]
 [metadata]
 lock-version = "2.0"
 python-versions = ">=3.8,<3.12"
-content-hash = "19e498dcfd2120466c52b5847f0eb60fee104db8faac95396f8f7b3680866af9"
+content-hash = "fd766e42eaeb0d161fb69ba57b8df09adbf265f37bd0f65321bfad3aee9f2130"
diff --git a/pyproject.toml b/pyproject.toml
index f79cf7a1b65fd2fe5243b2c92f9e174cdcc69557..cf5cbc54a8ec015debf2dfc960fceac362ad6a3b 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -27,7 +27,7 @@ numpy = "^1.23.4"
 obspy = "^1.3.1"
 openpyxl = "^3.0.10"
 pycurl = {version = "^7.45.1", optional = true}
-pydantic = "^1.10.2"
+pydantic = "2.9.2"
 requests = "^2.28.1"
 scipy = "^1.9.3"
 typer = "^0.6.1"
diff --git a/test/DataApiQuery_test.py b/test/DataApiQuery_test.py
new file mode 100644
index 0000000000000000000000000000000000000000..5cac94e20f2ae93b9c603065782e4970faf1dae8
--- /dev/null
+++ b/test/DataApiQuery_test.py
@@ -0,0 +1,203 @@
+import datetime
+from numpy.testing import assert_equal
+from obspy import UTCDateTime
+
+from geomagio.api.ws.DataApiQuery import (
+    DataApiQuery,
+    SamplingPeriod,
+    DataType,
+    OutputFormat,
+    DataHost
+)
+
+def test_DataApiQuery_defaults():
+    query = DataApiQuery(id="BOU")
+
+    now = datetime.datetime.now(tz=datetime.timezone.utc)
+    expected_start_time = UTCDateTime(year=now.year, month=now.month, day=now.day)
+    expected_endtime = expected_start_time + (86400 - 0.001)
+
+    assert_equal(query.id, "BOU")
+    assert_equal(query.starttime, expected_start_time)
+    assert_equal(query.endtime, expected_endtime)
+    assert_equal(query.elements, ["X", "Y", "Z", "F"])
+    assert_equal(query.sampling_period, SamplingPeriod.MINUTE)
+    assert_equal(query.data_type, DataType.VARIATION)
+    assert_equal(query.format, OutputFormat.IAGA2002)
+    assert_equal(query.data_host, DataHost.DEFAULT)
+
+def test_DataApiQuery_valid():
+    query = DataApiQuery(
+        id="DED",
+        starttime="2024-09-01T00:00:01",
+        endtime="2024-09-01T01:00:01",
+        elements=["F"],
+        sampling_period=1,
+        data_type="adjusted",
+        format="json",
+        data_host="cwbpub.cr.usgs.gov",
+    )
+
+    assert_equal(query.id, "DED")
+    assert_equal(query.starttime, UTCDateTime("2024-09-01T00:00:01"))
+    assert_equal(query.endtime, UTCDateTime("2024-09-01T01:00:01"))
+    assert_equal(query.elements, ["F"])
+    assert_equal(query.sampling_period, SamplingPeriod.SECOND)
+    assert_equal(query.data_type, DataType.ADJUSTED)
+    assert_equal(query.format, OutputFormat.JSON)
+    assert_equal(query.data_host, DataHost.CWBPUB)
+
+def test_DataApiQuery_no_id():
+    query = None
+    try:
+        query = DataApiQuery()
+    except Exception as e:
+        err = e.errors()
+        assert "Field required" == err[0]["msg"]
+    
+    assert_equal(query, None)
+
+def test_DataApiQuery_invalid_id():
+    query = None
+    try:
+        query = DataApiQuery(id="YMCA")
+    except Exception as e:
+        err = e.errors()
+        assert "Value error, Bad observatory id 'YMCA'. Valid values are: ANMO, BDT, BLC, BOU, BRD, BRT, BRW, BSL, CASY, CBB, CMO, CMT, COLA, COR, DED, DGMT, DHT, DWPF, E46A, E62A, ECSD, EUA, EYMN, FCC, FDT, FRD, FRN, GOGA, GUA, GUT, HAD, HER, HON, HOT, HRV, IQA, KAK, KBS, KONO, KSU1, MEA, NEW, O20A, OTT, PAB, QSPA, RES, RSSD, SBA, SFJD, SHU, SIT, SJG, SJT, SNK, SSPA, STJ, TST, TUC, USGS, VIC, WCI, YKC." == err[0]["msg"]
+    
+    assert_equal(query, None)
+
+
+def test_DataApiQuery_default_endtime():
+    query = DataApiQuery(id="BOU", starttime="2024-11-01T00:00:01")
+
+    assert_equal(query.id, "BOU")
+    assert_equal(query.starttime, UTCDateTime("2024-11-01T00:00:01"))
+    # endtime is 1 day after start time
+    assert_equal(query.endtime, UTCDateTime("2024-11-02T00:00:00.999"))
+    assert_equal(query.elements, ["X", "Y", "Z", "F"])
+    assert_equal(query.sampling_period, SamplingPeriod.MINUTE)
+    assert_equal(query.data_type, DataType.VARIATION)
+    assert_equal(query.format, OutputFormat.IAGA2002)
+    assert_equal(query.data_host, DataHost.DEFAULT)
+
+def test_DataApiQuery_starttime_after_endtime():
+    query = None
+    try:
+        query = DataApiQuery(
+            id="BOU", 
+            starttime="2024-11-01T00:00:01",
+            endtime="2024-10-01T00:00:01"
+        )
+    except Exception as e:
+        err = e.errors()
+        assert "Value error, Starttime must be before endtime." == err[0]["msg"]
+        
+    assert_equal(query, None)
+
+def test_DataApiQuery_invalid_starttime():
+    query = None
+    try:
+        query = DataApiQuery(
+            id="BOU", 
+            starttime="November 8th 2024 12:00pm",
+        )
+    except Exception as e:
+        err = e.errors()
+        assert "Input should be an instance of UTCDateTime" == err[0]["msg"]
+        
+    assert_equal(query, None)
+
+def test_DataApiQuery_invalid_endtime():
+    query = None
+    try:
+        query = DataApiQuery(
+            id="BOU", 
+            starttime="2024-11-01T00:00:01",
+            endtime="Add one day",
+        )
+    except Exception as e:
+        err = e.errors()
+        assert "Input should be an instance of UTCDateTime" == err[0]["msg"]
+        
+    assert_equal(query, None)
+
+def test_DataApiQuery_invalid_list_elements():
+    query = None
+    try:
+        query = DataApiQuery(id="BOU", elements="XYZF")
+    except Exception as e:
+        err = e.errors()
+        assert "Input should be a valid list" == err[0]["msg"]
+    
+    assert_equal(query, None)
+
+def test_DataApiQuery_invalid_element():
+    query = None
+    try:
+        query = DataApiQuery(id="BOU", elements=["A"])
+    except Exception as e:
+        err = e.errors()
+        assert "Value error, Bad element 'A'. Valid values are: U, V, W, H, E, X, Y, D, Z, F, G, DIST, E-E, E-N, SQ, SV, UK1, UK2, UK3, UK4." == err[0]["msg"]
+    
+    assert_equal(query, None)
+
+def test_DataApiQuery_invalid_elements():
+    query = None
+    try:
+        query = DataApiQuery(id="BOU", elements=["Z","A","F"])
+    except Exception as e:
+        err = e.errors()
+        assert "Value error, Bad element 'A'. Valid values are: U, V, W, H, E, X, Y, D, Z, F, G, DIST, E-E, E-N, SQ, SV, UK1, UK2, UK3, UK4." == err[0]["msg"]
+    
+    assert_equal(query, None)
+
+def test_DataApiQuery_invalid_sampling_period():
+    query = None
+    try:
+        query = DataApiQuery(id="BOU", sampling_period=100)
+    except Exception as e:
+        err = e.errors()
+        assert "Input should be 0.1, 1.0, 60.0, 3600.0 or 86400.0" == err[0]["msg"]
+    
+    assert_equal(query, None)
+
+def test_DataApiQuery_invalid_data_type():
+    query = None
+    try:
+        query = DataApiQuery(id="BOU", data_type="not-definitive")
+    except Exception as e:
+        err = e.errors()
+        assert "Input should be 'variation', 'adjusted', 'quasi-definitive' or 'definitive'" == err[0]["msg"]
+    
+    assert_equal(query, None)
+
+def test_DataApiQuery_invalid_format():
+    query = None
+    try:
+        query = DataApiQuery(id="BOU", format="text")
+    except Exception as e:
+        err = e.errors()
+        assert "Input should be 'iaga2002' or 'json'" == err[0]["msg"]
+    
+    assert_equal(query, None)
+
+def test_DataApiQuery_invalid_data_host():
+    query = None
+    try:
+        query = DataApiQuery(id="BOU", data_host="cwbp3.cr.usgs.gov")
+    except Exception as e:
+        err = e.errors()
+        assert "Value error, Bad data_host value 'cwbp3.cr.usgs.gov'." == err[0]["msg"]
+    
+    assert_equal(query, None)
+
+def test_DataApiQuery_extra_fields():
+    query = None
+    try:
+        query = DataApiQuery(id="BOU", foo="bar")
+    except Exception as e:
+        err = e.errors()
+        assert "Extra inputs are not permitted" == err[0]["msg"]
+    
+    assert_equal(query, None)
diff --git a/test/FilterApiQuery_test.py b/test/FilterApiQuery_test.py
new file mode 100644
index 0000000000000000000000000000000000000000..d4a63dd86ae839621fe1762876549fb17ce07b75
--- /dev/null
+++ b/test/FilterApiQuery_test.py
@@ -0,0 +1,206 @@
+import datetime
+from numpy.testing import assert_equal
+from obspy import UTCDateTime
+
+from geomagio.api.ws.FilterApiQuery import FilterApiQuery
+from geomagio.api.ws.DataApiQuery import (
+    SamplingPeriod,
+    DataType,
+    OutputFormat,
+    DataHost
+)
+
+def test_FilterApiQuery_defaults():
+    query = FilterApiQuery(id="ANMO")
+
+    now = datetime.datetime.now(tz=datetime.timezone.utc)
+    expected_start_time = UTCDateTime(year=now.year, month=now.month, day=now.day)
+    expected_endtime = expected_start_time + (86400 - 0.001)
+
+    assert_equal(query.id, "ANMO")
+    assert_equal(query.starttime, expected_start_time)
+    assert_equal(query.endtime, expected_endtime)
+    assert_equal(query.elements, ["X", "Y", "Z", "F"])
+    assert_equal(query.input_sampling_period, SamplingPeriod.SECOND)
+    assert_equal(query.output_sampling_period, SamplingPeriod.MINUTE)
+    assert_equal(query.data_type, DataType.VARIATION)
+    assert_equal(query.format, OutputFormat.IAGA2002)
+    assert_equal(query.data_host, DataHost.DEFAULT)
+
+def test_FilterApiQuery_valid():
+    query = FilterApiQuery(
+        id="ANMO",
+        starttime="2024-09-01T00:00:01",
+        endtime="2024-09-01T01:00:01",
+        elements=["Z"],
+        input_sampling_period=60,
+        output_sampling_period=3600,
+        data_type="adjusted",
+        format="json",
+        data_host="cwbpub.cr.usgs.gov",
+    )
+
+    assert_equal(query.id, "ANMO")
+    assert_equal(query.starttime, UTCDateTime("2024-09-01T00:00:01"))
+    assert_equal(query.endtime, UTCDateTime("2024-09-01T01:00:01"))
+    assert_equal(query.elements, ["Z"])
+    assert_equal(query.input_sampling_period, SamplingPeriod.MINUTE)
+    assert_equal(query.output_sampling_period, SamplingPeriod.HOUR)
+    assert_equal(query.data_type, DataType.ADJUSTED)
+    assert_equal(query.format, OutputFormat.JSON)
+    assert_equal(query.data_host, DataHost.CWBPUB)
+
+def test_FilterApiQuery_no_id():
+    query = None
+    try:
+        query = FilterApiQuery()
+    except Exception as e:
+        err = e.errors()
+        assert "Field required" == err[0]["msg"]
+    
+    assert_equal(query, None)
+
+def test_FilterApiQuery_invalid_id():
+    query = None
+    try:
+        query = FilterApiQuery(id="YMCA")
+    except Exception as e:
+        err = e.errors()
+        assert "Value error, Bad observatory id 'YMCA'." in err[0]["msg"]
+    
+    assert_equal(query, None)
+
+
+def test_FilterApiQuery_default_endtime():
+    query = FilterApiQuery(id="ANMO", starttime="2024-11-01T00:00:01")
+
+    assert_equal(query.id, "ANMO")
+    assert_equal(query.starttime, UTCDateTime("2024-11-01T00:00:01"))
+    # endtime is 1 day after start time
+    assert_equal(query.endtime, UTCDateTime("2024-11-02T00:00:00.999"))
+    assert_equal(query.elements, ["X", "Y", "Z", "F"])
+    assert_equal(query.sampling_period, SamplingPeriod.MINUTE)
+    assert_equal(query.data_type, DataType.VARIATION)
+    assert_equal(query.format, OutputFormat.IAGA2002)
+    assert_equal(query.data_host, DataHost.DEFAULT)
+
+def test_FilterApiQuery_starttime_after_endtime():
+    query = None
+    try:
+        query = FilterApiQuery(
+            id="ANMO", 
+            starttime="2024-11-01T00:00:01",
+            endtime="2024-10-01T00:00:01"
+        )
+    except Exception as e:
+        err = e.errors()
+        assert "Value error, Starttime must be before endtime." == err[0]["msg"]
+        
+    assert_equal(query, None)
+
+def test_FilterApiQuery_invalid_starttime():
+    query = None
+    try:
+        query = FilterApiQuery(
+            id="ANMO", 
+            starttime="November 8th 2024 12:00pm",
+        )
+    except Exception as e:
+        err = e.errors()
+        assert "Input should be an instance of UTCDateTime" == err[0]["msg"]
+        
+    assert_equal(query, None)
+
+def test_FilterApiQuery_invalid_endtime():
+    query = None
+    try:
+        query = FilterApiQuery(
+            id="ANMO", 
+            starttime="2024-11-01T00:00:01",
+            endtime="Add one day",
+        )
+    except Exception as e:
+        err = e.errors()
+        assert "Input should be an instance of UTCDateTime" == err[0]["msg"]
+        
+    assert_equal(query, None)
+
+def test_FilterApiQuery_invalid_list_elements():
+    query = None
+    try:
+        query = FilterApiQuery(id="ANMO", elements="XYZF")
+    except Exception as e:
+        err = e.errors()
+        assert "Input should be a valid list" == err[0]["msg"]
+    
+    assert_equal(query, None)
+
+def test_FilterApiQuery_invalid_element():
+    query = None
+    try:
+        query = FilterApiQuery(id="ANMO", elements=["A"])
+    except Exception as e:
+        err = e.errors()
+        assert "Value error, Bad element 'A'. Valid values are: U, V, W, H, E, X, Y, D, Z, F, G, DIST, E-E, E-N, SQ, SV, UK1, UK2, UK3, UK4." == err[0]["msg"]
+    
+    assert_equal(query, None)
+
+def test_FilterApiQuery_invalid_elements():
+    query = None
+    try:
+        query = FilterApiQuery(id="ANMO", elements=["Z","A","F"])
+    except Exception as e:
+        err = e.errors()
+        assert "Value error, Bad element 'A'. Valid values are: U, V, W, H, E, X, Y, D, Z, F, G, DIST, E-E, E-N, SQ, SV, UK1, UK2, UK3, UK4." == err[0]["msg"]
+    
+    assert_equal(query, None)
+
+def test_FilterApiQuery_invalid_sampling_period():
+    query = None
+    try:
+        query = FilterApiQuery(id="ANMO", sampling_period=100)
+    except Exception as e:
+        err = e.errors()
+        assert "Input should be 0.1, 1.0, 60.0, 3600.0 or 86400.0" == err[0]["msg"]
+    
+    assert_equal(query, None)
+
+def test_FilterApiQuery_invalid_data_type():
+    query = None
+    try:
+        query = FilterApiQuery(id="ANMO", data_type="not-definitive")
+    except Exception as e:
+        err = e.errors()
+        assert "Input should be 'variation', 'adjusted', 'quasi-definitive' or 'definitive'" == err[0]["msg"]
+    
+    assert_equal(query, None)
+
+def test_FilterApiQuery_invalid_format():
+    query = None
+    try:
+        query = FilterApiQuery(id="ANMO", format="text")
+    except Exception as e:
+        err = e.errors()
+        assert "Input should be 'iaga2002' or 'json'" == err[0]["msg"]
+    
+    assert_equal(query, None)
+
+def test_FilterApiQuery_invalid_data_host():
+    query = None
+    try:
+        query = FilterApiQuery(id="ANMO", data_host="cwbp3.cr.usgs.gov")
+    except Exception as e:
+        err = e.errors()
+        assert "Value error, Bad data_host value 'cwbp3.cr.usgs.gov'." == err[0]["msg"]
+    
+    assert_equal(query, None)
+
+def test_FilterApiQuery_extra_fields():
+    query = None
+    try:
+        query = FilterApiQuery(id="ANMO", foo="bar")
+    except Exception as e:
+        err = e.errors()
+        assert "Extra inputs are not permitted" == err[0]["msg"]
+    
+    assert_equal(query, None)
diff --git a/test/api_test/ws_test/data_test.py b/test/api_test/ws_test/data_test.py
index e0b731efd9975ab5f726319dd847c8dfd5b94ca9..99d19fad444dde3b569da0ba0088b7d6c40faa6d 100644
--- a/test/api_test/ws_test/data_test.py
+++ b/test/api_test/ws_test/data_test.py
@@ -10,6 +10,7 @@ from geomagio.api.ws.DataApiQuery import (
     DataApiQuery,
     OutputFormat,
     SamplingPeriod,
+    DataType
 )
 
 
@@ -26,16 +27,17 @@ def test_client():
 def test_get_data_query(test_client):
     """test.api_test.ws_test.data_test.test_get_data_query()"""
     response = test_client.get(
-        "/query/?id=BOU&starttime=2020-09-01T00:00:01&elements=X,Y,Z,F&type=R1&sampling_period=60&format=iaga2002"
+        "/query/?id=BOU&starttime=2020-09-01T00:00:01&elements=X,Y,Z,F&type=adjusted&sampling_period=60&format=iaga2002"
     )
     query = DataApiQuery(**response.json())
+    # response body round-trips through DataApiQuery validation
     assert_equal(query.id, "BOU")
     assert_equal(query.starttime, UTCDateTime("2020-09-01T00:00:01"))
     assert_equal(query.endtime, UTCDateTime("2020-09-02T00:00:00.999"))
     assert_equal(query.elements, ["X", "Y", "Z", "F"])
     assert_equal(query.sampling_period, SamplingPeriod.MINUTE)
     assert_equal(query.format, OutputFormat.IAGA2002)
-    assert_equal(query.data_type, "R1")
+    assert_equal(query.data_type, DataType.ADJUSTED)
 
 
 async def test_get_data_query_extra_params(test_client):
diff --git a/test/api_test/ws_test/filter_test.py b/test/api_test/ws_test/filter_test.py
new file mode 100644
index 0000000000000000000000000000000000000000..c559c710f97e9e2d7bcf09a6e3952d19bae2a8e3
--- /dev/null
+++ b/test/api_test/ws_test/filter_test.py
@@ -0,0 +1,42 @@
+from fastapi import Depends
+from fastapi.testclient import TestClient
+from numpy.testing import assert_equal
+from obspy import UTCDateTime
+import pytest
+
+from geomagio.api.ws import app
+from geomagio.api.ws.filter import get_filter_data_query
+from geomagio.api.ws.FilterApiQuery import FilterApiQuery
+from geomagio.api.ws.DataApiQuery import (
+    SamplingPeriod,
+    OutputFormat,
+    DataType
+)
+
+
+@pytest.fixture(scope="module")
+def test_client():
+    @app.get("/filter/", response_model=FilterApiQuery)
+    def get_query(query: FilterApiQuery = Depends(get_filter_data_query)):
+        return query
+
+    client = TestClient(app)
+    yield client
+
+
+def test_get_filter_data_query(test_client):
+    """test.api_test.ws_test.filter_test.test_get_filter_data_query()"""
+    response = test_client.get(
+        "/filter/?id=ANMO&starttime=2020-09-01T00:00:01&elements=X,Y,Z,F&type=variation&input_sampling_period=60&output_sampling_period=3600&format=iaga2002"
+    )
+    query = FilterApiQuery(**response.json())
+    # response body round-trips through FilterApiQuery validation
+    assert_equal(query.id, "ANMO")
+    assert_equal(query.starttime, UTCDateTime("2020-09-01T00:00:01"))
+    assert_equal(query.endtime, UTCDateTime("2020-09-02T00:00:00.999"))
+    assert_equal(query.elements, ["X", "Y", "Z", "F"])
+    assert_equal(query.sampling_period, SamplingPeriod.MINUTE)
+    assert_equal(query.format, OutputFormat.IAGA2002)
+    assert_equal(query.data_type, DataType.VARIATION)
+    assert_equal(query.input_sampling_period, SamplingPeriod.MINUTE)
+    assert_equal(query.output_sampling_period, SamplingPeriod.HOUR)
diff --git a/test/pydantic_utcdatetime_test.py b/test/pydantic_utcdatetime_test.py
new file mode 100644
index 0000000000000000000000000000000000000000..44dafa8f191523920ff7cfb0e4dd4fbc24fd77f7
--- /dev/null
+++ b/test/pydantic_utcdatetime_test.py
@@ -0,0 +1,39 @@
+import datetime
+from dateutil import tz
+from pydantic import BaseModel
+from obspy import UTCDateTime
+from numpy.testing import assert_equal
+
+from geomagio.pydantic_utcdatetime import CustomUTCDateTimeType
+
+
+class StarttimeModel(BaseModel):
+    starttime: CustomUTCDateTimeType
+
+
+def test_UTCDateTime_string():
+    t = StarttimeModel(starttime="2024-11-05T00:00:00.00Z")
+
+    assert_equal(t.starttime, UTCDateTime(2024, 11, 5, 0, 0))
+
+
+def test_UTCDateTime_timestamp():
+    t = StarttimeModel(starttime=datetime.datetime(2024, 11, 5, tzinfo=tz.tzutc()))
+
+    assert_equal(t.starttime, UTCDateTime(2024, 11, 5, 0, 0))
+
+
+def test_UTCDateTime_unix_timestamp():
+    t = StarttimeModel(starttime=1730764800)
+
+    assert_equal(t.starttime, UTCDateTime(2024, 11, 5, 0, 0))
+
+def test_invalid():
+    t = None
+    try:
+        t = StarttimeModel(starttime="November 8th 2024 12:00pm")
+    except Exception as e:
+        err = e.errors()
+        assert "Input should be an instance of UTCDateTime" == err[0]["msg"]
+
+    assert_equal(t, None)
diff --git a/test/residual_test/residual_test.py b/test/residual_test/residual_test.py
index 5f70ff83f9768739861b8e8d6f4687c4a43acf67..2b03f22272a1884973e4cf34987b2dc6d82fa1aa 100644
--- a/test/residual_test/residual_test.py
+++ b/test/residual_test/residual_test.py
@@ -1,7 +1,7 @@
 import json
 
 from numpy.testing import assert_almost_equal, assert_equal
-from pydantic import parse_obj_as
+from pydantic import TypeAdapter
 import pytest
 from typing import List
 
@@ -49,7 +49,8 @@ def assert_readings_equal(expected: Reading, actual: Reading, decimal: int):
 def get_json_readings(filename: str):
     with open(filename, "r") as file:
         readings = json.load(file)
-    readings = parse_obj_as(List[Reading], readings)
+    readingAdapter = TypeAdapter(List[Reading])
+    readings = readingAdapter.validate_python(readings)
     return readings