diff --git a/geomagio/Controller.py b/geomagio/Controller.py
index 4e212579a625d08143fbf64bce5979c40976080c..826683f1c40d82ef37b8a738f69a0014bd6a317d 100644
--- a/geomagio/Controller.py
+++ b/geomagio/Controller.py
@@ -501,6 +501,7 @@ def get_input_factory(args):
     input_factory_args["observatory"] = args.observatory
     input_factory_args["type"] = args.type
     input_type = args.input
+    input_factory_args["output"] = args.output  # NOTE(review): passed to every input factory — confirm all factory __init__s accept an 'output' kwarg
     # stream/url arguments
     if args.input_file is not None:
         if input_type in ["netcdf", "miniseed", "imagcdf"]:
diff --git a/geomagio/imagcdf/IMCDFPublication.py b/geomagio/imagcdf/IMCDFPublication.py
new file mode 100644
index 0000000000000000000000000000000000000000..36f66be3281f528eb89264f1f51700b349da3a4b
--- /dev/null
+++ b/geomagio/imagcdf/IMCDFPublication.py
@@ -0,0 +1,108 @@
+from typing import Optional, Union
+
+
+class IMCDFPublicationLevel:
+    """Handles publication levels and mapping between data types and levels.
+
+    The ImagCDF format uses publication levels to describe the processing
+    level of the data. This class maps data types (e.g., 'variation', 'definitive')
+    to their corresponding publication levels as defined in the ImagCDF documentation.
+
+    Publication Levels:
+        1: Raw data with no processing.
+        2: Edited data with preliminary baselines.
+        3: Data suitable for initial bulletins or quasi-definitive publication.
+        4: Definitive data with no further changes expected.
+
+    Reference:
+    - ImagCDF Technical Documentation: Attributes that Uniquely Identify the Data
+    """
+
+    class PublicationLevel:
+        LEVEL_1 = "1"
+        LEVEL_2 = "2"
+        LEVEL_3 = "3"
+        LEVEL_4 = "4"
+
+    TYPE_TO_LEVEL = {
+        "none": PublicationLevel.LEVEL_1,
+        "variation": PublicationLevel.LEVEL_1,
+        "reported": PublicationLevel.LEVEL_1,
+        "provisional": PublicationLevel.LEVEL_2,
+        "adjusted": PublicationLevel.LEVEL_2,
+        "quasi-definitive": PublicationLevel.LEVEL_3,
+        "quasidefinitive": PublicationLevel.LEVEL_3,
+        "definitive": PublicationLevel.LEVEL_4,
+    }
+
+    def __init__(self, value: Union[str, int]):
+        """Initialize with a data type or publication level to determine the publication level.
+
+        Args:
+            value (Union[str, int]): The data type (str) or publication level (int).
+
+        Raises:
+            ValueError: If the value is not provided or is unsupported.
+        """
+        if isinstance(value, int) or (isinstance(value, str) and value.isdigit()):
+            self.level = str(value)
+            if self.level not in self.TYPE_TO_LEVEL.values():
+                raise ValueError(f"Unsupported level: {value}")
+        elif isinstance(value, str):
+            self.level = self.TYPE_TO_LEVEL.get(value.lower())
+            if not self.level:
+                raise ValueError(f"Unsupported data_type: {value}")
+        else:
+            raise ValueError(
+                "value must be a string or an integer representing data_type or level."
+            )
+
+    def get_level(self) -> str:
+        """Return the publication level as a string.
+
+        Returns:
+            str: The publication level.
+        """
+        return self.level
+
+    def get_imf_data_type(self, long_form: bool = True) -> str:
+        """Get the IMF data type based on the publication level.
+
+        Args:
+            long_form (bool): If True, return the full description; otherwise, return the abbreviated form.
+
+        Returns:
+            str: The IMF data type.
+
+        Reference:
+            https://tech-man.intermagnet.org/latest/appendices/dataformats.html#intermagnet-satellite-transmission-format-imfv2-83
+        """
+        if self.level == self.PublicationLevel.LEVEL_4:
+            return "Definitive" if long_form else "D"
+        elif self.level == self.PublicationLevel.LEVEL_3:
+            return "Quasi-definitive" if long_form else "Q"
+        elif self.level == self.PublicationLevel.LEVEL_2:
+            return "Adjusted" if long_form else "A"
+        else:
+            return "Reported" if long_form else "R"
+
+    def get_iaga2002_data_type(self, long_form: bool = True) -> str:
+        """Get the IAGA-2002 data type based on the publication level.
+
+        Args:
+            long_form (bool): If True, return the full description; otherwise, return the abbreviated form.
+
+        Returns:
+            str: The IAGA-2002 data type.
+
+        Reference:
+            https://tech-man.intermagnet.org/latest/appendices/dataformats.html#iaga2002-intermagnet-exchange-format-spreadsheet-compatible
+        """
+        if self.level == self.PublicationLevel.LEVEL_4:
+            return "Definitive" if long_form else "D"
+        elif self.level == self.PublicationLevel.LEVEL_3:
+            return "Quasi-definitive" if long_form else "Q"
+        elif self.level == self.PublicationLevel.LEVEL_2:
+            return "Provisional" if long_form else "P"
+        else:
+            return "Variation" if long_form else "V"
diff --git a/geomagio/imagcdf/ImagCDFFactory.py b/geomagio/imagcdf/ImagCDFFactory.py
index c285504510cc2a9fd77bb47026145b4ef0c50453..de58ced1ea59017c63403d9dde4c0c8385d4dd21 100644
--- a/geomagio/imagcdf/ImagCDFFactory.py
+++ b/geomagio/imagcdf/ImagCDFFactory.py
@@ -12,77 +12,23 @@ References:
 - CDFLib Docs: [https://cdflib.readthedocs.io/en/latest/#, https://cdflib.readthedocs.io/en/stable/modules/index.html] 
 """
 
-from __future__ import absolute_import, print_function
-from io import BytesIO
 import os
 import sys
-from typing import List, Optional, Union
-from datetime import datetime, timezone, tzinfo
+from typing import List, Optional
+from datetime import datetime, timezone
 import numpy as np
 from obspy import Stream, Trace, UTCDateTime
-from sqlalchemy import true
 
 from geomagio.TimeseriesFactory import TimeseriesFactory
 from geomagio.api.ws.Element import TEMPERATURE_ELEMENTS_ID
-
 from ..geomag_types import DataInterval, DataType
 from ..TimeseriesFactoryException import TimeseriesFactoryException
-from .. import TimeseriesUtility
 from .. import Util
-
-import cdflib
+from cdflib import cdfepoch
 from cdflib.cdfwrite import CDF as CDFWriter
 from cdflib.cdfread import CDF as CDFReader
 import tempfile
-
-
-class IMCDFPublicationLevel:
-    """Handles publication levels and mapping between data types and levels.
-
-    The ImagCDF format uses publication levels to describe the processing
-    level of the data. This class maps data types (e.g., 'variation', 'definitive')
-    to their corresponding publication levels as defined in the ImagCDF documentation.
-
-    Publication Levels:
-        1: Raw data with no processing.
-        2: Edited data with preliminary baselines.
-        3: Data suitable for initial bulletins or quasi-definitive publication.
-        4: Definitive data with no further changes expected.
-
-    Reference:
-    - ImagCDF Technical Documentation: Attributes that Uniquely Identify the Data
-    """
-
-    class PublicationLevel:
-        LEVEL_1 = "1"
-        LEVEL_2 = "2"
-        LEVEL_3 = "3"
-        LEVEL_4 = "4"
-
-    TYPE_TO_LEVEL = {
-        "none": PublicationLevel.LEVEL_1,
-        "variation": PublicationLevel.LEVEL_1,
-        "reported": PublicationLevel.LEVEL_1,
-        "provisional": PublicationLevel.LEVEL_2,
-        "adjusted": PublicationLevel.LEVEL_2,
-        "quasi-definitive": PublicationLevel.LEVEL_3,
-        "quasidefinitive": PublicationLevel.LEVEL_3,
-        "definitive": PublicationLevel.LEVEL_4,
-    }
-
-    def __init__(self, data_type: Optional[str] = None):
-        """Initialize with a data type to determine the publication level."""
-        if data_type:
-            self.level = self.TYPE_TO_LEVEL.get(data_type.lower())
-        else:
-            raise ValueError("data_type must be provided.")
-
-        if not self.level:
-            raise ValueError(f"Unsupported data_type: {data_type}")
-
-    def to_string(self) -> str:
-        """Return the publication level as a string."""
-        return self.level
+from .IMCDFPublication import IMCDFPublicationLevel
 
 
 class ImagCDFFactory(TimeseriesFactory):
@@ -93,7 +39,11 @@ class ImagCDFFactory(TimeseriesFactory):
     """
 
     isUniqueTimes = True  # used to determine depend_0 and CDF Time Variable Name
-    NONSTANDARD_ELEMENTS = TEMPERATURE_ELEMENTS_ID
+    NONSTANDARD_ELEMENTS = (
+        TEMPERATURE_ELEMENTS_ID  # for using non standard elements such as UK1
+    )
+    __FILL_VALUE = 99_999
+    __MAX_COMPRESSION = 9
 
     def __init__(
         self,
@@ -103,7 +53,8 @@ class ImagCDFFactory(TimeseriesFactory):
         interval: DataInterval = "minute",
         urlTemplate="file://etc/imagcdf/{obs}_{dt}_{t}.cdf",
         urlInterval: int = -1,
-        inputFile: Optional[str] = None, 
+        inputFile: Optional[str] = None,
+        output: Optional[str] = "iaga2002",
     ):
         """
         Initialize the ImagCDFFactory with default parameters.
@@ -115,7 +66,7 @@ class ImagCDFFactory(TimeseriesFactory):
         - interval: Data interval (e.g., 'minute', 'second').
         - urlTemplate: Template for generating file URLs or paths.
         - urlInterval: Interval size for splitting data into multiple files.
-        - inputFile: An ImagCDF file to read data from. If not provided urlTemplate is assumed path for reads.  
+        - inputFile: An ImagCDF file to read data from. If not provided urlTemplate is assumed path for reads.
         """
         super().__init__(
             observatory=observatory,
@@ -126,6 +77,7 @@ class ImagCDFFactory(TimeseriesFactory):
             urlInterval=urlInterval,
         )
         self.inputFile = inputFile
+        self.output = output
 
     def write_file(self, fh, timeseries: Stream, channels: List[str]):
         # Create a temporary file to write the CDF data
@@ -135,10 +87,10 @@ class ImagCDFFactory(TimeseriesFactory):
         try:
             # Initialize the CDF writer
             cdf_spec = {
-                "Compressed": 9,  # Enable compression (1-9)
-                "Majority": CDFWriter.ROW_MAJOR,
+                "Compressed": self.__MAX_COMPRESSION,  # Max Gzip compression (1-9). Almost always the GZIP is the best choice for all data. (CDF User Guide p.23 1.4.3 )
+                "Majority": CDFWriter.ROW_MAJOR,  # The first dimension changes the slowest (CDF User Guide p.45 2.3.15 Majority)
                 "Encoding": CDFWriter.NETWORK_ENCODING,  #  XDR Encoding - If a CDF must be portable between two or more different types of computers use network encoded.
-                "Checksum": True,  # Disable checksum for faster writes (optional)
+                "Checksum": True,  # True for Data Integrity. False for faster writes (optional)
                 "rDim_sizes": [],  # Applicable only if using rVariables - CDF protocol recommends only using zVariables.
             }
 
@@ -162,11 +114,13 @@ class ImagCDFFactory(TimeseriesFactory):
                     "Var_Type": "zVariable",
                     "Dim_Sizes": [],
                     "Sparse": "no_sparse",  # no_sparse because there should not be time gaps. (Time stamps must represent a regular time series with no missing values in the series)
-                    "Compress": 9,
+                    "Compress": self.__MAX_COMPRESSION,
                     "Pad": None,
                 }
                 # Define time variable attributes
-                var_attrs = self._create_time_var_attrs(ts_name)
+                var_attrs = (
+                    {}
+                )  # ImagCDF does not require or recommend any 'time' variable attributes.
 
                 # Write time variable
                 cdf_writer.write_var(var_spec, var_attrs, ts_data)
@@ -179,23 +133,20 @@ class ImagCDFFactory(TimeseriesFactory):
                 # if channel in REAL_TEMPERATURE:
                 #     temperature_index += 1  # MUST INCREMENT INDEX BEFORE USING
                 #     var_name = f"Temperature{temperature_index}"
-                data_type = self._get_cdf_data_type(trace)
-                num_elements = 1
-                if data_type in [
-                    CDFWriter.CDF_CHAR,
-                    CDFWriter.CDF_UCHAR,
-                ]:  # Handle string types
-                    num_elements = len(trace.data[0]) if len(trace.data) > 0 else 1
-
+                data_type = (
+                    CDFWriter.CDF_INT4
+                    if trace.data.dtype in [np.int32, np.int64]
+                    else CDFWriter.CDF_DOUBLE
+                )
                 var_spec = {
                     "Variable": var_name,
                     "Data_Type": data_type,
-                    "Num_Elements": num_elements,
+                    "Num_Elements": 1,
                     "Rec_Vary": True,
                     "Var_Type": "zVariable",
                     "Dim_Sizes": [],
                     "Sparse": "no_sparse",
-                    "Compress": 9,
+                    "Compress": self.__MAX_COMPRESSION,
                     "Pad": None,
                 }
                 var_attrs = self._create_var_attrs(
@@ -250,7 +201,6 @@ class ImagCDFFactory(TimeseriesFactory):
 
         # Extract metadata from the first trace
         stats = timeseries[0].stats
-        delta = stats.delta  # Sample rate
         observatory = stats.station
         starttime = starttime or stats.starttime
         endtime = endtime or stats.endtime
@@ -291,18 +241,16 @@ class ImagCDFFactory(TimeseriesFactory):
                 )
                 # Check if the file already exists to merge data
                 if os.path.isfile(url_file):
-                    os.remove(url_file)
-                    print(f"Naming conflict. Deleting exisiting file '{url_file}'.")
-                    # raise TimeseriesFactoryException(
-                    #     f"Error: File '{url_file}' already exists."
-                    # )
+                    raise TimeseriesFactoryException(
+                        f"Error: File '{url_file}' already exists."
+                    )
                 # Pad the data to ensure it fits the interval
                 url_data.trim(
                     starttime=interval_start,
                     endtime=interval_end,
                     nearest_sample=True,
                     pad=True,
-                    fill_value=99_999,  # FILLVAL
+                    fill_value=self.__FILL_VALUE,  # FILLVAL
                 )
 
                 # Write the data to the CDF file
@@ -336,7 +284,7 @@ class ImagCDFFactory(TimeseriesFactory):
         )
 
         for urlInterval in urlIntervals:
-            if self.inputFile is None: 
+            if self.inputFile is None:
                 url = self._get_url(
                     observatory=observatory,
                     date=urlInterval["start"],
@@ -354,7 +302,7 @@ class ImagCDFFactory(TimeseriesFactory):
                 if not os.path.isfile(url_file):
                     # If file doesn't exist, skip
                     continue
-            else: 
+            else:
                 url_file = self.inputFile
             try:
                 # Read CDF data and merge
@@ -364,13 +312,13 @@ class ImagCDFFactory(TimeseriesFactory):
                 print(f"Error reading CDF file '{url_file}': {e}", file=sys.stderr)
 
         # After reading all intervals, merge and trim
-        timeseries.merge(fill_value=99_999)
+        timeseries.merge(fill_value=self.__FILL_VALUE)
         timeseries.trim(
             starttime=starttime,
             endtime=endtime,
             nearest_sample=True,
             pad=True,
-            fill_value=99_999,
+            fill_value=self.__FILL_VALUE,
         )
 
         # If requested, add empty channels not present in data
@@ -415,7 +363,7 @@ class ImagCDFFactory(TimeseriesFactory):
         institution = getattr(stats, "agency_name", None) or ""
         latitude = getattr(stats, "geodetic_latitude", None) or 0.0
         longitude = getattr(stats, "geodetic_longitude", None) or 0.0
-        elevation = getattr(stats, "elevation", None) or 99_999.0
+        elevation = getattr(stats, "elevation", None) or self.__FILL_VALUE
         conditions_of_use = getattr(stats, "conditions_of_use", None) or ""
         vector_orientation = getattr(stats, "sensor_orientation", None) or ""
         data_interval_type = getattr(stats, "data_interval_type", None) or self.interval
@@ -423,7 +371,7 @@ class ImagCDFFactory(TimeseriesFactory):
         sensor_sampling_rate = getattr(stats, "sensor_sampling_rate", None) or 0.0
         comments = getattr(stats, "filter_comments", None) or [""]
         declination_base = getattr(stats, "declination_base", None) or 0.0
-        publication_level = IMCDFPublicationLevel(data_type=self.type).to_string()
+        publication_level = IMCDFPublicationLevel(self.type).get_level()
         global_attrs = {
             "FormatDescription": {0: "INTERMAGNET CDF Format"},
             "FormatVersion": {0: "1.2"},
@@ -433,7 +381,7 @@ class ImagCDFFactory(TimeseriesFactory):
             "PublicationLevel": {0: publication_level},
             "PublicationDate": {
                 0: [
-                    cdflib.cdfepoch.timestamp_to_tt2000(
+                    cdfepoch.timestamp_to_tt2000(
                         datetime.timestamp(datetime.now(timezone.utc))
                     ),
                     "cdf_time_tt2000",
@@ -444,24 +392,23 @@ class ImagCDFFactory(TimeseriesFactory):
             "Longitude": {0: np.array([longitude], dtype=np.float64)},
             "Elevation": {0: np.array([elevation], dtype=np.float64)},
             "Institution": {0: institution},
-            "VectorSensOrient": {
-                0: vector_orientation
-            },  # remove F - because its a calculation, not an element?
-            "StandardLevel": {0: "None"},  # Set to 'None'
-            # Temporarily Omit 'StandardName', 'StandardVersion', 'PartialStandDesc'
+            "VectorSensOrient": {0: vector_orientation},
+            "StandardLevel": {0: "None"},
+            # Can Omit 'StandardName', 'StandardVersion', 'PartialStandDesc' when StandardLevel=None
             "Source": {
                 0: "institute"
             },  # "institute" - if the named institution provided the data, “INTERMAGNET” - if the data file has been created by INTERMAGNET from another data source, “WDC” - if the World Data Centre has created the file from another data source
             "TermsOfUse": {0: conditions_of_use},
+            # [[Optional Attributes]]
             # 'UniqueIdentifier': {0: ''},
             # 'ParentIdentifiers': {0: ''},
             # 'ReferenceLinks': {0: ''}, #links to /ws, plots, USGS.gov
-            # Custom Attributes Below
-            "SensorSamplingRate": {0: sensor_sampling_rate},  
-            "DataType": {0: data_type},  
-            "Comments": {0: comments},  
-            "DeclinationBase": {0: declination_base},  
-            "Network": {0: network},  
+            # [[Custom Attributes]]
+            "SensorSamplingRate": {0: sensor_sampling_rate},
+            "DataType": {0: data_type},
+            "Comments": {0: comments},
+            "DeclinationBase": {0: declination_base},
+            "Network": {0: network},
         }
         return global_attrs
 
@@ -481,7 +428,7 @@ class ImagCDFFactory(TimeseriesFactory):
                 for i in range(trace.stats.npts)
             ]
             # Convert timestamps to TT2000 format required by CDF
-            tt2000_times = cdflib.cdfepoch.timestamp_to_tt2000(
+            tt2000_times = cdfepoch.timestamp_to_tt2000(
                 [time.timestamp() for time in times]
             )
 
@@ -547,49 +494,6 @@ class ImagCDFFactory(TimeseriesFactory):
             else time_vars
         )
 
-    def _create_var_spec(
-        self,
-        var_name: str,
-        data_type: str,
-        num_elements: int,
-        var_type: str,
-        dim_sizes: List[int],
-        sparse: bool,
-        compress: int,
-        pad: Optional[Union[str, np.ndarray]],
-    ) -> dict:
-        """
-        Create a variable specification dictionary for cdflib.
-
-        This is used to define the properties of a variable when writing it to
-        the CDF file.
-
-        Parameters:
-        - var_name: Name of the variable.
-        - data_type: CDF data type.
-        - num_elements: Number of elements per record.
-        - var_type: Variable type ('zVariable' or 'rVariable').
-        - dim_sizes: Dimensions of the variable (empty list for 0D).
-        - sparse: Whether the variable uses sparse records.
-        - compress: Compression level.
-        - pad: Pad value for sparse records.
-
-        Reference:
-        - CDF User's Guide: Variable Specification
-        """
-        var_spec = {
-            "Variable": var_name,
-            "Data_Type": data_type,
-            "Num_Elements": num_elements,
-            "Rec_Vary": True,
-            "Var_Type": var_type,
-            "Dim_Sizes": dim_sizes,
-            "Sparse": "no_sparse" if not sparse else "pad_sparse",
-            "Compress": compress,
-            "Pad": pad,
-        }
-        return var_spec
-
     def _create_var_attrs(
         self,
         trace: Trace,
@@ -641,57 +545,17 @@ class ImagCDFFactory(TimeseriesFactory):
         var_attrs = {
             "FIELDNAM": fieldnam,
             "UNITS": units,
-            "FILLVAL": 99999.0,
+            "FILLVAL": self.__FILL_VALUE,
             "VALIDMIN": validmin,
             "VALIDMAX": validmax,
             "DEPEND_0": depend_0,
             "DISPLAY_TYPE": "time_series",
             "LABLAXIS": channel,
-            # custom ImagCDF variable attributes below
+            # [[Custom Attributes]]
             "DATA_INTERVAL_TYPE": trace.stats.data_interval_type,
         }
         return var_attrs
 
-    def _create_time_var_attrs(self, ts_name: str) -> dict:
-        """
-        Create a dictionary of time variable attributes.
-
-        These attributes provide metadata for time variables.
-        Note: None of these attributes are required for the time stamp variables.
-        Reference:
-        - ImagCDF Technical Documentation: ImagCDF Data
-        """
-        # var_attrs = {
-        # 'UNITS': 'TT2000',
-        # 'DISPLAY_TYPE': 'time_series',
-        # 'LABLAXIS': 'Time',
-        # }
-        # return var_attrs
-        return {}
-
-    def _get_cdf_data_type(self, trace: Trace) -> int:
-        """
-        Map ObsPy trace data type to CDF data type.
-
-        Determines the appropriate CDF data type based on the NumPy data type
-        of the trace data.
-
-        Returns:
-        - CDF_DOUBLE (45) for floating-point data.
-        - CDF_INT4 (41) for integer data.
-
-        Reference:
-        - See CDF for more data types
-        """
-
-        if trace.data.dtype in [np.float32, np.float64]:
-            return CDFWriter.CDF_DOUBLE
-        elif trace.data.dtype in [np.int32, np.int64]:
-            return CDFWriter.CDF_INT4
-        else:
-            # Default to double precision float
-            return CDFWriter.CDF_DOUBLE
-
     def _read_cdf(self, cdf: CDFReader, channels: Optional[List[str]]) -> Stream:
         """
         Read CDF data into an ObsPy Stream.
@@ -747,24 +611,32 @@ class ImagCDFFactory(TimeseriesFactory):
         institution = global_attrs.get("Institution", [""])[0]
         latitude = global_attrs.get("Latitude", [0.0])[0]
         longitude = global_attrs.get("Longitude", [0.0])[0]
-        elevation = global_attrs.get("Elevation", [99_999.0])[
+        elevation = global_attrs.get("Elevation", [self.__FILL_VALUE])[
             0
         ]  # default to 99_999 per technical documents.
-        sensor_sampling_rate = global_attrs.get("SensorSamplingRate", [0.0])[0]
         sensor_orientation = global_attrs.get("VectorSensOrient", [""])[0]
-        data_type = global_attrs.get("DataType", ["variation"])[0]
-        publication_level = global_attrs.get("PublicationLevel", ["1"])[0]
-        comments = global_attrs.get("Comments", [""]) #keep comments as an array
+        publication_level = global_attrs.get("PublicationLevel", ["1"])[0]
+        publication = IMCDFPublicationLevel(publication_level)
         terms_of_use = global_attrs.get("TermsOfUse", [""])[0]
-        declination_base = global_attrs.get("DeclinationBase", [0.0])[0]
-        network = global_attrs.get("Network", [""])[0]
+
+        # Not expected in ImagCDF
+        data_type = (
+            publication.get_imf_data_type()
+            if self.output and "imf" in self.output
+            else publication.get_iaga2002_data_type()
+        )
+
+        sensor_sampling_rate = global_attrs.get("SensorSamplingRate", [None])[0]
+        comments = global_attrs.get("Comments", None)  # keep comments as an array when present
+        declination_base = global_attrs.get("DeclinationBase", [None])[0]
+        network = global_attrs.get("Network", [None])[0]
 
         # Identify all time variables
         time_vars = {}
         for var in cdf.cdf_info().zVariables:
             if var.endswith("Times"):
                 time_data = cdf.varget(var)
-                unix_times = cdflib.cdfepoch.unixtime(time_data)
+                unix_times = cdfepoch.unixtime(time_data)
                 utc_times = [UTCDateTime(t) for t in unix_times]
                 time_vars[var] = utc_times
 
@@ -830,7 +702,7 @@ class ImagCDFFactory(TimeseriesFactory):
                     delta = 60.0
 
             time_attrs = cdf.varattsget(var)
-            data_interval = time_attrs.get("DATA_INTERVAL_TYPE", [""])
+            data_interval = time_attrs.get("DATA_INTERVAL_TYPE", None)
 
             # Required variable attributes based on ImagCDF standards
             required_variable_attrs = [
@@ -856,29 +728,36 @@ class ImagCDFFactory(TimeseriesFactory):
                 )
                 raise TimeseriesFactoryException(error_message)
 
+            header = {
+                "station": observatory,
+                "channel": channel,
+                "starttime": times[0],
+                "delta": delta,
+                "geodetic_latitude": latitude,
+                "geodetic_longitude": longitude,
+                "elevation": elevation,
+                "sensor_orientation": "".join(sensor_orientation),
+                "data_type": data_type,
+                "station_name": station_name,
+                "agency_name": institution,
+                "conditions_of_use": terms_of_use,
+            }
+            # data points not expected in ImagCDF specs
+            if sensor_sampling_rate is not None:
+                header.update({"sensor_sampling_rate": sensor_sampling_rate})
+            if data_interval is not None:
+                header.update({"data_interval_type": data_interval})
+            if declination_base is not None:
+                header.update({"declination_base": declination_base})
+            if comments is not None:
+                header.update({"filter_comments": comments})
+            if network is not None:
+                header.update({"network": network})
+
             # Create a trace
             trace = Trace(
                 data=data,
-                header={
-                    "station": observatory,
-                    "channel": channel,
-                    "starttime": times[0],
-                    "delta": delta,
-                    "geodetic_latitude": latitude,
-                    "geodetic_longitude": longitude,
-                    "elevation": elevation,
-                    "sensor_orientation": "".join(sensor_orientation),
-                    "data_type": data_type,
-                    "station_name": station_name,
-                    "agency_name": institution,
-                    "conditions_of_use": terms_of_use,
-                    # data points not in a traditional ImagCDF
-                    "sensor_sampling_rate": sensor_sampling_rate,
-                    "data_interval_type": data_interval,
-                    "declination_base": declination_base,
-                    "filter_comments": comments,
-                    "network": network
-                },
+                header=header,
             )
             stream += trace
 
@@ -915,7 +794,7 @@ class ImagCDFFactory(TimeseriesFactory):
         - ImagCDF Technical Documentation: ImagCDF File Names
         """
         # Get the publication level for the type
-        publication_level = IMCDFPublicationLevel(data_type=type).to_string()
+        publication_level = IMCDFPublicationLevel(type).get_level()
 
         # Format of Date/Time Portion of Filename based on interval see reference: https://tech-man.intermagnet.org/latest/appendices/dataformats.html#example-data-file:~:text=Format%20of%20Date,%EF%83%81
         if interval == "year":