diff --git a/geomagio/algorithm/FilterAlgorithm.py b/geomagio/algorithm/FilterAlgorithm.py
index da0f2df18f115f7be5d9e3d25c86147917a65e4a..1dd3353d747b798fc48fb5363cfc464fcd859733 100644
--- a/geomagio/algorithm/FilterAlgorithm.py
+++ b/geomagio/algorithm/FilterAlgorithm.py
@@ -192,7 +192,7 @@ class FilterAlgorithm(Algorithm):
             if (
                 self.input_sample_period <= step["input_sample_period"]
                 and self.output_sample_period >= step["output_sample_period"]
-            ):
+            ):
                 if (
                     step["type"] == "average"
                     and step["output_sample_period"] != self.output_sample_period
@@ -449,4 +449,4 @@ class FilterAlgorithm(Algorithm):
         self.output_sample_period = TimeseriesUtility.get_delta_from_interval(
             arguments.output_interval or arguments.interval
         )
-        self.load_state()
+        self.load_state()
\ No newline at end of file
diff --git a/geomagio/api/ws/algorithms.py b/geomagio/api/ws/algorithms.py
index 1ee093dcbc4e89b7661ef0890355248c4eade8fc..c73961c0dfef2bac5a7ae7771bac73c23867a9ee 100644
--- a/geomagio/api/ws/algorithms.py
+++ b/geomagio/api/ws/algorithms.py
@@ -2,13 +2,14 @@ import json
 
 from fastapi import APIRouter, Depends, HTTPException
 from starlette.responses import Response
+from obspy.core import Stream, Stats
 
-from ...algorithm import DbDtAlgorithm
+from ...algorithm import DbDtAlgorithm, FilterAlgorithm
 from ...residual import (
     calculate,
     Reading,
 )
-from .DataApiQuery import DataApiQuery
+from .DataApiQuery import DataApiQuery, SamplingPeriod
 from .data import format_timeseries, get_data_factory, get_data_query, get_timeseries
 
 
@@ -36,6 +37,63 @@ def get_dbdt(
     )
 
 
+
+
+
+
+
+
+# NOTE(review): this endpoint is not visible on the docs page;
+# check how routers are registered in the backend.
+@router.get(
+    "/algorithms/filter/",
+    description="Filtered data dependent on requested interval",
+    name="Filtered Algorithm",
+)
+def get_filter(
+    query: DataApiQuery = Depends(get_data_query),
+) -> Response:
+    """Filter one-second data down to the requested sampling period."""
+    # Derive the cascade of filter steps from one-second input to the
+    # requested output period; steps=None lets the algorithm compute them.
+    filt = FilterAlgorithm(
+        input_sample_period=SamplingPeriod.SECOND,
+        output_sample_period=query.sampling_period,
+        steps=None,
+    )
+    steps = filt.get_filter_steps()
+    filt.steps = steps
+
+    data_factory = get_data_factory(query=query)
+    # read raw (one-second) data
+    raw = get_timeseries(data_factory, query)
+    # run the filter cascade one step at a time
+    timeseries = process_in_stages(raw, steps)
+
+    elements = [f"{element}" for element in query.elements]
+    # output response
+    return format_timeseries(
+        timeseries=timeseries, format=query.format, elements=elements
+    )
+def process_in_stages(input_data, steps):
+    """Run *input_data* through each filter step in sequence.
+
+    Each stage's output feeds the next; returns the final output.
+    """
+    # Start from the raw input so an empty step list is a no-op.
+    current_data = input_data
+    for step in steps:
+        # Build a single-step filter for this stage.
+        filt = FilterAlgorithm(
+            input_sample_period=step["input_sample_period"],
+            output_sample_period=step["output_sample_period"],
+            steps=[step],
+        )
+        # Chain: feed the previous stage's output into this stage.
+        current_data = filt.process(current_data)
+    return current_data
+
+##########################################
+
 @router.post(
     "/algorithms/residual",
     description="Caclulates new absolutes and baselines from reading\n\n"
diff --git a/geomagio/api/ws/data.py b/geomagio/api/ws/data.py
index 138f3417d38f9123d8171f5de85d874bff42c5c9..0fa380bdcf97e90e18078d24479ca36a3d545a3a 100644
--- a/geomagio/api/ws/data.py
+++ b/geomagio/api/ws/data.py
@@ -34,6 +34,10 @@ def get_data_factory(
     sampling_period = query.sampling_period
     if query.id in ASL_OBSERVATORY_INDEX:
         factory = FDSNFactory(network="IU", locationCode="40")
+        # if sampling_period in [SamplingPeriod.MINUTE]:
+        #     print("THIS IS WHERE THE QUERY IS")
+            
+           
     elif sampling_period in [
         SamplingPeriod.TEN_HERTZ,
         SamplingPeriod.HOUR,
@@ -176,15 +180,27 @@ def get_timeseries(data_factory: TimeseriesFactory, query: DataApiQuery) -> Stre
     data_factory: where to read data
     query: parameters for the data to read
     """
-    # get data
-    timeseries = data_factory.get_timeseries(
+
+    # Always fetch one-second data for variation so any filter step has full-rate input.
+    if query.data_type == "variation":
+        timeseries = data_factory.get_timeseries(
         starttime=query.starttime,
         endtime=query.endtime,
         observatory=query.id,
         channels=query.elements,
         type=query.data_type,
-        interval=TimeseriesUtility.get_interval_from_delta(query.sampling_period),
+        interval=TimeseriesUtility.get_interval_from_delta(SamplingPeriod.SECOND),
     )
+    else:
+        # Non-variation data: honor the requested sampling period directly.
+        timeseries = data_factory.get_timeseries(
+            starttime=query.starttime,
+            endtime=query.endtime,
+            observatory=query.id,
+            channels=query.elements,
+            type=query.data_type,
+            interval=TimeseriesUtility.get_interval_from_delta(query.sampling_period),
+        )
     return timeseries
 
 
diff --git a/geomagio/edge/FDSNFactory.py b/geomagio/edge/FDSNFactory.py
index 91020bd3b41328fcaa64dca931f1a9b5e49fd0f7..160cb06353d4b5f8df35f30b0f5cfa8b45b3046d 100644
--- a/geomagio/edge/FDSNFactory.py
+++ b/geomagio/edge/FDSNFactory.py
@@ -9,6 +9,7 @@ import numpy
 import sys
 import numpy.ma
 from obspy import Stream, UTCDateTime, Trace, read, read_inventory
+from obspy.clients.fdsn.header import FDSNNoDataException
 from obspy.clients.iris import Client
 from obspy.clients.fdsn import Client as FDSNClient
 
@@ -23,6 +24,7 @@ from .FDSNSNCL import FDSNSNCL
 from .SNCL import SNCL
 
 
+
 class FDSNFactory(TimeseriesFactory):
     """TimeseriesFactory for Edge related data.
 
@@ -88,7 +90,6 @@ class FDSNFactory(TimeseriesFactory):
         snclMapper: str = "geomag",
     ):
         TimeseriesFactory.__init__(self, observatory, channels, type, interval)
-
         self.Client = FDSNClient(base_url)
 
         self.tag = tag
@@ -110,7 +111,7 @@ class FDSNFactory(TimeseriesFactory):
         channels: Optional[List[str]] = None,
         type: Optional[DataType] = None,
         interval: Optional[DataInterval] = None,
-        add_empty_channels: bool = True,
+        add_empty_channels: bool = True,
     ) -> Stream:
         """Get timeseries data
 
@@ -242,9 +243,9 @@ class FDSNFactory(TimeseriesFactory):
                 endtime=endtime + half_delta,
                 attach_response=True,
             )
-        except TypeError:
+        except FDSNNoDataException:
             data = Stream()
-
+        # NOTE(review): removed leftover debug print (data.count is a method and was never called)
         # make sure data is 32bit int
         for trace in data:
             trace.data = trace.data.astype("i4")
@@ -264,31 +265,32 @@ class FDSNFactory(TimeseriesFactory):
             TimeseriesUtility.pad_and_trim_trace(
                 trace=data[0], starttime=starttime, endtime=endtime
             )
-        # Beneath is necessary code to check the reported azimuth
-        # for the LF1 (E or Y) and LF2 (H or X) channels and determine if intstrument axes have been reversed.
-        azi, dip = self._get_orientations(data[0], starttime, sncl)
-
-        # Checks Azimuth for LF2 channel
-        if 270 > azi and azi > 90.0 and data[0].stats.channel == "LF2":
-            data[0].data *= -1
-
-        # Checks Azimuth for LF1 channel
-        elif azi > 180 and azi < 360 and data[0].stats.channel == "LF1":
-            data[0].data *= -1
-
-        # Checks Dip for LFZ channel
-        elif dip > 0 and dip < 180 and data[0].stats.channel == "LFZ":
-            data[0].data *= -1
-
-        # Remove channel Response:
-        # The function "remove_response" appears to be doing what we want;
-        # i.e. convert from counts to NT, but this may be a placeholder
-        # at least until we see how this function behaves if
-        # a station has a frequency response.
-        data.remove_response(output="DEF", zero_mean=False, taper=False)
+            # Beneath is necessary code to check the reported azimuth
+            # for the LF1 (E or Y) and LF2 (H or X) channels and determine if intstrument axes have been reversed.
+            azi, dip = self._get_orientations(data[0], starttime, sncl)
+
+            # Checks Azimuth for LF2 channel
+            if 270 > azi and azi > 90.0 and data[0].stats.channel[-2:] == "F2":
+                data[0].data *= -1
+
+            # Checks Azimuth for LF1 channel
+            elif azi > 180 and azi < 360 and data[0].stats.channel[-2:] == "F1":
+                data[0].data *= -1
+
+            # Checks Dip for LFZ channel
+            elif dip > 0 and dip < 180 and data[0].stats.channel[-2:] == "FZ":
+                data[0].data *= -1
+
+            # Remove channel Response:
+            # The function "remove_response" appears to be doing what we want;
+            # i.e. convert from counts to NT, but this may be a placeholder
+            # at least until we see how this function behaves if
+            # a station has a frequency response.
+            if data.count() != 0:
+                data.remove_response(output="DEF", zero_mean=False, taper=False)
 
         self._set_metadata(data, observatory, channel, type, interval)
-
+    
         return data
 
     def _post_process(
@@ -357,8 +359,8 @@ class FDSNFactory(TimeseriesFactory):
                 trace.stats, observatory, channel, type, interval
             )
 
-    def _get_orientations(
-        self, trace: Trace, starttime: UTCDateTime, sncl
+    def _get_orientations(
+        self, trace: Trace, starttime: UTCDateTime, sncl
     ) -> Tuple[float, float]:
         # Retrieve station orientation information using FDSN for each trace
 
@@ -383,4 +385,4 @@ class FDSNFactory(TimeseriesFactory):
         trace.stats.azimuth = azimuth
 
         # Return both azimuth and dip
-        return azimuth, dip
+        return azimuth, dip
\ No newline at end of file
diff --git a/geomagio/edge/FDSNSNCL.py b/geomagio/edge/FDSNSNCL.py
index 58c5e68c9f4e35e85d3b86e479bedc30b52acb3e..aa727a12fc6754312cd249e23d5e1b3f1d26402b 100644
--- a/geomagio/edge/FDSNSNCL.py
+++ b/geomagio/edge/FDSNSNCL.py
@@ -67,7 +67,7 @@ def get_FDSN_channel(
     location: Optional[str] = None,
 ) -> str:
     if location == "40" and network == "IU":
-        return get_channel_start(interval=interval) + _get_channel_end(element=element)
+        return _get_channel_start(interval=interval, data_type=data_type) + _get_channel_end(element=element)
     return get_channel(element=element, interval=interval, data_type=data_type)
 
 
@@ -80,4 +80,19 @@ def _get_channel_end(element: str) -> str:
         return "FZ"
     elif element[0:2] == "LF":  # predefined element
         return element[1:]
+    elif element[0:2] == "UF":  # predefined element
+        return element[1:]
     raise ValueError(f"Unsupported element: {element}")
+
+def _get_channel_start(interval: str, data_type: str) -> str:
+    if interval == "tenhertz":
+        return "B"
+    elif interval == "second":
+        return "L"
+    elif interval == "minute":
+        return "U"
+    elif interval == "hour":
+        return "R"
+    elif interval == "day":
+        return "P"
+    raise ValueError(f"Unexpected interval: {interval}")
\ No newline at end of file