diff --git a/bin/__init__.py b/bin/__init__.py
index a01f64b9ef8b91d00ee7c52e54c11004c75bba04..ef7e90169311f54c153d71afe5c359dddec2d4f8 100644
--- a/bin/__init__.py
+++ b/bin/__init__.py
@@ -4,6 +4,4 @@ from __future__ import absolute_import
 
 from . import geomag
 
-__all__ = [
-    'geomag'
-]
+__all__ = ["geomag"]
diff --git a/bin/geomag.py b/bin/geomag.py
index 0682db176f1bb22d090980a7238719e5d0d9e11d..454aef8207fb52770f5f5f3a407b07a9cbc53275 100755
--- a/bin/geomag.py
+++ b/bin/geomag.py
@@ -2,17 +2,18 @@
 
 from os import path
 import sys
+
 # ensure geomag is on the path before importing
 try:
     import geomagio  # noqa (tells linter to ignore this line.)
 except ImportError:
     script_dir = path.dirname(path.abspath(__file__))
-    sys.path.append(path.normpath(path.join(script_dir, '..')))
+    sys.path.append(path.normpath(path.join(script_dir, "..")))
 
 
 from geomagio.Controller import main, parse_args
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     args = parse_args(sys.argv[1:])
     main(args)
diff --git a/bin/geomag_webservice.py b/bin/geomag_webservice.py
index 00e6b81667ee21b20e703e68b2717e95440d0bc6..1f027d3e2f1bf1091c34e61382173c5cd2c0f38e 100755
--- a/bin/geomag_webservice.py
+++ b/bin/geomag_webservice.py
@@ -5,27 +5,28 @@ from __future__ import absolute_import, print_function
 import os
 import sys
 from wsgiref.simple_server import make_server
+
 # ensure geomag is on the path before importing
 try:
     import geomagio  # noqa (tells linter to ignore this line.)
 except ImportError:
     path = os.path
     script_dir = path.dirname(path.abspath(__file__))
-    sys.path.append(path.normpath(path.join(script_dir, '..')))
+    sys.path.append(path.normpath(path.join(script_dir, "..")))
     import geomagio
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     # read configuration from environment
-    edge_host = os.getenv('EDGE_HOST', 'cwbpub.cr.usgs.gov')
-    edge_port = int(os.getenv('EDGE_PORT', '2060'))
-    factory_type = os.getenv('GEOMAG_FACTORY_TYPE', 'edge')
-    webservice_host = os.getenv('GEOMAG_WEBSERVICE_HOST', '')
-    webservice_port = int(os.getenv('GEOMAG_WEBSERVICE_PORT', '7981'))
-    version = os.getenv('GEOMAG_VERSION', None)
+    edge_host = os.getenv("EDGE_HOST", "cwbpub.cr.usgs.gov")
+    edge_port = int(os.getenv("EDGE_PORT", "2060"))
+    factory_type = os.getenv("GEOMAG_FACTORY_TYPE", "edge")
+    webservice_host = os.getenv("GEOMAG_WEBSERVICE_HOST", "")
+    webservice_port = int(os.getenv("GEOMAG_WEBSERVICE_PORT", "7981"))
+    version = os.getenv("GEOMAG_VERSION", None)
 
     # configure factory
-    if factory_type == 'edge':
+    if factory_type == "edge":
         factory = geomagio.edge.EdgeFactory(host=edge_host, port=edge_port)
     else:
         raise "Unknown factory type '%s'" % factory_type
diff --git a/bin/main.py b/bin/main.py
index 40be09ea3db72dc7e578ad28aa619b417ba2e5fd..41c4db072a35b56a0a16b8b719b2a6f549b16ea9 100755
--- a/bin/main.py
+++ b/bin/main.py
@@ -2,12 +2,13 @@
 
 from os import path
 import sys
+
 # ensure geomag is on the path before importing
 try:
     import geomagio  # noqa (ignores this line for lint purposes.)
 except ImportError:
     script_dir = path.dirname(path.abspath(__file__))
-    sys.path.append(path.normpath(path.join(script_dir, '..')))
+    sys.path.append(path.normpath(path.join(script_dir, "..")))
 
 
 import geomagio.iaga2002 as iaga2002
@@ -16,15 +17,22 @@ from obspy.core.utcdatetime import UTCDateTime
 
 def main():
     """Example loading IAGA2002 test data from a directory."""
-    iaga_dir = path.normpath(path.join(script_dir, '../etc/iaga2002'))
-    factory = iaga2002.IAGA2002Factory('file://' + iaga_dir +
-            '/%(OBS)s/%(interval)s%(type)s/%(obs)s%(ymd)s%(t)s%(i)s.%(i)s',
-            observatory='BOU', channels=('H', 'D', 'Z', 'F'),
-            interval='minute', type='variation')
+    script_dir = path.dirname(path.abspath(__file__))
+    iaga_dir = path.normpath(path.join(script_dir, "../etc/iaga2002"))
+    factory = iaga2002.IAGA2002Factory(
+        "file://"
+        + iaga_dir
+        + "/%(OBS)s/%(interval)s%(type)s/%(obs)s%(ymd)s%(t)s%(i)s.%(i)s",
+        observatory="BOU",
+        channels=("H", "D", "Z", "F"),
+        interval="minute",
+        type="variation",
+    )
     timeseries = factory.get_timeseries(
-            UTCDateTime('2014-11-01'), UTCDateTime('2014-11-02'))
+        UTCDateTime("2014-11-01"), UTCDateTime("2014-11-02")
+    )
     print(timeseries)
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     main()
diff --git a/bin/make_cal.py b/bin/make_cal.py
index b644e03d4a8299b3d6515b0be39b252de07b40cb..eaa91d09a25e0805c60922ed239b8a088af6610d 100755
--- a/bin/make_cal.py
+++ b/bin/make_cal.py
@@ -19,10 +19,10 @@ import urllib2
 
 # format used to output files
 # "{OBSERVATORY}" and "{YEAR}" are replaced with argument values
-FILENAME_FORMAT = './{OBSERVATORY}{YEAR}WebAbsMaster.cal'
+FILENAME_FORMAT = "./{OBSERVATORY}{YEAR}WebAbsMaster.cal"
 
 # url for observation web service
-SERVICE_URL = 'https://geomag.usgs.gov/baselines/observation.json.php'
+SERVICE_URL = "https://geomag.usgs.gov/baselines/observation.json.php"
 
 ############################################################################
 # DO NOT EDIT BELOW THIS LINE
@@ -31,8 +31,8 @@ SERVICE_URL = 'https://geomag.usgs.gov/baselines/observation.json.php'
 # parse observatory and year arguments
 if len(sys.argv) != 3:
     cmd = sys.argv[0]
-    print('Usage:   {} OBSERVATORY YEAR'.format(cmd), file=sys.stderr)
-    print('Example: {} BOU 2016'.format(cmd), file=sys.stderr)
+    print("Usage:   {} OBSERVATORY YEAR".format(cmd), file=sys.stderr)
+    print("Example: {} BOU 2016".format(cmd), file=sys.stderr)
     sys.exit(1)
 
 OBSERVATORY = sys.argv[1]
@@ -40,79 +40,91 @@ YEAR = int(sys.argv[2])
 
 
 # request observations from service
-url = SERVICE_URL + '?' + '&'.join([
-    'observatory=' + OBSERVATORY,
-    'starttime=' + str(YEAR) + '-01-01',
-    'endtime=' + str(YEAR + 1) + '-01-01',
-])
+url = (
+    SERVICE_URL
+    + "?"
+    + "&".join(
+        [
+            "observatory=" + OBSERVATORY,
+            "starttime=" + str(YEAR) + "-01-01",
+            "endtime=" + str(YEAR + 1) + "-01-01",
+        ]
+    )
+)
 
 try:
-    print('Loading data from web service\n\t{}'.format(url), file=sys.stderr)
-    response = urllib2.urlopen(url,
+    print("Loading data from web service\n\t{}".format(url), file=sys.stderr)
+    response = urllib2.urlopen(
+        url,
         # allow environment certificate bundle override
-        cafile=os.environ.get('SSL_CERT_FILE'))
+        cafile=os.environ.get("SSL_CERT_FILE"),
+    )
     data = response.read()
     observations = json.loads(data)
 except Exception as e:
-    print('Error loading data ({})'.format(str(e)), file=sys.stderr)
+    print("Error loading data ({})".format(str(e)), file=sys.stderr)
     sys.exit(1)
 
 
 # extract all valid cal values
 cals = []
-for observation in observations['data']:
-    for reading in observation['readings']:
-        for channel in ['H', 'D', 'Z']:
+for observation in observations["data"]:
+    for reading in observation["readings"]:
+        for channel in ["H", "D", "Z"]:
             cal = reading[channel]
-            if not cal['absolute'] or \
-                    not cal['baseline'] or \
-                    not cal['end'] or \
-                    not cal['start'] or \
-                    not cal['valid']:
+            if (
+                not cal["absolute"]
+                or not cal["baseline"]
+                or not cal["end"]
+                or not cal["start"]
+                or not cal["valid"]
+            ):
                 # not a valid cal value
                 continue
             # convert D values from degrees to minutes
-            multiplier = 60 if channel == 'D' else 1
-            absolute = cal['absolute'] * multiplier
-            baseline = cal['baseline'] * multiplier
-            end = datetime.utcfromtimestamp(cal['end'])
-            start = datetime.utcfromtimestamp(cal['start'])
-            cals.append({
-                'absolute': absolute,
-                'baseline': baseline,
-                'channel': channel,
-                'end': end,
-                'start': start
-            })
+            multiplier = 60 if channel == "D" else 1
+            absolute = cal["absolute"] * multiplier
+            baseline = cal["baseline"] * multiplier
+            end = datetime.utcfromtimestamp(cal["end"])
+            start = datetime.utcfromtimestamp(cal["start"])
+            cals.append(
+                {
+                    "absolute": absolute,
+                    "baseline": baseline,
+                    "channel": channel,
+                    "end": end,
+                    "start": start,
+                }
+            )
 
 
 # format calfile
-CAL_HEADER_FORMAT = '--{date:%Y %m %d} ({channel})'
-CAL_LINE_FORMAT = '{start:%H%M}-{end:%H%M} c{baseline:9.1f}{absolute:9.1f}'
+CAL_HEADER_FORMAT = "--{date:%Y %m %d} ({channel})"
+CAL_LINE_FORMAT = "{start:%H%M}-{end:%H%M} c{baseline:9.1f}{absolute:9.1f}"
 
 calfile = []
 # output by date in order
-cals = sorted(cals, key=lambda c: c['start'])
+cals = sorted(cals, key=lambda c: c["start"])
 # group by date
-for date, cals in itertools.groupby(cals, key=lambda c: c['start'].date()):
+for date, cals in itertools.groupby(cals, key=lambda c: c["start"].date()):
     # convert group to list so it can be reused
     cals = list(cals)
     # within each day, order by H, then D, then Z
-    for channel in ['H', 'D', 'Z']:
-        channel_cals = [c for c in cals if c['channel'] == channel]
+    for channel in ["H", "D", "Z"]:
+        channel_cals = [c for c in cals if c["channel"] == channel]
         if not channel_cals:
             # no matching values
             continue
         # add channel header
         calfile.append(CAL_HEADER_FORMAT.format(channel=channel, date=date))
         calfile.extend([CAL_LINE_FORMAT.format(**c) for c in channel_cals])
-calfile.append('')
+calfile.append("")
 
 
 # write calfile
 filename = FILENAME_FORMAT.format(OBSERVATORY=OBSERVATORY, YEAR=YEAR)
-print('Writing cal file to {}'.format(filename), file=sys.stderr)
-with open(filename, 'wb', -1) as f:
+print("Writing cal file to {}".format(filename), file=sys.stderr)
+with open(filename, "wb", -1) as f:
     f.write(os.linesep.join(calfile))
 
 
diff --git a/bin/monitor.py b/bin/monitor.py
index 7e636abda7eceec6bcfec7ceb72a3da95e1bb991..7910bb098a5883697e5040fe411960de035e7675 100755
--- a/bin/monitor.py
+++ b/bin/monitor.py
@@ -3,12 +3,13 @@
 """Monitor """
 from os import path
 import sys
+
 # ensure geomag is on the path before importing
 try:
     import geomagio  # noqa (tells linter to ignore this line.)
 except ImportError:
     script_dir = path.dirname(path.abspath(__file__))
-    sys.path.append(path.normpath(path.join(script_dir, '..')))
+    sys.path.append(path.normpath(path.join(script_dir, "..")))
 
 import argparse
 import sys
@@ -26,9 +27,9 @@ def calculate_warning_threshold(warning_threshold, interval):
     interval: string
         the interval being warned against
     """
-    if interval == 'minute':
+    if interval == "minute":
         warning_threshold *= 60
-    elif interval == 'second':
+    elif interval == "second":
         warning_threshold *= 3600
     return warning_threshold
 
@@ -59,14 +60,15 @@ def get_gaps(gaps):
     gaps: array
         Array of gaps
     """
-    gap_string = ''
+    gap_string = ""
     if len(gaps):
         for gap in gaps:
-            gap_string += '&nbsp;&nbsp;&nbsp;&nbsp; %s to %s <br>\n' % \
-                (format_time(gap[0]),
-                 format_time(gap[1]))
+            gap_string += "&nbsp;&nbsp;&nbsp;&nbsp; %s to %s <br>\n" % (
+                format_time(gap[0]),
+                format_time(gap[1]),
+            )
     else:
-        gap_string = '&nbsp;&nbsp;&nbsp;&nbsp;None<br>'
+        gap_string = "&nbsp;&nbsp;&nbsp;&nbsp;None<br>"
     return gap_string
 
 
@@ -81,10 +83,10 @@ def get_gap_total(gaps, interval):
     """
     total = 0
     divisor = 1
-    if interval == 'minute':
+    if interval == "minute":
         divisor = 60
     for gap in gaps:
-        total += (int(gap[2] - gap[0]) / divisor)
+        total += int(gap[2] - gap[0]) / divisor
     return total
 
 
@@ -105,35 +107,37 @@ def get_last_time(gaps, endtime):
 
 
 def get_table_header():
-    return '<table style="border-collapse: collapse;">\n' + \
-        '<thead>\n' + \
-            '<tr>\n' + \
-                '<th style="border:1px solid black; padding: 2px;">' +\
-                    '</th>\n' + \
-                '<th style="border:1px solid black; padding: 2px;">' +\
-                    '</th>\n' + \
-                '<th colspan=3 ' +\
-                    'style="border:1px solid black; padding: 2px;">' +\
-                    'Gap</th>\n' + \
-                '<th style="border:1px solid black; padding: 2px;">' +\
-                    '</th>\n' + \
-              '</tr>\n' + \
-              '<tr>\n' + \
-                '<th style="border:1px solid black; padding: 2px;">' +\
-                    'Channel</th>\n' + \
-                '<th style="border:1px solid black; padding: 2px;">' +\
-                    'Last Time Value</th>\n' + \
-                '<th style="border:1px solid black; padding: 2px;">' +\
-                    'Count</th>\n' + \
-                '<th style="border:1px solid black; padding: 2px;">' +\
-                    'Total Time</th>\n' + \
-                '<th style="border:1px solid black; padding: 2px;">' +\
-                    'Percentage</th>\n' + \
-                '<th style="border:1px solid black; padding: 2px;">' +\
-                    'Total Values</th>\n' + \
-            '</tr>\n' + \
-        '</thead>\n' + \
-        '<tbody>\n'
+    return (
+        '<table style="border-collapse: collapse;">\n'
+        + "<thead>\n"
+        + "<tr>\n"
+        + '<th style="border:1px solid black; padding: 2px;">'
+        + "</th>\n"
+        + '<th style="border:1px solid black; padding: 2px;">'
+        + "</th>\n"
+        + "<th colspan=3 "
+        + 'style="border:1px solid black; padding: 2px;">'
+        + "Gap</th>\n"
+        + '<th style="border:1px solid black; padding: 2px;">'
+        + "</th>\n"
+        + "</tr>\n"
+        + "<tr>\n"
+        + '<th style="border:1px solid black; padding: 2px;">'
+        + "Channel</th>\n"
+        + '<th style="border:1px solid black; padding: 2px;">'
+        + "Last Time Value</th>\n"
+        + '<th style="border:1px solid black; padding: 2px;">'
+        + "Count</th>\n"
+        + '<th style="border:1px solid black; padding: 2px;">'
+        + "Total Time</th>\n"
+        + '<th style="border:1px solid black; padding: 2px;">'
+        + "Percentage</th>\n"
+        + '<th style="border:1px solid black; padding: 2px;">'
+        + "Total Values</th>\n"
+        + "</tr>\n"
+        + "</thead>\n"
+        + "<tbody>\n"
+    )
 
 
 def has_gaps(gaps):
@@ -160,21 +164,23 @@ def print_html_header(starttime, endtime, title):
     title: string
         The title passed in by the user
     """
-    print('<!DOCTYPE html>\n' +
-        '<html>\n' +
-            '<head>\n' +
-                '<title> %s \n to %s \n</title>' %
-                        (format_time(starttime), format_time(endtime)) +
-            '</head>\n' +
-            '<body>\n' +
-                '<style type="text/css">\n' +
-                    'table {border-collapse: collapse;}\n' +
-                    'th {border:1px solid black; padding: 2px;}\n' +
-                    'td {text-align:center;}\n' +
-                '</style>\n' +
-                title + '<br>\n'
-                '%s to %s ' %
-                    (format_time(starttime), format_time(endtime)))
+    print(
+        "<!DOCTYPE html>\n"
+        + "<html>\n"
+        + "<head>\n"
+        + "<title> %s \n to %s \n</title>"
+        % (format_time(starttime), format_time(endtime))
+        + "</head>\n"
+        + "<body>\n"
+        + '<style type="text/css">\n'
+        + "table {border-collapse: collapse;}\n"
+        + "th {border:1px solid black; padding: 2px;}\n"
+        + "td {text-align:center;}\n"
+        + "</style>\n"
+        + title
+        + "<br>\n"
+        "%s to %s " % (format_time(starttime), format_time(endtime))
+    )
 
 
 def print_observatories(args):
@@ -196,73 +202,74 @@ def print_observatories(args):
     host = args.edge_host
     table_header = get_table_header()
     warning_issued = False
-    table_end = \
-        '</tbody>\n' + \
-        '</table>\n'
+    table_end = "</tbody>\n" + "</table>\n"
 
     for observatory in args.observatories:
-        summary_table = ''
-        gap_details = ''
+        summary_table = ""
+        gap_details = ""
         print_it = False
-        summary_header = '<p>Observatory: %s </p>\n' % observatory
+        summary_header = "<p>Observatory: %s </p>\n" % observatory
         summary_table += table_header
         for interval in intervals:
             factory = edge.EdgeFactory(
-                    host=host,
-                    port=2060,
-                    observatory=observatory,
-                    type=args.type,
-                    channels=channels,
-                    locationCode=args.locationcode,
-                    interval=interval)
-
-            timeseries = factory.get_timeseries(
-                    starttime=starttime,
-                    endtime=endtime)
+                host=host,
+                port=2060,
+                observatory=observatory,
+                type=args.type,
+                channels=channels,
+                locationCode=args.locationcode,
+                interval=interval,
+            )
+
+            timeseries = factory.get_timeseries(starttime=starttime, endtime=endtime)
             gaps = TimeseriesUtility.get_stream_gaps(timeseries)
             if args.gaps_only and not has_gaps(gaps):
                 continue
             else:
                 print_it = True
 
-            warning = ''
+            warning = ""
             warning_threshold = calculate_warning_threshold(
-                    args.warning_threshold, interval)
+                args.warning_threshold, interval
+            )
 
-            summary_table += '<tr>'
+            summary_table += "<tr>"
             summary_table += '<td style="text-align:center;">'
-            summary_table += ' %sS \n </td></tr>\n' % interval.upper()
-            gap_details += '&nbsp;&nbsp;%sS <br>\n' % interval.upper()
+            summary_table += " %sS \n </td></tr>\n" % interval.upper()
+            gap_details += "&nbsp;&nbsp;%sS <br>\n" % interval.upper()
             for channel in channels:
                 gap = gaps[channel]
                 trace = timeseries.select(channel=channel)[0]
                 total = get_gap_total(gap, interval)
                 percentage, count = calculate_gap_percentage(total, trace)
                 last = get_last_time(gap, endtime)
-                summary_table += '<tr>\n'
-                summary_table += '<td style="text-align:center;">%s</td>' % \
-                        channel
-                summary_table += '<td style="text-align:center;">%s</td>' % \
-                        format_time(last)
-                summary_table += '<td style="text-align:center;">%d</td>' % \
-                        len(gap)
-                summary_table += '<td style="text-align:center;">%d %s</td>' \
-                        % (total, interval)
-                summary_table += '<td style="text-align:center;">%0.2f%%</td>'\
-                        % percentage
-                summary_table += '<td style="text-align:center;">%d</td>' \
-                        % count
-                summary_table += '</tr>\n'
+                summary_table += "<tr>\n"
+                summary_table += '<td style="text-align:center;">%s</td>' % channel
+                summary_table += '<td style="text-align:center;">%s</td>' % format_time(
+                    last
+                )
+                summary_table += '<td style="text-align:center;">%d</td>' % len(gap)
+                summary_table += '<td style="text-align:center;">%d %s</td>' % (
+                    total,
+                    interval,
+                )
+                summary_table += (
+                    '<td style="text-align:center;">%0.2f%%</td>' % percentage
+                )
+                summary_table += '<td style="text-align:center;">%d</td>' % count
+                summary_table += "</tr>\n"
                 if endtime - last > warning_threshold:
-                    warning += '%s ' % channel
+                    warning += "%s " % channel
                     warning_issued = True
                 # Gap Detail
-                gap_details += '&nbsp;&nbsp;Channel: %s <br>\n' % channel
-                gap_details += get_gaps(gap) + '\n'
+                gap_details += "&nbsp;&nbsp;Channel: %s <br>\n" % channel
+                gap_details += get_gaps(gap) + "\n"
             if len(warning):
-                summary_header += 'Warning: Channels older then ' + \
-                    'warning-threshold ' + \
-                    '%s %ss<br>\n' % (warning, interval)
+                summary_header += (
+                    "Warning: Channels older then "
+                    + "warning-threshold "
+                    + "%s %ss<br>\n" % (warning, interval)
+                )
         summary_table += table_end
         if print_it:
             print(summary_header)
@@ -287,8 +294,7 @@ def main(args):
     print_html_header(args.starttime, args.endtime, args.title)
 
     warning_issued = print_observatories(args)
-    print('</body>\n' +
-          '</html>\n')
+    print("</body>\n" + "</html>\n")
 
     sys.exit(warning_issued)
 
@@ -306,55 +312,67 @@ def parse_args(args):
         dictionary like object containing arguments.
     """
     parser = argparse.ArgumentParser(
-        description='Use @ to read commands from a file.',
-        fromfile_prefix_chars='@')
-
-    parser.add_argument('--starttime',
-            required=True,
-            type=UTCDateTime,
-            default=None,
-            help='UTC date YYYY-MM-DD HH:MM:SS')
-    parser.add_argument('--endtime',
-            required=True,
-            type=UTCDateTime,
-            default=None,
-            help='UTC date YYYY-MM-DD HH:MM:SS')
-    parser.add_argument('--edge-host',
-            required=True,
-            help='IP/URL for edge connection')
-    parser.add_argument('--observatories',
-            required=True,
-            nargs='*',
-            help='Observatory code ie BOU, CMO, etc')
-    parser.add_argument('--channels',
-            nargs='*',
-            default=['H', 'E', 'Z', 'F'],
-            help='Channels H, E, Z, etc')
-    parser.add_argument('--intervals',
-            nargs='*',
-            default=['minute'],
-            choices=['hourly', 'minute', 'second'])
-    parser.add_argument('--locationcode',
-            default='R0',
-            choices=['R0', 'R1', 'RM', 'Q0', 'D0', 'C0'])
-    parser.add_argument('--type',
-            default='variation',
-            choices=['variation', 'quasi-definitive', 'definitive'])
-    parser.add_argument('--warning-threshold',
-            type=int,
-            default=60,
-            help='How many time slices should pass before a warning is issued')
-    parser.add_argument('--gaps-only',
-            action='store_true',
-            default=True,
-            help='Only print Observatories with gaps.')
-    parser.add_argument('--title',
-            default='',
-            help='Title for the top of the report')
+        description="Use @ to read commands from a file.", fromfile_prefix_chars="@"
+    )
+
+    parser.add_argument(
+        "--starttime",
+        required=True,
+        type=UTCDateTime,
+        default=None,
+        help="UTC date YYYY-MM-DD HH:MM:SS",
+    )
+    parser.add_argument(
+        "--endtime",
+        required=True,
+        type=UTCDateTime,
+        default=None,
+        help="UTC date YYYY-MM-DD HH:MM:SS",
+    )
+    parser.add_argument("--edge-host", required=True, help="IP/URL for edge connection")
+    parser.add_argument(
+        "--observatories",
+        required=True,
+        nargs="*",
+        help="Observatory code ie BOU, CMO, etc",
+    )
+    parser.add_argument(
+        "--channels",
+        nargs="*",
+        default=["H", "E", "Z", "F"],
+        help="Channels H, E, Z, etc",
+    )
+    parser.add_argument(
+        "--intervals",
+        nargs="*",
+        default=["minute"],
+        choices=["hourly", "minute", "second"],
+    )
+    parser.add_argument(
+        "--locationcode", default="R0", choices=["R0", "R1", "RM", "Q0", "D0", "C0"]
+    )
+    parser.add_argument(
+        "--type",
+        default="variation",
+        choices=["variation", "quasi-definitive", "definitive"],
+    )
+    parser.add_argument(
+        "--warning-threshold",
+        type=int,
+        default=60,
+        help="How many time slices should pass before a warning is issued",
+    )
+    parser.add_argument(
+        "--gaps-only",
+        action="store_true",
+        default=True,
+        help="Only print Observatories with gaps.",
+    )
+    parser.add_argument("--title", default="", help="Title for the top of the report")
 
     return parser.parse_args(args)
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     args = parse_args(sys.argv[1:])
     main(args)
diff --git a/geomagio/ChannelConverter.py b/geomagio/ChannelConverter.py
index 1058f6c9c059f3a66361277fa01faa68c23220ea..a7c3bfc8b255f77521bce41916ab0b7484a02693 100644
--- a/geomagio/ChannelConverter.py
+++ b/geomagio/ChannelConverter.py
@@ -20,8 +20,8 @@ Notes: We use numpy functions instead of standard python arithmetic functions
 import numpy
 
 
-M2R = numpy.pi / 180 / 60       # Minutes to Radians
-R2M = 180.0 / numpy.pi * 60     # Radians to Minutes
+M2R = numpy.pi / 180 / 60  # Minutes to Radians
+R2M = 180.0 / numpy.pi * 60  # Radians to Minutes
 
 
 # ###
diff --git a/geomagio/Controller.py b/geomagio/Controller.py
index 14440d8372a750a0058bcb543794ea58a33be643..f6517a4cc9bf97149c9305fc519953c0112622af 100644
--- a/geomagio/Controller.py
+++ b/geomagio/Controller.py
@@ -80,17 +80,16 @@ class Controller(object):
             # do this per observatory in case an
             # algorithm needs different amounts of data
             input_start, input_end = self._algorithm.get_input_interval(
-                    start=starttime,
-                    end=endtime,
-                    observatory=obs,
-                    channels=channels)
+                start=starttime, end=endtime, observatory=obs, channels=channels
+            )
             if input_start is None or input_end is None:
                 continue
             timeseries += self._inputFactory.get_timeseries(
-                    observatory=obs,
-                    starttime=input_start,
-                    endtime=input_end,
-                    channels=channels)
+                observatory=obs,
+                starttime=input_start,
+                endtime=input_end,
+                channels=channels,
+            )
         return timeseries
 
     def _rename_channels(self, timeseries, renames):
@@ -116,8 +115,7 @@ class Controller(object):
                 t.stats.channel = to_name
         return timeseries
 
-    def _get_output_timeseries(self, observatory, channels, starttime,
-            endtime):
+    def _get_output_timeseries(self, observatory, channels, starttime, endtime):
         """Get timeseries from the output factory for requested options.
 
         Parameters
@@ -138,10 +136,8 @@ class Controller(object):
         timeseries = Stream()
         for obs in observatory:
             timeseries += self._outputFactory.get_timeseries(
-                observatory=obs,
-                starttime=starttime,
-                endtime=endtime,
-                channels=channels)
+                observatory=obs, starttime=starttime, endtime=endtime, channels=channels
+            )
         return timeseries
 
     def run(self, options, input_timeseries=None):
@@ -156,19 +152,18 @@ class Controller(object):
             already read the input to confirm data can be produced.
         """
         algorithm = self._algorithm
-        input_channels = options.inchannels or \
-                algorithm.get_input_channels()
-        output_channels = options.outchannels or \
-                algorithm.get_output_channels()
+        input_channels = options.inchannels or algorithm.get_input_channels()
+        output_channels = options.outchannels or algorithm.get_output_channels()
         next_starttime = algorithm.get_next_starttime()
         starttime = next_starttime or options.starttime
         endtime = options.endtime
         # input
         timeseries = input_timeseries or self._get_input_timeseries(
-                observatory=options.observatory,
-                starttime=starttime,
-                endtime=endtime,
-                channels=input_channels)
+            observatory=options.observatory,
+            starttime=starttime,
+            endtime=endtime,
+            channels=input_channels,
+        )
         if timeseries.count() == 0:
             # no data to process
             return
@@ -177,35 +172,34 @@ class Controller(object):
             # when running a stateful algorithms with the realtime option
             # pad/trim timeseries to the interval:
             # [next_starttime, max(timeseries.endtime, now-options.realtime)]
-            input_start, input_end = \
-                    TimeseriesUtility.get_stream_start_end_times(
-                            timeseries, without_gaps=True)
+            input_start, input_end = TimeseriesUtility.get_stream_start_end_times(
+                timeseries, without_gaps=True
+            )
             realtime_gap = endtime - options.realtime
             if input_end < realtime_gap:
                 input_end = realtime_gap
             # pad to the start of the "realtime gap"
-            TimeseriesUtility.pad_timeseries(timeseries,
-                    next_starttime, input_end)
+            TimeseriesUtility.pad_timeseries(timeseries, next_starttime, input_end)
         # process
         if options.rename_input_channel:
             timeseries = self._rename_channels(
-                    timeseries=timeseries,
-                    renames=options.rename_input_channel)
+                timeseries=timeseries, renames=options.rename_input_channel
+            )
         processed = algorithm.process(timeseries)
         # trim if --no-trim is not set
         if not options.no_trim:
-            processed.trim(starttime=starttime,
-                    endtime=endtime)
+            processed.trim(starttime=starttime, endtime=endtime)
         if options.rename_output_channel:
             processed = self._rename_channels(
-                    timeseries=processed,
-                    renames=options.rename_output_channel)
+                timeseries=processed, renames=options.rename_output_channel
+            )
         # output
         self._outputFactory.put_timeseries(
-                timeseries=processed,
-                starttime=starttime,
-                endtime=endtime,
-                channels=output_channels)
+            timeseries=processed,
+            starttime=starttime,
+            endtime=endtime,
+            channels=output_channels,
+        )
 
     def run_as_update(self, options, update_count=0):
         """Updates data.
@@ -232,43 +226,49 @@ class Controller(object):
                 return
         algorithm = self._algorithm
         if algorithm.get_next_starttime() is not None:
-            raise AlgorithmException(
-                    'Stateful algorithms cannot use run_as_update')
-        input_channels = options.inchannels or \
-                algorithm.get_input_channels()
+            raise AlgorithmException("Stateful algorithms cannot use run_as_update")
+        input_channels = options.inchannels or algorithm.get_input_channels()
         output_observatory = options.output_observatory
-        output_channels = options.outchannels or \
-                algorithm.get_output_channels()
-        print('checking gaps', options.starttime, options.endtime,
-                output_observatory, output_channels,
-                file=sys.stderr)
+        output_channels = options.outchannels or algorithm.get_output_channels()
+        print(
+            "checking gaps",
+            options.starttime,
+            options.endtime,
+            output_observatory,
+            output_channels,
+            file=sys.stderr,
+        )
         # request output to see what has already been generated
         output_timeseries = self._get_output_timeseries(
-                observatory=options.output_observatory,
-                starttime=options.starttime,
-                endtime=options.endtime,
-                channels=output_channels)
+            observatory=options.output_observatory,
+            starttime=options.starttime,
+            endtime=options.endtime,
+            channels=output_channels,
+        )
         if len(output_timeseries) > 0:
             # find gaps in output, so they can be updated
             output_gaps = TimeseriesUtility.get_merged_gaps(
-                    TimeseriesUtility.get_stream_gaps(output_timeseries))
+                TimeseriesUtility.get_stream_gaps(output_timeseries)
+            )
         else:
-            output_gaps = [[
-                options.starttime,
-                options.endtime,
-                # next sample time not used
-                None
-            ]]
+            output_gaps = [
+                [
+                    options.starttime,
+                    options.endtime,
+                    # next sample time not used
+                    None,
+                ]
+            ]
         for output_gap in output_gaps:
             input_timeseries = self._get_input_timeseries(
-                    observatory=options.observatory,
-                    starttime=output_gap[0],
-                    endtime=output_gap[1],
-                    channels=input_channels)
+                observatory=options.observatory,
+                starttime=output_gap[0],
+                endtime=output_gap[1],
+                channels=input_channels,
+            )
             if not algorithm.can_produce_data(
-                    starttime=output_gap[0],
-                    endtime=output_gap[1],
-                    stream=input_timeseries):
+                starttime=output_gap[0], endtime=output_gap[1], stream=input_timeseries
+            ):
                 continue
             # check for fillable gap at start
             if output_gap[0] == options.starttime:
@@ -282,9 +282,14 @@ class Controller(object):
             # fill gap
             options.starttime = output_gap[0]
             options.endtime = output_gap[1]
-            print('processing', options.starttime, options.endtime,
-                    output_observatory, output_channels,
-                    file=sys.stderr)
+            print(
+                "processing",
+                options.starttime,
+                options.endtime,
+                output_observatory,
+                output_channels,
+                file=sys.stderr,
+            )
             self.run(options, input_timeseries)
 
 
@@ -307,58 +312,61 @@ def get_input_factory(args):
 
     # standard arguments
     input_factory_args = {}
-    input_factory_args['interval'] = args.input_interval or args.interval
-    input_factory_args['observatory'] = args.observatory
-    input_factory_args['type'] = args.type
+    input_factory_args["interval"] = args.input_interval or args.interval
+    input_factory_args["observatory"] = args.observatory
+    input_factory_args["type"] = args.type
     # stream/url arguments
     if args.input_file is not None:
-        input_stream = open(args.input_file, 'r')
+        input_stream = open(args.input_file, "r")
     elif args.input_stdin:
         input_stream = sys.stdin
     elif args.input_url is not None:
-        if '{' in args.input_url:
-            input_factory_args['urlInterval'] = args.input_url_interval
-            input_factory_args['urlTemplate'] = args.input_url
+        if "{" in args.input_url:
+            input_factory_args["urlInterval"] = args.input_url_interval
+            input_factory_args["urlTemplate"] = args.input_url
         else:
             input_stream = BytesIO(Util.read_url(args.input_url))
     input_type = args.input
-    if input_type == 'edge':
+    if input_type == "edge":
         input_factory = edge.EdgeFactory(
-                host=args.input_host,
-                port=args.input_port,
-                locationCode=args.locationcode,
-                **input_factory_args)
-    elif input_type == 'miniseed':
+            host=args.input_host,
+            port=args.input_port,
+            locationCode=args.locationcode,
+            **input_factory_args
+        )
+    elif input_type == "miniseed":
         input_factory = edge.MiniSeedFactory(
-                host=args.input_host,
-                port=args.input_port,
-                locationCode=args.locationcode,
-                convert_channels=args.convert_voltbin,
-                **input_factory_args)
-    elif input_type == 'goes':
+            host=args.input_host,
+            port=args.input_port,
+            locationCode=args.locationcode,
+            convert_channels=args.convert_voltbin,
+            **input_factory_args
+        )
+    elif input_type == "goes":
         # TODO: deal with other goes arguments
         input_factory = imfv283.GOESIMFV283Factory(
-                directory=args.input_goes_directory,
-                getdcpmessages=args.input_goes_getdcpmessages,
-                password=args.input_goes_password,
-                server=args.input_goes_server,
-                user=args.input_goes_user,
-                **input_factory_args)
+            directory=args.input_goes_directory,
+            getdcpmessages=args.input_goes_getdcpmessages,
+            password=args.input_goes_password,
+            server=args.input_goes_server,
+            user=args.input_goes_user,
+            **input_factory_args
+        )
     else:
         # stream compatible factories
-        if input_type == 'iaga2002':
+        if input_type == "iaga2002":
             input_factory = iaga2002.IAGA2002Factory(**input_factory_args)
-        elif input_type == 'imfv122':
+        elif input_type == "imfv122":
             input_factory = imfv122.IMFV122Factory(**input_factory_args)
-        elif input_type == 'imfv283':
+        elif input_type == "imfv283":
             input_factory = imfv283.IMFV283Factory(**input_factory_args)
-        elif input_type == 'pcdcp':
+        elif input_type == "pcdcp":
             input_factory = pcdcp.PCDCPFactory(**input_factory_args)
         # wrap stream
         if input_stream is not None:
             input_factory = StreamTimeseriesFactory(
-                    factory=input_factory,
-                    stream=input_stream)
+                factory=input_factory, stream=input_stream
+            )
     return input_factory
 
 
@@ -382,12 +390,12 @@ def get_output_factory(args):
 
     # standard arguments
     output_factory_args = {}
-    output_factory_args['interval'] = args.output_interval or args.interval
-    output_factory_args['observatory'] = args.output_observatory
-    output_factory_args['type'] = args.type
+    output_factory_args["interval"] = args.output_interval or args.interval
+    output_factory_args["observatory"] = args.output_observatory
+    output_factory_args["type"] = args.type
     # stream/url arguments
     if args.output_file is not None:
-        output_stream = open(args.output_file, 'wb')
+        output_stream = open(args.output_file, "wb")
     elif args.output_stdout:
         try:
             # python 3
@@ -397,51 +405,53 @@ def get_output_factory(args):
             output_stream = sys.stdout
     elif args.output_url is not None:
         output_url = args.output_url
-        output_factory_args['urlInterval'] = args.output_url_interval
-        output_factory_args['urlTemplate'] = output_url
+        output_factory_args["urlInterval"] = args.output_url_interval
+        output_factory_args["urlTemplate"] = output_url
 
     output_type = args.output
-    if output_type == 'edge':
+    if output_type == "edge":
         # TODO: deal with other edge arguments
         locationcode = args.outlocationcode or args.locationcode or None
         output_factory = edge.EdgeFactory(
-                host=args.output_host,
-                port=args.output_read_port,
-                write_port=args.output_port,
-                locationCode=locationcode,
-                tag=args.output_edge_tag,
-                forceout=args.output_edge_forceout,
-                **output_factory_args)
-    elif output_type == 'miniseed':
+            host=args.output_host,
+            port=args.output_read_port,
+            write_port=args.output_port,
+            locationCode=locationcode,
+            tag=args.output_edge_tag,
+            forceout=args.output_edge_forceout,
+            **output_factory_args
+        )
+    elif output_type == "miniseed":
         # TODO: deal with other miniseed arguments
         locationcode = args.outlocationcode or args.locationcode or None
         output_factory = edge.EdgeFactory(
-                host=args.output_host,
-                port=args.output_read_port,
-                write_port=args.output_port,
-                locationCode=locationcode,
-                **output_factory_args)
-    elif output_type == 'plot':
+            host=args.output_host,
+            port=args.output_read_port,
+            write_port=args.output_port,
+            locationCode=locationcode,
+            **output_factory_args
+        )
+    elif output_type == "plot":
         output_factory = PlotTimeseriesFactory()
     else:
         # stream compatible factories
-        if output_type == 'binlog':
+        if output_type == "binlog":
             output_factory = binlog.BinLogFactory(**output_factory_args)
-        elif output_type == 'iaga2002':
+        elif output_type == "iaga2002":
             output_factory = iaga2002.IAGA2002Factory(**output_factory_args)
-        elif output_type == 'imfjson':
+        elif output_type == "imfjson":
             output_factory = imfjson.IMFJSONFactory(**output_factory_args)
-        elif output_type == 'pcdcp':
+        elif output_type == "pcdcp":
             output_factory = pcdcp.PCDCPFactory(**output_factory_args)
-        elif output_type == 'temperature':
+        elif output_type == "temperature":
             output_factory = temperature.TEMPFactory(**output_factory_args)
-        elif output_type == 'vbf':
+        elif output_type == "vbf":
             output_factory = vbf.VBFFactory(**output_factory_args)
         # wrap stream
         if output_stream is not None:
             output_factory = StreamTimeseriesFactory(
-                    factory=output_factory,
-                    stream=output_stream)
+                factory=output_factory, stream=output_stream
+            )
     return output_factory
 
 
@@ -469,25 +479,24 @@ def main(args):
     if args.output_observatory is None:
         args.output_observatory = args.observatory
     elif args.observatory_foreach:
-        raise Exception("Cannot combine" +
-             " --output-observatory and --observatory-foreach")
+        raise Exception(
+            "Cannot combine" + " --output-observatory and --observatory-foreach"
+        )
 
     if args.output_stdout and args.update:
-        raise Exception("Cannot combine" +
-            " --output-stdout and --update")
+        raise Exception("Cannot combine" + " --output-stdout and --update")
 
     # translate realtime into start/end times
     if args.realtime:
         if args.realtime is True:
             # convert interval to number of seconds
-            if args.interval == 'minute':
+            if args.interval == "minute":
                 args.realtime = 3600
             else:
                 args.realtime = 600
         # calculate endtime/starttime
         now = UTCDateTime()
-        args.endtime = UTCDateTime(now.year, now.month, now.day,
-                now.hour, now.minute)
+        args.endtime = UTCDateTime(now.year, now.month, now.day, now.hour, now.minute)
         args.starttime = args.endtime - args.realtime
 
     if args.observatory_foreach:
@@ -499,9 +508,11 @@ def main(args):
             try:
                 _main(args)
             except Exception as e:
-                print("Exception processing observatory {}".format(obs),
-                        str(e),
-                        file=sys.stderr)
+                print(
+                    "Exception processing observatory {}".format(obs),
+                    str(e),
+                    file=sys.stderr,
+                )
         if observatory_exception:
             print("Exceptions occurred during processing", file=sys.stderr)
             sys.exit(1)
@@ -547,138 +558,163 @@ def parse_args(args):
         description="""
             Read, optionally process, and Write Geomag Timeseries data.
             Use @ to read arguments from a file.""",
-        fromfile_prefix_chars='@',)
+        fromfile_prefix_chars="@",
+    )
 
     # Input group
-    input_group = parser.add_argument_group('Input', 'How data is read.')
+    input_group = parser.add_argument_group("Input", "How data is read.")
 
     input_type_group = input_group.add_mutually_exclusive_group(required=True)
-    input_type_group.add_argument('--input',
-            choices=(
-                'edge',
-                'goes',
-                'iaga2002',
-                'imfv122',
-                'imfv283',
-                'miniseed',
-                'pcdcp'
-            ),
-            default='edge',
-            help='Input format (Default "edge")')
-
-    input_group.add_argument('--input-file',
-            help='Read from specified file',
-            metavar='FILE')
-    input_group.add_argument('--input-host',
-            default='cwbpub.cr.usgs.gov',
-            help='Hostname or IP address (Default "cwbpub.cr.usgs.gov")',
-            metavar='HOST')
-    input_group.add_argument('--input-interval',
+    input_type_group.add_argument(
+        "--input",
+        choices=("edge", "goes", "iaga2002", "imfv122", "imfv283", "miniseed", "pcdcp"),
+        default="edge",
+        help='Input format (Default "edge")',
+    )
+
+    input_group.add_argument(
+        "--input-file", help="Read from specified file", metavar="FILE"
+    )
+    input_group.add_argument(
+        "--input-host",
+        default="cwbpub.cr.usgs.gov",
+        help='Hostname or IP address (Default "cwbpub.cr.usgs.gov")',
+        metavar="HOST",
+    )
+    input_group.add_argument(
+        "--input-interval",
         default=None,
-        choices=['day', 'hour', 'minute', 'second', 'tenhertz'],
+        choices=["day", "hour", "minute", "second", "tenhertz"],
         help="Default same as --interval",
-        metavar='INTERVAL')
-    input_group.add_argument('--input-port',
-            default=2060,
-            help='Port number (Default 2060)',
-            metavar='PORT',
-            type=int)
-    input_group.add_argument('--input-stdin',
-            action='store_true',
-            default=False,
-            help='Read from standard input')
-    input_group.add_argument('--input-url',
-            help='Read from a url or url pattern.',
-            metavar='URL')
-    input_group.add_argument('--input-url-interval',
-            default=86400,
-            help="""
+        metavar="INTERVAL",
+    )
+    input_group.add_argument(
+        "--input-port",
+        default=2060,
+        help="Port number (Default 2060)",
+        metavar="PORT",
+        type=int,
+    )
+    input_group.add_argument(
+        "--input-stdin",
+        action="store_true",
+        default=False,
+        help="Read from standard input",
+    )
+    input_group.add_argument(
+        "--input-url", help="Read from a url or url pattern.", metavar="URL"
+    )
+    input_group.add_argument(
+        "--input-url-interval",
+        default=86400,
+        help="""
                 Seconds of data each url request should return
                 (default 86400) used to map requests across multiple files
                 or make multiple requests for chunks of data.
                 """,
-            metavar='N',
-            type=int)
-
-    input_group.add_argument('--inchannels',
-            nargs='*',
-            help='Channels H, E, Z, etc',
-            metavar='CHANNEL')
-    input_group.add_argument('--interval',
-            default='minute',
-            choices=['day', 'hour', 'minute', 'second', 'tenhertz'],
-            help='Data interval, default "minute"',
-            metavar='INTERVAL')
-    input_group.add_argument('--locationcode',
-            help="""
+        metavar="N",
+        type=int,
+    )
+
+    input_group.add_argument(
+        "--inchannels", nargs="*", help="Channels H, E, Z, etc", metavar="CHANNEL"
+    )
+    input_group.add_argument(
+        "--interval",
+        default="minute",
+        choices=["day", "hour", "minute", "second", "tenhertz"],
+        help='Data interval, default "minute"',
+        metavar="INTERVAL",
+    )
+    input_group.add_argument(
+        "--locationcode",
+        help="""
                 Use explicit location code, e.g. "R0", "R1",
                 instead of "--type"
                 """,
-            metavar='CODE',
-            type=edge.LocationCode)
-    input_group.add_argument('--observatory',
-            default=(None,),
-            help="""
+        metavar="CODE",
+        type=edge.LocationCode,
+    )
+    input_group.add_argument(
+        "--observatory",
+        default=(None,),
+        help="""
-                Observatory code ie BOU, CMO, etc.
+                Observatory code, e.g. BOU, CMO, etc.
                 CAUTION: Using multiple observatories is not
                 recommended in most cases; especially with
                 single observatory formats like IAGA and PCDCP.
                 """,
-            metavar='OBS',
-            nargs='*',
-            type=str,
-            required=True)
-    input_group.add_argument('--observatory-foreach',
-            action='store_true',
-            default=False,
-            help='When specifying multiple observatories, process'
-                    ' each observatory separately')
-    input_group.add_argument('--rename-input-channel',
-            action='append',
-            help="""
+        metavar="OBS",
+        nargs="*",
+        type=str,
+        required=True,
+    )
+    input_group.add_argument(
+        "--observatory-foreach",
+        action="store_true",
+        default=False,
+        help="When specifying multiple observatories, process"
+        " each observatory separately",
+    )
+    input_group.add_argument(
+        "--rename-input-channel",
+        action="append",
+        help="""
                 Rename an input channel after it is read,
                 before it is processed
                 """,
-            metavar=('FROM', 'TO'),
-            nargs=2)
-    input_group.add_argument('--type',
-            default='variation',
-            choices=['variation',
-                     'reported',
-                     'provisional',
-                     'adjusted',
-                     'quasi-definitive',
-                     'definitive'],
-            help='Data type, default "variation"')
+        metavar=("FROM", "TO"),
+        nargs=2,
+    )
+    input_group.add_argument(
+        "--type",
+        default="variation",
+        choices=[
+            "variation",
+            "reported",
+            "provisional",
+            "adjusted",
+            "quasi-definitive",
+            "definitive",
+        ],
+        help='Data type, default "variation"',
+    )
     # time range
-    input_group.add_argument('--starttime',
-            type=UTCDateTime,
-            default=None,
-            help='UTC date time YYYY-MM-DD HH:MM:SS',
-            metavar='ISO8601')
-    input_group.add_argument('--endtime',
-            type=UTCDateTime,
-            default=None,
-            help='UTC date time YYYY-MM-DD HH:MM:SS',
-            metavar='ISO8601')
-    input_group.add_argument('--realtime',
-            default=False,
-            const=True,
-            help="""
+    input_group.add_argument(
+        "--starttime",
+        type=UTCDateTime,
+        default=None,
+        help="UTC date time YYYY-MM-DD HH:MM:SS",
+        metavar="ISO8601",
+    )
+    input_group.add_argument(
+        "--endtime",
+        type=UTCDateTime,
+        default=None,
+        help="UTC date time YYYY-MM-DD HH:MM:SS",
+        metavar="ISO8601",
+    )
+    input_group.add_argument(
+        "--realtime",
+        default=False,
+        const=True,
+        help="""
                 Run the last N seconds.
                 Default 3600 (last hour) when interval is minute,
                 Default 600 (last 10 minutes) otherwise.
                 """,
-            metavar='N',
-            nargs='?',
-            type=int)
+        metavar="N",
+        nargs="?",
+        type=int,
+    )
 
     # conversion from bins/volts to nT
-    input_group.add_argument('--convert-voltbin',
-            nargs='*',
-            default=None,
-            metavar='CHANNEL',
-            help="""
+    input_group.add_argument(
+        "--convert-voltbin",
+        nargs="*",
+        default=None,
+        metavar="CHANNEL",
+        help="""
                 Convert channels from bins/volts to nT.
                 Example: "
                     --inchannels U_Bin U_Volt
@@ -687,287 +723,385 @@ def parse_args(args):
                     --convert-voltbin U
                     --outchannels U
                     "
-                """)
+                """,
+    )
 
     # Output group
-    output_group = parser.add_argument_group('Output', 'How data is written.')
-    output_type_group = output_group.add_mutually_exclusive_group(
-            required=True)
+    output_group = parser.add_argument_group("Output", "How data is written.")
+    output_type_group = output_group.add_mutually_exclusive_group(required=True)
 
     # output arguments
-    output_type_group.add_argument('--output',
-            choices=(
-                'binlog',
-                'edge',
-                'iaga2002',
-                'imfjson',
-                'miniseed',
-                'pcdcp',
-                'plot',
-                'temperature',
-                'vbf'
-            ),
-            # TODO: set default to 'iaga2002'
-            help='Output format')
-
-    output_group.add_argument('--outchannels',
-            nargs='*',
-            default=None,
-            help='Defaults to --inchannels',
-            metavar='CHANNEL')
-    output_group.add_argument('--output-file',
-            help='Write to specified file',
-            metavar='FILE')
-    output_group.add_argument('--output-host',
-            default='cwbpub.cr.usgs.gov',
-            help='Write to specified host',
-            metavar='HOST')
-    output_group.add_argument('--output-interval',
-            default=None,
-            choices=['day', 'hour', 'minute', 'second', 'tenhertz'],
-            help="Default same as --interval",
-            metavar='INTERVAL')
-    output_group.add_argument('--output-observatory',
-            default=None,
-            help='Defaults to value of --observatory argument.',
-            metavar='OBS',
-            nargs='*',
-            type=str)
-    output_group.add_argument('--output-port',
-            default=7981,
-            help='Write to specified port',
-            metavar='PORT',
-            type=int)
-    output_group.add_argument('--output-read-port',
-            default=2060,
-            help='Read from specified port',
-            metavar='PORT',
-            type=int)
-    output_group.add_argument('--output-stdout',
-            action='store_true',
-            default=False,
-            help='Write to standard output')
-    output_group.add_argument('--output-url',
-            help='Write to a file:// url pattern',
-            metavar='URL')
-    output_group.add_argument('--output-url-interval',
-            default=86400,
-            help='Output interval in seconds',
-            metavar='INTERVAL',
-            type=int)
-    output_group.add_argument('--rename-output-channel',
-            action='append',
-            help='Rename an output channel before it is written',
-            metavar=('FROM', 'TO'),
-            nargs=2)
-    output_group.add_argument('--outlocationcode',
-            help='Defaults to --locationcode',
-            metavar='CODE',
-            type=edge.LocationCode)
-    output_group.add_argument('--output-edge-forceout',
-            action='store_true',
-            default=False,
-            help='Used when writing to EDGE, to close miniseed immediately.')
-    output_group.add_argument('--output-edge-tag',
-            default='GEOMAG',
-            help='Used when writing to EDGE, to identify source of data.',
-            metavar='TAG')
+    output_type_group.add_argument(
+        "--output",
+        choices=(
+            "binlog",
+            "edge",
+            "iaga2002",
+            "imfjson",
+            "miniseed",
+            "pcdcp",
+            "plot",
+            "temperature",
+            "vbf",
+        ),
+        # TODO: set default to 'iaga2002'
+        help="Output format",
+    )
+
+    output_group.add_argument(
+        "--outchannels",
+        nargs="*",
+        default=None,
+        help="Defaults to --inchannels",
+        metavar="CHANNEL",
+    )
+    output_group.add_argument(
+        "--output-file", help="Write to specified file", metavar="FILE"
+    )
+    output_group.add_argument(
+        "--output-host",
+        default="cwbpub.cr.usgs.gov",
+        help="Write to specified host",
+        metavar="HOST",
+    )
+    output_group.add_argument(
+        "--output-interval",
+        default=None,
+        choices=["day", "hour", "minute", "second", "tenhertz"],
+        help="Default same as --interval",
+        metavar="INTERVAL",
+    )
+    output_group.add_argument(
+        "--output-observatory",
+        default=None,
+        help="Defaults to value of --observatory argument.",
+        metavar="OBS",
+        nargs="*",
+        type=str,
+    )
+    output_group.add_argument(
+        "--output-port",
+        default=7981,
+        help="Write to specified port",
+        metavar="PORT",
+        type=int,
+    )
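+    # note: output writes default to port 7981, while reads use port 2060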
+    output_group.add_argument(
+        "--output-read-port",
+        default=2060,
+        help="Read from specified port",
+        metavar="PORT",
+        type=int,
+    )
+    output_group.add_argument(
+        "--output-stdout",
+        action="store_true",
+        default=False,
+        help="Write to standard output",
+    )
+    output_group.add_argument(
+        "--output-url", help="Write to a file:// url pattern", metavar="URL"
+    )
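+    # the default of 86400 seconds produces one output file per day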
+    output_group.add_argument(
+        "--output-url-interval",
+        default=86400,
+        help="Output interval in seconds",
+        metavar="INTERVAL",
+        type=int,
+    )
+    output_group.add_argument(
+        "--rename-output-channel",
+        action="append",
+        help="Rename an output channel before it is written",
+        metavar=("FROM", "TO"),
+        nargs=2,
+    )
+    output_group.add_argument(
+        "--outlocationcode",
+        help="Defaults to --locationcode",
+        metavar="CODE",
+        type=edge.LocationCode,
+    )
+    output_group.add_argument(
+        "--output-edge-forceout",
+        action="store_true",
+        default=False,
+        help="Used when writing to EDGE, to close miniseed immediately.",
+    )
+    output_group.add_argument(
+        "--output-edge-tag",
+        default="GEOMAG",
+        help="Used when writing to EDGE, to identify source of data.",
+        metavar="TAG",
+    )
 
     # Processing group
-    processing_group = parser.add_argument_group(
-            'Processing',
-            'How data is processed.')
-    processing_group.add_argument('--algorithm',
-            choices=[k for k in algorithms],
-            default='identity',
-            help='Default is "identity", which skips processing')
+    processing_group = parser.add_argument_group("Processing", "How data is processed.")
+    processing_group.add_argument(
+        "--algorithm",
+        choices=[k for k in algorithms],
+        default="identity",
+        help='Default is "identity", which skips processing',
+    )
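+    # each algorithm registers its own additional arguments in this group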
     for k in algorithms:
         algorithms[k].add_arguments(processing_group)
-    processing_group.add_argument('--update',
-            action='store_true',
-            default=False,
-            help="""
+    processing_group.add_argument(
+        "--update",
+        action="store_true",
+        default=False,
+        help="""
                 Check for gaps in output,
                 and merge new data into existing.
-                """)
-    processing_group.add_argument('--update-limit',
-            type=int,
-            default=0,
-            help="""
+                """,
+    )
+    processing_group.add_argument(
+        "--update-limit",
+        type=int,
+        default=0,
+        help="""
                 When limit is set to more than 0, update mode checks for
                 gaps and steps backwards to fill them if the start of the
                 current interval is a gap.
                 """,
-            metavar='N')
-    processing_group.add_argument('--no-trim',
-            action='store_true',
-            default=False,
-            help='Ensures output data will not be trimmed down')
+        metavar="N",
+    )
+    processing_group.add_argument(
+        "--no-trim",
+        action="store_true",
+        default=False,
+        help="Ensures output data will not be trimmed down",
+    )
 
     # GOES parameters
     goes_group = parser.add_argument_group(
-            'GOES parameters',
-            'Used to configure "--input goes"')
-    goes_group.add_argument('--input-goes-directory',
-            default='.',
-            help='Directory for support files for goes input of imfv283 data',
-            metavar='PATH')
-    goes_group.add_argument('--input-goes-getdcpmessages',
-            default='',
-            help='Location of getDcpMessages.',
-            metavar='PATH')
-    goes_group.add_argument('--input-goes-password',
-            default='',
-            help='Password for goes user',
-            metavar='PASSWORD')
-    goes_group.add_argument('--input-goes-server',
-            nargs='*',
-            help='The server name(s) to retrieve the GOES data from',
-            metavar='HOST')
-    goes_group.add_argument('--input-goes-user',
-            default='GEOMAG',
-            help='The user name to use to retrieve data from GOES',
-            metavar='USER')
+        "GOES parameters", 'Used to configure "--input goes"'
+    )
+    goes_group.add_argument(
+        "--input-goes-directory",
+        default=".",
+        help="Directory for support files for goes input of imfv283 data",
+        metavar="PATH",
+    )
+    goes_group.add_argument(
+        "--input-goes-getdcpmessages",
+        default="",
+        help="Location of getDcpMessages.",
+        metavar="PATH",
+    )
+    goes_group.add_argument(
+        "--input-goes-password",
+        default="",
+        help="Password for goes user",
+        metavar="PASSWORD",
+    )
+    goes_group.add_argument(
+        "--input-goes-server",
+        nargs="*",
+        help="The server name(s) to retrieve the GOES data from",
+        metavar="HOST",
+    )
+    goes_group.add_argument(
+        "--input-goes-user",
+        default="GEOMAG",
+        help="The user name to use to retrieve data from GOES",
+        metavar="USER",
+    )
 
     # still allow deprecated arguments for now, but hide behind opt in flag
-    deprecated = parser.add_argument_group('Deprecated')
-    deprecated.add_argument('--enable-deprecated-arguments',
-            action='store_true',
-            default=False,
-            help="enable support for deprecated arguments")
+    deprecated = parser.add_argument_group("Deprecated")
+    deprecated.add_argument(
+        "--enable-deprecated-arguments",
+        action="store_true",
+        default=False,
+        help="enable support for deprecated arguments",
+    )
     # check for this argument before adding deprecated args to usage
-    if '--enable-deprecated-arguments' in args:
+    if "--enable-deprecated-arguments" in args:
         add_deprecated_args(deprecated, input_type_group, output_type_group)
 
-    deprecated.add_argument('--volt-conversion',
-            default=100.0,
-            metavar='NT',
-            help='(Deprecated, Unused) Conversion factor (nT/V) for volts')
-    deprecated.add_argument('--bin-conversion',
-            default=500.0,
-            metavar='NT',
-            help='(Deprecated, Unused) Conversion factor (nT/bin) for bins')
+    deprecated.add_argument(
+        "--volt-conversion",
+        default=100.0,
+        metavar="NT",
+        help="(Deprecated, Unused) Conversion factor (nT/V) for volts",
+    )
+    deprecated.add_argument(
+        "--bin-conversion",
+        default=500.0,
+        metavar="NT",
+        help="(Deprecated, Unused) Conversion factor (nT/bin) for bins",
+    )
 
     return parser.parse_args(args)
 
 
 def add_deprecated_args(parser, input_group, output_group):
-    print('WARNING: you are enabling deprecated arguments,' +
-            ' please update your usage', file=sys.stderr)
+    print(
+        "WARNING: you are enabling deprecated arguments," + " please update your usage",
+        file=sys.stderr,
+    )
 
     # argument options for inputs and outputs,
     # replaced with less TYPE specific options
-    parser.add_argument('--input-edge-port',
-            type=int,
-            default=2060,
-            help='(Deprecated) \
+    parser.add_argument(
+        "--input-edge-port",
+        type=int,
+        default=2060,
+        help='(Deprecated) \
                 Use "--input-port".',
-            metavar='PORT')
-    parser.add_argument('--output-edge-port',
-            type=int,
-            dest='edge_write_port',
-            default=7981,
-            help='(Deprecated) \
+        metavar="PORT",
+    )
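+    # --output-edge-port and --output-edge-cwb-port share dest="edge_write_port",
+    # so both deprecated spellings populate the same parsed value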
+    parser.add_argument(
+        "--output-edge-port",
+        type=int,
+        dest="edge_write_port",
+        default=7981,
+        help='(Deprecated) \
                 Use "--output-port".',
-            metavar='PORT')
-    parser.add_argument('--output-edge-cwb-port',
-            type=int,
-            dest='edge_write_port',
-            default=7981,
-            help='(Deprecated) \
+        metavar="PORT",
+    )
+    parser.add_argument(
+        "--output-edge-cwb-port",
+        type=int,
+        dest="edge_write_port",
+        default=7981,
+        help='(Deprecated) \
                 Use "--output miniseed" and "--output-port PORT".',
-            metavar='PORT')
-    parser.add_argument('--output-edge-read-port',
-            type=int,
-            default=2060,
-            help='(Deprecated) \
+        metavar="PORT",
+    )
+    parser.add_argument(
+        "--output-edge-read-port",
+        type=int,
+        default=2060,
+        help='(Deprecated) \
                 Use "--output-read-port".',
-            metavar='PORT')
+        metavar="PORT",
+    )
 
     # input arguments (generally use "--input TYPE")
-    input_group.add_argument('--input-edge',
-            help='(Deprecated) \
+    input_group.add_argument(
+        "--input-edge",
+        help='(Deprecated) \
                 Use "--input edge" and "--input-host HOST".',
-            metavar='HOST')
-    input_group.add_argument('--input-iaga-file',
-            help='(Deprecated) \
+        metavar="HOST",
+    )
+    input_group.add_argument(
+        "--input-iaga-file",
+        help='(Deprecated) \
                 Use "--input iaga2002" and "--input-file FILE".',
-            metavar='FILE')
-    input_group.add_argument('--input-iaga-stdin',
-            action='store_true',
-            default=False,
-            help='(Deprecated) \
-                Use "--input iaga2002" and "--input-stdin".')
-    input_group.add_argument('--input-iaga-url',
-            help='(Deprecated) \
+        metavar="FILE",
+    )
+    input_group.add_argument(
+        "--input-iaga-stdin",
+        action="store_true",
+        default=False,
+        help='(Deprecated) \
+                Use "--input iaga2002" and "--input-stdin".',
+    )
+    input_group.add_argument(
+        "--input-iaga-url",
+        help='(Deprecated) \
                 Use "--input iaga2002" and "--input-url URL".',
-            metavar='URL')
-    input_group.add_argument('--input-imfv283-file',
-            help='(Deprecated) \
+        metavar="URL",
+    )
+    input_group.add_argument(
+        "--input-imfv283-file",
+        help='(Deprecated) \
                 Use "--input imfv283" and "--input-file FILE".',
-            metavar='FILE')
-    input_group.add_argument('--input-imfv283-stdin',
-            action='store_true',
-            default=False,
-            help='(Deprecated) \
-                Use "--input imfv283" and "--input-stdin"')
-    input_group.add_argument('--input-imfv283-url',
-            help='(Deprecated) \
+        metavar="FILE",
+    )
+    input_group.add_argument(
+        "--input-imfv283-stdin",
+        action="store_true",
+        default=False,
+        help='(Deprecated) \
+                Use "--input imfv283" and "--input-stdin"',
+    )
+    input_group.add_argument(
+        "--input-imfv283-url",
+        help='(Deprecated) \
                 Use "--input iaga2002" and "--input-url URL".',
-            metavar='URL')
-    input_group.add_argument('--input-imfv283-goes',
-            action='store_true',
-            default=False,
-            help='(Deprecated) \
-                Use "--input goes".')
-    input_group.add_argument('--input-pcdcp-file',
-            help='(Deprecated) \
+        metavar="URL",
+    )
+    input_group.add_argument(
+        "--input-imfv283-goes",
+        action="store_true",
+        default=False,
+        help='(Deprecated) \
+                Use "--input goes".',
+    )
+    input_group.add_argument(
+        "--input-pcdcp-file",
+        help='(Deprecated) \
                 Use "--input pcdcp" and "--input-file FILE".',
-            metavar='FILE')
-    input_group.add_argument('--input-pcdcp-stdin',
-            action='store_true',
-            default=False,
-            help='(Deprecated) \
-                Use "--input pcddp" and "--input-stdin".')
-    input_group.add_argument('--input-pcdcp-url',
-            help='(Deprecated) \
+        metavar="FILE",
+    )
+    input_group.add_argument(
+        "--input-pcdcp-stdin",
+        action="store_true",
+        default=False,
+        help='(Deprecated) \
+                Use "--input pcddp" and "--input-stdin".',
+    )
+    input_group.add_argument(
+        "--input-pcdcp-url",
+        help='(Deprecated) \
                 Use "--input pcdcp" and "--input-url URL".',
-            metavar='URL')
+        metavar="URL",
+    )
 
     # output arguments (generally use "--output TYPE")
-    output_group.add_argument('--output-iaga-file',
-            help='(Deprecated) \
+    output_group.add_argument(
+        "--output-iaga-file",
+        help='(Deprecated) \
                 Use "--output iaga2002" and "--output-file FILE".',
-            metavar='FILE')
-    output_group.add_argument('--output-iaga-stdout',
-            action='store_true', default=False,
-            help='(Deprecated) \
-                Use "--output iaga2002" and "--output-stdout".')
-    output_group.add_argument('--output-iaga-url',
-            help='(Deprecated) \
+        metavar="FILE",
+    )
+    output_group.add_argument(
+        "--output-iaga-stdout",
+        action="store_true",
+        default=False,
+        help='(Deprecated) \
+                Use "--output iaga2002" and "--output-stdout".',
+    )
+    output_group.add_argument(
+        "--output-iaga-url",
+        help='(Deprecated) \
                 Use "--output iaga2002" and "--output-url URL".',
-            metavar='URL')
-    output_group.add_argument('--output-pcdcp-file',
-            help='(Deprecated) \
+        metavar="URL",
+    )
+    output_group.add_argument(
+        "--output-pcdcp-file",
+        help='(Deprecated) \
                 Use "--output pcdcp" and "--output-file FILE".',
-            metavar='FILE')
-    output_group.add_argument('--output-pcdcp-stdout',
-            action='store_true', default=False,
-            help='(Deprecated) \
-                Use "--output pcdcp" and "--output-stdout".')
-    output_group.add_argument('--output-pcdcp-url',
-            help='(Deprecated) \
+        metavar="FILE",
+    )
+    output_group.add_argument(
+        "--output-pcdcp-stdout",
+        action="store_true",
+        default=False,
+        help='(Deprecated) \
+                Use "--output pcdcp" and "--output-stdout".',
+    )
+    output_group.add_argument(
+        "--output-pcdcp-url",
+        help='(Deprecated) \
                 Use "--output pcdcp" and "--output-url URL".',
-            metavar='URL')
-    output_group.add_argument('--output-edge',
-            help='(Deprecated) \
+        metavar="URL",
+    )
+    output_group.add_argument(
+        "--output-edge",
+        help='(Deprecated) \
                 Use "--output edge" and "--output-host HOST".',
-            metavar='HOST')
-    output_group.add_argument('--output-plot',
-            action='store_true',
-            default=False,
-            help='(Deprecated) \
-                Use "--output plot".')
+        metavar="HOST",
+    )
+    output_group.add_argument(
+        "--output-plot",
+        action="store_true",
+        default=False,
+        help='(Deprecated) \
+                Use "--output plot".',
+    )
 
 
 def parse_deprecated_arguments(args):
@@ -975,67 +1109,70 @@ def parse_deprecated_arguments(args):
     # map legacy input arguments
     usingDeprecated = False
     if args.input_edge is not None:
-        args.input = 'edge'
+        args.input = "edge"
         args.input_host = args.input_edge
         args.input_port = args.input_edge_port
         usingDeprecated = True
     elif args.input_iaga_file is not None:
-        args.input = 'iaga2002'
+        args.input = "iaga2002"
         args.input_file = args.input_iaga_file
         usingDeprecated = True
     elif args.input_iaga_stdin:
-        args.input = 'iaga2002'
+        args.input = "iaga2002"
         args.input_stdin = True
         usingDeprecated = True
     elif args.input_iaga_url is not None:
-        args.input = 'iaga2002'
+        args.input = "iaga2002"
         args.input_url = args.input_iaga_url
         usingDeprecated = True
     elif args.input_imfv283_file is not None:
-        args.input = 'imfv283'
+        args.input = "imfv283"
         args.input_file = args.input_imfv283_file
         usingDeprecated = True
     elif args.input_imfv283_url is not None:
-        args.input = 'imfv283'
+        args.input = "imfv283"
         args.input_url = args.input_imfv283_url
         usingDeprecated = True
     elif args.input_imfv283_goes:
-        args.input = 'goes'
+        args.input = "goes"
         usingDeprecated = True
     # map legacy output arguments
     if args.output_edge is not None:
-        args.output = 'edge'
+        args.output = "edge"
         args.output_host = args.output_edge
         args.output_port = args.edge_write_port
         usingDeprecated = True
     elif args.output_iaga_file is not None:
-        args.output = 'iaga2002'
+        args.output = "iaga2002"
         args.output_file = args.output_iaga_file
         usingDeprecated = True
     elif args.output_iaga_stdout:
-        args.output = 'iaga2002'
+        args.output = "iaga2002"
         args.output_stdout = True
         usingDeprecated = True
     elif args.output_iaga_url is not None:
-        args.output = 'iaga2002'
+        args.output = "iaga2002"
         args.output_url = args.output_iaga_url
         usingDeprecated = True
     elif args.output_pcdcp_file is not None:
-        args.output = 'pcdcp'
+        args.output = "pcdcp"
         args.output_file = args.output_pcdcp_file
         usingDeprecated = True
     elif args.output_pcdcp_stdout:
-        args.output = 'pcdcp'
+        args.output = "pcdcp"
         args.output_stdout = True
         usingDeprecated = True
     elif args.output_pcdcp_url is not None:
-        args.output = 'pcdcp'
+        args.output = "pcdcp"
         args.output_url = args.output_pcdcp_url
         usingDeprecated = True
     elif args.output_plot:
-        args.output = 'plot'
+        args.output = "plot"
         usingDeprecated = True
 
     if usingDeprecated:
-        print('WARNING: you are using deprecated arguments,' +
-              ' please update your usage', file=sys.stderr)
+        print(
+            "WARNING: you are using deprecated arguments,"
+            + " please update your usage",
+            file=sys.stderr,
+        )
diff --git a/geomagio/Metadata.py b/geomagio/Metadata.py
index a6aa0269c1d02e07cbd14510cf759d8ac32c1312..a9f2d07eea5344e7d46a00712eea870435755ca1 100644
--- a/geomagio/Metadata.py
+++ b/geomagio/Metadata.py
@@ -17,13 +17,9 @@ def get_instrument(observatory, start_time=None, end_time=None, metadata=None):
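+    # keep metadata for this observatory whose time range overlaps the
+    # requested window; a None bound on either side is treated as open-ended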
     return [
         m
         for m in metadata
-        if m["station"] == observatory and
-            (end_time is None or
-                m["start_time"] is None or
-                m["start_time"] < end_time) and
-            (start_time is None or
-                m["end_time"] is None or
-                m["end_time"] > start_time)
+        if m["station"] == observatory
+        and (end_time is None or m["start_time"] is None or m["start_time"] < end_time)
+        and (start_time is None or m["end_time"] is None or m["end_time"] > start_time)
     ]
 
 
diff --git a/geomagio/ObservatoryMetadata.py b/geomagio/ObservatoryMetadata.py
index a35776b8e1c037c6f7c943322671ebb083371e0a..deac24f23aa2e6c0cd3ca6fa8b97525d1ebfac3e 100644
--- a/geomagio/ObservatoryMetadata.py
+++ b/geomagio/ObservatoryMetadata.py
@@ -3,1012 +3,946 @@
 
 # default metadata for the 14 USGS observatories.
 DEFAULT_METADATA = {
-    'BDT': {
-        'metadata': {
-            'station_name': 'Boulder Test',
-            'agency_name': 'United States Geological Survey (USGS)',
-            'geodetic_latitude': '40.137',
-            'geodetic_longitude': '254.763',
-            'elevation': '1682',
-            'sensor_orientation': 'HDZF',
-            'sensor_sampling_rate': 100.0,
-            'declination_base': 5527,
-            'is_gin': False,
-            'is_intermagnet': False,
-            'conditions_of_use': 'The Conditions of Use for data provided' +
-                    ' through INTERMAGNET and acknowledgement templates' +
-                    ' can be found at www.intermagnet.org'
-        },
-        'interval_specific': {
-            'minute': {
-                'data_interval_type': 'filtered 1-minute (00:15-01:45) ',
-                'filter_comments': ['Vector 1-minute values are computed' +
-                    ' from 1-second values using the INTERMAGNET gaussian' +
-                    ' filter centered on the minute. Scalar 1-minute values' +
-                    ' are computed from 1-second values using the' +
-                    ' INTERMAGNET gaussian filter centered on the minute. ']
-            },
-            'second': {
-                'data_interval_type': 'Average 1-Second'
-            }
-        }
-    },
-    'BOU': {
-        'metadata': {
-            'station_name': 'Boulder',
-            'agency_name': 'United States Geological Survey (USGS)',
-            'geodetic_latitude': '40.137',
-            'geodetic_longitude': '254.763',
-            'elevation': '1682',
-            'sensor_orientation': 'HDZF',
-            'sensor_sampling_rate': 100.0,
-            'declination_base': 5527,
-            'is_gin': False,
-            'is_intermagnet': False,
-            'conditions_of_use': 'The Conditions of Use for data provided' +
-                    ' through INTERMAGNET and acknowledgement templates' +
-                    ' can be found at www.intermagnet.org'
-        },
-        'interval_specific': {
-            'minute': {
-                'data_interval_type': 'filtered 1-minute (00:15-01:45) ',
-                'filter_comments': ['Vector 1-minute values are computed' +
-                    ' from 1-second values using the INTERMAGNET gaussian' +
-                    ' filter centered on the minute. Scalar 1-minute values' +
-                    ' are computed from 1-second values using the' +
-                    ' INTERMAGNET gaussian filter centered on the minute. ']
-            },
-            'second': {
-                'data_interval_type': 'Average 1-Second'
-            }
-        }
-    },
-    'TST': {
-        'metadata': {
-            'station_name': 'Boulder Test',
-            'agency_name': 'United States Geological Survey (USGS)',
-            'geodetic_latitude': '40.137',
-            'geodetic_longitude': '254.763',
-            'elevation': '1682',
-            'sensor_orientation': 'HDZF',
-            'sensor_sampling_rate': 10000.0,
-            'declination_base': 5527,
-            'is_gin': False,
-            'is_intermagnet': False,
-            'conditions_of_use': 'The Conditions of Use for data provided' +
-                    ' through INTERMAGNET and acknowledgement templates' +
-                    ' can be found at www.intermagnet.org'
-        },
-        'interval_specific': {
-            'minute': {
-                'data_interval_type': 'filtered 1-minute (00:15-01:45) ',
-                'filter_comments': ['Vector 1-minute values are computed' +
-                    ' from 1-second values using the INTERMAGNET gaussian' +
-                    ' filter centered on the minute. Scalar 1-minute values' +
-                    ' are computed from 1-second values using the' +
-                    ' INTERMAGNET gaussian filter centered on the minute. ']
-            },
-            'second': {
-                'data_interval_type': 'Average 1-Second'
-            }
-        }
-    },
-    'BRW': {
-        'metadata': {
-            'station_name': 'Barrow',
-            'agency_name': 'United States Geological Survey (USGS)',
-            'geodetic_latitude': '71.322',
-            'geodetic_longitude': '203.378',
-            'elevation': '10',
-            'sensor_orientation': 'HDZF',
-            'sensor_sampling_rate': 100.0,
-            'declination_base': 10589,
-            'is_gin': False,
-            'is_intermagnet': False,
-            'conditions_of_use': 'The Conditions of Use for data provided' +
-                    ' through INTERMAGNET and acknowledgement templates' +
-                    ' can be found at www.intermagnet.org'
-        },
-        'data_interval_type': {
-            'minute': {
-                'data_interval_type': 'filtered 1-minute (00:15-01:45) ',
-                'filter_comments': ['Vector 1-minute values are computed' +
-                    ' from 1-second values using the INTERMAGNET gaussian' +
-                    ' filter centered on the minute. Scalar 1-minute values' +
-                    ' are computed from 1-second values using the' +
-                    ' INTERMAGNET gaussian filter centered on the minute. ']
-            },
-            'second': {
-                'data_interval_type': 'Average 1-Second'
-            }
-        }
-    },
-    'BRT': {
-        'metadata': {
-            'station_name': 'Barrow Test',
-            'agency_name': 'United States Geological Survey (USGS)',
-            'geodetic_latitude': '71.322',
-            'geodetic_longitude': '203.378',
-            'elevation': '10',
-            'sensor_orientation': 'HDZF',
-            'sensor_sampling_rate': 10000.0,
-            'declination_base': 10589,
-            'is_gin': False,
-            'is_intermagnet': False,
-            'conditions_of_use': 'The Conditions of Use for data provided' +
-                    ' through INTERMAGNET and acknowledgement templates' +
-                    ' can be found at www.intermagnet.org'
-        },
-        'data_interval_type': {
-            'minute': {
-                'data_interval_type': 'filtered 1-minute (00:15-01:45) ',
-                'filter_comments': ['Vector 1-minute values are computed' +
-                    ' from 1-second values using the INTERMAGNET gaussian' +
-                    ' filter centered on the minute. Scalar 1-minute values' +
-                    ' are computed from 1-second values using the' +
-                    ' INTERMAGNET gaussian filter centered on the minute. ']
-            },
-            'second': {
-                'data_interval_type': 'Average 1-Second'
-            }
-        }
-    },
-    'BSL': {
-        'metadata': {
-            'station_name': 'Stennis Space Center',
-            'agency_name': 'United States Geological Survey (USGS)',
-            'geodetic_latitude': '30.350',
-            'geodetic_longitude': '270.365',
-            'elevation': '8',
-            'sensor_orientation': 'HDZF',
-            'sensor_sampling_rate': 100.0,
-            'declination_base': 215772,
-            'is_gin': False,
-            'is_intermagnet': False,
-            'conditions_of_use': 'The Conditions of Use for data provided' +
-                    ' through INTERMAGNET and acknowledgement templates' +
-                    ' can be found at www.intermagnet.org'
-        },
-        'interval_specific': {
-            'minute': {
-                'data_interval_type': 'filtered 1-minute (00:15-01:45) ',
-                'filter_comments': ['Vector 1-minute values are computed' +
-                    ' from 1-second values using the INTERMAGNET gaussian' +
-                    ' filter centered on the minute. Scalar 1-minute values' +
-                    ' are computed from 1-second values using the' +
-                    ' INTERMAGNET gaussian filter centered on the minute. ']
-            },
-            'second': {
-                'data_interval_type': 'Average 1-Second'
-            }
-        }
-    },
-    'CMO': {
-        'metadata': {
-            'station_name': 'College',
-            'agency_name': 'United States Geological Survey (USGS)',
-            'geodetic_latitude': '64.874',
-            'geodetic_longitude': '212.140',
-            'elevation': '197',
-            'sensor_orientation': 'HDZF',
-            'sensor_sampling_rate': 100.0,
-            'declination_base': 12151,
-            'is_gin': False,
-            'is_intermagnet': False,
-            'conditions_of_use': 'The Conditions of Use for data provided' +
-                    ' through INTERMAGNET and acknowledgement templates' +
-                    ' can be found at www.intermagnet.org'
-        }
-    },
-    'CMT': {
-        'metadata': {
-            'station_name': 'College',
-            'agency_name': 'United States Geological Survey (USGS)',
-            'geodetic_latitude': '64.874',
-            'geodetic_longitude': '212.140',
-            'elevation': '197',
-            'sensor_orientation': 'HDZF',
-            'sensor_sampling_rate': 10000.0,
-            'declination_base': 12151,
-            'is_gin': False,
-            'is_intermagnet': False,
-            'conditions_of_use': 'The Conditions of Use for data provided' +
-                    ' through INTERMAGNET and acknowledgement templates' +
-                    ' can be found at www.intermagnet.org'
-        }
-    },
-    'DED': {
-        'metadata': {
-            'station_name': 'Deadhorse',
-            'agency_name': 'United States Geological Survey (USGS)',
-            'geodetic_latitude': '70.355',
-            'geodetic_longitude': '211.207',
-            'elevation': '10',
-            'sensor_orientation': 'HDZF',
-            'sensor_sampling_rate': 100.0,
-            'declination_base': 10755,
-            'is_gin': False,
-            'is_intermagnet': False,
-            'conditions_of_use': 'The Conditions of Use for data provided' +
-                    ' through INTERMAGNET and acknowledgement templates' +
-                    ' can be found at www.intermagnet.org'
-        },
-        'interval_specific': {
-            'minute': {
-                'data_interval_type': 'filtered 1-minute (00:15-01:45) ',
-                'filter_comments': ['Vector 1-minute values are computed' +
-                    ' from 1-second values using the INTERMAGNET gaussian' +
-                    ' filter centered on the minute. Scalar 1-minute values' +
-                    ' are computed from 1-second values using the' +
-                    ' INTERMAGNET gaussian filter centered on the minute. ']
-            },
-            'second': {
-                'data_interval_type': 'Average 1-Second'
-            }
-        }
-    },
-    'DHT': {
-        'metadata': {
-            'station_name': 'Deadhorse Test',
-            'agency_name': 'United States Geological Survey (USGS)',
-            'geodetic_latitude': '70.355',
-            'geodetic_longitude': '211.207',
-            'elevation': '10',
-            'sensor_orientation': 'HDZF',
-            'sensor_sampling_rate': 10000.0,
-            'declination_base': 10755,
-            'is_gin': False,
-            'is_intermagnet': False,
-            'conditions_of_use': 'The Conditions of Use for data provided' +
-                    ' through INTERMAGNET and acknowledgement templates' +
-                    ' can be found at www.intermagnet.org'
-        },
-        'interval_specific': {
-            'minute': {
-                'data_interval_type': 'filtered 1-minute (00:15-01:45) ',
-                'filter_comments': ['Vector 1-minute values are computed' +
-                    ' from 1-second values using the INTERMAGNET gaussian' +
-                    ' filter centered on the minute. Scalar 1-minute values' +
-                    ' are computed from 1-second values using the' +
-                    ' INTERMAGNET gaussian filter centered on the minute. ']
-            },
-            'second': {
-                'data_interval_type': 'Average 1-Second'
-            }
-        }
-    },
-    'FRD': {
-        'metadata': {
-            'station_name': 'Fredericksburg',
-            'agency_name': 'United States Geological Survey (USGS)',
-            'geodetic_latitude': '38.205',
-            'geodetic_longitude': '282.627',
-            'elevation': '69',
-            'sensor_orientation': 'HDZF',
-            'sensor_sampling_rate': 100.0,
-            'declination_base': 209690,
-            'is_gin': False,
-            'is_intermagnet': False,
-            'conditions_of_use': 'The Conditions of Use for data provided' +
-                    ' through INTERMAGNET and acknowledgement templates' +
-                    ' can be found at www.intermagnet.org'
-        },
-        'interval_specific': {
-            'minute': {
-                'data_interval_type': 'filtered 1-minute (00:15-01:45) ',
-                'filter_comments': ['Vector 1-minute values are computed' +
-                    ' from 1-second values using the INTERMAGNET gaussian' +
-                    ' filter centered on the minute. Scalar 1-minute values' +
-                    ' are computed from 1-second values using the' +
-                    ' INTERMAGNET gaussian filter centered on the minute. ']
-            },
-            'second': {
-                'data_interval_type': 'Average 1-Second'
-            }
-        }
-    },
-    'FDT': {
-        'metadata': {
-            'station_name': 'Fredericksburg Test',
-            'agency_name': 'United States Geological Survey (USGS)',
-            'geodetic_latitude': '38.205',
-            'geodetic_longitude': '282.627',
-            'elevation': '69',
-            'sensor_orientation': 'HDZF',
-            'sensor_sampling_rate': 100.0,
-            'declination_base': 209690,
-            'is_gin': False,
-            'is_intermagnet': False,
-            'conditions_of_use': 'The Conditions of Use for data provided' +
-                    ' through INTERMAGNET and acknowledgement templates' +
-                    ' can be found at www.intermagnet.org'
-        },
-        'interval_specific': {
-            'minute': {
-                'data_interval_type': 'filtered 1-minute (00:15-01:45) ',
-                'filter_comments': ['Vector 1-minute values are computed' +
-                    ' from 1-second values using the INTERMAGNET gaussian' +
-                    ' filter centered on the minute. Scalar 1-minute values' +
-                    ' are computed from 1-second values using the' +
-                    ' INTERMAGNET gaussian filter centered on the minute. ']
-            },
-            'second': {
-                'data_interval_type': 'Average 1-Second'
-            }
-        }
-    },
-    'FRN': {
-        'metadata': {
-            'station_name': 'Fresno',
-            'agency_name': 'United States Geological Survey (USGS)',
-            'geodetic_latitude': '37.091',
-            'geodetic_longitude': '240.282',
-            'elevation': '331',
-            'sensor_orientation': 'HDZF',
-            'sensor_sampling_rate': 100.0,
-            'declination_base': 8097,
-            'is_gin': False,
-            'is_intermagnet': False,
-            'conditions_of_use': 'The Conditions of Use for data provided' +
-                    ' through INTERMAGNET and acknowledgement templates' +
-                    ' can be found at www.intermagnet.org'
-        },
-        'interval_specific': {
-            'minute': {
-                'data_interval_type': 'filtered 1-minute (00:15-01:45) ',
-                'filter_comments': ['Vector 1-minute values are computed' +
-                    ' from 1-second values using the INTERMAGNET gaussian' +
-                    ' filter centered on the minute. Scalar 1-minute values' +
-                    ' are computed from 1-second values using the' +
-                    ' INTERMAGNET gaussian filter centered on the minute. ']
-            },
-            'second': {
-                'data_interval_type': 'Average 1-Second'
-            }
-        }
-    },
-    'GUA': {
-        'metadata': {
-            'station_name': 'Guam',
-            'agency_name': 'United States Geological Survey (USGS)',
-            'geodetic_latitude': '13.588',
-            'geodetic_longitude': '144.867',
-            'elevation': '140',
-            'sensor_orientation': 'HDZF',
-            'sensor_sampling_rate': 100.0,
-            'declination_base': 764,
-            'is_gin': False,
-            'is_intermagnet': False,
-            'conditions_of_use': 'The Conditions of Use for data provided' +
-                    ' through INTERMAGNET and acknowledgement templates' +
-                    ' can be found at www.intermagnet.org'
-        },
-        'interval_specific': {
-            'minute': {
-                'data_interval_type': 'filtered 1-minute (00:15-01:45) ',
-                'filter_comments': ['Vector 1-minute values are computed' +
-                    ' from 1-second values using the INTERMAGNET gaussian' +
-                    ' filter centered on the minute. Scalar 1-minute values' +
-                    ' are computed from 1-second values using the' +
-                    ' INTERMAGNET gaussian filter centered on the minute. ']
-            },
-            'second': {
-                'data_interval_type': 'Average 1-Second'
-            }
-        }
-    },
-    'HON': {
-        'metadata': {
-            'station_name': 'Honolulu',
-            'agency_name': 'United States Geological Survey (USGS)',
-            'geodetic_latitude': '21.316',
-            'geodetic_longitude': '202.000',
-            'elevation': '4',
-            'sensor_orientation': 'HDZF',
-            'sensor_sampling_rate': 100.0,
-            'declination_base': 5982,
-            'is_gin': False,
-            'is_intermagnet': False,
-            'conditions_of_use': 'The Conditions of Use for data provided' +
-                    ' through INTERMAGNET and acknowledgement templates' +
-                    ' can be found at www.intermagnet.org'
-        },
-        'interval_specific': {
-            'minute': {
-                'data_interval_type': 'filtered 1-minute (00:15-01:45) ',
-                'filter_comments': ['Vector 1-minute values are computed' +
-                    ' from 1-second values using the INTERMAGNET gaussian' +
-                    ' filter centered on the minute. Scalar 1-minute values' +
-                    ' are computed from 1-second values using the' +
-                    ' INTERMAGNET gaussian filter centered on the minute. ']
-            },
-            'second': {
-                'data_interval_type': 'Average 1-Second'
-            }
-        }
-    },
-    'KAK': {
-        'metadata': {
-            'station_name': 'Kakioka',
-            'agency_name': 'Japan Meteorological Agency',
-            'geodetic_latitude': '36.232',
-            'geodetic_longitude': '140.186',
-            'elevation': '36',
-            'sensor_orientation': 'HDZF',
-            'reported': 'HDZF',
-            'sensor_sampling_rate': 0.01,
-            'is_gin': False,
-            'is_intermagnet': False,
-            'conditions_of_use': 'The Conditions of Use for data provided' +
-                    ' through INTERMAGNET and acknowledgement templates' +
-                    ' can be found at www.intermagnet.org'
-        },
-        'interval_specific': {
-            'minute': {
-                'data_interval_type': 'filtered 1-minute (00:15-01:45)'
-            },
-            'second': {
-                'data_interval_type': ''
-            }
-        }
-    },
-    'NEW': {
-        'metadata': {
-            'station_name': 'Newport',
-            'agency_name': 'United States Geological Survey (USGS)',
-            'geodetic_latitude': '48.265',
-            'geodetic_longitude': '242.878',
-            'elevation': '770',
-            'sensor_orientation': 'HDZF',
-            'sensor_sampling_rate': 100.0,
-            'declination_base': 9547,
-            'is_gin': False,
-            'is_intermagnet': False,
-            'conditions_of_use': 'The Conditions of Use for data provided' +
-                    ' through INTERMAGNET and acknowledgement templates' +
-                    ' can be found at www.intermagnet.org'
-        },
-        'interval_specific': {
-            'minute': {
-                'data_interval_type': 'filtered 1-minute (00:15-01:45) ',
-                'filter_comments': ['Vector 1-minute values are computed' +
-                    ' from 1-second values using the INTERMAGNET gaussian' +
-                    ' filter centered on the minute. Scalar 1-minute values' +
-                    ' are computed from 1-second values using the' +
-                    ' INTERMAGNET gaussian filter centered on the minute. ']
-            },
-            'second': {
-                'data_interval_type': 'Average 1-Second'
-            }
-        }
-    },
-    'SHU': {
-        'metadata': {
-            'station_name': 'Shumagin',
-            'agency_name': 'United States Geological Survey (USGS)',
-            'geodetic_latitude': '55.348',
-            'geodetic_longitude': '199.538',
-            'elevation': '80',
-            'sensor_orientation': 'HDZF',
-            'sensor_sampling_rate': 100.0,
-            'declination_base': 7386,
-            'is_gin': False,
-            'is_intermagnet': False,
-            'conditions_of_use': 'The Conditions of Use for data provided' +
-                    ' through INTERMAGNET and acknowledgement templates' +
-                    ' can be found at www.intermagnet.org'
-        },
-        'interval_specific': {
-            'minute': {
-                'data_interval_type': 'filtered 1-minute (00:15-01:45) ',
-                'filter_comments': ['Vector 1-minute values are computed' +
-                    ' from 1-second values using the INTERMAGNET gaussian' +
-                    ' filter centered on the minute. Scalar 1-minute values' +
-                    ' are computed from 1-second values using the' +
-                    ' INTERMAGNET gaussian filter centered on the minute. ']
-            },
-            'second': {
-                'data_interval_type': 'Average 1-Second'
-            }
-        }
-    },
-    'SIT': {
-        'metadata': {
-            'station_name': 'Sitka',
-            'agency_name': 'United States Geological Survey (USGS)',
-            'geodetic_latitude': '57.058',
-            'geodetic_longitude': '224.675',
-            'elevation': '24',
-            'sensor_orientation': 'HDZF',
-            'sensor_sampling_rate': 100.0,
-            'declination_base': 12349,
-            'is_gin': False,
-            'is_intermagnet': False,
-            'conditions_of_use': 'The Conditions of Use for data provided' +
-                    ' through INTERMAGNET and acknowledgement templates' +
-                    ' can be found at www.intermagnet.org'
-        },
-        'interval_specific': {
-            'minute': {
-                'data_interval_type': 'filtered 1-minute (00:15-01:45) ',
-                'filter_comments': ['Vector 1-minute values are computed' +
-                    ' from 1-second values using the INTERMAGNET gaussian' +
-                    ' filter centered on the minute. Scalar 1-minute values' +
-                    ' are computed from 1-second values using the' +
-                    ' INTERMAGNET gaussian filter centered on the minute. ']
-            },
-            'second': {
-                'data_interval_type': 'Average 1-Second'
-            }
-        }
-    },
-    'SJG': {
-        'metadata': {
-            'station_name': 'San Juan',
-            'agency_name': 'United States Geological Survey (USGS)',
-            'geodetic_latitude': '18.113',
-            'geodetic_longitude': '293.849',
-            'elevation': '424',
-            'sensor_orientation': 'HDZF',
-            'sensor_sampling_rate': 100.0,
-            'declination_base': 208439,
-            'is_gin': False,
-            'is_intermagnet': False,
-            'conditions_of_use': 'The Conditions of Use for data provided' +
-                    ' through INTERMAGNET and acknowledgement templates' +
-                    ' can be found at www.intermagnet.org'
-        },
-        'interval_specific': {
-            'minute': {
-                'data_interval_type': 'filtered 1-minute (00:15-01:45) ',
-                'filter_comments': ['Vector 1-minute values are computed' +
-                    ' from 1-second values using the INTERMAGNET gaussian' +
-                    ' filter centered on the minute. Scalar 1-minute values' +
-                    ' are computed from 1-second values using the' +
-                    ' INTERMAGNET gaussian filter centered on the minute. ']
-            },
-            'second': {
-                'data_interval_type': 'Average 1-Second'
-            }
-        }
-    },
-    'TUC': {
-        'metadata': {
-            'station_name': 'Tucson',
-            'agency_name': 'United States Geological Survey (USGS)',
-            'geodetic_latitude': '32.174',
-            'geodetic_longitude': '249.267',
-            'elevation': '946',
-            'sensor_orientation': 'HDZF',
-            'sensor_sampling_rate': 100.0,
-            'declination_base': 5863,
-            'is_gin': False,
-            'is_intermagnet': False,
-            'conditions_of_use': 'The Conditions of Use for data provided' +
-                    ' through INTERMAGNET and acknowledgement templates' +
-                    ' can be found at www.intermagnet.org'
-        },
-        'interval_specific': {
-            'minute': {
-                'data_interval_type': 'filtered 1-minute (00:15-01:45) ',
-                'filter_comments': ['Vector 1-minute values are computed' +
-                    ' from 1-second values using the INTERMAGNET gaussian' +
-                    ' filter centered on the minute. Scalar 1-minute values' +
-                    ' are computed from 1-second values using the' +
-                    ' INTERMAGNET gaussian filter centered on the minute. ']
-            },
-            'second': {
-                'data_interval_type': 'Average 1-Second'
-            }
-        }
-    },
-    'USGS': {
-        'metadata': {
-            'station_name': 'USGS',
-            'agency_name': 'United States Geological Survey (USGS)',
-            'geodetic_latitude': '40.137',
-            'geodetic_longitude': '254.764',
-            'elevation': '1682',
-            'sensor_orientation': 'HDZF',
-            'sensor_sampling_rate': 100.0,
-            'declination_base': 0,
-            'is_gin': False,
-            'is_intermagnet': False,
-            'conditions_of_use': 'The Conditions of Use for data provided' +
-                    ' through INTERMAGNET and acknowledgement templates' +
-                    ' can be found at www.intermagnet.org'
-        },
-        'interval_specific': {
-            'minute': {
-                'data_interval_type': '1-minute calculated',
-                'filter_comments': []
-            },
-            'hourly': {
-                'data_interval_type': '1-hour calculated'
-            }
-        }
-    },
-    'BLC': {
-        'metadata': {
-            'station_name': 'Baker Lake',
-            'agency_name': 'Geological Survey of Canada (GSC)',
-            'geodetic_latitude': '64.300',
-            'geodetic_longitude': '264.000',
-            'elevation': '0',
-            'sensor_orientation': 'XYZF',
-            'reported': 'XYZF',
-            'sensor_sampling_rate': 100.0,
-            'is_gin': False,
-            'is_intermagnet': False,
-            'conditions_of_use': 'The Conditions of Use for data provided' +
-                    ' through INTERMAGNET and acknowledgement templates' +
-                    ' can be found at www.intermagnet.org'
-        },
-        'interval_specific': {
-            'minute': {
-                'data_interval_type': 'filtered 1-minute (00:15-01:45)'
-            },
-            'second': {
-                'data_interval_type': ''
-            }
-        }
-    },
-    'BRD': {
-        'metadata': {
-            'station_name': 'Brandon',
-            'agency_name': 'Geological Survey of Canada (GSC)',
-            'geodetic_latitude': '49.600',
-            'geodetic_longitude': '262.900',
-            'elevation': '0',
-            'sensor_orientation': 'XYZF',
-            'reported': 'XYZF',
-            'sensor_sampling_rate': 100.0,
-            'is_gin': False,
-            'is_intermagnet': False,
-            'conditions_of_use': 'The Conditions of Use for data provided' +
-                    ' through INTERMAGNET and acknowledgement templates' +
-                    ' can be found at www.intermagnet.org'
-        },
-        'interval_specific': {
-            'minute': {
-                'data_interval_type': 'filtered 1-minute (00:15-01:45)'
-            },
-            'second': {
-                'data_interval_type': ''
-            }
-        }
-    },
-    'CBB': {
-        'metadata': {
-            'station_name': 'Cambridge Bay',
-            'agency_name': 'Geological Survey of Canada (GSC)',
-            'geodetic_latitude': '69.200',
-            'geodetic_longitude': '255.000',
-            'elevation': '0',
-            'sensor_orientation': 'XYZF',
-            'reported': 'XYZF',
-            'sensor_sampling_rate': 100.0,
-            'is_gin': False,
-            'is_intermagnet': False,
-            'conditions_of_use': 'The Conditions of Use for data provided' +
-                    ' through INTERMAGNET and acknowledgement templates' +
-                    ' can be found at www.intermagnet.org'
-        },
-        'interval_specific': {
-            'minute': {
-                'data_interval_type': 'filtered 1-minute (00:15-01:45)'
-            },
-            'second': {
-                'data_interval_type': ''
-            }
-        }
-    },
-    'EUA': {
-        'metadata': {
-            'station_name': 'Eureka',
-            'agency_name': 'Geological Survey of Canada (GSC)',
-            'geodetic_latitude': '55.300',
-            'geodetic_longitude': '282.300',
-            'elevation': '0',
-            'sensor_orientation': 'XYZF',
-            'reported': 'XYZF',
-            'sensor_sampling_rate': 100.0,
-            'is_gin': False,
-            'is_intermagnet': False,
-            'conditions_of_use': 'The Conditions of Use for data provided' +
-                    ' through INTERMAGNET and acknowledgement templates' +
-                    ' can be found at www.intermagnet.org'
-        },
-        'interval_specific': {
-            'minute': {
-                'data_interval_type': 'filtered 1-minute (00:15-01:45)'
-            },
-            'second': {
-                'data_interval_type': ''
-            }
-        }
-    },
-    'FCC': {
-        'metadata': {
-            'station_name': 'Fort Churchill',
-            'agency_name': 'Geological Survey of Canada (GSC)',
-            'geodetic_latitude': '58.800',
-            'geodetic_longitude': '265.900',
-            'elevation': '0',
-            'sensor_orientation': 'XYZF',
-            'reported': 'XYZF',
-            'sensor_sampling_rate': 100.0,
-            'is_gin': False,
-            'is_intermagnet': False,
-            'conditions_of_use': 'The Conditions of Use for data provided' +
-                    ' through INTERMAGNET and acknowledgement templates' +
-                    ' can be found at www.intermagnet.org'
-        },
-        'interval_specific': {
-            'minute': {
-                'data_interval_type': 'filtered 1-minute (00:15-01:45)'
-            },
-            'second': {
-                'data_interval_type': ''
-            }
-        }
-    },
-    'HAD': {
-        'metadata': {
-            'station_name': 'Hartland',
-            'agency_name': 'British Geological Survey (BGS)',
-            'geodetic_latitude': '51.000',
-            'geodetic_longitude': '355.500',
-            'elevation': '0',
-            'sensor_orientation': 'HDZF',
-            'reported': 'HDZF',
-            'sensor_sampling_rate': 100.0,
-            'is_gin': False,
-            'is_intermagnet': False,
-            'conditions_of_use': 'The Conditions of Use for data provided' +
-                    ' through INTERMAGNET and acknowledgement templates' +
-                    ' can be found at www.intermagnet.org'
-        },
-        'interval_specific': {
-            'minute': {
-                'data_interval_type': 'filtered 1-minute (00:15-01:45)'
-            },
-            'second': {
-                'data_interval_type': ''
-            }
-        }
-    },
-    'HER': {
-        'metadata': {
-            'station_name': 'Hermanus',
-            'agency_name': 'National Research Foundation',
-            'geodetic_latitude': '-34.400',
-            'geodetic_longitude': '19.200',
-            'elevation': '0',
-            'sensor_orientation': 'HDZF',
-            'reported': 'HDZF',
-            'sensor_sampling_rate': 100.0,
-            'is_gin': False,
-            'is_intermagnet': False,
-            'conditions_of_use': 'The Conditions of Use for data provided' +
-                    ' through INTERMAGNET and acknowledgement templates' +
-                    ' can be found at www.intermagnet.org'
-        },
-        'interval_specific': {
-            'minute': {
-                'data_interval_type': 'filtered 1-minute (00:15-01:45)'
-            },
-            'second': {
-                'data_interval_type': ''
-            }
-        }
-    },
-    'IQA': {
-        'metadata': {
-            'station_name': 'Iqaluit',
-            'agency_name': 'Geological Survey of Canada (GSC)',
-            'geodetic_latitude': '63.800',
-            'geodetic_longitude': '291.500',
-            'elevation': '0',
-            'sensor_orientation': 'XYZF',
-            'reported': 'XYZF',
-            'sensor_sampling_rate': 100.0,
-            'is_gin': False,
-            'is_intermagnet': False,
-            'conditions_of_use': 'The Conditions of Use for data provided' +
-                    ' through INTERMAGNET and acknowledgement templates' +
-                    ' can be found at www.intermagnet.org'
-        },
-        'interval_specific': {
-            'minute': {
-                'data_interval_type': 'filtered 1-minute (00:15-01:45)'
-            },
-            'second': {
-                'data_interval_type': ''
-            }
-        }
-    },
-    'MEA': {
-        'metadata': {
-            'station_name': 'Meanook',
-            'agency_name': 'Geological Survey of Canada (GSC)',
-            'geodetic_latitude': '54.600',
-            'geodetic_longitude': '246.700',
-            'elevation': '0',
-            'sensor_orientation': 'XYZF',
-            'reported': 'XYZF',
-            'sensor_sampling_rate': 100.0,
-            'is_gin': False,
-            'is_intermagnet': False,
-            'conditions_of_use': 'The Conditions of Use for data provided' +
-                    ' through INTERMAGNET and acknowledgement templates' +
-                    ' can be found at www.intermagnet.org'
-        },
-        'interval_specific': {
-            'minute': {
-                'data_interval_type': 'filtered 1-minute (00:15-01:45)'
-            },
-            'second': {
-                'data_interval_type': ''
-            }
-        }
-    },
-    'OTT': {
-        'metadata': {
-            'station_name': 'Ottowa',
-            'agency_name': 'Geological Survey of Canada (GSC)',
-            'geodetic_latitude': '45.400',
-            'geodetic_longitude': '284.500',
-            'elevation': '0',
-            'sensor_orientation': 'XYZF',
-            'reported': 'XYZF',
-            'sensor_sampling_rate': 100.0,
-            'is_gin': False,
-            'is_intermagnet': False,
-            'conditions_of_use': 'The Conditions of Use for data provided' +
-                    ' through INTERMAGNET and acknowledgement templates' +
-                    ' can be found at www.intermagnet.org'
-        },
-        'interval_specific': {
-            'minute': {
-                'data_interval_type': 'filtered 1-minute (00:15-01:45)'
-            },
-            'second': {
-                'data_interval_type': ''
-            }
-        }
-    },
-    'RES': {
-        'metadata': {
-            'station_name': 'Resolute Bay',
-            'agency_name': 'Geological Survey of Canada (GSC)',
-            'geodetic_latitude': '74.700',
-            'geodetic_longitude': '265.100',
-            'elevation': '0',
-            'sensor_orientation': 'XYZF',
-            'reported': 'XYZF',
-            'sensor_sampling_rate': 100.0,
-            'is_gin': False,
-            'is_intermagnet': False,
-            'conditions_of_use': 'The Conditions of Use for data provided' +
-                    ' through INTERMAGNET and acknowledgement templates' +
-                    ' can be found at www.intermagnet.org'
-        },
-        'interval_specific': {
-            'minute': {
-                'data_interval_type': 'filtered 1-minute (00:15-01:45)'
-            },
-            'second': {
-                'data_interval_type': ''
-            }
-        }
-    },
-    'SNK': {
-        'metadata': {
-            'station_name': 'Sanikiluaq',
-            'agency_name': 'Geological Survey of Canada (GSC)',
-            'geodetic_latitude': '62.400',
-            'geodetic_longitude': '245.500',
-            'elevation': '0',
-            'sensor_orientation': 'XYZF',
-            'reported': 'XYZF',
-            'sensor_sampling_rate': 100.0,
-            'is_gin': False,
-            'is_intermagnet': False,
-            'conditions_of_use': 'The Conditions of Use for data provided' +
-                    ' through INTERMAGNET and acknowledgement templates' +
-                    ' can be found at www.intermagnet.org'
-        },
-        'interval_specific': {
-            'minute': {
-                'data_interval_type': 'filtered 1-minute (00:15-01:45)'
-            },
-            'second': {
-                'data_interval_type': ''
-            }
-        }
-    },
-    'STJ': {
-        'metadata': {
-            'station_name': 'St Johns',
-            'agency_name': 'Geological Survey of Canada (GSC)',
-            'geodetic_latitude': '47.600',
-            'geodetic_longitude': '307.300',
-            'elevation': '0',
-            'sensor_orientation': 'XYZF',
-            'reported': 'XYZF',
-            'sensor_sampling_rate': 100.0,
-            'is_gin': False,
-            'is_intermagnet': False,
-            'conditions_of_use': 'The Conditions of Use for data provided' +
-                    ' through INTERMAGNET and acknowledgement templates' +
-                    ' can be found at www.intermagnet.org'
-        },
-        'interval_specific': {
-            'minute': {
-                'data_interval_type': 'filtered 1-minute (00:15-01:45)'
-            },
-            'second': {
-                'data_interval_type': ''
-            }
-        }
-    },
-    'VIC': {
-        'metadata': {
-            'station_name': 'Victoria',
-            'agency_name': 'Geological Survey of Canada (GSC)',
-            'geodetic_latitude': '48.600',
-            'geodetic_longitude': '236.600',
-            'elevation': '0',
-            'sensor_orientation': 'XYZF',
-            'reported': 'XYZF',
-            'sensor_sampling_rate': 100.0,
-            'is_gin': False,
-            'is_intermagnet': False,
-            'conditions_of_use': 'The Conditions of Use for data provided' +
-                    ' through INTERMAGNET and acknowledgement templates' +
-                    ' can be found at www.intermagnet.org'
-        },
-        'interval_specific': {
-            'minute': {
-                'data_interval_type': 'filtered 1-minute (00:15-01:45)'
-            },
-            'second': {
-                'data_interval_type': ''
-            }
-        }
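+    # Schema note: each entry maps an observatory code to a "metadata" dict of
+    # header fields plus an optional "interval_specific" dict keyed by interval
+    # name ("minute", "second", ...). "declination_base" appears to be stored
+    # in tenths of arcminutes; StreamConverter divides it by 10 before
+    # converting minutes to radians. Entries without "interval_specific" fall
+    # back to DEFAULT_INTERVAL_SPECIFIC, defined after this dict.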
+    "BDT": {
+        "metadata": {
+            "station_name": "Boulder Test",
+            "agency_name": "United States Geological Survey (USGS)",
+            "geodetic_latitude": "40.137",
+            "geodetic_longitude": "254.763",
+            "elevation": "1682",
+            "sensor_orientation": "HDZF",
+            "sensor_sampling_rate": 100.0,
+            "declination_base": 5527,
+            "is_gin": False,
+            "is_intermagnet": False,
+            "conditions_of_use": "The Conditions of Use for data provided"
+            + " through INTERMAGNET and acknowledgement templates"
+            + " can be found at www.intermagnet.org",
+        },
+        "interval_specific": {
+            "minute": {
+                "data_interval_type": "filtered 1-minute (00:15-01:45) ",
+                "filter_comments": [
+                    "Vector 1-minute values are computed"
+                    + " from 1-second values using the INTERMAGNET gaussian"
+                    + " filter centered on the minute. Scalar 1-minute values"
+                    + " are computed from 1-second values using the"
+                    + " INTERMAGNET gaussian filter centered on the minute. "
+                ],
+            },
+            "second": {"data_interval_type": "Average 1-Second"},
+        },
+    },
+    "BOU": {
+        "metadata": {
+            "station_name": "Boulder",
+            "agency_name": "United States Geological Survey (USGS)",
+            "geodetic_latitude": "40.137",
+            "geodetic_longitude": "254.763",
+            "elevation": "1682",
+            "sensor_orientation": "HDZF",
+            "sensor_sampling_rate": 100.0,
+            "declination_base": 5527,
+            "is_gin": False,
+            "is_intermagnet": False,
+            "conditions_of_use": "The Conditions of Use for data provided"
+            + " through INTERMAGNET and acknowledgement templates"
+            + " can be found at www.intermagnet.org",
+        },
+        "interval_specific": {
+            "minute": {
+                "data_interval_type": "filtered 1-minute (00:15-01:45) ",
+                "filter_comments": [
+                    "Vector 1-minute values are computed"
+                    + " from 1-second values using the INTERMAGNET gaussian"
+                    + " filter centered on the minute. Scalar 1-minute values"
+                    + " are computed from 1-second values using the"
+                    + " INTERMAGNET gaussian filter centered on the minute. "
+                ],
+            },
+            "second": {"data_interval_type": "Average 1-Second"},
+        },
+    },
+    "TST": {
+        "metadata": {
+            "station_name": "Boulder Test",
+            "agency_name": "United States Geological Survey (USGS)",
+            "geodetic_latitude": "40.137",
+            "geodetic_longitude": "254.763",
+            "elevation": "1682",
+            "sensor_orientation": "HDZF",
+            "sensor_sampling_rate": 10000.0,
+            "declination_base": 5527,
+            "is_gin": False,
+            "is_intermagnet": False,
+            "conditions_of_use": "The Conditions of Use for data provided"
+            + " through INTERMAGNET and acknowledgement templates"
+            + " can be found at www.intermagnet.org",
+        },
+        "interval_specific": {
+            "minute": {
+                "data_interval_type": "filtered 1-minute (00:15-01:45) ",
+                "filter_comments": [
+                    "Vector 1-minute values are computed"
+                    + " from 1-second values using the INTERMAGNET gaussian"
+                    + " filter centered on the minute. Scalar 1-minute values"
+                    + " are computed from 1-second values using the"
+                    + " INTERMAGNET gaussian filter centered on the minute. "
+                ],
+            },
+            "second": {"data_interval_type": "Average 1-Second"},
+        },
+    },
+    "BRW": {
+        "metadata": {
+            "station_name": "Barrow",
+            "agency_name": "United States Geological Survey (USGS)",
+            "geodetic_latitude": "71.322",
+            "geodetic_longitude": "203.378",
+            "elevation": "10",
+            "sensor_orientation": "HDZF",
+            "sensor_sampling_rate": 100.0,
+            "declination_base": 10589,
+            "is_gin": False,
+            "is_intermagnet": False,
+            "conditions_of_use": "The Conditions of Use for data provided"
+            + " through INTERMAGNET and acknowledgement templates"
+            + " can be found at www.intermagnet.org",
+        },
+        "data_interval_type": {
+            "minute": {
+                "data_interval_type": "filtered 1-minute (00:15-01:45) ",
+                "filter_comments": [
+                    "Vector 1-minute values are computed"
+                    + " from 1-second values using the INTERMAGNET gaussian"
+                    + " filter centered on the minute. Scalar 1-minute values"
+                    + " are computed from 1-second values using the"
+                    + " INTERMAGNET gaussian filter centered on the minute. "
+                ],
+            },
+            "second": {"data_interval_type": "Average 1-Second"},
+        },
+    },
+    "BRT": {
+        "metadata": {
+            "station_name": "Barrow Test",
+            "agency_name": "United States Geological Survey (USGS)",
+            "geodetic_latitude": "71.322",
+            "geodetic_longitude": "203.378",
+            "elevation": "10",
+            "sensor_orientation": "HDZF",
+            "sensor_sampling_rate": 10000.0,
+            "declination_base": 10589,
+            "is_gin": False,
+            "is_intermagnet": False,
+            "conditions_of_use": "The Conditions of Use for data provided"
+            + " through INTERMAGNET and acknowledgement templates"
+            + " can be found at www.intermagnet.org",
+        },
+        "data_interval_type": {
+            "minute": {
+                "data_interval_type": "filtered 1-minute (00:15-01:45) ",
+                "filter_comments": [
+                    "Vector 1-minute values are computed"
+                    + " from 1-second values using the INTERMAGNET gaussian"
+                    + " filter centered on the minute. Scalar 1-minute values"
+                    + " are computed from 1-second values using the"
+                    + " INTERMAGNET gaussian filter centered on the minute. "
+                ],
+            },
+            "second": {"data_interval_type": "Average 1-Second"},
+        },
+    },
+    "BSL": {
+        "metadata": {
+            "station_name": "Stennis Space Center",
+            "agency_name": "United States Geological Survey (USGS)",
+            "geodetic_latitude": "30.350",
+            "geodetic_longitude": "270.365",
+            "elevation": "8",
+            "sensor_orientation": "HDZF",
+            "sensor_sampling_rate": 100.0,
+            "declination_base": 215772,
+            "is_gin": False,
+            "is_intermagnet": False,
+            "conditions_of_use": "The Conditions of Use for data provided"
+            + " through INTERMAGNET and acknowledgement templates"
+            + " can be found at www.intermagnet.org",
+        },
+        "interval_specific": {
+            "minute": {
+                "data_interval_type": "filtered 1-minute (00:15-01:45) ",
+                "filter_comments": [
+                    "Vector 1-minute values are computed"
+                    + " from 1-second values using the INTERMAGNET gaussian"
+                    + " filter centered on the minute. Scalar 1-minute values"
+                    + " are computed from 1-second values using the"
+                    + " INTERMAGNET gaussian filter centered on the minute. "
+                ],
+            },
+            "second": {"data_interval_type": "Average 1-Second"},
+        },
+    },
+    "CMO": {
+        "metadata": {
+            "station_name": "College",
+            "agency_name": "United States Geological Survey (USGS)",
+            "geodetic_latitude": "64.874",
+            "geodetic_longitude": "212.140",
+            "elevation": "197",
+            "sensor_orientation": "HDZF",
+            "sensor_sampling_rate": 100.0,
+            "declination_base": 12151,
+            "is_gin": False,
+            "is_intermagnet": False,
+            "conditions_of_use": "The Conditions of Use for data provided"
+            + " through INTERMAGNET and acknowledgement templates"
+            + " can be found at www.intermagnet.org",
+        }
+    },
+    "CMT": {
+        "metadata": {
+            "station_name": "College",
+            "agency_name": "United States Geological Survey (USGS)",
+            "geodetic_latitude": "64.874",
+            "geodetic_longitude": "212.140",
+            "elevation": "197",
+            "sensor_orientation": "HDZF",
+            "sensor_sampling_rate": 10000.0,
+            "declination_base": 12151,
+            "is_gin": False,
+            "is_intermagnet": False,
+            "conditions_of_use": "The Conditions of Use for data provided"
+            + " through INTERMAGNET and acknowledgement templates"
+            + " can be found at www.intermagnet.org",
+        }
+    },
+    "DED": {
+        "metadata": {
+            "station_name": "Deadhorse",
+            "agency_name": "United States Geological Survey (USGS)",
+            "geodetic_latitude": "70.355",
+            "geodetic_longitude": "211.207",
+            "elevation": "10",
+            "sensor_orientation": "HDZF",
+            "sensor_sampling_rate": 100.0,
+            "declination_base": 10755,
+            "is_gin": False,
+            "is_intermagnet": False,
+            "conditions_of_use": "The Conditions of Use for data provided"
+            + " through INTERMAGNET and acknowledgement templates"
+            + " can be found at www.intermagnet.org",
+        },
+        "interval_specific": {
+            "minute": {
+                "data_interval_type": "filtered 1-minute (00:15-01:45) ",
+                "filter_comments": [
+                    "Vector 1-minute values are computed"
+                    + " from 1-second values using the INTERMAGNET gaussian"
+                    + " filter centered on the minute. Scalar 1-minute values"
+                    + " are computed from 1-second values using the"
+                    + " INTERMAGNET gaussian filter centered on the minute. "
+                ],
+            },
+            "second": {"data_interval_type": "Average 1-Second"},
+        },
+    },
+    "DHT": {
+        "metadata": {
+            "station_name": "Deadhorse Test",
+            "agency_name": "United States Geological Survey (USGS)",
+            "geodetic_latitude": "70.355",
+            "geodetic_longitude": "211.207",
+            "elevation": "10",
+            "sensor_orientation": "HDZF",
+            "sensor_sampling_rate": 10000.0,
+            "declination_base": 10755,
+            "is_gin": False,
+            "is_intermagnet": False,
+            "conditions_of_use": "The Conditions of Use for data provided"
+            + " through INTERMAGNET and acknowledgement templates"
+            + " can be found at www.intermagnet.org",
+        },
+        "interval_specific": {
+            "minute": {
+                "data_interval_type": "filtered 1-minute (00:15-01:45) ",
+                "filter_comments": [
+                    "Vector 1-minute values are computed"
+                    + " from 1-second values using the INTERMAGNET gaussian"
+                    + " filter centered on the minute. Scalar 1-minute values"
+                    + " are computed from 1-second values using the"
+                    + " INTERMAGNET gaussian filter centered on the minute. "
+                ],
+            },
+            "second": {"data_interval_type": "Average 1-Second"},
+        },
+    },
+    "FRD": {
+        "metadata": {
+            "station_name": "Fredericksburg",
+            "agency_name": "United States Geological Survey (USGS)",
+            "geodetic_latitude": "38.205",
+            "geodetic_longitude": "282.627",
+            "elevation": "69",
+            "sensor_orientation": "HDZF",
+            "sensor_sampling_rate": 100.0,
+            "declination_base": 209690,
+            "is_gin": False,
+            "is_intermagnet": False,
+            "conditions_of_use": "The Conditions of Use for data provided"
+            + " through INTERMAGNET and acknowledgement templates"
+            + " can be found at www.intermagnet.org",
+        },
+        "interval_specific": {
+            "minute": {
+                "data_interval_type": "filtered 1-minute (00:15-01:45) ",
+                "filter_comments": [
+                    "Vector 1-minute values are computed"
+                    + " from 1-second values using the INTERMAGNET gaussian"
+                    + " filter centered on the minute. Scalar 1-minute values"
+                    + " are computed from 1-second values using the"
+                    + " INTERMAGNET gaussian filter centered on the minute. "
+                ],
+            },
+            "second": {"data_interval_type": "Average 1-Second"},
+        },
+    },
+    "FDT": {
+        "metadata": {
+            "station_name": "Fredericksburg Test",
+            "agency_name": "United States Geological Survey (USGS)",
+            "geodetic_latitude": "38.205",
+            "geodetic_longitude": "282.627",
+            "elevation": "69",
+            "sensor_orientation": "HDZF",
+            "sensor_sampling_rate": 100.0,
+            "declination_base": 209690,
+            "is_gin": False,
+            "is_intermagnet": False,
+            "conditions_of_use": "The Conditions of Use for data provided"
+            + " through INTERMAGNET and acknowledgement templates"
+            + " can be found at www.intermagnet.org",
+        },
+        "interval_specific": {
+            "minute": {
+                "data_interval_type": "filtered 1-minute (00:15-01:45) ",
+                "filter_comments": [
+                    "Vector 1-minute values are computed"
+                    + " from 1-second values using the INTERMAGNET gaussian"
+                    + " filter centered on the minute. Scalar 1-minute values"
+                    + " are computed from 1-second values using the"
+                    + " INTERMAGNET gaussian filter centered on the minute. "
+                ],
+            },
+            "second": {"data_interval_type": "Average 1-Second"},
+        },
+    },
+    "FRN": {
+        "metadata": {
+            "station_name": "Fresno",
+            "agency_name": "United States Geological Survey (USGS)",
+            "geodetic_latitude": "37.091",
+            "geodetic_longitude": "240.282",
+            "elevation": "331",
+            "sensor_orientation": "HDZF",
+            "sensor_sampling_rate": 100.0,
+            "declination_base": 8097,
+            "is_gin": False,
+            "is_intermagnet": False,
+            "conditions_of_use": "The Conditions of Use for data provided"
+            + " through INTERMAGNET and acknowledgement templates"
+            + " can be found at www.intermagnet.org",
+        },
+        "interval_specific": {
+            "minute": {
+                "data_interval_type": "filtered 1-minute (00:15-01:45) ",
+                "filter_comments": [
+                    "Vector 1-minute values are computed"
+                    + " from 1-second values using the INTERMAGNET gaussian"
+                    + " filter centered on the minute. Scalar 1-minute values"
+                    + " are computed from 1-second values using the"
+                    + " INTERMAGNET gaussian filter centered on the minute. "
+                ],
+            },
+            "second": {"data_interval_type": "Average 1-Second"},
+        },
+    },
+    "GUA": {
+        "metadata": {
+            "station_name": "Guam",
+            "agency_name": "United States Geological Survey (USGS)",
+            "geodetic_latitude": "13.588",
+            "geodetic_longitude": "144.867",
+            "elevation": "140",
+            "sensor_orientation": "HDZF",
+            "sensor_sampling_rate": 100.0,
+            "declination_base": 764,
+            "is_gin": False,
+            "is_intermagnet": False,
+            "conditions_of_use": "The Conditions of Use for data provided"
+            + " through INTERMAGNET and acknowledgement templates"
+            + " can be found at www.intermagnet.org",
+        },
+        "interval_specific": {
+            "minute": {
+                "data_interval_type": "filtered 1-minute (00:15-01:45) ",
+                "filter_comments": [
+                    "Vector 1-minute values are computed"
+                    + " from 1-second values using the INTERMAGNET gaussian"
+                    + " filter centered on the minute. Scalar 1-minute values"
+                    + " are computed from 1-second values using the"
+                    + " INTERMAGNET gaussian filter centered on the minute. "
+                ],
+            },
+            "second": {"data_interval_type": "Average 1-Second"},
+        },
+    },
+    "HON": {
+        "metadata": {
+            "station_name": "Honolulu",
+            "agency_name": "United States Geological Survey (USGS)",
+            "geodetic_latitude": "21.316",
+            "geodetic_longitude": "202.000",
+            "elevation": "4",
+            "sensor_orientation": "HDZF",
+            "sensor_sampling_rate": 100.0,
+            "declination_base": 5982,
+            "is_gin": False,
+            "is_intermagnet": False,
+            "conditions_of_use": "The Conditions of Use for data provided"
+            + " through INTERMAGNET and acknowledgement templates"
+            + " can be found at www.intermagnet.org",
+        },
+        "interval_specific": {
+            "minute": {
+                "data_interval_type": "filtered 1-minute (00:15-01:45) ",
+                "filter_comments": [
+                    "Vector 1-minute values are computed"
+                    + " from 1-second values using the INTERMAGNET gaussian"
+                    + " filter centered on the minute. Scalar 1-minute values"
+                    + " are computed from 1-second values using the"
+                    + " INTERMAGNET gaussian filter centered on the minute. "
+                ],
+            },
+            "second": {"data_interval_type": "Average 1-Second"},
+        },
+    },
+    "KAK": {
+        "metadata": {
+            "station_name": "Kakioka",
+            "agency_name": "Japan Meteorological Agency",
+            "geodetic_latitude": "36.232",
+            "geodetic_longitude": "140.186",
+            "elevation": "36",
+            "sensor_orientation": "HDZF",
+            "reported": "HDZF",
+            "sensor_sampling_rate": 0.01,
+            "is_gin": False,
+            "is_intermagnet": False,
+            "conditions_of_use": "The Conditions of Use for data provided"
+            + " through INTERMAGNET and acknowledgement templates"
+            + " can be found at www.intermagnet.org",
+        },
+        "interval_specific": {
+            "minute": {"data_interval_type": "filtered 1-minute (00:15-01:45)"},
+            "second": {"data_interval_type": ""},
+        },
+    },
+    "NEW": {
+        "metadata": {
+            "station_name": "Newport",
+            "agency_name": "United States Geological Survey (USGS)",
+            "geodetic_latitude": "48.265",
+            "geodetic_longitude": "242.878",
+            "elevation": "770",
+            "sensor_orientation": "HDZF",
+            "sensor_sampling_rate": 100.0,
+            "declination_base": 9547,
+            "is_gin": False,
+            "is_intermagnet": False,
+            "conditions_of_use": "The Conditions of Use for data provided"
+            + " through INTERMAGNET and acknowledgement templates"
+            + " can be found at www.intermagnet.org",
+        },
+        "interval_specific": {
+            "minute": {
+                "data_interval_type": "filtered 1-minute (00:15-01:45) ",
+                "filter_comments": [
+                    "Vector 1-minute values are computed"
+                    + " from 1-second values using the INTERMAGNET gaussian"
+                    + " filter centered on the minute. Scalar 1-minute values"
+                    + " are computed from 1-second values using the"
+                    + " INTERMAGNET gaussian filter centered on the minute. "
+                ],
+            },
+            "second": {"data_interval_type": "Average 1-Second"},
+        },
+    },
+    "SHU": {
+        "metadata": {
+            "station_name": "Shumagin",
+            "agency_name": "United States Geological Survey (USGS)",
+            "geodetic_latitude": "55.348",
+            "geodetic_longitude": "199.538",
+            "elevation": "80",
+            "sensor_orientation": "HDZF",
+            "sensor_sampling_rate": 100.0,
+            "declination_base": 7386,
+            "is_gin": False,
+            "is_intermagnet": False,
+            "conditions_of_use": "The Conditions of Use for data provided"
+            + " through INTERMAGNET and acknowledgement templates"
+            + " can be found at www.intermagnet.org",
+        },
+        "interval_specific": {
+            "minute": {
+                "data_interval_type": "filtered 1-minute (00:15-01:45) ",
+                "filter_comments": [
+                    "Vector 1-minute values are computed"
+                    + " from 1-second values using the INTERMAGNET gaussian"
+                    + " filter centered on the minute. Scalar 1-minute values"
+                    + " are computed from 1-second values using the"
+                    + " INTERMAGNET gaussian filter centered on the minute. "
+                ],
+            },
+            "second": {"data_interval_type": "Average 1-Second"},
+        },
+    },
+    "SIT": {
+        "metadata": {
+            "station_name": "Sitka",
+            "agency_name": "United States Geological Survey (USGS)",
+            "geodetic_latitude": "57.058",
+            "geodetic_longitude": "224.675",
+            "elevation": "24",
+            "sensor_orientation": "HDZF",
+            "sensor_sampling_rate": 100.0,
+            "declination_base": 12349,
+            "is_gin": False,
+            "is_intermagnet": False,
+            "conditions_of_use": "The Conditions of Use for data provided"
+            + " through INTERMAGNET and acknowledgement templates"
+            + " can be found at www.intermagnet.org",
+        },
+        "interval_specific": {
+            "minute": {
+                "data_interval_type": "filtered 1-minute (00:15-01:45) ",
+                "filter_comments": [
+                    "Vector 1-minute values are computed"
+                    + " from 1-second values using the INTERMAGNET gaussian"
+                    + " filter centered on the minute. Scalar 1-minute values"
+                    + " are computed from 1-second values using the"
+                    + " INTERMAGNET gaussian filter centered on the minute. "
+                ],
+            },
+            "second": {"data_interval_type": "Average 1-Second"},
+        },
+    },
+    "SJG": {
+        "metadata": {
+            "station_name": "San Juan",
+            "agency_name": "United States Geological Survey (USGS)",
+            "geodetic_latitude": "18.113",
+            "geodetic_longitude": "293.849",
+            "elevation": "424",
+            "sensor_orientation": "HDZF",
+            "sensor_sampling_rate": 100.0,
+            "declination_base": 208439,
+            "is_gin": False,
+            "is_intermagnet": False,
+            "conditions_of_use": "The Conditions of Use for data provided"
+            + " through INTERMAGNET and acknowledgement templates"
+            + " can be found at www.intermagnet.org",
+        },
+        "interval_specific": {
+            "minute": {
+                "data_interval_type": "filtered 1-minute (00:15-01:45) ",
+                "filter_comments": [
+                    "Vector 1-minute values are computed"
+                    + " from 1-second values using the INTERMAGNET gaussian"
+                    + " filter centered on the minute. Scalar 1-minute values"
+                    + " are computed from 1-second values using the"
+                    + " INTERMAGNET gaussian filter centered on the minute. "
+                ],
+            },
+            "second": {"data_interval_type": "Average 1-Second"},
+        },
+    },
+    "TUC": {
+        "metadata": {
+            "station_name": "Tucson",
+            "agency_name": "United States Geological Survey (USGS)",
+            "geodetic_latitude": "32.174",
+            "geodetic_longitude": "249.267",
+            "elevation": "946",
+            "sensor_orientation": "HDZF",
+            "sensor_sampling_rate": 100.0,
+            "declination_base": 5863,
+            "is_gin": False,
+            "is_intermagnet": False,
+            "conditions_of_use": "The Conditions of Use for data provided"
+            + " through INTERMAGNET and acknowledgement templates"
+            + " can be found at www.intermagnet.org",
+        },
+        "interval_specific": {
+            "minute": {
+                "data_interval_type": "filtered 1-minute (00:15-01:45) ",
+                "filter_comments": [
+                    "Vector 1-minute values are computed"
+                    + " from 1-second values using the INTERMAGNET gaussian"
+                    + " filter centered on the minute. Scalar 1-minute values"
+                    + " are computed from 1-second values using the"
+                    + " INTERMAGNET gaussian filter centered on the minute. "
+                ],
+            },
+            "second": {"data_interval_type": "Average 1-Second"},
+        },
+    },
+    "USGS": {
+        "metadata": {
+            "station_name": "USGS",
+            "agency_name": "United States Geological Survey (USGS)",
+            "geodetic_latitude": "40.137",
+            "geodetic_longitude": "254.764",
+            "elevation": "1682",
+            "sensor_orientation": "HDZF",
+            "sensor_sampling_rate": 100.0,
+            "declination_base": 0,
+            "is_gin": False,
+            "is_intermagnet": False,
+            "conditions_of_use": "The Conditions of Use for data provided"
+            + " through INTERMAGNET and acknowledgement templates"
+            + " can be found at www.intermagnet.org",
+        },
+        "interval_specific": {
+            "minute": {
+                "data_interval_type": "1-minute calculated",
+                "filter_comments": [],
+            },
+            "hourly": {"data_interval_type": "1-hour calculated"},
+        },
+    },
+    "BLC": {
+        "metadata": {
+            "station_name": "Baker Lake",
+            "agency_name": "Geological Survey of Canada (GSC)",
+            "geodetic_latitude": "64.300",
+            "geodetic_longitude": "264.000",
+            "elevation": "0",
+            "sensor_orientation": "XYZF",
+            "reported": "XYZF",
+            "sensor_sampling_rate": 100.0,
+            "is_gin": False,
+            "is_intermagnet": False,
+            "conditions_of_use": "The Conditions of Use for data provided"
+            + " through INTERMAGNET and acknowledgement templates"
+            + " can be found at www.intermagnet.org",
+        },
+        "interval_specific": {
+            "minute": {"data_interval_type": "filtered 1-minute (00:15-01:45)"},
+            "second": {"data_interval_type": ""},
+        },
+    },
+    "BRD": {
+        "metadata": {
+            "station_name": "Brandon",
+            "agency_name": "Geological Survey of Canada (GSC)",
+            "geodetic_latitude": "49.600",
+            "geodetic_longitude": "262.900",
+            "elevation": "0",
+            "sensor_orientation": "XYZF",
+            "reported": "XYZF",
+            "sensor_sampling_rate": 100.0,
+            "is_gin": False,
+            "is_intermagnet": False,
+            "conditions_of_use": "The Conditions of Use for data provided"
+            + " through INTERMAGNET and acknowledgement templates"
+            + " can be found at www.intermagnet.org",
+        },
+        "interval_specific": {
+            "minute": {"data_interval_type": "filtered 1-minute (00:15-01:45)"},
+            "second": {"data_interval_type": ""},
+        },
+    },
+    "CBB": {
+        "metadata": {
+            "station_name": "Cambridge Bay",
+            "agency_name": "Geological Survey of Canada (GSC)",
+            "geodetic_latitude": "69.200",
+            "geodetic_longitude": "255.000",
+            "elevation": "0",
+            "sensor_orientation": "XYZF",
+            "reported": "XYZF",
+            "sensor_sampling_rate": 100.0,
+            "is_gin": False,
+            "is_intermagnet": False,
+            "conditions_of_use": "The Conditions of Use for data provided"
+            + " through INTERMAGNET and acknowledgement templates"
+            + " can be found at www.intermagnet.org",
+        },
+        "interval_specific": {
+            "minute": {"data_interval_type": "filtered 1-minute (00:15-01:45)"},
+            "second": {"data_interval_type": ""},
+        },
+    },
+    "EUA": {
+        "metadata": {
+            "station_name": "Eureka",
+            "agency_name": "Geological Survey of Canada (GSC)",
+            "geodetic_latitude": "55.300",
+            "geodetic_longitude": "282.300",
+            "elevation": "0",
+            "sensor_orientation": "XYZF",
+            "reported": "XYZF",
+            "sensor_sampling_rate": 100.0,
+            "is_gin": False,
+            "is_intermagnet": False,
+            "conditions_of_use": "The Conditions of Use for data provided"
+            + " through INTERMAGNET and acknowledgement templates"
+            + " can be found at www.intermagnet.org",
+        },
+        "interval_specific": {
+            "minute": {"data_interval_type": "filtered 1-minute (00:15-01:45)"},
+            "second": {"data_interval_type": ""},
+        },
+    },
+    "FCC": {
+        "metadata": {
+            "station_name": "Fort Churchill",
+            "agency_name": "Geological Survey of Canada (GSC)",
+            "geodetic_latitude": "58.800",
+            "geodetic_longitude": "265.900",
+            "elevation": "0",
+            "sensor_orientation": "XYZF",
+            "reported": "XYZF",
+            "sensor_sampling_rate": 100.0,
+            "is_gin": False,
+            "is_intermagnet": False,
+            "conditions_of_use": "The Conditions of Use for data provided"
+            + " through INTERMAGNET and acknowledgement templates"
+            + " can be found at www.intermagnet.org",
+        },
+        "interval_specific": {
+            "minute": {"data_interval_type": "filtered 1-minute (00:15-01:45)"},
+            "second": {"data_interval_type": ""},
+        },
+    },
+    "HAD": {
+        "metadata": {
+            "station_name": "Hartland",
+            "agency_name": "British Geological Survey (BGS)",
+            "geodetic_latitude": "51.000",
+            "geodetic_longitude": "355.500",
+            "elevation": "0",
+            "sensor_orientation": "HDZF",
+            "reported": "HDZF",
+            "sensor_sampling_rate": 100.0,
+            "is_gin": False,
+            "is_intermagnet": False,
+            "conditions_of_use": "The Conditions of Use for data provided"
+            + " through INTERMAGNET and acknowledgement templates"
+            + " can be found at www.intermagnet.org",
+        },
+        "interval_specific": {
+            "minute": {"data_interval_type": "filtered 1-minute (00:15-01:45)"},
+            "second": {"data_interval_type": ""},
+        },
+    },
+    "HER": {
+        "metadata": {
+            "station_name": "Hermanus",
+            "agency_name": "National Research Foundation",
+            "geodetic_latitude": "-34.400",
+            "geodetic_longitude": "19.200",
+            "elevation": "0",
+            "sensor_orientation": "HDZF",
+            "reported": "HDZF",
+            "sensor_sampling_rate": 100.0,
+            "is_gin": False,
+            "is_intermagnet": False,
+            "conditions_of_use": "The Conditions of Use for data provided"
+            + " through INTERMAGNET and acknowledgement templates"
+            + " can be found at www.intermagnet.org",
+        },
+        "interval_specific": {
+            "minute": {"data_interval_type": "filtered 1-minute (00:15-01:45)"},
+            "second": {"data_interval_type": ""},
+        },
+    },
+    "IQA": {
+        "metadata": {
+            "station_name": "Iqaluit",
+            "agency_name": "Geological Survey of Canada (GSC)",
+            "geodetic_latitude": "63.800",
+            "geodetic_longitude": "291.500",
+            "elevation": "0",
+            "sensor_orientation": "XYZF",
+            "reported": "XYZF",
+            "sensor_sampling_rate": 100.0,
+            "is_gin": False,
+            "is_intermagnet": False,
+            "conditions_of_use": "The Conditions of Use for data provided"
+            + " through INTERMAGNET and acknowledgement templates"
+            + " can be found at www.intermagnet.org",
+        },
+        "interval_specific": {
+            "minute": {"data_interval_type": "filtered 1-minute (00:15-01:45)"},
+            "second": {"data_interval_type": ""},
+        },
+    },
+    "MEA": {
+        "metadata": {
+            "station_name": "Meanook",
+            "agency_name": "Geological Survey of Canada (GSC)",
+            "geodetic_latitude": "54.600",
+            "geodetic_longitude": "246.700",
+            "elevation": "0",
+            "sensor_orientation": "XYZF",
+            "reported": "XYZF",
+            "sensor_sampling_rate": 100.0,
+            "is_gin": False,
+            "is_intermagnet": False,
+            "conditions_of_use": "The Conditions of Use for data provided"
+            + " through INTERMAGNET and acknowledgement templates"
+            + " can be found at www.intermagnet.org",
+        },
+        "interval_specific": {
+            "minute": {"data_interval_type": "filtered 1-minute (00:15-01:45)"},
+            "second": {"data_interval_type": ""},
+        },
+    },
+    "OTT": {
+        "metadata": {
+            "station_name": "Ottowa",
+            "agency_name": "Geological Survey of Canada (GSC)",
+            "geodetic_latitude": "45.400",
+            "geodetic_longitude": "284.500",
+            "elevation": "0",
+            "sensor_orientation": "XYZF",
+            "reported": "XYZF",
+            "sensor_sampling_rate": 100.0,
+            "is_gin": False,
+            "is_intermagnet": False,
+            "conditions_of_use": "The Conditions of Use for data provided"
+            + " through INTERMAGNET and acknowledgement templates"
+            + " can be found at www.intermagnet.org",
+        },
+        "interval_specific": {
+            "minute": {"data_interval_type": "filtered 1-minute (00:15-01:45)"},
+            "second": {"data_interval_type": ""},
+        },
+    },
+    "RES": {
+        "metadata": {
+            "station_name": "Resolute Bay",
+            "agency_name": "Geological Survey of Canada (GSC)",
+            "geodetic_latitude": "74.700",
+            "geodetic_longitude": "265.100",
+            "elevation": "0",
+            "sensor_orientation": "XYZF",
+            "reported": "XYZF",
+            "sensor_sampling_rate": 100.0,
+            "is_gin": False,
+            "is_intermagnet": False,
+            "conditions_of_use": "The Conditions of Use for data provided"
+            + " through INTERMAGNET and acknowledgement templates"
+            + " can be found at www.intermagnet.org",
+        },
+        "interval_specific": {
+            "minute": {"data_interval_type": "filtered 1-minute (00:15-01:45)"},
+            "second": {"data_interval_type": ""},
+        },
+    },
+    "SNK": {
+        "metadata": {
+            "station_name": "Sanikiluaq",
+            "agency_name": "Geological Survey of Canada (GSC)",
+            "geodetic_latitude": "62.400",
+            "geodetic_longitude": "245.500",
+            "elevation": "0",
+            "sensor_orientation": "XYZF",
+            "reported": "XYZF",
+            "sensor_sampling_rate": 100.0,
+            "is_gin": False,
+            "is_intermagnet": False,
+            "conditions_of_use": "The Conditions of Use for data provided"
+            + " through INTERMAGNET and acknowledgement templates"
+            + " can be found at www.intermagnet.org",
+        },
+        "interval_specific": {
+            "minute": {"data_interval_type": "filtered 1-minute (00:15-01:45)"},
+            "second": {"data_interval_type": ""},
+        },
+    },
+    "STJ": {
+        "metadata": {
+            "station_name": "St Johns",
+            "agency_name": "Geological Survey of Canada (GSC)",
+            "geodetic_latitude": "47.600",
+            "geodetic_longitude": "307.300",
+            "elevation": "0",
+            "sensor_orientation": "XYZF",
+            "reported": "XYZF",
+            "sensor_sampling_rate": 100.0,
+            "is_gin": False,
+            "is_intermagnet": False,
+            "conditions_of_use": "The Conditions of Use for data provided"
+            + " through INTERMAGNET and acknowledgement templates"
+            + " can be found at www.intermagnet.org",
+        },
+        "interval_specific": {
+            "minute": {"data_interval_type": "filtered 1-minute (00:15-01:45)"},
+            "second": {"data_interval_type": ""},
+        },
+    },
+    "VIC": {
+        "metadata": {
+            "station_name": "Victoria",
+            "agency_name": "Geological Survey of Canada (GSC)",
+            "geodetic_latitude": "48.600",
+            "geodetic_longitude": "236.600",
+            "elevation": "0",
+            "sensor_orientation": "XYZF",
+            "reported": "XYZF",
+            "sensor_sampling_rate": 100.0,
+            "is_gin": False,
+            "is_intermagnet": False,
+            "conditions_of_use": "The Conditions of Use for data provided"
+            + " through INTERMAGNET and acknowledgement templates"
+            + " can be found at www.intermagnet.org",
+        },
+        "interval_specific": {
+            "minute": {"data_interval_type": "filtered 1-minute (00:15-01:45)"},
+            "second": {"data_interval_type": ""},
+        },
+    },
+    "YKC": {
+        "metadata": {
+            "station_name": "Yellowknife",
+            "agency_name": "Geological Survey of Canada (GSC)",
+            "geodetic_latitude": "62.400",
+            "geodetic_longitude": "245.500",
+            "elevation": "0",
+            "sensor_orientation": "XYZF",
+            "reported": "XYZF",
+            "sensor_sampling_rate": 100.0,
+            "is_gin": False,
+            "is_intermagnet": False,
+            "conditions_of_use": "The Conditions of Use for data provided"
+            + " through INTERMAGNET and acknowledgement templates"
+            + " can be found at www.intermagnet.org",
+        },
+        "interval_specific": {
+            "minute": {"data_interval_type": "filtered 1-minute (00:15-01:45)"},
+            "second": {"data_interval_type": ""},
+        },
     },
-    'YKC': {
-        'metadata': {
-            'station_name': 'Yellowknife',
-            'agency_name': 'Geological Survey of Canada (GSC)',
-            'geodetic_latitude': '62.400',
-            'geodetic_longitude': '245.500',
-            'elevation': '0',
-            'sensor_orientation': 'XYZF',
-            'reported': 'XYZF',
-            'sensor_sampling_rate': 100.0,
-            'is_gin': False,
-            'is_intermagnet': False,
-            'conditions_of_use': 'The Conditions of Use for data provided' +
-                    ' through INTERMAGNET and acknowledgement templates' +
-                    ' can be found at www.intermagnet.org'
-        },
-        'interval_specific': {
-            'minute': {
-                'data_interval_type': 'filtered 1-minute (00:15-01:45)'
-            },
-            'second': {
-                'data_interval_type': ''
-            }
-        }
-    }
 }
 
 
 DEFAULT_INTERVAL_SPECIFIC = {
-        'minute': {'data_interval_type': 'filtered 1-minute (00:15-01:45) '},
-        'second': {'data_interval_type': 'Average 1-Second'}
+    "minute": {"data_interval_type": "filtered 1-minute (00:15-01:45) "},
+    "second": {"data_interval_type": "Average 1-Second"},
 }
 
 
@@ -1023,8 +957,7 @@ class ObservatoryMetadata(object):
 
     def __init__(self, metadata=None, interval_specific=None):
         self.metadata = metadata or DEFAULT_METADATA
-        self.interval_specific = interval_specific or \
-                DEFAULT_INTERVAL_SPECIFIC
+        self.interval_specific = interval_specific or DEFAULT_INTERVAL_SPECIFIC
 
     def set_metadata(self, stats, observatory, channel, type, interval):
         """Set timeseries metadata (aka a traces stats)
@@ -1048,20 +981,19 @@ class ObservatoryMetadata(object):
         obspy.core.trace.stats
           the combined stats and the default metadata.
         """
-        stats['channel'] = channel
-        stats['data_interval'] = interval
-        stats['data_type'] = type
+        stats["channel"] = channel
+        stats["data_interval"] = interval
+        stats["data_type"] = type
         if observatory not in self.metadata:
             return
         # copy in standard metadata
-        metadata = self.metadata[observatory]['metadata']
+        metadata = self.metadata[observatory]["metadata"]
         for key in metadata:
             stats[key] = metadata[key]
         # copy in interval specific metadata
         interval_specific = self.interval_specific
-        if 'interval_specific' in self.metadata[observatory]:
-            interval_specific = \
-                self.metadata[observatory]['interval_specific']
+        if "interval_specific" in self.metadata[observatory]:
+            interval_specific = self.metadata[observatory]["interval_specific"]
         # stats['data_interval_type'] = data_interval_type[interval]
         if interval in interval_specific:
             for key in interval_specific[interval]:
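
For reviewers checking the reformatted metadata end to end, here is a minimal sketch of how the class is exercised; it assumes only the constructor and set_metadata signatures shown above, and the BOU values come from the dict in this diff (the "XXX" entry is hypothetical):

import obspy
from geomagio.ObservatoryMetadata import ObservatoryMetadata

# Custom metadata may replace DEFAULT_METADATA, following the same schema.
custom = {
    "XXX": {
        "metadata": {"station_name": "Example"},
        "interval_specific": {
            "minute": {"data_interval_type": "filtered 1-minute (00:15-01:45)"}
        },
    }
}
ObservatoryMetadata(metadata=custom)

# With the defaults, set_metadata copies station fields and interval-specific
# fields onto a trace's stats in place.
trace = obspy.core.Trace()
ObservatoryMetadata().set_metadata(
    trace.stats, observatory="BOU", channel="H", type="variation", interval="minute"
)
print(trace.stats.station_name)  # "Boulder"
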
diff --git a/geomagio/PlotTimeseriesFactory.py b/geomagio/PlotTimeseriesFactory.py
index b3be6cfe8d8f5c96c034f0e02200768b26b35616..db9574e4f692a22471e761c1c32b2f26eceaac2f 100644
--- a/geomagio/PlotTimeseriesFactory.py
+++ b/geomagio/PlotTimeseriesFactory.py
@@ -8,17 +8,32 @@ from .TimeseriesFactory import TimeseriesFactory
 class PlotTimeseriesFactory(TimeseriesFactory):
     """TimeseriesFactory that generates a plot.
     """
+
     def __init__(self, *args, **kwargs):
         TimeseriesFactory.__init__(self, *args, **kwargs)
 
-    def get_timeseries(self, starttime, endtime, observatory=None,
-            channels=None, type=None, interval=None):
+    def get_timeseries(
+        self,
+        starttime,
+        endtime,
+        observatory=None,
+        channels=None,
+        type=None,
+        interval=None,
+    ):
         """This factory does not support get_timeseries.
         """
         raise NotImplementedError('"get_timeseries" not implemented')
 
-    def put_timeseries(self, timeseries, starttime=None, endtime=None,
-            channels=None, type=None, interval=None):
+    def put_timeseries(
+        self,
+        timeseries,
+        starttime=None,
+        endtime=None,
+        channels=None,
+        type=None,
+        interval=None,
+    ):
         """Store timeseries data.
 
         Parameters
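
Because get_timeseries raises NotImplementedError, this factory is output-only. A minimal sketch of feeding it a stream (the synthetic trace is illustrative; how put_timeseries renders the plot is outside this hunk):

import numpy
import obspy
from geomagio.PlotTimeseriesFactory import PlotTimeseriesFactory

# Build a trivial one-channel stream; real callers pass data obtained
# from another TimeseriesFactory.
trace = obspy.core.Trace(numpy.zeros(60))
trace.stats.channel = "H"
PlotTimeseriesFactory().put_timeseries(obspy.core.Stream([trace]))
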
diff --git a/geomagio/StreamConverter.py b/geomagio/StreamConverter.py
index da14f4cdf3d20e31a5eaea863c4e91cb39643122..4ef3232c5db1ef5b0ec7cf4eb62357a9d5550413 100644
--- a/geomagio/StreamConverter.py
+++ b/geomagio/StreamConverter.py
@@ -26,16 +26,20 @@ def get_geo_from_mag(mag):
     obspy.core.Stream
         new stream object containing geographic components X, Y, Z, and F.
     """
-    h = mag.select(channel='H')[0]
-    d = mag.select(channel='D')[0]
-    z = mag.select(channel='Z')
-    f = mag.select(channel='F')
+    h = mag.select(channel="H")[0]
+    d = mag.select(channel="D")[0]
+    z = mag.select(channel="Z")
+    f = mag.select(channel="F")
     mag_h = h.data
     mag_d = d.data
     (geo_x, geo_y) = ChannelConverter.get_geo_from_mag(mag_h, mag_d)
-    return obspy.core.Stream((
-        __get_trace('X', h.stats, geo_x),
-        __get_trace('Y', d.stats, geo_y))) + z + f
+    return (
+        obspy.core.Stream(
+            (__get_trace("X", h.stats, geo_x), __get_trace("Y", d.stats, geo_y))
+        )
+        + z
+        + f
+    )
 
 
 def get_geo_from_obs(obs):
@@ -67,14 +71,13 @@ def get_deltaf_from_geo(geo):
     obspy.core.Stream
         stream object containing delta f values
     """
-    x = geo.select(channel='X')[0]
-    y = geo.select(channel='Y')[0]
-    z = geo.select(channel='Z')[0]
-    fs = geo.select(channel='F')[0]
+    x = geo.select(channel="X")[0]
+    y = geo.select(channel="Y")[0]
+    z = geo.select(channel="Z")[0]
+    fs = geo.select(channel="F")[0]
     fv = ChannelConverter.get_computed_f_using_squares(x, y, z)
     G = ChannelConverter.get_deltaf(fv, fs)
-    return obspy.core.Stream((
-            __get_trace('G', x.stats, G), ))
+    return obspy.core.Stream((__get_trace("G", x.stats, G),))
 
 
 def get_deltaf_from_obs(obs):
@@ -90,14 +93,13 @@ def get_deltaf_from_obs(obs):
     obspy.core.Stream
         stream object containing delta f values
     """
-    h = obs.select(channel='H')[0]
-    z = obs.select(channel='Z')[0]
-    fs = obs.select(channel='F')[0]
+    h = obs.select(channel="H")[0]
+    z = obs.select(channel="Z")[0]
+    fs = obs.select(channel="F")[0]
     e = __get_obs_e_from_obs(obs)
     fv = ChannelConverter.get_computed_f_using_squares(h, e, z)
     G = ChannelConverter.get_deltaf(fv, fs)
-    return obspy.core.Stream((
-            __get_trace('G', h.stats, G), ))
+    return obspy.core.Stream((__get_trace("G", h.stats, G),))
 
 
 def get_mag_from_geo(geo):
@@ -113,16 +115,20 @@ def get_mag_from_geo(geo):
     obspy.core.Stream
         new stream object containing magnetic components H, D, Z, and F.
     """
-    x = geo.select(channel='X')[0]
-    y = geo.select(channel='Y')[0]
-    z = geo.select(channel='Z')
-    f = geo.select(channel='F')
+    x = geo.select(channel="X")[0]
+    y = geo.select(channel="Y")[0]
+    z = geo.select(channel="Z")
+    f = geo.select(channel="F")
     geo_x = x.data
     geo_y = y.data
     (mag_h, mag_d) = ChannelConverter.get_mag_from_geo(geo_x, geo_y)
-    return obspy.core.Stream((
-            __get_trace('H', x.stats, mag_h),
-            __get_trace('D', y.stats, mag_d))) + z + f
+    return (
+        obspy.core.Stream(
+            (__get_trace("H", x.stats, mag_h), __get_trace("D", y.stats, mag_d))
+        )
+        + z
+        + f
+    )
 
 
 def get_mag_from_obs(obs):
@@ -138,18 +144,23 @@ def get_mag_from_obs(obs):
     obspy.core.Stream
         new stream object containing magnetic components H, D, Z, and F.
     """
-    h = obs.select(channel='H')[0]
+    h = obs.select(channel="H")[0]
     e = __get_obs_e_from_obs(obs)
-    z = obs.select(channel='Z')
-    f = obs.select(channel='F')
+    z = obs.select(channel="Z")
+    f = obs.select(channel="F")
     obs_h = h.data
     obs_e = e.data
     d0 = ChannelConverter.get_radians_from_minutes(
-            numpy.float64(e.stats.declination_base) / 10)
+        numpy.float64(e.stats.declination_base) / 10
+    )
     (mag_h, mag_d) = ChannelConverter.get_mag_from_obs(obs_h, obs_e, d0)
-    return obspy.core.Stream((
-            __get_trace('H', h.stats, mag_h),
-            __get_trace('D', e.stats, mag_d))) + z + f
+    return (
+        obspy.core.Stream(
+            (__get_trace("H", h.stats, mag_h), __get_trace("D", e.stats, mag_d))
+        )
+        + z
+        + f
+    )
 
 
 def get_obs_from_geo(geo, include_d=False):
@@ -184,23 +195,22 @@ def get_obs_from_mag(mag, include_d=False):
     obspy.core.Stream
         new stream object containing observatory components H, D, E, Z, and F
     """
-    h = mag.select(channel='H')[0]
-    d = mag.select(channel='D')[0]
-    z = mag.select(channel='Z')
-    f = mag.select(channel='F')
+    h = mag.select(channel="H")[0]
+    d = mag.select(channel="D")[0]
+    z = mag.select(channel="Z")
+    f = mag.select(channel="F")
 
     mag_h = h.data
     mag_d = d.data
     d0 = ChannelConverter.get_radians_from_minutes(
-        numpy.float64(d.stats.declination_base) / 10)
+        numpy.float64(d.stats.declination_base) / 10
+    )
     (obs_h, obs_e) = ChannelConverter.get_obs_from_mag(mag_h, mag_d, d0)
 
-    traces = (
-        __get_trace('H', h.stats, obs_h),
-        __get_trace('E', d.stats, obs_e))
+    traces = (__get_trace("H", h.stats, obs_h), __get_trace("E", d.stats, obs_e))
     if include_d:
         obs_d = ChannelConverter.get_obs_d_from_obs(obs_h, obs_e)
-        traces = traces + (__get_trace('D', d.stats, obs_d),)
+        traces = traces + (__get_trace("D", d.stats, obs_d),)
     return obspy.core.Stream(traces) + z + f
 
 
@@ -221,16 +231,16 @@ def get_obs_from_obs(obs, include_e=False, include_d=False):
     obspy.core.Stream
         new stream object containing observatory components H, D, E, Z, and F
     """
-    h = obs.select(channel='H')[0]
-    z = obs.select(channel='Z')
-    f = obs.select(channel='F')
-    traces = (h, )
+    h = obs.select(channel="H")[0]
+    z = obs.select(channel="Z")
+    f = obs.select(channel="F")
+    traces = (h,)
     if include_d:
         d = __get_obs_d_from_obs(obs)
-        traces = traces + (d, )
+        traces = traces + (d,)
     if include_e:
         e = __get_obs_e_from_obs(obs)
-        traces = traces + (e, )
+        traces = traces + (e,)
     return obspy.core.Stream(traces) + z + f
 
 
@@ -272,12 +282,13 @@ def __get_obs_d_from_obs(obs):
         observatory component D.
     """
     try:
-        d = obs.select(channel='D')[0]
+        d = obs.select(channel="D")[0]
     except IndexError:
-        h = obs.select(channel='H')[0]
-        e = obs.select(channel='E')[0]
-        d = __get_trace('D', e.stats,
-                ChannelConverter.get_obs_d_from_obs(h.data, e.data))
+        h = obs.select(channel="H")[0]
+        e = obs.select(channel="E")[0]
+        d = __get_trace(
+            "D", e.stats, ChannelConverter.get_obs_d_from_obs(h.data, e.data)
+        )
     return d
 
 
@@ -297,10 +308,11 @@ def __get_obs_e_from_obs(obs):
         observatory component E.
     """
     try:
-        e = obs.select(channel='E')[0]
+        e = obs.select(channel="E")[0]
     except IndexError:
-        h = obs.select(channel='H')[0]
-        d = obs.select(channel='D')[0]
-        e = __get_trace('E', d.stats,
-                ChannelConverter.get_obs_e_from_obs(h.data, d.data))
+        h = obs.select(channel="H")[0]
+        d = obs.select(channel="D")[0]
+        e = __get_trace(
+            "E", d.stats, ChannelConverter.get_obs_e_from_obs(h.data, d.data)
+        )
     return e
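
For orientation, the H/E to H/D conversions above amount to a plane rotation through the declination baseline d0. The following is a minimal standalone sketch of what ChannelConverter is assumed to compute; the function names mirror the calls above, but these are hypothetical re-implementations for illustration only and the real module may differ in details such as NaN handling.

# Minimal numpy sketch of the rotation ChannelConverter is assumed to perform.
import numpy

def get_radians_from_minutes(minutes):
    # arcminutes -> radians
    return minutes * numpy.pi / (180.0 * 60.0)

def get_mag_from_obs(obs_h, obs_e, d0):
    # observatory (H, E) -> magnetic (H, D); d0 is the declination baseline
    return numpy.hypot(obs_h, obs_e), d0 + numpy.arctan2(obs_e, obs_h)

def get_obs_from_mag(mag_h, mag_d, d0):
    # inverse rotation back into the observatory frame
    return mag_h * numpy.cos(mag_d - d0), mag_h * numpy.sin(mag_d - d0)

# the "/ 10" in the calls above suggests declination_base is stored in
# tenths of arcminutes; the value here is invented
d0 = get_radians_from_minutes(5527.0 / 10)
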
diff --git a/geomagio/StreamTimeseriesFactory.py b/geomagio/StreamTimeseriesFactory.py
index 77a4214add378f70bde47acca5f10734176f58eb..56ae0dcb4fa001fcf935045052a1112913b4032f 100644
--- a/geomagio/StreamTimeseriesFactory.py
+++ b/geomagio/StreamTimeseriesFactory.py
@@ -19,26 +19,42 @@ class StreamTimeseriesFactory(TimeseriesFactory):
     --------
-    Timeseriesfactory
+    TimeseriesFactory
     """
+
     def __init__(self, factory, stream):
         self.factory = factory
         self.stream = stream
         self.stream_data = None
 
-    def get_timeseries(self, starttime, endtime, observatory=None,
-            channels=None, type=None, interval=None):
+    def get_timeseries(
+        self,
+        starttime,
+        endtime,
+        observatory=None,
+        channels=None,
+        type=None,
+        interval=None,
+    ):
         """Get timeseries using stream as input.
         """
         if self.stream_data is None:
             # only read stream once
             self.stream_data = self.stream.read()
         return self.factory.parse_string(
-                data=self.stream_data,
-                starttime=starttime,
-                endtime=endtime,
-                observatory=observatory)
+            data=self.stream_data,
+            starttime=starttime,
+            endtime=endtime,
+            observatory=observatory,
+        )
 
-    def put_timeseries(self, timeseries, starttime=None, endtime=None,
-            channels=None, type=None, interval=None):
+    def put_timeseries(
+        self,
+        timeseries,
+        starttime=None,
+        endtime=None,
+        channels=None,
+        type=None,
+        interval=None,
+    ):
         """Put timeseries using stream as output.
         """
         self.factory.write_file(self.stream, timeseries, channels)
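
A hypothetical end-to-end use of StreamTimeseriesFactory, piping IAGA2002 text from stdin to stdout; the observatory code, times, and channels are illustrative, and IAGA2002Factory stands in for any parsing factory. Note that get_timeseries caches stream.read(), so repeated calls reuse the same input.

import sys

from obspy.core import UTCDateTime

from geomagio.StreamTimeseriesFactory import StreamTimeseriesFactory
from geomagio.iaga2002 import IAGA2002Factory

# wrap a parsing factory around stdin for reads and stdout for writes
reader = StreamTimeseriesFactory(factory=IAGA2002Factory(), stream=sys.stdin)
timeseries = reader.get_timeseries(
    starttime=UTCDateTime("2024-01-01T00:00:00Z"),
    endtime=UTCDateTime("2024-01-01T23:59:00Z"),
    observatory="BOU",
)

writer = StreamTimeseriesFactory(factory=IAGA2002Factory(), stream=sys.stdout)
writer.put_timeseries(timeseries, channels=("H", "E", "Z", "F"))
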
diff --git a/geomagio/TimeseriesFactory.py b/geomagio/TimeseriesFactory.py
index effc223cedc3a599aad92799052053c2fd9643a8..d62576e3f7ea4b85cdd244dd35f37f09ed3650d2 100644
--- a/geomagio/TimeseriesFactory.py
+++ b/geomagio/TimeseriesFactory.py
@@ -42,9 +42,16 @@ class TimeseriesFactory(object):
         Interval in seconds between URLs.
         Intervals begin at the unix epoch (1970-01-01T00:00:00Z)
     """
-    def __init__(self, observatory=None, channels=('H', 'D', 'Z', 'F'),
-            type='variation', interval='minute',
-            urlTemplate='', urlInterval=-1):
+
+    def __init__(
+        self,
+        observatory=None,
+        channels=("H", "D", "Z", "F"),
+        type="variation",
+        interval="minute",
+        urlTemplate="",
+        urlInterval=-1,
+    ):
         self.observatory = observatory
         self.channels = channels
         self.type = type
@@ -52,8 +59,15 @@ class TimeseriesFactory(object):
         self.urlTemplate = urlTemplate
         self.urlInterval = urlInterval
 
-    def get_timeseries(self, starttime, endtime, observatory=None,
-            channels=None, type=None, interval=None):
+    def get_timeseries(
+        self,
+        starttime,
+        endtime,
+        observatory=None,
+        channels=None,
+        type=None,
+        interval=None,
+    ):
         """Get timeseries data.
 
         Support for specific channels, types, and intervals varies
@@ -97,28 +111,29 @@ class TimeseriesFactory(object):
 
         timeseries = obspy.core.Stream()
         urlIntervals = Util.get_intervals(
-                starttime=starttime,
-                endtime=endtime,
-                size=self.urlInterval)
+            starttime=starttime, endtime=endtime, size=self.urlInterval
+        )
         for urlInterval in urlIntervals:
             url = self._get_url(
-                    observatory=observatory,
-                    date=urlInterval['start'],
-                    type=type,
-                    interval=interval,
-                    channels=channels)
+                observatory=observatory,
+                date=urlInterval["start"],
+                type=type,
+                interval=interval,
+                channels=channels,
+            )
             try:
                 data = Util.read_url(url)
             except IOError as e:
-                print("Error reading url: %s, continuing" % str(e),
-                        file=sys.stderr)
+                print("Error reading url: %s, continuing" % str(e), file=sys.stderr)
                 continue
             try:
-                timeseries += self.parse_string(data,
-                        observatory=observatory,
-                        type=type,
-                        interval=interval,
-                        channels=channels)
+                timeseries += self.parse_string(
+                    data,
+                    observatory=observatory,
+                    type=type,
+                    interval=interval,
+                    channels=channels,
+                )
             except NotImplementedError:
                 raise NotImplementedError('"get_timeseries" not implemented')
             except Exception as e:
@@ -131,11 +146,12 @@ class TimeseriesFactory(object):
             timeseries = filtered
         timeseries.merge()
         timeseries.trim(
-                starttime=starttime,
-                endtime=endtime,
-                nearest_sample=False,
-                pad=True,
-                fill_value=numpy.nan)
+            starttime=starttime,
+            endtime=endtime,
+            nearest_sample=False,
+            pad=True,
+            fill_value=numpy.nan,
+        )
         return timeseries
 
     def parse_string(self, data, **kwargs):
@@ -153,8 +169,15 @@ class TimeseriesFactory(object):
         """
         raise NotImplementedError('"parse_string" not implemented')
 
-    def put_timeseries(self, timeseries, starttime=None, endtime=None,
-            channels=None, type=None, interval=None):
+    def put_timeseries(
+        self,
+        timeseries,
+        starttime=None,
+        endtime=None,
+        channels=None,
+        type=None,
+        interval=None,
+    ):
         """Store timeseries data.
 
         Parameters
@@ -184,8 +207,8 @@ class TimeseriesFactory(object):
         if len(timeseries) == 0:
             # no data to put
             return
-        if not self.urlTemplate.startswith('file://'):
-            raise TimeseriesFactoryException('Only file urls are supported')
+        if not self.urlTemplate.startswith("file://"):
+            raise TimeseriesFactoryException("Only file urls are supported")
         channels = channels or self.channels
         type = type or self.type
         interval = interval or self.interval
@@ -196,44 +219,47 @@ class TimeseriesFactory(object):
         endtime = endtime or stats.endtime
 
         urlIntervals = Util.get_intervals(
-                starttime=starttime,
-                endtime=endtime,
-                size=self.urlInterval)
+            starttime=starttime, endtime=endtime, size=self.urlInterval
+        )
         for urlInterval in urlIntervals:
-            interval_start = urlInterval['start']
-            interval_end = urlInterval['end']
+            interval_start = urlInterval["start"]
+            interval_end = urlInterval["end"]
             if interval_start != interval_end:
                 interval_end = interval_end - delta
             url = self._get_url(
-                    observatory=observatory,
-                    date=interval_start,
-                    type=type,
-                    interval=interval,
-                    channels=channels)
+                observatory=observatory,
+                date=interval_start,
+                type=type,
+                interval=interval,
+                channels=channels,
+            )
             url_data = timeseries.slice(
-                    starttime=interval_start,
-                    # subtract delta to omit the sample at end: `[start, end)`
-                    endtime=interval_end)
+                starttime=interval_start,
+                # subtract delta to omit the sample at end: `[start, end)`
+                endtime=interval_end,
+            )
             url_file = Util.get_file_from_url(url, createParentDirectory=True)
             # existing data file, merge new data into existing
             if os.path.isfile(url_file):
                 try:
                     existing_data = Util.read_file(url_file)
-                    existing_data = self.parse_string(existing_data,
-                            observatory=url_data[0].stats.station,
-                            type=type,
-                            interval=interval,
-                            channels=channels)
+                    existing_data = self.parse_string(
+                        existing_data,
+                        observatory=url_data[0].stats.station,
+                        type=type,
+                        interval=interval,
+                        channels=channels,
+                    )
                     # TODO: make parse_string return the correct location code
                     for trace in existing_data:
                         # make location codes match, just in case
                         new_trace = url_data.select(
-                                network=trace.stats.network,
-                                station=trace.stats.station,
-                                channel=trace.stats.channel)[0]
+                            network=trace.stats.network,
+                            station=trace.stats.station,
+                            channel=trace.stats.channel,
+                        )[0]
                         trace.stats.location = new_trace.stats.location
-                    url_data = TimeseriesUtility.merge_streams(
-                            existing_data, url_data)
+                    url_data = TimeseriesUtility.merge_streams(existing_data, url_data)
                 except IOError:
                     # no data yet
                     pass
@@ -246,13 +272,13 @@ class TimeseriesFactory(object):
                 endtime=interval_end,
                 nearest_sample=False,
                 pad=True,
-                fill_value=numpy.nan)
-            with open(url_file, 'wb') as fh:
+                fill_value=numpy.nan,
+            )
+            with open(url_file, "wb") as fh:
                 try:
                     self.write_file(fh, url_data, channels)
                 except NotImplementedError:
-                    raise NotImplementedError(
-                            '"put_timeseries" not implemented')
+                    raise NotImplementedError('"put_timeseries" not implemented')
 
     def write_file(self, fh, timeseries, channels):
         """Write timeseries data to the given file object.
@@ -288,17 +314,17 @@ class TimeseriesFactory(object):
         TimeseriesFactoryException
             if url does not start with file://
         """
-        if not url.startswith('file://'):
-            raise TimeseriesFactoryException(
-                    'Only file urls are supported for writing')
-        filename = url.replace('file://', '')
+        if not url.startswith("file://"):
+            raise TimeseriesFactoryException("Only file urls are supported for writing")
+        filename = url.replace("file://", "")
         parent = os.path.dirname(filename)
         if not os.path.exists(parent):
             os.makedirs(parent)
         return filename
 
-    def _get_url(self, observatory, date, type='variation', interval='minute',
-            channels=None):
+    def _get_url(
+        self, observatory, date, type="variation", interval="minute", channels=None
+    ):
         """Get the url for a specified file.
 
         Replaces patterns (described in class docstring) with values based on
@@ -324,28 +350,28 @@ class TimeseriesFactory(object):
             if type or interval are not supported.
         """
         params = {
-            'date': date.datetime,
-            'i': self._get_interval_abbreviation(interval),
-            'interval': self._get_interval_name(interval),
+            "date": date.datetime,
+            "i": self._get_interval_abbreviation(interval),
+            "interval": self._get_interval_name(interval),
             # used by Hermanus
-            'minute': date.hour * 60 + date.minute,
+            "minute": date.hour * 60 + date.minute,
             # end Hermanus
             # used by Kakioka
-            'month': date.strftime('%b').lower(),
-            'MONTH': date.strftime('%b').upper(),
+            "month": date.strftime("%b").lower(),
+            "MONTH": date.strftime("%b").upper(),
             # end Kakioka
-            'obs': observatory.lower(),
-            'OBS': observatory.upper(),
-            't': self._get_type_abbreviation(type),
-            'type': self._get_type_name(type),
+            "obs": observatory.lower(),
+            "OBS": observatory.upper(),
+            "t": self._get_type_abbreviation(type),
+            "type": self._get_type_name(type),
             # LEGACY
             # old date properties, string.format supports any strftime format
             # i.e. '{date:%j}'
-            'julian': date.strftime('%j'),
-            'year': date.strftime('%Y'),
-            'ymd': date.strftime('%Y%m%d')
+            "julian": date.strftime("%j"),
+            "year": date.strftime("%Y"),
+            "ymd": date.strftime("%Y%m%d"),
         }
-        if '{' in self.urlTemplate:
+        if "{" in self.urlTemplate:
             # use new style string formatting
             return self.urlTemplate.format(**params)
         # use old style string interpolation
@@ -370,19 +396,18 @@ class TimeseriesFactory(object):
             if ``interval`` is not supported.
         """
         interval_abbr = None
-        if interval == 'daily':
-            interval_abbr = 'day'
-        elif interval == 'hourly':
-            interval_abbr = 'hor'
-        elif interval == 'minute':
-            interval_abbr = 'min'
-        elif interval == 'monthly':
-            interval_abbr = 'mon'
-        elif interval == 'second':
-            interval_abbr = 'sec'
+        if interval == "daily":
+            interval_abbr = "day"
+        elif interval == "hourly":
+            interval_abbr = "hor"
+        elif interval == "minute":
+            interval_abbr = "min"
+        elif interval == "monthly":
+            interval_abbr = "mon"
+        elif interval == "second":
+            interval_abbr = "sec"
         else:
-            raise TimeseriesFactoryException(
-                    'Unexpected interval "%s"' % interval)
+            raise TimeseriesFactoryException('Unexpected interval "%s"' % interval)
         return interval_abbr
 
     def _get_interval_name(self, interval):
@@ -404,13 +429,12 @@ class TimeseriesFactory(object):
             if ``interval`` is not supported.
         """
         interval_name = None
-        if interval == 'minute':
-            interval_name = 'OneMinute'
-        elif interval == 'second':
-            interval_name = 'OneSecond'
+        if interval == "minute":
+            interval_name = "OneMinute"
+        elif interval == "second":
+            interval_name = "OneSecond"
         else:
-            raise TimeseriesFactoryException(
-                    'Unsupported interval "%s"' % interval)
+            raise TimeseriesFactoryException('Unsupported interval "%s"' % interval)
         return interval_name
 
     def _get_type_abbreviation(self, type):
@@ -432,17 +456,16 @@ class TimeseriesFactory(object):
             if ``type`` is not supported.
         """
         type_abbr = None
-        if type == 'definitive':
-            type_abbr = 'd'
-        elif type == 'provisional' or type == 'adjusted':
-            type_abbr = 'p'
-        elif type == 'quasi-definitive':
-            type_abbr = 'q'
-        elif type == 'variation' or type == 'reported':
-            type_abbr = 'v'
+        if type == "definitive":
+            type_abbr = "d"
+        elif type == "provisional" or type == "adjusted":
+            type_abbr = "p"
+        elif type == "quasi-definitive":
+            type_abbr = "q"
+        elif type == "variation" or type == "reported":
+            type_abbr = "v"
         else:
-            raise TimeseriesFactoryException(
-                    'Unexpected type "%s"' % type)
+            raise TimeseriesFactoryException('Unexpected type "%s"' % type)
         return type_abbr
 
     def _get_type_name(self, type):
@@ -465,15 +488,14 @@ class TimeseriesFactory(object):
             if ``type`` is not supported.
         """
         type_name = None
-        if type == 'variation' or type == 'reported':
-            type_name = ''
-        elif type == 'provisional' or type == 'adjusted':
-            type_name = 'Provisional'
-        elif type == 'quasi-definitive' or type == 'quasidefinitive':
-            type_name = 'QuasiDefinitive'
-        elif type == 'definitive':
-            type_name = 'Definitive'
+        if type == "variation" or type == "reported":
+            type_name = ""
+        elif type == "provisional" or type == "adjusted":
+            type_name = "Provisional"
+        elif type == "quasi-definitive" or type == "quasidefinitive":
+            type_name = "QuasiDefinitive"
+        elif type == "definitive":
+            type_name = "Definitive"
         else:
-            raise TimeseriesFactoryException(
-                    'Unsupported type "%s"' % type)
+            raise TimeseriesFactoryException('Unsupported type "%s"' % type)
         return type_name
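
To make the urlTemplate substitution above concrete, the sketch below expands a made-up template through _get_url (a private helper, called directly here only for illustration). The pattern names ({obs}, {ymd}, {t}, {i}, {date:...}) come from the params dict above; the path itself is invented.

from obspy.core import UTCDateTime

from geomagio.TimeseriesFactory import TimeseriesFactory

factory = TimeseriesFactory(
    urlTemplate="file:///data/{obs}/{date:%Y}/{obs}{ymd}{t}{i}.{i}",
    urlInterval=86400,  # one file per day, aligned to the unix epoch
)
url = factory._get_url(
    observatory="BOU",
    date=UTCDateTime("2024-06-01"),
    type="variation",  # -> t = "v"
    interval="minute",  # -> i = "min"
)
# expected: file:///data/bou/2024/bou20240601vmin.min
print(url)
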
diff --git a/geomagio/TimeseriesFactoryException.py b/geomagio/TimeseriesFactoryException.py
index 06acf4473255a4c8e478ca5e218b5b2959fc4596..0bcd71b0148313d645cf4c6addcb01078d64d5db 100644
--- a/geomagio/TimeseriesFactoryException.py
+++ b/geomagio/TimeseriesFactoryException.py
@@ -5,4 +5,5 @@ Base class for exceptions thrown by factories.
 
 class TimeseriesFactoryException(Exception):
     """Base class for exceptions thrown by factories."""
+
     pass
diff --git a/geomagio/TimeseriesUtility.py b/geomagio/TimeseriesUtility.py
index 6b15ab433ab8af57521f83e0df1fb8d29acdfdfe..73e1023d1d82f25fd7cb77ae63ad384c10289737 100644
--- a/geomagio/TimeseriesUtility.py
+++ b/geomagio/TimeseriesUtility.py
@@ -6,8 +6,9 @@ import numpy
 import obspy.core
 
 
-def create_empty_trace(starttime, endtime, observatory,
-            channel, type, interval, network, station, location):
+def create_empty_trace(
+    starttime, endtime, observatory, channel, type, interval, network, station, location
+):
     """create an empty trace filled with nans.
 
     Parameters
@@ -43,7 +44,8 @@ def create_empty_trace(starttime, endtime, observatory,
     stats.channel = channel
     # Calculate first valid sample time based on interval
     trace_starttime = obspy.core.UTCDateTime(
-        numpy.ceil(starttime.timestamp / delta) * delta)
+        numpy.ceil(starttime.timestamp / delta) * delta
+    )
     stats.starttime = trace_starttime
     stats.delta = delta
     # Calculate number of valid samples up to or before endtime
@@ -66,16 +68,16 @@ def get_delta_from_interval(data_interval):
     int
         number of seconds for interval, or None if unknown
     """
-    if data_interval == 'tenhertz':
+    if data_interval == "tenhertz":
         delta = 0.1
-    elif data_interval == 'second':
-        delta = 1.
-    elif data_interval == 'minute':
-        delta = 60.
-    elif data_interval == 'hour':
-        delta = 3600.
-    elif data_interval == 'day':
-        delta = 86400.
+    elif data_interval == "second":
+        delta = 1.0
+    elif data_interval == "minute":
+        delta = 60.0
+    elif data_interval == "hour":
+        delta = 3600.0
+    elif data_interval == "day":
+        delta = 86400.0
     else:
         delta = None
     return delta
@@ -173,16 +175,12 @@ def get_trace_gaps(trace):
         else:
             if gap is not None:
                 # end of a gap
-                gap.extend([
-                        starttime + (i - 1) * delta,
-                        starttime + i * delta])
+                gap.extend([starttime + (i - 1) * delta, starttime + i * delta])
                 gaps.append(gap)
                 gap = None
     # check for gap at end
     if gap is not None:
-        gap.extend([
-                starttime + (length - 1) * delta,
-                starttime + length * delta])
+        gap.extend([starttime + (length - 1) * delta, starttime + length * delta])
         gaps.append(gap)
     return gaps
 
@@ -268,13 +266,14 @@ def has_all_channels(stream, channels, starttime, endtime):
     -------
     bool: True if data found across all channels between starttime/endtime
     """
-    input_gaps = get_merged_gaps(
-            get_stream_gaps(stream=stream, channels=channels))
+    input_gaps = get_merged_gaps(get_stream_gaps(stream=stream, channels=channels))
     for input_gap in input_gaps:
         # Check for gaps that include the entire range
-        if (starttime >= input_gap[0] and
-                starttime <= input_gap[1] and
-                endtime < input_gap[2]):
+        if (
+            starttime >= input_gap[0]
+            and starttime <= input_gap[1]
+            and endtime < input_gap[2]
+        ):
             return False
     return True
 
@@ -307,9 +306,7 @@ def has_any_channels(stream, channels, starttime, endtime):
             # no gaps in channel
             return True
         for gap in channel_gaps:
-            if not (starttime >= gap[0] and
-                    starttime <= gap[1] and
-                    endtime < gap[2]):
+            if not (starttime >= gap[0] and starttime <= gap[1] and endtime < gap[2]):
                 # gap doesn't span channel
                 return True
     # didn't find any data
@@ -331,9 +328,7 @@ def mask_stream(stream):
     """
     masked = obspy.core.Stream()
     for trace in stream:
-        masked += obspy.core.Trace(
-                numpy.ma.masked_invalid(trace.data),
-                trace.stats)
+        masked += obspy.core.Trace(numpy.ma.masked_invalid(trace.data), trace.stats)
     return masked
 
 
@@ -354,10 +349,11 @@ def unmask_stream(stream):
     unmasked = obspy.core.Stream()
     for trace in stream:
         unmasked += obspy.core.Trace(
-                trace.data.filled(fill_value=numpy.nan)
-                        if isinstance(trace.data, numpy.ma.MaskedArray)
-                        else trace.data,
-                trace.stats)
+            trace.data.filled(fill_value=numpy.nan)
+            if isinstance(trace.data, numpy.ma.MaskedArray)
+            else trace.data,
+            trace.stats,
+        )
     return unmasked
 
 
@@ -390,22 +386,24 @@ def merge_streams(*streams):
     for trace in merged:
         stats = trace.stats
         split_stream = split.select(
-                channel=stats.channel,
-                station=stats.station,
-                network=stats.network,
-                location=stats.location)
+            channel=stats.channel,
+            station=stats.station,
+            network=stats.network,
+            location=stats.location,
+        )
         if len(split_stream) == 0:
             readd += trace
     split += readd
 
     # merge data
     split.merge(
-            # 1 = do not interpolate
-            interpolation_samples=0,
-            # 1 = when there is overlap, use data from trace with last endtime
-            method=1,
-            # np.nan = work-around for (problematic) intermediate masked arrays
-            fill_value=numpy.nan)
+        # 0 = do not interpolate
+        interpolation_samples=0,
+        # 1 = when there is overlap, use data from trace with last endtime
+        method=1,
+        # np.nan = work-around for (problematic) intermediate masked arrays
+        fill_value=numpy.nan,
+    )
 
     # convert back to NaN filled array
     merged = unmask_stream(split)
@@ -463,9 +461,9 @@ def pad_and_trim_trace(trace, starttime, endtime):
         # pad to starttime
         cnt = int((trace_starttime - starttime) / trace_delta)
         if cnt > 0:
-            trace.data = numpy.concatenate([
-                    numpy.full(cnt, numpy.nan, dtype=numpy.float64),
-                    trace.data])
+            trace.data = numpy.concatenate(
+                [numpy.full(cnt, numpy.nan, dtype=numpy.float64), trace.data]
+            )
             trace_starttime = trace_starttime - trace_delta * cnt
             trace.stats.starttime = trace_starttime
     if trace_endtime > endtime:
@@ -477,6 +475,6 @@ def pad_and_trim_trace(trace, starttime, endtime):
         # pad to endtime
         cnt = int((endtime - trace_endtime) / trace.stats.delta)
         if cnt > 0:
-            trace.data = numpy.concatenate([
-                    trace.data,
-                    numpy.full(cnt, numpy.nan, dtype=numpy.float64)])
+            trace.data = numpy.concatenate(
+                [trace.data, numpy.full(cnt, numpy.nan, dtype=numpy.float64)]
+            )
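
A small sanity check of the gap bookkeeping above, under the assumption (consistent with the extend() calls) that each gap is reported as [first missing sample, last missing sample, next valid sample]. The trace values are made up.

import numpy
import obspy.core

from geomagio import TimeseriesUtility

trace = obspy.core.Trace(
    numpy.array([1.0, numpy.nan, numpy.nan, 4.0, 5.0]),
    {"starttime": obspy.core.UTCDateTime("2024-01-01"), "delta": 60.0},
)
# expected: one gap covering samples 1-2,
#   [00:01:00, 00:02:00, 00:03:00]
print(TimeseriesUtility.get_trace_gaps(trace))
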
diff --git a/geomagio/Util.py b/geomagio/Util.py
index 1c90dc8efb58c66b547dcb2178f139239d658603..4f02a749d94c593535f67d34653c5b924ed3ab5a 100644
--- a/geomagio/Util.py
+++ b/geomagio/Util.py
@@ -13,6 +13,7 @@ class ObjectView(object):
     d : dictionary
         The dictionary to wrap.
     """
+
     def __init__(self, d):
         self.__dict__ = d
 
@@ -46,9 +47,9 @@ def get_file_from_url(url, createParentDirectory=False):
     Exception
         if url does not start with file://
     """
-    if not url.startswith('file://'):
-        raise Exception('Only file urls are supported by get_file_from_url')
-    filename = url.replace('file://', '')
+    if not url.startswith("file://"):
+        raise Exception("Only file urls are supported by get_file_from_url")
+    filename = url.replace("file://", "")
     if createParentDirectory:
         parent = os.path.dirname(filename)
         if not os.path.exists(parent):
@@ -83,10 +84,7 @@ def get_intervals(starttime, endtime, size=86400, align=True, trim=False):
         which represent [intervalstart, intervalend).
     """
     if size <= 0:
-        return [{
-            'start': starttime,
-            'end': endtime
-        }]
+        return [{"start": starttime, "end": endtime}]
     if align:
         # align based on size
         time = starttime - (starttime.timestamp % size)
@@ -102,10 +100,7 @@ def get_intervals(starttime, endtime, size=86400, align=True, trim=False):
                 start = starttime
             if end > endtime:
                 end = endtime
-        intervals.append({
-            'start': start,
-            'end': end
-        })
+        intervals.append({"start": start, "end": end})
     return intervals
 
 
@@ -128,7 +123,7 @@ def read_file(filepath):
         if file does not exist
     """
     file_data = None
-    with open(filepath, 'r') as f:
+    with open(filepath, "r") as f:
         file_data = f.read()
     return file_data
 
@@ -161,6 +156,7 @@ def read_url(url, connect_timeout=15, max_redirects=5, timeout=300):
         pass
     # wait to import pycurl until it is needed
     import pycurl
+
     content = None
     out = BytesIO()
     curl = pycurl.Curl()
@@ -174,7 +170,7 @@ def read_url(url, connect_timeout=15, max_redirects=5, timeout=300):
         curl.setopt(pycurl.WRITEFUNCTION, out.write)
         curl.perform()
         content = out.getvalue()
-        content = content.decode('utf-8')
+        content = content.decode("utf-8")
     except pycurl.error as e:
         raise IOError(e.args)
     finally:
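
A short sketch of the alignment behavior of get_intervals: with the defaults (align=True, trim=False), the first interval snaps back to a multiple of size since the unix epoch, so a request starting mid-day still yields whole-day chunks. Times are illustrative.

from obspy.core import UTCDateTime

from geomagio.Util import get_intervals

intervals = get_intervals(
    starttime=UTCDateTime("2024-01-01T06:00:00Z"),
    endtime=UTCDateTime("2024-01-02T06:00:00Z"),
    size=86400,
)
# expected (aligned, untrimmed):
#   2024-01-01T00:00:00 -> 2024-01-02T00:00:00
#   2024-01-02T00:00:00 -> 2024-01-03T00:00:00
for interval in intervals:
    print(interval["start"], "->", interval["end"])
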
diff --git a/geomagio/WebService.py b/geomagio/WebService.py
index 3eb658dfe4f7ddd04554967b3c3d6e9aca0d82dc..324029279f7a1300ca71be3f0b596e44228c0043 100644
--- a/geomagio/WebService.py
+++ b/geomagio/WebService.py
@@ -18,27 +18,22 @@ from geomagio.WebServiceUsage import WebServiceUsage
 from obspy.core import UTCDateTime
 
 
-DEFAULT_DATA_TYPE = 'variation'
-DEFAULT_ELEMENTS = ('X', 'Y', 'Z', 'F')
-DEFAULT_OUTPUT_FORMAT = 'iaga2002'
-DEFAULT_SAMPLING_PERIOD = '60'
+DEFAULT_DATA_TYPE = "variation"
+DEFAULT_ELEMENTS = ("X", "Y", "Z", "F")
+DEFAULT_OUTPUT_FORMAT = "iaga2002"
+DEFAULT_SAMPLING_PERIOD = "60"
 ERROR_CODE_MESSAGES = {
-        204: 'No Data',
-        400: 'Bad Request',
-        404: 'Not Found',
-        409: 'Conflict',
-        500: 'Internal Server Error',
-        501: 'Not Implemented',
-        503: 'Service Unavailable'
+    204: "No Data",
+    400: "Bad Request",
+    404: "Not Found",
+    409: "Conflict",
+    500: "Internal Server Error",
+    501: "Not Implemented",
+    503: "Service Unavailable",
 }
-VALID_DATA_TYPES = [
-        'variation',
-        'adjusted',
-        'quasi-definitive',
-        'definitive'
-]
-VALID_OUTPUT_FORMATS = ['iaga2002', 'json']
-VALID_SAMPLING_PERIODS = ['1', '60']
+VALID_DATA_TYPES = ["variation", "adjusted", "quasi-definitive", "definitive"]
+VALID_OUTPUT_FORMATS = ["iaga2002", "json"]
+VALID_SAMPLING_PERIODS = ["1", "60"]
 
 
 def _get_param(params, key, required=False):
@@ -67,19 +62,23 @@ def _get_param(params, key, required=False):
     value = params.get(key)
     if isinstance(value, (list, tuple)):
         if len(value) > 1:
-            raise WebServiceException('"' + key +
-                    '" may only be specified once.')
+            raise WebServiceException('"' + key + '" may only be specified once.')
         value = escape(value[0])
     if value is None:
         if required:
-            raise WebServiceException('"' + key +
-                    '" is a required parameter.')
+            raise WebServiceException('"' + key + '" is a required parameter.')
     return value
 
 
 class WebService(object):
-    def __init__(self, factory=None, version=None, metadata=None,
-            usage_documentation=None, error_stream=sys.stderr):
+    def __init__(
+        self,
+        factory=None,
+        version=None,
+        metadata=None,
+        usage_documentation=None,
+        error_stream=sys.stderr,
+    ):
         self.error_stream = error_stream
         self.factory = factory or EdgeFactory()
         self.metadata = metadata or ObservatoryMetadata().metadata
@@ -88,20 +87,20 @@ class WebService(object):
 
     def __call__(self, environ, start_response):
         """Implement WSGI interface"""
-        if environ['QUERY_STRING'] == '':
+        if environ["QUERY_STRING"] == "":
             return self.usage_documentation.__call__(environ, start_response)
         try:
             # parse params
-            query = self.parse(parse_qs(environ['QUERY_STRING']))
+            query = self.parse(parse_qs(environ["QUERY_STRING"]))
             query._verify_parameters()
             self.output_format = query.output_format
         except Exception as e:
             message = str(e)
-            ftype = parse_qs(environ['QUERY_STRING']).get('format', [''])[0]
-            if ftype == 'json':
-                self.output_format = 'json'
+            ftype = parse_qs(environ["QUERY_STRING"]).get("format", [""])[0]
+            if ftype == "json":
+                self.output_format = "json"
             else:
-                self.output_format = 'iaga2002'
+                self.output_format = "iaga2002"
             error_body = self.error(400, message, environ, start_response)
             return [error_body]
         try:
@@ -109,13 +108,13 @@ class WebService(object):
             timeseries = self.fetch(query)
             # format timeseries
             timeseries_string = self.format_data(
-                    query, timeseries, start_response, environ)
+                query, timeseries, start_response, environ
+            )
             if isinstance(timeseries_string, str):
-                timeseries_string = timeseries_string.encode('utf8')
+                timeseries_string = timeseries_string.encode("utf8")
         except Exception as e:
             if self.error_stream:
-                print("Error processing request: %s" % str(e),
-                        file=self.error_stream)
+                print("Error processing request: %s" % str(e), file=self.error_stream)
             message = "Server error."
             error_body = self.error(500, message, environ, start_response)
             return [error_body]
@@ -124,13 +123,10 @@ class WebService(object):
     def error(self, code, message, environ, start_response):
         """Assign error_body value based on error format."""
         error_body = self.http_error(code, message, environ)
-        status = str(code) + ' ' + ERROR_CODE_MESSAGES[code]
-        start_response(status,
-                [
-                    ("Content-Type", "text/plain")
-                ])
+        status = str(code) + " " + ERROR_CODE_MESSAGES[code]
+        start_response(status, [("Content-Type", "text/plain")])
         if isinstance(error_body, str):
-            error_body = error_body.encode('utf8')
+            error_body = error_body.encode("utf8")
         return error_body
 
     def fetch(self, query):
@@ -146,17 +142,18 @@ class WebService(object):
         obspy.core.Stream
             timeseries object with requested data.
         """
-        if query.sampling_period == '1':
-            sampling_period = 'second'
-        if query.sampling_period == '60':
-            sampling_period = 'minute'
+        if query.sampling_period == "1":
+            sampling_period = "second"
+        if query.sampling_period == "60":
+            sampling_period = "minute"
         timeseries = self.factory.get_timeseries(
-                observatory=query.observatory_id,
-                channels=query.elements,
-                starttime=query.starttime,
-                endtime=query.endtime,
-                type=query.data_type,
-                interval=sampling_period)
+            observatory=query.observatory_id,
+            channels=query.elements,
+            starttime=query.starttime,
+            endtime=query.endtime,
+            type=query.data_type,
+            interval=sampling_period,
+        )
         return timeseries
 
     def format_data(self, query, timeseries, start_response, environ):
@@ -173,18 +170,12 @@ class WebService(object):
         unicode
           IAGA2002 formatted string.
         """
-        url = environ['HTTP_HOST'] + environ['PATH_INFO'] + \
-                environ['QUERY_STRING']
-        if query.output_format == 'json':
-            timeseries_string = IMFJSONWriter.format(timeseries,
-                    query.elements, url)
+        url = environ["HTTP_HOST"] + environ["PATH_INFO"] + environ["QUERY_STRING"]
+        if query.output_format == "json":
+            timeseries_string = IMFJSONWriter.format(timeseries, query.elements, url)
         else:
-            timeseries_string = IAGA2002Writer.format(timeseries,
-                    query.elements)
-        start_response('200 OK',
-                [
-                    ("Content-Type", "text/plain")
-                ])
+            timeseries_string = IAGA2002Writer.format(timeseries, query.elements)
+        start_response("200 OK", [("Content-Type", "text/plain")])
         return timeseries_string
 
     def http_error(self, code, message, environ):
@@ -195,15 +186,17 @@ class WebService(object):
         http_error_body : str
             body of http error message.
         """
-        query_string = environ['QUERY_STRING']
-        path_info = environ['PATH_INFO']
-        host = environ['HTTP_HOST']
-        if self.output_format == 'json':
-            http_error_body = self.json_error(code, message, path_info,
-                    query_string, host)
+        query_string = environ["QUERY_STRING"]
+        path_info = environ["PATH_INFO"]
+        host = environ["HTTP_HOST"]
+        if self.output_format == "json":
+            http_error_body = self.json_error(
+                code, message, path_info, query_string, host
+            )
         else:
-            http_error_body = self.iaga2002_error(code, message, path_info,
-                    query_string)
+            http_error_body = self.iaga2002_error(
+                code, message, path_info, query_string
+            )
         return http_error_body
 
     def iaga2002_error(self, code, message, path_info, query_string):
@@ -215,17 +208,28 @@ class WebService(object):
             body of iaga2002 error message.
         """
         status_message = ERROR_CODE_MESSAGES[code]
-        error_body = 'Error ' + str(code) + ': ' + status_message + \
-                '\n\n' + message + '\n\n' + \
-                'Usage details are available from ' + \
-                'http://geomag.usgs.gov/ws/edge/ \n\n' + \
-                'Request:\n' + \
-                path_info + '?' + query_string + '\n\n' + \
-                'Request Submitted:\n' + \
-                datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ") + '\n\n'
+        error_body = (
+            "Error "
+            + str(code)
+            + ": "
+            + status_message
+            + "\n\n"
+            + message
+            + "\n\n"
+            + "Usage details are available from "
+            + "http://geomag.usgs.gov/ws/edge/ \n\n"
+            + "Request:\n"
+            + path_info
+            + "?"
+            + query_string
+            + "\n\n"
+            + "Request Submitted:\n"
+            + datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ")
+            + "\n\n"
+        )
         # Check if there is version information available
         if self.version is not None:
-            error_body += 'Service version:\n' + str(self.version)
+            error_body += "Service version:\n" + str(self.version)
         return error_body
 
     def json_error(self, code, message, path_info, query_string, host):
@@ -237,18 +241,17 @@ class WebService(object):
             body of json error message.
         """
         error_dict = OrderedDict()
-        error_dict['type'] = "Error"
-        error_dict['metadata'] = OrderedDict()
-        error_dict['metadata']['status'] = 400
+        error_dict["type"] = "Error"
+        error_dict["metadata"] = OrderedDict()
+        error_dict["metadata"]["status"] = 400
         date = datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S.%fZ")
-        error_dict['metadata']['generated'] = date
-        error_dict['metadata']['url'] = host + path_info + '?' + query_string
+        error_dict["metadata"]["generated"] = date
+        error_dict["metadata"]["url"] = host + path_info + "?" + query_string
         status_message = ERROR_CODE_MESSAGES[code]
-        error_dict['metadata']['title'] = status_message
-        error_dict['metadata']['api'] = str(self.version)
-        error_dict['metadata']['error'] = message
-        error_body = dumps(error_dict,
-                ensure_ascii=True).encode('utf8')
+        error_dict["metadata"]["title"] = status_message
+        error_dict["metadata"]["api"] = str(self.version)
+        error_dict["metadata"]["error"] = message
+        error_body = dumps(error_dict, ensure_ascii=True)  # str; encoded by error()
         error_body = str(error_body)
         return error_body
 
@@ -271,13 +274,13 @@ class WebService(object):
             if any parameters are not supported.
         """
         # Get values
-        observatory_id = _get_param(params, 'id', required=True)
-        starttime = _get_param(params, 'starttime')
-        endtime = _get_param(params, 'endtime')
-        elements = _get_param(params, 'elements')
-        sampling_period = _get_param(params, 'sampling_period')
-        data_type = _get_param(params, 'type')
-        output_format = _get_param(params, 'format')
+        observatory_id = _get_param(params, "id", required=True)
+        starttime = _get_param(params, "starttime")
+        endtime = _get_param(params, "endtime")
+        elements = _get_param(params, "elements")
+        sampling_period = _get_param(params, "sampling_period")
+        data_type = _get_param(params, "type")
+        output_format = _get_param(params, "format")
         # Assign values or defaults
         if not output_format:
             output_format = DEFAULT_OUTPUT_FORMAT
@@ -286,24 +289,21 @@ class WebService(object):
         observatory_id = observatory_id.upper()
         if observatory_id not in list(self.metadata.keys()):
             raise WebServiceException(
-                   'Bad id value "%s".'
-                   ' Valid values are: %s'
-                   % (observatory_id, list(self.metadata.keys())))
+                'Bad id value "%s".'
+                " Valid values are: %s" % (observatory_id, list(self.metadata.keys()))
+            )
         if not starttime:
             now = datetime.now()
-            today = UTCDateTime(
-                    year=now.year,
-                    month=now.month,
-                    day=now.day,
-                    hour=0)
+            today = UTCDateTime(year=now.year, month=now.month, day=now.day, hour=0)
             starttime = today
         else:
             try:
                 starttime = UTCDateTime(starttime)
             except Exception:
                 raise WebServiceException(
-                        'Bad starttime value "%s".'
-                        ' Valid values are ISO-8601 timestamps.' % starttime)
+                    'Bad starttime value "%s".'
+                    " Valid values are ISO-8601 timestamps." % starttime
+                )
         if not endtime:
             endtime = starttime + (24 * 60 * 60 - 1)
         else:
@@ -311,12 +311,13 @@ class WebService(object):
                 endtime = UTCDateTime(endtime)
             except Exception:
                 raise WebServiceException(
-                        'Bad endtime value "%s".'
-                        ' Valid values are ISO-8601 timestamps.' % endtime)
+                    'Bad endtime value "%s".'
+                    " Valid values are ISO-8601 timestamps." % endtime
+                )
         if not elements:
             elements = DEFAULT_ELEMENTS
         else:
-            elements = [e.strip().upper() for e in elements.replace(',', '')]
+            # split on commas so multi-character elements (e.g. "E-N") parse
+            elements = [e.strip().upper() for e in elements.split(",")]
         if not sampling_period:
             sampling_period = DEFAULT_SAMPLING_PERIOD
         else:
@@ -360,9 +361,17 @@ class WebServiceQuery(object):
         output format.
         default 'iaga2002'.
     """
-    def __init__(self, observatory_id=None, starttime=None, endtime=None,
-            elements=None, sampling_period=60, data_type='variation',
-            output_format='iaga2002'):
+
+    def __init__(
+        self,
+        observatory_id=None,
+        starttime=None,
+        endtime=None,
+        elements=None,
+        sampling_period=60,
+        data_type="variation",
+        output_format="iaga2002",
+    ):
         self.observatory_id = observatory_id
         self.starttime = starttime
         self.endtime = endtime
@@ -379,29 +388,30 @@ class WebServiceQuery(object):
         WebServiceException
             if any parameters are not supported.
         """
-        if len(self.elements) > 4 and self.output_format == 'iaga2002':
+        if len(self.elements) > 4 and self.output_format == "iaga2002":
             raise WebServiceException(
-                    'No more than four elements allowed for iaga2002 format.')
+                "No more than four elements allowed for iaga2002 format."
+            )
         if self.starttime > self.endtime:
-            raise WebServiceException(
-                    'Starttime must be before endtime.')
+            raise WebServiceException("Starttime must be before endtime.")
         if self.data_type not in VALID_DATA_TYPES:
             raise WebServiceException(
-                    'Bad type value "%s".'
-                    ' Valid values are: %s'
-                    % (self.data_type, VALID_DATA_TYPES))
+                'Bad type value "%s".'
+                " Valid values are: %s" % (self.data_type, VALID_DATA_TYPES)
+            )
         if self.sampling_period not in VALID_SAMPLING_PERIODS:
             raise WebServiceException(
-                    'Bad sampling_period value "%s".'
-                    ' Valid values are: %s'
-                    % (self.sampling_period, VALID_SAMPLING_PERIODS))
+                'Bad sampling_period value "%s".'
+                " Valid values are: %s" % (self.sampling_period, VALID_SAMPLING_PERIODS)
+            )
         if self.output_format not in VALID_OUTPUT_FORMATS:
             raise WebServiceException(
-                    'Bad format value "%s".'
-                    ' Valid values are: %s'
-                    % (self.output_format, VALID_OUTPUT_FORMATS))
+                'Bad format value "%s".'
+                " Valid values are: %s" % (self.output_format, VALID_OUTPUT_FORMATS)
+            )
 
 
 class WebServiceException(Exception):
     """Base class for exceptions thrown by web services."""
+
     pass
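
A hypothetical sketch of mounting WebService behind the stdlib WSGI server; the port and the example query are illustrative. With no arguments the service falls back to an EdgeFactory and the bundled observatory metadata, per __init__ above.

from wsgiref.simple_server import make_server

from geomagio.WebService import WebService

app = WebService()  # defaults: EdgeFactory() + ObservatoryMetadata()
server = make_server("", 8000, app)
# example request handled by parse()/fetch()/format_data() above:
#   /?id=BOU&starttime=2024-01-01T00:00:00Z&elements=H,E,Z,F
#     &sampling_period=60&type=variation&format=iaga2002
server.serve_forever()
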
diff --git a/geomagio/WebServiceUsage.py b/geomagio/WebServiceUsage.py
index 858f621ab0cf7ea38db9d8fe56e8dfa50b4674ea..1530c9248f3191b04f398f916d6c75fc874036fb 100644
--- a/geomagio/WebServiceUsage.py
+++ b/geomagio/WebServiceUsage.py
@@ -7,20 +7,17 @@ class WebServiceUsage(object):
     def __init__(self, metadata=None, mount_path=None, host_prefix=None):
         metadata = metadata or list(ObservatoryMetadata().metadata.keys())
         self.date = datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ")
-        self.metadata = ', '.join(sorted(metadata))
+        self.metadata = ", ".join(sorted(metadata))
         self.mount_path = mount_path
         self.host_prefix = host_prefix
 
     def __call__(self, environ, start_response):
         """Implement documentation page"""
-        start_response('200 OK',
-                [
-                    ("Content-Type", "text/html")
-                ])
+        start_response("200 OK", [("Content-Type", "text/html")])
         if self.mount_path is None:
-            self.mount_path = '/ws/edge'
+            self.mount_path = "/ws/edge"
         if self.host_prefix is None:
-            self.host_prefix = environ['HTTP_HOST']
+            self.host_prefix = environ["HTTP_HOST"]
         usage_page = self.set_usage_page()
         return [usage_page]
 
@@ -182,12 +179,13 @@ class WebServiceUsage(object):
               </nav>
             </body>
             </html>
-        """.format(metadata=ids, date=self.date,
-                host_prefix=self.host_prefix,
-                stylesheet=stylesheet,
-                link1=self.host_prefix + self.mount_path + "/?id=BOU",
-                link2=self.host_prefix + self.mount_path +
-                "/?id=BOU&format=json",
-                link3=self.host_prefix + self.mount_path +
-                "/?id=BOU&elements=E-N,E-E",)
+        """.format(
+            metadata=ids,
+            date=self.date,
+            host_prefix=self.host_prefix,
+            stylesheet=stylesheet,
+            link1=self.host_prefix + self.mount_path + "/?id=BOU",
+            link2=self.host_prefix + self.mount_path + "/?id=BOU&format=json",
+            link3=self.host_prefix + self.mount_path + "/?id=BOU&elements=E-N,E-E",
+        )
         return usage_body
diff --git a/geomagio/__init__.py b/geomagio/__init__.py
index 036d81bf40a799727e7af6c27970f87436b65e66..5c391414edd5b3cb148be27ed8c6ea6cc56bc6e8 100644
--- a/geomagio/__init__.py
+++ b/geomagio/__init__.py
@@ -16,15 +16,15 @@ from .TimeseriesFactoryException import TimeseriesFactoryException
 from .WebService import WebService
 
 __all__ = [
-    'ChannelConverter',
-    'Controller',
-    'DeltaFAlgorithm',
-    'ObservatoryMetadata',
-    'PlotTimeseriesFactory',
-    'StreamConverter',
-    'TimeseriesFactory',
-    'TimeseriesFactoryException',
-    'TimeseriesUtility',
-    'Util',
-    'WebService'
+    "ChannelConverter",
+    "Controller",
+    "DeltaFAlgorithm",
+    "ObservatoryMetadata",
+    "PlotTimeseriesFactory",
+    "StreamConverter",
+    "TimeseriesFactory",
+    "TimeseriesFactoryException",
+    "TimeseriesUtility",
+    "Util",
+    "WebService",
 ]
diff --git a/geomagio/algorithm/AdjustedAlgorithm.py b/geomagio/algorithm/AdjustedAlgorithm.py
index f78fff75fc892dea86176e2e19fae5f357823b66..70f908afb1a3158ef2195d55fdd0f6b5b05cdefa 100644
--- a/geomagio/algorithm/AdjustedAlgorithm.py
+++ b/geomagio/algorithm/AdjustedAlgorithm.py
@@ -14,17 +14,24 @@ import sys
 class AdjustedAlgorithm(Algorithm):
     """Adjusted Data Algorithm"""
 
-    def __init__(self, matrix=None, pier_correction=None, statefile=None,
-            data_type=None, location=None):
-        Algorithm.__init__(self, inchannels=('H', 'E', 'Z', 'F'),
-            outchannels=('X', 'Y', 'Z', 'F'))
+    def __init__(
+        self,
+        matrix=None,
+        pier_correction=None,
+        statefile=None,
+        data_type=None,
+        location=None,
+    ):
+        Algorithm.__init__(
+            self, inchannels=("H", "E", "Z", "F"), outchannels=("X", "Y", "Z", "F")
+        )
         # state variables
         self.matrix = matrix
         self.pier_correction = pier_correction
         self.statefile = statefile
         self.data_type = data_type
         self.location = location
-        if (matrix is None):
+        if matrix is None:
             self.load_state()
 
     def load_state(self):
@@ -37,30 +44,30 @@ class AdjustedAlgorithm(Algorithm):
             return
         data = None
         try:
-            with open(self.statefile, 'r') as f:
+            with open(self.statefile, "r") as f:
                 data = f.read()
                 data = json.loads(data)
         except IOError as err:
             sys.stderr.write("I/O error {0}".format(err))
-        if data is None or data == '':
+        if data is None or data == "":
             return
-        self.matrix[0, 0] = np.float64(data['M11'])
-        self.matrix[0, 1] = np.float64(data['M12'])
-        self.matrix[0, 2] = np.float64(data['M13'])
-        self.matrix[0, 3] = np.float64(data['M14'])
-        self.matrix[1, 0] = np.float64(data['M21'])
-        self.matrix[1, 1] = np.float64(data['M22'])
-        self.matrix[1, 2] = np.float64(data['M23'])
-        self.matrix[1, 3] = np.float64(data['M24'])
-        self.matrix[2, 0] = np.float64(data['M31'])
-        self.matrix[2, 1] = np.float64(data['M32'])
-        self.matrix[2, 2] = np.float64(data['M33'])
-        self.matrix[2, 3] = np.float64(data['M34'])
-        self.matrix[3, 0] = np.float64(data['M41'])
-        self.matrix[3, 1] = np.float64(data['M42'])
-        self.matrix[3, 2] = np.float64(data['M43'])
-        self.matrix[3, 3] = np.float64(data['M44'])
-        self.pier_correction = np.float64(data['PC'])
+        self.matrix[0, 0] = np.float64(data["M11"])
+        self.matrix[0, 1] = np.float64(data["M12"])
+        self.matrix[0, 2] = np.float64(data["M13"])
+        self.matrix[0, 3] = np.float64(data["M14"])
+        self.matrix[1, 0] = np.float64(data["M21"])
+        self.matrix[1, 1] = np.float64(data["M22"])
+        self.matrix[1, 2] = np.float64(data["M23"])
+        self.matrix[1, 3] = np.float64(data["M24"])
+        self.matrix[2, 0] = np.float64(data["M31"])
+        self.matrix[2, 1] = np.float64(data["M32"])
+        self.matrix[2, 2] = np.float64(data["M33"])
+        self.matrix[2, 3] = np.float64(data["M34"])
+        self.matrix[3, 0] = np.float64(data["M41"])
+        self.matrix[3, 1] = np.float64(data["M42"])
+        self.matrix[3, 2] = np.float64(data["M43"])
+        self.matrix[3, 3] = np.float64(data["M44"])
+        self.pier_correction = np.float64(data["PC"])
 
     def save_state(self):
         """Save algorithm state to a file.
@@ -69,25 +76,25 @@ class AdjustedAlgorithm(Algorithm):
         if self.statefile is None:
             return
         data = {
-            'M11': self.matrix[0, 0],
-            'M12': self.matrix[0, 1],
-            'M13': self.matrix[0, 2],
-            'M14': self.matrix[0, 3],
-            'M21': self.matrix[1, 0],
-            'M22': self.matrix[1, 1],
-            'M23': self.matrix[1, 2],
-            'M24': self.matrix[1, 3],
-            'M31': self.matrix[2, 0],
-            'M32': self.matrix[2, 1],
-            'M33': self.matrix[2, 2],
-            'M34': self.matrix[2, 3],
-            'M41': self.matrix[3, 0],
-            'M42': self.matrix[3, 1],
-            'M43': self.matrix[3, 2],
-            'M44': self.matrix[3, 3],
-            'PC': self.pier_correction
+            "M11": self.matrix[0, 0],
+            "M12": self.matrix[0, 1],
+            "M13": self.matrix[0, 2],
+            "M14": self.matrix[0, 3],
+            "M21": self.matrix[1, 0],
+            "M22": self.matrix[1, 1],
+            "M23": self.matrix[1, 2],
+            "M24": self.matrix[1, 3],
+            "M31": self.matrix[2, 0],
+            "M32": self.matrix[2, 1],
+            "M33": self.matrix[2, 2],
+            "M34": self.matrix[2, 3],
+            "M41": self.matrix[3, 0],
+            "M42": self.matrix[3, 1],
+            "M43": self.matrix[3, 2],
+            "M44": self.matrix[3, 3],
+            "PC": self.pier_correction,
         }
-        with open(self.statefile, 'w') as f:
+        with open(self.statefile, "w") as f:
             f.write(json.dumps(data))
 
     def create_trace(self, channel, stats, data):
@@ -109,16 +116,15 @@ class AdjustedAlgorithm(Algorithm):
         """
         stats = Stats(stats)
         if self.data_type is None:
-            stats.data_type = 'adjusted'
+            stats.data_type = "adjusted"
         else:
             stats.data_type = self.data_type
-        if self.data_type is None:
+        if self.location is None:
-            stats.location = 'A0'
+            stats.location = "A0"
         else:
             stats.location = self.location
 
-        trace = super(AdjustedAlgorithm, self).create_trace(channel, stats,
-            data)
+        trace = super(AdjustedAlgorithm, self).create_trace(channel, stats, data)
         return trace
 
     def process(self, stream):
@@ -136,19 +142,19 @@ class AdjustedAlgorithm(Algorithm):
 
         out = None
 
-        h = stream.select(channel='H')[0]
-        e = stream.select(channel='E')[0]
-        z = stream.select(channel='Z')[0]
-        f = stream.select(channel='F')[0]
+        h = stream.select(channel="H")[0]
+        e = stream.select(channel="E")[0]
+        z = stream.select(channel="Z")[0]
+        f = stream.select(channel="F")[0]
 
         raws = np.vstack([h.data, e.data, z.data, np.ones_like(h.data)])
         adj = np.dot(self.matrix, raws)
         fnew = f.data + self.pier_correction
 
-        x = self.create_trace('X', h.stats, adj[0])
-        y = self.create_trace('Y', e.stats, adj[1])
-        z = self.create_trace('Z', z.stats, adj[2])
-        f = self.create_trace('F', f.stats, fnew)
+        x = self.create_trace("X", h.stats, adj[0])
+        y = self.create_trace("Y", e.stats, adj[1])
+        z = self.create_trace("Z", z.stats, adj[2])
+        f = self.create_trace("F", f.stats, fnew)
 
         out = Stream([x, y, z, f])
 
@@ -169,26 +175,28 @@ class AdjustedAlgorithm(Algorithm):
         # collect channels in stream
         channels = []
         for trace in stream:
-            channels += trace.stats['channel']
+            channels.append(trace.stats["channel"])
 
         # if F is available, can produce at least adjusted F
-        if ('F' in channels and
-            super(AdjustedAlgorithm, self).can_produce_data(
-                starttime,
-                endtime,
-                stream.select(channel='F'))):
+        if "F" in channels and super(AdjustedAlgorithm, self).can_produce_data(
+            starttime, endtime, stream.select(channel="F")
+        ):
             return True
 
         # if HEZ are available, can produce at least adjusted XYZ
-        if ('H' in channels and
-            'E' in channels and
-            'Z' in channels and
-            np.all(
-                [super(AdjustedAlgorithm, self).can_produce_data(
-                     starttime,
-                     endtime,
-                     stream.select(channel=chan))
-                 for chan in ('H', 'E', 'Z')])):
+        if (
+            "H" in channels
+            and "E" in channels
+            and "Z" in channels
+            and np.all(
+                [
+                    super(AdjustedAlgorithm, self).can_produce_data(
+                        starttime, endtime, stream.select(channel=chan)
+                    )
+                    for chan in ("H", "E", "Z")
+                ]
+            )
+        ):
             return True
 
         # return False if we cannot produce adjusted F or XYZ
@@ -203,9 +211,11 @@ class AdjustedAlgorithm(Algorithm):
             command line argument parser
         """
 
-        parser.add_argument('--adjusted-statefile',
-                default=None,
-                help='File to store state between calls to algorithm')
+        parser.add_argument(
+            "--adjusted-statefile",
+            default=None,
+            help="File to store state between calls to algorithm",
+        )
 
     def configure(self, arguments):
         """Configure algorithm using comand line arguments.
diff --git a/geomagio/algorithm/Algorithm.py b/geomagio/algorithm/Algorithm.py
index 2c485ac9385b633c37c80410c9e69be8ad2274f6..d59aa4a7cfed286d9ac870231f60ae7ddd7fed62 100644
--- a/geomagio/algorithm/Algorithm.py
+++ b/geomagio/algorithm/Algorithm.py
@@ -102,10 +102,8 @@ class Algorithm(object):
             The input stream we want to make certain has data for the algorithm
         """
         return TimeseriesUtility.has_all_channels(
-                stream,
-                self.get_required_channels(),
-                starttime,
-                endtime)
+            stream, self.get_required_channels(), starttime, endtime
+        )
 
     def get_next_starttime(self):
         """Check whether algorithm has a stateful start time.
diff --git a/geomagio/algorithm/AlgorithmException.py b/geomagio/algorithm/AlgorithmException.py
index cb27283da832ec638cb9901d3e7b44899f1f9eae..955d6da757cb614fa40e85bf37d1cf78878470e3 100644
--- a/geomagio/algorithm/AlgorithmException.py
+++ b/geomagio/algorithm/AlgorithmException.py
@@ -5,4 +5,5 @@ Base class for exceptions thrown by Algorithms.
 
 class AlgorithmException(Exception):
     """Base class for exceptions thrown by Algorithms."""
+
     pass
diff --git a/geomagio/algorithm/AverageAlgorithm.py b/geomagio/algorithm/AverageAlgorithm.py
index 28e2f80ce053249e84afe1f868f011a68e937998..d6536c76b67f11904b790f885437fe445b874afa 100644
--- a/geomagio/algorithm/AverageAlgorithm.py
+++ b/geomagio/algorithm/AverageAlgorithm.py
@@ -18,13 +18,7 @@ class AverageAlgorithm(Algorithm):
 
     """
 
-    def __init__(
-        self,
-        observatories=None,
-        channel=None,
-        location=None,
-        scales=None
-    ):
+    def __init__(self, observatories=None, channel=None, location=None, scales=None):
         Algorithm.__init__(self)
         self._npts = -1
         self._stt = -1
@@ -50,9 +44,10 @@ class AverageAlgorithm(Algorithm):
         # must have only one channel for each observatory
         if len(timeseries) != len(self.observatories):
             raise AlgorithmException(
-                'Expected data for %d stations, received %d \n'
-                    'Only 1 channel may be averaged at one time'
-                    % (len(self.observatories), len(timeseries)))
+                "Expected data for %d stations, received %d \n"
+                "Only 1 channel may be averaged at one time"
+                % (len(self.observatories), len(timeseries))
+            )
 
         first = True
         # timeseries starttime and number of samples must match
@@ -66,17 +61,17 @@ class AverageAlgorithm(Algorithm):
                 first = False
 
             if ts.stats.npts != self._npts:
-                raise AlgorithmException(
-                    'Received timeseries have different lengths')
+                raise AlgorithmException("Received timeseries have different lengths")
 
             if numpy.isnan(ts.data).all():
                 raise AlgorithmException(
-                    'Trace for %s observatory is completely empty.'
-                    % (ts.stats.station))
+                    "Trace for %s observatory is completely empty." % (ts.stats.station)
+                )
 
             if ts.stats.starttime != self._stt:
                 raise AlgorithmException(
-                    'Received timeseries have different starttimes')
+                    "Received timeseries have different starttimes"
+                )
 
     def process(self, timeseries):
         """averages a channel across multiple stations
@@ -89,14 +84,11 @@ class AverageAlgorithm(Algorithm):
         out_stream:
             new stream object containing the averaged values.
         """
-        self.observatories = self.observatories or \
-                    [t.stats.station for t in timeseries]
+        self.observatories = self.observatories or [t.stats.station for t in timeseries]
 
-        self.outchannel = self.outchannel or \
-            timeseries[0].stats.channel
+        self.outchannel = self.outchannel or timeseries[0].stats.channel
 
-        self.outlocation = self.outlocation or \
-            timeseries[0].stats.location
+        self.outlocation = self.outlocation or timeseries[0].stats.location
 
         scale_values = self.scales or ([1] * len(timeseries))
         lat_corr = {}
@@ -128,15 +120,14 @@ class AverageAlgorithm(Algorithm):
 
         # Create a stream from the trace function
         new_stats = obspy.core.Stats()
-        new_stats.station = 'USGS'
+        new_stats.station = "USGS"
         new_stats.channel = self.outchannel
-        new_stats.network = 'NT'
+        new_stats.network = "NT"
         new_stats.location = self.outlocation
         new_stats.starttime = timeseries[0].stats.starttime
         new_stats.npts = timeseries[0].stats.npts
         new_stats.delta = timeseries[0].stats.delta
-        stream = obspy.core.Stream((
-                obspy.core.Trace(dst_tot, new_stats), ))
+        stream = obspy.core.Stream((obspy.core.Trace(dst_tot, new_stats),))
 
         # return averaged values as a stream
         return stream
@@ -150,12 +141,14 @@ class AverageAlgorithm(Algorithm):
         parser: ArgumentParser
             command line argument parser
         """
-        parser.add_argument('--average-observatory-scale',
-               default=None,
-               help='Scale factor for observatories specified with ' +
-                    '--observatory argument',
-               nargs='*',
-               type=float)
+        parser.add_argument(
+            "--average-observatory-scale",
+            default=None,
+            help="Scale factor for observatories specified with "
+            + "--observatory argument",
+            nargs="*",
+            type=float,
+        )
 
     def configure(self, arguments):
         """Configure algorithm using comand line arguments.
@@ -169,14 +162,14 @@ class AverageAlgorithm(Algorithm):
         self.observatories = arguments.observatory
         if arguments.outchannels:
             if len(arguments.outchannels) > 1:
-                raise AlgorithmException(
-                    'Only 1 channel can be specified')
+                raise AlgorithmException("Only 1 channel can be specified")
             self.outchannel = arguments.outchannels[0]
 
         self.scales = arguments.average_observatory_scale
         if self.scales:
             if len(self.observatories) != len(self.scales):
                 raise AlgorithmException(
-                    'Mismatch between observatories and scale factors')
+                    "Mismatch between observatories and scale factors"
+                )
 
         self.outlocation = arguments.outlocationcode or arguments.locationcode
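
The reformatted process() above fills its defaults from the first trace and then averages one channel across stations. The averaging itself sits outside these hunks; as a rough sketch, with per-observatory scale factors and nanmean standing in for whatever masking the real implementation uses:

```python
import numpy as np

def scaled_average(station_data, scales):
    # station_data: equal-length arrays, one per observatory
    stacked = np.vstack([d * s for d, s in zip(station_data, scales)])
    return np.nanmean(stacked, axis=0)

dst = scaled_average(
    [np.array([1.0, 2.0, np.nan]), np.array([3.0, 4.0, 5.0])],
    scales=[1.0, 1.0],
)  # -> [2.0, 3.0, 5.0]
```
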
diff --git a/geomagio/algorithm/DeltaFAlgorithm.py b/geomagio/algorithm/DeltaFAlgorithm.py
index 8541dbdcfead477b1062666e651aa7899076f147..22ebab00988a260a6fc986048ef076caa5b2ad2a 100644
--- a/geomagio/algorithm/DeltaFAlgorithm.py
+++ b/geomagio/algorithm/DeltaFAlgorithm.py
@@ -12,9 +12,9 @@ from .. import StreamConverter
 # obs represents the sensor orientation aligned close to the mag orientation
 # obsd is the same as obs, but with D (declination) instead of E (e/w vector)
 CHANNELS = {
-    'geo': ['X', 'Y', 'Z', 'F'],
-    'obs': ['H', 'E', 'Z', 'F'],
-    'obsd': ['H', 'D', 'Z', 'F']
+    "geo": ["X", "Y", "Z", "F"],
+    "obs": ["H", "E", "Z", "F"],
+    "obsd": ["H", "D", "Z", "F"],
 }
 
 
@@ -28,9 +28,8 @@ class DeltaFAlgorithm(Algorithm):
         will be converting from.
     """
 
-    def __init__(self, informat='obs'):
-        Algorithm.__init__(self, inchannels=CHANNELS[informat],
-                outchannels=['G'])
+    def __init__(self, informat="obs"):
+        Algorithm.__init__(self, inchannels=CHANNELS[informat], outchannels=["G"])
         self._informat = informat
 
     def check_stream(self, timeseries):
@@ -44,8 +43,7 @@ class DeltaFAlgorithm(Algorithm):
         """
         for channel in self._inchannels:
             if len(timeseries.select(channel=channel)) == 0:
-                raise AlgorithmException(
-                    'Channel %s not found in input' % channel)
+                raise AlgorithmException("Channel %s not found in input" % channel)
 
     def process(self, timeseries):
         """converts a timeseries stream into a different coordinate system
@@ -63,9 +61,9 @@ class DeltaFAlgorithm(Algorithm):
         self.check_stream(timeseries)
         out_stream = None
         informat = self._informat
-        if informat == 'geo':
+        if informat == "geo":
             out_stream = StreamConverter.get_deltaf_from_geo(timeseries)
-        elif informat == 'obs' or informat == 'obsd':
+        elif informat == "obs" or informat == "obsd":
             out_stream = StreamConverter.get_deltaf_from_obs(timeseries)
 
         return out_stream
@@ -79,10 +77,12 @@ class DeltaFAlgorithm(Algorithm):
         parser: ArgumentParser
             command line argument parser
         """
-        parser.add_argument('--deltaf-from',
-                choices=['geo', 'obs', 'obsd'],
-                default='obs',
-                help='Geomagnetic orientation to read from')
+        parser.add_argument(
+            "--deltaf-from",
+            choices=["geo", "obs", "obsd"],
+            default="obs",
+            help="Geomagnetic orientation to read from",
+        )
 
     def configure(self, arguments):
         """Configure algorithm using comand line arguments.
diff --git a/geomagio/algorithm/FilterAlgorithm.py b/geomagio/algorithm/FilterAlgorithm.py
index dd7e4147e2889a14074439361d014ccd37f4362b..5131e5571ff5033bd7ed32382bcde28fd3da5546 100644
--- a/geomagio/algorithm/FilterAlgorithm.py
+++ b/geomagio/algorithm/FilterAlgorithm.py
@@ -9,26 +9,24 @@ from .. import TimeseriesUtility
 
 STEPS = [
     {  # 10 Hz to one second filter
-        'name': '10Hz',
-        'input_sample_period': 0.1,
-        'output_sample_period': 1.0,
-        'window': sps.firwin(123, 0.25, window='blackman', fs=10.0),
+        "name": "10Hz",
+        "input_sample_period": 0.1,
+        "output_sample_period": 1.0,
+        "window": sps.firwin(123, 0.25, window="blackman", fs=10.0),
     },
-
     {  # one second to one minute filter
-        'name': 'Intermagnet One Minute',
-        'input_sample_period': 1.0,
-        'output_sample_period': 60.0,
-        'window': sps.get_window(window=('gaussian', 15.8734), Nx=91),
+        "name": "Intermagnet One Minute",
+        "input_sample_period": 1.0,
+        "output_sample_period": 60.0,
+        "window": sps.get_window(window=("gaussian", 15.8734), Nx=91),
     },
-
     {  # one minute to one hour filter
-        'name': 'One Hour',
-        'input_sample_period': 60.0,
-        'output_sample_period': 3600.0,
-        'window': sps.windows.boxcar(91),
-    }
-        ]
+        "name": "One Hour",
+        "input_sample_period": 60.0,
+        "output_sample_period": 3600.0,
+        "window": sps.windows.boxcar(91),
+    },
+]
 
 
 class FilterAlgorithm(Algorithm):
@@ -36,9 +34,16 @@ class FilterAlgorithm(Algorithm):
         Filter Algorithm that filters and downsamples data
     """
 
-    def __init__(self, coeff_filename=None, filtertype=None,
-            steps=None, input_sample_period=None, output_sample_period=None,
-            inchannels=None, outchannels=None):
+    def __init__(
+        self,
+        coeff_filename=None,
+        filtertype=None,
+        steps=None,
+        input_sample_period=None,
+        output_sample_period=None,
+        inchannels=None,
+        outchannels=None,
+    ):
 
         Algorithm.__init__(self, inchannels=None, outchannels=None)
         self.coeff_filename = coeff_filename
@@ -55,19 +60,21 @@ class FilterAlgorithm(Algorithm):
         if self.coeff_filename is None:
             return
 
-        with open(self.coeff_filename, 'r') as f:
+        with open(self.coeff_filename, "r") as f:
             data = f.read()
             data = json.loads(data)
 
-        if data is None or data == '':
+        if data is None or data == "":
             return
 
-        self.steps = [{
-            'name': 'name' in data and data['name'] or 'custom',
-            'input_sample_period': self.input_sample_period,
-            'output_sample_period': self.output_sample_period,
-            'window': data['window']
-        }]
+        self.steps = [
+            {
+                "name": "name" in data and data["name"] or "custom",
+                "input_sample_period": self.input_sample_period,
+                "output_sample_period": self.output_sample_period,
+                "window": data["window"],
+            }
+        ]
 
     def save_state(self):
         """Save algorithm state to a file.
@@ -75,10 +82,8 @@ class FilterAlgorithm(Algorithm):
         """
         if self.coeff_filename is None:
             return
-        data = {
-            'window': list(self.window)
-        }
-        with open(self.coeff_filename, 'w') as f:
+        data = {"window": list(self.window)}
+        with open(self.coeff_filename, "w") as f:
             f.write(json.dumps(data))
 
     def get_filter_steps(self):
@@ -95,8 +100,8 @@ class FilterAlgorithm(Algorithm):
 
         steps = []
         for step in STEPS:
-            if self.input_sample_period <= step['input_sample_period']:
-                if self.output_sample_period >= step['output_sample_period']:
+            if self.input_sample_period <= step["input_sample_period"]:
+                if self.output_sample_period >= step["output_sample_period"]:
                     steps.append(step)
         return steps
 
@@ -115,10 +120,8 @@ class FilterAlgorithm(Algorithm):
             The input stream we want to make certain has data for the algorithm
         """
         return TimeseriesUtility.has_any_channels(
-                stream,
-                self.get_required_channels(),
-                starttime,
-                endtime)
+            stream, self.get_required_channels(), starttime, endtime
+        )
 
     def create_trace(self, channel, stats, data):
         """Utility to create a new trace object.
@@ -139,8 +142,7 @@ class FilterAlgorithm(Algorithm):
             trace containing data and metadata.
         """
 
-        trace = super(FilterAlgorithm, self).create_trace(channel, stats,
-            data)
+        trace = super(FilterAlgorithm, self).create_trace(channel, stats, data)
         return trace
 
     def process(self, stream):
@@ -177,20 +179,18 @@ class FilterAlgorithm(Algorithm):
             stream containing 1 trace per original trace.
         """
         # gather variables from step
-        input_sample_period = step['input_sample_period']
-        output_sample_period = step['output_sample_period']
-        window = np.array(step['window'])
+        input_sample_period = step["input_sample_period"]
+        output_sample_period = step["output_sample_period"]
+        window = np.array(step["window"])
         decimation = int(output_sample_period / input_sample_period)
         numtaps = len(window)
         window = window / sum(window)
 
         out = Stream()
         for trace in stream:
-            filtered = self.firfilter(trace.data,
-                    window, decimation)
+            filtered = self.firfilter(trace.data, window, decimation)
             stats = Stats(trace.stats)
-            stats.starttime = stats.starttime + \
-                    input_sample_period * (numtaps // 2)
+            stats.starttime = stats.starttime + input_sample_period * (numtaps // 2)
             stats.delta = output_sample_period
             stats.npts = len(filtered)
             trace_out = self.create_trace(stats.channel, stats, filtered)
@@ -221,11 +221,9 @@ class FilterAlgorithm(Algorithm):
 
         # build view into data, with numtaps chunks separated into
         # overlapping 'rows'
-        shape = data.shape[:-1] + \
-                (data.shape[-1] - numtaps + 1, numtaps)
+        shape = data.shape[:-1] + (data.shape[-1] - numtaps + 1, numtaps)
         strides = data.strides + (data.strides[-1],)
-        as_s = npls.as_strided(data, shape=shape, strides=strides,
-                               writeable=False)
+        as_s = npls.as_strided(data, shape=shape, strides=strides, writeable=False)
         # build masked array for invalid entries, also 'decimate' by step
         as_masked = np.ma.masked_invalid(as_s[::step], copy=True)
         # sums of the total 'weights' of the filter corresponding to
@@ -287,9 +285,11 @@ class FilterAlgorithm(Algorithm):
         # input and output time intervals are managed
         # by Controller and TimeseriesUtility
 
-        parser.add_argument('--filter-coefficients',
-                default=None,
-                help='File storing custom filter coefficients')
+        parser.add_argument(
+            "--filter-coefficients",
+            default=None,
+            help="File storing custom filter coefficients",
+        )
 
     def configure(self, arguments):
         """Configure algorithm using comand line arguments.
@@ -302,7 +302,9 @@ class FilterAlgorithm(Algorithm):
         # initialize filter with command line arguments
         self.coeff_filename = arguments.filter_coefficients
         self.input_sample_period = TimeseriesUtility.get_delta_from_interval(
-                arguments.input_interval or arguments.interval)
+            arguments.input_interval or arguments.interval
+        )
         self.output_sample_period = TimeseriesUtility.get_delta_from_interval(
-                arguments.output_interval or arguments.interval)
+            arguments.output_interval or arguments.interval
+        )
         self.load_state()
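
The STEPS table and firfilter hunks above define the whole pipeline: normalize a window to unit gain, apply it as an FIR filter, then keep every Nth sample. firfilter itself uses a strided view plus masked arrays so NaN gaps degrade gracefully; np.convolve below is a simplified stand-in for illustration:

```python
import numpy as np
import scipy.signal as sps

# one-second -> one-minute step from STEPS, normalized to unit gain
window = sps.get_window(window=("gaussian", 15.8734), Nx=91)
window = window / window.sum()

one_second = np.random.randn(600)               # hypothetical 1 Hz samples
filtered = np.convolve(one_second, window, "valid")
one_minute = filtered[::60]                     # decimate by 60
```
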
diff --git a/geomagio/algorithm/SqDistAlgorithm.py b/geomagio/algorithm/SqDistAlgorithm.py
index 6ac282abb333b7d00136c0c431e2f8fb08876a49..d9aa1584a957f8b79023dc6f607ee927ca4dd2ad 100644
--- a/geomagio/algorithm/SqDistAlgorithm.py
+++ b/geomagio/algorithm/SqDistAlgorithm.py
@@ -25,10 +25,25 @@ from scipy.optimize import fmin_l_bfgs_b
 class SqDistAlgorithm(Algorithm):
     """Solar Quiet, Secular Variation, and Disturbance algorithm"""
 
-    def __init__(self, alpha=None, beta=None, gamma=None, phi=1, m=1,
-                 yhat0=None, s0=None, l0=None, b0=None, sigma0=None,
-                 zthresh=6, fc=0, hstep=0, statefile=None, mag=False,
-                 smooth=1):
+    def __init__(
+        self,
+        alpha=None,
+        beta=None,
+        gamma=None,
+        phi=1,
+        m=1,
+        yhat0=None,
+        s0=None,
+        l0=None,
+        b0=None,
+        sigma0=None,
+        zthresh=6,
+        fc=0,
+        hstep=0,
+        statefile=None,
+        mag=False,
+        smooth=1,
+    ):
         Algorithm.__init__(self, inchannels=None, outchannels=None)
         self.alpha = alpha
         self.beta = beta
@@ -73,11 +88,13 @@ class SqDistAlgorithm(Algorithm):
             end of input required to generate requested output.
         """
         if self.mag:
-            channels = ('H')
-        if observatory == self.last_observatory \
-                and len(channels) == 1 \
-                and channels[0] == self.last_channel \
-                and start == self.next_starttime:
+            channels = "H"
+        if (
+            observatory == self.last_observatory
+            and len(channels) == 1
+            and channels[0] == self.last_channel
+            and start == self.next_starttime
+        ):
             # state is up to date, only need new data
             return (start, end)
         # state not up to date, need to prime
@@ -112,22 +129,22 @@ class SqDistAlgorithm(Algorithm):
             return
         data = None
         try:
-            with open(self.statefile, 'r') as f:
+            with open(self.statefile, "r") as f:
                 data = f.read()
                 data = json.loads(data)
         except Exception:
             pass
-        if data is None or data == '':
+        if data is None or data == "":
             return
-        self.yhat0 = data['yhat0']
-        self.s0 = data['s0']
-        self.l0 = data['l0']
-        self.b0 = data['b0']
-        self.sigma0 = data['sigma0']
-        self.last_observatory = data['last_observatory']
-        self.last_channel = data['last_channel']
-        self.last_delta = 'last_delta' in data and data['last_delta'] or None
-        self.next_starttime = UTCDateTime(data['next_starttime'])
+        self.yhat0 = data["yhat0"]
+        self.s0 = data["s0"]
+        self.l0 = data["l0"]
+        self.b0 = data["b0"]
+        self.sigma0 = data["sigma0"]
+        self.last_observatory = data["last_observatory"]
+        self.last_channel = data["last_channel"]
+        self.last_delta = "last_delta" in data and data["last_delta"] or None
+        self.next_starttime = UTCDateTime(data["next_starttime"])
 
     def save_state(self):
         """Save algorithm state to a file.
@@ -137,17 +154,17 @@ class SqDistAlgorithm(Algorithm):
         if self.statefile is None:
             return
         data = {
-            'yhat0': list(self.yhat0),
-            's0': list(self.s0),
-            'l0': self.l0,
-            'b0': self.b0,
-            'sigma0': list(self.sigma0),
-            'last_observatory': self.last_observatory,
-            'last_channel': self.last_channel,
-            'last_delta': self.last_delta,
-            'next_starttime': str(self.next_starttime)
+            "yhat0": list(self.yhat0),
+            "s0": list(self.s0),
+            "l0": self.l0,
+            "b0": self.b0,
+            "sigma0": list(self.sigma0),
+            "last_observatory": self.last_observatory,
+            "last_channel": self.last_channel,
+            "last_delta": self.last_delta,
+            "next_starttime": str(self.next_starttime),
         }
-        with open(self.statefile, 'w') as f:
+        with open(self.statefile, "w") as f:
             f.write(json.dumps(data))
 
     def process(self, stream):
@@ -169,15 +186,19 @@ class SqDistAlgorithm(Algorithm):
 
         if self.mag:
             # convert stream to mag
-            if stream.select(channel='H').count() > 0 \
-                    and stream.select(channel='E').count() > 0:
+            if (
+                stream.select(channel="H").count() > 0
+                and stream.select(channel="E").count() > 0
+            ):
                 stream = StreamConverter.get_mag_from_obs(stream)
-            elif stream.select(channel='X').count() > 0 \
-                    and stream.select(channel='Y').count() > 0:
+            elif (
+                stream.select(channel="X").count() > 0
+                and stream.select(channel="Y").count() > 0
+            ):
                 stream = StreamConverter.get_mag_from_geo(stream)
             else:
-                raise AlgorithmException('Unable to convert to magnetic H')
-            stream = stream.select(channel='H')
+                raise AlgorithmException("Unable to convert to magnetic H")
+            stream = stream.select(channel="H")
 
         for trace in stream.traces:
             out += self.process_one(trace)
@@ -206,44 +227,52 @@ class SqDistAlgorithm(Algorithm):
         """
         out = Stream()
         # check state
-        if self.last_observatory is not None \
-                or self.last_channel is not None \
-                or self.last_delta is not None \
-                or self.next_starttime is not None:
+        if (
+            self.last_observatory is not None
+            or self.last_channel is not None
+            or self.last_delta is not None
+            or self.next_starttime is not None
+        ):
             # have state, verify okay to proceed
-            if trace.stats.station != self.last_observatory \
-                    or trace.stats.channel != self.last_channel \
-                    or trace.stats.delta != self.last_delta \
-                    or trace.stats.starttime != self.next_starttime:
+            if (
+                trace.stats.station != self.last_observatory
+                or trace.stats.channel != self.last_channel
+                or trace.stats.delta != self.last_delta
+                or trace.stats.starttime != self.next_starttime
+            ):
                 # state not correct
                 raise AlgorithmException(
-                        'Inconsistent SQDist algorithm state' +
-                        ' process(%s, %s, %s, %s) <> state(%s, %s, %s, %s)' %
-                                (trace.stats.station,
-                                trace.stats.channel,
-                                trace.stats.delta,
-                                trace.stats.starttime,
-                                self.last_observatory,
-                                self.last_channel,
-                                self.last_delta,
-                                self.next_starttime))
+                    "Inconsistent SQDist algorithm state"
+                    + " process(%s, %s, %s, %s) <> state(%s, %s, %s, %s)"
+                    % (
+                        trace.stats.station,
+                        trace.stats.channel,
+                        trace.stats.delta,
+                        trace.stats.starttime,
+                        self.last_observatory,
+                        self.last_channel,
+                        self.last_delta,
+                        self.next_starttime,
+                    )
+                )
         # process
         yhat, shat, sigmahat, yhat0, s0, l0, b0, sigma0 = self.additive(
-                yobs=trace.data,
-                m=self.m,
-                alpha=self.alpha,
-                beta=self.beta,
-                gamma=self.gamma,
-                phi=self.phi,
-                yhat0=self.yhat0,
-                s0=self.s0,
-                l0=self.l0,
-                b0=self.b0,
-                sigma0=self.sigma0,
-                zthresh=self.zthresh,
-                fc=self.fc,
-                hstep=self.hstep,
-                smooth=self.smooth)
+            yobs=trace.data,
+            m=self.m,
+            alpha=self.alpha,
+            beta=self.beta,
+            gamma=self.gamma,
+            phi=self.phi,
+            yhat0=self.yhat0,
+            s0=self.s0,
+            l0=self.l0,
+            b0=self.b0,
+            sigma0=self.sigma0,
+            zthresh=self.zthresh,
+            fc=self.fc,
+            hstep=self.hstep,
+            smooth=self.smooth,
+        )
         # update state
         self.yhat0 = yhat0
         self.s0 = s0
@@ -253,8 +282,9 @@ class SqDistAlgorithm(Algorithm):
         self.last_observatory = trace.stats.station
         self.last_channel = trace.stats.channel
         self.last_delta = trace.stats.delta
-        self.next_starttime = trace.stats.starttime + \
-                (trace.stats.delta * trace.stats.npts)
+        self.next_starttime = trace.stats.starttime + (
+            trace.stats.delta * trace.stats.npts
+        )
         self.save_state()
         # create updated traces
         channel = trace.stats.channel
@@ -270,16 +300,31 @@ class SqDistAlgorithm(Algorithm):
         # if the data array is longer than expected, as when self.fc is non-
         # zero; HOWEVER, if self.hstep is non-zero, both stats.starttime and
         # stats.endtime must be adjusted to accommodate the time-shift.
-        out += self.create_trace(channel + '_Dist', trace.stats, dist)
-        out += self.create_trace(channel + '_SQ', trace.stats, sq)
-        out += self.create_trace(channel + '_SV', trace.stats, sv)
-        out += self.create_trace(channel + '_Sigma', trace.stats, sigmahat)
+        out += self.create_trace(channel + "_Dist", trace.stats, dist)
+        out += self.create_trace(channel + "_SQ", trace.stats, sq)
+        out += self.create_trace(channel + "_SV", trace.stats, sv)
+        out += self.create_trace(channel + "_Sigma", trace.stats, sigmahat)
         return out
 
     @classmethod
-    def additive(cls, yobs, m, alpha, beta, gamma, phi=1,
-                 yhat0=None, s0=None, l0=None, b0=None, sigma0=None,
-                 zthresh=6, fc=0, hstep=0, smooth=1):
+    def additive(
+        cls,
+        yobs,
+        m,
+        alpha,
+        beta,
+        gamma,
+        phi=1,
+        yhat0=None,
+        s0=None,
+        l0=None,
+        b0=None,
+        sigma0=None,
+        zthresh=6,
+        fc=0,
+        hstep=0,
+        smooth=1,
+    ):
         """Primary function for Holt-Winters smoothing/forecasting with
           damped linear trend and additive seasonal component.
 
@@ -360,19 +405,19 @@ class SqDistAlgorithm(Algorithm):
         """
 
         if alpha is None:
-            raise AlgorithmException('alpha is required')
+            raise AlgorithmException("alpha is required")
         if beta is None:
-            raise AlgorithmException('beta is required')
+            raise AlgorithmException("beta is required")
         if gamma is None:
-            raise AlgorithmException('gamma is required')
+            raise AlgorithmException("gamma is required")
         if phi is None:
-            raise AlgorithmException('phi is required')
+            raise AlgorithmException("phi is required")
 
         # set some default values
         if l0 is None:
-            l = np.nanmean(yobs[0:int(m)])
+            l = np.nanmean(yobs[0 : int(m)])
             if np.isnan(l):
-                l = 0.
+                l = 0.0
         else:
             l = l0
             if not np.isscalar(l0):
@@ -404,8 +449,7 @@ class SqDistAlgorithm(Algorithm):
         else:
             sigma = list(sigma0)
             if len(sigma) != (hstep + 1):
-                raise AlgorithmException(
-                    "sigma0 must have length %d" % (hstep + 1))
+                raise AlgorithmException("sigma0 must have length %d" % (hstep + 1))
 
         # generate a vector of weights that will "smooth" seasonal
         # variations locally...the quotes are because what we really
         # do is distribute the error correction across a range of
         # seasonal corrections according to these weights; ideally, the
         # smooth parameter should specify the required cut-off period in terms
         # of discrete samples; for now, we generate a Gaussian filter according
         # to White et al. (USGS SIR 2014-5045).
         # to White et al. (USGS SIR 2014-5045).
-        fom = 10**(-3 / 20.)  # halve power at corner frequency
+        fom = 10 ** (-3 / 20.0)  # halve power at corner frequency
         omg = np.pi / np.float64(smooth)  # corner angular frequency
-        sig = np.sqrt(-2 * np.log(fom) / omg**2) + np.finfo(float).eps  # sig>0
-        ts = np.linspace(np.max((-m, -3 * np.round(sig))),
-                         np.min((m, 3 * np.round(sig))),
-                         np.int(np.round(
-                                 np.min((2 * m, 6 * np.round(sig))) + 1
-                         )))
+        sig = np.sqrt(-2 * np.log(fom) / omg ** 2) + np.finfo(float).eps  # sig>0
+        ts = np.linspace(
+            np.max((-m, -3 * np.round(sig))),
+            np.min((m, 3 * np.round(sig))),
+            np.int(np.round(np.min((2 * m, 6 * np.round(sig))) + 1)),
+        )
         nts = ts.size
-        weights = np.exp(-0.5 * (ts / sig)**2)
+        weights = np.exp(-0.5 * (ts / sig) ** 2)
         weights = weights / np.sum(weights)
 
         #
@@ -443,8 +487,11 @@ class SqDistAlgorithm(Algorithm):
         phiHminus1 = 0
         for h in range(1, hstep):
             phiHminus1 = phiHminus1 + phi ** (h - 1)
-            sumc2_H = sumc2_H + (alpha * (1 + phiHminus1 * beta) +
-                               gamma * (1 if (h % m == 0) else 0)) ** 2
+            sumc2_H = (
+                sumc2_H
+                + (alpha * (1 + phiHminus1 * beta) + gamma * (1 if (h % m == 0) else 0))
+                ** 2
+            )
         phiJminus1 = phiHminus1
         sumc2 = sumc2_H
         jstep = hstep
@@ -475,7 +522,7 @@ class SqDistAlgorithm(Algorithm):
                 # fc>0, so simulate beyond last input
                 et = np.nan
 
-            if (np.isnan(et) or np.abs(et) > zthresh * sigma[i]):
+            if np.isnan(et) or np.abs(et) > zthresh * sigma[i]:
                 # forecast (i.e., update l, b, and s assuming et==0)
 
                 # no change in seasonal adjustments
@@ -492,8 +539,14 @@ class SqDistAlgorithm(Algorithm):
                     # valid observation
                     phiJminus1 = phiJminus1 + phi ** jstep
                     jstep = jstep + 1
-                    sumc2 = sumc2 + (alpha * (1 + phiJminus1 * beta) +
-                            gamma * (1 if (jstep % m == 0) else 0)) ** 2
+                    sumc2 = (
+                        sumc2
+                        + (
+                            alpha * (1 + phiJminus1 * beta)
+                            + gamma * (1 if (jstep % m == 0) else 0)
+                        )
+                        ** 2
+                    )
 
                 else:
                     # still update sigma using et when et > zthresh * sigma
@@ -512,12 +565,14 @@ class SqDistAlgorithm(Algorithm):
                 # distribute error correction across range of seasonal
                 # corrections according to weights calculated above
                 s[i + m] = s[i] + gamma * (1 - alpha) * et * weights[nts // 2]
-                s[i + m - nts // 2:i + m] = (s[i + m - nts // 2:i + m] +
-                                            gamma * (1 - alpha) * et *
-                                            weights[:nts // 2])
-                s[i + 1:i + nts // 2 + 1] = (s[i + 1:i + nts // 2 + 1] +
-                                            gamma * (1 - alpha) * et *
-                                            weights[nts // 2 + 1:])
+                s[i + m - nts // 2 : i + m] = (
+                    s[i + m - nts // 2 : i + m]
+                    + gamma * (1 - alpha) * et * weights[: nts // 2]
+                )
+                s[i + 1 : i + nts // 2 + 1] = (
+                    s[i + 1 : i + nts // 2 + 1]
+                    + gamma * (1 - alpha) * et * weights[nts // 2 + 1 :]
+                )
 
                 # update l and b using equation-error formulation
                 l = l + phi * b + alpha * et
@@ -533,11 +588,11 @@ class SqDistAlgorithm(Algorithm):
 
             # freeze state with last input for reinitialization
             if i == (len(yobs) - 1):
-                yhat0 = yhat[len(yobs):(len(yobs) + hstep)].copy()
-                s0 = s[len(yobs):(len(yobs) + m)].copy() - r[i + 1]
+                yhat0 = yhat[len(yobs) : (len(yobs) + hstep)].copy()
+                s0 = s[len(yobs) : (len(yobs) + m)].copy() - r[i + 1]
                 l0 = l + r[i + 1]
                 b0 = b
-                sigma0 = sigma[len(yobs):(len(yobs) + hstep + 1)].copy()
+                sigma0 = sigma[len(yobs) : (len(yobs) + hstep + 1)].copy()
 
         # endfor i in range(len(yobs) + fc)
 
@@ -545,20 +600,38 @@ class SqDistAlgorithm(Algorithm):
         l = l + r[-1]
         s = np.array(s) - np.hstack((r, np.tile(r[-1], m - 1)))
 
-        return (yhat[:len(yobs) + fc],
-                s[:len(yobs) + fc],
-                sigma[1:len(yobs) + fc + 1],
-                yhat0,
-                s0,
-                l0,
-                b0,
-                sigma0)
+        return (
+            yhat[: len(yobs) + fc],
+            s[: len(yobs) + fc],
+            sigma[1 : len(yobs) + fc + 1],
+            yhat0,
+            s0,
+            l0,
+            b0,
+            sigma0,
+        )
 
     @classmethod
-    def estimate_parameters(cls, yobs, m, alpha=None, beta=None, gamma=None,
-            phi=1, yhat0=None, s0=None, l0=None, b0=None, sigma0=None,
-            zthresh=6, fc=0, hstep=0,
-            alpha0=0.3, beta0=0.1, gamma0=0.1):
+    def estimate_parameters(
+        cls,
+        yobs,
+        m,
+        alpha=None,
+        beta=None,
+        gamma=None,
+        phi=1,
+        yhat0=None,
+        s0=None,
+        l0=None,
+        b0=None,
+        sigma0=None,
+        zthresh=6,
+        fc=0,
+        hstep=0,
+        alpha0=0.3,
+        beta0=0.1,
+        gamma0=0.1,
+    ):
         """Estimate alpha, beta, and gamma parameters based on observed data.
 
         Parameters
@@ -626,13 +699,15 @@ class SqDistAlgorithm(Algorithm):
         boundaries = [
             (alpha, alpha) if alpha is not None else (0, 1),
             (beta, beta) if beta is not None else (0, 1),
-            (gamma, gamma) if gamma is not None else (0, 1)
+            (gamma, gamma) if gamma is not None else (0, 1),
         ]
-        initial_values = np.array([
-            alpha if alpha is not None else alpha0,
-            beta if beta is not None else beta0,
-            gamma if gamma is not None else gamma0
-        ])
+        initial_values = np.array(
+            [
+                alpha if alpha is not None else alpha0,
+                beta if beta is not None else beta0,
+                gamma if gamma is not None else gamma0,
+            ]
+        )
 
         def func(params, *args):
             """Function that computes root-mean-squared-error based on current
@@ -647,15 +722,24 @@ class SqDistAlgorithm(Algorithm):
             alpha, beta, gamma = params
             # call Holt-Winters with additive seasonality
             yhat, _, _, _, _, _, _, _ = cls.additive(
-                    yobs, m,
-                    alpha=alpha, beta=beta, gamma=gamma, l0=l0, b0=b0, s0=s0,
-                    zthresh=zthresh, hstep=hstep)
+                yobs,
+                m,
+                alpha=alpha,
+                beta=beta,
+                gamma=gamma,
+                l0=l0,
+                b0=b0,
+                s0=s0,
+                zthresh=zthresh,
+                hstep=hstep,
+            )
             # compute root-mean-squared-error of predictions
             error = np.sqrt(np.nanmean(np.square(np.subtract(yobs, yhat))))
             return error
 
-        parameters = fmin_l_bfgs_b(func, x0=initial_values, args=(),
-                bounds=boundaries, approx_grad=True)
+        parameters = fmin_l_bfgs_b(
+            func, x0=initial_values, args=(), bounds=boundaries, approx_grad=True
+        )
         alpha, beta, gamma = parameters[0]
         rmse = parameters[1]
         return (alpha, beta, gamma, rmse)
@@ -669,40 +753,47 @@ class SqDistAlgorithm(Algorithm):
         parser: ArgumentParser
             command line argument parser
         """
-        parser.add_argument('--sqdist-alpha',
-                # default=1.0 / 1440.0 / 30,
-                default=0,
-                help='Smoothing parameter for secular variation',
-                type=float)
-        parser.add_argument('--sqdist-beta',
-                default=0,
-                help='Smoothing parameter for slope',
-                type=float)
-        parser.add_argument('--sqdist-gamma',
-                # default=1.0 / 30,
-                default=0,
-                help='Smoothing parameter for solar quiet',
-                type=float)
-        parser.add_argument('--sqdist-m',
-                # default=1440,
-                default=1,
-                help='SqDist m parameter',
-                type=int)
-        parser.add_argument('--sqdist-mag',
-                action='store_true',
-                default=False,
-                help='Generate sqdist based on magnetic H component')
-        parser.add_argument('--sqdist-statefile',
-                default=None,
-                help='File to store state between calls to algorithm')
-        parser.add_argument('--sqdist-zthresh',
-                default=6,
-                help='Set Z-score threshold',
-                type=float)
-        parser.add_argument('--sqdist-smooth',
-                default=1,
-                help='Local SQ smoothing parameter',
-                type=int)
+        parser.add_argument(
+            "--sqdist-alpha",
+            # default=1.0 / 1440.0 / 30,
+            default=0,
+            help="Smoothing parameter for secular variation",
+            type=float,
+        )
+        parser.add_argument(
+            "--sqdist-beta", default=0, help="Smoothing parameter for slope", type=float
+        )
+        parser.add_argument(
+            "--sqdist-gamma",
+            # default=1.0 / 30,
+            default=0,
+            help="Smoothing parameter for solar quiet",
+            type=float,
+        )
+        parser.add_argument(
+            "--sqdist-m",
+            # default=1440,
+            default=1,
+            help="SqDist m parameter",
+            type=int,
+        )
+        parser.add_argument(
+            "--sqdist-mag",
+            action="store_true",
+            default=False,
+            help="Generate sqdist based on magnetic H component",
+        )
+        parser.add_argument(
+            "--sqdist-statefile",
+            default=None,
+            help="File to store state between calls to algorithm",
+        )
+        parser.add_argument(
+            "--sqdist-zthresh", default=6, help="Set Z-score threshold", type=float
+        )
+        parser.add_argument(
+            "--sqdist-smooth", default=1, help="Local SQ smoothing parameter", type=int
+        )
 
     def configure(self, arguments):
         """Configure algorithm using comand line arguments.
diff --git a/geomagio/algorithm/XYZAlgorithm.py b/geomagio/algorithm/XYZAlgorithm.py
index d1c4ba63c0f38f44b1097233e026d01169e6959d..d87b4c0996f0b7c54b0d9cb3489fceab871b5dd6 100644
--- a/geomagio/algorithm/XYZAlgorithm.py
+++ b/geomagio/algorithm/XYZAlgorithm.py
@@ -14,10 +14,10 @@ from .. import StreamConverter
 # obs represents the sensor orientation aligned close to the mag orientation
 # obsd is the same as obs, but with D (declination) instead of E (e/w vector)
 CHANNELS = {
-    'geo': ['X', 'Y', 'Z', 'F'],
-    'mag': ['H', 'D', 'Z', 'F'],
-    'obs': ['H', 'E', 'Z', 'F'],
-    'obsd': ['H', 'D', 'Z', 'F']
+    "geo": ["X", "Y", "Z", "F"],
+    "mag": ["H", "D", "Z", "F"],
+    "obs": ["H", "E", "Z", "F"],
+    "obsd": ["H", "D", "Z", "F"],
 }
 
 
@@ -34,9 +34,10 @@ class XYZAlgorithm(Algorithm):
         be converting to.
     """
 
-    def __init__(self, informat='obs', outformat='geo'):
-        Algorithm.__init__(self, inchannels=CHANNELS[informat],
-                outchannels=CHANNELS[outformat])
+    def __init__(self, informat="obs", outformat="geo"):
+        Algorithm.__init__(
+            self, inchannels=CHANNELS[informat], outchannels=CHANNELS[outformat]
+        )
         self._informat = informat
         self._outformat = outformat
 
@@ -53,8 +54,7 @@ class XYZAlgorithm(Algorithm):
         """
         for channel in self.get_required_channels():
             if len(timeseries.select(channel=channel)) == 0:
-                raise AlgorithmException(
-                    'Channel %s not found in input' % channel)
+                raise AlgorithmException("Channel %s not found in input" % channel)
 
     def get_required_channels(self):
         """Only the first two channels are required
@@ -78,38 +78,42 @@ class XYZAlgorithm(Algorithm):
         out_stream = None
         informat = self._informat
         outformat = self._outformat
-        if outformat == 'geo':
-            if informat == 'geo':
+        if outformat == "geo":
+            if informat == "geo":
                 out_stream = timeseries
-            elif informat == 'mag':
+            elif informat == "mag":
                 out_stream = StreamConverter.get_geo_from_mag(timeseries)
-            elif informat == 'obs' or informat == 'obsd':
+            elif informat == "obs" or informat == "obsd":
                 out_stream = StreamConverter.get_geo_from_obs(timeseries)
-        elif outformat == 'mag':
-            if informat == 'geo':
+        elif outformat == "mag":
+            if informat == "geo":
                 out_stream = StreamConverter.get_mag_from_geo(timeseries)
-            elif informat == 'mag':
+            elif informat == "mag":
                 out_stream = timeseries
-            elif informat == 'obs' or informat == 'obsd':
+            elif informat == "obs" or informat == "obsd":
                 out_stream = StreamConverter.get_mag_from_obs(timeseries)
-        elif outformat == 'obs':
-            if informat == 'geo':
+        elif outformat == "obs":
+            if informat == "geo":
                 out_stream = StreamConverter.get_obs_from_geo(timeseries)
-            elif informat == 'mag':
+            elif informat == "mag":
                 out_stream = StreamConverter.get_obs_from_mag(timeseries)
-            elif informat == 'obs' or informat == 'obsd':
-                out_stream = StreamConverter.get_obs_from_obs(timeseries,
-                        include_e=True)
-        elif outformat == 'obsd':
-            if informat == 'geo':
-                out_stream = StreamConverter.get_obs_from_geo(timeseries,
-                        include_d=True)
-            elif informat == 'mag':
-                out_stream = StreamConverter.get_obs_from_mag(timeseries,
-                        include_d=True)
-            elif informat == 'obs' or informat == 'obsd':
-                out_stream = StreamConverter.get_obs_from_obs(timeseries,
-                        include_d=True)
+            elif informat == "obs" or informat == "obsd":
+                out_stream = StreamConverter.get_obs_from_obs(
+                    timeseries, include_e=True
+                )
+        elif outformat == "obsd":
+            if informat == "geo":
+                out_stream = StreamConverter.get_obs_from_geo(
+                    timeseries, include_d=True
+                )
+            elif informat == "mag":
+                out_stream = StreamConverter.get_obs_from_mag(
+                    timeseries, include_d=True
+                )
+            elif informat == "obs" or informat == "obsd":
+                out_stream = StreamConverter.get_obs_from_obs(
+                    timeseries, include_d=True
+                )
         return out_stream
 
     @classmethod
@@ -121,14 +125,18 @@ class XYZAlgorithm(Algorithm):
         parser: ArgumentParser
             command line argument parser
         """
-        parser.add_argument('--xyz-from',
-                choices=['geo', 'mag', 'obs', 'obsd'],
-                default='obs',
-                help='Geomagnetic orientation to read from')
-        parser.add_argument('--xyz-to',
-                choices=['geo', 'mag', 'obs', 'obsd'],
-                default='geo',
-                help='Geomagnetic orientation to convert to')
+        parser.add_argument(
+            "--xyz-from",
+            choices=["geo", "mag", "obs", "obsd"],
+            default="obs",
+            help="Geomagnetic orientation to read from",
+        )
+        parser.add_argument(
+            "--xyz-to",
+            choices=["geo", "mag", "obs", "obsd"],
+            default="geo",
+            help="Geomagnetic orientation to convert to",
+        )
 
     def configure(self, arguments):
         """Configure algorithm using comand line arguments.
diff --git a/geomagio/algorithm/__init__.py b/geomagio/algorithm/__init__.py
index 1b1423034b6835b13b88060b36ceb5fad9a68cff..894b63b5573096b40f21e94ea30e316f61b6a730 100644
--- a/geomagio/algorithm/__init__.py
+++ b/geomagio/algorithm/__init__.py
@@ -6,6 +6,7 @@ from __future__ import absolute_import
 # base classes
 from .Algorithm import Algorithm
 from .AlgorithmException import AlgorithmException
+
 # algorithms
 from .AdjustedAlgorithm import AdjustedAlgorithm
 from .AverageAlgorithm import AverageAlgorithm
@@ -17,25 +18,25 @@ from .XYZAlgorithm import XYZAlgorithm
 
 # algorithms is used by Controller to auto generate arguments
 algorithms = {
-    'identity': Algorithm,
-    'adjusted': AdjustedAlgorithm,
-    'average': AverageAlgorithm,
-    'deltaf': DeltaFAlgorithm,
-    'filter': FilterAlgorithm,
-    'sqdist': SqDistAlgorithm,
-    'xyz': XYZAlgorithm
+    "identity": Algorithm,
+    "adjusted": AdjustedAlgorithm,
+    "average": AverageAlgorithm,
+    "deltaf": DeltaFAlgorithm,
+    "filter": FilterAlgorithm,
+    "sqdist": SqDistAlgorithm,
+    "xyz": XYZAlgorithm,
 }
 
 
 __all__ = [
     # base classes
-    'Algorithm',
-    'AlgorithmException',
+    "Algorithm",
+    "AlgorithmException",
     # algorithms
-    'AdjustedAlgorithm',
-    'AverageAlgorithm',
-    'DeltaFAlgorithm',
-    'FilterAlgorithm',
-    'SqDistAlgorithm',
-    'XYZAlgorithm'
+    "AdjustedAlgorithm",
+    "AverageAlgorithm",
+    "DeltaFAlgorithm",
+    "FilterAlgorithm",
+    "SqDistAlgorithm",
+    "XYZAlgorithm",
 ]
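
As the comment above notes, the algorithms mapping is used by Controller to auto-generate arguments; the same mapping supports lookup by command-line name:

```python
from geomagio.algorithm import algorithms

# instantiate an algorithm from its registry key
AlgorithmClass = algorithms["xyz"]
algorithm = AlgorithmClass(informat="obs", outformat="geo")
```
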
diff --git a/geomagio/binlog/BinLogWriter.py b/geomagio/binlog/BinLogWriter.py
index 51e3a70c7602b96bf757e59b34c718bb85da1086..f43ac161ee871621a09a0d72520ffce8f2b3b904 100644
--- a/geomagio/binlog/BinLogWriter.py
+++ b/geomagio/binlog/BinLogWriter.py
@@ -40,8 +40,9 @@ class BinLogWriter(object):
         for channel in channels:
             if timeseries.select(channel=channel).count() == 0:
                 raise TimeseriesFactoryException(
-                    'Missing channel "%s" for output, available channels %s' %
-                    (channel, str(TimeseriesUtility.get_channels(timeseries))))
+                    'Missing channel "%s" for output, available channels %s'
+                    % (channel, str(TimeseriesUtility.get_channels(timeseries)))
+                )
         stats = timeseries[0].stats
 
         out.write(self._format_header(stats))
@@ -49,15 +50,16 @@ class BinLogWriter(object):
         self._format_data(timeseries, channels)
 
         if (len(Hbuf) + len(Ebuf) + len(Zbuf)) > 0:
-            out.write(' C  Date       Time     DaySec     Bin change'
-            '    Voltage change\n')
-            out.write(''.join(Hbuf))
-            out.write('\n')
-            out.write(''.join(Ebuf))
-            out.write('\n')
-            out.write(''.join(Zbuf))
+            out.write(
+                " C  Date       Time     DaySec     Bin change" "    Voltage change\n"
+            )
+            out.write("".join(Hbuf))
+            out.write("\n")
+            out.write("".join(Ebuf))
+            out.write("\n")
+            out.write("".join(Zbuf))
         else:
-            out.write('*** No Bin Changes Found ***\n')
+            out.write("*** No Bin Changes Found ***\n")
 
     def _format_header(self, stats):
         """format headers for BinLog file
@@ -78,10 +80,17 @@ class BinLogWriter(object):
         sttdate = stats.starttime.strftime("%d-%b-%y")
         enddate = stats.endtime.strftime("%d-%b-%y")
 
-        buf.append('Bin Change Report: ' + observatory + '  Start Day: ' +
-                    sttdate + ' End Day: ' + enddate + '\n\n')
+        buf.append(
+            "Bin Change Report: "
+            + observatory
+            + "  Start Day: "
+            + sttdate
+            + " End Day: "
+            + enddate
+            + "\n\n"
+        )
 
-        return ''.join(buf)
+        return "".join(buf)
 
     def _format_data(self, timeseries, channels):
         """Format all data lines.
@@ -104,9 +113,10 @@ class BinLogWriter(object):
         # Use a copy of the trace so that we don't modify the original.
         for trace in timeseries:
             traceLocal = trace.copy()
-            if traceLocal.stats.channel == 'D':
-                traceLocal.data = \
-                    ChannelConverter.get_minutes_from_radians(traceLocal.data)
+            if traceLocal.stats.channel == "D":
+                traceLocal.data = ChannelConverter.get_minutes_from_radians(
+                    traceLocal.data
+                )
 
             # TODO - we should look into multiplying the trace all at once
             # like this, but this gives an error on Windows at the moment.
@@ -122,7 +132,8 @@ class BinLogWriter(object):
         for i in range(len(traces[0].data)):
             self._format_values(
                 datetime.utcfromtimestamp(starttime + i * delta),
-                (t.data[i] for t in traces))
+                (t.data[i] for t in traces),
+            )
 
         return
 
@@ -145,10 +156,11 @@ class BinLogWriter(object):
         tt = time.timetuple()
         totalMinutes = int(tt.tm_hour * 3600 + tt.tm_min * 60 + tt.tm_sec)
 
-        timestr = '{0.tm_year:0>4d}-{0.tm_mon:0>2d}-{0.tm_mday:0>2d} ' \
-                  '{0.tm_hour:0>2d}:{0.tm_min:0>2d}:{0.tm_sec:0>2d}' \
-                  ' ({1:0>5d})'. \
-                    format(tt, totalMinutes)
+        timestr = (
+            "{0.tm_year:0>4d}-{0.tm_mon:0>2d}-{0.tm_mday:0>2d} "
+            "{0.tm_hour:0>2d}:{0.tm_min:0>2d}:{0.tm_sec:0>2d}"
+            " ({1:0>5d})".format(tt, totalMinutes)
+        )
 
         # init volt/bin vals to dead
         vdead = 99.999999
@@ -159,27 +171,33 @@ class BinLogWriter(object):
         for idx, valx in enumerate(values):
             if ~numpy.isnan(valx):
                 if idx == 0 or idx == 2 or idx == 4:
-                    vblist[idx] = valx / 1000.
+                    vblist[idx] = valx / 1000.0
                 else:
                     vblist[idx] = int(valx)
 
         if vblist[1] != 999 and h_prev[1] != 999 and vblist[1] != h_prev[1]:
-            Hbuf.append('{0: >3s} {1:>s}  '
-            '{2: >4d} to {3: >4d}  {4: >10.6f} to {5: >10.6f}\n'.
-            format('(H)', timestr, h_prev[1],
-                    vblist[1], h_prev[0], vblist[0]))
+            Hbuf.append(
+                "{0: >3s} {1:>s}  "
+                "{2: >4d} to {3: >4d}  {4: >10.6f} to {5: >10.6f}\n".format(
+                    "(H)", timestr, h_prev[1], vblist[1], h_prev[0], vblist[0]
+                )
+            )
 
         if vblist[3] != 999 and e_prev[1] != 999 and vblist[3] != e_prev[1]:
-            Ebuf.append('{0: >3s} {1:>s}  '
-            '{2: >4d} to {3: >4d}  {4: >10.6f} to {5: >10.6f}\n'.
-            format('(E)', timestr, e_prev[1],
-                    vblist[3], e_prev[0], vblist[2]))
+            Ebuf.append(
+                "{0: >3s} {1:>s}  "
+                "{2: >4d} to {3: >4d}  {4: >10.6f} to {5: >10.6f}\n".format(
+                    "(E)", timestr, e_prev[1], vblist[3], e_prev[0], vblist[2]
+                )
+            )
 
         if vblist[5] != 999 and z_prev[1] != 999 and vblist[5] != z_prev[1]:
-            Zbuf.append('{0: >3s} {1:>s}  '
-            '{2: >4d} to {3: >4d}  {4: >10.6f} to {5: >10.6f}\n'.
-            format('(Z)', timestr, z_prev[1],
-                    vblist[5], z_prev[0], vblist[4]))
+            Zbuf.append(
+                "{0: >3s} {1:>s}  "
+                "{2: >4d} to {3: >4d}  {4: >10.6f} to {5: >10.6f}\n".format(
+                    "(Z)", timestr, z_prev[1], vblist[5], z_prev[0], vblist[4]
+                )
+            )
 
         h_prev[0] = vblist[0]
         h_prev[1] = vblist[1]
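
The three append blocks above share one predicate: report a bin only when the previous and current values are both valid (not the 999 "dead" sentinel) and differ. Distilled into a standalone sketch:

```python
def bin_changed(prev_bin, cur_bin, dead=999):
    return prev_bin != dead and cur_bin != dead and cur_bin != prev_bin

assert bin_changed(12, 13)
assert not bin_changed(999, 13)  # previous value was dead
assert not bin_changed(13, 13)   # no change to report
```
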
diff --git a/geomagio/binlog/StreamBinLogFactory.py b/geomagio/binlog/StreamBinLogFactory.py
index e915b2a4a49cbb00763874b084544ab64ae3c7a9..c7c723b405fbc481e2b1d61b47a43d69b7f4e7c6 100644
--- a/geomagio/binlog/StreamBinLogFactory.py
+++ b/geomagio/binlog/StreamBinLogFactory.py
@@ -23,8 +23,15 @@ class StreamBinLogFactory(BinLogFactory):
         BinLogFactory.__init__(self, **kwargs)
         self._stream = stream
 
-    def put_timeseries(self, timeseries, starttime=None, endtime=None,
-            channels=None, type=None, interval=None):
+    def put_timeseries(
+        self,
+        timeseries,
+        starttime=None,
+        endtime=None,
+        channels=None,
+        type=None,
+        interval=None,
+    ):
         """Implements put_timeseries
 
         Notes: Calls BinLogFactory.write_file in place of
diff --git a/geomagio/binlog/__init__.py b/geomagio/binlog/__init__.py
index 93132290aa7db731c4addc581a9d499e9c048459..5fd9eb78cd3e5292f34319d420812be853709c86 100644
--- a/geomagio/binlog/__init__.py
+++ b/geomagio/binlog/__init__.py
@@ -7,8 +7,4 @@ from .StreamBinLogFactory import StreamBinLogFactory
 from .BinLogWriter import BinLogWriter
 
 
-__all__ = [
-    'BinLogFactory',
-    'StreamBinLogFactory',
-    'BinLogWriter'
-]
+__all__ = ["BinLogFactory", "StreamBinLogFactory", "BinLogWriter"]
diff --git a/geomagio/edge/EdgeFactory.py b/geomagio/edge/EdgeFactory.py
index 6940ee8c28243a81d34aa5cbc425f892a2f69e33..bdecb483ba66f262149f17d93b8af36cae13eb7d 100644
--- a/geomagio/edge/EdgeFactory.py
+++ b/geomagio/edge/EdgeFactory.py
@@ -72,10 +72,22 @@ class EdgeFactory(TimeseriesFactory):
         for reading.
     """
 
-    def __init__(self, host='cwbpub.cr.usgs.gov', port=2060, write_port=7981,
-            observatory=None, channels=None, type=None, interval=None,
-            observatoryMetadata=None, locationCode=None,
-            cwbhost=None, cwbport=0, tag='GeomagAlg', forceout=False):
+    def __init__(
+        self,
+        host="cwbpub.cr.usgs.gov",
+        port=2060,
+        write_port=7981,
+        observatory=None,
+        channels=None,
+        type=None,
+        interval=None,
+        observatoryMetadata=None,
+        locationCode=None,
+        cwbhost=None,
+        cwbport=0,
+        tag="GeomagAlg",
+        forceout=False,
+    ):
         TimeseriesFactory.__init__(self, observatory, channels, type, interval)
         self.client = earthworm.Client(host, port)
 
@@ -86,12 +98,19 @@ class EdgeFactory(TimeseriesFactory):
         self.host = host
         self.port = port
         self.write_port = write_port
-        self.cwbhost = cwbhost or ''
+        self.cwbhost = cwbhost or ""
         self.cwbport = cwbport
         self.forceout = forceout
 
-    def get_timeseries(self, starttime, endtime, observatory=None,
-            channels=None, type=None, interval=None):
+    def get_timeseries(
+        self,
+        starttime,
+        endtime,
+        observatory=None,
+        channels=None,
+        type=None,
+        interval=None,
+    ):
         """Get timeseries data
 
         Parameters
@@ -127,7 +146,8 @@ class EdgeFactory(TimeseriesFactory):
 
         if starttime > endtime:
             raise TimeseriesFactoryException(
-                'Starttime before endtime "%s" "%s"' % (starttime, endtime))
+                'Starttime must be before endtime "%s" "%s"' % (starttime, endtime)
+            )
 
         # obspy factories sometimes write to stdout, instead of stderr
         original_stdout = sys.stdout
@@ -137,8 +157,9 @@ class EdgeFactory(TimeseriesFactory):
             # get the timeseries
             timeseries = obspy.core.Stream()
             for channel in channels:
-                data = self._get_timeseries(starttime, endtime, observatory,
-                        channel, type, interval)
+                data = self._get_timeseries(
+                    starttime, endtime, observatory, channel, type, interval
+                )
                 timeseries += data
         finally:
             # restore stdout
@@ -147,8 +168,16 @@ class EdgeFactory(TimeseriesFactory):
 
         return timeseries
 
-    def put_timeseries(self, timeseries, starttime=None, endtime=None,
-                observatory=None, channels=None, type=None, interval=None):
+    def put_timeseries(
+        self,
+        timeseries,
+        starttime=None,
+        endtime=None,
+        observatory=None,
+        channels=None,
+        type=None,
+        interval=None,
+    ):
         """Put timeseries data
 
         Parameters
@@ -176,17 +205,20 @@ class EdgeFactory(TimeseriesFactory):
         type = type or self.type or stats.data_type
         interval = interval or self.interval or stats.data_interval
 
-        if (starttime is None or endtime is None):
+        if starttime is None or endtime is None:
             starttime, endtime = TimeseriesUtility.get_stream_start_end_times(
-                    timeseries)
+                timeseries
+            )
         for channel in channels:
             if timeseries.select(channel=channel).count() == 0:
                 raise TimeseriesFactoryException(
-                    'Missing channel "%s" for output, available channels %s' %
-                    (channel, str(TimeseriesUtility.get_channels(timeseries))))
+                    'Missing channel "%s" for output, available channels %s'
+                    % (channel, str(TimeseriesUtility.get_channels(timeseries)))
+                )
         for channel in channels:
-            self._put_channel(timeseries, observatory, channel, type,
-                    interval, starttime, endtime)
+            self._put_channel(
+                timeseries, observatory, channel, type, interval, starttime, endtime
+            )
 
     def _convert_timeseries_to_decimal(self, stream):
         """convert geomag edge timeseries data stored as ints, to decimal by
@@ -270,38 +302,38 @@ class EdgeFactory(TimeseriesFactory):
 
         # If form is chan.loc, return chan (left) portion.
         # Allows specific chan/loc selection.
-        if channel.find('.') >= 0:
-            tmplist = channel.split('.')
+        if channel.find(".") >= 0:
+            tmplist = channel.split(".")
             return tmplist[0].strip()
 
-        if channel == 'D':
-            edge_channel = edge_interval_code + 'VD'
-        elif channel == 'E':
-            edge_channel = edge_interval_code + 'VE'
-        elif channel == 'F':
-            edge_channel = edge_interval_code + 'SF'
-        elif channel == 'H':
-            edge_channel = edge_interval_code + 'VH'
-        elif channel == 'Z':
-            edge_channel = edge_interval_code + 'VZ'
-        elif channel == 'G':
-            edge_channel = edge_interval_code + 'SG'
-        elif channel == 'X':
-            edge_channel = edge_interval_code + 'VX'
-        elif channel == 'Y':
-            edge_channel = edge_interval_code + 'VY'
-        elif channel == 'E-E':
-            edge_channel = edge_interval_code + 'QE'
-        elif channel == 'E-N':
-            edge_channel = edge_interval_code + 'QN'
-        elif channel == 'DIST':
-            edge_channel = edge_interval_code + 'DT'
-        elif channel == 'DST':
-            edge_channel = edge_interval_code + 'GD'
-        elif channel == 'SQ':
-            edge_channel = edge_interval_code + 'SQ'
-        elif channel == 'SV':
-            edge_channel = edge_interval_code + 'SV'
+        if channel == "D":
+            edge_channel = edge_interval_code + "VD"
+        elif channel == "E":
+            edge_channel = edge_interval_code + "VE"
+        elif channel == "F":
+            edge_channel = edge_interval_code + "SF"
+        elif channel == "H":
+            edge_channel = edge_interval_code + "VH"
+        elif channel == "Z":
+            edge_channel = edge_interval_code + "VZ"
+        elif channel == "G":
+            edge_channel = edge_interval_code + "SG"
+        elif channel == "X":
+            edge_channel = edge_interval_code + "VX"
+        elif channel == "Y":
+            edge_channel = edge_interval_code + "VY"
+        elif channel == "E-E":
+            edge_channel = edge_interval_code + "QE"
+        elif channel == "E-N":
+            edge_channel = edge_interval_code + "QN"
+        elif channel == "DIST":
+            edge_channel = edge_interval_code + "DT"
+        elif channel == "DST":
+            edge_channel = edge_interval_code + "GD"
+        elif channel == "SQ":
+            edge_channel = edge_interval_code + "SQ"
+        elif channel == "SV":
+            edge_channel = edge_interval_code + "SV"
         else:
             edge_channel = channel
         return edge_channel
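
Combined with the interval and location codes defined just below, these rules assemble the full SNCL. A small sketch of the outcome, assuming minute variation data for element H at a hypothetical observatory BOU:

```python
# minute -> "M" (_get_interval_code), "H" -> interval_code + "VH" (_get_edge_channel),
# variation -> "R0" (_get_edge_location), network is always "NT" (_get_edge_network)
interval_code = "M"
edge_channel = interval_code + "VH"
print(("BOU", "NT", edge_channel, "R0"))  # ('BOU', 'NT', 'MVH', 'R0')
```
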
@@ -332,21 +364,21 @@ class EdgeFactory(TimeseriesFactory):
 
         # If form is chan.loc, return loc (right) portion
         # Allows specific chan/loc selection.
-        if channel.find('.') >= 0:
-            tmplist = channel.split('.')
+        if channel.find(".") >= 0:
+            tmplist = channel.split(".")
             return tmplist[1].strip()
 
         if self.locationCode is not None:
             location = self.locationCode
         else:
-            if type == 'variation' or type == 'reported':
-                location = 'R0'
-            elif type == 'adjusted' or type == 'provisional':
-                location = 'A0'
-            elif type == 'quasi-definitive':
-                location = 'Q0'
-            elif type == 'definitive':
-                location = 'D0'
+            if type == "variation" or type == "reported":
+                location = "R0"
+            elif type == "adjusted" or type == "provisional":
+                location = "A0"
+            elif type == "quasi-definitive":
+                location = "Q0"
+            elif type == "definitive":
+                location = "D0"
         return location
 
     def _get_edge_network(self, observatory, channel, type, interval):
@@ -368,7 +400,7 @@ class EdgeFactory(TimeseriesFactory):
         network
             always NT
         """
-        return 'NT'
+        return "NT"
 
     def _get_edge_station(self, observatory, channel, type, interval):
         """get edge station.
@@ -413,21 +445,19 @@ class EdgeFactory(TimeseriesFactory):
         interval type
         """
         interval_code = None
-        if interval == 'day':
-            interval_code = 'D'
-        elif interval == 'hour':
-            interval_code = 'H'
-        elif interval == 'minute':
-            interval_code = 'M'
-        elif interval == 'second':
-            interval_code = 'S'
+        if interval == "day":
+            interval_code = "D"
+        elif interval == "hour":
+            interval_code = "H"
+        elif interval == "minute":
+            interval_code = "M"
+        elif interval == "second":
+            interval_code = "S"
         else:
-            raise TimeseriesFactoryException(
-                    'Unexpected interval "%s"' % interval)
+            raise TimeseriesFactoryException('Unexpected interval "%s"' % interval)
         return interval_code
 
-    def _get_timeseries(self, starttime, endtime, observatory,
-                channel, type, interval):
+    def _get_timeseries(self, starttime, endtime, observatory, channel, type, interval):
         """get timeseries data for a single channel.
 
         Parameters
@@ -450,26 +480,30 @@ class EdgeFactory(TimeseriesFactory):
         obspy.core.trace
             timeseries trace of the requested channel data
         """
-        station = self._get_edge_station(observatory, channel,
-                type, interval)
-        location = self._get_edge_location(observatory, channel,
-                type, interval)
-        network = self._get_edge_network(observatory, channel,
-                type, interval)
-        edge_channel = self._get_edge_channel(observatory, channel,
-                type, interval)
-        data = self.client.get_waveforms(network, station, location,
-                edge_channel, starttime, endtime)
+        station = self._get_edge_station(observatory, channel, type, interval)
+        location = self._get_edge_location(observatory, channel, type, interval)
+        network = self._get_edge_network(observatory, channel, type, interval)
+        edge_channel = self._get_edge_channel(observatory, channel, type, interval)
+        data = self.client.get_waveforms(
+            network, station, location, edge_channel, starttime, endtime
+        )
         # make sure data is 32bit int
         for trace in data:
-            trace.data = trace.data.astype('i4')
+            trace.data = trace.data.astype("i4")
         data.merge()
         if data.count() == 0:
             data += TimeseriesUtility.create_empty_trace(
-                starttime, endtime, observatory, channel, type,
-                interval, network, station, location)
-        self._set_metadata(data,
-                observatory, channel, type, interval)
+                starttime,
+                endtime,
+                observatory,
+                channel,
+                type,
+                interval,
+                network,
+                station,
+                location,
+            )
+        self._set_metadata(data, observatory, channel, type, interval)
         return data
 
     def _post_process(self, timeseries, starttime, endtime, channels):
@@ -498,15 +532,15 @@ class EdgeFactory(TimeseriesFactory):
                 trace.data.set_fill_value(numpy.nan)
                 trace.data = trace.data.filled()
 
-        if 'D' in channels:
-            for trace in timeseries.select(channel='D'):
-                trace.data = ChannelConverter.get_radians_from_minutes(
-                    trace.data)
+        if "D" in channels:
+            for trace in timeseries.select(channel="D"):
+                trace.data = ChannelConverter.get_radians_from_minutes(trace.data)
 
         TimeseriesUtility.pad_timeseries(timeseries, starttime, endtime)
 
-    def _put_channel(self, timeseries, observatory, channel, type, interval,
-                starttime, endtime):
+    def _put_channel(
+        self, timeseries, observatory, channel, type, interval, starttime, endtime
+    ):
         """Put a channel worth of data
 
         Parameters
@@ -528,14 +562,10 @@ class EdgeFactory(TimeseriesFactory):
         -----
         RawInputClient seems to only work when sockets are
         """
-        station = self._get_edge_station(observatory, channel,
-                type, interval)
-        location = self._get_edge_location(observatory, channel,
-                type, interval)
-        network = self._get_edge_network(observatory, channel,
-                type, interval)
-        edge_channel = self._get_edge_channel(observatory, channel,
-                type, interval)
+        station = self._get_edge_station(observatory, channel, type, interval)
+        location = self._get_edge_location(observatory, channel, type, interval)
+        network = self._get_edge_network(observatory, channel, type, interval)
+        edge_channel = self._get_edge_channel(observatory, channel, type, interval)
 
         now = obspy.core.UTCDateTime(datetime.utcnow())
         if ((now - endtime) > 864000) and (self.cwbport > 0):
@@ -545,11 +575,11 @@ class EdgeFactory(TimeseriesFactory):
             host = self.host
             port = self.write_port
 
-        ric = RawInputClient(self.tag, host, port, station,
-                edge_channel, location, network)
+        ric = RawInputClient(
+            self.tag, host, port, station, edge_channel, location, network
+        )
 
-        stream = self._convert_stream_to_masked(timeseries=timeseries,
-                channel=channel)
+        stream = self._convert_stream_to_masked(timeseries=timeseries, channel=channel)
 
         # Make certain there's actually data
         if not numpy.ma.any(stream.select(channel=channel)[0].data):
@@ -558,9 +588,10 @@ class EdgeFactory(TimeseriesFactory):
         for trace in stream.select(channel=channel).split():
             trace_send = trace.copy()
             trace_send.trim(starttime, endtime)
-            if channel == 'D':
+            if channel == "D":
                 trace_send.data = ChannelConverter.get_minutes_from_radians(
-                    trace_send.data)
+                    trace_send.data
+                )
             trace_send = self._convert_trace_to_int(trace_send)
             ric.send_trace(interval, trace_send)
         if self.forceout:
@@ -582,5 +613,6 @@ class EdgeFactory(TimeseriesFactory):
         """
 
         for trace in stream:
-            self.observatoryMetadata.set_metadata(trace.stats, observatory,
-                    channel, type, interval)
+            self.observatoryMetadata.set_metadata(
+                trace.stats, observatory, channel, type, interval
+            )
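
A minimal usage sketch for the reformatted factory; the host and port are the defaults shown above, while the observatory, time window, and availability of data are assumptions:

```python
from obspy.core import UTCDateTime

from geomagio.edge import EdgeFactory

factory = EdgeFactory(host="cwbpub.cr.usgs.gov", port=2060)
# fetch one hour of minute variation data for a hypothetical observatory
timeseries = factory.get_timeseries(
    starttime=UTCDateTime("2019-01-01T00:00:00Z"),
    endtime=UTCDateTime("2019-01-01T01:00:00Z"),
    observatory="BOU",
    channels=("H", "E", "Z", "F"),
    type="variation",
    interval="minute",
)
print(timeseries)
```
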
diff --git a/geomagio/edge/LocationCode.py b/geomagio/edge/LocationCode.py
index 0cbafd3916f3069f93fe85463112cc65c5eace64..e2840f754881a3d9edc5c1cb392f714757eeb8b6 100644
--- a/geomagio/edge/LocationCode.py
+++ b/geomagio/edge/LocationCode.py
@@ -26,7 +26,8 @@ def LocationCode(code):
         if the location code doesn't match the regular expression.
     """
     try:
-        return re.match('^[A-Z0-9]{2}$', code).group(0)
+        return re.match("^[A-Z0-9]{2}$", code).group(0)
     except AttributeError:
         raise argparse.ArgumentTypeError(
-                'Invalid location code, expected /^[A-Z0-9]{2}$/')
+            "Invalid location code, expected /^[A-Z0-9]{2}$/"
+        )
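
Because `LocationCode` raises `argparse.ArgumentTypeError`, it slots directly into a parser as a `type=` validator; a short sketch (the flag name is made up):

```python
import argparse

from geomagio.edge import LocationCode

parser = argparse.ArgumentParser()
parser.add_argument("--locationcode", type=LocationCode, default=None)

print(parser.parse_args(["--locationcode", "R0"]).locationcode)  # R0
# parser.parse_args(["--locationcode", "r0"]) exits with:
#   Invalid location code, expected /^[A-Z0-9]{2}$/
```
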
diff --git a/geomagio/edge/MiniSeedFactory.py b/geomagio/edge/MiniSeedFactory.py
index da1999051b3ff86e9fc6aaa7225ee53e45b51378..8a58646e6ba96c127108415db8c886b6d322cc89 100644
--- a/geomagio/edge/MiniSeedFactory.py
+++ b/geomagio/edge/MiniSeedFactory.py
@@ -63,10 +63,19 @@ class MiniSeedFactory(TimeseriesFactory):
         for reading.
     """
 
-    def __init__(self, host='cwbpub.cr.usgs.gov', port=2061, write_port=7981,
-            observatory=None, channels=None, type=None, interval=None,
-            observatoryMetadata=None, locationCode=None,
-            convert_channels=None):
+    def __init__(
+        self,
+        host="cwbpub.cr.usgs.gov",
+        port=2061,
+        write_port=7981,
+        observatory=None,
+        channels=None,
+        type=None,
+        interval=None,
+        observatoryMetadata=None,
+        locationCode=None,
+        convert_channels=None,
+    ):
         TimeseriesFactory.__init__(self, observatory, channels, type, interval)
 
         self.client = miniseed.Client(host, port)
@@ -79,8 +88,15 @@ class MiniSeedFactory(TimeseriesFactory):
         self.convert_channels = convert_channels or []
         self.write_client = MiniSeedInputClient(self.host, self.write_port)
 
-    def get_timeseries(self, starttime, endtime, observatory=None,
-            channels=None, type=None, interval=None):
+    def get_timeseries(
+        self,
+        starttime,
+        endtime,
+        observatory=None,
+        channels=None,
+        type=None,
+        interval=None,
+    ):
         """Get timeseries data
 
         Parameters
@@ -116,7 +132,8 @@ class MiniSeedFactory(TimeseriesFactory):
 
         if starttime > endtime:
             raise TimeseriesFactoryException(
-                'Starttime before endtime "%s" "%s"' % (starttime, endtime))
+                'Starttime must be before endtime "%s" "%s"' % (starttime, endtime)
+            )
 
         # obspy factories sometimes write to stdout, instead of stderr
         original_stdout = sys.stdout
@@ -127,11 +144,13 @@ class MiniSeedFactory(TimeseriesFactory):
             timeseries = obspy.core.Stream()
             for channel in channels:
                 if channel in self.convert_channels:
-                    data = self._convert_timeseries(starttime, endtime,
-                            observatory, channel, type, interval)
+                    data = self._convert_timeseries(
+                        starttime, endtime, observatory, channel, type, interval
+                    )
                 else:
-                    data = self._get_timeseries(starttime, endtime,
-                            observatory, channel, type, interval)
+                    data = self._get_timeseries(
+                        starttime, endtime, observatory, channel, type, interval
+                    )
                 timeseries += data
         finally:
             # restore stdout
@@ -140,8 +159,16 @@ class MiniSeedFactory(TimeseriesFactory):
         self._post_process(timeseries, starttime, endtime, channels)
         return timeseries
 
-    def put_timeseries(self, timeseries, starttime=None, endtime=None,
-                observatory=None, channels=None, type=None, interval=None):
+    def put_timeseries(
+        self,
+        timeseries,
+        starttime=None,
+        endtime=None,
+        observatory=None,
+        channels=None,
+        type=None,
+        interval=None,
+    ):
         """Put timeseries data
 
         Parameters
@@ -169,22 +196,26 @@ class MiniSeedFactory(TimeseriesFactory):
         type = type or self.type or stats.data_type
         interval = interval or self.interval or stats.data_interval
 
-        if (starttime is None or endtime is None):
+        if starttime is None or endtime is None:
             starttime, endtime = TimeseriesUtility.get_stream_start_end_times(
-                    timeseries)
+                timeseries
+            )
         for channel in channels:
             if timeseries.select(channel=channel).count() == 0:
                 raise TimeseriesFactoryException(
-                    'Missing channel "%s" for output, available channels %s' %
-                    (channel, str(TimeseriesUtility.get_channels(timeseries))))
+                    'Missing channel "%s" for output, available channels %s'
+                    % (channel, str(TimeseriesUtility.get_channels(timeseries)))
+                )
         for channel in channels:
-            self._put_channel(timeseries, observatory, channel, type,
-                    interval, starttime, endtime)
+            self._put_channel(
+                timeseries, observatory, channel, type, interval, starttime, endtime
+            )
         # close socket
         self.write_client.close()
 
-    def get_calculated_timeseries(self, starttime, endtime, observatory,
-            channel, type, interval, components):
+    def get_calculated_timeseries(
+        self, starttime, endtime, observatory, channel, type, interval, components
+    ):
         """Calculate a single channel using multiple component channels.
 
         Parameters
@@ -217,8 +248,9 @@ class MiniSeedFactory(TimeseriesFactory):
         converted = None
         for component in components:
             # load component
-            data = self._get_timeseries(starttime, endtime, observatory,
-                        component["channel"], type, interval)[0]
+            data = self._get_timeseries(
+                starttime, endtime, observatory, component["channel"], type, interval
+            )[0]
             # convert to nT
             nt = data.data * component["scale"] + component["offset"]
             # add to converted
@@ -230,10 +262,17 @@ class MiniSeedFactory(TimeseriesFactory):
         # set channel parameter to U, V, or W
         stats.channel = channel
         # create empty trace with adapted stats
-        out = TimeseriesUtility.create_empty_trace(stats.starttime,
-                stats.endtime, stats.station, stats.channel,
-                stats.data_type, stats.data_interval,
-                stats.network, stats.station, stats.location)
+        out = TimeseriesUtility.create_empty_trace(
+            stats.starttime,
+            stats.endtime,
+            stats.station,
+            stats.channel,
+            stats.data_type,
+            stats.data_interval,
+            stats.network,
+            stats.station,
+            stats.location,
+        )
         out.data = converted
         return out
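
Each component entry carries a `channel` plus `scale` and `offset`, applied as `nT = data * scale + offset` before summing. The conversion step in isolation, with made-up calibration values:

```python
import numpy

# hypothetical calibrations; real values come from instrument metadata
components = [
    {"channel": "U_Volt", "scale": 100.0, "offset": 0.0},
    {"channel": "U_Bin", "scale": 500.0, "offset": 0.0},
]
data = {
    "U_Volt": numpy.array([0.01, 0.02]),
    "U_Bin": numpy.array([2.0, 2.0]),
}
converted = None
for component in components:
    nt = data[component["channel"]] * component["scale"] + component["offset"]
    converted = nt if converted is None else converted + nt
print(converted)  # [1001. 1002.]
```
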
 
@@ -281,53 +320,53 @@ class MiniSeedFactory(TimeseriesFactory):
 
         # If form is chan.loc, return chan (left) portion.
         # Allows specific chan/loc selection.
-        if channel.find('.') >= 0:
-            tmplist = channel.split('.')
+        if channel.find(".") >= 0:
+            tmplist = channel.split(".")
             return tmplist[0].strip()
 
         # see if channel name uses _ for ELEMENT_SUFFIX
         element = None
         suffix = None
-        if channel.find('_') >= 0:
-            element, suffix = channel.split('_')
+        if channel.find("_") >= 0:
+            element, suffix = channel.split("_")
 
         # 10Hz should be bin/volt
-        if interval == 'tenhertz':
+        if interval == "tenhertz":
             middle = None
-            if suffix == 'Bin':
-                middle = 'Y'
-            elif suffix == 'Volt':
-                middle = 'E'
+            if suffix == "Bin":
+                middle = "Y"
+            elif suffix == "Volt":
+                middle = "E"
             elif suffix is not None:
                 raise TimeseriesFactoryException(
-                        'bad channel suffix "%s", wanted "Bin" or "Volt"'
-                        % suffix)
+                    'bad channel suffix "%s", wanted "Bin" or "Volt"' % suffix
+                )
             # check for expected channels
-            if element in ('U', 'V', 'W') and middle is not None:
+            if element in ("U", "V", "W") and middle is not None:
                 return edge_interval_code + middle + element
             else:
                 # unknown, assume advanced user
                 return channel
 
         if suffix is not None:
-            if suffix == 'Dist' or suffix == 'SQ' or suffix == 'SV':
+            if suffix == "Dist" or suffix == "SQ" or suffix == "SV":
                 # these suffixes modify location code, but use element channel
                 channel = element
             else:
                 raise TimeseriesFactoryException(
-                        'bad channel suffix "%s", wanted "Dist", "SQ", or "SV"'
-                        % suffix)
-        if channel in ('D', 'F', 'G', 'H', 'U', 'V', 'W', 'X', 'Y', 'Z'):
+                    'bad channel suffix "%s", wanted "Dist", "SQ", or "SV"' % suffix
+                )
+        if channel in ("D", "F", "G", "H", "U", "V", "W", "X", "Y", "Z"):
             # normal elements
-            edge_channel = edge_interval_code + 'F' + channel
-        elif channel == 'E-E':
-            edge_channel = edge_interval_code + 'QE'
-        elif channel == 'E-N':
-            edge_channel = edge_interval_code + 'QN'
-        elif channel == 'Dst4':
-            edge_channel = edge_interval_code + 'X4'
-        elif channel == 'Dst3':
-            edge_channel = edge_interval_code + 'X3'
+            edge_channel = edge_interval_code + "F" + channel
+        elif channel == "E-E":
+            edge_channel = edge_interval_code + "QE"
+        elif channel == "E-N":
+            edge_channel = edge_interval_code + "QN"
+        elif channel == "Dst4":
+            edge_channel = edge_interval_code + "X4"
+        elif channel == "Dst3":
+            edge_channel = edge_interval_code + "X3"
         else:
             edge_channel = channel
         return edge_channel
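
Unlike `EdgeFactory`, miniseed names use the interval codes from `_get_interval_code` below and an `F` middle for plain elements, with `Y`/`E` middles for 10 Hz bins and volts. A sketch that mirrors (but is not) the factory logic:

```python
INTERVAL_CODE = {"tenhertz": "B", "second": "L", "minute": "U", "hour": "R", "day": "P"}


def miniseed_channel(element, suffix, interval):
    """Sketch of the naming rules above, not the factory itself."""
    code = INTERVAL_CODE[interval]
    if interval == "tenhertz" and suffix in ("Bin", "Volt"):
        return code + ("Y" if suffix == "Bin" else "E") + element
    return code + "F" + element


print(miniseed_channel("U", "Volt", "tenhertz"))  # BEU
print(miniseed_channel("H", None, "minute"))  # UFH
```
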
@@ -356,36 +395,36 @@ class MiniSeedFactory(TimeseriesFactory):
         """
         # If form is chan.loc, return loc (right) portion
         # Allows specific chan/loc selection.
-        if channel.find('.') >= 0:
-            tmplist = channel.split('.')
+        if channel.find(".") >= 0:
+            tmplist = channel.split(".")
             return tmplist[1].strip()
         # factory override
         if self.locationCode is not None:
             return self.locationCode
         # determine prefix
-        location_prefix = 'R'
-        if data_type == 'variation' or data_type == 'reported':
-            location_prefix = 'R'
-        elif data_type == 'adjusted' or data_type == 'provisional':
-            location_prefix = 'A'
-        elif data_type == 'quasi-definitive':
-            location_prefix = 'Q'
-        elif data_type == 'definitive':
-            location_prefix = 'D'
+        location_prefix = "R"
+        if data_type == "variation" or data_type == "reported":
+            location_prefix = "R"
+        elif data_type == "adjusted" or data_type == "provisional":
+            location_prefix = "A"
+        elif data_type == "quasi-definitive":
+            location_prefix = "Q"
+        elif data_type == "definitive":
+            location_prefix = "D"
         # determine suffix
-        location_suffix = '0'
-        if channel.find('_') >= 0:
-            _, suffix = channel.split('_')
-            if suffix == 'Dist':
-                location_suffix = 'D'
-            elif suffix == 'SQ':
-                location_suffix = 'Q'
-            elif suffix == 'SV':
-                location_suffix = 'V'
-            elif suffix not in ('Bin', 'Volt'):
+        location_suffix = "0"
+        if channel.find("_") >= 0:
+            _, suffix = channel.split("_")
+            if suffix == "Dist":
+                location_suffix = "D"
+            elif suffix == "SQ":
+                location_suffix = "Q"
+            elif suffix == "SV":
+                location_suffix = "V"
+            elif suffix not in ("Bin", "Volt"):
                 raise TimeseriesFactoryException(
-                        'bad channel suffix "%s", wanted "Dist", "SQ", or "SV"'
-                        % suffix)
+                    'bad channel suffix "%s", wanted "Dist", "SQ", or "SV"' % suffix
+                )
         return location_prefix + location_suffix
 
     def _get_edge_network(self, observatory, channel, type, interval):
@@ -407,7 +446,7 @@ class MiniSeedFactory(TimeseriesFactory):
         network
             always NT
         """
-        return 'NT'
+        return "NT"
 
     def _get_edge_station(self, observatory, channel, type, interval):
         """get edge station.
@@ -446,23 +485,21 @@ class MiniSeedFactory(TimeseriesFactory):
         interval type
         """
         interval_code = None
-        if interval == 'day':
-            interval_code = 'P'
-        elif interval == 'hour':
-            interval_code = 'R'
-        elif interval == 'minute':
-            interval_code = 'U'
-        elif interval == 'second':
-            interval_code = 'L'
-        elif interval == 'tenhertz':
-            interval_code = 'B'
+        if interval == "day":
+            interval_code = "P"
+        elif interval == "hour":
+            interval_code = "R"
+        elif interval == "minute":
+            interval_code = "U"
+        elif interval == "second":
+            interval_code = "L"
+        elif interval == "tenhertz":
+            interval_code = "B"
         else:
-            raise TimeseriesFactoryException(
-                    'Unexpected interval "%s"' % interval)
+            raise TimeseriesFactoryException('Unexpected interval "%s"' % interval)
         return interval_code
 
-    def _get_timeseries(self, starttime, endtime, observatory,
-                channel, type, interval):
+    def _get_timeseries(self, starttime, endtime, observatory, channel, type, interval):
         """get timeseries data for a single channel.
 
         Parameters
@@ -485,27 +522,32 @@ class MiniSeedFactory(TimeseriesFactory):
         obspy.core.trace
             timeseries trace of the requested channel data
         """
-        station = self._get_edge_station(observatory, channel,
-                type, interval)
-        location = self._get_edge_location(observatory, channel,
-                type, interval)
-        network = self._get_edge_network(observatory, channel,
-                type, interval)
-        edge_channel = self._get_edge_channel(observatory, channel,
-                type, interval)
-        data = self.client.get_waveforms(network, station, location,
-                edge_channel, starttime, endtime)
+        station = self._get_edge_station(observatory, channel, type, interval)
+        location = self._get_edge_location(observatory, channel, type, interval)
+        network = self._get_edge_network(observatory, channel, type, interval)
+        edge_channel = self._get_edge_channel(observatory, channel, type, interval)
+        data = self.client.get_waveforms(
+            network, station, location, edge_channel, starttime, endtime
+        )
         data.merge()
         if data.count() == 0:
             data += TimeseriesUtility.create_empty_trace(
-                starttime, endtime, observatory, channel, type,
-                interval, network, station, location)
-        self._set_metadata(data,
-                observatory, channel, type, interval)
+                starttime,
+                endtime,
+                observatory,
+                channel,
+                type,
+                interval,
+                network,
+                station,
+                location,
+            )
+        self._set_metadata(data, observatory, channel, type, interval)
         return data
 
-    def _convert_timeseries(self, starttime, endtime, observatory,
-                channel, type, interval):
+    def _convert_timeseries(
+        self, starttime, endtime, observatory, channel, type, interval
+    ):
         """Generate a single channel using multiple components.
 
         Finds metadata, then calls _get_converted_timeseries for actual
@@ -543,18 +585,26 @@ class MiniSeedFactory(TimeseriesFactory):
                 # no idea how to convert
                 continue
             # determine metadata overlap with request
-            start = (starttime
-                    if entry_starttime is None or
-                        entry_starttime < starttime
-                    else entry_starttime)
-            end = (endtime
-                    if entry_endtime is None or
-                        entry_endtime > endtime
-                    else entry_endtime)
+            start = (
+                starttime
+                if entry_starttime is None or entry_starttime < starttime
+                else entry_starttime
+            )
+            end = (
+                endtime
+                if entry_endtime is None or entry_endtime > endtime
+                else entry_endtime
+            )
             # now convert
-            out += self.get_calculated_timeseries(start, end,
-                    observatory, channel, type, interval,
-                    instrument_channels[channel])
+            out += self.get_calculated_timeseries(
+                start,
+                end,
+                observatory,
+                channel,
+                type,
+                interval,
+                instrument_channels[channel],
+            )
         return out
 
     def _post_process(self, timeseries, starttime, endtime, channels):
@@ -582,15 +632,15 @@ class MiniSeedFactory(TimeseriesFactory):
                 trace.data.set_fill_value(numpy.nan)
                 trace.data = trace.data.filled()
 
-        if 'D' in channels:
-            for trace in timeseries.select(channel='D'):
-                trace.data = ChannelConverter.get_radians_from_minutes(
-                    trace.data)
+        if "D" in channels:
+            for trace in timeseries.select(channel="D"):
+                trace.data = ChannelConverter.get_radians_from_minutes(trace.data)
 
         TimeseriesUtility.pad_timeseries(timeseries, starttime, endtime)
 
-    def _put_channel(self, timeseries, observatory, channel, type, interval,
-                starttime, endtime):
+    def _put_channel(
+        self, timeseries, observatory, channel, type, interval, starttime, endtime
+    ):
         """Put a channel worth of data
 
         Parameters
@@ -614,14 +664,10 @@ class MiniSeedFactory(TimeseriesFactory):
         to_write = to_write.split()
         to_write = TimeseriesUtility.unmask_stream(to_write)
         # relabel channels from internal to edge conventions
-        station = self._get_edge_station(observatory, channel,
-                type, interval)
-        location = self._get_edge_location(observatory, channel,
-                type, interval)
-        network = self._get_edge_network(observatory, channel,
-                type, interval)
-        edge_channel = self._get_edge_channel(observatory, channel,
-                type, interval)
+        station = self._get_edge_station(observatory, channel, type, interval)
+        location = self._get_edge_location(observatory, channel, type, interval)
+        network = self._get_edge_network(observatory, channel, type, interval)
+        edge_channel = self._get_edge_channel(observatory, channel, type, interval)
         for trace in to_write:
             trace.stats.station = station
             trace.stats.location = location
@@ -645,5 +691,6 @@ class MiniSeedFactory(TimeseriesFactory):
         """
 
         for trace in stream:
-            self.observatoryMetadata.set_metadata(trace.stats, observatory,
-                    channel, type, interval)
+            self.observatoryMetadata.set_metadata(
+                trace.stats, observatory, channel, type, interval
+            )
diff --git a/geomagio/edge/MiniSeedInputClient.py b/geomagio/edge/MiniSeedInputClient.py
index 382b535a338c630b2a2ef6efe24b2f0e01e97360..dc66c17b0cb288d70cfb0896a2f3b9fdd6284df7 100644
--- a/geomagio/edge/MiniSeedInputClient.py
+++ b/geomagio/edge/MiniSeedInputClient.py
@@ -17,6 +17,7 @@ class MiniSeedInputClient(object):
     port: int
         MiniSeedServer port
     """
+
     def __init__(self, host, port=2061):
         self.host = host
         self.port = port
@@ -53,8 +54,7 @@ class MiniSeedInputClient(object):
             except socket.error as e:
                 if attempts >= max_attempts:
                     raise
-                print('Unable to connect (%s), trying again' % e,
-                        file=sys.stderr)
+                print("Unable to connect (%s), trying again" % e, file=sys.stderr)
         self.socket = s
 
     def send(self, stream):
@@ -72,6 +72,6 @@ class MiniSeedInputClient(object):
             self.connect()
         # convert stream to miniseed
         buf = io.BytesIO()
-        stream.write(buf, format='MSEED')
+        stream.write(buf, format="MSEED")
         # send data
         self.socket.sendall(buf.getvalue())
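
A minimal sketch of the client in use, assuming a miniseed input server is listening locally (nothing in this diff starts one):

```python
import numpy
import obspy

from geomagio.edge.MiniSeedInputClient import MiniSeedInputClient

# hypothetical one-minute trace; real callers pass factory output
stream = obspy.Stream([obspy.Trace(numpy.zeros(60, dtype="float64"))])
client = MiniSeedInputClient("localhost", port=2061)
client.send(stream)  # connects on first use, encodes the stream as MSEED
client.close()
```
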
diff --git a/geomagio/edge/RawInputClient.py b/geomagio/edge/RawInputClient.py
index 0c79ab28c9f6f4ec0659646cf8934ef9ca885843..695b93688e0b6e1c64e5f3bced2e760f978f907a 100644
--- a/geomagio/edge/RawInputClient.py
+++ b/geomagio/edge/RawInputClient.py
@@ -27,9 +27,9 @@ PACKSTR, TAGSTR: Strings used by struct.pack, to indicate the data format
     for that packet.
 PACKETHEAD: The code that leads a packet being sent to Edge.
 """
-PACKSTR = '!1H1h12s4h4B3i'
-TAGSTR = '!1H1h12s6i'
-PACKETHEAD = 0xa1b2
+PACKSTR = "!1H1h12s4h4B3i"
+TAGSTR = "!1H1h12s6i"
+PACKETHEAD = 0xA1B2
 
 """
 TAG, FORCEOUT: Flags that indicate to edge that a "data" packet has a specific
@@ -39,7 +39,7 @@ TAG = -1
 FORCEOUT = -2
 
 
-class RawInputClient():
+class RawInputClient:
 
     """RawInputClient for direct to edge data.
     Parameters
@@ -77,9 +77,20 @@ class RawInputClient():
     Uses sockets to send data to an edge. See send method for packet encoding
     """
 
-    def __init__(self, tag='', host='', port=0, station='', channel='',
-            location='', network='', activity=0, ioclock=0, quality=0,
-            timingquality=0):
+    def __init__(
+        self,
+        tag="",
+        host="",
+        port=0,
+        station="",
+        channel="",
+        location="",
+        network="",
+        activity=0,
+        ioclock=0,
+        quality=0,
+        timingquality=0,
+    ):
         self.tag = tag
         self.host = host
         self.port = port
@@ -92,12 +103,10 @@ class RawInputClient():
         self.buf = None
         self.sequence = 0
 
-        self.seedname = self.create_seedname(station, channel,
-                location, network)
+        self.seedname = self.create_seedname(station, channel, location, network)
 
         if len(self.tag) > 10:
-            raise TimeseriesFactoryException(
-                'Tag limited to 10 characters')
+            raise TimeseriesFactoryException("Tag limited to 10 characters")
 
     def close(self):
         """close the open sockets
@@ -106,8 +115,7 @@ class RawInputClient():
             self.socket.close()
             self.socket = None
 
-    def create_seedname(self, observatory, channel, location='R0',
-                network='NT'):
+    def create_seedname(self, observatory, channel, location="R0", network="NT"):
         """create a seedname for communication with edge.
 
         PARAMETERS
@@ -133,10 +141,7 @@ class RawInputClient():
         the correct length.  We only expect observatory to ever be of an
         incorrect length.
         """
-        return str(network +
-                observatory.ljust(5) +
-                channel +
-                location).encode()
+        return str(network + observatory.ljust(5) + channel + location).encode()
 
     def forceout(self):
         """ force edge to recognize data
@@ -148,7 +153,7 @@ class RawInputClient():
             Forceout tells edge that we're done sending data for now, and
             to go ahead and make it available
         """
-        buf = self._get_forceout(UTCDateTime(datetime.utcnow()), 0.)
+        buf = self._get_forceout(UTCDateTime(datetime.utcnow()), 0.0)
         self._send(buf)
 
     def send_trace(self, interval, trace):
@@ -169,25 +174,24 @@ class RawInputClient():
         totalsamps = len(trace.data)
         starttime = trace.stats.starttime
 
-        if interval == 'second':
+        if interval == "second":
             nsamp = HOURSECONDS
             timeoffset = 1
-            samplerate = 1.
-        elif interval == 'minute':
+            samplerate = 1.0
+        elif interval == "minute":
             nsamp = DAYMINUTES
             timeoffset = 60
-            samplerate = 1. / 60
-        elif interval == 'hourly':
+            samplerate = 1.0 / 60
+        elif interval == "hourly":
             nsamp = MAXINPUTSIZE
             timeoffset = 3600
-            samplerate = 1. / 3600
-        elif interval == 'daily':
+            samplerate = 1.0 / 3600
+        elif interval == "daily":
             nsamp = MAXINPUTSIZE
             timeoffset = 86400
-            samplerate = 1. / 86400
+            samplerate = 1.0 / 86400
         else:
-            raise TimeseriesFactoryException(
-                    'Unsupported interval for RawInputClient')
+            raise TimeseriesFactoryException("Unsupported interval for RawInputClient")
 
         for i in range(0, totalsamps, nsamp):
             if totalsamps - i < nsamp:
@@ -228,7 +232,7 @@ class RawInputClient():
             self.socket.sendall(buf)
             self.sequence += 1
         except socket.error as v:
-            error = 'Socket error %d' % v[0]
+            error = "Socket error %d" % v[0]
             sys.stderr.write(error)
             raise TimeseriesFactoryException(error)
 
@@ -271,10 +275,23 @@ class RawInputClient():
         yr, doy, secs, usecs = self._get_time_values(time)
         ratemantissa, ratedivisor = self._get_mantissa_divisor(rate)
 
-        buf = struct.pack(PACKSTR, PACKETHEAD, FORCEOUT, self.seedname, yr,
-                doy, ratemantissa, ratedivisor, self.activity, self.ioclock,
-                self.quality, self.timingquality, secs, usecs,
-                self.sequence)
+        buf = struct.pack(
+            PACKSTR,
+            PACKETHEAD,
+            FORCEOUT,
+            self.seedname,
+            yr,
+            doy,
+            ratemantissa,
+            ratedivisor,
+            self.activity,
+            self.ioclock,
+            self.quality,
+            self.timingquality,
+            secs,
+            usecs,
+            self.sequence,
+        )
         return buf
 
     def _get_data(self, samples, time, rate):
@@ -319,17 +336,32 @@ class RawInputClient():
         nsamp = len(samples)
         if nsamp > 32767:
             raise TimeseriesFactoryException(
-                'Edge input limited to 32767 integers per packet.')
+                "Edge input limited to 32767 integers per packet."
+            )
 
         yr, doy, secs, usecs = self._get_time_values(time)
         ratemantissa, ratedivisor = self._get_mantissa_divisor(rate)
 
-        packStr = '%s%d%s' % (PACKSTR, nsamp, 'i')
+        packStr = "%s%d%s" % (PACKSTR, nsamp, "i")
         bpackStr = str(packStr).encode()
-        buf = struct.pack(bpackStr, PACKETHEAD, nsamp, self.seedname, yr, doy,
-                ratemantissa, ratedivisor, self.activity, self.ioclock,
-                self.quality, self.timingquality, secs, usecs, self.sequence,
-                *samples)
+        buf = struct.pack(
+            bpackStr,
+            PACKETHEAD,
+            nsamp,
+            self.seedname,
+            yr,
+            doy,
+            ratemantissa,
+            ratedivisor,
+            self.activity,
+            self.ioclock,
+            self.quality,
+            self.timingquality,
+            secs,
+            usecs,
+            self.sequence,
+            *samples
+        )
 
         return buf
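
The resulting wire format can be reproduced with `struct` alone; every header field below is a placeholder rather than a value taken from a real trace:

```python
import struct

PACKSTR = "!1H1h12s4h4B3i"  # 40-byte header, as defined at the top of the module
PACKETHEAD = 0xA1B2
samples = [10, 11, 12]  # hypothetical data values
nsamp = len(samples)
pack_str = "%s%d%s" % (PACKSTR, nsamp, "i")
buf = struct.pack(
    str(pack_str).encode(),
    PACKETHEAD,
    nsamp,
    b"NTBOU  LFHR0",  # network + padded station + channel + location
    2019,  # year
    1,  # day of year
    100,  # ratemantissa (1 Hz -> rate * 100)
    -100,  # ratedivisor
    0, 0, 0, 0,  # activity, ioclock, quality, timingquality
    0,  # seconds since midnight
    0,  # microseconds
    0,  # sequence
    *samples
)
print(len(buf))  # 40-byte header plus 4 bytes per sample: 52
```
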
 
@@ -349,11 +381,11 @@ class RawInputClient():
         if rate > 0.9999:
             ratemantissa = int(rate * 100 + 0.001)
             ratedivisor = -100
-        elif rate * 60. - 1.0 < 0.00000001:          # one minute data
+        elif rate * 60.0 - 1.0 < 0.00000001:  # one minute data
             ratemantissa = -60
             ratedivisor = 1
         else:
-            ratemantissa = int(rate * 10000. + 0.001)
+            ratemantissa = int(rate * 10000.0 + 0.001)
             ratedivisor = -10000
 
         return (ratemantissa, ratedivisor)
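
The pair encodes the sample rate for the packet header; a sketch mirroring the two branches the factories actually hit (1 Hz second data and one-minute data):

```python
def mantissa_divisor(rate):
    # mirrors _get_mantissa_divisor above; rate is samples per second
    if rate > 0.9999:
        return int(rate * 100 + 0.001), -100
    elif rate * 60.0 - 1.0 < 0.00000001:  # one-minute data
        return -60, 1
    else:
        return int(rate * 10000.0 + 0.001), -10000


print(mantissa_divisor(1.0))  # (100, -100)
print(mantissa_divisor(1.0 / 60))  # (-60, 1)
```
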
@@ -375,9 +407,8 @@ class RawInputClient():
         The Packet is right padded with zeros
         The Packet must be 40 Bytes long.
         """
-        tg = str(self.tag + '            ').encode()
-        tb = struct.pack(TAGSTR, PACKETHEAD, TAG, tg[:12],
-                0, 0, 0, 0, 0, 0)
+        tg = str(self.tag + "            ").encode()
+        tb = struct.pack(TAGSTR, PACKETHEAD, TAG, tg[:12], 0, 0, 0, 0, 0, 0)
         return tb
 
     def _get_time_values(self, time):
@@ -419,11 +450,11 @@ class RawInputClient():
                 newsocket.connect((self.host, self.port))
                 done = True
             except socket.error as v:
-                sys.stderr.write('Could not connect to socket, trying again')
-                sys.stderr.write('sockect error %d' % v[0])
+                sys.stderr.write("Could not connect to socket, trying again")
+                sys.stderr.write("sockect error %d" % v[0])
                 sleep(1)
             if trys > 2:
-                raise TimeseriesFactoryException('Could not open socket')
+                raise TimeseriesFactoryException("Could not open socket")
             trys += 1
         self.socket = newsocket
         self.socket.sendall(self._get_tag())
diff --git a/geomagio/edge/__init__.py b/geomagio/edge/__init__.py
index 047575c7d19fef9217a5a2784eec76991c92d8e6..93888daa3356a5d4f8b5d3fed1034772106e6f13 100644
--- a/geomagio/edge/__init__.py
+++ b/geomagio/edge/__init__.py
@@ -7,9 +7,4 @@ from .LocationCode import LocationCode
 from .MiniSeedFactory import MiniSeedFactory
 from .RawInputClient import RawInputClient
 
-__all__ = [
-    'EdgeFactory',
-    'LocationCode',
-    'MiniSeedFactory',
-    'RawInputClient'
-]
+__all__ = ["EdgeFactory", "LocationCode", "MiniSeedFactory", "RawInputClient"]
diff --git a/geomagio/edge/sncl.py b/geomagio/edge/sncl.py
index 35f230964bf40f9d78a8a9660136fa3020b2ac34..5bfa4ff2ee186cd832a70623fd6b8604ceee3aea 100644
--- a/geomagio/edge/sncl.py
+++ b/geomagio/edge/sncl.py
@@ -9,32 +9,33 @@ Location
 # components that map directly to channel suffixes
 CHANNEL_FROM_COMPONENT = {
     # e-field
-    'E-E': 'QY',
-    'E-N': 'QX',
-    'E-U': 'QU',
-    'E-V': 'QV',
+    "E-E": "QY",
+    "E-N": "QX",
+    "E-U": "QU",
+    "E-V": "QV",
     # derived indices
-    'AE': 'XA',
-    'DST3': 'X3',
-    'DST': 'X4',
-    'K': 'XK'
+    "AE": "XA",
+    "DST3": "X3",
+    "DST": "X4",
+    "K": "XK",
 }
 # reverse lookup of component from channel
-COMPONENT_FROM_CHANNEL = dict(
-        (v, k) for (k, v) in CHANNEL_FROM_COMPONENT.iteritems())
+COMPONENT_FROM_CHANNEL = dict((v, k) for (k, v) in CHANNEL_FROM_COMPONENT.items())
 
 
 class SNCLException(Exception):
     pass
 
 
-def get_scnl(observatory,
-        component=None,
-        channel=None,
-        data_type='variation',
-        interval='second',
-        location=None,
-        network='NT'):
+def get_scnl(
+    observatory,
+    component=None,
+    channel=None,
+    data_type="variation",
+    interval="second",
+    location=None,
+    network="NT",
+):
     """Generate a SNCL code from data attributes.
 
     Parameters
@@ -76,10 +77,10 @@ def get_scnl(observatory,
     channel = channel or __get_channel(component, interval)
     location = location or __get_location(component, data_type)
     return {
-        'station': observatory,
-        'network': network,
-        'channel': channel,
-        'location': location,
+        "station": observatory,
+        "network": network,
+        "channel": channel,
+        "location": location,
     }
 
 
@@ -108,16 +109,16 @@ def parse_sncl(sncl):
         'data_type'   : geomag data type (e.g. 'variation')
         'interval'    : data interval in seconds (e.g. 1)
     """
-    network = sncl['network']
-    station = sncl['station']
-    channel = sncl['channel']
-    location = sncl['location']
+    network = sncl["network"]
+    station = sncl["station"]
+    channel = sncl["channel"]
+    location = sncl["location"]
     return {
-        'observatory': station,
-        'network': network,
-        'component': __parse_component(channel, location),
-        'data_type': __parse_data_type(location),
-        'interval': __parse_interval(channel),
+        "observatory": station,
+        "network": network,
+        "component": __parse_component(channel, location),
+        "data_type": __parse_data_type(location),
+        "interval": __parse_interval(channel),
     }
 
 
@@ -132,35 +133,35 @@ def __get_channel(component, interval):
 
 
 def __get_channel_start(interval):
-    if interval == 'tenhertz' or interval == 0.1:
-        return 'B'
-    if interval == 'second' or interval == 1:
-        return 'L'
-    if interval == 'minute' or interval == 60:
-        return 'U'
-    if interval == 'hour' or interval == 3600:
-        return 'R'
-    if interval == 'day' or interval == 86400:
-        return 'P'
-    raise SNCLException('Unexpected interval {}'.format(interval))
+    if interval == "tenhertz" or interval == 0.1:
+        return "B"
+    if interval == "second" or interval == 1:
+        return "L"
+    if interval == "minute" or interval == 60:
+        return "U"
+    if interval == "hour" or interval == 3600:
+        return "R"
+    if interval == "day" or interval == 86400:
+        return "P"
+    raise SNCLException("Unexpected interval {}".format(interval))
 
 
 def __get_channel_end(component):
     # default to engineering units
-    channel_middle = 'F'
+    channel_middle = "F"
     # check for suffix that may override
-    component_parts = component.split('-')
+    component_parts = component.split("-")
     channel_end = component_parts[0]
     if len(component_parts) > 1:
         component_suffix = component_parts[1]
-        if component_suffix == '-Bin':
-            channel_middle = 'Y'
-        elif component_suffix == '-Temp':
-            channel_middle = 'K'
-        elif component_suffix == '-Volt':
-            channel_middle = 'E'
+        if component_suffix == "-Bin":
+            channel_middle = "Y"
+        elif component_suffix == "-Temp":
+            channel_middle = "K"
+        elif component_suffix == "-Volt":
+            channel_middle = "E"
         else:
-            raise SNCLException('Unexpected component {}'.format(component))
+            raise SNCLException("Unexpected component {}".format(component))
     return channel_middle + channel_end
 
 
@@ -171,27 +172,27 @@ def __get_location(component, data_type):
 
 
 def __get_location_start(data_type):
-    if data_type == 'variation':
-        return 'R'
-    elif data_type == 'adjusted':
-        return 'A'
-    elif data_type == 'quasi-definitive':
-        return 'Q'
-    elif data_type == 'definitive':
-        return 'D'
-    raise SNCLException('Unexpected data type {}'.format(data_type))
+    if data_type == "variation":
+        return "R"
+    elif data_type == "adjusted":
+        return "A"
+    elif data_type == "quasi-definitive":
+        return "Q"
+    elif data_type == "definitive":
+        return "D"
+    raise SNCLException("Unexpected data type {}".format(data_type))
 
 
 def __get_location_end(component):
-    if component.endswith('-Sat'):
-        return '1'
-    if component.endswith('-Dist'):
-        return 'D'
-    if component.endswith('-SQ'):
-        return 'Q'
-    if component.endswith('-SV'):
-        return 'V'
-    return '0'
+    if component.endswith("-Sat"):
+        return "1"
+    if component.endswith("-Dist"):
+        return "D"
+    if component.endswith("-SQ"):
+        return "Q"
+    if component.endswith("-SV"):
+        return "V"
+    return "0"
 
 
 def __parse_component(channel, location):
@@ -200,58 +201,58 @@ def __parse_component(channel, location):
         return COMPONENT_FROM_CHANNEL[channel_end]
     channel_middle = channel[1]
     component = channel[2]
-    component_end = ''
-    if channel_middle == 'E':
-        component_end = '-Volt'
-    elif channel_middle == 'K':
-        component_end = '-Temp'
-    elif channel_middle == 'Y':
-        component_end = '-Bin'
-    elif channel_middle == 'F':
+    component_end = ""
+    if channel_middle == "E":
+        component_end = "-Volt"
+    elif channel_middle == "K":
+        component_end = "-Temp"
+    elif channel_middle == "Y":
+        component_end = "-Bin"
+    elif channel_middle == "F":
         component_end = __parse_component_end(location)
     else:
-        raise SNCLException('Unexpected channel middle {}'.format(channel))
+        raise SNCLException("Unexpected channel middle {}".format(channel))
     return component + component_end
 
 
 def __parse_component_end(location):
     location_end = location[1]
-    if location_end == '0':
-        return ''
-    if location_end == '1':
-        return '-Sat'
-    if location_end == 'D':
-        return '-Dist'
-    if location_end == 'Q':
-        return '-SQ'
-    if location_end == 'V':
-        return '-SV'
-    raise SNCLException('Unexpected location end {}'.format(location_end))
+    if location_end == "0":
+        return ""
+    if location_end == "1":
+        return "-Sat"
+    if location_end == "D":
+        return "-Dist"
+    if location_end == "Q":
+        return "-SQ"
+    if location_end == "V":
+        return "-SV"
+    raise SNCLException("Unexpected location end {}".format(location_end))
 
 
 def __parse_data_type(location):
     location_start = location[0]
-    if location_start == 'R':
-        return 'variation'
-    if location_start == 'A':
-        return 'adjusted'
-    if location_start == 'Q':
-        return 'quasi-definitive'
-    if location_start == 'D':
-        return 'definitive'
-    raise SNCLException('Unexpected location start {}'.format(location_start))
+    if location_start == "R":
+        return "variation"
+    if location_start == "A":
+        return "adjusted"
+    if location_start == "Q":
+        return "quasi-definitive"
+    if location_start == "D":
+        return "definitive"
+    raise SNCLException("Unexpected location start {}".format(location_start))
 
 
 def __parse_interval(channel):
     channel_start = channel[0]
-    if channel_start == 'B':
+    if channel_start == "B":
         return 0.1
-    if channel_start == 'L':
+    if channel_start == "L":
         return 1
-    if channel_start == 'U':
+    if channel_start == "U":
         return 60
-    if channel_start == 'R':
+    if channel_start == "R":
         return 3600
-    if channel_start == 'P':
+    if channel_start == "P":
         return 86400
-    raise SNCLException('Unexpected channel {}'.format(channel))
+    raise SNCLException("Unexpected channel {}".format(channel))
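
A round-trip sketch through the helpers above; the observatory code is hypothetical:

```python
from geomagio.edge.sncl import get_scnl, parse_sncl

sncl = get_scnl("BOU", component="H", data_type="variation", interval="second")
print(sncl)
# {'station': 'BOU', 'network': 'NT', 'channel': 'LFH', 'location': 'R0'}
print(parse_sncl(sncl))
# {'observatory': 'BOU', 'network': 'NT', 'component': 'H',
#  'data_type': 'variation', 'interval': 1}
```
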
diff --git a/geomagio/iaga2002/IAGA2002Factory.py b/geomagio/iaga2002/IAGA2002Factory.py
index 6867d3a2c5eff8160e640f2fc120490076e805c7..a03b8bcba5a0dc4a79ccf54f4abc34f9cc4ec27d 100644
--- a/geomagio/iaga2002/IAGA2002Factory.py
+++ b/geomagio/iaga2002/IAGA2002Factory.py
@@ -9,7 +9,7 @@ from .IAGA2002Writer import IAGA2002Writer
 
 
 # pattern for iaga 2002 file names
-IAGA_FILE_PATTERN = '%(obs)s%(ymd)s%(t)s%(i)s.%(i)s'
+IAGA_FILE_PATTERN = "%(obs)s%(ymd)s%(t)s%(i)s.%(i)s"
 
 
 class IAGA2002Factory(TimeseriesFactory):
@@ -35,8 +35,7 @@ class IAGA2002Factory(TimeseriesFactory):
     def __init__(self, **kwargs):
         TimeseriesFactory.__init__(self, **kwargs)
 
-    def parse_string(self, data, observatory=None, interval='minute',
-            **kwargs):
+    def parse_string(self, data, observatory=None, interval="minute", **kwargs):
         """Parse the contents of a string in the format of an IAGA2002 file.
 
         Parameters
@@ -66,21 +65,20 @@ class IAGA2002Factory(TimeseriesFactory):
             rate = (length - 1) / (endtime - starttime)
         else:
             # guess based on args
-            if interval == 'minute':
+            if interval == "minute":
                 rate = 1 / 60
-            elif interval == 'second':
+            elif interval == "second":
                 rate = 1
             else:
-                raise Exception('one sample, and unable to guess rate')
+                raise Exception("one sample, and unable to guess rate")
         for channel in list(data.keys()):
             stats = obspy.core.Stats(metadata)
             stats.starttime = starttime
             stats.sampling_rate = rate
             stats.npts = length
             stats.channel = channel
-            if channel == 'D':
-                data[channel] = ChannelConverter.get_radians_from_minutes(
-                    data[channel])
+            if channel == "D":
+                data[channel] = ChannelConverter.get_radians_from_minutes(data[channel])
             stream += obspy.core.Trace(data[channel], stats)
         return stream
 
diff --git a/geomagio/iaga2002/IAGA2002Parser.py b/geomagio/iaga2002/IAGA2002Parser.py
index 7633ba3ece0b1fcd8a02750e03e96fbc5310feed..efbfdc3c8b774b2478edcd673ca623753cbe631e 100644
--- a/geomagio/iaga2002/IAGA2002Parser.py
+++ b/geomagio/iaga2002/IAGA2002Parser.py
@@ -5,11 +5,11 @@ import numpy
 from datetime import datetime
 
 # values that represent missing data points in IAGA2002
-EIGHTS = numpy.float64('88888')
-NINES = numpy.float64('99999')
+EIGHTS = numpy.float64("88888")
+NINES = numpy.float64("99999")
 
 # placeholder channel name used when less than 4 channels are being written.
-EMPTY_CHANNEL = 'NUL'
+EMPTY_CHANNEL = "NUL"
 
 
 class IAGA2002Parser(object):
@@ -38,10 +38,7 @@ class IAGA2002Parser(object):
         """Create a new IAGA2002 parser."""
         # header fields
         self.headers = {}
-        self.metadata = {
-            'network': 'NT',
-            'station': observatory
-        }
+        self.metadata = {"network": "NT", "station": observatory}
         # header comments
         self.comments = []
         # array of channel names
@@ -68,9 +65,9 @@ class IAGA2002Parser(object):
         lines = data.splitlines()
         for line in lines:
             if parsing_headers:
-                if line.startswith(' ') and line.endswith('|'):
+                if line.startswith(" ") and line.endswith("|"):
                     # still in headers
-                    if line.startswith(' #'):
+                    if line.startswith(" #"):
                         self._parse_comment(line)
                     else:
                         self._parse_header(line)
@@ -90,33 +87,33 @@ class IAGA2002Parser(object):
         value = line[24:69].strip()
         self.headers[key] = value
         key_upper = key.upper()
-        if key_upper == 'SOURCE OF DATA':
-            key = 'agency_name'
-        elif key_upper == 'STATION NAME':
-            key = 'station_name'
-        elif key_upper == 'IAGA CODE':
-            key = 'station'
-        elif key_upper == 'GEODETIC LATITUDE':
-            key = 'geodetic_latitude'
-        elif key_upper == 'GEODETIC LONGITUDE':
-            key = 'geodetic_longitude'
-        elif key_upper == 'ELEVATION':
-            key = 'elevation'
-        elif key_upper == 'SENSOR ORIENTATION':
-            key = 'sensor_orientation'
-        elif key_upper == 'DIGITAL SAMPLING':
-            key = 'sensor_sampling_rate'
+        if key_upper == "SOURCE OF DATA":
+            key = "agency_name"
+        elif key_upper == "STATION NAME":
+            key = "station_name"
+        elif key_upper == "IAGA CODE":
+            key = "station"
+        elif key_upper == "GEODETIC LATITUDE":
+            key = "geodetic_latitude"
+        elif key_upper == "GEODETIC LONGITUDE":
+            key = "geodetic_longitude"
+        elif key_upper == "ELEVATION":
+            key = "elevation"
+        elif key_upper == "SENSOR ORIENTATION":
+            key = "sensor_orientation"
+        elif key_upper == "DIGITAL SAMPLING":
+            key = "sensor_sampling_rate"
             try:
-                if value.find('second') != -1:
-                    value = 1 / float(value.replace('second', '').strip())
-                elif value.find('Hz') != -1:
-                    value = float(value.replace('Hz', '').strip())
+                if value.find("second") != -1:
+                    value = 1 / float(value.replace("second", "").strip())
+                elif value.find("Hz") != -1:
+                    value = float(value.replace("Hz", "").strip())
             except ValueError:
                 return
-        elif key_upper == 'DATA INTERVAL TYPE':
-            key = 'data_interval_type'
-        elif key_upper == 'DATA TYPE':
-            key = 'data_type'
+        elif key_upper == "DATA INTERVAL TYPE":
+            key = "data_interval_type"
+        elif key_upper == "DATA TYPE":
+            key = "data_type"
         else:
             # not a required header
             return
@@ -135,11 +132,11 @@ class IAGA2002Parser(object):
         Adds channel names to ``self.channels``.
         Creates empty values arrays in ``self.data``.
         """
-        iaga_code = self.metadata['station']
-        self.channels.append(line[30:40].strip().replace(iaga_code, ''))
-        self.channels.append(line[40:50].strip().replace(iaga_code, ''))
-        self.channels.append(line[50:60].strip().replace(iaga_code, ''))
-        self.channels.append(line[60:69].strip().replace(iaga_code, ''))
+        iaga_code = self.metadata["station"]
+        self.channels.append(line[30:40].strip().replace(iaga_code, ""))
+        self.channels.append(line[40:50].strip().replace(iaga_code, ""))
+        self.channels.append(line[50:60].strip().replace(iaga_code, ""))
+        self.channels.append(line[60:69].strip().replace(iaga_code, ""))
 
     def _parse_data(self, line):
         """Parse one data point in the timeseries.
@@ -149,12 +146,17 @@ class IAGA2002Parser(object):
         """
         # parsing time components is much faster
         time = datetime(
-                # date
-                int(line[0:4]), int(line[5:7]), int(line[8:10]),
-                # time
-                int(line[11:13]), int(line[14:16]), int(line[17:19]),
-                # microseconds
-                int(line[20:23]) * 1000)
+            # date
+            int(line[0:4]),
+            int(line[5:7]),
+            int(line[8:10]),
+            # time
+            int(line[11:13]),
+            int(line[14:16]),
+            int(line[17:19]),
+            # microseconds
+            int(line[20:23]) * 1000,
+        )
         t, d1, d2, d3, d4 = self._parsedata
         t.append(time)
         d1.append(line[31:40])
@@ -192,30 +194,31 @@ class IAGA2002Parser(object):
         is_intermagnet = False
         is_gin = False
         for comment in self.comments:
-            if comment.startswith('DECBAS'):
+            if comment.startswith("DECBAS"):
                 # parse DECBAS
-                decbas = comment.replace('DECBAS', '').strip()
-                declination_base = int(decbas[:decbas.find(' ')])
-            elif comment.startswith('CONDITIONS OF USE:'):
-                conditions_of_use = comment.replace(
-                        'CONDITIONS OF USE:', '').strip()
+                decbas = comment.replace("DECBAS", "").strip()
+                declination_base = int(decbas[: decbas.find(" ")])
+            elif comment.startswith("CONDITIONS OF USE:"):
+                conditions_of_use = comment.replace("CONDITIONS OF USE:", "").strip()
             else:
                 comment_upper = comment.upper()
-                if 'FILTER' in comment_upper:
+                if "FILTER" in comment_upper:
                     filter_comments.append(comment)
-                elif 'GIN' in comment_upper:
+                elif "GIN" in comment_upper:
                     is_gin = True
-                elif 'INTERMAGNET DVD' in comment_upper or \
-                        'WWW.INTERMAGNET.ORG' in comment_upper:
+                elif (
+                    "INTERMAGNET DVD" in comment_upper
+                    or "WWW.INTERMAGNET.ORG" in comment_upper
+                ):
                     is_intermagnet = True
                 else:
                     comments.append(comment)
-        self.metadata['comments'] = tuple(comments)
-        self.metadata['filter_comments'] = tuple(filter_comments)
-        self.metadata['conditions_of_use'] = conditions_of_use
-        self.metadata['declination_base'] = declination_base
-        self.metadata['is_intermagnet'] = is_intermagnet
-        self.metadata['is_gin'] = is_gin
+        self.metadata["comments"] = tuple(comments)
+        self.metadata["filter_comments"] = tuple(filter_comments)
+        self.metadata["conditions_of_use"] = conditions_of_use
+        self.metadata["declination_base"] = declination_base
+        self.metadata["is_intermagnet"] = is_intermagnet
+        self.metadata["is_gin"] = is_gin
 
     def _merge_comments(self, comments):
         """Combine multi-line, period-delimited comments.
@@ -236,9 +239,9 @@ class IAGA2002Parser(object):
             if partial is None:
                 partial = comment
             else:
-                partial = partial + ' ' + comment
+                partial = partial + " " + comment
             # comments end with period
-            if partial.endswith('.'):
+            if partial.endswith("."):
                 merged.append(partial)
                 partial = None
         # comment that doesn't end in a period
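
The parser above depends on IAGA2002's fixed column layout rather than
whitespace splitting. A minimal sketch of one data row, assuming the three
remaining value fields follow the first at ten-character steps (consistent
with the channel header columns parsed earlier); parse_row is an illustrative
helper, not part of the parser:

from datetime import datetime

def parse_row(line):
    # Date and time live in fixed columns; milliseconds become microseconds.
    time = datetime(
        int(line[0:4]), int(line[5:7]), int(line[8:10]),       # YYYY-MM-DD
        int(line[11:13]), int(line[14:16]), int(line[17:19]),  # HH:MM:SS
        int(line[20:23]) * 1000,                               # .mmm
    )
    # Four 9-character value fields; 88888/99999 mark missing data.
    values = [float(line[start : start + 9]) for start in (31, 41, 51, 61)]
    return time, values
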
diff --git a/geomagio/iaga2002/IAGA2002Writer.py b/geomagio/iaga2002/IAGA2002Writer.py
index c8d815cbde40764308f5454105170255a40e40b0..14363bf8832968e01de7e371b9d30254aaf52698 100644
--- a/geomagio/iaga2002/IAGA2002Writer.py
+++ b/geomagio/iaga2002/IAGA2002Writer.py
@@ -16,8 +16,11 @@ class IAGA2002Writer(object):
     """IAGA2002 writer.
     """
 
-    def __init__(self, empty_value=IAGA2002Parser.NINES,
-            empty_channel=IAGA2002Parser.EMPTY_CHANNEL):
+    def __init__(
+        self,
+        empty_value=IAGA2002Parser.NINES,
+        empty_channel=IAGA2002Parser.EMPTY_CHANNEL,
+    ):
         self.empty_value = empty_value
         self.empty_channel = empty_channel
 
@@ -36,16 +39,16 @@ class IAGA2002Writer(object):
         for channel in channels:
             if timeseries.select(channel=channel).count() == 0:
                 raise TimeseriesFactoryException(
-                    'Missing channel "%s" for output, available channels %s' %
-                    (channel, str(TimeseriesUtility.get_channels(timeseries))))
+                    'Missing channel "%s" for output, available channels %s'
+                    % (channel, str(TimeseriesUtility.get_channels(timeseries)))
+                )
         stats = timeseries[0].stats
         if len(channels) != 4:
             channels = self._pad_to_four_channels(timeseries, channels)
-        out.write(self._format_headers(stats, channels).encode('utf8'))
-        out.write(self._format_comments(stats).encode('utf8'))
-        out.write(self._format_channels(channels, stats.station).encode(
-                'utf8'))
-        out.write(self._format_data(timeseries, channels).encode('utf8'))
+        out.write(self._format_headers(stats, channels).encode("utf8"))
+        out.write(self._format_comments(stats).encode("utf8"))
+        out.write(self._format_channels(channels, stats.station).encode("utf8"))
+        out.write(self._format_data(timeseries, channels).encode("utf8"))
 
     def _format_headers(self, stats, channels):
         """format headers for IAGA2002 file
@@ -63,34 +66,40 @@ class IAGA2002Writer(object):
             an array containing formatted strings of header data.
         """
         buf = []
-        buf.append(self._format_header('Format', 'IAGA-2002'))
-        if 'agency_name' in stats:
-            buf.append(self._format_header('Source of Data',
-                    stats.agency_name))
-        if 'station_name' in stats:
-            buf.append(self._format_header('Station Name', stats.station_name))
-        buf.append(self._format_header('IAGA CODE', stats.station))
-        if 'geodetic_latitude' in stats:
-            buf.append(self._format_header('Geodetic Latitude',
-                    str(stats.geodetic_latitude)))
-        if 'geodetic_longitude' in stats:
-            buf.append(self._format_header('Geodetic Longitude',
-                    str(stats.geodetic_longitude)))
-        if 'elevation' in stats:
-            buf.append(self._format_header('Elevation', stats.elevation))
-        buf.append(self._format_header('Reported', ''.join(channels)))
-        if 'sensor_orientation' in stats:
-            buf.append(self._format_header('Sensor Orientation',
-                    stats.sensor_orientation))
-        if 'sensor_sampling_rate' in stats:
-            buf.append(self._format_header('Digital Sampling',
-                    str(1 / stats.sensor_sampling_rate) + ' second'))
-        if 'data_interval_type' in stats:
-            buf.append(self._format_header('Data Interval Type',
-                    stats.data_interval_type))
-        if 'data_type' in stats:
-            buf.append(self._format_header('Data Type', stats.data_type))
-        return ''.join(buf)
+        buf.append(self._format_header("Format", "IAGA-2002"))
+        if "agency_name" in stats:
+            buf.append(self._format_header("Source of Data", stats.agency_name))
+        if "station_name" in stats:
+            buf.append(self._format_header("Station Name", stats.station_name))
+        buf.append(self._format_header("IAGA CODE", stats.station))
+        if "geodetic_latitude" in stats:
+            buf.append(
+                self._format_header("Geodetic Latitude", str(stats.geodetic_latitude))
+            )
+        if "geodetic_longitude" in stats:
+            buf.append(
+                self._format_header("Geodetic Longitude", str(stats.geodetic_longitude))
+            )
+        if "elevation" in stats:
+            buf.append(self._format_header("Elevation", stats.elevation))
+        buf.append(self._format_header("Reported", "".join(channels)))
+        if "sensor_orientation" in stats:
+            buf.append(
+                self._format_header("Sensor Orientation", stats.sensor_orientation)
+            )
+        if "sensor_sampling_rate" in stats:
+            buf.append(
+                self._format_header(
+                    "Digital Sampling", str(1 / stats.sensor_sampling_rate) + " second"
+                )
+            )
+        if "data_interval_type" in stats:
+            buf.append(
+                self._format_header("Data Interval Type", stats.data_interval_type)
+            )
+        if "data_type" in stats:
+            buf.append(self._format_header("Data Type", stats.data_type))
+        return "".join(buf)
 
     def _format_comments(self, stats):
         """format comments for IAGA2002 file
@@ -106,32 +115,34 @@ class IAGA2002Writer(object):
             an array containing formatted strings of header data.
         """
         comments = []
-        if ('declination_base' in stats and
-          stats.declination_base is not None and
-          (stats.data_type == 'variation' or stats.data_type == 'reported')):
-            comments.append('DECBAS               {:<8d}'
-                    '(Baseline declination value in tenths of minutes East'
-                    ' (0-216,000)).'.format(stats.declination_base))
-        if 'filter_comments' in stats:
+        if (
+            "declination_base" in stats
+            and stats.declination_base is not None
+            and (stats.data_type == "variation" or stats.data_type == "reported")
+        ):
+            comments.append(
+                "DECBAS               {:<8d}"
+                "(Baseline declination value in tenths of minutes East"
+                " (0-216,000)).".format(stats.declination_base)
+            )
+        if "filter_comments" in stats:
             comments.extend(stats.filter_comments)
-        if 'comments' in stats:
+        if "comments" in stats:
             comments.extend(stats.comments)
-        if 'is_gin' in stats and stats.is_gin:
-            comments.append('This data file was constructed by the Golden ' +
-                    'GIN.')
-        if 'is_intermagnet' in stats and stats.is_intermagnet:
-            comments.append('Final data will be available on the' +
-                    ' INTERMAGNET DVD.')
-            comments.append('Go to www.intermagnet.org for details on' +
-                    ' obtaining this product.')
-        if 'conditions_of_use' in stats and \
-                stats.conditions_of_use is not None:
-            comments.append('CONDITIONS OF USE: ' + stats.conditions_of_use)
+        if "is_gin" in stats and stats.is_gin:
+            comments.append("This data file was constructed by the Golden " + "GIN.")
+        if "is_intermagnet" in stats and stats.is_intermagnet:
+            comments.append("Final data will be available on the" + " INTERMAGNET DVD.")
+            comments.append(
+                "Go to www.intermagnet.org for details on obtaining this product."
+            )
+        if "conditions_of_use" in stats and stats.conditions_of_use is not None:
+            comments.append("CONDITIONS OF USE: " + stats.conditions_of_use)
         # generate comment output
         buf = []
         for comment in comments:
             buf.append(self._format_comment(comment))
-        return ''.join(buf)
+        return "".join(buf)
 
     def _format_header(self, name, value):
         """format headers for IAGA2002 file
@@ -148,9 +159,9 @@ class IAGA2002Writer(object):
         str
             a string formatted to be a single header line in an IAGA2002 file
         """
-        prefix = ' '
-        suffix = ' |' + linesep
-        return ''.join((prefix, name.ljust(23), value.ljust(44), suffix))
+        prefix = " "
+        suffix = " |" + linesep
+        return "".join((prefix, name.ljust(23), value.ljust(44), suffix))
 
     def _format_comment(self, comment):
         """format header for IAGA2002 file
@@ -165,12 +176,12 @@ class IAGA2002Writer(object):
             a string formatted to be a single comment in an IAGA2002 file.
         """
         buf = []
-        prefix = ' # '
-        suffix = ' |' + linesep
+        prefix = " # "
+        suffix = " |" + linesep
         lines = textwrap.wrap(comment, 65)
         for line in lines:
             buf.extend((prefix, line.ljust(65), suffix))
-        return ''.join(buf)
+        return "".join(buf)
 
     def _format_channels(self, channels, iaga_code):
         """Format channel header line.
@@ -190,19 +201,20 @@ class IAGA2002Writer(object):
         iaga_code_len = len(iaga_code)
         if iaga_code_len != 3 and iaga_code_len != 4:
             raise TimeseriesFactoryException(
-                    'iaga_code "{}" is not 3 characters'.format(iaga_code))
+                'iaga_code "{}" is not 3 characters'.format(iaga_code)
+            )
         if len(channels) != 4:
-            raise TimeseriesFactoryException(
-                    'more than 4 channels {}'.format(channels))
-        buf = ['DATE       TIME         DOY  ']
+            raise TimeseriesFactoryException("more than 4 channels {}".format(channels))
+        buf = ["DATE       TIME         DOY  "]
         for channel in channels:
             channel_len = len(channel)
             if channel_len < 1 or channel_len > 4:
                 raise TimeseriesFactoryException(
-                        'channel "{}" is not 1 character'.format(channel))
-            buf.append('   {:<7s}'.format(iaga_code + channel))
-        buf.append('|' + linesep)
-        return ''.join(buf)
+                    'channel "{}" is not 1 character'.format(channel)
+                )
+            buf.append("   {:<7s}".format(iaga_code + channel))
+        buf.append("|" + linesep)
+        return "".join(buf)
 
     def _format_data(self, timeseries, channels):
         """Format all data lines.
@@ -215,17 +227,20 @@ class IAGA2002Writer(object):
             list and order of channel values to output.
         """
         buf = []
-        if timeseries.select(channel='D'):
-            d = timeseries.select(channel='D')
+        if timeseries.select(channel="D"):
+            d = timeseries.select(channel="D")
             d[0].data = ChannelConverter.get_minutes_from_radians(d[0].data)
         traces = [timeseries.select(channel=c)[0] for c in channels]
         starttime = float(traces[0].stats.starttime)
         delta = traces[0].stats.delta
         for i in range(len(traces[0].data)):
-            buf.append(self._format_values(
-                datetime.utcfromtimestamp(starttime + i * delta),
-                (t.data[i] for t in traces)))
-        return ''.join(buf)
+            buf.append(
+                self._format_values(
+                    datetime.utcfromtimestamp(starttime + i * delta),
+                    (t.data[i] for t in traces),
+                )
+            )
+        return "".join(buf)
 
     def _format_values(self, time, values):
         """Format one line of data values.
@@ -244,13 +259,17 @@ class IAGA2002Writer(object):
             Formatted line containing values.
         """
         tt = time.timetuple()
-        return '{0.tm_year:0>4d}-{0.tm_mon:0>2d}-{0.tm_mday:0>2d} ' \
-                '{0.tm_hour:0>2d}:{0.tm_min:0>2d}:{0.tm_sec:0>2d}.{1:0>3d} ' \
-                '{0.tm_yday:0>3d}   ' \
-                ' {2:9.2f} {3:9.2f} {4:9.2f} {5:9.2f}'.format(
-                tt, int(time.microsecond / 1000),
-                *[self.empty_value if numpy.isnan(val) else val
-                        for val in values]) + linesep
+        return (
+            "{0.tm_year:0>4d}-{0.tm_mon:0>2d}-{0.tm_mday:0>2d} "
+            "{0.tm_hour:0>2d}:{0.tm_min:0>2d}:{0.tm_sec:0>2d}.{1:0>3d} "
+            "{0.tm_yday:0>3d}   "
+            " {2:9.2f} {3:9.2f} {4:9.2f} {5:9.2f}".format(
+                tt,
+                int(time.microsecond / 1000),
+                *[self.empty_value if numpy.isnan(val) else val for val in values]
+            )
+            + linesep
+        )
 
     def _pad_to_four_channels(self, timeseries, channels):
         padded = channels[:]
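
Conversely, the writer's _format_values emits one fixed-width row per sample,
substituting the configured empty value for NaN. A sketch of just that
formatting (format_row is an illustrative helper; the trailing line separator
is omitted here):

import numpy
from datetime import datetime

EMPTY = numpy.float64("99999")  # the parser's NINES default

def format_row(time, values):
    tt = time.timetuple()
    return (
        "{0.tm_year:0>4d}-{0.tm_mon:0>2d}-{0.tm_mday:0>2d} "
        "{0.tm_hour:0>2d}:{0.tm_min:0>2d}:{0.tm_sec:0>2d}.{1:0>3d} "
        "{0.tm_yday:0>3d}   "
        " {2:9.2f} {3:9.2f} {4:9.2f} {5:9.2f}".format(
            tt,
            time.microsecond // 1000,
            *[EMPTY if numpy.isnan(v) else v for v in values]
        )
    )

format_row(datetime(2020, 1, 2, 3, 4, 5), [123.4, float("nan"), -56.7, 89.0])
# '2020-01-02 03:04:05.000 002       123.40  99999.00    -56.70     89.00'
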
diff --git a/geomagio/iaga2002/StreamIAGA2002Factory.py b/geomagio/iaga2002/StreamIAGA2002Factory.py
index 565a18183de9f1ba380f7981cac6343b946ebb30..62f6f871c99b61aad9a4ce903f63a99cdbce0d66 100644
--- a/geomagio/iaga2002/StreamIAGA2002Factory.py
+++ b/geomagio/iaga2002/StreamIAGA2002Factory.py
@@ -18,23 +18,38 @@ class StreamIAGA2002Factory(IAGA2002Factory):
     IAGA2002Factory
     TimeseriesFactory
     """
+
     def __init__(self, stream, **kwargs):
         IAGA2002Factory.__init__(self, **kwargs)
         self._stream = stream
 
-    def get_timeseries(self, starttime, endtime, observatory=None,
-            channels=None, type=None, interval=None):
+    def get_timeseries(
+        self,
+        starttime,
+        endtime,
+        observatory=None,
+        channels=None,
+        type=None,
+        interval=None,
+    ):
         """Implements get_timeseries
 
         Notes: Calls IAGA2002Factory.parse_string in place of
             IAGA2002Factory.get_timeseries.
         """
-        return IAGA2002Factory.parse_string(self,
-                data=self._stream.read(),
-                observatory=observatory)
-
-    def put_timeseries(self, timeseries, starttime=None, endtime=None,
-            channels=None, type=None, interval=None):
+        return IAGA2002Factory.parse_string(
+            self, data=self._stream.read(), observatory=observatory
+        )
+
+    def put_timeseries(
+        self,
+        timeseries,
+        starttime=None,
+        endtime=None,
+        channels=None,
+        type=None,
+        interval=None,
+    ):
         """Implements put_timeseries
 
         Notes: Calls IAGA2002Factory.write_file in place of
diff --git a/geomagio/iaga2002/__init__.py b/geomagio/iaga2002/__init__.py
index d7344c772bf2886fe00afbe9bd104623a08db966..66644b4512b06eeffabbc49240f00b1f55d98b69 100644
--- a/geomagio/iaga2002/__init__.py
+++ b/geomagio/iaga2002/__init__.py
@@ -12,8 +12,8 @@ from .IAGA2002Writer import IAGA2002Writer
 
 
 __all__ = [
-    'IAGA2002Factory',
-    'StreamIAGA2002Factory',
-    'IAGA2002Parser',
-    'IAGA2002Writer'
+    "IAGA2002Factory",
+    "StreamIAGA2002Factory",
+    "IAGA2002Parser",
+    "IAGA2002Writer",
 ]
diff --git a/geomagio/imfjson/IMFJSONWriter.py b/geomagio/imfjson/IMFJSONWriter.py
index f2988effcdf9d1da7d55a2e2ad68df8dac840de8..5effd5df4a85bab7ba5bf0c765c4f80822d54130 100644
--- a/geomagio/imfjson/IMFJSONWriter.py
+++ b/geomagio/imfjson/IMFJSONWriter.py
@@ -36,16 +36,18 @@ class IMFJSONWriter(object):
         for channel in channels:
             if timeseries.select(channel=channel).count() == 0:
                 raise TimeseriesFactoryException(
-                    'Missing channel "%s" for output, available channels %s' %
-                    (channel, str(TimeseriesUtility.get_channels(timeseries))))
+                    'Missing channel "%s" for output, available channels %s'
+                    % (channel, str(TimeseriesUtility.get_channels(timeseries)))
+                )
         stats = timeseries[0].stats
-        file_dict['type'] = 'Timeseries'
-        file_dict['metadata'] = self._format_metadata(stats, channels)
-        file_dict['metadata']['url'] = url
-        file_dict['times'] = self._format_times(timeseries, channels)
-        file_dict['values'] = self._format_data(timeseries, channels, stats)
-        formatted_timeseries = json.dumps(file_dict,
-                ensure_ascii=True, separators=(',', ':')).encode('utf8')
+        file_dict["type"] = "Timeseries"
+        file_dict["metadata"] = self._format_metadata(stats, channels)
+        file_dict["metadata"]["url"] = url
+        file_dict["times"] = self._format_times(timeseries, channels)
+        file_dict["values"] = self._format_data(timeseries, channels, stats)
+        formatted_timeseries = json.dumps(
+            file_dict, ensure_ascii=True, separators=(",", ":")
+        ).encode("utf8")
         out.write(str(formatted_timeseries))
 
     def _format_data(self, timeseries, channels, stats):
@@ -69,34 +71,32 @@ class IMFJSONWriter(object):
         for c in channels:
             value_dict = OrderedDict()
             trace = timeseries.select(channel=c)[0]
-            value_dict['id'] = c
-            value_dict['metadata'] = OrderedDict()
-            metadata = value_dict['metadata']
-            metadata['element'] = c
-            metadata['network'] = stats.network
-            metadata['station'] = stats.station
+            value_dict["id"] = c
+            value_dict["metadata"] = OrderedDict()
+            metadata = value_dict["metadata"]
+            metadata["element"] = c
+            metadata["network"] = stats.network
+            metadata["station"] = stats.station
             edge_channel = trace.stats.channel
-            metadata['channel'] = edge_channel
+            metadata["channel"] = edge_channel
             if stats.location == "":
-                if (stats.data_type == 'variation' or
-                  stats.data_type == 'reported'):
-                    stats.location = 'R0'
-                elif (stats.data_type == 'adjusted' or
-                  stats.data_type == 'provisional'):
-                    stats.location = 'A0'
-                elif stats.data_type == 'quasi-definitive':
-                    stats.location = 'Q0'
-                elif stats.data_type == 'definitive':
-                    stats.location = 'D0'
-            metadata['location'] = stats.location
+                if stats.data_type == "variation" or stats.data_type == "reported":
+                    stats.location = "R0"
+                elif stats.data_type == "adjusted" or stats.data_type == "provisional":
+                    stats.location = "A0"
+                elif stats.data_type == "quasi-definitive":
+                    stats.location = "Q0"
+                elif stats.data_type == "definitive":
+                    stats.location = "D0"
+            metadata["location"] = stats.location
             values += [value_dict]
             series = np.copy(trace.data)
-            if c == 'D':
+            if c == "D":
                 series = ChannelConverter.get_minutes_from_radians(series)
             # Converting numpy array to list required for JSON serialization
             series = series.tolist()
-            series = [None if str(x) == 'nan' else x for x in series]
-            value_dict['values'] = series
+            series = [None if str(x) == "nan" else x for x in series]
+            value_dict["values"] = series
             # TODO: Add flag metadata
         return values
 
@@ -118,44 +118,44 @@ class IMFJSONWriter(object):
         metadata_dict = OrderedDict()
         intermag = OrderedDict()
         imo = OrderedDict()
-        imo['iaga_code'] = stats.station
-        if 'station_name' in stats:
-            imo['name'] = stats.station_name
+        imo["iaga_code"] = stats.station
+        if "station_name" in stats:
+            imo["name"] = stats.station_name
         coords = [None] * 3
-        if 'geodetic_longitude' in stats:
+        if "geodetic_longitude" in stats:
             coords[0] = float(stats.geodetic_longitude)
-        if 'geodetic_latitude' in stats:
+        if "geodetic_latitude" in stats:
             coords[1] = float(stats.geodetic_latitude)
         try:
-            if 'elevation' in stats:
+            if "elevation" in stats:
                 coords[2] = float(stats.elevation)
         except (KeyError, ValueError, TypeError):
             pass
-        imo['coordinates'] = coords
-        intermag['imo'] = imo
-        intermag['reported_orientation'] = ''.join(channels)
-        if 'sensor_orientation' in stats:
-            intermag['sensor_orientation'] = stats.sensor_orientation
-        if 'data_type' in stats:
-            intermag['data_type'] = stats.data_type
-        if 'sampling_rate' in stats:
-            if stats.sampling_rate == 1. / 60.:
+        imo["coordinates"] = coords
+        intermag["imo"] = imo
+        intermag["reported_orientation"] = "".join(channels)
+        if "sensor_orientation" in stats:
+            intermag["sensor_orientation"] = stats.sensor_orientation
+        if "data_type" in stats:
+            intermag["data_type"] = stats.data_type
+        if "sampling_rate" in stats:
+            if stats.sampling_rate == 1.0 / 60.0:
                 rate = 60
-            elif stats.sampling_rate == 1. / 3600.:
+            elif stats.sampling_rate == 1.0 / 3600.0:
                 rate = 3600
-            elif stats.sampling_rate == 1. / 86400.:
+            elif stats.sampling_rate == 1.0 / 86400.0:
                 rate = 86400
             else:
                 rate = 1
-            intermag['sampling_period'] = rate
+            intermag["sampling_period"] = rate
         # 1/sampling_rate to output in seconds rather than hertz
-        if 'sensor_sampling_rate' in stats:
+        if "sensor_sampling_rate" in stats:
             sampling = 1 / stats.sensor_sampling_rate
-            intermag['digital_sampling_rate'] = sampling
-        metadata_dict['intermagnet'] = intermag
-        metadata_dict['status'] = 200
+            intermag["digital_sampling_rate"] = sampling
+        metadata_dict["intermagnet"] = intermag
+        metadata_dict["status"] = 200
         generated = datetime.utcnow()
-        metadata_dict['generated'] = generated.strftime("%Y-%m-%dT%H:%M:%SZ")
+        metadata_dict["generated"] = generated.strftime("%Y-%m-%dT%H:%M:%SZ")
         return metadata_dict
 
     def _format_times(self, timeseries, channels):
@@ -178,8 +178,11 @@ class IMFJSONWriter(object):
         starttime = float(traces[0].stats.starttime)
         delta = traces[0].stats.delta
         for i in range(len(traces[0].data)):
-            times.append(self._format_time_string(
-                datetime.utcfromtimestamp(starttime + i * delta)))
+            times.append(
+                self._format_time_string(
+                    datetime.utcfromtimestamp(starttime + i * delta)
+                )
+            )
         return times
 
     def _format_time_string(self, time):
@@ -196,9 +199,11 @@ class IMFJSONWriter(object):
             formatted time.
         """
         tt = time.timetuple()
-        return '{0.tm_year:0>4d}-{0.tm_mon:0>2d}-{0.tm_mday:0>2d}T' \
-                '{0.tm_hour:0>2d}:{0.tm_min:0>2d}:{0.tm_sec:0>2d}.{1:0>3d}Z' \
-                ''.format(tt, int(time.microsecond / 1000))
+        return (
+            "{0.tm_year:0>4d}-{0.tm_mon:0>2d}-{0.tm_mday:0>2d}T"
+            "{0.tm_hour:0>2d}:{0.tm_min:0>2d}:{0.tm_sec:0>2d}.{1:0>3d}Z"
+            "".format(tt, int(time.microsecond / 1000))
+        )
 
     @classmethod
     def format(self, timeseries, channels, url=None):
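
Two conversions above are easy to miss: the reported sampling_period is
derived in seconds from the trace sampling_rate in Hz, and NaN samples become
JSON null. A sketch under those assumptions (both helper names are
illustrative):

import numpy

def sampling_period(sampling_rate):
    # 1/60 Hz -> 60 s, 1/3600 Hz -> 3600 s, 1/86400 Hz -> 86400 s;
    # anything else is treated as one-second data.
    for period in (60, 3600, 86400):
        if sampling_rate == 1.0 / period:
            return period
    return 1

def jsonable(series):
    # Plain lists serialize; numpy arrays and NaN do not, so NaN -> None.
    series = numpy.asarray(series, dtype=numpy.float64).tolist()
    return [None if str(x) == "nan" else x for x in series]

sampling_period(1.0 / 60.0)    # 60
jsonable([1.5, float("nan")])  # [1.5, None]
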
diff --git a/geomagio/imfjson/__init__.py b/geomagio/imfjson/__init__.py
index 2354d7b141d6192b90d43426c22490a69f72dd68..3125b4e8e7ef2c49931f251fcb190c665c448d6d 100644
--- a/geomagio/imfjson/__init__.py
+++ b/geomagio/imfjson/__init__.py
@@ -6,7 +6,4 @@ from .IMFJSONFactory import IMFJSONFactory
 from .IMFJSONWriter import IMFJSONWriter
 
 
-__all__ = [
-    'IMFJSONWriter',
-    'IMFJSONFactory'
-]
+__all__ = ["IMFJSONWriter", "IMFJSONFactory"]
diff --git a/geomagio/imfv122/IMFV122Factory.py b/geomagio/imfv122/IMFV122Factory.py
index 9210287bf042baed1b7b1fa8b9e01aa026da6999..dfdf0c48c086dca36c99633c5b428c52777d5881 100644
--- a/geomagio/imfv122/IMFV122Factory.py
+++ b/geomagio/imfv122/IMFV122Factory.py
@@ -52,8 +52,7 @@ class IMFV122Factory(TimeseriesFactory):
             stats.sampling_rate = rate
             stats.npts = length
             stats.channel = channel
-            if channel == 'D':
-                data[channel] = ChannelConverter.get_radians_from_minutes(
-                    data[channel])
+            if channel == "D":
+                data[channel] = ChannelConverter.get_radians_from_minutes(data[channel])
             stream += obspy.core.Trace(data[channel], stats)
         return stream
diff --git a/geomagio/imfv122/IMFV122Parser.py b/geomagio/imfv122/IMFV122Parser.py
index 38046033d88ab70ac24262405e71033996ed2e8a..af00f92c7125d4e520aaaf6b20e24a14ef4e0914 100644
--- a/geomagio/imfv122/IMFV122Parser.py
+++ b/geomagio/imfv122/IMFV122Parser.py
@@ -5,8 +5,8 @@ import numpy
 from obspy.core import UTCDateTime
 
 # values that represent missing data points in IMFV122
-EIGHTS = numpy.float64('888888')
-NINES = numpy.float64('999999')
+EIGHTS = numpy.float64("888888")
+NINES = numpy.float64("999999")
 
 
 class IMFV122Parser(object):
@@ -32,10 +32,7 @@ class IMFV122Parser(object):
     def __init__(self, observatory=None):
         """Create a new IAGA2002 parser."""
         # header fields
-        self.metadata = {
-            'network': 'NT',
-            'station': observatory
-        }
+        self.metadata = {"network": "NT", "station": observatory}
         # array of channel names
         self.channels = []
         # timestamps of data (datetime.datetime)
@@ -67,23 +64,25 @@ class IMFV122Parser(object):
 
         Adds value to ``self.headers``.
         """
-        (observatory,
-                date,
-                doy,
-                start,
-                components,
-                type,
-                gin,
-                colalong,
-                decbas,
-                reserved) = line.split()
+        (
+            observatory,
+            date,
+            doy,
+            start,
+            components,
+            type,
+            gin,
+            colalong,
+            decbas,
+            reserved,
+        ) = line.split()
 
         self.channels = list(components)
-        self.metadata['declination_base'] = int(decbas)
-        self.metadata['geodetic_latitude'] = float(colalong[:4]) / 10
-        self.metadata['geodetic_longitude'] = float(colalong[4:]) / 10
-        self.metadata['station'] = observatory
-        self.metadata['gin'] = gin
+        self.metadata["declination_base"] = int(decbas)
+        self.metadata["geodetic_latitude"] = float(colalong[:4]) / 10
+        self.metadata["geodetic_longitude"] = float(colalong[4:]) / 10
+        self.metadata["station"] = observatory
+        self.metadata["gin"] = gin
 
         year = 1900 + int(date[-2:])
         julday = int(doy)
@@ -101,11 +100,7 @@ class IMFV122Parser(object):
             hour = int(dayminute / 60)
             minute = dayminute % 60
             self._delta = 60
-        self._nexttime = UTCDateTime(
-                year=year,
-                julday=julday,
-                hour=hour,
-                minute=minute)
+        self._nexttime = UTCDateTime(year=year, julday=julday, hour=hour, minute=minute)
 
     def _parse_data(self, line):
         """Parse one data point in the timeseries.
@@ -139,7 +134,7 @@ class IMFV122Parser(object):
             data = numpy.array(data, dtype=numpy.float64)
             data[data == EIGHTS] = numpy.nan
             data[data == NINES] = numpy.nan
-            if channel == 'D':
+            if channel == "D":
                 data = data / 100
             else:
                 data = data / 10
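
The IMFV122 header packs its metadata into whitespace-separated tokens;
latitude and longitude share one token, each scaled by ten, and the baseline
declination is an integer. A standalone sketch of the decode (decode_header
is an illustrative helper):

def decode_header(line):
    fields = line.split()
    obs, date, doy, _start, components, _type, gin, colalong, decbas, _reserved = fields
    return {
        "station": obs,
        "channels": list(components),
        "declination_base": int(decbas),
        "geodetic_latitude": float(colalong[:4]) / 10,   # first four digits, tenths
        "geodetic_longitude": float(colalong[4:]) / 10,  # remaining digits, tenths
        "gin": gin,
        "year": 1900 + int(date[-2:]),  # two-digit year, 19xx as in the parser above
        "julday": int(doy),
    }
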
diff --git a/geomagio/imfv122/StreamIMFV122Factory.py b/geomagio/imfv122/StreamIMFV122Factory.py
index 837082de3874a005da473cdbda8770b217c1a58d..b868d8dd20318e1587403dda00b6f751876ad1cd 100644
--- a/geomagio/imfv122/StreamIMFV122Factory.py
+++ b/geomagio/imfv122/StreamIMFV122Factory.py
@@ -18,17 +18,25 @@ class StreamIMFV122Factory(IMFV122Factory):
     IMFV122Factory
     TimeseriesFactory
     """
+
     def __init__(self, stream, **kwargs):
         IMFV122Factory.__init__(self, **kwargs)
         self._stream = stream
 
-    def get_timeseries(self, starttime, endtime, observatory=None,
-            channels=None, type=None, interval=None):
+    def get_timeseries(
+        self,
+        starttime,
+        endtime,
+        observatory=None,
+        channels=None,
+        type=None,
+        interval=None,
+    ):
         """Implements get_timeseries
 
         Notes: Calls IMFV122Factory.parse_string in place of
             IMFV122Factory.get_timeseries.
         """
-        return IMFV122Factory.parse_string(self,
-                data=self._stream.read(),
-                observatory=observatory)
+        return IMFV122Factory.parse_string(
+            self, data=self._stream.read(), observatory=observatory
+        )
diff --git a/geomagio/imfv122/__init__.py b/geomagio/imfv122/__init__.py
index 727ac7b80b561ea12022f0c84159d79d2b38b4a9..8044f6a7b15ae3e292ac89fb13e49d2cc2f64214 100644
--- a/geomagio/imfv122/__init__.py
+++ b/geomagio/imfv122/__init__.py
@@ -10,8 +10,4 @@ from .IMFV122Parser import IMFV122Parser
 from .StreamIMFV122Factory import StreamIMFV122Factory
 
 
-__all__ = [
-    'IMFV122Factory',
-    'IMFV122Parser',
-    'StreamIMFV122Factory'
-]
+__all__ = ["IMFV122Factory", "IMFV122Parser", "StreamIMFV122Factory"]
diff --git a/geomagio/imfv283/GOESIMFV283Factory.py b/geomagio/imfv283/GOESIMFV283Factory.py
index 2e87512dcb2b62fd51591da8404422b9eb950963..29bda1d9b9f912b481ba3b0f8ad9cc0bc5569d4f 100644
--- a/geomagio/imfv283/GOESIMFV283Factory.py
+++ b/geomagio/imfv283/GOESIMFV283Factory.py
@@ -41,18 +41,33 @@ class GOESIMFV283Factory(IMFV283Factory):
     IMFV283Factory
     Timeseriesfactory
     """
-    def __init__(self, directory=None, getdcpmessages=None,
-            password=None, server=None, user=None, **kwargs):
+
+    def __init__(
+        self,
+        directory=None,
+        getdcpmessages=None,
+        password=None,
+        server=None,
+        user=None,
+        **kwargs
+    ):
         IMFV283Factory.__init__(self, None, **kwargs)
         self.directory = directory
         self.getdcpmessages = getdcpmessages
         self.server = server
         self.user = user
         self.password = password
-        self.javaerror = b'FATAL'
-
-    def get_timeseries(self, starttime, endtime, observatory=None,
-            channels=None, type=None, interval=None):
+        self.javaerror = b"FATAL"
+
+    def get_timeseries(
+        self,
+        starttime,
+        endtime,
+        observatory=None,
+        channels=None,
+        type=None,
+        interval=None,
+    ):
         """Implements get_timeseries
 
         Notes: Calls IMFV283Factory.parse_string in place of
@@ -60,7 +75,7 @@ class GOESIMFV283Factory(IMFV283Factory):
         """
         observatory = observatory or self.observatory
         channels = channels or self.channels
-        self.criteria_file_name = observatory + '.sc'
+        self.criteria_file_name = observatory + ".sc"
         timeseries = Stream()
         output = self._retrieve_goes_messages(starttime, endtime, observatory)
         timeseries += self.parse_string(output)
@@ -70,8 +85,10 @@ class GOESIMFV283Factory(IMFV283Factory):
         timeseries.trim(starttime, endtime)
         # output the number of points we read for logging
         if len(timeseries):
-            print("Read %s points from %s" % (timeseries[0].stats.npts,
-                observatory), file=sys.stderr)
+            print(
+                "Read %s points from %s" % (timeseries[0].stats.npts, observatory),
+                file=sys.stderr,
+            )
 
         self._post_process(timeseries)
         if observatory is not None:
@@ -89,8 +106,9 @@ class GOESIMFV283Factory(IMFV283Factory):
         """
         for trace in timeseries:
             stats = trace.stats
-            self.observatoryMetadata.set_metadata(stats, stats.station,
-                    stats.channel, 'variation', 'minute')
+            self.observatoryMetadata.set_metadata(
+                stats, stats.station, stats.channel, "variation", "minute"
+            )
 
     def _retrieve_goes_messages(self, starttime, endtime, observatory):
         """Retrieve goes messages, using getdcpmessages commandline tool.
@@ -128,18 +146,27 @@ class GOESIMFV283Factory(IMFV283Factory):
         for server in self.server:
             print(server, file=sys.stderr)
             proc = subprocess.Popen(
-                    [self.getdcpmessages,
-                    '-h', server,
-                    '-u', self.user,
-                    '-P', self.password,
-                    '-f', self.directory + '/' + self.criteria_file_name,
-                    '-t', '60',
-                    '-n'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+                [
+                    self.getdcpmessages,
+                    "-h",
+                    server,
+                    "-u",
+                    self.user,
+                    "-P",
+                    self.password,
+                    "-f",
+                    self.directory + "/" + self.criteria_file_name,
+                    "-t",
+                    "60",
+                    "-n",
+                ],
+                stdout=subprocess.PIPE,
+                stderr=subprocess.PIPE,
+            )
             (output, error) = proc.communicate()
             print(error, file=sys.stderr)
             if error.find(self.javaerror) >= 0:
-                print('Error: could not connect to %s' % server,
-                    file=sys.stderr)
+                print("Error: could not connect to %s" % server, file=sys.stderr)
                 continue
             break
 
@@ -170,21 +197,21 @@ class GOESIMFV283Factory(IMFV283Factory):
         start = starttime - 2200
         end = endtime + 1800
 
-        criteria_file = self.directory + '/' + self.criteria_file_name
+        criteria_file = self.directory + "/" + self.criteria_file_name
         buf = []
-        buf.append('#\n# LRGS Search Criteria\n#\n')
-        buf.append('DAPS_SINCE: ')
-        buf.append(start.datetime.strftime('%y/%j %H:%M:%S\n'))
-        buf.append('DAPS_UNTIL: ')
-        buf.append(end.datetime.strftime('%y/%j %H:%M:%S\n'))
-        buf.append('NETWORK_LIST: ' + observatory.lower() + '.nl\n')
-        buf.append('DAPS_STATUS: N\n')
-        buf.append('RETRANSMITTED: N\n')
-        buf.append('ASCENDING_TIME: false\n')
-        buf.append('RT_SETTLE_DELAY: true\n')
+        buf.append("#\n# LRGS Search Criteria\n#\n")
+        buf.append("DAPS_SINCE: ")
+        buf.append(start.datetime.strftime("%y/%j %H:%M:%S\n"))
+        buf.append("DAPS_UNTIL: ")
+        buf.append(end.datetime.strftime("%y/%j %H:%M:%S\n"))
+        buf.append("NETWORK_LIST: " + observatory.lower() + ".nl\n")
+        buf.append("DAPS_STATUS: N\n")
+        buf.append("RETRANSMITTED: N\n")
+        buf.append("ASCENDING_TIME: false\n")
+        buf.append("RT_SETTLE_DELAY: true\n")
         criteria_dir = os.path.dirname(criteria_file)
         if not os.path.exists(criteria_dir):
             os.makedirs(criteria_dir)
-        with open(criteria_file, 'wb') as fh:
-            fh.write(''.join(buf).encode())
+        with open(criteria_file, "wb") as fh:
+            fh.write("".join(buf).encode())
             fh.close()
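
For context, the criteria file written above is plain LRGS search-criteria
text: the request window is padded (2200 s before start, 1800 s after end)
and timestamps use two-digit-year/day-of-year stamps. A sketch that builds
the same text (criteria_text is an illustrative helper):

from obspy.core import UTCDateTime

def criteria_text(starttime, endtime, observatory):
    start = starttime - 2200  # pad the window, as above
    end = endtime + 1800
    return (
        "#\n# LRGS Search Criteria\n#\n"
        + "DAPS_SINCE: " + start.datetime.strftime("%y/%j %H:%M:%S\n")
        + "DAPS_UNTIL: " + end.datetime.strftime("%y/%j %H:%M:%S\n")
        + "NETWORK_LIST: " + observatory.lower() + ".nl\n"
        + "DAPS_STATUS: N\nRETRANSMITTED: N\n"
        + "ASCENDING_TIME: false\nRT_SETTLE_DELAY: true\n"
    )

criteria_text(UTCDateTime("2019-11-01"), UTCDateTime("2019-11-02"), "BOU")
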
diff --git a/geomagio/imfv283/IMFV283Factory.py b/geomagio/imfv283/IMFV283Factory.py
index edd87fb0619815136f8d3bdc3b678f111698e2e6..f2c9d2effb77bfe4b00cc459d55004e43f42364a 100644
--- a/geomagio/imfv283/IMFV283Factory.py
+++ b/geomagio/imfv283/IMFV283Factory.py
@@ -26,8 +26,15 @@ class IMFV283Factory(TimeseriesFactory):
         TimeseriesFactory.__init__(self, **kwargs)
         self.observatoryMetadata = observatoryMetadata or ObservatoryMetadata()
 
-    def get_timeseries(self, starttime, endtime, observatory=None,
-            channels=None, type='variation', interval='minute'):
+    def get_timeseries(
+        self,
+        starttime,
+        endtime,
+        observatory=None,
+        channels=None,
+        type="variation",
+        interval="minute",
+    ):
         """Get timeseries data
 
         Parameters
@@ -55,20 +62,22 @@ class IMFV283Factory(TimeseriesFactory):
             retrieving timeseries.
         """
         timeseries = TimeseriesFactory.get_timeseries(
-                self,
-                starttime=starttime,
-                endtime=endtime,
-                observatory=observatory,
-                channels=channels,
-                type=type,
-                interval=interval)
+            self,
+            starttime=starttime,
+            endtime=endtime,
+            observatory=observatory,
+            channels=channels,
+            type=type,
+            interval=interval,
+        )
         observatory = observatory or self.observatory
         if observatory is not None:
             timeseries = timeseries.select(station=observatory)
         for trace in timeseries:
             stats = trace.stats
-            self.observatoryMetadata.set_metadata(stats, stats.station,
-                    stats.channel, 'variation', 'minute')
+            self.observatoryMetadata.set_metadata(
+                stats, stats.station, stats.channel, "variation", "minute"
+            )
         return timeseries
 
     def parse_string(self, data, **kwargs):
@@ -94,8 +103,7 @@ class IMFV283Factory(TimeseriesFactory):
             if isinstance(trace.data, numpy.ma.MaskedArray):
                 trace.data.set_fill_value(numpy.nan)
                 trace.data = trace.data.filled()
-        if stream.select(channel='D').count() > 0:
-            for trace in stream.select(channel='D'):
-                trace.data = ChannelConverter.get_radians_from_minutes(
-                    trace.data)
+        if stream.select(channel="D").count() > 0:
+            for trace in stream.select(channel="D"):
+                trace.data = ChannelConverter.get_radians_from_minutes(trace.data)
         return stream
diff --git a/geomagio/imfv283/IMFV283Parser.py b/geomagio/imfv283/IMFV283Parser.py
index 5c45a94cfa931996ece87a1d104f2be384a6d2a5..e9bcbfe564cc572f0b4dfa2ffa054eeb0de3fbcc 100644
--- a/geomagio/imfv283/IMFV283Parser.py
+++ b/geomagio/imfv283/IMFV283Parser.py
@@ -23,10 +23,10 @@ SHIFT = 1048576
 # USGS, it's actually E. Since only USGS and Canada (YXZF) use GOES
 # we are specifying it as E.
 CHANNELS = {
-    0: ['X', 'Y', 'Z', 'F'],
-    1: ['H', 'E', 'Z', 'F'],
-    2: ['1', 'D', 'I', 'F'],
-    3: ['1', '2', '3', '4']
+    0: ["X", "Y", "Z", "F"],
+    1: ["H", "E", "Z", "F"],
+    2: ["1", "D", "I", "F"],
+    3: ["1", "2", "3", "4"],
 }
 
 
@@ -75,22 +75,21 @@ class IMFV283Parser(object):
         for line in lines:
             # if line isn't at least 37 characters, there's no need to proceed.
             if len(line) <= HEADER_SIZE:
-                sys.stderr.write('Bad Header length\n')
+                sys.stderr.write("Bad Header length\n")
                 continue
 
             try:
                 msg_header = self._parse_msg_header(line)
 
-                data_len = msg_header['data_len']
+                data_len = msg_header["data_len"]
                 # check message size indicates data exists
                 if data_len < MSG_SIZE_100B or data_len > MSG_SIZE_300B:
-                    sys.stderr.write('Incorrect data Length \n')
+                    sys.stderr.write("Incorrect data Length \n")
                     continue
 
                 goes_data = self._process_ness_block(
-                        line,
-                        imfv283_codes.OBSERVATORIES[msg_header['obs']],
-                        data_len)
+                    line, imfv283_codes.OBSERVATORIES[msg_header["obs"]], data_len
+                )
 
                 goes_header = self._parse_goes_header(goes_data)
                 data = self._get_data(goes_header, goes_data)
@@ -99,8 +98,7 @@ class IMFV283Parser(object):
                 sys.stderr.write("Incorrect data line ")
                 sys.stderr.write(str(line))
 
-    def _estimate_data_time(self, transmission, doy, minute,
-            max_transmit_delay=1800):
+    def _estimate_data_time(self, transmission, doy, minute, max_transmit_delay=1800):
         """Get data start time for a GOES data packet.
 
         Parameters
@@ -131,17 +129,14 @@ class IMFV283Parser(object):
             whether a gps timing error was corrected
         """
         # convert to datetime
-        transmit_time = UTCDateTime(
-                b'20' + transmission[0:5] + b'T' + transmission[5:])
+        transmit_time = UTCDateTime(b"20" + transmission[0:5] + b"T" + transmission[5:])
         transmit_year = transmit_time.year
         # delta should not include first day of year
         data_time_delta = timedelta(days=doy - 1, minutes=minute)
-        data_time = UTCDateTime(
-                datetime(transmit_year, 1, 1) + data_time_delta)
+        data_time = UTCDateTime(datetime(transmit_year, 1, 1) + data_time_delta)
         if data_time > transmit_time:
             # data cannot be measured after it is transmitted
-            data_time = UTCDateTime(datetime(transmit_year - 1, 1, 1)) + \
-                    data_time_delta
+            data_time = UTCDateTime(datetime(transmit_year - 1, 1, 1)) + data_time_delta
         # check transmission delay, to detect gps clock errors
         transmit_delay = transmit_time - data_time
         if transmit_delay < max_transmit_delay:
@@ -181,7 +176,7 @@ class IMFV283Parser(object):
             dictionary of channel data arrays.
         """
         parse_data = {}
-        channels = CHANNELS[header['orient']]
+        channels = CHANNELS[header["orient"]]
         for channel in channels:
             parse_data[channel] = []
 
@@ -238,15 +233,15 @@ class IMFV283Parser(object):
         header = {}
 
         # day of year and minute of day are combined into 3 bytes
-        header['day'] = data[0] + 0x100 * (data[1] & 0xF)
-        header['minute'] = (data[2] & 0xFF) * 0x10 + data[1] / 0x10
+        header["day"] = data[0] + 0x100 * (data[1] & 0xF)
+        header["minute"] = (data[2] & 0xFF) * 0x10 + data[1] / 0x10
 
         # offset values for each channel are in bytes 3,4,5,6 respectively.
-        header['offset'] = data[3:7]
+        header["offset"] = data[3:7]
 
         # Not used.  alert_capable = (goes_block[7] & 0x01)
         # orient code. The orientation of the instrument (HEZF, etc.)
-        header['orient'] = data[7] / 0x40
+        header["orient"] = data[7] / 0x40
 
         # scale values bits 0,1,2,3 of byte 7.
         # Either 1 if bit not set, 2 if bit is set.
@@ -262,7 +257,7 @@ class IMFV283Parser(object):
             scale3 = 2
         if (data[7] & 0x4) > 0:
             scale4 = 2
-        header['scale'] = [scale1, scale2, scale3, scale4]
+        header["scale"] = [scale1, scale2, scale3, scale4]
 
         return header
 
@@ -280,16 +275,16 @@ class IMFV283Parser(object):
         """
         header = {}
 
-        header['daps_platform'] = msg[0:8]
-        platform = header['daps_platform'].decode()
-        header['obs'] = imfv283_codes.PLATFORMS[platform]
+        header["daps_platform"] = msg[0:8]
+        platform = header["daps_platform"].decode()
+        header["obs"] = imfv283_codes.PLATFORMS[platform]
         # if it's not in the observatory dictionary, we ignore it.
-        if header['obs'] is None:
+        if header["obs"] is None:
             return header
 
         # get the time of the transmission
-        header['transmission_time'] = msg[8:19]
-        header['data_len'] = int(msg[32:37])
+        header["transmission_time"] = msg[8:19]
+        header["data_len"] = int(msg[32:37])
         return header
 
     def _post_process(self, data, msg_header, goes_header):
@@ -306,28 +301,29 @@ class IMFV283Parser(object):
 
         """
         (goes_time, msg_time, corrected) = self._estimate_data_time(
-                msg_header['transmission_time'],
-                goes_header['day'],
-                goes_header['minute'])
+            msg_header["transmission_time"], goes_header["day"], goes_header["minute"]
+        )
         if corrected:
             sys.stderr.write(
-                'corrected gps week number error\n' +
-                '\ttransmit day={}, reported day={}, corrected day={}\n'
-                .format(msg_time.julday, goes_header['day'], goes_time.julday))
+                "corrected gps week number error\n"
+                + "\ttransmit day={}, reported day={}, corrected day={}\n".format(
+                    msg_time.julday, goes_header["day"], goes_time.julday
+                )
+            )
         if (msg_time - goes_time) > (24 * 60):
-            sys.stderr.write('data over twice as old as the message\n')
+            sys.stderr.write("data over twice as old as the message\n")
             return
 
-        scale = goes_header['scale']
-        offset = goes_header['offset']
-        orientation = goes_header['orient']
+        scale = goes_header["scale"]
+        offset = goes_header["offset"]
+        orientation = goes_header["orient"]
         for channel, loc in zip(CHANNELS[orientation], range(0, 4)):
             stats = obspy.core.Stats()
             stats.channel = channel
             stats.sampling_rate = 0.0166666666667
             stats.starttime = goes_time
             stats.npts = 12
-            stats.station = msg_header['obs']
+            stats.station = msg_header["obs"]
 
             numpy_data = numpy.array(data[channel], dtype=numpy.float64)
             numpy_data[numpy_data == DEAD_VALUE] = numpy.nan
@@ -379,8 +375,7 @@ class IMFV283Parser(object):
             goes_value2 = ((byte2 // 0x4) & 0xF) + ((byte1 & 0xF) * 0x10)
 
             # swap the bytes depending on domsat information.
-            if domsat['swap_hdr'] and cnt <= 11 or \
-                    domsat['swap_data'] and cnt > 11:
+            if domsat["swap_hdr"] and cnt <= 11 or domsat["swap_data"] and cnt > 11:
                 goes_block[goes_byte] = goes_value2
                 goes_block[goes_byte + 1] = goes_value1
             else:
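
The scale handling above reduces to four independent bit tests on byte 7 of
the GOES block. A minimal sketch of the same logic (decode_scales is a
hypothetical helper; the parser's exact bit-to-channel assignment above is
authoritative, not this sketch):

    def decode_scales(byte7):
        """Return four scale factors from the low four bits of byte 7:
        1 when a bit is clear, 2 when it is set."""
        return [2 if byte7 & (1 << bit) else 1 for bit in range(4)]

    decode_scales(0b0101)  # -> [2, 1, 2, 1]
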
diff --git a/geomagio/imfv283/StreamIMFV283Factory.py b/geomagio/imfv283/StreamIMFV283Factory.py
index 21f46eea951afe5624bf4779cd41c49d747f32a5..aff52877f6bbe3948acb82fddd31326148fb6854 100644
--- a/geomagio/imfv283/StreamIMFV283Factory.py
+++ b/geomagio/imfv283/StreamIMFV283Factory.py
@@ -18,12 +18,20 @@ class StreamIMFV283Factory(IMFV283Factory):
     IMFV283Factory
     TimeseriesFactory
     """
+
     def __init__(self, stream, **kwargs):
         IMFV283Factory.__init__(self, **kwargs)
         self._stream = stream
 
-    def get_timeseries(self, starttime, endtime, observatory=None,
-            channels=None, type=None, interval=None):
+    def get_timeseries(
+        self,
+        starttime,
+        endtime,
+        observatory=None,
+        channels=None,
+        type=None,
+        interval=None,
+    ):
         """Implements get_timeseries
 
         Notes: Calls IMFV283Factory.parse_string in place of
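
The Stream*Factory classes touched in this changeset all follow one pattern:
wrap an already-open file object and delegate to the parent factory's
parse_string or write_file. A hypothetical usage sketch (the file name is a
placeholder, and the open mode may need to be "rb" depending on how the
parser expects its input):

    from geomagio.imfv283 import StreamIMFV283Factory

    # "messages.imfv283" is a placeholder path; the factory parses
    # whatever the stream holds, so starttime/endtime are not used here
    with open("messages.imfv283", "r") as f:
        factory = StreamIMFV283Factory(stream=f)
        timeseries = factory.get_timeseries(starttime=None, endtime=None)
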
diff --git a/geomagio/imfv283/__init__.py b/geomagio/imfv283/__init__.py
index 8051ec49c633eef41dea0a052e88ece14a0f26b7..de3a6aeb8984c9e87e3ce984e0e536a1e80d71cc 100644
--- a/geomagio/imfv283/__init__.py
+++ b/geomagio/imfv283/__init__.py
@@ -12,8 +12,8 @@ from .IMFV283Parser import IMFV283Parser
 
 
 __all__ = [
-    'GOESIMFV283Factory',
-    'IMFV283Factory',
-    'StreamIMFV283Factory',
-    'IMFV283Parser'
+    "GOESIMFV283Factory",
+    "IMFV283Factory",
+    "StreamIMFV283Factory",
+    "IMFV283Parser",
 ]
diff --git a/geomagio/imfv283/imfv283_codes.py b/geomagio/imfv283/imfv283_codes.py
index d566ce1e917d40f6a7857d3a429fe0acef17b12b..da4a96b53d18e761e1b5fa3db1e50d578ee14f00 100644
--- a/geomagio/imfv283/imfv283_codes.py
+++ b/geomagio/imfv283/imfv283_codes.py
@@ -3,178 +3,178 @@ from __future__ import unicode_literals
 """Dictionary of observatory codes and ness block byte orders"""
 OBSERVATORIES = {
     # USGS
-    'BOU': {
-        'orient': 'HEZF',
-        'platform': '75C2D538',
-        'swap_hdr': False,
-        'swap_data': True
-    },
-    'BRW': {
-        'orient': 'HEZF',
-        'platform': '75C172CE',
-        'swap_hdr': False,
-        'swap_data': True
-    },
-    'BSL': {
-        'orient': 'HEZF',
-        'platform': '75C236CA',
-        'swap_hdr': False,
-        'swap_data': True
-    },
-    'CMO': {
-        'orient': 'HEZF',
-        'platform': '75C06342',
-        'swap_hdr': False,
-        'swap_data': True
-    },
-    'DED': {
-        'orient': 'HEZF',
-        'platform': '75C301AA',
-        'swap_hdr': False,
-        'swap_data': True
-    },
-    'FRD': {
-        'orient': 'HEZF',
-        'platform': '75C21026',
-        'swap_hdr': False,
-        'swap_data': True
-    },
-    'FRN': {
-        'orient': 'HEZF',
-        'platform': '75C2F3D4',
-        'swap_hdr': False,
-        'swap_data': True
-    },
-    'GUA': {
-        'orient': 'HEZF',
-        'platform': '75C33430',
-        'swap_hdr': False,
-        'swap_data': True
-    },
-    'HON': {
-        'orient': 'HEZF',
-        'platform': '75C161B8',
-        'swap_hdr': False,
-        'swap_data': True
-    },
-    'NEW': {
-        'orient': 'HEZF',
-        'platform': '75C2E0A2',
-        'swap_hdr': False,
-        'swap_data': True
-    },
-    'SHU': {
-        'orient': 'HEZF',
-        'platform': '75C266B6',
-        'swap_hdr': False,
-        'swap_data': True
-    },
-    'SIT': {
-        'orient': 'HEZF',
-        'platform': '75C28544',
-        'swap_hdr': False,
-        'swap_data': True
-    },
-    'SJG': {
-        'orient': 'HEZF',
-        'platform': '75C0B52A',
-        'swap_hdr': False,
-        'swap_data': True
-    },
-    'TUC': {
-        'orient': 'HEZF',
-        'platform': '75C14754',
-        'swap_hdr': False,
-        'swap_data': True
+    "BOU": {
+        "orient": "HEZF",
+        "platform": "75C2D538",
+        "swap_hdr": False,
+        "swap_data": True,
+    },
+    "BRW": {
+        "orient": "HEZF",
+        "platform": "75C172CE",
+        "swap_hdr": False,
+        "swap_data": True,
+    },
+    "BSL": {
+        "orient": "HEZF",
+        "platform": "75C236CA",
+        "swap_hdr": False,
+        "swap_data": True,
+    },
+    "CMO": {
+        "orient": "HEZF",
+        "platform": "75C06342",
+        "swap_hdr": False,
+        "swap_data": True,
+    },
+    "DED": {
+        "orient": "HEZF",
+        "platform": "75C301AA",
+        "swap_hdr": False,
+        "swap_data": True,
+    },
+    "FRD": {
+        "orient": "HEZF",
+        "platform": "75C21026",
+        "swap_hdr": False,
+        "swap_data": True,
+    },
+    "FRN": {
+        "orient": "HEZF",
+        "platform": "75C2F3D4",
+        "swap_hdr": False,
+        "swap_data": True,
+    },
+    "GUA": {
+        "orient": "HEZF",
+        "platform": "75C33430",
+        "swap_hdr": False,
+        "swap_data": True,
+    },
+    "HON": {
+        "orient": "HEZF",
+        "platform": "75C161B8",
+        "swap_hdr": False,
+        "swap_data": True,
+    },
+    "NEW": {
+        "orient": "HEZF",
+        "platform": "75C2E0A2",
+        "swap_hdr": False,
+        "swap_data": True,
+    },
+    "SHU": {
+        "orient": "HEZF",
+        "platform": "75C266B6",
+        "swap_hdr": False,
+        "swap_data": True,
+    },
+    "SIT": {
+        "orient": "HEZF",
+        "platform": "75C28544",
+        "swap_hdr": False,
+        "swap_data": True,
+    },
+    "SJG": {
+        "orient": "HEZF",
+        "platform": "75C0B52A",
+        "swap_hdr": False,
+        "swap_data": True,
+    },
+    "TUC": {
+        "orient": "HEZF",
+        "platform": "75C14754",
+        "swap_hdr": False,
+        "swap_data": True,
     },
     # NRCAN
-    'BLC': {
-        'orient': 'XYZF',
-        'platform': '75C3644C',
-        'swap_hdr': True,
-        'swap_data': False
-    },
-    'BRD': {
-        'orient': 'XYZF',
-        'platform': '75C387BE',
-        'swap_hdr': True,
-        'swap_data': False
-    },
-    'CBB': {
-        'orient': 'XYZF',
-        'platform': '75C351D6',
-        'swap_hdr': True,
-        'swap_data': False
-    },
-    'EUA': {
-        'orient': 'XYZF',
-        'platform': '75C2405A',
-        'swap_hdr': True,
-        'swap_data': False
-    },
-    'FCC': {
-        'orient': 'XYZF',
-        'platform': '75C3773A',
-        'swap_hdr': True,
-        'swap_data': False
-    },
-    'IQA': {
-        'orient': 'XYZF',
-        'platform': '75C0F620',
-        'swap_hdr': True,
-        'swap_data': False
-    },
-    'MEA': {
-        'orient': 'XYZF',
-        'platform': '75C32746',
-        'swap_hdr': True,
-        'swap_data': False
-    },
-    'OTT': {
-        'orient': 'XYZF',
-        'platform': '75C20350',
-        'swap_hdr': True,
-        'swap_data': False
-    },
-    'RES': {
-        'orient': 'XYZF',
-        'platform': '75C1D236',
-        'swap_hdr': True,
-        'swap_data': False
-    },
-    'SNK': {
-        'orient': 'XYZF',
-        'platform': '75C15422',
-        'swap_hdr': True,
-        'swap_data': False
-    },
-    'STJ': {
-        'orient': 'XYZF',
-        'platform': '75C1E7AC',
-        'swap_hdr': True,
-        'swap_data': False
-    },
-    'VIC': {
-        'orient': 'XYZF',
-        'platform': '75C2A3A8',
-        'swap_hdr': True,
-        'swap_data': False
-    },
-    'YKC': {
-        'orient': 'XYZF',
-        'platform': '75C312DC',
-        'swap_hdr': True,
-        'swap_data': False
+    "BLC": {
+        "orient": "XYZF",
+        "platform": "75C3644C",
+        "swap_hdr": True,
+        "swap_data": False,
+    },
+    "BRD": {
+        "orient": "XYZF",
+        "platform": "75C387BE",
+        "swap_hdr": True,
+        "swap_data": False,
+    },
+    "CBB": {
+        "orient": "XYZF",
+        "platform": "75C351D6",
+        "swap_hdr": True,
+        "swap_data": False,
+    },
+    "EUA": {
+        "orient": "XYZF",
+        "platform": "75C2405A",
+        "swap_hdr": True,
+        "swap_data": False,
+    },
+    "FCC": {
+        "orient": "XYZF",
+        "platform": "75C3773A",
+        "swap_hdr": True,
+        "swap_data": False,
+    },
+    "IQA": {
+        "orient": "XYZF",
+        "platform": "75C0F620",
+        "swap_hdr": True,
+        "swap_data": False,
+    },
+    "MEA": {
+        "orient": "XYZF",
+        "platform": "75C32746",
+        "swap_hdr": True,
+        "swap_data": False,
+    },
+    "OTT": {
+        "orient": "XYZF",
+        "platform": "75C20350",
+        "swap_hdr": True,
+        "swap_data": False,
+    },
+    "RES": {
+        "orient": "XYZF",
+        "platform": "75C1D236",
+        "swap_hdr": True,
+        "swap_data": False,
+    },
+    "SNK": {
+        "orient": "XYZF",
+        "platform": "75C15422",
+        "swap_hdr": True,
+        "swap_data": False,
+    },
+    "STJ": {
+        "orient": "XYZF",
+        "platform": "75C1E7AC",
+        "swap_hdr": True,
+        "swap_data": False,
+    },
+    "VIC": {
+        "orient": "XYZF",
+        "platform": "75C2A3A8",
+        "swap_hdr": True,
+        "swap_data": False,
+    },
+    "YKC": {
+        "orient": "XYZF",
+        "platform": "75C312DC",
+        "swap_hdr": True,
+        "swap_data": False,
     },
     # OTHER
-    'KGI': {
-        'orient': 'HEZF',
-        'platform': '75C394C8',
-        'swap_hdr': True,
-        'swap_data': False
-    }
+    "KGI": {
+        "orient": "HEZF",
+        "platform": "75C394C8",
+        "swap_hdr": True,
+        "swap_data": False,
+    },
 }
 
 PLATFORMS = {}
 for obs in OBSERVATORIES:
-    PLATFORMS[OBSERVATORIES[obs]['platform']] = obs
+    PLATFORMS[OBSERVATORIES[obs]["platform"]] = obs
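
PLATFORMS inverts the OBSERVATORIES table so an eight-character GOES platform
address can be mapped back to its observatory code, which is what the IMFV283
header parse above relies on. For example, using entries from the table:

    from geomagio.imfv283 import imfv283_codes

    imfv283_codes.PLATFORMS["75C2D538"]           # -> "BOU"
    imfv283_codes.OBSERVATORIES["BOU"]["orient"]  # -> "HEZF"
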
diff --git a/geomagio/pcdcp/PCDCPFactory.py b/geomagio/pcdcp/PCDCPFactory.py
index c6d783dac98f2da442a1746be5c7cd271bdc69e7..ab8b410d352bda93d3108dc11423cfd6482ee4ab 100644
--- a/geomagio/pcdcp/PCDCPFactory.py
+++ b/geomagio/pcdcp/PCDCPFactory.py
@@ -9,7 +9,7 @@ from .PCDCPWriter import PCDCPWriter
 
 
 # pattern for pcdcp file names
-PCDCP_FILE_PATTERN = '%(obs)s%(y)s%(j)s.%(i)s'
+PCDCP_FILE_PATTERN = "%(obs)s%(y)s%(j)s.%(i)s"
 # note: seconds files end in .raw after 2008, and .sec or .Sec in 2008 and earlier
 
 
@@ -59,8 +59,8 @@ class PCDCPFactory(TimeseriesFactory):
         sample_periods = {4: 60.0, 5: 1.0}
         sample_period = sample_periods[len(parser.times[0])]
 
-        yr = parser.header['year']
-        yrday = parser.header['yearday']
+        yr = parser.header["year"]
+        yrday = parser.header["yearday"]
 
         startday = obspy.core.UTCDateTime(yr + yrday)
         starttime = startday + int(parser.times[0]) * sample_period
@@ -73,16 +73,15 @@ class PCDCPFactory(TimeseriesFactory):
 
         for channel in list(data.keys()):
             stats = obspy.core.Stats()
-            stats.network = 'NT'
-            stats.station = parser.header['station']
+            stats.network = "NT"
+            stats.station = parser.header["station"]
             stats.starttime = starttime
             stats.sampling_rate = rate
             stats.npts = length
             stats.channel = channel
 
-            if channel == 'D':
-                data[channel] = ChannelConverter.get_radians_from_minutes(
-                    data[channel])
+            if channel == "D":
+                data[channel] = ChannelConverter.get_radians_from_minutes(data[channel])
 
             stream += obspy.core.Trace(data[channel], stats)
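
PCDCP_FILE_PATTERN is an old-style %-format template keyed by observatory,
year, julian day, and interval. A quick expansion sketch; the key values here
are hypothetical (the factory fills them from trace metadata, and e.g. the
year may be two or four digits):

    pattern = "%(obs)s%(y)s%(j)s.%(i)s"
    pattern % {"obs": "bou", "y": "19", "j": "001", "i": "min"}
    # -> "bou19001.min"
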
 
diff --git a/geomagio/pcdcp/PCDCPParser.py b/geomagio/pcdcp/PCDCPParser.py
index 1c6c87d92943923b8e7a36365a859ee1493e4ce8..676b31d93f1a4a7309a4aa10216e27bad08f14fa 100644
--- a/geomagio/pcdcp/PCDCPParser.py
+++ b/geomagio/pcdcp/PCDCPParser.py
@@ -4,9 +4,9 @@
 import numpy
 
 # values that represent missing data points in PCDCP
-NINES = numpy.int('9999999')
-NINES_RAW = numpy.int('99999990')
-NINES_DEG = numpy.int('9999')
+NINES = numpy.int("9999999")
+NINES_RAW = numpy.int("99999990")
+NINES_DEG = numpy.int("9999")
 
 
 class PCDCPParser(object):
@@ -29,8 +29,15 @@ class PCDCPParser(object):
     def __init__(self):
         """Create a new PCDCP parser."""
         # header fields
-        self.header_fields = ['station', 'year', 'yearday', 'date',
-                             'orientation', 'resolution', 'Version']
+        self.header_fields = [
+            "station",
+            "year",
+            "yearday",
+            "date",
+            "orientation",
+            "resolution",
+            "Version",
+        ]
         self.header = {}
         # resolution (float)
         self.resolution = 0.0
@@ -68,10 +75,11 @@ class PCDCPParser(object):
 
         Adds value to ``self.header``.
         """
-        self.header = dict(zip(self.header_fields,
-                               line.split(None, len(self.header_fields))))
+        self.header = dict(
+            zip(self.header_fields, line.split(None, len(self.header_fields)))
+        )
 
-        self.resolution = float(self.header['resolution'].split('nT')[0])
+        self.resolution = float(self.header["resolution"].split("nT")[0])
 
         return
 
@@ -107,9 +115,9 @@ class PCDCPParser(object):
         """Adds channel names to ``self.channels``.
         Creates empty values arrays in ``self.data``.
         """
-        self.channels.append('H')
-        self.channels.append('E')
-        self.channels.append('Z')
-        self.channels.append('F')
+        self.channels.append("H")
+        self.channels.append("E")
+        self.channels.append("Z")
+        self.channels.append("F")
 
         self._parsedata = ([], [], [], [], [])
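
The header parse above pairs a fixed field list with whitespace-split tokens:
passing len(self.header_fields) as maxsplit keeps trailing text in one piece,
and zip silently drops tokens beyond the field list. A standalone sketch of
the same idiom, with a hypothetical PCDCP header line:

    fields = ["station", "year", "yearday", "date",
              "orientation", "resolution", "Version"]
    line = "BOU  2019  001  01-Jan-19  HEZF  0.001nT  File Version 2.00"
    # maxsplit caps the token count; zip pairs only the first seven
    header = dict(zip(fields, line.split(None, len(fields))))
    float(header["resolution"].split("nT")[0])  # -> 0.001
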
diff --git a/geomagio/pcdcp/PCDCPWriter.py b/geomagio/pcdcp/PCDCPWriter.py
index d53781700ef8ab5856a3ec8a5620051157410d71..5d11ab1c54d2d8e6287a5e98e507489fabf665cb 100644
--- a/geomagio/pcdcp/PCDCPWriter.py
+++ b/geomagio/pcdcp/PCDCPWriter.py
@@ -32,8 +32,9 @@ class PCDCPWriter(object):
         for channel in channels:
             if timeseries.select(channel=channel).count() == 0:
                 raise TimeseriesFactoryException(
-                    'Missing channel "%s" for output, available channels %s' %
-                    (channel, str(TimeseriesUtility.get_channels(timeseries))))
+                    'Missing channel "%s" for output, available channels %s'
+                    % (channel, str(TimeseriesUtility.get_channels(timeseries)))
+                )
         stats = timeseries[0].stats
 
         # Set dead val for 1-sec data.
@@ -69,10 +70,20 @@ class PCDCPWriter(object):
         if stats.delta == 1:
             resolution = "0.001nT"
 
-        buf.append(observatory + '  ' + year + '  ' + yearday + '  ' +
-                    date + '  HEZF  ' + resolution + '  File Version 2.00\n')
-
-        return ''.join(buf)
+        buf.append(
+            observatory
+            + "  "
+            + year
+            + "  "
+            + yearday
+            + "  "
+            + date
+            + "  HEZF  "
+            + resolution
+            + "  File Version 2.00\n"
+        )
+
+        return "".join(buf)
 
     def _format_data(self, timeseries, channels, stats):
         """Format all data lines.
@@ -96,9 +107,10 @@ class PCDCPWriter(object):
         # Use a copy of the trace so that we don't modify the original.
         for trace in timeseries:
             traceLocal = trace.copy()
-            if traceLocal.stats.channel == 'D':
-                traceLocal.data = \
-                    ChannelConverter.get_minutes_from_radians(traceLocal.data)
+            if traceLocal.stats.channel == "D":
+                traceLocal.data = ChannelConverter.get_minutes_from_radians(
+                    traceLocal.data
+                )
 
             # TODO - we should look into multiplying the trace all at once
             # like this, but this gives an error on Windows at the moment.
@@ -112,11 +124,15 @@ class PCDCPWriter(object):
         delta = traces[0].stats.delta
 
         for i in range(len(traces[0].data)):
-            buf.append(self._format_values(
-                datetime.utcfromtimestamp(starttime + i * delta),
-                (t.data[i] for t in traces), stats))
+            buf.append(
+                self._format_values(
+                    datetime.utcfromtimestamp(starttime + i * delta),
+                    (t.data[i] for t in traces),
+                    stats,
+                )
+            )
 
-        return ''.join(buf)
+        return "".join(buf)
 
     def _format_values(self, time, values, stats):
         """Format one line of data values.
@@ -152,14 +168,26 @@ class PCDCPWriter(object):
 
         tt = time.timetuple()
 
-        totalMinutes = int(tt.tm_hour * hr_multiplier +
-                        tt.tm_min * mn_multiplier + tt.tm_sec * sc_multiplier)
-
-        return '{0:0>{tw}d} {1: >{dw}d} {2: >{dw}d} {3: >{dw}d}' \
-                ' {4: >{dw}d}\n'.format(totalMinutes, tw=time_width,
-                *[self.empty_value if numpy.isnan(val) else int(round(
-                    val * data_multiplier))
-                        for val in values], dw=data_width)
+        totalMinutes = int(
+            tt.tm_hour * hr_multiplier
+            + tt.tm_min * mn_multiplier
+            + tt.tm_sec * sc_multiplier
+        )
+
+        return (
+            "{0:0>{tw}d} {1: >{dw}d} {2: >{dw}d} {3: >{dw}d}"
+            " {4: >{dw}d}\n".format(
+                totalMinutes,
+                tw=time_width,
+                *[
+                    self.empty_value
+                    if numpy.isnan(val)
+                    else int(round(val * data_multiplier))
+                    for val in values
+                ],
+                dw=data_width
+            )
+        )
 
     @classmethod
     def format(self, timeseries, channels):
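
The reworked _format_values return above threads two widths through one
format string: a zero-padded time column (tw) and right-aligned data columns
(dw). A worked example with hypothetical sample values:

    fmt = "{0:0>{tw}d} {1: >{dw}d} {2: >{dw}d} {3: >{dw}d} {4: >{dw}d}\n"
    fmt.format(59, 208401, -742, 480003, 9999999, tw=4, dw=8)
    # -> "0059   208401     -742   480003  9999999\n"
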
diff --git a/geomagio/pcdcp/StreamPCDCPFactory.py b/geomagio/pcdcp/StreamPCDCPFactory.py
index c3561ef4a66a63418806389a70b9dc01561e75ce..ba52bb6be05162f6e1e0fc7be146bf31b7eabbc8 100644
--- a/geomagio/pcdcp/StreamPCDCPFactory.py
+++ b/geomagio/pcdcp/StreamPCDCPFactory.py
@@ -18,23 +18,38 @@ class StreamPCDCPFactory(PCDCPFactory):
     PCDCPFactory
     TimeseriesFactory
     """
+
     def __init__(self, stream, **kwargs):
         PCDCPFactory.__init__(self, **kwargs)
         self._stream = stream
 
-    def get_timeseries(self, starttime, endtime, observatory=None,
-            channels=None, type=None, interval=None):
+    def get_timeseries(
+        self,
+        starttime,
+        endtime,
+        observatory=None,
+        channels=None,
+        type=None,
+        interval=None,
+    ):
         """Implements get_timeseries
 
         Notes: Calls PCDCPFactory.parse_string in place of
             PCDCPFactory.get_timeseries.
         """
-        return PCDCPFactory.parse_string(self,
-                data=self._stream.read(),
-                observatory=observatory)
-
-    def put_timeseries(self, timeseries, starttime=None, endtime=None,
-            channels=None, type=None, interval=None):
+        return PCDCPFactory.parse_string(
+            self, data=self._stream.read(), observatory=observatory
+        )
+
+    def put_timeseries(
+        self,
+        timeseries,
+        starttime=None,
+        endtime=None,
+        channels=None,
+        type=None,
+        interval=None,
+    ):
         """Implements put_timeseries
 
         Notes: Calls PCDCPFactory.write_file in place of
diff --git a/geomagio/pcdcp/__init__.py b/geomagio/pcdcp/__init__.py
index ef16c3ab6487bf9c41d3b9be4613e89f74def868..a391ac9c6f19e68c517f4fee2897a92d2a3bbcf8 100644
--- a/geomagio/pcdcp/__init__.py
+++ b/geomagio/pcdcp/__init__.py
@@ -8,9 +8,4 @@ from .PCDCPParser import PCDCPParser
 from .PCDCPWriter import PCDCPWriter
 
 
-__all__ = [
-    'PCDCPFactory',
-    'StreamPCDCPFactory',
-    'PCDCPParser',
-    'PCDCPWriter'
-]
+__all__ = ["PCDCPFactory", "StreamPCDCPFactory", "PCDCPParser", "PCDCPWriter"]
diff --git a/geomagio/temperature/StreamTEMPFactory.py b/geomagio/temperature/StreamTEMPFactory.py
index 4607d8c416d7110e11b484e86e6a7824167772ab..356f6a598ff8224c582a08a005a2a5a971c92abb 100644
--- a/geomagio/temperature/StreamTEMPFactory.py
+++ b/geomagio/temperature/StreamTEMPFactory.py
@@ -18,12 +18,20 @@ class StreamTEMPFactory(TEMPFactory):
     TEMPFactory
     TimeseriesFactory
     """
+
     def __init__(self, stream, **kwargs):
         TEMPFactory.__init__(self, **kwargs)
         self._stream = stream
 
-    def put_timeseries(self, timeseries, starttime=None, endtime=None,
-            channels=None, type=None, interval=None):
+    def put_timeseries(
+        self,
+        timeseries,
+        starttime=None,
+        endtime=None,
+        channels=None,
+        type=None,
+        interval=None,
+    ):
         """Implements put_timeseries
 
         Notes: Calls TEMPFactory.write_file in place of
diff --git a/geomagio/temperature/TEMPFactory.py b/geomagio/temperature/TEMPFactory.py
index c0403a985f06d52c64a99e34ab2ffd5b3386487e..fad62a9769ac7c87f9c8ce07f0a0f059580e7c74 100644
--- a/geomagio/temperature/TEMPFactory.py
+++ b/geomagio/temperature/TEMPFactory.py
@@ -6,7 +6,7 @@ from .TEMPWriter import TEMPWriter
 
 
 # pattern for temp file names
-TEMP_FILE_PATTERN = '%(obs)s%(y)s%(j)s.%(i)s'
+TEMP_FILE_PATTERN = "%(obs)s%(y)s%(j)s.%(i)s"
 
 
 class TEMPFactory(TimeseriesFactory):
diff --git a/geomagio/temperature/TEMPWriter.py b/geomagio/temperature/TEMPWriter.py
index d7dfebe3b695d10b2375c5adcf25c0deaadcf871..57667d0152d787fdcc79a23cc48e6c26a6a4ea3e 100644
--- a/geomagio/temperature/TEMPWriter.py
+++ b/geomagio/temperature/TEMPWriter.py
@@ -12,7 +12,7 @@ class TEMPWriter(object):
     """TEMP writer.
     """
 
-    def __init__(self, empty_value=numpy.int('9999')):
+    def __init__(self, empty_value=numpy.int("9999")):
         self.empty_value = empty_value
 
     def write(self, out, timeseries, channels):
@@ -30,8 +30,9 @@ class TEMPWriter(object):
         for channel in channels:
             if timeseries.select(channel=channel).count() == 0:
                 raise TimeseriesFactoryException(
-                    'Missing channel "%s" for output, available channels %s' %
-                    (channel, str(TimeseriesUtility.get_channels(timeseries))))
+                    'Missing channel "%s" for output, available channels %s'
+                    % (channel, str(TimeseriesUtility.get_channels(timeseries)))
+                )
         stats = timeseries[0].stats
         out.write(self._format_header(stats))
         out.write(self._format_data(timeseries, channels))
@@ -56,10 +57,18 @@ class TEMPWriter(object):
         yearday = str(stats.starttime.julday).zfill(3)
         date = stats.starttime.strftime("%d-%b-%y")
 
-        buf.append(observatory + '  ' + year + '  ' + yearday + '  ' + date +
-                    '  T1 T2 T3 T4 V1 Deg-C*10/volts*10  File Version 1.00\n')
+        buf.append(
+            observatory
+            + "  "
+            + year
+            + "  "
+            + yearday
+            + "  "
+            + date
+            + "  T1 T2 T3 T4 V1 Deg-C*10/volts*10  File Version 1.00\n"
+        )
 
-        return ''.join(buf)
+        return "".join(buf)
 
     def _format_data(self, timeseries, channels):
         """Format all data lines.
@@ -96,11 +105,14 @@ class TEMPWriter(object):
         delta = traces[0].stats.delta
 
         for i in range(len(traces[0].data)):
-            buf.append(self._format_values(
-                datetime.utcfromtimestamp(starttime + i * delta),
-                (t.data[i] for t in traces)))
+            buf.append(
+                self._format_values(
+                    datetime.utcfromtimestamp(starttime + i * delta),
+                    (t.data[i] for t in traces),
+                )
+            )
 
-        return ''.join(buf)
+        return "".join(buf)
 
     def _format_values(self, time, values):
         """Format one line of data values.
@@ -121,11 +133,13 @@ class TEMPWriter(object):
         tt = time.timetuple()
         totalMinutes = int(tt.tm_hour * 60 + tt.tm_min)
 
-        return '{0:0>4d} {1: >5d} {2: >5d} {3: >5d} {4: >5d}' \
-                ' {5: >5d}\n'.format(totalMinutes,
-                *[self.empty_value if numpy.isnan(val) else int(round(
-                    val * 10))
-                        for val in values])
+        return "{0:0>4d} {1: >5d} {2: >5d} {3: >5d} {4: >5d}" " {5: >5d}\n".format(
+            totalMinutes,
+            *[
+                self.empty_value if numpy.isnan(val) else int(round(val * 10))
+                for val in values
+            ]
+        )
 
     @classmethod
     def format(self, timeseries, channels):
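
TEMP data lines store tenths (the header advertises Deg-C*10/volts*10), with
9999 as the writer's default empty value. A tiny sketch of the per-value
conversion used by _format_values above (to_tenths is a hypothetical helper):

    import numpy

    def to_tenths(val, empty=9999):
        """Scale a reading to integer tenths; NaN maps to the fill value."""
        return empty if numpy.isnan(val) else int(round(val * 10))

    [to_tenths(v) for v in (21.37, float("nan"))]  # -> [214, 9999]
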
diff --git a/geomagio/temperature/__init__.py b/geomagio/temperature/__init__.py
index c5769f5da4de5e5da6f680c7903f0ce0d4051c01..77e464af4dc09e01f6e4313d9adf81d6d6c9db50 100644
--- a/geomagio/temperature/__init__.py
+++ b/geomagio/temperature/__init__.py
@@ -7,8 +7,4 @@ from .StreamTEMPFactory import StreamTEMPFactory
 from .TEMPWriter import TEMPWriter
 
 
-__all__ = [
-    'TEMPFactory',
-    'StreamTEMPFactory',
-    'TEMPWriter'
-]
+__all__ = ["TEMPFactory", "StreamTEMPFactory", "TEMPWriter"]
diff --git a/geomagio/vbf/StreamVBFFactory.py b/geomagio/vbf/StreamVBFFactory.py
index ad02e5ced80b04666d71fb88c0fc0772b25a3ead..a8962c6303aa64a8e95b724552e2a5bfc73eb149 100644
--- a/geomagio/vbf/StreamVBFFactory.py
+++ b/geomagio/vbf/StreamVBFFactory.py
@@ -23,8 +23,15 @@ class StreamVBFFactory(VBFFactory):
         VBFFactory.__init__(self, **kwargs)
         self._stream = stream
 
-    def put_timeseries(self, timeseries, starttime=None, endtime=None,
-            channels=None, type=None, interval=None):
+    def put_timeseries(
+        self,
+        timeseries,
+        starttime=None,
+        endtime=None,
+        channels=None,
+        type=None,
+        interval=None,
+    ):
         """Implements put_timeseries
 
         Notes: Calls VBFFactory.write_file in place of
diff --git a/geomagio/vbf/VBFFactory.py b/geomagio/vbf/VBFFactory.py
index d708a354086c6ebd39e99580f1192142e956db91..5d96582b2f7ebec299c864e820ee31ddb578b953 100644
--- a/geomagio/vbf/VBFFactory.py
+++ b/geomagio/vbf/VBFFactory.py
@@ -6,7 +6,7 @@ from .VBFWriter import VBFWriter
 
 
 # pattern for vbf file names
-VBF_FILE_PATTERN = '%(obs)s%(y)s%(j)s.%(i)s'
+VBF_FILE_PATTERN = "%(obs)s%(y)s%(j)s.%(i)s"
 
 
 class VBFFactory(TimeseriesFactory):
diff --git a/geomagio/vbf/VBFWriter.py b/geomagio/vbf/VBFWriter.py
index 47794c812adbb8557282eb5a7d95f10899d6aeb1..518e4a0ba68fca1ee4711bfcc896f42cd1211b7a 100644
--- a/geomagio/vbf/VBFWriter.py
+++ b/geomagio/vbf/VBFWriter.py
@@ -12,7 +12,7 @@ class VBFWriter(object):
     """VBF writer.
     """
 
-    def __init__(self, empty_value=numpy.int('9999999')):
+    def __init__(self, empty_value=numpy.int("9999999")):
         self.empty_value = empty_value
 
     def write(self, out, timeseries, channels):
@@ -30,8 +30,9 @@ class VBFWriter(object):
         for channel in channels:
             if timeseries.select(channel=channel).count() == 0:
                 raise TimeseriesFactoryException(
-                    'Missing channel "%s" for output, available channels %s' %
-                    (channel, str(TimeseriesUtility.get_channels(timeseries))))
+                    'Missing channel "%s" for output, available channels %s'
+                    % (channel, str(TimeseriesUtility.get_channels(timeseries)))
+                )
         stats = timeseries[0].stats
 
         out.write(self._format_header(stats))
@@ -58,10 +59,18 @@ class VBFWriter(object):
         yearday = str(stats.starttime.julday).zfill(3)
         date = stats.starttime.strftime("%d-%b-%y")
 
-        buf.append(observatory + '  ' + year + '  ' + yearday + '  ' +
-                    date + '  Hvolt Hbin Evolt Ebin Zvolt Zbin Version 1.0\n')
+        buf.append(
+            observatory
+            + "  "
+            + year
+            + "  "
+            + yearday
+            + "  "
+            + date
+            + "  Hvolt Hbin Evolt Ebin Zvolt Zbin Version 1.0\n"
+        )
 
-        return ''.join(buf)
+        return "".join(buf)
 
     def _format_data(self, timeseries, channels):
         """Format all data lines.
@@ -85,9 +94,10 @@ class VBFWriter(object):
         # Use a copy of the trace so that we don't modify the original.
         for trace in timeseries:
             traceLocal = trace.copy()
-            if traceLocal.stats.channel == 'D':
-                traceLocal.data = \
-                    ChannelConverter.get_minutes_from_radians(traceLocal.data)
+            if traceLocal.stats.channel == "D":
+                traceLocal.data = ChannelConverter.get_minutes_from_radians(
+                    traceLocal.data
+                )
 
             # TODO - we should look into multiplying the trace all at once
             # like this, but this gives an error on Windows at the moment.
@@ -101,11 +111,14 @@ class VBFWriter(object):
         delta = traces[0].stats.delta
 
         for i in range(len(traces[0].data)):
-            buf.append(self._format_values(
-                datetime.utcfromtimestamp(starttime + i * delta),
-                (t.data[i] for t in traces)))
+            buf.append(
+                self._format_values(
+                    datetime.utcfromtimestamp(starttime + i * delta),
+                    (t.data[i] for t in traces),
+                )
+            )
 
-        return ''.join(buf)
+        return "".join(buf)
 
     def _format_values(self, time, values):
         """Format one line of data values.
@@ -135,12 +148,14 @@ class VBFWriter(object):
         for idx, valx in enumerate(values):
             if ~numpy.isnan(valx):
                 if idx == 0 or idx == 2 or idx == 4:
-                    vblist[idx] = valx / 1000.
+                    vblist[idx] = valx / 1000.0
                 else:
                     vblist[idx] = int(valx)
 
-        return '{0:0>5d} {1: >10.6f} {2: >4d} {3: >10.6f} {4: >4d} ' \
-                '{5: >10.6f} {6: >4d}\n'.format(totalMinutes, *vblist)
+        return (
+            "{0:0>5d} {1: >10.6f} {2: >4d} {3: >10.6f} {4: >4d} "
+            "{5: >10.6f} {6: >4d}\n".format(totalMinutes, *vblist)
+        )
 
     @classmethod
     def format(self, timeseries, channels):
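
In _format_values above, even-indexed values (0, 2, 4) are the Hvolt, Evolt,
and Zvolt columns and are divided by 1000.0, while odd-indexed values are
integer bin numbers. A compact sketch of that branch, assuming non-NaN inputs
(the real writer presets vblist with fill values first):

    def to_vbf_fields(values):
        """Alternate volt columns (scaled by 1/1000) with integer bins."""
        return [
            val / 1000.0 if idx % 2 == 0 else int(val)
            for idx, val in enumerate(values)
        ]

    to_vbf_fields([1234.5, 7, 998.0, 3])  # -> [1.2345, 7, 0.998, 3]
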
diff --git a/geomagio/vbf/__init__.py b/geomagio/vbf/__init__.py
index 59fc79065b088da79353c7e0beb0257fcda43985..355a551bd0103c7fea1f704c369a788aa0e027d1 100644
--- a/geomagio/vbf/__init__.py
+++ b/geomagio/vbf/__init__.py
@@ -7,8 +7,4 @@ from .StreamVBFFactory import StreamVBFFactory
 from .VBFWriter import VBFWriter
 
 
-__all__ = [
-    'VBFFactory',
-    'StreamVBFFactory',
-    'VBFWriter'
-]
+__all__ = ["VBFFactory", "StreamVBFFactory", "VBFWriter"]
diff --git a/geomagio/webservice/__init__.py b/geomagio/webservice/__init__.py
index c24cf7dd8528f6ca66b1085b9dc44598e27b9da9..86eab5600dd5e87f8e4a7a4ce4b784bc982005cc 100644
--- a/geomagio/webservice/__init__.py
+++ b/geomagio/webservice/__init__.py
@@ -4,6 +4,4 @@ from __future__ import absolute_import
 from .app import create_app
 
 
-__all__ = [
-    'create_app'
-]
+__all__ = ["create_app"]
diff --git a/geomagio/webservice/app.py b/geomagio/webservice/app.py
index fc3767079dbe0564712d4740c0143d42145da671..289ff5cfe5312d37483d948cfc2a3d6729f11632 100644
--- a/geomagio/webservice/app.py
+++ b/geomagio/webservice/app.py
@@ -19,8 +19,8 @@ def create_app():
     session.init_app(app)
 
     # add default route
-    @app.route('/')
+    @app.route("/")
     def index():
-        return flask.render_template('index.html')
+        return flask.render_template("index.html")
 
     return app
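
create_app is a standard Flask application factory, so the webservice can be
started from code or discovered by the flask CLI. A usage sketch; the port
and the CLI environment setup are placeholders per Flask convention, not from
this repo:

    from geomagio.webservice import create_app

    app = create_app()

    if __name__ == "__main__":
        # development server only; use a WSGI server in production
        app.run(port=5000)

    # the flask CLI can also discover the factory:
    #   FLASK_APP=geomagio.webservice flask run
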
diff --git a/geomagio/webservice/database.py b/geomagio/webservice/database.py
index c24e75f9c385b8b6d20cc2519c5ce6c511d0776b..b156d3a157b74b4e47c8f5ffdb7b5704154d995a 100644
--- a/geomagio/webservice/database.py
+++ b/geomagio/webservice/database.py
@@ -1,4 +1,3 @@
-
 import flask
 import flask_migrate
 import flask_sqlalchemy
diff --git a/geomagio/webservice/login.py b/geomagio/webservice/login.py
index 7281a6e2828dfa63acbf2c07dda5c3309ef5b832..8d593209fb1b1a069da0d23c31c20b0c08c9e819 100644
--- a/geomagio/webservice/login.py
+++ b/geomagio/webservice/login.py
@@ -10,7 +10,7 @@ from .database import db
 
 
 # Blueprint for auth routes
-blueprint = flask.Blueprint('login', __name__)
+blueprint = flask.Blueprint("login", __name__)
 login_manager = flask_login.LoginManager()
 oauth = OAuth()
 
@@ -23,17 +23,17 @@ def init_app(app: flask.Flask):
     global oauth
     # LoginManager
     login_manager.init_app(app)
-    login_manager.login_view = 'login.login'
+    login_manager.login_view = "login.login"
     # OpenID client
     oauth.init_app(app)
     # register oauth client (needs to happen after init_app)
     # creates property "oauth.openid"
     oauth.register(
-        name='openid',
-        client_id=os.getenv('OPENID_CLIENT_ID'),
-        client_secret=os.getenv('OPENID_CLIENT_SECRET'),
-        server_metadata_url=os.getenv('OPENID_METADATA_URL'),
-        client_kwargs={"scope": "openid email"}
+        name="openid",
+        client_id=os.getenv("OPENID_CLIENT_ID"),
+        client_secret=os.getenv("OPENID_CLIENT_SECRET"),
+        server_metadata_url=os.getenv("OPENID_METADATA_URL"),
+        client_kwargs={"scope": "openid email"},
     )
     # register blueprint routes
     app.register_blueprint(blueprint)
@@ -42,7 +42,8 @@ def init_app(app: flask.Flask):
 class User(db.Model, flask_login.UserMixin):
     """User database model.
     """
-    __tablename__ = 'user'
+
+    __tablename__ = "user"
     id = db.Column(db.Integer, primary_key=True)
     openid = db.Column(db.Text, unique=True, nullable=False)
     email = db.Column(db.Text, unique=True, nullable=False)
@@ -53,10 +54,10 @@ class User(db.Model, flask_login.UserMixin):
 
     def to_dict(self):
         return {
-            'id': self.id,
-            'openid': self.openid,
-            'email': self.email,
-            'groups': self.groups
+            "id": self.id,
+            "openid": self.openid,
+            "email": self.email,
+            "groups": self.groups,
         }
 
 
@@ -65,19 +66,19 @@ def _load_user(user_id: str):
     return User.query.filter_by(openid=user_id).first()
 
 
-@blueprint.route('/hello')
+@blueprint.route("/hello")
 @flask_login.login_required
 def hello():
-    return flask.render_template('hello.html')
+    return flask.render_template("hello.html")
 
 
-@blueprint.route('/login')
+@blueprint.route("/login")
 def login():
-    redirect_uri = flask.url_for('login.authorize', _external=True)
+    redirect_uri = flask.url_for("login.authorize", _external=True)
     return oauth.openid.authorize_redirect(redirect_uri)
 
 
-@blueprint.route('/login/callback')
+@blueprint.route("/login/callback")
 def authorize():
     oauth.openid.authorize_access_token()
     userinfo = oauth.openid.userinfo()
@@ -88,11 +89,11 @@ def authorize():
         db.session.add(user)
         db.session.commit()
     flask_login.login_user(user)
-    return flask.redirect(flask.url_for('login.hello'))
+    return flask.redirect(flask.url_for("login.hello"))
 
 
-@blueprint.route('/logout')
+@blueprint.route("/logout")
 @flask_login.login_required
 def logout():
     flask_login.logout_user()
-    return flask.redirect(flask.url_for('index'))
+    return flask.redirect(flask.url_for("index"))
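
The OpenID client above is configured entirely from environment variables, so
login cannot work until OPENID_CLIENT_ID, OPENID_CLIENT_SECRET, and
OPENID_METADATA_URL are set. A hypothetical local-testing setup (all values
are placeholders):

    import os

    # placeholder credentials for a local OpenID provider; real
    # deployments set these in the process environment before startup
    os.environ.setdefault("OPENID_CLIENT_ID", "my-client-id")
    os.environ.setdefault("OPENID_CLIENT_SECRET", "my-client-secret")
    os.environ.setdefault(
        "OPENID_METADATA_URL",
        "https://provider.example/.well-known/openid-configuration",
    )
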
diff --git a/geomagio/webservice/metadata.py b/geomagio/webservice/metadata.py
index 863258e9c8218183fe600e13fb6247e2bc87b929..a87c444a8d62c5fabdead446b113033264ea3884 100644
--- a/geomagio/webservice/metadata.py
+++ b/geomagio/webservice/metadata.py
@@ -5,8 +5,8 @@ from .database import db
 
 
 # known category values as constants
-CATEGORY_FLAG = 'flag'
-CATEGORY_ADJUSTED_MATRIX = 'adjusted-matrix'
+CATEGORY_FLAG = "flag"
+CATEGORY_ADJUSTED_MATRIX = "adjusted-matrix"
 
 
 class Metadata(db.Model):
@@ -49,7 +49,7 @@ class Metadata(db.Model):
     """
 
     # table and primary key
-    __tablename__ = 'metadata'
+    __tablename__ = "metadata"
     id = db.Column(db.Integer, primary_key=True)
 
     # author
diff --git a/geomagio/webservice/session.py b/geomagio/webservice/session.py
index d38da01d92511b83d7b100807a1971606e5f9e56..5d05014dc202651059fd28ad3bf8d9215d747156 100644
--- a/geomagio/webservice/session.py
+++ b/geomagio/webservice/session.py
@@ -7,6 +7,6 @@ from .database import db
 
 
 def init_app(app: flask.Flask):
-    app.config['SESSION_TYPE'] = 'sqlalchemy'
-    app.config['SESSION_SQLALCHEMY'] = db
+    app.config["SESSION_TYPE"] = "sqlalchemy"
+    app.config["SESSION_SQLALCHEMY"] = db
     flask_session.Session(app)
diff --git a/setup.py b/setup.py
index bad5d3ab29ca6768d36a161a488273d5fff32129..fc2736ec5704654003ce03db3eecc1932e858fd2 100644
--- a/setup.py
+++ b/setup.py
@@ -1,33 +1,23 @@
 from distutils.core import setup
 
 setup(
-    name='geomag-algorithms',
-    version='1.0.0',
-    description='USGS Geomag IO Library',
-    url='https://github.com/usgs/geomag-algorithms',
+    name="geomag-algorithms",
+    version="1.0.0",
+    description="USGS Geomag IO Library",
+    url="https://github.com/usgs/geomag-algorithms",
     packages=[
-        'geomagio',
-        'geomagio.algorithm',
-        'geomagio.binlog',
-        'geomagio.edge',
-        'geomagio.iaga2002',
-        'geomagio.imfjson',
-        'geomagio.imfv122',
-        'geomagio.imfv283',
-        'geomagio.pcdcp',
-        'geomagio.temperature',
-        'geomagio.vbf'
+        "geomagio",
+        "geomagio.algorithm",
+        "geomagio.binlog",
+        "geomagio.edge",
+        "geomagio.iaga2002",
+        "geomagio.imfjson",
+        "geomagio.imfv122",
+        "geomagio.imfv283",
+        "geomagio.pcdcp",
+        "geomagio.temperature",
+        "geomagio.vbf",
     ],
-    install_requires=[
-        'numpy',
-        'matplotlib',
-        'scipy',
-        'obspy',
-        'pycurl'
-    ],
-    scripts=[
-        'bin/geomag.py',
-        'bin/geomag_webservice.py',
-        'bin/make_cal.py'
-    ]
+    install_requires=["numpy", "matplotlib", "scipy", "obspy", "pycurl"],
+    scripts=["bin/geomag.py", "bin/geomag_webservice.py", "bin/make_cal.py"],
 )
diff --git a/test/ChannelConverter_test.py b/test/ChannelConverter_test.py
index aa33c9f3042b1bcebbce2457e0bd46065c25e0c7..bf050888bd9229c4fd2e311f81463975af8a1591 100644
--- a/test/ChannelConverter_test.py
+++ b/test/ChannelConverter_test.py
@@ -57,8 +57,7 @@ class ChannelConverterTest:
         z = 2
         fv = channel.get_computed_f_using_squares(h, e, z)
         fs = math.sqrt(12)
-        assert_almost_equal(fv, fs, 8, 'Expect fv to almost equal sqrt(12)',
-                True)
+        assert_almost_equal(fv, fs, 8, "Expect fv to almost equal sqrt(12)", True)
 
     def test_get_geo_from_obs(self):
         """ChannelConverter_test.test_get_geo_from_obs()
@@ -74,8 +73,8 @@ class ChannelConverterTest:
         h = 1
         e = 1
         (X, Y) = channel.get_geo_from_obs(h, e)
-        assert_almost_equal(X, 1, 8, 'Expect X to almost equal 1.', True)
-        assert_almost_equal(Y, 1, 8, 'Expect Y to almost equal 1.', True)
+        assert_almost_equal(X, 1, 8, "Expect X to almost equal 1.", True)
+        assert_almost_equal(Y, 1, 8, "Expect Y to almost equal 1.", True)
 
         # 2) Call get_geo_from_obs using h,e values of cos(15), sin(15)
         #       (to create a d of 15 degrees) and a d0 of 15 degrees.
@@ -84,10 +83,8 @@ class ChannelConverterTest:
         e = sin(15 * D2R)
         d0 = 15 * D2R
         (X, Y) = channel.get_geo_from_obs(h, e, d0)
-        assert_almost_equal(X, cos(30 * D2R), 8,
-                'Expect X to equal cos(30)', True)
-        assert_almost_equal(Y, sin(30 * D2R), 8,
-                'Expect Y to equal sin(30)', True)
+        assert_almost_equal(X, cos(30 * D2R), 8, "Expect X to equal cos(30)", True)
+        assert_almost_equal(Y, sin(30 * D2R), 8, "Expect Y to equal sin(30)", True)
 
         # 3) Call get_geo_from_obs using h,e values of 1,0 with a d0 of 315
         #   degrees. The geographic X,Y will be cos(45), sin(-45)
@@ -95,10 +92,8 @@ class ChannelConverterTest:
         e = 0
         d0 = 315 * D2R
         (X, Y) = channel.get_geo_from_obs(h, e, d0)
-        assert_almost_equal(X, cos(45 * D2R), 8,
-                'Expect X to equal cos(45).', True)
-        assert_almost_equal(Y, sin(-45 * D2R), 8,
-                'Expect Y to equal sin(45).', True)
+        assert_almost_equal(X, cos(45 * D2R), 8, "Expect X to equal cos(45).", True)
+        assert_almost_equal(Y, sin(-45 * D2R), 8, "Expect Y to equal sin(-45).", True)
 
         # 4) Call get_geo_from_obs using h,e values of cos_30,sin_30 and d0 of
         #   30 degrees. The geographic X,Y will be cos(-30), sin(-30), due to
@@ -108,10 +103,8 @@ class ChannelConverterTest:
         e = sin(30 * D2R)
         d0 = -60 * D2R
         (X, Y) = channel.get_geo_from_obs(h, e, d0)
-        assert_almost_equal(X, cos(-30 * D2R), 8,
-                'Expect X to equal cos(60).', True)
-        assert_almost_equal(Y, sin(-30 * D2R), 8,
-                'Expect Y to equal sin(60).', True)
+        assert_almost_equal(X, cos(-30 * D2R), 8, "Expect X to equal cos(-30).", True)
+        assert_almost_equal(Y, sin(-30 * D2R), 8, "Expect Y to equal sin(-30).", True)
 
     def test_get_geo_from_mag(self):
         """ChannelConverter_test.test_get_geo_from_mag()
@@ -125,10 +118,8 @@ class ChannelConverterTest:
         h = 1
         d = 30 * D2R
         (X, Y) = channel.get_geo_from_mag(h, d)
-        assert_almost_equal(X, cos(30 * D2R), 8,
-                'Expect X to be cos(30).', True)
-        assert_almost_equal(Y, sin(30 * D2R), 8,
-                'Expect Y to be sin(30).', True)
+        assert_almost_equal(X, cos(30 * D2R), 8, "Expect X to be cos(30).", True)
+        assert_almost_equal(Y, sin(30 * D2R), 8, "Expect Y to be sin(30).", True)
 
     def test_get_geo_x_from_mag(self):
         """ChannelConverter_test.test_get_geo_x_from_mag()
@@ -142,14 +133,13 @@ class ChannelConverterTest:
         h = 2
         d = 45 * D2R
         X = channel.get_geo_x_from_mag(h, d)
-        assert_almost_equal(X, 2 * cos(d), 8, 'Expect X to be cos(45).', True)
+        assert_almost_equal(X, 2 * cos(d), 8, "Expect X to be 2cos(45).", True)
         # 2) Call get_geo_x_from_mag using H,D of 2, 30 degrees. Expect
         #   X to be 2cos(30)
         h = 2
         d = 30 * D2R
         X = channel.get_geo_x_from_mag(h, d)
-        assert_almost_equal(X, 2 * cos(d), 8,
-                'Expect X to equal cos(30).', True)
+        assert_almost_equal(X, 2 * cos(d), 8, "Expect X to equal 2cos(30).", True)
 
     def test_get_geo_y_from_mag(self):
         """ChannelConverter_test.test_get_geo_y_from_mag()
@@ -163,15 +153,13 @@ class ChannelConverterTest:
         h = 2
         d = 45 * D2R
         Y = channel.get_geo_y_from_mag(h, d)
-        assert_almost_equal(Y, 2 * sin(45 * D2R), 8,
-                'Expect Y to be 2sin(45).', True)
+        assert_almost_equal(Y, 2 * sin(45 * D2R), 8, "Expect Y to be 2sin(45).", True)
         # 2) Call get_geo_y_from_mag using H,D of 2, 30 degrees. Expect
         #   Y to be 2sin(30)
         h = 2
         d = 30 * D2R
         Y = channel.get_geo_y_from_mag(h, d)
-        assert_almost_equal(Y, 2 * sin(30 * D2R), 8,
-                'Expect Y to be 2sin(30).', True)
+        assert_almost_equal(Y, 2 * sin(30 * D2R), 8, "Expect Y to be 2sin(30).", True)
 
     def test_get_mag_from_obs(self):
         """ChannelConverter_test.test_get_geo_y_from_obs()
@@ -189,8 +177,8 @@ class ChannelConverterTest:
         e = sin(30 * D2R)
         d0 = 15 * D2R
         H, D = channel.get_mag_from_obs(h, e, d0)
-        assert_almost_equal(H, 1, 8, 'Expect H to be 1.', True)
-        assert_almost_equal(D, 45 * D2R, 8, 'Expect D to be 45.', True)
+        assert_almost_equal(H, 1, 8, "Expect H to be 1.", True)
+        assert_almost_equal(D, 45 * D2R, 8, "Expect D to be 45.", True)
 
     def test_get_mag_from_geo(self):
         """ChannelConverter_test.test_get_geo_y_from_obs()
@@ -205,8 +193,8 @@ class ChannelConverterTest:
         X = 3 * cos(30 * D2R)
         Y = 3 * sin(30 * D2R)
         H, D = channel.get_mag_from_geo(X, Y)
-        assert_almost_equal(H, 3, 8, 'Expect H to equal 3.', True)
-        assert_almost_equal(D, 30 * D2R, 8, 'Expect D to be 30.', True)
+        assert_almost_equal(H, 3, 8, "Expect H to equal 3.", True)
+        assert_almost_equal(D, 30 * D2R, 8, "Expect D to be 30.", True)
 
     def test_get_mag_d_from_obs(self):
         """ChannelConverter_test.test_get_mag_d_from_obs()
@@ -222,45 +210,41 @@ class ChannelConverterTest:
         h = 2
         e = 2
         D = channel.get_mag_d_from_obs(h, e)
-        assert_almost_equal(D, 45 * D2R, 8, 'Expect D to be 45 degrees.', True)
+        assert_almost_equal(D, 45 * D2R, 8, "Expect D to be 45 degrees.", True)
         # 2) Call get_mag_d_from_obs using h,e cos(30), sin(30).
         #   Expect d of 30 degrees.
         h = cos(30 * D2R)
         e = sin(30 * D2R)
         D = channel.get_mag_d_from_obs(h, e)
-        assert_almost_equal(D, 30 * D2R, 8,
-                'Expect D to equal 30 degrees', True)
+        assert_almost_equal(D, 30 * D2R, 8, "Expect D to equal 30 degrees", True)
         # 3) Call get_mag_d_from_obs using h,e cos(30), sin(30),
         #   d0 = 30 degrees. Expect d to be 60 degrees.
         h = cos(30 * D2R)
         e = sin(30 * D2R)
         d0 = 30 * D2R
         D = channel.get_mag_d_from_obs(h, e, d0)
-        assert_almost_equal(D, 60 * D2R, 8,
-                'Expect D to equal 60 degrees', True)
+        assert_almost_equal(D, 60 * D2R, 8, "Expect D to equal 60 degrees", True)
         # 4) Call get_mag_d_from_obs using h,e cos(30), sin(30),
         #   d0 = 330 degrees. Expect d of 360 degrees.
         h = cos(30 * D2R)
         e = sin(30 * D2R)
         d0 = 330 * D2R
         D = channel.get_mag_d_from_obs(h, e, d0)
-        assert_almost_equal(D, 360 * D2R, 8,
-                'Expect D to equal 360 degrees', True)
+        assert_almost_equal(D, 360 * D2R, 8, "Expect D to equal 360 degrees", True)
         # 5) Call get_mag_d_from_obs using h,e cos(30), sin(30),
         #   d0 = -30 degrees. Expect d of 0 degrees.
         h = cos(30 * D2R)
         e = sin(30 * D2R)
         d0 = -30 * D2R
         D = channel.get_mag_d_from_obs(h, e, d0)
-        assert_almost_equal(D, 0, 8, 'Expect D to equal 0 degrees', True)
+        assert_almost_equal(D, 0, 8, "Expect D to equal 0 degrees", True)
         # 6) Call get_mag_d_from_obs using h,e cos(30), -sin(30),
         #   d0 = -30 degrees. Expect d of -60 degrees.
         h = cos(30 * D2R)
         e = sin(-30 * D2R)
         d0 = -30 * D2R
         D = channel.get_mag_d_from_obs(h, e, d0)
-        assert_almost_equal(D, -60 * D2R, 8,
-                'Expect D to equal -60 degrees', True)
+        assert_almost_equal(D, -60 * D2R, 8, "Expect D to equal -60 degrees", True)
 
     def test_get_mag_d_from_geo(self):
         """ChannelConverter_test.test_get_mag_d_from_geo()
@@ -274,20 +258,19 @@ class ChannelConverterTest:
         X = 2
         Y = 2
         D = channel.get_mag_d_from_geo(X, Y)
-        assert_almost_equal(D, 45 * D2R, 8, 'Expect D to be 45 degrees.', True)
+        assert_almost_equal(D, 45 * D2R, 8, "Expect D to be 45 degrees.", True)
         # 2) Call get_mag_d_from_geo using X,Y equal to cos(30), sin(30).
         #   Expect D to be 30 degrees.
         X = cos(30 * D2R)
         Y = sin(30 * D2R)
         D = channel.get_mag_d_from_geo(X, Y)
-        assert_almost_equal(D, 30 * D2R, 8, 'Expect D to be 30 degrees.', True)
+        assert_almost_equal(D, 30 * D2R, 8, "Expect D to be 30 degrees.", True)
         # 3) Call get_mag_d_from_geo using X,Y equal to cos(30), -sin(30).
         #   Expect D to be -30 degrees.
         X = cos(30 * D2R)
         Y = sin(-30 * D2R)
         D = channel.get_mag_d_from_geo(X, Y)
-        assert_almost_equal(D, -30 * D2R, 8,
-                'Expect D to equal -30 degrees', True)
+        assert_almost_equal(D, -30 * D2R, 8, "Expect D to equal -30 degrees", True)
 
     def test_get_mag_h_from_obs(self):
         """ChannelConverter_test.test_get_mag_h_from_obs()
@@ -300,7 +283,7 @@ class ChannelConverterTest:
         h = 3
         e = 4
         H = channel.get_mag_h_from_obs(h, e)
-        assert_almost_equal(H, 5, 8, 'Expect H to be 5.', True)
+        assert_almost_equal(H, 5, 8, "Expect H to be 5.", True)
 
     def test_get_mag_h_from_geo(self):
         """ChannelConverter_test.test_get_mag_d_from_geo()
@@ -313,7 +296,7 @@ class ChannelConverterTest:
         X = 3
         Y = 4
         H = channel.get_mag_h_from_geo(X, Y)
-        assert_almost_equal(H, 5, 8, 'Expect H to be 5.', True)
+        assert_almost_equal(H, 5, 8, "Expect H to be 5.", True)
 
     def test_get_obs_from_geo(self):
         """ChannelConverter_test.test_get_obs_from_geo()
@@ -329,8 +312,8 @@ class ChannelConverterTest:
         X = 1
         Y = 1
         (h, e) = channel.get_obs_from_geo(X, Y)
-        assert_almost_equal(h, 1.0, 8, 'Expect h to be 1.', True)
-        assert_almost_equal(e, 1.0, 8, 'Expect e to be 1.', True)
+        assert_almost_equal(h, 1.0, 8, "Expect h to be 1.", True)
+        assert_almost_equal(e, 1.0, 8, "Expect e to be 1.", True)
         # 2) Call get_obs_from_geo using equal X,Y values to create a 45
         #   degree angle (D), with a d0 of 45/2. The observatory declination
         #   (d) will be 45/2, the difference between the total field angle,
@@ -340,8 +323,7 @@ class ChannelConverterTest:
         d0 = 22.5 * D2R
         (h, e) = channel.get_obs_from_geo(X, Y, d0)
         d = channel.get_obs_d_from_obs(h, e)
-        assert_almost_equal(d, 22.5 * D2R, 8,
-                'Expect d to be 22.5 degrees.', True)
+        assert_almost_equal(d, 22.5 * D2R, 8, "Expect d to be 22.5 degrees.", True)
         # 3) Call get_obs_from_geo using equal X,Y values to create a 45
         #   degree angle (D), with a d0 of 315 degrees. The observatory
         #   declination (d) will be 90 degrees.
@@ -350,7 +332,7 @@ class ChannelConverterTest:
         d0 = 315 * D2R
         (h, e) = channel.get_obs_from_geo(X, Y, d0)
         d = channel.get_obs_d_from_obs(h, e)
-        assert_almost_equal(d, 90 * D2R, 8, 'Expect d to be 90 degrees.', True)
+        assert_almost_equal(d, 90 * D2R, 8, "Expect d to be 90 degrees.", True)
         # 4) Call get_obs_from_geo using X,Y values of cos(60), sin(60), and
         #   d0 of 30 degrees. The observatory values h,e will be cos(30)
         #   and sin(30), and the observatory declination will be 30 degrees.
@@ -360,12 +342,10 @@ class ChannelConverterTest:
         Y = sin(60 * D2R)
         d0 = 30 * D2R
         (h, e) = channel.get_obs_from_geo(X, Y, d0)
-        assert_almost_equal(h, cos(30 * D2R), 8,
-                'Expect h to be cos(30).', True)
-        assert_almost_equal(e, sin(30 * D2R), 8,
-                'Expect e to be sin(30).', True)
+        assert_almost_equal(h, cos(30 * D2R), 8, "Expect h to be cos(30).", True)
+        assert_almost_equal(e, sin(30 * D2R), 8, "Expect e to be sin(30).", True)
         d = channel.get_obs_d_from_obs(h, e)
-        assert_almost_equal(d, 30 * D2R, 8, 'Expect d to be 30 degrees.', True)
+        assert_almost_equal(d, 30 * D2R, 8, "Expect d to be 30 degrees.", True)
 
     def test_get_obs_from_mag(self):
         """ChannelConverter_test.test_get_obs_from_mag()
@@ -378,10 +358,8 @@ class ChannelConverterTest:
         H = 1
         D = -22.5 * D2R
         (h, e) = channel.get_obs_from_mag(H, D, 22.5 * D2R)
-        assert_almost_equal(h, cos(45 * D2R), 8,
-                'Expect h to be cos(45)', True)
-        assert_almost_equal(e, -cos(45 * D2R), 8,
-                'Expect e to be -cos(45).', True)
+        assert_almost_equal(h, cos(45 * D2R), 8, "Expect h to be cos(45)", True)
+        assert_almost_equal(e, -cos(45 * D2R), 8, "Expect e to be -cos(45).", True)
 
     def test_get_obs_d_from_obs(self):
         """ChannelConverter_test.test_get_obs_d_from_obs()
@@ -395,14 +373,13 @@ class ChannelConverterTest:
         h = cos(30 * D2R)
         e = sin(30 * D2R)
         d = channel.get_obs_d_from_obs(h, e)
-        assert_almost_equal(d, 30 * D2R, 8, 'Expect d to be 30 degrees.', True)
+        assert_almost_equal(d, 30 * D2R, 8, "Expect d to be 30 degrees.", True)
         # 2) Call get_obs_d_from_obs using h,e cos(30), -sin(30). Expect
         #   d to be -30.
         h = cos(30 * D2R)
         e = sin(-30 * D2R)
         d = channel.get_obs_d_from_obs(h, e)
-        assert_almost_equal(d, -30 * D2R, 8,
-                'Expect d to be 30 degrees.', True)
+        assert_almost_equal(d, -30 * D2R, 8, "Expect d to be 30 degrees.", True)
 
     def test_get_obs_d_from_mag_d(self):
         """ChannelConverter_test.test_get_obs_d_from_mag()
@@ -416,26 +393,25 @@ class ChannelConverterTest:
         #   declination of 1 back.
         D = 1
         d = channel.get_obs_d_from_mag_d(D)
-        assert_almost_equal(d, 1, 8, 'Expect d to be 1.', True)
+        assert_almost_equal(d, 1, 8, "Expect d to be 1.", True)
         # 2) Call get_obs_d_from_mag using d, d0 values of 22.5, 45. Expect
         #   observatory declination of -22.5 degrees.
         D = 22.5 * D2R
         d0 = 45 * D2R
         d = channel.get_obs_d_from_mag_d(D, d0)
-        assert_almost_equal(d, -22.5 * D2R, 8,
-                'Expect d to be -22.5 degrees.', True)
+        assert_almost_equal(d, -22.5 * D2R, 8, "Expect d to be -22.5 degrees.", True)
         # 3) Call get_obs_d_from_mag using d, d0 values of 60, 30. Expect
         #   observatory declination of 30 degrees.
         D = 60 * D2R
         d0 = 30 * D2R
         d = channel.get_obs_d_from_mag_d(D, d0)
-        assert_almost_equal(d, 30 * D2R, 8, 'Expect d to be 30 degrees.', True)
+        assert_almost_equal(d, 30 * D2R, 8, "Expect d to be 30 degrees.", True)
         # 4) Call get_obs_d_from_mag using d, d0 values of 30, -30.
         #   Expect observatory declination of 60 degrees.
         D = 30 * D2R
         d0 = -30 * D2R
         d = channel.get_obs_d_from_mag_d(D, d0)
-        assert_almost_equal(d, 60 * D2R, 8, 'Expect d to be 60 degrees.', True)
+        assert_almost_equal(d, 60 * D2R, 8, "Expect d to be 60 degrees.", True)
 
     def test_get_obs_e_from_mag(self):
         """ChannelConverter_test.test_get_obs_e_from_mag()
@@ -451,22 +427,19 @@ class ChannelConverterTest:
         H = 1
         D = 45 * D2R
         e = channel.get_obs_e_from_mag(H, D)
-        assert_almost_equal(e, sin(45 * D2R), 8,
-                'Expect e to be sin(45).', True)
+        assert_almost_equal(e, sin(45 * D2R), 8, "Expect e to be sin(45).", True)
         # 2) Call get_obs_e_from_mag using H,D of 1, 30. Expect e to be sin(30)
         H = 1
         D = 30 * D2R
         e = channel.get_obs_e_from_mag(H, D)
-        assert_almost_equal(e, sin(30 * D2R), 8,
-                'Expect e to be sin(30).', True)
+        assert_almost_equal(e, sin(30 * D2R), 8, "Expect e to be sin(30).", True)
         # 3) Call get_obs_e_from_mag using H,D,d0 of 1, 15, -15. Expect e to
         #   be sin(30)
         H = 1
         D = 15 * D2R
         d0 = -15 * D2R
         e = channel.get_obs_e_from_mag(H, D, d0)
-        assert_almost_equal(e, sin(30 * D2R), 8,
-                'Expect e to be sin(30)', True)
+        assert_almost_equal(e, sin(30 * D2R), 8, "Expect e to be sin(30)", True)
 
     def test_get_obs_e_from_obs(self):
         """ChannelConverter_test.test_get_obs_e_from_obs()
@@ -481,8 +454,9 @@ class ChannelConverterTest:
         h = 2
         d = 30 * D2R
         e = channel.get_obs_e_from_obs(h, d)
-        assert_almost_equal(e, 2 * tan(30 * D2R), 8,
-                'Expect e to be 2 * tan(30).', True)
+        assert_almost_equal(
+            e, 2 * tan(30 * D2R), 8, "Expect e to be 2 * tan(30).", True
+        )
 
     def test_get_obs_h_from_mag(self):
         """ChannelConverter_test.test_get_obs_h_from_mag()
@@ -497,16 +471,14 @@ class ChannelConverterTest:
         H = 1
         D = 45 * D2R
         h = channel.get_obs_h_from_mag(H, D)
-        assert_almost_equal(h, cos(45 * D2R), 8,
-                'Expect h to be cos(45).', True)
+        assert_almost_equal(h, cos(45 * D2R), 8, "Expect h to be cos(45).", True)
         # 2) Call get_obs_h_from_mag using H,D,d0 1,30,15.
         #   Expect h to be cos(15)
         H = 1
         D = 30 * D2R
         d0 = 15 * D2R
         h = channel.get_obs_h_from_mag(H, D, d0)
-        assert_almost_equal(h, cos(15 * D2R), 8,
-                'Expect h to be cos(15)', True)
+        assert_almost_equal(h, cos(15 * D2R), 8, "Expect h to be cos(15).", True)
 
     def test_geo_to_obs_to_geo(self):
         """ChannelConverter_test.test_geo_to_obs_to_geo()
@@ -522,8 +494,8 @@ class ChannelConverterTest:
         (X, Y) = channel.get_geo_from_obs(h_in, e_in, d0)
         (h, e) = channel.get_obs_from_geo(X, Y, d0)
 
-        assert_almost_equal(h, 20840.15, 8, 'Expect h to = 20840.15.', True)
-        assert_almost_equal(e, -74.16, 8, 'Expect e to = -74.16', True)
+        assert_almost_equal(h, 20840.15, 8, "Expect h to = 20840.15.", True)
+        assert_almost_equal(e, -74.16, 8, "Expect e to = -74.16.", True)
 
     def test_get_radians_from_minutes(self):
         """ChannelConverter_test.test_get_radian_from_decimal()
@@ -532,8 +504,9 @@ class ChannelConverterTest:
         """
         minutes = 45 * 60
         radians = channel.get_radians_from_minutes(minutes)
-        assert_almost_equal(radians, math.pi / 4.0, 8,
-                'Expect radians to be pi/4', True)
+        assert_almost_equal(
+            radians, math.pi / 4.0, 8, "Expect radians to be pi/4", True
+        )
 
     def test_get_minutes_from_radians(self):
         """ChannelConverter_test.test_get_decimal_from_radian()
@@ -542,5 +515,6 @@ class ChannelConverterTest:
         """
         radians = math.pi / 4.0
         minutes = channel.get_minutes_from_radians(radians)
-        assert_almost_equal(minutes, 45 * 60, 8,
-                'Expect minutes to be equal to 45 degrees', True)
+        assert_almost_equal(
+            minutes, 45 * 60, 8, "Expect minutes to be equal to 45 degrees", True
+        )
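# --- Editor's sketch, not part of the diff ---
# The two conversion tests above are inverses of each other: one arc-minute is
# 1/60 degree, so 45 * 60 minutes is 45 degrees, i.e. pi/4 radians. A minimal
# standalone check of that round trip (helper names here are illustrative, not
# geomagio's API):
import math

def minutes_to_radians(minutes):
    # 60 minutes per degree, pi/180 radians per degree
    return minutes / 60.0 * math.pi / 180.0

def radians_to_minutes(radians):
    return radians * 180.0 / math.pi * 60.0

assert abs(minutes_to_radians(45 * 60) - math.pi / 4.0) < 1e-12
assert abs(radians_to_minutes(math.pi / 4.0) - 45 * 60) < 1e-9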
diff --git a/test/Controller_test.py b/test/Controller_test.py
index 5a08eda0e05314a823ee9110bd804ac7db0c6898..c7d0509d00e2e01cf7fb5f7435cdddc06abcf237 100644
--- a/test/Controller_test.py
+++ b/test/Controller_test.py
@@ -4,10 +4,13 @@ from geomagio.algorithm import Algorithm
 
 # needed to read outputs generated by Controller and test data
 from geomagio.iaga2002 import IAGA2002Factory
+
 # needed to emulate geomag.py script
 from geomagio.Controller import _main, parse_args
+
 # needed to copy SqDistAlgorithm statefile
 from shutil import copy
+
 # needed to determine a valid (and writable) temp folder
 from tempfile import gettempdir
 
@@ -26,8 +29,7 @@ def test_controller():
     algorithm = Algorithm()
     controller = Controller(inputfactory, outputfactory, algorithm)
     assert_equal(isinstance(controller._inputFactory, TimeseriesFactory), True)
-    assert_equal(isinstance(controller._outputFactory, TimeseriesFactory),
-            True)
+    assert_equal(isinstance(controller._outputFactory, TimeseriesFactory), True)
     assert_equal(isinstance(controller._algorithm, Algorithm), True)
 
 
@@ -51,139 +53,161 @@ def test_controller_update_sqdist():
 
     # create list of string command line arguments
     fake_argv = [
-        '--input', 'iaga2002',
-        '--input-url',
-          'file://etc/controller/{obs}{date:%Y%m%d}_XYZF_{t}{i}.{i}',
-        '--observatory', 'BOU',
-        '--algorithm', 'sqdist',
-        '--sqdist-m', '1440',
-        '--sqdist-alpha', '2.3148e-5',
-        '--sqdist-gamma', '3.3333e-2',
-        '--sqdist-smooth', '180',
-        '--inchannels', 'X', 'Y', 'Z', 'F',
-        '--interval', 'minute',
-        '--rename-output-channel', 'H_Dist', 'MDT',
-        '--rename-output-channel', 'H_SQ', 'MSQ',
-        '--rename-output-channel', 'H_SV', 'MSV',
-        '--rename-output-channel', 'H_Sigma', 'MSS',
-        '--outchannels', 'MDT', 'MSQ', 'MSV', 'MSS',
-        '--sqdist-mag',
-        '--sqdist-statefile', tmp_dir + '/sqdistBOU_h_state.json',
-        '--type', 'variation',
-        '--output', 'iaga2002',
-        '--output-url',
-          'file://' + tmp_dir + '/{obs}{date:%Y%m%d}_DQVS_{t}{i}.{i}',
-        '--realtime', '600'
+        "--input",
+        "iaga2002",
+        "--input-url",
+        "file://etc/controller/{obs}{date:%Y%m%d}_XYZF_{t}{i}.{i}",
+        "--observatory",
+        "BOU",
+        "--algorithm",
+        "sqdist",
+        "--sqdist-m",
+        "1440",
+        "--sqdist-alpha",
+        "2.3148e-5",
+        "--sqdist-gamma",
+        "3.3333e-2",
+        "--sqdist-smooth",
+        "180",
+        "--inchannels",
+        "X",
+        "Y",
+        "Z",
+        "F",
+        "--interval",
+        "minute",
+        "--rename-output-channel",
+        "H_Dist",
+        "MDT",
+        "--rename-output-channel",
+        "H_SQ",
+        "MSQ",
+        "--rename-output-channel",
+        "H_SV",
+        "MSV",
+        "--rename-output-channel",
+        "H_Sigma",
+        "MSS",
+        "--outchannels",
+        "MDT",
+        "MSQ",
+        "MSV",
+        "MSS",
+        "--sqdist-mag",
+        "--sqdist-statefile",
+        tmp_dir + "/sqdistBOU_h_state.json",
+        "--type",
+        "variation",
+        "--output",
+        "iaga2002",
+        "--output-url",
+        "file://" + tmp_dir + "/{obs}{date:%Y%m%d}_DQVS_{t}{i}.{i}",
+        "--realtime",
+        "600",
     ]
     # parse arguments and create initial args object
     args = parse_args(fake_argv)
 
     # read in test and latest output and compare
     actual_factory = IAGA2002Factory(
-        urlTemplate=('file://' +
-            tmp_dir + '/{obs}{date:%Y%m%d}_DQVS_{t}{i}.{i}'),
+        urlTemplate=("file://" + tmp_dir + "/{obs}{date:%Y%m%d}_DQVS_{t}{i}.{i}"),
         urlInterval=86400,
-        observatory='BOU',
-        channels=['MDT', 'MSQ', 'MSV', 'MSS']
+        observatory="BOU",
+        channels=["MDT", "MSQ", "MSV", "MSS"],
     )
     expected_factory = IAGA2002Factory(
-        urlTemplate='url template, individual tests change the template below',
+        urlTemplate="url template, individual tests change the template below",
         urlInterval=86400,
-        observatory='BOU',
-        channels=['MDT', 'MSQ', 'MSV', 'MSS']
+        observatory="BOU",
+        channels=["MDT", "MSQ", "MSV", "MSS"],
     )
 
     # setup test data
     # copy SqDistAlgorithm statefile and empty DQVS output file to tmp folder
-    copy('etc/controller/sqdistBOU_h_state.json',
-        tmp_dir)
-    copy('etc/controller/bou20181024_DQVS_test0_vmin.min',
-        tmp_dir + '/bou20181024_DQVS_vmin.min')
+    copy("etc/controller/sqdistBOU_h_state.json", tmp_dir)
+    copy(
+        "etc/controller/bou20181024_DQVS_test0_vmin.min",
+        tmp_dir + "/bou20181024_DQVS_vmin.min",
+    )
 
     # TEST 1 - include a gap at end that is less than realtime (10 minutes),
     # expect sqdist not to project SQ/SV/SS
-    starttime1 = args.starttime = UTCDateTime('2018-10-24T00:00:00Z')
-    endtime1 = args.endtime = UTCDateTime('2018-10-24T00:19:00Z')
+    starttime1 = args.starttime = UTCDateTime("2018-10-24T00:00:00Z")
+    endtime1 = args.endtime = UTCDateTime("2018-10-24T00:19:00Z")
     _main(args)
     # compare results
-    actual = actual_factory.get_timeseries(
-            starttime=starttime1, endtime=endtime1)
-    expected_factory.urlTemplate = \
-            'file://etc/controller/{obs}{date:%Y%m%d}_DQVS_test1_{t}{i}.{i}'
-    expected = expected_factory.get_timeseries(
-            starttime=starttime1, endtime=endtime1)
+    actual = actual_factory.get_timeseries(starttime=starttime1, endtime=endtime1)
+    expected_factory.urlTemplate = (
+        "file://etc/controller/{obs}{date:%Y%m%d}_DQVS_test1_{t}{i}.{i}"
+    )
+    expected = expected_factory.get_timeseries(starttime=starttime1, endtime=endtime1)
     assert_allclose(actual, expected)
 
     # TEST 2 - start after next_starttime (00:10),
     # expect SQDist to project sq/sv/ss values over gap,
     # then process until last gap starting at 00:38
-    args.startime = UTCDateTime('2018-10-24T00:20:00Z')
-    endtime2 = args.endtime = UTCDateTime('2018-10-24T00:39:00Z')
+    args.starttime = UTCDateTime("2018-10-24T00:20:00Z")
+    endtime2 = args.endtime = UTCDateTime("2018-10-24T00:39:00Z")
     _main(args)
     # compare results
-    actual = actual_factory.get_timeseries(
-            starttime=starttime1, endtime=endtime2)
-    expected_factory.urlTemplate = \
-            'file://etc/controller/{obs}{date:%Y%m%d}_DQVS_test2_{t}{i}.{i}'
-    expected = expected_factory.get_timeseries(
-            starttime=starttime1, endtime=endtime2)
+    actual = actual_factory.get_timeseries(starttime=starttime1, endtime=endtime2)
+    expected_factory.urlTemplate = (
+        "file://etc/controller/{obs}{date:%Y%m%d}_DQVS_test2_{t}{i}.{i}"
+    )
+    expected = expected_factory.get_timeseries(starttime=starttime1, endtime=endtime2)
     assert_allclose(actual, expected)
 
     # TEST 3 - start after next_starttime (00:38),
     # expect SQDist to project over gap,
     # then process until last gap starting at 00:58
-    args.starttime = UTCDateTime('2018-10-24T00:40:00Z')
-    endtime3 = args.endtime = UTCDateTime('2018-10-24T00:59:00Z')
+    args.starttime = UTCDateTime("2018-10-24T00:40:00Z")
+    endtime3 = args.endtime = UTCDateTime("2018-10-24T00:59:00Z")
     _main(args)
     # compare results
-    actual = actual_factory.get_timeseries(
-            starttime=starttime1, endtime=endtime3)
-    expected_factory.urlTemplate = \
-            'file://etc/controller/{obs}{date:%Y%m%d}_DQVS_test3_{t}{i}.{i}'
-    expected = expected_factory.get_timeseries(
-            starttime=starttime1, endtime=endtime3)
+    actual = actual_factory.get_timeseries(starttime=starttime1, endtime=endtime3)
+    expected_factory.urlTemplate = (
+        "file://etc/controller/{obs}{date:%Y%m%d}_DQVS_test3_{t}{i}.{i}"
+    )
+    expected = expected_factory.get_timeseries(starttime=starttime1, endtime=endtime3)
     assert_allclose(actual, expected)
 
     # TEST 4 - start after next_starttime (00:58),
     # expect SQDist to project over gap,
     # then process until last gap starting at 01:16
-    args.starttime = UTCDateTime('2018-10-24T01:00:00Z')
-    endtime4 = args.endtime = UTCDateTime('2018-10-24T01:19:00Z')
+    args.starttime = UTCDateTime("2018-10-24T01:00:00Z")
+    endtime4 = args.endtime = UTCDateTime("2018-10-24T01:19:00Z")
     _main(args)
     # compare results
-    actual = actual_factory.get_timeseries(
-            starttime=starttime1, endtime=endtime4)
-    expected_factory.urlTemplate = \
-            'file://etc/controller/{obs}{date:%Y%m%d}_DQVS_test4_{t}{i}.{i}'
-    expected = expected_factory.get_timeseries(
-            starttime=starttime1, endtime=endtime4)
+    actual = actual_factory.get_timeseries(starttime=starttime1, endtime=endtime4)
+    expected_factory.urlTemplate = (
+        "file://etc/controller/{obs}{date:%Y%m%d}_DQVS_test4_{t}{i}.{i}"
+    )
+    expected = expected_factory.get_timeseries(starttime=starttime1, endtime=endtime4)
     assert_allclose(actual, expected)
 
     # TEST 5 - start after next_starttime (01:16),
     # expect SQDist to project until beginning of realtime gap,
     # starting at 01:30 (01:39 - 600 seconds)
-    args.starttime = UTCDateTime('2018-10-24T01:20:00Z')
-    endtime5 = args.endtime = UTCDateTime('2018-10-24T01:39:00Z')
+    args.starttime = UTCDateTime("2018-10-24T01:20:00Z")
+    endtime5 = args.endtime = UTCDateTime("2018-10-24T01:39:00Z")
     _main(args)
     # compare results
-    actual = actual_factory.get_timeseries(
-            starttime=starttime1, endtime=endtime5)
-    expected_factory.urlTemplate = \
-            'file://etc/controller/{obs}{date:%Y%m%d}_DQVS_test5_{t}{i}.{i}'
-    expected = expected_factory.get_timeseries(
-            starttime=starttime1, endtime=endtime5)
+    actual = actual_factory.get_timeseries(starttime=starttime1, endtime=endtime5)
+    expected_factory.urlTemplate = (
+        "file://etc/controller/{obs}{date:%Y%m%d}_DQVS_test5_{t}{i}.{i}"
+    )
+    expected = expected_factory.get_timeseries(starttime=starttime1, endtime=endtime5)
     assert_allclose(actual, expected)
 
     # TEST 6 - set starttime before next_starttime (which is 01:30)
     # expect sqdist to pick up where it left off
-    args.starttime = UTCDateTime('2018-10-24T01:20:00Z')
-    endtime6 = args.endtime = UTCDateTime('2018-10-24T01:59:00Z')
+    args.starttime = UTCDateTime("2018-10-24T01:20:00Z")
+    endtime6 = args.endtime = UTCDateTime("2018-10-24T01:59:00Z")
     _main(args)
     # compare results
-    actual = actual_factory.get_timeseries(
-            starttime=starttime1, endtime=endtime6)
-    expected_factory.urlTemplate = \
-            'file://etc/controller/{obs}{date:%Y%m%d}_DQVS_test6_{t}{i}.{i}'
-    expected = expected_factory.get_timeseries(
-            starttime=starttime1, endtime=endtime6)
+    actual = actual_factory.get_timeseries(starttime=starttime1, endtime=endtime6)
+    expected_factory.urlTemplate = (
+        "file://etc/controller/{obs}{date:%Y%m%d}_DQVS_test6_{t}{i}.{i}"
+    )
+    expected = expected_factory.get_timeseries(starttime=starttime1, endtime=endtime6)
     assert_allclose(actual, expected)
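# --- Editor's note, not part of the diff ---
# Black places each list element on its own line, which is why fake_argv grew
# so tall above. When no single argument contains whitespace, an equivalent
# and more compact construction is to split one command string; a small
# illustrative sketch (argument subset only):
fake_argv = (
    "--input iaga2002 "
    "--observatory BOU "
    "--algorithm sqdist "
    "--inchannels X Y Z F "
    "--interval minute"
).split()
assert fake_argv[:2] == ["--input", "iaga2002"]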
diff --git a/test/ObservatoryMetadata_test.py b/test/ObservatoryMetadata_test.py
index bcfcd499feadff8f3b596bc789eb2dc3adb9cc22..135548946d101e064b22006119cad34658630abb 100644
--- a/test/ObservatoryMetadata_test.py
+++ b/test/ObservatoryMetadata_test.py
@@ -6,42 +6,42 @@ import obspy.core
 
 
 METADATA = {
-    'BOU': {
-        'metadata': {
-            'station_name': 'Boulder',
-            'agency_name': 'United States Geological Survey (USGS)',
-            'geodetic_latitude': '-90',
-            'geodetic_longitude': '-180',
-            'elevation': '-1000',
-            'sensor_orientation': 'HDZF',
-            'sensor_sampling_rate': '0.01 second',
-            'declination_base': 20000,
-            'is_gin': False,
-            'is_intermagnet': False,
-            'conditions_of_use': 'The Conditions of Use for data provided' +
-                    ' through INTERMAGNET and acknowledgement templates' +
-                    ' can be found at www.intermagnet.org'
+    "BOU": {
+        "metadata": {
+            "station_name": "Boulder",
+            "agency_name": "United States Geological Survey (USGS)",
+            "geodetic_latitude": "-90",
+            "geodetic_longitude": "-180",
+            "elevation": "-1000",
+            "sensor_orientation": "HDZF",
+            "sensor_sampling_rate": "0.01 second",
+            "declination_base": 20000,
+            "is_gin": False,
+            "is_intermagnet": False,
+            "conditions_of_use": "The Conditions of Use for data provided"
+            + " through INTERMAGNET and acknowledgement templates"
+            + " can be found at www.intermagnet.org",
         },
-        'interval_specific': {
-            'minute': {
-                'data_interval_type': 'filtered 1-minute (00:15-01:45) ',
-                'filter_comments': ['Vector 1-minute values are computed' +
-                    ' from 1-second values using the INTERMAGNET gaussian' +
-                    ' filter centered on the minute. Scalar 1-minute values' +
-                    ' are computed from 1-second values using the' +
-                    ' INTERMAGNET gaussian filter centered on the minute. ']
+        "interval_specific": {
+            "minute": {
+                "data_interval_type": "filtered 1-minute (00:15-01:45) ",
+                "filter_comments": [
+                    "Vector 1-minute values are computed"
+                    + " from 1-second values using the INTERMAGNET gaussian"
+                    + " filter centered on the minute. Scalar 1-minute values"
+                    + " are computed from 1-second values using the"
+                    + " INTERMAGNET gaussian filter centered on the minute. "
+                ],
             },
-            'second': {
-                'data_interval_type': 'Average 1-Second'
-            }
-        }
+            "second": {"data_interval_type": "Average 1-Second"},
+        },
     }
 }
 
 
 DATA_INTERVAL_TYPE = {
-        'minute': {'data_interval_type': 'filtered 1-minute (00:29-01:30) '},
-        'second': {'data_interval_type': 'filtered 1-Second'}
+    "minute": {"data_interval_type": "filtered 1-minute (00:29-01:30) "},
+    "second": {"data_interval_type": "filtered 1-Second"},
 }
 
 
@@ -53,19 +53,17 @@ def test_set_metadata():
     # metadata.
     observatorymetadata = ObservatoryMetadata()
     stats = obspy.core.Stats()
-    stats.channel = 'MVH'
-    stats.location = 'R0'
-    stats.data_interval = 'second'
-    stats.data_type = 'quasi-definitive'
-    observatorymetadata.set_metadata(stats, 'BOU', 'MVH',
-            'quasi-definitive', 'second')
-    assert_equal(stats['declination_base'], 5527)
+    stats.channel = "MVH"
+    stats.location = "R0"
+    stats.data_interval = "second"
+    stats.data_type = "quasi-definitive"
+    observatorymetadata.set_metadata(stats, "BOU", "MVH", "quasi-definitive", "second")
+    assert_equal(stats["declination_base"], 5527)
 
     # Test custom metadata
     stats = obspy.core.Stats()
     observatorymetadata = ObservatoryMetadata(METADATA, DATA_INTERVAL_TYPE)
-    observatorymetadata.set_metadata(stats, 'BOU', 'MVH',
-            'quasi-definitive', 'second')
-    assert_equal(stats['declination_base'], 20000)
+    observatorymetadata.set_metadata(stats, "BOU", "MVH", "quasi-definitive", "second")
+    assert_equal(stats["declination_base"], 20000)
     print(stats)
-    assert_equal(stats['data_interval_type'], 'Average 1-Second')
+    assert_equal(stats["data_interval_type"], "Average 1-Second")
diff --git a/test/StreamConverter_test.py b/test/StreamConverter_test.py
index bf816375ef1a9921db032f587d3cfe7ddb167c5c..0c8b4207ac380f9d27efb5c526d24f9703dcec78 100644
--- a/test/StreamConverter_test.py
+++ b/test/StreamConverter_test.py
@@ -19,8 +19,8 @@ cos = numpy.cos
 sin = numpy.sin
 
 D2R = numpy.pi / 180
-D2I = 60 * 10               # Degrees to Iaga Decbas
-STARTTIME = obspy.core.UTCDateTime('2014-11-01')
+D2I = 60 * 10  # Degrees to Iaga Decbas
+STARTTIME = obspy.core.UTCDateTime("2014-11-01")
 
 
 def test_get_geo_from_mag():
@@ -36,17 +36,27 @@ def test_get_geo_from_mag():
     #   H = [1, 1], and D = [15 degrees, 30 degrees], expect streams of
     #   X = [cos(15), cos(30)] and Y = [sin(15), sin(30)]
     # stats.DECBAS = 15 * D2I
-    mag += __create_trace('H', [1, 1])
-    mag += __create_trace('D', [15 * D2R, 30 * D2R])
-    mag += __create_trace('Z', [1, 1])
-    mag += __create_trace('F', [1, 1])
+    mag += __create_trace("H", [1, 1])
+    mag += __create_trace("D", [15 * D2R, 30 * D2R])
+    mag += __create_trace("Z", [1, 1])
+    mag += __create_trace("F", [1, 1])
     geo = StreamConverter.get_geo_from_mag(mag)
-    X = geo.select(channel='X')[0].data
-    Y = geo.select(channel='Y')[0].data
-    assert_almost_equal(X, [cos(15 * D2R), cos(30 * D2R)], 9,
-        'Expect X to equal [cos(15), cos(30)]', True)
-    assert_almost_equal(Y, [sin(15 * D2R), sin(30 * D2R)], 9,
-        'Expect Y to equal [sin(15), sin(30)]', True)
+    X = geo.select(channel="X")[0].data
+    Y = geo.select(channel="Y")[0].data
+    assert_almost_equal(
+        X,
+        [cos(15 * D2R), cos(30 * D2R)],
+        9,
+        "Expect X to equal [cos(15), cos(30)]",
+        True,
+    )
+    assert_almost_equal(
+        Y,
+        [sin(15 * D2R), sin(30 * D2R)],
+        9,
+        "Expect Y to equal [sin(15), sin(30)]",
+        True,
+    )
 
 
 def test_get_geo_from_obs():
@@ -60,34 +70,42 @@ def test_get_geo_from_obs():
 
     # 1) Call get_geo_from_obs using equal h, e streams with a decbas of 0
     #   the geographic stream values X, Y will be the same.
-    obs += __create_trace('H', [1])
-    obs += __create_trace('E', [1])
-    obs += __create_trace('Z', [1])
-    obs += __create_trace('F', [1])
+    obs += __create_trace("H", [1])
+    obs += __create_trace("E", [1])
+    obs += __create_trace("Z", [1])
+    obs += __create_trace("F", [1])
     geo = StreamConverter.get_geo_from_obs(obs)
-    X = geo.select(channel='X')[0].data
-    Y = geo.select(channel='Y')[0].data
-    assert_almost_equal(X[0], 1, 9,
-        'Expect X to almost equal 1', True)
-    assert_almost_equal(Y[0], 1, 9,
-        'Expect Y to almost equal 1', True)
+    X = geo.select(channel="X")[0].data
+    Y = geo.select(channel="Y")[0].data
+    assert_almost_equal(X[0], 1, 9, "Expect X to almost equal 1", True)
+    assert_almost_equal(Y[0], 1, 9, "Expect Y to almost equal 1", True)
 
     # 2) Call get_geo_from_obs using a decbas of 15 degrees, and streams
     #   with H = [cos(15), cos(30)], and E = [sin(15), sin(30)].
     #   Expect streams of X = [cos(30), cos(45)] and Y = [sin(30), sin(45)]
     obs = obspy.core.Stream()
     DECBAS = 15 * D2I
-    obs += __create_trace('H', [cos(15 * D2R), cos(30 * D2R)], DECBAS)
-    obs += __create_trace('E', [sin(15 * D2R), sin(30 * D2R)], DECBAS)
-    obs += __create_trace('Z', [1, 1], DECBAS)
-    obs += __create_trace('F', [1, 1], DECBAS)
+    obs += __create_trace("H", [cos(15 * D2R), cos(30 * D2R)], DECBAS)
+    obs += __create_trace("E", [sin(15 * D2R), sin(30 * D2R)], DECBAS)
+    obs += __create_trace("Z", [1, 1], DECBAS)
+    obs += __create_trace("F", [1, 1], DECBAS)
     geo = StreamConverter.get_geo_from_obs(obs)
-    X = geo.select(channel='X')[0].data
-    Y = geo.select(channel='Y')[0].data
-    assert_almost_equal(X, [cos(30 * D2R), cos(45 * D2R)], 9,
-        'Expect X to equal [cos(30), cos(45)]', True)
-    assert_almost_equal(Y, [sin(30 * D2R), sin(45 * D2R)], 9,
-        'Expect Y to equal [sin(30), sin(45)]', True)
+    X = geo.select(channel="X")[0].data
+    Y = geo.select(channel="Y")[0].data
+    assert_almost_equal(
+        X,
+        [cos(30 * D2R), cos(45 * D2R)],
+        9,
+        "Expect X to equal [cos(30), cos(45)]",
+        True,
+    )
+    assert_almost_equal(
+        Y,
+        [sin(30 * D2R), sin(45 * D2R)],
+        9,
+        "Expect Y to equal [sin(30), sin(45)]",
+        True,
+    )
 
 
 def test_get_mag_from_geo():
@@ -103,17 +121,17 @@ def test_get_mag_from_geo():
     #   [cos(15), cos(30)], and a Y stream of [sin(15), sin(30)].
     #   Expect a H stream of [1,1] and a D stream of [15 degrees, 30 degrees]
     DECBAS = 15 * D2I
-    geo += __create_trace('X', [cos(15 * D2R), cos(30 * D2R)], DECBAS)
-    geo += __create_trace('Y', [sin(15 * D2R), sin(30 * D2R)], DECBAS)
-    geo += __create_trace('Z', [1, 1], DECBAS)
-    geo += __create_trace('F', [1, 1], DECBAS)
+    geo += __create_trace("X", [cos(15 * D2R), cos(30 * D2R)], DECBAS)
+    geo += __create_trace("Y", [sin(15 * D2R), sin(30 * D2R)], DECBAS)
+    geo += __create_trace("Z", [1, 1], DECBAS)
+    geo += __create_trace("F", [1, 1], DECBAS)
     mag = StreamConverter.get_mag_from_geo(geo)
-    H = mag.select(channel='H')[0].data
-    D = mag.select(channel='D')[0].data
-    assert_almost_equal(H, [1, 1], 9,
-        'Expect H to equal [1,1]', True)
-    assert_almost_equal(D, [15 * D2R, 30 * D2R], 9,
-        'Expect D to equal [15 degrees, 30 degrees]', True)
+    H = mag.select(channel="H")[0].data
+    D = mag.select(channel="D")[0].data
+    assert_almost_equal(H, [1, 1], 9, "Expect H to equal [1,1]", True)
+    assert_almost_equal(
+        D, [15 * D2R, 30 * D2R], 9, "Expect D to equal [15 degrees, 30 degrees]", True
+    )
 
 
 def test_get_mag_from_obs():
@@ -128,17 +146,17 @@ def test_get_mag_from_obs():
     #   [cos(15), cos(30)] and a E stream of [sin(15), sin(30)].
     #   Expect a H stream of [1, 1] and a D stream of [30 degrees, 45 degrees]
     DECBAS = 15 * D2I
-    obs += __create_trace('H', [cos(15 * D2R), cos(30 * D2R)], DECBAS)
-    obs += __create_trace('E', [sin(15 * D2R), sin(30 * D2R)], DECBAS)
-    obs += __create_trace('Z', [1, 1], DECBAS)
-    obs += __create_trace('F', [1, 1], DECBAS)
+    obs += __create_trace("H", [cos(15 * D2R), cos(30 * D2R)], DECBAS)
+    obs += __create_trace("E", [sin(15 * D2R), sin(30 * D2R)], DECBAS)
+    obs += __create_trace("Z", [1, 1], DECBAS)
+    obs += __create_trace("F", [1, 1], DECBAS)
     mag = StreamConverter.get_mag_from_obs(obs)
-    H = mag.select(channel='H')[0].data
-    D = mag.select(channel='D')[0].data
-    assert_almost_equal(H, [1, 1], 9,
-        'Expect H to equal [1,1]', True)
-    assert_almost_equal(D, [30 * D2R, 45 * D2R], 9,
-        'Expect D to equal [30 degrees, 45 degrees]', True)
+    H = mag.select(channel="H")[0].data
+    D = mag.select(channel="D")[0].data
+    assert_almost_equal(H, [1, 1], 9, "Expect H to equal [1,1]", True)
+    assert_almost_equal(
+        D, [30 * D2R, 45 * D2R], 9, "Expect D to equal [30 degrees, 45 degrees]", True
+    )
 
 
 def test_get_obs_from_geo():
@@ -155,20 +173,31 @@ def test_get_obs_from_geo():
     #   Expect a H stream of [cos(15), cos(30)] and a
     #   E stream of [sin(15), sin(30)]
     DECBAS = 15 * D2I
-    geo += __create_trace('X', [cos(30 * D2R), cos(45 * D2R)], DECBAS)
-    geo += __create_trace('Y', [sin(30 * D2R), sin(45 * D2R)], DECBAS)
-    geo += __create_trace('Z', [1, 1], DECBAS)
-    geo += __create_trace('F', [1, 1], DECBAS)
+    geo += __create_trace("X", [cos(30 * D2R), cos(45 * D2R)], DECBAS)
+    geo += __create_trace("Y", [sin(30 * D2R), sin(45 * D2R)], DECBAS)
+    geo += __create_trace("Z", [1, 1], DECBAS)
+    geo += __create_trace("F", [1, 1], DECBAS)
     obs = StreamConverter.get_obs_from_geo(geo, True)
-    H = obs.select(channel='H')[0].data
-    E = obs.select(channel='E')[0].data
-    D = obs.select(channel='D')[0].data
-    assert_almost_equal(H, [cos(15 * D2R), cos(30 * D2R)], 9,
-        'Expect H to equal [cos(15), cos(30)]', True)
-    assert_almost_equal(E, [sin(15 * D2R), sin(30 * D2R)], 9,
-        'Expect E to equal [sin(15), sin(30)', True)
-    assert_almost_equal(D, [15 * D2R, 30 * D2R], 9,
-        'Expect D to equal [15 degress, 30 degrees]', True)
+    H = obs.select(channel="H")[0].data
+    E = obs.select(channel="E")[0].data
+    D = obs.select(channel="D")[0].data
+    assert_almost_equal(
+        H,
+        [cos(15 * D2R), cos(30 * D2R)],
+        9,
+        "Expect H to equal [cos(15), cos(30)]",
+        True,
+    )
+    assert_almost_equal(
+        E,
+        [sin(15 * D2R), sin(30 * D2R)],
+        9,
+        "Expect E to equal [sin(15), sin(30)",
+        True,
+    )
+    assert_almost_equal(
+        D, [15 * D2R, 30 * D2R], 9, "Expect D to equal [15 degress, 30 degrees]", True
+    )
 
 
 def test_get_obs_from_mag():
@@ -185,20 +214,31 @@ def test_get_obs_from_mag():
     #   of [cos(15), cos(30)], a D stream of [30 degrees, 45 degrees],
     #   and a E stream of [sin(15), sin(30)]
     DECBAS = 15 * D2I
-    mag += __create_trace('H', [1, 1], DECBAS)
-    mag += __create_trace('D', [30 * D2R, 45 * D2R], DECBAS)
-    mag += __create_trace('Z', [1, 1], DECBAS)
-    mag += __create_trace('F', [1, 1], DECBAS)
+    mag += __create_trace("H", [1, 1], DECBAS)
+    mag += __create_trace("D", [30 * D2R, 45 * D2R], DECBAS)
+    mag += __create_trace("Z", [1, 1], DECBAS)
+    mag += __create_trace("F", [1, 1], DECBAS)
     obs = StreamConverter.get_obs_from_mag(mag, True)
-    H = obs.select(channel='H')[0].data
-    D = obs.select(channel='D')[0].data
-    E = obs.select(channel='E')[0].data
-    assert_almost_equal(H, [cos(15 * D2R), cos(30 * D2R)], 9,
-        'Expect H to equal [cos(15), cos(30)', True)
-    assert_almost_equal(D, [15 * D2R, 30 * D2R], 9,
-        'Expect D to equal [15 degrees, 30 degrees', True)
-    assert_almost_equal(E, [sin(15 * D2R), sin(30 * D2R)], 9,
-        'Expect E to equal [sin(15), sin(30)', True)
+    H = obs.select(channel="H")[0].data
+    D = obs.select(channel="D")[0].data
+    E = obs.select(channel="E")[0].data
+    assert_almost_equal(
+        H,
+        [cos(15 * D2R), cos(30 * D2R)],
+        9,
+        "Expect H to equal [cos(15), cos(30)",
+        True,
+    )
+    assert_almost_equal(
+        D, [15 * D2R, 30 * D2R], 9, "Expect D to equal [15 degrees, 30 degrees", True
+    )
+    assert_almost_equal(
+        E,
+        [sin(15 * D2R), sin(30 * D2R)],
+        9,
+        "Expect E to equal [sin(15), sin(30)",
+        True,
+    )
 
 
 def test_get_obs_from_obs():
@@ -214,27 +254,33 @@ def test_get_obs_from_obs():
     #   Expect a D stream of [15 degrees, 30 degrees]
     obs_e = obspy.core.Stream()
     DECBAS = 15 * D2I
-    obs_e += __create_trace('H', [cos(15 * D2R), cos(30 * D2R)], DECBAS)
-    obs_e += __create_trace('E', [sin(15 * D2R), sin(30 * D2R)], DECBAS)
-    obs_e += __create_trace('Z', [1, 1], DECBAS)
-    obs_e += __create_trace('F', [1, 1], DECBAS)
+    obs_e += __create_trace("H", [cos(15 * D2R), cos(30 * D2R)], DECBAS)
+    obs_e += __create_trace("E", [sin(15 * D2R), sin(30 * D2R)], DECBAS)
+    obs_e += __create_trace("Z", [1, 1], DECBAS)
+    obs_e += __create_trace("F", [1, 1], DECBAS)
     obs_D = StreamConverter.get_obs_from_obs(obs_e, False, True)
-    d = obs_D.select(channel='D')[0].data
-    assert_almost_equal(d, [15 * D2R, 30 * D2R], 9,
-        'Expect D to equal [15 degrees, 30 degrees]', True)
+    d = obs_D.select(channel="D")[0].data
+    assert_almost_equal(
+        d, [15 * D2R, 30 * D2R], 9, "Expect D to equal [15 degrees, 30 degrees]", True
+    )
 
     # 2) Call get_obs_from_obs using a decbase of 15 degrees, a H stream of
     #   [cos(15), cos(30)], and a D stream of [15, 30].
     #   Expect an E stream of [sin(15), sin(30)]
     obs_d = obspy.core.Stream()
-    obs_d += __create_trace('H', [cos(15 * D2R), cos(30 * D2R)], DECBAS)
-    obs_d += __create_trace('D', [15 * D2R, 30 * D2R], DECBAS)
-    obs_d += __create_trace('Z', [1, 1], DECBAS)
-    obs_d += __create_trace('F', [1, 1], DECBAS)
+    obs_d += __create_trace("H", [cos(15 * D2R), cos(30 * D2R)], DECBAS)
+    obs_d += __create_trace("D", [15 * D2R, 30 * D2R], DECBAS)
+    obs_d += __create_trace("Z", [1, 1], DECBAS)
+    obs_d += __create_trace("F", [1, 1], DECBAS)
     obs_E = StreamConverter.get_obs_from_obs(obs_d, True, False)
-    e = obs_E.select(channel='E')[0].data
-    assert_almost_equal(e, [sin(15 * D2R), sin(30 * D2R)], 9,
-        'Expect E to equal [sin(15), sin(30)', True)
+    e = obs_E.select(channel="E")[0].data
+    assert_almost_equal(
+        e,
+        [sin(15 * D2R), sin(30 * D2R)],
+        9,
+        "Expect E to equal [sin(15), sin(30)",
+        True,
+    )
 
 
 def test_verification_data():
@@ -251,40 +297,50 @@ def test_verification_data():
     """
     DECBAS = 552.7
     obs_v = obspy.core.Stream()
-    obs_v += __create_trace('H',
-        [20889.55, 20889.57, 20889.74, 20889.86, 20889.91, 20889.81], DECBAS)
-    obs_v += __create_trace('E',
-        [-21.10, -20.89, -20.72, -20.57, -20.39, -20.12], DECBAS)
-    obs_v += __create_trace('Z',
-        [47565.29, 47565.34, 47565.39, 47565.45, 47565.51, 47565.54], DECBAS)
-    obs_v += __create_trace('F',
-        [52485.77, 52485.84, 52485.94, 52486.06, 52486.11, 52486.10], DECBAS)
+    obs_v += __create_trace(
+        "H", [20889.55, 20889.57, 20889.74, 20889.86, 20889.91, 20889.81], DECBAS
+    )
+    obs_v += __create_trace(
+        "E", [-21.10, -20.89, -20.72, -20.57, -20.39, -20.12], DECBAS
+    )
+    obs_v += __create_trace(
+        "Z", [47565.29, 47565.34, 47565.39, 47565.45, 47565.51, 47565.54], DECBAS
+    )
+    obs_v += __create_trace(
+        "F", [52485.77, 52485.84, 52485.94, 52486.06, 52486.11, 52486.10], DECBAS
+    )
     obs_V = StreamConverter.get_obs_from_obs(obs_v, True, True)
-    d = obs_V.select(channel='D')[0].data
+    d = obs_V.select(channel="D")[0].data
     d = ChannelConverter.get_minutes_from_radians(d)
     # Using d values calculated using small angle approximation.
-    assert_almost_equal(d,
-        [-3.47, -3.43, -3.40, -3.38, -3.35, -3.31], 2,
-        'Expect d to equal [-3.47, -3.43, -3.40, -3.38, -3.35, -3.31]', True)
+    assert_almost_equal(
+        d,
+        [-3.47, -3.43, -3.40, -3.38, -3.35, -3.31],
+        2,
+        "Expect d to equal [-3.47, -3.43, -3.40, -3.38, -3.35, -3.31]",
+        True,
+    )
 
     mag = obspy.core.Stream()
     DECBAS = 552.7
-    mag += __create_trace('H',
-        [20884.04, 20883.45, 20883.38, 20883.43, 20883.07, 20882.76], DECBAS)
+    mag += __create_trace(
+        "H", [20884.04, 20883.45, 20883.38, 20883.43, 20883.07, 20882.76], DECBAS
+    )
     d = ChannelConverter.get_radians_from_minutes(
-        [556.51, 556.52, 556.56, 556.61, 556.65, 556.64])
-    mag += __create_trace('D', d, DECBAS)
-    mag += __create_trace('Z',
-        [48546.90, 48546.80, 48546.80, 48546.70, 48546.80, 48546.90], DECBAS)
-    mag += __create_trace('F',
-        [0.10, 0.00, 0.10, 0.00, 0.00, 0.00, 0.00], DECBAS)
+        [556.51, 556.52, 556.56, 556.61, 556.65, 556.64]
+    )
+    mag += __create_trace("D", d, DECBAS)
+    mag += __create_trace(
+        "Z", [48546.90, 48546.80, 48546.80, 48546.70, 48546.80, 48546.90], DECBAS
+    )
+    mag += __create_trace("F", [0.10, 0.00, 0.10, 0.00, 0.00, 0.00, 0.00], DECBAS)
     geo = StreamConverter.get_geo_from_mag(mag)
-    X = geo.select(channel='X')[0].data
-    Y = geo.select(channel='Y')[0].data
-    assert_almost_equal(X,
-        [20611.00, 20610.40, 20610.30, 20610.30, 20609.90, 20609.60], 2)
-    assert_almost_equal(Y,
-        [3366.00, 3366.00, 3366.20, 3366.50, 3366.70, 3366.60], 1)
+    X = geo.select(channel="X")[0].data
+    Y = geo.select(channel="Y")[0].data
+    assert_almost_equal(
+        X, [20611.00, 20610.40, 20610.30, 20610.30, 20609.90, 20609.60], 2
+    )
+    assert_almost_equal(Y, [3366.00, 3366.00, 3366.20, 3366.50, 3366.70, 3366.60], 1)
 
 
 def __create_trace(channel, data, decbase=0):
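# --- Editor's sketch, not part of the diff ---
# The obs -> geo expectations above are consistent with a plane rotation by
# the declination baseline d0:
#     X = h * cos(d0) - e * sin(d0)
#     Y = h * sin(d0) + e * cos(d0)
# so h = cos(15), e = sin(15) with d0 = 15 degrees lands on X = cos(30),
# Y = sin(30) by the angle-sum identities. A quick numeric check:
import numpy as np

d0 = np.deg2rad(15)
h, e = np.cos(np.deg2rad(15)), np.sin(np.deg2rad(15))
X = h * np.cos(d0) - e * np.sin(d0)
Y = h * np.sin(d0) + e * np.cos(d0)
np.testing.assert_allclose(
    [X, Y], [np.cos(np.deg2rad(30)), np.sin(np.deg2rad(30))]
)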
diff --git a/test/TimeseriesUtility_test.py b/test/TimeseriesUtility_test.py
index b2eea78ccb39eb35b3277be2ca3cceea066e111b..25a93f404f220d7dfad9faf9b0c2e1d006d47819 100644
--- a/test/TimeseriesUtility_test.py
+++ b/test/TimeseriesUtility_test.py
@@ -14,27 +14,28 @@ assert_array_equal = numpy.testing.assert_array_equal
 def test_create_empty_trace():
     """TimeseriesUtility_test.test_create_empty_trace()
     """
-    trace1 = _create_trace([1, 1, 1, 1, 1], 'H', UTCDateTime("2018-01-01"))
-    trace2 = _create_trace([2, 2], 'E', UTCDateTime("2018-01-01"))
-    observatory = 'Test'
-    interval = 'minute'
-    network = 'NT'
-    location = 'R0'
+    trace1 = _create_trace([1, 1, 1, 1, 1], "H", UTCDateTime("2018-01-01"))
+    trace2 = _create_trace([2, 2], "E", UTCDateTime("2018-01-01"))
+    observatory = "Test"
+    interval = "minute"
+    network = "NT"
+    location = "R0"
     trace3 = TimeseriesUtility.create_empty_trace(
-            starttime=trace1.stats.starttime,
-            endtime=trace1.stats.endtime,
-            observatory=observatory,
-            channel='F',
-            type='variation',
-            interval=interval,
-            network=network,
-            station=trace1.stats.station,
-            location=location)
+        starttime=trace1.stats.starttime,
+        endtime=trace1.stats.endtime,
+        observatory=observatory,
+        channel="F",
+        type="variation",
+        interval=interval,
+        network=network,
+        station=trace1.stats.station,
+        location=location,
+    )
     timeseries = Stream(traces=[trace1, trace2])
     # For continuity set stats to be same for all traces
     for trace in timeseries:
         trace.stats.observatory = observatory
-        trace.stats.type = 'variation'
+        trace.stats.type = "variation"
         trace.stats.interval = interval
         trace.stats.network = network
         trace.stats.station = trace1.stats.station
@@ -45,7 +46,8 @@ def test_create_empty_trace():
     TimeseriesUtility.pad_timeseries(
         timeseries=timeseries,
         starttime=trace1.stats.starttime,
-        endtime=trace1.stats.endtime)
+        endtime=trace1.stats.endtime,
+    )
     assert_equal(len(trace3.data), trace3.stats.npts)
     assert_equal(timeseries[0].stats.starttime, timeseries[2].stats.starttime)
     # Change starttime by more than 1 delta
@@ -61,28 +63,30 @@ def test_get_stream_gaps():
 
     confirms that gaps are found in a stream
     """
-    stream = Stream([
-        __create_trace('H', [numpy.nan, 1, 1, numpy.nan, numpy.nan]),
-        __create_trace('Z', [0, 0, 0, 1, 1, 1])
-    ])
+    stream = Stream(
+        [
+            __create_trace("H", [numpy.nan, 1, 1, numpy.nan, numpy.nan]),
+            __create_trace("Z", [0, 0, 0, 1, 1, 1]),
+        ]
+    )
     for trace in stream:
         # set time of first sample
-        trace.stats.starttime = UTCDateTime('2015-01-01T00:00:00Z')
+        trace.stats.starttime = UTCDateTime("2015-01-01T00:00:00Z")
         # set sample rate to 1 second
         trace.stats.delta = 1
     # find gaps
     gaps = TimeseriesUtility.get_stream_gaps(stream)
-    assert_equal(len(gaps['H']), 2)
+    assert_equal(len(gaps["H"]), 2)
     # gap at start of H
-    gap = gaps['H'][0]
-    assert_equal(gap[0], UTCDateTime('2015-01-01T00:00:00Z'))
-    assert_equal(gap[1], UTCDateTime('2015-01-01T00:00:00Z'))
+    gap = gaps["H"][0]
+    assert_equal(gap[0], UTCDateTime("2015-01-01T00:00:00Z"))
+    assert_equal(gap[1], UTCDateTime("2015-01-01T00:00:00Z"))
     # gap at end of H
-    gap = gaps['H'][1]
-    assert_equal(gap[0], UTCDateTime('2015-01-01T00:00:03Z'))
-    assert_equal(gap[1], UTCDateTime('2015-01-01T00:00:04Z'))
+    gap = gaps["H"][1]
+    assert_equal(gap[0], UTCDateTime("2015-01-01T00:00:03Z"))
+    assert_equal(gap[1], UTCDateTime("2015-01-01T00:00:04Z"))
     # no gaps in Z channel
-    assert_equal(len(gaps['Z']), 0)
+    assert_equal(len(gaps["Z"]), 0)
 
 
 def test_get_stream_gaps_channels():
@@ -90,19 +94,21 @@ def test_get_stream_gaps_channels():
 
     test that gaps are only checked in specified channels.
     """
-    stream = Stream([
-        __create_trace('H', [numpy.nan, 1, 1, numpy.nan, numpy.nan]),
-        __create_trace('Z', [0, 0, 0, 1, 1, 1])
-    ])
+    stream = Stream(
+        [
+            __create_trace("H", [numpy.nan, 1, 1, numpy.nan, numpy.nan]),
+            __create_trace("Z", [0, 0, 0, 1, 1, 1]),
+        ]
+    )
     for trace in stream:
         # set time of first sample
-        trace.stats.starttime = UTCDateTime('2015-01-01T00:00:00Z')
+        trace.stats.starttime = UTCDateTime("2015-01-01T00:00:00Z")
         # set sample rate to 1 second
         trace.stats.delta = 1
     # find gaps
-    gaps = TimeseriesUtility.get_stream_gaps(stream, ['Z'])
-    assert_equal('H' in gaps, False)
-    assert_equal(len(gaps['Z']), 0)
+    gaps = TimeseriesUtility.get_stream_gaps(stream, ["Z"])
+    assert_equal("H" in gaps, False)
+    assert_equal(len(gaps["Z"]), 0)
 
 
 def test_get_trace_gaps():
@@ -110,17 +116,17 @@ def test_get_trace_gaps():
 
     confirm that gaps are found in a trace
     """
-    trace = __create_trace('H', [1, 1, numpy.nan, numpy.nan, 0, 1])
+    trace = __create_trace("H", [1, 1, numpy.nan, numpy.nan, 0, 1])
     # set time of first sample
-    trace.stats.starttime = UTCDateTime('2015-01-01T00:00:00Z')
+    trace.stats.starttime = UTCDateTime("2015-01-01T00:00:00Z")
     # set sample rate to 1 minute
     trace.stats.delta = 60
     # find gap
     gaps = TimeseriesUtility.get_trace_gaps(trace)
     assert_equal(len(gaps), 1)
     gap = gaps[0]
-    assert_equal(gap[0], UTCDateTime('2015-01-01T00:02:00Z'))
-    assert_equal(gap[1], UTCDateTime('2015-01-01T00:03:00Z'))
+    assert_equal(gap[0], UTCDateTime("2015-01-01T00:02:00Z"))
+    assert_equal(gap[1], UTCDateTime("2015-01-01T00:03:00Z"))
 
 
 def test_get_merged_gaps():
@@ -128,90 +134,104 @@ def test_get_merged_gaps():
 
     confirm that gaps are merged
     """
-    merged = TimeseriesUtility.get_merged_gaps({
-        'H': [
-            # gap for 2 seconds, that starts after next gap
-            [
-                UTCDateTime('2015-01-01T00:00:01Z'),
-                UTCDateTime('2015-01-01T00:00:03Z'),
-                UTCDateTime('2015-01-01T00:00:04Z')
-            ]
-        ],
-        # gap for 1 second, that occurs before previous gap
-        'Z': [
-            [
-                UTCDateTime('2015-01-01T00:00:00Z'),
-                UTCDateTime('2015-01-01T00:00:00Z'),
-                UTCDateTime('2015-01-01T00:00:01Z')
+    merged = TimeseriesUtility.get_merged_gaps(
+        {
+            "H": [
+                # gap of 2 seconds that starts after the next gap
+                [
+                    UTCDateTime("2015-01-01T00:00:01Z"),
+                    UTCDateTime("2015-01-01T00:00:03Z"),
+                    UTCDateTime("2015-01-01T00:00:04Z"),
+                ]
             ],
-            [
-                UTCDateTime('2015-01-01T00:00:05Z'),
-                UTCDateTime('2015-01-01T00:00:07Z'),
-                UTCDateTime('2015-01-01T00:00:08Z')
+            # gap of 1 second that occurs before the previous gap
+            "Z": [
+                [
+                    UTCDateTime("2015-01-01T00:00:00Z"),
+                    UTCDateTime("2015-01-01T00:00:00Z"),
+                    UTCDateTime("2015-01-01T00:00:01Z"),
+                ],
+                [
+                    UTCDateTime("2015-01-01T00:00:05Z"),
+                    UTCDateTime("2015-01-01T00:00:07Z"),
+                    UTCDateTime("2015-01-01T00:00:08Z"),
+                ],
             ],
-        ]
-    })
+        }
+    )
     assert_equal(len(merged), 2)
     # first gap combines H and Z gaps
     gap = merged[0]
-    assert_equal(gap[0], UTCDateTime('2015-01-01T00:00:00Z'))
-    assert_equal(gap[1], UTCDateTime('2015-01-01T00:00:03Z'))
+    assert_equal(gap[0], UTCDateTime("2015-01-01T00:00:00Z"))
+    assert_equal(gap[1], UTCDateTime("2015-01-01T00:00:03Z"))
     # second gap is second Z gap
     gap = merged[1]
-    assert_equal(gap[0], UTCDateTime('2015-01-01T00:00:05Z'))
-    assert_equal(gap[1], UTCDateTime('2015-01-01T00:00:07Z'))
+    assert_equal(gap[0], UTCDateTime("2015-01-01T00:00:05Z"))
+    assert_equal(gap[1], UTCDateTime("2015-01-01T00:00:07Z"))
 
 
 def test_has_all_channels():
     """TimeseriesUtility_test.test_has_all_channels():
     """
     nan = numpy.nan
-    stream = Stream([
-        __create_trace('H', [nan, 1, 1, nan, nan]),
-        __create_trace('Z', [0, 0, 0, 1, 1]),
-        __create_trace('E', [nan, nan, nan, nan, nan])
-    ])
+    stream = Stream(
+        [
+            __create_trace("H", [nan, 1, 1, nan, nan]),
+            __create_trace("Z", [0, 0, 0, 1, 1]),
+            __create_trace("E", [nan, nan, nan, nan, nan]),
+        ]
+    )
     for trace in stream:
         # set time of first sample
-        trace.stats.starttime = UTCDateTime('2015-01-01T00:00:00Z')
+        trace.stats.starttime = UTCDateTime("2015-01-01T00:00:00Z")
         # set sample rate to 1 second
         trace.stats.delta = 1
         trace.stats.npts = len(trace.data)
     # check for channels
     starttime = stream[0].stats.starttime
     endtime = stream[0].stats.endtime
-    assert_equal(TimeseriesUtility.has_all_channels(
-            stream, ['H', 'Z'], starttime, endtime), True)
-    assert_equal(TimeseriesUtility.has_all_channels(
-            stream, ['H', 'Z', 'E'], starttime, endtime), False)
-    assert_equal(TimeseriesUtility.has_all_channels(
-            stream, ['E'], starttime, endtime), False)
+    assert_equal(
+        TimeseriesUtility.has_all_channels(stream, ["H", "Z"], starttime, endtime), True
+    )
+    assert_equal(
+        TimeseriesUtility.has_all_channels(stream, ["H", "Z", "E"], starttime, endtime),
+        False,
+    )
+    assert_equal(
+        TimeseriesUtility.has_all_channels(stream, ["E"], starttime, endtime), False
+    )
 
 
 def test_has_any_channels():
     """TimeseriesUtility_test.test_has_any_channels():
     """
     nan = numpy.nan
-    stream = Stream([
-        __create_trace('H', [nan, 1, 1, nan, nan]),
-        __create_trace('Z', [0, 0, 0, 1, 1, 1]),
-        __create_trace('E', [nan, nan, nan, nan, nan])
-    ])
+    stream = Stream(
+        [
+            __create_trace("H", [nan, 1, 1, nan, nan]),
+            __create_trace("Z", [0, 0, 0, 1, 1, 1]),
+            __create_trace("E", [nan, nan, nan, nan, nan]),
+        ]
+    )
     for trace in stream:
         # set time of first sample
-        trace.stats.starttime = UTCDateTime('2015-01-01T00:00:00Z')
+        trace.stats.starttime = UTCDateTime("2015-01-01T00:00:00Z")
         # set sample rate to 1 second
         trace.stats.delta = 1
         trace.stats.npts = len(trace.data)
     # check for channels
     starttime = stream[0].stats.starttime
     endtime = stream[0].stats.endtime
-    assert_equal(TimeseriesUtility.has_any_channels(
-            stream, ['H', 'Z'], starttime, endtime), True)
-    assert_equal(TimeseriesUtility.has_any_channels(
-            stream, ['H', 'Z', 'E'], starttime, endtime), True)
-    assert_equal(TimeseriesUtility.has_any_channels(
-            stream, ['E'], starttime, endtime), False)
+    assert_equal(
+        TimeseriesUtility.has_any_channels(stream, ["H", "Z"], starttime, endtime), True
+    )
+    assert_equal(
+        TimeseriesUtility.has_any_channels(stream, ["H", "Z", "E"], starttime, endtime),
+        True,
+    )
+    assert_equal(
+        TimeseriesUtility.has_any_channels(stream, ["E"], starttime, endtime), False
+    )
 
 
 def test_merge_streams():
@@ -219,25 +239,25 @@ def test_merge_streams():
 
     confirm merge streams treats empty channels correctly
     """
-    trace1 = __create_trace('H', [1, 1, 1, 1])
-    trace2 = __create_trace('E', [2, numpy.nan, numpy.nan, 2])
-    trace3 = __create_trace('F', [numpy.nan, numpy.nan, numpy.nan, numpy.nan])
-    trace4 = __create_trace('H', [2, 2, 2, 2])
-    trace5 = __create_trace('E', [3, numpy.nan, numpy.nan, 3])
-    trace6 = __create_trace('F', [numpy.nan, numpy.nan, numpy.nan, numpy.nan])
+    trace1 = __create_trace("H", [1, 1, 1, 1])
+    trace2 = __create_trace("E", [2, numpy.nan, numpy.nan, 2])
+    trace3 = __create_trace("F", [numpy.nan, numpy.nan, numpy.nan, numpy.nan])
+    trace4 = __create_trace("H", [2, 2, 2, 2])
+    trace5 = __create_trace("E", [3, numpy.nan, numpy.nan, 3])
+    trace6 = __create_trace("F", [numpy.nan, numpy.nan, numpy.nan, numpy.nan])
     npts1 = len(trace1.data)
     npts2 = len(trace4.data)
     timeseries1 = Stream(traces=[trace1, trace2, trace3])
     timeseries2 = Stream(traces=[trace4, trace5, trace6])
     for trace in timeseries1:
-        trace.stats.starttime = UTCDateTime('2018-01-01T00:00:00Z')
+        trace.stats.starttime = UTCDateTime("2018-01-01T00:00:00Z")
         trace.stats.npts = npts1
     for trace in timeseries2:
-        trace.stats.starttime = UTCDateTime('2018-01-01T00:02:00Z')
+        trace.stats.starttime = UTCDateTime("2018-01-01T00:02:00Z")
         trace.stats.npts = npts2
     merged_streams1 = TimeseriesUtility.merge_streams(timeseries1)
     # Make sure the empty 'F' was not removed from stream
-    assert_equal(1, len(merged_streams1.select(channel='F')))
+    assert_equal(1, len(merged_streams1.select(channel="F")))
     # Merge multiple streams with overlapping timestamps
     timeseries = timeseries1 + timeseries2
 
@@ -245,58 +265,47 @@ def test_merge_streams():
     assert_equal(len(merged_streams), len(timeseries1))
     assert_equal(len(merged_streams[0]), 6)
     assert_equal(len(merged_streams[2]), 6)
+    assert_almost_equal(merged_streams.select(channel="H")[0].data, [1, 1, 2, 2, 2, 2])
     assert_almost_equal(
-            merged_streams.select(channel='H')[0].data,
-            [1, 1, 2, 2, 2, 2])
-    assert_almost_equal(
-            merged_streams.select(channel='E')[0].data,
-            [2, numpy.nan, 3, 2, numpy.nan, 3])
-    assert_almost_equal(
-            merged_streams.select(channel='F')[0].data,
-            [numpy.nan] * 6)
+        merged_streams.select(channel="E")[0].data, [2, numpy.nan, 3, 2, numpy.nan, 3]
+    )
+    assert_almost_equal(merged_streams.select(channel="F")[0].data, [numpy.nan] * 6)
 
-    trace7 = __create_trace('H', [1, 1, 1, 1])
-    trace8 = __create_trace('E', [numpy.nan, numpy.nan, numpy.nan, numpy.nan])
-    trace9 = __create_trace('F', [numpy.nan, numpy.nan, numpy.nan, numpy.nan])
+    trace7 = __create_trace("H", [1, 1, 1, 1])
+    trace8 = __create_trace("E", [numpy.nan, numpy.nan, numpy.nan, numpy.nan])
+    trace9 = __create_trace("F", [numpy.nan, numpy.nan, numpy.nan, numpy.nan])
     timeseries3 = Stream(traces=[trace7, trace8, trace9])
     npts3 = len(trace7.data)
     for trace in timeseries3:
-        trace.stats.starttime = UTCDateTime('2018-01-01T00:00:00Z')
+        trace.stats.starttime = UTCDateTime("2018-01-01T00:00:00Z")
         trace.stats.npts = npts3
     merged_streams3 = TimeseriesUtility.merge_streams(timeseries3)
     assert_equal(len(timeseries3), len(merged_streams3))
-    assert_almost_equal(
-            timeseries3.select(channel='H')[0].data,
-            [1, 1, 1, 1])
-    assert_equal(
-            numpy.isnan(timeseries3.select(channel='E')[0].data).all(),
-            True)
-    assert_equal(
-            numpy.isnan(timeseries3.select(channel='F')[0].data).all(),
-            True)
-
-    trace10 = __create_trace('H', [1, 1, numpy.nan, numpy.nan, 1, 1])
-    trace11 = __create_trace('H', [2, 2, 2, 2])
-    trace10.stats.starttime = UTCDateTime('2018-01-01T00:00:00Z')
-    trace11.stats.starttime = UTCDateTime('2018-01-01T00:01:00Z')
+    assert_almost_equal(timeseries3.select(channel="H")[0].data, [1, 1, 1, 1])
+    assert_equal(numpy.isnan(timeseries3.select(channel="E")[0].data).all(), True)
+    assert_equal(numpy.isnan(timeseries3.select(channel="F")[0].data).all(), True)
+
+    trace10 = __create_trace("H", [1, 1, numpy.nan, numpy.nan, 1, 1])
+    trace11 = __create_trace("H", [2, 2, 2, 2])
+    trace10.stats.starttime = UTCDateTime("2018-01-01T00:00:00Z")
+    trace11.stats.starttime = UTCDateTime("2018-01-01T00:01:00Z")
     timeseries4 = Stream(traces=[trace10, trace11])
     merged4 = TimeseriesUtility.merge_streams(timeseries4)
     assert_equal(len(merged4[0].data), 6)
-    assert_almost_equal(
-        merged4.select(channel='H')[0].data,
-        [1, 2, 2, 2, 1, 1])
+    assert_almost_equal(merged4.select(channel="H")[0].data, [1, 2, 2, 2, 1, 1])
 
 
 def test_pad_timeseries():
     """TimeseriesUtility_test.test_pad_timeseries()
     """
-    trace1 = _create_trace([1, 1, 1, 1, 1], 'H', UTCDateTime("2018-01-01"))
-    trace2 = _create_trace([2, 2], 'E', UTCDateTime("2018-01-01"))
+    trace1 = _create_trace([1, 1, 1, 1, 1], "H", UTCDateTime("2018-01-01"))
+    trace2 = _create_trace([2, 2], "E", UTCDateTime("2018-01-01"))
     timeseries = Stream(traces=[trace1, trace2])
     TimeseriesUtility.pad_timeseries(
         timeseries=timeseries,
         starttime=trace1.stats.starttime,
-        endtime=trace1.stats.endtime)
+        endtime=trace1.stats.endtime,
+    )
     assert_equal(len(trace1.data), len(trace2.data))
     assert_equal(trace1.stats.starttime, trace2.stats.starttime)
     assert_equal(trace1.stats.endtime, trace2.stats.endtime)
@@ -314,55 +323,67 @@ def test_pad_timeseries():
 def test_pad_and_trim_trace():
     """TimeseriesUtility_test.test_pad_and_trim_trace()
     """
-    trace = _create_trace([0, 1, 2, 3, 4], 'X', UTCDateTime("2018-01-01"))
+    trace = _create_trace([0, 1, 2, 3, 4], "X", UTCDateTime("2018-01-01"))
     assert_equal(trace.stats.starttime, UTCDateTime("2018-01-01T00:00:00Z"))
     assert_equal(trace.stats.endtime, UTCDateTime("2018-01-01T00:04:00Z"))
     # starttime between first and second sample
     # expect first sample to be removed, start at next sample, end at same
-    TimeseriesUtility.pad_and_trim_trace(trace,
-            starttime=UTCDateTime("2018-01-01T00:00:30Z"),
-            endtime=trace.stats.endtime)
+    TimeseriesUtility.pad_and_trim_trace(
+        trace,
+        starttime=UTCDateTime("2018-01-01T00:00:30Z"),
+        endtime=trace.stats.endtime,
+    )
     assert_equal(trace.stats.starttime, UTCDateTime("2018-01-01T00:01:00Z"))
     assert_equal(trace.stats.endtime, UTCDateTime("2018-01-01T00:04:00Z"))
     assert_array_equal(trace.data, [1, 2, 3, 4])
     # endtime between last and second to last samples
-    TimeseriesUtility.pad_and_trim_trace(trace,
-            starttime=UTCDateTime("2018-01-01T00:00:30Z"),
-            endtime=UTCDateTime("2018-01-01T00:03:50Z"))
+    TimeseriesUtility.pad_and_trim_trace(
+        trace,
+        starttime=UTCDateTime("2018-01-01T00:00:30Z"),
+        endtime=UTCDateTime("2018-01-01T00:03:50Z"),
+    )
     assert_equal(trace.stats.starttime, UTCDateTime("2018-01-01T00:01:00Z"))
     assert_equal(trace.stats.endtime, UTCDateTime("2018-01-01T00:03:00Z"))
     assert_array_equal(trace.data, [1, 2, 3])
     # pad outward
-    TimeseriesUtility.pad_and_trim_trace(trace,
-            starttime=UTCDateTime("2018-01-01T00:00:00Z"),
-            endtime=UTCDateTime("2018-01-01T00:05:00Z"))
+    TimeseriesUtility.pad_and_trim_trace(
+        trace,
+        starttime=UTCDateTime("2018-01-01T00:00:00Z"),
+        endtime=UTCDateTime("2018-01-01T00:05:00Z"),
+    )
     assert_equal(trace.stats.starttime, UTCDateTime("2018-01-01T00:00:00Z"))
     assert_equal(trace.stats.endtime, UTCDateTime("2018-01-01T00:05:00Z"))
     assert_array_equal(trace.data, [numpy.nan, 1, 2, 3, numpy.nan, numpy.nan])
     # remove exactly one sample
-    TimeseriesUtility.pad_and_trim_trace(trace,
-            starttime=UTCDateTime("2018-01-01T00:00:00Z"),
-            endtime=UTCDateTime("2018-01-01T00:04:00Z"))
+    TimeseriesUtility.pad_and_trim_trace(
+        trace,
+        starttime=UTCDateTime("2018-01-01T00:00:00Z"),
+        endtime=UTCDateTime("2018-01-01T00:04:00Z"),
+    )
     assert_equal(trace.stats.starttime, UTCDateTime("2018-01-01T00:00:00Z"))
     assert_equal(trace.stats.endtime, UTCDateTime("2018-01-01T00:04:00Z"))
     assert_array_equal(trace.data, [numpy.nan, 1, 2, 3, numpy.nan])
     # pad start and trim end
-    TimeseriesUtility.pad_and_trim_trace(trace,
-            starttime=UTCDateTime("2017-12-31T23:58:59Z"),
-            endtime=UTCDateTime("2018-01-01T00:03:00Z"))
+    TimeseriesUtility.pad_and_trim_trace(
+        trace,
+        starttime=UTCDateTime("2017-12-31T23:58:59Z"),
+        endtime=UTCDateTime("2018-01-01T00:03:00Z"),
+    )
     assert_equal(trace.stats.starttime, UTCDateTime("2017-12-31T23:59:00Z"))
     assert_equal(trace.stats.endtime, UTCDateTime("2018-01-01T00:03:00Z"))
     assert_array_equal(trace.data, [numpy.nan, numpy.nan, 1, 2, 3])
     # pad end and trim start
-    TimeseriesUtility.pad_and_trim_trace(trace,
-            starttime=UTCDateTime("2018-01-01T00:00:00Z"),
-            endtime=UTCDateTime("2018-01-01T00:04:00Z"))
+    TimeseriesUtility.pad_and_trim_trace(
+        trace,
+        starttime=UTCDateTime("2018-01-01T00:00:00Z"),
+        endtime=UTCDateTime("2018-01-01T00:04:00Z"),
+    )
     assert_equal(trace.stats.starttime, UTCDateTime("2018-01-01T00:00:00Z"))
     assert_equal(trace.stats.endtime, UTCDateTime("2018-01-01T00:04:00Z"))
     assert_array_equal(trace.data, [numpy.nan, 1, 2, 3, numpy.nan])
 
 
-def _create_trace(data, channel, starttime, delta=60.):
+def _create_trace(data, channel, starttime, delta=60.0):
     stats = Stats()
     stats.channel = channel
     stats.delta = delta
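# --- Editor's sketch, not part of the diff ---
# The gap tests above treat runs of NaN samples as gaps and report
# (gap_start, gap_end) sample times. A minimal standalone version of that
# scan (find_gaps is illustrative, not TimeseriesUtility's implementation),
# with times expressed as plain seconds for brevity:
import numpy as np

def find_gaps(data, starttime, delta):
    gaps, start = [], None
    for i, value in enumerate(data):
        if np.isnan(value) and start is None:
            start = starttime + i * delta  # first NaN sample opens a gap
        elif not np.isnan(value) and start is not None:
            gaps.append((start, starttime + (i - 1) * delta))  # close gap
            start = None
    if start is not None:  # trailing gap runs to the final sample
        gaps.append((start, starttime + (len(data) - 1) * delta))
    return gaps

# mirrors test_get_trace_gaps: NaNs at samples 2-3, one-minute cadence
assert find_gaps([1, 1, np.nan, np.nan, 0, 1], 0, 60) == [(120, 180)]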
diff --git a/test/Util_test.py b/test/Util_test.py
index 58abe4dbf6e7a69c66221715bbbf4540a22afb51..2c44beb7d316ebe0aab324675ec15152bb58ff0b 100644
--- a/test/Util_test.py
+++ b/test/Util_test.py
@@ -11,8 +11,8 @@ def test_get_file_for_url__throws_exception():
     """
     # throws exception for non "file://" urls
     try:
-        Util.get_file_from_url('http://someserver/path')
-        assert False, ('expected exception')
+        Util.get_file_from_url("http://someserver/path")
+        assert False, "expected exception"
     except Exception:
         pass
 
@@ -21,29 +21,30 @@ def test_get_file_for_url__parses_file_urls():
     """Util_test.test_get_file_for_url__parses_file_urls()
     """
     # parses file urls
-    f = Util.get_file_from_url('file://./somefile')
-    assert_equal(f, './somefile')
+    f = Util.get_file_from_url("file://./somefile")
+    assert_equal(f, "./somefile")
 
 
 def test_get_file_for_url__creates_directories():
     """Util_test.test_get_file_for_url__creates_directories()
     """
     # creates directories if requested
-    if os.path.isdir('/tmp/_geomag_algorithms_test_'):
-        shutil.rmtree('/tmp/_geomag_algorithms_test_')
-    f = Util.get_file_from_url('file:///tmp/_geomag_algorithms_test_/somefile',
-            createParentDirectory=True)
-    if not os.path.isdir('/tmp/_geomag_algorithms_test_'):
-        assert False, ('directory not created')
-    shutil.rmtree('/tmp/_geomag_algorithms_test_')
-    assert_equal(f, '/tmp/_geomag_algorithms_test_/somefile')
+    if os.path.isdir("/tmp/_geomag_algorithms_test_"):
+        shutil.rmtree("/tmp/_geomag_algorithms_test_")
+    f = Util.get_file_from_url(
+        "file:///tmp/_geomag_algorithms_test_/somefile", createParentDirectory=True
+    )
+    if not os.path.isdir("/tmp/_geomag_algorithms_test_"):
+        assert False, "directory not created"
+    shutil.rmtree("/tmp/_geomag_algorithms_test_")
+    assert_equal(f, "/tmp/_geomag_algorithms_test_/somefile")
 
 
 def test_get_interval__defaults():
     """Util_test.test_get_interval()
     """
-    starttime = UTCDateTime('2015-01-01T00:00:00Z')
-    endtime = UTCDateTime('2015-02-01T00:00:00Z')
+    starttime = UTCDateTime("2015-01-01T00:00:00Z")
+    endtime = UTCDateTime("2015-02-01T00:00:00Z")
     intervals = Util.get_intervals(starttime, endtime)
     assert_equal(len(intervals), 31)
 
@@ -51,8 +52,8 @@ def test_get_interval__defaults():
 def test_get_interval__custom_size():
     """Util_test.test_get_interval__custom_size()
     """
-    starttime = UTCDateTime('2015-01-01T00:00:00Z')
-    endtime = UTCDateTime('2015-01-02T00:00:00Z')
+    starttime = UTCDateTime("2015-01-01T00:00:00Z")
+    endtime = UTCDateTime("2015-01-02T00:00:00Z")
     intervals = Util.get_intervals(starttime, endtime, size=3600)
     assert_equal(len(intervals), 24)
 
@@ -60,18 +61,18 @@ def test_get_interval__custom_size():
 def test_get_interval__negative_size():
     """Util_test.test_get_interval__negative_size()
     """
-    starttime = UTCDateTime('2015-01-01T00:00:00Z')
-    endtime = UTCDateTime('2015-01-02T00:00:00Z')
+    starttime = UTCDateTime("2015-01-01T00:00:00Z")
+    endtime = UTCDateTime("2015-01-02T00:00:00Z")
     intervals = Util.get_intervals(starttime, endtime, size=-1)
     assert_equal(len(intervals), 1)
-    assert_equal(intervals[0]['start'], starttime)
-    assert_equal(intervals[0]['end'], endtime)
+    assert_equal(intervals[0]["start"], starttime)
+    assert_equal(intervals[0]["end"], endtime)
 
 
 def test_get_interval__trim():
     """Util_test.test_get_interval__trim()
     """
-    starttime = UTCDateTime('2015-01-01T01:00:00Z')
-    endtime = UTCDateTime('2015-01-02T00:00:00Z')
+    starttime = UTCDateTime("2015-01-01T01:00:00Z")
+    endtime = UTCDateTime("2015-01-02T00:00:00Z")
     intervals = Util.get_intervals(starttime, endtime, trim=True)
-    assert_equal(intervals[0]['start'], starttime)
+    assert_equal(intervals[0]["start"], starttime)
diff --git a/test/WebService_test.py b/test/WebService_test.py
index f9f2377783ca214ff93d30084a9175bca354f421..99c5e0ad50c517a27d92ffbad46de19ae5ec0092 100644
--- a/test/WebService_test.py
+++ b/test/WebService_test.py
@@ -14,26 +14,32 @@ from obspy.core.utcdatetime import UTCDateTime
 
 class TestFactory(object):
     "Factory to test for 200 and 400 response statuses."
+
     @staticmethod
-    def get_timeseries(observatory=None, channels=None,
-            starttime=None, endtime=None, type=None,
-            interval=None):
+    def get_timeseries(
+        observatory=None,
+        channels=None,
+        starttime=None,
+        endtime=None,
+        type=None,
+        interval=None,
+    ):
         stream = obspy.core.Stream()
         for channel in channels:
             stats = obspy.core.Stats()
             stats.channel = channel
             stats.starttime = starttime
-            stats.network = 'Test'
+            stats.network = "Test"
             stats.station = observatory
             stats.location = observatory
-            if interval == 'second':
-                stats.sampling_rate = 1.
-            elif interval == 'minute':
-                stats.sampling_rate = 1. / 60.
-            elif interval == 'hourly':
-                stats.sampling_rate = 1. / 3600.
-            elif interval == 'daily':
-                stats.sampling_rate = 1. / 86400.
+            if interval == "second":
+                stats.sampling_rate = 1.0
+            elif interval == "minute":
+                stats.sampling_rate = 1.0 / 60.0
+            elif interval == "hourly":
+                stats.sampling_rate = 1.0 / 3600.0
+            elif interval == "daily":
+                stats.sampling_rate = 1.0 / 86400.0
             length = int((endtime - starttime) * stats.sampling_rate)
             stats.npts = length + 1
             data = numpy.full(length, numpy.nan, dtype=numpy.float64)
@@ -44,10 +50,16 @@ class TestFactory(object):
 
 class ErrorFactory(object):
     "Factory to test for 500 response status."
+
     @staticmethod
-    def get_timeseries(observatory=None, channels=None,
-            starttime=None, endtime=None, type=None,
-            interval=None):
+    def get_timeseries(
+        observatory=None,
+        channels=None,
+        starttime=None,
+        endtime=None,
+        type=None,
+        interval=None,
+    ):
         pass
 
 
@@ -58,14 +70,14 @@ def test__get_param():
     the appropriate values and raises exceptions for invalid values.
     """
     params = {
-        'id': None,
-        'elements': 'H,E,Z,F',
-        'sampling_period': ['1', '60'],
+        "id": None,
+        "elements": "H,E,Z,F",
+        "sampling_period": ["1", "60"],
     }
-    assert_raises(Exception, _get_param, params, 'id', required=True)
-    elements = _get_param(params, 'elements')
-    assert_equal(elements, 'H,E,Z,F')
-    assert_raises(Exception, _get_param, params, 'sampling_period')
+    assert_raises(Exception, _get_param, params, "id", required=True)
+    elements = _get_param(params, "elements")
+    assert_equal(elements, "H,E,Z,F")
+    assert_raises(Exception, _get_param, params, "sampling_period")
 
 
 def test_fetch():
@@ -75,9 +87,13 @@ def test_fetch():
     obspy.core.stream object.
     """
     service = WebService(TestFactory())
-    query = service.parse(parse_qs('id=BOU&starttime=2016-06-06'
-            '&endtime=2016-06-07&elements=H,E,Z,F&sampling_period=60'
-            '&format=iaga2002&type=variation'))
+    query = service.parse(
+        parse_qs(
+            "id=BOU&starttime=2016-06-06"
+            "&endtime=2016-06-07&elements=H,E,Z,F&sampling_period=60"
+            "&format=iaga2002&type=variation"
+        )
+    )
     timeseries = service.fetch(query)
     assert_equal(isinstance(timeseries, Stream), True)
 
@@ -90,29 +106,33 @@ def test_parse():
     confirm that default values are applied correctly.
     """
     service = WebService(TestFactory())
-    query = service.parse(parse_qs('id=BOU&starttime=2016-06-06'
-            '&endtime=2016-06-07&elements=H,E,Z,F&sampling_period=60'
-            '&format=iaga2002&type=variation'))
-    assert_equal(query.observatory_id, 'BOU')
+    query = service.parse(
+        parse_qs(
+            "id=BOU&starttime=2016-06-06"
+            "&endtime=2016-06-07&elements=H,E,Z,F&sampling_period=60"
+            "&format=iaga2002&type=variation"
+        )
+    )
+    assert_equal(query.observatory_id, "BOU")
     assert_equal(query.starttime, UTCDateTime(2016, 6, 6, 0))
     assert_equal(query.endtime, UTCDateTime(2016, 6, 7, 0))
-    assert_equal(query.elements, ['H', 'E', 'Z', 'F'])
-    assert_equal(query.sampling_period, '60')
-    assert_equal(query.output_format, 'iaga2002')
-    assert_equal(query.data_type, 'variation')
+    assert_equal(query.elements, ["H", "E", "Z", "F"])
+    assert_equal(query.sampling_period, "60")
+    assert_equal(query.output_format, "iaga2002")
+    assert_equal(query.data_type, "variation")
     # Test that defaults are set for unspecified values
     now = datetime.now()
     today = UTCDateTime(year=now.year, month=now.month, day=now.day, hour=0)
     tomorrow = today + (24 * 60 * 60 - 1)
-    query = service.parse(parse_qs('id=BOU'))
-    assert_equal(query.observatory_id, 'BOU')
+    query = service.parse(parse_qs("id=BOU"))
+    assert_equal(query.observatory_id, "BOU")
     assert_equal(query.starttime, today)
     assert_equal(query.endtime, tomorrow)
-    assert_equal(query.elements, ('X', 'Y', 'Z', 'F'))
-    assert_equal(query.sampling_period, '60')
-    assert_equal(query.output_format, 'iaga2002')
-    assert_equal(query.data_type, 'variation')
-    assert_raises(Exception, service.parse, parse_qs('/?id=bad'))
+    assert_equal(query.elements, ("X", "Y", "Z", "F"))
+    assert_equal(query.sampling_period, "60")
+    assert_equal(query.output_format, "iaga2002")
+    assert_equal(query.data_type, "variation")
+    assert_raises(Exception, service.parse, parse_qs("/?id=bad"))
 
 
 def test_requests():
@@ -123,23 +143,23 @@ def test_requests():
     """
     app = webtest.TestApp(WebService(TestFactory()))
     # Check invalid request (bad values)
-    response = app.get('/?id=bad', expect_errors=True)
+    response = app.get("/?id=bad", expect_errors=True)
     assert_equal(response.status_int, 400)
-    assert_equal(response.status, '400 Bad Request')
-    assert_equal(response.content_type, 'text/plain')
+    assert_equal(response.status, "400 Bad Request")
+    assert_equal(response.content_type, "text/plain")
     # Check invalid request (duplicates)
-    response = app.get('/?id=BOU&id=BOU', expect_errors=True)
+    response = app.get("/?id=BOU&id=BOU", expect_errors=True)
     assert_equal(response.status_int, 400)
-    assert_equal(response.status, '400 Bad Request')
-    assert_equal(response.content_type, 'text/plain')
+    assert_equal(response.status, "400 Bad Request")
+    assert_equal(response.content_type, "text/plain")
     # Check valid request (upper and lower case)
-    response = app.get('/?id=BOU')
+    response = app.get("/?id=BOU")
     assert_equal(response.status_int, 200)
-    assert_equal(response.status, '200 OK')
-    assert_equal(response.content_type, 'text/plain')
+    assert_equal(response.status, "200 OK")
+    assert_equal(response.content_type, "text/plain")
     # Test internal server error (use fake factory)
     app = webtest.TestApp(WebService(ErrorFactory(), error_stream=None))
-    response = app.get('/?id=BOU', expect_errors=True)
+    response = app.get("/?id=BOU", expect_errors=True)
     assert_equal(response.status_int, 500)
-    assert_equal(response.status, '500 Internal Server Error')
-    assert_equal(response.content_type, 'text/plain')
+    assert_equal(response.status, "500 Internal Server Error")
+    assert_equal(response.content_type, "text/plain")
diff --git a/test/algorithm_test/AdjustedAlgorithm_test.py b/test/algorithm_test/AdjustedAlgorithm_test.py
index 988dbfc84351ef202e9c7dc5e3a857e586b04f29..4fe1c2838b3736f0369b290cbf19ab5c341cd1f8 100644
--- a/test/algorithm_test/AdjustedAlgorithm_test.py
+++ b/test/algorithm_test/AdjustedAlgorithm_test.py
@@ -9,7 +9,7 @@ def test_construct():
     matrix = None
     pier_correction = None
     # load adjusted data transform matrix and pier correction
-    a = adj(matrix, pier_correction, 'etc/adjusted/adjbou_state_.json')
+    a = adj(matrix, pier_correction, "etc/adjusted/adjbou_state_.json")
 
     assert_almost_equal(a.matrix[0, 0], 9.83427577e-01, 6)
 
@@ -25,12 +25,12 @@ def test_process():
     matrix = None
     pier_correction = None
     # load adjusted data transform matrix and pier correction
-    a = adj(matrix, pier_correction, 'etc/adjusted/adjbou_state_.json')
+    a = adj(matrix, pier_correction, "etc/adjusted/adjbou_state_.json")
 
     # load Boulder Jan 16 files from the etc/ directory
-    hezf_iaga2002_file = open('etc/adjusted/BOU201601vmin.min')
+    hezf_iaga2002_file = open("etc/adjusted/BOU201601vmin.min")
     hezf_iaga2002_string = hezf_iaga2002_file.read()
-    xyzf_iaga2002_file = open('etc/adjusted/BOU201601adj.min')
+    xyzf_iaga2002_file = open("etc/adjusted/BOU201601adj.min")
     xyzf_iaga2002_string = xyzf_iaga2002_file.read()
     factory = i2.IAGA2002Factory()
     hezf = factory.parse_string(hezf_iaga2002_string)
@@ -40,15 +40,15 @@ def test_process():
     adj_bou = a.process(hezf)
 
     # unpack channels from loaded adjusted data file
-    x = xyzf.select(channel='X')[0]
-    y = xyzf.select(channel='Y')[0]
-    z = xyzf.select(channel='Z')[0]
-    f = xyzf.select(channel='F')[0]
+    x = xyzf.select(channel="X")[0]
+    y = xyzf.select(channel="Y")[0]
+    z = xyzf.select(channel="Z")[0]
+    f = xyzf.select(channel="F")[0]
     # unpack channels from adjusted processing of raw data
-    x_adj = adj_bou.select(channel='X')[0]
-    y_adj = adj_bou.select(channel='Y')[0]
-    z_adj = adj_bou.select(channel='Z')[0]
-    f_adj = adj_bou.select(channel='F')[0]
+    x_adj = adj_bou.select(channel="X")[0]
+    y_adj = adj_bou.select(channel="Y")[0]
+    z_adj = adj_bou.select(channel="Z")[0]
+    f_adj = adj_bou.select(channel="F")[0]
 
     assert_almost_equal(x.data, x_adj.data, 2)
     assert_almost_equal(y.data, y_adj.data, 2)
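
The adjusted-algorithm tests load a transform matrix and pier correction from a JSON state file, then compare processed HEZF against previously adjusted XYZF. As a hedged illustration of the core operation, applying a calibration matrix in homogeneous (affine) coordinates looks roughly like this; the 4x4 shape and the identity placeholder are assumptions made for the sketch, not the contents of adjbou_state_.json:

    import numpy as np

    # placeholder affine transform; a real state file holds fitted calibration values
    matrix = np.eye(4)

    def adjust_sketch(h, e, z, matrix):
        # stack channels with a row of ones so any translation terms in the
        # matrix's last column can act on the data
        hez1 = np.vstack([h, e, z, np.ones_like(h)])
        xyz1 = matrix @ hez1
        return xyz1[0], xyz1[1], xyz1[2]

    h = np.array([20800.0, 20801.0])
    e = np.array([-100.0, -99.5])
    z = np.array([47600.0, 47601.0])
    x, y, z_adj = adjust_sketch(h, e, z, matrix)
    # with the identity placeholder, output equals input
    assert np.allclose(x, h)
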
diff --git a/test/algorithm_test/Algorithm_test.py b/test/algorithm_test/Algorithm_test.py
index b8e1aa9c40dd6477e8d2a91bf5a0afee984f5fca..986a1512a3b061f551470c051e7ef7cb1126293a 100644
--- a/test/algorithm_test/Algorithm_test.py
+++ b/test/algorithm_test/Algorithm_test.py
@@ -21,9 +21,8 @@ def test_algorithm_channels():
     confirms that algorithm.get_input_channels returns the correct channels
     confirms that algorithm.get_output_channels returns the correct channels
     """
-    inchannels = ['H', 'E', 'Z', 'F']
-    outchannels = ['H', 'D', 'Z', 'F']
-    algorithm = Algorithm(inchannels=inchannels,
-            outchannels=outchannels)
+    inchannels = ["H", "E", "Z", "F"]
+    outchannels = ["H", "D", "Z", "F"]
+    algorithm = Algorithm(inchannels=inchannels, outchannels=outchannels)
     assert_equal(algorithm.get_input_channels(), inchannels)
     assert_equal(algorithm.get_output_channels(), outchannels)
diff --git a/test/algorithm_test/AverageAlgorithm_test.py b/test/algorithm_test/AverageAlgorithm_test.py
index d907ee044c386974deb2bdcf683c754964c57291..a93540cd95d446a97c0136752aa657b7c8ff2acb 100644
--- a/test/algorithm_test/AverageAlgorithm_test.py
+++ b/test/algorithm_test/AverageAlgorithm_test.py
@@ -23,22 +23,22 @@ def test_process():
 
     # Create timeseries with first trace that uses test_data1
     timeseries = Stream()
-    timeseries += __create_trace('H', test_data1)
+    timeseries += __create_trace("H", test_data1)
     # Set metadata so process can read the array:
     # station, sample number, data type, and data interval
-    timeseries[0].stats.station = 'HON'
+    timeseries[0].stats.station = "HON"
     timeseries[0].stats.samples = 5
-    timeseries[0].stats.data_type = 'variation'
-    timeseries[0].stats.data_interval = 'minute'
+    timeseries[0].stats.data_type = "variation"
+    timeseries[0].stats.data_interval = "minute"
     # Add the next trace with test_data2 and set station name
-    timeseries += __create_trace('H', test_data2)
-    timeseries[1].stats.station = 'GUA'
+    timeseries += __create_trace("H", test_data2)
+    timeseries[1].stats.station = "GUA"
     # Add final trace with test_data3 and set station name
-    timeseries += __create_trace('H', test_data3)
-    timeseries[2].stats.station = 'SJG'
+    timeseries += __create_trace("H", test_data3)
+    timeseries[2].stats.station = "SJG"
 
     # initialize the algorithm factory with observatories and channel
-    a = AverageAlgorithm(('HON', 'GUA', 'SJG'), 'H')
+    a = AverageAlgorithm(("HON", "GUA", "SJG"), "H")
     outstream = a.process(timeseries)
     # Ensure the average of the three stations matches the expected solution
     np.testing.assert_array_equal(outstream[0].data, expected_solution)
@@ -52,30 +52,30 @@ def test_gaps():
     """
 
     # Create a trace with data gaps
-    gap_trace = __create_trace('H', [1, 1, np.nan, np.nan, 1, 1])
+    gap_trace = __create_trace("H", [1, 1, np.nan, np.nan, 1, 1])
 
     # set time of first sample, sample rate (1 minute),
     # station, data type, and data interval
-    gap_trace.stats.starttime = UTCDateTime('2015-01-01T00:00:00Z')
+    gap_trace.stats.starttime = UTCDateTime("2015-01-01T00:00:00Z")
     gap_trace.stats.delta = 60
-    gap_trace.stats.station = 'HON'
-    gap_trace.stats.data_type = 'variation'
-    gap_trace.stats.data_interval = 'minute'
+    gap_trace.stats.station = "HON"
+    gap_trace.stats.data_type = "variation"
+    gap_trace.stats.data_interval = "minute"
 
     # Create a trace with no gaps
-    full_trace = __create_trace('H', [1, 1, 1, 1, 1, 1])
+    full_trace = __create_trace("H", [1, 1, 1, 1, 1, 1])
 
     # set time of first sample, sample rate, station
-    full_trace.stats.starttime = UTCDateTime('2015-01-01T00:00:00Z')
+    full_trace.stats.starttime = UTCDateTime("2015-01-01T00:00:00Z")
     full_trace.stats.delta = 60
-    full_trace.stats.station = 'SJG'
+    full_trace.stats.station = "SJG"
 
     # Create timeseries that contains the gap_trace and full_trace
     timeseries = Stream()
     timeseries += gap_trace
     timeseries += full_trace
     # Initialize the AverageAlgorithm factory with observatories and channel
-    alg = AverageAlgorithm(('HON', 'SJG'), 'H')
+    alg = AverageAlgorithm(("HON", "SJG"), "H")
     # Run timeseries through the average process
     outstream = alg.process(timeseries)
 
@@ -91,20 +91,20 @@ def test_metadata():
     """
 
     # Create a trace with channel 'H' and any numbers
-    test_trace = __create_trace('H', [3, 3, 3, 3, 3, 3])
-    test_trace2 = __create_trace('H', [1, 1, 1, 1, 1, 1])
+    test_trace = __create_trace("H", [3, 3, 3, 3, 3, 3])
+    test_trace2 = __create_trace("H", [1, 1, 1, 1, 1, 1])
 
     # set start time, sample rate (1 minute), station, data type and interval
-    test_trace.stats.starttime = UTCDateTime('2015-01-01T00:00:00Z')
+    test_trace.stats.starttime = UTCDateTime("2015-01-01T00:00:00Z")
     test_trace.stats.delta = 60
-    test_trace.stats.station = 'HON'
-    test_trace.stats.data_type = 'variation'
-    test_trace.stats.data_interval = 'minute'
+    test_trace.stats.station = "HON"
+    test_trace.stats.data_type = "variation"
+    test_trace.stats.data_interval = "minute"
 
     # set start time, sample rate (1 minute), station of second trace
-    test_trace2.stats.starttime = UTCDateTime('2015-01-01T00:00:00Z')
+    test_trace2.stats.starttime = UTCDateTime("2015-01-01T00:00:00Z")
     test_trace2.stats.delta = 60
-    test_trace2.stats.station = 'SJG'
+    test_trace2.stats.station = "SJG"
 
     # Populate timeseries with the 2 traces
     timeseries = Stream()
@@ -113,10 +113,10 @@ def test_metadata():
 
     # Initialize the average algorithm with observatories and
     # set a new outchannel name
-    alg = AverageAlgorithm(('HON', 'SJG'), 'Hdt')
+    alg = AverageAlgorithm(("HON", "SJG"), "Hdt")
     outstream = alg.process(timeseries)
 
     # The station name should be changed to 'USGS'
-    assert_equal(outstream[0].stats.station, 'USGS')
+    assert_equal(outstream[0].stats.station, "USGS")
     # The channel should be changed to 'Hdt'
-    assert_equal(outstream[0].stats.channel, 'Hdt')
+    assert_equal(outstream[0].stats.channel, "Hdt")
diff --git a/test/algorithm_test/FilterAlgorithm_test.py b/test/algorithm_test/FilterAlgorithm_test.py
index f7671e5bfb15b6c5baebcaa1ddd76123043e0856..8c7325ce0ab0c2c84cc564a24b1476d6665d5d1d 100644
--- a/test/algorithm_test/FilterAlgorithm_test.py
+++ b/test/algorithm_test/FilterAlgorithm_test.py
@@ -8,8 +8,7 @@ def test_second():
     """algorithm_test.FilterAlgorithm_test.test_second()
     Tests algorithm for 10 Hz to second.
     """
-    f = FilterAlgorithm(input_sample_period=0.1,
-                        output_sample_period=1)
+    f = FilterAlgorithm(input_sample_period=0.1, output_sample_period=1)
 
     # generation of 10HZ_filter_sec.mseed
     # starttime = UTCDateTime('2020-01-06T00:00:00Z')
@@ -27,21 +26,20 @@ def test_second():
     #       interval='tenhertz', type='variation')
     # LLO_raw.write('10HZ_filter_sec.mseed')
 
-    llo = read('etc/filter/10HZ_filter_sec.mseed')
+    llo = read("etc/filter/10HZ_filter_sec.mseed")
     filtered = f.process(llo)
 
-    with open('etc/filter/LLO20200106vsec.sec', 'r') as f:
+    with open("etc/filter/LLO20200106vsec.sec", "r") as f:
         iaga = i2.StreamIAGA2002Factory(stream=f)
-        LLO = iaga.get_timeseries(starttime=None,
-                    endtime=None, observatory='LLO')
+        LLO = iaga.get_timeseries(starttime=None, endtime=None, observatory="LLO")
 
-    u = LLO.select(channel='U')[0]
-    v = LLO.select(channel='V')[0]
-    w = LLO.select(channel='W')[0]
+    u = LLO.select(channel="U")[0]
+    v = LLO.select(channel="V")[0]
+    w = LLO.select(channel="W")[0]
 
-    u_filt = filtered.select(channel='U')[0]
-    v_filt = filtered.select(channel='V')[0]
-    w_filt = filtered.select(channel='W')[0]
+    u_filt = filtered.select(channel="U")[0]
+    v_filt = filtered.select(channel="V")[0]
+    w_filt = filtered.select(channel="W")[0]
 
     assert_almost_equal(u_filt.data, u.data, 2)
     assert_almost_equal(v_filt.data, v.data, 2)
@@ -52,8 +50,7 @@ def test_minute():
     """algorithm_test.FilterAlgorithm_test.test_minute()
     Tests algorithm for 10 Hz to minute.
     """
-    f = FilterAlgorithm(input_sample_period=0.1,
-                        output_sample_period=60.0)
+    f = FilterAlgorithm(input_sample_period=0.1, output_sample_period=60.0)
 
     # generation of 10HZ_filter_min.mseed
     # starttime = UTCDateTime('2020-01-06T00:00:00Z')
@@ -70,21 +67,20 @@ def test_minute():
     #       interval='tenhertz', type='variation')
     # LLO.write('10HZ_filter_min.mseed')
 
-    llo = read('etc/filter/10HZ_filter_min.mseed')
+    llo = read("etc/filter/10HZ_filter_min.mseed")
     filtered = f.process(llo)
 
-    with open('etc/filter/LLO20200106vmin.min', 'r') as f:
+    with open("etc/filter/LLO20200106vmin.min", "r") as f:
         iaga = i2.StreamIAGA2002Factory(stream=f)
-        LLO = iaga.get_timeseries(starttime=None,
-                    endtime=None, observatory='LLO')
+        LLO = iaga.get_timeseries(starttime=None, endtime=None, observatory="LLO")
 
-    u = LLO.select(channel='U')[0]
-    v = LLO.select(channel='V')[0]
-    w = LLO.select(channel='W')[0]
+    u = LLO.select(channel="U")[0]
+    v = LLO.select(channel="V")[0]
+    w = LLO.select(channel="W")[0]
 
-    u_filt = filtered.select(channel='U')[0]
-    v_filt = filtered.select(channel='V')[0]
-    w_filt = filtered.select(channel='W')[0]
+    u_filt = filtered.select(channel="U")[0]
+    v_filt = filtered.select(channel="V")[0]
+    w_filt = filtered.select(channel="W")[0]
 
     assert_almost_equal(u_filt.data, u.data, 2)
     assert_almost_equal(v_filt.data, v.data, 2)
@@ -95,8 +91,7 @@ def test_hour():
     """algorithm_test.FilterAlgorithm_test.test_hour()
     Tests algorithm for 10 Hz to hour.
     """
-    f = FilterAlgorithm(input_sample_period=0.1,
-                        output_sample_period=3600.0)
+    f = FilterAlgorithm(input_sample_period=0.1, output_sample_period=3600.0)
 
     # generation of 10HZ_filter_hor.mseed
     # starttime = UTCDateTime('2020-01-06T00:00:00Z')
@@ -113,21 +108,20 @@ def test_hour():
     #       interval='tenhertz', type='variation')
     # LLO.write('10HZ_filter_hor.mseed')
 
-    llo = read('etc/filter/10HZ_filter_hor.mseed')
+    llo = read("etc/filter/10HZ_filter_hor.mseed")
     filtered = f.process(llo)
 
-    with open('etc/filter/LLO20200106vhor.hor', 'r') as f:
+    with open("etc/filter/LLO20200106vhor.hor", "r") as f:
         iaga = i2.StreamIAGA2002Factory(stream=f)
-        LLO = iaga.get_timeseries(starttime=None,
-                    endtime=None, observatory='LLO')
+        LLO = iaga.get_timeseries(starttime=None, endtime=None, observatory="LLO")
 
-    u = LLO.select(channel='U')[0]
-    v = LLO.select(channel='V')[0]
-    w = LLO.select(channel='W')[0]
+    u = LLO.select(channel="U")[0]
+    v = LLO.select(channel="V")[0]
+    w = LLO.select(channel="W")[0]
 
-    u_filt = filtered.select(channel='U')[0]
-    v_filt = filtered.select(channel='V')[0]
-    w_filt = filtered.select(channel='W')[0]
+    u_filt = filtered.select(channel="U")[0]
+    v_filt = filtered.select(channel="V")[0]
+    w_filt = filtered.select(channel="W")[0]
 
     assert_almost_equal(u_filt.data, u.data, 2)
     assert_almost_equal(v_filt.data, v.data, 2)
@@ -138,9 +132,11 @@ def test_custom():
     """algorithm_test.FilterAlgorithm_test.test_custom()
     Tests algorithm for 10 Hz to second with custom filter coefficients.
     """
-    f = FilterAlgorithm(input_sample_period=0.1,
-                        output_sample_period=1.0,
-                        coeff_filename='etc/filter/coeffs.json')
+    f = FilterAlgorithm(
+        input_sample_period=0.1,
+        output_sample_period=1.0,
+        coeff_filename="etc/filter/coeffs.json",
+    )
 
     # generation of 10HZ_filter_sec.mseed
     # starttime = UTCDateTime('2020-01-06T00:00:00Z')
@@ -157,21 +153,20 @@ def test_custom():
     #       interval='tenhertz', type='variation')
     # LLO.write('10HZ_filter_sec.mseed')
 
-    llo = read('etc/filter/10HZ_filter_sec.mseed')
+    llo = read("etc/filter/10HZ_filter_sec.mseed")
     filtered = f.process(llo)
 
-    with open('etc/filter/LLO20200106_custom_vsec.sec', 'r') as f:
+    with open("etc/filter/LLO20200106_custom_vsec.sec", "r") as f:
         iaga = i2.StreamIAGA2002Factory(stream=f)
-        LLO = iaga.get_timeseries(starttime=None,
-                    endtime=None, observatory='LLO')
+        LLO = iaga.get_timeseries(starttime=None, endtime=None, observatory="LLO")
 
-    u = LLO.select(channel='U')[0]
-    v = LLO.select(channel='V')[0]
-    w = LLO.select(channel='W')[0]
+    u = LLO.select(channel="U")[0]
+    v = LLO.select(channel="V")[0]
+    w = LLO.select(channel="W")[0]
 
-    u_filt = filtered.select(channel='U')[0]
-    v_filt = filtered.select(channel='V')[0]
-    w_filt = filtered.select(channel='W')[0]
+    u_filt = filtered.select(channel="U")[0]
+    v_filt = filtered.select(channel="V")[0]
+    w_filt = filtered.select(channel="W")[0]
 
     assert_almost_equal(u_filt.data, u.data, 2)
     assert_almost_equal(v_filt.data, v.data, 2)
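
Each FilterAlgorithm test decimates 10 Hz data to a slower cadence by low-pass filtering and downsampling, with coefficients either built in or read from a file such as etc/filter/coeffs.json. A minimal scipy sketch of the FIR-filter-then-decimate pattern; the tap count and cutoff below are illustrative assumptions, not geomagio's shipped coefficients:

    import numpy as np
    from scipy.signal import firwin

    def filter_and_decimate(data, input_period=0.1, output_period=1.0, numtaps=91):
        step = int(output_period / input_period)  # e.g. 10 for 10 Hz -> 1 Hz
        # low-pass below the output Nyquist frequency to suppress aliasing
        taps = firwin(numtaps, cutoff=0.5 / output_period, fs=1.0 / input_period)
        filtered = np.convolve(data, taps, mode="same")
        return filtered[::step]

    ten_hertz = np.sin(np.arange(0.0, 600.0, 0.1))  # ten minutes of 10 Hz samples
    one_second = filter_and_decimate(ten_hertz)
    assert len(one_second) == len(ten_hertz) // 10
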
diff --git a/test/algorithm_test/SQDistAlgorithm_test.py b/test/algorithm_test/SQDistAlgorithm_test.py
index 4cbda67c472c262d550ca1365293291f987449a7..49d15728322f2038f5b193786ee2263ce7210ca6 100644
--- a/test/algorithm_test/SQDistAlgorithm_test.py
+++ b/test/algorithm_test/SQDistAlgorithm_test.py
@@ -4,7 +4,8 @@ from numpy.testing import (
     assert_allclose,
     assert_almost_equal,
     assert_array_less,
-    assert_equal)
+    assert_equal,
+)
 
 
 def test_sqdistalgorithm_additive1():
@@ -37,90 +38,277 @@ def test_sqdistalgorithm_additive1():
     # and assume the prediction interval (PI) only grows with trendline adjustments
     yobs1 = np.zeros(12) * np.nan
     yhat1, shat1, sighat1, _, _, _, _, _ = sq.additive(
-        yobs1, m, alpha=alpha, beta=beta, gamma=0,
-        s0=s0, l0=l0, b0=b0, sigma0=sigma0, hstep=hstep)
-
-    assert_almost_equal(yhat1, [0, 1, 0, -1, 0, 1, 0, -1, 0, 1, 0, -1],
-        err_msg='yhat1 should almost equal simple time series')
-    assert_almost_equal(shat1, [0, 1, 0, -1, 0, 1, 0, -1, 0, 1, 0, -1],
-        err_msg='shat1 should almost equal simple time series')
-    assert_almost_equal(sighat1, [0.70710678, 0.70955777, 0.71200031,
-        0.71443451, 0.71686044, 0.71927819, 0.72168784, 0.72408947, 0.72648316,
-        0.72886899, 0.73124703, 0.73361737],
-        err_msg='sighat1 should almost equal simple time series')
+        yobs1,
+        m,
+        alpha=alpha,
+        beta=beta,
+        gamma=0,
+        s0=s0,
+        l0=l0,
+        b0=b0,
+        sigma0=sigma0,
+        hstep=hstep,
+    )
+
+    assert_almost_equal(
+        yhat1,
+        [0, 1, 0, -1, 0, 1, 0, -1, 0, 1, 0, -1],
+        err_msg="yhat1 should almost equal simple time series",
+    )
+    assert_almost_equal(
+        shat1,
+        [0, 1, 0, -1, 0, 1, 0, -1, 0, 1, 0, -1],
+        err_msg="shat1 should almost equal simple time series",
+    )
+    assert_almost_equal(
+        sighat1,
+        [
+            0.70710678,
+            0.70955777,
+            0.71200031,
+            0.71443451,
+            0.71686044,
+            0.71927819,
+            0.72168784,
+            0.72408947,
+            0.72648316,
+            0.72886899,
+            0.73124703,
+            0.73361737,
+        ],
+        err_msg="sighat1 should almost equal simple time series",
+    )
 
     # predict three cycles ahead given l0 and s0, no inputs,
     # and assume PI only grows with seasonal adjustments
     yobs1 = np.zeros(12) * np.nan
     yhat1, shat1, sighat1, _, _, _, _, _ = sq.additive(
-        yobs1, m, alpha=0, beta=0, gamma=gamma,
-        s0=s0, l0=0, b0=0, sigma0=sigma0, hstep=hstep)
-
-    assert_almost_equal(yhat1, [0, 1, 0, -1, 0, 1, 0, -1, 0, 1, 0, -1],
-        err_msg='yhat1 should almost equal simple time series, 2nd run')
-    assert_almost_equal(shat1, [0, 1, 0, -1, 0, 1, 0, -1, 0, 1, 0, -1],
-        err_msg='shat1 should almost equal simple time series, 2nd run')
-    assert_almost_equal(sighat1, [0.70710678, 0.70710678, 0.70710678,
-        0.70710678, 0.74535599, 0.74535599, 0.74535599, 0.74535599, 0.78173596,
-        0.78173596, 0.78173596, 0.78173596],
-        err_msg='sighat1 should almost equal simple time series, 2nd run')
+        yobs1,
+        m,
+        alpha=0,
+        beta=0,
+        gamma=gamma,
+        s0=s0,
+        l0=0,
+        b0=0,
+        sigma0=sigma0,
+        hstep=hstep,
+    )
+
+    assert_almost_equal(
+        yhat1,
+        [0, 1, 0, -1, 0, 1, 0, -1, 0, 1, 0, -1],
+        err_msg="yhat1 should almost equal simple time series, 2nd run",
+    )
+    assert_almost_equal(
+        shat1,
+        [0, 1, 0, -1, 0, 1, 0, -1, 0, 1, 0, -1],
+        err_msg="shat1 should almost equal simple time series, 2nd run",
+    )
+    assert_almost_equal(
+        sighat1,
+        [
+            0.70710678,
+            0.70710678,
+            0.70710678,
+            0.70710678,
+            0.74535599,
+            0.74535599,
+            0.74535599,
+            0.74535599,
+            0.78173596,
+            0.78173596,
+            0.78173596,
+            0.78173596,
+        ],
+        err_msg="sighat1 should almost equal simple time series, 2nd run",
+    )
 
     # smooth three cycles' worth of zero-value input observations,
     # assuming only the trendline varies
     yobs1 = np.zeros(12)
     yhat1, shat1, sighat1, _, _, _, _, _ = sq.additive(
-        yobs1, m, alpha=alpha, beta=0, gamma=0,
-        s0=s0, l0=0, b0=0, sigma0=sigma0, hstep=hstep)
+        yobs1,
+        m,
+        alpha=alpha,
+        beta=0,
+        gamma=0,
+        s0=s0,
+        l0=0,
+        b0=0,
+        sigma0=sigma0,
+        hstep=hstep,
+    )
 
     # check output
-    assert_almost_equal(yhat1, [0, 1, -0.08333333, -1.07638889, 0.01331019,
-        1.01220100, -0.07214908, -1.06613666, 0.02270806, 1.02081573,
-        -0.06425225, -1.0588979], 8,
-        err_msg='yhat1 should almost equal simple time series, 3rd run')
-    assert_almost_equal(shat1, [0, 1, 0, -1, 0, 1, 0, -1, 0, 1, 0, -1], 8,
-        err_msg='shat1 should almost equal simple time series, 3rd run')
-    assert_almost_equal(sighat1, [0.64818122, 0.67749945, 0.62798561,
-        0.66535255, 0.61101568, 0.64444779, 0.59675623, 0.63587127, 0.58477433,
-        0.62111112, 0.57470621, 0.61505552], 8,
-        err_msg='sighat1 should almost equal simple time series, 3rd run')
+    assert_almost_equal(
+        yhat1,
+        [
+            0,
+            1,
+            -0.08333333,
+            -1.07638889,
+            0.01331019,
+            1.01220100,
+            -0.07214908,
+            -1.06613666,
+            0.02270806,
+            1.02081573,
+            -0.06425225,
+            -1.0588979,
+        ],
+        8,
+        err_msg="yhat1 should almost equal simple time series, 3rd run",
+    )
+    assert_almost_equal(
+        shat1,
+        [0, 1, 0, -1, 0, 1, 0, -1, 0, 1, 0, -1],
+        8,
+        err_msg="shat1 should almost equal simple time series, 3rd run",
+    )
+    assert_almost_equal(
+        sighat1,
+        [
+            0.64818122,
+            0.67749945,
+            0.62798561,
+            0.66535255,
+            0.61101568,
+            0.64444779,
+            0.59675623,
+            0.63587127,
+            0.58477433,
+            0.62111112,
+            0.57470621,
+            0.61505552,
+        ],
+        8,
+        err_msg="sighat1 should almost equal simple time series, 3rd run",
+    )
 
     # smooth three cycles' worth of zero-value input observations,
     # assuming only the seasonal adjustments vary
     yobs1 = np.zeros(12)
     yhat1, shat1, sighat1, _, _, _, _, _ = sq.additive(
-        yobs1, m, alpha=0, beta=0, gamma=gamma,
-        s0=s0, l0=0, b0=0, sigma0=sigma0, hstep=hstep)
+        yobs1,
+        m,
+        alpha=0,
+        beta=0,
+        gamma=gamma,
+        s0=s0,
+        l0=0,
+        b0=0,
+        sigma0=sigma0,
+        hstep=hstep,
+    )
 
     # check output
-    assert_almost_equal(yhat1, [0, 1, 0, -1, 0, 0.66666667, 0, -0.66666667,
-        0, 0.44444444, 0, -0.44444444], 8,
-        err_msg='yhat1 should almost equal simple time series, 4th run')
-    assert_almost_equal(shat1, [0, 1, 0.08333333, -0.91666667, 0, 0.66666667,
-        0.05555556, -0.61111111, 0, 0.44444444, 0.03703704, -0.40740741], 8,
-        err_msg='shat1 should almost equal simple time series, 4th run')
-    assert_almost_equal(sighat1, [0.70710678, 0.70710678, 0.70710678,
-        0.70710678, 0.70710678, 0.70710678, 0.70710678, 0.70710678, 0.70710678,
-        0.70710678, 0.70710678, 0.70710678], 8,
-        err_msg='sighat1 should almost equal simple time series, 4th run')
+    assert_almost_equal(
+        yhat1,
+        [0, 1, 0, -1, 0, 0.66666667, 0, -0.66666667, 0, 0.44444444, 0, -0.44444444],
+        8,
+        err_msg="yhat1 should almost equal simple time series, 4th run",
+    )
+    assert_almost_equal(
+        shat1,
+        [
+            0,
+            1,
+            0.08333333,
+            -0.91666667,
+            0,
+            0.66666667,
+            0.05555556,
+            -0.61111111,
+            0,
+            0.44444444,
+            0.03703704,
+            -0.40740741,
+        ],
+        8,
+        err_msg="shat1 should almost equal simple time series, 4th run",
+    )
+    assert_almost_equal(
+        sighat1,
+        [
+            0.70710678,
+            0.70710678,
+            0.70710678,
+            0.70710678,
+            0.70710678,
+            0.70710678,
+            0.70710678,
+            0.70710678,
+            0.70710678,
+            0.70710678,
+            0.70710678,
+            0.70710678,
+        ],
+        8,
+        err_msg="sighat1 should almost equal simple time series, 4th run",
+    )
 
     # smooth three cycles' worth of sinusoid input observations,
     # assuming only the seasonal adjustments vary, starting at zero
     yobs1 = np.concatenate((s0, s0, s0))
     yhat1, shat1, sighat1, _, _, _, _, _ = sq.additive(
-        yobs1, m, alpha=0, beta=0, gamma=gamma,
-        s0=s0 * 0, l0=0, b0=0, sigma0=sigma0, hstep=hstep)
+        yobs1,
+        m,
+        alpha=0,
+        beta=0,
+        gamma=gamma,
+        s0=s0 * 0,
+        l0=0,
+        b0=0,
+        sigma0=sigma0,
+        hstep=hstep,
+    )
 
     # check output
-    assert_almost_equal(yhat1, [0, 0, 0, 0, 0, 0.33333333, 0, -0.33333333,
-        0, 0.55555556, 0, -0.55555556], 8,
-        err_msg='yhat1 should almost equal simple time series, 5th run')
-    assert_almost_equal(shat1, [0, 0, -0.08333333, -0.08333333, 0, 0.33333333,
-        -0.05555556, -0.38888889, 0, 0.55555555, -0.03703704, -0.59259259], 8,
-        err_msg='shat1 should almost equal simple time series, 5th run')
-    assert_almost_equal(sighat1, [0.70710678, 0.70710678, 0.70710678,
-        0.70710678, 0.70710678, 0.70710678, 0.70710678, 0.70710678, 0.70710678,
-        0.70710678, 0.70710678, 0.70710678], 8,
-        err_msg='sighat1 should almost equal simple time series, 5th run')
+    assert_almost_equal(
+        yhat1,
+        [0, 0, 0, 0, 0, 0.33333333, 0, -0.33333333, 0, 0.55555556, 0, -0.55555556],
+        8,
+        err_msg="yhat1 should almost equal simple time series, 5th run",
+    )
+    assert_almost_equal(
+        shat1,
+        [
+            0,
+            0,
+            -0.08333333,
+            -0.08333333,
+            0,
+            0.33333333,
+            -0.05555556,
+            -0.38888889,
+            0,
+            0.55555555,
+            -0.03703704,
+            -0.59259259,
+        ],
+        8,
+        err_msg="shat1 should almost equal simple time series, 5th run",
+    )
+    assert_almost_equal(
+        sighat1,
+        [
+            0.70710678,
+            0.70710678,
+            0.70710678,
+            0.70710678,
+            0.70710678,
+            0.70710678,
+            0.70710678,
+            0.70710678,
+            0.70710678,
+            0.70710678,
+            0.70710678,
+            0.70710678,
+        ],
+        8,
+        err_msg="sighat1 should almost equal simple time series, 5th run",
+    )
 
 
 def test_sqdistalgorithm_additive2():
@@ -130,16 +318,16 @@ def test_sqdistalgorithm_additive2():
        outputs.
     """
     # set up smoothing parameters
-    m = 100                            # length of "day"
-    alpha = 1.0 / 100.0 / 3.0          # average age of level is 3 "days"
-    beta = 0                           # slope doesn't change
+    m = 100  # length of "day"
+    alpha = 1.0 / 100.0 / 3.0  # average age of level is 3 "days"
+    beta = 0  # slope doesn't change
     gamma = 1.0 / 100.0 * 100.0 / 3.0  # average age of "seasonal" correction is 3 "days"
-    phi = 1                            # don't dampen the slope
+    phi = 1  # don't dampen the slope
 
     # initialize states for smoother
-    l0 = None     # this uses the default initial level
-    b0 = 0        # this is NOT the default initial slope
-    s0 = None     # this uses default initial "seasonal" correction
+    l0 = None  # this uses the default initial level
+    b0 = 0  # this is NOT the default initial slope
+    s0 = None  # this uses the default initial "seasonal" correction
     sigma0 = [0]  # this is NOT the default initial standard deviation
 
     # create first 50 "days" at 100 samples per synthetic "day", 0-50
@@ -148,48 +336,102 @@ def test_sqdistalgorithm_additive2():
 
     # these are the old "defaults" computed when l0, b0, and s0 were None
     l0 = np.nanmean(syn000to050[0:1440])
-    b0 = (np.nanmean(syn000to050[m:2 * m]) - np.nanmean(syn000to050[0:m])) / m
+    b0 = (np.nanmean(syn000to050[m : 2 * m]) - np.nanmean(syn000to050[0:m])) / m
     s0 = [syn000to050[i] - l0 for i in range(m)]
 
     # run additive method on first 50 "days"
-    (synHat000to050, sHat000to050, sigma000to050,
-        syn050, s050, l050, b050, sigma050) = sq.additive(
-        syn000to050, m, alpha, beta, gamma, phi,
-        yhat0=None, s0=s0, l0=l0, b0=b0, sigma0=sigma0)
+    (
+        synHat000to050,
+        sHat000to050,
+        sigma000to050,
+        syn050,
+        s050,
+        l050,
+        b050,
+        sigma050,
+    ) = sq.additive(
+        syn000to050,
+        m,
+        alpha,
+        beta,
+        gamma,
+        phi,
+        yhat0=None,
+        s0=s0,
+        l0=l0,
+        b0=b0,
+        sigma0=sigma0,
+    )
 
     # The output should track the input exactly on this simple series
-    assert_equal(synHat000to050.all(), syn000to050.all(),
-        'Output of additive should match simple sinusoid exactly')
+    assert_equal(
+        synHat000to050.all(),
+        syn000to050.all(),
+        "Output of additive should match simple sinusoid exactly",
+    )
 
     # Check max, min and average
-    assert_almost_equal(np.amax(synHat000to050), 10, 8,
-        'Additive output should have a max of 10.0')
-    assert_almost_equal(np.amin(synHat000to050), -10, 8,
-        'Additive output should have a min of -10.0')
-    assert_almost_equal(np.mean(synHat000to050), 0, 8,
-        'Additive output should have average of 0')
+    assert_almost_equal(
+        np.amax(synHat000to050), 10, 8, "Additive output should have a max of 10.0"
+    )
+    assert_almost_equal(
+        np.amin(synHat000to050), -10, 8, "Additive output should have a min of -10.0"
+    )
+    assert_almost_equal(
+        np.mean(synHat000to050), 0, 8, "Additive output should have average of 0"
+    )
 
     # create 2nd set of 50 "days", 50-100
     t050to100 = np.arange(5001, 10001)
     syn050to100 = 20 + 10.0 * np.sin(t050to100 * (2 * np.pi) / 100.0)
 
     # run additive method on next 50 "days"
-    (synHat050to100, sHat050to100, sigma050to100,
-        syn100, s100, l100, b100, sigma100) = sq.additive(
-        syn050to100, m, alpha, beta, gamma, phi,
-        yhat0=syn050, s0=s050, l0=l050, b0=b050, sigma0=sigma050)
+    (
+        synHat050to100,
+        sHat050to100,
+        sigma050to100,
+        syn100,
+        s100,
+        l100,
+        b100,
+        sigma100,
+    ) = sq.additive(
+        syn050to100,
+        m,
+        alpha,
+        beta,
+        gamma,
+        phi,
+        yhat0=syn050,
+        s0=s050,
+        l0=l050,
+        b0=b050,
+        sigma0=sigma050,
+    )
 
     # Check max, min and average
-    assert_almost_equal(np.amax(synHat050to100), 30, 6,
-        'Additive output should have a max of 30.0')
-    assert_almost_equal(np.amin(synHat050to100), -8.81753802428088, 8,
-        'Additive output should have a min of -8.81753...')
-    assert_almost_equal(np.mean(synHat050to100), 19.17899833054862, 8,
-        'Additive output should have average of 19.17899...')
+    assert_almost_equal(
+        np.amax(synHat050to100), 30, 6, "Additive output should have a max of 30.0"
+    )
+    assert_almost_equal(
+        np.amin(synHat050to100),
+        -8.81753802428088,
+        8,
+        "Additive output should have a min of -8.81753...",
+    )
+    assert_almost_equal(
+        np.mean(synHat050to100),
+        19.17899833054862,
+        8,
+        "Additive output should have average of 19.17899...",
+    )
 
     # the initial part of the computed series is catching up to the synthetic
-    assert_array_less(synHat050to100[:555], syn050to100[:555],
-        'Output of additive should begin below synthetic data')
+    assert_array_less(
+        synHat050to100[:555],
+        syn050to100[:555],
+        "Output of additive should begin below synthetic data",
+    )
     # short section where the two series swap places
     assert_array_less(syn050to100[555:576], synHat050to100[555:576])
     # they swap back
@@ -197,99 +439,258 @@ def test_sqdistalgorithm_additive2():
     # swap again
     assert_array_less(syn050to100[655:689], synHat050to100[655:689])
     # after the initial lag and swaps, the two series get closer and closer
-    assert_allclose(syn050to100[475:], synHat050to100[475:], rtol=1e-1,
-        err_msg='Additive output should trend toward synthetic data, 1e-1')
-    assert_allclose(syn050to100[955:], synHat050to100[955:], rtol=1e-2,
-        err_msg='Additive output should trend toward synthetic data, 1e-2')
-    assert_allclose(syn050to100[1500:], synHat050to100[1500:], rtol=1e-3,
-        err_msg='Additive output should trend toward synthetic data, 1e-3')
-    assert_allclose(syn050to100[2100:], synHat050to100[2100:], rtol=1e-4,
-        err_msg='Additive output should trend toward synthetic data, 1e-4')
-    assert_allclose(syn050to100[2700:], synHat050to100[2700:], rtol=1e-5,
-        err_msg='Additive output should trend toward synthetic data, 1e-5')
-    assert_allclose(syn050to100[3300:], synHat050to100[3300:], rtol=1e-6,
-        err_msg='Additive output should track synthetic data, 1e-6: 50-100')
+    assert_allclose(
+        syn050to100[475:],
+        synHat050to100[475:],
+        rtol=1e-1,
+        err_msg="Additive output should trend toward synthetic data, 1e-1",
+    )
+    assert_allclose(
+        syn050to100[955:],
+        synHat050to100[955:],
+        rtol=1e-2,
+        err_msg="Additive output should trend toward synthetic data, 1e-2",
+    )
+    assert_allclose(
+        syn050to100[1500:],
+        synHat050to100[1500:],
+        rtol=1e-3,
+        err_msg="Additive output should trend toward synthetic data, 1e-3",
+    )
+    assert_allclose(
+        syn050to100[2100:],
+        synHat050to100[2100:],
+        rtol=1e-4,
+        err_msg="Additive output should trend toward synthetic data, 1e-4",
+    )
+    assert_allclose(
+        syn050to100[2700:],
+        synHat050to100[2700:],
+        rtol=1e-5,
+        err_msg="Additive output should trend toward synthetic data, 1e-5",
+    )
+    assert_allclose(
+        syn050to100[3300:],
+        synHat050to100[3300:],
+        rtol=1e-6,
+        err_msg="Additive output should track synthetic data, 1e-6: 50-100",
+    )
 
     # create 3rd set of 50 "days", 100-150
     t100to150 = np.arange(10001, 15001)
-    syn100to150 = 20 + 10.0 * np.sin(t100to150 * (2 * np.pi) / 100.) + \
-                  20 * np.sin(t100to150 * (2 * np.pi) / 5000.0)
+    syn100to150 = (
+        20
+        + 10.0 * np.sin(t100to150 * (2 * np.pi) / 100.0)
+        + 20 * np.sin(t100to150 * (2 * np.pi) / 5000.0)
+    )
 
     # run the additive method on the 3rd set of 50 "days"
-    (synHat100to150, sHat100to150, sigma100to150,
-        syn150, s150, l150, b150, sigma150) = sq.additive(
-        syn100to150, m, alpha, beta, gamma, phi,
-        yhat0=syn100, l0=l100, b0=b100, s0=s100, sigma0=sigma100)
+    (
+        synHat100to150,
+        sHat100to150,
+        sigma100to150,
+        syn150,
+        s150,
+        l150,
+        b150,
+        sigma150,
+    ) = sq.additive(
+        syn100to150,
+        m,
+        alpha,
+        beta,
+        gamma,
+        phi,
+        yhat0=syn100,
+        l0=l100,
+        b0=b100,
+        s0=s100,
+        sigma0=sigma100,
+    )
 
     # Check max, min and average
-    assert_almost_equal(np.amax(synHat100to150), 49.758884882080558, 8,
-        'Additive output should have a max of 49.75888...')
-    assert_almost_equal(np.amin(synHat100to150), -9.7579516919427647, 8,
-        'Additive output should have a min of -9.7579...')
-    assert_almost_equal(np.mean(synHat100to150), 20.059589538984323, 8,
-        'Additive output should have average of 20.0595...')
+    assert_almost_equal(
+        np.amax(synHat100to150),
+        49.758884882080558,
+        8,
+        "Additive output should have a max of 49.75888...",
+    )
+    assert_almost_equal(
+        np.amin(synHat100to150),
+        -9.7579516919427647,
+        8,
+        "Additive output should have a min of -9.7579...",
+    )
+    assert_almost_equal(
+        np.mean(synHat100to150),
+        20.059589538984323,
+        8,
+        "Additive output should have average of 20.0595...",
+    )
 
     # A couple of sections run pretty close together here
-    assert_allclose(syn100to150[800:1900], synHat100to150[800:1900], rtol=1e-1,
-        err_msg='Additive output should track synthetic data: day 100-150')
+    assert_allclose(
+        syn100to150[800:1900],
+        synHat100to150[800:1900],
+        rtol=1e-1,
+        err_msg="Additive output should track synthetic data: day 100-150",
+    )
 
     # create 4th set of 50 "days", 150-200
     t150to200 = np.arange(15001, 20001)
-    syn150to200 = 20 + (10.0 * np.sin(t150to200 * (2 * np.pi) / 100.0)) * \
-                  (1 * np.cos(t150to200 * (2 * np.pi) / 5000.0))
+    syn150to200 = 20 + (10.0 * np.sin(t150to200 * (2 * np.pi) / 100.0)) * (
+        1 * np.cos(t150to200 * (2 * np.pi) / 5000.0)
+    )
 
     # run the additive method on the 4th set of 50 "days"
-    (synHat150to200, sHat150to200, sigma150to200,
-        syn200, s200, l200, b200, sigma200) = sq.additive(
-        syn150to200, m, alpha, beta, gamma, phi,
-        yhat0=syn150, l0=l150, b0=b150, s0=s150, sigma0=sigma150)
+    (
+        synHat150to200,
+        sHat150to200,
+        sigma150to200,
+        syn200,
+        s200,
+        l200,
+        b200,
+        sigma200,
+    ) = sq.additive(
+        syn150to200,
+        m,
+        alpha,
+        beta,
+        gamma,
+        phi,
+        yhat0=syn150,
+        l0=l150,
+        b0=b150,
+        s0=s150,
+        sigma0=sigma150,
+    )
 
     # Check max, min and average
-    assert_almost_equal(np.amax(synHat150to200), 29.573654766341747, 8,
-        'Additive output should have a max of 29.5736...')
-    assert_almost_equal(np.amin(synHat150to200), 7.9430807703401669, 8,
-        'Additive output should have a min of 7.943...')
-    assert_almost_equal(np.mean(synHat150to200), 19.911560325896119, 8,
-        'Additive output should have average of 19.911...')
+    assert_almost_equal(
+        np.amax(synHat150to200),
+        29.573654766341747,
+        8,
+        "Additive output should have a max of 29.5736...",
+    )
+    assert_almost_equal(
+        np.amin(synHat150to200),
+        7.9430807703401669,
+        8,
+        "Additive output should have a min of 7.943...",
+    )
+    assert_almost_equal(
+        np.mean(synHat150to200),
+        19.911560325896119,
+        8,
+        "Additive output should have average of 19.911...",
+    )
 
     # create 5th set of 50 "days", 200-250
     t200to250 = np.arange(20001, 25001)
-    syn200to250 = 20 + ((10.0 * np.sin(t200to250 * (2 * np.pi) / 100.0)) *
-        (1 * np.cos(t200to250 * (2 * np.pi) / 5000.0)) +
-        20 * np.sin(t200to250 * (2 * np.pi) / 5000.0))
+    syn200to250 = 20 + (
+        (10.0 * np.sin(t200to250 * (2 * np.pi) / 100.0))
+        * (1 * np.cos(t200to250 * (2 * np.pi) / 5000.0))
+        + 20 * np.sin(t200to250 * (2 * np.pi) / 5000.0)
+    )
 
     # run the additive method on the 5th set of 50 "days"
-    (synHat200to250, sHat200to250, sigma200to250,
-        syn250, s250, l250, b250, sigma250) = sq.additive(
-        syn200to250, m, alpha, beta, gamma, phi,
-        yhat0=syn200, l0=l200, b0=b200, s0=s200, sigma0=sigma200)
+    (
+        synHat200to250,
+        sHat200to250,
+        sigma200to250,
+        syn250,
+        s250,
+        l250,
+        b250,
+        sigma250,
+    ) = sq.additive(
+        syn200to250,
+        m,
+        alpha,
+        beta,
+        gamma,
+        phi,
+        yhat0=syn200,
+        l0=l200,
+        b0=b200,
+        s0=s200,
+        sigma0=sigma200,
+    )
 
     # Check max, min and average
-    assert_almost_equal(np.amax(synHat200to250), 43.417782188651529, 8,
-        'Additive output should have a max of 43.417...')
-    assert_almost_equal(np.amin(synHat200to250), -3.4170071669726791, 8,
-        'Additive output should have a min of -3.417...')
-    assert_almost_equal(np.mean(synHat200to250), 20.09191068952186, 8,
-        'Additive output should have average of 20.0919...')
+    assert_almost_equal(
+        np.amax(synHat200to250),
+        43.417782188651529,
+        8,
+        "Additive output should have a max of 43.417...",
+    )
+    assert_almost_equal(
+        np.amin(synHat200to250),
+        -3.4170071669726791,
+        8,
+        "Additive output should have a min of -3.417...",
+    )
+    assert_almost_equal(
+        np.mean(synHat200to250),
+        20.09191068952186,
+        8,
+        "Additive output should have average of 20.0919...",
+    )
 
     # create 5th set of 50 "days", 250-300
     t250to300 = np.arange(25001, 30001)
     np.random.seed(123456789)
-    syn250to300 = 20 + ((10.0 * np.sin(t250to300 * (2 * np.pi) / 100.0)) *
-        (1 * np.cos(t250to300 * (2 * np.pi) / 5000.0)) +
-        20 * np.sin(t250to300 * (2 * np.pi) / 5000.0)) + \
-        5 * np.random.randn(5000)
+    syn250to300 = (
+        20
+        + (
+            (10.0 * np.sin(t250to300 * (2 * np.pi) / 100.0))
+            * (1 * np.cos(t250to300 * (2 * np.pi) / 5000.0))
+            + 20 * np.sin(t250to300 * (2 * np.pi) / 5000.0)
+        )
+        + 5 * np.random.randn(5000)
+    )
 
     # run the additive method on the 6th set of 50 "days"
-    (synHat250to300, sHat250to300, sigma250to300,
-        syn300, s300, l300, b300, sigma300) = sq.additive(
-        syn250to300, m, alpha, beta, gamma, phi,
-        yhat0=syn250, l0=l250, b0=b250, s0=s250, sigma0=sigma250)
+    (
+        synHat250to300,
+        sHat250to300,
+        sigma250to300,
+        syn300,
+        s300,
+        l300,
+        b300,
+        sigma300,
+    ) = sq.additive(
+        syn250to300,
+        m,
+        alpha,
+        beta,
+        gamma,
+        phi,
+        yhat0=syn250,
+        l0=l250,
+        b0=b250,
+        s0=s250,
+        sigma0=sigma250,
+    )
 
     # Check max, min and average
-    assert_almost_equal(np.amax(synHat250to300), 49.3099797861343534, 8,
-        'Additive output should have a max of 49.309...')
-    assert_almost_equal(np.amin(synHat250to300), -8.7531069723345301, 8,
-        'Additive output should have a min of -8.783...')
-    assert_almost_equal(np.mean(synHat250to300), 20.006498585824623, 8,
-        'Additive output should have average of 20.006...')
+    assert_almost_equal(
+        np.amax(synHat250to300),
+        49.3099797861343534,
+        8,
+        "Additive output should have a max of 49.309...",
+    )
+    assert_almost_equal(
+        np.amin(synHat250to300),
+        -8.7531069723345301,
+        8,
+        "Additive output should have a min of -8.783...",
+    )
+    assert_almost_equal(
+        np.mean(synHat250to300),
+        20.006498585824623,
+        8,
+        "Additive output should have average of 20.006...",
+    )
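
The sq.additive calls above exercise Holt-Winters-style exponential smoothing: a level l, a slope b damped by phi, m seasonal corrections s, and a running deviation sigma that drives the prediction intervals. A worked sketch of a single additive update in standard error-correction form; sq.additive's real state handling (hstep forecasting, sigma updates, NaN gaps) is richer than this:

    import numpy as np

    def additive_step(y, l, b, s, alpha, beta, gamma, phi=1.0):
        yhat = l + phi * b + s[0]  # one-step-ahead forecast
        resid = y - yhat  # forecast error for this sample
        l_new = l + phi * b + alpha * resid  # correct the level
        b_new = phi * b + alpha * beta * resid  # correct the (damped) slope
        s_new = np.roll(s, -1)  # rotate the seasonal buffer
        s_new[-1] = s[0] + gamma * resid  # correct this phase's seasonal term
        return yhat, l_new, b_new, s_new

    # one cycle of the m=4 sinusoid from test_sqdistalgorithm_additive1;
    # with a perfect seasonal state the forecasts match the observations
    s = np.array([0.0, 1.0, 0.0, -1.0])
    l, b = 0.0, 0.0
    for y in [0.0, 1.0, 0.0, -1.0]:
        yhat, l, b, s = additive_step(y, l, b, s, alpha=0.3, beta=0.0, gamma=0.1)
        assert abs(yhat - y) < 1e-12
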
diff --git a/test/algorithm_test/XYZAlgorithm_test.py b/test/algorithm_test/XYZAlgorithm_test.py
index 9e4870ed93f4bd08443be51fb5bb2f40c1c23f78..67be143a880a3ed11b5de59c0d711e0773c205d7 100644
--- a/test/algorithm_test/XYZAlgorithm_test.py
+++ b/test/algorithm_test/XYZAlgorithm_test.py
@@ -11,14 +11,14 @@ def test_xyzalgorithm_process():
 
     confirms that a converted stream contains the correct outputchannels.
     """
-    algorithm = XYZAlgorithm('obs', 'geo')
+    algorithm = XYZAlgorithm("obs", "geo")
     timeseries = Stream()
-    timeseries += __create_trace('H', [1, 1])
-    timeseries += __create_trace('E', [1, 1])
-    timeseries += __create_trace('Z', [1, 1])
-    timeseries += __create_trace('F', [1, 1])
+    timeseries += __create_trace("H", [1, 1])
+    timeseries += __create_trace("E", [1, 1])
+    timeseries += __create_trace("Z", [1, 1])
+    timeseries += __create_trace("F", [1, 1])
     outputstream = algorithm.process(timeseries)
-    assert_equal(outputstream[0].stats.channel, 'X')
+    assert_equal(outputstream[0].stats.channel, "X")
 
 
 def test_xyzalgorithm_channels():
@@ -27,9 +27,9 @@ def test_xyzalgorithm_channels():
     confirms that the input/output channels are correct for the given
     informat/outformat during instantiation.
     """
-    algorithm = XYZAlgorithm('obs', 'geo')
-    inchannels = ['H', 'E', 'Z', 'F']
-    outchannels = ['X', 'Y', 'Z', 'F']
+    algorithm = XYZAlgorithm("obs", "geo")
+    inchannels = ["H", "E", "Z", "F"]
+    outchannels = ["X", "Y", "Z", "F"]
     assert_equal(algorithm.get_input_channels(), inchannels)
     assert_equal(algorithm.get_output_channels(), outchannels)
 
@@ -40,13 +40,13 @@ def test_xyzalgorithm_limited_channels():
     confirms that only the required channels are necessary for processing
     i.e. 'H' and 'E' are only needed to get 'X' and 'Y' without 'Z' or 'F'
     """
-    algorithm = XYZAlgorithm('obs', 'mag')
+    algorithm = XYZAlgorithm("obs", "mag")
     count = 5
     timeseries = Stream()
-    timeseries += __create_trace('H', [2] * count)
-    timeseries += __create_trace('E', [3] * count)
+    timeseries += __create_trace("H", [2] * count)
+    timeseries += __create_trace("E", [3] * count)
     outstream = algorithm.process(timeseries)
-    ds = outstream.select(channel='D')
+    ds = outstream.select(channel="D")
     # there is 1 trace
     assert_equal(len(ds), 1)
     d = ds[0]
@@ -64,18 +64,22 @@ def test_xyzalgorithm_uneccesary_channel_empty():
     or an empty 'F' channel. This also makes sure the 'Z' and 'F' channels
     are passed without any modification.
     """
-    algorithm = XYZAlgorithm('obs', 'mag')
+    algorithm = XYZAlgorithm("obs", "mag")
     timeseries = Stream()
-    timeseries += __create_trace('H', [1, 1])
-    timeseries += __create_trace('E', [1, 1])
-    timeseries += __create_trace('Z', [1, np.NaN])
-    timeseries += __create_trace('F', [np.NaN, np.NaN])
+    timeseries += __create_trace("H", [1, 1])
+    timeseries += __create_trace("E", [1, 1])
+    timeseries += __create_trace("Z", [1, np.NaN])
+    timeseries += __create_trace("F", [np.NaN, np.NaN])
     outstream = algorithm.process(timeseries)
-    assert_equal(outstream.select(channel='Z')[0].data.all(),
-        timeseries.select(channel='Z')[0].data.all())
-    assert_equal(outstream.select(channel='F')[0].data.all(),
-        timeseries.select(channel='F')[0].data.all())
-    ds = outstream.select(channel='D')
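+    # note: ndarray.all() reduces each array to a single boolean, so the
+    # asserts below compare truthiness rather than element-wise equality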
+    assert_equal(
+        outstream.select(channel="Z")[0].data.all(),
+        timeseries.select(channel="Z")[0].data.all(),
+    )
+    assert_equal(
+        outstream.select(channel="F")[0].data.all(),
+        timeseries.select(channel="F")[0].data.all(),
+    )
+    ds = outstream.select(channel="D")
     # there is 1 trace
     assert_equal(len(ds), 1)
     d = ds[0]
diff --git a/test/edge_test/EdgeFactory_test.py b/test/edge_test/EdgeFactory_test.py
index 3b3543936c3e28ed37d55d5c229f1db6f8c9bfc4..433ddd75a2935c9b4f5beefe5bebbc92ddfef459 100644
--- a/test/edge_test/EdgeFactory_test.py
+++ b/test/edge_test/EdgeFactory_test.py
@@ -9,14 +9,14 @@ def test__get_edge_network():
     """edge_test.EdgeFactory_test.test__get_edge_network()
     """
     # _get_edge_network should always return NT for use by USGS geomag
-    assert_equal(EdgeFactory()._get_edge_network(' ', ' ', ' ', ' '), 'NT')
+    assert_equal(EdgeFactory()._get_edge_network(" ", " ", " ", " "), "NT")
 
 
 def test__get_edge_station():
     """edge_test.EdgeFactory_test.test__get_edge_station()
     """
     # _get_edge_station will return the observatory code passed in.
-    assert_equal(EdgeFactory()._get_edge_station('BOU', ' ', ' ', ' '), 'BOU')
+    assert_equal(EdgeFactory()._get_edge_station("BOU", " ", " ", " "), "BOU")
 
 
 def test__get_edge_channel():
@@ -24,22 +24,14 @@ def test__get_edge_channel():
     """
     # Call private function _get_edge_channel, make certain
     # it gets back the appropriate 2 character code.
-    assert_equal(EdgeFactory()._get_edge_channel('', 'D', '', 'minute'),
-            'MVD')
-    assert_equal(EdgeFactory()._get_edge_channel('', 'E', '', 'minute'),
-            'MVE')
-    assert_equal(EdgeFactory()._get_edge_channel('', 'F', '', 'minute'),
-            'MSF')
-    assert_equal(EdgeFactory()._get_edge_channel('', 'H', '', 'minute'),
-            'MVH')
-    assert_equal(EdgeFactory()._get_edge_channel('', 'DIST', '', 'minute'),
-            'MDT')
-    assert_equal(EdgeFactory()._get_edge_channel('', 'DST', '', 'minute'),
-            'MGD')
-    assert_equal(EdgeFactory()._get_edge_channel('', 'E-E', '', 'minute'),
-            'MQE')
-    assert_equal(EdgeFactory()._get_edge_channel('', 'E-N', '', 'minute'),
-            'MQN')
+    assert_equal(EdgeFactory()._get_edge_channel("", "D", "", "minute"), "MVD")
+    assert_equal(EdgeFactory()._get_edge_channel("", "E", "", "minute"), "MVE")
+    assert_equal(EdgeFactory()._get_edge_channel("", "F", "", "minute"), "MSF")
+    assert_equal(EdgeFactory()._get_edge_channel("", "H", "", "minute"), "MVH")
+    assert_equal(EdgeFactory()._get_edge_channel("", "DIST", "", "minute"), "MDT")
+    assert_equal(EdgeFactory()._get_edge_channel("", "DST", "", "minute"), "MGD")
+    assert_equal(EdgeFactory()._get_edge_channel("", "E-E", "", "minute"), "MQE")
+    assert_equal(EdgeFactory()._get_edge_channel("", "E-N", "", "minute"), "MQN")
 
 
 def test__get_edge_location():
@@ -47,21 +39,18 @@ def test__get_edge_location():
     """
     # Call _get_edge_location, make certain it returns the correct edge
     # location code.
-    assert_equal(EdgeFactory()._get_edge_location(
-            '', '', 'variation', ''), 'R0')
-    assert_equal(EdgeFactory()._get_edge_location(
-            '', '', 'quasi-definitive', ''), 'Q0')
-    assert_equal(EdgeFactory()._get_edge_location(
-            '', '', 'definitive', ''), 'D0')
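+    # the location code prefix encodes the data type: R (variation),
+    # Q (quasi-definitive), D (definitive)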
+    assert_equal(EdgeFactory()._get_edge_location("", "", "variation", ""), "R0")
+    assert_equal(EdgeFactory()._get_edge_location("", "", "quasi-definitive", ""), "Q0")
+    assert_equal(EdgeFactory()._get_edge_location("", "", "definitive", ""), "D0")
 
 
 def test__get_interval_code():
     """edge_test.EdgeFactory_test.test__get_interval_code()
     """
-    assert_equal(EdgeFactory()._get_interval_code('day'), 'D')
-    assert_equal(EdgeFactory()._get_interval_code('hour'), 'H')
-    assert_equal(EdgeFactory()._get_interval_code('minute'), 'M')
-    assert_equal(EdgeFactory()._get_interval_code('second'), 'S')
+    assert_equal(EdgeFactory()._get_interval_code("day"), "D")
+    assert_equal(EdgeFactory()._get_interval_code("hour"), "H")
+    assert_equal(EdgeFactory()._get_interval_code("minute"), "M")
+    assert_equal(EdgeFactory()._get_interval_code("second"), "S")
 
 
 def test__set_metadata():
@@ -72,9 +61,9 @@ def test__set_metadata():
     trace1 = Trace()
     trace2 = Trace()
     stream = Stream(traces=[trace1, trace2])
-    EdgeFactory()._set_metadata(stream, 'BOU', 'H', 'variation', 'minute')
-    assert_equal(stream[0].stats['channel'], 'H')
-    assert_equal(stream[1].stats['channel'], 'H')
+    EdgeFactory()._set_metadata(stream, "BOU", "H", "variation", "minute")
+    assert_equal(stream[0].stats["channel"], "H")
+    assert_equal(stream[1].stats["channel"], "H")
 
 
 # def test_get_timeseries():
@@ -83,11 +72,22 @@ def dont_get_timeseries():
     # Call get_timeseries, and test stats for confirmation that it came back.
     # TODO, need to pass in host and port from a config file, or manually
     #   change for a single test.
-    edge_factory = EdgeFactory(host='TODO', port='TODO')
+    edge_factory = EdgeFactory(host="TODO", port="TODO")
     timeseries = edge_factory.get_timeseries(
-        UTCDateTime(2015, 3, 1, 0, 0, 0), UTCDateTime(2015, 3, 1, 1, 0, 0),
-        'BOU', ('H'), 'variation', 'minute')
-    assert_equal(timeseries.select(channel='H')[0].stats.station,
-        'BOU', 'Expect timeseries to have stats')
-    assert_equal(timeseries.select(channel='H')[0].stats.channel,
-        'H', 'Expect timeseries stats channel to be equal to H')
+        UTCDateTime(2015, 3, 1, 0, 0, 0),
+        UTCDateTime(2015, 3, 1, 1, 0, 0),
+        "BOU",
+        ("H"),
+        "variation",
+        "minute",
+    )
+    assert_equal(
+        timeseries.select(channel="H")[0].stats.station,
+        "BOU",
+        "Expect timeseries to have stats",
+    )
+    assert_equal(
+        timeseries.select(channel="H")[0].stats.channel,
+        "H",
+        "Expect timeseries stats channel to be equal to H",
+    )
diff --git a/test/edge_test/MiniSeedFactory_test.py b/test/edge_test/MiniSeedFactory_test.py
index b37c30eaed3f1b05a0eeb1d5dd827b01506e0092..1483489f03303b7367c2c1625251f21e25ab379e 100644
--- a/test/edge_test/MiniSeedFactory_test.py
+++ b/test/edge_test/MiniSeedFactory_test.py
@@ -11,18 +11,14 @@ def test__get_edge_network():
     """edge_test.MiniSeedFactory_test.test__get_edge_network()
     """
     # _get_edge_network should always return NT for use by USGS geomag
-    assert_equal(
-            MiniSeedFactory()._get_edge_network(' ', ' ', ' ', ' '),
-            'NT')
+    assert_equal(MiniSeedFactory()._get_edge_network(" ", " ", " ", " "), "NT")
 
 
 def test__get_edge_station():
     """edge_test.MiniSeedFactory_test.test__get_edge_station()
     """
     # _get_edge_station will return the observatory code passed in.
-    assert_equal(
-            MiniSeedFactory()._get_edge_station('BOU', ' ', ' ', ' '),
-            'BOU')
+    assert_equal(MiniSeedFactory()._get_edge_station("BOU", " ", " ", " "), "BOU")
 
 
 def test__get_edge_channel():
@@ -31,15 +27,15 @@ def test__get_edge_channel():
     # Call private function _get_edge_channel, make certain
     # it gets back the appropriate 2 character code.
     factory = MiniSeedFactory()
-    assert_equal(factory._get_edge_channel('', 'D', '', 'minute'), 'UFD')
-    assert_equal(factory._get_edge_channel('', 'U', '', 'minute'), 'UFU')
-    assert_equal(factory._get_edge_channel('', 'F', '', 'minute'), 'UFF')
-    assert_equal(factory._get_edge_channel('', 'H', '', 'minute'), 'UFH')
-    assert_equal(factory._get_edge_channel('', 'BEU', '', 'minute'), 'BEU')
-    assert_equal(factory._get_edge_channel('', 'Dst4', '', 'minute'), 'UX4')
-    assert_equal(factory._get_edge_channel('', 'Dst3', '', 'minute'), 'UX3')
-    assert_equal(factory._get_edge_channel('', 'E-E', '', 'minute'), 'UQE')
-    assert_equal(factory._get_edge_channel('', 'E-N', '', 'minute'), 'UQN')
+    assert_equal(factory._get_edge_channel("", "D", "", "minute"), "UFD")
+    assert_equal(factory._get_edge_channel("", "U", "", "minute"), "UFU")
+    assert_equal(factory._get_edge_channel("", "F", "", "minute"), "UFF")
+    assert_equal(factory._get_edge_channel("", "H", "", "minute"), "UFH")
+    assert_equal(factory._get_edge_channel("", "BEU", "", "minute"), "BEU")
+    assert_equal(factory._get_edge_channel("", "Dst4", "", "minute"), "UX4")
+    assert_equal(factory._get_edge_channel("", "Dst3", "", "minute"), "UX3")
+    assert_equal(factory._get_edge_channel("", "E-E", "", "minute"), "UQE")
+    assert_equal(factory._get_edge_channel("", "E-N", "", "minute"), "UQN")
 
 
 def test__get_edge_location():
@@ -47,22 +43,21 @@ def test__get_edge_location():
     """
     # Call _get_edge_location, make certain it returns the correct edge
     # location code.
-    assert_equal(MiniSeedFactory()._get_edge_location(
-            '', '', 'variation', ''), 'R0')
-    assert_equal(MiniSeedFactory()._get_edge_location(
-            '', '', 'quasi-definitive', ''), 'Q0')
-    assert_equal(MiniSeedFactory()._get_edge_location(
-            '', '', 'definitive', ''), 'D0')
+    assert_equal(MiniSeedFactory()._get_edge_location("", "", "variation", ""), "R0")
+    assert_equal(
+        MiniSeedFactory()._get_edge_location("", "", "quasi-definitive", ""), "Q0"
+    )
+    assert_equal(MiniSeedFactory()._get_edge_location("", "", "definitive", ""), "D0")
 
 
 def test__get_interval_code():
     """edge_test.MiniSeedFactory_test.test__get_interval_code()
     """
-    assert_equal(MiniSeedFactory()._get_interval_code('day'), 'P')
-    assert_equal(MiniSeedFactory()._get_interval_code('hour'), 'R')
-    assert_equal(MiniSeedFactory()._get_interval_code('minute'), 'U')
-    assert_equal(MiniSeedFactory()._get_interval_code('second'), 'L')
-    assert_equal(MiniSeedFactory()._get_interval_code('tenhertz'), 'B')
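+    # unlike EdgeFactory's D/H/M/S codes, these appear to be SEED-style band
+    # codes (B ~ 10Hz, L ~ 1Hz, U/R/P for minute/hour/day)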
+    assert_equal(MiniSeedFactory()._get_interval_code("day"), "P")
+    assert_equal(MiniSeedFactory()._get_interval_code("hour"), "R")
+    assert_equal(MiniSeedFactory()._get_interval_code("minute"), "U")
+    assert_equal(MiniSeedFactory()._get_interval_code("second"), "L")
+    assert_equal(MiniSeedFactory()._get_interval_code("tenhertz"), "B")
 
 
 class MockMiniSeedInputClient(object):
@@ -80,23 +75,22 @@ class MockMiniSeedInputClient(object):
 def test__put_timeseries():
     """edge_test.MiniSeedFactory_test.test__put_timeseries()
     """
-    trace1 = __create_trace([0, 1, 2, 3, numpy.nan, 5, 6, 7, 8, 9],
-            channel='H')
+    trace1 = __create_trace([0, 1, 2, 3, numpy.nan, 5, 6, 7, 8, 9], channel="H")
     client = MockMiniSeedInputClient()
     factory = MiniSeedFactory()
     factory.write_client = client
-    factory.put_timeseries(Stream(trace1), channels=('H'))
+    factory.put_timeseries(Stream(trace1), channels=("H"))
     # put timeseries should call close when done
     assert_equal(client.close_called, True)
     # trace should be split into 2 blocks at the gap
     sent = client.last_sent
     assert_equal(len(sent), 2)
     # first trace includes samples [0...3]; the NaN at index 4 starts the gap
-    assert_equal(sent[0].stats.channel, 'LFH')
+    assert_equal(sent[0].stats.channel, "LFH")
     assert_equal(len(sent[0]), 4)
     assert_equal(sent[0].stats.endtime, trace1.stats.starttime + 3)
     # second trace includes [5...9]
-    assert_equal(sent[1].stats.channel, 'LFH')
+    assert_equal(sent[1].stats.channel, "LFH")
     assert_equal(len(sent[1]), 5)
     assert_equal(sent[1].stats.starttime, trace1.stats.starttime + 5)
     assert_equal(sent[1].stats.endtime, trace1.stats.endtime)
@@ -110,9 +104,9 @@ def test__set_metadata():
     trace1 = Trace()
     trace2 = Trace()
     stream = Stream(traces=[trace1, trace2])
-    MiniSeedFactory()._set_metadata(stream, 'BOU', 'H', 'variation', 'minute')
-    assert_equal(stream[0].stats['channel'], 'H')
-    assert_equal(stream[1].stats['channel'], 'H')
+    MiniSeedFactory()._set_metadata(stream, "BOU", "H", "variation", "minute")
+    assert_equal(stream[0].stats["channel"], "H")
+    assert_equal(stream[1].stats["channel"], "H")
 
 
 # def test_get_timeseries():
@@ -121,23 +115,36 @@ def dont_get_timeseries():
     # Call get_timeseries, and test stats for confirmation that it came back.
     # TODO, need to pass in host and port from a config file, or manually
     #   change for a single test.
-    edge_factory = MiniSeedFactory(host='TODO', port='TODO')
+    edge_factory = MiniSeedFactory(host="TODO", port="TODO")
     timeseries = edge_factory.get_timeseries(
-        UTCDateTime(2015, 3, 1, 0, 0, 0), UTCDateTime(2015, 3, 1, 1, 0, 0),
-        'BOU', ('H'), 'variation', 'minute')
-    assert_equal(timeseries.select(channel='H')[0].stats.station,
-        'BOU', 'Expect timeseries to have stats')
-    assert_equal(timeseries.select(channel='H')[0].stats.channel,
-        'H', 'Expect timeseries stats channel to be equal to H')
-
-
-def __create_trace(data,
-        network='NT',
-        station='BOU',
-        channel='H',
-        location='R0',
-        data_interval='second',
-        data_type='interval'):
+        UTCDateTime(2015, 3, 1, 0, 0, 0),
+        UTCDateTime(2015, 3, 1, 1, 0, 0),
+        "BOU",
+        ("H"),
+        "variation",
+        "minute",
+    )
+    assert_equal(
+        timeseries.select(channel="H")[0].stats.station,
+        "BOU",
+        "Expect timeseries to have stats",
+    )
+    assert_equal(
+        timeseries.select(channel="H")[0].stats.channel,
+        "H",
+        "Expect timeseries stats channel to be equal to H",
+    )
+
+
+def __create_trace(
+    data,
+    network="NT",
+    station="BOU",
+    channel="H",
+    location="R0",
+    data_interval="second",
+    data_type="interval",
+):
     """
     Utility to create a trace containing the given numpy array.
 
@@ -152,7 +159,7 @@ def __create_trace(data,
         Stream containing the channel.
     """
     stats = Stats()
-    stats.starttime = UTCDateTime('2019-12-01')
+    stats.starttime = UTCDateTime("2019-12-01")
     stats.delta = TimeseriesUtility.get_delta_from_interval(data_interval)
     stats.channel = channel
     stats.npts = len(data)
diff --git a/test/edge_test/RawInputClient_test.py b/test/edge_test/RawInputClient_test.py
index 4d69005faad2792e97e2e2ed8ffed82dc2cb9b48..d9676b526756357230b365df51b2bd1ef1e27543 100644
--- a/test/edge_test/RawInputClient_test.py
+++ b/test/edge_test/RawInputClient_test.py
@@ -19,30 +19,39 @@ class MockRawInputClient(RawInputClient):
 def test_raw_input_client():
     """edge_test.RawInputClient_test.test_raw_input_client()
     """
-    network = 'NT'
-    station = 'BOU'
-    channel = 'MVH'
-    location = 'R0'
+    network = "NT"
+    station = "BOU"
+    channel = "MVH"
+    location = "R0"
     data = [0, 1, 2, 3, 4, 5]
-    starttime = UTCDateTime('2019-12-01')
+    starttime = UTCDateTime("2019-12-01")
 
     trace = Trace(
-            numpy.array(data, dtype=numpy.float64),
-            Stats({
-                'channel': channel,
-                'delta': 60.0,
-                'location': location,
-                'network': network,
-                'npts': len(data),
-                'starttime': starttime,
-                'station': station
-            }))
-
-    client = MockRawInputClient(tag='tag', host='host', port='port',
-            station=station, channel=channel,
-            location=location, network=network)
+        numpy.array(data, dtype=numpy.float64),
+        Stats(
+            {
+                "channel": channel,
+                "delta": 60.0,
+                "location": location,
+                "network": network,
+                "npts": len(data),
+                "starttime": starttime,
+                "station": station,
+            }
+        ),
+    )
+
+    client = MockRawInputClient(
+        tag="tag",
+        host="host",
+        port="port",
+        station=station,
+        channel=channel,
+        location=location,
+        network=network,
+    )
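+    # traces are converted to integer values (via _convert_trace_to_int)
+    # before being sent to the raw input client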
     trace_send = EdgeFactory()._convert_trace_to_int(trace.copy())
-    client.send_trace('minute', trace_send)
+    client.send_trace("minute", trace_send)
     # verify data was sent
     assert_equal(len(client.last_send), 1)
 
@@ -50,12 +59,18 @@ def test_raw_input_client():
 def test__get_tag():
     """edge_test.RawInputClient_test.test_raw_input_client()
     """
-    network = 'NT'
-    station = 'BOU'
-    channel = 'MVH'
-    location = 'R0'
-    client = MockRawInputClient(tag='tag', host='host', port='port',
-            station=station, channel=channel,
-            location=location, network=network)
+    network = "NT"
+    station = "BOU"
+    channel = "MVH"
+    location = "R0"
+    client = MockRawInputClient(
+        tag="tag",
+        host="host",
+        port="port",
+        station=station,
+        channel=channel,
+        location=location,
+        network=network,
+    )
     tag_send = client._get_tag()
     assert_equal(tag_send is not None, True)
diff --git a/test/iaga2002_test/IAGA2002Factory_test.py b/test/iaga2002_test/IAGA2002Factory_test.py
index 95838696079d8a494cbe01131718884fa0de8d84..7c1531dbfea3220f96fd959c78268837041f66dc 100644
--- a/test/iaga2002_test/IAGA2002Factory_test.py
+++ b/test/iaga2002_test/IAGA2002Factory_test.py
@@ -11,5 +11,5 @@ def test_parse_empty():
     if the data being parsed is empty.
     """
     parser = IAGA2002Factory()
-    stream = parser.parse_string('')
+    stream = parser.parse_string("")
     assert_equal(len(stream), 0)
diff --git a/test/iaga2002_test/IAGA2002Parser_test.py b/test/iaga2002_test/IAGA2002Parser_test.py
index be8148002162ab068c0078b5da520afb95990472..8cfde259f8eca7ba9997d58eeb80d0f744e5be78 100644
--- a/test/iaga2002_test/IAGA2002Parser_test.py
+++ b/test/iaga2002_test/IAGA2002Parser_test.py
@@ -4,8 +4,7 @@ from numpy.testing import assert_equal
 from geomagio.iaga2002 import IAGA2002Parser
 
 
-IAGA2002_EXAMPLE = \
-""" Format                 IAGA-2002                                    |
+IAGA2002_EXAMPLE = """ Format                 IAGA-2002                                    |
  Source of Data         United States Geological Survey (USGS)       |
  Station Name           Boulder Test                                 |
  IAGA CODE              BDT                                          |
@@ -49,10 +48,10 @@ def test__merge_comments():
     only the middle line ending in a period.
     Verify, the first and second line are merged.
     """
-    comments = ['line 1', 'line 2.', 'line 3']
+    comments = ["line 1", "line 2.", "line 3"]
     assert_equal(
-        IAGA2002Parser()._merge_comments(comments),
-        ['line 1 line 2.', 'line 3'])
+        IAGA2002Parser()._merge_comments(comments), ["line 1 line 2.", "line 3"]
+    )
 
 
 def test__parse_header():
@@ -62,9 +61,10 @@ def test__parse_header():
     Verify the header name and value are split at the correct column.
     """
     parser = IAGA2002Parser()
-    parser._parse_header(' Format                 ' +
-            'IAGA-2002                                    |')
-    assert_equal(parser.headers['Format'], 'IAGA-2002')
+    parser._parse_header(
+        " Format                 " + "IAGA-2002                                    |"
+    )
+    assert_equal(parser.headers["Format"], "IAGA-2002")
 
 
 def test__parse_comment():
@@ -74,11 +74,13 @@ def test__parse_comment():
     Verify the comment delimiters are removed.
     """
     parser = IAGA2002Parser()
-    parser._parse_comment(' # Go to www.intermagnet.org for details on' +
-            ' obtaining this product.  |')
-    assert_equal(parser.comments[-1],
-            'Go to www.intermagnet.org for details on' +
-                    ' obtaining this product.')
+    parser._parse_comment(
+        " # Go to www.intermagnet.org for details on" + " obtaining this product.  |"
+    )
+    assert_equal(
+        parser.comments[-1],
+        "Go to www.intermagnet.org for details on" + " obtaining this product.",
+    )
 
 
 def test__parse_channels():
@@ -89,11 +91,13 @@ def test__parse_channels():
     Verify the IAGA CODE value is removed from parsed channel names.
     """
     parser = IAGA2002Parser()
-    parser._parse_header(' IAGA CODE              ' +
-            'BDT                                          |')
-    parser._parse_channels('DATE       TIME         DOY     ' +
-            'BDTH      BDTD      BDTZ      BDTF   |')
-    assert_equal(parser.channels, ['H', 'D', 'Z', 'F'])
+    parser._parse_header(
+        " IAGA CODE              " + "BDT                                          |"
+    )
+    parser._parse_channels(
+        "DATE       TIME         DOY     " + "BDTH      BDTD      BDTZ      BDTF   |"
+    )
+    assert_equal(parser.channels, ["H", "D", "Z", "F"])
 
 
 def test_parse_decbas():
@@ -105,4 +109,4 @@ def test_parse_decbas():
     """
     parser = IAGA2002Parser()
     parser.parse(IAGA2002_EXAMPLE)
-    assert_equal(parser.metadata['declination_base'], 5527)
+    assert_equal(parser.metadata["declination_base"], 5527)
diff --git a/test/imfjson_test/IMFJSONWriter_test.py b/test/imfjson_test/IMFJSONWriter_test.py
index 7f5ff92af85b8c4c2ac276a271a07111d276b6e6..ff8b19d6d91bb7f341fc93ef7674317214d827ec 100644
--- a/test/imfjson_test/IMFJSONWriter_test.py
+++ b/test/imfjson_test/IMFJSONWriter_test.py
@@ -7,7 +7,7 @@ import numpy as np
 
 
 EXAMPLE_INPUT_FACTORY = IAGA2002Factory()
-EXAMPLE_CHANNELS = ('H', 'D', 'Z', 'F')
+EXAMPLE_CHANNELS = ("H", "D", "Z", "F")
 EXAMPLE_FILE = "etc/iaga2002/BOU/OneMinute/bou20141101vmin.min"
 with open(EXAMPLE_FILE, "r") as input_file:
     data = input_file.read()
@@ -24,19 +24,19 @@ def test_metadata():
     """
     writer = IMFJSONWriter()
     metadata = writer._format_metadata(EXAMPLE_STATS, EXAMPLE_CHANNELS)
-    assert_equal(metadata['status'], 200)
+    assert_equal(metadata["status"], 200)
     # Test intermagnet parameters
-    intermag = metadata['intermagnet']
-    assert_equal(intermag['reported_orientation'], "HDZF")
-    assert_equal(intermag['sensor_orientation'], "HDZF")
-    assert_equal(intermag['data_type'], "variation")
-    assert_equal(intermag['sampling_period'], 60)
-    assert_equal(intermag['digital_sampling_rate'], 0.01)
+    intermag = metadata["intermagnet"]
+    assert_equal(intermag["reported_orientation"], "HDZF")
+    assert_equal(intermag["sensor_orientation"], "HDZF")
+    assert_equal(intermag["data_type"], "variation")
+    assert_equal(intermag["sampling_period"], 60)
+    assert_equal(intermag["digital_sampling_rate"], 0.01)
     # Test intermagnet-imo parameters
-    imo = metadata['intermagnet']['imo']
-    assert_equal(imo['iaga_code'], "BOU")
-    assert_equal(imo['name'], "Boulder")
-    assert_equal(imo['coordinates'], [254.764, 40.137, 1682])
+    imo = metadata["intermagnet"]["imo"]
+    assert_equal(imo["iaga_code"], "BOU")
+    assert_equal(imo["name"], "Boulder")
+    assert_equal(imo["coordinates"], [254.764, 40.137, 1682])
 
 
 def test_times():
@@ -49,8 +49,9 @@ def test_times():
     writer = IMFJSONWriter()
     times = writer._format_times(EXAMPLE_DATA, EXAMPLE_CHANNELS)
     # load times to test against
-    test_day, test_time = np.genfromtxt(EXAMPLE_FILE, skip_header=25,
-        usecols=(0, 1), unpack=True, dtype=str)
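+    # DATE and TIME are the first two columns; the 25 skipped lines are the
+    # IAGA2002 header and comment block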
+    test_day, test_time = np.genfromtxt(
+        EXAMPLE_FILE, skip_header=25, usecols=(0, 1), unpack=True, dtype=str
+    )
     test_date_times = []
     for idx in range(test_day.shape[0]):
         test_date_times += [test_day[idx] + "T" + test_time[idx] + "Z"]
@@ -66,34 +67,33 @@ def test_values():
     are the correct value and format.
     """
     writer = IMFJSONWriter()
-    values = writer._format_data(EXAMPLE_DATA, EXAMPLE_CHANNELS,
-            EXAMPLE_STATS)
+    values = writer._format_data(EXAMPLE_DATA, EXAMPLE_CHANNELS, EXAMPLE_STATS)
     test_val_keys = ["id", "metadata", "values"]
     for val in values:
         for key, test in zip(val, test_val_keys):
             assert_equal(key, test)
-    assert_equal(values[0]['id'], "H")
-    assert_equal(values[1]['id'], "D")
-    assert_equal(values[2]['id'], "Z")
-    assert_equal(values[3]['id'], "F")
+    assert_equal(values[0]["id"], "H")
+    assert_equal(values[1]["id"], "D")
+    assert_equal(values[2]["id"], "Z")
+    assert_equal(values[3]["id"], "F")
     # Test values-metadata (need to add flags)
-    metadata = values[0]['metadata']
-    test_metadata_keys = ["element", "network", "station",
-            "channel", "location"]
+    metadata = values[0]["metadata"]
+    test_metadata_keys = ["element", "network", "station", "channel", "location"]
     for key, test in zip(metadata, test_metadata_keys):
         assert_equal(key, test)
-    assert_equal(metadata['element'], "H")
-    assert_equal(metadata['network'], "NT")
-    assert_equal(metadata['station'], "BOU")
+    assert_equal(metadata["element"], "H")
+    assert_equal(metadata["network"], "NT")
+    assert_equal(metadata["station"], "BOU")
     # channel codes do not match ("H" vs. "MVH")
     # assert_equal(metadata['channel'], "MVH")
-    assert_equal(metadata['location'], "R0")
+    assert_equal(metadata["location"], "R0")
     # Test values-values
     #  Round to match iaga format
-    vals_H = np.around(values[0]['values'], 2)
-    vals_D = np.around(values[1]['values'], 2)
-    test_val_H, test_val_D = np.loadtxt(EXAMPLE_FILE, skiprows=25,
-        usecols=(3, 4), unpack=True, dtype=float)
+    vals_H = np.around(values[0]["values"], 2)
+    vals_D = np.around(values[1]["values"], 2)
+    test_val_H, test_val_D = np.loadtxt(
+        EXAMPLE_FILE, skiprows=25, usecols=(3, 4), unpack=True, dtype=float
+    )
     #  tolist required to prevent ValueError in comparison
     assert_equal(vals_H.tolist(), test_val_H.tolist())
     assert_equal(vals_D.tolist(), test_val_D.tolist())
diff --git a/test/imfv122_test/IMFV122Parser_test.py b/test/imfv122_test/IMFV122Parser_test.py
index aaf591ac56e854f770961a8a1dc2f40f0a20db7c..951d41608ef82d07d8e078b8124fcd604a4c6396 100644
--- a/test/imfv122_test/IMFV122Parser_test.py
+++ b/test/imfv122_test/IMFV122Parser_test.py
@@ -10,15 +10,16 @@ def test_imfv122_parse_header__hour_of_day():
     """
     parser = IMFV122Parser()
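+    # "05381402" packs geodetic latitude 53.8 and longitude 140.2,
+    # in tenths of a degree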
     parser._parse_header(
-            'KAK MAY0216 123 03 HDZF A KYO 05381402 000000 RRRRRRRRRRRRRRRR')
-    assert_equal(parser.channels, ['H', 'D', 'Z', 'F'])
+        "KAK MAY0216 123 03 HDZF A KYO 05381402 000000 RRRRRRRRRRRRRRRR"
+    )
+    assert_equal(parser.channels, ["H", "D", "Z", "F"])
     metadata = parser.metadata
-    assert_equal(metadata['declination_base'], 0)
-    assert_equal(metadata['geodetic_latitude'], 53.8)
-    assert_equal(metadata['geodetic_longitude'], 140.2)
-    assert_equal(metadata['station'], 'KAK')
+    assert_equal(metadata["declination_base"], 0)
+    assert_equal(metadata["geodetic_latitude"], 53.8)
+    assert_equal(metadata["geodetic_longitude"], 140.2)
+    assert_equal(metadata["station"], "KAK")
     assert_equal(parser._delta, 60)
-    assert_equal(parser._nexttime, UTCDateTime('2016-05-02T03:00:00Z'))
+    assert_equal(parser._nexttime, UTCDateTime("2016-05-02T03:00:00Z"))
 
 
 def test_imfv122_parse_header__minute_of_day():
@@ -26,15 +27,16 @@ def test_imfv122_parse_header__minute_of_day():
     """
     parser = IMFV122Parser()
     parser._parse_header(
-            'HER JAN0116 001 0123 HDZF R EDI 12440192 -14161 DRRRRRRRRRRRRRRR')
-    assert_equal(parser.channels, ['H', 'D', 'Z', 'F'])
+        "HER JAN0116 001 0123 HDZF R EDI 12440192 -14161 DRRRRRRRRRRRRRRR"
+    )
+    assert_equal(parser.channels, ["H", "D", "Z", "F"])
     metadata = parser.metadata
-    assert_equal(metadata['declination_base'], -14161)
-    assert_equal(metadata['geodetic_latitude'], 124.4)
-    assert_equal(metadata['geodetic_longitude'], 19.2)
-    assert_equal(metadata['station'], 'HER')
+    assert_equal(metadata["declination_base"], -14161)
+    assert_equal(metadata["geodetic_latitude"], 124.4)
+    assert_equal(metadata["geodetic_longitude"], 19.2)
+    assert_equal(metadata["station"], "HER")
     assert_equal(parser._delta, 60)
-    assert_equal(parser._nexttime, UTCDateTime('2016-01-01T02:03:00Z'))
+    assert_equal(parser._nexttime, UTCDateTime("2016-01-01T02:03:00Z"))
 
 
 def test_imfv122_parse_data():
@@ -42,20 +44,22 @@ def test_imfv122_parse_data():
     """
     parser = IMFV122Parser()
     parser._parse_header(
-            'HER JAN0116 001 0123 HDZF R EDI 12440192 -14161 DRRRRRRRRRRRRRRR')
-    parser._parse_data('1234 5678 9101 1121 3141 5161 7181 9202')
+        "HER JAN0116 001 0123 HDZF R EDI 12440192 -14161 DRRRRRRRRRRRRRRR"
+    )
+    parser._parse_data("1234 5678 9101 1121 3141 5161 7181 9202")
     import pprint
+
     pprint.pprint(parser._parsedata)
-    assert_equal(parser._parsedata[0][0], UTCDateTime('2016-01-01T02:03:00Z'))
-    assert_equal(parser._parsedata[1][0], '1234')
-    assert_equal(parser._parsedata[2][0], '5678')
-    assert_equal(parser._parsedata[3][0], '9101')
-    assert_equal(parser._parsedata[4][0], '1121')
-    assert_equal(parser._parsedata[0][1], UTCDateTime('2016-01-01T02:04:00Z'))
-    assert_equal(parser._parsedata[1][1], '3141')
-    assert_equal(parser._parsedata[2][1], '5161')
-    assert_equal(parser._parsedata[3][1], '7181')
-    assert_equal(parser._parsedata[4][1], '9202')
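+    # _parsedata[0] holds sample times; rows 1-4 hold the raw H, D, Z, F
+    # string values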
+    assert_equal(parser._parsedata[0][0], UTCDateTime("2016-01-01T02:03:00Z"))
+    assert_equal(parser._parsedata[1][0], "1234")
+    assert_equal(parser._parsedata[2][0], "5678")
+    assert_equal(parser._parsedata[3][0], "9101")
+    assert_equal(parser._parsedata[4][0], "1121")
+    assert_equal(parser._parsedata[0][1], UTCDateTime("2016-01-01T02:04:00Z"))
+    assert_equal(parser._parsedata[1][1], "3141")
+    assert_equal(parser._parsedata[2][1], "5161")
+    assert_equal(parser._parsedata[3][1], "7181")
+    assert_equal(parser._parsedata[4][1], "9202")
 
 
 def test_imfv122_post_process():
@@ -63,16 +67,17 @@ def test_imfv122_post_process():
     """
     parser = IMFV122Parser()
     parser._parse_header(
-            'HER JAN0116 001 0123 HDZF R EDI 12440192 -14161 DRRRRRRRRRRRRRRR')
-    parser._parse_data('1234 5678 9101 1121 3141 5161 7181 9202')
+        "HER JAN0116 001 0123 HDZF R EDI 12440192 -14161 DRRRRRRRRRRRRRRR"
+    )
+    parser._parse_data("1234 5678 9101 1121 3141 5161 7181 9202")
     parser._post_process()
-    assert_equal(parser.times[0], UTCDateTime('2016-01-01T02:03:00Z'))
-    assert_equal(parser.data['H'][0], 123.4)
-    assert_equal(parser.data['D'][0], 56.78)
-    assert_equal(parser.data['Z'][0], 910.1)
-    assert_equal(parser.data['F'][0], 112.1)
-    assert_equal(parser.times[1], UTCDateTime('2016-01-01T02:04:00Z'))
-    assert_equal(parser.data['H'][1], 314.1)
-    assert_equal(parser.data['D'][1], 51.61)
-    assert_equal(parser.data['Z'][1], 718.1)
-    assert_equal(parser.data['F'][1], 920.2)
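+    # post-processing applies scale factors: H, Z, and F by 0.1
+    # ("1234" -> 123.4), D by 0.01 ("5678" -> 56.78)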
+    assert_equal(parser.times[0], UTCDateTime("2016-01-01T02:03:00Z"))
+    assert_equal(parser.data["H"][0], 123.4)
+    assert_equal(parser.data["D"][0], 56.78)
+    assert_equal(parser.data["Z"][0], 910.1)
+    assert_equal(parser.data["F"][0], 112.1)
+    assert_equal(parser.times[1], UTCDateTime("2016-01-01T02:04:00Z"))
+    assert_equal(parser.data["H"][1], 314.1)
+    assert_equal(parser.data["D"][1], 51.61)
+    assert_equal(parser.data["Z"][1], 718.1)
+    assert_equal(parser.data["F"][1], 920.2)
diff --git a/test/imfv283_test/IMFV283Parser_test.py b/test/imfv283_test/IMFV283Parser_test.py
index 31d833ab91802eb6a3dfea384562807875ebfd57..70dcbf76f17d97d9f590863d27e31e1f5ff15ccc 100644
--- a/test/imfv283_test/IMFV283Parser_test.py
+++ b/test/imfv283_test/IMFV283Parser_test.py
@@ -7,15 +7,19 @@ from obspy import UTCDateTime
 from geomagio.imfv283 import IMFV283Parser, imfv283_codes
 
 
-IMFV283_EXAMPLE_VIC = b'75C2A3A814023012741G43-1NN027EUP00191`A^P@RVxZ}|' + \
-    b'D@@B_BEM@@@@@@@@@@@@@@@@@@@@@@@@@@@E|BxtTADVD@\\E\\BxxT@tVCh\\E' + \
-    b'lByDT@xVCp\\EdBy@T@tVCh\\EhByPT@xVCl\\EPBy@T@tVCd\\EdBxlTA@VCp\\Eh' + \
-    b'BxTTA@VCp\\EdBGxTA@VCl\\EPBG`T@xVC\\\\DtBGHT@lVCD\\DPBG@T@XVBh\\'
+IMFV283_EXAMPLE_VIC = (
+    b"75C2A3A814023012741G43-1NN027EUP00191`A^P@RVxZ}|"
+    + b"D@@B_BEM@@@@@@@@@@@@@@@@@@@@@@@@@@@E|BxtTADVD@\\E\\BxxT@tVCh\\E"
+    + b"lByDT@xVCp\\EdBy@T@tVCh\\EhByPT@xVCl\\EPBy@T@tVCd\\EdBxlTA@VCp\\Eh"
+    + b"BxTTA@VCp\\EdBGxTA@VCl\\EPBG`T@xVC\\\\DtBGHT@lVCD\\DPBG@T@XVBh\\"
+)
 
-IMFV283_EXAMPLE_FRD = b'75C2102614023012927G43-0NN027EUP00191bx@WyhD{' + \
-    b'aDB~@X@{Bb@@@@@@@@@@@@@@@@@@@@@@@@@@@@[DAV[@cUAjT@[EAVZ@cUAjT@[' + \
-    b'BAVZ@cVAjS@[DAVZ@cUAjS@[DAVZ@cUAjS@[GAV\\@cTAjT@[DAV[@cUAjT@[BAVY' + \
-    b'@cVAjT@[CAVW@cWAjT@[CAVT@cWAjU@[AAVO@cYAjV@Z}AVK@c[AjV'
+IMFV283_EXAMPLE_FRD = (
+    b"75C2102614023012927G43-0NN027EUP00191bx@WyhD{"
+    + b"aDB~@X@{Bb@@@@@@@@@@@@@@@@@@@@@@@@@@@@[DAV[@cUAjT@[EAVZ@cUAjT@["
+    + b"BAVZ@cVAjS@[DAVZ@cUAjS@[DAVZ@cUAjS@[GAV\\@cTAjT@[DAV[@cUAjT@[BAVY"
+    + b"@cVAjT@[CAVW@cWAjT@[CAVT@cWAjU@[AAVO@cYAjV@Z}AVK@c[AjV"
+)
 
 
 def test_parse_msg_header():
@@ -25,17 +29,17 @@ def test_parse_msg_header():
     Verify the header name and value are split at the correct column.
     """
     header = IMFV283Parser()._parse_msg_header(IMFV283_EXAMPLE_VIC)
-    assert_equal(header['obs'], 'VIC')
+    assert_equal(header["obs"], "VIC")
 
 
 def test_parse_goes_header():
     """imfv283_test.IMFV283Parser_test.test_parse_goes_header()
     """
-    goes_data = IMFV283Parser()._process_ness_block(IMFV283_EXAMPLE_VIC,
-        imfv283_codes.OBSERVATORIES['VIC'],
-        191)
+    goes_data = IMFV283Parser()._process_ness_block(
+        IMFV283_EXAMPLE_VIC, imfv283_codes.OBSERVATORIES["VIC"], 191
+    )
     goes_header = IMFV283Parser()._parse_goes_header(goes_data)
-    assert_equal(goes_header['day'], 23)
+    assert_equal(goes_header["day"], 23)
 
 
 def test_estimate_data_time__correct_doy():
@@ -45,13 +49,14 @@ def test_estimate_data_time__correct_doy():
     """
     parser = IMFV283Parser()
     # BOU aka normal
-    transmission = b'17274013121'
+    transmission = b"17274013121"
     day = 274
     minute = 72
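+    # minute-of-day 72 corresponds to 01:12 UTC; day 274 of 2017 is October 1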
-    (data_time, transmit_time, corrected) = \
-            parser._estimate_data_time(transmission, day, minute)
-    assert_equal(data_time, UTCDateTime('2017-10-01T01:12:00Z'))
-    assert_equal(transmit_time, UTCDateTime('2017-10-01T01:31:21Z'))
+    (data_time, transmit_time, corrected) = parser._estimate_data_time(
+        transmission, day, minute
+    )
+    assert_equal(data_time, UTCDateTime("2017-10-01T01:12:00Z"))
+    assert_equal(transmit_time, UTCDateTime("2017-10-01T01:31:21Z"))
     assert_equal(corrected, False)
 
 
@@ -62,11 +67,12 @@ def test_estimate_data_time__incorrect_doy():
     """
     parser = IMFV283Parser()
     # BLC, affected by the 1999 GPS rollover issue
-    transmission = b'17274013241'
+    transmission = b"17274013241"
     day = 46
     minute = 78
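+    # the GOES header day (46) conflicts with the transmission day (274),
+    # so the parser is expected to correct the data time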
-    (data_time, transmit_time, corrected) = \
-            parser._estimate_data_time(transmission, day, minute)
-    assert_equal(data_time, UTCDateTime('2017-10-01T01:18:00Z'))
-    assert_equal(transmit_time, UTCDateTime('2017-10-01T01:32:41Z'))
+    (data_time, transmit_time, corrected) = parser._estimate_data_time(
+        transmission, day, minute
+    )
+    assert_equal(data_time, UTCDateTime("2017-10-01T01:18:00Z"))
+    assert_equal(transmit_time, UTCDateTime("2017-10-01T01:32:41Z"))
     assert_equal(corrected, True)
diff --git a/test/pcdcp_test/PCDCPFactory_test.py b/test/pcdcp_test/PCDCPFactory_test.py
index 9fe4f0d4dd9315eb6f5fbccf9d2377936133fee0..08a92e85ee18118eec9a42780ffb3eba056a710e 100644
--- a/test/pcdcp_test/PCDCPFactory_test.py
+++ b/test/pcdcp_test/PCDCPFactory_test.py
@@ -5,16 +5,14 @@ from obspy.core.utcdatetime import UTCDateTime
 from obspy.core.stream import Stream
 from numpy.testing import assert_equal
 
-pcdcpString = \
-"""BOU  2015  001  01-Jan-15  HEZF  0.01nT  File Version 2.00
+pcdcpString = """BOU  2015  001  01-Jan-15  HEZF  0.01nT  File Version 2.00
 0000  2086167    -5707  4745737  5237768
 0001  2086190    -5664  4745737  5237777
 0002  2086213    -5638  4745741  5237787
 0003  2086239    -5632  4745739  5237796
 0004  2086198    -5626  4745743  5237786"""
 
-pcdcpString_seconds = \
-"""BOU  2015  001  01-Jan-15  HEZF  0.001nT  File Version 2.00
+pcdcpString_seconds = """BOU  2015  001  01-Jan-15  HEZF  0.001nT  File Version 2.00
 00000  20861520    -57095  47457409  52377630
 00001  20861533    -57096  47457397  52377650
 00002  20861554    -57077  47457391  52377650
@@ -32,15 +30,13 @@ def test_parse_string():
     stream = PCDCPFactory().parse_string(pcdcpString)
 
     assert_equal(type(stream), Stream)
-    assert_equal(stream[0].stats.network, 'NT')
-    assert_equal(stream[0].stats.station, 'BOU')
-    assert_equal(stream[0].stats.starttime,
-                UTCDateTime('2015-01-01T00:00:00.000000Z'))
-    h = stream.select(channel='H')[0]
+    assert_equal(stream[0].stats.network, "NT")
+    assert_equal(stream[0].stats.station, "BOU")
+    assert_equal(stream[0].stats.starttime, UTCDateTime("2015-01-01T00:00:00.000000Z"))
+    h = stream.select(channel="H")[0]
     assert_equal(h.data[1], 20861.90)
-    assert_equal(stream[0].stats.endtime,
-                UTCDateTime('2015-01-01T00:04:00.000000Z'))
-    z = stream.select(channel='Z')[0]
+    assert_equal(stream[0].stats.endtime, UTCDateTime("2015-01-01T00:04:00.000000Z"))
+    z = stream.select(channel="Z")[0]
     assert_equal(z.data[-1], 47457.43)
 
 
@@ -53,13 +49,11 @@ def test_parse_string_seconds():
     stream = PCDCPFactory().parse_string(pcdcpString_seconds)
 
     assert_equal(type(stream), Stream)
-    assert_equal(stream[0].stats.network, 'NT')
-    assert_equal(stream[0].stats.station, 'BOU')
-    assert_equal(stream[0].stats.starttime,
-                UTCDateTime('2015-01-01T00:00:00.000000Z'))
-    h = stream.select(channel='H')[0]
+    assert_equal(stream[0].stats.network, "NT")
+    assert_equal(stream[0].stats.station, "BOU")
+    assert_equal(stream[0].stats.starttime, UTCDateTime("2015-01-01T00:00:00.000000Z"))
+    h = stream.select(channel="H")[0]
     assert_equal(h.data[0], 20861.520)
-    assert_equal(stream[0].stats.endtime,
-                UTCDateTime('2015-01-01T00:00:04.000000Z'))
-    z = stream.select(channel='Z')[0]
+    assert_equal(stream[0].stats.endtime, UTCDateTime("2015-01-01T00:00:04.000000Z"))
+    z = stream.select(channel="Z")[0]
     assert_equal(z.data[-1], 47457.384)
diff --git a/test/pcdcp_test/PCDCPParser_test.py b/test/pcdcp_test/PCDCPParser_test.py
index 6df41c58aedd661cc65e72dacfd4bd9b80eb3ec3..62ef09918a71ef82c9aa1c534062a67a20b1108d 100644
--- a/test/pcdcp_test/PCDCPParser_test.py
+++ b/test/pcdcp_test/PCDCPParser_test.py
@@ -4,8 +4,7 @@ from numpy.testing import assert_equal
 from geomagio.pcdcp import PCDCPParser
 
 
-PCDCP_EXAMPLE = \
-"""
+PCDCP_EXAMPLE = """
 BOU  2015  001  01-Jan-15  HEZF  0.01nT  File Version 2.00
 0000  2086167    -5707  4745737  5237768
 0001  2086190    -5664  4745737  5237777
@@ -18,8 +17,7 @@ BOU  2015  001  01-Jan-15  HEZF  0.01nT  File Version 2.00
 0008  2086278    -5571  4745734  5237808
 """
 
-PCDCP_EXAMPLE_SECOND = \
-"""
+PCDCP_EXAMPLE_SECOND = """
 BOU  2015  001  01-Jan-15  HEZF  0.001nT  File Version 2.00
 00000  20861520    -57095  47457409  52377630
 00001  20861533    -57096  47457397  52377650
@@ -42,14 +40,15 @@ def test_parse_header():
     Verify the header name and value are split at the correct column.
     """
     parser = PCDCPParser()
-    parser._parse_header('BOU  2015  001  01-Jan-15  HEZF  0.01nT' +
-        '  File Version 2.00')
+    parser._parse_header(
+        "BOU  2015  001  01-Jan-15  HEZF  0.01nT" + "  File Version 2.00"
+    )
 
-    assert_equal(parser.header['date'], '01-Jan-15')
-    assert_equal(parser.header['station'], 'BOU')
-    assert_equal(parser.header['year'], '2015')
-    assert_equal(parser.header['yearday'], '001')
-    assert_equal(parser.header['resolution'], '0.01nT')
+    assert_equal(parser.header["date"], "01-Jan-15")
+    assert_equal(parser.header["station"], "BOU")
+    assert_equal(parser.header["year"], "2015")
+    assert_equal(parser.header["yearday"], "001")
+    assert_equal(parser.header["resolution"], "0.01nT")
 
 
 def test_parse_header_sec():
@@ -59,11 +58,12 @@ def test_parse_header_sec():
     header.  Verify the header name and value are split correctly.
     """
     parser = PCDCPParser()
-    parser._parse_header('BOU  2015  001  01-Jan-15  HEZF  0.001nT' +
-        ' File Version 2.00')
+    parser._parse_header(
+        "BOU  2015  001  01-Jan-15  HEZF  0.001nT" + " File Version 2.00"
+    )
 
-    assert_equal(parser.header['date'], '01-Jan-15')
-    assert_equal(parser.header['station'], 'BOU')
-    assert_equal(parser.header['year'], '2015')
-    assert_equal(parser.header['yearday'], '001')
-    assert_equal(parser.header['resolution'], '0.001nT')
+    assert_equal(parser.header["date"], "01-Jan-15")
+    assert_equal(parser.header["station"], "BOU")
+    assert_equal(parser.header["year"], "2015")
+    assert_equal(parser.header["yearday"], "001")
+    assert_equal(parser.header["resolution"], "0.001nT")