diff --git a/Dockerfile b/Dockerfile
index ab993d1e0e1fdafa2e349626759027d9757ada90..2a09b4a0ccb695d311b0c6e736a650de49ae860b 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -19,7 +19,7 @@ RUN echo 'export PATH=/conda/bin:$PATH' > /etc/profile.d/conda.sh && \
 # install algorithms and dependencies via conda
 RUN conda config --set ssl_verify $SSL_CERT_FILE && \
     conda config --add channels conda-forge && \
-    conda install --yes jupyter obspy && \
+    conda install --yes jupyter obspy 'icu=58.*' && \
     conda clean -i -l -t -y && \
     # build pycurl with SFTP support
         export PIP_CERT=$SSL_CERT_FILE && \
diff --git a/geomagio/Controller.py b/geomagio/Controller.py
index 0ab14743208533315eb67515fd70c61485b3ebbc..346070be5cfc6a567f58a5df8923d9817700bfba 100644
--- a/geomagio/Controller.py
+++ b/geomagio/Controller.py
@@ -4,11 +4,12 @@ from builtins import str as unicode
 
 import argparse
 import sys
+from io import BytesIO
 from obspy.core import Stream, UTCDateTime
 from .algorithm import algorithms
 from .PlotTimeseriesFactory import PlotTimeseriesFactory
 from .StreamTimeseriesFactory import StreamTimeseriesFactory
-from . import TimeseriesUtility
+from . import TimeseriesUtility, Util
 
 # factory packages
 from . import binlog
@@ -209,13 +210,15 @@ class Controller(object):
         if options.update_limit != 0:
             if update_count >= options.update_limit:
                 return
-        print('checking gaps', options.starttime, options.endtime,
-            file=sys.stderr)
         algorithm = self._algorithm
         input_channels = options.inchannels or \
                 algorithm.get_input_channels()
+        output_observatory = options.output_observatory
         output_channels = options.outchannels or \
                 algorithm.get_output_channels()
+        print('checking gaps', options.starttime, options.endtime,
+                output_observatory, output_channels,
+                file=sys.stderr)
         # request output to see what has already been generated
         output_timeseries = self._get_output_timeseries(
                 observatory=options.output_observatory,
@@ -257,7 +260,8 @@ class Controller(object):
             options.starttime = output_gap[0]
             options.endtime = output_gap[1]
             print('processing', options.starttime, options.endtime,
-                file=sys.stderr)
+                    output_observatory, output_channels,
+                    file=sys.stderr)
             self.run(options, input_timeseries)
 
 
@@ -289,9 +293,11 @@ def get_input_factory(args):
     elif args.input_stdin:
         input_stream = sys.stdin
     elif args.input_url is not None:
-        input_factory_args['urlInterval'] = args.input_url_interval
-        input_factory_args['urlTemplate'] = args.input_url
-
+        if '{' in args.input_url:
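+            # url contains '{' placeholders; treat it as a template expanded per url interval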
+            input_factory_args['urlInterval'] = args.input_url_interval
+            input_factory_args['urlTemplate'] = args.input_url
+        else:
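+            # plain url: read its content once into an in-memory stream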
+            input_stream = BytesIO(Util.read_url(args.input_url))
     input_type = args.input
     if input_type == 'edge':
         input_factory = edge.EdgeFactory(
@@ -317,7 +323,7 @@ def get_input_factory(args):
         elif input_type == 'imfv283':
             input_factory = imfv283.IMFV283Factory(**input_factory_args)
         elif input_type == 'pcdcp':
-                input_factory = pcdcp.PCDCPFactory(**input_factory_args)
+            input_factory = pcdcp.PCDCPFactory(**input_factory_args)
         # wrap stream
         if input_stream is not None:
             input_factory = StreamTimeseriesFactory(
@@ -353,7 +359,12 @@ def get_output_factory(args):
     if args.output_file is not None:
         output_stream = open(args.output_file, 'wb')
     elif args.output_stdout:
-        output_stream = sys.stdout
+        try:
+            # python 3
+            output_stream = sys.stdout.buffer
+        except AttributeError:
+            # python 2
+            output_stream = sys.stdout
     elif args.output_url is not None:
         output_url = args.output_url
         output_factory_args['urlInterval'] = args.output_url_interval
@@ -490,10 +501,20 @@ def main(args):
 
     if args.observatory_foreach:
         observatory = args.observatory
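+        # process each observatory separately, remembering failures so the rest still run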
+        observatory_exception = None
         for obs in observatory:
             args.observatory = (obs,)
             args.output_observatory = (obs,)
-            _main(args)
+            try:
+                _main(args)
+            except Exception as e:
+                observatory_exception = e
+                print("Exception processing observatory {}".format(obs),
+                        str(e),
+                        file=sys.stderr)
+        if observatory_exception:
+            print("Exceptions occurred during processing", file=sys.stderr)
+            sys.exit(1)
+
     else:
         _main(args)
 
diff --git a/geomagio/TimeseriesFactory.py b/geomagio/TimeseriesFactory.py
index 8867490e1d444e356a56a8d8600dc3d9a9e4d5b5..3f08bac145b9de1f1bc3953b6ce04199ce6d1932 100644
--- a/geomagio/TimeseriesFactory.py
+++ b/geomagio/TimeseriesFactory.py
@@ -198,16 +198,20 @@ class TimeseriesFactory(object):
                 endtime=endtime,
                 size=self.urlInterval)
         for urlInterval in urlIntervals:
+            interval_start = urlInterval['start']
+            interval_end = urlInterval['end']
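+            # make the interval half-open, [start, end), unless it is a single point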
+            if interval_start != interval_end:
+                interval_end = interval_end - delta
             url = self._get_url(
                     observatory=observatory,
-                    date=urlInterval['start'],
+                    date=interval_start,
                     type=type,
                     interval=interval,
                     channels=channels)
             url_data = timeseries.slice(
-                    starttime=urlInterval['start'],
+                    starttime=interval_start,
                     # subtract delta to omit the sample at end: `[start, end)`
-                    endtime=(urlInterval['end'] - delta))
+                    endtime=interval_end)
             url_file = Util.get_file_from_url(url, createParentDirectory=True)
             # existing data file, merge new data into existing
             if os.path.isfile(url_file):
@@ -236,8 +240,8 @@ class TimeseriesFactory(object):
                     pass
             # pad with NaN's out to urlInterval (like get_timeseries())
             url_data.trim(
-                starttime=urlInterval['start'],
-                endtime=(urlInterval['end'] - delta),
+                starttime=interval_start,
+                endtime=interval_end,
                 nearest_sample=False,
                 pad=True,
                 fill_value=numpy.nan)
diff --git a/geomagio/TimeseriesUtility.py b/geomagio/TimeseriesUtility.py
index 41910bb0465946d2778f37ff98260ec9646e492f..da7086d967b2e2c20c817b1381d204a1039dba80 100644
--- a/geomagio/TimeseriesUtility.py
+++ b/geomagio/TimeseriesUtility.py
@@ -4,7 +4,7 @@ import numpy
 import obspy.core
 
 
-def get_stream_gaps(stream):
+def get_stream_gaps(stream, channels=None):
     """Get gaps in a given stream
     Parameters
     ----------
@@ -12,6 +12,7 @@ def get_stream_gaps(stream):
         the stream to check for gaps
     channels: array_like
         list of channels to check for gaps
+        Default is None (check all channels).
 
     Returns
     -------
@@ -25,6 +26,8 @@ def get_stream_gaps(stream):
     gaps = {}
     for trace in stream:
         channel = trace.stats.channel
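+        # when a channel list is given, only report gaps for those channels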
+        if channels is not None and channel not in channels:
+            continue
         gaps[channel] = get_trace_gaps(trace)
     return gaps
 
diff --git a/geomagio/edge/EdgeFactory.py b/geomagio/edge/EdgeFactory.py
index 69837643879520803bea4c42e6a2741fea1e4845..a26993d477f98b82603d5ec359b275a9f824cc98 100644
--- a/geomagio/edge/EdgeFactory.py
+++ b/geomagio/edge/EdgeFactory.py
@@ -153,7 +153,7 @@ class EdgeFactory(TimeseriesFactory):
         # restore stdout
         finally:
             output = temp_stdout.getvalue()
-            if output != '':
+            if output:
                 sys.stderr.write(str(output))
             temp_stdout.close()
             sys.stdout = original_stdout
diff --git a/geomagio/iaga2002/IAGA2002Factory.py b/geomagio/iaga2002/IAGA2002Factory.py
index eb48e32a5b1d6a48cf4571539252ad374ce7c513..f18dbf7419310225b737dde9d05f571bee923ebe 100644
--- a/geomagio/iaga2002/IAGA2002Factory.py
+++ b/geomagio/iaga2002/IAGA2002Factory.py
@@ -53,11 +53,14 @@ class IAGA2002Factory(TimeseriesFactory):
         """
         parser = IAGA2002Parser(observatory=observatory)
         parser.parse(data)
+        stream = obspy.core.Stream()
+        if len(parser.times) == 0:
+            # no data parsed
+            return stream
         metadata = parser.metadata
         starttime = obspy.core.UTCDateTime(parser.times[0])
         endtime = obspy.core.UTCDateTime(parser.times[-1])
         data = parser.data
-        stream = obspy.core.Stream()
         length = len(data[list(data)[0]])
         if starttime != endtime:
             rate = (length - 1) / (endtime - starttime)
diff --git a/geomagio/iaga2002/IAGA2002Parser.py b/geomagio/iaga2002/IAGA2002Parser.py
index b0604d22d71f9775b9b7132a902a352e954c6962..7633ba3ece0b1fcd8a02750e03e96fbc5310feed 100644
--- a/geomagio/iaga2002/IAGA2002Parser.py
+++ b/geomagio/iaga2002/IAGA2002Parser.py
@@ -61,6 +61,9 @@ class IAGA2002Parser(object):
         data : str
             IAGA 2002 formatted file contents.
         """
+        # create parsing time and data arrays
+        self._parsedata = ([], [], [], [], [])
+
         parsing_headers = True
         lines = data.splitlines()
         for line in lines:
@@ -137,8 +140,6 @@ class IAGA2002Parser(object):
         self.channels.append(line[40:50].strip().replace(iaga_code, ''))
         self.channels.append(line[50:60].strip().replace(iaga_code, ''))
         self.channels.append(line[60:69].strip().replace(iaga_code, ''))
-        # create parsing data arrays
-        self._parsedata = ([], [], [], [], [])
 
     def _parse_data(self, line):
         """Parse one data point in the timeseries.
diff --git a/package.json b/package.json
index 43d11915fe734fa919e7bb54253ef8559791d5f9..2eb8593c9b93e385f2ea9a8494ffe9d421224b25 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
   "name": "geomag-algorithms",
-  "version": "0.5.0",
+  "version": "0.6.0",
   "homepage": "http://geomag.usgs.gov/",
   "repository": "https://github.com/usgs/geomag-algorithms.git",
   "description": "Geomagnetism algorithms.",
diff --git a/setup.py b/setup.py
index 2f13210901c0b38807ff6a1ecaab8e3ad1cd521e..01da8aba080ac8b6a715b8c2b8e0e44bd96f7b45 100644
--- a/setup.py
+++ b/setup.py
@@ -2,7 +2,7 @@ from distutils.core import setup
 
 setup(
     name='geomag-algorithms',
-    version='0.5.0',
+    version='0.6.0',
     description='USGS Geomag IO Library',
     url='https://github.com/usgs/geomag-algorithms',
     packages=[
diff --git a/test/TimeseriesUtility_test.py b/test/TimeseriesUtility_test.py
index ea1adc9e4a2c2f753b1d9638a4832e7dddef897f..8f11588059c1fe709fd4283e015a170fa05a2349 100644
--- a/test/TimeseriesUtility_test.py
+++ b/test/TimeseriesUtility_test.py
@@ -37,6 +37,27 @@ def test_get_stream_gaps():
     assert_equals(len(gaps['Z']), 0)
 
 
+def test_get_stream_gaps_channels():
+    """TimeseriesUtility_test.test_get_stream_gaps_channels()
+
+    Test that gaps are only checked for the specified channels.
+    """
+    stream = Stream([
+        __create_trace('H', [numpy.nan, 1, 1, numpy.nan, numpy.nan]),
+        __create_trace('Z', [0, 0, 0, 1, 1, 1])
+    ])
+    for trace in stream:
+        # set time of first sample
+        trace.stats.starttime = UTCDateTime('2015-01-01T00:00:00Z')
+        # set sample rate to 1 second
+        trace.stats.delta = 1
+    # find gaps
+    gaps = TimeseriesUtility.get_stream_gaps(stream, ['Z'])
+    assert_equals('H' in gaps, False)
+    assert_equals(len(gaps['Z']), 0)
+
+
 def test_get_trace_gaps():
     """TimeseriesUtility_test.test_get_trace_gaps()
 
diff --git a/test/iaga2002_test/IAGA2002Factory_test.py b/test/iaga2002_test/IAGA2002Factory_test.py
new file mode 100644
index 0000000000000000000000000000000000000000..2929a6f96e91d352c308fb7f59799a702bd9f9f5
--- /dev/null
+++ b/test/iaga2002_test/IAGA2002Factory_test.py
@@ -0,0 +1,15 @@
+"""Tests for IAGA2002Factory class"""
+
+from nose.tools import assert_equals
+from geomagio.iaga2002 import IAGA2002Factory
+
+
+def test_parse_empty():
+    """iaga2002_test.IAGA2002Parser_test.test_parse_empty()
+
+    Verify the parse method returns an empty stream without exceptions
+    if the data being parsed is empty.
+    """
+    parser = IAGA2002Factory()
+    stream = parser.parse_string('')
+    assert_equals(len(stream), 0)