From 51a2d3408e9a28f6ea8ff34a755c0e003f2ba6ee Mon Sep 17 00:00:00 2001 From: Jeremy Fee <jmfee@usgs.gov> Date: Tue, 11 Feb 2020 14:54:53 -0700 Subject: [PATCH] Remove nose imports, fix warnings --- geomagio/WebService.py | 4 +- test/Controller_test.py | 10 +- test/ObservatoryMetadata_test.py | 8 +- test/TimeseriesUtility_test.py | 102 +++++++++--------- test/Util_test.py | 22 ++-- test/WebService_test.py | 60 +++++------ test/algorithm_test/AdjustedAlgorithm_test.py | 5 +- test/algorithm_test/Algorithm_test.py | 9 +- test/algorithm_test/AverageAlgorithm_test.py | 8 +- test/algorithm_test/SQDistAlgorithm_test.py | 12 +-- test/algorithm_test/XYZAlgorithm_test.py | 25 +++-- test/edge_test/EdgeFactory_test.py | 44 ++++---- test/edge_test/MiniSeedFactory_test.py | 66 ++++++------ test/edge_test/RawInputClient_test.py | 6 +- test/iaga2002_test/IAGA2002Factory_test.py | 4 +- test/iaga2002_test/IAGA2002Parser_test.py | 12 +-- test/imfjson_test/IMFJSONWriter_test.py | 48 ++++----- test/imfv122_test/IMFV122Parser_test.py | 70 ++++++------ test/imfv283_test/IMFV283Parser_test.py | 18 ++-- test/pcdcp_test/PCDCPFactory_test.py | 30 +++--- test/pcdcp_test/PCDCPParser_test.py | 22 ++-- 21 files changed, 291 insertions(+), 294 deletions(-) diff --git a/geomagio/WebService.py b/geomagio/WebService.py index c40a91466..3eb658dfe 100644 --- a/geomagio/WebService.py +++ b/geomagio/WebService.py @@ -2,7 +2,9 @@ """ from __future__ import print_function -from cgi import escape, parse_qs +from html import escape +from urllib.parse import parse_qs + from collections import OrderedDict from datetime import datetime from json import dumps diff --git a/test/Controller_test.py b/test/Controller_test.py index 32caec56d..5a08eda0e 100644 --- a/test/Controller_test.py +++ b/test/Controller_test.py @@ -1,7 +1,6 @@ #! 
/usr/bin/env python from geomagio import Controller, TimeseriesFactory from geomagio.algorithm import Algorithm -from nose.tools import assert_is_instance # needed to read outputs generated by Controller and test data from geomagio.iaga2002 import IAGA2002Factory @@ -12,7 +11,7 @@ from shutil import copy # needed to determine a valid (and writable) temp folder from tempfile import gettempdir -from numpy.testing import assert_allclose +from numpy.testing import assert_allclose, assert_equal from obspy.core import UTCDateTime @@ -26,9 +25,10 @@ def test_controller(): outputfactory = TimeseriesFactory() algorithm = Algorithm() controller = Controller(inputfactory, outputfactory, algorithm) - assert_is_instance(controller._inputFactory, TimeseriesFactory) - assert_is_instance(controller._outputFactory, TimeseriesFactory) - assert_is_instance(controller._algorithm, Algorithm) + assert_equal(isinstance(controller._inputFactory, TimeseriesFactory), True) + assert_equal(isinstance(controller._outputFactory, TimeseriesFactory), + True) + assert_equal(isinstance(controller._algorithm, Algorithm), True) def test_controller_update_sqdist(): diff --git a/test/ObservatoryMetadata_test.py b/test/ObservatoryMetadata_test.py index f5e37d654..bcfcd499f 100644 --- a/test/ObservatoryMetadata_test.py +++ b/test/ObservatoryMetadata_test.py @@ -1,7 +1,7 @@ """Tests for ObservatoryMetadata.py""" from geomagio import ObservatoryMetadata -from nose.tools import assert_equals +from numpy.testing import assert_equal import obspy.core @@ -59,13 +59,13 @@ def test_set_metadata(): stats.data_type = 'quasi-definitive' observatorymetadata.set_metadata(stats, 'BOU', 'MVH', 'quasi-definitive', 'second') - assert_equals(stats['declination_base'], 5527) + assert_equal(stats['declination_base'], 5527) # Test custom metadata stats = obspy.core.Stats() observatorymetadata = ObservatoryMetadata(METADATA, DATA_INTERVAL_TYPE) observatorymetadata.set_metadata(stats, 'BOU', 'MVH', 'quasi-definitive', 'second') - assert_equals(stats['declination_base'], 20000) + assert_equal(stats['declination_base'], 20000) print(stats) - assert_equals(stats['data_interval_type'], 'Average 1-Second') + assert_equal(stats['data_interval_type'], 'Average 1-Second') diff --git a/test/TimeseriesUtility_test.py b/test/TimeseriesUtility_test.py index ec2f876e9..627393c15 100644 --- a/test/TimeseriesUtility_test.py +++ b/test/TimeseriesUtility_test.py @@ -1,7 +1,7 @@ #! 
/usr/bin/env python from __future__ import absolute_import -from nose.tools import assert_equals +from numpy.testing import assert_equal from .StreamConverter_test import __create_trace import numpy from geomagio import TimeseriesUtility @@ -40,20 +40,20 @@ def test_create_empty_trace(): trace.stats.station = trace1.stats.station trace.stats.location = location timeseries += trace3 - assert_equals(len(trace3.data), trace3.stats.npts) - assert_equals(timeseries[0].stats.starttime, timeseries[2].stats.starttime) + assert_equal(len(trace3.data), trace3.stats.npts) + assert_equal(timeseries[0].stats.starttime, timeseries[2].stats.starttime) TimeseriesUtility.pad_timeseries( timeseries=timeseries, starttime=trace1.stats.starttime, endtime=trace1.stats.endtime) - assert_equals(len(trace3.data), trace3.stats.npts) - assert_equals(timeseries[0].stats.starttime, timeseries[2].stats.starttime) + assert_equal(len(trace3.data), trace3.stats.npts) + assert_equal(timeseries[0].stats.starttime, timeseries[2].stats.starttime) # Change starttime by more than 1 delta starttime = trace1.stats.starttime endtime = trace1.stats.endtime TimeseriesUtility.pad_timeseries(timeseries, starttime - 90, endtime + 90) - assert_equals(len(trace3.data), trace3.stats.npts) - assert_equals(timeseries[0].stats.starttime, timeseries[2].stats.starttime) + assert_equal(len(trace3.data), trace3.stats.npts) + assert_equal(timeseries[0].stats.starttime, timeseries[2].stats.starttime) def test_get_stream_gaps(): @@ -72,17 +72,17 @@ def test_get_stream_gaps(): trace.stats.delta = 1 # find gaps gaps = TimeseriesUtility.get_stream_gaps(stream) - assert_equals(len(gaps['H']), 2) + assert_equal(len(gaps['H']), 2) # gap at start of H gap = gaps['H'][0] - assert_equals(gap[0], UTCDateTime('2015-01-01T00:00:00Z')) - assert_equals(gap[1], UTCDateTime('2015-01-01T00:00:00Z')) + assert_equal(gap[0], UTCDateTime('2015-01-01T00:00:00Z')) + assert_equal(gap[1], UTCDateTime('2015-01-01T00:00:00Z')) # gap at end of H gap = gaps['H'][1] - assert_equals(gap[0], UTCDateTime('2015-01-01T00:00:03Z')) - assert_equals(gap[1], UTCDateTime('2015-01-01T00:00:04Z')) + assert_equal(gap[0], UTCDateTime('2015-01-01T00:00:03Z')) + assert_equal(gap[1], UTCDateTime('2015-01-01T00:00:04Z')) # no gaps in Z channel - assert_equals(len(gaps['Z']), 0) + assert_equal(len(gaps['Z']), 0) def test_get_stream_gaps_channels(): @@ -102,8 +102,8 @@ def test_get_stream_gaps_channels(): trace.stats.delta = 1 # find gaps gaps = TimeseriesUtility.get_stream_gaps(stream, ['Z']) - assert_equals('H' in gaps, False) - assert_equals(len(gaps['Z']), 0) + assert_equal('H' in gaps, False) + assert_equal(len(gaps['Z']), 0) def test_get_trace_gaps(): @@ -118,10 +118,10 @@ def test_get_trace_gaps(): trace.stats.delta = 60 # find gap gaps = TimeseriesUtility.get_trace_gaps(trace) - assert_equals(len(gaps), 1) + assert_equal(len(gaps), 1) gap = gaps[0] - assert_equals(gap[0], UTCDateTime('2015-01-01T00:02:00Z')) - assert_equals(gap[1], UTCDateTime('2015-01-01T00:03:00Z')) + assert_equal(gap[0], UTCDateTime('2015-01-01T00:02:00Z')) + assert_equal(gap[1], UTCDateTime('2015-01-01T00:03:00Z')) def test_get_merged_gaps(): @@ -152,15 +152,15 @@ def test_get_merged_gaps(): ], ] }) - assert_equals(len(merged), 2) + assert_equal(len(merged), 2) # first gap combines H and Z gaps gap = merged[0] - assert_equals(gap[0], UTCDateTime('2015-01-01T00:00:00Z')) - assert_equals(gap[1], UTCDateTime('2015-01-01T00:00:03Z')) + assert_equal(gap[0], UTCDateTime('2015-01-01T00:00:00Z')) + assert_equal(gap[1], 
UTCDateTime('2015-01-01T00:00:03Z')) # second gap is second Z gap gap = merged[1] - assert_equals(gap[0], UTCDateTime('2015-01-01T00:00:05Z')) - assert_equals(gap[1], UTCDateTime('2015-01-01T00:00:07Z')) + assert_equal(gap[0], UTCDateTime('2015-01-01T00:00:05Z')) + assert_equal(gap[1], UTCDateTime('2015-01-01T00:00:07Z')) def test_merge_streams(): @@ -186,14 +186,14 @@ def test_merge_streams(): trace.stats.npts = npts2 merged_streams1 = TimeseriesUtility.merge_streams(timeseries1) # Make sure the empty 'F' was not removed from stream - assert_equals(1, len(merged_streams1.select(channel='F'))) + assert_equal(1, len(merged_streams1.select(channel='F'))) # Merge multiple streams with overlapping timestamps timeseries = timeseries1 + timeseries2 merged_streams = TimeseriesUtility.merge_streams(timeseries) - assert_equals(len(merged_streams), len(timeseries1)) - assert_equals(len(merged_streams[0]), 6) - assert_equals(len(merged_streams[2]), 6) + assert_equal(len(merged_streams), len(timeseries1)) + assert_equal(len(merged_streams[0]), 6) + assert_equal(len(merged_streams[2]), 6) assert_almost_equal( merged_streams.select(channel='H')[0].data, [1, 1, 2, 2, 2, 2]) @@ -213,14 +213,14 @@ def test_merge_streams(): trace.stats.starttime = UTCDateTime('2018-01-01T00:00:00Z') trace.stats.npts = npts3 merged_streams3 = TimeseriesUtility.merge_streams(timeseries3) - assert_equals(len(timeseries3), len(merged_streams3)) + assert_equal(len(timeseries3), len(merged_streams3)) assert_almost_equal( timeseries3.select(channel='H')[0].data, [1, 1, 1, 1]) - assert_equals( + assert_equal( numpy.isnan(timeseries3.select(channel='E')[0].data).all(), True) - assert_equals( + assert_equal( numpy.isnan(timeseries3.select(channel='F')[0].data).all(), True) @@ -230,7 +230,7 @@ def test_merge_streams(): trace11.stats.starttime = UTCDateTime('2018-01-01T00:01:00Z') timeseries4 = Stream(traces=[trace10, trace11]) merged4 = TimeseriesUtility.merge_streams(timeseries4) - assert_equals(len(merged4[0].data), 6) + assert_equal(len(merged4[0].data), 6) assert_almost_equal( merged4.select(channel='H')[0].data, [1, 2, 2, 2, 1, 1]) @@ -246,68 +246,68 @@ def test_pad_timeseries(): timeseries=timeseries, starttime=trace1.stats.starttime, endtime=trace1.stats.endtime) - assert_equals(len(trace1.data), len(trace2.data)) - assert_equals(trace1.stats.starttime, trace2.stats.starttime) - assert_equals(trace1.stats.endtime, trace2.stats.endtime) + assert_equal(len(trace1.data), len(trace2.data)) + assert_equal(trace1.stats.starttime, trace2.stats.starttime) + assert_equal(trace1.stats.endtime, trace2.stats.endtime) # change starttime by less than 1 delta starttime = trace1.stats.starttime endtime = trace1.stats.endtime TimeseriesUtility.pad_timeseries(timeseries, starttime - 30, endtime + 30) - assert_equals(trace1.stats.starttime, starttime) + assert_equal(trace1.stats.starttime, starttime) # Change starttime by more than 1 delta TimeseriesUtility.pad_timeseries(timeseries, starttime - 90, endtime + 90) - assert_equals(trace1.stats.starttime, starttime - 60) - assert_equals(numpy.isnan(trace1.data[0]), numpy.isnan(numpy.NaN)) + assert_equal(trace1.stats.starttime, starttime - 60) + assert_equal(numpy.isnan(trace1.data[0]), numpy.isnan(numpy.NaN)) def test_pad_and_trim_trace(): """TimeseriesUtility_test.test_pad_and_trim_trace() """ trace = _create_trace([0, 1, 2, 3, 4], 'X', UTCDateTime("2018-01-01")) - assert_equals(trace.stats.starttime, UTCDateTime("2018-01-01T00:00:00Z")) - assert_equals(trace.stats.endtime, 
UTCDateTime("2018-01-01T00:04:00Z")) + assert_equal(trace.stats.starttime, UTCDateTime("2018-01-01T00:00:00Z")) + assert_equal(trace.stats.endtime, UTCDateTime("2018-01-01T00:04:00Z")) # starttime between first and second sample # expect first sample to be removed, start at next sample, end at same TimeseriesUtility.pad_and_trim_trace(trace, starttime=UTCDateTime("2018-01-01T00:00:30Z"), endtime=trace.stats.endtime) - assert_equals(trace.stats.starttime, UTCDateTime("2018-01-01T00:01:00Z")) - assert_equals(trace.stats.endtime, UTCDateTime("2018-01-01T00:04:00Z")) + assert_equal(trace.stats.starttime, UTCDateTime("2018-01-01T00:01:00Z")) + assert_equal(trace.stats.endtime, UTCDateTime("2018-01-01T00:04:00Z")) assert_array_equal(trace.data, [1, 2, 3, 4]) # endtime between last and second to last samples TimeseriesUtility.pad_and_trim_trace(trace, starttime=UTCDateTime("2018-01-01T00:00:30Z"), endtime=UTCDateTime("2018-01-01T00:03:50Z")) - assert_equals(trace.stats.starttime, UTCDateTime("2018-01-01T00:01:00Z")) - assert_equals(trace.stats.endtime, UTCDateTime("2018-01-01T00:03:00Z")) + assert_equal(trace.stats.starttime, UTCDateTime("2018-01-01T00:01:00Z")) + assert_equal(trace.stats.endtime, UTCDateTime("2018-01-01T00:03:00Z")) assert_array_equal(trace.data, [1, 2, 3]) # pad outward TimeseriesUtility.pad_and_trim_trace(trace, starttime=UTCDateTime("2018-01-01T00:00:00Z"), endtime=UTCDateTime("2018-01-01T00:05:00Z")) - assert_equals(trace.stats.starttime, UTCDateTime("2018-01-01T00:00:00Z")) - assert_equals(trace.stats.endtime, UTCDateTime("2018-01-01T00:05:00Z")) + assert_equal(trace.stats.starttime, UTCDateTime("2018-01-01T00:00:00Z")) + assert_equal(trace.stats.endtime, UTCDateTime("2018-01-01T00:05:00Z")) assert_array_equal(trace.data, [numpy.nan, 1, 2, 3, numpy.nan, numpy.nan]) # remove exactly one sample TimeseriesUtility.pad_and_trim_trace(trace, starttime=UTCDateTime("2018-01-01T00:00:00Z"), endtime=UTCDateTime("2018-01-01T00:04:00Z")) - assert_equals(trace.stats.starttime, UTCDateTime("2018-01-01T00:00:00Z")) - assert_equals(trace.stats.endtime, UTCDateTime("2018-01-01T00:04:00Z")) + assert_equal(trace.stats.starttime, UTCDateTime("2018-01-01T00:00:00Z")) + assert_equal(trace.stats.endtime, UTCDateTime("2018-01-01T00:04:00Z")) assert_array_equal(trace.data, [numpy.nan, 1, 2, 3, numpy.nan]) # pad start and trim end TimeseriesUtility.pad_and_trim_trace(trace, starttime=UTCDateTime("2017-12-31T23:58:59Z"), endtime=UTCDateTime("2018-01-01T00:03:00Z")) - assert_equals(trace.stats.starttime, UTCDateTime("2017-12-31T23:59:00Z")) - assert_equals(trace.stats.endtime, UTCDateTime("2018-01-01T00:03:00Z")) + assert_equal(trace.stats.starttime, UTCDateTime("2017-12-31T23:59:00Z")) + assert_equal(trace.stats.endtime, UTCDateTime("2018-01-01T00:03:00Z")) assert_array_equal(trace.data, [numpy.nan, numpy.nan, 1, 2, 3]) # pad end and trim start TimeseriesUtility.pad_and_trim_trace(trace, starttime=UTCDateTime("2018-01-01T00:00:00Z"), endtime=UTCDateTime("2018-01-01T00:04:00Z")) - assert_equals(trace.stats.starttime, UTCDateTime("2018-01-01T00:00:00Z")) - assert_equals(trace.stats.endtime, UTCDateTime("2018-01-01T00:04:00Z")) + assert_equal(trace.stats.starttime, UTCDateTime("2018-01-01T00:00:00Z")) + assert_equal(trace.stats.endtime, UTCDateTime("2018-01-01T00:04:00Z")) assert_array_equal(trace.data, [numpy.nan, 1, 2, 3, numpy.nan]) diff --git a/test/Util_test.py b/test/Util_test.py index b84748a32..58abe4dbf 100644 --- a/test/Util_test.py +++ b/test/Util_test.py @@ -1,7 +1,7 @@ #! 
/usr/bin/env python import os.path import shutil -from nose.tools import assert_equals, assert_false +from numpy.testing import assert_equal from geomagio import Util from obspy.core import UTCDateTime @@ -12,7 +12,7 @@ def test_get_file_for_url__throws_exception(): # throws exception for non "file://" urls try: Util.get_file_from_url('http://someserver/path') - assert_false('expected exception') + assert False, ('expected exception') except Exception: pass @@ -22,7 +22,7 @@ def test_get_file_for_url__parses_file_urls(): """ # parses file urls f = Util.get_file_from_url('file://./somefile') - assert_equals(f, './somefile') + assert_equal(f, './somefile') def test_get_file_for_url__creates_directories(): @@ -34,9 +34,9 @@ def test_get_file_for_url__creates_directories(): f = Util.get_file_from_url('file:///tmp/_geomag_algorithms_test_/somefile', createParentDirectory=True) if not os.path.isdir('/tmp/_geomag_algorithms_test_'): - assert_false('directory not created') + assert False, ('directory not created') shutil.rmtree('/tmp/_geomag_algorithms_test_') - assert_equals(f, '/tmp/_geomag_algorithms_test_/somefile') + assert_equal(f, '/tmp/_geomag_algorithms_test_/somefile') def test_get_interval__defaults(): @@ -45,7 +45,7 @@ def test_get_interval__defaults(): starttime = UTCDateTime('2015-01-01T00:00:00Z') endtime = UTCDateTime('2015-02-01T00:00:00Z') intervals = Util.get_intervals(starttime, endtime) - assert_equals(len(intervals), 31) + assert_equal(len(intervals), 31) def test_get_interval__custom_size(): @@ -54,7 +54,7 @@ def test_get_interval__custom_size(): starttime = UTCDateTime('2015-01-01T00:00:00Z') endtime = UTCDateTime('2015-01-02T00:00:00Z') intervals = Util.get_intervals(starttime, endtime, size=3600) - assert_equals(len(intervals), 24) + assert_equal(len(intervals), 24) def test_get_interval__negative_size(): @@ -63,9 +63,9 @@ def test_get_interval__negative_size(): starttime = UTCDateTime('2015-01-01T00:00:00Z') endtime = UTCDateTime('2015-01-02T00:00:00Z') intervals = Util.get_intervals(starttime, endtime, size=-1) - assert_equals(len(intervals), 1) - assert_equals(intervals[0]['start'], starttime) - assert_equals(intervals[0]['end'], endtime) + assert_equal(len(intervals), 1) + assert_equal(intervals[0]['start'], starttime) + assert_equal(intervals[0]['end'], endtime) def test_get_interval__trim(): @@ -74,4 +74,4 @@ def test_get_interval__trim(): starttime = UTCDateTime('2015-01-01T01:00:00Z') endtime = UTCDateTime('2015-01-02T00:00:00Z') intervals = Util.get_intervals(starttime, endtime, trim=True) - assert_equals(intervals[0]['start'], starttime) + assert_equal(intervals[0]['start'], starttime) diff --git a/test/WebService_test.py b/test/WebService_test.py index 02a515515..f9f237778 100644 --- a/test/WebService_test.py +++ b/test/WebService_test.py @@ -1,7 +1,7 @@ """Unit Tests for WebService""" -from cgi import parse_qs +from urllib.parse import parse_qs from datetime import datetime -from nose.tools import assert_equals, assert_is_instance, assert_raises +from numpy.testing import assert_equal, assert_raises import numpy import webtest @@ -64,7 +64,7 @@ def test__get_param(): } assert_raises(Exception, _get_param, params, 'id', required=True) elements = _get_param(params, 'elements') - assert_equals(elements, 'H,E,Z,F') + assert_equal(elements, 'H,E,Z,F') assert_raises(Exception, _get_param, params, 'sampling_period') @@ -79,7 +79,7 @@ def test_fetch(): '&endtime=2016-06-07&elements=H,E,Z,F&sampling_period=60' '&format=iaga2002&type=variation')) timeseries = 
service.fetch(query) - assert_is_instance(timeseries, Stream) + assert_equal(isinstance(timeseries, Stream), True) def test_parse(): @@ -93,25 +93,25 @@ def test_parse(): query = service.parse(parse_qs('id=BOU&starttime=2016-06-06' '&endtime=2016-06-07&elements=H,E,Z,F&sampling_period=60' '&format=iaga2002&type=variation')) - assert_equals(query.observatory_id, 'BOU') - assert_equals(query.starttime, UTCDateTime(2016, 6, 6, 0)) - assert_equals(query.endtime, UTCDateTime(2016, 6, 7, 0)) - assert_equals(query.elements, ['H', 'E', 'Z', 'F']) - assert_equals(query.sampling_period, '60') - assert_equals(query.output_format, 'iaga2002') - assert_equals(query.data_type, 'variation') + assert_equal(query.observatory_id, 'BOU') + assert_equal(query.starttime, UTCDateTime(2016, 6, 6, 0)) + assert_equal(query.endtime, UTCDateTime(2016, 6, 7, 0)) + assert_equal(query.elements, ['H', 'E', 'Z', 'F']) + assert_equal(query.sampling_period, '60') + assert_equal(query.output_format, 'iaga2002') + assert_equal(query.data_type, 'variation') # Test that defaults are set for unspecified values now = datetime.now() today = UTCDateTime(year=now.year, month=now.month, day=now.day, hour=0) tomorrow = today + (24 * 60 * 60 - 1) query = service.parse(parse_qs('id=BOU')) - assert_equals(query.observatory_id, 'BOU') - assert_equals(query.starttime, today) - assert_equals(query.endtime, tomorrow) - assert_equals(query.elements, ('X', 'Y', 'Z', 'F')) - assert_equals(query.sampling_period, '60') - assert_equals(query.output_format, 'iaga2002') - assert_equals(query.data_type, 'variation') + assert_equal(query.observatory_id, 'BOU') + assert_equal(query.starttime, today) + assert_equal(query.endtime, tomorrow) + assert_equal(query.elements, ('X', 'Y', 'Z', 'F')) + assert_equal(query.sampling_period, '60') + assert_equal(query.output_format, 'iaga2002') + assert_equal(query.data_type, 'variation') assert_raises(Exception, service.parse, parse_qs('/?id=bad')) @@ -124,22 +124,22 @@ def test_requests(): app = webtest.TestApp(WebService(TestFactory())) # Check invalid request (bad values) response = app.get('/?id=bad', expect_errors=True) - assert_equals(response.status_int, 400) - assert_equals(response.status, '400 Bad Request') - assert_equals(response.content_type, 'text/plain') + assert_equal(response.status_int, 400) + assert_equal(response.status, '400 Bad Request') + assert_equal(response.content_type, 'text/plain') # Check invalid request (duplicates) response = app.get('/?id=BOU&id=BOU', expect_errors=True) - assert_equals(response.status_int, 400) - assert_equals(response.status, '400 Bad Request') - assert_equals(response.content_type, 'text/plain') + assert_equal(response.status_int, 400) + assert_equal(response.status, '400 Bad Request') + assert_equal(response.content_type, 'text/plain') # Check valid request (upper and lower case) response = app.get('/?id=BOU') - assert_equals(response.status_int, 200) - assert_equals(response.status, '200 OK') - assert_equals(response.content_type, 'text/plain') + assert_equal(response.status_int, 200) + assert_equal(response.status, '200 OK') + assert_equal(response.content_type, 'text/plain') # Test internal server error (use fake factory) app = webtest.TestApp(WebService(ErrorFactory(), error_stream=None)) response = app.get('/?id=BOU', expect_errors=True) - assert_equals(response.status_int, 500) - assert_equals(response.status, '500 Internal Server Error') - assert_equals(response.content_type, 'text/plain') + assert_equal(response.status_int, 500) + 
assert_equal(response.status, '500 Internal Server Error') + assert_equal(response.content_type, 'text/plain') diff --git a/test/algorithm_test/AdjustedAlgorithm_test.py b/test/algorithm_test/AdjustedAlgorithm_test.py index 3d2944203..988dbfc84 100644 --- a/test/algorithm_test/AdjustedAlgorithm_test.py +++ b/test/algorithm_test/AdjustedAlgorithm_test.py @@ -1,7 +1,6 @@ from geomagio.algorithm import AdjustedAlgorithm as adj import geomagio.iaga2002 as i2 -from nose.tools import assert_equals -from numpy.testing import assert_almost_equal +from numpy.testing import assert_almost_equal, assert_equal def test_construct(): @@ -14,7 +13,7 @@ def test_construct(): assert_almost_equal(a.matrix[0, 0], 9.83427577e-01, 6) - assert_equals(a.pier_correction, -22) + assert_equal(a.pier_correction, -22) def test_process(): diff --git a/test/algorithm_test/Algorithm_test.py b/test/algorithm_test/Algorithm_test.py index 8ce878f1d..b8e1aa9c4 100644 --- a/test/algorithm_test/Algorithm_test.py +++ b/test/algorithm_test/Algorithm_test.py @@ -1,7 +1,6 @@ #! /usr/bin/env python from obspy.core.stream import Stream -from nose.tools import assert_equals -from nose.tools import assert_is_instance +from numpy.testing import assert_equal from geomagio.algorithm import Algorithm @@ -13,7 +12,7 @@ def test_algorithm_process(): algorithm = Algorithm() timeseries = Stream() outputstream = algorithm.process(timeseries) - assert_is_instance(outputstream, Stream) + assert_equal(isinstance(outputstream, Stream), True) def test_algorithm_channels(): @@ -26,5 +25,5 @@ def test_algorithm_channels(): outchannels = ['H', 'D', 'Z', 'F'] algorithm = Algorithm(inchannels=inchannels, outchannels=outchannels) - assert_equals(algorithm.get_input_channels(), inchannels) - assert_equals(algorithm.get_output_channels(), outchannels) + assert_equal(algorithm.get_input_channels(), inchannels) + assert_equal(algorithm.get_output_channels(), outchannels) diff --git a/test/algorithm_test/AverageAlgorithm_test.py b/test/algorithm_test/AverageAlgorithm_test.py index 7784b4153..d907ee044 100644 --- a/test/algorithm_test/AverageAlgorithm_test.py +++ b/test/algorithm_test/AverageAlgorithm_test.py @@ -2,10 +2,8 @@ from geomagio.algorithm import AverageAlgorithm from obspy.core.stream import Stream from ..StreamConverter_test import __create_trace from obspy.core import UTCDateTime -from nose.tools import assert_equals import numpy as np -from numpy.testing import assert_array_equal -# from nose.tools import assert_almost_equals +from numpy.testing import assert_array_equal, assert_equal def test_process(): @@ -119,6 +117,6 @@ def test_metadata(): outstream = alg.process(timeseries) # The station name should be changed to 'USGS' - assert_equals(outstream[0].stats.station, 'USGS') + assert_equal(outstream[0].stats.station, 'USGS') # The channel should be changed to 'Hdt' - assert_equals(outstream[0].stats.channel, 'Hdt') + assert_equal(outstream[0].stats.channel, 'Hdt') diff --git a/test/algorithm_test/SQDistAlgorithm_test.py b/test/algorithm_test/SQDistAlgorithm_test.py index 48c071d81..4cbda67c4 100644 --- a/test/algorithm_test/SQDistAlgorithm_test.py +++ b/test/algorithm_test/SQDistAlgorithm_test.py @@ -1,10 +1,10 @@ from geomagio.algorithm import SqDistAlgorithm as sq -from nose.tools import assert_equals import numpy as np - -assert_allclose = np.testing.assert_allclose -assert_almost_equal = np.testing.assert_almost_equal -assert_array_less = np.testing.assert_array_less +from numpy.testing import ( + assert_allclose, + assert_almost_equal, + 
assert_array_less, + assert_equal) def test_sqdistalgorithm_additive1(): @@ -158,7 +158,7 @@ def test_sqdistalgorithm_additive2(): yhat0=None, s0=s0, l0=l0, b0=b0, sigma0=sigma0) # The output should track the input exactly on this simple series - assert_equals(synHat000to050.all(), syn000to050.all(), + assert_equal(synHat000to050.all(), syn000to050.all(), 'Output of additive should match simple sinusoid exactly') # Check max, min and average diff --git a/test/algorithm_test/XYZAlgorithm_test.py b/test/algorithm_test/XYZAlgorithm_test.py index 8d0857447..9e4870ed9 100644 --- a/test/algorithm_test/XYZAlgorithm_test.py +++ b/test/algorithm_test/XYZAlgorithm_test.py @@ -1,7 +1,6 @@ #! /usr/bin/env python from obspy.core.stream import Stream -from nose.tools import assert_equals -from nose.tools import assert_is +from numpy.testing import assert_equal from geomagio.algorithm import XYZAlgorithm from ..StreamConverter_test import __create_trace import numpy as np @@ -19,7 +18,7 @@ def test_xyzalgorithm_process(): timeseries += __create_trace('Z', [1, 1]) timeseries += __create_trace('F', [1, 1]) outputstream = algorithm.process(timeseries) - assert_is(outputstream[0].stats.channel, 'X') + assert_equal(outputstream[0].stats.channel, 'X') def test_xyzalgorithm_channels(): @@ -31,8 +30,8 @@ def test_xyzalgorithm_channels(): algorithm = XYZAlgorithm('obs', 'geo') inchannels = ['H', 'E', 'Z', 'F'] outchannels = ['X', 'Y', 'Z', 'F'] - assert_equals(algorithm.get_input_channels(), inchannels) - assert_equals(algorithm.get_output_channels(), outchannels) + assert_equal(algorithm.get_input_channels(), inchannels) + assert_equal(algorithm.get_output_channels(), outchannels) def test_xyzalgorithm_limited_channels(): @@ -49,12 +48,12 @@ def test_xyzalgorithm_limited_channels(): outstream = algorithm.process(timeseries) ds = outstream.select(channel='D') # there is 1 trace - assert_equals(len(ds), 1) + assert_equal(len(ds), 1) d = ds[0] # d has `count` values (same as input) - assert_equals(len(d.data), count) + assert_equal(len(d.data), count) # d has no NaN values - assert_equals(np.isnan(d).any(), False) + assert_equal(np.isnan(d).any(), False) def test_xyzalgorithm_uneccesary_channel_empty(): @@ -72,15 +71,15 @@ def test_xyzalgorithm_uneccesary_channel_empty(): timeseries += __create_trace('Z', [1, np.NaN]) timeseries += __create_trace('F', [np.NaN, np.NaN]) outstream = algorithm.process(timeseries) - assert_equals(outstream.select(channel='Z')[0].data.all(), + assert_equal(outstream.select(channel='Z')[0].data.all(), timeseries.select(channel='Z')[0].data.all()) - assert_equals(outstream.select(channel='F')[0].data.all(), + assert_equal(outstream.select(channel='F')[0].data.all(), timeseries.select(channel='F')[0].data.all()) ds = outstream.select(channel='D') # there is 1 trace - assert_equals(len(ds), 1) + assert_equal(len(ds), 1) d = ds[0] # d has 2 values (same as input) - assert_equals(len(d.data), 2) + assert_equal(len(d.data), 2) # d has no NaN values - assert_equals(np.isnan(d).any(), False) + assert_equal(np.isnan(d).any(), False) diff --git a/test/edge_test/EdgeFactory_test.py b/test/edge_test/EdgeFactory_test.py index e7a731d34..3b3543936 100644 --- a/test/edge_test/EdgeFactory_test.py +++ b/test/edge_test/EdgeFactory_test.py @@ -2,21 +2,21 @@ from obspy.core import Stream, Trace, UTCDateTime from geomagio.edge import EdgeFactory -from nose.tools import assert_equals +from numpy.testing import assert_equal def test__get_edge_network(): """edge_test.EdgeFactory_test.test__get_edge_network() 
""" # _get_edge_network should always return NT for use by USGS geomag - assert_equals(EdgeFactory()._get_edge_network(' ', ' ', ' ', ' '), 'NT') + assert_equal(EdgeFactory()._get_edge_network(' ', ' ', ' ', ' '), 'NT') def test__get_edge_station(): """edge_test.EdgeFactory_test.test__get_edge_station() """ # _get_edge_station will return the observatory code passed in. - assert_equals(EdgeFactory()._get_edge_station('BOU', ' ', ' ', ' '), 'BOU') + assert_equal(EdgeFactory()._get_edge_station('BOU', ' ', ' ', ' '), 'BOU') def test__get_edge_channel(): @@ -24,21 +24,21 @@ def test__get_edge_channel(): """ # Call private function _get_edge_channel, make certain # it gets back the appropriate 2 character code. - assert_equals(EdgeFactory()._get_edge_channel('', 'D', '', 'minute'), + assert_equal(EdgeFactory()._get_edge_channel('', 'D', '', 'minute'), 'MVD') - assert_equals(EdgeFactory()._get_edge_channel('', 'E', '', 'minute'), + assert_equal(EdgeFactory()._get_edge_channel('', 'E', '', 'minute'), 'MVE') - assert_equals(EdgeFactory()._get_edge_channel('', 'F', '', 'minute'), + assert_equal(EdgeFactory()._get_edge_channel('', 'F', '', 'minute'), 'MSF') - assert_equals(EdgeFactory()._get_edge_channel('', 'H', '', 'minute'), + assert_equal(EdgeFactory()._get_edge_channel('', 'H', '', 'minute'), 'MVH') - assert_equals(EdgeFactory()._get_edge_channel('', 'DIST', '', 'minute'), + assert_equal(EdgeFactory()._get_edge_channel('', 'DIST', '', 'minute'), 'MDT') - assert_equals(EdgeFactory()._get_edge_channel('', 'DST', '', 'minute'), + assert_equal(EdgeFactory()._get_edge_channel('', 'DST', '', 'minute'), 'MGD') - assert_equals(EdgeFactory()._get_edge_channel('', 'E-E', '', 'minute'), + assert_equal(EdgeFactory()._get_edge_channel('', 'E-E', '', 'minute'), 'MQE') - assert_equals(EdgeFactory()._get_edge_channel('', 'E-N', '', 'minute'), + assert_equal(EdgeFactory()._get_edge_channel('', 'E-N', '', 'minute'), 'MQN') @@ -47,21 +47,21 @@ def test__get_edge_location(): """ # Call _get_edge_location, make certain it returns the correct edge # location code. 
- assert_equals(EdgeFactory()._get_edge_location( + assert_equal(EdgeFactory()._get_edge_location( '', '', 'variation', ''), 'R0') - assert_equals(EdgeFactory()._get_edge_location( + assert_equal(EdgeFactory()._get_edge_location( '', '', 'quasi-definitive', ''), 'Q0') - assert_equals(EdgeFactory()._get_edge_location( + assert_equal(EdgeFactory()._get_edge_location( '', '', 'definitive', ''), 'D0') def test__get_interval_code(): """edge_test.EdgeFactory_test.test__get_interval_code() """ - assert_equals(EdgeFactory()._get_interval_code('day'), 'D') - assert_equals(EdgeFactory()._get_interval_code('hour'), 'H') - assert_equals(EdgeFactory()._get_interval_code('minute'), 'M') - assert_equals(EdgeFactory()._get_interval_code('second'), 'S') + assert_equal(EdgeFactory()._get_interval_code('day'), 'D') + assert_equal(EdgeFactory()._get_interval_code('hour'), 'H') + assert_equal(EdgeFactory()._get_interval_code('minute'), 'M') + assert_equal(EdgeFactory()._get_interval_code('second'), 'S') def test__set_metadata(): @@ -73,8 +73,8 @@ def test__set_metadata(): trace2 = Trace() stream = Stream(traces=[trace1, trace2]) EdgeFactory()._set_metadata(stream, 'BOU', 'H', 'variation', 'minute') - assert_equals(stream[0].stats['channel'], 'H') - assert_equals(stream[1].stats['channel'], 'H') + assert_equal(stream[0].stats['channel'], 'H') + assert_equal(stream[1].stats['channel'], 'H') # def test_get_timeseries(): @@ -87,7 +87,7 @@ def dont_get_timeseries(): timeseries = edge_factory.get_timeseries( UTCDateTime(2015, 3, 1, 0, 0, 0), UTCDateTime(2015, 3, 1, 1, 0, 0), 'BOU', ('H'), 'variation', 'minute') - assert_equals(timeseries.select(channel='H')[0].stats.station, + assert_equal(timeseries.select(channel='H')[0].stats.station, 'BOU', 'Expect timeseries to have stats') - assert_equals(timeseries.select(channel='H')[0].stats.channel, + assert_equal(timeseries.select(channel='H')[0].stats.channel, 'H', 'Expect timeseries stats channel to be equal to H') diff --git a/test/edge_test/MiniSeedFactory_test.py b/test/edge_test/MiniSeedFactory_test.py index 09947b84a..b37c30eae 100644 --- a/test/edge_test/MiniSeedFactory_test.py +++ b/test/edge_test/MiniSeedFactory_test.py @@ -1,7 +1,7 @@ """Tests for MiniSeedFactory.py""" -from nose.tools import assert_equals import numpy +from numpy.testing import assert_equal from obspy.core import Stats, Stream, Trace, UTCDateTime from geomagio import TimeseriesUtility from geomagio.edge import MiniSeedFactory @@ -11,7 +11,7 @@ def test__get_edge_network(): """edge_test.MiniSeedFactory_test.test__get_edge_network() """ # _get_edge_network should always return NT for use by USGS geomag - assert_equals( + assert_equal( MiniSeedFactory()._get_edge_network(' ', ' ', ' ', ' '), 'NT') @@ -20,7 +20,7 @@ def test__get_edge_station(): """edge_test.MiniSeedFactory_test.test__get_edge_station() """ # _get_edge_station will return the observatory code passed in. - assert_equals( + assert_equal( MiniSeedFactory()._get_edge_station('BOU', ' ', ' ', ' '), 'BOU') @@ -31,15 +31,15 @@ def test__get_edge_channel(): # Call private function _get_edge_channel, make certain # it gets back the appropriate 2 character code. 
factory = MiniSeedFactory() - assert_equals(factory._get_edge_channel('', 'D', '', 'minute'), 'UFD') - assert_equals(factory._get_edge_channel('', 'U', '', 'minute'), 'UFU') - assert_equals(factory._get_edge_channel('', 'F', '', 'minute'), 'UFF') - assert_equals(factory._get_edge_channel('', 'H', '', 'minute'), 'UFH') - assert_equals(factory._get_edge_channel('', 'BEU', '', 'minute'), 'BEU') - assert_equals(factory._get_edge_channel('', 'Dst4', '', 'minute'), 'UX4') - assert_equals(factory._get_edge_channel('', 'Dst3', '', 'minute'), 'UX3') - assert_equals(factory._get_edge_channel('', 'E-E', '', 'minute'), 'UQE') - assert_equals(factory._get_edge_channel('', 'E-N', '', 'minute'), 'UQN') + assert_equal(factory._get_edge_channel('', 'D', '', 'minute'), 'UFD') + assert_equal(factory._get_edge_channel('', 'U', '', 'minute'), 'UFU') + assert_equal(factory._get_edge_channel('', 'F', '', 'minute'), 'UFF') + assert_equal(factory._get_edge_channel('', 'H', '', 'minute'), 'UFH') + assert_equal(factory._get_edge_channel('', 'BEU', '', 'minute'), 'BEU') + assert_equal(factory._get_edge_channel('', 'Dst4', '', 'minute'), 'UX4') + assert_equal(factory._get_edge_channel('', 'Dst3', '', 'minute'), 'UX3') + assert_equal(factory._get_edge_channel('', 'E-E', '', 'minute'), 'UQE') + assert_equal(factory._get_edge_channel('', 'E-N', '', 'minute'), 'UQN') def test__get_edge_location(): @@ -47,22 +47,22 @@ def test__get_edge_location(): """ # Call _get_edge_location, make certain it returns the correct edge # location code. - assert_equals(MiniSeedFactory()._get_edge_location( + assert_equal(MiniSeedFactory()._get_edge_location( '', '', 'variation', ''), 'R0') - assert_equals(MiniSeedFactory()._get_edge_location( + assert_equal(MiniSeedFactory()._get_edge_location( '', '', 'quasi-definitive', ''), 'Q0') - assert_equals(MiniSeedFactory()._get_edge_location( + assert_equal(MiniSeedFactory()._get_edge_location( '', '', 'definitive', ''), 'D0') def test__get_interval_code(): """edge_test.MiniSeedFactory_test.test__get_interval_code() """ - assert_equals(MiniSeedFactory()._get_interval_code('day'), 'P') - assert_equals(MiniSeedFactory()._get_interval_code('hour'), 'R') - assert_equals(MiniSeedFactory()._get_interval_code('minute'), 'U') - assert_equals(MiniSeedFactory()._get_interval_code('second'), 'L') - assert_equals(MiniSeedFactory()._get_interval_code('tenhertz'), 'B') + assert_equal(MiniSeedFactory()._get_interval_code('day'), 'P') + assert_equal(MiniSeedFactory()._get_interval_code('hour'), 'R') + assert_equal(MiniSeedFactory()._get_interval_code('minute'), 'U') + assert_equal(MiniSeedFactory()._get_interval_code('second'), 'L') + assert_equal(MiniSeedFactory()._get_interval_code('tenhertz'), 'B') class MockMiniSeedInputClient(object): @@ -87,19 +87,19 @@ def test__put_timeseries(): factory.write_client = client factory.put_timeseries(Stream(trace1), channels=('H')) # put timeseries should call close when done - assert_equals(client.close_called, True) + assert_equal(client.close_called, True) # trace should be split in 2 blocks at gap sent = client.last_sent - assert_equals(len(sent), 2) + assert_equal(len(sent), 2) # first trace includes [0...4] - assert_equals(sent[0].stats.channel, 'LFH') - assert_equals(len(sent[0]), 4) - assert_equals(sent[0].stats.endtime, trace1.stats.starttime + 3) + assert_equal(sent[0].stats.channel, 'LFH') + assert_equal(len(sent[0]), 4) + assert_equal(sent[0].stats.endtime, trace1.stats.starttime + 3) # second trace includes [5...9] - assert_equals(sent[1].stats.channel, 'LFH') - 
assert_equals(len(sent[1]), 5) - assert_equals(sent[1].stats.starttime, trace1.stats.starttime + 5) - assert_equals(sent[1].stats.endtime, trace1.stats.endtime) + assert_equal(sent[1].stats.channel, 'LFH') + assert_equal(len(sent[1]), 5) + assert_equal(sent[1].stats.starttime, trace1.stats.starttime + 5) + assert_equal(sent[1].stats.endtime, trace1.stats.endtime) def test__set_metadata(): @@ -111,8 +111,8 @@ def test__set_metadata(): trace2 = Trace() stream = Stream(traces=[trace1, trace2]) MiniSeedFactory()._set_metadata(stream, 'BOU', 'H', 'variation', 'minute') - assert_equals(stream[0].stats['channel'], 'H') - assert_equals(stream[1].stats['channel'], 'H') + assert_equal(stream[0].stats['channel'], 'H') + assert_equal(stream[1].stats['channel'], 'H') # def test_get_timeseries(): @@ -125,9 +125,9 @@ def dont_get_timeseries(): timeseries = edge_factory.get_timeseries( UTCDateTime(2015, 3, 1, 0, 0, 0), UTCDateTime(2015, 3, 1, 1, 0, 0), 'BOU', ('H'), 'variation', 'minute') - assert_equals(timeseries.select(channel='H')[0].stats.station, + assert_equal(timeseries.select(channel='H')[0].stats.station, 'BOU', 'Expect timeseries to have stats') - assert_equals(timeseries.select(channel='H')[0].stats.channel, + assert_equal(timeseries.select(channel='H')[0].stats.channel, 'H', 'Expect timeseries stats channel to be equal to H') diff --git a/test/edge_test/RawInputClient_test.py b/test/edge_test/RawInputClient_test.py index 60e3515e3..4d69005fa 100644 --- a/test/edge_test/RawInputClient_test.py +++ b/test/edge_test/RawInputClient_test.py @@ -3,7 +3,7 @@ import numpy from obspy.core import Stats, Trace, UTCDateTime from geomagio.edge import EdgeFactory, RawInputClient -from nose.tools import assert_equals +from numpy.testing import assert_equal class MockRawInputClient(RawInputClient): @@ -44,7 +44,7 @@ def test_raw_input_client(): trace_send = EdgeFactory()._convert_trace_to_int(trace.copy()) client.send_trace('minute', trace_send) # verify data was sent - assert_equals(len(client.last_send), 1) + assert_equal(len(client.last_send), 1) def test__get_tag(): @@ -58,4 +58,4 @@ def test__get_tag(): station=station, channel=channel, location=location, network=network) tag_send = client._get_tag() - assert_equals(tag_send is not None, True) + assert_equal(tag_send is not None, True) diff --git a/test/iaga2002_test/IAGA2002Factory_test.py b/test/iaga2002_test/IAGA2002Factory_test.py index 2929a6f96..958386960 100644 --- a/test/iaga2002_test/IAGA2002Factory_test.py +++ b/test/iaga2002_test/IAGA2002Factory_test.py @@ -1,6 +1,6 @@ """Tests for IAGA2002Factory class""" -from nose.tools import assert_equals +from numpy.testing import assert_equal from geomagio.iaga2002 import IAGA2002Factory @@ -12,4 +12,4 @@ def test_parse_empty(): """ parser = IAGA2002Factory() stream = parser.parse_string('') - assert_equals(len(stream), 0) + assert_equal(len(stream), 0) diff --git a/test/iaga2002_test/IAGA2002Parser_test.py b/test/iaga2002_test/IAGA2002Parser_test.py index 52172e6f4..be8148002 100644 --- a/test/iaga2002_test/IAGA2002Parser_test.py +++ b/test/iaga2002_test/IAGA2002Parser_test.py @@ -1,6 +1,6 @@ """Tests for the IAGA2002 Parser class.""" -from nose.tools import assert_equals +from numpy.testing import assert_equal from geomagio.iaga2002 import IAGA2002Parser @@ -50,7 +50,7 @@ def test__merge_comments(): Verify, the first and second line are merged. 
""" comments = ['line 1', 'line 2.', 'line 3'] - assert_equals( + assert_equal( IAGA2002Parser()._merge_comments(comments), ['line 1 line 2.', 'line 3']) @@ -64,7 +64,7 @@ def test__parse_header(): parser = IAGA2002Parser() parser._parse_header(' Format ' + 'IAGA-2002 |') - assert_equals(parser.headers['Format'], 'IAGA-2002') + assert_equal(parser.headers['Format'], 'IAGA-2002') def test__parse_comment(): @@ -76,7 +76,7 @@ def test__parse_comment(): parser = IAGA2002Parser() parser._parse_comment(' # Go to www.intermagnet.org for details on' + ' obtaining this product. |') - assert_equals(parser.comments[-1], + assert_equal(parser.comments[-1], 'Go to www.intermagnet.org for details on' + ' obtaining this product.') @@ -93,7 +93,7 @@ def test__parse_channels(): 'BDT |') parser._parse_channels('DATE TIME DOY ' + 'BDTH BDTD BDTZ BDTF |') - assert_equals(parser.channels, ['H', 'D', 'Z', 'F']) + assert_equal(parser.channels, ['H', 'D', 'Z', 'F']) def test_parse_decbas(): @@ -105,4 +105,4 @@ def test_parse_decbas(): """ parser = IAGA2002Parser() parser.parse(IAGA2002_EXAMPLE) - assert_equals(parser.metadata['declination_base'], 5527) + assert_equal(parser.metadata['declination_base'], 5527) diff --git a/test/imfjson_test/IMFJSONWriter_test.py b/test/imfjson_test/IMFJSONWriter_test.py index 040284295..7f5ff92af 100644 --- a/test/imfjson_test/IMFJSONWriter_test.py +++ b/test/imfjson_test/IMFJSONWriter_test.py @@ -1,6 +1,6 @@ """Tests for the IMFJSON Writer class.""" -from nose.tools import assert_equals +from numpy.testing import assert_equal from geomagio.iaga2002 import IAGA2002Factory from geomagio.imfjson import IMFJSONWriter import numpy as np @@ -24,19 +24,19 @@ def test_metadata(): """ writer = IMFJSONWriter() metadata = writer._format_metadata(EXAMPLE_STATS, EXAMPLE_CHANNELS) - assert_equals(metadata['status'], 200) + assert_equal(metadata['status'], 200) # Test intermagnet parameters intermag = metadata['intermagnet'] - assert_equals(intermag['reported_orientation'], "HDZF") - assert_equals(intermag['sensor_orientation'], "HDZF") - assert_equals(intermag['data_type'], "variation") - assert_equals(intermag['sampling_period'], 60) - assert_equals(intermag['digital_sampling_rate'], 0.01) + assert_equal(intermag['reported_orientation'], "HDZF") + assert_equal(intermag['sensor_orientation'], "HDZF") + assert_equal(intermag['data_type'], "variation") + assert_equal(intermag['sampling_period'], 60) + assert_equal(intermag['digital_sampling_rate'], 0.01) # Test intermagnet-imo parameters imo = metadata['intermagnet']['imo'] - assert_equals(imo['iaga_code'], "BOU") - assert_equals(imo['name'], "Boulder") - assert_equals(imo['coordinates'], [254.764, 40.137, 1682]) + assert_equal(imo['iaga_code'], "BOU") + assert_equal(imo['name'], "Boulder") + assert_equal(imo['coordinates'], [254.764, 40.137, 1682]) def test_times(): @@ -54,7 +54,7 @@ def test_times(): test_date_times = [] for idx in range(test_day.shape[0]): test_date_times += [test_day[idx] + "T" + test_time[idx] + "Z"] - assert_equals(times, test_date_times) + assert_equal(times, test_date_times) def test_values(): @@ -71,23 +71,23 @@ def test_values(): test_val_keys = ["id", "metadata", "values"] for val in values: for key, test in zip(val, test_val_keys): - assert_equals(key, test) - assert_equals(values[0]['id'], "H") - assert_equals(values[1]['id'], "D") - assert_equals(values[2]['id'], "Z") - assert_equals(values[3]['id'], "F") + assert_equal(key, test) + assert_equal(values[0]['id'], "H") + assert_equal(values[1]['id'], "D") + 
assert_equal(values[2]['id'], "Z") + assert_equal(values[3]['id'], "F") # Test values-metadata (need to add flags) metadata = values[0]['metadata'] test_metadata_keys = ["element", "network", "station", "channel", "location"] for key, test in zip(metadata, test_metadata_keys): - assert_equals(key, test) - assert_equals(metadata['element'], "H") - assert_equals(metadata['network'], "NT") - assert_equals(metadata['station'], "BOU") + assert_equal(key, test) + assert_equal(metadata['element'], "H") + assert_equal(metadata['network'], "NT") + assert_equal(metadata['station'], "BOU") # channels do not match H v MVH - # assert_equals(metadata['channel'], "MVH") - assert_equals(metadata['location'], "R0") + # assert_equal(metadata['channel'], "MVH") + assert_equal(metadata['location'], "R0") # Test values-values # Round to match iaga format vals_H = np.around(values[0]['values'], 2) @@ -95,5 +95,5 @@ def test_values(): test_val_H, test_val_D = np.loadtxt(EXAMPLE_FILE, skiprows=25, usecols=(3, 4), unpack=True, dtype=float) # tolist required to prevent ValueError in comparison - assert_equals(vals_H.tolist(), test_val_H.tolist()) - assert_equals(vals_D.tolist(), test_val_D.tolist()) + assert_equal(vals_H.tolist(), test_val_H.tolist()) + assert_equal(vals_D.tolist(), test_val_D.tolist()) diff --git a/test/imfv122_test/IMFV122Parser_test.py b/test/imfv122_test/IMFV122Parser_test.py index ad5a50f29..aaf591ac5 100644 --- a/test/imfv122_test/IMFV122Parser_test.py +++ b/test/imfv122_test/IMFV122Parser_test.py @@ -1,6 +1,6 @@ """Tests for the IMFV122 Parser class.""" -from nose.tools import assert_equals +from numpy.testing import assert_equal from geomagio.imfv122 import IMFV122Parser from obspy.core import UTCDateTime @@ -11,14 +11,14 @@ def test_imfv122_parse_header__hour_of_day(): parser = IMFV122Parser() parser._parse_header( 'KAK MAY0216 123 03 HDZF A KYO 05381402 000000 RRRRRRRRRRRRRRRR') - assert_equals(parser.channels, ['H', 'D', 'Z', 'F']) + assert_equal(parser.channels, ['H', 'D', 'Z', 'F']) metadata = parser.metadata - assert_equals(metadata['declination_base'], 0) - assert_equals(metadata['geodetic_latitude'], 53.8) - assert_equals(metadata['geodetic_longitude'], 140.2) - assert_equals(metadata['station'], 'KAK') - assert_equals(parser._delta, 60) - assert_equals(parser._nexttime, UTCDateTime('2016-05-02T03:00:00Z')) + assert_equal(metadata['declination_base'], 0) + assert_equal(metadata['geodetic_latitude'], 53.8) + assert_equal(metadata['geodetic_longitude'], 140.2) + assert_equal(metadata['station'], 'KAK') + assert_equal(parser._delta, 60) + assert_equal(parser._nexttime, UTCDateTime('2016-05-02T03:00:00Z')) def test_imfv122_parse_header__minute_of_day(): @@ -27,14 +27,14 @@ def test_imfv122_parse_header__minute_of_day(): parser = IMFV122Parser() parser._parse_header( 'HER JAN0116 001 0123 HDZF R EDI 12440192 -14161 DRRRRRRRRRRRRRRR') - assert_equals(parser.channels, ['H', 'D', 'Z', 'F']) + assert_equal(parser.channels, ['H', 'D', 'Z', 'F']) metadata = parser.metadata - assert_equals(metadata['declination_base'], -14161) - assert_equals(metadata['geodetic_latitude'], 124.4) - assert_equals(metadata['geodetic_longitude'], 19.2) - assert_equals(metadata['station'], 'HER') - assert_equals(parser._delta, 60) - assert_equals(parser._nexttime, UTCDateTime('2016-01-01T02:03:00Z')) + assert_equal(metadata['declination_base'], -14161) + assert_equal(metadata['geodetic_latitude'], 124.4) + assert_equal(metadata['geodetic_longitude'], 19.2) + assert_equal(metadata['station'], 'HER') + 
assert_equal(parser._delta, 60) + assert_equal(parser._nexttime, UTCDateTime('2016-01-01T02:03:00Z')) def test_imfv122_parse_data(): @@ -46,16 +46,16 @@ def test_imfv122_parse_data(): parser._parse_data('1234 5678 9101 1121 3141 5161 7181 9202') import pprint pprint.pprint(parser._parsedata) - assert_equals(parser._parsedata[0][0], UTCDateTime('2016-01-01T02:03:00Z')) - assert_equals(parser._parsedata[1][0], '1234') - assert_equals(parser._parsedata[2][0], '5678') - assert_equals(parser._parsedata[3][0], '9101') - assert_equals(parser._parsedata[4][0], '1121') - assert_equals(parser._parsedata[0][1], UTCDateTime('2016-01-01T02:04:00Z')) - assert_equals(parser._parsedata[1][1], '3141') - assert_equals(parser._parsedata[2][1], '5161') - assert_equals(parser._parsedata[3][1], '7181') - assert_equals(parser._parsedata[4][1], '9202') + assert_equal(parser._parsedata[0][0], UTCDateTime('2016-01-01T02:03:00Z')) + assert_equal(parser._parsedata[1][0], '1234') + assert_equal(parser._parsedata[2][0], '5678') + assert_equal(parser._parsedata[3][0], '9101') + assert_equal(parser._parsedata[4][0], '1121') + assert_equal(parser._parsedata[0][1], UTCDateTime('2016-01-01T02:04:00Z')) + assert_equal(parser._parsedata[1][1], '3141') + assert_equal(parser._parsedata[2][1], '5161') + assert_equal(parser._parsedata[3][1], '7181') + assert_equal(parser._parsedata[4][1], '9202') def test_imfv122_post_process(): @@ -66,13 +66,13 @@ def test_imfv122_post_process(): 'HER JAN0116 001 0123 HDZF R EDI 12440192 -14161 DRRRRRRRRRRRRRRR') parser._parse_data('1234 5678 9101 1121 3141 5161 7181 9202') parser._post_process() - assert_equals(parser.times[0], UTCDateTime('2016-01-01T02:03:00Z')) - assert_equals(parser.data['H'][0], 123.4) - assert_equals(parser.data['D'][0], 56.78) - assert_equals(parser.data['Z'][0], 910.1) - assert_equals(parser.data['F'][0], 112.1) - assert_equals(parser.times[1], UTCDateTime('2016-01-01T02:04:00Z')) - assert_equals(parser.data['H'][1], 314.1) - assert_equals(parser.data['D'][1], 51.61) - assert_equals(parser.data['Z'][1], 718.1) - assert_equals(parser.data['F'][1], 920.2) + assert_equal(parser.times[0], UTCDateTime('2016-01-01T02:03:00Z')) + assert_equal(parser.data['H'][0], 123.4) + assert_equal(parser.data['D'][0], 56.78) + assert_equal(parser.data['Z'][0], 910.1) + assert_equal(parser.data['F'][0], 112.1) + assert_equal(parser.times[1], UTCDateTime('2016-01-01T02:04:00Z')) + assert_equal(parser.data['H'][1], 314.1) + assert_equal(parser.data['D'][1], 51.61) + assert_equal(parser.data['Z'][1], 718.1) + assert_equal(parser.data['F'][1], 920.2) diff --git a/test/imfv283_test/IMFV283Parser_test.py b/test/imfv283_test/IMFV283Parser_test.py index add50c90c..31d833ab9 100644 --- a/test/imfv283_test/IMFV283Parser_test.py +++ b/test/imfv283_test/IMFV283Parser_test.py @@ -1,7 +1,7 @@ """Tests for the IMFV283 Parser class.""" from __future__ import unicode_literals -from nose.tools import assert_equals +from numpy.testing import assert_equal from obspy import UTCDateTime from geomagio.imfv283 import IMFV283Parser, imfv283_codes @@ -25,7 +25,7 @@ def test_parse_msg_header(): Verify the header name and value are split at the correct column. 
""" header = IMFV283Parser()._parse_msg_header(IMFV283_EXAMPLE_VIC) - assert_equals(header['obs'], 'VIC') + assert_equal(header['obs'], 'VIC') def test_parse_goes_header(): @@ -35,7 +35,7 @@ def test_parse_goes_header(): imfv283_codes.OBSERVATORIES['VIC'], 191) goes_header = IMFV283Parser()._parse_goes_header(goes_data) - assert_equals(goes_header['day'], 23) + assert_equal(goes_header['day'], 23) def test_estimate_data_time__correct_doy(): @@ -50,9 +50,9 @@ def test_estimate_data_time__correct_doy(): minute = 72 (data_time, transmit_time, corrected) = \ parser._estimate_data_time(transmission, day, minute) - assert_equals(data_time, UTCDateTime('2017-10-01T01:12:00Z')) - assert_equals(transmit_time, UTCDateTime('2017-10-01T01:31:21Z')) - assert_equals(corrected, False) + assert_equal(data_time, UTCDateTime('2017-10-01T01:12:00Z')) + assert_equal(transmit_time, UTCDateTime('2017-10-01T01:31:21Z')) + assert_equal(corrected, False) def test_estimate_data_time__incorrect_doy(): @@ -67,6 +67,6 @@ def test_estimate_data_time__incorrect_doy(): minute = 78 (data_time, transmit_time, corrected) = \ parser._estimate_data_time(transmission, day, minute) - assert_equals(data_time, UTCDateTime('2017-10-01T01:18:00Z')) - assert_equals(transmit_time, UTCDateTime('2017-10-01T01:32:41Z')) - assert_equals(corrected, True) + assert_equal(data_time, UTCDateTime('2017-10-01T01:18:00Z')) + assert_equal(transmit_time, UTCDateTime('2017-10-01T01:32:41Z')) + assert_equal(corrected, True) diff --git a/test/pcdcp_test/PCDCPFactory_test.py b/test/pcdcp_test/PCDCPFactory_test.py index 9b4b6ce14..9fe4f0d4d 100644 --- a/test/pcdcp_test/PCDCPFactory_test.py +++ b/test/pcdcp_test/PCDCPFactory_test.py @@ -3,7 +3,7 @@ from geomagio.pcdcp import PCDCPFactory from obspy.core.utcdatetime import UTCDateTime from obspy.core.stream import Stream -from nose.tools import assert_equals +from numpy.testing import assert_equal pcdcpString = \ """BOU 2015 001 01-Jan-15 HEZF 0.01nT File Version 2.00 @@ -31,17 +31,17 @@ def test_parse_string(): """ stream = PCDCPFactory().parse_string(pcdcpString) - assert_equals(type(stream), Stream) - assert_equals(stream[0].stats.network, 'NT') - assert_equals(stream[0].stats.station, 'BOU') - assert_equals(stream[0].stats.starttime, + assert_equal(type(stream), Stream) + assert_equal(stream[0].stats.network, 'NT') + assert_equal(stream[0].stats.station, 'BOU') + assert_equal(stream[0].stats.starttime, UTCDateTime('2015-01-01T00:00:00.000000Z')) h = stream.select(channel='H')[0] - assert_equals(h.data[1], 20861.90) - assert_equals(stream[0].stats.endtime, + assert_equal(h.data[1], 20861.90) + assert_equal(stream[0].stats.endtime, UTCDateTime('2015-01-01T00:04:00.000000Z')) z = stream.select(channel='Z')[0] - assert_equals(z.data[-1], 47457.43) + assert_equal(z.data[-1], 47457.43) def test_parse_string_seconds(): @@ -52,14 +52,14 @@ def test_parse_string_seconds(): """ stream = PCDCPFactory().parse_string(pcdcpString_seconds) - assert_equals(type(stream), Stream) - assert_equals(stream[0].stats.network, 'NT') - assert_equals(stream[0].stats.station, 'BOU') - assert_equals(stream[0].stats.starttime, + assert_equal(type(stream), Stream) + assert_equal(stream[0].stats.network, 'NT') + assert_equal(stream[0].stats.station, 'BOU') + assert_equal(stream[0].stats.starttime, UTCDateTime('2015-01-01T00:00:00.000000Z')) h = stream.select(channel='H')[0] - assert_equals(h.data[0], 20861.520) - assert_equals(stream[0].stats.endtime, + assert_equal(h.data[0], 20861.520) + assert_equal(stream[0].stats.endtime, 
UTCDateTime('2015-01-01T00:00:04.000000Z')) z = stream.select(channel='Z')[0] - assert_equals(z.data[-1], 47457.384) + assert_equal(z.data[-1], 47457.384) diff --git a/test/pcdcp_test/PCDCPParser_test.py b/test/pcdcp_test/PCDCPParser_test.py index 7ff78762d..6df41c58a 100644 --- a/test/pcdcp_test/PCDCPParser_test.py +++ b/test/pcdcp_test/PCDCPParser_test.py @@ -1,6 +1,6 @@ """Tests for the PCDCP Parser class.""" -from nose.tools import assert_equals +from numpy.testing import assert_equal from geomagio.pcdcp import PCDCPParser @@ -45,11 +45,11 @@ def test_parse_header(): parser._parse_header('BOU 2015 001 01-Jan-15 HEZF 0.01nT' + ' File Version 2.00') - assert_equals(parser.header['date'], '01-Jan-15') - assert_equals(parser.header['station'], 'BOU') - assert_equals(parser.header['year'], '2015') - assert_equals(parser.header['yearday'], '001') - assert_equals(parser.header['resolution'], '0.01nT') + assert_equal(parser.header['date'], '01-Jan-15') + assert_equal(parser.header['station'], 'BOU') + assert_equal(parser.header['year'], '2015') + assert_equal(parser.header['yearday'], '001') + assert_equal(parser.header['resolution'], '0.01nT') def test_parse_header_sec(): @@ -62,8 +62,8 @@ def test_parse_header_sec(): parser._parse_header('BOU 2015 001 01-Jan-15 HEZF 0.001nT' + ' File Version 2.00') - assert_equals(parser.header['date'], '01-Jan-15') - assert_equals(parser.header['station'], 'BOU') - assert_equals(parser.header['year'], '2015') - assert_equals(parser.header['yearday'], '001') - assert_equals(parser.header['resolution'], '0.001nT') + assert_equal(parser.header['date'], '01-Jan-15') + assert_equal(parser.header['station'], 'BOU') + assert_equal(parser.header['year'], '2015') + assert_equal(parser.header['yearday'], '001') + assert_equal(parser.header['resolution'], '0.001nT') -- GitLab
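Reviewer note: the sketch below illustrates the substitution pattern applied throughout this change set. nose is unmaintained and does not run cleanly on newer Python 3 releases, so its assertion helpers are replaced with numpy.testing.assert_equal (or plain assert statements), and the cgi.escape / cgi.parse_qs imports, which newer Python 3 releases removed from the cgi module, are replaced with html.escape and urllib.parse.parse_qs. The Widget class and test_widget function are hypothetical names used only to show the pattern; they are not code from this repository.

# Minimal sketch of the nose -> numpy.testing migration pattern (assumed names).
from html import escape            # replaces: from cgi import escape
from urllib.parse import parse_qs  # replaces: from cgi import parse_qs
from numpy.testing import assert_equal  # replaces: from nose.tools import assert_equals


class Widget(object):
    """Hypothetical stand-in object used by the example test."""
    name = 'example'


def test_widget():
    widget = Widget()
    # nose: assert_equals(widget.name, 'example')
    assert_equal(widget.name, 'example')
    # nose: assert_is_instance(widget, Widget)
    # numpy.testing has no isinstance assertion, so the patch wraps isinstance():
    assert_equal(isinstance(widget, Widget), True)
    # nose: assert_false('expected exception') becomes a plain assert with a message:
    #     assert False, 'expected exception'
    # cgi replacements (note: html.escape also escapes quotes by default,
    # while cgi.escape did not):
    assert_equal(escape('<a>'), '&lt;a&gt;')
    assert_equal(parse_qs('id=BOU')['id'], ['BOU'])


if __name__ == '__main__':
    test_widget()
    print('ok')

After this migration the suite no longer needs the nose runner; a collector such as pytest picks up these test_* functions directly.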