From 066a56a7762dda71a7dc41c1a7893ffc23c6afa6 Mon Sep 17 00:00:00 2001
From: Hal Simpson <hasimpson@usgs.gov>
Date: Mon, 24 Aug 2015 01:37:16 -0600
Subject: [PATCH] Added IMFV283 factory and parser

---
 geomagio/imfv283/IMFV283Factory.py       | 383 +++++++++++++++++++++++
 geomagio/imfv283/IMFV283Parser.py        | 374 ++++++++++++++++++++++
 geomagio/imfv283/IMFV283Parser_test.py   |  35 +++
 geomagio/imfv283/StreamIMFV283Factory.py |  35 +++
 geomagio/imfv283/__init__.py             |  16 +
 geomagio/imfv283/imfv283_codes.py        | 174 ++++++++++
 6 files changed, 1017 insertions(+)
 create mode 100644 geomagio/imfv283/IMFV283Factory.py
 create mode 100644 geomagio/imfv283/IMFV283Parser.py
 create mode 100644 geomagio/imfv283/IMFV283Parser_test.py
 create mode 100644 geomagio/imfv283/StreamIMFV283Factory.py
 create mode 100644 geomagio/imfv283/__init__.py
 create mode 100644 geomagio/imfv283/imfv283_codes.py

diff --git a/geomagio/imfv283/IMFV283Factory.py b/geomagio/imfv283/IMFV283Factory.py
new file mode 100644
index 000000000..530ca8e65
--- /dev/null
+++ b/geomagio/imfv283/IMFV283Factory.py
@@ -0,0 +1,383 @@
+"""Factory that loads IMFV283 files."""
+
+import obspy.core
+import os
+import urllib2
+import numpy
+from .. import ChannelConverter
+from ..TimeseriesFactory import TimeseriesFactory
+from ..TimeseriesFactoryException import TimeseriesFactoryException
+from IMFV283Parser import IMFV283Parser
+
+
+# pattern for IMFV283 file names
+IMFV283_FILE_PATTERN = 'dcpmsgs.txt'
+
+
+def read_url(url):
+    """Open and read url contents.
+
+    Parameters
+    ----------
+    url : str
+        A urllib2 compatible url, such as http:// or file://.
+
+    Returns
+    -------
+    str
+        contents returned by url.
+
+    Raises
+    ------
+    urllib2.URLError
+        if an error occurs while opening or reading the url.
+    """
+    response = urllib2.urlopen(url)
+    content = None
+    try:
+        content = response.read()
+    except urllib2.URLError, e:
+        print e.reason
+        raise
+    finally:
+        response.close()
+    return content
+
+
+class IMFV283Factory(TimeseriesFactory):
+    """TimeseriesFactory for IMFV283 formatted files.
+
+    Parameters
+    ----------
+    urlTemplate : str
+        A string that contains any of the following replacement patterns:
+        - '%(i)s' : interval abbreviation
+        - '%(interval)s' : interval name
+        - '%(obs)s' : lowercase observatory code
+        - '%(OBS)s' : uppercase observatory code
+        - '%(t)s' : type abbreviation
+        - '%(type)s' : type name
+        - '%(ymd)s' : time formatted as YYYYMMDD
+
+    See Also
+    --------
+    IMFV283Parser
+
+    Notes
+    -----
+    The urlTemplate is probably overkill for IMFV283, but I've left it in
+    place in case someone has a different methodology that more closely
+    models the url/file reading.
+    """
+
+    def __init__(self, urlTemplate, observatory=None, channels=None, type=None,
+            interval=None):
+        TimeseriesFactory.__init__(self, observatory, channels, type, interval)
+        self.urlTemplate = urlTemplate
+
+    def get_timeseries(self, starttime, endtime, observatory=None,
+            channels=None, type='variation', interval='minute'):
+        """Get timeseries data.
+
+        Parameters
+        ----------
+        starttime : obspy.core.UTCDateTime
+            time of first sample.
+        endtime : obspy.core.UTCDateTime
+            time of last sample.
+        observatory : str
+            observatory code.
+        channels : array_like
+            list of channels to return.
+        type : {'variation'}
+            data type.
+        interval : {'minute'}
+            data interval.
+
+        Returns
+        -------
+        obspy.core.Stream
+            timeseries object with requested data.
+
+        Raises
+        ------
+        TimeseriesFactoryException
+            if invalid values are requested, or errors occur while
+            retrieving timeseries.
+        """
+        observatory = observatory or self.observatory
+        channels = channels or self.channels
+        type = type or self.type
+        interval = interval or self.interval
+        timeseries = obspy.core.Stream()
+
+        url = self._get_url(observatory, obspy.core.UTCDateTime(),
+                type, interval)
+        imfV283File = read_url(url)
+        timeseries += self.parse_string(imfV283File)
+        # merge channel traces for multiple days
+        timeseries.merge()
+        # trim to requested start/end time
+        timeseries.trim(starttime, endtime)
+        if observatory is not None:
+            timeseries = timeseries.select(station=observatory)
+        return timeseries
+
+    def parse_string(self, imfV283String):
+        """Parse the contents of a string in the format of an IMFV283 file.
+
+        Parameters
+        ----------
+        imfV283String : str
+            string containing IMFV283 content.
+
+        Returns
+        -------
+        obspy.core.Stream
+            parsed data.
+        """
+        parser = IMFV283Parser()
+        parser.parse(imfV283String)
+
+        stream = parser.stream
+        stream.merge()
+
+        for trace in stream:
+            if isinstance(trace.data, numpy.ma.MaskedArray):
+                trace.data.set_fill_value(numpy.nan)
+                trace.data = trace.data.filled()
+        if stream.select(channel='D').count() > 0:
+            for trace in stream.select(channel='D'):
+                trace.data = ChannelConverter.get_radians_from_minutes(
+                        trace.data)
+
+        return stream
+
+    def _get_url(self, observatory, date, type='variation', interval='minute'):
+        """Get the url for a specified IMFV283 file.
+
+        Replaces patterns (described in class docstring) with values based on
+        parameter values.
+
+        Parameters
+        ----------
+        observatory : str
+            observatory code.
+        date : obspy.core.UTCDateTime
+            day to fetch (only year, month, day are used).
+        type : {'variation', 'quasi-definitive'}
+            data type.
+        interval : {'minute', 'second'}
+            data interval.
+
+        Raises
+        ------
+        TimeseriesFactoryException
+            if type or interval are not supported.
+        """
+        return self.urlTemplate % {
+            'i': self._get_interval_abbreviation(interval),
+            'interval': self._get_interval_name(interval),
+            'obs': observatory.lower(),
+            'OBS': observatory.upper(),
+            't': self._get_type_abbreviation(type),
+            'type': self._get_type_name(type),
+            'ymd': date.strftime("%Y%m%d")}
+
+    def _get_interval_abbreviation(self, interval):
+        """Get abbreviation for a data interval.
+
+        Used by ``_get_url`` to replace ``%(i)s`` in urlTemplate.
+
+        Parameters
+        ----------
+        interval : {'daily', 'hourly', 'minute', 'monthly', 'second'}
+
+        Returns
+        -------
+        abbreviation for ``interval``.
+
+        Raises
+        ------
+        TimeseriesFactoryException
+            if ``interval`` is not supported.
+        """
+        interval_abbr = None
+        if interval == 'daily':
+            interval_abbr = 'day'
+        elif interval == 'hourly':
+            interval_abbr = 'hor'
+        elif interval == 'minute':
+            interval_abbr = 'min'
+        elif interval == 'monthly':
+            interval_abbr = 'mon'
+        elif interval == 'second':
+            interval_abbr = 'sec'
+        else:
+            raise TimeseriesFactoryException(
+                    'Unexpected interval "%s"' % interval)
+        return interval_abbr
+
+    def _get_interval_name(self, interval):
+        """Get name for a data interval.
+
+        Used by ``_get_url`` to replace ``%(interval)s`` in urlTemplate.
+
+        Parameters
+        ----------
+        interval : {'minute', 'second'}
+
+        Returns
+        -------
+        name for ``interval``.
+
+        Raises
+        ------
+        TimeseriesFactoryException
+            if ``interval`` is not supported.
+        """
+        interval_name = None
+        if interval == 'minute':
+            interval_name = 'OneMinute'
+        elif interval == 'second':
+            interval_name = 'OneSecond'
+        else:
+            raise TimeseriesFactoryException(
+                    'Unsupported interval "%s"' % interval)
+        return interval_name
+
+    def _get_type_abbreviation(self, type):
+        """Get abbreviation for a data type.
+
+        Used by ``_get_url`` to replace ``%(t)s`` in urlTemplate.
+
+        Parameters
+        ----------
+        type : {'definitive', 'provisional', 'quasi-definitive', 'variation'}
+
+        Returns
+        -------
+        abbreviation for ``type``.
+
+        Raises
+        ------
+        TimeseriesFactoryException
+            if ``type`` is not supported.
+        """
+        type_abbr = None
+        if type == 'definitive':
+            type_abbr = 'd'
+        elif type == 'provisional':
+            type_abbr = 'p'
+        elif type == 'quasi-definitive':
+            type_abbr = 'q'
+        elif type == 'variation':
+            type_abbr = 'v'
+        else:
+            raise TimeseriesFactoryException(
+                    'Unexpected type "%s"' % type)
+        return type_abbr
+
+    def _get_type_name(self, type):
+        """Get name for a data type.
+
+        Used by ``_get_url`` to replace ``%(type)s`` in urlTemplate.
+
+        Parameters
+        ----------
+        type : {'variation', 'quasi-definitive'}
+
+        Returns
+        -------
+        name for ``type``.
+
+        Raises
+        ------
+        TimeseriesFactoryException
+            if ``type`` is not supported.
+        """
+        type_name = None
+        if type == 'variation':
+            type_name = ''
+        elif type == 'quasi-definitive':
+            type_name = 'QuasiDefinitive'
+        else:
+            raise TimeseriesFactoryException(
+                    'Unsupported type "%s"' % type)
+        return type_name
+
+    def _get_days(self, starttime, endtime):
+        """Get days between (inclusive) starttime and endtime.
+
+        Parameters
+        ----------
+        starttime : obspy.core.UTCDateTime
+            the start time
+        endtime : obspy.core.UTCDateTime
+            the end time
+
+        Returns
+        -------
+        array_like
+            list of times, one per day, for all days between and including
+            ``starttime`` and ``endtime``.
+
+        Raises
+        ------
+        TimeseriesFactoryException
+            if starttime is after endtime
+        """
+        if starttime > endtime:
+            raise TimeseriesFactoryException(
+                    'starttime must be before endtime')
+        days = []
+        day = starttime
+        lastday = (endtime.year, endtime.month, endtime.day)
+        while True:
+            days.append(day)
+            if lastday == (day.year, day.month, day.day):
+                break
+            # move to next day
+            day = obspy.core.UTCDateTime(day.timestamp + 86400)
+        return days
+
+    def write_file(self, fh, timeseries, channels):
+        """Write timeseries data to the given file object.
+
+        Parameters
+        ----------
+        fh : file object
+            file to write to.
+        timeseries : obspy.core.Stream
+            stream containing traces to store.
+        channels : array_like
+            list of channels to store
+        """
+        raise TimeseriesFactoryException('IMFV283 write_file not implemented.')
+
+    def _get_file_from_url(self, url):
+        """Get a file for writing.
+
+        Ensures parent directory exists.
+
+        Parameters
+        ----------
+        url : str
+            url path to IMFV283 file.
+
+        Returns
+        -------
+        str
+            path to file without file:// prefix.
+
+        Raises
+        ------
+        TimeseriesFactoryException
+            if url does not start with file://
+        """
+        if not url.startswith('file://'):
+            raise TimeseriesFactoryException(
+                    'Only file urls are supported for writing')
+        filename = url.replace('file://', '')
+        parent = os.path.dirname(filename)
+        if not os.path.exists(parent):
+            os.makedirs(parent)
+        return filename
diff --git a/geomagio/imfv283/IMFV283Parser.py b/geomagio/imfv283/IMFV283Parser.py
new file mode 100644
index 000000000..1c92b4e3d
--- /dev/null
+++ b/geomagio/imfv283/IMFV283Parser.py
@@ -0,0 +1,374 @@
+"""Parsing methods for the IMFV283 Format."""
+import math
+import numpy
+import sys
+import obspy
+from obspy.core import UTCDateTime
+
+import imfv283_codes
+from ..edge import ObservatoryMetadata
+
+# value that represents missing data points in IMFV283
+IMFV_DEAD = 65535
+
+HDR_SIZE = 37
+MSG_SIZE_100B = 190
+MSG_SIZE_300B = 191
+BIAS = 8192
+SHIFT = 1048576
+
+# Documentation lists the second channel as D, but we know that for
+# USGS, it's actually E.  Since only USGS and Canada (XYZF) use GOES,
+# we are specifying it as E.
+CHANNELS = {
+    0: ['X', 'Y', 'Z', 'F'],
+    1: ['H', 'E', 'Z', 'F'],
+    2: ['', 'D', 'I', 'F'],
+    3: []
+}
+
+
+class IMFV283Parser(object):
+    """IMFV283 parser.
+
+    Based on documentation at:
+    http://www.intermagnet.org/data-donnee/formats/imfv283e-eng.php
+
+    Attributes
+    ----------
+    observatoryMetadata : ObservatoryMetadata
+        metadata used to populate trace stats.
+    stream : obspy.core.Stream
+        parsed timeseries traces, one 12 minute trace per channel per packet.
+
+    Notes
+    -----
+    IMFV283 is the format in which data is sent over the GOES satellite.
+    Data is sent in 12 minute packets.
+    When an outage occurs at the observatory, generally only the most
+    recent data is sent.  At the receiver, data is collected and kept for
+    up to 30 days.  The utility we use to read data from the receiver
+    simply gets all the packets received since the last time it connected,
+    and appends them to the data file.
+    We can make this smarter, but right now there's no need to.
+    """
+
+    def __init__(self, observatoryMetadata=None):
+        """Create a new IMFV283 parser."""
+        # metadata used to populate trace stats
+        self.observatoryMetadata = observatoryMetadata or ObservatoryMetadata()
+        # temporary storage for data being parsed
+        self._parsedata = None
+        self.stream = obspy.core.Stream()
+
+    def parse(self, data):
+        """Parse a string containing IMFV283 formatted data.
+
+        Parameters
+        ----------
+        data : str
+            IMFV283 formatted file contents.
+        """
+        lines = data.splitlines()
+        for line in lines:
+            # if the line is no longer than the 37 character header,
+            # there is no data to parse.
+ if len(line) <= HDR_SIZE: + sys.stderr.write('Bad Header length\n') + continue + + msg_header = self._parse_msg_header(line) + + data_len = msg_header['data_len'] + # check message size indicates data exists + if data_len < MSG_SIZE_100B or data_len > MSG_SIZE_300B: + sys.stderr.write('Incorrect data Length \n') + continue + + goes_data = self._process_ness_block( + line, + imfv283_codes.OBSERVATORIES[msg_header['obs']], + data_len) + + goes_header = self._parse_goes_header(goes_data) + data = self._get_data(goes_header, goes_data) + self._post_process(data, msg_header, goes_header) + + def _findPlatformInObservatories(self, key): + """Find the observatory using the platform key. + + Parameters + ---------- + key : str + The 8 digit key + Returns + ------- + str + The 3 digit observatory code. + + """ + observatories = imfv283_codes.OBSERVATORIES + for obs in observatories: + if observatories[obs]['platform'] == key: + return obs + return None + + def _get_data(self, header, data): + """get data from data packet + + Parameters + ---------- + header : dict + contains the header information for the data packet + data : bytearray + contains the encoded channel data + Returns + ------- + dict + dictionary of channel data arrays. + """ + parse_data = {} + channels = CHANNELS[header['orient']] + for channel in channels: + parse_data[channel] = [] + + scale = header['scale'] + offset = header['offset'] + bytecount = 30 + for cnt in xrange(0, 12): + # get data in 2 byte pairs as integers. + d1 = 0x100 * data[bytecount] + data[bytecount + 1] + d2 = 0x100 * data[bytecount + 2] + data[bytecount + 3] + d3 = 0x100 * data[bytecount + 4] + data[bytecount + 5] + d4 = 0x100 * data[bytecount + 6] + data[bytecount + 7] + bytecount += 8 + + # take 2 byte int, scale and offset it then shift it to the + # correct value 10th nanotesla, and convert it to a nanotesla + # float. + if d1 != IMFV_DEAD: + d1 = (d1 * scale[0] + offset[0] * BIAS - SHIFT) / 10.0 + if d2 != IMFV_DEAD: + d2 = (d2 * scale[1] + offset[1] * BIAS - SHIFT) / 10.0 + if d3 != IMFV_DEAD: + d3 = (d3 * scale[2] + offset[2] * BIAS - SHIFT) / 10.0 + if d4 != IMFV_DEAD: + d4 = (d4 * scale[3] + offset[3] * BIAS - SHIFT) / 10.0 + + parse_data[channels[0]].append(d1) + parse_data[channels[1]].append(d2) + parse_data[channels[2]].append(d3) + parse_data[channels[3]].append(d4) + + return parse_data + + def _get_data_offset(self, data_len): + """get the data offset for the ness blocks + + Parameters + ---------- + data_len : int + the length of the data provided by the message header. + Returns + ------- + int + offset to the data in the ness block + Notes + ----- + The data block has an extra flag tacked onto the start. We detect + this by looking at the data length. Since we don't use this + flag we skip it by adding to the data offset. + """ + if data_len == MSG_SIZE_300B: + return HDR_SIZE + 1 + return HDR_SIZE + + def _get_startime(self, msg_header, goes_header): + """Get Starttime by combining headers. + + Parameters + ---------- + msg_header : dict + header information for the message packet + goes_header : dict + header information for the goes data packet + + Returns + ------- + goes_time : UTCDateTime + The starttime for the goes data packet. + msg_time : UTCDateTime + The starttime for the goes message packet. + Notes + ----- + The goes data packet stores the day of the year, and the minute of + the day. To get a complete starttime, we use the year for the message + and correct for the case where the message came in a new year, vs. 
+ when the data packet was created. + """ + msg_time = msg_header['transmission_time'] + msg_year = 2000 + int(msg_time[0:2]) + msg_day = int(msg_time[2:5]) + + oridinal_time = '%04d%sT%s' % (msg_year, msg_time[2:5], msg_time[5:]) + msg_time = UTCDateTime(oridinal_time) + + if msg_day == 1 and goes_header['day'] >= 365: + msg_year -= 1 + + hour = math.floor(goes_header['minute'] / 60.) + minute = goes_header['minute'] % 60 + + ordinal_time = '%04d%03dT%02d%02d' % (msg_year, goes_header['day'], + hour, minute) + goes_time = UTCDateTime(ordinal_time) + return (goes_time, msg_time) + + def _parse_goes_header(self, data): + """ parse goes data header + + Parameters + ---------- + data : bytearray + The bytearray containing the goes data packet. + Returns + ------- + dict + dictionary containing the required values for decoding the + data packet. + """ + header = {} + + # day of year and minute of day are combined into 3 bytes + header['day'] = data[0] + 0x100 * (data[1] & 0xF) + header['minute'] = (data[2] & 0xFF) * 0x10 + data[1] / 0x10 + + # offset values for each channel are in bytes 3,4,5,6 respectively. + header['offset'] = data[3:7] + + # Not used. alert_capable = (goes_block[7] & 0x01) + # orient code. The orientation of the instrument (HEZF, etc.) + header['orient'] = data[7] / 0x40 + + # scale values bits 0,1,2,3 of byte 7. + # Either 1 if bit not set, 2 if bit is set. + scale1 = 1 + scale2 = 1 + scale3 = 1 + scale4 = 1 + if (data[7] & 0x20) > 0: + scale1 = 2 + if (data[7] & 0x10) > 0: + scale2 = 2 + if (data[7] & 0x8) > 0: + scale3 = 2 + if (data[7] & 0x4) > 0: + scale4 = 2 + header['scale'] = [scale1, scale2, scale3, scale4] + + return header + + def _parse_msg_header(self, msg): + """parse the goes message header + + Parameters + ---------- + msg : array of chars + The message as provided by the goes receipt software, open dcs. + Returns + ------- + dict + a dictionary of the header information we use. + """ + header = {} + + header['daps_platform'] = msg[0:8] + header['obs'] = self._findPlatformInObservatories( + header['daps_platform']) + # if it's not in the observatory dictionary, we ignore it. + if header['obs'] is None: + return header + + # get the time of the transmission + header['transmission_time'] = msg[8:19] + header['data_len'] = int(msg[32:37]) + return header + + def _post_process(self, data, msg_header, goes_header): + """process parsed data + + Parameters + ---------- + data: dict + parsed data by channel + msg_header: dict + parsed header of the message + goes_header: dict + parsed header of the goes data + + """ + (goes_time, msg_time) = self._get_startime(msg_header, goes_header) + if (msg_time - goes_time) > (24 * 60): + sys.stderr.write('data over twice as old as the message') + return + + for channel in CHANNELS[goes_header['orient']]: + stats = obspy.core.Stats() + stats.sampling_rate = 0.0166666666667 + stats.channel = channel + stats.starttime = goes_time + stats.npts = 12 + stats.station = msg_header['obs'] + self.observatoryMetadata.set_metadata(stats, msg_header['obs'], + channel, 'variation', 'minute') + numpy_data = numpy.array(data[channel], dtype=numpy.float64) + numpy_data[numpy_data == IMFV_DEAD] = numpy.nan + trace = obspy.core.Trace(numpy_data, stats) + self.stream += trace + + def _process_ness_block(self, msg, domsat, data_len): + """process the "ness" block of data into an IMFV283 data block. 
+ + Parameters + ---------- + msg : array + unsigned chars + domsat : dict + Dictionary of observatory dependent information used to decode + ness block. + data_len : int + data_len provided by the message header. + """ + goes_block = bytearray(126) + ness_byte = 0 + goes_byte = 0 + + offset = self._get_data_offset(data_len) + + for cnt in xrange(0, 63): + # Convert 3 byte "pair" into ordinal values for manipulation. + byte3 = ord(msg[offset + ness_byte + 2]) + byte2 = ord(msg[offset + ness_byte + 1]) + byte1 = ord(msg[offset + ness_byte]) + + # mask the three ness bytes to get the 2 byte information. + goes_block[goes_byte] = (byte3 & 0x3F) + \ + ((byte2 & 0x3) * 0x40) + goes_block[goes_byte + 1] = ((byte2 / 0x4) & 0xF) + \ + ((byte1 & 0xF) * 0x10) + + # swap the bytes depending on domsat information. + if domsat['swap_hdr'] and cnt <= 11: + byte_value = goes_block[goes_byte + 1] + goes_block[goes_byte + 1] = goes_block[goes_byte] + goes_block[goes_byte] = byte_value + if domsat['swap_data'] and cnt > 11: + byte_value = goes_block[goes_byte + 1] + goes_block[goes_byte + 1] = goes_block[goes_byte] + goes_block[goes_byte] = byte_value + ness_byte += 3 + goes_byte += 2 + + return goes_block diff --git a/geomagio/imfv283/IMFV283Parser_test.py b/geomagio/imfv283/IMFV283Parser_test.py new file mode 100644 index 000000000..8932c8f42 --- /dev/null +++ b/geomagio/imfv283/IMFV283Parser_test.py @@ -0,0 +1,35 @@ +"""Tests for the IMFV283 Parser class.""" + +from nose.tools import assert_equals +from IMFV283Parser import IMFV283Parser + +import imfv283_codes + + +IMFV283_EXAMPLE_VIC = '75C2A3A814023012741G43-1NN027EUP00191`A^P@RVxZ}|' + \ + 'D@@B_BEM@@@@@@@@@@@@@@@@@@@@@@@@@@@E|BxtTADVD@\E\BxxT@tVCh\\E' + \ + 'lByDT@xVCp\\EdBy@T@tVCh\EhByPT@xVCl\\EPBy@T@tVCd\EdBxlTA@VCp\\Eh' + \ + 'BxTTA@VCp\\EdBGxTA@VCl\EPBG`T@xVC\\\\DtBGHT@lVCD\DPBG@T@XVBh\\' + +IMFV283_EXAMPLE_FRD = '75C2102614023012927G43-0NN027EUP00191bx@WyhD{' + \ + 'aDB~@X@{Bb@@@@@@@@@@@@@@@@@@@@@@@@@@@@[DAV[@cUAjT@[EAVZ@cUAjT@[' + \ + 'BAVZ@cVAjS@[DAVZ@cUAjS@[DAVZ@cUAjS@[GAV\\@cTAjT@[DAV[@cUAjT@[BAVY' + \ + '@cVAjT@[CAVW@cWAjT@[CAVT@cWAjU@[AAVO@cYAjV@Z}AVK@c[AjV' + + +def test_parse_msg_header(): + """geomagio.imfv283.IMFV283Parser_test.test_parse_msg_header() + + Call the _parse_header method with a header. + Verify the header name and value are split at the correct column. + """ + header = IMFV283Parser()._parse_msg_header(IMFV283_EXAMPLE_VIC) + assert_equals(header['obs'], 'VIC') + + +def test_parse_goes_header(): + goes_data = IMFV283Parser()._process_ness_block(IMFV283_EXAMPLE_VIC, + imfv283_codes.OBSERVATORIES['VIC'], + 191) + goes_header = IMFV283Parser()._parse_goes_header(goes_data) + assert_equals(goes_header['day'], 23) diff --git a/geomagio/imfv283/StreamIMFV283Factory.py b/geomagio/imfv283/StreamIMFV283Factory.py new file mode 100644 index 000000000..1e56fb94c --- /dev/null +++ b/geomagio/imfv283/StreamIMFV283Factory.py @@ -0,0 +1,35 @@ +"""Factory to load IMFV283 files from an input StreamIMFV283Factory.""" + +from IMFV283Factory import IMFV283Factory + + +class StreamIMFV283Factory(IMFV283Factory): + """Timeseries Factory for IMFV283 formatted files loaded via a stream. + normally either a single file, or stdio. 
+ + Parameters + ---------- + stream: file object + io stream, normally either a file, or stdio + + See Also + -------- + IMFV283Factory + Timeseriesfactory + """ + def __init__(self, stream, observatory=None, channels=None, + type=None, interval=None): + IMFV283Factory.__init__(self, None, observatory, channels, + type, interval) + self._stream = stream + + def get_timeseries(self, starttime, endtime, observatory=None, + channels=None, type=None, interval=None): + """Implements get_timeseries + + Notes: Calls IMFV283Factory.parse_string in place of + IMFV283Factory.get_timeseries. + """ + + self._stream.seek(0) + return IMFV283Factory.parse_string(self, self._stream.read()) diff --git a/geomagio/imfv283/__init__.py b/geomagio/imfv283/__init__.py new file mode 100644 index 000000000..e7c9bf0de --- /dev/null +++ b/geomagio/imfv283/__init__.py @@ -0,0 +1,16 @@ +"""IO Module for IMFV283Factory Format + +Based on documentation at: + http://http://www.intermagnet.org/data-donnee/formats/imfv283e-eng.php +""" + +from IMFV283Factory import IMFV283Factory +from StreamIMFV283Factory import StreamIMFV283Factory +from IMFV283Parser import IMFV283Parser + + +__all__ = [ + 'IMFV283Factory', + 'StreamIMFV283Factory', + 'IMFV283Parser' +] diff --git a/geomagio/imfv283/imfv283_codes.py b/geomagio/imfv283/imfv283_codes.py new file mode 100644 index 000000000..0e2b201cb --- /dev/null +++ b/geomagio/imfv283/imfv283_codes.py @@ -0,0 +1,174 @@ +"""Dictionary of observatory codes and ness block byte orders""" +OBSERVATORIES = { + # USGS + 'BOU': { + 'orient': 'HEZF', + 'platform': '75C2D538', + 'swap_hdr': False, + 'swap_data': True + }, + 'BRW': { + 'orient': 'HEZF', + 'platform': '75C172CE', + 'swap_hdr': False, + 'swap_data': True + }, + 'BSL': { + 'orient': 'HEZF', + 'platform': '75C236CA', + 'swap_hdr': False, + 'swap_data': True + }, + 'CMO': { + 'orient': 'HEZF', + 'platform': '75C06342', + 'swap_hdr': False, + 'swap_data': True + }, + 'DED': { + 'orient': 'HEZF', + 'platform': '75C301AA', + 'swap_hdr': False, + 'swap_data': True + }, + 'FRD': { + 'orient': 'HEZF', + 'platform': '75C21026', + 'swap_hdr': False, + 'swap_data': True + }, + 'FRN': { + 'orient': 'HEZF', + 'platform': '75C2F3D4', + 'swap_hdr': False, + 'swap_data': True + }, + 'GUA': { + 'orient': 'HEZF', + 'platform': '75C33430', + 'swap_hdr': False, + 'swap_data': True + }, + 'HON': { + 'orient': 'HEZF', + 'platform': '75C161B8', + 'swap_hdr': False, + 'swap_data': True + }, + 'NEW': { + 'orient': 'HEZF', + 'platform': '75C2E0A2', + 'swap_hdr': False, + 'swap_data': True + }, + 'SHU': { + 'orient': 'HEZF', + 'platform': '75C266B6', + 'swap_hdr': False, + 'swap_data': True + }, + 'SIT': { + 'orient': 'HEZF', + 'platform': '75C28544', + 'swap_hdr': False, + 'swap_data': True + }, + 'SJG': { + 'orient': 'HEZF', + 'platform': '75C0B52A', + 'swap_hdr': False, + 'swap_data': True + }, + 'TUC': { + 'orient': 'HEZF', + 'platform': '75C14754', + 'swap_hdr': False, + 'swap_data': True + }, + # NRCAN + 'BLC': { + 'orient': 'XYZF', + 'platform': '75C3644C', + 'swap_hdr': True, + 'swap_data': False + }, + 'BRD': { + 'orient': 'XYZF', + 'platform': '75C387BE', + 'swap_hdr': True, + 'swap_data': False + }, + 'CBB': { + 'orient': 'XYZF', + 'platform': '75C351D6', + 'swap_hdr': True, + 'swap_data': False + }, + 'EUA': { + 'orient': 'XYZF', + 'platform': '75C2405A', + 'swap_hdr': True, + 'swap_data': False + }, + 'FCC': { + 'orient': 'XYZF', + 'platform': '75C3773A', + 'swap_hdr': True, + 'swap_data': False + }, + 'IQA': { + 'orient': 'XYZF', + 'platform': 
'75C0F620', + 'swap_hdr': True, + 'swap_data': False + }, + 'MEA': { + 'orient': 'XYZF', + 'platform': '75C32746', + 'swap_hdr': True, + 'swap_data': False + }, + 'OTT': { + 'orient': 'XYZF', + 'platform': '75C20350', + 'swap_hdr': True, + 'swap_data': False + }, + 'RES': { + 'orient': 'XYZF', + 'platform': '75C1D236', + 'swap_hdr': True, + 'swap_data': False + }, + 'SNK': { + 'orient': 'XYZF', + 'platform': '75C15422', + 'swap_hdr': True, + 'swap_data': False + }, + 'STJ': { + 'orient': 'XYZF', + 'platform': '75C1E7AC', + 'swap_hdr': True, + 'swap_data': False + }, + 'VIC': { + 'orient': 'XYZF', + 'platform': '75C2A3A8', + 'swap_hdr': True, + 'swap_data': False + }, + 'YKC': { + 'orient': 'XYZF', + 'platform': '75C312DC', + 'swap_hdr': True, + 'swap_data': False + }, + # OTHER + 'KGI': { + 'orient': 'HEZF', + 'platform': '75C394C8', + 'swap_hdr': True, + 'swap_data': False + } +} -- GitLab
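
Example usage (illustrative only, not part of the patch): a minimal sketch of how the classes added above could be exercised, assuming the package is importable as geomagio.imfv283 per the new __init__.py. The file path, observatory code, and dates below are placeholder assumptions, not values taken from the patch.

    from obspy.core import UTCDateTime
    from geomagio.imfv283 import IMFV283Factory, StreamIMFV283Factory

    # IMFV283Factory fetches and parses a dcpmsgs.txt dump via urlTemplate
    # (the file:// path here is a placeholder).
    factory = IMFV283Factory(urlTemplate='file:///data/goes/dcpmsgs.txt')
    timeseries = factory.get_timeseries(
            starttime=UTCDateTime('2015-01-23T00:00:00Z'),
            endtime=UTCDateTime('2015-01-23T23:59:00Z'),
            observatory='VIC')
    print(timeseries)

    # StreamIMFV283Factory parses an already-open file object instead;
    # starttime/endtime are ignored because it delegates to parse_string.
    with open('/data/goes/dcpmsgs.txt', 'r') as dcpfile:
        timeseries = StreamIMFV283Factory(dcpfile).get_timeseries(
                starttime=None, endtime=None)
    print(timeseries)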