First big stab at better structuring
author Alexander Vasarab <alexander@wylark.com>
Thu, 2 Jul 2020 18:02:52 +0000 (11:02 -0700)
committer Alexander Vasarab <alexander@wylark.com>
Thu, 2 Jul 2020 18:02:52 +0000 (11:02 -0700)
- Key parts of the program get their own, distinct subroutines (outlined below)
- Struck version from opening docstring
- Removed unused OrderedDict import
- Capitalized 'LOG' global variable
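
For orientation, the restructured control flow looks roughly like this (a sketch assembled from the diff below, not the complete code):

    def main():
        parser = get_parser()                     # --config, --log-level, --dry-run
        (options, args) = parser.parse_args()
        config = configparser.ConfigParser(allow_no_value=False)
        config.read(options.config)
        if not setup_logging(options.log_level):  # journald handler, stdout fallback
            sys.exit(1)
        (infoex, data) = setup_config(config)     # read and sanity-check the ini file
        # ... fetch NRCS or MesoWest observations and fill final_data using the
        #     setup_infoex_fields_mapping() / setup_infoex_counterparts_mapping() tables ...
        if not write_local_csv(infoex['csv_filename'], final_data):
            return 1
        if not options.dry_run:
            upload_csv(infoex['csv_filename'], infoex)
        return 0

    if __name__ == "__main__":
        sys.exit(main())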

infoex-autowx.py

index 598f24cd765d300b8d49f3e2b1884ebca63e8a41..d66683cfa41775964bc8f2b432daa172d8b0fd24 100755 (executable)
@@ -6,8 +6,6 @@ InfoEx <-> NRCS/MesoWest Auto Wx implementation
 Alexander Vasarab
 Wylark Mountaineering LLC
 
-Version 2.0.0
-
 This program fetches data from either an NRCS SNOTEL site or MesoWest
 weather station and pushes it to InfoEx using the new automated weather
 system implementation.
@@ -32,7 +30,6 @@ import os
 import sys
 import time
 
-from collections import OrderedDict
 from ftplib import FTP
 from optparse import OptionParser
 
@@ -44,349 +41,404 @@ import zeep.transports
 
 __version__ = '2.0.0'
 
-log = logging.getLogger(__name__)
-log.setLevel(logging.NOTSET)
-
-try:
-    from systemd.journal import JournalHandler
-    log.addHandler(JournalHandler())
-except:
-    ## fallback to syslog
-    #import logging.handlers
-    #log.addHandler(logging.handlers.SysLogHandler())
-    # fallback to stdout
-    handler = logging.StreamHandler(sys.stdout)
-    log.addHandler(handler)
-
-parser = OptionParser(version=__version__)
-
-parser.add_option("--config",
-    dest="config",
-    metavar="FILE",
-    help="location of config file")
-
-parser.add_option("--log-level",
-    dest="log_level",
-    default=None,
-    help="set the log level (debug, info, warning)")
-
-parser.add_option("--dry-run",
-    action="store_true",
-    dest="dry_run",
-    default=False,
-    help="fetch data but don't upload to InfoEx")
-
-(options, args) = parser.parse_args()
-
-config = configparser.ConfigParser(allow_no_value=False)
-
-if not options.config:
-    parser.print_help()
-    print("\nPlease specify a configuration file via --config.")
-    sys.exit(1)
-
-config.read(options.config)
-
-# ugly, but passable
-if options.log_level in [None, 'debug', 'info', 'warning']:
-    if options.log_level == 'debug':
-        log.setLevel(logging.DEBUG)
-    elif options.log_level == 'info':
-        log.setLevel(logging.INFO)
-    elif options.log_level == 'warning':
-        log.setLevel(logging.WARNING)
-    else:
-        log.setLevel(logging.NOTSET)
-else:
-    parser.print_help()
-    print("\nPlease select an appropriate log level or remove the switch (--log-level).")
-    sys.exit(1)
-
-log.debug('STARTING UP')
-
-try:
-    infoex = {
-        'host': config['infoex']['host'],
-        'uuid': config['infoex']['uuid'],
-        'api_key': config['infoex']['api_key'],
-        'csv_filename': config['infoex']['csv_filename'],
-        'location_uuid': config['infoex']['location_uuid'],
-        'wx_data': {}, # placeholder key, values to come later
-    }
-
-    data = dict()
-    data['provider'] = config['station']['type']
-
-    if data['provider'] not in ['nrcs', 'mesowest']:
-        print("Please specify either nrcs or mesowest as the station type.")
-        sys.exit(1)
+LOG = logging.getLogger(__name__)
+LOG.setLevel(logging.NOTSET)
 
-    if data['provider'] == 'nrcs':
-        data['source'] = 'https://www.wcc.nrcs.usda.gov/awdbWebService/services?WSDL'
-        data['station_id'] = config['station']['station_id']
+def get_parser():
+    """Return OptionParser for this program"""
+    parser = OptionParser(version=__version__)
 
-        try:
-            desired_data = config['station']['desired_data'].split(',')
-        except:
-            # desired_data malformed or missing, setting default
-            desired_data = [
-                    'TOBS', # AIR TEMPERATURE OBSERVED (degF)
-                    'SNWD', # SNOW DEPTH (in)
-                    'PREC'  # PRECIPITATION ACCUMULATION (in)
-                    ]
-
-        # XXX: For NRCS, we're manually overriding units for now! Once
-        #      unit conversion is supported for NRCS, REMOVE THIS!
-        if 'units' not in data:
-            data['units'] = 'imperial'
-
-    if data['provider'] == 'mesowest':
-        data['source'] = 'https://api.synopticdata.com/v2/stations/timeseries'
-        data['station_id'] = config['station']['station_id']
-        data['units'] = config['station']['units']
+    parser.add_option("--config",
+        dest="config",
+        metavar="FILE",
+        help="location of config file")
 
-        try:
-            desired_data = config['station']['desired_data']
-        except:
-            # desired_data malformed or missing, setting default
-            desired_data = 'air_temp,snow_depth'
-
-        # construct full API URL (sans start/end time, added later)
-        data['source'] = data['source'] + '?token=' + config['station']['token'] + '&within=60&units=' + data['units'] + '&stid=' + data['station_id'] + '&vars=' + desired_data
-
-except KeyError as e:
-    log.critical("%s not defined in %s" % (e, options.config))
-    exit(1)
-except Exception as exc:
-    log.critical("Exception occurred in config parsing: '%s'" % (exc))
-    exit(1)
-
-# all sections/values present in config file, final sanity check
-try:
-    for key in config.sections():
-        for subkey in config[key]:
-            if not len(config[key][subkey]):
-                raise ValueError;
-except ValueError as exc:
-    log.critical("Config value '%s.%s' is empty" % (key, subkey))
-    exit(1)
-
-# INFOEX FIELDS
-#
-# This won't earn style points in Python, but here we establish a couple
-# of helpful mappings variables. The reason this is helpful is that the
-# end result is simply an ordered set, the CSV file. But we still may
-# want to manipulate the values arbitrarily before writing that file.
-#
-# Also note that the current Auto Wx InfoEx documentation shows these
-# keys in a graphical table with the "index" beginning at 1, but here we
-# sanely index beginning at 0.
-fmap = {}                           ; final_data     = [None] * 29
-fmap['Location UUID'] = 0           ; final_data[0]  = infoex['location_uuid']
-fmap['obDate'] = 1                  ; final_data[1]  = None
-fmap['obTime'] = 2                  ; final_data[2]  = None
-fmap['timeZone'] = 3                ; final_data[3]  = 'Pacific'
-fmap['tempMaxHour'] = 4             ; final_data[4]  = None
-fmap['tempMaxHourUnit'] = 5         ; final_data[5]  = 'F'
-fmap['tempMinHour'] = 6             ; final_data[6]  = None
-fmap['tempMinHourUnit'] = 7         ; final_data[7]  = 'F'
-fmap['tempPres'] = 8                ; final_data[8]  = None
-fmap['tempPresUnit'] = 9            ; final_data[9]  = 'F'
-fmap['precipitationGauge'] = 10     ; final_data[10] = None
-fmap['precipitationGaugeUnit'] = 11 ; final_data[11] = 'in'
-fmap['windSpeedNum'] = 12           ; final_data[12] = None
-fmap['windSpeedUnit'] = 13          ; final_data[13] = 'mph'
-fmap['windDirectionNum'] = 14       ; final_data[14] = None
-fmap['hS'] = 15                     ; final_data[15] = None
-fmap['hsUnit'] = 16                 ; final_data[16] = 'in'
-fmap['baro'] = 17                   ; final_data[17] = None
-fmap['baroUnit'] = 18               ; final_data[18] = 'inHg'
-fmap['rH'] = 19                     ; final_data[19] = None
-fmap['windGustSpeedNum'] = 20       ; final_data[20] = None
-fmap['windGustSpeedNumUnit'] = 21   ; final_data[21] = 'mph'
-fmap['windGustDirNum'] = 22         ; final_data[22] = None
-fmap['dewPoint'] = 23               ; final_data[23] = None
-fmap['dewPointUnit'] = 24           ; final_data[24] = 'F'
-fmap['hn24Auto'] = 25               ; final_data[25] = None
-fmap['hn24AutoUnit'] = 26           ; final_data[26] = 'in'
-fmap['hstAuto'] = 27                ; final_data[27] = None
-fmap['hstAutoUnit'] = 28            ; final_data[28] = 'in'
-
-# one final mapping, the NRCS/MesoWest fields that this program supports to
-# their InfoEx counterpart
-iemap = {}
-
-if data['provider'] == 'nrcs':
-    iemap['PREC'] = 'precipitationGauge'
-    iemap['TOBS'] = 'tempPres'
-    iemap['SNWD'] = 'hS'
-    iemap['PRES'] = 'baro'
-    iemap['RHUM'] = 'rH'
-    iemap['WSPD'] = 'windSpeedNum'
-    iemap['WDIR'] = 'windDirectionNum'
-    # unsupported by NRCS:
-    # windGustSpeedNum
-elif data['provider'] == 'mesowest':
-    iemap['precip_accum'] = 'precipitationGauge'
-    iemap['air_temp'] = 'tempPres'
-    iemap['snow_depth'] = 'hS'
-    iemap['pressure'] = 'baro'
-    iemap['relative_humidity'] = 'rH'
-    iemap['wind_speed'] = 'windSpeedNum'
-    iemap['wind_direction'] = 'windDirectionNum'
-    iemap['wind_gust'] = 'windGustSpeedNum'
-
-# override units if user selected metric
-#
-# NOTE: to update this, use the fmap<->final_data mapping laid out above
-#
-# NOTE: this only 'works' with MesoWest for now, as the MesoWest API
-#       itself handles the unit conversion; in the future, we will also
-#       support NRCS unit conversion, but this must be done by this
-#       program.
-if data['units'] == 'metric':
-    final_data[fmap['tempPresUnit']] = 'C'
-    final_data[fmap['hsUnit']] = 'm'
-    final_data[fmap['windSpeedUnit']] = 'm/s'
-    final_data[fmap['windGustSpeedNumUnit']] = 'm/s'
-
-# floor time to nearest hour
-dt = datetime.datetime.now()
-end_date = dt - datetime.timedelta(minutes=dt.minute % 60,
-                                   seconds=dt.second,
-                                   microseconds=dt.microsecond)
-begin_date = end_date - datetime.timedelta(hours=3)
-
-# get the data
-log.debug("Getting %s data from %s to %s" % (str(desired_data),
-    str(begin_date), str(end_date)))
-
-time_all_elements = time.time()
-
-# NRCS-specific code
-if data['provider'] == 'nrcs':
-    transport = zeep.transports.Transport(cache=zeep.cache.SqliteCache())
-    client = zeep.Client(wsdl=data['source'], transport=transport)
-
-    for elementCd in desired_data:
-        time_element = time.time()
-
-        # get the last three hours of data for this elementCd
-        tmp = client.service.getHourlyData(
-                stationTriplets=[data['station_id']],
-                elementCd=elementCd,
-                ordinal=1,
-                beginDate=begin_date,
-                endDate=end_date)
-
-        log.info("Time to get elementCd '%s': %.3f sec" % (elementCd,
-            time.time() - time_element))
-
-        values = tmp[0]['values']
-
-        # sort and isolate the most recent
-        #
-        # NOTE: we do this because sometimes there are gaps in hourly data
-        #       in NRCS; yes, we may end up with slightly inaccurate data,
-        #       so perhaps this decision will be re-evaluated in the future
-        if values:
-            ordered = sorted(values, key=lambda t: t['dateTime'], reverse=True)
-            infoex['wx_data'][elementCd] = ordered[0]['value']
-        else:
-            infoex['wx_data'][elementCd] = None
+    parser.add_option("--log-level",
+        dest="log_level",
+        default=None,
+        help="set the log level (debug, info, warning)")
 
-# MesoWest-specific code
-elif data['provider'] == 'mesowest':
-    # massage begin/end date format
-    begin_date_str = begin_date.strftime('%Y%m%d%H%M')
-    end_date_str = end_date.strftime('%Y%m%d%H%M')
+    parser.add_option("--dry-run",
+        action="store_true",
+        dest="dry_run",
+        default=False,
+        help="fetch data but don't upload to InfoEx")
 
-    # construct final, completed API URL
-    api_req_url = data['source'] + '&start=' + begin_date_str + '&end=' + end_date_str
-    req = requests.get(api_req_url)
+    return parser
 
+def setup_config(config):
+    """Setup config variable based on values specified in the ini file"""
+    try:
+        infoex = {
+            'host': config['infoex']['host'],
+            'uuid': config['infoex']['uuid'],
+            'api_key': config['infoex']['api_key'],
+            'csv_filename': config['infoex']['csv_filename'],
+            'location_uuid': config['infoex']['location_uuid'],
+            'wx_data': {}, # placeholder key, values to come later
+        }
+
+        data = dict()
+        data['provider'] = config['station']['type']
+
+        if data['provider'] not in ['nrcs', 'mesowest']:
+            print("Please specify either nrcs or mesowest as the station type.")
+            sys.exit(1)
+
+        if data['provider'] == 'nrcs':
+            data['source'] = 'https://www.wcc.nrcs.usda.gov/awdbWebService/services?WSDL'
+            data['station_id'] = config['station']['station_id']
+
+            try:
+                data['desired_data'] = config['station']['desired_data'].split(',')
+            except:
+                # desired_data malformed or missing, setting default
+                data['desired_data'] = [
+                                       'TOBS', # AIR TEMPERATURE OBSERVED (degF)
+                                       'SNWD', # SNOW DEPTH (in)
+                                       'PREC'  # PRECIPITATION ACCUMULATION (in)
+                                       ]
+
+            # XXX: For NRCS, we're manually overriding units for now! Once
+            #      unit conversion is supported for NRCS, REMOVE THIS!
+            if 'units' not in data:
+                data['units'] = 'imperial'
+
+        if data['provider'] == 'mesowest':
+            data['source'] = 'https://api.synopticdata.com/v2/stations/timeseries'
+            data['station_id'] = config['station']['station_id']
+            data['units'] = config['station']['units']
+
+            try:
+                data['desired_data'] = config['station']['desired_data']
+            except:
+                # desired_data malformed or missing, setting default
+                data['desired_data'] = 'air_temp,snow_depth'
+
+            # construct full API URL (sans start/end time, added later)
+            data['source'] = data['source'] + '?token=' + config['station']['token'] + '&within=60&units=' + data['units'] + '&stid=' + data['station_id'] + '&vars=' + data['desired_data']
+
+    except KeyError as e:
+        LOG.critical("%s not defined in %s" % (e, options.config))
+        exit(1)
+    except Exception as exc:
+        LOG.critical("Exception occurred in config parsing: '%s'" % (exc))
+        exit(1)
+
+    # all sections/values present in config file, final sanity check
+    try:
+        for key in config.sections():
+            for subkey in config[key]:
+                if not len(config[key][subkey]):
+                    raise ValueError;
+    except ValueError as exc:
+        LOG.critical("Config value '%s.%s' is empty" % (key, subkey))
+        exit(1)
+
+    return (infoex, data)
+
+def setup_logging(log_level):
+    """Setup our logging infrastructure"""
     try:
-        json = req.json()
-    except ValueError:
-        log.error("Bad JSON in MesoWest response")
+        from systemd.journal import JournalHandler
+        LOG.addHandler(JournalHandler())
+    except:
+        ## fallback to syslog
+        #import logging.handlers
+        #LOG.addHandler(logging.handlers.SysLogHandler())
+        # fallback to stdout
+        handler = logging.StreamHandler(sys.stdout)
+        LOG.addHandler(handler)
+
+    # ugly, but passable
+    if log_level in [None, 'debug', 'info', 'warning']:
+        if log_level == 'debug':
+            LOG.setLevel(logging.DEBUG)
+        elif log_level == 'info':
+            LOG.setLevel(logging.INFO)
+        elif log_level == 'warning':
+            LOG.setLevel(logging.WARNING)
+        else:
+            LOG.setLevel(logging.NOTSET)
+    else:
+        return False
+
+    return True
+
+def main():
+    """Main routine: sort through args, decide what to do, then do it"""
+    parser = get_parser()
+    (options, args) = parser.parse_args()
+
+    config = configparser.ConfigParser(allow_no_value=False)
+
+    if not options.config:
+        parser.print_help()
+        print("\nPlease specify a configuration file via --config.")
         sys.exit(1)
 
-    try:
-        observations = json['STATION'][0]['OBSERVATIONS']
-    except ValueError:
-        log.error("Bad JSON in MesoWest response")
+    config.read(options.config)
+
+    if not setup_logging(options.log_level):
+        parser.print_help()
+        print("\nPlease select an appropriate log level or remove the switch (--log-level).")
         sys.exit(1)
 
-    pos = len(observations['date_time']) - 1
+    (infoex, data) = setup_config(config)
 
-    for elementCd in desired_data.split(','):
-        # sort and isolate the most recent, see note above in NRCS for how and
-        # why this is done
-        #
-        # NOTE: Unlike in the NRCS case, the MesoWest API response contains all
-        #       data (whereas with NRCS, we have to make a separate request for
-        #       each element we want). This is nice for network efficiency but
-        #       it means we have to handle this part differently for each.
-        #
-        # NOTE: Also unlike NRCS, MesoWest provides more granular data; NRCS
-        #       provides hourly data, but MesoWest can often provide data every
-        #       10 minutes -- though this provides more opportunity for
-        #       irregularities
-
-        # we may not have the data at all
-        key_name = elementCd + '_set_1'
-        if key_name in observations:
-            if observations[key_name][pos]:
-                infoex['wx_data'][elementCd] = observations[key_name][pos]
+    LOG.debug('Config parsed, starting up')
+
+    # create mappings
+    (fmap, final_data) = setup_infoex_fields_mapping(infoex['location_uuid'])
+    iemap = setup_infoex_counterparts_mapping(data['provider'])
+
+    # override units if user selected metric
+    #
+    # NOTE: to update this, use the fmap<->final_data mapping laid out above
+    #
+    # NOTE: this only 'works' with MesoWest for now, as the MesoWest API
+    #       itself handles the unit conversion; in the future, we will also
+    #       support NRCS unit conversion, but this must be done by this
+    #       program.
+    if data['units'] == 'metric':
+        final_data[fmap['tempPresUnit']] = 'C'
+        final_data[fmap['hsUnit']] = 'm'
+        final_data[fmap['windSpeedUnit']] = 'm/s'
+        final_data[fmap['windGustSpeedNumUnit']] = 'm/s'
+
+    # floor time to nearest hour
+    dt = datetime.datetime.now()
+    end_date = dt - datetime.timedelta(minutes=dt.minute % 60,
+                                       seconds=dt.second,
+                                       microseconds=dt.microsecond)
+    begin_date = end_date - datetime.timedelta(hours=3)
+
+    # get the data
+    LOG.debug("Getting %s data from %s to %s" % (str(data['desired_data']),
+        str(begin_date), str(end_date)))
+
+    time_all_elements = time.time()
+
+    # NRCS-specific code
+    if data['provider'] == 'nrcs':
+        transport = zeep.transports.Transport(cache=zeep.cache.SqliteCache())
+        client = zeep.Client(wsdl=data['source'], transport=transport)
+
+        for elementCd in data['desired_data']:
+            time_element = time.time()
+
+            # get the last three hours of data for this elementCd
+            tmp = client.service.getHourlyData(
+                    stationTriplets=[data['station_id']],
+                    elementCd=elementCd,
+                    ordinal=1,
+                    beginDate=begin_date,
+                    endDate=end_date)
+
+            LOG.info("Time to get elementCd '%s': %.3f sec" % (elementCd,
+                time.time() - time_element))
+
+            values = tmp[0]['values']
+
+            # sort and isolate the most recent
+            #
+            # NOTE: we do this because sometimes there are gaps in hourly data
+            #       in NRCS; yes, we may end up with slightly inaccurate data,
+            #       so perhaps this decision will be re-evaluated in the future
+            if values:
+                ordered = sorted(values, key=lambda t: t['dateTime'], reverse=True)
+                infoex['wx_data'][elementCd] = ordered[0]['value']
             else:
                 infoex['wx_data'][elementCd] = None
-        else:
-            infoex['wx_data'][elementCd] = None
 
-log.info("Time to get all data : %.3f sec" % (time.time() -
-    time_all_elements))
+    # MesoWest-specific code
+    elif data['provider'] == 'mesowest':
+        # massage begin/end date format
+        begin_date_str = begin_date.strftime('%Y%m%d%H%M')
+        end_date_str = end_date.strftime('%Y%m%d%H%M')
 
-log.debug("infoex[wx_data]: %s", str(infoex['wx_data']))
+        # construct final, completed API URL
+        api_req_url = data['source'] + '&start=' + begin_date_str + '&end=' + end_date_str
+        req = requests.get(api_req_url)
 
-# Now we only need to add in what we want to change thanks to that
-# abomination of a variable declaration earlier
-final_data[fmap['Location UUID']] = infoex['location_uuid']
-final_data[fmap['obDate']] = end_date.strftime('%m/%d/%Y')
-final_data[fmap['obTime']] = end_date.strftime('%H:%M')
+        try:
+            json = req.json()
+        except ValueError:
+            LOG.error("Bad JSON in MesoWest response")
+            sys.exit(1)
 
-for elementCd in infoex['wx_data']:
-    if elementCd not in iemap:
-        log.warning("BAD KEY wx_data['%s']" % (elementCd))
-        continue
+        try:
+            observations = json['STATION'][0]['OBSERVATIONS']
+        except ValueError:
+            LOG.error("Bad JSON in MesoWest response")
+            sys.exit(1)
+
+        pos = len(observations['date_time']) - 1
+
+        for elementCd in data['desired_data'].split(','):
+            # sort and isolate the most recent, see note above in NRCS for how and
+            # why this is done
+            #
+            # NOTE: Unlike in the NRCS case, the MesoWest API response contains all
+            #       data (whereas with NRCS, we have to make a separate request for
+            #       each element we want). This is nice for network efficiency but
+            #       it means we have to handle this part differently for each.
+            #
+            # NOTE: Also unlike NRCS, MesoWest provides more granular data; NRCS
+            #       provides hourly data, but MesoWest can often provide data every
+            #       10 minutes -- though this provides more opportunity for
+            #       irregularities
+
+            # we may not have the data at all
+            key_name = elementCd + '_set_1'
+            if key_name in observations:
+                if observations[key_name][pos]:
+                    infoex['wx_data'][elementCd] = observations[key_name][pos]
+                else:
+                    infoex['wx_data'][elementCd] = None
+            else:
+                infoex['wx_data'][elementCd] = None
 
-    # CONSIDER: Casting every value to Float() -- need to investigate if
-    #           any possible elementCds we may want are any other data
-    #           type than float.
-    #
-    #           Another possibility is to query the API with
-    #           getStationElements and temporarily store the
-    #           storedUnitCd. But that's pretty network-intensive and
-    #           may not even be worth it if there's only e.g. one or two
-    #           exceptions to any otherwise uniformly Float value set.
-    final_data[fmap[iemap[elementCd]]] = infoex['wx_data'][elementCd]
-
-log.debug("final_data: %s" % (str(final_data)))
-
-with open(infoex['csv_filename'], 'w') as f:
-    # The requirement is that empty values are represented in the CSV
-    # file as "", csv.QUOTE_NONNUMERIC achieves that
-    log.debug("writing CSV file '%s'" % (infoex['csv_filename']))
-    writer = csv.writer(f, quoting=csv.QUOTE_NONNUMERIC)
-    writer.writerow(final_data)
-    f.close()
-
-if not options.dry_run:
-    with open(infoex['csv_filename'], 'rb') as f:
-        log.debug("uploading FTP file '%s'" % (infoex['host']))
-        ftp = FTP(infoex['host'], infoex['uuid'], infoex['api_key'])
-        ftp.storlines('STOR ' + infoex['csv_filename'], f)
-        ftp.close()
+    LOG.info("Time to get all data : %.3f sec" % (time.time() -
+        time_all_elements))
+
+    LOG.debug("infoex[wx_data]: %s", str(infoex['wx_data']))
+
+    # Now we only need to add in what we want to change thanks to that
+    # abomination of a variable declaration earlier
+    final_data[fmap['Location UUID']] = infoex['location_uuid']
+    final_data[fmap['obDate']] = end_date.strftime('%m/%d/%Y')
+    final_data[fmap['obTime']] = end_date.strftime('%H:%M')
+
+    for elementCd in infoex['wx_data']:
+        if elementCd not in iemap:
+            LOG.warning("BAD KEY wx_data['%s']" % (elementCd))
+            continue
+
+        # CONSIDER: Casting every value to Float() -- need to investigate if
+        #           any possible elementCds we may want are any other data
+        #           type than float.
+        #
+        #           Another possibility is to query the API with
+        #           getStationElements and temporarily store the
+        #           storedUnitCd. But that's pretty network-intensive and
+        #           may not even be worth it if there's only e.g. one or two
+        #           exceptions to any otherwise uniformly Float value set.
+        final_data[fmap[iemap[elementCd]]] = infoex['wx_data'][elementCd]
+
+    LOG.debug("final_data: %s" % (str(final_data)))
+
+    if not write_local_csv(infoex['csv_filename'], final_data):
+        LOG.warning('Could not write local CSV file: %s',
+                    infoex['csv_filename'])
+        return 1;
+
+    if not options.dry_run:
+        upload_csv(infoex['csv_filename'], infoex)
+
+    LOG.debug('DONE')
+    return 0
+
+# Data structure operations
+def setup_infoex_fields_mapping(location_uuid):
+    """
+    Create a mapping of InfoEx fields to the local data's indexing scheme.
+
+    INFOEX FIELDS
+    
+    This won't earn style points in Python, but here we establish a couple
+    of helpful mappings variables. The reason this is helpful is that the
+    end result is simply an ordered set, the CSV file. But we still may
+    want to manipulate the values arbitrarily before writing that file.
+    
+    Also note that the current Auto Wx InfoEx documentation shows these
+    keys in a graphical table with the "index" beginning at 1, but here we
+    sanely index beginning at 0.
+    """
+    fmap = {}                           ; final_data     = [None] * 29
+    fmap['Location UUID'] = 0           ; final_data[0]  = location_uuid
+    fmap['obDate'] = 1                  ; final_data[1]  = None
+    fmap['obTime'] = 2                  ; final_data[2]  = None
+    fmap['timeZone'] = 3                ; final_data[3]  = 'Pacific'
+    fmap['tempMaxHour'] = 4             ; final_data[4]  = None
+    fmap['tempMaxHourUnit'] = 5         ; final_data[5]  = 'F'
+    fmap['tempMinHour'] = 6             ; final_data[6]  = None
+    fmap['tempMinHourUnit'] = 7         ; final_data[7]  = 'F'
+    fmap['tempPres'] = 8                ; final_data[8]  = None
+    fmap['tempPresUnit'] = 9            ; final_data[9]  = 'F'
+    fmap['precipitationGauge'] = 10     ; final_data[10] = None
+    fmap['precipitationGaugeUnit'] = 11 ; final_data[11] = 'in'
+    fmap['windSpeedNum'] = 12           ; final_data[12] = None
+    fmap['windSpeedUnit'] = 13          ; final_data[13] = 'mph'
+    fmap['windDirectionNum'] = 14       ; final_data[14] = None
+    fmap['hS'] = 15                     ; final_data[15] = None
+    fmap['hsUnit'] = 16                 ; final_data[16] = 'in'
+    fmap['baro'] = 17                   ; final_data[17] = None
+    fmap['baroUnit'] = 18               ; final_data[18] = 'inHg'
+    fmap['rH'] = 19                     ; final_data[19] = None
+    fmap['windGustSpeedNum'] = 20       ; final_data[20] = None
+    fmap['windGustSpeedNumUnit'] = 21   ; final_data[21] = 'mph'
+    fmap['windGustDirNum'] = 22         ; final_data[22] = None
+    fmap['dewPoint'] = 23               ; final_data[23] = None
+    fmap['dewPointUnit'] = 24           ; final_data[24] = 'F'
+    fmap['hn24Auto'] = 25               ; final_data[25] = None
+    fmap['hn24AutoUnit'] = 26           ; final_data[26] = 'in'
+    fmap['hstAuto'] = 27                ; final_data[27] = None
+    fmap['hstAutoUnit'] = 28            ; final_data[28] = 'in'
+
+    return (fmap, final_data)
+
+def setup_infoex_counterparts_mapping(provider):
+    """
+    Create a mapping of the NRCS/MesoWest fields that this program supports to
+    their InfoEx counterparts
+    """
+    iemap = {}
+
+    if provider == 'nrcs':
+        iemap['PREC'] = 'precipitationGauge'
+        iemap['TOBS'] = 'tempPres'
+        iemap['SNWD'] = 'hS'
+        iemap['PRES'] = 'baro'
+        iemap['RHUM'] = 'rH'
+        iemap['WSPD'] = 'windSpeedNum'
+        iemap['WDIR'] = 'windDirectionNum'
+        # unsupported by NRCS:
+        # windGustSpeedNum
+    elif provider == 'mesowest':
+        iemap['precip_accum'] = 'precipitationGauge'
+        iemap['air_temp'] = 'tempPres'
+        iemap['snow_depth'] = 'hS'
+        iemap['pressure'] = 'baro'
+        iemap['relative_humidity'] = 'rH'
+        iemap['wind_speed'] = 'windSpeedNum'
+        iemap['wind_direction'] = 'windDirectionNum'
+        iemap['wind_gust'] = 'windGustSpeedNum'
+
+    return iemap
+
+# CSV operations
+def write_local_csv(path_to_file, data):
+    """Write the specified CSV file to disk"""
+    with open(path_to_file, 'w') as f:
+        # The requirement is that empty values are represented in the CSV
+        # file as "", csv.QUOTE_NONNUMERIC achieves that
+        LOG.debug("writing CSV file '%s'" % (path_to_file))
+        writer = csv.writer(f, quoting=csv.QUOTE_NONNUMERIC)
+        writer.writerow(data)
         f.close()
-    os.remove(infoex['csv_filename'])
+    return True
+
+def upload_csv(path_to_file, infoex_data):
+    """Upload the specified CSV file to InfoEx FTP and remove the file"""
+    with open(path_to_file, 'rb') as file_object:
+        LOG.debug("uploading FTP file '%s'" % (infoex_data['host']))
+        ftp = FTP(infoex_data['host'], infoex_data['uuid'],
+                  infoex_data['api_key'])
+        ftp.storlines('STOR ' + path_to_file, file_object)
+        ftp.close()
+        file_object.close()
+    os.remove(path_to_file)
 
-log.debug('DONE')
+if __name__ == "__main__":
+    sys.exit(main())
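
For reference, setup_config() expects an ini file with [infoex] and [station] sections in which every value is non-empty (the final sanity check rejects empty values). A minimal sketch with placeholder values; token and units are only read for a MesoWest station, and NRCS is forced to imperial units for now:

    [infoex]
    host = ftp.example.com
    uuid = 00000000-0000-0000-0000-000000000000
    api_key = REPLACE_ME
    csv_filename = wx_data.csv
    location_uuid = 00000000-0000-0000-0000-000000000000

    [station]
    type = mesowest
    token = REPLACE_ME
    station_id = XXXXX
    desired_data = air_temp,snow_depth
    units = metric

With a config file like that in place, invocation would be something like:

    ./infoex-autowx.py --config wx.ini --log-level info --dry-run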