+ LOG.debug("Getting %s data from %s to %s (%s)",
+ str(station['desired_data']),
+ str(begin_date), str(end_date), end_date.tzinfo.zone)
+
+ time_all_elements = time.time()
+
+ # get the data
+ if station['provider'] == 'nrcs':
+ infoex['wx_data'] = get_nrcs_data(begin_date, end_date, station)
+ elif station['provider'] == 'mesowest':
+ infoex['wx_data'] = get_mesowest_data(begin_date, end_date,
+ station)
+ elif station['provider'] == 'python':
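+        # dynamically load the user-specified Python program from
+        # station['path'], hand it our logger, and call its
+        # get_custom_data() entry point to retrieve the Wx data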
+ try:
+ spec = importlib.util.spec_from_file_location('custom_wx',
+ station['path'])
+ mod = importlib.util.module_from_spec(spec)
+ spec.loader.exec_module(mod)
+ mod.LOG = LOG
+
+ try:
+ infoex['wx_data'] = mod.get_custom_data()
+
+ if infoex['wx_data'] is None:
+ infoex['wx_data'] = []
+ except Exception as exc:
+ LOG.error("Python program for custom Wx data failed in "
+ "execution: %s", str(exc))
+ sys.exit(1)
+
+ LOG.info("Successfully executed external Python program")
+ except ImportError:
+ LOG.error("Please upgrade to Python 3.3 or later")
+ sys.exit(1)
+ except FileNotFoundError:
+ LOG.error("Specified Python program for custom Wx data "
+ "was not found")
+ sys.exit(1)
+ except Exception as exc:
+ LOG.error("A problem was encountered when attempting to "
+ "load your custom Wx program: %s", str(exc))
+ sys.exit(1)
+
+    LOG.info("Time taken to get all data: %.3f sec",
+             time.time() - time_all_elements)
+
+ LOG.debug("infoex[wx_data]: %s", str(infoex['wx_data']))
+
+    # timezone massaging: convert the ending datetime to the station's
+    # local timezone for the obDate/obTime fields below
+ final_end_date = end_date.astimezone(station['tz'])
+
+ # Now we only need to add in what we want to change thanks to that
+ # abomination of a variable declaration earlier
+ final_data[fmap['Location UUID']] = infoex['location_uuid']
+ final_data[fmap['obDate']] = final_end_date.strftime('%m/%d/%Y')
+ final_data[fmap['obTime']] = final_end_date.strftime('%H:%M')
+ final_data[fmap['timeZone']] = station['tz'].zone
+
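+    # loop over the collected data, mapping each provider element code
+    # onto its InfoEx counterpart (iemap) and then into the ordered CSV
+    # row (fmap), converting units and rounding along the way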
+ for element_cd in infoex['wx_data']:
+ if element_cd not in iemap:
+ LOG.warning("BAD KEY wx_data['%s']", element_cd)
+ continue
+
+ if infoex['wx_data'][element_cd] is None:
+ continue
+
+ # do the conversion before the rounding
+        if station['provider'] == 'nrcs' and station['units'] == 'metric':
+            infoex['wx_data'][element_cd] = convert_nrcs_units_to_metric(
+                element_cd, infoex['wx_data'][element_cd])
+
+        if station['provider'] != 'python' and station['units'] == 'american':
+            infoex['wx_data'][element_cd] = convert_units_to_american(
+                element_cd, infoex['wx_data'][element_cd])
+
+        # Massage precision of certain values to fit InfoEx's
+        # expectations
+        #
+        # 0 decimal places: relative humidity, wind speed, wind
+        #                   direction, wind gust, snow depth, hn24
+        # 1 decimal place:  air temp, baro
+        # 2 decimal places: precipitation gauge
+        #
+        # Avoid transforming None values
+ if element_cd in ['wind_speed', 'WSPD', 'wind_direction',
+ 'RHUM', 'relative_humidity', 'WDIR',
+ 'wind_gust', 'SNWD', 'snow_depth',
+ 'hn24']:
+ infoex['wx_data'][element_cd] = round(infoex['wx_data'][element_cd])
+ elif element_cd in ['TOBS', 'air_temp', 'PRES', 'pressure']:
+ infoex['wx_data'][element_cd] = round(infoex['wx_data'][element_cd], 1)
+ elif element_cd in ['PREC', 'precip_accum']:
+ infoex['wx_data'][element_cd] = round(infoex['wx_data'][element_cd], 2)
+
+ # CONSIDER: Casting every value to Float() -- need to investigate if
+ # any possible elementCds we may want are any other data
+ # type than float.
+ #
+ # Another possibility is to query the API with
+ # getStationElements and temporarily store the
+ # storedUnitCd. But that's pretty network-intensive and
+ # may not even be worth it if there's only e.g. one or two
+ # exceptions to any otherwise uniformly Float value set.
+ final_data[fmap[iemap[element_cd]]] = infoex['wx_data'][element_cd]
+
+ LOG.debug("final_data: %s", str(final_data))
+
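+    # only write (and optionally upload) the CSV if we actually have data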
+ if infoex['wx_data']:
+ if not write_local_csv(infoex['csv_filename'], final_data):
+ LOG.warning('Could not write local CSV file: %s',
+ infoex['csv_filename'])
+ return 1
+
+ if not options.dry_run:
+ upload_csv(infoex['csv_filename'], infoex)
+
+ LOG.debug('DONE')
+ return 0
+
+# data structure operations
+def setup_infoex_fields_mapping(location_uuid):
+ """
+ Create a mapping of InfoEx fields to the local data's indexing scheme.
+
+ INFOEX FIELDS
+
+    This won't earn style points in Python, but here we establish a couple
+    of helpful mapping variables. The reason this is helpful is that the
+    end result is simply an ordered row of values, the CSV file. But we
+    still may want to manipulate the values arbitrarily before writing
+    that file.
+
+ Also note that the current Auto Wx InfoEx documentation shows these
+ keys in a graphical table with the "index" beginning at 1, but here we
+ sanely index beginning at 0.
+ """
+ # pylint: disable=too-many-statements,multiple-statements,bad-whitespace
+ fmap = {} ; final_data = [None] * 29
+ fmap['Location UUID'] = 0 ; final_data[0] = location_uuid
+ fmap['obDate'] = 1 ; final_data[1] = None
+ fmap['obTime'] = 2 ; final_data[2] = None
+ fmap['timeZone'] = 3 ; final_data[3] = 'Pacific'
+ fmap['tempMaxHour'] = 4 ; final_data[4] = None
+ fmap['tempMaxHourUnit'] = 5 ; final_data[5] = 'F'
+ fmap['tempMinHour'] = 6 ; final_data[6] = None
+ fmap['tempMinHourUnit'] = 7 ; final_data[7] = 'F'
+ fmap['tempPres'] = 8 ; final_data[8] = None
+ fmap['tempPresUnit'] = 9 ; final_data[9] = 'F'
+ fmap['precipitationGauge'] = 10 ; final_data[10] = None
+ fmap['precipitationGaugeUnit'] = 11 ; final_data[11] = 'in'
+ fmap['windSpeedNum'] = 12 ; final_data[12] = None
+ fmap['windSpeedUnit'] = 13 ; final_data[13] = 'mph'
+ fmap['windDirectionNum'] = 14 ; final_data[14] = None
+ fmap['hS'] = 15 ; final_data[15] = None
+ fmap['hsUnit'] = 16 ; final_data[16] = 'in'
+ fmap['baro'] = 17 ; final_data[17] = None
+ fmap['baroUnit'] = 18 ; final_data[18] = 'inHg'
+ fmap['rH'] = 19 ; final_data[19] = None
+ fmap['windGustSpeedNum'] = 20 ; final_data[20] = None
+ fmap['windGustSpeedNumUnit'] = 21 ; final_data[21] = 'mph'
+ fmap['windGustDirNum'] = 22 ; final_data[22] = None
+ fmap['dewPoint'] = 23 ; final_data[23] = None
+ fmap['dewPointUnit'] = 24 ; final_data[24] = 'F'
+ fmap['hn24Auto'] = 25 ; final_data[25] = None
+ fmap['hn24AutoUnit'] = 26 ; final_data[26] = 'in'
+ fmap['hstAuto'] = 27 ; final_data[27] = None
+ fmap['hstAutoUnit'] = 28 ; final_data[28] = 'in'
+
+ return (fmap, final_data)
+
+def setup_infoex_counterparts_mapping(provider):
+ """
+    Create a mapping of the provider fields (NRCS, MesoWest, or custom
+    Python) that this program supports to their InfoEx counterparts.
+ """
+ iemap = {}
+
+ if provider == 'nrcs':
+ iemap['PREC'] = 'precipitationGauge'
+ iemap['TOBS'] = 'tempPres'
+ iemap['TMAX'] = 'tempMaxHour'
+ iemap['TMIN'] = 'tempMinHour'
+ iemap['SNWD'] = 'hS'
+ iemap['PRES'] = 'baro'
+ iemap['RHUM'] = 'rH'
+ iemap['WSPD'] = 'windSpeedNum'
+ iemap['WDIR'] = 'windDirectionNum'
+ # unsupported by NRCS:
+ # windGustSpeedNum
+
+        # NOTE: this doesn't exist in NRCS SNOTEL; we create it in this
+        #       program, so add it to the map here
+ iemap['hn24'] = 'hn24Auto'
+ elif provider == 'mesowest':
+ iemap['precip_accum'] = 'precipitationGauge'
+ iemap['air_temp'] = 'tempPres'
+ iemap['air_temp_high_24_hour'] = 'tempMaxHour'
+ iemap['air_temp_low_24_hour'] = 'tempMinHour'
+ iemap['snow_depth'] = 'hS'
+ iemap['pressure'] = 'baro'
+ iemap['relative_humidity'] = 'rH'
+ iemap['wind_speed'] = 'windSpeedNum'
+ iemap['wind_direction'] = 'windDirectionNum'
+ iemap['wind_gust'] = 'windGustSpeedNum'
+
+        # NOTE: this doesn't exist in MesoWest; we create it in this
+        #       program, so add it to the map here
+ iemap['hn24'] = 'hn24Auto'
+ elif provider == 'python':
+ # we expect Python programs to use the InfoEx data type names
+ iemap['precipitationGauge'] = 'precipitationGauge'
+ iemap['tempPres'] = 'tempPres'
+ iemap['tempMaxHour'] = 'tempMaxHour'
+ iemap['tempMinHour'] = 'tempMinHour'
+ iemap['hS'] = 'hS'
+ iemap['baro'] = 'baro'
+ iemap['rH'] = 'rH'
+ iemap['windSpeedNum'] = 'windSpeedNum'
+ iemap['windDirectionNum'] = 'windDirectionNum'
+ iemap['windGustSpeedNum'] = 'windGustSpeedNum'
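+
+        # As a purely hypothetical example, a custom program's
+        # get_custom_data() might return something like:
+        #
+        #   {'tempPres': 28.4, 'hS': 42.0, 'windSpeedNum': 11}
+        #
+        # i.e. a dict keyed by the InfoEx data type names above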
+
+ return iemap
+
+# provider-specific operations
+def get_nrcs_data(begin, end, station):
+ """get the data we're after from the NRCS WSDL"""
+ transport = zeep.transports.Transport(cache=zeep.cache.SqliteCache())
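+    # NOTE: TLS certificate verification is disabled for the NRCS
+    #       SOAP endpoint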
+ transport.session.verify = False
+ client = zeep.Client(wsdl=station['source'], transport=transport)
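+    # remote_data will map each requested NRCS elementCd to its most
+    # recent value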
+ remote_data = {}
+
+ # massage begin/end date format
+ begin_date_str = begin.strftime('%Y-%m-%d %H:%M:00')
+ end_date_str = end.strftime('%Y-%m-%d %H:%M:00')
+
+ for element_cd in station['desired_data']:
+ time_element = time.time()
+
+        # get the last three hours of data for this element_cd (elementCd)
+ tmp = client.service.getHourlyData(
+ stationTriplets=[station['station_id']],
+ elementCd=element_cd,
+ ordinal=1,
+ beginDate=begin_date_str,
+ endDate=end_date_str)
+
+ LOG.info("Time to get NRCS elementCd '%s': %.3f sec", element_cd,
+ time.time() - time_element)
+
+ values = tmp[0]['values']
+
+ # sort and isolate the most recent
+ #
+ # NOTE: we do this because sometimes there are gaps in hourly data
+ # in NRCS; yes, we may end up with slightly inaccurate data,
+ # so perhaps this decision will be re-evaluated in the future
+ if values:
+ ordered = sorted(values, key=lambda t: t['dateTime'], reverse=True)
+ remote_data[element_cd] = ordered[0]['value']
+ else:
+ remote_data[element_cd] = None
+
+ # calc hn24, if applicable
+ hn24 = None
+
+ if station['hn24']:
+ hn24_values = []
+
+ if element_cd == "SNWD":
+                for val in values:
+                    if val is None:
+                        continue
+                    hn24_values.append(val['value'])
+
+                if hn24_values:
+ # instead of taking MAX - MIN, we want the first
+ # value (most distant) - the last value (most
+ # recent)
+ #
+ # if the result is positive, then we have
+ # settlement; if it's not, then we have HN24
+                    hn24 = hn24_values[0] - hn24_values[-1]
+
+ if hn24 < 0.0:
+ hn24 = abs(hn24)
+ else:
+ # this case represents HS settlement
+ hn24 = 0.0
+
+ # finally, if user wants hn24 and it's set to None at this
+ # point, then force it to 0.0
+ if hn24 is None:
+ hn24 = 0.0
+
+        if station['hn24'] and hn24 is not None:
+            remote_data['hn24'] = hn24
+
+ return remote_data
+
+def get_mesowest_data(begin, end, station):
+ """get the data we're after from the MesoWest/Synoptic API"""
+ remote_data = {}
+
+ # massage begin/end date format
+ begin_date_str = begin.strftime('%Y%m%d%H%M')
+ end_date_str = end.strftime('%Y%m%d%H%M')
+
+ # construct final, completed API URL
+    api_req_url = (station['source'] + '&start=' + begin_date_str +
+                   '&end=' + end_date_str)
+
+ try:
+ req = requests.get(api_req_url)
+ except requests.exceptions.ConnectionError:
+ LOG.error("Could not connect to '%s'", api_req_url)
+ sys.exit(1)