# -*- coding: utf-8 -*-
"""
InfoEx <-> NRCS Auto Wx implementation
Wylark Mountaineering LLC

This program fetches data from an NRCS SNOTEL site and pushes it to
InfoEx using the new automated weather system implementation.

It is designed to be run hourly. It asks for the last three hours of
data of each desired type and selects the most recent value. This
lends some resiliency to the process and helps ensure that we have a
value to send, but it can lead to somewhat inconsistent/untruthful
data if e.g. the HS is from the last hour but the tempPres is from two
hours ago because the instrumentation had a hiccup. It's worth
considering whether this is a bug or a feature.

For more information, see file: README
For licensing, see file: LICENSE
"""
import configparser
import csv
import datetime
import logging
import sys
import time

from collections import OrderedDict
from ftplib import FTP
from optparse import OptionParser

import zeep
import zeep.cache
import zeep.transports
log = logging.getLogger(__name__)
log.setLevel(logging.DEBUG)
try:
    # log to the systemd journal when the systemd bindings are available
    from systemd.journal import JournalHandler
    log.addHandler(JournalHandler())
except ImportError:
    # otherwise fall back to syslog
    import logging.handlers
    log.addHandler(logging.handlers.SysLogHandler())
parser = OptionParser()
parser.add_option("--config",
                  dest="config",
                  metavar="FILE",
                  help="location of config file")
parser.add_option("--dry-run",
                  action="store_true",
                  dest="dry_run",
                  default=False,
                  help="fetch data but don't upload to InfoEx")
(options, args) = parser.parse_args()
config = configparser.ConfigParser(allow_no_value=False)
if not options.config:
    print("Please specify a configuration file via --config")
    sys.exit(1)
config.read(options.config)
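# For reference, a minimal sketch of the config file layout this script
# expects, based on the keys read below. All values shown are
# hypothetical placeholders, not real credentials or station IDs:
#
#   [ftp]
#   host = ftp.example.com
#   uuid = 00000000-0000-0000-0000-000000000000
#   api_key = example-api-key
#
#   [wxsite]
#   location_uuid = 00000000-0000-0000-0000-000000000000
#   csv_filename = wx_data.csv
#   station_triplet = 000:XX:SNTL
#   desired_data = TOBS,SNWD,PREC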
log.debug('STARTING UP')
wsdl = 'https://www.wcc.nrcs.usda.gov/awdbWebService/services?WSDL'
try:
    infoex = {
        'host': config['ftp']['host'],
        'uuid': config['ftp']['uuid'],
        'api_key': config['ftp']['api_key'],
        'location_uuid': config['wxsite']['location_uuid'],
        'wx_data': {}, # placeholder key, values to come later
        'csv_filename': config['wxsite']['csv_filename']
    }

    station_triplet = config['wxsite']['station_triplet']

    try:
        desired_data = config['wxsite']['desired_data'].split(',')
    except KeyError:
        # desired_data malformed or missing, setting default
        desired_data = [
            'TOBS', # AIR TEMPERATURE OBSERVED (degF)
            'SNWD', # SNOW DEPTH (in)
            'PREC'  # PRECIPITATION ACCUMULATION (in)
        ]
except KeyError as e:
    log.critical("%s not defined in %s" % (e, options.config))
    sys.exit(1)
except Exception as exc:
    log.critical("Exception occurred in config parsing: '%s'" % (exc))
    sys.exit(1)
# all sections/values present in config file, final sanity check
try:
    for key in config.sections():
        for subkey in config[key]:
            if not len(config[key][subkey]):
                raise ValueError
except ValueError as exc:
    log.critical("Config value '%s.%s' is empty" % (key, subkey))
    sys.exit(1)
# This won't earn style points in Python, but here we establish a couple
# of helpful mapping variables. The reason this is helpful is that the
# end result is simply an ordered set, the CSV file. But we still may
# want to manipulate the values arbitrarily before writing that file.
#
# Also note that the current Auto Wx InfoEx documentation shows these
# keys in a graphical table with the "index" beginning at 1, but here we
# are sanely indexing beginning at 0.
fmap = {}                          ; final_data    = [None] * 29
fmap['Location UUID'] = 0          ; final_data[0] = infoex['location_uuid']
fmap['obDate'] = 1                 ; final_data[1] = None
fmap['obTime'] = 2                 ; final_data[2] = None
fmap['timeZone'] = 3               ; final_data[3] = 'Pacific'
fmap['tempMaxHour'] = 4            ; final_data[4] = None
fmap['tempMaxHourUnit'] = 5        ; final_data[5] = 'F'
fmap['tempMinHour'] = 6            ; final_data[6] = None
fmap['tempMinHourUnit'] = 7        ; final_data[7] = 'F'
fmap['tempPres'] = 8               ; final_data[8] = None
fmap['tempPresUnit'] = 9           ; final_data[9] = 'F'
fmap['precipitationGauge'] = 10    ; final_data[10] = None
fmap['precipitationGaugeUnit'] = 11; final_data[11] = 'in'
fmap['windSpeedNum'] = 12          ; final_data[12] = None
fmap['windSpeedUnit'] = 13         ; final_data[13] = 'mph'
fmap['windDirectionNum'] = 14      ; final_data[14] = None
fmap['hS'] = 15                    ; final_data[15] = None
fmap['hsUnit'] = 16                ; final_data[16] = 'in'
fmap['baro'] = 17                  ; final_data[17] = None
fmap['baroUnit'] = 18              ; final_data[18] = 'inHg'
fmap['rH'] = 19                    ; final_data[19] = None
fmap['windGustSpeedNum'] = 20      ; final_data[20] = None
fmap['windGustSpeedNumUnit'] = 21  ; final_data[21] = 'mph'
fmap['windGustDirNum'] = 22        ; final_data[22] = None
fmap['dewPoint'] = 23              ; final_data[23] = None
fmap['dewPointUnit'] = 24          ; final_data[24] = 'F'
fmap['hn24Auto'] = 25              ; final_data[25] = None
fmap['hn24AutoUnit'] = 26          ; final_data[26] = 'in'
fmap['hstAuto'] = 27               ; final_data[27] = None
fmap['hstAutoUnit'] = 28           ; final_data[28] = 'in'
# one final mapping, the NRCS fields that this program supports to
# their InfoEx counterpart
iemap = {}
iemap['PREC'] = 'precipitationGauge'
iemap['TOBS'] = 'tempPres'
iemap['SNWD'] = 'hS'
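# To illustrate how the two maps work together: a hypothetical TOBS
# reading of 23.4 degF fetched below is routed via
# iemap['TOBS'] -> 'tempPres' -> fmap['tempPres'] == 8, so it ends up in
# final_data[8] (with final_data[9] already holding its unit, 'F').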
# floor time to nearest hour
dt = datetime.datetime.now()
end_date = dt - datetime.timedelta(minutes=dt.minute % 60,
                                   seconds=dt.second,
                                   microseconds=dt.microsecond)
begin_date = end_date - datetime.timedelta(hours=3)
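# Worked example of the flooring above: if the script runs at
# 14:37:22.123456, end_date becomes 14:00:00 and begin_date becomes
# 11:00:00, i.e. a three-hour query window aligned to the hour.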
transport = zeep.transports.Transport(cache=zeep.cache.SqliteCache())
client = zeep.Client(wsdl=wsdl, transport=transport)
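# The SqliteCache is intended to cache the fetched WSDL/XSD documents on
# disk so that an hourly cron run doesn't have to re-download the service
# description each time (zeep's default cache location is used here).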
time_all_elements = time.time()
log.debug("Getting %s data from %s to %s" % (str(desired_data),
          str(begin_date), str(end_date)))
for elementCd in desired_data:
    time_element = time.time()

    # get the last three hours of data for this elementCd
    tmp = client.service.getHourlyData(
        stationTriplets=[station_triplet],
        elementCd=elementCd,
        ordinal=1,
        beginDate=begin_date,
        endDate=end_date)

    log.info("Time to get elementCd '%s': %.3f sec" % (elementCd,
             time.time() - time_element))
    values = tmp[0]['values']
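    # Assumed shape of the SOAP response, inferred from the accesses in
    # this script: a list with one entry per station triplet, where each
    # entry carries a 'values' list of {'dateTime': ..., 'value': ...}
    # records (one per hour, possibly with gaps).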
    # sort and isolate the most recent
    #
    # NOTE: we do this because sometimes there are gaps in hourly data
    #       in NRCS; yes, we may end up with slightly inaccurate data,
    #       so perhaps this decision will be re-evaluated in the future
    if values:
        ordered = sorted(values, key=lambda t: t['dateTime'], reverse=True)
        infoex['wx_data'][elementCd] = ordered[0]['value']
    else:
        infoex['wx_data'][elementCd] = None
log.info("Time to get all elementCds : %.3f sec" % (time.time() -
         time_all_elements))
log.debug("infoex[wx_data]: %s", str(infoex['wx_data']))
# Now we only need to add in what we want to change thanks to that
# abomination of a variable declaration earlier
final_data[fmap['Location UUID']] = infoex['location_uuid']
final_data[fmap['obDate']] = end_date.strftime('%m/%d/%Y')
final_data[fmap['obTime']] = end_date.strftime('%H:%M')
for elementCd in infoex['wx_data']:
    if elementCd not in iemap:
        log.warning("BAD KEY wx_data['%s']" % (elementCd))
        continue
    # CONSIDER: Casting every value to Float() -- need to investigate if
    #           any possible elementCds we may want are any other data
    #           type.
    #
    # Another possibility is to query the API with
    # getStationElements and temporarily store the
    # storedUnitCd. But that's pretty network-intensive and
    # may not even be worth it if there's only e.g. one or two
    # exceptions to any otherwise uniformly Float value set.
    final_data[fmap[iemap[elementCd]]] = infoex['wx_data'][elementCd]
log.debug("final_data: %s" % (str(final_data)))
with open(infoex['csv_filename'], 'w') as f:
    # The requirement is that empty values are represented in the CSV
    # file as "", csv.QUOTE_NONNUMERIC achieves that
    log.debug("writing CSV file '%s'" % (infoex['csv_filename']))
    writer = csv.writer(f, quoting=csv.QUOTE_NONNUMERIC)
    writer.writerow(final_data)
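# To illustrate the quoting above with hypothetical values: writing a row
# such as [uuid, "02/01/2024", "14:00", "Pacific", None, "F", 23.4, ...]
# yields "...","02/01/2024","14:00","Pacific","","F",23.4,... -- i.e.
# None becomes the required empty "" field and numeric values go unquoted.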
if not options.dry_run:
    with open(infoex['csv_filename'], 'rb') as f:
        log.debug("uploading FTP file '%s'" % (infoex['host']))
        ftp = FTP(infoex['host'], infoex['uuid'], infoex['api_key'])
        ftp.storlines('STOR ' + infoex['csv_filename'], f)