# -*- coding: utf-8 -*-

"""
InfoEx <-> NRCS Auto Wx implementation
Wylark Mountaineering LLC

This program fetches data from an NRCS SNOTEL site and pushes it to
InfoEx using the new automated weather system implementation.

It is designed to be run hourly; it asks for the last three hours of
data for each desired data type and selects the most recent value.
This lends some resiliency to the process and helps ensure that we
have a value to send, but it can lead to somewhat inconsistent/untruthful
data if e.g. the HS is from the last hour but the tempPres is from two
hours ago because the instrumentation had a hiccup. It's worth
considering whether this is a bug or a feature.

For more information, see file: README
For licensing, see file: LICENSE
"""

import csv
import configparser
import datetime
import logging
import os
import sys
import time

from collections import OrderedDict
from ftplib import FTP
from optparse import OptionParser

import zeep
import zeep.cache
import zeep.transports

log = logging.getLogger(__name__)
log.setLevel(logging.DEBUG)

try:
    from systemd.journal import JournalHandler
    log.addHandler(JournalHandler())
except ImportError:
    # no systemd support on this host; fall back to syslog
    import logging.handlers
    log.addHandler(logging.handlers.SysLogHandler())

parser = OptionParser()

parser.add_option("--config",
                  dest="config",
                  metavar="FILE",
                  help="location of config file")

parser.add_option("--dry-run",
                  action="store_true",
                  dest="dry_run",
                  default=False,
                  help="fetch data but don't upload to InfoEx")

(options, args) = parser.parse_args()
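
# Typical invocations look something like the following; the script name
# and config path here are illustrative only (see README for specifics):
#
#   python3 nrcs2infoex.py --config /etc/infoex-autowx.ini
#   python3 nrcs2infoex.py --config ./test.ini --dry-run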

config = configparser.ConfigParser(allow_no_value=False)

if not options.config:
    print("Please specify a configuration file via --config")
    sys.exit(1)

config.read(options.config)

log.debug('STARTING UP')

wsdl = 'https://www.wcc.nrcs.usda.gov/awdbWebService/services?WSDL'

try:
    infoex = {
        'host': config['ftp']['host'],
        'uuid': config['ftp']['uuid'],
        'api_key': config['ftp']['api_key'],
        'location_uuid': config['wxsite']['location_uuid'],
        'wx_data': {}, # placeholder key, values to come later
        'csv_filename': config['wxsite']['csv_filename']
    }

    station_triplet = config['wxsite']['station_triplet']

    try:
        desired_data = config['wxsite']['desired_data'].split(',')
    except KeyError:
        # desired_data malformed or missing, setting default
        desired_data = [
            'TOBS', # AIR TEMPERATURE OBSERVED (degF)
            'SNWD', # SNOW DEPTH (in)
            'PREC'  # PRECIPITATION ACCUMULATION (in)
        ]
except KeyError as e:
    log.critical("%s not defined in %s" % (e, options.config))
    sys.exit(1)
except Exception as exc:
    log.critical("Exception occurred in config parsing: '%s'" % (exc))
    sys.exit(1)

# all sections/values present in config file, final sanity check
try:
    for key in config.sections():
        for subkey in config[key]:
            if not len(config[key][subkey]):
                raise ValueError
except ValueError as exc:
    log.critical("Config value '%s.%s' is empty" % (key, subkey))
    sys.exit(1)
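
# For reference, a configuration file providing the values read above
# might look roughly like this (the section and key names match the code;
# every value shown is purely illustrative):
#
#   [ftp]
#   host = ftp.example.com
#   uuid = 00000000-0000-0000-0000-000000000000
#   api_key = changeme
#
#   [wxsite]
#   location_uuid = 00000000-0000-0000-0000-000000000000
#   csv_filename = wx_data.csv
#   station_triplet = 1000:OR:SNTL
#   desired_data = TOBS,SNWD,PREC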

# This won't earn style points in Python, but here we establish a couple
# of helpful mapping variables. The reason this is helpful is that the
# end result is simply an ordered set, the CSV file. But we still may
# want to manipulate the values arbitrarily before writing that file.
#
# Also note that the current Auto Wx InfoEx documentation shows these
# keys in a graphical table with the "index" beginning at 1, but here we
# are sanely indexing beginning at 0.
fmap = {} ; final_data = [None] * 29
fmap['Location UUID'] = 0 ; final_data[0] = infoex['location_uuid']
fmap['obDate'] = 1 ; final_data[1] = None
fmap['obTime'] = 2 ; final_data[2] = None
fmap['timeZone'] = 3 ; final_data[3] = 'Pacific'
fmap['tempMaxHour'] = 4 ; final_data[4] = None
fmap['tempMaxHourUnit'] = 5 ; final_data[5] = 'F'
fmap['tempMinHour'] = 6 ; final_data[6] = None
fmap['tempMinHourUnit'] = 7 ; final_data[7] = 'F'
fmap['tempPres'] = 8 ; final_data[8] = None
fmap['tempPresUnit'] = 9 ; final_data[9] = 'F'
fmap['precipitationGauge'] = 10 ; final_data[10] = None
fmap['precipitationGaugeUnit'] = 11 ; final_data[11] = 'in'
fmap['windSpeedNum'] = 12 ; final_data[12] = None
fmap['windSpeedUnit'] = 13 ; final_data[13] = 'mph'
fmap['windDirectionNum'] = 14 ; final_data[14] = None
fmap['hS'] = 15 ; final_data[15] = None
fmap['hsUnit'] = 16 ; final_data[16] = 'in'
fmap['baro'] = 17 ; final_data[17] = None
fmap['baroUnit'] = 18 ; final_data[18] = 'inHg'
fmap['rH'] = 19 ; final_data[19] = None
fmap['windGustSpeedNum'] = 20 ; final_data[20] = None
fmap['windGustSpeedNumUnit'] = 21 ; final_data[21] = 'mph'
fmap['windGustDirNum'] = 22 ; final_data[22] = None
fmap['dewPoint'] = 23 ; final_data[23] = None
fmap['dewPointUnit'] = 24 ; final_data[24] = 'F'
fmap['hn24Auto'] = 25 ; final_data[25] = None
fmap['hn24AutoUnit'] = 26 ; final_data[26] = 'in'
fmap['hstAuto'] = 27 ; final_data[27] = None
fmap['hstAutoUnit'] = 28 ; final_data[28] = 'in'

# one final mapping, the NRCS fields that this program supports to
# their InfoEx counterpart
iemap = {}

iemap['PREC'] = 'precipitationGauge'
iemap['TOBS'] = 'tempPres'
iemap['SNWD'] = 'hS'

# floor time to nearest hour
dt = datetime.datetime.now()
end_date = dt - datetime.timedelta(minutes=dt.minute % 60,
                                   seconds=dt.second,
                                   microseconds=dt.microsecond)
begin_date = end_date - datetime.timedelta(hours=3)
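
# For example, a run at 14:37:22.123456 floors end_date to 14:00:00 and
# sets begin_date to 11:00:00, i.e. a three-hour window ending on the hour.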

transport = zeep.transports.Transport(cache=zeep.cache.SqliteCache())
client = zeep.Client(wsdl=wsdl, transport=transport)
time_all_elements = time.time()

log.debug("Getting %s data from %s to %s" % (str(desired_data),
                                             str(begin_date), str(end_date)))

for elementCd in desired_data:
    time_element = time.time()

    # get the last three hours of data for this elementCd
    tmp = client.service.getHourlyData(
        stationTriplets=[station_triplet],
        elementCd=elementCd,
        ordinal=1,
        beginDate=begin_date,
        endDate=end_date)

    log.info("Time to get elementCd '%s': %.3f sec" % (elementCd,
             time.time() - time_element))
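
    # The SOAP response is expected to arrive as a list with one entry per
    # station triplet, each carrying a 'values' list whose items include at
    # least 'dateTime' and 'value' -- a rough sketch of the shape rather
    # than the full AWDB schema:
    #
    #   [{'values': [{'dateTime': '2020-02-14 05:00:00', 'value': 27.0}, ...], ...}]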
    values = tmp[0]['values']

    # sort and isolate the most recent
    #
    # NOTE: we do this because sometimes there are gaps in hourly data
    #       in NRCS; yes, we may end up with slightly inaccurate data,
    #       so perhaps this decision will be re-evaluated in the future
    if values:
        ordered = sorted(values, key=lambda t: t['dateTime'], reverse=True)
        infoex['wx_data'][elementCd] = ordered[0]['value']
    else:
        infoex['wx_data'][elementCd] = None

log.info("Time to get all elementCds : %.3f sec" % (time.time() -
         time_all_elements))

log.debug("infoex[wx_data]: %s", str(infoex['wx_data']))

# Now we only need to add in what we want to change thanks to that
# abomination of a variable declaration earlier
final_data[fmap['Location UUID']] = infoex['location_uuid']
final_data[fmap['obDate']] = end_date.strftime('%m/%d/%Y')
final_data[fmap['obTime']] = end_date.strftime('%H:%M')

for elementCd in infoex['wx_data']:
    if elementCd not in iemap:
        log.warning("BAD KEY wx_data['%s']" % (elementCd))
        continue

    # CONSIDER: Casting every value to Float() -- need to investigate if
    #           any possible elementCds we may want are any other data
    #           type than float.
    #
    #           Another possibility is to query the API with
    #           getStationElements and temporarily store the
    #           storedUnitCd. But that's pretty network-intensive and
    #           may not even be worth it if there's only e.g. one or two
    #           exceptions to any otherwise uniformly Float value set.
    final_data[fmap[iemap[elementCd]]] = infoex['wx_data'][elementCd]

log.debug("final_data: %s" % (str(final_data)))

with open(infoex['csv_filename'], 'w') as f:
    # The requirement is that empty values are represented in the CSV
    # file as ""; csv.QUOTE_NONNUMERIC achieves that
    log.debug("writing CSV file '%s'" % (infoex['csv_filename']))
    writer = csv.writer(f, quoting=csv.QUOTE_NONNUMERIC)
    writer.writerow(final_data)
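
# With QUOTE_NONNUMERIC the string fields are quoted and the None
# placeholders come out as "", so the start of a written row looks
# something like this (purely illustrative values):
#
#   "<location uuid>","02/14/2020","06:00","Pacific","","F","","F",...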

if not options.dry_run:
    # upload the CSV file to InfoEx via FTP
    with open(infoex['csv_filename'], 'rb') as f:
        log.debug("uploading '%s' to %s" % (infoex['csv_filename'],
                                            infoex['host']))
        ftp = FTP(infoex['host'], infoex['uuid'], infoex['api_key'])
        ftp.storlines('STOR ' + infoex['csv_filename'], f)
        ftp.close()

# remove the local CSV now that we're finished with it
os.remove(infoex['csv_filename'])