The most common fixes were:
- Lots of wrong hanging indentations (bad-continuation; sketched below)
- Many logging-not-lazy warnings (also sketched below)
- A few unnecessary-semicolon warnings :0)
- A few line-too-long warnings
A few warnings were suppressed globally (via .pylintrc); in particular,
several whitespace warnings were suppressed locally in the mapping
functions.
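For reference, here is what the two most common fixes look like in
isolation. First, a minimal sketch of the logging-not-lazy fix (LOG and
value are placeholders, not code from this change):

    import logging
    LOG = logging.getLogger(__name__)
    value = 42

    # not lazy: the string is formatted even if DEBUG is disabled
    LOG.debug("got value: %s" % (value))

    # lazy: formatting is deferred until the record is actually emitted
    LOG.debug("got value: %s", value)

And the bad-continuation fix: pylint accepts a continuation line either
aligned with the opening delimiter (the style adopted in this change) or
indented one extra level under a hanging open bracket:

    from optparse import OptionParser

    parser = OptionParser()

    # aligned with the opening delimiter
    parser.add_option("--config",
                      dest="config")

    # hanging indent, one extra level, also passes
    parser.add_option(
        "--log-level",
        dest="log_level")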
--- /dev/null
+++ b/.pylintrc
+[MASTER]
+# justifications for disabling the following, program-wide
+#
+# import-error: triggered by importing the zeep module; this may be
+#               (more correctly) fixable via a pylintrc init-hook
+#
+# invalid-name: infoex-autowx should be infoex_autowx, but it's too ugly
+disable = import-error,invalid-name
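As the comment above notes, the import-error could instead be handled
with an init-hook that extends pylint's import path rather than
silencing the check. A sketch, with the path purely illustrative:

    [MASTER]
    # illustrative only: point pylint at wherever zeep is installed
    init-hook='import sys; sys.path.append("/usr/lib/python3/dist-packages")'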
 
     parser = OptionParser(version=__version__)
 
     parser.add_option("--config",
-        dest="config",
-        metavar="FILE",
-        help="location of config file")
+                      dest="config",
+                      metavar="FILE",
+                      help="location of config file")
 
     parser.add_option("--log-level",
 
     parser.add_option("--log-level",
-        dest="log_level",
-        default=None,
-        help="set the log level (debug, info, warning)")
+                      dest="log_level",
+                      default=None,
+                      help="set the log level (debug, info, warning)")
 
     parser.add_option("--dry-run",
 
     parser.add_option("--dry-run",
-        action="store_true",
-        dest="dry_run",
-        default=False,
-        help="fetch data but don't upload to InfoEx")
+                      action="store_true",
+                      dest="dry_run",
+                      default=False,
+                      help="fetch data but don't upload to InfoEx")
 
             station['desired_data'] = config['station']['desired_data']
 
             # construct full API URL (sans start/end time, added later)
-            station['source'] = station['source'] + '?token=' + config['station']['token'] + '&within=60&units=' + station['units'] + '&stid=' + station['station_id'] + '&vars=' + station['desired_data']
-
-    except KeyError as e:
-        LOG.critical("%s not defined in %s" % (e, options.config))
-        exit(1)
-    except Exception as exc:
-        LOG.critical("Exception occurred in config parsing: '%s'" % (exc))
+            station['source'] = station['source'] + '?token=' + \
+                                config['station']['token'] + \
+                                '&within=60&units=' + station['units'] + \
+                                '&stid=' + station['station_id'] + \
+                                '&vars=' + station['desired_data']
+
+    except KeyError as err:
+        LOG.critical("%s not defined in configuration file", err)
         exit(1)
 
     # all sections/values present in config file, final sanity check
     try:
         for key in config.sections():
             for subkey in config[key]:
-                if not len(config[key][subkey]):
-                    raise ValueError;
-    except ValueError as exc:
-        LOG.critical("Config value '%s.%s' is empty" % (key, subkey))
+                if not config[key][subkey]:
+                    raise ValueError
+    except ValueError:
+        LOG.critical("Config value '%s.%s' is empty", key, subkey)
         exit(1)
 
     return (infoex, station)
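Not part of this change, but the query string concatenated in the hunk
above could also be built with the standard library's
urllib.parse.urlencode, which handles percent-escaping. A sketch
assuming the same config and station dictionaries:

    from urllib.parse import urlencode

    # illustrative equivalent of the manual concatenation
    query = urlencode({'token': config['station']['token'],
                       'within': 60,
                       'units': station['units'],
                       'stid': station['station_id'],
                       'vars': station['desired_data']})
    station['source'] = station['source'] + '?' + query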
 
     try:
         from systemd.journal import JournalHandler
         LOG.addHandler(JournalHandler())
         ## fallback to syslog
         #import logging.handlers
         #LOG.addHandler(logging.handlers.SysLogHandler())
 
     (begin_date, end_date) = setup_time_values()
 
     # get the data
-    LOG.debug("Getting %s data from %s to %s" % (str(station['desired_data']),
-        str(begin_date), str(end_date)))
+    LOG.debug("Getting %s data from %s to %s", str(station['desired_data']),
+              str(begin_date), str(end_date))
 
     time_all_elements = time.time()
 
 
         infoex['wx_data'] = get_nrcs_data(begin_date, end_date, station)
     elif station['provider'] == 'mesowest':
         infoex['wx_data'] = get_mesowest_data(begin_date, end_date,
-    LOG.info("Time taken to get all data : %.3f sec" % (time.time() -
-        time_all_elements))
+    LOG.info("Time taken to get all data : %.3f sec", time.time() -
+             time_all_elements)
 
     LOG.debug("infoex[wx_data]: %s", str(infoex['wx_data']))
 
 
     LOG.debug("infoex[wx_data]: %s", str(infoex['wx_data']))
 
 
     final_data[fmap['obDate']] = end_date.strftime('%m/%d/%Y')
     final_data[fmap['obTime']] = end_date.strftime('%H:%M')
 
-    for elementCd in infoex['wx_data']:
-        if elementCd not in iemap:
-            LOG.warning("BAD KEY wx_data['%s']" % (elementCd))
+    for element_cd in infoex['wx_data']:
+        if element_cd not in iemap:
+            LOG.warning("BAD KEY wx_data['%s']", element_cd)
             continue
 
         # CONSIDER: Casting every value to Float() -- need to investigate if
 
         #           storedUnitCd. But that's pretty network-intensive and
         #           may not even be worth it if there's only e.g. one or two
         #           exceptions to any otherwise uniformly Float value set.
-        final_data[fmap[iemap[elementCd]]] = infoex['wx_data'][elementCd]
+        final_data[fmap[iemap[element_cd]]] = infoex['wx_data'][element_cd]
-    LOG.debug("final_data: %s" % (str(final_data)))
+    LOG.debug("final_data: %s", str(final_data))
 
     if not write_local_csv(infoex['csv_filename'], final_data):
         LOG.warning('Could not write local CSV file: %s',
                     infoex['csv_filename'])
 
     if not options.dry_run:
         upload_csv(infoex['csv_filename'], infoex)
 
     Create a mapping of InfoEx fields to the local data's indexing scheme.
 
     INFOEX FIELDS
     This won't earn style points in Python, but here we establish a couple
     of helpful mapping variables. The reason this is helpful is that the
     end result is simply an ordered set, the CSV file. But we still may
     want to manipulate the values arbitrarily before writing that file.
     Also note that the current Auto Wx InfoEx documentation shows these
     keys in a graphical table with the "index" beginning at 1, but here we
     sanely index beginning at 0.
     """
+    # pylint: disable=too-many-statements,multiple-statements,bad-whitespace
     fmap = {}                           ; final_data     = [None] * 29
     fmap['Location UUID'] = 0           ; final_data[0]  = location_uuid
     fmap['obDate'] = 1                  ; final_data[1]  = None
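A standalone pylint disable comment like the one above applies from that
line through the end of the enclosing scope; the checks can be switched
back on after the column-aligned table if desired. A sketch of the
pattern (the enable line is not part of this change):

    # pylint: disable=multiple-statements
    fmap = {}                 ; final_data    = [None] * 29
    fmap['Location UUID'] = 0 ; final_data[0] = None
    # pylint: enable=multiple-statements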
 
     client = zeep.Client(wsdl=station['source'], transport=transport)
     remote_data = {}
 
-    for elementCd in station['desired_data']:
+    for element_cd in station['desired_data']:
         time_element = time.time()
 
-        # get the last three hours of data for this elementCd
+        # get the last three hours of data for this elementCd/element_cd
         tmp = client.service.getHourlyData(
-                stationTriplets=[station['station_id']],
-                elementCd=elementCd,
-                ordinal=1,
-                beginDate=begin,
-                endDate=end)
+            stationTriplets=[station['station_id']],
+            elementCd=element_cd,
+            ordinal=1,
+            beginDate=begin,
+            endDate=end)
-        LOG.info("Time to get elementCd '%s': %.3f sec" % (elementCd,
-            time.time() - time_element))
+        LOG.info("Time to get NRCS elementCd '%s': %.3f sec", element_cd,
+                 time.time() - time_element)
 
         values = tmp[0]['values']
 
 
         #       so perhaps this decision will be re-evaluated in the future
         if values:
             ordered = sorted(values, key=lambda t: t['dateTime'], reverse=True)
-            remote_data[elementCd] = ordered[0]['value']
+            remote_data[element_cd] = ordered[0]['value']
-            remote_data[elementCd] = None
+            remote_data[element_cd] = None
 
 
     pos = len(observations['date_time']) - 1
 
-    for elementCd in station['desired_data'].split(','):
+    for element_cd in station['desired_data'].split(','):
         # sort and isolate the most recent, see note above in NRCS for how and
         # why this is done
         #
 
         #       irregularities
 
         # we may not have the data at all
-        key_name = elementCd + '_set_1'
+        key_name = element_cd + '_set_1'
         if key_name in observations:
             if observations[key_name][pos]:
-                remote_data[elementCd] = observations[key_name][pos]
+                remote_data[element_cd] = observations[key_name][pos]
-                remote_data[elementCd] = None
+                remote_data[element_cd] = None
-            remote_data[elementCd] = None
+            remote_data[element_cd] = None
 
 # CSV operations
 def write_local_csv(path_to_file, data):
     """Write the specified CSV file to disk"""
-    with open(path_to_file, 'w') as f:
+    with open(path_to_file, 'w') as file_object:
         # The requirement is that empty values are represented in the CSV
         # file as "", csv.QUOTE_NONNUMERIC achieves that
-        LOG.debug("writing CSV file '%s'" % (path_to_file))
-        writer = csv.writer(f, quoting=csv.QUOTE_NONNUMERIC)
+        LOG.debug("writing CSV file '%s'", path_to_file)
+        writer = csv.writer(file_object, quoting=csv.QUOTE_NONNUMERIC)
     return True
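On the QUOTE_NONNUMERIC comment above: the csv module writes None as an
empty string, and QUOTE_NONNUMERIC quotes every non-numeric field, so
empty values come out as the required "". A quick illustrative check
(the file name is arbitrary):

    import csv

    with open('demo.csv', 'w', newline='') as file_object:
        writer = csv.writer(file_object, quoting=csv.QUOTE_NONNUMERIC)
        writer.writerow([1.5, None, 'HS'])
    # demo.csv now contains: 1.5,"","HS"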
 
 def upload_csv(path_to_file, infoex_data):
     """Upload the specified CSV file to InfoEx FTP and remove the file"""
     with open(path_to_file, 'rb') as file_object:
-        LOG.debug("uploading FTP file '%s'" % (infoex_data['host']))
+        LOG.debug("uploading FTP file '%s'", infoex_data['host'])
         ftp = FTP(infoex_data['host'], infoex_data['uuid'],
                   infoex_data['api_key'])
         ftp.storlines('STOR ' + path_to_file, file_object)
 
 def setup_time_values():
     """establish time bounds of data request(s)"""
     # floor time to nearest hour
-    dt = datetime.datetime.now()
-    end_date = dt - datetime.timedelta(minutes=dt.minute % 60,
-                                       seconds=dt.second,
-                                       microseconds=dt.microsecond)
+    date_time = datetime.datetime.now()
+    end_date = date_time - datetime.timedelta(minutes=date_time.minute % 60,
+                                              seconds=date_time.second,
+                                              microseconds=date_time.microsecond)
     begin_date = end_date - datetime.timedelta(hours=3)
     return (begin_date, end_date)
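The subtraction above floors the current time to the top of the hour
(minute % 60 is a no-op since minute is always below 60; it is kept here
as in the source). A worked example with an illustrative timestamp:

    import datetime

    dt = datetime.datetime(2020, 1, 15, 14, 37, 22, 123456)
    floored = dt - datetime.timedelta(minutes=dt.minute % 60,
                                      seconds=dt.second,
                                      microseconds=dt.microsecond)
    # floored == datetime.datetime(2020, 1, 15, 14, 0)
    # begin_date would then be three hours earlier, 11:00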
 