Add --delay command line and delay config options
[weather.git] / weather.py
1 """Contains various object definitions needed by the weather utility."""
2
3 weather_copyright = """\
4 # Copyright (c) 2006-2024 Jeremy Stanley <fungi@yuggoth.org>. Permission to
5 # use, copy, modify, and distribute this software is granted under terms
6 # provided in the LICENSE file distributed with this software.
7 #"""
8
9 weather_version = "2.4.4"
10
11 radian_to_km = 6372.795484
12 radian_to_mi = 3959.871528
13
14 def pyversion(ref=None):
15     """Determine the Python version and optionally compare to a reference."""
16     import platform
17     ver = platform.python_version()
18     if ref:
19         return [
20             int(x) for x in ver.split(".")[:2]
21         ] >= [
22             int(x) for x in ref.split(".")[:2]
23         ]
24     else: return ver
25
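# For example (illustrative): on Python 3.11 pyversion() returns a version
# string such as "3.11.4", while pyversion("3.8") returns True because only
# the major and minor components are compared.
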
26 class Selections:
27     """An object to contain selection data."""
28     def __init__(self):
29         """Store the config, options and arguments."""
30         self.config = get_config()
31         self.options, self.arguments = get_options(self.config)
32         if self.get_bool("cache") and self.get_bool("cache_search") \
33             and not self.get_bool("longlist"):
34             integrate_search_cache(
35                 self.config,
36                 self.get("cachedir"),
37                 self.get("setpath")
38             )
39         if not self.arguments:
40             if "id" in self.options.__dict__ \
41                 and self.options.__dict__["id"]:
42                 self.arguments.append( self.options.__dict__["id"] )
43                 del( self.options.__dict__["id"] )
44                 import sys
45                 message = "WARNING: the --id option is deprecated and will eventually be removed\n"
46                 sys.stderr.write(message)
47             elif "city" in self.options.__dict__ \
48                 and self.options.__dict__["city"] \
49                 and "st" in self.options.__dict__ \
50                 and self.options.__dict__["st"]:
51                 self.arguments.append(
52                     "^%s city, %s" % (
53                         self.options.__dict__["city"],
54                         self.options.__dict__["st"]
55                     )
56                 )
57                 del( self.options.__dict__["city"] )
58                 del( self.options.__dict__["st"] )
59                 import sys
60                 message = "WARNING: the --city/--st options are deprecated and will eventually be removed\n"
61                 sys.stderr.write(message)
62     def get(self, option, argument=None):
63         """Retrieve data from the config or options."""
64         if argument:
65             if self.config.has_section(argument) and (
66                 self.config.has_option(argument, "city") \
67                     or self.config.has_option(argument, "id") \
68                     or self.config.has_option(argument, "st")
69             ):
70                 self.config.remove_section(argument)
71                 import sys
72                 message = "WARNING: the city/id/st options are now unsupported in aliases\n"
73                 sys.stderr.write(message)
74             if not self.config.has_section(argument):
75                 guessed = guess(
76                     argument,
77                     path=self.get("setpath"),
78                     info=self.get("info"),
79                     cache_search=(
80                         self.get("cache") and self.get("cache_search")
81                     ),
82                     cachedir=self.get("cachedir"),
83                     quiet=self.get_bool("quiet")
84                 )
85                 self.config.add_section(argument)
86                 for item in guessed.items():
87                     self.config.set(argument, *item)
88             if self.config.has_option(argument, option):
89                 return self.config.get(argument, option)
90         if option in self.options.__dict__:
91             return self.options.__dict__[option]
92         import sys
93         message = "WARNING: no URI defined for %s\n" % option
94         sys.stderr.write(message)
95         return None
96     def get_bool(self, option, argument=None):
97         """Get data and coerce to a boolean if necessary."""
98         # Mimic configparser's getboolean() method by treating
99         # false/no/off/0 as False and true/yes/on/1 as True values,
100         # case-insensitively
101         value = self.get(option, argument)
102         if isinstance(value, bool):
103             return value
104         if isinstance(value, str):
105             vlower = value.lower()
106             if vlower in ('false', 'no', 'off', '0'):
107                 return False
108             elif vlower in ('true', 'yes', 'on', '1'):
109                 return True
110         raise ValueError("Not a boolean: %s" % value)
111     def getint(self, option, argument=None):
112         """Get data and coerce to an integer if necessary."""
113         value = self.get(option, argument)
114         if value: return int(value)
115         else: return 0
116
117 def average(coords):
118     """Average a list of coordinates."""
119     x = 0
120     y = 0
121     for coord in coords:
122         x += coord[0]
123         y += coord[1]
124     count = len(coords)
125     return (x/count, y/count)
126
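# For example, average([(0.0, 0.0), (1.0, 2.0)]) returns (0.5, 1.0).
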
127 def filter_units(line, units="imperial"):
128     """Filter or convert units in a line of text between US/UK and metric."""
129     import re
130     # filter lines with both pressures in the form of "X inches (Y hPa)" or
131     # "X in. Hg (Y hPa)"
132     dual_p = re.match(
133         r"(.* )(\d*(\.\d+)? (inches|in\. Hg)) \((\d*(\.\d+)? hPa)\)(.*)",
134         line
135     )
136     if dual_p:
137         preamble, in_hg, i_fr, i_un, hpa, h_fr, trailer = dual_p.groups()
138         if units == "imperial": line = preamble + in_hg + trailer
139         elif units == "metric": line = preamble + hpa + trailer
140     # filter lines with both temperatures in the form of "X F (Y C)"
141     dual_t = re.match(
142         r"(.* )(-?\d*(\.\d+)? F) \((-?\d*(\.\d+)? C)\)(.*)",
143         line
144     )
145     if dual_t:
146         preamble, fahrenheit, f_fr, celsius, c_fr, trailer = dual_t.groups()
147         if units == "imperial": line = preamble + fahrenheit + trailer
148         elif units == "metric": line = preamble + celsius + trailer
149     # if metric is desired, convert distances in the form of "X mile(s)" to
150     # "Y kilometer(s)"
151     if units == "metric":
152         imperial_d = re.match(
153             r"(.* )(\d+)( mile\(s\))(.*)",
154             line
155         )
156         if imperial_d:
157             preamble, mi, m_u, trailer = imperial_d.groups()
158             line = preamble + str(int(round(int(mi)*1.609344))) \
159                 + " kilometer(s)" + trailer
160     # filter speeds in the form of "X MPH (Y KT)" to just "X MPH"; if metric is
161     # desired, convert to "Z KPH"
162     imperial_s = re.match(
163         r"(.* )(\d+)( MPH)( \(\d+ KT\))(.*)",
164         line
165     )
166     if imperial_s:
167         preamble, mph, m_u, kt, trailer = imperial_s.groups()
168         if units == "imperial": line = preamble + mph + m_u + trailer
169         elif units == "metric": 
170             line = preamble + str(int(round(int(mph)*1.609344))) + " KPH" + \
171                 trailer
172     imperial_s = re.match(
173         r"(.* )(\d+)( MPH)( \(\d+ KT\))(.*)",
174         line
175     )
176     if imperial_s:
177         preamble, mph, m_u, kt, trailer = imperial_s.groups()
178         if units == "imperial": line = preamble + mph + m_u + trailer
179         elif units == "metric": 
180             line = preamble + str(int(round(int(mph)*1.609344))) + " KPH" + \
181                 trailer
182     # if imperial is desired, qualify given forcast temperatures like "X F"; if
183     # metric is desired, convert to "Y C"
184     imperial_t = re.match(
185         r"(.* )(High |high |Low |low )(\d+)(\.|,)(.*)",
186         line
187     )
188     if imperial_t:
189         preamble, parameter, fahrenheit, sep, trailer = imperial_t.groups()
190         if units == "imperial":
191             line = preamble + parameter + fahrenheit + " F" + sep + trailer
192         elif units == "metric":
193             line = preamble + parameter \
194                 + str(int(round((int(fahrenheit)-32)*5/9))) + " C" + sep \
195                 + trailer
196     # hand off the resulting line
197     return line
198
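# A rough sketch of the filtering above (sample lines are illustrative, not
# taken from a real feed):
#
#   filter_units("Temperature: 72.0 F (22.2 C)", units="metric")
#       -> "Temperature: 22.2 C"
#   filter_units(
#       "Wind: from the NW at 10 MPH (9 KT) gusting to 20 MPH (17 KT)",
#       units="imperial")
#       -> "Wind: from the NW at 10 MPH gusting to 20 MPH"
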
199 def get_uri(
200     uri,
201     ignore_fail=False,
202     cache_data=False,
203     cacheage=900,
204     cachedir="."
205 ):
206     """Return a string containing the results of a URI GET."""
207     if pyversion("3"):
208         import urllib, urllib.error, urllib.request
209         URLError = urllib.error.URLError
210         urlopen = urllib.request.urlopen
211     else:
212         import urllib2 as urllib
213         URLError = urllib.URLError
214         urlopen = urllib.urlopen
215     import os, time
216     if cache_data:
217         dcachedir = os.path.join( os.path.expanduser(cachedir), "datacache" )
218         if not os.path.exists(dcachedir):
219             try: os.makedirs(dcachedir)
220             except (IOError, OSError): pass
221         dcache_fn = os.path.join(
222             dcachedir,
223             uri.split(":",1)[1].replace("/","_")
224         )
225     now = time.time()
226     if cache_data and os.access(dcache_fn, os.R_OK) \
227         and now-cacheage < os.stat(dcache_fn).st_mtime <= now:
228         dcache_fd = open(dcache_fn)
229         data = dcache_fd.read()
230         dcache_fd.close()
231     else:
232         try:
233             data = urlopen(uri).read().decode("utf-8")
234         except URLError:
235             if ignore_fail: return ""
236             import os, sys
237             sys.stderr.write("%s error: failed to retrieve\n   %s\n\n" % (
238                 os.path.basename( sys.argv[0] ), uri))
239             raise
240         # Some data sources are HTML with the plain text wrapped in pre tags
241         if "<pre>" in data:
242             data = data[data.find("<pre>")+5:data.find("</pre>")]
243         if cache_data:
244             try:
245                 import codecs
246                 dcache_fd = codecs.open(dcache_fn, "w", "utf-8")
247                 dcache_fd.write(data)
248                 dcache_fd.close()
249             except (IOError, OSError): pass
250     return data
251
252 def get_metar(
253     uri=None,
254     verbose=False,
255     quiet=False,
256     headers=None,
257     imperial=False,
258     metric=False,
259     cache_data=False,
260     cacheage=900,
261     cachedir="."
262 ):
263     """Return a summarized METAR for the specified station."""
264     if not uri:
265         import os, sys
266         message = "%s error: METAR URI required for conditions\n" % \
267             os.path.basename( sys.argv[0] )
268         sys.stderr.write(message)
269         sys.exit(1)
270     metar = get_uri(
271         uri,
272         cache_data=cache_data,
273         cacheage=cacheage,
274         cachedir=cachedir
275     )
276     if pyversion("3") and type(metar) is bytes: metar = metar.decode("utf-8")
277     if verbose: return metar
278     else:
279         import re
280         lines = metar.split("\n")
281         if not headers:
282             headers = \
283                 "relative_humidity," \
284                 + "precipitation_last_hour," \
285                 + "sky_conditions," \
286                 + "temperature," \
287                 + "heat_index," \
288                 + "windchill," \
289                 + "weather," \
290                 + "wind"
291         headerlist = headers.lower().replace("_"," ").split(",")
292         output = []
293         if not quiet:
294             title = "Current conditions at %s"
295             place = lines[0].split(", ")
296             if len(place) > 1:
297                 place = "%s, %s" % ( place[0].title(), place[1] )
298             else: place = "<UNKNOWN>"
299             output.append(title%place)
300             output.append("Last updated " + lines[1])
301         header_match = False
302         for header in headerlist:
303             for line in lines:
304                 if line.lower().startswith(header + ":"):
305                     if re.match(r".*:\d+$", line): line = line[:line.rfind(":")]
306                     if imperial: line = filter_units(line, units="imperial")
307                     elif metric: line = filter_units(line, units="metric")
308                     if quiet: output.append(line)
309                     else: output.append("   " + line)
310                     header_match = True
311         if not header_match:
312             output.append(
313                 "(no conditions matched your header list, try with --verbose)"
314             )
315         return "\n".join(output)
316
317 def get_alert(
318     uri=None,
319     verbose=False,
320     quiet=False,
321     cache_data=False,
322     cacheage=900,
323     cachedir=".",
324     delay=1
325 ):
326     """Return alert notice for the specified URI."""
327     if not uri:
328         return ""
329     alert = get_uri(
330         uri,
331         ignore_fail=True,
332         cache_data=cache_data,
333         cacheage=cacheage,
334         cachedir=cachedir
335     ).strip()
336     if pyversion("3") and type(alert) is bytes: alert = alert.decode("utf-8")
337     if alert:
338         if verbose: return alert
339         else:
340             import re
341             if re.search(r"\nNational Weather Service", alert):
342                 muted = True
343             else:
344                 muted = False
345             expirycheck = re.search(r"Expires:([0-9]{12})", alert)
346             if expirycheck:
347                 # skip alerts and forecasts that expired more than delay
348                 # hours ago
349                 import datetime, zoneinfo
350                 expiration = datetime.datetime.fromisoformat(
351                     "%sT%sZ" % (expirycheck[1][:8], expirycheck[1][-4:]))
352                 now = datetime.datetime.now(tz=zoneinfo.ZoneInfo("UTC"))
353                 if now - expiration > datetime.timedelta(hours=delay):
354                     return ""
355             lines = alert.split("\n")
356             output = []
357             for line in lines:
358                 if muted and line.startswith("National Weather Service"):
359                     muted = False
360                     line = ""
361                 elif line == "&&":
362                     line = ""
363                 elif line == "$$":
364                     muted = True
365                 if line and not muted:
366                     if quiet: output.append(line)
367                     else: output.append("   " + line)
368             return "\n".join(output)
369
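# A note on the delay parameter above (timings illustrative): the
# "Expires:YYYYMMDDHHMM" stamp is read as UTC, and with the default delay=1
# an alert stamped "Expires:202406151200" is still reported until 13:00 UTC
# that day; once the stamp is more than delay hours in the past, get_alert()
# returns "".
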
370 def get_options(config):
371     """Parse the options passed on the command line."""
372
373     # for optparse's builtin -h/--help option
374     usage = \
375         "usage: %prog [options] [alias1|search1 [alias2|search2 [...]]]"
376
377     # for optparse's builtin --version option
378     verstring = "%prog " + weather_version
379
380     # create the parser
381     import optparse
382     option_parser = optparse.OptionParser(usage=usage, version=verstring)
383     # separate options object from list of arguments and return both
384
385     # the -a/--alert option
386     if config.has_option("default", "alert"):
387         default_alert = config.getboolean("default", "alert")
388     else: default_alert = False
389     option_parser.add_option("-a", "--alert",
390         dest="alert",
391         action="store_true",
392         default=default_alert,
393         help="include local alert notices")
394
395     # the --atypes option
396     if config.has_option("default", "atypes"):
397         default_atypes = config.get("default", "atypes")
398     else:
399         default_atypes = \
400             "coastal_flood_statement," \
401             + "flash_flood_statement," \
402             + "flash_flood_warning," \
403             + "flash_flood_watch," \
404             + "flood_warning," \
405             + "severe_thunderstorm_warning," \
406             + "severe_weather_statement," \
407             + "special_weather_statement," \
408             + "tornado," \
409             + "urgent_weather_message"
410     option_parser.add_option("--atypes",
411         dest="atypes",
412         default=default_atypes,
413         help="list of alert notification types to display")
414
415     # the --build-sets option
416     option_parser.add_option("--build-sets",
417         dest="build_sets",
418         action="store_true",
419         default=False,
420         help="(re)build location correlation sets")
421
422     # the --cacheage option
423     if config.has_option("default", "cacheage"):
424         default_cacheage = config.getint("default", "cacheage")
425     else: default_cacheage = 900
426     option_parser.add_option("--cacheage",
427         dest="cacheage",
428         default=default_cacheage,
429         help="duration in seconds to refresh cached data")
430
431     # the --cachedir option
432     if config.has_option("default", "cachedir"):
433         default_cachedir = config.get("default", "cachedir")
434     else: default_cachedir = "~/.weather"
435     option_parser.add_option("--cachedir",
436         dest="cachedir",
437         default=default_cachedir,
438         help="directory for storing cached searches and data")
439
440     # the --delay option
441     if config.has_option("default", "delay"):
442         default_delay = config.getint("default", "delay")
443     else: default_delay = 1
444     option_parser.add_option("--delay",
445         dest="delay",
446         default=default_delay,
447         help="hours to delay alert and forecast expiration")
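    # illustrative usage: "delay = 2" in the [default] section of a weatherrc,
    # or "--delay 2" on the command line, keeps alerts and forecasts visible
    # for two hours past their expiration stamps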
448
449     # the -f/--forecast option
450     if config.has_option("default", "forecast"):
451         default_forecast = config.getboolean("default", "forecast")
452     else: default_forecast = False
453     option_parser.add_option("-f", "--forecast",
454         dest="forecast",
455         action="store_true",
456         default=default_forecast,
457         help="include a local forecast")
458
459     # the --headers option
460     if config.has_option("default", "headers"):
461         default_headers = config.get("default", "headers")
462     else:
463         default_headers = \
464             "temperature," \
465             + "relative_humidity," \
466             + "wind," \
467             + "heat_index," \
468             + "windchill," \
469             + "weather," \
470             + "sky_conditions," \
471             + "precipitation_last_hour"
472     option_parser.add_option("--headers",
473         dest="headers",
474         default=default_headers,
475         help="list of conditions headers to display")
476
477     # the --imperial option
478     if config.has_option("default", "imperial"):
479         default_imperial = config.getboolean("default", "imperial")
480     else: default_imperial = False
481     option_parser.add_option("--imperial",
482         dest="imperial",
483         action="store_true",
484         default=default_imperial,
485         help="filter/convert conditions for US/UK units")
486
487     # the --info option
488     option_parser.add_option("--info",
489         dest="info",
490         action="store_true",
491         default=False,
492         help="output detailed information for your search")
493
494     # the -l/--list option
495     option_parser.add_option("-l", "--list",
496         dest="list",
497         action="store_true",
498         default=False,
499         help="list all configured aliases and cached searches")
500
501     # the --longlist option
502     option_parser.add_option("--longlist",
503         dest="longlist",
504         action="store_true",
505         default=False,
506         help="display details of all configured aliases")
507
508     # the -m/--metric option
509     if config.has_option("default", "metric"):
510         default_metric = config.getboolean("default", "metric")
511     else: default_metric = False
512     option_parser.add_option("-m", "--metric",
513         dest="metric",
514         action="store_true",
515         default=default_metric,
516         help="filter/convert conditions for metric units")
517
518     # the -n/--no-conditions option
519     if config.has_option("default", "conditions"):
520         default_conditions = config.getboolean("default", "conditions")
521     else: default_conditions = True
522     option_parser.add_option("-n", "--no-conditions",
523         dest="conditions",
524         action="store_false",
525         default=default_conditions,
526         help="disable output of current conditions")
527
528     # the --no-cache option
529     if config.has_option("default", "cache"):
530         default_cache = config.getboolean("default", "cache")
531     else: default_cache = True
532     option_parser.add_option("--no-cache",
533         dest="cache",
534         action="store_false",
535         default=default_cache,
536         help="disable all caching (searches and data)")
537
538     # the --no-cache-data option
539     if config.has_option("default", "cache_data"):
540         default_cache_data = config.getboolean("default", "cache_data")
541     else: default_cache_data = True
542     option_parser.add_option("--no-cache-data",
543         dest="cache_data",
544         action="store_false",
545         default=default_cache_data,
546         help="disable retrieved data caching")
547
548     # the --no-cache-search option
549     if config.has_option("default", "cache_search"):
550         default_cache_search = config.getboolean("default", "cache_search")
551     else: default_cache_search = True
552     option_parser.add_option("--no-cache-search",
553         dest="cache_search",
554         action="store_false",
555         default=default_cache_search,
556         help="disable search result caching")
557
558     # the -q/--quiet option
559     if config.has_option("default", "quiet"):
560         default_quiet = config.getboolean("default", "quiet")
561     else: default_quiet = False
562     option_parser.add_option("-q", "--quiet",
563         dest="quiet",
564         action="store_true",
565         default=default_quiet,
566         help="skip preambles and don't indent")
567
568     # the --setpath option
569     if config.has_option("default", "setpath"):
570         default_setpath = config.get("default", "setpath")
571     else: default_setpath = ".:~/.weather"
572     option_parser.add_option("--setpath",
573         dest="setpath",
574         default=default_setpath,
575         help="directory search path for correlation sets")
576
577     # the -v/--verbose option
578     if config.has_option("default", "verbose"):
579         default_verbose = config.getboolean("default", "verbose")
580     else: default_verbose = False
581     option_parser.add_option("-v", "--verbose",
582         dest="verbose",
583         action="store_true",
584         default=default_verbose,
585         help="show full decoded feeds")
586
587     # deprecated options
588     if config.has_option("default", "city"):
589         default_city = config.get("default", "city")
590     else: default_city = ""
591     option_parser.add_option("-c", "--city",
592         dest="city",
593         default=default_city,
594         help=optparse.SUPPRESS_HELP)
595     if config.has_option("default", "id"):
596         default_id = config.get("default", "id")
597     else: default_id = ""
598     option_parser.add_option("-i", "--id",
599         dest="id",
600         default=default_id,
601         help=optparse.SUPPRESS_HELP)
602     if config.has_option("default", "st"):
603         default_st = config.get("default", "st")
604     else: default_st = ""
605     option_parser.add_option("-s", "--st",
606         dest="st",
607         default=default_st,
608         help=optparse.SUPPRESS_HELP)
609
610     options, arguments = option_parser.parse_args()
611     return options, arguments
612
613 def get_config():
614     """Parse the aliases and configuration."""
615     if pyversion("3"): import configparser
616     else: import ConfigParser as configparser
617     config = configparser.ConfigParser()
618     import os
619     rcfiles = [
620         "/etc/weatherrc",
621         "/etc/weather/weatherrc",
622         os.path.expanduser("~/.weather/weatherrc"),
623         os.path.expanduser("~/.weatherrc"),
624         "weatherrc"
625         ]
626     for rcfile in rcfiles:
627         if os.access(rcfile, os.R_OK):
628             if pyversion("3"):
629                 config.read(rcfile, encoding="utf-8")
630             else:
631                 config.read(rcfile)
632     for section in config.sections():
633         if section != section.lower():
634             if config.has_section(section.lower()):
635                 config.remove_section(section.lower())
636             config.add_section(section.lower())
637             for option,value in config.items(section):
638                 config.set(section.lower(), option, value)
639     return config
640
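# An illustrative weatherrc using option names parsed elsewhere in this file
# (the values shown are invented):
#
#   [default]
#   alert = true
#   metric = true
#   cachedir = ~/.weather
#   headers = temperature,wind,sky_conditions
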
641 def integrate_search_cache(config, cachedir, setpath):
642     """Add cached search results into the configuration."""
643     if pyversion("3"): import configparser
644     else: import ConfigParser as configparser
645     import os, time
646     scache_fn = os.path.join( os.path.expanduser(cachedir), "searches" )
647     if not os.access(scache_fn, os.R_OK): return config
648     scache_fd = open(scache_fn)
649     created = float( scache_fd.readline().split(":")[1].strip().split()[0] )
650     scache_fd.close()
651     now = time.time()
652     datafiles = data_index(setpath)
653     if datafiles:
654         data_freshness = sorted(
655             [ x[1] for x in datafiles.values() ],
656             reverse=True
657         )[0]
658     else: data_freshness = now
659     if created < data_freshness <= now:
660         try:
661             os.remove(scache_fn)
662             print( "[clearing outdated %s]" % scache_fn )
663         except (IOError, OSError):
664             pass
665         return config
666     scache = configparser.ConfigParser()
667     if pyversion("3"):
668         scache.read(scache_fn, encoding="utf-8")
669     else:
670         scache.read(scache_fn)
671     for section in scache.sections():
672         if not config.has_section(section):
673             config.add_section(section)
674             for option,value in scache.items(section):
675                 config.set(section, option, value)
676     return config
677
678 def list_aliases(config, detail=False):
679     """Return a formatted list of aliases defined in the config."""
680     if detail:
681         output = "\n# configured alias details..."
682         for section in sorted(config.sections()):
683             output += "\n\n[%s]" % section
684             for item in sorted(config.items(section)):
685                 output += "\n%s = %s" % item
686         output += "\n"
687     else:
688         output = "configured aliases and cached searches..."
689         for section in sorted(config.sections()):
690             if config.has_option(section, "description"):
691                 description = config.get(section, "description")
692             else: description = "(no description provided)"
693             output += "\n   %s: %s" % (section, description)
694     return output
695
696 def data_index(path):
697     import os
698     datafiles = {}
699     for filename in ("airports", "places", "stations", "zctas", "zones"):
700         for dirname in path.split(":"):
701             for extension in ("", ".gz", ".txt"):
702                 candidate = os.path.expanduser(
703                     os.path.join( dirname, "".join( (filename, extension) ) )
704                 )
705                 if os.path.exists(candidate):
706                     datafiles[filename] = (
707                         candidate,
708                         os.stat(candidate).st_mtime
709                     )
710                     break
711             if filename in datafiles:
712                 break
713     return datafiles
714
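# data_index() maps each data file name to the first match found on the
# search path plus its modification time, e.g. (path and timestamp
# illustrative):
#
#   {"stations": ("/home/user/.weather/stations.gz", 1718400000.0), ...}
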
715 def guess(
716     expression,
717     path=".",
718     max_results=20,
719     info=False,
720     cache_search=False,
721     cacheage=900,
722     cachedir=".",
723     quiet=False
724 ):
725     """Find URIs using airport, gecos, placename, station, ZCTA/ZIP, zone."""
726     import codecs, datetime, time, os, re, sys
727     if pyversion("3"): import configparser
728     else: import ConfigParser as configparser
729     datafiles = data_index(path)
730     if re.match("[A-Za-z]{3}$", expression): searchtype = "airport"
731     elif re.match("[A-Za-z0-9]{4}$", expression): searchtype = "station"
732     elif re.match("[A-Za-z]{2}[Zz][0-9]{3}$", expression): searchtype = "zone"
733     elif re.match("[0-9]{5}$", expression): searchtype = "ZCTA"
734     elif re.match(
735         r"[\+-]?\d+(\.\d+)?(-\d+){,2}[ENSWensw]?, *[\+-]?\d+(\.\d+)?(-\d+){,2}[ENSWensw]?$",
736         expression
737     ):
738         searchtype = "coordinates"
739     elif re.match(r"(FIPS|fips)\d+$", expression): searchtype = "FIPS"
740     else:
741         searchtype = "name"
742         cache_search = False
743     if cache_search: action = "caching"
744     else: action = "using"
745     if info:
746         scores = [
747             (0.005, "bad"),
748             (0.025, "poor"),
749             (0.160, "suspect"),
750             (0.500, "mediocre"),
751             (0.840, "good"),
752             (0.975, "great"),
753             (0.995, "excellent"),
754             (1.000, "ideal"),
755         ]
756     if not quiet: print("Searching via %s..."%searchtype)
757     stations = configparser.ConfigParser()
758     dataname = "stations"
759     if dataname in datafiles:
760         datafile = datafiles[dataname][0]
761         if datafile.endswith(".gz"):
762             import gzip
763             if pyversion("3"):
764                 stations.read_string(
765                     gzip.open(datafile).read().decode("utf-8") )
766             else: stations.read_file( gzip.open(datafile) )
767         else:
768             if pyversion("3"):
769                 stations.read(datafile, encoding="utf-8")
770             else:
771                 stations.read(datafile)
772     else:
773         message = "%s error: can't find \"%s\" data file\n" % (
774             os.path.basename( sys.argv[0] ),
775             dataname
776         )
777         sys.stderr.write(message)
778         exit(1)
779     zones = configparser.ConfigParser()
780     dataname = "zones"
781     if dataname in datafiles:
782         datafile = datafiles[dataname][0]
783         if datafile.endswith(".gz"):
784             import gzip
785             if pyversion("3"):
786                 zones.read_string( gzip.open(datafile).read().decode("utf-8") )
787             else: zones.read_file( gzip.open(datafile) )
788         else:
789             if pyversion("3"):
790                 zones.read(datafile, encoding="utf-8")
791             else:
792                 zones.read(datafile)
793     else:
794         message = "%s error: can't find \"%s\" data file\n" % (
795             os.path.basename( sys.argv[0] ),
796             dataname
797         )
798         sys.stderr.write(message)
799         exit(1)
800     search = None
801     station = ("", 0)
802     zone = ("", 0)
803     dataset = None
804     possibilities = []
805     uris = {}
806     if searchtype == "airport":
807         expression = expression.lower()
808         airports = configparser.ConfigParser()
809         dataname = "airports"
810         if dataname in datafiles:
811             datafile = datafiles[dataname][0]
812             if datafile.endswith(".gz"):
813                 import gzip
814                 if pyversion("3"):
815                     airports.read_string(
816                         gzip.open(datafile).read().decode("utf-8") )
817                 else: airports.read_file( gzip.open(datafile) )
818             else:
819                 if pyversion("3"):
820                     airports.read(datafile, encoding="utf-8")
821                 else:
822                     airports.read(datafile)
823         else:
824             message = "%s error: can't find \"%s\" data file\n" % (
825                 os.path.basename( sys.argv[0] ),
826                 dataname
827             )
828             sys.stderr.write(message)
829             exit(1)
830         if airports.has_section(expression) \
831             and airports.has_option(expression, "station"):
832             search = (expression, "IATA/FAA airport code %s" % expression)
833             station = ( airports.get(expression, "station"), 0 )
834             if stations.has_option(station[0], "zone"):
835                 zone = eval( stations.get(station[0], "zone") )
836                 dataset = stations
837             if not ( info or quiet ) \
838                 and stations.has_option( station[0], "description" ):
839                 print(
840                     "[%s result %s]" % (
841                         action,
842                         stations.get(station[0], "description")
843                     )
844                 )
845         else:
846             message = "No IATA/FAA airport code \"%s\" in the %s file.\n" % (
847                 expression,
848                 datafiles["airports"][0]
849             )
850             sys.stderr.write(message)
851             exit(1)
852     elif searchtype == "station":
853         expression = expression.lower()
854         if stations.has_section(expression):
855             station = (expression, 0)
856             if not search:
857                 search = (expression, "ICAO station code %s" % expression)
858             if stations.has_option(expression, "zone"):
859                 zone = eval( stations.get(expression, "zone") )
860                 dataset = stations
861             if not ( info or quiet ) \
862                 and stations.has_option(expression, "description"):
863                 print(
864                     "[%s result %s]" % (
865                         action,
866                         stations.get(expression, "description")
867                     )
868                 )
869         else:
870             message = "No ICAO weather station \"%s\" in the %s file.\n" % (
871                 expression,
872                 datafiles["stations"][0]
873             )
874             sys.stderr.write(message)
875             exit(1)
876     elif searchtype == "zone":
877         expression = expression.lower()
878         if zones.has_section(expression) \
879             and zones.has_option(expression, "station"):
880             zone = (expression, 0)
881             station = eval( zones.get(expression, "station") )
882             dataset = zones
883             search = (expression, "NWS/NOAA weather zone %s" % expression)
884             if not ( info or quiet ) \
885                 and zones.has_option(expression, "description"):
886                 print(
887                     "[%s result %s]" % (
888                         action,
889                         zones.get(expression, "description")
890                     )
891                 )
892         else:
893             message = "No usable NWS weather zone \"%s\" in the %s file.\n" % (
894                 expression,
895                 datafiles["zones"][0]
896             )
897             sys.stderr.write(message)
898             exit(1)
899     elif searchtype == "ZCTA":
900         zctas = configparser.ConfigParser()
901         dataname = "zctas"
902         if dataname in datafiles:
903             datafile = datafiles[dataname][0]
904             if datafile.endswith(".gz"):
905                 import gzip
906                 if pyversion("3"):
907                     zctas.read_string(
908                         gzip.open(datafile).read().decode("utf-8") )
909                 else: zctas.read_file( gzip.open(datafile) )
910             else:
911                 if pyversion("3"):
912                     zctas.read(datafile, encoding="utf-8")
913                 else:
914                     zctas.read(datafile)
915         else:
916             message = "%s error: can't find \"%s\" data file\n" % (
917                 os.path.basename( sys.argv[0] ),
918                 dataname
919             )
920             sys.stderr.write(message)
921             exit(1)
922         dataset = zctas
923         if zctas.has_section(expression) \
924             and zctas.has_option(expression, "station"):
925             station = eval( zctas.get(expression, "station") )
926             search = (expression, "Census ZCTA (ZIP code) %s" % expression)
927             if zctas.has_option(expression, "zone"):
928                 zone = eval( zctas.get(expression, "zone") )
929         else:
930             message = "No census ZCTA (ZIP code) \"%s\" in the %s file.\n" % (
931                 expression,
932                 datafiles["zctas"][0]
933             )
934             sys.stderr.write(message)
935             exit(1)
936     elif searchtype == "coordinates":
937         search = (expression, "Geographic coordinates %s" % expression)
938         stationtable = {}
939         for station in stations.sections():
940             if stations.has_option(station, "location"):
941                 stationtable[station] = {
942                     "location": eval( stations.get(station, "location") )
943                 }
944         station = closest( gecos(expression), stationtable, "location", 0.1 )
945         if not station[0]:
946             message = "No ICAO weather station found near %s.\n" % expression
947             sys.stderr.write(message)
948             exit(1)
949         zonetable = {}
950         for zone in zones.sections():
951             if zones.has_option(zone, "centroid"):
952                 zonetable[zone] = {
953                     "centroid": eval( zones.get(zone, "centroid") )
954                 }
955         zone = closest( gecos(expression), zonetable, "centroid", 0.1 )
956         if not zone[0]:
957             message = "No NWS weather zone near %s; forecasts unavailable.\n" \
958                 % expression
959             sys.stderr.write(message)
960     elif searchtype in ("FIPS", "name"):
961         places = configparser.ConfigParser()
962         dataname = "places"
963         if dataname in datafiles:
964             datafile = datafiles[dataname][0]
965             if datafile.endswith(".gz"):
966                 import gzip
967                 if pyversion("3"):
968                     places.read_string(
969                         gzip.open(datafile).read().decode("utf-8") )
970                 else: places.read_file( gzip.open(datafile) )
971             else:
972                 if pyversion("3"):
973                     places.read(datafile, encoding="utf-8")
974                 else:
975                     places.read(datafile)
976         else:
977             message = "%s error: can't find \"%s\" data file\n" % (
978                 os.path.basename( sys.argv[0] ),
979                 dataname
980             )
981             sys.stderr.write(message)
982             exit(1)
983         dataset = places
984         place = expression.lower()
985         if places.has_section(place) and places.has_option(place, "station"):
986             station = eval( places.get(place, "station") )
987             search = (expression, "Census Place %s" % expression)
988             if places.has_option(place, "description"):
989                 search = (
990                     search[0],
991                     search[1] + ", %s" % places.get(place, "description")
992                 )
993             if places.has_option(place, "zone"):
994                 zone = eval( places.get(place, "zone") )
995             if not ( info or quiet ) \
996                 and places.has_option(place, "description"):
997                 print(
998                     "[%s result %s]" % (
999                         action,
1000                         places.get(place, "description")
1001                     )
1002                 )
1003         else:
1004             for place in places.sections():
1005                 if places.has_option(place, "description") \
1006                     and places.has_option(place, "station") \
1007                     and re.search(
1008                         expression,
1009                         places.get(place, "description"),
1010                         re.I
1011                     ):
1012                         possibilities.append(place)
1013             for place in stations.sections():
1014                 if stations.has_option(place, "description") \
1015                     and re.search(
1016                         expression,
1017                         stations.get(place, "description"),
1018                         re.I
1019                     ):
1020                         possibilities.append(place)
1021             for place in zones.sections():
1022                 if zones.has_option(place, "description") \
1023                     and zones.has_option(place, "station") \
1024                     and re.search(
1025                         expression,
1026                         zones.get(place, "description"),
1027                         re.I
1028                     ):
1029                         possibilities.append(place)
1030             if len(possibilities) == 1:
1031                 place = possibilities[0]
1032                 if places.has_section(place):
1033                     station = eval( places.get(place, "station") )
1034                     description = places.get(place, "description")
1035                     if places.has_option(place, "zone"):
1036                         zone = eval( places.get(place, "zone" ) )
1037                     search = ( expression, "%s: %s" % (place, description) )
1038                 elif stations.has_section(place):
1039                     station = (place, 0.0)
1040                     description = stations.get(place, "description")
1041                     if stations.has_option(place, "zone"):
1042                         zone = eval( stations.get(place, "zone" ) )
1043                     search = ( expression, "ICAO station code %s" % place )
1044                 elif zones.has_section(place):
1045                     station = eval( zones.get(place, "station") )
1046                     description = zones.get(place, "description")
1047                     zone = (place, 0.0)
1048                     search = ( expression, "NWS/NOAA weather zone %s" % place )
1049                 if not ( info or quiet ):
1050                     print( "[%s result %s]" % (action, description) )
1051             if not possibilities and not station[0]:
1052                 message = "No FIPS code/census area match in the %s file.\n" % (
1053                     datafiles["places"][0]
1054                 )
1055                 sys.stderr.write(message)
1056                 exit(1)
1057     if station[0]:
1058         uris["metar"] = stations.get( station[0], "metar" )
1059         if zone[0]:
1060             for key,value in zones.items( zone[0] ):
1061                 if key not in ("centroid", "description", "station"):
1062                     uris[key] = value
1063     elif possibilities:
1064         count = len(possibilities)
1065         if count <= max_results:
1066             print( "Your search is ambiguous, returning %s matches:" % count )
1067             for place in sorted(possibilities):
1068                 if places.has_section(place):
1069                     print(
1070                         "   [%s] %s" % (
1071                             place,
1072                             places.get(place, "description")
1073                         )
1074                     )
1075                 elif stations.has_section(place):
1076                     print(
1077                         "   [%s] %s" % (
1078                             place,
1079                             stations.get(place, "description")
1080                         )
1081                     )
1082                 elif zones.has_section(place):
1083                     print(
1084                         "   [%s] %s" % (
1085                             place,
1086                             zones.get(place, "description")
1087                         )
1088                     )
1089         else:
1090             print(
1091                 "Your search is too ambiguous, returning %s matches." % count
1092             )
1093         exit(0)
1094     if info:
1095         stationlist = []
1096         zonelist = []
1097         if dataset:
1098             for section in dataset.sections():
1099                 if dataset.has_option(section, "station"):
1100                     stationlist.append(
1101                         eval( dataset.get(section, "station") )[1]
1102                     )
1103                 if dataset.has_option(section, "zone"):
1104                     zonelist.append( eval( dataset.get(section, "zone") )[1] )
1105         stationlist.sort()
1106         zonelist.sort()
1107         scount = len(stationlist)
1108         zcount = len(zonelist)
1109         sranks = []
1110         zranks = []
1111         for score in scores:
1112             if stationlist:
1113                 sranks.append( stationlist[ int( (1-score[0]) * scount ) ] )
1114             if zonelist:
1115                 zranks.append( zonelist[ int( (1-score[0]) * zcount ) ] )
1116         description = search[1]
1117         uris["description"] = description
1118         print(
1119             "%s\n%s" % ( description, "-" * len(description) )
1120         )
1121         print(
1122             "%s: %s" % (
1123                 station[0],
1124                 stations.get( station[0], "description" )
1125             )
1126         )
1127         km = radian_to_km*station[1]
1128         mi = radian_to_mi*station[1]
1129         if sranks and not description.startswith("ICAO station code "):
1130             for index in range(0, len(scores)):
1131                 if station[1] >= sranks[index]:
1132                     score = scores[index][1]
1133                     break
1134             print(
1135                 "   (proximity %s, %.3gkm, %.3gmi)" % ( score, km, mi )
1136             )
1137         elif searchtype == "coordinates":
1138             print( "   (%.3gkm, %.3gmi)" % (km, mi) )
1139         if zone[0]:
1140             print(
1141                 "%s: %s" % ( zone[0], zones.get( zone[0], "description" ) )
1142             )
1143         km = radian_to_km*zone[1]
1144         mi = radian_to_mi*zone[1]
1145         if zranks and not description.startswith("NWS/NOAA weather zone "):
1146             for index in range(0, len(scores)):
1147                 if zone[1] >= zranks[index]:
1148                     score = scores[index][1]
1149                     break
1150             print(
1151                 "   (proximity %s, %.3gkm, %.3gmi)" % ( score, km, mi )
1152             )
1153         elif searchtype == "coordinates" and zone[0]:
1154             print( "   (%.3gkm, %.3gmi)" % (km, mi) )
1155     if cache_search:
1156         now = time.time()
1157         nowstamp = "%s (%s)" % (
1158             now,
1159             datetime.datetime.isoformat(
1160                 datetime.datetime.fromtimestamp(now),
1161                 " "
1162             )
1163         )
1164         search_cache = ["\n"]
1165         search_cache.append( "[%s]\n" % search[0] ) 
1166         search_cache.append( "cached = %s\n" % nowstamp )
1167         for uriname in sorted(uris.keys()):
1168             search_cache.append( "%s = %s\n" % ( uriname, uris[uriname] ) )
1169         real_cachedir = os.path.expanduser(cachedir)
1170         if not os.path.exists(real_cachedir):
1171             try: os.makedirs(real_cachedir)
1172             except (IOError, OSError): pass
1173         scache_fn = os.path.join(real_cachedir, "searches")
1174         if not os.path.exists(scache_fn):
1175             then = sorted(
1176                     [ x[1] for x in datafiles.values() ],
1177                     reverse=True
1178                 )[0]
1179             thenstamp = "%s (%s)" % (
1180                 then,
1181                 datetime.datetime.isoformat(
1182                     datetime.datetime.fromtimestamp(then),
1183                     " "
1184                 )
1185             )
1186             search_cache.insert(
1187                 0,
1188                 "# based on data files from: %s\n" % thenstamp
1189             )
1190         try:
1191             scache_existing = configparser.ConfigParser()
1192             if pyversion("3"):
1193                 scache_existing.read(scache_fn, encoding="utf-8")
1194             else:
1195                 scache_existing.read(scache_fn)
1196             if not scache_existing.has_section(search[0]):
1197                 scache_fd = codecs.open(scache_fn, "a", "utf-8")
1198                 scache_fd.writelines(search_cache)
1199                 scache_fd.close()
1200         except (IOError, OSError): pass
1201     if not info:
1202         return(uris)
1203
1204 def closest(position, nodes, fieldname, angle=None):
1205     import math
1206     if not angle: angle = 2*math.pi
1207     match = None
1208     for name in nodes:
1209         if fieldname in nodes[name]:
1210             node = nodes[name][fieldname]
1211             if node and abs( position[0]-node[0] ) < angle:
1212                 if abs( position[1]-node[1] ) < angle \
1213                     or abs( abs( position[1]-node[1] ) - 2*math.pi ) < angle:
1214                     if position == node:
1215                         angle = 0
1216                         match = name
1217                     else:
1218                         candidate = math.acos(
1219                             math.sin( position[0] ) * math.sin( node[0] ) \
1220                                 + math.cos( position[0] ) \
1221                                 * math.cos( node[0] ) \
1222                                 * math.cos( position[1] - node[1] )
1223                             )
1224                         if candidate < angle:
1225                             angle = candidate
1226                             match = name
1227     if match: match = str(match)
1228     return (match, angle)
1229
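# closest() returns the matched node name together with the great-circle
# central angle (in radians) between the two points, computed via the
# spherical law of cosines; multiplying that angle by radian_to_km or
# radian_to_mi (defined near the top of this file) yields an approximate
# surface distance, as done in guess() above.
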
1230 def gecos(formatted):
1231     import math, re
1232     coordinates = formatted.split(",")
1233     for coordinate in range(0, 2):
1234         degrees, foo, minutes, bar, seconds, hemisphere = re.match(
1235             r"([\+-]?\d+\.?\d*)(-(\d+))?(-(\d+))?([ensw]?)$",
1236             coordinates[coordinate].strip().lower()
1237         ).groups()
1238         value = float(degrees)
1239         if minutes: value += float(minutes)/60
1240         if seconds: value += float(seconds)/3600
1241         if hemisphere and hemisphere in "sw": value *= -1
1242         coordinates[coordinate] = math.radians(value)
1243     return tuple(coordinates)
1244
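# For example (values approximate and illustrative):
# gecos("40-42-51N, 74-0-21W") parses degrees-minutes-seconds with optional
# hemisphere letters and returns roughly (0.711, -1.292), i.e. latitude and
# longitude in radians.
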
1245 def correlate():
1246     import codecs, csv, datetime, hashlib, os, re, sys, time, zipfile, zoneinfo
1247     if pyversion("3"): import configparser
1248     else: import ConfigParser as configparser
1249     for filename in os.listdir("."):
1250         if re.match("[0-9]{4}_Gaz_counties_national.zip$", filename):
1251             gcounties_an = filename
1252             gcounties_fn = filename[:-4] + ".txt"
1253         elif re.match("[0-9]{4}_Gaz_cousubs_national.zip$", filename):
1254             gcousubs_an = filename
1255             gcousubs_fn = filename[:-4] + ".txt"
1256         elif re.match("[0-9]{4}_Gaz_place_national.zip$", filename):
1257             gplace_an = filename
1258             gplace_fn = filename[:-4] + ".txt"
1259         elif re.match("[0-9]{4}_Gaz_zcta_national.zip$", filename):
1260             gzcta_an = filename
1261             gzcta_fn = filename[:-4] + ".txt"
1262         elif re.match("bp[0-9]{2}[a-z]{2}[0-9]{2}.dbx$", filename):
1263             cpfzcf_fn = filename
1264     nsdcccc_fn = "nsd_cccc.txt"
1265     ourairports_fn = "airports.csv"
1266     overrides_fn = "overrides.conf"
1267     overrideslog_fn = "overrides.log"
1268     slist_fn = "slist"
1269     zlist_fn = "zlist"
1270     qalog_fn = "qa.log"
1271     airports_fn = "airports"
1272     places_fn = "places"
1273     stations_fn = "stations"
1274     zctas_fn = "zctas"
1275     zones_fn = "zones"
1276     header = """\
1277 %s
1278 # generated by %s on %s from these public domain sources:
1279 #
1280 # https://www.census.gov/geographies/reference-files/time-series/geo/gazetteer-files.html
1281 # %s %s %s
1282 # %s %s %s
1283 # %s %s %s
1284 # %s %s %s
1285 #
1286 # https://www.weather.gov/gis/ZoneCounty/
1287 # %s %s %s
1288 #
1289 # https://tgftp.nws.noaa.gov/data/
1290 # %s %s %s
1291 #
1292 # https://ourairports.com/data/
1293 # %s %s %s
1294 #
1295 # ...and these manually-generated or hand-compiled adjustments:
1296 # %s %s %s
1297 # %s %s %s
1298 # %s %s %s\
1299 """ % (
1300         weather_copyright,
1301         os.path.basename( sys.argv[0] ),
1302         datetime.date.isoformat(
1303             datetime.datetime.utcfromtimestamp( int(os.environ.get('SOURCE_DATE_EPOCH', time.time())) )
1304         ),
1305         hashlib.md5( open(gcounties_an, "rb").read() ).hexdigest(),
1306         datetime.date.isoformat(
1307             datetime.datetime.utcfromtimestamp( os.path.getmtime(gcounties_an) )
1308         ),
1309         gcounties_an,
1310         hashlib.md5( open(gcousubs_an, "rb").read() ).hexdigest(),
1311         datetime.date.isoformat(
1312             datetime.datetime.utcfromtimestamp( os.path.getmtime(gcousubs_an) )
1313         ),
1314         gcousubs_an,
1315         hashlib.md5( open(gplace_an, "rb").read() ).hexdigest(),
1316         datetime.date.isoformat(
1317             datetime.datetime.utcfromtimestamp( os.path.getmtime(gplace_an) )
1318         ),
1319         gplace_an,
1320         hashlib.md5( open(gzcta_an, "rb").read() ).hexdigest(),
1321         datetime.date.isoformat(
1322             datetime.datetime.utcfromtimestamp( os.path.getmtime(gzcta_an) )
1323         ),
1324         gzcta_an,
1325         hashlib.md5( open(cpfzcf_fn, "rb").read() ).hexdigest(),
1326         datetime.date.isoformat(
1327             datetime.datetime.utcfromtimestamp( os.path.getmtime(cpfzcf_fn) )
1328         ),
1329         cpfzcf_fn,
1330         hashlib.md5( open(nsdcccc_fn, "rb").read() ).hexdigest(),
1331         datetime.date.isoformat(
1332             datetime.datetime.utcfromtimestamp( os.path.getmtime(nsdcccc_fn) )
1333         ),
1334         nsdcccc_fn,
1335         hashlib.md5( open(ourairports_fn, "rb").read() ).hexdigest(),
1336         datetime.date.isoformat(
1337             datetime.datetime.utcfromtimestamp( os.path.getmtime(ourairports_fn) )
1338         ),
1339         ourairports_fn,
1340         hashlib.md5( open(overrides_fn, "rb").read() ).hexdigest(),
1341         datetime.date.isoformat(
1342             datetime.datetime.utcfromtimestamp( os.path.getmtime(overrides_fn) )
1343         ),
1344         overrides_fn,
1345         hashlib.md5( open(slist_fn, "rb").read() ).hexdigest(),
1346         datetime.date.isoformat(
1347             datetime.datetime.utcfromtimestamp( os.path.getmtime(slist_fn) )
1348         ),
1349         slist_fn,
1350         hashlib.md5( open(zlist_fn, "rb").read() ).hexdigest(),
1351         datetime.date.isoformat(
1352             datetime.datetime.utcfromtimestamp( os.path.getmtime(zlist_fn) )
1353         ),
1354         zlist_fn
1355     )
1356     airports = {}
1357     places = {}
1358     stations = {}
1359     zctas = {}
1360     zones = {}
1361     message = "Reading %s:%s..." % (gcounties_an, gcounties_fn)
1362     sys.stdout.write(message)
1363     sys.stdout.flush()
1364     count = 0
1365     gcounties = zipfile.ZipFile(gcounties_an).open(gcounties_fn, "r")
1366     columns = gcounties.readline().decode("utf-8").strip().split("\t")
1367     for line in gcounties:
1368         fields = line.decode("utf-8").strip().split("\t")
1369         f_geoid = fields[ columns.index("GEOID") ].strip()
1370         f_name = fields[ columns.index("NAME") ].strip()
1371         f_usps = fields[ columns.index("USPS") ].strip()
1372         f_intptlat = fields[ columns.index("INTPTLAT") ].strip()
1373         f_intptlong = fields[ columns.index("INTPTLONG") ].strip()
1374         if f_geoid and f_name and f_usps and f_intptlat and f_intptlong:
1375             fips = "fips%s" % f_geoid
1376             if fips not in places: places[fips] = {}
1377             places[fips]["centroid"] = gecos(
1378                 "%s,%s" % (f_intptlat, f_intptlong)
1379             )
1380             places[fips]["description"] = "%s, %s" % (f_name, f_usps)
1381             count += 1
1382     gcounties.close()
1383     print("done (%s lines)." % count)
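    # County subdivisions and incorporated places come from two more
    # Gazetteer files with the same layout and are merged into the same
    # places table.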
1384     message = "Reading %s:%s..." % (gcousubs_an, gcousubs_fn)
1385     sys.stdout.write(message)
1386     sys.stdout.flush()
1387     count = 0
1388     gcousubs = zipfile.ZipFile(gcousubs_an).open(gcousubs_fn, "r")
1389     columns = gcousubs.readline().decode("utf-8").strip().split("\t")
1390     for line in gcousubs:
1391         fields = line.decode("utf-8").strip().split("\t")
1392         f_geoid = fields[ columns.index("GEOID") ].strip()
1393         f_name = fields[ columns.index("NAME") ].strip()
1394         f_usps = fields[ columns.index("USPS") ].strip()
1395         f_intptlat = fields[ columns.index("INTPTLAT") ].strip()
1396         f_intptlong = fields[ columns.index("INTPTLONG") ].strip()
1397         if f_geoid and f_name and f_usps and f_intptlat and f_intptlong:
1398             fips = "fips%s" % f_geoid
1399             if fips not in places: places[fips] = {}
1400             places[fips]["centroid"] = gecos(
1401                 "%s,%s" % (f_intptlat, f_intptlong)
1402             )
1403             places[fips]["description"] = "%s, %s" % (f_name, f_usps)
1404             count += 1
1405     gcousubs.close()
1406     print("done (%s lines)." % count)
1407     message = "Reading %s:%s..." % (gplace_an, gplace_fn)
1408     sys.stdout.write(message)
1409     sys.stdout.flush()
1410     count = 0
1411     gplace = zipfile.ZipFile(gplace_an).open(gplace_fn, "r")
1412     columns = gplace.readline().decode("utf-8").strip().split("\t")
1413     for line in gplace:
1414         fields = line.decode("utf-8").strip().split("\t")
1415         f_geoid = fields[ columns.index("GEOID") ].strip()
1416         f_name = fields[ columns.index("NAME") ].strip()
1417         f_usps = fields[ columns.index("USPS") ].strip()
1418         f_intptlat = fields[ columns.index("INTPTLAT") ].strip()
1419         f_intptlong = fields[ columns.index("INTPTLONG") ].strip()
1420         if f_geoid and f_name and f_usps and f_intptlat and f_intptlong:
1421             fips = "fips%s" % f_geoid
1422             if fips not in places: places[fips] = {}
1423             places[fips]["centroid"] = gecos(
1424                 "%s,%s" % (f_intptlat, f_intptlong)
1425             )
1426             places[fips]["description"] = "%s, %s" % (f_name, f_usps)
1427             count += 1
1428     gplace.close()
1429     print("done (%s lines)." % count)
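    # The hand-compiled station list maps each ICAO identifier to its decoded
    # METAR URL on tgftp.nws.noaa.gov; anything after "#" on a line is a
    # comment.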
1430     message = "Reading %s..." % slist_fn
1431     sys.stdout.write(message)
1432     sys.stdout.flush()
1433     count = 0
1434     slist = codecs.open(slist_fn, "r", "utf-8")
1435     for line in slist:
1436         icao = line.split("#")[0].strip()
1437         if icao:
1438             stations[icao] = {
1439                 "metar": "https://tgftp.nws.noaa.gov/data/observations/"\
1440                     + "metar/decoded/%s.TXT" % icao.upper()
1441             }
1442             count += 1
1443     slist.close()
1444     print("done (%s lines)." % count)
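    # The NWS station directory is semicolon-delimited; the name, state and
    # country fields build a description, and the coordinate fields supply a
    # location for stations already seeded from the station list above.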
1445     message = "Reading %s..." % nsdcccc_fn
1446     sys.stdout.write(message)
1447     sys.stdout.flush()
1448     count = 0
1449     nsdcccc = codecs.open(nsdcccc_fn, "r", "utf-8")
1450     for line in nsdcccc:
1451         line = str(line)
1452         fields = line.split(";")
1453         icao = fields[0].strip().lower()
1454         if icao in stations:
1455             description = []
1456             name = " ".join( fields[3].strip().title().split() )
1457             if name: description.append(name)
1458             st = fields[4].strip()
1459             if st: description.append(st)
1460             country = " ".join( fields[5].strip().title().split() )
1461             if country: description.append(country)
1462             if description:
1463                 stations[icao]["description"] = ", ".join(description)
1464             lat, lon = fields[7:9]
1465             if lat and lon:
1466                 stations[icao]["location"] = gecos( "%s,%s" % (lat, lon) )
1467             elif "location" not in stations[icao]:
1468                 lat, lon = fields[5:7]
1469                 if lat and lon:
1470                     stations[icao]["location"] = gecos( "%s,%s" % (lat, lon) )
1471         count += 1
1472     nsdcccc.close()
1473     print("done (%s lines)." % count)
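    # The OurAirports CSV links three-letter IATA codes to ICAO stations and
    # provides fallback descriptions and coordinates for stations still
    # missing them.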
1474     message = "Reading %s..." % ourairports_fn
1475     sys.stdout.write(message)
1476     sys.stdout.flush()
1477     count = 0
1478     ourairports = open(ourairports_fn, "r")
1479     for row in csv.reader(ourairports):
1480         icao = row[12].lower()
1481         if icao in stations:
1482             iata = row[13].lower()
1483             if len(iata) == 3: airports[iata] = { "station": icao }
1484             if "description" not in stations[icao]:
1485                 description = []
1486                 name = row[3]
1487                 if name: description.append(name)
1488                 municipality = row[10]
1489                 if municipality: description.append(municipality)
1490                 region = row[9]
1491                 country = row[8]
1492                 if region:
1493                     if "-" in region:
1494                         c,r = region.split("-", 1)
1495                         if c == country: region = r
1496                     description.append(region)
1497                 if country:
1498                     description.append(country)
1499                 if description:
1500                     stations[icao]["description"] = ", ".join(description)
1501             if "location" not in stations[icao]:
1502                 lat = row[4]
1503                 if lat:
1504                     lon = row[5]
1505                     if lon:
1506                         stations[icao]["location"] = gecos(
1507                             "%s,%s" % (lat, lon)
1508                         )
1509         count += 1
1510     ourairports.close()
1511     print("done (%s lines)." % count)
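    # The hand-compiled zone list seeds the zones table with the forecast
    # zones to index; "#" comments are stripped.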
1512     message = "Reading %s..." % zlist_fn
1513     sys.stdout.write(message)
1514     sys.stdout.flush()
1515     count = 0
1516     zlist = codecs.open(zlist_fn, "r", "utf-8")
1517     for line in zlist:
1518         line = line.split("#")[0].strip()
1519         if line:
1520             zones[line] = {}
1521             count += 1
1522     zlist.close()
1523     print("done (%s lines)." % count)
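    # The pipe-delimited zone/county correlation file fills in the NWS
    # product URLs, a time zone, a human-readable description and a centroid
    # for each zone seeded above.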
1524     message = "Reading %s..." % cpfzcf_fn
1525     sys.stdout.write(message)
1526     sys.stdout.flush()
1527     count = 0
1528     cpfz = {}
1529     cpfzcf = codecs.open(cpfzcf_fn, "r", "utf-8")
1530     for line in cpfzcf:
1531         fields = line.strip().split("|")
1532         if len(fields) == 11 \
1533             and fields[0] and fields[1] and fields[9] and fields[10]:
1534             zone = "z".join( fields[:2] ).lower()
1535             if zone in zones:
1536                 state = fields[0]
1537                 description = fields[3].strip()
1538                 fips = "fips%s"%fields[6]
1539                 countycode = "%sc%s" % (state.lower(), fips[-3:])
1540                 if state:
1541                     zones[zone]["coastal_flood_statement"] = (
1542                         "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1543                         "flood/coastal/%s/%s.txt" % (state.lower(), zone))
1544                     zones[zone]["flash_flood_statement"] = (
1545                         "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1546                         "flash_flood/statement/%s/%s.txt"
1547                         % (state.lower(), countycode))
1548                     zones[zone]["flash_flood_warning"] = (
1549                         "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1550                         "flash_flood/warning/%s/%s.txt"
1551                         % (state.lower(), countycode))
1552                     zones[zone]["flash_flood_watch"] = (
1553                         "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1554                         "flash_flood/watch/%s/%s.txt" % (state.lower(), zone))
1555                     zones[zone]["flood_warning"] = (
1556                         "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1557                         "flood/warning/%s/%s.txt"
1558                         % (state.lower(), countycode))
1559                     zones[zone]["severe_thunderstorm_warning"] = (
1560                         "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1561                         "thunderstorm/%s/%s.txt" % (state.lower(), countycode))
1562                     zones[zone]["severe_weather_statement"] = (
1563                         "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1564                         "severe_weather_stmt/%s/%s.txt"
1565                         % (state.lower(), countycode))
1566                     zones[zone]["short_term_forecast"] = (
1567                         "https://tgftp.nws.noaa.gov/data/forecasts/nowcast/"
1568                         "%s/%s.txt" % (state.lower(), zone))
1569                     zones[zone]["special_weather_statement"] = (
1570                         "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1571                         "special_weather_stmt/%s/%s.txt"
1572                         % (state.lower(), zone))
1573                     zones[zone]["state_forecast"] = (
1574                         "https://tgftp.nws.noaa.gov/data/forecasts/state/"
1575                         "%s/%s.txt" % (state.lower(), zone))
1576                     zones[zone]["tornado"] = (
1577                         "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1578                         "tornado/%s/%s.txt" % (state.lower(), countycode))
1579                     zones[zone]["urgent_weather_message"] = (
1580                         "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1581                         "non_precip/%s/%s.txt" % (state.lower(), zone))
1582                     zones[zone]["zone_forecast"] = (
1583                         "https://tgftp.nws.noaa.gov/data/forecasts/zone/"
1584                         "%s/%s.txt" % (state.lower(), zone))
1585                 tzcode = fields[7]
1586                 if tzcode == "A":
1587                     zones[zone]["tz"] = "US/Alaska"
1588                 elif tzcode == "AH":
1589                     zones[zone]["tz"] = "US/Aleutian"
1590                 elif tzcode in ("C", "CE", "CM"):
1591                     zones[zone]["tz"] = "US/Central"
1592                 elif tzcode in ("E", "e"):
1593                     zones[zone]["tz"] = "US/Eastern"
1594                 elif tzcode == "F":
1595                     zones[zone]["tz"] = "Pacific/Guadalcanal"
1596                 elif tzcode == "G":
1597                     zones[zone]["tz"] = "Pacific/Guam"
1598                 elif tzcode == "H":
1599                     zones[zone]["tz"] = "US/Hawaii"
1600                 elif tzcode == "J":
1601                     zones[zone]["tz"] = "Japan"
1602                 elif tzcode == "K":
1603                     zones[zone]["tz"] = "Pacific/Kwajalein"
1604                 elif tzcode in ("M", "MC", "MP"):
1605                     zones[zone]["tz"] = "US/Mountain"
1606                 elif tzcode == "m":
1607                     zones[zone]["tz"] = "US/Arizona"
1608                 elif tzcode == "P":
1609                     zones[zone]["tz"] = "US/Pacific"
1610                 elif tzcode == "S":
1611                     zones[zone]["tz"] = "US/Samoa"
1612                 elif tzcode == "V":
1613                     zones[zone]["tz"] = "America/Virgin"
1614                 else:
1615                     zones[zone]["tz"] = ""
1616                 county = fields[5]
1617                 if county:
1618                     if description.endswith(county):
1619                         description += " County"
1620                     else:
1621                         description += ", %s County" % county
1622                 description += ", %s, US" % state
1623                 zones[zone]["description"] = description
1624                 zones[zone]["centroid"] = gecos( ",".join( fields[9:11] ) )
1625                 if fips in places and not zones[zone]["centroid"]:
1626                     zones[zone]["centroid"] = places[fips]["centroid"]
1627         count += 1
1628     cpfzcf.close()
1629     print("done (%s lines)." % count)
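    # The Gazetteer ZCTA file provides a centroid for each five-digit ZIP
    # Code Tabulation Area.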
1630     message = "Reading %s:%s..." % (gzcta_an, gzcta_fn)
1631     sys.stdout.write(message)
1632     sys.stdout.flush()
1633     count = 0
1634     gzcta = zipfile.ZipFile(gzcta_an).open(gzcta_fn, "r")
1635     columns = gzcta.readline().decode("utf-8").strip().split("\t")
1636     for line in gzcta:
1637         fields = line.decode("utf-8").strip().split("\t")
1638         f_geoid = fields[ columns.index("GEOID") ].strip()
1639         f_intptlat = fields[ columns.index("INTPTLAT") ].strip()
1640         f_intptlong = fields[ columns.index("INTPTLONG") ].strip()
1641         if f_geoid and f_intptlat and f_intptlong:
1642             if f_geoid not in zctas: zctas[f_geoid] = {}
1643             zctas[f_geoid]["centroid"] = gecos(
1644                 "%s,%s" % (f_intptlat, f_intptlong)
1645             )
1646             count += 1
1647     gzcta.close()
1648     print("done (%s lines)." % count)
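    # Apply manual overrides: section names are matched by shape (three
    # letters = airport, four alphanumerics = station, five digits = ZCTA,
    # "XXzNNN" = zone, "fips..." = place) and a leading "-" deletes the
    # record.  Note that centroid/location values are eval()'d as Python
    # tuples, so overrides.conf must be trusted input.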
1649     message = "Reading %s..." % overrides_fn
1650     sys.stdout.write(message)
1651     sys.stdout.flush()
1652     count = 0
1653     added = 0
1654     removed = 0
1655     changed = 0
1656     overrides = configparser.ConfigParser()
1657     overrides.read_file( codecs.open(overrides_fn, "r", "utf8") )
1658     overrideslog = []
1659     for section in overrides.sections():
1660         addopt = 0
1661         chgopt = 0
1662         if section.startswith("-"):
1663             section = section[1:]
1664             delete = True
1665         else: delete = False
1666         if re.match("[A-Za-z]{3}$", section):
1667             if delete:
1668                 if section in airports:
1669                     del( airports[section] )
1670                     logact = "removed airport %s" % section
1671                     removed += 1
1672                 else:
1673                     logact = "tried to remove nonexistent airport %s" % section
1674             else:
1675                 if section in airports:
1676                     logact = "changed airport %s" % section
1677                     changed += 1
1678                 else:
1679                     airports[section] = {}
1680                     logact = "added airport %s" % section
1681                     added += 1
1682                 for key,value in overrides.items(section):
1683                     if key in airports[section]: chgopt += 1
1684                     else: addopt += 1
1685                     if key in ("centroid", "location"):
1686                         airports[section][key] = eval(value)
1687                     else:
1688                         airports[section][key] = value
1689                 if addopt and chgopt:
1690                     logact += " (+%s/!%s options)" % (addopt, chgopt)
1691                 elif addopt: logact += " (+%s options)" % addopt
1692                 elif chgopt: logact += " (!%s options)" % chgopt
1693         elif re.match("[A-Za-z0-9]{4}$", section):
1694             if delete:
1695                 if section in stations:
1696                     del( stations[section] )
1697                     logact = "removed station %s" % section
1698                     removed += 1
1699                 else:
1700                     logact = "tried to remove nonexistent station %s" % section
1701             else:
1702                 if section in stations:
1703                     logact = "changed station %s" % section
1704                     changed += 1
1705                 else:
1706                     stations[section] = {}
1707                     logact = "added station %s" % section
1708                     added += 1
1709                 for key,value in overrides.items(section):
1710                     if key in stations[section]: chgopt += 1
1711                     else: addopt += 1
1712                     if key in ("centroid", "location"):
1713                         stations[section][key] = eval(value)
1714                     else:
1715                         stations[section][key] = value
1716                 if addopt and chgopt:
1717                     logact += " (+%s/!%s options)" % (addopt, chgopt)
1718                 elif addopt: logact += " (+%s options)" % addopt
1719                 elif chgopt: logact += " (!%s options)" % chgopt
1720         elif re.match("[0-9]{5}$", section):
1721             if delete:
1722                 if section in zctas:
1723                     del( zctas[section] )
1724                     logact = "removed zcta %s" % section
1725                     removed += 1
1726                 else:
1727                     logact = "tried to remove nonexistent zcta %s" % section
1728             else:
1729                 if section in zctas:
1730                     logact = "changed zcta %s" % section
1731                     changed += 1
1732                 else:
1733                     zctas[section] = {}
1734                     logact = "added zcta %s" % section
1735                     added += 1
1736                 for key,value in overrides.items(section):
1737                     if key in zctas[section]: chgopt += 1
1738                     else: addopt += 1
1739                     if key in ("centroid", "location"):
1740                         zctas[section][key] = eval(value)
1741                     else:
1742                         zctas[section][key] = value
1743                 if addopt and chgopt:
1744                     logact += " (+%s/!%s options)" % (addopt, chgopt)
1745                 elif addopt: logact += " (+%s options)" % addopt
1746                 elif chgopt: logact += " (!%s options)" % chgopt
1747         elif re.match("[A-Za-z]{2}[Zz][0-9]{3}$", section):
1748             if delete:
1749                 if section in zones:
1750                     del( zones[section] )
1751                     logact = "removed zone %s" % section
1752                     removed += 1
1753                 else:
1754                     logact = "tried to remove nonexistent zone %s" % section
1755             else:
1756                 if section in zones:
1757                     logact = "changed zone %s" % section
1758                     changed += 1
1759                 else:
1760                     zones[section] = {}
1761                     logact = "added zone %s" % section
1762                     added += 1
1763                 for key,value in overrides.items(section):
1764                     if key in zones[section]: chgopt += 1
1765                     else: addopt += 1
1766                     if key in ("centroid", "location"):
1767                         zones[section][key] = eval(value)
1768                     else:
1769                         zones[section][key] = value
1770                 if addopt and chgopt:
1771                     logact += " (+%s/!%s options)" % (addopt, chgopt)
1772                 elif addopt: logact += " (+%s options)" % addopt
1773                 elif chgopt: logact += " (!%s options)" % chgopt
1774         elif re.match("fips[0-9]+$", section):
1775             if delete:
1776                 if section in places:
1777                     del( places[section] )
1778                     logact = "removed place %s" % section
1779                     removed += 1
1780                 else:
1781                     logact = "tried to remove nonexistent place %s" % section
1782             else:
1783                 if section in places:
1784                     logact = "changed place %s" % section
1785                     changed += 1
1786                 else:
1787                     places[section] = {}
1788                     logact = "added place %s" % section
1789                     added += 1
1790                 for key,value in overrides.items(section):
1791                     if key in places[section]: chgopt += 1
1792                     else: addopt += 1
1793                     if key in ("centroid", "location"):
1794                         places[section][key] = eval(value)
1795                     else:
1796                         places[section][key] = value
1797                 if addopt and chgopt:
1798                     logact += " (+%s/!%s options)" % (addopt, chgopt)
1799                 elif addopt: logact += " (+%s options)" % addopt
1800                 elif chgopt: logact += " (!%s options)" % chgopt
1801         count += 1
1802         overrideslog.append("%s\n" % logact)
1803     overrideslog.sort()
1804     if os.path.exists(overrideslog_fn):
1805         os.rename(overrideslog_fn, "%s_old"%overrideslog_fn)
1806     overrideslog_fd = codecs.open(overrideslog_fn, "w", "utf8")
1807     import time
1808     overrideslog_fd.write(
1809         '# Copyright (c) %s Jeremy Stanley <fungi@yuggoth.org>. Permission to\n'
1810         '# use, copy, modify, and distribute this software is granted under terms\n'
1811         '# provided in the LICENSE file distributed with this software.\n\n'
1812         % time.gmtime().tm_year)
1813     overrideslog_fd.writelines(overrideslog)
1814     overrideslog_fd.close()
1815     print("done (%s overridden sections: +%s/-%s/!%s)." % (
1816         count,
1817         added,
1818         removed,
1819         changed
1820     ) )
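    # Correlation pass: link every place, station, ZCTA and zone to the
    # closest station and/or zone within the 0.1 radian cutoff passed to
    # closest(), while printing a dotted percentage progress bar driven by
    # the milestones list.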
1821     estimate = 2*len(places) + len(stations) + 2*len(zctas) + len(zones)
1822     print(
1823         "Correlating places, stations, ZCTAs and zones (upper bound is %s):" % \
1824             estimate
1825     )
1826     count = 0
1827     milestones = list( range(51) )
1828     message = "   "
1829     sys.stdout.write(message)
1830     sys.stdout.flush()
1831     for fips in places:
1832         centroid = places[fips]["centroid"]
1833         if centroid:
1834             station = closest(centroid, stations, "location", 0.1)
1835         if centroid and station[0]:  # avoid a stale or undefined station when centroid is missing
1836             places[fips]["station"] = station
1837             count += 1
1838             if not count%100:
1839                 level = int(50*count/estimate)
1840                 if level in milestones:
1841                     for remaining in milestones[:milestones.index(level)+1]:
1842                         if remaining%5:
1843                             message = "."
1844                             sys.stdout.write(message)
1845                             sys.stdout.flush()
1846                         else:
1847                             message = "%s%%" % (remaining*2,)
1848                             sys.stdout.write(message)
1849                             sys.stdout.flush()
1850                         milestones.remove(remaining)
1851         if centroid:
1852             zone = closest(centroid, zones, "centroid", 0.1)
1853         if centroid and zone[0]:
1854             places[fips]["zone"] = zone
1855             count += 1
1856             if not count%100:
1857                 level = int(50*count/estimate)
1858                 if level in milestones:
1859                     for remaining in milestones[:milestones.index(level)+1]:
1860                         if remaining%5:
1861                             message = "."
1862                             sys.stdout.write(message)
1863                             sys.stdout.flush()
1864                         else:
1865                             message = "%s%%" % (remaining*2,)
1866                             sys.stdout.write(message)
1867                             sys.stdout.flush()
1868                         milestones.remove(remaining)
1869     for station in stations:
1870         if "location" in stations[station]:
1871             location = stations[station]["location"]
1872             if location:
1873                 zone = closest(location, zones, "centroid", 0.1)
1874             if location and zone[0]:
1875                 stations[station]["zone"] = zone
1876                 count += 1
1877                 if not count%100:
1878                     level = int(50*count/estimate)
1879                     if level in milestones:
1880                         for remaining in milestones[:milestones.index(level)+1]:
1881                             if remaining%5:
1882                                 message = "."
1883                                 sys.stdout.write(message)
1884                                 sys.stdout.flush()
1885                             else:
1886                                 message = "%s%%" % (remaining*2,)
1887                                 sys.stdout.write(message)
1888                                 sys.stdout.flush()
1889                             milestones.remove(remaining)
1890     for zcta in zctas.keys():
1891         centroid = zctas[zcta]["centroid"]
1892         if centroid:
1893             station = closest(centroid, stations, "location", 0.1)
1894         if centroid and station[0]:
1895             zctas[zcta]["station"] = station
1896             count += 1
1897             if not count%100:
1898                 level = int(50*count/estimate)
1899                 if level in milestones:
1900                     for remaining in milestones[ : milestones.index(level)+1 ]:
1901                         if remaining%5:
1902                             message = "."
1903                             sys.stdout.write(message)
1904                             sys.stdout.flush()
1905                         else:
1906                             message = "%s%%" % (remaining*2,)
1907                             sys.stdout.write(message)
1908                             sys.stdout.flush()
1909                         milestones.remove(remaining)
1910         if centroid:
1911             zone = closest(centroid, zones, "centroid", 0.1)
1912         if centroid and zone[0]:
1913             zctas[zcta]["zone"] = zone
1914             count += 1
1915             if not count%100:
1916                 level = int(50*count/estimate)
1917                 if level in milestones:
1918                     for remaining in milestones[:milestones.index(level)+1]:
1919                         if remaining%5:
1920                             message = "."
1921                             sys.stdout.write(message)
1922                             sys.stdout.flush()
1923                         else:
1924                             message = "%s%%" % (remaining*2,)
1925                             sys.stdout.write(message)
1926                             sys.stdout.flush()
1927                         milestones.remove(remaining)
1928     for zone in zones.keys():
1929         if "centroid" in zones[zone]:
1930             centroid = zones[zone]["centroid"]
1931             if centroid:
1932                 station = closest(centroid, stations, "location", 0.1)
1933             if centroid and station[0]:
1934                 zones[zone]["station"] = station
1935                 count += 1
1936                 if not count%100:
1937                     level = int(50*count/estimate)
1938                     if level in milestones:
1939                         for remaining in milestones[:milestones.index(level)+1]:
1940                             if remaining%5:
1941                                 message = "."
1942                                 sys.stdout.write(message)
1943                                 sys.stdout.flush()
1944                             else:
1945                                 message = "%s%%" % (remaining*2,)
1946                                 sys.stdout.write(message)
1947                                 sys.stdout.flush()
1948                             milestones.remove(remaining)
1949     for remaining in milestones:
1950         if remaining%5:
1951             message = "."
1952             sys.stdout.write(message)
1953             sys.stdout.flush()
1954         else:
1955             message = "%s%%" % (remaining*2,)
1956             sys.stdout.write(message)
1957             sys.stdout.flush()
1958     print("\n   done (%s correlations)." % count)
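    # Write each dataset back out as an INI-style file with the provenance
    # header above; floats are rendered to seven decimal places and tuples
    # are re-serialized element by element.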
1959     message = "Writing %s..." % airports_fn
1960     sys.stdout.write(message)
1961     sys.stdout.flush()
1962     count = 0
1963     if os.path.exists(airports_fn):
1964         os.rename(airports_fn, "%s_old"%airports_fn)
1965     airports_fd = codecs.open(airports_fn, "w", "utf8")
1966     airports_fd.write(header)
1967     for airport in sorted( airports.keys() ):
1968         airports_fd.write("\n\n[%s]" % airport)
1969         for key, value in sorted( airports[airport].items() ):
1970             if type(value) is float: value = "%.7f"%value
1971             elif type(value) is tuple:
1972                 elements = []
1973                 for element in value:
1974                     if type(element) is float: elements.append("%.7f"%element)
1975                     else: elements.append( repr(element) )
1976                 value = "(%s)"%", ".join(elements)
1977             airports_fd.write( "\n%s = %s" % (key, value) )
1978         count += 1
1979     airports_fd.write("\n")
1980     airports_fd.close()
1981     print("done (%s sections)." % count)
1982     message = "Writing %s..." % places_fn
1983     sys.stdout.write(message)
1984     sys.stdout.flush()
1985     count = 0
1986     if os.path.exists(places_fn):
1987         os.rename(places_fn, "%s_old"%places_fn)
1988     places_fd = codecs.open(places_fn, "w", "utf8")
1989     places_fd.write(header)
1990     for fips in sorted( places.keys() ):
1991         places_fd.write("\n\n[%s]" % fips)
1992         for key, value in sorted( places[fips].items() ):
1993             if type(value) is float: value = "%.7f"%value
1994             elif type(value) is tuple:
1995                 elements = []
1996                 for element in value:
1997                     if type(element) is float: elements.append("%.7f"%element)
1998                     else: elements.append( repr(element) )
1999                 value = "(%s)"%", ".join(elements)
2000             places_fd.write( "\n%s = %s" % (key, value) )
2001         count += 1
2002     places_fd.write("\n")
2003     places_fd.close()
2004     print("done (%s sections)." % count)
2005     message = "Writing %s..." % stations_fn
2006     sys.stdout.write(message)
2007     sys.stdout.flush()
2008     count = 0
2009     if os.path.exists(stations_fn):
2010         os.rename(stations_fn, "%s_old"%stations_fn)
2011     stations_fd = codecs.open(stations_fn, "w", "utf-8")
2012     stations_fd.write(header)
2013     for station in sorted( stations.keys() ):
2014         stations_fd.write("\n\n[%s]" % station)
2015         for key, value in sorted( stations[station].items() ):
2016             if type(value) is float: value = "%.7f"%value
2017             elif type(value) is tuple:
2018                 elements = []
2019                 for element in value:
2020                     if type(element) is float: elements.append("%.7f"%element)
2021                     else: elements.append( repr(element) )
2022                 value = "(%s)"%", ".join(elements)
2023             if type(value) is bytes:
2024                 value = value.decode("utf-8")
2025             stations_fd.write( "\n%s = %s" % (key, value) )
2026         count += 1
2027     stations_fd.write("\n")
2028     stations_fd.close()
2029     print("done (%s sections)." % count)
2030     message = "Writing %s..." % zctas_fn
2031     sys.stdout.write(message)
2032     sys.stdout.flush()
2033     count = 0
2034     if os.path.exists(zctas_fn):
2035         os.rename(zctas_fn, "%s_old"%zctas_fn)
2036     zctas_fd = codecs.open(zctas_fn, "w", "utf8")
2037     zctas_fd.write(header)
2038     for zcta in sorted( zctas.keys() ):
2039         zctas_fd.write("\n\n[%s]" % zcta)
2040         for key, value in sorted( zctas[zcta].items() ):
2041             if type(value) is float: value = "%.7f"%value
2042             elif type(value) is tuple:
2043                 elements = []
2044                 for element in value:
2045                     if type(element) is float: elements.append("%.7f"%element)
2046                     else: elements.append( repr(element) )
2047                 value = "(%s)"%", ".join(elements)
2048             zctas_fd.write( "\n%s = %s" % (key, value) )
2049         count += 1
2050     zctas_fd.write("\n")
2051     zctas_fd.close()
2052     print("done (%s sections)." % count)
2053     message = "Writing %s..." % zones_fn
2054     sys.stdout.write(message)
2055     sys.stdout.flush()
2056     count = 0
2057     if os.path.exists(zones_fn):
2058         os.rename(zones_fn, "%s_old"%zones_fn)
2059     zones_fd = codecs.open(zones_fn, "w", "utf8")
2060     zones_fd.write(header)
2061     for zone in sorted( zones.keys() ):
2062         zones_fd.write("\n\n[%s]" % zone)
2063         for key, value in sorted( zones[zone].items() ):
2064             if type(value) is float: value = "%.7f"%value
2065             elif type(value) is tuple:
2066                 elements = []
2067                 for element in value:
2068                     if type(element) is float: elements.append("%.7f"%element)
2069                     else: elements.append( repr(element) )
2070                 value = "(%s)"%", ".join(elements)
2071             zones_fd.write( "\n%s = %s" % (key, value) )
2072         count += 1
2073     zones_fd.write("\n")
2074     zones_fd.close()
2075     print("done (%s sections)." % count)
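    # QA pass: re-read the generated files with configparser and log missing
    # descriptions, centroids, locations and METAR URLs, dangling station
    # references, unrecognized time zones, zones without forecasts, and
    # zones whose centroids fall within one km of another.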
2076     message = "Starting QA check..."
2077     sys.stdout.write(message)
2078     sys.stdout.flush()
2079     airports = configparser.ConfigParser()
2080     if pyversion("3"):
2081         airports.read(airports_fn, encoding="utf-8")
2082     else:
2083         airports.read(airports_fn)
2084     places = configparser.ConfigParser()
2085     if pyversion("3"):
2086         places.read(places_fn, encoding="utf-8")
2087     else:
2088         places.read(places_fn)
2089     stations = configparser.ConfigParser()
2090     if pyversion("3"):
2091         stations.read(stations_fn, encoding="utf-8")
2092     else:
2093         stations.read(stations_fn)
2094     zctas = configparser.ConfigParser()
2095     if pyversion("3"):
2096         zctas.read(zctas_fn, encoding="utf-8")
2097     else:
2098         zctas.read(zctas_fn)
2099     zones = configparser.ConfigParser()
2100     if pyversion("3"):
2101         zones.read(zones_fn, encoding="utf-8")
2102     else:
2103         zones.read(zones_fn)
2104     qalog = []
2105     places_nocentroid = 0
2106     places_nodescription = 0
2107     for place in sorted( places.sections() ):
2108         if not places.has_option(place, "centroid"):
2109             qalog.append("%s: no centroid\n" % place)
2110             places_nocentroid += 1
2111         if not places.has_option(place, "description"):
2112             qalog.append("%s: no description\n" % place)
2113             places_nodescription += 1
2114     stations_nodescription = 0
2115     stations_nolocation = 0
2116     stations_nometar = 0
2117     for station in sorted( stations.sections() ):
2118         if not stations.has_option(station, "description"):
2119             qalog.append("%s: no description\n" % station)
2120             stations_nodescription += 1
2121         if not stations.has_option(station, "location"):
2122             qalog.append("%s: no location\n" % station)
2123             stations_nolocation += 1
2124         if not stations.has_option(station, "metar"):
2125             qalog.append("%s: no metar\n" % station)
2126             stations_nometar += 1
2127     airports_badstation = 0
2128     airports_nostation = 0
2129     for airport in sorted( airports.sections() ):
2130         if not airports.has_option(airport, "station"):
2131             qalog.append("%s: no station\n" % airport)
2132             airports_nostation += 1
2133         else:
2134             station = airports.get(airport, "station")
2135             if station not in stations.sections():
2136                 qalog.append( "%s: bad station %s\n" % (airport, station) )
2137                 airports_badstation += 1
2138     zctas_nocentroid = 0
2139     for zcta in sorted( zctas.sections() ):
2140         if not zctas.has_option(zcta, "centroid"):
2141             qalog.append("%s: no centroid\n" % zcta)
2142             zctas_nocentroid += 1
2143     zones_nocentroid = 0
2144     zones_nodescription = 0
2145     zones_notz = 0
2146     zones_noforecast = 0
2147     zones_overlapping = 0
2148     zonetable = {}
2149     for zone in zones.sections():
2150         if zones.has_option(zone, "centroid"):
2151             zonetable[zone] = {
2152                 "centroid": eval( zones.get(zone, "centroid") )
2153             }
2154     for zone in sorted( zones.sections() ):
2155         if zones.has_option(zone, "centroid"):
2156             zonetable_local = zonetable.copy()
2157             del( zonetable_local[zone] )
2158             centroid = eval( zones.get(zone, "centroid") )
2159             if centroid:
2160                 nearest = closest(centroid, zonetable_local, "centroid", 0.1)
2161             if centroid and nearest[1]*radian_to_km < 1:
2162                 qalog.append( "%s: within one km of %s\n" % (
2163                     zone,
2164                     nearest[0]
2165                 ) )
2166                 zones_overlapping += 1
2167         else:
2168             qalog.append("%s: no centroid\n" % zone)
2169             zones_nocentroid += 1
2170         if not zones.has_option(zone, "description"):
2171             qalog.append("%s: no description\n" % zone)
2172             zones_nodescription += 1
2173         if not zones.has_option(zone, "tz") or zones.get(
2174                 zone, "tz") not in zoneinfo.available_timezones():
2175             qalog.append("%s: no time zone\n" % zone)
2176             zones_notz += 1
2177         if not zones.has_option(zone, "zone_forecast"):
2178             qalog.append("%s: no forecast\n" % zone)
2179             zones_noforecast += 1
2180     if os.path.exists(qalog_fn):
2181         os.rename(qalog_fn, "%s_old"%qalog_fn)
2182     qalog_fd = codecs.open(qalog_fn, "w", "utf8")
2183     import time
2184     qalog_fd.write(
2185         '# Copyright (c) %s Jeremy Stanley <fungi@yuggoth.org>. Permission to\n'
2186         '# use, copy, modify, and distribute this software is granted under terms\n'
2187         '# provided in the LICENSE file distributed with this software.\n\n'
2188         % time.gmtime().tm_year)
2189     qalog_fd.writelines(qalog)
2190     qalog_fd.close()
2191     if qalog:
2192         print("issues found (see %s for details):"%qalog_fn)
2193         if airports_badstation:
2194             print("   %s airports with invalid station"%airports_badstation)
2195         if airports_nostation:
2196             print("   %s airports with no station"%airports_nostation)
2197         if places_nocentroid:
2198             print("   %s places with no centroid"%places_nocentroid)
2199         if places_nodescription:
2200             print("   %s places with no description"%places_nodescription)
2201         if stations_nodescription:
2202             print("   %s stations with no description"%stations_nodescription)
2203         if stations_nolocation:
2204             print("   %s stations with no location"%stations_nolocation)
2205         if stations_nometar:
2206             print("   %s stations with no METAR"%stations_nometar)
2207         if zctas_nocentroid:
2208             print("   %s ZCTAs with no centroid"%zctas_nocentroid)
2209         if zones_nocentroid:
2210             print("   %s zones with no centroid"%zones_nocentroid)
2211         if zones_nodescription:
2212             print("   %s zones with no description"%zones_nodescription)
2213         if zones_notz:
2214             print("   %s zones with no time zone"%zones_notz)
2215         if zones_noforecast:
2216             print("   %s zones with no forecast"%zones_noforecast)
2217         if zones_overlapping:
2218             print("   %s zones within one km of another"%zones_overlapping)
2219     else: print("no issues found.")
2220     print("Indexing complete!")
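# A minimal usage sketch for regenerating the correlation data, assuming this
# routine is exposed as correlate() in this module and the source files named
# in the header above are present in the current directory:
#
#     python3 -c "import weather; weather.correlate()"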