1 """Contains various object definitions needed by the weather utility."""
3 weather_copyright = """\
4 # Copyright (c) 2006-2024 Jeremy Stanley <fungi@yuggoth.org>. Permission to
5 # use, copy, modify, and distribute this software is granted under terms
6 # provided in the LICENSE file distributed with this software.
# Version of the weather utility, reported via optparse's --version option.
weather_version = "2.5.0"
# Earth radii (approximately the quasi-arithmetic mean radius) in km and
# statute miles; multiplying a great-circle angle in radians by these gives
# a surface distance, used for the proximity figures in search results.
radian_to_km = 6372.795484
radian_to_mi = 3959.871528
    """An object to contain selection data."""
    # NOTE(review): the enclosing class statement and the "def __init__"
    # line fall on lines missing from this chunk, as do some call
    # arguments below; all visible code is kept verbatim.
        """Store the config, options and arguments."""
        self.config = get_config()
        self.options, self.arguments = get_options(self.config)
        # When search-result caching is enabled (and we are not just doing
        # a long alias listing), fold cached searches into the config.
        if self.get_bool("cache") and self.get_bool("cache_search") \
            and not self.get_bool("longlist"):
            integrate_search_cache(
        # With no positional arguments, fall back to the deprecated --id
        # or --city/--st options, warning that they will be removed.
        if not self.arguments:
            if "id" in self.options.__dict__ \
                and self.options.__dict__["id"]:
                self.arguments.append( self.options.__dict__["id"] )
                del( self.options.__dict__["id"] )
                message = "WARNING: the --id option is deprecated and will eventually be removed\n"
                sys.stderr.write(message)
            elif "city" in self.options.__dict__ \
                and self.options.__dict__["city"] \
                and "st" in self.options.__dict__ \
                and self.options.__dict__["st"]:
                self.arguments.append(
                        self.options.__dict__["city"],
                        self.options.__dict__["st"]
                del( self.options.__dict__["city"] )
                del( self.options.__dict__["st"] )
                message = "WARNING: the --city/--st options are deprecated and will eventually be removed\n"
                sys.stderr.write(message)
    def get(self, option, argument=None):
        """Retrieve data from the config or options."""
        # NOTE(review): several lines of this method (closing parens and
        # the call that produces "guessed") are missing from this chunk.
        # Aliases may no longer carry city/id/st options; drop any such
        # config section and warn the user.
        if self.config.has_section(argument) and (
            self.config.has_option(argument, "city") \
            or self.config.has_option(argument, "id") \
            or self.config.has_option(argument, "st")
            self.config.remove_section(argument)
            message = "WARNING: the city/id/st options are now unsupported in aliases\n"
            sys.stderr.write(message)
        # An argument with no config section is treated as a search
        # expression; the guessed results are stored as a new section.
        if not self.config.has_section(argument):
                path=self.get("setpath"),
                info=self.get("info"),
                    self.get("cache") and self.get("cache_search")
                cachedir=self.get("cachedir"),
                quiet=self.get_bool("quiet")
            self.config.add_section(argument)
            for item in guessed.items():
                self.config.set(argument, *item)
        # Config data (including cached/guessed values) takes precedence
        # over same-named command-line option values.
        if self.config.has_option(argument, option):
            return self.config.get(argument, option)
        if option in self.options.__dict__:
            return self.options.__dict__[option]
        message = "WARNING: no URI defined for %s\n" % option
        sys.stderr.write(message)
    def get_bool(self, option, argument=None):
        """Get data and coerce to a boolean if necessary."""
        # Mimic configparser's getboolean() method by treating
        # false/no/off/0 as False and true/yes/on/1 as True values,
        # case-insensitively.  NOTE(review): the return statements for
        # the matching branches fall on lines missing from this chunk.
        value = self.get(option, argument)
        if isinstance(value, bool):
        if isinstance(value, str):
            vlower = value.lower()
            if vlower in ('false', 'no', 'off', '0'):
            elif vlower in ('true', 'yes', 'on', '1'):
        # Any other value is a configuration error.
        raise ValueError("Not a boolean: %s" % value)
    def getint(self, option, argument=None):
        """Get data and coerce to an integer if necessary."""
        value = self.get(option, argument)
        # NOTE(review): the falsy-value branch is on a line missing from
        # this chunk -- presumably it returns a default integer; confirm.
        if value: return int(value)
    """Average a list of coordinates."""
    # NOTE(review): the def line and the accumulation of x, y and count
    # fall on lines missing from this chunk; presumably x and y sum the
    # first and second members of each coordinate pair -- confirm.
    return (x/count, y/count)
def filter_units(line, units="imperial"):
    """Filter or convert units in a line of text between US/UK and metric."""
    # NOTE(review): the re.match() calls' subject arguments, closing
    # parens and several guard lines are missing from this chunk; all
    # visible code is kept verbatim.
    # filter lines with both pressures in the form of "X inches (Y hPa)" or
    # "X in. Hg (Y hPa)", keeping only the requested unit system
        r"(.* )(\d*(\.\d+)? (inches|in\. Hg)) \((\d*(\.\d+)? hPa)\)(.*)",
        preamble, in_hg, i_fr, i_un, hpa, h_fr, trailer = dual_p.groups()
        if units == "imperial": line = preamble + in_hg + trailer
        elif units == "metric": line = preamble + hpa + trailer
    # filter lines with both temperatures in the form of "X F (Y C)"
        r"(.* )(-?\d*(\.\d+)? F) \((-?\d*(\.\d+)? C)\)(.*)",
        preamble, fahrenheit, f_fr, celsius, c_fr, trailer = dual_t.groups()
        if units == "imperial": line = preamble + fahrenheit + trailer
        elif units == "metric": line = preamble + celsius + trailer
    # if metric is desired, convert distances in the form of "X mile(s)" to
    # "Y kilometer(s)" (1 mile = 1.609344 km)
    if units == "metric":
        imperial_d = re.match(
            r"(.* )(\d+)( mile\(s\))(.*)",
            preamble, mi, m_u, trailer = imperial_d.groups()
            line = preamble + str(int(round(int(mi)*1.609344))) \
                + " kilometer(s)" + trailer
    # filter speeds in the form of "X MPH (Y KT)" to just "X MPH"; if metric is
    # desired, convert to "Z KPH"
    imperial_s = re.match(
        r"(.* )(\d+)( MPH)( \(\d+ KT\))(.*)",
        preamble, mph, m_u, kt, trailer = imperial_s.groups()
        if units == "imperial": line = preamble + mph + m_u + trailer
        elif units == "metric":
            line = preamble + str(int(round(int(mph)*1.609344))) + " KPH" + \
    # NOTE(review): the identical speed filter is applied a second time,
    # presumably to catch a second "X MPH (Y KT)" pair (e.g. gusts) on
    # the same line -- confirm against the full source.
    imperial_s = re.match(
        r"(.* )(\d+)( MPH)( \(\d+ KT\))(.*)",
        preamble, mph, m_u, kt, trailer = imperial_s.groups()
        if units == "imperial": line = preamble + mph + m_u + trailer
        elif units == "metric":
            line = preamble + str(int(round(int(mph)*1.609344))) + " KPH" + \
    # if imperial is desired, qualify given forecast temperatures like "X F";
    # if metric is desired, convert to "Y C"
    imperial_t = re.match(
        r"(.* )(High |high |Low |low )(\d+)(\.|,)(.*)",
        preamble, parameter, fahrenheit, sep, trailer = imperial_t.groups()
        if units == "imperial":
            line = preamble + parameter + fahrenheit + " F" + sep + trailer
        elif units == "metric":
            line = preamble + parameter \
                + str(int(round((int(fahrenheit)-32)*5/9))) + " C" + sep \
    # hand off the resulting line
    """Return a string containing the results of a URI GET."""
    # NOTE(review): the def line and several body lines (now=..., the
    # try statements, returns) are missing from this chunk; visible code
    # is kept verbatim.
    import os, time, urllib, urllib.error, urllib.request
    # Retrieved data is cached under <cachedir>/datacache; creation is
    # best-effort, so failures are deliberately ignored.
    dcachedir = os.path.join( os.path.expanduser(cachedir), "datacache" )
    if not os.path.exists(dcachedir):
        try: os.makedirs(dcachedir)
        except (IOError, OSError): pass
    # Cache file name is the URI with its scheme stripped and slashes
    # flattened so it forms a single path component.
    dcache_fn = os.path.join(
        uri.split(":",1)[1].replace("/","_")
    # Serve from the cache when the copy is newer than cacheage seconds
    # old (and its mtime is not in the future).
    if cache_data and os.access(dcache_fn, os.R_OK) \
        and now-cacheage < os.stat(dcache_fn).st_mtime <= now:
        dcache_fd = open(dcache_fn)
        data = dcache_fd.read()
        data = urllib.request.urlopen(uri).read().decode("utf-8")
    except urllib.error.URLError:
        # Retrieval failures either return empty or report and bail.
        if ignore_fail: return ""
        sys.stderr.write("%s error: failed to retrieve\n %s\n\n" % (
            os.path.basename( sys.argv[0] ), uri))
    # Some data sources are HTML with the plain text wrapped in pre tags
        data = data[data.find("<pre>")+5:data.find("</pre>")]
        # Write-back to the cache is best-effort as well.
        dcache_fd = codecs.open(dcache_fn, "w", "utf-8")
        dcache_fd.write(data)
        except (IOError, OSError): pass
    """Return a summarized METAR for the specified station."""
    # NOTE(review): the def line and a number of body lines are missing
    # from this chunk; visible code is kept verbatim.
    # A metar URI is mandatory for conditions reports.
        message = "%s error: METAR URI required for conditions\n" % \
            os.path.basename( sys.argv[0] )
        sys.stderr.write(message)
        cache_data=cache_data,
    if type(metar) is bytes: metar = metar.decode("utf-8")
    # With --verbose, return the full undigested report.
    if verbose: return metar
    lines = metar.split("\n")
    # Default comma-separated list of condition headers to report.
        "relative_humidity," \
        + "precipitation_last_hour," \
        + "sky conditions," \
    headerlist = headers.lower().replace("_"," ").split(",")
    title = "Current conditions at %s"
    # First line of the report names the place ("Station, Region ...").
    place = lines[0].split(", ")
        place = "%s, %s" % ( place[0].title(), place[1] )
    else: place = "<UNKNOWN>"
    output.append(title%place)
    output.append("Last updated " + lines[1])
    # Keep only report lines whose header matches the requested list,
    # trimming a trailing ":N" marker and converting units on request.
    for header in headerlist:
            if line.lower().startswith(header + ":"):
                if re.match(r".*:\d+$", line): line = line[:line.rfind(":")]
                if imperial: line = filter_units(line, units="imperial")
                elif metric: line = filter_units(line, units="metric")
                if quiet: output.append(line)
                else: output.append(" " + line)
        "(no conditions matched your header list, try with --verbose)"
    return "\n".join(output)
    """Return alert notice for the specified URI."""
    # NOTE(review): the def line and several body lines are missing from
    # this chunk; visible code is kept verbatim.
        cache_data=cache_data,
    if type(alert) is bytes: alert = alert.decode("utf-8")
    # With --verbose, return the full undigested feed.
    if verbose: return alert
    if re.search(r"\nNational Weather Service", alert):
        # Expiration stamps look like "Expires:YYYYMMDDhhmm" and are
        # interpreted as UTC below.
        expirycheck = re.search(r"Expires:([0-9]{12})", alert)
            # only report alerts and forecasts that expired less than delay
            # hours ago (see the timedelta comparison below)
            import datetime, zoneinfo
            expiration = datetime.datetime.fromisoformat(
                expirycheck[1][8:10],
            )).replace(tzinfo=zoneinfo.ZoneInfo("UTC"))
            now = datetime.datetime.now(tz=zoneinfo.ZoneInfo("UTC"))
            if now - expiration > datetime.timedelta(hours=delay):
    lines = alert.split("\n")
        # NOTE(review): "muted" presumably toggles suppression around
        # National Weather Service boilerplate; its assignments are on
        # lines missing from this chunk -- confirm.
        if muted and line.startswith("National Weather Service"):
        if line and not muted:
            if quiet: output.append(line)
            else: output.append(" " + line)
    return "\n".join(output)
def get_options(config):
    """Parse the options passed on the command line."""
    # NOTE(review): many original lines (the optparse import, dest= and
    # action= lines for several options, some defaults) are missing from
    # this chunk; all visible code is kept verbatim.  Defaults for most
    # options can be overridden from the [default] config section.
    # for optparse's builtin -h/--help option
        "usage: %prog [options] [alias1|search1 [alias2|search2 [...]]]"
    # for optparse's builtin --version option
    verstring = "%prog " + weather_version
    option_parser = optparse.OptionParser(usage=usage, version=verstring)
    # separate options object from list of arguments and return both
    # the -a/--alert option
    if config.has_option("default", "alert"):
        default_alert = config.getboolean("default", "alert")
    else: default_alert = False
    option_parser.add_option("-a", "--alert",
        default=default_alert,
        help="include local alert notices")
    # the --atypes option
    if config.has_option("default", "atypes"):
        default_atypes = config.get("default", "atypes")
            "coastal_flood_statement," \
            + "flash_flood_statement," \
            + "flash_flood_warning," \
            + "flash_flood_watch," \
            + "severe_thunderstorm_warning," \
            + "severe_weather_statement," \
            + "special_weather_statement," \
            + "urgent_weather_message"
    option_parser.add_option("--atypes",
        default=default_atypes,
        help="list of alert notification types to display")
    # the --build-sets option
    option_parser.add_option("--build-sets",
        help="(re)build location correlation sets")
    # the --cacheage option
    if config.has_option("default", "cacheage"):
        default_cacheage = config.getint("default", "cacheage")
    else: default_cacheage = 900
    option_parser.add_option("--cacheage",
        default=default_cacheage,
        help="duration in seconds to refresh cached data")
    # the --cachedir option
    if config.has_option("default", "cachedir"):
        default_cachedir = config.get("default", "cachedir")
    else: default_cachedir = "~/.weather"
    option_parser.add_option("--cachedir",
        default=default_cachedir,
        help="directory for storing cached searches and data")
    # the --delay option
    if config.has_option("default", "delay"):
        default_delay = config.getint("default", "delay")
    else: default_delay = 1
    option_parser.add_option("--delay",
        default=default_delay,
        help="hours to delay alert and forecast expiration")
    # the -f/--forecast option
    if config.has_option("default", "forecast"):
        default_forecast = config.getboolean("default", "forecast")
    else: default_forecast = False
    option_parser.add_option("-f", "--forecast",
        default=default_forecast,
        help="include a local forecast")
    # the --headers option
    if config.has_option("default", "headers"):
        default_headers = config.get("default", "headers")
            + "relative_humidity," \
            + "sky_conditions," \
            + "precipitation_last_hour"
    option_parser.add_option("--headers",
        default=default_headers,
        help="list of conditions headers to display")
    # the --imperial option
    if config.has_option("default", "imperial"):
        default_imperial = config.getboolean("default", "imperial")
    else: default_imperial = False
    option_parser.add_option("--imperial",
        default=default_imperial,
        help="filter/convert conditions for US/UK units")
    # the --info option
    option_parser.add_option("--info",
        help="output detailed information for your search")
    # the -l/--list option
    option_parser.add_option("-l", "--list",
        help="list all configured aliases and cached searches")
    # the --longlist option
    option_parser.add_option("--longlist",
        help="display details of all configured aliases")
    # the -m/--metric option
    if config.has_option("default", "metric"):
        default_metric = config.getboolean("default", "metric")
    else: default_metric = False
    option_parser.add_option("-m", "--metric",
        default=default_metric,
        help="filter/convert conditions for metric units")
    # the -n/--no-conditions option
    if config.has_option("default", "conditions"):
        default_conditions = config.getboolean("default", "conditions")
    else: default_conditions = True
    option_parser.add_option("-n", "--no-conditions",
        action="store_false",
        default=default_conditions,
        help="disable output of current conditions")
    # the --no-cache option
    if config.has_option("default", "cache"):
        default_cache = config.getboolean("default", "cache")
    else: default_cache = True
    option_parser.add_option("--no-cache",
        action="store_false",
        help="disable all caching (searches and data)")
    # the --no-cache-data option
    if config.has_option("default", "cache_data"):
        default_cache_data = config.getboolean("default", "cache_data")
    else: default_cache_data = True
    option_parser.add_option("--no-cache-data",
        action="store_false",
        help="disable retrieved data caching")
    # the --no-cache-search option
    if config.has_option("default", "cache_search"):
        default_cache_search = config.getboolean("default", "cache_search")
    else: default_cache_search = True
    option_parser.add_option("--no-cache-search",
        action="store_false",
        help="disable search result caching")
    # the -q/--quiet option
    if config.has_option("default", "quiet"):
        default_quiet = config.getboolean("default", "quiet")
    else: default_quiet = False
    option_parser.add_option("-q", "--quiet",
        default=default_quiet,
        help="skip preambles and don't indent")
    # the --setpath option
    if config.has_option("default", "setpath"):
        default_setpath = config.get("default", "setpath")
    else: default_setpath = ".:~/.weather"
    option_parser.add_option("--setpath",
        default=default_setpath,
        help="directory search path for correlation sets")
    # the -v/--verbose option
    if config.has_option("default", "verbose"):
        default_verbose = config.getboolean("default", "verbose")
    else: default_verbose = False
    option_parser.add_option("-v", "--verbose",
        default=default_verbose,
        help="show full decoded feeds")
    # deprecated -c/--city, -i/--id and -s/--st options, hidden from
    # --help output (Selections.__init__ emits deprecation warnings)
    if config.has_option("default", "city"):
        default_city = config.get("default", "city")
    else: default_city = ""
    option_parser.add_option("-c", "--city",
        default=default_city,
        help=optparse.SUPPRESS_HELP)
    if config.has_option("default", "id"):
        default_id = config.get("default", "id")
    else: default_id = ""
    option_parser.add_option("-i", "--id",
        help=optparse.SUPPRESS_HELP)
    if config.has_option("default", "st"):
        default_st = config.get("default", "st")
    else: default_st = ""
    option_parser.add_option("-s", "--st",
        help=optparse.SUPPRESS_HELP)
    options, arguments = option_parser.parse_args()
    return options, arguments
    """Parse the aliases and configuration."""
    # NOTE(review): the def line, the rcfiles list assignment and the
    # return are on lines missing from this chunk; visible code is
    # kept verbatim.
    import configparser, os
    config = configparser.ConfigParser()
    # Candidate rc files; files read later override earlier ones.
        "/etc/weather/weatherrc",
        os.path.expanduser("~/.weather/weatherrc"),
        os.path.expanduser("~/.weatherrc"),
    for rcfile in rcfiles:
        if os.access(rcfile, os.R_OK):
            config.read(rcfile, encoding="utf-8")
    # Normalize section names to lower case, replacing any existing
    # lower-case duplicate with the mixed-case section's options.
    for section in config.sections():
        if section != section.lower():
            if config.has_section(section.lower()):
                config.remove_section(section.lower())
            config.add_section(section.lower())
            for option,value in config.items(section):
                config.set(section.lower(), option, value)
def integrate_search_cache(config, cachedir, setpath):
    """Add cached search results into the configuration."""
    # NOTE(review): several lines of this function (now=..., the sorted()
    # closing, the cache removal try and final return) are missing from
    # this chunk; visible code is kept verbatim.
    import configparser, os, time
    scache_fn = os.path.join( os.path.expanduser(cachedir), "searches" )
    if not os.access(scache_fn, os.R_OK): return config
    # The cache's first line records its creation epoch after a colon;
    # it is compared against the data files' freshness below.
    scache_fd = open(scache_fn)
    created = float( scache_fd.readline().split(":")[1].strip().split()[0] )
    # A cache older than the newest correlation data file is outdated
    # and gets cleared instead of loaded.
    datafiles = data_index(setpath)
        data_freshness = sorted(
            [ x[1] for x in datafiles.values() ],
    else: data_freshness = now
    if created < data_freshness <= now:
            print( "[clearing outdated %s]" % scache_fn )
        except (IOError, OSError):
    # Merge cached search sections into the live configuration; existing
    # sections (e.g. user aliases) are left untouched.
    scache = configparser.ConfigParser()
    scache.read(scache_fn, encoding="utf-8")
    for section in scache.sections():
        if not config.has_section(section):
            config.add_section(section)
            for option,value in scache.items(section):
                config.set(section, option, value)
def list_aliases(config, detail=False):
    """Return a formatted list of aliases defined in the config."""
    # NOTE(review): the if/else branching on "detail" and the return are
    # on lines missing from this chunk; visible code is kept verbatim.
    # Detailed listing: dump every option of every section.
        output = "\n# configured alias details..."
        for section in sorted(config.sections()):
            output += "\n\n[%s]" % section
            for item in sorted(config.items(section)):
                output += "\n%s = %s" % item
    # Short listing: one "name: description" line per section.
        output = "configured aliases and cached searches..."
        for section in sorted(config.sections()):
            if config.has_option(section, "description"):
                description = config.get(section, "description")
            else: description = "(no description provided)"
            output += "\n %s: %s" % (section, description)
def data_index(path):
    """Locate the correlation data files along a search path."""
    # NOTE(review): the datafiles initialization, candidate tuple close,
    # loop break and return are on lines missing from this chunk.
    # For each data set, record the first match found while walking the
    # colon-separated directory path, trying plain, .gz and .txt names;
    # each entry appears to pair the path with its mtime.
    for filename in ("airports", "places", "stations", "zctas", "zones"):
        for dirname in path.split(":"):
            for extension in ("", ".gz", ".txt"):
                candidate = os.path.expanduser(
                    os.path.join( dirname, "".join( (filename, extension) ) )
                if os.path.exists(candidate):
                    datafiles[filename] = (
                        os.stat(candidate).st_mtime
            if filename in datafiles:
    """Find URIs using airport, gecos, placename, station, ZCTA/ZIP, zone."""
    # NOTE(review): the def guess(...) line and many body lines are
    # missing from this chunk; all visible code is kept verbatim.
    import codecs, configparser, datetime, time, os, re, sys
    datafiles = data_index(path)
    # Classify the expression by shape: 3 letters = IATA airport code,
    # 4 alphanumerics = ICAO station, AANNN = NWS zone, 5 digits = ZCTA,
    # a lat/lon pair = coordinates, FIPSnnn = FIPS code; anything else
    # presumably falls through to a placename search (that assignment is
    # not visible here -- confirm).
    if re.match("[A-Za-z]{3}$", expression): searchtype = "airport"
    elif re.match("[A-Za-z0-9]{4}$", expression): searchtype = "station"
    elif re.match("[A-Za-z]{2}[Zz][0-9]{3}$", expression): searchtype = "zone"
    elif re.match("[0-9]{5}$", expression): searchtype = "ZCTA"
        r"[\+-]?\d+(\.\d+)?(-\d+){,2}[ENSWensw]?, *[\+-]?\d+(\.\d+)?(-\d+){,2}[ENSWensw]?$",
        searchtype = "coordinates"
    elif re.match(r"(FIPS|fips)\d+$", expression): searchtype = "FIPS"
    if cache_search: action = "caching"
    else: action = "using"
    # Qualitative proximity descriptions keyed by score thresholds.
        (0.995, "excellent"),
    if not quiet: print("Searching via %s..."%searchtype)
    # Load the ICAO stations data file (optionally gzipped) into a
    # ConfigParser, reporting an error if it cannot be found.
    stations = configparser.ConfigParser()
    dataname = "stations"
    if dataname in datafiles:
        datafile = datafiles[dataname][0]
        if datafile.endswith(".gz"):
            stations.read_string( gzip.open(datafile).read().decode("utf-8") )
            stations.read(datafile, encoding="utf-8")
        message = "%s error: can't find \"%s\" data file\n" % (
            os.path.basename( sys.argv[0] ),
        sys.stderr.write(message)
    # Likewise for the NWS zones data file.
    zones = configparser.ConfigParser()
    if dataname in datafiles:
        datafile = datafiles[dataname][0]
        if datafile.endswith(".gz"):
            zones.read_string( gzip.open(datafile).read().decode("utf-8") )
            zones.read(datafile, encoding="utf-8")
        message = "%s error: can't find \"%s\" data file\n" % (
            os.path.basename( sys.argv[0] ),
        sys.stderr.write(message)
    # IATA/FAA airport code: map it to its ICAO station via the airports
    # data file, then pull zone/description from the station record.
    if searchtype == "airport":
        expression = expression.lower()
        airports = configparser.ConfigParser()
        dataname = "airports"
        if dataname in datafiles:
            datafile = datafiles[dataname][0]
            if datafile.endswith(".gz"):
                airports.read_string(
                    gzip.open(datafile).read().decode("utf-8") )
                airports.read(datafile, encoding="utf-8")
            message = "%s error: can't find \"%s\" data file\n" % (
                os.path.basename( sys.argv[0] ),
            sys.stderr.write(message)
        if airports.has_section(expression) \
            and airports.has_option(expression, "station"):
            search = (expression, "IATA/FAA airport code %s" % expression)
            station = ( airports.get(expression, "station"), 0 )
            if stations.has_option(station[0], "zone"):
                # NOTE(review): eval() of data-file contents assumes the
                # correlation sets are trusted local data.
                zone = eval( stations.get(station[0], "zone") )
            if not ( info or quiet ) \
                and stations.has_option( station[0], "description" ):
                    stations.get(station[0], "description")
            message = "No IATA/FAA airport code \"%s\" in the %s file.\n" % (
                datafiles["airports"][0]
            sys.stderr.write(message)
    # ICAO station code: use the station record directly.
    elif searchtype == "station":
        expression = expression.lower()
        if stations.has_section(expression):
            station = (expression, 0)
            search = (expression, "ICAO station code %s" % expression)
            if stations.has_option(expression, "zone"):
                zone = eval( stations.get(expression, "zone") )
            if not ( info or quiet ) \
                and stations.has_option(expression, "description"):
                    stations.get(expression, "description")
            message = "No ICAO weather station \"%s\" in the %s file.\n" % (
                datafiles["stations"][0]
            sys.stderr.write(message)
    # NWS zone: use the zone record, which names an associated station.
    elif searchtype == "zone":
        expression = expression.lower()
        if zones.has_section(expression) \
            and zones.has_option(expression, "station"):
            zone = (expression, 0)
            station = eval( zones.get(expression, "station") )
            search = (expression, "NWS/NOAA weather zone %s" % expression)
            if not ( info or quiet ) \
                and zones.has_option(expression, "description"):
                    zones.get(expression, "description")
            message = "No usable NWS weather zone \"%s\" in the %s file.\n" % (
                datafiles["zones"][0]
            sys.stderr.write(message)
    # Census ZCTA (ZIP code): look up its associated station and zone.
    elif searchtype == "ZCTA":
        zctas = configparser.ConfigParser()
        if dataname in datafiles:
            datafile = datafiles[dataname][0]
            if datafile.endswith(".gz"):
                zctas.read_string( gzip.open(datafile).read().decode("utf-8") )
                zctas.read(datafile, encoding="utf-8")
            message = "%s error: can't find \"%s\" data file\n" % (
                os.path.basename( sys.argv[0] ),
            sys.stderr.write(message)
        if zctas.has_section(expression) \
            and zctas.has_option(expression, "station"):
            station = eval( zctas.get(expression, "station") )
            search = (expression, "Census ZCTA (ZIP code) %s" % expression)
            if zctas.has_option(expression, "zone"):
                zone = eval( zctas.get(expression, "zone") )
            message = "No census ZCTA (ZIP code) \"%s\" in the %s file.\n" % (
                datafiles["zctas"][0]
            sys.stderr.write(message)
    # Geographic coordinates: build tables of every station location and
    # zone centroid, then pick the closest of each within a 0.1 radian
    # search window (see closest()).
    elif searchtype == "coordinates":
        search = (expression, "Geographic coordinates %s" % expression)
        for station in stations.sections():
            if stations.has_option(station, "location"):
                stationtable[station] = {
                    "location": eval( stations.get(station, "location") )
        station = closest( gecos(expression), stationtable, "location", 0.1 )
            message = "No ICAO weather station found near %s.\n" % expression
            sys.stderr.write(message)
        for zone in zones.sections():
            if zones.has_option(zone, "centroid"):
                    "centroid": eval( zones.get(zone, "centroid") )
        zone = closest( gecos(expression), zonetable, "centroid", 0.1 )
            message = "No NWS weather zone near %s; forecasts unavailable.\n" \
            sys.stderr.write(message)
    # FIPS code or free-form placename: try an exact Census Place match
    # first, otherwise collect matching places/stations/zones into a
    # possibilities list for disambiguation.
    elif searchtype in ("FIPS", "name"):
        places = configparser.ConfigParser()
        if dataname in datafiles:
            datafile = datafiles[dataname][0]
            if datafile.endswith(".gz"):
                places.read_string( gzip.open(datafile).read().decode("utf-8") )
                places.read(datafile, encoding="utf-8")
            message = "%s error: can't find \"%s\" data file\n" % (
                os.path.basename( sys.argv[0] ),
            sys.stderr.write(message)
        place = expression.lower()
        # Exact Census Place (or FIPS section) match.
        if places.has_section(place) and places.has_option(place, "station"):
            station = eval( places.get(place, "station") )
            search = (expression, "Census Place %s" % expression)
            if places.has_option(place, "description"):
                    search[1] + ", %s" % places.get(place, "description")
            if places.has_option(place, "zone"):
                zone = eval( places.get(place, "zone") )
            if not ( info or quiet ) \
                and places.has_option(place, "description"):
                    places.get(place, "description")
        # Otherwise scan descriptions in all three data sets for matches
        # (the matching condition lines are missing from this chunk).
            for place in places.sections():
                if places.has_option(place, "description") \
                    and places.has_option(place, "station") \
                        places.get(place, "description"),
                        possibilities.append(place)
            for place in stations.sections():
                if stations.has_option(place, "description") \
                        stations.get(place, "description"),
                        possibilities.append(place)
            for place in zones.sections():
                if zones.has_option(place, "description") \
                    and zones.has_option(place, "station") \
                        zones.get(place, "description"),
                        possibilities.append(place)
            # A single match is used directly, preferring place records,
            # then stations, then zones.
            if len(possibilities) == 1:
                place = possibilities[0]
                if places.has_section(place):
                    station = eval( places.get(place, "station") )
                    description = places.get(place, "description")
                    if places.has_option(place, "zone"):
                        zone = eval( places.get(place, "zone" ) )
                    search = ( expression, "%s: %s" % (place, description) )
                elif stations.has_section(place):
                    station = (place, 0.0)
                    description = stations.get(place, "description")
                    if stations.has_option(place, "zone"):
                        zone = eval( stations.get(place, "zone" ) )
                    search = ( expression, "ICAO station code %s" % place )
                elif zones.has_section(place):
                    station = eval( zones.get(place, "station") )
                    description = zones.get(place, "description")
                    search = ( expression, "NWS/NOAA weather zone %s" % place )
                if not ( info or quiet ):
                    print( "[%s result %s]" % (action, description) )
            if not possibilities and not station[0]:
                message = "No FIPS code/census area match in the %s file.\n" % (
                    datafiles["places"][0]
                sys.stderr.write(message)
    # Assemble result URIs from the chosen station and zone records;
    # zone options other than centroid/description/station are treated
    # as additional URIs.
        uris["metar"] = stations.get( station[0], "metar" )
        for key,value in zones.items( zone[0] ):
            if key not in ("centroid", "description", "station"):
    # Ambiguous searches: list up to max_results matching sections with
    # their descriptions, or just report the match count beyond that.
        count = len(possibilities)
        if count <= max_results:
            print( "Your search is ambiguous, returning %s matches:" % count )
            for place in sorted(possibilities):
                if places.has_section(place):
                        places.get(place, "description")
                elif stations.has_section(place):
                        stations.get(place, "description")
                elif zones.has_section(place):
                        zones.get(place, "description")
                "Your search is too ambiguous, returning %s matches." % count
    # Build sorted distance lists from the full data set so the chosen
    # match's great-circle angle can be bucketed into one of the scores'
    # qualitative proximity ratings (the list initializations, sorting
    # and "dataset" binding are on lines missing from this chunk).
    for section in dataset.sections():
        if dataset.has_option(section, "station"):
                eval( dataset.get(section, "station") )[1]
        if dataset.has_option(section, "zone"):
            zonelist.append( eval( dataset.get(section, "zone") )[1] )
    scount = len(stationlist)
    zcount = len(zonelist)
    for score in scores:
        sranks.append( stationlist[ int( (1-score[0]) * scount ) ] )
        zranks.append( zonelist[ int( (1-score[0]) * zcount ) ] )
    # Report the chosen station (and zone), converting the stored angle
    # in radians to km/mi via the module-level radius constants.
    description = search[1]
    uris["description"] = description
        "%s\n%s" % ( description, "-" * len(description) )
        stations.get( station[0], "description" )
    km = radian_to_km*station[1]
    mi = radian_to_mi*station[1]
    if sranks and not description.startswith("ICAO station code "):
        for index in range(0, len(scores)):
            if station[1] >= sranks[index]:
                score = scores[index][1]
            " (proximity %s, %.3gkm, %.3gmi)" % ( score, km, mi )
    elif searchtype == "coordinates":
        print( " (%.3gkm, %.3gmi)" % (km, mi) )
        "%s: %s" % ( zone[0], zones.get( zone[0], "description" ) )
    km = radian_to_km*zone[1]
    mi = radian_to_mi*zone[1]
    if zranks and not description.startswith("NWS/NOAA weather zone "):
        for index in range(0, len(scores)):
            if zone[1] >= zranks[index]:
                score = scores[index][1]
            " (proximity %s, %.3gkm, %.3gmi)" % ( score, km, mi )
    elif searchtype == "coordinates" and zone[0]:
        print( " (%.3gkm, %.3gmi)" % (km, mi) )
    # Persist this search as a section in <cachedir>/searches, stamped
    # with the current time; a brand-new cache file is also stamped with
    # the newest data-file time so integrate_search_cache() can later
    # detect staleness.  (Several closing parens and the sorted()/insert
    # arguments are on lines missing from this chunk.)
    nowstamp = "%s (%s)" % (
        datetime.datetime.isoformat(
            datetime.datetime.fromtimestamp(now),
    search_cache = ["\n"]
    search_cache.append( "[%s]\n" % search[0] )
    search_cache.append( "cached = %s\n" % nowstamp )
    for uriname in sorted(uris.keys()):
        search_cache.append( "%s = %s\n" % ( uriname, uris[uriname] ) )
    real_cachedir = os.path.expanduser(cachedir)
    if not os.path.exists(real_cachedir):
        # Cache directory creation is best-effort only.
        try: os.makedirs(real_cachedir)
        except (IOError, OSError): pass
    scache_fn = os.path.join(real_cachedir, "searches")
    if not os.path.exists(scache_fn):
            [ x[1] for x in datafiles.values() ],
        thenstamp = "%s (%s)" % (
            datetime.datetime.isoformat(
                datetime.datetime.fromtimestamp(then),
        search_cache.insert(
            "# based on data files from: %s\n" % thenstamp
    # Only append the section when it is not already cached.
    scache_existing = configparser.ConfigParser()
    scache_existing.read(scache_fn, encoding="utf-8")
    if not scache_existing.has_section(search[0]):
        scache_fd = codecs.open(scache_fn, "a", "utf-8")
        scache_fd.writelines(search_cache)
        except (IOError, OSError): pass
def closest(position, nodes, fieldname, angle=None):
    # Find the node whose `fieldname` coordinates (radians) lie nearest
    # `position` within the given search angle; returns (name-or-None,
    # best angle).  NOTE(review): the docstring, loop header, exact-match
    # handling and best-match bookkeeping are on lines missing from this
    # chunk; visible code is kept verbatim.
    if not angle: angle = 2*math.pi
        if fieldname in nodes[name]:
            node = nodes[name][fieldname]
            # Cheap per-axis rejection (with longitude wraparound) before
            # the spherical law of cosines distance below.
            if node and abs( position[0]-node[0] ) < angle:
                if abs( position[1]-node[1] ) < angle \
                    or abs( abs( position[1]-node[1] ) - 2*math.pi ) < angle:
                    if position == node:
                    candidate = math.acos(
                        math.sin( position[0] ) * math.sin( node[0] ) \
                        + math.cos( position[0] ) \
                        * math.cos( node[0] ) \
                        * math.cos( position[1] - node[1] )
                    if candidate < angle:
    # Normalize the winning key to a string before returning it with the
    # tightest angle found.
    if match: match = str(match)
    return (match, angle)
def gecos(formatted):
    # Parse a "lat, lon" pair -- decimal degrees or degrees-minutes-
    # seconds ("DD-MM-SS") with an optional trailing n/e/s/w hemisphere
    # letter -- into a (lat, lon) tuple of radians.
    # NOTE(review): the original docstring line and the ").groups()"
    # close of the re.match call are on lines missing from this chunk.
    coordinates = formatted.split(",")
    for coordinate in range(0, 2):
        degrees, foo, minutes, bar, seconds, hemisphere = re.match(
            r"([\+-]?\d+\.?\d*)(-(\d+))?(-(\d+))?([ensw]?)$",
            coordinates[coordinate].strip().lower()
        value = float(degrees)
        if minutes: value += float(minutes)/60
        if seconds: value += float(seconds)/3600
        # Southern and western hemisphere values are negative.
        if hemisphere and hemisphere in "sw": value *= -1
        coordinates[coordinate] = math.radians(value)
    return tuple(coordinates)
1189 import codecs, configparser, csv, datetime, hashlib, os, re, sys, time
1190 import zipfile, zoneinfo
1191 for filename in os.listdir("."):
1192 if re.match("[0-9]{4}_Gaz_counties_national.zip$", filename):
1193 gcounties_an = filename
1194 gcounties_fn = filename[:-4] + ".txt"
1195 elif re.match("[0-9]{4}_Gaz_cousubs_national.zip$", filename):
1196 gcousubs_an = filename
1197 gcousubs_fn = filename[:-4] + ".txt"
1198 elif re.match("[0-9]{4}_Gaz_place_national.zip$", filename):
1199 gplace_an = filename
1200 gplace_fn = filename[:-4] + ".txt"
1201 elif re.match("[0-9]{4}_Gaz_zcta_national.zip$", filename):
1203 gzcta_fn = filename[:-4] + ".txt"
1204 elif re.match("bp[0-9]{2}[a-z]{2}[0-9]{2}.dbx$", filename):
1205 cpfzcf_fn = filename
1206 nsdcccc_fn = "nsd_cccc.txt"
1207 ourairports_fn = "airports.csv"
1208 overrides_fn = "overrides.conf"
1209 overrideslog_fn = "overrides.log"
1213 airports_fn = "airports"
1214 places_fn = "places"
1215 stations_fn = "stations"
# Build the provenance header that is written at the top of every generated
# data file: generator name, build date, and for each input file an MD5
# fingerprint plus its modification date.  (This span is the argument list
# of a larger %-format whose template string is partly outside this excerpt.)
# The build date honors SOURCE_DATE_EPOCH for reproducible builds, falling
# back to the current time.
# NOTE(review): MD5 is used purely as a content fingerprint here, not for
# security — fine as-is.
# NOTE(review): datetime.datetime.utcfromtimestamp() is deprecated since
# Python 3.12; datetime.datetime.fromtimestamp(..., tz=datetime.timezone.utc)
# is the modern equivalent — confirm the supported Python floor before changing.
1220 # generated by %s on %s from these public domain sources:
1222 # https://www.census.gov/geographies/reference-files/time-series/geo/gazetteer-files.html
1228 # https://www.weather.gov/gis/ZoneCounty/
1231 # https://tgftp.nws.noaa.gov/data/
1234 # https://ourairports.com/data/
1237 # ...and these manually-generated or hand-compiled adjustments:
1243 os.path.basename( sys.argv[0] ),
1244 datetime.date.isoformat(
1245 datetime.datetime.utcfromtimestamp( int(os.environ.get('SOURCE_DATE_EPOCH', time.time())) )
1247 hashlib.md5( open(gcounties_an, "rb").read() ).hexdigest(),
1248 datetime.date.isoformat(
1249 datetime.datetime.utcfromtimestamp( os.path.getmtime(gcounties_an) )
1252 hashlib.md5( open(gcousubs_an, "rb").read() ).hexdigest(),
1253 datetime.date.isoformat(
1254 datetime.datetime.utcfromtimestamp( os.path.getmtime(gcousubs_an) )
1257 hashlib.md5( open(gplace_an, "rb").read() ).hexdigest(),
1258 datetime.date.isoformat(
1259 datetime.datetime.utcfromtimestamp( os.path.getmtime(gplace_an) )
1262 hashlib.md5( open(gzcta_an, "rb").read() ).hexdigest(),
1263 datetime.date.isoformat(
1264 datetime.datetime.utcfromtimestamp( os.path.getmtime(gzcta_an) )
1267 hashlib.md5( open(cpfzcf_fn, "rb").read() ).hexdigest(),
1268 datetime.date.isoformat(
1269 datetime.datetime.utcfromtimestamp( os.path.getmtime(cpfzcf_fn) )
1272 hashlib.md5( open(nsdcccc_fn, "rb").read() ).hexdigest(),
1273 datetime.date.isoformat(
1274 datetime.datetime.utcfromtimestamp( os.path.getmtime(nsdcccc_fn) )
1277 hashlib.md5( open(ourairports_fn, "rb").read() ).hexdigest(),
1278 datetime.date.isoformat(
1279 datetime.datetime.utcfromtimestamp( os.path.getmtime(ourairports_fn) )
1282 hashlib.md5( open(overrides_fn, "rb").read() ).hexdigest(),
1283 datetime.date.isoformat(
1284 datetime.datetime.utcfromtimestamp( os.path.getmtime(overrides_fn) )
1287 hashlib.md5( open(slist_fn, "rb").read() ).hexdigest(),
1288 datetime.date.isoformat(
1289 datetime.datetime.utcfromtimestamp( os.path.getmtime(slist_fn) )
1292 hashlib.md5( open(zlist_fn, "rb").read() ).hexdigest(),
1293 datetime.date.isoformat(
1294 datetime.datetime.utcfromtimestamp( os.path.getmtime(zlist_fn) )
# Read the three Census gazetteer files (counties, county subdivisions,
# places) straight out of their zip archives.  Each is tab-separated with a
# header row; the header is read first so fields can be addressed by column
# name rather than position.  Each qualifying row adds/updates an entry in
# the places dict keyed "fips<GEOID>" with a "centroid" (parsed via gecos()
# from INTPTLAT/INTPTLONG) and a "description" of "<NAME>, <USPS state>".
# NOTE(review): columns.index(...) is re-evaluated for every row; hoisting
# the five indices above each loop would be a cheap win — left as-is since
# surrounding lines are elided in this excerpt.
1303 message = "Reading %s:%s..." % (gcounties_an, gcounties_fn)
1304 sys.stdout.write(message)
1307 gcounties = zipfile.ZipFile(gcounties_an).open(gcounties_fn, "r")
1308 columns = gcounties.readline().decode("utf-8").strip().split("\t")
1309 for line in gcounties:
1310 fields = line.decode("utf-8").strip().split("\t")
1311 f_geoid = fields[ columns.index("GEOID") ].strip()
1312 f_name = fields[ columns.index("NAME") ].strip()
1313 f_usps = fields[ columns.index("USPS") ].strip()
1314 f_intptlat = fields[ columns.index("INTPTLAT") ].strip()
1315 f_intptlong = fields[ columns.index("INTPTLONG") ].strip()
1316 if f_geoid and f_name and f_usps and f_intptlat and f_intptlong:
1317 fips = "fips%s" % f_geoid
1318 if fips not in places: places[fips] = {}
1319 places[fips]["centroid"] = gecos(
1320 "%s,%s" % (f_intptlat, f_intptlong)
1322 places[fips]["description"] = "%s, %s" % (f_name, f_usps)
1325 print("done (%s lines)." % count)
# County subdivisions: identical shape to the counties pass above.
1326 message = "Reading %s:%s..." % (gcousubs_an, gcousubs_fn)
1327 sys.stdout.write(message)
1330 gcousubs = zipfile.ZipFile(gcousubs_an).open(gcousubs_fn, "r")
1331 columns = gcousubs.readline().decode("utf-8").strip().split("\t")
1332 for line in gcousubs:
1333 fields = line.decode("utf-8").strip().split("\t")
1334 f_geoid = fields[ columns.index("GEOID") ].strip()
1335 f_name = fields[ columns.index("NAME") ].strip()
1336 f_usps = fields[ columns.index("USPS") ].strip()
1337 f_intptlat = fields[ columns.index("INTPTLAT") ].strip()
1338 f_intptlong = fields[ columns.index("INTPTLONG") ].strip()
1339 if f_geoid and f_name and f_usps and f_intptlat and f_intptlong:
1340 fips = "fips%s" % f_geoid
1341 if fips not in places: places[fips] = {}
1342 places[fips]["centroid"] = gecos(
1343 "%s,%s" % (f_intptlat, f_intptlong)
1345 places[fips]["description"] = "%s, %s" % (f_name, f_usps)
1348 print("done (%s lines)." % count)
# Incorporated places: same shape again (loop header elided in excerpt).
1349 message = "Reading %s:%s..." % (gplace_an, gplace_fn)
1350 sys.stdout.write(message)
1353 gplace = zipfile.ZipFile(gplace_an).open(gplace_fn, "r")
1354 columns = gplace.readline().decode("utf-8").strip().split("\t")
1356 fields = line.decode("utf-8").strip().split("\t")
1357 f_geoid = fields[ columns.index("GEOID") ].strip()
1358 f_name = fields[ columns.index("NAME") ].strip()
1359 f_usps = fields[ columns.index("USPS") ].strip()
1360 f_intptlat = fields[ columns.index("INTPTLAT") ].strip()
1361 f_intptlong = fields[ columns.index("INTPTLONG") ].strip()
1362 if f_geoid and f_name and f_usps and f_intptlat and f_intptlong:
1363 fips = "fips%s" % f_geoid
1364 if fips not in places: places[fips] = {}
1365 places[fips]["centroid"] = gecos(
1366 "%s,%s" % (f_intptlat, f_intptlong)
1368 places[fips]["description"] = "%s, %s" % (f_name, f_usps)
1371 print("done (%s lines)." % count)
# Seed the stations dict from the curated station list (one ICAO id per
# line, "#" starts a comment), attaching the NWS decoded-METAR URL for each.
1372 message = "Reading %s..." % slist_fn
1373 sys.stdout.write(message)
1376 slist = codecs.open(slist_fn, "r", "utf-8")
1378 icao = line.split("#")[0].strip()
1381 "metar": "https://tgftp.nws.noaa.gov/data/observations/"\
1382 + "metar/decoded/%s.TXT" % icao.upper()
1386 print("done (%s lines)." % count)
# Enrich known stations from the ICAO NSD file (semicolon-separated):
# description assembled from name/state/country fields, location parsed
# from either fields 7:9 or, as a fallback, fields 5:7.
# NOTE(review): the two alternative lat/lon column pairs presumably handle
# format variants of nsd_cccc.txt — confirm against the NSD spec.
1387 message = "Reading %s..." % nsdcccc_fn
1388 sys.stdout.write(message)
1391 nsdcccc = codecs.open(nsdcccc_fn, "r", "utf-8")
1392 for line in nsdcccc:
1394 fields = line.split(";")
1395 icao = fields[0].strip().lower()
1396 if icao in stations:
1398 name = " ".join( fields[3].strip().title().split() )
1399 if name: description.append(name)
1400 st = fields[4].strip()
1401 if st: description.append(st)
1402 country = " ".join( fields[5].strip().title().split() )
1403 if country: description.append(country)
1405 stations[icao]["description"] = ", ".join(description)
1406 lat, lon = fields[7:9]
1408 stations[icao]["location"] = gecos( "%s,%s" % (lat, lon) )
1409 elif "location" not in stations[icao]:
1410 lat, lon = fields[5:7]
1412 stations[icao]["location"] = gecos( "%s,%s" % (lat, lon) )
1415 print("done (%s lines)." % count)
# Fill remaining gaps from the OurAirports CSV: map 3-letter IATA codes to
# their ICAO station, and supply description/location for stations still
# missing them.  Column positions (12=ICAO, 13=IATA, 10=municipality, …)
# follow the ourairports.com airports.csv layout.
1416 message = "Reading %s..." % ourairports_fn
1417 sys.stdout.write(message)
1420 ourairports = open(ourairports_fn, "r")
1421 for row in csv.reader(ourairports):
1422 icao = row[12].lower()
1423 if icao in stations:
1424 iata = row[13].lower()
1425 if len(iata) == 3: airports[iata] = { "station": icao }
1426 if "description" not in stations[icao]:
1429 if name: description.append(name)
1430 municipality = row[10]
1431 if municipality: description.append(municipality)
1436 c,r = region.split("-", 1)
1437 if c == country: region = r
1438 description.append(region)
1440 description.append(country)
1442 stations[icao]["description"] = ", ".join(description)
1443 if "location" not in stations[icao]:
1448 stations[icao]["location"] = gecos(
1449 "%s,%s" % (lat, lon)
1453 print("done (%s lines)." % count)
# Read the curated zone list (same "#"-comment convention as the station
# list) to seed the zones dict.
1454 message = "Reading %s..." % zlist_fn
1455 sys.stdout.write(message)
1458 zlist = codecs.open(zlist_fn, "r", "utf-8")
1460 line = line.split("#")[0].strip()
1465 print("done (%s lines)." % count)
# Parse the NWS county/zone correlation file (pipe-separated, 11 fields).
# For each zone ("<state>z<zone#>", lowercased) this attaches the full set
# of tgftp.nws.noaa.gov product URLs.  Some products are filed per-zone and
# some per-county: countycode ("<state>c<last 3 FIPS digits>") is used for
# the flash-flood/flood/thunderstorm/severe-weather/tornado products, the
# zone id for the rest.
1466 message = "Reading %s..." % cpfzcf_fn
1467 sys.stdout.write(message)
1471 cpfzcf = codecs.open(cpfzcf_fn, "r", "utf-8")
1473 fields = line.strip().split("|")
1474 if len(fields) == 11 \
1475 and fields[0] and fields[1] and fields[9] and fields[10]:
1476 zone = "z".join( fields[:2] ).lower()
1479 description = fields[3].strip()
1480 fips = "fips%s"%fields[6]
1481 countycode = "%sc%s" % (state.lower(), fips[-3:])
1483 zones[zone]["coastal_flood_statement"] = (
1484 "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1485 "flood/coastal/%s/%s.txt" % (state.lower(), zone))
1486 zones[zone]["flash_flood_statement"] = (
1487 "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1488 "flash_flood/statement/%s/%s.txt"
1489 % (state.lower(), countycode))
1490 zones[zone]["flash_flood_warning"] = (
1491 "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1492 "flash_flood/warning/%s/%s.txt"
1493 % (state.lower(), countycode))
1494 zones[zone]["flash_flood_watch"] = (
1495 "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1496 "flash_flood/watch/%s/%s.txt" % (state.lower(), zone))
1497 zones[zone]["flood_warning"] = (
1498 "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1499 "flood/warning/%s/%s.txt"
1500 % (state.lower(), countycode))
1501 zones[zone]["severe_thunderstorm_warning"] = (
1502 "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1503 "thunderstorm/%s/%s.txt" % (state.lower(), countycode))
1504 zones[zone]["severe_weather_statement"] = (
1505 "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1506 "severe_weather_stmt/%s/%s.txt"
1507 % (state.lower(), countycode))
1508 zones[zone]["short_term_forecast"] = (
1509 "https://tgftp.nws.noaa.gov/data/forecasts/nowcast/"
1510 "%s/%s.txt" % (state.lower(), zone))
1511 zones[zone]["special_weather_statement"] = (
1512 "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1513 "special_weather_stmt/%s/%s.txt"
1514 % (state.lower(), zone))
1515 zones[zone]["state_forecast"] = (
1516 "https://tgftp.nws.noaa.gov/data/forecasts/state/"
1517 "%s/%s.txt" % (state.lower(), zone))
1518 zones[zone]["tornado"] = (
1519 "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1520 "tornado/%s/%s.txt" % (state.lower(), countycode))
1521 zones[zone]["urgent_weather_message"] = (
1522 "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1523 "non_precip/%s/%s.txt" % (state.lower(), zone))
1524 zones[zone]["zone_forecast"] = (
1525 "https://tgftp.nws.noaa.gov/data/forecasts/zone/"
1526 "%s/%s.txt" % (state.lower(), zone))
# Map the file's time-zone code to an IANA tz name; codes with no known
# mapping fall through to an empty string (flagged later by the QA pass).
1529 zones[zone]["tz"] = "US/Alaska"
1530 elif tzcode == "AH":
1531 zones[zone]["tz"] = "US/Aleutian"
1532 elif tzcode in ("C", "CE", "CM"):
1533 zones[zone]["tz"] = "US/Central"
1534 elif tzcode in ("E", "e"):
1535 zones[zone]["tz"] = "US/Eastern"
1537 zones[zone]["tz"] = "Pacific/Guadalcanal"
1539 zones[zone]["tz"] = "Pacific/Guam"
1541 zones[zone]["tz"] = "US/Hawaii"
1543 zones[zone]["tz"] = "Japan"
1545 zones[zone]["tz"] = "Pacific/Kwajalein"
1546 elif tzcode in ("M", "MC", "MP"):
1547 zones[zone]["tz"] = "US/Mountain"
1549 zones[zone]["tz"] = "US/Arizona"
1551 zones[zone]["tz"] = "US/Pacific"
1553 zones[zone]["tz"] = "US/Samoa"
1555 zones[zone]["tz"] = "America/Virgin"
1557 zones[zone]["tz"] = ""
# Normalize the human-readable description (ensuring a " County" suffix or
# ", <county> County" qualifier) and record the centroid from fields 9:11,
# falling back to the county FIPS centroid when the zone has none.
1560 if description.endswith(county):
1561 description += " County"
1563 description += ", %s County" % county
1564 description += ", %s, US" % state
1565 zones[zone]["description"] = description
1566 zones[zone]["centroid"] = gecos( ",".join( fields[9:11] ) )
1567 if fips in places and not zones[zone]["centroid"]:
1568 zones[zone]["centroid"] = places[fips]["centroid"]
1571 print("done (%s lines)." % count)
# Read the ZCTA (ZIP Code Tabulation Area) gazetteer from its zip archive:
# same tab-separated, header-addressed pattern as the places passes, but
# only a centroid is recorded per GEOID (ZCTAs have no name/state columns).
1572 message = "Reading %s:%s..." % (gzcta_an, gzcta_fn)
1573 sys.stdout.write(message)
1576 gzcta = zipfile.ZipFile(gzcta_an).open(gzcta_fn, "r")
1577 columns = gzcta.readline().decode("utf-8").strip().split("\t")
1579 fields = line.decode("utf-8").strip().split("\t")
1580 f_geoid = fields[ columns.index("GEOID") ].strip()
1581 f_intptlat = fields[ columns.index("INTPTLAT") ].strip()
1582 f_intptlong = fields[ columns.index("INTPTLONG") ].strip()
1583 if f_geoid and f_intptlat and f_intptlong:
1584 if f_geoid not in zctas: zctas[f_geoid] = {}
1585 zctas[f_geoid]["centroid"] = gecos(
1586 "%s,%s" % (f_intptlat, f_intptlong)
1590 print("done (%s lines)." % count)
# Apply hand-maintained corrections from overrides.conf.  The section name's
# shape selects the target table: 3 letters = airport, 4 alphanumerics =
# station, 5 digits = ZCTA, "XXz###" = zone, "fips..." = place.  A leading
# "-" on a section name requests deletion.  Every action is appended to an
# in-memory log and written to overrides.log at the end.
# NOTE(review): eval() is applied to "centroid"/"location" values from the
# overrides file.  That is only acceptable because overrides.conf is a
# local, maintainer-authored input to this offline indexing step — never
# point this at untrusted data (ast.literal_eval would be the safe swap).
1591 message = "Reading %s..." % overrides_fn
1592 sys.stdout.write(message)
1598 overrides = configparser.ConfigParser()
1599 overrides.read_file( codecs.open(overrides_fn, "r", "utf8") )
1601 for section in overrides.sections():
1604 if section.startswith("-"):
1605 section = section[1:]
1607 else: delete = False
1608 if re.match("[A-Za-z]{3}$", section):
1610 if section in airports:
1611 del( airports[section] )
1612 logact = "removed airport %s" % section
1615 logact = "tried to remove nonexistent airport %s" % section
1617 if section in airports:
1618 logact = "changed airport %s" % section
1621 airports[section] = {}
1622 logact = "added airport %s" % section
1624 for key,value in overrides.items(section):
1625 if key in airports[section]: chgopt += 1
1627 if key in ("centroid", "location"):
1628 airports[section][key] = eval(value)
1630 airports[section][key] = value
1631 if addopt and chgopt:
1632 logact += " (+%s/!%s options)" % (addopt, chgopt)
1633 elif addopt: logact += " (+%s options)" % addopt
1634 elif chgopt: logact += " (!%s options)" % chgopt
# Stations: same add/change/remove logic as airports, keyed by 4-char id.
1635 elif re.match("[A-Za-z0-9]{4}$", section):
1637 if section in stations:
1638 del( stations[section] )
1639 logact = "removed station %s" % section
1642 logact = "tried to remove nonexistent station %s" % section
1644 if section in stations:
1645 logact = "changed station %s" % section
1648 stations[section] = {}
1649 logact = "added station %s" % section
1651 for key,value in overrides.items(section):
1652 if key in stations[section]: chgopt += 1
1654 if key in ("centroid", "location"):
1655 stations[section][key] = eval(value)
1657 stations[section][key] = value
1658 if addopt and chgopt:
1659 logact += " (+%s/!%s options)" % (addopt, chgopt)
1660 elif addopt: logact += " (+%s options)" % addopt
1661 elif chgopt: logact += " (!%s options)" % chgopt
# ZCTAs: keyed by 5-digit code.
1662 elif re.match("[0-9]{5}$", section):
1664 if section in zctas:
1665 del( zctas[section] )
1666 logact = "removed zcta %s" % section
1669 logact = "tried to remove nonexistent zcta %s" % section
1671 if section in zctas:
1672 logact = "changed zcta %s" % section
1676 logact = "added zcta %s" % section
1678 for key,value in overrides.items(section):
1679 if key in zctas[section]: chgopt += 1
1681 if key in ("centroid", "location"):
1682 zctas[section][key] = eval(value)
1684 zctas[section][key] = value
1685 if addopt and chgopt:
1686 logact += " (+%s/!%s options)" % (addopt, chgopt)
1687 elif addopt: logact += " (+%s options)" % addopt
1688 elif chgopt: logact += " (!%s options)" % chgopt
# Zones: keyed by "<2 letters>z<3 digits>".
1689 elif re.match("[A-Za-z]{2}[Zz][0-9]{3}$", section):
1691 if section in zones:
1692 del( zones[section] )
1693 logact = "removed zone %s" % section
1696 logact = "tried to remove nonexistent zone %s" % section
1698 if section in zones:
1699 logact = "changed zone %s" % section
1703 logact = "added zone %s" % section
1705 for key,value in overrides.items(section):
1706 if key in zones[section]: chgopt += 1
1708 if key in ("centroid", "location"):
1709 zones[section][key] = eval(value)
1711 zones[section][key] = value
1712 if addopt and chgopt:
1713 logact += " (+%s/!%s options)" % (addopt, chgopt)
1714 elif addopt: logact += " (+%s options)" % addopt
1715 elif chgopt: logact += " (!%s options)" % chgopt
# Places: keyed by "fips<digits>".
1716 elif re.match("fips[0-9]+$", section):
1718 if section in places:
1719 del( places[section] )
1720 logact = "removed place %s" % section
1723 logact = "tried to remove nonexistent place %s" % section
1725 if section in places:
1726 logact = "changed place %s" % section
1729 places[section] = {}
1730 logact = "added place %s" % section
1732 for key,value in overrides.items(section):
1733 if key in places[section]: chgopt += 1
1735 if key in ("centroid", "location"):
1736 places[section][key] = eval(value)
1738 places[section][key] = value
1739 if addopt and chgopt:
1740 logact += " (+%s/!%s options)" % (addopt, chgopt)
1741 elif addopt: logact += " (+%s options)" % addopt
1742 elif chgopt: logact += " (!%s options)" % chgopt
1744 overrideslog.append("%s\n" % logact)
# Rotate any previous overrides.log aside and write the new one with a
# copyright header stamped with the current year.
1746 if os.path.exists(overrideslog_fn):
1747 os.rename(overrideslog_fn, "%s_old"%overrideslog_fn)
1748 overrideslog_fd = codecs.open(overrideslog_fn, "w", "utf8")
1750 overrideslog_fd.write(
1751 '# Copyright (c) %s Jeremy Stanley <fungi@yuggoth.org>. Permission to\n'
1752 '# use, copy, modify, and distribute this software is granted under terms\n'
1753 '# provided in the LICENSE file distributed with this software.\n\n'
1754 % time.gmtime().tm_year)
1755 overrideslog_fd.writelines(overrideslog)
1756 overrideslog_fd.close()
1757 print("done (%s overridden sections: +%s/-%s/!%s)." % (
# Correlation pass: for every place/ZCTA find the nearest station (within a
# 0.1-radian cutoff) and nearest zone; for every located station and every
# zone centroid find its nearest counterpart.  `estimate` is an upper bound
# on the number of closest() searches, used only to drive the textual
# 0..100% progress meter (the `milestones` list of 51 two-percent steps is
# consumed as levels are reached, so each percentage prints at most once).
1763 estimate = 2*len(places) + len(stations) + 2*len(zctas) + len(zones)
1765 "Correlating places, stations, ZCTAs and zones (upper bound is %s):" % \
1769 milestones = list( range(51) )
1771 sys.stdout.write(message)
# Places: nearest station, then nearest zone, from the place centroid.
1774 centroid = places[fips]["centroid"]
1776 station = closest(centroid, stations, "location", 0.1)
1778 places[fips]["station"] = station
1781 level = int(50*count/estimate)
1782 if level in milestones:
1783 for remaining in milestones[:milestones.index(level)+1]:
1786 sys.stdout.write(message)
1789 message = "%s%%" % (remaining*2,)
1790 sys.stdout.write(message)
1792 milestones.remove(remaining)
1794 zone = closest(centroid, zones, "centroid", 0.1)
1796 places[fips]["zone"] = zone
1799 level = int(50*count/estimate)
1800 if level in milestones:
1801 for remaining in milestones[:milestones.index(level)+1]:
1804 sys.stdout.write(message)
1807 message = "%s%%" % (remaining*2,)
1808 sys.stdout.write(message)
1810 milestones.remove(remaining)
# Stations that have a location: nearest zone.
1811 for station in stations:
1812 if "location" in stations[station]:
1813 location = stations[station]["location"]
1815 zone = closest(location, zones, "centroid", 0.1)
1817 stations[station]["zone"] = zone
1820 level = int(50*count/estimate)
1821 if level in milestones:
1822 for remaining in milestones[:milestones.index(level)+1]:
1825 sys.stdout.write(message)
1828 message = "%s%%" % (remaining*2,)
1829 sys.stdout.write(message)
1831 milestones.remove(remaining)
# ZCTAs: nearest station, then nearest zone, from the ZCTA centroid.
1832 for zcta in zctas.keys():
1833 centroid = zctas[zcta]["centroid"]
1835 station = closest(centroid, stations, "location", 0.1)
1837 zctas[zcta]["station"] = station
1840 level = int(50*count/estimate)
1841 if level in milestones:
1842 for remaining in milestones[ : milestones.index(level)+1 ]:
1845 sys.stdout.write(message)
1848 message = "%s%%" % (remaining*2,)
1849 sys.stdout.write(message)
1851 milestones.remove(remaining)
1853 zone = closest(centroid, zones, "centroid", 0.1)
1855 zctas[zcta]["zone"] = zone
1858 level = int(50*count/estimate)
1859 if level in milestones:
1860 for remaining in milestones[:milestones.index(level)+1]:
1863 sys.stdout.write(message)
1866 message = "%s%%" % (remaining*2,)
1867 sys.stdout.write(message)
1869 milestones.remove(remaining)
# Zones with a centroid: nearest station.
1870 for zone in zones.keys():
1871 if "centroid" in zones[zone]:
1872 centroid = zones[zone]["centroid"]
1874 station = closest(centroid, stations, "location", 0.1)
1876 zones[zone]["station"] = station
1879 level = int(50*count/estimate)
1880 if level in milestones:
1881 for remaining in milestones[:milestones.index(level)+1]:
1884 sys.stdout.write(message)
1887 message = "%s%%" % (remaining*2,)
1888 sys.stdout.write(message)
1890 milestones.remove(remaining)
# Flush any milestones never reached (estimate is an upper bound) so the
# meter always ends at 100%.
1891 for remaining in milestones:
1894 sys.stdout.write(message)
1897 message = "%s%%" % (remaining*2,)
1898 sys.stdout.write(message)
1900 print("\n done (%s correlations)." % count)
# Serialize the five correlated tables (airports, places, stations, zctas,
# zones) as INI-style files.  Each pass rotates any existing output aside
# as "<name>_old", writes the provenance header, then one [section] per key
# with sorted options.  Floats are written with 7 decimal places and tuples
# are expanded element-by-element so the values round-trip through eval()
# when the generated files are read back.
1901 message = "Writing %s..." % airports_fn
1902 sys.stdout.write(message)
1905 if os.path.exists(airports_fn):
1906 os.rename(airports_fn, "%s_old"%airports_fn)
1907 airports_fd = codecs.open(airports_fn, "w", "utf8")
1908 airports_fd.write(header)
1909 for airport in sorted( airports.keys() ):
1910 airports_fd.write("\n\n[%s]" % airport)
1911 for key, value in sorted( airports[airport].items() ):
1912 if type(value) is float: value = "%.7f"%value
1913 elif type(value) is tuple:
1915 for element in value:
1916 if type(element) is float: elements.append("%.7f"%element)
1917 else: elements.append( repr(element) )
1918 value = "(%s)"%", ".join(elements)
1919 airports_fd.write( "\n%s = %s" % (key, value) )
1921 airports_fd.write("\n")
1923 print("done (%s sections)." % count)
1924 message = "Writing %s..." % places_fn
1925 sys.stdout.write(message)
1928 if os.path.exists(places_fn):
1929 os.rename(places_fn, "%s_old"%places_fn)
1930 places_fd = codecs.open(places_fn, "w", "utf8")
1931 places_fd.write(header)
1932 for fips in sorted( places.keys() ):
1933 places_fd.write("\n\n[%s]" % fips)
1934 for key, value in sorted( places[fips].items() ):
1935 if type(value) is float: value = "%.7f"%value
1936 elif type(value) is tuple:
1938 for element in value:
1939 if type(element) is float: elements.append("%.7f"%element)
1940 else: elements.append( repr(element) )
1941 value = "(%s)"%", ".join(elements)
1942 places_fd.write( "\n%s = %s" % (key, value) )
1944 places_fd.write("\n")
1946 print("done (%s sections)." % count)
1947 message = "Writing %s..." % stations_fn
1948 sys.stdout.write(message)
1951 if os.path.exists(stations_fn):
1952 os.rename(stations_fn, "%s_old"%stations_fn)
1953 stations_fd = codecs.open(stations_fn, "w", "utf-8")
1954 stations_fd.write(header)
1955 for station in sorted( stations.keys() ):
1956 stations_fd.write("\n\n[%s]" % station)
1957 for key, value in sorted( stations[station].items() ):
1958 if type(value) is float: value = "%.7f"%value
1959 elif type(value) is tuple:
1961 for element in value:
1962 if type(element) is float: elements.append("%.7f"%element)
1963 else: elements.append( repr(element) )
1964 value = "(%s)"%", ".join(elements)
# Stations alone may carry bytes values; decode before writing text.
1965 if type(value) is bytes:
1966 value = value.decode("utf-8")
1967 stations_fd.write( "\n%s = %s" % (key, value) )
1969 stations_fd.write("\n")
1971 print("done (%s sections)." % count)
1972 message = "Writing %s..." % zctas_fn
1973 sys.stdout.write(message)
1976 if os.path.exists(zctas_fn):
1977 os.rename(zctas_fn, "%s_old"%zctas_fn)
1978 zctas_fd = codecs.open(zctas_fn, "w", "utf8")
1979 zctas_fd.write(header)
1980 for zcta in sorted( zctas.keys() ):
1981 zctas_fd.write("\n\n[%s]" % zcta)
1982 for key, value in sorted( zctas[zcta].items() ):
1983 if type(value) is float: value = "%.7f"%value
1984 elif type(value) is tuple:
1986 for element in value:
1987 if type(element) is float: elements.append("%.7f"%element)
1988 else: elements.append( repr(element) )
1989 value = "(%s)"%", ".join(elements)
1990 zctas_fd.write( "\n%s = %s" % (key, value) )
1992 zctas_fd.write("\n")
1994 print("done (%s sections)." % count)
1995 message = "Writing %s..." % zones_fn
1996 sys.stdout.write(message)
1999 if os.path.exists(zones_fn):
2000 os.rename(zones_fn, "%s_old"%zones_fn)
2001 zones_fd = codecs.open(zones_fn, "w", "utf8")
2002 zones_fd.write(header)
2003 for zone in sorted( zones.keys() ):
2004 zones_fd.write("\n\n[%s]" % zone)
2005 for key, value in sorted( zones[zone].items() ):
2006 if type(value) is float: value = "%.7f"%value
2007 elif type(value) is tuple:
2009 for element in value:
2010 if type(element) is float: elements.append("%.7f"%element)
2011 else: elements.append( repr(element) )
2012 value = "(%s)"%", ".join(elements)
2013 zones_fd.write( "\n%s = %s" % (key, value) )
2015 zones_fd.write("\n")
2017 print("done (%s sections)." % count)
# QA pass: re-read the files just written through configparser (proving
# they parse) and audit each table for missing or suspicious data —
# missing centroids/descriptions/locations/METAR URLs, airports pointing
# at unknown stations, zones whose tz is not a valid IANA name per
# zoneinfo.available_timezones(), missing forecasts, and pairs of zone
# centroids within one kilometer of each other (likely duplicates).
# Details go to the QA log file; per-category counts are summarized on
# stdout at the end.
2018 message = "Starting QA check..."
2019 sys.stdout.write(message)
2021 airports = configparser.ConfigParser()
2022 airports.read(airports_fn, encoding="utf-8")
2023 places = configparser.ConfigParser()
2024 places.read(places_fn, encoding="utf-8")
2025 stations = configparser.ConfigParser()
2026 stations.read(stations_fn, encoding="utf-8")
2027 zctas = configparser.ConfigParser()
2028 zctas.read(zctas_fn, encoding="utf-8")
2029 zones = configparser.ConfigParser()
2030 zones.read(zones_fn, encoding="utf-8")
2032 places_nocentroid = 0
2033 places_nodescription = 0
2034 for place in sorted( places.sections() ):
2035 if not places.has_option(place, "centroid"):
2036 qalog.append("%s: no centroid\n" % place)
2037 places_nocentroid += 1
2038 if not places.has_option(place, "description"):
2039 qalog.append("%s: no description\n" % place)
2040 places_nodescription += 1
2041 stations_nodescription = 0
2042 stations_nolocation = 0
2043 stations_nometar = 0
2044 for station in sorted( stations.sections() ):
2045 if not stations.has_option(station, "description"):
2046 qalog.append("%s: no description\n" % station)
2047 stations_nodescription += 1
2048 if not stations.has_option(station, "location"):
2049 qalog.append("%s: no location\n" % station)
2050 stations_nolocation += 1
2051 if not stations.has_option(station, "metar"):
2052 qalog.append("%s: no metar\n" % station)
2053 stations_nometar += 1
2054 airports_badstation = 0
2055 airports_nostation = 0
2056 for airport in sorted( airports.sections() ):
2057 if not airports.has_option(airport, "station"):
2058 qalog.append("%s: no station\n" % airport)
2059 airports_nostation += 1
2061 station = airports.get(airport, "station")
2062 if station not in stations.sections():
2063 qalog.append( "%s: bad station %s\n" % (airport, station) )
2064 airports_badstation += 1
2065 zctas_nocentroid = 0
2066 for zcta in sorted( zctas.sections() ):
2067 if not zctas.has_option(zcta, "centroid"):
2068 qalog.append("%s: no centroid\n" % zcta)
2069 zctas_nocentroid += 1
2070 zones_nocentroid = 0
2071 zones_nodescription = 0
2073 zones_noforecast = 0
2074 zones_overlapping = 0
# First collect every zone centroid into a lookup table, then for each
# zone search that table (minus the zone itself) for a neighbor closer
# than 1 km (nearest[1] is in radians; radian_to_km converts).
# NOTE(review): eval() on centroid strings read back from the generated
# files — safe only because this tool wrote them moments ago.
2076 for zone in zones.sections():
2077 if zones.has_option(zone, "centroid"):
2079 "centroid": eval( zones.get(zone, "centroid") )
2081 for zone in sorted( zones.sections() ):
2082 if zones.has_option(zone, "centroid"):
2083 zonetable_local = zonetable.copy()
2084 del( zonetable_local[zone] )
2085 centroid = eval( zones.get(zone, "centroid") )
2087 nearest = closest(centroid, zonetable_local, "centroid", 0.1)
2088 if nearest[1]*radian_to_km < 1:
2089 qalog.append( "%s: within one km of %s\n" % (
2093 zones_overlapping += 1
2095 qalog.append("%s: no centroid\n" % zone)
2096 zones_nocentroid += 1
2097 if not zones.has_option(zone, "description"):
2098 qalog.append("%s: no description\n" % zone)
2099 zones_nodescription += 1
2100 if not zones.has_option(zone, "tz") or not zones.get(
2101 zone, "tz") in zoneinfo.available_timezones():
2102 qalog.append("%s: no time zone\n" % zone)
2104 if not zones.has_option(zone, "zone_forecast"):
2105 qalog.append("%s: no forecast\n" % zone)
2106 zones_noforecast += 1
# Rotate and write the QA log with the stamped copyright header, then
# print the per-category summary (or "no issues found.").
2107 if os.path.exists(qalog_fn):
2108 os.rename(qalog_fn, "%s_old"%qalog_fn)
2109 qalog_fd = codecs.open(qalog_fn, "w", "utf8")
2112 '# Copyright (c) %s Jeremy Stanley <fungi@yuggoth.org>. Permission to\n'
2113 '# use, copy, modify, and distribute this software is granted under terms\n'
2114 '# provided in the LICENSE file distributed with this software.\n\n'
2115 % time.gmtime().tm_year)
2116 qalog_fd.writelines(qalog)
2119 print("issues found (see %s for details):"%qalog_fn)
2120 if airports_badstation:
2121 print(" %s airports with invalid station"%airports_badstation)
2122 if airports_nostation:
2123 print(" %s airports with no station"%airports_nostation)
2124 if places_nocentroid:
2125 print(" %s places with no centroid"%places_nocentroid)
2126 if places_nodescription:
2127 print(" %s places with no description"%places_nodescription)
2128 if stations_nodescription:
2129 print(" %s stations with no description"%stations_nodescription)
2130 if stations_nolocation:
2131 print(" %s stations with no location"%stations_nolocation)
2132 if stations_nometar:
2133 print(" %s stations with no METAR"%stations_nometar)
2134 if zctas_nocentroid:
2135 print(" %s ZCTAs with no centroid"%zctas_nocentroid)
2136 if zones_nocentroid:
2137 print(" %s zones with no centroid"%zones_nocentroid)
2138 if zones_nodescription:
2139 print(" %s zones with no description"%zones_nodescription)
2141 print(" %s zones with no time zone"%zones_notz)
2142 if zones_noforecast:
2143 print(" %s zones with no forecast"%zones_noforecast)
2144 if zones_overlapping:
2145 print(" %s zones within one km of another"%zones_overlapping)
2146 else: print("no issues found.")
2147 print("Indexing complete!")