1 """Contains various object definitions needed by the weather utility."""
# NOTE(review): this listing elides lines (the embedded file numbering jumps,
# e.g. 6 -> 9 below); the closing quotes of weather_copyright are among the
# missing lines, so this text is a fragment and not runnable as-is.
3 weather_copyright = """\
4 # Copyright (c) 2006-2024 Jeremy Stanley <fungi@yuggoth.org>. Permission to
5 # use, copy, modify, and distribute this software is granted under terms
6 # provided in the LICENSE file distributed with this software.
# Version string (reported by the option parser's --version handling below,
# where it is interpolated into verstring).
9 weather_version = "2.4.4"
# Multipliers used later (see the km/mi computations in guess) to convert a
# great-circle angle in radians into kilometers and statute miles.
11 radian_to_km = 6372.795484
12 radian_to_mi = 3959.871528
14 def pyversion(ref=None):
15 """Determine the Python version and optionally compare to a reference."""
# NOTE(review): interior lines are elided here (16, 18-19, 21, 23+).  The
# visible fragments read the running interpreter's version and split the
# first two (major, minor) components of both it and `ref`; presumably the
# missing lines compare those component lists and return the result when a
# reference is given -- TODO confirm against the complete source.
17 ver = platform.python_version()
20 int(x) for x in ver.split(".")[:2]
22 int(x) for x in ref.split(".")[:2]
# NOTE(review): the `class Selection:` line and the `def __init__` line are
# elided from this listing; what follows is the class docstring and the
# visible fragment of the constructor.
27 """An object to contain selection data."""
29 """Store the config, options and arguments."""
# Load configuration, then parse command-line options against it.
30 self.config = get_config()
31 self.options, self.arguments = get_options(self.config)
# When caching (and search caching) is enabled and --longlist was not
# requested, fold cached search results into the configuration.
32 if self.get_bool("cache") and self.get_bool("cache_search") \
33 and not self.get_bool("longlist"):
34 integrate_search_cache(
# Backward compatibility: with no positional arguments, promote the
# deprecated --id or --city/--st option values into the argument list,
# remove them from the options namespace, and warn on stderr.
39 if not self.arguments:
40 if "id" in self.options.__dict__ \
41 and self.options.__dict__["id"]:
42 self.arguments.append( self.options.__dict__["id"] )
43 del( self.options.__dict__["id"] )
45 message = "WARNING: the --id option is deprecated and will eventually be removed\n"
46 sys.stderr.write(message)
47 elif "city" in self.options.__dict__ \
48 and self.options.__dict__["city"] \
49 and "st" in self.options.__dict__ \
50 and self.options.__dict__["st"]:
51 self.arguments.append(
53 self.options.__dict__["city"],
54 self.options.__dict__["st"]
57 del( self.options.__dict__["city"] )
58 del( self.options.__dict__["st"] )
60 message = "WARNING: the --city/--st options are deprecated and will eventually be removed\n"
61 sys.stderr.write(message)
62 def get(self, option, argument=None):
63 """Retrieve data from the config or options."""
# If the alias section still carries the long-unsupported city/id/st
# options, drop the whole section and warn on stderr.
65 if self.config.has_section(argument) and (
66 self.config.has_option(argument, "city") \
67 or self.config.has_option(argument, "id") \
68 or self.config.has_option(argument, "st")
70 self.config.remove_section(argument)
72 message = "WARNING: the city/id/st options are now unsupported in aliases\n"
73 sys.stderr.write(message)
# Unknown section: run a search (elided lines presumably call guess())
# and record each guessed key/value as a new config section -- TODO
# confirm; the call target is among the elided lines.
74 if not self.config.has_section(argument):
77 path=self.get("setpath"),
78 info=self.get("info"),
80 self.get("cache") and self.get("cache_search")
82 cachedir=self.get("cachedir"),
83 quiet=self.get_bool("quiet")
85 self.config.add_section(argument)
86 for item in guessed.items():
87 self.config.set(argument, *item)
# Lookup order: config section first, then the parsed options namespace.
88 if self.config.has_option(argument, option):
89 return self.config.get(argument, option)
90 if option in self.options.__dict__:
91 return self.options.__dict__[option]
# Neither source had the value; warn that no URI is defined for it.
93 message = "WARNING: no URI defined for %s\n" % option
94 sys.stderr.write(message)
96 def get_bool(self, option, argument=None):
97 """Get data and coerce to a boolean if necessary."""
98 # Mimic configparser's getboolean() method by treating
99 # false/no/off/0 as False and true/yes/on/1 as True values,
# NOTE(review): the return statements for the bool and recognized-string
# cases are among the elided lines; only the lookups and the final
# ValueError for unrecognized strings are visible here.
101 value = self.get(option, argument)
102 if isinstance(value, bool):
104 if isinstance(value, str):
105 vlower = value.lower()
106 if vlower in ('false', 'no', 'off', '0'):
108 elif vlower in ('true', 'yes', 'on', '1'):
110 raise ValueError("Not a boolean: %s" % value)
111 def getint(self, option, argument=None):
112 """Get data and coerce to an integer if necessary."""
# Truthy values are coerced with int(); the falsy branch (if any) is
# among the elided lines following this fragment.
113 value = self.get(option, argument)
114 if value: return int(value)
# NOTE(review): the def line and body of this coordinate-averaging helper are
# elided; only the docstring and the final return of the (sum/count,
# sum/count) pair are visible.
118 """Average a list of coordinates."""
125 return (x/count, y/count)
127 def filter_units(line, units="imperial"):
128 """Filter or convert units in a line of text between US/UK and metric."""
# NOTE(review): the re.match() assignment lines and several flow-control
# lines are elided throughout this function; only the patterns, the
# group unpacking, and the reassembly of `line` are visible.
130 # filter lines with both pressures in the form of "X inches (Y hPa)" or
133 r"(.* )(\d*(\.\d+)? (inches|in\. Hg)) \((\d*(\.\d+)? hPa)\)(.*)",
137 preamble, in_hg, i_fr, i_un, hpa, h_fr, trailer = dual_p.groups()
138 if units == "imperial": line = preamble + in_hg + trailer
139 elif units == "metric": line = preamble + hpa + trailer
140 # filter lines with both temperatures in the form of "X F (Y C)"
142 r"(.* )(-?\d*(\.\d+)? F) \((-?\d*(\.\d+)? C)\)(.*)",
146 preamble, fahrenheit, f_fr, celsius, c_fr, trailer = dual_t.groups()
147 if units == "imperial": line = preamble + fahrenheit + trailer
148 elif units == "metric": line = preamble + celsius + trailer
149 # if metric is desired, convert distances in the form of "X mile(s)" to
# 1.609344 is the exact km-per-statute-mile factor, used for both the
# distance and speed conversions below.
151 if units == "metric":
152 imperial_d = re.match(
153 r"(.* )(\d+)( mile\(s\))(.*)",
157 preamble, mi, m_u, trailer = imperial_d.groups()
158 line = preamble + str(int(round(int(mi)*1.609344))) \
159 + " kilometer(s)" + trailer
160 # filter speeds in the form of "X MPH (Y KT)" to just "X MPH"; if metric is
161 # desired, convert to "Z KPH"
162 imperial_s = re.match(
163 r"(.* )(\d+)( MPH)( \(\d+ KT\))(.*)",
167 preamble, mph, m_u, kt, trailer = imperial_s.groups()
168 if units == "imperial": line = preamble + mph + m_u + trailer
169 elif units == "metric":
170 line = preamble + str(int(round(int(mph)*1.609344))) + " KPH" + \
# NOTE(review): the MPH/KT match-and-rewrite appears twice in this
# fragment; presumably the original handles two occurrences per line --
# TODO confirm against the complete source.
172 imperial_s = re.match(
173 r"(.* )(\d+)( MPH)( \(\d+ KT\))(.*)",
177 preamble, mph, m_u, kt, trailer = imperial_s.groups()
178 if units == "imperial": line = preamble + mph + m_u + trailer
179 elif units == "metric":
180 line = preamble + str(int(round(int(mph)*1.609344))) + " KPH" + \
182 # if imperial is desired, qualify given forcast temperatures like "X F"; if
183 # metric is desired, convert to "Y C"
184 imperial_t = re.match(
185 r"(.* )(High |high |Low |low )(\d+)(\.|,)(.*)",
189 preamble, parameter, fahrenheit, sep, trailer = imperial_t.groups()
190 if units == "imperial":
191 line = preamble + parameter + fahrenheit + " F" + sep + trailer
192 elif units == "metric":
193 line = preamble + parameter \
194 + str(int(round((int(fahrenheit)-32)*5/9))) + " C" + sep \
196 # hand off the resulting line
# NOTE(review): the def line of this URI-fetching function is elided; the
# try/except scaffolding around the Python 2/3 urllib import fallback and
# around the network/cache I/O is also largely elided.
206 """Return a string containing the results of a URI GET."""
# Prefer the Python 3 urllib.request/urllib.error modules; fall back to
# urllib2 under Python 2 (the surrounding try/except lines are elided).
208 import urllib, urllib.error, urllib.request
209 URLError = urllib.error.URLError
210 urlopen = urllib.request.urlopen
212 import urllib2 as urllib
213 URLError = urllib.URLError
214 urlopen = urllib.urlopen
# Per-URI data cache lives under <cachedir>/datacache; creation failures
# are deliberately ignored (best-effort caching).
217 dcachedir = os.path.join( os.path.expanduser(cachedir), "datacache" )
218 if not os.path.exists(dcachedir):
219 try: os.makedirs(dcachedir)
220 except (IOError, OSError): pass
# Cache filename is derived from the URI with "/" flattened to "_".
221 dcache_fn = os.path.join(
223 uri.split(":",1)[1].replace("/","_")
# Serve from cache only when the file is readable and its mtime falls
# within the freshness window (now-cacheage, now].
226 if cache_data and os.access(dcache_fn, os.R_OK) \
227 and now-cacheage < os.stat(dcache_fn).st_mtime <= now:
228 dcache_fd = open(dcache_fn)
229 data = dcache_fd.read()
# Cache miss: fetch and decode; on failure either return "" quietly
# (ignore_fail) or report the unretrievable URI on stderr.
233 data = urlopen(uri).read().decode("utf-8")
235 if ignore_fail: return ""
237 sys.stderr.write("%s error: failed to retrieve\n %s\n\n" % (
238 os.path.basename( sys.argv[0] ), uri))
240 # Some data sources are HTML with the plain text wrapped in pre tags
242 data = data[data.find("<pre>")+5:data.find("</pre>")]
# Best-effort write-back of the fetched data into the cache file.
246 dcache_fd = codecs.open(dcache_fn, "w", "utf-8")
247 dcache_fd.write(data)
249 except (IOError, OSError): pass
# NOTE(review): the def line and parameter list of this METAR-summarizing
# function are elided, as is the get_uri() call that populates `metar`.
263 """Return a summarized METAR for the specified station."""
# Without a METAR URI there is nothing to fetch; report and (presumably,
# on an elided line) bail out.
266 message = "%s error: METAR URI required for conditions\n" % \
267 os.path.basename( sys.argv[0] )
268 sys.stderr.write(message)
272 cache_data=cache_data,
# Normalize bytes to text under Python 3; verbose mode returns the raw
# decoded report unmodified.
276 if pyversion("3") and type(metar) is bytes: metar = metar.decode("utf-8")
277 if verbose: return metar
280 lines = metar.split("\n")
# Default list of report headers to keep, assembled from fragments
# (several continuation lines are elided here).
283 "relative_humidity," \
284 + "precipitation_last_hour," \
285 + "sky conditions," \
291 headerlist = headers.lower().replace("_"," ").split(",")
# First line of the report is "Place, Region (STATION) coords"; title-case
# the place when it splits as expected, otherwise mark it unknown.
294 title = "Current conditions at %s"
295 place = lines[0].split(", ")
297 place = "%s, %s" % ( place[0].title(), place[1] )
298 else: place = "<UNKNOWN>"
299 output.append(title%place)
300 output.append("Last updated " + lines[1])
# Keep only lines whose prefix matches a requested header; strip a
# trailing ":NN" remark, apply unit filtering, and indent unless quiet.
302 for header in headerlist:
304 if line.lower().startswith(header + ":"):
305 if re.match(r".*:\d+$", line): line = line[:line.rfind(":")]
306 if imperial: line = filter_units(line, units="imperial")
307 elif metric: line = filter_units(line, units="metric")
308 if quiet: output.append(line)
309 else: output.append(" " + line)
313 "(no conditions matched your header list, try with --verbose)"
315 return "\n".join(output)
# NOTE(review): the def line, the get_uri() call and much of the muting
# logic of this alert-retrieval function are elided from this listing.
326 """Return alert notice for the specified URI."""
332 cache_data=cache_data,
# Normalize bytes to text under Python 3; verbose mode returns the raw
# alert text unmodified.
336 if pyversion("3") and type(alert) is bytes: alert = alert.decode("utf-8")
338 if verbose: return alert
341 if re.search(r"\nNational Weather Service", alert):
# "Expires:YYYYMMDDhhmm" stamps are parsed as UTC and compared against
# now; alerts expired more than `delay` hours ago are dropped.
345 expirycheck = re.search(r"Expires:([0-9]{12})", alert)
347 # only report alerts and forecasts that expired less than delay
349 import datetime, zoneinfo
350 expiration = datetime.datetime.fromisoformat(
351 "%sT%sZ" % (expirycheck[1][:8], expirycheck[1][-4:]))
352 now = datetime.datetime.now(tz=zoneinfo.ZoneInfo("UTC"))
353 if now - expiration > datetime.timedelta(hours=delay):
355 lines = alert.split("\n")
# The "National Weather Service" boilerplate toggles muting; emit only
# non-empty unmuted lines, indented unless quiet.
358 if muted and line.startswith("National Weather Service"):
365 if line and not muted:
366 if quiet: output.append(line)
367 else: output.append(" " + line)
368 return "\n".join(output)
370 def get_options(config):
371 """Parse the options passed on the command line."""
# NOTE(review): many option lines (dest=, action=, metavar=) are elided
# throughout; the pattern for each option is: read its default from the
# [default] config section when present, else use a hard-coded fallback,
# then register it with optparse.
373 # for optparse's builtin -h/--help option
375 "usage: %prog [options] [alias1|search1 [alias2|search2 [...]]]"
377 # for optparse's builtin --version option
378 verstring = "%prog " + weather_version
382 option_parser = optparse.OptionParser(usage=usage, version=verstring)
383 # separate options object from list of arguments and return both
385 # the -a/--alert option
386 if config.has_option("default", "alert"):
387 default_alert = config.getboolean("default", "alert")
388 else: default_alert = False
389 option_parser.add_option("-a", "--alert",
392 default=default_alert,
393 help="include local alert notices")
395 # the --atypes option
396 if config.has_option("default", "atypes"):
397 default_atypes = config.get("default", "atypes")
# Fallback list of alert product types (several continuation lines of
# this concatenation are elided).
400 "coastal_flood_statement," \
401 + "flash_flood_statement," \
402 + "flash_flood_warning," \
403 + "flash_flood_watch," \
405 + "severe_thunderstorm_warning," \
406 + "severe_weather_statement," \
407 + "special_weather_statement," \
409 + "urgent_weather_message"
410 option_parser.add_option("--atypes",
412 default=default_atypes,
413 help="list of alert notification types to display")
415 # the --build-sets option
416 option_parser.add_option("--build-sets",
420 help="(re)build location correlation sets")
422 # the --cacheage option
423 if config.has_option("default", "cacheage"):
424 default_cacheage = config.getint("default", "cacheage")
425 else: default_cacheage = 900
426 option_parser.add_option("--cacheage",
428 default=default_cacheage,
429 help="duration in seconds to refresh cached data")
431 # the --cachedir option
432 if config.has_option("default", "cachedir"):
433 default_cachedir = config.get("default", "cachedir")
434 else: default_cachedir = "~/.weather"
435 option_parser.add_option("--cachedir",
437 default=default_cachedir,
438 help="directory for storing cached searches and data")
441 if config.has_option("default", "delay"):
442 default_delay = config.getint("default", "delay")
443 else: default_delay = 1
444 option_parser.add_option("--delay",
446 default=default_delay,
447 help="hours to delay alert and forecast expiration")
449 # the -f/--forecast option
450 if config.has_option("default", "forecast"):
451 default_forecast = config.getboolean("default", "forecast")
452 else: default_forecast = False
453 option_parser.add_option("-f", "--forecast",
456 default=default_forecast,
457 help="include a local forecast")
459 # the --headers option
460 if config.has_option("default", "headers"):
461 default_headers = config.get("default", "headers")
465 + "relative_humidity," \
470 + "sky_conditions," \
471 + "precipitation_last_hour"
472 option_parser.add_option("--headers",
474 default=default_headers,
475 help="list of conditions headers to display")
477 # the --imperial option
478 if config.has_option("default", "imperial"):
479 default_imperial = config.getboolean("default", "imperial")
480 else: default_imperial = False
481 option_parser.add_option("--imperial",
484 default=default_imperial,
485 help="filter/convert conditions for US/UK units")
488 option_parser.add_option("--info",
492 help="output detailed information for your search")
494 # the -l/--list option
495 option_parser.add_option("-l", "--list",
499 help="list all configured aliases and cached searches")
501 # the --longlist option
502 option_parser.add_option("--longlist",
506 help="display details of all configured aliases")
508 # the -m/--metric option
509 if config.has_option("default", "metric"):
510 default_metric = config.getboolean("default", "metric")
511 else: default_metric = False
512 option_parser.add_option("-m", "--metric",
515 default=default_metric,
516 help="filter/convert conditions for metric units")
518 # the -n/--no-conditions option
519 if config.has_option("default", "conditions"):
520 default_conditions = config.getboolean("default", "conditions")
521 else: default_conditions = True
522 option_parser.add_option("-n", "--no-conditions",
524 action="store_false",
525 default=default_conditions,
526 help="disable output of current conditions")
528 # the --no-cache option
529 if config.has_option("default", "cache"):
530 default_cache = config.getboolean("default", "cache")
531 else: default_cache = True
532 option_parser.add_option("--no-cache",
534 action="store_false",
536 help="disable all caching (searches and data)")
538 # the --no-cache-data option
539 if config.has_option("default", "cache_data"):
540 default_cache_data = config.getboolean("default", "cache_data")
541 else: default_cache_data = True
542 option_parser.add_option("--no-cache-data",
544 action="store_false",
546 help="disable retrieved data caching")
548 # the --no-cache-search option
549 if config.has_option("default", "cache_search"):
550 default_cache_search = config.getboolean("default", "cache_search")
551 else: default_cache_search = True
552 option_parser.add_option("--no-cache-search",
554 action="store_false",
556 help="disable search result caching")
558 # the -q/--quiet option
559 if config.has_option("default", "quiet"):
560 default_quiet = config.getboolean("default", "quiet")
561 else: default_quiet = False
562 option_parser.add_option("-q", "--quiet",
565 default=default_quiet,
566 help="skip preambles and don't indent")
568 # the --setpath option
569 if config.has_option("default", "setpath"):
570 default_setpath = config.get("default", "setpath")
571 else: default_setpath = ".:~/.weather"
572 option_parser.add_option("--setpath",
574 default=default_setpath,
575 help="directory search path for correlation sets")
577 # the -v/--verbose option
578 if config.has_option("default", "verbose"):
579 default_verbose = config.getboolean("default", "verbose")
580 else: default_verbose = False
581 option_parser.add_option("-v", "--verbose",
584 default=default_verbose,
585 help="show full decoded feeds")
# Deprecated options, kept registered but hidden from --help output via
# optparse.SUPPRESS_HELP (see the deprecation warnings in Selection).
588 if config.has_option("default", "city"):
589 default_city = config.get("default", "city")
590 else: default_city = ""
591 option_parser.add_option("-c", "--city",
593 default=default_city,
594 help=optparse.SUPPRESS_HELP)
595 if config.has_option("default", "id"):
596 default_id = config.get("default", "id")
597 else: default_id = ""
598 option_parser.add_option("-i", "--id",
601 help=optparse.SUPPRESS_HELP)
602 if config.has_option("default", "st"):
603 default_st = config.get("default", "st")
604 else: default_st = ""
605 option_parser.add_option("-s", "--st",
608 help=optparse.SUPPRESS_HELP)
610 options, arguments = option_parser.parse_args()
611 return options, arguments
# NOTE(review): the def line of this configuration loader and its final
# return are elided from this listing.
614 """Parse the aliases and configuration."""
# Python 2/3 compatible configparser import, as used throughout the file.
615 if pyversion("3"): import configparser
616 else: import ConfigParser as configparser
617 config = configparser.ConfigParser()
# Candidate rc files, system-wide first then per-user (the surrounding
# list syntax lines are elided).
621 "/etc/weather/weatherrc",
622 os.path.expanduser("~/.weather/weatherrc"),
623 os.path.expanduser("~/.weatherrc"),
626 for rcfile in rcfiles:
627 if os.access(rcfile, os.R_OK):
629 config.read(rcfile, encoding="utf-8")
# Normalize section names to lowercase: copy options from any mixed-case
# section into a lowercase section, replacing a pre-existing one.
632 for section in config.sections():
633 if section != section.lower():
634 if config.has_section(section.lower()):
635 config.remove_section(section.lower())
636 config.add_section(section.lower())
637 for option,value in config.items(section):
638 config.set(section.lower(), option, value)
641 def integrate_search_cache(config, cachedir, setpath):
642 """Add cached search results into the configuration."""
643 if pyversion("3"): import configparser
644 else: import ConfigParser as configparser
# The searches cache records its creation time in its first line as
# "...: <timestamp> ..."; unreadable cache means nothing to integrate.
646 scache_fn = os.path.join( os.path.expanduser(cachedir), "searches" )
647 if not os.access(scache_fn, os.R_OK): return config
648 scache_fd = open(scache_fn)
649 created = float( scache_fd.readline().split(":")[1].strip().split()[0] )
# Compare cache age against the newest correlation data file; a cache
# older than the data is stale and gets cleared (removal lines elided).
652 datafiles = data_index(setpath)
654 data_freshness = sorted(
655 [ x[1] for x in datafiles.values() ],
658 else: data_freshness = now
659 if created < data_freshness <= now:
662 print( "[clearing outdated %s]" % scache_fn )
663 except (IOError, OSError):
# Merge every cached search section/option into the live config without
# clobbering sections that already exist.
666 scache = configparser.ConfigParser()
668 scache.read(scache_fn, encoding="utf-8")
670 scache.read(scache_fn)
671 for section in scache.sections():
672 if not config.has_section(section):
673 config.add_section(section)
674 for option,value in scache.items(section):
675 config.set(section, option, value)
678 def list_aliases(config, detail=False):
679 """Return a formatted list of aliases defined in the config."""
# detail=True emits full [section] blocks with every option; otherwise a
# one-line "section: description" summary per alias (the if/else and
# return lines are elided in this listing).
681 output = "\n# configured alias details..."
682 for section in sorted(config.sections()):
683 output += "\n\n[%s]" % section
684 for item in sorted(config.items(section)):
685 output += "\n%s = %s" % item
688 output = "configured aliases and cached searches..."
689 for section in sorted(config.sections()):
690 if config.has_option(section, "description"):
691 description = config.get(section, "description")
692 else: description = "(no description provided)"
693 output += "\n %s: %s" % (section, description)
696 def data_index(path):
# Walk the colon-separated search path looking for each known correlation
# data set (optionally gzipped or .txt), recording the first match as
# (path, mtime); the docstring, datafiles init and return are elided.
699 for filename in ("airports", "places", "stations", "zctas", "zones"):
700 for dirname in path.split(":"):
701 for extension in ("", ".gz", ".txt"):
702 candidate = os.path.expanduser(
703 os.path.join( dirname, "".join( (filename, extension) ) )
705 if os.path.exists(candidate):
706 datafiles[filename] = (
708 os.stat(candidate).st_mtime
# Once a data set is found, stop probing further directories for it
# (the break statements are among the elided lines -- TODO confirm).
711 if filename in datafiles:
# NOTE(review): the def line of this search function is elided; large spans
# of its body (result handling, elif/else scaffolding, cache writes) are
# also elided. Comments below describe only the visible fragments.
725 """Find URIs using airport, gecos, placename, station, ZCTA/ZIP, zone."""
726 import codecs, datetime, time, os, re, sys
727 if pyversion("3"): import configparser
728 else: import ConfigParser as configparser
729 datafiles = data_index(path)
# Classify the expression by shape: 3 letters = airport, 4 alphanumerics
# = ICAO station, AAZnnn = NWS zone, 5 digits = ZCTA/ZIP, coordinate
# pair = coordinates, FIPSnnn = FIPS code (fallthrough presumably "name").
730 if re.match("[A-Za-z]{3}$", expression): searchtype = "airport"
731 elif re.match("[A-Za-z0-9]{4}$", expression): searchtype = "station"
732 elif re.match("[A-Za-z]{2}[Zz][0-9]{3}$", expression): searchtype = "zone"
733 elif re.match("[0-9]{5}$", expression): searchtype = "ZCTA"
735 r"[\+-]?\d+(\.\d+)?(-\d+){,2}[ENSWensw]?, *[\+-]?\d+(\.\d+)?(-\d+){,2}[ENSWensw]?$",
738 searchtype = "coordinates"
739 elif re.match(r"(FIPS|fips)\d+$", expression): searchtype = "FIPS"
743 if cache_search: action = "caching"
744 else: action = "using"
# Proximity score thresholds (fraction, label) used when reporting how
# close a matched station/zone is; most entries are elided here.
753 (0.995, "excellent"),
756 if not quiet: print("Searching via %s..."%searchtype)
# Load the stations data set, handling gzipped files and the Python 2/3
# configparser read differences; missing data file is a fatal error.
757 stations = configparser.ConfigParser()
758 dataname = "stations"
759 if dataname in datafiles:
760 datafile = datafiles[dataname][0]
761 if datafile.endswith(".gz"):
764 stations.read_string(
765 gzip.open(datafile).read().decode("utf-8") )
766 else: stations.read_file( gzip.open(datafile) )
769 stations.read(datafile, encoding="utf-8")
771 stations.read(datafile)
773 message = "%s error: can't find \"%s\" data file\n" % (
774 os.path.basename( sys.argv[0] ),
777 sys.stderr.write(message)
# Same load pattern for the zones data set.
779 zones = configparser.ConfigParser()
781 if dataname in datafiles:
782 datafile = datafiles[dataname][0]
783 if datafile.endswith(".gz"):
786 zones.read_string( gzip.open(datafile).read().decode("utf-8") )
787 else: zones.read_file( gzip.open(datafile) )
790 zones.read(datafile, encoding="utf-8")
794 message = "%s error: can't find \"%s\" data file\n" % (
795 os.path.basename( sys.argv[0] ),
798 sys.stderr.write(message)
# Airport search: map the IATA/FAA code to its ICAO station via the
# airports data set, then pull the zone from the station record.
806 if searchtype == "airport":
807 expression = expression.lower()
808 airports = configparser.ConfigParser()
809 dataname = "airports"
810 if dataname in datafiles:
811 datafile = datafiles[dataname][0]
812 if datafile.endswith(".gz"):
815 airports.read_string(
816 gzip.open(datafile).read().decode("utf-8") )
817 else: airports.read_file( gzip.open(datafile) )
820 airports.read(datafile, encoding="utf-8")
822 airports.read(datafile)
824 message = "%s error: can't find \"%s\" data file\n" % (
825 os.path.basename( sys.argv[0] ),
828 sys.stderr.write(message)
830 if airports.has_section(expression) \
831 and airports.has_option(expression, "station"):
832 search = (expression, "IATA/FAA airport code %s" % expression)
833 station = ( airports.get(expression, "station"), 0 )
# NOTE(review): eval() of data-file fields appears throughout; it
# parses tuple literals from the correlation sets, but is unsafe if
# those files are untrusted -- flagging, not changing.
834 if stations.has_option(station[0], "zone"):
835 zone = eval( stations.get(station[0], "zone") )
837 if not ( info or quiet ) \
838 and stations.has_option( station[0], "description" ):
842 stations.get(station[0], "description")
846 message = "No IATA/FAA airport code \"%s\" in the %s file.\n" % (
848 datafiles["airports"][0]
850 sys.stderr.write(message)
# Station search: the expression is itself the ICAO station section.
852 elif searchtype == "station":
853 expression = expression.lower()
854 if stations.has_section(expression):
855 station = (expression, 0)
857 search = (expression, "ICAO station code %s" % expression)
858 if stations.has_option(expression, "zone"):
859 zone = eval( stations.get(expression, "zone") )
861 if not ( info or quiet ) \
862 and stations.has_option(expression, "description"):
866 stations.get(expression, "description")
870 message = "No ICAO weather station \"%s\" in the %s file.\n" % (
872 datafiles["stations"][0]
874 sys.stderr.write(message)
# Zone search: the expression is the NWS zone section, which carries the
# station to use.
876 elif searchtype == "zone":
877 expression = expression.lower()
878 if zones.has_section(expression) \
879 and zones.has_option(expression, "station"):
880 zone = (expression, 0)
881 station = eval( zones.get(expression, "station") )
883 search = (expression, "NWS/NOAA weather zone %s" % expression)
884 if not ( info or quiet ) \
885 and zones.has_option(expression, "description"):
889 zones.get(expression, "description")
893 message = "No usable NWS weather zone \"%s\" in the %s file.\n" % (
895 datafiles["zones"][0]
897 sys.stderr.write(message)
# ZCTA search: load the zctas data set and read station/zone from the
# matching ZIP-code section.
899 elif searchtype == "ZCTA":
900 zctas = configparser.ConfigParser()
902 if dataname in datafiles:
903 datafile = datafiles[dataname][0]
904 if datafile.endswith(".gz"):
908 gzip.open(datafile).read().decode("utf-8") )
909 else: zctas.read_file( gzip.open(datafile) )
912 zctas.read(datafile, encoding="utf-8")
916 message = "%s error: can't find \"%s\" data file\n" % (
917 os.path.basename( sys.argv[0] ),
920 sys.stderr.write(message)
923 if zctas.has_section(expression) \
924 and zctas.has_option(expression, "station"):
925 station = eval( zctas.get(expression, "station") )
926 search = (expression, "Census ZCTA (ZIP code) %s" % expression)
927 if zctas.has_option(expression, "zone"):
928 zone = eval( zctas.get(expression, "zone") )
930 message = "No census ZCTA (ZIP code) \"%s\" in the %s file.\n" % (
932 datafiles["zctas"][0]
934 sys.stderr.write(message)
# Coordinate search: build location tables from stations/zones and pick
# the nearest within 0.1 radians via closest().
936 elif searchtype == "coordinates":
937 search = (expression, "Geographic coordinates %s" % expression)
939 for station in stations.sections():
940 if stations.has_option(station, "location"):
941 stationtable[station] = {
942 "location": eval( stations.get(station, "location") )
944 station = closest( gecos(expression), stationtable, "location", 0.1 )
946 message = "No ICAO weather station found near %s.\n" % expression
947 sys.stderr.write(message)
950 for zone in zones.sections():
951 if zones.has_option(zone, "centroid"):
953 "centroid": eval( zones.get(zone, "centroid") )
955 zone = closest( gecos(expression), zonetable, "centroid", 0.1 )
957 message = "No NWS weather zone near %s; forecasts unavailable.\n" \
959 sys.stderr.write(message)
# FIPS/name search: exact place match first, else collect candidate
# sections from places/stations/zones into `possibilities`.
960 elif searchtype in ("FIPS", "name"):
961 places = configparser.ConfigParser()
963 if dataname in datafiles:
964 datafile = datafiles[dataname][0]
965 if datafile.endswith(".gz"):
969 gzip.open(datafile).read().decode("utf-8") )
970 else: places.read_file( gzip.open(datafile) )
973 places.read(datafile, encoding="utf-8")
975 places.read(datafile)
977 message = "%s error: can't find \"%s\" data file\n" % (
978 os.path.basename( sys.argv[0] ),
981 sys.stderr.write(message)
984 place = expression.lower()
985 if places.has_section(place) and places.has_option(place, "station"):
986 station = eval( places.get(place, "station") )
987 search = (expression, "Census Place %s" % expression)
988 if places.has_option(place, "description"):
991 search[1] + ", %s" % places.get(place, "description")
993 if places.has_option(place, "zone"):
994 zone = eval( places.get(place, "zone") )
995 if not ( info or quiet ) \
996 and places.has_option(place, "description"):
1000 places.get(place, "description")
1004 for place in places.sections():
1005 if places.has_option(place, "description") \
1006 and places.has_option(place, "station") \
1009 places.get(place, "description"),
1012 possibilities.append(place)
1013 for place in stations.sections():
1014 if stations.has_option(place, "description") \
1017 stations.get(place, "description"),
1020 possibilities.append(place)
1021 for place in zones.sections():
1022 if zones.has_option(place, "description") \
1023 and zones.has_option(place, "station") \
1026 zones.get(place, "description"),
1029 possibilities.append(place)
# Exactly one candidate: resolve it from whichever data set defines it.
1030 if len(possibilities) == 1:
1031 place = possibilities[0]
1032 if places.has_section(place):
1033 station = eval( places.get(place, "station") )
1034 description = places.get(place, "description")
1035 if places.has_option(place, "zone"):
1036 zone = eval( places.get(place, "zone" ) )
1037 search = ( expression, "%s: %s" % (place, description) )
1038 elif stations.has_section(place):
1039 station = (place, 0.0)
1040 description = stations.get(place, "description")
1041 if stations.has_option(place, "zone"):
1042 zone = eval( stations.get(place, "zone" ) )
1043 search = ( expression, "ICAO station code %s" % place )
1044 elif zones.has_section(place):
1045 station = eval( zones.get(place, "station") )
1046 description = zones.get(place, "description")
1048 search = ( expression, "NWS/NOAA weather zone %s" % place )
1049 if not ( info or quiet ):
1050 print( "[%s result %s]" % (action, description) )
1051 if not possibilities and not station[0]:
1052 message = "No FIPS code/census area match in the %s file.\n" % (
1053 datafiles["places"][0]
1055 sys.stderr.write(message)
# Assemble the URI dictionary from the resolved station/zone records.
1058 uris["metar"] = stations.get( station[0], "metar" )
1060 for key,value in zones.items( zone[0] ):
1061 if key not in ("centroid", "description", "station"):
# Ambiguous name search: list all matches (up to max_results) or bail.
1064 count = len(possibilities)
1065 if count <= max_results:
1066 print( "Your search is ambiguous, returning %s matches:" % count )
1067 for place in sorted(possibilities):
1068 if places.has_section(place):
1072 places.get(place, "description")
1075 elif stations.has_section(place):
1079 stations.get(place, "description")
1082 elif zones.has_section(place):
1086 zones.get(place, "description")
1091 "Your search is too ambiguous, returning %s matches." % count
# For info mode, gather all station/zone distances so the match's
# proximity percentile can be labelled via the score table.
1098 for section in dataset.sections():
1099 if dataset.has_option(section, "station"):
1101 eval( dataset.get(section, "station") )[1]
1103 if dataset.has_option(section, "zone"):
1104 zonelist.append( eval( dataset.get(section, "zone") )[1] )
1107 scount = len(stationlist)
1108 zcount = len(zonelist)
1111 for score in scores:
1113 sranks.append( stationlist[ int( (1-score[0]) * scount ) ] )
1115 zranks.append( zonelist[ int( (1-score[0]) * zcount ) ] )
1116 description = search[1]
1117 uris["description"] = description
1119 "%s\n%s" % ( description, "-" * len(description) )
1124 stations.get( station[0], "description" )
# station[1]/zone[1] hold the great-circle angle in radians; convert to
# km/mi with the module-level factors for the proximity report.
1127 km = radian_to_km*station[1]
1128 mi = radian_to_mi*station[1]
1129 if sranks and not description.startswith("ICAO station code "):
1130 for index in range(0, len(scores)):
1131 if station[1] >= sranks[index]:
1132 score = scores[index][1]
1135 " (proximity %s, %.3gkm, %.3gmi)" % ( score, km, mi )
1137 elif searchtype == "coordinates":
1138 print( " (%.3gkm, %.3gmi)" % (km, mi) )
1141 "%s: %s" % ( zone[0], zones.get( zone[0], "description" ) )
1143 km = radian_to_km*zone[1]
1144 mi = radian_to_mi*zone[1]
1145 if zranks and not description.startswith("NWS/NOAA weather zone "):
1146 for index in range(0, len(scores)):
1147 if zone[1] >= zranks[index]:
1148 score = scores[index][1]
1151 " (proximity %s, %.3gkm, %.3gmi)" % ( score, km, mi )
1153 elif searchtype == "coordinates" and zone[0]:
1154 print( " (%.3gkm, %.3gmi)" % (km, mi) )
# Cache write-back: append an INI section for this search, stamped with
# the current time and (for a fresh cache file) the data-file mtimes.
1157 nowstamp = "%s (%s)" % (
1159 datetime.datetime.isoformat(
1160 datetime.datetime.fromtimestamp(now),
1164 search_cache = ["\n"]
1165 search_cache.append( "[%s]\n" % search[0] )
1166 search_cache.append( "cached = %s\n" % nowstamp )
1167 for uriname in sorted(uris.keys()):
1168 search_cache.append( "%s = %s\n" % ( uriname, uris[uriname] ) )
1169 real_cachedir = os.path.expanduser(cachedir)
1170 if not os.path.exists(real_cachedir):
1171 try: os.makedirs(real_cachedir)
1172 except (IOError, OSError): pass
1173 scache_fn = os.path.join(real_cachedir, "searches")
1174 if not os.path.exists(scache_fn):
1176 [ x[1] for x in datafiles.values() ],
1179 thenstamp = "%s (%s)" % (
1181 datetime.datetime.isoformat(
1182 datetime.datetime.fromtimestamp(then),
1186 search_cache.insert(
1188 "# based on data files from: %s\n" % thenstamp
# Only append the section if it is not already cached; failures writing
# the cache are deliberately ignored (best-effort).
1191 scache_existing = configparser.ConfigParser()
1193 scache_existing.read(scache_fn, encoding="utf-8")
1195 scache_existing.read(scache_fn)
1196 if not scache_existing.has_section(search[0]):
1197 scache_fd = codecs.open(scache_fn, "a", "utf-8")
1198 scache_fd.writelines(search_cache)
1200 except (IOError, OSError): pass
1204 def closest(position, nodes, fieldname, angle=None):
# Find the node whose coordinate tuple (radians) under `fieldname` is
# nearest to `position` by great-circle distance, searching only within
# the given angular window; returns (name-or-match, angle). The loop
# header, docstring and several branch lines are elided in this listing.
1206 if not angle: angle = 2*math.pi
1209 if fieldname in nodes[name]:
1210 node = nodes[name][fieldname]
# Cheap prefilter on latitude/longitude deltas (with 2*pi wraparound
# on longitude) before paying for the spherical-law-of-cosines acos.
1211 if node and abs( position[0]-node[0] ) < angle:
1212 if abs( position[1]-node[1] ) < angle \
1213 or abs( abs( position[1]-node[1] ) - 2*math.pi ) < angle:
1214 if position == node:
1218 candidate = math.acos(
1219 math.sin( position[0] ) * math.sin( node[0] ) \
1220 + math.cos( position[0] ) \
1221 * math.cos( node[0] ) \
1222 * math.cos( position[1] - node[1] )
# A closer candidate shrinks the search window for later nodes
# (the assignments are among the elided lines -- TODO confirm).
1224 if candidate < angle:
1227 if match: match = str(match)
1228 return (match, angle)
1230 def gecos(formatted):
# Parse a "lat, lon" string in degrees[-minutes[-seconds]][NESW] form
# into a (lat, lon) tuple of radians. The docstring line and the
# ".groups()" terminator of the re.match call are elided in this listing.
1232 coordinates = formatted.split(",")
1233 for coordinate in range(0, 2):
# Groups: degrees, (-minutes) wrapper, minutes, (-seconds) wrapper,
# seconds, optional hemisphere letter (already lowercased).
1234 degrees, foo, minutes, bar, seconds, hemisphere = re.match(
1235 r"([\+-]?\d+\.?\d*)(-(\d+))?(-(\d+))?([ensw]?)$",
1236 coordinates[coordinate].strip().lower()
1238 value = float(degrees)
1239 if minutes: value += float(minutes)/60
1240 if seconds: value += float(seconds)/3600
# Southern/western hemispheres negate the magnitude.
1241 if hemisphere and hemisphere in "sw": value *= -1
1242 coordinates[coordinate] = math.radians(value)
1243 return tuple(coordinates)
# Set up the indexing run: import what the correlation pass needs, then
# scan the working directory for the (year-stamped) Census gazetteer
# archives and the NWS zone correlation file, and fix the names of the
# remaining inputs and the generated outputs.
1246 import codecs, csv, datetime, hashlib, os, re, sys, time, zipfile, zoneinfo
# Python 2 compatibility shim for the configparser module name.
1247 if pyversion("3"): import configparser
1248 else: import ConfigParser as configparser
1249 for filename in os.listdir("."):
# NOTE(review): the "." before "zip"/"dbx" is an unescaped regex
# metacharacter, so it matches any single character — harmless here but
# worth confirming intent.
1250 if re.match("[0-9]{4}_Gaz_counties_national.zip$", filename):
# *_an is the archive name, *_fn the member text file inside it.
1251 gcounties_an = filename
1252 gcounties_fn = filename[:-4] + ".txt"
1253 elif re.match("[0-9]{4}_Gaz_cousubs_national.zip$", filename):
1254 gcousubs_an = filename
1255 gcousubs_fn = filename[:-4] + ".txt"
1256 elif re.match("[0-9]{4}_Gaz_place_national.zip$", filename):
1257 gplace_an = filename
1258 gplace_fn = filename[:-4] + ".txt"
1259 elif re.match("[0-9]{4}_Gaz_zcta_national.zip$", filename):
# NOTE(review): the gzcta_an assignment (line 1260) is elided in this
# listing but gzcta_an is used below.
1261 gzcta_fn = filename[:-4] + ".txt"
# NWS public forecast zone/county correlation file (bpXXyyXX.dbx).
1262 elif re.match("bp[0-9]{2}[a-z]{2}[0-9]{2}.dbx$", filename):
1263 cpfzcf_fn = filename
# Fixed-name inputs: NSD station file, OurAirports dump, local overrides.
1264 nsdcccc_fn = "nsd_cccc.txt"
1265 ourairports_fn = "airports.csv"
1266 overrides_fn = "overrides.conf"
1267 overrideslog_fn = "overrides.log"
# Generated output correlation sets.
1271 airports_fn = "airports"
1272 places_fn = "places"
1273 stations_fn = "stations"
# Build the provenance header written at the top of every generated
# file: the generating script, the build date (honoring
# SOURCE_DATE_EPOCH for reproducible builds), and for each input an md5
# digest plus its modification date.
1278 # generated by %s on %s from these public domain sources:
1280 # https://www.census.gov/geographies/reference-files/time-series/geo/gazetteer-files.html
1286 # https://www.weather.gov/gis/ZoneCounty/
1289 # https://tgftp.nws.noaa.gov/data/
1292 # https://ourairports.com/data/
1295 # ...and these manually-generated or hand-compiled adjustments:
1301 os.path.basename( sys.argv[0] ),
1302 datetime.date.isoformat(
# SOURCE_DATE_EPOCH (if set) pins the date for reproducible output.
# NOTE(review): datetime.datetime.utcfromtimestamp is deprecated as of
# Python 3.12 — consider datetime.datetime.fromtimestamp(..., tz=UTC).
1303 datetime.datetime.utcfromtimestamp( int(os.environ.get('SOURCE_DATE_EPOCH', time.time())) )
1305 hashlib.md5( open(gcounties_an, "rb").read() ).hexdigest(),
1306 datetime.date.isoformat(
1307 datetime.datetime.utcfromtimestamp( os.path.getmtime(gcounties_an) )
1310 hashlib.md5( open(gcousubs_an, "rb").read() ).hexdigest(),
1311 datetime.date.isoformat(
1312 datetime.datetime.utcfromtimestamp( os.path.getmtime(gcousubs_an) )
1315 hashlib.md5( open(gplace_an, "rb").read() ).hexdigest(),
1316 datetime.date.isoformat(
1317 datetime.datetime.utcfromtimestamp( os.path.getmtime(gplace_an) )
1320 hashlib.md5( open(gzcta_an, "rb").read() ).hexdigest(),
1321 datetime.date.isoformat(
1322 datetime.datetime.utcfromtimestamp( os.path.getmtime(gzcta_an) )
1325 hashlib.md5( open(cpfzcf_fn, "rb").read() ).hexdigest(),
1326 datetime.date.isoformat(
1327 datetime.datetime.utcfromtimestamp( os.path.getmtime(cpfzcf_fn) )
1330 hashlib.md5( open(nsdcccc_fn, "rb").read() ).hexdigest(),
1331 datetime.date.isoformat(
1332 datetime.datetime.utcfromtimestamp( os.path.getmtime(nsdcccc_fn) )
1335 hashlib.md5( open(ourairports_fn, "rb").read() ).hexdigest(),
1336 datetime.date.isoformat(
1337 datetime.datetime.utcfromtimestamp( os.path.getmtime(ourairports_fn) )
1340 hashlib.md5( open(overrides_fn, "rb").read() ).hexdigest(),
1341 datetime.date.isoformat(
1342 datetime.datetime.utcfromtimestamp( os.path.getmtime(overrides_fn) )
1345 hashlib.md5( open(slist_fn, "rb").read() ).hexdigest(),
1346 datetime.date.isoformat(
1347 datetime.datetime.utcfromtimestamp( os.path.getmtime(slist_fn) )
1350 hashlib.md5( open(zlist_fn, "rb").read() ).hexdigest(),
1351 datetime.date.isoformat(
1352 datetime.datetime.utcfromtimestamp( os.path.getmtime(zlist_fn) )
# Read the three Census gazetteer archives (counties, county
# subdivisions, places). Each is a tab-separated member inside a zip:
# the first line names the columns, and each data row contributes a
# "fipsNNN..." keyed entry in the places table with a radian centroid
# (from gecos) and a "Name, ST" description.
1361 message = "Reading %s:%s..." % (gcounties_an, gcounties_fn)
1362 sys.stdout.write(message)
1365 gcounties = zipfile.ZipFile(gcounties_an).open(gcounties_fn, "r")
# Header row gives column positions; look fields up by name so column
# order in the gazetteer may change without breaking the parse.
1366 columns = gcounties.readline().decode("utf-8").strip().split("\t")
1367 for line in gcounties:
1368 fields = line.decode("utf-8").strip().split("\t")
1369 f_geoid = fields[ columns.index("GEOID") ].strip()
1370 f_name = fields[ columns.index("NAME") ].strip()
1371 f_usps = fields[ columns.index("USPS") ].strip()
1372 f_intptlat = fields[ columns.index("INTPTLAT") ].strip()
1373 f_intptlong = fields[ columns.index("INTPTLONG") ].strip()
# Only keep rows with every needed field present.
1374 if f_geoid and f_name and f_usps and f_intptlat and f_intptlong:
1375 fips = "fips%s" % f_geoid
1376 if fips not in places: places[fips] = {}
1377 places[fips]["centroid"] = gecos(
1378 "%s,%s" % (f_intptlat, f_intptlong)
1380 places[fips]["description"] = "%s, %s" % (f_name, f_usps)
1383 print("done (%s lines)." % count)
# County subdivisions: same format and handling as counties above.
1384 message = "Reading %s:%s..." % (gcousubs_an, gcousubs_fn)
1385 sys.stdout.write(message)
1388 gcousubs = zipfile.ZipFile(gcousubs_an).open(gcousubs_fn, "r")
1389 columns = gcousubs.readline().decode("utf-8").strip().split("\t")
1390 for line in gcousubs:
1391 fields = line.decode("utf-8").strip().split("\t")
1392 f_geoid = fields[ columns.index("GEOID") ].strip()
1393 f_name = fields[ columns.index("NAME") ].strip()
1394 f_usps = fields[ columns.index("USPS") ].strip()
1395 f_intptlat = fields[ columns.index("INTPTLAT") ].strip()
1396 f_intptlong = fields[ columns.index("INTPTLONG") ].strip()
1397 if f_geoid and f_name and f_usps and f_intptlat and f_intptlong:
1398 fips = "fips%s" % f_geoid
1399 if fips not in places: places[fips] = {}
1400 places[fips]["centroid"] = gecos(
1401 "%s,%s" % (f_intptlat, f_intptlong)
1403 places[fips]["description"] = "%s, %s" % (f_name, f_usps)
1406 print("done (%s lines)." % count)
# Incorporated places: same format and handling again.
1407 message = "Reading %s:%s..." % (gplace_an, gplace_fn)
1408 sys.stdout.write(message)
1411 gplace = zipfile.ZipFile(gplace_an).open(gplace_fn, "r")
1412 columns = gplace.readline().decode("utf-8").strip().split("\t")
1414 fields = line.decode("utf-8").strip().split("\t")
1415 f_geoid = fields[ columns.index("GEOID") ].strip()
1416 f_name = fields[ columns.index("NAME") ].strip()
1417 f_usps = fields[ columns.index("USPS") ].strip()
1418 f_intptlat = fields[ columns.index("INTPTLAT") ].strip()
1419 f_intptlong = fields[ columns.index("INTPTLONG") ].strip()
1420 if f_geoid and f_name and f_usps and f_intptlat and f_intptlong:
1421 fips = "fips%s" % f_geoid
1422 if fips not in places: places[fips] = {}
1423 places[fips]["centroid"] = gecos(
1424 "%s,%s" % (f_intptlat, f_intptlong)
1426 places[fips]["description"] = "%s, %s" % (f_name, f_usps)
1429 print("done (%s lines)." % count)
# Seed the stations table from the hand-maintained station list: each
# non-comment ICAO id gets a decoded-METAR URL. Then enrich stations
# from the NSD nsd_cccc.txt file (semicolon-delimited): build a
# "Name, ST, Country" description and fill in a location from
# whichever coordinate fields are populated.
1430 message = "Reading %s..." % slist_fn
1431 sys.stdout.write(message)
1434 slist = codecs.open(slist_fn, "r", "utf-8")
# Strip trailing "#" comments; what remains is the ICAO identifier.
1436 icao = line.split("#")[0].strip()
1439 "metar": "https://tgftp.nws.noaa.gov/data/observations/"\
1440 + "metar/decoded/%s.TXT" % icao.upper()
1444 print("done (%s lines)." % count)
1445 message = "Reading %s..." % nsdcccc_fn
1446 sys.stdout.write(message)
1449 nsdcccc = codecs.open(nsdcccc_fn, "r", "utf-8")
1450 for line in nsdcccc:
1452 fields = line.split(";")
1453 icao = fields[0].strip().lower()
# Only enrich stations already seeded from the station list.
1454 if icao in stations:
# Title-case and re-join to normalize internal whitespace.
1456 name = " ".join( fields[3].strip().title().split() )
1457 if name: description.append(name)
1458 st = fields[4].strip()
1459 if st: description.append(st)
1460 country = " ".join( fields[5].strip().title().split() )
1461 if country: description.append(country)
1463 stations[icao]["description"] = ", ".join(description)
# Prefer the station coordinates in fields 7-8; fall back to
# fields 5-6 when no location was set from them.
1464 lat, lon = fields[7:9]
1466 stations[icao]["location"] = gecos( "%s,%s" % (lat, lon) )
1467 elif "location" not in stations[icao]:
1468 lat, lon = fields[5:7]
1470 stations[icao]["location"] = gecos( "%s,%s" % (lat, lon) )
1473 print("done (%s lines)." % count)
# Read the OurAirports CSV dump: map three-letter IATA codes to known
# ICAO stations, and use the airport name/municipality/region/country
# columns to fill in any station description or location still
# missing. Then seed the zones table from the hand-maintained zone
# list (zlist), with "#" comments stripped.
1474 message = "Reading %s..." % ourairports_fn
1475 sys.stdout.write(message)
1478 ourairports = open(ourairports_fn, "r")
1479 for row in csv.reader(ourairports):
# Column 12 is the ICAO (GPS) code, column 13 the IATA code.
1480 icao = row[12].lower()
1481 if icao in stations:
1482 iata = row[13].lower()
1483 if len(iata) == 3: airports[iata] = { "station": icao }
1484 if "description" not in stations[icao]:
1487 if name: description.append(name)
1488 municipality = row[10]
1489 if municipality: description.append(municipality)
# Region codes look like "US-WA"; drop the country prefix when it
# matches the row's country code.
1494 c,r = region.split("-", 1)
1495 if c == country: region = r
1496 description.append(region)
1498 description.append(country)
1500 stations[icao]["description"] = ", ".join(description)
1501 if "location" not in stations[icao]:
1506 stations[icao]["location"] = gecos(
1507 "%s,%s" % (lat, lon)
1511 print("done (%s lines)." % count)
1512 message = "Reading %s..." % zlist_fn
1513 sys.stdout.write(message)
1516 zlist = codecs.open(zlist_fn, "r", "utf-8")
1518 line = line.split("#")[0].strip()
1523 print("done (%s lines)." % count)
# Read the NWS zone/county correlation file (pipe-delimited). For each
# complete row, derive the zone key ("stZnnn", lowercased), the county
# FIPS key and a state+county code, then attach the full set of
# tgftp.nws.noaa.gov product URLs (flood statements, warnings,
# watches, forecasts), a tz database name mapped from the NWS time
# zone code, a human-readable description, and a centroid parsed from
# the row (falling back to the county's gazetteer centroid).
1524 message = "Reading %s..." % cpfzcf_fn
1525 sys.stdout.write(message)
1529 cpfzcf = codecs.open(cpfzcf_fn, "r", "utf-8")
1531 fields = line.strip().split("|")
1532 if len(fields) == 11 \
1533 and fields[0] and fields[1] and fields[9] and fields[10]:
# fields[0] is the state abbreviation, fields[1] the zone number;
# joined with "z" they form keys like "waz558".
1534 zone = "z".join( fields[:2] ).lower()
1537 description = fields[3].strip()
1538 fips = "fips%s"%fields[6]
# County code: state + "c" + last three FIPS digits.
1539 countycode = "%sc%s" % (state.lower(), fips[-3:])
# Per-product URL templates; county-level products use countycode,
# zone-level products use the zone id.
1541 zones[zone]["coastal_flood_statement"] = (
1542 "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1543 "flood/coastal/%s/%s.txt" % (state.lower(), zone))
1544 zones[zone]["flash_flood_statement"] = (
1545 "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1546 "flash_flood/statement/%s/%s.txt"
1547 % (state.lower(), countycode))
1548 zones[zone]["flash_flood_warning"] = (
1549 "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1550 "flash_flood/warning/%s/%s.txt"
1551 % (state.lower(), countycode))
1552 zones[zone]["flash_flood_watch"] = (
1553 "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1554 "flash_flood/watch/%s/%s.txt" % (state.lower(), zone))
1555 zones[zone]["flood_warning"] = (
1556 "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1557 "flood/warning/%s/%s.txt"
1558 % (state.lower(), countycode))
1559 zones[zone]["severe_thunderstorm_warning"] = (
1560 "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1561 "thunderstorm/%s/%s.txt" % (state.lower(), countycode))
1562 zones[zone]["severe_weather_statement"] = (
1563 "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1564 "severe_weather_stmt/%s/%s.txt"
1565 % (state.lower(), countycode))
1566 zones[zone]["short_term_forecast"] = (
1567 "https://tgftp.nws.noaa.gov/data/forecasts/nowcast/"
1568 "%s/%s.txt" % (state.lower(), zone))
1569 zones[zone]["special_weather_statement"] = (
1570 "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1571 "special_weather_stmt/%s/%s.txt"
1572 % (state.lower(), zone))
1573 zones[zone]["state_forecast"] = (
1574 "https://tgftp.nws.noaa.gov/data/forecasts/state/"
1575 "%s/%s.txt" % (state.lower(), zone))
1576 zones[zone]["tornado"] = (
1577 "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1578 "tornado/%s/%s.txt" % (state.lower(), countycode))
1579 zones[zone]["urgent_weather_message"] = (
1580 "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1581 "non_precip/%s/%s.txt" % (state.lower(), zone))
1582 zones[zone]["zone_forecast"] = (
1583 "https://tgftp.nws.noaa.gov/data/forecasts/zone/"
1584 "%s/%s.txt" % (state.lower(), zone))
# Map the NWS time zone code to a tz database zone name; unknown
# codes fall through to an empty string.
1587 zones[zone]["tz"] = "US/Alaska"
1588 elif tzcode == "AH":
1589 zones[zone]["tz"] = "US/Aleutian"
1590 elif tzcode in ("C", "CE", "CM"):
1591 zones[zone]["tz"] = "US/Central"
1592 elif tzcode in ("E", "e"):
1593 zones[zone]["tz"] = "US/Eastern"
1595 zones[zone]["tz"] = "Pacific/Guadalcanal"
1597 zones[zone]["tz"] = "Pacific/Guam"
1599 zones[zone]["tz"] = "US/Hawaii"
1601 zones[zone]["tz"] = "Japan"
1603 zones[zone]["tz"] = "Pacific/Kwajalein"
1604 elif tzcode in ("M", "MC", "MP"):
1605 zones[zone]["tz"] = "US/Mountain"
1607 zones[zone]["tz"] = "US/Arizona"
1609 zones[zone]["tz"] = "US/Pacific"
1611 zones[zone]["tz"] = "US/Samoa"
1613 zones[zone]["tz"] = "America/Virgin"
1615 zones[zone]["tz"] = ""
# Append the county (eliding a duplicate name) and state to the
# description. NOTE(review): the else branch between lines 1619 and
# 1621 is elided in this listing.
1618 if description.endswith(county):
1619 description += " County"
1621 description += ", %s County" % county
1622 description += ", %s, US" % state
1623 zones[zone]["description"] = description
# Centroid from the row's lat/lon columns; fall back to the
# county's gazetteer centroid when the row's parse is empty.
1624 zones[zone]["centroid"] = gecos( ",".join( fields[9:11] ) )
1625 if fips in places and not zones[zone]["centroid"]:
1626 zones[zone]["centroid"] = places[fips]["centroid"]
1629 print("done (%s lines)." % count)
# Read the Census ZCTA (ZIP Code Tabulation Area) gazetteer: each row
# keyed by GEOID gets a radian centroid in the zctas table.
1630 message = "Reading %s:%s..." % (gzcta_an, gzcta_fn)
1631 sys.stdout.write(message)
1634 gzcta = zipfile.ZipFile(gzcta_an).open(gzcta_fn, "r")
# Header row supplies column positions, as with the other gazetteers.
1635 columns = gzcta.readline().decode("utf-8").strip().split("\t")
1637 fields = line.decode("utf-8").strip().split("\t")
1638 f_geoid = fields[ columns.index("GEOID") ].strip()
1639 f_intptlat = fields[ columns.index("INTPTLAT") ].strip()
1640 f_intptlong = fields[ columns.index("INTPTLONG") ].strip()
1641 if f_geoid and f_intptlat and f_intptlong:
1642 if f_geoid not in zctas: zctas[f_geoid] = {}
1643 zctas[f_geoid]["centroid"] = gecos(
1644 "%s,%s" % (f_intptlat, f_intptlong)
1648 print("done (%s lines)." % count)
# Apply overrides.conf: each section name selects a table by shape
# (3 letters = airport, 4 alphanumerics = station, 5 digits = ZCTA,
# ststZnnn = zone, fipsN+ = place). A leading "-" deletes the entry;
# otherwise options are added/changed, and each action is appended to
# an overrides.log audit trail (rotating any previous log to *_old).
1649 message = "Reading %s..." % overrides_fn
1650 sys.stdout.write(message)
1656 overrides = configparser.ConfigParser()
1657 overrides.read_file( codecs.open(overrides_fn, "r", "utf8") )
1659 for section in overrides.sections():
# "-SECTION" means delete SECTION instead of setting options.
1662 if section.startswith("-"):
1663 section = section[1:]
1665 else: delete = False
# Three letters: an airport (IATA) override.
1666 if re.match("[A-Za-z]{3}$", section):
1668 if section in airports:
1669 del( airports[section] )
1670 logact = "removed airport %s" % section
1673 logact = "tried to remove nonexistent airport %s" % section
1675 if section in airports:
1676 logact = "changed airport %s" % section
1679 airports[section] = {}
1680 logact = "added airport %s" % section
1682 for key,value in overrides.items(section):
1683 if key in airports[section]: chgopt += 1
# NOTE(security): eval() of values from overrides.conf — the file
# is maintainer-controlled, but anything able to write it can run
# arbitrary code here.
1685 if key in ("centroid", "location"):
1686 airports[section][key] = eval(value)
1688 airports[section][key] = value
1689 if addopt and chgopt:
1690 logact += " (+%s/!%s options)" % (addopt, chgopt)
1691 elif addopt: logact += " (+%s options)" % addopt
1692 elif chgopt: logact += " (!%s options)" % chgopt
# Four alphanumerics: a station (ICAO) override.
1693 elif re.match("[A-Za-z0-9]{4}$", section):
1695 if section in stations:
1696 del( stations[section] )
1697 logact = "removed station %s" % section
1700 logact = "tried to remove nonexistent station %s" % section
1702 if section in stations:
1703 logact = "changed station %s" % section
1706 stations[section] = {}
1707 logact = "added station %s" % section
1709 for key,value in overrides.items(section):
1710 if key in stations[section]: chgopt += 1
1712 if key in ("centroid", "location"):
1713 stations[section][key] = eval(value)
1715 stations[section][key] = value
1716 if addopt and chgopt:
1717 logact += " (+%s/!%s options)" % (addopt, chgopt)
1718 elif addopt: logact += " (+%s options)" % addopt
1719 elif chgopt: logact += " (!%s options)" % chgopt
# Five digits: a ZCTA override.
1720 elif re.match("[0-9]{5}$", section):
1722 if section in zctas:
1723 del( zctas[section] )
1724 logact = "removed zcta %s" % section
1727 logact = "tried to remove nonexistent zcta %s" % section
1729 if section in zctas:
1730 logact = "changed zcta %s" % section
1734 logact = "added zcta %s" % section
1736 for key,value in overrides.items(section):
1737 if key in zctas[section]: chgopt += 1
1739 if key in ("centroid", "location"):
1740 zctas[section][key] = eval(value)
1742 zctas[section][key] = value
1743 if addopt and chgopt:
1744 logact += " (+%s/!%s options)" % (addopt, chgopt)
1745 elif addopt: logact += " (+%s options)" % addopt
1746 elif chgopt: logact += " (!%s options)" % chgopt
# Two letters + Z + three digits: a forecast zone override.
1747 elif re.match("[A-Za-z]{2}[Zz][0-9]{3}$", section):
1749 if section in zones:
1750 del( zones[section] )
1751 logact = "removed zone %s" % section
1754 logact = "tried to remove nonexistent zone %s" % section
1756 if section in zones:
1757 logact = "changed zone %s" % section
1761 logact = "added zone %s" % section
1763 for key,value in overrides.items(section):
1764 if key in zones[section]: chgopt += 1
1766 if key in ("centroid", "location"):
1767 zones[section][key] = eval(value)
1769 zones[section][key] = value
1770 if addopt and chgopt:
1771 logact += " (+%s/!%s options)" % (addopt, chgopt)
1772 elif addopt: logact += " (+%s options)" % addopt
1773 elif chgopt: logact += " (!%s options)" % chgopt
# "fips" + digits: a place override.
1774 elif re.match("fips[0-9]+$", section):
1776 if section in places:
1777 del( places[section] )
1778 logact = "removed place %s" % section
1781 logact = "tried to remove nonexistent place %s" % section
1783 if section in places:
1784 logact = "changed place %s" % section
1787 places[section] = {}
1788 logact = "added place %s" % section
1790 for key,value in overrides.items(section):
1791 if key in places[section]: chgopt += 1
1793 if key in ("centroid", "location"):
1794 places[section][key] = eval(value)
1796 places[section][key] = value
1797 if addopt and chgopt:
1798 logact += " (+%s/!%s options)" % (addopt, chgopt)
1799 elif addopt: logact += " (+%s options)" % addopt
1800 elif chgopt: logact += " (!%s options)" % chgopt
1802 overrideslog.append("%s\n" % logact)
# Rotate any previous log aside, then write a license header followed
# by the accumulated audit lines.
1804 if os.path.exists(overrideslog_fn):
1805 os.rename(overrideslog_fn, "%s_old"%overrideslog_fn)
1806 overrideslog_fd = codecs.open(overrideslog_fn, "w", "utf8")
1808 overrideslog_fd.write(
1809 '# Copyright (c) %s Jeremy Stanley <fungi@yuggoth.org>. Permission to\n'
1810 '# use, copy, modify, and distribute this software is granted under terms\n'
1811 '# provided in the LICENSE file distributed with this software.\n\n'
1812 % time.gmtime().tm_year)
1813 overrideslog_fd.writelines(overrideslog)
1814 overrideslog_fd.close()
1815 print("done (%s overridden sections: +%s/-%s/!%s)." % (
# Correlate everything by proximity using closest(): each place gets
# its nearest station and zone, each located station its nearest zone,
# each ZCTA its nearest station and zone, and each zone with a
# centroid its nearest station — all within a 0.1-radian search
# radius. A 2%-step progress meter is driven by comparing the running
# count against an upper-bound estimate of total correlations.
1821 estimate = 2*len(places) + len(stations) + 2*len(zctas) + len(zones)
1823 "Correlating places, stations, ZCTAs and zones (upper bound is %s):" % \
# milestones holds the 0..50 progress ticks (each worth 2%) not yet
# printed; ticks are removed as they are emitted.
1827 milestones = list( range(51) )
1829 sys.stdout.write(message)
1832 centroid = places[fips]["centroid"]
1834 station = closest(centroid, stations, "location", 0.1)
1836 places[fips]["station"] = station
1839 level = int(50*count/estimate)
1840 if level in milestones:
1841 for remaining in milestones[:milestones.index(level)+1]:
1844 sys.stdout.write(message)
1847 message = "%s%%" % (remaining*2,)
1848 sys.stdout.write(message)
1850 milestones.remove(remaining)
1852 zone = closest(centroid, zones, "centroid", 0.1)
1854 places[fips]["zone"] = zone
1857 level = int(50*count/estimate)
1858 if level in milestones:
1859 for remaining in milestones[:milestones.index(level)+1]:
1862 sys.stdout.write(message)
1865 message = "%s%%" % (remaining*2,)
1866 sys.stdout.write(message)
1868 milestones.remove(remaining)
# Stations only correlate to zones, and only when located.
1869 for station in stations:
1870 if "location" in stations[station]:
1871 location = stations[station]["location"]
1873 zone = closest(location, zones, "centroid", 0.1)
1875 stations[station]["zone"] = zone
1878 level = int(50*count/estimate)
1879 if level in milestones:
1880 for remaining in milestones[:milestones.index(level)+1]:
1883 sys.stdout.write(message)
1886 message = "%s%%" % (remaining*2,)
1887 sys.stdout.write(message)
1889 milestones.remove(remaining)
1890 for zcta in zctas.keys():
1891 centroid = zctas[zcta]["centroid"]
1893 station = closest(centroid, stations, "location", 0.1)
1895 zctas[zcta]["station"] = station
1898 level = int(50*count/estimate)
1899 if level in milestones:
1900 for remaining in milestones[ : milestones.index(level)+1 ]:
1903 sys.stdout.write(message)
1906 message = "%s%%" % (remaining*2,)
1907 sys.stdout.write(message)
1909 milestones.remove(remaining)
1911 zone = closest(centroid, zones, "centroid", 0.1)
1913 zctas[zcta]["zone"] = zone
1916 level = int(50*count/estimate)
1917 if level in milestones:
1918 for remaining in milestones[:milestones.index(level)+1]:
1921 sys.stdout.write(message)
1924 message = "%s%%" % (remaining*2,)
1925 sys.stdout.write(message)
1927 milestones.remove(remaining)
1928 for zone in zones.keys():
1929 if "centroid" in zones[zone]:
1930 centroid = zones[zone]["centroid"]
1932 station = closest(centroid, stations, "location", 0.1)
1934 zones[zone]["station"] = station
1937 level = int(50*count/estimate)
1938 if level in milestones:
1939 for remaining in milestones[:milestones.index(level)+1]:
1942 sys.stdout.write(message)
1945 message = "%s%%" % (remaining*2,)
1946 sys.stdout.write(message)
1948 milestones.remove(remaining)
# Flush any milestones never reached (the estimate is an upper bound),
# so the meter always ends at 100%.
1949 for remaining in milestones:
1952 sys.stdout.write(message)
1955 message = "%s%%" % (remaining*2,)
1956 sys.stdout.write(message)
1958 print("\n done (%s correlations)." % count)
# Write the five generated INI-style files (airports, places,
# stations, zctas, zones). Each write rotates any existing file to
# *_old, emits the provenance header, then one [section] per entry
# with sorted key = value pairs. Floats are fixed to 7 decimal places
# and tuples are re-serialized element by element so output is
# deterministic and reparseable.
1959 message = "Writing %s..." % airports_fn
1960 sys.stdout.write(message)
1963 if os.path.exists(airports_fn):
1964 os.rename(airports_fn, "%s_old"%airports_fn)
1965 airports_fd = codecs.open(airports_fn, "w", "utf8")
1966 airports_fd.write(header)
1967 for airport in sorted( airports.keys() ):
1968 airports_fd.write("\n\n[%s]" % airport)
1969 for key, value in sorted( airports[airport].items() ):
1970 if type(value) is float: value = "%.7f"%value
1971 elif type(value) is tuple:
1973 for element in value:
1974 if type(element) is float: elements.append("%.7f"%element)
1975 else: elements.append( repr(element) )
1976 value = "(%s)"%", ".join(elements)
1977 airports_fd.write( "\n%s = %s" % (key, value) )
1979 airports_fd.write("\n")
1981 print("done (%s sections)." % count)
1982 message = "Writing %s..." % places_fn
1983 sys.stdout.write(message)
1986 if os.path.exists(places_fn):
1987 os.rename(places_fn, "%s_old"%places_fn)
1988 places_fd = codecs.open(places_fn, "w", "utf8")
1989 places_fd.write(header)
1990 for fips in sorted( places.keys() ):
1991 places_fd.write("\n\n[%s]" % fips)
1992 for key, value in sorted( places[fips].items() ):
1993 if type(value) is float: value = "%.7f"%value
1994 elif type(value) is tuple:
1996 for element in value:
1997 if type(element) is float: elements.append("%.7f"%element)
1998 else: elements.append( repr(element) )
1999 value = "(%s)"%", ".join(elements)
2000 places_fd.write( "\n%s = %s" % (key, value) )
2002 places_fd.write("\n")
2004 print("done (%s sections)." % count)
2005 message = "Writing %s..." % stations_fn
2006 sys.stdout.write(message)
2009 if os.path.exists(stations_fn):
2010 os.rename(stations_fn, "%s_old"%stations_fn)
2011 stations_fd = codecs.open(stations_fn, "w", "utf-8")
2012 stations_fd.write(header)
2013 for station in sorted( stations.keys() ):
2014 stations_fd.write("\n\n[%s]" % station)
2015 for key, value in sorted( stations[station].items() ):
2016 if type(value) is float: value = "%.7f"%value
2017 elif type(value) is tuple:
2019 for element in value:
2020 if type(element) is float: elements.append("%.7f"%element)
2021 else: elements.append( repr(element) )
2022 value = "(%s)"%", ".join(elements)
# Station values may still be bytes (read from mixed sources);
# decode so the UTF-8 text write succeeds.
2023 if type(value) is bytes:
2024 value = value.decode("utf-8")
2025 stations_fd.write( "\n%s = %s" % (key, value) )
2027 stations_fd.write("\n")
2029 print("done (%s sections)." % count)
2030 message = "Writing %s..." % zctas_fn
2031 sys.stdout.write(message)
2034 if os.path.exists(zctas_fn):
2035 os.rename(zctas_fn, "%s_old"%zctas_fn)
2036 zctas_fd = codecs.open(zctas_fn, "w", "utf8")
2037 zctas_fd.write(header)
2038 for zcta in sorted( zctas.keys() ):
2039 zctas_fd.write("\n\n[%s]" % zcta)
2040 for key, value in sorted( zctas[zcta].items() ):
2041 if type(value) is float: value = "%.7f"%value
2042 elif type(value) is tuple:
2044 for element in value:
2045 if type(element) is float: elements.append("%.7f"%element)
2046 else: elements.append( repr(element) )
2047 value = "(%s)"%", ".join(elements)
2048 zctas_fd.write( "\n%s = %s" % (key, value) )
2050 zctas_fd.write("\n")
2052 print("done (%s sections)." % count)
2053 message = "Writing %s..." % zones_fn
2054 sys.stdout.write(message)
2057 if os.path.exists(zones_fn):
2058 os.rename(zones_fn, "%s_old"%zones_fn)
2059 zones_fd = codecs.open(zones_fn, "w", "utf8")
2060 zones_fd.write(header)
2061 for zone in sorted( zones.keys() ):
2062 zones_fd.write("\n\n[%s]" % zone)
2063 for key, value in sorted( zones[zone].items() ):
2064 if type(value) is float: value = "%.7f"%value
2065 elif type(value) is tuple:
2067 for element in value:
2068 if type(element) is float: elements.append("%.7f"%element)
2069 else: elements.append( repr(element) )
2070 value = "(%s)"%", ".join(elements)
2071 zones_fd.write( "\n%s = %s" % (key, value) )
2073 zones_fd.write("\n")
2075 print("done (%s sections)." % count)
# QA pass: reread the just-written files through configparser (with a
# Python-2 fallback path that omits the encoding argument) and check
# every section for missing centroids, descriptions, locations, METAR
# URLs, stations, time zones and forecasts, plus zone centroids lying
# within one km of another zone. Issues go to a qalog file (previous
# log rotated to *_old) and counts are summarized on stdout.
2076 message = "Starting QA check..."
2077 sys.stdout.write(message)
2078 airports = configparser.ConfigParser()
2081 airports.read(airports_fn, encoding="utf-8")
2083 airports.read(airports_fn)
2084 places = configparser.ConfigParser()
2086 places.read(places_fn, encoding="utf-8")
2088 places.read(places_fn)
2089 stations = configparser.ConfigParser()
2091 stations.read(stations_fn, encoding="utf-8")
2093 stations.read(stations_fn)
2094 zctas = configparser.ConfigParser()
2096 zctas.read(zctas_fn, encoding="utf-8")
2098 zctas.read(zctas_fn)
2099 zones = configparser.ConfigParser()
2101 zones.read(zones_fn, encoding="utf-8")
2103 zones.read(zones_fn)
2105 places_nocentroid = 0
2106 places_nodescription = 0
2107 for place in sorted( places.sections() ):
2108 if not places.has_option(place, "centroid"):
2109 qalog.append("%s: no centroid\n" % place)
2110 places_nocentroid += 1
2111 if not places.has_option(place, "description"):
2112 qalog.append("%s: no description\n" % place)
2113 places_nodescription += 1
2114 stations_nodescription = 0
2115 stations_nolocation = 0
2116 stations_nometar = 0
2117 for station in sorted( stations.sections() ):
2118 if not stations.has_option(station, "description"):
2119 qalog.append("%s: no description\n" % station)
2120 stations_nodescription += 1
2121 if not stations.has_option(station, "location"):
2122 qalog.append("%s: no location\n" % station)
2123 stations_nolocation += 1
2124 if not stations.has_option(station, "metar"):
2125 qalog.append("%s: no metar\n" % station)
2126 stations_nometar += 1
2127 airports_badstation = 0
2128 airports_nostation = 0
2129 for airport in sorted( airports.sections() ):
2130 if not airports.has_option(airport, "station"):
2131 qalog.append("%s: no station\n" % airport)
2132 airports_nostation += 1
# Every referenced station must itself exist in the stations file.
2134 station = airports.get(airport, "station")
2135 if station not in stations.sections():
2136 qalog.append( "%s: bad station %s\n" % (airport, station) )
2137 airports_badstation += 1
2138 zctas_nocentroid = 0
2139 for zcta in sorted( zctas.sections() ):
2140 if not zctas.has_option(zcta, "centroid"):
2141 qalog.append("%s: no centroid\n" % zcta)
2142 zctas_nocentroid += 1
2143 zones_nocentroid = 0
2144 zones_nodescription = 0
2146 zones_noforecast = 0
2147 zones_overlapping = 0
# First collect all zone centroids, then for each zone search the
# table minus itself for a neighbor within one km.
# NOTE(security): eval() on stored centroid strings — acceptable only
# because the file was just generated by this same script.
2149 for zone in zones.sections():
2150 if zones.has_option(zone, "centroid"):
2152 "centroid": eval( zones.get(zone, "centroid") )
2154 for zone in sorted( zones.sections() ):
2155 if zones.has_option(zone, "centroid"):
2156 zonetable_local = zonetable.copy()
2157 del( zonetable_local[zone] )
2158 centroid = eval( zones.get(zone, "centroid") )
2160 nearest = closest(centroid, zonetable_local, "centroid", 0.1)
# closest() returns (match, angle); convert radians to km.
2161 if nearest[1]*radian_to_km < 1:
2162 qalog.append( "%s: within one km of %s\n" % (
2166 zones_overlapping += 1
2168 qalog.append("%s: no centroid\n" % zone)
2169 zones_nocentroid += 1
2170 if not zones.has_option(zone, "description"):
2171 qalog.append("%s: no description\n" % zone)
2172 zones_nodescription += 1
# A tz value must also name a real zoneinfo time zone to count.
2173 if not zones.has_option(zone, "tz") or not zones.get(
2174 zone, "tz") in zoneinfo.available_timezones():
2175 qalog.append("%s: no time zone\n" % zone)
2177 if not zones.has_option(zone, "zone_forecast"):
2178 qalog.append("%s: no forecast\n" % zone)
2179 zones_noforecast += 1
2180 if os.path.exists(qalog_fn):
2181 os.rename(qalog_fn, "%s_old"%qalog_fn)
2182 qalog_fd = codecs.open(qalog_fn, "w", "utf8")
2185 '# Copyright (c) %s Jeremy Stanley <fungi@yuggoth.org>. Permission to\n'
2186 '# use, copy, modify, and distribute this software is granted under terms\n'
2187 '# provided in the LICENSE file distributed with this software.\n\n'
2188 % time.gmtime().tm_year)
2189 qalog_fd.writelines(qalog)
# Summarize nonzero issue counts, or report a clean run.
2192 print("issues found (see %s for details):"%qalog_fn)
2193 if airports_badstation:
2194 print(" %s airports with invalid station"%airports_badstation)
2195 if airports_nostation:
2196 print(" %s airports with no station"%airports_nostation)
2197 if places_nocentroid:
2198 print(" %s places with no centroid"%places_nocentroid)
2199 if places_nodescription:
2200 print(" %s places with no description"%places_nodescription)
2201 if stations_nodescription:
2202 print(" %s stations with no description"%stations_nodescription)
2203 if stations_nolocation:
2204 print(" %s stations with no location"%stations_nolocation)
2205 if stations_nometar:
2206 print(" %s stations with no METAR"%stations_nometar)
2207 if zctas_nocentroid:
2208 print(" %s ZCTAs with no centroid"%zctas_nocentroid)
2209 if zones_nocentroid:
2210 print(" %s zones with no centroid"%zones_nocentroid)
2211 if zones_nodescription:
2212 print(" %s zones with no description"%zones_nodescription)
2214 print(" %s zones with no time zone"%zones_notz)
2215 if zones_noforecast:
2216 print(" %s zones with no forecast"%zones_noforecast)
2217 if zones_overlapping:
2218 print(" %s zones within one km of another"%zones_overlapping)
2219 else: print("no issues found.")
2220 print("Indexing complete!")