diff --git a/src/apple.py b/src/apple.py index f929d89b..4246a509 100644 --- a/src/apple.py +++ b/src/apple.py @@ -3,7 +3,6 @@ import urllib.request import datetime from bs4 import BeautifulSoup import re -from html.parser import HTMLParser URLS = [ "https://support.apple.com/en-us/HT201222", # latest @@ -51,8 +50,8 @@ release_lists = {k: {} for k in CONFIG.keys()} print("::group::apple") -def parse_date(input): - d, m, y = input.strip().split(" ") +def parse_date(s): + d, m, y = s.strip().split(" ") m = m[0:3].lower() return datetime.datetime.strptime("%s %s %s" % (d, m, y), "%d %b %Y") diff --git a/src/distrowatch.py b/src/distrowatch.py index 2a792796..f1c73c78 100644 --- a/src/distrowatch.py +++ b/src/distrowatch.py @@ -6,7 +6,6 @@ import json import frontmatter import urllib.request from bs4 import BeautifulSoup -from html.parser import HTMLParser from liquid import Template # Same as used in Ruby (update.rb) diff --git a/src/eks.py b/src/eks.py index 0144a8aa..60c2f31e 100644 --- a/src/eks.py +++ b/src/eks.py @@ -11,14 +11,13 @@ REGEX = r"^(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)$" def parse_platforms_page(): - versions = {} + all_versions = {} with urllib.request.urlopen(URL, data=None, timeout=5) as contents: html = markdown.markdown(contents.read().decode("utf-8"), extensions=["tables"]) soup = BeautifulSoup(html, features="html5lib") for tr in soup.findAll("tr"): td = tr.find("td") if td and re.match(REGEX, td.text): - version = td.text data = tr.findAll("td") date = data[-1].text if len(date) > 0: @@ -26,8 +25,8 @@ def parse_platforms_page(): k8s_version = ".".join(data[0].text.split(".")[:-1]) eks_version = data[1].text.replace(".", "-") version = "%s-%s" % (k8s_version, eks_version) - versions[version] = d - return versions + all_versions[version] = d + return all_versions if __name__ == "__main__": diff --git a/src/haproxy.py b/src/haproxy.py index 0ea7d7b4..052baa36 100644 --- a/src/haproxy.py +++ b/src/haproxy.py @@ -5,7 +5,7 @@ import urllib.request # 
https://regex101.com/r/1JCnFC/1 REGEX = r"^(\d{4})\/(\d{2})\/(\d{2})\s+:\s+(\d+\.\d+\.\d.?)$" -list = {} +versions = {} for i in range(17, 28): url = "https://www.haproxy.org/download/%s/src/CHANGELOG" % (i / 10) @@ -16,7 +16,7 @@ for i in range(17, 28): if m: year, month, date, version = m.groups() abs_date = "%s-%s-%s" % (year, month, date) - list[version] = abs_date + versions[version] = abs_date with open("releases/haproxy.json", "w") as f: - f.write(json.dumps(list, indent=2)) + f.write(json.dumps(versions, indent=2)) diff --git a/src/maven.py b/src/maven.py index c8511482..88f89af3 100644 --- a/src/maven.py +++ b/src/maven.py @@ -8,13 +8,13 @@ import datetime def fetch_releases(package_identifier): - groupId, artifactId = package_identifier.split("/") + group_id, artifact_id = package_identifier.split("/") releases = {} start = 0 while True: url = ( "https://search.maven.org/solrsearch/select?q=g:%s+AND+a:%s&core=gav&rows=100&wt=json&start=%s" - % (groupId, artifactId, start) + % (group_id, artifact_id, start) ) with urllib.request.urlopen(url, data=None, timeout=5) as response: data = json.load(response) diff --git a/src/palo-alto-networks.py b/src/palo-alto-networks.py index cb7e9ea5..1f003ef7 100644 --- a/src/palo-alto-networks.py +++ b/src/palo-alto-networks.py @@ -12,11 +12,11 @@ ID_MAPPING = { } -def update_releases(html_id, file): - list = {} +def update_releases(html_identifier, file): + versions = {} with urllib.request.urlopen(URL, data=None, timeout=5) as response: soup = BeautifulSoup(response, features="html5lib") - table = soup.find(id=html_id) + table = soup.find(id=html_identifier) for tr in table.findAll("tr")[3:]: td_list = tr.findAll("td") version = ( @@ -31,14 +31,14 @@ def update_releases(html_id, file): try: month, date, year = td_list[1].get_text().split("/") abs_date = f"{year}-{month:0>2}-{date:0>2}" - except Exception as e: + except Exception: date = datetime.datetime.strptime(td_list[1].get_text(), "%B %d, %Y") abs_date = 
date.strftime("%Y-%m-%d") - list[version] = abs_date + versions[version] = abs_date with open("releases/%s.json" % file, "w") as f: - f.write(json.dumps(list, indent=2)) + f.write(json.dumps(versions, indent=2)) for html_id in ID_MAPPING: diff --git a/src/pypi.py b/src/pypi.py index 4b606a7b..a6a7f624 100644 --- a/src/pypi.py +++ b/src/pypi.py @@ -5,10 +5,7 @@ import sys import json import frontmatter import urllib.request -from bs4 import BeautifulSoup from datetime import datetime -from html.parser import HTMLParser -from liquid import Template # Same as used in Ruby (update.rb) DEFAULT_TAG_TEMPLATE = ( diff --git a/src/rhel.py b/src/rhel.py index 1e64f29c..8ce16dc5 100644 --- a/src/rhel.py +++ b/src/rhel.py @@ -7,7 +7,7 @@ URL = "https://access.redhat.com/articles/3078" # https://regex101.com/r/877ibq/1 regex = r"RHEL (?P<major>\d)(\. ?(?P<minor>\d+))?(( Update (?P<minor2>\d))| GA)?" -list = {} +versions = {} headers = {"user-agent": "mozilla"} req = urllib.request.Request(URL, headers=headers) @@ -23,7 +23,7 @@ with urllib.request.urlopen(req, timeout=5) as response: version += ".%s" % m["minor"] if m["minor2"]: version += ".%s" % m["minor2"] - list[version] = td_list[1].get_text() + versions[version] = td_list[1].get_text() with open("releases/redhat.json", "w") as f: - f.write(json.dumps(list, indent=2)) + f.write(json.dumps(versions, indent=2)) diff --git a/src/ros.py b/src/ros.py index cbfa8303..21088d5c 100644 --- a/src/ros.py +++ b/src/ros.py @@ -8,7 +8,7 @@ URL = "https://wiki.ros.org/Distributions" # https://regex101.com/r/c1ribd/1 regex = r"^ROS (?P<version>(\w| )+)" -list = {} +versions = {} with urllib.request.urlopen(URL, timeout=5) as response: soup = BeautifulSoup(response, features="html5lib") @@ -29,8 +29,8 @@ with urllib.request.urlopen(URL, timeout=5) as response: x = td_list[1].get_text().split(",") date = datetime.datetime.strptime(x[0][:-2] + x[1], "%B %d %Y") abs_date = date.strftime("%Y-%m-%d") - list[version] = abs_date + versions[version] = abs_date print("%s: %s" % 
(version, abs_date)) with open("releases/ros.json", "w") as f: - f.write(json.dumps(list, indent=2)) + f.write(json.dumps(versions, indent=2)) diff --git a/src/unrealircd.py b/src/unrealircd.py index 10959f73..f23e5272 100644 --- a/src/unrealircd.py +++ b/src/unrealircd.py @@ -6,7 +6,7 @@ import urllib.request URL = "https://www.unrealircd.org/docwiki/index.php?title=History_of_UnrealIRCd_releases&action=raw" REGEX = r"^(?:(\d+\.(?:\d+\.)*\d+))$" -list = {} +versions = {} with urllib.request.urlopen(URL) as response: text = response.read() wikicode = mwparserfromhell.parse(text) @@ -17,8 +17,8 @@ with urllib.request.urlopen(URL) as response: if re.match(REGEX, maybe_version): maybe_date = items[1].__strip__() if re.match(r"\d{4}-\d{2}-\d{2}", maybe_date): - list[maybe_version] = maybe_date + versions[maybe_version] = maybe_date with open("releases/unrealircd.json", "w") as f: - f.write(json.dumps(list, indent=2)) + f.write(json.dumps(versions, indent=2))