Cleanup code (#37)

* [apple,distrowatch,pypi] Remove unused imports

* [maven] use snake_case for variable names

* [eks,palo-alto-networks] Rename variables shadowing names from outer scopes

* [eks,palo-alto-networks] Remove unused variables

* [apple,haproxy,palo-alto-networks,rhel,ros,unrealircd] Rename variables shadowing built-in names
This commit is contained in:
Marc Wrobel
2022-12-14 04:50:45 +01:00
committed by GitHub
parent cb3284d507
commit b65b5ad4ee
10 changed files with 25 additions and 31 deletions

View File

@@ -3,7 +3,6 @@ import urllib.request
import datetime
from bs4 import BeautifulSoup
import re
-from html.parser import HTMLParser
URLS = [
"https://support.apple.com/en-us/HT201222", # latest
@@ -51,8 +50,8 @@ release_lists = {k: {} for k in CONFIG.keys()}
print("::group::apple")
-def parse_date(input):
-d, m, y = input.strip().split(" ")
+def parse_date(s):
+d, m, y = s.strip().split(" ")
m = m[0:3].lower()
return datetime.datetime.strptime("%s %s %s" % (d, m, y), "%d %b %Y")

View File

@@ -6,7 +6,6 @@ import json
import frontmatter
import urllib.request
from bs4 import BeautifulSoup
-from html.parser import HTMLParser
from liquid import Template
# Same as used in Ruby (update.rb)

View File

@@ -11,14 +11,13 @@ REGEX = r"^(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)$"
def parse_platforms_page():
-versions = {}
+all_versions = {}
with urllib.request.urlopen(URL, data=None, timeout=5) as contents:
html = markdown.markdown(contents.read().decode("utf-8"), extensions=["tables"])
soup = BeautifulSoup(html, features="html5lib")
for tr in soup.findAll("tr"):
td = tr.find("td")
if td and re.match(REGEX, td.text):
-version = td.text
data = tr.findAll("td")
date = data[-1].text
if len(date) > 0:
@@ -26,8 +25,8 @@ def parse_platforms_page():
k8s_version = ".".join(data[0].text.split(".")[:-1])
eks_version = data[1].text.replace(".", "-")
version = "%s-%s" % (k8s_version, eks_version)
-versions[version] = d
-return versions
+all_versions[version] = d
+return all_versions
if __name__ == "__main__":

View File

@@ -5,7 +5,7 @@ import urllib.request
# https://regex101.com/r/1JCnFC/1
REGEX = r"^(\d{4})\/(\d{2})\/(\d{2})\s+:\s+(\d+\.\d+\.\d.?)$"
-list = {}
+versions = {}
for i in range(17, 28):
url = "https://www.haproxy.org/download/%s/src/CHANGELOG" % (i / 10)
@@ -16,7 +16,7 @@ for i in range(17, 28):
if m:
year, month, date, version = m.groups()
abs_date = "%s-%s-%s" % (year, month, date)
-list[version] = abs_date
+versions[version] = abs_date
with open("releases/haproxy.json", "w") as f:
-f.write(json.dumps(list, indent=2))
+f.write(json.dumps(versions, indent=2))

View File

@@ -8,13 +8,13 @@ import datetime
def fetch_releases(package_identifier):
-groupId, artifactId = package_identifier.split("/")
+group_id, artifact_id = package_identifier.split("/")
releases = {}
start = 0
while True:
url = (
"https://search.maven.org/solrsearch/select?q=g:%s+AND+a:%s&core=gav&rows=100&wt=json&start=%s"
-% (groupId, artifactId, start)
+% (group_id, artifact_id, start)
)
with urllib.request.urlopen(url, data=None, timeout=5) as response:
data = json.load(response)

View File

@@ -12,11 +12,11 @@ ID_MAPPING = {
}
-def update_releases(html_id, file):
-list = {}
+def update_releases(html_identifier, file):
+versions = {}
with urllib.request.urlopen(URL, data=None, timeout=5) as response:
soup = BeautifulSoup(response, features="html5lib")
-table = soup.find(id=html_id)
+table = soup.find(id=html_identifier)
for tr in table.findAll("tr")[3:]:
td_list = tr.findAll("td")
version = (
@@ -31,14 +31,14 @@ def update_releases(html_id, file):
try:
month, date, year = td_list[1].get_text().split("/")
abs_date = f"{year}-{month:0>2}-{date:0>2}"
-except Exception as e:
+except Exception:
date = datetime.datetime.strptime(td_list[1].get_text(), "%B %d, %Y")
abs_date = date.strftime("%Y-%m-%d")
-list[version] = abs_date
+versions[version] = abs_date
with open("releases/%s.json" % file, "w") as f:
-f.write(json.dumps(list, indent=2))
+f.write(json.dumps(versions, indent=2))
for html_id in ID_MAPPING:

View File

@@ -5,10 +5,7 @@ import sys
import json
import frontmatter
import urllib.request
-from bs4 import BeautifulSoup
-from datetime import datetime
-from html.parser import HTMLParser
from liquid import Template
# Same as used in Ruby (update.rb)
DEFAULT_TAG_TEMPLATE = (

View File

@@ -7,7 +7,7 @@ URL = "https://access.redhat.com/articles/3078"
# https://regex101.com/r/877ibq/1
regex = r"RHEL (?P<major>\d)(\. ?(?P<minor>\d+))?(( Update (?P<minor2>\d))| GA)?"
-list = {}
+versions = {}
headers = {"user-agent": "mozilla"}
req = urllib.request.Request(URL, headers=headers)
@@ -23,7 +23,7 @@ with urllib.request.urlopen(req, timeout=5) as response:
version += ".%s" % m["minor"]
if m["minor2"]:
version += ".%s" % m["minor2"]
-list[version] = td_list[1].get_text()
+versions[version] = td_list[1].get_text()
with open("releases/redhat.json", "w") as f:
-f.write(json.dumps(list, indent=2))
+f.write(json.dumps(versions, indent=2))

View File

@@ -8,7 +8,7 @@ URL = "https://wiki.ros.org/Distributions"
# https://regex101.com/r/c1ribd/1
regex = r"^ROS (?P<name>(\w| )+)"
-list = {}
+versions = {}
with urllib.request.urlopen(URL, timeout=5) as response:
soup = BeautifulSoup(response, features="html5lib")
@@ -29,8 +29,8 @@ with urllib.request.urlopen(URL, timeout=5) as response:
x = td_list[1].get_text().split(",")
date = datetime.datetime.strptime(x[0][:-2] + x[1], "%B %d %Y")
abs_date = date.strftime("%Y-%m-%d")
-list[version] = abs_date
+versions[version] = abs_date
print("%s: %s" % (version, abs_date))
with open("releases/ros.json", "w") as f:
-f.write(json.dumps(list, indent=2))
+f.write(json.dumps(versions, indent=2))

View File

@@ -6,7 +6,7 @@ import urllib.request
URL = "https://www.unrealircd.org/docwiki/index.php?title=History_of_UnrealIRCd_releases&action=raw"
REGEX = r"^(?:(\d+\.(?:\d+\.)*\d+))$"
-list = {}
+versions = {}
with urllib.request.urlopen(URL) as response:
text = response.read()
wikicode = mwparserfromhell.parse(text)
@@ -17,8 +17,8 @@ with urllib.request.urlopen(URL) as response:
if re.match(REGEX, maybe_version):
maybe_date = items[1].__strip__()
if re.match(r"\d{4}-\d{2}-\d{2}", maybe_date):
-list[maybe_version] = maybe_date
+versions[maybe_version] = maybe_date
with open("releases/unrealircd.json", "w") as f:
-f.write(json.dumps(list, indent=2))
+f.write(json.dumps(versions, indent=2))