From b339c81ead0409018687d938aff2b1cc51dc91ad Mon Sep 17 00:00:00 2001 From: Marc Wrobel Date: Sun, 7 Jan 2024 20:22:46 +0100 Subject: [PATCH] Split endoflife.date and releasedata models in separate files (#276) This makes the code easier to read. --- src/amazon-neptune.py | 4 +- src/apache-http-server.py | 4 +- src/apple.py | 4 +- src/artifactory.py | 4 +- src/aws-lambda.py | 6 +-- src/cgit.py | 4 +- src/coldfusion.py | 4 +- src/common/endoflife.py | 96 +------------------------------------ src/common/releasedata.py | 99 +++++++++++++++++++++++++++++++++++++++ src/confluence.py | 4 +- src/cos.py | 4 +- src/couchbase-server.py | 4 +- src/debian.py | 8 ++-- src/distrowatch.py | 4 +- src/docker_hub.py | 6 +-- src/eks.py | 4 +- src/firefox.py | 4 +- src/git.py | 4 +- src/github-releases.py | 4 +- src/gke.py | 4 +- src/graalvm.py | 4 +- src/haproxy.py | 4 +- src/ibm-aix.py | 4 +- src/jira.py | 4 +- src/looker.py | 4 +- src/maven.py | 4 +- src/npm.py | 4 +- src/nutanix.py | 4 +- src/oracle-jdk.py | 4 +- src/palo-alto-networks.py | 4 +- src/php.py | 4 +- src/plesk.py | 4 +- src/pypi.py | 4 +- src/rds.py | 4 +- src/red-hat-openshift.py | 4 +- src/redhat-satellite.py | 4 +- src/rhel.py | 4 +- src/rockylinux.py | 4 +- src/ros.py | 4 +- src/sles.py | 4 +- src/splunk.py | 4 +- src/typo3.py | 4 +- src/unity.py | 4 +- src/unrealircd.py | 4 +- src/visualstudio.py | 4 +- 45 files changed, 190 insertions(+), 185 deletions(-) create mode 100644 src/common/releasedata.py diff --git a/src/amazon-neptune.py b/src/amazon-neptune.py index 92831cd8..0403590b 100644 --- a/src/amazon-neptune.py +++ b/src/amazon-neptune.py @@ -1,14 +1,14 @@ import re import xml.dom.minidom -from common import dates, endoflife, http +from common import dates, http, releasedata """Fetches Amazon Neptune versions from its RSS feed on docs.aws.amazon.com.""" RSS_URL = "https://docs.aws.amazon.com/neptune/latest/userguide/rssupdates.rss" VERSION_PATTERN = re.compile(r"^Engine version (?P[0-9R.]+)$") -product = 
endoflife.Product("amazon-neptune") +product = releasedata.Product("amazon-neptune") rss_response = http.fetch_url(RSS_URL) rss = xml.dom.minidom.parseString(rss_response.text) diff --git a/src/apache-http-server.py b/src/apache-http-server.py index 21038dd4..42711a74 100644 --- a/src/apache-http-server.py +++ b/src/apache-http-server.py @@ -1,6 +1,6 @@ import re -from common import dates, endoflife +from common import dates, releasedata from common.git import Git """Fetches Apache HTTP Server versions and release date from its git repository @@ -15,7 +15,7 @@ VERSION_AND_DATE_PATTERNS = [ re.compile(r"\s+(?P\d+\.\d+\.\d+)\s*:.*Tagged and [rR]olled\s(?:on\s)?(?P\w+\.?\s\d\d?,\s\d{4})"), ] -product = endoflife.Product("apache-http-server") +product = releasedata.Product("apache-http-server") git = Git("https://github.com/apache/httpd.git") git.setup() diff --git a/src/apple.py b/src/apple.py index a90ecf1a..180d6be9 100644 --- a/src/apple.py +++ b/src/apple.py @@ -2,7 +2,7 @@ import logging import re from bs4 import BeautifulSoup -from common import dates, endoflife, http +from common import dates, http, releasedata """Fetches and parses version and release date information from Apple's support website for macOS, iOS, iPadOS, and watchOS. While all URLs are fetched once for performance reasons, the actual @@ -56,7 +56,7 @@ soups = [BeautifulSoup(response.text, features="html5lib") for response in http. 
logging.info("::endgroup::") for product_name in VERSION_PATTERNS: - product = endoflife.Product(product_name) + product = releasedata.Product(product_name) for soup in soups: versions_table = soup.find(id="tableWraper") versions_table = versions_table if versions_table else soup.find('table', class_="gb-table") diff --git a/src/artifactory.py b/src/artifactory.py index d37c0b45..be93088c 100644 --- a/src/artifactory.py +++ b/src/artifactory.py @@ -1,11 +1,11 @@ -from common import dates, endoflife +from common import dates, releasedata from requests_html import HTMLSession """Fetches Artifactory versions from https://jfrog.com, using requests_html because JavaScript is needed to render the page.""" -product = endoflife.Product("artifactory") +product = releasedata.Product("artifactory") r = HTMLSession().get("https://jfrog.com/help/r/jfrog-release-information/artifactory-end-of-life") r.html.render(sleep=2, scrolldown=5) diff --git a/src/aws-lambda.py b/src/aws-lambda.py index c458e2f5..541d82be 100644 --- a/src/aws-lambda.py +++ b/src/aws-lambda.py @@ -1,6 +1,6 @@ from bs4 import BeautifulSoup -from common import dates, endoflife, http +from common import dates, endoflife, http, releasedata """Fetches AWS lambda runtimes from https://docs.aws.amazon.com. @@ -12,8 +12,8 @@ If one day release dates are available in the AWS documentation, it would be bet them though. 
Note that this would also be unnecessary if it was possible to disable release/latest release dates updates in the latest.py script.""" -product = endoflife.Product("aws-lambda") -old_product = endoflife.Product.from_file(product.name) +product = releasedata.Product("aws-lambda") +old_product = releasedata.Product.from_file(product.name) product_frontmatter = endoflife.ProductFrontmatter(product.name) response = http.fetch_url("https://docs.aws.amazon.com/lambda/latest/dg/lambda-runtimes.html") soup = BeautifulSoup(response.text, features="html5lib") diff --git a/src/cgit.py b/src/cgit.py index 1743701b..dc02cdb0 100644 --- a/src/cgit.py +++ b/src/cgit.py @@ -1,7 +1,7 @@ import sys from bs4 import BeautifulSoup -from common import dates, endoflife, http +from common import dates, endoflife, http, releasedata """Fetches versions from repositories managed with cgit, such as the Linux kernel repository. Ideally we would want to use the git repository directly, but cgit-managed repositories don't support partial clone.""" @@ -10,7 +10,7 @@ METHOD = "cgit" p_filter = sys.argv[1] if len(sys.argv) > 1 else None for product_name in endoflife.list_products(METHOD, p_filter): - product = endoflife.Product(product_name) + product = releasedata.Product(product_name) product_frontmatter = endoflife.ProductFrontmatter(product.name) for auto_config in product_frontmatter.get_auto_configs(METHOD): response = http.fetch_url(auto_config.url + '/refs/tags') diff --git a/src/coldfusion.py b/src/coldfusion.py index 85b1f314..5213908d 100644 --- a/src/coldfusion.py +++ b/src/coldfusion.py @@ -1,7 +1,7 @@ import re from bs4 import BeautifulSoup -from common import dates, endoflife, http +from common import dates, http, releasedata """Fetches versions from Adobe ColdFusion release notes on helpx.adobe.com. 
@@ -31,7 +31,7 @@ FIXED_VERSIONS = { "2023.0.0": dates.date(2022, 5, 16), # https://coldfusion.adobe.com/2023/05/coldfusion2023-release/ } -product = endoflife.Product("coldfusion") +product = releasedata.Product("coldfusion") for changelog in http.fetch_urls(URLS): changelog_soup = BeautifulSoup(changelog.text, features="html5lib") diff --git a/src/common/endoflife.py b/src/common/endoflife.py index 5277fa56..f441634c 100644 --- a/src/common/endoflife.py +++ b/src/common/endoflife.py @@ -1,8 +1,7 @@ -import json import logging import os import re -from datetime import datetime, timezone +from datetime import datetime from pathlib import Path import frontmatter @@ -18,7 +17,6 @@ DEFAULT_VERSION_PATTERN = re.compile(DEFAULT_VERSION_REGEX) DEFAULT_VERSION_TEMPLATE = "{{major}}{% if minor %}.{{minor}}{% if patch %}.{{patch}}{% if tiny %}.{{tiny}}{% endif %}{% endif %}{% endif %}" PRODUCTS_PATH = Path(os.environ.get("PRODUCTS_PATH", "website/products")) -VERSIONS_PATH = Path(os.environ.get("VERSIONS_PATH", "releases")) class AutoConfig: @@ -75,98 +73,6 @@ class ProductFrontmatter: return None -class ProductVersion: - def __init__(self, product: "Product", name: str, date: datetime) -> None: - self.product = str(product) - self.name = name - self.date = date - - @staticmethod - def from_json(product: "Product", data: dict) -> "ProductVersion": - name = data["name"] - date = datetime.strptime(data["date"], "%Y-%m-%d").replace(tzinfo=timezone.utc) - return ProductVersion(product, name, date) - - def __dict__(self) -> dict: - return { - "name": self.name, - "date": self.date.strftime("%Y-%m-%d"), - } - - def __repr__(self) -> str: - return f"{self.product}#{self.name} ({self.date})" - - -class Product: - def __init__(self, name: str) -> None: - self.name: str = name - self.versions_path: Path = VERSIONS_PATH / f"{name}.json" - self.versions: dict[str, ProductVersion] = {} - logging.info(f"::group::{self}") - - @staticmethod - def from_file(name: str) -> "Product": - 
product = Product(name) - - if product.versions_path.is_file(): - with product.versions_path.open() as f: - for json_version in json.load(f)["versions"].values(): - version = ProductVersion.from_json(product, json_version) - product.versions[version.name] = version - logging.info(f"loaded versions data for {product} from {product.versions_path}") - else: - logging.warning(f"no versions data found for {product} at {product.versions_path}") - - return product - - def has_version(self, version: str) -> bool: - return version in self.versions - - def get_version_date(self, version: str) -> datetime: - return self.versions[version].date if version in self.versions else None - - def declare_version(self, version: str, date: datetime) -> None: - if version in self.versions: - if self.versions[version].date != date: - logging.warning(f"overwriting {version} ({self.get_version_date(version)} -> {date}) for {self}") - else: - return # already declared - - logging.info(f"adding version {version} ({date}) to {self}") - self.versions[version] = ProductVersion(self, version, date) - - def declare_versions(self, dates_by_version: dict[str, datetime]) -> None: - for (version, date) in dates_by_version.items(): - self.declare_version(version, date) - - def replace_version(self, version: str, date: datetime) -> None: - if version not in self.versions: - msg = f"version {version} cannot be replaced as it does not exist for {self}" - raise ValueError(msg) - - logging.info(f"replacing version {version} ({self.get_version_date(version)} -> {date}) in {self}") - self.versions[version].date = date - - def remove_version(self, version: str) -> None: - if not self.has_version(version): - logging.warning(f"version {version} cannot be removed as it does not exist for {self}") - return - - logging.info(f"removing version {version} ({self.versions.pop(version)}) from {self}") - - def write(self) -> None: - # sort by date then version (desc) - ordered_versions = sorted(self.versions.values(), 
key=lambda v: (v.date, v.name), reverse=True) - with self.versions_path.open("w") as f: - f.write(json.dumps({ - "versions": {version.name: version.__dict__() for version in ordered_versions}, - }, indent=2)) - logging.info("::endgroup::") - - def __repr__(self) -> str: - return self.name - - def list_products(method: str, products_filter: str = None) -> list[str]: """Return a list of products that are using the same given update method.""" products = [] diff --git a/src/common/releasedata.py b/src/common/releasedata.py new file mode 100644 index 00000000..8f53bd1f --- /dev/null +++ b/src/common/releasedata.py @@ -0,0 +1,99 @@ +import json +import logging +import os +from datetime import datetime, timezone +from pathlib import Path + +VERSIONS_PATH = Path(os.environ.get("VERSIONS_PATH", "releases")) + + +class ProductVersion: + def __init__(self, product: "Product", name: str, date: datetime) -> None: + self.product = str(product) + self.name = name + self.date = date + + @staticmethod + def from_json(product: "Product", data: dict) -> "ProductVersion": + name = data["name"] + date = datetime.strptime(data["date"], "%Y-%m-%d").replace(tzinfo=timezone.utc) + return ProductVersion(product, name, date) + + def __dict__(self) -> dict: + return { + "name": self.name, + "date": self.date.strftime("%Y-%m-%d"), + } + + def __repr__(self) -> str: + return f"{self.product}#{self.name} ({self.date})" + + +class Product: + def __init__(self, name: str) -> None: + self.name: str = name + self.versions_path: Path = VERSIONS_PATH / f"{name}.json" + self.versions: dict[str, ProductVersion] = {} + logging.info(f"::group::{self}") + + @staticmethod + def from_file(name: str) -> "Product": + product = Product(name) + + if product.versions_path.is_file(): + with product.versions_path.open() as f: + for json_version in json.load(f)["versions"].values(): + version = ProductVersion.from_json(product, json_version) + product.versions[version.name] = version + logging.info(f"loaded 
versions data for {product} from {product.versions_path}") + else: + logging.warning(f"no versions data found for {product} at {product.versions_path}") + + return product + + def has_version(self, version: str) -> bool: + return version in self.versions + + def get_version_date(self, version: str) -> datetime: + return self.versions[version].date if version in self.versions else None + + def declare_version(self, version: str, date: datetime) -> None: + if version in self.versions: + if self.versions[version].date != date: + logging.warning(f"overwriting {version} ({self.get_version_date(version)} -> {date}) for {self}") + else: + return # already declared + + logging.info(f"adding version {version} ({date}) to {self}") + self.versions[version] = ProductVersion(self, version, date) + + def declare_versions(self, dates_by_version: dict[str, datetime]) -> None: + for (version, date) in dates_by_version.items(): + self.declare_version(version, date) + + def replace_version(self, version: str, date: datetime) -> None: + if version not in self.versions: + msg = f"version {version} cannot be replaced as it does not exist for {self}" + raise ValueError(msg) + + logging.info(f"replacing version {version} ({self.get_version_date(version)} -> {date}) in {self}") + self.versions[version].date = date + + def remove_version(self, version: str) -> None: + if not self.has_version(version): + logging.warning(f"version {version} cannot be removed as it does not exist for {self}") + return + + logging.info(f"removing version {version} ({self.versions.pop(version)}) from {self}") + + def write(self) -> None: + # sort by date then version (desc) + ordered_versions = sorted(self.versions.values(), key=lambda v: (v.date, v.name), reverse=True) + with self.versions_path.open("w") as f: + f.write(json.dumps({ + "versions": {version.name: version.__dict__() for version in ordered_versions}, + }, indent=2)) + logging.info("::endgroup::") + + def __repr__(self) -> str: + return self.name 
diff --git a/src/confluence.py b/src/confluence.py index 490e623c..34e413a3 100644 --- a/src/confluence.py +++ b/src/confluence.py @@ -1,11 +1,11 @@ -from common import dates, endoflife +from common import dates, releasedata from requests_html import HTMLSession """Fetches Confluence versions from www.atlassian.com. Note that requests_html is used because JavaScript is needed to render the page.""" -product = endoflife.Product("confluence") +product = releasedata.Product("confluence") r = HTMLSession().get("https://www.atlassian.com/software/confluence/download-archives") r.html.render(sleep=1, scrolldown=3) diff --git a/src/cos.py b/src/cos.py index a95e286c..47c2fcf8 100644 --- a/src/cos.py +++ b/src/cos.py @@ -2,7 +2,7 @@ import datetime import re from bs4 import BeautifulSoup -from common import dates, endoflife, http +from common import dates, http, releasedata MILESTONE_PATTERN = re.compile(r'COS \d+ LTS') VERSION_PATTERN = re.compile(r"^(cos-\d+-\d+-\d+-\d+)") @@ -14,7 +14,7 @@ def parse_date(date_text: str) -> datetime: return dates.parse_date(date_text) -product = endoflife.Product("cos") +product = releasedata.Product("cos") main = http.fetch_url("https://cloud.google.com/container-optimized-os/docs/release-notes/") main_soup = BeautifulSoup(main.text, features="html5lib") milestones = [cell.text.split(' ')[1] for cell in main_soup.find_all('td', string=MILESTONE_PATTERN)] diff --git a/src/couchbase-server.py b/src/couchbase-server.py index b6271531..36b0db12 100644 --- a/src/couchbase-server.py +++ b/src/couchbase-server.py @@ -1,7 +1,7 @@ import re from bs4 import BeautifulSoup -from common import dates, endoflife, http +from common import dates, http, releasedata """Fetches versions from release notes of each minor version on docs.couchbase.com. 
@@ -18,7 +18,7 @@ MANUAL_VERSIONS = { "7.2.0": dates.date(2023, 6, 1), # https://www.couchbase.com/blog/couchbase-capella-spring-release-72/ } -product = endoflife.Product("couchbase-server") +product = releasedata.Product("couchbase-server") main = http.fetch_url(f"{URLS}/current/install/install-intro.html") main_soup = BeautifulSoup(main.text, features="html5lib") diff --git a/src/debian.py b/src/debian.py index cdcd0faf..e2fffcb3 100644 --- a/src/debian.py +++ b/src/debian.py @@ -1,13 +1,13 @@ from pathlib import Path from subprocess import run -from common import dates, endoflife +from common import dates, releasedata from common.git import Git """Fetch Debian versions by parsing news in www.debian.org source repository.""" -def extract_major_versions(p: endoflife.Product, repo_dir: Path) -> None: +def extract_major_versions(p: releasedata.Product, repo_dir: Path) -> None: child = run( f"grep -RhE -A 1 'Debian [0-9]+.+ released' {repo_dir}/english/News " f"| cut -d '<' -f 2 " @@ -26,7 +26,7 @@ def extract_major_versions(p: endoflife.Product, repo_dir: Path) -> None: is_release_line = True -def extract_point_versions(p: endoflife.Product, repo_dir: Path) -> None: +def extract_point_versions(p: releasedata.Product, repo_dir: Path) -> None: child = run( f"grep -Rh -B 10 '' {repo_dir}/english/News " "| grep -Eo '(release_date>(.*)<|revision>(.*)<)' " @@ -41,7 +41,7 @@ def extract_point_versions(p: endoflife.Product, repo_dir: Path) -> None: p.declare_version(version, dates.parse_date(date)) -product = endoflife.Product("debian") +product = releasedata.Product("debian") git = Git("https://salsa.debian.org/webmaster-team/webwml.git") git.setup() git.checkout("master", file_list=["english/News"]) diff --git a/src/distrowatch.py b/src/distrowatch.py index 2b3d40bb..e263d1b7 100644 --- a/src/distrowatch.py +++ b/src/distrowatch.py @@ -1,13 +1,13 @@ import sys from bs4 import BeautifulSoup -from common import dates, endoflife, http +from common import dates, endoflife, 
http, releasedata METHOD = 'distrowatch' p_filter = sys.argv[1] if len(sys.argv) > 1 else None for product_name in endoflife.list_products(METHOD, p_filter): - product = endoflife.Product(product_name) + product = releasedata.Product(product_name) product_frontmatter = endoflife.ProductFrontmatter(product.name) for config in product_frontmatter.get_auto_configs(METHOD): response = http.fetch_url(f"https://distrowatch.com/index.php?distribution={config.url}") diff --git a/src/docker_hub.py b/src/docker_hub.py index 35500344..833041a4 100644 --- a/src/docker_hub.py +++ b/src/docker_hub.py @@ -1,6 +1,6 @@ import sys -from common import dates, endoflife, http +from common import dates, endoflife, http, releasedata """Fetches releases from the Docker Hub API. @@ -9,7 +9,7 @@ Unfortunately images creation date cannot be retrieved, so we had to use the tag METHOD = "docker_hub" -def fetch_releases(p: endoflife.Product, c: endoflife.AutoConfig, url: str) -> None: +def fetch_releases(p: releasedata.Product, c: endoflife.AutoConfig, url: str) -> None: data = http.fetch_url(url).json() for result in data["results"]: @@ -24,7 +24,7 @@ def fetch_releases(p: endoflife.Product, c: endoflife.AutoConfig, url: str) -> N p_filter = sys.argv[1] if len(sys.argv) > 1 else None for product_name in endoflife.list_products(METHOD, p_filter): - product = endoflife.Product(product_name) + product = releasedata.Product(product_name) product_frontmatter = endoflife.ProductFrontmatter(product.name) for config in product_frontmatter.get_auto_configs(METHOD): fetch_releases(product, config, f"https://hub.docker.com/v2/repositories/{config.url}/tags?page_size=100&page=1") diff --git a/src/eks.py b/src/eks.py index 2739392c..eef20b8b 100644 --- a/src/eks.py +++ b/src/eks.py @@ -1,5 +1,5 @@ from bs4 import BeautifulSoup -from common import dates, endoflife, http +from common import dates, endoflife, http, releasedata """Fetches EKS versions from AWS docs. 
Now that AWS no longer publishes docs on GitHub, we use the Web Archive to still get the older versions.""" @@ -13,7 +13,7 @@ URLS = [ "https://docs.aws.amazon.com/eks/latest/userguide/platform-versions.html", ] -product = endoflife.Product("eks") +product = releasedata.Product("eks") for version_list in http.fetch_urls(URLS): version_list_soup = BeautifulSoup(version_list.text, features="html5lib") for tr in version_list_soup.select("#main-col-body")[0].findAll("tr"): diff --git a/src/firefox.py b/src/firefox.py index 580b8f12..38d4df51 100644 --- a/src/firefox.py +++ b/src/firefox.py @@ -2,13 +2,13 @@ import re import urllib.parse from bs4 import BeautifulSoup -from common import dates, endoflife, http +from common import dates, http, releasedata """Fetch Firefox versions with their dates from https://www.mozilla.org/. Versions lower than 10.0 are ignored because too difficult to parse.""" -product = endoflife.Product("firefox") +product = releasedata.Product("firefox") releases_page = http.fetch_url("https://www.mozilla.org/en-US/firefox/releases/") releases_soup = BeautifulSoup(releases_page.text, features="html5lib") releases_list = releases_soup.find_all("ol", class_="c-release-list") diff --git a/src/git.py b/src/git.py index 286b87d9..2168c65d 100644 --- a/src/git.py +++ b/src/git.py @@ -1,6 +1,6 @@ import sys -from common import dates, endoflife +from common import dates, endoflife, releasedata from common.git import Git """Fetches versions from tags in a git repository. 
This replace the old update.rb script.""" @@ -9,7 +9,7 @@ METHOD = 'git' p_filter = sys.argv[1] if len(sys.argv) > 1 else None for product_name in endoflife.list_products(METHOD, p_filter): - product = endoflife.Product(product_name) + product = releasedata.Product(product_name) product_frontmatter = endoflife.ProductFrontmatter(product.name) for config in product_frontmatter.get_auto_configs(METHOD): git = Git(config.url) diff --git a/src/github-releases.py b/src/github-releases.py index 065194d7..e628fa24 100644 --- a/src/github-releases.py +++ b/src/github-releases.py @@ -3,7 +3,7 @@ import logging import subprocess import sys -from common import dates, endoflife +from common import dates, endoflife, releasedata """Fetches versions from GitHub releases using the GraphQL API and the GitHub CLI. @@ -44,7 +44,7 @@ query($endCursor: String) { p_filter = sys.argv[1] if len(sys.argv) > 1 else None for product_name in endoflife.list_products(METHOD, p_filter): - product = endoflife.Product(product_name) + product = releasedata.Product(product_name) product_frontmatter = endoflife.ProductFrontmatter(product.name) for config in product_frontmatter.get_auto_configs(METHOD): for page in fetch_releases(config.url): diff --git a/src/gke.py b/src/gke.py index 8507fc82..f008a697 100644 --- a/src/gke.py +++ b/src/gke.py @@ -1,7 +1,7 @@ import re from bs4 import BeautifulSoup -from common import dates, endoflife, http +from common import dates, http, releasedata # https://regex101.com/r/zPxBqT/1 VERSION_PATTERN = re.compile(r"\d.\d+\.\d+-gke\.\d+") @@ -13,7 +13,7 @@ URL_BY_PRODUCT = { } for product_name, url in URL_BY_PRODUCT.items(): - product = endoflife.Product(product_name) + product = releasedata.Product(product_name) relnotes = http.fetch_url(url) relnotes_soup = BeautifulSoup(relnotes.text, features="html5lib") diff --git a/src/graalvm.py b/src/graalvm.py index 4fb8b8c1..6b650091 100644 --- a/src/graalvm.py +++ b/src/graalvm.py @@ -1,7 +1,7 @@ from bs4 import 
BeautifulSoup -from common import dates, endoflife, http +from common import dates, http, releasedata -product = endoflife.Product("graalvm") +product = releasedata.Product("graalvm") release_calendar = http.fetch_url("https://www.graalvm.org/release-calendar/") release_calendar_soup = BeautifulSoup(release_calendar.text, features="html5lib") diff --git a/src/haproxy.py b/src/haproxy.py index 4e06c002..f584c250 100644 --- a/src/haproxy.py +++ b/src/haproxy.py @@ -1,12 +1,12 @@ import re from bs4 import BeautifulSoup -from common import dates, endoflife, http +from common import dates, http, releasedata CYCLE_PATTERN = re.compile(r"^(\d+\.\d+)/$") DATE_AND_VERSION_PATTERN = re.compile(r"^(\d{4})/(\d{2})/(\d{2})\s+:\s+(\d+\.\d+\.\d.?)$") # https://regex101.com/r/1JCnFC/1 -product = endoflife.Product("haproxy") +product = releasedata.Product("haproxy") # First, get all minor releases from the download page download = http.fetch_url('https://www.haproxy.org/download/') download_soup = BeautifulSoup(download.text, features="html5lib") diff --git a/src/ibm-aix.py b/src/ibm-aix.py index d4c3b3c8..b4e9bc51 100644 --- a/src/ibm-aix.py +++ b/src/ibm-aix.py @@ -1,12 +1,12 @@ from bs4 import BeautifulSoup -from common import dates, endoflife, http +from common import dates, http, releasedata URLS = [ "https://web.archive.org/web/20210123024247/https://www.ibm.com/support/pages/aix-support-lifecycle-information", "https://www.ibm.com/support/pages/aix-support-lifecycle-information", ] -product = endoflife.Product("ibm-aix") +product = releasedata.Product("ibm-aix") for page in http.fetch_urls(URLS): page_soup = BeautifulSoup(page.text, features="html5lib") diff --git a/src/jira.py b/src/jira.py index d6400914..33476b07 100644 --- a/src/jira.py +++ b/src/jira.py @@ -1,11 +1,11 @@ -from common import dates, endoflife +from common import dates, releasedata from requests_html import HTMLSession """Fetches Jira versions from www.atlassian.com. 
Note that requests_html is used because JavaScript is needed to render the page.""" -product = endoflife.Product("jira") +product = releasedata.Product("jira") r = HTMLSession().get("https://www.atlassian.com/software/jira/update") r.html.render(sleep=1, scrolldown=3) diff --git a/src/looker.py b/src/looker.py index dabe1e76..b42ed499 100644 --- a/src/looker.py +++ b/src/looker.py @@ -2,7 +2,7 @@ import re import xml.dom.minidom from bs4 import BeautifulSoup -from common import dates, endoflife, http +from common import dates, http, releasedata """Fetch Looker versions from the Google Cloud release notes RSS feed. """ @@ -10,7 +10,7 @@ from common import dates, endoflife, http ANNOUNCEMENT_PATTERN = re.compile(r"includes\s+the\s+following\s+changes", re.IGNORECASE) VERSION_PATTERN = re.compile(r"Looker\s+(?P\d+\.\d+)", re.IGNORECASE) -product = endoflife.Product("looker") +product = releasedata.Product("looker") response = http.fetch_url("https://cloud.google.com/feeds/looker-release-notes.xml") rss = xml.dom.minidom.parseString(response.text) diff --git a/src/maven.py b/src/maven.py index 29a72630..382c7015 100644 --- a/src/maven.py +++ b/src/maven.py @@ -1,13 +1,13 @@ import sys from datetime import datetime, timezone -from common import endoflife, http +from common import endoflife, http, releasedata METHOD = "maven" p_filter = sys.argv[1] if len(sys.argv) > 1 else None for product_name in endoflife.list_products(METHOD, p_filter): - product = endoflife.Product(product_name) + product = releasedata.Product(product_name) product_frontmatter = endoflife.ProductFrontmatter(product.name) for config in product_frontmatter.get_auto_configs(METHOD): start = 0 diff --git a/src/npm.py b/src/npm.py index 97a9ccfb..6e0513e3 100644 --- a/src/npm.py +++ b/src/npm.py @@ -1,12 +1,12 @@ import sys -from common import dates, endoflife, http +from common import dates, endoflife, http, releasedata METHOD = "npm" p_filter = sys.argv[1] if len(sys.argv) > 1 else None for 
product_name in endoflife.list_products(METHOD, p_filter): - product = endoflife.Product(product_name) + product = releasedata.Product(product_name) product_frontmatter = endoflife.ProductFrontmatter(product.name) for config in product_frontmatter.get_auto_configs(METHOD): data = http.fetch_url(f"https://registry.npmjs.org/{config.url}").json() diff --git a/src/nutanix.py b/src/nutanix.py index 2df541dd..219d97db 100644 --- a/src/nutanix.py +++ b/src/nutanix.py @@ -1,4 +1,4 @@ -from common import dates, endoflife, http +from common import dates, http, releasedata """Fetch Nutanix products versions from https://portal.nutanix.com/api/v1.""" @@ -9,7 +9,7 @@ PRODUCTS = { } for product_name, url in PRODUCTS.items(): - product = endoflife.Product(product_name) + product = releasedata.Product(product_name) data = http.fetch_url(url).json() for version_data in data["contents"]: diff --git a/src/oracle-jdk.py b/src/oracle-jdk.py index 2823d83f..4f7619ba 100644 --- a/src/oracle-jdk.py +++ b/src/oracle-jdk.py @@ -1,11 +1,11 @@ -from common import dates, endoflife +from common import dates, releasedata from requests_html import HTMLSession """Fetch Java versions from https://www.java.com/releases/. 
This script is using requests-html because the page needs JavaScript to render correctly.""" -product = endoflife.Product("oracle-jdk") +product = releasedata.Product("oracle-jdk") r = HTMLSession().get('https://www.java.com/releases/') r.html.render(sleep=1, scrolldown=3) diff --git a/src/palo-alto-networks.py b/src/palo-alto-networks.py index ccb358d1..935138aa 100644 --- a/src/palo-alto-networks.py +++ b/src/palo-alto-networks.py @@ -2,7 +2,7 @@ import logging import re from bs4 import BeautifulSoup -from common import dates, endoflife, http +from common import dates, http, releasedata IDENTIFIERS_BY_PRODUCT = { "pan-os": "pan-os-panorama", @@ -17,7 +17,7 @@ soup = BeautifulSoup(response.text, features="html5lib") logging.info("::endgroup::") for product_name, identifier in IDENTIFIERS_BY_PRODUCT.items(): - product = endoflife.Product(product_name) + product = releasedata.Product(product_name) table = soup.find(id=identifier) for tr in table.findAll("tr")[3:]: td_list = tr.findAll("td") diff --git a/src/php.py b/src/php.py index 916e8405..f3629c42 100644 --- a/src/php.py +++ b/src/php.py @@ -1,8 +1,8 @@ -from common import dates, endoflife, http +from common import dates, endoflife, http, releasedata MAIN_URL = "https://www.php.net/releases/index.php?json&max=-1" -product = endoflife.Product("php") +product = releasedata.Product("php") # Fetch major versions latest_by_major = http.fetch_url(MAIN_URL).json() diff --git a/src/plesk.py b/src/plesk.py index 7856c174..4814bdd2 100644 --- a/src/plesk.py +++ b/src/plesk.py @@ -1,12 +1,12 @@ from bs4 import BeautifulSoup -from common import dates, endoflife, http +from common import dates, http, releasedata """Fetches versions from Plesk's change log. 
Only 18.0.20.3 and later will be picked up, as the format of the change log for 18.0.20 and 18.0.19 are different and there is no entry for GA of version 18.0.18 and older.""" -product = endoflife.Product("plesk") +product = releasedata.Product("plesk") response = http.fetch_url("https://docs.plesk.com/release-notes/obsidian/change-log") soup = BeautifulSoup(response.text, features="html5lib") diff --git a/src/pypi.py b/src/pypi.py index 2976f777..5b63e9f2 100644 --- a/src/pypi.py +++ b/src/pypi.py @@ -1,12 +1,12 @@ import sys -from common import dates, endoflife, http +from common import dates, endoflife, http, releasedata METHOD = "pypi" p_filter = sys.argv[1] if len(sys.argv) > 1 else None for product_name in endoflife.list_products(METHOD, p_filter): - product = endoflife.Product(product_name) + product = releasedata.Product(product_name) product_frontmatter = endoflife.ProductFrontmatter(product.name) for config in product_frontmatter.get_auto_configs(METHOD): data = http.fetch_url(f"https://pypi.org/pypi/{config.url}/json").json() diff --git a/src/rds.py b/src/rds.py index 49bb3e4c..386355e9 100644 --- a/src/rds.py +++ b/src/rds.py @@ -1,7 +1,7 @@ import re from bs4 import BeautifulSoup -from common import dates, endoflife, http +from common import dates, http, releasedata """Fetches Amazon RDS versions from the version management pages on AWS docs. 
@@ -16,7 +16,7 @@ PRODUCTS = { VERSION_REGEX = re.compile(r"(?P<version>\d+(?:\.\d+)*)", flags=re.IGNORECASE) # https://regex101.com/r/BY1vwV/1 for product_name, url in PRODUCTS.items(): - product = endoflife.Product(product_name) + product = releasedata.Product(product_name) response = http.fetch_url(url) soup = BeautifulSoup(response.text, features="html5lib") diff --git a/src/red-hat-openshift.py b/src/red-hat-openshift.py index 53151386..57905ba0 100644 --- a/src/red-hat-openshift.py +++ b/src/red-hat-openshift.py @@ -1,13 +1,13 @@ import re -from common import dates, endoflife +from common import dates, releasedata from common.git import Git """Fetches Red Hat OpenShift versions from the documentation's git repository""" VERSION_AND_DATE_PATTERN = re.compile(r"{product-title}\s(?P<version>\d+\.\d+\.\d+).*\n+Issued:\s(?P<date>\d{4}-\d\d-\d\d)$", re.MULTILINE) -product = endoflife.Product("red-hat-openshift") +product = releasedata.Product("red-hat-openshift") git = Git("https://github.com/openshift/openshift-docs.git") git.setup() diff --git a/src/redhat-satellite.py b/src/redhat-satellite.py index a028a4e5..f86a242a 100644 --- a/src/redhat-satellite.py +++ b/src/redhat-satellite.py @@ -1,7 +1,7 @@ import re from bs4 import BeautifulSoup -from common import dates, endoflife, http +from common import dates, http, releasedata """Fetches Satellite versions from access.redhat.com. @@ -10,7 +10,7 @@ A few of the older versions, such as 'Satellite 6.1 GA Release (Build 6.1.1)', w # https://regex101.com/r/m8aWXG/1 VERSION_PATTERN = re.compile(r"^Satellite (?P<version>\d+\.\d+\.\d+([.-]\d+)?) 
([Uu]pdate|[Rr]elease)$") -product = endoflife.Product("redhat-satellite") +product = releasedata.Product("redhat-satellite") response = http.fetch_url("https://access.redhat.com/articles/1365633") soup = BeautifulSoup(response.text, features="html5lib") diff --git a/src/rhel.py b/src/rhel.py index 14f0523c..99db4374 100644 --- a/src/rhel.py +++ b/src/rhel.py @@ -1,12 +1,12 @@ import re from bs4 import BeautifulSoup -from common import dates, endoflife, http +from common import dates, http, releasedata # https://regex101.com/r/877ibq/1 VERSION_PATTERN = re.compile(r"RHEL (?P<major>\d)(\. ?(?P<minor>\d+))?(( Update (?P<update>\d))| GA)?") -product = endoflife.Product("redhat") +product = releasedata.Product("redhat") response = http.fetch_url("https://access.redhat.com/articles/3078") soup = BeautifulSoup(response.text, features="html5lib") diff --git a/src/rockylinux.py b/src/rockylinux.py index b2066f94..5c310e57 100644 --- a/src/rockylinux.py +++ b/src/rockylinux.py @@ -1,6 +1,6 @@ -from common import dates, endoflife, http +from common import dates, endoflife, http, releasedata -product = endoflife.Product("rockylinux") +product = releasedata.Product("rockylinux") response = http.fetch_url("https://raw.githubusercontent.com/rocky-linux/wiki.rockylinux.org/development/docs/include/releng/version_table.md") for line in response.text.strip().split('\n'): diff --git a/src/ros.py b/src/ros.py index df94a7c1..e25d8dfc 100644 --- a/src/ros.py +++ b/src/ros.py @@ -1,12 +1,12 @@ import re from bs4 import BeautifulSoup -from common import dates, endoflife, http +from common import dates, http, releasedata # https://regex101.com/r/c1ribd/1 VERSION_PATTERN = re.compile(r"^ROS (?P<version>(\w| )+)") -product = endoflife.Product("ros") +product = releasedata.Product("ros") response = http.fetch_url("https://wiki.ros.org/Distributions") soup = BeautifulSoup(response.text, features="html5lib") diff --git a/src/sles.py b/src/sles.py index c0e6f2e8..3260bcf1 100644 --- a/src/sles.py +++ b/src/sles.py @@ -1,9 
+1,9 @@ import logging from bs4 import BeautifulSoup -from common import dates, endoflife, http +from common import dates, http, releasedata -product = endoflife.Product("sles") +product = releasedata.Product("sles") response = http.fetch_url("https://www.suse.com/lifecycle") soup = BeautifulSoup(response.text, features="html5lib") diff --git a/src/splunk.py b/src/splunk.py index ffe89708..f4bf6a1d 100644 --- a/src/splunk.py +++ b/src/splunk.py @@ -1,7 +1,7 @@ import re from bs4 import BeautifulSoup -from common import dates, endoflife, http +from common import dates, http, releasedata VERSION_DATE_PATTERN = re.compile(r"Splunk Enterprise (?P<version>\d+\.\d+(?:\.\d+)*) was (?:first )?released on (?P<date>\w+\s\d\d?,\s\d{4})\.", re.MULTILINE) @@ -30,7 +30,7 @@ def get_latest_minor_versions(versions: list[str]) -> list[str]: return latest_versions -product = endoflife.Product("splunk") +product = releasedata.Product("splunk") main = http.fetch_url("https://docs.splunk.com/Documentation/Splunk") soup = BeautifulSoup(main.text, features="html5lib") diff --git a/src/typo3.py b/src/typo3.py index 42dce788..2f3a1c79 100644 --- a/src/typo3.py +++ b/src/typo3.py @@ -1,6 +1,6 @@ -from common import dates, endoflife, http +from common import dates, http, releasedata -product = endoflife.Product("typo3") +product = releasedata.Product("typo3") data = http.fetch_url("https://get.typo3.org/api/v1/release/").json() for v in data: if v['type'] == 'development': diff --git a/src/unity.py b/src/unity.py index 20627703..8566e0ea 100644 --- a/src/unity.py +++ b/src/unity.py @@ -1,5 +1,5 @@ from bs4 import BeautifulSoup -from common import dates, endoflife, http +from common import dates, http, releasedata # Fetches the Unity LTS releases from the Unity website. Non-LTS releases are not listed there, # so this automation is only partial.
@@ -9,7 +9,7 @@ from common import dates, endoflife, http BASE_URL = "https://unity.com/releases/editor/qa/lts-releases" -product = endoflife.Product("unity") +product = releasedata.Product("unity") next_page_url = BASE_URL # Do not try to fetch multiple pages in parallel: it is raising a lot of errors and make the overall process slower. diff --git a/src/unrealircd.py b/src/unrealircd.py index 9c5cff03..ef58f394 100644 --- a/src/unrealircd.py +++ b/src/unrealircd.py @@ -1,11 +1,11 @@ import re import mwparserfromhell -from common import dates, endoflife, http +from common import dates, endoflife, http, releasedata DATE_PATTERN = re.compile(r"\d{4}-\d{2}-\d{2}") -product = endoflife.Product("unrealircd") +product = releasedata.Product("unrealircd") response = http.fetch_url("https://www.unrealircd.org/docwiki/index.php?title=History_of_UnrealIRCd_releases&action=raw") wikicode = mwparserfromhell.parse(response.text) diff --git a/src/visualstudio.py b/src/visualstudio.py index e4e8954e..e936e5ea 100644 --- a/src/visualstudio.py +++ b/src/visualstudio.py @@ -1,5 +1,5 @@ from bs4 import BeautifulSoup -from common import dates, endoflife, http +from common import dates, endoflife, http, releasedata # There is no build history for versions 2015 and below. # This is not a big deal because there was no version for those release in a very long time. @@ -9,7 +9,7 @@ URLS = [ "https://learn.microsoft.com/en-us/visualstudio/releases/2022/release-history", ] -product = endoflife.Product("visualstudio") +product = releasedata.Product("visualstudio") for response in http.fetch_urls(URLS): soup = BeautifulSoup(response.text, features="html5lib")