Split endoflife.date and releasedata models in separate files (#276)

This makes the code easier to read.
This commit is contained in:
Marc Wrobel
2024-01-07 20:22:46 +01:00
committed by GitHub
parent d01d8ed04b
commit b339c81ead
45 changed files with 190 additions and 185 deletions

View File

@@ -1,14 +1,14 @@
import re import re
import xml.dom.minidom import xml.dom.minidom
from common import dates, endoflife, http from common import dates, http, releasedata
"""Fetches Amazon Neptune versions from its RSS feed on docs.aws.amazon.com.""" """Fetches Amazon Neptune versions from its RSS feed on docs.aws.amazon.com."""
RSS_URL = "https://docs.aws.amazon.com/neptune/latest/userguide/rssupdates.rss" RSS_URL = "https://docs.aws.amazon.com/neptune/latest/userguide/rssupdates.rss"
VERSION_PATTERN = re.compile(r"^Engine version (?P<version>[0-9R.]+)$") VERSION_PATTERN = re.compile(r"^Engine version (?P<version>[0-9R.]+)$")
product = endoflife.Product("amazon-neptune") product = releasedata.Product("amazon-neptune")
rss_response = http.fetch_url(RSS_URL) rss_response = http.fetch_url(RSS_URL)
rss = xml.dom.minidom.parseString(rss_response.text) rss = xml.dom.minidom.parseString(rss_response.text)

View File

@@ -1,6 +1,6 @@
import re import re
from common import dates, endoflife from common import dates, releasedata
from common.git import Git from common.git import Git
"""Fetches Apache HTTP Server versions and release date from its git repository """Fetches Apache HTTP Server versions and release date from its git repository
@@ -15,7 +15,7 @@ VERSION_AND_DATE_PATTERNS = [
re.compile(r"\s+(?P<version>\d+\.\d+\.\d+)\s*:.*Tagged and [rR]olled\s(?:on\s)?(?P<date>\w+\.?\s\d\d?,\s\d{4})"), re.compile(r"\s+(?P<version>\d+\.\d+\.\d+)\s*:.*Tagged and [rR]olled\s(?:on\s)?(?P<date>\w+\.?\s\d\d?,\s\d{4})"),
] ]
product = endoflife.Product("apache-http-server") product = releasedata.Product("apache-http-server")
git = Git("https://github.com/apache/httpd.git") git = Git("https://github.com/apache/httpd.git")
git.setup() git.setup()

View File

@@ -2,7 +2,7 @@ import logging
import re import re
from bs4 import BeautifulSoup from bs4 import BeautifulSoup
from common import dates, endoflife, http from common import dates, http, releasedata
"""Fetches and parses version and release date information from Apple's support website for macOS, """Fetches and parses version and release date information from Apple's support website for macOS,
iOS, iPadOS, and watchOS. While all URLs are fetched once for performance reasons, the actual iOS, iPadOS, and watchOS. While all URLs are fetched once for performance reasons, the actual
@@ -56,7 +56,7 @@ soups = [BeautifulSoup(response.text, features="html5lib") for response in http.
logging.info("::endgroup::") logging.info("::endgroup::")
for product_name in VERSION_PATTERNS: for product_name in VERSION_PATTERNS:
product = endoflife.Product(product_name) product = releasedata.Product(product_name)
for soup in soups: for soup in soups:
versions_table = soup.find(id="tableWraper") versions_table = soup.find(id="tableWraper")
versions_table = versions_table if versions_table else soup.find('table', class_="gb-table") versions_table = versions_table if versions_table else soup.find('table', class_="gb-table")

View File

@@ -1,11 +1,11 @@
from common import dates, endoflife from common import dates, releasedata
from requests_html import HTMLSession from requests_html import HTMLSession
"""Fetches Artifactory versions from https://jfrog.com, using requests_html because JavaScript is """Fetches Artifactory versions from https://jfrog.com, using requests_html because JavaScript is
needed to render the page.""" needed to render the page."""
product = endoflife.Product("artifactory") product = releasedata.Product("artifactory")
r = HTMLSession().get("https://jfrog.com/help/r/jfrog-release-information/artifactory-end-of-life") r = HTMLSession().get("https://jfrog.com/help/r/jfrog-release-information/artifactory-end-of-life")
r.html.render(sleep=2, scrolldown=5) r.html.render(sleep=2, scrolldown=5)

View File

@@ -1,6 +1,6 @@
from bs4 import BeautifulSoup from bs4 import BeautifulSoup
from common import dates, endoflife, http from common import dates, endoflife, http, releasedata
"""Fetches AWS lambda runtimes from https://docs.aws.amazon.com. """Fetches AWS lambda runtimes from https://docs.aws.amazon.com.
@@ -12,8 +12,8 @@ If one day release dates are available in the AWS documentation, it would be bet
them though. Note that this would also be unnecessary if it was possible to disable release/latest them though. Note that this would also be unnecessary if it was possible to disable release/latest
release dates updates in the latest.py script.""" release dates updates in the latest.py script."""
product = endoflife.Product("aws-lambda") product = releasedata.Product("aws-lambda")
old_product = endoflife.Product.from_file(product.name) old_product = releasedata.Product.from_file(product.name)
product_frontmatter = endoflife.ProductFrontmatter(product.name) product_frontmatter = endoflife.ProductFrontmatter(product.name)
response = http.fetch_url("https://docs.aws.amazon.com/lambda/latest/dg/lambda-runtimes.html") response = http.fetch_url("https://docs.aws.amazon.com/lambda/latest/dg/lambda-runtimes.html")
soup = BeautifulSoup(response.text, features="html5lib") soup = BeautifulSoup(response.text, features="html5lib")

View File

@@ -1,7 +1,7 @@
import sys import sys
from bs4 import BeautifulSoup from bs4 import BeautifulSoup
from common import dates, endoflife, http from common import dates, endoflife, http, releasedata
"""Fetches versions from repositories managed with cgit, such as the Linux kernel repository. """Fetches versions from repositories managed with cgit, such as the Linux kernel repository.
Ideally we would want to use the git repository directly, but cgit-managed repositories don't support partial clone.""" Ideally we would want to use the git repository directly, but cgit-managed repositories don't support partial clone."""
@@ -10,7 +10,7 @@ METHOD = "cgit"
p_filter = sys.argv[1] if len(sys.argv) > 1 else None p_filter = sys.argv[1] if len(sys.argv) > 1 else None
for product_name in endoflife.list_products(METHOD, p_filter): for product_name in endoflife.list_products(METHOD, p_filter):
product = endoflife.Product(product_name) product = releasedata.Product(product_name)
product_frontmatter = endoflife.ProductFrontmatter(product.name) product_frontmatter = endoflife.ProductFrontmatter(product.name)
for auto_config in product_frontmatter.get_auto_configs(METHOD): for auto_config in product_frontmatter.get_auto_configs(METHOD):
response = http.fetch_url(auto_config.url + '/refs/tags') response = http.fetch_url(auto_config.url + '/refs/tags')

View File

@@ -1,7 +1,7 @@
import re import re
from bs4 import BeautifulSoup from bs4 import BeautifulSoup
from common import dates, endoflife, http from common import dates, http, releasedata
"""Fetches versions from Adobe ColdFusion release notes on helpx.adobe.com. """Fetches versions from Adobe ColdFusion release notes on helpx.adobe.com.
@@ -31,7 +31,7 @@ FIXED_VERSIONS = {
"2023.0.0": dates.date(2022, 5, 16), # https://coldfusion.adobe.com/2023/05/coldfusion2023-release/ "2023.0.0": dates.date(2022, 5, 16), # https://coldfusion.adobe.com/2023/05/coldfusion2023-release/
} }
product = endoflife.Product("coldfusion") product = releasedata.Product("coldfusion")
for changelog in http.fetch_urls(URLS): for changelog in http.fetch_urls(URLS):
changelog_soup = BeautifulSoup(changelog.text, features="html5lib") changelog_soup = BeautifulSoup(changelog.text, features="html5lib")

View File

@@ -1,8 +1,7 @@
import json
import logging import logging
import os import os
import re import re
from datetime import datetime, timezone from datetime import datetime
from pathlib import Path from pathlib import Path
import frontmatter import frontmatter
@@ -18,7 +17,6 @@ DEFAULT_VERSION_PATTERN = re.compile(DEFAULT_VERSION_REGEX)
DEFAULT_VERSION_TEMPLATE = "{{major}}{% if minor %}.{{minor}}{% if patch %}.{{patch}}{% if tiny %}.{{tiny}}{% endif %}{% endif %}{% endif %}" DEFAULT_VERSION_TEMPLATE = "{{major}}{% if minor %}.{{minor}}{% if patch %}.{{patch}}{% if tiny %}.{{tiny}}{% endif %}{% endif %}{% endif %}"
PRODUCTS_PATH = Path(os.environ.get("PRODUCTS_PATH", "website/products")) PRODUCTS_PATH = Path(os.environ.get("PRODUCTS_PATH", "website/products"))
VERSIONS_PATH = Path(os.environ.get("VERSIONS_PATH", "releases"))
class AutoConfig: class AutoConfig:
@@ -75,98 +73,6 @@ class ProductFrontmatter:
return None return None
class ProductVersion:
def __init__(self, product: "Product", name: str, date: datetime) -> None:
self.product = str(product)
self.name = name
self.date = date
@staticmethod
def from_json(product: "Product", data: dict) -> "ProductVersion":
name = data["name"]
date = datetime.strptime(data["date"], "%Y-%m-%d").replace(tzinfo=timezone.utc)
return ProductVersion(product, name, date)
def __dict__(self) -> dict:
return {
"name": self.name,
"date": self.date.strftime("%Y-%m-%d"),
}
def __repr__(self) -> str:
return f"{self.product}#{self.name} ({self.date})"
class Product:
def __init__(self, name: str) -> None:
self.name: str = name
self.versions_path: Path = VERSIONS_PATH / f"{name}.json"
self.versions: dict[str, ProductVersion] = {}
logging.info(f"::group::{self}")
@staticmethod
def from_file(name: str) -> "Product":
product = Product(name)
if product.versions_path.is_file():
with product.versions_path.open() as f:
for json_version in json.load(f)["versions"].values():
version = ProductVersion.from_json(product, json_version)
product.versions[version.name] = version
logging.info(f"loaded versions data for {product} from {product.versions_path}")
else:
logging.warning(f"no versions data found for {product} at {product.versions_path}")
return product
def has_version(self, version: str) -> bool:
return version in self.versions
def get_version_date(self, version: str) -> datetime:
return self.versions[version].date if version in self.versions else None
def declare_version(self, version: str, date: datetime) -> None:
if version in self.versions:
if self.versions[version].date != date:
logging.warning(f"overwriting {version} ({self.get_version_date(version)} -> {date}) for {self}")
else:
return # already declared
logging.info(f"adding version {version} ({date}) to {self}")
self.versions[version] = ProductVersion(self, version, date)
def declare_versions(self, dates_by_version: dict[str, datetime]) -> None:
for (version, date) in dates_by_version.items():
self.declare_version(version, date)
def replace_version(self, version: str, date: datetime) -> None:
if version not in self.versions:
msg = f"version {version} cannot be replaced as it does not exist for {self}"
raise ValueError(msg)
logging.info(f"replacing version {version} ({self.get_version_date(version)} -> {date}) in {self}")
self.versions[version].date = date
def remove_version(self, version: str) -> None:
if not self.has_version(version):
logging.warning(f"version {version} cannot be removed as it does not exist for {self}")
return
logging.info(f"removing version {version} ({self.versions.pop(version)}) from {self}")
def write(self) -> None:
# sort by date then version (desc)
ordered_versions = sorted(self.versions.values(), key=lambda v: (v.date, v.name), reverse=True)
with self.versions_path.open("w") as f:
f.write(json.dumps({
"versions": {version.name: version.__dict__() for version in ordered_versions},
}, indent=2))
logging.info("::endgroup::")
def __repr__(self) -> str:
return self.name
def list_products(method: str, products_filter: str = None) -> list[str]: def list_products(method: str, products_filter: str = None) -> list[str]:
"""Return a list of products that are using the same given update method.""" """Return a list of products that are using the same given update method."""
products = [] products = []

99
src/common/releasedata.py Normal file
View File

@@ -0,0 +1,99 @@
import json
import logging
import os
from datetime import datetime, timezone
from pathlib import Path
# Directory holding the per-product release JSON files; overridable via the
# VERSIONS_PATH environment variable (defaults to "releases").
VERSIONS_PATH = Path(os.environ.get("VERSIONS_PATH", "releases"))
class ProductVersion:
    """A single release of a product: a version name plus its release date."""

    def __init__(self, product: "Product", name: str, date: datetime) -> None:
        # Only the product's string form is kept — no back-reference to the
        # Product object itself.
        self.product = str(product)
        self.name = name
        self.date = date

    @staticmethod
    def from_json(product: "Product", data: dict) -> "ProductVersion":
        """Deserialize a version from its JSON dict ("name" + "%Y-%m-%d" "date")."""
        parsed = datetime.strptime(data["date"], "%Y-%m-%d")
        return ProductVersion(product, data["name"], parsed.replace(tzinfo=timezone.utc))

    def __dict__(self) -> dict:
        # NOTE: deliberately defined as a method named __dict__ (shadowing the
        # instance-dict descriptor); Product.write() calls version.__dict__()
        # when serializing to JSON.
        return {"name": self.name, "date": self.date.strftime("%Y-%m-%d")}

    def __repr__(self) -> str:
        return f"{self.product}#{self.name} ({self.date})"
class Product:
    """Release data for one product, loaded from and saved to a JSON file
    under VERSIONS_PATH ("<name>.json")."""

    def __init__(self, name: str) -> None:
        self.name: str = name
        self.versions_path: Path = VERSIONS_PATH / f"{name}.json"
        self.versions: dict[str, ProductVersion] = {}
        # Opens a GitHub Actions log group; closed again by write().
        logging.info(f"::group::{self}")

    @staticmethod
    def from_file(name: str) -> "Product":
        """Create a Product pre-populated from its on-disk JSON data, if present."""
        product = Product(name)
        if not product.versions_path.is_file():
            logging.warning(f"no versions data found for {product} at {product.versions_path}")
            return product
        with product.versions_path.open() as f:
            raw = json.load(f)
        for entry in raw["versions"].values():
            parsed = ProductVersion.from_json(product, entry)
            product.versions[parsed.name] = parsed
        logging.info(f"loaded versions data for {product} from {product.versions_path}")
        return product

    def has_version(self, version: str) -> bool:
        return version in self.versions

    def get_version_date(self, version: str) -> datetime:
        # Returns None when the version is unknown.
        existing = self.versions.get(version)
        return existing.date if existing else None

    def declare_version(self, version: str, date: datetime) -> None:
        """Record a version; silently skips exact duplicates, warns on overwrite."""
        existing = self.versions.get(version)
        if existing is not None:
            if existing.date == date:
                return  # already declared
            logging.warning(f"overwriting {version} ({self.get_version_date(version)} -> {date}) for {self}")
        logging.info(f"adding version {version} ({date}) to {self}")
        self.versions[version] = ProductVersion(self, version, date)

    def declare_versions(self, dates_by_version: dict[str, datetime]) -> None:
        for version, date in dates_by_version.items():
            self.declare_version(version, date)

    def replace_version(self, version: str, date: datetime) -> None:
        """Change the date of an existing version; raises ValueError if unknown."""
        if version not in self.versions:
            msg = f"version {version} cannot be replaced as it does not exist for {self}"
            raise ValueError(msg)
        logging.info(f"replacing version {version} ({self.get_version_date(version)} -> {date}) in {self}")
        self.versions[version].date = date

    def remove_version(self, version: str) -> None:
        if not self.has_version(version):
            logging.warning(f"version {version} cannot be removed as it does not exist for {self}")
            return
        logging.info(f"removing version {version} ({self.versions.pop(version)}) from {self}")

    def write(self) -> None:
        """Persist all versions to the JSON file and close the log group."""
        # sort by date then version (desc)
        ordered = sorted(self.versions.values(), key=lambda v: (v.date, v.name), reverse=True)
        payload = {"versions": {v.name: v.__dict__() for v in ordered}}
        with self.versions_path.open("w") as f:
            f.write(json.dumps(payload, indent=2))
        logging.info("::endgroup::")

    def __repr__(self) -> str:
        return self.name

View File

@@ -1,11 +1,11 @@
from common import dates, endoflife from common import dates, releasedata
from requests_html import HTMLSession from requests_html import HTMLSession
"""Fetches Confluence versions from www.atlassian.com. """Fetches Confluence versions from www.atlassian.com.
Note that requests_html is used because JavaScript is needed to render the page.""" Note that requests_html is used because JavaScript is needed to render the page."""
product = endoflife.Product("confluence") product = releasedata.Product("confluence")
r = HTMLSession().get("https://www.atlassian.com/software/confluence/download-archives") r = HTMLSession().get("https://www.atlassian.com/software/confluence/download-archives")
r.html.render(sleep=1, scrolldown=3) r.html.render(sleep=1, scrolldown=3)

View File

@@ -2,7 +2,7 @@ import datetime
import re import re
from bs4 import BeautifulSoup from bs4 import BeautifulSoup
from common import dates, endoflife, http from common import dates, http, releasedata
MILESTONE_PATTERN = re.compile(r'COS \d+ LTS') MILESTONE_PATTERN = re.compile(r'COS \d+ LTS')
VERSION_PATTERN = re.compile(r"^(cos-\d+-\d+-\d+-\d+)") VERSION_PATTERN = re.compile(r"^(cos-\d+-\d+-\d+-\d+)")
@@ -14,7 +14,7 @@ def parse_date(date_text: str) -> datetime:
return dates.parse_date(date_text) return dates.parse_date(date_text)
product = endoflife.Product("cos") product = releasedata.Product("cos")
main = http.fetch_url("https://cloud.google.com/container-optimized-os/docs/release-notes/") main = http.fetch_url("https://cloud.google.com/container-optimized-os/docs/release-notes/")
main_soup = BeautifulSoup(main.text, features="html5lib") main_soup = BeautifulSoup(main.text, features="html5lib")
milestones = [cell.text.split(' ')[1] for cell in main_soup.find_all('td', string=MILESTONE_PATTERN)] milestones = [cell.text.split(' ')[1] for cell in main_soup.find_all('td', string=MILESTONE_PATTERN)]

View File

@@ -1,7 +1,7 @@
import re import re
from bs4 import BeautifulSoup from bs4 import BeautifulSoup
from common import dates, endoflife, http from common import dates, http, releasedata
"""Fetches versions from release notes of each minor version on docs.couchbase.com. """Fetches versions from release notes of each minor version on docs.couchbase.com.
@@ -18,7 +18,7 @@ MANUAL_VERSIONS = {
"7.2.0": dates.date(2023, 6, 1), # https://www.couchbase.com/blog/couchbase-capella-spring-release-72/ "7.2.0": dates.date(2023, 6, 1), # https://www.couchbase.com/blog/couchbase-capella-spring-release-72/
} }
product = endoflife.Product("couchbase-server") product = releasedata.Product("couchbase-server")
main = http.fetch_url(f"{URLS}/current/install/install-intro.html") main = http.fetch_url(f"{URLS}/current/install/install-intro.html")
main_soup = BeautifulSoup(main.text, features="html5lib") main_soup = BeautifulSoup(main.text, features="html5lib")

View File

@@ -1,13 +1,13 @@
from pathlib import Path from pathlib import Path
from subprocess import run from subprocess import run
from common import dates, endoflife from common import dates, releasedata
from common.git import Git from common.git import Git
"""Fetch Debian versions by parsing news in www.debian.org source repository.""" """Fetch Debian versions by parsing news in www.debian.org source repository."""
def extract_major_versions(p: endoflife.Product, repo_dir: Path) -> None: def extract_major_versions(p: releasedata.Product, repo_dir: Path) -> None:
child = run( child = run(
f"grep -RhE -A 1 '<define-tag pagetitle>Debian [0-9]+.+</q> released' {repo_dir}/english/News " f"grep -RhE -A 1 '<define-tag pagetitle>Debian [0-9]+.+</q> released' {repo_dir}/english/News "
f"| cut -d '<' -f 2 " f"| cut -d '<' -f 2 "
@@ -26,7 +26,7 @@ def extract_major_versions(p: endoflife.Product, repo_dir: Path) -> None:
is_release_line = True is_release_line = True
def extract_point_versions(p: endoflife.Product, repo_dir: Path) -> None: def extract_point_versions(p: releasedata.Product, repo_dir: Path) -> None:
child = run( child = run(
f"grep -Rh -B 10 '<define-tag revision>' {repo_dir}/english/News " f"grep -Rh -B 10 '<define-tag revision>' {repo_dir}/english/News "
"| grep -Eo '(release_date>(.*)<|revision>(.*)<)' " "| grep -Eo '(release_date>(.*)<|revision>(.*)<)' "
@@ -41,7 +41,7 @@ def extract_point_versions(p: endoflife.Product, repo_dir: Path) -> None:
p.declare_version(version, dates.parse_date(date)) p.declare_version(version, dates.parse_date(date))
product = endoflife.Product("debian") product = releasedata.Product("debian")
git = Git("https://salsa.debian.org/webmaster-team/webwml.git") git = Git("https://salsa.debian.org/webmaster-team/webwml.git")
git.setup() git.setup()
git.checkout("master", file_list=["english/News"]) git.checkout("master", file_list=["english/News"])

View File

@@ -1,13 +1,13 @@
import sys import sys
from bs4 import BeautifulSoup from bs4 import BeautifulSoup
from common import dates, endoflife, http from common import dates, endoflife, http, releasedata
METHOD = 'distrowatch' METHOD = 'distrowatch'
p_filter = sys.argv[1] if len(sys.argv) > 1 else None p_filter = sys.argv[1] if len(sys.argv) > 1 else None
for product_name in endoflife.list_products(METHOD, p_filter): for product_name in endoflife.list_products(METHOD, p_filter):
product = endoflife.Product(product_name) product = releasedata.Product(product_name)
product_frontmatter = endoflife.ProductFrontmatter(product.name) product_frontmatter = endoflife.ProductFrontmatter(product.name)
for config in product_frontmatter.get_auto_configs(METHOD): for config in product_frontmatter.get_auto_configs(METHOD):
response = http.fetch_url(f"https://distrowatch.com/index.php?distribution={config.url}") response = http.fetch_url(f"https://distrowatch.com/index.php?distribution={config.url}")

View File

@@ -1,6 +1,6 @@
import sys import sys
from common import dates, endoflife, http from common import dates, endoflife, http, releasedata
"""Fetches releases from the Docker Hub API. """Fetches releases from the Docker Hub API.
@@ -9,7 +9,7 @@ Unfortunately images creation date cannot be retrieved, so we had to use the tag
METHOD = "docker_hub" METHOD = "docker_hub"
def fetch_releases(p: endoflife.Product, c: endoflife.AutoConfig, url: str) -> None: def fetch_releases(p: releasedata.Product, c: endoflife.AutoConfig, url: str) -> None:
data = http.fetch_url(url).json() data = http.fetch_url(url).json()
for result in data["results"]: for result in data["results"]:
@@ -24,7 +24,7 @@ def fetch_releases(p: endoflife.Product, c: endoflife.AutoConfig, url: str) -> N
p_filter = sys.argv[1] if len(sys.argv) > 1 else None p_filter = sys.argv[1] if len(sys.argv) > 1 else None
for product_name in endoflife.list_products(METHOD, p_filter): for product_name in endoflife.list_products(METHOD, p_filter):
product = endoflife.Product(product_name) product = releasedata.Product(product_name)
product_frontmatter = endoflife.ProductFrontmatter(product.name) product_frontmatter = endoflife.ProductFrontmatter(product.name)
for config in product_frontmatter.get_auto_configs(METHOD): for config in product_frontmatter.get_auto_configs(METHOD):
fetch_releases(product, config, f"https://hub.docker.com/v2/repositories/{config.url}/tags?page_size=100&page=1") fetch_releases(product, config, f"https://hub.docker.com/v2/repositories/{config.url}/tags?page_size=100&page=1")

View File

@@ -1,5 +1,5 @@
from bs4 import BeautifulSoup from bs4 import BeautifulSoup
from common import dates, endoflife, http from common import dates, endoflife, http, releasedata
"""Fetches EKS versions from AWS docs. """Fetches EKS versions from AWS docs.
Now that AWS no longer publishes docs on GitHub, we use the Web Archive to still get the older versions.""" Now that AWS no longer publishes docs on GitHub, we use the Web Archive to still get the older versions."""
@@ -13,7 +13,7 @@ URLS = [
"https://docs.aws.amazon.com/eks/latest/userguide/platform-versions.html", "https://docs.aws.amazon.com/eks/latest/userguide/platform-versions.html",
] ]
product = endoflife.Product("eks") product = releasedata.Product("eks")
for version_list in http.fetch_urls(URLS): for version_list in http.fetch_urls(URLS):
version_list_soup = BeautifulSoup(version_list.text, features="html5lib") version_list_soup = BeautifulSoup(version_list.text, features="html5lib")
for tr in version_list_soup.select("#main-col-body")[0].findAll("tr"): for tr in version_list_soup.select("#main-col-body")[0].findAll("tr"):

View File

@@ -2,13 +2,13 @@ import re
import urllib.parse import urllib.parse
from bs4 import BeautifulSoup from bs4 import BeautifulSoup
from common import dates, endoflife, http from common import dates, http, releasedata
"""Fetch Firefox versions with their dates from https://www.mozilla.org/. """Fetch Firefox versions with their dates from https://www.mozilla.org/.
Versions lower than 10.0 are ignored because too difficult to parse.""" Versions lower than 10.0 are ignored because too difficult to parse."""
product = endoflife.Product("firefox") product = releasedata.Product("firefox")
releases_page = http.fetch_url("https://www.mozilla.org/en-US/firefox/releases/") releases_page = http.fetch_url("https://www.mozilla.org/en-US/firefox/releases/")
releases_soup = BeautifulSoup(releases_page.text, features="html5lib") releases_soup = BeautifulSoup(releases_page.text, features="html5lib")
releases_list = releases_soup.find_all("ol", class_="c-release-list") releases_list = releases_soup.find_all("ol", class_="c-release-list")

View File

@@ -1,6 +1,6 @@
import sys import sys
from common import dates, endoflife from common import dates, endoflife, releasedata
from common.git import Git from common.git import Git
"""Fetches versions from tags in a git repository. This replace the old update.rb script.""" """Fetches versions from tags in a git repository. This replace the old update.rb script."""
@@ -9,7 +9,7 @@ METHOD = 'git'
p_filter = sys.argv[1] if len(sys.argv) > 1 else None p_filter = sys.argv[1] if len(sys.argv) > 1 else None
for product_name in endoflife.list_products(METHOD, p_filter): for product_name in endoflife.list_products(METHOD, p_filter):
product = endoflife.Product(product_name) product = releasedata.Product(product_name)
product_frontmatter = endoflife.ProductFrontmatter(product.name) product_frontmatter = endoflife.ProductFrontmatter(product.name)
for config in product_frontmatter.get_auto_configs(METHOD): for config in product_frontmatter.get_auto_configs(METHOD):
git = Git(config.url) git = Git(config.url)

View File

@@ -3,7 +3,7 @@ import logging
import subprocess import subprocess
import sys import sys
from common import dates, endoflife from common import dates, endoflife, releasedata
"""Fetches versions from GitHub releases using the GraphQL API and the GitHub CLI. """Fetches versions from GitHub releases using the GraphQL API and the GitHub CLI.
@@ -44,7 +44,7 @@ query($endCursor: String) {
p_filter = sys.argv[1] if len(sys.argv) > 1 else None p_filter = sys.argv[1] if len(sys.argv) > 1 else None
for product_name in endoflife.list_products(METHOD, p_filter): for product_name in endoflife.list_products(METHOD, p_filter):
product = endoflife.Product(product_name) product = releasedata.Product(product_name)
product_frontmatter = endoflife.ProductFrontmatter(product.name) product_frontmatter = endoflife.ProductFrontmatter(product.name)
for config in product_frontmatter.get_auto_configs(METHOD): for config in product_frontmatter.get_auto_configs(METHOD):
for page in fetch_releases(config.url): for page in fetch_releases(config.url):

View File

@@ -1,7 +1,7 @@
import re import re
from bs4 import BeautifulSoup from bs4 import BeautifulSoup
from common import dates, endoflife, http from common import dates, http, releasedata
# https://regex101.com/r/zPxBqT/1 # https://regex101.com/r/zPxBqT/1
VERSION_PATTERN = re.compile(r"\d.\d+\.\d+-gke\.\d+") VERSION_PATTERN = re.compile(r"\d.\d+\.\d+-gke\.\d+")
@@ -13,7 +13,7 @@ URL_BY_PRODUCT = {
} }
for product_name, url in URL_BY_PRODUCT.items(): for product_name, url in URL_BY_PRODUCT.items():
product = endoflife.Product(product_name) product = releasedata.Product(product_name)
relnotes = http.fetch_url(url) relnotes = http.fetch_url(url)
relnotes_soup = BeautifulSoup(relnotes.text, features="html5lib") relnotes_soup = BeautifulSoup(relnotes.text, features="html5lib")

View File

@@ -1,7 +1,7 @@
from bs4 import BeautifulSoup from bs4 import BeautifulSoup
from common import dates, endoflife, http from common import dates, http, releasedata
product = endoflife.Product("graalvm") product = releasedata.Product("graalvm")
release_calendar = http.fetch_url("https://www.graalvm.org/release-calendar/") release_calendar = http.fetch_url("https://www.graalvm.org/release-calendar/")
release_calendar_soup = BeautifulSoup(release_calendar.text, features="html5lib") release_calendar_soup = BeautifulSoup(release_calendar.text, features="html5lib")

View File

@@ -1,12 +1,12 @@
import re import re
from bs4 import BeautifulSoup from bs4 import BeautifulSoup
from common import dates, endoflife, http from common import dates, http, releasedata
CYCLE_PATTERN = re.compile(r"^(\d+\.\d+)/$") CYCLE_PATTERN = re.compile(r"^(\d+\.\d+)/$")
DATE_AND_VERSION_PATTERN = re.compile(r"^(\d{4})/(\d{2})/(\d{2})\s+:\s+(\d+\.\d+\.\d.?)$") # https://regex101.com/r/1JCnFC/1 DATE_AND_VERSION_PATTERN = re.compile(r"^(\d{4})/(\d{2})/(\d{2})\s+:\s+(\d+\.\d+\.\d.?)$") # https://regex101.com/r/1JCnFC/1
product = endoflife.Product("haproxy") product = releasedata.Product("haproxy")
# First, get all minor releases from the download page # First, get all minor releases from the download page
download = http.fetch_url('https://www.haproxy.org/download/') download = http.fetch_url('https://www.haproxy.org/download/')
download_soup = BeautifulSoup(download.text, features="html5lib") download_soup = BeautifulSoup(download.text, features="html5lib")

View File

@@ -1,12 +1,12 @@
from bs4 import BeautifulSoup from bs4 import BeautifulSoup
from common import dates, endoflife, http from common import dates, http, releasedata
URLS = [ URLS = [
"https://web.archive.org/web/20210123024247/https://www.ibm.com/support/pages/aix-support-lifecycle-information", "https://web.archive.org/web/20210123024247/https://www.ibm.com/support/pages/aix-support-lifecycle-information",
"https://www.ibm.com/support/pages/aix-support-lifecycle-information", "https://www.ibm.com/support/pages/aix-support-lifecycle-information",
] ]
product = endoflife.Product("ibm-aix") product = releasedata.Product("ibm-aix")
for page in http.fetch_urls(URLS): for page in http.fetch_urls(URLS):
page_soup = BeautifulSoup(page.text, features="html5lib") page_soup = BeautifulSoup(page.text, features="html5lib")

View File

@@ -1,11 +1,11 @@
from common import dates, endoflife from common import dates, releasedata
from requests_html import HTMLSession from requests_html import HTMLSession
"""Fetches Jira versions from www.atlassian.com. """Fetches Jira versions from www.atlassian.com.
Note that requests_html is used because JavaScript is needed to render the page.""" Note that requests_html is used because JavaScript is needed to render the page."""
product = endoflife.Product("jira") product = releasedata.Product("jira")
r = HTMLSession().get("https://www.atlassian.com/software/jira/update") r = HTMLSession().get("https://www.atlassian.com/software/jira/update")
r.html.render(sleep=1, scrolldown=3) r.html.render(sleep=1, scrolldown=3)

View File

@@ -2,7 +2,7 @@ import re
import xml.dom.minidom import xml.dom.minidom
from bs4 import BeautifulSoup from bs4 import BeautifulSoup
from common import dates, endoflife, http from common import dates, http, releasedata
"""Fetch Looker versions from the Google Cloud release notes RSS feed. """Fetch Looker versions from the Google Cloud release notes RSS feed.
""" """
@@ -10,7 +10,7 @@ from common import dates, endoflife, http
ANNOUNCEMENT_PATTERN = re.compile(r"includes\s+the\s+following\s+changes", re.IGNORECASE) ANNOUNCEMENT_PATTERN = re.compile(r"includes\s+the\s+following\s+changes", re.IGNORECASE)
VERSION_PATTERN = re.compile(r"Looker\s+(?P<version>\d+\.\d+)", re.IGNORECASE) VERSION_PATTERN = re.compile(r"Looker\s+(?P<version>\d+\.\d+)", re.IGNORECASE)
product = endoflife.Product("looker") product = releasedata.Product("looker")
response = http.fetch_url("https://cloud.google.com/feeds/looker-release-notes.xml") response = http.fetch_url("https://cloud.google.com/feeds/looker-release-notes.xml")
rss = xml.dom.minidom.parseString(response.text) rss = xml.dom.minidom.parseString(response.text)

View File

@@ -1,13 +1,13 @@
import sys import sys
from datetime import datetime, timezone from datetime import datetime, timezone
from common import endoflife, http from common import endoflife, http, releasedata
METHOD = "maven" METHOD = "maven"
p_filter = sys.argv[1] if len(sys.argv) > 1 else None p_filter = sys.argv[1] if len(sys.argv) > 1 else None
for product_name in endoflife.list_products(METHOD, p_filter): for product_name in endoflife.list_products(METHOD, p_filter):
product = endoflife.Product(product_name) product = releasedata.Product(product_name)
product_frontmatter = endoflife.ProductFrontmatter(product.name) product_frontmatter = endoflife.ProductFrontmatter(product.name)
for config in product_frontmatter.get_auto_configs(METHOD): for config in product_frontmatter.get_auto_configs(METHOD):
start = 0 start = 0

View File

@@ -1,12 +1,12 @@
import sys import sys
from common import dates, endoflife, http from common import dates, endoflife, http, releasedata
METHOD = "npm" METHOD = "npm"
p_filter = sys.argv[1] if len(sys.argv) > 1 else None p_filter = sys.argv[1] if len(sys.argv) > 1 else None
for product_name in endoflife.list_products(METHOD, p_filter): for product_name in endoflife.list_products(METHOD, p_filter):
product = endoflife.Product(product_name) product = releasedata.Product(product_name)
product_frontmatter = endoflife.ProductFrontmatter(product.name) product_frontmatter = endoflife.ProductFrontmatter(product.name)
for config in product_frontmatter.get_auto_configs(METHOD): for config in product_frontmatter.get_auto_configs(METHOD):
data = http.fetch_url(f"https://registry.npmjs.org/{config.url}").json() data = http.fetch_url(f"https://registry.npmjs.org/{config.url}").json()

View File

@@ -1,4 +1,4 @@
from common import dates, endoflife, http from common import dates, http, releasedata
"""Fetch Nutanix products versions from https://portal.nutanix.com/api/v1.""" """Fetch Nutanix products versions from https://portal.nutanix.com/api/v1."""
@@ -9,7 +9,7 @@ PRODUCTS = {
} }
for product_name, url in PRODUCTS.items(): for product_name, url in PRODUCTS.items():
product = endoflife.Product(product_name) product = releasedata.Product(product_name)
data = http.fetch_url(url).json() data = http.fetch_url(url).json()
for version_data in data["contents"]: for version_data in data["contents"]:

View File

@@ -1,11 +1,11 @@
from common import dates, endoflife from common import dates, releasedata
from requests_html import HTMLSession from requests_html import HTMLSession
"""Fetch Java versions from https://www.java.com/releases/. """Fetch Java versions from https://www.java.com/releases/.
This script is using requests-html because the page needs JavaScript to render correctly.""" This script is using requests-html because the page needs JavaScript to render correctly."""
product = endoflife.Product("oracle-jdk") product = releasedata.Product("oracle-jdk")
r = HTMLSession().get('https://www.java.com/releases/') r = HTMLSession().get('https://www.java.com/releases/')
r.html.render(sleep=1, scrolldown=3) r.html.render(sleep=1, scrolldown=3)

View File

@@ -2,7 +2,7 @@ import logging
import re import re
from bs4 import BeautifulSoup from bs4 import BeautifulSoup
from common import dates, endoflife, http from common import dates, http, releasedata
IDENTIFIERS_BY_PRODUCT = { IDENTIFIERS_BY_PRODUCT = {
"pan-os": "pan-os-panorama", "pan-os": "pan-os-panorama",
@@ -17,7 +17,7 @@ soup = BeautifulSoup(response.text, features="html5lib")
logging.info("::endgroup::") logging.info("::endgroup::")
for product_name, identifier in IDENTIFIERS_BY_PRODUCT.items(): for product_name, identifier in IDENTIFIERS_BY_PRODUCT.items():
product = endoflife.Product(product_name) product = releasedata.Product(product_name)
table = soup.find(id=identifier) table = soup.find(id=identifier)
for tr in table.findAll("tr")[3:]: for tr in table.findAll("tr")[3:]:
td_list = tr.findAll("td") td_list = tr.findAll("td")

View File

@@ -1,8 +1,8 @@
from common import dates, endoflife, http from common import dates, endoflife, http, releasedata
MAIN_URL = "https://www.php.net/releases/index.php?json&max=-1" MAIN_URL = "https://www.php.net/releases/index.php?json&max=-1"
product = endoflife.Product("php") product = releasedata.Product("php")
# Fetch major versions # Fetch major versions
latest_by_major = http.fetch_url(MAIN_URL).json() latest_by_major = http.fetch_url(MAIN_URL).json()

View File

@@ -1,12 +1,12 @@
from bs4 import BeautifulSoup from bs4 import BeautifulSoup
from common import dates, endoflife, http from common import dates, http, releasedata
"""Fetches versions from Plesk's change log. """Fetches versions from Plesk's change log.
Only 18.0.20.3 and later will be picked up, as the format of the change log for 18.0.20 and 18.0.19 are different and Only 18.0.20.3 and later will be picked up, as the format of the change log for 18.0.20 and 18.0.19 are different and
there is no entry for GA of version 18.0.18 and older.""" there is no entry for GA of version 18.0.18 and older."""
product = endoflife.Product("plesk") product = releasedata.Product("plesk")
response = http.fetch_url("https://docs.plesk.com/release-notes/obsidian/change-log") response = http.fetch_url("https://docs.plesk.com/release-notes/obsidian/change-log")
soup = BeautifulSoup(response.text, features="html5lib") soup = BeautifulSoup(response.text, features="html5lib")

View File

@@ -1,12 +1,12 @@
import sys import sys
from common import dates, endoflife, http from common import dates, endoflife, http, releasedata
METHOD = "pypi" METHOD = "pypi"
p_filter = sys.argv[1] if len(sys.argv) > 1 else None p_filter = sys.argv[1] if len(sys.argv) > 1 else None
for product_name in endoflife.list_products(METHOD, p_filter): for product_name in endoflife.list_products(METHOD, p_filter):
product = endoflife.Product(product_name) product = releasedata.Product(product_name)
product_frontmatter = endoflife.ProductFrontmatter(product.name) product_frontmatter = endoflife.ProductFrontmatter(product.name)
for config in product_frontmatter.get_auto_configs(METHOD): for config in product_frontmatter.get_auto_configs(METHOD):
data = http.fetch_url(f"https://pypi.org/pypi/{config.url}/json").json() data = http.fetch_url(f"https://pypi.org/pypi/{config.url}/json").json()

View File

@@ -1,7 +1,7 @@
import re import re
from bs4 import BeautifulSoup from bs4 import BeautifulSoup
from common import dates, endoflife, http from common import dates, http, releasedata
"""Fetches Amazon RDS versions from the version management pages on AWS docs. """Fetches Amazon RDS versions from the version management pages on AWS docs.
@@ -16,7 +16,7 @@ PRODUCTS = {
VERSION_REGEX = re.compile(r"(?P<version>\d+(?:\.\d+)*)", flags=re.IGNORECASE) # https://regex101.com/r/BY1vwV/1 VERSION_REGEX = re.compile(r"(?P<version>\d+(?:\.\d+)*)", flags=re.IGNORECASE) # https://regex101.com/r/BY1vwV/1
for product_name, url in PRODUCTS.items(): for product_name, url in PRODUCTS.items():
product = endoflife.Product(product_name) product = releasedata.Product(product_name)
response = http.fetch_url(url) response = http.fetch_url(url)
soup = BeautifulSoup(response.text, features="html5lib") soup = BeautifulSoup(response.text, features="html5lib")

View File

@@ -1,13 +1,13 @@
import re import re
from common import dates, endoflife from common import dates, releasedata
from common.git import Git from common.git import Git
"""Fetches Red Hat OpenShift versions from the documentation's git repository""" """Fetches Red Hat OpenShift versions from the documentation's git repository"""
VERSION_AND_DATE_PATTERN = re.compile(r"{product-title}\s(?P<version>\d+\.\d+\.\d+).*\n+Issued:\s(?P<date>\d{4}-\d\d-\d\d)$", re.MULTILINE) VERSION_AND_DATE_PATTERN = re.compile(r"{product-title}\s(?P<version>\d+\.\d+\.\d+).*\n+Issued:\s(?P<date>\d{4}-\d\d-\d\d)$", re.MULTILINE)
product = endoflife.Product("red-hat-openshift") product = releasedata.Product("red-hat-openshift")
git = Git("https://github.com/openshift/openshift-docs.git") git = Git("https://github.com/openshift/openshift-docs.git")
git.setup() git.setup()

View File

@@ -1,7 +1,7 @@
import re import re
from bs4 import BeautifulSoup from bs4 import BeautifulSoup
from common import dates, endoflife, http from common import dates, http, releasedata
"""Fetches Satellite versions from access.redhat.com. """Fetches Satellite versions from access.redhat.com.
@@ -10,7 +10,7 @@ A few of the older versions, such as 'Satellite 6.1 GA Release (Build 6.1.1)', w
# https://regex101.com/r/m8aWXG/1 # https://regex101.com/r/m8aWXG/1
VERSION_PATTERN = re.compile(r"^Satellite (?P<version>\d+\.\d+\.\d+([.-]\d+)?) ([Uu]pdate|[Rr]elease)$") VERSION_PATTERN = re.compile(r"^Satellite (?P<version>\d+\.\d+\.\d+([.-]\d+)?) ([Uu]pdate|[Rr]elease)$")
product = endoflife.Product("redhat-satellite") product = releasedata.Product("redhat-satellite")
response = http.fetch_url("https://access.redhat.com/articles/1365633") response = http.fetch_url("https://access.redhat.com/articles/1365633")
soup = BeautifulSoup(response.text, features="html5lib") soup = BeautifulSoup(response.text, features="html5lib")

View File

@@ -1,12 +1,12 @@
import re import re
from bs4 import BeautifulSoup from bs4 import BeautifulSoup
from common import dates, endoflife, http from common import dates, http, releasedata
# https://regex101.com/r/877ibq/1 # https://regex101.com/r/877ibq/1
VERSION_PATTERN = re.compile(r"RHEL (?P<major>\d)(\. ?(?P<minor>\d+))?(( Update (?P<minor2>\d))| GA)?") VERSION_PATTERN = re.compile(r"RHEL (?P<major>\d)(\. ?(?P<minor>\d+))?(( Update (?P<minor2>\d))| GA)?")
product = endoflife.Product("redhat") product = releasedata.Product("redhat")
response = http.fetch_url("https://access.redhat.com/articles/3078") response = http.fetch_url("https://access.redhat.com/articles/3078")
soup = BeautifulSoup(response.text, features="html5lib") soup = BeautifulSoup(response.text, features="html5lib")

View File

@@ -1,6 +1,6 @@
from common import dates, endoflife, http from common import dates, endoflife, http, releasedata
product = endoflife.Product("rockylinux") product = releasedata.Product("rockylinux")
response = http.fetch_url("https://raw.githubusercontent.com/rocky-linux/wiki.rockylinux.org/development/docs/include/releng/version_table.md") response = http.fetch_url("https://raw.githubusercontent.com/rocky-linux/wiki.rockylinux.org/development/docs/include/releng/version_table.md")
for line in response.text.strip().split('\n'): for line in response.text.strip().split('\n'):

View File

@@ -1,12 +1,12 @@
import re import re
from bs4 import BeautifulSoup from bs4 import BeautifulSoup
from common import dates, endoflife, http from common import dates, http, releasedata
# https://regex101.com/r/c1ribd/1 # https://regex101.com/r/c1ribd/1
VERSION_PATTERN = re.compile(r"^ROS (?P<name>(\w| )+)") VERSION_PATTERN = re.compile(r"^ROS (?P<name>(\w| )+)")
product = endoflife.Product("ros") product = releasedata.Product("ros")
response = http.fetch_url("https://wiki.ros.org/Distributions") response = http.fetch_url("https://wiki.ros.org/Distributions")
soup = BeautifulSoup(response.text, features="html5lib") soup = BeautifulSoup(response.text, features="html5lib")

View File

@@ -1,9 +1,9 @@
import logging import logging
from bs4 import BeautifulSoup from bs4 import BeautifulSoup
from common import dates, endoflife, http from common import dates, http, releasedata
product = endoflife.Product("sles") product = releasedata.Product("sles")
response = http.fetch_url("https://www.suse.com/lifecycle") response = http.fetch_url("https://www.suse.com/lifecycle")
soup = BeautifulSoup(response.text, features="html5lib") soup = BeautifulSoup(response.text, features="html5lib")

View File

@@ -1,7 +1,7 @@
import re import re
from bs4 import BeautifulSoup from bs4 import BeautifulSoup
from common import dates, endoflife, http from common import dates, http, releasedata
VERSION_DATE_PATTERN = re.compile(r"Splunk Enterprise (?P<version>\d+\.\d+(?:\.\d+)*) was (?:first )?released on (?P<date>\w+\s\d\d?,\s\d{4})\.", re.MULTILINE) VERSION_DATE_PATTERN = re.compile(r"Splunk Enterprise (?P<version>\d+\.\d+(?:\.\d+)*) was (?:first )?released on (?P<date>\w+\s\d\d?,\s\d{4})\.", re.MULTILINE)
@@ -30,7 +30,7 @@ def get_latest_minor_versions(versions: list[str]) -> list[str]:
return latest_versions return latest_versions
product = endoflife.Product("splunk") product = releasedata.Product("splunk")
main = http.fetch_url("https://docs.splunk.com/Documentation/Splunk") main = http.fetch_url("https://docs.splunk.com/Documentation/Splunk")
soup = BeautifulSoup(main.text, features="html5lib") soup = BeautifulSoup(main.text, features="html5lib")

View File

@@ -1,6 +1,6 @@
from common import dates, endoflife, http from common import dates, http, releasedata
product = endoflife.Product("typo3") product = releasedata.Product("typo3")
data = http.fetch_url("https://get.typo3.org/api/v1/release/").json() data = http.fetch_url("https://get.typo3.org/api/v1/release/").json()
for v in data: for v in data:
if v['type'] == 'development': if v['type'] == 'development':

View File

@@ -1,5 +1,5 @@
from bs4 import BeautifulSoup from bs4 import BeautifulSoup
from common import dates, endoflife, http from common import dates, http, releasedata
# Fetches the Unity LTS releases from the Unity website. Non-LTS releases are not listed there, # Fetches the Unity LTS releases from the Unity website. Non-LTS releases are not listed there,
# so this automation is only partial. # so this automation is only partial.
@@ -9,7 +9,7 @@ from common import dates, endoflife, http
BASE_URL = "https://unity.com/releases/editor/qa/lts-releases" BASE_URL = "https://unity.com/releases/editor/qa/lts-releases"
product = endoflife.Product("unity") product = releasedata.Product("unity")
next_page_url = BASE_URL next_page_url = BASE_URL
# Do not try to fetch multiple pages in parallel: it is raising a lot of errors and make the overall process slower. # Do not try to fetch multiple pages in parallel: it is raising a lot of errors and make the overall process slower.

View File

@@ -1,11 +1,11 @@
import re import re
import mwparserfromhell import mwparserfromhell
from common import dates, endoflife, http from common import dates, endoflife, http, releasedata
DATE_PATTERN = re.compile(r"\d{4}-\d{2}-\d{2}") DATE_PATTERN = re.compile(r"\d{4}-\d{2}-\d{2}")
product = endoflife.Product("unrealircd") product = releasedata.Product("unrealircd")
response = http.fetch_url("https://www.unrealircd.org/docwiki/index.php?title=History_of_UnrealIRCd_releases&action=raw") response = http.fetch_url("https://www.unrealircd.org/docwiki/index.php?title=History_of_UnrealIRCd_releases&action=raw")
wikicode = mwparserfromhell.parse(response.text) wikicode = mwparserfromhell.parse(response.text)

View File

@@ -1,5 +1,5 @@
from bs4 import BeautifulSoup from bs4 import BeautifulSoup
from common import dates, endoflife, http from common import dates, endoflife, http, releasedata
# There is no build history for versions 2015 and below. # There is no build history for versions 2015 and below.
# This is not a big deal because there was no version for those release in a very long time. # This is not a big deal because there was no version for those release in a very long time.
@@ -9,7 +9,7 @@ URLS = [
"https://learn.microsoft.com/en-us/visualstudio/releases/2022/release-history", "https://learn.microsoft.com/en-us/visualstudio/releases/2022/release-history",
] ]
product = endoflife.Product("visualstudio") product = releasedata.Product("visualstudio")
for response in http.fetch_urls(URLS): for response in http.fetch_urls(URLS):
soup = BeautifulSoup(response.text, features="html5lib") soup = BeautifulSoup(response.text, features="html5lib")