Centralize GitHub Workflow groups declaration (#272)

It may not be the best place for that (gha.py would have been better), but it's the shortest / fastest way to do it for now.

Moreover, it now uses logging to write the group markers. The logger format has been updated so that this works correctly. This was done to fix issues in GitHub Actions logs, where groups were being declared after the log lines they were supposed to contain.
This commit is contained in:
Marc Wrobel
2023-12-31 17:00:11 +01:00
committed by GitHub
parent 5ccab8f814
commit f6a8349c46
44 changed files with 18 additions and 113 deletions

View File

@@ -9,7 +9,6 @@ RSS_URL = "https://docs.aws.amazon.com/neptune/latest/userguide/rssupdates.rss"
VERSION_PATTERN = re.compile(r"^Engine version (?P<version>[0-9R.]+)$")
product = endoflife.Product("amazon-neptune")
print(f"::group::{product.name}")
rss_response = http.fetch_url(RSS_URL)
rss = xml.dom.minidom.parseString(rss_response.text)
@@ -22,4 +21,3 @@ for entry in rss.getElementsByTagName("item"):
product.declare_version(version_match['version'], dates.parse_datetime(date_str))
product.write()
print("::endgroup::")

View File

@@ -16,7 +16,6 @@ VERSION_AND_DATE_PATTERNS = [
]
product = endoflife.Product("apache-http-server")
print(f"::group::{product.name}")
git = Git("https://github.com/apache/httpd.git")
git.setup()
@@ -35,4 +34,3 @@ for branch in git.list_branches("refs/heads/?.?.x"):
product.declare_version(version, dates.parse_date(date_str))
product.write()
print("::endgroup::")

View File

@@ -51,14 +51,12 @@ VERSION_PATTERNS = {
DATE_PATTERN = re.compile(r"\b\d+\s[A-Za-z]+\s\d+\b")
print("::group::apple")
logging.info("::group::apple")
soups = [BeautifulSoup(response.text, features="html5lib") for response in http.fetch_urls(URLS)]
print("::endgroup::")
logging.info("::endgroup::")
for product_name in VERSION_PATTERNS:
product = endoflife.Product(product_name)
print(f"::group::{product.name}")
for soup in soups:
versions_table = soup.find(id="tableWraper")
versions_table = versions_table if versions_table else soup.find('table', class_="gb-table")
@@ -85,4 +83,3 @@ for product_name in VERSION_PATTERNS:
logging.info(f"ignoring version {version} ({date}) for {product.name}")
product.write()
print("::endgroup::")

View File

@@ -6,7 +6,6 @@ from requests_html import HTMLSession
needed to render the page."""
product = endoflife.Product("artifactory")
print(f"::group::{product.name}")
r = HTMLSession().get("https://jfrog.com/help/r/jfrog-release-information/artifactory-end-of-life")
r.html.render(sleep=2, scrolldown=5)
@@ -23,4 +22,3 @@ for row in r.html.find('.informaltable tbody tr'):
product.replace_version('7.29.9', dates.date(2022, 1, 11))
product.write()
print("::endgroup::")

View File

@@ -13,7 +13,6 @@ them though. Note that this would also be unnecessary if it was possible to disa
release dates updates in the latest.py script."""
product = endoflife.Product("aws-lambda")
print(f"::group::{product.name}")
old_product = endoflife.Product.from_file(product.name)
product_frontmatter = endoflife.ProductFrontmatter(product.name)
response = http.fetch_url("https://docs.aws.amazon.com/lambda/latest/dg/lambda-runtimes.html")
@@ -38,4 +37,3 @@ for table in soup.find_all("table"):
product.declare_version(identifier, date)
product.write()
print("::endgroup::")

View File

@@ -11,8 +11,6 @@ METHOD = "cgit"
p_filter = sys.argv[1] if len(sys.argv) > 1 else None
for product_name in endoflife.list_products(METHOD, p_filter):
product = endoflife.Product(product_name)
print(f"::group::{product.name}")
product_frontmatter = endoflife.ProductFrontmatter(product.name)
for auto_config in product_frontmatter.get_auto_configs(METHOD):
response = http.fetch_url(auto_config.url + '/refs/tags')
@@ -39,4 +37,3 @@ for product_name in endoflife.list_products(METHOD, p_filter):
product.declare_version(version, date)
product.write()
print("::endgroup::")

View File

@@ -32,8 +32,6 @@ FIXED_VERSIONS = {
}
product = endoflife.Product("coldfusion")
print(f"::group::{product.name}")
for changelog in http.fetch_urls(URLS):
changelog_soup = BeautifulSoup(changelog.text, features="html5lib")
@@ -46,4 +44,3 @@ for changelog in http.fetch_urls(URLS):
product.declare_versions(FIXED_VERSIONS)
product.write()
print("::endgroup::")

View File

@@ -8,7 +8,8 @@ from pathlib import Path
import frontmatter
from liquid import Template
logging.basicConfig(format=logging.BASIC_FORMAT, level=logging.INFO)
# Do not update the format: it's also used to declare groups in the GitHub Actions logs.
logging.basicConfig(format="%(message)s", level=logging.INFO)
# Handle versions having at least 2 digits (ex. 1.2) and at most 4 digits (ex. 1.2.3.4), with an optional leading "v".
# Major version must be >= 1.
@@ -78,6 +79,7 @@ class Product:
self.name: str = name
self.versions_path: Path = VERSIONS_PATH / f"{name}.json"
self.versions = {}
logging.info(f"::group::{self}")
@staticmethod
def from_file(name: str) -> "Product":
@@ -88,9 +90,9 @@ class Product:
for version, date in json.load(f).items():
date_obj = datetime.strptime(date, "%Y-%m-%d").replace(tzinfo=timezone.utc)
product.versions[version] = date_obj
logging.info(f"loaded versions data for {product.name} from {product.versions_path}")
logging.info(f"loaded versions data for {product} from {product.versions_path}")
else:
logging.warning(f"no versions data found for {product.name} at {product.versions_path}")
logging.warning(f"no versions data found for {product} at {product.versions_path}")
return product
@@ -103,11 +105,11 @@ class Product:
def declare_version(self, version: str, date: datetime) -> None:
if version in self.versions:
if self.versions[version] != date:
logging.warning(f"overwriting version {version} ({self.versions[version]} -> {date}) for {self.name}")
logging.warning(f"overwriting version {version} ({self.versions[version]} -> {date}) for {self}")
else:
return # already declared
logging.info(f"adding version {version} ({date}) to {self.name}")
logging.info(f"adding version {version} ({date}) to {self}")
self.versions[version] = date
def declare_versions(self, dates_by_version: dict[str, datetime]) -> None:
@@ -116,18 +118,18 @@ class Product:
def replace_version(self, version: str, date: datetime) -> None:
if version not in self.versions:
msg = f"version {version} cannot be replaced as it does not exist for {self.name}"
msg = f"version {version} cannot be replaced as it does not exist for {self}"
raise ValueError(msg)
logging.info(f"replacing version {version} ({self.versions[version]} -> {date}) in {self.name}")
logging.info(f"replacing version {version} ({self.versions[version]} -> {date}) in {self}")
self.versions[version] = date
def remove_version(self, version: str) -> None:
if not self.has_version(version):
logging.warning(f"version {version} cannot be removed as it does not exist for {self.name}")
logging.warning(f"version {version} cannot be removed as it does not exist for {self}")
return
logging.info(f"removing version {version} ({self.versions.pop(version)}) from {self.name}")
logging.info(f"removing version {version} ({self.versions.pop(version)}) from {self}")
def write(self) -> None:
versions = {version: date.strftime("%Y-%m-%d") for version, date in self.versions.items()}
@@ -136,9 +138,10 @@ class Product:
# sort by date then version (desc)
sorted(versions.items(), key=lambda x: (x[1], x[0]), reverse=True),
), indent=2))
logging.info("::endgroup::")
def __repr__(self) -> str:
return f"<{self.name}>"
return self.name
def list_products(method: str, products_filter: str = None) -> list[str]:

View File

@@ -6,7 +6,6 @@ from requests_html import HTMLSession
Note that requests_html is used because JavaScript is needed to render the page."""
product = endoflife.Product("confluence")
print(f"::group::{product.name}")
r = HTMLSession().get("https://www.atlassian.com/software/confluence/download-archives")
r.html.render(sleep=1, scrolldown=3)
@@ -16,4 +15,3 @@ for version_block in r.html.find('.versions-list'):
product.declare_version(version, date)
product.write()
print("::endgroup::")

View File

@@ -15,7 +15,6 @@ def parse_date(date_text: str) -> datetime:
product = endoflife.Product("cos")
print(f"::group::{product.name}")
main = http.fetch_url("https://cloud.google.com/container-optimized-os/docs/release-notes/")
main_soup = BeautifulSoup(main.text, features="html5lib")
milestones = [cell.text.split(' ')[1] for cell in main_soup.find_all('td', string=MILESTONE_PATTERN)]
@@ -44,4 +43,3 @@ for milestone in http.fetch_urls(milestones_urls):
product.declare_version(version_match.group(1), date)
product.write()
print("::endgroup::")

View File

@@ -19,7 +19,6 @@ MANUAL_VERSIONS = {
}
product = endoflife.Product("couchbase-server")
print(f"::group::{product.name}")
main = http.fetch_url(f"{URLS}/current/install/install-intro.html")
main_soup = BeautifulSoup(main.text, features="html5lib")
@@ -39,4 +38,3 @@ for minor_version in http.fetch_urls(minor_version_urls):
product.declare_versions(MANUAL_VERSIONS)
product.write()
print("::endgroup::")

View File

@@ -42,7 +42,6 @@ def extract_point_versions(p: endoflife.Product, repo_dir: Path) -> None:
product = endoflife.Product("debian")
print(f"::group::{product.name}")
git = Git("https://salsa.debian.org/webmaster-team/webwml.git")
git.setup()
git.checkout("master", file_list=["english/News"])
@@ -51,4 +50,3 @@ extract_major_versions(product, git.repo_dir)
extract_point_versions(product, git.repo_dir)
product.write()
print("::endgroup::")

View File

@@ -8,8 +8,6 @@ METHOD = 'distrowatch'
p_filter = sys.argv[1] if len(sys.argv) > 1 else None
for product_name in endoflife.list_products(METHOD, p_filter):
product = endoflife.Product(product_name)
print(f"::group::{product.name}")
product_frontmatter = endoflife.ProductFrontmatter(product.name)
for config in product_frontmatter.get_auto_configs(METHOD):
response = http.fetch_url(f"https://distrowatch.com/index.php?distribution={config.url}")
@@ -29,4 +27,3 @@ for product_name in endoflife.list_products(METHOD, p_filter):
product.declare_version(version, date)
product.write()
print("::endgroup::")

View File

@@ -25,11 +25,7 @@ def fetch_releases(p: endoflife.Product, c: endoflife.AutoConfig, url: str) -> N
p_filter = sys.argv[1] if len(sys.argv) > 1 else None
for product_name in endoflife.list_products(METHOD, p_filter):
product = endoflife.Product(product_name)
print(f"::group::{product.name}")
product_frontmatter = endoflife.ProductFrontmatter(product.name)
for config in product_frontmatter.get_auto_configs(METHOD):
fetch_releases(product, config, f"https://hub.docker.com/v2/repositories/{config.url}/tags?page_size=100&page=1")
product.write()
print("::endgroup::")

View File

@@ -14,8 +14,6 @@ URLS = [
]
product = endoflife.Product("eks")
print(f"::group::{product.name}")
for version_list in http.fetch_urls(URLS):
version_list_soup = BeautifulSoup(version_list.text, features="html5lib")
for tr in version_list_soup.select("#main-col-body")[0].findAll("tr"):
@@ -35,4 +33,3 @@ for version_list in http.fetch_urls(URLS):
product.declare_version(version, date)
product.write()
print("::endgroup::")

View File

@@ -9,7 +9,6 @@ from common import dates, endoflife, http
Versions lower than 10.0 are ignored because too difficult to parse."""
product = endoflife.Product("firefox")
print(f"::group::{product.name}")
releases_page = http.fetch_url("https://www.mozilla.org/en-US/firefox/releases/")
releases_soup = BeautifulSoup(releases_page.text, features="html5lib")
releases_list = releases_soup.find_all("ol", class_="c-release-list")
@@ -30,4 +29,3 @@ for release_notes in http.fetch_urls(release_notes_urls):
# versions < 10.0 are ignored
product.write()
print("::endgroup::")

View File

@@ -10,8 +10,6 @@ METHOD = 'git'
p_filter = sys.argv[1] if len(sys.argv) > 1 else None
for product_name in endoflife.list_products(METHOD, p_filter):
product = endoflife.Product(product_name)
print(f"::group::{product.name}")
product_frontmatter = endoflife.ProductFrontmatter(product.name)
for config in product_frontmatter.get_auto_configs(METHOD):
git = Git(config.url)
@@ -26,4 +24,3 @@ for product_name in endoflife.list_products(METHOD, p_filter):
product.declare_version(version, date)
product.write()
print("::endgroup::")

View File

@@ -45,8 +45,6 @@ query($endCursor: String) {
p_filter = sys.argv[1] if len(sys.argv) > 1 else None
for product_name in endoflife.list_products(METHOD, p_filter):
product = endoflife.Product(product_name)
print(f"::group::{product.name}")
product_frontmatter = endoflife.ProductFrontmatter(product.name)
for config in product_frontmatter.get_auto_configs(METHOD):
for page in fetch_releases(config.url):
@@ -62,4 +60,3 @@ for product_name in endoflife.list_products(METHOD, p_filter):
product.declare_version(version, date)
product.write()
print("::endgroup::")

View File

@@ -14,7 +14,6 @@ URL_BY_PRODUCT = {
for product_name, url in URL_BY_PRODUCT.items():
product = endoflife.Product(product_name)
print(f"::group::{product.name}")
relnotes = http.fetch_url(url)
relnotes_soup = BeautifulSoup(relnotes.text, features="html5lib")
@@ -29,4 +28,3 @@ for product_name, url in URL_BY_PRODUCT.items():
product.declare_version(version, date)
product.write()
print("::endgroup::")

View File

@@ -2,7 +2,6 @@ from bs4 import BeautifulSoup
from common import dates, endoflife, http
product = endoflife.Product("graalvm")
print(f"::group::{product.name}")
release_calendar = http.fetch_url("https://www.graalvm.org/release-calendar/")
release_calendar_soup = BeautifulSoup(release_calendar.text, features="html5lib")
@@ -17,4 +16,3 @@ for tr in release_calendar_soup.findAll("table")[1].find("tbody").findAll("tr"):
product.declare_version(version, date)
product.write()
print("::endgroup::")

View File

@@ -7,7 +7,6 @@ CYCLE_PATTERN = re.compile(r"^(\d+\.\d+)/$")
DATE_AND_VERSION_PATTERN = re.compile(r"^(\d{4})/(\d{2})/(\d{2})\s+:\s+(\d+\.\d+\.\d.?)$") # https://regex101.com/r/1JCnFC/1
product = endoflife.Product("haproxy")
print(f"::group::{product.name}")
# First, get all minor releases from the download page
download = http.fetch_url('https://www.haproxy.org/download/')
download_soup = BeautifulSoup(download.text, features="html5lib")
@@ -31,4 +30,3 @@ for changelog in http.fetch_urls(changelog_urls):
product.declare_version(version, dates.date(int(year), int(month), int(day)))
product.write()
print("::endgroup::")

View File

@@ -7,7 +7,6 @@ URLS = [
]
product = endoflife.Product("ibm-aix")
print(f"::group::{product.name}")
for page in http.fetch_urls(URLS):
page_soup = BeautifulSoup(page.text, features="html5lib")
@@ -19,4 +18,3 @@ for page in http.fetch_urls(URLS):
product.declare_version(version, date)
product.write()
print("::endgroup::")

View File

@@ -6,7 +6,6 @@ from requests_html import HTMLSession
Note that requests_html is used because JavaScript is needed to render the page."""
product = endoflife.Product("jira")
print(f"::group::{product.name}")
r = HTMLSession().get("https://www.atlassian.com/software/jira/update")
r.html.render(sleep=1, scrolldown=3)
@@ -16,4 +15,3 @@ for version_block in r.html.find('.versions-list'):
product.declare_version(version, date)
product.write()
print("::endgroup::")

View File

@@ -11,7 +11,6 @@ ANNOUNCEMENT_PATTERN = re.compile(r"includes\s+the\s+following\s+changes", re.IG
VERSION_PATTERN = re.compile(r"Looker\s+(?P<version>\d+\.\d+)", re.IGNORECASE)
product = endoflife.Product("looker")
print(f"::group::{product.name}")
response = http.fetch_url("https://cloud.google.com/feeds/looker-release-notes.xml")
rss = xml.dom.minidom.parseString(response.text)
@@ -33,4 +32,3 @@ for item in rss.getElementsByTagName("entry"):
product.declare_version(version, date)
product.write()
print("::endgroup::")

View File

@@ -8,8 +8,6 @@ METHOD = "maven"
p_filter = sys.argv[1] if len(sys.argv) > 1 else None
for product_name in endoflife.list_products(METHOD, p_filter):
product = endoflife.Product(product_name)
print(f"::group::{product.name}")
product_frontmatter = endoflife.ProductFrontmatter(product.name)
for config in product_frontmatter.get_auto_configs(METHOD):
start = 0
@@ -31,4 +29,3 @@ for product_name in endoflife.list_products(METHOD, p_filter):
break
product.write()
print("::endgroup::")

View File

@@ -7,8 +7,6 @@ METHOD = "npm"
p_filter = sys.argv[1] if len(sys.argv) > 1 else None
for product_name in endoflife.list_products(METHOD, p_filter):
product = endoflife.Product(product_name)
print(f"::group::{product.name}")
product_frontmatter = endoflife.ProductFrontmatter(product.name)
for config in product_frontmatter.get_auto_configs(METHOD):
data = http.fetch_url(f"https://registry.npmjs.org/{config.url}").json()
@@ -20,4 +18,3 @@ for product_name in endoflife.list_products(METHOD, p_filter):
product.declare_version(version, date)
product.write()
print("::endgroup::")

View File

@@ -9,7 +9,6 @@ PRODUCTS = {
}
for product_name, url in PRODUCTS.items():
print(f"::group::{product_name}")
product = endoflife.Product(product_name)
data = http.fetch_url(url).json()
@@ -20,4 +19,3 @@ for product_name, url in PRODUCTS.items():
product.declare_version(version, date)
product.write()
print("::endgroup::")

View File

@@ -6,7 +6,6 @@ from requests_html import HTMLSession
This script is using requests-html because the page needs JavaScript to render correctly."""
product = endoflife.Product("oracle-jdk")
print(f"::group::{product.name}")
r = HTMLSession().get('https://www.java.com/releases/')
r.html.render(sleep=1, scrolldown=3)
@@ -22,4 +21,3 @@ for row in r.html.find('#released tr'):
product.remove_version('1.0_alpha') # the only version we don't want, a regex is not needed
product.write()
print("::endgroup::")

View File

@@ -1,3 +1,4 @@
import logging
import re
from bs4 import BeautifulSoup
@@ -10,15 +11,13 @@ IDENTIFIERS_BY_PRODUCT = {
}
# all products are on the same page, it's faster to fetch it only once
print("::group::palo-alto-networks")
logging.info("::group::palo-alto-networks")
response = http.fetch_url("https://www.paloaltonetworks.com/services/support/end-of-life-announcements/end-of-life-summary")
soup = BeautifulSoup(response.text, features="html5lib")
print("::endgroup::")
logging.info("::endgroup::")
for product_name, identifier in IDENTIFIERS_BY_PRODUCT.items():
print(f"::group::{product_name}")
product = endoflife.Product(product_name)
table = soup.find(id=identifier)
for tr in table.findAll("tr")[3:]:
td_list = tr.findAll("td")
@@ -37,4 +36,3 @@ for product_name, identifier in IDENTIFIERS_BY_PRODUCT.items():
product.declare_version(version, date)
product.write()
print("::endgroup::")

View File

@@ -3,7 +3,6 @@ from common import dates, endoflife, http
MAIN_URL = "https://www.php.net/releases/index.php?json&max=-1"
product = endoflife.Product("php")
print(f"::group::{product.name}")
# Fetch major versions
latest_by_major = http.fetch_url(MAIN_URL).json()
@@ -18,4 +17,3 @@ for major_versions_response in http.fetch_urls(major_version_urls):
product.declare_version(version, date)
product.write()
print("::endgroup::")

View File

@@ -7,7 +7,6 @@ Only 18.0.20.3 and later will be picked up, as the format of the change log for
there is no entry for GA of version 18.0.18 and older."""
product = endoflife.Product("plesk")
print(f"::group::{product.name}")
response = http.fetch_url("https://docs.plesk.com/release-notes/obsidian/change-log")
soup = BeautifulSoup(response.text, features="html5lib")
@@ -24,4 +23,3 @@ for release in soup.find_all("div", class_="changelog-entry--obsidian"):
product.declare_version(version, date)
product.write()
print("::endgroup::")

View File

@@ -7,8 +7,6 @@ METHOD = "pypi"
p_filter = sys.argv[1] if len(sys.argv) > 1 else None
for product_name in endoflife.list_products(METHOD, p_filter):
product = endoflife.Product(product_name)
print(f"::group::{product.name}")
product_frontmatter = endoflife.ProductFrontmatter(product.name)
for config in product_frontmatter.get_auto_configs(METHOD):
data = http.fetch_url(f"https://pypi.org/pypi/{config.url}/json").json()
@@ -23,4 +21,3 @@ for product_name in endoflife.list_products(METHOD, p_filter):
product.declare_version(version, date)
product.write()
print("::endgroup::")

View File

@@ -16,7 +16,6 @@ PRODUCTS = {
VERSION_REGEX = re.compile(r"(?P<version>\d+(?:\.\d+)*)", flags=re.IGNORECASE) # https://regex101.com/r/BY1vwV/1
for product_name, url in PRODUCTS.items():
print(f"::group::{product_name}")
product = endoflife.Product(product_name)
response = http.fetch_url(url)
soup = BeautifulSoup(response.text, features="html5lib")
@@ -34,4 +33,3 @@ for product_name, url in PRODUCTS.items():
product.declare_version(version, date)
product.write()
print("::endgroup::")

View File

@@ -8,7 +8,6 @@ from common.git import Git
VERSION_AND_DATE_PATTERN = re.compile(r"{product-title}\s(?P<version>\d+\.\d+\.\d+).*\n+Issued:\s(?P<date>\d{4}-\d\d-\d\d)$", re.MULTILINE)
product = endoflife.Product("red-hat-openshift")
print(f"::group::{product.name}")
git = Git("https://github.com/openshift/openshift-docs.git")
git.setup()
@@ -28,4 +27,3 @@ for branch in git.list_branches("refs/heads/enterprise-[4-9]*"):
product.declare_version(version, dates.parse_date(date_str))
product.write()
print("::endgroup::")

View File

@@ -11,7 +11,6 @@ A few of the older versions, such as 'Satellite 6.1 GA Release (Build 6.1.1)', w
VERSION_PATTERN = re.compile(r"^Satellite (?P<version>\d+\.\d+\.\d+([.-]\d+)?) ([Uu]pdate|[Rr]elease)$")
product = endoflife.Product("redhat-satellite")
print(f"::group::{product.name}")
response = http.fetch_url("https://access.redhat.com/articles/1365633")
soup = BeautifulSoup(response.text, features="html5lib")
@@ -27,4 +26,3 @@ for table in soup.findAll("tbody"):
product.declare_version(version, date)
product.write()
print("::endgroup::")

View File

@@ -7,7 +7,6 @@ from common import dates, endoflife, http
VERSION_PATTERN = re.compile(r"RHEL (?P<major>\d)(\. ?(?P<minor>\d+))?(( Update (?P<minor2>\d))| GA)?")
product = endoflife.Product("redhat")
print(f"::group::{product.name}")
response = http.fetch_url("https://access.redhat.com/articles/3078")
soup = BeautifulSoup(response.text, features="html5lib")
@@ -26,4 +25,3 @@ for tr in soup.findAll("tr"):
product.declare_version(version, date)
product.write()
print("::endgroup::")

View File

@@ -1,7 +1,6 @@
from common import dates, endoflife, http
product = endoflife.Product("rockylinux")
print(f"::group::{product.name}")
response = http.fetch_url("https://raw.githubusercontent.com/rocky-linux/wiki.rockylinux.org/development/docs/include/releng/version_table.md")
for line in response.text.strip().split('\n'):
@@ -12,4 +11,3 @@ for line in response.text.strip().split('\n'):
product.declare_version(version, date)
product.write()
print("::endgroup::")

View File

@@ -7,7 +7,6 @@ from common import dates, endoflife, http
VERSION_PATTERN = re.compile(r"^ROS (?P<name>(\w| )+)")
product = endoflife.Product("ros")
print(f"::group::{product.name}")
response = http.fetch_url("https://wiki.ros.org/Distributions")
soup = BeautifulSoup(response.text, features="html5lib")
@@ -29,4 +28,3 @@ for tr in soup.findAll("tr"):
product.declare_version(version, date)
product.write()
print("::endgroup::")

View File

@@ -4,7 +4,6 @@ from bs4 import BeautifulSoup
from common import dates, endoflife, http
product = endoflife.Product("sles")
print(f"::group::{product.name}")
response = http.fetch_url("https://www.suse.com/lifecycle")
soup = BeautifulSoup(response.text, features="html5lib")
@@ -31,4 +30,3 @@ for detail_id in [f"detail{row['id']}" for row in sles_header_rows]:
logging.info(f"Ignoring {version}: date '{date_str}' could not be parsed")
product.write()
print("::endgroup::")

View File

@@ -31,7 +31,6 @@ def get_latest_minor_versions(versions: list[str]) -> list[str]:
product = endoflife.Product("splunk")
print(f"::group::{product.name}")
main = http.fetch_url("https://docs.splunk.com/Documentation/Splunk")
soup = BeautifulSoup(main.text, features="html5lib")
@@ -48,4 +47,3 @@ for response in http.fetch_urls(latest_minor_versions_urls):
product.declare_version(version_str, date)
product.write()
print("::endgroup::")

View File

@@ -1,7 +1,6 @@
from common import dates, endoflife, http
product = endoflife.Product("typo3")
print(f"::group::{product.name}")
data = http.fetch_url("https://get.typo3.org/api/v1/release/").json()
for v in data:
if v['type'] == 'development':
@@ -12,4 +11,3 @@ for v in data:
product.declare_version(version, date)
product.write()
print("::endgroup::")

View File

@@ -10,7 +10,6 @@ from common import dates, endoflife, http
BASE_URL = "https://unity.com/releases/editor/qa/lts-releases"
product = endoflife.Product("unity")
print(f"::group::{product.name}")
next_page_url = BASE_URL
# Do not try to fetch multiple pages in parallel: it is raising a lot of errors and make the overall process slower.
@@ -27,4 +26,3 @@ while next_page_url:
next_page_url = BASE_URL + next_link.attrs['href'] if next_link else None
product.write()
print("::endgroup::")

View File

@@ -6,7 +6,6 @@ from common import dates, endoflife, http
DATE_PATTERN = re.compile(r"\d{4}-\d{2}-\d{2}")
product = endoflife.Product("unrealircd")
print(f"::group::{product.name}")
response = http.fetch_url("https://www.unrealircd.org/docwiki/index.php?title=History_of_UnrealIRCd_releases&action=raw")
wikicode = mwparserfromhell.parse(response.text)
@@ -22,4 +21,3 @@ for tr in wikicode.ifilter_tags(matches=lambda node: node.tag == "tr"):
product.declare_version(version, date)
product.write()
print("::endgroup::")

View File

@@ -10,7 +10,6 @@ URLS = [
]
product = endoflife.Product("visualstudio")
print(f"::group::{product.name}")
for response in http.fetch_urls(URLS):
soup = BeautifulSoup(response.text, features="html5lib")
@@ -34,4 +33,3 @@ for response in http.fetch_urls(URLS):
product.declare_version(version, date)
product.write()
print("::endgroup::")