diff --git a/.ruff.toml b/.ruff.toml index b1947828..42f25fff 100644 --- a/.ruff.toml +++ b/.ruff.toml @@ -18,6 +18,7 @@ select = [ "N", # pep8-naming "PIE", # flake8-pie "PGH", # pygrep-hooks + "PTH", # flake8-use-pathlib "RET", # flake8-return "RUF100", # unused noqa (yesqa) "SLF", # flake8-self diff --git a/latest.py b/latest.py index 4ee0b89c..3d7b3ac5 100644 --- a/latest.py +++ b/latest.py @@ -4,7 +4,6 @@ import json import logging import os import re -from os.path import exists from pathlib import Path import frontmatter @@ -97,7 +96,7 @@ class Product: self.product_path = product_dir / f"{name}.md" self.versions_path = versions_dir / f"{name}.json" - with open(self.product_path) as product_file: + with self.product_path.open() as product_file: # First read the frontmatter of the product file. yaml = YAML() yaml.preserve_quotes = True @@ -107,7 +106,7 @@ class Product: product_file.seek(0) _, self.content = frontmatter.parse(product_file.read()) - with open(self.versions_path) as versions_file: + with self.versions_path.open() as versions_file: self.versions = json.loads(versions_file.read()) self.releases = [ReleaseCycle(release) for release in self.data["releases"]] @@ -134,7 +133,7 @@ class Product: self.unmatched_versions[version] = date def write(self) -> None: - with open(self.product_path, "w") as product_file: + with self.product_path.open("w") as product_file: product_file.truncate() product_file.write("---\n") @@ -150,13 +149,13 @@ class Product: def github_output(message: str) -> None: logging.debug(f"GITHUB_OUTPUT += {message.strip()}") if os.getenv("GITHUB_OUTPUT"): - with open(os.getenv("GITHUB_OUTPUT"), 'a') as f: + with open(os.getenv("GITHUB_OUTPUT"), 'a') as f: # NOQA: PTH123 f.write(message) def update_product(name: str, product_dir: Path, releases_dir: Path) -> None: versions_path = releases_dir / f"{name}.json" - if not exists(versions_path): + if not versions_path.exists(): logging.debug(f"Skipping {name}, {versions_path} does not 
exist") return diff --git a/report.py b/report.py index 5f5f5510..0ef0a362 100644 --- a/report.py +++ b/report.py @@ -1,15 +1,15 @@ import sys import time -from glob import glob +from pathlib import Path import frontmatter products = {} count = 0 count_auto = 0 -products_dir = sys.argv[1] if len(sys.argv) > 1 else 'website/products/' -for product_file in sorted(glob(f'{products_dir}/*.md')): - with open(product_file) as f: +products_dir = Path(sys.argv[1] if len(sys.argv) > 1 else 'website/products/') +for product_file in sorted(products_dir.glob('*.md')): + with product_file.open() as f: data = frontmatter.load(f) count += 1 title = data['title'] diff --git a/src/apache-http-server.py b/src/apache-http-server.py index fc4fc7bd..c2a54d11 100644 --- a/src/apache-http-server.py +++ b/src/apache-http-server.py @@ -27,7 +27,7 @@ for branch in git.list_branches("refs/heads/?.?.x"): if not release_notes_file.exists(): continue - with open(release_notes_file, "rb") as f: + with release_notes_file.open("rb") as f: release_notes = f.read().decode("utf-8", errors="ignore") for pattern in VERSION_AND_DATE_PATTERNS: diff --git a/src/common/endoflife.py b/src/common/endoflife.py index 0998d729..61419f7b 100644 --- a/src/common/endoflife.py +++ b/src/common/endoflife.py @@ -3,7 +3,7 @@ import logging import os import re from datetime import datetime, timezone -from glob import glob +from pathlib import Path import frontmatter from liquid import Template @@ -16,8 +16,8 @@ DEFAULT_VERSION_REGEX = r"^v?(?P<major>[1-9]\d*)\.(?P<minor>\d+)(\.(?P<patch>\d+ DEFAULT_VERSION_PATTERN = re.compile(DEFAULT_VERSION_REGEX) DEFAULT_VERSION_TEMPLATE = "{{major}}{% if minor %}.{{minor}}{% if patch %}.{{patch}}{% if tiny %}.{{tiny}}{% endif %}{% endif %}{% endif %}" -PRODUCTS_PATH = os.environ.get("PRODUCTS_PATH", "website/products") -VERSIONS_PATH = os.environ.get("VERSIONS_PATH", "releases") +PRODUCTS_PATH = Path(os.environ.get("PRODUCTS_PATH", "website/products")) +VERSIONS_PATH = 
Path(os.environ.get("VERSIONS_PATH", "releases")) class AutoConfig: @@ -44,11 +44,11 @@ class AutoConfig: class ProductFrontmatter: def __init__(self, name: str) -> None: self.name: str = name - self.path: str = f"{PRODUCTS_PATH}/{name}.md" + self.path: Path = PRODUCTS_PATH / f"{name}.md" self.data = None - if os.path.isfile(self.path): - with open(self.path) as f: + if self.path.is_file(): + with self.path.open() as f: self.data = frontmatter.load(f) logging.info(f"loaded product data for {self.name} from {self.path}") else: @@ -76,15 +76,15 @@ class ProductFrontmatter: class Product: def __init__(self, name: str) -> None: self.name: str = name - self.versions_path: str = f"{VERSIONS_PATH}/{name}.json" + self.versions_path: Path = VERSIONS_PATH / f"{name}.json" self.versions = {} @staticmethod def from_file(name: str) -> "Product": product = Product(name) - if not os.path.isfile(product.versions_path): - with open(product.versions_path) as f: + if product.versions_path.is_file(): + with product.versions_path.open() as f: for version, date in json.load(f).items(): date_obj = datetime.strptime(date, "%Y-%m-%d").replace(tzinfo=timezone.utc) product.versions[version] = date_obj @@ -131,7 +131,7 @@ class Product: def write(self) -> None: versions = {version: date.strftime("%Y-%m-%d") for version, date in self.versions.items()} - with open(self.versions_path, "w") as f: + with self.versions_path.open("w") as f: f.write(json.dumps(dict( # sort by date then version (desc) sorted(versions.items(), key=lambda x: (x[1], x[0]), reverse=True), @@ -146,12 +146,12 @@ def list_products(method: str, products_filter: str = None) -> list[str]: """ products = [] - for product_file in glob(f"{PRODUCTS_PATH}/*.md"): - product_name = os.path.splitext(os.path.basename(product_file))[0] + for product_file in PRODUCTS_PATH.glob("*.md"): + product_name = product_file.stem if products_filter and product_name != products_filter: continue - with open(product_file) as f: + with 
product_file.open() as f: data = frontmatter.load(f) if "auto" in data: matching_configs = list(filter(lambda config: method in config, data["auto"])) diff --git a/src/red-hat-openshift.py b/src/red-hat-openshift.py index 0d0bf397..3bf94e0c 100644 --- a/src/red-hat-openshift.py +++ b/src/red-hat-openshift.py @@ -22,7 +22,7 @@ for branch in git.list_branches("refs/heads/enterprise-[4-9]*"): if not release_notes_file.exists(): continue - with open(release_notes_file, "rb") as f: + with release_notes_file.open("rb") as f: content = f.read().decode("utf-8") for (version, date_str) in VERSION_AND_DATE_PATTERN.findall(content): product.declare_version(version, dates.parse_date(date_str)) diff --git a/update.py b/update.py index 8313763d..31f624a1 100644 --- a/update.py +++ b/update.py @@ -23,7 +23,7 @@ def github_output(name: str, value: str) -> None: else: command = f"{name}={value}" - with open(os.environ["GITHUB_OUTPUT"], 'a') as github_output_var: + with open(os.environ["GITHUB_OUTPUT"], 'a') as github_output_var: # NOQA: PTH123 print(command, file=github_output_var) logging.debug(f"Wrote to GITHUB_OUTPUT: {name}={value.strip()}") @@ -33,17 +33,17 @@ def add_summary_line(line: str) -> None: logging.debug(f"GITHUB_STEP_SUMMARY does not exist, but would have written: {line}") return - with open(os.environ["GITHUB_STEP_SUMMARY"], 'a') as github_step_summary: + with open(os.environ["GITHUB_STEP_SUMMARY"], 'a') as github_step_summary: # NOQA: PTH123 print(line, file=github_step_summary) -SRC_DIR = 'src' -DATA_DIR = 'releases' +SRC_DIR = Path('src') +DATA_DIR = Path('releases') logging.basicConfig(format=logging.BASIC_FORMAT, level=logging.INFO) # Run scripts -scripts = sorted([os.path.join(SRC_DIR, file) for file in os.listdir(SRC_DIR) if file.endswith('.py')]) +scripts = sorted([SRC_DIR / file for file in os.listdir(SRC_DIR) if file.endswith('.py')]) some_script_failed = False add_summary_line("## Script execution summary\n") @@ -53,7 +53,7 @@ for script in scripts: 
logging.info(f"start running {script}") start = time.perf_counter() - child = subprocess.run([sys.executable, script], timeout=300) + child = subprocess.run([sys.executable, script]) # timeout handled in subscripts elapsed_seconds = time.perf_counter() - start if child.returncode != 0: @@ -67,34 +67,35 @@ for script in scripts: # Generate commit message subprocess.run('git add --all', timeout=10, check=True, shell=True) # to also get new files in git diff git_diff = subprocess.run('git diff --name-only --staged', capture_output=True, timeout=10, check=True, shell=True) -updated_files = sorted([Path(file) for file in git_diff.stdout.decode('utf-8').split('\n') if file.startswith(DATA_DIR)]) -logging.info(f"Updated files: {updated_files}") +updated_files = [Path(file) for file in git_diff.stdout.decode('utf-8').split('\n')] +updated_product_files = sorted([file for file in updated_files if file.parent == DATA_DIR]) +logging.info(f"Updated product files: {[file.name for file in updated_product_files]}") add_summary_line("## Update summary\n") -if updated_files: +if updated_product_files: # get modified files content new_files_content = {} - for path in updated_files: - with open(path) as file: + for path in updated_product_files: + with path.open() as file: new_files_content[path] = json.load(file) # get original files content old_files_content = {} subprocess.run('git stash --all --quiet', timeout=10, check=True, shell=True) - for path in updated_files: + for path in updated_product_files: if path.exists(): - with open(path) as file: + with path.open() as file: old_files_content[path] = json.load(file) else: # new file old_files_content[path] = {} subprocess.run('git stash pop --quiet', timeout=10, check=True, shell=True) # Generate commit message - product_names = ', '.join([path.stem for path in updated_files]) + product_names = ', '.join([path.stem for path in updated_product_files]) commit_message = f"🤖: {product_names}\n\n" - add_summary_line(f"Updated 
{len(updated_files)} products: {product_names}.") + add_summary_line(f"Updated {len(updated_product_files)} products: {product_names}.") - for path in updated_files: + for path in updated_product_files: add_summary_line(f"### {path.stem}\n") commit_message += f"{path.stem}:\n"