[apple] Adapt script for single-product usage (#317)

Make the Apple script compatible with the way update.py now works, which is 'product' oriented, meaning the script will be called once for each product.

To minimize the impact, responses are now cached to avoid rate-limiting by support.apple.com.

Version patterns have also been moved to each product's auto configuration to make future changes simpler.
This commit is contained in:
Marc Wrobel
2024-02-21 00:01:25 +01:00
parent b11f01bc62
commit 2d5145444b
4 changed files with 20 additions and 39 deletions

View File

@@ -29,11 +29,11 @@ class AutoConfig:
regexes_include = data.get("regex", DEFAULT_VERSION_REGEX)
regexes_include = regexes_include if isinstance(regexes_include, list) else [regexes_include]
self.include_version_patterns = [re.compile(r) for r in regexes_include]
self.include_version_patterns = [re.compile(r, re.MULTILINE) for r in regexes_include]
regexes_exclude = data.get("regex_exclude", [])
regexes_exclude = regexes_exclude if isinstance(regexes_exclude, list) else [regexes_exclude]
self.exclude_version_patterns = [re.compile(r) for r in regexes_exclude]
self.exclude_version_patterns = [re.compile(r, re.MULTILINE) for r in regexes_exclude]
def first_match(self, version: str) -> re.Match | None:
for exclude_pattern in self.exclude_version_patterns:

View File

@@ -5,6 +5,7 @@ from playwright.sync_api import sync_playwright
from requests import Response
from requests.adapters import HTTPAdapter
from requests.exceptions import ChunkedEncodingError
from requests_cache import CachedSession
from requests_futures.sessions import FuturesSession
from urllib3.util import Retry
@@ -13,11 +14,13 @@ USER_AGENT = 'Mozilla/5.0 (X11; Linux x86_64; rv:109.0) Gecko/20100101 Firefox/1
def fetch_urls(urls: list[str], data: any = None, headers: dict[str, str] = None,
max_retries: int = 10, backoff_factor: float = 0.5, timeout: int = 30) -> list[Response]:
max_retries: int = 10, backoff_factor: float = 0.5, timeout: int = 30,
cache: bool = False) -> list[Response]:
logging.info(f"Fetching {urls}")
try:
with FuturesSession() as session:
underlying_session = CachedSession('/tmp/http_cache', backend='filesystem') if cache else None
with FuturesSession(session=underlying_session) as session:
adapter = HTTPAdapter(max_retries=Retry(total=max_retries, backoff_factor=backoff_factor))
session.mount('http://', adapter)
session.mount('https://', adapter)