Enable flake8-return linting rules (#267)

See https://docs.astral.sh/ruff/rules/#flake8-return-ret.
This commit is contained in:
Marc Wrobel
2023-12-30 11:47:28 +01:00
parent c985bf4d5e
commit 801859d78a
4 changed files with 10 additions and 8 deletions

View File

@@ -17,6 +17,7 @@ select = [
"N", # pep8-naming
"PIE", # flake8-pie
"PGH", # pygrep-hooks
"RET", # flake8-return
"RUF100", # unused noqa (yesqa)
"T10", # flake8-debugger
"UP", # pyupgrade

View File

@@ -45,8 +45,7 @@ def parse_datetime(text: str, formats: list[str] = frozenset([
for fmt in formats:
try:
date = datetime.strptime(text, fmt) # NOQA: DTZ007, timezone is handled below
date = date.astimezone(timezone.utc) if to_utc else date
return date
return date.astimezone(timezone.utc) if to_utc else date
except ValueError:
pass

View File

@@ -30,11 +30,12 @@ class AutoConfig:
regexes = regexes if isinstance(regexes, list) else [regexes]
self.version_patterns = [re.compile(regex) for regex in regexes]
def first_match(self, version: str) -> re.Match:
def first_match(self, version: str) -> re.Match | None:
for pattern in self.version_patterns:
match = pattern.match(version)
if match:
return match
return None
def render(self, match: re.Match) -> str:
return self.version_template.render(**match.groupdict())
@@ -65,10 +66,11 @@ class ProductFrontmatter:
return configs
def get_release_date(self, release_cycle: str) -> datetime:
def get_release_date(self, release_cycle: str) -> datetime | None:
for release in self.data["releases"]:
if release["releaseCycle"] == release_cycle:
return release["releaseDate"]
return None
class Product:

View File

@@ -32,10 +32,10 @@ def fetch_urls(urls: list[str], data: any = None, headers: dict[str, str] = None
if next_max_retries == 0:
logging.error(f"Got ChunkedEncodingError while fetching {urls} ({e}), giving up")
raise e # So that the function does not get stuck in an infinite loop.
else:
# We could wait a bit before retrying, but it's not clear if it would help.
logging.warning(f"Got ChunkedEncodingError while fetching {urls} ({e}), retrying (remaining retries = {next_max_retries}).")
return fetch_urls(urls, data, headers, next_max_retries, backoff_factor, timeout)
# We could wait a bit before retrying, but it's not clear if it would help.
logging.warning(f"Got ChunkedEncodingError while fetching {urls} ({e}), retrying (remaining retries = {next_max_retries}).")
return fetch_urls(urls, data, headers, next_max_retries, backoff_factor, timeout)
def fetch_url(url: str, data: any = None, headers: dict[str, str] = None,