Do not use wait_until='networkidle' by default with fetch_javascript_url (#340)

It does not work in all situations, especially with GitHub-rendered markdown files.
This commit is contained in:
Marc Wrobel
2024-04-02 22:28:18 +02:00
parent 81b65584ad
commit 08d4ea469e
5 changed files with 14 additions and 9 deletions

View File

@@ -38,7 +38,8 @@ def fetch_urls(urls: list[str], data: any = None, headers: dict[str, str] = None
raise e # So that the function does not get stuck in an infinite loop.
# We could wait a bit before retrying, but it's not clear if it would help.
logging.warning(f"Got ChunkedEncodingError while fetching {urls} ({e}), retrying (remaining retries = {next_max_retries}).")
logging.warning(
f"Got ChunkedEncodingError while fetching {urls} ({e}), retrying (remaining retries = {next_max_retries}).")
return fetch_urls(urls, data, headers, next_max_retries, backoff_factor, timeout)
@@ -48,13 +49,13 @@ def fetch_url(url: str, data: any = None, headers: dict[str, str] = None,
# This requires some setup, see https://playwright.dev/python/docs/intro#installing-playwright.
def fetch_javascript_url(url: str, click_selector: str = None) -> str:
logging.info(f"Fetching {url}")
def fetch_javascript_url(url: str, click_selector: str = None, wait_until: str = None) -> str:
logging.info(f"Fetching {url} with JavaScript (click_selector = {click_selector}, wait_until = {wait_until})")
with sync_playwright() as p:
browser = p.chromium.launch()
try:
page = browser.new_page()
page.goto(url, wait_until='networkidle')
page.goto(url, wait_until=wait_until)
if click_selector:
logging.info(f"Clicked on {click_selector}")
page.click(click_selector)