diff --git a/tests/unit/browsers/test_browser_pool.py b/tests/unit/browsers/test_browser_pool.py index 56e4706adc..a8e054379f 100644 --- a/tests/unit/browsers/test_browser_pool.py +++ b/tests/unit/browsers/test_browser_pool.py @@ -1,5 +1,6 @@ from __future__ import annotations +from datetime import timedelta from typing import TYPE_CHECKING from unittest.mock import AsyncMock @@ -103,7 +104,10 @@ async def test_new_page_with_each_plugin(server_url: URL) -> None: @run_alone_on_mac async def test_with_default_plugin_constructor(server_url: URL) -> None: - async with BrowserPool.with_default_plugin(headless=True, browser_type='firefox') as browser_pool: + # Use a generous operation timeout so that Firefox has enough time to launch on slow Windows CI. + async with BrowserPool.with_default_plugin( + headless=True, browser_type='firefox', operation_timeout=timedelta(seconds=60) + ) as browser_pool: assert len(browser_pool.plugins) == 1 assert isinstance(browser_pool.plugins[0], PlaywrightBrowserPlugin) diff --git a/tests/unit/crawlers/_adaptive_playwright/test_adaptive_playwright_crawler.py b/tests/unit/crawlers/_adaptive_playwright/test_adaptive_playwright_crawler.py index b6f0289c02..8aed535c8a 100644 --- a/tests/unit/crawlers/_adaptive_playwright/test_adaptive_playwright_crawler.py +++ b/tests/unit/crawlers/_adaptive_playwright/test_adaptive_playwright_crawler.py @@ -576,15 +576,17 @@ async def test_adaptive_playwright_crawler_timeout_in_sub_crawler(test_urls: lis crawler. """ static_only_predictor_no_detection = _SimpleRenderingTypePredictor(detection_probability_recommendation=cycle([0])) - request_handler_timeout = timedelta(seconds=1) + # Use a generous timeout so the static pipeline has enough time to reach the handler even on slow CI. + # The handler will block indefinitely, so the timeout will always fire during the handler's wait. 
+ request_handler_timeout = timedelta(seconds=10) crawler = AdaptivePlaywrightCrawler.with_beautifulsoup_static_parser( max_request_retries=0, rendering_type_predictor=static_only_predictor_no_detection, request_handler_timeout=request_handler_timeout, ) - mocked_static_handler = Mock() - mocked_browser_handler = Mock() + mocked_static_handler = Mock(name='static_handler') + mocked_browser_handler = Mock(name='browser_handler') @crawler.router.default_handler async def request_handler(context: AdaptivePlaywrightCrawlingContext) -> None: @@ -596,13 +598,13 @@ async def request_handler(context: AdaptivePlaywrightCrawlingContext) -> None: mocked_static_handler() # Relax timeout for the fallback browser request to allow for slow browser startup on CI crawler._request_handler_timeout = timedelta(seconds=120) - # Sleep for time obviously larger than top crawler timeout. - await asyncio.sleep(request_handler_timeout.total_seconds() * 3) + # Block indefinitely; this wait is cancelled when the request_handler_timeout fires. + await asyncio.Event().wait() await crawler.run(test_urls[:1]) mocked_static_handler.assert_called_once_with() - # Browser handler was capable of running despite static handler having sleep time larger than top handler timeout. + # Browser handler was capable of running despite static handler blocking longer than the handler timeout. mocked_browser_handler.assert_called_once_with()