diff --git a/tests/unit/_utils/test_recurring_task.py b/tests/unit/_utils/test_recurring_task.py
index 3dffa65011..d2ddd2d861 100644
--- a/tests/unit/_utils/test_recurring_task.py
+++ b/tests/unit/_utils/test_recurring_task.py
@@ -46,7 +46,7 @@ async def test_execution(function: AsyncMock, delay: timedelta) -> None:
     task = RecurringTask(function, delay)
 
     task.start()
-    await asyncio.sleep(0.1)  # Wait enough for the task to execute a few times
+    await asyncio.sleep(0.2)  # Wait enough for the task to execute a few times
     await task.stop()
 
     assert isinstance(task.func, AsyncMock)  # To let type checker know that the function is a mock
diff --git a/tests/unit/crawlers/_adaptive_playwright/test_adaptive_playwright_crawler.py b/tests/unit/crawlers/_adaptive_playwright/test_adaptive_playwright_crawler.py
index 7eed877112..b6f0289c02 100644
--- a/tests/unit/crawlers/_adaptive_playwright/test_adaptive_playwright_crawler.py
+++ b/tests/unit/crawlers/_adaptive_playwright/test_adaptive_playwright_crawler.py
@@ -579,7 +579,7 @@ async def test_adaptive_playwright_crawler_timeout_in_sub_crawler(test_urls: lis
     request_handler_timeout = timedelta(seconds=1)
 
     crawler = AdaptivePlaywrightCrawler.with_beautifulsoup_static_parser(
-        max_request_retries=1,
+        max_request_retries=0,
         rendering_type_predictor=static_only_predictor_no_detection,
         request_handler_timeout=request_handler_timeout,
     )
@@ -594,8 +594,8 @@ async def request_handler(context: AdaptivePlaywrightCrawlingContext) -> None:
             mocked_browser_handler()
         except AdaptiveContextError:
             mocked_static_handler()
-            # Relax timeout for the fallback browser request to avoid flakiness in test
-            crawler._request_handler_timeout = timedelta(seconds=10)
+            # Relax timeout for the fallback browser request to allow for slow browser startup on CI
+            crawler._request_handler_timeout = timedelta(seconds=120)
 
         # Sleep for time obviously larger than top crawler timeout.
         await asyncio.sleep(request_handler_timeout.total_seconds() * 3)