4 changes: 2 additions & 2 deletions src/crawlee/crawlers/_abstract_http/_abstract_http_crawler.py
@@ -120,9 +120,9 @@ def _create_static_content_crawler_pipeline(self) -> ContextPipeline[ParsedHttpC
             ContextPipeline()
             .compose(self._execute_pre_navigation_hooks)
             .compose(self._make_http_request)
-            .compose(self._handle_status_code_response)  # ty: ignore[invalid-argument-type]
+            .compose(self._handle_status_code_response)
             .compose(self._parse_http_response)
-            .compose(self._handle_blocked_request_by_content)  # ty: ignore[invalid-argument-type]
+            .compose(self._handle_blocked_request_by_content)
         )
 
     async def _execute_pre_navigation_hooks(
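For orientation, the hunks above (and the Playwright one below) touch the middleware-composition pattern: each `.compose()` step is an async generator that enriches the context, yields it once, and regains control after the inner steps finish. A minimal, self-contained sketch of that pattern follows; `Pipeline`, `Context`, `fetch`, and `handle` are illustrative stand-ins, not crawlee's actual implementation.

```python
from __future__ import annotations

import asyncio
from collections.abc import AsyncGenerator, Awaitable, Callable
from dataclasses import dataclass


@dataclass
class Context:
    url: str
    html: str | None = None


Middleware = Callable[[Context], AsyncGenerator[Context, None]]


class Pipeline:
    """Toy stand-in for a compose-style context pipeline."""

    def __init__(self, middlewares: tuple[Middleware, ...] = ()) -> None:
        self._middlewares = middlewares

    def compose(self, middleware: Middleware) -> Pipeline:
        # compose() returns a new pipeline with the step appended, so chains
        # like Pipeline().compose(a).compose(b) build up the full sequence.
        return Pipeline((*self._middlewares, middleware))

    async def run(self, context: Context, consumer: Callable[[Context], Awaitable[None]]) -> None:
        # Advance every middleware to its first yield (letting it enrich the
        # context), run the consumer, then close the generators in reverse
        # order so any cleanup after the yield happens LIFO.
        opened: list[AsyncGenerator[Context, None]] = []
        try:
            for middleware in self._middlewares:
                gen = middleware(context)
                context = await gen.__anext__()
                opened.append(gen)
            await consumer(context)
        finally:
            for gen in reversed(opened):
                await gen.aclose()


async def fetch(context: Context) -> AsyncGenerator[Context, None]:
    context.html = f'<html>{context.url}</html>'  # stand-in for a real HTTP request
    yield context


async def handle(context: Context) -> None:
    print(context.html)


async def main() -> None:
    pipeline = Pipeline().compose(fetch)
    await pipeline.run(Context(url='https://example.com'), handle)


asyncio.run(main())
```

Having `compose()` return a new pipeline keeps the chain immutable, which is what makes the fluent `ContextPipeline().compose(...).compose(...)` style seen in these diffs work.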
4 changes: 2 additions & 2 deletions src/crawlee/crawlers/_playwright/_playwright_crawler.py
@@ -198,9 +198,9 @@ def __init__(
         kwargs['_context_pipeline'] = (
             ContextPipeline()
             .compose(self._open_page)
-            .compose(self._navigate)  # ty: ignore[invalid-argument-type]
+            .compose(self._navigate)
             .compose(self._handle_status_code_response)
-            .compose(self._handle_blocked_request_by_content)  # ty: ignore[invalid-argument-type]
+            .compose(self._handle_blocked_request_by_content)
         )
         kwargs['_additional_context_managers'] = [self._browser_pool]
         kwargs.setdefault('_logger', logging.getLogger(__name__))
2 changes: 1 addition & 1 deletion tests/unit/crawlers/_basic/test_context_pipeline.py
@@ -88,7 +88,7 @@ async def middleware_b(context: EnhancedCrawlingContext) -> AsyncGenerator[MoreE
         )
         events.append('middleware_b_out')
 
-    pipeline = ContextPipeline[BasicCrawlingContext]().compose(middleware_a).compose(middleware_b)  # ty: ignore[invalid-argument-type]
+    pipeline = ContextPipeline[BasicCrawlingContext]().compose(middleware_a).compose(middleware_b)
 
     context = BasicCrawlingContext(
         request=Request.from_url(url='https://test.io/'),
3 changes: 2 additions & 1 deletion tests/unit/crawlers/_parsel/test_parsel_crawler.py
@@ -75,7 +75,8 @@ async def test_enqueue_links_with_incompatible_kwargs_raises_error(server_url: U
     @crawler.router.default_handler
     async def request_handler(context: ParselCrawlingContext) -> None:
         try:
-            await context.enqueue_links(requests=[Request.from_url(str(server_url / 'start_enqueue'))], selector='a')  # type:ignore[call-overload] # Testing runtime enforcement of the overloads.
+            # Testing runtime enforcement of the overloads.
+            await context.enqueue_links(requests=[Request.from_url(str(server_url / 'start_enqueue'))], selector='a')
         except Exception as e:
             exceptions.append(e)
 
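The comment being moved onto its own line here (and in the Playwright test below) explains why the suppression existed: the call deliberately mixes `requests` with `selector`, a combination the `enqueue_links` overloads reject at type-check time, and the test asserts that the same rule is also enforced at runtime. A rough sketch of that overload-plus-runtime-check pattern follows; `enqueue` and its parameters are made up for illustration, not crawlee's actual signature.

```python
from __future__ import annotations

import asyncio
from typing import overload


@overload
async def enqueue(*, requests: list[str]) -> None: ...
@overload
async def enqueue(*, selector: str) -> None: ...


async def enqueue(*, requests: list[str] | None = None, selector: str | None = None) -> None:
    # Runtime check mirroring the overloads: explicit requests cannot be
    # combined with link-extraction arguments such as a CSS selector.
    if requests is not None and selector is not None:
        raise ValueError('`requests` cannot be combined with `selector`.')


async def main() -> None:
    try:
        # No overload accepts both keyword arguments, so a type checker flags
        # this call; the tests suppress that and assert the runtime error instead.
        await enqueue(requests=['https://example.com'], selector='a')  # type: ignore[call-overload]
    except ValueError as exc:
        print(exc)


asyncio.run(main())
```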
3 changes: 2 additions & 1 deletion tests/unit/crawlers/_playwright/test_playwright_crawler.py
@@ -116,7 +116,8 @@ async def some_hook(context: PlaywrightPreNavCrawlingContext) -> None:
     @crawler.router.default_handler
     async def request_handler(context: PlaywrightCrawlingContext) -> None:
         try:
-            await context.enqueue_links(requests=[Request.from_url('https://www.whatever.com')], selector='a')  # type:ignore[call-overload] # Testing runtime enforcement of the overloads.
+            # Testing runtime enforcement of the overloads.
+            await context.enqueue_links(requests=[Request.from_url('https://www.whatever.com')], selector='a')
         except Exception as e:
             exceptions.append(e)
 