diff --git a/.gitignore b/.gitignore index 74cb9fe9c0..ea73de62b4 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,6 @@ # Cache __pycache__ -.mypy_cache +.uv_cache .pytest_cache .ruff_cache .uv-cache diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 940b6b7ec4..95b1982bdb 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -46,7 +46,7 @@ make format ### Type checking -Type checking is handled by [mypy](https://mypy.readthedocs.io/), verifying code against type annotations. Configuration settings can be found in `pyproject.toml`. +Type checking is handled by [ty](https://docs.astral.sh/ty/), verifying code against type annotations. Configuration settings can be found in `pyproject.toml`. To run type checking: diff --git a/Makefile b/Makefile index 390181e21a..7224fcb752 100644 --- a/Makefile +++ b/Makefile @@ -5,7 +5,7 @@ E2E_TESTS_CONCURRENCY = 1 clean: - rm -rf .mypy_cache .pytest_cache .ruff_cache .uv-cache build dist htmlcov .coverage + rm -rf .uv_cache .pytest_cache .ruff_cache .uv-cache build dist htmlcov .coverage install-sync: uv sync --all-extras @@ -27,7 +27,7 @@ lint: uv run ruff check type-check: - uv run mypy + uv run ty check unit-tests: uv run pytest \ diff --git a/docs/deployment/code_examples/google/cloud_run_example.py b/docs/deployment/code_examples/google/cloud_run_example.py index 4176cf60e4..27d23b99eb 100644 --- a/docs/deployment/code_examples/google/cloud_run_example.py +++ b/docs/deployment/code_examples/google/cloud_run_example.py @@ -1,4 +1,3 @@ -# mypy: disable-error-code="misc" import json import os @@ -9,7 +8,7 @@ from crawlee.storage_clients import MemoryStorageClient -@get('/') # type: ignore[untyped-decorator] +@get('/') async def main() -> str: """The crawler entry point that will be called when the HTTP endpoint is accessed.""" # highlight-start diff --git a/docs/deployment/code_examples/google/google_example.py b/docs/deployment/code_examples/google/google_example.py index 474e121b71..68deac804c 100644 --- a/docs/deployment/code_examples/google/google_example.py +++ b/docs/deployment/code_examples/google/google_example.py @@ -1,4 +1,3 @@ -# mypy: disable-error-code="misc" import asyncio import json from datetime import timedelta @@ -48,7 +47,7 @@ async def request_handler(context: BeautifulSoupCrawlingContext) -> None: # highlight-end -@functions_framework.http # type: ignore[untyped-decorator] +@functions_framework.http def crawlee_run(request: Request) -> Response: # You can pass data to your crawler using `request` function_id = request.headers['Function-Execution-Id'] diff --git a/docs/guides/code_examples/avoid_blocking/default_fingerprint_generator_with_args.py b/docs/guides/code_examples/avoid_blocking/default_fingerprint_generator_with_args.py index a6d2072ad3..4e6ed92aa6 100644 --- a/docs/guides/code_examples/avoid_blocking/default_fingerprint_generator_with_args.py +++ b/docs/guides/code_examples/avoid_blocking/default_fingerprint_generator_with_args.py @@ -9,7 +9,7 @@ async def main() -> None: fingerprint_generator = DefaultFingerprintGenerator( - header_options=HeaderGeneratorOptions(browsers=['chromium']), + header_options=HeaderGeneratorOptions(browsers=['chrome']), screen_options=ScreenOptions(min_width=400), ) diff --git a/docs/guides/code_examples/running_in_web_server/server.py b/docs/guides/code_examples/running_in_web_server/server.py index 09be14e2be..64e192af37 100644 --- a/docs/guides/code_examples/running_in_web_server/server.py +++ b/docs/guides/code_examples/running_in_web_server/server.py @@ -14,7 +14,7 @@ app = 
FastAPI(lifespan=lifespan, title='Crawler app') -@app.get('/', response_class=HTMLResponse) # type: ignore[untyped-decorator] +@app.get('/', response_class=HTMLResponse) def index() -> str: return """ @@ -32,7 +32,7 @@ def index() -> str: """ -@app.get('/scrape') # type: ignore[untyped-decorator] +@app.get('/scrape') async def scrape_url(request: Request, url: str | None = None) -> dict: if not url: return {'url': 'missing', 'scrape result': 'no results'} diff --git a/pyproject.toml b/pyproject.toml index b7a9d72679..54fecf8ce4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -102,7 +102,6 @@ dev = [ "build<2.0.0", # For e2e tests. "dycw-pytest-only<3.0.0", "fakeredis[probabilistic,json,lua]<3.0.0", - "mypy~=1.19.0", "pre-commit<5.0.0", "proxy-py<3.0.0", "pydoc-markdown<5.0.0", @@ -113,6 +112,7 @@ dev = [ "pytest<9.0.0", "ruff~=0.14.0", "setuptools", # setuptools are used by pytest, but not explicitly required + "ty~=0.0.0", "types-beautifulsoup4<5.0.0", "types-cachetools<7.0.0", "types-colorama<1.0.0", @@ -230,57 +230,24 @@ filterwarnings = [ "ignore:websockets.server.WebSocketServerProtocol is deprecated:DeprecationWarning", ] -[tool.mypy] -python_version = "3.10" -plugins = ["pydantic.mypy"] +[tool.ty.environment] +python-version = "3.10" + +[tool.ty.src] +include = ["src", "tests", "scripts", "docs", "website"] exclude = [ "src/crawlee/project_template", "docs/guides/code_examples/storage_clients/custom_storage_client_example.py", ] -files = ["src", "tests", "docs", "website"] -check_untyped_defs = true -disallow_incomplete_defs = true -disallow_untyped_calls = true -disallow_untyped_decorators = true -disallow_untyped_defs = true -no_implicit_optional = true -warn_redundant_casts = true -warn_return_any = true -warn_unreachable = true -warn_unused_ignores = true - -[[tool.mypy.overrides]] -# Example codes are sometimes showing integration of crawlee with external tool, which is not dependency of crawlee. -module = [ - "apify", # Example code shows integration of apify and crawlee. - "apify_fingerprint_datapoints", # Untyped and stubs not available - "camoufox", # Example code shows integration of camoufox and crawlee. - "fastapi", # Example code shows running in webserver. - "stagehand.*", # Example code shows integration of Stagehand and crawlee. - "starlette.*", # Example code shows running in webserver. - "flask", # Example code shows deploy on Google Cloud. - "functions_framework", # Example code shows deploy on Google Cloud. - "jaro", # Untyped and stubs not available - "litestar", # Example code shows deploy on Google Cloud Run. - "loguru", # Example code shows integration of loguru and crawlee for JSON logging. - "sklearn.linear_model", # Untyped and stubs not available - "cookiecutter.*", # Untyped and stubs not available - "inquirer.*", # Untyped and stubs not available - "warcio.*", # Example code shows WARC files creation. 
- "wrapt" # Untyped and stubs not available -] -ignore_missing_imports = true -[[tool.mypy.overrides]] -module = [ - "running_in_web_server.*", # False positive when fastapi not available +[[tool.ty.overrides]] +include = [ + "docs/**/*.py", + "website/**/*.py", ] -disable_error_code = ["misc"] -[tool.basedpyright] -pythonVersion = "3.10" -typeCheckingMode = "standard" -include = ["src", "tests", "docs", "website"] +[tool.ty.overrides.rules] +unresolved-import = "ignore" [tool.coverage.report] exclude_lines = ["pragma: no cover", "if TYPE_CHECKING:", "assert_never()"] diff --git a/src/crawlee/_browserforge_workaround.py b/src/crawlee/_browserforge_workaround.py index 495d2a8298..8e8dcceca4 100644 --- a/src/crawlee/_browserforge_workaround.py +++ b/src/crawlee/_browserforge_workaround.py @@ -20,7 +20,7 @@ def patch_browserforge() -> None: def DownloadIfNotExists(**flags: bool) -> None: pass - download.DownloadIfNotExists = DownloadIfNotExists + download.DownloadIfNotExists = DownloadIfNotExists # ty: ignore[invalid-assignment] import browserforge.bayesian_network diff --git a/src/crawlee/_request.py b/src/crawlee/_request.py index 9e0fd1dfc6..fd1feef791 100644 --- a/src/crawlee/_request.py +++ b/src/crawlee/_request.py @@ -93,7 +93,7 @@ def __setitem__(self, key: str, value: JsonSerializable) -> None: def __delitem__(self, key: str) -> None: del self.__pydantic_extra__[key] - def __iter__(self) -> Iterator[str]: # type: ignore[override] + def __iter__(self) -> Iterator[str]: # ty: ignore[invalid-method-override] yield from self.__pydantic_extra__ def __len__(self) -> int: @@ -195,7 +195,7 @@ class Request(BaseModel): ] = None """HTTP request payload.""" - # Workaround for pydantic 2.12 and mypy type checking issue for Annotated with default_factory + # Workaround for Pydantic and type checkers when using Annotated with default_factory if TYPE_CHECKING: headers: HttpHeaders = HttpHeaders() """HTTP request headers.""" diff --git a/src/crawlee/_types.py b/src/crawlee/_types.py index a98664d02d..bf10dd6ff3 100644 --- a/src/crawlee/_types.py +++ b/src/crawlee/_types.py @@ -62,14 +62,14 @@ class HttpHeaders(RootModel, Mapping[str, str]): model_config = ConfigDict(validate_by_name=True, validate_by_alias=True) - # Workaround for pydantic 2.12 and mypy type checking issue for Annotated with default_factory + # Workaround for Pydantic and type checkers when using Annotated with default_factory if TYPE_CHECKING: root: dict[str, str] = {} else: root: Annotated[ dict[str, str], PlainValidator(lambda value: _normalize_headers(value)), - Field(default_factory=dict), + Field(default_factory=lambda: dict[str, str]()), ] def __getitem__(self, key: str) -> str: @@ -91,7 +91,7 @@ def __ror__(self, other: HttpHeaders) -> HttpHeaders: combined_headers = {**other, **self.root} return HttpHeaders(combined_headers) - def __iter__(self) -> Iterator[str]: # type: ignore[override] + def __iter__(self) -> Iterator[str]: # ty: ignore[invalid-method-override] yield from self.root def __len__(self) -> int: @@ -671,17 +671,16 @@ def create_modified_copy( get_key_value_store: GetKeyValueStoreFromRequestHandlerFunction | None = None, ) -> Self: """Create a modified copy of the crawling context with specified changes.""" - original_fields = {field.name: getattr(self, field.name) for field in dataclasses.fields(self)} - modified_fields = { - key: value - for key, value in { - 'push_data': push_data, - 'add_requests': add_requests, - 'get_key_value_store': get_key_value_store, - }.items() - if value - } - return 
self.__class__(**{**original_fields, **modified_fields}) + modifications = dict[str, Any]() + + if push_data is not None: + modifications['push_data'] = push_data + if add_requests is not None: + modifications['add_requests'] = add_requests + if get_key_value_store is not None: + modifications['get_key_value_store'] = get_key_value_store + + return dataclasses.replace(self, **modifications) class GetDataKwargs(TypedDict): diff --git a/src/crawlee/_utils/context.py b/src/crawlee/_utils/context.py index fb750cf0e7..6f3a65094b 100644 --- a/src/crawlee/_utils/context.py +++ b/src/crawlee/_utils/context.py @@ -44,4 +44,4 @@ async def async_wrapper(self: Any, *args: Any, **kwargs: Any) -> Any: return await method(self, *args, **kwargs) - return async_wrapper if inspect.iscoroutinefunction(method) else sync_wrapper # type: ignore[return-value] + return async_wrapper if inspect.iscoroutinefunction(method) else sync_wrapper # ty: ignore[invalid-return-type] diff --git a/src/crawlee/_utils/file.py b/src/crawlee/_utils/file.py index f53b6bab0a..1d297fa724 100644 --- a/src/crawlee/_utils/file.py +++ b/src/crawlee/_utils/file.py @@ -170,7 +170,7 @@ async def export_csv_to_stream( if 'lineterminator' not in kwargs: kwargs['lineterminator'] = '\n' - writer = csv.writer(dst, **kwargs) # type: ignore[arg-type] + writer = csv.writer(dst, **kwargs) write_header = True # Iterate over the dataset and write to CSV. diff --git a/src/crawlee/_utils/globs.py b/src/crawlee/_utils/globs.py index f7e1a57927..aed82e1f18 100644 --- a/src/crawlee/_utils/globs.py +++ b/src/crawlee/_utils/globs.py @@ -36,7 +36,7 @@ def _translate( if not seps: seps = (os.path.sep, os.path.altsep) if os.path.altsep else os.path.sep - escaped_seps = ''.join(map(re.escape, seps)) + escaped_seps = ''.join(map(re.escape, seps)) # ty: ignore[invalid-argument-type] any_sep = f'[{escaped_seps}]' if len(seps) > 1 else escaped_seps not_sep = f'[^{escaped_seps}]' if include_hidden: diff --git a/src/crawlee/_utils/recurring_task.py b/src/crawlee/_utils/recurring_task.py index 3a6553b6c0..ba80f8f8b0 100644 --- a/src/crawlee/_utils/recurring_task.py +++ b/src/crawlee/_utils/recurring_task.py @@ -25,7 +25,7 @@ class RecurringTask: """ def __init__(self, func: Callable, delay: timedelta) -> None: - logger.debug(f'Calling RecurringTask.__init__(func={func.__name__}, delay={delay})...') + logger.debug(f'Calling RecurringTask.__init__(func={func.__name__}, delay={delay})...') # ty: ignore[unresolved-attribute] self.func = func self.delay = delay self.task: asyncio.Task | None = None @@ -55,7 +55,7 @@ async def _wrapper(self) -> None: def start(self) -> None: """Start the recurring task execution.""" - self.task = asyncio.create_task(self._wrapper(), name=f'Task-recurring-{self.func.__name__}') + self.task = asyncio.create_task(self._wrapper(), name=f'Task-recurring-{self.func.__name__}') # ty: ignore[possibly-missing-attribute] async def stop(self) -> None: """Stop the recurring task execution.""" diff --git a/src/crawlee/_utils/sitemap.py b/src/crawlee/_utils/sitemap.py index 0d839cd1ed..ba844ca47b 100644 --- a/src/crawlee/_utils/sitemap.py +++ b/src/crawlee/_utils/sitemap.py @@ -430,10 +430,10 @@ async def parse_sitemap( up to the specified maximum depth. 
""" # Set default options - options = options or {} - emit_nested_sitemaps = options.get('emit_nested_sitemaps', False) - max_depth = options.get('max_depth', float('inf')) - sitemap_retries = options.get('sitemap_retries', 3) + options = options or {} # ty: ignore[invalid-assignment] + emit_nested_sitemaps = options.get('emit_nested_sitemaps', False) # ty: ignore[possibly-missing-attribute] + max_depth = options.get('max_depth', float('inf')) # ty: ignore[possibly-missing-attribute] + sitemap_retries = options.get('sitemap_retries', 3) # ty: ignore[possibly-missing-attribute] # Setup working state sources = list(initial_sources) @@ -472,7 +472,7 @@ async def parse_sitemap( sitemap_retries, emit_nested_sitemaps=emit_nested_sitemaps, proxy_info=proxy_info, - timeout=options.get('timeout', timedelta(seconds=30)), + timeout=options.get('timeout', timedelta(seconds=30)), # ty: ignore[possibly-missing-attribute] ): yield result else: diff --git a/src/crawlee/_utils/system.py b/src/crawlee/_utils/system.py index d1f1cd9976..56eeaadf24 100644 --- a/src/crawlee/_utils/system.py +++ b/src/crawlee/_utils/system.py @@ -5,7 +5,7 @@ from contextlib import suppress from datetime import datetime, timezone from logging import getLogger -from typing import Annotated +from typing import TYPE_CHECKING, Annotated import psutil from pydantic import BaseModel, ConfigDict, Field, PlainSerializer, PlainValidator @@ -41,11 +41,19 @@ class CpuInfo(BaseModel): used_ratio: Annotated[float, Field(alias='usedRatio')] """The ratio of CPU currently in use, represented as a float between 0 and 1.""" - created_at: datetime = Field( - alias='createdAt', - default_factory=lambda: datetime.now(timezone.utc), - ) - """The time at which the measurement was taken.""" + # Workaround for Pydantic and type checkers when using Annotated with default_factory + if TYPE_CHECKING: + created_at: datetime = datetime.now(timezone.utc) + """The time at which the measurement was taken.""" + else: + created_at: Annotated[ + datetime, + Field( + alias='createdAt', + default_factory=lambda: datetime.now(timezone.utc), + ), + ] + """The time at which the measurement was taken.""" class MemoryUsageInfo(BaseModel): @@ -61,11 +69,19 @@ class MemoryUsageInfo(BaseModel): ] """Memory usage of the current Python process and its children.""" - created_at: datetime = Field( - alias='createdAt', - default_factory=lambda: datetime.now(timezone.utc), - ) - """The time at which the measurement was taken.""" + # Workaround for Pydantic and type checkers when using Annotated with default_factory + if TYPE_CHECKING: + created_at: datetime = datetime.now(timezone.utc) + """The time at which the measurement was taken.""" + else: + created_at: Annotated[ + datetime, + Field( + alias='createdAt', + default_factory=lambda: datetime.now(timezone.utc), + ), + ] + """The time at which the measurement was taken.""" class MemoryInfo(MemoryUsageInfo): diff --git a/src/crawlee/browsers/_browser_pool.py b/src/crawlee/browsers/_browser_pool.py index 7d3fe0409c..480fb9fac5 100644 --- a/src/crawlee/browsers/_browser_pool.py +++ b/src/crawlee/browsers/_browser_pool.py @@ -142,7 +142,7 @@ def with_default_plugin( plugin_options['browser_new_context_options'] = browser_new_context_options or {} if headless is not None: - plugin_options['browser_launch_options']['headless'] = headless + plugin_options['browser_launch_options']['headless'] = headless # ty: ignore[invalid-assignment] if use_incognito_pages is not None: plugin_options['use_incognito_pages'] = use_incognito_pages diff 
--git a/src/crawlee/browsers/_playwright_browser.py b/src/crawlee/browsers/_playwright_browser.py index aba8e6b7e1..c66dcb21be 100644 --- a/src/crawlee/browsers/_playwright_browser.py +++ b/src/crawlee/browsers/_playwright_browser.py @@ -78,7 +78,7 @@ async def new_context(self, **context_options: Any) -> BrowserContext: async def _delete_temp_dir(self, _: BrowserContext | None) -> None: if self._temp_dir and self._temp_dir.exists(): - await asyncio.to_thread(shutil.rmtree, self._temp_dir, ignore_errors=True) + await asyncio.to_thread(lambda: shutil.rmtree(self._temp_dir, ignore_errors=True)) # ty: ignore[invalid-argument-type] @override async def close(self, **kwargs: Any) -> None: diff --git a/src/crawlee/crawlers/_abstract_http/_abstract_http_crawler.py b/src/crawlee/crawlers/_abstract_http/_abstract_http_crawler.py index 6c1fbb63f8..059b3adbe9 100644 --- a/src/crawlee/crawlers/_abstract_http/_abstract_http_crawler.py +++ b/src/crawlee/crawlers/_abstract_http/_abstract_http_crawler.py @@ -102,7 +102,7 @@ def create_parsed_http_crawler_class( class _ParsedHttpCrawler( AbstractHttpCrawler[ParsedHttpCrawlingContext[TParseResult], TParseResult, TSelectResult] - ): + ): # ty: ignore[invalid-generic-class] def __init__( self, parser: AbstractHttpParser[TParseResult, TSelectResult] = static_parser, @@ -122,9 +122,9 @@ def _create_static_content_crawler_pipeline(self) -> ContextPipeline[ParsedHttpC ContextPipeline() .compose(self._execute_pre_navigation_hooks) .compose(self._make_http_request) - .compose(self._handle_status_code_response) + .compose(self._handle_status_code_response) # ty: ignore[invalid-argument-type] .compose(self._parse_http_response) - .compose(self._handle_blocked_request_by_content) + .compose(self._handle_blocked_request_by_content) # ty: ignore[invalid-argument-type] ) async def _execute_pre_navigation_hooks( diff --git a/src/crawlee/crawlers/_adaptive_playwright/_adaptive_playwright_crawler.py b/src/crawlee/crawlers/_adaptive_playwright/_adaptive_playwright_crawler.py index b3b99e6f59..139e902c0e 100644 --- a/src/crawlee/crawlers/_adaptive_playwright/_adaptive_playwright_crawler.py +++ b/src/crawlee/crawlers/_adaptive_playwright/_adaptive_playwright_crawler.py @@ -160,7 +160,7 @@ def __init__( super().__init__(statistics=statistics, **kwargs) # Sub crawlers related. - playwright_crawler_specific_kwargs = playwright_crawler_specific_kwargs or {} + playwright_crawler_specific_kwargs = playwright_crawler_specific_kwargs or {} # ty: ignore[invalid-assignment] # Each sub crawler will use custom logger . 
static_logger = getLogger('Subcrawler_static') @@ -181,7 +181,7 @@ def __init__( ) playwright_crawler = PlaywrightCrawler( statistics=_NonPersistentStatistics(), - **playwright_crawler_specific_kwargs, + **playwright_crawler_specific_kwargs, # ty: ignore[invalid-argument-type] **basic_crawler_kwargs_for_pw_crawler, ) @@ -335,7 +335,7 @@ async def from_static_pipeline_to_top_router( ) await self.router(adaptive_crawling_context) - return self._static_context_pipeline(context_linked_to_result, from_static_pipeline_to_top_router) + return self._static_context_pipeline(context_linked_to_result, from_static_pipeline_to_top_router) # ty: ignore[invalid-argument-type] if rendering_type == 'client only': @@ -345,7 +345,7 @@ async def from_pw_pipeline_to_top_router(context: PlaywrightCrawlingContext) -> ) await self.router(adaptive_crawling_context) - return self._pw_context_pipeline(context_linked_to_result, from_pw_pipeline_to_top_router) + return self._pw_context_pipeline(context_linked_to_result, from_pw_pipeline_to_top_router) # ty: ignore[invalid-argument-type] raise RuntimeError( f'Not a valid rendering type. Must be one of the following: {", ".join(get_args(RenderingType))}' diff --git a/src/crawlee/crawlers/_basic/_basic_crawler.py b/src/crawlee/crawlers/_basic/_basic_crawler.py index 4f6f52ed43..5b88dee47b 100644 --- a/src/crawlee/crawlers/_basic/_basic_crawler.py +++ b/src/crawlee/crawlers/_basic/_basic_crawler.py @@ -410,7 +410,7 @@ def __init__( self._context_result_map = WeakKeyDictionary[BasicCrawlingContext, RequestHandlerRunResult]() # Context pipeline - self._context_pipeline = (_context_pipeline or ContextPipeline()).compose(self._check_url_after_redirects) + self._context_pipeline = (_context_pipeline or ContextPipeline()).compose(self._check_url_after_redirects) # ty: ignore[invalid-argument-type] # Crawl settings self._max_request_retries = max_request_retries @@ -774,7 +774,7 @@ async def _run_crawler(self) -> None: async with AsyncExitStack() as exit_stack: for context in contexts_to_enter: - await exit_stack.enter_async_context(context) # type: ignore[arg-type] + await exit_stack.enter_async_context(context) # ty: ignore[invalid-argument-type] await self._autoscaled_pool.run() @@ -873,7 +873,7 @@ async def export_data( dataset_id: str | None = None, dataset_name: str | None = None, dataset_alias: str | None = None, - **additional_kwargs: Unpack[ExportDataJsonKwargs | ExportDataCsvKwargs], # type: ignore[misc] + **additional_kwargs: Unpack[ExportDataJsonKwargs | ExportDataCsvKwargs], ) -> None: """Export all items from a Dataset to a JSON or CSV file. 
diff --git a/src/crawlee/crawlers/_playwright/_playwright_crawler.py b/src/crawlee/crawlers/_playwright/_playwright_crawler.py index 0beb04a375..c71bb71510 100644 --- a/src/crawlee/crawlers/_playwright/_playwright_crawler.py +++ b/src/crawlee/crawlers/_playwright/_playwright_crawler.py @@ -183,7 +183,7 @@ def __init__( generator_browser_type = [fingerprint_browser_type_from_playwright_browser_type(browser_type)] fingerprint_generator = DefaultFingerprintGenerator( - header_options=HeaderGeneratorOptions(browsers=generator_browser_type) + header_options=HeaderGeneratorOptions(browsers=generator_browser_type) # ty: ignore[invalid-argument-type] ) browser_pool = BrowserPool.with_default_plugin( @@ -202,9 +202,9 @@ def __init__( kwargs['_context_pipeline'] = ( ContextPipeline() .compose(self._open_page) - .compose(self._navigate) + .compose(self._navigate) # ty: ignore[invalid-argument-type] .compose(self._handle_status_code_response) - .compose(self._handle_blocked_request_by_content) + .compose(self._handle_blocked_request_by_content) # ty: ignore[invalid-argument-type] ) kwargs['_additional_context_managers'] = [self._browser_pool] kwargs.setdefault('_logger', logging.getLogger(__name__)) @@ -516,7 +516,7 @@ async def _get_cookies(self, page: Page) -> list[PlaywrightCookieParam]: async def _update_cookies(self, page: Page, cookies: list[PlaywrightCookieParam]) -> None: """Update the cookies in the page context.""" - await page.context.add_cookies([{**cookie} for cookie in cookies]) + await page.context.add_cookies([{**cookie} for cookie in cookies]) # ty: ignore[invalid-argument-type] async def _find_txt_file_for_url(self, url: str) -> RobotsTxtFile: """Find the robots.txt file for a given URL. diff --git a/src/crawlee/events/_event_manager.py b/src/crawlee/events/_event_manager.py index c623b341c1..2183727483 100644 --- a/src/crawlee/events/_event_manager.py +++ b/src/crawlee/events/_event_manager.py @@ -178,7 +178,7 @@ async def listener_wrapper(event_data: EventData) -> None: else asyncio.to_thread(cast('Callable[..., None]', listener), *bound_args.args, **bound_args.kwargs) ) - listener_task = asyncio.create_task(coro, name=f'Task-{event.value}-{listener.__name__}') + listener_task = asyncio.create_task(coro, name=f'Task-{event.value}-{listener.__name__}') # ty: ignore[invalid-argument-type, unresolved-attribute] self._listener_tasks.add(listener_task) try: @@ -189,7 +189,7 @@ async def listener_wrapper(event_data: EventData) -> None: # We need to swallow the exception and just log it here, otherwise it could break the event emitter logger.exception( 'Exception in the event listener', - extra={'event_name': event.value, 'listener_name': listener.__name__}, + extra={'event_name': event.value, 'listener_name': listener.__name__}, # ty: ignore[unresolved-attribute] ) finally: logger.debug('EventManager.on.listener_wrapper(): Removing listener task from the set...') diff --git a/src/crawlee/http_clients/_curl_impersonate.py b/src/crawlee/http_clients/_curl_impersonate.py index b4eff2421b..342a60ef08 100644 --- a/src/crawlee/http_clients/_curl_impersonate.py +++ b/src/crawlee/http_clients/_curl_impersonate.py @@ -93,12 +93,12 @@ async def read(self) -> bytes: return self._response.content async def read_stream(self) -> AsyncGenerator[bytes, None]: - if not self._response.astream_task or self._response.astream_task.done(): # type: ignore[attr-defined] + if not self._response.astream_task or self._response.astream_task.done(): # ty: ignore[possibly-missing-attribute] raise RuntimeError( 
'Cannot read stream: either already consumed or Response not obtained from `stream` method' ) - async for chunk in self._response.aiter_content(): # type: ignore[no-untyped-call] + async for chunk in self._response.aiter_content(): yield chunk @@ -156,7 +156,7 @@ async def crawl( try: response = await client.request( url=request.url, - method=request.method.upper(), # type: ignore[arg-type] # curl-cffi requires uppercase method + method=request.method.upper(), # ty: ignore[invalid-argument-type] # curl-cffi requires uppercase method headers=request.headers, data=request.payload, cookies=session.cookies.jar if session else None, @@ -203,7 +203,7 @@ async def send_request( try: response = await client.request( url=url, - method=method.upper(), # type: ignore[arg-type] # curl-cffi requires uppercase method + method=method.upper(), # ty: ignore[invalid-argument-type] # curl-cffi requires uppercase method headers=dict(headers) if headers else None, data=payload, cookies=session.cookies.jar if session else None, @@ -244,7 +244,7 @@ async def stream( try: response = await client.request( url=url, - method=method.upper(), # type: ignore[arg-type] # curl-cffi requires uppercase method + method=method.upper(), # ty: ignore[invalid-argument-type] # curl-cffi requires uppercase method headers=dict(headers) if headers else None, data=payload, cookies=session.cookies.jar if session else None, @@ -309,8 +309,8 @@ def _is_proxy_error(error: CurlRequestError) -> bool: @staticmethod def _get_cookies(curl: Curl) -> list[Cookie]: cookies: list[Cookie] = [] - for curl_cookie in curl.getinfo(CurlInfo.COOKIELIST): # type: ignore[union-attr] - curl_morsel = CurlMorsel.from_curl_format(curl_cookie) # type: ignore[arg-type] + for curl_cookie in curl.getinfo(CurlInfo.COOKIELIST): # ty: ignore[not-iterable] + curl_morsel = CurlMorsel.from_curl_format(curl_cookie) # ty: ignore[invalid-argument-type] cookie = curl_morsel.to_cookiejar_cookie() cookies.append(cookie) return cookies diff --git a/src/crawlee/otel/crawler_instrumentor.py b/src/crawlee/otel/crawler_instrumentor.py index 09f2fda525..9c12e9e17e 100644 --- a/src/crawlee/otel/crawler_instrumentor.py +++ b/src/crawlee/otel/crawler_instrumentor.py @@ -3,9 +3,7 @@ import inspect from typing import TYPE_CHECKING, Any -from opentelemetry.instrumentation.instrumentor import ( # type:ignore[attr-defined] # Mypy has troubles with OTEL - BaseInstrumentor, -) +from opentelemetry.instrumentation.instrumentor import BaseInstrumentor from opentelemetry.instrumentation.utils import unwrap from opentelemetry.semconv.attributes.code_attributes import CODE_FUNCTION_NAME from opentelemetry.semconv.attributes.http_attributes import HTTP_REQUEST_METHOD diff --git a/src/crawlee/sessions/_cookies.py b/src/crawlee/sessions/_cookies.py index eb5a6a12ea..1089fc37f5 100644 --- a/src/crawlee/sessions/_cookies.py +++ b/src/crawlee/sessions/_cookies.py @@ -68,7 +68,7 @@ def __init__(self, cookies: SessionCookies | CookieJar | dict[str, str] | list[C if isinstance(cookies, dict): for key, value in cookies.items(): - self.set(key, value) + self.set(key, value) # ty: ignore[invalid-argument-type] elif isinstance(cookies, list): for item in cookies: @@ -152,7 +152,7 @@ def _convert_cookie_to_dict(self, cookie: Cookie) -> CookieParam: cookie_dict['expires'] = cookie.expires if (same_site := cookie.get_nonstandard_attr('SameSite')) and same_site in {'Lax', 'None', 'Strict'}: - cookie_dict['same_site'] = same_site # type: ignore[typeddict-item] + cookie_dict['same_site'] = same_site # ty: ignore[invalid-assignment] return cookie_dict diff --git a/src/crawlee/sessions/_models.py 
b/src/crawlee/sessions/_models.py index da709f1cdb..2f5b4a0483 100644 --- a/src/crawlee/sessions/_models.py +++ b/src/crawlee/sessions/_models.py @@ -63,19 +63,19 @@ class SessionPoolModel(BaseModel): ), ] - @computed_field(alias='sessionCount') # type: ignore[prop-decorator] + @computed_field(alias='sessionCount') @property def session_count(self) -> int: """Get the total number of sessions currently maintained in the pool.""" return len(self.sessions) - @computed_field(alias='usableSessionCount') # type: ignore[prop-decorator] + @computed_field(alias='usableSessionCount') @property def usable_session_count(self) -> int: """Get the number of sessions that are currently usable.""" return len([session for _, session in self.sessions.items() if session.is_usable]) - @computed_field(alias='retiredSessionCount') # type: ignore[prop-decorator] + @computed_field(alias='retiredSessionCount') @property def retired_session_count(self) -> int: """Get the number of sessions that are no longer usable.""" diff --git a/src/crawlee/statistics/_models.py b/src/crawlee/statistics/_models.py index 11b4310f3a..b17c618540 100644 --- a/src/crawlee/statistics/_models.py +++ b/src/crawlee/statistics/_models.py @@ -4,7 +4,7 @@ import warnings from dataclasses import asdict, dataclass from datetime import datetime, timedelta, timezone -from typing import Annotated, Any +from typing import TYPE_CHECKING, Annotated, Any from pydantic import BaseModel, ConfigDict, Field, PlainSerializer, PlainValidator, computed_field from typing_extensions import override @@ -77,9 +77,20 @@ class StatisticsState(BaseModel): crawler_started_at: Annotated[datetime | None, Field(alias='crawlerStartedAt')] = None crawler_last_started_at: Annotated[datetime | None, Field(alias='crawlerLastStartTimestamp')] = None crawler_finished_at: Annotated[datetime | None, Field(alias='crawlerFinishedAt')] = None - errors: dict[str, Any] = Field(default_factory=dict) - retry_errors: dict[str, Any] = Field(alias='retryErrors', default_factory=dict) - requests_with_status_code: dict[str, int] = Field(alias='requestsWithStatusCode', default_factory=dict) + + # Workaround for Pydantic and type checkers when using Annotated with default_factory + if TYPE_CHECKING: + errors: dict[str, Any] = {} + retry_errors: dict[str, Any] = {} + requests_with_status_code: dict[str, int] = {} + else: + errors: Annotated[dict[str, Any], Field(default_factory=dict)] + retry_errors: Annotated[dict[str, Any], Field(alias='retryErrors', default_factory=dict)] + requests_with_status_code: Annotated[ + dict[str, int], + Field(alias='requestsWithStatusCode', default_factory=dict), + ] + stats_persisted_at: Annotated[ datetime | None, Field(alias='statsPersistedAt'), PlainSerializer(lambda _: datetime.now(timezone.utc)) ] = None @@ -124,22 +135,22 @@ def crawler_runtime_for_serialization(self) -> timedelta: return self._runtime_offset + finished_at - self.crawler_last_started_at return self._runtime_offset - @computed_field(alias='requestTotalDurationMillis', return_type=timedelta_ms) # type: ignore[prop-decorator] + @computed_field(alias='requestTotalDurationMillis', return_type=timedelta_ms) @property def request_total_duration(self) -> timedelta: return self.request_total_finished_duration + self.request_total_failed_duration - @computed_field(alias='requestAvgFailedDurationMillis', return_type=timedelta_ms | None) # type: ignore[prop-decorator] + @computed_field(alias='requestAvgFailedDurationMillis', return_type=timedelta_ms | None) @property def 
request_avg_failed_duration(self) -> timedelta | None: return (self.request_total_failed_duration / self.requests_failed) if self.requests_failed else None - @computed_field(alias='requestAvgFinishedDurationMillis', return_type=timedelta_ms | None) # type: ignore[prop-decorator] + @computed_field(alias='requestAvgFinishedDurationMillis', return_type=timedelta_ms | None) @property def request_avg_finished_duration(self) -> timedelta | None: return (self.request_total_finished_duration / self.requests_finished) if self.requests_finished else None - @computed_field(alias='requestsTotal') # type: ignore[prop-decorator] + @computed_field(alias='requestsTotal') @property def requests_total(self) -> int: return self.requests_failed + self.requests_finished diff --git a/src/crawlee/storage_clients/_base/_dataset_client.py b/src/crawlee/storage_clients/_base/_dataset_client.py index 87573a3916..d2eeb86665 100644 --- a/src/crawlee/storage_clients/_base/_dataset_client.py +++ b/src/crawlee/storage_clients/_base/_dataset_client.py @@ -87,8 +87,8 @@ async def iterate_items( The backend method for the `Dataset.iterate_items` call. """ - # This syntax is to make mypy properly work with abstract AsyncIterator. + # This syntax is to make type checker properly work with abstract AsyncIterator. # https://mypy.readthedocs.io/en/stable/more_types.html#asynchronous-iterators raise NotImplementedError - if False: # type: ignore[unreachable] + if False: yield 0 diff --git a/src/crawlee/storage_clients/_base/_key_value_store_client.py b/src/crawlee/storage_clients/_base/_key_value_store_client.py index d23abd7d70..33c36f67bd 100644 --- a/src/crawlee/storage_clients/_base/_key_value_store_client.py +++ b/src/crawlee/storage_clients/_base/_key_value_store_client.py @@ -72,10 +72,10 @@ async def iterate_keys( The backend method for the `KeyValueStore.iterate_keys` call. """ - # This syntax is to make mypy properly work with abstract AsyncIterator. + # This syntax is to make type checker properly work with abstract AsyncIterator. # https://mypy.readthedocs.io/en/stable/more_types.html#asynchronous-iterators raise NotImplementedError - if False: # type: ignore[unreachable] + if False: yield 0 @abstractmethod diff --git a/src/crawlee/storage_clients/_file_system/_dataset_client.py b/src/crawlee/storage_clients/_file_system/_dataset_client.py index 55130d587f..4a222dc037 100644 --- a/src/crawlee/storage_clients/_file_system/_dataset_client.py +++ b/src/crawlee/storage_clients/_file_system/_dataset_client.py @@ -120,7 +120,7 @@ async def open( dataset_base_path = Path(configuration.storage_dir) / cls._STORAGE_SUBDIR if not dataset_base_path.exists(): - await asyncio.to_thread(dataset_base_path.mkdir, parents=True, exist_ok=True) + await asyncio.to_thread(lambda: dataset_base_path.mkdir(parents=True, exist_ok=True)) # Get a new instance by ID. if id: @@ -134,7 +134,7 @@ async def open( continue try: - file = await asyncio.to_thread(path_to_metadata.open, 'r', encoding='utf-8') + file = await asyncio.to_thread(lambda p=path_to_metadata: p.open(mode='r', encoding='utf-8')) try: file_content = json.load(file) metadata = DatasetMetadata(**file_content) @@ -163,7 +163,7 @@ async def open( # If the dataset directory exists, reconstruct the client from the metadata file. 
if path_to_dataset.exists() and path_to_metadata.exists(): - file = await asyncio.to_thread(open, path_to_metadata, 'r', encoding='utf-8') + file = await asyncio.to_thread(lambda: path_to_metadata.open(mode='r', encoding='utf-8')) try: file_content = json.load(file) finally: @@ -211,7 +211,7 @@ async def drop(self) -> None: async def purge(self) -> None: async with self._lock: for file_path in await self._get_sorted_data_files(): - await asyncio.to_thread(file_path.unlink, missing_ok=True) + await asyncio.to_thread(lambda f=file_path: f.unlink(missing_ok=True)) await self._update_metadata( update_accessed_at=True, @@ -435,7 +435,7 @@ async def _update_metadata( self._metadata.item_count = new_item_count # Ensure the parent directory for the metadata file exists. - await asyncio.to_thread(self.path_to_metadata.parent.mkdir, parents=True, exist_ok=True) + await asyncio.to_thread(lambda: self.path_to_metadata.parent.mkdir(parents=True, exist_ok=True)) # Dump the serialized metadata to the file. data = await json_dumps(self._metadata.model_dump()) @@ -456,7 +456,7 @@ async def _push_item(self, item: dict[str, Any], item_id: int) -> None: file_path = self.path_to_dataset / filename # Ensure the dataset directory exists. - await asyncio.to_thread(self.path_to_dataset.mkdir, parents=True, exist_ok=True) + await asyncio.to_thread(lambda: self.path_to_dataset.mkdir(parents=True, exist_ok=True)) # Dump the serialized item to the file. data = await json_dumps(item) @@ -473,9 +473,10 @@ async def _get_sorted_data_files(self) -> list[Path]: """ # Retrieve and sort all JSON files in the dataset directory numerically. files = await asyncio.to_thread( - sorted, - self.path_to_dataset.glob('*.json'), - key=lambda f: int(f.stem) if f.stem.isdigit() else 0, + lambda: sorted( + self.path_to_dataset.glob('*.json'), + key=lambda f: int(f.stem) if f.stem.isdigit() else 0, + ) ) # Remove the metadata file from the list if present. diff --git a/src/crawlee/storage_clients/_file_system/_key_value_store_client.py b/src/crawlee/storage_clients/_file_system/_key_value_store_client.py index 6a3db78fbc..28e724fda8 100644 --- a/src/crawlee/storage_clients/_file_system/_key_value_store_client.py +++ b/src/crawlee/storage_clients/_file_system/_key_value_store_client.py @@ -119,7 +119,7 @@ async def open( kvs_base_path = Path(configuration.storage_dir) / cls._STORAGE_SUBDIR if not kvs_base_path.exists(): - await asyncio.to_thread(kvs_base_path.mkdir, parents=True, exist_ok=True) + await asyncio.to_thread(lambda: kvs_base_path.mkdir(parents=True, exist_ok=True)) # Get a new instance by ID. if id: @@ -133,7 +133,7 @@ async def open( continue try: - file = await asyncio.to_thread(path_to_metadata.open, 'r', encoding='utf-8') + file = await asyncio.to_thread(lambda p=path_to_metadata: p.open(mode='r', encoding='utf-8')) try: file_content = json.load(file) metadata = KeyValueStoreMetadata(**file_content) @@ -162,7 +162,7 @@ async def open( # If the key-value store directory exists, reconstruct the client from the metadata file. 
if path_to_kvs.exists() and path_to_metadata.exists(): - file = await asyncio.to_thread(open, path_to_metadata, 'r', encoding='utf-8') + file = await asyncio.to_thread(lambda: path_to_metadata.open(mode='r', encoding='utf-8')) try: file_content = json.load(file) finally: @@ -212,7 +212,7 @@ async def purge(self) -> None: for file_path in self.path_to_kvs.glob('*'): if file_path.name == METADATA_FILENAME: continue - await asyncio.to_thread(file_path.unlink, missing_ok=True) + await asyncio.to_thread(lambda f=file_path: f.unlink(missing_ok=True)) await self._update_metadata( update_accessed_at=True, @@ -239,7 +239,7 @@ async def get_value(self, *, key: str) -> KeyValueStoreRecord | None: # Read the metadata file async with self._lock: try: - file = await asyncio.to_thread(open, record_metadata_filepath, 'r', encoding='utf-8') + file = await asyncio.to_thread(lambda: record_metadata_filepath.open(mode='r', encoding='utf-8')) except FileNotFoundError: logger.warning(f'Metadata file disappeared for key "{key}", aborting get_value') return None @@ -346,11 +346,11 @@ async def delete_value(self, *, key: str) -> None: async with self._lock: # Delete the value file and its metadata if found if record_path.exists(): - await asyncio.to_thread(record_path.unlink, missing_ok=True) + await asyncio.to_thread(lambda: record_path.unlink(missing_ok=True)) # Delete the metadata file if it exists if metadata_path.exists(): - await asyncio.to_thread(metadata_path.unlink, missing_ok=True) + await asyncio.to_thread(lambda: metadata_path.unlink(missing_ok=True)) else: logger.warning(f'Found value file for key "{key}" but no metadata file when trying to delete it.') @@ -373,7 +373,7 @@ async def iterate_keys( # List and sort all files *inside* a brief lock, then release it immediately: async with self._lock: - files = sorted(await asyncio.to_thread(list, self.path_to_kvs.glob('*'))) + files = sorted(await asyncio.to_thread(lambda: list(self.path_to_kvs.glob('*')))) count = 0 @@ -395,7 +395,7 @@ async def iterate_keys( # Try to read and parse the metadata file try: - metadata_content = await asyncio.to_thread(file_path.read_text, encoding='utf-8') + metadata_content = await asyncio.to_thread(lambda f=file_path: f.read_text(encoding='utf-8')) except FileNotFoundError: logger.warning(f'Metadata file disappeared for key "{key_name}", skipping it.') continue @@ -475,7 +475,7 @@ async def _update_metadata( self._metadata.modified_at = now # Ensure the parent directory for the metadata file exists. - await asyncio.to_thread(self.path_to_metadata.parent.mkdir, parents=True, exist_ok=True) + await asyncio.to_thread(lambda: self.path_to_metadata.parent.mkdir(parents=True, exist_ok=True)) # Dump the serialized metadata to the file. data = await json_dumps(self._metadata.model_dump()) diff --git a/src/crawlee/storage_clients/_file_system/_request_queue_client.py b/src/crawlee/storage_clients/_file_system/_request_queue_client.py index e49771b7c9..1a91ecea9e 100644 --- a/src/crawlee/storage_clients/_file_system/_request_queue_client.py +++ b/src/crawlee/storage_clients/_file_system/_request_queue_client.py @@ -183,7 +183,7 @@ async def open( rq_base_path = Path(configuration.storage_dir) / cls._STORAGE_SUBDIR if not rq_base_path.exists(): - await asyncio.to_thread(rq_base_path.mkdir, parents=True, exist_ok=True) + await asyncio.to_thread(lambda: rq_base_path.mkdir(parents=True, exist_ok=True)) # Open an existing RQ by its ID, raise an error if not found. 
if id: @@ -197,7 +197,7 @@ async def open( continue try: - file = await asyncio.to_thread(path_to_metadata.open, 'r', encoding='utf-8') + file = await asyncio.to_thread(lambda p=path_to_metadata: p.open(mode='r', encoding='utf-8')) try: file_content = json.load(file) metadata = RequestQueueMetadata(**file_content) @@ -232,7 +232,7 @@ async def open( # If the RQ directory exists, reconstruct the client from the metadata file. if path_to_rq.exists() and path_to_metadata.exists(): - file = await asyncio.to_thread(open, path_to_metadata, 'r', encoding='utf-8') + file = await asyncio.to_thread(lambda: path_to_metadata.open(encoding='utf-8')) try: file_content = json.load(file) finally: @@ -300,7 +300,7 @@ async def purge(self) -> None: request_files = await self._get_request_files(self.path_to_rq) for file_path in request_files: - await asyncio.to_thread(file_path.unlink, missing_ok=True) + await asyncio.to_thread(lambda f=file_path: f.unlink(missing_ok=True)) # Clear recoverable state await self._state.reset() @@ -675,7 +675,7 @@ async def _update_metadata( self._metadata.had_multiple_clients = True # Ensure the parent directory for the metadata file exists. - await asyncio.to_thread(self.path_to_metadata.parent.mkdir, parents=True, exist_ok=True) + await asyncio.to_thread(lambda: self.path_to_metadata.parent.mkdir(parents=True, exist_ok=True)) # Dump the serialized metadata to the file. data = await json_dumps(self._metadata.model_dump()) @@ -753,10 +753,10 @@ async def _get_request_files(cls, path_to_rq: Path) -> list[Path]: A list of paths to all request files. """ # Create the requests directory if it doesn't exist. - await asyncio.to_thread(path_to_rq.mkdir, parents=True, exist_ok=True) + await asyncio.to_thread(lambda: path_to_rq.mkdir(parents=True, exist_ok=True)) # List all the json files. - files = await asyncio.to_thread(list, path_to_rq.glob('*.json')) + files = await asyncio.to_thread(lambda: list(path_to_rq.glob('*.json'))) # Filter out metadata file and non-file entries. filtered = filter(lambda request_file: request_file.is_file() and request_file.name != METADATA_FILENAME, files) @@ -775,7 +775,7 @@ async def _parse_request_file(cls, file_path: Path) -> Request | None: """ # Open the request file. 
try: - file = await asyncio.to_thread(open, file_path, 'r', encoding='utf-8') + file = await asyncio.to_thread(lambda f=file_path: f.open(mode='r', encoding='utf-8')) except FileNotFoundError: logger.warning(f'Request file "{file_path}" not found.') return None diff --git a/src/crawlee/storage_clients/_redis/_client_mixin.py b/src/crawlee/storage_clients/_redis/_client_mixin.py index 6c66e5db7b..8a54896577 100644 --- a/src/crawlee/storage_clients/_redis/_client_mixin.py +++ b/src/crawlee/storage_clients/_redis/_client_mixin.py @@ -179,7 +179,7 @@ async def _get_pipeline(self, *, with_execute: bool = True) -> AsyncIterator[Pip """Create a new Redis pipeline.""" async with self._redis.pipeline() as pipe: try: - pipe.multi() # type: ignore[no-untyped-call] + pipe.multi() yield pipe finally: if with_execute: @@ -187,7 +187,6 @@ async def _get_pipeline(self, *, with_execute: bool = True) -> AsyncIterator[Pip async def _create_storage(self, pipeline: Pipeline) -> None: """Create the actual storage structure in Redis.""" - _ = pipeline # To avoid unused variable mypy error async def _create_script(self, script_name: str) -> AsyncScript: """Load a Lua script from a file and return a Script object.""" @@ -262,8 +261,6 @@ async def _specific_update_metadata(self, pipeline: Pipeline, **kwargs: Any) -> pipeline: The Redis pipeline to use for the update. **kwargs: Storage-specific update parameters. """ - _ = pipeline # To avoid unused variable mypy error - _ = kwargs async def _update_metadata( self, diff --git a/src/crawlee/storage_clients/_redis/_dataset_client.py b/src/crawlee/storage_clients/_redis/_dataset_client.py index 44a78bce62..74c9d6c496 100644 --- a/src/crawlee/storage_clients/_redis/_dataset_client.py +++ b/src/crawlee/storage_clients/_redis/_dataset_client.py @@ -179,13 +179,13 @@ async def get_data( case (True, int(), None): json_path += f'[:-{offset}]' case (True, int(), int()): - json_path += f'[-{offset + limit}:-{offset}]' + json_path += f'[-{offset + limit}:-{offset}]' # ty: ignore[unsupported-operator] case (False, 0, int()): json_path += f'[:{limit}]' case (False, int(), None): json_path += f'[{offset}:]' case (False, int(), int()): - json_path += f'[{offset}:{offset + limit}]' + json_path += f'[{offset}:{offset + limit}]' # ty: ignore[unsupported-operator] if json_path == '$': json_path = '$[*]' @@ -210,7 +210,7 @@ async def get_data( limit=limit or (total - offset), total=total, desc=desc, - items=data, + items=data, # ty: ignore[invalid-argument-type] ) @override diff --git a/src/crawlee/storage_clients/_redis/_key_value_store_client.py b/src/crawlee/storage_clients/_redis/_key_value_store_client.py index 99f9665ea7..8aeaa1a01d 100644 --- a/src/crawlee/storage_clients/_redis/_key_value_store_client.py +++ b/src/crawlee/storage_clients/_redis/_key_value_store_client.py @@ -144,7 +144,7 @@ async def set_value(self, *, key: str, value: Any, content_type: str | None = No async with self._get_pipeline() as pipe: # redis-py typing issue - await await_redis_response(pipe.hset(self._items_key, key, value_bytes)) # type: ignore[arg-type] + await await_redis_response(pipe.hset(self._items_key, key, value_bytes)) # ty: ignore[invalid-argument-type] await await_redis_response( pipe.hset( @@ -174,9 +174,7 @@ async def get_value(self, *, key: str) -> KeyValueStoreRecord | None: # Query the record by key # redis-py typing issue - value_bytes: bytes | None = await await_redis_response( - self._redis.hget(self._items_key, key) # type: ignore[arg-type] - ) + value_bytes: bytes | None = await 
await_redis_response(self._redis.hget(self._items_key, key)) # ty: ignore[invalid-assignment] if value_bytes is None: logger.warning(f'Value for key "{key}" is missing.') @@ -225,7 +223,7 @@ async def iterate_keys( raise TypeError('The items data was received in an incorrect format.') # Get all keys, sorted alphabetically - keys = sorted(items_data.keys()) + keys = sorted(items_data.keys()) # ty: ignore[invalid-argument-type] # Apply exclusive_start_key filter if provided if exclusive_start_key is not None: diff --git a/src/crawlee/storage_clients/_redis/_request_queue_client.py b/src/crawlee/storage_clients/_redis/_request_queue_client.py index 90a86ee64f..74f9028bec 100644 --- a/src/crawlee/storage_clients/_redis/_request_queue_client.py +++ b/src/crawlee/storage_clients/_redis/_request_queue_client.py @@ -247,7 +247,6 @@ async def add_batch_of_requests( *, forefront: bool = False, ) -> AddRequestsResponse: - # Mypy workaround if self._add_requests_script is None: raise RuntimeError('Scripts not loaded. Call _ensure_scripts_loaded() before using the client.') @@ -264,8 +263,8 @@ async def add_batch_of_requests( await await_redis_response(pipe.smismember(self._pending_set_key, unique_keys)) await await_redis_response(pipe.smismember(self._handled_set_key, unique_keys)) elif self._dedup_strategy == 'bloom': - await await_redis_response(pipe.bf().mexists(self._added_filter_key, *unique_keys)) # type: ignore[no-untyped-call] - await await_redis_response(pipe.bf().mexists(self._handled_filter_key, *unique_keys)) # type: ignore[no-untyped-call] + await await_redis_response(pipe.bf().mexists(self._added_filter_key, *unique_keys)) + await await_redis_response(pipe.bf().mexists(self._handled_filter_key, *unique_keys)) pipe_results = await pipe.execute() @@ -353,7 +352,6 @@ async def fetch_next_request(self) -> Request | None: if self._pending_fetch_cache: return self._pending_fetch_cache.popleft() - # Mypy workaround if self._fetch_script is None: raise RuntimeError('Scripts not loaded. Call _ensure_scripts_loaded() before using the client.') @@ -399,7 +397,7 @@ async def mark_request_as_handled(self, request: Request) -> ProcessedRequest | await await_redis_response(pipe.sadd(self._handled_set_key, request.unique_key)) await await_redis_response(pipe.srem(self._pending_set_key, request.unique_key)) elif self._dedup_strategy == 'bloom': - await await_redis_response(pipe.bf().add(self._handled_filter_key, request.unique_key)) # type: ignore[no-untyped-call] + await await_redis_response(pipe.bf().add(self._handled_filter_key, request.unique_key)) await await_redis_response(pipe.hdel(self._in_progress_key, request.unique_key)) await await_redis_response(pipe.hdel(self._data_key, request.unique_key)) @@ -499,17 +497,16 @@ async def _create_storage(self, pipeline: Pipeline) -> None: await await_redis_response( pipeline.bf().create( self._added_filter_key, errorRate=self._bloom_error_rate, capacity=100000, expansion=10 - ) # type: ignore[no-untyped-call] + ) ) await await_redis_response( pipeline.bf().create( self._handled_filter_key, errorRate=self._bloom_error_rate, capacity=100000, expansion=10 - ) # type: ignore[no-untyped-call] + ) ) async def _reclaim_stale_requests(self) -> None: """Reclaim requests that have been in progress for too long.""" - # Mypy workaround if self._reclaim_stale_script is None: raise RuntimeError('Scripts not loaded. 
Call _ensure_scripts_loaded() before using the client.') diff --git a/src/crawlee/storage_clients/_redis/_storage_client.py b/src/crawlee/storage_clients/_redis/_storage_client.py index 78e7bed603..a6c39f5def 100644 --- a/src/crawlee/storage_clients/_redis/_storage_client.py +++ b/src/crawlee/storage_clients/_redis/_storage_client.py @@ -57,16 +57,19 @@ def __init__( queue_bloom_error_rate: Desired false positive rate for Bloom filter deduplication. Only relevant if `queue_dedup_strategy` is set to 'bloom'. """ - match (redis, connection_string): - case (None, None): - raise ValueError('Either redis or connection_string must be provided.') - case (Redis(), None): - self._redis = redis - case (None, str()): - self._redis = Redis.from_url(connection_string) - case (Redis(), str()): - raise ValueError('Either redis or connection_string must be provided, not both.') + if redis is None and connection_string is None: + raise ValueError('Either redis or connection_string must be provided.') + if redis is not None and connection_string is not None: + raise ValueError('Either redis or connection_string must be provided, not both.') + + if isinstance(redis, Redis) and connection_string is None: + self._redis = redis + + if isinstance(connection_string, str) and redis is None: + self._redis = Redis.from_url(connection_string) + + self._redis: Redis # to help type checker self._queue_dedup_strategy = queue_dedup_strategy self._queue_bloom_error_rate = queue_bloom_error_rate diff --git a/src/crawlee/storage_clients/_redis/_utils.py b/src/crawlee/storage_clients/_redis/_utils.py index a86f979fa4..27f051d692 100644 --- a/src/crawlee/storage_clients/_redis/_utils.py +++ b/src/crawlee/storage_clients/_redis/_utils.py @@ -19,5 +19,5 @@ async def await_redis_response(response: Awaitable[T] | T) -> T: def read_lua_script(script_name: str) -> str: """Read a Lua script from a file.""" file_path = Path(__file__).parent / 'lua_scripts' / script_name - with file_path.open('r', encoding='utf-8') as file: + with file_path.open(mode='r', encoding='utf-8') as file: return file.read() diff --git a/src/crawlee/storage_clients/_sql/_client_mixin.py b/src/crawlee/storage_clients/_sql/_client_mixin.py index c681e3a220..e7ee2ae8d9 100644 --- a/src/crawlee/storage_clients/_sql/_client_mixin.py +++ b/src/crawlee/storage_clients/_sql/_client_mixin.py @@ -105,7 +105,7 @@ async def _open( else: stmt = select(cls._METADATA_TABLE).where(cls._METADATA_TABLE.internal_name == internal_name) result = await session.execute(stmt) - orm_metadata = result.scalar_one_or_none() # type: ignore[assignment] + orm_metadata = result.scalar_one_or_none() if orm_metadata: client = cls(id=orm_metadata.id, storage_client=storage_client) diff --git a/src/crawlee/storage_clients/models.py b/src/crawlee/storage_clients/models.py index e7b5927d7d..2ebd65914d 100644 --- a/src/crawlee/storage_clients/models.py +++ b/src/crawlee/storage_clients/models.py @@ -1,7 +1,7 @@ from __future__ import annotations from datetime import datetime -from typing import Annotated, Any, Generic +from typing import TYPE_CHECKING, Annotated, Any, Generic from pydantic import BaseModel, BeforeValidator, ConfigDict, Field from typing_extensions import TypeVar @@ -127,8 +127,13 @@ class DatasetItemsListPage(BaseModel): desc: Annotated[bool, Field(default=False)] """Indicates if the returned list is in descending order.""" - items: Annotated[list[dict], Field(default_factory=list)] - """The list of dataset items returned on this page.""" + # Workaround for Pydantic and type 
checkers when using Annotated with default_factory + if TYPE_CHECKING: + items: list[dict] = [] + """The list of dataset items returned on this page.""" + else: + items: Annotated[list[dict], Field(default_factory=list)] + """The list of dataset items returned on this page.""" @docs_group('Storage data') diff --git a/src/crawlee/storages/_storage_instance_manager.py b/src/crawlee/storages/_storage_instance_manager.py index f64d6e2c9f..e08fc80080 100644 --- a/src/crawlee/storages/_storage_instance_manager.py +++ b/src/crawlee/storages/_storage_instance_manager.py @@ -160,7 +160,7 @@ async def open_storage_instance( metadata = await client.get_metadata() - instance = cls(client, metadata.id, metadata.name) # type: ignore[call-arg] + instance = cls(client, metadata.id, metadata.name) # ty: ignore[too-many-positional-arguments] instance_name = getattr(instance, 'name', None) # Cache the instance. diff --git a/tests/unit/_autoscaling/test_snapshotter.py b/tests/unit/_autoscaling/test_snapshotter.py index 06a5682a32..7b3d50d75d 100644 --- a/tests/unit/_autoscaling/test_snapshotter.py +++ b/tests/unit/_autoscaling/test_snapshotter.py @@ -139,7 +139,10 @@ async def test_get_cpu_sample( events_data = [ EventSystemInfoData( - cpu_info=CpuInfo(used_ratio=0.5, created_at=now - timedelta(hours=delta)), + cpu_info=CpuInfo( + used_ratio=0.5, + created_at=now - timedelta(hours=delta), + ), memory_info=default_memory_info, ) for delta in range(5, 0, -1) diff --git a/tests/unit/_utils/test_html_to_text.py b/tests/unit/_utils/test_html_to_text.py index c802eee248..0a535e58db 100644 --- a/tests/unit/_utils/test_html_to_text.py +++ b/tests/unit/_utils/test_html_to_text.py @@ -191,7 +191,7 @@ def test_html_to_text(source: str, expected_text: str, html_to_text: Callable[[s @pytest.mark.parametrize('html_to_text', [html_to_text_parsel, html_to_text_beautifulsoup]) def test_html_to_text_raises_on_wrong_input_type(html_to_text: Callable[[str], str]) -> None: with pytest.raises(TypeError): - html_to_text(1) # type: ignore[arg-type] # Intentional wrong type test. 
+ html_to_text(1) # ty: ignore[invalid-argument-type] def test_html_to_text_parsel() -> None: diff --git a/tests/unit/_utils/test_recurring_task.py b/tests/unit/_utils/test_recurring_task.py index 78f43601eb..61951ec11e 100644 --- a/tests/unit/_utils/test_recurring_task.py +++ b/tests/unit/_utils/test_recurring_task.py @@ -48,7 +48,7 @@ async def test_execution(function: AsyncMock, delay: timedelta) -> None: await asyncio.sleep(0.1) # Wait enough for the task to execute a few times await task.stop() - assert isinstance(task.func, AsyncMock) # To let MyPy know that the function is a mocked + assert isinstance(task.func, AsyncMock) # To let type checker know that the function is a mock assert task.func.call_count >= 3 await task.stop() diff --git a/tests/unit/_utils/test_timedelta_ms.py b/tests/unit/_utils/test_timedelta_ms.py index dd5fc7a8f0..5f5b0f4f4f 100644 --- a/tests/unit/_utils/test_timedelta_ms.py +++ b/tests/unit/_utils/test_timedelta_ms.py @@ -30,6 +30,6 @@ class _ModelWithTimedeltaMs(BaseModel): def test_model_with_timedelta_ms_input_types( time_delta_input: float | timedelta | Any | None, expected_time_delta: timedelta, expected_model_dump_value: int ) -> None: - model = _ModelWithTimedeltaMs(time_delta=time_delta_input) + model = _ModelWithTimedeltaMs(time_delta=time_delta_input) # ty: ignore[invalid-argument-type] assert model.time_delta == expected_time_delta assert model.model_dump() == {'time_delta': expected_model_dump_value} diff --git a/tests/unit/crawlers/_adaptive_playwright/test_adaptive_playwright_crawler.py b/tests/unit/crawlers/_adaptive_playwright/test_adaptive_playwright_crawler.py index c2d0f153b1..3174adfd64 100644 --- a/tests/unit/crawlers/_adaptive_playwright/test_adaptive_playwright_crawler.py +++ b/tests/unit/crawlers/_adaptive_playwright/test_adaptive_playwright_crawler.py @@ -115,7 +115,7 @@ class TestInput: TestInput( expected_pw_count=0, expected_static_count=2, - rendering_types=cycle(['static']), + rendering_types=cycle(['static']), # ty: ignore[invalid-argument-type] detection_probability_recommendation=cycle([0]), ), id='Static only', @@ -124,7 +124,7 @@ class TestInput: TestInput( expected_pw_count=2, expected_static_count=0, - rendering_types=cycle(['client only']), + rendering_types=cycle(['client only']), # ty: ignore[invalid-argument-type] detection_probability_recommendation=cycle([0]), ), id='Client only', @@ -133,7 +133,7 @@ class TestInput: TestInput( expected_pw_count=1, expected_static_count=1, - rendering_types=cycle(['static', 'client only']), + rendering_types=cycle(['static', 'client only']), # ty: ignore[invalid-argument-type] detection_probability_recommendation=cycle([0]), ), id='Mixed', @@ -142,7 +142,7 @@ class TestInput: TestInput( expected_pw_count=2, expected_static_count=2, - rendering_types=cycle(['static', 'client only']), + rendering_types=cycle(['static', 'client only']), # ty: ignore[invalid-argument-type] detection_probability_recommendation=cycle([1]), ), id='Enforced rendering type detection', @@ -206,7 +206,8 @@ async def pre_nav_hook(context: AdaptivePlaywrightPreNavCrawlingContext) -> None async def test_adaptive_crawling_parsel(test_urls: list[str]) -> None: """Top level test for parsel. Only one argument combination. 
(The rest of code is tested with bs variant.)""" predictor = _SimpleRenderingTypePredictor( - rendering_types=cycle(['static', 'client only']), detection_probability_recommendation=cycle([0]) + rendering_types=cycle(['static', 'client only']), # ty: ignore[invalid-argument-type] + detection_probability_recommendation=cycle([0]), ) crawler = AdaptivePlaywrightCrawler.with_parsel_static_parser( @@ -687,7 +688,8 @@ async def test_adaptive_context_helpers_on_changed_selector(test_urls: list[str] dynamically changed text instead of the original static text. """ browser_only_predictor_no_detection = _SimpleRenderingTypePredictor( - rendering_types=cycle(['client only']), detection_probability_recommendation=cycle([0]) + rendering_types=cycle(['client only']), # ty: ignore[invalid-argument-type] + detection_probability_recommendation=cycle([0]), ) expected_h3_tag = f'
<h3>{_H3_CHANGED_TEXT}</h3>
' @@ -712,7 +714,8 @@ async def request_handler(context: AdaptivePlaywrightCrawlingContext) -> None: async def test_adaptive_context_query_non_existing_element(test_urls: list[str]) -> None: """Test that querying non-existing selector returns `None`""" browser_only_predictor_no_detection = _SimpleRenderingTypePredictor( - rendering_types=cycle(['client only']), detection_probability_recommendation=cycle([0]) + rendering_types=cycle(['client only']), # ty: ignore[invalid-argument-type] + detection_probability_recommendation=cycle([0]), ) crawler = AdaptivePlaywrightCrawler.with_parsel_static_parser( @@ -738,7 +741,7 @@ async def request_handler(context: AdaptivePlaywrightCrawlingContext) -> None: TestInput( expected_pw_count=0, expected_static_count=2, - rendering_types=cycle(['static']), + rendering_types=cycle(['static']), # ty: ignore[invalid-argument-type] detection_probability_recommendation=cycle([0]), ), id='Static only', @@ -747,7 +750,7 @@ async def request_handler(context: AdaptivePlaywrightCrawlingContext) -> None: TestInput( expected_pw_count=2, expected_static_count=0, - rendering_types=cycle(['client only']), + rendering_types=cycle(['client only']), # ty: ignore[invalid-argument-type] detection_probability_recommendation=cycle([0]), ), id='Client only', @@ -756,7 +759,7 @@ async def request_handler(context: AdaptivePlaywrightCrawlingContext) -> None: TestInput( expected_pw_count=2, expected_static_count=2, - rendering_types=cycle(['static', 'client only']), + rendering_types=cycle(['static', 'client only']), # ty: ignore[invalid-argument-type] detection_probability_recommendation=cycle([1]), ), id='Enforced rendering type detection', diff --git a/tests/unit/crawlers/_basic/test_basic_crawler.py b/tests/unit/crawlers/_basic/test_basic_crawler.py index d4ac09ff85..e0cd7e6e72 100644 --- a/tests/unit/crawlers/_basic/test_basic_crawler.py +++ b/tests/unit/crawlers/_basic/test_basic_crawler.py @@ -2,7 +2,6 @@ from __future__ import annotations import asyncio -import concurrent import json import logging import os @@ -11,6 +10,7 @@ import time from asyncio import Future from collections import Counter +from concurrent.futures import ProcessPoolExecutor from dataclasses import dataclass from datetime import timedelta from itertools import product @@ -307,7 +307,7 @@ async def request_handler(context: BasicCrawlingContext) -> None: raise RuntimeError('Arbitrary crash for testing purposes') # Apply one of the handlers - @getattr(crawler, handler) # type: ignore[untyped-decorator] + @getattr(crawler, handler) async def handler_implementation(context: BasicCrawlingContext, error: Exception) -> None: await context.push_data(test_data) await context.add_requests(requests=[test_request], rq_alias=rq_alias) @@ -1043,16 +1043,16 @@ async def handler(context: BasicCrawlingContext) -> None: assert final_statistics.msg == 'Final request statistics:' # ignore[attr-defined] since `extra` parameters are not defined for `LogRecord` - assert final_statistics.requests_finished == 4 # type: ignore[attr-defined] - assert final_statistics.requests_failed == 33 # type: ignore[attr-defined] - assert final_statistics.retry_histogram == [1, 4, 8] # type: ignore[attr-defined] - assert final_statistics.request_avg_failed_duration == 99.0 # type: ignore[attr-defined] - assert final_statistics.request_avg_finished_duration == 0.483 # type: ignore[attr-defined] - assert final_statistics.requests_finished_per_minute == 0.33 # type: ignore[attr-defined] - assert final_statistics.requests_failed_per_minute == 0.1 # 
type: ignore[attr-defined] - assert final_statistics.request_total_duration == 720.0 # type: ignore[attr-defined] - assert final_statistics.requests_total == 37 # type: ignore[attr-defined] - assert final_statistics.crawler_runtime == 300.0 # type: ignore[attr-defined] + assert final_statistics.requests_finished == 4 + assert final_statistics.requests_failed == 33 + assert final_statistics.retry_histogram == [1, 4, 8] + assert final_statistics.request_avg_failed_duration == 99.0 + assert final_statistics.request_avg_finished_duration == 0.483 + assert final_statistics.requests_finished_per_minute == 0.33 + assert final_statistics.requests_failed_per_minute == 0.1 + assert final_statistics.request_total_duration == 720.0 + assert final_statistics.requests_total == 37 + assert final_statistics.crawler_runtime == 300.0 async def test_crawler_manual_stop() -> None: @@ -1774,7 +1774,7 @@ async def test_crawler_statistics_persistence(tmp_path: Path) -> None: This test simulates starting the crawler process twice, and checks that the statistics include first run.""" - with concurrent.futures.ProcessPoolExecutor() as executor: + with ProcessPoolExecutor() as executor: # Crawl 2 requests in the first run and automatically persist the state. first_run_state = executor.submit( _process_run_crawler, @@ -1784,7 +1784,7 @@ async def test_crawler_statistics_persistence(tmp_path: Path) -> None: assert first_run_state.requests_finished == 2 # Do not reuse the executor to simulate a fresh process to avoid modified class attributes. - with concurrent.futures.ProcessPoolExecutor() as executor: + with ProcessPoolExecutor() as executor: # Crawl 1 additional requests in the second run, but use previously automatically persisted state. second_run_state = executor.submit( _process_run_crawler, requests=['https://c.placeholder.com'], storage_dir=str(tmp_path) diff --git a/tests/unit/crawlers/_basic/test_context_pipeline.py b/tests/unit/crawlers/_basic/test_context_pipeline.py index 35de6c60de..b910322f08 100644 --- a/tests/unit/crawlers/_basic/test_context_pipeline.py +++ b/tests/unit/crawlers/_basic/test_context_pipeline.py @@ -88,7 +88,7 @@ async def middleware_b(context: EnhancedCrawlingContext) -> AsyncGenerator[MoreE ) events.append('middleware_b_out') - pipeline = ContextPipeline[BasicCrawlingContext]().compose(middleware_a).compose(middleware_b) + pipeline = ContextPipeline[BasicCrawlingContext]().compose(middleware_a).compose(middleware_b) # ty: ignore[invalid-argument-type] context = BasicCrawlingContext( request=Request.from_url(url='https://test.io/'), @@ -142,7 +142,7 @@ async def step_1(context: BasicCrawlingContext) -> AsyncGenerator[BasicCrawlingC async def step_2(context: BasicCrawlingContext) -> AsyncGenerator[BasicCrawlingContext, None]: raise RuntimeError('Crash during middleware initialization') - yield context # type: ignore[unreachable] + yield context pipeline = ContextPipeline().compose(step_1).compose(step_2) context = BasicCrawlingContext( diff --git a/tests/unit/crawlers/_playwright/test_playwright_crawler.py b/tests/unit/crawlers/_playwright/test_playwright_crawler.py index 134f699161..d851cfc1c2 100644 --- a/tests/unit/crawlers/_playwright/test_playwright_crawler.py +++ b/tests/unit/crawlers/_playwright/test_playwright_crawler.py @@ -420,7 +420,7 @@ async def test_save_cookies_after_handler_processing(server_url: URL) -> None: @crawler.router.default_handler async def request_handler(context: PlaywrightCrawlingContext) -> None: # Simulate cookies installed from an external source in the 
browser - await context.page.context.add_cookies([{'name': 'check', 'value': 'test', 'url': str(server_url)}]) + await context.page.context.add_cookies([{'name': 'check', 'value': 'test', 'url': str(server_url)}]) # ty: ignore[invalid-argument-type] if context.session: session_ids.append(context.session.id) diff --git a/tests/unit/fingerprint_suite/test_header_generator.py b/tests/unit/fingerprint_suite/test_header_generator.py index dfcb41e79d..ae9ab71bf0 100644 --- a/tests/unit/fingerprint_suite/test_header_generator.py +++ b/tests/unit/fingerprint_suite/test_header_generator.py @@ -51,7 +51,7 @@ def test_get_user_agent_header_invalid_browser_type() -> None: header_generator = HeaderGenerator() with pytest.raises(ValueError, match=r'Unsupported browser type'): - header_generator.get_user_agent_header(browser_type='invalid_browser') # type: ignore[arg-type] + header_generator.get_user_agent_header(browser_type='invalid_browser') # ty: ignore[invalid-argument-type] def test_get_sec_ch_ua_headers_chromium(header_network: dict) -> None: @@ -77,4 +77,4 @@ def test_get_sec_ch_ua_headers_invalid_browser_type() -> None: header_generator = HeaderGenerator() with pytest.raises(ValueError, match=r'Unsupported browser type'): - header_generator.get_sec_ch_ua_headers(browser_type='invalid_browser') # type: ignore[arg-type] + header_generator.get_sec_ch_ua_headers(browser_type='invalid_browser') # ty: ignore[invalid-argument-type] diff --git a/tests/unit/proxy_configuration/test_new_proxy_info.py b/tests/unit/proxy_configuration/test_new_proxy_info.py index 8c0fa3497d..1a8efe0289 100644 --- a/tests/unit/proxy_configuration/test_new_proxy_info.py +++ b/tests/unit/proxy_configuration/test_new_proxy_info.py @@ -86,7 +86,7 @@ async def test_rotates_proxies() -> None: async def test_rotates_proxies_with_sessions() -> None: proxy_urls: list[str | None] = ['http://proxy:1111', 'http://proxy:2222', 'http://proxy:3333'] - request = Request(url='http://some.domain/abc', unique_key='1', id='1') + request = Request(url='http://some.domain/abc', unique_key='1') sessions = [f'session_{i}' for i in range(6)] config = ProxyConfiguration(proxy_urls=proxy_urls) diff --git a/tests/unit/proxy_configuration/test_tiers.py b/tests/unit/proxy_configuration/test_tiers.py index 641fd6b338..59db9a43d7 100644 --- a/tests/unit/proxy_configuration/test_tiers.py +++ b/tests/unit/proxy_configuration/test_tiers.py @@ -44,7 +44,7 @@ async def test_retrying_request_makes_tier_go_up() -> None: config = ProxyConfiguration(tiered_proxy_urls=tiered_proxy_urls) # Calling `new_proxy_info` with the same request most probably means it's being retried - request_1 = Request(url='http://some.domain/abc', unique_key='1', id='1') + request_1 = Request(url='http://some.domain/abc', unique_key='1') info = await config.new_proxy_info(None, request_1, None) assert info is not None @@ -59,7 +59,7 @@ async def test_retrying_request_makes_tier_go_up() -> None: assert info.url == tiered_proxy_urls[2][0] # Subsequent requests with the same domain should use the same tier - request_2 = Request(url='http://some.domain/xyz', unique_key='2', id='2') + request_2 = Request(url='http://some.domain/xyz', unique_key='2') info = await config.new_proxy_info(None, request_2, None) assert info is not None @@ -76,7 +76,7 @@ async def test_retrying_request_makes_tier_go_up_with_sessions() -> None: config = ProxyConfiguration(tiered_proxy_urls=tiered_proxy_urls) - request = Request(url='http://some.domain/abc', unique_key='1', id='1') + request = 
Request(url='http://some.domain/abc', unique_key='1') # Calling `new_proxy_info` with the same request likely means that it is being retried. # However, a single session should always receive the same proxy @@ -116,7 +116,7 @@ async def test_successful_request_makes_tier_go_down() -> None: config = ProxyConfiguration(tiered_proxy_urls=tiered_proxy_urls) - request_1 = Request(url='http://some.domain/abc', unique_key='1', id='1') + request_1 = Request(url='http://some.domain/abc', unique_key='1') info = None for tier in tiered_proxy_urls: @@ -125,7 +125,7 @@ async def test_successful_request_makes_tier_go_down() -> None: assert info.url == tier[0] for i in range(100): - new_request = Request(url=f'http://some.domain/{i}', unique_key=str(i), id=str(i)) + new_request = Request(url=f'http://some.domain/{i}', unique_key=str(i)) info = await config.new_proxy_info(None, new_request, None) assert info is not None @@ -141,7 +141,7 @@ async def test_none_proxy_retrying_request_makes_tier_go_up() -> None: config = ProxyConfiguration(tiered_proxy_urls=tiered_proxy_urls) # Calling `new_proxy_info` with the same request most probably means it's being retried - request_1 = Request(url='http://some.domain/abc', unique_key='1', id='1') + request_1 = Request(url='http://some.domain/abc', unique_key='1') # No proxy used. info = await config.new_proxy_info(None, request_1, None) diff --git a/tests/unit/storage_clients/_redis/test_redis_dataset_client.py b/tests/unit/storage_clients/_redis/test_redis_dataset_client.py index 3101ac5e0b..a80264e23b 100644 --- a/tests/unit/storage_clients/_redis/test_redis_dataset_client.py +++ b/tests/unit/storage_clients/_redis/test_redis_dataset_client.py @@ -49,7 +49,7 @@ async def test_base_keys_creation(dataset_client: RedisDatasetClient) -> None: metadata_data = await await_redis_response(dataset_client.redis.json().get('datasets:test_dataset:metadata')) assert isinstance(metadata_data, dict) - assert metadata_data['id'] == metadata.id # type: ignore[unreachable] # py-json typing is broken + assert metadata_data['id'] == metadata.id async def test_record_and_content_verification(dataset_client: RedisDatasetClient) -> None: diff --git a/tests/unit/storage_clients/_redis/test_redis_kvs_client.py b/tests/unit/storage_clients/_redis/test_redis_kvs_client.py index 92a9f0d46d..d2413a06c0 100644 --- a/tests/unit/storage_clients/_redis/test_redis_kvs_client.py +++ b/tests/unit/storage_clients/_redis/test_redis_kvs_client.py @@ -46,7 +46,7 @@ async def test_base_keys_creation(kvs_client: RedisKeyValueStoreClient) -> None: metadata_data = await await_redis_response(kvs_client.redis.json().get('key_value_stores:test_kvs:metadata')) assert isinstance(metadata_data, dict) - assert metadata_data['id'] == metadata.id # type: ignore[unreachable] # py-json typing is broken + assert metadata_data['id'] == metadata.id async def test_value_record_creation_and_content(kvs_client: RedisKeyValueStoreClient) -> None: diff --git a/tests/unit/storage_clients/_redis/test_redis_rq_client.py b/tests/unit/storage_clients/_redis/test_redis_rq_client.py index 3f878ea981..26191d8189 100644 --- a/tests/unit/storage_clients/_redis/test_redis_rq_client.py +++ b/tests/unit/storage_clients/_redis/test_redis_rq_client.py @@ -60,7 +60,7 @@ async def test_base_keys_creation(rq_client: RedisRequestQueueClient) -> None: metadata_data = await await_redis_response(rq_client.redis.json().get('request_queues:test_request_queue:metadata')) assert isinstance(metadata_data, dict) - assert metadata_data['id'] == metadata.id 
# type: ignore[unreachable] # py-json typing is broken + assert metadata_data['id'] == metadata.id async def test_request_records_persistence(rq_client: RedisRequestQueueClient) -> None: diff --git a/tests/unit/storages/test_dataset.py b/tests/unit/storages/test_dataset.py index bb3da626b2..a95efff7f6 100644 --- a/tests/unit/storages/test_dataset.py +++ b/tests/unit/storages/test_dataset.py @@ -472,10 +472,7 @@ async def test_export_to_csv( async def test_export_to_invalid_content_type(dataset: Dataset) -> None: """Test exporting dataset with invalid content type raises error.""" with pytest.raises(ValueError, match=r'Unsupported content type'): - await dataset.export_to( - key='invalid_export', - content_type='invalid', # type: ignore[call-overload] # Intentionally invalid content type - ) + await dataset.export_to(key='invalid_export', content_type='invalid') # ty: ignore[no-matching-overload] async def test_export_with_multiple_kwargs(dataset: Dataset, tmp_path: Path) -> None: diff --git a/uv.lock b/uv.lock index 483754613e..ac92e3a8f1 100644 --- a/uv.lock +++ b/uv.lock @@ -811,7 +811,6 @@ dev = [ { name = "build" }, { name = "dycw-pytest-only" }, { name = "fakeredis", extra = ["json", "lua", "probabilistic"] }, - { name = "mypy" }, { name = "pre-commit" }, { name = "proxy-py" }, { name = "pydoc-markdown" }, @@ -822,6 +821,7 @@ dev = [ { name = "pytest-xdist" }, { name = "ruff" }, { name = "setuptools" }, + { name = "ty" }, { name = "types-beautifulsoup4" }, { name = "types-cachetools" }, { name = "types-colorama" }, @@ -886,7 +886,6 @@ dev = [ { name = "build", specifier = "<2.0.0" }, { name = "dycw-pytest-only", specifier = "<3.0.0" }, { name = "fakeredis", extras = ["probabilistic", "json", "lua"], specifier = "<3.0.0" }, - { name = "mypy", specifier = "~=1.19.0" }, { name = "pre-commit", specifier = "<5.0.0" }, { name = "proxy-py", specifier = "<3.0.0" }, { name = "pydoc-markdown", specifier = "<5.0.0" }, @@ -897,6 +896,7 @@ dev = [ { name = "pytest-xdist", specifier = "<4.0.0" }, { name = "ruff", specifier = "~=0.14.0" }, { name = "setuptools" }, + { name = "ty", specifier = "~=0.0.0" }, { name = "types-beautifulsoup4", specifier = "<5.0.0" }, { name = "types-cachetools", specifier = "<7.0.0" }, { name = "types-colorama", specifier = "<1.0.0" }, @@ -1630,79 +1630,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/35/5a/73ecb3d82f8615f32ccdadeb9356726d6cae3a4bbc840b437ceb95708063/jsonpath_ng-1.7.0-py3-none-any.whl", hash = "sha256:f3d7f9e848cba1b6da28c55b1c26ff915dc9e0b1ba7e752a53d6da8d5cbd00b6", size = 30105, upload-time = "2024-11-20T17:58:30.418Z" }, ] -[[package]] -name = "librt" -version = "0.7.3" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b3/d9/6f3d3fcf5e5543ed8a60cc70fa7d50508ed60b8a10e9af6d2058159ab54e/librt-0.7.3.tar.gz", hash = "sha256:3ec50cf65235ff5c02c5b747748d9222e564ad48597122a361269dd3aa808798", size = 144549, upload-time = "2025-12-06T19:04:45.553Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/4d/66/79a14e672256ef58144a24eb49adb338ec02de67ff4b45320af6504682ab/librt-0.7.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2682162855a708e3270eba4b92026b93f8257c3e65278b456c77631faf0f4f7a", size = 54707, upload-time = "2025-12-06T19:03:10.881Z" }, - { url = "https://files.pythonhosted.org/packages/58/fa/b709c65a9d5eab85f7bcfe0414504d9775aaad6e78727a0327e175474caa/librt-0.7.3-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:440c788f707c061d237c1e83edf6164ff19f5c0f823a3bf054e88804ebf971ec", size = 56670, upload-time = "2025-12-06T19:03:12.107Z" }, - { url = "https://files.pythonhosted.org/packages/3a/56/0685a0772ec89ddad4c00e6b584603274c3d818f9a68e2c43c4eb7b39ee9/librt-0.7.3-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:399938edbd3d78339f797d685142dd8a623dfaded023cf451033c85955e4838a", size = 161045, upload-time = "2025-12-06T19:03:13.444Z" }, - { url = "https://files.pythonhosted.org/packages/4e/d9/863ada0c5ce48aefb89df1555e392b2209fcb6daee4c153c031339b9a89b/librt-0.7.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1975eda520957c6e0eb52d12968dd3609ffb7eef05d4223d097893d6daf1d8a7", size = 169532, upload-time = "2025-12-06T19:03:14.699Z" }, - { url = "https://files.pythonhosted.org/packages/68/a0/71da6c8724fd16c31749905ef1c9e11de206d9301b5be984bf2682b4efb3/librt-0.7.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f9da128d0edf990cf0d2ca011b02cd6f639e79286774bd5b0351245cbb5a6e51", size = 183277, upload-time = "2025-12-06T19:03:16.446Z" }, - { url = "https://files.pythonhosted.org/packages/8c/bf/9c97bf2f8338ba1914de233ea312bba2bbd7c59f43f807b3e119796bab18/librt-0.7.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e19acfde38cb532a560b98f473adc741c941b7a9bc90f7294bc273d08becb58b", size = 179045, upload-time = "2025-12-06T19:03:17.838Z" }, - { url = "https://files.pythonhosted.org/packages/b3/b1/ceea067f489e904cb4ddcca3c9b06ba20229bc3fa7458711e24a5811f162/librt-0.7.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:7b4f57f7a0c65821c5441d98c47ff7c01d359b1e12328219709bdd97fdd37f90", size = 173521, upload-time = "2025-12-06T19:03:19.17Z" }, - { url = "https://files.pythonhosted.org/packages/7a/41/6cb18f5da9c89ed087417abb0127a445a50ad4eaf1282ba5b52588187f47/librt-0.7.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:256793988bff98040de23c57cf36e1f4c2f2dc3dcd17537cdac031d3b681db71", size = 193592, upload-time = "2025-12-06T19:03:20.637Z" }, - { url = "https://files.pythonhosted.org/packages/4c/3c/fcef208746584e7c78584b7aedc617130c4a4742cb8273361bbda8b183b5/librt-0.7.3-cp310-cp310-win32.whl", hash = "sha256:fcb72249ac4ea81a7baefcbff74df7029c3cb1cf01a711113fa052d563639c9c", size = 47201, upload-time = "2025-12-06T19:03:21.764Z" }, - { url = "https://files.pythonhosted.org/packages/c4/bf/d8a6c35d1b2b789a4df9b3ddb1c8f535ea373fde2089698965a8f0d62138/librt-0.7.3-cp310-cp310-win_amd64.whl", hash = "sha256:4887c29cadbdc50640179e3861c276325ff2986791e6044f73136e6e798ff806", size = 54371, upload-time = "2025-12-06T19:03:23.231Z" }, - { url = "https://files.pythonhosted.org/packages/21/e6/f6391f5c6f158d31ed9af6bd1b1bcd3ffafdea1d816bc4219d0d90175a7f/librt-0.7.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:687403cced6a29590e6be6964463835315905221d797bc5c934a98750fe1a9af", size = 54711, upload-time = "2025-12-06T19:03:24.6Z" }, - { url = "https://files.pythonhosted.org/packages/ab/1b/53c208188c178987c081560a0fcf36f5ca500d5e21769596c845ef2f40d4/librt-0.7.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:24d70810f6e2ea853ff79338001533716b373cc0f63e2a0be5bc96129edb5fb5", size = 56664, upload-time = "2025-12-06T19:03:25.969Z" }, - { url = "https://files.pythonhosted.org/packages/cb/5c/d9da832b9a1e5f8366e8a044ec80217945385b26cb89fd6f94bfdc7d80b0/librt-0.7.3-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = 
"sha256:bf8c7735fbfc0754111f00edda35cf9e98a8d478de6c47b04eaa9cef4300eaa7", size = 161701, upload-time = "2025-12-06T19:03:27.035Z" }, - { url = "https://files.pythonhosted.org/packages/20/aa/1e0a7aba15e78529dd21f233076b876ee58c8b8711b1793315bdd3b263b0/librt-0.7.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e32d43610dff472eab939f4d7fbdd240d1667794192690433672ae22d7af8445", size = 171040, upload-time = "2025-12-06T19:03:28.482Z" }, - { url = "https://files.pythonhosted.org/packages/69/46/3cfa325c1c2bc25775ec6ec1718cfbec9cff4ac767d37d2d3a2d1cc6f02c/librt-0.7.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:adeaa886d607fb02563c1f625cf2ee58778a2567c0c109378da8f17ec3076ad7", size = 184720, upload-time = "2025-12-06T19:03:29.599Z" }, - { url = "https://files.pythonhosted.org/packages/99/bb/e4553433d7ac47f4c75d0a7e59b13aee0e08e88ceadbee356527a9629b0a/librt-0.7.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:572a24fc5958c61431da456a0ef1eeea6b4989d81eeb18b8e5f1f3077592200b", size = 180731, upload-time = "2025-12-06T19:03:31.201Z" }, - { url = "https://files.pythonhosted.org/packages/35/89/51cd73006232981a3106d4081fbaa584ac4e27b49bc02266468d3919db03/librt-0.7.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:6488e69d408b492e08bfb68f20c4a899a354b4386a446ecd490baff8d0862720", size = 174565, upload-time = "2025-12-06T19:03:32.818Z" }, - { url = "https://files.pythonhosted.org/packages/42/54/0578a78b587e5aa22486af34239a052c6366835b55fc307bc64380229e3f/librt-0.7.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ed028fc3d41adda916320712838aec289956c89b4f0a361ceadf83a53b4c047a", size = 195247, upload-time = "2025-12-06T19:03:34.434Z" }, - { url = "https://files.pythonhosted.org/packages/b5/0a/ee747cd999753dd9447e50b98fc36ee433b6c841a42dbf6d47b64b32a56e/librt-0.7.3-cp311-cp311-win32.whl", hash = "sha256:2cf9d73499486ce39eebbff5f42452518cc1f88d8b7ea4a711ab32962b176ee2", size = 47514, upload-time = "2025-12-06T19:03:35.959Z" }, - { url = "https://files.pythonhosted.org/packages/ec/af/8b13845178dec488e752878f8e290f8f89e7e34ae1528b70277aa1a6dd1e/librt-0.7.3-cp311-cp311-win_amd64.whl", hash = "sha256:35f1609e3484a649bb80431310ddbec81114cd86648f1d9482bc72a3b86ded2e", size = 54695, upload-time = "2025-12-06T19:03:36.956Z" }, - { url = "https://files.pythonhosted.org/packages/02/7a/ae59578501b1a25850266778f59279f4f3e726acc5c44255bfcb07b4bc57/librt-0.7.3-cp311-cp311-win_arm64.whl", hash = "sha256:550fdbfbf5bba6a2960b27376ca76d6aaa2bd4b1a06c4255edd8520c306fcfc0", size = 48142, upload-time = "2025-12-06T19:03:38.263Z" }, - { url = "https://files.pythonhosted.org/packages/29/90/ed8595fa4e35b6020317b5ea8d226a782dcbac7a997c19ae89fb07a41c66/librt-0.7.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:0fa9ac2e49a6bee56e47573a6786cb635e128a7b12a0dc7851090037c0d397a3", size = 55687, upload-time = "2025-12-06T19:03:39.245Z" }, - { url = "https://files.pythonhosted.org/packages/dd/f6/6a20702a07b41006cb001a759440cb6b5362530920978f64a2b2ae2bf729/librt-0.7.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2e980cf1ed1a2420a6424e2ed884629cdead291686f1048810a817de07b5eb18", size = 57127, upload-time = "2025-12-06T19:03:40.3Z" }, - { url = "https://files.pythonhosted.org/packages/79/f3/b0c4703d5ffe9359b67bb2ccb86c42d4e930a363cfc72262ac3ba53cff3e/librt-0.7.3-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = 
"sha256:e094e445c37c57e9ec612847812c301840239d34ccc5d153a982fa9814478c60", size = 165336, upload-time = "2025-12-06T19:03:41.369Z" }, - { url = "https://files.pythonhosted.org/packages/02/69/3ba05b73ab29ccbe003856232cea4049769be5942d799e628d1470ed1694/librt-0.7.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:aca73d70c3f553552ba9133d4a09e767dcfeee352d8d8d3eb3f77e38a3beb3ed", size = 174237, upload-time = "2025-12-06T19:03:42.44Z" }, - { url = "https://files.pythonhosted.org/packages/22/ad/d7c2671e7bf6c285ef408aa435e9cd3fdc06fd994601e1f2b242df12034f/librt-0.7.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c634a0a6db395fdaba0361aa78395597ee72c3aad651b9a307a3a7eaf5efd67e", size = 189017, upload-time = "2025-12-06T19:03:44.01Z" }, - { url = "https://files.pythonhosted.org/packages/f4/94/d13f57193148004592b618555f296b41d2d79b1dc814ff8b3273a0bf1546/librt-0.7.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a59a69deeb458c858b8fea6acf9e2acd5d755d76cd81a655256bc65c20dfff5b", size = 183983, upload-time = "2025-12-06T19:03:45.834Z" }, - { url = "https://files.pythonhosted.org/packages/02/10/b612a9944ebd39fa143c7e2e2d33f2cb790205e025ddd903fb509a3a3bb3/librt-0.7.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d91e60ac44bbe3a77a67af4a4c13114cbe9f6d540337ce22f2c9eaf7454ca71f", size = 177602, upload-time = "2025-12-06T19:03:46.944Z" }, - { url = "https://files.pythonhosted.org/packages/1f/48/77bc05c4cc232efae6c5592c0095034390992edbd5bae8d6cf1263bb7157/librt-0.7.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:703456146dc2bf430f7832fd1341adac5c893ec3c1430194fdcefba00012555c", size = 199282, upload-time = "2025-12-06T19:03:48.069Z" }, - { url = "https://files.pythonhosted.org/packages/12/aa/05916ccd864227db1ffec2a303ae34f385c6b22d4e7ce9f07054dbcf083c/librt-0.7.3-cp312-cp312-win32.whl", hash = "sha256:b7c1239b64b70be7759554ad1a86288220bbb04d68518b527783c4ad3fb4f80b", size = 47879, upload-time = "2025-12-06T19:03:49.289Z" }, - { url = "https://files.pythonhosted.org/packages/50/92/7f41c42d31ea818b3c4b9cc1562e9714bac3c676dd18f6d5dd3d0f2aa179/librt-0.7.3-cp312-cp312-win_amd64.whl", hash = "sha256:ef59c938f72bdbc6ab52dc50f81d0637fde0f194b02d636987cea2ab30f8f55a", size = 54972, upload-time = "2025-12-06T19:03:50.335Z" }, - { url = "https://files.pythonhosted.org/packages/3f/dc/53582bbfb422311afcbc92adb75711f04e989cec052f08ec0152fbc36c9c/librt-0.7.3-cp312-cp312-win_arm64.whl", hash = "sha256:ff21c554304e8226bf80c3a7754be27c6c3549a9fec563a03c06ee8f494da8fc", size = 48338, upload-time = "2025-12-06T19:03:51.431Z" }, - { url = "https://files.pythonhosted.org/packages/93/7d/e0ce1837dfb452427db556e6d4c5301ba3b22fe8de318379fbd0593759b9/librt-0.7.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:56f2a47beda8409061bc1c865bef2d4bd9ff9255219402c0817e68ab5ad89aed", size = 55742, upload-time = "2025-12-06T19:03:52.459Z" }, - { url = "https://files.pythonhosted.org/packages/be/c0/3564262301e507e1d5cf31c7d84cb12addf0d35e05ba53312494a2eba9a4/librt-0.7.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:14569ac5dd38cfccf0a14597a88038fb16811a6fede25c67b79c6d50fc2c8fdc", size = 57163, upload-time = "2025-12-06T19:03:53.516Z" }, - { url = "https://files.pythonhosted.org/packages/be/ac/245e72b7e443d24a562f6047563c7f59833384053073ef9410476f68505b/librt-0.7.3-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = 
"sha256:6038ccbd5968325a5d6fd393cf6e00b622a8de545f0994b89dd0f748dcf3e19e", size = 165840, upload-time = "2025-12-06T19:03:54.918Z" }, - { url = "https://files.pythonhosted.org/packages/98/af/587e4491f40adba066ba39a450c66bad794c8d92094f936a201bfc7c2b5f/librt-0.7.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d39079379a9a28e74f4d57dc6357fa310a1977b51ff12239d7271ec7e71d67f5", size = 174827, upload-time = "2025-12-06T19:03:56.082Z" }, - { url = "https://files.pythonhosted.org/packages/78/21/5b8c60ea208bc83dd00421022a3874330685d7e856404128dc3728d5d1af/librt-0.7.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8837d5a52a2d7aa9f4c3220a8484013aed1d8ad75240d9a75ede63709ef89055", size = 189612, upload-time = "2025-12-06T19:03:57.507Z" }, - { url = "https://files.pythonhosted.org/packages/da/2f/8b819169ef696421fb81cd04c6cdf225f6e96f197366001e9d45180d7e9e/librt-0.7.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:399bbd7bcc1633c3e356ae274a1deb8781c7bf84d9c7962cc1ae0c6e87837292", size = 184584, upload-time = "2025-12-06T19:03:58.686Z" }, - { url = "https://files.pythonhosted.org/packages/6c/fc/af9d225a9395b77bd7678362cb055d0b8139c2018c37665de110ca388022/librt-0.7.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:8d8cf653e798ee4c4e654062b633db36984a1572f68c3aa25e364a0ddfbbb910", size = 178269, upload-time = "2025-12-06T19:03:59.769Z" }, - { url = "https://files.pythonhosted.org/packages/6c/d8/7b4fa1683b772966749d5683aa3fd605813defffe157833a8fa69cc89207/librt-0.7.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:2f03484b54bf4ae80ab2e504a8d99d20d551bfe64a7ec91e218010b467d77093", size = 199852, upload-time = "2025-12-06T19:04:00.901Z" }, - { url = "https://files.pythonhosted.org/packages/77/e8/4598413aece46ca38d9260ef6c51534bd5f34b5c21474fcf210ce3a02123/librt-0.7.3-cp313-cp313-win32.whl", hash = "sha256:44b3689b040df57f492e02cd4f0bacd1b42c5400e4b8048160c9d5e866de8abe", size = 47936, upload-time = "2025-12-06T19:04:02.054Z" }, - { url = "https://files.pythonhosted.org/packages/af/80/ac0e92d5ef8c6791b3e2c62373863827a279265e0935acdf807901353b0e/librt-0.7.3-cp313-cp313-win_amd64.whl", hash = "sha256:6b407c23f16ccc36614c136251d6b32bf30de7a57f8e782378f1107be008ddb0", size = 54965, upload-time = "2025-12-06T19:04:03.224Z" }, - { url = "https://files.pythonhosted.org/packages/f1/fd/042f823fcbff25c1449bb4203a29919891ca74141b68d3a5f6612c4ce283/librt-0.7.3-cp313-cp313-win_arm64.whl", hash = "sha256:abfc57cab3c53c4546aee31859ef06753bfc136c9d208129bad23e2eca39155a", size = 48350, upload-time = "2025-12-06T19:04:04.234Z" }, - { url = "https://files.pythonhosted.org/packages/3e/ae/c6ecc7bb97134a71b5241e8855d39964c0e5f4d96558f0d60593892806d2/librt-0.7.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:120dd21d46ff875e849f1aae19346223cf15656be489242fe884036b23d39e93", size = 55175, upload-time = "2025-12-06T19:04:05.308Z" }, - { url = "https://files.pythonhosted.org/packages/cf/bc/2cc0cb0ab787b39aa5c7645cd792433c875982bdf12dccca558b89624594/librt-0.7.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1617bea5ab31266e152871208502ee943cb349c224846928a1173c864261375e", size = 56881, upload-time = "2025-12-06T19:04:06.674Z" }, - { url = "https://files.pythonhosted.org/packages/8e/87/397417a386190b70f5bf26fcedbaa1515f19dce33366e2684c6b7ee83086/librt-0.7.3-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = 
"sha256:93b2a1f325fefa1482516ced160c8c7b4b8d53226763fa6c93d151fa25164207", size = 163710, upload-time = "2025-12-06T19:04:08.437Z" }, - { url = "https://files.pythonhosted.org/packages/c9/37/7338f85b80e8a17525d941211451199845093ca242b32efbf01df8531e72/librt-0.7.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f3d4801db8354436fd3936531e7f0e4feb411f62433a6b6cb32bb416e20b529f", size = 172471, upload-time = "2025-12-06T19:04:10.124Z" }, - { url = "https://files.pythonhosted.org/packages/3b/e0/741704edabbfae2c852fedc1b40d9ed5a783c70ed3ed8e4fe98f84b25d13/librt-0.7.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11ad45122bbed42cfc8b0597450660126ef28fd2d9ae1a219bc5af8406f95678", size = 186804, upload-time = "2025-12-06T19:04:11.586Z" }, - { url = "https://files.pythonhosted.org/packages/f4/d1/0a82129d6ba242f3be9af34815be089f35051bc79619f5c27d2c449ecef6/librt-0.7.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:6b4e7bff1d76dd2b46443078519dc75df1b5e01562345f0bb740cea5266d8218", size = 181817, upload-time = "2025-12-06T19:04:12.802Z" }, - { url = "https://files.pythonhosted.org/packages/4f/32/704f80bcf9979c68d4357c46f2af788fbf9d5edda9e7de5786ed2255e911/librt-0.7.3-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:d86f94743a11873317094326456b23f8a5788bad9161fd2f0e52088c33564620", size = 175602, upload-time = "2025-12-06T19:04:14.004Z" }, - { url = "https://files.pythonhosted.org/packages/f7/6d/4355cfa0fae0c062ba72f541d13db5bc575770125a7ad3d4f46f4109d305/librt-0.7.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:754a0d09997095ad764ccef050dd5bf26cbf457aab9effcba5890dad081d879e", size = 196497, upload-time = "2025-12-06T19:04:15.487Z" }, - { url = "https://files.pythonhosted.org/packages/2e/eb/ac6d8517d44209e5a712fde46f26d0055e3e8969f24d715f70bd36056230/librt-0.7.3-cp314-cp314-win32.whl", hash = "sha256:fbd7351d43b80d9c64c3cfcb50008f786cc82cba0450e8599fdd64f264320bd3", size = 44678, upload-time = "2025-12-06T19:04:16.688Z" }, - { url = "https://files.pythonhosted.org/packages/e9/93/238f026d141faf9958da588c761a0812a1a21c98cc54a76f3608454e4e59/librt-0.7.3-cp314-cp314-win_amd64.whl", hash = "sha256:d376a35c6561e81d2590506804b428fc1075fcc6298fc5bb49b771534c0ba010", size = 51689, upload-time = "2025-12-06T19:04:17.726Z" }, - { url = "https://files.pythonhosted.org/packages/52/44/43f462ad9dcf9ed7d3172fe2e30d77b980956250bd90e9889a9cca93df2a/librt-0.7.3-cp314-cp314-win_arm64.whl", hash = "sha256:cbdb3f337c88b43c3b49ca377731912c101178be91cb5071aac48faa898e6f8e", size = 44662, upload-time = "2025-12-06T19:04:18.771Z" }, - { url = "https://files.pythonhosted.org/packages/1d/35/fed6348915f96b7323241de97f26e2af481e95183b34991df12fd5ce31b1/librt-0.7.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9f0e0927efe87cd42ad600628e595a1a0aa1c64f6d0b55f7e6059079a428641a", size = 57347, upload-time = "2025-12-06T19:04:19.812Z" }, - { url = "https://files.pythonhosted.org/packages/9a/f2/045383ccc83e3fea4fba1b761796584bc26817b6b2efb6b8a6731431d16f/librt-0.7.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:020c6db391268bcc8ce75105cb572df8cb659a43fd347366aaa407c366e5117a", size = 59223, upload-time = "2025-12-06T19:04:20.862Z" }, - { url = "https://files.pythonhosted.org/packages/77/3f/c081f8455ab1d7f4a10dbe58463ff97119272ff32494f21839c3b9029c2c/librt-0.7.3-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = 
"sha256:7af7785f5edd1f418da09a8cdb9ec84b0213e23d597413e06525340bcce1ea4f", size = 183861, upload-time = "2025-12-06T19:04:21.963Z" }, - { url = "https://files.pythonhosted.org/packages/1d/f5/73c5093c22c31fbeaebc25168837f05ebfd8bf26ce00855ef97a5308f36f/librt-0.7.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8ccadf260bb46a61b9c7e89e2218f6efea9f3eeaaab4e3d1f58571890e54858e", size = 194594, upload-time = "2025-12-06T19:04:23.14Z" }, - { url = "https://files.pythonhosted.org/packages/78/b8/d5f17d4afe16612a4a94abfded94c16c5a033f183074fb130dfe56fc1a42/librt-0.7.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d9883b2d819ce83f87ba82a746c81d14ada78784db431e57cc9719179847376e", size = 206759, upload-time = "2025-12-06T19:04:24.328Z" }, - { url = "https://files.pythonhosted.org/packages/36/2e/021765c1be85ee23ffd5b5b968bb4cba7526a4db2a0fc27dcafbdfc32da7/librt-0.7.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:59cb0470612d21fa1efddfa0dd710756b50d9c7fb6c1236bbf8ef8529331dc70", size = 203210, upload-time = "2025-12-06T19:04:25.544Z" }, - { url = "https://files.pythonhosted.org/packages/77/f0/9923656e42da4fd18c594bd08cf6d7e152d4158f8b808e210d967f0dcceb/librt-0.7.3-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:1fe603877e1865b5fd047a5e40379509a4a60204aa7aa0f72b16f7a41c3f0712", size = 196708, upload-time = "2025-12-06T19:04:26.725Z" }, - { url = "https://files.pythonhosted.org/packages/fc/0b/0708b886ac760e64d6fbe7e16024e4be3ad1a3629d19489a97e9cf4c3431/librt-0.7.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5460d99ed30f043595bbdc888f542bad2caeb6226b01c33cda3ae444e8f82d42", size = 217212, upload-time = "2025-12-06T19:04:27.892Z" }, - { url = "https://files.pythonhosted.org/packages/5d/7f/12a73ff17bca4351e73d585dd9ebf46723c4a8622c4af7fe11a2e2d011ff/librt-0.7.3-cp314-cp314t-win32.whl", hash = "sha256:d09f677693328503c9e492e33e9601464297c01f9ebd966ea8fc5308f3069bfd", size = 45586, upload-time = "2025-12-06T19:04:29.116Z" }, - { url = "https://files.pythonhosted.org/packages/e2/df/8decd032ac9b995e4f5606cde783711a71094128d88d97a52e397daf2c89/librt-0.7.3-cp314-cp314t-win_amd64.whl", hash = "sha256:25711f364c64cab2c910a0247e90b51421e45dbc8910ceeb4eac97a9e132fc6f", size = 53002, upload-time = "2025-12-06T19:04:30.173Z" }, - { url = "https://files.pythonhosted.org/packages/de/0c/6605b6199de8178afe7efc77ca1d8e6db00453bc1d3349d27605c0f42104/librt-0.7.3-cp314-cp314t-win_arm64.whl", hash = "sha256:a9f9b661f82693eb56beb0605156c7fca57f535704ab91837405913417d6990b", size = 45647, upload-time = "2025-12-06T19:04:31.302Z" }, -] - [[package]] name = "lupa" version = "2.6" @@ -2154,52 +2081,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b7/da/7d22601b625e241d4f23ef1ebff8acfc60da633c9e7e7922e24d10f592b3/multidict-6.7.0-py3-none-any.whl", hash = "sha256:394fc5c42a333c9ffc3e421a4c85e08580d990e08b99f6bf35b4132114c5dcb3", size = 12317, upload-time = "2025-10-06T14:52:29.272Z" }, ] -[[package]] -name = "mypy" -version = "1.19.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "librt" }, - { name = "mypy-extensions" }, - { name = "pathspec" }, - { name = "tomli", marker = "python_full_version < '3.11'" }, - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/f9/b5/b58cdc25fadd424552804bf410855d52324183112aa004f0732c5f6324cf/mypy-1.19.0.tar.gz", hash = 
"sha256:f6b874ca77f733222641e5c46e4711648c4037ea13646fd0cdc814c2eaec2528", size = 3579025, upload-time = "2025-11-28T15:49:01.26Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/98/8f/55fb488c2b7dabd76e3f30c10f7ab0f6190c1fcbc3e97b1e588ec625bbe2/mypy-1.19.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6148ede033982a8c5ca1143de34c71836a09f105068aaa8b7d5edab2b053e6c8", size = 13093239, upload-time = "2025-11-28T15:45:11.342Z" }, - { url = "https://files.pythonhosted.org/packages/72/1b/278beea978456c56b3262266274f335c3ba5ff2c8108b3b31bec1ffa4c1d/mypy-1.19.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a9ac09e52bb0f7fb912f5d2a783345c72441a08ef56ce3e17c1752af36340a39", size = 12156128, upload-time = "2025-11-28T15:46:02.566Z" }, - { url = "https://files.pythonhosted.org/packages/21/f8/e06f951902e136ff74fd7a4dc4ef9d884faeb2f8eb9c49461235714f079f/mypy-1.19.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:11f7254c15ab3f8ed68f8e8f5cbe88757848df793e31c36aaa4d4f9783fd08ab", size = 12753508, upload-time = "2025-11-28T15:44:47.538Z" }, - { url = "https://files.pythonhosted.org/packages/67/5a/d035c534ad86e09cee274d53cf0fd769c0b29ca6ed5b32e205be3c06878c/mypy-1.19.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:318ba74f75899b0e78b847d8c50821e4c9637c79d9a59680fc1259f29338cb3e", size = 13507553, upload-time = "2025-11-28T15:44:39.26Z" }, - { url = "https://files.pythonhosted.org/packages/6a/17/c4a5498e00071ef29e483a01558b285d086825b61cf1fb2629fbdd019d94/mypy-1.19.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cf7d84f497f78b682edd407f14a7b6e1a2212b433eedb054e2081380b7395aa3", size = 13792898, upload-time = "2025-11-28T15:44:31.102Z" }, - { url = "https://files.pythonhosted.org/packages/67/f6/bb542422b3ee4399ae1cdc463300d2d91515ab834c6233f2fd1d52fa21e0/mypy-1.19.0-cp310-cp310-win_amd64.whl", hash = "sha256:c3385246593ac2b97f155a0e9639be906e73534630f663747c71908dfbf26134", size = 10048835, upload-time = "2025-11-28T15:48:15.744Z" }, - { url = "https://files.pythonhosted.org/packages/0f/d2/010fb171ae5ac4a01cc34fbacd7544531e5ace95c35ca166dd8fd1b901d0/mypy-1.19.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a31e4c28e8ddb042c84c5e977e28a21195d086aaffaf08b016b78e19c9ef8106", size = 13010563, upload-time = "2025-11-28T15:48:23.975Z" }, - { url = "https://files.pythonhosted.org/packages/41/6b/63f095c9f1ce584fdeb595d663d49e0980c735a1d2004720ccec252c5d47/mypy-1.19.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:34ec1ac66d31644f194b7c163d7f8b8434f1b49719d403a5d26c87fff7e913f7", size = 12077037, upload-time = "2025-11-28T15:47:51.582Z" }, - { url = "https://files.pythonhosted.org/packages/d7/83/6cb93d289038d809023ec20eb0b48bbb1d80af40511fa077da78af6ff7c7/mypy-1.19.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cb64b0ba5980466a0f3f9990d1c582bcab8db12e29815ecb57f1408d99b4bff7", size = 12680255, upload-time = "2025-11-28T15:46:57.628Z" }, - { url = "https://files.pythonhosted.org/packages/99/db/d217815705987d2cbace2edd9100926196d6f85bcb9b5af05058d6e3c8ad/mypy-1.19.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:120cffe120cca5c23c03c77f84abc0c14c5d2e03736f6c312480020082f1994b", size = 13421472, upload-time = "2025-11-28T15:47:59.655Z" }, - { url = 
"https://files.pythonhosted.org/packages/4e/51/d2beaca7c497944b07594f3f8aad8d2f0e8fc53677059848ae5d6f4d193e/mypy-1.19.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:7a500ab5c444268a70565e374fc803972bfd1f09545b13418a5174e29883dab7", size = 13651823, upload-time = "2025-11-28T15:45:29.318Z" }, - { url = "https://files.pythonhosted.org/packages/aa/d1/7883dcf7644db3b69490f37b51029e0870aac4a7ad34d09ceae709a3df44/mypy-1.19.0-cp311-cp311-win_amd64.whl", hash = "sha256:c14a98bc63fd867530e8ec82f217dae29d0550c86e70debc9667fff1ec83284e", size = 10049077, upload-time = "2025-11-28T15:45:39.818Z" }, - { url = "https://files.pythonhosted.org/packages/11/7e/1afa8fb188b876abeaa14460dc4983f909aaacaa4bf5718c00b2c7e0b3d5/mypy-1.19.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:0fb3115cb8fa7c5f887c8a8d81ccdcb94cff334684980d847e5a62e926910e1d", size = 13207728, upload-time = "2025-11-28T15:46:26.463Z" }, - { url = "https://files.pythonhosted.org/packages/b2/13/f103d04962bcbefb1644f5ccb235998b32c337d6c13145ea390b9da47f3e/mypy-1.19.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f3e19e3b897562276bb331074d64c076dbdd3e79213f36eed4e592272dabd760", size = 12202945, upload-time = "2025-11-28T15:48:49.143Z" }, - { url = "https://files.pythonhosted.org/packages/e4/93/a86a5608f74a22284a8ccea8592f6e270b61f95b8588951110ad797c2ddd/mypy-1.19.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b9d491295825182fba01b6ffe2c6fe4e5a49dbf4e2bb4d1217b6ced3b4797bc6", size = 12718673, upload-time = "2025-11-28T15:47:37.193Z" }, - { url = "https://files.pythonhosted.org/packages/3d/58/cf08fff9ced0423b858f2a7495001fda28dc058136818ee9dffc31534ea9/mypy-1.19.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6016c52ab209919b46169651b362068f632efcd5eb8ef9d1735f6f86da7853b2", size = 13608336, upload-time = "2025-11-28T15:48:32.625Z" }, - { url = "https://files.pythonhosted.org/packages/64/ed/9c509105c5a6d4b73bb08733102a3ea62c25bc02c51bca85e3134bf912d3/mypy-1.19.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f188dcf16483b3e59f9278c4ed939ec0254aa8a60e8fc100648d9ab5ee95a431", size = 13833174, upload-time = "2025-11-28T15:45:48.091Z" }, - { url = "https://files.pythonhosted.org/packages/cd/71/01939b66e35c6f8cb3e6fdf0b657f0fd24de2f8ba5e523625c8e72328208/mypy-1.19.0-cp312-cp312-win_amd64.whl", hash = "sha256:0e3c3d1e1d62e678c339e7ade72746a9e0325de42cd2cccc51616c7b2ed1a018", size = 10112208, upload-time = "2025-11-28T15:46:41.702Z" }, - { url = "https://files.pythonhosted.org/packages/cb/0d/a1357e6bb49e37ce26fcf7e3cc55679ce9f4ebee0cd8b6ee3a0e301a9210/mypy-1.19.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7686ed65dbabd24d20066f3115018d2dce030d8fa9db01aa9f0a59b6813e9f9e", size = 13191993, upload-time = "2025-11-28T15:47:22.336Z" }, - { url = "https://files.pythonhosted.org/packages/5d/75/8e5d492a879ec4490e6ba664b5154e48c46c85b5ac9785792a5ec6a4d58f/mypy-1.19.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:fd4a985b2e32f23bead72e2fb4bbe5d6aceee176be471243bd831d5b2644672d", size = 12174411, upload-time = "2025-11-28T15:44:55.492Z" }, - { url = "https://files.pythonhosted.org/packages/71/31/ad5dcee9bfe226e8eaba777e9d9d251c292650130f0450a280aec3485370/mypy-1.19.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fc51a5b864f73a3a182584b1ac75c404396a17eced54341629d8bdcb644a5bba", size = 12727751, upload-time = "2025-11-28T15:44:14.169Z" }, - { url = 
"https://files.pythonhosted.org/packages/77/06/b6b8994ce07405f6039701f4b66e9d23f499d0b41c6dd46ec28f96d57ec3/mypy-1.19.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:37af5166f9475872034b56c5efdcf65ee25394e9e1d172907b84577120714364", size = 13593323, upload-time = "2025-11-28T15:46:34.699Z" }, - { url = "https://files.pythonhosted.org/packages/68/b1/126e274484cccdf099a8e328d4fda1c7bdb98a5e888fa6010b00e1bbf330/mypy-1.19.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:510c014b722308c9bd377993bcbf9a07d7e0692e5fa8fc70e639c1eb19fc6bee", size = 13818032, upload-time = "2025-11-28T15:46:18.286Z" }, - { url = "https://files.pythonhosted.org/packages/f8/56/53a8f70f562dfc466c766469133a8a4909f6c0012d83993143f2a9d48d2d/mypy-1.19.0-cp313-cp313-win_amd64.whl", hash = "sha256:cabbee74f29aa9cd3b444ec2f1e4fa5a9d0d746ce7567a6a609e224429781f53", size = 10120644, upload-time = "2025-11-28T15:47:43.99Z" }, - { url = "https://files.pythonhosted.org/packages/b0/f4/7751f32f56916f7f8c229fe902cbdba3e4dd3f3ea9e8b872be97e7fc546d/mypy-1.19.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:f2e36bed3c6d9b5f35d28b63ca4b727cb0228e480826ffc8953d1892ddc8999d", size = 13185236, upload-time = "2025-11-28T15:45:20.696Z" }, - { url = "https://files.pythonhosted.org/packages/35/31/871a9531f09e78e8d145032355890384f8a5b38c95a2c7732d226b93242e/mypy-1.19.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:a18d8abdda14035c5718acb748faec09571432811af129bf0d9e7b2d6699bf18", size = 12213902, upload-time = "2025-11-28T15:46:10.117Z" }, - { url = "https://files.pythonhosted.org/packages/58/b8/af221910dd40eeefa2077a59107e611550167b9994693fc5926a0b0f87c0/mypy-1.19.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f75e60aca3723a23511948539b0d7ed514dda194bc3755eae0bfc7a6b4887aa7", size = 12738600, upload-time = "2025-11-28T15:44:22.521Z" }, - { url = "https://files.pythonhosted.org/packages/11/9f/c39e89a3e319c1d9c734dedec1183b2cc3aefbab066ec611619002abb932/mypy-1.19.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8f44f2ae3c58421ee05fe609160343c25f70e3967f6e32792b5a78006a9d850f", size = 13592639, upload-time = "2025-11-28T15:48:08.55Z" }, - { url = "https://files.pythonhosted.org/packages/97/6d/ffaf5f01f5e284d9033de1267e6c1b8f3783f2cf784465378a86122e884b/mypy-1.19.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:63ea6a00e4bd6822adbfc75b02ab3653a17c02c4347f5bb0cf1d5b9df3a05835", size = 13799132, upload-time = "2025-11-28T15:47:06.032Z" }, - { url = "https://files.pythonhosted.org/packages/fe/b0/c33921e73aaa0106224e5a34822411bea38046188eb781637f5a5b07e269/mypy-1.19.0-cp314-cp314-win_amd64.whl", hash = "sha256:3ad925b14a0bb99821ff6f734553294aa6a3440a8cb082fe1f5b84dfb662afb1", size = 10269832, upload-time = "2025-11-28T15:47:29.392Z" }, - { url = "https://files.pythonhosted.org/packages/09/0e/fe228ed5aeab470c6f4eb82481837fadb642a5aa95cc8215fd2214822c10/mypy-1.19.0-py3-none-any.whl", hash = "sha256:0c01c99d626380752e527d5ce8e69ffbba2046eb8a060db0329690849cf9b6f9", size = 2469714, upload-time = "2025-11-28T15:45:33.22Z" }, -] - [[package]] name = "mypy-extensions" version = "1.1.0" @@ -3757,6 +3638,31 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/c7/18/c86eb8e0202e32dd3df50d43d7ff9854f8e0603945ff398974c1d91ac1ef/tomli_w-1.2.0-py3-none-any.whl", hash = "sha256:188306098d013b691fcadc011abd66727d3c414c571bb01b1a174ba8c983cf90", size = 6675, upload-time = 
"2025-01-15T12:07:22.074Z" }, ] +[[package]] +name = "ty" +version = "0.0.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/47/e5/15b6aceefcd64b53997fe2002b6fa055f0b1afd23ff6fc3f55f3da944530/ty-0.0.2.tar.gz", hash = "sha256:e02dc50b65dc58d6cb8e8b0d563833f81bf03ed8a7d0b15c6396d486489a7e1d", size = 4762024, upload-time = "2025-12-16T20:13:41.07Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6b/86/65d4826677d966cf226662767a4a597ebb4b02c432f413673c8d5d3d1ce8/ty-0.0.2-py3-none-linux_armv6l.whl", hash = "sha256:0954a0e0b6f7e06229dd1da3a9989ee9b881a26047139a88eb7c134c585ad22e", size = 9771409, upload-time = "2025-12-16T20:13:28.964Z" }, + { url = "https://files.pythonhosted.org/packages/d4/bc/6ab06b7c109cec608c24ea182cc8b4714e746a132f70149b759817092665/ty-0.0.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:d6044b491d66933547033cecc87cb7eb599ba026a3ef347285add6b21107a648", size = 9580025, upload-time = "2025-12-16T20:13:34.507Z" }, + { url = "https://files.pythonhosted.org/packages/54/de/d826804e304b2430f17bb27ae15bcf02380e7f67f38b5033047e3d2523e6/ty-0.0.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:fbca7f08e671a35229f6f400d73da92e2dc0a440fba53a74fe8233079a504358", size = 9098660, upload-time = "2025-12-16T20:13:01.278Z" }, + { url = "https://files.pythonhosted.org/packages/b7/8e/5cd87944ceee02bb0826f19ced54e30c6bb971e985a22768f6be6b1a042f/ty-0.0.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3abd61153dac0b93b284d305e6f96085013a25c3a7ab44e988d24f0a5fcce729", size = 9567693, upload-time = "2025-12-16T20:13:12.559Z" }, + { url = "https://files.pythonhosted.org/packages/c6/b1/062aab2c62c5ae01c05d27b97ba022d9ff66f14a3cb9030c5ad1dca797ec/ty-0.0.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:21a9f28caafb5742e7d594104e2fe2ebd64590da31aed4745ae8bc5be67a7b85", size = 9556471, upload-time = "2025-12-16T20:13:07.771Z" }, + { url = "https://files.pythonhosted.org/packages/0e/07/856f6647a9dd6e36560d182d35d3b5fb21eae98a8bfb516cd879d0e509f3/ty-0.0.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d3ec63fd23ab48e0f838fb54a47ec362a972ee80979169a7edfa6f5c5034849d", size = 9971914, upload-time = "2025-12-16T20:13:18.852Z" }, + { url = "https://files.pythonhosted.org/packages/2e/82/c2e3957dbf33a23f793a9239cfd8bd04b6defd999bd0f6e74d6a5afb9f42/ty-0.0.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:e5e2e0293a259c9a53f668c9c13153cc2f1403cb0fe2b886ca054be4ac76517c", size = 10840905, upload-time = "2025-12-16T20:13:37.098Z" }, + { url = "https://files.pythonhosted.org/packages/3b/17/49bd74e3d577e6c88b8074581b7382f532a9d40552cc7c48ceaa83f1d950/ty-0.0.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fd2511ac02a83d0dc45d4570c7e21ec0c919be7a7263bad9914800d0cde47817", size = 10570251, upload-time = "2025-12-16T20:13:10.319Z" }, + { url = "https://files.pythonhosted.org/packages/2b/9b/26741834069722033a1a0963fcbb63ea45925c6697357e64e361753c6166/ty-0.0.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c482bfbfb8ad18b2e62427d02a0c934ac510c414188a3cf00e16b8acc35482f0", size = 10369078, upload-time = "2025-12-16T20:13:20.851Z" }, + { url = "https://files.pythonhosted.org/packages/94/fc/1d34ec891900d9337169ff9f8252fcaa633ae5c4d36b67effd849ed4f9ac/ty-0.0.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb514711eed3f56d7a130d4885f4b5d8e490fdcd2adac098e5cf175573a0dda3", 
size = 10121064, upload-time = "2025-12-16T20:13:23.095Z" }, + { url = "https://files.pythonhosted.org/packages/e5/02/e640325956172355ef8deb9b08d991f229230bf9d07f1dbda8c6665a3a43/ty-0.0.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:b2c37fa26c39e9fbed7c73645ba721968ab44f28b2bfe2f79a4e15965a1c426f", size = 9553817, upload-time = "2025-12-16T20:13:27.057Z" }, + { url = "https://files.pythonhosted.org/packages/35/13/c93d579ece84895da9b0aae5d34d84100bbff63ad9f60c906a533a087175/ty-0.0.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:13b264833ac5f3b214693fca38e380e78ee7327e09beaa5ff2e47d75fcab9692", size = 9577512, upload-time = "2025-12-16T20:13:16.956Z" }, + { url = "https://files.pythonhosted.org/packages/85/53/93ab1570adc799cd9120ea187d5b4c00d821e86eca069943b179fe0d3e83/ty-0.0.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:08658d6dbbf8bdef80c0a77eda56a22ab6737002ba129301b7bbd36bcb7acd75", size = 9692726, upload-time = "2025-12-16T20:13:31.169Z" }, + { url = "https://files.pythonhosted.org/packages/9a/07/5fff5335858a14196776207d231c32e23e48a5c912a7d52c80e7a3fa6f8f/ty-0.0.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:4a21b5b012061cb13d47edfff6be70052694308dba633b4c819b70f840e6c158", size = 10213996, upload-time = "2025-12-16T20:13:14.606Z" }, + { url = "https://files.pythonhosted.org/packages/a0/d3/896b1439ab765c57a8d732f73c105ec41142c417a582600638385c2bee85/ty-0.0.2-py3-none-win32.whl", hash = "sha256:d773fdad5d2b30f26313204e6b191cdd2f41ab440a6c241fdb444f8c6593c288", size = 9204906, upload-time = "2025-12-16T20:13:25.099Z" }, + { url = "https://files.pythonhosted.org/packages/5d/0a/f30981e7d637f78e3d08e77d63b818752d23db1bc4b66f9e82e2cb3d34f8/ty-0.0.2-py3-none-win_amd64.whl", hash = "sha256:d1c9ac78a8aa60d0ce89acdccf56c3cc0fcb2de07f1ecf313754d83518e8e8c5", size = 10066640, upload-time = "2025-12-16T20:13:04.045Z" }, + { url = "https://files.pythonhosted.org/packages/5a/c4/97958503cf62bfb7908d2a77b03b91a20499a7ff405f5a098c4989589f34/ty-0.0.2-py3-none-win_arm64.whl", hash = "sha256:fbdef644ade0cd4420c4ec14b604b7894cefe77bfd8659686ac2f6aba9d1a306", size = 9572022, upload-time = "2025-12-16T20:13:39.189Z" }, +] + [[package]] name = "typeapi" version = "2.3.0"