diff --git a/README.md b/README.md index e13bea1..fe5414c 100644 --- a/README.md +++ b/README.md @@ -8,6 +8,15 @@ A toolkit for building LLM-powered applications and agent loops. uv add ai ``` +AI Gateway usage works with the base package. Direct providers that use an +OpenAI-compatible or Anthropic-compatible adapter load the corresponding +official SDK lazily and require optional extras: + +```bash +uv add "ai[openai]" # OpenAI-compatible providers +uv add "ai[anthropic]" # Anthropic-compatible providers +``` + ```python import ai ``` @@ -61,6 +70,11 @@ model = ai.get_model("openai:gpt-5.4") model = ai.get_model("anthropic:claude-sonnet-4-6") ``` +Provider IDs without a `provider:` prefix route through AI Gateway by default. +Direct OpenAI-compatible providers, including `openai:` and compatible +models.dev provider IDs, require `ai[openai]`. Direct Anthropic-compatible +providers require `ai[anthropic]`. + Structured output: ```python diff --git a/examples/check-examples.py b/examples/check-examples.py index 59da54a..8b0cc2c 100755 --- a/examples/check-examples.py +++ b/examples/check-examples.py @@ -46,6 +46,10 @@ def run_mypy(name: str, directory: Path, extra_deps: list[str]) -> bool: "uv", "run", "--frozen", + "--project", + str(REPO), + "--group", + "dev", "--with-editable", str(REPO), *with_args, diff --git a/examples/run-examples.py b/examples/run-examples.py index 94f6235..47a6b8f 100755 --- a/examples/run-examples.py +++ b/examples/run-examples.py @@ -123,6 +123,8 @@ def _sample_cmd(sample: Sample) -> list[str]: "uv", "run", "--frozen", + "--group", + "dev", "--with-editable", str(REPO), "python", diff --git a/examples/samples/builtin_web_search.py b/examples/samples/builtin_web_search.py index 61a3932..2fd2487 100644 --- a/examples/samples/builtin_web_search.py +++ b/examples/samples/builtin_web_search.py @@ -1,22 +1,14 @@ """Anthropic built-in web search. + https://docs.anthropic.com/en/docs/build-with-claude/tool-use/web-search-tool """ import asyncio import json -import sys import ai from ai.providers.anthropic import tools as anthropic_tools -provider = ai.get_provider("anthropic") - -if not provider.is_configured(): - print(f"[SKIP] {provider.name} provider is not configured") - sys.exit(0) - -model = ai.get_model("anthropic:claude-sonnet-4-6") - messages = [ ai.system_message("Be concise. Cite sources you use. The year is 2026"), ai.user_message( @@ -51,6 +43,13 @@ def format(value: object) -> str: async def main() -> None: + provider = ai.get_provider("anthropic") + if not provider.is_configured(): + print(f"[SKIP] {provider.name} provider is not configured") + return + + model = ai.Model("claude-sonnet-4-6", provider=provider) + async with ai.stream(model, messages, tools=tools) as s: async for event in s: match event: diff --git a/examples/samples/explicit_client.py b/examples/samples/explicit_client.py index 134c74a..8c61464 100644 --- a/examples/samples/explicit_client.py +++ b/examples/samples/explicit_client.py @@ -5,23 +5,23 @@ import ai -# Example for local OpenAI-compatible servers like LM Studio. -provider = ai.get_provider( - "openai", - base_url=os.environ.get("LOCAL_OPENAI_BASE_URL", "http://localhost:1234/v1"), - api_key=os.environ.get("LOCAL_OPENAI_API_KEY", "some-key"), - headers={"X-Custom-Header": "example"}, -) - -model = ai.Model( - os.environ.get("LOCAL_OPENAI_MODEL", "local-model"), - provider=provider, -) - messages = [ai.user_message("Hello!")] async def main() -> None: + # Example for local OpenAI-compatible servers like LM Studio. 
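+    # LOCAL_OPENAI_BASE_URL, LOCAL_OPENAI_API_KEY, and LOCAL_OPENAI_MODEL
+    # override the defaults below; when unset, the sample targets
+    # http://localhost:1234/v1 with a placeholder key.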
+ provider = ai.get_provider( + "openai", + base_url=os.environ.get("LOCAL_OPENAI_BASE_URL", "http://localhost:1234/v1"), + api_key=os.environ.get("LOCAL_OPENAI_API_KEY", "some-key"), + headers={"X-Custom-Header": "example"}, + ) + + model = ai.Model( + os.environ.get("LOCAL_OPENAI_MODEL", "local-model"), + provider=provider, + ) + try: try: await ai.probe(model) diff --git a/pyproject.toml b/pyproject.toml index f260f60..cbf702d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -28,16 +28,17 @@ classifiers = [ ] requires-python = ">=3.12" dependencies = [ - "anthropic>=0.83.0", "httpx>=0.28.1", "mcp>=1.18.0", "modelsdotdev @ git+https://github.com/vercel-labs/modelsdotdev-python", - "openai>=2.14.0", "pydantic>=2.12.5", "typing-extensions>=4.15.0", - "vercel>=0.3.8", ] +[project.optional-dependencies] +anthropic = ["anthropic>=0.83.0"] +openai = ["openai>=2.14.0"] + [build-system] requires = ["hatchling", "uv-dynamic-versioning>=0.7.0"] build-backend = "hatchling.build" @@ -55,11 +56,13 @@ bump = true [dependency-groups] dev = [ + "anthropic>=0.83.0", "python-dotenv>=1.2.1", "pytest>=8.0", "pytest-asyncio>=0.24", "rich>=14.2.0", "mypy>=1.11", + "openai>=2.14.0", "ruff>=0.8", "pyright>=1.1.408", "async-solipsism>=0.9", diff --git a/skills/ai/SKILL.md b/skills/ai/SKILL.md index 5922a36..885a58d 100644 --- a/skills/ai/SKILL.md +++ b/skills/ai/SKILL.md @@ -11,6 +11,10 @@ Use this skill when working with the Python `ai` SDK. uv add ai ``` +Direct OpenAI-compatible and Anthropic-compatible providers require optional +extras: `uv add "ai[openai]"` or `uv add "ai[anthropic]"`. AI Gateway works +with the base package. + ```python import ai ``` diff --git a/src/ai/__init__.py b/src/ai/__init__.py index 818c8f0..88ca7de 100644 --- a/src/ai/__init__.py +++ b/src/ai/__init__.py @@ -24,6 +24,7 @@ AIError, ConfigurationError, HTTPErrorContext, + InstallationError, ProviderAPIError, ProviderAuthenticationError, ProviderBadRequestError, @@ -84,6 +85,7 @@ "AIError", "ConfigurationError", "HTTPErrorContext", + "InstallationError", "ProviderAPIError", "ProviderAuthenticationError", "ProviderBadRequestError", diff --git a/src/ai/errors.py b/src/ai/errors.py index 4dce141..4567e02 100644 --- a/src/ai/errors.py +++ b/src/ai/errors.py @@ -29,6 +29,10 @@ class ConfigurationError(AIError): """Required SDK configuration is missing or invalid.""" +class InstallationError(ConfigurationError): + """Required optional dependency is not installed.""" + + class ProviderError(AIError): """Base class for errors raised by model providers.""" @@ -307,6 +311,7 @@ def _is_retryable_status(status_code: int | None) -> bool: "AIError", "ConfigurationError", "HTTPErrorContext", + "InstallationError", "ProviderAPIError", "ProviderAuthenticationError", "ProviderBadRequestError", diff --git a/src/ai/providers/_optional.py b/src/ai/providers/_optional.py new file mode 100644 index 0000000..52e8e57 --- /dev/null +++ b/src/ai/providers/_optional.py @@ -0,0 +1,23 @@ +"""Optional provider SDK imports.""" + +from __future__ import annotations + +import importlib +from types import ModuleType + +from .. 
import errors as ai_errors + + +def import_optional_sdk(module_name: str, *, provider: str, extra: str) -> ModuleType: + """Import an optional upstream SDK or raise a helpful installation error.""" + root_module = module_name.partition(".")[0] + try: + return importlib.import_module(module_name) + except ModuleNotFoundError as exc: + if exc.name not in {module_name, root_module}: + raise + raise ai_errors.InstallationError( + f"could not import `{root_module}`, which is required to use the " + f"{provider} provider. Install it with `pip install " + f'"ai[{extra}]"` or `uv add "ai[{extra}]"`.' + ) from exc diff --git a/src/ai/providers/anthropic/__init__.py b/src/ai/providers/anthropic/__init__.py index 74ba631..73192c1 100644 --- a/src/ai/providers/anthropic/__init__.py +++ b/src/ai/providers/anthropic/__init__.py @@ -17,7 +17,8 @@ ) as s: ... -The protocol module is loaded lazily by provider methods. +The optional upstream Anthropic SDK is loaded lazily when the provider creates +or uses an SDK client. """ from . import tools diff --git a/src/ai/providers/anthropic/_sdk.py b/src/ai/providers/anthropic/_sdk.py new file mode 100644 index 0000000..e269c3a --- /dev/null +++ b/src/ai/providers/anthropic/_sdk.py @@ -0,0 +1,31 @@ +"""Lazy Anthropic SDK imports.""" + +from __future__ import annotations + +from typing import TYPE_CHECKING, Protocol, cast + +from .. import _optional + +if TYPE_CHECKING: + import anthropic + + +class AnthropicSDK(Protocol): + AsyncAnthropic: type[anthropic.AsyncAnthropic] + AnthropicError: type[anthropic.AnthropicError] + APIConnectionError: type[anthropic.APIConnectionError] + APIError: type[anthropic.APIError] + APIResponseValidationError: type[anthropic.APIResponseValidationError] + APIStatusError: type[anthropic.APIStatusError] + APITimeoutError: type[anthropic.APITimeoutError] + + +def import_sdk(*, provider: str = "anthropic") -> AnthropicSDK: + return cast( + AnthropicSDK, + _optional.import_optional_sdk( + "anthropic", + provider=provider, + extra="anthropic", + ), + ) diff --git a/src/ai/providers/anthropic/errors.py b/src/ai/providers/anthropic/errors.py index 26e4340..bf4f0eb 100644 --- a/src/ai/providers/anthropic/errors.py +++ b/src/ai/providers/anthropic/errors.py @@ -2,12 +2,15 @@ from __future__ import annotations -from typing import Any +from typing import TYPE_CHECKING, Any -import anthropic import httpx from ... import errors as ai_errors +from . 
import _sdk + +if TYPE_CHECKING: + import anthropic def map_error( @@ -17,7 +20,8 @@ def map_error( model_id: str | None = None, ) -> ai_errors.ProviderAPIError: """Map an Anthropic SDK exception to the public provider hierarchy.""" - if isinstance(exc, anthropic.APITimeoutError): + anthropic_sdk = _sdk.import_sdk(provider=provider or "anthropic") + if isinstance(exc, anthropic_sdk.APITimeoutError): return _provider_error( ai_errors.ProviderTimeoutError, exc, @@ -25,7 +29,7 @@ def map_error( model_id=model_id, is_retryable=True, ) - if isinstance(exc, anthropic.APIConnectionError): + if isinstance(exc, anthropic_sdk.APIConnectionError): return _provider_error( ai_errors.ProviderConnectionError, exc, @@ -33,16 +37,20 @@ def map_error( model_id=model_id, is_retryable=True, ) - if isinstance(exc, anthropic.APIResponseValidationError): + if isinstance(exc, anthropic_sdk.APIResponseValidationError): return _provider_error( ai_errors.ProviderResponseError, exc, provider=provider, model_id=model_id, ) - if isinstance(exc, anthropic.APIStatusError): - return _map_status_error(exc, provider=provider, model_id=model_id) - if isinstance(exc, anthropic.APIError): + if isinstance(exc, anthropic_sdk.APIStatusError): + return _map_status_error( + exc, + provider=provider, + model_id=model_id, + ) + if isinstance(exc, anthropic_sdk.APIError): return _provider_error( ai_errors.ProviderAPIError, exc, diff --git a/src/ai/providers/anthropic/protocol.py b/src/ai/providers/anthropic/protocol.py index 3f99683..9e83d63 100644 --- a/src/ai/providers/anthropic/protocol.py +++ b/src/ai/providers/anthropic/protocol.py @@ -4,19 +4,24 @@ Anthropic-compatible providers own the SDK client used by this protocol. """ +from __future__ import annotations + +import base64 import json from collections.abc import AsyncGenerator, Mapping, Sequence -from typing import Any +from typing import TYPE_CHECKING, Any -import anthropic import pydantic from ... import types from ...models import core from ...types import events -from . import errors +from . import _sdk, errors from . import tools as anthropic_tools +if TYPE_CHECKING: + import anthropic + PROVIDER_NAME = "anthropic" # Anthropic block types that carry server-tool results. We track these @@ -159,9 +164,7 @@ def _file_part_to_anthropic( "source": {"type": "url", "url": part.data}, } else: - import base64 as _b64 - - text_data = _b64.b64decode(part.data).decode("utf-8") + text_data = base64.b64decode(part.data).decode("utf-8") return { "type": "document", "source": { @@ -390,6 +393,7 @@ async def stream( ``params`` may be a raw dict of Anthropic SDK kwargs. Provider-specific request options are forwarded without local validation or translation. """ + anthropic_sdk = _sdk.import_sdk(provider=provider) stream_params = _coerce_params(params) system_prompt, anthropic_messages = await _messages_to_anthropic(messages) @@ -573,7 +577,7 @@ async def stream( raw=sdk_usage.model_dump(exclude_none=True) or None, ) yield events.StreamEnd(usage=usage) - except anthropic.AnthropicError as exc: + except anthropic_sdk.AnthropicError as exc: raise errors.map_error( exc, provider=provider, diff --git a/src/ai/providers/anthropic/provider.py b/src/ai/providers/anthropic/provider.py index 9b5a8c1..c06a935 100644 --- a/src/ai/providers/anthropic/provider.py +++ b/src/ai/providers/anthropic/provider.py @@ -6,14 +6,15 @@ from types import ModuleType from typing import TYPE_CHECKING, Any, ClassVar -import anthropic import httpx from ... import errors as ai_errors from .. import base -from . 
import errors +from . import _sdk, errors, protocol +from . import tools as tools_module if TYPE_CHECKING: + import anthropic import modelsdotdev import pydantic @@ -22,7 +23,11 @@ from ...types import messages as messages_ from ...types import tools as tools_ -AnthropicClient = httpx.AsyncClient | anthropic.AsyncAnthropic + AnthropicClient = httpx.AsyncClient | anthropic.AsyncAnthropic + AnthropicSDKClient = anthropic.AsyncAnthropic +else: + AnthropicClient = Any + AnthropicSDKClient = Any _BASE_URL = "https://api.anthropic.com" _BASE_URL_ENV = "ANTHROPIC_BASE_URL" @@ -30,7 +35,7 @@ _ANTHROPIC_VERSION = "2023-06-01" -class AnthropicCompatibleProvider(base.Provider[anthropic.AsyncAnthropic]): +class AnthropicCompatibleProvider(base.Provider[AnthropicSDKClient]): """Callable provider for Anthropic-compatible APIs.""" handles: ClassVar[tuple[str, ...]] = ("anthropic", "@ai-sdk/anthropic") @@ -49,7 +54,13 @@ def __init__( env: Mapping[str, str] | None = None, client: AnthropicClient | None = None, ) -> None: - if isinstance(client, anthropic.AsyncAnthropic): + anthropic_sdk = None + if client is not None and not isinstance(client, httpx.AsyncClient): + anthropic_sdk = _sdk.import_sdk(provider=name) + + if anthropic_sdk is not None and isinstance( + client, anthropic_sdk.AsyncAnthropic + ): sdk_client = client http_client = None self._has_user_sdk_client = True @@ -84,8 +95,9 @@ def _make_sdk_client( self, *, http_client: httpx.AsyncClient | None = None, - ) -> anthropic.AsyncAnthropic: - return anthropic.AsyncAnthropic( + ) -> AnthropicSDKClient: + anthropic_sdk = _sdk.import_sdk(provider=self.name) + return anthropic_sdk.AsyncAnthropic( base_url=self.base_url, api_key=self.api_key or "", http_client=http_client, @@ -96,7 +108,7 @@ def _make_sdk_client( ) @property - def sdk_client(self) -> anthropic.AsyncAnthropic: + def sdk_client(self) -> AnthropicSDKClient: """Provider SDK client used for Anthropic-compatible API requests.""" return self.client @@ -122,8 +134,6 @@ def stream( params: Any = None, ) -> AsyncGenerator[events.Event]: """Stream via the Anthropic messages protocol.""" - from . import protocol - return protocol.stream( self.sdk_client, model, @@ -145,7 +155,7 @@ def from_modelsdev_provider( headers: Mapping[str, str] | None = None, env: Mapping[str, str] | None = None, client: AnthropicClient | None = None, - ) -> base.Provider[anthropic.AsyncAnthropic]: + ) -> base.Provider[AnthropicSDKClient]: resolved_base_url = base_url or base.provider_base_url( provider, model_provider_config, @@ -175,15 +185,14 @@ def tools(self) -> ModuleType: Convenience accessor: ``anthropic.tools.web_search(...)``. """ - from . 
import tools as tools_module - return tools_module async def list_models(self) -> list[str]: """List available model IDs from the Anthropic API.""" + anthropic_sdk = _sdk.import_sdk(provider=self.name) try: sdk_models = await self.sdk_client.models.list() - except anthropic.AnthropicError as exc: + except anthropic_sdk.AnthropicError as exc: raise errors.map_error(exc, provider=self.name) from exc return sorted(str(m.id) for m in sdk_models.data) @@ -194,9 +203,10 @@ async def probe(self, model: model_.Model) -> None: f"provider {self.name!r} is not configured", provider=self.name, ) + anthropic_sdk = _sdk.import_sdk(provider=self.name) try: await self.sdk_client.models.retrieve(model.id) - except anthropic.AnthropicError as exc: + except anthropic_sdk.AnthropicError as exc: raise errors.map_error( exc, provider=self.name, diff --git a/src/ai/providers/openai/__init__.py b/src/ai/providers/openai/__init__.py index 2703318..6c12119 100644 --- a/src/ai/providers/openai/__init__.py +++ b/src/ai/providers/openai/__init__.py @@ -9,7 +9,8 @@ model = ai.Model("llama3", provider=provider) ids = await ai.get_provider("openai").list_models() -The protocol module is loaded lazily by provider methods. +The optional upstream OpenAI SDK is loaded lazily when the provider creates or +uses an SDK client. """ from . import tools diff --git a/src/ai/providers/openai/_sdk.py b/src/ai/providers/openai/_sdk.py new file mode 100644 index 0000000..ac232bf --- /dev/null +++ b/src/ai/providers/openai/_sdk.py @@ -0,0 +1,43 @@ +"""Lazy OpenAI SDK imports.""" + +from __future__ import annotations + +from collections.abc import Callable +from typing import TYPE_CHECKING, Any, Protocol, cast + +from .. import _optional + +if TYPE_CHECKING: + import openai + + +class OpenAISDK(Protocol): + AsyncOpenAI: type[openai.AsyncOpenAI] + OpenAIError: type[openai.OpenAIError] + APIConnectionError: type[openai.APIConnectionError] + APIError: type[openai.APIError] + APIResponseValidationError: type[openai.APIResponseValidationError] + APIStatusError: type[openai.APIStatusError] + APITimeoutError: type[openai.APITimeoutError] + + +class OpenAIPydantic(Protocol): + to_strict_json_schema: Callable[[Any], dict[str, Any]] + + +def import_sdk(*, provider: str = "openai") -> OpenAISDK: + return cast( + OpenAISDK, + _optional.import_optional_sdk("openai", provider=provider, extra="openai"), + ) + + +def import_pydantic(*, provider: str = "openai") -> OpenAIPydantic: + return cast( + OpenAIPydantic, + _optional.import_optional_sdk( + "openai.lib._pydantic", + provider=provider, + extra="openai", + ), + ) diff --git a/src/ai/providers/openai/errors.py b/src/ai/providers/openai/errors.py index 7e22078..fdbdbf2 100644 --- a/src/ai/providers/openai/errors.py +++ b/src/ai/providers/openai/errors.py @@ -2,24 +2,25 @@ from __future__ import annotations -from typing import Any +from typing import TYPE_CHECKING, Any import httpx -import openai from ... 
import errors as ai_errors - -_STATUS_ERROR_MAP: dict[ - type[openai.APIStatusError], type[ai_errors.ProviderAPIError] -] = { - openai.BadRequestError: ai_errors.ProviderBadRequestError, - openai.AuthenticationError: ai_errors.ProviderAuthenticationError, - openai.PermissionDeniedError: ai_errors.ProviderPermissionDeniedError, - openai.NotFoundError: ai_errors.ProviderNotFoundError, - openai.ConflictError: ai_errors.ProviderConflictError, - openai.UnprocessableEntityError: ai_errors.ProviderUnprocessableEntityError, - openai.RateLimitError: ai_errors.ProviderRateLimitError, - openai.InternalServerError: ai_errors.ProviderInternalServerError, +from . import _sdk + +if TYPE_CHECKING: + import openai + +_STATUS_ERROR_MAP: dict[str, type[ai_errors.ProviderAPIError]] = { + "BadRequestError": ai_errors.ProviderBadRequestError, + "AuthenticationError": ai_errors.ProviderAuthenticationError, + "PermissionDeniedError": ai_errors.ProviderPermissionDeniedError, + "NotFoundError": ai_errors.ProviderNotFoundError, + "ConflictError": ai_errors.ProviderConflictError, + "UnprocessableEntityError": ai_errors.ProviderUnprocessableEntityError, + "RateLimitError": ai_errors.ProviderRateLimitError, + "InternalServerError": ai_errors.ProviderInternalServerError, } @@ -30,7 +31,8 @@ def map_error( model_id: str | None = None, ) -> ai_errors.ProviderAPIError: """Map an OpenAI SDK exception to the public provider hierarchy.""" - if isinstance(exc, openai.APITimeoutError): + openai_sdk = _sdk.import_sdk(provider=provider or "openai") + if isinstance(exc, openai_sdk.APITimeoutError): return _provider_error( ai_errors.ProviderTimeoutError, exc, @@ -38,7 +40,7 @@ def map_error( model_id=model_id, is_retryable=True, ) - if isinstance(exc, openai.APIConnectionError): + if isinstance(exc, openai_sdk.APIConnectionError): return _provider_error( ai_errors.ProviderConnectionError, exc, @@ -46,16 +48,20 @@ def map_error( model_id=model_id, is_retryable=True, ) - if isinstance(exc, openai.APIResponseValidationError): + if isinstance(exc, openai_sdk.APIResponseValidationError): return _provider_error( ai_errors.ProviderResponseError, exc, provider=provider, model_id=model_id, ) - if isinstance(exc, openai.APIStatusError): - return _map_status_error(exc, provider=provider, model_id=model_id) - if isinstance(exc, openai.APIError): + if isinstance(exc, openai_sdk.APIStatusError): + return _map_status_error( + exc, + provider=provider, + model_id=model_id, + ) + if isinstance(exc, openai_sdk.APIError): return _provider_error( ai_errors.ProviderAPIError, exc, @@ -80,7 +86,7 @@ def _map_status_error( cls: type[ai_errors.ProviderAPIError] = ai_errors.ProviderModelNotFoundError else: cls = _STATUS_ERROR_MAP.get( - type(exc) + type(exc).__name__ ) or ai_errors.http_status_to_provider_status_error_class(exc.status_code) return _provider_error(cls, exc, provider=provider, model_id=model_id) diff --git a/src/ai/providers/openai/protocol.py b/src/ai/providers/openai/protocol.py index 904421b..e67a010 100644 --- a/src/ai/providers/openai/protocol.py +++ b/src/ai/providers/openai/protocol.py @@ -4,15 +4,20 @@ OpenAI-compatible providers own the SDK client used by this protocol. """ +from __future__ import annotations + +import base64 from collections.abc import AsyncGenerator, Mapping, Sequence -from typing import Any +from typing import TYPE_CHECKING, Any -import openai import pydantic from ... import types from ...models import core -from . import errors +from . 
import _sdk, errors + +if TYPE_CHECKING: + import openai # --------------------------------------------------------------------------- # Message / tool conversion — internal types → OpenAI wire format @@ -94,9 +99,7 @@ async def _file_part_to_openai( elif types.media.is_url(data): text_content = data else: - import base64 as _b64 - - text_content = _b64.b64decode(data).decode("utf-8") + text_content = base64.b64decode(data).decode("utf-8") return {"type": "text", "text": text_content} raise ValueError(f"Unsupported media type for OpenAI: {mt}") @@ -219,6 +222,7 @@ async def stream( provider: str, ) -> AsyncGenerator[types.events.Event]: """Stream through the OpenAI chat completions protocol using *sdk_client*.""" + openai_sdk = _sdk.import_sdk(provider=provider) if tools and any(t.kind == "provider" for t in tools): raise NotImplementedError( "OpenAI built-in tools require the Responses API. " @@ -248,13 +252,13 @@ async def stream( api_kwargs["tools"] = openai_tools if output_type is not None: - from openai.lib._pydantic import to_strict_json_schema + openai_pydantic = _sdk.import_pydantic(provider=provider) api_kwargs["response_format"] = { "type": "json_schema", "json_schema": { "name": output_type.__name__, - "schema": to_strict_json_schema(output_type), + "schema": openai_pydantic.to_strict_json_schema(output_type), "strict": True, }, } @@ -366,7 +370,7 @@ async def stream( tc["started"] = False yield types.events.StreamEnd(usage=usage) - except openai.OpenAIError as exc: + except openai_sdk.OpenAIError as exc: raise errors.map_error( exc, provider=provider, diff --git a/src/ai/providers/openai/provider.py b/src/ai/providers/openai/provider.py index f0d4591..7777687 100644 --- a/src/ai/providers/openai/provider.py +++ b/src/ai/providers/openai/provider.py @@ -7,14 +7,15 @@ from typing import TYPE_CHECKING, Any, ClassVar import httpx -import openai from ... import errors as ai_errors from .. import base -from . import errors +from . import _sdk, errors, protocol +from . 
import tools as tools_module if TYPE_CHECKING: import modelsdotdev + import openai import pydantic from ...models.core import model as model_ @@ -22,14 +23,18 @@ from ...types import messages as messages_ from ...types import tools as tools_ -OpenAIClient = httpx.AsyncClient | openai.AsyncOpenAI + OpenAIClient = httpx.AsyncClient | openai.AsyncOpenAI + OpenAISDKClient = openai.AsyncOpenAI +else: + OpenAIClient = Any + OpenAISDKClient = Any _BASE_URL = "https://api.openai.com/v1" _BASE_URL_ENV = "OPENAI_BASE_URL" _API_KEY_ENV = "OPENAI_API_KEY" -class OpenAICompatibleProvider(base.Provider[openai.AsyncOpenAI]): +class OpenAICompatibleProvider(base.Provider[OpenAISDKClient]): """Provider configuration for OpenAI-compatible APIs.""" handles: ClassVar[tuple[str, ...]] = ( @@ -51,7 +56,11 @@ def __init__( env: Mapping[str, str] | None = None, client: OpenAIClient | None = None, ) -> None: - if isinstance(client, openai.AsyncOpenAI): + openai_sdk = None + if client is not None and not isinstance(client, httpx.AsyncClient): + openai_sdk = _sdk.import_sdk(provider=name) + + if openai_sdk is not None and isinstance(client, openai_sdk.AsyncOpenAI): sdk_client = client http_client = None self._has_user_sdk_client = True @@ -84,8 +93,9 @@ def _make_sdk_client( self, *, http_client: httpx.AsyncClient | None = None, - ) -> openai.AsyncOpenAI: - return openai.AsyncOpenAI( + ) -> OpenAISDKClient: + openai_sdk = _sdk.import_sdk(provider=self.name) + return openai_sdk.AsyncOpenAI( base_url=self.base_url, api_key=self.api_key or "", default_headers=self.headers, @@ -93,7 +103,7 @@ def _make_sdk_client( ) @property - def sdk_client(self) -> openai.AsyncOpenAI: + def sdk_client(self) -> OpenAISDKClient: """Provider SDK client used for OpenAI-compatible API requests.""" return self.client @@ -119,8 +129,6 @@ def stream( params: Any = None, ) -> AsyncGenerator[events.Event]: """Stream via the OpenAI chat completions protocol.""" - from . import protocol - return protocol.stream( self.sdk_client, model, @@ -142,7 +150,7 @@ def from_modelsdev_provider( headers: Mapping[str, str] | None = None, env: Mapping[str, str] | None = None, client: OpenAIClient | None = None, - ) -> base.Provider[openai.AsyncOpenAI]: + ) -> base.Provider[OpenAISDKClient]: resolved_base_url = base_url or base.provider_base_url( provider, model_provider_config, @@ -175,15 +183,14 @@ def tools(self) -> ModuleType: passed; route via the AI Gateway provider until a Responses protocol ships. """ - from . 
import tools as tools_module - return tools_module async def list_models(self) -> list[str]: """List available model IDs from the OpenAI-compatible API.""" + openai_sdk = _sdk.import_sdk(provider=self.name) try: sdk_models = await self.sdk_client.models.list() - except openai.OpenAIError as exc: + except openai_sdk.OpenAIError as exc: raise errors.map_error(exc, provider=self.name) from exc return sorted(str(m.id) for m in sdk_models.data) @@ -194,9 +201,10 @@ async def probe(self, model: model_.Model) -> None: f"provider {self.name!r} is not configured", provider=self.name, ) + openai_sdk = _sdk.import_sdk(provider=self.name) try: await self.sdk_client.models.retrieve(model.id) - except openai.OpenAIError as exc: + except openai_sdk.OpenAIError as exc: raise errors.map_error( exc, provider=self.name, diff --git a/tests/providers/anthropic/test_provider.py b/tests/providers/anthropic/test_provider.py index 1f5e5cc..01f9822 100644 --- a/tests/providers/anthropic/test_provider.py +++ b/tests/providers/anthropic/test_provider.py @@ -1,5 +1,7 @@ from __future__ import annotations +import importlib + import anthropic import httpx import pytest @@ -144,6 +146,26 @@ def test_provider_is_configured_requires_api_key( assert ai.get_provider("anthropic", api_key="sk-test").is_configured() is True +def test_get_provider_raises_installation_error_when_anthropic_sdk_missing( + monkeypatch: pytest.MonkeyPatch, +) -> None: + real_import_module = importlib.import_module + + def _missing_anthropic(name: str, package: str | None = None) -> object: + if name == "anthropic" or name.startswith("anthropic."): + raise ModuleNotFoundError(name="anthropic") + return real_import_module(name, package) + + monkeypatch.setattr(importlib, "import_module", _missing_anthropic) + + with pytest.raises(ai.InstallationError) as exc_info: + ai.get_provider("anthropic", api_key="sk-test") + + assert "could not import `anthropic`" in str(exc_info.value) + assert "required to use the anthropic provider" in str(exc_info.value) + assert "ai[anthropic]" in str(exc_info.value) + + def test_get_provider_accepts_base_url_and_api_key() -> None: provider = ai.get_provider( "anthropic", diff --git a/tests/providers/openai/test_provider.py b/tests/providers/openai/test_provider.py index bc1e11f..ca7ea94 100644 --- a/tests/providers/openai/test_provider.py +++ b/tests/providers/openai/test_provider.py @@ -1,5 +1,7 @@ from __future__ import annotations +import importlib + import httpx import openai import pytest @@ -151,6 +153,50 @@ def test_provider_is_configured_requires_api_key( assert ai.get_provider("openai", api_key="sk-test").is_configured() is True +def test_get_provider_raises_installation_error_when_openai_sdk_missing( + monkeypatch: pytest.MonkeyPatch, +) -> None: + real_import_module = importlib.import_module + + def _missing_openai(name: str, package: str | None = None) -> object: + if name == "openai" or name.startswith("openai."): + raise ModuleNotFoundError(name="openai") + return real_import_module(name, package) + + monkeypatch.setattr(importlib, "import_module", _missing_openai) + + with pytest.raises(ai.InstallationError) as exc_info: + ai.get_provider("openai", api_key="sk-test") + + assert "could not import `openai`" in str(exc_info.value) + assert "required to use the openai provider" in str(exc_info.value) + assert "ai[openai]" in str(exc_info.value) + + +def test_installation_error_uses_modelsdev_provider_id( + monkeypatch: pytest.MonkeyPatch, +) -> None: + real_import_module = importlib.import_module + + def 
_missing_openai(name: str, package: str | None = None) -> object: + if name == "openai" or name.startswith("openai."): + raise ModuleNotFoundError(name="openai") + return real_import_module(name, package) + + monkeypatch.setattr(importlib, "import_module", _missing_openai) + + with pytest.raises(ai.InstallationError) as exc_info: + ai.get_provider( + "cloudflare-workers-ai", + env={ + "CLOUDFLARE_ACCOUNT_ID": "account-123", + "CLOUDFLARE_API_KEY": "sk-test", + }, + ) + + assert "required to use the cloudflare-workers-ai provider" in str(exc_info.value) + + def test_get_provider_accepts_base_url_and_api_key() -> None: provider = ai.get_provider( "openai", diff --git a/uv.lock b/uv.lock index b1ce2f7..bad2204 100644 --- a/uv.lock +++ b/uv.lock @@ -10,20 +10,27 @@ resolution-markers = [ name = "ai" source = { editable = "." } dependencies = [ - { name = "anthropic" }, { name = "httpx" }, { name = "mcp" }, { name = "modelsdotdev" }, - { name = "openai" }, { name = "pydantic" }, { name = "typing-extensions" }, - { name = "vercel" }, +] + +[package.optional-dependencies] +anthropic = [ + { name = "anthropic" }, +] +openai = [ + { name = "openai" }, ] [package.dev-dependencies] dev = [ + { name = "anthropic" }, { name = "async-solipsism" }, { name = "mypy" }, + { name = "openai" }, { name = "pyright" }, { name = "pytest" }, { name = "pytest-asyncio" }, @@ -34,20 +41,22 @@ dev = [ [package.metadata] requires-dist = [ - { name = "anthropic", specifier = ">=0.83.0" }, + { name = "anthropic", marker = "extra == 'anthropic'", specifier = ">=0.83.0" }, { name = "httpx", specifier = ">=0.28.1" }, { name = "mcp", specifier = ">=1.18.0" }, { name = "modelsdotdev", git = "https://github.com/vercel-labs/modelsdotdev-python" }, - { name = "openai", specifier = ">=2.14.0" }, + { name = "openai", marker = "extra == 'openai'", specifier = ">=2.14.0" }, { name = "pydantic", specifier = ">=2.12.5" }, { name = "typing-extensions", specifier = ">=4.15.0" }, - { name = "vercel", specifier = ">=0.3.8" }, ] +provides-extras = ["anthropic", "openai"] [package.metadata.requires-dev] dev = [ + { name = "anthropic", specifier = ">=0.83.0" }, { name = "async-solipsism", specifier = ">=0.9" }, { name = "mypy", specifier = ">=1.11" }, + { name = "openai", specifier = ">=2.14.0" }, { name = "pyright", specifier = ">=1.1.408" }, { name = "pytest", specifier = ">=8.0" }, { name = "pytest-asyncio", specifier = ">=0.24" }, @@ -1064,81 +1073,3 @@ sdist = { url = "https://files.pythonhosted.org/packages/c3/d1/8f3c683c9561a4e66 wheels = [ { url = "https://files.pythonhosted.org/packages/3d/d8/2083a1daa7439a66f3a48589a57d576aa117726762618f6bb09fe3798796/uvicorn-0.40.0-py3-none-any.whl", hash = "sha256:c6c8f55bc8bf13eb6fa9ff87ad62308bbbc33d0b67f84293151efe87e0d5f2ee", size = 68502, upload-time = "2025-12-21T14:16:21.041Z" }, ] - -[[package]] -name = "vercel" -version = "0.3.8" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "anyio" }, - { name = "httpx" }, - { name = "pydantic" }, - { name = "python-dotenv" }, - { name = "vercel-sandbox" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/b9/ff/b44f61587e75cd8b78cc56324fe43ca6dbf104c94dcc9396579fdf2e0d69/vercel-0.3.8.tar.gz", hash = "sha256:047fdc66db775bb334c4a12aba3ac4f39bd56613e0216c7e05f1d57d7989d21d", size = 41814, upload-time = "2026-01-20T21:33:28.921Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/27/0d/acf76808ecc6c8408d80778cb5dee834f3f17ad85a144ec3775236c44af1/vercel-0.3.8-py3-none-any.whl", hash = 
"sha256:4e4cebd5e665701eee00e4917a813f00277e4fb37b82c47fb7062e0ab927a6d2", size = 53092, upload-time = "2026-01-20T21:33:26.107Z" }, -] - -[[package]] -name = "vercel-sandbox" -version = "0.0.5" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "anyio" }, - { name = "httpx" }, - { name = "pydantic" }, - { name = "python-dotenv" }, - { name = "vercel" }, - { name = "websockets" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/03/a3/066c2d6f9b48d72a22113a561c797046e3c37a59f46a3e43d7f216b13083/vercel_sandbox-0.0.5.tar.gz", hash = "sha256:a30d2301c8b2fcc35640b8f376fa7d708e0fe34f801703e67ff6a0034f206ca1", size = 19105, upload-time = "2026-02-02T23:35:59.267Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/26/50/634fc188d15b248f25bcec95f8dfa5db4a7ff7d43735c85e521c09014e9b/vercel_sandbox-0.0.5-py3-none-any.whl", hash = "sha256:eed37c405c41da8eff21c1a8900bbdc8096239da38a741e6bb45f678c42453fc", size = 21934, upload-time = "2026-02-02T23:35:57.644Z" }, -] - -[[package]] -name = "websockets" -version = "16.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/04/24/4b2031d72e840ce4c1ccb255f693b15c334757fc50023e4db9537080b8c4/websockets-16.0.tar.gz", hash = "sha256:5f6261a5e56e8d5c42a4497b364ea24d94d9563e8fbd44e78ac40879c60179b5", size = 179346, upload-time = "2026-01-10T09:23:47.181Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/84/7b/bac442e6b96c9d25092695578dda82403c77936104b5682307bd4deb1ad4/websockets-16.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:71c989cbf3254fbd5e84d3bff31e4da39c43f884e64f2551d14bb3c186230f00", size = 177365, upload-time = "2026-01-10T09:22:46.787Z" }, - { url = "https://files.pythonhosted.org/packages/b0/fe/136ccece61bd690d9c1f715baaeefd953bb2360134de73519d5df19d29ca/websockets-16.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:8b6e209ffee39ff1b6d0fa7bfef6de950c60dfb91b8fcead17da4ee539121a79", size = 175038, upload-time = "2026-01-10T09:22:47.999Z" }, - { url = "https://files.pythonhosted.org/packages/40/1e/9771421ac2286eaab95b8575b0cb701ae3663abf8b5e1f64f1fd90d0a673/websockets-16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:86890e837d61574c92a97496d590968b23c2ef0aeb8a9bc9421d174cd378ae39", size = 175328, upload-time = "2026-01-10T09:22:49.809Z" }, - { url = "https://files.pythonhosted.org/packages/18/29/71729b4671f21e1eaa5d6573031ab810ad2936c8175f03f97f3ff164c802/websockets-16.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:9b5aca38b67492ef518a8ab76851862488a478602229112c4b0d58d63a7a4d5c", size = 184915, upload-time = "2026-01-10T09:22:51.071Z" }, - { url = "https://files.pythonhosted.org/packages/97/bb/21c36b7dbbafc85d2d480cd65df02a1dc93bf76d97147605a8e27ff9409d/websockets-16.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e0334872c0a37b606418ac52f6ab9cfd17317ac26365f7f65e203e2d0d0d359f", size = 186152, upload-time = "2026-01-10T09:22:52.224Z" }, - { url = "https://files.pythonhosted.org/packages/4a/34/9bf8df0c0cf88fa7bfe36678dc7b02970c9a7d5e065a3099292db87b1be2/websockets-16.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a0b31e0b424cc6b5a04b8838bbaec1688834b2383256688cf47eb97412531da1", size = 185583, upload-time = "2026-01-10T09:22:53.443Z" }, - { url = 
"https://files.pythonhosted.org/packages/47/88/4dd516068e1a3d6ab3c7c183288404cd424a9a02d585efbac226cb61ff2d/websockets-16.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:485c49116d0af10ac698623c513c1cc01c9446c058a4e61e3bf6c19dff7335a2", size = 184880, upload-time = "2026-01-10T09:22:55.033Z" }, - { url = "https://files.pythonhosted.org/packages/91/d6/7d4553ad4bf1c0421e1ebd4b18de5d9098383b5caa1d937b63df8d04b565/websockets-16.0-cp312-cp312-win32.whl", hash = "sha256:eaded469f5e5b7294e2bdca0ab06becb6756ea86894a47806456089298813c89", size = 178261, upload-time = "2026-01-10T09:22:56.251Z" }, - { url = "https://files.pythonhosted.org/packages/c3/f0/f3a17365441ed1c27f850a80b2bc680a0fa9505d733fe152fdf5e98c1c0b/websockets-16.0-cp312-cp312-win_amd64.whl", hash = "sha256:5569417dc80977fc8c2d43a86f78e0a5a22fee17565d78621b6bb264a115d4ea", size = 178693, upload-time = "2026-01-10T09:22:57.478Z" }, - { url = "https://files.pythonhosted.org/packages/cc/9c/baa8456050d1c1b08dd0ec7346026668cbc6f145ab4e314d707bb845bf0d/websockets-16.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:878b336ac47938b474c8f982ac2f7266a540adc3fa4ad74ae96fea9823a02cc9", size = 177364, upload-time = "2026-01-10T09:22:59.333Z" }, - { url = "https://files.pythonhosted.org/packages/7e/0c/8811fc53e9bcff68fe7de2bcbe75116a8d959ac699a3200f4847a8925210/websockets-16.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:52a0fec0e6c8d9a784c2c78276a48a2bdf099e4ccc2a4cad53b27718dbfd0230", size = 175039, upload-time = "2026-01-10T09:23:01.171Z" }, - { url = "https://files.pythonhosted.org/packages/aa/82/39a5f910cb99ec0b59e482971238c845af9220d3ab9fa76dd9162cda9d62/websockets-16.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e6578ed5b6981005df1860a56e3617f14a6c307e6a71b4fff8c48fdc50f3ed2c", size = 175323, upload-time = "2026-01-10T09:23:02.341Z" }, - { url = "https://files.pythonhosted.org/packages/bd/28/0a25ee5342eb5d5f297d992a77e56892ecb65e7854c7898fb7d35e9b33bd/websockets-16.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:95724e638f0f9c350bb1c2b0a7ad0e83d9cc0c9259f3ea94e40d7b02a2179ae5", size = 184975, upload-time = "2026-01-10T09:23:03.756Z" }, - { url = "https://files.pythonhosted.org/packages/f9/66/27ea52741752f5107c2e41fda05e8395a682a1e11c4e592a809a90c6a506/websockets-16.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c0204dc62a89dc9d50d682412c10b3542d748260d743500a85c13cd1ee4bde82", size = 186203, upload-time = "2026-01-10T09:23:05.01Z" }, - { url = "https://files.pythonhosted.org/packages/37/e5/8e32857371406a757816a2b471939d51c463509be73fa538216ea52b792a/websockets-16.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:52ac480f44d32970d66763115edea932f1c5b1312de36df06d6b219f6741eed8", size = 185653, upload-time = "2026-01-10T09:23:06.301Z" }, - { url = "https://files.pythonhosted.org/packages/9b/67/f926bac29882894669368dc73f4da900fcdf47955d0a0185d60103df5737/websockets-16.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6e5a82b677f8f6f59e8dfc34ec06ca6b5b48bc4fcda346acd093694cc2c24d8f", size = 184920, upload-time = "2026-01-10T09:23:07.492Z" }, - { url = "https://files.pythonhosted.org/packages/3c/a1/3d6ccdcd125b0a42a311bcd15a7f705d688f73b2a22d8cf1c0875d35d34a/websockets-16.0-cp313-cp313-win32.whl", hash = "sha256:abf050a199613f64c886ea10f38b47770a65154dc37181bfaff70c160f45315a", size = 178255, upload-time = "2026-01-10T09:23:09.245Z" }, - { url = 
"https://files.pythonhosted.org/packages/6b/ae/90366304d7c2ce80f9b826096a9e9048b4bb760e44d3b873bb272cba696b/websockets-16.0-cp313-cp313-win_amd64.whl", hash = "sha256:3425ac5cf448801335d6fdc7ae1eb22072055417a96cc6b31b3861f455fbc156", size = 178689, upload-time = "2026-01-10T09:23:10.483Z" }, - { url = "https://files.pythonhosted.org/packages/f3/1d/e88022630271f5bd349ed82417136281931e558d628dd52c4d8621b4a0b2/websockets-16.0-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:8cc451a50f2aee53042ac52d2d053d08bf89bcb31ae799cb4487587661c038a0", size = 177406, upload-time = "2026-01-10T09:23:12.178Z" }, - { url = "https://files.pythonhosted.org/packages/f2/78/e63be1bf0724eeb4616efb1ae1c9044f7c3953b7957799abb5915bffd38e/websockets-16.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:daa3b6ff70a9241cf6c7fc9e949d41232d9d7d26fd3522b1ad2b4d62487e9904", size = 175085, upload-time = "2026-01-10T09:23:13.511Z" }, - { url = "https://files.pythonhosted.org/packages/bb/f4/d3c9220d818ee955ae390cf319a7c7a467beceb24f05ee7aaaa2414345ba/websockets-16.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:fd3cb4adb94a2a6e2b7c0d8d05cb94e6f1c81a0cf9dc2694fb65c7e8d94c42e4", size = 175328, upload-time = "2026-01-10T09:23:14.727Z" }, - { url = "https://files.pythonhosted.org/packages/63/bc/d3e208028de777087e6fb2b122051a6ff7bbcca0d6df9d9c2bf1dd869ae9/websockets-16.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:781caf5e8eee67f663126490c2f96f40906594cb86b408a703630f95550a8c3e", size = 185044, upload-time = "2026-01-10T09:23:15.939Z" }, - { url = "https://files.pythonhosted.org/packages/ad/6e/9a0927ac24bd33a0a9af834d89e0abc7cfd8e13bed17a86407a66773cc0e/websockets-16.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:caab51a72c51973ca21fa8a18bd8165e1a0183f1ac7066a182ff27107b71e1a4", size = 186279, upload-time = "2026-01-10T09:23:17.148Z" }, - { url = "https://files.pythonhosted.org/packages/b9/ca/bf1c68440d7a868180e11be653c85959502efd3a709323230314fda6e0b3/websockets-16.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:19c4dc84098e523fd63711e563077d39e90ec6702aff4b5d9e344a60cb3c0cb1", size = 185711, upload-time = "2026-01-10T09:23:18.372Z" }, - { url = "https://files.pythonhosted.org/packages/c4/f8/fdc34643a989561f217bb477cbc47a3a07212cbda91c0e4389c43c296ebf/websockets-16.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:a5e18a238a2b2249c9a9235466b90e96ae4795672598a58772dd806edc7ac6d3", size = 184982, upload-time = "2026-01-10T09:23:19.652Z" }, - { url = "https://files.pythonhosted.org/packages/dd/d1/574fa27e233764dbac9c52730d63fcf2823b16f0856b3329fc6268d6ae4f/websockets-16.0-cp314-cp314-win32.whl", hash = "sha256:a069d734c4a043182729edd3e9f247c3b2a4035415a9172fd0f1b71658a320a8", size = 177915, upload-time = "2026-01-10T09:23:21.458Z" }, - { url = "https://files.pythonhosted.org/packages/8a/f1/ae6b937bf3126b5134ce1f482365fde31a357c784ac51852978768b5eff4/websockets-16.0-cp314-cp314-win_amd64.whl", hash = "sha256:c0ee0e63f23914732c6d7e0cce24915c48f3f1512ec1d079ed01fc629dab269d", size = 178381, upload-time = "2026-01-10T09:23:22.715Z" }, - { url = "https://files.pythonhosted.org/packages/06/9b/f791d1db48403e1f0a27577a6beb37afae94254a8c6f08be4a23e4930bc0/websockets-16.0-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:a35539cacc3febb22b8f4d4a99cc79b104226a756aa7400adc722e83b0d03244", size = 177737, upload-time = "2026-01-10T09:23:24.523Z" }, - { url = 
"https://files.pythonhosted.org/packages/bd/40/53ad02341fa33b3ce489023f635367a4ac98b73570102ad2cdd770dacc9a/websockets-16.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:b784ca5de850f4ce93ec85d3269d24d4c82f22b7212023c974c401d4980ebc5e", size = 175268, upload-time = "2026-01-10T09:23:25.781Z" }, - { url = "https://files.pythonhosted.org/packages/74/9b/6158d4e459b984f949dcbbb0c5d270154c7618e11c01029b9bbd1bb4c4f9/websockets-16.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:569d01a4e7fba956c5ae4fc988f0d4e187900f5497ce46339c996dbf24f17641", size = 175486, upload-time = "2026-01-10T09:23:27.033Z" }, - { url = "https://files.pythonhosted.org/packages/e5/2d/7583b30208b639c8090206f95073646c2c9ffd66f44df967981a64f849ad/websockets-16.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:50f23cdd8343b984957e4077839841146f67a3d31ab0d00e6b824e74c5b2f6e8", size = 185331, upload-time = "2026-01-10T09:23:28.259Z" }, - { url = "https://files.pythonhosted.org/packages/45/b0/cce3784eb519b7b5ad680d14b9673a31ab8dcb7aad8b64d81709d2430aa8/websockets-16.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:152284a83a00c59b759697b7f9e9cddf4e3c7861dd0d964b472b70f78f89e80e", size = 186501, upload-time = "2026-01-10T09:23:29.449Z" }, - { url = "https://files.pythonhosted.org/packages/19/60/b8ebe4c7e89fb5f6cdf080623c9d92789a53636950f7abacfc33fe2b3135/websockets-16.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:bc59589ab64b0022385f429b94697348a6a234e8ce22544e3681b2e9331b5944", size = 186062, upload-time = "2026-01-10T09:23:31.368Z" }, - { url = "https://files.pythonhosted.org/packages/88/a8/a080593f89b0138b6cba1b28f8df5673b5506f72879322288b031337c0b8/websockets-16.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:32da954ffa2814258030e5a57bc73a3635463238e797c7375dc8091327434206", size = 185356, upload-time = "2026-01-10T09:23:32.627Z" }, - { url = "https://files.pythonhosted.org/packages/c2/b6/b9afed2afadddaf5ebb2afa801abf4b0868f42f8539bfe4b071b5266c9fe/websockets-16.0-cp314-cp314t-win32.whl", hash = "sha256:5a4b4cc550cb665dd8a47f868c8d04c8230f857363ad3c9caf7a0c3bf8c61ca6", size = 178085, upload-time = "2026-01-10T09:23:33.816Z" }, - { url = "https://files.pythonhosted.org/packages/9f/3e/28135a24e384493fa804216b79a6a6759a38cc4ff59118787b9fb693df93/websockets-16.0-cp314-cp314t-win_amd64.whl", hash = "sha256:b14dc141ed6d2dde437cddb216004bcac6a1df0935d79656387bd41632ba0bbd", size = 178531, upload-time = "2026-01-10T09:23:35.016Z" }, - { url = "https://files.pythonhosted.org/packages/6f/28/258ebab549c2bf3e64d2b0217b973467394a9cea8c42f70418ca2c5d0d2e/websockets-16.0-py3-none-any.whl", hash = "sha256:1637db62fad1dc833276dded54215f2c7fa46912301a24bd94d45d46a011ceec", size = 171598, upload-time = "2026-01-10T09:23:45.395Z" }, -]