Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
14 changes: 14 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,15 @@ A toolkit for building LLM-powered applications and agent loops.
uv add ai
```

AI Gateway usage works with the base package. Direct providers that use an
OpenAI-compatible or Anthropic-compatible adapter load the corresponding
official SDK lazily and require optional extras:

```bash
uv add "ai[openai]" # OpenAI-compatible providers
uv add "ai[anthropic]" # Anthropic-compatible providers
```

```python
import ai
```
Expand Down Expand Up @@ -61,6 +70,11 @@ model = ai.get_model("openai:gpt-5.4")
model = ai.get_model("anthropic:claude-sonnet-4-6")
```

Provider IDs without a `provider:` prefix route through AI Gateway by default.
Direct OpenAI-compatible providers — `openai:` as well as OpenAI-compatible
models.dev provider IDs — require `ai[openai]`. Direct Anthropic-compatible
providers require `ai[anthropic]`.

Structured output:

```python
Expand Down
4 changes: 4 additions & 0 deletions examples/check-examples.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,10 @@ def run_mypy(name: str, directory: Path, extra_deps: list[str]) -> bool:
"uv",
"run",
"--frozen",
"--project",
str(REPO),
"--group",
"dev",
"--with-editable",
str(REPO),
*with_args,
Expand Down
2 changes: 2 additions & 0 deletions examples/run-examples.py
Original file line number Diff line number Diff line change
Expand Up @@ -123,6 +123,8 @@ def _sample_cmd(sample: Sample) -> list[str]:
"uv",
"run",
"--frozen",
"--group",
"dev",
"--with-editable",
str(REPO),
"python",
Expand Down
17 changes: 8 additions & 9 deletions examples/samples/builtin_web_search.py
Original file line number Diff line number Diff line change
@@ -1,22 +1,14 @@
"""Anthropic built-in web search.

https://docs.anthropic.com/en/docs/build-with-claude/tool-use/web-search-tool
"""

import asyncio
import json
import sys

import ai
from ai.providers.anthropic import tools as anthropic_tools

provider = ai.get_provider("anthropic")

if not provider.is_configured():
print(f"[SKIP] {provider.name} provider is not configured")
sys.exit(0)

model = ai.get_model("anthropic:claude-sonnet-4-6")

messages = [
ai.system_message("Be concise. Cite sources you use. The year is 2026"),
ai.user_message(
Expand Down Expand Up @@ -51,6 +43,13 @@ def format(value: object) -> str:


async def main() -> None:
provider = ai.get_provider("anthropic")
if not provider.is_configured():
print(f"[SKIP] {provider.name} provider is not configured")
return

model = ai.Model("claude-sonnet-4-6", provider=provider)

async with ai.stream(model, messages, tools=tools) as s:
async for event in s:
match event:
Expand Down
26 changes: 13 additions & 13 deletions examples/samples/explicit_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,23 +5,23 @@

import ai

# Example for local OpenAI-compatible servers like LM Studio.
provider = ai.get_provider(
"openai",
base_url=os.environ.get("LOCAL_OPENAI_BASE_URL", "http://localhost:1234/v1"),
api_key=os.environ.get("LOCAL_OPENAI_API_KEY", "some-key"),
headers={"X-Custom-Header": "example"},
)

model = ai.Model(
os.environ.get("LOCAL_OPENAI_MODEL", "local-model"),
provider=provider,
)

messages = [ai.user_message("Hello!")]


async def main() -> None:
# Example for local OpenAI-compatible servers like LM Studio.
provider = ai.get_provider(
"openai",
base_url=os.environ.get("LOCAL_OPENAI_BASE_URL", "http://localhost:1234/v1"),
api_key=os.environ.get("LOCAL_OPENAI_API_KEY", "some-key"),
headers={"X-Custom-Header": "example"},
)

model = ai.Model(
os.environ.get("LOCAL_OPENAI_MODEL", "local-model"),
provider=provider,
)

try:
try:
await ai.probe(model)
Expand Down
9 changes: 6 additions & 3 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -28,16 +28,17 @@ classifiers = [
]
requires-python = ">=3.12"
dependencies = [
"anthropic>=0.83.0",
"httpx>=0.28.1",
"mcp>=1.18.0",
"modelsdotdev @ git+https://github.com/vercel-labs/modelsdotdev-python",
"openai>=2.14.0",
"pydantic>=2.12.5",
"typing-extensions>=4.15.0",
"vercel>=0.3.8",
]

[project.optional-dependencies]
anthropic = ["anthropic>=0.83.0"]
openai = ["openai>=2.14.0"]

[build-system]
requires = ["hatchling", "uv-dynamic-versioning>=0.7.0"]
build-backend = "hatchling.build"
Expand All @@ -55,11 +56,13 @@ bump = true

[dependency-groups]
dev = [
"anthropic>=0.83.0",
"python-dotenv>=1.2.1",
"pytest>=8.0",
"pytest-asyncio>=0.24",
"rich>=14.2.0",
"mypy>=1.11",
"openai>=2.14.0",
"ruff>=0.8",
"pyright>=1.1.408",
"async-solipsism>=0.9",
Expand Down
4 changes: 4 additions & 0 deletions skills/ai/SKILL.md
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,10 @@ Use this skill when working with the Python `ai` SDK.
uv add ai
```

Direct OpenAI-compatible and Anthropic-compatible providers require optional
extras: `uv add "ai[openai]"` or `uv add "ai[anthropic]"`. AI Gateway works
with the base package.

```python
import ai
```
Expand Down
2 changes: 2 additions & 0 deletions src/ai/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@
AIError,
ConfigurationError,
HTTPErrorContext,
InstallationError,
ProviderAPIError,
ProviderAuthenticationError,
ProviderBadRequestError,
Expand Down Expand Up @@ -84,6 +85,7 @@
"AIError",
"ConfigurationError",
"HTTPErrorContext",
"InstallationError",
"ProviderAPIError",
"ProviderAuthenticationError",
"ProviderBadRequestError",
Expand Down
5 changes: 5 additions & 0 deletions src/ai/errors.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,10 @@ class ConfigurationError(AIError):
"""Required SDK configuration is missing or invalid."""


class InstallationError(ConfigurationError):
"""Required optional dependency is not installed."""


class ProviderError(AIError):
"""Base class for errors raised by model providers."""

Expand Down Expand Up @@ -307,6 +311,7 @@ def _is_retryable_status(status_code: int | None) -> bool:
"AIError",
"ConfigurationError",
"HTTPErrorContext",
"InstallationError",
"ProviderAPIError",
"ProviderAuthenticationError",
"ProviderBadRequestError",
Expand Down
23 changes: 23 additions & 0 deletions src/ai/providers/_optional.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
"""Optional provider SDK imports."""

from __future__ import annotations

import importlib
from types import ModuleType

from .. import errors as ai_errors


def import_optional_sdk(module_name: str, *, provider: str, extra: str) -> ModuleType:
    """Return the lazily imported optional SDK module *module_name*.

    When the module (or its top-level distribution root) is not installed,
    an ``InstallationError`` is raised naming *provider* and the ``ai[extra]``
    package extra the user should install. A ``ModuleNotFoundError`` caused
    by some *other* missing module is re-raised untouched so genuine
    dependency problems stay visible.
    """
    root = module_name.split(".", 1)[0]
    try:
        sdk_module = importlib.import_module(module_name)
    except ModuleNotFoundError as exc:
        # Translate the failure only when the missing module is the optional
        # SDK itself (either the requested dotted path or its root package);
        # anything else is an unrelated import error.
        if exc.name in (module_name, root):
            raise ai_errors.InstallationError(
                f"could not import `{root}`, which is required to use the "
                f"{provider} provider, you can install it with `pip install "
                f'"ai[{extra}]"` or `uv add "ai[{extra}]"`'
            ) from exc
        raise
    return sdk_module
3 changes: 2 additions & 1 deletion src/ai/providers/anthropic/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,8 @@
) as s:
...

The protocol module is loaded lazily by provider methods.
The optional upstream Anthropic SDK is loaded lazily when the provider creates
or uses an SDK client.
"""

from . import tools
Expand Down
31 changes: 31 additions & 0 deletions src/ai/providers/anthropic/_sdk.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
"""Lazy Anthropic SDK imports."""

from __future__ import annotations

from typing import TYPE_CHECKING, Protocol, cast

from .. import _optional

if TYPE_CHECKING:
import anthropic


class AnthropicSDK(Protocol):
    """Structural type for the lazily imported ``anthropic`` module.

    Declares only the SDK attributes this package references, so the module
    returned by :func:`import_sdk` can be type-checked without making
    ``anthropic`` a hard dependency (it is only imported under
    ``TYPE_CHECKING`` here).
    """

    # Async API client class used to create SDK clients.
    AsyncAnthropic: type[anthropic.AsyncAnthropic]
    # Root of the SDK's exception hierarchy.
    AnthropicError: type[anthropic.AnthropicError]
    # Network/connection failure raised by the SDK.
    APIConnectionError: type[anthropic.APIConnectionError]
    # Generic API-level error (base for the more specific ones below).
    APIError: type[anthropic.APIError]
    # Response payload failed SDK-side validation.
    APIResponseValidationError: type[anthropic.APIResponseValidationError]
    # Error carrying an HTTP status code.
    APIStatusError: type[anthropic.APIStatusError]
    # Request timed out.
    APITimeoutError: type[anthropic.APITimeoutError]


def import_sdk(*, provider: str = "anthropic") -> AnthropicSDK:
    """Load the upstream ``anthropic`` package on demand.

    Delegates to the shared optional-import helper, so a missing SDK
    surfaces as an ``InstallationError`` that points at the
    ``ai[anthropic]`` extra; *provider* only customises the provider name
    mentioned in that error message.
    """
    module = _optional.import_optional_sdk(
        "anthropic", provider=provider, extra="anthropic"
    )
    # The real module satisfies the protocol structurally; cast() records
    # that for static type checkers without any runtime check.
    return cast(AnthropicSDK, module)
24 changes: 16 additions & 8 deletions src/ai/providers/anthropic/errors.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,12 +2,15 @@

from __future__ import annotations

from typing import Any
from typing import TYPE_CHECKING, Any

import anthropic
import httpx

from ... import errors as ai_errors
from . import _sdk

if TYPE_CHECKING:
import anthropic


def map_error(
Expand All @@ -17,32 +20,37 @@ def map_error(
model_id: str | None = None,
) -> ai_errors.ProviderAPIError:
"""Map an Anthropic SDK exception to the public provider hierarchy."""
if isinstance(exc, anthropic.APITimeoutError):
anthropic_sdk = _sdk.import_sdk(provider=provider or "anthropic")
if isinstance(exc, anthropic_sdk.APITimeoutError):
return _provider_error(
ai_errors.ProviderTimeoutError,
exc,
provider=provider,
model_id=model_id,
is_retryable=True,
)
if isinstance(exc, anthropic.APIConnectionError):
if isinstance(exc, anthropic_sdk.APIConnectionError):
return _provider_error(
ai_errors.ProviderConnectionError,
exc,
provider=provider,
model_id=model_id,
is_retryable=True,
)
if isinstance(exc, anthropic.APIResponseValidationError):
if isinstance(exc, anthropic_sdk.APIResponseValidationError):
return _provider_error(
ai_errors.ProviderResponseError,
exc,
provider=provider,
model_id=model_id,
)
if isinstance(exc, anthropic.APIStatusError):
return _map_status_error(exc, provider=provider, model_id=model_id)
if isinstance(exc, anthropic.APIError):
if isinstance(exc, anthropic_sdk.APIStatusError):
return _map_status_error(
exc,
provider=provider,
model_id=model_id,
)
if isinstance(exc, anthropic_sdk.APIError):
return _provider_error(
ai_errors.ProviderAPIError,
exc,
Expand Down
18 changes: 11 additions & 7 deletions src/ai/providers/anthropic/protocol.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,19 +4,24 @@
Anthropic-compatible providers own the SDK client used by this protocol.
"""

from __future__ import annotations

import base64
import json
from collections.abc import AsyncGenerator, Mapping, Sequence
from typing import Any
from typing import TYPE_CHECKING, Any

import anthropic
import pydantic

from ... import types
from ...models import core
from ...types import events
from . import errors
from . import _sdk, errors
from . import tools as anthropic_tools

if TYPE_CHECKING:
import anthropic

PROVIDER_NAME = "anthropic"

# Anthropic block types that carry server-tool results. We track these
Expand Down Expand Up @@ -159,9 +164,7 @@ def _file_part_to_anthropic(
"source": {"type": "url", "url": part.data},
}
else:
import base64 as _b64

text_data = _b64.b64decode(part.data).decode("utf-8")
text_data = base64.b64decode(part.data).decode("utf-8")
return {
"type": "document",
"source": {
Expand Down Expand Up @@ -390,6 +393,7 @@ async def stream(
``params`` may be a raw dict of Anthropic SDK kwargs. Provider-specific
request options are forwarded without local validation or translation.
"""
anthropic_sdk = _sdk.import_sdk(provider=provider)
stream_params = _coerce_params(params)
system_prompt, anthropic_messages = await _messages_to_anthropic(messages)

Expand Down Expand Up @@ -573,7 +577,7 @@ async def stream(
raw=sdk_usage.model_dump(exclude_none=True) or None,
)
yield events.StreamEnd(usage=usage)
except anthropic.AnthropicError as exc:
except anthropic_sdk.AnthropicError as exc:
raise errors.map_error(
exc,
provider=provider,
Expand Down
Loading
Loading