diff --git a/docs/models/openai.md b/docs/models/openai.md index 6c246481bf..2eae27bd03 100644 --- a/docs/models/openai.md +++ b/docs/models/openai.md @@ -233,7 +233,7 @@ agent = Agent(model) ``` Various providers also have their own provider classes so that you don't need to specify the base URL yourself and you can use the standard `_API_KEY` environment variable to set the API key. -When a provider has its own provider class, you can use the `Agent(":")` shorthand, e.g. `Agent("deepseek:deepseek-chat")` or `Agent("openrouter:google/gemini-2.5-pro-preview")`, instead of building the `OpenAIChatModel` explicitly. Similarly, you can pass the provider name as a string to the `provider` argument on `OpenAIChatModel` instead of building instantiating the provider class explicitly. +When a provider has its own provider class, you can use the `Agent(":")` shorthand, e.g. `Agent("deepseek:deepseek-chat")` or `Agent("moonshotai:kimi-k2-0711-preview")`, instead of building the `OpenAIChatModel` explicitly. Similarly, you can pass the provider name as a string to the `provider` argument on `OpenAIChatModel` instead of instantiating the provider class explicitly. #### Model Profile @@ -385,34 +385,6 @@ agent = Agent(model) ... ``` -### OpenRouter - -To use [OpenRouter](https://openrouter.ai), first create an API key at [openrouter.ai/keys](https://openrouter.ai/keys). - -You can set the `OPENROUTER_API_KEY` environment variable and use [`OpenRouterProvider`][pydantic_ai.providers.openrouter.OpenRouterProvider] by name: - -```python -from pydantic_ai import Agent - -agent = Agent('openrouter:anthropic/claude-3.5-sonnet') -... 
-``` - -Or initialise the model and provider directly: - -```python -from pydantic_ai import Agent -from pydantic_ai.models.openai import OpenAIChatModel -from pydantic_ai.providers.openrouter import OpenRouterProvider - -model = OpenAIChatModel( - 'anthropic/claude-3.5-sonnet', - provider=OpenRouterProvider(api_key='your-openrouter-api-key'), -) -agent = Agent(model) -... -``` - ### Vercel AI Gateway To use [Vercel's AI Gateway](https://vercel.com/docs/ai-gateway), first follow the [documentation](https://vercel.com/docs/ai-gateway) instructions on obtaining an API key or OIDC token. diff --git a/docs/models/openrouter.md b/docs/models/openrouter.md new file mode 100644 index 0000000000..dbcf9a818d --- /dev/null +++ b/docs/models/openrouter.md @@ -0,0 +1,54 @@ +# OpenRouter + +## Install + +To use `OpenRouterModel`, you need to either install `pydantic-ai`, or install `pydantic-ai-slim` with the `openrouter` optional group: + +```bash +pip/uv-add "pydantic-ai-slim[openrouter]" +``` + +## Configuration + +To use [OpenRouter](https://openrouter.ai), first create an API key at [openrouter.ai/keys](https://openrouter.ai/keys). + +You can set the `OPENROUTER_API_KEY` environment variable and use [`OpenRouterProvider`][pydantic_ai.providers.openrouter.OpenRouterProvider] by name: + +```python +from pydantic_ai import Agent + +agent = Agent('openrouter:anthropic/claude-3.5-sonnet') +... +``` + +Or initialise the model and provider directly: + +```python +from pydantic_ai import Agent +from pydantic_ai.models.openrouter import OpenRouterModel +from pydantic_ai.providers.openrouter import OpenRouterProvider + +model = OpenRouterModel( + 'anthropic/claude-3.5-sonnet', + provider=OpenRouterProvider(api_key='your-openrouter-api-key'), +) +agent = Agent(model) +... +``` + +## App Attribution + +OpenRouter has an [app attribution](https://openrouter.ai/docs/app-attribution) feature to track your application in their public ranking and analytics. 
+ +You can pass in an `app_url` and `app_title` when initializing the provider to enable app attribution. + +```python +from pydantic_ai.providers.openrouter import OpenRouterProvider + +provider = OpenRouterProvider( + api_key='your-openrouter-api-key', + app_url='https://your-app.com', + app_title='Your App', +) +... +``` diff --git a/mkdocs.yml b/mkdocs.yml index 5f827ae71b..6cdc4a386d 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -31,6 +31,7 @@ nav: - models/google.md - models/bedrock.md - models/cohere.md + - models/openrouter.md - models/groq.md - models/mistral.md - models/huggingface.md diff --git a/pydantic_ai_slim/pydantic_ai/messages.py b/pydantic_ai_slim/pydantic_ai/messages.py index 9019b81931..82050dffb4 100644 --- a/pydantic_ai_slim/pydantic_ai/messages.py +++ b/pydantic_ai_slim/pydantic_ai/messages.py @@ -1063,6 +1063,12 @@ class ThinkingPart: part_kind: Literal['thinking'] = 'thinking' """Part type identifier, this is available on all parts as a discriminator.""" + provider_details: dict[str, Any] | None = None + """Additional provider-specific details in a serializable format. + + This allows storing selected vendor-specific data that isn't mapped to standard ThinkingPart fields. 
+ """ + def has_content(self) -> bool: """Return `True` if the thinking content is non-empty.""" return bool(self.content) diff --git a/pydantic_ai_slim/pydantic_ai/models/__init__.py b/pydantic_ai_slim/pydantic_ai/models/__init__.py index b43681b0a4..a9b3789855 100644 --- a/pydantic_ai_slim/pydantic_ai/models/__init__.py +++ b/pydantic_ai_slim/pydantic_ai/models/__init__.py @@ -807,7 +807,6 @@ def infer_model( # noqa: C901 'heroku', 'moonshotai', 'ollama', - 'openrouter', 'together', 'vercel', 'litellm', @@ -838,6 +837,10 @@ def infer_model( # noqa: C901 from .cohere import CohereModel return CohereModel(model_name, provider=provider) + elif model_kind == 'openrouter': + from .openrouter import OpenRouterModel + + return OpenRouterModel(model_name, provider=provider) elif model_kind == 'mistral': from .mistral import MistralModel diff --git a/pydantic_ai_slim/pydantic_ai/models/openai.py b/pydantic_ai_slim/pydantic_ai/models/openai.py index 5da7e0ccd4..33a281974d 100644 --- a/pydantic_ai_slim/pydantic_ai/models/openai.py +++ b/pydantic_ai_slim/pydantic_ai/models/openai.py @@ -1,9 +1,10 @@ from __future__ import annotations as _annotations import base64 +import itertools import json import warnings -from collections.abc import AsyncIterable, AsyncIterator, Sequence +from collections.abc import AsyncIterable, AsyncIterator, Iterable, Sequence from contextlib import asynccontextmanager from dataclasses import dataclass, field, replace from datetime import datetime @@ -62,6 +63,8 @@ ChatCompletionContentPartInputAudioParam, ChatCompletionContentPartParam, ChatCompletionContentPartTextParam, + chat_completion, + chat_completion_chunk, ) from openai.types.chat.chat_completion_content_part_image_param import ImageURL from openai.types.chat.chat_completion_content_part_input_audio_param import InputAudio @@ -547,6 +550,20 @@ async def _completions_create( raise ModelHTTPError(status_code=status_code, model_name=self.model_name, body=e.body) from e raise # pragma: lax no 
 cover + def _validate_completion(self, response: chat.ChatCompletion) -> chat.ChatCompletion: + """Hook that validates chat completions before processing. + + This method may be overridden by subclasses of `OpenAIChatModel` to apply custom completion validations. + """ + return chat.ChatCompletion.model_validate(response.model_dump()) + + def _process_provider_details(self, response: chat.ChatCompletion) -> dict[str, Any]: + """Hook that maps response content to provider details. + + This method may be overridden by subclasses of `OpenAIChatModel` to apply custom mappings. + """ + return _map_provider_details(response.choices[0]) + def _process_response(self, response: chat.ChatCompletion | str) -> ModelResponse: """Process a non-streamed response, and prepare a message to return.""" # Although the OpenAI SDK claims to return a Pydantic model (`ChatCompletion`) from the chat completions function: @@ -554,7 +571,9 @@ def _process_response(self, response: chat.ChatCompletion | str) -> ModelRespons # * if the endpoint returns plain text, the return type is a string # Thus we validate it fully here.
if not isinstance(response, chat.ChatCompletion): - raise UnexpectedModelBehavior('Invalid response from OpenAI chat completions endpoint, expected JSON data') + raise UnexpectedModelBehavior( + f'Invalid response from {self.system} chat completions endpoint, expected JSON data' + ) if response.created: timestamp = number_to_datetime(response.created) @@ -567,27 +586,15 @@ def _process_response(self, response: chat.ChatCompletion | str) -> ModelRespons choice.finish_reason = 'stop' try: - response = chat.ChatCompletion.model_validate(response.model_dump()) + response = self._validate_completion(response) except ValidationError as e: - raise UnexpectedModelBehavior(f'Invalid response from OpenAI chat completions endpoint: {e}') from e + raise UnexpectedModelBehavior(f'Invalid response from {self.system} chat completions endpoint: {e}') from e choice = response.choices[0] items: list[ModelResponsePart] = [] - # The `reasoning_content` field is only present in DeepSeek models. - # https://api-docs.deepseek.com/guides/reasoning_model - if reasoning_content := getattr(choice.message, 'reasoning_content', None): - items.append(ThinkingPart(id='reasoning_content', content=reasoning_content, provider_name=self.system)) - - # The `reasoning` field is only present in gpt-oss via Ollama and OpenRouter. - # - https://cookbook.openai.com/articles/gpt-oss/handle-raw-cot#chat-completions-api - # - https://openrouter.ai/docs/use-cases/reasoning-tokens#basic-usage-with-reasoning-tokens - if reasoning := getattr(choice.message, 'reasoning', None): - items.append(ThinkingPart(id='reasoning', content=reasoning, provider_name=self.system)) - - # NOTE: We don't currently handle OpenRouter `reasoning_details`: - # - https://openrouter.ai/docs/use-cases/reasoning-tokens#preserving-reasoning-blocks - # If you need this, please file an issue. 
+ if thinking_parts := self._process_thinking(choice.message): + items.extend(thinking_parts) if choice.message.content: items.extend( @@ -607,38 +614,37 @@ def _process_response(self, response: chat.ChatCompletion | str) -> ModelRespons part.tool_call_id = _guard_tool_call_id(part) items.append(part) - vendor_details: dict[str, Any] = {} - - # Add logprobs to vendor_details if available - if choice.logprobs is not None and choice.logprobs.content: - # Convert logprobs to a serializable format - vendor_details['logprobs'] = [ - { - 'token': lp.token, - 'bytes': lp.bytes, - 'logprob': lp.logprob, - 'top_logprobs': [ - {'token': tlp.token, 'bytes': tlp.bytes, 'logprob': tlp.logprob} for tlp in lp.top_logprobs - ], - } - for lp in choice.logprobs.content - ] - - raw_finish_reason = choice.finish_reason - vendor_details['finish_reason'] = raw_finish_reason - finish_reason = _CHAT_FINISH_REASON_MAP.get(raw_finish_reason) - return ModelResponse( parts=items, - usage=_map_usage(response, self._provider.name, self._provider.base_url, self._model_name), + usage=self._map_usage(response), model_name=response.model, timestamp=timestamp, - provider_details=vendor_details or None, + provider_details=self._process_provider_details(response), provider_response_id=response.id, provider_name=self._provider.name, - finish_reason=finish_reason, + finish_reason=self._map_finish_reason(choice.finish_reason), ) + def _process_thinking(self, message: chat.ChatCompletionMessage) -> list[ThinkingPart] | None: + """Hook that maps reasoning tokens to thinking parts. + + This method may be overridden by subclasses of `OpenAIChatModel` to apply custom mappings. + """ + items: list[ThinkingPart] = [] + + # The `reasoning_content` field is only present in DeepSeek models. 
+ # https://api-docs.deepseek.com/guides/reasoning_model + if reasoning_content := getattr(message, 'reasoning_content', None): + items.append(ThinkingPart(id='reasoning_content', content=reasoning_content, provider_name=self.system)) + + # The `reasoning` field is only present in gpt-oss via Ollama and OpenRouter. + # - https://cookbook.openai.com/articles/gpt-oss/handle-raw-cot#chat-completions-api + # - https://openrouter.ai/docs/use-cases/reasoning-tokens#basic-usage-with-reasoning-tokens + if reasoning := getattr(message, 'reasoning', None): + items.append(ThinkingPart(id='reasoning', content=reasoning, provider_name=self.system)) + + return items + async def _process_streamed_response( self, response: AsyncStream[ChatCompletionChunk], model_request_parameters: ModelRequestParameters ) -> OpenAIStreamedResponse: @@ -654,7 +660,7 @@ async def _process_streamed_response( # so we set it from a later chunk in `OpenAIChatStreamedResponse`. model_name = first_chunk.model or self._model_name - return OpenAIStreamedResponse( + return self._streamed_response_cls( model_request_parameters=model_request_parameters, _model_name=model_name, _model_profile=self.profile, @@ -664,6 +670,17 @@ async def _process_streamed_response( _provider_url=self._provider.base_url, ) + @property + def _streamed_response_cls(self) -> type[OpenAIStreamedResponse]: + """Returns the `StreamedResponse` type that will be used for streamed responses. + + This method may be overridden by subclasses of `OpenAIChatModel` to provide their own `StreamedResponse` type. 
+ """ + return OpenAIStreamedResponse + + def _map_usage(self, response: chat.ChatCompletion) -> usage.RequestUsage: + return _map_usage(response, self._provider.name, self._provider.base_url, self._model_name) + def _get_tools(self, model_request_parameters: ModelRequestParameters) -> list[chat.ChatCompletionToolParam]: return [self._map_tool_definition(r) for r in model_request_parameters.tool_defs.values()] @@ -690,6 +707,118 @@ def _get_web_search_options(self, model_request_parameters: ModelRequestParamete f'`{tool.__class__.__name__}` is not supported by `OpenAIChatModel`. If it should be, please file an issue.' ) + @dataclass + class _MapModelResponseContext: + """Context object for mapping a `ModelResponse` to OpenAI chat completion parameters. + + This class is designed to be subclassed to add new fields for custom logic, + collecting various parts of the model response (like text and tool calls) + to form a single assistant message. + """ + + _model: OpenAIChatModel + + texts: list[str] = field(default_factory=list) + tool_calls: list[ChatCompletionMessageFunctionToolCallParam] = field(default_factory=list) + + def map_assistant_message(self, message: ModelResponse) -> chat.ChatCompletionAssistantMessageParam: + for item in message.parts: + if isinstance(item, TextPart): + self._map_response_text_part(item) + elif isinstance(item, ThinkingPart): + self._map_response_thinking_part(item) + elif isinstance(item, ToolCallPart): + self._map_response_tool_call_part(item) + elif isinstance(item, BuiltinToolCallPart | BuiltinToolReturnPart): # pragma: no cover + self._map_response_builtin_part(item) + elif isinstance(item, FilePart): # pragma: no cover + self._map_response_file_part(item) + else: + assert_never(item) + return self._into_message_param() + + def _into_message_param(self) -> chat.ChatCompletionAssistantMessageParam: + """Converts the collected texts and tool calls into a single OpenAI `ChatCompletionAssistantMessageParam`. 
+ + This method serves as a hook that can be overridden by subclasses + to implement custom logic for how collected parts are transformed into the final message parameter. + + Returns: + An OpenAI `ChatCompletionAssistantMessageParam` object representing the assistant's response. + """ + message_param = chat.ChatCompletionAssistantMessageParam(role='assistant') + if self.texts: + # Note: model responses from this model should only have one text item, so the following + # shouldn't merge multiple texts into one unless you switch models between runs: + message_param['content'] = '\n\n'.join(self.texts) + else: + message_param['content'] = None + if self.tool_calls: + message_param['tool_calls'] = self.tool_calls + return message_param + + def _map_response_text_part(self, item: TextPart) -> None: + """Maps a `TextPart` to the response context. + + This method serves as a hook that can be overridden by subclasses + to implement custom logic for handling text parts. + """ + self.texts.append(item.content) + + def _map_response_thinking_part(self, item: ThinkingPart) -> None: + """Maps a `ThinkingPart` to the response context. + + This method serves as a hook that can be overridden by subclasses + to implement custom logic for handling thinking parts. + """ + # NOTE: DeepSeek `reasoning_content` field should NOT be sent back per https://api-docs.deepseek.com/guides/reasoning_model, + # but we currently just send it in `` tags anyway as we don't want DeepSeek-specific checks here. + # If you need this changed, please file an issue. + start_tag, end_tag = self._model.profile.thinking_tags + self.texts.append('\n'.join([start_tag, item.content, end_tag])) + + def _map_response_tool_call_part(self, item: ToolCallPart) -> None: + """Maps a `ToolCallPart` to the response context. + + This method serves as a hook that can be overridden by subclasses + to implement custom logic for handling tool call parts. 
+ """ + self.tool_calls.append(self._model._map_tool_call(item)) + + def _map_response_builtin_part(self, item: BuiltinToolCallPart | BuiltinToolReturnPart) -> None: + """Maps a built-in tool call or return part to the response context. + + This method serves as a hook that can be overridden by subclasses + to implement custom logic for handling built-in tool parts. + """ + # OpenAI doesn't return built-in tool calls + pass + + def _map_response_file_part(self, item: FilePart) -> None: + """Maps a `FilePart` to the response context. + + This method serves as a hook that can be overridden by subclasses + to implement custom logic for handling file parts. + """ + # Files generated by models are not sent back to models that don't themselves generate files. + pass + + def _map_model_response(self, message: ModelResponse) -> chat.ChatCompletionMessageParam: + """Hook that determines how `ModelResponse` is mapped into `ChatCompletionMessageParam` objects before sending. + + Subclasses of `OpenAIChatModel` may override this method to provide their own mapping logic. + """ + return self._MapModelResponseContext(self).map_assistant_message(message) + + def _map_finish_reason( + self, key: Literal['stop', 'length', 'tool_calls', 'content_filter', 'function_call'] + ) -> FinishReason | None: + """Hooks that maps a finish reason key to a [FinishReason][pydantic_ai.messages.FinishReason]. + + This method may be overridden by subclasses of `OpenAIChatModel` to accommodate custom keys. 
+ """ + return _CHAT_FINISH_REASON_MAP.get(key) + async def _map_messages( self, messages: list[ModelMessage], model_request_parameters: ModelRequestParameters ) -> list[chat.ChatCompletionMessageParam]: @@ -700,37 +829,7 @@ async def _map_messages( async for item in self._map_user_message(message): openai_messages.append(item) elif isinstance(message, ModelResponse): - texts: list[str] = [] - tool_calls: list[ChatCompletionMessageFunctionToolCallParam] = [] - for item in message.parts: - if isinstance(item, TextPart): - texts.append(item.content) - elif isinstance(item, ThinkingPart): - # NOTE: DeepSeek `reasoning_content` field should NOT be sent back per https://api-docs.deepseek.com/guides/reasoning_model, - # but we currently just send it in `` tags anyway as we don't want DeepSeek-specific checks here. - # If you need this changed, please file an issue. - start_tag, end_tag = self.profile.thinking_tags - texts.append('\n'.join([start_tag, item.content, end_tag])) - elif isinstance(item, ToolCallPart): - tool_calls.append(self._map_tool_call(item)) - # OpenAI doesn't return built-in tool calls - elif isinstance(item, BuiltinToolCallPart | BuiltinToolReturnPart): # pragma: no cover - pass - elif isinstance(item, FilePart): # pragma: no cover - # Files generated by models are not sent back to models that don't themselves generate files. 
- pass - else: - assert_never(item) - message_param = chat.ChatCompletionAssistantMessageParam(role='assistant') - if texts: - # Note: model responses from this model should only have one text item, so the following - # shouldn't merge multiple texts into one unless you switch models between runs: - message_param['content'] = '\n\n'.join(texts) - else: - message_param['content'] = None - if tool_calls: - message_param['tool_calls'] = tool_calls - openai_messages.append(message_param) + openai_messages.append(self._map_model_response(message)) else: assert_never(message) if instructions := self._get_instructions(messages, model_request_parameters): @@ -1714,8 +1813,8 @@ class OpenAIStreamedResponse(StreamedResponse): _provider_url: str async def _get_event_iterator(self) -> AsyncIterator[ModelResponseStreamEvent]: - async for chunk in self._response: - self._usage += _map_usage(chunk, self._provider_name, self._provider_url, self._model_name) + async for chunk in self._validate_response(): + self._usage += self._map_usage(chunk) if chunk.id: # pragma: no branch self.provider_response_id = chunk.id @@ -1733,54 +1832,111 @@ async def _get_event_iterator(self) -> AsyncIterator[ModelResponseStreamEvent]: continue if raw_finish_reason := choice.finish_reason: - self.provider_details = {'finish_reason': raw_finish_reason} - self.finish_reason = _CHAT_FINISH_REASON_MAP.get(raw_finish_reason) + self.finish_reason = self._map_finish_reason(raw_finish_reason) - # The `reasoning_content` field is only present in DeepSeek models. 
- # https://api-docs.deepseek.com/guides/reasoning_model - if reasoning_content := getattr(choice.delta, 'reasoning_content', None): - yield self._parts_manager.handle_thinking_delta( - vendor_part_id='reasoning_content', - id='reasoning_content', - content=reasoning_content, - provider_name=self.provider_name, - ) + if provider_details := self._map_provider_details(chunk): + self.provider_details = provider_details - # The `reasoning` field is only present in gpt-oss via Ollama and OpenRouter. - # - https://cookbook.openai.com/articles/gpt-oss/handle-raw-cot#chat-completions-api - # - https://openrouter.ai/docs/use-cases/reasoning-tokens#basic-usage-with-reasoning-tokens - if reasoning := getattr(choice.delta, 'reasoning', None): # pragma: no cover - yield self._parts_manager.handle_thinking_delta( - vendor_part_id='reasoning', - id='reasoning', - content=reasoning, - provider_name=self.provider_name, - ) + for event in self._map_part_delta(choice): + yield event - # Handle the text part of the response - content = choice.delta.content - if content: - maybe_event = self._parts_manager.handle_text_delta( - vendor_part_id='content', - content=content, - thinking_tags=self._model_profile.thinking_tags, - ignore_leading_whitespace=self._model_profile.ignore_streamed_leading_whitespace, - ) - if maybe_event is not None: # pragma: no branch - if isinstance(maybe_event, PartStartEvent) and isinstance(maybe_event.part, ThinkingPart): - maybe_event.part.id = 'content' - maybe_event.part.provider_name = self.provider_name - yield maybe_event + def _validate_response(self) -> AsyncIterable[ChatCompletionChunk]: + """Hook that validates incoming chunks. 
- for dtc in choice.delta.tool_calls or []: - maybe_event = self._parts_manager.handle_tool_call_delta( - vendor_part_id=dtc.index, - tool_name=dtc.function and dtc.function.name, - args=dtc.function and dtc.function.arguments, - tool_call_id=dtc.id, - ) - if maybe_event is not None: - yield maybe_event + + This method may be overridden by subclasses of `OpenAIStreamedResponse` to apply custom chunk validations. + + By default, this is a no-op since `ChatCompletionChunk` is already validated. + """ + return self._response + + def _map_part_delta(self, choice: chat_completion_chunk.Choice) -> Iterable[ModelResponseStreamEvent]: + """Hook that determines the sequence of mappings that will be called to produce events. + + This method may be overridden by subclasses of `OpenAIStreamedResponse` to customize the mapping. + """ + return itertools.chain( + self._map_thinking_delta(choice), self._map_text_delta(choice), self._map_tool_call_delta(choice) + ) + + def _map_thinking_delta(self, choice: chat_completion_chunk.Choice) -> Iterable[ModelResponseStreamEvent]: + """Hook that maps thinking delta content to events. + + This method may be overridden by subclasses of `OpenAIStreamedResponse` to customize the mapping. + """ + # The `reasoning_content` field is only present in DeepSeek models. + # https://api-docs.deepseek.com/guides/reasoning_model + if reasoning_content := getattr(choice.delta, 'reasoning_content', None): + yield self._parts_manager.handle_thinking_delta( + vendor_part_id='reasoning_content', + id='reasoning_content', + content=reasoning_content, + provider_name=self.provider_name, + ) + + # The `reasoning` field is only present in gpt-oss via Ollama and OpenRouter. 
+ # - https://cookbook.openai.com/articles/gpt-oss/handle-raw-cot#chat-completions-api + # - https://openrouter.ai/docs/use-cases/reasoning-tokens#basic-usage-with-reasoning-tokens + if reasoning := getattr(choice.delta, 'reasoning', None): # pragma: no cover + yield self._parts_manager.handle_thinking_delta( + vendor_part_id='reasoning', + id='reasoning', + content=reasoning, + provider_name=self.provider_name, + ) + + def _map_text_delta(self, choice: chat_completion_chunk.Choice) -> Iterable[ModelResponseStreamEvent]: + """Hook that maps text delta content to events. + + This method may be overridden by subclasses of `OpenAIStreamedResponse` to customize the mapping. + """ + # Handle the text part of the response + content = choice.delta.content + if content: + maybe_event = self._parts_manager.handle_text_delta( + vendor_part_id='content', + content=content, + thinking_tags=self._model_profile.thinking_tags, + ignore_leading_whitespace=self._model_profile.ignore_streamed_leading_whitespace, + ) + if maybe_event is not None: # pragma: no branch + if isinstance(maybe_event, PartStartEvent) and isinstance(maybe_event.part, ThinkingPart): + maybe_event.part.id = 'content' + maybe_event.part.provider_name = self.provider_name + yield maybe_event + + def _map_tool_call_delta(self, choice: chat_completion_chunk.Choice) -> Iterable[ModelResponseStreamEvent]: + """Hook that maps tool call delta content to events. + + This method may be overridden by subclasses of `OpenAIStreamedResponse` to customize the mapping. + """ + for dtc in choice.delta.tool_calls or []: + maybe_event = self._parts_manager.handle_tool_call_delta( + vendor_part_id=dtc.index, + tool_name=dtc.function and dtc.function.name, + args=dtc.function and dtc.function.arguments, + tool_call_id=dtc.id, + ) + if maybe_event is not None: + yield maybe_event + + def _map_provider_details(self, chunk: ChatCompletionChunk) -> dict[str, Any] | None: + """Hook that generates the provider details from chunk content. 
+ + This method may be overridden by subclasses of `OpenAIStreamedResponse` to customize the provider details. + """ + return _map_provider_details(chunk.choices[0]) + + def _map_usage(self, response: ChatCompletionChunk) -> usage.RequestUsage: + return _map_usage(response, self._provider_name, self._provider_url, self._model_name) + + def _map_finish_reason( + self, key: Literal['stop', 'length', 'tool_calls', 'content_filter', 'function_call'] + ) -> FinishReason | None: + """Hook that maps a finish reason key to a [FinishReason][pydantic_ai.messages.FinishReason]. + + This method may be overridden by subclasses of `OpenAIStreamedResponse` to accommodate custom keys. + """ + return _CHAT_FINISH_REASON_MAP.get(key) @property def model_name(self) -> OpenAIModelName: @@ -2092,7 +2248,7 @@ async def _get_event_iterator(self) -> AsyncIterator[ModelResponseStreamEvent]: UserWarning, ) - def _map_usage(self, response: responses.Response): + def _map_usage(self, response: responses.Response) -> usage.RequestUsage: return _map_usage(response, self._provider_name, self._provider_url, self._model_name) @property @@ -2152,6 +2308,32 @@ def _map_usage( ) +def _map_provider_details( + choice: chat_completion_chunk.Choice | chat_completion.Choice, +) -> dict[str, Any]: + provider_details: dict[str, Any] = {} + + # Add logprobs to vendor_details if available + if choice.logprobs is not None and choice.logprobs.content: + # Convert logprobs to a serializable format + provider_details['logprobs'] = [ + { + 'token': lp.token, + 'bytes': lp.bytes, + 'logprob': lp.logprob, + 'top_logprobs': [ + {'token': tlp.token, 'bytes': tlp.bytes, 'logprob': tlp.logprob} for tlp in lp.top_logprobs + ], + } + for lp in choice.logprobs.content + ] + + if raw_finish_reason := choice.finish_reason: + provider_details['finish_reason'] = raw_finish_reason + + return provider_details + + def _split_combined_tool_call_id(combined_id: str) -> tuple[str, str | None]: # When reasoning, the Responses API requires
the `ResponseFunctionToolCall` to be returned with both the `call_id` and `id` fields. # Before our `ToolCallPart` gained the `id` field alongside `tool_call_id` field, we combined the two fields into a single string stored on `tool_call_id`. diff --git a/pydantic_ai_slim/pydantic_ai/models/openrouter.py b/pydantic_ai_slim/pydantic_ai/models/openrouter.py new file mode 100644 index 0000000000..f4c755e433 --- /dev/null +++ b/pydantic_ai_slim/pydantic_ai/models/openrouter.py @@ -0,0 +1,634 @@ +from __future__ import annotations as _annotations + +from collections.abc import Iterable +from dataclasses import dataclass, field +from typing import Any, Literal, cast + +from pydantic import BaseModel +from typing_extensions import TypedDict, assert_never, override + +from ..exceptions import ModelHTTPError +from ..messages import ( + FinishReason, + ModelResponseStreamEvent, + ThinkingPart, +) +from ..profiles import ModelProfileSpec +from ..providers import Provider +from ..providers.openrouter import OpenRouterProvider +from ..settings import ModelSettings +from . import ModelRequestParameters + +try: + from openai import APIError, AsyncOpenAI + from openai.types import chat, completion_usage + from openai.types.chat import chat_completion, chat_completion_chunk + + from .openai import OpenAIChatModel, OpenAIChatModelSettings, OpenAIStreamedResponse +except ImportError as _import_error: + raise ImportError( + 'Please install `openai` to use the OpenRouter model, ' + 'you can use the `openai` optional group — `pip install "pydantic-ai-slim[openai]"`' + ) from _import_error + +_CHAT_FINISH_REASON_MAP: dict[Literal['stop', 'length', 'tool_calls', 'content_filter', 'error'], FinishReason] = { + 'stop': 'stop', + 'length': 'length', + 'tool_calls': 'tool_call', + 'content_filter': 'content_filter', + 'error': 'error', +} + + +class _OpenRouterMaxPrice(TypedDict, total=False): + """The object specifying the maximum price you want to pay for this request. 
USD price per million tokens, for prompt and completion.""" + + prompt: int + completion: int + image: int + audio: int + request: int + + +KnownOpenRouterProviders = Literal[ + 'z-ai', + 'cerebras', + 'venice', + 'moonshotai', + 'morph', + 'stealth', + 'wandb', + 'klusterai', + 'openai', + 'sambanova', + 'amazon-bedrock', + 'mistral', + 'nextbit', + 'atoma', + 'ai21', + 'minimax', + 'baseten', + 'anthropic', + 'featherless', + 'groq', + 'lambda', + 'azure', + 'ncompass', + 'deepseek', + 'hyperbolic', + 'crusoe', + 'cohere', + 'mancer', + 'avian', + 'perplexity', + 'novita', + 'siliconflow', + 'switchpoint', + 'xai', + 'inflection', + 'fireworks', + 'deepinfra', + 'inference-net', + 'inception', + 'atlas-cloud', + 'nvidia', + 'alibaba', + 'friendli', + 'infermatic', + 'targon', + 'ubicloud', + 'aion-labs', + 'liquid', + 'nineteen', + 'cloudflare', + 'nebius', + 'chutes', + 'enfer', + 'crofai', + 'open-inference', + 'phala', + 'gmicloud', + 'meta', + 'relace', + 'parasail', + 'together', + 'google-ai-studio', + 'google-vertex', +] +"""Known providers in the OpenRouter marketplace""" + +OpenRouterProviderName = str | KnownOpenRouterProviders +"""Possible OpenRouter provider names. + +Since OpenRouter is constantly updating their list of providers, we explicitly list some known providers but +allow any name in the type hints. +See [the OpenRouter API](https://openrouter.ai/docs/api-reference/list-available-providers) for a full list. +""" + +_Transforms = Literal['middle-out'] +"""Available messages transforms for OpenRouter models with limited token windows. + +Currently only supports 'middle-out', but is expected to grow in the future. +""" + + +class _OpenRouterProviderConfig(TypedDict, total=False): + """Represents the 'Provider' object from the OpenRouter API.""" + + order: list[OpenRouterProviderName] + """List of provider slugs to try in order (e.g. ["anthropic", "openai"]). 
[See details](https://openrouter.ai/docs/features/provider-routing#ordering-specific-providers)""" + + allow_fallbacks: bool + """Whether to allow backup providers when the primary is unavailable. [See details](https://openrouter.ai/docs/features/provider-routing#disabling-fallbacks)""" + + require_parameters: bool + """Only use providers that support all parameters in your request.""" + + data_collection: Literal['allow', 'deny'] + """Control whether to use providers that may store data. [See details](https://openrouter.ai/docs/features/provider-routing#requiring-providers-to-comply-with-data-policies)""" + + zdr: bool + """Restrict routing to only ZDR (Zero Data Retention) endpoints. [See details](https://openrouter.ai/docs/features/provider-routing#zero-data-retention-enforcement)""" + + only: list[OpenRouterProviderName] + """List of provider slugs to allow for this request. [See details](https://openrouter.ai/docs/features/provider-routing#allowing-only-specific-providers)""" + + ignore: list[str] + """List of provider slugs to skip for this request. [See details](https://openrouter.ai/docs/features/provider-routing#ignoring-providers)""" + + quantizations: list[Literal['int4', 'int8', 'fp4', 'fp6', 'fp8', 'fp16', 'bf16', 'fp32', 'unknown']] + """List of quantization levels to filter by (e.g. ["int4", "int8"]). [See details](https://openrouter.ai/docs/features/provider-routing#quantization)""" + + sort: Literal['price', 'throughput', 'latency'] + """Sort providers by price or throughput. (e.g. "price" or "throughput"). [See details](https://openrouter.ai/docs/features/provider-routing#provider-sorting)""" + + max_price: _OpenRouterMaxPrice + """The maximum pricing you want to pay for this request. [See details](https://openrouter.ai/docs/features/provider-routing#max-price)""" + + +class _OpenRouterReasoning(TypedDict, total=False): + """Configuration for reasoning tokens in OpenRouter requests. 
+ + Reasoning tokens allow models to show their step-by-step thinking process. + You can configure this using either OpenAI-style effort levels or Anthropic-style + token limits, but not both simultaneously. + """ + + effort: Literal['high', 'medium', 'low'] + """OpenAI-style reasoning effort level. Cannot be used with max_tokens.""" + + max_tokens: int + """Anthropic-style specific token limit for reasoning. Cannot be used with effort.""" + + exclude: bool + """Whether to exclude reasoning tokens from the response. Default is False. All models support this.""" + + enabled: bool + """Whether to enable reasoning with default parameters. Default is inferred from effort or max_tokens.""" + + +class _OpenRouterUsageConfig(TypedDict, total=False): + """Configuration for OpenRouter usage.""" + + include: bool + + +class OpenRouterModelSettings(ModelSettings, total=False): + """Settings used for an OpenRouter model request.""" + + # ALL FIELDS MUST BE `openrouter_` PREFIXED SO YOU CAN MERGE THEM WITH OTHER MODELS. + + openrouter_models: list[str] + """A list of fallback models. + + These models will be tried, in order, if the main model returns an error. [See details](https://openrouter.ai/docs/features/model-routing#the-models-parameter) + """ + + openrouter_provider: _OpenRouterProviderConfig + """OpenRouter routes requests to the best available providers for your model. By default, requests are load balanced across the top providers to maximize uptime. + + You can customize how your requests are routed using the provider object. [See more](https://openrouter.ai/docs/features/provider-routing)""" + + openrouter_preset: str + """Presets allow you to separate your LLM configuration from your code. + + Create and manage presets through the OpenRouter web application to control provider routing, model selection, system prompts, and other parameters, then reference them in OpenRouter API requests. 
[See more](https://openrouter.ai/docs/features/presets)""" + + openrouter_transforms: list[_Transforms] + """To help with prompts that exceed the maximum context size of a model. + + Transforms work by removing or truncating messages from the middle of the prompt, until the prompt fits within the model's context window. [See more](https://openrouter.ai/docs/features/message-transforms) + """ + + openrouter_reasoning: _OpenRouterReasoning + """To control the reasoning tokens in the request. + + The reasoning config object consolidates settings for controlling reasoning strength across different models. [See more](https://openrouter.ai/docs/use-cases/reasoning-tokens) + """ + + openrouter_usage: _OpenRouterUsageConfig + """To control the usage of the model. + + The usage config object consolidates settings for enabling detailed usage information. [See more](https://openrouter.ai/docs/use-cases/usage-accounting) + """ + + +class _OpenRouterError(BaseModel): + """Utility class to validate error messages from OpenRouter.""" + + code: int + message: str + + +class _BaseReasoningDetail(BaseModel, frozen=True): + """Common fields shared across all reasoning detail types.""" + + id: str | None = None + format: Literal['unknown', 'openai-responses-v1', 'anthropic-claude-v1', 'xai-responses-v1'] | None + index: int | None + type: Literal['reasoning.text', 'reasoning.summary', 'reasoning.encrypted'] + + +class _ReasoningSummary(_BaseReasoningDetail, frozen=True): + """Represents a high-level summary of the reasoning process.""" + + type: Literal['reasoning.summary'] + summary: str + + +class _ReasoningEncrypted(_BaseReasoningDetail, frozen=True): + """Represents encrypted reasoning data.""" + + type: Literal['reasoning.encrypted'] + data: str + + +class _ReasoningText(_BaseReasoningDetail, frozen=True): + """Represents raw text reasoning.""" + + type: Literal['reasoning.text'] + text: str + signature: str | None = None + + +_OpenRouterReasoningDetail = _ReasoningSummary | 
_ReasoningEncrypted | _ReasoningText + + +def _from_reasoning_detail(reasoning: _OpenRouterReasoningDetail) -> ThinkingPart: + provider_name = 'openrouter' + provider_details = reasoning.model_dump(include={'format', 'index', 'type'}) + if isinstance(reasoning, _ReasoningText): + return ThinkingPart( + id=reasoning.id, + content=reasoning.text, + signature=reasoning.signature, + provider_name=provider_name, + provider_details=provider_details, + ) + elif isinstance(reasoning, _ReasoningSummary): + return ThinkingPart( + id=reasoning.id, content=reasoning.summary, provider_name=provider_name, provider_details=provider_details + ) + elif isinstance(reasoning, _ReasoningEncrypted): + return ThinkingPart( + id=reasoning.id, + content='', + signature=reasoning.data, + provider_name=provider_name, + provider_details=provider_details, + ) + else: + assert_never(reasoning) + + +def _into_reasoning_detail(thinking_part: ThinkingPart) -> _OpenRouterReasoningDetail | None: + if thinking_part.provider_details is None: # pragma: lax no cover + return None + + data = _BaseReasoningDetail.model_validate(thinking_part.provider_details) + + if data.type == 'reasoning.text': + return _ReasoningText( + type=data.type, + id=thinking_part.id, + format=data.format, + index=data.index, + text=thinking_part.content, + signature=thinking_part.signature, + ) + elif data.type == 'reasoning.summary': + return _ReasoningSummary( + type=data.type, + id=thinking_part.id, + format=data.format, + index=data.index, + summary=thinking_part.content, + ) + elif data.type == 'reasoning.encrypted': + assert thinking_part.signature is not None + return _ReasoningEncrypted( + type=data.type, + id=thinking_part.id, + format=data.format, + index=data.index, + data=thinking_part.signature, + ) + else: + assert_never(data.type) + + +class _OpenRouterCompletionMessage(chat.ChatCompletionMessage): + """Wrapped chat completion message with OpenRouter specific attributes.""" + + reasoning: str | None = None + 
"""The reasoning text associated with the message, if any.""" + + reasoning_details: list[_OpenRouterReasoningDetail] | None = None + """The reasoning details associated with the message, if any.""" + + +class _OpenRouterChoice(chat_completion.Choice): + """Wraps OpenAI chat completion choice with OpenRouter specific attributes.""" + + native_finish_reason: str + """The provided finish reason by the downstream provider from OpenRouter.""" + + finish_reason: Literal['stop', 'length', 'tool_calls', 'content_filter', 'error'] # type: ignore[reportIncompatibleVariableOverride] + """OpenRouter specific finish reasons. + + Notably, removes 'function_call' and adds 'error' finish reasons. + """ + + message: _OpenRouterCompletionMessage # type: ignore[reportIncompatibleVariableOverride] + """A wrapped chat completion message with OpenRouter specific attributes.""" + + +@dataclass +class _OpenRouterCostDetails: + """OpenRouter specific cost details.""" + + upstream_inference_cost: int | None = None + + +class _OpenRouterPromptTokenDetails(completion_usage.PromptTokensDetails): + """Wraps OpenAI completion token details with OpenRouter specific attributes.""" + + video_tokens: int | None = None + + +class _OpenRouterCompletionTokenDetails(completion_usage.CompletionTokensDetails): + """Wraps OpenAI completion token details with OpenRouter specific attributes.""" + + image_tokens: int | None = None + + +class _OpenRouterUsage(completion_usage.CompletionUsage): + """Wraps OpenAI completion usage with OpenRouter specific attributes.""" + + cost: float | None = None + + cost_details: _OpenRouterCostDetails | None = None + + is_byok: bool | None = None + + prompt_tokens_details: _OpenRouterPromptTokenDetails | None = None # type: ignore[reportIncompatibleVariableOverride] + + completion_tokens_details: _OpenRouterCompletionTokenDetails | None = None # type: ignore[reportIncompatibleVariableOverride] + + +class _OpenRouterChatCompletion(chat.ChatCompletion): + """Wraps OpenAI chat 
completion with OpenRouter specific attributes.""" + + provider: str + """The downstream provider that was used by OpenRouter.""" + + choices: list[_OpenRouterChoice] # type: ignore[reportIncompatibleVariableOverride] + """A list of chat completion choices modified with OpenRouter specific attributes.""" + + error: _OpenRouterError | None = None + """OpenRouter specific error attribute.""" + + usage: _OpenRouterUsage | None = None # type: ignore[reportIncompatibleVariableOverride] + """OpenRouter specific usage attribute.""" + + +def _map_openrouter_provider_details( + response: _OpenRouterChatCompletion | _OpenRouterChatCompletionChunk, +) -> dict[str, Any]: + provider_details: dict[str, Any] = {} + + provider_details['downstream_provider'] = response.provider + provider_details['finish_reason'] = response.choices[0].native_finish_reason + + if usage := response.usage: + if cost := usage.cost: + provider_details['cost'] = cost + + if cost_details := usage.cost_details: + provider_details['upstream_inference_cost'] = cost_details.upstream_inference_cost + + if (is_byok := usage.is_byok) is not None: + provider_details['is_byok'] = is_byok + + return provider_details + + +def _openrouter_settings_to_openai_settings(model_settings: OpenRouterModelSettings) -> OpenAIChatModelSettings: + """Transforms a 'OpenRouterModelSettings' object into an 'OpenAIChatModelSettings' object. + + Args: + model_settings: The 'OpenRouterModelSettings' object to transform. + + Returns: + An 'OpenAIChatModelSettings' object with equivalent settings. 
+    """
+    extra_body = cast(dict[str, Any], model_settings.get('extra_body', {}))
+
+    if models := model_settings.pop('openrouter_models', None):
+        extra_body['models'] = models
+    if provider := model_settings.pop('openrouter_provider', None):
+        extra_body['provider'] = provider
+    if preset := model_settings.pop('openrouter_preset', None):
+        extra_body['preset'] = preset
+    if transforms := model_settings.pop('openrouter_transforms', None):
+        extra_body['transforms'] = transforms
+    if usage := model_settings.pop('openrouter_usage', None):
+        extra_body['usage'] = usage
+
+    model_settings['extra_body'] = extra_body
+
+    return OpenAIChatModelSettings(**model_settings)  # type: ignore[reportCallIssue]
+
+
+class OpenRouterModel(OpenAIChatModel):
+    """Extends `OpenAIChatModel` to capture extra metadata for OpenRouter."""
+
+    def __init__(
+        self,
+        model_name: str,
+        *,
+        provider: Literal['openrouter'] | Provider[AsyncOpenAI] = 'openrouter',
+        profile: ModelProfileSpec | None = None,
+        settings: ModelSettings | None = None,
+    ):
+        """Initialize an OpenRouter model.
+
+        Args:
+            model_name: The name of the model to use.
+            provider: The provider to use for authentication and API access. If not provided, a new provider will be created with the default settings.
+            profile: The model profile to use. Defaults to a profile picked by the provider based on the model name.
+            settings: Model-specific settings that will be used as defaults for this model. 
+ """ + super().__init__(model_name, provider=provider or OpenRouterProvider(), profile=profile, settings=settings) + + @override + def prepare_request( + self, + model_settings: ModelSettings | None, + model_request_parameters: ModelRequestParameters, + ) -> tuple[ModelSettings | None, ModelRequestParameters]: + merged_settings, customized_parameters = super().prepare_request(model_settings, model_request_parameters) + new_settings = _openrouter_settings_to_openai_settings(cast(OpenRouterModelSettings, merged_settings or {})) + return new_settings, customized_parameters + + @override + def _validate_completion(self, response: chat.ChatCompletion) -> _OpenRouterChatCompletion: + response = _OpenRouterChatCompletion.model_validate(response.model_dump()) + + if error := response.error: + raise ModelHTTPError(status_code=error.code, model_name=response.model, body=error.message) + + return response + + @override + def _process_thinking(self, message: chat.ChatCompletionMessage) -> list[ThinkingPart] | None: + assert isinstance(message, _OpenRouterCompletionMessage) + + if reasoning_details := message.reasoning_details: + return [_from_reasoning_detail(detail) for detail in reasoning_details] + else: + return super()._process_thinking(message) + + @override + def _process_provider_details(self, response: chat.ChatCompletion) -> dict[str, Any]: + assert isinstance(response, _OpenRouterChatCompletion) + + provider_details = super()._process_provider_details(response) + provider_details.update(_map_openrouter_provider_details(response)) + return provider_details + + @dataclass + class _MapModelResponseContext(OpenAIChatModel._MapModelResponseContext): # type: ignore[reportPrivateUsage] + reasoning_details: list[dict[str, Any]] = field(default_factory=list) + + def _into_message_param(self) -> chat.ChatCompletionAssistantMessageParam: + message_param = super()._into_message_param() + if self.reasoning_details: + message_param['reasoning_details'] = self.reasoning_details # 
type: ignore[reportGeneralTypeIssues] + return message_param + + @override + def _map_response_thinking_part(self, item: ThinkingPart) -> None: + assert isinstance(self._model, OpenRouterModel) + if item.provider_name == self._model.system: + if reasoning_detail := _into_reasoning_detail(item): # pragma: lax no cover + self.reasoning_details.append(reasoning_detail.model_dump()) + elif content := item.content: # pragma: lax no cover + start_tag, end_tag = self._model.profile.thinking_tags + self.texts.append('\n'.join([start_tag, content, end_tag])) + else: + pass + + @property + @override + def _streamed_response_cls(self): + return OpenRouterStreamedResponse + + @override + def _map_finish_reason( # type: ignore[reportIncompatibleMethodOverride] + self, key: Literal['stop', 'length', 'tool_calls', 'content_filter', 'error'] + ) -> FinishReason | None: + return _CHAT_FINISH_REASON_MAP.get(key) + + +class _OpenRouterChoiceDelta(chat_completion_chunk.ChoiceDelta): + """Wrapped chat completion message with OpenRouter specific attributes.""" + + reasoning: str | None = None + """The reasoning text associated with the message, if any.""" + + reasoning_details: list[_OpenRouterReasoningDetail] | None = None + """The reasoning details associated with the message, if any.""" + + +class _OpenRouterChunkChoice(chat_completion_chunk.Choice): + """Wraps OpenAI chat completion chunk choice with OpenRouter specific attributes.""" + + native_finish_reason: str | None + """The provided finish reason by the downstream provider from OpenRouter.""" + + finish_reason: Literal['stop', 'length', 'tool_calls', 'content_filter', 'error'] | None # type: ignore[reportIncompatibleVariableOverride] + """OpenRouter specific finish reasons for streaming chunks. + + Notably, removes 'function_call' and adds 'error' finish reasons. 
+ """ + + delta: _OpenRouterChoiceDelta # type: ignore[reportIncompatibleVariableOverride] + """A wrapped chat completion delta with OpenRouter specific attributes.""" + + +class _OpenRouterChatCompletionChunk(chat.ChatCompletionChunk): + """Wraps OpenAI chat completion with OpenRouter specific attributes.""" + + provider: str + """The downstream provider that was used by OpenRouter.""" + + choices: list[_OpenRouterChunkChoice] # type: ignore[reportIncompatibleVariableOverride] + """A list of chat completion chunk choices modified with OpenRouter specific attributes.""" + + usage: _OpenRouterUsage | None = None # type: ignore[reportIncompatibleVariableOverride] + """Usage statistics for the completion request.""" + + +@dataclass +class OpenRouterStreamedResponse(OpenAIStreamedResponse): + """Implementation of `StreamedResponse` for OpenRouter models.""" + + @override + async def _validate_response(self): + try: + async for chunk in self._response: + yield _OpenRouterChatCompletionChunk.model_validate(chunk.model_dump()) + except APIError as e: + error = _OpenRouterError.model_validate(e.body) + raise ModelHTTPError(status_code=error.code, model_name=self._model_name, body=error.message) + + @override + def _map_thinking_delta(self, choice: chat_completion_chunk.Choice) -> Iterable[ModelResponseStreamEvent]: + assert isinstance(choice, _OpenRouterChunkChoice) + + if reasoning_details := choice.delta.reasoning_details: + for detail in reasoning_details: + thinking_part = _from_reasoning_detail(detail) + yield self._parts_manager.handle_thinking_delta( + vendor_part_id='reasoning_detail', + id=thinking_part.id, + content=thinking_part.content, + provider_name=self._provider_name, + ) + else: + return super()._map_thinking_delta(choice) + + @override + def _map_provider_details(self, chunk: chat.ChatCompletionChunk) -> dict[str, Any] | None: + assert isinstance(chunk, _OpenRouterChatCompletionChunk) + + if provider_details := super()._map_provider_details(chunk): + 
provider_details.update(_map_openrouter_provider_details(chunk)) + return provider_details + + @override + def _map_finish_reason( # type: ignore[reportIncompatibleMethodOverride] + self, key: Literal['stop', 'length', 'tool_calls', 'content_filter', 'error'] + ) -> FinishReason | None: + return _CHAT_FINISH_REASON_MAP.get(key) diff --git a/pydantic_ai_slim/pydantic_ai/providers/openrouter.py b/pydantic_ai_slim/pydantic_ai/providers/openrouter.py index 43d0e3d14d..70f962e047 100644 --- a/pydantic_ai_slim/pydantic_ai/providers/openrouter.py +++ b/pydantic_ai_slim/pydantic_ai/providers/openrouter.py @@ -81,6 +81,12 @@ def __init__(self, *, api_key: str) -> None: ... @overload def __init__(self, *, api_key: str, http_client: httpx.AsyncClient) -> None: ... + @overload + def __init__(self, *, api_key: str, app_url: str, app_title: str) -> None: ... + + @overload + def __init__(self, *, api_key: str, app_url: str, app_title: str, http_client: httpx.AsyncClient) -> None: ... + @overload def __init__(self, *, http_client: httpx.AsyncClient) -> None: ... @@ -91,9 +97,29 @@ def __init__( self, *, api_key: str | None = None, + app_url: str | None = None, + app_title: str | None = None, openai_client: AsyncOpenAI | None = None, http_client: httpx.AsyncClient | None = None, ) -> None: + """Configure the provider with either an API key or prebuilt client. + + Args: + api_key: OpenRouter API key. Falls back to ``OPENROUTER_API_KEY`` + when omitted and required unless ``openai_client`` is provided. + app_url: Optional url for app attribution. Falls back to + ``OPENROUTER_APP_URL`` when omitted. + app_title: Optional title for app attribution. Falls back to + ``OPENROUTER_APP_TITLE`` when omitted. + openai_client: Existing ``AsyncOpenAI`` client to reuse instead of + creating one internally. + http_client: Custom ``httpx.AsyncClient`` to pass into the + ``AsyncOpenAI`` constructor when building a client. 
+ + Raises: + UserError: If no API key is available and no ``openai_client`` is + provided. + """ api_key = api_key or os.getenv('OPENROUTER_API_KEY') if not api_key and openai_client is None: raise UserError( @@ -101,10 +127,20 @@ def __init__( 'to use the OpenRouter provider.' ) + attribution_headers: dict[str, str] = {} + if http_referer := app_url or os.getenv('OPENROUTER_APP_URL'): + attribution_headers['HTTP-Referer'] = http_referer + if x_title := app_title or os.getenv('OPENROUTER_APP_TITLE'): + attribution_headers['X-Title'] = x_title + if openai_client is not None: self._client = openai_client elif http_client is not None: - self._client = AsyncOpenAI(base_url=self.base_url, api_key=api_key, http_client=http_client) + self._client = AsyncOpenAI( + base_url=self.base_url, api_key=api_key, http_client=http_client, default_headers=attribution_headers + ) else: http_client = cached_async_http_client(provider='openrouter') - self._client = AsyncOpenAI(base_url=self.base_url, api_key=api_key, http_client=http_client) + self._client = AsyncOpenAI( + base_url=self.base_url, api_key=api_key, http_client=http_client, default_headers=attribution_headers + ) diff --git a/pydantic_ai_slim/pyproject.toml b/pydantic_ai_slim/pyproject.toml index 86b7d65e05..1a51cff895 100644 --- a/pydantic_ai_slim/pyproject.toml +++ b/pydantic_ai_slim/pyproject.toml @@ -73,6 +73,7 @@ vertexai = ["google-auth>=2.36.0", "requests>=2.32.2"] google = ["google-genai>=1.51.0"] anthropic = ["anthropic>=0.70.0"] groq = ["groq>=0.25.0"] +openrouter = ["openai>=2.8.0"] mistral = ["mistralai>=1.9.10"] bedrock = ["boto3>=1.40.14"] huggingface = ["huggingface-hub[inference]>=0.33.5"] diff --git a/tests/models/cassettes/test_openrouter/test_openrouter_errors_raised.yaml b/tests/models/cassettes/test_openrouter/test_openrouter_errors_raised.yaml new file mode 100644 index 0000000000..dacb9f72c9 --- /dev/null +++ b/tests/models/cassettes/test_openrouter/test_openrouter_errors_raised.yaml @@ -0,0 +1,161 
@@ +interactions: +- request: + headers: + accept: + - application/json + accept-encoding: + - gzip, deflate + connection: + - keep-alive + content-length: + - '158' + content-type: + - application/json + host: + - openrouter.ai + method: POST + parsed_body: + messages: + - content: Be helpful. + role: system + - content: Tell me a joke. + role: user + model: google/gemini-2.0-flash-exp:free + stream: false + uri: https://openrouter.ai/api/v1/chat/completions + response: + headers: + access-control-allow-origin: + - '*' + connection: + - keep-alive + content-type: + - application/json + permissions-policy: + - payment=(self "https://checkout.stripe.com" "https://connect-js.stripe.com" "https://js.stripe.com" "https://*.js.stripe.com" + "https://hooks.stripe.com") + referrer-policy: + - no-referrer, strict-origin-when-cross-origin + transfer-encoding: + - chunked + vary: + - Accept-Encoding + parsed_body: + error: + code: 429 + message: Provider returned error + metadata: + provider_name: Google + raw: 'google/gemini-2.0-flash-exp:free is temporarily rate-limited upstream. Please retry shortly, or add your own + key to accumulate your rate limits: https://openrouter.ai/settings/integrations' + user_id: user_2wT5ElBE4Es3R4QrNLpZiXICmQP + status: + code: 429 + message: Too Many Requests +- request: + headers: + accept: + - application/json + accept-encoding: + - gzip, deflate + connection: + - keep-alive + content-length: + - '158' + content-type: + - application/json + host: + - openrouter.ai + method: POST + parsed_body: + messages: + - content: Be helpful. + role: system + - content: Tell me a joke. 
+ role: user + model: google/gemini-2.0-flash-exp:free + stream: false + uri: https://openrouter.ai/api/v1/chat/completions + response: + headers: + access-control-allow-origin: + - '*' + connection: + - keep-alive + content-type: + - application/json + permissions-policy: + - payment=(self "https://checkout.stripe.com" "https://connect-js.stripe.com" "https://js.stripe.com" "https://*.js.stripe.com" + "https://hooks.stripe.com") + referrer-policy: + - no-referrer, strict-origin-when-cross-origin + transfer-encoding: + - chunked + vary: + - Accept-Encoding + parsed_body: + error: + code: 429 + message: Provider returned error + metadata: + provider_name: Google + raw: 'google/gemini-2.0-flash-exp:free is temporarily rate-limited upstream. Please retry shortly, or add your own + key to accumulate your rate limits: https://openrouter.ai/settings/integrations' + user_id: user_2wT5ElBE4Es3R4QrNLpZiXICmQP + status: + code: 429 + message: Too Many Requests +- request: + headers: + accept: + - application/json + accept-encoding: + - gzip, deflate + connection: + - keep-alive + content-length: + - '158' + content-type: + - application/json + host: + - openrouter.ai + method: POST + parsed_body: + messages: + - content: Be helpful. + role: system + - content: Tell me a joke. 
+ role: user + model: google/gemini-2.0-flash-exp:free + stream: false + uri: https://openrouter.ai/api/v1/chat/completions + response: + headers: + access-control-allow-origin: + - '*' + connection: + - keep-alive + content-type: + - application/json + permissions-policy: + - payment=(self "https://checkout.stripe.com" "https://connect-js.stripe.com" "https://js.stripe.com" "https://*.js.stripe.com" + "https://hooks.stripe.com") + referrer-policy: + - no-referrer, strict-origin-when-cross-origin + transfer-encoding: + - chunked + vary: + - Accept-Encoding + parsed_body: + error: + code: 429 + message: Provider returned error + metadata: + provider_name: Google + raw: 'google/gemini-2.0-flash-exp:free is temporarily rate-limited upstream. Please retry shortly, or add your own + key to accumulate your rate limits: https://openrouter.ai/settings/integrations' + user_id: user_2wT5ElBE4Es3R4QrNLpZiXICmQP + status: + code: 429 + message: Too Many Requests +version: 1 diff --git a/tests/models/cassettes/test_openrouter/test_openrouter_map_messages_reasoning.yaml b/tests/models/cassettes/test_openrouter/test_openrouter_map_messages_reasoning.yaml new file mode 100644 index 0000000000..aa634b6658 --- /dev/null +++ b/tests/models/cassettes/test_openrouter/test_openrouter_map_messages_reasoning.yaml @@ -0,0 +1,96 @@ +interactions: +- request: + headers: + accept: + - application/json + accept-encoding: + - gzip, deflate + connection: + - keep-alive + content-length: + - '133' + content-type: + - application/json + host: + - openrouter.ai + method: POST + parsed_body: + messages: + - content: Who are you. Think about it. 
+ role: user + model: anthropic/claude-3.7-sonnet:thinking + stream: false + uri: https://openrouter.ai/api/v1/chat/completions + response: + headers: + access-control-allow-origin: + - '*' + connection: + - keep-alive + content-length: + - '4024' + content-type: + - application/json + permissions-policy: + - payment=(self "https://checkout.stripe.com" "https://connect-js.stripe.com" "https://js.stripe.com" "https://*.js.stripe.com" + "https://hooks.stripe.com") + referrer-policy: + - no-referrer, strict-origin-when-cross-origin + transfer-encoding: + - chunked + vary: + - Accept-Encoding + parsed_body: + choices: + - finish_reason: stop + index: 0 + logprobs: null + message: + content: "I am Claude, an AI assistant created by Anthropic. I'm a large language model designed to be helpful, + harmless, and honest.\n\nI don't have consciousness or sentience like humans do - I'm a sophisticated text prediction + system trained on a large dataset of human text. I don't have personal experiences, emotions, or a physical existence. + \n\nMy purpose is to assist you with information, tasks, and conversation in a helpful way, while acknowledging + my limitations. I have knowledge cutoffs, can occasionally make mistakes, and don't have the ability to access + the internet or take actions in the physical world.\n\nIs there something specific you'd like to know about me + or how I can assist you?" + reasoning: |- + This question is asking me about my identity. Let me think about how to respond clearly and accurately. + + I am Claude, an AI assistant created by Anthropic. I'm designed to be helpful, harmless, and honest in my interactions with humans. I don't have a physical form - I exist as a large language model running on computer hardware. I don't have consciousness, sentience, or feelings in the way humans do. I don't have personal experiences or a life outside of these conversations. 
+ + My capabilities include understanding and generating natural language text, reasoning about various topics, and attempting to be helpful to users in a wide range of contexts. I have been trained on a large corpus of text data, but my training data has a cutoff date, so I don't have knowledge of events that occurred after my training. + + I have certain limitations - I don't have the ability to access the internet, run code, or interact with external systems unless given specific tools to do so. I don't have perfect knowledge and can make mistakes. + + I'm designed to be conversational and to engage with users in a way that's helpful and informative, while respecting important ethical boundaries. + reasoning_details: + - format: anthropic-claude-v1 + index: 0 + signature: ErcBCkgICBACGAIiQHtMxpqcMhnwgGUmSDWGoOL9ZHTbDKjWnhbFm0xKzFl0NmXFjQQxjFj5mieRYY718fINsJMGjycTVYeiu69npakSDDrsnKYAD/fdcpI57xoMHlQBxI93RMa5CSUZIjAFVCMQF5GfLLQCibyPbb7LhZ4kLIFxw/nqsTwDDt6bx3yipUcq7G7eGts8MZ6LxOYqHTlIDx0tfHRIlkkcNCdB2sUeMqP8e7kuQqIHoD52GAI= + text: |- + This question is asking me about my identity. Let me think about how to respond clearly and accurately. + + I am Claude, an AI assistant created by Anthropic. I'm designed to be helpful, harmless, and honest in my interactions with humans. I don't have a physical form - I exist as a large language model running on computer hardware. I don't have consciousness, sentience, or feelings in the way humans do. I don't have personal experiences or a life outside of these conversations. + + My capabilities include understanding and generating natural language text, reasoning about various topics, and attempting to be helpful to users in a wide range of contexts. I have been trained on a large corpus of text data, but my training data has a cutoff date, so I don't have knowledge of events that occurred after my training. 
+ + I have certain limitations - I don't have the ability to access the internet, run code, or interact with external systems unless given specific tools to do so. I don't have perfect knowledge and can make mistakes. + + I'm designed to be conversational and to engage with users in a way that's helpful and informative, while respecting important ethical boundaries. + type: reasoning.text + refusal: null + role: assistant + native_finish_reason: stop + created: 1760051228 + id: gen-1760051228-zUtCCQbb0vkaM4UXZmcb + model: anthropic/claude-3.7-sonnet:thinking + object: chat.completion + provider: Google + usage: + completion_tokens: 402 + prompt_tokens: 43 + total_tokens: 445 + status: + code: 200 + message: OK +version: 1 diff --git a/tests/models/cassettes/test_openrouter/test_openrouter_preserve_reasoning_block.yaml b/tests/models/cassettes/test_openrouter/test_openrouter_preserve_reasoning_block.yaml new file mode 100644 index 0000000000..a99de7aa6d --- /dev/null +++ b/tests/models/cassettes/test_openrouter/test_openrouter_preserve_reasoning_block.yaml @@ -0,0 +1,186 @@ +interactions: +- request: + headers: + accept: + - application/json + accept-encoding: + - gzip, deflate + connection: + - keep-alive + content-length: + - '92' + content-type: + - application/json + host: + - openrouter.ai + method: POST + parsed_body: + messages: + - content: Hello! 
+ role: user + model: openai/gpt-5-mini + stream: false + uri: https://openrouter.ai/api/v1/chat/completions + response: + headers: + access-control-allow-origin: + - '*' + connection: + - keep-alive + content-length: + - '2824' + content-type: + - application/json + permissions-policy: + - payment=(self "https://checkout.stripe.com" "https://connect-js.stripe.com" "https://js.stripe.com" "https://*.js.stripe.com" + "https://hooks.stripe.com") + referrer-policy: + - no-referrer, strict-origin-when-cross-origin + transfer-encoding: + - chunked + vary: + - Accept-Encoding + parsed_body: + choices: + - finish_reason: stop + index: 0 + logprobs: null + message: + content: Hello! How can I help you today? + refusal: null + role: assistant + native_finish_reason: completed + created: 1761751488 + id: gen-1761751488-sw4FP5A0ecwISVPjA4ec + model: openai/gpt-5-mini + object: chat.completion + provider: OpenAI + usage: + completion_tokens: 15 + completion_tokens_details: + reasoning_tokens: 0 + prompt_tokens: 8 + total_tokens: 23 + status: + code: 200 + message: OK +- request: + headers: + accept: + - application/json + accept-encoding: + - gzip, deflate + connection: + - keep-alive + content-length: + - '2107' + content-type: + - application/json + host: + - openrouter.ai + method: POST + parsed_body: + messages: + - content: Hello! + role: user + - content: Hello! How can I help you today? + role: assistant + - content: What was the impact of Voltaire's writings on modern french culture? 
+ role: user + model: openai/gpt-5-mini + stream: false + uri: https://openrouter.ai/api/v1/chat/completions + response: + headers: + access-control-allow-origin: + - '*' + connection: + - keep-alive + content-length: + - '11808' + content-type: + - application/json + permissions-policy: + - payment=(self "https://checkout.stripe.com" "https://connect-js.stripe.com" "https://js.stripe.com" "https://*.js.stripe.com" + "https://hooks.stripe.com") + referrer-policy: + - no-referrer, strict-origin-when-cross-origin + transfer-encoding: + - chunked + vary: + - Accept-Encoding + parsed_body: + choices: + - finish_reason: stop + index: 0 + logprobs: null + message: + content: |- + Short answer: Very large. Voltaire’s writings helped shape the core values, public culture, and intellectual practices of modern France — especially skepticism of religious authority, commitment to freedom of expression, secular public institutions, and the model of the engaged public intellectual — while also leaving a lasting mark on French literature and satire. + + Key impacts + + - Secularism and anti-clericalism: Voltaire’s relentless attacks on superstition, clerical power and intolerance (e.g., Traité sur la tolérance, his campaign in the Calas affair, his slogan “écrasez l’infâme”) fed the anti‑clerical currents that culminated in revolutionary and 19th‑century reforms and, ultimately, the laïcité regime formalized in 1905. He helped normalize the idea that religious authority must be questioned in public life. + + - Freedom of thought and speech: Through polemics, pamphlets and the Dictionnaire philosophique, Voltaire popularized the ideal of free enquiry and the right to criticize rulers and institutions. That ethos is central to modern French republican self‑understanding and the role of public debate. 
+ + - Humanitarian and legal reform impulses: Voltaire’s advocacy in specific cases (notably the campaign to clear Jean Calas) brought public attention to miscarriages of justice and influenced later penal and judicial reforms. His writings helped make legal equality and protections against religiously motivated persecution part of the public agenda. + + - Political culture and republican values: While Voltaire was not a democrat in the modern sense (he often favored enlightened monarchy), his critique of arbitrary power, privilege and superstition contributed to the intellectual background of the Revolution and to contemporary French commitments to liberté, égalité and civic criticism. + + - Literary influence and the public sphere: Voltaire’s style (wit, irony, concise prose), his use of satire (most famously Candide), and his role in the book trade and salon culture helped create a lively literate public sphere. The model of the “littérateur engagé” — an intellectual who intervenes in public affairs — is a direct cultural inheritance. + + - Influence on education and culture: Voltaire championed science, reason and history; these priorities influenced modern curricula and cultural institutions that prize critical thinking, secular instruction and the canon of Enlightenment literature in France. + + Nuance and limits + + - Not an ideological ancestor of all modern French values: Voltaire advocated religious tolerance but was sometimes elitist, anti-democratic, and even expressed prejudiced opinions by modern standards. His support for enlightened despotism (at times) and skepticism about popular rule complicate simplistic portrayals. + + - Part of a broader movement: Voltaire was one of several Enlightenment figures (Rousseau, Montesquieu, Diderot, etc.) whose ideas intersected and sometimes conflicted. Modern French culture is the product of that whole discourse and subsequent political struggles. 
+ + Concrete legacy today + + - Laïcité and a strong public secular culture that tolerates religious practice but restricts religious influence on the state. + - A national reverence for free debate and the intellectual’s role in society — newspapers, op‑eds and public intellectuals remain central to French politics. + - Candide and other works are staples of school curricula; Voltaire’s phrases and arguments appear in political rhetoric and public memory. + - Legal and human‑rights traditions that trace part of their genealogy to Enlightenment critiques of theology and absolutism. + + If you’d like, I can: + - Give short passages from Voltaire that illustrate these themes, + - Trace his influence more precisely on a particular French institution (e.g., laïcité or the legal code), + - Or recommend readable books or articles for deeper study. + reasoning: |- + **Structuring Voltaire’s Impact** + + I’m thinking about creating a structured answer that includes an overview introduction and then dives into main impacts with explanations. I’ll mention his notable works like "Candide", "Lettres philosophiques", and "Traité sur la tolérance," plus his criticism of religious fanaticism in "Mahomet." I'll also highlight his phrase "Écrasez l'infâme" and his influence on laïcité, shaping the French tradition of public intellectuals. To wrap up, I’ll suggest further reading, including biographies about him for a well-rounded exploration. + reasoning_details: + - format: openai-responses-v1 + index: 0 + summary: |- + **Structuring Voltaire’s Impact** + + I’m thinking about creating a structured answer that includes an overview introduction and then dives into main impacts with explanations. I’ll mention his notable works like "Candide", "Lettres philosophiques", and "Traité sur la tolérance," plus his criticism of religious fanaticism in "Mahomet." 
I'll also highlight his phrase "Écrasez l'infâme" and his influence on laïcité, shaping the French tradition of public intellectuals. To wrap up, I’ll suggest further reading, including biographies about him for a well-rounded exploration. + type: reasoning.summary + - data: gAAAAABpAjHR-iS0FxMStS_v-8ZA-bqfQViKFq_Z7ZRd0AvUM9A63Luvm1kIAusn73GmlLEvagOa9Ckvab0NseO538_gxhT7jdQAqvTHgHfVLcwr0p-BRJxILXeGeX2gZgJ2l6QLFXai6I9UBTTMD6T3nVOVGP1rhjO6fVaHO13B4P_z717CRiEULGDgLPeHsVs2VzXbO_TupyfATqy7HVcLNm7SsTbT0O7zNCBYXIOFq1SrFQzvgkVCnH2Q5qmc29Ha7hyAH2WBN8yRwS9bx7fMEqs-NTH3zCMdl_OU3OhkxFpYUnW0V5HgF041SZkLId3DnK1vcjtLeUd3jdtc9cOl4GxBzbSXPo7hNmsk62V_ryyHcxpEPHm-sqARRX94C8Z61Hj2OlFFRGdFRlyO8ddVXUtxT6LPawFPOuJ9fRixoAYb1704_NriJtArPGJfEAhMIMzMNExCzzbf1SA9TxXHc1_RMgSeBHPe6wJibAQ2Cw1rRXJPUppSO-5izQCN3mGRMmsXk5-FOCRfiFLpBqQg0fSTmXWD3jkh_0JeWDY2_YELKOLcTr88PVN39pZKF14GcoplOsCi6ry6_iojV4lpPxXDno6m0nMu2jzTehv1v1reQioUtY9my4UyGUCaCJer1MtKYgyCbQIXSkKRAQmbXi7mE32dzDif8gwXy1X8GK_ViRng_ep3uKjIiBSiAE2uGuKW5sHyQ80h-C_wvEn8Hn3-B7Czntqeh3rHm5gJnZl1YMAhS5PpVWY9JfmVdxlJOtKl_T_z5CsL-Dcg4K9OFXNl5SHVFZ3fw-M0b7ftk0dHskE7GF0pxWVbhSOemQl0HDygYCp6r3QUl4YKNX5XNnovJfM5EhLMXdos6ePWvGOZF-GvHAd4QrgvluawTXdlUti0dpfOTk9Bb5_UL_j9kAVAvCqcXxAX_GQBpp-YWfNZS-3NDHJPIWro1wWZsBJR8ZC07uPiq5HDnWiaunBZgGoncpLUQSvAa9LFRaLULxcA-vCJOzgyD8x5gJxVKM-wxKHf5KhAXTIJQb_qimoZTwGRicJxUiyiyCZTBDN5LLt-UumGWa0UTjQeg8lb5ZQwYD7I8Md9enO84chdyDXpDIHIaCarbe6EIJqc4Itv-aG7sH5XLNxJRQEdzxZc9mt5-GYUNIk3iBJ8g0WBOIyS608UYWPTqJa4M4xofLgBH0ZX4b7TX5MIA50EKuPPzjKgCedIbtarLoRWd9iNPU-xG8VaxTjF1NFk6bk0dOVnnB8G1WDXz6cBDpJHUngcu9aKsPKSl9mOJZOV5pDFBBJzLcXuduzQ43A5yhuAbnPpJMXSpHE917e_iA_lpewmCQjE67VSIo9FdPWELJFvTgCrCYYeY3imAdWqVXjIeKxIV4yblaqugrMwEx86-nL3RxoFP3oT78FTRA7x9vy8LhBSRqHQyFQuA9hUj8QClRDuEHYHPKhCZgNdiIRG72iYnERC7_mKawc4ZG92qYgCu1qm19QWCWt6defgfCbIziaynRe--DoJBlrdRypwFJG8mojmwCcGF6x4N_YadGoMQIbsNARv65YPGX-5yU5CTMfgtD1vPAeIg4gfFyb6FrlFoIYfe3537wZBF6zXTMwmxwcx-Z6GRv-m1Va79dJtq67WegpOaK3J5oTfYNgZ_hRQ6b5VGxEfdgLBk7k6iZ8j_a8Yk8DT-HB0hdRyv-DRX_JnBVpmHPR8-OJjamfT
yvg16supZLjZYfrzpU8QaPFQNuZ5fiuNchdYbBkmGDswHjr_H-2uNjS-JGNkJoBL1Z24x72Lwu06W1-m79pN8ZJUvcmNTi4yXIUHYWxAfnAmZBNRSeBlj0wxkJ0z6KNqS23h8XjC8B0FcpmFU-QzGJZUZHQ6YHIoHtIkdfVnImVMh_DYVyBF9Xt-XEP651_SGu9vBeK7VAiaD8owzfKyMJ0Gw6FBVfmRZIKN54uNfvBdDZOnbyhfoXfXrjEKjkME9WMRhikdrJfRlyH-50skdJjLRnBr5TZhUBHb5J-e1Ta3JwJ2u_3t3Il_ycLL8WXKTs9e9DUGg8w-qNO781AlzjPc9W3RftmI5QbN3Ozdxf5HD1yBxmqMX70manOzkzKJUIG5yjgPVR3t2E4FB7pJ-cyPuaJJAdSUVlXQ33E3LLrQJHClbdv8KDaR5ECubNU1l3Uwl4yJYuJKeE348tiedH7tBuPR0FIrhuqxyuVkVj8O2TP73GZCm5iscMaMmHIDlFfLP0dSE9NoXTZpdYGMbpX9MAPztgnLR_cxAAAaKWdHV6XKyuK6T3WFnCop0xmpJZu5naKHFlQvTWryQLoI-PFuJYwe-fE9B3TfPloDheGBsKWPU29zk89SX5t0aHJSd0z45NkPIfSXCT_HK_W10k86KduSc3LVMfxUdRQOdtBt58l9Ct4nAbsK3cIK-inNpu7D6hPcLcIttDWsAIH030max9HnnYe258K3cIAthTo6zEiT87ftB6S7tFZ8xRMJHmBcZWE1PZZ0j6Ubnx1xRPJ5ZaQps5hsrxkkfiok0kHAcwAtLwaBN8QjV7vmcH-S9Ysybu7aykR2Fpn3y2YImEi96lw1d3r8RteUhIRHGYJLtStp5EnGifxnFdO28W7DUlAdIWFSFWxzK_iQDp-ldY2g2dlvteV-w_NzMl-VhZr-uijQQYkLYuFIGkqkMF9tJpsr2ojMSplIhi00bnaF8FW_W_GN_3lkCmxsAjNBXypFzEWHeDlvysn7NVP8riGRC5sVMBNxi4r7CPUizSiuoGs5C1fTy_dsNwFiSjvAMLKKxSj8Op8_WCgPZb73vun4vCtRbyjL420WY8psVMy-LXFPejVqdZsmpT7s699CVFhEwwAZsOoz8i0Y7SSsH2h5VBSvkBbVFrJ-bCSvwVEHaJoJK9sgf_gKSHtWtnjysRF1SZe6WFblC8VGlYtQOK8NYfMQH37ABKlU8ND2ijcVm6UgHvvAoiAoUFRRGewk2MD5QFxKfJ4gJQmWMqXRXA8NaMWzjhLFOnUa0ncXi73KGjWQPZjZtXzY2dbJHdIRU6JGHTBJNPYi-nnj39m4k3gKhImGqKgIBLxphUN5LzdmdsZIsYnun1-H3V7KAqZQClBtAzw4bofdX1ZqtFBlj4WdDp-YdF6hgjDKyXqCQT7B1QqoiH2Pt2kwCidw0PZNEBq9aqXuTzziPFfE3yodepfeAfmWAZEESmdLXMJ1LhGMqMLbJeiXPn0zdMrSZVa6P3LZfs_jw2olbnKVlVgCph3MvhxtVfb6afSeqKPjj6yUql-LsUEbrjoepFASHzOGXlreyky2vu5bDuKMbW7jzUTmXKsvxNurNIoZqL5dHL_YvyegHr4VuymrsaVzWBd22xOW84RmYen_ZXvIpa1hxsK4W285AxsSouvWLfp9ixnmqUq8x_FKvPkqkzHs7trcPFFsVYKPquVxhUKsVm3nYalT3K-cBCH2T8UOYNls2kNJf9uVWPVMjLai3fkdPuGGlIh6YtWwBH7X2lLTmJLYH7uu1fKZawfu9eSTHgDla8JmMmnQz6lF6Aa2J5_kI-PRWEdgStidUH8vOpJaVqYH6T463HVCL8mYlvNFaOMXS9bStfcOzcOV2nHtrLCFrgU4W4MxX-2BFgSlDcBNH--rYauiaiiTQzu5fr7HQii1C1xdn16ngt2z48E1nMbj_x_FR3Vvu95cBlwKgxF1V2lrnWbTgwS_znfdVYL53o7T
YvU0fls0usTtU6y0krKcMzvNNNQXRJN2thDVwwKMclt0NafHSO3ZPGLhcrjm_pK4yRKN6t7sR_JdNbT76YuAkMOf-8k5lmOljFnnnD5n24j3p-31zLIhzt0V555ngi4DmEX4w3s-h4KGWpqeu0zEN9c0YtXuzabWWxQYykkgWFcfC63biloqsrMzhunDSQVG8P-BHxXTOy8_-Q-hX-tM0cLb6TEcj26dKAZwJPMKLWPFtCiTNaagN3bxUDFge0Grps1slsAK_0s66uPV0cIyOLu2vXyDZm99uhCuH72g1LuE5Zl70HspSmP59RlUUX2HB_EDi4Q26Jaza6Ag2cjInNZF-77nABZzjWnWnuSLbBbEf26PcTthnuX2SbcZouZRgVDuJEt98Xly27HSR0Put05pf7VBsT6UvB2GLjOcg-mY7Ym_kc4L-rkjWTzQIMY5pQ5piWo2lwkXQpiTWA9TRcVS996luXV5xBh5830odmXDGQAKsVCp1avzuGF704qwboAnh_rsOBZ4-ZA_yWrorqQD-1lupTAdCSeomvpNVofb0znIfpCT_tDrLeZUWt2P7GcUiTLFgNtt6EJIcEmVfBZ-o7-lHy6wXFbodxoROwVPnz1uT8QZBNAzknBQlmMr5Xk3GmZlL7AlbySHV9fM7cXslqoyre25N3V9KQ8yKVSgoMNcoxnymmN4hS0bfBpxgP2llVt2fvm5ST4cOt4WWtYJXYmc5wk4tkHEi92p1XHtG3HPYIxooZTUov0O7ntFqtgb95-g1DOPvbsKJUbCFLU4K8sNkYU9aUwHslm4umJPdNAI6jhz717cy463w9gm-dBtcxzOeS5gSz3ZLVB7Eo-c-4r3OI_cNTEkQAP8kmwStlrApYMNLSbrqfw0TNzQTkcEh5dBEaqCIO3sfNbJqQzU-hD6C0LJOvzWD077YLkqieBrZcW5K_PuHAPB9_CoeMp-PYfk5ovcM1_Q_BMW-vEkrq9hN2PNJnoi6Ya76Q8ecpvFKue8Cfzms3aqt5ulBdKCF-FKcfRGLxAH-TyXrfq1TbOSqZgabnqUcKLXjNn-nrvSo8Qmyavt0RI3Co8Dr3ZtuQwj52V3KsE5nvXl3Tlthw0zcg4EpkYz7kVfcbcjXHsEX7N-85Mhd7lAKqje-K4HLcXMjx9F9eyuBZ_Wfr1iZFgCojhoyEsNjh8UcVnBYNECRwa5rj-cim0NvE93Xn0C8VzS7dexy1wKx6BGTqhMb_ng3kBWT9tl2qNM6DsHsibVLJ3XUCF_kJDSVUBcRjG_xBuGOeMxpo3GzAYgXxHTrt4dBj4nmO7mCcMTj3veZkLVL0dtFvyeRggnWCi-qgvYecIaA44xJyTzNSt8lo8oYzpvb3VtET7zmi6TSGb5xgv9IsJZ_Aea8UDKGfL4cKYs4vErGkXEF6OEyUUhP6kHrsNRb2wXt_nYshkxyM0pC20iRyncso14gNh4YDuUemoJcM5bPZBHRW4-urDOT0NlpcLaOUdowQPb1VEZg-ylWzTbnV5y698SIekHFXkz9d_MjtdX5U9bxuuV-7YKfAxxKiBxK-x19-5R_LUIFkKXhrehnPcQ5x8Okbk1K1HPep_ufUGigSVpQS0xOc9c7IxciKOj2sD6RE_6pivdfJ5xcEarGP9utp70DiAiVvesYZTiTEN7rdmnOGfBjhlgyg== + format: openai-responses-v1 + id: rs_0e67143e70e2c03a01690231c4b7c08195b5bfcc5c216e88b2 + index: 0 + type: reasoning.encrypted + refusal: null + role: assistant + native_finish_reason: completed + created: 1761751492 + id: gen-1761751492-aqUa1mlCAz0HhuH0IC5P + model: openai/gpt-5-mini + object: 
chat.completion + provider: OpenAI + usage: + completion_tokens: 1457 + completion_tokens_details: + reasoning_tokens: 704 + prompt_tokens: 41 + total_tokens: 1498 + status: + code: 200 + message: OK +version: 1 diff --git a/tests/models/cassettes/test_openrouter/test_openrouter_stream_error.yaml b/tests/models/cassettes/test_openrouter/test_openrouter_stream_error.yaml new file mode 100644 index 0000000000..d7c70aac89 --- /dev/null +++ b/tests/models/cassettes/test_openrouter/test_openrouter_stream_error.yaml @@ -0,0 +1,96 @@ +interactions: +- request: + headers: + accept: + - application/json + accept-encoding: + - gzip, deflate + connection: + - keep-alive + content-length: + - '169' + content-type: + - application/json + host: + - openrouter.ai + method: POST + parsed_body: + max_completion_tokens: 10 + messages: + - content: Hello there + role: user + model: minimax/minimax-m2:free + stream: true + stream_options: + include_usage: true + uri: https://openrouter.ai/api/v1/chat/completions + response: + body: + string: |+ + : OPENROUTER PROCESSING + + : OPENROUTER PROCESSING + + : OPENROUTER PROCESSING + + : OPENROUTER PROCESSING + + : OPENROUTER PROCESSING + + : OPENROUTER PROCESSING + + : OPENROUTER PROCESSING + + : OPENROUTER PROCESSING + + : OPENROUTER PROCESSING + + : OPENROUTER PROCESSING + + : OPENROUTER PROCESSING + + : OPENROUTER PROCESSING + + : OPENROUTER PROCESSING + + : OPENROUTER PROCESSING + + : OPENROUTER PROCESSING + + : OPENROUTER PROCESSING + + : OPENROUTER PROCESSING + + data: {"id":"gen-1762179802-UN8pkJI4AGZvryk0kFnb","provider":"Minimax","model":"minimax/minimax-m2:free","object":"chat.completion.chunk","created":1762179802,"choices":[{"index":0,"delta":{"role":"assistant","content":"","reasoning":"We need","reasoning_details":[{"type":"reasoning.text","text":"We need","index":0,"format":null}]},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: 
{"id":"gen-1762179802-UN8pkJI4AGZvryk0kFnb","provider":"Minimax","model":"minimax/minimax-m2:free","object":"chat.completion.chunk","created":1762179802,"choices":[{"index":0,"delta":{"role":"assistant","content":"","reasoning":" to respond to a greeting. The user","reasoning_details":[{"type":"reasoning.text","text":" to respond to a greeting. The user","index":0,"format":null}]},"finish_reason":"length","native_finish_reason":"length","logprobs":null}]} + + data: {"id":"gen-1762179802-UN8pkJI4AGZvryk0kFnb","provider":"Minimax","model":"minimax/minimax-m2:free","object":"chat.completion.chunk","created":1762179802,"choices":[{"index":0,"delta":{"role":"assistant","content":"","reasoning":null,"reasoning_details":[]},"finish_reason":"length","native_finish_reason":"length","logprobs":null}]} + + data: {"id":"gen-1762179802-UN8pkJI4AGZvryk0kFnb","provider":"Minimax","error":{"code":400,"message":"Token limit reached"},"model":"minimax/minimax-m2:free","object":"chat.completion.chunk","created":1762179802,"choices":[{"index":0,"delta":{"role":"assistant","content":""},"finish_reason":null,"native_finish_reason":null,"logprobs":null}],"usage":{"prompt_tokens":43,"completion_tokens":10,"total_tokens":53,"cost":0,"is_byok":false,"prompt_tokens_details":{"cached_tokens":0,"audio_tokens":0},"cost_details":{"upstream_inference_cost":null,"upstream_inference_prompt_cost":0,"upstream_inference_completions_cost":0},"completion_tokens_details":{"reasoning_tokens":11,"image_tokens":0}}} + + data: [DONE] + + headers: + access-control-allow-origin: + - '*' + cache-control: + - no-cache + connection: + - keep-alive + content-type: + - text/event-stream + permissions-policy: + - payment=(self "https://checkout.stripe.com" "https://connect-js.stripe.com" "https://js.stripe.com" "https://*.js.stripe.com" + "https://hooks.stripe.com") + referrer-policy: + - no-referrer, strict-origin-when-cross-origin + transfer-encoding: + - chunked + vary: + - Accept-Encoding + status: + code: 200 + 
message: OK +version: 1 +... diff --git a/tests/models/cassettes/test_openrouter/test_openrouter_stream_with_native_options.yaml b/tests/models/cassettes/test_openrouter/test_openrouter_stream_with_native_options.yaml new file mode 100644 index 0000000000..84f64743ed --- /dev/null +++ b/tests/models/cassettes/test_openrouter/test_openrouter_stream_with_native_options.yaml @@ -0,0 +1,222 @@ +interactions: +- request: + headers: + accept: + - application/json + accept-encoding: + - gzip, deflate + connection: + - keep-alive + content-length: + - '232' + content-type: + - application/json + host: + - openrouter.ai + method: POST + parsed_body: + messages: + - content: Who are you + role: user + model: google/gemini-2.0-flash-exp:free + models: + - x-ai/grok-4 + provider: + only: + - xai + stream: true + stream_options: + include_usage: true + transforms: + - middle-out + uri: https://openrouter.ai/api/v1/chat/completions + response: + body: + string: |+ + : OPENROUTER PROCESSING + + : OPENROUTER PROCESSING + + data: {"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":""},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + : OPENROUTER PROCESSING + + : OPENROUTER PROCESSING + + : OPENROUTER PROCESSING + + : OPENROUTER PROCESSING + + : OPENROUTER PROCESSING + + : OPENROUTER PROCESSING + + data: 
{"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":"","reasoning":null,"reasoning_details":[{"type":"reasoning.encrypted","data":"B7oH6rUO9kQMWKmZOE/vO5bKz16j1SCeoDLf5c1JLB+JRnshhzOUTvc9I1LUTBNRXe4Beug/yrWrMPNJaMHxfQ49MFLUkkBZWoc1BDNc9t+z9fb5PQiaCCaCQn4bh81RouEbDK1IuwcrFfYtDe/GFHWVqrUN0cLVy3sASDPb8KXhogJ0RnxAZlvxfF2AaAovWtIX46ZpKj1KWWTXruxkT2ZpMEhBbmfRKOTRD7OskwShbq/FeWiOmRLwWjO0coGE/kR6NQGU5sb9frTWNLNXtOgYZ47xHTDNnGoPueg3yRU/z78nW40UEOKHVCzEwyNnhaZPy5IQt+NarFVGydc3UMp1tiQtoq7CcRqBeZgE4N+DG2NyNz4gbaTgZBMFdpHtvxdSF05R2KNqlk62b6TtHY/tBEhmlLBVO3AA5pwyjYXNjVFTAgizni6ENDUDiNiBtzv0QKHpRoGV/7NrtR5cs2kb8+Rhv7mNsh+JKeZ6pjSSAq9ChdI2RsMlFitKFD1HA81+eElZVgacMPgBhd3WHQ3QVkwwFWhmjF9GG/obyDoGC3zXpIBWncWVwjVoMXUa/Js+vWOA0yKa0SgznIW1KK2LIMHARaT4IgSAPQGgY+4T2DEEqT+nEu+E0s4CosBXC81Uto+5rxL1ce/R87q7Wnqsg+5Vq00zmttxZ7e5b40/V83i4FOsfMg29ruWVY0d93+rUWvUNpOVGDKWr/VvoGseRZfCIN/pbHbvCAoGJPhFMQaTpwpw2RmvHlj8nvCNCoZjky9DL8E9Mm89bGZgNW63Tbqiw38","id":"rs_619f3702-e30e-6ba5-7b57-466db8036365_us-east-1","format":"xai-responses-v1","index":0}]},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":"I'm"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":" Gro"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: 
{"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":"k"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":","},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":" an"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":" AI"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":" built"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":" by"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":" x"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: 
{"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":"AI"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":"."},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":" I'm"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":" designed"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":" to"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":" be"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":" helpful"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: 
{"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":","},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":" maximally"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":" truthful"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":","},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":" and"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":" a"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":" bit"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: 
{"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":" witty"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":"—"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":"think"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":" a"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":" mix"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":" of"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":" the"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: 
{"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":" Hitch"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":"hik"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":"er's"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":" Guide"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":" to"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":" the"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":" Galaxy"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: 
{"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":" and"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":" JAR"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":"VIS"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":" from"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":" Iron"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":" Man"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":"."},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: 
{"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":" My"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":" goal"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":" is"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":" to"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":" help"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":" you"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":" understand"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: 
{"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":" the"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":" universe"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":" ("},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":"and"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":" maybe"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":" crack"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":" a"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: 
{"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":" few"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":" jokes"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":" along"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":" the"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":" way"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":")."},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":" What's"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: 
{"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":" on"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":" your"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":" mind"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":"?"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":""},"finish_reason":"stop","native_finish_reason":"completed","logprobs":null}]} + + data: 
{"id":"gen-1762064096-m5VxL2xrxOREwashCey6","provider":"xAI","model":"x-ai/grok-4","object":"chat.completion.chunk","created":1762064097,"choices":[{"index":0,"delta":{"role":"assistant","content":""},"finish_reason":null,"native_finish_reason":null,"logprobs":null}],"usage":{"prompt_tokens":687,"completion_tokens":187,"total_tokens":874,"cost":0.00333825,"is_byok":false,"prompt_tokens_details":{"cached_tokens":679,"audio_tokens":0},"cost_details":{"upstream_inference_cost":null,"upstream_inference_prompt_cost":0.00053325,"upstream_inference_completions_cost":0.002805},"completion_tokens_details":{"reasoning_tokens":118,"image_tokens":0}}} + + data: [DONE] + + headers: + access-control-allow-origin: + - '*' + cache-control: + - no-cache + connection: + - keep-alive + content-type: + - text/event-stream + permissions-policy: + - payment=(self "https://checkout.stripe.com" "https://connect-js.stripe.com" "https://js.stripe.com" "https://*.js.stripe.com" + "https://hooks.stripe.com") + referrer-policy: + - no-referrer, strict-origin-when-cross-origin + transfer-encoding: + - chunked + vary: + - Accept-Encoding + status: + code: 200 + message: OK +version: 1 +... 
diff --git a/tests/models/cassettes/test_openrouter/test_openrouter_stream_with_reasoning.yaml b/tests/models/cassettes/test_openrouter/test_openrouter_stream_with_reasoning.yaml new file mode 100644 index 0000000000..fd5bf6c460 --- /dev/null +++ b/tests/models/cassettes/test_openrouter/test_openrouter_stream_with_reasoning.yaml @@ -0,0 +1,271 @@ +interactions: +- request: + headers: + accept: + - application/json + accept-encoding: + - gzip, deflate + connection: + - keep-alive + content-length: + - '128' + content-type: + - application/json + host: + - openrouter.ai + method: POST + parsed_body: + messages: + - content: Who are you + role: user + model: openai/o3 + stream: true + stream_options: + include_usage: true + uri: https://openrouter.ai/api/v1/chat/completions + response: + body: + string: |+ + : OPENROUTER PROCESSING + + : OPENROUTER PROCESSING + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":""},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + : OPENROUTER PROCESSING + + : OPENROUTER PROCESSING + + : OPENROUTER PROCESSING + + : OPENROUTER PROCESSING + + : OPENROUTER PROCESSING + + data: 
{"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":"","reasoning":null,"reasoning_details":[{"type":"reasoning.encrypted","data":"gAAAAABpCCSIBxdevbk-8QtJuf4C6mgqDwXaUXZGqXqKQRX62aYMAYCg7DbFt3-A3KE_tihu5b36YJmI6393LEIF7lTmdXSxaHgROpGQA6sDVpJHPzcfWvTv774-JhpsbKSxisRYKsPkR5SSsJMULqCQShe_ypqQokHmOw8xHW_9g6-LXfRvv24bGjWMN_Cf994O4EQH4ZtgHwlVweADjseXYi9ShekxcuLRiEHpL4FKcvm5tFmylbSovKuZcu1HTTPMDVcRCgn9D3c56KcHYJGm-WJXuG9DmUO1q97Qj4pSBbSblCBK3qt0V6vbiXRfhmTaOU0gYNXWkmdWyr0UZFj2K5Kq5x9CLdYEdKFW4iMnjeEGIAMcuVvGtQOC406lF6_CnmC-ktPmWRspnoJeBhxGAeviaAyHC7tKO7cTFPOft_sVhWA8xJGy1vfz-cOQh3JcGSjm62MzfFjxh2G2jHixqqyVBopgckt1mDTlr0sU3m7COFWb4hiPNo6fmxiWxiT-umwYdfZngQ4yUkvZiTzZLqhJJzNu90Xl4BNRS_GDJVVrEKgy4kIT-PH1l2iCvf1AHOpefJR1MvIql8jlbrqY8Y7agIpR8XhnHeiCVE_oQg25Zg7x73f6g18rSt_DN3C05tzQCDCvvK4hAIIzxbBoiByErLJGCTOTOH8U_8UJLWSWpo13Cr2reXb7bbVVxtGUacQlTZi01Sz6WHMqKUmACTcNI9EUOwa8nytbSdqifXwkwc0Qpeegu6ibR8Z6P155c0KaKKi-c2iDmug4oTxcwJRWXbP5AUi-cWSC1Mhn9GW7SfqjQRqVdu4N9KcZ4IzLoEyuZtpJTHlCxzcpmQhBiVPxP8BOh8cnrbcn7ebVL2QfMejQS-9MZH_0vHLsbrx75fPHhp9Env9Vfhwx4awImdtIpN0IRYO-qwyp_RI2eGpaHpr8NV7YmkRDd5EwB0ylqO2kTEMPdGhpauQNae4-5CYa4C2_5fi6gU-6KLArCAO3MnvH_40RCGxoPlSxy_t5XX3NubMjy_paiuyTC_fIbkWAtdrd6HsZlDfv_6aZFxe_8C2IPaAuaLRvNSdTsLgzBXHfMPeaccV3c0-fYshNTOEkcvfC5b_v0wXh4sv0rU8rD2Fa3gBVt2QssutrbS0KIv6S4ySa","id":"rs_0aa4f2c435e6d1dc0169082486816c8193a029b5fc4ef1764f","format":"openai-responses-v1","index":0}]},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":"I"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: 
{"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":"’m"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" Chat"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":"GPT"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":","},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" a"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" large"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":"-"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: 
{"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":"language"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":"-"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":"model"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" assistant"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" created"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" by"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" Open"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: 
{"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":"AI"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":"."},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" I"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" generate"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" text"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" responses"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" and"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: 
{"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" can"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" help"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" answer"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" questions"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":","},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" explain"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" concepts"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + 
data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":","},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" brainstorm"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" ideas"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":","},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" draft"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" or"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" edit"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: 
{"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" writing"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":","},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" and"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" more"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":"."},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" While"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" I"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: 
{"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" strive"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" to"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" be"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" accurate"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" and"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" helpful"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":","},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: 
{"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" I"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" don"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":"’t"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" have"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" personal"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" feelings"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" or"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: 
{"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" consciousness"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":","},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" and"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" my"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" knowledge"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" is"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" limited"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + 
data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" to"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" the"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" information"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" I"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" was"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" trained"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" on"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + 
data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" ("},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":"most"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" of"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" it"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" up"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" to"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" late"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: 
{"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" "},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":"202"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":"3"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":")."},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" If"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" there"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":"’s"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: 
{"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" something"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" specific"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" you"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":"’d"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" like"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" help"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" with"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: 
{"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":","},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" just"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" let"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" me"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":" know"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":"!"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]} + + data: {"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":""},"finish_reason":"stop","native_finish_reason":"completed","logprobs":null}]} + + data: 
{"id":"gen-1762141316-q3fB64DDMstJO0ZakdSK","provider":"OpenAI","model":"openai/o3","object":"chat.completion.chunk","created":1762141317,"choices":[{"index":0,"delta":{"role":"assistant","content":""},"finish_reason":null,"native_finish_reason":null,"logprobs":null}],"usage":{"prompt_tokens":9,"completion_tokens":104,"total_tokens":113,"cost":0.00085,"is_byok":false,"prompt_tokens_details":{"cached_tokens":0,"audio_tokens":0},"cost_details":{"upstream_inference_cost":null,"upstream_inference_prompt_cost":0.000018,"upstream_inference_completions_cost":0.000832},"completion_tokens_details":{"reasoning_tokens":0,"image_tokens":0}}} + + data: [DONE] + + headers: + access-control-allow-origin: + - '*' + cache-control: + - no-cache + connection: + - keep-alive + content-type: + - text/event-stream + permissions-policy: + - payment=(self "https://checkout.stripe.com" "https://connect-js.stripe.com" "https://js.stripe.com" "https://*.js.stripe.com" + "https://hooks.stripe.com") + referrer-policy: + - no-referrer, strict-origin-when-cross-origin + transfer-encoding: + - chunked + vary: + - Accept-Encoding + status: + code: 200 + message: OK +version: 1 +... diff --git a/tests/models/cassettes/test_openrouter/test_openrouter_tool_calling.yaml b/tests/models/cassettes/test_openrouter/test_openrouter_tool_calling.yaml new file mode 100644 index 0000000000..37fa760883 --- /dev/null +++ b/tests/models/cassettes/test_openrouter/test_openrouter_tool_calling.yaml @@ -0,0 +1,97 @@ +interactions: +- request: + headers: + accept: + - application/json + accept-encoding: + - gzip, deflate + connection: + - keep-alive + content-length: + - '514' + content-type: + - application/json + host: + - openrouter.ai + method: POST + parsed_body: + messages: + - content: What is 123 / 456? + role: user + model: mistralai/mistral-small + stream: false + tool_choice: auto + tools: + - function: + description: Divide two numbers. 
+ name: divide + parameters: + additionalProperties: false + description: Divide two numbers. + properties: + denominator: + type: number + numerator: + type: number + on_inf: + default: infinity + enum: + - error + - infinity + type: string + required: + - numerator + - denominator + type: object + type: function + uri: https://openrouter.ai/api/v1/chat/completions + response: + headers: + access-control-allow-origin: + - '*' + connection: + - keep-alive + content-length: + - '585' + content-type: + - application/json + permissions-policy: + - payment=(self "https://checkout.stripe.com" "https://connect-js.stripe.com" "https://js.stripe.com" "https://*.js.stripe.com" + "https://hooks.stripe.com") + referrer-policy: + - no-referrer, strict-origin-when-cross-origin + transfer-encoding: + - chunked + vary: + - Accept-Encoding + parsed_body: + choices: + - finish_reason: tool_calls + index: 0 + logprobs: null + message: + content: '' + reasoning: null + refusal: null + role: assistant + tool_calls: + - function: + arguments: '{"numerator": 123, "denominator": 456, "on_inf": "infinity"}' + name: divide + id: 3sniiMddS + index: 0 + type: function + native_finish_reason: tool_calls + created: 1762047030 + id: gen-1762047030-dJUcJW4ildNGqK4UV6iJ + model: mistralai/mistral-small + object: chat.completion + provider: Mistral + usage: + completion_tokens: 43 + prompt_tokens: 134 + total_tokens: 177 + status: + code: 200 + message: OK +version: 1 diff --git a/tests/models/cassettes/test_openrouter/test_openrouter_usage.yaml b/tests/models/cassettes/test_openrouter/test_openrouter_usage.yaml new file mode 100644 index 0000000000..e705d5f75c --- /dev/null +++ b/tests/models/cassettes/test_openrouter/test_openrouter_usage.yaml @@ -0,0 +1,346 @@ +interactions: +- request: + headers: + accept: + - application/json + accept-encoding: + - gzip, deflate + connection: + - keep-alive + content-length: + - '147' + content-type: + - application/json + host: + - openrouter.ai + method: 
POST + parsed_body: + messages: + - content: Be helpful. + role: system + - content: Tell me about Venus + role: user + model: openai/gpt-5-mini + stream: false + uri: https://openrouter.ai/api/v1/chat/completions + response: + headers: + access-control-allow-origin: + - '*' + connection: + - keep-alive + content-length: + - '15782' + content-type: + - application/json + permissions-policy: + - payment=(self "https://checkout.stripe.com" "https://connect-js.stripe.com" "https://js.stripe.com" "https://*.js.stripe.com" + "https://hooks.stripe.com") + referrer-policy: + - no-referrer, strict-origin-when-cross-origin + transfer-encoding: + - chunked + vary: + - Accept-Encoding + parsed_body: + choices: + - finish_reason: stop + index: 0 + logprobs: null + message: + content: |- + Brief summary + - Venus is the second planet from the Sun and is similar to Earth in size and mass, but has a hostile, runaway-greenhouse climate. It is often called Earth's "sister" or "twin" because of its comparable radius and composition, but its surface conditions are extreme. + + Key facts (numbers) + - Distance from Sun: ~0.72 AU (about 108 million km). + - Diameter: ~12,104 km (≈0.95 Earth’s diameter). + - Mass: ≈0.815 Earth masses. + - Surface gravity: ≈0.9 g. + - Orbital period (year): ≈224.7 Earth days. + - Rotation: very slow and retrograde — one sidereal day ≈243 Earth days; the solar day (noon-to-noon) ≈117 Earth days. + - Mean surface temperature: ≈460–470 °C (≈730–740 K). + - Surface pressure: ≈92 bar (about 92 times Earth’s sea‑level pressure). + + Atmosphere and climate + - Dominated by carbon dioxide (~96–97%) with nitrogen making up most of the rest and trace gases (sulfur compounds, water vapor). + - Thick cloud deck of sulfuric acid droplets, which reflect sunlight and give Venus its high albedo. + - Intense greenhouse effect traps heat; surface temperatures are hot enough to melt lead. 
+ - Upper atmosphere exhibits "super-rotation": winds can circle the planet in a few Earth days, much faster than the planet’s rotation. + + Surface and geology + - Surface is mostly basaltic plains with many volcanoes and vast lava flows; evidence suggests extensive volcanic resurfacing in the past few hundred million years. + - Few impact craters (sparse compared with other rocky bodies), indicating a relatively young surface. + - Highlands called “tesserae” are highly deformed regions thought to be among the oldest crust. + - Venus likely lacks Earth-style plate tectonics; heat is probably released through volcanic and regional tectonic processes. + + Magnetic field and interior + - Venus has a similar size and density to Earth, implying a metallic core, but it lacks a strong global magnetic field. A very weak induced magnetosphere forms from interactions with the solar wind, likely because slow rotation prevents an Earth-like dynamo. + + Potential for life + - The surface is too hot and high‑pressure for known life. Some scientists have proposed that the temperate cloud layers (50–60 km altitude) might be more hospitable and could conceivably host microbial life, but this is speculative. + - A claimed detection of phosphine in Venus’ clouds (2020) sparked interest but remains highly debated and unconfirmed. + + Exploration history + - First successful flyby: Mariner 2 (1962). + - Soviet Venera program: multiple atmospheric probes and several landers that returned the first surface data and images (e.g., Venera 13 and 14 in 1982). + - NASA Pioneer Venus and Magellan missions mapped Venus (Magellan used radar mapping in the early 1990s). + - More recent orbiters include ESA’s Venus Express (2006–2014) and JAXA’s Akatsuki (arrived 2015), which study atmosphere and weather. + - Upcoming/planned missions: NASA’s DAVINCI+ and VERITAS, and ESA’s EnVision, aimed at studying atmosphere composition, geology, and high-resolution mapping. 
+ + Why it matters + - Venus is a natural laboratory for studying greenhouse effects, planetary evolution, and why two similar planets (Earth and Venus) can end up so different. Understanding Venus also informs exoplanet studies and the conditions that make a planet habitable. + + If you’d like, I can: + - Compare Venus and Earth in more detail, + - Summarize major missions and their discoveries, + - Explain theories for the retrograde rotation, + - Or go deeper into atmospheric chemistry and cloud-layer habitability. Which would you prefer? + reasoning: "**Exploring Venus**\n\nThe user wants a concise yet informative overview of Venus, including its physical + characteristics and exploration history. I'll cover key points such as its orbit, rotation, and atmosphere. Important + numbers to include are: it's about 0.72 AU from the Sun, has a length of day of 243 Earth days (retrograde), a + year of 224.7 Earth days, a radius of 6052 km (0.949 times Earth's), and a mass of 0.815 Earth. \n\nThe atmosphere + is roughly 96.5% CO2, with a mean surface temperature around 465 °C (735 K). Lastly, its rotation is retrograde + and slow, moving west-to-east.**Understanding Venus's Rotation and Surface**\n\nRetrograde means that Venus rotates + in the opposite direction to most planets, specifically east to west, while Earth spins west to east. Venus has + a long rotation period of 243 Earth days, which is longer than its year. Its solar day is around 116.75 Earth + days. The surface consists of basaltic plains and highland tesserae with few impact craters due to a relatively + young surface, about 300-800 million years old, and no plate tectonics. \n\nVolcanism is widespread, and the density + is similar to Earth at 5.24 g/cm³. Venus has a weak magnetic field and cloud droplets of sulfuric acid, with atmospheric + super-rotation causing winds up to 360 km/h that circle the planet every four Earth days. 
The greenhouse effect + is extreme due to high CO2 levels and a lack of water.**Exploring Venus's History and Characteristics**\n\nI'll + summarize Venus exploration, starting with Mariner 2's flyby in 1962. The Venera series were significant as they + landed probes that provided the first atmospheric data and surface images from Venus, particularly Venera 13 and + 14 in 1982. Pioneer Venus followed in 1978, while the Magellan mission in the 1990s mapped the surface using radar.\n\nIn + recent years, missions like Venus Express (2005–2014) and the Akatsuki orbiter since 2015 have continued to expand + our understanding. Upcoming missions include NASA's VERITAS and DAVINCI+ and ESA's EnVision, set for launch between + 2028 and 2031. \n\nI should mention the challenges of surface operations due to harsh conditions and the possibility + of life in clouds, with some scientists hypothesizing microbial life in temperate cloud layers. The 2020 detection + of phosphine sparked controversy, leading to ongoing debates.\n\nInterestingly, Venus is often called Earth's + twin in size, but the environments are vastly different. Being an inferior planet, Venus exhibits phases like + the Moon when viewed from Earth, and its retrograde rotation may result from a giant impact or tidal interactions + with its dense atmosphere. I'll keep the summary concise while providing key numbers and facts." + reasoning_details: + - format: openai-responses-v1 + index: 0 + summary: "**Exploring Venus**\n\nThe user wants a concise yet informative overview of Venus, including its physical + characteristics and exploration history. I'll cover key points such as its orbit, rotation, and atmosphere. + Important numbers to include are: it's about 0.72 AU from the Sun, has a length of day of 243 Earth days (retrograde), + a year of 224.7 Earth days, a radius of 6052 km (0.949 times Earth's), and a mass of 0.815 Earth. 
\n\nThe atmosphere + is roughly 96.5% CO2, with a mean surface temperature around 465 °C (735 K). Lastly, its rotation is retrograde + and slow, moving west-to-east.**Understanding Venus's Rotation and Surface**\n\nRetrograde means that Venus + rotates in the opposite direction to most planets, specifically east to west, while Earth spins west to east. + Venus has a long rotation period of 243 Earth days, which is longer than its year. Its solar day is around 116.75 + Earth days. The surface consists of basaltic plains and highland tesserae with few impact craters due to a relatively + young surface, about 300-800 million years old, and no plate tectonics. \n\nVolcanism is widespread, and the + density is similar to Earth at 5.24 g/cm³. Venus has a weak magnetic field and cloud droplets of sulfuric acid, + with atmospheric super-rotation causing winds up to 360 km/h that circle the planet every four Earth days. The + greenhouse effect is extreme due to high CO2 levels and a lack of water.**Exploring Venus's History and Characteristics**\n\nI'll + summarize Venus exploration, starting with Mariner 2's flyby in 1962. The Venera series were significant as + they landed probes that provided the first atmospheric data and surface images from Venus, particularly Venera + 13 and 14 in 1982. Pioneer Venus followed in 1978, while the Magellan mission in the 1990s mapped the surface + using radar.\n\nIn recent years, missions like Venus Express (2005–2014) and the Akatsuki orbiter since 2015 + have continued to expand our understanding. Upcoming missions include NASA's VERITAS and DAVINCI+ and ESA's + EnVision, set for launch between 2028 and 2031. \n\nI should mention the challenges of surface operations due + to harsh conditions and the possibility of life in clouds, with some scientists hypothesizing microbial life + in temperate cloud layers. 
The 2020 detection of phosphine sparked controversy, leading to ongoing debates.\n\nInterestingly, + Venus is often called Earth's twin in size, but the environments are vastly different. Being an inferior planet, + Venus exhibits phases like the Moon when viewed from Earth, and its retrograde rotation may result from a giant + impact or tidal interactions with its dense atmosphere. I'll keep the summary concise while providing key numbers + and facts." + type: reasoning.summary + - data: gAAAAABpEglaDlqr6W69WezzCt54Qx62tf-AJZM0xE2p63O61dV-B1dp9CxhGAuKNLQui7lSKVAxUrWc1RQtKJ8FXKGjvIYioRrozYZhpuZAVXBc4ONZoXRaLwgfu2p-jAY22fyUxiNN26GisZDzA0AjOwwSZyo_YbLXp9oiRVEU_JV6uHkHBmSl5Pqdk0i2SaHrcGOC-5193H16HPxIalH4NNUsWfuUCgmRTrhkvz2SZqDTYk1XtPlwgWpDOkMDosVSXFkXhUC7m0b_QXNnmw2jqD0c4v4CfVb-vgkb7WcxPi019InLIZFUOkUG4bComrTdZFXlT08nOGEiNo8oIkyM1FjKuTzbWMgc91r0IZ4jDp8jSEiNdlSS-Tt8_I6_AWtIDZc0JDgFa0YEwgF2s8dq6Fppnj_BcG6tUBoR9S1YH4lVw_ztwGFnX4kkNbDLUAqWDOG5VqSv_5knlkLNk_tA26Ps4sq89nuv-6YYWOU6VHtxxiYbrTpbo_5IbgNEX2o135Fp0ajpMR2EikDa8JByJAcmejo8I7CG1i6NxDch6yLP93o1nhMjd0nw5AfQEuqM6qW71BIZcE9DTlxOCI8qaAZAIEq8AyQLhYefqtWfycBMTFzC4uhqzrwZZ6DE-jy8WM9Wr4jU4sj8MsyvyUh-lJFkwL159jeZXGw3kzKwPYgsWgtqKdgcej7jc7SZp8IxPYt3jucwG3zONL4liBUs6Lm7Fh6XUuOb0YB6_Bsxv1UdVK7_IplOofxPiPeoz2IYS1W7SF0aq54bwXMR0XAlfQX-43gKSZfBaOvs8icDH4rgslGvsV-JRL14078hl04HgnFLZUTjYSqxJSuqs-IFQfN7RVfeQDbQgaQkvv9Gup5Oom0R5r1TUcH5DnyRY7smTXcLEVWQlmoi0wYPGAgYZj747bxLCNCKT5Bmas6EmI5stIw5HHnb-m2hBdBLRfChVBjouVHanmrsimTy6gHXRBe190h6PZWF9rmL5V6ln4hrsemjXePqnYf0APgDZz3zQPLu6Gxfk3Blf6-sO_1xG7xOnnZkpwsxJG8xcJM9iX9nsvQ_lOUFHfLO2vhNimH7__Xw_XSb1_0gAEovwxRe-Q8hfpiKaLmxNW_f4i3DrxSMzA0vkzzymKj2GjYBbmzl0Q5-lN_UJrGfszszll2DuO2Lx7ItvxeoOSCA3uvuuiJttu8PBpY8RdZnCetwLzuiurVOeVhBSJXiwe2gLHFqCPc5oyO99VAXVjePUpXQHdE9vEd-PAE1GxDWYtyeiXO28xlDrb4FeMhvwr3zu7TturMs0x6R15c8vjhmFN-21jE6tBkqXhG3abLaXXwWAWT3pB1WtWlH1DYOfcR72z63eSPWaQ7TSG4WgL-r2WfxHBOB6rFntvJ71AGnRUKJQiFM74VZUx0_gvuLuNDlOmFayWeGyHn7Rpqdbpu7xqwt3dIedPXEzd0Rn8hRPbm7YShtEcDqs4O3vrw3DCAqSFxD4Ec3HklMgh
TJNoAijQCVvSXvWpnvmoODLPNdoAfNXjQ_lRnALCscz0OEITRPp2J8nO12thLoy33rgRxpZxjXubnu9bhjlwSvmhE4nVTkdUIgLZQ4RyiS3AOCNuFVem2X3Sf1z_tNhTWEtA0lMDCNR4u6C7cQBnb0nc5dsxBD8rJVTbUYdOcbbHXGQy6BnKfYuwBJQs5FyNlXkik4LRIhvNNJJ213rEnQ37pgAIMC3e6kKb3e3yZzaCyxa1vrZ2RvBjs7U3CR91LoUJASso8reeTsLeb9iKPUHVKxDtAiMDFttpZvF-hAorL4aknJxZKm3EtIWwLdVgIjFL_fjOU_bNSRgU-OlomQ6cZ4q3CrD3njZc5Cc63qIO9OXmHWuR4Kj_8UKqhZfCFrxzKVLx33NiePLaUBQ947tl4yDYqDPtn_lbLaZxvqwYYCns439lhuD3RfvmxneN6DND1LC1NkqrFDJBOeWdfNyLMpi-qOZMC1QJO-ngE0ddpJTum1XX9jW4b8ZBBrUdNfLPyHrWTf3aWaraBVn-QJquvWK74OLr4EZu006SWILtAQ9ZuELbvlE6wg1EyGReABwHelXaZHAb34z2OUHEI5jdISCcBqVfHo06emrZpbqr6gFqMOpHqJf2TvE3NxtxU5dEGKqN3w5wsWNvWiG1WTzds3pWJdkMcInV1dLQuR6n7vMask4eBu4Cc441L5f31bevBc6i0Tf8mTRkiFa97kXcRQ5ClGzAFF1ELaSwq3pJ5zDn-E7o5f8lMQQ2GY9NKJ5_lYT1TfDh-ebj-RBoWZuzkM-DuGt40XYEg48ATRen07rFQhym_gUl2-qHc7fmKyqCkvowLq2Z75bDd1HevMRKcnHMZAP3W4AIm-gM8AIi2G_Bshky9lx11Nbb0Q86l_LX8sCyBSv_4v7txvfXgb57MAhgv6zVGJFPMTTffYl8YqCArOzpwV-SsXM5JmZD3UqN0qC_FAnYhtotTc0DZogLmHjFSmCtu6LbCKqop3RAjDbeT10zCrYD93rHlt4NFZwmubjuUR0XbHP-YSVqm9NSIuIP2IFNtNAAI6ou1aP4yHWm3tPHPkSU6kyugDntZ5Yzk7S6MB2CHtY5_EkI0MefAklWE7METa6Nf7mBSK8cMJaWekJdwCfgJ_QQIWTU02EY1ZGXHy_FXvRrHMMear6Evi92hYiX7O0XwziP2-QU-py8wUYkAFeiM81K1EeRtHyN4_zDMYV3QAMAPkXqC9iUMi0vNL6KvLst54ZJxf06qO9OucPbGQUd8wGHLazIfEBphBr7-3hIz5pb4FIopaf1jDTL2tg36dsVK_AVCU7pNCjNUeqTTQH_TmCeghlW5D0_5Jk1Tn4FlyeQWp99PHr1Ymd6ezTl_6Y4n97dRh-KXwXKQEuLpmNDCN59LBVED1FNCy1MkPhqtFx7YVwuuD0dmL7WNsptX8-KTfdp2R2nYX7yHT_SqOXLo-tnU_p1gwj38vdy8OS2uLeXbi7y54e3QCM9dJjLQwYfgci1Trf6JDF7b7jZZ7Qx_bsT8QqdzNvfT7jSRYYvU-NydeAMsc7yp8FxGwizIO0odGu6ywbwRz3Rc8kh_XsWf2W0NNm9u_lpXvFnI3DlYBo2bzHCVjbpacpD7jTWEjPCEcCj_HuTTbeKTX7GU6HZ3QvNwSMHMBnJrV-fgTPWPb1TNxqFO5LwSqIA781CtGKho5U3kMlkgucDcMvngVF9rI3Q3l-2LDoLtnKREs3F7qvuwAJlceXpV8FunaftKNP7B7BkWSbj_Ic9Ph-R2T4wLvFh71Inqe3zGWDko6botJYL-NVuwxNe_S1L7fOZff0Ng0l0d0DkqHSRxRHx9qK6_mRJMXXdcTAxHF0Pmfqg4KBxV9kkIPQBTyHiVDxgr5t-k9MV52XtvMnhOUGIQtxy10qRm-LhRiUD5XReZwvm9tuqUCIgDkYw_lr-3BLQev3Q9vWhUAxpzMD3zxmTq4nLxlF3EnmXAozPkMpArhEfqz3H
9M2vX5fN5AgBKSnaJezr5bz0oF5eZrXK8nCrPYnovRV_e9oAnPjIvRQVWmFygup-N_wDi_Dzbxt3d_amPZFi_JVJeleq6Qd05TvwG-t1EVYHA_GS2Ms0K7oJtm5V_faJcALcxt8preCVb6o4k7M3Y-WjfMe62drQS8qIlqD1jLjXG0b-KUoG917jZlMcEsN5HFnxN0gW7eq7AGJ-NiQz7aeVZGUxaiz9L66IiDGoX731q5M5gaVYffWH1o_jA33RQ2H6YTae5HOCZ5Qv22jBl5IcHT66-b9eVFc4vIu8fLxvAlH_UbzI8GSczd3OLMldZs0WOiOE8tWj6eWzzvYPjT1DvIiCOkAU56FMSZRdV8-ee_sey_mFeR5qj0vtqgD08443dFGyvqCY9yPYcauv-Tjn-7LJ-UMHEDAmcBsk8pGhS4k2Xn0g1EilMhHzKPeMC8y0K4hHdO5GdMi-ZUPutVWxJqgOSsJ6Csb1NMKKbb5XqmBg0NcKlQxFcd8CIDS0ZmQx4OlnNg_iqI_NHEWMFNdJbJvMHDNah9IHRhdSfD2oEcJxrM9yUMwmf5aDVSnjHpGTp-LJ_LcA3XyzjEAHc1k03Eg1keqphvK0PGUlEKv8t7zD0-5hfAXQItsvDC_ed055QnxFU16m0V2bhKdqsldUOT5Tax3KoFbhdE78kV6dK20TkbsKuaQ4j-FysBcQw0o0KPjR6D-Haz3N5dE9ilgAp4R-gGTcFQK2EzBUCa5nglSxVNCaWmWke7jeb4DJmBYIXd59n9jlcuHj1Vhu2tNOm6kMvt6ZlHF1FNU8mnvIk1v_XWnavmwjzhskYBMbc6R1jua2VEB53EBxuzMqqx1bnvqbZJZKDzoIgGt61B4WBZJyjfWXslCJELo4d3t4Koa0SYcEQ2IxrrgFJlHLNBHgOTrnKCVwEbe8tY_EJ7unjrHnN4NYKVo8fYRs1QasREwae-faD4Prcmfh1murWWnbuo4JPD5S38AxZDLjJjB0t_6Q1-YLF5X36uOIxIMJ0ZOM2kX_jkkMfMbfcqHXijRkwrOA7cCe1pcExJVoPJjxHGNG4l2HjOeEsWzrC5pNEWlM5jpp4nUXugawpoZ1-EY-uoFDTrhnOuCS-q6HPLd0GpCR09uVR14tXtXKAHUU6RWB2vYOutz_CQJGtYpsyeW7jU8sfkvUs6QWFc_Xr8RnRgbyQ9CgeUjhhDQHu-gRFqR2d-_b3aKy0KYlbo6ny53atw8q2vvdwj9KS4V856tUn6mT9egwIE3qvVF3F67YiT_VAX5KUVs_YLJQCJ8WrhLJ0FWHklIfFanJT2D3fZywx74lSDHPP4F6kywgGiDTeVivhBDApyWZO13AwLD1cWxyPy_u2I-7Mx_cwhJ3iZIEAFfKRoMmZmI-HOogVHFdgm9ibWqRPXpXDR5MEC_oO3iPJ1gujCIfeY00MWL-1dS_w3ZjA5v1luG6IMkpLUIRekIDY= + format: openai-responses-v1 + id: rs_0f2864c6e4889623016912094071dc81979635e33414ef8729 + index: 0 + type: reasoning.encrypted + refusal: null + role: assistant + native_finish_reason: completed + created: 1762789695 + id: gen-1762789695-8IngOktYUifJqeBs0mwc + model: openai/gpt-5-mini + object: chat.completion + provider: OpenAI + usage: + completion_tokens: 1515 + completion_tokens_details: + reasoning_tokens: 704 + prompt_tokens: 17 + total_tokens: 1532 + status: + code: 200 + message: OK +- request: + headers: 
+ accept: + - application/json + accept-encoding: + - gzip, deflate + connection: + - keep-alive + content-length: + - '171' + content-type: + - application/json + host: + - openrouter.ai + method: POST + parsed_body: + messages: + - content: Be helpful. + role: system + - content: Tell me about Mars + role: user + model: openai/gpt-5-mini + stream: false + usage: + include: true + uri: https://openrouter.ai/api/v1/chat/completions + response: + headers: + access-control-allow-origin: + - '*' + connection: + - keep-alive + content-length: + - '18631' + content-type: + - application/json + permissions-policy: + - payment=(self "https://checkout.stripe.com" "https://connect-js.stripe.com" "https://js.stripe.com" "https://*.js.stripe.com" + "https://hooks.stripe.com") + referrer-policy: + - no-referrer, strict-origin-when-cross-origin + transfer-encoding: + - chunked + vary: + - Accept-Encoding + parsed_body: + choices: + - finish_reason: stop + index: 0 + logprobs: null + message: + content: |- + Here’s a concise but thorough overview of Mars — the fourth planet from the Sun and Earth’s most studied neighbor. + + Quick facts + - Distance from Sun: ~1.52 AU (about 228 million km on average) + - Diameter: ~6,780 km (about 0.53 Earth’s diameter) + - Mass: ~0.11 Earth masses + - Surface gravity: ~3.71 m/s² (~0.38 g) + - Day length: ~24.6 hours + - Year: ~687 Earth days + - Axial tilt: ~25.2°, so Mars has seasons similar in character to Earth’s + - Average surface temperature: roughly −60°C (range from about −125°C to +20°C) + - Atmosphere: very thin, ≈0.6% of Earth’s surface pressure; ~95% carbon dioxide, small amounts of nitrogen and argon, trace oxygen and water vapor + + Surface, geology and major features + - Rocky planet with basaltic crust and iron-rich regolith; iron gives much of the surface its reddish color. + - Volcanoes: home to the largest volcano in the Solar System, Olympus Mons (~21–22 km high). 
+ - Canyons: Valles Marineris is a vast canyon system ~4,000 km long and up to ~7 km deep. + - Impact craters are widespread; some regions are heavily cratered and ancient, others are younger and smoother. + - Evidence of extensive past erosion by flowing water: ancient river valleys, deltas, and layered sedimentary rocks. Many minerals (clays, sulfates) point to long-term interactions with liquid water in the past. + + Water and climate history + - Today liquid water is not stable on the surface except perhaps transiently in brines; most water exists as ice (polar caps and subsurface permafrost) and as vapor in the atmosphere. + - Polar caps: layered deposits of water ice and seasonal carbon dioxide ice. + - Strong evidence that early Mars was warmer and wetter, with lakes and possibly large-scale oceans in its first billion years. The climate transitioned to the cold, arid planet we see now. + - Active processes: seasonal CO2 frost, dust storms (sometimes global), dust devils, and recurring surface changes indicate Mars is still geologically active in some ways. + + Atmosphere and magnetic field + - Thin CO2-dominated atmosphere yields low surface pressure (~6–7 millibars) and weak greenhouse warming. + - No global magnetic field today; there are localized remnant magnetic fields in the crust. Lack of a strong magnetosphere made Mars vulnerable to atmospheric loss from the solar wind over time. + - Trace gases like methane have been detected intermittently; their source (geological, subsurface chemical, or biological) is not yet settled. + + Moons + - Two small moons: Phobos (mean radius ~11 km) and Deimos (~6 km). Likely captured asteroids or formed from debris after an impact. Phobos orbits very close to Mars and is slowly spiraling inward. 
+ + Exploration history (highlights) + - Early flybys and orbiters: Mariner, Viking, Mariner 9 (first to map Mars), and many subsequent orbiters (Mars Global Surveyor, Mars Odyssey, Mars Express, Mars Reconnaissance Orbiter, MAVEN, etc.). + - First successful landers: Viking 1 and 2 (1976). + - Rovers: Sojourner (1997), Spirit and Opportunity (2004), Curiosity (2012), Perseverance (2021). Perseverance carries instruments for astrobiology and caches rock cores for planned sample return; it deployed the Ingenuity helicopter (first powered flight on another planet). + - Landers studying interior: Phoenix (2008) studied polar ice, InSight (2018) measured seismic activity and interior structure. + - International missions: ESA’s Mars Express and ExoMars program, India’s Mars Orbiter Mission (Mangalyaan), China’s Tianwen-1 (orbiter + lander + Zhurong rover), UAE’s Hope mission. + - Mars Sample Return is planned by NASA/ESA to retrieve Perseverance samples within the 2020s–2030s timeframe. + + Potential for life and habitability + - Current surface conditions are harsh for known Earth life (radiation, cold, dryness, low pressure), but subsurface habitats (protected from radiation, warmer, with ice/water) remain plausible refuges for microbial life. + - Ancient habitable environments are well documented in the rock record; search for past biosignatures is a major goal. + - Methane detections and seasonal changes spark interest, but sources remain uncertain. + + Human exploration and resources + - Human missions are technically plausible but challenging: long transit times, radiation exposure, life-support and entry/descent/landing of large payloads. + - In-situ resource utilization (ISRU) prospects: abundant CO2 for making oxygen/fuel, widespread water ice as a water and hydrogen source, regolith for shielding and construction. + - Several agencies and private companies have long-term plans or concepts for crewed missions (2030s–2040s timeframes are frequently discussed). 
+ + Interesting facts and practical notes + - A Martian “solar day” (sol) is ~24 hours 39 minutes — useful for rover operations. + - Global dust storms can darken solar panels and affect surface operations for months. + - Mars’ thin atmosphere makes atmospheric entry and landing more difficult than for Earth; aerobraking helps for orbiters but landing large payloads is still hard. + - Cultural note: named after the Roman god of war; long inspired human imagination and science fiction. + + If you want, I can: + - Summarize the most important missions and their discoveries, + - Explain what living on Mars would require (habitats, food, radiation protection), + - Dive deeper into the geology or evidence for ancient water, + - Or provide a timeline of exploration and planned missions. + reasoning: |- + **Exploring Mars Overview** + + The user wants a comprehensive overview of Mars, covering physical properties, its orbit, atmosphere, geology, and surface features like Olympus Mons and Valles Marineris. I should include information on seasons, climate, and any evidence of water or past habitability. I’ll mention its moons, Phobos and Deimos, and summarize exploration history, including various missions, plus human exploration prospects. It’s essential to keep the language accessible while adding key numbers like radius (about 3390 km) and gravity (3.71 m/s²). Cultural aspects and controversies about water and potential biosignatures need to be noted too!**Describing Mars' Atmosphere and Conditions** + + Mars has an axial tilt of about 25.2 degrees, making its seasons similar to Earth's. The atmosphere is composed mostly of carbon dioxide (about 95.32%), with smaller amounts of nitrogen (2.7%), argon (1.6%), oxygen (0.13%), and carbon monoxide (0.07%). Surface pressure is roughly 0.6% of Earth’s at an average of around 6 mbar. Temperatures can range from -125°C at the poles in winter to +20°C at the equator in summer. 
Mars has polar ice caps, evidence of ancient water, and two moons, Phobos and Deimos, which may be captured asteroids.**Highlighting Mars' Surface Features and Exploration** + + Mars features the tallest volcano, Olympus Mons, standing around 21.9 km high, and Valles Marineris, a canyon system about 4,000 km long and up to 7 km deep. The thin atmosphere limits liquid water stability, but brines could be possible. I should mention frequent dust storms, including global ones, and the timeline of exploration missions such as Mariner flybys, Viking landers in 1976, and others like Curiosity and Perseverance, which landed in 2012 and 2021, respectively. Future plans include Mars Sample Return and potential human missions, while habitability remains uncertain, especially on the surface.**Discussing Mars and Its Exploration Needs** + + I need to address several aspects of Mars, focusing on the need for pressurized habitats due to radiation concerns from its thin atmosphere and weak magnetosphere. Resources like water ice, regolith for building, and CO2 for oxygen and fuel are essential for In-Situ Resource Utilization (ISRU). I should include basic facts, environmental elements like dust devils, seasonal differences due to orbital eccentricity, and the average distance from the Sun at about 1.52 AU. My approach will be a structured overview touching on all these topics. + reasoning_details: + - format: openai-responses-v1 + index: 0 + summary: |- + **Exploring Mars Overview** + + The user wants a comprehensive overview of Mars, covering physical properties, its orbit, atmosphere, geology, and surface features like Olympus Mons and Valles Marineris. I should include information on seasons, climate, and any evidence of water or past habitability. I’ll mention its moons, Phobos and Deimos, and summarize exploration history, including various missions, plus human exploration prospects. 
It’s essential to keep the language accessible while adding key numbers like radius (about 3390 km) and gravity (3.71 m/s²). Cultural aspects and controversies about water and potential biosignatures need to be noted too!**Describing Mars' Atmosphere and Conditions** + + Mars has an axial tilt of about 25.2 degrees, making its seasons similar to Earth's. The atmosphere is composed mostly of carbon dioxide (about 95.32%), with smaller amounts of nitrogen (2.7%), argon (1.6%), oxygen (0.13%), and carbon monoxide (0.07%). Surface pressure is roughly 0.6% of Earth’s at an average of around 6 mbar. Temperatures can range from -125°C at the poles in winter to +20°C at the equator in summer. Mars has polar ice caps, evidence of ancient water, and two moons, Phobos and Deimos, which may be captured asteroids.**Highlighting Mars' Surface Features and Exploration** + + Mars features the tallest volcano, Olympus Mons, standing around 21.9 km high, and Valles Marineris, a canyon system about 4,000 km long and up to 7 km deep. The thin atmosphere limits liquid water stability, but brines could be possible. I should mention frequent dust storms, including global ones, and the timeline of exploration missions such as Mariner flybys, Viking landers in 1976, and others like Curiosity and Perseverance, which landed in 2012 and 2021, respectively. Future plans include Mars Sample Return and potential human missions, while habitability remains uncertain, especially on the surface.**Discussing Mars and Its Exploration Needs** + + I need to address several aspects of Mars, focusing on the need for pressurized habitats due to radiation concerns from its thin atmosphere and weak magnetosphere. Resources like water ice, regolith for building, and CO2 for oxygen and fuel are essential for In-Situ Resource Utilization (ISRU). 
I should include basic facts, environmental elements like dust devils, seasonal differences due to orbital eccentricity, and the average distance from the Sun at about 1.52 AU. My approach will be a structured overview touching on all these topics. + type: reasoning.summary + - data: gAAAAABpEgl8ot_t1C45G8kTxU_Ea0ea-6M2Azibgi8FPFCsAoNtOggfC3TgXL9ALIYcezYnUEcr4RUR_F7SAPYABUNE825K3Vx_b4fKXYqVoXYmlauV6gR4tPgvSM_MUnSxKqvynqEnJb7uPPfy61nmxumItX8FOlfxvxLXsNHHx3G1-1GigM11XfG2wGEkKZd437w6vBc89gGmQho56wdQtwpbedTtvyP4KlEcPCYNADeSUi0RJEMBNFjUUAJ6u4No2KUgkBSElE8Ksn_I2nwzONf6z5oxg3LgA5O9HrnN3SSxdPImCG8UZ1ORbUTZ4LpPJRkys60Wt1ARKbx4ig_uedId0tF3TvQY_5fepg2Z4tJVdj5-7ZA_g6EZ0L8Z-bxxhw7ONnQe-AyenXTStLikDwQSrNexoNU9ckVh4T_bcHu4ugvNhVx0DqQz6MjzqfHDSS1dOiCuFk5EZt9A7oDnMUYFyZLOfhhxyYV7xQ0puOZzxeriYyU_PyqeM5cOw0kIHvNnPAqs8RXWDJzFzN8z46RpyWoS0IX62O5Ggb4ENw1yo4uoBvpJRf-Zuty846zZ0s8--gL5ECH4m-0UQnXpCaUkhoT_fiLAVnNpiKkClg0eQawTGqVeHVS4hSNzh9TYUPySa2o7e2GlJiMFS2T5bMXuWRdmWiy_XaCLrWd_b3Ev8KTQK91wvF5CWJA-0lXlKrgKAabu9kKieomwvKnXUZ0o6WT1k0SDHCbAu08jh_xrBfnOT1hL5J7_kedkfIjT2jxZmr4lhZmcUZ3aosI8HrmEdIUtfzbW8aKX1QH49DTgZuZxfZOmq_4ZGhX9QGBHj5iiq5SbGJa6lud61x5XIMpqlWZ2OJRmX0RcVl9tuJ10cK_qSOvANnHHRORYV9M2HAZqytk5Hv3As21Wxb-lQpiRPoAHO8rggWpvFjkzXJGgs6kNMiGMKf3PF85uZ0ALIBZn_ufzblUvmt6DMHb0OL2U99xt5Q1xYovx4rrNVJpm49sTQZglJcfba_Q6O_4BQOfb_IDKdti4X8xcZWfqHTlE-xxVEgioskSAVVJuWlCaRzGdcayHOFShskkp28_xFcikxUv6F8kxXdr-EiJQP9B6QZfdteyoXu2S304DfyUCD05gVDoc8GvYYmJ6pK6bvQuv33EnF47_vRi20GY87FSDRPSD2tbxLGpf0stknE2h07QOmNBCbFjojSoYsDP88QjaXFOeHDO5lQv35rDgns_-sX_lt-Arjx3QQDDtl2VO2nrxNx6cbQ675-RUMWV8qUTP931nhB9Pn2d4FPMLiU02yZvE2clGCYXFNUYutSQWK2t8A7nJITzqd2sYc2Dywdh1JBTemI5ErnWnJEbKKd3XBuoq2PP2ZrIVE2-ps-uS79Bgz1WerIj5wfm0aiRv2EK2y3fE5jLAuxtLLt0K6ZyzcTpfPvDeqi78LFS3Iz7d8c79yjHaDB277kLOiOmbOrUTuj7FsxYwtSAPRRb7Lfo9SHA2BAkaA0wFTSgiKGKARJiUtsOge7WYNn2hYg3gEmgkcCP13W636kerFQxZHDP_Ix6NqeCPGk8fYXysJnDZNlsKE9a_Su3LoHjAUcu8Jk8EzfPpx87p4pAME-iRPquA6C4PwVvZ2AtUHafcpCfxlUY29R8Tw1U_hASW3PqJU895ckZfzxoWpd1jd7kjeZt77xWYAYFBpUSKHeY9Z2wNDOI
9_1i47KuyJ-ybULLKsPBooEvk8ih6Hc0_JFf4tsBq_XkSJurqd2OeH4owS2M5LFujf_vCoQyaPidH7yXiNJszHPJatvt7RhAsHHzXuObcekvC66z4myld7g6PCQFl_vMV77wwHK27StoLhaZBaJVl67C8jwIVgwOMQTyQyS-DW8Gmzwx_hL6kAxVCkPHZGKAATEKs0Id4PqjfHE0QQVpAZvhQais49JxDYXXnmsheRkWIx5I9fm4EJsA5DDSFz02Q4o5seNqrJ3GUqBTF7aVitb9XExsPqqKm6KZZdNqhzgEzn4wy_hM4wDhtiI_q_gvjkCXGvyBwHVvmhAuKCzuxVEVIeWZpH75SxUCHDoUKMYuf_BEru-F0OKNyH1bMCR_hSutgFZXfhkziv4cMevzUhf8Bo3pV8C3T0L-q7a2GiOUONojsluqa5C0imRbLG2DrYWgNm7excL-aLctoZ1S9wjgNSPTc8YI_KV7_rSpnb7hkQkTbcrZh7wl9iD1jKPHvvrB3LVBed7iu5retyeLhFxvy7YYWOMK-OzukqWKXPufJtZUbUVq1K0tYAYVqc1PgIrQmDUqyXhWAWGlaM0qJ7BAssfsvhyrGWEE0zKGxQf9aacREVBPNlsTX0pyJtnIWTzdognZBQ4nVKjIE2N7IVprhDflsSywzTviU54uFF8bf4rpCX4SxaOC8RG7vTyZYC1ZMkRdk6nJopY-n3nw3KcVnNqT_n9BAO8CJTDmX4hft--yVvpfkLIkjMTNjg9alrLTeT8aMAnCbJYwEmEFsCFbiOZeVHVqeUXkjsYhjz9d3ABi6gkU4CEIElFim9BwKM1Ml0AMHkbzaZP6whliRkV2j8ClX2pP3_LC8imeYeR_Vpcf_OGOAkORcMfJw--SQaWB4b8rpH4k9tuFHLiwOMXUvxXGHjWNf645Bfx_jJ--kBYIOQFnilfIAMaswQsYynjyO9uK6q5CVh4bPRV0_cP8v6hPNctr7ETCz6PCuaLVzYiFOify9S1kHUDgYRXLkXEunCp3U2_sDKU4T-tePoaRemoDkfmZznX7B8a_ID8ku_rV_6iAiaXABifW2jzX7by4gh0TmKliKAbERL1bFSBJWi7rVr9Y43hEnuy1aCPpG1bB15_LHNsb6faz5uzcBiUJOdNiDRLrw4OG07_Hs7UqTlR9QmqNQzHjsFaCD6h_eh_kqHI6a8887DyCaZ2nMxV2Z3lP_9BmhtEHq_yDQll1cvZP_sWdsY9Zdcr_PdUzVoQkrW0eD6A3SgjJ63HL1ppaXfxBwPLJViHpGqj9ahvYt27mf1fIbJC9qqJU4WCT5YGm_dGUNrsVSrxhWqOeRm2ocgantqFdMlSivEz91DLHvTLFMtYrPOmA6ei8uHau5HwB4T2lwWjpc4eUp4urLnYKkiHxqutocE4bHS3xkb4rNk4Yd8j9HeO25Ek-zMJB76JM3XM6L4xaGUvC8-l7FWwNhFG-bo79unc2ULPYSJOq8PR0RR94XrnV2BRn-0njRZXOS3ehyEj05Y0cf0IChdFIgx2T2gDcBhGp0IsujFY-cBcYq2Nds3tTOh_v2krGlc-XMgU1omQd3gpnHGwz-dyTJwOoUGlAQepNEMr3aL0g6LxyGKeLYFXPCuARtxjFqONX__W7g6L_SY3HUzK_3vVrMoTVVlZXSSfx1ptzeyx1c3Dj0OrP0yIQem3CFDqs-AH2eev864EveUL2-VPigNOUekA4He6baNaxIj-oe1IqrZYczKw4bu1LYCHAiD1I32_h_uyBOfQtTQkVdfC84vYWn56YRrihZrF2474cK4F8eL7olxswVgvMn3RE9n6-G4Qf8f9wkelq9uv8lUbywvPhGjqzJbsxoheqDdQnVyTezZpYi6t5rsCbXYRrdbfoHDkRvbHS_Wg0bblevsZ7lqJFfPlmq53PzlEpOfy3XLsNqVA-mX2h9s9cUMmr34BtfjhSHJYf67g9NzCoQcVEMMjhgXcKCEb69Zpx
2FFTv62CGtQXXzoKGIXrWjNXYH5Np4hfknxaM9MtrhZPGd2qq66OKggsXeZUIE5kZZYJ-1qbPwCjL-_pR47FHqjNDBwSCwccYQxZ74tLpi2O4g9e4UhCmIW1v73dyh_xs4VqU6EeYd_sp-cnjh4DgIO7pRE_XyEU-RKvaiZ_7vfY_n9fvzNedahRkiz0ebdGK2DjNtX2uzfhN5-HQfNRxwwbExFCsgq9y0RoJOJnYnuFIiT6P_frlzoQdP8JBlwU7piPaVh4uAQKcEsvAcB7Uhsms5twfeqm4LXvJ1gXXhtinswl7JSt3UGBGpziPjYcsxrJQF07CuCDmp1TBUf1j0h6AvETy_ERHv_3ra7f74ZpNr3ff-G-SDG_x3maznxSbQy4VMop3-RfLH89cVtyGZzZhwb8L8mlpcJHXXlFb5Lwz8-q5avF1fXHN75NeOLjGvas7Ecjtmnup7oESwfIFrfql1IfD3vV18kBCDfZblcPmZzzT1rO8BvZqJfpynpoSYE9mmGKPiD2NsfYPWKnIinAPyot8LC9UfGsEPVLEtsvh9kGIEZ3HxlSHq3Uf-RSLDxvgMNz05-G45rohNaZEAXCFDIOM0EIZq5wHlvTuHM1w1FbY_nVnBJdxTgpMJbHEtlWzLaQHjim9pW6ZZFrm3oZ8-3XtljodSX6mc7_h5oYwXtMwNclPizEBA08mJ2RvsSTQUJWx1F2ac2MQhoEqTez_9gdlvhWTzuh_0M686Sg6wbCjxYfcVg_1kTp0vMOczNYYhyZ-VIfzZo45IlgOfKDK23Rb8kUenPNtCbYGwu4A64cN5XJEUYK4mD64La6fa4Hd7OXhlELPUhXJtSdjJ3KmrRNe6MY8hN2hwHuuqazI7uzu6IEpj3u9lGK8egCPheqGBCM65UmozPPVdku0vRpHP7xvd7ozqo7QsuPUgfdJpGQG4H64DoTYRLLB5tVXcDaA8O9o-B6fsj4PGA_bwnT-cZW48WVeswu7vXTvSsWgmhM1syvVIPT5uAVli-m9y1r5Ca423y7OAa1P6S8h6QhlygUYGE4BFs8Uslcd0cz8CdptdGF8kUvCryTm1MzFiWs9SyMOMw6vCNNKuPxl4FrdE3mEiGb-NSlLaw1DxI6Dy3ubDAYVC9U1I8jiIvbiddkSWMA40402rDhi8DfRThmBFPLOGcCM34fpKOLp7vO_3U9YLgnXVAJjNIgEOTRi6m0gIPiKMa1ScE6Qov0x4alElBxS9Syx9-GqPltMlyzvRaM1TO6VeGARvwc7qA66hMQ9FLd2exDRAlvkVM2A1rQJbWHGs-_VSoiC92_g3xJz3c3ZrUKjePJmyQ0xr2Lg-PpDSBEvMhFinb2nQSQGV15e2oyC6VpGuxl_bYbDLOEtlf1XbMhRK_iJB2K_liXQr4BtLMm6CuXugPoTFpVJ7xrxOhnNbO3ELPgB6kHAjdwnHi8xc9zE1IKwZafKLSlnkDdlRIEbscIRPS81RjL-xpqZ4KppEVUtAgP9aSyaIrohsEus4aRSDOOfpScWp91Q4UMeG7K8hTIkQEFn-uSgTm5zzOd34K-abNKdctfwlYm44srnGwOhFSmP-cW_iYo4mm3z4SNXv8kiMBzvndFN6h06CT-lxI5waEIPaBojrktVvft3hWgCD6YtYw2E9uU1dvDWBxG2qXSLInIBMzXLuLjkfyCZbdS7-252_Z7mWRnhtE4uIGmltagUV4XXF6FONdbrbTKUJgLhnt5Vol-xntlN6FOAg0IN7blJ6BliCQr0MH0PFRMRPcnX-AT2rEdO4fU8oKdLT950Wt6EA5zEwVfiLf2r1mQGeBHkty5ixE1FDxd9OQ7KYfhDmJvDCCcrgUjsusWAAb71kTIcyYdbN1RABQlKm6D4CJJZcipcf559CXgiWyHS2SWVPX_2qq9vl-SCMrEVsgai7Nkr9Kx18aiPTRacC5zaM3mIM39vYILH-POy39d1n7__ezcVoW9dXEb_g04xLRopTed
EfrF4gSeM-bWCKDzGMWmzB6cAHc5YY2s8JJbP8CRK8EjDDstJr5NhVq35N80aLcwzgkOn511Acr1GOukyitSoBo05hKJsyHrQv-T56g6iNkPCD3MFRUuvvDJC_UUzomG6rrix5yNxbzro7EiSLSckTJoIJEHxaYO2AyrgcTxJhqJ21rjNTtduB4Iv7CHY8RVrjZIpJNmfqQXmU_4T4u16EWK2M5miKytzOJYNui7sr9ezhDFf7O2igxLlES1q7VBw6RBitxA9-GF83hGljEHcYl2L-o0kgjq4MrWdlWWtYm8mfAG_Iybu2luvNIPRh9O-2HvKH1FIv5ozulaUvR6zlBI8PXkLqAF9Gm_ShmIE43U4ki-16yfZwoCf9m-PGjb_o9GcAzz7usKvFPk-M8cIHoalhGXZCwxKyhTGjZnCHqcZsOVegcZMsr9cotbt9VGxTzlFz2itGv15uM80wYNhYdeQDuWMxSr88R0= + format: openai-responses-v1 + id: rs_029b1b33873fe02b0169120967d9a88197af3d9f4db7a92055 + index: 0 + type: reasoning.encrypted + refusal: null + role: assistant + native_finish_reason: completed + created: 1762789734 + id: gen-1762789734-sxYWfPfn343ZvBkw9zV9 + model: openai/gpt-5-mini + object: chat.completion + provider: OpenAI + usage: + completion_tokens: 2177 + completion_tokens_details: + image_tokens: 0 + reasoning_tokens: 960 + cost: 0.00435825 + cost_details: + upstream_inference_completions_cost: 0.004354 + upstream_inference_cost: null + upstream_inference_prompt_cost: 4.25e-06 + is_byok: false + prompt_tokens: 17 + prompt_tokens_details: + audio_tokens: 0 + cached_tokens: 0 + video_tokens: 0 + total_tokens: 2194 + status: + code: 200 + message: OK +version: 1 diff --git a/tests/models/cassettes/test_openrouter/test_openrouter_with_native_options.yaml b/tests/models/cassettes/test_openrouter/test_openrouter_with_native_options.yaml new file mode 100644 index 0000000000..b073b87179 --- /dev/null +++ b/tests/models/cassettes/test_openrouter/test_openrouter_with_native_options.yaml @@ -0,0 +1,82 @@ +interactions: +- request: + headers: + accept: + - application/json + accept-encoding: + - gzip, deflate + connection: + - keep-alive + content-length: + - '193' + content-type: + - application/json + host: + - openrouter.ai + method: POST + parsed_body: + messages: + - content: Who are you + role: user + model: google/gemini-2.0-flash-exp:free + models: + - x-ai/grok-4 + provider: + 
only: + - xai + stream: false + transforms: + - middle-out + uri: https://openrouter.ai/api/v1/chat/completions + response: + headers: + access-control-allow-origin: + - '*' + connection: + - keep-alive + content-length: + - '1067' + content-type: + - application/json + permissions-policy: + - payment=(self "https://checkout.stripe.com" "https://connect-js.stripe.com" "https://js.stripe.com" "https://*.js.stripe.com" + "https://hooks.stripe.com") + referrer-policy: + - no-referrer, strict-origin-when-cross-origin + transfer-encoding: + - chunked + vary: + - Accept-Encoding + parsed_body: + choices: + - finish_reason: stop + index: 0 + logprobs: null + message: + content: |- + I'm Grok, a helpful and maximally truthful AI built by xAI. I'm not based on any other companies' models—instead, I'm inspired by the Hitchhiker's Guide to the Galaxy and JARVIS from Iron Man. My goal is to assist with questions, provide information, and maybe crack a joke or two along the way. + + What can I help you with today? 
+ reasoning: null + refusal: null + role: assistant + native_finish_reason: stop + created: 1759509677 + id: gen-1759509677-MpJiZ3ZkiGU3lnbM8QKo + model: x-ai/grok-4 + object: chat.completion + provider: xAI + system_fingerprint: fp_19e21a36c0 + usage: + completion_tokens: 240 + completion_tokens_details: + reasoning_tokens: 165 + prompt_tokens: 687 + prompt_tokens_details: + audio_tokens: 0 + cached_tokens: 682 + total_tokens: 927 + status: + code: 200 + message: OK +version: 1 diff --git a/tests/models/cassettes/test_openrouter/test_openrouter_with_preset.yaml b/tests/models/cassettes/test_openrouter/test_openrouter_with_preset.yaml new file mode 100644 index 0000000000..bd85de5b07 --- /dev/null +++ b/tests/models/cassettes/test_openrouter/test_openrouter_with_preset.yaml @@ -0,0 +1,75 @@ +interactions: +- request: + headers: + accept: + - application/json + accept-encoding: + - gzip, deflate + connection: + - keep-alive + content-length: + - '131' + content-type: + - application/json + host: + - openrouter.ai + method: POST + parsed_body: + messages: + - content: Trains + role: user + model: google/gemini-2.5-flash-lite + preset: '@preset/comedian' + stream: false + uri: https://openrouter.ai/api/v1/chat/completions + response: + headers: + access-control-allow-origin: + - '*' + connection: + - keep-alive + content-length: + - '617' + content-type: + - application/json + permissions-policy: + - payment=(self "https://checkout.stripe.com" "https://connect-js.stripe.com" "https://js.stripe.com" "https://*.js.stripe.com" + "https://hooks.stripe.com") + referrer-policy: + - no-referrer, strict-origin-when-cross-origin + transfer-encoding: + - chunked + vary: + - Accept-Encoding + parsed_body: + choices: + - finish_reason: stop + index: 0 + logprobs: null + message: + content: |- + Why did the train break up with the track? + + Because it felt like their relationship was going nowhere. 
+ reasoning: null + refusal: null + role: assistant + native_finish_reason: STOP + created: 1759510642 + id: gen-1759510642-J9qupM2EtKoYTfG7ehDn + model: google/gemini-2.5-flash-lite + object: chat.completion + provider: Google + usage: + completion_tokens: 21 + completion_tokens_details: + image_tokens: 0 + reasoning_tokens: 0 + prompt_tokens: 31 + prompt_tokens_details: + cached_tokens: 0 + total_tokens: 52 + status: + code: 200 + message: OK +version: 1 diff --git a/tests/models/cassettes/test_openrouter/test_openrouter_with_reasoning.yaml b/tests/models/cassettes/test_openrouter/test_openrouter_with_reasoning.yaml new file mode 100644 index 0000000000..abed014bc6 --- /dev/null +++ b/tests/models/cassettes/test_openrouter/test_openrouter_with_reasoning.yaml @@ -0,0 +1,203 @@ +interactions: +- request: + headers: + accept: + - application/json + accept-encoding: + - gzip, deflate + connection: + - keep-alive + content-length: + - '174' + content-type: + - application/json + host: + - openrouter.ai + method: POST + parsed_body: + messages: + - content: What was the impact of Voltaire's writings on modern french culture? Think about your answer. + role: user + model: z-ai/glm-4.6 + stream: false + uri: https://openrouter.ai/api/v1/chat/completions + response: + headers: + access-control-allow-origin: + - '*' + connection: + - keep-alive + content-length: + - '20169' + content-type: + - application/json + permissions-policy: + - payment=(self "https://checkout.stripe.com" "https://connect-js.stripe.com" "https://js.stripe.com" "https://*.js.stripe.com" + "https://hooks.stripe.com") + referrer-policy: + - no-referrer, strict-origin-when-cross-origin + transfer-encoding: + - chunked + vary: + - Accept-Encoding + parsed_body: + choices: + - finish_reason: stop + index: 0 + logprobs: null + message: + content: |2- + + + Of course. This is an excellent question that requires looking beyond simple historical facts and into the very DNA of a nation's identity. 
The impact of Voltaire's writings on modern French culture is not merely a historical legacy; it is a living, breathing, and often contested foundation. + + To think about it properly, we must break down his influence into several key areas that are still visible in France today. + + --- + + ### 1. The Foundation of *Laïcité*: The War on Religious Dogma + + This is arguably Voltaire's most profound and enduring impact. His famous cry, **"Écrasez l'infâme!"** ("Crush the infamous thing!"), was a direct and relentless assault on the power, intolerance, and superstition of the Catholic Church. + + * **Voltaire's Contribution:** Through works like the *Dictionnaire philosophique* and his treatise on tolerance, he didn't just critique religious belief; he attacked the institutional power of the Church over the state, education, and justice. His campaign to rehabilitate Jean Calas, a Protestant wrongly executed, was a masterclass in using public opinion to fight religious injustice. + * **Modern French Impact:** This spirit directly fueled the French Revolution's seizure of Church lands and the radical secularism of the Third Republic. It culminated in the **1905 law on the Separation of the Churches and the State**, which established *laïcité* as a core principle of the Republic. Today, debates over the wearing of hijabs in schools or the display of religious symbols in public spaces are a direct continuation of the Voltairean struggle to keep the public sphere strictly secular. For many French people, *laïcité* isn't just a law; it's a defense of reason and liberty against dogma, a battle Voltaire started. + + ### 2. The Sanctity of *Liberté*: Freedom of Expression + + Voltaire was a zealous advocate for freedom of speech, thought, and of the press. He was constantly censored, exiled, and forced to publish his most controversial works anonymously or abroad. 
+ + * **Voltaire's Contribution:** He used satire, irony, and wit as weapons to bypass censors and criticize the monarchy, the aristocracy, and the Church. *Candide* is a prime example—a seemingly simple story that savagely lampoons philosophical optimism, religious hypocrisy, and social injustice. + * **Modern French Impact:** This is enshrined in the first word of the Republic's motto: **"Liberté."** The French have a fierce, often absolutist, defense of free speech, even for speech they find offensive. The **"Charlie Hebdo"** affair is the ultimate modern example. The magazine's deliberate, provocative, and often blasphemous cartoons are a direct descendant of Voltaire's satirical style. The rallying cry *"Je suis Charlie"* was, in essence, a modern declaration of *"Je suis Voltaire."* The willingness to "offend" in the name of challenging power is a deeply Voltairean trait in modern French culture. + + ### 3. The Rise of the Public Intellectual and *L'Esprit Critique* + + Before Voltaire, philosophers were often academics. Voltaire made being an "intellectual" a public role. He was a celebrity, a socialite, a correspondent with kings and emperors, who used his immense fame to influence public opinion. + + * **Voltaire's Contribution:** He demonstrated that ideas could be a form of political action. He engaged with the public, not just other scholars. His style was clear, witty, and accessible, designed for a growing literate bourgeoisie, not just for other philosophers. + * **Modern French Impact:** France has a unique and enduring respect for its **"intellectuels."** Figures like Jean-Paul Sartre, Simone de Beauvoir, Michel Foucault, and Pierre Bourdieu were national figures who weighed in on politics and society. This cultural expectation—that thinkers have a public responsibility—is a Voltairean invention. Furthermore, his work instilled a cultural value known as ***l'esprit critique***—the critical spirit. 
The French education system and social discourse prize the ability to deconstruct arguments, question authority, and engage in rigorous, often skeptical, debate. + + ### 4. The Seeds of Human Rights and Social Justice + + While not a democrat—he favored an "enlightened despot"—Voltaire's writings laid the groundwork for the concept of universal human rights. + + * **Voltaire's Contribution:** His focus on the Calas and Sirven cases was not about legal technicalities; it was about the inherent right of an individual to a fair trial, regardless of their religion. He argued for the rule of law to protect the individual from the arbitrary power of the state and the mob. + * **Modern French Impact:** This directly fed into the **Declaration of the Rights of Man and of the Citizen (1789)**, a foundational document of the Republic. The modern French commitment to *universalisme*—the idea that human rights are universal and not tied to any particular culture or identity—has its intellectual roots in Voltaire's appeals to reason and humanity over sectarian law. + + ### Nuance and Contradictions: The Complicated Legacy + + To truly "think" about the answer, we must acknowledge the complexities. Voltaire's legacy is not a simple story of progress. + + * **Not a Democrat:** Voltaire was an elitist who feared the "mob." He believed in rule by an educated, enlightened monarch, not by the people. The French Revolution took his ideas of liberty and reason and pushed them to a democratic extreme he would have found terrifying. + * **Personal Prejudices:** The great champion of tolerance held his own significant prejudices. His writings contain anti-Semitic passages and racist views about non-Europeans, particularly Africans. This complicates his image as a universalist and serves as a reminder that even the great architects of modernity were products of their time, with deep flaws. 
+ + --- + + ### Conclusion + + The impact of Voltaire on modern French culture is not that of a dusty historical figure whose ideas are occasionally quoted. He is a **ghost in the machine**. When France debates secularism, it is channeling Voltaire. When a French satirist provokes outrage in the name of free speech, it is a Voltairean act. When a French student is taught to critically analyze a text rather than accept it at face value, they are learning in a tradition Voltaire helped create. + + His spirit of *l'esprit critique*, his unwavering defense of *liberté*, and his profound suspicion of religious and political dogma are not just influences; they are the very pillars upon which the modern French identity of the Republic is built, for better and for worse. + reasoning: |- + 1. **Deconstruct the User's Prompt:** + * **Core Subject:** Voltaire's writings. + * **Core Question:** What was their impact? + * **Specific Focus:** Modern French culture. + * **Implicit Instruction:** "Think about your answer." This means I need to go beyond a simple list. I need to analyze, connect ideas, show nuance, and structure the response logically. It's a prompt for depth and quality. + + 2. **Initial Brainstorming & Keyword Association:** + * **Voltaire:** Enlightenment, *Candide*, *Lettres philosophiques*, *Dictionnaire philosophique*, tolerance, freedom of speech, religion (critique of), deism, reason, satire, wit, irony, "Écrasez l'infâme" (Crush the infamous thing), Calas affair, social justice, monarchy, absolutism. + * **Modern French Culture:** *Laïcité* (secularism), Republic (Liberté, Égalité, Fraternité), intellectualism, *l'esprit critique* (critical spirit), satire (Charlie Hebdo), debate, protest, universalism, human rights, French language (academy, precision), gastronomy? (Maybe a stretch, but let's keep it in mind), education system. 
+ * **Connecting the two:** + * Critique of Church -> *Laïcité* + * Freedom of Speech/Reason -> *Liberté*, *l'esprit critique*, satire + * Social Justice (Calas) -> Human rights, *Égalité*, protest culture + * Wit/Irony -> French intellectual tradition, debate culture + * Use of French -> Precision of the language, the role of the public intellectual + + 3. **Structure the Answer:** A good structure is key to a "think about your answer" response. + * **Introduction:** Start with a strong, summary statement. Acknowledge Voltaire's foundational role. State that his impact isn't just historical but is woven into the very fabric of modern French identity. Use a powerful opening sentence. + * **Thematic Sections (Body Paragraphs):** This is where the real analysis happens. I'll group the impacts into clear, thematic categories. This is better than a chronological list or a random jumble. My brainstormed connections are perfect for this. + * **Theme 1: The Birth of *Laïcité* and the Critique of Religious Power.** This is arguably the most significant impact. I'll explain *Écrasez l'infâme*, connect it to the Calas affair, and show how this evolved into the strict secularism of the Third Republic and modern *laïcité*. I'll mention the 1905 law on the separation of Church and State. + * **Theme 2: The Spirit of *Liberté* and Freedom of Expression.** This is the "free speech" angle. I'll talk about his battles against censorship, his use of satire and pseudonyms. I'll connect this directly to the French Republic's motto and the modern, often fierce, defense of free speech, even when controversial (e.g., Charlie Hebdo). This shows a direct, living legacy. + * **Theme 3: The Rise of the Public Intellectual and *L'Esprit Critique*.** This is about the *style* and *role* of the thinker in France. Voltaire wasn't just a philosopher; he was a public figure, a celebrity. He used his wit and platform to influence public opinion. 
I'll connect this to the modern status of intellectuals in France (Sartre, Foucault, Bourdieu) and the cultural value placed on sharp, critical debate (*l'esprit critique*). + * **Theme 4: The Seeds of Human Rights and Social Justice.** This focuses on his activism. The Calas and Sirven cases are perfect examples. He wasn't just writing abstract philosophy; he was fighting for individual rights against an arbitrary system. I'll link this to the Declaration of the Rights of Man and of the Citizen (1789) and the modern French focus on universal human rights. + * **Theme 5: The Shaping of the French Language and Style.** This is a more subtle but important point. Voltaire was a master of prose. He championed clarity, precision, and wit. I'll connect this to the French Academy's role and the general cultural appreciation for well-argued, elegant prose. His style became a model. + + * **Nuance and Counter-arguments (The "Think about it" part):** A great answer doesn't just praise. It acknowledges complexity. + * Was Voltaire a true democrat? No, he was a monarchist who believed in an "enlightened despot." This is a crucial nuance. The French Revolution took his ideas much further than he ever intended. + * What about his own prejudices? He had documented anti-Semitic and racist views. This is important to mention to avoid hagiography and show a balanced understanding. It complicates his legacy as a champion of universal tolerance. + + * **Conclusion:** Summarize the main points. Reiterate the central thesis: Voltaire is not just a historical figure but a "ghost in the machine" of French culture. End with a powerful, memorable sentence that ties it all together, perhaps linking his spirit to a contemporary French phrase or ideal. + + 4. **Drafting and Refining:** + * Write the introduction, making it punchy. + * Flesh out each thematic section using the brainstormed points. Use specific examples (*Candide*, Calas affair, *Écrasez l'infâme*). 
+ * Use French terms where appropriate (*laïcité*, *l'esprit critique*, *Liberté, Égalité, Fraternité*) and briefly explain them. This adds authenticity and precision. + * Write the "Nuance" section carefully. Frame it not as a takedown but as a necessary part of a complete picture. + * Write the conclusion, ensuring it flows logically from the body and provides a sense of closure. + * Review the whole text. Check for flow, clarity, and repetition. Is the language strong? Is the argument coherent? Does it directly answer the user's prompt with the requested depth? (For instance, I'll make sure to explicitly use the phrase "modern French culture" throughout to stay on topic). The structure I've planned—Intro, 5 Thematic Points, Nuance, Conclusion—is robust and demonstrates the "thinking" process. + reasoning_details: + - format: unknown + index: 0 + text: |- + 1. **Deconstruct the User's Prompt:** + * **Core Subject:** Voltaire's writings. + * **Core Question:** What was their impact? + * **Specific Focus:** Modern French culture. + * **Implicit Instruction:** "Think about your answer." This means I need to go beyond a simple list. I need to analyze, connect ideas, show nuance, and structure the response logically. It's a prompt for depth and quality. + + 2. **Initial Brainstorming & Keyword Association:** + * **Voltaire:** Enlightenment, *Candide*, *Lettres philosophiques*, *Dictionnaire philosophique*, tolerance, freedom of speech, religion (critique of), deism, reason, satire, wit, irony, "Écrasez l'infâme" (Crush the infamous thing), Calas affair, social justice, monarchy, absolutism. + * **Modern French Culture:** *Laïcité* (secularism), Republic (Liberté, Égalité, Fraternité), intellectualism, *l'esprit critique* (critical spirit), satire (Charlie Hebdo), debate, protest, universalism, human rights, French language (academy, precision), gastronomy? (Maybe a stretch, but let's keep it in mind), education system. 
+ * **Connecting the two:** + * Critique of Church -> *Laïcité* + * Freedom of Speech/Reason -> *Liberté*, *l'esprit critique*, satire + * Social Justice (Calas) -> Human rights, *Égalité*, protest culture + * Wit/Irony -> French intellectual tradition, debate culture + * Use of French -> Precision of the language, the role of the public intellectual + + 3. **Structure the Answer:** A good structure is key to a "think about your answer" response. + * **Introduction:** Start with a strong, summary statement. Acknowledge Voltaire's foundational role. State that his impact isn't just historical but is woven into the very fabric of modern French identity. Use a powerful opening sentence. + * **Thematic Sections (Body Paragraphs):** This is where the real analysis happens. I'll group the impacts into clear, thematic categories. This is better than a chronological list or a random jumble. My brainstormed connections are perfect for this. + * **Theme 1: The Birth of *Laïcité* and the Critique of Religious Power.** This is arguably the most significant impact. I'll explain *Écrasez l'infâme*, connect it to the Calas affair, and show how this evolved into the strict secularism of the Third Republic and modern *laïcité*. I'll mention the 1905 law on the separation of Church and State. + * **Theme 2: The Spirit of *Liberté* and Freedom of Expression.** This is the "free speech" angle. I'll talk about his battles against censorship, his use of satire and pseudonyms. I'll connect this directly to the French Republic's motto and the modern, often fierce, defense of free speech, even when controversial (e.g., Charlie Hebdo). This shows a direct, living legacy. + * **Theme 3: The Rise of the Public Intellectual and *L'Esprit Critique*.** This is about the *style* and *role* of the thinker in France. Voltaire wasn't just a philosopher; he was a public figure, a celebrity. He used his wit and platform to influence public opinion. 
I'll connect this to the modern status of intellectuals in France (Sartre, Foucault, Bourdieu) and the cultural value placed on sharp, critical debate (*l'esprit critique*). + * **Theme 4: The Seeds of Human Rights and Social Justice.** This focuses on his activism. The Calas and Sirven cases are perfect examples. He wasn't just writing abstract philosophy; he was fighting for individual rights against an arbitrary system. I'll link this to the Declaration of the Rights of Man and of the Citizen (1789) and the modern French focus on universal human rights. + * **Theme 5: The Shaping of the French Language and Style.** This is a more subtle but important point. Voltaire was a master of prose. He championed clarity, precision, and wit. I'll connect this to the French Academy's role and the general cultural appreciation for well-argued, elegant prose. His style became a model. + + * **Nuance and Counter-arguments (The "Think about it" part):** A great answer doesn't just praise. It acknowledges complexity. + * Was Voltaire a true democrat? No, he was a monarchist who believed in an "enlightened despot." This is a crucial nuance. The French Revolution took his ideas much further than he ever intended. + * What about his own prejudices? He had documented anti-Semitic and racist views. This is important to mention to avoid hagiography and show a balanced understanding. It complicates his legacy as a champion of universal tolerance. + + * **Conclusion:** Summarize the main points. Reiterate the central thesis: Voltaire is not just a historical figure but a "ghost in the machine" of French culture. End with a powerful, memorable sentence that ties it all together, perhaps linking his spirit to a contemporary French phrase or ideal. + + 4. **Drafting and Refining:** + * Write the introduction, making it punchy. + * Flesh out each thematic section using the brainstormed points. Use specific examples (*Candide*, Calas affair, *Écrasez l'infâme*). 
+ * Use French terms where appropriate (*laïcité*, *l'esprit critique*, *Liberté, Égalité, Fraternité*) and briefly explain them. This adds authenticity and precision. + * Write the "Nuance" section carefully. Frame it not as a takedown but as a necessary part of a complete picture. + * Write the conclusion, ensuring it flows logically from the body and provides a sense of closure. + * Review the whole text. Check for flow, clarity, and repetition. Is the language strong? Is the argument coherent? Does it directly answer the user's prompt with the requested depth? (For instance, I'll make sure to explicitly use the phrase "modern French culture" throughout to stay on topic). The structure I've planned—Intro, 5 Thematic Points, Nuance, Conclusion—is robust and demonstrates the "thinking" process. + type: reasoning.text + refusal: null + role: assistant + native_finish_reason: stop + created: 1761603589 + id: gen-1761603589-2zW4FJLny121WIZ4fOS4 + model: z-ai/glm-4.6 + object: chat.completion + provider: BaseTen + system_fingerprint: null + usage: + completion_tokens: 2801 + completion_tokens_details: + reasoning_tokens: 0 + prompt_tokens: 24 + prompt_tokens_details: + audio_tokens: 0 + cached_tokens: 0 + total_tokens: 2825 + status: + code: 200 + message: OK +version: 1 diff --git a/tests/models/test_openai.py b/tests/models/test_openai.py index e68c64abe3..190c541ff6 100644 --- a/tests/models/test_openai.py +++ b/tests/models/test_openai.py @@ -2926,7 +2926,7 @@ async def test_invalid_response(allow_model_requests: None): with pytest.raises(UnexpectedModelBehavior) as exc_info: await agent.run('What is the capital of France?') assert exc_info.value.message.startswith( - 'Invalid response from OpenAI chat completions endpoint: 4 validation errors for ChatCompletion' + 'Invalid response from openai chat completions endpoint: 4 validation errors for ChatCompletion' ) @@ -2940,7 +2940,7 @@ async def test_text_response(allow_model_requests: None): with 
pytest.raises(UnexpectedModelBehavior) as exc_info: await agent.run('What is the capital of France?') assert exc_info.value.message == snapshot( - 'Invalid response from OpenAI chat completions endpoint, expected JSON data' + 'Invalid response from openai chat completions endpoint, expected JSON data' ) diff --git a/tests/models/test_openrouter.py b/tests/models/test_openrouter.py new file mode 100644 index 0000000000..daeb3080d1 --- /dev/null +++ b/tests/models/test_openrouter.py @@ -0,0 +1,356 @@ +from collections.abc import Sequence +from typing import Literal, cast + +import pytest +from inline_snapshot import snapshot +from pydantic import BaseModel + +from pydantic_ai import ( + Agent, + ModelHTTPError, + ModelMessage, + ModelRequest, + ModelResponse, + PartEndEvent, + PartStartEvent, + RunUsage, + TextPart, + ThinkingPart, + ToolCallPart, + ToolDefinition, + UnexpectedModelBehavior, +) +from pydantic_ai.direct import model_request, model_request_stream +from pydantic_ai.models import ModelRequestParameters + +from ..conftest import try_import + +with try_import() as imports_successful: + from openai.types.chat import ChatCompletion + from openai.types.chat.chat_completion import Choice + + from pydantic_ai.models.openrouter import OpenRouterModel, OpenRouterModelSettings + from pydantic_ai.providers.openrouter import OpenRouterProvider + +pytestmark = [ + pytest.mark.skipif(not imports_successful(), reason='openai not installed'), + pytest.mark.vcr, + pytest.mark.anyio, +] + + +async def test_openrouter_with_preset(allow_model_requests: None, openrouter_api_key: str) -> None: + provider = OpenRouterProvider(api_key=openrouter_api_key) + model = OpenRouterModel('google/gemini-2.5-flash-lite', provider=provider) + settings = OpenRouterModelSettings(openrouter_preset='@preset/comedian') + response = await model_request(model, [ModelRequest.user_text_prompt('Trains')], model_settings=settings) + text_part = cast(TextPart, response.parts[0]) + assert 
text_part.content == snapshot( + """\ +Why did the train break up with the track? + +Because it felt like their relationship was going nowhere.\ +""" + ) + + +async def test_openrouter_with_native_options(allow_model_requests: None, openrouter_api_key: str) -> None: + provider = OpenRouterProvider(api_key=openrouter_api_key) + model = OpenRouterModel('google/gemini-2.0-flash-exp:free', provider=provider) + # These specific settings will force OpenRouter to use the fallback model, since Gemini is not available via the xAI provider. + settings = OpenRouterModelSettings( + openrouter_models=['x-ai/grok-4'], + openrouter_transforms=['middle-out'], + openrouter_provider={'only': ['xai']}, + ) + response = await model_request(model, [ModelRequest.user_text_prompt('Who are you')], model_settings=settings) + text_part = cast(TextPart, response.parts[0]) + assert text_part.content == snapshot( + """\ +I'm Grok, a helpful and maximally truthful AI built by xAI. I'm not based on any other companies' models—instead, I'm inspired by the Hitchhiker's Guide to the Galaxy and JARVIS from Iron Man. My goal is to assist with questions, provide information, and maybe crack a joke or two along the way. + +What can I help you with today?\ +""" + ) + assert response.provider_details is not None + assert response.provider_details['downstream_provider'] == 'xAI' + assert response.provider_details['finish_reason'] == 'stop' + + +async def test_openrouter_stream_with_native_options(allow_model_requests: None, openrouter_api_key: str) -> None: + provider = OpenRouterProvider(api_key=openrouter_api_key) + model = OpenRouterModel('google/gemini-2.0-flash-exp:free', provider=provider) + # These specific settings will force OpenRouter to use the fallback model, since Gemini is not available via the xAI provider. 
+ settings = OpenRouterModelSettings( + openrouter_models=['x-ai/grok-4'], + openrouter_transforms=['middle-out'], + openrouter_provider={'only': ['xai']}, + ) + + async with model_request_stream( + model, [ModelRequest.user_text_prompt('Who are you')], model_settings=settings + ) as stream: + assert stream.provider_details == snapshot(None) + assert stream.finish_reason == snapshot(None) + + _ = [chunk async for chunk in stream] + + assert stream.provider_details == snapshot({'finish_reason': 'completed', 'downstream_provider': 'xAI'}) + assert stream.finish_reason == snapshot('stop') + + +async def test_openrouter_stream_with_reasoning(allow_model_requests: None, openrouter_api_key: str) -> None: + provider = OpenRouterProvider(api_key=openrouter_api_key) + model = OpenRouterModel('openai/o3', provider=provider) + + async with model_request_stream(model, [ModelRequest.user_text_prompt('Who are you')]) as stream: + chunks = [chunk async for chunk in stream] + + thinking_event_start = chunks[0] + assert isinstance(thinking_event_start, PartStartEvent) + assert thinking_event_start.part == snapshot( + ThinkingPart( + content='', + id='rs_0aa4f2c435e6d1dc0169082486816c8193a029b5fc4ef1764f', + provider_name='openrouter', + ) + ) + + thinking_event_end = chunks[1] + assert isinstance(thinking_event_end, PartEndEvent) + assert thinking_event_end.part == snapshot( + ThinkingPart( + content='', + id='rs_0aa4f2c435e6d1dc0169082486816c8193a029b5fc4ef1764f', + provider_name='openrouter', + ) + ) + + +async def test_openrouter_stream_error(allow_model_requests: None, openrouter_api_key: str) -> None: + provider = OpenRouterProvider(api_key=openrouter_api_key) + model = OpenRouterModel('minimax/minimax-m2:free', provider=provider) + settings = OpenRouterModelSettings(max_tokens=10) + + with pytest.raises(ModelHTTPError): + async with model_request_stream( + model, [ModelRequest.user_text_prompt('Hello there')], model_settings=settings + ) as stream: + _ = [chunk async for 
chunk in stream] + + +async def test_openrouter_tool_calling(allow_model_requests: None, openrouter_api_key: str) -> None: + provider = OpenRouterProvider(api_key=openrouter_api_key) + + class Divide(BaseModel): + """Divide two numbers.""" + + numerator: float + denominator: float + on_inf: Literal['error', 'infinity'] = 'infinity' + + model = OpenRouterModel('mistralai/mistral-small', provider=provider) + response = await model_request( + model, + [ModelRequest.user_text_prompt('What is 123 / 456?')], + model_request_parameters=ModelRequestParameters( + function_tools=[ + ToolDefinition( + name=Divide.__name__.lower(), + description=Divide.__doc__, + parameters_json_schema=Divide.model_json_schema(), + ) + ], + allow_text_output=True, # Allow model to either use tools or respond directly + ), + ) + + assert len(response.parts) == 1 + + tool_call_part = response.parts[0] + assert isinstance(tool_call_part, ToolCallPart) + assert tool_call_part.tool_call_id == snapshot('3sniiMddS') + assert tool_call_part.tool_name == 'divide' + assert tool_call_part.args == snapshot('{"numerator": 123, "denominator": 456, "on_inf": "infinity"}') + + mapped_messages = await model._map_messages([response], None) # type: ignore[reportPrivateUsage] + tool_call_message = mapped_messages[0] + assert tool_call_message['role'] == 'assistant' + assert tool_call_message.get('content') is None + assert tool_call_message.get('tool_calls') == snapshot( + [ + { + 'id': '3sniiMddS', + 'type': 'function', + 'function': { + 'name': 'divide', + 'arguments': '{"numerator": 123, "denominator": 456, "on_inf": "infinity"}', + }, + } + ] + ) + + +async def test_openrouter_with_reasoning(allow_model_requests: None, openrouter_api_key: str) -> None: + provider = OpenRouterProvider(api_key=openrouter_api_key) + request = ModelRequest.user_text_prompt( + "What was the impact of Voltaire's writings on modern french culture? Think about your answer." 
+ ) + + model = OpenRouterModel('z-ai/glm-4.6', provider=provider) + response = await model_request(model, [request]) + + assert len(response.parts) == 2 + + thinking_part = response.parts[0] + assert isinstance(thinking_part, ThinkingPart) + assert thinking_part.id == snapshot(None) + assert thinking_part.content is not None + assert thinking_part.signature is None + + +async def test_openrouter_preserve_reasoning_block(allow_model_requests: None, openrouter_api_key: str) -> None: + provider = OpenRouterProvider(api_key=openrouter_api_key) + model = OpenRouterModel('openai/gpt-5-mini', provider=provider) + + messages: Sequence[ModelMessage] = [] + messages.append(ModelRequest.user_text_prompt('Hello!')) + messages.append(await model_request(model, messages)) + messages.append( + ModelRequest.user_text_prompt("What was the impact of Voltaire's writings on modern french culture?") + ) + messages.append(await model_request(model, messages)) + + openai_messages = await model._map_messages(messages, None) # type: ignore[reportPrivateUsage] + + assistant_message = openai_messages[1] + assert assistant_message['role'] == 'assistant' + assert 'reasoning_details' not in assistant_message + + assistant_message = openai_messages[3] + assert assistant_message['role'] == 'assistant' + assert 'reasoning_details' in assistant_message + + reasoning_details = assistant_message['reasoning_details'] + assert len(reasoning_details) == 2 + + reasoning_summary = reasoning_details[0] + + assert 'summary' in reasoning_summary + assert reasoning_summary['type'] == 'reasoning.summary' + assert reasoning_summary['format'] == 'openai-responses-v1' + + reasoning_encrypted = reasoning_details[1] + + assert 'data' in reasoning_encrypted + assert reasoning_encrypted['type'] == 'reasoning.encrypted' + assert reasoning_encrypted['format'] == 'openai-responses-v1' + + +async def test_openrouter_errors_raised(allow_model_requests: None, openrouter_api_key: str) -> None: + provider = 
OpenRouterProvider(api_key=openrouter_api_key) + model = OpenRouterModel('google/gemini-2.0-flash-exp:free', provider=provider) + agent = Agent(model, instructions='Be helpful.', retries=1) + with pytest.raises(ModelHTTPError) as exc_info: + await agent.run('Tell me a joke.') + assert str(exc_info.value) == snapshot( + "status_code: 429, model_name: google/gemini-2.0-flash-exp:free, body: {'code': 429, 'message': 'Provider returned error', 'metadata': {'provider_name': 'Google', 'raw': 'google/gemini-2.0-flash-exp:free is temporarily rate-limited upstream. Please retry shortly, or add your own key to accumulate your rate limits: https://openrouter.ai/settings/integrations'}}" + ) + + +async def test_openrouter_usage(allow_model_requests: None, openrouter_api_key: str) -> None: + provider = OpenRouterProvider(api_key=openrouter_api_key) + model = OpenRouterModel('openai/gpt-5-mini', provider=provider) + agent = Agent(model, instructions='Be helpful.', retries=1) + + result = await agent.run('Tell me about Venus') + + assert result.usage() == snapshot( + RunUsage(input_tokens=17, output_tokens=1515, details={'reasoning_tokens': 704}, requests=1) + ) + + settings = OpenRouterModelSettings(openrouter_usage={'include': True}) + + result = await agent.run('Tell me about Mars', model_settings=settings) + + assert result.usage() == snapshot( + RunUsage( + input_tokens=17, + output_tokens=2177, + details={'is_byok': 0, 'reasoning_tokens': 960, 'image_tokens': 0}, + requests=1, + ) + ) + + last_message = result.all_messages()[-1] + + assert isinstance(last_message, ModelResponse) + assert last_message.provider_details is not None + for key in ['cost', 'upstream_inference_cost', 'is_byok']: + assert key in last_message.provider_details + + +async def test_openrouter_validate_non_json_response(openrouter_api_key: str) -> None: + provider = OpenRouterProvider(api_key=openrouter_api_key) + model = OpenRouterModel('google/gemini-2.0-flash-exp:free', provider=provider) + + with 
pytest.raises(UnexpectedModelBehavior) as exc_info: + model._process_response('This is not JSON!') # type: ignore[reportPrivateUsage] + + assert str(exc_info.value) == snapshot( + 'Invalid response from openrouter chat completions endpoint, expected JSON data' + ) + + +async def test_openrouter_validate_error_response(openrouter_api_key: str) -> None: + provider = OpenRouterProvider(api_key=openrouter_api_key) + model = OpenRouterModel('google/gemini-2.0-flash-exp:free', provider=provider) + + choice = Choice.model_construct( + index=0, message={'role': 'assistant'}, finish_reason='error', native_finish_reason='stop' + ) + response = ChatCompletion.model_construct( + id='', choices=[choice], created=0, object='chat.completion', model='test', provider='test' + ) + response.error = {'message': 'This response has an error attribute', 'code': 200} # type: ignore[reportAttributeAccessIssue] + + with pytest.raises(ModelHTTPError) as exc_info: + model._process_response(response) # type: ignore[reportPrivateUsage] + + assert str(exc_info.value) == snapshot( + 'status_code: 200, model_name: test, body: This response has an error attribute' + ) + + +async def test_openrouter_map_messages_reasoning(allow_model_requests: None, openrouter_api_key: str) -> None: + provider = OpenRouterProvider(api_key=openrouter_api_key) + model = OpenRouterModel('anthropic/claude-3.7-sonnet:thinking', provider=provider) + + user_message = ModelRequest.user_text_prompt('Who are you. Think about it.') + response = await model_request(model, [user_message]) + + mapped_messages = await model._map_messages([user_message, response], None) # type: ignore[reportPrivateUsage] + + assert len(mapped_messages) == 2 + assert mapped_messages[1]['reasoning_details'] == snapshot( # type: ignore[reportGeneralTypeIssues] + [ + { + 'id': None, + 'type': 'reasoning.text', + 'text': """\ +This question is asking me about my identity. Let me think about how to respond clearly and accurately. 
+ +I am Claude, an AI assistant created by Anthropic. I'm designed to be helpful, harmless, and honest in my interactions with humans. I don't have a physical form - I exist as a large language model running on computer hardware. I don't have consciousness, sentience, or feelings in the way humans do. I don't have personal experiences or a life outside of these conversations. + +My capabilities include understanding and generating natural language text, reasoning about various topics, and attempting to be helpful to users in a wide range of contexts. I have been trained on a large corpus of text data, but my training data has a cutoff date, so I don't have knowledge of events that occurred after my training. + +I have certain limitations - I don't have the ability to access the internet, run code, or interact with external systems unless given specific tools to do so. I don't have perfect knowledge and can make mistakes. + +I'm designed to be conversational and to engage with users in a way that's helpful and informative, while respecting important ethical boundaries.\ +""", + 'signature': 'ErcBCkgICBACGAIiQHtMxpqcMhnwgGUmSDWGoOL9ZHTbDKjWnhbFm0xKzFl0NmXFjQQxjFj5mieRYY718fINsJMGjycTVYeiu69npakSDDrsnKYAD/fdcpI57xoMHlQBxI93RMa5CSUZIjAFVCMQF5GfLLQCibyPbb7LhZ4kLIFxw/nqsTwDDt6bx3yipUcq7G7eGts8MZ6LxOYqHTlIDx0tfHRIlkkcNCdB2sUeMqP8e7kuQqIHoD52GAI=', + 'format': 'anthropic-claude-v1', + 'index': 0, + } + ] + ) diff --git a/tests/providers/test_openrouter.py b/tests/providers/test_openrouter.py index acdf166c50..400c789fb0 100644 --- a/tests/providers/test_openrouter.py +++ b/tests/providers/test_openrouter.py @@ -25,7 +25,7 @@ with try_import() as imports_successful: import openai - from pydantic_ai.models.openai import OpenAIChatModel + from pydantic_ai.models.openrouter import OpenRouterModel from pydantic_ai.providers.openrouter import OpenRouterProvider @@ -44,6 +44,16 @@ def test_openrouter_provider(): assert provider.client.api_key == 'api-key' +def 
test_openrouter_provider_with_app_attribution(): + provider = OpenRouterProvider(api_key='api-key', app_url='test.com', app_title='test') + assert provider.name == 'openrouter' + assert provider.base_url == 'https://openrouter.ai/api/v1' + assert isinstance(provider.client, openai.AsyncOpenAI) + assert provider.client.api_key == 'api-key' + assert provider.client.default_headers['X-Title'] == 'test' + assert provider.client.default_headers['HTTP-Referer'] == 'test.com' + + def test_openrouter_provider_need_api_key(env: TestEnv) -> None: env.remove('OPENROUTER_API_KEY') with pytest.raises( @@ -70,7 +80,7 @@ def test_openrouter_pass_openai_client() -> None: async def test_openrouter_with_google_model(allow_model_requests: None, openrouter_api_key: str) -> None: provider = OpenRouterProvider(api_key=openrouter_api_key) - model = OpenAIChatModel('google/gemini-2.0-flash-exp:free', provider=provider) + model = OpenRouterModel('google/gemini-2.0-flash-exp:free', provider=provider) agent = Agent(model, instructions='Be helpful.') response = await agent.run('Tell me a joke.') assert response.output == snapshot("""\ diff --git a/uv.lock b/uv.lock index a3738b262b..f28cf5a84d 100644 --- a/uv.lock +++ b/uv.lock @@ -5612,6 +5612,9 @@ mistral = [ openai = [ { name = "openai" }, ] +openrouter = [ + { name = "openai" }, +] outlines-llamacpp = [ { name = "outlines", extra = ["llamacpp"] }, ] @@ -5679,6 +5682,7 @@ requires-dist = [ { name = "mcp", marker = "extra == 'mcp'", specifier = ">=1.18.0" }, { name = "mistralai", marker = "extra == 'mistral'", specifier = ">=1.9.10" }, { name = "openai", marker = "extra == 'openai'", specifier = ">=1.107.2" }, + { name = "openai", marker = "extra == 'openrouter'", specifier = ">=2.8.0" }, { name = "opentelemetry-api", specifier = ">=1.28.0" }, { name = "outlines", marker = "extra == 'outlines-vllm-offline'", specifier = ">=1.0.0,<1.3.0" }, { name = "outlines", extras = ["llamacpp"], marker = "extra == 'outlines-llamacpp'", specifier = 
">=1.0.0,<1.3.0" }, @@ -5706,7 +5710,7 @@ requires-dist = [ { name = "typing-inspection", specifier = ">=0.4.0" }, { name = "vllm", marker = "(python_full_version < '3.12' and platform_machine != 'x86_64' and extra == 'outlines-vllm-offline') or (python_full_version < '3.12' and sys_platform != 'darwin' and extra == 'outlines-vllm-offline')" }, ] -provides-extras = ["a2a", "ag-ui", "anthropic", "bedrock", "cli", "cohere", "dbos", "duckduckgo", "evals", "fastmcp", "google", "groq", "huggingface", "logfire", "mcp", "mistral", "openai", "outlines-llamacpp", "outlines-mlxlm", "outlines-sglang", "outlines-transformers", "outlines-vllm-offline", "prefect", "retries", "tavily", "temporal", "ui", "vertexai"] +provides-extras = ["a2a", "ag-ui", "anthropic", "bedrock", "cli", "cohere", "dbos", "duckduckgo", "evals", "fastmcp", "google", "groq", "huggingface", "logfire", "mcp", "mistral", "openai", "openrouter", "outlines-llamacpp", "outlines-mlxlm", "outlines-sglang", "outlines-transformers", "outlines-vllm-offline", "prefect", "retries", "tavily", "temporal", "ui", "vertexai"] [[package]] name = "pydantic-core"