
Adding extra_headers parameters to ModelSettings #550

Open · wants to merge 1 commit into main

2 changes: 1 addition & 1 deletion src/agents/extensions/models/litellm_model.py
@@ -286,7 +286,7 @@ async def _fetch_response(
             stream=stream,
             stream_options=stream_options,
             reasoning_effort=reasoning_effort,
-            extra_headers=HEADERS,
+            extra_headers={**HEADERS, **(model_settings.extra_headers or {})},
             api_key=self.api_key,
             base_url=self.base_url,
             **extra_kwargs,
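
For context on the merge expression above: it is plain dict unpacking, so keys from model_settings.extra_headers are applied after the SDK's default HEADERS and win on conflict, while the "or {}" keeps the call safe when no extra headers are set. A minimal standalone sketch of that behavior (the HEADERS value and header names here are illustrative, not the SDK's actual defaults):

# Stand-in for the SDK's default headers; the real HEADERS constant lives in the SDK.
HEADERS = {"User-Agent": "Agents/Python"}

# Headers a caller might supply via ModelSettings.extra_headers (may be None).
extra_headers = {"X-Request-Id": "abc123", "User-Agent": "my-app/1.0"}

# Later unpacking wins, so caller-supplied headers override the defaults,
# and "or {}" handles the None case without a conditional.
merged = {**HEADERS, **(extra_headers or {})}
assert merged["User-Agent"] == "my-app/1.0"
assert merged["X-Request-Id"] == "abc123"
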
6 changes: 5 additions & 1 deletion src/agents/model_settings.py
@@ -3,7 +3,7 @@
 from dataclasses import dataclass, fields, replace
 from typing import Literal

-from openai._types import Body, Query
+from openai._types import Body, Query, Headers
 from openai.types.shared import Reasoning


@@ -67,6 +67,10 @@ class ModelSettings:
"""Additional body fields to provide with the request.
Defaults to None if not provided."""

extra_headers: Headers | None = None
"""Additional headers to provide with the request.
Defaults to None if not provided."""

def resolve(self, override: ModelSettings | None) -> ModelSettings:
"""Produce a new ModelSettings by overlaying any non-None values from the
override on top of this instance."""
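
As a usage sketch for the new field (the header names are illustrative; the resolve() behavior described here follows the docstring above, which overlays whole non-None fields from the override rather than merging their contents):

from agents import ModelSettings

# Illustrative header; anything the underlying HTTP client accepts will work.
settings = ModelSettings(extra_headers={"X-Request-Id": "abc123"})

# resolve() overlays non-None fields from the override onto the base settings,
# so the override's extra_headers replaces the base value as a whole.
base = ModelSettings(extra_headers={"X-Env": "staging"})
resolved = base.resolve(settings)
print(resolved.extra_headers)  # {"X-Request-Id": "abc123"}
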
2 changes: 1 addition & 1 deletion src/agents/models/openai_chatcompletions.py
@@ -255,7 +255,7 @@ async def _fetch_response(
             stream_options=self._non_null_or_not_given(stream_options),
             store=self._non_null_or_not_given(store),
             reasoning_effort=self._non_null_or_not_given(reasoning_effort),
-            extra_headers=HEADERS,
+            extra_headers={ **HEADERS, **(model_settings.extra_headers or {}) },
             extra_query=model_settings.extra_query,
             extra_body=model_settings.extra_body,
             metadata=self._non_null_or_not_given(model_settings.metadata),
2 changes: 1 addition & 1 deletion src/agents/models/openai_responses.py
@@ -253,7 +253,7 @@ async def _fetch_response(
             tool_choice=tool_choice,
             parallel_tool_calls=parallel_tool_calls,
             stream=stream,
-            extra_headers=_HEADERS,
+            extra_headers={**_HEADERS, **(model_settings.extra_headers or {})},
             extra_query=model_settings.extra_query,
             extra_body=model_settings.extra_body,
             text=response_format,
93 changes: 93 additions & 0 deletions tests/test_extra_headers.py
@@ -0,0 +1,93 @@
import pytest
from agents import (
    OpenAIChatCompletionsModel,
    OpenAIResponsesModel,
    ModelSettings,
    ModelTracing
)
from openai.types.chat.chat_completion import ChatCompletion, Choice
from openai.types.chat.chat_completion_message import ChatCompletionMessage


@pytest.mark.allow_call_model_methods
@pytest.mark.asyncio
async def test_extra_headers_passed_to_openai_responses_model():
    """
    Ensure extra_headers in ModelSettings is passed to the OpenAIResponsesModel client.
    """
    called_kwargs = {}

    class DummyResponses:
        async def create(self, **kwargs):
            nonlocal called_kwargs
            called_kwargs = kwargs

            class DummyResponse:
                id = "dummy"
                output = []
                usage = type("Usage", (), {"input_tokens": 0, "output_tokens": 0, "total_tokens": 0})()

            return DummyResponse()

    class DummyClient:
        def __init__(self):
            self.responses = DummyResponses()

    model = OpenAIResponsesModel(model="gpt-4", openai_client=DummyClient())
    extra_headers = {"X-Test-Header": "test-value"}
    await model.get_response(
        system_instructions=None,
        input="hi",
        model_settings=ModelSettings(extra_headers=extra_headers),
        tools=[],
        output_schema=None,
        handoffs=[],
        tracing=ModelTracing.DISABLED,
        previous_response_id=None,
    )
    assert "extra_headers" in called_kwargs
    assert called_kwargs["extra_headers"]["X-Test-Header"] == "test-value"


@pytest.mark.allow_call_model_methods
@pytest.mark.asyncio
async def test_extra_headers_passed_to_openai_client():
    """
    Ensure extra_headers in ModelSettings is passed to the OpenAI client.
    """
    called_kwargs = {}

    class DummyCompletions:
        async def create(self, **kwargs):
            nonlocal called_kwargs
            called_kwargs = kwargs
            msg = ChatCompletionMessage(role="assistant", content="Hello")
            choice = Choice(index=0, finish_reason="stop", message=msg)
            return ChatCompletion(
                id="resp-id",
                created=0,
                model="fake",
                object="chat.completion",
                choices=[choice],
                usage=None,
            )

    class DummyClient:
        def __init__(self):
            self.chat = type("_Chat", (), {"completions": DummyCompletions()})()
            self.base_url = "https://api.openai.com"

    model = OpenAIChatCompletionsModel(model="gpt-4", openai_client=DummyClient())
    extra_headers = {"X-Test-Header": "test-value"}
    await model.get_response(
        system_instructions=None,
        input="hi",
        model_settings=ModelSettings(extra_headers=extra_headers),
        tools=[],
        output_schema=None,
        handoffs=[],
        tracing=ModelTracing.DISABLED,
        previous_response_id=None,
    )
    assert "extra_headers" in called_kwargs
    assert called_kwargs["extra_headers"]["X-Test-Header"] == "test-value"