Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
30 changes: 30 additions & 0 deletions pydantic_ai_slim/pydantic_ai/run.py
Original file line number Diff line number Diff line change
Expand Up @@ -135,6 +135,36 @@ def result(self) -> AgentRunResult[OutputDataT] | None:
self._traceparent(required=False),
)

def all_messages(self) -> list[_messages.ModelMessage]:
    """Return every message associated with this run so far.

    The returned list is the run's full conversation history, so messages
    carried over from older runs are included alongside this run's own.
    """
    # Return the live history list from the run state (not a copy), matching
    # how the state stores it.
    history = self.ctx.state.message_history
    return history

def all_messages_json(self, *, output_tool_return_content: str | None = None) -> bytes:
    """Return all messages from [`all_messages`][pydantic_ai.agent.AgentRun.all_messages] as JSON bytes.

    Args:
        output_tool_return_content: NOTE(review) — accepted here but currently
            ignored by the body; confirm whether it should rewrite the final
            output tool return before serialization, as the matching
            `AgentRunResult` API suggests.

    Returns:
        JSON bytes representing the messages.
    """
    messages = self.all_messages()
    return _messages.ModelMessagesTypeAdapter.dump_json(messages)

def new_messages(self) -> list[_messages.ModelMessage]:
    """Return only the messages generated during this run so far.

    Messages carried over from older runs are excluded: the history is
    sliced from the index recorded when this run started.
    """
    start = self.ctx.deps.new_message_index
    return self.all_messages()[start:]

def new_messages_json(self) -> bytes:
    """Return new messages from [`new_messages`][pydantic_ai.agent.AgentRun.new_messages] as JSON bytes.

    Returns:
        JSON bytes representing only the messages produced by this run.
    """
    new = self.new_messages()
    return _messages.ModelMessagesTypeAdapter.dump_json(new)

def __aiter__(
self,
) -> AsyncIterator[_agent_graph.AgentNode[AgentDepsT, OutputDataT] | End[FinalResult[OutputDataT]]]:
Expand Down
45 changes: 45 additions & 0 deletions tests/test_agent.py
Original file line number Diff line number Diff line change
Expand Up @@ -5833,3 +5833,48 @@ def delete_file() -> None:
assert result.output == snapshot(
DeferredToolRequests(approvals=[ToolCallPart(tool_name='delete_file', tool_call_id=IsStr())])
)


async def test_message_history():
    def llm(messages: list[ModelMessage], _info: AgentInfo) -> ModelResponse:
        # Fixed reply; the function's name ("llm") also determines the
        # model_name recorded on the response, asserted below.
        return ModelResponse(parts=[TextPart('ok here is text')])

    agent = Agent(FunctionModel(llm))

    prior_history = [ModelRequest(parts=[UserPromptPart(content='Hello')])]
    async with agent.iter(message_history=prior_history) as run:
        # Drain the run to completion.
        async for _node in run:
            pass
        # Only the model response is "new"; the seeded request predates this run.
        assert run.new_messages() == snapshot(
            [
                ModelResponse(
                    parts=[TextPart(content='ok here is text')],
                    usage=RequestUsage(input_tokens=51, output_tokens=4),
                    model_name='function:llm:',
                    timestamp=IsDatetime(),
                ),
            ]
        )
        assert run.new_messages_json().startswith(b'[{"parts":[{"content":"ok here is text",')
        # The full history includes the seeded request followed by the response.
        assert run.all_messages() == snapshot(
            [
                ModelRequest(
                    parts=[
                        UserPromptPart(
                            content='Hello',
                            timestamp=IsDatetime(),
                        )
                    ]
                ),
                ModelResponse(
                    parts=[TextPart(content='ok here is text')],
                    usage=RequestUsage(input_tokens=51, output_tokens=4),
                    model_name='function:llm:',
                    timestamp=IsDatetime(),
                ),
            ]
        )
        assert run.all_messages_json().startswith(b'[{"parts":[{"content":"Hello",')