Skip to content
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
Show all changes
20 commits
Select commit Hold shift + click to select a range
4fdb27d
add dump_messages method to vercel ai adapter
dsfaccini Nov 11, 2025
1cb60bf
fix broken loop and add tests for coverage
dsfaccini Nov 11, 2025
261bc3a
add missing tests for coverage
dsfaccini Nov 11, 2025
3f70b83
wip: remove id generator and BuiltinToolReturnPart - fix tests using …
dsfaccini Nov 17, 2025
97feec2
Merge branch 'main' into vercelai-adapter-dump-messages
dsfaccini Nov 20, 2025
dfcb30c
Merge branch 'main' into vercelai-adapter-dump-messages
dsfaccini Nov 22, 2025
f99bc0c
Merge branch 'main' into vercelai-adapter-dump-messages
dsfaccini Nov 26, 2025
4870dd4
refactor: simplify dump_messages method and remove unused id generator
dsfaccini Nov 23, 2025
b1272b7
test: add unit test for dumping and loading ThinkingPart with metadata
dsfaccini Nov 27, 2025
6cdec4d
Merge branch 'main' into vercelai-adapter-dump-messages
dsfaccini Nov 27, 2025
e300c15
coverage
dsfaccini Nov 27, 2025
42596b9
Merge branch 'main' into vercelai-adapter-dump-messages
dsfaccini Nov 27, 2025
57157d6
syntax improvement
dsfaccini Nov 28, 2025
58a71a2
address review points
dsfaccini Nov 28, 2025
2f3b2a2
Merge branch 'main' into vercelai-adapter-dump-messages
dsfaccini Nov 29, 2025
a061421
fix coverage
dsfaccini Nov 29, 2025
73fb21d
refactor (for comfort)
dsfaccini Nov 30, 2025
e49f656
Refactor dump_messages per review: merge tool dicts, handle RetryProm…
dsfaccini Dec 1, 2025
54f77a7
test builtin tool call without return in dump_messages
dsfaccini Dec 3, 2025
dbdca1d
Merge branch 'main' into vercelai-adapter-dump-messages
dsfaccini Dec 3, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions pydantic_ai_slim/pydantic_ai/ui/_adapter.py
Original file line number Diff line number Diff line change
Expand Up @@ -143,6 +143,11 @@ def load_messages(cls, messages: Sequence[MessageT]) -> list[ModelMessage]:
"""Transform protocol-specific messages into Pydantic AI messages."""
raise NotImplementedError

@classmethod
def dump_messages(cls, messages: Sequence[ModelMessage]) -> list[MessageT]:
    """Transform Pydantic AI messages into protocol-specific messages.

    This is the inverse of `load_messages`. The base implementation raises
    `NotImplementedError` (mirroring `load_messages` above); protocol
    adapters that support dumping override this method.
    """
    raise NotImplementedError

@abstractmethod
def build_event_stream(self) -> UIEventStream[RunInputT, EventT, AgentDepsT, OutputDataT]:
"""Build a protocol-specific event stream transformer."""
Expand Down
143 changes: 69 additions & 74 deletions pydantic_ai_slim/pydantic_ai/ui/vercel_ai/_adapter.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,6 @@

from ...messages import (
AudioUrl,
BaseToolCallPart,
BinaryContent,
BuiltinToolCallPart,
BuiltinToolReturnPart,
Expand Down Expand Up @@ -246,10 +245,12 @@ def _dump_request_message(msg: ModelRequest) -> tuple[list[UIMessagePart], list[
# Tool returns are merged into the tool call in the assistant message
pass
elif isinstance(part, RetryPromptPart):
# RetryPromptPart always has a tool_call_id (generated if not provided).
# These are handled when processing ToolCallPart in ModelResponse,
# where they become DynamicToolOutputErrorPart via the tool_errors dict.
pass
if part.tool_name:
# Tool-related retries are handled when processing ToolCallPart in ModelResponse
pass
else:
# Non-tool retries (e.g., output validation errors) become user text
user_ui_parts.append(TextUIPart(text=part.model_response(), state='done'))
else:
assert_never(part)

Expand All @@ -258,8 +259,7 @@ def _dump_request_message(msg: ModelRequest) -> tuple[list[UIMessagePart], list[
@staticmethod
def _dump_response_message( # noqa: C901
msg: ModelResponse,
tool_returns: dict[str, ToolReturnPart],
tool_errors: dict[str, RetryPromptPart],
tool_results: dict[str, ToolReturnPart | RetryPromptPart],
) -> list[UIMessagePart]:
"""Convert a ModelResponse into a UIMessage."""
ui_parts: list[UIMessagePart] = []
Expand Down Expand Up @@ -298,71 +298,69 @@ def _dump_response_message( # noqa: C901
media_type=part.content.media_type,
)
)
elif isinstance(part, BaseToolCallPart):
if isinstance(part, BuiltinToolCallPart):
call_provider_metadata = (
{'pydantic_ai': {'provider_name': part.provider_name}} if part.provider_name else None
)
elif isinstance(part, BuiltinToolCallPart):
call_provider_metadata = (
{'pydantic_ai': {'provider_name': part.provider_name}} if part.provider_name else None
)

if builtin_return := local_builtin_returns.get(part.tool_call_id):
content = builtin_return.model_response_str()
ui_parts.append(
ToolOutputAvailablePart(
type=f'tool-{part.tool_name}',
tool_call_id=part.tool_call_id,
input=part.args_as_json_str(),
output=content,
state='output-available',
provider_executed=True,
call_provider_metadata=call_provider_metadata,
)
if builtin_return := local_builtin_returns.get(part.tool_call_id):
content = builtin_return.model_response_str()
ui_parts.append(
ToolOutputAvailablePart(
type=f'tool-{part.tool_name}',
tool_call_id=part.tool_call_id,
input=part.args_as_json_str(),
output=content,
state='output-available',
provider_executed=True,
call_provider_metadata=call_provider_metadata,
)
else: # pragma: no cover
ui_parts.append(
ToolInputAvailablePart(
type=f'tool-{part.tool_name}',
tool_call_id=part.tool_call_id,
input=part.args_as_json_str(),
state='input-available',
provider_executed=True,
call_provider_metadata=call_provider_metadata,
)
)
else: # pragma: no cover
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Let's cover this branch as well

ui_parts.append(
ToolInputAvailablePart(
type=f'tool-{part.tool_name}',
tool_call_id=part.tool_call_id,
input=part.args_as_json_str(),
state='input-available',
provider_executed=True,
call_provider_metadata=call_provider_metadata,
)
else:
tool_return = tool_returns.get(part.tool_call_id)
tool_error = tool_errors.get(part.tool_call_id)

if isinstance(tool_return, ToolReturnPart):
content = tool_return.model_response_str()
ui_parts.append(
DynamicToolOutputAvailablePart(
tool_name=part.tool_name,
tool_call_id=part.tool_call_id,
input=part.args_as_json_str(),
output=content,
state='output-available',
)
)
elif isinstance(part, ToolCallPart):
tool_result = tool_results.get(part.tool_call_id)

if isinstance(tool_result, ToolReturnPart):
content = tool_result.model_response_str()
ui_parts.append(
DynamicToolOutputAvailablePart(
tool_name=part.tool_name,
tool_call_id=part.tool_call_id,
input=part.args_as_json_str(),
output=content,
state='output-available',
)
elif tool_error:
error_text = tool_error.model_response()
ui_parts.append(
DynamicToolOutputErrorPart(
tool_name=part.tool_name,
tool_call_id=part.tool_call_id,
input=part.args_as_json_str(),
error_text=error_text,
state='output-error',
)
)
elif isinstance(tool_result, RetryPromptPart):
error_text = tool_result.model_response()
ui_parts.append(
DynamicToolOutputErrorPart(
tool_name=part.tool_name,
tool_call_id=part.tool_call_id,
input=part.args_as_json_str(),
error_text=error_text,
state='output-error',
)
else:
ui_parts.append(
DynamicToolInputAvailablePart(
tool_name=part.tool_name,
tool_call_id=part.tool_call_id,
input=part.args_as_json_str(),
state='input-available',
)
)
else:
ui_parts.append(
DynamicToolInputAvailablePart(
tool_name=part.tool_name,
tool_call_id=part.tool_call_id,
input=part.args_as_json_str(),
state='input-available',
)
)
else:
assert_never(part)

Expand All @@ -381,16 +379,15 @@ def dump_messages(
Returns:
A list of UIMessage objects in Vercel AI format
"""
tool_returns: dict[str, ToolReturnPart] = {}
tool_errors: dict[str, RetryPromptPart] = {}
tool_results: dict[str, ToolReturnPart | RetryPromptPart] = {}

for msg in messages:
if isinstance(msg, ModelRequest):
for part in msg.parts:
if isinstance(part, ToolReturnPart):
tool_returns[part.tool_call_id] = part
elif isinstance(part, RetryPromptPart) and part.tool_call_id:
tool_errors[part.tool_call_id] = part
tool_results[part.tool_call_id] = part
elif isinstance(part, RetryPromptPart) and part.tool_name:
tool_results[part.tool_call_id] = part

result: list[UIMessage] = []

Expand All @@ -406,9 +403,7 @@ def dump_messages(
elif isinstance( # pragma: no branch
msg, ModelResponse
):
ui_parts: list[UIMessagePart] = cls._dump_response_message(
msg, tool_returns=tool_returns, tool_errors=tool_errors
)
ui_parts: list[UIMessagePart] = cls._dump_response_message(msg, tool_results)
if ui_parts: # pragma: no branch
result.append(UIMessage(id=str(uuid.uuid4()), role='assistant', parts=ui_parts))
else:
Expand Down
55 changes: 55 additions & 0 deletions tests/test_vercel_ai.py
Original file line number Diff line number Diff line change
Expand Up @@ -2292,6 +2292,61 @@ async def test_adapter_dump_messages_with_retry():
)


async def test_adapter_dump_messages_with_retry_no_tool_name():
    """Test dumping messages with retry prompts without tool_name (e.g., output validation errors)."""
    messages = [
        ModelRequest(parts=[UserPromptPart(content='Give me a number')]),
        ModelResponse(parts=[TextPart(content='Not a valid number')]),
        ModelRequest(
            parts=[
                RetryPromptPart(
                    content='Output validation failed: expected integer',
                    # No tool_name - this is an output validation error, not a tool error
                )
            ]
        ),
    ]

    ui_messages = VercelAIAdapter.dump_messages(messages)

    # Serialize to plain dicts so the snapshot comparison is structural.
    ui_message_dicts = [msg.model_dump() for msg in ui_messages]

    assert ui_message_dicts == snapshot(
        [
            {
                'id': IsStr(),
                'role': 'user',
                'metadata': None,
                'parts': [{'type': 'text', 'text': 'Give me a number', 'state': 'done', 'provider_metadata': None}],
            },
            {
                'id': IsStr(),
                'role': 'assistant',
                'metadata': None,
                'parts': [{'type': 'text', 'text': 'Not a valid number', 'state': 'done', 'provider_metadata': None}],
            },
            {
                # A retry without a tool_name has no tool call to attach to, so it
                # is dumped as plain user text rather than a tool output part.
                'id': IsStr(),
                'role': 'user',
                'metadata': None,
                'parts': [
                    {
                        'type': 'text',
                        'text': """\
Validation feedback:
Output validation failed: expected integer

Fix the errors and try again.\
""",
                        'state': 'done',
                        'provider_metadata': None,
                    }
                ],
            },
        ]
    )


async def test_adapter_dump_messages_consecutive_text():
"""Test that consecutive text parts are concatenated correctly."""
messages = [
Expand Down