diff --git a/pydantic_ai_slim/pydantic_ai/_parts_manager.py b/pydantic_ai_slim/pydantic_ai/_parts_manager.py index 5dfc53630d..d9d7eaac59 100644 --- a/pydantic_ai_slim/pydantic_ai/_parts_manager.py +++ b/pydantic_ai_slim/pydantic_ai/_parts_manager.py @@ -76,6 +76,7 @@ def handle_text_delta( vendor_part_id: VendorId | None, content: str, id: str | None = None, + provider_name: str | None = None, provider_details: dict[str, Any] | None = None, thinking_tags: tuple[str, str] | None = None, ignore_leading_whitespace: bool = False, @@ -92,6 +93,7 @@ def handle_text_delta( a TextPart. content: The text content to append to the appropriate TextPart. id: An optional id for the text part. + provider_name: An optional provider name for the text part. provider_details: An optional dictionary of provider-specific details for the text part. thinking_tags: If provided, will handle content between the thinking tags as thinking parts. ignore_leading_whitespace: If True, will ignore leading whitespace in the content. 
@@ -121,7 +123,7 @@ def handle_text_delta( self._handle_embedded_thinking_end(vendor_part_id) return yield from self._handle_embedded_thinking_content( - existing_part, part_index, content, provider_details + existing_part, part_index, content, provider_name, provider_details ) return elif isinstance(existing_part, TextPart): @@ -131,7 +133,7 @@ def handle_text_delta( if thinking_tags and content == thinking_tags[0]: # When we see a thinking start tag (which is a single token), we'll build a new thinking part instead - yield from self._handle_embedded_thinking_start(vendor_part_id, provider_details) + yield from self._handle_embedded_thinking_start(vendor_part_id, provider_name, provider_details) return if existing_text_part_and_index is None: @@ -141,13 +143,15 @@ def handle_text_delta( return # There is no existing text part that should be updated, so create a new one - part = TextPart(content=content, id=id, provider_details=provider_details) + part = TextPart(content=content, id=id, provider_name=provider_name, provider_details=provider_details) new_part_index = self._append_part(part, vendor_part_id) yield PartStartEvent(index=new_part_index, part=part) else: # Update the existing TextPart with the new content delta existing_text_part, part_index = existing_text_part_and_index - part_delta = TextPartDelta(content_delta=content, provider_details=provider_details) + part_delta = TextPartDelta( + content_delta=content, provider_name=provider_name, provider_details=provider_details + ) self._parts[part_index] = part_delta.apply(existing_text_part) yield PartDeltaEvent(index=part_index, delta=part_delta) @@ -241,6 +245,7 @@ def handle_tool_call_delta( tool_name: str | None = None, args: str | dict[str, Any] | None = None, tool_call_id: str | None = None, + provider_name: str | None = None, provider_details: dict[str, Any] | None = None, ) -> ModelResponseStreamEvent | None: """Handle or update a tool call, creating or updating a `ToolCallPart`, 
`BuiltinToolCallPart`, or `ToolCallPartDelta`. @@ -258,6 +263,7 @@ def handle_tool_call_delta( a name match when `vendor_part_id` is None. args: Arguments for the tool call, either as a string, a dictionary of key-value pairs, or None. tool_call_id: An optional string representing an identifier for this tool call. + provider_name: An optional provider name for the tool call part. provider_details: An optional dictionary of provider-specific details for the tool call part. Returns: @@ -293,7 +299,11 @@ def handle_tool_call_delta( if existing_matching_part_and_index is None: # No matching part/delta was found, so create a new ToolCallPartDelta (or ToolCallPart if fully formed) delta = ToolCallPartDelta( - tool_name_delta=tool_name, args_delta=args, tool_call_id=tool_call_id, provider_details=provider_details + tool_name_delta=tool_name, + args_delta=args, + tool_call_id=tool_call_id, + provider_name=provider_name, + provider_details=provider_details, ) part = delta.as_part() or delta new_part_index = self._append_part(part, vendor_part_id) @@ -304,7 +314,11 @@ def handle_tool_call_delta( # Update the existing part or delta with the new information existing_part, part_index = existing_matching_part_and_index delta = ToolCallPartDelta( - tool_name_delta=tool_name, args_delta=args, tool_call_id=tool_call_id, provider_details=provider_details + tool_name_delta=tool_name, + args_delta=args, + tool_call_id=tool_call_id, + provider_name=provider_name, + provider_details=provider_details, ) updated_part = delta.apply(existing_part) self._parts[part_index] = updated_part @@ -326,6 +340,7 @@ def handle_tool_call_part( args: str | dict[str, Any] | None, tool_call_id: str | None = None, id: str | None = None, + provider_name: str | None = None, provider_details: dict[str, Any] | None = None, ) -> ModelResponseStreamEvent: """Immediately create or fully-overwrite a ToolCallPart with the given information. 
@@ -339,6 +354,7 @@ def handle_tool_call_part( args: The arguments for the tool call, either as a string, a dictionary, or None. tool_call_id: An optional string identifier for this tool call. id: An optional identifier for this tool call part. + provider_name: An optional provider name for the tool call part. provider_details: An optional dictionary of provider-specific details for the tool call part. Returns: @@ -350,6 +366,7 @@ def handle_tool_call_part( args=args, tool_call_id=tool_call_id or _generate_tool_call_id(), id=id, + provider_name=provider_name, provider_details=provider_details, ) if vendor_part_id is None: @@ -420,19 +437,26 @@ def _latest_part_if_of_type(self, *part_types: type[PartT]) -> tuple[PartT, int] return None def _handle_embedded_thinking_start( - self, vendor_part_id: VendorId, provider_details: dict[str, Any] | None + self, vendor_part_id: VendorId, provider_name: str | None, provider_details: dict[str, Any] | None ) -> Iterator[ModelResponseStreamEvent]: """Handle <think> tag - create new ThinkingPart.""" self._stop_tracking_vendor_id(vendor_part_id) - part = ThinkingPart(content='', provider_details=provider_details) + part = ThinkingPart(content='', provider_name=provider_name, provider_details=provider_details) new_index = self._append_part(part, vendor_part_id) yield PartStartEvent(index=new_index, part=part) def _handle_embedded_thinking_content( - self, existing_part: ThinkingPart, part_index: int, content: str, provider_details: dict[str, Any] | None + self, + existing_part: ThinkingPart, + part_index: int, + content: str, + provider_name: str | None, + provider_details: dict[str, Any] | None, ) -> Iterator[ModelResponseStreamEvent]: """Handle content inside <think>...</think>.""" - part_delta = ThinkingPartDelta(content_delta=content, provider_details=provider_details) + part_delta = ThinkingPartDelta( + content_delta=content, provider_name=provider_name, provider_details=provider_details + ) self._parts[part_index] = part_delta.apply(existing_part)
yield PartDeltaEvent(index=part_index, delta=part_delta) diff --git a/pydantic_ai_slim/pydantic_ai/messages.py b/pydantic_ai_slim/pydantic_ai/messages.py index 41d6ca51d2..afa4ee7fd0 100644 --- a/pydantic_ai_slim/pydantic_ai/messages.py +++ b/pydantic_ai_slim/pydantic_ai/messages.py @@ -1032,6 +1032,9 @@ class TextPart: id: str | None = None """An optional identifier of the text part.""" + provider_name: str | None = None + """The name of the provider that generated the response.""" + provider_details: dict[str, Any] | None = None """Additional data returned by the provider that can't be mapped to standard fields. @@ -1148,6 +1151,12 @@ class BaseToolCallPart: This is used by some APIs like OpenAI Responses.""" + provider_name: str | None = None + """The name of the provider that generated the response. + + Tool calls are only sent back to the same provider. + """ + provider_details: dict[str, Any] | None = None """Additional data returned by the provider that can't be mapped to standard fields. @@ -1205,12 +1214,6 @@ class BuiltinToolCallPart(BaseToolCallPart): _: KW_ONLY - provider_name: str | None = None - """The name of the provider that generated the response. - - Built-in tool calls are only sent back to the same provider. - """ - part_kind: Literal['builtin-tool-call'] = 'builtin-tool-call' """Part type identifier, this is available on all parts as a discriminator.""" @@ -1496,6 +1499,9 @@ class TextPartDelta: _: KW_ONLY + provider_name: str | None = None + """The name of the provider that generated the response.""" + provider_details: dict[str, Any] | None = None """Additional data returned by the provider that can't be mapped to standard fields. 
@@ -1521,6 +1527,7 @@ def apply(self, part: ModelResponsePart) -> TextPart: return replace( part, content=part.content + self.content_delta, + provider_name=self.provider_name or part.provider_name, provider_details={**(part.provider_details or {}), **(self.provider_details or {})} or None, ) @@ -1653,6 +1660,9 @@ class ToolCallPartDelta: Note this is never treated as a delta — it can replace None, but otherwise if a non-matching value is provided an error will be raised.""" + provider_name: str | None = None + """The name of the provider that generated the response.""" + provider_details: dict[str, Any] | None = None """Additional data returned by the provider that can't be mapped to standard fields. @@ -1674,6 +1684,7 @@ def as_part(self) -> ToolCallPart | None: self.tool_name_delta, self.args_delta, self.tool_call_id or _generate_tool_call_id(), + provider_name=self.provider_name, provider_details=self.provider_details, ) @@ -1735,6 +1746,9 @@ def _apply_to_delta(self, delta: ToolCallPartDelta) -> ToolCallPart | BuiltinToo if self.tool_call_id: delta = replace(delta, tool_call_id=self.tool_call_id) + if self.provider_name: + delta = replace(delta, provider_name=self.provider_name) + if self.provider_details: merged_provider_details = {**(delta.provider_details or {}), **self.provider_details} delta = replace(delta, provider_details=merged_provider_details) @@ -1745,6 +1759,7 @@ def _apply_to_delta(self, delta: ToolCallPartDelta) -> ToolCallPart | BuiltinToo delta.tool_name_delta, delta.args_delta, delta.tool_call_id or _generate_tool_call_id(), + provider_name=delta.provider_name, provider_details=delta.provider_details, ) @@ -1771,6 +1786,9 @@ def _apply_to_part(self, part: ToolCallPart | BuiltinToolCallPart) -> ToolCallPa if self.tool_call_id: part = replace(part, tool_call_id=self.tool_call_id) + if self.provider_name: + part = replace(part, provider_name=self.provider_name) + if self.provider_details: merged_provider_details = {**(part.provider_details or 
{}), **self.provider_details} part = replace(part, provider_details=merged_provider_details) diff --git a/pydantic_ai_slim/pydantic_ai/models/gemini.py b/pydantic_ai_slim/pydantic_ai/models/gemini.py index f063ca5798..204287ea14 100644 --- a/pydantic_ai_slim/pydantic_ai/models/gemini.py +++ b/pydantic_ai_slim/pydantic_ai/models/gemini.py @@ -297,6 +297,7 @@ def _process_response(self, response: _GeminiResponse) -> ModelResponse: usage, vendor_id=vendor_id, vendor_details=vendor_details, + provider_name=self._provider.name, provider_url=self.base_url, ) @@ -719,6 +720,7 @@ def _process_response_from_parts( model_name: GeminiModelName, usage: usage.RequestUsage, vendor_id: str | None, + provider_name: str, provider_url: str, vendor_details: dict[str, Any] | None = None, ) -> ModelResponse: @@ -741,6 +743,7 @@ def _process_response_from_parts( parts=items, usage=usage, model_name=model_name, + provider_name=provider_name, provider_response_id=vendor_id, provider_details=vendor_details, provider_url=provider_url, diff --git a/pydantic_ai_slim/pydantic_ai/models/google.py b/pydantic_ai_slim/pydantic_ai/models/google.py index b05c7d795e..518163b240 100644 --- a/pydantic_ai_slim/pydantic_ai/models/google.py +++ b/pydantic_ai_slim/pydantic_ai/models/google.py @@ -825,12 +825,18 @@ async def _get_event_iterator(self) -> AsyncIterator[ModelResponseStreamEvent]: continue if part.thought: for event in self._parts_manager.handle_thinking_delta( - vendor_part_id=None, content=part.text, provider_details=provider_details + vendor_part_id=None, + content=part.text, + provider_name=self.provider_name if provider_details else None, + provider_details=provider_details, ): yield event else: for event in self._parts_manager.handle_text_delta( - vendor_part_id=None, content=part.text, provider_details=provider_details + vendor_part_id=None, + content=part.text, + provider_name=self.provider_name if provider_details else None, + provider_details=provider_details, ): yield event elif 
part.function_call: @@ -839,6 +845,7 @@ async def _get_event_iterator(self) -> AsyncIterator[ModelResponseStreamEvent]: tool_name=part.function_call.name, args=part.function_call.args, tool_call_id=part.function_call.id, + provider_name=self.provider_name if provider_details else None, provider_details=provider_details, ) if maybe_event is not None: # pragma: no branch @@ -855,7 +862,11 @@ async def _get_event_iterator(self) -> AsyncIterator[ModelResponseStreamEvent]: content = BinaryContent(data=data, media_type=mime_type) yield self._parts_manager.handle_part( vendor_part_id=uuid4(), - part=FilePart(content=BinaryContent.narrow_type(content), provider_details=provider_details), + part=FilePart( + content=BinaryContent.narrow_type(content), + provider_name=self.provider_name if provider_details else None, + provider_details=provider_details, + ), ) elif part.executable_code is not None: part_obj = self._handle_executable_code_streaming(part.executable_code) @@ -1029,6 +1040,59 @@ def _content_model_response(m: ModelResponse, provider_name: str) -> ContentDict return ContentDict(role='model', parts=parts) +def _process_part( + part: Part, code_execution_tool_call_id: str | None, provider_name: str +) -> tuple[ModelResponsePart | None, str | None]: + """Process a Google Part and return the corresponding ModelResponsePart. + + Returns: + A tuple of (item, code_execution_tool_call_id). Returns (None, id) if the part should be skipped. + """ + provider_details: dict[str, Any] | None = None + if part.thought_signature: + # Per https://ai.google.dev/gemini-api/docs/function-calling?example=meeting#thought-signatures: + # - Always send the thought_signature back to the model inside its original Part. + # - Don't merge a Part containing a signature with one that does not. This breaks the positional context of the thought. + # - Don't combine two Parts that both contain signatures, as the signature strings cannot be merged. 
+ thought_signature = base64.b64encode(part.thought_signature).decode('utf-8') + provider_details = {'thought_signature': thought_signature} + + if part.executable_code is not None: + code_execution_tool_call_id = _utils.generate_tool_call_id() + item = _map_executable_code(part.executable_code, provider_name, code_execution_tool_call_id) + elif part.code_execution_result is not None: + assert code_execution_tool_call_id is not None + item = _map_code_execution_result(part.code_execution_result, provider_name, code_execution_tool_call_id) + elif part.text is not None: + # Google sometimes sends empty text parts, we don't want to add them to the response + if len(part.text) == 0 and not provider_details: + return None, code_execution_tool_call_id + if part.thought: + item = ThinkingPart(content=part.text) + else: + item = TextPart(content=part.text) + elif part.function_call: + assert part.function_call.name is not None + item = ToolCallPart(tool_name=part.function_call.name, args=part.function_call.args) + if part.function_call.id is not None: + item.tool_call_id = part.function_call.id # pragma: no cover + elif inline_data := part.inline_data: + data = inline_data.data + mime_type = inline_data.mime_type + assert data and mime_type, 'Inline data must have data and mime type' + content = BinaryContent(data=data, media_type=mime_type) + item = FilePart(content=BinaryContent.narrow_type(content)) + else: # pragma: no cover + raise UnexpectedModelBehavior(f'Unsupported response from Gemini: {part!r}') + + if provider_details: + item.provider_details = {**(item.provider_details or {}), **provider_details} + if provider_name: + item.provider_name = item.provider_name or provider_name + + return item, code_execution_tool_call_id + + def _process_response_from_parts( parts: list[Part], grounding_metadata: GroundingMetadata | None, @@ -1060,47 +1124,10 @@ def _process_response_from_parts( item: ModelResponsePart | None = None code_execution_tool_call_id: str | None = None 
for part in parts: - provider_details: dict[str, Any] | None = None - if part.thought_signature: - # Per https://ai.google.dev/gemini-api/docs/function-calling?example=meeting#thought-signatures: - # - Always send the thought_signature back to the model inside its original Part. - # - Don't merge a Part containing a signature with one that does not. This breaks the positional context of the thought. - # - Don't combine two Parts that both contain signatures, as the signature strings cannot be merged. - thought_signature = base64.b64encode(part.thought_signature).decode('utf-8') - provider_details = {'thought_signature': thought_signature} - - if part.executable_code is not None: - code_execution_tool_call_id = _utils.generate_tool_call_id() - item = _map_executable_code(part.executable_code, provider_name, code_execution_tool_call_id) - elif part.code_execution_result is not None: - assert code_execution_tool_call_id is not None - item = _map_code_execution_result(part.code_execution_result, provider_name, code_execution_tool_call_id) - elif part.text is not None: - # Google sometimes sends empty text parts, we don't want to add them to the response - if len(part.text) == 0 and not provider_details: - continue - if part.thought: - item = ThinkingPart(content=part.text) - else: - item = TextPart(content=part.text) - elif part.function_call: - assert part.function_call.name is not None - item = ToolCallPart(tool_name=part.function_call.name, args=part.function_call.args) - if part.function_call.id is not None: - item.tool_call_id = part.function_call.id # pragma: no cover - elif inline_data := part.inline_data: - data = inline_data.data - mime_type = inline_data.mime_type - assert data and mime_type, 'Inline data must have data and mime type' - content = BinaryContent(data=data, media_type=mime_type) - item = FilePart(content=BinaryContent.narrow_type(content)) - else: # pragma: no cover - raise UnexpectedModelBehavior(f'Unsupported response from Gemini: {part!r}') - 
- if provider_details: - item.provider_details = {**(item.provider_details or {}), **provider_details} - - items.append(item) + item, code_execution_tool_call_id = _process_part(part, code_execution_tool_call_id, provider_name) + if item is not None: + items.append(item) + return ModelResponse( parts=items, model_name=model_name, diff --git a/pydantic_ai_slim/pydantic_ai/models/openai.py b/pydantic_ai_slim/pydantic_ai/models/openai.py index d1a25055d0..8ddbf51f73 100644 --- a/pydantic_ai_slim/pydantic_ai/models/openai.py +++ b/pydantic_ai_slim/pydantic_ai/models/openai.py @@ -1287,7 +1287,14 @@ def _process_response( # noqa: C901 part_provider_details: dict[str, Any] | None = None if content.logprobs: part_provider_details = {'logprobs': _map_logprobs(content.logprobs)} - items.append(TextPart(content.text, id=item.id, provider_details=part_provider_details)) + items.append( + TextPart( + content.text, + id=item.id, + provider_name=self.system if part_provider_details else None, + provider_details=part_provider_details, + ) + ) elif isinstance(item, responses.ResponseFunctionToolCall): items.append( ToolCallPart( @@ -2395,6 +2402,7 @@ async def _get_event_iterator(self) -> AsyncIterator[ModelResponseStreamEvent]: for event in self._parts_manager.handle_thinking_delta( vendor_part_id=chunk.item_id, id=chunk.item_id, + provider_name=self.provider_name, provider_details=_make_raw_content_updater(chunk.delta, chunk.content_index), ): yield event diff --git a/pydantic_ai_slim/pydantic_ai/ui/vercel_ai/_adapter.py b/pydantic_ai_slim/pydantic_ai/ui/vercel_ai/_adapter.py index fa82b9255b..d56b51bf2c 100644 --- a/pydantic_ai_slim/pydantic_ai/ui/vercel_ai/_adapter.py +++ b/pydantic_ai_slim/pydantic_ai/ui/vercel_ai/_adapter.py @@ -38,13 +38,16 @@ from ...tools import AgentDepsT from .. 
import MessagesBuilder, UIAdapter, UIEventStream from ._event_stream import VercelAIEventStream +from ._utils import dump_provider_metadata, load_provider_metadata from .request_types import ( DataUIPart, DynamicToolInputAvailablePart, + DynamicToolInputStreamingPart, DynamicToolOutputAvailablePart, DynamicToolOutputErrorPart, DynamicToolUIPart, FileUIPart, + ProviderMetadata, ReasoningUIPart, RequestData, SourceDocumentUIPart, @@ -52,6 +55,7 @@ StepStartUIPart, TextUIPart, ToolInputAvailablePart, + ToolInputStreamingPart, ToolOutputAvailablePart, ToolOutputErrorPart, ToolUIPart, @@ -130,16 +134,24 @@ def load_messages(cls, messages: Sequence[UIMessage]) -> list[ModelMessage]: # elif msg.role == 'assistant': for part in msg.parts: if isinstance(part, TextUIPart): - builder.add(TextPart(content=part.text)) + provider_meta = load_provider_metadata(part.provider_metadata) + builder.add( + TextPart( + content=part.text, + id=provider_meta.get('id'), + provider_name=provider_meta.get('provider_name'), + provider_details=provider_meta.get('provider_details'), + ) + ) elif isinstance(part, ReasoningUIPart): - pydantic_ai_meta = (part.provider_metadata or {}).get('pydantic_ai', {}) + provider_meta = load_provider_metadata(part.provider_metadata) builder.add( ThinkingPart( content=part.text, - id=pydantic_ai_meta.get('id'), - signature=pydantic_ai_meta.get('signature'), - provider_name=pydantic_ai_meta.get('provider_name'), - provider_details=pydantic_ai_meta.get('provider_details'), + id=provider_meta.get('id'), + signature=provider_meta.get('signature'), + provider_name=provider_meta.get('provider_name'), + provider_details=provider_meta.get('provider_details'), ) ) elif isinstance(part, FileUIPart): @@ -150,7 +162,15 @@ def load_messages(cls, messages: Sequence[UIMessage]) -> list[ModelMessage]: # raise ValueError( 'Vercel AI integration can currently only handle assistant file parts with data URIs.' 
) from e - builder.add(FilePart(content=file)) + provider_meta = load_provider_metadata(part.provider_metadata) + builder.add( + FilePart( + content=file, + id=provider_meta.get('id'), + provider_name=provider_meta.get('provider_name'), + provider_details=provider_meta.get('provider_details'), + ) + ) elif isinstance(part, ToolUIPart | DynamicToolUIPart): if isinstance(part, DynamicToolUIPart): tool_name = part.tool_name @@ -175,31 +195,63 @@ def load_messages(cls, messages: Sequence[UIMessage]) -> list[ModelMessage]: # else: assert_never(args) - if builtin_tool: - call_part = BuiltinToolCallPart(tool_name=tool_name, tool_call_id=tool_call_id, args=args) - builder.add(call_part) + provider_meta = {} + part_id = provider_name = provider_details = None + if not isinstance(part, (DynamicToolInputStreamingPart, ToolInputStreamingPart)): + provider_meta = load_provider_metadata(part.call_provider_metadata) + part_id = provider_meta.get('id') + provider_name = provider_meta.get('provider_name') + provider_details = provider_meta.get('provider_details') - if isinstance(part, ToolOutputAvailablePart | ToolOutputErrorPart): - if part.state == 'output-available': + if builtin_tool: + # For builtin tools, we need to create 2 parts (BuiltinToolCall & BuiltinToolReturn) for a single Vercel ToolOutput + # The call and return metadata are combined in the output part. + # We extract and return them to the respective parts for pydantic_ai. 
+ call_meta = return_meta = {} + output: Any | None = None + has_output = isinstance(part, (ToolOutputAvailablePart, ToolOutputErrorPart)) + + if has_output: + loaded_call_meta, loaded_return_meta = cls._load_builtin_tool_meta(provider_meta) + call_meta = loaded_call_meta or {} + return_meta = loaded_return_meta or {} + if isinstance(part, ToolOutputAvailablePart): output = part.output - else: + elif isinstance(part, ToolOutputErrorPart): output = {'error_text': part.error_text, 'is_error': True} - provider_name = ( - (part.call_provider_metadata or {}).get('pydantic_ai', {}).get('provider_name') + builder.add( + BuiltinToolCallPart( + tool_name=tool_name, + tool_call_id=tool_call_id, + args=args, + id=call_meta.get('id') or part_id, + provider_name=call_meta.get('provider_name') or provider_name, + provider_details=call_meta.get('provider_details') or provider_details, ) - call_part.provider_name = provider_name + ) + if has_output: builder.add( BuiltinToolReturnPart( tool_name=tool_name, tool_call_id=tool_call_id, content=output, - provider_name=provider_name, + provider_name=return_meta.get('provider_name') or provider_name, + provider_details=return_meta.get('provider_details') or provider_details, ) ) else: - builder.add(ToolCallPart(tool_name=tool_name, tool_call_id=tool_call_id, args=args)) + builder.add( + ToolCallPart( + tool_name=tool_name, + tool_call_id=tool_call_id, + args=args, + id=part_id, + provider_name=provider_name, + provider_details=provider_details, + ) + ) if part.state == 'output-available': builder.add( @@ -230,6 +282,20 @@ def load_messages(cls, messages: Sequence[UIMessage]) -> list[ModelMessage]: # return builder.messages + @staticmethod + def _dump_builtin_tool_meta( + call_provider_metadata: ProviderMetadata | None, return_provider_metadata: ProviderMetadata | None + ) -> ProviderMetadata | None: + """Use special keys (call_meta and return_meta) to dump combined provider metadata.""" + return 
dump_provider_metadata(call_meta=call_provider_metadata, return_meta=return_provider_metadata) + + @staticmethod + def _load_builtin_tool_meta( + provider_metadata: ProviderMetadata, + ) -> tuple[dict[str, Any] | None, dict[str, Any] | None]: + """Use special keys (call_meta and return_meta) to load combined provider metadata.""" + return provider_metadata.get('call_meta'), provider_metadata.get('return_meta') + @staticmethod def _dump_request_message(msg: ModelRequest) -> tuple[list[UIMessagePart], list[UIMessagePart]]: """Convert a ModelRequest into a UIMessage.""" @@ -256,10 +322,9 @@ def _dump_request_message(msg: ModelRequest) -> tuple[list[UIMessagePart], list[ return system_ui_parts, user_ui_parts - @staticmethod - def _dump_response_message( # noqa: C901 - msg: ModelResponse, - tool_results: dict[str, ToolReturnPart | RetryPromptPart], + @classmethod + def _dump_response_message( + cls, msg: ModelResponse, tool_results: dict[str, ToolReturnPart | RetryPromptPart] ) -> list[UIMessagePart]: """Convert a ModelResponse into a UIMessage.""" ui_parts: list[UIMessagePart] = [] @@ -277,33 +342,52 @@ def _dump_response_message( # noqa: C901 if ui_parts and isinstance(ui_parts[-1], TextUIPart): ui_parts[-1].text += part.content else: - ui_parts.append(TextUIPart(text=part.content, state='done')) + provider_metadata = dump_provider_metadata( + id=part.id, provider_name=part.provider_name, provider_details=part.provider_details + ) + ui_parts.append(TextUIPart(text=part.content, state='done', provider_metadata=provider_metadata)) elif isinstance(part, ThinkingPart): - thinking_metadata: dict[str, Any] = {} - if part.id is not None: - thinking_metadata['id'] = part.id - if part.signature is not None: - thinking_metadata['signature'] = part.signature - if part.provider_name is not None: - thinking_metadata['provider_name'] = part.provider_name - if part.provider_details is not None: - thinking_metadata['provider_details'] = part.provider_details - - provider_metadata = 
{'pydantic_ai': thinking_metadata} if thinking_metadata else None + provider_metadata = dump_provider_metadata( + id=part.id, + signature=part.signature, + provider_name=part.provider_name, + provider_details=part.provider_details, + ) ui_parts.append(ReasoningUIPart(text=part.content, state='done', provider_metadata=provider_metadata)) elif isinstance(part, FilePart): ui_parts.append( FileUIPart( url=part.content.data_uri, media_type=part.content.media_type, + provider_metadata=dump_provider_metadata( + id=part.id, provider_name=part.provider_name, provider_details=part.provider_details + ), ) ) elif isinstance(part, BuiltinToolCallPart): - call_provider_metadata = ( - {'pydantic_ai': {'provider_name': part.provider_name}} if part.provider_name else None - ) - if builtin_return := local_builtin_returns.get(part.tool_call_id): + # Builtin tool calls are represented by two parts in pydantic_ai: + # 1. BuiltinToolCallPart (the tool request) -> part + # 2. BuiltinToolReturnPart (the tool's output) -> builtin_return + # The Vercel AI SDK only has a single ToolOutputAvailablePart. + # So, we need to combine the metadata so that when we later convert back from Vercel AI to pydantic_ai, + # we can properly reconstruct both the call and return parts with their respective metadata. + # Note: This extra metadata handling is only needed for built-in tools, since normal tool returns + # (ToolReturnPart) do not include provider metadata. 
+ + call_meta = dump_provider_metadata( + wrapper_key=None, + id=part.id, + provider_name=part.provider_name, + provider_details=part.provider_details, + ) + return_meta = dump_provider_metadata( + wrapper_key=None, + provider_name=builtin_return.provider_name, + provider_details=builtin_return.provider_details, + ) + combined_provider_meta = cls._dump_builtin_tool_meta(call_meta, return_meta) + content = builtin_return.model_response_str() ui_parts.append( ToolOutputAvailablePart( @@ -313,10 +397,13 @@ def _dump_response_message( # noqa: C901 output=content, state='output-available', provider_executed=True, - call_provider_metadata=call_provider_metadata, + call_provider_metadata=combined_provider_meta, ) ) else: + call_provider_metadata = dump_provider_metadata( + id=part.id, provider_name=part.provider_name, provider_details=part.provider_details + ) ui_parts.append( ToolInputAvailablePart( type=f'tool-{part.tool_name}', @@ -329,6 +416,9 @@ def _dump_response_message( # noqa: C901 ) elif isinstance(part, ToolCallPart): tool_result = tool_results.get(part.tool_call_id) + call_provider_metadata = dump_provider_metadata( + id=part.id, provider_name=part.provider_name, provider_details=part.provider_details + ) if isinstance(tool_result, ToolReturnPart): content = tool_result.model_response_str() @@ -339,6 +429,7 @@ def _dump_response_message( # noqa: C901 input=part.args_as_json_str(), output=content, state='output-available', + call_provider_metadata=call_provider_metadata, ) ) elif isinstance(tool_result, RetryPromptPart): @@ -350,6 +441,7 @@ def _dump_response_message( # noqa: C901 input=part.args_as_json_str(), error_text=error_text, state='output-error', + call_provider_metadata=call_provider_metadata, ) ) else: @@ -359,6 +451,7 @@ def _dump_response_message( # noqa: C901 tool_call_id=part.tool_call_id, input=part.args_as_json_str(), state='input-available', + call_provider_metadata=call_provider_metadata, ) ) else: diff --git 
a/pydantic_ai_slim/pydantic_ai/ui/vercel_ai/_event_stream.py b/pydantic_ai_slim/pydantic_ai/ui/vercel_ai/_event_stream.py index 070166df98..b9fd28cbef 100644 --- a/pydantic_ai_slim/pydantic_ai/ui/vercel_ai/_event_stream.py +++ b/pydantic_ai_slim/pydantic_ai/ui/vercel_ai/_event_stream.py @@ -27,6 +27,7 @@ from ...run import AgentRunResultEvent from ...tools import AgentDepsT from .. import UIEventStream +from ._utils import dump_provider_metadata from .request_types import RequestData from .response_types import ( BaseChunk, @@ -113,39 +114,67 @@ async def on_error(self, error: Exception) -> AsyncIterator[BaseChunk]: yield ErrorChunk(error_text=str(error)) async def handle_text_start(self, part: TextPart, follows_text: bool = False) -> AsyncIterator[BaseChunk]: + provider_metadata = dump_provider_metadata( + id=part.id, provider_name=part.provider_name, provider_details=part.provider_details + ) if follows_text: message_id = self.message_id else: message_id = self.new_message_id() - yield TextStartChunk(id=message_id) + yield TextStartChunk(id=message_id, provider_metadata=provider_metadata) if part.content: - yield TextDeltaChunk(id=message_id, delta=part.content) + yield TextDeltaChunk(id=message_id, delta=part.content, provider_metadata=provider_metadata) async def handle_text_delta(self, delta: TextPartDelta) -> AsyncIterator[BaseChunk]: if delta.content_delta: # pragma: no branch - yield TextDeltaChunk(id=self.message_id, delta=delta.content_delta) + provider_metadata = dump_provider_metadata( + provider_name=delta.provider_name, provider_details=delta.provider_details + ) + yield TextDeltaChunk(id=self.message_id, delta=delta.content_delta, provider_metadata=provider_metadata) async def handle_text_end(self, part: TextPart, followed_by_text: bool = False) -> AsyncIterator[BaseChunk]: if not followed_by_text: - yield TextEndChunk(id=self.message_id) + provider_metadata = dump_provider_metadata( + id=part.id, provider_name=part.provider_name, 
provider_details=part.provider_details + ) + yield TextEndChunk(id=self.message_id, provider_metadata=provider_metadata) async def handle_thinking_start( self, part: ThinkingPart, follows_thinking: bool = False ) -> AsyncIterator[BaseChunk]: message_id = self.new_message_id() - yield ReasoningStartChunk(id=message_id) + provider_metadata = dump_provider_metadata( + id=part.id, + signature=part.signature, + provider_name=part.provider_name, + provider_details=part.provider_details, + ) + yield ReasoningStartChunk(id=message_id, provider_metadata=provider_metadata) if part.content: - yield ReasoningDeltaChunk(id=message_id, delta=part.content) + yield ReasoningDeltaChunk(id=message_id, delta=part.content, provider_metadata=provider_metadata) async def handle_thinking_delta(self, delta: ThinkingPartDelta) -> AsyncIterator[BaseChunk]: if delta.content_delta: # pragma: no branch - yield ReasoningDeltaChunk(id=self.message_id, delta=delta.content_delta) + provider_metadata = dump_provider_metadata( + provider_name=delta.provider_name, + signature=delta.signature_delta, + provider_details=delta.provider_details, + ) + yield ReasoningDeltaChunk( + id=self.message_id, delta=delta.content_delta, provider_metadata=provider_metadata + ) async def handle_thinking_end( self, part: ThinkingPart, followed_by_thinking: bool = False ) -> AsyncIterator[BaseChunk]: - yield ReasoningEndChunk(id=self.message_id) + provider_metadata = dump_provider_metadata( + id=part.id, + signature=part.signature, + provider_name=part.provider_name, + provider_details=part.provider_details, + ) + yield ReasoningEndChunk(id=self.message_id, provider_metadata=provider_metadata) def handle_tool_call_start(self, part: ToolCallPart | BuiltinToolCallPart) -> AsyncIterator[BaseChunk]: return self._handle_tool_call_start(part) @@ -178,7 +207,12 @@ async def handle_tool_call_delta(self, delta: ToolCallPartDelta) -> AsyncIterato async def handle_tool_call_end(self, part: ToolCallPart) -> 
AsyncIterator[BaseChunk]: yield ToolInputAvailableChunk( - tool_call_id=part.tool_call_id, tool_name=part.tool_name, input=part.args_as_dict() + tool_call_id=part.tool_call_id, + tool_name=part.tool_name, + input=part.args_as_dict(), + provider_metadata=dump_provider_metadata( + id=part.id, provider_name=part.provider_name, provider_details=part.provider_details + ), ) async def handle_builtin_tool_call_end(self, part: BuiltinToolCallPart) -> AsyncIterator[BaseChunk]: @@ -187,7 +221,9 @@ async def handle_builtin_tool_call_end(self, part: BuiltinToolCallPart) -> Async tool_name=part.tool_name, input=part.args_as_dict(), provider_executed=True, - provider_metadata={'pydantic_ai': {'provider_name': part.provider_name}}, + provider_metadata=dump_provider_metadata( + id=part.id, provider_name=part.provider_name, provider_details=part.provider_details + ), ) async def handle_builtin_tool_return(self, part: BuiltinToolReturnPart) -> AsyncIterator[BaseChunk]: diff --git a/pydantic_ai_slim/pydantic_ai/ui/vercel_ai/_models.py b/pydantic_ai_slim/pydantic_ai/ui/vercel_ai/_models.py new file mode 100644 index 0000000000..c530aace98 --- /dev/null +++ b/pydantic_ai_slim/pydantic_ai/ui/vercel_ai/_models.py @@ -0,0 +1,12 @@ +"""Models for Vercel AI protocol.""" + +from abc import ABC + +from pydantic import BaseModel, ConfigDict +from pydantic.alias_generators import to_camel + + +class CamelBaseModel(BaseModel, ABC): + """Base model with camelCase aliases.""" + + model_config = ConfigDict(alias_generator=to_camel, populate_by_name=True, extra='forbid') diff --git a/pydantic_ai_slim/pydantic_ai/ui/vercel_ai/_utils.py b/pydantic_ai_slim/pydantic_ai/ui/vercel_ai/_utils.py index 0daf8f35de..41dfddb7aa 100644 --- a/pydantic_ai_slim/pydantic_ai/ui/vercel_ai/_utils.py +++ b/pydantic_ai_slim/pydantic_ai/ui/vercel_ai/_utils.py @@ -1,16 +1,27 @@ -"""Utilities for Vercel AI protocol. 
+"""Utilities for handling Pydantic AI and Vercel data streams.""" -Converted to Python from: -https://github.com/vercel/ai/blob/ai%405.0.34/packages/ai/src/ui/ui-messages.ts -""" +from typing import Any -from abc import ABC +from pydantic_ai.messages import ProviderDetailsDelta +from pydantic_ai.ui.vercel_ai.response_types import ProviderMetadata -from pydantic import BaseModel, ConfigDict -from pydantic.alias_generators import to_camel +__all__ = [] +PROVIDER_METADATA_KEY = 'pydantic_ai' -class CamelBaseModel(BaseModel, ABC): - """Base model with camelCase aliases.""" - model_config = ConfigDict(alias_generator=to_camel, populate_by_name=True, extra='forbid') +def load_provider_metadata(provider_metadata: ProviderMetadata | None) -> dict[str, Any]: + """Load the Pydantic AI metadata from the provider metadata.""" + return provider_metadata.get(PROVIDER_METADATA_KEY, {}) if provider_metadata else {} + + +def dump_provider_metadata( + wrapper_key: str | None = PROVIDER_METADATA_KEY, + **kwargs: ProviderDetailsDelta | str, +) -> dict[str, Any] | None: + """Dump provider metadata from keyword arguments.""" + filtered = {k: v for k, v in kwargs.items() if v is not None} + if wrapper_key: + return {wrapper_key: filtered} if filtered else None + else: + return filtered if filtered else None diff --git a/pydantic_ai_slim/pydantic_ai/ui/vercel_ai/request_types.py b/pydantic_ai_slim/pydantic_ai/ui/vercel_ai/request_types.py index 1fe9a593af..e397052287 100644 --- a/pydantic_ai_slim/pydantic_ai/ui/vercel_ai/request_types.py +++ b/pydantic_ai_slim/pydantic_ai/ui/vercel_ai/request_types.py @@ -9,7 +9,7 @@ from pydantic import Discriminator, Field -from ._utils import CamelBaseModel +from ._models import CamelBaseModel # Technically this is recursive union of JSON types; for simplicity, we call it Any JSONValue = Any diff --git a/pydantic_ai_slim/pydantic_ai/ui/vercel_ai/response_types.py b/pydantic_ai_slim/pydantic_ai/ui/vercel_ai/response_types.py index 
6a7b98a2dc..78b2e5f122 100644 --- a/pydantic_ai_slim/pydantic_ai/ui/vercel_ai/response_types.py +++ b/pydantic_ai_slim/pydantic_ai/ui/vercel_ai/response_types.py @@ -9,7 +9,7 @@ from pydantic import Field -from ._utils import CamelBaseModel +from ._models import CamelBaseModel # Technically this is recursive union of JSON types; for simplicity, we call it Any JSONValue = Any diff --git a/tests/models/test_gemini.py b/tests/models/test_gemini.py index dddafc0755..1f2acc96e5 100644 --- a/tests/models/test_gemini.py +++ b/tests/models/test_gemini.py @@ -569,6 +569,7 @@ async def test_text_success(get_gemini_client: GetGeminiClient): usage=RequestUsage(input_tokens=1, output_tokens=2), model_name='gemini-1.5-flash-123', timestamp=IsNow(tz=timezone.utc), + provider_name='google-gla', provider_url='https://generativelanguage.googleapis.com/v1beta/models/', provider_details={'finish_reason': 'STOP'}, run_id=IsStr(), @@ -591,6 +592,7 @@ async def test_text_success(get_gemini_client: GetGeminiClient): usage=RequestUsage(input_tokens=1, output_tokens=2), model_name='gemini-1.5-flash-123', timestamp=IsNow(tz=timezone.utc), + provider_name='google-gla', provider_url='https://generativelanguage.googleapis.com/v1beta/models/', provider_details={'finish_reason': 'STOP'}, run_id=IsStr(), @@ -605,6 +607,7 @@ async def test_text_success(get_gemini_client: GetGeminiClient): usage=RequestUsage(input_tokens=1, output_tokens=2), model_name='gemini-1.5-flash-123', timestamp=IsNow(tz=timezone.utc), + provider_name='google-gla', provider_url='https://generativelanguage.googleapis.com/v1beta/models/', provider_details={'finish_reason': 'STOP'}, run_id=IsStr(), @@ -635,6 +638,7 @@ async def test_request_structured_response(get_gemini_client: GetGeminiClient): usage=RequestUsage(input_tokens=1, output_tokens=2), model_name='gemini-1.5-flash-123', timestamp=IsNow(tz=timezone.utc), + provider_name='google-gla', provider_url='https://generativelanguage.googleapis.com/v1beta/models/', 
provider_details={'finish_reason': 'STOP'}, run_id=IsStr(), @@ -704,6 +708,7 @@ async def get_location(loc_name: str) -> str: usage=RequestUsage(input_tokens=1, output_tokens=2), model_name='gemini-1.5-flash-123', timestamp=IsNow(tz=timezone.utc), + provider_name='google-gla', provider_url='https://generativelanguage.googleapis.com/v1beta/models/', provider_details={'finish_reason': 'STOP'}, run_id=IsStr(), @@ -728,6 +733,7 @@ async def get_location(loc_name: str) -> str: usage=RequestUsage(input_tokens=1, output_tokens=2), model_name='gemini-1.5-flash-123', timestamp=IsNow(tz=timezone.utc), + provider_name='google-gla', provider_url='https://generativelanguage.googleapis.com/v1beta/models/', provider_details={'finish_reason': 'STOP'}, run_id=IsStr(), @@ -755,6 +761,7 @@ async def get_location(loc_name: str) -> str: usage=RequestUsage(input_tokens=1, output_tokens=2), model_name='gemini-1.5-flash-123', timestamp=IsNow(tz=timezone.utc), + provider_name='google-gla', provider_url='https://generativelanguage.googleapis.com/v1beta/models/', provider_details={'finish_reason': 'STOP'}, run_id=IsStr(), @@ -1224,6 +1231,7 @@ async def get_image() -> BinaryContent: ), model_name='gemini-3-pro-preview', timestamp=IsDatetime(), + provider_name='google-gla', provider_url='https://generativelanguage.googleapis.com/v1beta/models/', provider_details={'finish_reason': 'STOP'}, provider_response_id=IsStr(), @@ -1271,6 +1279,7 @@ async def get_image() -> BinaryContent: ), model_name='gemini-3-pro-preview', timestamp=IsDatetime(), + provider_name='google-gla', provider_url='https://generativelanguage.googleapis.com/v1beta/models/', provider_details={'finish_reason': 'STOP'}, provider_response_id=IsStr(), @@ -1401,6 +1410,7 @@ async def test_gemini_model_instructions(allow_model_requests: None, gemini_api_ ), model_name='gemini-1.5-flash', timestamp=IsDatetime(), + provider_name='google-gla', provider_url='https://generativelanguage.googleapis.com/v1beta/models/', 
provider_details={'finish_reason': 'STOP'}, run_id=IsStr(), @@ -1614,6 +1624,7 @@ async def test_gemini_model_thinking_part(allow_model_requests: None, gemini_api ), model_name='gemini-2.5-flash-preview-04-17', timestamp=IsDatetime(), + provider_name='google-gla', provider_url='https://generativelanguage.googleapis.com/v1beta/models/', provider_details={'finish_reason': 'STOP'}, run_id=IsStr(), @@ -1660,6 +1671,7 @@ async def test_gemini_youtube_video_url_input(allow_model_requests: None, gemini ), model_name='gemini-2.0-flash', timestamp=IsDatetime(), + provider_name='google-gla', provider_url='https://generativelanguage.googleapis.com/v1beta/models/', provider_details={'finish_reason': 'STOP'}, run_id=IsStr(), @@ -1735,6 +1747,7 @@ async def bar() -> str: ), model_name='gemini-2.0-flash', timestamp=IsDatetime(), + provider_name='google-gla', provider_url='https://generativelanguage.googleapis.com/v1beta/models/', provider_details={'finish_reason': 'STOP'}, provider_response_id=IsStr(), @@ -1765,6 +1778,7 @@ async def bar() -> str: ), model_name='gemini-2.0-flash', timestamp=IsDatetime(), + provider_name='google-gla', provider_url='https://generativelanguage.googleapis.com/v1beta/models/', provider_details={'finish_reason': 'STOP'}, provider_response_id=IsStr(), @@ -1822,6 +1836,7 @@ async def get_user_country() -> str: ), model_name='gemini-2.0-flash', timestamp=IsDatetime(), + provider_name='google-gla', provider_url='https://generativelanguage.googleapis.com/v1beta/models/', provider_details={'finish_reason': 'STOP'}, provider_response_id=IsStr(), @@ -1852,6 +1867,7 @@ async def get_user_country() -> str: ), model_name='gemini-2.0-flash', timestamp=IsDatetime(), + provider_name='google-gla', provider_url='https://generativelanguage.googleapis.com/v1beta/models/', provider_details={'finish_reason': 'STOP'}, provider_response_id=IsStr(), @@ -1916,6 +1932,7 @@ def upcase(text: str) -> str: ), model_name='models/gemini-2.5-pro-preview-05-06', 
timestamp=IsDatetime(), + provider_name='google-gla', provider_url='https://generativelanguage.googleapis.com/v1beta/models/', provider_details={'finish_reason': 'STOP'}, provider_response_id='TT9IaNfGN_DmqtsPzKnE4AE', @@ -1991,6 +2008,7 @@ class CityLocation(BaseModel): ), model_name='gemini-2.0-flash', timestamp=IsDatetime(), + provider_name='google-gla', provider_url='https://generativelanguage.googleapis.com/v1beta/models/', provider_details={'finish_reason': 'STOP'}, provider_response_id=IsStr(), @@ -2050,6 +2068,7 @@ class CountryLanguage(BaseModel): ), model_name='gemini-2.0-flash', timestamp=IsDatetime(), + provider_name='google-gla', provider_url='https://generativelanguage.googleapis.com/v1beta/models/', provider_details={'finish_reason': 'STOP'}, provider_response_id=IsStr(), @@ -2095,6 +2114,7 @@ class CityLocation(BaseModel): ), model_name='gemini-2.0-flash', timestamp=IsDatetime(), + provider_name='google-gla', provider_url='https://generativelanguage.googleapis.com/v1beta/models/', provider_details={'finish_reason': 'STOP'}, provider_response_id=IsStr(), @@ -2142,6 +2162,7 @@ async def get_user_country() -> str: ), model_name='models/gemini-2.5-pro-preview-05-06', timestamp=IsDatetime(), + provider_name='google-gla', provider_url='https://generativelanguage.googleapis.com/v1beta/models/', provider_details={'finish_reason': 'STOP'}, provider_response_id=IsStr(), @@ -2166,6 +2187,7 @@ async def get_user_country() -> str: ), model_name='models/gemini-2.5-pro-preview-05-06', timestamp=IsDatetime(), + provider_name='google-gla', provider_url='https://generativelanguage.googleapis.com/v1beta/models/', provider_details={'finish_reason': 'STOP'}, provider_response_id=IsStr(), @@ -2217,6 +2239,7 @@ class CountryLanguage(BaseModel): ), model_name='gemini-2.0-flash', timestamp=IsDatetime(), + provider_name='google-gla', provider_url='https://generativelanguage.googleapis.com/v1beta/models/', provider_details={'finish_reason': 'STOP'}, 
provider_response_id=IsStr(), diff --git a/tests/models/test_gemini_vertex.py b/tests/models/test_gemini_vertex.py index 84175e1104..6471ca37ee 100644 --- a/tests/models/test_gemini_vertex.py +++ b/tests/models/test_gemini_vertex.py @@ -149,6 +149,7 @@ async def test_url_input( usage=IsInstance(RequestUsage), model_name='gemini-2.0-flash', timestamp=IsDatetime(), + provider_name='google-vertex', provider_url='https://us-central1-aiplatform.googleapis.com/v1/projects/pydantic-ai/locations/us-central1/publishers/google/models/', provider_details={'finish_reason': 'STOP'}, provider_response_id=IsStr(), @@ -190,6 +191,7 @@ async def test_url_input_force_download(allow_model_requests: None) -> None: # usage=IsInstance(RequestUsage), model_name='gemini-2.0-flash', timestamp=IsDatetime(), + provider_name='google-vertex', provider_url='https://us-central1-aiplatform.googleapis.com/v1/projects/pydantic-ai/locations/us-central1/publishers/google/models/', provider_details={'finish_reason': 'STOP'}, provider_response_id=IsStr(), diff --git a/tests/models/test_google.py b/tests/models/test_google.py index 081f830f5c..32e710558d 100644 --- a/tests/models/test_google.py +++ b/tests/models/test_google.py @@ -597,6 +597,7 @@ async def get_capital(country: str) -> str: tool_name='get_capital', args={'country': 'France'}, tool_call_id=IsStr(), + provider_name='google-gla', provider_details={'thought_signature': IsStr()}, ) ], @@ -630,6 +631,7 @@ async def get_capital(country: str) -> str: tool_name='get_capital', args={'country': 'La France'}, tool_call_id=IsStr(), + provider_name='google-gla', provider_details={'thought_signature': IsStr()}, ) ], @@ -661,6 +663,7 @@ async def get_capital(country: str) -> str: parts=[ TextPart( content='Paris', + provider_name='google-gla', provider_details={'thought_signature': IsStr()}, ) ], @@ -769,11 +772,7 @@ async def get_temperature(city: str) -> str: ), PartEndEvent( index=0, - part=ToolCallPart( - tool_name='get_capital', - args={'country': 
'France'}, - tool_call_id=IsStr(), - ), + part=ToolCallPart(tool_name='get_capital', args={'country': 'France'}, tool_call_id=IsStr()), ), IsInstance(FunctionToolCallEvent), FunctionToolResultEvent( @@ -787,11 +786,7 @@ async def get_temperature(city: str) -> str: ), PartEndEvent( index=0, - part=ToolCallPart( - tool_name='get_temperature', - args={'city': 'Paris'}, - tool_call_id=IsStr(), - ), + part=ToolCallPart(tool_name='get_temperature', args={'city': 'Paris'}, tool_call_id=IsStr()), ), IsInstance(FunctionToolCallEvent), FunctionToolResultEvent( @@ -1638,7 +1633,11 @@ async def test_google_model_web_fetch_tool_stream(allow_model_requests: None, go ), previous_part_kind='builtin-tool-call', ), - PartStartEvent(index=2, part=TextPart(content=IsStr()), previous_part_kind='builtin-tool-return'), + PartStartEvent( + index=2, + part=TextPart(content=IsStr()), + previous_part_kind='builtin-tool-return', + ), FinalResultEvent(tool_name=None, tool_call_id=None), PartDeltaEvent(index=2, delta=TextPartDelta(content_delta=IsStr())), PartEndEvent(index=2, part=TextPart(content=IsStr())), @@ -1988,6 +1987,7 @@ def dummy() -> None: ... # pragma: no cover ThinkingPart(content=IsStr()), TextPart( content=IsStr(), + provider_name='google-gla', provider_details={'thought_signature': IsStr()}, ), ], @@ -2027,6 +2027,7 @@ def dummy() -> None: ... # pragma: no cover ThinkingPart(content=IsStr()), TextPart( content=IsStr(), + provider_name='google-gla', provider_details={'thought_signature': IsStr()}, ), ], @@ -2131,6 +2132,7 @@ def dummy() -> None: ... # pragma: no cover ThinkingPart(content=IsStr()), TextPart( content=IsStr(), + provider_name='google-gla', provider_details={'thought_signature': IsStr()}, ), ], @@ -2189,6 +2191,7 @@ def dummy() -> None: ... # pragma: no cover ThinkingPart(content=IsStr()), TextPart( content=IsStr(), + provider_name='google-gla', provider_details={'thought_signature': IsStr()}, ), ], @@ -2245,6 +2248,7 @@ def dummy() -> None: ... 
# pragma: no cover index=1, part=TextPart( content='This is a great question! Safely crossing the street is all about being aware and predictable. Here is a step-by-step', + provider_name='google-gla', provider_details={'thought_signature': IsStr()}, ), previous_part_kind='thinking', @@ -2305,6 +2309,7 @@ def dummy() -> None: ... # pragma: no cover The most important rule is to **stay alert and be predictable**. Always assume a driver might not see you.\ """, + provider_name='google-gla', provider_details={'thought_signature': IsStr()}, ), ), @@ -2508,13 +2513,7 @@ async def bar() -> str: run_id=IsStr(), ), ModelResponse( - parts=[ - ToolCallPart( - tool_name='final_result', - args={'bar': 'hello'}, - tool_call_id=IsStr(), - ) - ], + parts=[ToolCallPart(tool_name='final_result', args={'bar': 'hello'}, tool_call_id=IsStr())], usage=RequestUsage( input_tokens=27, output_tokens=5, details={'text_candidates_tokens': 5, 'text_prompt_tokens': 27} ), @@ -3211,6 +3210,7 @@ async def test_google_image_generation(allow_model_requests: None, google_provid media_type='image/jpeg', _identifier='b6e95a', ), + provider_name='google-gla', provider_details={'thought_signature': IsStr()}, ) ], @@ -3253,6 +3253,7 @@ async def test_google_image_generation(allow_model_requests: None, google_provid media_type='image/jpeg', _identifier='14bec0', ), + provider_name='google-gla', provider_details={'thought_signature': IsStr()}, ) ], @@ -3403,6 +3404,7 @@ async def test_google_image_generation_with_text(allow_model_requests: None, goo A little axolotl named Archie lived in a beautiful glass tank, but he always wondered what was beyond the clear walls. One day, he bravely peeked over the edge and discovered a whole new world of sunshine and potted plants. 
""", + provider_name='google-gla', provider_details={'thought_signature': IsStr()}, ), FilePart( @@ -3412,6 +3414,7 @@ async def test_google_image_generation_with_text(allow_model_requests: None, goo _identifier='00f2af', identifier=IsStr(), ), + provider_name='google-gla', provider_details={'thought_signature': IsStr()}, ), ], @@ -3522,6 +3525,7 @@ class Animal(BaseModel): media_type='image/jpeg', _identifier='4e5b3e', ), + provider_name='google-gla', provider_details={'thought_signature': IsStr()}, ) ], @@ -3559,6 +3563,7 @@ class Animal(BaseModel): "name": "Axolotl" } \ """, + provider_name='google-gla', provider_details={'thought_signature': IsStr()}, ) ], @@ -3663,6 +3668,7 @@ async def test_google_image_generation_with_web_search(allow_model_requests: Non media_type='image/jpeg', _identifier='787c28', ), + provider_name='google-gla', provider_details={'thought_signature': IsStr()}, ), ], @@ -4612,12 +4618,18 @@ async def test_google_model_file_search_tool_stream(allow_model_requests: None, next_part_kind='text', ), PartStartEvent( - index=1, part=TextPart(content='The capital of France'), previous_part_kind='builtin-tool-call' + index=1, + part=TextPart(content='The capital of France'), + previous_part_kind='builtin-tool-call', ), FinalResultEvent(tool_name=None, tool_call_id=None), - PartDeltaEvent(index=1, delta=TextPartDelta(content_delta=' is Paris. The city is well-known for its')), PartDeltaEvent( - index=1, delta=TextPartDelta(content_delta=' famous landmarks, including the Eiffel Tower.') + index=1, + delta=TextPartDelta(content_delta=' is Paris. 
The city is well-known for its'), + ), + PartDeltaEvent( + index=1, + delta=TextPartDelta(content_delta=' famous landmarks, including the Eiffel Tower.'), ), PartEndEvent( index=1, @@ -4858,10 +4870,7 @@ def get_country() -> str: signature=IsStr(), provider_name='openai', ), - TextPart( - content='Mexico City (Ciudad de México).', - id=IsStr(), - ), + TextPart(content='Mexico City (Ciudad de México).', id=IsStr()), ], usage=RequestUsage(input_tokens=379, output_tokens=77, details={'reasoning_tokens': 64}), model_name='gpt-5-2025-08-07', @@ -4891,6 +4900,7 @@ def get_country() -> str: tool_name='final_result', args={'city': 'Mexico City', 'country': 'Mexico'}, tool_call_id=IsStr(), + provider_name='google-gla', provider_details={'thought_signature': IsStr()}, ) ], @@ -5003,6 +5013,7 @@ async def test_google_model_retrying_after_empty_response(allow_model_requests: parts=[ TextPart( content='Hello! How can I help you today?', + provider_name='google-gla', provider_details={'thought_signature': IsStr()}, ) ], @@ -5196,6 +5207,7 @@ def get_country() -> str: tool_name='get_country', args={}, tool_call_id=IsStr(), + provider_name='google-gla', provider_details={'thought_signature': IsStr()}, ) ], @@ -5245,6 +5257,7 @@ def get_country() -> str: tool_name='get_country', args={}, tool_call_id=IsStr(), + provider_name='google-gla', provider_details={'thought_signature': IsStr()}, ), ), @@ -5254,6 +5267,7 @@ def get_country() -> str: tool_name='get_country', args={}, tool_call_id=IsStr(), + provider_name='google-gla', provider_details={'thought_signature': IsStr()}, ), ), @@ -5262,6 +5276,7 @@ def get_country() -> str: tool_name='get_country', args={}, tool_call_id=IsStr(), + provider_name='google-gla', provider_details={'thought_signature': IsStr()}, ) ), diff --git a/tests/models/test_openai_responses.py b/tests/models/test_openai_responses.py index 2d5d459697..5c8db0d900 100644 --- a/tests/models/test_openai_responses.py +++ b/tests/models/test_openai_responses.py @@ 
-722,10 +722,7 @@ async def test_openai_responses_model_builtin_tools_web_search(allow_model_reque signature='gAAAAABoxKrgxBU1Y3g_B0Eo5nVBHYxLC3Lgh2vNx7AcpSm-o7XiHfQvzLqaLvkI-Cc3F15mQexU0OTvx9FePdIKbwkMNm_X_s_K7YazPjZUTQ0TEod2VereH-Ebh6Xjq3bHm7mh5PWWGnY2SqVMCdKGtrXkoMzBraxedlv2-Tz8o0p6SYuyzM8yHecIfkG6Zd40AdZSiDzsnRNg7gA0zCddrDrRcOpeMTzSPw1z74UZtng-_pPeiv-TGCgwdlmBv8RRr2cuQTYE-yhcp6doCMKqemL8ShuIyfJz0KhQPwYE1zM1CB8sFc_TuArJJD3V2U-Bl3o8anIA8X7YclTlzz_N7HROtVI5qFQjSNhSrbxZKUBFDfAayrpQBEOyIRu7J42uAiBmoyms1WG1E2UtO69nx2ELSJs5yheEuVy4cTXyndBJr2sCs8VkVvcX7xvYkfKeChvkAbUfCotc991qAiyVNzhncM2Z31IEXDEDypeo2IFSwAcKuuXgePFFPBiJxmNQAQmErqbSoB3Woe1j5XjAzJ2eY5YEBZ-68GI3B5wmiZOLsPla_L4iBrczHI1iwGASgtMsuHPj5KVzwef093kg9QBlt-7pZHM3yoU1l5DFSJ5C168MdMdNGF3hn0T2Q3teUmJ5khgcKMKz4_ZVUjEDq8bPwp8DiaWlFgTv-Y-I8etik4o35EFmmmZbIZ7tk69xlBrGizm_KlcYWHBQ5BfuNyZDXZ13MKDyn4uyYxRvkHq4z4jPFEiZ3xX79mlNP3-B0T9g8CsqX1G1prKI7lde6oAHcWPFSWqZmM_JxvYXDBbck2DpEpx4xTuE_iJfGnKiNzanqV4EdOXiCTBVLZhMvXj9rAbwnhttvz5WhIeYAdsKEE0M1MUHuSWuWFVtClp6lPKSLtHQCBtE6mpPDyzUuaw6S1DoixZ6f33Sr8DB-EwF_deHRa95kEN9w4i_LqNbl5QQPF_1je6spo-yQTDpHc5wUidI0fBEQzM57rr9XH0F2afZtrQv9HcLfWKVufBTdd7ScpyOaKj70zgqTAq08Te-Yrj9eo3tbDt698U1fKEYW_uqP48ZKmnSNtFzKOoBzkPpKcwA5AQUiFOYH4-iDPDTOH23SYx8vlymoRiK1imCdPwWYI3miMURxPr9-zCHoM7AiB8cnJlD--zk-j1vQqcf3AntIKPwqycSEuJ7MWb9iN5Ybd1YE25_ZiXKJNVg8wnmTueelRdeM-2JVzAQwth1_3gnsemXn5v0uDVNpxvXoRtR1w8L_zQzKzag8kZMvfESnLCAEwYsCcrP-ngO97iKVvUQnII4RUtG_mSPV4V6Ses_cMUVqyHiM_W_frIosY-7dXnlox89-SPWrRwyC1jlGRA_LE1fpPZ2cZU7Gcyzrxp6yBuTCx8BHr9FJvqgbqtAUeYDpr_Sv-RsG8-w4IulSNZLH5Bh8TyvBGDhi8_lUbDCFTS3KI1ZJ8KJwbNLxF4YUI156zkWIN5yU0WDVlwoxpJD0naMPZzR0sQadMuaXEvLXTFm9Gtb667B2cjdzJqbb8z6NkAx3txRRD6EoezoYADq_ZR_LYha0iwv3bHvg4HIblhU_GVhnU-a-lQGQhTJ5Mh4OmrnTGUVD2Is1OVI0EmNscUuaVc7M1_ga5KbOgyff6bYS0ARh3Io5ekKQKkPVyBLgjjKlej4tB-vSEgitDhEJ-PD__ouuFaogm6twZy7hWVn9cgJmt-RHDZ6gOZm4QP8dWqRpuyEAtTpWR2TLTQVgM05hWpDqDL5AvBjAQ_GWkHCvdCvUINyyl5TsyXUcL207shrLUDCpBe_kESpF5dpAVng8_Zfu1dt3c04cCG1eg40e9JcO5iA9-upTrEPIPrXnAKy4vw-vbhQyL1r2jZWRV
ga9Do2idmzVf-c7yQ_AHGmf62SHGm-qqbljw0sXJe1rdPt2IHxzYXkhxpqqoaUueQk-pXLUvpMFeMcH97sK3toeCO3oiWQPG-nev0B0b__U8ntgI5m9df6n4IA97iS2zSylSY-F-XEJmLM2TKuSEdgAx1EBL_jyRQKB_8PW-0hSQGJLT70SQqDUJexwyrKABkApv3FuSH4FO0rXZ9TGN3GsnJSkIrTrzE2NG4OXK4syrmtBCb8DjsiicvjAvQhcouOM1xMZ89aSG9Psx5HRnViy6M73TIhYmWO71BRNEayMJaOMgUlgpl5alvV1YFBsChL6mxLVAJWUFuv2YPNaaDRqZEXYHWljhwSn24ASetweLc5GhnehdiT4JVJ_nfT3bygPIjEzvvIa7bbJSeL_bcY-qGAgsuR5m70BdjIH6xLmuqn3lEqulh9n6IPaDciryWqRr1OwxZJQ0-x3u6-G1wrbtrhVMK2Z6cyNUX6MvIMz39B_782X4JcLMrVm9Jgt6qzmfbJPnGA_NK3e9dlz6hP_AYoY-Je-IZEtpv4wyXAYE8v7QXsZbf6DetAM2LzGmxkEI647-pwVPQua-L-84L56GoAw9yDeoXxgyxyf40sbaPIiVLgl_3A4Nghl7uOnOX_1VnZL2X85zCkOZbmm5pZbuSeKesBYbX002PN-_P-P5xRv5b8dZzD0utGv4GUuZJXKJPhbpv8cuBUR0BYHKBQkmOzOBxgCFCDtX84VkZcrFwmQHcS7zmjgqEl39UNrqq6NZXW6HZDyi_SSvEYV7eJfJfxnUUF7RJ49RtSbC9n0AkzorBi0mSMnCC_A1zhamNLjT1-tj4E2a1zI9YsBZ8lPv3t7a6U85iMYjl3kCPiAXkRIDVBihBK4ki_OEa4v6kNBEgXNMuFmd1l8O3WTqZRSTLek4yH95V_uE5DQ9NH52pkgrN7QOe0QXxZ0aErqjkSQRbbhFVVRYp2VN7QpvMGZIAtu_mGssA5Id3X1ZsLEU9zGNibIzAmJdBjS98fVj2MsD-4qZmzlWiCGcC5ko2bbpTrFGtr4r3-SNc4UMOa3dsdyrRlnK3o_tbXbPN7c1H44oneAsqWuekfUVFGvCRm3yA0X7njFB2l8tSXkAuophgRUlWnzp4mEMcpFRwEX3WEnK9hPqXEhdirLtC18yupkKYBtIpCIT98zgJNb5TRbfwRplInEG1E8dk4gCbwyXCNu67QEI2NM2yqCHc4P5rWhwTGAl30tmDQ064ba920L9ZV8d6PgpBHZmUxpJ-JUZuYMzXfCFdlBQANdjtuxCy3-Pi0-cO7UEA84WN-keYB-kHck3aPpeTG7-lv3je0N-407H_A1TKUqkSknjlmwVdL3h41bbGmqxFGizNXfq-uCGUD2tWaZ-cdmZZtGXxgEQ2z7_tLur28eS1tlx43y9CKtKPPJruJm_7BljMOCMPnSmOJDI0JnoGpjNRqzKbSuZFTihaQSBo_Vc-NxRpFwM4xJgq3z5eShb_WamKw9uYrjCBEEwYFTW2QjmiQJtM9eVHBuLkfOVa66YZowcCvL8aCccsuPbe7KBMCD21IGzH4nlhfgUKa1cTAUiWjRSgn6SO5Wqahxs7dEf44F5HvPG6XUy9HFOe-d61ZE-tJQsHZgssQWqV1UfPsccqgyWIc2yv9aK4pPpu2lcrlGu8aDZDz7pBD-dPUG_B9XWt5c0CQj4CCnURDATNWqH8J8VvKap6Zn7pBHW_PxNSJ3f0z_l-GjBlx7U4w6XmOMBtJK8lE_Y8CuuQY9dNVnTGMPibCeJt7M_Q9-IYcqhriUh7Q5WkCvDVu8157gIRwwUAvgqsWcD2msXtO9svRkXKxNxYFdW7KolF-y8oxXRPwVJy1bf89pAOa8djb21ovJuJmbvrRzplFGYNj8rGZ2hXenxDoYiKv71LGALVU63mS9q-Y1zfTHCPpA-Rw7oR6T5G_Q35H-elaA_u-vkgh64mQNP5sgc_kpwbVlM0wSl79Rc
ExnmBTpA-kn7B4w_QPwt185WD9jQRjhh3LMQa_crf4nCWLlsYcDCyB07TU0vXQiQ3nynqsX2MstUc2DaiseVG1SO0UEv8oobwLhnSvl3n8zWMWq93NSuISAsaWmqriNhM74aSHw4CVPoO68RSSdNrpxaKGf8kuO9Xy6iLr3VPE_vyMJDq65q42AEvKqP0TCoFUzXA28Tkrg0tsMLsXIhuT5MGtO3O8RpLnthF9vT0lM64jMp9_QSH2BuWYtwgok7xk3gRX5yBQeksAos3c7Jn2bLM9VNrV9dLi7MH_mRl5C64b0Lgj6Zi1USCyyPhL95ZJIvdxLWHSII2RFbL9ToCThKp_cgPZklLAVJXBeIOqG09pIQ==', provider_name='openai', ), - TextPart( - content=IsStr(), - id='msg_0e3d55e9502941380068c4aada6d8c8195b8b6f92edbb53b4f', - ), + TextPart(content=IsStr(), id='msg_0e3d55e9502941380068c4aada6d8c8195b8b6f92edbb53b4f'), ], usage=RequestUsage( input_tokens=115886, @@ -832,10 +829,7 @@ async def test_openai_responses_model_web_search_tool(allow_model_requests: None signature='gAAAAABoycg2MBei1jlOMd9YfezZ45PArjJAExhzJt4YG36vuQT_e4K6W78Awn6mrJEueCnEAbciBRoPBd8n0YMXbqTiKdgeceqAoZu_UJAVWxgY7tVDlkg4e8BgJ_SrAumbi0yL4Ttwy5yZNU8g1aICCSdjGqfI0cmVbJpXEyCU8Wt4UKV_912jaG62vA6Tlqii0ikc8UItcrgk94TEGpOEQXlG1HXsWyAryCvOMSM2F785Q4Jx2XOrNv4klRPEZGUeIbp4ReTVXVi0JT-cjc6O3gKNxN6vxzUbvPhmcyTa9UogLuCTHjv3KpcIvBOw-_pF3Z02oQE0GaJKBpP4SJLE2yZsIII4uMls7Lw07EuHZjsZoCQRg12dRle6rwba7IeRw0RJWYEp9aavT0Ttrj69dO0e20NpispmeAXLh0xxrRCKcjxAn6c5XtEbJP54_ka1FUSVY4x8IaU_pCKI85fGmHIx-HarXBtWzZO9B5O1K4Pqr3BE7LELTXaMwWQ2SU-RGsvgmDpmUZjwifQ2YgamjIJPt0UcuGWb8BTwssP81XT5mQ2Tsq1YjQmgfzeF28yeb7XhkEaBUNejSou3SuEXZ9aEuSaMz62gzPSpsSrr51QoBJpMBF9Jd7LXuFJwaQV7jP9NJawF9GT-CMWj2IOXgVca7cL_d99IMSR94vNyg8yPzDsncJZ9Dw3HXFsPfdGHtO2FaFUB3RRZAVKoHy7S1NTNfLxdtB-p0eDuu1JbcsgtULWC71E6TbPxg8OguiEgAPTXJviUAed6udruUrSMlZQv-AgRYfxYPPMXLeUIWTTUo6PKICy_PO3U5CF6VBkaNUvCLf317L47FCeEAJNTb9Uj_S67ZqoAnEG0tQG7tVPuN13cy12xO2-8xFQSpO7gg0DzF8vCD1cAcKAvo0FUEnIeXOVHVQxThLHDiXOmB_ZpoT-qJYb88RTLNoAq5oI0ZuZYvPHJ63EhVjaANKwNe4DrfAvoPpf0qWiBOH2vHxnlIJc84pRh33ixB-azK7arhetqwIuLhDo4u9REcD2avxew8rDEOTqb5Tk02hhCKX9drLYCriNdkQh3mrC3KYzOWZ9aebwOR1c-s54KbvGDHAjTNPCLlROf30MmTON3jb-NW15YyzQrVFfV1c-egUiWRwMVE3KeWi4wmicK_QGMZkdyEqZMSzNcgOZMFfUWxdUKxACHY5J_7lUZltrz9JnhsfuM7KMuEW3GMASIP8f8WmR03nleJTi7k2
1oLtX-xz1gjble9WzSzd5pTz9GrFw4KWatCyrLXtKWw9fAqm_k5HpIJdya9KK3jNve6MirP6jdetIUNIbN3MGkMJ8lfavyTaa6-t4hsQSmyTQn6OKwhK_PA8-KTluNMW-dpqZU2YPFYk_QHYW6EJe_Kw5aOq-zpKR3hGgoHm75Ossr23QERsVgP0LChljPzR4OQlce1GMDtRNqLX0wGu1RO7OdM9R_lqJWMlIaAa5wfvdH5LznaQV1vuGPrfpzGL4mlocKDv8ASvrxA4bm5fWBoqsfzcLu-H8uz069vLDyHgrPNse6W4Ex1BVY6By0K_f7sidbmc1FxwP3ypVv4nX_lncg6RiZzaQTHTxXJFmvVO8_L9XBHJcGkQGpEuEjx2aMTWZGJNxfaO2fKJ8U3XflYVXJkSg5b5ixTHuvDYjCOELs3fTVAy50CuMXMoCEgyZlqZNg_EJXEmz5niLNQnwQPRWUbe3kicaLzJqvZrtrvPOPcTM31Ph2-_dfEOeKNOIE2B0pvMgTaFRck_xOc7s5J2tWAEYszDz6aMXvnvzm1WH9cXYLbgZPyJmMUxeGZ70DdnueVbrNr8VA5bzvjkgjEkhks_BQprXEAZL1lSL2s0O9G8ekgFnt75JBJmSFGT0twl-t1ia1BFkRtMGXLIj91xWJb2GsF6ZN9Uknfm0Akfk1STtRbxFIeBRlwQsix5rQ7EstyhfsBXiBILky2rSfj0UJwH1NjDskXjFxxpy-FEE7KRYwMws9rKKuMQMyURUK-DbLvMmQoxekYvqu7bJfWqxj3lndGwD1sQL78cpVVPVfJeqnlAw7k_xd6QdHg9DwSlGNb4OCYdFWT4xaaltFIJfo6g1Pay7HD8gWTrrgUzHgEWfbJxcKIXs1etHx1lxYVTmm9TFkXshmsbKptL7kAaxBy9JknSsGsh9gZXf3YFkocEj1xa8f8Xcuf3zatefAeFFh1Q629b0Sc-GzfXnu-KfuSyJzAZulrP1IQ0jlOiGP5hKnvzePVL_JZGTNJrJxmtWXejLodY-JzLzUjIeALKtyUsu1ELFtwDxyadPSsFW8qvMeolLcVDysGm8NkmRgLzQTBDGR4AcipdozZmElDRTm5P6JArLlqdZCxXpiOH2x4juPIYUfRrrTT2g6emTXHz_AurjFgYn55G6xv1YGSuM5tNBXc_WP5ya9cdpBIEYj1i05DIMsvUPsNAkt0MIeTiVSPPDMgpT4lLsR1ezwBMx2kQBJI6E7rmH9f3Abn5H6yeKQLZckAAru1SLkVwoDxcTTJZqD3sZt6RhBDuuMWX5ZoB21K-zkE3Tde6caBupWLK-W2eGJSJ_oOaG2YGQxL56irxU6DIVxLuMWUTOVH5vpqeo2RlrGpXu-lJkg3tC69gXlNd55233uIkchhihakwSIxFF1Ka-hcBlKtn0Kz7CXrXam4B0sSWjc9xGRfSOaQ6LiameoozXfhj8r_GSOwoV8EMa2vIBFggFGrPEzaczNkOKBiA-xTQtdEPqmfQNznuZ-B-VX-s0E0Ew2EopP4ljZ4QMW8k6pbNX1aegBBxbxkNc5ugJhBBoSVJeEAC2Lw3iCZUnX_leWUJBp2up09oJtRWlnGG4mLAu7nYsI7blues0ZLZE4C49v2eYBmfkeyq1DBAGXu0RC1qMz5729tzLPUEPYpKS1H7w2iGHQ9P1jBBWAAfFoqgn1lYtBF1ioxL7ry6YMrvCgTlqvVRXB7zmAUlsJdPq-CTWpF79YSco4fAhrDVCmxdS6Y4arD7p26YWk8PioCDt9ranaUi7--wlyh2OTdJPHAUHW2-o5NaXXfhqaIVfCqH1sbVmNwP0BRiAmUlwK7GB_m7dtEztYz1sHl5sXmXEDcFjJtr6uozFDjEA42F48AVuZMlQfQ3eJNSRqHEThYeyzbtCdYZ6J6ntg2XS0uDHISgM4zi1mDeur6-ZCw4rGwUXvB1BWXifFeh2miEGtvRzw3sa1zBKBCGtYtRsl4Iz5Pl
o9RNN8eQ_vvwmfDk2F-5YWsDZbpJuSXQXy1hjDvyM7TVGj4uL9gxFQ-ZCxFl9cufUeqfEGgHX38mZoJAT2emXbe4A4byFYvWfM-NxjpbNA67ZkOWgcDPtY853Y6dKoBihh49ZAzvmEjmPixKp2rBuNX26jJzhW2OJH91GpsncHGwJ3ajWht88XbKBp4Lb8sNVxYD3hK4c-mB95WYYaUKe5_ugc-PhC4FGu-FYNLYTX2ZxLKpk_T4uEG64zBQ0NbS9y8WWiTojeQ7b4-MBG_j3VJr5Pi0T0meC623J2ldwud3DRBZXB5q5rKgofFF6WqvwhIDi8YLL7CVUJ9aOE57SkUKVrYYD48Cv8Wv9piI2hbTgXwWkCpg_tVROBjl4RYfYVlOBV4pM1G5AK73PXfDGsPdiCxhmxHlvzanAm30eVKIctRaS1xlcBqLp8CUPkgnPDlPVclMagd1CjIlN4igMnFN9gDPOUckrA0-VBlg-EKsHG3o_jNMbsvgfXg8BuApc=', provider_name='openai', ), - TextPart( - content=IsStr(), - id='msg_028829e50fbcad090068c9c8362ef08195a8a69090feef1ac8', - ), + TextPart(content=IsStr(), id='msg_028829e50fbcad090068c9c8362ef08195a8a69090feef1ac8'), ], usage=RequestUsage( input_tokens=9299, cache_read_tokens=8448, output_tokens=577, details={'reasoning_tokens': 512} @@ -972,10 +966,7 @@ async def test_openai_responses_model_web_search_tool_with_user_location( signature='gAAAAABoxKsLhc5YCXvcJidIAJvFyzs2T3IwW0fie9oMN3Nk5fAcAP3apWArzw8DdWjWjR0tn8Fpw_H_xATFGktsCeA5nzkcKvdc0Bbu2bwMo2QUkQZfFcLHqlcNnAcvrw49XpolGFl-mu7hAyP38LGGtjbTBNRh4dHkd-hYZzy3nYd56JQi5GLS_KuxdU78xUW3gNOtAvrseTx1fcY2eseUcNLm8uDi8a_qDw16nFvuY31ZkrmuVCESawkppmxrhGFVg0Y99dgyufnSVXXMKyE89tmXMc60yZiaB1i5cIJQcZMkwupod7yZNGqmr1GtFru5uJq-bJfGx7nAEs50jUcu-rP-_ZbvptkuADDC-bfzFjaeq13wCih8wCXqDWqnGjqIHlFkBM6agn6VKOcuDC18L3caqcH3KEYT4f3TGwg_ZZjsiRDdBC-saqIduaAjjMDqMKx9XpmreRq5BLfC7fPjRykpUcWQQYbQ07J9pe0EW2VhZwoGtd1u96fmz55MzryX4VOWIwDsUTEZAoCzULvVrEBnzFqnfvQwejBxJX2XU4fIlOtT_XpOcI2afolh8KgitzHHpJ8Dr9ELI-Be2KEd6enxmdaPhgYUif2D8ZCVfOoXZEmrFBMQTRyuxtp9H0U3zGamEYuUxRavxkQD77HhmqWOSr1Agm8pWzAN97jxJSxxY4BEnjtrgp1mavtv4G7VHjrpNWrL-smZEWmnCPGKVxP9afrdSZYL-HXKY9yO6__0PR6DdX1o0JvUq1KFPx2dzag4eXDxb56HI5MKNr6J5P8Smmxxwoelx6UXEKw_hyFWMmPUHYD5Yw5dxrXeYmAiomYKFpG0bxVbuAb4_iAVliHkdIsOBcWoix0KLxmS-4RJnikZPMvDwLDWfENZ2sh9_RrQbuMBAgjHwlfWM_tww0ufm_aVdDZ1CULJ5Ki3ZxH_0oIRRyyB-a25q3DARnVzutgo32H9X6qjMb06ExMn--ndCinBglTTGvj1QOIJews6UMrcKj5ZPTc7GyPbHXvdPmPdIrtJ0wCqFj
4cgNRuxjiaZDSCqmEQERYyX9Fxu8tY4f7-Fxje6A_zflqrIyhLfzo1iMaoNbba4HNkzRMWba1L1fC8St8MO4ZuZTGs_60FwzSUmBDW4Gl0CcRAdY39BE65uEpKGZeRqDfxvLUelG9YlJTowqN8hzAYShzcPPkgWk_s1AtY0RT_roregPuQ8PQayvHcJzKqnijOIhRA9k6LjF6cnHj90d6fSzTYn8F27rhufLySe56n9SA2WDWhVcjsFEFAcsL461tjiQ5U0mjaFdBQ5H__s09dhp0NzhE35I4q0pzM2KI1YWgLnwlyPFnnfce9bbL81jvbXw8DDC2KfZVOGU-ZDdqIqF0UmwNyBaMYb4SonrG8vrj5bFmCMPSFsEeuDPv_bmD8HRx8536b30RmYD0K38Wf6-UoatMxzgMpgmwsBP6Wh0HCpFeIhjRsJLxYXeoafypcKJPQgKXJwuXVLi4iejXkrbjBdc2Sq2dqIVzzUhULLJSPBYouyjeyVSbYYp9WPoBNWj67uQsX7OUbQN1_qxopsPJdqqQynJIAtULNHjKrDA0GKpyZ3OUV660OkogPAWoxTVevRemwkIJZbr2hXyy0Nx6Xc1Vf9xC0nPclJ6VXapdnjK69bIDHxDUZGCh8UZt6DbcA7azBrugcXlbaMJzoHWkzmusJoTh_2UXRjrS3B33jsxf6LQnUl0s1ETo3Tif868zLvkTEtfo6btbND0FPDFFQrdeVlW4mUWEOJhPeOmwnDeLsafTfRCI_V_xTzgkpQxx7pVZt6mkYZ2qDTE--NhqgFfHPlw-nC4zU6klRdbaO8284QGlbJvHmdsmHi4AtMSWAf-_jegocmaneM1wUquNKoy6hnbkZFul9qV2c-_L077uC4nZYNjRay3lT_3giVH6Ra6WnBovt9ocCYIwSeygVAyqBHxo5EJpfyJhNCtak3bl-CIz2TraYqqUCiB0h1fyxIF7M0uENZKALtwqRVHOtEsN5JVotgv-8YzaBRFs3qvtjQn7eEcw-zrIg5fwMP7tDi8O3TXl6qPVWTCHMa1wkfb7OkfuwXREognLvO-3qdRgxinodvKyHn9XbsUcQMQjPPFMLOs4wpEhTJpcIFPqtR6tArjTT3P-T21mc8B56K1wXfEDvpU64XQ0HnfZWaqS1TbDyfL2i12ddhhnxbCV-0f3lUGnZVsfeGEc4FlST7iqUguhwPGb4mBpjBVFu2dv3DMCIPHew1v92gZH1OJqZJJVDUpu0vvFGTqxHz31LSX6lWa4gn2l6hvkT1e4aXkjHg93iy0ZXMpB0JqJbbWseZY0LDYzpH9noHq626Q9H4ZEKPo_MYBWSS_yH-V2_cN6a4HarqhcRwD9oT1QJ4_4AzWeFIrCZlClYbA-84H1CbBfQjgtRh6zTZLDHM2In2M8mKGyFSfeIhMHIcfPBTpG4flLBmTNrwwbuOP-0ss_bb5gxLeDsgU5xjwfaUzOWXudPJOEorz4t6Oc88MiRH42troV2fun6Uf7e7j1OQSGtTQ1kXf0rroz2ykDfVIXCefX_3io_xJ7ev9dH54CNlARSF6cVpTqzbyLWkA0BJeAVYcX2JW_AT-9VYTOo1Vixja7KtMAmMMk1E08japeGnoAd_a_4-bEfklFTChseUDgZhOt5_XtBiuQdPvJDorSQWQl8VCPKdMATr-EdUiZN54GSM46pdBr6p-Dg7LvB-zBAbTlm_6SET0O0k4RkkHxUCtgRMZQ52aC4brcym771djtWC-BbaR5CefibOoSo-i-BP2Zf-RVaS_MuFar0dT03zXdb0XuC2vuhbVPPF-7gsJez2dufEiU9LBhV3__zTDlFc-rGwwf04Fh5KuleNzr1QNyVPH9GZSS8jZkja6EcRfGn0X-oBr2oRLyxuL5vWgOdPadBOJGjIoRnMhCAxGla_gD_5m0qwF9CtWWv7ugW7YpATe62zE0O1icYDPwaXGovzTOeRDRn4BfJzgzwLRkP3-zOgF_09X41um
rq0TCnCujXe-JOhFuIcYx8IxOb_cCcfGRqGXeZYP7z', provider_name='openai', ), - TextPart( - content=IsStr(), - id='msg_0b385a0fdc82fd920068c4ab0996c08197a1adfce3593080f0', - ), + TextPart(content=IsStr(), id='msg_0b385a0fdc82fd920068c4ab0996c08197a1adfce3593080f0'), ], usage=RequestUsage( input_tokens=9463, cache_read_tokens=8320, output_tokens=660, details={'reasoning_tokens': 512} @@ -1048,10 +1039,7 @@ async def test_openai_responses_model_web_search_tool_with_invalid_region( signature='gAAAAABoxKsmZCctfduUbipds6REy8FkoOiADLcLER75WMHyO7PtQt26NIhcGkiXReZWucbDdEBRKk7_g9PUuu9g-zEBe6kIQwm4lwjxCGPy-rQdmfJpueznyPJ14Ood-wazqT9a8ab_BMFS7VLonsOjZR_b1gxcx5yO62oLvv1GnZfkEykIgRbGIBSYDWX6I55Sfwkf0JRaiOFgeHoOvQ6f2mdb5UetdJwbIFgRh9Bk-_l6goC-ONyElqvPxrh8zlLxqEhL-KtTVw6TPNW67QeYxekA4vdXseYT4W2oJMcMKp8aIfxYr3-ZWSy81UqGPD2PAfs1DoOYkWMHxt_VnZjLQs0qkO-JBPsWBFWEofZC1GxOIT6gd_dDvExBXkaFdNH7xf0OxsxSMWfyKSXMlq3kmVsDIN3hKImwfZQ171mkFEwgwIBeo4XiY58YJXmzyNXSs3c82gAeGpS8cOQw5shjC449uJZkixSaXmwOKwtm0z1MOVAp17QLGeD_2YVa-DZUy1z6xTqStuZWnwLDOz8HPL_rW3MXGcmC63kWmcTCsFngwR_IArcTd8lsRAXJghnEdOZDYgrU7uc7bbqO8W_PyzPDDnrAbcwo0InMqJ2BZErMXOmy2dEm1jlJPEn23PL26k8r_sKNCZpg-I-q8epjbF225NJ9S8g_vvqLsyCzo-WnHPHaFDMfUhRxU-ylSReCZO3pcjNJXAfmsNiBs3g272BtvNWn7GpDqlJL9aB9Erc79CpLghKKV9JiVRsr79aW-JSzn9gJET3JteU2MMCvRxv3ePPkmZUvQdKOmzZQMwQ8j0FQHd--4qMkXDdAz-lsUjitCKK0z2ES0oSnWOVVPoR5AVIUCSfg-yGwBWhKv6qIkMTsCYaCaR86j_hGlCSxNqYdbMwy7sr6nwqDmqgmcsiNkAVUAUeU7LLXmVfGDR9InNL3lNCICpmcHMd8YJO5A1wFMPHFgfXt3o4CZP1ZSjQjQuQ-Oh2AfLaAYSNbU4y8JDtKPiini_rWIqH1yykwV0Xt__QvQtj600ksUqij_zxbKnZKy_u3Ud5E04bNgTZ0Mq9ihVtPBlcDCtWSsp5U8Sm6JL0ZXV5XaT3CVG3T7Mj-kKs4yHHOLNoR2rKAGPTA6VRzaJDNO4goMeE7aIqWKhFTYMBcKJEGD-B2J2J36iZ2RNGo9JbxmUw4ZPMVaPPulSfpLvDptYEN3LX0D6L4Xu5iaW900EQ_Ym60siMB257NRxfVPb5Sg8hqxGeKKgg6NGa6y-qyVXvqjy4HA-ODvHLbiT2n75fTD_OE2CX1FpLgmpmKkSopjT5G1vv5qtXqdhigDy-l_b9Qxwvbd7XXD72EUVPzDVwMDBZNeJkylcCecaRVJZRnhmOMkGbV4WFrMxjy7eoYrIBQ6zytutBFXNkAb6a6UXdTrlOlzclPP4P81sp3J6BytVSaLJXCIpZ3pAM9aWVzfavRW22R-rIMbmCWT9hq-1ZDfjdglHN7yowAF_rjVGrgl02
wsh8IlLKfJreh7ughi9vSk1WMinlsiZfZynp33IfB3ayv00a_huU4oSKXstf1KaeQ1Z8L-ReCdPRwDYaLbP1ZT7BQAbXKgIjUsLdSiU3MmW8FVBdevLQq8AUUKsXxfQLS4TsjMYTNZ_8LkMcVeuwTDQTBYkBdyTl7jawXy2jujxDJe5mK3ZvvS_70sWokuPXkCApVFkJpNRDdcvBuoLG3g_KZ7dA0oQW9QHkKpd_-FEuUZFnL6-ZhjR7pe-EmR6gqJbuQVs19N2qho2pnNEe21WqAN-anBb4H7QN2V1ODJkW6vDDRH5sV8Ya7YYUScSI3TUASWH3MWapL1_-lRiXtVIM9Q8leFFIO_qkr8DFXoDOHp29HNa3gpQkjOqAFqX0VLg1Ub6X6C-kUbXWMcYIUoKNvQx5-Yhy5Lo0N6izxdE4Zw6U6Lfu90rA2DWeQ5-iae79H9yUy74jZw3bclkJFzGkydXWIP4OkKnDPemIKmsh28ovmfgtz_gJ99SlQDBmI6paH6P8wmHd7QvDQkMBnuACOnTnTud_MqdNUR4-qtcnPoNkFPXoTfYJNDDBkxvaEIXylqKK0wPf9aBsICsvB0N96nPpQTYuV2YHfIr8PagOi8wWC9ceUmDib8fMq3xgClujOcXOPk2Hh4Xuslecn315m-SoLjRg-dIdmTjuIyT9CrSdXMto5Jp7vcPTsRPebw41Tf4iR78BOTuGhbe_B7_WDm5FH10EptF1e3GZ0eO--VdgqLY3T3ivuoxtXIkTvDHvLHqNwFJIvH4ULUAIx3UGqJwE84_OqGwKBRT4UuQRm5wwZUZ0teyzOQx0cp7aKhsOkBzKY8jVFMmTBKin52ioD1inMiyBUYICYwYUngdYRmE5Qx7qzqB6Mg5CSW_7TaXuZFNVuVnitQp5uw2RrOlookLqyKYIQhruNjaUAvvDnhhIrTjh_Bi7f-wv7znhbJDE7YWy_zC_ufQj9VfxJcz6eXKu3fXr4EKlLayk2nwO5BkwaijetPdBNs4SOroEo6WfvFgVtbt-c6kkEfY5abo5zK6OPVHrpBVyew-A53SA0bQNptBVMNkZDiPczaviF3H3fnkMQH59RhIhMV9knjfCbAhP5BTmBFyFIXjX_ErOJgb3RtUObwjnifMNwN2hIE_-eMqk8K-jxMrT7xNoojwqcCgmzcY5w8hbmA77xW4ZnlBuTZORjFhppokfhLPcoVCcbt1AEWLc3oFYhquugqG9WZbS_7p_pI8C_zB4Q4x8MTn7lO9RZFufBeI9iTm6JP95asBuEafpQxP91ZAhfiU93UybWsoaKQb78PvjqwwK2D-LRumK6ftSMU3LNn1MBmiFowwzOLPxrkN4dzqF89rXEXJCuqS3jl9fEwKOdCvhpXyVRN6Kx5VBxSrY8KO9ItwWkrjHF4cWCTRVNePbw92TzRnzgLB4aEZ9T5TkIvdNgOyCQYSaOZ1TMSgO3a-i03avh9KisZcyt-gUbD11-EJmt_KOSeK5o-Jn3GmUKnZJJX9hKCOWCmN00qv8DzYCfIO9Bd6kfOXAqJJ0RFDHn6a4VHv4NrZNyXQWrX12_V3H4oHVZhDurhlhhak-6xoSC6KWeHFFlU39xzKx-2BfggTfghpTj4x8WiObhHvg7I6OY67vzfyRtJoA4muFzqq0c-RJ1QMvOXLGDEMJMSmuXxT0GOux0GvkB6VB4snKw5ZWdzTdm-maT6LBL9POZ8f2psW9CtE9tuzs1EfrBS9SHn9s_B6NHRCahEwwaIRFePU0v9mT3hhQoq_CawOykzNVGAPPAKyA8PNZr5GGmdmV7v0fWppgHUZA_sQPbq0XuxgoQFLJttwnCEf_mkS1zPYMYBv16U9G-kZQ25-rdHBFyZG-Wa6nBCSk7lm6ZNkDKSN7L-lBAVgpPgzDvXlCHaklZmQXwtNnBSPOZ3yO2-MBcDmSyoDbXpdM0zYZhMCyv0vMf2mKhEP91a2xD4tsp-Og6gAo0AXgk6Ge_be4
zhMaUxm_NdPGg65mkaSaOZqCuevYVh0En18B7x2erzzUAMuJoo5C8ab1yLVGZSKNda3z8j40JeqcaYLN-yS4RaGaNdva_pmCq0dXYadIjaoivy4TqnHig9uJtboQqBevHPq2xXdsSutQOyEEexxjYbEz1USu25bTvog4tJs5okxNWDnL_0vBXZTpYCGdVo2WcMJgwqNBp-CPoZjMxCQ9IM6iS3KKETc9U46ksBbN95ZSeRUoUUtO_i0AoBsxE9A4NFbK9Uox2RGcJxOlC9HM2n5D6LmOyIO5KaYl16sfmURTRlcNpgTYAvat5HbfDYMFrH9EgSxu0y735-2wvZSuD0credILM3XFTyBmM7-278If-6-QaDX7zV9JxJaXrXx92T-srNH2Z5DLBOJDkl7oo1lVGKcFAmEgHjnkT_rPt8DvU4tlh0eI8HzSe7B35oA02GJE17hiWk-_VOUG2zNaOaesGK437EOzcCcc1dMZAtN206qPtzDZsNPhQNEBUx9Ta_jPG6waGpwihNxVfhwVvrR0zFUy1IspR9B1ONXttsi7nQ0YAtDSJaBuUgwwtYk2KL4QqRAixv_KSma8mOfuxs0th-sTyFGQ5f77q71ZcLUeYqVqrsjcDsh0K9pDvj4-KXcQXgd6EzY8zfh7VvXOHIr2aHBcHk1tw9zjYAR19sP87lo7YdVNrYlB09IkCICT9N1RSWJHUsszCvP0oBSmdNPfelx1CvHlClrc2qNGcyalsF8hc4wnG3mrYIC0rb4sHLc6Xp47g7vWnXH1ud169K4dB5YwnLam08lPwSYJwqculJw5d_L2egSoNIdYGvlvH-4prN6EkkyiqmZCHXYSNoKorU-ce7cRpc6mbxxU6CLCS_1FhlgfG_mZFP-KAZ3b-lQVdimYcudQeCgtjaydeAcUP4raEP_Wa3bhMB-GK90eskPs0cZgeRDvwohATR8ynHvxFCAeoiQcL-3bQgdOhZxY6r8dn6HF3RWWaeA6o4xS0XTlxecl4rOXs4nJAvn3jGZ4VmU9qkYcoVBW44IkLnbx0q07n4rRiurI4596rknVRJwbeb--_d9l9gSqn_ZwIHHyO4tk9np7I8yMTGp0j3ea_GbKrss2_8gU-XDU57ihgCQyOrAcyyfljyHTE6m-upNK0glJ-2m9r0ktOToCN-6ve4H3trSNvRL26rmH_WV8d-gwsF76cPYdlCZu46pC3Ib_R4sHUeBjg39ilY0IxUTOsLz-34NuMeKKnaViX68pZw1XzMLb7ZJOYhe0AKKO4Yrrkwpwlqvbpgd369PENtcqdakdbn44wKOfp49d9czQYQcYlRK3L08MhGsHXuDTlUcqqEYSDpwM_D2__AicfRazviJzdWQQMNJHA_0COIuhQ4c0dbPOOZqCMM9BxQe69fNlTfZEpFL2Axh_6-TqEXdqU8CO2fYScvQfuXZ2AMbmit46qlhUJMj5082R_XYNwIR_b-QMqm0e6aI_vZRVw8MwdJHG73Z_u4whBIR36VHrrK1qUYLxC2pYyLOwHlPEYlyN7HlTs6i_iJ9z4TQuK_mk_b1bc4-1XfgQUU8ZfjYPNoQNII_Dtym-9k7Ukv-pU5Nk1lItlLk07wiCcKMlui8Y-23K9mb03O38x9ZhN051SusVM9ItehAp684sy-kb6MymRW0LsXXIPdRc9LxI85RZ3aANfAtMaHbRov2jpVvZT4OQhTQIJLg3656y_NG32DJvFQoBLEgfFCTKYQgpKWmbxj1gRsVDrdk8EBF3rz1ohyUfxqyrHSYM39YGs2bnk9TkvaOaHOluV_ZoY-qIDysJ_p1eKxJVdpF2VCxZ1ctwuKCbVx6pl6XLuN-g2KaJnpgxVcVbrnxsgLrh5OGeDuXiBFYeLYaF09wFBHTHF0naw63TgB8jy61c5r7_y4DVAiicoSJ3B8SJxEmB5qgXVse_vwmKOxvULXcgU9XLaONbYYIUulkSNOSK_x_xWnVRL7yWHj9xMjWTvBXg
Vcux1CmehPPQ7dGhooXgzCoipDZ_y_sRl43wYZiaqG7Nl79ciyfdwi6xKUb0CgLQp1D2Q90bHKRUV1Y1IdcIUl-atTUcMGYDyLKmYQQ0BWvqXeaZtHra_yDzoIlB7rR9Hg9agchVJsUA46egTwwvlHdiYPIxJidKAQFgpDospYReegQxCIZHg_PI0FPVfXBfNR2Vc8fIrXiNwzPi4jvj83YmDTvTJ1xBLYDao7QzDQUjkpl09EnP4UoGlvFYlrXH0Ev1sWz_svhFVAduqJzHke7BW5b7gYipmIqQCvPgehCMuD8-NkaEAtE613V6BLPTu51IPtkvFoS_zSRCkLnspDFVTeDToBKQlN0-u1LlMF9f1dQDPxBE8ZLacKFP2F6lezHhikzuoJTyfCzF0xT4nn8alqzDzRV3K0wAl_4NKjhwSHz9i8MRxPo1WEfO8Xpt1aKa6WIbZ2rr5ayhX3H4ASPQ7UDoMNrRZP82lcAerRb_j7wyL57W6oE7VetxnmbexD15h_7LukUqUNSSgg6D0zxX2C23EhpBaQ7Bw4Va_costesVZBuYwEig3VR5Y-9WvmN0CuaeE1oZkXJ5zBCBgO5F_hIESxHP9zx9Z4fs7fswQDJHaick1xpSSZNDbBghUqlswGvI4TTtUWGPc5R1mf9dLQDF6j5wTo1kycMpfXIUF6hVqZRlKHgP4DRetOCsAgb_WMW0b_GCVyK8JyeZsTSXN547g8Q6WMRYikbZDP25hglrI5hU03GLf3m2WLJAd4eKB5e1nlDhIqAGn289gdttwfe8rUzB5BhdSZ6BcaWAEVp64EHYFmtco1aBleXa0RVlSDS6gt7U7ozAp0YxkBW7YlqXxfM8A8y-Dn8LkKewv5p7q7yL5Bkun5Cy7rZ_FPQ_4ktHUr_RzqpQbgSgtXwOSyCfoDKqIPNg4AhjaI33nD93HuRQeV_mhxYwXN5GNTq-7SxkulMwTSgg7b2UhmOSu87pX_FMk5nFaglzYzHKpoZA3QuNxwHzTVInF8Ufu6fAIOPT5fEuhfilDU3uxCkpC-us4yeLwm8e36ICJZFfcqa5dXHkFezEXPKvFbhpVgjTO-TI2EH_vb4QcYNQxtQGWUqFcuQ7IaIgYChVS7ifjkPc65wR9ffjTEEqFAt6e-_mviI4ltyiTLTNTWY68JV64SnjeMQ9qR9gPYmefUp_E_LyOdwfetRYKBJ81jAMz2piWNoJHwHbFjBxeZj8iZ34TnirgvWRltUi20aN09b8TN_IbFNPFjkI1UwshqMwLY9GXT4eq0QaIdvhW9CE90--KNVjGvqyRLodo0gsGTpmTcoTPDgF_AuaeDlaBrbAnW-pFr1HOV5YqUGja5_vkDvi9mdKooFrlSau-Dt1HmZf81izJ8odFR-tHl0u-wT66G0aEkk1DS81IXvSLLNAQlIpj5FoZYx2RPFWyw1WBlY8iSa4r6HyN5YKW9taJ7ljUliA8KClax8VM282lqYL5Fd-wtYu5Iceez8jGGj4cZ7JetWp6X-wjLHeo6SDUGjNO7k7h3ODmCRnIKJZVtbx6qJEVX1u8J9mIAXEjdArqa_7YiUBTuka0W7IxVXZUx9R96h5f', provider_name='openai', ), - TextPart( - content=IsStr(), - id='msg_0b4f29854724a3120068c4ab22122081918f25e06f1368274e', - ), + TextPart(content=IsStr(), id='msg_0b4f29854724a3120068c4ab22122081918f25e06f1368274e'), ], usage=RequestUsage( input_tokens=9939, cache_read_tokens=8320, output_tokens=1610, details={'reasoning_tokens': 1344} @@ -2230,10 +2218,7 @@ async def 
test_openai_responses_model_thinking_part(allow_model_requests: None, ThinkingPart(content=IsStr(), id='rs_68c42c90b950819c9e32c46d4f8326ca07460311b0c8d3de'), ThinkingPart(content=IsStr(), id='rs_68c42c90b950819c9e32c46d4f8326ca07460311b0c8d3de'), ThinkingPart(content=IsStr(), id='rs_68c42c90b950819c9e32c46d4f8326ca07460311b0c8d3de'), - TextPart( - content=IsStr(), - id='msg_68c42cb1aaec819cb992bd92a8c7766007460311b0c8d3de', - ), + TextPart(content=IsStr(), id='msg_68c42cb1aaec819cb992bd92a8c7766007460311b0c8d3de'), ], usage=RequestUsage(input_tokens=13, output_tokens=2199, details={'reasoning_tokens': 1920}), model_name='gpt-5-2025-08-07', @@ -2279,10 +2264,7 @@ async def test_openai_responses_model_thinking_part(allow_model_requests: None, ThinkingPart(content=IsStr(), id='rs_68c42cb43d3c819caf078978cc2514ea07460311b0c8d3de'), ThinkingPart(content=IsStr(), id='rs_68c42cb43d3c819caf078978cc2514ea07460311b0c8d3de'), ThinkingPart(content=IsStr(), id='rs_68c42cb43d3c819caf078978cc2514ea07460311b0c8d3de'), - TextPart( - content=IsStr(), - id='msg_68c42cd36134819c800463490961f7df07460311b0c8d3de', - ), + TextPart(content=IsStr(), id='msg_68c42cd36134819c800463490961f7df07460311b0c8d3de'), ], usage=RequestUsage(input_tokens=314, output_tokens=2737, details={'reasoning_tokens': 2112}), model_name='gpt-5-2025-08-07', @@ -4008,10 +3990,7 @@ async def test_openai_responses_code_execution_return_image(allow_model_requests timestamp=IsDatetime(), provider_name='openai', ), - TextPart( - content=IsStr(), - id='msg_68cdc398d3bc8190bbcf78c0293a4ca60187028ba77f15f7', - ), + TextPart(content=IsStr(), id='msg_68cdc398d3bc8190bbcf78c0293a4ca60187028ba77f15f7'), ], usage=RequestUsage( input_tokens=2973, cache_read_tokens=1920, output_tokens=707, details={'reasoning_tokens': 512} @@ -7067,10 +7046,7 @@ async def test_openai_responses_model_mcp_server_tool(allow_model_requests: None 
signature='gAAAAABo-r2bD-v0Y3pAlyAEK1Sb8qJJcJRKSRtYwymHwLNXY-SKCqd_Q5RbN0DLCclspuPCAasGLm1WM1Q2Y_3szaEEr_OJalXTVEfRvhCJE1iTgoz2Uyf7KttZ4W92hlYjE8cjgdo5tKtSVkNyzTs4JUHKRHoDMutL2KivjZKuK_4n-lo9paJC_jmz6RWO8wUoXo3_fGxjliOGnWyRXwEPmgAcEWNOSVgCgAEO3vXerXRPLie02HegWcLMtK6WORDHd02Kr86QSK3W30bnvU7glAFX6VhSSnR8G0ceAM-ImoomQ8obEDyedX1-pYDKPOa4pZ5iTjD24ABYOwz-0L7SNziQJLycwwsr11Fj0_Au9yJph8YkNb2nAyFeiNVCRjKul51B7dZgz-UZ9juWO2ffeI0GNtQTYzf46_Y1t0qykGW6w59xjmBHTKf5SiSe0pqWxZ6LOLoPx01rX2gLaKgNZZiERSbO0iwbA4tpxb9ur-qeFVv5tS7xy8KFYOa8SPrypvFWDoY6CjSwTS3ir0vyfpbJy-n6bcYP_pTwDZxy_1aVkciim8Tmm_9wYgI0uY5kcA9VYJuyc4cg7S7ykTUxMZz7xiLMf8FoXl1gHbVJrYriyZzh2poYTWlcCuSCiUaXhQKxcxMRrt_P7WANx0n68ENQ40HkoJ6rThvWUuwtmEYqZ0ldh3XSFtyNrqha4PQ5eg_DudlU_5CxyykuzWmi_o5MEW4_XW4b9vdXg1laqx4189_jEuV_JPGNeL3Ke4EbMbKHzsiaGePRZGgNutnlERagmU4VFTeoE5bN3oHlR_Au4PeQxdb7BuBmZRDDCnnIRd2NfSWb7bgfUozkA4S6rm_089OlRBeRVoLtA8zZZinNGtOZl7MtkLnoJVIWpF1rr7D_47eWSyyegUIIS2e5UKLJfCLkNgSlWPU9VquHEzSfqeHfzoN5ccoVwrvrHmeveTjI-wIJygdfuyti5cMgOOkAtLzjWmbs4CjmlWcbZKeidtDj5YpCSmYAGFuZze-cSbNjMv4th639dCu_jmRMze-l2Y5npbRwMqEJr7VLXghmLc1vhOsaQM3gxoF0CJJlmvtR4jxPqhE3694YRva6LS1WjR4oueM6zfpVeB2kC0hQgqaL6MiwtTRYFfuCzEHi18TwA5bqqkfgrDXedmjAzlEGSZFe2EBRlF_ZtagrVVTCagHQArnH3DkVQMEDCHCqDxA_PINR_997IxeNgGPsvazVdOOBef7sO4rvAWrC94nIlt7d4aViqbTNMW-W8rqjGFOqj1swrM0yoX5y6LY5oXPc3Mu35xeitn_paqtGPkvuH6WeGzAiNZFDoQkUdLkZ4SIH2lr4ZXmMI3nuTzCrwyshwcEu-hhVtGAEQEqVrIn8J75IzYTs1UGLBvhmcpHxCfG04MFNoVf-EPI4SgjNEgV61861TYshxCRrydVhaJmbLqYh8yzLYBHK6oIymv-BrIJ0LX222LwoGbSc0gMTMaudtthlFXrHdnswKf81ubhF7viiD3Y=', provider_name='openai', ), - TextPart( - content=IsStr(), - id='msg_0083938b3a28070e0068fabd989bb481a08c61416ab343ef49', - ), + TextPart(content=IsStr(), id='msg_0083938b3a28070e0068fabd989bb481a08c61416ab343ef49'), ], usage=RequestUsage(input_tokens=1207, output_tokens=535, details={'reasoning_tokens': 320}), model_name='o4-mini-2025-04-16', @@ -7727,10 +7703,7 @@ async def test_openai_responses_model_mcp_server_tool_with_connector(allow_model 
signature='gAAAAABo-qEECuiSxfvrR92v1hkqyCTCWyfmpHSaW-vVouk5mOTIFDvaBZdVTFH8-dJfpwEG3MCejRKh9V-I8mrYAjhudVr1ayHo8UYOOU1cfVc6w3wsrkL8hXljjE-amiJhBSjvRc2nwwGtgYpDxOfWTqJkaUvFnMD6MrS4CwMrCBbDOLYZgM1cQbidtrrtpP7D5u42tR6coC_PCOqwPzDN4f0RggrxVxh0038p81VUmlkUeA2jWzRyFpeDGRjXFk84Og73rXAp7EWQv7TmzgVXBjCVwwzJNU8HCZ_gkwh5dvL94QxBx32lEmfOOKcqA3hN3FLwDqXlZ8f7jEqYInnpILQgX5XMdM9OrCyXmDCr_eIy00cjvxnTcXhCnZBOaKCKmTP74yUpGNdLbQcr4BalTiviNYEeCAhJyRo4KnhUZbBoT7MB5NULf-kqhRo1gEGKjWiLdV47PhR7Z8i4BK7zBceganMKpLtzIMW5a6JAujC4Z9FYxcpJZI_CD9NHsPr4SjKgIwv89d6BYo89-xfflF6ZUZBkuDUnL2-Nc9CKgGuKlcDunvYLr38pzA278OFYzh9T42u4SbS8KkSXKjGU3H8LfpMnBEZigriixLt5vj7qnWmZvCFarzxT4U4qqR1ITp5rkO6G9kYvBEfS7wu768mteDBgAajUaeOMQEfjJRErC4wfzbB89YCsXPJz0JE90QZ5LeiP5ZlVezTTaddG9JmiGsBCPckqUb1LWdpvekCfPkePF_uDMVWyJpQ4ZBzQsZx8sHf5spygsiQjlzTiriqwhoTcPuXoONoCr9HeFX1Qy8SGOm87siRPAD7FHJdDxbJwq8tOlMpx8MH1dqEY07lwoxZB0GQ9XbB7QJXfQR_27nkpqBYFkrbqChNJLO2x8gNFClbB0mgYQE1CRy64y6yOrG3CtS53RK5VGrF1GnqwuWdZ452VgShT5nAmPFRlRk1S9px4eMUTAozT0QAYrlHQC7b6I6K3m_Qe3kXGpnn_87i2eGG8mHmXG2FvFChkgf2OU7-LRy_Wl_u-ataICeoBwfngBFMppvUW6tJP009HK7mUE8P1KJntN3ExKLIBhmKhV6ziBpIi1bSTmd8leYqfSaf648c7-sVuDRx7DzxTp19l3fwVFa67GdiagZFs7xaU1HxMnMc3uy5VKWAH_qcv-Mga3VCTtTPpMTjvB95nsLeOFjS2FtpPvaP0N6o5kkkzW7cteWpOHhSX0z7AQA7CqgOCQLfLUc7ltVxnOH4WdHoeZFah_q_Ue6caf0kNo4YsTfbRDdzsW70o8P5Agr-Pgttg19vTDA_eBFur9GDKIRT0vYMWPpykwJBDTgJKOFW6uyNkqNWk_RAAvleE9pAyOoSmgomyrMcnnpdeYHNxeNxvTWFC3mcKSjJIB316wypPvaGTJyaK_pxJScD7CtLrIPkgwPpOsJnDySF6wGe-fGsUMt3zxJrc-S6fp24mYVfTRZbjUsP0fJgLmCohJiAtEg_xvlQ8sPyuLoLdOdossTQ7ufl0CwVn4f_ol4q__gpTvYVaoGsWl3QmHul5zj7OUAn7of6iBfCSlXbrauJvMyNYt4x_dLM8SXTRNPe-ZMDmER9DOw0KJXcUrpl6uw4TphKmUOK6KrxqshujXdN9VDgOwD7eKqIHpvC_6a2R6sS6ZHcebmh2o3bic-Hctomrbv03OQ==', provider_name='openai', ), - TextPart( - content=IsStr(), - id='msg_0558010cf1416a490068faa103e6c481a0930eda4f04bb3f2a', - ), + TextPart(content=IsStr(), id='msg_0558010cf1416a490068faa103e6c481a0930eda4f04bb3f2a'), ], usage=RequestUsage(input_tokens=1065, output_tokens=760, 
details={'reasoning_tokens': 576}), model_name='o4-mini-2025-04-16', @@ -8028,6 +8001,7 @@ async def test_openai_responses_raw_cot_stream_openrouter(allow_model_requests: ThinkingPart( content='', id='rs_tmp_2kbe7x16sax', + provider_name='openrouter', provider_details={ 'raw_content': [ 'The user asks: "What is 2+2?" They expect a straightforward answer: 4. Just answer 4.' @@ -8461,10 +8435,7 @@ async def test_openai_responses_model_file_search_tool(allow_model_requests: Non timestamp=IsDatetime(), provider_name='openai', ), - TextPart( - content='The capital of France is Paris.', - id=IsStr(), - ), + TextPart(content='The capital of France is Paris.', id=IsStr()), ], usage=RequestUsage(input_tokens=870, output_tokens=30, details={'reasoning_tokens': 0}), model_name='gpt-4o-2024-08-06', @@ -8844,10 +8815,7 @@ async def test_openai_responses_model_file_search_tool_with_results(allow_model_ timestamp=IsDatetime(), provider_name='openai', ), - TextPart( - content=IsStr(), - id=IsStr(), - ), + TextPart(content=IsStr(), id=IsStr()), ], usage=RequestUsage(input_tokens=IsInt(), output_tokens=IsInt(), details={'reasoning_tokens': 0}), model_name='gpt-4o-2024-08-06', diff --git a/tests/test_agent.py b/tests/test_agent.py index 8fee907f24..4ca09ce7d5 100644 --- a/tests/test_agent.py +++ b/tests/test_agent.py @@ -4735,7 +4735,13 @@ def test_binary_content_serializable(): }, { 'parts': [ - {'content': 'success (no tool calls)', 'id': None, 'part_kind': 'text', 'provider_details': None} + { + 'content': 'success (no tool calls)', + 'id': None, + 'provider_name': None, + 'part_kind': 'text', + 'provider_details': None, + } ], 'usage': { 'input_tokens': 56, @@ -4799,7 +4805,13 @@ def test_image_url_serializable_missing_media_type(): }, { 'parts': [ - {'content': 'success (no tool calls)', 'id': None, 'part_kind': 'text', 'provider_details': None} + { + 'content': 'success (no tool calls)', + 'id': None, + 'provider_name': None, + 'part_kind': 'text', + 'provider_details': None, + 
} ], 'usage': { 'input_tokens': 51, @@ -4870,7 +4882,13 @@ def test_image_url_serializable(): }, { 'parts': [ - {'content': 'success (no tool calls)', 'id': None, 'part_kind': 'text', 'provider_details': None} + { + 'content': 'success (no tool calls)', + 'id': None, + 'provider_name': None, + 'part_kind': 'text', + 'provider_details': None, + } ], 'usage': { 'input_tokens': 51, diff --git a/tests/test_agent_output_schemas.py b/tests/test_agent_output_schemas.py index 5c63343126..d08595428a 100644 --- a/tests/test_agent_output_schemas.py +++ b/tests/test_agent_output_schemas.py @@ -413,6 +413,11 @@ async def test_deferred_output_json_schema(): }, 'tool_call_id': {'title': 'Tool Call Id', 'type': 'string'}, 'id': {'anyOf': [{'type': 'string'}, {'type': 'null'}], 'default': None, 'title': 'Id'}, + 'provider_name': { + 'anyOf': [{'type': 'string'}, {'type': 'null'}], + 'default': None, + 'title': 'Provider Name', + }, 'provider_details': { 'anyOf': [{'additionalProperties': True, 'type': 'object'}, {'type': 'null'}], 'default': None, @@ -532,6 +537,11 @@ async def test_deferred_output_json_schema(): }, 'tool_call_id': {'title': 'Tool Call Id', 'type': 'string'}, 'id': {'anyOf': [{'type': 'string'}, {'type': 'null'}], 'default': None, 'title': 'Id'}, + 'provider_name': { + 'anyOf': [{'type': 'string'}, {'type': 'null'}], + 'default': None, + 'title': 'Provider Name', + }, 'provider_details': { 'anyOf': [{'additionalProperties': True, 'type': 'object'}, {'type': 'null'}], 'default': None, diff --git a/tests/test_dbos.py b/tests/test_dbos.py index be1bfa7c66..9ed7709e2b 100644 --- a/tests/test_dbos.py +++ b/tests/test_dbos.py @@ -322,22 +322,22 @@ async def test_complex_agent_run_in_workflow(allow_model_requests: None, dbos: D children=[ BasicSpan(content='ctx.run_step=1'), BasicSpan( - 
content='{"index":0,"part":{"tool_name":"get_country","args":"","tool_call_id":"call_3rqTYrA6H21AYUaRGP4F66oq","id":null,"provider_details":null,"part_kind":"tool-call"},"previous_part_kind":null,"event_kind":"part_start"}' + content='{"index":0,"part":{"tool_name":"get_country","args":"","tool_call_id":"call_3rqTYrA6H21AYUaRGP4F66oq","id":null,"provider_name":null,"provider_details":null,"part_kind":"tool-call"},"previous_part_kind":null,"event_kind":"part_start"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"{}","tool_call_id":"call_3rqTYrA6H21AYUaRGP4F66oq","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"{}","tool_call_id":"call_3rqTYrA6H21AYUaRGP4F66oq","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"part":{"tool_name":"get_country","args":"{}","tool_call_id":"call_3rqTYrA6H21AYUaRGP4F66oq","id":null,"provider_details":null,"part_kind":"tool-call"},"next_part_kind":"tool-call","event_kind":"part_end"}' + content='{"index":0,"part":{"tool_name":"get_country","args":"{}","tool_call_id":"call_3rqTYrA6H21AYUaRGP4F66oq","id":null,"provider_name":null,"provider_details":null,"part_kind":"tool-call"},"next_part_kind":"tool-call","event_kind":"part_end"}' ), BasicSpan( - content='{"index":1,"part":{"tool_name":"get_product_name","args":"","tool_call_id":"call_Xw9XMKBJU48kAAd78WgIswDx","id":null,"provider_details":null,"part_kind":"tool-call"},"previous_part_kind":"tool-call","event_kind":"part_start"}' + content='{"index":1,"part":{"tool_name":"get_product_name","args":"","tool_call_id":"call_Xw9XMKBJU48kAAd78WgIswDx","id":null,"provider_name":null,"provider_details":null,"part_kind":"tool-call"},"previous_part_kind":"tool-call","event_kind":"part_start"}' ), BasicSpan( - 
content='{"index":1,"delta":{"tool_name_delta":null,"args_delta":"{}","tool_call_id":"call_Xw9XMKBJU48kAAd78WgIswDx","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":1,"delta":{"tool_name_delta":null,"args_delta":"{}","tool_call_id":"call_Xw9XMKBJU48kAAd78WgIswDx","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":1,"part":{"tool_name":"get_product_name","args":"{}","tool_call_id":"call_Xw9XMKBJU48kAAd78WgIswDx","id":null,"provider_details":null,"part_kind":"tool-call"},"next_part_kind":null,"event_kind":"part_end"}' + content='{"index":1,"part":{"tool_name":"get_product_name","args":"{}","tool_call_id":"call_Xw9XMKBJU48kAAd78WgIswDx","id":null,"provider_name":null,"provider_details":null,"part_kind":"tool-call"},"next_part_kind":null,"event_kind":"part_end"}' ), ], ) @@ -348,7 +348,7 @@ async def test_complex_agent_run_in_workflow(allow_model_requests: None, dbos: D children=[ BasicSpan(content='ctx.run_step=1'), BasicSpan( - content='{"part":{"tool_name":"get_country","args":"{}","tool_call_id":"call_3rqTYrA6H21AYUaRGP4F66oq","id":null,"provider_details":null,"part_kind":"tool-call"},"event_kind":"function_tool_call"}' + content='{"part":{"tool_name":"get_country","args":"{}","tool_call_id":"call_3rqTYrA6H21AYUaRGP4F66oq","id":null,"provider_name":null,"provider_details":null,"part_kind":"tool-call"},"event_kind":"function_tool_call"}' ), ], ), @@ -357,7 +357,7 @@ async def test_complex_agent_run_in_workflow(allow_model_requests: None, dbos: D children=[ BasicSpan(content='ctx.run_step=1'), BasicSpan( - content='{"part":{"tool_name":"get_product_name","args":"{}","tool_call_id":"call_Xw9XMKBJU48kAAd78WgIswDx","id":null,"provider_details":null,"part_kind":"tool-call"},"event_kind":"function_tool_call"}' + 
content='{"part":{"tool_name":"get_product_name","args":"{}","tool_call_id":"call_Xw9XMKBJU48kAAd78WgIswDx","id":null,"provider_name":null,"provider_details":null,"part_kind":"tool-call"},"event_kind":"function_tool_call"}' ), ], ), @@ -402,28 +402,28 @@ async def test_complex_agent_run_in_workflow(allow_model_requests: None, dbos: D children=[ BasicSpan(content='ctx.run_step=2'), BasicSpan( - content='{"index":0,"part":{"tool_name":"get_weather","args":"","tool_call_id":"call_Vz0Sie91Ap56nH0ThKGrZXT7","id":null,"provider_details":null,"part_kind":"tool-call"},"previous_part_kind":null,"event_kind":"part_start"}' + content='{"index":0,"part":{"tool_name":"get_weather","args":"","tool_call_id":"call_Vz0Sie91Ap56nH0ThKGrZXT7","id":null,"provider_name":null,"provider_details":null,"part_kind":"tool-call"},"previous_part_kind":null,"event_kind":"part_start"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"{\\"","tool_call_id":"call_Vz0Sie91Ap56nH0ThKGrZXT7","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"{\\"","tool_call_id":"call_Vz0Sie91Ap56nH0ThKGrZXT7","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"city","tool_call_id":"call_Vz0Sie91Ap56nH0ThKGrZXT7","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"city","tool_call_id":"call_Vz0Sie91Ap56nH0ThKGrZXT7","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"\\":\\"","tool_call_id":"call_Vz0Sie91Ap56nH0ThKGrZXT7","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + 
content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"\\":\\"","tool_call_id":"call_Vz0Sie91Ap56nH0ThKGrZXT7","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"Mexico","tool_call_id":"call_Vz0Sie91Ap56nH0ThKGrZXT7","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"Mexico","tool_call_id":"call_Vz0Sie91Ap56nH0ThKGrZXT7","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":" City","tool_call_id":"call_Vz0Sie91Ap56nH0ThKGrZXT7","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":" City","tool_call_id":"call_Vz0Sie91Ap56nH0ThKGrZXT7","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"\\"}","tool_call_id":"call_Vz0Sie91Ap56nH0ThKGrZXT7","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"\\"}","tool_call_id":"call_Vz0Sie91Ap56nH0ThKGrZXT7","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"part":{"tool_name":"get_weather","args":"{\\"city\\":\\"Mexico City\\"}","tool_call_id":"call_Vz0Sie91Ap56nH0ThKGrZXT7","id":null,"provider_details":null,"part_kind":"tool-call"},"next_part_kind":null,"event_kind":"part_end"}' + content='{"index":0,"part":{"tool_name":"get_weather","args":"{\\"city\\":\\"Mexico 
City\\"}","tool_call_id":"call_Vz0Sie91Ap56nH0ThKGrZXT7","id":null,"provider_name":null,"provider_details":null,"part_kind":"tool-call"},"next_part_kind":null,"event_kind":"part_end"}' ), ], ) @@ -434,7 +434,7 @@ async def test_complex_agent_run_in_workflow(allow_model_requests: None, dbos: D children=[ BasicSpan(content='ctx.run_step=2'), BasicSpan( - content='{"part":{"tool_name":"get_weather","args":"{\\"city\\":\\"Mexico City\\"}","tool_call_id":"call_Vz0Sie91Ap56nH0ThKGrZXT7","id":null,"provider_details":null,"part_kind":"tool-call"},"event_kind":"function_tool_call"}' + content='{"part":{"tool_name":"get_weather","args":"{\\"city\\":\\"Mexico City\\"}","tool_call_id":"call_Vz0Sie91Ap56nH0ThKGrZXT7","id":null,"provider_name":null,"provider_details":null,"part_kind":"tool-call"},"event_kind":"function_tool_call"}' ), ], ), @@ -466,133 +466,133 @@ async def test_complex_agent_run_in_workflow(allow_model_requests: None, dbos: D children=[ BasicSpan(content='ctx.run_step=3'), BasicSpan( - content='{"index":0,"part":{"tool_name":"final_result","args":"","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","id":null,"provider_details":null,"part_kind":"tool-call"},"previous_part_kind":null,"event_kind":"part_start"}' + content='{"index":0,"part":{"tool_name":"final_result","args":"","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","id":null,"provider_name":null,"provider_details":null,"part_kind":"tool-call"},"previous_part_kind":null,"event_kind":"part_start"}' ), BasicSpan( content='{"tool_name":"final_result","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","event_kind":"final_result"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"{\\"","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + 
content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"{\\"","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"answers","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"answers","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"\\":[","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"\\":[","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"{\\"","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"{\\"","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"label","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + 
content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"label","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"\\":\\"","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"\\":\\"","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"Capital","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"Capital","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":" of","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":" of","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":" the","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":" 
the","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":" country","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":" country","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"\\",\\"","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"\\",\\"","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"answer","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"answer","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"\\":\\"","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"\\":\\"","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - 
content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"Mexico","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"Mexico","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":" City","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":" City","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"\\"},{\\"","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"\\"},{\\"","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"label","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"label","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"\\":\\"","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + 
content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"\\":\\"","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"Weather","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"Weather","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":" in","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":" in","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":" the","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":" the","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":" capital","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":" 
capital","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"\\",\\"","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"\\",\\"","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"answer","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"answer","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"\\":\\"","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"\\":\\"","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"Sunny","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"Sunny","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - 
content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"\\"},{\\"","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"\\"},{\\"","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"label","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"label","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"\\":\\"","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"\\":\\"","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"Product","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"Product","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":" Name","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' 
+ content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":" Name","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"\\",\\"","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"\\",\\"","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"answer","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"answer","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"\\":\\"","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"\\":\\"","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"P","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + 
content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"P","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"yd","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"yd","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"antic","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"antic","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":" AI","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":" AI","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"\\"}","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"\\"}","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), 
BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"]}","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"]}","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"part":{"tool_name":"final_result","args":"{\\"answers\\":[{\\"label\\":\\"Capital of the country\\",\\"answer\\":\\"Mexico City\\"},{\\"label\\":\\"Weather in the capital\\",\\"answer\\":\\"Sunny\\"},{\\"label\\":\\"Product Name\\",\\"answer\\":\\"Pydantic AI\\"}]}","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","id":null,"provider_details":null,"part_kind":"tool-call"},"next_part_kind":null,"event_kind":"part_end"}' + content='{"index":0,"part":{"tool_name":"final_result","args":"{\\"answers\\":[{\\"label\\":\\"Capital of the country\\",\\"answer\\":\\"Mexico City\\"},{\\"label\\":\\"Weather in the capital\\",\\"answer\\":\\"Sunny\\"},{\\"label\\":\\"Product Name\\",\\"answer\\":\\"Pydantic AI\\"}]}","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","id":null,"provider_name":null,"provider_details":null,"part_kind":"tool-call"},"next_part_kind":null,"event_kind":"part_end"}' ), ], ) diff --git a/tests/test_mcp.py b/tests/test_mcp.py index 3ceb90c2b9..a323bf3f38 100644 --- a/tests/test_mcp.py +++ b/tests/test_mcp.py @@ -930,6 +930,7 @@ async def test_tool_returning_audio_resource_link( tool_name='get_audio_resource_link', args={}, tool_call_id=IsStr(), + provider_name='google-gla', provider_details={'thought_signature': IsStr()}, ) ], diff --git a/tests/test_temporal.py b/tests/test_temporal.py index e8c09e8aae..ea11b4be7f 100644 --- a/tests/test_temporal.py +++ b/tests/test_temporal.py @@ -391,22 +391,22 @@ async def test_complex_agent_run_in_workflow( children=[ BasicSpan(content='ctx.run_step=1'), 
BasicSpan( - content='{"index":0,"part":{"tool_name":"get_country","args":"","tool_call_id":"call_3rqTYrA6H21AYUaRGP4F66oq","id":null,"provider_details":null,"part_kind":"tool-call"},"previous_part_kind":null,"event_kind":"part_start"}' + content='{"index":0,"part":{"tool_name":"get_country","args":"","tool_call_id":"call_3rqTYrA6H21AYUaRGP4F66oq","id":null,"provider_name":null,"provider_details":null,"part_kind":"tool-call"},"previous_part_kind":null,"event_kind":"part_start"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"{}","tool_call_id":"call_3rqTYrA6H21AYUaRGP4F66oq","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"{}","tool_call_id":"call_3rqTYrA6H21AYUaRGP4F66oq","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"part":{"tool_name":"get_country","args":"{}","tool_call_id":"call_3rqTYrA6H21AYUaRGP4F66oq","id":null,"provider_details":null,"part_kind":"tool-call"},"next_part_kind":"tool-call","event_kind":"part_end"}' + content='{"index":0,"part":{"tool_name":"get_country","args":"{}","tool_call_id":"call_3rqTYrA6H21AYUaRGP4F66oq","id":null,"provider_name":null,"provider_details":null,"part_kind":"tool-call"},"next_part_kind":"tool-call","event_kind":"part_end"}' ), BasicSpan( - content='{"index":1,"part":{"tool_name":"get_product_name","args":"","tool_call_id":"call_Xw9XMKBJU48kAAd78WgIswDx","id":null,"provider_details":null,"part_kind":"tool-call"},"previous_part_kind":"tool-call","event_kind":"part_start"}' + content='{"index":1,"part":{"tool_name":"get_product_name","args":"","tool_call_id":"call_Xw9XMKBJU48kAAd78WgIswDx","id":null,"provider_name":null,"provider_details":null,"part_kind":"tool-call"},"previous_part_kind":"tool-call","event_kind":"part_start"}' ), BasicSpan( - 
content='{"index":1,"delta":{"tool_name_delta":null,"args_delta":"{}","tool_call_id":"call_Xw9XMKBJU48kAAd78WgIswDx","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":1,"delta":{"tool_name_delta":null,"args_delta":"{}","tool_call_id":"call_Xw9XMKBJU48kAAd78WgIswDx","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":1,"part":{"tool_name":"get_product_name","args":"{}","tool_call_id":"call_Xw9XMKBJU48kAAd78WgIswDx","id":null,"provider_details":null,"part_kind":"tool-call"},"next_part_kind":null,"event_kind":"part_end"}' + content='{"index":1,"part":{"tool_name":"get_product_name","args":"{}","tool_call_id":"call_Xw9XMKBJU48kAAd78WgIswDx","id":null,"provider_name":null,"provider_details":null,"part_kind":"tool-call"},"next_part_kind":null,"event_kind":"part_end"}' ), ], ) @@ -422,7 +422,7 @@ async def test_complex_agent_run_in_workflow( children=[ BasicSpan(content='ctx.run_step=1'), BasicSpan( - content='{"part":{"tool_name":"get_country","args":"{}","tool_call_id":"call_3rqTYrA6H21AYUaRGP4F66oq","id":null,"provider_details":null,"part_kind":"tool-call"},"event_kind":"function_tool_call"}' + content='{"part":{"tool_name":"get_country","args":"{}","tool_call_id":"call_3rqTYrA6H21AYUaRGP4F66oq","id":null,"provider_name":null,"provider_details":null,"part_kind":"tool-call"},"event_kind":"function_tool_call"}' ), ], ) @@ -436,7 +436,7 @@ async def test_complex_agent_run_in_workflow( children=[ BasicSpan(content='ctx.run_step=1'), BasicSpan( - content='{"part":{"tool_name":"get_product_name","args":"{}","tool_call_id":"call_Xw9XMKBJU48kAAd78WgIswDx","id":null,"provider_details":null,"part_kind":"tool-call"},"event_kind":"function_tool_call"}' + 
content='{"part":{"tool_name":"get_product_name","args":"{}","tool_call_id":"call_Xw9XMKBJU48kAAd78WgIswDx","id":null,"provider_name":null,"provider_details":null,"part_kind":"tool-call"},"event_kind":"function_tool_call"}' ), ], ) @@ -510,28 +510,28 @@ async def test_complex_agent_run_in_workflow( children=[ BasicSpan(content='ctx.run_step=2'), BasicSpan( - content='{"index":0,"part":{"tool_name":"get_weather","args":"","tool_call_id":"call_Vz0Sie91Ap56nH0ThKGrZXT7","id":null,"provider_details":null,"part_kind":"tool-call"},"previous_part_kind":null,"event_kind":"part_start"}' + content='{"index":0,"part":{"tool_name":"get_weather","args":"","tool_call_id":"call_Vz0Sie91Ap56nH0ThKGrZXT7","id":null,"provider_name":null,"provider_details":null,"part_kind":"tool-call"},"previous_part_kind":null,"event_kind":"part_start"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"{\\"","tool_call_id":"call_Vz0Sie91Ap56nH0ThKGrZXT7","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"{\\"","tool_call_id":"call_Vz0Sie91Ap56nH0ThKGrZXT7","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"city","tool_call_id":"call_Vz0Sie91Ap56nH0ThKGrZXT7","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"city","tool_call_id":"call_Vz0Sie91Ap56nH0ThKGrZXT7","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"\\":\\"","tool_call_id":"call_Vz0Sie91Ap56nH0ThKGrZXT7","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + 
content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"\\":\\"","tool_call_id":"call_Vz0Sie91Ap56nH0ThKGrZXT7","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"Mexico","tool_call_id":"call_Vz0Sie91Ap56nH0ThKGrZXT7","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"Mexico","tool_call_id":"call_Vz0Sie91Ap56nH0ThKGrZXT7","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":" City","tool_call_id":"call_Vz0Sie91Ap56nH0ThKGrZXT7","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":" City","tool_call_id":"call_Vz0Sie91Ap56nH0ThKGrZXT7","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"\\"}","tool_call_id":"call_Vz0Sie91Ap56nH0ThKGrZXT7","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"\\"}","tool_call_id":"call_Vz0Sie91Ap56nH0ThKGrZXT7","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"part":{"tool_name":"get_weather","args":"{\\"city\\":\\"Mexico City\\"}","tool_call_id":"call_Vz0Sie91Ap56nH0ThKGrZXT7","id":null,"provider_details":null,"part_kind":"tool-call"},"next_part_kind":null,"event_kind":"part_end"}' + content='{"index":0,"part":{"tool_name":"get_weather","args":"{\\"city\\":\\"Mexico 
City\\"}","tool_call_id":"call_Vz0Sie91Ap56nH0ThKGrZXT7","id":null,"provider_name":null,"provider_details":null,"part_kind":"tool-call"},"next_part_kind":null,"event_kind":"part_end"}' ), ], ) @@ -547,7 +547,7 @@ async def test_complex_agent_run_in_workflow( children=[ BasicSpan(content='ctx.run_step=2'), BasicSpan( - content='{"part":{"tool_name":"get_weather","args":"{\\"city\\":\\"Mexico City\\"}","tool_call_id":"call_Vz0Sie91Ap56nH0ThKGrZXT7","id":null,"provider_details":null,"part_kind":"tool-call"},"event_kind":"function_tool_call"}' + content='{"part":{"tool_name":"get_weather","args":"{\\"city\\":\\"Mexico City\\"}","tool_call_id":"call_Vz0Sie91Ap56nH0ThKGrZXT7","id":null,"provider_name":null,"provider_details":null,"part_kind":"tool-call"},"event_kind":"function_tool_call"}' ), ], ) @@ -604,133 +604,133 @@ async def test_complex_agent_run_in_workflow( children=[ BasicSpan(content='ctx.run_step=3'), BasicSpan( - content='{"index":0,"part":{"tool_name":"final_result","args":"","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","id":null,"provider_details":null,"part_kind":"tool-call"},"previous_part_kind":null,"event_kind":"part_start"}' + content='{"index":0,"part":{"tool_name":"final_result","args":"","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","id":null,"provider_name":null,"provider_details":null,"part_kind":"tool-call"},"previous_part_kind":null,"event_kind":"part_start"}' ), BasicSpan( content='{"tool_name":"final_result","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","event_kind":"final_result"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"{\\"","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"{\\"","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - 
content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"answers","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"answers","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"\\":[","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"\\":[","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"{\\"","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"{\\"","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"label","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"label","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"\\":\\"","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + 
content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"\\":\\"","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"Capital","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"Capital","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":" of","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":" of","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":" the","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":" the","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":" country","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":" 
country","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"\\",\\"","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"\\",\\"","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"answer","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"answer","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"\\":\\"","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"\\":\\"","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"Mexico","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"Mexico","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - 
content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":" City","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":" City","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"\\"},{\\"","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"\\"},{\\"","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"label","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"label","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"\\":\\"","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"\\":\\"","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"Weather","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + 
content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"Weather","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":" in","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":" in","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":" the","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":" the","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":" capital","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":" capital","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"\\",\\"","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + 
content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"\\",\\"","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"answer","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"answer","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"\\":\\"","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"\\":\\"","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"Sunny","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"Sunny","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"\\"},{\\"","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + 
content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"\\"},{\\"","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"label","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"label","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"\\":\\"","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"\\":\\"","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"Product","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"Product","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":" Name","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":" 
Name","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"\\",\\"","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"\\",\\"","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"answer","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"answer","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"\\":\\"","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"\\":\\"","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"P","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"P","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - 
content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"yd","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"yd","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"antic","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"antic","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":" AI","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":" AI","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"\\"}","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"\\"}","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"]}","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' + 
content='{"index":0,"delta":{"tool_name_delta":null,"args_delta":"]}","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","provider_name":null,"provider_details":null,"part_delta_kind":"tool_call"},"event_kind":"part_delta"}' ), BasicSpan( - content='{"index":0,"part":{"tool_name":"final_result","args":"{\\"answers\\":[{\\"label\\":\\"Capital of the country\\",\\"answer\\":\\"Mexico City\\"},{\\"label\\":\\"Weather in the capital\\",\\"answer\\":\\"Sunny\\"},{\\"label\\":\\"Product Name\\",\\"answer\\":\\"Pydantic AI\\"}]}","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","id":null,"provider_details":null,"part_kind":"tool-call"},"next_part_kind":null,"event_kind":"part_end"}' + content='{"index":0,"part":{"tool_name":"final_result","args":"{\\"answers\\":[{\\"label\\":\\"Capital of the country\\",\\"answer\\":\\"Mexico City\\"},{\\"label\\":\\"Weather in the capital\\",\\"answer\\":\\"Sunny\\"},{\\"label\\":\\"Product Name\\",\\"answer\\":\\"Pydantic AI\\"}]}","tool_call_id":"call_4kc6691zCzjPnOuEtbEGUvz2","id":null,"provider_name":null,"provider_details":null,"part_kind":"tool-call"},"next_part_kind":null,"event_kind":"part_end"}' ), ], ) diff --git a/tests/test_vercel_ai.py b/tests/test_vercel_ai.py index 59fff168ec..42fd73205c 100644 --- a/tests/test_vercel_ai.py +++ b/tests/test_vercel_ai.py @@ -46,6 +46,7 @@ from pydantic_ai.models.test import TestModel from pydantic_ai.run import AgentRunResult from pydantic_ai.ui.vercel_ai import VercelAIAdapter, VercelAIEventStream +from pydantic_ai.ui.vercel_ai._utils import dump_provider_metadata, load_provider_metadata from pydantic_ai.ui.vercel_ai.request_types import ( DynamicToolOutputAvailablePart, FileUIPart, @@ -285,8 +286,28 @@ async def test_run(allow_model_requests: None, openai_api_key: str): [ {'type': 'start'}, {'type': 'start-step'}, - {'type': 'reasoning-start', 'id': IsStr()}, - {'type': 'reasoning-end', 'id': IsStr()}, + { + 'type': 'reasoning-start', + 'id': IsStr(), + 'providerMetadata': { + 'pydantic_ai': { 
+ 'signature': IsStr(), + 'provider_name': 'openai', + 'id': 'rs_00e767404995b9950068e647f10d8c819187515d1b2517b059', + } + }, + }, + { + 'type': 'reasoning-end', + 'id': IsStr(), + 'providerMetadata': { + 'pydantic_ai': { + 'signature': IsStr(), + 'provider_name': 'openai', + 'id': 'rs_00e767404995b9950068e647f10d8c819187515d1b2517b059', + } + }, + }, {'type': 'tool-input-start', 'toolCallId': IsStr(), 'toolName': 'web_search', 'providerExecuted': True}, { 'type': 'tool-input-delta', @@ -310,8 +331,28 @@ async def test_run(allow_model_requests: None, openai_api_key: str): 'output': {'status': 'completed'}, 'providerExecuted': True, }, - {'type': 'reasoning-start', 'id': IsStr()}, - {'type': 'reasoning-end', 'id': IsStr()}, + { + 'type': 'reasoning-start', + 'id': IsStr(), + 'providerMetadata': { + 'pydantic_ai': { + 'signature': IsStr(), + 'provider_name': 'openai', + 'id': 'rs_00e767404995b9950068e647fa69e48191b6f5385a856b2948', + } + }, + }, + { + 'type': 'reasoning-end', + 'id': IsStr(), + 'providerMetadata': { + 'pydantic_ai': { + 'signature': IsStr(), + 'provider_name': 'openai', + 'id': 'rs_00e767404995b9950068e647fa69e48191b6f5385a856b2948', + } + }, + }, {'type': 'tool-input-start', 'toolCallId': IsStr(), 'toolName': 'web_search', 'providerExecuted': True}, { 'type': 'tool-input-delta', @@ -332,8 +373,28 @@ async def test_run(allow_model_requests: None, openai_api_key: str): 'output': {'status': 'completed'}, 'providerExecuted': True, }, - {'type': 'reasoning-start', 'id': IsStr()}, - {'type': 'reasoning-end', 'id': IsStr()}, + { + 'type': 'reasoning-start', + 'id': IsStr(), + 'providerMetadata': { + 'pydantic_ai': { + 'signature': IsStr(), + 'provider_name': 'openai', + 'id': 'rs_00e767404995b9950068e647fd656081919385a27bd1162fcd', + } + }, + }, + { + 'type': 'reasoning-end', + 'id': IsStr(), + 'providerMetadata': { + 'pydantic_ai': { + 'signature': IsStr(), + 'provider_name': 'openai', + 'id': 'rs_00e767404995b9950068e647fd656081919385a27bd1162fcd', + } 
+ }, + }, {'type': 'tool-input-start', 'toolCallId': IsStr(), 'toolName': 'web_search', 'providerExecuted': True}, { 'type': 'tool-input-delta', @@ -354,8 +415,28 @@ async def test_run(allow_model_requests: None, openai_api_key: str): 'output': {'status': 'completed'}, 'providerExecuted': True, }, - {'type': 'reasoning-start', 'id': IsStr()}, - {'type': 'reasoning-end', 'id': IsStr()}, + { + 'type': 'reasoning-start', + 'id': IsStr(), + 'providerMetadata': { + 'pydantic_ai': { + 'signature': IsStr(), + 'provider_name': 'openai', + 'id': 'rs_00e767404995b9950068e648022d288191a6acb6cff99dafba', + } + }, + }, + { + 'type': 'reasoning-end', + 'id': IsStr(), + 'providerMetadata': { + 'pydantic_ai': { + 'signature': IsStr(), + 'provider_name': 'openai', + 'id': 'rs_00e767404995b9950068e648022d288191a6acb6cff99dafba', + } + }, + }, {'type': 'tool-input-start', 'toolCallId': IsStr(), 'toolName': 'web_search', 'providerExecuted': True}, { 'type': 'tool-input-delta', @@ -379,8 +460,28 @@ async def test_run(allow_model_requests: None, openai_api_key: str): 'output': {'status': 'completed'}, 'providerExecuted': True, }, - {'type': 'reasoning-start', 'id': IsStr()}, - {'type': 'reasoning-end', 'id': IsStr()}, + { + 'type': 'reasoning-start', + 'id': IsStr(), + 'providerMetadata': { + 'pydantic_ai': { + 'signature': IsStr(), + 'provider_name': 'openai', + 'id': 'rs_00e767404995b9950068e648060b088191974c790f06b8ea8e', + } + }, + }, + { + 'type': 'reasoning-end', + 'id': IsStr(), + 'providerMetadata': { + 'pydantic_ai': { + 'signature': IsStr(), + 'provider_name': 'openai', + 'id': 'rs_00e767404995b9950068e648060b088191974c790f06b8ea8e', + } + }, + }, {'type': 'tool-input-start', 'toolCallId': IsStr(), 'toolName': 'web_search', 'providerExecuted': True}, { 'type': 'tool-input-delta', @@ -401,8 +502,28 @@ async def test_run(allow_model_requests: None, openai_api_key: str): 'output': {'status': 'completed'}, 'providerExecuted': True, }, - {'type': 'reasoning-start', 'id': IsStr()}, 
- {'type': 'reasoning-end', 'id': IsStr()}, + { + 'type': 'reasoning-start', + 'id': IsStr(), + 'providerMetadata': { + 'pydantic_ai': { + 'signature': IsStr(), + 'provider_name': 'openai', + 'id': 'rs_00e767404995b9950068e6480bbd348191b11aa4762de66297', + } + }, + }, + { + 'type': 'reasoning-end', + 'id': IsStr(), + 'providerMetadata': { + 'pydantic_ai': { + 'signature': IsStr(), + 'provider_name': 'openai', + 'id': 'rs_00e767404995b9950068e6480bbd348191b11aa4762de66297', + } + }, + }, {'type': 'tool-input-start', 'toolCallId': IsStr(), 'toolName': 'web_search', 'providerExecuted': True}, { 'type': 'tool-input-delta', @@ -423,8 +544,28 @@ async def test_run(allow_model_requests: None, openai_api_key: str): 'output': {'status': 'completed'}, 'providerExecuted': True, }, - {'type': 'reasoning-start', 'id': IsStr()}, - {'type': 'reasoning-end', 'id': IsStr()}, + { + 'type': 'reasoning-start', + 'id': IsStr(), + 'providerMetadata': { + 'pydantic_ai': { + 'signature': IsStr(), + 'provider_name': 'openai', + 'id': 'rs_00e767404995b9950068e6480f16f08191beaad2936e3d3195', + } + }, + }, + { + 'type': 'reasoning-end', + 'id': IsStr(), + 'providerMetadata': { + 'pydantic_ai': { + 'signature': IsStr(), + 'provider_name': 'openai', + 'id': 'rs_00e767404995b9950068e6480f16f08191beaad2936e3d3195', + } + }, + }, {'type': 'tool-input-start', 'toolCallId': IsStr(), 'toolName': 'web_search', 'providerExecuted': True}, { 'type': 'tool-input-delta', @@ -445,9 +586,33 @@ async def test_run(allow_model_requests: None, openai_api_key: str): 'output': {'status': 'completed'}, 'providerExecuted': True, }, - {'type': 'reasoning-start', 'id': IsStr()}, - {'type': 'reasoning-end', 'id': IsStr()}, - {'type': 'text-start', 'id': IsStr()}, + { + 'type': 'reasoning-start', + 'id': IsStr(), + 'providerMetadata': { + 'pydantic_ai': { + 'signature': IsStr(), + 'provider_name': 'openai', + 'id': 'rs_00e767404995b9950068e648130f0481918dc71103fbd6a486', + } + }, + }, + { + 'type': 'reasoning-end', + 
'id': IsStr(), + 'providerMetadata': { + 'pydantic_ai': { + 'signature': IsStr(), + 'provider_name': 'openai', + 'id': 'rs_00e767404995b9950068e648130f0481918dc71103fbd6a486', + } + }, + }, + { + 'type': 'text-start', + 'id': IsStr(), + 'providerMetadata': {'pydantic_ai': {'id': 'msg_00e767404995b9950068e6482f25e0819181582a15cdd9207f'}}, + }, { 'type': 'text-delta', 'delta': """\ @@ -455,6 +620,7 @@ async def test_run(allow_model_requests: None, openai_api_key: str): - Default\ """, 'id': IsStr(), + 'providerMetadata': {'pydantic_ai': {'id': 'msg_00e767404995b9950068e6482f25e0819181582a15cdd9207f'}}, }, {'type': 'text-delta', 'delta': ' FastAPI/OpenTelemetry', 'id': IsStr()}, { @@ -1037,7 +1203,7 @@ def client_response\ {'type': 'text-delta', 'delta': ' skip binary content,', 'id': IsStr()}, {'type': 'text-delta', 'delta': ' or accumulate chunked', 'id': IsStr()}, {'type': 'text-delta', 'delta': ' bodies safely?', 'id': IsStr()}, - {'type': 'text-end', 'id': IsStr()}, + {'type': 'text-end', 'id': IsStr(), 'providerMetadata': {'pydantic_ai': {'id': IsStr()}}}, {'type': 'finish-step'}, {'type': 'finish', 'finishReason': 'stop'}, '[DONE]', @@ -1104,6 +1270,134 @@ async def stream_function( ) +async def test_run_stream_thinking_with_signature(): + """Test that thinking parts with signatures include providerMetadata in reasoning-end events.""" + + async def stream_function( + messages: list[ModelMessage], agent_info: AgentInfo + ) -> AsyncIterator[DeltaThinkingCalls | str]: + yield {0: DeltaThinkingPart(content='Let me think...')} + yield {0: DeltaThinkingPart(signature='sig_abc123')} + yield 'Here is my answer.' 
+ + agent = Agent(model=FunctionModel(stream_function=stream_function)) + + request = SubmitMessage( + id='foo', + messages=[ + UIMessage( + id='bar', + role='user', + parts=[TextUIPart(text='Think about something')], + ), + ], + ) + + adapter = VercelAIAdapter(agent, request) + events = [ + '[DONE]' if '[DONE]' in event else json.loads(event.removeprefix('data: ')) + async for event in adapter.encode_stream(adapter.run_stream()) + ] + + assert events == snapshot( + [ + {'type': 'start'}, + {'type': 'start-step'}, + {'type': 'reasoning-start', 'id': IsStr()}, + {'type': 'reasoning-delta', 'id': IsStr(), 'delta': 'Let me think...'}, + { + 'type': 'reasoning-end', + 'id': IsStr(), + 'providerMetadata': {'pydantic_ai': {'signature': 'sig_abc123', 'provider_name': 'function'}}, + }, + {'type': 'text-start', 'id': IsStr()}, + {'type': 'text-delta', 'delta': 'Here is my answer.', 'id': IsStr()}, + {'type': 'text-end', 'id': IsStr()}, + {'type': 'finish-step'}, + {'type': 'finish'}, + '[DONE]', + ] + ) + + +async def test_event_stream_thinking_end_with_full_metadata(): + """Test handle_thinking_end with all metadata fields (signature, provider_name, provider_details, id).""" + + async def event_generator(): + part = ThinkingPart( + content='Deep thought...', + id='thinking_456', + signature='sig_xyz789', + provider_name='anthropic', + provider_details={'model': 'claude-3', 'tokens': 100}, + ) + yield PartStartEvent(index=0, part=part) + yield PartEndEvent(index=0, part=part) + + request = SubmitMessage( + id='foo', + messages=[ + UIMessage( + id='bar', + role='user', + parts=[TextUIPart(text='Think deeply')], + ), + ], + ) + event_stream = VercelAIEventStream(run_input=request) + events = [ + '[DONE]' if '[DONE]' in event else json.loads(event.removeprefix('data: ')) + async for event in event_stream.encode_stream(event_stream.transform_stream(event_generator())) + ] + + assert events == snapshot( + [ + {'type': 'start'}, + {'type': 'start-step'}, + { + 'type': 
'reasoning-start', + 'id': IsStr(), + 'providerMetadata': { + 'pydantic_ai': { + 'signature': 'sig_xyz789', + 'provider_name': 'anthropic', + 'id': 'thinking_456', + 'provider_details': {'model': 'claude-3', 'tokens': 100}, + } + }, + }, + { + 'type': 'reasoning-delta', + 'id': IsStr(), + 'delta': 'Deep thought...', + 'providerMetadata': { + 'pydantic_ai': { + 'signature': 'sig_xyz789', + 'provider_name': 'anthropic', + 'id': 'thinking_456', + 'provider_details': {'model': 'claude-3', 'tokens': 100}, + } + }, + }, + { + 'type': 'reasoning-end', + 'id': IsStr(), + 'providerMetadata': { + 'pydantic_ai': { + 'signature': 'sig_xyz789', + 'provider_name': 'anthropic', + 'provider_details': {'model': 'claude-3', 'tokens': 100}, + 'id': 'thinking_456', + } + }, + }, + {'type': 'finish-step'}, + {'type': 'finish'}, + '[DONE]', + ] + ) + + async def test_event_stream_back_to_back_text(): async def event_generator(): yield PartStartEvent(index=0, part=TextPart(content='Hello')) @@ -1810,7 +2104,12 @@ async def test_adapter_load_messages(): input={'query': 'What is Logfire?'}, output="[Scrubbed due to 'Auth']", provider_executed=True, - call_provider_metadata={'pydantic_ai': {'provider_name': 'openai'}}, + call_provider_metadata={ + 'pydantic_ai': { + 'call_meta': {'provider_name': 'openai'}, + 'return_meta': {'provider_name': 'openai_return'}, + } + }, ), ToolOutputErrorPart( type='tool-web_search', @@ -1956,7 +2255,7 @@ async def test_adapter_load_messages(): content="[Scrubbed due to 'Auth']", tool_call_id='toolu_01W2yGpGQcMx7pXV2zZ4s', timestamp=IsDatetime(), - provider_name='openai', + provider_name='openai_return', ), BuiltinToolCallPart( tool_name='web_search', @@ -2115,12 +2414,14 @@ async def test_adapter_dump_messages_with_builtin_tools(): args={'query': 'test'}, tool_call_id='tool_456', provider_name='openai', + provider_details={'tool_type': 'web_search_preview'}, ), BuiltinToolReturnPart( tool_name='web_search', content={'status': 'completed'}, 
tool_call_id='tool_456', provider_name='openai', + provider_details={'execution_time_ms': 150}, ), ] ), @@ -2149,7 +2450,18 @@ async def test_adapter_dump_messages_with_builtin_tools(): 'input': '{"query":"test"}', 'output': '{"status":"completed"}', 'provider_executed': True, - 'call_provider_metadata': {'pydantic_ai': {'provider_name': 'openai'}}, + 'call_provider_metadata': { + 'pydantic_ai': { + 'call_meta': { + 'provider_name': 'openai', + 'provider_details': {'tool_type': 'web_search_preview'}, + }, + 'return_meta': { + 'provider_name': 'openai', + 'provider_details': {'execution_time_ms': 150}, + }, + } + }, 'preliminary': None, } ], @@ -2196,7 +2508,9 @@ async def test_adapter_dump_messages_with_builtin_tool_without_return(): 'state': 'input-available', 'input': '{"query":"orphan query"}', 'provider_executed': True, - 'call_provider_metadata': {'pydantic_ai': {'provider_name': 'openai'}}, + 'call_provider_metadata': { + 'pydantic_ai': {'provider_name': 'openai'} + }, # No return part, so defaults to normal call provider name } ], }, @@ -2492,7 +2806,12 @@ async def test_adapter_dump_messages_text_with_interruption(): 'input': '{}', 'output': 'result', 'provider_executed': True, - 'call_provider_metadata': {'pydantic_ai': {'provider_name': 'test'}}, + 'call_provider_metadata': { + 'pydantic_ai': { + 'call_meta': {'provider_name': 'test'}, + 'return_meta': {'provider_name': 'test'}, + } + }, 'preliminary': None, }, { @@ -2532,17 +2851,9 @@ async def test_adapter_dump_load_roundtrip(): ui_messages = VercelAIAdapter.dump_messages(original_messages) - def sync_timestamps(original: list[ModelRequest | ModelResponse], new: list[ModelRequest | ModelResponse]) -> None: - for orig_msg, new_msg in zip(original, new): - for orig_part, new_part in zip(orig_msg.parts, new_msg.parts): - if hasattr(orig_part, 'timestamp') and hasattr(new_part, 'timestamp'): - new_part.timestamp = orig_part.timestamp # pyright: ignore[reportAttributeAccessIssue, reportUnknownMemberType] - 
if hasattr(orig_msg, 'timestamp') and hasattr(new_msg, 'timestamp'): # pragma: no branch - new_msg.timestamp = orig_msg.timestamp # pyright: ignore[reportAttributeAccessIssue] - # Load back to Pydantic AI format reloaded_messages = VercelAIAdapter.load_messages(ui_messages) - sync_timestamps(original_messages, reloaded_messages) + _sync_timestamps(original_messages, reloaded_messages) assert reloaded_messages == original_messages @@ -2884,3 +3195,1008 @@ async def test_adapter_dump_messages_with_cache_point(): } ] ) + + +async def test_adapter_dump_messages_text_with_provider_details(): + """Test dumping TextPart with provider_name and provider_details preserves metadata.""" + messages = [ + ModelResponse( + parts=[ + TextPart( + content='Hello with metadata', + provider_name='openai', + provider_details={'model': 'gpt-4', 'finish_reason': 'stop'}, + ), + ] + ), + ] + + ui_messages = VercelAIAdapter.dump_messages(messages) + ui_message_dicts = [msg.model_dump() for msg in ui_messages] + + assert ui_message_dicts == snapshot( + [ + { + 'id': IsStr(), + 'role': 'assistant', + 'metadata': None, + 'parts': [ + { + 'type': 'text', + 'text': 'Hello with metadata', + 'state': 'done', + 'provider_metadata': { + 'pydantic_ai': { + 'provider_name': 'openai', + 'provider_details': {'model': 'gpt-4', 'finish_reason': 'stop'}, + } + }, + } + ], + } + ] + ) + + +async def test_adapter_load_messages_text_with_provider_metadata(): + """Test loading TextUIPart with provider_metadata preserves metadata on TextPart.""" + ui_messages = [ + UIMessage( + id='msg1', + role='assistant', + parts=[ + TextUIPart( + text='Hello with metadata', + state='done', + provider_metadata={ + 'pydantic_ai': { + 'id': 'text_123', + 'provider_name': 'anthropic', + 'provider_details': {'model': 'gpt-4', 'tokens': 50}, + } + }, + ) + ], + ) + ] + + messages = VercelAIAdapter.load_messages(ui_messages) + assert messages == snapshot( + [ + ModelResponse( + parts=[ + TextPart( + content='Hello with metadata', 
+ id='text_123', + provider_name='anthropic', + provider_details={'model': 'gpt-4', 'tokens': 50}, + ) + ], + timestamp=IsDatetime(), + ) + ] + ) + + +async def test_adapter_text_roundtrip_with_provider_details(): + """Test TextPart with provider_name and provider_details survives dump/load roundtrip.""" + original_messages = [ + ModelResponse( + parts=[ + TextPart( + content='Roundtrip text', + id='text_456', + provider_name='google', + provider_details={'completion_tokens': 100}, + ), + ] + ), + ] + + ui_messages = VercelAIAdapter.dump_messages(original_messages) + reloaded_messages = VercelAIAdapter.load_messages(ui_messages) + + # Sync timestamps for comparison + for orig_msg, new_msg in zip(original_messages, reloaded_messages): + new_msg.timestamp = orig_msg.timestamp + + assert reloaded_messages == original_messages + + +async def test_adapter_dump_messages_tool_call_with_provider_details(): + """Test dumping ToolCallPart with provider_name and provider_details preserves metadata.""" + messages = [ + ModelRequest(parts=[UserPromptPart(content='Do something')]), + ModelResponse( + parts=[ + ToolCallPart( + tool_name='my_tool', + args={'arg': 'value'}, + tool_call_id='tool_abc', + id='call_123', + provider_name='openai', + provider_details={'index': 0, 'type': 'function'}, + ), + ] + ), + ModelRequest( + parts=[ + ToolReturnPart( + tool_name='my_tool', + content='result', + tool_call_id='tool_abc', + ) + ] + ), + ] + + ui_messages = VercelAIAdapter.dump_messages(messages) + ui_message_dicts = [msg.model_dump() for msg in ui_messages] + + assert ui_message_dicts == snapshot( + [ + { + 'id': IsStr(), + 'role': 'user', + 'metadata': None, + 'parts': [{'type': 'text', 'text': 'Do something', 'state': 'done', 'provider_metadata': None}], + }, + { + 'id': IsStr(), + 'role': 'assistant', + 'metadata': None, + 'parts': [ + { + 'type': 'dynamic-tool', + 'tool_name': 'my_tool', + 'tool_call_id': 'tool_abc', + 'state': 'output-available', + 'input': '{"arg":"value"}', + 
'output': 'result', + 'call_provider_metadata': { + 'pydantic_ai': { + 'id': 'call_123', + 'provider_name': 'openai', + 'provider_details': {'index': 0, 'type': 'function'}, + } + }, + 'preliminary': None, + } + ], + }, + ] + ) + + +async def test_adapter_load_messages_tool_call_with_provider_metadata(): + """Test loading dynamic tool part with provider_metadata preserves metadata on ToolCallPart.""" + from pydantic_ai.ui.vercel_ai.request_types import DynamicToolInputAvailablePart + + ui_messages = [ + UIMessage( + id='msg1', + role='assistant', + parts=[ + DynamicToolInputAvailablePart( + tool_name='my_tool', + tool_call_id='tc_123', + input='{"key": "value"}', + state='input-available', + call_provider_metadata={ + 'pydantic_ai': { + 'provider_name': 'anthropic', + 'provider_details': {'index': 0}, + } + }, + ) + ], + ) + ] + + messages = VercelAIAdapter.load_messages(ui_messages) + assert messages == snapshot( + [ + ModelResponse( + parts=[ + ToolCallPart( + tool_name='my_tool', + args={'key': 'value'}, + tool_call_id='tc_123', + provider_name='anthropic', + provider_details={'index': 0}, + ), + ], + timestamp=IsDatetime(), + ) + ] + ) + + +async def test_adapter_tool_call_roundtrip_with_provider_details(): + """Test ToolCallPart with provider_name and provider_details survives dump/load roundtrip.""" + original_messages = [ + ModelResponse( + parts=[ + ToolCallPart( + tool_name='roundtrip_tool', + args={'param': 'val'}, + tool_call_id='tc_roundtrip', + provider_name='google', + provider_details={'model_internal_id': 'xyz'}, + ), + ] + ), + ModelRequest( + parts=[ + ToolReturnPart( + tool_name='roundtrip_tool', + content='done', + tool_call_id='tc_roundtrip', + ) + ] + ), + ] + + ui_messages = VercelAIAdapter.dump_messages(original_messages) + reloaded_messages = VercelAIAdapter.load_messages(ui_messages) + _sync_timestamps(original_messages, reloaded_messages) + + assert reloaded_messages == original_messages + + +async def 
test_adapter_dump_messages_file_part_with_metadata(): + """Test dumping FilePart with provider metadata preserves id, provider_name, and provider_details.""" + messages = [ + ModelResponse( + parts=[ + FilePart( + content=BinaryContent(data=b'file_data', media_type='image/png'), + id='file_123', + provider_name='openai', + provider_details={'generation_id': 'gen_abc'}, + ), + ] + ), + ] + + ui_messages = VercelAIAdapter.dump_messages(messages) + ui_message_dicts = [msg.model_dump() for msg in ui_messages] + + assert ui_message_dicts == snapshot( + [ + { + 'id': IsStr(), + 'role': 'assistant', + 'metadata': None, + 'parts': [ + { + 'type': 'file', + 'media_type': 'image/png', + 'filename': None, + 'url': 'data:image/png;base64,ZmlsZV9kYXRh', + 'provider_metadata': { + 'pydantic_ai': { + 'id': 'file_123', + 'provider_name': 'openai', + 'provider_details': {'generation_id': 'gen_abc'}, + } + }, + } + ], + } + ] + ) + + +async def test_adapter_load_messages_file_with_provider_metadata(): + """Test loading FileUIPart with provider_metadata preserves id, provider_name, and provider_details.""" + ui_messages = [ + UIMessage( + id='msg1', + role='assistant', + parts=[ + FileUIPart( + url='data:image/png;base64,ZmlsZV9kYXRh', + media_type='image/png', + provider_metadata={ + 'pydantic_ai': { + 'id': 'file_456', + 'provider_name': 'anthropic', + 'provider_details': {'source': 'generated'}, + } + }, + ) + ], + ) + ] + + messages = VercelAIAdapter.load_messages(ui_messages) + assert messages == snapshot( + [ + ModelResponse( + parts=[ + FilePart( + content=BinaryImage(data=b'file_data', media_type='image/png', _identifier='cdd967'), + id='file_456', + provider_name='anthropic', + provider_details={'source': 'generated'}, + ) + ], + timestamp=IsDatetime(), + ) + ] + ) + + +async def test_adapter_dump_messages_builtin_tool_with_full_metadata(): + """Test dumping BuiltinToolCallPart with id, provider_name, and provider_details.""" + messages = [ + 
ModelRequest(parts=[UserPromptPart(content='Search')]), + ModelResponse( + parts=[ + BuiltinToolCallPart( + tool_name='web_search', + args={'query': 'test'}, + tool_call_id='bt_123', + id='call_456', + provider_name='openai', + provider_details={'tool_type': 'web_search_preview'}, + ), + BuiltinToolReturnPart( + tool_name='web_search', + content={'results': []}, + tool_call_id='bt_123', + provider_name='openai', + provider_details={'execution_time_ms': 150}, + ), + ] + ), + ] + + ui_messages = VercelAIAdapter.dump_messages(messages) + ui_message_dicts = [msg.model_dump() for msg in ui_messages] + + assert ui_message_dicts == snapshot( + [ + { + 'id': IsStr(), + 'role': 'user', + 'metadata': None, + 'parts': [{'type': 'text', 'text': 'Search', 'state': 'done', 'provider_metadata': None}], + }, + { + 'id': IsStr(), + 'role': 'assistant', + 'metadata': None, + 'parts': [ + { + 'type': 'tool-web_search', + 'tool_call_id': 'bt_123', + 'state': 'output-available', + 'input': '{"query":"test"}', + 'output': '{"results":[]}', + 'provider_executed': True, + 'call_provider_metadata': { + 'pydantic_ai': { + 'call_meta': { + 'id': 'call_456', + 'provider_name': 'openai', + 'provider_details': {'tool_type': 'web_search_preview'}, + }, + 'return_meta': { + 'provider_name': 'openai', + 'provider_details': {'execution_time_ms': 150}, + }, + } + }, + 'preliminary': None, + } + ], + }, + ] + ) + + +async def test_adapter_load_messages_builtin_tool_with_provider_details(): + """Test loading builtin tool with provider_details on return part.""" + ui_messages = [ + UIMessage( + id='msg1', + role='assistant', + parts=[ + ToolOutputAvailablePart( + type='tool-web_search', + tool_call_id='bt_load', + input='{"query": "test"}', + output='{"results": []}', + state='output-available', + provider_executed=True, + call_provider_metadata={ + 'pydantic_ai': { + 'call_meta': { + 'id': 'call_456', + 'provider_name': 'openai', + 'provider_details': {'tool_type': 'web_search_preview'}, + }, + 
'return_meta': { + 'id': 'call_456', + 'provider_name': 'openai', + 'provider_details': {'execution_time_ms': 150}, + }, + } + }, + ) + ], + ) + ] + + messages = VercelAIAdapter.load_messages(ui_messages) + assert messages == snapshot( + [ + ModelResponse( + parts=[ + BuiltinToolCallPart( + tool_name='web_search', + args={'query': 'test'}, + tool_call_id='bt_load', + id='call_456', + provider_details={'tool_type': 'web_search_preview'}, + provider_name='openai', + ), + BuiltinToolReturnPart( + tool_name='web_search', + content='{"results": []}', + tool_call_id='bt_load', + timestamp=IsDatetime(), + provider_name='openai', + provider_details={'execution_time_ms': 150}, + ), + ], + timestamp=IsDatetime(), + ) + ] + ) + + +async def test_adapter_load_messages_builtin_tool_error_with_provider_details(): + """Test loading builtin tool error with provider_details - ensures ToolOutputErrorPart metadata is extracted.""" + ui_messages = [ + UIMessage( + id='msg1', + role='assistant', + parts=[ + ToolOutputErrorPart( + type='tool-web_search', + tool_call_id='bt_error', + input='{"query": "test"}', + error_text='Search failed: rate limit exceeded', + state='output-error', + provider_executed=True, + call_provider_metadata={ + 'pydantic_ai': { + 'call_meta': { + 'id': 'call_789', + 'provider_name': 'openai', + 'provider_details': {'tool_type': 'web_search_preview'}, + }, + 'return_meta': { + 'provider_name': 'openai', + 'provider_details': {'error_code': 'RATE_LIMIT'}, + }, + } + }, + ) + ], + ) + ] + + messages = VercelAIAdapter.load_messages(ui_messages) + assert messages == snapshot( + [ + ModelResponse( + parts=[ + BuiltinToolCallPart( + tool_name='web_search', + args={'query': 'test'}, + tool_call_id='bt_error', + id='call_789', + provider_details={'tool_type': 'web_search_preview'}, + provider_name='openai', + ), + BuiltinToolReturnPart( + tool_name='web_search', + content={'error_text': 'Search failed: rate limit exceeded', 'is_error': True}, + tool_call_id='bt_error', 
+ timestamp=IsDatetime(), + provider_name='openai', + provider_details={'error_code': 'RATE_LIMIT'}, + ), + ], + timestamp=IsDatetime(), + ) + ] + ) + + +async def test_adapter_load_messages_tool_input_streaming_part(): + """Test loading ToolInputStreamingPart which doesn't have call_provider_metadata yet.""" + from pydantic_ai.ui.vercel_ai.request_types import ToolInputStreamingPart + + ui_messages = [ + UIMessage( + id='msg1', + role='assistant', + parts=[ + ToolInputStreamingPart( + type='tool-my_tool', + tool_call_id='tc_streaming', + input='{"query": "test"}', + state='input-streaming', + ) + ], + ) + ] + + messages = VercelAIAdapter.load_messages(ui_messages) + assert messages == snapshot( + [ + ModelResponse( + parts=[ + ToolCallPart(tool_name='my_tool', args={'query': 'test'}, tool_call_id='tc_streaming'), + ], + timestamp=IsDatetime(), + ) + ] + ) + + +async def test_adapter_load_messages_dynamic_tool_input_streaming_part(): + """Test loading DynamicToolInputStreamingPart which doesn't have call_provider_metadata yet.""" + from pydantic_ai.ui.vercel_ai.request_types import DynamicToolInputStreamingPart + + ui_messages = [ + UIMessage( + id='msg1', + role='assistant', + parts=[ + DynamicToolInputStreamingPart( + tool_name='dynamic_tool', + tool_call_id='tc_dyn_streaming', + input='{"arg": 123}', + state='input-streaming', + ) + ], + ) + ] + + messages = VercelAIAdapter.load_messages(ui_messages) + assert messages == snapshot( + [ + ModelResponse( + parts=[ + ToolCallPart(tool_name='dynamic_tool', args={'arg': 123}, tool_call_id='tc_dyn_streaming'), + ], + timestamp=IsDatetime(), + ) + ] + ) + + +async def test_adapter_dump_messages_tool_error_with_provider_metadata(): + """Test dumping ToolCallPart with RetryPromptPart includes provider metadata with provider_name.""" + messages = [ + ModelRequest(parts=[UserPromptPart(content='Do task')]), + ModelResponse( + parts=[ + ToolCallPart( + tool_name='failing_tool', + args={'x': 1}, + tool_call_id='tc_fail', + 
id='call_fail_id', + provider_name='google', + provider_details={'attempt': 1}, + ), + ] + ), + ModelRequest( + parts=[ + RetryPromptPart( + content='Tool execution failed', + tool_name='failing_tool', + tool_call_id='tc_fail', + ) + ] + ), + ] + + ui_messages = VercelAIAdapter.dump_messages(messages) + ui_message_dicts = [msg.model_dump() for msg in ui_messages] + + assert ui_message_dicts == snapshot( + [ + { + 'id': IsStr(), + 'role': 'user', + 'metadata': None, + 'parts': [{'type': 'text', 'text': 'Do task', 'state': 'done', 'provider_metadata': None}], + }, + { + 'id': IsStr(), + 'role': 'assistant', + 'metadata': None, + 'parts': [ + { + 'type': 'dynamic-tool', + 'tool_name': 'failing_tool', + 'tool_call_id': 'tc_fail', + 'state': 'output-error', + 'input': '{"x":1}', + 'error_text': """\ +Tool execution failed + +Fix the errors and try again.\ +""", + 'call_provider_metadata': { + 'pydantic_ai': { + 'id': 'call_fail_id', + 'provider_name': 'google', + 'provider_details': {'attempt': 1}, + } + }, + } + ], + }, + ] + ) + + +async def test_event_stream_text_with_provider_metadata(): + """Test that text events include provider_metadata when TextPart has provider_name and provider_details.""" + + async def event_generator(): + part = TextPart( + content='Hello with details', + id='text_event_id', + provider_name='openai', + provider_details={'model': 'gpt-4', 'tokens': 10}, + ) + yield PartStartEvent(index=0, part=part) + yield PartEndEvent(index=0, part=part) + + request = SubmitMessage( + id='foo', + messages=[ + UIMessage( + id='bar', + role='user', + parts=[TextUIPart(text='Test')], + ), + ], + ) + event_stream = VercelAIEventStream(run_input=request) + events = [ + '[DONE]' if '[DONE]' in event else json.loads(event.removeprefix('data: ')) + async for event in event_stream.encode_stream(event_stream.transform_stream(event_generator())) + ] + + assert events == snapshot( + [ + {'type': 'start'}, + {'type': 'start-step'}, + { + 'type': 'text-start', + 'id': 
IsStr(), + 'providerMetadata': { + 'pydantic_ai': { + 'id': 'text_event_id', + 'provider_name': 'openai', + 'provider_details': {'model': 'gpt-4', 'tokens': 10}, + } + }, + }, + { + 'type': 'text-delta', + 'id': IsStr(), + 'delta': 'Hello with details', + 'providerMetadata': { + 'pydantic_ai': { + 'id': 'text_event_id', + 'provider_name': 'openai', + 'provider_details': {'model': 'gpt-4', 'tokens': 10}, + } + }, + }, + { + 'type': 'text-end', + 'id': IsStr(), + 'providerMetadata': { + 'pydantic_ai': { + 'id': 'text_event_id', + 'provider_name': 'openai', + 'provider_details': {'model': 'gpt-4', 'tokens': 10}, + } + }, + }, + {'type': 'finish-step'}, + {'type': 'finish'}, + '[DONE]', + ] + ) + + +async def test_event_stream_tool_call_end_with_provider_metadata(): + """Test that tool-input-available events include provider_metadata with provider_name.""" + + async def event_generator(): + part = ToolCallPart( + tool_name='my_tool', + tool_call_id='tc_meta', + args={'key': 'value'}, + id='tool_call_id_123', + provider_name='anthropic', + provider_details={'tool_index': 0}, + ) + yield PartStartEvent(index=0, part=part) + yield PartEndEvent(index=0, part=part) + + request = SubmitMessage( + id='foo', + messages=[ + UIMessage( + id='bar', + role='user', + parts=[TextUIPart(text='Test')], + ), + ], + ) + event_stream = VercelAIEventStream(run_input=request) + events = [ + '[DONE]' if '[DONE]' in event else json.loads(event.removeprefix('data: ')) + async for event in event_stream.encode_stream(event_stream.transform_stream(event_generator())) + ] + + assert events == snapshot( + [ + {'type': 'start'}, + {'type': 'start-step'}, + {'type': 'tool-input-start', 'toolCallId': 'tc_meta', 'toolName': 'my_tool'}, + {'type': 'tool-input-delta', 'toolCallId': 'tc_meta', 'inputTextDelta': '{"key":"value"}'}, + { + 'type': 'tool-input-available', + 'toolCallId': 'tc_meta', + 'toolName': 'my_tool', + 'input': {'key': 'value'}, + 'providerMetadata': { + 'pydantic_ai': { + 'id': 
'tool_call_id_123', + 'provider_name': 'anthropic', + 'provider_details': {'tool_index': 0}, + } + }, + }, + {'type': 'finish-step'}, + {'type': 'finish'}, + '[DONE]', + ] + ) + + +async def test_event_stream_builtin_tool_call_end_with_provider_metadata(): + """Test that builtin tool-input-available events include provider_name in provider_metadata.""" + + async def event_generator(): + part = BuiltinToolCallPart( + tool_name='web_search', + tool_call_id='btc_meta', + args={'query': 'test'}, + id='builtin_call_id_456', + provider_name='openai', + provider_details={'tool_type': 'web_search_preview'}, + ) + yield PartStartEvent(index=0, part=part) + yield PartEndEvent(index=0, part=part) + + request = SubmitMessage( + id='foo', + messages=[ + UIMessage( + id='bar', + role='user', + parts=[TextUIPart(text='Search')], + ), + ], + ) + event_stream = VercelAIEventStream(run_input=request) + events = [ + '[DONE]' if '[DONE]' in event else json.loads(event.removeprefix('data: ')) + async for event in event_stream.encode_stream(event_stream.transform_stream(event_generator())) + ] + + assert events == snapshot( + [ + {'type': 'start'}, + {'type': 'start-step'}, + {'type': 'tool-input-start', 'toolCallId': 'btc_meta', 'toolName': 'web_search', 'providerExecuted': True}, + {'type': 'tool-input-delta', 'toolCallId': 'btc_meta', 'inputTextDelta': '{"query":"test"}'}, + { + 'type': 'tool-input-available', + 'toolCallId': 'btc_meta', + 'toolName': 'web_search', + 'input': {'query': 'test'}, + 'providerExecuted': True, + 'providerMetadata': { + 'pydantic_ai': { + 'provider_name': 'openai', + 'provider_details': {'tool_type': 'web_search_preview'}, + 'id': 'builtin_call_id_456', + } + }, + }, + {'type': 'finish-step'}, + {'type': 'finish'}, + '[DONE]', + ] + ) + + +async def test_event_stream_thinking_delta_with_provider_metadata(): + """Test that thinking delta events include provider_metadata.""" + from pydantic_ai.messages import ThinkingPartDelta + + async def 
event_generator(): + part = ThinkingPart( + content='', + id='think_delta', + signature='initial_sig', + provider_name='anthropic', + provider_details={'model': 'claude'}, + ) + yield PartStartEvent(index=0, part=part) + yield PartDeltaEvent( + index=0, + delta=ThinkingPartDelta( + content_delta='thinking...', + signature_delta='updated_sig', + provider_name='anthropic', + provider_details={'chunk': 1}, + ), + ) + yield PartEndEvent( + index=0, + part=ThinkingPart( + content='thinking...', + id='think_delta', + signature='updated_sig', + provider_name='anthropic', + ), + ) + + request = SubmitMessage( + id='foo', + messages=[ + UIMessage( + id='bar', + role='user', + parts=[TextUIPart(text='Think')], + ), + ], + ) + event_stream = VercelAIEventStream(run_input=request) + events = [ + '[DONE]' if '[DONE]' in event else json.loads(event.removeprefix('data: ')) + async for event in event_stream.encode_stream(event_stream.transform_stream(event_generator())) + ] + + assert events == snapshot( + [ + {'type': 'start'}, + {'type': 'start-step'}, + { + 'type': 'reasoning-start', + 'id': IsStr(), + 'providerMetadata': { + 'pydantic_ai': { + 'id': 'think_delta', + 'signature': 'initial_sig', + 'provider_name': 'anthropic', + 'provider_details': {'model': 'claude'}, + } + }, + }, + { + 'type': 'reasoning-delta', + 'id': IsStr(), + 'delta': 'thinking...', + 'providerMetadata': { + 'pydantic_ai': { + 'provider_name': 'anthropic', + 'signature': 'updated_sig', + 'provider_details': {'chunk': 1}, + } + }, + }, + { + 'type': 'reasoning-end', + 'id': IsStr(), + 'providerMetadata': { + 'pydantic_ai': {'id': 'think_delta', 'signature': 'updated_sig', 'provider_name': 'anthropic'} + }, + }, + {'type': 'finish-step'}, + {'type': 'finish'}, + '[DONE]', + ] + ) + + +def _sync_timestamps(original: list[ModelMessage], new: list[ModelMessage]) -> None: + """Utility function to sync timestamps between original and new messages.""" + for orig_msg, new_msg in zip(original, new): + for 
orig_part, new_part in zip(orig_msg.parts, new_msg.parts): + if hasattr(orig_part, 'timestamp') and hasattr(new_part, 'timestamp'): + new_part.timestamp = orig_part.timestamp # pyright: ignore[reportAttributeAccessIssue, reportUnknownMemberType] + if hasattr(orig_msg, 'timestamp') and hasattr(new_msg, 'timestamp'): + new_msg.timestamp = orig_msg.timestamp # pyright: ignore[reportAttributeAccessIssue] + + +class TestDumpProviderMetadata: + async def test_dump_provider_metadata_filters_none_values(self): + """Test that dump_provider_metadata only includes non-None values.""" + + # All None - should return None + result = dump_provider_metadata(id=None, provider_name=None, provider_details=None) + assert result is None + + # Some values + result = dump_provider_metadata(id='test_id', provider_name=None, provider_details={'key': 'val'}) + assert result == {'pydantic_ai': {'id': 'test_id', 'provider_details': {'key': 'val'}}} + + # All values + result = dump_provider_metadata( + id='full_id', + signature='sig', + provider_name='provider', + provider_details={'detail': 1}, + ) + assert result == { + 'pydantic_ai': { + 'id': 'full_id', + 'signature': 'sig', + 'provider_name': 'provider', + 'provider_details': {'detail': 1}, + } + } + + async def test_dump_provider_metadata_wrapper_key(self): + """Test that dump_provider_metadata includes the wrapper key.""" + + result = dump_provider_metadata( + wrapper_key='test', id='test_id', provider_name='test_provider', provider_details={'test_detail': 1} + ) + assert result == { + 'test': {'id': 'test_id', 'provider_name': 'test_provider', 'provider_details': {'test_detail': 1}} + } + + # Test with None wrapper key + result = dump_provider_metadata( + None, id='test_id', provider_name='test_provider', provider_details={'test_detail': 1} + ) + assert result == {'id': 'test_id', 'provider_name': 'test_provider', 'provider_details': {'test_detail': 1}} + + +class TestLoadProviderMetadata: + async def 
test_load_provider_metadata_loads_provider_metadata(self): + """Test that load_provider_metadata loads provider metadata.""" + + provider_metadata = { + 'pydantic_ai': {'id': 'test_id', 'provider_name': 'test_provider', 'provider_details': {'test_detail': 1}} + } + result = load_provider_metadata(provider_metadata) + assert result == {'id': 'test_id', 'provider_name': 'test_provider', 'provider_details': {'test_detail': 1}} + + async def test_load_provider_metadata_loads_provider_metadata_incorrect_key(self): + """Test that load_provider_metadata fails to load provider metadata if the wrapper key is not present.""" + + provider_metadata = {'test': {'id': 'test_id'}} + result = load_provider_metadata(provider_metadata) + assert result == {}