Skip to content
Draft
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
42 changes: 37 additions & 5 deletions pydantic_ai_slim/pydantic_ai/models/openai.py
Original file line number Diff line number Diff line change
Expand Up @@ -329,6 +329,12 @@ class OpenAIResponsesModelSettings(OpenAIChatModelSettings, total=False):
Corresponds to the `web_search_call.action.sources` value of the `include` parameter in the Responses API.
"""

openai_include_web_search_content_annotations: bool
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I'd like to include the word raw here :)

"""Whether to include the raw output for text annotations in `TextPart.provider_details['annotations']`

This is opt-in to avoid confusion with native annotation support once https://github.com/pydantic/pydantic-ai/issues/3126 is resolved.
"""

openai_include_file_search_results: bool
"""Whether to include the file search results in the response.

Expand Down Expand Up @@ -1279,10 +1285,22 @@ def _process_response( # noqa: C901
elif isinstance(item, responses.ResponseOutputMessage):
for content in item.content:
if isinstance(content, responses.ResponseOutputText): # pragma: no branch
part_provider_details: dict[str, Any] | None = None
part_provider_details: dict[str, Any] = {}
if content.logprobs:
part_provider_details = {'logprobs': _map_logprobs(content.logprobs)}
items.append(TextPart(content.text, id=item.id, provider_details=part_provider_details))
part_provider_details['logprobs'] = _map_logprobs(content.logprobs)
# NOTE: can be removed after https://github.com/pydantic/pydantic-ai/issues/3126
# TODO: discuss gate via model settings
# if content.annotations and model_settings.openai_include_web_search_content_annotations:
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Feel free to pass model settings into this method

if content.annotations:
part_provider_details['annotations'] = content.annotations

items.append(
TextPart(
content=content.text,
id=item.id,
provider_details=part_provider_details or None,
)
)
elif isinstance(item, responses.ResponseFunctionToolCall):
items.append(
ToolCallPart(
Expand Down Expand Up @@ -2387,8 +2405,22 @@ async def _get_event_iterator(self) -> AsyncIterator[ModelResponseStreamEvent]:
pass # content already accumulated via delta events

elif isinstance(chunk, responses.ResponseOutputTextAnnotationAddedEvent):
# TODO(Marcelo): We should support annotations in the future.
pass # there's nothing we need to do here
# NOTE: can be removed after https://github.com/pydantic/pydantic-ai/issues/3126
# TODO: discuss gate via model settings
# if not model_settings.openai_include_web_search_content_annotations:
# continue

annotations_by_item_id = getattr(self, '_annotations_by_item_id', {})
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

We've gotta find a better way to do this :)

Since we need to be able to update the existing provider_details['annotations'], look at how handle_thinking_delta accepts a callable that modifies the existing provider details as an alternative to a dict. Can we use that method here as well?

setattr(self, '_annotations_by_item_id', annotations_by_item_id)
annotations_by_item_id.setdefault(chunk.item_id, []).append(chunk.annotation)

for event in self._parts_manager.handle_text_delta(
vendor_part_id=chunk.item_id,
content='',
id=chunk.item_id,
provider_details={'annotations': annotations_by_item_id[chunk.item_id]},
):
yield event

elif isinstance(chunk, responses.ResponseTextDeltaEvent):
for event in self._parts_manager.handle_text_delta(
Expand Down
Loading