pydantic-ai-slim 1.0.4.tar.gz → 1.0.5.tar.gz
This diff shows the changes between publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/PKG-INFO +3 -3
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/_parts_manager.py +3 -1
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/messages.py +3 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/models/openai.py +95 -35
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/.gitignore +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/LICENSE +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/README.md +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/__init__.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/__main__.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/_a2a.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/_agent_graph.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/_cli.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/_function_schema.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/_griffe.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/_mcp.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/_otel_messages.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/_output.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/_run_context.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/_system_prompt.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/_thinking_part.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/_tool_manager.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/_utils.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/ag_ui.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/agent/__init__.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/agent/abstract.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/agent/wrapper.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/builtin_tools.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/common_tools/__init__.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/common_tools/duckduckgo.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/common_tools/tavily.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/direct.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/durable_exec/__init__.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/durable_exec/dbos/__init__.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/durable_exec/dbos/_agent.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/durable_exec/dbos/_mcp_server.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/durable_exec/dbos/_model.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/durable_exec/dbos/_utils.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/durable_exec/temporal/__init__.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/durable_exec/temporal/_agent.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/durable_exec/temporal/_function_toolset.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/durable_exec/temporal/_logfire.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/durable_exec/temporal/_mcp_server.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/durable_exec/temporal/_model.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/durable_exec/temporal/_run_context.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/durable_exec/temporal/_toolset.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/exceptions.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/ext/__init__.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/ext/aci.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/ext/langchain.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/format_prompt.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/mcp.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/models/__init__.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/models/anthropic.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/models/bedrock.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/models/cohere.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/models/fallback.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/models/function.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/models/gemini.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/models/google.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/models/groq.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/models/huggingface.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/models/instrumented.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/models/mcp_sampling.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/models/mistral.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/models/test.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/models/wrapper.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/output.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/profiles/__init__.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/profiles/_json_schema.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/profiles/amazon.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/profiles/anthropic.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/profiles/cohere.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/profiles/deepseek.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/profiles/google.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/profiles/grok.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/profiles/groq.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/profiles/harmony.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/profiles/meta.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/profiles/mistral.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/profiles/moonshotai.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/profiles/openai.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/profiles/qwen.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/providers/__init__.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/providers/anthropic.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/providers/azure.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/providers/bedrock.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/providers/cerebras.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/providers/cohere.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/providers/deepseek.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/providers/fireworks.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/providers/gateway.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/providers/github.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/providers/google.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/providers/google_gla.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/providers/google_vertex.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/providers/grok.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/providers/groq.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/providers/heroku.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/providers/huggingface.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/providers/litellm.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/providers/mistral.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/providers/moonshotai.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/providers/ollama.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/providers/openai.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/providers/openrouter.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/providers/together.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/providers/vercel.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/py.typed +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/result.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/retries.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/run.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/settings.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/tools.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/toolsets/__init__.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/toolsets/_dynamic.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/toolsets/abstract.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/toolsets/approval_required.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/toolsets/combined.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/toolsets/external.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/toolsets/filtered.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/toolsets/function.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/toolsets/prefixed.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/toolsets/prepared.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/toolsets/renamed.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/toolsets/wrapper.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/usage.py +0 -0
- {pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pyproject.toml +0 -0
{pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: pydantic-ai-slim
-Version: 1.0.4
+Version: 1.0.5
 Summary: Agent Framework / shim to use Pydantic with LLMs, slim package
 Project-URL: Homepage, https://github.com/pydantic/pydantic-ai/tree/main/pydantic_ai_slim
 Project-URL: Source, https://github.com/pydantic/pydantic-ai/tree/main/pydantic_ai_slim
@@ -33,7 +33,7 @@ Requires-Dist: genai-prices>=0.0.23
 Requires-Dist: griffe>=1.3.2
 Requires-Dist: httpx>=0.27
 Requires-Dist: opentelemetry-api>=1.28.0
-Requires-Dist: pydantic-graph==1.0.4
+Requires-Dist: pydantic-graph==1.0.5
 Requires-Dist: pydantic>=2.10
 Requires-Dist: typing-inspection>=0.4.0
 Provides-Extra: a2a
@@ -57,7 +57,7 @@ Requires-Dist: dbos>=1.13.0; extra == 'dbos'
 Provides-Extra: duckduckgo
 Requires-Dist: ddgs>=9.0.0; extra == 'duckduckgo'
 Provides-Extra: evals
-Requires-Dist: pydantic-evals==1.0.4; extra == 'evals'
+Requires-Dist: pydantic-evals==1.0.5; extra == 'evals'
 Provides-Extra: google
 Requires-Dist: google-genai>=1.31.0; extra == 'google'
 Provides-Extra: groq
{pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/_parts_manager.py

@@ -71,6 +71,7 @@ class ModelResponsePartsManager:
         *,
         vendor_part_id: VendorId | None,
         content: str,
+        id: str | None = None,
         thinking_tags: tuple[str, str] | None = None,
         ignore_leading_whitespace: bool = False,
     ) -> ModelResponseStreamEvent | None:
@@ -85,6 +86,7 @@ class ModelResponsePartsManager:
                 of text. If None, a new part will be created unless the latest part is already
                 a TextPart.
             content: The text content to append to the appropriate TextPart.
+            id: An optional id for the text part.
             thinking_tags: If provided, will handle content between the thinking tags as thinking parts.
             ignore_leading_whitespace: If True, will ignore leading whitespace in the content.

@@ -137,7 +139,7 @@ class ModelResponsePartsManager:

         # There is no existing text part that should be updated, so create a new one
         new_part_index = len(self._parts)
-        part = TextPart(content=content)
+        part = TextPart(content=content, id=id)
         if vendor_part_id is not None:
             self._vendor_id_to_part_index[vendor_part_id] = new_part_index
         self._parts.append(part)
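The `id` parameter added above flows from a streamed vendor item into the resulting `TextPart`. A minimal sketch of how a model integration might call this after the change; the ids are invented, and `ModelResponsePartsManager` is an internal class, so treat this as illustration rather than public API:

```python
from pydantic_ai._parts_manager import ModelResponsePartsManager

manager = ModelResponsePartsManager()

# The first delta creates a new TextPart carrying the vendor-assigned id.
event = manager.handle_text_delta(vendor_part_id='msg_abc', content='Hello, ', id='msg_abc')
# Later deltas with the same vendor_part_id extend that same part.
manager.handle_text_delta(vendor_part_id='msg_abc', content='world!', id='msg_abc')
```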
{pydantic_ai_slim-1.0.4 → pydantic_ai_slim-1.0.5}/pydantic_ai/models/openai.py

@@ -190,10 +190,19 @@ class OpenAIResponsesModelSettings(OpenAIChatModelSettings, total=False):
     This can be useful for debugging and understanding the model's reasoning process.
     One of `concise` or `detailed`.

-    Check the [OpenAI
+    Check the [OpenAI Reasoning documentation](https://platform.openai.com/docs/guides/reasoning?api-mode=responses#reasoning-summaries)
     for more details.
     """

+    openai_send_reasoning_ids: bool
+    """Whether to send reasoning IDs from the message history to the model. Enabled by default.
+
+    This can result in errors like `"Item 'rs_123' of type 'reasoning' was provided without its required following item."`
+    if the message history you're sending does not match exactly what was received from the Responses API in a previous response,
+    for example if you're using a [history processor](../../message-history.md#processing-message-history).
+    In that case, you'll want to disable this.
+    """
+
     openai_truncation: Literal['disabled', 'auto']
     """The truncation strategy to use for the model response.

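If the message history sent back to the model has been rewritten (for example by a history processor), the stored reasoning ids may no longer match what the Responses API expects, which is exactly the error described in the docstring above. A hedged sketch of opting out via the new setting; the model name is only an example:

```python
from pydantic_ai import Agent
from pydantic_ai.models.openai import OpenAIResponsesModel, OpenAIResponsesModelSettings

agent = Agent(
    OpenAIResponsesModel('o3'),  # example model name
    model_settings=OpenAIResponsesModelSettings(openai_send_reasoning_ids=False),
)
```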
@@ -859,24 +868,34 @@ class OpenAIResponsesModel(Model):
         for item in response.output:
             if isinstance(item, responses.ResponseReasoningItem):
                 signature = item.encrypted_content
-
-
-
+                if item.summary:
+                    for summary in item.summary:
+                        # We use the same id for all summaries so that we can merge them on the round trip.
+                        items.append(
+                            ThinkingPart(
+                                content=summary.text,
+                                id=item.id,
+                                signature=signature,
+                                provider_name=self.system if signature else None,
+                            )
+                        )
+                        # We only need to store the signature once.
+                        signature = None
+                elif signature:
                     items.append(
                         ThinkingPart(
-                            content=
+                            content='',
                             id=item.id,
                             signature=signature,
-                            provider_name=self.system
+                            provider_name=self.system,
                         )
                     )
-                    signature = None
                 # NOTE: We don't currently handle the raw CoT from gpt-oss `reasoning_text`: https://cookbook.openai.com/articles/gpt-oss/handle-raw-cot
                 # If you need this, please file an issue.
             elif isinstance(item, responses.ResponseOutputMessage):
                 for content in item.content:
                     if isinstance(content, responses.ResponseOutputText):  # pragma: no branch
-                        items.append(TextPart(content.text))
+                        items.append(TextPart(content.text, id=item.id))
             elif isinstance(item, responses.ResponseFunctionToolCall):
                 items.append(
                     ToolCallPart(item.name, item.arguments, tool_call_id=_combine_tool_call_ids(item.call_id, item.id))
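To illustrate the round-trip comment in the hunk above: a single reasoning item with two summaries now becomes two `ThinkingPart`s that share the item id, with the encrypted signature kept only on the first. The values below are invented:

```python
from pydantic_ai.messages import ThinkingPart

parts = [
    ThinkingPart(
        content='First summary...',
        id='rs_123',
        signature='gAAAA...',  # encrypted_content, stored only once
        provider_name='openai',
    ),
    ThinkingPart(
        content='Second summary...',
        id='rs_123',  # same id, so the summaries can be merged on the round trip
    ),
]
```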
@@ -958,7 +977,7 @@ class OpenAIResponsesModel(Model):
         else:
             tool_choice = 'auto'

-        instructions, openai_messages = await self._map_messages(messages)
+        instructions, openai_messages = await self._map_messages(messages, model_settings)
         reasoning = self._get_reasoning(model_settings)

         text: responses.ResponseTextConfigParam | None = None
@@ -1074,7 +1093,7 @@ class OpenAIResponsesModel(Model):
         }

     async def _map_messages(  # noqa: C901
-        self, messages: list[ModelMessage]
+        self, messages: list[ModelMessage], model_settings: OpenAIResponsesModelSettings
     ) -> tuple[str | NotGiven, list[responses.ResponseInputItemParam]]:
         """Just maps a `pydantic_ai.Message` to a `openai.types.responses.ResponseInputParam`."""
         openai_messages: list[responses.ResponseInputItemParam] = []
@@ -1112,30 +1131,70 @@ class OpenAIResponsesModel(Model):
                     else:
                         assert_never(part)
             elif isinstance(message, ModelResponse):
+                message_item: responses.ResponseOutputMessageParam | None = None
                 reasoning_item: responses.ResponseReasoningItemParam | None = None
                 for item in message.parts:
                     if isinstance(item, TextPart):
-
+                        if item.id and item.id.startswith('msg_'):
+                            if message_item is None or message_item['id'] != item.id:  # pragma: no branch
+                                message_item = responses.ResponseOutputMessageParam(
+                                    role='assistant',
+                                    id=item.id or _utils.generate_tool_call_id(),
+                                    content=[],
+                                    type='message',
+                                    status='completed',
+                                )
+                                openai_messages.append(message_item)
+
+                            message_item['content'] = [
+                                *message_item['content'],
+                                responses.ResponseOutputTextParam(
+                                    text=item.content, type='output_text', annotations=[]
+                                ),
+                            ]
+                        else:
+                            openai_messages.append(
+                                responses.EasyInputMessageParam(role='assistant', content=item.content)
+                            )
                     elif isinstance(item, ToolCallPart):
                         openai_messages.append(self._map_tool_call(item))
                     elif isinstance(item, BuiltinToolCallPart | BuiltinToolReturnPart):
                         # We don't currently track built-in tool calls from OpenAI
                         pass
                     elif isinstance(item, ThinkingPart):
-                        if
-
-
-
-
-
-
-
-
-
-
-
-
+                        if (
+                            item.id
+                            and item.provider_name == self.system
+                            and OpenAIModelProfile.from_profile(
+                                self.profile
+                            ).openai_supports_encrypted_reasoning_content
+                            and model_settings.get('openai_send_reasoning_ids', True)
+                        ):
+                            if (
+                                reasoning_item is None
+                                or reasoning_item['id'] != item.id
+                                and (item.signature or item.content)
+                            ):  # pragma: no branch
+                                reasoning_item = responses.ResponseReasoningItemParam(
+                                    id=item.id,
+                                    summary=[],
+                                    encrypted_content=item.signature,
+                                    type='reasoning',
+                                )
+                                openai_messages.append(reasoning_item)
+
+                            if item.content:
+                                reasoning_item['summary'] = [
+                                    *reasoning_item['summary'],
+                                    Summary(text=item.content, type='summary_text'),
+                                ]
+                        else:
+                            start_tag, end_tag = self.profile.thinking_tags
+                            openai_messages.append(
+                                responses.EasyInputMessageParam(
+                                    role='assistant', content='\n'.join([start_tag, item.content, end_tag])
+                                )
+                            )
                     else:
                         assert_never(item)
             else:
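This mapping is what lets reasoning and output-message ids survive a round trip when the previous response is passed back untouched. A sketch of the intended usage; the model name and prompts are illustrative:

```python
from pydantic_ai import Agent
from pydantic_ai.models.openai import OpenAIResponsesModel

agent = Agent(OpenAIResponsesModel('o3'))  # example model name

result = agent.run_sync('Plan a three-day trip to Rome.')
# Passing the history back unmodified re-sends reasoning and message items with their original ids.
followup = agent.run_sync('Now compress it to two days.', message_history=result.all_messages())
```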
@@ -1391,15 +1450,14 @@ class OpenAIResponsesStreamedResponse(StreamedResponse):

             elif isinstance(chunk, responses.ResponseOutputItemDoneEvent):
                 if isinstance(chunk.item, responses.ResponseReasoningItem):
-
-
-
-
-
-
-
-
-                    pass
+                    if signature := chunk.item.encrypted_content:  # pragma: no branch
+                        # Add the signature to the part corresponding to the first summary item
+                        yield self._parts_manager.handle_thinking_delta(
+                            vendor_part_id=f'{chunk.item.id}-0',
+                            id=chunk.item.id,
+                            signature=signature,
+                            provider_name=self.provider_name,
+                        )

             elif isinstance(chunk, responses.ResponseReasoningSummaryPartAddedEvent):
                 yield self._parts_manager.handle_thinking_delta(
@@ -1426,7 +1484,9 @@ class OpenAIResponsesStreamedResponse(StreamedResponse):
                 pass  # there's nothing we need to do here

             elif isinstance(chunk, responses.ResponseTextDeltaEvent):
-                maybe_event = self._parts_manager.handle_text_delta(
+                maybe_event = self._parts_manager.handle_text_delta(
+                    vendor_part_id=chunk.item_id, content=chunk.delta, id=chunk.item_id
+                )
                 if maybe_event is not None:  # pragma: no branch
                     yield maybe_event

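On the streaming side, the encrypted signature only arrives once the reasoning item is done, and the hunks above attach it to the part created for the first summary chunk, while text deltas now carry the Responses item id. A rough sketch of the equivalent parts-manager calls, with invented ids and assuming `handle_thinking_delta` also accepts a `content` argument as used elsewhere in this module:

```python
from pydantic_ai._parts_manager import ModelResponsePartsManager

manager = ModelResponsePartsManager()

# Summary text arrives first and creates the thinking part for summary index 0.
manager.handle_thinking_delta(vendor_part_id='rs_123-0', content='First summary...', id='rs_123')
# When the reasoning item completes, its signature is merged into that same part.
manager.handle_thinking_delta(
    vendor_part_id='rs_123-0', id='rs_123', signature='gAAAA...', provider_name='openai'
)
```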