pydantic-ai-slim 1.0.7__tar.gz → 1.0.8__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Note: this release of pydantic-ai-slim has been flagged as potentially problematic.
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/PKG-INFO +3 -3
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/_agent_graph.py +23 -9
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/ag_ui.py +7 -7
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/agent/__init__.py +2 -4
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/models/__init__.py +4 -6
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/result.py +3 -5
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/usage.py +2 -2
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/.gitignore +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/LICENSE +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/README.md +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/__init__.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/__main__.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/_a2a.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/_cli.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/_function_schema.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/_griffe.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/_mcp.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/_otel_messages.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/_output.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/_parts_manager.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/_run_context.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/_system_prompt.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/_thinking_part.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/_tool_manager.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/_utils.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/agent/abstract.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/agent/wrapper.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/builtin_tools.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/common_tools/__init__.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/common_tools/duckduckgo.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/common_tools/tavily.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/direct.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/durable_exec/__init__.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/durable_exec/dbos/__init__.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/durable_exec/dbos/_agent.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/durable_exec/dbos/_mcp_server.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/durable_exec/dbos/_model.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/durable_exec/dbos/_utils.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/durable_exec/temporal/__init__.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/durable_exec/temporal/_agent.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/durable_exec/temporal/_function_toolset.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/durable_exec/temporal/_logfire.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/durable_exec/temporal/_mcp_server.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/durable_exec/temporal/_model.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/durable_exec/temporal/_run_context.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/durable_exec/temporal/_toolset.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/exceptions.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/ext/__init__.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/ext/aci.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/ext/langchain.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/format_prompt.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/mcp.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/messages.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/models/anthropic.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/models/bedrock.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/models/cohere.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/models/fallback.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/models/function.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/models/gemini.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/models/google.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/models/groq.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/models/huggingface.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/models/instrumented.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/models/mcp_sampling.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/models/mistral.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/models/openai.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/models/test.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/models/wrapper.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/output.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/profiles/__init__.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/profiles/_json_schema.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/profiles/amazon.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/profiles/anthropic.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/profiles/cohere.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/profiles/deepseek.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/profiles/google.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/profiles/grok.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/profiles/groq.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/profiles/harmony.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/profiles/meta.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/profiles/mistral.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/profiles/moonshotai.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/profiles/openai.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/profiles/qwen.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/providers/__init__.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/providers/anthropic.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/providers/azure.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/providers/bedrock.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/providers/cerebras.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/providers/cohere.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/providers/deepseek.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/providers/fireworks.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/providers/gateway.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/providers/github.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/providers/google.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/providers/google_gla.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/providers/google_vertex.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/providers/grok.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/providers/groq.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/providers/heroku.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/providers/huggingface.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/providers/litellm.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/providers/mistral.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/providers/moonshotai.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/providers/ollama.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/providers/openai.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/providers/openrouter.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/providers/together.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/providers/vercel.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/py.typed +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/retries.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/run.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/settings.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/tools.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/toolsets/__init__.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/toolsets/_dynamic.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/toolsets/abstract.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/toolsets/approval_required.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/toolsets/combined.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/toolsets/external.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/toolsets/filtered.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/toolsets/function.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/toolsets/prefixed.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/toolsets/prepared.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/toolsets/renamed.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pydantic_ai/toolsets/wrapper.py +0 -0
- {pydantic_ai_slim-1.0.7 → pydantic_ai_slim-1.0.8}/pyproject.toml +0 -0

PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: pydantic-ai-slim
-Version: 1.0.7
+Version: 1.0.8
 Summary: Agent Framework / shim to use Pydantic with LLMs, slim package
 Project-URL: Homepage, https://github.com/pydantic/pydantic-ai/tree/main/pydantic_ai_slim
 Project-URL: Source, https://github.com/pydantic/pydantic-ai/tree/main/pydantic_ai_slim
@@ -33,7 +33,7 @@ Requires-Dist: genai-prices>=0.0.23
 Requires-Dist: griffe>=1.3.2
 Requires-Dist: httpx>=0.27
 Requires-Dist: opentelemetry-api>=1.28.0
-Requires-Dist: pydantic-graph==1.0.7
+Requires-Dist: pydantic-graph==1.0.8
 Requires-Dist: pydantic>=2.10
 Requires-Dist: typing-inspection>=0.4.0
 Provides-Extra: a2a
@@ -57,7 +57,7 @@ Requires-Dist: dbos>=1.13.0; extra == 'dbos'
 Provides-Extra: duckduckgo
 Requires-Dist: ddgs>=9.0.0; extra == 'duckduckgo'
 Provides-Extra: evals
-Requires-Dist: pydantic-evals==1.0.7; extra == 'evals'
+Requires-Dist: pydantic-evals==1.0.8; extra == 'evals'
 Provides-Extra: google
 Requires-Dist: google-genai>=1.31.0; extra == 'google'
 Provides-Extra: groq
pydantic_ai/_agent_graph.py

@@ -8,6 +8,7 @@ from collections import defaultdict, deque
 from collections.abc import AsyncIterator, Awaitable, Callable, Iterator, Sequence
 from contextlib import asynccontextmanager, contextmanager
 from contextvars import ContextVar
+from copy import deepcopy
 from dataclasses import field, replace
 from typing import TYPE_CHECKING, Any, Generic, Literal, TypeGuard, cast
 
@@ -186,9 +187,8 @@ class UserPromptNode(AgentNode[DepsT, NodeRunEndT]):
             messages = ctx_messages.messages
             ctx_messages.used = True
 
-
-
-        messages.extend(message_history)
+        # Replace the `capture_run_messages` list with the message history
+        messages[:] = _clean_message_history(ctx.state.message_history)
         # Use the `capture_run_messages` list as the message history so that new messages are added to it
         ctx.state.message_history = messages
         ctx.deps.new_message_index = len(messages)
@@ -455,7 +455,18 @@ class ModelRequestNode(AgentNode[DepsT, NodeRunEndT]):
         # This will raise errors for any tool name conflicts
         ctx.deps.tool_manager = await ctx.deps.tool_manager.for_run_step(run_context)
 
-
+        original_history = ctx.state.message_history[:]
+        message_history = await _process_message_history(original_history, ctx.deps.history_processors, run_context)
+        # Never merge the new `ModelRequest` with the one preceding it, to keep `new_messages()` from accidentally including part of the existing message history
+        message_history = [*_clean_message_history(message_history[:-1]), message_history[-1]]
+        # `ctx.state.message_history` is the same list used by `capture_run_messages`, so we should replace its contents, not the reference
+        ctx.state.message_history[:] = message_history
+        # Update the new message index to ensure `result.new_messages()` returns the correct messages
+        ctx.deps.new_message_index -= len(original_history) - len(message_history)
+
+        # Do one more cleaning pass to merge possible consecutive trailing `ModelRequest`s into one, with tool call parts before user parts,
+        # but don't store it in the message history on state.
+        # See `tests/test_tools.py::test_parallel_tool_return_with_deferred` for an example where this is necessary
         message_history = _clean_message_history(message_history)
 
         model_request_parameters = await _prepare_request_parameters(ctx)
@@ -465,7 +476,7 @@ class ModelRequestNode(AgentNode[DepsT, NodeRunEndT]):
         usage = ctx.state.usage
         if ctx.deps.usage_limits.count_tokens_before_request:
             # Copy to avoid modifying the original usage object with the counted usage
-            usage =
+            usage = deepcopy(usage)
 
             counted_usage = await ctx.deps.model.count_tokens(message_history, model_settings, model_request_parameters)
             usage.incr(counted_usage)
@@ -1086,12 +1097,11 @@ def build_agent_graph(
 
 
 async def _process_message_history(
-
+    messages: list[_messages.ModelMessage],
     processors: Sequence[HistoryProcessor[DepsT]],
     run_context: RunContext[DepsT],
 ) -> list[_messages.ModelMessage]:
     """Process message history through a sequence of processors."""
-    messages = state.message_history
     for processor in processors:
         takes_ctx = is_takes_ctx(processor)
 
@@ -1109,8 +1119,12 @@ async def _process_message_history(
             sync_processor = cast(_HistoryProcessorSync, processor)
             messages = await run_in_executor(sync_processor, messages)
 
-
-
+    if len(messages) == 0:
+        raise exceptions.UserError('Processed history cannot be empty.')
+
+    if not isinstance(messages[-1], _messages.ModelRequest):
+        raise exceptions.UserError('Processed history must end with a `ModelRequest`.')
+
     return messages
 
 
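Note on the `_process_message_history` change above: history processors now receive the message list directly and their output is validated, so a processor must return a non-empty list that ends with a `ModelRequest`, otherwise the run raises `UserError`. A minimal sketch of a compliant processor (the model string and the trimming rule are illustrative, not part of this release):

    from pydantic_ai import Agent
    from pydantic_ai.messages import ModelMessage, ModelRequest


    def keep_recent(messages: list[ModelMessage]) -> list[ModelMessage]:
        # Trim old turns, but respect the new 1.0.8 checks: never return an
        # empty list, and always end with a `ModelRequest`.
        trimmed = messages[-6:]
        if not trimmed or not isinstance(trimmed[-1], ModelRequest):
            return messages  # fall back to the untrimmed history
        return trimmed


    agent = Agent('openai:gpt-4o', history_processors=[keep_recent])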
pydantic_ai/ag_ui.py

@@ -559,15 +559,15 @@ async def _handle_tool_result_event(
         content=result.model_response_str(),
     )
 
-    # Now check for
-
-    if isinstance(
-        yield
-    elif isinstance(
+    # Now check for AG-UI events returned by the tool calls.
+    possible_event = result.metadata or result.content
+    if isinstance(possible_event, BaseEvent):
+        yield possible_event
+    elif isinstance(possible_event, str | bytes):  # pragma: no branch
         # Avoid iterable check for strings and bytes.
         pass
-    elif isinstance(
-        for item in
+    elif isinstance(possible_event, Iterable):  # pragma: no branch
+        for item in possible_event:  # type: ignore[reportUnknownMemberType]
             if isinstance(item, BaseEvent):  # pragma: no branch
                 yield item
 
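Note on the `ag_ui` change above: `_handle_tool_result_event` now looks for AG-UI events in `result.metadata` before falling back to `result.content`. A hedged sketch of a tool handing an event to the AG-UI stream via `ToolReturn.metadata`; the exact event construction depends on the `ag_ui` package version and is illustrative here:

    from ag_ui.core import EventType, StateSnapshotEvent

    from pydantic_ai import Agent
    from pydantic_ai.messages import ToolReturn

    agent = Agent('openai:gpt-4o')  # model string is illustrative


    @agent.tool_plain
    def update_state() -> ToolReturn:
        # Carry the event in `metadata` so it reaches the AG-UI stream without
        # being fed back to the model as tool output.
        event = StateSnapshotEvent(type=EventType.STATE_SNAPSHOT, snapshot={'status': 'done'})
        return ToolReturn(return_value='state updated', metadata=[event])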
pydantic_ai/agent/__init__.py

@@ -614,12 +614,10 @@ class Agent(AbstractAgent[AgentDepsT, OutputDataT]):
             instrumentation_settings = None
             tracer = NoOpTracer()
 
-        graph_deps = _agent_graph.GraphAgentDeps[
-            AgentDepsT, RunOutputDataT
-        ](
+        graph_deps = _agent_graph.GraphAgentDeps[AgentDepsT, RunOutputDataT](
             user_deps=deps,
             prompt=user_prompt,
-            new_message_index=
+            new_message_index=len(message_history) if message_history else 0,
             model=model_used,
             model_settings=model_settings,
             usage_limits=usage_limits,
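Note on the `new_message_index` change above: it is now derived from the supplied `message_history`, which keeps `result.new_messages()` limited to messages created by the current run. A minimal sketch (model string illustrative):

    from pydantic_ai import Agent

    agent = Agent('openai:gpt-4o')  # model string is illustrative

    first = agent.run_sync('What is the capital of France?')
    # Continue the conversation by passing the earlier messages back in.
    second = agent.run_sync('And of Italy?', message_history=first.all_messages())

    # `all_messages()` includes both runs; `new_messages()` only what this run added.
    assert len(second.new_messages()) < len(second.all_messages())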
pydantic_ai/models/__init__.py

@@ -783,6 +783,8 @@ def cached_async_http_client(*, provider: str | None = None, timeout: int = 600,
     The client is cached based on the provider parameter. If provider is None, it's used for non-provider specific
     requests (like downloading images). Multiple agents and calls can share the same client when they use the same provider.
 
+    Each client will get its own transport with its own connection pool. The default pool size is defined by `httpx.DEFAULT_LIMITS`.
+
     There are good reasons why in production you should use a `httpx.AsyncClient` as an async context manager as
     described in [encode/httpx#2026](https://github.com/encode/httpx/pull/2026), but when experimenting or showing
     examples, it's very useful not to.
@@ -793,6 +795,8 @@ def cached_async_http_client(*, provider: str | None = None, timeout: int = 600,
     client = _cached_async_http_client(provider=provider, timeout=timeout, connect=connect)
     if client.is_closed:
         # This happens if the context manager is used, so we need to create a new client.
+        # Since there is no API from `functools.cache` to clear the cache for a specific
+        # key, clear the entire cache here as a workaround.
         _cached_async_http_client.cache_clear()
         client = _cached_async_http_client(provider=provider, timeout=timeout, connect=connect)
     return client
@@ -801,17 +805,11 @@ def cached_async_http_client(*, provider: str | None = None, timeout: int = 600,
 @cache
 def _cached_async_http_client(provider: str | None, timeout: int = 600, connect: int = 5) -> httpx.AsyncClient:
     return httpx.AsyncClient(
-        transport=_cached_async_http_transport(),
         timeout=httpx.Timeout(timeout=timeout, connect=connect),
         headers={'User-Agent': get_user_agent()},
     )
 
 
-@cache
-def _cached_async_http_transport() -> httpx.AsyncHTTPTransport:
-    return httpx.AsyncHTTPTransport()
-
-
 DataT = TypeVar('DataT', str, bytes)
 
 
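Note on the `cached_async_http_client` change above: the shared `httpx.AsyncHTTPTransport` is gone, so each cached client now owns its own transport and connection pool. A small sketch of the caching behaviour, assuming the helper keeps the signature shown above:

    from pydantic_ai.models import cached_async_http_client

    # Calls with the same `provider` share one cached client; different providers get
    # separate clients, each now owning its own transport and connection pool.
    openai_client = cached_async_http_client(provider='openai')
    assert openai_client is cached_async_http_client(provider='openai')
    assert openai_client is not cached_async_http_client(provider='groq')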
pydantic_ai/result.py

@@ -1,7 +1,7 @@
 from __future__ import annotations as _annotations
 
 from collections.abc import AsyncIterator, Awaitable, Callable, Iterable
-from copy import
+from copy import deepcopy
 from dataclasses import dataclass, field
 from datetime import datetime
 from typing import Generic, cast, overload
@@ -56,7 +56,7 @@ class AgentStream(Generic[AgentDepsT, OutputDataT]):
     _initial_run_ctx_usage: RunUsage = field(init=False)
 
     def __post_init__(self):
-        self._initial_run_ctx_usage =
+        self._initial_run_ctx_usage = deepcopy(self._run_ctx.usage)
 
     async def stream_output(self, *, debounce_by: float | None = 0.1) -> AsyncIterator[OutputDataT]:
         """Asynchronously stream the (validated) agent outputs."""
@@ -322,9 +322,7 @@ class StreamedRunResult(Generic[AgentDepsT, OutputDataT]):
             self.all_messages(output_tool_return_content=output_tool_return_content)
         )
 
-    def new_messages(
-        self, *, output_tool_return_content: str | None = None
-    ) -> list[_messages.ModelMessage]:  # pragma: no cover
+    def new_messages(self, *, output_tool_return_content: str | None = None) -> list[_messages.ModelMessage]:
         """Return new messages associated with this run.
 
         Messages from older runs are excluded.
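Note on the `result.py` changes above: `AgentStream` now deep-copies the initial usage, and `StreamedRunResult.new_messages()` is no longer excluded from coverage. A minimal streaming sketch (model string illustrative):

    import asyncio

    from pydantic_ai import Agent

    agent = Agent('openai:gpt-4o')  # model string is illustrative


    async def main() -> None:
        async with agent.run_stream('Tell me a joke.') as result:
            async for text in result.stream_text():
                print(text)
            # Usage and per-run messages are available once the stream completes.
            print(result.usage())
            print(result.new_messages())


    asyncio.run(main())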
pydantic_ai/usage.py

@@ -135,7 +135,7 @@ class RunUsage(UsageBase):
     """Number of successful tool calls executed during the run."""
 
     input_tokens: int = 0
-    """Total number of
+    """Total number of input/prompt tokens."""
 
     cache_write_tokens: int = 0
     """Total number of tokens written to the cache."""
@@ -150,7 +150,7 @@ class RunUsage(UsageBase):
     """Total number of audio tokens read from the cache."""
 
     output_tokens: int = 0
-    """Total number of
+    """Total number of output/completion tokens."""
 
     details: dict[str, int] = dataclasses.field(default_factory=dict)
     """Any extra details returned by the model."""