pydantic-ai-slim 0.8.0__py3-none-any.whl → 1.0.0__py3-none-any.whl
This diff compares publicly released versions of the package as they appear in their respective public registries; it is provided for informational purposes only.
Potentially problematic release: this version of pydantic-ai-slim might be problematic.
- pydantic_ai/__init__.py +28 -2
- pydantic_ai/_a2a.py +1 -1
- pydantic_ai/_agent_graph.py +323 -156
- pydantic_ai/_function_schema.py +5 -5
- pydantic_ai/_griffe.py +2 -1
- pydantic_ai/_otel_messages.py +2 -2
- pydantic_ai/_output.py +31 -35
- pydantic_ai/_parts_manager.py +7 -5
- pydantic_ai/_run_context.py +3 -1
- pydantic_ai/_system_prompt.py +2 -2
- pydantic_ai/_tool_manager.py +32 -28
- pydantic_ai/_utils.py +14 -26
- pydantic_ai/ag_ui.py +82 -51
- pydantic_ai/agent/__init__.py +84 -17
- pydantic_ai/agent/abstract.py +35 -4
- pydantic_ai/agent/wrapper.py +6 -0
- pydantic_ai/builtin_tools.py +2 -2
- pydantic_ai/common_tools/duckduckgo.py +4 -2
- pydantic_ai/durable_exec/temporal/__init__.py +70 -17
- pydantic_ai/durable_exec/temporal/_agent.py +93 -11
- pydantic_ai/durable_exec/temporal/_function_toolset.py +53 -6
- pydantic_ai/durable_exec/temporal/_logfire.py +6 -3
- pydantic_ai/durable_exec/temporal/_mcp_server.py +2 -1
- pydantic_ai/durable_exec/temporal/_model.py +2 -2
- pydantic_ai/durable_exec/temporal/_run_context.py +2 -1
- pydantic_ai/durable_exec/temporal/_toolset.py +2 -1
- pydantic_ai/exceptions.py +45 -2
- pydantic_ai/format_prompt.py +2 -2
- pydantic_ai/mcp.py +15 -27
- pydantic_ai/messages.py +156 -44
- pydantic_ai/models/__init__.py +20 -7
- pydantic_ai/models/anthropic.py +10 -17
- pydantic_ai/models/bedrock.py +55 -57
- pydantic_ai/models/cohere.py +3 -3
- pydantic_ai/models/fallback.py +2 -2
- pydantic_ai/models/function.py +25 -23
- pydantic_ai/models/gemini.py +13 -14
- pydantic_ai/models/google.py +19 -5
- pydantic_ai/models/groq.py +127 -39
- pydantic_ai/models/huggingface.py +5 -5
- pydantic_ai/models/instrumented.py +49 -21
- pydantic_ai/models/mcp_sampling.py +3 -1
- pydantic_ai/models/mistral.py +8 -8
- pydantic_ai/models/openai.py +37 -42
- pydantic_ai/models/test.py +24 -4
- pydantic_ai/output.py +27 -32
- pydantic_ai/profiles/__init__.py +3 -3
- pydantic_ai/profiles/groq.py +1 -1
- pydantic_ai/profiles/openai.py +25 -4
- pydantic_ai/providers/__init__.py +4 -0
- pydantic_ai/providers/anthropic.py +2 -3
- pydantic_ai/providers/bedrock.py +3 -2
- pydantic_ai/providers/google_vertex.py +2 -1
- pydantic_ai/providers/groq.py +21 -2
- pydantic_ai/providers/litellm.py +134 -0
- pydantic_ai/result.py +173 -52
- pydantic_ai/retries.py +52 -31
- pydantic_ai/run.py +12 -5
- pydantic_ai/tools.py +127 -23
- pydantic_ai/toolsets/__init__.py +4 -1
- pydantic_ai/toolsets/_dynamic.py +4 -4
- pydantic_ai/toolsets/abstract.py +18 -2
- pydantic_ai/toolsets/approval_required.py +32 -0
- pydantic_ai/toolsets/combined.py +7 -12
- pydantic_ai/toolsets/{deferred.py → external.py} +11 -5
- pydantic_ai/toolsets/filtered.py +1 -1
- pydantic_ai/toolsets/function.py +58 -21
- pydantic_ai/toolsets/wrapper.py +2 -1
- pydantic_ai/usage.py +44 -8
- {pydantic_ai_slim-0.8.0.dist-info → pydantic_ai_slim-1.0.0.dist-info}/METADATA +8 -9
- pydantic_ai_slim-1.0.0.dist-info/RECORD +121 -0
- pydantic_ai_slim-0.8.0.dist-info/RECORD +0 -119
- {pydantic_ai_slim-0.8.0.dist-info → pydantic_ai_slim-1.0.0.dist-info}/WHEEL +0 -0
- {pydantic_ai_slim-0.8.0.dist-info → pydantic_ai_slim-1.0.0.dist-info}/entry_points.txt +0 -0
- {pydantic_ai_slim-0.8.0.dist-info → pydantic_ai_slim-1.0.0.dist-info}/licenses/LICENSE +0 -0
pydantic_ai/mcp.py
CHANGED
@@ -5,12 +5,12 @@ import functools
 import warnings
 from abc import ABC, abstractmethod
 from asyncio import Lock
-from collections.abc import AsyncIterator, Awaitable, Sequence
+from collections.abc import AsyncIterator, Awaitable, Callable, Sequence
 from contextlib import AbstractAsyncContextManager, AsyncExitStack, asynccontextmanager
 from dataclasses import field, replace
 from datetime import timedelta
 from pathlib import Path
-from typing import Any, Callable
+from typing import Any
 
 import anyio
 import httpx
@@ -20,6 +20,7 @@ from typing_extensions import Self, assert_never, deprecated
 
 from pydantic_ai.tools import RunContext, ToolDefinition
 
+from .direct import model_request
 from .toolsets.abstract import AbstractToolset, ToolsetTool
 
 try:
@@ -300,6 +301,8 @@ class MCPServer(AbstractToolset[Any], ABC):
         return self
 
     async def __aexit__(self, *args: Any) -> bool | None:
+        if self._running_count == 0:
+            raise ValueError('MCPServer.__aexit__ called more times than __aenter__')
         async with self._enter_lock:
             self._running_count -= 1
             if self._running_count == 0 and self._exit_stack is not None:
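The two added lines guard against unbalanced use of the server as an async context manager. A minimal sketch of the balanced usage the check expects, assuming a locally runnable MCP server command (the `uv run mcp-run-python` invocation is borrowed from the docstring example further down):

```python
import asyncio

from pydantic_ai.mcp import MCPServerStdio


async def main():
    server = MCPServerStdio('uv', args=['run', 'mcp-run-python', 'stdio'])
    async with server:  # __aenter__ increments the internal running count
        ...  # list or call tools while the subprocess is running
    # __aexit__ has brought the count back to 0 here; calling
    # server.__aexit__(None, None, None) again would now raise ValueError
    # instead of silently pushing the count negative.


asyncio.run(main())
```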
@@ -327,11 +330,7 @@ class MCPServer(AbstractToolset[Any], ABC):
         if stop_sequences := params.stopSequences:  # pragma: no branch
             model_settings['stop_sequences'] = stop_sequences
 
-        model_response = await self.sampling_model.request(
-            pai_messages,
-            model_settings,
-            models.ModelRequestParameters(),
-        )
+        model_response = await model_request(self.sampling_model, pai_messages, model_settings=model_settings)
         return mcp_types.CreateMessageResult(
             role='assistant',
             content=_mcp.map_from_model_response(model_response),
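For context, `model_request` is the agent-less request helper exposed by `pydantic_ai.direct`, which the sampling callback above now delegates to instead of calling the model directly. A rough standalone sketch; the model name, prompt, and settings are placeholder values:

```python
from pydantic_ai.direct import model_request
from pydantic_ai.messages import ModelRequest


async def sample_once():
    # One-off request without an Agent, mirroring how MCP sampling now
    # forwards the converted messages and model settings in a single call.
    response = await model_request(
        'openai:gpt-4o',  # placeholder model name
        [ModelRequest.user_text_prompt('Say hello')],
        model_settings={'max_tokens': 64},
    )
    return response.parts
```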
@@ -401,16 +400,7 @@ class MCPServerStdio(MCPServer):
     from pydantic_ai.mcp import MCPServerStdio
 
     server = MCPServerStdio(  # (1)!
-        'deno',
-        args=[
-            'run',
-            '-N',
-            '-R=node_modules',
-            '-W=node_modules',
-            '--node-modules-dir=auto',
-            'jsr:@pydantic/mcp-run-python',
-            'stdio',
-        ]
+        'uv', args=['run', 'mcp-run-python', 'stdio'], timeout=10
     )
     agent = Agent('openai:gpt-4o', toolsets=[server])
 
@@ -419,7 +409,7 @@ class MCPServerStdio(MCPServer):
             ...
     ```
 
-    1. See [MCP Run Python](
+    1. See [MCP Run Python](https://github.com/pydantic/mcp-run-python) for more information.
     2. This will start the server as a subprocess and connect to it.
     """
 
@@ -455,6 +445,7 @@ class MCPServerStdio(MCPServer):
         self,
         command: str,
         args: Sequence[str],
+        *,
         env: dict[str, str] | None = None,
         cwd: str | Path | None = None,
         tool_prefix: str | None = None,
@@ -467,7 +458,6 @@ class MCPServerStdio(MCPServer):
         sampling_model: models.Model | None = None,
         max_retries: int = 1,
         elicitation_callback: ElicitationFnT | None = None,
-        *,
         id: str | None = None,
     ):
         """Build a new MCP server.
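Taken together with the previous hunk, the `*` now sits directly after `args`, so every later parameter of `MCPServerStdio` is keyword-only in 1.0.0. A short sketch of a call that satisfies the new signature (the `env` value is illustrative; `timeout` appears in the updated docstring example above):

```python
from pydantic_ai.mcp import MCPServerStdio

# 0.8.0 allowed env/cwd/... to be passed positionally; in 1.0.0 they must
# be spelled out as keywords or the call raises TypeError.
server = MCPServerStdio(
    'uv',
    args=['run', 'mcp-run-python', 'stdio'],
    env={'PYTHONUNBUFFERED': '1'},  # illustrative value
    timeout=10,
)
```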
@@ -581,8 +571,8 @@ class _MCPServerHTTP(MCPServer):
 
     def __init__(
         self,
-        *,
         url: str,
+        *,
         headers: dict[str, str] | None = None,
         http_client: httpx.AsyncClient | None = None,
         id: str | None = None,
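The swap in `_MCPServerHTTP.__init__` goes the other way: `url` becomes positional while `headers`, `http_client`, and the rest stay keyword-only, which is what the docstring updates below reflect. A small sketch under that reading (the header value is made up):

```python
from pydantic_ai.mcp import MCPServerSSE

# 1.0.0: the URL may be passed positionally.
server = MCPServerSSE('http://localhost:3001/sse', headers={'x-example': '1'})

# 0.8.0 required the keyword form:
# server = MCPServerSSE(url='http://localhost:3001/sse')
```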
@@ -732,16 +722,15 @@ class MCPServerSSE(_MCPServerHTTP):
     from pydantic_ai import Agent
     from pydantic_ai.mcp import MCPServerSSE
 
-    server = MCPServerSSE(url='http://localhost:3001/sse')  # (1)!
+    server = MCPServerSSE('http://localhost:3001/sse')
     agent = Agent('openai:gpt-4o', toolsets=[server])
 
     async def main():
-        async with agent:  # (2)!
+        async with agent:  # (1)!
             ...
     ```
 
-    1.
-    2. This will connect to a server running on `localhost:3001`.
+    1. This will connect to a server running on `localhost:3001`.
     """
 
     @property
@@ -765,7 +754,7 @@ class MCPServerHTTP(MCPServerSSE):
     from pydantic_ai import Agent
     from pydantic_ai.mcp import MCPServerHTTP
 
-    server = MCPServerHTTP(url='http://localhost:3001/sse')
+    server = MCPServerHTTP('http://localhost:3001/sse')
     agent = Agent('openai:gpt-4o', toolsets=[server])
 
     async def main():
@@ -773,8 +762,7 @@ class MCPServerHTTP(MCPServerSSE):
             ...
     ```
 
-    1.
-    2. This will connect to a server running on `localhost:3001`.
+    1. This will connect to a server running on `localhost:3001`.
     """
 
 