pydantic-ai-slim 0.2.12__tar.gz → 0.2.14__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of pydantic-ai-slim might be problematic.
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/PKG-INFO +4 -4
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/_output.py +1 -1
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/mcp.py +30 -32
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/models/gemini.py +10 -1
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/models/google.py +2 -2
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/models/openai.py +3 -1
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/tools.py +1 -1
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/.gitignore +0 -0
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/LICENSE +0 -0
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/README.md +0 -0
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/__init__.py +0 -0
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/__main__.py +0 -0
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/_a2a.py +0 -0
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/_agent_graph.py +0 -0
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/_cli.py +0 -0
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/_function_schema.py +0 -0
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/_griffe.py +0 -0
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/_parts_manager.py +0 -0
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/_system_prompt.py +0 -0
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/_utils.py +0 -0
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/agent.py +0 -0
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/common_tools/__init__.py +0 -0
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/common_tools/duckduckgo.py +0 -0
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/common_tools/tavily.py +0 -0
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/direct.py +0 -0
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/exceptions.py +0 -0
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/format_as_xml.py +0 -0
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/format_prompt.py +0 -0
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/messages.py +0 -0
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/models/__init__.py +0 -0
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/models/anthropic.py +0 -0
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/models/bedrock.py +0 -0
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/models/cohere.py +0 -0
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/models/fallback.py +0 -0
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/models/function.py +0 -0
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/models/groq.py +0 -0
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/models/instrumented.py +0 -0
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/models/mistral.py +0 -0
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/models/test.py +0 -0
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/models/wrapper.py +0 -0
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/profiles/__init__.py +0 -0
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/profiles/_json_schema.py +0 -0
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/profiles/amazon.py +0 -0
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/profiles/anthropic.py +0 -0
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/profiles/cohere.py +0 -0
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/profiles/deepseek.py +0 -0
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/profiles/google.py +0 -0
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/profiles/grok.py +0 -0
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/profiles/meta.py +0 -0
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/profiles/mistral.py +0 -0
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/profiles/openai.py +0 -0
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/profiles/qwen.py +0 -0
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/providers/__init__.py +0 -0
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/providers/anthropic.py +0 -0
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/providers/azure.py +0 -0
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/providers/bedrock.py +0 -0
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/providers/cohere.py +0 -0
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/providers/deepseek.py +0 -0
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/providers/fireworks.py +0 -0
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/providers/google.py +0 -0
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/providers/google_gla.py +0 -0
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/providers/google_vertex.py +0 -0
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/providers/grok.py +0 -0
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/providers/groq.py +0 -0
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/providers/mistral.py +0 -0
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/providers/openai.py +0 -0
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/providers/openrouter.py +0 -0
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/providers/together.py +0 -0
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/py.typed +0 -0
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/result.py +0 -0
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/settings.py +0 -0
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/usage.py +0 -0
- {pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pyproject.toml +0 -0
{pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/PKG-INFO +4 -4

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: pydantic-ai-slim
-Version: 0.2.12
+Version: 0.2.14
 Summary: Agent Framework / shim to use Pydantic with LLMs, slim package
 Author-email: Samuel Colvin <samuel@pydantic.dev>, Marcelo Trylesinski <marcelotryle@gmail.com>, David Montague <david@pydantic.dev>, Alex Hall <alex@pydantic.dev>
 License-Expression: MIT
@@ -30,11 +30,11 @@ Requires-Dist: exceptiongroup; python_version < '3.11'
 Requires-Dist: griffe>=1.3.2
 Requires-Dist: httpx>=0.27
 Requires-Dist: opentelemetry-api>=1.28.0
-Requires-Dist: pydantic-graph==0.2.12
+Requires-Dist: pydantic-graph==0.2.14
 Requires-Dist: pydantic>=2.10
 Requires-Dist: typing-inspection>=0.4.0
 Provides-Extra: a2a
-Requires-Dist: fasta2a==0.2.12; extra == 'a2a'
+Requires-Dist: fasta2a==0.2.14; extra == 'a2a'
 Provides-Extra: anthropic
 Requires-Dist: anthropic>=0.52.0; extra == 'anthropic'
 Provides-Extra: bedrock
@@ -48,7 +48,7 @@ Requires-Dist: cohere>=5.13.11; (platform_system != 'Emscripten') and extra == '
 Provides-Extra: duckduckgo
 Requires-Dist: duckduckgo-search>=7.0.0; extra == 'duckduckgo'
 Provides-Extra: evals
-Requires-Dist: pydantic-evals==0.2.12; extra == 'evals'
+Requires-Dist: pydantic-evals==0.2.14; extra == 'evals'
 Provides-Extra: google
 Requires-Dist: google-genai>=1.15.0; extra == 'google'
 Provides-Extra: groq
{pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/_output.py +1 -1

@@ -407,7 +407,7 @@ class OutputTool(Generic[OutputDataT]):
            if wrap_validation_errors:
                m = _messages.RetryPromptPart(
                    tool_name=tool_call.tool_name,
-                   content=e.errors(include_url=False),
+                   content=e.errors(include_url=False, include_context=False),
                    tool_call_id=tool_call.tool_call_id,
                )
                raise ToolRetryError(m) from e
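For context on the `include_context=False` addition (the same change appears in `tools.py` below): pydantic's `ValidationError.errors()` accepts `include_url` and `include_context` keyword arguments, and the new flag drops each error's `ctx` entry from the content that is echoed back to the model in the retry prompt. A minimal sketch, with an invented model and values:

```python
from pydantic import BaseModel, Field, ValidationError


class ToolArgs(BaseModel):
    count: int = Field(gt=0)  # constrained field, so the error carries a `ctx` entry


try:
    ToolArgs(count=-1)
except ValidationError as e:
    # With include_context=True (the default) each error dict includes ctx={'gt': 0};
    # the extra flag strips it, matching the change in this diff.
    print(e.errors(include_url=False, include_context=False))
```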
{pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/mcp.py +30 -32

@@ -6,18 +6,17 @@ from abc import ABC, abstractmethod
 from collections.abc import AsyncIterator, Sequence
 from contextlib import AsyncExitStack, asynccontextmanager
 from dataclasses import dataclass
-from datetime import timedelta
 from pathlib import Path
 from types import TracebackType
 from typing import Any
 
 import anyio
 from anyio.streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream
-from mcp.shared.message import SessionMessage
 from mcp.types import (
     BlobResourceContents,
     EmbeddedResource,
     ImageContent,
+    JSONRPCMessage,
     LoggingLevel,
     TextContent,
     TextResourceContents,
@@ -30,8 +29,8 @@ from pydantic_ai.tools import ToolDefinition
 
 try:
     from mcp.client.session import ClientSession
+    from mcp.client.sse import sse_client
     from mcp.client.stdio import StdioServerParameters, stdio_client
-    from mcp.client.streamable_http import streamablehttp_client
 except ImportError as _import_error:
     raise ImportError(
         'Please install the `mcp` package to use the MCP server, '
@@ -57,8 +56,8 @@ class MCPServer(ABC):
     """
 
     _client: ClientSession
-    _read_stream: MemoryObjectReceiveStream[SessionMessage | Exception]
-    _write_stream: MemoryObjectSendStream[SessionMessage]
+    _read_stream: MemoryObjectReceiveStream[JSONRPCMessage | Exception]
+    _write_stream: MemoryObjectSendStream[JSONRPCMessage]
     _exit_stack: AsyncExitStack
 
     @abstractmethod
@@ -66,7 +65,10 @@ class MCPServer(ABC):
     async def client_streams(
         self,
     ) -> AsyncIterator[
-        tuple[MemoryObjectReceiveStream[SessionMessage | Exception], MemoryObjectSendStream[SessionMessage]]
+        tuple[
+            MemoryObjectReceiveStream[JSONRPCMessage | Exception],
+            MemoryObjectSendStream[JSONRPCMessage],
+        ]
     ]:
         """Create the streams for the MCP server."""
         raise NotImplementedError('MCP Server subclasses must implement this method.')
@@ -263,7 +265,10 @@ class MCPServerStdio(MCPServer):
     async def client_streams(
         self,
     ) -> AsyncIterator[
-        tuple[MemoryObjectReceiveStream[SessionMessage | Exception], MemoryObjectSendStream[SessionMessage]]
+        tuple[
+            MemoryObjectReceiveStream[JSONRPCMessage | Exception],
+            MemoryObjectSendStream[JSONRPCMessage],
+        ]
     ]:
         server = StdioServerParameters(command=self.command, args=list(self.args), env=self.env, cwd=self.cwd)
         async with stdio_client(server=server) as (read_stream, write_stream):
@@ -283,11 +288,11 @@ class MCPServerStdio(MCPServer):
 class MCPServerHTTP(MCPServer):
     """An MCP server that connects over streamable HTTP connections.
 
-    This class implements the
-    See <https://modelcontextprotocol.io/specification/
+    This class implements the SSE transport from the MCP specification.
+    See <https://spec.modelcontextprotocol.io/specification/2024-11-05/basic/transports/#http-with-sse> for more information.
 
-    The
-
+    The name "HTTP" is used since this implemented will be adapted in future to use the new
+    [Streamable HTTP](https://github.com/modelcontextprotocol/specification/pull/206) currently in development.
 
     !!! note
         Using this class as an async context manager will create a new pool of HTTP connections to connect
@@ -298,7 +303,7 @@ class MCPServerHTTP(MCPServer):
     from pydantic_ai import Agent
     from pydantic_ai.mcp import MCPServerHTTP
 
-    server = MCPServerHTTP('http://localhost:3001/
+    server = MCPServerHTTP('http://localhost:3001/sse')  # (1)!
     agent = Agent('openai:gpt-4o', mcp_servers=[server])
 
     async def main():
@@ -311,13 +316,13 @@ class MCPServerHTTP(MCPServer):
     """
 
     url: str
-    """The URL of the SSE
+    """The URL of the SSE endpoint on the MCP server.
 
-    For example for a server running locally, this might be `http://localhost:3001/
+    For example for a server running locally, this might be `http://localhost:3001/sse`.
     """
 
     headers: dict[str, Any] | None = None
-    """Optional HTTP headers to be sent with each request to the endpoint.
+    """Optional HTTP headers to be sent with each request to the SSE endpoint.
 
     These headers will be passed directly to the underlying `httpx.AsyncClient`.
     Useful for authentication, custom headers, or other HTTP-specific configurations.
@@ -330,8 +335,8 @@ class MCPServerHTTP(MCPServer):
     If the connection cannot be established within this time, the operation will fail.
     """
 
-    sse_read_timeout: float =
-    """Maximum time
+    sse_read_timeout: float = 5 * 60
+    """Maximum time in seconds to wait for new SSE messages before timing out.
 
     This timeout applies to the long-lived SSE connection after it's established.
     If no new messages are received within this time, the connection will be considered stale
@@ -353,28 +358,21 @@ class MCPServerHTTP(MCPServer):
     For example, if `tool_prefix='foo'`, then a tool named `bar` will be registered as `foo_bar`
     """
 
-    def __post_init__(self):
-        # streamablehttp_client expects timedeltas, so we accept them too to match,
-        # but primarily work with floats for a simpler user API.
-
-        if isinstance(self.timeout, timedelta):
-            self.timeout = self.timeout.total_seconds()
-
-        if isinstance(self.sse_read_timeout, timedelta):
-            self.sse_read_timeout = self.sse_read_timeout.total_seconds()
-
     @asynccontextmanager
     async def client_streams(
         self,
     ) -> AsyncIterator[
-        tuple[MemoryObjectReceiveStream[SessionMessage | Exception], MemoryObjectSendStream[SessionMessage]]
+        tuple[
+            MemoryObjectReceiveStream[JSONRPCMessage | Exception],
+            MemoryObjectSendStream[JSONRPCMessage],
+        ]
     ]:  # pragma: no cover
-        async with streamablehttp_client(
+        async with sse_client(
            url=self.url,
            headers=self.headers,
-           timeout=
-           sse_read_timeout=
-        ) as (read_stream, write_stream
+           timeout=self.timeout,
+           sse_read_timeout=self.sse_read_timeout,
+        ) as (read_stream, write_stream):
            yield read_stream, write_stream
 
    def _get_log_level(self) -> LoggingLevel | None:
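Taken together, the `mcp.py` changes switch `MCPServerHTTP` from `streamablehttp_client` to `sse_client`, type the streams as `JSONRPCMessage` instead of `SessionMessage`, and remove the `__post_init__` that accepted `timedelta` timeouts, so the timeout fields are plain floats in seconds. A usage sketch based only on the fields visible in this hunk (`url`, `headers`, `timeout`, `sse_read_timeout`, `tool_prefix`); the values are illustrative:

```python
from pydantic_ai import Agent
from pydantic_ai.mcp import MCPServerHTTP

server = MCPServerHTTP(
    url='http://localhost:3001/sse',              # SSE endpoint, per the updated docstring
    headers={'Authorization': 'Bearer <token>'},  # forwarded to the underlying httpx.AsyncClient
    timeout=5,                                    # seconds to establish the connection
    sse_read_timeout=5 * 60,                      # seconds to wait for new SSE messages (the default)
    tool_prefix='weather',                        # a server tool `bar` is registered as `weather_bar`
)
agent = Agent('openai:gpt-4o', mcp_servers=[server])
```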
{pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/models/gemini.py +10 -1

@@ -431,7 +431,8 @@ class GeminiStreamedResponse(StreamedResponse):
                if maybe_event is not None:  # pragma: no branch
                    yield maybe_event
            else:
-
+               if not any([key in gemini_part for key in ['function_response', 'thought']]):
+                   raise AssertionError(f'Unexpected part: {gemini_part}')  # pragma: no cover
 
    async def _get_gemini_responses(self) -> AsyncIterator[_GeminiResponse]:
        # This method exists to ensure we only yield completed items, so we don't need to worry about
@@ -605,6 +606,11 @@ class _GeminiFileDataPart(TypedDict):
    file_data: Annotated[_GeminiFileData, pydantic.Field(alias='fileData')]
 
 
+class _GeminiThoughtPart(TypedDict):
+    thought: bool
+    thought_signature: Annotated[str, pydantic.Field(alias='thoughtSignature')]
+
+
 class _GeminiFunctionCallPart(TypedDict):
    function_call: Annotated[_GeminiFunctionCall, pydantic.Field(alias='functionCall')]
 
@@ -665,6 +671,8 @@ def _part_discriminator(v: Any) -> str:
        return 'inline_data'  # pragma: no cover
    elif 'fileData' in v:
        return 'file_data'  # pragma: no cover
+   elif 'thought' in v:
+       return 'thought'
    elif 'functionCall' in v or 'function_call' in v:
        return 'function_call'
    elif 'functionResponse' in v or 'function_response' in v:
@@ -682,6 +690,7 @@ _GeminiPartUnion = Annotated[
        Annotated[_GeminiFunctionResponsePart, pydantic.Tag('function_response')],
        Annotated[_GeminiInlineDataPart, pydantic.Tag('inline_data')],
        Annotated[_GeminiFileDataPart, pydantic.Tag('file_data')],
+       Annotated[_GeminiThoughtPart, pydantic.Tag('thought')],
    ],
    pydantic.Discriminator(_part_discriminator),
 ]
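The `gemini.py` changes add a `thought` part to the tagged union that the Gemini response payload is validated against. The following is a self-contained sketch of the same pydantic pattern (a callable `Discriminator` plus `Tag`-annotated `TypedDict`s); the names mirror the diff, but the union is trimmed to two members and the sample data is invented:

```python
from typing import Annotated, Any, Union

import pydantic
from typing_extensions import TypedDict


class _GeminiTextPart(TypedDict):
    text: str


class _GeminiThoughtPart(TypedDict):
    thought: bool
    thought_signature: Annotated[str, pydantic.Field(alias='thoughtSignature')]


def _part_discriminator(v: Any) -> str:
    # Mirrors the diff's ordering: detect a thought part, otherwise fall back to text.
    if 'thought' in v:
        return 'thought'
    return 'text'


_PartUnion = Annotated[
    Union[
        Annotated[_GeminiTextPart, pydantic.Tag('text')],
        Annotated[_GeminiThoughtPart, pydantic.Tag('thought')],
    ],
    pydantic.Discriminator(_part_discriminator),
]

adapter = pydantic.TypeAdapter(_PartUnion)
print(adapter.validate_python({'thought': True, 'thoughtSignature': 'sig'}))
# -> {'thought': True, 'thought_signature': 'sig'}
```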
{pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/models/google.py +2 -2

@@ -399,7 +399,7 @@ class GeminiStreamedResponse(StreamedResponse):
            raise UnexpectedModelBehavior('Streamed response has no content field')  # pragma: no cover
        assert candidate.content.parts is not None
        for part in candidate.content.parts:
-           if part.text:
+           if part.text is not None:
                yield self._parts_manager.handle_text_delta(vendor_part_id='content', content=part.text)
            elif part.function_call:
                maybe_event = self._parts_manager.handle_tool_call_delta(
@@ -447,7 +447,7 @@ def _process_response_from_parts(
 ) -> ModelResponse:
    items: list[ModelResponsePart] = []
    for part in parts:
-       if part.text:
+       if part.text is not None:
            items.append(TextPart(content=part.text))
        elif part.function_call:
            assert part.function_call.name is not None
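The `google.py` change from `if part.text:` to `if part.text is not None:` matters because an empty string is falsy: under the old check an empty text part returned by the model was never turned into a `TextPart`. A tiny illustration using a stand-in part type (not the google-genai class):

```python
from dataclasses import dataclass
from typing import Optional


@dataclass
class FakePart:
    text: Optional[str] = None


for part in (FakePart('hello'), FakePart(''), FakePart(None)):
    # '' is falsy, so `if part.text:` skips it, while the explicit None check keeps it.
    print(f'{part.text!r}: truthy={bool(part.text)}, is-not-None={part.text is not None}')
```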
{pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/models/openai.py +3 -1

@@ -334,7 +334,9 @@ class OpenAIModel(Model):
            items.append(TextPart(choice.message.content))
        if choice.message.tool_calls is not None:
            for c in choice.message.tool_calls:
-
+               part = ToolCallPart(c.function.name, c.function.arguments, tool_call_id=c.id)
+               part.tool_call_id = _guard_tool_call_id(part)
+               items.append(part)
        return ModelResponse(
            items,
            usage=_map_usage(response),
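In this hunk, each tool call now becomes a `ToolCallPart` whose `tool_call_id` is passed through `_guard_tool_call_id`. That helper isn't shown in the diff; the sketch below only illustrates the general guard pattern (substitute a generated id when the provider omits one) with invented names, not the library's actual implementation:

```python
import uuid
from dataclasses import dataclass
from typing import Optional


@dataclass
class ToolCall:
    name: str
    arguments: str
    tool_call_id: Optional[str] = None


def ensure_tool_call_id(call: ToolCall) -> str:
    # Keep the provider-supplied id if present, otherwise generate a fallback
    # so later tool-return messages can still reference this call.
    return call.tool_call_id or f'generated_{uuid.uuid4().hex}'


call = ToolCall('get_weather', '{"city": "Paris"}')
call.tool_call_id = ensure_tool_call_id(call)
print(call.tool_call_id)
```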
{pydantic_ai_slim-0.2.12 → pydantic_ai_slim-0.2.14}/pydantic_ai/tools.py +1 -1

@@ -392,7 +392,7 @@ class Tool(Generic[AgentDepsT]):
            raise UnexpectedModelBehavior(f'Tool exceeded max retries count of {self.max_retries}') from exc
        else:
            if isinstance(exc, ValidationError):
-               content = exc.errors(include_url=False)
+               content = exc.errors(include_url=False, include_context=False)
            else:
                content = exc.message
            return _messages.RetryPromptPart(