pydantic-ai-slim 0.0.42__tar.gz → 0.0.43__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {pydantic_ai_slim-0.0.42 → pydantic_ai_slim-0.0.43}/PKG-INFO +2 -2
- {pydantic_ai_slim-0.0.42 → pydantic_ai_slim-0.0.43}/pydantic_ai/_cli.py +1 -1
- {pydantic_ai_slim-0.0.42 → pydantic_ai_slim-0.0.43}/pydantic_ai/agent.py +2 -1
- {pydantic_ai_slim-0.0.42 → pydantic_ai_slim-0.0.43}/pydantic_ai/common_tools/duckduckgo.py +1 -1
- {pydantic_ai_slim-0.0.42 → pydantic_ai_slim-0.0.43}/pydantic_ai/common_tools/tavily.py +1 -1
- {pydantic_ai_slim-0.0.42 → pydantic_ai_slim-0.0.43}/pydantic_ai/mcp.py +1 -1
- {pydantic_ai_slim-0.0.42 → pydantic_ai_slim-0.0.43}/pydantic_ai/messages.py +3 -0
- {pydantic_ai_slim-0.0.42 → pydantic_ai_slim-0.0.43}/pydantic_ai/models/anthropic.py +1 -1
- {pydantic_ai_slim-0.0.42 → pydantic_ai_slim-0.0.43}/pydantic_ai/models/cohere.py +1 -1
- {pydantic_ai_slim-0.0.42 → pydantic_ai_slim-0.0.43}/pydantic_ai/models/groq.py +1 -1
- {pydantic_ai_slim-0.0.42 → pydantic_ai_slim-0.0.43}/pydantic_ai/models/instrumented.py +13 -7
- {pydantic_ai_slim-0.0.42 → pydantic_ai_slim-0.0.43}/pydantic_ai/models/mistral.py +1 -1
- {pydantic_ai_slim-0.0.42 → pydantic_ai_slim-0.0.43}/pydantic_ai/models/openai.py +1 -1
- {pydantic_ai_slim-0.0.42 → pydantic_ai_slim-0.0.43}/pydantic_ai/models/vertexai.py +1 -1
- {pydantic_ai_slim-0.0.42 → pydantic_ai_slim-0.0.43}/pydantic_ai/models/wrapper.py +5 -1
- {pydantic_ai_slim-0.0.42 → pydantic_ai_slim-0.0.43}/pydantic_ai/providers/anthropic.py +1 -1
- {pydantic_ai_slim-0.0.42 → pydantic_ai_slim-0.0.43}/pydantic_ai/providers/azure.py +1 -1
- {pydantic_ai_slim-0.0.42 → pydantic_ai_slim-0.0.43}/pydantic_ai/providers/bedrock.py +1 -1
- {pydantic_ai_slim-0.0.42 → pydantic_ai_slim-0.0.43}/pydantic_ai/providers/deepseek.py +1 -1
- {pydantic_ai_slim-0.0.42 → pydantic_ai_slim-0.0.43}/pydantic_ai/providers/google_vertex.py +11 -19
- {pydantic_ai_slim-0.0.42 → pydantic_ai_slim-0.0.43}/pydantic_ai/providers/groq.py +1 -1
- {pydantic_ai_slim-0.0.42 → pydantic_ai_slim-0.0.43}/pydantic_ai/providers/mistral.py +1 -1
- {pydantic_ai_slim-0.0.42 → pydantic_ai_slim-0.0.43}/pydantic_ai/providers/openai.py +1 -1
- {pydantic_ai_slim-0.0.42 → pydantic_ai_slim-0.0.43}/pyproject.toml +2 -2
- {pydantic_ai_slim-0.0.42 → pydantic_ai_slim-0.0.43}/.gitignore +0 -0
- {pydantic_ai_slim-0.0.42 → pydantic_ai_slim-0.0.43}/README.md +0 -0
- {pydantic_ai_slim-0.0.42 → pydantic_ai_slim-0.0.43}/pydantic_ai/__init__.py +0 -0
- {pydantic_ai_slim-0.0.42 → pydantic_ai_slim-0.0.43}/pydantic_ai/_agent_graph.py +0 -0
- {pydantic_ai_slim-0.0.42 → pydantic_ai_slim-0.0.43}/pydantic_ai/_griffe.py +0 -0
- {pydantic_ai_slim-0.0.42 → pydantic_ai_slim-0.0.43}/pydantic_ai/_parts_manager.py +0 -0
- {pydantic_ai_slim-0.0.42 → pydantic_ai_slim-0.0.43}/pydantic_ai/_pydantic.py +0 -0
- {pydantic_ai_slim-0.0.42 → pydantic_ai_slim-0.0.43}/pydantic_ai/_result.py +0 -0
- {pydantic_ai_slim-0.0.42 → pydantic_ai_slim-0.0.43}/pydantic_ai/_system_prompt.py +0 -0
- {pydantic_ai_slim-0.0.42 → pydantic_ai_slim-0.0.43}/pydantic_ai/_utils.py +0 -0
- {pydantic_ai_slim-0.0.42 → pydantic_ai_slim-0.0.43}/pydantic_ai/common_tools/__init__.py +0 -0
- {pydantic_ai_slim-0.0.42 → pydantic_ai_slim-0.0.43}/pydantic_ai/exceptions.py +0 -0
- {pydantic_ai_slim-0.0.42 → pydantic_ai_slim-0.0.43}/pydantic_ai/format_as_xml.py +0 -0
- {pydantic_ai_slim-0.0.42 → pydantic_ai_slim-0.0.43}/pydantic_ai/models/__init__.py +0 -0
- {pydantic_ai_slim-0.0.42 → pydantic_ai_slim-0.0.43}/pydantic_ai/models/bedrock.py +0 -0
- {pydantic_ai_slim-0.0.42 → pydantic_ai_slim-0.0.43}/pydantic_ai/models/fallback.py +0 -0
- {pydantic_ai_slim-0.0.42 → pydantic_ai_slim-0.0.43}/pydantic_ai/models/function.py +0 -0
- {pydantic_ai_slim-0.0.42 → pydantic_ai_slim-0.0.43}/pydantic_ai/models/gemini.py +0 -0
- {pydantic_ai_slim-0.0.42 → pydantic_ai_slim-0.0.43}/pydantic_ai/models/test.py +0 -0
- {pydantic_ai_slim-0.0.42 → pydantic_ai_slim-0.0.43}/pydantic_ai/providers/__init__.py +0 -0
- {pydantic_ai_slim-0.0.42 → pydantic_ai_slim-0.0.43}/pydantic_ai/providers/google_gla.py +0 -0
- {pydantic_ai_slim-0.0.42 → pydantic_ai_slim-0.0.43}/pydantic_ai/py.typed +0 -0
- {pydantic_ai_slim-0.0.42 → pydantic_ai_slim-0.0.43}/pydantic_ai/result.py +0 -0
- {pydantic_ai_slim-0.0.42 → pydantic_ai_slim-0.0.43}/pydantic_ai/settings.py +0 -0
- {pydantic_ai_slim-0.0.42 → pydantic_ai_slim-0.0.43}/pydantic_ai/tools.py +0 -0
- {pydantic_ai_slim-0.0.42 → pydantic_ai_slim-0.0.43}/pydantic_ai/usage.py +0 -0

PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: pydantic-ai-slim
-Version: 0.0.42
+Version: 0.0.43
 Summary: Agent Framework / shim to use Pydantic with LLMs, slim package
 Author-email: Samuel Colvin <samuel@pydantic.dev>
 License-Expression: MIT
@@ -29,7 +29,7 @@ Requires-Dist: exceptiongroup; python_version < '3.11'
 Requires-Dist: griffe>=1.3.2
 Requires-Dist: httpx>=0.27
 Requires-Dist: opentelemetry-api>=1.28.0
-Requires-Dist: pydantic-graph==0.0.42
+Requires-Dist: pydantic-graph==0.0.43
 Requires-Dist: pydantic>=2.10
 Requires-Dist: typing-inspection>=0.4.0
 Provides-Extra: anthropic

pydantic_ai/_cli.py
@@ -31,7 +31,7 @@ try:
 except ImportError as _import_error:
     raise ImportError(
         'Please install `rich`, `prompt-toolkit` and `argcomplete` to use the PydanticAI CLI, '
-        …
+        'you can use the `cli` optional group — `pip install "pydantic-ai-slim[cli]"`'
     ) from _import_error

 from pydantic_ai.agent import Agent

pydantic_ai/agent.py
@@ -195,6 +195,7 @@ class Agent(Generic[AgentDepsT, ResultDataT]):
             If this isn't set, then the last value set by
             [`Agent.instrument_all()`][pydantic_ai.Agent.instrument_all]
             will be used, which defaults to False.
+            See the [Debugging and Monitoring guide](https://ai.pydantic.dev/logfire/) for more info.
         """
         if model is None or defer_model_check:
             self.model = model
@@ -445,7 +446,7 @@ class Agent(Generic[AgentDepsT, ResultDataT]):
         usage_limits = usage_limits or _usage.UsageLimits()

         if isinstance(model_used, InstrumentedModel):
-            tracer = model_used.…
+            tracer = model_used.settings.tracer
         else:
             tracer = NoOpTracer()
         agent_name = self.name or 'agent'
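
For context, the `instrument` argument documented above can also be switched on globally with `Agent.instrument_all()`. A minimal sketch, assuming `logfire` is installed and using the built-in `'test'` model as a key-free stand-in for a real one such as `'openai:gpt-4o'`:

import logfire
from pydantic_ai import Agent

logfire.configure()                      # any OpenTelemetry setup works here
Agent.instrument_all()                   # opt in for every agent created from now on
agent = Agent('test', instrument=True)   # or opt in per agent
result = agent.run_sync('Say hello')
print(result.data)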

pydantic_ai/common_tools/duckduckgo.py
@@ -13,7 +13,7 @@ try:
 except ImportError as _import_error:
     raise ImportError(
         'Please install `duckduckgo-search` to use the DuckDuckGo search tool, '
-        …
+        'you can use the `duckduckgo` optional group — `pip install "pydantic-ai-slim[duckduckgo]"`'
     ) from _import_error

 __all__ = ('duckduckgo_search_tool',)

pydantic_ai/common_tools/tavily.py
@@ -11,7 +11,7 @@ try:
 except ImportError as _import_error:
     raise ImportError(
         'Please install `tavily-python` to use the Tavily search tool, '
-        …
+        'you can use the `tavily` optional group — `pip install "pydantic-ai-slim[tavily]"`'
     ) from _import_error

 __all__ = ('tavily_search_tool',)

pydantic_ai/mcp.py
@@ -21,7 +21,7 @@ try:
 except ImportError as _import_error:
     raise ImportError(
         'Please install the `mcp` package to use the MCP server, '
-        …
+        'you can use the `mcp` optional group — `pip install "pydantic-ai-slim[mcp]"`'
     ) from _import_error

 __all__ = 'MCPServer', 'MCPServerStdio', 'MCPServerHTTP'

pydantic_ai/messages.py
@@ -26,6 +26,9 @@ class SystemPromptPart:
     content: str
     """The content of the prompt."""

+    timestamp: datetime = field(default_factory=_now_utc)
+    """The timestamp of the prompt."""
+
     dynamic_ref: str | None = None
     """The ref of the dynamic system prompt function that generated this part.
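
As a small illustration (not part of the diff itself), the new field is populated automatically when a system prompt part is constructed:

from pydantic_ai.messages import SystemPromptPart

part = SystemPromptPart(content='You are a helpful assistant.')
print(part.timestamp)  # set by the default factory to the current UTC time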

pydantic_ai/models/anthropic.py
@@ -65,7 +65,7 @@ try:
 except ImportError as _import_error:
     raise ImportError(
         'Please install `anthropic` to use the Anthropic model, '
-        …
+        'you can use the `anthropic` optional group — `pip install "pydantic-ai-slim[anthropic]"`'
     ) from _import_error

 LatestAnthropicModelNames = Literal[

pydantic_ai/models/cohere.py
@@ -50,7 +50,7 @@ try:
 except ImportError as _import_error:
     raise ImportError(
         'Please install `cohere` to use the Cohere model, '
-        …
+        'you can use the `cohere` optional group — `pip install "pydantic-ai-slim[cohere]"`'
     ) from _import_error

 LatestCohereModelNames = Literal[

pydantic_ai/models/groq.py
@@ -41,7 +41,7 @@ try:
 except ImportError as _import_error:
     raise ImportError(
         'Please install `groq` to use the Groq model, '
-        …
+        'you can use the `groq` optional group — `pip install "pydantic-ai-slim[groq]"`'
     ) from _import_error


pydantic_ai/models/instrumented.py
@@ -52,7 +52,9 @@ class InstrumentationSettings:

     - `Agent(instrument=...)`
     - [`Agent.instrument_all()`][pydantic_ai.agent.Agent.instrument_all]
-    - `InstrumentedModel`
+    - [`InstrumentedModel`][pydantic_ai.models.instrumented.InstrumentedModel]
+
+    See the [Debugging and Monitoring guide](https://ai.pydantic.dev/logfire/) for more info.
     """

     tracer: Tracer = field(repr=False)
@@ -94,9 +96,13 @@ GEN_AI_REQUEST_MODEL_ATTRIBUTE = 'gen_ai.request.model'

 @dataclass
 class InstrumentedModel(WrapperModel):
-    """Model which …
+    """Model which wraps another model so that requests are instrumented with OpenTelemetry.
+
+    See the [Debugging and Monitoring guide](https://ai.pydantic.dev/logfire/) for more info.
+    """

-    …
+    settings: InstrumentationSettings
+    """Configuration for instrumenting requests."""

     def __init__(
         self,
@@ -104,7 +110,7 @@ class InstrumentedModel(WrapperModel):
         options: InstrumentationSettings | None = None,
     ) -> None:
         super().__init__(wrapped)
-        self.…
+        self.settings = options or InstrumentationSettings()

     async def request(
         self,
@@ -156,7 +162,7 @@ class InstrumentedModel(WrapperModel):
         if isinstance(value := model_settings.get(key), (float, int)):
             attributes[f'gen_ai.request.{key}'] = value

-        with self.…
+        with self.settings.tracer.start_as_current_span(span_name, attributes=attributes) as span:

             def finish(response: ModelResponse, usage: Usage):
                 if not span.is_recording():
@@ -190,9 +196,9 @@ class InstrumentedModel(WrapperModel):
                 yield finish

     def _emit_events(self, span: Span, events: list[Event]) -> None:
-        if self.…
+        if self.settings.event_mode == 'logs':
             for event in events:
-                self.…
+                self.settings.event_logger.emit(event)
         else:
             attr_name = 'events'
             span.set_attributes(
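
A rough usage sketch (the default `InstrumentationSettings()` and the built-in `TestModel` are illustrative choices, not part of this diff): the instrumentation configuration of a wrapped model is now reachable through `settings`:

from pydantic_ai.models.instrumented import InstrumentationSettings, InstrumentedModel
from pydantic_ai.models.test import TestModel

model = InstrumentedModel(TestModel(), options=InstrumentationSettings())
print(model.settings.event_mode)  # instrumentation config lives on `settings`
print(model.settings.tracer)      # the OpenTelemetry tracer used for request spans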

pydantic_ai/models/mistral.py
@@ -75,7 +75,7 @@ try:
 except ImportError as e:
     raise ImportError(
         'Please install `mistral` to use the Mistral model, '
-        …
+        'you can use the `mistral` optional group — `pip install "pydantic-ai-slim[mistral]"`'
     ) from e

 LatestMistralModelNames = Literal[

pydantic_ai/models/openai.py
@@ -57,7 +57,7 @@ try:
 except ImportError as _import_error:
     raise ImportError(
         'Please install `openai` to use the OpenAI model, '
-        …
+        'you can use the `openai` optional group — `pip install "pydantic-ai-slim[openai]"`'
     ) from _import_error

 OpenAIModelName = Union[str, ChatModel]

pydantic_ai/models/vertexai.py
@@ -27,7 +27,7 @@ try:
 except ImportError as _import_error:
     raise ImportError(
         'Please install `google-auth` to use the VertexAI model, '
-        …
+        'you can use the `vertexai` optional group — `pip install "pydantic-ai-slim[vertexai]"`'
     ) from _import_error

 VERTEX_AI_URL_TEMPLATE = (

pydantic_ai/models/wrapper.py
@@ -13,9 +13,13 @@ from . import KnownModelName, Model, ModelRequestParameters, StreamedResponse, i

 @dataclass(init=False)
 class WrapperModel(Model):
-    """Model which wraps another model. …
+    """Model which wraps another model.
+
+    Does nothing on its own, used as a base class.
+    """

     wrapped: Model
+    """The underlying model being wrapped."""

     def __init__(self, wrapped: Model | KnownModelName):
         self.wrapped = infer_model(wrapped)
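
For illustration only (wrapping the built-in `TestModel` is just a convenient, key-free example), the newly documented `wrapped` attribute exposes the delegate of any wrapper model, such as `InstrumentedModel`:

from pydantic_ai.models.instrumented import InstrumentedModel
from pydantic_ai.models.test import TestModel

inner = TestModel()
wrapper = InstrumentedModel(inner)
assert wrapper.wrapped is inner  # the underlying model being wrapped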

pydantic_ai/providers/anthropic.py
@@ -12,7 +12,7 @@ try:
 except ImportError as _import_error:  # pragma: no cover
     raise ImportError(
         'Please install the `anthropic` package to use the Anthropic provider, '
-        …
+        'you can use the `anthropic` optional group — `pip install "pydantic-ai-slim[anthropic]"`'
     ) from _import_error


pydantic_ai/providers/azure.py
@@ -13,7 +13,7 @@ try:
 except ImportError as _import_error:  # pragma: no cover
     raise ImportError(
         'Please install the `openai` package to use the Azure provider, '
-        …
+        'you can use the `openai` optional group — `pip install "pydantic-ai-slim[openai]"`'
     ) from _import_error


pydantic_ai/providers/bedrock.py
@@ -11,7 +11,7 @@ try:
 except ImportError as _import_error:
     raise ImportError(
         'Please install the `boto3` package to use the Bedrock provider, '
-        …
+        'you can use the `bedrock` optional group — `pip install "pydantic-ai-slim[bedrock]"`'
     ) from _import_error


pydantic_ai/providers/deepseek.py
@@ -13,7 +13,7 @@ try:
 except ImportError as _import_error:  # pragma: no cover
     raise ImportError(
         'Please install the `openai` package to use the DeepSeek provider, '
-        …
+        'you can use the `openai` optional group — `pip install "pydantic-ai-slim[openai]"`'
     ) from _import_error

 from . import Provider

pydantic_ai/providers/google_vertex.py
@@ -2,7 +2,6 @@ from __future__ import annotations as _annotations

 import functools
 from collections.abc import AsyncGenerator, Mapping
-from datetime import datetime, timedelta
 from pathlib import Path
 from typing import Literal, overload

@@ -22,15 +21,12 @@ try:
 except ImportError as _import_error:
     raise ImportError(
         'Please install the `google-auth` package to use the Google Vertex AI provider, '
-        …
+        'you can use the `vertexai` optional group — `pip install "pydantic-ai-slim[vertexai]"`'
     ) from _import_error


 __all__ = ('GoogleVertexProvider',)

-# default expiry is 3600 seconds
-MAX_TOKEN_AGE = timedelta(seconds=3000)
-

 class GoogleVertexProvider(Provider[httpx.AsyncClient]):
     """Provider for Vertex AI API."""
@@ -131,19 +127,21 @@ class _VertexAIAuth(httpx.Auth):
         self.region = region

         self.credentials = None
-        self.token_created: datetime | None = None

     async def async_auth_flow(self, request: httpx.Request) -> AsyncGenerator[httpx.Request, httpx.Response]:
         if self.credentials is None:
             self.credentials = await self._get_credentials()
-        if self.credentials.token is None …
-            await …
-            self.token_created = datetime.now()
+        if self.credentials.token is None:  # type: ignore[reportUnknownMemberType]
+            await self._refresh_token()
         request.headers['Authorization'] = f'Bearer {self.credentials.token}'  # type: ignore[reportUnknownMemberType]
-
         # NOTE: This workaround is in place because we might get the project_id from the credentials.
         request.url = httpx.URL(str(request.url).replace('projects/None', f'projects/{self.project_id}'))
-        yield request
+        response = yield request
+
+        if response.status_code == 401:
+            await self._refresh_token()
+            request.headers['Authorization'] = f'Bearer {self.credentials.token}'  # type: ignore[reportUnknownMemberType]
+            yield request

     async def _get_credentials(self) -> BaseCredentials | ServiceAccountCredentials:
         if self.service_account_file is not None:
@@ -166,15 +164,9 @@ class _VertexAIAuth(httpx.Auth):
             self.project_id = creds_project_id
         return creds

-    def …
-        if self.token_created is None:
-            return True
-        else:
-            return (datetime.now() - self.token_created) > MAX_TOKEN_AGE
-
-    def _refresh_token(self) -> str:  # pragma: no cover
+    async def _refresh_token(self) -> str:  # pragma: no cover
         assert self.credentials is not None
-        self.credentials.refresh …
+        await anyio.to_thread.run_sync(self.credentials.refresh, Request())  # type: ignore[reportUnknownMemberType]
         assert isinstance(self.credentials.token, str), f'Expected token to be a string, got {self.credentials.token}'  # type: ignore[reportUnknownMemberType]
         return self.credentials.token
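
The rewritten flow relies on httpx's generator-based auth protocol, in which `yield request` hands the response back to the auth flow so it can refresh credentials and retry. A generic sketch of that pattern, independent of this provider and of Google credentials (all names here are illustrative):

import httpx


class RefreshingBearerAuth(httpx.Auth):
    """Sketch: send a bearer token, refresh and retry once on a 401 response."""

    def __init__(self, token: str) -> None:
        self.token = token

    def _refresh(self) -> str:
        # Placeholder: obtain a new token from whatever credential source you use.
        return self.token

    def auth_flow(self, request: httpx.Request):
        request.headers['Authorization'] = f'Bearer {self.token}'
        response = yield request  # httpx sends the request and hands the response back
        if response.status_code == 401:
            self.token = self._refresh()
            request.headers['Authorization'] = f'Bearer {self.token}'
            yield request  # retry once with the refreshed token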

pydantic_ai/providers/groq.py
@@ -12,7 +12,7 @@ try:
 except ImportError as _import_error:  # pragma: no cover
     raise ImportError(
         'Please install the `groq` package to use the Groq provider, '
-        …
+        'you can use the `groq` optional group — `pip install "pydantic-ai-slim[groq]"`'
     ) from _import_error


pydantic_ai/providers/mistral.py
@@ -12,7 +12,7 @@ try:
 except ImportError as e:  # pragma: no cover
     raise ImportError(
         'Please install the `mistral` package to use the Mistral provider, '
-        …
+        'you can use the `mistral` optional group — `pip install "pydantic-ai-slim[mistral]"`'
     ) from e


pydantic_ai/providers/openai.py
@@ -11,7 +11,7 @@ try:
 except ImportError as _import_error:  # pragma: no cover
     raise ImportError(
         'Please install the `openai` package to use the OpenAI provider, '
-        …
+        'you can use the `openai` optional group — `pip install "pydantic-ai-slim[openai]"`'
     ) from _import_error


pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "hatchling.build"

 [project]
 name = "pydantic-ai-slim"
-version = "0.0.42"
+version = "0.0.43"
 description = "Agent Framework / shim to use Pydantic with LLMs, slim package"
 authors = [{ name = "Samuel Colvin", email = "samuel@pydantic.dev" }]
 license = "MIT"
@@ -36,7 +36,7 @@ dependencies = [
     "griffe>=1.3.2",
     "httpx>=0.27",
     "pydantic>=2.10",
-    "pydantic-graph==0.0.42",
+    "pydantic-graph==0.0.43",
     "exceptiongroup; python_version < '3.11'",
     "opentelemetry-api>=1.28.0",
     "typing-inspection>=0.4.0",

All remaining files are unchanged between 0.0.42 and 0.0.43.