pydantic-ai-slim 1.2.1__py3-none-any.whl → 1.4.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of pydantic-ai-slim was flagged as potentially problematic.
- pydantic_ai/__init__.py +4 -0
- pydantic_ai/_agent_graph.py +41 -8
- pydantic_ai/agent/__init__.py +11 -19
- pydantic_ai/builtin_tools.py +106 -4
- pydantic_ai/exceptions.py +5 -0
- pydantic_ai/mcp.py +1 -22
- pydantic_ai/models/__init__.py +45 -37
- pydantic_ai/models/anthropic.py +132 -11
- pydantic_ai/models/bedrock.py +4 -4
- pydantic_ai/models/cohere.py +0 -7
- pydantic_ai/models/gemini.py +9 -2
- pydantic_ai/models/google.py +31 -21
- pydantic_ai/models/groq.py +4 -4
- pydantic_ai/models/huggingface.py +2 -2
- pydantic_ai/models/openai.py +243 -49
- pydantic_ai/providers/__init__.py +21 -12
- pydantic_ai/providers/bedrock.py +60 -16
- pydantic_ai/providers/gateway.py +60 -72
- pydantic_ai/providers/google.py +61 -23
- pydantic_ai/providers/ovhcloud.py +95 -0
- pydantic_ai/usage.py +13 -2
- {pydantic_ai_slim-1.2.1.dist-info → pydantic_ai_slim-1.4.0.dist-info}/METADATA +5 -5
- {pydantic_ai_slim-1.2.1.dist-info → pydantic_ai_slim-1.4.0.dist-info}/RECORD +26 -25
- {pydantic_ai_slim-1.2.1.dist-info → pydantic_ai_slim-1.4.0.dist-info}/WHEEL +0 -0
- {pydantic_ai_slim-1.2.1.dist-info → pydantic_ai_slim-1.4.0.dist-info}/entry_points.txt +0 -0
- {pydantic_ai_slim-1.2.1.dist-info → pydantic_ai_slim-1.4.0.dist-info}/licenses/LICENSE +0 -0
pydantic_ai/providers/gateway.py
CHANGED
@@ -3,14 +3,16 @@
 from __future__ import annotations as _annotations
 
 import os
+from collections.abc import Awaitable, Callable
 from typing import TYPE_CHECKING, Any, Literal, overload
 
 import httpx
 
 from pydantic_ai.exceptions import UserError
-from pydantic_ai.models import
+from pydantic_ai.models import cached_async_http_client
 
 if TYPE_CHECKING:
+    from botocore.client import BaseClient
     from google.genai import Client as GoogleClient
     from groq import AsyncGroq
     from openai import AsyncOpenAI
@@ -18,6 +20,8 @@ if TYPE_CHECKING:
     from pydantic_ai.models.anthropic import AsyncAnthropicClient
     from pydantic_ai.providers import Provider
 
+GATEWAY_BASE_URL = 'https://gateway.pydantic.dev/proxy'
+
 
 @overload
 def gateway_provider(
@@ -57,13 +61,34 @@ def gateway_provider(
 ) -> Provider[AsyncAnthropicClient]: ...
 
 
+@overload
+def gateway_provider(
+    upstream_provider: Literal['bedrock'],
+    *,
+    api_key: str | None = None,
+    base_url: str | None = None,
+) -> Provider[BaseClient]: ...
+
+
+@overload
 def gateway_provider(
-    upstream_provider:
+    upstream_provider: str,
+    *,
+    api_key: str | None = None,
+    base_url: str | None = None,
+) -> Provider[Any]: ...
+
+
+UpstreamProvider = Literal['openai', 'openai-chat', 'openai-responses', 'groq', 'google-vertex', 'anthropic', 'bedrock']
+
+
+def gateway_provider(
+    upstream_provider: UpstreamProvider | str,
     *,
     # Every provider
     api_key: str | None = None,
     base_url: str | None = None,
-    # OpenAI &
+    # OpenAI, Groq & Anthropic
    http_client: httpx.AsyncClient | None = None,
 ) -> Provider[Any]:
     """Create a new Gateway provider.
@@ -73,25 +98,21 @@ def gateway_provider(
         api_key: The API key to use for authentication. If not provided, the `PYDANTIC_AI_GATEWAY_API_KEY`
             environment variable will be used if available.
         base_url: The base URL to use for the Gateway. If not provided, the `PYDANTIC_AI_GATEWAY_BASE_URL`
-            environment variable will be used if available. Otherwise, defaults to `
+            environment variable will be used if available. Otherwise, defaults to `https://gateway.pydantic.dev/proxy`.
         http_client: The HTTP client to use for the Gateway.
     """
     api_key = api_key or os.getenv('PYDANTIC_AI_GATEWAY_API_KEY')
     if not api_key:
         raise UserError(
-            'Set the `PYDANTIC_AI_GATEWAY_API_KEY` environment variable or pass it via `gateway_provider(api_key=...)`'
+            'Set the `PYDANTIC_AI_GATEWAY_API_KEY` environment variable or pass it via `gateway_provider(..., api_key=...)`'
             ' to use the Pydantic AI Gateway provider.'
         )
 
-    base_url = base_url or os.getenv('PYDANTIC_AI_GATEWAY_BASE_URL',
-    http_client = http_client or cached_async_http_client(provider=f'gateway
-    http_client.event_hooks = {'request': [_request_hook]}
-
-    if upstream_provider in ('openai', 'openai-chat'):
-        from .openai import OpenAIProvider
+    base_url = base_url or os.getenv('PYDANTIC_AI_GATEWAY_BASE_URL', GATEWAY_BASE_URL)
+    http_client = http_client or cached_async_http_client(provider=f'gateway/{upstream_provider}')
+    http_client.event_hooks = {'request': [_request_hook(api_key)]}
 
-
-    elif upstream_provider == 'openai-responses':
+    if upstream_provider in ('openai', 'openai-chat', 'openai-responses'):
         from .openai import OpenAIProvider
 
         return OpenAIProvider(api_key=api_key, base_url=_merge_url_path(base_url, 'openai'), http_client=http_client)
@@ -111,79 +132,46 @@ def gateway_provider(
             http_client=http_client,
         )
     )
-    elif upstream_provider == '
-        from
+    elif upstream_provider == 'bedrock':
+        from .bedrock import BedrockProvider
 
+        return BedrockProvider(
+            api_key=api_key,
+            base_url=_merge_url_path(base_url, 'bedrock'),
+            region_name='pydantic-ai-gateway',  # Fake region name to avoid NoRegionError
+        )
+    elif upstream_provider == 'google-vertex':
         from .google import GoogleProvider
 
         return GoogleProvider(
-
-
-
-
-                    'base_url': _merge_url_path(base_url, 'google-vertex'),
-                    'headers': {'User-Agent': get_user_agent(), 'Authorization': api_key},
-                    # TODO(Marcelo): Until https://github.com/googleapis/python-genai/issues/1357 is solved.
-                    'async_client_args': {
-                        'transport': httpx.AsyncHTTPTransport(),
-                        'event_hooks': {'request': [_request_hook]},
-                    },
-                },
-            )
+            vertexai=True,
+            api_key=api_key,
+            base_url=_merge_url_path(base_url, 'google-vertex'),
+            http_client=http_client,
         )
-    else:
-        raise UserError(f'Unknown provider: {upstream_provider}')
+    else:
+        raise UserError(f'Unknown upstream provider: {upstream_provider}')
 
 
-def
-    """
-
-    Args:
-        model_name: The name of the model to infer. Must be in the format "provider/model_name".
+def _request_hook(api_key: str) -> Callable[[httpx.Request], Awaitable[httpx.Request]]:
+    """Request hook for the gateway provider.
 
-
-        The model class that will be used to make requests to the gateway.
+    It adds the `"traceparent"` and `"Authorization"` headers to the request.
     """
-    try:
-        upstream_provider, model_name = model_name.split('/', 1)
-    except ValueError:
-        raise UserError(f'The model name "{model_name}" is not in the format "provider/model_name".')
 
-
-    from
+    async def _hook(request: httpx.Request) -> httpx.Request:
+        from opentelemetry.propagate import inject
 
-
-
-
-
-        return OpenAIResponsesModel(model_name, provider=gateway_provider('openai'))
-    elif upstream_provider == 'groq':
-        from pydantic_ai.models.groq import GroqModel
+        headers: dict[str, Any] = {}
+        inject(headers)
+        request.headers.update(headers)
 
-
-
-        from pydantic_ai.models.anthropic import AnthropicModel
-
-        return AnthropicModel(model_name, provider=gateway_provider('anthropic'))
-    elif upstream_provider == 'google-vertex':
-        from pydantic_ai.models.google import GoogleModel
-
-        return GoogleModel(model_name, provider=gateway_provider('google-vertex'))
-    raise UserError(f'Unknown upstream provider: {upstream_provider}')
-
-
-async def _request_hook(request: httpx.Request) -> httpx.Request:
-    """Request hook for the gateway provider.
-
-    It adds the `"traceparent"` header to the request.
-    """
-    from opentelemetry.propagate import inject
+        if 'Authorization' not in request.headers:
+            request.headers['Authorization'] = f'Bearer {api_key}'
 
-
-    inject(headers)
-    request.headers.update(headers)
+        return request
 
-    return
+    return _hook
 
 
 def _merge_url_path(base_url: str, path: str) -> str:
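In practice, the new `bedrock` upstream and the key-injecting `_request_hook` mean a Bedrock model can be routed through the gateway with only an upstream name. A minimal sketch, assuming `PYDANTIC_AI_GATEWAY_API_KEY` is set; the `BedrockConverseModel` class and the model name below are illustrative assumptions, not taken from this diff:

    # Hedged sketch: routes a Bedrock model through the Pydantic AI Gateway.
    # 'us.amazon.nova-micro-v1:0' is only a placeholder model name.
    from pydantic_ai import Agent
    from pydantic_ai.models.bedrock import BedrockConverseModel
    from pydantic_ai.providers.gateway import gateway_provider

    model = BedrockConverseModel(
        'us.amazon.nova-micro-v1:0',
        provider=gateway_provider('bedrock'),
    )
    agent = Agent(model)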
pydantic_ai/providers/google.py
CHANGED
@@ -7,14 +7,14 @@ import httpx
 
 from pydantic_ai import ModelProfile
 from pydantic_ai.exceptions import UserError
-from pydantic_ai.models import get_user_agent
+from pydantic_ai.models import cached_async_http_client, get_user_agent
 from pydantic_ai.profiles.google import google_model_profile
 from pydantic_ai.providers import Provider
 
 try:
     from google.auth.credentials import Credentials
     from google.genai import Client
-    from google.genai.types import
+    from google.genai.types import HttpOptions
 except ImportError as _import_error:
     raise ImportError(
         'Please install the `google-genai` package to use the Google provider, '
@@ -41,7 +41,9 @@ class GoogleProvider(Provider[Client]):
         return google_model_profile(model_name)
 
     @overload
-    def __init__(
+    def __init__(
+        self, *, api_key: str, http_client: httpx.AsyncClient | None = None, base_url: str | None = None
+    ) -> None: ...
 
     @overload
     def __init__(
@@ -49,14 +51,23 @@ class GoogleProvider(Provider[Client]):
         *,
         credentials: Credentials | None = None,
         project: str | None = None,
-        location: VertexAILocation | Literal['global'] | None = None,
+        location: VertexAILocation | Literal['global'] | str | None = None,
+        http_client: httpx.AsyncClient | None = None,
+        base_url: str | None = None,
     ) -> None: ...
 
     @overload
     def __init__(self, *, client: Client) -> None: ...
 
     @overload
-    def __init__(
+    def __init__(
+        self,
+        *,
+        vertexai: bool = False,
+        api_key: str | None = None,
+        http_client: httpx.AsyncClient | None = None,
+        base_url: str | None = None,
+    ) -> None: ...
 
     def __init__(
         self,
@@ -64,16 +75,17 @@ class GoogleProvider(Provider[Client]):
         api_key: str | None = None,
         credentials: Credentials | None = None,
         project: str | None = None,
-        location: VertexAILocation | Literal['global'] | None = None,
-        client: Client | None = None,
+        location: VertexAILocation | Literal['global'] | str | None = None,
         vertexai: bool | None = None,
+        client: Client | None = None,
+        http_client: httpx.AsyncClient | None = None,
+        base_url: str | None = None,
     ) -> None:
         """Create a new Google provider.
 
         Args:
             api_key: The `API key <https://ai.google.dev/gemini-api/docs/api-key>`_ to
                 use for authentication. It can also be set via the `GOOGLE_API_KEY` environment variable.
-                Applies to the Gemini Developer API only.
             credentials: The credentials to use for authentication when calling the Vertex AI APIs. Credentials can be
                 obtained from environment variables and default credentials. For more information, see Set up
                 Application Default Credentials. Applies to the Vertex AI API only.
@@ -81,43 +93,60 @@ class GoogleProvider(Provider[Client]):
                 (for example, GOOGLE_CLOUD_PROJECT). Applies to the Vertex AI API only.
             location: The location to send API requests to (for example, us-central1). Can be obtained from environment variables.
                 Applies to the Vertex AI API only.
-            client: A pre-initialized client to use.
             vertexai: Force the use of the Vertex AI API. If `False`, the Google Generative Language API will be used.
-                Defaults to `False
+                Defaults to `False` unless `location`, `project`, or `credentials` are provided.
+            client: A pre-initialized client to use.
+            http_client: An existing `httpx.AsyncClient` to use for making HTTP requests.
+            base_url: The base URL for the Google API.
         """
         if client is None:
             # NOTE: We are keeping GEMINI_API_KEY for backwards compatibility.
             api_key = api_key or os.getenv('GOOGLE_API_KEY') or os.getenv('GEMINI_API_KEY')
 
+            vertex_ai_args_used = bool(location or project or credentials)
             if vertexai is None:
-                vertexai =
-
-
-                    '
-
-
+                vertexai = vertex_ai_args_used
+
+            http_client = http_client or cached_async_http_client(
+                provider='google-vertex' if vertexai else 'google-gla'
+            )
+            http_options = HttpOptions(
+                base_url=base_url,
+                headers={'User-Agent': get_user_agent()},
+                httpx_async_client=http_client,
+                # TODO: Remove once https://github.com/googleapis/python-genai/pull/1509#issuecomment-3430028790 is solved.
+                async_client_args={'transport': httpx.AsyncHTTPTransport()},
+            )
             if not vertexai:
                 if api_key is None:
-                    raise UserError(
+                    raise UserError(
                         'Set the `GOOGLE_API_KEY` environment variable or pass it via `GoogleProvider(api_key=...)`'
                         'to use the Google Generative Language API.'
                     )
-                self._client =
+                self._client = _SafelyClosingClient(vertexai=False, api_key=api_key, http_options=http_options)
             else:
-
-
-
+                if vertex_ai_args_used:
+                    api_key = None
+
+                if api_key is None:
+                    project = project or os.getenv('GOOGLE_CLOUD_PROJECT')
                    # From https://github.com/pydantic/pydantic-ai/pull/2031/files#r2169682149:
                    # Currently `us-central1` supports the most models by far of any region including `global`, but not
                    # all of them. `us-central1` has all google models but is missing some Anthropic partner models,
                    # which use `us-east5` instead. `global` has fewer models but higher availability.
                    # For more details, check: https://cloud.google.com/vertex-ai/generative-ai/docs/learn/locations#available-regions
-                    location=location or os.getenv('GOOGLE_CLOUD_LOCATION') or 'us-central1'
+                    location = location or os.getenv('GOOGLE_CLOUD_LOCATION') or 'us-central1'
+
+                self._client = _SafelyClosingClient(
+                    vertexai=True,
+                    api_key=api_key,
+                    project=project,
+                    location=location,
                     credentials=credentials,
                     http_options=http_options,
                 )
         else:
-            self._client = client
+            self._client = client  # pragma: no cover
 
 
 VertexAILocation = Literal[
@@ -154,3 +183,12 @@ VertexAILocation = Literal[
 """Regions available for Vertex AI.
 More details [here](https://cloud.google.com/vertex-ai/generative-ai/docs/learn/locations#genai-locations).
 """
+
+
+class _SafelyClosingClient(Client):
+    def close(self) -> None:
+        # This is called from `Client.__del__`, even if `Client.__init__` raised an error before `self._api_client` is set, which would raise an `AttributeError` here.
+        try:
+            super().close()
+        except AttributeError:
+            pass
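The reworked `GoogleProvider.__init__` now accepts `http_client` and `base_url` directly and builds `HttpOptions` itself. A minimal sketch of the first overload, assuming a valid Gemini Developer API key; the timeout value and model name are illustrative only:

    # Hedged sketch: GoogleProvider with a caller-supplied httpx client.
    import httpx

    from pydantic_ai import Agent
    from pydantic_ai.models.google import GoogleModel
    from pydantic_ai.providers.google import GoogleProvider

    http_client = httpx.AsyncClient(timeout=30)
    provider = GoogleProvider(api_key='your-api-key', http_client=http_client)
    agent = Agent(GoogleModel('gemini-2.0-flash', provider=provider))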
pydantic_ai/providers/ovhcloud.py
ADDED

@@ -0,0 +1,95 @@
+from __future__ import annotations as _annotations
+
+import os
+from typing import overload
+
+import httpx
+
+from pydantic_ai import ModelProfile
+from pydantic_ai.exceptions import UserError
+from pydantic_ai.models import cached_async_http_client
+from pydantic_ai.profiles.deepseek import deepseek_model_profile
+from pydantic_ai.profiles.harmony import harmony_model_profile
+from pydantic_ai.profiles.meta import meta_model_profile
+from pydantic_ai.profiles.mistral import mistral_model_profile
+from pydantic_ai.profiles.openai import OpenAIJsonSchemaTransformer, OpenAIModelProfile
+from pydantic_ai.profiles.qwen import qwen_model_profile
+from pydantic_ai.providers import Provider
+
+try:
+    from openai import AsyncOpenAI
+except ImportError as _import_error:  # pragma: no cover
+    raise ImportError(
+        'Please install the `openai` package to use OVHcloud AI Endpoints provider.'
+        'You can use the `openai` optional group — `pip install "pydantic-ai-slim[openai]"`'
+    ) from _import_error
+
+
+class OVHcloudProvider(Provider[AsyncOpenAI]):
+    """Provider for OVHcloud AI Endpoints."""
+
+    @property
+    def name(self) -> str:
+        return 'ovhcloud'
+
+    @property
+    def base_url(self) -> str:
+        return 'https://oai.endpoints.kepler.ai.cloud.ovh.net/v1'
+
+    @property
+    def client(self) -> AsyncOpenAI:
+        return self._client
+
+    def model_profile(self, model_name: str) -> ModelProfile | None:
+        model_name = model_name.lower()
+
+        prefix_to_profile = {
+            'llama': meta_model_profile,
+            'meta-': meta_model_profile,
+            'deepseek': deepseek_model_profile,
+            'mistral': mistral_model_profile,
+            'gpt': harmony_model_profile,
+            'qwen': qwen_model_profile,
+        }
+
+        profile = None
+        for prefix, profile_func in prefix_to_profile.items():
+            if model_name.startswith(prefix):
+                profile = profile_func(model_name)
+
+        # As the OVHcloud AI Endpoints API is OpenAI-compatible, let's assume we also need OpenAIJsonSchemaTransformer.
+        return OpenAIModelProfile(json_schema_transformer=OpenAIJsonSchemaTransformer).update(profile)
+
+    @overload
+    def __init__(self) -> None: ...
+
+    @overload
+    def __init__(self, *, api_key: str) -> None: ...
+
+    @overload
+    def __init__(self, *, api_key: str, http_client: httpx.AsyncClient) -> None: ...
+
+    @overload
+    def __init__(self, *, openai_client: AsyncOpenAI | None = None) -> None: ...
+
+    def __init__(
+        self,
+        *,
+        api_key: str | None = None,
+        openai_client: AsyncOpenAI | None = None,
+        http_client: httpx.AsyncClient | None = None,
+    ) -> None:
+        api_key = api_key or os.getenv('OVHCLOUD_API_KEY')
+        if not api_key and openai_client is None:
+            raise UserError(
+                'Set the `OVHCLOUD_API_KEY` environment variable or pass it via '
+                '`OVHcloudProvider(api_key=...)` to use OVHcloud AI Endpoints provider.'
+            )
+
+        if openai_client is not None:
+            self._client = openai_client
+        elif http_client is not None:
+            self._client = AsyncOpenAI(base_url=self.base_url, api_key=api_key, http_client=http_client)
+        else:
+            http_client = cached_async_http_client(provider='ovhcloud')
+            self._client = AsyncOpenAI(base_url=self.base_url, api_key=api_key, http_client=http_client)
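Since OVHcloud AI Endpoints is OpenAI-compatible, the new provider is meant to be paired with the OpenAI model class. A short sketch, assuming `OVHCLOUD_API_KEY` is set and that `OpenAIChatModel` is the appropriate model class; the model name is a placeholder:

    # Hedged sketch: OVHcloudProvider with an OpenAI-compatible chat model.
    from pydantic_ai import Agent
    from pydantic_ai.models.openai import OpenAIChatModel
    from pydantic_ai.providers.ovhcloud import OVHcloudProvider

    model = OpenAIChatModel('Meta-Llama-3_3-70B-Instruct', provider=OVHcloudProvider())
    agent = Agent(model)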
pydantic_ai/usage.py
CHANGED
@@ -72,7 +72,18 @@ class UsageBase:
             result['gen_ai.usage.input_tokens'] = self.input_tokens
         if self.output_tokens:
             result['gen_ai.usage.output_tokens'] = self.output_tokens
-
+
+        details = self.details.copy()
+        if self.cache_write_tokens:
+            details['cache_write_tokens'] = self.cache_write_tokens
+        if self.cache_read_tokens:
+            details['cache_read_tokens'] = self.cache_read_tokens
+        if self.input_audio_tokens:
+            details['input_audio_tokens'] = self.input_audio_tokens
+        if self.cache_audio_read_tokens:
+            details['cache_audio_read_tokens'] = self.cache_audio_read_tokens
+        if self.output_audio_tokens:
+            details['output_audio_tokens'] = self.output_audio_tokens
         if details:
             prefix = 'gen_ai.usage.details.'
             for key, value in details.items():
@@ -129,7 +140,7 @@ class RequestUsage(UsageBase):
         provider: str,
         provider_url: str,
         provider_fallback: str,
-        api_flavor: str
+        api_flavor: str = 'default',
         details: dict[str, Any] | None = None,
     ) -> RequestUsage:
         """Extract usage information from the response data using genai-prices.
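The added block folds cache and audio token counts into the `gen_ai.usage.details.*` attributes instead of dropping them. A hedged sketch of the effect, using made-up token counts; the `opentelemetry_attributes()` method name is assumed from the surrounding class and is not shown in this hunk:

    # Hedged sketch: cache tokens now surface as gen_ai.usage.details.* attributes.
    from pydantic_ai.usage import RequestUsage

    usage = RequestUsage(input_tokens=100, output_tokens=20, cache_read_tokens=64)
    attrs = usage.opentelemetry_attributes()  # assumed method name
    assert attrs['gen_ai.usage.input_tokens'] == 100
    assert attrs['gen_ai.usage.details.cache_read_tokens'] == 64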
{pydantic_ai_slim-1.2.1.dist-info → pydantic_ai_slim-1.4.0.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: pydantic-ai-slim
-Version: 1.
+Version: 1.4.0
 Summary: Agent Framework / shim to use Pydantic with LLMs, slim package
 Project-URL: Homepage, https://github.com/pydantic/pydantic-ai/tree/main/pydantic_ai_slim
 Project-URL: Source, https://github.com/pydantic/pydantic-ai/tree/main/pydantic_ai_slim
@@ -29,11 +29,11 @@ Classifier: Topic :: Internet
 Classifier: Topic :: Software Development :: Libraries :: Python Modules
 Requires-Python: >=3.10
 Requires-Dist: exceptiongroup; python_version < '3.11'
-Requires-Dist: genai-prices>=0.0.
+Requires-Dist: genai-prices>=0.0.35
 Requires-Dist: griffe>=1.3.2
 Requires-Dist: httpx>=0.27
 Requires-Dist: opentelemetry-api>=1.28.0
-Requires-Dist: pydantic-graph==1.
+Requires-Dist: pydantic-graph==1.4.0
 Requires-Dist: pydantic>=2.10
 Requires-Dist: typing-inspection>=0.4.0
 Provides-Extra: a2a
@@ -57,9 +57,9 @@ Requires-Dist: dbos>=1.14.0; extra == 'dbos'
 Provides-Extra: duckduckgo
 Requires-Dist: ddgs>=9.0.0; extra == 'duckduckgo'
 Provides-Extra: evals
-Requires-Dist: pydantic-evals==1.
+Requires-Dist: pydantic-evals==1.4.0; extra == 'evals'
 Provides-Extra: google
-Requires-Dist: google-genai>=1.
+Requires-Dist: google-genai>=1.46.0; extra == 'google'
 Provides-Extra: groq
 Requires-Dist: groq>=0.25.0; extra == 'groq'
 Provides-Extra: huggingface
{pydantic_ai_slim-1.2.1.dist-info → pydantic_ai_slim-1.4.0.dist-info}/RECORD
CHANGED

@@ -1,7 +1,7 @@
-pydantic_ai/__init__.py,sha256=
+pydantic_ai/__init__.py,sha256=KAHapOSW1U5w9qiC5YDoe8e-YCS1aO5t0HDRrbICGXA,5239
 pydantic_ai/__main__.py,sha256=Q_zJU15DUA01YtlJ2mnaLCoId2YmgmreVEERGuQT-Y0,132
 pydantic_ai/_a2a.py,sha256=3_pl7JW2yHdu31qLgCrdcTZTqXaJNjAwUV6zavah_w8,12159
-pydantic_ai/_agent_graph.py,sha256=
+pydantic_ai/_agent_graph.py,sha256=U5MUfLpMRdNpHrvZ22eE_P0PoFx7uTX_AxxiZiGdu2U,56433
 pydantic_ai/_cli.py,sha256=iZTCFrpJy3aUZ49nJQ5nw2INFw6gPVQd8EhB0rahVcI,14005
 pydantic_ai/_function_schema.py,sha256=UnDGh7Wh5z70pEaRujXF_hKsSibQdN2ywI6lZGz3LUo,11663
 pydantic_ai/_griffe.py,sha256=BphvTL00FHxsSY56GM-bNyCOdwrpL0T3LbDQITWUK_Q,5280
@@ -17,11 +17,11 @@ pydantic_ai/_thinking_part.py,sha256=_0DajGyWPa50WUTPWN1UPfZw0xD8_hHcuSt0T3fgRr0
 pydantic_ai/_tool_manager.py,sha256=se5Fikg4HaiTOnxJ4LFrezktZ2Zfv9a2OH0V9PtFE54,10464
 pydantic_ai/_utils.py,sha256=TBzJ03szJPrmDdqRqKTyhRboTsyP6wppnCCprpZFBMw,16620
 pydantic_ai/ag_ui.py,sha256=X3b4P_IraypCE3r-L2ETIo8G951A1MDdP4P5TQ8Fces,32067
-pydantic_ai/builtin_tools.py,sha256=
+pydantic_ai/builtin_tools.py,sha256=EYSp9JVRethTLz-cL6HNrFRqnYaJMYBoDi-FTMcFf8c,8448
 pydantic_ai/direct.py,sha256=i5yZ9Tx8IiwXg6Nz9CW4-fyXzxnjP59fsklExCh5sjA,15111
-pydantic_ai/exceptions.py,sha256=
+pydantic_ai/exceptions.py,sha256=oPwXgGMADfA59ehGYNOhfqL9LOlaV_QnYq-ojrogZfA,5136
 pydantic_ai/format_prompt.py,sha256=cLyWO8g77Y4JzqVSikqodXaAfTn6i-k206rNhYTiIsE,9710
-pydantic_ai/mcp.py,sha256=
+pydantic_ai/mcp.py,sha256=FHlD5pHH7Z6h76P6IjddQz0Pt6F0gAVlepmks4U1Cho,36190
 pydantic_ai/messages.py,sha256=GBuRGeq3ZpEuSNdq96Mb7l-UVEl7cNuNUN1LpwaAaxQ,64848
 pydantic_ai/output.py,sha256=q91oqvJ-FqV9GbUUil7WVWbii66SVsVZ54AEm_NWSEo,13002
 pydantic_ai/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -30,8 +30,8 @@ pydantic_ai/retries.py,sha256=QM4oDA9DG-Y2qP06fbCp8Dqq8ups40Rr4HYjAOlbNyM,14650
 pydantic_ai/run.py,sha256=dV3zIztC-lfOCKecXg_Mcx2CyOfUbxQC0JbZuPvQhTI,16227
 pydantic_ai/settings.py,sha256=0mr6KudxKKjTG8e3nsv_8vDLxNhu_1-WvefCOzCGSYM,3565
 pydantic_ai/tools.py,sha256=dCecmJtRkF1ioqFYbfT00XGGqzGB4PPO9n6IrHCQtnc,20343
-pydantic_ai/usage.py,sha256=
-pydantic_ai/agent/__init__.py,sha256=
+pydantic_ai/usage.py,sha256=lhReoVNwqt7mfmWk40A1ddnKk4-MVFJ0qCl_oFdGzxo,16251
+pydantic_ai/agent/__init__.py,sha256=UTd9xNwUM5pPpnve0AGpOQ3WKT_FHJtfAO08wkFusKQ,66697
 pydantic_ai/agent/abstract.py,sha256=Akq1NvfzXbIEJwwvo_t-FQ6MobW_cPWSeUXffdUN7Og,55651
 pydantic_ai/agent/wrapper.py,sha256=ygwfMq24mGe3pGIK-TtPAy3cV7M8VZJW3ulEHvwNTck,10293
 pydantic_ai/common_tools/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -62,20 +62,20 @@ pydantic_ai/durable_exec/temporal/_toolset.py,sha256=IlPQrumm2MpZrb518ru15s0jIl8
 pydantic_ai/ext/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 pydantic_ai/ext/aci.py,sha256=YWYLXzTQJ6hS7qfgNycA8cRl69gogGgThqEU6II7eMA,2527
 pydantic_ai/ext/langchain.py,sha256=kmbbV3Cx2BiNYEJCZMHVYQquUQD-zG2L_bwDangy0Ww,2317
-pydantic_ai/models/__init__.py,sha256=
-pydantic_ai/models/anthropic.py,sha256
-pydantic_ai/models/bedrock.py,sha256=
-pydantic_ai/models/cohere.py,sha256=
+pydantic_ai/models/__init__.py,sha256=YPE0kbN0C71ljqH75m4Xsnlq5gkTxiIQt9efSGKHpf8,36146
+pydantic_ai/models/anthropic.py,sha256=tNHLk-sao7YEFNr8-bWU2rJS3a_yO8SHgrDacScvk_k,44772
+pydantic_ai/models/bedrock.py,sha256=M_3h_S3t2s7GOiP0YIHoJjwW3d2PLzNnmXTENomV9GM,33699
+pydantic_ai/models/cohere.py,sha256=wQ3UYiFMs5Oyeyz5sd6NyG3b94iCeYBptnJC8bEYOUA,13892
 pydantic_ai/models/fallback.py,sha256=fjQz7qRuxEwC6aFYkglBv-2Z39-6kZ931vs6o7PIti8,5016
 pydantic_ai/models/function.py,sha256=7-ej1m4f7c1TbvgB8sF02qlFD7Kf-EX-k_xN4RkbIEw,15880
-pydantic_ai/models/gemini.py,sha256=
-pydantic_ai/models/google.py,sha256=
-pydantic_ai/models/groq.py,sha256=
-pydantic_ai/models/huggingface.py,sha256=
+pydantic_ai/models/gemini.py,sha256=ZMO1mUX6GXPo0N2OHoi_nS9Lb-Rqf0YFsILoRcssaG4,40410
+pydantic_ai/models/google.py,sha256=rcYzRMELj98dgnw8YrBHM1R3HLVjCTkWgDXMSNQrxOA,42141
+pydantic_ai/models/groq.py,sha256=cB42E-EPX5O-lRRMsd3FTypVVuVVMDc2hV2c8H4N4rA,29665
+pydantic_ai/models/huggingface.py,sha256=iADyoCKYrNyjixr55rEpXW02F-sah4rLmqrThEcNNDw,21464
 pydantic_ai/models/instrumented.py,sha256=J8eVTutr3UP1r_wd5sM5c0BIdzkRqT-EGgd2NiF0ssQ,22319
 pydantic_ai/models/mcp_sampling.py,sha256=qY4y4nXbRpNp2QbkfjzWLvF_8KLZGXypz4cc0lYRHXU,3553
 pydantic_ai/models/mistral.py,sha256=fi57hADjYxZw8wEpAcNI6mqY32VG9hHK9GGRQ-9vlZg,33905
-pydantic_ai/models/openai.py,sha256=
+pydantic_ai/models/openai.py,sha256=wQJDGVAPzN5GNzny4ZN0CrnnrPIMxUOXQYfAtK0u7z4,108980
 pydantic_ai/models/test.py,sha256=5ER66nwZG7Iwm-KkzPo4vwNd3rulzgkpgysu4YcT1W4,20568
 pydantic_ai/models/wrapper.py,sha256=nwh8Gea59blbr1JDKlUnkYICuI9TUubC4qP7iZRRW28,2440
 pydantic_ai/profiles/__init__.py,sha256=UHknN-CYsQexUaxfsgz_J_uSZ9QwistLSuAErQkvbcM,3385
@@ -92,17 +92,17 @@ pydantic_ai/profiles/mistral.py,sha256=ll01PmcK3szwlTfbaJLQmfd0TADN8lqjov9HpPJzC
 pydantic_ai/profiles/moonshotai.py,sha256=e1RJnbEvazE6aJAqfmYLYGNtwNwg52XQDRDkcLrv3fU,272
 pydantic_ai/profiles/openai.py,sha256=MXOsktUqfcF2pBgYJMyFWMZafPJ7tejwyoFM2mjKzaY,9689
 pydantic_ai/profiles/qwen.py,sha256=9SnTpMKndxNQMFyumyaOczJa5JGWbYQdpVKKW4OzKjk,749
-pydantic_ai/providers/__init__.py,sha256=
+pydantic_ai/providers/__init__.py,sha256=UMgxQqav_-nxZw7oA5pUAlNJV694HwTtvMrv8WgELfI,4872
 pydantic_ai/providers/anthropic.py,sha256=vwNjO2JJ0Ux_3PXI9_XvzNZ24PKessm8z2ja1uzbBwM,3327
 pydantic_ai/providers/azure.py,sha256=PFRykTOfARMdANODnTLq__0ZynX7DlQ35GVf2Qs9VBY,5814
-pydantic_ai/providers/bedrock.py,sha256=
+pydantic_ai/providers/bedrock.py,sha256=bPbz-o3UhDzCRrg5xCrTfluLpDi2Yy9-JiCtC5mCIRk,8539
 pydantic_ai/providers/cerebras.py,sha256=3rIu092TYYuI5S4mlRjWxay5uomPbEDyHWIBMfrDBdA,3427
 pydantic_ai/providers/cohere.py,sha256=L3wgvcbxRRPrIKoZka_DQl1Uvi1VxBPMJikrzJ85iHE,2839
 pydantic_ai/providers/deepseek.py,sha256=zop0sb1XzdzSuI2dCNXrinfMdxoqB8H_rp2zw6ItbKc,3023
 pydantic_ai/providers/fireworks.py,sha256=t4PznbxnD9GnzZ3wYqSn6xdxRRJlYzNKf_EZzX0UWl8,3585
-pydantic_ai/providers/gateway.py,sha256
+pydantic_ai/providers/gateway.py,sha256=-h5EAx2I-0brlvIW1yVccxPWYt3Mbug8NpVNkD_6ehg,5760
 pydantic_ai/providers/github.py,sha256=yi7c16_Ao1E1QmehVfdsO9NrjDGK1moaHTK-P5cIrsI,4369
-pydantic_ai/providers/google.py,sha256=
+pydantic_ai/providers/google.py,sha256=FcP6P3zBbZGtdK5RSomcTErwqtG1LHHWGHEgGkUh65w,7603
 pydantic_ai/providers/google_gla.py,sha256=PnmnzgCOPJB1kMVnNVqZu2Cdzk7K9jx2z0MpbJ6EkII,1951
 pydantic_ai/providers/google_vertex.py,sha256=5vlSHghGVBQ4lW46tKbe0RLGfG-Ch69q2aJ2UB4R6HQ,9743
 pydantic_ai/providers/grok.py,sha256=adYTR_nEcADw8UXGNsl5HGzxOT7mhcYNa55cRziptgk,3100
@@ -116,6 +116,7 @@ pydantic_ai/providers/nebius.py,sha256=nGpgbZnBZgNz4wHTi1vgvc-9tO2_zj5r3vRzEUbhP
 pydantic_ai/providers/ollama.py,sha256=jg48g_3fYsvK8g-V3UOmR9HOsvnvb533BAB-rZZDxdA,4733
 pydantic_ai/providers/openai.py,sha256=cVVf99GgBnYBKYeWKBscvnkoRCu0ctWuKulG19lgWMo,3401
 pydantic_ai/providers/openrouter.py,sha256=o33Fk7kMyMhEM4NcSXU6IuG0cIUc45ySaenozrRypBI,4145
+pydantic_ai/providers/ovhcloud.py,sha256=qvPB7-hgeClBMeNSKOiTrF-pSp6RczRaqWg5iAeUwss,3428
 pydantic_ai/providers/together.py,sha256=QtIR1BVJjoEYLvsUFpvPe81akx0iQvjYptl87XVpCpo,3441
 pydantic_ai/providers/vercel.py,sha256=AdaRmTejcr4CLPY2X0D3iZ0T4xPdUm4HAXXLS0Q0jMA,4248
 pydantic_ai/toolsets/__init__.py,sha256=lYwnxjSqxY6rIYYDTDctyWPckDwnRX_9orvqY2Ap2B8,806
@@ -130,8 +131,8 @@ pydantic_ai/toolsets/prefixed.py,sha256=0KwcDkW8OM36ZUsOLVP5h-Nj2tPq78L3_E2c-1Fb
 pydantic_ai/toolsets/prepared.py,sha256=Zjfz6S8In6PBVxoKFN9sKPN984zO6t0awB7Lnq5KODw,1431
 pydantic_ai/toolsets/renamed.py,sha256=JuLHpi-hYPiSPlaTpN8WiXLiGsywYK0axi2lW2Qs75k,1637
 pydantic_ai/toolsets/wrapper.py,sha256=KRzF1p8dncHbva8CE6Ud-IC5E_aygIHlwH5atXK55k4,1673
-pydantic_ai_slim-1.
-pydantic_ai_slim-1.
-pydantic_ai_slim-1.
-pydantic_ai_slim-1.
-pydantic_ai_slim-1.
+pydantic_ai_slim-1.4.0.dist-info/METADATA,sha256=KDC3HgFLp5M-yucTuTNbGO9BJe4-YfjRlvjCQEGptAk,4703
+pydantic_ai_slim-1.4.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+pydantic_ai_slim-1.4.0.dist-info/entry_points.txt,sha256=kbKxe2VtDCYS06hsI7P3uZGxcVC08-FPt1rxeiMpIps,50
+pydantic_ai_slim-1.4.0.dist-info/licenses/LICENSE,sha256=vA6Jc482lEyBBuGUfD1pYx-cM7jxvLYOxPidZ30t_PQ,1100
+pydantic_ai_slim-1.4.0.dist-info/RECORD,,
{pydantic_ai_slim-1.2.1.dist-info → pydantic_ai_slim-1.4.0.dist-info}/WHEEL
File without changes

{pydantic_ai_slim-1.2.1.dist-info → pydantic_ai_slim-1.4.0.dist-info}/entry_points.txt
File without changes

{pydantic_ai_slim-1.2.1.dist-info → pydantic_ai_slim-1.4.0.dist-info}/licenses/LICENSE
File without changes