pydantic-ai-slim 0.0.25__py3-none-any.whl → 0.0.27__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as they appear in their public registry; it is provided for informational purposes only.
Potentially problematic release: this version of pydantic-ai-slim might be problematic.
- pydantic_ai/__init__.py +22 -4
- pydantic_ai/_agent_graph.py +15 -12
- pydantic_ai/agent.py +13 -13
- pydantic_ai/exceptions.py +42 -1
- pydantic_ai/messages.py +90 -1
- pydantic_ai/models/anthropic.py +58 -28
- pydantic_ai/models/cohere.py +22 -13
- pydantic_ai/models/fallback.py +116 -0
- pydantic_ai/models/function.py +28 -10
- pydantic_ai/models/gemini.py +78 -10
- pydantic_ai/models/groq.py +59 -27
- pydantic_ai/models/mistral.py +50 -15
- pydantic_ai/models/openai.py +84 -30
- pydantic_ai/tools.py +2 -2
- {pydantic_ai_slim-0.0.25.dist-info → pydantic_ai_slim-0.0.27.dist-info}/METADATA +3 -2
- pydantic_ai_slim-0.0.27.dist-info/RECORD +33 -0
- pydantic_ai_slim-0.0.25.dist-info/RECORD +0 -32
- {pydantic_ai_slim-0.0.25.dist-info → pydantic_ai_slim-0.0.27.dist-info}/WHEEL +0 -0
pydantic_ai/models/openai.py
CHANGED
@@ -1,19 +1,22 @@
 from __future__ import annotations as _annotations
 
+import base64
 import os
-from collections.abc import AsyncIterable, AsyncIterator
+from collections.abc import AsyncIterable, AsyncIterator
 from contextlib import asynccontextmanager
 from dataclasses import dataclass, field
 from datetime import datetime, timezone
-from itertools import chain
 from typing import Literal, Union, cast, overload
 
 from httpx import AsyncClient as AsyncHTTPClient
 from typing_extensions import assert_never
 
-from .. import UnexpectedModelBehavior, _utils, usage
+from .. import ModelHTTPError, UnexpectedModelBehavior, _utils, usage
 from .._utils import guard_tool_call_id as _guard_tool_call_id
 from ..messages import (
+    AudioUrl,
+    BinaryContent,
+    ImageUrl,
     ModelMessage,
     ModelRequest,
     ModelResponse,
@@ -37,9 +40,17 @@ from . import (
 )
 
 try:
-    from openai import NOT_GIVEN, AsyncOpenAI, AsyncStream
+    from openai import NOT_GIVEN, APIStatusError, AsyncOpenAI, AsyncStream
     from openai.types import ChatModel, chat
-    from openai.types.chat import
+    from openai.types.chat import (
+        ChatCompletionChunk,
+        ChatCompletionContentPartImageParam,
+        ChatCompletionContentPartInputAudioParam,
+        ChatCompletionContentPartParam,
+        ChatCompletionContentPartTextParam,
+    )
+    from openai.types.chat.chat_completion_content_part_image_param import ImageURL
+    from openai.types.chat.chat_completion_content_part_input_audio_param import InputAudio
 except ImportError as _import_error:
     raise ImportError(
         'Please install `openai` to use the OpenAI model, '
@@ -208,27 +219,35 @@ class OpenAIModel(Model):
         else:
             tool_choice = 'auto'
 
-        openai_messages
+        openai_messages: list[chat.ChatCompletionMessageParam] = []
+        for m in messages:
+            async for msg in self._map_message(m):
+                openai_messages.append(msg)
+
+        try:
+            return await self.client.chat.completions.create(
+                model=self._model_name,
+                messages=openai_messages,
+                n=1,
+                parallel_tool_calls=model_settings.get('parallel_tool_calls', NOT_GIVEN),
+                tools=tools or NOT_GIVEN,
+                tool_choice=tool_choice or NOT_GIVEN,
+                stream=stream,
+                stream_options={'include_usage': True} if stream else NOT_GIVEN,
+                max_tokens=model_settings.get('max_tokens', NOT_GIVEN),
+                temperature=model_settings.get('temperature', NOT_GIVEN),
+                top_p=model_settings.get('top_p', NOT_GIVEN),
+                timeout=model_settings.get('timeout', NOT_GIVEN),
+                seed=model_settings.get('seed', NOT_GIVEN),
+                presence_penalty=model_settings.get('presence_penalty', NOT_GIVEN),
+                frequency_penalty=model_settings.get('frequency_penalty', NOT_GIVEN),
+                logit_bias=model_settings.get('logit_bias', NOT_GIVEN),
+                reasoning_effort=model_settings.get('openai_reasoning_effort', NOT_GIVEN),
+            )
+        except APIStatusError as e:
+            if (status_code := e.status_code) >= 400:
+                raise ModelHTTPError(status_code=status_code, model_name=self.model_name, body=e.body) from e
+            raise
 
     def _process_response(self, response: chat.ChatCompletion) -> ModelResponse:
         """Process a non-streamed response, and prepare a message to return."""
@@ -261,10 +280,11 @@ class OpenAIModel(Model):
         tools += [self._map_tool_definition(r) for r in model_request_parameters.result_tools]
         return tools
 
-    def _map_message(self, message: ModelMessage) ->
+    async def _map_message(self, message: ModelMessage) -> AsyncIterable[chat.ChatCompletionMessageParam]:
         """Just maps a `pydantic_ai.Message` to a `openai.types.ChatCompletionMessageParam`."""
         if isinstance(message, ModelRequest):
-
+            async for item in self._map_user_message(message):
+                yield item
         elif isinstance(message, ModelResponse):
             texts: list[str] = []
             tool_calls: list[chat.ChatCompletionMessageToolCallParam] = []
@@ -305,7 +325,7 @@ class OpenAIModel(Model):
             },
         }
 
-    def _map_user_message(self, message: ModelRequest) ->
+    async def _map_user_message(self, message: ModelRequest) -> AsyncIterable[chat.ChatCompletionMessageParam]:
        for part in message.parts:
            if isinstance(part, SystemPromptPart):
                if self.system_prompt_role == 'developer':
@@ -315,7 +335,7 @@ class OpenAIModel(Model):
                 else:
                     yield chat.ChatCompletionSystemMessageParam(role='system', content=part.content)
             elif isinstance(part, UserPromptPart):
-                yield
+                yield await self._map_user_prompt(part)
             elif isinstance(part, ToolReturnPart):
                 yield chat.ChatCompletionToolMessageParam(
                     role='tool',
@@ -334,6 +354,40 @@ class OpenAIModel(Model):
             else:
                 assert_never(part)
 
+    @staticmethod
+    async def _map_user_prompt(part: UserPromptPart) -> chat.ChatCompletionUserMessageParam:
+        content: str | list[ChatCompletionContentPartParam]
+        if isinstance(part.content, str):
+            content = part.content
+        else:
+            content = []
+            for item in part.content:
+                if isinstance(item, str):
+                    content.append(ChatCompletionContentPartTextParam(text=item, type='text'))
+                elif isinstance(item, ImageUrl):
+                    image_url = ImageURL(url=item.url)
+                    content.append(ChatCompletionContentPartImageParam(image_url=image_url, type='image_url'))
+                elif isinstance(item, BinaryContent):
+                    base64_encoded = base64.b64encode(item.data).decode('utf-8')
+                    if item.is_image:
+                        image_url = ImageURL(url=f'data:{item.media_type};base64,{base64_encoded}')
+                        content.append(ChatCompletionContentPartImageParam(image_url=image_url, type='image_url'))
+                    elif item.is_audio:
+                        audio = InputAudio(data=base64_encoded, format=item.audio_format)
+                        content.append(ChatCompletionContentPartInputAudioParam(input_audio=audio, type='input_audio'))
+                    else:  # pragma: no cover
+                        raise RuntimeError(f'Unsupported binary content type: {item.media_type}')
+                elif isinstance(item, AudioUrl):  # pragma: no cover
+                    client = cached_async_http_client()
+                    response = await client.get(item.url)
+                    response.raise_for_status()
+                    base64_encoded = base64.b64encode(response.content).decode('utf-8')
+                    audio = InputAudio(data=base64_encoded, format=response.headers.get('content-type'))
+                    content.append(ChatCompletionContentPartInputAudioParam(input_audio=audio, type='input_audio'))
+                else:
+                    assert_never(item)
+        return chat.ChatCompletionUserMessageParam(role='user', content=content)
+
 
 @dataclass
 class OpenAIStreamedResponse(StreamedResponse):
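Taken together, the openai.py changes do two things: user prompts can now carry images and audio (ImageUrl, AudioUrl and BinaryContent are mapped to OpenAI content parts, with binary data base64-encoded into data: URLs), and APIStatusError responses with a status of 400 or above are re-raised as ModelHTTPError. A minimal usage sketch, assuming the agent run methods accept a sequence of user-content parts as the updated RunContext.prompt type suggests; the model name and image URL below are illustrative:

from pydantic_ai import Agent, ModelHTTPError
from pydantic_ai.messages import ImageUrl

agent = Agent('openai:gpt-4o')  # illustrative model name

try:
    result = agent.run_sync(
        ['What is shown in this image?', ImageUrl(url='https://example.com/photo.jpg')]
    )
    print(result.data)
except ModelHTTPError as exc:
    # _completions_create now converts 4xx/5xx APIStatusError responses into
    # ModelHTTPError, carrying status_code, model_name and body.
    print(f'{exc.model_name} returned HTTP {exc.status_code}: {exc.body}')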
pydantic_ai/tools.py
CHANGED
@@ -2,7 +2,7 @@ from __future__ import annotations as _annotations
 
 import dataclasses
 import inspect
-from collections.abc import Awaitable
+from collections.abc import Awaitable, Sequence
 from dataclasses import dataclass, field
 from typing import TYPE_CHECKING, Any, Callable, Generic, Literal, Union, cast
 
@@ -45,7 +45,7 @@ class RunContext(Generic[AgentDepsT]):
     """The model used in this run."""
     usage: Usage
     """LLM usage associated with the run."""
-    prompt: str
+    prompt: str | Sequence[_messages.UserContent]
     """The original user prompt passed to the run."""
     messages: list[_messages.ModelMessage] = field(default_factory=list)
     """Messages exchanged in the conversation so far."""
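Because RunContext.prompt is now str | Sequence[_messages.UserContent] rather than a plain string, tools that inspect the original prompt should handle both shapes. A small illustrative sketch (the agent and tool below are not part of the diff):

from pydantic_ai import Agent, RunContext

agent = Agent('openai:gpt-4o')  # illustrative model name

@agent.tool
def describe_prompt(ctx: RunContext[None]) -> str:
    """Summarise the original user prompt, which may be a string or a sequence of content parts."""
    if isinstance(ctx.prompt, str):
        return f'plain text prompt, {len(ctx.prompt)} characters'
    # A sequence of UserContent parts: strings, ImageUrl, AudioUrl, BinaryContent, ...
    part_names = ['str' if isinstance(p, str) else type(p).__name__ for p in ctx.prompt]
    return 'multimodal prompt with parts: ' + ', '.join(part_names)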
{pydantic_ai_slim-0.0.25.dist-info → pydantic_ai_slim-0.0.27.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: pydantic-ai-slim
-Version: 0.0.25
+Version: 0.0.27
 Summary: Agent Framework / shim to use Pydantic with LLMs, slim package
 Author-email: Samuel Colvin <samuel@pydantic.dev>
 License-Expression: MIT
@@ -25,10 +25,11 @@ Classifier: Topic :: Internet
 Classifier: Topic :: Software Development :: Libraries :: Python Modules
 Requires-Python: >=3.9
 Requires-Dist: eval-type-backport>=0.2.0
+Requires-Dist: exceptiongroup; python_version < '3.11'
 Requires-Dist: griffe>=1.3.2
 Requires-Dist: httpx>=0.27
 Requires-Dist: logfire-api>=1.2.0
-Requires-Dist: pydantic-graph==0.0.
+Requires-Dist: pydantic-graph==0.0.27
 Requires-Dist: pydantic>=2.10
 Provides-Extra: anthropic
 Requires-Dist: anthropic>=0.40.0; extra == 'anthropic'
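The new exceptiongroup marker dependency presumably exists to provide ExceptionGroup on Python 3.9 and 3.10, where it is not built in (it is from 3.11 onwards). The usual backport pattern, shown here as a sketch rather than the package's actual code:

import sys

if sys.version_info < (3, 11):
    # On older interpreters the backport package supplies ExceptionGroup.
    from exceptiongroup import ExceptionGroup  # noqa: F401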
pydantic_ai_slim-0.0.27.dist-info/RECORD
ADDED
@@ -0,0 +1,33 @@
+pydantic_ai/__init__.py,sha256=Rmpjmorf8YY1PtlkXRRNN-J3ZoQDSh7chaibVGyqY0k,937
+pydantic_ai/_agent_graph.py,sha256=lNKTtUyVY14M0WODP5K1NUaE9zJA716-9rZutapSg8A,29042
+pydantic_ai/_griffe.py,sha256=RYRKiLbgG97QxnazbAwlnc74XxevGHLQet-FGfq9qls,3960
+pydantic_ai/_parts_manager.py,sha256=ARfDQY1_5AIY5rNl_M2fAYHEFCe03ZxdhgjHf9qeIKw,11872
+pydantic_ai/_pydantic.py,sha256=dROz3Hmfdi0C2exq88FhefDRVo_8S3rtkXnoUHzsz0c,8753
+pydantic_ai/_result.py,sha256=tN1pVulf_EM4bkBvpNUWPnUXezLY-sBrJEVCFdy2nLU,10264
+pydantic_ai/_system_prompt.py,sha256=602c2jyle2R_SesOrITBDETZqsLk4BZ8Cbo8yEhmx04,1120
+pydantic_ai/_utils.py,sha256=w9BYSfFZiaX757fRtMRclOL1uYzyQnxV_lxqbU2WTPs,9435
+pydantic_ai/agent.py,sha256=43xGZCeEsfl_eedFVgY94RAhRoLBIw4XdV8WH8omw3M,63659
+pydantic_ai/exceptions.py,sha256=1ujJeB3jDDQ-pH5ydBYrgStvR35-GlEW0bYGTGEr4ME,3127
+pydantic_ai/format_as_xml.py,sha256=QE7eMlg5-YUMw1_2kcI3h0uKYPZZyGkgXFDtfZTMeeI,4480
+pydantic_ai/messages.py,sha256=U-RgeRsMR-Ew6IoeBDrnQVONX9AwxyVd0aTnAxEA7EM,20918
+pydantic_ai/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+pydantic_ai/result.py,sha256=Rqbog6efO1l_bFJSuAd-_ZZLoQa_rz4motOGeR_5N3I,16803
+pydantic_ai/settings.py,sha256=ntuWnke9UA18aByDxk9OIhN0tAgOaPdqCEkRf-wlp8Y,3059
+pydantic_ai/tools.py,sha256=c6QPa3Lio5S-iC3av7rYHaxHQTH_2y5LmlL6DGLmTRk,13249
+pydantic_ai/usage.py,sha256=60d9f6M7YEYuKMbqDGDogX4KsA73fhDtWyDXYXoIPaI,4948
+pydantic_ai/models/__init__.py,sha256=Qw_g58KzGUmuDKOBa2u3yFrNbgCXkdRSNtkhseLC1VM,13758
+pydantic_ai/models/anthropic.py,sha256=E194k2TgZVHPAOMmMBARlTlT4VtGAZGgiq2BTa56nEc,19398
+pydantic_ai/models/cohere.py,sha256=6F6eWPGVT7mpMXlRugbVbR-a8Q1zmb1SKS_fWOoBL80,11514
+pydantic_ai/models/fallback.py,sha256=smHwNIpxu19JsgYYjY0nmzl3yox7yQRJ0Ir08zdhnk0,4207
+pydantic_ai/models/function.py,sha256=EMlASu436RE-XzOTuHGkIqkS8J4WItUvwwaL08LLkX8,10948
+pydantic_ai/models/gemini.py,sha256=2hDTMIMf899dp-MS0tLT7m1GkXsL9KIRMBklGM0VLB4,34223
+pydantic_ai/models/groq.py,sha256=Z4sZJDu5Yxa2tZiAPp9EjSVMz4uwLhS3fW7kFSc09gI,16406
+pydantic_ai/models/instrumented.py,sha256=cvjHgQE_gJOH-YVQyvx9tBpGNB_Iuc8N8THn0TL0Rjk,8791
+pydantic_ai/models/mistral.py,sha256=ZJ4xPcL9wJIQ5io34yP2fPyXy8GZrSvsW4itZiKPYFw,27448
+pydantic_ai/models/openai.py,sha256=koIcK_pDHmV-JFq_-VIzU-edAqGKOOzkSk5QSYWvfoc,20156
+pydantic_ai/models/test.py,sha256=Ux20cmuJFkhvI9L1N7ItHNFcd-j284TBEsrM53eWRag,16873
+pydantic_ai/models/vertexai.py,sha256=9Kp_1KMBlbP8_HRJTuFnrkkFmlJ7yFhADQYjxOgIh9Y,9523
+pydantic_ai/models/wrapper.py,sha256=Zr3fgiUBpt2N9gXds6iSwaMEtEsFKr9WwhpHjSoHa7o,1410
+pydantic_ai_slim-0.0.27.dist-info/METADATA,sha256=xGhDjUz-jpvDe6I05iUbhPYb3X-1IpZWxt-j461m5hw,2894
+pydantic_ai_slim-0.0.27.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+pydantic_ai_slim-0.0.27.dist-info/RECORD,,
pydantic_ai_slim-0.0.25.dist-info/RECORD
REMOVED
@@ -1,32 +0,0 @@
-pydantic_ai/__init__.py,sha256=2Sc-fnJ3OtqFrIZ47PsVVBE_TWPHWuKbik9upnFQw_4,647
-pydantic_ai/_agent_graph.py,sha256=ihtYxeh59G6BR8piOp7eZyNoB4nLb3XhFnoDuwna1G4,28789
-pydantic_ai/_griffe.py,sha256=RYRKiLbgG97QxnazbAwlnc74XxevGHLQet-FGfq9qls,3960
-pydantic_ai/_parts_manager.py,sha256=ARfDQY1_5AIY5rNl_M2fAYHEFCe03ZxdhgjHf9qeIKw,11872
-pydantic_ai/_pydantic.py,sha256=dROz3Hmfdi0C2exq88FhefDRVo_8S3rtkXnoUHzsz0c,8753
-pydantic_ai/_result.py,sha256=tN1pVulf_EM4bkBvpNUWPnUXezLY-sBrJEVCFdy2nLU,10264
-pydantic_ai/_system_prompt.py,sha256=602c2jyle2R_SesOrITBDETZqsLk4BZ8Cbo8yEhmx04,1120
-pydantic_ai/_utils.py,sha256=w9BYSfFZiaX757fRtMRclOL1uYzyQnxV_lxqbU2WTPs,9435
-pydantic_ai/agent.py,sha256=MGVvvccxcD2g9t-Tjd1uyff0CwGk4D7--KWIRgB9Gxo,63361
-pydantic_ai/exceptions.py,sha256=eGDKX6bGhgVxXBzu81Sk3iiAkXr0GUtgT7bD5Rxlqpg,2028
-pydantic_ai/format_as_xml.py,sha256=QE7eMlg5-YUMw1_2kcI3h0uKYPZZyGkgXFDtfZTMeeI,4480
-pydantic_ai/messages.py,sha256=RGzYX58M1plN-y2BmoGLA9m3r-tcha60OmzqOuiMLbk,18218
-pydantic_ai/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-pydantic_ai/result.py,sha256=Rqbog6efO1l_bFJSuAd-_ZZLoQa_rz4motOGeR_5N3I,16803
-pydantic_ai/settings.py,sha256=ntuWnke9UA18aByDxk9OIhN0tAgOaPdqCEkRf-wlp8Y,3059
-pydantic_ai/tools.py,sha256=lhupwm815lPlFFS79B0P61AyhUYtepA62LbZOCJrPEY,13205
-pydantic_ai/usage.py,sha256=60d9f6M7YEYuKMbqDGDogX4KsA73fhDtWyDXYXoIPaI,4948
-pydantic_ai/models/__init__.py,sha256=Qw_g58KzGUmuDKOBa2u3yFrNbgCXkdRSNtkhseLC1VM,13758
-pydantic_ai/models/anthropic.py,sha256=9kyWzt81Vrx9NUUZVXLRuGB0cJ3jPVNVCo9jgX89tPI,17752
-pydantic_ai/models/cohere.py,sha256=Rotl0F8O3fz7hvR269j-JGves42WieNDWU046XbIpWk,11000
-pydantic_ai/models/function.py,sha256=YU2Y3t5Olw0tBzWXUr0DkjtxtyVXUtlOhbmoRdtKx84,10333
-pydantic_ai/models/gemini.py,sha256=vTN3lRTTXfVHFAUJJQN2IRH24iabUWe5feYu8syV5Lc,31433
-pydantic_ai/models/groq.py,sha256=M7P-9LrH1lm4wyIMiytsHf28bwkukOyVzw3S7F9sbic,14612
-pydantic_ai/models/instrumented.py,sha256=cvjHgQE_gJOH-YVQyvx9tBpGNB_Iuc8N8THn0TL0Rjk,8791
-pydantic_ai/models/mistral.py,sha256=yUJj9MT8iaujMrD7dMOOAWHfAJJQEuWGNLtChHdyakg,25795
-pydantic_ai/models/openai.py,sha256=B6kn8xP5J5Pjfwyq5zJStpCWph8ZU8B6-NuNgNMXdLQ,17151
-pydantic_ai/models/test.py,sha256=Ux20cmuJFkhvI9L1N7ItHNFcd-j284TBEsrM53eWRag,16873
-pydantic_ai/models/vertexai.py,sha256=9Kp_1KMBlbP8_HRJTuFnrkkFmlJ7yFhADQYjxOgIh9Y,9523
-pydantic_ai/models/wrapper.py,sha256=Zr3fgiUBpt2N9gXds6iSwaMEtEsFKr9WwhpHjSoHa7o,1410
-pydantic_ai_slim-0.0.25.dist-info/METADATA,sha256=VbDk3sx1LMYhOxZaouy_IcVSlFvOzE00ph8NSm-Wdyo,2839
-pydantic_ai_slim-0.0.25.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-pydantic_ai_slim-0.0.25.dist-info/RECORD,,
{pydantic_ai_slim-0.0.25.dist-info → pydantic_ai_slim-0.0.27.dist-info}/WHEEL
File without changes