pydantic-ai-slim 1.0.15__py3-none-any.whl → 1.0.16__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.

pydantic_ai/models/anthropic.py CHANGED
@@ -351,7 +351,7 @@ class AnthropicModel(Model):
 
         return ModelResponse(
             parts=items,
-            usage=_map_usage(response),
+            usage=_map_usage(response, self._provider.name, self._provider.base_url, self._model_name),
             model_name=response.model,
             provider_response_id=response.id,
             provider_name=self._provider.name,
@@ -375,6 +375,7 @@ class AnthropicModel(Model):
             _response=peekable_response,
             _timestamp=_utils.now_utc(),
             _provider_name=self._provider.name,
+            _provider_url=self._provider.base_url,
         )
 
     def _get_tools(self, model_request_parameters: ModelRequestParameters) -> list[BetaToolUnionParam]:
@@ -616,7 +617,13 @@ class AnthropicModel(Model):
         }
 
 
-def _map_usage(message: BetaMessage | BetaRawMessageStartEvent | BetaRawMessageDeltaEvent) -> usage.RequestUsage:
+def _map_usage(
+    message: BetaMessage | BetaRawMessageStartEvent | BetaRawMessageDeltaEvent,
+    provider: str,
+    provider_url: str,
+    model: str,
+    existing_usage: usage.RequestUsage | None = None,
+) -> usage.RequestUsage:
     if isinstance(message, BetaMessage):
         response_usage = message.usage
     elif isinstance(message, BetaRawMessageStartEvent):
@@ -626,24 +633,17 @@ def _map_usage(message: BetaMessage | BetaRawMessageStartEvent | BetaRawMessageD
     else:
         assert_never(message)
 
-    # Store all integer-typed usage values in the details, except 'output_tokens' which is represented exactly by
-    # `response_tokens`
-    details: dict[str, int] = {
+    # In streaming, usage appears in different events.
+    # The values are cumulative, meaning new values should replace existing ones entirely.
+    details: dict[str, int] = (existing_usage.details if existing_usage else {}) | {
         key: value for key, value in response_usage.model_dump().items() if isinstance(value, int)
     }
 
-    # Usage coming from the RawMessageDeltaEvent doesn't have input token data, hence using `get`
-    # Tokens are only counted once between input_tokens, cache_creation_input_tokens, and cache_read_input_tokens
-    # This approach maintains request_tokens as the count of all input tokens, with cached counts as details
-    cache_write_tokens = details.get('cache_creation_input_tokens', 0)
-    cache_read_tokens = details.get('cache_read_input_tokens', 0)
-    request_tokens = details.get('input_tokens', 0) + cache_write_tokens + cache_read_tokens
-
-    return usage.RequestUsage(
-        input_tokens=request_tokens,
-        cache_read_tokens=cache_read_tokens,
-        cache_write_tokens=cache_write_tokens,
-        output_tokens=response_usage.output_tokens,
+    return usage.RequestUsage.extract(
+        dict(model=model, usage=details),
+        provider=provider,
+        provider_url=provider_url,
+        provider_fallback='anthropic',
         details=details,
     )
 
@@ -656,13 +656,14 @@ class AnthropicStreamedResponse(StreamedResponse):
     _response: AsyncIterable[BetaRawMessageStreamEvent]
     _timestamp: datetime
     _provider_name: str
+    _provider_url: str
 
     async def _get_event_iterator(self) -> AsyncIterator[ModelResponseStreamEvent]:  # noqa: C901
         current_block: BetaContentBlock | None = None
 
         async for event in self._response:
             if isinstance(event, BetaRawMessageStartEvent):
-                self._usage = _map_usage(event)
+                self._usage = _map_usage(event, self._provider_name, self._provider_url, self._model_name)
                 self.provider_response_id = event.message.id
 
             elif isinstance(event, BetaRawContentBlockStartEvent):
@@ -743,7 +744,7 @@ class AnthropicStreamedResponse(StreamedResponse):
                 pass
 
             elif isinstance(event, BetaRawMessageDeltaEvent):
-                self._usage = _map_usage(event)
+                self._usage = _map_usage(event, self._provider_name, self._provider_url, self._model_name, self._usage)
                 if raw_finish_reason := event.delta.stop_reason:  # pragma: no branch
                     self.provider_details = {'finish_reason': raw_finish_reason}
                     self.finish_reason = _FINISH_REASON_MAP.get(raw_finish_reason)
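
For context on the streaming change above: per the new comment in `_map_usage`, the usage reported in a `BetaRawMessageDeltaEvent` is cumulative and (per the removed comment) carries no input-token data, so the code now passes the previously accumulated `self._usage` back into `_map_usage` and merges its `details` with a dict union, letting newer values replace older ones key by key. A minimal sketch of that merge semantics, using plain dicts with illustrative values rather than the real Anthropic event types:

```python
# Illustrative only: mirrors `(existing_usage.details if existing_usage else {}) | {...}`.
existing = {'input_tokens': 120, 'cache_read_input_tokens': 40, 'output_tokens': 5}

# A later message_delta event reports cumulative output tokens but no input-side counts.
delta = {'output_tokens': 57}

merged = existing | delta
assert merged == {'input_tokens': 120, 'cache_read_input_tokens': 40, 'output_tokens': 57}
```
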
pydantic_ai/models/openai.py CHANGED
@@ -776,6 +776,9 @@ class OpenAIChatModel(Model):
                 image_url: ImageURL = {'url': item.url}
                 if metadata := item.vendor_metadata:
                     image_url['detail'] = metadata.get('detail', 'auto')
+                if item.force_download:
+                    image_content = await download_item(item, data_format='base64_uri', type_format='extension')
+                    image_url['url'] = image_content['data']
                 content.append(ChatCompletionContentPartImageParam(image_url=image_url, type='image_url'))
             elif isinstance(item, BinaryContent):
                 if self._is_text_like_media_type(item.media_type):
@@ -1529,11 +1532,16 @@ class OpenAIResponsesModel(Model):
                 raise RuntimeError(f'Unsupported binary content type: {item.media_type}')
             elif isinstance(item, ImageUrl):
                 detail: Literal['auto', 'low', 'high'] = 'auto'
+                image_url = item.url
                 if metadata := item.vendor_metadata:
                     detail = cast(Literal['auto', 'low', 'high'], metadata.get('detail', 'auto'))
+                if item.force_download:
+                    downloaded_item = await download_item(item, data_format='base64_uri', type_format='extension')
+                    image_url = downloaded_item['data']
+
                 content.append(
                     responses.ResponseInputImageParam(
-                        image_url=item.url,
+                        image_url=image_url,
                         type='input_image',
                         detail=detail,
                     )
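
Both OpenAI code paths above now honour `ImageUrl.force_download`: instead of forwarding the URL to the API, the model downloads the image itself and sends it inline as a base64 data URI. A rough usage sketch (the model name and URL are placeholders, not taken from the diff):

```python
from pydantic_ai import Agent
from pydantic_ai.messages import ImageUrl

agent = Agent('openai:gpt-4o')  # any OpenAI chat or responses model

result = agent.run_sync(
    [
        'What is shown in this image?',
        # force_download=True makes the model layer fetch the image and send it
        # inline as a base64 data URI rather than passing the URL through to OpenAI.
        ImageUrl(url='https://example.com/photo.jpg', force_download=True),
    ]
)
print(result.output)
```
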
pydantic_ai/result.py CHANGED
@@ -67,12 +67,12 @@ class AgentStream(Generic[AgentDepsT, OutputDataT]):
                 except ValidationError:
                     pass
         if self._raw_stream_response.final_result_event is not None:  # pragma: no branch
-            yield await self.validate_response_output(self._raw_stream_response.get())
+            yield await self.validate_response_output(self.response)
 
     async def stream_responses(self, *, debounce_by: float | None = 0.1) -> AsyncIterator[_messages.ModelResponse]:
         """Asynchronously stream the (unvalidated) model responses for the agent."""
         # if the message currently has any parts with content, yield before streaming
-        msg = self._raw_stream_response.get()
+        msg = self.response
         for part in msg.parts:
             if part.has_content():
                 yield msg
@@ -80,7 +80,7 @@ class AgentStream(Generic[AgentDepsT, OutputDataT]):
 
         async with _utils.group_by_temporal(self, debounce_by) as group_iter:
             async for _items in group_iter:
-                yield self._raw_stream_response.get()  # current state of the response
+                yield self.response  # current state of the response
 
     async def stream_text(self, *, delta: bool = False, debounce_by: float | None = 0.1) -> AsyncIterator[str]:
         """Stream the text result as an async iterable.
@@ -136,7 +136,7 @@ class AgentStream(Generic[AgentDepsT, OutputDataT]):
         async for _ in self:
             pass
 
-        return await self.validate_response_output(self._raw_stream_response.get())
+        return await self.validate_response_output(self.response)
 
     async def validate_response_output(
         self, message: _messages.ModelResponse, *, allow_partial: bool = False
@@ -201,7 +201,7 @@ class AgentStream(Generic[AgentDepsT, OutputDataT]):
         # yields tuples of (text_content, part_index)
         # we don't currently make use of the part_index, but in principle this may be useful
         # so we retain it here for now to make possible future refactors simpler
-        msg = self._raw_stream_response.get()
+        msg = self.response
         for i, part in enumerate(msg.parts):
             if isinstance(part, _messages.TextPart) and part.content:
                 yield part.content, i
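
The result.py changes are a pure refactor: every internal call to `self._raw_stream_response.get()` now goes through the stream's `response` property instead. Conceptually that property wraps the same call; here is a minimal sketch of the relationship using stand-in classes, not the real `AgentStream`:

```python
from dataclasses import dataclass, field


@dataclass
class _RawStream:
    """Stand-in for the raw streamed response: accumulates parts and exposes get()."""

    parts: list[str] = field(default_factory=list)

    def get(self) -> str:
        return ''.join(self.parts)


@dataclass
class _Stream:
    """Stand-in for AgentStream: call sites use `.response` instead of `._raw_stream_response.get()`."""

    _raw_stream_response: _RawStream

    @property
    def response(self) -> str:
        # Single point of access to the current state of the streamed response.
        return self._raw_stream_response.get()


stream = _Stream(_RawStream(['Hello', ', ', 'world']))
assert stream.response == 'Hello, world'
```
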
pydantic_ai/usage.py CHANGED
@@ -3,8 +3,9 @@ from __future__ import annotations as _annotations
 import dataclasses
 from copy import copy
 from dataclasses import dataclass, fields
-from typing import Annotated
+from typing import Annotated, Any
 
+from genai_prices.data_snapshot import get_snapshot
 from pydantic import AliasChoices, BeforeValidator, Field
 from typing_extensions import deprecated, overload
 
@@ -120,6 +121,39 @@ class RequestUsage(UsageBase):
         new_usage.incr(other)
         return new_usage
 
+    @classmethod
+    def extract(
+        cls,
+        data: Any,
+        *,
+        provider: str,
+        provider_url: str,
+        provider_fallback: str,
+        api_flavor: str | None = None,
+        details: dict[str, Any] | None = None,
+    ) -> RequestUsage:
+        """Extract usage information from the response data using genai-prices.
+
+        Args:
+            data: The response data from the model API.
+            provider: The actual provider ID
+            provider_url: The provider base_url
+            provider_fallback: The fallback provider ID to use if the actual provider is not found in genai-prices.
+                For example, an OpenAI model should set this to "openai" in case it has an obscure provider ID.
+            api_flavor: The API flavor to use when extracting usage information,
+                e.g. 'chat' or 'responses' for OpenAI.
+            details: Becomes the `details` field on the returned `RequestUsage` for convenience.
+        """
+        details = details or {}
+        for provider_id, provider_api_url in [(None, provider_url), (provider, None), (provider_fallback, None)]:
+            try:
+                provider_obj = get_snapshot().find_provider(None, provider_id, provider_api_url)
+                _model_ref, extracted_usage = provider_obj.extract_usage(data, api_flavor=api_flavor)
+                return cls(**{k: v for k, v in extracted_usage.__dict__.items() if v is not None}, details=details)
+            except Exception:
+                pass
+        return cls(details=details)
+
 
 @dataclass(repr=False, kw_only=True)
 class RunUsage(UsageBase):
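
The new `RequestUsage.extract` classmethod is what the Anthropic `_map_usage` above delegates to: it tries to resolve a genai-prices provider first by base URL, then by provider ID, then by the fallback ID, and returns a bare `RequestUsage(details=...)` if every lookup or extraction attempt fails. A hedged sketch of calling it directly, mirroring the Anthropic call site (the token counts and base URL are illustrative, and the exact fields that come back depend on the installed genai-prices snapshot):

```python
from pydantic_ai.usage import RequestUsage

# Shaped like the payload the Anthropic model builds: usage counters keyed the
# way the provider reports them (values are made up).
details = {'input_tokens': 120, 'cache_read_input_tokens': 40, 'output_tokens': 17}

request_usage = RequestUsage.extract(
    dict(model='claude-sonnet-4-0', usage=details),
    provider='anthropic',
    provider_url='https://api.anthropic.com',
    provider_fallback='anthropic',
    details=details,
)

# On any lookup or extraction failure, extract() falls back to RequestUsage(details=details).
print(request_usage)
```
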
pydantic_ai_slim-1.0.15.dist-info/METADATA → pydantic_ai_slim-1.0.16.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: pydantic-ai-slim
-Version: 1.0.15
+Version: 1.0.16
 Summary: Agent Framework / shim to use Pydantic with LLMs, slim package
 Project-URL: Homepage, https://github.com/pydantic/pydantic-ai/tree/main/pydantic_ai_slim
 Project-URL: Source, https://github.com/pydantic/pydantic-ai/tree/main/pydantic_ai_slim
@@ -29,11 +29,11 @@ Classifier: Topic :: Internet
 Classifier: Topic :: Software Development :: Libraries :: Python Modules
 Requires-Python: >=3.10
 Requires-Dist: exceptiongroup; python_version < '3.11'
-Requires-Dist: genai-prices>=0.0.28
+Requires-Dist: genai-prices>=0.0.30
 Requires-Dist: griffe>=1.3.2
 Requires-Dist: httpx>=0.27
 Requires-Dist: opentelemetry-api>=1.28.0
-Requires-Dist: pydantic-graph==1.0.15
+Requires-Dist: pydantic-graph==1.0.16
 Requires-Dist: pydantic>=2.10
 Requires-Dist: typing-inspection>=0.4.0
 Provides-Extra: a2a
@@ -57,7 +57,7 @@ Requires-Dist: dbos>=1.14.0; extra == 'dbos'
 Provides-Extra: duckduckgo
 Requires-Dist: ddgs>=9.0.0; extra == 'duckduckgo'
 Provides-Extra: evals
-Requires-Dist: pydantic-evals==1.0.15; extra == 'evals'
+Requires-Dist: pydantic-evals==1.0.16; extra == 'evals'
 Provides-Extra: google
 Requires-Dist: google-genai>=1.31.0; extra == 'google'
 Provides-Extra: groq
pydantic_ai_slim-1.0.15.dist-info/RECORD → pydantic_ai_slim-1.0.16.dist-info/RECORD RENAMED
@@ -1,8 +1,8 @@
 pydantic_ai/__init__.py,sha256=IgLTfgpGwbYsT_d_2wSucOfFyIMl1GH6v-yfkNs_zrM,5149
 pydantic_ai/__main__.py,sha256=Q_zJU15DUA01YtlJ2mnaLCoId2YmgmreVEERGuQT-Y0,132
 pydantic_ai/_a2a.py,sha256=3_pl7JW2yHdu31qLgCrdcTZTqXaJNjAwUV6zavah_w8,12159
-pydantic_ai/_agent_graph.py,sha256=ZzUkx9remZgSdgfMPfBhKtb7ZyRuSfQb-QLESWrHGcU,53965
-pydantic_ai/_cli.py,sha256=HxnWPUkEi_pRJZYZ_s25PkV361uAZY6IpN9ub5yMAzg,13994
+pydantic_ai/_agent_graph.py,sha256=D_Oo_LbPqGwkVAaEAcFFERwrg9pVkxeZ45vtTMhES8M,54230
+pydantic_ai/_cli.py,sha256=iZTCFrpJy3aUZ49nJQ5nw2INFw6gPVQd8EhB0rahVcI,14005
 pydantic_ai/_function_schema.py,sha256=UnDGh7Wh5z70pEaRujXF_hKsSibQdN2ywI6lZGz3LUo,11663
 pydantic_ai/_griffe.py,sha256=BphvTL00FHxsSY56GM-bNyCOdwrpL0T3LbDQITWUK_Q,5280
 pydantic_ai/_instrumentation.py,sha256=3XJxRUT0m2K6NfpAb-JKro4Rpw-8weqQ_ydtufeKVrU,2964
@@ -18,33 +18,33 @@ pydantic_ai/_tool_manager.py,sha256=se5Fikg4HaiTOnxJ4LFrezktZ2Zfv9a2OH0V9PtFE54,
 pydantic_ai/_utils.py,sha256=TBzJ03szJPrmDdqRqKTyhRboTsyP6wppnCCprpZFBMw,16620
 pydantic_ai/ag_ui.py,sha256=X3b4P_IraypCE3r-L2ETIo8G951A1MDdP4P5TQ8Fces,32067
 pydantic_ai/builtin_tools.py,sha256=xtRIlEGUJ9UQzxqsKIXs-KD0awHCxBOvXlZ7CLq5oDM,5666
-pydantic_ai/direct.py,sha256=bSYSA5RyxXQMA4gk6CA6meXRbWv2Mn3IeF8vr6viOXI,15061
+pydantic_ai/direct.py,sha256=i5yZ9Tx8IiwXg6Nz9CW4-fyXzxnjP59fsklExCh5sjA,15111
 pydantic_ai/exceptions.py,sha256=zsXZMKf2BJuVsfuHl1fWTkogLU37bd4yq7D6BKHAzVs,4968
-pydantic_ai/format_prompt.py,sha256=Enhe3wn-Fo15DcHM0_XgEOLSBWmNpwWYE4M_2vWw_VI,9565
+pydantic_ai/format_prompt.py,sha256=cLyWO8g77Y4JzqVSikqodXaAfTn6i-k206rNhYTiIsE,9710
 pydantic_ai/mcp.py,sha256=7Ouwepk-p2rOq_Rkv-MSZYyEGJ6FfrJvR7ySghuSLwc,36693
-pydantic_ai/messages.py,sha256=Riay2z3WPgx4VQN-MuXzTfrxqCW8aBSP1CHPjGPNtdo,63393
+pydantic_ai/messages.py,sha256=CuquO_BpWsMQP73GlhcGniMzBa3np926hjUxewommp4,64465
 pydantic_ai/output.py,sha256=q91oqvJ-FqV9GbUUil7WVWbii66SVsVZ54AEm_NWSEo,13002
 pydantic_ai/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-pydantic_ai/result.py,sha256=vfRe-s01zBtAZ7f_NoibptI3r80XfAq7CBhpJjgHSpo,26790
+pydantic_ai/result.py,sha256=sVabgrAJXmj96I7NM-w0RBz1rH5x_zZql1V6epei4JU,26700
 pydantic_ai/retries.py,sha256=QM4oDA9DG-Y2qP06fbCp8Dqq8ups40Rr4HYjAOlbNyM,14650
 pydantic_ai/run.py,sha256=dV3zIztC-lfOCKecXg_Mcx2CyOfUbxQC0JbZuPvQhTI,16227
 pydantic_ai/settings.py,sha256=0mr6KudxKKjTG8e3nsv_8vDLxNhu_1-WvefCOzCGSYM,3565
 pydantic_ai/tools.py,sha256=dCecmJtRkF1ioqFYbfT00XGGqzGB4PPO9n6IrHCQtnc,20343
-pydantic_ai/usage.py,sha256=mTKKkXGTdEdka4HqER4i2hE7ULF4CiunGbisjgr38Z8,14102
-pydantic_ai/agent/__init__.py,sha256=gmLwyTsyvL89sYvZR83Ba62A8lKLqTfRBk_KYbYYNQo,64116
-pydantic_ai/agent/abstract.py,sha256=PX6r2g0uzbqQdigb5qeTBdfCEbGy6Djb0haKCCmTLrc,52702
-pydantic_ai/agent/wrapper.py,sha256=xZw0spYA5oZkBc6GMcxV4hoQZSr02LbH9GCKqI3MokI,9734
+pydantic_ai/usage.py,sha256=_xXoPIfpENghWcjBvMj0URXQV6YwHWxxZYma4WZ4vUg,15710
+pydantic_ai/agent/__init__.py,sha256=amguU7oy8PT3NTy0MK4Ir-02hgu87hWxc9Ehc2WWyGw,64704
+pydantic_ai/agent/abstract.py,sha256=p94SOxrhvdBJNcBdIw1YHnBbpkgTV3LFjnDOQeWfucI,52859
+pydantic_ai/agent/wrapper.py,sha256=0F7CboJSOjsmhcUy5rNJCtB29DFa0GkCyA_yt_Nq960,9925
 pydantic_ai/common_tools/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 pydantic_ai/common_tools/duckduckgo.py,sha256=1ae_o3zqMGrC6KFqAmuqPwJqQgNBTisuvU2jX9KU8PI,2273
 pydantic_ai/common_tools/tavily.py,sha256=a7p2X03l9GS9B_0mvZZV3jePlCwf2TLNeej62-sPycs,2505
 pydantic_ai/durable_exec/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 pydantic_ai/durable_exec/dbos/__init__.py,sha256=H_dT0ERuNCBP0Im8eVGl8F9h7E9Aj87-pvmnLpDelF0,199
-pydantic_ai/durable_exec/dbos/_agent.py,sha256=35PMPRSWwfi98e-p1U-3WRJfz-rXqIBbW_qevbvmdXg,37904
+pydantic_ai/durable_exec/dbos/_agent.py,sha256=5GcnOatcuLs1jjLhIEp_9qLhGvg2DYkzP15Zxtf4FrU,38151
 pydantic_ai/durable_exec/dbos/_mcp_server.py,sha256=cLMCKmXQHqhqnn_E3Nf4IsNFIbqk-V7gnIvpmYeDCSA,2989
 pydantic_ai/durable_exec/dbos/_model.py,sha256=_Cxh0zYFF3cungXiSXpGHmjyBQF7KnksfurV7hMKp-E,5106
 pydantic_ai/durable_exec/dbos/_utils.py,sha256=_aNceFvTcNeqb78sTDYM2TdYph85tbdeLueyXY1lbTA,242
 pydantic_ai/durable_exec/temporal/__init__.py,sha256=XKwy68wfgmjr057nolRwGHTKiadxufpQEGEUprAV09k,5563
-pydantic_ai/durable_exec/temporal/_agent.py,sha256=ayNY1QpoVkgYeyQKSU90bNMoZJgWvJCMI_Ou9Ufq7kU,42557
+pydantic_ai/durable_exec/temporal/_agent.py,sha256=mwQoasXnxKxk-nMfqBXZFDCmutWSNfD2nv7Tn1xcUBw,42796
 pydantic_ai/durable_exec/temporal/_function_toolset.py,sha256=3n_A5uHzygsT88LM105kKuYqwxC1sjI4bOzETeUbT4E,5553
 pydantic_ai/durable_exec/temporal/_logfire.py,sha256=ASd7vb0cd61yESI0mgU2w9SCGxsOegz95HtQjKdlQkE,2472
 pydantic_ai/durable_exec/temporal/_mcp_server.py,sha256=vxfWeI7ZtYyXVgX621rPtG-WOZjlKWnqJhcvR9eBgIo,6014
@@ -55,7 +55,7 @@ pydantic_ai/ext/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 pydantic_ai/ext/aci.py,sha256=YWYLXzTQJ6hS7qfgNycA8cRl69gogGgThqEU6II7eMA,2527
 pydantic_ai/ext/langchain.py,sha256=kmbbV3Cx2BiNYEJCZMHVYQquUQD-zG2L_bwDangy0Ww,2317
 pydantic_ai/models/__init__.py,sha256=bZXZRrvQa5xEbv6GLvwmcI39vCZG-y6AxUGres1UtBk,35700
-pydantic_ai/models/anthropic.py,sha256=OYFmejUY6W2DOVyLEaslmUBTznuH96PoZ2mOWNZLJ1Y,38220
+pydantic_ai/models/anthropic.py,sha256=-vW7aoPrELKnJzbooCEhMu8__jY6iqvWdFJbIKeQPa8,38087
 pydantic_ai/models/bedrock.py,sha256=fha8zVZgDFYgDqO5nvBkZ2CEv4GV92yq_YnK4qmD73E,33639
 pydantic_ai/models/cohere.py,sha256=_ccK7XBts1OwD-RP8puU3z425SZ4PeJGts1WFhPjikg,14051
 pydantic_ai/models/fallback.py,sha256=fjQz7qRuxEwC6aFYkglBv-2Z39-6kZ931vs6o7PIti8,5016
@@ -67,7 +67,7 @@ pydantic_ai/models/huggingface.py,sha256=711C0ysjLYKriGfSxPiaF6lqjGcNmIaJaCvAXou
 pydantic_ai/models/instrumented.py,sha256=J8eVTutr3UP1r_wd5sM5c0BIdzkRqT-EGgd2NiF0ssQ,22319
 pydantic_ai/models/mcp_sampling.py,sha256=qY4y4nXbRpNp2QbkfjzWLvF_8KLZGXypz4cc0lYRHXU,3553
 pydantic_ai/models/mistral.py,sha256=fi57hADjYxZw8wEpAcNI6mqY32VG9hHK9GGRQ-9vlZg,33905
-pydantic_ai/models/openai.py,sha256=t-H7Na6Ak7FPApcwn86-Rg_IBCvgBrPwBN-gKSnN_Wg,99216
+pydantic_ai/models/openai.py,sha256=_qU8o9PBwmPmELQz3V8OAjxkKy8gXiKtdG6MKQ7Iq_Y,99708
 pydantic_ai/models/test.py,sha256=5ER66nwZG7Iwm-KkzPo4vwNd3rulzgkpgysu4YcT1W4,20568
 pydantic_ai/models/wrapper.py,sha256=nwh8Gea59blbr1JDKlUnkYICuI9TUubC4qP7iZRRW28,2440
 pydantic_ai/profiles/__init__.py,sha256=UHknN-CYsQexUaxfsgz_J_uSZ9QwistLSuAErQkvbcM,3385
@@ -121,8 +121,8 @@ pydantic_ai/toolsets/prefixed.py,sha256=0KwcDkW8OM36ZUsOLVP5h-Nj2tPq78L3_E2c-1Fb
 pydantic_ai/toolsets/prepared.py,sha256=Zjfz6S8In6PBVxoKFN9sKPN984zO6t0awB7Lnq5KODw,1431
 pydantic_ai/toolsets/renamed.py,sha256=JuLHpi-hYPiSPlaTpN8WiXLiGsywYK0axi2lW2Qs75k,1637
 pydantic_ai/toolsets/wrapper.py,sha256=KRzF1p8dncHbva8CE6Ud-IC5E_aygIHlwH5atXK55k4,1673
-pydantic_ai_slim-1.0.15.dist-info/METADATA,sha256=f2l2aq6lWglum9DIPNIXWcaeHlEl6VSjWe9zl_xiZso,4631
-pydantic_ai_slim-1.0.15.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-pydantic_ai_slim-1.0.15.dist-info/entry_points.txt,sha256=kbKxe2VtDCYS06hsI7P3uZGxcVC08-FPt1rxeiMpIps,50
-pydantic_ai_slim-1.0.15.dist-info/licenses/LICENSE,sha256=vA6Jc482lEyBBuGUfD1pYx-cM7jxvLYOxPidZ30t_PQ,1100
-pydantic_ai_slim-1.0.15.dist-info/RECORD,,
+pydantic_ai_slim-1.0.16.dist-info/METADATA,sha256=wnSrB4dase8ngwq7y219eZR9aA6FE5HYY6B4FNQ6e-Y,4631
+pydantic_ai_slim-1.0.16.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+pydantic_ai_slim-1.0.16.dist-info/entry_points.txt,sha256=kbKxe2VtDCYS06hsI7P3uZGxcVC08-FPt1rxeiMpIps,50
+pydantic_ai_slim-1.0.16.dist-info/licenses/LICENSE,sha256=vA6Jc482lEyBBuGUfD1pYx-cM7jxvLYOxPidZ30t_PQ,1100
+pydantic_ai_slim-1.0.16.dist-info/RECORD,,