pydantic-ai-slim 0.2.6__py3-none-any.whl → 0.2.8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
pydantic_ai/models/anthropic.py CHANGED
@@ -5,9 +5,9 @@ from collections.abc import AsyncGenerator, AsyncIterable, AsyncIterator
  from contextlib import asynccontextmanager
  from dataclasses import dataclass, field
  from datetime import datetime, timezone
- from json import JSONDecodeError, loads as json_loads
  from typing import Any, Literal, Union, cast, overload

+ from anthropic import AsyncAnthropic
  from typing_extensions import assert_never

  from .. import ModelHTTPError, UnexpectedModelBehavior, _utils, usage
@@ -41,32 +41,34 @@ from . import (
  )

  try:
- from anthropic import NOT_GIVEN, APIStatusError, AsyncAnthropic, AsyncStream
- from anthropic.types import (
- Base64PDFSourceParam,
- ContentBlock,
- DocumentBlockParam,
- ImageBlockParam,
- Message as AnthropicMessage,
- MessageParam,
- MetadataParam,
- PlainTextSourceParam,
- RawContentBlockDeltaEvent,
- RawContentBlockStartEvent,
- RawContentBlockStopEvent,
- RawMessageDeltaEvent,
- RawMessageStartEvent,
- RawMessageStopEvent,
- RawMessageStreamEvent,
- TextBlock,
- TextBlockParam,
- TextDelta,
- ToolChoiceParam,
- ToolParam,
- ToolResultBlockParam,
- ToolUseBlock,
- ToolUseBlockParam,
+ from anthropic import NOT_GIVEN, APIStatusError, AsyncStream
+ from anthropic.types.beta import (
+ BetaBase64PDFBlockParam,
+ BetaBase64PDFSourceParam,
+ BetaContentBlock,
+ BetaContentBlockParam,
+ BetaImageBlockParam,
+ BetaMessage,
+ BetaMessageParam,
+ BetaMetadataParam,
+ BetaPlainTextSourceParam,
+ BetaRawContentBlockDeltaEvent,
+ BetaRawContentBlockStartEvent,
+ BetaRawContentBlockStopEvent,
+ BetaRawMessageDeltaEvent,
+ BetaRawMessageStartEvent,
+ BetaRawMessageStopEvent,
+ BetaRawMessageStreamEvent,
+ BetaTextBlock,
+ BetaTextBlockParam,
+ BetaTextDelta,
+ BetaToolChoiceParam,
+ BetaToolParam,
+ BetaToolResultBlockParam,
+ BetaToolUseBlock,
+ BetaToolUseBlockParam,
  )
+
  except ImportError as _import_error:
  raise ImportError(
  'Please install `anthropic` to use the Anthropic model, '
@@ -96,7 +98,7 @@ class AnthropicModelSettings(ModelSettings, total=False):
  ALL FIELDS MUST BE `anthropic_` PREFIXED SO YOU CAN MERGE THEM WITH OTHER MODELS.
  """

- anthropic_metadata: MetadataParam
+ anthropic_metadata: BetaMetadataParam
  """An object describing metadata about the request.

  Contains `user_id`, an external identifier for the user who is associated with the request."""
@@ -185,7 +187,7 @@ class AnthropicModel(Model):
  stream: Literal[True],
  model_settings: AnthropicModelSettings,
  model_request_parameters: ModelRequestParameters,
- ) -> AsyncStream[RawMessageStreamEvent]:
+ ) -> AsyncStream[BetaRawMessageStreamEvent]:
  pass

  @overload
@@ -195,7 +197,7 @@ class AnthropicModel(Model):
  stream: Literal[False],
  model_settings: AnthropicModelSettings,
  model_request_parameters: ModelRequestParameters,
- ) -> AnthropicMessage:
+ ) -> BetaMessage:
  pass

  async def _messages_create(
@@ -204,10 +206,10 @@ class AnthropicModel(Model):
  stream: bool,
  model_settings: AnthropicModelSettings,
  model_request_parameters: ModelRequestParameters,
- ) -> AnthropicMessage | AsyncStream[RawMessageStreamEvent]:
+ ) -> BetaMessage | AsyncStream[BetaRawMessageStreamEvent]:
  # standalone function to make it easier to override
  tools = self._get_tools(model_request_parameters)
- tool_choice: ToolChoiceParam | None
+ tool_choice: BetaToolChoiceParam | None

  if not tools:
  tool_choice = None
@@ -225,7 +227,7 @@ class AnthropicModel(Model):
  try:
  extra_headers = model_settings.get('extra_headers', {})
  extra_headers.setdefault('User-Agent', get_user_agent())
- return await self.client.messages.create(
+ return await self.client.beta.messages.create(
  max_tokens=model_settings.get('max_tokens', 1024),
  system=system_prompt or NOT_GIVEN,
  messages=anthropic_messages,
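The `client.beta.messages.create(...)` call above is the Anthropic SDK's beta Messages API, which takes the same core arguments (`model`, `max_tokens`, `system`, `messages`) as the stable `client.messages.create(...)`. A minimal standalone sketch of such a call, outside pydantic-ai; the model name and prompt are placeholders, and an `ANTHROPIC_API_KEY` environment variable is assumed:

    import asyncio

    from anthropic import AsyncAnthropic


    async def main() -> None:
        # AsyncAnthropic() picks up ANTHROPIC_API_KEY from the environment.
        client = AsyncAnthropic()
        # Same shape as the call in the hunk above, minus pydantic-ai's settings plumbing.
        message = await client.beta.messages.create(
            model='claude-3-5-sonnet-latest',  # placeholder model name
            max_tokens=1024,
            system='You are a helpful assistant.',
            messages=[{'role': 'user', 'content': 'Hello!'}],
        )
        print(message.content)


    asyncio.run(main())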
@@ -246,14 +248,14 @@ class AnthropicModel(Model):
  raise ModelHTTPError(status_code=status_code, model_name=self.model_name, body=e.body) from e
  raise # pragma: lax no cover

- def _process_response(self, response: AnthropicMessage) -> ModelResponse:
+ def _process_response(self, response: BetaMessage) -> ModelResponse:
  """Process a non-streamed response, and prepare a message to return."""
  items: list[ModelResponsePart] = []
  for item in response.content:
- if isinstance(item, TextBlock):
+ if isinstance(item, BetaTextBlock):
  items.append(TextPart(content=item.text))
  else:
- assert isinstance(item, ToolUseBlock), 'unexpected item type'
+ assert isinstance(item, BetaToolUseBlock), f'unexpected item type {type(item)}'
  items.append(
  ToolCallPart(
  tool_name=item.name,
@@ -264,7 +266,7 @@ class AnthropicModel(Model):

  return ModelResponse(items, usage=_map_usage(response), model_name=response.model, vendor_id=response.id)

- async def _process_streamed_response(self, response: AsyncStream[RawMessageStreamEvent]) -> StreamedResponse:
+ async def _process_streamed_response(self, response: AsyncStream[BetaRawMessageStreamEvent]) -> StreamedResponse:
  peekable_response = _utils.PeekableAsyncStream(response)
  first_chunk = await peekable_response.peek()
  if isinstance(first_chunk, _utils.Unset):
@@ -276,21 +278,19 @@ class AnthropicModel(Model):
  _model_name=self._model_name, _response=peekable_response, _timestamp=timestamp
  )

- def _get_tools(self, model_request_parameters: ModelRequestParameters) -> list[ToolParam]:
+ def _get_tools(self, model_request_parameters: ModelRequestParameters) -> list[BetaToolParam]:
  tools = [self._map_tool_definition(r) for r in model_request_parameters.function_tools]
  if model_request_parameters.output_tools:
  tools += [self._map_tool_definition(r) for r in model_request_parameters.output_tools]
  return tools

- async def _map_message(self, messages: list[ModelMessage]) -> tuple[str, list[MessageParam]]:
+ async def _map_message(self, messages: list[ModelMessage]) -> tuple[str, list[BetaMessageParam]]:
  """Just maps a `pydantic_ai.Message` to a `anthropic.types.MessageParam`."""
  system_prompt_parts: list[str] = []
- anthropic_messages: list[MessageParam] = []
+ anthropic_messages: list[BetaMessageParam] = []
  for m in messages:
  if isinstance(m, ModelRequest):
- user_content_params: list[
- ToolResultBlockParam | TextBlockParam | ImageBlockParam | DocumentBlockParam
- ] = []
+ user_content_params: list[BetaContentBlockParam] = []
  for request_part in m.parts:
  if isinstance(request_part, SystemPromptPart):
  system_prompt_parts.append(request_part.content)
@@ -298,7 +298,7 @@ class AnthropicModel(Model):
  async for content in self._map_user_prompt(request_part):
  user_content_params.append(content)
  elif isinstance(request_part, ToolReturnPart):
- tool_result_block_param = ToolResultBlockParam(
+ tool_result_block_param = BetaToolResultBlockParam(
  tool_use_id=_guard_tool_call_id(t=request_part),
  type='tool_result',
  content=request_part.model_response_str(),
@@ -308,30 +308,30 @@ class AnthropicModel(Model):
  elif isinstance(request_part, RetryPromptPart): # pragma: no branch
  if request_part.tool_name is None:
  text = request_part.model_response() # pragma: no cover
- retry_param = TextBlockParam(type='text', text=text) # pragma: no cover
+ retry_param = BetaTextBlockParam(type='text', text=text) # pragma: no cover
  else:
- retry_param = ToolResultBlockParam(
+ retry_param = BetaToolResultBlockParam(
  tool_use_id=_guard_tool_call_id(t=request_part),
  type='tool_result',
  content=request_part.model_response(),
  is_error=True,
  )
  user_content_params.append(retry_param)
- anthropic_messages.append(MessageParam(role='user', content=user_content_params))
+ anthropic_messages.append(BetaMessageParam(role='user', content=user_content_params))
  elif isinstance(m, ModelResponse):
- assistant_content_params: list[TextBlockParam | ToolUseBlockParam] = []
+ assistant_content_params: list[BetaTextBlockParam | BetaToolUseBlockParam] = []
  for response_part in m.parts:
  if isinstance(response_part, TextPart):
- assistant_content_params.append(TextBlockParam(text=response_part.content, type='text'))
+ assistant_content_params.append(BetaTextBlockParam(text=response_part.content, type='text'))
  else:
- tool_use_block_param = ToolUseBlockParam(
+ tool_use_block_param = BetaToolUseBlockParam(
  id=_guard_tool_call_id(t=response_part),
  type='tool_use',
  name=response_part.tool_name,
  input=response_part.args_as_dict(),
  )
  assistant_content_params.append(tool_use_block_param)
- anthropic_messages.append(MessageParam(role='assistant', content=assistant_content_params))
+ anthropic_messages.append(BetaMessageParam(role='assistant', content=assistant_content_params))
  else:
  assert_never(m)
  system_prompt = '\n\n'.join(system_prompt_parts)
@@ -342,22 +342,22 @@ class AnthropicModel(Model):
  @staticmethod
  async def _map_user_prompt(
  part: UserPromptPart,
- ) -> AsyncGenerator[ImageBlockParam | TextBlockParam | DocumentBlockParam]:
+ ) -> AsyncGenerator[BetaContentBlockParam]:
  if isinstance(part.content, str):
- yield TextBlockParam(text=part.content, type='text')
+ yield BetaTextBlockParam(text=part.content, type='text')
  else:
  for item in part.content:
  if isinstance(item, str):
- yield TextBlockParam(text=item, type='text')
+ yield BetaTextBlockParam(text=item, type='text')
  elif isinstance(item, BinaryContent):
  if item.is_image:
- yield ImageBlockParam(
+ yield BetaImageBlockParam(
  source={'data': io.BytesIO(item.data), 'media_type': item.media_type, 'type': 'base64'}, # type: ignore
  type='image',
  )
  elif item.media_type == 'application/pdf':
- yield DocumentBlockParam(
- source=Base64PDFSourceParam(
+ yield BetaBase64PDFBlockParam(
+ source=BetaBase64PDFSourceParam(
  data=io.BytesIO(item.data),
  media_type='application/pdf',
  type='base64',
@@ -367,15 +367,17 @@ class AnthropicModel(Model):
  else:
  raise RuntimeError('Only images and PDFs are supported for binary content')
  elif isinstance(item, ImageUrl):
- yield ImageBlockParam(source={'type': 'url', 'url': item.url}, type='image')
+ yield BetaImageBlockParam(source={'type': 'url', 'url': item.url}, type='image')
  elif isinstance(item, DocumentUrl):
  if item.media_type == 'application/pdf':
- yield DocumentBlockParam(source={'url': item.url, 'type': 'url'}, type='document')
+ yield BetaBase64PDFBlockParam(source={'url': item.url, 'type': 'url'}, type='document')
  elif item.media_type == 'text/plain':
  response = await cached_async_http_client().get(item.url)
  response.raise_for_status()
- yield DocumentBlockParam(
- source=PlainTextSourceParam(data=response.text, media_type=item.media_type, type='text'),
+ yield BetaBase64PDFBlockParam(
+ source=BetaPlainTextSourceParam(
+ data=response.text, media_type=item.media_type, type='text'
+ ),
  type='document',
  )
  else: # pragma: no cover
@@ -384,7 +386,7 @@ class AnthropicModel(Model):
  raise RuntimeError(f'Unsupported content type: {type(item)}') # pragma: no cover

  @staticmethod
- def _map_tool_definition(f: ToolDefinition) -> ToolParam:
+ def _map_tool_definition(f: ToolDefinition) -> BetaToolParam:
  return {
  'name': f.name,
  'description': f.description,
@@ -392,12 +394,12 @@ class AnthropicModel(Model):
  }


- def _map_usage(message: AnthropicMessage | RawMessageStreamEvent) -> usage.Usage:
- if isinstance(message, AnthropicMessage):
+ def _map_usage(message: BetaMessage | BetaRawMessageStreamEvent) -> usage.Usage:
+ if isinstance(message, BetaMessage):
  response_usage = message.usage
- elif isinstance(message, RawMessageStartEvent):
+ elif isinstance(message, BetaRawMessageStartEvent):
  response_usage = message.message.usage
- elif isinstance(message, RawMessageDeltaEvent):
+ elif isinstance(message, BetaRawMessageDeltaEvent):
  response_usage = message.usage
  else:
  # No usage information provided in:
@@ -435,60 +437,51 @@ class AnthropicStreamedResponse(StreamedResponse):
  """Implementation of `StreamedResponse` for Anthropic models."""

  _model_name: AnthropicModelName
- _response: AsyncIterable[RawMessageStreamEvent]
+ _response: AsyncIterable[BetaRawMessageStreamEvent]
  _timestamp: datetime

  async def _get_event_iterator(self) -> AsyncIterator[ModelResponseStreamEvent]:
- current_block: ContentBlock | None = None
- current_json: str = ''
+ current_block: BetaContentBlock | None = None

  async for event in self._response:
  self._usage += _map_usage(event)

- if isinstance(event, RawContentBlockStartEvent):
+ if isinstance(event, BetaRawContentBlockStartEvent):
  current_block = event.content_block
- if isinstance(current_block, TextBlock) and current_block.text:
+ if isinstance(current_block, BetaTextBlock) and current_block.text:
  yield self._parts_manager.handle_text_delta( # pragma: lax no cover
  vendor_part_id='content', content=current_block.text
  )
- elif isinstance(current_block, ToolUseBlock): # pragma: no branch
+ elif isinstance(current_block, BetaToolUseBlock): # pragma: no branch
  maybe_event = self._parts_manager.handle_tool_call_delta(
  vendor_part_id=current_block.id,
  tool_name=current_block.name,
- args=cast(dict[str, Any], current_block.input),
+ args=cast(dict[str, Any], current_block.input) or None,
  tool_call_id=current_block.id,
  )
  if maybe_event is not None: # pragma: no branch
  yield maybe_event

- elif isinstance(event, RawContentBlockDeltaEvent):
- if isinstance(event.delta, TextDelta):
+ elif isinstance(event, BetaRawContentBlockDeltaEvent):
+ if isinstance(event.delta, BetaTextDelta):
  yield self._parts_manager.handle_text_delta( # pragma: no cover
  vendor_part_id='content', content=event.delta.text
  )
  elif ( # pragma: no branch
- current_block and event.delta.type == 'input_json_delta' and isinstance(current_block, ToolUseBlock)
+ current_block
+ and event.delta.type == 'input_json_delta'
+ and isinstance(current_block, BetaToolUseBlock)
  ):
- # Try to parse the JSON immediately, otherwise cache the value for later. This handles
- # cases where the JSON is not currently valid but will be valid once we stream more tokens.
- try:
- parsed_args = json_loads(current_json + event.delta.partial_json)
- current_json = ''
- except JSONDecodeError:
- current_json += event.delta.partial_json
- continue
-
- # For tool calls, we need to handle partial JSON updates
  maybe_event = self._parts_manager.handle_tool_call_delta(
  vendor_part_id=current_block.id,
  tool_name='',
- args=parsed_args,
+ args=event.delta.partial_json,
  tool_call_id=current_block.id,
  )
  if maybe_event is not None: # pragma: no branch
  yield maybe_event

- elif isinstance(event, (RawContentBlockStopEvent, RawMessageStopEvent)):
+ elif isinstance(event, (BetaRawContentBlockStopEvent, BetaRawMessageStopEvent)):
  current_block = None

  @property
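The buffering that 0.2.6 did around `input_json_delta` fragments (removed above, since 0.2.8 forwards the raw `partial_json` strings to the parts manager) is a general accumulate-then-parse pattern. A standalone illustration of that pattern, not pydantic-ai code from either version:

    # Accumulate streamed JSON fragments until they parse into a complete object.
    from json import JSONDecodeError, loads as json_loads


    def accumulate_partial_json(fragments: list[str]) -> dict:
        buffer = ''
        parsed: dict = {}
        for fragment in fragments:
            buffer += fragment
            try:
                # Only succeeds once enough of the object has arrived.
                parsed = json_loads(buffer)
                buffer = ''
            except JSONDecodeError:
                continue  # keep buffering until the JSON becomes valid
        return parsed


    # e.g. accumulate_partial_json(['{"city": "Lon', 'don"}']) == {'city': 'London'}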
pydantic_ai/models/bedrock.py CHANGED
@@ -552,8 +552,8 @@ class BedrockStreamedResponse(StreamedResponse):
  args=None,
  tool_call_id=tool_id,
  )
- if maybe_event:
- yield maybe_event # pragma: no cover
+ if maybe_event: # pragma: no branch
+ yield maybe_event
  if 'contentBlockDelta' in chunk:
  index = chunk['contentBlockDelta']['contentBlockIndex']
  delta = chunk['contentBlockDelta']['delta']
pydantic_ai/models/google.py CHANGED
@@ -442,7 +442,7 @@ def _process_response_from_parts(parts: list[Part], model_name: GoogleModelName,
  items.append(TextPart(content=part.text))
  elif part.function_call:
  assert part.function_call.name is not None
- tool_call_part = ToolCallPart(tool_name=part.function_call.name, args=part.function_call.args or {})
+ tool_call_part = ToolCallPart(tool_name=part.function_call.name, args=part.function_call.args)
  if part.function_call.id is not None:
  tool_call_part.tool_call_id = part.function_call.id # pragma: no cover
  items.append(tool_call_part)
pydantic_ai/models/mistral.py CHANGED
@@ -368,7 +368,7 @@ class MistralModel(Model):
  return MistralToolCall(
  id=_utils.guard_tool_call_id(t=t),
  type='function',
- function=MistralFunctionCall(name=t.tool_name, arguments=t.args),
+ function=MistralFunctionCall(name=t.tool_name, arguments=t.args or {}),
  )

  def _generate_user_output_format(self, schemas: list[dict[str, Any]]) -> MistralUserMessage:
pydantic_ai/models/openai.py CHANGED
@@ -277,7 +277,6 @@ class OpenAIModel(Model):
  return await self.client.chat.completions.create(
  model=self._model_name,
  messages=openai_messages,
- n=1,
  parallel_tool_calls=model_settings.get('parallel_tool_calls', NOT_GIVEN),
  tools=tools or NOT_GIVEN,
  tool_choice=tool_choice or NOT_GIVEN,
pydantic_ai/result.py CHANGED
@@ -544,15 +544,15 @@ class StreamedRunResult(Generic[AgentDepsT, OutputDataT]):
  yield ''.join(deltas)


- @dataclass
+ @dataclass(repr=False)
  class FinalResult(Generic[OutputDataT]):
  """Marker class storing the final output of an agent run and associated metadata."""

  output: OutputDataT
  """The final result data."""
- tool_name: str | None
+ tool_name: str | None = None
  """Name of the final output tool; `None` if the output came from unstructured text content."""
- tool_call_id: str | None
+ tool_call_id: str | None = None
  """ID of the tool call that produced the final output; `None` if the output came from unstructured text content."""

  @property
@@ -560,6 +560,8 @@ class FinalResult(Generic[OutputDataT]):
  def data(self) -> OutputDataT:
  return self.output

+ __repr__ = _utils.dataclasses_no_defaults_repr
+

  def _get_usage_checking_stream_response(
  stream_response: AsyncIterable[_messages.ModelResponseStreamEvent],
pydantic_ai/tools.py CHANGED
@@ -39,7 +39,7 @@ AgentDepsT = TypeVar('AgentDepsT', default=None, contravariant=True)
  """Type variable for agent dependencies."""


- @dataclasses.dataclass
+ @dataclasses.dataclass(repr=False)
  class RunContext(Generic[AgentDepsT]):
  """Information about the current call."""

@@ -73,6 +73,8 @@ class RunContext(Generic[AgentDepsT]):
  kwargs['tool_name'] = tool_name
  return dataclasses.replace(self, **kwargs)

+ __repr__ = _utils.dataclasses_no_defaults_repr
+

  ToolParams = ParamSpec('ToolParams', default=...)
  """Retrieval function param spec."""
@@ -367,7 +369,7 @@ class Tool(Generic[AgentDepsT]):
  if isinstance(message.args, str):
  args_dict = self._validator.validate_json(message.args or '{}')
  else:
- args_dict = self._validator.validate_python(message.args)
+ args_dict = self._validator.validate_python(message.args or {})
  except ValidationError as e:
  return self._on_error(e, message)

@@ -439,7 +441,7 @@ With PEP-728 this should be a TypedDict with `type: Literal['object']`, and `ext
  """


- @dataclass
+ @dataclass(repr=False)
  class ToolDefinition:
  """Definition of a tool passed to a model.

@@ -472,3 +474,5 @@ class ToolDefinition:

  Note: this is currently only supported by OpenAI models.
  """
+
+ __repr__ = _utils.dataclasses_no_defaults_repr
pydantic_ai/usage.py CHANGED
@@ -3,12 +3,13 @@ from __future__ import annotations as _annotations
  from copy import copy
  from dataclasses import dataclass

+ from . import _utils
  from .exceptions import UsageLimitExceeded

  __all__ = 'Usage', 'UsageLimits'


- @dataclass
+ @dataclass(repr=False)
  class Usage:
  """LLM usage associated with a request or run.

@@ -68,8 +69,10 @@ class Usage:
  """Whether any values are set and non-zero."""
  return bool(self.requests or self.request_tokens or self.response_tokens or self.details)

+ __repr__ = _utils.dataclasses_no_defaults_repr

- @dataclass
+
+ @dataclass(repr=False)
  class UsageLimits:
  """Limits on model usage.

@@ -124,3 +127,5 @@ class UsageLimits:
  total_tokens = usage.total_tokens or 0
  if self.total_tokens_limit is not None and total_tokens > self.total_tokens_limit:
  raise UsageLimitExceeded(f'Exceeded the total_tokens_limit of {self.total_tokens_limit} ({total_tokens=})')
+
+ __repr__ = _utils.dataclasses_no_defaults_repr
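Several dataclasses in this release switch to `@dataclass(repr=False)` and assign `__repr__ = _utils.dataclasses_no_defaults_repr`. The helper's implementation is not part of this diff; as a rough sketch of the idea suggested by its name (an assumption, not the actual `_utils` code), a repr of this kind omits fields that still hold their default values:

    # Assumed behaviour, for illustration only: a __repr__ that skips fields
    # whose current value still equals the field default.
    from __future__ import annotations

    from dataclasses import MISSING, dataclass, fields


    def no_defaults_repr(self) -> str:
        shown = []
        for f in fields(self):
            value = getattr(self, f.name)
            if f.default is not MISSING and value == f.default:
                continue  # skip fields left at their default
            shown.append(f'{f.name}={value!r}')
        return f'{type(self).__name__}({", ".join(shown)})'


    @dataclass(repr=False)
    class Example:
        output: str
        tool_name: str | None = None

        __repr__ = no_defaults_repr


    print(repr(Example(output='ok')))  # Example(output='ok') -- tool_name is omitted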
pydantic_ai_slim-0.2.6.dist-info/METADATA → pydantic_ai_slim-0.2.8.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: pydantic-ai-slim
- Version: 0.2.6
+ Version: 0.2.8
  Summary: Agent Framework / shim to use Pydantic with LLMs, slim package
  Author-email: Samuel Colvin <samuel@pydantic.dev>, Marcelo Trylesinski <marcelotryle@gmail.com>, David Montague <david@pydantic.dev>, Alex Hall <alex@pydantic.dev>
  License-Expression: MIT
@@ -30,11 +30,11 @@ Requires-Dist: exceptiongroup; python_version < '3.11'
  Requires-Dist: griffe>=1.3.2
  Requires-Dist: httpx>=0.27
  Requires-Dist: opentelemetry-api>=1.28.0
- Requires-Dist: pydantic-graph==0.2.6
+ Requires-Dist: pydantic-graph==0.2.8
  Requires-Dist: pydantic>=2.10
  Requires-Dist: typing-inspection>=0.4.0
  Provides-Extra: a2a
- Requires-Dist: fasta2a==0.2.6; extra == 'a2a'
+ Requires-Dist: fasta2a==0.2.8; extra == 'a2a'
  Provides-Extra: anthropic
  Requires-Dist: anthropic>=0.49.0; extra == 'anthropic'
  Provides-Extra: bedrock
@@ -48,7 +48,7 @@ Requires-Dist: cohere>=5.13.11; (platform_system != 'Emscripten') and extra == '
  Provides-Extra: duckduckgo
  Requires-Dist: duckduckgo-search>=7.0.0; extra == 'duckduckgo'
  Provides-Extra: evals
- Requires-Dist: pydantic-evals==0.2.6; extra == 'evals'
+ Requires-Dist: pydantic-evals==0.2.8; extra == 'evals'
  Provides-Extra: google
  Requires-Dist: google-genai>=1.15.0; extra == 'google'
  Provides-Extra: groq
pydantic_ai_slim-0.2.6.dist-info/RECORD → pydantic_ai_slim-0.2.8.dist-info/RECORD CHANGED
@@ -1,42 +1,42 @@
  pydantic_ai/__init__.py,sha256=5flxyMQJVrHRMQ3MYaZf1el2ctNs0JmPClKbw2Q-Lsk,1160
  pydantic_ai/__main__.py,sha256=Q_zJU15DUA01YtlJ2mnaLCoId2YmgmreVEERGuQT-Y0,132
  pydantic_ai/_a2a.py,sha256=8nNtx6GENDt2Ej3f1ui9L-FuNQBYVELpJFfwz-y7fUw,7234
- pydantic_ai/_agent_graph.py,sha256=7MekV5ytDQ1wtTr2W28V76pYPqN0SACiaJ7umU0kWe4,35666
+ pydantic_ai/_agent_graph.py,sha256=vabNuwWnJlvmLls8RkOgoda9T-kb_xf5TnzIgk-DeBI,36516
  pydantic_ai/_cli.py,sha256=Jpp4ymlYHMYsFU3nb048AK7MISENYDCgsUg9PPGWbt0,12943
  pydantic_ai/_griffe.py,sha256=Sf_DisE9k2TA0VFeVIK2nf1oOct5MygW86PBCACJkFA,5244
- pydantic_ai/_output.py,sha256=bN2gnIhtVW8-rIu6BUlXFqLbbDauipEdNHzaS5vMNv0,11357
- pydantic_ai/_parts_manager.py,sha256=mG6uh2hqeMrkxbaBNbHkQF91rLXJZre4WejE7K3kFs8,12210
- pydantic_ai/_pydantic.py,sha256=1EO1tv-ULj3l_L1qMcC7gIOKTL2e2a-xTbUD_kqKiOg,8921
+ pydantic_ai/_output.py,sha256=fJ3xyaIUf7R_QmYQaTZLqNBbZdVgzFTYsS2NxyqsybI,11447
+ pydantic_ai/_parts_manager.py,sha256=c0Gj29FH8K20AmxIr7MY8_SQVdb7SRIRcJYTQVmVYgc,12204
+ pydantic_ai/_pydantic.py,sha256=Dz9pVp-mcxBtIUK6TnfSZIAVXdsB1JKErZsINgX8Fpk,9161
  pydantic_ai/_system_prompt.py,sha256=602c2jyle2R_SesOrITBDETZqsLk4BZ8Cbo8yEhmx04,1120
- pydantic_ai/_utils.py,sha256=Vlww1AMQMTvFfGRlFKAyvl4VrE24Lk1MH28EwVTWy8c,10122
- pydantic_ai/agent.py,sha256=6NjKs4AhT4BYyTX98zjLJjNDw1fpyXRoY7NVzyUdamE,94598
- pydantic_ai/direct.py,sha256=nGIPlp2edVEc3aixcRyT6mqGASWgTZbv3F9aXNwzIKA,8441
+ pydantic_ai/_utils.py,sha256=XfZ7mZmrv5ZsU3DwjwLXwmbVNTQrgX_kIL9SIfatg90,10456
+ pydantic_ai/agent.py,sha256=gz_AFp5dO2xSbf9ej-jTNLa6-LZFV_JCWeRwA4Kq0tc,93721
+ pydantic_ai/direct.py,sha256=tXRcQ3fMkykaawO51VxnSwQnqcEmu1LhCy7U9gOyM-g,7768
  pydantic_ai/exceptions.py,sha256=IdFw594Ou7Vn4YFa7xdZ040_j_6nmyA3MPANbC7sys4,3175
  pydantic_ai/format_as_xml.py,sha256=IINfh1evWDphGahqHNLBArB5dQ4NIqS3S-kru35ztGg,372
  pydantic_ai/format_prompt.py,sha256=qdKep95Sjlr7u1-qag4JwPbjoURbG0GbeU_l5ODTNw4,4466
- pydantic_ai/mcp.py,sha256=UsiBsg2ZuFh0OTMc-tvvxzfyW9YiPSIe6h8_KGloxqI,11312
- pydantic_ai/messages.py,sha256=HTDWBpmEdhD1gN_mJgyugon6YuhvxHXDp-Cny9owGao,31191
+ pydantic_ai/mcp.py,sha256=C8oui0iM76Q2NRgcoywon62dHKR8CcVAto25sLvfyyQ,13076
+ pydantic_ai/messages.py,sha256=jJUh10-NGp58YkiTrKUUzffmd7JC2w2HXUlKWVhbGUM,32442
  pydantic_ai/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- pydantic_ai/result.py,sha256=YgaF1c_LS2F2YcLNHTS-8KxqwnrBj7s3Z9s8WIB_oNA,27747
+ pydantic_ai/result.py,sha256=un7AHAY3Rtqh1naSacZiTcrPgDGYo1vvAGlW7-95EO8,27825
  pydantic_ai/settings.py,sha256=U2XzZ9y1fIi_L6yCGTugZRxfk7_01rk5GKSgFqySHL4,3520
- pydantic_ai/tools.py,sha256=TSVPzO6GERP6izYHLj8wtNyY5vpjUY6BC96WutYXgvE,17990
- pydantic_ai/usage.py,sha256=NoK2JXSFU6dFR7sd2vX7D16l8A2-BtOdCvJkhcBVbrs,5447
+ pydantic_ai/tools.py,sha256=UspUFXFa54ohFX5FNEN34cMTObqkCSN6gXOrKizm5C0,18124
+ pydantic_ai/usage.py,sha256=35YPmItlzfNOwP35Rhh0qBUOlg5On5rUE7xqHQWrpaU,5596
  pydantic_ai/common_tools/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  pydantic_ai/common_tools/duckduckgo.py,sha256=Ty9tu1rCwMfGKgz1JAaC2q_4esmL6QvpkHQUN8F0Ecc,2152
  pydantic_ai/common_tools/tavily.py,sha256=Q1xxSF5HtXAaZ10Pp-OaDOHXwJf2mco9wScGEQXD7E4,2495
  pydantic_ai/models/__init__.py,sha256=xTCjoFWQg6zZTY3yI-rakOd3RjUAf3uUGrzWn7Oxebw,22299
  pydantic_ai/models/_json_schema.py,sha256=RD0cIU9mOGIdRuhkjLtPdlwfmF8XDOP1kLevIOLudaE,6540
- pydantic_ai/models/anthropic.py,sha256=Q1gem9MieSRIbO6yxs7Fe29K7OfkXKC5IlMBwvC8epE,21360
- pydantic_ai/models/bedrock.py,sha256=JCeHy0kRXJJEtZqBppdWsMZtt6aVhtvMih4CO7RT354,26368
+ pydantic_ai/models/anthropic.py,sha256=oyYJ8Eg-wllap_ckcCiKgDPW1CVNs5VmGo7v1LxO5_g,20973
+ pydantic_ai/models/bedrock.py,sha256=mv-NFtnbaXS4NyGgLjExItrD6Hcu-8gHh1wVu2a-gZQ,26369
  pydantic_ai/models/cohere.py,sha256=a9dxjrH5StfvPP5CTituQsOhujO2KPHD6JOGpsx1E9o,11852
  pydantic_ai/models/fallback.py,sha256=idOYGMo3CZzpCBT8DDiuPAAgnV2jzluDUq3ESb3KteM,4981
  pydantic_ai/models/function.py,sha256=rnihsyakyieCGbEyxfqzvoBHnR_3LJn4x6DXQqdAAM4,11458
  pydantic_ai/models/gemini.py,sha256=7KDOOBFR9pbGGGokMevoW34SdOng1_Cnscs5yBgwMSg,37429
- pydantic_ai/models/google.py,sha256=AtcRih9pmmRtXPScKfMrX1I6ViK5Eteo06frVV3E0Tk,24276
+ pydantic_ai/models/google.py,sha256=y576QHYUTm0pCXFZmhJC5aBLStGQhhsELcKOJr9HdSQ,24270
  pydantic_ai/models/groq.py,sha256=px2C3oW6Yvrk695E0dzDzDRUav6XiwkjyJvjro4Tb9M,17520
  pydantic_ai/models/instrumented.py,sha256=Y3SxAlP9cCX_Ch02c8qN9mrWMY9_tuyj6zMeN5Gz-W0,12356
- pydantic_ai/models/mistral.py,sha256=nRFpY_QDAyH-FLYCqA9ZURPvL5r6uzMy7BJoMYzXKxk,29344
- pydantic_ai/models/openai.py,sha256=HZHI_hu9NH9EuL053c6a9BnBZhFtXHDKBzpTLDkV9lE,49795
+ pydantic_ai/models/mistral.py,sha256=h24_VwAx2iaAN8FHW9W72U6y5YyhYm1X9XK5rIQvTGI,29350
+ pydantic_ai/models/openai.py,sha256=YEn9D-bhU-iOdyeB6vQoEnKaM_OK8uXCq45JyyOsLL8,49774
  pydantic_ai/models/test.py,sha256=Jlq-YQ9dhzENgmBMVerZpM4L-I2aPf7HH7ifIncyDlE,17010
  pydantic_ai/models/wrapper.py,sha256=43ntRkTF7rVBYLC-Ihdo1fkwpeveOpA_1fXe1fd3W9Y,1690
  pydantic_ai/providers/__init__.py,sha256=mBRULL9WaiGAgjKGY5aPQ7KatYoIOhkHSJJ3DkP6jvE,2695
@@ -52,8 +52,8 @@ pydantic_ai/providers/groq.py,sha256=DoY6qkfhuemuKB5JXhUkqG-3t1HQkxwSXoE_kHQIAK0
  pydantic_ai/providers/mistral.py,sha256=FAS7yKn26yWy7LTmEiBSvqe0HpTXi8_nIf824vE6RFQ,2892
  pydantic_ai/providers/openai.py,sha256=ePF-QWwLkGkSE5w245gTTDVR3VoTIUqFoIhQ0TAoUiA,2866
  pydantic_ai/providers/openrouter.py,sha256=ZwlpGjrHFqZefohdGJGL6MQiLOGKwu6kOCHziymJA_E,2215
- pydantic_ai_slim-0.2.6.dist-info/METADATA,sha256=BhyvvZNTY9znBMhA45CLU7WHNIYQyK1t1Z3RmD4U9Ds,3846
- pydantic_ai_slim-0.2.6.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- pydantic_ai_slim-0.2.6.dist-info/entry_points.txt,sha256=kbKxe2VtDCYS06hsI7P3uZGxcVC08-FPt1rxeiMpIps,50
- pydantic_ai_slim-0.2.6.dist-info/licenses/LICENSE,sha256=vA6Jc482lEyBBuGUfD1pYx-cM7jxvLYOxPidZ30t_PQ,1100
- pydantic_ai_slim-0.2.6.dist-info/RECORD,,
+ pydantic_ai_slim-0.2.8.dist-info/METADATA,sha256=kUj5-FD5LHFToM7D5OuHovcdYyLCbXWHIk8m1XvOL4Y,3846
+ pydantic_ai_slim-0.2.8.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ pydantic_ai_slim-0.2.8.dist-info/entry_points.txt,sha256=kbKxe2VtDCYS06hsI7P3uZGxcVC08-FPt1rxeiMpIps,50
+ pydantic_ai_slim-0.2.8.dist-info/licenses/LICENSE,sha256=vA6Jc482lEyBBuGUfD1pYx-cM7jxvLYOxPidZ30t_PQ,1100
+ pydantic_ai_slim-0.2.8.dist-info/RECORD,,