pydantic-ai-slim 0.0.40__tar.gz → 0.0.41__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of pydantic-ai-slim might be problematic.

Files changed (49)
  1. {pydantic_ai_slim-0.0.40 → pydantic_ai_slim-0.0.41}/PKG-INFO +2 -2
  2. {pydantic_ai_slim-0.0.40 → pydantic_ai_slim-0.0.41}/pydantic_ai/models/anthropic.py +31 -2
  3. {pydantic_ai_slim-0.0.40 → pydantic_ai_slim-0.0.41}/pydantic_ai/models/fallback.py +13 -8
  4. {pydantic_ai_slim-0.0.40 → pydantic_ai_slim-0.0.41}/pydantic_ai/models/gemini.py +3 -5
  5. {pydantic_ai_slim-0.0.40 → pydantic_ai_slim-0.0.41}/pydantic_ai/models/groq.py +2 -3
  6. {pydantic_ai_slim-0.0.40 → pydantic_ai_slim-0.0.41}/pydantic_ai/models/mistral.py +37 -5
  7. {pydantic_ai_slim-0.0.40 → pydantic_ai_slim-0.0.41}/pydantic_ai/models/openai.py +2 -3
  8. {pydantic_ai_slim-0.0.40 → pydantic_ai_slim-0.0.41}/pydantic_ai/providers/__init__.py +8 -0
  9. pydantic_ai_slim-0.0.41/pydantic_ai/providers/anthropic.py +74 -0
  10. pydantic_ai_slim-0.0.41/pydantic_ai/providers/mistral.py +73 -0
  11. {pydantic_ai_slim-0.0.40 → pydantic_ai_slim-0.0.41}/pyproject.toml +2 -2
  12. {pydantic_ai_slim-0.0.40 → pydantic_ai_slim-0.0.41}/.gitignore +0 -0
  13. {pydantic_ai_slim-0.0.40 → pydantic_ai_slim-0.0.41}/README.md +0 -0
  14. {pydantic_ai_slim-0.0.40 → pydantic_ai_slim-0.0.41}/pydantic_ai/__init__.py +0 -0
  15. {pydantic_ai_slim-0.0.40 → pydantic_ai_slim-0.0.41}/pydantic_ai/_agent_graph.py +0 -0
  16. {pydantic_ai_slim-0.0.40 → pydantic_ai_slim-0.0.41}/pydantic_ai/_cli.py +0 -0
  17. {pydantic_ai_slim-0.0.40 → pydantic_ai_slim-0.0.41}/pydantic_ai/_griffe.py +0 -0
  18. {pydantic_ai_slim-0.0.40 → pydantic_ai_slim-0.0.41}/pydantic_ai/_parts_manager.py +0 -0
  19. {pydantic_ai_slim-0.0.40 → pydantic_ai_slim-0.0.41}/pydantic_ai/_pydantic.py +0 -0
  20. {pydantic_ai_slim-0.0.40 → pydantic_ai_slim-0.0.41}/pydantic_ai/_result.py +0 -0
  21. {pydantic_ai_slim-0.0.40 → pydantic_ai_slim-0.0.41}/pydantic_ai/_system_prompt.py +0 -0
  22. {pydantic_ai_slim-0.0.40 → pydantic_ai_slim-0.0.41}/pydantic_ai/_utils.py +0 -0
  23. {pydantic_ai_slim-0.0.40 → pydantic_ai_slim-0.0.41}/pydantic_ai/agent.py +0 -0
  24. {pydantic_ai_slim-0.0.40 → pydantic_ai_slim-0.0.41}/pydantic_ai/common_tools/__init__.py +0 -0
  25. {pydantic_ai_slim-0.0.40 → pydantic_ai_slim-0.0.41}/pydantic_ai/common_tools/duckduckgo.py +0 -0
  26. {pydantic_ai_slim-0.0.40 → pydantic_ai_slim-0.0.41}/pydantic_ai/common_tools/tavily.py +0 -0
  27. {pydantic_ai_slim-0.0.40 → pydantic_ai_slim-0.0.41}/pydantic_ai/exceptions.py +0 -0
  28. {pydantic_ai_slim-0.0.40 → pydantic_ai_slim-0.0.41}/pydantic_ai/format_as_xml.py +0 -0
  29. {pydantic_ai_slim-0.0.40 → pydantic_ai_slim-0.0.41}/pydantic_ai/messages.py +0 -0
  30. {pydantic_ai_slim-0.0.40 → pydantic_ai_slim-0.0.41}/pydantic_ai/models/__init__.py +0 -0
  31. {pydantic_ai_slim-0.0.40 → pydantic_ai_slim-0.0.41}/pydantic_ai/models/bedrock.py +0 -0
  32. {pydantic_ai_slim-0.0.40 → pydantic_ai_slim-0.0.41}/pydantic_ai/models/cohere.py +0 -0
  33. {pydantic_ai_slim-0.0.40 → pydantic_ai_slim-0.0.41}/pydantic_ai/models/function.py +0 -0
  34. {pydantic_ai_slim-0.0.40 → pydantic_ai_slim-0.0.41}/pydantic_ai/models/instrumented.py +0 -0
  35. {pydantic_ai_slim-0.0.40 → pydantic_ai_slim-0.0.41}/pydantic_ai/models/test.py +0 -0
  36. {pydantic_ai_slim-0.0.40 → pydantic_ai_slim-0.0.41}/pydantic_ai/models/vertexai.py +0 -0
  37. {pydantic_ai_slim-0.0.40 → pydantic_ai_slim-0.0.41}/pydantic_ai/models/wrapper.py +0 -0
  38. {pydantic_ai_slim-0.0.40 → pydantic_ai_slim-0.0.41}/pydantic_ai/providers/azure.py +0 -0
  39. {pydantic_ai_slim-0.0.40 → pydantic_ai_slim-0.0.41}/pydantic_ai/providers/bedrock.py +0 -0
  40. {pydantic_ai_slim-0.0.40 → pydantic_ai_slim-0.0.41}/pydantic_ai/providers/deepseek.py +0 -0
  41. {pydantic_ai_slim-0.0.40 → pydantic_ai_slim-0.0.41}/pydantic_ai/providers/google_gla.py +0 -0
  42. {pydantic_ai_slim-0.0.40 → pydantic_ai_slim-0.0.41}/pydantic_ai/providers/google_vertex.py +0 -0
  43. {pydantic_ai_slim-0.0.40 → pydantic_ai_slim-0.0.41}/pydantic_ai/providers/groq.py +0 -0
  44. {pydantic_ai_slim-0.0.40 → pydantic_ai_slim-0.0.41}/pydantic_ai/providers/openai.py +0 -0
  45. {pydantic_ai_slim-0.0.40 → pydantic_ai_slim-0.0.41}/pydantic_ai/py.typed +0 -0
  46. {pydantic_ai_slim-0.0.40 → pydantic_ai_slim-0.0.41}/pydantic_ai/result.py +0 -0
  47. {pydantic_ai_slim-0.0.40 → pydantic_ai_slim-0.0.41}/pydantic_ai/settings.py +0 -0
  48. {pydantic_ai_slim-0.0.40 → pydantic_ai_slim-0.0.41}/pydantic_ai/tools.py +0 -0
  49. {pydantic_ai_slim-0.0.40 → pydantic_ai_slim-0.0.41}/pydantic_ai/usage.py +0 -0

PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: pydantic-ai-slim
-Version: 0.0.40
+Version: 0.0.41
 Summary: Agent Framework / shim to use Pydantic with LLMs, slim package
 Author-email: Samuel Colvin <samuel@pydantic.dev>
 License-Expression: MIT
@@ -29,7 +29,7 @@ Requires-Dist: exceptiongroup; python_version < '3.11'
 Requires-Dist: griffe>=1.3.2
 Requires-Dist: httpx>=0.27
 Requires-Dist: opentelemetry-api>=1.28.0
-Requires-Dist: pydantic-graph==0.0.40
+Requires-Dist: pydantic-graph==0.0.41
 Requires-Dist: pydantic>=2.10
 Requires-Dist: typing-inspection>=0.4.0
 Provides-Extra: anthropic

pydantic_ai/models/anthropic.py
@@ -11,7 +11,7 @@ from typing import Any, Literal, Union, cast, overload
 
 from anthropic.types import DocumentBlockParam
 from httpx import AsyncClient as AsyncHTTPClient
-from typing_extensions import assert_never
+from typing_extensions import assert_never, deprecated
 
 from .. import ModelHTTPError, UnexpectedModelBehavior, _utils, usage
 from .._utils import guard_tool_call_id as _guard_tool_call_id
@@ -31,6 +31,7 @@ from ..messages import (
     ToolReturnPart,
     UserPromptPart,
 )
+from ..providers import Provider, infer_provider
 from ..settings import ModelSettings
 from ..tools import ToolDefinition
 from . import Model, ModelRequestParameters, StreamedResponse, cached_async_http_client, check_allow_model_requests
@@ -111,10 +112,31 @@ class AnthropicModel(Model):
     _model_name: AnthropicModelName = field(repr=False)
     _system: str = field(default='anthropic', repr=False)
 
+    @overload
+    def __init__(
+        self,
+        model_name: AnthropicModelName,
+        *,
+        provider: Literal['anthropic'] | Provider[AsyncAnthropic] = 'anthropic',
+    ) -> None: ...
+
+    @deprecated('Use the `provider` parameter instead of `api_key`, `anthropic_client`, and `http_client`.')
+    @overload
+    def __init__(
+        self,
+        model_name: AnthropicModelName,
+        *,
+        provider: None = None,
+        api_key: str | None = None,
+        anthropic_client: AsyncAnthropic | None = None,
+        http_client: AsyncHTTPClient | None = None,
+    ) -> None: ...
+
     def __init__(
         self,
         model_name: AnthropicModelName,
         *,
+        provider: Literal['anthropic'] | Provider[AsyncAnthropic] | None = None,
         api_key: str | None = None,
         anthropic_client: AsyncAnthropic | None = None,
         http_client: AsyncHTTPClient | None = None,
@@ -124,6 +146,8 @@ class AnthropicModel(Model):
         Args:
             model_name: The name of the Anthropic model to use. List of model names available
                 [here](https://docs.anthropic.com/en/docs/about-claude/models).
+            provider: The provider to use for the Anthropic API. Can be either the string 'anthropic' or an
+                instance of `Provider[AsyncAnthropic]`. If not provided, the other parameters will be used.
             api_key: The API key to use for authentication, if not provided, the `ANTHROPIC_API_KEY` environment variable
                 will be used if available.
             anthropic_client: An existing
@@ -132,7 +156,12 @@ class AnthropicModel(Model):
             http_client: An existing `httpx.AsyncClient` to use for making HTTP requests.
         """
         self._model_name = model_name
-        if anthropic_client is not None:
+
+        if provider is not None:
+            if isinstance(provider, str):
+                provider = infer_provider(provider)
+            self.client = provider.client
+        elif anthropic_client is not None:
            assert http_client is None, 'Cannot provide both `anthropic_client` and `http_client`'
            assert api_key is None, 'Cannot provide both `anthropic_client` and `api_key`'
            self.client = anthropic_client
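
Taken together, these hunks add a `provider` argument to `AnthropicModel` while keeping the old keyword arguments as a deprecated path. A rough usage sketch based only on the signatures above; the model name is illustrative and any `Agent` wiring around it is assumed, not shown in this diff:

    from pydantic_ai.models.anthropic import AnthropicModel
    from pydantic_ai.providers.anthropic import AnthropicProvider

    # string form: resolved via infer_provider('anthropic'), which reads ANTHROPIC_API_KEY
    model = AnthropicModel('claude-3-5-sonnet-latest', provider='anthropic')  # illustrative model name

    # explicit form: pass a configured Provider[AsyncAnthropic]; its .client is used directly
    model = AnthropicModel(
        'claude-3-5-sonnet-latest',
        provider=AnthropicProvider(api_key='my-api-key'),
    )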

pydantic_ai/models/fallback.py
@@ -70,14 +70,9 @@ class FallbackModel(Model):
                     exceptions.append(exc)
                     continue
                 raise exc
-            else:
-                with suppress(Exception):
-                    span = get_current_span()
-                    if span.is_recording():
-                        attributes = getattr(span, 'attributes', {})
-                        if attributes.get('gen_ai.request.model') == self.model_name:
-                            span.set_attributes(InstrumentedModel.model_attributes(model))
-                return response, usage
+
+            self._set_span_attributes(model)
+            return response, usage
 
         raise FallbackExceptionGroup('All models from FallbackModel failed', exceptions)
 
@@ -102,11 +97,21 @@ class FallbackModel(Model):
                     exceptions.append(exc)
                     continue
                 raise exc
+
+            self._set_span_attributes(model)
             yield response
             return
 
         raise FallbackExceptionGroup('All models from FallbackModel failed', exceptions)
 
+    def _set_span_attributes(self, model: Model):
+        with suppress(Exception):
+            span = get_current_span()
+            if span.is_recording():
+                attributes = getattr(span, 'attributes', {})
+                if attributes.get('gen_ai.request.model') == self.model_name:
+                    span.set_attributes(InstrumentedModel.model_attributes(model))
+
     @property
     def model_name(self) -> str:
         """The model name."""

pydantic_ai/models/gemini.py
@@ -139,11 +139,9 @@ class GeminiModel(Model):
 
         if provider is not None:
             if isinstance(provider, str):
-                self._system = provider
-                self.client = infer_provider(provider).client
-            else:
-                self._system = provider.name
-                self.client = provider.client
+                provider = infer_provider(provider)
+            self._system = provider.name
+            self.client = provider.client
             self._url = str(self.client.base_url)
         else:
             if api_key is None:

pydantic_ai/models/groq.py
@@ -138,9 +138,8 @@ class GroqModel(Model):
 
         if provider is not None:
             if isinstance(provider, str):
-                self.client = infer_provider(provider).client
-            else:
-                self.client = provider.client
+                provider = infer_provider(provider)
+            self.client = provider.client
         elif groq_client is not None:
             assert http_client is None, 'Cannot provide both `groq_client` and `http_client`'
             assert api_key is None, 'Cannot provide both `groq_client` and `api_key`'

pydantic_ai/models/mistral.py
@@ -7,11 +7,11 @@ from contextlib import asynccontextmanager
 from dataclasses import dataclass, field
 from datetime import datetime, timezone
 from itertools import chain
-from typing import Any, Callable, Literal, Union, cast
+from typing import Any, Callable, Literal, Union, cast, overload
 
 import pydantic_core
 from httpx import AsyncClient as AsyncHTTPClient, Timeout
-from typing_extensions import assert_never
+from typing_extensions import assert_never, deprecated
 
 from .. import ModelHTTPError, UnexpectedModelBehavior, _utils
 from .._utils import now_utc as _now_utc
@@ -31,6 +31,7 @@ from ..messages import (
     ToolReturnPart,
     UserPromptPart,
 )
+from ..providers import Provider, infer_provider
 from ..result import Usage
 from ..settings import ModelSettings
 from ..tools import ToolDefinition
@@ -112,10 +113,33 @@ class MistralModel(Model):
     _model_name: MistralModelName = field(repr=False)
     _system: str = field(default='mistral_ai', repr=False)
 
+    @overload
     def __init__(
         self,
         model_name: MistralModelName,
         *,
+        provider: Literal['mistral'] | Provider[Mistral] = 'mistral',
+        json_mode_schema_prompt: str = """Answer in JSON Object, respect the format:\n```\n{schema}\n```\n""",
+    ) -> None: ...
+
+    @overload
+    @deprecated('Use the `provider` parameter instead of `api_key`, `client` and `http_client`.')
+    def __init__(
+        self,
+        model_name: MistralModelName,
+        *,
+        provider: None = None,
+        api_key: str | Callable[[], str | None] | None = None,
+        client: Mistral | None = None,
+        http_client: AsyncHTTPClient | None = None,
+        json_mode_schema_prompt: str = """Answer in JSON Object, respect the format:\n```\n{schema}\n```\n""",
+    ) -> None: ...
+
+    def __init__(
+        self,
+        model_name: MistralModelName,
+        *,
+        provider: Literal['mistral'] | Provider[Mistral] | None = None,
         api_key: str | Callable[[], str | None] | None = None,
         client: Mistral | None = None,
         http_client: AsyncHTTPClient | None = None,
@@ -124,6 +148,9 @@ class MistralModel(Model):
         """Initialize a Mistral model.
 
         Args:
+            provider: The provider to use for authentication and API access. Can be either the string
+                'mistral' or an instance of `Provider[Mistral]`. If not provided, a new provider will be
+                created using the other parameters.
             model_name: The name of the model to use.
             api_key: The API key to use for authentication, if unset uses `MISTRAL_API_KEY` environment variable.
             client: An existing `Mistral` client to use, if provided, `api_key` and `http_client` must be `None`.
@@ -133,17 +160,22 @@ class MistralModel(Model):
         self._model_name = model_name
         self.json_mode_schema_prompt = json_mode_schema_prompt
 
-        if client is not None:
+        if provider is not None:
+            if isinstance(provider, str):
+                # TODO(Marcelo): We should add an integration test with VCR when I get the API key.
+                provider = infer_provider(provider)  # pragma: no cover
+            self.client = provider.client
+        elif client is not None:
             assert http_client is None, 'Cannot provide both `mistral_client` and `http_client`'
             assert api_key is None, 'Cannot provide both `mistral_client` and `api_key`'
             self.client = client
         else:
-            api_key = os.getenv('MISTRAL_API_KEY') if api_key is None else api_key
+            api_key = api_key or os.getenv('MISTRAL_API_KEY')
             self.client = Mistral(api_key=api_key, async_client=http_client or cached_async_http_client())
 
     @property
     def base_url(self) -> str:
-        return str(self.client.sdk_configuration.get_server_details()[0])
+        return self.client.sdk_configuration.get_server_details()[0]
 
     async def request(
         self,
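
As with the Anthropic change, `MistralModel` gains a `provider` argument and the old keyword arguments become a deprecated overload. A minimal sketch based only on the signatures above (the model name is illustrative):

    from pydantic_ai.models.mistral import MistralModel
    from pydantic_ai.providers.mistral import MistralProvider

    # string form: resolved via infer_provider('mistral'), which reads MISTRAL_API_KEY
    model = MistralModel('mistral-large-latest', provider='mistral')  # illustrative model name

    # explicit form: pass a configured Provider[Mistral] instance
    model = MistralModel('mistral-large-latest', provider=MistralProvider(api_key='my-api-key'))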

pydantic_ai/models/openai.py
@@ -162,9 +162,8 @@ class OpenAIModel(Model):
 
         if provider is not None:
             if isinstance(provider, str):
-                self.client = infer_provider(provider).client
-            else:
-                self.client = provider.client
+                provider = infer_provider(provider)
+            self.client = provider.client
         else:  # pragma: no cover
             # This is a workaround for the OpenAI client requiring an API key, whilst locally served,
             # openai compatible models do not always need an API key, but a placeholder (non-empty) key is required.

pydantic_ai/providers/__init__.py
@@ -69,5 +69,13 @@ def infer_provider(provider: str) -> Provider[Any]:
         from .groq import GroqProvider
 
         return GroqProvider()
+    elif provider == 'anthropic':
+        from .anthropic import AnthropicProvider
+
+        return AnthropicProvider()
+    elif provider == 'mistral':
+        from .mistral import MistralProvider
+
+        return MistralProvider()
     else:  # pragma: no cover
         raise ValueError(f'Unknown provider: {provider}')
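
So `infer_provider` now resolves two additional strings. A minimal sketch, assuming the corresponding API keys are available in the environment:

    from pydantic_ai.providers import infer_provider

    anthropic_provider = infer_provider('anthropic')  # AnthropicProvider, uses ANTHROPIC_API_KEY
    mistral_provider = infer_provider('mistral')      # MistralProvider, uses MISTRAL_API_KEY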

pydantic_ai/providers/anthropic.py (new file)
@@ -0,0 +1,74 @@
+from __future__ import annotations as _annotations
+
+import os
+from typing import overload
+
+import httpx
+
+from pydantic_ai.models import cached_async_http_client
+
+try:
+    from anthropic import AsyncAnthropic
+except ImportError as _import_error:  # pragma: no cover
+    raise ImportError(
+        'Please install the `anthropic` package to use the Anthropic provider, '
+        "you can use the `anthropic` optional group — `pip install 'pydantic-ai-slim[anthropic]'`"
+    ) from _import_error
+
+
+from . import Provider
+
+
+class AnthropicProvider(Provider[AsyncAnthropic]):
+    """Provider for Anthropic API."""
+
+    @property
+    def name(self) -> str:
+        return 'anthropic'
+
+    @property
+    def base_url(self) -> str:
+        return str(self._client.base_url)
+
+    @property
+    def client(self) -> AsyncAnthropic:
+        return self._client
+
+    @overload
+    def __init__(self, *, anthropic_client: AsyncAnthropic | None = None) -> None: ...
+
+    @overload
+    def __init__(self, *, api_key: str | None = None, http_client: httpx.AsyncClient | None = None) -> None: ...
+
+    def __init__(
+        self,
+        *,
+        api_key: str | None = None,
+        anthropic_client: AsyncAnthropic | None = None,
+        http_client: httpx.AsyncClient | None = None,
+    ) -> None:
+        """Create a new Anthropic provider.
+
+        Args:
+            api_key: The API key to use for authentication, if not provided, the `ANTHROPIC_API_KEY` environment variable
+                will be used if available.
+            anthropic_client: An existing [`AsyncAnthropic`](https://github.com/anthropics/anthropic-sdk-python)
+                client to use. If provided, the `api_key` and `http_client` arguments will be ignored.
+            http_client: An existing `httpx.AsyncClient` to use for making HTTP requests.
+        """
+        if anthropic_client is not None:
+            assert http_client is None, 'Cannot provide both `anthropic_client` and `http_client`'
+            assert api_key is None, 'Cannot provide both `anthropic_client` and `api_key`'
+            self._client = anthropic_client
+        else:
+            api_key = api_key or os.environ.get('ANTHROPIC_API_KEY')
+            if api_key is None:
+                raise ValueError(
+                    'Set the `ANTHROPIC_API_KEY` environment variable or pass it via `AnthropicProvider(api_key=...)`'
+                    'to use the Anthropic provider.'
+                )
+
+            if http_client is not None:
+                self._client = AsyncAnthropic(api_key=api_key, http_client=http_client)
+            else:
+                self._client = AsyncAnthropic(api_key=api_key, http_client=cached_async_http_client())
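
A short sketch of constructing the new provider directly, for example to reuse a custom `httpx.AsyncClient` (the timeout value is purely illustrative):

    import httpx

    from pydantic_ai.providers.anthropic import AnthropicProvider

    # reuse an existing httpx.AsyncClient (custom timeouts, proxies, ...) for Anthropic requests
    http_client = httpx.AsyncClient(timeout=30)  # illustrative configuration
    provider = AnthropicProvider(api_key='my-api-key', http_client=http_client)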

pydantic_ai/providers/mistral.py (new file)
@@ -0,0 +1,73 @@
+from __future__ import annotations as _annotations
+
+import os
+from typing import overload
+
+from httpx import AsyncClient as AsyncHTTPClient
+
+from pydantic_ai.models import cached_async_http_client
+
+try:
+    from mistralai import Mistral
+except ImportError as e:  # pragma: no cover
+    raise ImportError(
+        'Please install the `mistral` package to use the Mistral provider, '
+        "you can use the `mistral` optional group — `pip install 'pydantic-ai-slim[mistral]'`"
+    ) from e
+
+
+from . import Provider
+
+
+class MistralProvider(Provider[Mistral]):
+    """Provider for Mistral API."""
+
+    @property
+    def name(self) -> str:
+        return 'mistral'
+
+    @property
+    def base_url(self) -> str:
+        return self.client.sdk_configuration.get_server_details()[0]
+
+    @property
+    def client(self) -> Mistral:
+        return self._client
+
+    @overload
+    def __init__(self, *, mistral_client: Mistral | None = None) -> None: ...
+
+    @overload
+    def __init__(self, *, api_key: str | None = None, http_client: AsyncHTTPClient | None = None) -> None: ...
+
+    def __init__(
+        self,
+        *,
+        api_key: str | None = None,
+        mistral_client: Mistral | None = None,
+        http_client: AsyncHTTPClient | None = None,
+    ) -> None:
+        """Create a new Mistral provider.
+
+        Args:
+            api_key: The API key to use for authentication, if not provided, the `MISTRAL_API_KEY` environment variable
+                will be used if available.
+            mistral_client: An existing `Mistral` client to use, if provided, `api_key` and `http_client` must be `None`.
+            http_client: An existing async client to use for making HTTP requests.
+        """
+        api_key = api_key or os.environ.get('MISTRAL_API_KEY')
+
+        if api_key is None and mistral_client is None:
+            raise ValueError(
+                'Set the `MISTRAL_API_KEY` environment variable or pass it via `MistralProvider(api_key=...)`'
+                'to use the Mistral provider.'
+            )
+
+        if mistral_client is not None:
+            assert http_client is None, 'Cannot provide both `mistral_client` and `http_client`'
+            assert api_key is None, 'Cannot provide both `mistral_client` and `api_key`'
+            self._client = mistral_client
+        elif http_client is not None:
+            self._client = Mistral(api_key=api_key, async_client=http_client)
+        else:
+            self._client = Mistral(api_key=api_key, async_client=cached_async_http_client())
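
And a matching sketch for the Mistral provider, using the explicit `api_key`/`http_client` overload (the values shown are illustrative):

    from httpx import AsyncClient

    from pydantic_ai.providers.mistral import MistralProvider

    # explicit key plus a custom async HTTP client; omit http_client to fall back to the cached one
    provider = MistralProvider(api_key='my-api-key', http_client=AsyncClient(timeout=30))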

pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "hatchling.build"
 
 [project]
 name = "pydantic-ai-slim"
-version = "0.0.40"
+version = "0.0.41"
 description = "Agent Framework / shim to use Pydantic with LLMs, slim package"
 authors = [{ name = "Samuel Colvin", email = "samuel@pydantic.dev" }]
 license = "MIT"
@@ -36,7 +36,7 @@ dependencies = [
     "griffe>=1.3.2",
     "httpx>=0.27",
     "pydantic>=2.10",
-    "pydantic-graph==0.0.40",
+    "pydantic-graph==0.0.41",
     "exceptiongroup; python_version < '3.11'",
     "opentelemetry-api>=1.28.0",
     "typing-inspection>=0.4.0",