mistralai 1.9.10__py3-none-any.whl → 1.10.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (86)
  1. mistralai/_hooks/registration.py +5 -0
  2. mistralai/_hooks/tracing.py +50 -0
  3. mistralai/_version.py +3 -3
  4. mistralai/accesses.py +51 -116
  5. mistralai/agents.py +58 -85
  6. mistralai/audio.py +8 -3
  7. mistralai/basesdk.py +15 -5
  8. mistralai/batch.py +6 -3
  9. mistralai/beta.py +10 -5
  10. mistralai/chat.py +70 -97
  11. mistralai/classifiers.py +57 -144
  12. mistralai/conversations.py +435 -412
  13. mistralai/documents.py +156 -359
  14. mistralai/embeddings.py +21 -42
  15. mistralai/extra/observability/__init__.py +15 -0
  16. mistralai/extra/observability/otel.py +393 -0
  17. mistralai/extra/run/tools.py +28 -16
  18. mistralai/files.py +53 -176
  19. mistralai/fim.py +46 -73
  20. mistralai/fine_tuning.py +6 -3
  21. mistralai/jobs.py +49 -158
  22. mistralai/libraries.py +71 -178
  23. mistralai/mistral_agents.py +298 -179
  24. mistralai/mistral_jobs.py +51 -138
  25. mistralai/models/__init__.py +94 -5
  26. mistralai/models/agent.py +15 -2
  27. mistralai/models/agentconversation.py +11 -3
  28. mistralai/models/agentcreationrequest.py +6 -2
  29. mistralai/models/agents_api_v1_agents_deleteop.py +16 -0
  30. mistralai/models/agents_api_v1_agents_getop.py +40 -3
  31. mistralai/models/agents_api_v1_agents_listop.py +72 -2
  32. mistralai/models/agents_api_v1_conversations_deleteop.py +18 -0
  33. mistralai/models/agents_api_v1_conversations_listop.py +39 -2
  34. mistralai/models/agentscompletionrequest.py +21 -6
  35. mistralai/models/agentscompletionstreamrequest.py +21 -6
  36. mistralai/models/agentupdaterequest.py +18 -2
  37. mistralai/models/audiotranscriptionrequest.py +2 -0
  38. mistralai/models/batchjobin.py +10 -0
  39. mistralai/models/chatcompletionrequest.py +22 -5
  40. mistralai/models/chatcompletionstreamrequest.py +22 -5
  41. mistralai/models/conversationrequest.py +15 -4
  42. mistralai/models/conversationrestartrequest.py +50 -2
  43. mistralai/models/conversationrestartstreamrequest.py +50 -2
  44. mistralai/models/conversationstreamrequest.py +15 -4
  45. mistralai/models/documentout.py +26 -10
  46. mistralai/models/documentupdatein.py +24 -3
  47. mistralai/models/embeddingrequest.py +8 -8
  48. mistralai/models/files_api_routes_list_filesop.py +7 -0
  49. mistralai/models/fimcompletionrequest.py +8 -9
  50. mistralai/models/fimcompletionstreamrequest.py +8 -9
  51. mistralai/models/httpvalidationerror.py +11 -6
  52. mistralai/models/libraries_documents_list_v1op.py +15 -2
  53. mistralai/models/libraryout.py +10 -7
  54. mistralai/models/listfilesout.py +35 -4
  55. mistralai/models/mistralerror.py +26 -0
  56. mistralai/models/modelcapabilities.py +13 -4
  57. mistralai/models/modelconversation.py +8 -2
  58. mistralai/models/no_response_error.py +13 -0
  59. mistralai/models/ocrpageobject.py +26 -5
  60. mistralai/models/ocrrequest.py +17 -1
  61. mistralai/models/ocrtableobject.py +31 -0
  62. mistralai/models/prediction.py +4 -0
  63. mistralai/models/requestsource.py +7 -0
  64. mistralai/models/responseformat.py +4 -2
  65. mistralai/models/responseformats.py +0 -1
  66. mistralai/models/responsevalidationerror.py +25 -0
  67. mistralai/models/sdkerror.py +30 -14
  68. mistralai/models/sharingdelete.py +36 -5
  69. mistralai/models/sharingin.py +36 -5
  70. mistralai/models/sharingout.py +3 -3
  71. mistralai/models/toolexecutiondeltaevent.py +13 -4
  72. mistralai/models/toolexecutiondoneevent.py +13 -4
  73. mistralai/models/toolexecutionentry.py +9 -4
  74. mistralai/models/toolexecutionstartedevent.py +13 -4
  75. mistralai/models_.py +67 -212
  76. mistralai/ocr.py +33 -36
  77. mistralai/sdk.py +15 -2
  78. mistralai/transcriptions.py +21 -60
  79. mistralai/utils/__init__.py +18 -5
  80. mistralai/utils/eventstreaming.py +10 -0
  81. mistralai/utils/serializers.py +3 -2
  82. mistralai/utils/unmarshal_json_response.py +24 -0
  83. {mistralai-1.9.10.dist-info → mistralai-1.10.0.dist-info}/METADATA +89 -40
  84. {mistralai-1.9.10.dist-info → mistralai-1.10.0.dist-info}/RECORD +86 -75
  85. {mistralai-1.9.10.dist-info → mistralai-1.10.0.dist-info}/WHEEL +1 -1
  86. {mistralai-1.9.10.dist-info → mistralai-1.10.0.dist-info/licenses}/LICENSE +0 -0
mistralai/transcriptions.py

@@ -5,6 +5,7 @@ from mistralai import models, utils
 from mistralai._hooks import HookContext
 from mistralai.types import OptionalNullable, UNSET
 from mistralai.utils import eventstreaming, get_security_from_env
+from mistralai.utils.unmarshal_json_response import unmarshal_json_response
 from typing import List, Mapping, Optional, Union


@@ -28,7 +29,7 @@ class Transcriptions(BaseSDK):
     ) -> models.TranscriptionResponse:
         r"""Create Transcription

-        :param model:
+        :param model: ID of the model to be used.
         :param file:
         :param file_url: Url of a file to be transcribed
         :param file_id: ID of a file uploaded to /v1/files
@@ -103,26 +104,15 @@ class Transcriptions(BaseSDK):
         )

         if utils.match_response(http_res, "200", "application/json"):
-            return utils.unmarshal_json(http_res.text, models.TranscriptionResponse)
+            return unmarshal_json_response(models.TranscriptionResponse, http_res)
         if utils.match_response(http_res, "4XX", "*"):
             http_res_text = utils.stream_to_text(http_res)
-            raise models.SDKError(
-                "API error occurred", http_res.status_code, http_res_text, http_res
-            )
+            raise models.SDKError("API error occurred", http_res, http_res_text)
         if utils.match_response(http_res, "5XX", "*"):
             http_res_text = utils.stream_to_text(http_res)
-            raise models.SDKError(
-                "API error occurred", http_res.status_code, http_res_text, http_res
-            )
+            raise models.SDKError("API error occurred", http_res, http_res_text)

-        content_type = http_res.headers.get("Content-Type")
-        http_res_text = utils.stream_to_text(http_res)
-        raise models.SDKError(
-            f"Unexpected response received (code: {http_res.status_code}, type: {content_type})",
-            http_res.status_code,
-            http_res_text,
-            http_res,
-        )
+        raise models.SDKError("Unexpected response received", http_res)

     async def complete_async(
         self,
@@ -141,7 +131,7 @@ class Transcriptions(BaseSDK):
     ) -> models.TranscriptionResponse:
         r"""Create Transcription

-        :param model:
+        :param model: ID of the model to be used.
         :param file:
         :param file_url: Url of a file to be transcribed
         :param file_id: ID of a file uploaded to /v1/files
@@ -216,26 +206,15 @@ class Transcriptions(BaseSDK):
         )

         if utils.match_response(http_res, "200", "application/json"):
-            return utils.unmarshal_json(http_res.text, models.TranscriptionResponse)
+            return unmarshal_json_response(models.TranscriptionResponse, http_res)
         if utils.match_response(http_res, "4XX", "*"):
             http_res_text = await utils.stream_to_text_async(http_res)
-            raise models.SDKError(
-                "API error occurred", http_res.status_code, http_res_text, http_res
-            )
+            raise models.SDKError("API error occurred", http_res, http_res_text)
         if utils.match_response(http_res, "5XX", "*"):
             http_res_text = await utils.stream_to_text_async(http_res)
-            raise models.SDKError(
-                "API error occurred", http_res.status_code, http_res_text, http_res
-            )
+            raise models.SDKError("API error occurred", http_res, http_res_text)

-        content_type = http_res.headers.get("Content-Type")
-        http_res_text = await utils.stream_to_text_async(http_res)
-        raise models.SDKError(
-            f"Unexpected response received (code: {http_res.status_code}, type: {content_type})",
-            http_res.status_code,
-            http_res_text,
-            http_res,
-        )
+        raise models.SDKError("Unexpected response received", http_res)

     def stream(
         self,
@@ -252,7 +231,7 @@ class Transcriptions(BaseSDK):
         timeout_ms: Optional[int] = None,
         http_headers: Optional[Mapping[str, str]] = None,
     ) -> eventstreaming.EventStream[models.TranscriptionStreamEvents]:
-        r"""Create streaming transcription (SSE)
+        r"""Create Streaming Transcription (SSE)

         :param model:
         :param file:
@@ -337,26 +316,17 @@ class Transcriptions(BaseSDK):
             return eventstreaming.EventStream(
                 http_res,
                 lambda raw: utils.unmarshal_json(raw, models.TranscriptionStreamEvents),
+                client_ref=self,
             )
         if utils.match_response(http_res, "4XX", "*"):
             http_res_text = utils.stream_to_text(http_res)
-            raise models.SDKError(
-                "API error occurred", http_res.status_code, http_res_text, http_res
-            )
+            raise models.SDKError("API error occurred", http_res, http_res_text)
         if utils.match_response(http_res, "5XX", "*"):
             http_res_text = utils.stream_to_text(http_res)
-            raise models.SDKError(
-                "API error occurred", http_res.status_code, http_res_text, http_res
-            )
+            raise models.SDKError("API error occurred", http_res, http_res_text)

-        content_type = http_res.headers.get("Content-Type")
         http_res_text = utils.stream_to_text(http_res)
-        raise models.SDKError(
-            f"Unexpected response received (code: {http_res.status_code}, type: {content_type})",
-            http_res.status_code,
-            http_res_text,
-            http_res,
-        )
+        raise models.SDKError("Unexpected response received", http_res, http_res_text)

     async def stream_async(
         self,
@@ -373,7 +343,7 @@ class Transcriptions(BaseSDK):
         timeout_ms: Optional[int] = None,
         http_headers: Optional[Mapping[str, str]] = None,
     ) -> eventstreaming.EventStreamAsync[models.TranscriptionStreamEvents]:
-        r"""Create streaming transcription (SSE)
+        r"""Create Streaming Transcription (SSE)

         :param model:
         :param file:
@@ -458,23 +428,14 @@ class Transcriptions(BaseSDK):
             return eventstreaming.EventStreamAsync(
                 http_res,
                 lambda raw: utils.unmarshal_json(raw, models.TranscriptionStreamEvents),
+                client_ref=self,
             )
         if utils.match_response(http_res, "4XX", "*"):
             http_res_text = await utils.stream_to_text_async(http_res)
-            raise models.SDKError(
-                "API error occurred", http_res.status_code, http_res_text, http_res
-            )
+            raise models.SDKError("API error occurred", http_res, http_res_text)
         if utils.match_response(http_res, "5XX", "*"):
             http_res_text = await utils.stream_to_text_async(http_res)
-            raise models.SDKError(
-                "API error occurred", http_res.status_code, http_res_text, http_res
-            )
+            raise models.SDKError("API error occurred", http_res, http_res_text)

-        content_type = http_res.headers.get("Content-Type")
         http_res_text = await utils.stream_to_text_async(http_res)
-        raise models.SDKError(
-            f"Unexpected response received (code: {http_res.status_code}, type: {content_type})",
-            http_res.status_code,
-            http_res_text,
-            http_res,
-        )
+        raise models.SDKError("Unexpected response received", http_res, http_res_text)
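For reference, a minimal sketch (not part of the diff) of how the reworked error raising above surfaces to callers in 1.10.0. The `mistral.audio.transcriptions.complete` path follows the `audio.transcriptions` section of the bundled README; the model ID and file ID are placeholders, and `SDKError` is assumed to keep exposing `status_code` and `body` as documented in the METADATA changes further down.

```python
import os

from mistralai import Mistral, models

with Mistral(api_key=os.getenv("MISTRAL_API_KEY", "")) as mistral:
    try:
        # Parameters follow the docstring shown above: model, file, file_url, file_id.
        res = mistral.audio.transcriptions.complete(
            model="voxtral-mini-latest",  # placeholder model ID
            file_id="<file-id>",          # placeholder file ID
        )
        print(res)
    except models.SDKError as e:
        # 4XX/5XX responses are now raised as SDKError(message, http_res, body);
        # the status code and body are assumed to remain available as attributes.
        print(e.status_code, e.body)
```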
mistralai/utils/__init__.py

@@ -2,6 +2,8 @@

 from typing import TYPE_CHECKING
 from importlib import import_module
+import builtins
+import sys

 if TYPE_CHECKING:
     from .annotations import get_discriminator
@@ -161,6 +163,18 @@ _dynamic_imports: dict[str, str] = {
 }


+def dynamic_import(modname, retries=3):
+    for attempt in range(retries):
+        try:
+            return import_module(modname, __package__)
+        except KeyError:
+            # Clear any half-initialized module and retry
+            sys.modules.pop(modname, None)
+            if attempt == retries - 1:
+                break
+    raise KeyError(f"Failed to import module '{modname}' after {retries} attempts")
+
+
 def __getattr__(attr_name: str) -> object:
     module_name = _dynamic_imports.get(attr_name)
     if module_name is None:
@@ -169,9 +183,8 @@ def __getattr__(attr_name: str) -> object:
         )

     try:
-        module = import_module(module_name, __package__)
-        result = getattr(module, attr_name)
-        return result
+        module = dynamic_import(module_name)
+        return getattr(module, attr_name)
     except ImportError as e:
         raise ImportError(
             f"Failed to import {attr_name} from {module_name}: {e}"
@@ -183,5 +196,5 @@ def __getattr__(attr_name: str) -> object:


 def __dir__():
-    lazy_attrs = list(_dynamic_imports.keys())
-    return sorted(lazy_attrs)
+    lazy_attrs = builtins.list(_dynamic_imports.keys())
+    return builtins.sorted(lazy_attrs)
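The changes above harden the package's lazy re-export mechanism: a module-level `__getattr__` (PEP 562) resolves attributes on first access, `dynamic_import` retries after evicting a half-initialized entry from `sys.modules`, and `__dir__` now goes through `builtins.list`/`builtins.sorted`, presumably to avoid any clash with re-exported names. A stripped-down sketch of the same pattern, with a made-up `_dynamic_imports` mapping:

```python
# lazy_pkg/__init__.py -- illustrative only, not the SDK's actual mapping.
import builtins
import sys
from importlib import import_module

_dynamic_imports: dict[str, str] = {"Widget": ".widget"}  # attribute name -> submodule


def _dynamic_import(modname: str, retries: int = 3):
    for attempt in range(retries):
        try:
            return import_module(modname, __package__)
        except KeyError:
            # A partially initialized module can surface as a KeyError;
            # drop the stale sys.modules entry and try again.
            sys.modules.pop(modname, None)
            if attempt == retries - 1:
                break
    raise KeyError(f"Failed to import module '{modname}' after {retries} attempts")


def __getattr__(attr_name: str) -> object:
    # Called only when attr_name is not found through normal module lookup.
    module_name = _dynamic_imports.get(attr_name)
    if module_name is None:
        raise AttributeError(f"module {__name__!r} has no attribute {attr_name!r}")
    return getattr(_dynamic_import(module_name), attr_name)


def __dir__():
    # Use the builtins explicitly, as the SDK does after this change.
    return builtins.sorted(builtins.list(_dynamic_imports.keys()))
```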
mistralai/utils/eventstreaming.py

@@ -17,6 +17,9 @@ T = TypeVar("T")


 class EventStream(Generic[T]):
+    # Holds a reference to the SDK client to avoid it being garbage collected
+    # and cause termination of the underlying httpx client.
+    client_ref: Optional[object]
     response: httpx.Response
     generator: Generator[T, None, None]

@@ -25,9 +28,11 @@
         response: httpx.Response,
         decoder: Callable[[str], T],
         sentinel: Optional[str] = None,
+        client_ref: Optional[object] = None,
     ):
         self.response = response
         self.generator = stream_events(response, decoder, sentinel)
+        self.client_ref = client_ref

     def __iter__(self):
         return self
@@ -43,6 +48,9 @@


 class EventStreamAsync(Generic[T]):
+    # Holds a reference to the SDK client to avoid it being garbage collected
+    # and cause termination of the underlying httpx client.
+    client_ref: Optional[object]
     response: httpx.Response
     generator: AsyncGenerator[T, None]

@@ -51,9 +59,11 @@ class EventStreamAsync(Generic[T]):
         response: httpx.Response,
         decoder: Callable[[str], T],
         sentinel: Optional[str] = None,
+        client_ref: Optional[object] = None,
     ):
         self.response = response
         self.generator = stream_events_async(response, decoder, sentinel)
+        self.client_ref = client_ref

     def __aiter__(self):
         return self
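The new `client_ref` field exists so that a stream handed back to the caller keeps the SDK client, and therefore its underlying `httpx` client, alive for as long as events are being consumed. A sketch of the situation it guards against; the helper, model ID, and file ID are illustrative:

```python
import os

from mistralai import Mistral


def start_transcription_stream(file_id: str):
    # The SDK instance is local to this helper; only the stream escapes.
    mistral = Mistral(api_key=os.getenv("MISTRAL_API_KEY", ""))
    return mistral.audio.transcriptions.stream(
        model="voxtral-mini-latest",  # placeholder model ID
        file_id=file_id,
    )


# Because EventStream now stores client_ref, the Mistral instance created in
# the helper is not garbage collected while events are still being consumed.
stream = start_transcription_stream("<file-id>")  # placeholder file ID
with stream as event_stream:
    for event in event_stream:
        print(event)
```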
mistralai/utils/serializers.py

@@ -192,7 +192,9 @@ def is_union(obj: object) -> bool:
     """
     Returns True if the given object is a typing.Union or typing_extensions.Union.
     """
-    return any(obj is typing_obj for typing_obj in _get_typing_objects_by_name_of("Union"))
+    return any(
+        obj is typing_obj for typing_obj in _get_typing_objects_by_name_of("Union")
+    )


 def stream_to_text(stream: httpx.Response) -> str:
@@ -245,4 +247,3 @@ def _get_typing_objects_by_name_of(name: str) -> Tuple[Any, ...]:
             f"Neither typing nor typing_extensions has an object called {name!r}"
         )
     return result
-
mistralai/utils/unmarshal_json_response.py (new file)

@@ -0,0 +1,24 @@
+"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+from typing import Any, Optional
+
+import httpx
+
+from .serializers import unmarshal_json
+from mistralai import models
+
+
+def unmarshal_json_response(
+    typ: Any, http_res: httpx.Response, body: Optional[str] = None
+) -> Any:
+    if body is None:
+        body = http_res.text
+    try:
+        return unmarshal_json(body, typ)
+    except Exception as e:
+        raise models.ResponseValidationError(
+            "Response validation failed",
+            http_res,
+            e,
+            body,
+        ) from e
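In practice this means a 2XX body that fails Pydantic validation now surfaces as `models.ResponseValidationError` rather than a raw validation error. A hedged sketch of handling it; the `cause` attribute is documented in the README changes below, and `models.list` is the operation used in the README example:

```python
import os

from mistralai import Mistral, models

with Mistral(api_key=os.getenv("MISTRAL_API_KEY", "")) as mistral:
    try:
        res = mistral.models.list()
        print(res)
    except models.ResponseValidationError as e:
        print(e.body)   # the raw body that failed validation
        print(e.cause)  # the underlying Pydantic validation error
```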
{mistralai-1.9.10.dist-info → mistralai-1.10.0.dist-info}/METADATA

@@ -1,7 +1,8 @@
-Metadata-Version: 2.3
+Metadata-Version: 2.4
 Name: mistralai
-Version: 1.9.10
+Version: 1.10.0
 Summary: Python Client SDK for the Mistral AI API.
+License-File: LICENSE
 Author: Mistral
 Requires-Python: >=3.9
 Classifier: Programming Language :: Python :: 3
@@ -10,6 +11,7 @@ Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3.13
+Classifier: Programming Language :: Python :: 3.14
 Provides-Extra: agents
 Provides-Extra: gcp
 Requires-Dist: authlib (>=1.5.2,<2.0) ; extra == "agents"
@@ -19,6 +21,10 @@ Requires-Dist: griffe (>=1.7.3,<2.0) ; extra == "agents"
 Requires-Dist: httpx (>=0.28.1)
 Requires-Dist: invoke (>=2.2.0,<3.0.0)
 Requires-Dist: mcp (>=1.0,<2.0) ; (python_version >= "3.10") and (extra == "agents")
+Requires-Dist: opentelemetry-api (>=1.33.1,<2.0.0)
+Requires-Dist: opentelemetry-exporter-otlp-proto-http (>=1.37.0,<2.0.0)
+Requires-Dist: opentelemetry-sdk (>=1.33.1,<2.0.0)
+Requires-Dist: opentelemetry-semantic-conventions (>=0.59b0,<0.60)
 Requires-Dist: pydantic (>=2.10.3)
 Requires-Dist: python-dateutil (>=2.8.2)
 Requires-Dist: pyyaml (>=6.0.2,<7.0.0)
@@ -87,7 +93,15 @@ Mistral AI API: Our Chat Completion and Embeddings APIs specification. Create yo
 >
 > Once a Python version reaches its [official end of life date](https://devguide.python.org/versions/), a 3-month grace period is provided for users to upgrade. Following this grace period, the minimum python version supported in the SDK will be updated.

-The SDK can be installed with either *pip* or *poetry* package managers.
+The SDK can be installed with *uv*, *pip*, or *poetry* package managers.
+
+### uv
+
+*uv* is a fast Python package installer and resolver, designed as a drop-in replacement for pip and pip-tools. It's recommended for its speed and modern Python tooling capabilities.
+
+```bash
+uv add mistralai
+```

 ### PIP

@@ -166,12 +180,14 @@ with Mistral(
     api_key=os.getenv("MISTRAL_API_KEY", ""),
 ) as mistral:

-    res = mistral.chat.complete(model="mistral-small-latest", messages=[
+    res = mistral.chat.complete(model="mistral-large-latest", messages=[
         {
             "content": "Who is the best French painter? Answer in one short sentence.",
             "role": "user",
         },
-    ], stream=False)
+    ], stream=False, response_format={
+        "type": "text",
+    })

     # Handle response
     print(res)
@@ -179,7 +195,7 @@ with Mistral(

 </br>

-The same SDK client can also be used to make asychronous requests by importing asyncio.
+The same SDK client can also be used to make asynchronous requests by importing asyncio.
 ```python
 # Asynchronous Example
 import asyncio
@@ -192,12 +208,14 @@ async def main():
         api_key=os.getenv("MISTRAL_API_KEY", ""),
     ) as mistral:

-        res = await mistral.chat.complete_async(model="mistral-small-latest", messages=[
+        res = await mistral.chat.complete_async(model="mistral-large-latest", messages=[
             {
                 "content": "Who is the best French painter? Answer in one short sentence.",
                 "role": "user",
             },
-        ], stream=False)
+        ], stream=False, response_format={
+            "type": "text",
+        })

         # Handle response
         print(res)
@@ -230,7 +248,7 @@ with Mistral(

 </br>

-The same SDK client can also be used to make asychronous requests by importing asyncio.
+The same SDK client can also be used to make asynchronous requests by importing asyncio.
 ```python
 # Asynchronous Example
 import asyncio
@@ -273,7 +291,9 @@ with Mistral(
             "content": "Who is the best French painter? Answer in one short sentence.",
             "role": "user",
         },
-    ], agent_id="<id>", stream=False)
+    ], agent_id="<id>", stream=False, response_format={
+        "type": "text",
+    })

     # Handle response
     print(res)
@@ -281,7 +301,7 @@ with Mistral(

 </br>

-The same SDK client can also be used to make asychronous requests by importing asyncio.
+The same SDK client can also be used to make asynchronous requests by importing asyncio.
 ```python
 # Asynchronous Example
 import asyncio
@@ -299,7 +319,9 @@ async def main():
                 "content": "Who is the best French painter? Answer in one short sentence.",
                 "role": "user",
             },
-        ], agent_id="<id>", stream=False)
+        ], agent_id="<id>", stream=False, response_format={
+            "type": "text",
+        })

         # Handle response
         print(res)
@@ -332,7 +354,7 @@ with Mistral(

 </br>

-The same SDK client can also be used to make asychronous requests by importing asyncio.
+The same SDK client can also be used to make asynchronous requests by importing asyncio.
 ```python
 # Asynchronous Example
 import asyncio
@@ -469,7 +491,7 @@ The documentation for the GCP SDK is available [here](https://github.com/mistral
 #### [audio.transcriptions](https://github.com/mistralai/client-python/blob/master/docs/sdks/transcriptions/README.md)

 * [complete](https://github.com/mistralai/client-python/blob/master/docs/sdks/transcriptions/README.md#complete) - Create Transcription
-* [stream](https://github.com/mistralai/client-python/blob/master/docs/sdks/transcriptions/README.md#stream) - Create streaming transcription (SSE)
+* [stream](https://github.com/mistralai/client-python/blob/master/docs/sdks/transcriptions/README.md#stream) - Create Streaming Transcription (SSE)

 ### [batch](https://github.com/mistralai/client-python/blob/master/docs/sdks/batch/README.md)

@@ -490,6 +512,7 @@ The documentation for the GCP SDK is available [here](https://github.com/mistral
 * [list](https://github.com/mistralai/client-python/blob/master/docs/sdks/mistralagents/README.md#list) - List agent entities.
 * [get](https://github.com/mistralai/client-python/blob/master/docs/sdks/mistralagents/README.md#get) - Retrieve an agent entity.
 * [update](https://github.com/mistralai/client-python/blob/master/docs/sdks/mistralagents/README.md#update) - Update an agent entity.
+* [delete](https://github.com/mistralai/client-python/blob/master/docs/sdks/mistralagents/README.md#delete) - Delete an agent entity.
 * [update_version](https://github.com/mistralai/client-python/blob/master/docs/sdks/mistralagents/README.md#update_version) - Update an agent version.

 #### [beta.conversations](https://github.com/mistralai/client-python/blob/master/docs/sdks/conversations/README.md)
@@ -497,6 +520,7 @@ The documentation for the GCP SDK is available [here](https://github.com/mistral
 * [start](https://github.com/mistralai/client-python/blob/master/docs/sdks/conversations/README.md#start) - Create a conversation and append entries to it.
 * [list](https://github.com/mistralai/client-python/blob/master/docs/sdks/conversations/README.md#list) - List all created conversations.
 * [get](https://github.com/mistralai/client-python/blob/master/docs/sdks/conversations/README.md#get) - Retrieve a conversation information.
+* [delete](https://github.com/mistralai/client-python/blob/master/docs/sdks/conversations/README.md#delete) - Delete a conversation.
 * [append](https://github.com/mistralai/client-python/blob/master/docs/sdks/conversations/README.md#append) - Append new entries to an existing conversation.
 * [get_history](https://github.com/mistralai/client-python/blob/master/docs/sdks/conversations/README.md#get_history) - Retrieve all entries in a conversation.
 * [get_messages](https://github.com/mistralai/client-python/blob/master/docs/sdks/conversations/README.md#get_messages) - Retrieve all messages in a conversation.
@@ -521,7 +545,7 @@ The documentation for the GCP SDK is available [here](https://github.com/mistral

 #### [beta.libraries.documents](https://github.com/mistralai/client-python/blob/master/docs/sdks/documents/README.md)

-* [list](https://github.com/mistralai/client-python/blob/master/docs/sdks/documents/README.md#list) - List document in a given library.
+* [list](https://github.com/mistralai/client-python/blob/master/docs/sdks/documents/README.md#list) - List documents in a given library.
 * [upload](https://github.com/mistralai/client-python/blob/master/docs/sdks/documents/README.md#upload) - Upload a new document.
 * [get](https://github.com/mistralai/client-python/blob/master/docs/sdks/documents/README.md#get) - Retrieve the metadata of a specific document.
 * [update](https://github.com/mistralai/client-python/blob/master/docs/sdks/documents/README.md#update) - Update the metadata of a specific document.
@@ -618,7 +642,11 @@ with Mistral(
             "tool_call_id": "<id>",
             "result": "<value>",
         },
-    ], stream=True)
+    ], stream=True, completion_args={
+        "response_format": {
+            "type": "text",
+        },
+    })

     with res as event_stream:
         for event in event_stream:
@@ -709,27 +737,20 @@ with Mistral(
 <!-- Start Error Handling [errors] -->
 ## Error Handling

-Handling errors in this SDK should largely match your expectations. All operations return a response object or raise an exception.
-
-By default, an API error will raise a models.SDKError exception, which has the following properties:
-
-| Property        | Type             | Description           |
-|-----------------|------------------|-----------------------|
-| `.status_code`  | *int*            | The HTTP status code  |
-| `.message`      | *str*            | The error message     |
-| `.raw_response` | *httpx.Response* | The raw HTTP response |
-| `.body`         | *str*            | The response content  |
+[`MistralError`](https://github.com/mistralai/client-python/blob/master/./src/mistralai/models/mistralerror.py) is the base class for all HTTP error responses. It has the following properties:

-When custom error responses are specified for an operation, the SDK may also raise their associated exceptions. You can refer to respective *Errors* tables in SDK docs for more details on possible exception types for each operation. For example, the `list_async` method may raise the following exceptions:
-
-| Error Type                 | Status Code | Content Type     |
-| -------------------------- | ----------- | ---------------- |
-| models.HTTPValidationError | 422         | application/json |
-| models.SDKError            | 4XX, 5XX    | \*/\*            |
+| Property           | Type             | Description                                                                               |
+| ------------------ | ---------------- | ----------------------------------------------------------------------------------------- |
+| `err.message`      | `str`            | Error message                                                                             |
+| `err.status_code`  | `int`            | HTTP response status code eg `404`                                                        |
+| `err.headers`      | `httpx.Headers`  | HTTP response headers                                                                     |
+| `err.body`         | `str`            | HTTP body. Can be empty string if no body is returned.                                    |
+| `err.raw_response` | `httpx.Response` | Raw HTTP response                                                                         |
+| `err.data`         |                  | Optional. Some errors may contain structured data. [See Error Classes](https://github.com/mistralai/client-python/blob/master/#error-classes). |

 ### Example
-
 ```python
+import mistralai
 from mistralai import Mistral, models
 import os

@@ -740,18 +761,46 @@ with Mistral(
     res = None
     try:

-        res = mistral.models.list()
+        res = mistral.models.retrieve(model_id="ft:open-mistral-7b:587a6b29:20240514:7e773925")

         # Handle response
         print(res)

-    except models.HTTPValidationError as e:
-        # handle e.data: models.HTTPValidationErrorData
-        raise(e)
-    except models.SDKError as e:
-        # handle exception
-        raise(e)
+
+    except models.MistralError as e:
+        # The base class for HTTP error responses
+        print(e.message)
+        print(e.status_code)
+        print(e.body)
+        print(e.headers)
+        print(e.raw_response)
+
+        # Depending on the method different errors may be thrown
+        if isinstance(e, models.HTTPValidationError):
+            print(e.data.detail)  # Optional[List[mistralai.ValidationError]]
 ```
+
+### Error Classes
+**Primary error:**
+* [`MistralError`](https://github.com/mistralai/client-python/blob/master/./src/mistralai/models/mistralerror.py): The base class for HTTP error responses.
+
+<details><summary>Less common errors (6)</summary>
+
+<br />
+
+**Network errors:**
+* [`httpx.RequestError`](https://www.python-httpx.org/exceptions/#httpx.RequestError): Base class for request errors.
+* [`httpx.ConnectError`](https://www.python-httpx.org/exceptions/#httpx.ConnectError): HTTP client was unable to make a request to a server.
+* [`httpx.TimeoutException`](https://www.python-httpx.org/exceptions/#httpx.TimeoutException): HTTP request timed out.
+
+
+**Inherit from [`MistralError`](https://github.com/mistralai/client-python/blob/master/./src/mistralai/models/mistralerror.py)**:
+* [`HTTPValidationError`](https://github.com/mistralai/client-python/blob/master/./src/mistralai/models/httpvalidationerror.py): Validation Error. Status code `422`. Applicable to 48 of 70 methods.*
+* [`ResponseValidationError`](https://github.com/mistralai/client-python/blob/master/./src/mistralai/models/responsevalidationerror.py): Type mismatch between the response data and the expected Pydantic model. Provides access to the Pydantic validation error via the `cause` attribute.
+
+</details>
+
+\* Check [the method documentation](https://github.com/mistralai/client-python/blob/master/#available-resources-and-operations) to see if the error is applicable.
 <!-- End Error Handling [errors] -->

 <!-- Start Server Selection [server] -->