mistralai 1.5.0__py3-none-any.whl → 1.5.2rc1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (89)
  1. mistralai/_hooks/types.py +15 -3
  2. mistralai/_version.py +3 -3
  3. mistralai/agents.py +32 -12
  4. mistralai/basesdk.py +8 -0
  5. mistralai/chat.py +37 -17
  6. mistralai/classifiers.py +59 -37
  7. mistralai/embeddings.py +22 -18
  8. mistralai/extra/utils/response_format.py +3 -3
  9. mistralai/files.py +36 -0
  10. mistralai/fim.py +37 -17
  11. mistralai/httpclient.py +4 -2
  12. mistralai/jobs.py +30 -0
  13. mistralai/mistral_jobs.py +24 -0
  14. mistralai/models/__init__.py +43 -16
  15. mistralai/models/assistantmessage.py +2 -0
  16. mistralai/models/chatcompletionrequest.py +3 -10
  17. mistralai/models/chatcompletionstreamrequest.py +3 -10
  18. mistralai/models/chatmoderationrequest.py +86 -0
  19. mistralai/models/classificationrequest.py +7 -36
  20. mistralai/models/contentchunk.py +8 -1
  21. mistralai/models/documenturlchunk.py +56 -0
  22. mistralai/models/embeddingrequest.py +8 -44
  23. mistralai/models/filepurpose.py +1 -1
  24. mistralai/models/fimcompletionrequest.py +2 -3
  25. mistralai/models/fimcompletionstreamrequest.py +2 -3
  26. mistralai/models/ocrimageobject.py +77 -0
  27. mistralai/models/ocrpagedimensions.py +25 -0
  28. mistralai/models/ocrpageobject.py +64 -0
  29. mistralai/models/ocrrequest.py +97 -0
  30. mistralai/models/ocrresponse.py +26 -0
  31. mistralai/models/ocrusageinfo.py +51 -0
  32. mistralai/models/prediction.py +4 -5
  33. mistralai/models_.py +66 -18
  34. mistralai/ocr.py +248 -0
  35. mistralai/sdk.py +23 -3
  36. mistralai/sdkconfiguration.py +4 -2
  37. mistralai/utils/__init__.py +2 -0
  38. mistralai/utils/serializers.py +10 -6
  39. mistralai/utils/values.py +4 -1
  40. {mistralai-1.5.0.dist-info → mistralai-1.5.2rc1.dist-info}/METADATA +70 -19
  41. {mistralai-1.5.0.dist-info → mistralai-1.5.2rc1.dist-info}/RECORD +88 -76
  42. {mistralai-1.5.0.dist-info → mistralai-1.5.2rc1.dist-info}/WHEEL +1 -1
  43. mistralai_azure/__init__.py +10 -1
  44. mistralai_azure/_hooks/types.py +15 -3
  45. mistralai_azure/_version.py +3 -0
  46. mistralai_azure/basesdk.py +8 -0
  47. mistralai_azure/chat.py +88 -20
  48. mistralai_azure/httpclient.py +52 -0
  49. mistralai_azure/models/__init__.py +7 -0
  50. mistralai_azure/models/assistantmessage.py +2 -0
  51. mistralai_azure/models/chatcompletionrequest.py +8 -10
  52. mistralai_azure/models/chatcompletionstreamrequest.py +8 -10
  53. mistralai_azure/models/function.py +3 -0
  54. mistralai_azure/models/jsonschema.py +61 -0
  55. mistralai_azure/models/prediction.py +25 -0
  56. mistralai_azure/models/responseformat.py +42 -1
  57. mistralai_azure/models/responseformats.py +1 -1
  58. mistralai_azure/models/toolcall.py +3 -0
  59. mistralai_azure/sdk.py +56 -14
  60. mistralai_azure/sdkconfiguration.py +14 -6
  61. mistralai_azure/utils/__init__.py +2 -0
  62. mistralai_azure/utils/serializers.py +10 -6
  63. mistralai_azure/utils/values.py +4 -1
  64. mistralai_gcp/__init__.py +10 -1
  65. mistralai_gcp/_hooks/types.py +15 -3
  66. mistralai_gcp/_version.py +3 -0
  67. mistralai_gcp/basesdk.py +8 -0
  68. mistralai_gcp/chat.py +89 -21
  69. mistralai_gcp/fim.py +61 -21
  70. mistralai_gcp/httpclient.py +52 -0
  71. mistralai_gcp/models/__init__.py +7 -0
  72. mistralai_gcp/models/assistantmessage.py +2 -0
  73. mistralai_gcp/models/chatcompletionrequest.py +8 -10
  74. mistralai_gcp/models/chatcompletionstreamrequest.py +8 -10
  75. mistralai_gcp/models/fimcompletionrequest.py +2 -3
  76. mistralai_gcp/models/fimcompletionstreamrequest.py +2 -3
  77. mistralai_gcp/models/function.py +3 -0
  78. mistralai_gcp/models/jsonschema.py +61 -0
  79. mistralai_gcp/models/prediction.py +25 -0
  80. mistralai_gcp/models/responseformat.py +42 -1
  81. mistralai_gcp/models/responseformats.py +1 -1
  82. mistralai_gcp/models/toolcall.py +3 -0
  83. mistralai_gcp/sdk.py +63 -19
  84. mistralai_gcp/sdkconfiguration.py +14 -6
  85. mistralai_gcp/utils/__init__.py +2 -0
  86. mistralai_gcp/utils/serializers.py +10 -6
  87. mistralai_gcp/utils/values.py +4 -1
  88. mistralai/models/chatclassificationrequest.py +0 -113
  89. {mistralai-1.5.0.dist-info → mistralai-1.5.2rc1.dist-info}/LICENSE +0 -0
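The headline addition in this release is an OCR surface: a new `mistralai/ocr.py` module wired into the `Mistral` client (see the `mistralai/sdk.py` diff below), plus request/response models such as `ocrrequest.py`, `documenturlchunk.py` and `ocrresponse.py`. A minimal sketch of how the new endpoint would be called, assuming the request shape implied by those models; the model identifier and keyword names are inferred, not confirmed by this diff:

```python
import os
from mistralai import Mistral

with Mistral(api_key=os.getenv("MISTRAL_API_KEY", "")) as mistral:
    # ocr.process is the single operation the updated README lists for the new
    # Ocr SDK; the document payload mirrors the new DocumentURLChunk model.
    res = mistral.ocr.process(
        model="mistral-ocr-latest",  # hypothetical model name
        document={
            "type": "document_url",
            "document_url": "https://example.com/sample.pdf",
        },
    )
    print(res)  # OCRResponse: pages, images and usage info per the new models
```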
mistralai/sdk.py CHANGED
@@ -17,6 +17,7 @@ from mistralai.files import Files
 from mistralai.fim import Fim
 from mistralai.fine_tuning import FineTuning
 from mistralai.models_ import Models
+from mistralai.ocr import Ocr
 from mistralai.types import OptionalNullable, UNSET
 from typing import Any, Callable, Dict, Optional, Union, cast
 import weakref
@@ -41,6 +42,8 @@ class Mistral(BaseSDK):
     r"""Embeddings API."""
     classifiers: Classifiers
     r"""Classifiers API."""
+    ocr: Ocr
+    r"""OCR API"""
 
     def __init__(
         self,
@@ -65,15 +68,19 @@ class Mistral(BaseSDK):
         :param retry_config: The retry configuration to use for all supported methods
         :param timeout_ms: Optional request timeout applied to each operation in milliseconds
         """
+        client_supplied = True
         if client is None:
             client = httpx.Client()
+            client_supplied = False
 
         assert issubclass(
             type(client), HttpClient
         ), "The provided client must implement the HttpClient protocol."
 
+        async_client_supplied = True
         if async_client is None:
             async_client = httpx.AsyncClient()
+            async_client_supplied = False
 
         if debug_logger is None:
             debug_logger = get_default_logger()
@@ -97,7 +104,9 @@ class Mistral(BaseSDK):
             self,
             SDKConfiguration(
                 client=client,
+                client_supplied=client_supplied,
                 async_client=async_client,
+                async_client_supplied=async_client_supplied,
                 security=security,
                 server_url=server_url,
                 server=server,
@@ -111,7 +120,7 @@ class Mistral(BaseSDK):
 
         current_server_url, *_ = self.sdk_configuration.get_server_details()
         server_url, self.sdk_configuration.client = hooks.sdk_init(
-            current_server_url, self.sdk_configuration.client
+            current_server_url, client
         )
         if current_server_url != server_url:
             self.sdk_configuration.server_url = server_url
@@ -124,7 +133,9 @@ class Mistral(BaseSDK):
             close_clients,
             cast(ClientOwner, self.sdk_configuration),
             self.sdk_configuration.client,
+            self.sdk_configuration.client_supplied,
             self.sdk_configuration.async_client,
+            self.sdk_configuration.async_client_supplied,
         )
 
         self._init_sdks()
@@ -139,6 +150,7 @@ class Mistral(BaseSDK):
         self.agents = Agents(self.sdk_configuration)
         self.embeddings = Embeddings(self.sdk_configuration)
         self.classifiers = Classifiers(self.sdk_configuration)
+        self.ocr = Ocr(self.sdk_configuration)
 
     def __enter__(self):
         return self
@@ -147,9 +159,17 @@ class Mistral(BaseSDK):
         return self
 
     def __exit__(self, exc_type, exc_val, exc_tb):
-        if self.sdk_configuration.client is not None:
+        if (
+            self.sdk_configuration.client is not None
+            and not self.sdk_configuration.client_supplied
+        ):
             self.sdk_configuration.client.close()
+            self.sdk_configuration.client = None
 
     async def __aexit__(self, exc_type, exc_val, exc_tb):
-        if self.sdk_configuration.async_client is not None:
+        if (
+            self.sdk_configuration.async_client is not None
+            and not self.sdk_configuration.async_client_supplied
+        ):
             await self.sdk_configuration.async_client.aclose()
+            self.sdk_configuration.async_client = None
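The `client_supplied` / `async_client_supplied` bookkeeping above changes transport ownership: only HTTP clients the SDK created itself are closed (and reset to `None`) on context exit. A minimal sketch of the resulting behaviour, assuming a caller-constructed `httpx.Client`:

```python
import os
import httpx
from mistralai import Mistral

# Caller-owned transport: the constructor now records client_supplied=True.
external = httpx.Client(timeout=30.0)

with Mistral(api_key=os.getenv("MISTRAL_API_KEY", ""), client=external) as mistral:
    ...  # use mistral.chat, mistral.ocr, etc.

# __exit__ skipped close() because the client was supplied by the caller,
# so the transport stays usable for other work.
assert not external.is_closed
```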
mistralai/sdkconfiguration.py CHANGED
@@ -26,8 +26,10 @@ SERVERS = {
 
 @dataclass
 class SDKConfiguration:
-    client: HttpClient
-    async_client: AsyncHttpClient
+    client: Union[HttpClient, None]
+    client_supplied: bool
+    async_client: Union[AsyncHttpClient, None]
+    async_client_supplied: bool
     debug_logger: Logger
     security: Optional[Union[models.Security, Callable[[], models.Security]]] = None
     server_url: Optional[str] = ""
mistralai/utils/__init__.py CHANGED
@@ -43,6 +43,7 @@ from .values import (
     match_content_type,
     match_status_codes,
     match_response,
+    cast_partial,
 )
 from .logger import Logger, get_body_content, get_default_logger
 
@@ -96,4 +97,5 @@ __all__ = [
     "validate_float",
     "validate_int",
     "validate_open_enum",
+    "cast_partial",
 ]
mistralai/utils/serializers.py CHANGED
@@ -7,14 +7,15 @@ import httpx
 from typing_extensions import get_origin
 from pydantic import ConfigDict, create_model
 from pydantic_core import from_json
-from typing_inspect import is_optional_type
+from typing_inspection.typing_objects import is_union
 
 from ..types.basemodel import BaseModel, Nullable, OptionalNullable, Unset
 
 
 def serialize_decimal(as_str: bool):
     def serialize(d):
-        if is_optional_type(type(d)) and d is None:
+        # Optional[T] is a Union[T, None]
+        if is_union(type(d)) and type(None) in get_args(type(d)) and d is None:
             return None
         if isinstance(d, Unset):
             return d
@@ -42,7 +43,8 @@ def validate_decimal(d):
 
 def serialize_float(as_str: bool):
     def serialize(f):
-        if is_optional_type(type(f)) and f is None:
+        # Optional[T] is a Union[T, None]
+        if is_union(type(f)) and type(None) in get_args(type(f)) and f is None:
             return None
         if isinstance(f, Unset):
             return f
@@ -70,7 +72,8 @@ def validate_float(f):
 
 def serialize_int(as_str: bool):
     def serialize(i):
-        if is_optional_type(type(i)) and i is None:
+        # Optional[T] is a Union[T, None]
+        if is_union(type(i)) and type(None) in get_args(type(i)) and i is None:
             return None
         if isinstance(i, Unset):
             return i
@@ -118,7 +121,8 @@ def validate_open_enum(is_int: bool):
 
 def validate_const(v):
     def validate(c):
-        if is_optional_type(type(c)) and c is None:
+        # Optional[T] is a Union[T, None]
+        if is_union(type(c)) and type(None) in get_args(type(c)) and c is None:
             return None
 
         if v != c:
@@ -163,7 +167,7 @@ def marshal_json(val, typ):
     if len(d) == 0:
         return ""
 
-    return json.dumps(d[next(iter(d))], separators=(",", ":"), sort_keys=True)
+    return json.dumps(d[next(iter(d))], separators=(",", ":"))
 
 
 def is_nullable(field):
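Besides the swap from `typing_inspect` to `typing_inspection`, note the `marshal_json` change: request bodies are no longer serialized with `sort_keys=True`, so JSON now keeps the model's field order. A stdlib-only illustration of that difference (the payload here is made up):

```python
import json

body = {"model": "mistral-small-latest", "messages": [], "stream": False}

# 1.5.0 behaviour: keys sorted alphabetically when marshalling the body.
print(json.dumps(body, separators=(",", ":"), sort_keys=True))
# {"messages":[],"model":"mistral-small-latest","stream":false}

# 1.5.2rc1 behaviour: insertion (field declaration) order is preserved.
print(json.dumps(body, separators=(",", ":")))
# {"model":"mistral-small-latest","messages":[],"stream":false}
```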
mistralai/utils/values.py CHANGED
@@ -3,8 +3,9 @@
 from datetime import datetime
 from enum import Enum
 from email.message import Message
+from functools import partial
 import os
-from typing import Any, Callable, Dict, List, Optional, Tuple, TypeVar, Union
+from typing import Any, Callable, Dict, List, Optional, Tuple, TypeVar, Union, cast
 
 from httpx import Response
 from pydantic import BaseModel
@@ -51,6 +52,8 @@ def match_status_codes(status_codes: List[str], status_code: int) -> bool:
 
 
 T = TypeVar("T")
+def cast_partial(typ):
+    return partial(cast, typ)
 
 def get_global_from_env(
     value: Optional[T], env_key: str, type_cast: Callable[[str], T]
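The new `cast_partial` helper is just `functools.partial` around `typing.cast`: it returns a callable that type checkers treat as producing the given type while doing nothing at runtime. A small sketch of what that means in practice:

```python
from functools import partial
from typing import cast

def cast_partial(typ):
    # Same one-liner as the helper added to mistralai/utils/values.py.
    return partial(cast, typ)

as_int = cast_partial(int)
value = as_int("3")
# typing.cast is a runtime no-op, so the object is untouched; only the
# static type seen by a checker changes.
print(value, type(value))  # 3 <class 'str'>
```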
{mistralai-1.5.0.dist-info → mistralai-1.5.2rc1.dist-info}/METADATA CHANGED
@@ -1,11 +1,10 @@
 Metadata-Version: 2.3
 Name: mistralai
-Version: 1.5.0
+Version: 1.5.2rc1
 Summary: Python Client SDK for the Mistral AI API.
 Author: Mistral
-Requires-Python: >=3.8
+Requires-Python: >=3.9
 Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.8
 Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
@@ -14,12 +13,11 @@ Classifier: Programming Language :: Python :: 3.13
 Provides-Extra: gcp
 Requires-Dist: eval-type-backport (>=0.2.0)
 Requires-Dist: google-auth (>=2.27.0) ; extra == "gcp"
-Requires-Dist: httpx (>=0.27.0)
-Requires-Dist: jsonpath-python (>=1.0.6)
-Requires-Dist: pydantic (>=2.9.0)
+Requires-Dist: httpx (>=0.28.1)
+Requires-Dist: pydantic (>=2.10.3)
 Requires-Dist: python-dateutil (>=2.8.2)
 Requires-Dist: requests (>=2.32.3) ; extra == "gcp"
-Requires-Dist: typing-inspect (>=0.9.0)
+Requires-Dist: typing-inspection (>=0.4.0)
 Project-URL: Repository, https://github.com/mistralai/client-python.git
 Description-Content-Type: text/markdown
 
@@ -100,6 +98,37 @@ pip install mistralai
 ```bash
 poetry add mistralai
 ```
+
+### Shell and script usage with `uv`
+
+You can use this SDK in a Python shell with [uv](https://docs.astral.sh/uv/) and the `uvx` command that comes with it like so:
+
+```shell
+uvx --from mistralai python
+```
+
+It's also possible to write a standalone Python script without needing to set up a whole project like so:
+
+```python
+#!/usr/bin/env -S uv run --script
+# /// script
+# requires-python = ">=3.9"
+# dependencies = [
+#     "mistralai",
+# ]
+# ///
+
+from mistralai import Mistral
+
+sdk = Mistral(
+    # SDK arguments
+)
+
+# Rest of script here...
+```
+
+Once that is saved to a file, you can run it with `uv run script.py` where
+`script.py` can be replaced with the actual file name.
 <!-- End SDK Installation [installation] -->
 
 <!-- Start SDK Example Usage [usage] -->
@@ -114,6 +143,7 @@ This example shows how to create chat completions.
 from mistralai import Mistral
 import os
 
+
 with Mistral(
     api_key=os.getenv("MISTRAL_API_KEY", ""),
 ) as mistral:
@@ -123,7 +153,7 @@ with Mistral(
             "content": "Who is the best French painter? Answer in one short sentence.",
             "role": "user",
         },
-    ], stream=False)
+    ])
 
     # Handle response
     print(res)
@@ -139,6 +169,7 @@ from mistralai import Mistral
 import os
 
 async def main():
+
     async with Mistral(
         api_key=os.getenv("MISTRAL_API_KEY", ""),
     ) as mistral:
@@ -148,7 +179,7 @@ async def main():
                 "content": "Who is the best French painter? Answer in one short sentence.",
                 "role": "user",
             },
-        ], stream=False)
+        ])
 
         # Handle response
         print(res)
@@ -165,6 +196,7 @@ This example shows how to upload a file.
 from mistralai import Mistral
 import os
 
+
 with Mistral(
     api_key=os.getenv("MISTRAL_API_KEY", ""),
 ) as mistral:
@@ -188,6 +220,7 @@ from mistralai import Mistral
 import os
 
 async def main():
+
     async with Mistral(
         api_key=os.getenv("MISTRAL_API_KEY", ""),
     ) as mistral:
@@ -212,6 +245,7 @@ This example shows how to create agents completions.
 from mistralai import Mistral
 import os
 
+
 with Mistral(
     api_key=os.getenv("MISTRAL_API_KEY", ""),
 ) as mistral:
@@ -221,7 +255,7 @@ with Mistral(
             "content": "Who is the best French painter? Answer in one short sentence.",
             "role": "user",
         },
-    ], agent_id="<id>", stream=False)
+    ], agent_id="<id>")
 
     # Handle response
     print(res)
@@ -237,6 +271,7 @@ from mistralai import Mistral
 import os
 
 async def main():
+
     async with Mistral(
         api_key=os.getenv("MISTRAL_API_KEY", ""),
     ) as mistral:
@@ -246,7 +281,7 @@ async def main():
                 "content": "Who is the best French painter? Answer in one short sentence.",
                 "role": "user",
             },
-        ], agent_id="<id>", stream=False)
+        ], agent_id="<id>")
 
         # Handle response
         print(res)
@@ -263,14 +298,15 @@ This example shows how to create embedding request.
 from mistralai import Mistral
 import os
 
+
 with Mistral(
     api_key=os.getenv("MISTRAL_API_KEY", ""),
 ) as mistral:
 
-    res = mistral.embeddings.create(inputs=[
+    res = mistral.embeddings.create(model="mistral-embed", inputs=[
         "Embed this sentence.",
         "As well as this one.",
-    ], model="mistral-embed")
+    ])
 
     # Handle response
     print(res)
@@ -286,14 +322,15 @@ from mistralai import Mistral
 import os
 
 async def main():
+
     async with Mistral(
         api_key=os.getenv("MISTRAL_API_KEY", ""),
     ) as mistral:
 
-        res = await mistral.embeddings.create_async(inputs=[
+        res = await mistral.embeddings.create_async(model="mistral-embed", inputs=[
             "Embed this sentence.",
             "As well as this one.",
-        ], model="mistral-embed")
+        ])
 
         # Handle response
         print(res)
@@ -467,6 +504,10 @@ The documentation for the GCP SDK is available [here](https://github.com/mistral
 * [archive](https://github.com/mistralai/client-python/blob/master/docs/sdks/models/README.md#archive) - Archive Fine Tuned Model
 * [unarchive](https://github.com/mistralai/client-python/blob/master/docs/sdks/models/README.md#unarchive) - Unarchive Fine Tuned Model
 
+### [ocr](https://github.com/mistralai/client-python/blob/master/docs/sdks/ocr/README.md)
+
+* [process](https://github.com/mistralai/client-python/blob/master/docs/sdks/ocr/README.md#process) - OCR
+
 </details>
 <!-- End Available Resources and Operations [operations] -->
 
@@ -486,6 +527,7 @@ underlying connection when the context is exited.
 from mistralai import Mistral
 import os
 
+
 with Mistral(
     api_key=os.getenv("MISTRAL_API_KEY", ""),
 ) as mistral:
@@ -495,7 +537,7 @@ with Mistral(
             "content": "Who is the best French painter? Answer in one short sentence.",
             "role": "user",
         },
-    ], stream=True)
+    ])
 
     with res as event_stream:
         for event in event_stream:
@@ -523,6 +565,7 @@ Certain SDK methods accept file objects as part of a request body or multi-part
 from mistralai import Mistral
 import os
 
+
 with Mistral(
     api_key=os.getenv("MISTRAL_API_KEY", ""),
 ) as mistral:
@@ -549,6 +592,7 @@ from mistralai import Mistral
 from mistralai.utils import BackoffStrategy, RetryConfig
 import os
 
+
 with Mistral(
     api_key=os.getenv("MISTRAL_API_KEY", ""),
 ) as mistral:
@@ -567,6 +611,7 @@ from mistralai import Mistral
 from mistralai.utils import BackoffStrategy, RetryConfig
 import os
 
+
 with Mistral(
     retry_config=RetryConfig("backoff", BackoffStrategy(1, 50, 1.1, 100), False),
     api_key=os.getenv("MISTRAL_API_KEY", ""),
@@ -607,6 +652,7 @@ When custom error responses are specified for an operation, the SDK may also rai
 from mistralai import Mistral, models
 import os
 
+
 with Mistral(
     api_key=os.getenv("MISTRAL_API_KEY", ""),
 ) as mistral:
@@ -634,9 +680,9 @@ with Mistral(
 
 You can override the default server globally by passing a server name to the `server: str` optional parameter when initializing the SDK client instance. The selected server will then be used as the default on the operations that use it. This table lists the names associated with the available servers:
 
-| Name | Server                   |
-| ---- | ------------------------ |
-| `eu` | `https://api.mistral.ai` |
+| Name | Server                   | Description          |
+| ---- | ------------------------ | -------------------- |
+| `eu` | `https://api.mistral.ai` | EU Production server |
 
 #### Example
 
@@ -644,6 +690,7 @@ You can override the default server globally by passing a server name to the `se
 from mistralai import Mistral
 import os
 
+
 with Mistral(
     server="eu",
     api_key=os.getenv("MISTRAL_API_KEY", ""),
@@ -663,6 +710,7 @@ The default server can also be overridden globally by passing a URL to the `serv
 from mistralai import Mistral
 import os
 
+
 with Mistral(
     server_url="https://api.mistral.ai",
     api_key=os.getenv("MISTRAL_API_KEY", ""),
@@ -773,6 +821,7 @@ To authenticate with the API the `api_key` parameter must be set when initializi
 from mistralai import Mistral
 import os
 
+
 with Mistral(
     api_key=os.getenv("MISTRAL_API_KEY", ""),
 ) as mistral:
@@ -796,6 +845,7 @@ The `Mistral` class implements the context manager protocol and registers a fina
 from mistralai import Mistral
 import os
 def main():
+
     with Mistral(
         api_key=os.getenv("MISTRAL_API_KEY", ""),
     ) as mistral:
@@ -804,6 +854,7 @@ def main():
 
 # Or when using async:
 async def amain():
+
     async with Mistral(
         api_key=os.getenv("MISTRAL_API_KEY", ""),
     ) as mistral: