mistralai-1.0.0rc1-py3-none-any.whl → mistralai-1.0.0rc2-py3-none-any.whl

This diff shows the changes between two publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (96)
  1. mistralai/agents.py +434 -0
  2. mistralai/basesdk.py +43 -6
  3. mistralai/chat.py +29 -34
  4. mistralai/embeddings.py +4 -4
  5. mistralai/files.py +10 -10
  6. mistralai/fim.py +17 -18
  7. mistralai/fine_tuning.py +10 -849
  8. mistralai/jobs.py +854 -0
  9. mistralai/models/__init__.py +4 -2
  10. mistralai/models/agentscompletionrequest.py +96 -0
  11. mistralai/models/agentscompletionstreamrequest.py +92 -0
  12. mistralai/models/assistantmessage.py +4 -9
  13. mistralai/models/chatcompletionchoice.py +4 -15
  14. mistralai/models/chatcompletionrequest.py +11 -16
  15. mistralai/models/chatcompletionstreamrequest.py +11 -16
  16. mistralai/models/completionresponsestreamchoice.py +4 -9
  17. mistralai/models/deltamessage.py +4 -9
  18. mistralai/models/detailedjobout.py +4 -9
  19. mistralai/models/embeddingrequest.py +4 -9
  20. mistralai/models/eventout.py +4 -9
  21. mistralai/models/fileschema.py +4 -9
  22. mistralai/models/fimcompletionrequest.py +11 -16
  23. mistralai/models/fimcompletionstreamrequest.py +11 -16
  24. mistralai/models/ftmodelout.py +4 -9
  25. mistralai/models/githubrepositoryin.py +4 -9
  26. mistralai/models/githubrepositoryout.py +4 -9
  27. mistralai/models/httpvalidationerror.py +1 -1
  28. mistralai/models/jobin.py +4 -9
  29. mistralai/models/jobmetadataout.py +4 -9
  30. mistralai/models/jobout.py +4 -9
  31. mistralai/models/jobs_api_routes_fine_tuning_create_fine_tuning_jobop.py +4 -9
  32. mistralai/models/jobs_api_routes_fine_tuning_get_fine_tuning_jobsop.py +4 -9
  33. mistralai/models/legacyjobmetadataout.py +4 -9
  34. mistralai/models/metricout.py +4 -9
  35. mistralai/models/modelcard.py +4 -9
  36. mistralai/models/retrievefileout.py +4 -9
  37. mistralai/models/security.py +4 -4
  38. mistralai/models/toolmessage.py +4 -9
  39. mistralai/models/trainingparameters.py +4 -9
  40. mistralai/models/trainingparametersin.py +4 -9
  41. mistralai/models/updateftmodelin.py +4 -9
  42. mistralai/models/uploadfileout.py +4 -9
  43. mistralai/models/wandbintegration.py +4 -9
  44. mistralai/models/wandbintegrationout.py +4 -9
  45. mistralai/models_.py +14 -14
  46. mistralai/sdk.py +14 -6
  47. mistralai/sdkconfiguration.py +5 -4
  48. mistralai/types/basemodel.py +10 -6
  49. mistralai/utils/__init__.py +4 -0
  50. mistralai/utils/eventstreaming.py +8 -9
  51. mistralai/utils/logger.py +16 -0
  52. mistralai/utils/retries.py +2 -2
  53. mistralai/utils/security.py +5 -2
  54. {mistralai-1.0.0rc1.dist-info → mistralai-1.0.0rc2.dist-info}/METADATA +121 -56
  55. {mistralai-1.0.0rc1.dist-info → mistralai-1.0.0rc2.dist-info}/RECORD +96 -89
  56. mistralai_azure/basesdk.py +42 -4
  57. mistralai_azure/chat.py +15 -20
  58. mistralai_azure/models/__init__.py +2 -2
  59. mistralai_azure/models/assistantmessage.py +4 -9
  60. mistralai_azure/models/chatcompletionchoice.py +4 -15
  61. mistralai_azure/models/chatcompletionrequest.py +7 -12
  62. mistralai_azure/models/chatcompletionstreamrequest.py +7 -12
  63. mistralai_azure/models/completionresponsestreamchoice.py +4 -9
  64. mistralai_azure/models/deltamessage.py +4 -9
  65. mistralai_azure/models/httpvalidationerror.py +1 -1
  66. mistralai_azure/models/toolmessage.py +4 -9
  67. mistralai_azure/sdk.py +7 -2
  68. mistralai_azure/sdkconfiguration.py +5 -4
  69. mistralai_azure/types/basemodel.py +10 -6
  70. mistralai_azure/utils/__init__.py +4 -0
  71. mistralai_azure/utils/eventstreaming.py +8 -9
  72. mistralai_azure/utils/logger.py +16 -0
  73. mistralai_azure/utils/retries.py +2 -2
  74. mistralai_gcp/basesdk.py +42 -4
  75. mistralai_gcp/chat.py +12 -17
  76. mistralai_gcp/fim.py +12 -13
  77. mistralai_gcp/models/__init__.py +2 -2
  78. mistralai_gcp/models/assistantmessage.py +4 -9
  79. mistralai_gcp/models/chatcompletionchoice.py +4 -15
  80. mistralai_gcp/models/chatcompletionrequest.py +9 -14
  81. mistralai_gcp/models/chatcompletionstreamrequest.py +9 -14
  82. mistralai_gcp/models/completionresponsestreamchoice.py +4 -9
  83. mistralai_gcp/models/deltamessage.py +4 -9
  84. mistralai_gcp/models/fimcompletionrequest.py +11 -16
  85. mistralai_gcp/models/fimcompletionstreamrequest.py +11 -16
  86. mistralai_gcp/models/httpvalidationerror.py +1 -1
  87. mistralai_gcp/models/toolmessage.py +4 -9
  88. mistralai_gcp/sdk.py +9 -0
  89. mistralai_gcp/sdkconfiguration.py +5 -4
  90. mistralai_gcp/types/basemodel.py +10 -6
  91. mistralai_gcp/utils/__init__.py +4 -0
  92. mistralai_gcp/utils/eventstreaming.py +8 -9
  93. mistralai_gcp/utils/logger.py +16 -0
  94. mistralai_gcp/utils/retries.py +2 -2
  95. {mistralai-1.0.0rc1.dist-info → mistralai-1.0.0rc2.dist-info}/LICENSE +0 -0
  96. {mistralai-1.0.0rc1.dist-info → mistralai-1.0.0rc2.dist-info}/WHEEL +0 -0
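
The headline change in this release is the rename of the completion operations from `create`/`create_async` to `complete`/`complete_async`, plus a new `agents` resource and a `fine_tuning.jobs` namespace. As a hedged migration sketch based only on the README snippets quoted in the METADATA diff below (not on any additional API surface), an rc1-style chat call updates like this:

```python
# Migration sketch based on the README examples in the diff below.
import os

from mistralai import Mistral

s = Mistral(api_key=os.getenv("MISTRAL_API_KEY", ""))

# 1.0.0rc1 (old): res = s.chat.create(messages=[...], model="mistral-small-latest")
# 1.0.0rc2 (new): the operation is named `complete`
res = s.chat.complete(
    model="mistral-small-latest",
    messages=[
        {
            "content": "Who is the best French painter? Answer in one short sentence.",
            "role": "user",
        },
    ],
)

if res is not None:
    # handle response
    pass
```
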
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: mistralai
-Version: 1.0.0rc1
+Version: 1.0.0rc2
 Summary: Python Client SDK for the Mistral AI API.
 Author: Mistral
 Requires-Python: >=3.8,<4.0
@@ -61,12 +61,12 @@ s = Mistral(
 )
 
 
-res = s.chat.create(messages=[
+res = s.chat.complete(model="mistral-small-latest", messages=[
     {
         "content": "Who is the best French painter? Answer in one short sentence.",
         "role": "user",
     },
-], model="mistral-small-latest")
+])
 
 if res is not None:
     # handle response
@@ -86,12 +86,12 @@ async def main():
     s = Mistral(
         api_key=os.getenv("MISTRAL_API_KEY", ""),
     )
-    res = await s.chat.create_async(messages=[
+    res = await s.chat.complete_async(model="mistral-small-latest", messages=[
         {
             "content": "Who is the best French painter? Answer in one short sentence.",
             "role": "user",
         },
-    ], model="mistral-small-latest")
+    ])
     if res is not None:
         # handle response
         pass
@@ -144,17 +144,72 @@ async def main():
     # handle response
     pass
 
+asyncio.run(main())
+```
+
+### Create Agents Completions
+
+This example shows how to create agents completions.
+
+```python
+# Synchronous Example
+from mistralai import Mistral
+import os
+
+s = Mistral(
+    api_key=os.getenv("MISTRAL_API_KEY", ""),
+)
+
+
+res = s.agents.complete(messages=[
+    {
+        "content": "Who is the best French painter? Answer in one short sentence.",
+        "role": "user",
+    },
+], agent_id="<value>")
+
+if res is not None:
+    # handle response
+    pass
+```
+
+</br>
+
+The same SDK client can also be used to make asychronous requests by importing asyncio.
+```python
+# Asynchronous Example
+import asyncio
+from mistralai import Mistral
+import os
+
+async def main():
+    s = Mistral(
+        api_key=os.getenv("MISTRAL_API_KEY", ""),
+    )
+    res = await s.agents.complete_async(messages=[
+        {
+            "content": "Who is the best French painter? Answer in one short sentence.",
+            "role": "user",
+        },
+    ], agent_id="<value>")
+    if res is not None:
+        # handle response
+        pass
+
 asyncio.run(main())
 ```
 <!-- End SDK Example Usage [usage] -->
 
+## Providers' SDKs Example Usage
+
 ### Azure AI
 
 **Prerequisites**
+
 Before you begin, ensure you have `AZUREAI_ENDPOINT` and an `AZURE_API_KEY`. To obtain these, you will need to deploy Mistral on Azure AI.
 See [instructions for deploying Mistral on Azure AI here](https://docs.mistral.ai/deployment/cloud/azure/).
 
-Here's a basic example to get you started. You can also run [the example in the `examples` directory](/examples/azure/).
+Here's a basic example to get you started. You can also run [the example in the `examples` directory](/examples/azure).
 
 ```python
 import asyncio
@@ -164,33 +219,32 @@ from mistralai_azure import MistralAzure
 
 client = MistralAzure(
     azure_api_key=os.getenv("AZURE_API_KEY", ""),
-    azure_endpoint="your_azure_endpoint"
+    azure_endpoint=os.getenv("AZURE_ENDPOINT", "")
 )
 
 async def main() -> None:
-    res = await client.chat.create_async(
-        request={
-            "max_tokens": 100,
-            "temperature": 0.5,
-            "messages": [
-                {
-                    "content": "Hello there!",
-                    "role": "user"
-                }
-            ]
-        }
+    res = await client.chat.complete_async(
+        max_tokens= 100,
+        temperature= 0.5,
+        messages= [
+            {
+                "content": "Hello there!",
+                "role": "user"
+            }
+        ]
     )
     print(res)
 
 asyncio.run(main())
 ```
-The documentation for the Azure SDK is available [here](/packages/mistralai_azure/README.md).
+The documentation for the Azure SDK is available [here](packages/mistralai_azure/README.md).
 
 ### Google Cloud
 
+
 **Prerequisites**
 
-Before you begin, you will need to create a Google Cloud project and enable the Mistral API. To do this, follow the instructions [here](https://docs.mistral.ai/deployment/cloud/google/).
+Before you begin, you will need to create a Google Cloud project and enable the Mistral API. To do this, follow the instructions [here](https://docs.mistral.ai/deployment/cloud/vertex/).
 
 To run this locally you will also need to ensure you are authenticated with Google Cloud. You can do this by running
 
@@ -218,23 +272,21 @@ client = MistralGoogleCloud()
 
 
 async def main() -> None:
-    res = await client.chat.create_async(
-        request={
-            "model": "mistral-small-2402",
-            "messages": [
-                {
-                    "content": "Hello there!",
-                    "role": "user"
-                }
-            ]
-        }
+    res = await client.chat.complete_async(
+        model= "mistral-small-2402",
+        messages= [
+            {
+                "content": "Hello there!",
+                "role": "user"
+            }
+        ]
     )
     print(res)
 
 asyncio.run(main())
 ```
 
-The documentation for the GCP SDK is available [here](/packages/mistralai_gcp/README.md).
+The documentation for the GCP SDK is available [here](packages/mistralai_gcp/README.md).
 
 
 <!-- Start Available Resources and Operations [operations] -->
@@ -256,24 +308,30 @@ The documentation for the GCP SDK is available [here](/packages/mistralai_gcp/RE
 * [retrieve](docs/sdks/files/README.md#retrieve) - Retrieve File
 * [delete](docs/sdks/files/README.md#delete) - Delete File
 
-### [fine_tuning](docs/sdks/finetuning/README.md)
 
-* [list_jobs](docs/sdks/finetuning/README.md#list_jobs) - Get Fine Tuning Jobs
-* [create_job](docs/sdks/finetuning/README.md#create_job) - Create Fine Tuning Job
-* [get_job](docs/sdks/finetuning/README.md#get_job) - Get Fine Tuning Job
-* [cancel_job](docs/sdks/finetuning/README.md#cancel_job) - Cancel Fine Tuning Job
-* [start_job](docs/sdks/finetuning/README.md#start_job) - Start Fine Tuning Job
+### [fine_tuning.jobs](docs/sdks/jobs/README.md)
+
+* [list](docs/sdks/jobs/README.md#list) - Get Fine Tuning Jobs
+* [create](docs/sdks/jobs/README.md#create) - Create Fine Tuning Job
+* [get](docs/sdks/jobs/README.md#get) - Get Fine Tuning Job
+* [cancel](docs/sdks/jobs/README.md#cancel) - Cancel Fine Tuning Job
+* [start](docs/sdks/jobs/README.md#start) - Start Fine Tuning Job
 
 ### [chat](docs/sdks/chat/README.md)
 
-* [create](docs/sdks/chat/README.md#create) - Chat Completion
+* [complete](docs/sdks/chat/README.md#complete) - Chat Completion
 * [stream](docs/sdks/chat/README.md#stream) - Stream chat completion
 
 ### [fim](docs/sdks/fim/README.md)
 
-* [create](docs/sdks/fim/README.md#create) - Fim Completion
+* [complete](docs/sdks/fim/README.md#complete) - Fim Completion
 * [stream](docs/sdks/fim/README.md#stream) - Stream fim completion
 
+### [agents](docs/sdks/agents/README.md)
+
+* [complete](docs/sdks/agents/README.md#complete) - Chat Completion
+* [stream](docs/sdks/agents/README.md#stream) - Stream Agents completion
+
 ### [embeddings](docs/sdks/embeddings/README.md)
 
 * [create](docs/sdks/embeddings/README.md#create) - Embeddings
@@ -297,12 +355,12 @@ s = Mistral(
 )
 
 
-res = s.chat.stream(messages=[
+res = s.chat.stream(model="mistral-small-latest", messages=[
     {
         "content": "Who is the best French painter? Answer in one short sentence.",
         "role": "user",
     },
-], model="mistral-small-latest")
+])
 
 if res is not None:
     for event in res:
@@ -397,10 +455,10 @@ if res is not None:
 
 Handling errors in this SDK should largely match your expectations. All operations return a response object or raise an error. If Error objects are specified in your OpenAPI Spec, the SDK will raise the appropriate Error type.
 
-| Error Object               | Status Code | Content Type     |
-| -------------------------- | ----------- | ---------------- |
-| models.HTTPValidationError | 422         | application/json |
-| models.SDKError            | 4xx-5xx     | */*              |
+| Error Object               | Status Code                | Content Type               |
+| -------------------------- | -------------------------- | -------------------------- |
+| models.HTTPValidationError | 422                        | application/json           |
+| models.SDKError            | 4xx-5xx                    | */*                        |
 
 ### Example
 
@@ -437,9 +495,9 @@ if res is not None:
 
 You can override the default server globally by passing a server name to the `server: str` optional parameter when initializing the SDK client instance. The selected server will then be used as the default on the operations that use it. This table lists the names associated with the available servers:
 
-| Name   | Server                   | Variables |
-| ------ | ------------------------ | --------- |
-| `prod` | `https://api.mistral.ai` | None      |
+| Name | Server | Variables |
+| ----- | ------ | --------- |
+| `prod` | `https://api.mistral.ai` | None |
 
 #### Example
 
@@ -572,11 +630,11 @@ s = Mistral(async_client=CustomClient(httpx.AsyncClient()))
 
 This SDK supports the following security scheme globally:
 
-| Name      | Type | Scheme      |
-| --------- | ---- | ----------- |
-| `api_key` | http | HTTP Bearer |
+| Name                 | Type                 | Scheme               | Environment Variable |
+| -------------------- | -------------------- | -------------------- | -------------------- |
+| `api_key`            | http                 | HTTP Bearer          | `MISTRAL_API_KEY`    |
 
-To authenticate with the API the `api_key` parameter must be set when initializing the SDK client instance. For example:
+To authenticate with the API the `null` parameter must be set when initializing the SDK client instance. For example:
 ```python
 from mistralai import Mistral
 import os
@@ -594,13 +652,20 @@ if res is not None:
 
 ```
 <!-- End Authentication [security] -->
-## Providers Support
 
-We also provide provider specific SDK for:
+<!-- Start Debugging [debug] -->
+## Debugging
 
-- [GCP](packages/mistralai_gcp/README.md)
-- [Azure](packages/mistralai_azure/README.md)
+To emit debug logs for SDK requests and responses you can pass a logger object directly into your SDK object.
 
+```python
+from mistralai import Mistral
+import logging
+
+logging.basicConfig(level=logging.DEBUG)
+s = Mistral(debug_logger=logging.getLogger("mistralai"))
+```
+<!-- End Debugging [debug] -->
 
 <!-- Placeholder for Future Speakeasy SDK Sections -->
 
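
Taken together, the README changes above imply the following rc2 call shapes for the new `agents` resource and the relocated fine-tuning operations. This is a sketch assembled from the operation tables and examples in the diff, not an excerpt from the package; in particular, the nested `s.fine_tuning.jobs` accessor is assumed from the `fine_tuning.jobs` docs path listed above.

```python
# Sketch based on the operation tables in this diff; accessor names for the
# nested fine-tuning namespace are assumptions inferred from the docs paths.
import os

from mistralai import Mistral

s = Mistral(api_key=os.getenv("MISTRAL_API_KEY", ""))

# Agents completion (new in rc2), mirroring the README example in the diff.
res = s.agents.complete(
    agent_id="<value>",  # placeholder agent id, as in the README snippet
    messages=[{"content": "Hello there!", "role": "user"}],
)

# Fine-tuning jobs moved from fine_tuning.* to fine_tuning.jobs.*
# (list / create / get / cancel / start per the operations table above).
jobs = s.fine_tuning.jobs.list()
```
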