mistralai 1.10.0__py3-none-any.whl → 1.10.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (98)
  1. mistralai/_hooks/tracing.py +28 -3
  2. mistralai/_version.py +2 -2
  3. mistralai/classifiers.py +13 -1
  4. mistralai/embeddings.py +7 -1
  5. mistralai/extra/README.md +1 -1
  6. mistralai/extra/mcp/auth.py +10 -11
  7. mistralai/extra/mcp/base.py +17 -16
  8. mistralai/extra/mcp/sse.py +13 -15
  9. mistralai/extra/mcp/stdio.py +5 -6
  10. mistralai/extra/observability/otel.py +47 -68
  11. mistralai/extra/run/context.py +33 -43
  12. mistralai/extra/run/result.py +29 -30
  13. mistralai/extra/run/tools.py +8 -9
  14. mistralai/extra/struct_chat.py +15 -8
  15. mistralai/extra/utils/response_format.py +5 -3
  16. mistralai/mistral_jobs.py +31 -5
  17. mistralai/models/__init__.py +30 -1
  18. mistralai/models/agents_api_v1_agents_listop.py +1 -1
  19. mistralai/models/agents_api_v1_conversations_listop.py +1 -1
  20. mistralai/models/audioencoding.py +13 -0
  21. mistralai/models/audioformat.py +19 -0
  22. mistralai/models/batchjobin.py +17 -6
  23. mistralai/models/batchjobout.py +5 -0
  24. mistralai/models/batchrequest.py +48 -0
  25. mistralai/models/classificationrequest.py +37 -3
  26. mistralai/models/embeddingrequest.py +11 -3
  27. mistralai/models/jobs_api_routes_batch_get_batch_jobop.py +40 -3
  28. mistralai/models/toolfilechunk.py +11 -4
  29. mistralai/models/toolreferencechunk.py +13 -4
  30. {mistralai-1.10.0.dist-info → mistralai-1.10.1.dist-info}/METADATA +142 -150
  31. {mistralai-1.10.0.dist-info → mistralai-1.10.1.dist-info}/RECORD +122 -105
  32. {mistralai-1.10.0.dist-info → mistralai-1.10.1.dist-info}/WHEEL +1 -1
  33. mistralai_azure/_version.py +3 -3
  34. mistralai_azure/basesdk.py +15 -5
  35. mistralai_azure/chat.py +59 -98
  36. mistralai_azure/models/__init__.py +50 -3
  37. mistralai_azure/models/chatcompletionrequest.py +16 -4
  38. mistralai_azure/models/chatcompletionstreamrequest.py +16 -4
  39. mistralai_azure/models/httpvalidationerror.py +11 -6
  40. mistralai_azure/models/mistralazureerror.py +26 -0
  41. mistralai_azure/models/no_response_error.py +13 -0
  42. mistralai_azure/models/prediction.py +4 -0
  43. mistralai_azure/models/responseformat.py +4 -2
  44. mistralai_azure/models/responseformats.py +0 -1
  45. mistralai_azure/models/responsevalidationerror.py +25 -0
  46. mistralai_azure/models/sdkerror.py +30 -14
  47. mistralai_azure/models/systemmessage.py +7 -3
  48. mistralai_azure/models/systemmessagecontentchunks.py +21 -0
  49. mistralai_azure/models/thinkchunk.py +35 -0
  50. mistralai_azure/ocr.py +15 -36
  51. mistralai_azure/utils/__init__.py +18 -5
  52. mistralai_azure/utils/eventstreaming.py +10 -0
  53. mistralai_azure/utils/serializers.py +3 -2
  54. mistralai_azure/utils/unmarshal_json_response.py +24 -0
  55. mistralai_gcp/_hooks/types.py +7 -0
  56. mistralai_gcp/_version.py +4 -4
  57. mistralai_gcp/basesdk.py +27 -25
  58. mistralai_gcp/chat.py +75 -98
  59. mistralai_gcp/fim.py +39 -74
  60. mistralai_gcp/httpclient.py +6 -16
  61. mistralai_gcp/models/__init__.py +321 -116
  62. mistralai_gcp/models/assistantmessage.py +1 -1
  63. mistralai_gcp/models/chatcompletionrequest.py +36 -7
  64. mistralai_gcp/models/chatcompletionresponse.py +6 -6
  65. mistralai_gcp/models/chatcompletionstreamrequest.py +36 -7
  66. mistralai_gcp/models/completionresponsestreamchoice.py +1 -1
  67. mistralai_gcp/models/deltamessage.py +1 -1
  68. mistralai_gcp/models/fimcompletionrequest.py +3 -9
  69. mistralai_gcp/models/fimcompletionresponse.py +6 -6
  70. mistralai_gcp/models/fimcompletionstreamrequest.py +3 -9
  71. mistralai_gcp/models/httpvalidationerror.py +11 -6
  72. mistralai_gcp/models/imageurl.py +1 -1
  73. mistralai_gcp/models/jsonschema.py +1 -1
  74. mistralai_gcp/models/mistralgcperror.py +26 -0
  75. mistralai_gcp/models/mistralpromptmode.py +8 -0
  76. mistralai_gcp/models/no_response_error.py +13 -0
  77. mistralai_gcp/models/prediction.py +4 -0
  78. mistralai_gcp/models/responseformat.py +5 -3
  79. mistralai_gcp/models/responseformats.py +0 -1
  80. mistralai_gcp/models/responsevalidationerror.py +25 -0
  81. mistralai_gcp/models/sdkerror.py +30 -14
  82. mistralai_gcp/models/systemmessage.py +7 -3
  83. mistralai_gcp/models/systemmessagecontentchunks.py +21 -0
  84. mistralai_gcp/models/thinkchunk.py +35 -0
  85. mistralai_gcp/models/toolmessage.py +1 -1
  86. mistralai_gcp/models/usageinfo.py +71 -8
  87. mistralai_gcp/models/usermessage.py +1 -1
  88. mistralai_gcp/sdk.py +12 -10
  89. mistralai_gcp/sdkconfiguration.py +0 -7
  90. mistralai_gcp/types/basemodel.py +3 -3
  91. mistralai_gcp/utils/__init__.py +143 -45
  92. mistralai_gcp/utils/datetimes.py +23 -0
  93. mistralai_gcp/utils/enums.py +67 -27
  94. mistralai_gcp/utils/eventstreaming.py +10 -0
  95. mistralai_gcp/utils/forms.py +49 -28
  96. mistralai_gcp/utils/serializers.py +33 -3
  97. mistralai_gcp/utils/unmarshal_json_response.py +24 -0
  98. {mistralai-1.10.0.dist-info → mistralai-1.10.1.dist-info}/licenses/LICENSE +0 -0
mistralai/mistral_jobs.py CHANGED
@@ -7,7 +7,7 @@ from mistralai._hooks import HookContext
 from mistralai.types import OptionalNullable, UNSET
 from mistralai.utils import get_security_from_env
 from mistralai.utils.unmarshal_json_response import unmarshal_json_response
-from typing import Any, Dict, List, Mapping, Optional
+from typing import Any, Dict, List, Mapping, Optional, Union
 
 
 class MistralJobs(BaseSDK):
@@ -222,8 +222,11 @@ class MistralJobs(BaseSDK):
     def create(
         self,
         *,
-        input_files: List[str],
         endpoint: models.APIEndpoint,
+        input_files: OptionalNullable[List[str]] = UNSET,
+        requests: OptionalNullable[
+            Union[List[models.BatchRequest], List[models.BatchRequestTypedDict]]
+        ] = UNSET,
         model: OptionalNullable[str] = UNSET,
         agent_id: OptionalNullable[str] = UNSET,
         metadata: OptionalNullable[Dict[str, str]] = UNSET,
@@ -237,8 +240,9 @@ class MistralJobs(BaseSDK):
 
         Create a new batch job, it will be queued for processing.
 
-        :param input_files: The list of input files to be used for batch inference, these files should be `jsonl` files, containing the input data corresponding to the bory request for the batch inference in a \"body\" field. An example of such file is the following: ```json {\"custom_id\": \"0\", \"body\": {\"max_tokens\": 100, \"messages\": [{\"role\": \"user\", \"content\": \"What is the best French cheese?\"}]}} {\"custom_id\": \"1\", \"body\": {\"max_tokens\": 100, \"messages\": [{\"role\": \"user\", \"content\": \"What is the best French wine?\"}]}} ```
         :param endpoint:
+        :param input_files: The list of input files to be used for batch inference, these files should be `jsonl` files, containing the input data corresponding to the bory request for the batch inference in a \"body\" field. An example of such file is the following: ```json {\"custom_id\": \"0\", \"body\": {\"max_tokens\": 100, \"messages\": [{\"role\": \"user\", \"content\": \"What is the best French cheese?\"}]}} {\"custom_id\": \"1\", \"body\": {\"max_tokens\": 100, \"messages\": [{\"role\": \"user\", \"content\": \"What is the best French wine?\"}]}} ```
+        :param requests:
         :param model: The model to be used for batch inference.
         :param agent_id: In case you want to use a specific agent from the **deprecated** agents api for batch inference, you can specify the agent ID here.
         :param metadata: The metadata of your choice to be associated with the batch inference job.
@@ -260,6 +264,9 @@ class MistralJobs(BaseSDK):
 
         request = models.BatchJobIn(
             input_files=input_files,
+            requests=utils.get_pydantic_model(
+                requests, OptionalNullable[List[models.BatchRequest]]
+            ),
             endpoint=endpoint,
             model=model,
             agent_id=agent_id,
@@ -323,8 +330,11 @@ class MistralJobs(BaseSDK):
     async def create_async(
         self,
         *,
-        input_files: List[str],
         endpoint: models.APIEndpoint,
+        input_files: OptionalNullable[List[str]] = UNSET,
+        requests: OptionalNullable[
+            Union[List[models.BatchRequest], List[models.BatchRequestTypedDict]]
+        ] = UNSET,
         model: OptionalNullable[str] = UNSET,
         agent_id: OptionalNullable[str] = UNSET,
         metadata: OptionalNullable[Dict[str, str]] = UNSET,
@@ -338,8 +348,9 @@ class MistralJobs(BaseSDK):
 
         Create a new batch job, it will be queued for processing.
 
-        :param input_files: The list of input files to be used for batch inference, these files should be `jsonl` files, containing the input data corresponding to the bory request for the batch inference in a \"body\" field. An example of such file is the following: ```json {\"custom_id\": \"0\", \"body\": {\"max_tokens\": 100, \"messages\": [{\"role\": \"user\", \"content\": \"What is the best French cheese?\"}]}} {\"custom_id\": \"1\", \"body\": {\"max_tokens\": 100, \"messages\": [{\"role\": \"user\", \"content\": \"What is the best French wine?\"}]}} ```
         :param endpoint:
+        :param input_files: The list of input files to be used for batch inference, these files should be `jsonl` files, containing the input data corresponding to the bory request for the batch inference in a \"body\" field. An example of such file is the following: ```json {\"custom_id\": \"0\", \"body\": {\"max_tokens\": 100, \"messages\": [{\"role\": \"user\", \"content\": \"What is the best French cheese?\"}]}} {\"custom_id\": \"1\", \"body\": {\"max_tokens\": 100, \"messages\": [{\"role\": \"user\", \"content\": \"What is the best French wine?\"}]}} ```
+        :param requests:
         :param model: The model to be used for batch inference.
         :param agent_id: In case you want to use a specific agent from the **deprecated** agents api for batch inference, you can specify the agent ID here.
         :param metadata: The metadata of your choice to be associated with the batch inference job.
@@ -361,6 +372,9 @@ class MistralJobs(BaseSDK):
 
         request = models.BatchJobIn(
             input_files=input_files,
+            requests=utils.get_pydantic_model(
+                requests, OptionalNullable[List[models.BatchRequest]]
+            ),
             endpoint=endpoint,
             model=model,
             agent_id=agent_id,
@@ -425,6 +439,7 @@ class MistralJobs(BaseSDK):
         self,
         *,
         job_id: str,
+        inline: OptionalNullable[bool] = UNSET,
         retries: OptionalNullable[utils.RetryConfig] = UNSET,
         server_url: Optional[str] = None,
         timeout_ms: Optional[int] = None,
@@ -434,7 +449,11 @@ class MistralJobs(BaseSDK):
 
         Get a batch job details by its UUID.
 
+        Args:
+            inline: If True, return results inline in the response.
+
         :param job_id:
+        :param inline:
         :param retries: Override the default retry configuration for this method
         :param server_url: Override the default server URL for this method
         :param timeout_ms: Override the default request timeout configuration for this method in milliseconds
@@ -452,6 +471,7 @@ class MistralJobs(BaseSDK):
 
         request = models.JobsAPIRoutesBatchGetBatchJobRequest(
            job_id=job_id,
+            inline=inline,
         )
 
         req = self._build_request(
@@ -508,6 +528,7 @@ class MistralJobs(BaseSDK):
         self,
         *,
         job_id: str,
+        inline: OptionalNullable[bool] = UNSET,
         retries: OptionalNullable[utils.RetryConfig] = UNSET,
         server_url: Optional[str] = None,
         timeout_ms: Optional[int] = None,
@@ -517,7 +538,11 @@ class MistralJobs(BaseSDK):
 
         Get a batch job details by its UUID.
 
+        Args:
+            inline: If True, return results inline in the response.
+
         :param job_id:
+        :param inline:
         :param retries: Override the default retry configuration for this method
         :param server_url: Override the default server URL for this method
         :param timeout_ms: Override the default request timeout configuration for this method in milliseconds
@@ -535,6 +560,7 @@ class MistralJobs(BaseSDK):
 
         request = models.JobsAPIRoutesBatchGetBatchJobRequest(
            job_id=job_id,
+            inline=inline,
         )
 
         req = self._build_request_async(
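
Taken together, the mistral_jobs.py changes let a batch job be created from inline request objects (the new `requests` parameter) instead of pre-uploaded JSONL files, and let results be fetched inline via the new `inline` flag on `get`. A minimal sketch of the new surface, assuming the usual `client.batch.jobs` accessor for `MistralJobs` and an API key in the environment:

import os
from mistralai import Mistral

client = Mistral(api_key=os.environ["MISTRAL_API_KEY"])

# Each inline request mirrors one line of the old JSONL input file:
# a custom_id plus the per-request body.
job = client.batch.jobs.create(
    endpoint="/v1/chat/completions",
    model="mistral-small-latest",
    requests=[
        {
            "custom_id": "0",
            "body": {
                "max_tokens": 100,
                "messages": [{"role": "user", "content": "What is the best French cheese?"}],
            },
        },
    ],
)

# With inline=True the API is asked to return results in the response
# itself; they surface on the new BatchJobOut.outputs field (see the
# batchjobout.py diff below).
job = client.batch.jobs.get(job_id=job.id, inline=True)
print(job.outputs)
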
mistralai/models/__init__.py CHANGED
@@ -141,6 +141,8 @@ if TYPE_CHECKING:
         AssistantMessageTypedDict,
     )
     from .audiochunk import AudioChunk, AudioChunkType, AudioChunkTypedDict
+    from .audioencoding import AudioEncoding
+    from .audioformat import AudioFormat, AudioFormatTypedDict
     from .audiotranscriptionrequest import (
         AudioTranscriptionRequest,
         AudioTranscriptionRequestTypedDict,
@@ -155,6 +157,7 @@ if TYPE_CHECKING:
     from .batchjobout import BatchJobOut, BatchJobOutObject, BatchJobOutTypedDict
     from .batchjobsout import BatchJobsOut, BatchJobsOutObject, BatchJobsOutTypedDict
     from .batchjobstatus import BatchJobStatus
+    from .batchrequest import BatchRequest, BatchRequestTypedDict
     from .builtinconnectors import BuiltInConnectors
     from .chatclassificationrequest import (
         ChatClassificationRequest,
@@ -840,7 +843,13 @@ if TYPE_CHECKING:
         ToolExecutionStartedEventType,
         ToolExecutionStartedEventTypedDict,
     )
-    from .toolfilechunk import ToolFileChunk, ToolFileChunkType, ToolFileChunkTypedDict
+    from .toolfilechunk import (
+        ToolFileChunk,
+        ToolFileChunkTool,
+        ToolFileChunkToolTypedDict,
+        ToolFileChunkType,
+        ToolFileChunkTypedDict,
+    )
     from .toolmessage import (
         ToolMessage,
         ToolMessageContent,
@@ -850,6 +859,8 @@ if TYPE_CHECKING:
     )
     from .toolreferencechunk import (
         ToolReferenceChunk,
+        ToolReferenceChunkTool,
+        ToolReferenceChunkToolTypedDict,
         ToolReferenceChunkType,
         ToolReferenceChunkTypedDict,
     )
@@ -1018,6 +1029,9 @@ __all__ = [
     "AudioChunk",
     "AudioChunkType",
     "AudioChunkTypedDict",
+    "AudioEncoding",
+    "AudioFormat",
+    "AudioFormatTypedDict",
     "AudioTranscriptionRequest",
     "AudioTranscriptionRequestStream",
     "AudioTranscriptionRequestStreamTypedDict",
@@ -1036,6 +1050,8 @@ __all__ = [
     "BatchJobsOut",
     "BatchJobsOutObject",
     "BatchJobsOutTypedDict",
+    "BatchRequest",
+    "BatchRequestTypedDict",
     "BuiltInConnectors",
     "ChatClassificationRequest",
     "ChatClassificationRequestTypedDict",
@@ -1567,6 +1583,8 @@ __all__ = [
     "ToolExecutionStartedEventType",
     "ToolExecutionStartedEventTypedDict",
     "ToolFileChunk",
+    "ToolFileChunkTool",
+    "ToolFileChunkToolTypedDict",
     "ToolFileChunkType",
     "ToolFileChunkTypedDict",
     "ToolMessage",
@@ -1575,6 +1593,8 @@ __all__ = [
     "ToolMessageRole",
     "ToolMessageTypedDict",
     "ToolReferenceChunk",
+    "ToolReferenceChunkTool",
+    "ToolReferenceChunkToolTypedDict",
     "ToolReferenceChunkType",
     "ToolReferenceChunkTypedDict",
     "ToolTypedDict",
@@ -1724,6 +1744,9 @@ _dynamic_imports: dict[str, str] = {
     "AudioChunk": ".audiochunk",
     "AudioChunkType": ".audiochunk",
     "AudioChunkTypedDict": ".audiochunk",
+    "AudioEncoding": ".audioencoding",
+    "AudioFormat": ".audioformat",
+    "AudioFormatTypedDict": ".audioformat",
     "AudioTranscriptionRequest": ".audiotranscriptionrequest",
     "AudioTranscriptionRequestTypedDict": ".audiotranscriptionrequest",
     "AudioTranscriptionRequestStream": ".audiotranscriptionrequeststream",
@@ -1742,6 +1765,8 @@ _dynamic_imports: dict[str, str] = {
     "BatchJobsOutObject": ".batchjobsout",
     "BatchJobsOutTypedDict": ".batchjobsout",
     "BatchJobStatus": ".batchjobstatus",
+    "BatchRequest": ".batchrequest",
+    "BatchRequestTypedDict": ".batchrequest",
     "BuiltInConnectors": ".builtinconnectors",
     "ChatClassificationRequest": ".chatclassificationrequest",
     "ChatClassificationRequestTypedDict": ".chatclassificationrequest",
@@ -2277,6 +2302,8 @@ _dynamic_imports: dict[str, str] = {
     "ToolExecutionStartedEventType": ".toolexecutionstartedevent",
     "ToolExecutionStartedEventTypedDict": ".toolexecutionstartedevent",
     "ToolFileChunk": ".toolfilechunk",
+    "ToolFileChunkTool": ".toolfilechunk",
+    "ToolFileChunkToolTypedDict": ".toolfilechunk",
     "ToolFileChunkType": ".toolfilechunk",
     "ToolFileChunkTypedDict": ".toolfilechunk",
     "ToolMessage": ".toolmessage",
@@ -2285,6 +2312,8 @@ _dynamic_imports: dict[str, str] = {
     "ToolMessageRole": ".toolmessage",
     "ToolMessageTypedDict": ".toolmessage",
     "ToolReferenceChunk": ".toolreferencechunk",
+    "ToolReferenceChunkTool": ".toolreferencechunk",
+    "ToolReferenceChunkToolTypedDict": ".toolreferencechunk",
     "ToolReferenceChunkType": ".toolreferencechunk",
     "ToolReferenceChunkTypedDict": ".toolreferencechunk",
     "ToolTypes": ".tooltypes",
mistralai/models/agents_api_v1_agents_listop.py CHANGED
@@ -52,7 +52,7 @@ class AgentsAPIV1AgentsListRequest(BaseModel):
 
     metadata: Annotated[
         OptionalNullable[Dict[str, Any]],
-        FieldMetadata(query=QueryParamMetadata(style="form", explode=True)),
+        FieldMetadata(query=QueryParamMetadata(serialization="json")),
     ] = UNSET
 
     @model_serializer(mode="wrap")
mistralai/models/agents_api_v1_conversations_listop.py CHANGED
@@ -29,7 +29,7 @@ class AgentsAPIV1ConversationsListRequest(BaseModel):
 
     metadata: Annotated[
         OptionalNullable[Dict[str, Any]],
-        FieldMetadata(query=QueryParamMetadata(style="form", explode=True)),
+        FieldMetadata(query=QueryParamMetadata(serialization="json")),
     ] = UNSET
 
     @model_serializer(mode="wrap")
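
In both list requests above, the `metadata` filter moves from form-style exploded serialization to a single JSON-encoded query parameter. A rough illustration of the two wire formats (plain Python, not the SDK's actual serializer):

import json
from urllib.parse import urlencode

metadata = {"env": "prod"}

# Old (style="form", explode=True): the dict's own keys became separate
# query parameters, indistinguishable from other filters.
print(urlencode(metadata))                            # env=prod

# New (serialization="json"): one `metadata` parameter carrying the
# URL-encoded JSON object.
print(urlencode({"metadata": json.dumps(metadata)}))  # metadata=%7B%22env%22%3A+%22prod%22%7D
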
mistralai/models/audioencoding.py ADDED
@@ -0,0 +1,13 @@
+"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+from __future__ import annotations
+from mistralai.types import UnrecognizedStr
+from typing import Literal, Union
+
+
+AudioEncoding = Union[
+    Literal[
+        "pcm_s16le", "pcm_s32le", "pcm_f16le", "pcm_f32le", "pcm_mulaw", "pcm_alaw"
+    ],
+    UnrecognizedStr,
+]
mistralai/models/audioformat.py ADDED
@@ -0,0 +1,19 @@
+"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+from __future__ import annotations
+from .audioencoding import AudioEncoding
+from mistralai.types import BaseModel
+from mistralai.utils import validate_open_enum
+from pydantic.functional_validators import PlainValidator
+from typing_extensions import Annotated, TypedDict
+
+
+class AudioFormatTypedDict(TypedDict):
+    encoding: AudioEncoding
+    sample_rate: int
+
+
+class AudioFormat(BaseModel):
+    encoding: Annotated[AudioEncoding, PlainValidator(validate_open_enum(False))]
+
+    sample_rate: int
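
`AudioEncoding` is an open enum: on `AudioFormat.encoding`, `PlainValidator(validate_open_enum(False))` validates known PCM encodings against the `Literal` while letting unknown strings through as `UnrecognizedStr` rather than raising. A minimal sketch of that behavior (the second value is deliberately not in the enum):

from mistralai.models import AudioFormat

known = AudioFormat(encoding="pcm_s16le", sample_rate=16000)
# An encoding this SDK version has never heard of still validates, so
# newer server-side values should not break older clients.
future = AudioFormat(encoding="pcm_s24le", sample_rate=48000)
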
mistralai/models/batchjobin.py CHANGED
@@ -2,6 +2,7 @@
 
 from __future__ import annotations
 from .apiendpoint import APIEndpoint
+from .batchrequest import BatchRequest, BatchRequestTypedDict
 from mistralai.types import BaseModel, Nullable, OptionalNullable, UNSET, UNSET_SENTINEL
 from mistralai.utils import validate_open_enum
 from pydantic import model_serializer
@@ -11,9 +12,10 @@ from typing_extensions import Annotated, NotRequired, TypedDict
 
 
 class BatchJobInTypedDict(TypedDict):
-    input_files: List[str]
-    r"""The list of input files to be used for batch inference, these files should be `jsonl` files, containing the input data corresponding to the bory request for the batch inference in a \"body\" field. An example of such file is the following: ```json {\"custom_id\": \"0\", \"body\": {\"max_tokens\": 100, \"messages\": [{\"role\": \"user\", \"content\": \"What is the best French cheese?\"}]}} {\"custom_id\": \"1\", \"body\": {\"max_tokens\": 100, \"messages\": [{\"role\": \"user\", \"content\": \"What is the best French wine?\"}]}} ```"""
     endpoint: APIEndpoint
+    input_files: NotRequired[Nullable[List[str]]]
+    r"""The list of input files to be used for batch inference, these files should be `jsonl` files, containing the input data corresponding to the bory request for the batch inference in a \"body\" field. An example of such file is the following: ```json {\"custom_id\": \"0\", \"body\": {\"max_tokens\": 100, \"messages\": [{\"role\": \"user\", \"content\": \"What is the best French cheese?\"}]}} {\"custom_id\": \"1\", \"body\": {\"max_tokens\": 100, \"messages\": [{\"role\": \"user\", \"content\": \"What is the best French wine?\"}]}} ```"""
+    requests: NotRequired[Nullable[List[BatchRequestTypedDict]]]
     model: NotRequired[Nullable[str]]
     r"""The model to be used for batch inference."""
     agent_id: NotRequired[Nullable[str]]
@@ -25,10 +27,12 @@ class BatchJobInTypedDict(TypedDict):
 
 
 class BatchJobIn(BaseModel):
-    input_files: List[str]
+    endpoint: Annotated[APIEndpoint, PlainValidator(validate_open_enum(False))]
+
+    input_files: OptionalNullable[List[str]] = UNSET
     r"""The list of input files to be used for batch inference, these files should be `jsonl` files, containing the input data corresponding to the bory request for the batch inference in a \"body\" field. An example of such file is the following: ```json {\"custom_id\": \"0\", \"body\": {\"max_tokens\": 100, \"messages\": [{\"role\": \"user\", \"content\": \"What is the best French cheese?\"}]}} {\"custom_id\": \"1\", \"body\": {\"max_tokens\": 100, \"messages\": [{\"role\": \"user\", \"content\": \"What is the best French wine?\"}]}} ```"""
 
-    endpoint: Annotated[APIEndpoint, PlainValidator(validate_open_enum(False))]
+    requests: OptionalNullable[List[BatchRequest]] = UNSET
 
     model: OptionalNullable[str] = UNSET
     r"""The model to be used for batch inference."""
@@ -44,8 +48,15 @@ class BatchJobIn(BaseModel):
 
     @model_serializer(mode="wrap")
     def serialize_model(self, handler):
-        optional_fields = ["model", "agent_id", "metadata", "timeout_hours"]
-        nullable_fields = ["model", "agent_id", "metadata"]
+        optional_fields = [
+            "input_files",
+            "requests",
+            "model",
+            "agent_id",
+            "metadata",
+            "timeout_hours",
+        ]
+        nullable_fields = ["input_files", "requests", "model", "agent_id", "metadata"]
         null_default_fields = []
 
         serialized = handler(self)
mistralai/models/batchjobout.py CHANGED
@@ -29,6 +29,7 @@ class BatchJobOutTypedDict(TypedDict):
     agent_id: NotRequired[Nullable[str]]
     output_file: NotRequired[Nullable[str]]
     error_file: NotRequired[Nullable[str]]
+    outputs: NotRequired[Nullable[List[Dict[str, Any]]]]
     started_at: NotRequired[Nullable[int]]
     completed_at: NotRequired[Nullable[int]]
 
@@ -66,6 +67,8 @@ class BatchJobOut(BaseModel):
 
     error_file: OptionalNullable[str] = UNSET
 
+    outputs: OptionalNullable[List[Dict[str, Any]]] = UNSET
+
     started_at: OptionalNullable[int] = UNSET
 
     completed_at: OptionalNullable[int] = UNSET
@@ -79,6 +82,7 @@ class BatchJobOut(BaseModel):
             "agent_id",
             "output_file",
             "error_file",
+            "outputs",
             "started_at",
             "completed_at",
         ]
@@ -88,6 +92,7 @@ class BatchJobOut(BaseModel):
             "agent_id",
             "output_file",
             "error_file",
+            "outputs",
             "started_at",
             "completed_at",
         ]
mistralai/models/batchrequest.py ADDED
@@ -0,0 +1,48 @@
+"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+from __future__ import annotations
+from mistralai.types import BaseModel, Nullable, OptionalNullable, UNSET, UNSET_SENTINEL
+from pydantic import model_serializer
+from typing import Any, Dict
+from typing_extensions import NotRequired, TypedDict
+
+
+class BatchRequestTypedDict(TypedDict):
+    body: Dict[str, Any]
+    custom_id: NotRequired[Nullable[str]]
+
+
+class BatchRequest(BaseModel):
+    body: Dict[str, Any]
+
+    custom_id: OptionalNullable[str] = UNSET
+
+    @model_serializer(mode="wrap")
+    def serialize_model(self, handler):
+        optional_fields = ["custom_id"]
+        nullable_fields = ["custom_id"]
+        null_default_fields = []
+
+        serialized = handler(self)
+
+        m = {}
+
+        for n, f in type(self).model_fields.items():
+            k = f.alias or n
+            val = serialized.get(k)
+            serialized.pop(k, None)
+
+            optional_nullable = k in optional_fields and k in nullable_fields
+            is_set = (
+                self.__pydantic_fields_set__.intersection({n})
+                or k in null_default_fields
+            )  # pylint: disable=no-member
+
+            if val is not None and val != UNSET_SENTINEL:
+                m[k] = val
+            elif val != UNSET_SENTINEL and (
+                not k in optional_fields or (optional_nullable and is_set)
+            ):
+                m[k] = val
+
+        return m
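
The serializer above implements the SDK's optional/nullable contract: an unset `custom_id` is omitted from the payload entirely, while an explicitly assigned None is kept as null. A short sketch of the difference:

from mistralai.models import BatchRequest

# custom_id left unset: omitted from the serialized payload.
req = BatchRequest(body={"max_tokens": 100, "messages": [{"role": "user", "content": "Hi"}]})
print(req.model_dump())  # {'body': {...}} with no 'custom_id' key

# custom_id explicitly None: serialized as null.
req = BatchRequest(body={"max_tokens": 100}, custom_id=None)
print(req.model_dump())  # {'body': {...}, 'custom_id': None}
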
mistralai/models/classificationrequest.py CHANGED
@@ -1,10 +1,11 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from __future__ import annotations
-from mistralai.types import BaseModel
+from mistralai.types import BaseModel, Nullable, OptionalNullable, UNSET, UNSET_SENTINEL
 import pydantic
-from typing import List, Union
-from typing_extensions import Annotated, TypeAliasType, TypedDict
+from pydantic import model_serializer
+from typing import Any, Dict, List, Union
+from typing_extensions import Annotated, NotRequired, TypeAliasType, TypedDict
 
 
 ClassificationRequestInputsTypedDict = TypeAliasType(
@@ -24,6 +25,7 @@ class ClassificationRequestTypedDict(TypedDict):
     r"""ID of the model to use."""
     inputs: ClassificationRequestInputsTypedDict
     r"""Text to classify."""
+    metadata: NotRequired[Nullable[Dict[str, Any]]]
 
 
 class ClassificationRequest(BaseModel):
@@ -32,3 +34,35 @@ class ClassificationRequest(BaseModel):
 
     inputs: Annotated[ClassificationRequestInputs, pydantic.Field(alias="input")]
     r"""Text to classify."""
+
+    metadata: OptionalNullable[Dict[str, Any]] = UNSET
+
+    @model_serializer(mode="wrap")
+    def serialize_model(self, handler):
+        optional_fields = ["metadata"]
+        nullable_fields = ["metadata"]
+        null_default_fields = []
+
+        serialized = handler(self)
+
+        m = {}
+
+        for n, f in type(self).model_fields.items():
+            k = f.alias or n
+            val = serialized.get(k)
+            serialized.pop(k, None)
+
+            optional_nullable = k in optional_fields and k in nullable_fields
+            is_set = (
+                self.__pydantic_fields_set__.intersection({n})
+                or k in null_default_fields
+            )  # pylint: disable=no-member
+
+            if val is not None and val != UNSET_SENTINEL:
+                m[k] = val
+            elif val != UNSET_SENTINEL and (
+                not k in optional_fields or (optional_nullable and is_set)
+            ):
+                m[k] = val
+
+        return m
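
`ClassificationRequest` gains an optional, nullable `metadata` field with the same omit-when-unset serializer. A hypothetical construction (the model id and tag are placeholders, and population by field name is assumed to be enabled on the generated BaseModel, as is typical for Speakeasy SDKs); note that `inputs` serializes under its wire alias `input`:

from mistralai.models import ClassificationRequest

req = ClassificationRequest(
    model="mistral-moderation-latest",
    inputs=["Text to screen."],
    metadata={"trace_id": "abc-123"},
)
print(req.model_dump(by_alias=True))
# {'model': 'mistral-moderation-latest', 'input': ['Text to screen.'], 'metadata': {'trace_id': 'abc-123'}}
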
mistralai/models/embeddingrequest.py CHANGED
@@ -6,7 +6,7 @@ from .encodingformat import EncodingFormat
 from mistralai.types import BaseModel, Nullable, OptionalNullable, UNSET, UNSET_SENTINEL
 import pydantic
 from pydantic import model_serializer
-from typing import List, Optional, Union
+from typing import Any, Dict, List, Optional, Union
 from typing_extensions import Annotated, NotRequired, TypeAliasType, TypedDict
 
 
@@ -25,6 +25,7 @@ class EmbeddingRequestTypedDict(TypedDict):
     r"""The ID of the model to be used for embedding."""
     inputs: EmbeddingRequestInputsTypedDict
     r"""The text content to be embedded, can be a string or an array of strings for fast processing in bulk."""
+    metadata: NotRequired[Nullable[Dict[str, Any]]]
     output_dimension: NotRequired[Nullable[int]]
     r"""The dimension of the output embeddings when feature available. If not provided, a default output dimension will be used."""
     output_dtype: NotRequired[EmbeddingDtype]
@@ -38,6 +39,8 @@ class EmbeddingRequest(BaseModel):
     inputs: Annotated[EmbeddingRequestInputs, pydantic.Field(alias="input")]
     r"""The text content to be embedded, can be a string or an array of strings for fast processing in bulk."""
 
+    metadata: OptionalNullable[Dict[str, Any]] = UNSET
+
     output_dimension: OptionalNullable[int] = UNSET
     r"""The dimension of the output embeddings when feature available. If not provided, a default output dimension will be used."""
 
@@ -47,8 +50,13 @@ class EmbeddingRequest(BaseModel):
 
     @model_serializer(mode="wrap")
     def serialize_model(self, handler):
-        optional_fields = ["output_dimension", "output_dtype", "encoding_format"]
-        nullable_fields = ["output_dimension"]
+        optional_fields = [
+            "metadata",
+            "output_dimension",
+            "output_dtype",
+            "encoding_format",
+        ]
+        nullable_fields = ["metadata", "output_dimension"]
         null_default_fields = []
 
         serialized = handler(self)
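
`EmbeddingRequest` follows the same pattern, with `metadata` slotted ahead of the existing optional fields. A sketch under the same population-by-name assumption:

from mistralai.models import EmbeddingRequest

req = EmbeddingRequest(
    model="mistral-embed",
    inputs=["Embed this sentence."],
    metadata={"project": "search-index"},  # hypothetical tag of your choice
)
print(req.model_dump(by_alias=True))  # metadata appears only because it was set
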
mistralai/models/jobs_api_routes_batch_get_batch_jobop.py CHANGED
@@ -1,16 +1,53 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from __future__ import annotations
-from mistralai.types import BaseModel
-from mistralai.utils import FieldMetadata, PathParamMetadata
-from typing_extensions import Annotated, TypedDict
+from mistralai.types import BaseModel, Nullable, OptionalNullable, UNSET, UNSET_SENTINEL
+from mistralai.utils import FieldMetadata, PathParamMetadata, QueryParamMetadata
+from pydantic import model_serializer
+from typing_extensions import Annotated, NotRequired, TypedDict
 
 
 class JobsAPIRoutesBatchGetBatchJobRequestTypedDict(TypedDict):
     job_id: str
+    inline: NotRequired[Nullable[bool]]
 
 
 class JobsAPIRoutesBatchGetBatchJobRequest(BaseModel):
     job_id: Annotated[
         str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False))
     ]
+
+    inline: Annotated[
+        OptionalNullable[bool],
+        FieldMetadata(query=QueryParamMetadata(style="form", explode=True)),
+    ] = UNSET
+
+    @model_serializer(mode="wrap")
+    def serialize_model(self, handler):
+        optional_fields = ["inline"]
+        nullable_fields = ["inline"]
+        null_default_fields = []
+
+        serialized = handler(self)
+
+        m = {}
+
+        for n, f in type(self).model_fields.items():
+            k = f.alias or n
+            val = serialized.get(k)
+            serialized.pop(k, None)
+
+            optional_nullable = k in optional_fields and k in nullable_fields
+            is_set = (
+                self.__pydantic_fields_set__.intersection({n})
+                or k in null_default_fields
+            )  # pylint: disable=no-member
+
+            if val is not None and val != UNSET_SENTINEL:
+                m[k] = val
+            elif val != UNSET_SENTINEL and (
+                not k in optional_fields or (optional_nullable and is_set)
+            ):
+                m[k] = val
+
+        return m
mistralai/models/toolfilechunk.py CHANGED
@@ -4,15 +4,22 @@ from __future__ import annotations
 from .builtinconnectors import BuiltInConnectors
 from mistralai.types import BaseModel, Nullable, OptionalNullable, UNSET, UNSET_SENTINEL
 from pydantic import model_serializer
-from typing import Literal, Optional
-from typing_extensions import NotRequired, TypedDict
+from typing import Literal, Optional, Union
+from typing_extensions import NotRequired, TypeAliasType, TypedDict
 
 
 ToolFileChunkType = Literal["tool_file"]
 
+ToolFileChunkToolTypedDict = TypeAliasType(
+    "ToolFileChunkToolTypedDict", Union[BuiltInConnectors, str]
+)
+
+
+ToolFileChunkTool = TypeAliasType("ToolFileChunkTool", Union[BuiltInConnectors, str])
+
 
 class ToolFileChunkTypedDict(TypedDict):
-    tool: BuiltInConnectors
+    tool: ToolFileChunkToolTypedDict
     file_id: str
     type: NotRequired[ToolFileChunkType]
     file_name: NotRequired[Nullable[str]]
@@ -20,7 +27,7 @@ class ToolFileChunkTypedDict(TypedDict):
 
 
 class ToolFileChunk(BaseModel):
-    tool: BuiltInConnectors
+    tool: ToolFileChunkTool
 
     file_id: str
 
mistralai/models/toolreferencechunk.py CHANGED
@@ -4,15 +4,24 @@ from __future__ import annotations
 from .builtinconnectors import BuiltInConnectors
 from mistralai.types import BaseModel, Nullable, OptionalNullable, UNSET, UNSET_SENTINEL
 from pydantic import model_serializer
-from typing import Literal, Optional
-from typing_extensions import NotRequired, TypedDict
+from typing import Literal, Optional, Union
+from typing_extensions import NotRequired, TypeAliasType, TypedDict
 
 
 ToolReferenceChunkType = Literal["tool_reference"]
 
+ToolReferenceChunkToolTypedDict = TypeAliasType(
+    "ToolReferenceChunkToolTypedDict", Union[BuiltInConnectors, str]
+)
+
+
+ToolReferenceChunkTool = TypeAliasType(
+    "ToolReferenceChunkTool", Union[BuiltInConnectors, str]
+)
+
 
 class ToolReferenceChunkTypedDict(TypedDict):
-    tool: BuiltInConnectors
+    tool: ToolReferenceChunkToolTypedDict
     title: str
     type: NotRequired[ToolReferenceChunkType]
     url: NotRequired[Nullable[str]]
@@ -21,7 +30,7 @@ class ToolReferenceChunkTypedDict(TypedDict):
 
 
 class ToolReferenceChunk(BaseModel):
-    tool: BuiltInConnectors
+    tool: ToolReferenceChunkTool
 
     title: str
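
Both chunk models widen `tool` from the `BuiltInConnectors` enum to `Union[BuiltInConnectors, str]`, so responses referencing a connector this SDK version does not know about no longer fail validation. A small sketch ("web_search" is assumed here to be one of the built-in connector values; the second name is deliberately made up):

from mistralai.models import ToolReferenceChunk

known = ToolReferenceChunk(tool="web_search", title="A built-in connector")
custom = ToolReferenceChunk(tool="acme_internal_search", title="An unknown connector, still valid")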