mistralai 1.4.0__py3-none-any.whl → 1.5.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (43)
  1. mistralai/_version.py +3 -3
  2. mistralai/chat.py +87 -5
  3. mistralai/classifiers.py +27 -25
  4. mistralai/embeddings.py +2 -8
  5. mistralai/extra/README.md +56 -0
  6. mistralai/extra/__init__.py +5 -0
  7. mistralai/extra/struct_chat.py +41 -0
  8. mistralai/extra/tests/__init__.py +0 -0
  9. mistralai/extra/tests/test_struct_chat.py +103 -0
  10. mistralai/extra/tests/test_utils.py +162 -0
  11. mistralai/extra/utils/__init__.py +3 -0
  12. mistralai/extra/utils/_pydantic_helper.py +20 -0
  13. mistralai/extra/utils/response_format.py +24 -0
  14. mistralai/fim.py +5 -5
  15. mistralai/httpclient.py +50 -0
  16. mistralai/models/__init__.py +41 -16
  17. mistralai/models/assistantmessage.py +2 -0
  18. mistralai/models/chatcompletionrequest.py +3 -10
  19. mistralai/models/chatcompletionstreamrequest.py +3 -10
  20. mistralai/models/chatmoderationrequest.py +86 -0
  21. mistralai/models/classificationrequest.py +7 -36
  22. mistralai/models/contentchunk.py +8 -1
  23. mistralai/models/documenturlchunk.py +62 -0
  24. mistralai/models/embeddingrequest.py +1 -37
  25. mistralai/models/fimcompletionrequest.py +2 -3
  26. mistralai/models/fimcompletionstreamrequest.py +2 -3
  27. mistralai/models/jsonschema.py +55 -0
  28. mistralai/models/ocrimageobject.py +77 -0
  29. mistralai/models/ocrpagedimensions.py +25 -0
  30. mistralai/models/ocrpageobject.py +64 -0
  31. mistralai/models/ocrrequest.py +97 -0
  32. mistralai/models/ocrresponse.py +26 -0
  33. mistralai/models/ocrusageinfo.py +51 -0
  34. mistralai/models/prediction.py +4 -5
  35. mistralai/models/responseformat.py +36 -1
  36. mistralai/models/responseformats.py +1 -1
  37. mistralai/ocr.py +238 -0
  38. mistralai/sdk.py +15 -2
  39. {mistralai-1.4.0.dist-info → mistralai-1.5.1.dist-info}/METADATA +37 -1
  40. {mistralai-1.4.0.dist-info → mistralai-1.5.1.dist-info}/RECORD +42 -24
  41. {mistralai-1.4.0.dist-info → mistralai-1.5.1.dist-info}/WHEEL +1 -1
  42. mistralai/models/chatclassificationrequest.py +0 -113
  43. {mistralai-1.4.0.dist-info → mistralai-1.5.1.dist-info}/LICENSE +0 -0
@@ -0,0 +1,162 @@
1
+ from ..utils.response_format import (
2
+ pydantic_model_from_json,
3
+ response_format_from_pydantic_model,
4
+ rec_strict_json_schema,
5
+ )
6
+ from pydantic import BaseModel, ValidationError
7
+
8
+ from ...models import ResponseFormat, JSONSchema
9
+ from ...types.basemodel import Unset
10
+
11
+ import unittest
12
+
13
+
14
class Student(BaseModel):
    # Minimal flat model used by the tests to exercise pydantic_model_from_json.
    # NOTE: kept docstring-free on purpose — pydantic would surface a class
    # docstring as the schema "description" and change generated schemas.
    name: str
    age: int
17
+
18
+
19
class Explanation(BaseModel):
    # One reasoning step of a worked solution; nested inside MathDemonstration.
    # Docstring-free on purpose: a docstring would appear as "description" in
    # model_json_schema() and break the schema-equality assertions below.
    explanation: str
    output: str
22
+
23
+
24
class MathDemonstration(BaseModel):
    # Nested model (list of Explanation steps + final answer) whose generated
    # JSON schema is pinned by the mathdemo_* fixtures below.
    steps: list[Explanation]
    final_answer: str
27
+
28
+
29
# Expected (non-strict) JSON Schema for MathDemonstration as emitted by
# pydantic's model_json_schema(): the nested Explanation object lives in $defs
# and is referenced from the "steps" array items.
mathdemo_schema = {
    "$defs": {
        "Explanation": {
            "properties": {
                "explanation": {"title": "Explanation", "type": "string"},
                "output": {"title": "Output", "type": "string"},
            },
            "required": ["explanation", "output"],
            "title": "Explanation",
            "type": "object",
        }
    },
    "properties": {
        "steps": {
            "items": {"$ref": "#/$defs/Explanation"},
            "title": "Steps",
            "type": "array",
        },
        "final_answer": {"title": "Final Answer", "type": "string"},
    },
    "required": ["steps", "final_answer"],
    "title": "MathDemonstration",
    "type": "object",
}

# NOTE(review): dict.copy() is SHALLOW — the "$defs" mutation on the next line
# also reaches into mathdemo_schema itself; only the top-level
# "additionalProperties" key distinguishes the two dicts. The tests still pass
# because rec_strict_json_schema mutates its argument in place, but the
# aliasing is fragile — confirm a deepcopy was not intended.
mathdemo_strict_schema = mathdemo_schema.copy()
mathdemo_strict_schema["$defs"]["Explanation"]["additionalProperties"] = False  # type: ignore
mathdemo_strict_schema["additionalProperties"] = False

# Expected ResponseFormat produced by
# response_format_from_pydantic_model(MathDemonstration): strict json_schema
# mode wrapping the strictified schema above.
mathdemo_response_format = ResponseFormat(
    type="json_schema",
    json_schema=JSONSchema(
        name="MathDemonstration",
        schema_definition=mathdemo_strict_schema,
        description=Unset(),
        strict=True,
    ),
)
67
+
68
+
69
class TestResponseFormat(unittest.TestCase):
    """Unit tests for the extra.utils response-format helpers."""

    def test_pydantic_model_from_json(self):
        """Good data validates, extra keys are ignored, missing keys raise."""
        missing_json_data = {"name": "Jean Dupont"}
        good_json_data = {"name": "Jean Dupont", "age": 25}
        extra_json_data = {
            "name": "Jean Dupont",
            "age": 25,
            "extra_field": "extra_value",
        }
        # NOTE(review): this fixture is opaque payload — the arithmetic in the
        # last steps (-30 / 8 is not -4) is not checked by anything; only the
        # structural round-trip matters here.
        complex_json_data = {
            "final_answer": "x = -4",
            "steps": [
                {
                    "explanation": "Start with the given equation.",
                    "output": "8x + 7 = -23",
                },
                {
                    "explanation": "Subtract 7 from both sides to isolate the term with x.",
                    "output": "8x = -23 - 7",
                },
                {
                    "explanation": "Simplify the right side of the equation.",
                    "output": "8x = -30",
                },
                {
                    "explanation": "Divide both sides by 8 to solve for x.",
                    "output": "x = -30 / 8",
                },
                {
                    "explanation": "Simplify the fraction to get the final answer.",
                    "output": "x = -4",
                },
            ],
        }

        self.assertEqual(
            pydantic_model_from_json(good_json_data, Student),
            Student(name="Jean Dupont", age=25),
        )
        # Extra keys are dropped by pydantic's default validation behavior.
        self.assertEqual(
            pydantic_model_from_json(extra_json_data, Student),
            Student(name="Jean Dupont", age=25),
        )
        self.assertEqual(
            pydantic_model_from_json(complex_json_data, MathDemonstration),
            MathDemonstration(
                steps=[
                    Explanation(
                        explanation="Start with the given equation.",
                        output="8x + 7 = -23",
                    ),
                    Explanation(
                        explanation="Subtract 7 from both sides to isolate the term with x.",
                        output="8x = -23 - 7",
                    ),
                    Explanation(
                        explanation="Simplify the right side of the equation.",
                        output="8x = -30",
                    ),
                    Explanation(
                        explanation="Divide both sides by 8 to solve for x.",
                        output="x = -30 / 8",
                    ),
                    Explanation(
                        explanation="Simplify the fraction to get the final answer.",
                        output="x = -4",
                    ),
                ],
                final_answer="x = -4",
            ),
        )

        # Check it raises a validation error
        with self.assertRaises(ValidationError):
            pydantic_model_from_json(missing_json_data, Student)  # type: ignore

    def test_response_format_from_pydantic_model(self):
        """End-to-end: pydantic model -> strict json_schema ResponseFormat."""
        self.assertEqual(
            response_format_from_pydantic_model(MathDemonstration),
            mathdemo_response_format,
        )

    def test_rec_strict_json_schema(self):
        """Strictification adds additionalProperties=False; ints are rejected."""
        # `|` builds a new top-level dict, but it still shares the nested
        # dicts with mathdemo_schema (shallow merge).
        invalid_schema = mathdemo_schema | {"wrong_value": 1}
        # NOTE(review): rec_strict_json_schema mutates its argument in place,
        # so mathdemo_schema itself becomes strict after this call — the
        # assertion relies on that plus the shallow copy made at module scope.
        self.assertEqual(
            rec_strict_json_schema(mathdemo_schema), mathdemo_strict_schema
        )

        # The int value 1 is not a valid schema node and must raise.
        with self.assertRaises(ValueError):
            rec_strict_json_schema(invalid_schema)
160
+
161
if __name__ == "__main__":
    # Allow running this test module directly: `python test_utils.py`.
    unittest.main()
@@ -0,0 +1,3 @@
1
from .response_format import response_format_from_pydantic_model

# Public surface of mistralai.extra.utils; pydantic_model_from_json and
# rec_strict_json_schema stay importable but are not re-exported here.
__all__ = ["response_format_from_pydantic_model"]
@@ -0,0 +1,20 @@
1
+ from typing import Any
2
+
3
def rec_strict_json_schema(schema_node: Any) -> Any:
    """Recursively mark every object node in a JSON Schema as strict.

    Walks the schema in place, setting ``additionalProperties: False`` on
    every node whose ``type`` is ``"object"``, and returns the (mutated)
    node. Leaf strings and booleans pass through unchanged; any other
    scalar (e.g. an int) is rejected with ``ValueError``.
    """
    if isinstance(schema_node, (str, bool)):
        return schema_node
    if isinstance(schema_node, dict):
        if schema_node.get("type") == "object":
            schema_node["additionalProperties"] = False
        # Snapshot the keys: we rewrite values while walking the mapping.
        for key in list(schema_node):
            schema_node[key] = rec_strict_json_schema(schema_node[key])
        return schema_node
    if isinstance(schema_node, list):
        schema_node[:] = [rec_strict_json_schema(item) for item in schema_node]
        return schema_node
    raise ValueError(f"Unexpected type: {schema_node}")
@@ -0,0 +1,24 @@
1
+ from pydantic import BaseModel
2
+ from typing import TypeVar, Any, Type
3
+ from ...models import JSONSchema, ResponseFormat
4
+ from ._pydantic_helper import rec_strict_json_schema
5
+
6
+ CustomPydanticModel = TypeVar("CustomPydanticModel", bound=BaseModel)
7
+
8
+
9
def response_format_from_pydantic_model(
    model: type[CustomPydanticModel],
) -> ResponseFormat:
    """Build a strict ``json_schema`` ResponseFormat from a pydantic model class."""
    strict_schema = rec_strict_json_schema(model.model_json_schema())
    schema_payload = {
        "name": model.__name__,
        "schema": strict_schema,
        "strict": True,
    }
    return ResponseFormat(
        type="json_schema",
        json_schema=JSONSchema.model_validate(schema_payload),
    )
18
+
19
+
20
def pydantic_model_from_json(
    json_data: dict[str, Any], pydantic_model: Type[CustomPydanticModel]
) -> CustomPydanticModel:
    """Validate *json_data* against *pydantic_model* and return the instance.

    Raises pydantic.ValidationError when required fields are missing or of
    the wrong type; extra keys are ignored (pydantic default behavior).
    """
    return pydantic_model.model_validate(json_data)
mistralai/fim.py CHANGED
@@ -3,7 +3,7 @@
3
3
  from .basesdk import BaseSDK
4
4
  from mistralai import models, utils
5
5
  from mistralai._hooks import HookContext
6
- from mistralai.types import Nullable, OptionalNullable, UNSET
6
+ from mistralai.types import OptionalNullable, UNSET
7
7
  from mistralai.utils import eventstreaming, get_security_from_env
8
8
  from typing import Any, Mapping, Optional, Union
9
9
 
@@ -14,7 +14,7 @@ class Fim(BaseSDK):
14
14
  def complete(
15
15
  self,
16
16
  *,
17
- model: Nullable[str],
17
+ model: str,
18
18
  prompt: str,
19
19
  temperature: OptionalNullable[float] = UNSET,
20
20
  top_p: Optional[float] = 1,
@@ -143,7 +143,7 @@ class Fim(BaseSDK):
143
143
  async def complete_async(
144
144
  self,
145
145
  *,
146
- model: Nullable[str],
146
+ model: str,
147
147
  prompt: str,
148
148
  temperature: OptionalNullable[float] = UNSET,
149
149
  top_p: Optional[float] = 1,
@@ -272,7 +272,7 @@ class Fim(BaseSDK):
272
272
  def stream(
273
273
  self,
274
274
  *,
275
- model: Nullable[str],
275
+ model: str,
276
276
  prompt: str,
277
277
  temperature: OptionalNullable[float] = UNSET,
278
278
  top_p: Optional[float] = 1,
@@ -407,7 +407,7 @@ class Fim(BaseSDK):
407
407
  async def stream_async(
408
408
  self,
409
409
  *,
410
- model: Nullable[str],
410
+ model: str,
411
411
  prompt: str,
412
412
  temperature: OptionalNullable[float] = UNSET,
413
413
  top_p: Optional[float] = 1,
mistralai/httpclient.py CHANGED
@@ -1,6 +1,8 @@
1
1
  """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
2
2
 
3
3
  # pyright: reportReturnType = false
4
+ import asyncio
5
+ from concurrent.futures import ThreadPoolExecutor
4
6
  from typing_extensions import Protocol, runtime_checkable
5
7
  import httpx
6
8
  from typing import Any, Optional, Union
@@ -82,3 +84,51 @@ class AsyncHttpClient(Protocol):
82
84
 
83
85
  async def aclose(self) -> None:
84
86
  pass
87
+
88
+
89
class ClientOwner(Protocol):
    """Structural type for SDK objects that own the underlying HTTP clients."""

    client: Union[HttpClient, None]
    async_client: Union[AsyncHttpClient, None]
92
+
93
+
94
def close_clients(
    owner: ClientOwner,
    sync_client: Union[HttpClient, None],
    async_client: Union[AsyncHttpClient, None],
) -> None:
    """
    A finalizer function that is meant to be used with weakref.finalize to close
    httpx clients used by an SDK so that underlying resources can be garbage
    collected.
    """

    # Unset the client/async_client properties so there are no more references
    # to them from the owning SDK instance and they can be reaped.
    owner.client = None
    owner.async_client = None

    if sync_client is not None:
        try:
            sync_client.close()
        except Exception:
            # Best-effort cleanup: a finalizer must never raise.
            pass

    if async_client is not None:
        # Detect whether we are currently inside a running event loop —
        # get_running_loop() raises RuntimeError when there is none.
        is_async = False
        try:
            asyncio.get_running_loop()
            is_async = True
        except RuntimeError:
            pass

        try:
            # If this function is called in an async loop then start another
            # loop in a separate thread to close the async http client.
            # (asyncio.run() cannot be invoked from a thread that already has
            # a running loop, hence the one-shot ThreadPoolExecutor detour.)
            if is_async:
                with ThreadPoolExecutor(max_workers=1) as executor:
                    future = executor.submit(asyncio.run, async_client.aclose())
                    future.result()
            else:
                asyncio.run(async_client.aclose())
        except Exception:
            # Swallow everything: failing to close during GC is acceptable.
            pass
@@ -39,16 +39,6 @@ from .batchjobin import BatchJobIn, BatchJobInTypedDict
39
39
  from .batchjobout import BatchJobOut, BatchJobOutObject, BatchJobOutTypedDict
40
40
  from .batchjobsout import BatchJobsOut, BatchJobsOutObject, BatchJobsOutTypedDict
41
41
  from .batchjobstatus import BatchJobStatus
42
- from .chatclassificationrequest import (
43
- ChatClassificationRequest,
44
- ChatClassificationRequestInputs,
45
- ChatClassificationRequestInputsTypedDict,
46
- ChatClassificationRequestTypedDict,
47
- One,
48
- OneTypedDict,
49
- Two,
50
- TwoTypedDict,
51
- )
52
42
  from .chatcompletionchoice import (
53
43
  ChatCompletionChoice,
54
44
  ChatCompletionChoiceTypedDict,
@@ -78,6 +68,16 @@ from .chatcompletionstreamrequest import (
78
68
  ChatCompletionStreamRequestToolChoiceTypedDict,
79
69
  ChatCompletionStreamRequestTypedDict,
80
70
  )
71
+ from .chatmoderationrequest import (
72
+ ChatModerationRequest,
73
+ ChatModerationRequestInputs,
74
+ ChatModerationRequestInputsTypedDict,
75
+ ChatModerationRequestTypedDict,
76
+ One,
77
+ OneTypedDict,
78
+ Two,
79
+ TwoTypedDict,
80
+ )
81
81
  from .checkpointout import CheckpointOut, CheckpointOutTypedDict
82
82
  from .classificationobject import ClassificationObject, ClassificationObjectTypedDict
83
83
  from .classificationrequest import (
@@ -115,6 +115,7 @@ from .detailedjobout import (
115
115
  DetailedJobOutStatus,
116
116
  DetailedJobOutTypedDict,
117
117
  )
118
+ from .documenturlchunk import DocumentURLChunk, DocumentURLChunkTypedDict
118
119
  from .embeddingrequest import (
119
120
  EmbeddingRequest,
120
121
  EmbeddingRequestTypedDict,
@@ -264,6 +265,7 @@ from .jobs_api_routes_fine_tuning_update_fine_tuned_modelop import (
264
265
  JobsAPIRoutesFineTuningUpdateFineTunedModelRequestTypedDict,
265
266
  )
266
267
  from .jobsout import JobsOut, JobsOutObject, JobsOutTypedDict
268
+ from .jsonschema import JSONSchema, JSONSchemaTypedDict
267
269
  from .legacyjobmetadataout import (
268
270
  LegacyJobMetadataOut,
269
271
  LegacyJobMetadataOutObject,
@@ -273,7 +275,13 @@ from .listfilesout import ListFilesOut, ListFilesOutTypedDict
273
275
  from .metricout import MetricOut, MetricOutTypedDict
274
276
  from .modelcapabilities import ModelCapabilities, ModelCapabilitiesTypedDict
275
277
  from .modellist import Data, DataTypedDict, ModelList, ModelListTypedDict
276
- from .prediction import Prediction, PredictionType, PredictionTypedDict
278
+ from .ocrimageobject import OCRImageObject, OCRImageObjectTypedDict
279
+ from .ocrpagedimensions import OCRPageDimensions, OCRPageDimensionsTypedDict
280
+ from .ocrpageobject import OCRPageObject, OCRPageObjectTypedDict
281
+ from .ocrrequest import Document, DocumentTypedDict, OCRRequest, OCRRequestTypedDict
282
+ from .ocrresponse import OCRResponse, OCRResponseTypedDict
283
+ from .ocrusageinfo import OCRUsageInfo, OCRUsageInfoTypedDict
284
+ from .prediction import Prediction, PredictionTypedDict
277
285
  from .referencechunk import ReferenceChunk, ReferenceChunkType, ReferenceChunkTypedDict
278
286
  from .responseformat import ResponseFormat, ResponseFormatTypedDict
279
287
  from .responseformats import ResponseFormats
@@ -385,10 +393,6 @@ __all__ = [
385
393
  "BatchJobsOut",
386
394
  "BatchJobsOutObject",
387
395
  "BatchJobsOutTypedDict",
388
- "ChatClassificationRequest",
389
- "ChatClassificationRequestInputs",
390
- "ChatClassificationRequestInputsTypedDict",
391
- "ChatClassificationRequestTypedDict",
392
396
  "ChatCompletionChoice",
393
397
  "ChatCompletionChoiceTypedDict",
394
398
  "ChatCompletionRequest",
@@ -405,6 +409,10 @@ __all__ = [
405
409
  "ChatCompletionStreamRequestToolChoice",
406
410
  "ChatCompletionStreamRequestToolChoiceTypedDict",
407
411
  "ChatCompletionStreamRequestTypedDict",
412
+ "ChatModerationRequest",
413
+ "ChatModerationRequestInputs",
414
+ "ChatModerationRequestInputsTypedDict",
415
+ "ChatModerationRequestTypedDict",
408
416
  "CheckpointOut",
409
417
  "CheckpointOutTypedDict",
410
418
  "ClassificationObject",
@@ -444,6 +452,10 @@ __all__ = [
444
452
  "DetailedJobOutRepositoriesTypedDict",
445
453
  "DetailedJobOutStatus",
446
454
  "DetailedJobOutTypedDict",
455
+ "Document",
456
+ "DocumentTypedDict",
457
+ "DocumentURLChunk",
458
+ "DocumentURLChunkTypedDict",
447
459
  "EmbeddingRequest",
448
460
  "EmbeddingRequestTypedDict",
449
461
  "EmbeddingResponse",
@@ -515,6 +527,8 @@ __all__ = [
515
527
  "InputsTypedDict",
516
528
  "Integrations",
517
529
  "IntegrationsTypedDict",
530
+ "JSONSchema",
531
+ "JSONSchemaTypedDict",
518
532
  "JobIn",
519
533
  "JobInIntegrations",
520
534
  "JobInIntegrationsTypedDict",
@@ -565,11 +579,22 @@ __all__ = [
565
579
  "ModelCapabilitiesTypedDict",
566
580
  "ModelList",
567
581
  "ModelListTypedDict",
582
+ "OCRImageObject",
583
+ "OCRImageObjectTypedDict",
584
+ "OCRPageDimensions",
585
+ "OCRPageDimensionsTypedDict",
586
+ "OCRPageObject",
587
+ "OCRPageObjectTypedDict",
588
+ "OCRRequest",
589
+ "OCRRequestTypedDict",
590
+ "OCRResponse",
591
+ "OCRResponseTypedDict",
592
+ "OCRUsageInfo",
593
+ "OCRUsageInfoTypedDict",
568
594
  "Object",
569
595
  "One",
570
596
  "OneTypedDict",
571
597
  "Prediction",
572
- "PredictionType",
573
598
  "PredictionTypedDict",
574
599
  "QueryParamStatus",
575
600
  "ReferenceChunk",
@@ -26,6 +26,7 @@ class AssistantMessageTypedDict(TypedDict):
26
26
  content: NotRequired[Nullable[AssistantMessageContentTypedDict]]
27
27
  tool_calls: NotRequired[Nullable[List[ToolCallTypedDict]]]
28
28
  prefix: NotRequired[bool]
29
+ r"""Set this to `true` when adding an assistant message as prefix to condition the model response. The role of the prefix message is to force the model to start its answer by the content of the message."""
29
30
  role: NotRequired[AssistantMessageRole]
30
31
 
31
32
 
@@ -35,6 +36,7 @@ class AssistantMessage(BaseModel):
35
36
  tool_calls: OptionalNullable[List[ToolCall]] = UNSET
36
37
 
37
38
  prefix: Optional[bool] = False
39
+ r"""Set this to `true` when adding an assistant message as prefix to condition the model response. The role of the prefix message is to force the model to start its answer by the content of the message."""
38
40
 
39
41
  role: Optional[AssistantMessageRole] = "assistant"
40
42
 
@@ -59,7 +59,7 @@ ChatCompletionRequestToolChoice = TypeAliasType(
59
59
 
60
60
 
61
61
  class ChatCompletionRequestTypedDict(TypedDict):
62
- model: Nullable[str]
62
+ model: str
63
63
  r"""ID of the model to use. You can use the [List Available Models](/api/#tag/models/operation/list_models_v1_models_get) API to see all of your available models, or see our [Model overview](/models) for model descriptions."""
64
64
  messages: List[MessagesTypedDict]
65
65
  r"""The prompt(s) to generate completions for, encoded as a list of dict with role and content."""
@@ -90,7 +90,7 @@ class ChatCompletionRequestTypedDict(TypedDict):
90
90
 
91
91
 
92
92
  class ChatCompletionRequest(BaseModel):
93
- model: Nullable[str]
93
+ model: str
94
94
  r"""ID of the model to use. You can use the [List Available Models](/api/#tag/models/operation/list_models_v1_models_get) API to see all of your available models, or see our [Model overview](/models) for model descriptions."""
95
95
 
96
96
  messages: List[Messages]
@@ -152,14 +152,7 @@ class ChatCompletionRequest(BaseModel):
152
152
  "prediction",
153
153
  "safe_prompt",
154
154
  ]
155
- nullable_fields = [
156
- "model",
157
- "temperature",
158
- "max_tokens",
159
- "random_seed",
160
- "tools",
161
- "n",
162
- ]
155
+ nullable_fields = ["temperature", "max_tokens", "random_seed", "tools", "n"]
163
156
  null_default_fields = []
164
157
 
165
158
  serialized = handler(self)
@@ -63,7 +63,7 @@ ChatCompletionStreamRequestToolChoice = TypeAliasType(
63
63
 
64
64
 
65
65
  class ChatCompletionStreamRequestTypedDict(TypedDict):
66
- model: Nullable[str]
66
+ model: str
67
67
  r"""ID of the model to use. You can use the [List Available Models](/api/#tag/models/operation/list_models_v1_models_get) API to see all of your available models, or see our [Model overview](/models) for model descriptions."""
68
68
  messages: List[ChatCompletionStreamRequestMessagesTypedDict]
69
69
  r"""The prompt(s) to generate completions for, encoded as a list of dict with role and content."""
@@ -93,7 +93,7 @@ class ChatCompletionStreamRequestTypedDict(TypedDict):
93
93
 
94
94
 
95
95
  class ChatCompletionStreamRequest(BaseModel):
96
- model: Nullable[str]
96
+ model: str
97
97
  r"""ID of the model to use. You can use the [List Available Models](/api/#tag/models/operation/list_models_v1_models_get) API to see all of your available models, or see our [Model overview](/models) for model descriptions."""
98
98
 
99
99
  messages: List[ChatCompletionStreamRequestMessages]
@@ -154,14 +154,7 @@ class ChatCompletionStreamRequest(BaseModel):
154
154
  "prediction",
155
155
  "safe_prompt",
156
156
  ]
157
- nullable_fields = [
158
- "model",
159
- "temperature",
160
- "max_tokens",
161
- "random_seed",
162
- "tools",
163
- "n",
164
- ]
157
+ nullable_fields = ["temperature", "max_tokens", "random_seed", "tools", "n"]
165
158
  null_default_fields = []
166
159
 
167
160
  serialized = handler(self)
@@ -0,0 +1,86 @@
1
+ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
2
+
3
+ from __future__ import annotations
4
+ from .assistantmessage import AssistantMessage, AssistantMessageTypedDict
5
+ from .systemmessage import SystemMessage, SystemMessageTypedDict
6
+ from .toolmessage import ToolMessage, ToolMessageTypedDict
7
+ from .usermessage import UserMessage, UserMessageTypedDict
8
+ from mistralai.types import BaseModel
9
+ from mistralai.utils import get_discriminator
10
+ import pydantic
11
+ from pydantic import Discriminator, Tag
12
+ from typing import List, Optional, Union
13
+ from typing_extensions import Annotated, NotRequired, TypeAliasType, TypedDict
14
+
15
+
16
# NOTE(review): "One"/"Two" (and their TypedDicts) are structurally identical
# unions over the four chat message roles; the generator emits one alias per
# arm of the inputs union below (List[One] vs List[List[Two]]).
TwoTypedDict = TypeAliasType(
    "TwoTypedDict",
    Union[
        SystemMessageTypedDict,
        UserMessageTypedDict,
        AssistantMessageTypedDict,
        ToolMessageTypedDict,
    ],
)


# Runtime union: pydantic selects the concrete message class via the "role"
# discriminator field on the incoming payload.
Two = Annotated[
    Union[
        Annotated[AssistantMessage, Tag("assistant")],
        Annotated[SystemMessage, Tag("system")],
        Annotated[ToolMessage, Tag("tool")],
        Annotated[UserMessage, Tag("user")],
    ],
    Discriminator(lambda m: get_discriminator(m, "role", "role")),
]


OneTypedDict = TypeAliasType(
    "OneTypedDict",
    Union[
        SystemMessageTypedDict,
        UserMessageTypedDict,
        AssistantMessageTypedDict,
        ToolMessageTypedDict,
    ],
)


One = Annotated[
    Union[
        Annotated[AssistantMessage, Tag("assistant")],
        Annotated[SystemMessage, Tag("system")],
        Annotated[ToolMessage, Tag("tool")],
        Annotated[UserMessage, Tag("user")],
    ],
    Discriminator(lambda m: get_discriminator(m, "role", "role")),
]


# Either a single conversation (list of messages) or a batch of conversations
# (list of lists of messages).
ChatModerationRequestInputsTypedDict = TypeAliasType(
    "ChatModerationRequestInputsTypedDict",
    Union[List[OneTypedDict], List[List[TwoTypedDict]]],
)
r"""Chat to classify"""


ChatModerationRequestInputs = TypeAliasType(
    "ChatModerationRequestInputs", Union[List[One], List[List[Two]]]
)
r"""Chat to classify"""
71
+
72
+
73
class ChatModerationRequestTypedDict(TypedDict):
    # ID of the moderation model to use.
    model: str
    inputs: ChatModerationRequestInputsTypedDict
    r"""Chat to classify"""
    # When true, over-long inputs are truncated instead of rejected.
    truncate_for_context_length: NotRequired[bool]


class ChatModerationRequest(BaseModel):
    # ID of the moderation model to use.
    model: str

    # Serialized on the wire under the singular name "input" (pydantic alias).
    inputs: Annotated[ChatModerationRequestInputs, pydantic.Field(alias="input")]
    r"""Chat to classify"""

    # When true, over-long inputs are truncated instead of rejected.
    truncate_for_context_length: Optional[bool] = False
@@ -1,11 +1,10 @@
1
1
  """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
2
2
 
3
3
  from __future__ import annotations
4
- from mistralai.types import BaseModel, Nullable, OptionalNullable, UNSET, UNSET_SENTINEL
4
+ from mistralai.types import BaseModel
5
5
  import pydantic
6
- from pydantic import model_serializer
7
6
  from typing import List, Union
8
- from typing_extensions import Annotated, NotRequired, TypeAliasType, TypedDict
7
+ from typing_extensions import Annotated, TypeAliasType, TypedDict
9
8
 
10
9
 
11
10
  ClassificationRequestInputsTypedDict = TypeAliasType(
@@ -21,43 +20,15 @@ r"""Text to classify."""
21
20
 
22
21
 
23
22
  class ClassificationRequestTypedDict(TypedDict):
23
+ model: str
24
+ r"""ID of the model to use."""
24
25
  inputs: ClassificationRequestInputsTypedDict
25
26
  r"""Text to classify."""
26
- model: NotRequired[Nullable[str]]
27
27
 
28
28
 
29
29
  class ClassificationRequest(BaseModel):
30
+ model: str
31
+ r"""ID of the model to use."""
32
+
30
33
  inputs: Annotated[ClassificationRequestInputs, pydantic.Field(alias="input")]
31
34
  r"""Text to classify."""
32
-
33
- model: OptionalNullable[str] = UNSET
34
-
35
- @model_serializer(mode="wrap")
36
- def serialize_model(self, handler):
37
- optional_fields = ["model"]
38
- nullable_fields = ["model"]
39
- null_default_fields = []
40
-
41
- serialized = handler(self)
42
-
43
- m = {}
44
-
45
- for n, f in self.model_fields.items():
46
- k = f.alias or n
47
- val = serialized.get(k)
48
- serialized.pop(k, None)
49
-
50
- optional_nullable = k in optional_fields and k in nullable_fields
51
- is_set = (
52
- self.__pydantic_fields_set__.intersection({n})
53
- or k in null_default_fields
54
- ) # pylint: disable=no-member
55
-
56
- if val is not None and val != UNSET_SENTINEL:
57
- m[k] = val
58
- elif val != UNSET_SENTINEL and (
59
- not k in optional_fields or (optional_nullable and is_set)
60
- ):
61
- m[k] = val
62
-
63
- return m