mistralai 1.6.0__py3-none-any.whl → 1.7.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (58)
  1. mistralai/_version.py +3 -3
  2. mistralai/classifiers.py +431 -19
  3. mistralai/embeddings.py +6 -2
  4. mistralai/extra/utils/_pydantic_helper.py +2 -1
  5. mistralai/jobs.py +84 -38
  6. mistralai/mistral_jobs.py +2 -2
  7. mistralai/models/__init__.py +197 -46
  8. mistralai/models/archiveftmodelout.py +3 -11
  9. mistralai/models/batchjobout.py +3 -9
  10. mistralai/models/batchjobsout.py +3 -9
  11. mistralai/models/chatclassificationrequest.py +20 -0
  12. mistralai/models/chatmoderationrequest.py +4 -7
  13. mistralai/models/classificationresponse.py +12 -9
  14. mistralai/models/classificationtargetresult.py +14 -0
  15. mistralai/models/classifierdetailedjobout.py +156 -0
  16. mistralai/models/classifierftmodelout.py +101 -0
  17. mistralai/models/classifierjobout.py +165 -0
  18. mistralai/models/classifiertargetin.py +55 -0
  19. mistralai/models/classifiertargetout.py +24 -0
  20. mistralai/models/classifiertrainingparameters.py +73 -0
  21. mistralai/models/classifiertrainingparametersin.py +85 -0
  22. mistralai/models/{detailedjobout.py → completiondetailedjobout.py} +34 -34
  23. mistralai/models/{ftmodelout.py → completionftmodelout.py} +12 -12
  24. mistralai/models/{jobout.py → completionjobout.py} +25 -24
  25. mistralai/models/{trainingparameters.py → completiontrainingparameters.py} +7 -7
  26. mistralai/models/{trainingparametersin.py → completiontrainingparametersin.py} +7 -7
  27. mistralai/models/embeddingrequest.py +6 -4
  28. mistralai/models/finetuneablemodeltype.py +7 -0
  29. mistralai/models/ftclassifierlossfunction.py +7 -0
  30. mistralai/models/ftmodelcapabilitiesout.py +3 -0
  31. mistralai/models/githubrepositoryin.py +3 -11
  32. mistralai/models/githubrepositoryout.py +3 -11
  33. mistralai/models/inputs.py +54 -0
  34. mistralai/models/instructrequest.py +42 -0
  35. mistralai/models/jobin.py +52 -12
  36. mistralai/models/jobs_api_routes_batch_get_batch_jobsop.py +3 -3
  37. mistralai/models/jobs_api_routes_fine_tuning_cancel_fine_tuning_jobop.py +29 -2
  38. mistralai/models/jobs_api_routes_fine_tuning_create_fine_tuning_jobop.py +21 -4
  39. mistralai/models/jobs_api_routes_fine_tuning_get_fine_tuning_jobop.py +29 -2
  40. mistralai/models/jobs_api_routes_fine_tuning_get_fine_tuning_jobsop.py +8 -0
  41. mistralai/models/jobs_api_routes_fine_tuning_start_fine_tuning_jobop.py +29 -2
  42. mistralai/models/jobs_api_routes_fine_tuning_update_fine_tuned_modelop.py +28 -2
  43. mistralai/models/jobsout.py +24 -13
  44. mistralai/models/legacyjobmetadataout.py +3 -12
  45. mistralai/models/{classificationobject.py → moderationobject.py} +6 -6
  46. mistralai/models/moderationresponse.py +21 -0
  47. mistralai/models/ocrimageobject.py +7 -1
  48. mistralai/models/ocrrequest.py +15 -0
  49. mistralai/models/ocrresponse.py +38 -2
  50. mistralai/models/unarchiveftmodelout.py +3 -11
  51. mistralai/models/wandbintegration.py +3 -11
  52. mistralai/models/wandbintegrationout.py +8 -13
  53. mistralai/models_.py +10 -4
  54. mistralai/ocr.py +28 -0
  55. {mistralai-1.6.0.dist-info → mistralai-1.7.1.dist-info}/METADATA +3 -1
  56. {mistralai-1.6.0.dist-info → mistralai-1.7.1.dist-info}/RECORD +58 -44
  57. {mistralai-1.6.0.dist-info → mistralai-1.7.1.dist-info}/WHEEL +1 -1
  58. {mistralai-1.6.0.dist-info → mistralai-1.7.1.dist-info}/LICENSE +0 -0
mistralai/models/instructrequest.py ADDED
@@ -0,0 +1,42 @@
+ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+ from __future__ import annotations
+ from .assistantmessage import AssistantMessage, AssistantMessageTypedDict
+ from .systemmessage import SystemMessage, SystemMessageTypedDict
+ from .toolmessage import ToolMessage, ToolMessageTypedDict
+ from .usermessage import UserMessage, UserMessageTypedDict
+ from mistralai.types import BaseModel
+ from mistralai.utils import get_discriminator
+ from pydantic import Discriminator, Tag
+ from typing import List, Union
+ from typing_extensions import Annotated, TypeAliasType, TypedDict
+
+
+ InstructRequestMessagesTypedDict = TypeAliasType(
+     "InstructRequestMessagesTypedDict",
+     Union[
+         SystemMessageTypedDict,
+         UserMessageTypedDict,
+         AssistantMessageTypedDict,
+         ToolMessageTypedDict,
+     ],
+ )
+
+
+ InstructRequestMessages = Annotated[
+     Union[
+         Annotated[AssistantMessage, Tag("assistant")],
+         Annotated[SystemMessage, Tag("system")],
+         Annotated[ToolMessage, Tag("tool")],
+         Annotated[UserMessage, Tag("user")],
+     ],
+     Discriminator(lambda m: get_discriminator(m, "role", "role")),
+ ]
+
+
+ class InstructRequestTypedDict(TypedDict):
+     messages: List[InstructRequestMessagesTypedDict]
+
+
+ class InstructRequest(BaseModel):
+     messages: List[InstructRequestMessages]
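The new InstructRequest wraps the chat message union and discriminates on each message's role field. A minimal sketch of building one, assuming these classes are re-exported from mistralai.models and that SystemMessage/UserMessage accept a content keyword as in the chat API:

from mistralai.models import InstructRequest, SystemMessage, UserMessage

# Each entry is tagged by its "role", which drives the Discriminator above.
sample = InstructRequest(
    messages=[
        SystemMessage(content="You are a terse assistant."),
        UserMessage(content="Summarise this release in one sentence."),
    ]
)
print(sample.model_dump(exclude_none=True))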
mistralai/models/jobin.py CHANGED
@@ -1,14 +1,23 @@
  """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

  from __future__ import annotations
+ from .classifiertargetin import ClassifierTargetIn, ClassifierTargetInTypedDict
+ from .classifiertrainingparametersin import (
+     ClassifierTrainingParametersIn,
+     ClassifierTrainingParametersInTypedDict,
+ )
+ from .completiontrainingparametersin import (
+     CompletionTrainingParametersIn,
+     CompletionTrainingParametersInTypedDict,
+ )
+ from .finetuneablemodeltype import FineTuneableModelType
  from .githubrepositoryin import GithubRepositoryIn, GithubRepositoryInTypedDict
  from .trainingfile import TrainingFile, TrainingFileTypedDict
- from .trainingparametersin import TrainingParametersIn, TrainingParametersInTypedDict
  from .wandbintegration import WandbIntegration, WandbIntegrationTypedDict
  from mistralai.types import BaseModel, Nullable, OptionalNullable, UNSET, UNSET_SENTINEL
  from pydantic import model_serializer
- from typing import List, Optional
- from typing_extensions import NotRequired, TypedDict
+ from typing import List, Optional, Union
+ from typing_extensions import NotRequired, TypeAliasType, TypedDict


  JobInIntegrationsTypedDict = WandbIntegrationTypedDict
@@ -17,6 +26,20 @@ JobInIntegrationsTypedDict = WandbIntegrationTypedDict
  JobInIntegrations = WandbIntegration


+ HyperparametersTypedDict = TypeAliasType(
+     "HyperparametersTypedDict",
+     Union[
+         ClassifierTrainingParametersInTypedDict, CompletionTrainingParametersInTypedDict
+     ],
+ )
+
+
+ Hyperparameters = TypeAliasType(
+     "Hyperparameters",
+     Union[ClassifierTrainingParametersIn, CompletionTrainingParametersIn],
+ )
+
+
  JobInRepositoriesTypedDict = GithubRepositoryInTypedDict


@@ -26,8 +49,7 @@ JobInRepositories = GithubRepositoryIn
  class JobInTypedDict(TypedDict):
      model: str
      r"""The name of the model to fine-tune."""
-     hyperparameters: TrainingParametersInTypedDict
-     r"""The fine-tuning hyperparameter settings used in a fine-tune job."""
+     hyperparameters: HyperparametersTypedDict
      training_files: NotRequired[List[TrainingFileTypedDict]]
      validation_files: NotRequired[Nullable[List[str]]]
      r"""A list containing the IDs of uploaded files that contain validation data. If you provide these files, the data is used to generate validation metrics periodically during fine-tuning. These metrics can be viewed in `checkpoints` when getting the status of a running fine-tuning job. The same data should not be present in both train and validation files."""
@@ -35,17 +57,19 @@ class JobInTypedDict(TypedDict):
      r"""A string that will be added to your fine-tuning model name. For example, a suffix of \"my-great-model\" would produce a model name like `ft:open-mistral-7b:my-great-model:xxx...`"""
      integrations: NotRequired[Nullable[List[JobInIntegrationsTypedDict]]]
      r"""A list of integrations to enable for your fine-tuning job."""
-     repositories: NotRequired[List[JobInRepositoriesTypedDict]]
      auto_start: NotRequired[bool]
      r"""This field will be required in a future release."""
+     invalid_sample_skip_percentage: NotRequired[float]
+     job_type: NotRequired[Nullable[FineTuneableModelType]]
+     repositories: NotRequired[Nullable[List[JobInRepositoriesTypedDict]]]
+     classifier_targets: NotRequired[Nullable[List[ClassifierTargetInTypedDict]]]


  class JobIn(BaseModel):
      model: str
      r"""The name of the model to fine-tune."""

-     hyperparameters: TrainingParametersIn
-     r"""The fine-tuning hyperparameter settings used in a fine-tune job."""
+     hyperparameters: Hyperparameters

      training_files: Optional[List[TrainingFile]] = None

@@ -58,11 +82,17 @@ class JobIn(BaseModel):
      integrations: OptionalNullable[List[JobInIntegrations]] = UNSET
      r"""A list of integrations to enable for your fine-tuning job."""

-     repositories: Optional[List[JobInRepositories]] = None
-
      auto_start: Optional[bool] = None
      r"""This field will be required in a future release."""

+     invalid_sample_skip_percentage: Optional[float] = 0
+
+     job_type: OptionalNullable[FineTuneableModelType] = UNSET
+
+     repositories: OptionalNullable[List[JobInRepositories]] = UNSET
+
+     classifier_targets: OptionalNullable[List[ClassifierTargetIn]] = UNSET
+
      @model_serializer(mode="wrap")
      def serialize_model(self, handler):
          optional_fields = [
@@ -70,10 +100,20 @@ class JobIn(BaseModel):
              "validation_files",
              "suffix",
              "integrations",
-             "repositories",
              "auto_start",
+             "invalid_sample_skip_percentage",
+             "job_type",
+             "repositories",
+             "classifier_targets",
+         ]
+         nullable_fields = [
+             "validation_files",
+             "suffix",
+             "integrations",
+             "job_type",
+             "repositories",
+             "classifier_targets",
          ]
-         nullable_fields = ["validation_files", "suffix", "integrations"]
          null_default_fields = []

          serialized = handler(self)
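JobIn now accepts either training-parameter class through the Hyperparameters union and gains job_type, invalid_sample_skip_percentage, and classifier_targets. A hedged sketch of a classifier job body, assuming these classes are re-exported from mistralai.models, that "classifier" is a valid FineTuneableModelType literal, and that the field names used on ClassifierTrainingParametersIn and ClassifierTargetIn below exist:

from mistralai.models import ClassifierTargetIn, ClassifierTrainingParametersIn, JobIn

job = JobIn(
    model="ministral-3b-latest",  # assumed model name
    job_type="classifier",
    hyperparameters=ClassifierTrainingParametersIn(
        training_steps=100,   # assumed field name
        learning_rate=1e-4,   # assumed field name
    ),
    classifier_targets=[
        # name/labels are assumed field names, for illustration only
        ClassifierTargetIn(name="sentiment", labels=["positive", "negative"]),
    ],
    auto_start=False,
)
print(job.model_dump(exclude_unset=True))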
mistralai/models/jobs_api_routes_batch_get_batch_jobsop.py CHANGED
@@ -6,7 +6,7 @@ from datetime import datetime
  from mistralai.types import BaseModel, Nullable, OptionalNullable, UNSET, UNSET_SENTINEL
  from mistralai.utils import FieldMetadata, QueryParamMetadata
  from pydantic import model_serializer
- from typing import Any, Dict, Optional
+ from typing import Any, Dict, List, Optional
  from typing_extensions import Annotated, NotRequired, TypedDict


@@ -17,7 +17,7 @@ class JobsAPIRoutesBatchGetBatchJobsRequestTypedDict(TypedDict):
      metadata: NotRequired[Nullable[Dict[str, Any]]]
      created_after: NotRequired[Nullable[datetime]]
      created_by_me: NotRequired[bool]
-     status: NotRequired[Nullable[BatchJobStatus]]
+     status: NotRequired[Nullable[List[BatchJobStatus]]]


  class JobsAPIRoutesBatchGetBatchJobsRequest(BaseModel):
@@ -52,7 +52,7 @@ class JobsAPIRoutesBatchGetBatchJobsRequest(BaseModel):
      ] = False

      status: Annotated[
-         OptionalNullable[BatchJobStatus],
+         OptionalNullable[List[BatchJobStatus]],
          FieldMetadata(query=QueryParamMetadata(style="form", explode=True)),
      ] = UNSET

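The batch job listing filter now takes a list of statuses instead of a single value. A small sketch building the request model directly; the status strings are assumed to be valid BatchJobStatus members:

from mistralai.models import JobsAPIRoutesBatchGetBatchJobsRequest

req = JobsAPIRoutesBatchGetBatchJobsRequest(
    created_by_me=True,
    status=["QUEUED", "RUNNING"],  # assumed BatchJobStatus values
)
print(req.model_dump(exclude_none=True))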
mistralai/models/jobs_api_routes_fine_tuning_cancel_fine_tuning_jobop.py CHANGED
@@ -1,9 +1,19 @@
  """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

  from __future__ import annotations
+ from .classifierdetailedjobout import (
+     ClassifierDetailedJobOut,
+     ClassifierDetailedJobOutTypedDict,
+ )
+ from .completiondetailedjobout import (
+     CompletionDetailedJobOut,
+     CompletionDetailedJobOutTypedDict,
+ )
  from mistralai.types import BaseModel
- from mistralai.utils import FieldMetadata, PathParamMetadata
- from typing_extensions import Annotated, TypedDict
+ from mistralai.utils import FieldMetadata, PathParamMetadata, get_discriminator
+ from pydantic import Discriminator, Tag
+ from typing import Union
+ from typing_extensions import Annotated, TypeAliasType, TypedDict


  class JobsAPIRoutesFineTuningCancelFineTuningJobRequestTypedDict(TypedDict):
@@ -16,3 +26,20 @@ class JobsAPIRoutesFineTuningCancelFineTuningJobRequest(BaseModel):
          str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False))
      ]
      r"""The ID of the job to cancel."""
+
+
+ JobsAPIRoutesFineTuningCancelFineTuningJobResponseTypedDict = TypeAliasType(
+     "JobsAPIRoutesFineTuningCancelFineTuningJobResponseTypedDict",
+     Union[CompletionDetailedJobOutTypedDict, ClassifierDetailedJobOutTypedDict],
+ )
+ r"""OK"""
+
+
+ JobsAPIRoutesFineTuningCancelFineTuningJobResponse = Annotated[
+     Union[
+         Annotated[ClassifierDetailedJobOut, Tag("classifier")],
+         Annotated[CompletionDetailedJobOut, Tag("completion")],
+     ],
+     Discriminator(lambda m: get_discriminator(m, "job_type", "job_type")),
+ ]
+ r"""OK"""
mistralai/models/jobs_api_routes_fine_tuning_create_fine_tuning_jobop.py CHANGED
@@ -1,21 +1,38 @@
  """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

  from __future__ import annotations
- from .jobout import JobOut, JobOutTypedDict
+ from .classifierjobout import ClassifierJobOut, ClassifierJobOutTypedDict
+ from .completionjobout import CompletionJobOut, CompletionJobOutTypedDict
  from .legacyjobmetadataout import LegacyJobMetadataOut, LegacyJobMetadataOutTypedDict
+ from mistralai.utils import get_discriminator
+ from pydantic import Discriminator, Tag
  from typing import Union
- from typing_extensions import TypeAliasType
+ from typing_extensions import Annotated, TypeAliasType
+
+
+ Response1TypedDict = TypeAliasType(
+     "Response1TypedDict", Union[ClassifierJobOutTypedDict, CompletionJobOutTypedDict]
+ )
+
+
+ Response1 = Annotated[
+     Union[
+         Annotated[ClassifierJobOut, Tag("classifier")],
+         Annotated[CompletionJobOut, Tag("completion")],
+     ],
+     Discriminator(lambda m: get_discriminator(m, "job_type", "job_type")),
+ ]


  JobsAPIRoutesFineTuningCreateFineTuningJobResponseTypedDict = TypeAliasType(
      "JobsAPIRoutesFineTuningCreateFineTuningJobResponseTypedDict",
-     Union[LegacyJobMetadataOutTypedDict, JobOutTypedDict],
+     Union[LegacyJobMetadataOutTypedDict, Response1TypedDict],
  )
  r"""OK"""


  JobsAPIRoutesFineTuningCreateFineTuningJobResponse = TypeAliasType(
      "JobsAPIRoutesFineTuningCreateFineTuningJobResponse",
-     Union[LegacyJobMetadataOut, JobOut],
+     Union[LegacyJobMetadataOut, Response1],
  )
  r"""OK"""
mistralai/models/jobs_api_routes_fine_tuning_get_fine_tuning_jobop.py CHANGED
@@ -1,9 +1,19 @@
  """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

  from __future__ import annotations
+ from .classifierdetailedjobout import (
+     ClassifierDetailedJobOut,
+     ClassifierDetailedJobOutTypedDict,
+ )
+ from .completiondetailedjobout import (
+     CompletionDetailedJobOut,
+     CompletionDetailedJobOutTypedDict,
+ )
  from mistralai.types import BaseModel
- from mistralai.utils import FieldMetadata, PathParamMetadata
- from typing_extensions import Annotated, TypedDict
+ from mistralai.utils import FieldMetadata, PathParamMetadata, get_discriminator
+ from pydantic import Discriminator, Tag
+ from typing import Union
+ from typing_extensions import Annotated, TypeAliasType, TypedDict


  class JobsAPIRoutesFineTuningGetFineTuningJobRequestTypedDict(TypedDict):
@@ -16,3 +26,20 @@ class JobsAPIRoutesFineTuningGetFineTuningJobRequest(BaseModel):
          str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False))
      ]
      r"""The ID of the job to analyse."""
+
+
+ JobsAPIRoutesFineTuningGetFineTuningJobResponseTypedDict = TypeAliasType(
+     "JobsAPIRoutesFineTuningGetFineTuningJobResponseTypedDict",
+     Union[CompletionDetailedJobOutTypedDict, ClassifierDetailedJobOutTypedDict],
+ )
+ r"""OK"""
+
+
+ JobsAPIRoutesFineTuningGetFineTuningJobResponse = Annotated[
+     Union[
+         Annotated[ClassifierDetailedJobOut, Tag("classifier")],
+         Annotated[CompletionDetailedJobOut, Tag("completion")],
+     ],
+     Discriminator(lambda m: get_discriminator(m, "job_type", "job_type")),
+ ]
+ r"""OK"""
mistralai/models/jobs_api_routes_fine_tuning_get_fine_tuning_jobsop.py CHANGED
@@ -33,6 +33,7 @@ class JobsAPIRoutesFineTuningGetFineTuningJobsRequestTypedDict(TypedDict):
      r"""The model name used for fine-tuning to filter on. When set, the other results are not displayed."""
      created_after: NotRequired[Nullable[datetime]]
      r"""The date/time to filter on. When set, the results for previous creation times are not displayed."""
+     created_before: NotRequired[Nullable[datetime]]
      created_by_me: NotRequired[bool]
      r"""When set, only return results for jobs created by the API caller. Other results are not displayed."""
      status: NotRequired[Nullable[QueryParamStatus]]
@@ -70,6 +71,11 @@ class JobsAPIRoutesFineTuningGetFineTuningJobsRequest(BaseModel):
      ] = UNSET
      r"""The date/time to filter on. When set, the results for previous creation times are not displayed."""

+     created_before: Annotated[
+         OptionalNullable[datetime],
+         FieldMetadata(query=QueryParamMetadata(style="form", explode=True)),
+     ] = UNSET
+
      created_by_me: Annotated[
          Optional[bool],
          FieldMetadata(query=QueryParamMetadata(style="form", explode=True)),
@@ -107,6 +113,7 @@ class JobsAPIRoutesFineTuningGetFineTuningJobsRequest(BaseModel):
              "page_size",
              "model",
              "created_after",
+             "created_before",
              "created_by_me",
              "status",
              "wandb_project",
@@ -116,6 +123,7 @@ class JobsAPIRoutesFineTuningGetFineTuningJobsRequest(BaseModel):
          nullable_fields = [
              "model",
              "created_after",
+             "created_before",
              "status",
              "wandb_project",
              "wandb_name",
mistralai/models/jobs_api_routes_fine_tuning_start_fine_tuning_jobop.py CHANGED
@@ -1,9 +1,19 @@
  """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

  from __future__ import annotations
+ from .classifierdetailedjobout import (
+     ClassifierDetailedJobOut,
+     ClassifierDetailedJobOutTypedDict,
+ )
+ from .completiondetailedjobout import (
+     CompletionDetailedJobOut,
+     CompletionDetailedJobOutTypedDict,
+ )
  from mistralai.types import BaseModel
- from mistralai.utils import FieldMetadata, PathParamMetadata
- from typing_extensions import Annotated, TypedDict
+ from mistralai.utils import FieldMetadata, PathParamMetadata, get_discriminator
+ from pydantic import Discriminator, Tag
+ from typing import Union
+ from typing_extensions import Annotated, TypeAliasType, TypedDict


  class JobsAPIRoutesFineTuningStartFineTuningJobRequestTypedDict(TypedDict):
@@ -14,3 +24,20 @@ class JobsAPIRoutesFineTuningStartFineTuningJobRequest(BaseModel):
      job_id: Annotated[
          str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False))
      ]
+
+
+ JobsAPIRoutesFineTuningStartFineTuningJobResponseTypedDict = TypeAliasType(
+     "JobsAPIRoutesFineTuningStartFineTuningJobResponseTypedDict",
+     Union[CompletionDetailedJobOutTypedDict, ClassifierDetailedJobOutTypedDict],
+ )
+ r"""OK"""
+
+
+ JobsAPIRoutesFineTuningStartFineTuningJobResponse = Annotated[
+     Union[
+         Annotated[ClassifierDetailedJobOut, Tag("classifier")],
+         Annotated[CompletionDetailedJobOut, Tag("completion")],
+     ],
+     Discriminator(lambda m: get_discriminator(m, "job_type", "job_type")),
+ ]
+ r"""OK"""
mistralai/models/jobs_api_routes_fine_tuning_update_fine_tuned_modelop.py CHANGED
@@ -1,10 +1,19 @@
  """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

  from __future__ import annotations
+ from .classifierftmodelout import ClassifierFTModelOut, ClassifierFTModelOutTypedDict
+ from .completionftmodelout import CompletionFTModelOut, CompletionFTModelOutTypedDict
  from .updateftmodelin import UpdateFTModelIn, UpdateFTModelInTypedDict
  from mistralai.types import BaseModel
- from mistralai.utils import FieldMetadata, PathParamMetadata, RequestMetadata
- from typing_extensions import Annotated, TypedDict
+ from mistralai.utils import (
+     FieldMetadata,
+     PathParamMetadata,
+     RequestMetadata,
+     get_discriminator,
+ )
+ from pydantic import Discriminator, Tag
+ from typing import Union
+ from typing_extensions import Annotated, TypeAliasType, TypedDict


  class JobsAPIRoutesFineTuningUpdateFineTunedModelRequestTypedDict(TypedDict):
@@ -23,3 +32,20 @@ class JobsAPIRoutesFineTuningUpdateFineTunedModelRequest(BaseModel):
          UpdateFTModelIn,
          FieldMetadata(request=RequestMetadata(media_type="application/json")),
      ]
+
+
+ JobsAPIRoutesFineTuningUpdateFineTunedModelResponseTypedDict = TypeAliasType(
+     "JobsAPIRoutesFineTuningUpdateFineTunedModelResponseTypedDict",
+     Union[CompletionFTModelOutTypedDict, ClassifierFTModelOutTypedDict],
+ )
+ r"""OK"""
+
+
+ JobsAPIRoutesFineTuningUpdateFineTunedModelResponse = Annotated[
+     Union[
+         Annotated[ClassifierFTModelOut, Tag("classifier")],
+         Annotated[CompletionFTModelOut, Tag("completion")],
+     ],
+     Discriminator(lambda m: get_discriminator(m, "model_type", "model_type")),
+ ]
+ r"""OK"""
mistralai/models/jobsout.py CHANGED
@@ -1,13 +1,27 @@
  """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

  from __future__ import annotations
- from .jobout import JobOut, JobOutTypedDict
+ from .classifierjobout import ClassifierJobOut, ClassifierJobOutTypedDict
+ from .completionjobout import CompletionJobOut, CompletionJobOutTypedDict
  from mistralai.types import BaseModel
- from mistralai.utils import validate_const
- import pydantic
- from pydantic.functional_validators import AfterValidator
- from typing import List, Literal, Optional
- from typing_extensions import Annotated, NotRequired, TypedDict
+ from mistralai.utils import get_discriminator
+ from pydantic import Discriminator, Tag
+ from typing import List, Literal, Optional, Union
+ from typing_extensions import Annotated, NotRequired, TypeAliasType, TypedDict
+
+
+ JobsOutDataTypedDict = TypeAliasType(
+     "JobsOutDataTypedDict", Union[ClassifierJobOutTypedDict, CompletionJobOutTypedDict]
+ )
+
+
+ JobsOutData = Annotated[
+     Union[
+         Annotated[ClassifierJobOut, Tag("classifier")],
+         Annotated[CompletionJobOut, Tag("completion")],
+     ],
+     Discriminator(lambda m: get_discriminator(m, "job_type", "job_type")),
+ ]


  JobsOutObject = Literal["list"]
@@ -15,16 +29,13 @@ JobsOutObject = Literal["list"]

  class JobsOutTypedDict(TypedDict):
      total: int
-     data: NotRequired[List[JobOutTypedDict]]
-     object: JobsOutObject
+     data: NotRequired[List[JobsOutDataTypedDict]]
+     object: NotRequired[JobsOutObject]


  class JobsOut(BaseModel):
      total: int

-     data: Optional[List[JobOut]] = None
+     data: Optional[List[JobsOutData]] = None

-     OBJECT: Annotated[
-         Annotated[Optional[JobsOutObject], AfterValidator(validate_const("list"))],
-         pydantic.Field(alias="object"),
-     ] = "list"
+     object: Optional[JobsOutObject] = "list"
mistralai/models/legacyjobmetadataout.py CHANGED
@@ -2,12 +2,9 @@

  from __future__ import annotations
  from mistralai.types import BaseModel, Nullable, OptionalNullable, UNSET, UNSET_SENTINEL
- from mistralai.utils import validate_const
- import pydantic
  from pydantic import model_serializer
- from pydantic.functional_validators import AfterValidator
  from typing import Literal, Optional
- from typing_extensions import Annotated, NotRequired, TypedDict
+ from typing_extensions import NotRequired, TypedDict


  LegacyJobMetadataOutObject = Literal["job.metadata"]
@@ -33,7 +30,7 @@ class LegacyJobMetadataOutTypedDict(TypedDict):
      r"""The number of complete passes through the entire training dataset."""
      training_steps: NotRequired[Nullable[int]]
      r"""The number of training steps to perform. A training step refers to a single update of the model weights during the fine-tuning process. This update is typically calculated using a batch of samples from the training dataset."""
-     object: LegacyJobMetadataOutObject
+     object: NotRequired[LegacyJobMetadataOutObject]


  class LegacyJobMetadataOut(BaseModel):
@@ -67,13 +64,7 @@ class LegacyJobMetadataOut(BaseModel):
      training_steps: OptionalNullable[int] = UNSET
      r"""The number of training steps to perform. A training step refers to a single update of the model weights during the fine-tuning process. This update is typically calculated using a batch of samples from the training dataset."""

-     OBJECT: Annotated[
-         Annotated[
-             Optional[LegacyJobMetadataOutObject],
-             AfterValidator(validate_const("job.metadata")),
-         ],
-         pydantic.Field(alias="object"),
-     ] = "job.metadata"
+     object: Optional[LegacyJobMetadataOutObject] = "job.metadata"

      @model_serializer(mode="wrap")
      def serialize_model(self, handler):
mistralai/models/{classificationobject.py → moderationobject.py} RENAMED
@@ -6,16 +6,16 @@ from typing import Dict, Optional
  from typing_extensions import NotRequired, TypedDict


- class ClassificationObjectTypedDict(TypedDict):
+ class ModerationObjectTypedDict(TypedDict):
      categories: NotRequired[Dict[str, bool]]
-     r"""Classifier result thresholded"""
+     r"""Moderation result thresholds"""
      category_scores: NotRequired[Dict[str, float]]
-     r"""Classifier result"""
+     r"""Moderation result"""


- class ClassificationObject(BaseModel):
+ class ModerationObject(BaseModel):
      categories: Optional[Dict[str, bool]] = None
-     r"""Classifier result thresholded"""
+     r"""Moderation result thresholds"""

      category_scores: Optional[Dict[str, float]] = None
-     r"""Classifier result"""
+     r"""Moderation result"""
mistralai/models/moderationresponse.py ADDED
@@ -0,0 +1,21 @@
+ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+ from __future__ import annotations
+ from .moderationobject import ModerationObject, ModerationObjectTypedDict
+ from mistralai.types import BaseModel
+ from typing import List
+ from typing_extensions import TypedDict
+
+
+ class ModerationResponseTypedDict(TypedDict):
+     id: str
+     model: str
+     results: List[ModerationObjectTypedDict]
+
+
+ class ModerationResponse(BaseModel):
+     id: str
+
+     model: str
+
+     results: List[ModerationObject]
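With the Classification* → Moderation* rename, moderation calls deserialize into ModerationResponse/ModerationObject. A sketch assuming the existing client.classifiers.moderate endpoint and the mistral-moderation-latest model name:

from mistralai import Mistral

client = Mistral(api_key="...")  # assumed to be configured elsewhere

resp = client.classifiers.moderate(
    model="mistral-moderation-latest",
    inputs=["...text to check..."],
)
for result in resp.results:
    # categories holds the thresholded booleans, category_scores the raw scores
    flagged = {name: hit for name, hit in (result.categories or {}).items() if hit}
    print(flagged, result.category_scores)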
mistralai/models/ocrimageobject.py CHANGED
@@ -19,6 +19,8 @@ class OCRImageObjectTypedDict(TypedDict):
      r"""Y coordinate of bottom-right corner of the extracted image"""
      image_base64: NotRequired[Nullable[str]]
      r"""Base64 string of the extracted image"""
+     image_annotation: NotRequired[Nullable[str]]
+     r"""Annotation of the extracted image in json str"""


  class OCRImageObject(BaseModel):
@@ -40,15 +42,19 @@ class OCRImageObject(BaseModel):
      image_base64: OptionalNullable[str] = UNSET
      r"""Base64 string of the extracted image"""

+     image_annotation: OptionalNullable[str] = UNSET
+     r"""Annotation of the extracted image in json str"""
+
      @model_serializer(mode="wrap")
      def serialize_model(self, handler):
-         optional_fields = ["image_base64"]
+         optional_fields = ["image_base64", "image_annotation"]
          nullable_fields = [
              "top_left_x",
              "top_left_y",
              "bottom_right_x",
              "bottom_right_y",
              "image_base64",
+             "image_annotation",
          ]
          null_default_fields = []

mistralai/models/ocrrequest.py CHANGED
@@ -3,6 +3,7 @@
  from __future__ import annotations
  from .documenturlchunk import DocumentURLChunk, DocumentURLChunkTypedDict
  from .imageurlchunk import ImageURLChunk, ImageURLChunkTypedDict
+ from .responseformat import ResponseFormat, ResponseFormatTypedDict
  from mistralai.types import BaseModel, Nullable, OptionalNullable, UNSET, UNSET_SENTINEL
  from pydantic import model_serializer
  from typing import List, Optional, Union
@@ -32,6 +33,10 @@ class OCRRequestTypedDict(TypedDict):
      r"""Max images to extract"""
      image_min_size: NotRequired[Nullable[int]]
      r"""Minimum height and width of image to extract"""
+     bbox_annotation_format: NotRequired[Nullable[ResponseFormatTypedDict]]
+     r"""Structured output class for extracting useful information from each extracted bounding box / image from document. Only json_schema is valid for this field"""
+     document_annotation_format: NotRequired[Nullable[ResponseFormatTypedDict]]
+     r"""Structured output class for extracting useful information from the entire document. Only json_schema is valid for this field"""


  class OCRRequest(BaseModel):
@@ -54,6 +59,12 @@ class OCRRequest(BaseModel):
      image_min_size: OptionalNullable[int] = UNSET
      r"""Minimum height and width of image to extract"""

+     bbox_annotation_format: OptionalNullable[ResponseFormat] = UNSET
+     r"""Structured output class for extracting useful information from each extracted bounding box / image from document. Only json_schema is valid for this field"""
+
+     document_annotation_format: OptionalNullable[ResponseFormat] = UNSET
+     r"""Structured output class for extracting useful information from the entire document. Only json_schema is valid for this field"""
+
      @model_serializer(mode="wrap")
      def serialize_model(self, handler):
          optional_fields = [
@@ -62,6 +73,8 @@ class OCRRequest(BaseModel):
              "include_image_base64",
              "image_limit",
              "image_min_size",
+             "bbox_annotation_format",
+             "document_annotation_format",
          ]
          nullable_fields = [
              "model",
@@ -69,6 +82,8 @@ class OCRRequest(BaseModel):
              "include_image_base64",
              "image_limit",
              "image_min_size",
+             "bbox_annotation_format",
+             "document_annotation_format",
          ]
          null_default_fields = []

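OCRRequest gains bbox_annotation_format and document_annotation_format, both ResponseFormat fields where only json_schema is valid. A hedged sketch of the request body; the schema payload below is hypothetical and is passed as a plain dict on the assumption that pydantic coerces it into ResponseFormat:

from mistralai.models import DocumentURLChunk, OCRRequest

request = OCRRequest(
    model="mistral-ocr-latest",  # assumed model name
    document=DocumentURLChunk(document_url="https://example.com/report.pdf"),
    bbox_annotation_format={
        "type": "json_schema",
        "json_schema": {
            "name": "figure_annotation",  # hypothetical schema name
            "schema": {
                "type": "object",
                "properties": {"caption": {"type": "string"}},
            },
        },
    },
)
print(request.model_dump(exclude_unset=True))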