mistralai 1.6.0__py3-none-any.whl → 1.7.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (58)
  1. mistralai/_version.py +3 -3
  2. mistralai/classifiers.py +431 -19
  3. mistralai/embeddings.py +6 -2
  4. mistralai/extra/utils/_pydantic_helper.py +2 -1
  5. mistralai/jobs.py +84 -38
  6. mistralai/mistral_jobs.py +2 -2
  7. mistralai/models/__init__.py +197 -46
  8. mistralai/models/archiveftmodelout.py +3 -11
  9. mistralai/models/batchjobout.py +3 -9
  10. mistralai/models/batchjobsout.py +3 -9
  11. mistralai/models/chatclassificationrequest.py +20 -0
  12. mistralai/models/chatmoderationrequest.py +4 -7
  13. mistralai/models/classificationresponse.py +12 -9
  14. mistralai/models/classificationtargetresult.py +14 -0
  15. mistralai/models/classifierdetailedjobout.py +156 -0
  16. mistralai/models/classifierftmodelout.py +101 -0
  17. mistralai/models/classifierjobout.py +165 -0
  18. mistralai/models/classifiertargetin.py +55 -0
  19. mistralai/models/classifiertargetout.py +24 -0
  20. mistralai/models/classifiertrainingparameters.py +73 -0
  21. mistralai/models/classifiertrainingparametersin.py +85 -0
  22. mistralai/models/{detailedjobout.py → completiondetailedjobout.py} +34 -34
  23. mistralai/models/{ftmodelout.py → completionftmodelout.py} +12 -12
  24. mistralai/models/{jobout.py → completionjobout.py} +25 -24
  25. mistralai/models/{trainingparameters.py → completiontrainingparameters.py} +7 -7
  26. mistralai/models/{trainingparametersin.py → completiontrainingparametersin.py} +7 -7
  27. mistralai/models/embeddingrequest.py +6 -4
  28. mistralai/models/finetuneablemodeltype.py +7 -0
  29. mistralai/models/ftclassifierlossfunction.py +7 -0
  30. mistralai/models/ftmodelcapabilitiesout.py +3 -0
  31. mistralai/models/githubrepositoryin.py +3 -11
  32. mistralai/models/githubrepositoryout.py +3 -11
  33. mistralai/models/inputs.py +54 -0
  34. mistralai/models/instructrequest.py +42 -0
  35. mistralai/models/jobin.py +52 -12
  36. mistralai/models/jobs_api_routes_batch_get_batch_jobsop.py +3 -3
  37. mistralai/models/jobs_api_routes_fine_tuning_cancel_fine_tuning_jobop.py +29 -2
  38. mistralai/models/jobs_api_routes_fine_tuning_create_fine_tuning_jobop.py +21 -4
  39. mistralai/models/jobs_api_routes_fine_tuning_get_fine_tuning_jobop.py +29 -2
  40. mistralai/models/jobs_api_routes_fine_tuning_get_fine_tuning_jobsop.py +8 -0
  41. mistralai/models/jobs_api_routes_fine_tuning_start_fine_tuning_jobop.py +29 -2
  42. mistralai/models/jobs_api_routes_fine_tuning_update_fine_tuned_modelop.py +28 -2
  43. mistralai/models/jobsout.py +24 -13
  44. mistralai/models/legacyjobmetadataout.py +3 -12
  45. mistralai/models/{classificationobject.py → moderationobject.py} +6 -6
  46. mistralai/models/moderationresponse.py +21 -0
  47. mistralai/models/ocrimageobject.py +7 -1
  48. mistralai/models/ocrrequest.py +15 -0
  49. mistralai/models/ocrresponse.py +38 -2
  50. mistralai/models/unarchiveftmodelout.py +3 -11
  51. mistralai/models/wandbintegration.py +3 -11
  52. mistralai/models/wandbintegrationout.py +8 -13
  53. mistralai/models_.py +10 -4
  54. mistralai/ocr.py +28 -0
  55. {mistralai-1.6.0.dist-info → mistralai-1.7.1.dist-info}/METADATA +3 -1
  56. {mistralai-1.6.0.dist-info → mistralai-1.7.1.dist-info}/RECORD +58 -44
  57. {mistralai-1.6.0.dist-info → mistralai-1.7.1.dist-info}/WHEEL +1 -1
  58. {mistralai-1.6.0.dist-info → mistralai-1.7.1.dist-info}/LICENSE +0 -0
mistralai/models/classifierdetailedjobout.py (new file)
@@ -0,0 +1,156 @@
+ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+ from __future__ import annotations
+ from .checkpointout import CheckpointOut, CheckpointOutTypedDict
+ from .classifiertargetout import ClassifierTargetOut, ClassifierTargetOutTypedDict
+ from .classifiertrainingparameters import (
+     ClassifierTrainingParameters,
+     ClassifierTrainingParametersTypedDict,
+ )
+ from .eventout import EventOut, EventOutTypedDict
+ from .jobmetadataout import JobMetadataOut, JobMetadataOutTypedDict
+ from .wandbintegrationout import WandbIntegrationOut, WandbIntegrationOutTypedDict
+ from mistralai.types import BaseModel, Nullable, OptionalNullable, UNSET, UNSET_SENTINEL
+ from pydantic import model_serializer
+ from typing import List, Literal, Optional
+ from typing_extensions import NotRequired, TypedDict
+
+
+ ClassifierDetailedJobOutStatus = Literal[
+     "QUEUED",
+     "STARTED",
+     "VALIDATING",
+     "VALIDATED",
+     "RUNNING",
+     "FAILED_VALIDATION",
+     "FAILED",
+     "SUCCESS",
+     "CANCELLED",
+     "CANCELLATION_REQUESTED",
+ ]
+
+ ClassifierDetailedJobOutObject = Literal["job"]
+
+ ClassifierDetailedJobOutIntegrationsTypedDict = WandbIntegrationOutTypedDict
+
+
+ ClassifierDetailedJobOutIntegrations = WandbIntegrationOut
+
+
+ ClassifierDetailedJobOutJobType = Literal["classifier"]
+
+
+ class ClassifierDetailedJobOutTypedDict(TypedDict):
+     id: str
+     auto_start: bool
+     model: str
+     r"""The name of the model to fine-tune."""
+     status: ClassifierDetailedJobOutStatus
+     created_at: int
+     modified_at: int
+     training_files: List[str]
+     hyperparameters: ClassifierTrainingParametersTypedDict
+     classifier_targets: List[ClassifierTargetOutTypedDict]
+     validation_files: NotRequired[Nullable[List[str]]]
+     object: NotRequired[ClassifierDetailedJobOutObject]
+     fine_tuned_model: NotRequired[Nullable[str]]
+     suffix: NotRequired[Nullable[str]]
+     integrations: NotRequired[
+         Nullable[List[ClassifierDetailedJobOutIntegrationsTypedDict]]
+     ]
+     trained_tokens: NotRequired[Nullable[int]]
+     metadata: NotRequired[Nullable[JobMetadataOutTypedDict]]
+     job_type: NotRequired[ClassifierDetailedJobOutJobType]
+     events: NotRequired[List[EventOutTypedDict]]
+     r"""Event items are created every time the status of a fine-tuning job changes. The timestamped list of all events is accessible here."""
+     checkpoints: NotRequired[List[CheckpointOutTypedDict]]
+
+
+ class ClassifierDetailedJobOut(BaseModel):
+     id: str
+
+     auto_start: bool
+
+     model: str
+     r"""The name of the model to fine-tune."""
+
+     status: ClassifierDetailedJobOutStatus
+
+     created_at: int
+
+     modified_at: int
+
+     training_files: List[str]
+
+     hyperparameters: ClassifierTrainingParameters
+
+     classifier_targets: List[ClassifierTargetOut]
+
+     validation_files: OptionalNullable[List[str]] = UNSET
+
+     object: Optional[ClassifierDetailedJobOutObject] = "job"
+
+     fine_tuned_model: OptionalNullable[str] = UNSET
+
+     suffix: OptionalNullable[str] = UNSET
+
+     integrations: OptionalNullable[List[ClassifierDetailedJobOutIntegrations]] = UNSET
+
+     trained_tokens: OptionalNullable[int] = UNSET
+
+     metadata: OptionalNullable[JobMetadataOut] = UNSET
+
+     job_type: Optional[ClassifierDetailedJobOutJobType] = "classifier"
+
+     events: Optional[List[EventOut]] = None
+     r"""Event items are created every time the status of a fine-tuning job changes. The timestamped list of all events is accessible here."""
+
+     checkpoints: Optional[List[CheckpointOut]] = None
+
+     @model_serializer(mode="wrap")
+     def serialize_model(self, handler):
+         optional_fields = [
+             "validation_files",
+             "object",
+             "fine_tuned_model",
+             "suffix",
+             "integrations",
+             "trained_tokens",
+             "metadata",
+             "job_type",
+             "events",
+             "checkpoints",
+         ]
+         nullable_fields = [
+             "validation_files",
+             "fine_tuned_model",
+             "suffix",
+             "integrations",
+             "trained_tokens",
+             "metadata",
+         ]
+         null_default_fields = []
+
+         serialized = handler(self)
+
+         m = {}
+
+         for n, f in self.model_fields.items():
+             k = f.alias or n
+             val = serialized.get(k)
+             serialized.pop(k, None)
+
+             optional_nullable = k in optional_fields and k in nullable_fields
+             is_set = (
+                 self.__pydantic_fields_set__.intersection({n})
+                 or k in null_default_fields
+             ) # pylint: disable=no-member
+
+             if val is not None and val != UNSET_SENTINEL:
+                 m[k] = val
+             elif val != UNSET_SENTINEL and (
+                 not k in optional_fields or (optional_nullable and is_set)
+             ):
+                 m[k] = val
+
+         return m
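The serialize_model wrapper above implements the UNSET-sentinel pattern shared by the new models in this diff: fields left at UNSET should be dropped from the serialized payload, while nullable fields explicitly set to None should be kept as null. A minimal sketch of the expected behavior follows; the import paths come from this diff, but the field values are invented placeholders, not real API data.

    # Hedged sketch of the UNSET/Nullable serialization pattern defined above.
    # All field values are illustrative placeholders.
    from mistralai.models.classifierdetailedjobout import ClassifierDetailedJobOut
    from mistralai.models.classifiertrainingparameters import ClassifierTrainingParameters

    job = ClassifierDetailedJobOut(
        id="job-123",
        auto_start=True,
        model="placeholder-model",
        status="QUEUED",
        created_at=1700000000,
        modified_at=1700000000,
        training_files=["file-abc"],
        hyperparameters=ClassifierTrainingParameters(training_steps=100),
        classifier_targets=[],
        suffix=None,  # explicitly set to None
    )

    data = job.model_dump()
    # Expectation: "suffix" is present (as None) because it was explicitly set,
    # while fields left at UNSET, e.g. "fine_tuned_model", are omitted.
    print("suffix" in data, "fine_tuned_model" in data)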
mistralai/models/classifierftmodelout.py (new file)
@@ -0,0 +1,101 @@
+ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+ from __future__ import annotations
+ from .classifiertargetout import ClassifierTargetOut, ClassifierTargetOutTypedDict
+ from .ftmodelcapabilitiesout import (
+     FTModelCapabilitiesOut,
+     FTModelCapabilitiesOutTypedDict,
+ )
+ from mistralai.types import BaseModel, Nullable, OptionalNullable, UNSET, UNSET_SENTINEL
+ from pydantic import model_serializer
+ from typing import List, Literal, Optional
+ from typing_extensions import NotRequired, TypedDict
+
+
+ ClassifierFTModelOutObject = Literal["model"]
+
+ ClassifierFTModelOutModelType = Literal["classifier"]
+
+
+ class ClassifierFTModelOutTypedDict(TypedDict):
+     id: str
+     created: int
+     owned_by: str
+     root: str
+     archived: bool
+     capabilities: FTModelCapabilitiesOutTypedDict
+     job: str
+     classifier_targets: List[ClassifierTargetOutTypedDict]
+     object: NotRequired[ClassifierFTModelOutObject]
+     name: NotRequired[Nullable[str]]
+     description: NotRequired[Nullable[str]]
+     max_context_length: NotRequired[int]
+     aliases: NotRequired[List[str]]
+     model_type: NotRequired[ClassifierFTModelOutModelType]
+
+
+ class ClassifierFTModelOut(BaseModel):
+     id: str
+
+     created: int
+
+     owned_by: str
+
+     root: str
+
+     archived: bool
+
+     capabilities: FTModelCapabilitiesOut
+
+     job: str
+
+     classifier_targets: List[ClassifierTargetOut]
+
+     object: Optional[ClassifierFTModelOutObject] = "model"
+
+     name: OptionalNullable[str] = UNSET
+
+     description: OptionalNullable[str] = UNSET
+
+     max_context_length: Optional[int] = 32768
+
+     aliases: Optional[List[str]] = None
+
+     model_type: Optional[ClassifierFTModelOutModelType] = "classifier"
+
+     @model_serializer(mode="wrap")
+     def serialize_model(self, handler):
+         optional_fields = [
+             "object",
+             "name",
+             "description",
+             "max_context_length",
+             "aliases",
+             "model_type",
+         ]
+         nullable_fields = ["name", "description"]
+         null_default_fields = []
+
+         serialized = handler(self)
+
+         m = {}
+
+         for n, f in self.model_fields.items():
+             k = f.alias or n
+             val = serialized.get(k)
+             serialized.pop(k, None)
+
+             optional_nullable = k in optional_fields and k in nullable_fields
+             is_set = (
+                 self.__pydantic_fields_set__.intersection({n})
+                 or k in null_default_fields
+             ) # pylint: disable=no-member
+
+             if val is not None and val != UNSET_SENTINEL:
+                 m[k] = val
+             elif val != UNSET_SENTINEL and (
+                 not k in optional_fields or (optional_nullable and is_set)
+             ):
+                 m[k] = val
+
+         return m
mistralai/models/classifierjobout.py (new file)
@@ -0,0 +1,165 @@
+ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+ from __future__ import annotations
+ from .classifiertrainingparameters import (
+     ClassifierTrainingParameters,
+     ClassifierTrainingParametersTypedDict,
+ )
+ from .jobmetadataout import JobMetadataOut, JobMetadataOutTypedDict
+ from .wandbintegrationout import WandbIntegrationOut, WandbIntegrationOutTypedDict
+ from mistralai.types import BaseModel, Nullable, OptionalNullable, UNSET, UNSET_SENTINEL
+ from pydantic import model_serializer
+ from typing import List, Literal, Optional
+ from typing_extensions import NotRequired, TypedDict
+
+
+ ClassifierJobOutStatus = Literal[
+     "QUEUED",
+     "STARTED",
+     "VALIDATING",
+     "VALIDATED",
+     "RUNNING",
+     "FAILED_VALIDATION",
+     "FAILED",
+     "SUCCESS",
+     "CANCELLED",
+     "CANCELLATION_REQUESTED",
+ ]
+ r"""The current status of the fine-tuning job."""
+
+ ClassifierJobOutObject = Literal["job"]
+ r"""The object type of the fine-tuning job."""
+
+ ClassifierJobOutIntegrationsTypedDict = WandbIntegrationOutTypedDict
+
+
+ ClassifierJobOutIntegrations = WandbIntegrationOut
+
+
+ ClassifierJobOutJobType = Literal["classifier"]
+ r"""The type of job (`FT` for fine-tuning)."""
+
+
+ class ClassifierJobOutTypedDict(TypedDict):
+     id: str
+     r"""The ID of the job."""
+     auto_start: bool
+     model: str
+     r"""The name of the model to fine-tune."""
+     status: ClassifierJobOutStatus
+     r"""The current status of the fine-tuning job."""
+     created_at: int
+     r"""The UNIX timestamp (in seconds) for when the fine-tuning job was created."""
+     modified_at: int
+     r"""The UNIX timestamp (in seconds) for when the fine-tuning job was last modified."""
+     training_files: List[str]
+     r"""A list containing the IDs of uploaded files that contain training data."""
+     hyperparameters: ClassifierTrainingParametersTypedDict
+     validation_files: NotRequired[Nullable[List[str]]]
+     r"""A list containing the IDs of uploaded files that contain validation data."""
+     object: NotRequired[ClassifierJobOutObject]
+     r"""The object type of the fine-tuning job."""
+     fine_tuned_model: NotRequired[Nullable[str]]
+     r"""The name of the fine-tuned model that is being created. The value will be `null` if the fine-tuning job is still running."""
+     suffix: NotRequired[Nullable[str]]
+     r"""Optional text/code that adds more context for the model. When given a `prompt` and a `suffix` the model will fill what is between them. When `suffix` is not provided, the model will simply execute completion starting with `prompt`."""
+     integrations: NotRequired[Nullable[List[ClassifierJobOutIntegrationsTypedDict]]]
+     r"""A list of integrations enabled for your fine-tuning job."""
+     trained_tokens: NotRequired[Nullable[int]]
+     r"""Total number of tokens trained."""
+     metadata: NotRequired[Nullable[JobMetadataOutTypedDict]]
+     job_type: NotRequired[ClassifierJobOutJobType]
+     r"""The type of job (`FT` for fine-tuning)."""
+
+
+ class ClassifierJobOut(BaseModel):
+     id: str
+     r"""The ID of the job."""
+
+     auto_start: bool
+
+     model: str
+     r"""The name of the model to fine-tune."""
+
+     status: ClassifierJobOutStatus
+     r"""The current status of the fine-tuning job."""
+
+     created_at: int
+     r"""The UNIX timestamp (in seconds) for when the fine-tuning job was created."""
+
+     modified_at: int
+     r"""The UNIX timestamp (in seconds) for when the fine-tuning job was last modified."""
+
+     training_files: List[str]
+     r"""A list containing the IDs of uploaded files that contain training data."""
+
+     hyperparameters: ClassifierTrainingParameters
+
+     validation_files: OptionalNullable[List[str]] = UNSET
+     r"""A list containing the IDs of uploaded files that contain validation data."""
+
+     object: Optional[ClassifierJobOutObject] = "job"
+     r"""The object type of the fine-tuning job."""
+
+     fine_tuned_model: OptionalNullable[str] = UNSET
+     r"""The name of the fine-tuned model that is being created. The value will be `null` if the fine-tuning job is still running."""
+
+     suffix: OptionalNullable[str] = UNSET
+     r"""Optional text/code that adds more context for the model. When given a `prompt` and a `suffix` the model will fill what is between them. When `suffix` is not provided, the model will simply execute completion starting with `prompt`."""
+
+     integrations: OptionalNullable[List[ClassifierJobOutIntegrations]] = UNSET
+     r"""A list of integrations enabled for your fine-tuning job."""
+
+     trained_tokens: OptionalNullable[int] = UNSET
+     r"""Total number of tokens trained."""
+
+     metadata: OptionalNullable[JobMetadataOut] = UNSET
+
+     job_type: Optional[ClassifierJobOutJobType] = "classifier"
+     r"""The type of job (`FT` for fine-tuning)."""
+
+     @model_serializer(mode="wrap")
+     def serialize_model(self, handler):
+         optional_fields = [
+             "validation_files",
+             "object",
+             "fine_tuned_model",
+             "suffix",
+             "integrations",
+             "trained_tokens",
+             "metadata",
+             "job_type",
+         ]
+         nullable_fields = [
+             "validation_files",
+             "fine_tuned_model",
+             "suffix",
+             "integrations",
+             "trained_tokens",
+             "metadata",
+         ]
+         null_default_fields = []
+
+         serialized = handler(self)
+
+         m = {}
+
+         for n, f in self.model_fields.items():
+             k = f.alias or n
+             val = serialized.get(k)
+             serialized.pop(k, None)
+
+             optional_nullable = k in optional_fields and k in nullable_fields
+             is_set = (
+                 self.__pydantic_fields_set__.intersection({n})
+                 or k in null_default_fields
+             ) # pylint: disable=no-member
+
+             if val is not None and val != UNSET_SENTINEL:
+                 m[k] = val
+             elif val != UNSET_SENTINEL and (
+                 not k in optional_fields or (optional_nullable and is_set)
+             ):
+                 m[k] = val
+
+         return m
mistralai/models/classifiertargetin.py (new file)
@@ -0,0 +1,55 @@
+ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+ from __future__ import annotations
+ from .ftclassifierlossfunction import FTClassifierLossFunction
+ from mistralai.types import BaseModel, Nullable, OptionalNullable, UNSET, UNSET_SENTINEL
+ from pydantic import model_serializer
+ from typing import List, Optional
+ from typing_extensions import NotRequired, TypedDict
+
+
+ class ClassifierTargetInTypedDict(TypedDict):
+     name: str
+     labels: List[str]
+     weight: NotRequired[float]
+     loss_function: NotRequired[Nullable[FTClassifierLossFunction]]
+
+
+ class ClassifierTargetIn(BaseModel):
+     name: str
+
+     labels: List[str]
+
+     weight: Optional[float] = 1
+
+     loss_function: OptionalNullable[FTClassifierLossFunction] = UNSET
+
+     @model_serializer(mode="wrap")
+     def serialize_model(self, handler):
+         optional_fields = ["weight", "loss_function"]
+         nullable_fields = ["loss_function"]
+         null_default_fields = []
+
+         serialized = handler(self)
+
+         m = {}
+
+         for n, f in self.model_fields.items():
+             k = f.alias or n
+             val = serialized.get(k)
+             serialized.pop(k, None)
+
+             optional_nullable = k in optional_fields and k in nullable_fields
+             is_set = (
+                 self.__pydantic_fields_set__.intersection({n})
+                 or k in null_default_fields
+             ) # pylint: disable=no-member
+
+             if val is not None and val != UNSET_SENTINEL:
+                 m[k] = val
+             elif val != UNSET_SENTINEL and (
+                 not k in optional_fields or (optional_nullable and is_set)
+             ):
+                 m[k] = val
+
+         return m
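ClassifierTargetIn is the request-side counterpart of ClassifierTargetOut below: each target names a set of labels, with an optional weight (default 1) and loss function. A hedged construction sketch follows; the target name and labels are invented, and loss_function is left unset rather than guessing at the allowed FTClassifierLossFunction values.

    # Hedged sketch: building a request-side classifier target.
    # "sentiment" and the label names are illustrative placeholders.
    from mistralai.models.classifiertargetin import ClassifierTargetIn

    target = ClassifierTargetIn(
        name="sentiment",
        labels=["positive", "negative"],
        weight=1.0,
        # loss_function is left at UNSET; its allowed values are defined in
        # ftclassifierlossfunction.py, which is not shown in this section.
    )
    print(target.model_dump())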
mistralai/models/classifiertargetout.py (new file)
@@ -0,0 +1,24 @@
+ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+ from __future__ import annotations
+ from .ftclassifierlossfunction import FTClassifierLossFunction
+ from mistralai.types import BaseModel
+ from typing import List
+ from typing_extensions import TypedDict
+
+
+ class ClassifierTargetOutTypedDict(TypedDict):
+     name: str
+     labels: List[str]
+     weight: float
+     loss_function: FTClassifierLossFunction
+
+
+ class ClassifierTargetOut(BaseModel):
+     name: str
+
+     labels: List[str]
+
+     weight: float
+
+     loss_function: FTClassifierLossFunction
mistralai/models/classifiertrainingparameters.py (new file)
@@ -0,0 +1,73 @@
+ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+ from __future__ import annotations
+ from mistralai.types import BaseModel, Nullable, OptionalNullable, UNSET, UNSET_SENTINEL
+ from pydantic import model_serializer
+ from typing import Optional
+ from typing_extensions import NotRequired, TypedDict
+
+
+ class ClassifierTrainingParametersTypedDict(TypedDict):
+     training_steps: NotRequired[Nullable[int]]
+     learning_rate: NotRequired[float]
+     weight_decay: NotRequired[Nullable[float]]
+     warmup_fraction: NotRequired[Nullable[float]]
+     epochs: NotRequired[Nullable[float]]
+     seq_len: NotRequired[Nullable[int]]
+
+
+ class ClassifierTrainingParameters(BaseModel):
+     training_steps: OptionalNullable[int] = UNSET
+
+     learning_rate: Optional[float] = 0.0001
+
+     weight_decay: OptionalNullable[float] = UNSET
+
+     warmup_fraction: OptionalNullable[float] = UNSET
+
+     epochs: OptionalNullable[float] = UNSET
+
+     seq_len: OptionalNullable[int] = UNSET
+
+     @model_serializer(mode="wrap")
+     def serialize_model(self, handler):
+         optional_fields = [
+             "training_steps",
+             "learning_rate",
+             "weight_decay",
+             "warmup_fraction",
+             "epochs",
+             "seq_len",
+         ]
+         nullable_fields = [
+             "training_steps",
+             "weight_decay",
+             "warmup_fraction",
+             "epochs",
+             "seq_len",
+         ]
+         null_default_fields = []
+
+         serialized = handler(self)
+
+         m = {}
+
+         for n, f in self.model_fields.items():
+             k = f.alias or n
+             val = serialized.get(k)
+             serialized.pop(k, None)
+
+             optional_nullable = k in optional_fields and k in nullable_fields
+             is_set = (
+                 self.__pydantic_fields_set__.intersection({n})
+                 or k in null_default_fields
+             ) # pylint: disable=no-member
+
+             if val is not None and val != UNSET_SENTINEL:
+                 m[k] = val
+             elif val != UNSET_SENTINEL and (
+                 not k in optional_fields or (optional_nullable and is_set)
+             ):
+                 m[k] = val
+
+         return m
mistralai/models/classifiertrainingparametersin.py (new file)
@@ -0,0 +1,85 @@
+ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+ from __future__ import annotations
+ from mistralai.types import BaseModel, Nullable, OptionalNullable, UNSET, UNSET_SENTINEL
+ from pydantic import model_serializer
+ from typing import Optional
+ from typing_extensions import NotRequired, TypedDict
+
+
+ class ClassifierTrainingParametersInTypedDict(TypedDict):
+     r"""The fine-tuning hyperparameter settings used in a classifier fine-tune job."""
+
+     training_steps: NotRequired[Nullable[int]]
+     r"""The number of training steps to perform. A training step refers to a single update of the model weights during the fine-tuning process. This update is typically calculated using a batch of samples from the training dataset."""
+     learning_rate: NotRequired[float]
+     r"""A parameter describing how much to adjust the pre-trained model's weights in response to the estimated error each time the weights are updated during the fine-tuning process."""
+     weight_decay: NotRequired[Nullable[float]]
+     r"""(Advanced Usage) Weight decay adds a term to the loss function that is proportional to the sum of the squared weights. This term reduces the magnitude of the weights and prevents them from growing too large."""
+     warmup_fraction: NotRequired[Nullable[float]]
+     r"""(Advanced Usage) A parameter that specifies the percentage of the total training steps at which the learning rate warm-up phase ends. During this phase, the learning rate gradually increases from a small value to the initial learning rate, helping to stabilize the training process and improve convergence. Similar to `pct_start` in [mistral-finetune](https://github.com/mistralai/mistral-finetune)"""
+     epochs: NotRequired[Nullable[float]]
+     seq_len: NotRequired[Nullable[int]]
+
+
+ class ClassifierTrainingParametersIn(BaseModel):
+     r"""The fine-tuning hyperparameter settings used in a classifier fine-tune job."""
+
+     training_steps: OptionalNullable[int] = UNSET
+     r"""The number of training steps to perform. A training step refers to a single update of the model weights during the fine-tuning process. This update is typically calculated using a batch of samples from the training dataset."""
+
+     learning_rate: Optional[float] = 0.0001
+     r"""A parameter describing how much to adjust the pre-trained model's weights in response to the estimated error each time the weights are updated during the fine-tuning process."""
+
+     weight_decay: OptionalNullable[float] = UNSET
+     r"""(Advanced Usage) Weight decay adds a term to the loss function that is proportional to the sum of the squared weights. This term reduces the magnitude of the weights and prevents them from growing too large."""
+
+     warmup_fraction: OptionalNullable[float] = UNSET
+     r"""(Advanced Usage) A parameter that specifies the percentage of the total training steps at which the learning rate warm-up phase ends. During this phase, the learning rate gradually increases from a small value to the initial learning rate, helping to stabilize the training process and improve convergence. Similar to `pct_start` in [mistral-finetune](https://github.com/mistralai/mistral-finetune)"""
+
+     epochs: OptionalNullable[float] = UNSET
+
+     seq_len: OptionalNullable[int] = UNSET
+
+     @model_serializer(mode="wrap")
+     def serialize_model(self, handler):
+         optional_fields = [
+             "training_steps",
+             "learning_rate",
+             "weight_decay",
+             "warmup_fraction",
+             "epochs",
+             "seq_len",
+         ]
+         nullable_fields = [
+             "training_steps",
+             "weight_decay",
+             "warmup_fraction",
+             "epochs",
+             "seq_len",
+         ]
+         null_default_fields = []
+
+         serialized = handler(self)
+
+         m = {}
+
+         for n, f in self.model_fields.items():
+             k = f.alias or n
+             val = serialized.get(k)
+             serialized.pop(k, None)
+
+             optional_nullable = k in optional_fields and k in nullable_fields
+             is_set = (
+                 self.__pydantic_fields_set__.intersection({n})
+                 or k in null_default_fields
+             ) # pylint: disable=no-member
+
+             if val is not None and val != UNSET_SENTINEL:
+                 m[k] = val
+             elif val != UNSET_SENTINEL and (
+                 not k in optional_fields or (optional_nullable and is_set)
+             ):
+                 m[k] = val
+
+         return m
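ClassifierTrainingParametersIn is the input-side mirror of ClassifierTrainingParameters above and carries the hyperparameters for a classifier fine-tuning request. A hedged sketch of constructing it follows; the numeric values are placeholders, not recommended settings.

    # Hedged sketch: classifier fine-tuning hyperparameters.
    # Values are placeholders; defaults come from the model definition above.
    from mistralai.models.classifiertrainingparametersin import (
        ClassifierTrainingParametersIn,
    )

    params = ClassifierTrainingParametersIn(
        training_steps=100,
        learning_rate=0.0001,  # same as the field's default above
        weight_decay=0.1,
        warmup_fraction=0.05,
    )
    # epochs and seq_len stay at the UNSET sentinel and should be omitted
    # when the model is serialized by serialize_model above.
    print(params.model_dump())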