mistralai 1.0.0rc2__py3-none-any.whl → 1.0.2__py3-none-any.whl
This diff compares the content of two publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the versions as they appear in their respective public registries.
- mistralai/agents.py +33 -33
- mistralai/chat.py +4 -4
- mistralai/client.py +1 -1
- mistralai/jobs.py +24 -34
- mistralai/models/__init__.py +22 -22
- mistralai/models/agentscompletionrequest.py +14 -14
- mistralai/models/agentscompletionstreamrequest.py +41 -39
- mistralai/models/archiveftmodelout.py +4 -2
- mistralai/models/chatcompletionchoice.py +3 -4
- mistralai/models/chatcompletionrequest.py +16 -16
- mistralai/models/chatcompletionstreamrequest.py +16 -16
- mistralai/models/delete_model_v1_models_model_id_deleteop.py +2 -0
- mistralai/models/deltamessage.py +6 -6
- mistralai/models/detailedjobout.py +19 -5
- mistralai/models/embeddingrequest.py +8 -8
- mistralai/models/files_api_routes_upload_fileop.py +7 -4
- mistralai/models/fileschema.py +8 -3
- mistralai/models/fimcompletionrequest.py +8 -8
- mistralai/models/fimcompletionstreamrequest.py +8 -8
- mistralai/models/ftmodelout.py +4 -2
- mistralai/models/functioncall.py +9 -3
- mistralai/models/githubrepositoryin.py +4 -2
- mistralai/models/githubrepositoryout.py +4 -2
- mistralai/models/jobin.py +16 -4
- mistralai/models/jobout.py +20 -5
- mistralai/models/jobs_api_routes_fine_tuning_archive_fine_tuned_modelop.py +2 -0
- mistralai/models/jobs_api_routes_fine_tuning_create_fine_tuning_jobop.py +1 -54
- mistralai/models/jobs_api_routes_fine_tuning_unarchive_fine_tuned_modelop.py +2 -0
- mistralai/models/jobs_api_routes_fine_tuning_update_fine_tuned_modelop.py +2 -0
- mistralai/models/jobsout.py +4 -2
- mistralai/models/legacyjobmetadataout.py +4 -2
- mistralai/models/retrieve_model_v1_models_model_id_getop.py +2 -0
- mistralai/models/retrievefileout.py +8 -3
- mistralai/models/systemmessage.py +6 -6
- mistralai/models/tool.py +9 -5
- mistralai/models/toolcall.py +8 -4
- mistralai/models/trainingparameters.py +6 -2
- mistralai/models/trainingparametersin.py +10 -2
- mistralai/models/unarchiveftmodelout.py +4 -2
- mistralai/models/uploadfileout.py +8 -3
- mistralai/models/usermessage.py +6 -6
- mistralai/models/validationerror.py +6 -6
- mistralai/models/wandbintegration.py +4 -2
- mistralai/models/wandbintegrationout.py +4 -2
- mistralai/models_.py +10 -10
- mistralai/sdk.py +2 -2
- mistralai/sdkconfiguration.py +3 -3
- mistralai/utils/__init__.py +2 -2
- mistralai/utils/forms.py +10 -9
- mistralai/utils/headers.py +8 -8
- mistralai/utils/logger.py +8 -0
- mistralai/utils/queryparams.py +16 -14
- mistralai/utils/serializers.py +17 -8
- mistralai/utils/url.py +13 -8
- mistralai/utils/values.py +6 -0
- mistralai/version.py +7 -0
- {mistralai-1.0.0rc2.dist-info → mistralai-1.0.2.dist-info}/METADATA +40 -18
- {mistralai-1.0.0rc2.dist-info → mistralai-1.0.2.dist-info}/RECORD +87 -86
- mistralai_azure/models/__init__.py +4 -4
- mistralai_azure/models/chatcompletionchoice.py +3 -4
- mistralai_azure/models/chatcompletionrequest.py +14 -14
- mistralai_azure/models/chatcompletionstreamrequest.py +14 -14
- mistralai_azure/models/deltamessage.py +6 -6
- mistralai_azure/models/functioncall.py +9 -3
- mistralai_azure/models/systemmessage.py +6 -6
- mistralai_azure/models/tool.py +9 -5
- mistralai_azure/models/toolcall.py +8 -4
- mistralai_azure/models/usermessage.py +6 -6
- mistralai_azure/models/validationerror.py +6 -6
- mistralai_azure/sdkconfiguration.py +3 -3
- mistralai_gcp/chat.py +4 -4
- mistralai_gcp/models/__init__.py +4 -4
- mistralai_gcp/models/chatcompletionchoice.py +3 -4
- mistralai_gcp/models/chatcompletionrequest.py +16 -16
- mistralai_gcp/models/chatcompletionstreamrequest.py +16 -16
- mistralai_gcp/models/deltamessage.py +6 -6
- mistralai_gcp/models/fimcompletionrequest.py +8 -8
- mistralai_gcp/models/fimcompletionstreamrequest.py +8 -8
- mistralai_gcp/models/functioncall.py +9 -3
- mistralai_gcp/models/systemmessage.py +6 -6
- mistralai_gcp/models/tool.py +9 -5
- mistralai_gcp/models/toolcall.py +8 -4
- mistralai_gcp/models/usermessage.py +6 -6
- mistralai_gcp/models/validationerror.py +6 -6
- mistralai_gcp/sdkconfiguration.py +3 -3
- {mistralai-1.0.0rc2.dist-info → mistralai-1.0.2.dist-info}/LICENSE +0 -0
- {mistralai-1.0.0rc2.dist-info → mistralai-1.0.2.dist-info}/WHEEL +0 -0
mistralai/models/archiveftmodelout.py
CHANGED

@@ -3,10 +3,12 @@
 from __future__ import annotations
 from mistralai.types import BaseModel
 import pydantic
-from typing import Final, Optional, TypedDict
+from typing import Final, Literal, Optional, TypedDict
 from typing_extensions import Annotated, NotRequired


+ArchiveFTModelOutObject = Literal["model"]
+
 class ArchiveFTModelOutTypedDict(TypedDict):
     id: str
     archived: NotRequired[bool]

@@ -14,6 +16,6 @@ class ArchiveFTModelOutTypedDict(TypedDict):

 class ArchiveFTModelOut(BaseModel):
     id: str
-    OBJECT: Annotated[Final[Optional[
+    OBJECT: Annotated[Final[Optional[ArchiveFTModelOutObject]], pydantic.Field(alias="object")] = "model" # type: ignore
     archived: Optional[bool] = True
mistralai/models/chatcompletionchoice.py
CHANGED

@@ -3,20 +3,19 @@
 from __future__ import annotations
 from .assistantmessage import AssistantMessage, AssistantMessageTypedDict
 from mistralai.types import BaseModel
-from typing import Literal,
-from typing_extensions import NotRequired
+from typing import Literal, TypedDict


 FinishReason = Literal["stop", "length", "model_length", "error", "tool_calls"]

 class ChatCompletionChoiceTypedDict(TypedDict):
     index: int
+    message: AssistantMessageTypedDict
     finish_reason: FinishReason
-    message: NotRequired[AssistantMessageTypedDict]


 class ChatCompletionChoice(BaseModel):
     index: int
+    message: AssistantMessage
     finish_reason: FinishReason
-    message: Optional[AssistantMessage] = None
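With 1.0.2 the `message` field on `ChatCompletionChoice` is required instead of optional, so callers no longer need a `None` guard before reading it. A minimal sketch of the difference (the `AssistantMessage(content=...)` constructor comes from the SDK's generated models and is assumed here, not shown in this hunk):

```python
from mistralai.models import AssistantMessage, ChatCompletionChoice

choice = ChatCompletionChoice(
    index=0,
    message=AssistantMessage(content="Hello!"),  # required in 1.0.2
    finish_reason="stop",
)
print(choice.message.content)  # previously Optional, now always present
```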
mistralai/models/chatcompletionrequest.py
CHANGED

@@ -14,11 +14,25 @@ from typing import List, Literal, Optional, TypedDict, Union
 from typing_extensions import Annotated, NotRequired


+StopTypedDict = Union[str, List[str]]
+r"""Stop generation if this token is detected. Or if one of these tokens is detected when providing an array"""
+
+
+Stop = Union[str, List[str]]
+r"""Stop generation if this token is detected. Or if one of these tokens is detected when providing an array"""
+
+
+MessagesTypedDict = Union[SystemMessageTypedDict, UserMessageTypedDict, AssistantMessageTypedDict, ToolMessageTypedDict]
+
+
+Messages = Annotated[Union[Annotated[AssistantMessage, Tag("assistant")], Annotated[SystemMessage, Tag("system")], Annotated[ToolMessage, Tag("tool")], Annotated[UserMessage, Tag("user")]], Discriminator(lambda m: get_discriminator(m, "role", "role"))]
+
+
 ToolChoice = Literal["auto", "none", "any"]

 class ChatCompletionRequestTypedDict(TypedDict):
     model: Nullable[str]
-    r"""ID of the model to use. You can use the [List Available Models](/api
+    r"""ID of the model to use. You can use the [List Available Models](/api/#tag/models/operation/list_models_v1_models_get) API to see all of your available models, or see our [Model overview](/models) for model descriptions."""
     messages: List[MessagesTypedDict]
     r"""The prompt(s) to generate completions for, encoded as a list of dict with role and content."""
     temperature: NotRequired[float]

@@ -44,7 +58,7 @@ class ChatCompletionRequestTypedDict(TypedDict):

 class ChatCompletionRequest(BaseModel):
     model: Nullable[str]
-    r"""ID of the model to use. You can use the [List Available Models](/api
+    r"""ID of the model to use. You can use the [List Available Models](/api/#tag/models/operation/list_models_v1_models_get) API to see all of your available models, or see our [Model overview](/models) for model descriptions."""
     messages: List[Messages]
     r"""The prompt(s) to generate completions for, encoded as a list of dict with role and content."""
     temperature: Optional[float] = 0.7

@@ -93,17 +107,3 @@ class ChatCompletionRequest(BaseModel):

         return m

-
-StopTypedDict = Union[str, List[str]]
-r"""Stop generation if this token is detected. Or if one of these tokens is detected when providing an array"""
-
-
-Stop = Union[str, List[str]]
-r"""Stop generation if this token is detected. Or if one of these tokens is detected when providing an array"""
-
-
-MessagesTypedDict = Union[SystemMessageTypedDict, UserMessageTypedDict, AssistantMessageTypedDict, ToolMessageTypedDict]
-
-
-Messages = Annotated[Union[Annotated[AssistantMessage, Tag("assistant")], Annotated[SystemMessage, Tag("system")], Annotated[ToolMessage, Tag("tool")], Annotated[UserMessage, Tag("user")]], Discriminator(lambda m: get_discriminator(m, "role", "role"))]
-
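The `Stop` and `Messages` aliases now sit above the request classes instead of after them; the accepted values are unchanged, so `stop` still takes either a single token or a list. A hedged usage sketch (the `stop` field and the `UserMessage(content=...)` constructor are assumed from the rest of the SDK rather than shown in this hunk; the model ID is a placeholder):

```python
from mistralai.models import ChatCompletionRequest, UserMessage

req = ChatCompletionRequest(
    model="mistral-small-latest",              # placeholder model ID
    messages=[UserMessage(content="Say hi")],
    stop=["</s>", "\n\n"],                     # Stop = Union[str, List[str]]
)
```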
mistralai/models/chatcompletionstreamrequest.py
CHANGED

@@ -14,11 +14,25 @@ from typing import List, Literal, Optional, TypedDict, Union
 from typing_extensions import Annotated, NotRequired


+ChatCompletionStreamRequestStopTypedDict = Union[str, List[str]]
+r"""Stop generation if this token is detected. Or if one of these tokens is detected when providing an array"""
+
+
+ChatCompletionStreamRequestStop = Union[str, List[str]]
+r"""Stop generation if this token is detected. Or if one of these tokens is detected when providing an array"""
+
+
+ChatCompletionStreamRequestMessagesTypedDict = Union[SystemMessageTypedDict, UserMessageTypedDict, AssistantMessageTypedDict, ToolMessageTypedDict]
+
+
+ChatCompletionStreamRequestMessages = Annotated[Union[Annotated[AssistantMessage, Tag("assistant")], Annotated[SystemMessage, Tag("system")], Annotated[ToolMessage, Tag("tool")], Annotated[UserMessage, Tag("user")]], Discriminator(lambda m: get_discriminator(m, "role", "role"))]
+
+
 ChatCompletionStreamRequestToolChoice = Literal["auto", "none", "any"]

 class ChatCompletionStreamRequestTypedDict(TypedDict):
     model: Nullable[str]
-    r"""ID of the model to use. You can use the [List Available Models](/api
+    r"""ID of the model to use. You can use the [List Available Models](/api/#tag/models/operation/list_models_v1_models_get) API to see all of your available models, or see our [Model overview](/models) for model descriptions."""
     messages: List[ChatCompletionStreamRequestMessagesTypedDict]
     r"""The prompt(s) to generate completions for, encoded as a list of dict with role and content."""
     temperature: NotRequired[float]

@@ -43,7 +57,7 @@ class ChatCompletionStreamRequestTypedDict(TypedDict):

 class ChatCompletionStreamRequest(BaseModel):
     model: Nullable[str]
-    r"""ID of the model to use. You can use the [List Available Models](/api
+    r"""ID of the model to use. You can use the [List Available Models](/api/#tag/models/operation/list_models_v1_models_get) API to see all of your available models, or see our [Model overview](/models) for model descriptions."""
     messages: List[ChatCompletionStreamRequestMessages]
     r"""The prompt(s) to generate completions for, encoded as a list of dict with role and content."""
     temperature: Optional[float] = 0.7

@@ -91,17 +105,3 @@ class ChatCompletionStreamRequest(BaseModel):

         return m

-
-ChatCompletionStreamRequestStopTypedDict = Union[str, List[str]]
-r"""Stop generation if this token is detected. Or if one of these tokens is detected when providing an array"""
-
-
-ChatCompletionStreamRequestStop = Union[str, List[str]]
-r"""Stop generation if this token is detected. Or if one of these tokens is detected when providing an array"""
-
-
-ChatCompletionStreamRequestMessagesTypedDict = Union[SystemMessageTypedDict, UserMessageTypedDict, AssistantMessageTypedDict, ToolMessageTypedDict]
-
-
-ChatCompletionStreamRequestMessages = Annotated[Union[Annotated[AssistantMessage, Tag("assistant")], Annotated[SystemMessage, Tag("system")], Annotated[ToolMessage, Tag("tool")], Annotated[UserMessage, Tag("user")]], Discriminator(lambda m: get_discriminator(m, "role", "role"))]
-
mistralai/models/delete_model_v1_models_model_id_deleteop.py
CHANGED

@@ -9,8 +9,10 @@ from typing_extensions import Annotated

 class DeleteModelV1ModelsModelIDDeleteRequestTypedDict(TypedDict):
     model_id: str
+    r"""The ID of the model to delete."""


 class DeleteModelV1ModelsModelIDDeleteRequest(BaseModel):
     model_id: Annotated[str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False))]
+    r"""The ID of the model to delete."""
mistralai/models/deltamessage.py
CHANGED

@@ -4,25 +4,25 @@ from __future__ import annotations
 from .toolcall import ToolCall, ToolCallTypedDict
 from mistralai.types import BaseModel, Nullable, OptionalNullable, UNSET, UNSET_SENTINEL
 from pydantic import model_serializer
-from typing import Optional, TypedDict
+from typing import List, Optional, TypedDict
 from typing_extensions import NotRequired


 class DeltaMessageTypedDict(TypedDict):
     role: NotRequired[str]
-    content: NotRequired[str]
-    tool_calls: NotRequired[Nullable[ToolCallTypedDict]]
+    content: NotRequired[Nullable[str]]
+    tool_calls: NotRequired[Nullable[List[ToolCallTypedDict]]]


 class DeltaMessage(BaseModel):
     role: Optional[str] = None
-    content:
-    tool_calls: OptionalNullable[ToolCall] = UNSET
+    content: OptionalNullable[str] = UNSET
+    tool_calls: OptionalNullable[List[ToolCall]] = UNSET

     @model_serializer(mode="wrap")
     def serialize_model(self, handler):
         optional_fields = ["role", "content", "tool_calls"]
-        nullable_fields = ["tool_calls"]
+        nullable_fields = ["content", "tool_calls"]
         null_default_fields = []

         serialized = handler(self)
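In 1.0.2 a streamed `DeltaMessage` may carry a null `content` and a list of tool calls (previously a single `ToolCall`), so stream consumers should guard the types before using them. A minimal sketch, assuming the usual `ToolCall.function.name` access path from the SDK's tool-call models (not shown in this hunk):

```python
from mistralai.models import DeltaMessage

def handle_delta(delta: DeltaMessage) -> None:
    # content is OptionalNullable[str] in 1.0.2: it can be a str, None, or unset
    if isinstance(delta.content, str):
        print(delta.content, end="")
    # tool_calls is OptionalNullable[List[ToolCall]]: a list now, not a single ToolCall
    if isinstance(delta.tool_calls, list):
        for call in delta.tool_calls:
            print(f"\n[tool call] {call.function.name}")
```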
mistralai/models/detailedjobout.py
CHANGED

@@ -17,6 +17,20 @@ from typing_extensions import Annotated, NotRequired

 DetailedJobOutStatus = Literal["QUEUED", "STARTED", "VALIDATING", "VALIDATED", "RUNNING", "FAILED_VALIDATION", "FAILED", "SUCCESS", "CANCELLED", "CANCELLATION_REQUESTED"]

+DetailedJobOutObject = Literal["job"]
+
+DetailedJobOutIntegrationsTypedDict = WandbIntegrationOutTypedDict
+
+
+DetailedJobOutIntegrations = WandbIntegrationOut
+
+
+DetailedJobOutRepositoriesTypedDict = GithubRepositoryOutTypedDict
+
+
+DetailedJobOutRepositories = GithubRepositoryOut
+
+
 class DetailedJobOutTypedDict(TypedDict):
     id: str
     auto_start: bool

@@ -31,9 +45,9 @@ class DetailedJobOutTypedDict(TypedDict):
     validation_files: NotRequired[Nullable[List[str]]]
     fine_tuned_model: NotRequired[Nullable[str]]
     suffix: NotRequired[Nullable[str]]
-    integrations: NotRequired[Nullable[List[
+    integrations: NotRequired[Nullable[List[DetailedJobOutIntegrationsTypedDict]]]
     trained_tokens: NotRequired[Nullable[int]]
-    repositories: NotRequired[List[
+    repositories: NotRequired[List[DetailedJobOutRepositoriesTypedDict]]
     metadata: NotRequired[Nullable[JobMetadataOutTypedDict]]
     events: NotRequired[List[EventOutTypedDict]]
     r"""Event items are created every time the status of a fine-tuning job changes. The timestamped list of all events is accessible here."""

@@ -52,12 +66,12 @@ class DetailedJobOut(BaseModel):
     modified_at: int
     training_files: List[str]
     validation_files: OptionalNullable[List[str]] = UNSET
-    OBJECT: Annotated[Final[Optional[
+    OBJECT: Annotated[Final[Optional[DetailedJobOutObject]], pydantic.Field(alias="object")] = "job" # type: ignore
     fine_tuned_model: OptionalNullable[str] = UNSET
     suffix: OptionalNullable[str] = UNSET
-    integrations: OptionalNullable[List[
+    integrations: OptionalNullable[List[DetailedJobOutIntegrations]] = UNSET
     trained_tokens: OptionalNullable[int] = UNSET
-    repositories: Optional[List[
+    repositories: Optional[List[DetailedJobOutRepositories]] = None
     metadata: OptionalNullable[JobMetadataOut] = UNSET
     events: Optional[List[EventOut]] = None
     r"""Event items are created every time the status of a fine-tuning job changes. The timestamped list of all events is accessible here."""
mistralai/models/embeddingrequest.py
CHANGED

@@ -8,6 +8,14 @@ from typing import List, TypedDict, Union
 from typing_extensions import Annotated, NotRequired


+InputsTypedDict = Union[str, List[str]]
+r"""Text to embed."""
+
+
+Inputs = Union[str, List[str]]
+r"""Text to embed."""
+
+
 class EmbeddingRequestTypedDict(TypedDict):
     inputs: InputsTypedDict
     r"""Text to embed."""

@@ -51,11 +59,3 @@ class EmbeddingRequest(BaseModel):

         return m

-
-InputsTypedDict = Union[str, List[str]]
-r"""Text to embed."""
-
-
-Inputs = Union[str, List[str]]
-r"""Text to embed."""
-
mistralai/models/files_api_routes_upload_fileop.py
CHANGED

@@ -2,13 +2,16 @@

 from __future__ import annotations
 import io
-from mistralai.types import BaseModel
-from mistralai.utils import FieldMetadata, MultipartFormMetadata
+from mistralai.types import BaseModel, UnrecognizedStr
+from mistralai.utils import FieldMetadata, MultipartFormMetadata, validate_open_enum
 import pydantic
-from
+from pydantic.functional_validators import PlainValidator
+from typing import Final, IO, Literal, Optional, TypedDict, Union
 from typing_extensions import Annotated, NotRequired


+FilesAPIRoutesUploadFilePurpose = Union[Literal["fine-tune"], UnrecognizedStr]
+
 class FileTypedDict(TypedDict):
     file_name: str
     content: Union[bytes, IO[bytes], io.BufferedReader]

@@ -47,5 +50,5 @@ class FilesAPIRoutesUploadFileMultiPartBodyParams(BaseModel):
     file=@path/to/your/file.jsonl
     ```
     """
-    PURPOSE: Annotated[Final[Optional[
+    PURPOSE: Annotated[Final[Annotated[Optional[FilesAPIRoutesUploadFilePurpose], PlainValidator(validate_open_enum(False))]], pydantic.Field(alias="purpose"), FieldMetadata(multipart=True)] = "fine-tune" # type: ignore
mistralai/models/fileschema.py
CHANGED

@@ -3,13 +3,18 @@
 from __future__ import annotations
 from .sampletype import SampleType
 from .source import Source
-from mistralai.types import BaseModel, Nullable, OptionalNullable, UNSET, UNSET_SENTINEL
+from mistralai.types import BaseModel, Nullable, OptionalNullable, UNSET, UNSET_SENTINEL, UnrecognizedStr
+from mistralai.utils import validate_open_enum
 import pydantic
 from pydantic import model_serializer
-from
+from pydantic.functional_validators import PlainValidator
+from typing import Final, Literal, TypedDict, Union
 from typing_extensions import Annotated, NotRequired


+FileSchemaPurpose = Union[Literal["fine-tune"], UnrecognizedStr]
+r"""The intended purpose of the uploaded file. Only accepts fine-tuning (`fine-tune`) for now."""
+
 class FileSchemaTypedDict(TypedDict):
     id: str
     r"""The unique identifier of the file."""

@@ -39,7 +44,7 @@ class FileSchema(BaseModel):
     r"""The name of the uploaded file."""
     sample_type: SampleType
     source: Source
-    PURPOSE: Annotated[Final[
+    PURPOSE: Annotated[Final[Annotated[FileSchemaPurpose, PlainValidator(validate_open_enum(False))]], pydantic.Field(alias="purpose")] = "fine-tune" # type: ignore
     r"""The intended purpose of the uploaded file. Only accepts fine-tuning (`fine-tune`) for now."""
     num_lines: OptionalNullable[int] = UNSET
mistralai/models/fimcompletionrequest.py
CHANGED

@@ -7,6 +7,14 @@ from typing import List, Optional, TypedDict, Union
 from typing_extensions import NotRequired


+FIMCompletionRequestStopTypedDict = Union[str, List[str]]
+r"""Stop generation if this token is detected. Or if one of these tokens is detected when providing an array"""
+
+
+FIMCompletionRequestStop = Union[str, List[str]]
+r"""Stop generation if this token is detected. Or if one of these tokens is detected when providing an array"""
+
+
 class FIMCompletionRequestTypedDict(TypedDict):
     model: Nullable[str]
     r"""ID of the model to use. Only compatible for now with:

@@ -84,11 +92,3 @@ class FIMCompletionRequest(BaseModel):

         return m

-
-FIMCompletionRequestStopTypedDict = Union[str, List[str]]
-r"""Stop generation if this token is detected. Or if one of these tokens is detected when providing an array"""
-
-
-FIMCompletionRequestStop = Union[str, List[str]]
-r"""Stop generation if this token is detected. Or if one of these tokens is detected when providing an array"""
-
mistralai/models/fimcompletionstreamrequest.py
CHANGED

@@ -7,6 +7,14 @@ from typing import List, Optional, TypedDict, Union
 from typing_extensions import NotRequired


+FIMCompletionStreamRequestStopTypedDict = Union[str, List[str]]
+r"""Stop generation if this token is detected. Or if one of these tokens is detected when providing an array"""
+
+
+FIMCompletionStreamRequestStop = Union[str, List[str]]
+r"""Stop generation if this token is detected. Or if one of these tokens is detected when providing an array"""
+
+
 class FIMCompletionStreamRequestTypedDict(TypedDict):
     model: Nullable[str]
     r"""ID of the model to use. Only compatible for now with:

@@ -82,11 +90,3 @@ class FIMCompletionStreamRequest(BaseModel):

         return m

-
-FIMCompletionStreamRequestStopTypedDict = Union[str, List[str]]
-r"""Stop generation if this token is detected. Or if one of these tokens is detected when providing an array"""
-
-
-FIMCompletionStreamRequestStop = Union[str, List[str]]
-r"""Stop generation if this token is detected. Or if one of these tokens is detected when providing an array"""
-
mistralai/models/ftmodelout.py
CHANGED

@@ -5,10 +5,12 @@ from .ftmodelcapabilitiesout import FTModelCapabilitiesOut, FTModelCapabilitiesO
 from mistralai.types import BaseModel, Nullable, OptionalNullable, UNSET, UNSET_SENTINEL
 import pydantic
 from pydantic import model_serializer
-from typing import Final, List, Optional, TypedDict
+from typing import Final, List, Literal, Optional, TypedDict
 from typing_extensions import Annotated, NotRequired


+FTModelOutObject = Literal["model"]
+
 class FTModelOutTypedDict(TypedDict):
     id: str
     created: int

@@ -31,7 +33,7 @@ class FTModelOut(BaseModel):
     archived: bool
     capabilities: FTModelCapabilitiesOut
     job: str
-    OBJECT: Annotated[Final[Optional[
+    OBJECT: Annotated[Final[Optional[FTModelOutObject]], pydantic.Field(alias="object")] = "model" # type: ignore
     name: OptionalNullable[str] = UNSET
     description: OptionalNullable[str] = UNSET
     max_context_length: Optional[int] = 32768
mistralai/models/functioncall.py
CHANGED

@@ -2,15 +2,21 @@

 from __future__ import annotations
 from mistralai.types import BaseModel
-from typing import TypedDict
+from typing import Any, Dict, TypedDict, Union
+
+
+ArgumentsTypedDict = Union[Dict[str, Any], str]
+
+
+Arguments = Union[Dict[str, Any], str]


 class FunctionCallTypedDict(TypedDict):
     name: str
-    arguments:
+    arguments: ArgumentsTypedDict


 class FunctionCall(BaseModel):
     name: str
-    arguments:
+    arguments: Arguments
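`FunctionCall.arguments` is now `Union[Dict[str, Any], str]`, so code that unconditionally called `json.loads` on it should branch on the type first. A minimal sketch of that guard (the tool name is a placeholder):

```python
import json
from typing import Any, Dict

from mistralai.models import FunctionCall

def parsed_arguments(call: FunctionCall) -> Dict[str, Any]:
    # arguments may arrive as an already-parsed dict or as a JSON string
    if isinstance(call.arguments, str):
        return json.loads(call.arguments)
    return call.arguments

call = FunctionCall(name="get_weather", arguments='{"city": "Paris"}')  # placeholder tool name
print(parsed_arguments(call)["city"])  # -> Paris
```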
mistralai/models/githubrepositoryin.py
CHANGED

@@ -4,10 +4,12 @@ from __future__ import annotations
 from mistralai.types import BaseModel, Nullable, OptionalNullable, UNSET, UNSET_SENTINEL
 import pydantic
 from pydantic import model_serializer
-from typing import Final, Optional, TypedDict
+from typing import Final, Literal, Optional, TypedDict
 from typing_extensions import Annotated, NotRequired


+GithubRepositoryInType = Literal["github"]
+
 class GithubRepositoryInTypedDict(TypedDict):
     name: str
     owner: str

@@ -20,7 +22,7 @@ class GithubRepositoryIn(BaseModel):
     name: str
     owner: str
     token: str
-    TYPE: Annotated[Final[Optional[
+    TYPE: Annotated[Final[Optional[GithubRepositoryInType]], pydantic.Field(alias="type")] = "github" # type: ignore
     ref: OptionalNullable[str] = UNSET
     weight: Optional[float] = 1
mistralai/models/githubrepositoryout.py
CHANGED

@@ -4,10 +4,12 @@ from __future__ import annotations
 from mistralai.types import BaseModel, Nullable, OptionalNullable, UNSET, UNSET_SENTINEL
 import pydantic
 from pydantic import model_serializer
-from typing import Final, Optional, TypedDict
+from typing import Final, Literal, Optional, TypedDict
 from typing_extensions import Annotated, NotRequired


+GithubRepositoryOutType = Literal["github"]
+
 class GithubRepositoryOutTypedDict(TypedDict):
     name: str
     owner: str

@@ -20,7 +22,7 @@ class GithubRepositoryOut(BaseModel):
     name: str
     owner: str
     commit_id: str
-    TYPE: Annotated[Final[Optional[
+    TYPE: Annotated[Final[Optional[GithubRepositoryOutType]], pydantic.Field(alias="type")] = "github" # type: ignore
     ref: OptionalNullable[str] = UNSET
     weight: Optional[float] = 1
mistralai/models/jobin.py
CHANGED

@@ -12,6 +12,18 @@ from typing import List, Optional, TypedDict
 from typing_extensions import NotRequired


+JobInIntegrationsTypedDict = WandbIntegrationTypedDict
+
+
+JobInIntegrations = WandbIntegration
+
+
+JobInRepositoriesTypedDict = GithubRepositoryInTypedDict
+
+
+JobInRepositories = GithubRepositoryIn
+
+
 class JobInTypedDict(TypedDict):
     model: FineTuneableModel
     r"""The name of the model to fine-tune."""

@@ -22,9 +34,9 @@ class JobInTypedDict(TypedDict):
     r"""A list containing the IDs of uploaded files that contain validation data. If you provide these files, the data is used to generate validation metrics periodically during fine-tuning. These metrics can be viewed in `checkpoints` when getting the status of a running fine-tuning job. The same data should not be present in both train and validation files."""
     suffix: NotRequired[Nullable[str]]
     r"""A string that will be added to your fine-tuning model name. For example, a suffix of \"my-great-model\" would produce a model name like `ft:open-mistral-7b:my-great-model:xxx...`"""
-    integrations: NotRequired[Nullable[List[
+    integrations: NotRequired[Nullable[List[JobInIntegrationsTypedDict]]]
     r"""A list of integrations to enable for your fine-tuning job."""
-    repositories: NotRequired[List[
+    repositories: NotRequired[List[JobInRepositoriesTypedDict]]
     auto_start: NotRequired[bool]
     r"""This field will be required in a future release."""

@@ -39,9 +51,9 @@ class JobIn(BaseModel):
     r"""A list containing the IDs of uploaded files that contain validation data. If you provide these files, the data is used to generate validation metrics periodically during fine-tuning. These metrics can be viewed in `checkpoints` when getting the status of a running fine-tuning job. The same data should not be present in both train and validation files."""
     suffix: OptionalNullable[str] = UNSET
     r"""A string that will be added to your fine-tuning model name. For example, a suffix of \"my-great-model\" would produce a model name like `ft:open-mistral-7b:my-great-model:xxx...`"""
-    integrations: OptionalNullable[List[
+    integrations: OptionalNullable[List[JobInIntegrations]] = UNSET
     r"""A list of integrations to enable for your fine-tuning job."""
-    repositories: Optional[List[
+    repositories: Optional[List[JobInRepositories]] = None
     auto_start: Optional[bool] = None
     r"""This field will be required in a future release."""
mistralai/models/jobout.py
CHANGED

@@ -16,6 +16,21 @@ from typing_extensions import Annotated, NotRequired
 Status = Literal["QUEUED", "STARTED", "VALIDATING", "VALIDATED", "RUNNING", "FAILED_VALIDATION", "FAILED", "SUCCESS", "CANCELLED", "CANCELLATION_REQUESTED"]
 r"""The current status of the fine-tuning job."""

+Object = Literal["job"]
+r"""The object type of the fine-tuning job."""
+
+IntegrationsTypedDict = WandbIntegrationOutTypedDict
+
+
+Integrations = WandbIntegrationOut
+
+
+RepositoriesTypedDict = GithubRepositoryOutTypedDict
+
+
+Repositories = GithubRepositoryOut
+
+
 class JobOutTypedDict(TypedDict):
     id: str
     r"""The ID of the job."""

@@ -39,11 +54,11 @@ class JobOutTypedDict(TypedDict):
     r"""The name of the fine-tuned model that is being created. The value will be `null` if the fine-tuning job is still running."""
     suffix: NotRequired[Nullable[str]]
     r"""Optional text/code that adds more context for the model. When given a `prompt` and a `suffix` the model will fill what is between them. When `suffix` is not provided, the model will simply execute completion starting with `prompt`."""
-    integrations: NotRequired[Nullable[List[
+    integrations: NotRequired[Nullable[List[IntegrationsTypedDict]]]
     r"""A list of integrations enabled for your fine-tuning job."""
     trained_tokens: NotRequired[Nullable[int]]
     r"""Total number of tokens trained."""
-    repositories: NotRequired[List[
+    repositories: NotRequired[List[RepositoriesTypedDict]]
     metadata: NotRequired[Nullable[JobMetadataOutTypedDict]]


@@ -66,17 +81,17 @@ class JobOut(BaseModel):
     r"""A list containing the IDs of uploaded files that contain training data."""
     validation_files: OptionalNullable[List[str]] = UNSET
     r"""A list containing the IDs of uploaded files that contain validation data."""
-    OBJECT: Annotated[Final[Optional[
+    OBJECT: Annotated[Final[Optional[Object]], pydantic.Field(alias="object")] = "job" # type: ignore
     r"""The object type of the fine-tuning job."""
     fine_tuned_model: OptionalNullable[str] = UNSET
     r"""The name of the fine-tuned model that is being created. The value will be `null` if the fine-tuning job is still running."""
     suffix: OptionalNullable[str] = UNSET
     r"""Optional text/code that adds more context for the model. When given a `prompt` and a `suffix` the model will fill what is between them. When `suffix` is not provided, the model will simply execute completion starting with `prompt`."""
-    integrations: OptionalNullable[List[
+    integrations: OptionalNullable[List[Integrations]] = UNSET
     r"""A list of integrations enabled for your fine-tuning job."""
     trained_tokens: OptionalNullable[int] = UNSET
     r"""Total number of tokens trained."""
-    repositories: Optional[List[
+    repositories: Optional[List[Repositories]] = None
     metadata: OptionalNullable[JobMetadataOut] = UNSET

     @model_serializer(mode="wrap")
mistralai/models/jobs_api_routes_fine_tuning_archive_fine_tuned_modelop.py
CHANGED

@@ -9,8 +9,10 @@ from typing_extensions import Annotated

 class JobsAPIRoutesFineTuningArchiveFineTunedModelRequestTypedDict(TypedDict):
     model_id: str
+    r"""The ID of the model to archive."""


 class JobsAPIRoutesFineTuningArchiveFineTunedModelRequest(BaseModel):
     model_id: Annotated[str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False))]
+    r"""The ID of the model to archive."""