mistralai 1.0.0rc2__py3-none-any.whl → 1.0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mistralai/agents.py +2 -2
- mistralai/client.py +1 -1
- mistralai/jobs.py +20 -30
- mistralai/models/__init__.py +3 -3
- mistralai/models/agentscompletionrequest.py +14 -14
- mistralai/models/agentscompletionstreamrequest.py +8 -8
- mistralai/models/chatcompletionrequest.py +14 -14
- mistralai/models/chatcompletionstreamrequest.py +14 -14
- mistralai/models/delete_model_v1_models_model_id_deleteop.py +2 -0
- mistralai/models/deltamessage.py +3 -3
- mistralai/models/embeddingrequest.py +8 -8
- mistralai/models/fimcompletionrequest.py +8 -8
- mistralai/models/fimcompletionstreamrequest.py +8 -8
- mistralai/models/functioncall.py +9 -3
- mistralai/models/jobs_api_routes_fine_tuning_archive_fine_tuned_modelop.py +2 -0
- mistralai/models/jobs_api_routes_fine_tuning_create_fine_tuning_jobop.py +1 -54
- mistralai/models/jobs_api_routes_fine_tuning_unarchive_fine_tuned_modelop.py +2 -0
- mistralai/models/jobs_api_routes_fine_tuning_update_fine_tuned_modelop.py +2 -0
- mistralai/models/retrieve_model_v1_models_model_id_getop.py +2 -0
- mistralai/models/systemmessage.py +6 -6
- mistralai/models/usermessage.py +6 -6
- mistralai/models/validationerror.py +6 -6
- mistralai/models_.py +10 -10
- mistralai/sdkconfiguration.py +3 -3
- {mistralai-1.0.0rc2.dist-info → mistralai-1.0.1.dist-info}/METADATA +36 -17
- {mistralai-1.0.0rc2.dist-info → mistralai-1.0.1.dist-info}/RECORD +48 -48
- mistralai_azure/models/__init__.py +2 -2
- mistralai_azure/models/chatcompletionrequest.py +14 -14
- mistralai_azure/models/chatcompletionstreamrequest.py +14 -14
- mistralai_azure/models/deltamessage.py +3 -3
- mistralai_azure/models/functioncall.py +9 -3
- mistralai_azure/models/systemmessage.py +6 -6
- mistralai_azure/models/usermessage.py +6 -6
- mistralai_azure/models/validationerror.py +6 -6
- mistralai_azure/sdkconfiguration.py +3 -3
- mistralai_gcp/models/__init__.py +2 -2
- mistralai_gcp/models/chatcompletionrequest.py +14 -14
- mistralai_gcp/models/chatcompletionstreamrequest.py +14 -14
- mistralai_gcp/models/deltamessage.py +3 -3
- mistralai_gcp/models/fimcompletionrequest.py +8 -8
- mistralai_gcp/models/fimcompletionstreamrequest.py +8 -8
- mistralai_gcp/models/functioncall.py +9 -3
- mistralai_gcp/models/systemmessage.py +6 -6
- mistralai_gcp/models/usermessage.py +6 -6
- mistralai_gcp/models/validationerror.py +6 -6
- mistralai_gcp/sdkconfiguration.py +3 -3
- {mistralai-1.0.0rc2.dist-info → mistralai-1.0.1.dist-info}/LICENSE +0 -0
- {mistralai-1.0.0rc2.dist-info → mistralai-1.0.1.dist-info}/WHEEL +0 -0
mistralai/agents.py
CHANGED
@@ -27,7 +27,7 @@ class Agents(BaseSDK):
         server_url: Optional[str] = None,
         timeout_ms: Optional[int] = None,
     ) -> Optional[models.ChatCompletionResponse]:
-        r"""
+        r"""Agents Completion
 
         :param messages: The prompt(s) to generate completions for, encoded as a list of dict with role and content.
         :param agent_id: The ID of the agent to use for this completion.
@@ -131,7 +131,7 @@ class Agents(BaseSDK):
         server_url: Optional[str] = None,
         timeout_ms: Optional[int] = None,
     ) -> Optional[models.ChatCompletionResponse]:
-        r"""
+        r"""Agents Completion
 
         :param messages: The prompt(s) to generate completions for, encoded as a list of dict with role and content.
        :param agent_id: The ID of the agent to use for this completion.
mistralai/client.py
CHANGED
@@ -1,6 +1,6 @@
 from typing import Optional
 
-MIGRATION_MESSAGE = "This client is deprecated
+MIGRATION_MESSAGE = "This client is deprecated. To migrate to the new client, please refer to this guide: https://github.com/mistralai/client-python/blob/main/MIGRATION.md. If you need to use this client anyway, pin your version to 0.4.2."
 
 
 class MistralClient:
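The new `MIGRATION_MESSAGE` points users of the legacy `MistralClient` at the migration guide for the 1.x client. For orientation only, here is a minimal sketch of the replacement client; the `Mistral` class and `chat.complete` call path follow the linked migration guide rather than this diff, and the model name is a placeholder:

```python
# Minimal sketch of the 1.x client that replaces the deprecated MistralClient.
# The call path is an assumption based on the linked migration guide.
import os

from mistralai import Mistral

client = Mistral(api_key=os.environ["MISTRAL_API_KEY"])

response = client.chat.complete(
    model="mistral-small-latest",  # placeholder model name
    messages=[{"role": "user", "content": "Hello!"}],
)
print(response.choices[0].message.content)
```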
mistralai/jobs.py
CHANGED
@@ -211,7 +211,6 @@ class Jobs(BaseSDK):
         self, *,
         model: models.FineTuneableModel,
         hyperparameters: Union[models.TrainingParametersIn, models.TrainingParametersInTypedDict],
-        dry_run: OptionalNullable[bool] = UNSET,
         training_files: Optional[Union[List[models.TrainingFile], List[models.TrainingFileTypedDict]]] = None,
         validation_files: OptionalNullable[List[str]] = UNSET,
         suffix: OptionalNullable[str] = UNSET,
@@ -228,7 +227,6 @@ class Jobs(BaseSDK):
 
         :param model: The name of the model to fine-tune.
         :param hyperparameters: The fine-tuning hyperparameter settings used in a fine-tune job.
-        :param dry_run: * If `true` the job is not spawned, instead the query returns a handful of useful metadata for the user to perform sanity checks (see `LegacyJobMetadataOut` response). * Otherwise, the job is started and the query returns the job ID along with some of the input parameters (see `JobOut` response).
         :param training_files:
         :param validation_files: A list containing the IDs of uploaded files that contain validation data. If you provide these files, the data is used to generate validation metrics periodically during fine-tuning. These metrics can be viewed in `checkpoints` when getting the status of a running fine-tuning job. The same data should not be present in both train and validation files.
         :param suffix: A string that will be added to your fine-tuning model name. For example, a suffix of \"my-great-model\" would produce a model name like `ft:open-mistral-7b:my-great-model:xxx...`
@@ -247,18 +245,15 @@ class Jobs(BaseSDK):
         if server_url is not None:
             base_url = server_url
 
-        request = models.
-
-
-
-
-
-
-
-
-                repositories=utils.get_pydantic_model(repositories, Optional[List[models.GithubRepositoryIn]]),
-                auto_start=auto_start,
-            ),
+        request = models.JobIn(
+            model=model,
+            training_files=utils.get_pydantic_model(training_files, Optional[List[models.TrainingFile]]),
+            validation_files=validation_files,
+            hyperparameters=utils.get_pydantic_model(hyperparameters, models.TrainingParametersIn),
+            suffix=suffix,
+            integrations=utils.get_pydantic_model(integrations, OptionalNullable[List[models.WandbIntegration]]),
+            repositories=utils.get_pydantic_model(repositories, Optional[List[models.GithubRepositoryIn]]),
+            auto_start=auto_start,
         )
 
         req = self.build_request(
@@ -273,7 +268,7 @@ class Jobs(BaseSDK):
             user_agent_header="user-agent",
             accept_header_value="application/json",
             security=self.sdk_configuration.security,
-            get_serialized_body=lambda: utils.serialize_request_body(request
+            get_serialized_body=lambda: utils.serialize_request_body(request, False, False, "json", models.JobIn),
             timeout_ms=timeout_ms,
         )
 
@@ -312,7 +307,6 @@ class Jobs(BaseSDK):
         self, *,
         model: models.FineTuneableModel,
         hyperparameters: Union[models.TrainingParametersIn, models.TrainingParametersInTypedDict],
-        dry_run: OptionalNullable[bool] = UNSET,
         training_files: Optional[Union[List[models.TrainingFile], List[models.TrainingFileTypedDict]]] = None,
         validation_files: OptionalNullable[List[str]] = UNSET,
         suffix: OptionalNullable[str] = UNSET,
@@ -329,7 +323,6 @@ class Jobs(BaseSDK):
 
         :param model: The name of the model to fine-tune.
         :param hyperparameters: The fine-tuning hyperparameter settings used in a fine-tune job.
-        :param dry_run: * If `true` the job is not spawned, instead the query returns a handful of useful metadata for the user to perform sanity checks (see `LegacyJobMetadataOut` response). * Otherwise, the job is started and the query returns the job ID along with some of the input parameters (see `JobOut` response).
         :param training_files:
         :param validation_files: A list containing the IDs of uploaded files that contain validation data. If you provide these files, the data is used to generate validation metrics periodically during fine-tuning. These metrics can be viewed in `checkpoints` when getting the status of a running fine-tuning job. The same data should not be present in both train and validation files.
         :param suffix: A string that will be added to your fine-tuning model name. For example, a suffix of \"my-great-model\" would produce a model name like `ft:open-mistral-7b:my-great-model:xxx...`
@@ -348,18 +341,15 @@ class Jobs(BaseSDK):
         if server_url is not None:
             base_url = server_url
 
-        request = models.
-
-
-
-
-
-
-
-
-                repositories=utils.get_pydantic_model(repositories, Optional[List[models.GithubRepositoryIn]]),
-                auto_start=auto_start,
-            ),
+        request = models.JobIn(
+            model=model,
+            training_files=utils.get_pydantic_model(training_files, Optional[List[models.TrainingFile]]),
+            validation_files=validation_files,
+            hyperparameters=utils.get_pydantic_model(hyperparameters, models.TrainingParametersIn),
+            suffix=suffix,
+            integrations=utils.get_pydantic_model(integrations, OptionalNullable[List[models.WandbIntegration]]),
+            repositories=utils.get_pydantic_model(repositories, Optional[List[models.GithubRepositoryIn]]),
+            auto_start=auto_start,
         )
 
         req = self.build_request(
@@ -374,7 +364,7 @@ class Jobs(BaseSDK):
             user_agent_header="user-agent",
             accept_header_value="application/json",
             security=self.sdk_configuration.security,
-            get_serialized_body=lambda: utils.serialize_request_body(request
+            get_serialized_body=lambda: utils.serialize_request_body(request, False, False, "json", models.JobIn),
             timeout_ms=timeout_ms,
        )
 
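In both the sync and async variants the `dry_run` parameter is removed and the request body is now serialized directly as `models.JobIn` instead of the old wrapper request model. A rough sketch of a 1.0.1 call, assuming this `Jobs` class is exposed as `client.fine_tuning.jobs`; the attribute path, file ID, and hyperparameter values are placeholders/assumptions rather than things shown in this diff:

```python
# Sketch only: assumes the Jobs class above is reachable as client.fine_tuning.jobs.
import os

from mistralai import Mistral

client = Mistral(api_key=os.environ["MISTRAL_API_KEY"])

# Keyword arguments now map straight onto models.JobIn; the dry_run flag
# accepted by 1.0.0rc2 is no longer a parameter of this method.
job = client.fine_tuning.jobs.create(
    model="open-mistral-7b",
    training_files=[{"file_id": "<uploaded-file-id>"}],  # placeholder file ID
    hyperparameters={"training_steps": 10, "learning_rate": 0.0001},  # illustrative values
    suffix="my-great-model",
    auto_start=False,
)
print(job)
```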
mistralai/models/__init__.py
CHANGED
@@ -33,7 +33,7 @@ from .finetuneablemodel import FineTuneableModel
 from .ftmodelcapabilitiesout import FTModelCapabilitiesOut, FTModelCapabilitiesOutTypedDict
 from .ftmodelout import FTModelOut, FTModelOutTypedDict
 from .function import Function, FunctionTypedDict
-from .functioncall import FunctionCall, FunctionCallTypedDict
+from .functioncall import Arguments, ArgumentsTypedDict, FunctionCall, FunctionCallTypedDict
 from .githubrepositoryin import GithubRepositoryIn, GithubRepositoryInTypedDict
 from .githubrepositoryout import GithubRepositoryOut, GithubRepositoryOutTypedDict
 from .httpvalidationerror import HTTPValidationError, HTTPValidationErrorData
@@ -42,7 +42,7 @@ from .jobmetadataout import JobMetadataOut, JobMetadataOutTypedDict
 from .jobout import JobOut, JobOutTypedDict, Status
 from .jobs_api_routes_fine_tuning_archive_fine_tuned_modelop import JobsAPIRoutesFineTuningArchiveFineTunedModelRequest, JobsAPIRoutesFineTuningArchiveFineTunedModelRequestTypedDict
 from .jobs_api_routes_fine_tuning_cancel_fine_tuning_jobop import JobsAPIRoutesFineTuningCancelFineTuningJobRequest, JobsAPIRoutesFineTuningCancelFineTuningJobRequestTypedDict
-from .jobs_api_routes_fine_tuning_create_fine_tuning_jobop import
+from .jobs_api_routes_fine_tuning_create_fine_tuning_jobop import JobsAPIRoutesFineTuningCreateFineTuningJobResponse, JobsAPIRoutesFineTuningCreateFineTuningJobResponseTypedDict
 from .jobs_api_routes_fine_tuning_get_fine_tuning_jobop import JobsAPIRoutesFineTuningGetFineTuningJobRequest, JobsAPIRoutesFineTuningGetFineTuningJobRequestTypedDict
 from .jobs_api_routes_fine_tuning_get_fine_tuning_jobsop import JobsAPIRoutesFineTuningGetFineTuningJobsRequest, JobsAPIRoutesFineTuningGetFineTuningJobsRequestTypedDict, QueryParamStatus
 from .jobs_api_routes_fine_tuning_start_fine_tuning_jobop import JobsAPIRoutesFineTuningStartFineTuningJobRequest, JobsAPIRoutesFineTuningStartFineTuningJobRequestTypedDict
@@ -79,4 +79,4 @@ from .validationerror import Loc, LocTypedDict, ValidationError, ValidationError
 from .wandbintegration import WandbIntegration, WandbIntegrationTypedDict
 from .wandbintegrationout import WandbIntegrationOut, WandbIntegrationOutTypedDict
 
-__all__ = ["AgentsCompletionRequest", "AgentsCompletionRequestMessages", "AgentsCompletionRequestMessagesTypedDict", "AgentsCompletionRequestStop", "AgentsCompletionRequestStopTypedDict", "AgentsCompletionRequestToolChoice", "AgentsCompletionRequestTypedDict", "AgentsCompletionStreamRequest", "AgentsCompletionStreamRequestStop", "AgentsCompletionStreamRequestStopTypedDict", "AgentsCompletionStreamRequestTypedDict", "ArchiveFTModelOut", "ArchiveFTModelOutTypedDict", "AssistantMessage", "AssistantMessageRole", "AssistantMessageTypedDict", "ChatCompletionChoice", "ChatCompletionChoiceTypedDict", "ChatCompletionRequest", "ChatCompletionRequestTypedDict", "ChatCompletionResponse", "ChatCompletionResponseTypedDict", "ChatCompletionStreamRequest", "ChatCompletionStreamRequestMessages", "ChatCompletionStreamRequestMessagesTypedDict", "ChatCompletionStreamRequestStop", "ChatCompletionStreamRequestStopTypedDict", "ChatCompletionStreamRequestToolChoice", "ChatCompletionStreamRequestTypedDict", "CheckpointOut", "CheckpointOutTypedDict", "CompletionChunk", "CompletionChunkTypedDict", "CompletionEvent", "CompletionEventTypedDict", "CompletionResponseStreamChoice", "CompletionResponseStreamChoiceFinishReason", "CompletionResponseStreamChoiceTypedDict", "Content", "ContentChunk", "ContentChunkTypedDict", "ContentTypedDict", "DeleteFileOut", "DeleteFileOutTypedDict", "DeleteModelOut", "DeleteModelOutTypedDict", "DeleteModelV1ModelsModelIDDeleteRequest", "DeleteModelV1ModelsModelIDDeleteRequestTypedDict", "DeltaMessage", "DeltaMessageTypedDict", "DetailedJobOut", "DetailedJobOutStatus", "DetailedJobOutTypedDict", "EmbeddingRequest", "EmbeddingRequestTypedDict", "EmbeddingResponse", "EmbeddingResponseData", "EmbeddingResponseDataTypedDict", "EmbeddingResponseTypedDict", "EventOut", "EventOutTypedDict", "FIMCompletionRequest", "FIMCompletionRequestStop", "FIMCompletionRequestStopTypedDict", "FIMCompletionRequestTypedDict", "FIMCompletionResponse", "FIMCompletionResponseTypedDict", "FIMCompletionStreamRequest", "FIMCompletionStreamRequestStop", "FIMCompletionStreamRequestStopTypedDict", "FIMCompletionStreamRequestTypedDict", "FTModelCapabilitiesOut", "FTModelCapabilitiesOutTypedDict", "FTModelOut", "FTModelOutTypedDict", "File", "FileSchema", "FileSchemaTypedDict", "FileTypedDict", "FilesAPIRoutesDeleteFileRequest", "FilesAPIRoutesDeleteFileRequestTypedDict", "FilesAPIRoutesRetrieveFileRequest", "FilesAPIRoutesRetrieveFileRequestTypedDict", "FilesAPIRoutesUploadFileMultiPartBodyParams", "FilesAPIRoutesUploadFileMultiPartBodyParamsTypedDict", "FineTuneableModel", "FinishReason", "Function", "FunctionCall", "FunctionCallTypedDict", "FunctionTypedDict", "GithubRepositoryIn", "GithubRepositoryInTypedDict", "GithubRepositoryOut", "GithubRepositoryOutTypedDict", "HTTPValidationError", "HTTPValidationErrorData", "Inputs", "InputsTypedDict", "JobIn", "JobInTypedDict", "JobMetadataOut", "JobMetadataOutTypedDict", "JobOut", "JobOutTypedDict", "JobsAPIRoutesFineTuningArchiveFineTunedModelRequest", "JobsAPIRoutesFineTuningArchiveFineTunedModelRequestTypedDict", "JobsAPIRoutesFineTuningCancelFineTuningJobRequest", "JobsAPIRoutesFineTuningCancelFineTuningJobRequestTypedDict", "
+__all__ = ["AgentsCompletionRequest", "AgentsCompletionRequestMessages", "AgentsCompletionRequestMessagesTypedDict", "AgentsCompletionRequestStop", "AgentsCompletionRequestStopTypedDict", "AgentsCompletionRequestToolChoice", "AgentsCompletionRequestTypedDict", "AgentsCompletionStreamRequest", "AgentsCompletionStreamRequestStop", "AgentsCompletionStreamRequestStopTypedDict", "AgentsCompletionStreamRequestTypedDict", "ArchiveFTModelOut", "ArchiveFTModelOutTypedDict", "Arguments", "ArgumentsTypedDict", "AssistantMessage", "AssistantMessageRole", "AssistantMessageTypedDict", "ChatCompletionChoice", "ChatCompletionChoiceTypedDict", "ChatCompletionRequest", "ChatCompletionRequestTypedDict", "ChatCompletionResponse", "ChatCompletionResponseTypedDict", "ChatCompletionStreamRequest", "ChatCompletionStreamRequestMessages", "ChatCompletionStreamRequestMessagesTypedDict", "ChatCompletionStreamRequestStop", "ChatCompletionStreamRequestStopTypedDict", "ChatCompletionStreamRequestToolChoice", "ChatCompletionStreamRequestTypedDict", "CheckpointOut", "CheckpointOutTypedDict", "CompletionChunk", "CompletionChunkTypedDict", "CompletionEvent", "CompletionEventTypedDict", "CompletionResponseStreamChoice", "CompletionResponseStreamChoiceFinishReason", "CompletionResponseStreamChoiceTypedDict", "Content", "ContentChunk", "ContentChunkTypedDict", "ContentTypedDict", "DeleteFileOut", "DeleteFileOutTypedDict", "DeleteModelOut", "DeleteModelOutTypedDict", "DeleteModelV1ModelsModelIDDeleteRequest", "DeleteModelV1ModelsModelIDDeleteRequestTypedDict", "DeltaMessage", "DeltaMessageTypedDict", "DetailedJobOut", "DetailedJobOutStatus", "DetailedJobOutTypedDict", "EmbeddingRequest", "EmbeddingRequestTypedDict", "EmbeddingResponse", "EmbeddingResponseData", "EmbeddingResponseDataTypedDict", "EmbeddingResponseTypedDict", "EventOut", "EventOutTypedDict", "FIMCompletionRequest", "FIMCompletionRequestStop", "FIMCompletionRequestStopTypedDict", "FIMCompletionRequestTypedDict", "FIMCompletionResponse", "FIMCompletionResponseTypedDict", "FIMCompletionStreamRequest", "FIMCompletionStreamRequestStop", "FIMCompletionStreamRequestStopTypedDict", "FIMCompletionStreamRequestTypedDict", "FTModelCapabilitiesOut", "FTModelCapabilitiesOutTypedDict", "FTModelOut", "FTModelOutTypedDict", "File", "FileSchema", "FileSchemaTypedDict", "FileTypedDict", "FilesAPIRoutesDeleteFileRequest", "FilesAPIRoutesDeleteFileRequestTypedDict", "FilesAPIRoutesRetrieveFileRequest", "FilesAPIRoutesRetrieveFileRequestTypedDict", "FilesAPIRoutesUploadFileMultiPartBodyParams", "FilesAPIRoutesUploadFileMultiPartBodyParamsTypedDict", "FineTuneableModel", "FinishReason", "Function", "FunctionCall", "FunctionCallTypedDict", "FunctionTypedDict", "GithubRepositoryIn", "GithubRepositoryInTypedDict", "GithubRepositoryOut", "GithubRepositoryOutTypedDict", "HTTPValidationError", "HTTPValidationErrorData", "Inputs", "InputsTypedDict", "JobIn", "JobInTypedDict", "JobMetadataOut", "JobMetadataOutTypedDict", "JobOut", "JobOutTypedDict", "JobsAPIRoutesFineTuningArchiveFineTunedModelRequest", "JobsAPIRoutesFineTuningArchiveFineTunedModelRequestTypedDict", "JobsAPIRoutesFineTuningCancelFineTuningJobRequest", "JobsAPIRoutesFineTuningCancelFineTuningJobRequestTypedDict", "JobsAPIRoutesFineTuningCreateFineTuningJobResponse", "JobsAPIRoutesFineTuningCreateFineTuningJobResponseTypedDict", "JobsAPIRoutesFineTuningGetFineTuningJobRequest", "JobsAPIRoutesFineTuningGetFineTuningJobRequestTypedDict", "JobsAPIRoutesFineTuningGetFineTuningJobsRequest", "JobsAPIRoutesFineTuningGetFineTuningJobsRequestTypedDict", "JobsAPIRoutesFineTuningStartFineTuningJobRequest", "JobsAPIRoutesFineTuningStartFineTuningJobRequestTypedDict", "JobsAPIRoutesFineTuningUnarchiveFineTunedModelRequest", "JobsAPIRoutesFineTuningUnarchiveFineTunedModelRequestTypedDict", "JobsAPIRoutesFineTuningUpdateFineTunedModelRequest", "JobsAPIRoutesFineTuningUpdateFineTunedModelRequestTypedDict", "JobsOut", "JobsOutTypedDict", "LegacyJobMetadataOut", "LegacyJobMetadataOutTypedDict", "ListFilesOut", "ListFilesOutTypedDict", "Loc", "LocTypedDict", "Messages", "MessagesTypedDict", "MetricOut", "MetricOutTypedDict", "ModelCapabilities", "ModelCapabilitiesTypedDict", "ModelCard", "ModelCardTypedDict", "ModelList", "ModelListTypedDict", "QueryParamStatus", "ResponseFormat", "ResponseFormatTypedDict", "ResponseFormats", "RetrieveFileOut", "RetrieveFileOutTypedDict", "RetrieveModelV1ModelsModelIDGetRequest", "RetrieveModelV1ModelsModelIDGetRequestTypedDict", "Role", "SDKError", "SampleType", "Security", "SecurityTypedDict", "Source", "Status", "Stop", "StopTypedDict", "SystemMessage", "SystemMessageTypedDict", "TextChunk", "TextChunkTypedDict", "Tool", "ToolCall", "ToolCallTypedDict", "ToolChoice", "ToolMessage", "ToolMessageRole", "ToolMessageTypedDict", "ToolTypedDict", "TrainingFile", "TrainingFileTypedDict", "TrainingParameters", "TrainingParametersIn", "TrainingParametersInTypedDict", "TrainingParametersTypedDict", "UnarchiveFTModelOut", "UnarchiveFTModelOutTypedDict", "UpdateFTModelIn", "UpdateFTModelInTypedDict", "UploadFileOut", "UploadFileOutTypedDict", "UsageInfo", "UsageInfoTypedDict", "UserMessage", "UserMessageContent", "UserMessageContentTypedDict", "UserMessageRole", "UserMessageTypedDict", "ValidationError", "ValidationErrorTypedDict", "WandbIntegration", "WandbIntegrationOut", "WandbIntegrationOutTypedDict", "WandbIntegrationTypedDict"]
mistralai/models/agentscompletionrequest.py
CHANGED
@@ -13,6 +13,20 @@ from typing import List, Literal, Optional, TypedDict, Union
 from typing_extensions import Annotated, NotRequired
 
 
+AgentsCompletionRequestStopTypedDict = Union[str, List[str]]
+r"""Stop generation if this token is detected. Or if one of these tokens is detected when providing an array"""
+
+
+AgentsCompletionRequestStop = Union[str, List[str]]
+r"""Stop generation if this token is detected. Or if one of these tokens is detected when providing an array"""
+
+
+AgentsCompletionRequestMessagesTypedDict = Union[UserMessageTypedDict, AssistantMessageTypedDict, ToolMessageTypedDict]
+
+
+AgentsCompletionRequestMessages = Annotated[Union[Annotated[AssistantMessage, Tag("assistant")], Annotated[ToolMessage, Tag("tool")], Annotated[UserMessage, Tag("user")]], Discriminator(lambda m: get_discriminator(m, "role", "role"))]
+
+
 AgentsCompletionRequestToolChoice = Literal["auto", "none", "any"]
 
 class AgentsCompletionRequestTypedDict(TypedDict):
@@ -80,17 +94,3 @@ class AgentsCompletionRequest(BaseModel):
 
         return m
 
-
-AgentsCompletionRequestStopTypedDict = Union[str, List[str]]
-r"""Stop generation if this token is detected. Or if one of these tokens is detected when providing an array"""
-
-
-AgentsCompletionRequestStop = Union[str, List[str]]
-r"""Stop generation if this token is detected. Or if one of these tokens is detected when providing an array"""
-
-
-AgentsCompletionRequestMessagesTypedDict = Union[UserMessageTypedDict, AssistantMessageTypedDict, ToolMessageTypedDict]
-
-
-AgentsCompletionRequestMessages = Annotated[Union[Annotated[AssistantMessage, Tag("assistant")], Annotated[ToolMessage, Tag("tool")], Annotated[UserMessage, Tag("user")]], Discriminator(lambda m: get_discriminator(m, "role", "role"))]
-
mistralai/models/agentscompletionstreamrequest.py
CHANGED
@@ -7,6 +7,14 @@ from typing import List, Optional, TypedDict, Union
 from typing_extensions import NotRequired
 
 
+AgentsCompletionStreamRequestStopTypedDict = Union[str, List[str]]
+r"""Stop generation if this token is detected. Or if one of these tokens is detected when providing an array"""
+
+
+AgentsCompletionStreamRequestStop = Union[str, List[str]]
+r"""Stop generation if this token is detected. Or if one of these tokens is detected when providing an array"""
+
+
 class AgentsCompletionStreamRequestTypedDict(TypedDict):
     model: Nullable[str]
     r"""ID of the model to use. Only compatible for now with:
@@ -82,11 +90,3 @@ class AgentsCompletionStreamRequest(BaseModel):
 
         return m
 
-
-AgentsCompletionStreamRequestStopTypedDict = Union[str, List[str]]
-r"""Stop generation if this token is detected. Or if one of these tokens is detected when providing an array"""
-
-
-AgentsCompletionStreamRequestStop = Union[str, List[str]]
-r"""Stop generation if this token is detected. Or if one of these tokens is detected when providing an array"""
-
mistralai/models/chatcompletionrequest.py
CHANGED
@@ -14,6 +14,20 @@ from typing import List, Literal, Optional, TypedDict, Union
 from typing_extensions import Annotated, NotRequired
 
 
+StopTypedDict = Union[str, List[str]]
+r"""Stop generation if this token is detected. Or if one of these tokens is detected when providing an array"""
+
+
+Stop = Union[str, List[str]]
+r"""Stop generation if this token is detected. Or if one of these tokens is detected when providing an array"""
+
+
+MessagesTypedDict = Union[SystemMessageTypedDict, UserMessageTypedDict, AssistantMessageTypedDict, ToolMessageTypedDict]
+
+
+Messages = Annotated[Union[Annotated[AssistantMessage, Tag("assistant")], Annotated[SystemMessage, Tag("system")], Annotated[ToolMessage, Tag("tool")], Annotated[UserMessage, Tag("user")]], Discriminator(lambda m: get_discriminator(m, "role", "role"))]
+
+
 ToolChoice = Literal["auto", "none", "any"]
 
 class ChatCompletionRequestTypedDict(TypedDict):
@@ -93,17 +107,3 @@ class ChatCompletionRequest(BaseModel):
 
         return m
 
-
-StopTypedDict = Union[str, List[str]]
-r"""Stop generation if this token is detected. Or if one of these tokens is detected when providing an array"""
-
-
-Stop = Union[str, List[str]]
-r"""Stop generation if this token is detected. Or if one of these tokens is detected when providing an array"""
-
-
-MessagesTypedDict = Union[SystemMessageTypedDict, UserMessageTypedDict, AssistantMessageTypedDict, ToolMessageTypedDict]
-
-
-Messages = Annotated[Union[Annotated[AssistantMessage, Tag("assistant")], Annotated[SystemMessage, Tag("system")], Annotated[ToolMessage, Tag("tool")], Annotated[UserMessage, Tag("user")]], Discriminator(lambda m: get_discriminator(m, "role", "role"))]
-
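The `Stop`/`StopTypedDict` and `Messages`/`MessagesTypedDict` aliases now sit at the top of the module (and are exported from `mistralai.models`), so the str-or-list union they describe is easy to reference directly. A small illustrative sketch that builds the request model by hand; in normal use the SDK methods construct it for you, and the model name is a placeholder:

```python
# Illustrative sketch: stop accepts either a single string or a list of strings.
from mistralai import models

req = models.ChatCompletionRequest(
    model="mistral-small-latest",  # placeholder model name
    messages=[{"role": "user", "content": "Say hi"}],
    stop=["###", "END"],  # stop="###" would also validate
)
print(req)
```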
mistralai/models/chatcompletionstreamrequest.py
CHANGED
@@ -14,6 +14,20 @@ from typing import List, Literal, Optional, TypedDict, Union
 from typing_extensions import Annotated, NotRequired
 
 
+ChatCompletionStreamRequestStopTypedDict = Union[str, List[str]]
+r"""Stop generation if this token is detected. Or if one of these tokens is detected when providing an array"""
+
+
+ChatCompletionStreamRequestStop = Union[str, List[str]]
+r"""Stop generation if this token is detected. Or if one of these tokens is detected when providing an array"""
+
+
+ChatCompletionStreamRequestMessagesTypedDict = Union[SystemMessageTypedDict, UserMessageTypedDict, AssistantMessageTypedDict, ToolMessageTypedDict]
+
+
+ChatCompletionStreamRequestMessages = Annotated[Union[Annotated[AssistantMessage, Tag("assistant")], Annotated[SystemMessage, Tag("system")], Annotated[ToolMessage, Tag("tool")], Annotated[UserMessage, Tag("user")]], Discriminator(lambda m: get_discriminator(m, "role", "role"))]
+
+
 ChatCompletionStreamRequestToolChoice = Literal["auto", "none", "any"]
 
 class ChatCompletionStreamRequestTypedDict(TypedDict):
@@ -91,17 +105,3 @@ class ChatCompletionStreamRequest(BaseModel):
 
         return m
 
-
-ChatCompletionStreamRequestStopTypedDict = Union[str, List[str]]
-r"""Stop generation if this token is detected. Or if one of these tokens is detected when providing an array"""
-
-
-ChatCompletionStreamRequestStop = Union[str, List[str]]
-r"""Stop generation if this token is detected. Or if one of these tokens is detected when providing an array"""
-
-
-ChatCompletionStreamRequestMessagesTypedDict = Union[SystemMessageTypedDict, UserMessageTypedDict, AssistantMessageTypedDict, ToolMessageTypedDict]
-
-
-ChatCompletionStreamRequestMessages = Annotated[Union[Annotated[AssistantMessage, Tag("assistant")], Annotated[SystemMessage, Tag("system")], Annotated[ToolMessage, Tag("tool")], Annotated[UserMessage, Tag("user")]], Discriminator(lambda m: get_discriminator(m, "role", "role"))]
-
mistralai/models/delete_model_v1_models_model_id_deleteop.py
CHANGED
@@ -9,8 +9,10 @@ from typing_extensions import Annotated
 
 class DeleteModelV1ModelsModelIDDeleteRequestTypedDict(TypedDict):
     model_id: str
+    r"""The ID of the model to delete."""
 
 
 class DeleteModelV1ModelsModelIDDeleteRequest(BaseModel):
     model_id: Annotated[str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False))]
+    r"""The ID of the model to delete."""
 
mistralai/models/deltamessage.py
CHANGED
@@ -4,20 +4,20 @@ from __future__ import annotations
 from .toolcall import ToolCall, ToolCallTypedDict
 from mistralai.types import BaseModel, Nullable, OptionalNullable, UNSET, UNSET_SENTINEL
 from pydantic import model_serializer
-from typing import Optional, TypedDict
+from typing import List, Optional, TypedDict
 from typing_extensions import NotRequired
 
 
 class DeltaMessageTypedDict(TypedDict):
     role: NotRequired[str]
     content: NotRequired[str]
-    tool_calls: NotRequired[Nullable[ToolCallTypedDict]]
+    tool_calls: NotRequired[Nullable[List[ToolCallTypedDict]]]
 
 
 class DeltaMessage(BaseModel):
     role: Optional[str] = None
     content: Optional[str] = None
-    tool_calls: OptionalNullable[ToolCall] = UNSET
+    tool_calls: OptionalNullable[List[ToolCall]] = UNSET
 
     @model_serializer(mode="wrap")
     def serialize_model(self, handler):
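`DeltaMessage.tool_calls` is now a list of `ToolCall` rather than a single object, so streaming consumers should iterate it. A rough sketch, assuming the 1.x `chat.stream` interface; the call path, event shape, and model name are assumptions and are not shown in this diff:

```python
# Sketch: delta.tool_calls is now a nullable List[ToolCall], so iterate it.
import os

from mistralai import Mistral

client = Mistral(api_key=os.environ["MISTRAL_API_KEY"])

stream = client.chat.stream(
    model="mistral-small-latest",  # placeholder model name
    messages=[{"role": "user", "content": "What's the weather in Paris?"}],
)
for event in stream:
    delta = event.data.choices[0].delta
    if delta.content:
        print(delta.content, end="")
    for tool_call in delta.tool_calls or []:  # previously typed as a single ToolCall
        print(tool_call.function.name, tool_call.function.arguments)
```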
mistralai/models/embeddingrequest.py
CHANGED
@@ -8,6 +8,14 @@ from typing import List, TypedDict, Union
 from typing_extensions import Annotated, NotRequired
 
 
+InputsTypedDict = Union[str, List[str]]
+r"""Text to embed."""
+
+
+Inputs = Union[str, List[str]]
+r"""Text to embed."""
+
+
 class EmbeddingRequestTypedDict(TypedDict):
     inputs: InputsTypedDict
     r"""Text to embed."""
@@ -51,11 +59,3 @@ class EmbeddingRequest(BaseModel):
 
         return m
 
-
-InputsTypedDict = Union[str, List[str]]
-r"""Text to embed."""
-
-
-Inputs = Union[str, List[str]]
-r"""Text to embed."""
-
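`Inputs`/`InputsTypedDict` are now module-level (and exported from `mistralai.models`), describing the same str-or-list-of-str union as before. A tiny illustrative helper using the exported alias; the helper function itself is hypothetical, only the `Inputs` import comes from the package:

```python
# Hypothetical helper: demonstrates the str-or-list union behind the Inputs alias.
from typing import List

from mistralai.models import Inputs


def normalize_inputs(inputs: Inputs) -> List[str]:
    """Return embedding inputs as a list, whether one string or many were given."""
    return [inputs] if isinstance(inputs, str) else list(inputs)


print(normalize_inputs("Hello world"))
print(normalize_inputs(["Hello", "world"]))
```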
mistralai/models/fimcompletionrequest.py
CHANGED
@@ -7,6 +7,14 @@ from typing import List, Optional, TypedDict, Union
 from typing_extensions import NotRequired
 
 
+FIMCompletionRequestStopTypedDict = Union[str, List[str]]
+r"""Stop generation if this token is detected. Or if one of these tokens is detected when providing an array"""
+
+
+FIMCompletionRequestStop = Union[str, List[str]]
+r"""Stop generation if this token is detected. Or if one of these tokens is detected when providing an array"""
+
+
 class FIMCompletionRequestTypedDict(TypedDict):
     model: Nullable[str]
     r"""ID of the model to use. Only compatible for now with:
@@ -84,11 +92,3 @@ class FIMCompletionRequest(BaseModel):
 
         return m
 
-
-FIMCompletionRequestStopTypedDict = Union[str, List[str]]
-r"""Stop generation if this token is detected. Or if one of these tokens is detected when providing an array"""
-
-
-FIMCompletionRequestStop = Union[str, List[str]]
-r"""Stop generation if this token is detected. Or if one of these tokens is detected when providing an array"""
-
mistralai/models/fimcompletionstreamrequest.py
CHANGED
@@ -7,6 +7,14 @@ from typing import List, Optional, TypedDict, Union
 from typing_extensions import NotRequired
 
 
+FIMCompletionStreamRequestStopTypedDict = Union[str, List[str]]
+r"""Stop generation if this token is detected. Or if one of these tokens is detected when providing an array"""
+
+
+FIMCompletionStreamRequestStop = Union[str, List[str]]
+r"""Stop generation if this token is detected. Or if one of these tokens is detected when providing an array"""
+
+
 class FIMCompletionStreamRequestTypedDict(TypedDict):
     model: Nullable[str]
     r"""ID of the model to use. Only compatible for now with:
@@ -82,11 +90,3 @@ class FIMCompletionStreamRequest(BaseModel):
 
         return m
 
-
-FIMCompletionStreamRequestStopTypedDict = Union[str, List[str]]
-r"""Stop generation if this token is detected. Or if one of these tokens is detected when providing an array"""
-
-
-FIMCompletionStreamRequestStop = Union[str, List[str]]
-r"""Stop generation if this token is detected. Or if one of these tokens is detected when providing an array"""
-
mistralai/models/functioncall.py
CHANGED
@@ -2,15 +2,21 @@
 
 from __future__ import annotations
 from mistralai.types import BaseModel
-from typing import TypedDict
+from typing import Any, Dict, TypedDict, Union
+
+
+ArgumentsTypedDict = Union[Dict[str, Any], str]
+
+
+Arguments = Union[Dict[str, Any], str]
 
 
 class FunctionCallTypedDict(TypedDict):
     name: str
-    arguments:
+    arguments: ArgumentsTypedDict
 
 
 class FunctionCall(BaseModel):
     name: str
-    arguments:
+    arguments: Arguments
 
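With the new `Arguments` union, `FunctionCall.arguments` validates either a dict or a raw JSON string (the pre-1.0.1 annotation is truncated in the diff above, so only the new behaviour is illustrated here). A short sketch:

```python
# Both forms satisfy the new Arguments = Union[Dict[str, Any], str] annotation.
from mistralai.models import FunctionCall

as_dict = FunctionCall(name="get_weather", arguments={"city": "Paris"})
as_json_string = FunctionCall(name="get_weather", arguments='{"city": "Paris"}')

print(type(as_dict.arguments), type(as_json_string.arguments))
```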
mistralai/models/jobs_api_routes_fine_tuning_archive_fine_tuned_modelop.py
CHANGED
@@ -9,8 +9,10 @@ from typing_extensions import Annotated
 
 class JobsAPIRoutesFineTuningArchiveFineTunedModelRequestTypedDict(TypedDict):
     model_id: str
+    r"""The ID of the model to archive."""
 
 
 class JobsAPIRoutesFineTuningArchiveFineTunedModelRequest(BaseModel):
     model_id: Annotated[str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False))]
+    r"""The ID of the model to archive."""
 
mistralai/models/jobs_api_routes_fine_tuning_create_fine_tuning_jobop.py
CHANGED
@@ -1,64 +1,11 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from __future__ import annotations
-from .jobin import JobIn, JobInTypedDict
 from .jobout import JobOut, JobOutTypedDict
 from .legacyjobmetadataout import LegacyJobMetadataOut, LegacyJobMetadataOutTypedDict
-from 
-from mistralai.utils import FieldMetadata, QueryParamMetadata, RequestMetadata
-from pydantic import model_serializer
-from typing import TypedDict, Union
-from typing_extensions import Annotated, NotRequired
+from typing import Union
 
 
-class JobsAPIRoutesFineTuningCreateFineTuningJobRequestTypedDict(TypedDict):
-    job_in: JobInTypedDict
-    dry_run: NotRequired[Nullable[bool]]
-    r"""* If `true` the job is not spawned, instead the query returns a handful of useful metadata
-    for the user to perform sanity checks (see `LegacyJobMetadataOut` response).
-    * Otherwise, the job is started and the query returns the job ID along with some of the
-    input parameters (see `JobOut` response).
-
-    """
-
-
-class JobsAPIRoutesFineTuningCreateFineTuningJobRequest(BaseModel):
-    job_in: Annotated[JobIn, FieldMetadata(request=RequestMetadata(media_type="application/json"))]
-    dry_run: Annotated[OptionalNullable[bool], FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = UNSET
-    r"""* If `true` the job is not spawned, instead the query returns a handful of useful metadata
-    for the user to perform sanity checks (see `LegacyJobMetadataOut` response).
-    * Otherwise, the job is started and the query returns the job ID along with some of the
-    input parameters (see `JobOut` response).
-
-    """
-
-    @model_serializer(mode="wrap")
-    def serialize_model(self, handler):
-        optional_fields = ["dry_run"]
-        nullable_fields = ["dry_run"]
-        null_default_fields = []
-
-        serialized = handler(self)
-
-        m = {}
-
-        for n, f in self.model_fields.items():
-            k = f.alias or n
-            val = serialized.get(k)
-
-            optional_nullable = k in optional_fields and k in nullable_fields
-            is_set = (self.__pydantic_fields_set__.intersection({n}) or k in null_default_fields) # pylint: disable=no-member
-
-            if val is not None and val != UNSET_SENTINEL:
-                m[k] = val
-            elif val != UNSET_SENTINEL and (
-                not k in optional_fields or (optional_nullable and is_set)
-            ):
-                m[k] = val
-
-        return m
-
-
 JobsAPIRoutesFineTuningCreateFineTuningJobResponseTypedDict = Union[LegacyJobMetadataOutTypedDict, JobOutTypedDict]
 r"""OK"""
 
mistralai/models/jobs_api_routes_fine_tuning_unarchive_fine_tuned_modelop.py
CHANGED
@@ -9,8 +9,10 @@ from typing_extensions import Annotated
 
 class JobsAPIRoutesFineTuningUnarchiveFineTunedModelRequestTypedDict(TypedDict):
     model_id: str
+    r"""The ID of the model to unarchive."""
 
 
 class JobsAPIRoutesFineTuningUnarchiveFineTunedModelRequest(BaseModel):
     model_id: Annotated[str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False))]
+    r"""The ID of the model to unarchive."""
 
mistralai/models/jobs_api_routes_fine_tuning_update_fine_tuned_modelop.py
CHANGED
@@ -10,10 +10,12 @@ from typing_extensions import Annotated
 
 class JobsAPIRoutesFineTuningUpdateFineTunedModelRequestTypedDict(TypedDict):
     model_id: str
+    r"""The ID of the model to update."""
     update_ft_model_in: UpdateFTModelInTypedDict
 
 
 class JobsAPIRoutesFineTuningUpdateFineTunedModelRequest(BaseModel):
     model_id: Annotated[str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False))]
+    r"""The ID of the model to update."""
     update_ft_model_in: Annotated[UpdateFTModelIn, FieldMetadata(request=RequestMetadata(media_type="application/json"))]
 
mistralai/models/retrieve_model_v1_models_model_id_getop.py
CHANGED
@@ -9,8 +9,10 @@ from typing_extensions import Annotated
 
 class RetrieveModelV1ModelsModelIDGetRequestTypedDict(TypedDict):
     model_id: str
+    r"""The ID of the model to retrieve."""
 
 
 class RetrieveModelV1ModelsModelIDGetRequest(BaseModel):
     model_id: Annotated[str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False))]
+    r"""The ID of the model to retrieve."""
 
mistralai/models/systemmessage.py
CHANGED
@@ -7,6 +7,12 @@ from typing import List, Literal, Optional, TypedDict, Union
 from typing_extensions import NotRequired
 
 
+ContentTypedDict = Union[str, List[ContentChunkTypedDict]]
+
+
+Content = Union[str, List[ContentChunk]]
+
+
 Role = Literal["system"]
 
 class SystemMessageTypedDict(TypedDict):
@@ -18,9 +24,3 @@ class SystemMessage(BaseModel):
     content: Content
     role: Optional[Role] = "system"
 
-
-ContentTypedDict = Union[str, List[ContentChunkTypedDict]]
-
-
-Content = Union[str, List[ContentChunk]]
-
mistralai/models/usermessage.py
CHANGED
@@ -7,6 +7,12 @@ from typing import List, Literal, Optional, TypedDict, Union
 from typing_extensions import NotRequired
 
 
+UserMessageContentTypedDict = Union[str, List[TextChunkTypedDict]]
+
+
+UserMessageContent = Union[str, List[TextChunk]]
+
+
 UserMessageRole = Literal["user"]
 
 class UserMessageTypedDict(TypedDict):
@@ -18,9 +24,3 @@ class UserMessage(BaseModel):
     content: UserMessageContent
     role: Optional[UserMessageRole] = "user"
 
-
-UserMessageContentTypedDict = Union[str, List[TextChunkTypedDict]]
-
-
-UserMessageContent = Union[str, List[TextChunk]]
-