mistralai 1.4.0__py3-none-any.whl → 1.5.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
mistralai/_version.py CHANGED
@@ -3,10 +3,10 @@
  import importlib.metadata
 
  __title__: str = "mistralai"
- __version__: str = "1.4.0"
+ __version__: str = "1.5.0"
  __openapi_doc_version__: str = "0.0.2"
- __gen_version__: str = "2.493.32"
- __user_agent__: str = "speakeasy-sdk/python 1.4.0 2.493.32 0.0.2 mistralai"
+ __gen_version__: str = "2.497.0"
+ __user_agent__: str = "speakeasy-sdk/python 1.5.0 2.497.0 0.0.2 mistralai"
 
  try:
  if __package__ is not None:
mistralai/chat.py CHANGED
@@ -7,10 +7,92 @@ from mistralai.types import Nullable, OptionalNullable, UNSET
  from mistralai.utils import eventstreaming, get_security_from_env
  from typing import Any, List, Mapping, Optional, Union
 
+ # region imports
+ from typing import Type
+ from mistralai.extra import (
+ convert_to_parsed_chat_completion_response,
+ response_format_from_pydantic_model,
+ CustomPydanticModel,
+ ParsedChatCompletionResponse,
+ )
+ # endregion imports
+
 
  class Chat(BaseSDK):
  r"""Chat Completion API."""
 
+ # region sdk-class-body
+ # Custom .parse methods for the Structured Outputs feature.
+
+ def parse(
+ self, response_format: Type[CustomPydanticModel], **kwargs: Any
+ ) -> ParsedChatCompletionResponse[CustomPydanticModel]:
+ """
+ Parse the response using the provided response format.
+ :param Type[CustomPydanticModel] response_format: The Pydantic model to parse the response into
+ :param Any **kwargs: Additional keyword arguments to pass to the .complete method
+ :return: The parsed response
+ """
+ # Convert the input Pydantic model to a strict JSON schema ready to be passed to chat.complete
+ json_response_format = response_format_from_pydantic_model(response_format)
+ # Run the inference
+ response = self.complete(**kwargs, response_format=json_response_format)
+ # Parse the response back into the input Pydantic model
+ parsed_response = convert_to_parsed_chat_completion_response(
+ response, response_format
+ )
+ return parsed_response
+
+ async def parse_async(
+ self, response_format: Type[CustomPydanticModel], **kwargs
+ ) -> ParsedChatCompletionResponse[CustomPydanticModel]:
+ """
+ Asynchronously parse the response using the provided response format.
+ :param Type[CustomPydanticModel] response_format: The Pydantic model to parse the response into
+ :param Any **kwargs: Additional keyword arguments to pass to the .complete method
+ :return: The parsed response
+ """
+ json_response_format = response_format_from_pydantic_model(response_format)
+ response = await self.complete_async( # pylint: disable=E1125
+ **kwargs, response_format=json_response_format
+ )
+ parsed_response = convert_to_parsed_chat_completion_response(
+ response, response_format
+ )
+ return parsed_response
+
+ def parse_stream(
+ self, response_format: Type[CustomPydanticModel], **kwargs
+ ) -> eventstreaming.EventStream[models.CompletionEvent]:
+ """
+ Parse the response using the provided response format.
+ For now, the response will be in JSON format, not in the input Pydantic model.
+ :param Type[CustomPydanticModel] response_format: The Pydantic model to parse the response into
+ :param Any **kwargs: Additional keyword arguments to pass to the .stream method
+ :return: The JSON parsed response
+ """
+ json_response_format = response_format_from_pydantic_model(response_format)
+ response = self.stream(**kwargs, response_format=json_response_format)
+ return response
+
+ async def parse_stream_async(
+ self, response_format: Type[CustomPydanticModel], **kwargs
+ ) -> eventstreaming.EventStreamAsync[models.CompletionEvent]:
+ """
+ Asynchronously parse the response using the provided response format.
+ For now, the response will be in JSON format, not in the input Pydantic model.
+ :param Type[CustomPydanticModel] response_format: The Pydantic model to parse the response into
+ :param Any **kwargs: Additional keyword arguments to pass to the .stream method
+ :return: The JSON parsed response
+ """
+ json_response_format = response_format_from_pydantic_model(response_format)
+ response = await self.stream_async( # pylint: disable=E1125
+ **kwargs, response_format=json_response_format
+ )
+ return response
+
+ # endregion sdk-class-body
+
  def complete(
  self,
  *,
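
To make the new surface concrete, here is a minimal usage sketch of the `parse` helper added above. The `Book` model, the model name and the message payload are illustrative, not taken from the diff; the flow (Pydantic model in, `ParsedChatCompletionResponse` with a `.parsed` attribute out) follows the code shown above.

```python
# Hedged usage sketch of Chat.parse (Book and the prompt are hypothetical).
import os

from pydantic import BaseModel

from mistralai import Mistral


class Book(BaseModel):
    title: str
    author: str


client = Mistral(api_key=os.environ["MISTRAL_API_KEY"])

# parse() converts Book into a strict json_schema response_format, calls
# chat.complete() with it, and validates the returned JSON back into Book.
result = client.chat.parse(
    response_format=Book,
    model="mistral-large-latest",
    messages=[{"role": "user", "content": "Name a novel by Jules Verne."}],
)

print(result.choices[0].message.parsed)  # a Book instance (or None)
```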
mistralai/extra/README.md ADDED
@@ -0,0 +1,56 @@
+ ## Context
+
+ The extra package contains the custom logic which is too complex to be generated by Speakeasy from the OpenAPI specs. It was introduced to add the Structured Outputs feature.
+
+ ## Development / Contributing
+
+ To add custom code to the SDK, you need to use [Speakeasy custom code regions](https://www.speakeasy.com/docs/customize/code/code-regions/overview) as shown below.
+
+ ### Runbook of SDK customization
+
+ 1. Add the code you want to import in the `src/mistralai/extra/` package. To make it importable from the SDK, add it to the `__init__.py` file:
+ ```python
+ from .my_custom_file import my_custom_function
+
+ __all__ = ["my_custom_function"]
+ ```
+
+ 2. Add a new custom code region in the SDK files, e.g. in `src/mistralai/chat.py`:
+ ```python
+ # region imports
+ from typing import Type
+ from mistralai.extra import my_custom_function
+ # endregion imports
+
+ class Chat(BaseSDK):
+ r"""Chat Completion API."""
+
+ # region sdk-class-body
+ def my_custom_method(self, param: str) -> Type[some_type]:
+ output = my_custom_function(param)
+ return output
+ # endregion sdk-class-body
+ ```
+
+ 3. Now build the SDK with the custom code:
+ ```bash
+ rm -rf dist; poetry build; python3 -m pip install ~/client-python/dist/mistralai-1.4.1-py3-none-any.whl --force-reinstall
+ ```
+
+ 4. And now you should be able to call the custom method:
+ ```python
+ import os
+ from mistralai import Mistral
+
+ api_key = os.environ["MISTRAL_API_KEY"]
+ client = Mistral(api_key=api_key)
+
+ client.chat.my_custom_method(param="test")
+ ```
+
+ ### Run the unit tests
+
+ To run the unit tests for the `extra` package, run the following command from the root of the repository:
+ ```bash
+ python3.12 -m unittest discover -s src/mistralai/extra/tests -t src
+ ```
mistralai/extra/__init__.py ADDED
@@ -0,0 +1,5 @@
+ from .struct_chat import ParsedChatCompletionResponse, convert_to_parsed_chat_completion_response
+ from .utils import response_format_from_pydantic_model
+ from .utils.response_format import CustomPydanticModel
+
+ __all__ = ["convert_to_parsed_chat_completion_response", "response_format_from_pydantic_model", "CustomPydanticModel", "ParsedChatCompletionResponse"]
mistralai/extra/struct_chat.py ADDED
@@ -0,0 +1,41 @@
+ from ..models import ChatCompletionResponse, ChatCompletionChoice, AssistantMessage
+ from .utils.response_format import CustomPydanticModel, pydantic_model_from_json
+ from typing import List, Optional, Type, Generic
+ from pydantic import BaseModel
+ import json
+
+ class ParsedAssistantMessage(AssistantMessage, Generic[CustomPydanticModel]):
+ parsed: Optional[CustomPydanticModel]
+
+ class ParsedChatCompletionChoice(ChatCompletionChoice, Generic[CustomPydanticModel]):
+ message: Optional[ParsedAssistantMessage[CustomPydanticModel]] # type: ignore
+
+ class ParsedChatCompletionResponse(ChatCompletionResponse, Generic[CustomPydanticModel]):
+ choices: Optional[List[ParsedChatCompletionChoice[CustomPydanticModel]]] # type: ignore
+
+ def convert_to_parsed_chat_completion_response(response: ChatCompletionResponse, response_format: Type[BaseModel]) -> ParsedChatCompletionResponse:
+ parsed_choices = []
+
+ if response.choices:
+ for choice in response.choices:
+ if choice.message:
+ parsed_message: ParsedAssistantMessage = ParsedAssistantMessage(
+ **choice.message.model_dump(),
+ parsed=None
+ )
+ if isinstance(parsed_message.content, str):
+ parsed_message.parsed = pydantic_model_from_json(json.loads(parsed_message.content), response_format)
+ elif parsed_message.content is None:
+ parsed_message.parsed = None
+ else:
+ raise TypeError(f"Unexpected type for message.content: {type(parsed_message.content)}")
+ choice_dict = choice.model_dump()
+ choice_dict["message"] = parsed_message
+ parsed_choice: ParsedChatCompletionChoice = ParsedChatCompletionChoice(**choice_dict)
+ parsed_choices.append(parsed_choice)
+ else:
+ parsed_choice = ParsedChatCompletionChoice(**choice.model_dump())
+ parsed_choices.append(parsed_choice)
+ response_dict = response.model_dump()
+ response_dict["choices"] = parsed_choices
+ return ParsedChatCompletionResponse(**response_dict)
mistralai/extra/tests/__init__.py ADDED
File without changes
mistralai/extra/tests/test_struct_chat.py ADDED
@@ -0,0 +1,103 @@
+ import unittest
+ from ..struct_chat import (
+ convert_to_parsed_chat_completion_response,
+ ParsedChatCompletionResponse,
+ ParsedChatCompletionChoice,
+ ParsedAssistantMessage,
+ )
+ from ...models import (
+ ChatCompletionResponse,
+ UsageInfo,
+ ChatCompletionChoice,
+ AssistantMessage,
+ )
+ from pydantic import BaseModel
+
+
+ class Explanation(BaseModel):
+ explanation: str
+ output: str
+
+
+ class MathDemonstration(BaseModel):
+ steps: list[Explanation]
+ final_answer: str
+
+
+ mock_cc_response = ChatCompletionResponse(
+ id="c0271b2098954c6094231703875ca0bc",
+ object="chat.completion",
+ model="mistral-large-latest",
+ usage=UsageInfo(prompt_tokens=75, completion_tokens=220, total_tokens=295),
+ created=1737727558,
+ choices=[
+ ChatCompletionChoice(
+ index=0,
+ message=AssistantMessage(
+ content='{\n "final_answer": "x = -4",\n "steps": [\n {\n "explanation": "Start with the given equation.",\n "output": "8x + 7 = -23"\n },\n {\n "explanation": "Subtract 7 from both sides to isolate the term with x.",\n "output": "8x = -23 - 7"\n },\n {\n "explanation": "Simplify the right side of the equation.",\n "output": "8x = -30"\n },\n {\n "explanation": "Divide both sides by 8 to solve for x.",\n "output": "x = -30 / 8"\n },\n {\n "explanation": "Simplify the fraction to get the final answer.",\n "output": "x = -4"\n }\n ]\n}',
+ tool_calls=None,
+ prefix=False,
+ role="assistant",
+ ),
+ finish_reason="stop",
+ )
+ ],
+ )
+
+
+ expected_response = ParsedChatCompletionResponse(
+ choices=[
+ ParsedChatCompletionChoice(
+ index=0,
+ message=ParsedAssistantMessage(
+ content='{\n "final_answer": "x = -4",\n "steps": [\n {\n "explanation": "Start with the given equation.",\n "output": "8x + 7 = -23"\n },\n {\n "explanation": "Subtract 7 from both sides to isolate the term with x.",\n "output": "8x = -23 - 7"\n },\n {\n "explanation": "Simplify the right side of the equation.",\n "output": "8x = -30"\n },\n {\n "explanation": "Divide both sides by 8 to solve for x.",\n "output": "x = -30 / 8"\n },\n {\n "explanation": "Simplify the fraction to get the final answer.",\n "output": "x = -4"\n }\n ]\n}',
+ tool_calls=None,
+ prefix=False,
+ role="assistant",
+ parsed=MathDemonstration(
+ steps=[
+ Explanation(
+ explanation="Start with the given equation.",
+ output="8x + 7 = -23",
+ ),
+ Explanation(
+ explanation="Subtract 7 from both sides to isolate the term with x.",
+ output="8x = -23 - 7",
+ ),
+ Explanation(
+ explanation="Simplify the right side of the equation.",
+ output="8x = -30",
+ ),
+ Explanation(
+ explanation="Divide both sides by 8 to solve for x.",
+ output="x = -30 / 8",
+ ),
+ Explanation(
+ explanation="Simplify the fraction to get the final answer.",
+ output="x = -4",
+ ),
+ ],
+ final_answer="x = -4",
+ ),
+ ),
+ finish_reason="stop",
+ )
+ ],
+ created=1737727558,
+ id="c0271b2098954c6094231703875ca0bc",
+ model="mistral-large-latest",
+ object="chat.completion",
+ usage=UsageInfo(prompt_tokens=75, completion_tokens=220, total_tokens=295),
+ )
+
+
+ class TestConvertToParsedChatCompletionResponse(unittest.TestCase):
+ def test_convert_to_parsed_chat_completion_response(self):
+ output = convert_to_parsed_chat_completion_response(
+ mock_cc_response, MathDemonstration
+ )
+ self.assertEqual(output, expected_response)
+
+
+ if __name__ == "__main__":
+ unittest.main()
mistralai/extra/tests/test_utils.py ADDED
@@ -0,0 +1,162 @@
+ from ..utils.response_format import (
+ pydantic_model_from_json,
+ response_format_from_pydantic_model,
+ rec_strict_json_schema,
+ )
+ from pydantic import BaseModel, ValidationError
+
+ from ...models import ResponseFormat, JSONSchema
+ from ...types.basemodel import Unset
+
+ import unittest
+
+
+ class Student(BaseModel):
+ name: str
+ age: int
+
+
+ class Explanation(BaseModel):
+ explanation: str
+ output: str
+
+
+ class MathDemonstration(BaseModel):
+ steps: list[Explanation]
+ final_answer: str
+
+
+ mathdemo_schema = {
+ "$defs": {
+ "Explanation": {
+ "properties": {
+ "explanation": {"title": "Explanation", "type": "string"},
+ "output": {"title": "Output", "type": "string"},
+ },
+ "required": ["explanation", "output"],
+ "title": "Explanation",
+ "type": "object",
+ }
+ },
+ "properties": {
+ "steps": {
+ "items": {"$ref": "#/$defs/Explanation"},
+ "title": "Steps",
+ "type": "array",
+ },
+ "final_answer": {"title": "Final Answer", "type": "string"},
+ },
+ "required": ["steps", "final_answer"],
+ "title": "MathDemonstration",
+ "type": "object",
+ }
+
+ mathdemo_strict_schema = mathdemo_schema.copy()
+ mathdemo_strict_schema["$defs"]["Explanation"]["additionalProperties"] = False # type: ignore
+ mathdemo_strict_schema["additionalProperties"] = False
+
+ mathdemo_response_format = ResponseFormat(
+ type="json_schema",
+ json_schema=JSONSchema(
+ name="MathDemonstration",
+ schema_definition=mathdemo_strict_schema,
+ description=Unset(),
+ strict=True,
+ ),
+ )
+
+
+ class TestResponseFormat(unittest.TestCase):
+ def test_pydantic_model_from_json(self):
+ missing_json_data = {"name": "Jean Dupont"}
+ good_json_data = {"name": "Jean Dupont", "age": 25}
+ extra_json_data = {
+ "name": "Jean Dupont",
+ "age": 25,
+ "extra_field": "extra_value",
+ }
+ complex_json_data = {
+ "final_answer": "x = -4",
+ "steps": [
+ {
+ "explanation": "Start with the given equation.",
+ "output": "8x + 7 = -23",
+ },
+ {
+ "explanation": "Subtract 7 from both sides to isolate the term with x.",
+ "output": "8x = -23 - 7",
+ },
+ {
+ "explanation": "Simplify the right side of the equation.",
+ "output": "8x = -30",
+ },
+ {
+ "explanation": "Divide both sides by 8 to solve for x.",
+ "output": "x = -30 / 8",
+ },
+ {
+ "explanation": "Simplify the fraction to get the final answer.",
+ "output": "x = -4",
+ },
+ ],
+ }
+
+ self.assertEqual(
+ pydantic_model_from_json(good_json_data, Student),
+ Student(name="Jean Dupont", age=25),
+ )
+ self.assertEqual(
+ pydantic_model_from_json(extra_json_data, Student),
+ Student(name="Jean Dupont", age=25),
+ )
+ self.assertEqual(
+ pydantic_model_from_json(complex_json_data, MathDemonstration),
+ MathDemonstration(
+ steps=[
+ Explanation(
+ explanation="Start with the given equation.",
+ output="8x + 7 = -23",
+ ),
+ Explanation(
+ explanation="Subtract 7 from both sides to isolate the term with x.",
+ output="8x = -23 - 7",
+ ),
+ Explanation(
+ explanation="Simplify the right side of the equation.",
+ output="8x = -30",
+ ),
+ Explanation(
+ explanation="Divide both sides by 8 to solve for x.",
+ output="x = -30 / 8",
+ ),
+ Explanation(
+ explanation="Simplify the fraction to get the final answer.",
+ output="x = -4",
+ ),
+ ],
+ final_answer="x = -4",
+ ),
+ )
+
+ # Check it raises a validation error
+ with self.assertRaises(ValidationError):
+ pydantic_model_from_json(missing_json_data, Student) # type: ignore
+
+ def test_response_format_from_pydantic_model(self):
+ self.assertEqual(
+ response_format_from_pydantic_model(MathDemonstration),
+ mathdemo_response_format,
+ )
+
+ def test_rec_strict_json_schema(self):
+ invalid_schema = mathdemo_schema | {"wrong_value": 1}
+ self.assertEqual(
+ rec_strict_json_schema(mathdemo_schema), mathdemo_strict_schema
+ )
+
+ with self.assertRaises(ValueError):
+ rec_strict_json_schema(invalid_schema)
+
+
+ if __name__ == "__main__":
+ unittest.main()
mistralai/extra/utils/__init__.py ADDED
@@ -0,0 +1,3 @@
+ from .response_format import response_format_from_pydantic_model
+
+ __all__ = ["response_format_from_pydantic_model"]
mistralai/extra/utils/_pydantic_helper.py ADDED
@@ -0,0 +1,20 @@
+ from typing import Any
+
+ def rec_strict_json_schema(schema_node: Any) -> Any:
+ """
+ Recursively set the additionalProperties property to False for all objects in the JSON Schema.
+ This makes the JSON Schema strict (i.e. no additional properties are allowed).
+ """
+ if isinstance(schema_node, (str, bool)):
+ return schema_node
+ if isinstance(schema_node, dict):
+ if "type" in schema_node and schema_node["type"] == "object":
+ schema_node["additionalProperties"] = False
+ for key, value in schema_node.items():
+ schema_node[key] = rec_strict_json_schema(value)
+ elif isinstance(schema_node, list):
+ for i, value in enumerate(schema_node):
+ schema_node[i] = rec_strict_json_schema(value)
+ else:
+ raise ValueError(f"Unexpected type: {schema_node}")
+ return schema_node
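
As a quick illustration of the helper above: a Pydantic-generated schema only becomes "strict" after `rec_strict_json_schema` stamps `additionalProperties: False` onto every object node. The `Student` model below is hypothetical.

```python
# Sketch of rec_strict_json_schema's effect (Student is a hypothetical model).
from pydantic import BaseModel

from mistralai.extra.utils._pydantic_helper import rec_strict_json_schema


class Student(BaseModel):
    name: str
    age: int


schema = Student.model_json_schema()
strict_schema = rec_strict_json_schema(schema)  # mutates and returns the schema

# The top-level object node (and any nested object nodes) now forbids extra keys.
assert strict_schema["additionalProperties"] is False
```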
mistralai/extra/utils/response_format.py ADDED
@@ -0,0 +1,24 @@
+ from pydantic import BaseModel
+ from typing import TypeVar, Any, Type
+ from ...models import JSONSchema, ResponseFormat
+ from ._pydantic_helper import rec_strict_json_schema
+
+ CustomPydanticModel = TypeVar("CustomPydanticModel", bound=BaseModel)
+
+
+ def response_format_from_pydantic_model(
+ model: type[CustomPydanticModel],
+ ) -> ResponseFormat:
+ """Generate a strict JSON schema from a pydantic model."""
+ model_schema = rec_strict_json_schema(model.model_json_schema())
+ json_schema = JSONSchema.model_validate(
+ {"name": model.__name__, "schema": model_schema, "strict": True}
+ )
+ return ResponseFormat(type="json_schema", json_schema=json_schema)
+
+
+ def pydantic_model_from_json(
+ json_data: dict[str, Any], pydantic_model: Type[CustomPydanticModel]
+ ) -> CustomPydanticModel:
+ """Parse JSON data into a pydantic model."""
+ return pydantic_model.model_validate(json_data)
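
Taken together, the two helpers above implement the round trip that `chat.parse` relies on. A short sketch with a hypothetical `Student` model:

```python
# Round-trip sketch: Pydantic model -> ResponseFormat, JSON answer -> model instance.
from pydantic import BaseModel

from mistralai.extra import response_format_from_pydantic_model
from mistralai.extra.utils.response_format import pydantic_model_from_json


class Student(BaseModel):
    name: str
    age: int


# Outbound: strict json_schema response format to send with the request.
rf = response_format_from_pydantic_model(Student)
print(rf.type)  # "json_schema"

# Inbound: the model's JSON answer validated back into the Pydantic model.
student = pydantic_model_from_json({"name": "Jean Dupont", "age": 25}, Student)
print(student)  # name='Jean Dupont' age=25
```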
mistralai/httpclient.py CHANGED
@@ -1,6 +1,8 @@
  """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
  # pyright: reportReturnType = false
+ import asyncio
+ from concurrent.futures import ThreadPoolExecutor
  from typing_extensions import Protocol, runtime_checkable
  import httpx
  from typing import Any, Optional, Union
@@ -82,3 +84,51 @@ class AsyncHttpClient(Protocol):
 
  async def aclose(self) -> None:
  pass
+
+
+ class ClientOwner(Protocol):
+ client: Union[HttpClient, None]
+ async_client: Union[AsyncHttpClient, None]
+
+
+ def close_clients(
+ owner: ClientOwner,
+ sync_client: Union[HttpClient, None],
+ async_client: Union[AsyncHttpClient, None],
+ ) -> None:
+ """
+ A finalizer function that is meant to be used with weakref.finalize to close
+ httpx clients used by an SDK so that underlying resources can be garbage
+ collected.
+ """
+
+ # Unset the client/async_client properties so there are no more references
+ # to them from the owning SDK instance and they can be reaped.
+ owner.client = None
+ owner.async_client = None
+
+ if sync_client is not None:
+ try:
+ sync_client.close()
+ except Exception:
+ pass
+
+ if async_client is not None:
+ is_async = False
+ try:
+ asyncio.get_running_loop()
+ is_async = True
+ except RuntimeError:
+ pass
+
+ try:
+ # If this function is called in an async loop then start another
+ # loop in a separate thread to close the async http client.
+ if is_async:
+ with ThreadPoolExecutor(max_workers=1) as executor:
+ future = executor.submit(asyncio.run, async_client.aclose())
+ future.result()
+ else:
+ asyncio.run(async_client.aclose())
+ except Exception:
+ pass
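
The `ThreadPoolExecutor` branch above exists because `asyncio.run()` raises `RuntimeError` when called from a thread that already has a running event loop. A standalone sketch of that pattern, independent of the SDK types (the `aclose` coroutine is a stand-in):

```python
# Sketch: running a cleanup coroutine from inside an already-running event loop.
import asyncio
from concurrent.futures import ThreadPoolExecutor


async def aclose() -> None:
    # Stand-in for async_client.aclose().
    await asyncio.sleep(0)


async def main() -> None:
    # Calling asyncio.run(aclose()) here would raise RuntimeError, so the
    # coroutine is executed on a fresh loop in a worker thread instead.
    with ThreadPoolExecutor(max_workers=1) as executor:
        future = executor.submit(asyncio.run, aclose())
        future.result()


asyncio.run(main())
```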
mistralai/models/__init__.py CHANGED
@@ -264,6 +264,7 @@ from .jobs_api_routes_fine_tuning_update_fine_tuned_modelop import (
  JobsAPIRoutesFineTuningUpdateFineTunedModelRequestTypedDict,
  )
  from .jobsout import JobsOut, JobsOutObject, JobsOutTypedDict
+ from .jsonschema import JSONSchema, JSONSchemaTypedDict
  from .legacyjobmetadataout import (
  LegacyJobMetadataOut,
  LegacyJobMetadataOutObject,
@@ -515,6 +516,8 @@ __all__ = [
  "InputsTypedDict",
  "Integrations",
  "IntegrationsTypedDict",
+ "JSONSchema",
+ "JSONSchemaTypedDict",
  "JobIn",
  "JobInIntegrations",
  "JobInIntegrationsTypedDict",
mistralai/models/jsonschema.py ADDED
@@ -0,0 +1,55 @@
+ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+ from __future__ import annotations
+ from mistralai.types import BaseModel, Nullable, OptionalNullable, UNSET, UNSET_SENTINEL
+ import pydantic
+ from pydantic import model_serializer
+ from typing import Any, Dict, Optional
+ from typing_extensions import Annotated, NotRequired, TypedDict
+
+
+ class JSONSchemaTypedDict(TypedDict):
+ name: str
+ schema_definition: Dict[str, Any]
+ description: NotRequired[Nullable[str]]
+ strict: NotRequired[bool]
+
+
+ class JSONSchema(BaseModel):
+ name: str
+
+ schema_definition: Annotated[Dict[str, Any], pydantic.Field(alias="schema")]
+
+ description: OptionalNullable[str] = UNSET
+
+ strict: Optional[bool] = False
+
+ @model_serializer(mode="wrap")
+ def serialize_model(self, handler):
+ optional_fields = ["description", "strict"]
+ nullable_fields = ["description"]
+ null_default_fields = []
+
+ serialized = handler(self)
+
+ m = {}
+
+ for n, f in self.model_fields.items():
+ k = f.alias or n
+ val = serialized.get(k)
+ serialized.pop(k, None)
+
+ optional_nullable = k in optional_fields and k in nullable_fields
+ is_set = (
+ self.__pydantic_fields_set__.intersection({n})
+ or k in null_default_fields
+ ) # pylint: disable=no-member
+
+ if val is not None and val != UNSET_SENTINEL:
+ m[k] = val
+ elif val != UNSET_SENTINEL and (
+ not k in optional_fields or (optional_nullable and is_set)
+ ):
+ m[k] = val
+
+ return m
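
One detail worth noting in the model above: the Python attribute is `schema_definition`, while the wire field is the alias `schema` (likely to avoid colliding with Pydantic's own `schema` method). A small sketch with illustrative values:

```python
# Sketch: constructing JSONSchema and serializing with the "schema" alias.
from mistralai.models import JSONSchema

js = JSONSchema(
    name="Student",  # illustrative values
    schema_definition={
        "type": "object",
        "properties": {"name": {"type": "string"}},
        "required": ["name"],
        "additionalProperties": False,
    },
    strict=True,
)

# Dumping by alias should emit "schema" and omit the unset "description" field.
print(js.model_dump(by_alias=True))
```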
mistralai/models/responseformat.py CHANGED
@@ -1,8 +1,10 @@
  """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
  from __future__ import annotations
+ from .jsonschema import JSONSchema, JSONSchemaTypedDict
  from .responseformats import ResponseFormats
- from mistralai.types import BaseModel
+ from mistralai.types import BaseModel, Nullable, OptionalNullable, UNSET, UNSET_SENTINEL
+ from pydantic import model_serializer
  from typing import Optional
  from typing_extensions import NotRequired, TypedDict
 
@@ -10,8 +12,41 @@ from typing_extensions import NotRequired, TypedDict
  class ResponseFormatTypedDict(TypedDict):
  type: NotRequired[ResponseFormats]
  r"""An object specifying the format that the model must output. Setting to `{ \"type\": \"json_object\" }` enables JSON mode, which guarantees the message the model generates is in JSON. When using JSON mode you MUST also instruct the model to produce JSON yourself with a system or a user message."""
+ json_schema: NotRequired[Nullable[JSONSchemaTypedDict]]
 
 
  class ResponseFormat(BaseModel):
  type: Optional[ResponseFormats] = None
  r"""An object specifying the format that the model must output. Setting to `{ \"type\": \"json_object\" }` enables JSON mode, which guarantees the message the model generates is in JSON. When using JSON mode you MUST also instruct the model to produce JSON yourself with a system or a user message."""
+
+ json_schema: OptionalNullable[JSONSchema] = UNSET
+
+ @model_serializer(mode="wrap")
+ def serialize_model(self, handler):
+ optional_fields = ["type", "json_schema"]
+ nullable_fields = ["json_schema"]
+ null_default_fields = []
+
+ serialized = handler(self)
+
+ m = {}
+
+ for n, f in self.model_fields.items():
+ k = f.alias or n
+ val = serialized.get(k)
+ serialized.pop(k, None)
+
+ optional_nullable = k in optional_fields and k in nullable_fields
+ is_set = (
+ self.__pydantic_fields_set__.intersection({n})
+ or k in null_default_fields
+ ) # pylint: disable=no-member
+
+ if val is not None and val != UNSET_SENTINEL:
+ m[k] = val
+ elif val != UNSET_SENTINEL and (
+ not k in optional_fields or (optional_nullable and is_set)
+ ):
+ m[k] = val
+
+ return m
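
For completeness, a hedged sketch of driving the new `json_schema` response format directly, without the `chat.parse` convenience wrapper; the model name, prompt and schema are illustrative:

```python
# Sketch: passing a hand-built json_schema ResponseFormat to chat.complete.
import os

from mistralai import Mistral
from mistralai.models import JSONSchema, ResponseFormat

client = Mistral(api_key=os.environ["MISTRAL_API_KEY"])

response_format = ResponseFormat(
    type="json_schema",
    json_schema=JSONSchema(
        name="Student",
        schema_definition={
            "type": "object",
            "properties": {"name": {"type": "string"}, "age": {"type": "integer"}},
            "required": ["name", "age"],
            "additionalProperties": False,
        },
        strict=True,
    ),
)

res = client.chat.complete(
    model="mistral-large-latest",
    messages=[{"role": "user", "content": "Invent a student and answer in JSON."}],
    response_format=response_format,
)
print(res.choices[0].message.content)  # raw JSON text conforming to the schema
```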
mistralai/models/responseformats.py CHANGED
@@ -4,5 +4,5 @@ from __future__ import annotations
  from typing import Literal
 
 
- ResponseFormats = Literal["text", "json_object"]
+ ResponseFormats = Literal["text", "json_object", "json_schema"]
  r"""An object specifying the format that the model must output. Setting to `{ \"type\": \"json_object\" }` enables JSON mode, which guarantees the message the model generates is in JSON. When using JSON mode you MUST also instruct the model to produce JSON yourself with a system or a user message."""
mistralai/sdk.py CHANGED
@@ -1,7 +1,7 @@
  """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
  from .basesdk import BaseSDK
- from .httpclient import AsyncHttpClient, HttpClient
+ from .httpclient import AsyncHttpClient, ClientOwner, HttpClient, close_clients
  from .sdkconfiguration import SDKConfiguration
  from .utils.logger import Logger, get_default_logger
  from .utils.retries import RetryConfig
@@ -18,7 +18,8 @@ from mistralai.fim import Fim
  from mistralai.fine_tuning import FineTuning
  from mistralai.models_ import Models
  from mistralai.types import OptionalNullable, UNSET
- from typing import Any, Callable, Dict, Optional, Union
+ from typing import Any, Callable, Dict, Optional, Union, cast
+ import weakref
 
 
  class Mistral(BaseSDK):
@@ -118,6 +119,14 @@ class Mistral(BaseSDK):
  # pylint: disable=protected-access
  self.sdk_configuration.__dict__["_hooks"] = hooks
 
+ weakref.finalize(
+ self,
+ close_clients,
+ cast(ClientOwner, self.sdk_configuration),
+ self.sdk_configuration.client,
+ self.sdk_configuration.async_client,
+ )
+
  self._init_sdks()
 
  def _init_sdks(self):
@@ -1,6 +1,6 @@
  Metadata-Version: 2.3
  Name: mistralai
- Version: 1.4.0
+ Version: 1.5.0
  Summary: Python Client SDK for the Mistral AI API.
  Author: Mistral
  Requires-Python: >=3.8
@@ -67,6 +67,7 @@ Mistral AI API: Our Chat Completion and Embeddings APIs specification. Create yo
  * [Server Selection](https://github.com/mistralai/client-python/blob/master/#server-selection)
  * [Custom HTTP Client](https://github.com/mistralai/client-python/blob/master/#custom-http-client)
  * [Authentication](https://github.com/mistralai/client-python/blob/master/#authentication)
+ * [Resource Management](https://github.com/mistralai/client-python/blob/master/#resource-management)
  * [Debugging](https://github.com/mistralai/client-python/blob/master/#debugging)
  * [IDE Support](https://github.com/mistralai/client-python/blob/master/#ide-support)
  * [Development](https://github.com/mistralai/client-python/blob/master/#development)
@@ -77,6 +78,11 @@ Mistral AI API: Our Chat Completion and Embeddings APIs specification. Create yo
  <!-- Start SDK Installation [installation] -->
  ## SDK Installation
 
+ > [!NOTE]
+ > **Python version upgrade policy**
+ >
+ > Once a Python version reaches its [official end of life date](https://devguide.python.org/versions/), a 3-month grace period is provided for users to upgrade. Following this grace period, the minimum python version supported in the SDK will be updated.
+
  The SDK can be installed with either *pip* or *poetry* package managers.
 
  ### PIP
@@ -779,6 +785,32 @@ with Mistral(
  ```
  <!-- End Authentication [security] -->
 
+ <!-- Start Resource Management [resource-management] -->
+ ## Resource Management
+
+ The `Mistral` class implements the context manager protocol and registers a finalizer function to close the underlying sync and async HTTPX clients it uses under the hood. This will close HTTP connections, release memory and free up other resources held by the SDK. In short-lived Python programs and notebooks that make a few SDK method calls, resource management may not be a concern. However, in longer-lived programs, it is beneficial to create a single SDK instance via a [context manager][context-manager] and reuse it across the application.
+
+ [context-manager]: https://docs.python.org/3/reference/datamodel.html#context-managers
+
+ ```python
+ from mistralai import Mistral
+ import os
+ def main():
+ with Mistral(
+ api_key=os.getenv("MISTRAL_API_KEY", ""),
+ ) as mistral:
+ # Rest of application here...
+
+
+ # Or when using async:
+ async def amain():
+ async with Mistral(
+ api_key=os.getenv("MISTRAL_API_KEY", ""),
+ ) as mistral:
+ # Rest of application here...
+ ```
+ <!-- End Resource Management [resource-management] -->
+
  <!-- Start Debugging [debug] -->
  ## Debugging
 
@@ -131,22 +131,31 @@ mistralai/_hooks/deprecation_warning.py,sha256=eyEOf7-o9uqqNWJnufD2RXp3dYrGV4in9
  mistralai/_hooks/registration.py,sha256=ML0W-XbE4WYdJ4eGks_XxF2aLCJTaIWjQATFGzFwvyU,861
  mistralai/_hooks/sdkhooks.py,sha256=s-orhdvnV89TmI3QiPC2LWQtYeM9RrsG1CTll-fYZmQ,2559
  mistralai/_hooks/types.py,sha256=vUkTVk_TSaK10aEj368KYWvnd4T5EsawixMcTro_UHc,2570
- mistralai/_version.py,sha256=Ks6KHk739TqW4iKtkhh7FmwsiwVVsnyyC6K3ETq80ls,462
+ mistralai/_version.py,sha256=GRgL4RQk2kYt7mohgi1PRAMqEbZSHKMkxC4qZQ_m4Ak,460
  mistralai/agents.py,sha256=63-FJfMJ_9Qb0O0-uorAJM8km4FHpCEjjIk14margM8,31392
  mistralai/async_client.py,sha256=KUdYxIIqoD6L7vB0EGwUR6lQ0NK5iCTHjnLVR9CVcJY,355
  mistralai/basesdk.py,sha256=da0sFeLR-ztU5-fuGJ4TMqUnnqSzXbPAjpNgKI52tBk,11999
  mistralai/batch.py,sha256=YN4D0Duwrap9Ysmp_lRpADYp1Znay7THE_z8ERGvDds,501
- mistralai/chat.py,sha256=wO3VoQYzEbBDly8sNKj1NKxlNMz51LN4E5toul-UKUs,35575
+ mistralai/chat.py,sha256=n_FpKg-yN0CbAcnVRAkkwcXQuBPwiZBe3e1YSg2hfq8,39339
  mistralai/classifiers.py,sha256=oxGQEj6QfIWvV2GIAhfev9DVbFTG79uR5ODKTmOiJxs,16594
  mistralai/client.py,sha256=hrPg-LciKMKiascF0WbRRmqQyCv1lb2yDh6j-aaKVNo,509
  mistralai/embeddings.py,sha256=oT6SgsC3ODtn9mAWfpHN9Eli6puLmz9dEe-ZrY0QiPA,8590
+ mistralai/extra/README.md,sha256=BTS9fy0ijkiUP7ZVoFQ7FVBxHtXIXqucYZyy_ucFjo4,1739
+ mistralai/extra/__init__.py,sha256=MHf0pUgLc9Sb7eTUE31JlE2FKMxfQupmJ_iR8UkgQ9w,360
+ mistralai/extra/struct_chat.py,sha256=ZkpdExC5rgC-nBZ44hQIVhQmK6lYMk36RBSFPZMFaIg,2157
+ mistralai/extra/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ mistralai/extra/tests/test_struct_chat.py,sha256=WT6GGfcbXCok8UkEny19u4q1g2QOgkekvmAb3ZinQZ8,4343
+ mistralai/extra/tests/test_utils.py,sha256=VesGDR_IiE6u0iY7yOi1iERd7esdJgi2aL4xZp0vKVI,5113
+ mistralai/extra/utils/__init__.py,sha256=SExo5t_hx0ybiQhVJIG3r3hOA-Pfny3lIO_WsqNXlN8,116
+ mistralai/extra/utils/_pydantic_helper.py,sha256=kU_HbsSl1qGXnrrHnBcxun2MtHowu8eBp3jYMyFsPWw,859
+ mistralai/extra/utils/response_format.py,sha256=OiIpNXMODKJ6U2QDCXxPHBoVtXzXF7jtBzCLmI4t_CU,907
  mistralai/files.py,sha256=E1-MxJc-84IdKrc-0k-bvYNa7OSoNFCQ7wBX9tMnFb8,44359
  mistralai/fim.py,sha256=WFlELwTAT_dT7HcBRuby8xqoQsUsw-IXqzqufyyqz_g,27289
  mistralai/fine_tuning.py,sha256=UENQqfE054VEsAYxdruV-TBLFIFfO-joXNznH08GUvE,477
- mistralai/httpclient.py,sha256=WDbLpMzo7qmWki_ryOJcCAYNI1T4uyWKV08rRuCdNII,2688
+ mistralai/httpclient.py,sha256=N-D-srtDBykpfyVKacTY4upDGvNLqdWlEYqhJvta99E,4194
  mistralai/jobs.py,sha256=SVNlvn8XGaCkNJ5tKOKci5QLavacmkNqoYeIGF4ik0Q,43481
  mistralai/mistral_jobs.py,sha256=2ScKd2Tv79-MWxEQkrqr53Ikya8rmTbSiJ96judp7DY,30166
- mistralai/models/__init__.py,sha256=W9QnG5egzQoSpFCy5Oiz9wYHvbmN1EkVvwW173XeLsY,21852
+ mistralai/models/__init__.py,sha256=IGlVf2t6ZzB5HvRNQpFaf5WmKJOemcrCU9Uvsf9IAz0,21953
  mistralai/models/agentscompletionrequest.py,sha256=2tV6_p33zLCfBD7EzlAVeSjG0E_pknbVZlbQFM3Lulc,7794
  mistralai/models/agentscompletionstreamrequest.py,sha256=D1fla5nnnwNKXwHG1w4XVGnqaEvx6sOuhTXdP_e65sM,7239
  mistralai/models/apiendpoint.py,sha256=Hvar5leWsJR_FYb0UzRlSw3vRdBZhk_6BR5r2pIb214,400
@@ -218,6 +227,7 @@ mistralai/models/jobs_api_routes_fine_tuning_start_fine_tuning_jobop.py,sha256=h
  mistralai/models/jobs_api_routes_fine_tuning_unarchive_fine_tuned_modelop.py,sha256=_pkyhD7OzG-59fgcajI9NmSLTLDktkCxXo_IuvWeyfs,636
  mistralai/models/jobs_api_routes_fine_tuning_update_fine_tuned_modelop.py,sha256=s-EYS-Hw0NExYeIyN-3JlHbKmnTmtyB8ljVSfOylqYk,907
  mistralai/models/jobsout.py,sha256=uCKt0aw7yXzI4oLDGeAAEhsRjdRg3g7lPopg0__czTA,818
+ mistralai/models/jsonschema.py,sha256=Sh6VCcRxJffd-vXX61GVElbxKNi-j2MKvMUZQcW5EUs,1653
  mistralai/models/legacyjobmetadataout.py,sha256=08zAGNTSrICsK8u2SFFUXiNWF7MCQvezmFQeMQzxsys,4762
  mistralai/models/listfilesout.py,sha256=tW2fNabLKcftc5kytkjwVaChlOzWRL4FKtNzDak9MNs,468
  mistralai/models/metricout.py,sha256=dXQMMU4Nk6-Zr06Jx1TWilFi6cOwiVLjSanCFn0cPxo,2034
@@ -225,8 +235,8 @@ mistralai/models/modelcapabilities.py,sha256=No-Dl09zT1sG4MxsWnx4s8Yo1tUeMQ7k-HR
  mistralai/models/modellist.py,sha256=D4Y784kQkx0ARhofFrpEqGLfxa-jTY8ev0TQMrD_n8I,995
  mistralai/models/prediction.py,sha256=54P1n4Y5pXu4YsFKAcmThj0GyUrWxyOlIfjG5K2htB8,726
  mistralai/models/referencechunk.py,sha256=A9vV5pZv-tUqGlswdu0HOyCYy0Q-UIJY0Oc9ZfM6XJA,519
- mistralai/models/responseformat.py,sha256=C_zO6X4cbT1qSS_q9Qxq64AmtfK10i9tqEz_39ZcFzo,1045
- mistralai/models/responseformats.py,sha256=oeXHoVUoZrZwrz-0cm-rHj5sHygv9MpeqgdummGa8ww,488
+ mistralai/models/responseformat.py,sha256=-TAPGth3_FAiNl-kuE4COI5cSP5fxQ7xewFSV989i58,2225
+ mistralai/models/responseformats.py,sha256=O9lwS2M9m53DsRxTC4uRP12SvRhgaQoMjIYsDys5A7s,503
  mistralai/models/retrieve_model_v1_models_model_id_getop.py,sha256=N9_JFwiz9tz4zRXJ9c1V0c_anFEVxVzPDoFt2Wrer4M,1388
  mistralai/models/retrievefileout.py,sha256=nAjSITJCHj0daChhpwOZTmps-74mmYZO4IckGA0yIvQ,2644
  mistralai/models/sampletype.py,sha256=zowUiTFxum8fltBs6j__BrFPio-dQdG0CIyLj-5icG8,316
@@ -254,7 +264,7 @@ mistralai/models/wandbintegration.py,sha256=BkLD3r08ToZkMAhPXdnC7bfOGr3banKqt1wV
  mistralai/models/wandbintegrationout.py,sha256=C0HpS8jJGnACs7eWnuIq0qJEroIUAbjkvzfSSkSKS7Q,2274
  mistralai/models_.py,sha256=r0etSSUChK7hxxf7ZyhoeloyE8TyPRL1s5Jh4Jgukbw,44394
  mistralai/py.typed,sha256=zrp19r0G21lr2yRiMC0f8MFkQFGj9wMpSbboePMg8KM,59
- mistralai/sdk.py,sha256=9eHH11No72LdVYs7uJXyUqyiZ4hX-qXRtoCxq0LCn3M,5455
+ mistralai/sdk.py,sha256=itKygBxr6JLO8f9u0v0m-bsHLOjKutuLalZsg0OLISU,5733
  mistralai/sdkconfiguration.py,sha256=sx6U1xgxFbflhJdOBAeI7isId-8wlMd8NK9pi-JGs1o,1789
  mistralai/types/__init__.py,sha256=RArOwSgeeTIva6h-4ttjXwMUeCkz10nAFBL9D-QljI4,377
  mistralai/types/basemodel.py,sha256=PexI39iKiOkIlobB8Ueo0yn8PLHp6_wb-WO-zelNDZY,1170
@@ -274,7 +284,7 @@ mistralai/utils/serializers.py,sha256=BSJT7kBOkNBFyP7KREyMoe14JGbgijD1M6AXFMbdmc
  mistralai/utils/url.py,sha256=BgGPgcTA6MRK4bF8fjP2dUopN3NzEzxWMXPBVg8NQUA,5254
  mistralai/utils/values.py,sha256=_89YXPTI_BU6SXJBzFR4pIzTCBPQW9tsOTN1jeBBIDs,3428
  mistralai/version.py,sha256=iosXhlXclBwBqlADFKEilxAC2wWKbtuBKi87AmPi7s8,196
- mistralai-1.4.0.dist-info/LICENSE,sha256=rUtQ_9GD0OyLPlb-2uWVdfE87hzudMRmsW-tS-0DK-0,11340
- mistralai-1.4.0.dist-info/METADATA,sha256=6Gs8wKfT4XGnX9TsM5l63GXbcRcI6EnPo-23DI7YIS4,27705
- mistralai-1.4.0.dist-info/WHEEL,sha256=IYZQI976HJqqOpQU6PHkJ8fb3tMNBFjg-Cn-pwAbaFM,88
- mistralai-1.4.0.dist-info/RECORD,,
+ mistralai-1.5.0.dist-info/LICENSE,sha256=rUtQ_9GD0OyLPlb-2uWVdfE87hzudMRmsW-tS-0DK-0,11340
+ mistralai-1.5.0.dist-info/METADATA,sha256=drBwjaz85HBcWlgOsNldybLkbE3KitbeGUsThLU8etU,29251
+ mistralai-1.5.0.dist-info/WHEEL,sha256=IYZQI976HJqqOpQU6PHkJ8fb3tMNBFjg-Cn-pwAbaFM,88
+ mistralai-1.5.0.dist-info/RECORD,,