mistralai 1.2.2__py3-none-any.whl → 1.2.4__py3-none-any.whl

This diff compares two publicly released versions of the package as published to a supported public registry. It is provided for informational purposes only and reflects the package contents exactly as they appear in that registry.
Files changed (89)
  1. mistralai/_version.py +1 -1
  2. mistralai/agents.py +5 -5
  3. mistralai/chat.py +5 -5
  4. mistralai/files.py +166 -0
  5. mistralai/fim.py +5 -5
  6. mistralai/httpclient.py +6 -0
  7. mistralai/jobs.py +2 -2
  8. mistralai/models/__init__.py +22 -3
  9. mistralai/models/agentscompletionrequest.py +23 -11
  10. mistralai/models/agentscompletionstreamrequest.py +23 -13
  11. mistralai/models/apiendpoint.py +11 -3
  12. mistralai/models/assistantmessage.py +7 -3
  13. mistralai/models/batchjobin.py +4 -2
  14. mistralai/models/chatclassificationrequest.py +26 -17
  15. mistralai/models/chatcompletionrequest.py +19 -11
  16. mistralai/models/chatcompletionstreamrequest.py +23 -13
  17. mistralai/models/classificationrequest.py +7 -3
  18. mistralai/models/contentchunk.py +9 -3
  19. mistralai/models/deltamessage.py +5 -3
  20. mistralai/models/detailedjobout.py +2 -3
  21. mistralai/models/embeddingrequest.py +3 -3
  22. mistralai/models/files_api_routes_get_signed_urlop.py +25 -0
  23. mistralai/models/filesignedurl.py +13 -0
  24. mistralai/models/fimcompletionrequest.py +7 -3
  25. mistralai/models/fimcompletionstreamrequest.py +7 -3
  26. mistralai/models/functioncall.py +3 -3
  27. mistralai/models/imageurlchunk.py +9 -14
  28. mistralai/models/jobin.py +2 -3
  29. mistralai/models/jobout.py +2 -3
  30. mistralai/models/jobs_api_routes_fine_tuning_create_fine_tuning_jobop.py +9 -4
  31. mistralai/models/modellist.py +4 -2
  32. mistralai/models/referencechunk.py +20 -0
  33. mistralai/models/retrieve_model_v1_models_model_id_getop.py +5 -4
  34. mistralai/models/systemmessage.py +7 -3
  35. mistralai/models/textchunk.py +3 -9
  36. mistralai/models/toolmessage.py +14 -5
  37. mistralai/models/usermessage.py +5 -3
  38. mistralai/models/validationerror.py +3 -3
  39. mistralai/sdk.py +14 -0
  40. mistralai/sdkconfiguration.py +3 -3
  41. mistralai/utils/annotations.py +42 -17
  42. mistralai/utils/eventstreaming.py +61 -1
  43. {mistralai-1.2.2.dist-info → mistralai-1.2.4.dist-info}/METADATA +181 -176
  44. {mistralai-1.2.2.dist-info → mistralai-1.2.4.dist-info}/RECORD +88 -84
  45. mistralai_azure/_version.py +1 -1
  46. mistralai_azure/chat.py +5 -5
  47. mistralai_azure/httpclient.py +6 -0
  48. mistralai_azure/models/__init__.py +13 -1
  49. mistralai_azure/models/assistantmessage.py +7 -3
  50. mistralai_azure/models/chatcompletionrequest.py +23 -11
  51. mistralai_azure/models/chatcompletionstreamrequest.py +19 -13
  52. mistralai_azure/models/contentchunk.py +14 -2
  53. mistralai_azure/models/deltamessage.py +5 -3
  54. mistralai_azure/models/functioncall.py +3 -3
  55. mistralai_azure/models/referencechunk.py +20 -0
  56. mistralai_azure/models/systemmessage.py +7 -3
  57. mistralai_azure/models/textchunk.py +3 -9
  58. mistralai_azure/models/toolmessage.py +14 -5
  59. mistralai_azure/models/usermessage.py +5 -3
  60. mistralai_azure/models/validationerror.py +3 -3
  61. mistralai_azure/sdkconfiguration.py +3 -3
  62. mistralai_azure/utils/annotations.py +42 -17
  63. mistralai_azure/utils/eventstreaming.py +61 -1
  64. mistralai_gcp/_version.py +1 -1
  65. mistralai_gcp/chat.py +5 -5
  66. mistralai_gcp/fim.py +5 -5
  67. mistralai_gcp/httpclient.py +6 -0
  68. mistralai_gcp/models/__init__.py +13 -1
  69. mistralai_gcp/models/assistantmessage.py +7 -3
  70. mistralai_gcp/models/chatcompletionrequest.py +23 -11
  71. mistralai_gcp/models/chatcompletionstreamrequest.py +19 -13
  72. mistralai_gcp/models/contentchunk.py +14 -2
  73. mistralai_gcp/models/deltamessage.py +5 -3
  74. mistralai_gcp/models/fimcompletionrequest.py +7 -3
  75. mistralai_gcp/models/fimcompletionstreamrequest.py +7 -3
  76. mistralai_gcp/models/functioncall.py +3 -3
  77. mistralai_gcp/models/referencechunk.py +20 -0
  78. mistralai_gcp/models/systemmessage.py +7 -3
  79. mistralai_gcp/models/textchunk.py +3 -9
  80. mistralai_gcp/models/toolmessage.py +14 -5
  81. mistralai_gcp/models/usermessage.py +5 -3
  82. mistralai_gcp/models/validationerror.py +3 -3
  83. mistralai_gcp/sdk.py +5 -4
  84. mistralai_gcp/sdkconfiguration.py +3 -3
  85. mistralai_gcp/utils/annotations.py +42 -17
  86. mistralai_gcp/utils/eventstreaming.py +61 -1
  87. mistralai/models/finetuneablemodel.py +0 -14
  88. {mistralai-1.2.2.dist-info → mistralai-1.2.4.dist-info}/LICENSE +0 -0
  89. {mistralai-1.2.2.dist-info → mistralai-1.2.4.dist-info}/WHEEL +0 -0
mistralai/models/toolmessage.py CHANGED
@@ -1,24 +1,33 @@
  """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

  from __future__ import annotations
+ from .contentchunk import ContentChunk, ContentChunkTypedDict
  from mistralai.types import BaseModel, Nullable, OptionalNullable, UNSET, UNSET_SENTINEL
  from pydantic import model_serializer
- from typing import Literal, Optional
- from typing_extensions import NotRequired, TypedDict
+ from typing import List, Literal, Optional, Union
+ from typing_extensions import NotRequired, TypeAliasType, TypedDict
+
+
+ ToolMessageContentTypedDict = TypeAliasType(
+     "ToolMessageContentTypedDict", Union[str, List[ContentChunkTypedDict]]
+ )
+
+
+ ToolMessageContent = TypeAliasType("ToolMessageContent", Union[str, List[ContentChunk]])


  ToolMessageRole = Literal["tool"]


  class ToolMessageTypedDict(TypedDict):
-     content: str
+     content: Nullable[ToolMessageContentTypedDict]
      tool_call_id: NotRequired[Nullable[str]]
      name: NotRequired[Nullable[str]]
      role: NotRequired[ToolMessageRole]


  class ToolMessage(BaseModel):
-     content: str
+     content: Nullable[ToolMessageContent]

      tool_call_id: OptionalNullable[str] = UNSET

@@ -29,7 +38,7 @@ class ToolMessage(BaseModel):
      @model_serializer(mode="wrap")
      def serialize_model(self, handler):
          optional_fields = ["tool_call_id", "name", "role"]
-         nullable_fields = ["tool_call_id", "name"]
+         nullable_fields = ["content", "tool_call_id", "name"]
          null_default_fields = []

          serialized = handler(self)
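
The hunks above relax ToolMessage.content from a required plain string to a nullable union of a string or a list of content chunks, matching the other message models. A minimal sketch of what the new field accepts, assuming ToolMessage and TextChunk are re-exported from mistralai.models as the file list suggests:

from mistralai.models import TextChunk, ToolMessage

# Plain-string content still works exactly as in 1.2.2.
text_result = ToolMessage(content="42", tool_call_id="call_0", name="calculator")

# New in 1.2.4: content may be a list of content chunks (or None), mirroring the
# ToolMessageContent = Union[str, List[ContentChunk]] alias introduced above.
chunked_result = ToolMessage(
    content=[TextChunk(text="The answer is 42.")],
    tool_call_id="call_0",
    name="calculator",
)
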
mistralai/models/usermessage.py CHANGED
@@ -5,13 +5,15 @@ from .contentchunk import ContentChunk, ContentChunkTypedDict
  from mistralai.types import BaseModel, Nullable, UNSET_SENTINEL
  from pydantic import model_serializer
  from typing import List, Literal, Optional, Union
- from typing_extensions import NotRequired, TypedDict
+ from typing_extensions import NotRequired, TypeAliasType, TypedDict


- UserMessageContentTypedDict = Union[str, List[ContentChunkTypedDict]]
+ UserMessageContentTypedDict = TypeAliasType(
+     "UserMessageContentTypedDict", Union[str, List[ContentChunkTypedDict]]
+ )


- UserMessageContent = Union[str, List[ContentChunk]]
+ UserMessageContent = TypeAliasType("UserMessageContent", Union[str, List[ContentChunk]])


  UserMessageRole = Literal["user"]
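
As in toolmessage.py, the bare Union aliases here are wrapped in typing_extensions.TypeAliasType, which gives each union an explicit, named alias that Pydantic v2 can resolve by name (useful once the chunk unions are referenced from many models or become self-referential). A small illustrative sketch of the pattern with a recent Pydantic v2; the names below are made up for the example and are not part of the SDK:

from typing import List, Union
from typing_extensions import TypeAliasType
from pydantic import TypeAdapter

# A named alias instead of an anonymous Union[str, List[str]].
ExampleContent = TypeAliasType("ExampleContent", Union[str, List[str]])

adapter = TypeAdapter(ExampleContent)
print(adapter.validate_python("hello"))      # 'hello'
print(adapter.validate_python(["a", "b"]))   # ['a', 'b']
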
mistralai/models/validationerror.py CHANGED
@@ -3,13 +3,13 @@
  from __future__ import annotations
  from mistralai.types import BaseModel
  from typing import List, Union
- from typing_extensions import TypedDict
+ from typing_extensions import TypeAliasType, TypedDict


- LocTypedDict = Union[str, int]
+ LocTypedDict = TypeAliasType("LocTypedDict", Union[str, int])


- Loc = Union[str, int]
+ Loc = TypeAliasType("Loc", Union[str, int])


  class ValidationErrorTypedDict(TypedDict):
mistralai/sdk.py CHANGED
@@ -129,3 +129,17 @@ class Mistral(BaseSDK):
          self.agents = Agents(self.sdk_configuration)
          self.embeddings = Embeddings(self.sdk_configuration)
          self.classifiers = Classifiers(self.sdk_configuration)
+
+     def __enter__(self):
+         return self
+
+     async def __aenter__(self):
+         return self
+
+     def __exit__(self, exc_type, exc_val, exc_tb):
+         if self.sdk_configuration.client is not None:
+             self.sdk_configuration.client.close()
+
+     async def __aexit__(self, exc_type, exc_val, exc_tb):
+         if self.sdk_configuration.async_client is not None:
+             await self.sdk_configuration.async_client.aclose()
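
With __enter__/__exit__ and their async counterparts in place, the client can be used as a context manager so the underlying httpx clients are closed deterministically instead of waiting on garbage collection. A minimal usage sketch (model name and message are illustrative):

import os
from mistralai import Mistral

with Mistral(api_key=os.environ["MISTRAL_API_KEY"]) as client:
    res = client.chat.complete(
        model="mistral-small-latest",
        messages=[{"role": "user", "content": "Say hello"}],
    )
    print(res.choices[0].message.content)
# On exit, the configured sync httpx client is closed automatically.

The async form is symmetric: async with Mistral(...) closes the async client via aclose() on exit, as the __aexit__ body above shows.
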
mistralai/sdkconfiguration.py CHANGED
@@ -28,9 +28,9 @@ class SDKConfiguration:
      server: Optional[str] = ""
      language: str = "python"
      openapi_doc_version: str = "0.0.2"
-     sdk_version: str = "1.2.2"
-     gen_version: str = "2.457.2"
-     user_agent: str = "speakeasy-sdk/python 1.2.2 2.457.2 0.0.2 mistralai"
+     sdk_version: str = "1.2.4"
+     gen_version: str = "2.470.1"
+     user_agent: str = "speakeasy-sdk/python 1.2.4 2.470.1 0.0.2 mistralai"
      retry_config: OptionalNullable[RetryConfig] = Field(default_factory=lambda: UNSET)
      timeout_ms: Optional[int] = None

mistralai/utils/annotations.py CHANGED
@@ -1,30 +1,55 @@
  """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

  from enum import Enum
- from typing import Any
+ from typing import Any, Optional

  def get_discriminator(model: Any, fieldname: str, key: str) -> str:
-     if isinstance(model, dict):
-         try:
-             return f'{model.get(key)}'
-         except AttributeError as e:
-             raise ValueError(f'Could not find discriminator key {key} in {model}') from e
+     """
+     Recursively search for the discriminator attribute in a model.

-     if hasattr(model, fieldname):
-         attr = getattr(model, fieldname)
+     Args:
+         model (Any): The model to search within.
+         fieldname (str): The name of the field to search for.
+         key (str): The key to search for in dictionaries.

-         if isinstance(attr, Enum):
-             return f'{attr.value}'
+     Returns:
+         str: The name of the discriminator attribute.

-         return f'{attr}'
+     Raises:
+         ValueError: If the discriminator attribute is not found.
+     """
+     upper_fieldname = fieldname.upper()

-     fieldname = fieldname.upper()
-     if hasattr(model, fieldname):
-         attr = getattr(model, fieldname)
+     def get_field_discriminator(field: Any) -> Optional[str]:
+         """Search for the discriminator attribute in a given field."""

-         if isinstance(attr, Enum):
-             return f'{attr.value}'
+         if isinstance(field, dict):
+             if key in field:
+                 return f'{field[key]}'

-         return f'{attr}'
+         if hasattr(field, fieldname):
+             attr = getattr(field, fieldname)
+             if isinstance(attr, Enum):
+                 return f'{attr.value}'
+             return f'{attr}'
+
+         if hasattr(field, upper_fieldname):
+             attr = getattr(field, upper_fieldname)
+             if isinstance(attr, Enum):
+                 return f'{attr.value}'
+             return f'{attr}'
+
+         return None
+
+
+     if isinstance(model, list):
+         for field in model:
+             discriminator = get_field_discriminator(field)
+             if discriminator is not None:
+                 return discriminator
+
+     discriminator = get_field_discriminator(model)
+     if discriminator is not None:
+         return discriminator

      raise ValueError(f'Could not find discriminator field {fieldname} in {model}')
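
The rewrite replaces the flat lookup with a nested get_field_discriminator helper that checks dict keys, the attribute named by fieldname, and its upper-cased constant form, and, new in this version, walks a list of candidates until one yields a value. For illustration, against the code shown above:

from enum import Enum
from mistralai.utils.annotations import get_discriminator

# Dict input: the value is read via the `key` argument.
print(get_discriminator({"type": "text", "text": "hi"}, "type", "type"))   # text

# List input (new): each element is searched until a discriminator is found.
print(get_discriminator([{"type": "reference"}], "type", "type"))          # reference

class Kind(Enum):
    IMAGE_URL = "image_url"

class Chunk:
    type = Kind.IMAGE_URL

# Object input: the attribute named by `fieldname` is read, Enum values unwrapped.
print(get_discriminator(Chunk(), "type", "type"))                          # image_url
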
mistralai/utils/eventstreaming.py CHANGED
@@ -2,12 +2,72 @@

  import re
  import json
- from typing import Callable, TypeVar, Optional, Generator, AsyncGenerator, Tuple
+ from typing import (
+     Callable,
+     Generic,
+     TypeVar,
+     Optional,
+     Generator,
+     AsyncGenerator,
+     Tuple,
+ )
  import httpx

  T = TypeVar("T")


+ class EventStream(Generic[T]):
+     response: httpx.Response
+     generator: Generator[T, None, None]
+
+     def __init__(
+         self,
+         response: httpx.Response,
+         decoder: Callable[[str], T],
+         sentinel: Optional[str] = None,
+     ):
+         self.response = response
+         self.generator = stream_events(response, decoder, sentinel)
+
+     def __iter__(self):
+         return self
+
+     def __next__(self):
+         return next(self.generator)
+
+     def __enter__(self):
+         return self
+
+     def __exit__(self, exc_type, exc_val, exc_tb):
+         self.response.close()
+
+
+ class EventStreamAsync(Generic[T]):
+     response: httpx.Response
+     generator: AsyncGenerator[T, None]
+
+     def __init__(
+         self,
+         response: httpx.Response,
+         decoder: Callable[[str], T],
+         sentinel: Optional[str] = None,
+     ):
+         self.response = response
+         self.generator = stream_events_async(response, decoder, sentinel)
+
+     def __aiter__(self):
+         return self
+
+     async def __anext__(self):
+         return await self.generator.__anext__()
+
+     async def __aenter__(self):
+         return self
+
+     async def __aexit__(self, exc_type, exc_val, exc_tb):
+         await self.response.aclose()
+
+
  class ServerEvent:
      id: Optional[str] = None
      event: Optional[str] = None
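
EventStream and EventStreamAsync wrap the existing stream_events/stream_events_async generators with iterator and context-manager protocols, so the underlying httpx response is closed when the block exits. A hedged sketch of direct use against a hypothetical SSE endpoint (the endpoint URL and the assumption that each event's data payload is JSON are illustrative); in practice the SDK's own stream methods are the ones expected to hand these wrappers back:

import json
import httpx
from mistralai.utils.eventstreaming import EventStream

with httpx.Client() as http:
    with http.stream("GET", "https://example.com/sse") as response:
        # decoder turns each event's data payload into T; sentinel ends the stream.
        with EventStream(response, decoder=json.loads, sentinel="[DONE]") as events:
            for event in events:
                print(event)  # each decoded event until the sentinel is seen

EventStreamAsync is the same shape with async with / async for, closing the response via aclose() as shown in __aexit__ above.
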