mistralai-1.2.3-py3-none-any.whl → mistralai-1.2.4-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (85)
  1. mistralai/_version.py +1 -1
  2. mistralai/agents.py +5 -5
  3. mistralai/chat.py +5 -5
  4. mistralai/files.py +166 -0
  5. mistralai/fim.py +5 -5
  6. mistralai/httpclient.py +6 -0
  7. mistralai/jobs.py +2 -2
  8. mistralai/models/__init__.py +9 -2
  9. mistralai/models/agentscompletionrequest.py +23 -11
  10. mistralai/models/agentscompletionstreamrequest.py +23 -13
  11. mistralai/models/apiendpoint.py +11 -3
  12. mistralai/models/assistantmessage.py +7 -3
  13. mistralai/models/batchjobin.py +4 -2
  14. mistralai/models/chatclassificationrequest.py +26 -17
  15. mistralai/models/chatcompletionrequest.py +19 -11
  16. mistralai/models/chatcompletionstreamrequest.py +23 -13
  17. mistralai/models/classificationrequest.py +7 -3
  18. mistralai/models/contentchunk.py +5 -4
  19. mistralai/models/deltamessage.py +5 -3
  20. mistralai/models/detailedjobout.py +2 -3
  21. mistralai/models/embeddingrequest.py +3 -3
  22. mistralai/models/files_api_routes_get_signed_urlop.py +25 -0
  23. mistralai/models/filesignedurl.py +13 -0
  24. mistralai/models/fimcompletionrequest.py +7 -3
  25. mistralai/models/fimcompletionstreamrequest.py +7 -3
  26. mistralai/models/functioncall.py +3 -3
  27. mistralai/models/imageurlchunk.py +9 -14
  28. mistralai/models/jobin.py +2 -3
  29. mistralai/models/jobout.py +2 -3
  30. mistralai/models/jobs_api_routes_fine_tuning_create_fine_tuning_jobop.py +9 -4
  31. mistralai/models/modellist.py +4 -2
  32. mistralai/models/referencechunk.py +3 -11
  33. mistralai/models/retrieve_model_v1_models_model_id_getop.py +5 -4
  34. mistralai/models/systemmessage.py +7 -3
  35. mistralai/models/textchunk.py +3 -9
  36. mistralai/models/toolmessage.py +5 -3
  37. mistralai/models/usermessage.py +5 -3
  38. mistralai/models/validationerror.py +3 -3
  39. mistralai/sdk.py +14 -0
  40. mistralai/sdkconfiguration.py +3 -3
  41. mistralai/utils/annotations.py +42 -17
  42. mistralai/utils/eventstreaming.py +61 -1
  43. {mistralai-1.2.3.dist-info → mistralai-1.2.4.dist-info}/METADATA +181 -176
  44. {mistralai-1.2.3.dist-info → mistralai-1.2.4.dist-info}/RECORD +84 -83
  45. mistralai_azure/chat.py +5 -5
  46. mistralai_azure/httpclient.py +6 -0
  47. mistralai_azure/models/assistantmessage.py +7 -3
  48. mistralai_azure/models/chatcompletionrequest.py +23 -11
  49. mistralai_azure/models/chatcompletionstreamrequest.py +19 -13
  50. mistralai_azure/models/contentchunk.py +4 -2
  51. mistralai_azure/models/deltamessage.py +5 -3
  52. mistralai_azure/models/functioncall.py +3 -3
  53. mistralai_azure/models/referencechunk.py +3 -11
  54. mistralai_azure/models/systemmessage.py +7 -3
  55. mistralai_azure/models/textchunk.py +3 -9
  56. mistralai_azure/models/toolmessage.py +5 -3
  57. mistralai_azure/models/usermessage.py +5 -3
  58. mistralai_azure/models/validationerror.py +3 -3
  59. mistralai_azure/sdkconfiguration.py +2 -2
  60. mistralai_azure/utils/annotations.py +42 -17
  61. mistralai_azure/utils/eventstreaming.py +61 -1
  62. mistralai_gcp/chat.py +5 -5
  63. mistralai_gcp/fim.py +5 -5
  64. mistralai_gcp/httpclient.py +6 -0
  65. mistralai_gcp/models/assistantmessage.py +7 -3
  66. mistralai_gcp/models/chatcompletionrequest.py +23 -11
  67. mistralai_gcp/models/chatcompletionstreamrequest.py +19 -13
  68. mistralai_gcp/models/contentchunk.py +4 -2
  69. mistralai_gcp/models/deltamessage.py +5 -3
  70. mistralai_gcp/models/fimcompletionrequest.py +7 -3
  71. mistralai_gcp/models/fimcompletionstreamrequest.py +7 -3
  72. mistralai_gcp/models/functioncall.py +3 -3
  73. mistralai_gcp/models/referencechunk.py +3 -11
  74. mistralai_gcp/models/systemmessage.py +7 -3
  75. mistralai_gcp/models/textchunk.py +3 -9
  76. mistralai_gcp/models/toolmessage.py +5 -3
  77. mistralai_gcp/models/usermessage.py +5 -3
  78. mistralai_gcp/models/validationerror.py +3 -3
  79. mistralai_gcp/sdk.py +5 -4
  80. mistralai_gcp/sdkconfiguration.py +2 -2
  81. mistralai_gcp/utils/annotations.py +42 -17
  82. mistralai_gcp/utils/eventstreaming.py +61 -1
  83. mistralai/models/finetuneablemodel.py +0 -14
  84. {mistralai-1.2.3.dist-info → mistralai-1.2.4.dist-info}/LICENSE +0 -0
  85. {mistralai-1.2.3.dist-info → mistralai-1.2.4.dist-info}/WHEEL +0 -0
mistralai_azure/models/systemmessage.py CHANGED
@@ -4,13 +4,17 @@ from __future__ import annotations
 from .textchunk import TextChunk, TextChunkTypedDict
 from mistralai_azure.types import BaseModel
 from typing import List, Literal, Optional, Union
-from typing_extensions import NotRequired, TypedDict
+from typing_extensions import NotRequired, TypeAliasType, TypedDict
 
 
-SystemMessageContentTypedDict = Union[str, List[TextChunkTypedDict]]
+SystemMessageContentTypedDict = TypeAliasType(
+    "SystemMessageContentTypedDict", Union[str, List[TextChunkTypedDict]]
+)
 
 
-SystemMessageContent = Union[str, List[TextChunk]]
+SystemMessageContent = TypeAliasType(
+    "SystemMessageContent", Union[str, List[TextChunk]]
+)
 
 
 Role = Literal["system"]
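
Most of the model changes in this release follow the pattern above: bare Union aliases are wrapped in typing_extensions.TypeAliasType, which gives each alias a real name. A minimal sketch of the difference, using placeholder names rather than SDK models and assuming a recent pydantic 2.x (which resolves TypeAliasType aliases):

    from typing import List, Union
    from typing_extensions import TypeAliasType
    from pydantic import BaseModel

    # Before: an anonymous alias that type checkers and pydantic expand inline.
    # Content = Union[str, List[str]]

    # After: a named alias; the name survives in error messages and JSON schema,
    # and named aliases may also be defined recursively.
    Content = TypeAliasType("Content", Union[str, List[str]])

    class Message(BaseModel):
        content: Content

    print(Message(content="hello").content)     # 'hello'
    print(Message(content=["a", "b"]).content)  # ['a', 'b']
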
mistralai_azure/models/textchunk.py CHANGED
@@ -2,11 +2,8 @@
 
 from __future__ import annotations
 from mistralai_azure.types import BaseModel
-from mistralai_azure.utils import validate_const
-import pydantic
-from pydantic.functional_validators import AfterValidator
 from typing import Literal, Optional
-from typing_extensions import Annotated, TypedDict
+from typing_extensions import NotRequired, TypedDict
 
 
 Type = Literal["text"]
@@ -14,13 +11,10 @@ Type = Literal["text"]
 
 class TextChunkTypedDict(TypedDict):
     text: str
-    type: Type
+    type: NotRequired[Type]
 
 
 class TextChunk(BaseModel):
     text: str
 
-    TYPE: Annotated[
-        Annotated[Optional[Type], AfterValidator(validate_const("text"))],
-        pydantic.Field(alias="type"),
-    ] = "text"
+    type: Optional[Type] = "text"
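
The practical effect of the textchunk.py change, sketched with a simplified stand-in (not the generated SDK class): the `type` key becomes optional in the TypedDict, and the model replaces the validate_const/Annotated machinery with a plain optional Literal default, which still rejects any value other than "text":

    from typing import Literal, Optional
    from pydantic import BaseModel

    class TextChunk(BaseModel):  # stand-in mirroring the new shape
        text: str
        type: Optional[Literal["text"]] = "text"

    chunk = TextChunk(text="hello")  # `type` no longer needs to be supplied
    assert chunk.type == "text"
    TextChunk.model_validate({"text": "hi", "type": "text"})  # still accepted
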
mistralai_azure/models/toolmessage.py CHANGED
@@ -11,13 +11,15 @@ from mistralai_azure.types import (
 )
 from pydantic import model_serializer
 from typing import List, Literal, Optional, Union
-from typing_extensions import NotRequired, TypedDict
+from typing_extensions import NotRequired, TypeAliasType, TypedDict
 
 
-ToolMessageContentTypedDict = Union[str, List[ContentChunkTypedDict]]
+ToolMessageContentTypedDict = TypeAliasType(
+    "ToolMessageContentTypedDict", Union[str, List[ContentChunkTypedDict]]
+)
 
 
-ToolMessageContent = Union[str, List[ContentChunk]]
+ToolMessageContent = TypeAliasType("ToolMessageContent", Union[str, List[ContentChunk]])
 
 
 ToolMessageRole = Literal["tool"]
mistralai_azure/models/usermessage.py CHANGED
@@ -5,13 +5,15 @@ from .contentchunk import ContentChunk, ContentChunkTypedDict
 from mistralai_azure.types import BaseModel, Nullable, UNSET_SENTINEL
 from pydantic import model_serializer
 from typing import List, Literal, Optional, Union
-from typing_extensions import NotRequired, TypedDict
+from typing_extensions import NotRequired, TypeAliasType, TypedDict
 
 
-UserMessageContentTypedDict = Union[str, List[ContentChunkTypedDict]]
+UserMessageContentTypedDict = TypeAliasType(
+    "UserMessageContentTypedDict", Union[str, List[ContentChunkTypedDict]]
+)
 
 
-UserMessageContent = Union[str, List[ContentChunk]]
+UserMessageContent = TypeAliasType("UserMessageContent", Union[str, List[ContentChunk]])
 
 
 UserMessageRole = Literal["user"]
mistralai_azure/models/validationerror.py CHANGED
@@ -3,13 +3,13 @@
 from __future__ import annotations
 from mistralai_azure.types import BaseModel
 from typing import List, Union
-from typing_extensions import TypedDict
+from typing_extensions import TypeAliasType, TypedDict
 
 
-LocTypedDict = Union[str, int]
+LocTypedDict = TypeAliasType("LocTypedDict", Union[str, int])
 
 
-Loc = Union[str, int]
+Loc = TypeAliasType("Loc", Union[str, int])
 
 
 class ValidationErrorTypedDict(TypedDict):
mistralai_azure/sdkconfiguration.py CHANGED
@@ -29,8 +29,8 @@ class SDKConfiguration:
     language: str = "python"
     openapi_doc_version: str = "0.0.2"
     sdk_version: str = "1.2.3"
-    gen_version: str = "2.460.1"
-    user_agent: str = "speakeasy-sdk/python 1.2.3 2.460.1 0.0.2 mistralai_azure"
+    gen_version: str = "2.470.1"
+    user_agent: str = "speakeasy-sdk/python 1.2.3 2.470.1 0.0.2 mistralai_azure"
     retry_config: OptionalNullable[RetryConfig] = Field(default_factory=lambda: UNSET)
     timeout_ms: Optional[int] = None
 
mistralai_azure/utils/annotations.py CHANGED
@@ -1,30 +1,55 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from enum import Enum
-from typing import Any
+from typing import Any, Optional
 
 def get_discriminator(model: Any, fieldname: str, key: str) -> str:
-    if isinstance(model, dict):
-        try:
-            return f'{model.get(key)}'
-        except AttributeError as e:
-            raise ValueError(f'Could not find discriminator key {key} in {model}') from e
+    """
+    Recursively search for the discriminator attribute in a model.
 
-    if hasattr(model, fieldname):
-        attr = getattr(model, fieldname)
+    Args:
+        model (Any): The model to search within.
+        fieldname (str): The name of the field to search for.
+        key (str): The key to search for in dictionaries.
 
-        if isinstance(attr, Enum):
-            return f'{attr.value}'
+    Returns:
+        str: The name of the discriminator attribute.
 
-        return f'{attr}'
+    Raises:
+        ValueError: If the discriminator attribute is not found.
+    """
+    upper_fieldname = fieldname.upper()
 
-    fieldname = fieldname.upper()
-    if hasattr(model, fieldname):
-        attr = getattr(model, fieldname)
+    def get_field_discriminator(field: Any) -> Optional[str]:
+        """Search for the discriminator attribute in a given field."""
 
-        if isinstance(attr, Enum):
-            return f'{attr.value}'
+        if isinstance(field, dict):
+            if key in field:
+                return f'{field[key]}'
 
-        return f'{attr}'
+        if hasattr(field, fieldname):
+            attr = getattr(field, fieldname)
+            if isinstance(attr, Enum):
+                return f'{attr.value}'
+            return f'{attr}'
+
+        if hasattr(field, upper_fieldname):
+            attr = getattr(field, upper_fieldname)
+            if isinstance(attr, Enum):
+                return f'{attr.value}'
+            return f'{attr}'
+
+        return None
+
+    if isinstance(model, list):
+        for field in model:
+            discriminator = get_field_discriminator(field)
+            if discriminator is not None:
+                return discriminator
+
+    discriminator = get_field_discriminator(model)
+    if discriminator is not None:
+        return discriminator
 
     raise ValueError(f'Could not find discriminator field {fieldname} in {model}')
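
Hypothetical calls against the new implementation, showing the lookup order (dict key first, then the attribute or its upper-cased variant, with list inputs scanned member by member). Kind and Chunk are stand-ins, and the import path assumes the re-export from mistralai_azure.utils used elsewhere in this diff:

    from enum import Enum
    from mistralai_azure.utils import get_discriminator

    class Kind(Enum):
        TEXT = "text"

    class Chunk:
        type = Kind.TEXT

    print(get_discriminator({"type": "text"}, "type", "type"))  # 'text' via dict key
    print(get_discriminator(Chunk(), "type", "type"))           # 'text' via attribute
    print(get_discriminator([Chunk()], "type", "type"))         # 'text' (list support is new)
    get_discriminator({}, "type", "type")                       # raises ValueError
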
mistralai_azure/utils/eventstreaming.py CHANGED
@@ -2,12 +2,72 @@
 
 import re
 import json
-from typing import Callable, TypeVar, Optional, Generator, AsyncGenerator, Tuple
+from typing import (
+    Callable,
+    Generic,
+    TypeVar,
+    Optional,
+    Generator,
+    AsyncGenerator,
+    Tuple,
+)
 import httpx
 
 T = TypeVar("T")
 
 
+class EventStream(Generic[T]):
+    response: httpx.Response
+    generator: Generator[T, None, None]
+
+    def __init__(
+        self,
+        response: httpx.Response,
+        decoder: Callable[[str], T],
+        sentinel: Optional[str] = None,
+    ):
+        self.response = response
+        self.generator = stream_events(response, decoder, sentinel)
+
+    def __iter__(self):
+        return self
+
+    def __next__(self):
+        return next(self.generator)
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        self.response.close()
+
+
+class EventStreamAsync(Generic[T]):
+    response: httpx.Response
+    generator: AsyncGenerator[T, None]
+
+    def __init__(
+        self,
+        response: httpx.Response,
+        decoder: Callable[[str], T],
+        sentinel: Optional[str] = None,
+    ):
+        self.response = response
+        self.generator = stream_events_async(response, decoder, sentinel)
+
+    def __aiter__(self):
+        return self
+
+    async def __anext__(self):
+        return await self.generator.__anext__()
+
+    async def __aenter__(self):
+        return self
+
+    async def __aexit__(self, exc_type, exc_val, exc_tb):
+        await self.response.aclose()
+
+
 class ServerEvent:
     id: Optional[str] = None
     event: Optional[str] = None
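
A minimal usage sketch of the new wrapper (the URL and decoder are placeholders, not SDK calls): an EventStream iterates exactly like the old generator did, but as a context manager it also guarantees the underlying httpx response is closed even if the consumer stops early:

    import json
    import httpx
    from mistralai_azure.utils.eventstreaming import EventStream

    with httpx.Client() as client:
        with client.stream("GET", "https://example.invalid/sse") as response:
            with EventStream(response, json.loads, sentinel="[DONE]") as stream:
                for event in stream:  # decoded events, ending at the sentinel
                    print(event)
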
mistralai_gcp/chat.py CHANGED
@@ -5,7 +5,7 @@ from mistralai_gcp import models, utils
 from mistralai_gcp._hooks import HookContext
 from mistralai_gcp.types import Nullable, OptionalNullable, UNSET
 from mistralai_gcp.utils import eventstreaming
-from typing import Any, AsyncGenerator, Generator, List, Optional, Union
+from typing import Any, List, Optional, Union
 
 
 class Chat(BaseSDK):
@@ -40,7 +40,7 @@ class Chat(BaseSDK):
         retries: OptionalNullable[utils.RetryConfig] = UNSET,
         server_url: Optional[str] = None,
         timeout_ms: Optional[int] = None,
-    ) -> Optional[Generator[models.CompletionEvent, None, None]]:
+    ) -> Optional[eventstreaming.EventStream[models.CompletionEvent]]:
         r"""Stream chat completion
 
         Mistral AI provides the ability to stream responses back to a client in order to allow partial results for certain requests. Tokens will be sent as data-only server-sent events as they become available, with the stream terminated by a data: [DONE] message. Otherwise, the server will hold the request open until the timeout or until completion, with the response containing the full result as JSON.
@@ -132,7 +132,7 @@
 
         data: Any = None
         if utils.match_response(http_res, "200", "text/event-stream"):
-            return eventstreaming.stream_events(
+            return eventstreaming.EventStream(
                 http_res,
                 lambda raw: utils.unmarshal_json(raw, models.CompletionEvent),
                 sentinel="[DONE]",
@@ -185,7 +185,7 @@ class Chat(BaseSDK):
         retries: OptionalNullable[utils.RetryConfig] = UNSET,
         server_url: Optional[str] = None,
         timeout_ms: Optional[int] = None,
-    ) -> Optional[AsyncGenerator[models.CompletionEvent, None]]:
+    ) -> Optional[eventstreaming.EventStreamAsync[models.CompletionEvent]]:
         r"""Stream chat completion
 
         Mistral AI provides the ability to stream responses back to a client in order to allow partial results for certain requests. Tokens will be sent as data-only server-sent events as they become available, with the stream terminated by a data: [DONE] message. Otherwise, the server will hold the request open until the timeout or until completion, with the response containing the full result as JSON.
@@ -277,7 +277,7 @@
 
         data: Any = None
         if utils.match_response(http_res, "200", "text/event-stream"):
-            return eventstreaming.stream_events_async(
+            return eventstreaming.EventStreamAsync(
                 http_res,
                 lambda raw: utils.unmarshal_json(raw, models.CompletionEvent),
                 sentinel="[DONE]",
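
The calling convention after this change, sketched with an illustrative model name and prompt, and assuming the MistralGoogleCloud entry point resolves project and region from the environment: stream() now returns an EventStream rather than a bare generator, so iteration is unchanged but the HTTP response can be closed deterministically via `with`:

    from mistralai_gcp import MistralGoogleCloud

    client = MistralGoogleCloud()

    res = client.chat.stream(
        model="mistral-large-2407",  # illustrative model id
        messages=[{"role": "user", "content": "Hello!"}],
    )
    if res is not None:
        with res as event_stream:
            for event in event_stream:
                # delta.content may be None on the final chunk
                print(event.data.choices[0].delta.content or "", end="")
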
mistralai_gcp/fim.py CHANGED
@@ -5,7 +5,7 @@ from mistralai_gcp import models, utils
 from mistralai_gcp._hooks import HookContext
 from mistralai_gcp.types import Nullable, OptionalNullable, UNSET
 from mistralai_gcp.utils import eventstreaming
-from typing import Any, AsyncGenerator, Generator, Optional, Union
+from typing import Any, Optional, Union
 
 
 class Fim(BaseSDK):
@@ -32,7 +32,7 @@ class Fim(BaseSDK):
         retries: OptionalNullable[utils.RetryConfig] = UNSET,
         server_url: Optional[str] = None,
         timeout_ms: Optional[int] = None,
-    ) -> Optional[Generator[models.CompletionEvent, None, None]]:
+    ) -> Optional[eventstreaming.EventStream[models.CompletionEvent]]:
         r"""Stream fim completion
 
         Mistral AI provides the ability to stream responses back to a client in order to allow partial results for certain requests. Tokens will be sent as data-only server-sent events as they become available, with the stream terminated by a data: [DONE] message. Otherwise, the server will hold the request open until the timeout or until completion, with the response containing the full result as JSON.
@@ -112,7 +112,7 @@
 
         data: Any = None
         if utils.match_response(http_res, "200", "text/event-stream"):
-            return eventstreaming.stream_events(
+            return eventstreaming.EventStream(
                 http_res,
                 lambda raw: utils.unmarshal_json(raw, models.CompletionEvent),
                 sentinel="[DONE]",
@@ -157,7 +157,7 @@ class Fim(BaseSDK):
         retries: OptionalNullable[utils.RetryConfig] = UNSET,
         server_url: Optional[str] = None,
         timeout_ms: Optional[int] = None,
-    ) -> Optional[AsyncGenerator[models.CompletionEvent, None]]:
+    ) -> Optional[eventstreaming.EventStreamAsync[models.CompletionEvent]]:
         r"""Stream fim completion
 
         Mistral AI provides the ability to stream responses back to a client in order to allow partial results for certain requests. Tokens will be sent as data-only server-sent events as they become available, with the stream terminated by a data: [DONE] message. Otherwise, the server will hold the request open until the timeout or until completion, with the response containing the full result as JSON.
@@ -237,7 +237,7 @@
 
         data: Any = None
         if utils.match_response(http_res, "200", "text/event-stream"):
-            return eventstreaming.stream_events_async(
+            return eventstreaming.EventStreamAsync(
                 http_res,
                 lambda raw: utils.unmarshal_json(raw, models.CompletionEvent),
                 sentinel="[DONE]",
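
The async endpoints mirror this: stream_async() now returns an EventStreamAsync, which supports `async with` and `async for`. A sketch under the same assumptions as the chat example above:

    import asyncio
    from mistralai_gcp import MistralGoogleCloud

    async def main() -> None:
        client = MistralGoogleCloud()
        res = await client.fim.stream_async(
            model="codestral-2405",  # illustrative model id
            prompt="def fibonacci(n: int) -> int:",
        )
        if res is not None:
            async with res as event_stream:
                async for event in event_stream:
                    print(event.data.choices[0].delta.content or "", end="")

    asyncio.run(main())
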
mistralai_gcp/httpclient.py CHANGED
@@ -41,6 +41,9 @@ class HttpClient(Protocol):
     ) -> httpx.Request:
         pass
 
+    def close(self) -> None:
+        pass
+
 
 @runtime_checkable
 class AsyncHttpClient(Protocol):
@@ -76,3 +79,6 @@ class AsyncHttpClient(Protocol):
         extensions: Optional[httpx._types.RequestExtensions] = None,
     ) -> httpx.Request:
         pass
+
+    async def aclose(self) -> None:
+        pass
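
Because HttpClient and AsyncHttpClient are runtime-checkable Protocols, httpx's stock clients already satisfy the new members (httpx.Client defines close() and httpx.AsyncClient defines aclose()), so existing custom clients keep working. A sketch of a conforming custom client:

    import httpx
    from mistralai_gcp.httpclient import HttpClient

    class LoggingClient(httpx.Client):
        # httpx.Client already provides send/build_request/close;
        # this subclass only adds a log line on close.
        def close(self) -> None:
            print("closing HTTP client")
            super().close()

    assert isinstance(LoggingClient(), HttpClient)  # structural Protocol check
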
mistralai_gcp/models/assistantmessage.py CHANGED
@@ -12,13 +12,17 @@ from mistralai_gcp.types import (
 )
 from pydantic import model_serializer
 from typing import List, Literal, Optional, Union
-from typing_extensions import NotRequired, TypedDict
+from typing_extensions import NotRequired, TypeAliasType, TypedDict
 
 
-AssistantMessageContentTypedDict = Union[str, List[ContentChunkTypedDict]]
+AssistantMessageContentTypedDict = TypeAliasType(
+    "AssistantMessageContentTypedDict", Union[str, List[ContentChunkTypedDict]]
+)
 
 
-AssistantMessageContent = Union[str, List[ContentChunk]]
+AssistantMessageContent = TypeAliasType(
+    "AssistantMessageContent", Union[str, List[ContentChunk]]
+)
 
 
 AssistantMessageRole = Literal["assistant"]
mistralai_gcp/models/chatcompletionrequest.py CHANGED
@@ -19,23 +19,30 @@ from mistralai_gcp.types import (
 from mistralai_gcp.utils import get_discriminator
 from pydantic import Discriminator, Tag, model_serializer
 from typing import List, Optional, Union
-from typing_extensions import Annotated, NotRequired, TypedDict
+from typing_extensions import Annotated, NotRequired, TypeAliasType, TypedDict
 
 
-ChatCompletionRequestStopTypedDict = Union[str, List[str]]
+ChatCompletionRequestStopTypedDict = TypeAliasType(
+    "ChatCompletionRequestStopTypedDict", Union[str, List[str]]
+)
 r"""Stop generation if this token is detected. Or if one of these tokens is detected when providing an array"""
 
 
-ChatCompletionRequestStop = Union[str, List[str]]
+ChatCompletionRequestStop = TypeAliasType(
+    "ChatCompletionRequestStop", Union[str, List[str]]
+)
 r"""Stop generation if this token is detected. Or if one of these tokens is detected when providing an array"""
 
 
-ChatCompletionRequestMessagesTypedDict = Union[
-    SystemMessageTypedDict,
-    UserMessageTypedDict,
-    AssistantMessageTypedDict,
-    ToolMessageTypedDict,
-]
+ChatCompletionRequestMessagesTypedDict = TypeAliasType(
+    "ChatCompletionRequestMessagesTypedDict",
+    Union[
+        SystemMessageTypedDict,
+        UserMessageTypedDict,
+        AssistantMessageTypedDict,
+        ToolMessageTypedDict,
+    ],
+)
 
 
 ChatCompletionRequestMessages = Annotated[
@@ -49,10 +56,15 @@ ChatCompletionRequestMessages = Annotated[
 ]
 
 
-ChatCompletionRequestToolChoiceTypedDict = Union[ToolChoiceTypedDict, ToolChoiceEnum]
+ChatCompletionRequestToolChoiceTypedDict = TypeAliasType(
+    "ChatCompletionRequestToolChoiceTypedDict",
+    Union[ToolChoiceTypedDict, ToolChoiceEnum],
+)
 
 
-ChatCompletionRequestToolChoice = Union[ToolChoice, ToolChoiceEnum]
+ChatCompletionRequestToolChoice = TypeAliasType(
+    "ChatCompletionRequestToolChoice", Union[ToolChoice, ToolChoiceEnum]
+)
 
 
 class ChatCompletionRequestTypedDict(TypedDict):
mistralai_gcp/models/chatcompletionstreamrequest.py CHANGED
@@ -19,23 +19,26 @@ from mistralai_gcp.types import (
 from mistralai_gcp.utils import get_discriminator
 from pydantic import Discriminator, Tag, model_serializer
 from typing import List, Optional, Union
-from typing_extensions import Annotated, NotRequired, TypedDict
+from typing_extensions import Annotated, NotRequired, TypeAliasType, TypedDict
 
 
-StopTypedDict = Union[str, List[str]]
+StopTypedDict = TypeAliasType("StopTypedDict", Union[str, List[str]])
 r"""Stop generation if this token is detected. Or if one of these tokens is detected when providing an array"""
 
 
-Stop = Union[str, List[str]]
+Stop = TypeAliasType("Stop", Union[str, List[str]])
 r"""Stop generation if this token is detected. Or if one of these tokens is detected when providing an array"""
 
 
-MessagesTypedDict = Union[
-    SystemMessageTypedDict,
-    UserMessageTypedDict,
-    AssistantMessageTypedDict,
-    ToolMessageTypedDict,
-]
+MessagesTypedDict = TypeAliasType(
+    "MessagesTypedDict",
+    Union[
+        SystemMessageTypedDict,
+        UserMessageTypedDict,
+        AssistantMessageTypedDict,
+        ToolMessageTypedDict,
+    ],
+)
 
 
 Messages = Annotated[
@@ -49,12 +52,15 @@ Messages = Annotated[
 ]
 
 
-ChatCompletionStreamRequestToolChoiceTypedDict = Union[
-    ToolChoiceTypedDict, ToolChoiceEnum
-]
+ChatCompletionStreamRequestToolChoiceTypedDict = TypeAliasType(
+    "ChatCompletionStreamRequestToolChoiceTypedDict",
+    Union[ToolChoiceTypedDict, ToolChoiceEnum],
+)
 
 
-ChatCompletionStreamRequestToolChoice = Union[ToolChoice, ToolChoiceEnum]
+ChatCompletionStreamRequestToolChoice = TypeAliasType(
+    "ChatCompletionStreamRequestToolChoice", Union[ToolChoice, ToolChoiceEnum]
+)
 
 
 class ChatCompletionStreamRequestTypedDict(TypedDict):
mistralai_gcp/models/contentchunk.py CHANGED
@@ -6,10 +6,12 @@ from .textchunk import TextChunk, TextChunkTypedDict
 from mistralai_gcp.utils import get_discriminator
 from pydantic import Discriminator, Tag
 from typing import Union
-from typing_extensions import Annotated
+from typing_extensions import Annotated, TypeAliasType
 
 
-ContentChunkTypedDict = Union[TextChunkTypedDict, ReferenceChunkTypedDict]
+ContentChunkTypedDict = TypeAliasType(
+    "ContentChunkTypedDict", Union[TextChunkTypedDict, ReferenceChunkTypedDict]
+)
 
 
 ContentChunk = Annotated[
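
ContentChunk itself remains a discriminated union (the Annotated/Tag/Discriminator lines following this hunk are unchanged); only the TypedDict alias gains a TypeAliasType name. A sketch of validating raw data against the union with pydantic's TypeAdapter, assuming ContentChunk is re-exported from mistralai_gcp.models:

    from pydantic import TypeAdapter
    from mistralai_gcp.models import ContentChunk

    adapter = TypeAdapter(ContentChunk)
    chunk = adapter.validate_python({"type": "text", "text": "hi"})
    print(type(chunk).__name__)  # TextChunk, chosen via the `type` discriminator
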
mistralai_gcp/models/deltamessage.py CHANGED
@@ -12,13 +12,15 @@ from mistralai_gcp.types import (
 )
 from pydantic import model_serializer
 from typing import List, Union
-from typing_extensions import NotRequired, TypedDict
+from typing_extensions import NotRequired, TypeAliasType, TypedDict
 
 
-ContentTypedDict = Union[str, List[ContentChunkTypedDict]]
+ContentTypedDict = TypeAliasType(
+    "ContentTypedDict", Union[str, List[ContentChunkTypedDict]]
+)
 
 
-Content = Union[str, List[ContentChunk]]
+Content = TypeAliasType("Content", Union[str, List[ContentChunk]])
 
 
 class DeltaMessageTypedDict(TypedDict):
mistralai_gcp/models/fimcompletionrequest.py CHANGED
@@ -10,14 +10,18 @@ from mistralai_gcp.types import (
 )
 from pydantic import model_serializer
 from typing import List, Optional, Union
-from typing_extensions import NotRequired, TypedDict
+from typing_extensions import NotRequired, TypeAliasType, TypedDict
 
 
-FIMCompletionRequestStopTypedDict = Union[str, List[str]]
+FIMCompletionRequestStopTypedDict = TypeAliasType(
+    "FIMCompletionRequestStopTypedDict", Union[str, List[str]]
+)
 r"""Stop generation if this token is detected. Or if one of these tokens is detected when providing an array"""
 
 
-FIMCompletionRequestStop = Union[str, List[str]]
+FIMCompletionRequestStop = TypeAliasType(
+    "FIMCompletionRequestStop", Union[str, List[str]]
+)
 r"""Stop generation if this token is detected. Or if one of these tokens is detected when providing an array"""
 
 
mistralai_gcp/models/fimcompletionstreamrequest.py CHANGED
@@ -10,14 +10,18 @@ from mistralai_gcp.types import (
 )
 from pydantic import model_serializer
 from typing import List, Optional, Union
-from typing_extensions import NotRequired, TypedDict
+from typing_extensions import NotRequired, TypeAliasType, TypedDict
 
 
-FIMCompletionStreamRequestStopTypedDict = Union[str, List[str]]
+FIMCompletionStreamRequestStopTypedDict = TypeAliasType(
+    "FIMCompletionStreamRequestStopTypedDict", Union[str, List[str]]
+)
 r"""Stop generation if this token is detected. Or if one of these tokens is detected when providing an array"""
 
 
-FIMCompletionStreamRequestStop = Union[str, List[str]]
+FIMCompletionStreamRequestStop = TypeAliasType(
+    "FIMCompletionStreamRequestStop", Union[str, List[str]]
+)
 r"""Stop generation if this token is detected. Or if one of these tokens is detected when providing an array"""
 
 
mistralai_gcp/models/functioncall.py CHANGED
@@ -3,13 +3,13 @@
 from __future__ import annotations
 from mistralai_gcp.types import BaseModel
 from typing import Any, Dict, Union
-from typing_extensions import TypedDict
+from typing_extensions import TypeAliasType, TypedDict
 
 
-ArgumentsTypedDict = Union[Dict[str, Any], str]
+ArgumentsTypedDict = TypeAliasType("ArgumentsTypedDict", Union[Dict[str, Any], str])
 
 
-Arguments = Union[Dict[str, Any], str]
+Arguments = TypeAliasType("Arguments", Union[Dict[str, Any], str])
 
 
 class FunctionCallTypedDict(TypedDict):
mistralai_gcp/models/referencechunk.py CHANGED
@@ -2,11 +2,8 @@
 
 from __future__ import annotations
 from mistralai_gcp.types import BaseModel
-from mistralai_gcp.utils import validate_const
-import pydantic
-from pydantic.functional_validators import AfterValidator
 from typing import List, Literal, Optional
-from typing_extensions import Annotated, TypedDict
+from typing_extensions import NotRequired, TypedDict
 
 
 ReferenceChunkType = Literal["reference"]
@@ -14,15 +11,10 @@ ReferenceChunkType = Literal["reference"]
 
 class ReferenceChunkTypedDict(TypedDict):
     reference_ids: List[int]
-    type: ReferenceChunkType
+    type: NotRequired[ReferenceChunkType]
 
 
 class ReferenceChunk(BaseModel):
     reference_ids: List[int]
 
-    TYPE: Annotated[
-        Annotated[
-            Optional[ReferenceChunkType], AfterValidator(validate_const("reference"))
-        ],
-        pydantic.Field(alias="type"),
-    ] = "reference"
+    type: Optional[ReferenceChunkType] = "reference"
mistralai_gcp/models/systemmessage.py CHANGED
@@ -4,13 +4,17 @@ from __future__ import annotations
 from .textchunk import TextChunk, TextChunkTypedDict
 from mistralai_gcp.types import BaseModel
 from typing import List, Literal, Optional, Union
-from typing_extensions import NotRequired, TypedDict
+from typing_extensions import NotRequired, TypeAliasType, TypedDict
 
 
-SystemMessageContentTypedDict = Union[str, List[TextChunkTypedDict]]
+SystemMessageContentTypedDict = TypeAliasType(
+    "SystemMessageContentTypedDict", Union[str, List[TextChunkTypedDict]]
+)
 
 
-SystemMessageContent = Union[str, List[TextChunk]]
+SystemMessageContent = TypeAliasType(
+    "SystemMessageContent", Union[str, List[TextChunk]]
+)
 
 
 Role = Literal["system"]