mistralai 1.5.1__py3-none-any.whl → 1.5.2rc1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mistralai/_hooks/types.py +15 -3
- mistralai/_version.py +3 -3
- mistralai/agents.py +32 -12
- mistralai/basesdk.py +8 -0
- mistralai/chat.py +32 -12
- mistralai/classifiers.py +32 -12
- mistralai/embeddings.py +20 -10
- mistralai/extra/utils/response_format.py +3 -3
- mistralai/files.py +36 -0
- mistralai/fim.py +32 -12
- mistralai/httpclient.py +4 -2
- mistralai/jobs.py +30 -0
- mistralai/mistral_jobs.py +24 -0
- mistralai/models/__init__.py +6 -1
- mistralai/models/documenturlchunk.py +8 -14
- mistralai/models/embeddingrequest.py +7 -7
- mistralai/models/filepurpose.py +1 -1
- mistralai/models_.py +66 -18
- mistralai/ocr.py +16 -6
- mistralai/sdk.py +19 -3
- mistralai/sdkconfiguration.py +4 -2
- mistralai/utils/__init__.py +2 -0
- mistralai/utils/serializers.py +10 -6
- mistralai/utils/values.py +4 -1
- {mistralai-1.5.1.dist-info → mistralai-1.5.2rc1.dist-info}/METADATA +66 -19
- {mistralai-1.5.1.dist-info → mistralai-1.5.2rc1.dist-info}/RECORD +73 -69
- mistralai_azure/__init__.py +10 -1
- mistralai_azure/_hooks/types.py +15 -3
- mistralai_azure/_version.py +3 -0
- mistralai_azure/basesdk.py +8 -0
- mistralai_azure/chat.py +88 -20
- mistralai_azure/httpclient.py +52 -0
- mistralai_azure/models/__init__.py +7 -0
- mistralai_azure/models/assistantmessage.py +2 -0
- mistralai_azure/models/chatcompletionrequest.py +8 -10
- mistralai_azure/models/chatcompletionstreamrequest.py +8 -10
- mistralai_azure/models/function.py +3 -0
- mistralai_azure/models/jsonschema.py +61 -0
- mistralai_azure/models/prediction.py +25 -0
- mistralai_azure/models/responseformat.py +42 -1
- mistralai_azure/models/responseformats.py +1 -1
- mistralai_azure/models/toolcall.py +3 -0
- mistralai_azure/sdk.py +56 -14
- mistralai_azure/sdkconfiguration.py +14 -6
- mistralai_azure/utils/__init__.py +2 -0
- mistralai_azure/utils/serializers.py +10 -6
- mistralai_azure/utils/values.py +4 -1
- mistralai_gcp/__init__.py +10 -1
- mistralai_gcp/_hooks/types.py +15 -3
- mistralai_gcp/_version.py +3 -0
- mistralai_gcp/basesdk.py +8 -0
- mistralai_gcp/chat.py +89 -21
- mistralai_gcp/fim.py +61 -21
- mistralai_gcp/httpclient.py +52 -0
- mistralai_gcp/models/__init__.py +7 -0
- mistralai_gcp/models/assistantmessage.py +2 -0
- mistralai_gcp/models/chatcompletionrequest.py +8 -10
- mistralai_gcp/models/chatcompletionstreamrequest.py +8 -10
- mistralai_gcp/models/fimcompletionrequest.py +2 -3
- mistralai_gcp/models/fimcompletionstreamrequest.py +2 -3
- mistralai_gcp/models/function.py +3 -0
- mistralai_gcp/models/jsonschema.py +61 -0
- mistralai_gcp/models/prediction.py +25 -0
- mistralai_gcp/models/responseformat.py +42 -1
- mistralai_gcp/models/responseformats.py +1 -1
- mistralai_gcp/models/toolcall.py +3 -0
- mistralai_gcp/sdk.py +63 -19
- mistralai_gcp/sdkconfiguration.py +14 -6
- mistralai_gcp/utils/__init__.py +2 -0
- mistralai_gcp/utils/serializers.py +10 -6
- mistralai_gcp/utils/values.py +4 -1
- {mistralai-1.5.1.dist-info → mistralai-1.5.2rc1.dist-info}/LICENSE +0 -0
- {mistralai-1.5.1.dist-info → mistralai-1.5.2rc1.dist-info}/WHEEL +0 -0
mistralai/_hooks/types.py
CHANGED
@@ -7,16 +7,19 @@ from typing import Any, Callable, List, Optional, Tuple, Union
 
 
 class HookContext:
+    base_url: str
     operation_id: str
     oauth2_scopes: Optional[List[str]] = None
     security_source: Optional[Union[Any, Callable[[], Any]]] = None
 
     def __init__(
         self,
+        base_url: str,
         operation_id: str,
         oauth2_scopes: Optional[List[str]],
         security_source: Optional[Union[Any, Callable[[], Any]]],
     ):
+        self.base_url = base_url
         self.operation_id = operation_id
         self.oauth2_scopes = oauth2_scopes
         self.security_source = security_source
@@ -25,21 +28,30 @@ class HookContext:
 class BeforeRequestContext(HookContext):
     def __init__(self, hook_ctx: HookContext):
         super().__init__(
-            hook_ctx.
+            hook_ctx.base_url,
+            hook_ctx.operation_id,
+            hook_ctx.oauth2_scopes,
+            hook_ctx.security_source,
         )
 
 
 class AfterSuccessContext(HookContext):
     def __init__(self, hook_ctx: HookContext):
         super().__init__(
-            hook_ctx.
+            hook_ctx.base_url,
+            hook_ctx.operation_id,
+            hook_ctx.oauth2_scopes,
+            hook_ctx.security_source,
        )
 
 
 class AfterErrorContext(HookContext):
     def __init__(self, hook_ctx: HookContext):
         super().__init__(
-            hook_ctx.
+            hook_ctx.base_url,
+            hook_ctx.operation_id,
+            hook_ctx.oauth2_scopes,
+            hook_ctx.security_source,
         )
 
 
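The practical effect of this change is that hook implementations can now see which server a request targets via `hook_ctx.base_url`. A minimal sketch of a before-request hook that reads the new field; the `BeforeRequestHook` base class and its `before_request` signature are assumed from the rest of the generated `_hooks` package, which is not shown in this diff:

```python
from typing import Union

import httpx

# Assumption: BeforeRequestHook lives alongside the context classes in
# mistralai._hooks.types, as in other Speakeasy-generated SDKs.
from mistralai._hooks.types import BeforeRequestContext, BeforeRequestHook


class LogTargetHook(BeforeRequestHook):
    def before_request(
        self, hook_ctx: BeforeRequestContext, request: httpx.Request
    ) -> Union[httpx.Request, Exception]:
        # base_url is the field added to HookContext in 1.5.2rc1; earlier
        # versions only exposed operation_id, oauth2_scopes and security_source.
        print(f"{hook_ctx.operation_id} -> {hook_ctx.base_url}")
        return request
```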
mistralai/_version.py
CHANGED
@@ -3,10 +3,10 @@
 import importlib.metadata
 
 __title__: str = "mistralai"
-__version__: str = "1.5.1"
+__version__: str = "1.5.2-rc.1"
 __openapi_doc_version__: str = "0.0.2"
-__gen_version__: str = "2.
-__user_agent__: str = "speakeasy-sdk/python 1.5.1 2.
+__gen_version__: str = "2.548.6"
+__user_agent__: str = "speakeasy-sdk/python 1.5.2-rc.1 2.548.6 0.0.2 mistralai"
 
 try:
     if __package__ is not None:
mistralai/agents.py
CHANGED
@@ -78,6 +78,8 @@ class Agents(BaseSDK):
 
         if server_url is not None:
             base_url = server_url
+        else:
+            base_url = self._get_url(base_url, url_variables)
 
         request = models.AgentsCompletionRequest(
             max_tokens=max_tokens,
@@ -132,6 +134,7 @@ class Agents(BaseSDK):
 
         http_res = self.do_request(
             hook_ctx=HookContext(
+                base_url=base_url or "",
                 operation_id="agents_completion_v1_agents_completions_post",
                 oauth2_scopes=[],
                 security_source=get_security_from_env(
@@ -143,12 +146,14 @@ class Agents(BaseSDK):
             retry_config=retry_config,
         )
 
-
+        response_data: Any = None
         if utils.match_response(http_res, "200", "application/json"):
             return utils.unmarshal_json(http_res.text, models.ChatCompletionResponse)
         if utils.match_response(http_res, "422", "application/json"):
-
-
+            response_data = utils.unmarshal_json(
+                http_res.text, models.HTTPValidationErrorData
+            )
+            raise models.HTTPValidationError(data=response_data)
         if utils.match_response(http_res, "4XX", "*"):
             http_res_text = utils.stream_to_text(http_res)
             raise models.SDKError(
@@ -236,6 +241,8 @@ class Agents(BaseSDK):
 
         if server_url is not None:
             base_url = server_url
+        else:
+            base_url = self._get_url(base_url, url_variables)
 
         request = models.AgentsCompletionRequest(
             max_tokens=max_tokens,
@@ -290,6 +297,7 @@ class Agents(BaseSDK):
 
         http_res = await self.do_request_async(
             hook_ctx=HookContext(
+                base_url=base_url or "",
                 operation_id="agents_completion_v1_agents_completions_post",
                 oauth2_scopes=[],
                 security_source=get_security_from_env(
@@ -301,12 +309,14 @@ class Agents(BaseSDK):
             retry_config=retry_config,
         )
 
-
+        response_data: Any = None
         if utils.match_response(http_res, "200", "application/json"):
             return utils.unmarshal_json(http_res.text, models.ChatCompletionResponse)
         if utils.match_response(http_res, "422", "application/json"):
-
-
+            response_data = utils.unmarshal_json(
+                http_res.text, models.HTTPValidationErrorData
+            )
+            raise models.HTTPValidationError(data=response_data)
         if utils.match_response(http_res, "4XX", "*"):
             http_res_text = await utils.stream_to_text_async(http_res)
             raise models.SDKError(
@@ -396,6 +406,8 @@ class Agents(BaseSDK):
 
         if server_url is not None:
             base_url = server_url
+        else:
+            base_url = self._get_url(base_url, url_variables)
 
         request = models.AgentsCompletionStreamRequest(
             max_tokens=max_tokens,
@@ -450,6 +462,7 @@ class Agents(BaseSDK):
 
         http_res = self.do_request(
             hook_ctx=HookContext(
+                base_url=base_url or "",
                 operation_id="stream_agents",
                 oauth2_scopes=[],
                 security_source=get_security_from_env(
@@ -462,7 +475,7 @@ class Agents(BaseSDK):
             retry_config=retry_config,
         )
 
-
+        response_data: Any = None
         if utils.match_response(http_res, "200", "text/event-stream"):
             return eventstreaming.EventStream(
                 http_res,
@@ -471,8 +484,10 @@ class Agents(BaseSDK):
             )
         if utils.match_response(http_res, "422", "application/json"):
             http_res_text = utils.stream_to_text(http_res)
-
-
+            response_data = utils.unmarshal_json(
+                http_res_text, models.HTTPValidationErrorData
+            )
+            raise models.HTTPValidationError(data=response_data)
         if utils.match_response(http_res, "4XX", "*"):
             http_res_text = utils.stream_to_text(http_res)
             raise models.SDKError(
@@ -562,6 +577,8 @@ class Agents(BaseSDK):
 
         if server_url is not None:
             base_url = server_url
+        else:
+            base_url = self._get_url(base_url, url_variables)
 
         request = models.AgentsCompletionStreamRequest(
             max_tokens=max_tokens,
@@ -616,6 +633,7 @@ class Agents(BaseSDK):
 
         http_res = await self.do_request_async(
             hook_ctx=HookContext(
+                base_url=base_url or "",
                 operation_id="stream_agents",
                 oauth2_scopes=[],
                 security_source=get_security_from_env(
@@ -628,7 +646,7 @@ class Agents(BaseSDK):
             retry_config=retry_config,
         )
 
-
+        response_data: Any = None
         if utils.match_response(http_res, "200", "text/event-stream"):
             return eventstreaming.EventStreamAsync(
                 http_res,
@@ -637,8 +655,10 @@ class Agents(BaseSDK):
             )
         if utils.match_response(http_res, "422", "application/json"):
             http_res_text = await utils.stream_to_text_async(http_res)
-
-
+            response_data = utils.unmarshal_json(
+                http_res_text, models.HTTPValidationErrorData
+            )
+            raise models.HTTPValidationError(data=response_data)
         if utils.match_response(http_res, "4XX", "*"):
             http_res_text = await utils.stream_to_text_async(http_res)
             raise models.SDKError(
mistralai/basesdk.py
CHANGED
@@ -231,6 +231,10 @@ class BaseSDK:
                 req.headers,
                 get_body_content(req),
             )
+
+            if client is None:
+                raise ValueError("client is required")
+
             http_res = client.send(req, stream=stream)
         except Exception as e:
             _, e = self.sdk_configuration.get_hooks().after_error(
@@ -303,6 +307,10 @@ class BaseSDK:
                 req.headers,
                 get_body_content(req),
             )
+
+            if client is None:
+                raise ValueError("client is required")
+
             http_res = await client.send(req, stream=stream)
         except Exception as e:
             _, e = self.sdk_configuration.get_hooks().after_error(
mistralai/chat.py
CHANGED
@@ -158,6 +158,8 @@ class Chat(BaseSDK):
 
         if server_url is not None:
             base_url = server_url
+        else:
+            base_url = self._get_url(base_url, url_variables)
 
         request = models.ChatCompletionRequest(
             model=model,
@@ -213,6 +215,7 @@ class Chat(BaseSDK):
 
         http_res = self.do_request(
             hook_ctx=HookContext(
+                base_url=base_url or "",
                 operation_id="chat_completion_v1_chat_completions_post",
                 oauth2_scopes=[],
                 security_source=get_security_from_env(
@@ -224,12 +227,14 @@ class Chat(BaseSDK):
             retry_config=retry_config,
         )
 
-
+        response_data: Any = None
         if utils.match_response(http_res, "200", "application/json"):
             return utils.unmarshal_json(http_res.text, models.ChatCompletionResponse)
         if utils.match_response(http_res, "422", "application/json"):
-
-
+            response_data = utils.unmarshal_json(
+                http_res.text, models.HTTPValidationErrorData
+            )
+            raise models.HTTPValidationError(data=response_data)
         if utils.match_response(http_res, "4XX", "*"):
             http_res_text = utils.stream_to_text(http_res)
             raise models.SDKError(
@@ -315,6 +320,8 @@ class Chat(BaseSDK):
 
         if server_url is not None:
             base_url = server_url
+        else:
+            base_url = self._get_url(base_url, url_variables)
 
         request = models.ChatCompletionRequest(
             model=model,
@@ -370,6 +377,7 @@ class Chat(BaseSDK):
 
         http_res = await self.do_request_async(
             hook_ctx=HookContext(
+                base_url=base_url or "",
                 operation_id="chat_completion_v1_chat_completions_post",
                 oauth2_scopes=[],
                 security_source=get_security_from_env(
@@ -381,12 +389,14 @@ class Chat(BaseSDK):
             retry_config=retry_config,
         )
 
-
+        response_data: Any = None
         if utils.match_response(http_res, "200", "application/json"):
             return utils.unmarshal_json(http_res.text, models.ChatCompletionResponse)
         if utils.match_response(http_res, "422", "application/json"):
-
-
+            response_data = utils.unmarshal_json(
+                http_res.text, models.HTTPValidationErrorData
+            )
+            raise models.HTTPValidationError(data=response_data)
         if utils.match_response(http_res, "4XX", "*"):
             http_res_text = await utils.stream_to_text_async(http_res)
             raise models.SDKError(
@@ -482,6 +492,8 @@ class Chat(BaseSDK):
 
         if server_url is not None:
             base_url = server_url
+        else:
+            base_url = self._get_url(base_url, url_variables)
 
         request = models.ChatCompletionStreamRequest(
             model=model,
@@ -539,6 +551,7 @@ class Chat(BaseSDK):
 
         http_res = self.do_request(
             hook_ctx=HookContext(
+                base_url=base_url or "",
                 operation_id="stream_chat",
                 oauth2_scopes=[],
                 security_source=get_security_from_env(
@@ -551,7 +564,7 @@ class Chat(BaseSDK):
             retry_config=retry_config,
         )
 
-
+        response_data: Any = None
         if utils.match_response(http_res, "200", "text/event-stream"):
             return eventstreaming.EventStream(
                 http_res,
@@ -560,8 +573,10 @@ class Chat(BaseSDK):
             )
         if utils.match_response(http_res, "422", "application/json"):
             http_res_text = utils.stream_to_text(http_res)
-
-
+            response_data = utils.unmarshal_json(
+                http_res_text, models.HTTPValidationErrorData
+            )
+            raise models.HTTPValidationError(data=response_data)
         if utils.match_response(http_res, "4XX", "*"):
             http_res_text = utils.stream_to_text(http_res)
             raise models.SDKError(
@@ -657,6 +672,8 @@ class Chat(BaseSDK):
 
         if server_url is not None:
             base_url = server_url
+        else:
+            base_url = self._get_url(base_url, url_variables)
 
         request = models.ChatCompletionStreamRequest(
             model=model,
@@ -714,6 +731,7 @@ class Chat(BaseSDK):
 
         http_res = await self.do_request_async(
             hook_ctx=HookContext(
+                base_url=base_url or "",
                 operation_id="stream_chat",
                 oauth2_scopes=[],
                 security_source=get_security_from_env(
@@ -726,7 +744,7 @@ class Chat(BaseSDK):
             retry_config=retry_config,
         )
 
-
+        response_data: Any = None
         if utils.match_response(http_res, "200", "text/event-stream"):
             return eventstreaming.EventStreamAsync(
                 http_res,
@@ -735,8 +753,10 @@ class Chat(BaseSDK):
             )
         if utils.match_response(http_res, "422", "application/json"):
             http_res_text = await utils.stream_to_text_async(http_res)
-
-
+            response_data = utils.unmarshal_json(
+                http_res_text, models.HTTPValidationErrorData
+            )
+            raise models.HTTPValidationError(data=response_data)
         if utils.match_response(http_res, "4XX", "*"):
             http_res_text = await utils.stream_to_text_async(http_res)
             raise models.SDKError(
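As the 422 branches in the agents and chat diffs above show, validation failures are now unmarshalled into `models.HTTPValidationErrorData` and raised as `models.HTTPValidationError`. A minimal caller-side sketch (the model name and prompt are placeholders):

```python
import os

from mistralai import Mistral, models

client = Mistral(api_key=os.environ["MISTRAL_API_KEY"])

try:
    res = client.chat.complete(
        model="mistral-small-latest",
        messages=[{"role": "user", "content": "Hello"}],
    )
    print(res.choices[0].message.content)
except models.HTTPValidationError as e:
    # 422: structured validation details are carried on the exception,
    # as shown in the raise models.HTTPValidationError(data=...) lines above.
    print("validation error:", e.data)
except models.SDKError as e:
    # Other non-2XX responses still surface as SDKError.
    print("request failed:", e.message)
```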
mistralai/classifiers.py
CHANGED
@@ -40,6 +40,8 @@ class Classifiers(BaseSDK):
 
         if server_url is not None:
             base_url = server_url
+        else:
+            base_url = self._get_url(base_url, url_variables)
 
         request = models.ClassificationRequest(
             model=model,
@@ -75,6 +77,7 @@ class Classifiers(BaseSDK):
 
         http_res = self.do_request(
             hook_ctx=HookContext(
+                base_url=base_url or "",
                 operation_id="moderations_v1_moderations_post",
                 oauth2_scopes=[],
                 security_source=get_security_from_env(
@@ -86,12 +89,14 @@ class Classifiers(BaseSDK):
             retry_config=retry_config,
         )
 
-
+        response_data: Any = None
         if utils.match_response(http_res, "200", "application/json"):
             return utils.unmarshal_json(http_res.text, models.ClassificationResponse)
         if utils.match_response(http_res, "422", "application/json"):
-
-
+            response_data = utils.unmarshal_json(
+                http_res.text, models.HTTPValidationErrorData
+            )
+            raise models.HTTPValidationError(data=response_data)
         if utils.match_response(http_res, "4XX", "*"):
             http_res_text = utils.stream_to_text(http_res)
             raise models.SDKError(
@@ -141,6 +146,8 @@ class Classifiers(BaseSDK):
 
         if server_url is not None:
             base_url = server_url
+        else:
+            base_url = self._get_url(base_url, url_variables)
 
         request = models.ClassificationRequest(
             model=model,
@@ -176,6 +183,7 @@ class Classifiers(BaseSDK):
 
         http_res = await self.do_request_async(
             hook_ctx=HookContext(
+                base_url=base_url or "",
                 operation_id="moderations_v1_moderations_post",
                 oauth2_scopes=[],
                 security_source=get_security_from_env(
@@ -187,12 +195,14 @@ class Classifiers(BaseSDK):
             retry_config=retry_config,
         )
 
-
+        response_data: Any = None
         if utils.match_response(http_res, "200", "application/json"):
             return utils.unmarshal_json(http_res.text, models.ClassificationResponse)
         if utils.match_response(http_res, "422", "application/json"):
-
-
+            response_data = utils.unmarshal_json(
+                http_res.text, models.HTTPValidationErrorData
+            )
+            raise models.HTTPValidationError(data=response_data)
         if utils.match_response(http_res, "4XX", "*"):
             http_res_text = await utils.stream_to_text_async(http_res)
             raise models.SDKError(
@@ -244,6 +254,8 @@ class Classifiers(BaseSDK):
 
         if server_url is not None:
             base_url = server_url
+        else:
+            base_url = self._get_url(base_url, url_variables)
 
         request = models.ChatModerationRequest(
             model=model,
@@ -280,6 +292,7 @@ class Classifiers(BaseSDK):
 
         http_res = self.do_request(
             hook_ctx=HookContext(
+                base_url=base_url or "",
                 operation_id="moderations_chat_v1_chat_moderations_post",
                 oauth2_scopes=[],
                 security_source=get_security_from_env(
@@ -291,12 +304,14 @@ class Classifiers(BaseSDK):
             retry_config=retry_config,
         )
 
-
+        response_data: Any = None
         if utils.match_response(http_res, "200", "application/json"):
             return utils.unmarshal_json(http_res.text, models.ClassificationResponse)
         if utils.match_response(http_res, "422", "application/json"):
-
-
+            response_data = utils.unmarshal_json(
+                http_res.text, models.HTTPValidationErrorData
+            )
+            raise models.HTTPValidationError(data=response_data)
         if utils.match_response(http_res, "4XX", "*"):
             http_res_text = utils.stream_to_text(http_res)
             raise models.SDKError(
@@ -348,6 +363,8 @@ class Classifiers(BaseSDK):
 
         if server_url is not None:
             base_url = server_url
+        else:
+            base_url = self._get_url(base_url, url_variables)
 
         request = models.ChatModerationRequest(
             model=model,
@@ -384,6 +401,7 @@ class Classifiers(BaseSDK):
 
         http_res = await self.do_request_async(
             hook_ctx=HookContext(
+                base_url=base_url or "",
                 operation_id="moderations_chat_v1_chat_moderations_post",
                 oauth2_scopes=[],
                 security_source=get_security_from_env(
@@ -395,12 +413,14 @@ class Classifiers(BaseSDK):
             retry_config=retry_config,
        )
 
-
+        response_data: Any = None
         if utils.match_response(http_res, "200", "application/json"):
             return utils.unmarshal_json(http_res.text, models.ClassificationResponse)
         if utils.match_response(http_res, "422", "application/json"):
-
-
+            response_data = utils.unmarshal_json(
+                http_res.text, models.HTTPValidationErrorData
+            )
+            raise models.HTTPValidationError(data=response_data)
         if utils.match_response(http_res, "4XX", "*"):
             http_res_text = await utils.stream_to_text_async(http_res)
             raise models.SDKError(
mistralai/embeddings.py
CHANGED
@@ -14,8 +14,8 @@ class Embeddings(BaseSDK):
     def create(
         self,
         *,
+        model: str,
         inputs: Union[models.Inputs, models.InputsTypedDict],
-        model: Optional[str] = "mistral-embed",
         retries: OptionalNullable[utils.RetryConfig] = UNSET,
         server_url: Optional[str] = None,
         timeout_ms: Optional[int] = None,
@@ -25,8 +25,8 @@ class Embeddings(BaseSDK):
 
         Embeddings
 
-        :param inputs: Text to embed.
         :param model: ID of the model to use.
+        :param inputs: Text to embed.
         :param retries: Override the default retry configuration for this method
         :param server_url: Override the default server URL for this method
         :param timeout_ms: Override the default request timeout configuration for this method in milliseconds
@@ -39,6 +39,8 @@ class Embeddings(BaseSDK):
 
         if server_url is not None:
             base_url = server_url
+        else:
+            base_url = self._get_url(base_url, url_variables)
 
         request = models.EmbeddingRequest(
             model=model,
@@ -74,6 +76,7 @@ class Embeddings(BaseSDK):
 
         http_res = self.do_request(
             hook_ctx=HookContext(
+                base_url=base_url or "",
                 operation_id="embeddings_v1_embeddings_post",
                 oauth2_scopes=[],
                 security_source=get_security_from_env(
@@ -85,12 +88,14 @@ class Embeddings(BaseSDK):
             retry_config=retry_config,
         )
 
-
+        response_data: Any = None
         if utils.match_response(http_res, "200", "application/json"):
             return utils.unmarshal_json(http_res.text, models.EmbeddingResponse)
         if utils.match_response(http_res, "422", "application/json"):
-
-
+            response_data = utils.unmarshal_json(
+                http_res.text, models.HTTPValidationErrorData
+            )
+            raise models.HTTPValidationError(data=response_data)
         if utils.match_response(http_res, "4XX", "*"):
             http_res_text = utils.stream_to_text(http_res)
             raise models.SDKError(
@@ -114,8 +119,8 @@ class Embeddings(BaseSDK):
     async def create_async(
         self,
         *,
+        model: str,
         inputs: Union[models.Inputs, models.InputsTypedDict],
-        model: Optional[str] = "mistral-embed",
         retries: OptionalNullable[utils.RetryConfig] = UNSET,
         server_url: Optional[str] = None,
         timeout_ms: Optional[int] = None,
@@ -125,8 +130,8 @@ class Embeddings(BaseSDK):
 
         Embeddings
 
-        :param inputs: Text to embed.
         :param model: ID of the model to use.
+        :param inputs: Text to embed.
         :param retries: Override the default retry configuration for this method
         :param server_url: Override the default server URL for this method
         :param timeout_ms: Override the default request timeout configuration for this method in milliseconds
@@ -139,6 +144,8 @@ class Embeddings(BaseSDK):
 
         if server_url is not None:
             base_url = server_url
+        else:
+            base_url = self._get_url(base_url, url_variables)
 
         request = models.EmbeddingRequest(
             model=model,
@@ -174,6 +181,7 @@ class Embeddings(BaseSDK):
 
         http_res = await self.do_request_async(
             hook_ctx=HookContext(
+                base_url=base_url or "",
                 operation_id="embeddings_v1_embeddings_post",
                 oauth2_scopes=[],
                 security_source=get_security_from_env(
@@ -185,12 +193,14 @@ class Embeddings(BaseSDK):
             retry_config=retry_config,
         )
 
-
+        response_data: Any = None
         if utils.match_response(http_res, "200", "application/json"):
             return utils.unmarshal_json(http_res.text, models.EmbeddingResponse)
         if utils.match_response(http_res, "422", "application/json"):
-
-
+            response_data = utils.unmarshal_json(
+                http_res.text, models.HTTPValidationErrorData
+            )
+            raise models.HTTPValidationError(data=response_data)
         if utils.match_response(http_res, "4XX", "*"):
             http_res_text = await utils.stream_to_text_async(http_res)
             raise models.SDKError(
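Note that `model` loses its `"mistral-embed"` default and becomes a required keyword argument in both `create` and `create_async`, so callers now have to pass it explicitly. A minimal sketch:

```python
import os

from mistralai import Mistral

client = Mistral(api_key=os.environ["MISTRAL_API_KEY"])

# As of 1.5.2rc1, model no longer defaults to "mistral-embed";
# pass it explicitly.
res = client.embeddings.create(
    model="mistral-embed",
    inputs=["Embed this sentence.", "And this one."],
)
print(len(res.data), "embeddings returned")
```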
mistralai/extra/utils/response_format.py
CHANGED
@@ -1,5 +1,5 @@
 from pydantic import BaseModel
-from typing import TypeVar, Any, Type
+from typing import TypeVar, Any, Type, Dict
 from ...models import JSONSchema, ResponseFormat
 from ._pydantic_helper import rec_strict_json_schema
 
@@ -7,7 +7,7 @@ CustomPydanticModel = TypeVar("CustomPydanticModel", bound=BaseModel)
 
 
 def response_format_from_pydantic_model(
-    model:
+    model: Type[CustomPydanticModel],
 ) -> ResponseFormat:
     """Generate a strict JSON schema from a pydantic model."""
     model_schema = rec_strict_json_schema(model.model_json_schema())
@@ -18,7 +18,7 @@ def response_format_from_pydantic_model(
 
 
 def pydantic_model_from_json(
-    json_data:
+    json_data: Dict[str, Any], pydantic_model: Type[CustomPydanticModel]
 ) -> CustomPydanticModel:
     """Parse a JSON schema into a pydantic model."""
     return pydantic_model.model_validate(json_data)
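The two helpers above now carry explicit type hints. A short usage sketch; the import path follows the file location in this diff, though the helpers may also be re-exported from `mistralai.extra`:

```python
from pydantic import BaseModel

from mistralai.extra.utils.response_format import (
    pydantic_model_from_json,
    response_format_from_pydantic_model,
)


class Explanation(BaseModel):
    title: str
    summary: str


# Build a strict JSON-schema ResponseFormat from the pydantic model ...
fmt = response_format_from_pydantic_model(Explanation)

# ... and validate a JSON payload (e.g. structured model output) back into it.
parsed = pydantic_model_from_json(
    {"title": "Hello", "summary": "A short example."}, Explanation
)
print(type(fmt).__name__, parsed.title)
```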
|