vectorvein 0.1.61__py3-none-any.whl → 0.1.63__py3-none-any.whl
This diff compares publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
- vectorvein/chat_clients/anthropic_client.py +60 -47
- vectorvein/types/llm_parameters.py +1 -1
- {vectorvein-0.1.61.dist-info → vectorvein-0.1.63.dist-info}/METADATA +1 -1
- {vectorvein-0.1.61.dist-info → vectorvein-0.1.63.dist-info}/RECORD +6 -6
- {vectorvein-0.1.61.dist-info → vectorvein-0.1.63.dist-info}/WHEEL +0 -0
- {vectorvein-0.1.61.dist-info → vectorvein-0.1.63.dist-info}/entry_points.txt +0 -0
vectorvein/chat_clients/anthropic_client.py

@@ -2,7 +2,6 @@
 # @Date: 2024-07-26 14:48:55
 import json
 import random
-from functools import cached_property
 from typing import overload, Generator, AsyncGenerator, Any, Literal, Iterable
 
 import httpx
@@ -178,19 +177,23 @@ class AnthropicChatClient(BaseChatClient):
             backend_name,
         )
         self.model_id = None
-        (10 removed lines; content not captured in this diff view)
+        self.endpoint = None
+
+    @property
+    def raw_client(self):  # type: ignore
+        if self.endpoint is None:
+            if self.random_endpoint:
+                self.random_endpoint = True
+                endpoint = random.choice(self.backend_settings.models[self.model].endpoints)
+                self.model_id = None
+                if isinstance(endpoint, dict):
+                    self.endpoint_id = endpoint["endpoint_id"]
+                    self.model_id = endpoint["model_id"]
+                else:
+                    self.endpoint_id = endpoint
+                self.endpoint = settings.get_endpoint(self.endpoint_id)
             else:
-                self.
-                self.endpoint = settings.get_endpoint(self.endpoint_id)
+                self.endpoint = settings.get_endpoint(self.endpoint_id)
 
         if self.endpoint.is_vertex:
             if self.endpoint.credentials is None:
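
The hunk above is the core of this release: endpoint selection moves out of AnthropicChatClient.__init__ (which now just records self.endpoint = None) and into a plain raw_client property that resolves the endpoint on first access and caches the result on the instance; dropping the functools.cached_property import in the first hunk is consistent with that switch to a plain property. Below is a minimal, self-contained sketch of the same lazy-resolution pattern. All names here (LazyClient, Endpoint, _ENDPOINTS) are illustrative stand-ins, not vectorvein APIs.

import random
from dataclasses import dataclass

@dataclass
class Endpoint:
    endpoint_id: str
    api_base: str | None = None

# Pretend registry of configured endpoints (stand-in for settings.get_endpoint).
_ENDPOINTS = {
    "ep-a": Endpoint("ep-a", "https://api.example-a.invalid/v1"),
    "ep-b": Endpoint("ep-b", "https://api.example-b.invalid/v1"),
}

class LazyClient:
    def __init__(self, endpoint_choices, random_endpoint=True):
        self._choices = endpoint_choices      # str ids or {"endpoint_id": ..., "model_id": ...} dicts
        self.random_endpoint = random_endpoint
        self.endpoint = None                  # __init__ no longer resolves the endpoint
        self.endpoint_id = None
        self.model_id = None

    @property
    def raw_client(self):
        # Resolve once, on first access, mirroring the new raw_client property above.
        if self.endpoint is None:
            choice = random.choice(self._choices) if self.random_endpoint else self._choices[0]
            if isinstance(choice, dict):      # an endpoint entry may pin a model_id
                self.endpoint_id = choice["endpoint_id"]
                self.model_id = choice["model_id"]
            else:
                self.endpoint_id = choice
            self.endpoint = _ENDPOINTS[self.endpoint_id]
        return self.endpoint

client = LazyClient(["ep-a", {"endpoint_id": "ep-b", "model_id": "claude-3-5-sonnet-20240620"}])
print(client.endpoint)    # None: nothing resolved yet
print(client.raw_client)  # first access picks an endpoint and may set model_id
print(client.model_id)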
@@ -330,16 +333,19 @@ class AnthropicChatClient(BaseChatClient):
         if temperature is not None:
             self.temperature = temperature
 
-        if self.
-            self.random_endpoint
-        (5 more removed lines; content not captured in this diff view)
+        if self.endpoint is None:
+            if self.random_endpoint:
+                self.random_endpoint = True
+                endpoint = random.choice(self.backend_settings.models[self.model].endpoints)
+                self.model_id = None
+                if isinstance(endpoint, dict):
+                    self.endpoint_id = endpoint["endpoint_id"]
+                    self.model_id = endpoint["model_id"]
+                else:
+                    self.endpoint_id = endpoint
+                self.endpoint = settings.get_endpoint(self.endpoint_id)
             else:
-                self.
-                self.endpoint = settings.get_endpoint(self.endpoint_id)
+                self.endpoint = settings.get_endpoint(self.endpoint_id)
 
         if self.endpoint.api_schema_type == "openai":
             _tools = OPENAI_NOT_GIVEN if tools is NOT_GIVEN else tools
@@ -401,7 +407,8 @@ class AnthropicChatClient(BaseChatClient):
                 **kwargs,
             )
 
-        (1 removed line; content not captured in this diff view)
+        raw_client = self.raw_client  # 调用完 self.raw_client 后,self.model_id 会被赋值
+        assert isinstance(raw_client, Anthropic | AnthropicVertex | AnthropicBedrock)
 
         if isinstance(tools, OpenAINotGiven):
             tools = NOT_GIVEN
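
The two added lines force endpoint resolution before the request parameters are assembled: the Chinese in-code comment says that once self.raw_client has been accessed, self.model_id will have been assigned, and the assert narrows the union type for static type checkers. A tiny illustration of that assert-based narrowing, using hypothetical classes rather than the Anthropic SDK types:

class SyncClient: ...
class VertexClient: ...
class BedrockClient: ...

def send(client: object) -> None:
    # isinstance() accepts X | Y unions on Python 3.10+; after the assert,
    # type checkers treat `client` as SyncClient | VertexClient | BedrockClient.
    assert isinstance(client, SyncClient | VertexClient | BedrockClient)
    print(type(client).__name__)

send(VertexClient())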
@@ -410,7 +417,6 @@ class AnthropicChatClient(BaseChatClient):
         if isinstance(top_p, OpenAINotGiven) or top_p is None:
             top_p = NOT_GIVEN
 
-        raw_client = self.raw_client  # 调用完 self.raw_client 后,self.model_id 会被赋值
         self.model_setting = self.backend_settings.models[self.model]
         if self.model_id is None:
             self.model_id = self.model_setting.id
@@ -569,19 +575,23 @@ class AsyncAnthropicChatClient(BaseAsyncChatClient):
             backend_name,
         )
         self.model_id = None
-        (10 removed lines; content not captured in this diff view)
+        self.endpoint = None
+
+    @property
+    def raw_client(self):  # type: ignore
+        if self.endpoint is None:
+            if self.random_endpoint:
+                self.random_endpoint = True
+                endpoint = random.choice(self.backend_settings.models[self.model].endpoints)
+                self.model_id = None
+                if isinstance(endpoint, dict):
+                    self.endpoint_id = endpoint["endpoint_id"]
+                    self.model_id = endpoint["model_id"]
+                else:
+                    self.endpoint_id = endpoint
+                self.endpoint = settings.get_endpoint(self.endpoint_id)
             else:
-                self.
-                self.endpoint = settings.get_endpoint(self.endpoint_id)
+                self.endpoint = settings.get_endpoint(self.endpoint_id)
 
         if self.endpoint.is_vertex:
             if self.endpoint.credentials is None:
@@ -720,16 +730,19 @@ class AsyncAnthropicChatClient(BaseAsyncChatClient):
         if temperature is not None:
             self.temperature = temperature
 
-        if self.
-            self.random_endpoint
-        (5 more removed lines; content not captured in this diff view)
+        if self.endpoint is None:
+            if self.random_endpoint:
+                self.random_endpoint = True
+                endpoint = random.choice(self.backend_settings.models[self.model].endpoints)
+                self.model_id = None
+                if isinstance(endpoint, dict):
+                    self.endpoint_id = endpoint["endpoint_id"]
+                    self.model_id = endpoint["model_id"]
+                else:
+                    self.endpoint_id = endpoint
+                self.endpoint = settings.get_endpoint(self.endpoint_id)
             else:
-                self.
-                self.endpoint = settings.get_endpoint(self.endpoint_id)
+                self.endpoint = settings.get_endpoint(self.endpoint_id)
 
         if self.endpoint.api_schema_type == "openai":
             _tools = OPENAI_NOT_GIVEN if tools is NOT_GIVEN else tools
@@ -793,7 +806,8 @@ class AsyncAnthropicChatClient(BaseAsyncChatClient):
                 **kwargs,
             )
 
-        (1 removed line; content not captured in this diff view)
+        raw_client = self.raw_client  # 调用完 self.raw_client 后,self.model_id 会被赋值
+        assert isinstance(raw_client, AsyncAnthropic | AsyncAnthropicVertex | AsyncAnthropicBedrock)
 
         if isinstance(tools, OpenAINotGiven):
             tools = NOT_GIVEN
@@ -802,7 +816,6 @@ class AsyncAnthropicChatClient(BaseAsyncChatClient):
         if isinstance(top_p, OpenAINotGiven) or top_p is None:
             top_p = NOT_GIVEN
 
-        raw_client = self.raw_client  # 调用完 self.raw_client 后,self.model_id 会被赋值
         self.model_setting = self.backend_settings.models[self.model]
         if self.model_id is None:
             self.model_id = self.model_setting.id
vectorvein/types/llm_parameters.py

@@ -28,7 +28,7 @@ class EndpointOptionDict(TypedDict):
 class EndpointSetting(BaseModel):
     id: str = Field(..., description="The id of the endpoint.")
     region: Optional[str] = Field(None, description="The region for the endpoint.")
-    api_base: str = Field(None, description="The base URL for the API.")
+    api_base: Optional[str] = Field(None, description="The base URL for the API.")
     api_key: Optional[str] = Field(None, description="The API key for authentication.")
     api_schema_type: Optional[str] = Field(
         "default",
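
The one-line type fix above makes the None default legitimate: with a bare str annotation, a default of None contradicts the declared type, so type checkers flag it and explicitly passing api_base=None can fail validation; Optional[str] makes None a valid value. A minimal sketch of the corrected declaration (only two of EndpointSetting's fields are shown; the class name here is illustrative):

from typing import Optional
from pydantic import BaseModel, Field

class EndpointSettingSketch(BaseModel):
    id: str = Field(..., description="The id of the endpoint.")
    api_base: Optional[str] = Field(None, description="The base URL for the API.")

print(EndpointSettingSketch(id="ep-a"))                                          # api_base defaults to None
print(EndpointSettingSketch(id="ep-b", api_base="https://example.invalid/v1"))   # explicit value
print(EndpointSettingSketch(id="ep-c", api_base=None))                           # now valid with Optional[str]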
{vectorvein-0.1.61.dist-info → vectorvein-0.1.63.dist-info}/RECORD

@@ -1,9 +1,9 @@
-vectorvein-0.1.
-vectorvein-0.1.
-vectorvein-0.1.
+vectorvein-0.1.63.dist-info/METADATA,sha256=Nhl-efJ6b-cKBEYgetKraAgBsmre6FyZI8imEsWkzCI,641
+vectorvein-0.1.63.dist-info/WHEEL,sha256=thaaA2w1JzcGC48WYufAs8nrYZjJm8LqNfnXFOFyCC4,90
+vectorvein-0.1.63.dist-info/entry_points.txt,sha256=6OYgBcLyFCUgeqLgnvMyOJxPCWzgy7se4rLPKtNonMs,34
 vectorvein/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 vectorvein/chat_clients/__init__.py,sha256=Oev7Lv1DIEWCMD-2Pm7e2cwzX7JFQTnIK-j6o4iUuyQ,17725
-vectorvein/chat_clients/anthropic_client.py,sha256=
+vectorvein/chat_clients/anthropic_client.py,sha256=tRBhX6k2cKvlm0XCiE6wvI6iYYBMV_RpBoquWSCkAmk,39240
 vectorvein/chat_clients/baichuan_client.py,sha256=CVMvpgjdrZGv0BWnTOBD-f2ufZ3wq3496wqukumsAr4,526
 vectorvein/chat_clients/base_client.py,sha256=tmD3ai6YjQnCKHuPsUww1khRlJeJ2AJzYubksb-2UaM,10489
 vectorvein/chat_clients/deepseek_client.py,sha256=3qWu01NlJAP2N-Ff62d5-CZXZitlizE1fzb20LNetig,526
@@ -29,8 +29,8 @@ vectorvein/settings/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 vectorvein/types/defaults.py,sha256=Hb9BFNJIRJcwjePsQTKKihHqJGeJGbFHfNeV_pIV8gM,24479
 vectorvein/types/enums.py,sha256=7KTJSVtQueImmbr1fSwv3rQVtc0RyMWXJmoE2tDOaso,1667
 vectorvein/types/exception.py,sha256=gnW4GnJ76jND6UGnodk9xmqkcbeS7Cz2rvncA2HpD5E,69
-vectorvein/types/llm_parameters.py,sha256=
+vectorvein/types/llm_parameters.py,sha256=g2Q0RKMC2vOcMI0tFpZ53xfVSfC8MLoK0YntPqce49U,5360
 vectorvein/types/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 vectorvein/utilities/media_processing.py,sha256=CTRq-lGlFkFgP_FSRhNwF_qUgmOrXPf2_1Ok9HY42_g,5887
 vectorvein/utilities/retry.py,sha256=6KFS9R2HdhqM3_9jkjD4F36ZSpEx2YNFGOVlpOsUetM,2208
-vectorvein-0.1.
+vectorvein-0.1.63.dist-info/RECORD,,

{vectorvein-0.1.61.dist-info → vectorvein-0.1.63.dist-info}/WHEEL: File without changes
{vectorvein-0.1.61.dist-info → vectorvein-0.1.63.dist-info}/entry_points.txt: File without changes