vectorvein 0.1.32__py3-none-any.whl → 0.1.33__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the package versions exactly as they appear in their public registry.
- vectorvein/chat_clients/py.typed +0 -0
- vectorvein/py.typed +0 -0
- vectorvein/settings/py.typed +0 -0
- vectorvein/types/defaults.py +35 -33
- vectorvein/types/llm_parameters.py +20 -2
- vectorvein/types/py.typed +0 -0
- {vectorvein-0.1.32.dist-info → vectorvein-0.1.33.dist-info}/METADATA +1 -1
- {vectorvein-0.1.32.dist-info → vectorvein-0.1.33.dist-info}/RECORD +10 -6
- {vectorvein-0.1.32.dist-info → vectorvein-0.1.33.dist-info}/WHEEL +0 -0
- {vectorvein-0.1.32.dist-info → vectorvein-0.1.33.dist-info}/entry_points.txt +0 -0
vectorvein/py.typed
ADDED
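The py.typed files added in this release (here and under chat_clients/, settings/, and types/) are empty PEP 561 marker files: shipping them in the wheel tells type checkers to use vectorvein's inline annotations instead of treating the package as untyped. A minimal sketch of the consumer-side effect, assuming mypy or pyright is run over the snippet (demo_typed_consumer.py is a hypothetical file, not part of the package; the imported names appear in the diffs below):

    # demo_typed_consumer.py — hypothetical consumer module.
    # With py.typed present in the installed wheel, a type checker reads the
    # package's own annotations; without the marker, vectorvein would be
    # treated as untyped and these names would resolve to Any.
    from vectorvein.types import defaults
    from vectorvein.types.llm_parameters import ToolParam  # alias of OpenAI's ChatCompletionToolParam

    length: int = defaults.MODEL_CONTEXT_LENGTH  # checked against the Final[int] annotation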
vectorvein/types/defaults.py
CHANGED
@@ -1,17 +1,19 @@
 # @Author: Bi Ying
 # @Date: 2024-07-27 00:02:34
+from typing import Final, Dict, Any
+
 from .enums import ContextLengthControlType
 
-CONTEXT_LENGTH_CONTROL = ContextLengthControlType.Latest
+CONTEXT_LENGTH_CONTROL: Final[ContextLengthControlType] = ContextLengthControlType.Latest
 
-ENDPOINT_CONCURRENT_REQUESTS = 20
-ENDPOINT_RPM = 60
-ENDPOINT_TPM = 300000
+ENDPOINT_CONCURRENT_REQUESTS: Final[int] = 20
+ENDPOINT_RPM: Final[int] = 60
+ENDPOINT_TPM: Final[int] = 300000
 
-MODEL_CONTEXT_LENGTH = 32768
+MODEL_CONTEXT_LENGTH: Final[int] = 32768
 
 # Moonshot models
-MOONSHOT_MODELS = {
+MOONSHOT_MODELS: Final[Dict[str, Dict[str, Any]]] = {
     "moonshot-v1-8k": {
         "id": "moonshot-v1-8k",
         "context_length": 8192,
@@ -31,10 +33,10 @@ MOONSHOT_MODELS = {
         "response_format_available": True,
     },
 }
-MOONSHOT_DEFAULT_MODEL = "moonshot-v1-8k"
+MOONSHOT_DEFAULT_MODEL: Final[str] = "moonshot-v1-8k"
 
 # Deepseek models
-DEEPSEEK_MODELS = {
+DEEPSEEK_MODELS: Final[Dict[str, Dict[str, Any]]] = {
     "deepseek-chat": {
         "id": "deepseek-chat",
         "context_length": 128000,
@@ -50,10 +52,10 @@ DEEPSEEK_MODELS = {
         "response_format_available": True,
     },
 }
-DEEPSEEK_DEFAULT_MODEL = "deepseek-chat"
+DEEPSEEK_DEFAULT_MODEL: Final[str] = "deepseek-chat"
 
 # Baichuan models
-BAICHUAN_MODELS = {
+BAICHUAN_MODELS: Final[Dict[str, Dict[str, Any]]] = {
     "Baichuan4": {
         "id": "Baichuan4",
         "context_length": 32768,
@@ -90,11 +92,11 @@ BAICHUAN_MODELS = {
         "response_format_available": False,
     },
 }
-BAICHUAN_DEFAULT_MODEL = "Baichuan3-Turbo"
+BAICHUAN_DEFAULT_MODEL: Final[str] = "Baichuan3-Turbo"
 
 # Groq models
-GROQ_DEFAULT_MODEL = "llama3-70b-8192"
-GROQ_MODELS = {
+GROQ_DEFAULT_MODEL: Final[str] = "llama3-70b-8192"
+GROQ_MODELS: Final[Dict[str, Dict[str, Any]]] = {
     "mixtral-8x7b-32768": {
         "id": "mixtral-8x7b-32768",
         "context_length": 32768,
@@ -150,8 +152,8 @@ GROQ_MODELS = {
 }
 
 # Qwen models
-QWEN_DEFAULT_MODEL = "qwen2.5-72b-instruct"
-QWEN_MODELS = {
+QWEN_DEFAULT_MODEL: Final[str] = "qwen2.5-72b-instruct"
+QWEN_MODELS: Final[Dict[str, Dict[str, Any]]] = {
     "qwen1.5-1.8b-chat": {
         "id": "qwen1.5-1.8b-chat",
         "context_length": 30000,
@@ -267,8 +269,8 @@ QWEN_MODELS = {
 }
 
 # Yi models
-YI_DEFAULT_MODEL = "yi-large-turbo"
-YI_MODELS = {
+YI_DEFAULT_MODEL: Final[str] = "yi-large-turbo"
+YI_MODELS: Final[Dict[str, Dict[str, Any]]] = {
     "yi-large": {
         "id": "yi-large",
         "context_length": 32000,
@@ -322,8 +324,8 @@ YI_MODELS = {
 }
 
 # ZhiPuAI models
-ZHIPUAI_DEFAULT_MODEL = "glm-4-air"
-ZHIPUAI_MODELS = {
+ZHIPUAI_DEFAULT_MODEL: Final[str] = "glm-4-air"
+ZHIPUAI_MODELS: Final[Dict[str, Dict[str, Any]]] = {
     "glm-3-turbo": {
         "id": "glm-3-turbo",
         "context_length": 128000,
@@ -399,8 +401,8 @@ ZHIPUAI_MODELS = {
 }
 
 # Mistral models
-MISTRAL_DEFAULT_MODEL = "mistral-small"
-MISTRAL_MODELS = {
+MISTRAL_DEFAULT_MODEL: Final[str] = "mistral-small"
+MISTRAL_MODELS: Final[Dict[str, Dict[str, Any]]] = {
     "open-mistral-7b": {
         "id": "open-mistral-7b",
         "context_length": 32000,
@@ -452,8 +454,8 @@ MISTRAL_MODELS = {
 }
 
 # OpenAI models
-OPENAI_DEFAULT_MODEL = "gpt-4o"
-OPENAI_MODELS = {
+OPENAI_DEFAULT_MODEL: Final[str] = "gpt-4o"
+OPENAI_MODELS: Final[Dict[str, Dict[str, Any]]] = {
     "gpt-35-turbo": {
         "id": "gpt-35-turbo",
         "context_length": 16385,
@@ -517,8 +519,8 @@ OPENAI_MODELS = {
 }
 
 # Anthropic models
-ANTHROPIC_DEFAULT_MODEL = "claude-3-5-sonnet-20240620"
-ANTHROPIC_MODELS = {
+ANTHROPIC_DEFAULT_MODEL: Final[str] = "claude-3-5-sonnet-20240620"
+ANTHROPIC_MODELS: Final[Dict[str, Dict[str, Any]]] = {
     "claude-3-opus-20240229": {
         "id": "claude-3-opus-20240229",
         "context_length": 200000,
@@ -554,8 +556,8 @@ ANTHROPIC_MODELS = {
 }
 
 # Minimax models
-MINIMAX_DEFAULT_MODEL = "abab6.5s-chat"
-MINIMAX_MODELS = {
+MINIMAX_DEFAULT_MODEL: Final[str] = "abab6.5s-chat"
+MINIMAX_MODELS: Final[Dict[str, Dict[str, Any]]] = {
     "abab5-chat": {
         "id": "abab5-chat",
         "context_length": 6144,
@@ -587,8 +589,8 @@ MINIMAX_MODELS = {
 }
 
 # Gemini models
-GEMINI_DEFAULT_MODEL = "gemini-1.5-pro"
-GEMINI_MODELS = {
+GEMINI_DEFAULT_MODEL: Final[str] = "gemini-1.5-pro"
+GEMINI_MODELS: Final[Dict[str, Dict[str, Any]]] = {
     "gemini-1.5-pro": {
         "id": "gemini-1.5-pro",
         "context_length": 1048576,
@@ -606,8 +608,8 @@ GEMINI_MODELS = {
 }
 
 # 百度文心一言 ERNIE 模型
-ERNIE_DEFAULT_MODEL = "ernie-lite"
-ERNIE_MODELS = {
+ERNIE_DEFAULT_MODEL: Final[str] = "ernie-lite"
+ERNIE_MODELS: Final[Dict[str, Dict[str, Any]]] = {
     "ernie-lite": {
         "id": "ernie-lite",
         "context_length": 6144,
@@ -646,8 +648,8 @@ ERNIE_MODELS = {
 }
 
 
-STEPFUN_DEFAULT_MODEL = "step-1-8k"
-STEPFUN_MODELS = {
+STEPFUN_DEFAULT_MODEL: Final[str] = "step-1-8k"
+STEPFUN_MODELS: Final[Dict[str, Dict[str, Any]]] = {
     "step-1-8k": {
         "id": "step-1-8k",
         "context_length": 8192,
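The defaults.py changes above are purely typing-related: every module-level constant gains an explicit annotation, with typing.Final marking it as non-reassignable and Dict[str, Dict[str, Any]] describing the shape of the per-provider model registries. A minimal, self-contained sketch of the pattern (the EXAMPLE_* names are illustrative, not part of the package):

    from typing import Any, Dict, Final

    # Final forbids rebinding the name; type checkers flag later assignments.
    EXAMPLE_DEFAULT_MODEL: Final[str] = "example-model-8k"
    EXAMPLE_MODELS: Final[Dict[str, Dict[str, Any]]] = {
        "example-model-8k": {
            "id": "example-model-8k",
            "context_length": 8192,
            "response_format_available": True,
        },
    }

    # EXAMPLE_DEFAULT_MODEL = "other-model"  # error: cannot assign to a Final name
    # EXAMPLE_MODELS["new-model"] = {}       # allowed: Final does not make the dict immutable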
vectorvein/types/llm_parameters.py
CHANGED
@@ -9,7 +9,7 @@ from anthropic._types import NotGiven as AnthropicNotGiven
 from anthropic.types.message_create_params import ToolChoice as AnthropicToolChoice
 
 from openai._types import NotGiven as OpenAINotGiven
-from openai._types import NOT_GIVEN as
+from openai._types import NOT_GIVEN as OPENAI_NOT_GIVEN
 from openai.types.chat.chat_completion_chunk import ChoiceDeltaToolCall
 from openai.types.chat.chat_completion_tool_param import ChatCompletionToolParam
 from openai.types.chat.chat_completion_message_tool_call import ChatCompletionMessageToolCall
@@ -93,7 +93,7 @@ class ChatCompletionDeltaMessage(BaseModel):
 
 NotGiven = Union[AnthropicNotGiven, OpenAINotGiven]
 
-NOT_GIVEN =
+NOT_GIVEN = OPENAI_NOT_GIVEN
 
 OpenAIToolParam = ChatCompletionToolParam
 ToolParam = OpenAIToolParam
@@ -101,3 +101,21 @@ ToolParam = OpenAIToolParam
 Tools = Iterable[ToolParam]
 
 ToolChoice = ChatCompletionToolChoiceOptionParam
+
+
+__all__ = [
+    "EndpointSetting",
+    "ModelSetting",
+    "BackendSettings",
+    "Usage",
+    "ChatCompletionMessage",
+    "ChatCompletionDeltaMessage",
+    "NotGiven",
+    "NOT_GIVEN",
+    "OpenAIToolParam",
+    "ToolParam",
+    "Tools",
+    "ToolChoice",
+    "AnthropicToolParam",
+    "AnthropicToolChoice",
+]
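The llm_parameters.py hunks give OpenAI's sentinel an explicit alias (OPENAI_NOT_GIVEN), bind the module-level NOT_GIVEN to it, and add an __all__ that pins down the module's public surface for star-imports, linters, and type checkers (the removed lines are cut off in this view, so the previous alias is not shown). A condensed sketch of the resulting pattern, limited to names visible in the hunks above:

    from typing import Union

    from anthropic._types import NotGiven as AnthropicNotGiven
    from openai._types import NOT_GIVEN as OPENAI_NOT_GIVEN
    from openai._types import NotGiven as OpenAINotGiven

    # Call sites can accept either SDK's "not given" sentinel type...
    NotGiven = Union[AnthropicNotGiven, OpenAINotGiven]
    # ...while the exported default sentinel value is OpenAI's singleton.
    NOT_GIVEN = OPENAI_NOT_GIVEN

    # An explicit export list keeps "from ... import *" and re-export checks honest.
    __all__ = ["NotGiven", "NOT_GIVEN"]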
{vectorvein-0.1.32.dist-info → vectorvein-0.1.33.dist-info}/RECORD
RENAMED
@@ -1,6 +1,6 @@
-vectorvein-0.1.
-vectorvein-0.1.
-vectorvein-0.1.
+vectorvein-0.1.33.dist-info/METADATA,sha256=yNyHQyf6bljhKwy5f4rbAfU0M3r-_XYqtNDaVg6y3ng,502
+vectorvein-0.1.33.dist-info/WHEEL,sha256=Vza3XR51HW1KmFP0iIMUVYIvz0uQuKJpIXKYOBGQyFQ,90
+vectorvein-0.1.33.dist-info/entry_points.txt,sha256=6OYgBcLyFCUgeqLgnvMyOJxPCWzgy7se4rLPKtNonMs,34
 vectorvein/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 vectorvein/chat_clients/__init__.py,sha256=dW169oK1n3v8Z0uD8itghzlCP72rxiaS-XYn6fvI2xM,16788
 vectorvein/chat_clients/anthropic_client.py,sha256=h82GxBi7h22B7leBuPofwBstxH_c12tEgGjpnKg6UDc,25007
@@ -15,16 +15,20 @@ vectorvein/chat_clients/mistral_client.py,sha256=1aKSylzBDaLYcFnaBIL4-sXSzWmXfBe
 vectorvein/chat_clients/moonshot_client.py,sha256=gbu-6nGxx8uM_U2WlI4Wus881rFRotzHtMSoYOcruGU,526
 vectorvein/chat_clients/openai_client.py,sha256=Nz6tV45pWcsOupxjnsRsGTicbQNJWIZyxuJoJ5DGMpg,527
 vectorvein/chat_clients/openai_compatible_client.py,sha256=gfCTXji8pgFUiultiNDKcmPIGu7lFfQ9VmA8o2_Mm6c,18823
+vectorvein/chat_clients/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 vectorvein/chat_clients/qwen_client.py,sha256=-ryh-m9PgsO0fc4ulcCmPTy1155J8YUy15uPoJQOHA0,513
 vectorvein/chat_clients/stepfun_client.py,sha256=zsD2W5ahmR4DD9cqQTXmJr3txrGuvxbRWhFlRdwNijI,519
 vectorvein/chat_clients/utils.py,sha256=1LddLLVf8r8_Hj5LEYrQRus2qfsuXkJPMOu9VsiKMys,24338
 vectorvein/chat_clients/yi_client.py,sha256=RNf4CRuPJfixrwLZ3-DEc3t25QDe1mvZeb9sku2f8Bc,484
 vectorvein/chat_clients/zhipuai_client.py,sha256=Ys5DSeLCuedaDXr3PfG1EW2zKXopt-awO2IylWSwY0s,519
+vectorvein/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 vectorvein/settings/__init__.py,sha256=0L-2WicBq9ctaJRoSwx8ZhVtX4slS5tHrIlSGf-tJxg,3564
-vectorvein/
+vectorvein/settings/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+vectorvein/types/defaults.py,sha256=3cMvobQzTyvGtS47INKJxUK6KC4ezTVPhpO7qvR4ayc,21877
 vectorvein/types/enums.py,sha256=x_S0IJiEWijOAEiMNdiGDGEWGtmt7TwMriJVDqrDmTo,1637
 vectorvein/types/exception.py,sha256=gnW4GnJ76jND6UGnodk9xmqkcbeS7Cz2rvncA2HpD5E,69
-vectorvein/types/llm_parameters.py,sha256=
+vectorvein/types/llm_parameters.py,sha256=5Q_NWVjbEhEcG7lYLebiQZ9uQU9rZznFmrUxDZ17yqY,4714
+vectorvein/types/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 vectorvein/utilities/media_processing.py,sha256=BujciRmw1GMmc3ELRvafL8STcy6r5b2rVnh27-uA7so,2256
 vectorvein/utilities/retry.py,sha256=9ePuJdeUUGx-qMWfaFxmlOvG_lQPwCQ4UB1z3Edlo34,993
-vectorvein-0.1.
+vectorvein-0.1.33.dist-info/RECORD,,
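Each RECORD line has the form path,sha256=<hash>,<size>, where the hash is the urlsafe-base64 SHA-256 digest of the file with the trailing '=' padding stripped (per the wheel RECORD spec). A small sketch that reproduces the hash field; the zero-byte py.typed markers all share the empty-file digest seen in the entries above:

    import base64
    import hashlib

    def record_hash(data: bytes) -> str:
        # RECORD-style hash field: urlsafe base64 of SHA-256, '=' padding stripped.
        digest = hashlib.sha256(data).digest()
        return "sha256=" + base64.urlsafe_b64encode(digest).rstrip(b"=").decode("ascii")

    print(record_hash(b""))  # -> sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU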