vectorvein 0.1.32__py3-none-any.whl → 0.1.34__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
vectorvein/py.typed ADDED
File without changes
vectorvein/types/defaults.py CHANGED
@@ -1,17 +1,19 @@
  # @Author: Bi Ying
  # @Date: 2024-07-27 00:02:34
+ from typing import Final, Dict, Any
+
  from .enums import ContextLengthControlType
 
- CONTEXT_LENGTH_CONTROL = ContextLengthControlType.Latest
+ CONTEXT_LENGTH_CONTROL: Final[ContextLengthControlType] = ContextLengthControlType.Latest
 
- ENDPOINT_CONCURRENT_REQUESTS = 20
- ENDPOINT_RPM = 60
- ENDPOINT_TPM = 300000
+ ENDPOINT_CONCURRENT_REQUESTS: Final[int] = 20
+ ENDPOINT_RPM: Final[int] = 60
+ ENDPOINT_TPM: Final[int] = 300000
 
- MODEL_CONTEXT_LENGTH = 32768
+ MODEL_CONTEXT_LENGTH: Final[int] = 32768
 
  # Moonshot models
- MOONSHOT_MODELS = {
+ MOONSHOT_MODELS: Final[Dict[str, Dict[str, Any]]] = {
      "moonshot-v1-8k": {
          "id": "moonshot-v1-8k",
          "context_length": 8192,
@@ -31,10 +33,10 @@ MOONSHOT_MODELS = {
          "response_format_available": True,
      },
  }
- MOONSHOT_DEFAULT_MODEL = "moonshot-v1-8k"
+ MOONSHOT_DEFAULT_MODEL: Final[str] = "moonshot-v1-8k"
 
  # Deepseek models
- DEEPSEEK_MODELS = {
+ DEEPSEEK_MODELS: Final[Dict[str, Dict[str, Any]]] = {
      "deepseek-chat": {
          "id": "deepseek-chat",
          "context_length": 128000,
@@ -50,10 +52,10 @@ DEEPSEEK_MODELS = {
          "response_format_available": True,
      },
  }
- DEEPSEEK_DEFAULT_MODEL = "deepseek-chat"
+ DEEPSEEK_DEFAULT_MODEL: Final[str] = "deepseek-chat"
 
  # Baichuan models
- BAICHUAN_MODELS = {
+ BAICHUAN_MODELS: Final[Dict[str, Dict[str, Any]]] = {
      "Baichuan4": {
          "id": "Baichuan4",
          "context_length": 32768,
@@ -90,11 +92,11 @@ BAICHUAN_MODELS = {
          "response_format_available": False,
      },
  }
- BAICHUAN_DEFAULT_MODEL = "Baichuan3-Turbo"
+ BAICHUAN_DEFAULT_MODEL: Final[str] = "Baichuan3-Turbo"
 
  # Groq models
- GROQ_DEFAULT_MODEL = "llama3-70b-8192"
- GROQ_MODELS = {
+ GROQ_DEFAULT_MODEL: Final[str] = "llama3-70b-8192"
+ GROQ_MODELS: Final[Dict[str, Dict[str, Any]]] = {
      "mixtral-8x7b-32768": {
          "id": "mixtral-8x7b-32768",
          "context_length": 32768,
@@ -150,8 +152,8 @@ GROQ_MODELS = {
  }
 
  # Qwen models
- QWEN_DEFAULT_MODEL = "qwen2.5-72b-instruct"
- QWEN_MODELS = {
+ QWEN_DEFAULT_MODEL: Final[str] = "qwen2.5-72b-instruct"
+ QWEN_MODELS: Final[Dict[str, Dict[str, Any]]] = {
      "qwen1.5-1.8b-chat": {
          "id": "qwen1.5-1.8b-chat",
          "context_length": 30000,
@@ -267,8 +269,8 @@ QWEN_MODELS = {
  }
 
  # Yi models
- YI_DEFAULT_MODEL = "yi-large-turbo"
- YI_MODELS = {
+ YI_DEFAULT_MODEL: Final[str] = "yi-lightning"
+ YI_MODELS: Final[Dict[str, Dict[str, Any]]] = {
      "yi-large": {
          "id": "yi-large",
          "context_length": 32000,
@@ -276,6 +278,13 @@ YI_MODELS = {
          "function_call_available": False,
          "response_format_available": False,
      },
+     "yi-lightning": {
+         "id": "yi-lightning",
+         "context_length": 16000,
+         "max_output_tokens": 4096,
+         "function_call_available": False,
+         "response_format_available": False,
+     },
      "yi-large-turbo": {
          "id": "yi-large-turbo",
          "context_length": 16000,
@@ -322,8 +331,8 @@ YI_MODELS = {
  }
 
  # ZhiPuAI models
- ZHIPUAI_DEFAULT_MODEL = "glm-4-air"
- ZHIPUAI_MODELS = {
+ ZHIPUAI_DEFAULT_MODEL: Final[str] = "glm-4-air"
+ ZHIPUAI_MODELS: Final[Dict[str, Dict[str, Any]]] = {
      "glm-3-turbo": {
          "id": "glm-3-turbo",
          "context_length": 128000,
@@ -399,8 +408,8 @@ ZHIPUAI_MODELS = {
  }
 
  # Mistral models
- MISTRAL_DEFAULT_MODEL = "mistral-small"
- MISTRAL_MODELS = {
+ MISTRAL_DEFAULT_MODEL: Final[str] = "mistral-small"
+ MISTRAL_MODELS: Final[Dict[str, Dict[str, Any]]] = {
      "open-mistral-7b": {
          "id": "open-mistral-7b",
          "context_length": 32000,
@@ -452,8 +461,8 @@ MISTRAL_MODELS = {
  }
 
  # OpenAI models
- OPENAI_DEFAULT_MODEL = "gpt-4o"
- OPENAI_MODELS = {
+ OPENAI_DEFAULT_MODEL: Final[str] = "gpt-4o"
+ OPENAI_MODELS: Final[Dict[str, Dict[str, Any]]] = {
      "gpt-35-turbo": {
          "id": "gpt-35-turbo",
          "context_length": 16385,
@@ -517,8 +526,8 @@ OPENAI_MODELS = {
  }
 
  # Anthropic models
- ANTHROPIC_DEFAULT_MODEL = "claude-3-5-sonnet-20240620"
- ANTHROPIC_MODELS = {
+ ANTHROPIC_DEFAULT_MODEL: Final[str] = "claude-3-5-sonnet-20240620"
+ ANTHROPIC_MODELS: Final[Dict[str, Dict[str, Any]]] = {
      "claude-3-opus-20240229": {
          "id": "claude-3-opus-20240229",
          "context_length": 200000,
@@ -554,8 +563,8 @@ ANTHROPIC_MODELS = {
  }
 
  # Minimax models
- MINIMAX_DEFAULT_MODEL = "abab6.5s-chat"
- MINIMAX_MODELS = {
+ MINIMAX_DEFAULT_MODEL: Final[str] = "abab6.5s-chat"
+ MINIMAX_MODELS: Final[Dict[str, Dict[str, Any]]] = {
      "abab5-chat": {
          "id": "abab5-chat",
          "context_length": 6144,
@@ -587,8 +596,8 @@ MINIMAX_MODELS = {
  }
 
  # Gemini models
- GEMINI_DEFAULT_MODEL = "gemini-1.5-pro"
- GEMINI_MODELS = {
+ GEMINI_DEFAULT_MODEL: Final[str] = "gemini-1.5-pro"
+ GEMINI_MODELS: Final[Dict[str, Dict[str, Any]]] = {
      "gemini-1.5-pro": {
          "id": "gemini-1.5-pro",
          "context_length": 1048576,
@@ -606,8 +615,8 @@ GEMINI_MODELS = {
  }
 
  # Baidu ERNIE (Wenxin Yiyan) models
- ERNIE_DEFAULT_MODEL = "ernie-lite"
- ERNIE_MODELS = {
+ ERNIE_DEFAULT_MODEL: Final[str] = "ernie-lite"
+ ERNIE_MODELS: Final[Dict[str, Dict[str, Any]]] = {
      "ernie-lite": {
          "id": "ernie-lite",
          "context_length": 6144,
@@ -646,8 +655,8 @@ ERNIE_MODELS = {
  }
 
 
- STEPFUN_DEFAULT_MODEL = "step-1-8k"
- STEPFUN_MODELS = {
+ STEPFUN_DEFAULT_MODEL: Final[str] = "step-1-8k"
+ STEPFUN_MODELS: Final[Dict[str, Dict[str, Any]]] = {
      "step-1-8k": {
          "id": "step-1-8k",
          "context_length": 8192,
vectorvein/types/llm_parameters.py CHANGED
@@ -9,7 +9,7 @@ from anthropic._types import NotGiven as AnthropicNotGiven
  from anthropic.types.message_create_params import ToolChoice as AnthropicToolChoice
 
  from openai._types import NotGiven as OpenAINotGiven
- from openai._types import NOT_GIVEN as OpenAINOT_GIVEN
+ from openai._types import NOT_GIVEN as OPENAI_NOT_GIVEN
  from openai.types.chat.chat_completion_chunk import ChoiceDeltaToolCall
  from openai.types.chat.chat_completion_tool_param import ChatCompletionToolParam
  from openai.types.chat.chat_completion_message_tool_call import ChatCompletionMessageToolCall
@@ -93,7 +93,7 @@ class ChatCompletionDeltaMessage(BaseModel):
 
  NotGiven = Union[AnthropicNotGiven, OpenAINotGiven]
 
- NOT_GIVEN = OpenAINOT_GIVEN
+ NOT_GIVEN = OPENAI_NOT_GIVEN
 
  OpenAIToolParam = ChatCompletionToolParam
  ToolParam = OpenAIToolParam
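
Only the import alias changes here (OpenAINOT_GIVEN becomes OPENAI_NOT_GIVEN); the module-level NOT_GIVEN it feeds is the same OpenAI sentinel. The sentinel's purpose is to let callers distinguish "argument omitted" from an explicit value, roughly like this (a minimal sketch; the chat function is hypothetical):

    # sentinel_demo.py -- chat() is hypothetical; it only shows how the
    # re-exported NOT_GIVEN / NotGiven names are typically used
    from typing import Union

    from vectorvein.types.llm_parameters import NOT_GIVEN, NotGiven

    def chat(prompt: str, max_tokens: Union[int, NotGiven] = NOT_GIVEN) -> dict:
        payload: dict = {"prompt": prompt}
        if max_tokens is not NOT_GIVEN:  # omitted vs. explicitly provided
            payload["max_tokens"] = max_tokens
        return payload

    print(chat("hi"))                  # {'prompt': 'hi'}
    print(chat("hi", max_tokens=256))  # {'prompt': 'hi', 'max_tokens': 256}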
@@ -101,3 +101,21 @@ ToolParam = OpenAIToolParam
  Tools = Iterable[ToolParam]
 
  ToolChoice = ChatCompletionToolChoiceOptionParam
+
+
+ __all__ = [
+     "EndpointSetting",
+     "ModelSetting",
+     "BackendSettings",
+     "Usage",
+     "ChatCompletionMessage",
+     "ChatCompletionDeltaMessage",
+     "NotGiven",
+     "NOT_GIVEN",
+     "OpenAIToolParam",
+     "ToolParam",
+     "Tools",
+     "ToolChoice",
+     "AnthropicToolParam",
+     "AnthropicToolChoice",
+ ]
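
The new __all__ makes the module's re-export surface explicit: star-imports yield exactly the fourteen listed names, and strict checker settings such as mypy's --no-implicit-reexport accept NOT_GIVEN, ToolParam, and the other imported names as intentional re-exports. A quick illustrative check:

    # all_demo.py -- illustrative only
    import vectorvein.types.llm_parameters as llm_parameters

    print(len(llm_parameters.__all__))    # 14
    print(sorted(llm_parameters.__all__))

    # With __all__ defined, `from vectorvein.types.llm_parameters import *`
    # exposes exactly these names instead of every non-underscore global.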
vectorvein-0.1.32.dist-info/METADATA → vectorvein-0.1.34.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: vectorvein
- Version: 0.1.32
+ Version: 0.1.34
  Summary: Default template for PDM package
  Author-Email: Anderson <andersonby@163.com>
  License: MIT
@@ -12,7 +12,7 @@ Requires-Dist: anthropic[vertex]>=0.31.2
  Requires-Dist: pydantic>=2.8.2
  Requires-Dist: Pillow>=10.4.0
  Requires-Dist: deepseek-tokenizer>=0.1.0
- Requires-Dist: qwen-tokenizer>=0.1.0
+ Requires-Dist: qwen-tokenizer>=0.2.0
  Description-Content-Type: text/markdown
 
  # vectorvein
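
Besides the version bump, the floor for qwen-tokenizer rises from >=0.1.0 to >=0.2.0. To see what an installed environment actually resolved these fields to, the standard library reads the same METADATA file (sketch; assumes vectorvein is installed):

    # check_metadata.py -- standard-library sketch; assumes vectorvein is installed
    from importlib.metadata import requires, version

    print(version("vectorvein"))  # e.g. "0.1.34"

    # Requires-Dist lines from METADATA, including "qwen-tokenizer>=0.2.0":
    for req in requires("vectorvein") or []:
        print(req)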
vectorvein-0.1.32.dist-info/RECORD → vectorvein-0.1.34.dist-info/RECORD CHANGED
@@ -1,6 +1,6 @@
- vectorvein-0.1.32.dist-info/METADATA,sha256=ZxI75mXI-su2ofzfkg5zRTBgiwSDc5Ftm0JNeDyfE-k,502
- vectorvein-0.1.32.dist-info/WHEEL,sha256=Vza3XR51HW1KmFP0iIMUVYIvz0uQuKJpIXKYOBGQyFQ,90
- vectorvein-0.1.32.dist-info/entry_points.txt,sha256=6OYgBcLyFCUgeqLgnvMyOJxPCWzgy7se4rLPKtNonMs,34
+ vectorvein-0.1.34.dist-info/METADATA,sha256=vU6f2zVHj_35BGEKhGfkeJ_Cj4uAE1MzTIHu25n5WOI,502
+ vectorvein-0.1.34.dist-info/WHEEL,sha256=pM0IBB6ZwH3nkEPhtcp50KvKNX-07jYtnb1g1m6Z4Co,90
+ vectorvein-0.1.34.dist-info/entry_points.txt,sha256=6OYgBcLyFCUgeqLgnvMyOJxPCWzgy7se4rLPKtNonMs,34
  vectorvein/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  vectorvein/chat_clients/__init__.py,sha256=dW169oK1n3v8Z0uD8itghzlCP72rxiaS-XYn6fvI2xM,16788
  vectorvein/chat_clients/anthropic_client.py,sha256=h82GxBi7h22B7leBuPofwBstxH_c12tEgGjpnKg6UDc,25007
@@ -15,16 +15,20 @@ vectorvein/chat_clients/mistral_client.py,sha256=1aKSylzBDaLYcFnaBIL4-sXSzWmXfBe
  vectorvein/chat_clients/moonshot_client.py,sha256=gbu-6nGxx8uM_U2WlI4Wus881rFRotzHtMSoYOcruGU,526
  vectorvein/chat_clients/openai_client.py,sha256=Nz6tV45pWcsOupxjnsRsGTicbQNJWIZyxuJoJ5DGMpg,527
  vectorvein/chat_clients/openai_compatible_client.py,sha256=gfCTXji8pgFUiultiNDKcmPIGu7lFfQ9VmA8o2_Mm6c,18823
+ vectorvein/chat_clients/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  vectorvein/chat_clients/qwen_client.py,sha256=-ryh-m9PgsO0fc4ulcCmPTy1155J8YUy15uPoJQOHA0,513
  vectorvein/chat_clients/stepfun_client.py,sha256=zsD2W5ahmR4DD9cqQTXmJr3txrGuvxbRWhFlRdwNijI,519
  vectorvein/chat_clients/utils.py,sha256=1LddLLVf8r8_Hj5LEYrQRus2qfsuXkJPMOu9VsiKMys,24338
  vectorvein/chat_clients/yi_client.py,sha256=RNf4CRuPJfixrwLZ3-DEc3t25QDe1mvZeb9sku2f8Bc,484
  vectorvein/chat_clients/zhipuai_client.py,sha256=Ys5DSeLCuedaDXr3PfG1EW2zKXopt-awO2IylWSwY0s,519
+ vectorvein/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  vectorvein/settings/__init__.py,sha256=0L-2WicBq9ctaJRoSwx8ZhVtX4slS5tHrIlSGf-tJxg,3564
- vectorvein/types/defaults.py,sha256=8IZrVFQs1dj5yh-6PKPSepTyZe3qp9bcw5MFqWB8Q4s,21113
+ vectorvein/settings/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ vectorvein/types/defaults.py,sha256=xefmRNYBGbnWA5kjLLFKN91UM5gnHZ5-kcCNlQRfznk,22095
  vectorvein/types/enums.py,sha256=x_S0IJiEWijOAEiMNdiGDGEWGtmt7TwMriJVDqrDmTo,1637
  vectorvein/types/exception.py,sha256=gnW4GnJ76jND6UGnodk9xmqkcbeS7Cz2rvncA2HpD5E,69
- vectorvein/types/llm_parameters.py,sha256=N6RQ8tqO1RCywMFRWPooffeAEPd9x3JW6Bl4UgQtF5I,4379
+ vectorvein/types/llm_parameters.py,sha256=5Q_NWVjbEhEcG7lYLebiQZ9uQU9rZznFmrUxDZ17yqY,4714
+ vectorvein/types/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  vectorvein/utilities/media_processing.py,sha256=BujciRmw1GMmc3ELRvafL8STcy6r5b2rVnh27-uA7so,2256
  vectorvein/utilities/retry.py,sha256=9ePuJdeUUGx-qMWfaFxmlOvG_lQPwCQ4UB1z3Edlo34,993
- vectorvein-0.1.32.dist-info/RECORD,,
+ vectorvein-0.1.34.dist-info/RECORD,,
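
Each RECORD row is path, digest, size: the digest is the file's SHA-256 hash, urlsafe-base64-encoded with the trailing padding stripped, which is why every empty py.typed marker (and the empty __init__.py) shares 47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU with size 0. A short sketch reproducing the encoding:

    # record_hash.py -- reproduces the digest format used in wheel RECORD files
    import base64
    import hashlib

    def record_hash(data: bytes) -> str:
        digest = hashlib.sha256(data).digest()
        return "sha256=" + base64.urlsafe_b64encode(digest).rstrip(b"=").decode("ascii")

    print(record_hash(b""))
    # sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU  (matches the empty-file entries above)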
vectorvein-0.1.32.dist-info/WHEEL → vectorvein-0.1.34.dist-info/WHEEL CHANGED
@@ -1,4 +1,4 @@
  Wheel-Version: 1.0
- Generator: pdm-backend (2.4.1)
+ Generator: pdm-backend (2.4.2)
  Root-Is-Purelib: true
  Tag: py3-none-any