vectorvein 0.1.31-py3-none-any.whl → 0.1.33-py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
vectorvein/py.typed ADDED

vectorvein/types/defaults.py CHANGED
@@ -1,17 +1,19 @@
  # @Author: Bi Ying
  # @Date: 2024-07-27 00:02:34
+ from typing import Final, Dict, Any
+
  from .enums import ContextLengthControlType

- CONTEXT_LENGTH_CONTROL = ContextLengthControlType.Latest
+ CONTEXT_LENGTH_CONTROL: Final[ContextLengthControlType] = ContextLengthControlType.Latest

- ENDPOINT_CONCURRENT_REQUESTS = 20
- ENDPOINT_RPM = 60
- ENDPOINT_TPM = 300000
+ ENDPOINT_CONCURRENT_REQUESTS: Final[int] = 20
+ ENDPOINT_RPM: Final[int] = 60
+ ENDPOINT_TPM: Final[int] = 300000

- MODEL_CONTEXT_LENGTH = 32768
+ MODEL_CONTEXT_LENGTH: Final[int] = 32768

  # Moonshot models
- MOONSHOT_MODELS = {
+ MOONSHOT_MODELS: Final[Dict[str, Dict[str, Any]]] = {
      "moonshot-v1-8k": {
          "id": "moonshot-v1-8k",
          "context_length": 8192,
@@ -31,10 +33,10 @@ MOONSHOT_MODELS = {
          "response_format_available": True,
      },
  }
- MOONSHOT_DEFAULT_MODEL = "moonshot-v1-8k"
+ MOONSHOT_DEFAULT_MODEL: Final[str] = "moonshot-v1-8k"

  # Deepseek models
- DEEPSEEK_MODELS = {
+ DEEPSEEK_MODELS: Final[Dict[str, Dict[str, Any]]] = {
      "deepseek-chat": {
          "id": "deepseek-chat",
          "context_length": 128000,
@@ -50,10 +52,10 @@ DEEPSEEK_MODELS = {
          "response_format_available": True,
      },
  }
- DEEPSEEK_DEFAULT_MODEL = "deepseek-chat"
+ DEEPSEEK_DEFAULT_MODEL: Final[str] = "deepseek-chat"

  # Baichuan models
- BAICHUAN_MODELS = {
+ BAICHUAN_MODELS: Final[Dict[str, Dict[str, Any]]] = {
      "Baichuan4": {
          "id": "Baichuan4",
          "context_length": 32768,
@@ -90,11 +92,11 @@ BAICHUAN_MODELS = {
          "response_format_available": False,
      },
  }
- BAICHUAN_DEFAULT_MODEL = "Baichuan3-Turbo"
+ BAICHUAN_DEFAULT_MODEL: Final[str] = "Baichuan3-Turbo"

  # Groq models
- GROQ_DEFAULT_MODEL = "llama3-70b-8192"
- GROQ_MODELS = {
+ GROQ_DEFAULT_MODEL: Final[str] = "llama3-70b-8192"
+ GROQ_MODELS: Final[Dict[str, Dict[str, Any]]] = {
      "mixtral-8x7b-32768": {
          "id": "mixtral-8x7b-32768",
          "context_length": 32768,
@@ -150,8 +152,8 @@ GROQ_MODELS = {
  }

  # Qwen models
- QWEN_DEFAULT_MODEL = "qwen2.5-72b-instruct"
- QWEN_MODELS = {
+ QWEN_DEFAULT_MODEL: Final[str] = "qwen2.5-72b-instruct"
+ QWEN_MODELS: Final[Dict[str, Dict[str, Any]]] = {
      "qwen1.5-1.8b-chat": {
          "id": "qwen1.5-1.8b-chat",
          "context_length": 30000,
@@ -267,8 +269,8 @@ QWEN_MODELS = {
  }

  # Yi models
- YI_DEFAULT_MODEL = "yi-large-turbo"
- YI_MODELS = {
+ YI_DEFAULT_MODEL: Final[str] = "yi-large-turbo"
+ YI_MODELS: Final[Dict[str, Dict[str, Any]]] = {
      "yi-large": {
          "id": "yi-large",
          "context_length": 32000,
@@ -322,8 +324,8 @@ YI_MODELS = {
  }

  # ZhiPuAI models
- ZHIPUAI_DEFAULT_MODEL = "glm-4-air"
- ZHIPUAI_MODELS = {
+ ZHIPUAI_DEFAULT_MODEL: Final[str] = "glm-4-air"
+ ZHIPUAI_MODELS: Final[Dict[str, Dict[str, Any]]] = {
      "glm-3-turbo": {
          "id": "glm-3-turbo",
          "context_length": 128000,
@@ -399,8 +401,8 @@ ZHIPUAI_MODELS = {
  }

  # Mistral models
- MISTRAL_DEFAULT_MODEL = "mistral-small"
- MISTRAL_MODELS = {
+ MISTRAL_DEFAULT_MODEL: Final[str] = "mistral-small"
+ MISTRAL_MODELS: Final[Dict[str, Dict[str, Any]]] = {
      "open-mistral-7b": {
          "id": "open-mistral-7b",
          "context_length": 32000,
@@ -452,8 +454,8 @@ MISTRAL_MODELS = {
  }

  # OpenAI models
- OPENAI_DEFAULT_MODEL = "gpt-4o"
- OPENAI_MODELS = {
+ OPENAI_DEFAULT_MODEL: Final[str] = "gpt-4o"
+ OPENAI_MODELS: Final[Dict[str, Dict[str, Any]]] = {
      "gpt-35-turbo": {
          "id": "gpt-35-turbo",
          "context_length": 16385,
@@ -498,11 +500,27 @@ OPENAI_MODELS = {
          "function_call_available": True,
          "response_format_available": True,
      },
+     "o1-mini": {
+         "id": "o1-mini",
+         "context_length": 128000,
+         "max_output_tokens": 65536,
+         "function_call_available": False,
+         "response_format_available": False,
+         "native_multimodal": False,
+     },
+     "o1-preview": {
+         "id": "o1-preview",
+         "context_length": 128000,
+         "max_output_tokens": 32768,
+         "function_call_available": False,
+         "response_format_available": False,
+         "native_multimodal": False,
+     },
  }

  # Anthropic models
- ANTHROPIC_DEFAULT_MODEL = "claude-3-5-sonnet-20240620"
- ANTHROPIC_MODELS = {
+ ANTHROPIC_DEFAULT_MODEL: Final[str] = "claude-3-5-sonnet-20240620"
+ ANTHROPIC_MODELS: Final[Dict[str, Dict[str, Any]]] = {
      "claude-3-opus-20240229": {
          "id": "claude-3-opus-20240229",
          "context_length": 200000,
@@ -538,8 +556,8 @@ ANTHROPIC_MODELS = {
  }

  # Minimax models
- MINIMAX_DEFAULT_MODEL = "abab6.5s-chat"
- MINIMAX_MODELS = {
+ MINIMAX_DEFAULT_MODEL: Final[str] = "abab6.5s-chat"
+ MINIMAX_MODELS: Final[Dict[str, Dict[str, Any]]] = {
      "abab5-chat": {
          "id": "abab5-chat",
          "context_length": 6144,
@@ -571,8 +589,8 @@ MINIMAX_MODELS = {
  }

  # Gemini models
- GEMINI_DEFAULT_MODEL = "gemini-1.5-pro"
- GEMINI_MODELS = {
+ GEMINI_DEFAULT_MODEL: Final[str] = "gemini-1.5-pro"
+ GEMINI_MODELS: Final[Dict[str, Dict[str, Any]]] = {
      "gemini-1.5-pro": {
          "id": "gemini-1.5-pro",
          "context_length": 1048576,
@@ -590,8 +608,8 @@ GEMINI_MODELS = {
  }

  # 百度文心一言 ERNIE 模型
- ERNIE_DEFAULT_MODEL = "ernie-lite"
- ERNIE_MODELS = {
+ ERNIE_DEFAULT_MODEL: Final[str] = "ernie-lite"
+ ERNIE_MODELS: Final[Dict[str, Dict[str, Any]]] = {
      "ernie-lite": {
          "id": "ernie-lite",
          "context_length": 6144,
@@ -630,8 +648,8 @@ ERNIE_MODELS = {
  }


- STEPFUN_DEFAULT_MODEL = "step-1-8k"
- STEPFUN_MODELS = {
+ STEPFUN_DEFAULT_MODEL: Final[str] = "step-1-8k"
+ STEPFUN_MODELS: Final[Dict[str, Dict[str, Any]]] = {
      "step-1-8k": {
          "id": "step-1-8k",
          "context_length": 8192,
vectorvein/types/llm_parameters.py CHANGED
@@ -9,7 +9,7 @@ from anthropic._types import NotGiven as AnthropicNotGiven
  from anthropic.types.message_create_params import ToolChoice as AnthropicToolChoice

  from openai._types import NotGiven as OpenAINotGiven
- from openai._types import NOT_GIVEN as OpenAINOT_GIVEN
+ from openai._types import NOT_GIVEN as OPENAI_NOT_GIVEN
  from openai.types.chat.chat_completion_chunk import ChoiceDeltaToolCall
  from openai.types.chat.chat_completion_tool_param import ChatCompletionToolParam
  from openai.types.chat.chat_completion_message_tool_call import ChatCompletionMessageToolCall
@@ -93,7 +93,7 @@ class ChatCompletionDeltaMessage(BaseModel):

  NotGiven = Union[AnthropicNotGiven, OpenAINotGiven]

- NOT_GIVEN = OpenAINOT_GIVEN
+ NOT_GIVEN = OPENAI_NOT_GIVEN

  OpenAIToolParam = ChatCompletionToolParam
  ToolParam = OpenAIToolParam
@@ -101,3 +101,21 @@ ToolParam = OpenAIToolParam
  Tools = Iterable[ToolParam]

  ToolChoice = ChatCompletionToolChoiceOptionParam
+
+
+ __all__ = [
+     "EndpointSetting",
+     "ModelSetting",
+     "BackendSettings",
+     "Usage",
+     "ChatCompletionMessage",
+     "ChatCompletionDeltaMessage",
+     "NotGiven",
+     "NOT_GIVEN",
+     "OpenAIToolParam",
+     "ToolParam",
+     "Tools",
+     "ToolChoice",
+     "AnthropicToolParam",
+     "AnthropicToolChoice",
+ ]
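
The rename OpenAINOT_GIVEN → OPENAI_NOT_GIVEN only touches the internal alias; the re-exported NOT_GIVEN name is unchanged. The new __all__ pins down the module's public surface: a star import (and most type checkers' re-export rules) now covers exactly the listed names. A small sketch of the effect, assuming the package is installed:

    # With __all__ defined, a star import pulls in only the listed names.
    from vectorvein.types.llm_parameters import *  # noqa: F403

    print(NOT_GIVEN)  # exported: "NOT_GIVEN" is listed in __all__
    # OPENAI_NOT_GIVEN is not pulled in by the star import because it is absent
    # from __all__; it stays reachable only via an explicit import:
    # from vectorvein.types.llm_parameters import OPENAI_NOT_GIVEN
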

vectorvein-0.1.31.dist-info/METADATA → vectorvein-0.1.33.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: vectorvein
- Version: 0.1.31
+ Version: 0.1.33
  Summary: Default template for PDM package
  Author-Email: Anderson <andersonby@163.com>
  License: MIT

vectorvein-0.1.31.dist-info/RECORD → vectorvein-0.1.33.dist-info/RECORD CHANGED
@@ -1,6 +1,6 @@
- vectorvein-0.1.31.dist-info/METADATA,sha256=RNZJEkOptnqazFSybizJ-8Vwi7EkX5dowGfkwXoC6So,502
- vectorvein-0.1.31.dist-info/WHEEL,sha256=Vza3XR51HW1KmFP0iIMUVYIvz0uQuKJpIXKYOBGQyFQ,90
- vectorvein-0.1.31.dist-info/entry_points.txt,sha256=6OYgBcLyFCUgeqLgnvMyOJxPCWzgy7se4rLPKtNonMs,34
+ vectorvein-0.1.33.dist-info/METADATA,sha256=yNyHQyf6bljhKwy5f4rbAfU0M3r-_XYqtNDaVg6y3ng,502
+ vectorvein-0.1.33.dist-info/WHEEL,sha256=Vza3XR51HW1KmFP0iIMUVYIvz0uQuKJpIXKYOBGQyFQ,90
+ vectorvein-0.1.33.dist-info/entry_points.txt,sha256=6OYgBcLyFCUgeqLgnvMyOJxPCWzgy7se4rLPKtNonMs,34
  vectorvein/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  vectorvein/chat_clients/__init__.py,sha256=dW169oK1n3v8Z0uD8itghzlCP72rxiaS-XYn6fvI2xM,16788
  vectorvein/chat_clients/anthropic_client.py,sha256=h82GxBi7h22B7leBuPofwBstxH_c12tEgGjpnKg6UDc,25007
@@ -15,16 +15,20 @@ vectorvein/chat_clients/mistral_client.py,sha256=1aKSylzBDaLYcFnaBIL4-sXSzWmXfBe
  vectorvein/chat_clients/moonshot_client.py,sha256=gbu-6nGxx8uM_U2WlI4Wus881rFRotzHtMSoYOcruGU,526
  vectorvein/chat_clients/openai_client.py,sha256=Nz6tV45pWcsOupxjnsRsGTicbQNJWIZyxuJoJ5DGMpg,527
  vectorvein/chat_clients/openai_compatible_client.py,sha256=gfCTXji8pgFUiultiNDKcmPIGu7lFfQ9VmA8o2_Mm6c,18823
+ vectorvein/chat_clients/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  vectorvein/chat_clients/qwen_client.py,sha256=-ryh-m9PgsO0fc4ulcCmPTy1155J8YUy15uPoJQOHA0,513
  vectorvein/chat_clients/stepfun_client.py,sha256=zsD2W5ahmR4DD9cqQTXmJr3txrGuvxbRWhFlRdwNijI,519
  vectorvein/chat_clients/utils.py,sha256=1LddLLVf8r8_Hj5LEYrQRus2qfsuXkJPMOu9VsiKMys,24338
  vectorvein/chat_clients/yi_client.py,sha256=RNf4CRuPJfixrwLZ3-DEc3t25QDe1mvZeb9sku2f8Bc,484
  vectorvein/chat_clients/zhipuai_client.py,sha256=Ys5DSeLCuedaDXr3PfG1EW2zKXopt-awO2IylWSwY0s,519
+ vectorvein/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  vectorvein/settings/__init__.py,sha256=0L-2WicBq9ctaJRoSwx8ZhVtX4slS5tHrIlSGf-tJxg,3564
- vectorvein/types/defaults.py,sha256=gq0R_9QMsxJXE8cHrJPog9U81-XDWGZ4mbeQNLS1kOU,20609
+ vectorvein/settings/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ vectorvein/types/defaults.py,sha256=3cMvobQzTyvGtS47INKJxUK6KC4ezTVPhpO7qvR4ayc,21877
  vectorvein/types/enums.py,sha256=x_S0IJiEWijOAEiMNdiGDGEWGtmt7TwMriJVDqrDmTo,1637
  vectorvein/types/exception.py,sha256=gnW4GnJ76jND6UGnodk9xmqkcbeS7Cz2rvncA2HpD5E,69
- vectorvein/types/llm_parameters.py,sha256=N6RQ8tqO1RCywMFRWPooffeAEPd9x3JW6Bl4UgQtF5I,4379
+ vectorvein/types/llm_parameters.py,sha256=5Q_NWVjbEhEcG7lYLebiQZ9uQU9rZznFmrUxDZ17yqY,4714
+ vectorvein/types/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  vectorvein/utilities/media_processing.py,sha256=BujciRmw1GMmc3ELRvafL8STcy6r5b2rVnh27-uA7so,2256
  vectorvein/utilities/retry.py,sha256=9ePuJdeUUGx-qMWfaFxmlOvG_lQPwCQ4UB1z3Edlo34,993
- vectorvein-0.1.31.dist-info/RECORD,,
+ vectorvein-0.1.33.dist-info/RECORD,,
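
The RECORD confirms that 0.1.33 ships py.typed markers (PEP 561) in vectorvein/, chat_clients/, settings/ and types/, so type checkers read the package's inline annotations instead of treating it as untyped. A minimal sketch of the downstream effect, assuming vectorvein >= 0.1.33 is installed:

    # With the py.typed markers in place, a checker such as mypy or pyright
    # resolves these symbols and their types from the installed package.
    from vectorvein.types.defaults import MODEL_CONTEXT_LENGTH, OPENAI_DEFAULT_MODEL

    window: int = MODEL_CONTEXT_LENGTH   # OK: revealed as int (Final[int] = 32768)
    model: str = OPENAI_DEFAULT_MODEL    # OK: revealed as str ("gpt-4o")
    # bad: str = MODEL_CONTEXT_LENGTH    # a type checker flags this int -> str mismatch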