vectorvein 0.1.5__tar.gz → 0.1.7__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (34)
  1. {vectorvein-0.1.5 → vectorvein-0.1.7}/PKG-INFO +1 -1
  2. {vectorvein-0.1.5 → vectorvein-0.1.7}/pyproject.toml +1 -1
  3. {vectorvein-0.1.5 → vectorvein-0.1.7}/src/vectorvein/chat_clients/anthropic_client.py +4 -0
  4. vectorvein-0.1.7/src/vectorvein/settings/__init__.py +69 -0
  5. vectorvein-0.1.7/tests/sample_settings.py +947 -0
  6. {vectorvein-0.1.5 → vectorvein-0.1.7}/tests/test_create_chat_client.py +4 -2
  7. {vectorvein-0.1.5 → vectorvein-0.1.7}/tests/test_image_input_chat_client.py +3 -3
  8. vectorvein-0.1.5/src/vectorvein/settings/__init__.py +0 -71
  9. vectorvein-0.1.5/tests/sample_settings.py +0 -501
  10. {vectorvein-0.1.5 → vectorvein-0.1.7}/README.md +0 -0
  11. {vectorvein-0.1.5 → vectorvein-0.1.7}/src/vectorvein/__init__.py +0 -0
  12. {vectorvein-0.1.5 → vectorvein-0.1.7}/src/vectorvein/chat_clients/__init__.py +0 -0
  13. {vectorvein-0.1.5 → vectorvein-0.1.7}/src/vectorvein/chat_clients/base_client.py +0 -0
  14. {vectorvein-0.1.5 → vectorvein-0.1.7}/src/vectorvein/chat_clients/deepseek_client.py +0 -0
  15. {vectorvein-0.1.5 → vectorvein-0.1.7}/src/vectorvein/chat_clients/gemini_client.py +0 -0
  16. {vectorvein-0.1.5 → vectorvein-0.1.7}/src/vectorvein/chat_clients/groq_client.py +0 -0
  17. {vectorvein-0.1.5 → vectorvein-0.1.7}/src/vectorvein/chat_clients/local_client.py +0 -0
  18. {vectorvein-0.1.5 → vectorvein-0.1.7}/src/vectorvein/chat_clients/minimax_client.py +0 -0
  19. {vectorvein-0.1.5 → vectorvein-0.1.7}/src/vectorvein/chat_clients/mistral_client.py +0 -0
  20. {vectorvein-0.1.5 → vectorvein-0.1.7}/src/vectorvein/chat_clients/moonshot_client.py +0 -0
  21. {vectorvein-0.1.5 → vectorvein-0.1.7}/src/vectorvein/chat_clients/openai_client.py +0 -0
  22. {vectorvein-0.1.5 → vectorvein-0.1.7}/src/vectorvein/chat_clients/openai_compatible_client.py +0 -0
  23. {vectorvein-0.1.5 → vectorvein-0.1.7}/src/vectorvein/chat_clients/qwen_client.py +0 -0
  24. {vectorvein-0.1.5 → vectorvein-0.1.7}/src/vectorvein/chat_clients/utils.py +0 -0
  25. {vectorvein-0.1.5 → vectorvein-0.1.7}/src/vectorvein/chat_clients/yi_client.py +0 -0
  26. {vectorvein-0.1.5 → vectorvein-0.1.7}/src/vectorvein/chat_clients/zhipuai_client.py +0 -0
  27. {vectorvein-0.1.5 → vectorvein-0.1.7}/src/vectorvein/types/defaults.py +0 -0
  28. {vectorvein-0.1.5 → vectorvein-0.1.7}/src/vectorvein/types/enums.py +0 -0
  29. {vectorvein-0.1.5 → vectorvein-0.1.7}/src/vectorvein/types/llm_parameters.py +0 -0
  30. {vectorvein-0.1.5 → vectorvein-0.1.7}/src/vectorvein/utilities/media_processing.py +0 -0
  31. {vectorvein-0.1.5 → vectorvein-0.1.7}/tests/__init__.py +0 -0
  32. {vectorvein-0.1.5 → vectorvein-0.1.7}/tests/cat.png +0 -0
  33. {vectorvein-0.1.5 → vectorvein-0.1.7}/tests/test_format_messages.py +0 -0
  34. {vectorvein-0.1.5 → vectorvein-0.1.7}/tests/test_tool_use_multi_turns.py +0 -0
{vectorvein-0.1.5 → vectorvein-0.1.7}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: vectorvein
-Version: 0.1.5
+Version: 0.1.7
 Summary: Default template for PDM package
 Author-Email: Anderson <andersonby@163.com>
 License: MIT
{vectorvein-0.1.5 → vectorvein-0.1.7}/pyproject.toml

@@ -14,7 +14,7 @@ description = "Default template for PDM package"
 name = "vectorvein"
 readme = "README.md"
 requires-python = ">=3.10"
-version = "0.1.5"
+version = "0.1.7"
 
 [project.license]
 text = "MIT"
{vectorvein-0.1.5 → vectorvein-0.1.7}/src/vectorvein/chat_clients/anthropic_client.py

@@ -125,6 +125,8 @@ class AnthropicChatClient(BaseChatClient):
         self.stream = stream
         if temperature is not None:
             self.temperature = temperature
+        if isinstance(tools, OpenAINotGiven):
+            tools = NOT_GIVEN
         if isinstance(tool_choice, OpenAINotGiven):
             tool_choice = NOT_GIVEN
 
@@ -311,6 +313,8 @@ class AsyncAnthropicChatClient(BaseAsyncChatClient):
         self.stream = stream
         if temperature is not None:
             self.temperature = temperature
+        if isinstance(tools, OpenAINotGiven):
+            tools = NOT_GIVEN
         if isinstance(tool_choice, OpenAINotGiven):
             tool_choice = NOT_GIVEN
 
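In both the sync and async Anthropic clients, 0.1.7 extends the existing tool_choice guard to the tools argument, so that OpenAI's "argument omitted" sentinel is translated into the Anthropic SDK's own sentinel before the request is built. Below is a minimal sketch of that normalization, not the package's code; it assumes OpenAINotGiven in anthropic_client.py is an alias of the openai SDK's NotGiven class, and uses the NOT_GIVEN objects both SDKs export at top level.

# Sketch only: the sentinel translation applied to `tools` in 0.1.7.
# openai and anthropic each export their own "not given" sentinel; passing
# openai's object straight to the anthropic SDK would be treated as a value.
from openai import NOT_GIVEN as OPENAI_NOT_GIVEN, NotGiven as OpenAINotGiven
from anthropic import NOT_GIVEN

tools = OPENAI_NOT_GIVEN                 # caller supplied the OpenAI sentinel
if isinstance(tools, OpenAINotGiven):    # same check the patch adds
    tools = NOT_GIVEN                    # swap in Anthropic's sentinel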
vectorvein-0.1.7/src/vectorvein/settings/__init__.py (new file)

@@ -0,0 +1,69 @@
+# @Author: Bi Ying
+# @Date: 2024-07-27 00:30:56
+from typing import List, Dict
+
+from pydantic import BaseModel, Field
+
+from ..types import defaults as defs
+from ..types.enums import BackendType
+from ..types.llm_parameters import BackendSettings, EndpointSetting
+
+
+class Settings(BaseModel):
+    endpoints: List[EndpointSetting] = Field(
+        default_factory=list, description="Available endpoints for the LLM service."
+    )
+
+    anthropic: BackendSettings = Field(default_factory=BackendSettings, description="Anthropic models settings.")
+    deepseek: BackendSettings = Field(default_factory=BackendSettings, description="Deepseek models settings.")
+    gemini: BackendSettings = Field(default_factory=BackendSettings, description="Gemini models settings.")
+    groq: BackendSettings = Field(default_factory=BackendSettings, description="Groq models settings.")
+    local: BackendSettings = Field(default_factory=BackendSettings, description="Local models settings.")
+    minimax: BackendSettings = Field(default_factory=BackendSettings, description="Minimax models settings.")
+    mistral: BackendSettings = Field(default_factory=BackendSettings, description="Mistral models settings.")
+    moonshot: BackendSettings = Field(default_factory=BackendSettings, description="Moonshot models settings.")
+    openai: BackendSettings = Field(default_factory=BackendSettings, description="OpenAI models settings.")
+    qwen: BackendSettings = Field(default_factory=BackendSettings, description="Qwen models settings.")
+    yi: BackendSettings = Field(default_factory=BackendSettings, description="Yi models settings.")
+    zhipuai: BackendSettings = Field(default_factory=BackendSettings, description="Zhipuai models settings.")
+
+    def __init__(self, **data):
+        model_types = {
+            "anthropic": defs.ANTHROPIC_MODELS,
+            "deepseek": defs.DEEPSEEK_MODELS,
+            "gemini": defs.GEMINI_MODELS,
+            "groq": defs.GROQ_MODELS,
+            "local": {},
+            "minimax": defs.MINIMAX_MODELS,
+            "mistral": defs.MISTRAL_MODELS,
+            "moonshot": defs.MOONSHOT_MODELS,
+            "openai": defs.OPENAI_MODELS,
+            "qwen": defs.QWEN_MODELS,
+            "yi": defs.YI_MODELS,
+            "zhipuai": defs.ZHIPUAI_MODELS,
+        }
+
+        for model_type, default_models in model_types.items():
+            if model_type in data:
+                model_settings = BackendSettings()
+                model_settings.update_models(default_models, data[model_type].get("models", {}))
+                data[model_type] = model_settings
+            else:
+                data[model_type] = BackendSettings(models=default_models)
+
+        super().__init__(**data)
+
+    def load(self, settings_dict: Dict):
+        self.__init__(**settings_dict)
+
+    def get_endpoint(self, endpoint_id: str) -> EndpointSetting:
+        for endpoint in self.endpoints:
+            if endpoint.id == endpoint_id:
+                return endpoint
+        return EndpointSetting()
+
+    def get_backend(self, backend: BackendType) -> BackendSettings:
+        return getattr(self, backend.value.lower())
+
+
+settings = Settings()
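The new 69-line module replaces the 71-line 0.1.5 version and keeps the module-level settings singleton. Typical usage, sketched below, is to pass a plain dict to settings.load(): backends named in the dict have their "models" section merged over the defaults from vectorvein.types.defaults, while absent backends keep those defaults. Only the keys visible in this diff ("endpoints" and per-backend "models") come from the source; the dict values and the BackendType member name are illustrative assumptions.

# Usage sketch for the new Settings singleton (values are illustrative).
from vectorvein.settings import settings
from vectorvein.types.enums import BackendType

# Re-initialize from a dict; "openai" gets its models merged via
# BackendSettings.update_models(), other backends keep package defaults.
settings.load({
    "endpoints": [],            # EndpointSetting entries (shape not shown in this diff)
    "openai": {"models": {}},   # merged over defs.OPENAI_MODELS
})

openai_backend = settings.get_backend(BackendType.OpenAI)  # enum member name assumed
endpoint = settings.get_endpoint("endpoint-id")             # unknown id -> EndpointSetting()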