vectorvein 0.1.52.tar.gz → 0.1.53.tar.gz
This diff shows the changes between two publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
- {vectorvein-0.1.52 → vectorvein-0.1.53}/PKG-INFO +1 -1
- {vectorvein-0.1.52 → vectorvein-0.1.53}/pyproject.toml +1 -1
- {vectorvein-0.1.52 → vectorvein-0.1.53}/src/vectorvein/chat_clients/__init__.py +31 -0
- vectorvein-0.1.53/src/vectorvein/chat_clients/xai_client.py +15 -0
- {vectorvein-0.1.52 → vectorvein-0.1.53}/src/vectorvein/settings/__init__.py +1 -0
- {vectorvein-0.1.52 → vectorvein-0.1.53}/src/vectorvein/types/defaults.py +11 -0
- {vectorvein-0.1.52 → vectorvein-0.1.53}/src/vectorvein/types/enums.py +3 -0
- {vectorvein-0.1.52 → vectorvein-0.1.53}/README.md +0 -0
- {vectorvein-0.1.52 → vectorvein-0.1.53}/src/vectorvein/__init__.py +0 -0
- {vectorvein-0.1.52 → vectorvein-0.1.53}/src/vectorvein/chat_clients/anthropic_client.py +0 -0
- {vectorvein-0.1.52 → vectorvein-0.1.53}/src/vectorvein/chat_clients/baichuan_client.py +0 -0
- {vectorvein-0.1.52 → vectorvein-0.1.53}/src/vectorvein/chat_clients/base_client.py +0 -0
- {vectorvein-0.1.52 → vectorvein-0.1.53}/src/vectorvein/chat_clients/deepseek_client.py +0 -0
- {vectorvein-0.1.52 → vectorvein-0.1.53}/src/vectorvein/chat_clients/gemini_client.py +0 -0
- {vectorvein-0.1.52 → vectorvein-0.1.53}/src/vectorvein/chat_clients/groq_client.py +0 -0
- {vectorvein-0.1.52 → vectorvein-0.1.53}/src/vectorvein/chat_clients/local_client.py +0 -0
- {vectorvein-0.1.52 → vectorvein-0.1.53}/src/vectorvein/chat_clients/minimax_client.py +0 -0
- {vectorvein-0.1.52 → vectorvein-0.1.53}/src/vectorvein/chat_clients/mistral_client.py +0 -0
- {vectorvein-0.1.52 → vectorvein-0.1.53}/src/vectorvein/chat_clients/moonshot_client.py +0 -0
- {vectorvein-0.1.52 → vectorvein-0.1.53}/src/vectorvein/chat_clients/openai_client.py +0 -0
- {vectorvein-0.1.52 → vectorvein-0.1.53}/src/vectorvein/chat_clients/openai_compatible_client.py +0 -0
- {vectorvein-0.1.52 → vectorvein-0.1.53}/src/vectorvein/chat_clients/py.typed +0 -0
- {vectorvein-0.1.52 → vectorvein-0.1.53}/src/vectorvein/chat_clients/qwen_client.py +0 -0
- {vectorvein-0.1.52 → vectorvein-0.1.53}/src/vectorvein/chat_clients/stepfun_client.py +0 -0
- {vectorvein-0.1.52 → vectorvein-0.1.53}/src/vectorvein/chat_clients/utils.py +0 -0
- {vectorvein-0.1.52 → vectorvein-0.1.53}/src/vectorvein/chat_clients/yi_client.py +0 -0
- {vectorvein-0.1.52 → vectorvein-0.1.53}/src/vectorvein/chat_clients/zhipuai_client.py +0 -0
- {vectorvein-0.1.52 → vectorvein-0.1.53}/src/vectorvein/py.typed +0 -0
- {vectorvein-0.1.52 → vectorvein-0.1.53}/src/vectorvein/server/token_server.py +0 -0
- {vectorvein-0.1.52 → vectorvein-0.1.53}/src/vectorvein/settings/py.typed +0 -0
- {vectorvein-0.1.52 → vectorvein-0.1.53}/src/vectorvein/types/exception.py +0 -0
- {vectorvein-0.1.52 → vectorvein-0.1.53}/src/vectorvein/types/llm_parameters.py +0 -0
- {vectorvein-0.1.52 → vectorvein-0.1.53}/src/vectorvein/types/py.typed +0 -0
- {vectorvein-0.1.52 → vectorvein-0.1.53}/src/vectorvein/utilities/media_processing.py +0 -0
- {vectorvein-0.1.52 → vectorvein-0.1.53}/src/vectorvein/utilities/retry.py +0 -0
src/vectorvein/chat_clients/__init__.py

```diff
@@ -6,6 +6,7 @@ from typing import overload, Literal
 from .base_client import BaseChatClient, BaseAsyncChatClient
 
 from .yi_client import YiChatClient, AsyncYiChatClient
+from .xai_client import XAIChatClient, AsyncXAIChatClient
 from .groq_client import GroqChatClient, AsyncGroqChatClient
 from .qwen_client import QwenChatClient, AsyncQwenChatClient
 from .local_client import LocalChatClient, AsyncLocalChatClient
```
```diff
@@ -41,6 +42,7 @@ BackendMap = {
         BackendType.ZhiPuAI: ZhiPuAIChatClient,
         BackendType.Baichuan: BaichuanChatClient,
         BackendType.StepFun: StepFunChatClient,
+        BackendType.XAI: XAIChatClient,
     },
     "async": {
         BackendType.Anthropic: AsyncAnthropicChatClient,
```
```diff
@@ -57,6 +59,7 @@ BackendMap = {
         BackendType.ZhiPuAI: AsyncZhiPuAIChatClient,
         BackendType.Baichuan: AsyncBaichuanChatClient,
         BackendType.StepFun: AsyncStepFunChatClient,
+        BackendType.XAI: AsyncXAIChatClient,
     },
 }
 
```
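The two BackendMap hunks register the new clients in the dispatch table used by the factory functions. A quick sanity check is sketched below; the top-level `"sync"` key is inferred from the hunk structure (the sync block is closed just before `"async"` opens) and is not shown verbatim in this diff.

```python
# Sketch only: confirms the new entries are reachable through the dispatch table.
# The "sync" key name is an inference from the hunks above, not shown in them.
from vectorvein.chat_clients import BackendMap, XAIChatClient, AsyncXAIChatClient
from vectorvein.types.enums import BackendType

assert BackendMap["sync"][BackendType.XAI] is XAIChatClient
assert BackendMap["async"][BackendType.XAI] is AsyncXAIChatClient
```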
```diff
@@ -257,6 +260,20 @@ def create_chat_client(
 ) -> StepFunChatClient: ...
 
 
+@overload
+def create_chat_client(
+    backend: Literal[BackendType.XAI],
+    model: str | None = None,
+    stream: bool = False,
+    temperature: float = 0.7,
+    context_length_control: ContextLengthControlType = defs.CONTEXT_LENGTH_CONTROL,
+    random_endpoint: bool = True,
+    endpoint_id: str = "",
+    http_client: httpx.Client | None = None,
+    **kwargs,
+) -> XAIChatClient: ...
+
+
 @overload
 def create_chat_client(
     backend: BackendType,
```
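With this overload in place, an xAI client can be obtained through the existing factory. The call below is a usage sketch: it assumes an xAI endpoint and credentials have already been configured in the package settings, which this diff does not show.

```python
# Usage sketch, assuming xAI credentials/endpoints are already configured in settings.
from vectorvein.chat_clients import create_chat_client
from vectorvein.types.enums import BackendType

client = create_chat_client(
    backend=BackendType.XAI,
    model="grok-beta",  # omitting this falls back to XAI_DEFAULT_MODEL
    temperature=0.7,
)
# The overload above types `client` as XAIChatClient.
```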
```diff
@@ -496,6 +513,20 @@ def create_async_chat_client(
 ) -> AsyncStepFunChatClient: ...
 
 
+@overload
+def create_async_chat_client(
+    backend: Literal[BackendType.XAI],
+    model: str | None = None,
+    stream: bool = False,
+    temperature: float = 0.7,
+    context_length_control: ContextLengthControlType = defs.CONTEXT_LENGTH_CONTROL,
+    random_endpoint: bool = True,
+    endpoint_id: str = "",
+    http_client: httpx.AsyncClient | None = None,
+    **kwargs,
+) -> AsyncXAIChatClient: ...
+
+
 @overload
 def create_async_chat_client(
     backend: BackendType,
```
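The async factory gains the matching overload. A minimal sketch under the same assumptions about pre-configured settings:

```python
# Async counterpart; again assumes settings are already configured.
import asyncio

from vectorvein.chat_clients import create_async_chat_client
from vectorvein.types.enums import BackendType


async def main() -> None:
    client = create_async_chat_client(backend=BackendType.XAI)
    # ... use the AsyncXAIChatClient here (request API not shown in this diff)


asyncio.run(main())
```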
src/vectorvein/chat_clients/xai_client.py (new file)

```diff
@@ -0,0 +1,15 @@
+# @Author: Bi Ying
+# @Date: 2024-07-26 14:48:55
+from ..types.enums import BackendType
+from ..types.defaults import XAI_DEFAULT_MODEL
+from .openai_compatible_client import OpenAICompatibleChatClient, AsyncOpenAICompatibleChatClient
+
+
+class XAIChatClient(OpenAICompatibleChatClient):
+    DEFAULT_MODEL = XAI_DEFAULT_MODEL
+    BACKEND_NAME = BackendType.XAI
+
+
+class AsyncXAIChatClient(AsyncOpenAICompatibleChatClient):
+    DEFAULT_MODEL = XAI_DEFAULT_MODEL
+    BACKEND_NAME = BackendType.XAI
```
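The new file is intentionally thin: the xAI backend reuses the OpenAI-compatible client and only pins its default model and backend name. `BackendType.XAI` itself presumably comes from the types/enums.py change listed above (+3 lines), whose hunk is not reproduced in this view. The same pattern could in principle serve any other OpenAI-compatible provider; the sketch below uses placeholder names to illustrate it, none of which exist in the package.

```python
# Illustration of the pattern only -- "Acme" is a placeholder provider; a real
# backend would also add its own BackendType member, defaults entry, and settings field.
from vectorvein.chat_clients.openai_compatible_client import (
    OpenAICompatibleChatClient,
    AsyncOpenAICompatibleChatClient,
)

ACME_DEFAULT_MODEL = "acme-chat-1"  # hypothetical default


class AcmeChatClient(OpenAICompatibleChatClient):
    DEFAULT_MODEL = ACME_DEFAULT_MODEL
    BACKEND_NAME = "acme"  # a real client would use a BackendType enum member here


class AsyncAcmeChatClient(AsyncOpenAICompatibleChatClient):
    DEFAULT_MODEL = ACME_DEFAULT_MODEL
    BACKEND_NAME = "acme"
```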
src/vectorvein/settings/__init__.py

```diff
@@ -35,6 +35,7 @@ class Settings(BaseModel):
     zhipuai: BackendSettings = Field(default_factory=BackendSettings, description="Zhipuai models settings.")
     baichuan: BackendSettings = Field(default_factory=BackendSettings, description="Baichuan models settings.")
     stepfun: BackendSettings = Field(default_factory=BackendSettings, description="StepFun models settings.")
+    xai: BackendSettings = Field(default_factory=BackendSettings, description="XAI models settings.")
 
     def __init__(self, **data):
         model_types = {
```
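The Settings model gains an `xai` field shaped like the other backend fields. A small sketch of what that implies, assuming pydantic v2 (the `Field` usage is compatible with it) and nothing about the BackendSettings schema itself:

```python
# Sketch: the new field is optional because it has a BackendSettings default_factory.
from vectorvein.settings import Settings

assert "xai" in Settings.model_fields  # pydantic v2 attribute; v1 would use __fields__
# Callers can therefore pass an "xai" section into Settings(**data) or omit it entirely.
```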
src/vectorvein/types/defaults.py

```diff
@@ -724,3 +724,14 @@ STEPFUN_MODELS: Final[Dict[str, Dict[str, Any]]] = {
         "native_multimodal": True,
     },
 }
+
+
+XAI_DEFAULT_MODEL: Final[str] = "grok-beta"
+XAI_MODELS: Final[Dict[str, Dict[str, Any]]] = {
+    "grok-beta": {
+        "id": "grok-beta",
+        "context_length": 131072,
+        "function_call_available": True,
+        "response_format_available": True,
+    },
+}
```
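The defaults table registers a single model, grok-beta, with a 131,072-token context window and both function calling and response-format support. The table is plain data, so capability checks are straightforward:

```python
# Reading the new model metadata; the names match the defaults.py additions above.
from vectorvein.types.defaults import XAI_DEFAULT_MODEL, XAI_MODELS

info = XAI_MODELS[XAI_DEFAULT_MODEL]
print(info["context_length"])             # 131072
print(info["function_call_available"])    # True
print(info["response_format_available"])  # True
```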