yaicli 0.6.0__py3-none-any.whl → 0.6.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pyproject.toml +1 -1
- yaicli/llms/provider.py +2 -0
- yaicli/llms/providers/ai21_provider.py +5 -4
- yaicli/llms/providers/chatglm_provider.py +6 -1
- yaicli/llms/providers/chutes_provider.py +7 -0
- yaicli/llms/providers/deepseek_provider.py +6 -3
- yaicli/llms/providers/doubao_provider.py +24 -22
- yaicli/llms/providers/groq_provider.py +7 -5
- yaicli/llms/providers/infiniai_provider.py +7 -1
- yaicli/llms/providers/minimax_provider.py +13 -0
- yaicli/llms/providers/modelscope_provider.py +6 -3
- yaicli/llms/providers/openai_provider.py +36 -12
- yaicli/llms/providers/openrouter_provider.py +6 -3
- yaicli/llms/providers/sambanova_provider.py +8 -6
- yaicli/llms/providers/siliconflow_provider.py +6 -3
- yaicli/llms/providers/targon_provider.py +14 -0
- yaicli/llms/providers/yi_provider.py +8 -1
- {yaicli-0.6.0.dist-info → yaicli-0.6.1.dist-info}/METADATA +1 -1
- yaicli-0.6.1.dist-info/RECORD +43 -0
- yaicli-0.6.0.dist-info/RECORD +0 -41
- {yaicli-0.6.0.dist-info → yaicli-0.6.1.dist-info}/WHEEL +0 -0
- {yaicli-0.6.0.dist-info → yaicli-0.6.1.dist-info}/entry_points.txt +0 -0
- {yaicli-0.6.0.dist-info → yaicli-0.6.1.dist-info}/licenses/LICENSE +0 -0
pyproject.toml
CHANGED
yaicli/llms/provider.py
CHANGED
@@ -54,6 +54,8 @@ class ProviderFactory:
         "ollama": (".providers.ollama_provider", "OllamaProvider"),
         "cohere": (".providers.cohere_provider", "CohereProvider"),
         "sambanova": (".providers.sambanova_provider", "SambanovaProvider"),
+        "minimax": (".providers.minimax_provider", "MinimaxProvider"),
+        "targon": (".providers.targon_provider", "TargonProvider"),
     }

     @classmethod
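
The registry above maps each provider name to a (module path, class name) pair, which suggests lazy, import-on-demand resolution. Below is a minimal sketch of how such a registry is typically resolved, assuming importlib-based loading; the resolver function name and signature are hypothetical, since ProviderFactory's lookup method is not part of this diff:

import importlib
from typing import Any


def create_provider(name: str, registry: dict, package: str = "yaicli.llms", **kwargs: Any):
    # Hypothetical resolver: look up the registered (module, class) tuple,
    # import the module relative to the llms package, and instantiate the class.
    module_path, class_name = registry[name]
    module = importlib.import_module(module_path, package=package)
    return getattr(module, class_name)(**kwargs)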

yaicli/llms/providers/ai21_provider.py
CHANGED
@@ -1,4 +1,4 @@
-from typing import Generator, Optional
+from typing import Any, Dict, Generator, Optional

 from openai._streaming import Stream
 from openai.types.chat.chat_completion_chunk import ChatCompletionChunk
@@ -12,9 +12,10 @@ class AI21Provider(OpenAIProvider):

     DEFAULT_BASE_URL = "https://api.ai21.com/studio/v1"

-    def
-        super().
-
+    def get_completion_params(self) -> Dict[str, Any]:
+        params = super().get_completion_params()
+        params["max_tokens"] = params.pop("max_completion_tokens")
+        return params

     def _handle_stream_response(self, response: Stream[ChatCompletionChunk]) -> Generator[LLMResponse, None, None]:
         """Handle streaming response from AI21 models

yaicli/llms/providers/chatglm_provider.py
CHANGED
@@ -1,5 +1,5 @@
 import json
-from typing import Generator, Optional
+from typing import Any, Dict, Generator, Optional

 from openai._streaming import Stream
 from openai.types.chat.chat_completion import ChatCompletion, Choice
@@ -14,6 +14,11 @@ class ChatglmProvider(OpenAIProvider):

     DEFAULT_BASE_URL = "https://open.bigmodel.cn/api/paas/v4/"

+    def get_completion_params(self) -> Dict[str, Any]:
+        params = super().get_completion_params()
+        params["max_tokens"] = params.pop("max_completion_tokens")
+        return params
+
     def _handle_normal_response(self, response: ChatCompletion) -> Generator[LLMResponse, None, None]:
         """Handle normal (non-streaming) response
         Support both openai capabilities and chatglm

yaicli/llms/providers/chutes_provider.py
CHANGED
@@ -1,3 +1,5 @@
+from typing import Any, Dict
+
 from .openai_provider import OpenAIProvider


@@ -5,3 +7,8 @@ class ChutesProvider(OpenAIProvider):
     """Chutes provider implementation based on openai-compatible API"""

     DEFAULT_BASE_URL = "https://llm.chutes.ai/v1"
+
+    def get_completion_params(self) -> Dict[str, Any]:
+        params = super().get_completion_params()
+        params["max_tokens"] = params.pop("max_completion_tokens")
+        return params

yaicli/llms/providers/deepseek_provider.py
CHANGED
@@ -1,3 +1,5 @@
+from typing import Any, Dict
+
 from .openai_provider import OpenAIProvider


@@ -6,6 +8,7 @@ class DeepSeekProvider(OpenAIProvider):

     DEFAULT_BASE_URL = "https://api.deepseek.com/v1"

-    def
-        super().
-
+    def get_completion_params(self) -> Dict[str, Any]:
+        params = super().get_completion_params()
+        params["max_tokens"] = params.pop("max_completion_tokens")
+        return params

yaicli/llms/providers/doubao_provider.py
CHANGED
@@ -1,3 +1,5 @@
+from typing import Any, Dict
+
 from volcenginesdkarkruntime import Ark

 from ...config import cfg
@@ -9,43 +11,43 @@ class DoubaoProvider(OpenAIProvider):
     """Doubao provider implementation based on openai-compatible API"""

     DEFAULT_BASE_URL = "https://ark.cn-beijing.volces.com/api/v3"
+    CLIENT_CLS = Ark

     def __init__(self, config: dict = cfg, **kwargs):
         self.config = config
         self.enable_function = self.config["ENABLE_FUNCTIONS"]
+        self.client_params = self.get_client_params()
+
+        # Initialize client
+        self.client = self.CLIENT_CLS(**self.client_params)
+        self.console = get_console()
+
+        # Store completion params
+        self.completion_params = self.get_completion_params()
+
+    def get_client_params(self) -> Dict[str, Any]:
         # Initialize client params
-
+        client_params = {"base_url": self.DEFAULT_BASE_URL}
         if self.config.get("API_KEY", None):
-
+            client_params["api_key"] = self.config["API_KEY"]
         if self.config.get("BASE_URL", None):
-
+            client_params["base_url"] = self.config["BASE_URL"]
         if self.config.get("AK", None):
-
+            client_params["ak"] = self.config["AK"]
        if self.config.get("SK", None):
-
+            client_params["sk"] = self.config["SK"]
         if self.config.get("REGION", None):
-
+            client_params["region"] = self.config["REGION"]
+        return client_params

-
-
-        self.console = get_console()
-
-        # Store completion params
-        self.completion_params = {
+    def get_completion_params(self) -> Dict[str, Any]:
+        params = {
             "model": self.config["MODEL"],
             "temperature": self.config["TEMPERATURE"],
             "top_p": self.config["TOP_P"],
             "max_tokens": self.config["MAX_TOKENS"],
             "timeout": self.config["TIMEOUT"],
         }
-        # Add extra headers if set
-        if self.config.get("EXTRA_HEADERS", None):
-            self.completion_params["extra_headers"] = {
-                **self.config["EXTRA_HEADERS"],
-                "X-Title": self.APP_NAME,
-                "HTTP-Referer": self.APPA_REFERER,
-            }
-
-        # Add extra body params if set
         if self.config.get("EXTRA_BODY", None):
-
+            params["extra_body"] = self.config["EXTRA_BODY"]
+        return params

yaicli/llms/providers/groq_provider.py
CHANGED
@@ -1,3 +1,5 @@
+from typing import Any, Dict
+
 from .openai_provider import OpenAIProvider


@@ -6,9 +8,9 @@ class GroqProvider(OpenAIProvider):

     DEFAULT_BASE_URL = "https://api.groq.com/openai/v1"

-    def
-        super().
-        if self.config
+    def get_completion_params(self) -> Dict[str, Any]:
+        params = super().get_completion_params()
+        if self.config["EXTRA_BODY"] and "N" in self.config["EXTRA_BODY"] and self.config["EXTRA_BODY"]["N"] != 1:
             self.console.print("Groq does not support N parameter, setting N to 1 as Groq default", style="yellow")
-
-
+            params["extra_body"]["N"] = 1
+        return params

yaicli/llms/providers/infiniai_provider.py
CHANGED
@@ -1,3 +1,5 @@
+from typing import Any, Dict
+
 from .openai_provider import OpenAIProvider


@@ -11,4 +13,8 @@ class InfiniAIProvider(OpenAIProvider):
         if self.enable_function:
             self.console.print("InfiniAI does not support functions, disabled", style="yellow")
             self.enable_function = False
-
+
+    def get_completion_params(self) -> Dict[str, Any]:
+        params = super().get_completion_params()
+        params["max_tokens"] = params.pop("max_completion_tokens")
+        return params

yaicli/llms/providers/minimax_provider.py
ADDED
@@ -0,0 +1,13 @@
+from typing import Any, Dict
+from .openai_provider import OpenAIProvider
+
+
+class MinimaxProvider(OpenAIProvider):
+    """Minimax provider implementation based on openai-compatible API"""
+
+    DEFAULT_BASE_URL = "https://api.minimaxi.com/v1"
+
+    def get_completion_params(self) -> Dict[str, Any]:
+        params = super().get_completion_params()
+        params["max_tokens"] = params.pop("max_completion_tokens")
+        return params

yaicli/llms/providers/modelscope_provider.py
CHANGED
@@ -1,3 +1,5 @@
+from typing import Any, Dict
+
 from .openai_provider import OpenAIProvider


@@ -6,6 +8,7 @@ class ModelScopeProvider(OpenAIProvider):

     DEFAULT_BASE_URL = "https://api-inference.modelscope.cn/v1/"

-    def
-        super().
-
+    def get_completion_params(self) -> Dict[str, Any]:
+        params = super().get_completion_params()
+        params["max_tokens"] = params.pop("max_completion_tokens")
+        return params

yaicli/llms/providers/openai_provider.py
CHANGED
@@ -1,3 +1,4 @@
+import json
 from typing import Any, Dict, Generator, List, Optional

 import openai
@@ -16,41 +17,51 @@ class OpenAIProvider(Provider):
     """OpenAI provider implementation based on openai library"""

     DEFAULT_BASE_URL = "https://api.openai.com/v1"
+    CLIENT_CLS = openai.OpenAI

     def __init__(self, config: dict = cfg, verbose: bool = False, **kwargs):
         self.config = config
         self.enable_function = self.config["ENABLE_FUNCTIONS"]
         self.verbose = verbose
+
+        # Initialize client
+        self.client_params = self.get_client_params()
+        self.client = self.CLIENT_CLS(**self.client_params)
+        self.console = get_console()
+
+        # Store completion params
+        self.completion_params = self.get_completion_params()
+
+    def get_client_params(self) -> Dict[str, Any]:
+        """Get the client parameters"""
         # Initialize client params
-
+        client_params = {
             "api_key": self.config["API_KEY"],
             "base_url": self.config["BASE_URL"] or self.DEFAULT_BASE_URL,
         }

         # Add extra headers if set
         if self.config["EXTRA_HEADERS"]:
-
+            client_params["default_headers"] = {
                 **self.config["EXTRA_HEADERS"],
                 "X-Title": self.APP_NAME,
                 "HTTP-Referer": self.APPA_REFERER,
             }
+        return client_params

-
-
-
-
-        # Store completion params
-        self.completion_params = {
+    def get_completion_params(self) -> Dict[str, Any]:
+        """Get the completion parameters"""
+        completion_params = {
             "model": self.config["MODEL"],
             "temperature": self.config["TEMPERATURE"],
             "top_p": self.config["TOP_P"],
             "max_completion_tokens": self.config["MAX_TOKENS"],
             "timeout": self.config["TIMEOUT"],
         }
-
         # Add extra body params if set
         if self.config["EXTRA_BODY"]:
-
+            completion_params["extra_body"] = self.config["EXTRA_BODY"]
+        return completion_params

     def _convert_messages(self, messages: List[ChatMessage]) -> List[Dict[str, Any]]:
         """Convert a list of ChatMessage objects to a list of OpenAI message dicts."""
@@ -103,6 +114,11 @@ class OpenAIProvider(Provider):

     def _handle_normal_response(self, response: ChatCompletion) -> Generator[LLMResponse, None, None]:
         """Handle normal (non-streaming) response"""
+        if not response.choices:
+            yield LLMResponse(
+                content=json.dumps(getattr(response, "base_resp", None) or response.to_dict()), finish_reason="stop"
+            )
+            return
         choice = response.choices[0]
         content = choice.message.content or ""  # type: ignore
         reasoning = choice.message.reasoning_content  # type: ignore
@@ -124,12 +140,20 @@ class OpenAIProvider(Provider):
         """Handle streaming response from OpenAI API"""
         # Initialize tool call object to accumulate tool call data across chunks
         tool_call: Optional[ToolCall] = None
-
+        started = False
         # Process each chunk in the response stream
         for chunk in response:
-            if not chunk.choices:
+            if not chunk.choices and not started:
+                # Some api could return error message in the first chunk, no choices to handle, return raw response to show the message
+                yield LLMResponse(
+                    content=json.dumps(getattr(chunk, "base_resp", None) or chunk.to_dict()), finish_reason="stop"
+                )
+                started = True
                 continue

+            if not chunk.choices:
+                continue
+            started = True
             choice = chunk.choices[0]
             delta = choice.delta
             finish_reason = choice.finish_reason
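
Taken together, the openai_provider.py changes split construction into two overridable hooks: get_client_params() feeds CLIENT_CLS (openai.OpenAI by default) and get_completion_params() builds the request parameters, with max_completion_tokens as the base key. Below is a minimal sketch of the override pattern the other providers in this release follow, written as a hypothetical subclass (the class name and base URL are placeholders; the method body mirrors the provider diffs above):

from typing import Any, Dict

from yaicli.llms.providers.openai_provider import OpenAIProvider


class ExampleCompatProvider(OpenAIProvider):
    """Hypothetical OpenAI-compatible provider, shown only to illustrate the hook."""

    DEFAULT_BASE_URL = "https://api.example.com/v1"  # placeholder endpoint

    def get_completion_params(self) -> Dict[str, Any]:
        # Reuse the base parameters, then rename max_completion_tokens to
        # max_tokens, the compatibility tweak most providers in this diff apply.
        params = super().get_completion_params()
        params["max_tokens"] = params.pop("max_completion_tokens")
        return params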

yaicli/llms/providers/openrouter_provider.py
CHANGED
@@ -1,3 +1,5 @@
+from typing import Any, Dict
+
 from .openai_provider import OpenAIProvider


@@ -6,6 +8,7 @@ class OpenRouterProvider(OpenAIProvider):

     DEFAULT_BASE_URL = "https://openrouter.ai/api/v1"

-    def
-        super().
-
+    def get_completion_params(self) -> Dict[str, Any]:
+        params = super().get_completion_params()
+        params["max_tokens"] = params.pop("max_completion_tokens")
+        return params

yaicli/llms/providers/sambanova_provider.py
CHANGED
@@ -1,3 +1,5 @@
+from typing import Any, Dict
+
 from ...const import DEFAULT_TEMPERATURE
 from .openai_provider import OpenAIProvider

@@ -14,13 +16,13 @@ class SambanovaProvider(OpenAIProvider):
         "DeepSeek-V3-0324",
     )

-    def
-        super().
-
-
-        if
+    def get_completion_params(self) -> Dict[str, Any]:
+        params = super().get_completion_params()
+        params.pop("presence_penalty", None)
+        params.pop("frequency_penalty", None)
+        if params.get("temperature") < 0 or params.get("temperature") > 1:
             self.console.print("Sambanova temperature must be between 0 and 1, setting to 0.4", style="yellow")
-
+            params["temperature"] = DEFAULT_TEMPERATURE
         if self.enable_function and self.config["MODEL"] not in self.SUPPORT_FUNCTION_CALL_MOELS:
             self.console.print(
                 f"Sambanova supports function call models: {', '.join(self.SUPPORT_FUNCTION_CALL_MOELS)}",

yaicli/llms/providers/siliconflow_provider.py
CHANGED
@@ -1,3 +1,5 @@
+from typing import Any, Dict
+
 from .openai_provider import OpenAIProvider


@@ -6,6 +8,7 @@ class SiliconFlowProvider(OpenAIProvider):

     DEFAULT_BASE_URL = "https://api.siliconflow.cn/v1"

-    def
-        super().
-
+    def get_completion_params(self) -> Dict[str, Any]:
+        params = super().get_completion_params()
+        params["max_tokens"] = params.pop("max_completion_tokens")
+        return params

yaicli/llms/providers/targon_provider.py
ADDED
@@ -0,0 +1,14 @@
+from typing import Any, Dict
+
+from .openai_provider import OpenAIProvider
+
+
+class TargonProvider(OpenAIProvider):
+    """Targon provider implementation based on openai-compatible API"""
+
+    DEFAULT_BASE_URL = "https://api.targon.com/v1"
+
+    def get_completion_params(self) -> Dict[str, Any]:
+        params = super().get_completion_params()
+        params["max_tokens"] = params.pop("max_completion_tokens")
+        return params

yaicli/llms/providers/yi_provider.py
CHANGED
@@ -1,7 +1,14 @@
+from typing import Any, Dict
+
 from .openai_provider import OpenAIProvider


 class YiProvider(OpenAIProvider):
-    """
+    """Lingyiwanwu provider implementation based on openai-compatible API"""

     DEFAULT_BASE_URL = "https://api.lingyiwanwu.com/v1"
+
+    def get_completion_params(self) -> Dict[str, Any]:
+        params = super().get_completion_params()
+        params["max_tokens"] = params.pop("max_completion_tokens")
+        return params

yaicli-0.6.1.dist-info/RECORD
ADDED
@@ -0,0 +1,43 @@
+pyproject.toml,sha256=kId8ADm-DI5ydtpa9S2__9z0qnrxI7k0e1D-LPXXsKA,2434
+yaicli/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+yaicli/chat.py,sha256=_emvZEdgMBth2nQGaNWPf0P45oW2k3bpuIwqsxFcM5A,13676
+yaicli/cli.py,sha256=YUKbtvRcNbe6iTmm0dx-38QDzzrAP4cOYaOChxpJ2wg,23673
+yaicli/config.py,sha256=HrWYcelLXE61XX719eVcuuo3292xxf1BNQznWdvjQFQ,6535
+yaicli/console.py,sha256=vARPJd-3lafutsQWrGntQVjLrYqaJD3qisN82pmuhjU,1973
+yaicli/const.py,sha256=jC01jLLuuYJ7K_QaekXkIN7j1bIKCoGwJeTpquJy55Q,8178
+yaicli/entry.py,sha256=Q1eqLE7tcHide7ooyPO7OCJpKE2YVuxR-NNFA2Pt2Hw,8693
+yaicli/exceptions.py,sha256=WBYg8OTJJzaj7lt6HE7ZyBoe5T6A3yZRNCRfWd4iN0c,372
+yaicli/history.py,sha256=s-57X9FMsaQHF7XySq1gGH_jpd_cHHTYafYu2ECuG6M,2472
+yaicli/printer.py,sha256=g1TS7aDSQlWlSrQRhvNhNqoQKlsaf1lVOyUSK6LQLNQ,7945
+yaicli/render.py,sha256=k8o2P8fI44PJlyQbs7gmMiu2x2prwajdWn5JIt15BIA,505
+yaicli/role.py,sha256=PfwiVJIlzg7EzlvMM-kIy6vBK0d5d_J4M1I_fIZGnWk,7399
+yaicli/schemas.py,sha256=Ty2ybCvld-ritgBZoI3RR93vYfw9LUNqkR8xk8VRZ2A,762
+yaicli/tools.py,sha256=xw8KEs_xlSf79A2Aq1rAsUWahS6A_e5QMLt7QDXL5bs,5086
+yaicli/utils.py,sha256=bpo3Xhozpxsaci3FtEIKZ32l4ZdyWMsrHjYGX0tB4J4,4541
+yaicli/functions/__init__.py,sha256=_FJooQ9GkijG8xLwuU0cr5GBrGnC9Nc6bnCeUjrsT0k,1271
+yaicli/functions/buildin/execute_shell_command.py,sha256=unl1-F8p6QZajeHdA0u5UpURMJM0WhdWMUWCCCHVRcI,1320
+yaicli/llms/__init__.py,sha256=cN54nu-YalZipXjLW0YAAe0rRv0tXAQ8lLi1ohuTpao,363
+yaicli/llms/client.py,sha256=mkE9KHSuPcJfpNQXbzF2YXGkel3jrOW8KfQ3YYpaK4M,4453
+yaicli/llms/provider.py,sha256=u_pHF806rmKqkAuKfBi1vNAm2xdWfnqFPYMb6xf2wTQ,2959
+yaicli/llms/providers/ai21_provider.py,sha256=_RRHtj7Nopwm6t_D_jukA7VjqVhlOC_0TphRL-UxVnI,2752
+yaicli/llms/providers/chatglm_provider.py,sha256=1xP4KVAi6SDKZ-lMi2wdzywtDydsTf6jDzh3jBBGMfA,6437
+yaicli/llms/providers/chutes_provider.py,sha256=qaTV8863zWLUsxnjWqpOUCVtnxS4tTzFCJBo5yakvhU,431
+yaicli/llms/providers/cohere_provider.py,sha256=hc6vQxbCHz9kM2tNKK-kGkuOf4-gkskXW9ctr9V4Cxk,10837
+yaicli/llms/providers/deepseek_provider.py,sha256=VjGes_jFin5WGYNFxYKMoHwgAQX_eYbYhQKfjeh-9eI,438
+yaicli/llms/providers/doubao_provider.py,sha256=4eOdE91ITUn3uo3mvYAzdrHsuFIIBwZWib21mtZn8OY,1938
+yaicli/llms/providers/groq_provider.py,sha256=W87b22nSILZiFXSzMrU4aL6zdtTzDDUihJhmFvGKxAc,639
+yaicli/llms/providers/infiniai_provider.py,sha256=1dseUIZiXsxYRATRtk_obFclyXMwi4glsP7l_tVtnv8,710
+yaicli/llms/providers/minimax_provider.py,sha256=V50Rfi_ffx7tSplPuAwjlZikSpqjdjqBVP8oSRQAQy0,435
+yaicli/llms/providers/modelscope_provider.py,sha256=BzBhYixiDEWB7gujQ0rcG__7nsv0psJRxdtYCYXBhdM,454
+yaicli/llms/providers/ollama_provider.py,sha256=pjpYjfnHWnExweZi1KGbT07JGkcxzKPhqICo8dD82D0,6967
+yaicli/llms/providers/openai_provider.py,sha256=5Oq2UEH2SVMbqy5oQZsC3zibzhKBehZyrtJyCVB8hsw,8589
+yaicli/llms/providers/openrouter_provider.py,sha256=McMBl9YWcUUWdjp9njphqCoSeHc37n-UJo8vy7EjhNg,443
+yaicli/llms/providers/sambanova_provider.py,sha256=ngYnOQFyYVjtplf_RKR-_DmlTNLKxK9kURxbprgyDJs,1227
+yaicli/llms/providers/siliconflow_provider.py,sha256=7Ir73me9jGMO5TAZDjrAbX7tbb_QBmLjTGywY0yliqc,446
+yaicli/llms/providers/targon_provider.py,sha256=Lqj2i-YydduqKpeMED06gL-0gu-8Y548icgrSsxPVUs,432
+yaicli/llms/providers/yi_provider.py,sha256=UgIOmxvyq7UxnF20NBX2oFDnhRdcAKnx_qplrsbpobM,438
+yaicli-0.6.1.dist-info/METADATA,sha256=9UTlwCBHXS9GCOj63pnfupmLwOdUkrHV1onl-N7s0RE,53514
+yaicli-0.6.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+yaicli-0.6.1.dist-info/entry_points.txt,sha256=iYVyQP0PJIm9tQnlQheqT435kK_xdGoi5j9aswGV9hA,66
+yaicli-0.6.1.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+yaicli-0.6.1.dist-info/RECORD,,
yaicli-0.6.0.dist-info/RECORD
DELETED
@@ -1,41 +0,0 @@
-pyproject.toml,sha256=vPeQggLMvO9RDn7BRhQjn5sAf0HyANCd0CFkLZ0a_uE,2434
-yaicli/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-yaicli/chat.py,sha256=_emvZEdgMBth2nQGaNWPf0P45oW2k3bpuIwqsxFcM5A,13676
-yaicli/cli.py,sha256=YUKbtvRcNbe6iTmm0dx-38QDzzrAP4cOYaOChxpJ2wg,23673
-yaicli/config.py,sha256=HrWYcelLXE61XX719eVcuuo3292xxf1BNQznWdvjQFQ,6535
-yaicli/console.py,sha256=vARPJd-3lafutsQWrGntQVjLrYqaJD3qisN82pmuhjU,1973
-yaicli/const.py,sha256=jC01jLLuuYJ7K_QaekXkIN7j1bIKCoGwJeTpquJy55Q,8178
-yaicli/entry.py,sha256=Q1eqLE7tcHide7ooyPO7OCJpKE2YVuxR-NNFA2Pt2Hw,8693
-yaicli/exceptions.py,sha256=WBYg8OTJJzaj7lt6HE7ZyBoe5T6A3yZRNCRfWd4iN0c,372
-yaicli/history.py,sha256=s-57X9FMsaQHF7XySq1gGH_jpd_cHHTYafYu2ECuG6M,2472
-yaicli/printer.py,sha256=g1TS7aDSQlWlSrQRhvNhNqoQKlsaf1lVOyUSK6LQLNQ,7945
-yaicli/render.py,sha256=k8o2P8fI44PJlyQbs7gmMiu2x2prwajdWn5JIt15BIA,505
-yaicli/role.py,sha256=PfwiVJIlzg7EzlvMM-kIy6vBK0d5d_J4M1I_fIZGnWk,7399
-yaicli/schemas.py,sha256=Ty2ybCvld-ritgBZoI3RR93vYfw9LUNqkR8xk8VRZ2A,762
-yaicli/tools.py,sha256=xw8KEs_xlSf79A2Aq1rAsUWahS6A_e5QMLt7QDXL5bs,5086
-yaicli/utils.py,sha256=bpo3Xhozpxsaci3FtEIKZ32l4ZdyWMsrHjYGX0tB4J4,4541
-yaicli/functions/__init__.py,sha256=_FJooQ9GkijG8xLwuU0cr5GBrGnC9Nc6bnCeUjrsT0k,1271
-yaicli/functions/buildin/execute_shell_command.py,sha256=unl1-F8p6QZajeHdA0u5UpURMJM0WhdWMUWCCCHVRcI,1320
-yaicli/llms/__init__.py,sha256=cN54nu-YalZipXjLW0YAAe0rRv0tXAQ8lLi1ohuTpao,363
-yaicli/llms/client.py,sha256=mkE9KHSuPcJfpNQXbzF2YXGkel3jrOW8KfQ3YYpaK4M,4453
-yaicli/llms/provider.py,sha256=gRPMVC_gTmR1-zm65ErmO5mZWr3HFcexQIEgEQvpymo,2820
-yaicli/llms/providers/ai21_provider.py,sha256=kOXGuxh1tu-G1MbxSWnDLFDE8gP73YvM7G6i7K5WzbM,2744
-yaicli/llms/providers/chatglm_provider.py,sha256=Oa9cQTF1x2durkfqNwO2LbC_ofr-8lVRrf63vINxPxY,6232
-yaicli/llms/providers/chutes_provider.py,sha256=JYStbkf5RqamcAjWkz9S9WtlDsK5gUtpCxCqm6_A1x4,207
-yaicli/llms/providers/cohere_provider.py,sha256=hc6vQxbCHz9kM2tNKK-kGkuOf4-gkskXW9ctr9V4Cxk,10837
-yaicli/llms/providers/deepseek_provider.py,sha256=1-ucumH7gEGmebauuq1AvTfEYLFFIefBRqV7OCg_qAU,411
-yaicli/llms/providers/doubao_provider.py,sha256=hh4VkJYJVwS34Tk2E09zwjoL-ar27X-ldnPg0Q4gemE,1999
-yaicli/llms/providers/groq_provider.py,sha256=ntwZ7qHv1AEiarHw5gUE0o6jJtTdhg03y5PRpSx_mM4,659
-yaicli/llms/providers/infiniai_provider.py,sha256=ftpmf7DJ25RklJX0RptGuyEQT1I5Zat8JE9a8gCm9SI,585
-yaicli/llms/providers/modelscope_provider.py,sha256=ndX59VpVKkYmJJYAfxHrjKkyaoqICpXrbQyQtJMT3LA,427
-yaicli/llms/providers/ollama_provider.py,sha256=pjpYjfnHWnExweZi1KGbT07JGkcxzKPhqICo8dD82D0,6967
-yaicli/llms/providers/openai_provider.py,sha256=9pI0tF1cui2NkoKWIKzdrKDfWaokv-s9yeAi5gydQvY,7521
-yaicli/llms/providers/openrouter_provider.py,sha256=32ZIjbyECecUGhP2iz42UsdHXfm5OT4_Utb62D4RYLI,416
-yaicli/llms/providers/sambanova_provider.py,sha256=tmesO0H6WEieD0EFKKDF2r2w7oPXSAjneuqaMCBCi-s,1302
-yaicli/llms/providers/siliconflow_provider.py,sha256=VxTjjXlbqqNRJ0C-maUOV1Pw-HUDtpbmgFniuppCHHk,419
-yaicli/llms/providers/yi_provider.py,sha256=D8dKLV0mTOPmRkS-Gb59N5u0UWXBJ5ZIfwoaNHM7hvA,205
-yaicli-0.6.0.dist-info/METADATA,sha256=Yh-4suoENNVLljgQFgOZmtJQuIR-xIUV_fxOeDQWCRU,53514
-yaicli-0.6.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-yaicli-0.6.0.dist-info/entry_points.txt,sha256=iYVyQP0PJIm9tQnlQheqT435kK_xdGoi5j9aswGV9hA,66
-yaicli-0.6.0.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
-yaicli-0.6.0.dist-info/RECORD,,

{yaicli-0.6.0.dist-info → yaicli-0.6.1.dist-info}/WHEEL
File without changes

{yaicli-0.6.0.dist-info → yaicli-0.6.1.dist-info}/entry_points.txt
File without changes

{yaicli-0.6.0.dist-info → yaicli-0.6.1.dist-info}/licenses/LICENSE
File without changes