jarvis-ai-assistant 0.1.42__tar.gz → 0.1.44__tar.gz
- {jarvis_ai_assistant-0.1.42/src/jarvis_ai_assistant.egg-info → jarvis_ai_assistant-0.1.44}/PKG-INFO +1 -1
- {jarvis_ai_assistant-0.1.42 → jarvis_ai_assistant-0.1.44}/pyproject.toml +1 -1
- {jarvis_ai_assistant-0.1.42 → jarvis_ai_assistant-0.1.44}/setup.py +1 -1
- {jarvis_ai_assistant-0.1.42 → jarvis_ai_assistant-0.1.44}/src/jarvis/__init__.py +1 -1
- jarvis_ai_assistant-0.1.44/src/jarvis/__pycache__/__init__.cpython-313.pyc +0 -0
- {jarvis_ai_assistant-0.1.42 → jarvis_ai_assistant-0.1.44}/src/jarvis/__pycache__/agent.cpython-313.pyc +0 -0
- {jarvis_ai_assistant-0.1.42 → jarvis_ai_assistant-0.1.44}/src/jarvis/__pycache__/main.cpython-313.pyc +0 -0
- {jarvis_ai_assistant-0.1.42 → jarvis_ai_assistant-0.1.44}/src/jarvis/agent.py +1 -1
- {jarvis_ai_assistant-0.1.42 → jarvis_ai_assistant-0.1.44}/src/jarvis/main.py +1 -1
- jarvis_ai_assistant-0.1.44/src/jarvis/models/__pycache__/ai8.cpython-313.pyc +0 -0
- jarvis_ai_assistant-0.1.44/src/jarvis/models/__pycache__/base.cpython-313.pyc +0 -0
- {jarvis_ai_assistant-0.1.42 → jarvis_ai_assistant-0.1.44}/src/jarvis/models/__pycache__/kimi.cpython-313.pyc +0 -0
- jarvis_ai_assistant-0.1.44/src/jarvis/models/__pycache__/openai.cpython-313.pyc +0 -0
- jarvis_ai_assistant-0.1.44/src/jarvis/models/__pycache__/oyi.cpython-313.pyc +0 -0
- {jarvis_ai_assistant-0.1.42 → jarvis_ai_assistant-0.1.44}/src/jarvis/models/__pycache__/registry.cpython-313.pyc +0 -0
- {jarvis_ai_assistant-0.1.42 → jarvis_ai_assistant-0.1.44}/src/jarvis/models/ai8.py +98 -51
- {jarvis_ai_assistant-0.1.42 → jarvis_ai_assistant-0.1.44}/src/jarvis/models/base.py +4 -2
- {jarvis_ai_assistant-0.1.42 → jarvis_ai_assistant-0.1.44}/src/jarvis/models/kimi.py +4 -0
- {jarvis_ai_assistant-0.1.42 → jarvis_ai_assistant-0.1.44}/src/jarvis/models/openai.py +8 -4
- {jarvis_ai_assistant-0.1.42 → jarvis_ai_assistant-0.1.44}/src/jarvis/models/oyi.py +98 -32
- {jarvis_ai_assistant-0.1.42 → jarvis_ai_assistant-0.1.44}/src/jarvis/models/registry.py +8 -4
- {jarvis_ai_assistant-0.1.42 → jarvis_ai_assistant-0.1.44}/src/jarvis/tools/__pycache__/generator.cpython-313.pyc +0 -0
- {jarvis_ai_assistant-0.1.42 → jarvis_ai_assistant-0.1.44}/src/jarvis/tools/generator.py +6 -1
- {jarvis_ai_assistant-0.1.42 → jarvis_ai_assistant-0.1.44/src/jarvis_ai_assistant.egg-info}/PKG-INFO +1 -1
- jarvis_ai_assistant-0.1.42/src/jarvis/__pycache__/__init__.cpython-313.pyc +0 -0
- jarvis_ai_assistant-0.1.42/src/jarvis/models/__pycache__/ai8.cpython-313.pyc +0 -0
- jarvis_ai_assistant-0.1.42/src/jarvis/models/__pycache__/base.cpython-313.pyc +0 -0
- jarvis_ai_assistant-0.1.42/src/jarvis/models/__pycache__/openai.cpython-313.pyc +0 -0
- jarvis_ai_assistant-0.1.42/src/jarvis/models/__pycache__/oyi.cpython-313.pyc +0 -0
- {jarvis_ai_assistant-0.1.42 → jarvis_ai_assistant-0.1.44}/LICENSE +0 -0
- {jarvis_ai_assistant-0.1.42 → jarvis_ai_assistant-0.1.44}/MANIFEST.in +0 -0
- {jarvis_ai_assistant-0.1.42 → jarvis_ai_assistant-0.1.44}/README.md +0 -0
- {jarvis_ai_assistant-0.1.42 → jarvis_ai_assistant-0.1.44}/setup.cfg +0 -0
- {jarvis_ai_assistant-0.1.42 → jarvis_ai_assistant-0.1.44}/src/jarvis/__pycache__/models.cpython-313.pyc +0 -0
- {jarvis_ai_assistant-0.1.42 → jarvis_ai_assistant-0.1.44}/src/jarvis/__pycache__/tools.cpython-313.pyc +0 -0
- {jarvis_ai_assistant-0.1.42 → jarvis_ai_assistant-0.1.44}/src/jarvis/__pycache__/utils.cpython-313.pyc +0 -0
- {jarvis_ai_assistant-0.1.42 → jarvis_ai_assistant-0.1.44}/src/jarvis/__pycache__/zte_llm.cpython-313.pyc +0 -0
- {jarvis_ai_assistant-0.1.42 → jarvis_ai_assistant-0.1.44}/src/jarvis/models/__init__.py +0 -0
- {jarvis_ai_assistant-0.1.42 → jarvis_ai_assistant-0.1.44}/src/jarvis/models/__pycache__/__init__.cpython-313.pyc +0 -0
- {jarvis_ai_assistant-0.1.42 → jarvis_ai_assistant-0.1.44}/src/jarvis/tools/__init__.py +0 -0
- {jarvis_ai_assistant-0.1.42 → jarvis_ai_assistant-0.1.44}/src/jarvis/tools/__pycache__/__init__.cpython-313.pyc +0 -0
- {jarvis_ai_assistant-0.1.42 → jarvis_ai_assistant-0.1.44}/src/jarvis/tools/__pycache__/base.cpython-313.pyc +0 -0
- {jarvis_ai_assistant-0.1.42 → jarvis_ai_assistant-0.1.44}/src/jarvis/tools/__pycache__/bing_search.cpython-313.pyc +0 -0
- {jarvis_ai_assistant-0.1.42 → jarvis_ai_assistant-0.1.44}/src/jarvis/tools/__pycache__/calculator.cpython-313.pyc +0 -0
- {jarvis_ai_assistant-0.1.42 → jarvis_ai_assistant-0.1.44}/src/jarvis/tools/__pycache__/calculator_tool.cpython-313.pyc +0 -0
- {jarvis_ai_assistant-0.1.42 → jarvis_ai_assistant-0.1.44}/src/jarvis/tools/__pycache__/file_ops.cpython-313.pyc +0 -0
- {jarvis_ai_assistant-0.1.42 → jarvis_ai_assistant-0.1.44}/src/jarvis/tools/__pycache__/methodology.cpython-313.pyc +0 -0
- {jarvis_ai_assistant-0.1.42 → jarvis_ai_assistant-0.1.44}/src/jarvis/tools/__pycache__/python_script.cpython-313.pyc +0 -0
- {jarvis_ai_assistant-0.1.42 → jarvis_ai_assistant-0.1.44}/src/jarvis/tools/__pycache__/rag.cpython-313.pyc +0 -0
- {jarvis_ai_assistant-0.1.42 → jarvis_ai_assistant-0.1.44}/src/jarvis/tools/__pycache__/registry.cpython-313.pyc +0 -0
- {jarvis_ai_assistant-0.1.42 → jarvis_ai_assistant-0.1.44}/src/jarvis/tools/__pycache__/search.cpython-313.pyc +0 -0
- {jarvis_ai_assistant-0.1.42 → jarvis_ai_assistant-0.1.44}/src/jarvis/tools/__pycache__/shell.cpython-313.pyc +0 -0
- {jarvis_ai_assistant-0.1.42 → jarvis_ai_assistant-0.1.44}/src/jarvis/tools/__pycache__/sub_agent.cpython-313.pyc +0 -0
- {jarvis_ai_assistant-0.1.42 → jarvis_ai_assistant-0.1.44}/src/jarvis/tools/__pycache__/user_confirmation.cpython-313.pyc +0 -0
- {jarvis_ai_assistant-0.1.42 → jarvis_ai_assistant-0.1.44}/src/jarvis/tools/__pycache__/user_input.cpython-313.pyc +0 -0
- {jarvis_ai_assistant-0.1.42 → jarvis_ai_assistant-0.1.44}/src/jarvis/tools/__pycache__/user_interaction.cpython-313.pyc +0 -0
- {jarvis_ai_assistant-0.1.42 → jarvis_ai_assistant-0.1.44}/src/jarvis/tools/__pycache__/webpage.cpython-313.pyc +0 -0
- {jarvis_ai_assistant-0.1.42 → jarvis_ai_assistant-0.1.44}/src/jarvis/tools/base.py +0 -0
- {jarvis_ai_assistant-0.1.42 → jarvis_ai_assistant-0.1.44}/src/jarvis/tools/file_ops.py +0 -0
- {jarvis_ai_assistant-0.1.42 → jarvis_ai_assistant-0.1.44}/src/jarvis/tools/methodology.py +0 -0
- {jarvis_ai_assistant-0.1.42 → jarvis_ai_assistant-0.1.44}/src/jarvis/tools/registry.py +0 -0
- {jarvis_ai_assistant-0.1.42 → jarvis_ai_assistant-0.1.44}/src/jarvis/tools/shell.py +0 -0
- {jarvis_ai_assistant-0.1.42 → jarvis_ai_assistant-0.1.44}/src/jarvis/tools/sub_agent.py +0 -0
- {jarvis_ai_assistant-0.1.42 → jarvis_ai_assistant-0.1.44}/src/jarvis/utils.py +0 -0
- {jarvis_ai_assistant-0.1.42 → jarvis_ai_assistant-0.1.44}/src/jarvis_ai_assistant.egg-info/SOURCES.txt +0 -0
- {jarvis_ai_assistant-0.1.42 → jarvis_ai_assistant-0.1.44}/src/jarvis_ai_assistant.egg-info/dependency_links.txt +0 -0
- {jarvis_ai_assistant-0.1.42 → jarvis_ai_assistant-0.1.44}/src/jarvis_ai_assistant.egg-info/entry_points.txt +0 -0
- {jarvis_ai_assistant-0.1.42 → jarvis_ai_assistant-0.1.44}/src/jarvis_ai_assistant.egg-info/requires.txt +0 -0
- {jarvis_ai_assistant-0.1.42 → jarvis_ai_assistant-0.1.44}/src/jarvis_ai_assistant.egg-info/top_level.txt +0 -0
{jarvis_ai_assistant-0.1.42 → jarvis_ai_assistant-0.1.44}/pyproject.toml

```diff
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "jarvis-ai-assistant"
-version = "0.1.42"
+version = "0.1.44"
 description = "Jarvis: An AI assistant that uses tools to interact with the system"
 readme = "README.md"
 authors = [{ name = "Your Name", email = "your.email@example.com" }]
```
{jarvis_ai_assistant-0.1.42 → jarvis_ai_assistant-0.1.44}/setup.py

```diff
@@ -2,7 +2,7 @@ from setuptools import setup, find_packages
 
 setup(
     name="jarvis-ai-assistant",
-    version="0.1.42",
+    version="0.1.44",
     author="skyfire",
     author_email="skyfireitdiy@hotmail.com",
     description="An AI assistant that uses various tools to interact with the system",
```
{jarvis_ai_assistant-0.1.42 → jarvis_ai_assistant-0.1.44}/src/jarvis/agent.py

```diff
@@ -20,7 +20,7 @@ class Agent:
             name: Agent名称,默认为"Jarvis"
             is_sub_agent: 是否为子Agent,默认为False
         """
-        self.model = PlatformRegistry.
+        self.model = PlatformRegistry.get_global_platform()
         self.tool_registry = ToolRegistry.get_global_tool_registry()
         self.name = name
         self.is_sub_agent = is_sub_agent
```
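The Agent constructor now resolves its model through the restored `PlatformRegistry.get_global_platform()` helper. A minimal sketch of how that plays out, assuming a platform has been registered under the name you pass (the name "kimi" below is only an example) and its API key environment variable is set:

```python
from jarvis.agent import Agent
from jarvis.models.registry import PlatformRegistry

# Select the global platform by name before constructing an Agent (example name).
PlatformRegistry.get_global_platform_registry().set_global_platform_name("kimi")

agent = Agent()              # __init__ now calls PlatformRegistry.get_global_platform()
print(agent.name)            # "Jarvis" by default, per the docstring above
print(agent.is_sub_agent)    # False by default
```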
{jarvis_ai_assistant-0.1.42 → jarvis_ai_assistant-0.1.44}/src/jarvis/main.py

```diff
@@ -114,7 +114,7 @@ def main():
         PrettyOutput.print("未指定AI平台,请使用 -p 参数或者设置 JARVIS_PLATFORM 环境变量", OutputType.ERROR)
         return 1
 
-    PlatformRegistry.
+    PlatformRegistry.get_global_platform_registry().set_global_platform_name(platform)
 
     try:
         # 获取全局模型实例
```
{jarvis_ai_assistant-0.1.42 → jarvis_ai_assistant-0.1.44}/src/jarvis/models/ai8.py

```diff
@@ -14,59 +14,64 @@ class AI8Model(BasePlatform):
 
     def __init__(self):
         """Initialize model"""
-        PrettyOutput.section("支持的模型", OutputType.SUCCESS)
-
-        PrettyOutput.print("gpt-3.5-turbo", OutputType.INFO)
-        PrettyOutput.print("gpt-4-turbo", OutputType.INFO)
-        PrettyOutput.print("gpt-4o", OutputType.INFO)
-        PrettyOutput.print("gpt-4o-mini", OutputType.INFO)
-        PrettyOutput.print("o1-mini", OutputType.INFO)
-        PrettyOutput.print("gpt-4-vision-preview", OutputType.INFO)
-        PrettyOutput.print("gpt-4-turbo-preview", OutputType.INFO)
-        PrettyOutput.print("o1-mini-all", OutputType.INFO)
-        PrettyOutput.print("gpt-4o-all", OutputType.INFO)
-        PrettyOutput.print("o1-preview", OutputType.INFO)
-        PrettyOutput.print("claude-3-5-sonnet-20241022", OutputType.INFO)
-        PrettyOutput.print("claude-3-opus-20240229", OutputType.INFO)
-        PrettyOutput.print("claude-3-haiku-20240307", OutputType.INFO)
-        PrettyOutput.print("claude-3-5-sonnet-20240620", OutputType.INFO)
-        PrettyOutput.print("deepseek-chat", OutputType.INFO)
-        PrettyOutput.print("deepseek-coder", OutputType.INFO)
-        PrettyOutput.print("glm-4-flash", OutputType.INFO)
-        PrettyOutput.print("glm-4-air", OutputType.INFO)
-        PrettyOutput.print("glm-4v-flash", OutputType.INFO)
-        PrettyOutput.print("qwen-plus", OutputType.INFO)
-        PrettyOutput.print("qwen-vl-max", OutputType.INFO)
-        PrettyOutput.print("qwen-turbo", OutputType.INFO)
-        PrettyOutput.print("lite", OutputType.INFO)
-        PrettyOutput.print("generalv3.5", OutputType.INFO)
-        PrettyOutput.print("yi-lightning", OutputType.INFO)
-        PrettyOutput.print("yi-vision", OutputType.INFO)
-        PrettyOutput.print("yi-spark", OutputType.INFO)
-        PrettyOutput.print("yi-medium", OutputType.INFO)
-        PrettyOutput.print("Doubao-lite-4k", OutputType.INFO)
-        PrettyOutput.print("Doubao-lite-32k", OutputType.INFO)
-        PrettyOutput.print("Doubao-pro-4k", OutputType.INFO)
-        PrettyOutput.print("Doubao-pro-32k", OutputType.INFO)
-        PrettyOutput.print("step-1-flash", OutputType.INFO)
-        PrettyOutput.print("step-1v-8k", OutputType.INFO)
-        PrettyOutput.print("Baichuan4-Air", OutputType.INFO)
-        PrettyOutput.print("Baichuan4-Turbo", OutputType.INFO)
-        PrettyOutput.print("moonshot-v1-8k", OutputType.INFO)
-        PrettyOutput.print("ERNIE-Speed-128K", OutputType.INFO)
-        PrettyOutput.print("ERNIE-3.5-128K", OutputType.INFO)
-
-
-        PrettyOutput.print("使用AI8_MODEL环境变量配置模型", OutputType.SUCCESS)
-
         self.system_message = ""
         self.conversation = None
         self.files = []
-        self.
+        self.models = {} # 存储模型信息
+
+        # 获取可用模型列表
+        available_models = self.get_available_models()
+
+        if available_models:
+            PrettyOutput.section("支持的模型", OutputType.SUCCESS)
+            for model in self.models.values():
+                # 格式化显示模型信息
+                model_str = f"{model['value']:<30}"
+
+                # 添加标签
+                model_str += f"{model['label']}"
+
+                # 添加标签和积分信息
+                attrs = []
+                if model['attr'].get('tag'):
+                    attrs.append(model['attr']['tag'])
+                if model['attr'].get('integral'):
+                    attrs.append(model['attr']['integral'])
+
+                # 添加特性标记
+                features = []
+                if model['attr'].get('multimodal'):
+                    features.append("多模态")
+                if model['attr'].get('plugin'):
+                    features.append("插件支持")
+                if model['attr'].get('onlyImg'):
+                    features.append("图像支持")
+                if features:
+                    model_str += f" [{'|'.join(features)}]"
+
+                # 添加备注
+                if model['attr'].get('note'):
+                    model_str += f" - {model['attr']['note']}"
+
+                PrettyOutput.print(model_str, OutputType.INFO)
+        else:
+            PrettyOutput.print("获取模型列表失败", OutputType.WARNING)
+
         self.token = os.getenv("AI8_API_KEY")
-        if not 
-            raise Exception("
-
+        if not self.token:
+            raise Exception("AI8_API_KEY is not set")
+
+        PrettyOutput.print("使用AI8_MODEL环境变量配置模型", OutputType.SUCCESS)
+
+        self.model_name = os.getenv("AI8_MODEL") or "deepseek-chat"
+        if self.model_name not in self.models:
+            PrettyOutput.print(f"警告: 当前选择的模型 {self.model_name} 不在可用列表中", OutputType.WARNING)
+
+        PrettyOutput.print(f"当前使用模型: {self.model_name}", OutputType.SYSTEM)
+
+    def set_model_name(self, model_name: str):
+        """设置模型名称"""
+        self.model_name = model_name
 
     def create_conversation(self) -> bool:
         """Create a new conversation"""
@@ -102,7 +107,7 @@ class AI8Model(BasePlatform):
         # 2. 更新会话设置
         session_data = {
             **self.conversation,
-            "model": self.
+            "model": self.model_name,
             "contextCount": 1024,
             "prompt": self.system_message,
             "plugins": ["tavily_search"],
@@ -228,7 +233,7 @@ class AI8Model(BasePlatform):
 
     def name(self) -> str:
         """Return model name"""
-        return self.
+        return self.model_name
 
     def reset(self):
         """Reset model state"""
@@ -275,3 +280,45 @@ class AI8Model(BasePlatform):
             PrettyOutput.print(f"删除会话异常: {str(e)}", OutputType.ERROR)
             return False
 
+    def get_available_models(self) -> List[str]:
+        """获取可用的模型列表
+
+        Returns:
+            List[str]: 可用模型名称列表
+        """
+        try:
+            headers = {
+                'Content-Type': 'application/json',
+                'Accept': 'application/json, text/plain, */*',
+                'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36',
+                'X-APP-VERSION': '2.2.2',
+                'Origin': self.BASE_URL,
+                'Referer': f'{self.BASE_URL}/chat?_userMenuKey=chat'
+            }
+
+            response = requests.get(
+                f"{self.BASE_URL}/api/chat/template",
+                headers=headers
+            )
+
+            if response.status_code != 200:
+                PrettyOutput.print(f"获取模型列表失败: {response.status_code}", OutputType.ERROR)
+                return []
+
+            data = response.json()
+            if data['code'] != 0:
+                PrettyOutput.print(f"获取模型列表失败: {data.get('msg', '未知错误')}", OutputType.ERROR)
+                return []
+
+            # 保存模型信息
+            self.models = {
+                model['value']: model
+                for model in data['data']['models']
+            }
+
+            return list(self.models.keys())
+
+        except Exception as e:
+            PrettyOutput.print(f"获取模型列表异常: {str(e)}", OutputType.ERROR)
+            return []
+
```
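Instead of printing a hard-coded list of model names, `AI8Model` now queries `{BASE_URL}/api/chat/template`, keeps the response keyed by each entry's `value` in `self.models`, honours the `AI8_MODEL` environment variable (default `deepseek-chat`, with a warning when the choice is not in the fetched list), and gains a `set_model_name()` override. A rough sketch of the resulting flow, assuming `AI8_API_KEY` is set and the network call succeeds:

```python
import os
from jarvis.models.ai8 import AI8Model

os.environ.setdefault("AI8_MODEL", "deepseek-chat")   # default used by the new code

model = AI8Model()                        # __init__ fetches and prints the model list
names = model.get_available_models()      # names are the 'value' field of each template entry
model.set_model_name(names[0] if names else "deepseek-chat")  # new: switch at runtime
print(model.name())                       # reflects the current model_name
```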
{jarvis_ai_assistant-0.1.42 → jarvis_ai_assistant-0.1.44}/src/jarvis/models/base.py

```diff
@@ -1,6 +1,5 @@
 from abc import ABC, abstractmethod
 from typing import Dict, List
-from ..utils import OutputType, PrettyOutput
 
 
 class BasePlatform(ABC):
@@ -9,8 +8,11 @@ class BasePlatform(ABC):
     def __init__(self):
         """初始化模型"""
         pass
+
+    def set_model_name(self, model_name: str):
+        """设置模型名称"""
+        raise NotImplementedError("set_model_name is not implemented")
 
-
     @abstractmethod
     def chat(self, message: str) -> str:
         """执行对话"""
```
{jarvis_ai_assistant-0.1.42 → jarvis_ai_assistant-0.1.44}/src/jarvis/models/openai.py

```diff
@@ -30,9 +30,9 @@ class OpenAIModel(BasePlatform):
             raise Exception("OPENAI_API_KEY is not set")
 
         self.base_url = os.getenv("OPENAI_API_BASE", "https://api.deepseek.com")
-        self.
+        self.model_name = os.getenv("OPENAI_MODEL_NAME", "deepseek-chat")
 
-        PrettyOutput.print(f"当前使用模型: {self.
+        PrettyOutput.print(f"当前使用模型: {self.model_name}", OutputType.SYSTEM)
 
         self.client = OpenAI(
             api_key=self.api_key,
@@ -41,6 +41,10 @@ class OpenAIModel(BasePlatform):
         self.messages: List[Dict[str, str]] = []
         self.system_message = ""
 
+    def set_model_name(self, model_name: str):
+        """设置模型名称"""
+        self.model_name = model_name
+
     def set_system_message(self, message: str):
         """设置系统消息"""
         self.system_message = message
@@ -55,7 +59,7 @@ class OpenAIModel(BasePlatform):
         self.messages.append({"role": "user", "content": message})
 
         response = self.client.chat.completions.create(
-            model=self.
+            model=self.model_name,  # 使用配置的模型名称
             messages=self.messages,
             stream=True
         )
@@ -82,7 +86,7 @@ class OpenAIModel(BasePlatform):
 
     def name(self) -> str:
         """返回模型名称"""
-        return self.
+        return self.model_name
 
     def reset(self):
         """重置模型状态"""
```
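`OpenAIModel` now reads its model from the new `OPENAI_MODEL_NAME` variable (default `deepseek-chat`) and, like the other platforms, overrides `set_model_name()`; the `BasePlatform` default shown above raises `NotImplementedError` for platforms that do not opt in. A configuration sketch with placeholder values:

```python
import os

os.environ["OPENAI_API_KEY"] = "sk-..."                      # required, as before (placeholder)
os.environ["OPENAI_API_BASE"] = "https://api.deepseek.com"   # default shown in the diff
os.environ["OPENAI_MODEL_NAME"] = "deepseek-chat"            # new in 0.1.44

from jarvis.models.openai import OpenAIModel

model = OpenAIModel()
model.set_model_name("deepseek-coder")   # overrides the env-var choice for subsequent chats
```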
{jarvis_ai_assistant-0.1.42 → jarvis_ai_assistant-0.1.44}/src/jarvis/models/oyi.py

```diff
@@ -15,43 +15,36 @@ class OyiModel(BasePlatform):
     def __init__(self):
         """Initialize model"""
         PrettyOutput.section("支持的模型", OutputType.SUCCESS)
-
-
-
-
-
-
-
-
-
-        PrettyOutput.print("deepseek-chat", OutputType.INFO)
-        PrettyOutput.print("deepseek-coder", OutputType.INFO)
-        PrettyOutput.print("glm-4-flash", OutputType.INFO)
-        PrettyOutput.print("glm-4-air", OutputType.INFO)
-        PrettyOutput.print("qwen-plus", OutputType.INFO)
-        PrettyOutput.print("qwen-turbo", OutputType.INFO)
-        PrettyOutput.print("Doubao-lite-4k", OutputType.INFO)
-        PrettyOutput.print("Doubao-pro-4k", OutputType.INFO)
-        PrettyOutput.print("yi-lightning", OutputType.INFO)
-        PrettyOutput.print("step-1-flash", OutputType.INFO)
-        PrettyOutput.print("moonshot-v1-8k", OutputType.INFO)
-        PrettyOutput.print("lite", OutputType.INFO)
-        PrettyOutput.print("generalv3.5", OutputType.INFO)
-        PrettyOutput.print("gemini-pro", OutputType.INFO)
-        PrettyOutput.print("llama3-70b-8192", OutputType.INFO)
+
+        # 获取可用模型列表
+        available_models = self.get_available_models()
+        if available_models:
+            for model in available_models:
+                PrettyOutput.print(model, OutputType.INFO)
+        else:
+            PrettyOutput.print("获取模型列表失败", OutputType.WARNING)
+
         PrettyOutput.print("使用OYI_MODEL环境变量配置模型", OutputType.SUCCESS)
 
-
         self.messages = []
         self.system_message = ""
         self.conversation = None
         self.upload_files = []
         self.first_chat = True
-
+
         self.token = os.getenv("OYI_API_KEY")
-        if not 
-            raise Exception("
-
+        if not self.token:
+            raise Exception("OYI_API_KEY is not set")
+
+        self.model_name = os.getenv("OYI_MODEL") or "deepseek-chat"
+        if self.model_name not in [m.split()[0] for m in available_models]:
+            PrettyOutput.print(f"警告: 当前选择的模型 {self.model_name} 不在可用列表中", OutputType.WARNING)
+
+        PrettyOutput.print(f"当前使用模型: {self.model_name}", OutputType.SYSTEM)
+
+    def set_model_name(self, model_name: str):
+        """设置模型名称"""
+        self.model_name = model_name
 
 
     def create_conversation(self) -> bool:
@@ -71,7 +64,7 @@ class OyiModel(BasePlatform):
                 "isLock": False,
                 "systemMessage": "",
                 "params": json.dumps({
-                    "model": self.
+                    "model": self.model_name,
                     "is_webSearch": True,
                     "message": [],
                     "systemMessage": None,
@@ -202,7 +195,7 @@ class OyiModel(BasePlatform):
 
     def name(self) -> str:
         """Return model name"""
-        return self.
+        return self.model_name
 
     def reset(self):
         """Reset model state"""
@@ -261,6 +254,12 @@ class OyiModel(BasePlatform):
             Dict: Upload response data
         """
         try:
+            # 检查当前模型是否支持文件上传
+            model_info = self.models.get(self.model_name)
+            if not model_info or not model_info.get('uploadFile', False):
+                PrettyOutput.print(f"当前模型 {self.model_name} 不支持文件上传", OutputType.WARNING)
+                return None
+
             headers = {
                 'Authorization': f'Bearer {self.token}',
                 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36',
@@ -272,7 +271,7 @@ class OyiModel(BasePlatform):
 
             with open(file_path, 'rb') as f:
                 files = {
-                    'file': (os.path.basename(file_path), f, mimetypes.guess_type(file_path)[0])
+                    'file': (os.path.basename(file_path), f, mimetypes.guess_type(file_path)[0])
                 }
 
             response = requests.post(
@@ -298,3 +297,70 @@ class OyiModel(BasePlatform):
         except Exception as e:
             PrettyOutput.print(f"文件上传异常: {str(e)}", OutputType.ERROR)
             return None
+
+    def get_available_models(self) -> List[str]:
+        """获取可用的模型列表
+
+        Returns:
+            List[str]: 可用模型名称列表
+        """
+        try:
+            headers = {
+                'Content-Type': 'application/json',
+                'Accept': 'application/json, text/plain, */*',
+                'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36',
+                'Origin': 'https://ai.rcouyi.com',
+                'Referer': 'https://ai.rcouyi.com/'
+            }
+
+            response = requests.get(
+                "https://ai.rcouyi.com/config/system.json",
+                headers=headers
+            )
+
+            if response.status_code != 200:
+                PrettyOutput.print(f"获取模型列表失败: {response.status_code}", OutputType.ERROR)
+                return []
+
+            data = response.json()
+
+            # 保存模型信息
+            self.models = {
+                model['value']: model
+                for model in data.get('model', [])
+                if model.get('enable', False)  # 只保存启用的模型
+            }
+
+            # 格式化显示
+            models = []
+            for model in self.models.values():
+                # 基本信息
+                model_str = f"{model['value']:<30} {model['label']}"
+
+                # 添加后缀标签
+                suffix = model.get('suffix', [])
+                if suffix:
+                    # 处理新格式的suffix (字典列表)
+                    if suffix and isinstance(suffix[0], dict):
+                        suffix_str = ', '.join(s.get('tag', '') for s in suffix)
+                    # 处理旧格式的suffix (字符串列表)
+                    else:
+                        suffix_str = ', '.join(str(s) for s in suffix)
+                    model_str += f" ({suffix_str})"
+
+                # 添加描述或提示
+                info = model.get('tooltip') or model.get('description', '')
+                if info:
+                    model_str += f" - {info}"
+
+                # 添加文件上传支持标记
+                if model.get('uploadFile'):
+                    model_str += " [支持文件上传]"
+
+                models.append(model_str)
+
+            return sorted(models)
+
+        except Exception as e:
+            PrettyOutput.print(f"获取模型列表异常: {str(e)}", OutputType.ERROR)
+            return []
```
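`OyiModel` likewise builds its model list at runtime from `https://ai.rcouyi.com/config/system.json`, keeping only entries marked `enable`, and the file-upload path now refuses models whose entry lacks `uploadFile`. A sketch of checking that flag, assuming `OYI_API_KEY` is set and the config fetch succeeds (the model name is an example):

```python
from jarvis.models.oyi import OyiModel

model = OyiModel()                      # __init__ now prints the fetched, formatted model list
model.set_model_name("deepseek-chat")   # example; should appear in the fetched list

# get_available_models() cached each enabled system.json entry in model.models;
# uploads are rejected when the current entry lacks "uploadFile": true.
entry = model.models.get(model.name())
print(bool(entry and entry.get("uploadFile")))
```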
{jarvis_ai_assistant-0.1.42 → jarvis_ai_assistant-0.1.44}/src/jarvis/models/registry.py

```diff
@@ -130,7 +130,7 @@ class PlatformRegistry:
 
 
     @staticmethod
-    def 
+    def get_global_platform_registry():
         """获取全局平台注册器"""
         if PlatformRegistry.global_platform_registry is None:
             PlatformRegistry.global_platform_registry = PlatformRegistry()
@@ -152,9 +152,9 @@ class PlatformRegistry:
         self.platforms: Dict[str, Type[BasePlatform]] = {}
 
     @staticmethod
-    def 
+    def get_global_platform() -> BasePlatform:
         """获取全局平台实例"""
-        platform = PlatformRegistry.
+        platform = PlatformRegistry.get_global_platform_registry().create_platform(PlatformRegistry.global_platform_name)
         if not platform:
             raise Exception(f"Failed to create platform: {PlatformRegistry.global_platform_name}")
         return platform
@@ -194,6 +194,10 @@ class PlatformRegistry:
         """获取可用平台列表"""
         return list(self.platforms.keys())
 
-    def 
+    def set_global_platform_name(self, platform_name: str):
         """设置全局平台"""
         PlatformRegistry.global_platform_name = platform_name
+
+    def get_global_platform_name(self) -> str:
+        """获取全局平台名称"""
+        return PlatformRegistry.global_platform_name
```
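The registry's public surface is now `get_global_platform_registry()`, `get_global_platform()`, `set_global_platform_name()`, and a new `get_global_platform_name()`. A minimal sketch of how the pieces fit together, mirroring the `main.py` change above; the platform name is an example and the selected platform's API key must be configured for `get_global_platform()` to succeed:

```python
from jarvis.models.registry import PlatformRegistry

registry = PlatformRegistry.get_global_platform_registry()
registry.set_global_platform_name("ai8")            # what main.py does with -p / JARVIS_PLATFORM

platform = PlatformRegistry.get_global_platform()   # instantiates the selected platform
print(registry.get_global_platform_name())          # "ai8"
print(platform.name())                              # the platform's current model name
```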
{jarvis_ai_assistant-0.1.42 → jarvis_ai_assistant-0.1.44}/src/jarvis/tools/generator.py

```diff
@@ -1,3 +1,4 @@
+import os
 from typing import Dict, Any
 from pathlib import Path
 from jarvis.models.registry import PlatformRegistry
@@ -41,7 +42,11 @@ class ToolGeneratorTool:
 
     def _generate_tool_code(self, tool_name: str, class_name: str, description: str, parameters: Dict) -> str:
         """使用大模型生成工具代码"""
-
+        platform_name = os.getenv("JARVIS_CODEGEN_PLATFORM") or PlatformRegistry.get_global_platform_name()
+        model = PlatformRegistry.create_platform(platform_name)
+        model_name = os.getenv("JARVIS_CODEGEN_MODEL")
+        if model_name:
+            model.set_model_name(model_name)
 
         prompt = f"""请生成一个Python工具类的代码,要求如下,除了代码,不要输出任何内容:
 
```
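The tool generator no longer hard-wires its code-generation model: it reads `JARVIS_CODEGEN_PLATFORM` (falling back to the global platform name) and, optionally, `JARVIS_CODEGEN_MODEL`, which is forwarded to `set_model_name()`. A configuration sketch with example values:

```python
import os

# Read by ToolGeneratorTool._generate_tool_code() each time a tool is generated.
os.environ["JARVIS_CODEGEN_PLATFORM"] = "openai"        # example; defaults to the global platform name
os.environ["JARVIS_CODEGEN_MODEL"] = "deepseek-coder"   # optional; forwarded to set_model_name()
```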