jarvis-ai-assistant 0.1.134__py3-none-any.whl → 0.1.138__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.

Potentially problematic release.


This version of jarvis-ai-assistant might be problematic.

Files changed (78)
  1. jarvis/__init__.py +1 -1
  2. jarvis/jarvis_agent/__init__.py +201 -79
  3. jarvis/jarvis_agent/builtin_input_handler.py +16 -6
  4. jarvis/jarvis_agent/file_input_handler.py +9 -9
  5. jarvis/jarvis_agent/jarvis.py +10 -10
  6. jarvis/jarvis_agent/main.py +12 -11
  7. jarvis/jarvis_agent/output_handler.py +3 -3
  8. jarvis/jarvis_agent/patch.py +86 -62
  9. jarvis/jarvis_agent/shell_input_handler.py +5 -3
  10. jarvis/jarvis_code_agent/code_agent.py +134 -99
  11. jarvis/jarvis_code_agent/file_select.py +24 -24
  12. jarvis/jarvis_dev/main.py +45 -51
  13. jarvis/jarvis_git_details/__init__.py +0 -0
  14. jarvis/jarvis_git_details/main.py +179 -0
  15. jarvis/jarvis_git_squash/main.py +7 -7
  16. jarvis/jarvis_lsp/base.py +11 -11
  17. jarvis/jarvis_lsp/cpp.py +14 -14
  18. jarvis/jarvis_lsp/go.py +13 -13
  19. jarvis/jarvis_lsp/python.py +8 -8
  20. jarvis/jarvis_lsp/registry.py +21 -21
  21. jarvis/jarvis_lsp/rust.py +15 -15
  22. jarvis/jarvis_methodology/main.py +101 -0
  23. jarvis/jarvis_multi_agent/__init__.py +11 -11
  24. jarvis/jarvis_multi_agent/main.py +6 -6
  25. jarvis/jarvis_platform/__init__.py +1 -1
  26. jarvis/jarvis_platform/ai8.py +67 -89
  27. jarvis/jarvis_platform/base.py +14 -13
  28. jarvis/jarvis_platform/kimi.py +25 -28
  29. jarvis/jarvis_platform/ollama.py +24 -26
  30. jarvis/jarvis_platform/openai.py +15 -19
  31. jarvis/jarvis_platform/oyi.py +48 -50
  32. jarvis/jarvis_platform/registry.py +27 -28
  33. jarvis/jarvis_platform/yuanbao.py +38 -42
  34. jarvis/jarvis_platform_manager/main.py +81 -81
  35. jarvis/jarvis_platform_manager/openai_test.py +21 -21
  36. jarvis/jarvis_rag/file_processors.py +18 -18
  37. jarvis/jarvis_rag/main.py +261 -277
  38. jarvis/jarvis_smart_shell/main.py +12 -12
  39. jarvis/jarvis_tools/ask_codebase.py +28 -28
  40. jarvis/jarvis_tools/ask_user.py +8 -8
  41. jarvis/jarvis_tools/base.py +4 -4
  42. jarvis/jarvis_tools/chdir.py +9 -9
  43. jarvis/jarvis_tools/code_review.py +19 -19
  44. jarvis/jarvis_tools/create_code_agent.py +15 -15
  45. jarvis/jarvis_tools/execute_python_script.py +3 -3
  46. jarvis/jarvis_tools/execute_shell.py +11 -11
  47. jarvis/jarvis_tools/execute_shell_script.py +3 -3
  48. jarvis/jarvis_tools/file_analyzer.py +29 -29
  49. jarvis/jarvis_tools/file_operation.py +22 -20
  50. jarvis/jarvis_tools/find_caller.py +25 -25
  51. jarvis/jarvis_tools/find_methodolopy.py +65 -0
  52. jarvis/jarvis_tools/find_symbol.py +24 -24
  53. jarvis/jarvis_tools/function_analyzer.py +27 -27
  54. jarvis/jarvis_tools/git_commiter.py +9 -9
  55. jarvis/jarvis_tools/lsp_get_diagnostics.py +19 -19
  56. jarvis/jarvis_tools/methodology.py +23 -62
  57. jarvis/jarvis_tools/project_analyzer.py +29 -33
  58. jarvis/jarvis_tools/rag.py +15 -15
  59. jarvis/jarvis_tools/read_code.py +24 -22
  60. jarvis/jarvis_tools/read_webpage.py +31 -31
  61. jarvis/jarvis_tools/registry.py +72 -52
  62. jarvis/jarvis_tools/tool_generator.py +18 -18
  63. jarvis/jarvis_utils/config.py +23 -23
  64. jarvis/jarvis_utils/embedding.py +83 -83
  65. jarvis/jarvis_utils/git_utils.py +20 -20
  66. jarvis/jarvis_utils/globals.py +18 -6
  67. jarvis/jarvis_utils/input.py +10 -9
  68. jarvis/jarvis_utils/methodology.py +140 -136
  69. jarvis/jarvis_utils/output.py +11 -11
  70. jarvis/jarvis_utils/utils.py +22 -70
  71. {jarvis_ai_assistant-0.1.134.dist-info → jarvis_ai_assistant-0.1.138.dist-info}/METADATA +1 -1
  72. jarvis_ai_assistant-0.1.138.dist-info/RECORD +85 -0
  73. {jarvis_ai_assistant-0.1.134.dist-info → jarvis_ai_assistant-0.1.138.dist-info}/entry_points.txt +2 -0
  74. jarvis/jarvis_tools/select_code_files.py +0 -62
  75. jarvis_ai_assistant-0.1.134.dist-info/RECORD +0 -82
  76. {jarvis_ai_assistant-0.1.134.dist-info → jarvis_ai_assistant-0.1.138.dist-info}/LICENSE +0 -0
  77. {jarvis_ai_assistant-0.1.134.dist-info → jarvis_ai_assistant-0.1.138.dist-info}/WHEEL +0 -0
  78. {jarvis_ai_assistant-0.1.134.dist-info → jarvis_ai_assistant-0.1.138.dist-info}/top_level.txt +0 -0
--- a/jarvis/jarvis_platform/ai8.py
+++ b/jarvis/jarvis_platform/ai8.py
@@ -9,7 +9,7 @@ from jarvis.jarvis_utils.output import OutputType, PrettyOutput
 
 class AI8Model(BasePlatform):
     """AI8 model implementation"""
-
+
     platform_name = "ai8"
     BASE_URL = "https://ai8.rcouyi.com"
 
@@ -17,7 +17,7 @@ class AI8Model(BasePlatform):
         """获取模型列表"""
         self.get_available_models()
         return [(name,info['desc']) for name,info in self.models.items()]
-
+
     def __init__(self):
         """Initialize model"""
         super().__init__()
@@ -28,48 +28,55 @@ class AI8Model(BasePlatform):
         self.token = os.getenv("AI8_API_KEY")
         if not self.token:
             PrettyOutput.print("未设置 AI8_API_KEY", OutputType.WARNING)
-
-
+
+        self.headers = {
+            'Authorization': self.token,
+            'Content-Type': 'application/json',
+            'Accept': 'application/json, text/plain, */*',
+            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36',
+            'X-APP-VERSION': '2.3.0',
+            'Origin': self.BASE_URL,
+            'Referer': f'{self.BASE_URL}/chat?_userMenuKey=chat',
+            'Sec-Fetch-Site': 'same-origin',
+            'Sec-Fetch-Mode': 'cors',
+            'Sec-Fetch-Dest': 'empty',
+        }
+
         self.model_name = os.getenv("JARVIS_MODEL") or "deepseek-chat"
         if self.model_name not in self.get_available_models():
             PrettyOutput.print(f"警告: 选择的模型 {self.model_name} 不在可用列表中", OutputType.WARNING)
-
+
+
+
 
     def set_model_name(self, model_name: str):
         """Set model name"""
 
         self.model_name = model_name
-
+
     def create_conversation(self) -> bool:
         """Create a new conversation"""
         try:
-            headers = {
-                'Authorization': self.token,
-                'Content-Type': 'application/json',
-                'Accept': 'application/json, text/plain, */*',
-                'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36',
-                'X-APP-VERSION': '2.2.2',
-                'Origin': self.BASE_URL,
-                'Referer': f'{self.BASE_URL}/chat?_userMenuKey=chat'
-            }
-
+
+
             # 1. 创建会话
             response = requests.post(
                 f"{self.BASE_URL}/api/chat/session",
-                headers=headers
+                headers=self.headers,
+                json={}
             )
-
+
             if response.status_code != 200:
                 PrettyOutput.print(f"创建会话失败: {response.status_code}", OutputType.WARNING)
                 return False
-
+
             data = response.json()
             if data['code'] != 0:
                 PrettyOutput.print(f"创建会话失败: {data.get('msg', '未知错误')}", OutputType.WARNING)
                 return False
-
+
             self.conversation = data['data']
-
+
             # 2. 更新会话设置
             session_data = {
                 **self.conversation,
@@ -80,13 +87,13 @@ class AI8Model(BasePlatform):
                 "localPlugins": None,
                 "useAppId": 0
             }
-
+
             response = requests.put(
                 f"{self.BASE_URL}/api/chat/session/{self.conversation['id']}",
-                headers=headers,
+                headers=self.headers,
                 json=session_data
             )
-
+
             if response.status_code == 200:
                 data = response.json()
                 if data['code'] == 0:
@@ -98,53 +105,45 @@ class AI8Model(BasePlatform):
             else:
                 PrettyOutput.print(f"更新会话设置失败: {response.status_code}", OutputType.WARNING)
                 return False
-
+
         except Exception as e:
             PrettyOutput.print(f"创建会话失败: {str(e)}", OutputType.ERROR)
             return False
-
+
     def set_system_message(self, message: str):
         """Set system message"""
         self.system_message = message
-
+
     def chat(self, message: str) -> str:
         """Execute conversation"""
         try:
-
+
             # 确保有会话ID
             if not self.conversation:
                 if not self.create_conversation():
                     raise Exception("Failed to create conversation")
-
-            headers = {
-                'Authorization': self.token,
-                'Content-Type': 'application/json',
-                'Accept': 'text/event-stream',
-                'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36',
-                'X-APP-VERSION': '2.2.2',
-                'Origin': self.BASE_URL,
-                'Referer': f'{self.BASE_URL}/chat?_userMenuKey=chat'
-            }
-
+
+
+
             payload = {
                 "text": message,
                 "sessionId": self.conversation['id'] if self.conversation else None,
                 "files": []
             }
-
-
+
+
             response = requests.post(
                 f"{self.BASE_URL}/api/chat/completions",
-                headers=headers,
+                headers=self.headers,
                 json=payload,
                 stream=True
             )
-
+
             if response.status_code != 200:
                 error_msg = f"Failed to chat: {response.status_code} {response.text}"
                 PrettyOutput.print(error_msg, OutputType.WARNING)
                 raise Exception(error_msg)
-
+
             # 处理流式响应
             full_response = ""
             for line in response.iter_lines():
@@ -162,49 +161,37 @@ class AI8Model(BasePlatform):
 
                 except json.JSONDecodeError:
                     continue
-
+
             if not self.suppress_output:
                 PrettyOutput.print_stream_end()
 
             return full_response
-
+
         except Exception as e:
             PrettyOutput.print(f"对话异常: {str(e)}", OutputType.ERROR)
             raise e
-
+
     def name(self) -> str:
         """Return model name"""
         return self.model_name
-
-    def reset(self):
-        """Reset model state"""
-        self.conversation = None
-
+
+
     def delete_chat(self) -> bool:
         """Delete current chat session"""
         try:
             if not self.conversation:
                 return True
-
-            headers = {
-                'Authorization': self.token,
-                'Content-Type': 'application/json',
-                'Accept': 'application/json, text/plain, */*',
-                'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36',
-                'X-APP-VERSION': '2.2.2',
-                'Origin': self.BASE_URL,
-                'Referer': f'{self.BASE_URL}/chat?_userMenuKey=chat'
-            }
-
+
+
             response = requests.delete(
                 f"{self.BASE_URL}/api/chat/session/{self.conversation['id']}",
-                headers=headers
+                headers=self.headers
             )
-
+
             if response.status_code == 200:
                 data = response.json()
                 if data['code'] == 0:
-                    self.reset()
+                    self.conversation = None
                     return True
                 else:
                     error_msg = f"删除会话失败: {data.get('msg', '未知错误')}"
@@ -214,54 +201,45 @@ class AI8Model(BasePlatform):
                 error_msg = f"删除会话请求失败: {response.status_code}"
                 PrettyOutput.print(error_msg, OutputType.WARNING)
                 return False
-
+
         except Exception as e:
             PrettyOutput.print(f"删除会话失败: {str(e)}", OutputType.ERROR)
             return False
-
+
     def get_available_models(self) -> List[str]:
         """Get available model list
-
+
         Returns:
             List[str]: Available model name list
         """
         try:
             if self.models:
                 return list(self.models.keys())
-
-            headers = {
-                'Content-Type': 'application/json',
-                'Accept': 'application/json, text/plain, */*',
-                'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36',
-                'X-APP-VERSION': '2.2.2',
-                'Origin': self.BASE_URL,
-                'Referer': f'{self.BASE_URL}/chat?_userMenuKey=chat'
-            }
-
+
             response = requests.get(
                 f"{self.BASE_URL}/api/chat/tmpl",
-                headers=headers
+                headers=self.headers
             )
-
+
             if response.status_code != 200:
                 PrettyOutput.print(f"获取模型列表失败: {response.status_code}", OutputType.WARNING)
                 return []
-
+
             data = response.json()
             if data['code'] != 0:
                 PrettyOutput.print(f"获取模型列表失败: {data.get('msg', '未知错误')}", OutputType.WARNING)
                 return []
-
+
             # 保存模型信息
             self.models = {
-                model['value']: model
+                model['value']: model
                 for model in data['data']['models']
             }
 
             for model in self.models.values():
                 # 添加标签
                 model_str = f"{model['label']}"
-
+
                 # 添加特性标记
                 features = []
                 if model['attr'].get('multimodal'):
@@ -279,12 +257,12 @@ class AI8Model(BasePlatform):
                     model_str += f" - {model['attr']['note']}"
                 if features:
                     model_str += f" [{'|'.join(features)}]"
-
+
                 model['desc'] = model_str
-
+
             return list(self.models.keys())
-
+
         except Exception as e:
             PrettyOutput.print(f"获取模型列表失败: {str(e)}", OutputType.ERROR)
             return []
-
+
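The main refactor in ai8.py is that the request headers are now built once in __init__ and stored as self.headers (with X-APP-VERSION bumped to 2.3.0 and Sec-Fetch-* fields added), so create_conversation, chat, delete_chat, and get_available_models all pass headers=self.headers instead of rebuilding the same dict. The sketch below only illustrates that pattern; the class name, the placeholder URL, and the raise_for_status handling are not taken from the package.

```python
import os

import requests


class SessionClient:
    """Illustrative client: build the headers once, reuse them for every call."""

    BASE_URL = "https://example.invalid"  # placeholder, not the real AI8 endpoint

    def __init__(self) -> None:
        token = os.getenv("AI8_API_KEY", "")
        # One shared header dict instead of a fresh copy in every method.
        self.headers = {
            "Authorization": token,
            "Content-Type": "application/json",
            "Accept": "application/json, text/plain, */*",
        }

    def create_session(self) -> dict:
        # Every request reuses self.headers, so a client version bump or a new
        # Sec-Fetch-* field only has to be edited in one place.
        resp = requests.post(f"{self.BASE_URL}/api/chat/session",
                             headers=self.headers, json={})
        resp.raise_for_status()
        return resp.json()
```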
--- a/jarvis/jarvis_platform/base.py
+++ b/jarvis/jarvis_platform/base.py
@@ -1,13 +1,14 @@
 from abc import ABC, abstractmethod
 import re
 from typing import Dict, List, Tuple
+from jarvis.jarvis_utils.globals import clear_read_file_record
 from jarvis.jarvis_utils.output import OutputType, PrettyOutput
 from jarvis.jarvis_utils.utils import ct, ot, get_context_token_count, while_success, while_true
 
 
 class BasePlatform(ABC):
     """Base class for large language models"""
-
+
     def __init__(self):
         """Initialize model"""
         self.suppress_output = True # 添加输出控制标志
@@ -20,7 +21,12 @@ class BasePlatform(ABC):
     def set_model_name(self, model_name: str):
         """Set model name"""
         raise NotImplementedError("set_model_name is not implemented")
-
+
+    def reset(self):
+        """Reset model"""
+        clear_read_file_record()
+        self.delete_chat()
+
     @abstractmethod
     def chat(self, message: str) -> str:
         """Execute conversation"""
@@ -31,11 +37,11 @@ class BasePlatform(ABC):
             import time
             start_time = time.time()
             response = self.chat(message)
-
+
             end_time = time.time()
             duration = end_time - start_time
             char_count = len(response)
-
+
             # Calculate token count and tokens per second
             try:
                 token_count = get_context_token_count(response)
@@ -55,29 +61,24 @@ class BasePlatform(ABC):
             # Keep original think tag handling
             response = re.sub(ot("think")+r'.*?'+ct("think"), '', response, flags=re.DOTALL)
             return response
-
+
         return while_true(lambda: while_success(lambda: _chat(), 5), 5)
 
-    @abstractmethod
-    def reset(self):
-        """Reset model"""
-        raise NotImplementedError("reset is not implemented")
-
     @abstractmethod
     def name(self) -> str:
         """Model name"""
         raise NotImplementedError("name is not implemented")
-
+
     @abstractmethod
     def delete_chat(self)->bool:
         """Delete chat"""
         raise NotImplementedError("delete_chat is not implemented")
-
+
     @abstractmethod
     def set_system_message(self, message: str):
         """Set system message"""
         raise NotImplementedError("set_system_message is not implemented")
-
+
     @abstractmethod
     def get_model_list(self) -> List[Tuple[str, str]]:
         """Get model list"""
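In base.py, reset() stops being an abstract method that every platform has to implement and becomes a concrete method on BasePlatform that clears the read-file record (via the newly imported clear_read_file_record from jarvis.jarvis_utils.globals) and then calls delete_chat(). Below is a minimal sketch of that template-method shape; the class names and the _clear_local_state stand-in are illustrative only.

```python
from abc import ABC, abstractmethod


class Platform(ABC):
    """Sketch of the new BasePlatform shape: reset() is concrete, not abstract."""

    def reset(self) -> None:
        # Shared bookkeeping is handled once in the base class...
        self._clear_local_state()
        # ...then the platform-specific teardown is delegated.
        self.delete_chat()

    def _clear_local_state(self) -> None:
        # Stand-in for clear_read_file_record(); illustrative only.
        pass

    @abstractmethod
    def delete_chat(self) -> bool:
        """Each platform still implements its own session teardown."""
        raise NotImplementedError


class DummyPlatform(Platform):
    def delete_chat(self) -> bool:
        return True


DummyPlatform().reset()  # runs the shared reset path, then the subclass teardown
```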
--- a/jarvis/jarvis_platform/kimi.py
+++ b/jarvis/jarvis_platform/kimi.py
@@ -15,7 +15,7 @@ class KimiModel(BasePlatform):
     def get_model_list(self) -> List[Tuple[str, str]]:
         """Get model list"""
         return [("kimi", "Based on the web Kimi, free interface")]
-
+
     def __init__(self):
         """
         Initialize Kimi model
@@ -28,7 +28,7 @@ class KimiModel(BasePlatform):
                 "需要设置 KIMI_API_KEY 才能使用 Jarvis。请按照以下步骤操作:\n"
                 "1. 获取 Kimi API Key:\n"
                 " • 访问 Kimi AI 平台: https://kimi.moonshot.cn\n"
-                " • 登录您的账户\n"
+                " • 登录您的账户\n"
                 " • 打开浏览器开发者工具 (F12 或右键 -> 检查)\n"
                 " • 切换到网络标签\n"
                 " • 发送任意消息\n"
@@ -84,9 +84,9 @@ class KimiModel(BasePlatform):
             raise Exception("Failed to create chat session")
 
         url = f"https://kimi.moonshot.cn/api/chat/{self.chat_id}/completion/stream"
-
-
-
+
+
+
         payload = {
             "messages": [{"role": "user", "content": message}],
             "use_search": True,
@@ -106,23 +106,23 @@ class KimiModel(BasePlatform):
         try:
             response = while_success(lambda: requests.post(url, headers=headers, json=payload, stream=True), sleep_time=5)
             full_response = ""
-
+
             # 收集搜索和引用结果
             search_results = []
             ref_sources = []
-
+
             for line in response.iter_lines():
                 if not line:
                     continue
-
+
                 line = line.decode('utf-8')
                 if not line.startswith("data: "):
                     continue
-
+
                 try:
                     data = json.loads(line[6:])
                     event = data.get("event")
-
+
                     if event == "cmpl":
                         # 处理补全文本
                         text = data.get("text", "")
@@ -130,7 +130,7 @@ class KimiModel(BasePlatform):
                         if not self.suppress_output:
                             PrettyOutput.print_stream(text)
                         full_response += text
-
+
                     elif event == "search_plus":
                         # 收集搜索结果
                         msg = data.get("msg", {})
@@ -143,7 +143,7 @@ class KimiModel(BasePlatform):
                             "type": msg.get("type", ""),
                             "url": msg.get("url", "")
                         })
-
+
                     elif event == "ref_docs":
                         # 收集引用来源
                         ref_cards = data.get("ref_cards", [])
@@ -159,13 +159,13 @@ class KimiModel(BasePlatform):
                             "rag_segments": card.get("rag_segments", []),
                             "origin": card.get("origin", {})
                         })
-
+
                 except json.JSONDecodeError:
                     continue
-
+
             if not self.suppress_output:
                 PrettyOutput.print_stream_end()
-
+
 
             # 显示搜索结果摘要
             if search_results and not self.suppress_output:
@@ -180,7 +180,7 @@ class KimiModel(BasePlatform):
                     output.append(f" 链接: {result['url']}")
                     output.append("")
                 PrettyOutput.print("\n".join(output), OutputType.PROGRESS)
-
+
             # 显示引用来源
             if ref_sources and not self.suppress_output:
                 output = ["引用来源:"]
@@ -189,7 +189,7 @@ class KimiModel(BasePlatform):
                     output.append(f" 链接: {source['url']}")
                     if source['abstract']:
                         output.append(f" 摘要: {source['abstract']}")
-
+
                     # 显示相关段落
                     if source['rag_segments']:
                         output.append(" 相关段落:")
@@ -197,18 +197,18 @@ class KimiModel(BasePlatform):
                             text = segment.get('text', '').replace('\n', ' ').strip()
                             if text:
                                 output.append(f" - {text}")
-
+
                     # 显示原文引用
                     origin = source['origin']
                     if origin:
                         text = origin.get('text', '')
                         if text:
                             output.append(f" 原文: {text}")
-
+
                     output.append("")
 
                 PrettyOutput.print("\n".join(output), OutputType.PROGRESS)
-
+
             return full_response
 
         except Exception as e:
@@ -218,17 +218,19 @@ class KimiModel(BasePlatform):
         """Delete current session"""
         if not self.chat_id:
             return True # 如果没有会话ID,视为删除成功
-
+
         url = f"https://kimi.moonshot.cn/api/chat/{self.chat_id}"
         headers = {
             'Authorization': self.auth_header,
             'Content-Type': 'application/json'
         }
-
+
         try:
             response = while_success(lambda: requests.delete(url, headers=headers), sleep_time=5)
             if response.status_code == 200:
-                self.reset()
+                self.chat_id = ""
+                self.uploaded_files = []
+                self.first_chat = True # 重置first_chat标记
                 return True
             else:
                 PrettyOutput.print(f"删除会话失败: HTTP {response.status_code}", OutputType.WARNING)
@@ -237,11 +239,6 @@ class KimiModel(BasePlatform):
             PrettyOutput.print(f"删除会话时发生错误: {str(e)}", OutputType.ERROR)
             return False
 
-    def reset(self):
-        """Reset chat"""
-        self.chat_id = ""
-        self.uploaded_files = []
-        self.first_chat = True # 重置first_chat标记
 
     def name(self) -> str:
         """Model name"""
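Besides folding the old reset() into delete_chat() (which now clears chat_id, uploaded_files, and first_chat inline after a successful HTTP delete), the diff keeps Kimi's streaming loop: read the response line by line, keep only "data: " lines, JSON-decode them, and dispatch on the event field. The standalone sketch below reproduces only that loop shape under the assumption that the endpoint streams server-sent events whose "cmpl" events carry the completion text, as in the code above; the function name and reduced error handling are not from the package.

```python
import json

import requests


def collect_completion(url: str, headers: dict, payload: dict) -> str:
    """Gather completion text from a server-sent-event stream.

    Minimal sketch of the loop in KimiModel.chat(): keep only "data: " lines,
    JSON-decode them, and append text from "cmpl" events. The URL, headers,
    and payload shape are whatever the caller supplies.
    """
    full_response = ""
    resp = requests.post(url, headers=headers, json=payload, stream=True)
    for raw in resp.iter_lines():
        if not raw:
            continue
        line = raw.decode("utf-8")
        if not line.startswith("data: "):
            continue  # skip keep-alives and other noise
        try:
            data = json.loads(line[6:])
        except json.JSONDecodeError:
            continue
        if data.get("event") == "cmpl":
            full_response += data.get("text", "")
    return full_response
```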
--- a/jarvis/jarvis_platform/ollama.py
+++ b/jarvis/jarvis_platform/ollama.py
@@ -9,25 +9,25 @@ import ollama
 
 class OllamaPlatform(BasePlatform):
     """Ollama platform implementation"""
-
+
     platform_name = "ollama"
-
+
     def __init__(self):
         """Initialize model"""
         super().__init__()
-
+
         # Check environment variables and provide help information
         self.api_base = os.getenv("OLLAMA_API_BASE", "http://localhost:11434")
         self.model_name = os.getenv("JARVIS_MODEL") or "deepseek-r1:1.5b"
-
+
         # Setup client based on availability
         self.client = None
         self.client = ollama.Client(host=self.api_base)
-
+
         # Check if Ollama service is available
         try:
             available_models = self._get_available_models()
-
+
             if not available_models:
                 message = (
                     "需要先下载 Ollama 模型才能使用:\n"
@@ -37,7 +37,7 @@ class OllamaPlatform(BasePlatform):
                 )
                 PrettyOutput.print(message, OutputType.INFO)
                 PrettyOutput.print("Ollama 没有可用的模型", OutputType.WARNING)
-
+
         except Exception as e:
             message = (
                 f"Ollama 服务未启动或无法连接: {str(e)}\n"
@@ -47,8 +47,8 @@ class OllamaPlatform(BasePlatform):
                 "3. 正确配置服务地址 (默认: http://localhost:11434)"
             )
             PrettyOutput.print(message, OutputType.WARNING)
-
-
+
+
         self.messages = []
         self.system_message = ""
 
@@ -81,7 +81,7 @@ class OllamaPlatform(BasePlatform):
             messages.append({"role": "user", "content": message})
 
             return self._chat_with_package(messages)
-
+
         except Exception as e:
             PrettyOutput.print(f"对话失败: {str(e)}", OutputType.ERROR)
             raise Exception(f"Chat failed: {str(e)}")
@@ -91,14 +91,14 @@ class OllamaPlatform(BasePlatform):
         # The client should not be None here due to the check in the chat method
         if not self.client:
             raise ValueError("Ollama client is not initialized")
-
+
         # Use ollama-python's streaming API
         stream = self.client.chat(
             model=self.model_name,
             messages=messages,
             stream=True
         )
-
+
         # Process the streaming response
         full_response = ""
         for chunk in stream:
@@ -107,33 +107,31 @@ class OllamaPlatform(BasePlatform):
             if not self.suppress_output:
                 PrettyOutput.print_stream(text)
             full_response += text
-
+
         if not self.suppress_output:
             PrettyOutput.print_stream_end()
-
+
         # Update message history
         self.messages.append({"role": "user", "content": messages[-1]["content"]})
         self.messages.append({"role": "assistant", "content": full_response})
-
+
         return full_response
 
-
-    def reset(self):
-        """Reset model state"""
-        self.messages = []
-        if self.system_message:
-            self.messages.append({"role": "system", "content": self.system_message})
-
+
    def name(self) -> str:
         """Return model name"""
         return self.model_name
-
+
     def delete_chat(self) -> bool:
         """Delete current chat session"""
-        self.reset()
+        self.messages = []
+        if self.system_message:
+            self.messages.append({"role": "system", "content": self.system_message})
         return True
-
+
     def set_system_message(self, message: str):
         """Set system message"""
         self.system_message = message
-        self.reset() # 重置会话以应用新的系统消息
+        self.messages = []
+        if self.system_message:
+            self.messages.append({"role": "system", "content": self.system_message})
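With the abstract reset() gone, the Ollama platform rebuilds its history directly: both delete_chat() and set_system_message() now reset self.messages to an empty list and re-seed it with the system prompt when one is configured. The small helper below isolates that reset logic for illustration; it is not part of the package.

```python
from typing import Dict, List


def rebuild_history(system_message: str) -> List[Dict[str, str]]:
    """Illustrative helper: the reset logic that delete_chat() and
    set_system_message() now perform inline."""
    messages: List[Dict[str, str]] = []
    if system_message:
        # Re-seed the fresh history with the configured system prompt.
        messages.append({"role": "system", "content": system_message})
    return messages


# Example: after changing the system prompt, the history starts fresh.
history = rebuild_history("You are a helpful assistant.")
assert history[0]["role"] == "system"
```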