jarvis-ai-assistant 0.1.85__py3-none-any.whl → 0.1.87__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of jarvis-ai-assistant might be problematic.

jarvis/tools/registry.py CHANGED
@@ -7,7 +7,7 @@ from typing import Any, Callable, Dict, List, Optional
 
 from jarvis.models.registry import PlatformRegistry
 from jarvis.tools.base import Tool
-from jarvis.utils import OutputType, PrettyOutput
+from jarvis.utils import OutputType, PrettyOutput, get_max_context_length
 
 
 class ToolRegistry:
@@ -19,7 +19,8 @@ class ToolRegistry:
         # 加载内置工具和外部工具
         self._load_builtin_tools()
         self._load_external_tools()
-        self.max_context_length = int(os.getenv('JARVIS_MAX_CONTEXT_LENGTH', '65536')) # 默认30k
+        # 确保 max_context_length 是整数
+        self.max_context_length = int(get_max_context_length() * 0.8)
 
     @staticmethod
     def get_global_tool_registry():
@@ -176,15 +177,16 @@ class ToolRegistry:
             PrettyOutput.section("执行成功", OutputType.SUCCESS)
 
             # 如果输出超过4k字符,使用大模型总结
-            if len(output) > 4096:
+            if len(output) > self.max_context_length:
                 try:
                     PrettyOutput.print("输出较长,正在总结...", OutputType.PROGRESS)
                     model = PlatformRegistry.get_global_platform_registry().get_normal_platform()
 
-                    # 如果输出超过30k,只取最后30k字符
-                    if len(output) > self.max_context_length:
-                        output_to_summarize = output[-self.max_context_length:]
-                        truncation_notice = "\n(注意: 由于输出过长,仅总结最后65536字符)"
+                    # 如果输出超过最大上下文长度,只取最后部分
+                    max_len = self.max_context_length
+                    if len(output) > max_len:
+                        output_to_summarize = output[-max_len:]
+                        truncation_notice = f"\n(注意: 由于输出过长,仅总结最后23,252字符)"
                     else:
                         output_to_summarize = output
                         truncation_notice = ""
@@ -209,7 +211,7 @@ ToolRegistry:
 --- 总结结束 ---"""
 
                 except Exception as e:
-                    PrettyOutput.print(f"总结失败: {str(e)}", OutputType.WARNING)
+                    PrettyOutput.print(f"总结失败: {str(e)}", OutputType.ERROR)
                     output = f"输出较长 ({len(output)} 字符),建议查看原始输出。\n前300字符预览:\n{output[:300]}..."
 
             else:
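
The registry changes replace the hard-coded 4096-character summarization trigger with a limit derived from the configured context length. A minimal sketch of the resulting threshold, assuming the default JARVIS_MAX_CONTEXT_LENGTH of 131072 from the new helper in jarvis/utils.py (the standalone variables below are illustrative, not part of the package):

```python
import os

# Mirrors get_max_context_length() added in jarvis/utils.py: 131072 (128k) by default.
max_context = int(os.getenv('JARVIS_MAX_CONTEXT_LENGTH', '131072'))

# ToolRegistry.__init__ now keeps 80% of that value as its limit.
max_context_length = int(max_context * 0.8)     # 104857 with the default setting

output = "x" * 200_000                          # hypothetical long tool output
if len(output) > max_context_length:
    # Only the trailing max_context_length characters are sent to the model for summarization.
    output_to_summarize = output[-max_context_length:]
```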
jarvis/utils.py CHANGED
@@ -10,6 +10,8 @@ from prompt_toolkit import PromptSession
 from prompt_toolkit.styles import Style as PromptStyle
 from prompt_toolkit.formatted_text import FormattedText
 from sentence_transformers import SentenceTransformer
+from transformers import AutoModelForSequenceClassification, AutoTokenizer
+import torch
 
 # 初始化colorama
 colorama.init()
@@ -211,7 +213,7 @@ def find_git_root(dir="."):
 
 def load_embedding_model():
     os.environ["TOKENIZERS_PARALLELISM"] = "false"
-    model_name = os.environ.get("JARVIS_EMBEDDING_MODEL", "BAAI/bge-large-zh-v1.5")
+    model_name = "BAAI/bge-large-zh-v1.5"
     PrettyOutput.print(f"正在加载嵌入模型: {model_name}...", OutputType.INFO)
     try:
         # 首先尝试离线加载
@@ -232,4 +234,48 @@ def load_embedding_model():
         )
         PrettyOutput.print("模型下载并加载成功", OutputType.SUCCESS)
 
-    return embedding_model
+    return embedding_model
+
+def load_rerank_model():
+    """加载重排序模型"""
+    model_name = "BAAI/bge-reranker-v2-m3"
+    PrettyOutput.print(f"正在加载重排序模型: {model_name}...", OutputType.INFO)
+
+    try:
+        # 首先尝试离线加载
+        tokenizer = AutoTokenizer.from_pretrained(
+            model_name,
+            local_files_only=True,
+            cache_dir=os.path.expanduser("~/.cache/huggingface/hub")
+        )
+        model = AutoModelForSequenceClassification.from_pretrained(
+            model_name,
+            local_files_only=True,
+            cache_dir=os.path.expanduser("~/.cache/huggingface/hub")
+        )
+        PrettyOutput.print("使用本地缓存加载模型成功", OutputType.SUCCESS)
+    except Exception as local_error:
+        PrettyOutput.print(f"本地加载失败,尝试在线下载: {str(local_error)}", OutputType.WARNING)
+        # 如果离线加载失败,尝试在线下载
+        tokenizer = AutoTokenizer.from_pretrained(
+            model_name,
+            cache_dir=os.path.expanduser("~/.cache/huggingface/hub")
+        )
+        model = AutoModelForSequenceClassification.from_pretrained(
+            model_name,
+            cache_dir=os.path.expanduser("~/.cache/huggingface/hub")
+        )
+        PrettyOutput.print("模型下载并加载成功", OutputType.SUCCESS)
+
+    # 如果有 GPU 就使用 GPU
+    if torch.cuda.is_available():
+        model = model.cuda()
+    model.eval()
+
+    return model, tokenizer
+
+def get_max_context_length():
+    return int(os.getenv('JARVIS_MAX_CONTEXT_LENGTH', '131072')) # 默认128k
+
+def get_thread_count():
+    return int(os.getenv('JARVIS_THREAD_COUNT', '1'))
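
The new load_rerank_model() returns a (model, tokenizer) pair for BAAI/bge-reranker-v2-m3. A minimal usage sketch, assuming the standard Hugging Face cross-encoder scoring pattern for this reranker (the query/passage strings are made up for illustration):

```python
import torch
from jarvis.utils import load_rerank_model

model, tokenizer = load_rerank_model()

# Each pair is (query, candidate passage); higher logits mean higher relevance.
pairs = [
    ["how is the context length configured?", "JARVIS_MAX_CONTEXT_LENGTH sets the maximum context length."],
    ["how is the context length configured?", "colorama is initialized when the module is imported."],
]

with torch.no_grad():
    inputs = tokenizer(pairs, padding=True, truncation=True, max_length=512, return_tensors="pt")
    inputs = {k: v.to(model.device) for k, v in inputs.items()}
    scores = model(**inputs).logits.view(-1).float()

print(scores.tolist())  # the first pair should score higher than the second
```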
jarvis_ai_assistant-0.1.85.dist-info/METADATA → jarvis_ai_assistant-0.1.87.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.2
 Name: jarvis-ai-assistant
-Version: 0.1.85
+Version: 0.1.87
 Summary: Jarvis: An AI assistant that uses tools to interact with the system
 Home-page: https://github.com/skyfireitdiy/Jarvis
 Author: skyfire
@@ -134,15 +134,13 @@ Jarvis supports configuration through environment variables that can be set in t
 | JARVIS_CODEGEN_MODEL | Model name for code generation | Same as JARVIS_MODEL | No |
 | JARVIS_CHEAP_PLATFORM | AI platform for cheap operations | Same as JARVIS_PLATFORM | No |
 | JARVIS_CHEAP_MODEL | Model name for cheap operations | Same as JARVIS_MODEL | No |
-| JARVIS_EMBEDDING_MODEL | Embedding model for code analysis | BAAI/bge-large-zh-v1.5 | No |
 | OPENAI_API_KEY | API key for OpenAI platform | - | Required for OpenAI |
 | OPENAI_API_BASE | Base URL for OpenAI API | https://api.deepseek.com | No |
 | OPENAI_MODEL_NAME | Model name for OpenAI | deepseek-chat | No |
 | AI8_API_KEY | API key for AI8 platform | - | Required for AI8 |
-| AI8_MODEL | Model name for AI8 platform | deepseek-chat | No |
 | KIMI_API_KEY | API key for Kimi platform | - | Required for Kimi |
 | OYI_API_KEY | API key for OYI platform | - | Required for OYI |
-| OYI_MODEL | Model name for OYI platform | deepseek-chat | No |
+| OLLAMA_API_BASE | Base URL for Ollama API | http://localhost:11434 | No |
 
 
 ## 🎯 Usage
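
The configuration table now documents OLLAMA_API_BASE alongside the new jarvis/models/ollama.py adapter. A hypothetical sketch of resolving it with the documented default (the function name is illustrative and not taken from the package):

```python
import os

def resolve_ollama_api_base() -> str:
    # Documented default from the table above; override by exporting OLLAMA_API_BASE.
    return os.getenv("OLLAMA_API_BASE", "http://localhost:11434")

print(resolve_ollama_api_base())
```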
jarvis_ai_assistant-0.1.85.dist-info/RECORD → jarvis_ai_assistant-0.1.87.dist-info/RECORD CHANGED
@@ -1,24 +1,25 @@
-jarvis/__init__.py,sha256=CSCMvkP7vytslNiqC8OQkG-M3_75PziRYYuzcVBVfp0,50
-jarvis/agent.py,sha256=LfWTMp7lyrMebby56up-58VuxK5JSnfq0cyJDWG7HBw,19069
+jarvis/__init__.py,sha256=Qu_hhtdNbd4dzRrFbg7RZnEUuZ1vjkIa45ZGmskpboY,50
+jarvis/agent.py,sha256=_qh4mSojAgClOEz5pTiRIfRJU-5_3QGzBAU09roCjtk,19095
 jarvis/main.py,sha256=ksZkJzqc4oow6wB-7QbGJLejGblrbZtRI3fdciS5DS4,5455
-jarvis/utils.py,sha256=jvo6ylvrTaSmXWcYY0qTTf14TwCkAhPsCUuIl5WHEuw,8640
+jarvis/utils.py,sha256=Y5zig7AgIzdWHF31qHaMUziezythfjVKjxFRtMzd1m4,10357
 jarvis/jarvis_codebase/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-jarvis/jarvis_codebase/main.py,sha256=tefwhPXVcUoV7ZIhF4AiIBsZ9sJRCkjI_i5lyusbZOI,24972
+jarvis/jarvis_codebase/main.py,sha256=nlh0zkJhJfA8jaENV8wNo9OAXCeEKX1lbGzKOBjkzV4,26518
 jarvis/jarvis_coder/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-jarvis/jarvis_coder/main.py,sha256=TxtFCzA5SJSorHtHX5_V3qQeJsoFMgVdrwxLJ9GnPw8,23619
+jarvis/jarvis_coder/main.py,sha256=Rdt9w_uGamDHN-jjZXtHdUNGHa0q2fbGe9R9Ay6XXe0,25431
 jarvis/jarvis_rag/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-jarvis/jarvis_rag/main.py,sha256=pIZXnw7xl6yqdN0Lghu7WDwd-kyh36KTCAgOoSn95uk,22932
+jarvis/jarvis_rag/main.py,sha256=a8TtPVCh5Xd6W1AaRFGeXvU_1hEnHQdoMElxnMuq0ew,24773
 jarvis/jarvis_smart_shell/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 jarvis/jarvis_smart_shell/main.py,sha256=QgR1CZRcTVfC8a5hMso3onH3pFdDoniRjr0YQvY2jXQ,3809
 jarvis/models/__init__.py,sha256=mrOt67nselz_H1gX9wdAO4y2DY5WPXzABqJbr5Des8k,63
-jarvis/models/ai8.py,sha256=P_p1ceyjn_olFk38WsubHGavWI9bMEBzo-1vo97HLPE,12215
+jarvis/models/ai8.py,sha256=Te-zqUVTTsismsoS8yXljAMwxKY5BVHdfwJyzPrIYSc,12216
 jarvis/models/base.py,sha256=vQmgr-l0fRzVTUX4orbQZIKsXEInagjFdYv1cL9Hp7U,1511
 jarvis/models/kimi.py,sha256=ltYoHQDn9vfZyhZ25eUVKMBpxKKlqlw4kManozVF7uo,16135
-jarvis/models/openai.py,sha256=XiZ0omZdaOfDVdmXX-IA-Dg78R8xIH429L6j0R_yHSI,4083
-jarvis/models/oyi.py,sha256=tdsBf3gegrEDkZf2VOJF9SRLwe9s2g8Al9s-qR9YQRg,14633
+jarvis/models/ollama.py,sha256=iPCsJKZs3kXtuJyVBo6d6Ls5qBkSRgtuqF38PDFadso,6097
+jarvis/models/openai.py,sha256=Ns_kpJcWoQuxdKScOFlfkSGjrL2dVGzgmvcnP44sEgs,4044
+jarvis/models/oyi.py,sha256=vV3IMsdegxQqhS2vvG6MB648fec6bVopdNZC5xcdY_c,14678
 jarvis/models/registry.py,sha256=Lt8IdVBAEx_CCFtfZJPgw3nxSEjfFcqI47I-U64kIbg,8257
 jarvis/tools/__init__.py,sha256=7Rqyj5hBAv5cWDVr5T9ZTZASO7ssBHeQNm2_4ZARdkA,72
-jarvis/tools/ask_user.py,sha256=xLXkYK3_f8EJ7kudA8MumOOCxyFl6z3DBS_zcscMH6Y,2151
+jarvis/tools/ask_user.py,sha256=OELDt7oTCjI2G-CebvnBSxFJhqkIWcugLStU-XxouzE,1998
 jarvis/tools/base.py,sha256=EGRGbdfbLXDLwtyoWdvp9rlxNX7bzc20t0Vc2VkwIEY,652
 jarvis/tools/chdir.py,sha256=TjfPbX8yvNKgUNJEMXh3ZlVDEIse_Fo8xMoVsiK7_dA,2688
 jarvis/tools/codebase_qa.py,sha256=LsowsgL7HBmdBwa7zXcYi_OkwOok4qbnzYWYsuZxHtU,2413
@@ -26,14 +27,14 @@ jarvis/tools/coder.py,sha256=kmotT2Klsug44S51QoSW9DzkxLzcF-XonyYAEoWZV6c,2295
 jarvis/tools/file_ops.py,sha256=h8g0eT9UvlJf4kt0DLXvdSsjcPj7x19lxWdDApeDfpg,3842
 jarvis/tools/generator.py,sha256=TB1zcw_JmRL2W9w6L4IxtrLF3gjnNw5Jj2Zrowj0eSg,5763
 jarvis/tools/methodology.py,sha256=UG6s5VYRcd9wrKX4cg6f7zJhet5AIcthFGMOAdevBiw,5175
-jarvis/tools/registry.py,sha256=sgj5EVbRgfHSzXW5v-kbIZS_8cwAxTjHvSPAicxBRf4,9074
+jarvis/tools/registry.py,sha256=AbADf8pcjHqfNoQNJkWqEuVg6zHRdryhJyDQ5w4O2sc,9177
 jarvis/tools/search.py,sha256=c9dXtyICdl8Lm8shNPNyIx9k67uY0rMF8xnIKu2RsnE,8787
 jarvis/tools/shell.py,sha256=UPKshPyOaUwTngresUw-ot1jHjQIb4wCY5nkJqa38lU,2520
 jarvis/tools/sub_agent.py,sha256=rEtAmSVY2ZjFOZEKr5m5wpACOQIiM9Zr_3dT92FhXYU,2621
 jarvis/tools/webpage.py,sha256=d3w3Jcjcu1ESciezTkz3n3Zf-rp_l91PrVoDEZnckOo,2391
-jarvis_ai_assistant-0.1.85.dist-info/LICENSE,sha256=AGgVgQmTqFvaztRtCAXsAMryUymB18gZif7_l2e1XOg,1063
-jarvis_ai_assistant-0.1.85.dist-info/METADATA,sha256=RK6Yy0tqkTb0tMnqxLLyizhi6fZmFTEwAP-rI6PRCHM,12736
-jarvis_ai_assistant-0.1.85.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
-jarvis_ai_assistant-0.1.85.dist-info/entry_points.txt,sha256=sdmIO86MrIUepJTGyHs0i_Ho9VGf1q9YRP4RgQvGWcI,280
-jarvis_ai_assistant-0.1.85.dist-info/top_level.txt,sha256=1BOxyWfzOP_ZXj8rVTDnNCJ92bBGB0rwq8N1PCpoMIs,7
-jarvis_ai_assistant-0.1.85.dist-info/RECORD,,
+jarvis_ai_assistant-0.1.87.dist-info/LICENSE,sha256=AGgVgQmTqFvaztRtCAXsAMryUymB18gZif7_l2e1XOg,1063
+jarvis_ai_assistant-0.1.87.dist-info/METADATA,sha256=1W0ZGe3z8R5w--7LJhwc5KGb_HpLrN-CUkgAOp-d41g,12589
+jarvis_ai_assistant-0.1.87.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
+jarvis_ai_assistant-0.1.87.dist-info/entry_points.txt,sha256=sdmIO86MrIUepJTGyHs0i_Ho9VGf1q9YRP4RgQvGWcI,280
+jarvis_ai_assistant-0.1.87.dist-info/top_level.txt,sha256=1BOxyWfzOP_ZXj8rVTDnNCJ92bBGB0rwq8N1PCpoMIs,7
+jarvis_ai_assistant-0.1.87.dist-info/RECORD,,