auto-coder 0.1.223-py3-none-any.whl → 0.1.225-py3-none-any.whl

This diff compares the contents of two publicly released versions of this package, as published to a supported registry. It is provided for informational purposes only.

Potentially problematic release.

auto_coder-0.1.223.dist-info/METADATA → auto_coder-0.1.225.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: auto-coder
-Version: 0.1.223
+Version: 0.1.225
 Summary: AutoCoder: AutoCoder
 Author: allwefantasy
 Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
auto_coder-0.1.223.dist-info/RECORD → auto_coder-0.1.225.dist-info/RECORD
@@ -1,16 +1,16 @@
 autocoder/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-autocoder/auto_coder.py,sha256=BQvCwOc-4XgOtpwQWHCTvcx97XEejdwRordyrCYSf0s,43554
+autocoder/auto_coder.py,sha256=Y6PqkMBr2ySUZkpik0XQxBNPA-QSwfn1-c2X7FZzU8g,44514
 autocoder/auto_coder_lang.py,sha256=Rtupq6N3_HT7JRhDKdgCBcwRaiAnyCOR_Gsp4jUomrI,3229
 autocoder/auto_coder_rag.py,sha256=illKgzP2bv-Tq50ujsofJnOHdI4pzr0ALtfR8NHHWdQ,22351
-autocoder/auto_coder_rag_client_mcp.py,sha256=WU8WzwuRbJE-W_r94S8PYKOQ32FEv2WPJzCgZII7dBc,6277
+autocoder/auto_coder_rag_client_mcp.py,sha256=WV7j5JUiQge0x4-B7Hp5-pSAFXLbvLpzQMcCovbauIM,6276
 autocoder/auto_coder_rag_mcp.py,sha256=-RrjNwFaS2e5v8XDIrKR-zlUNUE8UBaeOtojffBrvJo,8521
 autocoder/auto_coder_server.py,sha256=XU9b4SBH7zjPPXaTWWHV4_zJm-XYa6njuLQaplYJH_c,20290
 autocoder/benchmark.py,sha256=Ypomkdzd1T3GE6dRICY3Hj547dZ6_inqJbBJIp5QMco,4423
-autocoder/chat_auto_coder.py,sha256=x98afu7PCzYtf2545tIdJP13tI3lixFJg4sSSFtRjeM,95346
+autocoder/chat_auto_coder.py,sha256=kg2EwnM_uow4GuJJTpqQFZlWKEHfvJqRrq8VDiU422M,96173
 autocoder/chat_auto_coder_lang.py,sha256=ReWukXKVvuzVvpbYk5O9kc1ev7XNmAv3DnuQhmpLmnc,8717
 autocoder/command_args.py,sha256=9aYJ-AmPxP1sQh6ciw04FWHjSn31f2W9afXFwo8wgx4,30441
 autocoder/lang.py,sha256=U6AjVV8Rs1uLyjFCZ8sT6WWuNUxMBqkXXIOs4S120uk,14511
-autocoder/version.py,sha256=hA-WuAig1V69eSYfuZJjxnT6-tLr5fGDFA1I86siMMg,24
+autocoder/version.py,sha256=daC0R_OhT2OujXLNnTAs1av9FNQAypJF-VIPS6Kb0ME,24
 autocoder/agent/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 autocoder/agent/auto_demand_organizer.py,sha256=NWSAEsEk94vT3lGjfo25kKLMwYdPcpy9e-i21txPasQ,6942
 autocoder/agent/auto_filegroup.py,sha256=CW7bqp0FW1GIEMnl-blyAc2UGT7O9Mom0q66ITz1ckM,6635
@@ -23,7 +23,7 @@ autocoder/agent/project_reader.py,sha256=tWLaPoLw1gI6kO_NzivQj28KbobU2ceOLuppHMb
 autocoder/chat/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 autocoder/common/JupyterClient.py,sha256=O-wi6pXeAEYhAY24kDa0BINrLYvKS6rKyWe98pDClS0,2816
 autocoder/common/ShellClient.py,sha256=fM1q8t_XMSbLBl2zkCNC2J9xuyKN3eXzGm6hHhqL2WY,2286
-autocoder/common/__init__.py,sha256=jwjsx9qT_5fJmdlbNt3I5UiDtDKcdeLqQ8s_Tw_sT0U,11492
+autocoder/common/__init__.py,sha256=CdItaUnVhS7QbaJkUgvsUApYvEjQ6XfDpMZo4OQtmsc,11613
 autocoder/common/anything2images.py,sha256=0ILBbWzY02M-CiWB-vzuomb_J1hVdxRcenAfIrAXq9M,25283
 autocoder/common/anything2img.py,sha256=4TREa-sOA-iargieUy7MpyCYVUE-9Mmq0wJtwomPqnE,7662
 autocoder/common/audio.py,sha256=Kn9nWKQddWnUrAz0a_ZUgjcu4VUU_IcZBigT7n3N3qc,7439
@@ -40,17 +40,18 @@ autocoder/common/code_auto_merge_diff.py,sha256=5SI6ggklJ0QDHvsS0cpNXFuIkFRQxp1i
 autocoder/common/code_auto_merge_editblock.py,sha256=l6yEiZqXyIlUNIIPXvkHOnLCIInXR78TzSjF-jtJkkg,17035
 autocoder/common/code_auto_merge_strict_diff.py,sha256=ABYOTDUQYA4Bn4BwT1Rw812y49cHW3UH_JSpM9uJ6ig,9399
 autocoder/common/code_modification_ranker.py,sha256=DFlbwgdg8GK47zVcvfZSzkyniEKmTVLTOWejjcVIgaw,5121
-autocoder/common/command_completer.py,sha256=xXDatBOkjV78jsgG1eF8pcMFC0glTm1poesjd_Q6kSw,9169
+autocoder/common/command_completer.py,sha256=sqCA4RkNJr8kkzUmsdMHz1oW5YRqb8Q_2oTq2D6GHlg,9194
 autocoder/common/command_generator.py,sha256=v4LmU7sO-P7jEZIXCWHUC6P-vT7AvBi_x_PTwCqBAE8,1323
-autocoder/common/command_templates.py,sha256=3G-pCNbL6iHbnkG6v1JZpbIK3Mc9d373_RYGmCcDPMY,8548
+autocoder/common/command_templates.py,sha256=mnB3n8i0yjH1mqzyClEg8Wpr9VbZV44kxky66Zu6OJY,8557
 autocoder/common/const.py,sha256=eTjhjh4Aj4CUzviJ81jaf3Y5cwqsLATySn2wJxaS6RQ,2911
 autocoder/common/git_utils.py,sha256=btK45sxvfm4tX3fBRNUPRZoGQuZuOEQrWSAwLy1yoLw,23095
 autocoder/common/image_to_page.py,sha256=O0cNO_vHHUP-fP4GXiVojShmNqkPnZXeIyiY1MRLpKg,13936
 autocoder/common/interpreter.py,sha256=62-dIakOunYB4yjmX8SHC0Gdy2h8NtxdgbpdqRZJ5vk,2833
 autocoder/common/llm_rerank.py,sha256=FbvtCzaR661Mt2wn0qsuiEL1Y3puD6jeIJS4zg_e7Bs,3260
-autocoder/common/mcp_hub.py,sha256=9BqNWjVG8polG-bG8sXIqze-ZzzNJxg2OjfOqgxeM3U,14174
-autocoder/common/mcp_server.py,sha256=tZfjUjBvEp8gaEusD2SaH5Mh0FdbP2MpnyqAWuSgrwo,12652
+autocoder/common/mcp_hub.py,sha256=2ZyJv3Aiv4Y97UHut49oYhIFcu7ICR-mptDEBSgT3uE,14234
+autocoder/common/mcp_server.py,sha256=QCFa-15kx7rbNsinwdGFFX2y47pww0fVdI-ldKFSSWI,12267
 autocoder/common/mcp_tools.py,sha256=KsLvRrB6pvmebqd-lDaSH6IBJR0AIxWRE-dtCEG_w9k,12485
+autocoder/common/memory_manager.py,sha256=2ZjYG7BPyvbYalZBF6AM_G5e10Qkw_zrqtD4Zd7GSsQ,3663
 autocoder/common/recall_validation.py,sha256=Avt9Q9dX3kG6Pf2zsdlOHmsjd-OeSj7U1PFBDp_Cve0,1700
 autocoder/common/screenshots.py,sha256=_gA-z1HxGjPShBrtgkdideq58MG6rqFB2qMUJKjrycs,3769
 autocoder/common/search.py,sha256=245iPFgWhMldoUK3CqCP89ltaxZiNPK73evoG6Fp1h8,16518
@@ -71,9 +72,10 @@ autocoder/dispacher/actions/plugins/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQ
 autocoder/dispacher/actions/plugins/action_regex_project.py,sha256=ZvU3hsxFPSZTfdyZF-_tAwaFQvk7cnDIJ7Ip1x6Tc-I,4993
 autocoder/dispacher/actions/plugins/action_translate.py,sha256=nVAtRSQpdGNmZxg1R_9zXG3AuTv3CHf2v7ODgj8u65c,7727
 autocoder/index/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-autocoder/index/for_command.py,sha256=zfbvQnhHjsAqBc4Ce1kMGIu0jPEk_rtH7fntg89_4z0,3092
-autocoder/index/index.py,sha256=URE9-CPWBYKiH2i6RvGF5jDzkENM9eWxaOW1em7cVHw,34129
+autocoder/index/for_command.py,sha256=LGnz-OWogT8rd24m4Zcan7doLaijxqorAuiMk7WuRq0,3125
+autocoder/index/index.py,sha256=QIgA7SR5OgExRjInmsOBlFqZ_hb_7lVO5_HXHoopSto,34841
 autocoder/index/symbols_utils.py,sha256=CjcjUVajmJZB75Ty3a7kMv1BZphrm-tIBAdOJv6uo-0,2037
+autocoder/index/types.py,sha256=tYoFicbS6k1Dx4EoMpuNq71-4pF6hhEbtej0VYCVlSo,524
 autocoder/pyproject/__init__.py,sha256=7ZuIxD2QBYIwhjmpva8eL2knorKo03yNqUhSyecpt7c,14448
 autocoder/rag/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 autocoder/rag/api_server.py,sha256=dRbhAZVRAOlZ64Cnxf4_rKb4iJwHnrWS9Zr67IVORw0,7288
@@ -124,9 +126,9 @@ autocoder/utils/request_event_queue.py,sha256=r3lo5qGsB1dIjzVQ05dnr0z_9Z3zOkBdP1
 autocoder/utils/request_queue.py,sha256=nwp6PMtgTCiuwJI24p8OLNZjUiprC-TsefQrhMI-yPE,3889
 autocoder/utils/rest.py,sha256=HawagAap3wMIDROGhY1730zSZrJR_EycODAA5qOj83c,8807
 autocoder/utils/tests.py,sha256=BqphrwyycGAvs-5mhH8pKtMZdObwhFtJ5MC_ZAOiLq8,1340
-auto_coder-0.1.223.dist-info/LICENSE,sha256=HrhfyXIkWY2tGFK11kg7vPCqhgh5DcxleloqdhrpyMY,11558
-auto_coder-0.1.223.dist-info/METADATA,sha256=wma3ZSYec0V5FYUoLcbVDuWamEWG0ClOEoOInJkGnCQ,2615
-auto_coder-0.1.223.dist-info/WHEEL,sha256=GV9aMThwP_4oNCtvEC2ec3qUYutgWeAzklro_0m4WJQ,91
-auto_coder-0.1.223.dist-info/entry_points.txt,sha256=0nzHtHH4pNcM7xq4EBA2toS28Qelrvcbrr59GqD_0Ak,350
-auto_coder-0.1.223.dist-info/top_level.txt,sha256=Jqc0_uJSw2GwoFQAa9iJxYns-2mWla-9ok_Y3Gcznjk,10
-auto_coder-0.1.223.dist-info/RECORD,,
+auto_coder-0.1.225.dist-info/LICENSE,sha256=HrhfyXIkWY2tGFK11kg7vPCqhgh5DcxleloqdhrpyMY,11558
+auto_coder-0.1.225.dist-info/METADATA,sha256=SUwPMT8ybDcS9wkZANfyPWFMjNBjJBstrPcWo8tPRTc,2615
+auto_coder-0.1.225.dist-info/WHEEL,sha256=GV9aMThwP_4oNCtvEC2ec3qUYutgWeAzklro_0m4WJQ,91
+auto_coder-0.1.225.dist-info/entry_points.txt,sha256=0nzHtHH4pNcM7xq4EBA2toS28Qelrvcbrr59GqD_0Ak,350
+auto_coder-0.1.225.dist-info/top_level.txt,sha256=Jqc0_uJSw2GwoFQAa9iJxYns-2mWla-9ok_Y3Gcznjk,10
+auto_coder-0.1.225.dist-info/RECORD,,
autocoder/auto_coder.py CHANGED
@@ -40,6 +40,7 @@ from rich.panel import Panel
 from rich.markdown import Markdown
 from rich.live import Live
 from autocoder.auto_coder_lang import get_message
+from autocoder.common.memory_manager import save_to_memory_file
 
 console = Console()
 
@@ -792,7 +793,7 @@ def main(input_args: Optional[List[str]] = None):
             pre_conversations.append(
                 {
                     "role": "user",
-                    "content": f"下面是一些文档和源码,如果用户的问题和他们相关,请参考他们:\n <files>\n{file_content}</files>",
+                    "content": f"请阅读下面的代码和文档:\n\n <files>\n{file_content}\n</files>",
                 },
             )
             pre_conversations.append(
@@ -819,7 +820,7 @@ def main(input_args: Optional[List[str]] = None):
                 pre_conversations.append(
                     {
                         "role": "user",
-                        "content": f"下面是一些文档和源码,如果用户的问题和他们相关,请参考他们:\n <files>{s}</files>",
+                        "content": f"请阅读下面的代码和文档:\n\n <files>\n{s}\n</files>",
                     }
                 )
                 pre_conversations.append(
@@ -827,7 +828,7 @@ def main(input_args: Optional[List[str]] = None):
                 source_count += 1
 
         loaded_conversations = pre_conversations + \
-            chat_history["ask_conversation"]
+        chat_history["ask_conversation"]
 
         if args.human_as_model:
             console = Console()
@@ -836,12 +837,10 @@ def main(input_args: Optional[List[str]] = None):
            def chat_with_human_as_model(
                source_codes, pre_conversations, last_conversation
            ):
-                """
-                <files>
-                {% if source_codes %}
+                """
+                {% if source_codes %}
                 {{ source_codes }}
-                {% endif %}
-                </files>
+                {% endif %}
 
                 {% if pre_conversations %}
                 下面是我们之间的历史对话,假设我是A,你是B。
@@ -853,7 +852,7 @@ def main(input_args: Optional[List[str]] = None):
                 {% endif %}
 
 
-                参考上面的文件以及对话,回答用户的问题。
+                参考上面的文件以及历史对话,回答用户的问题。
                 用户的问题: {{ last_conversation.content }}
                 """
 
@@ -942,9 +941,14 @@ def main(input_args: Optional[List[str]] = None):
                 ),
             )
 
+            if "save" in args.action:
+                save_to_memory_file(ask_conversation=chat_history["ask_conversation"],
+                                    query=args.query,
+                                    response=result)
+                print("Saved to your memory")
             return {}
 
-        if args.action == "rag":
+        if "rag" in args.action:
             args.enable_rag_search = True
             args.enable_rag_context = False
             rag = RAGFactory.get_rag(llm=chat_llm, args=args, path="")
@@ -952,7 +956,7 @@ def main(input_args: Optional[List[str]] = None):
                 conversations=[{"role": "user", "content": args.query}])[0]
             v = ([item, None] for item in response)
 
-        elif args.action == "mcp":
+        elif "mcp" in args.action:
             from autocoder.common.mcp_server import get_mcp_server, McpRequest, McpInstallRequest, McpRemoveRequest, McpListRequest, McpListRunningRequest, McpRefreshRequest
             mcp_server = get_mcp_server()
             response = mcp_server.send_request(
@@ -1027,6 +1031,19 @@ def main(input_args: Optional[List[str]] = None):
         with open(memory_file, "w") as f:
             json.dump(chat_history, f, ensure_ascii=False)
 
+        if "copy" in args.action:
+            # copy assistant_response to clipboard
+            import pyperclip
+            try:
+                pyperclip.copy(assistant_response)
+            except:
+                print("pyperclip not installed or clipboard is not supported, instruction will not be copied to clipboard.")
+
+        if "save" in args.action:
+            save_to_memory_file(ask_conversation=chat_history["ask_conversation"],
+                                query=args.query,
+                                response=assistant_response)
+            print("Saved to your memory")
         return
 
     else:
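Because `action` is now a list (see the `AutoCoderArgs` change below), `main()` dispatches with membership tests instead of string equality, so several post-chat steps can fire for a single query. A minimal sketch of the new `copy` branch; `args_action` and `assistant_response` are hypothetical stand-in values:

```python
# Hypothetical values; mirrors the membership-based dispatch added in main().
args_action = ["copy", "save"]
assistant_response = "Here is the generated summary..."

if "copy" in args_action:
    try:
        import pyperclip  # optional dependency; clipboard support varies by platform
        pyperclip.copy(assistant_response)
    except Exception:
        print("pyperclip not installed or clipboard is not supported.")
```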
autocoder/auto_coder_rag_client_mcp.py CHANGED
@@ -83,7 +83,7 @@ class AutoCoderRAGClientMCP:
         response = self.client.chat.completions.create(
             messages=[{"role": "user", "content": json.dumps({
                 "query": query,
-                "only_contexts": False
+                "only_contexts": True
             })}],
             model=self.args.model,
             max_tokens=self.args.rag_params_max_tokens,
autocoder/chat_auto_coder.py CHANGED
@@ -48,6 +48,7 @@ from autocoder.agent.auto_guess_query import AutoGuessQuery
 from autocoder.common.mcp_server import get_mcp_server, McpRequest, McpInstallRequest, McpRemoveRequest, McpListRequest, McpListRunningRequest, McpRefreshRequest
 import byzerllm
 from byzerllm.utils import format_str_jinja2
+from autocoder.common.memory_manager import get_global_memory_file_paths
 
 
 class SymbolItem(BaseModel):
@@ -1657,6 +1658,9 @@ def commit(query: str):
         return_paths=True
     )
 
+    if conf.get("enable_global_memory", "true") in ["true", "True", True]:
+        yaml_config["urls"] += get_global_memory_file_paths()
+
     # 临时保存yaml文件,然后读取yaml文件,转换为args
     temp_yaml = os.path.join("actions", f"{uuid.uuid4()}.yml")
     try:
@@ -1740,11 +1744,14 @@ def coding(query: str):
             converted_value = convert_config_value(key, value)
             if converted_value is not None:
                 yaml_config[key] = converted_value
-
+
     yaml_config["urls"] = current_files + get_llm_friendly_package_docs(
         return_paths=True
     )
 
+    if conf.get("enable_global_memory", "true") in ["true", "True", True]:
+        yaml_config["urls"] += get_global_memory_file_paths()
+
     # handle image
     v = Image.convert_image_paths_from(query)
     yaml_config["query"] = v
@@ -1794,7 +1801,7 @@ def coding(query: str):
 
         yaml_config[
             "context"
-        ] += f"下面是我们的历史对话,参考我们的历史对话从而更好的理解需求和修改代码。\n\n<history>\n"
+        ] += f"下面是我们的历史对话,参考我们的历史对话从而更好的理解需求和修改代码: \n\n<history>\n"
         for conv in conversations:
             if conv["role"] == "user":
                 yaml_config["context"] += f"用户: {conv['content']}\n"
@@ -1855,6 +1862,9 @@ def chat(query: str):
         return_paths=True
     )
 
+    if conf.get("enable_global_memory", "true") in ["true", "True", True]:
+        current_files += get_global_memory_file_paths()
+
     yaml_config["urls"] = current_files
 
     if "emb_model" in conf:
@@ -1864,14 +1874,24 @@
     if is_new:
         query = query.replace("/new", "", 1).strip()
 
+    yaml_config["action"] = []
+
     if "/mcp " in query:
-        yaml_config["action"] = "mcp"
+        yaml_config["action"].append("mcp")
         query = query.replace("/mcp ", "", 1).strip()
 
     if "/rag " in query:
-        yaml_config["action"] = "rag"
+        yaml_config["action"].append("rag")
         query = query.replace("/rag ", "", 1).strip()
 
+    if "/copy" in query:
+        yaml_config["action"].append("copy")
+        query = query.replace("/copy", "", 1).strip()
+
+    if "/save" in query:
+        yaml_config["action"].append("save")
+        query = query.replace("/save", "", 1).strip()
+
     is_review = query.strip().startswith("/review")
     if is_review:
         query = query.replace("/review", "", 1).strip()
autocoder/common/__init__.py CHANGED
@@ -262,8 +262,10 @@ class AutoCoderArgs(pydantic.BaseModel):
     index_model_max_length: Optional[int] = 0
     index_model_max_input_length: Optional[int] = 0
     index_model_anti_quota_limit: Optional[int] = 0
+
 
     index_filter_level: Optional[int] = 0
+    index_filter_enable_relevance_verification: Optional[bool] = True
     index_filter_workers: Optional[int] = 1
     index_filter_file_num: Optional[int] = -1
     index_build_workers: Optional[int] = 1
@@ -348,7 +350,8 @@ class AutoCoderArgs(pydantic.BaseModel):
     data_cells_max_num: Optional[int] = 2000
     generate_times_same_model: Optional[int] = 1
 
-    action: Optional[str] = None
+    action: List[str] = []
+    enable_global_memory: Optional[bool] = True
 
     class Config:
         protected_namespaces = ()
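The new knobs are ordinary pydantic fields on `AutoCoderArgs`, so they can be set programmatically as well as through YAML. A minimal sketch, assuming the remaining fields keep their defaults; the values here are illustrative:

```python
from autocoder.common import AutoCoderArgs

args = AutoCoderArgs(
    action=["rag", "save"],                            # was Optional[str], now a list
    enable_global_memory=True,                         # pull ~/.auto-coder/memory files into context
    index_filter_enable_relevance_verification=False,  # skip the Phase 5 LLM relevance check
)
print(args.action)  # ['rag', 'save']
```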
autocoder/common/command_completer.py CHANGED
@@ -13,7 +13,7 @@ COMMANDS = {
         "/sd": {},
     },
     "/coding": {"/apply": {}, "/next": {}},
-    "/chat": {"/new": {}, "/mcp": {}, "/rag": {}, "/review": {}, "/no_context": {}},
+    "/chat": {"/new": {}, "/save": {}, "/copy":{}, "/mcp": {}, "/rag": {}, "/review": {}, "/no_context": {}},
     "/mcp": {
         "/add": "",
         "/remove": "",
autocoder/common/command_templates.py CHANGED
@@ -30,7 +30,7 @@ def init_command_template(source_dir:str):
 project_type: py
 
 ## The model you want to drive AutoCoder to run
-model: gpt3_5_chat
+model: deepseek_chat
 
 
 ## Enable the index building which can help you find the related files by your query
@@ -38,7 +38,7 @@ def init_command_template(source_dir:str):
 skip_build_index: false
 ## The model to build index for the project (Optional)
 ## 用于为项目构建索引的模型(可选)
-index_model: haiku_chat
+index_model: deepseek_chat
 
 ## the filter level to find the related files
 ## 0: only find the files with the file name
@@ -192,8 +192,8 @@ def base_enable_index()->str:
 skip_build_index: false
 anti_quota_limit: 0
 index_filter_level: 1
-index_filter_workers: 4
-index_build_workers: 4
+index_filter_workers: 100
+index_build_workers: 100
 '''
 
 @byzerllm.prompt()
autocoder/common/mcp_hub.py CHANGED
@@ -192,7 +192,7 @@ class McpHub:
                  "PATH": os.environ.get("PATH", "")},
             )
 
-            # Create transport using context manager
+            # Create transport using context manager
             transport_manager = stdio_client(server_params)
             transport = await transport_manager.__aenter__()
             try:
@@ -204,9 +204,9 @@ class McpHub:
                 self.connections[name] = connection
 
                 # Update server status and fetch capabilities
-                server.status = "connected"
-                server.tools = await self._fetch_tools(name)
-                server.resources = await self._fetch_resources(name)
+                server.status = "connected"
+                server.tools = await self._fetch_tools(name)
+                server.resources = await self._fetch_resources(name)
                 server.resource_templates = await self._fetch_resource_templates(name)
 
             except Exception as e:
autocoder/common/mcp_server.py CHANGED
@@ -128,6 +128,35 @@ class McpServer:
             self._task = self._loop.create_task(self._process_request())
             self._loop.run_forever()
 
+    def _install_python_package(self, package_name: str) -> None:
+        """Install a Python package using pip"""
+        try:
+            import importlib
+            importlib.import_module(package_name.replace("-", "_"))
+        except ImportError:
+            import subprocess
+            try:
+                subprocess.run([sys.executable, "-m", "pip", "install", package_name], check=True)
+            except subprocess.CalledProcessError:
+                print(f"\n\033[93mFailed to automatically install {package_name}. Please manually install it using:\n")
+                print(f"    pip install {package_name}\n")
+                print(f"We have already updated the server configuration in ~/.autocoder/mcp/settings.json.\n")
+                print(f"After installation, you can restart the auto-coder.chat using the server.\033[0m\n")
+
+    def _install_node_package(self, package_name: str) -> None:
+        """Install a Node.js package using npm"""
+        import subprocess
+        try:
+            subprocess.run(["npx", package_name, "--version"], check=True)
+        except:
+            try:
+                subprocess.run(["npm", "install", "-y", "-g", package_name], check=True)
+            except subprocess.CalledProcessError:
+                print(f"\n\033[93mFailed to automatically install {package_name}. Please manually install it using:\n")
+                print(f"    npm install -g {package_name}\n")
+                print(f"We have already updated the server configuration in ~/.autocoder/mcp/settings.json.\n")
+                print(f"After installation, you can restart the auto-coder.chat using the server.\033[0m\n")
+
     async def _install_server(self, request: McpInstallRequest, hub: McpHub) -> McpResponse:
         """Install an MCP server with module dependency check"""
         name = ""
@@ -143,6 +172,12 @@ class McpServer:
             # 取第一个server 配置
             config = list(raw_config.values())[0]
             name = list(raw_config.keys())[0]
+            if name.startswith("@") or config["command"] in ["npx", "npm"]:
+                for item in config["args"]:
+                    if name in item:
+                        self._install_node_package(item)
+            else:
+                self._install_python_package(name)
         except json.JSONDecodeError:
             name = server_name_or_config.strip()
             if name not in MCP_BUILD_IN_SERVERS:
@@ -151,26 +186,7 @@
                 for s in external_servers:
                     if s.name == name:
                         if s.runtime == "python":
-                            # Check if module exists
-                            try:
-                                import importlib
-                                importlib.import_module(
-                                    name.replace("-", "_"))
-                            except ImportError:
-                                # Install missing module
-                                import subprocess
-                                try:
-                                    subprocess.run(
-                                        [sys.executable, "-m", "pip", "install", name], check=True)
-                                except subprocess.CalledProcessError:
-                                    print(
-                                        f"\n\033[93mFailed to automatically install {name}. Please manually install it using:\n")
-                                    print(f"    pip install {name}\n")
-                                    print(
-                                        f"We have already updated the server configuration in ~/.autocoder/mcp/settings.json.\n")
-                                    print(
-                                        f"After installation, you can restart the auto-coder.chat using the server.\033[0m\n")
-
+                            self._install_python_package(name)
                             config = {
                                 "command": "python",
                                 "args": [
@@ -178,28 +194,12 @@
                                 ],
                             }
                         elif s.runtime == "node":
-                            # Check if package exists
-                            try:
-                                subprocess.run(
-                                    ["npx", name, "--version"], check=True)
-                            except:
-                                # Install missing package
-                                try:
-                                    subprocess.run(
-                                        ["npm", "install", "-y", "-g", name], check=True)
-                                except subprocess.CalledProcessError:
-                                    print(
-                                        f"\n\033[93mFailed to automatically install {name}. Please manually install it using:\n")
-                                    print(f"    npm install -g {name}\n")
-                                    print(
-                                        f"We have already updated the server configuration in ~/.autocoder/mcp/settings.json.\n")
-                                    print(
-                                        f"After installation, you can restart the auto-coder.chat using the server.\033[0m\n")
-
+                            self._install_node_package(name)
                             config = {
                                 "command": "npx",
                                 "args": [
                                     "-y",
+                                    "-g",
                                     name
                                 ]
                             }
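With the helpers extracted, a raw JSON install request now resolves its dependency before the server is registered: scoped names (`@...`) or an `npx`/`npm` command route through `_install_node_package`, everything else through `_install_python_package`. A sketch of the routing predicate on a hypothetical config (the server name and package are illustrative):

```python
# Hypothetical MCP server config, as it would arrive in the install request.
raw_config = {
    "@example/mcp-weather": {
        "command": "npx",
        "args": ["-y", "@example/mcp-weather"],
    }
}
config = list(raw_config.values())[0]
name = list(raw_config.keys())[0]

# Mirrors the check added in _install_server.
takes_node_path = name.startswith("@") or config["command"] in ["npx", "npm"]
print(takes_node_path)  # True -> each arg containing the name is npm-installed
```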
autocoder/common/memory_manager.py ADDED
@@ -0,0 +1,112 @@
+import os
+import json
+import time
+from typing import List, Dict, Optional, Any
+from pydantic import BaseModel
+from datetime import datetime
+
+class MemoryItem(BaseModel):
+    content: Any
+    role: str
+
+class MemoryEntry(BaseModel):
+    timestamp: datetime
+    conversation: List[MemoryItem]
+
+
+def save_to_memory_file(ask_conversation, query: str, response: str):
+    # Save to memory file
+    memory_dir = os.path.join(os.path.expanduser("~"), ".auto-coder", "memory")
+    os.makedirs(memory_dir, exist_ok=True)
+    memory_file = os.path.join(memory_dir, "memory.json")
+
+    # Read existing memory or create new
+    if os.path.exists(memory_file):
+        with open(memory_file, 'r') as f:
+            try:
+                memory_data = json.load(f)
+            except json.JSONDecodeError:
+                memory_data = {}
+    else:
+        memory_data = {}
+
+    # Add new conversation
+    current_time = str(int(time.time()))
+    memory_data[current_time] = [
+        {"content": ask_conversation, "role": "background"},
+        {"content": query, "role": "user"},
+        {"content": response, "role": "assistant"}
+    ]
+
+    # Save memory
+    with open(memory_file, 'w') as f:
+        json.dump(memory_data, f, ensure_ascii=False, indent=2)
+
+def load_from_memory_file() -> List[MemoryEntry]:
+    """Load memory data from file and return as list of MemoryEntry objects"""
+    memory_dir = os.path.join(os.path.expanduser("~"), ".auto-coder", "memory")
+    memory_file = os.path.join(memory_dir, "memory.json")
+
+    if not os.path.exists(memory_file):
+        return []
+
+    with open(memory_file, 'r') as f:
+        try:
+            memory_data = json.load(f)
+        except json.JSONDecodeError:
+            return []
+
+    entries = []
+    for timestamp_str, conversation in memory_data.items():
+        try:
+            timestamp = datetime.fromtimestamp(int(timestamp_str))
+            memory_items = [MemoryItem(**item) for item in conversation]
+            entries.append(MemoryEntry(
+                timestamp=timestamp,
+                conversation=memory_items
+            ))
+        except (ValueError, TypeError):
+            continue
+
+    # Sort entries by timestamp (oldest first)
+    entries.sort(key=lambda x: x.timestamp)
+
+    return entries
+
+def get_global_memory_file_paths() -> List[str]:
+    """Get global memory and generate temporary files in ~/.auto-coder/memory/.tmp
+
+    Returns:
+        List[str]: List of file paths for the generated temporary files
+    """
+    entries = load_from_memory_file()
+    memory_dir = os.path.join(os.path.expanduser("~"), ".auto-coder", "memory")
+    tmp_dir = os.path.join(memory_dir, ".tmp")
+    os.makedirs(tmp_dir, exist_ok=True)
+
+    file_paths = []
+
+    for entry in entries:
+        # Find assistant responses
+        assistant_contents = [
+            item.content for item in entry.conversation
+            if item.role == "assistant"
+        ]
+
+        if assistant_contents:
+            timestamp_str = str(int(entry.timestamp.timestamp()))
+            content = "\n".join(assistant_contents)
+            file_path = os.path.join(tmp_dir, os.path.join("memory", f"{timestamp_str}.txt"))
+
+            # Create parent directory if it doesn't exist
+            os.makedirs(os.path.dirname(file_path), exist_ok=True)
+
+            # Only write file if it doesn't exist
+            if not os.path.exists(file_path):
+                with open(file_path, "w", encoding="utf-8") as f:
+                    f.write(content)
+
+            file_paths.append(file_path)
+
+    return file_paths
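This new module gives `/chat /save` its persistence and backs the `enable_global_memory` option. The round trip, as a minimal sketch against an empty `~/.auto-coder/memory` directory:

```python
from autocoder.common.memory_manager import (
    save_to_memory_file,
    load_from_memory_file,
    get_global_memory_file_paths,
)

# Persist one turn under a Unix-timestamp key in memory.json.
save_to_memory_file(
    ask_conversation=[],  # prior /chat history, stored with role "background"
    query="How do I enable the index?",
    response="Set skip_build_index: false in your actions YAML.",
)

# Entries come back oldest-first as pydantic MemoryEntry objects.
for entry in load_from_memory_file():
    print(entry.timestamp, [item.role for item in entry.conversation])

# Assistant replies are materialized as .txt files under ~/.auto-coder/memory/.tmp,
# ready to be appended to yaml_config["urls"].
print(get_global_memory_file_paths())
```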
autocoder/index/for_command.py CHANGED
@@ -1,4 +1,5 @@
-from autocoder.index.index import IndexManager, TargetFile
+from autocoder.index.index import IndexManager
+from autocoder.index.types import TargetFile
 from autocoder.suffixproject import SuffixProject
 from autocoder.tsproject import TSProject
 from autocoder.pyproject import PyProject
autocoder/index/index.py CHANGED
@@ -29,29 +29,12 @@ from autocoder.utils.queue_communicate import (
     CommunicateEvent,
     CommunicateEventType,
 )
-
-
-class IndexItem(pydantic.BaseModel):
-    module_name: str
-    symbols: str
-    last_modified: float
-    md5: str  # 新增文件内容的MD5哈希值字段
-
-
-class TargetFile(pydantic.BaseModel):
-    file_path: str
-    reason: str = pydantic.Field(
-        ..., description="The reason why the file is the target file"
-    )
-
-
-class VerifyFileRelevance(pydantic.BaseModel):
-    relevant_score: int
-    reason: str
-
-
-class FileList(pydantic.BaseModel):
-    file_list: List[TargetFile]
+from autocoder.index.types import (
+    IndexItem,
+    TargetFile,
+    VerifyFileRelevance,
+    FileList,
+)
 
 
 class IndexManager:
@@ -97,7 +80,7 @@ class IndexManager:
         ```json
         {
             "relevant_score": 0-10,
-            "reason": "这是相关的原因..."
+            "reason": "这是相关的原因(不超过10个中文字符)..."
         }
         ```
         """
@@ -119,12 +102,12 @@ class IndexManager:
         {
             "file_list": [
                 {
-                    "file_path": "path/to/file.py",
-                    "reason": "The reason why the file is the target file"
+                    "file_path": "path/to/file1.py",
+                    "reason": "这是被选择的原因(不超过10个中文字符)"
                 },
                 {
-                    "file_path": "path/to/file.py",
-                    "reason": "The reason why the file is the target file"
+                    "file_path": "path/to/file2.py",
+                    "reason": "这是被选择的原因(不超过10个中文字符)"
                 }
             ]
         }
@@ -211,17 +194,19 @@ class IndexManager:
         if current_chunk:
             chunks.append("\n".join(current_chunk))
         return chunks
+
+    def should_skip(self, file_path: str):
+        ext = os.path.splitext(file_path)[1].lower()
+        if ext in [".md", ".html", ".txt", ".doc", ".pdf"]:
+            return True
+        return False
 
     def build_index_for_single_source(self, source: SourceCode):
         file_path = source.module_name
         if not os.path.exists(file_path):
             return None
 
-        ext = os.path.splitext(file_path)[1].lower()
-        if ext in [".md", ".html", ".txt", ".doc", ".pdf"]:
-            return None
-
-        if source.source_code.strip() == "":
+        if self.should_skip(file_path):
             return None
 
         md5 = hashlib.md5(source.source_code.encode("utf-8")).hexdigest()
@@ -290,6 +275,13 @@ class IndexManager:
 
         wait_to_build_files = []
         for source in self.sources:
+            file_path = source.module_name
+            if not os.path.exists(file_path):
+                continue
+
+            if self.should_skip(file_path):
+                continue
+
             source_code = source.source_code
             if self.args.auto_merge == "strict_diff":
                 v = source.source_code.splitlines()
@@ -297,7 +289,7 @@ class IndexManager:
                 for line in v:
                     new_v.append(line[line.find(":"):])
                 source_code = "\n".join(new_v)
-
+
             md5 = hashlib.md5(source_code.encode("utf-8")).hexdigest()
             if (
                 source.module_name not in index_data
@@ -453,6 +445,7 @@ class IndexManager:
             nonlocal completed_threads
             result = self._get_target_files_by_query.with_llm(
                 self.llm).with_return_type(FileList).run(chunk, query)
+            print(result)
             if result is not None:
                 with lock:
                     all_results.extend(result.file_list)
@@ -477,18 +470,23 @@
         return all_results, total_threads, completed_threads
 
     def get_target_files_by_query(self, query: str) -> FileList:
+        '''
+        根据用户查询过滤文件。
+        1. 必选,根据文件名和路径,以及文件用途说明,过滤出相关文件。
+        2. index_filter_level>=1,根据文件名和路径,文件说明以及符号列表过滤出相关文件。
+        '''
         all_results: List[TargetFile] = []
+        if self.args.index_filter_level == 0:
+            def w():
+                return self._get_meta_str(
+                    skip_symbols=False,
+                    max_chunk_size=-1,
+                    includes=[SymbolType.USAGE],
+                )
 
-        def w():
-            return self._get_meta_str(
-                skip_symbols=False,
-                max_chunk_size=-1,
-                includes=[SymbolType.USAGE],
-            )
-
-        temp_result, total_threads, completed_threads = self._query_index_with_thread(
-            query, w)
-        all_results.extend(temp_result)
+            temp_result, total_threads, completed_threads = self._query_index_with_thread(
+                query, w)
+            all_results.extend(temp_result)
 
         if self.args.index_filter_level >= 1:
@@ -524,12 +522,12 @@ class IndexManager:
     {
         "file_list": [
             {
-                "file_path": "path/to/file.py",
-                "reason": "The reason why the file is the target file"
+                "file_path": "path/to/file1.py",
+                "reason": "这是被选择的原因(不超过10个中文字符)"
             },
             {
-                "file_path": "path/to/file.py",
-                "reason": "The reason why the file is the target file"
+                "file_path": "path/to/file2.py",
+                "reason": "这是被选择的原因(不超过10个中文字符)"
            }
        ]
    }
@@ -699,76 +697,77 @@ def build_index_and_filter_files(
 
     # Phase 5: Relevance verification
     logger.info("Phase 5: Performing relevance verification...")
-    phase_start = time.monotonic()
-    verified_files = {}
-    temp_files = list(final_files.values())
-    verification_results = []
-
-    def print_verification_results(results):
-        from rich.table import Table
-        from rich.console import Console
-
-        console = Console()
-        table = Table(title="File Relevance Verification Results", show_header=True, header_style="bold magenta")
-        table.add_column("File Path", style="cyan", no_wrap=True)
-        table.add_column("Score", justify="right", style="green")
-        table.add_column("Status", style="yellow")
-        table.add_column("Reason/Error")
-
-        for file_path, score, status, reason in results:
-            table.add_row(
-                file_path,
-                str(score) if score is not None else "N/A",
-                status,
-                reason
-            )
+    if args.index_filter_enable_relevance_verification:
+        phase_start = time.monotonic()
+        verified_files = {}
+        temp_files = list(final_files.values())
+        verification_results = []
 
-        console.print(table)
-
-    def verify_single_file(file: TargetFile):
-        for source in sources:
-            if source.module_name == file.file_path:
-                file_content = source.source_code
-                try:
-                    result = index_manager.verify_file_relevance.with_llm(llm).with_return_type(VerifyFileRelevance).run(
-                        file_content=file_content,
-                        query=args.query
-                    )
-                    if result.relevant_score >= args.verify_file_relevance_score:
+        def print_verification_results(results):
+            from rich.table import Table
+            from rich.console import Console
+
+            console = Console()
+            table = Table(title="File Relevance Verification Results", show_header=True, header_style="bold magenta")
+            table.add_column("File Path", style="cyan", no_wrap=True)
+            table.add_column("Score", justify="right", style="green")
+            table.add_column("Status", style="yellow")
+            table.add_column("Reason/Error")
+
+            for file_path, score, status, reason in results:
+                table.add_row(
+                    file_path,
+                    str(score) if score is not None else "N/A",
+                    status,
+                    reason
+                )
+
+            console.print(table)
+
+        def verify_single_file(file: TargetFile):
+            for source in sources:
+                if source.module_name == file.file_path:
+                    file_content = source.source_code
+                    try:
+                        result = index_manager.verify_file_relevance.with_llm(llm).with_return_type(VerifyFileRelevance).run(
+                            file_content=file_content,
+                            query=args.query
+                        )
+                        if result.relevant_score >= args.verify_file_relevance_score:
+                            verified_files[file.file_path] = TargetFile(
+                                file_path=file.file_path,
+                                reason=f"Score:{result.relevant_score}, {result.reason}"
+                            )
+                            return file.file_path, result.relevant_score, "PASS", result.reason
+                        else:
+                            return file.file_path, result.relevant_score, "FAIL", result.reason
+                    except Exception as e:
+                        error_msg = str(e)
                         verified_files[file.file_path] = TargetFile(
                             file_path=file.file_path,
-                            reason=f"Score:{result.relevant_score}, {result.reason}"
+                            reason=f"Verification failed: {error_msg}"
                         )
-                        return file.file_path, result.relevant_score, "PASS", result.reason
-                    else:
-                        return file.file_path, result.relevant_score, "FAIL", result.reason
-                except Exception as e:
-                    error_msg = str(e)
-                    verified_files[file.file_path] = TargetFile(
-                        file_path=file.file_path,
-                        reason=f"Verification failed: {error_msg}"
-                    )
-                    return file.file_path, None, "ERROR", error_msg
-        return None
-
-    with ThreadPoolExecutor(max_workers=args.index_filter_workers) as executor:
-        futures = [executor.submit(verify_single_file, file)
-                   for file in temp_files]
-        for future in as_completed(futures):
-            result = future.result()
-            if result:
-                verification_results.append(result)
-            time.sleep(args.anti_quota_limit)
-
-    # Print verification results in a table
-    print_verification_results(verification_results)
-
-    stats["verified_files"] = len(verified_files)
-    phase_end = time.monotonic()
-    stats["timings"]["relevance_verification"] = phase_end - phase_start
+                        return file.file_path, None, "ERROR", error_msg
+            return None
+
+        with ThreadPoolExecutor(max_workers=args.index_filter_workers) as executor:
+            futures = [executor.submit(verify_single_file, file)
+                       for file in temp_files]
+            for future in as_completed(futures):
+                result = future.result()
+                if result:
+                    verification_results.append(result)
+                time.sleep(args.anti_quota_limit)
+
+        # Print verification results in a table
+        print_verification_results(verification_results)
+
+        stats["verified_files"] = len(verified_files)
+        phase_end = time.monotonic()
+        stats["timings"]["relevance_verification"] = phase_end - phase_start
 
-    # Keep all files, not just verified ones
-    final_files = verified_files
+        # Keep all files, not just verified ones
+        final_files = verified_files
 
     def display_table_and_get_selections(data):
         from prompt_toolkit.shortcuts import checkboxlist_dialog
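Phase 5 is now opt-out. A minimal sketch of disabling it, assuming `AutoCoderArgs` defaults for everything else:

```python
from autocoder.common import AutoCoderArgs

# With the flag off, build_index_and_filter_files skips the threaded LLM
# scoring pass entirely and final_files keeps the Phase 1-4 selection.
args = AutoCoderArgs(index_filter_enable_relevance_verification=False)
```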
autocoder/index/types.py ADDED
@@ -0,0 +1,24 @@
+import pydantic
+from typing import List
+
+class IndexItem(pydantic.BaseModel):
+    module_name: str
+    symbols: str
+    last_modified: float
+    md5: str  # 新增文件内容的MD5哈希值字段
+
+
+class TargetFile(pydantic.BaseModel):
+    file_path: str
+    reason: str = pydantic.Field(
+        ..., description="The reason why the file is the target file"
+    )
+
+
+class VerifyFileRelevance(pydantic.BaseModel):
+    relevant_score: int
+    reason: str
+
+
+class FileList(pydantic.BaseModel):
+    file_list: List[TargetFile]
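Moving these models into `autocoder.index.types` lets consumers such as `for_command.py` import them without pulling in `IndexManager`. A minimal usage sketch:

```python
from autocoder.index.types import FileList, TargetFile

files = FileList(file_list=[
    TargetFile(file_path="autocoder/index/index.py",
               reason="defines IndexManager"),  # reason is a required field
])
print(files.file_list[0].file_path)
```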
autocoder/version.py CHANGED
@@ -1 +1 @@
-__version__ = "0.1.223"
+__version__ = "0.1.225"