auto-coder 0.1.222__tar.gz → 0.1.224__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of auto-coder might be problematic. See the release advisory for more details.

Files changed (144)
  1. {auto_coder-0.1.222 → auto_coder-0.1.224}/PKG-INFO +1 -1
  2. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/auto_coder.egg-info/PKG-INFO +1 -1
  3. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/auto_coder.egg-info/SOURCES.txt +3 -0
  4. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/auto_coder.py +28 -11
  5. auto_coder-0.1.224/src/autocoder/auto_coder_rag_client_mcp.py +170 -0
  6. auto_coder-0.1.224/src/autocoder/auto_coder_rag_mcp.py +193 -0
  7. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/chat_auto_coder.py +24 -4
  8. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/common/__init__.py +2 -1
  9. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/common/command_completer.py +1 -1
  10. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/common/mcp_hub.py +4 -4
  11. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/common/mcp_server.py +38 -38
  12. auto_coder-0.1.224/src/autocoder/common/memory_manager.py +112 -0
  13. auto_coder-0.1.224/src/autocoder/version.py +1 -0
  14. auto_coder-0.1.222/src/autocoder/version.py +0 -1
  15. {auto_coder-0.1.222 → auto_coder-0.1.224}/LICENSE +0 -0
  16. {auto_coder-0.1.222 → auto_coder-0.1.224}/README.md +0 -0
  17. {auto_coder-0.1.222 → auto_coder-0.1.224}/setup.cfg +0 -0
  18. {auto_coder-0.1.222 → auto_coder-0.1.224}/setup.py +0 -0
  19. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/auto_coder.egg-info/dependency_links.txt +0 -0
  20. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/auto_coder.egg-info/entry_points.txt +0 -0
  21. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/auto_coder.egg-info/requires.txt +0 -0
  22. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/auto_coder.egg-info/top_level.txt +0 -0
  23. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/__init__.py +0 -0
  24. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/agent/__init__.py +0 -0
  25. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/agent/auto_demand_organizer.py +0 -0
  26. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/agent/auto_filegroup.py +0 -0
  27. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/agent/auto_guess_query.py +0 -0
  28. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/agent/auto_tool.py +0 -0
  29. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/agent/coder.py +0 -0
  30. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/agent/designer.py +0 -0
  31. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/agent/planner.py +0 -0
  32. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/agent/project_reader.py +0 -0
  33. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/auto_coder_lang.py +0 -0
  34. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/auto_coder_rag.py +0 -0
  35. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/auto_coder_server.py +0 -0
  36. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/benchmark.py +0 -0
  37. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/chat/__init__.py +0 -0
  38. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/chat_auto_coder_lang.py +0 -0
  39. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/command_args.py +0 -0
  40. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/common/JupyterClient.py +0 -0
  41. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/common/ShellClient.py +0 -0
  42. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/common/anything2images.py +0 -0
  43. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/common/anything2img.py +0 -0
  44. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/common/audio.py +0 -0
  45. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/common/buildin_tokenizer.py +0 -0
  46. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/common/chunk_validation.py +0 -0
  47. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/common/cleaner.py +0 -0
  48. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/common/code_auto_execute.py +0 -0
  49. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/common/code_auto_generate.py +0 -0
  50. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/common/code_auto_generate_diff.py +0 -0
  51. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/common/code_auto_generate_editblock.py +0 -0
  52. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/common/code_auto_generate_strict_diff.py +0 -0
  53. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/common/code_auto_merge.py +0 -0
  54. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/common/code_auto_merge_diff.py +0 -0
  55. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/common/code_auto_merge_editblock.py +0 -0
  56. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/common/code_auto_merge_strict_diff.py +0 -0
  57. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/common/code_modification_ranker.py +0 -0
  58. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/common/command_generator.py +0 -0
  59. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/common/command_templates.py +0 -0
  60. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/common/const.py +0 -0
  61. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/common/git_utils.py +0 -0
  62. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/common/image_to_page.py +0 -0
  63. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/common/interpreter.py +0 -0
  64. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/common/llm_rerank.py +0 -0
  65. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/common/mcp_servers/__init__.py +0 -0
  66. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/common/mcp_servers/mcp_server_perplexity.py +0 -0
  67. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/common/mcp_tools.py +0 -0
  68. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/common/recall_validation.py +0 -0
  69. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/common/screenshots.py +0 -0
  70. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/common/search.py +0 -0
  71. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/common/search_replace.py +0 -0
  72. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/common/sys_prompt.py +0 -0
  73. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/common/text.py +0 -0
  74. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/common/types.py +0 -0
  75. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/data/tokenizer.json +0 -0
  76. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/db/__init__.py +0 -0
  77. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/db/store.py +0 -0
  78. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/dispacher/__init__.py +0 -0
  79. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/dispacher/actions/__init__.py +0 -0
  80. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/dispacher/actions/action.py +0 -0
  81. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/dispacher/actions/copilot.py +0 -0
  82. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/dispacher/actions/plugins/__init__.py +0 -0
  83. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/dispacher/actions/plugins/action_regex_project.py +0 -0
  84. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/dispacher/actions/plugins/action_translate.py +0 -0
  85. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/index/__init__.py +0 -0
  86. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/index/for_command.py +0 -0
  87. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/index/index.py +0 -0
  88. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/index/symbols_utils.py +0 -0
  89. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/lang.py +0 -0
  90. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/pyproject/__init__.py +0 -0
  91. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/rag/__init__.py +0 -0
  92. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/rag/api_server.py +0 -0
  93. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/rag/cache/__init__.py +0 -0
  94. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/rag/cache/base_cache.py +0 -0
  95. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/rag/cache/byzer_storage_cache.py +0 -0
  96. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/rag/cache/file_monitor_cache.py +0 -0
  97. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/rag/cache/simple_cache.py +0 -0
  98. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/rag/doc_filter.py +0 -0
  99. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/rag/document_retriever.py +0 -0
  100. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/rag/llm_wrapper.py +0 -0
  101. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/rag/loaders/__init__.py +0 -0
  102. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/rag/loaders/docx_loader.py +0 -0
  103. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/rag/loaders/excel_loader.py +0 -0
  104. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/rag/loaders/pdf_loader.py +0 -0
  105. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/rag/loaders/ppt_loader.py +0 -0
  106. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/rag/long_context_rag.py +0 -0
  107. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/rag/rag_config.py +0 -0
  108. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/rag/rag_entry.py +0 -0
  109. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/rag/raw_rag.py +0 -0
  110. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/rag/relevant_utils.py +0 -0
  111. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/rag/simple_directory_reader.py +0 -0
  112. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/rag/simple_rag.py +0 -0
  113. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/rag/stream_event/__init__.py +0 -0
  114. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/rag/stream_event/event_writer.py +0 -0
  115. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/rag/stream_event/types.py +0 -0
  116. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/rag/token_checker.py +0 -0
  117. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/rag/token_counter.py +0 -0
  118. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/rag/token_limiter.py +0 -0
  119. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/rag/types.py +0 -0
  120. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/rag/utils.py +0 -0
  121. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/rag/variable_holder.py +0 -0
  122. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/regexproject/__init__.py +0 -0
  123. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/suffixproject/__init__.py +0 -0
  124. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/tsproject/__init__.py +0 -0
  125. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/utils/__init__.py +0 -0
  126. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/utils/_markitdown.py +0 -0
  127. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/utils/conversation_store.py +0 -0
  128. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/utils/llm_client_interceptors.py +0 -0
  129. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/utils/log_capture.py +0 -0
  130. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/utils/multi_turn.py +0 -0
  131. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/utils/operate_config_api.py +0 -0
  132. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/utils/print_table.py +0 -0
  133. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/utils/queue_communicate.py +0 -0
  134. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/utils/request_event_queue.py +0 -0
  135. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/utils/request_queue.py +0 -0
  136. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/utils/rest.py +0 -0
  137. {auto_coder-0.1.222 → auto_coder-0.1.224}/src/autocoder/utils/tests.py +0 -0
  138. {auto_coder-0.1.222 → auto_coder-0.1.224}/tests/test_action_regex_project.py +0 -0
  139. {auto_coder-0.1.222 → auto_coder-0.1.224}/tests/test_chat_auto_coder.py +0 -0
  140. {auto_coder-0.1.222 → auto_coder-0.1.224}/tests/test_code_auto_merge_editblock.py +0 -0
  141. {auto_coder-0.1.222 → auto_coder-0.1.224}/tests/test_command_completer.py +0 -0
  142. {auto_coder-0.1.222 → auto_coder-0.1.224}/tests/test_planner.py +0 -0
  143. {auto_coder-0.1.222 → auto_coder-0.1.224}/tests/test_queue_communicate.py +0 -0
  144. {auto_coder-0.1.222 → auto_coder-0.1.224}/tests/test_symbols_utils.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: auto-coder
3
- Version: 0.1.222
3
+ Version: 0.1.224
4
4
  Summary: AutoCoder: AutoCoder
5
5
  Author: allwefantasy
6
6
  Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: auto-coder
3
- Version: 0.1.222
3
+ Version: 0.1.224
4
4
  Summary: AutoCoder: AutoCoder
5
5
  Author: allwefantasy
6
6
  Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
@@ -11,6 +11,8 @@ src/autocoder/__init__.py
11
11
  src/autocoder/auto_coder.py
12
12
  src/autocoder/auto_coder_lang.py
13
13
  src/autocoder/auto_coder_rag.py
14
+ src/autocoder/auto_coder_rag_client_mcp.py
15
+ src/autocoder/auto_coder_rag_mcp.py
14
16
  src/autocoder/auto_coder_server.py
15
17
  src/autocoder/benchmark.py
16
18
  src/autocoder/chat_auto_coder.py
@@ -58,6 +60,7 @@ src/autocoder/common/llm_rerank.py
58
60
  src/autocoder/common/mcp_hub.py
59
61
  src/autocoder/common/mcp_server.py
60
62
  src/autocoder/common/mcp_tools.py
63
+ src/autocoder/common/memory_manager.py
61
64
  src/autocoder/common/recall_validation.py
62
65
  src/autocoder/common/screenshots.py
63
66
  src/autocoder/common/search.py
@@ -40,6 +40,7 @@ from rich.panel import Panel
40
40
  from rich.markdown import Markdown
41
41
  from rich.live import Live
42
42
  from autocoder.auto_coder_lang import get_message
43
+ from autocoder.common.memory_manager import save_to_memory_file
43
44
 
44
45
  console = Console()
45
46
 
@@ -792,7 +793,7 @@ def main(input_args: Optional[List[str]] = None):
792
793
  pre_conversations.append(
793
794
  {
794
795
  "role": "user",
795
- "content": f"下面是一些文档和源码,如果用户的问题和他们相关,请参考他们:\n <files>\n{file_content}</files>",
796
+ "content": f"请阅读下面的代码和文档:\n\n <files>\n{file_content}\n</files>",
796
797
  },
797
798
  )
798
799
  pre_conversations.append(
@@ -819,7 +820,7 @@ def main(input_args: Optional[List[str]] = None):
819
820
  pre_conversations.append(
820
821
  {
821
822
  "role": "user",
822
- "content": f"下面是一些文档和源码,如果用户的问题和他们相关,请参考他们:\n <files>{s}</files>",
823
+ "content": f"请阅读下面的代码和文档:\n\n <files>\n{s}\n</files>",
823
824
  }
824
825
  )
825
826
  pre_conversations.append(
@@ -827,7 +828,7 @@ def main(input_args: Optional[List[str]] = None):
827
828
  source_count += 1
828
829
 
829
830
  loaded_conversations = pre_conversations + \
830
- chat_history["ask_conversation"]
831
+ chat_history["ask_conversation"]
831
832
 
832
833
  if args.human_as_model:
833
834
  console = Console()
@@ -836,12 +837,10 @@ def main(input_args: Optional[List[str]] = None):
836
837
  def chat_with_human_as_model(
837
838
  source_codes, pre_conversations, last_conversation
838
839
  ):
839
- """
840
- <files>
841
- {% if source_codes %}
840
+ """
841
+ {% if source_codes %}
842
842
  {{ source_codes }}
843
- {% endif %}
844
- </files>
843
+ {% endif %}
845
844
 
846
845
  {% if pre_conversations %}
847
846
  下面是我们之间的历史对话,假设我是A,你是B。
@@ -853,7 +852,7 @@ def main(input_args: Optional[List[str]] = None):
853
852
  {% endif %}
854
853
 
855
854
 
856
- 参考上面的文件以及对话,回答用户的问题。
855
+ 参考上面的文件以及历史对话,回答用户的问题。
857
856
  用户的问题: {{ last_conversation.content }}
858
857
  """
859
858
 
@@ -942,9 +941,14 @@ def main(input_args: Optional[List[str]] = None):
942
941
  ),
943
942
  )
944
943
 
944
+ if "save" in args.action:
945
+ save_to_memory_file(ask_conversation=chat_history["ask_conversation"],
946
+ query=args.query,
947
+ response=result)
948
+ print("Saved to your memory")
945
949
  return {}
946
950
 
947
- if args.action == "rag":
951
+ if "rag" in args.action:
948
952
  args.enable_rag_search = True
949
953
  args.enable_rag_context = False
950
954
  rag = RAGFactory.get_rag(llm=chat_llm, args=args, path="")
@@ -952,7 +956,7 @@ def main(input_args: Optional[List[str]] = None):
952
956
  conversations=[{"role": "user", "content": args.query}])[0]
953
957
  v = ([item, None] for item in response)
954
958
 
955
- elif args.action == "mcp":
959
+ elif "mcp" in args.action:
956
960
  from autocoder.common.mcp_server import get_mcp_server, McpRequest, McpInstallRequest, McpRemoveRequest, McpListRequest, McpListRunningRequest, McpRefreshRequest
957
961
  mcp_server = get_mcp_server()
958
962
  response = mcp_server.send_request(
@@ -1027,6 +1031,19 @@ def main(input_args: Optional[List[str]] = None):
1027
1031
  with open(memory_file, "w") as f:
1028
1032
  json.dump(chat_history, f, ensure_ascii=False)
1029
1033
 
1034
+ if "copy" in args.action:
1035
+ #copy assistant_response to clipboard
1036
+ import pyperclip
1037
+ try:
1038
+ pyperclip.copy(assistant_response)
1039
+ except:
1040
+ print("pyperclip not installed or clipboard is not supported, instruction will not be copied to clipboard.")
1041
+
1042
+ if "save" in args.action:
1043
+ save_to_memory_file(ask_conversation=chat_history["ask_conversation"],
1044
+ query=args.query,
1045
+ response=assistant_response)
1046
+ print("Saved to your memory")
1030
1047
  return
1031
1048
 
1032
1049
  else:
@@ -0,0 +1,170 @@
1
+ from typing import Any, List, Dict, Generator, Optional
2
+ import asyncio
3
+ import httpx
4
+ import argparse
5
+ from mcp.server.models import InitializationOptions
6
+ import mcp.types as types
7
+ from mcp.server import NotificationOptions, Server
8
+ import mcp.server.stdio
9
+ from autocoder.common import AutoCoderArgs
10
+ from byzerllm import ByzerLLM
11
+ from autocoder.lang import lang_desc
12
+ import locale
13
+ import pkg_resources
14
+ from openai import OpenAI
15
+
16
class AutoCoderRAGClientMCP:
    """MCP stdio server that forwards RAG requests to a remote auto-coder
    RAG service through its OpenAI-compatible HTTP endpoint.

    Exposes two tools:
      - rag-search: retrieve matching document contexts only.
      - rag-chat:   full RAG-augmented chat answer (streamed, then joined).
    """

    def __init__(self, llm: ByzerLLM, args: AutoCoderArgs):
        self.llm = llm
        self.args = args

        if not args.rag_url:
            raise ValueError("rag_url is required for RAG client mode")

        # Normalize the endpoint: ensure an http:// scheme and the /v1
        # suffix expected by the OpenAI-compatible API.
        if not args.rag_url.startswith("http://"):
            args.rag_url = f"http://{args.rag_url}"

        if not args.rag_url.endswith("/v1"):
            args.rag_url = args.rag_url.rstrip("/") + "/v1"

        if not args.rag_token:
            raise ValueError("rag_token is required for RAG client mode")

        self.client = OpenAI(api_key=args.rag_token, base_url=args.rag_url)

        self.server = Server("auto_coder_rag_client")

    async def setup_server(self):
        """Register tool-listing and tool-call handlers on the MCP server."""

        @self.server.list_tools()
        async def handle_list_tools() -> List[types.Tool]:
            return [
                types.Tool(
                    name="rag-search",
                    description="Search documents using RAG",
                    inputSchema={
                        "type": "object",
                        "properties": {
                            "query": {
                                "type": "string",
                                "description": "Search query",
                            },
                        },
                        "required": ["query"],
                    },
                ),
                types.Tool(
                    name="rag-chat",
                    description="Chat with documents using RAG",
                    inputSchema={
                        "type": "object",
                        "properties": {
                            "query": {
                                "type": "string",
                                "description": "Chat query",
                            },
                        },
                        "required": ["query"],
                    },
                ),
            ]

        @self.server.call_tool()
        async def handle_call_tool(
            name: str, arguments: Dict[str, Any] | None
        ) -> List[types.TextContent | types.ImageContent | types.EmbeddedResource]:
            # BUGFIX: the original referenced json.dumps without importing
            # json anywhere in the module, raising NameError on the first
            # rag-search call. A function-scope import keeps the fix local.
            import json

            if not arguments:
                raise ValueError("Missing arguments")

            if name == "rag-search":
                query = arguments.get("query")
                if not query:
                    raise ValueError("Missing query parameter")

                # only_contexts asks the remote RAG service to return the
                # retrieved document contexts instead of a generated answer.
                response = self.client.chat.completions.create(
                    messages=[{"role": "user", "content": json.dumps({
                        "query": query,
                        "only_contexts": True
                    })}],
                    model=self.args.model,
                    max_tokens=self.args.rag_params_max_tokens,
                )
                result = response.choices[0].message.content

                return [
                    types.TextContent(
                        type="text",
                        text=f"Search results for '{query}':\n\n{result}"
                    )
                ]

            elif name == "rag-chat":
                query = arguments.get("query")
                if not query:
                    raise ValueError("Missing query parameter")

                response = self.client.chat.completions.create(
                    messages=[{"role": "user", "content": query}],
                    model=self.args.model,
                    stream=True,
                    max_tokens=self.args.rag_params_max_tokens
                )

                # Accumulate the streamed delta chunks into one string.
                full_response = ""
                for chunk in response:
                    if chunk.choices[0].delta.content is not None:
                        full_response += chunk.choices[0].delta.content

                return [
                    types.TextContent(
                        type="text",
                        text=f"Chat response for '{query}':\n\n{full_response}"
                    )
                ]

            else:
                raise ValueError(f"Unknown tool: {name}")

    async def run(self):
        """Serve MCP requests over stdio until the stream closes."""
        async with mcp.server.stdio.stdio_server() as (read_stream, write_stream):
            await self.server.run(
                read_stream,
                write_stream,
                InitializationOptions(
                    server_name="auto_coder_rag_client",
                    server_version="0.1.0",
                    capabilities=self.server.get_capabilities(
                        notification_options=NotificationOptions(),
                        experimental_capabilities={},
                    ),
                ),
            )
141
+
142
def parse_args(input_args: Optional[List[str]] = None) -> AutoCoderArgs:
    """Build AutoCoderArgs for the RAG client MCP server from CLI flags."""
    # Choose help-text language from the system locale (zh vs en).
    system_lang, _ = locale.getdefaultlocale()
    is_zh = bool(system_lang and system_lang.startswith("zh"))
    desc = lang_desc["zh" if is_zh else "en"]

    parser = argparse.ArgumentParser(description="Auto Coder RAG Client MCP Server")
    parser.add_argument("--rag_url", required=True, help="RAG server URL")
    parser.add_argument("--rag_token", required=True, help="RAG server token")
    parser.add_argument("--model", default="deepseek_chat", help=desc["model"])
    parser.add_argument(
        "--rag_params_max_tokens",
        type=int,
        default=4096,
        help="Max tokens for RAG response",
    )

    parsed = parser.parse_args(input_args)
    return AutoCoderArgs(**vars(parsed))
155
+
156
async def main():
    """Entry point: parse CLI flags, build the LLM client, serve MCP over stdio."""
    args = parse_args()

    # Default LLM used for request metadata; RAG calls go to the remote service.
    llm = ByzerLLM()
    llm.setup_default_model_name(args.model)

    server = AutoCoderRAGClientMCP(llm=llm, args=args)
    await server.setup_server()
    await server.run()

if __name__ == "__main__":
    asyncio.run(main())
@@ -0,0 +1,193 @@
1
+ from typing import Any, List, Dict, Generator, Optional
2
+ import asyncio
3
+ import httpx
4
+ import argparse
5
+ from mcp.server.models import InitializationOptions
6
+ import mcp.types as types
7
+ from mcp.server import NotificationOptions, Server
8
+ import mcp.server.stdio
9
+ from autocoder.rag.long_context_rag import LongContextRAG
10
+ from autocoder.common import AutoCoderArgs
11
+ from byzerllm import ByzerLLM
12
+ from autocoder.lang import lang_desc
13
+ import locale
14
+ import pkg_resources
15
+
16
class AutoCoderRAGMCP:
    """MCP stdio server backed by a local LongContextRAG index.

    Provides a `rag-search` tool (document retrieval) and a `rag-chat`
    tool (RAG-augmented chat) over the Model Context Protocol.
    """

    def __init__(self, llm: ByzerLLM, args: AutoCoderArgs):
        self.llm = llm
        self.args = args
        self.rag = LongContextRAG(
            llm=llm,
            args=args,
            path=args.source_dir,
            tokenizer_path=args.tokenizer_path,
        )
        self.server = Server("auto_coder_rag")

    async def setup_server(self):
        """Attach tool-listing and tool-call handlers to the MCP server."""

        def query_tool(tool_name: str, tool_desc: str, query_desc: str) -> types.Tool:
            # Both tools share the same single-string-argument schema.
            return types.Tool(
                name=tool_name,
                description=tool_desc,
                inputSchema={
                    "type": "object",
                    "properties": {
                        "query": {
                            "type": "string",
                            "description": query_desc,
                        },
                    },
                    "required": ["query"],
                },
            )

        @self.server.list_tools()
        async def handle_list_tools() -> List[types.Tool]:
            return [
                query_tool("rag-search", "Search documents using RAG", "Search query"),
                query_tool("rag-chat", "Chat with documents using RAG", "Chat query"),
            ]

        @self.server.call_tool()
        async def handle_call_tool(
            name: str, arguments: Dict[str, Any] | None
        ) -> List[types.TextContent | types.ImageContent | types.EmbeddedResource]:
            if not arguments:
                raise ValueError("Missing arguments")

            if name not in ("rag-search", "rag-chat"):
                raise ValueError(f"Unknown tool: {name}")

            query = arguments.get("query")
            if not query:
                raise ValueError("Missing query parameter")

            if name == "rag-search":
                results = self.rag.search(query)
                # Show each hit's module name plus a 200-char code preview.
                summary = "\n".join(
                    f"- {result.module_name}: {result.source_code[:200]}..."
                    for result in results
                )
                text = f"Search results for '{query}':\n\n" + summary
            else:
                stream, _ = self.rag.stream_chat_oai(
                    conversations=[{"role": "user", "content": query}]
                )
                # Drain the streamed chunks into a single response string.
                full_response = "".join(stream)
                text = f"Chat response for '{query}':\n\n{full_response}"

            return [types.TextContent(type="text", text=text)]

    async def run(self):
        """Run the MCP server over a stdio transport."""
        async with mcp.server.stdio.stdio_server() as (read_stream, write_stream):
            await self.server.run(
                read_stream,
                write_stream,
                InitializationOptions(
                    server_name="auto_coder_rag",
                    server_version="0.1.0",
                    capabilities=self.server.get_capabilities(
                        notification_options=NotificationOptions(),
                        experimental_capabilities={},
                    ),
                ),
            )
+ )
118
+
119
def parse_args(input_args: Optional[List[str]] = None) -> "tuple[AutoCoderArgs, argparse.Namespace]":
    """Parse CLI flags for the RAG MCP server.

    Returns:
        A tuple of (AutoCoderArgs built from the flags, the raw
        argparse.Namespace).  The original annotation claimed a bare
        AutoCoderArgs return, which did not match the actual tuple value
        unpacked by the caller (`args, raw_args = parse_args()`).
    """
    # Resolve the bundled tokenizer; fall back to None if the package
    # data file is missing.
    try:
        tokenizer_path = pkg_resources.resource_filename(
            "autocoder", "data/tokenizer.json"
        )
    except FileNotFoundError:
        tokenizer_path = None

    # Pick help-text language from the system locale (zh vs en).
    system_lang, _ = locale.getdefaultlocale()
    lang = "zh" if system_lang and system_lang.startswith("zh") else "en"
    desc = lang_desc[lang]

    parser = argparse.ArgumentParser(description="Auto Coder RAG MCP Server")
    parser.add_argument("--source_dir", default=".", help="Source directory path")
    parser.add_argument("--tokenizer_path", default=tokenizer_path, help="Path to tokenizer file")
    parser.add_argument("--model", default="deepseek_chat", help=desc["model"])
    parser.add_argument("--index_model", default="", help=desc["index_model"])
    parser.add_argument("--emb_model", default="", help=desc["emb_model"])
    parser.add_argument("--ray_address", default="auto", help=desc["ray_address"])
    parser.add_argument("--required_exts", default="", help=desc["doc_build_parse_required_exts"])
    parser.add_argument("--rag_doc_filter_relevance", type=int, default=5, help="Relevance score threshold for document filtering")
    parser.add_argument("--rag_context_window_limit", type=int, default=56000, help="Context window limit for RAG")
    parser.add_argument("--full_text_ratio", type=float, default=0.7, help="Ratio of full text area in context window")
    parser.add_argument("--segment_ratio", type=float, default=0.2, help="Ratio of segment area in context window")
    parser.add_argument("--index_filter_workers", type=int, default=5, help="Number of workers for document filtering")
    parser.add_argument("--index_filter_file_num", type=int, default=3, help="Maximum number of files to filter")
    parser.add_argument("--host", default="", help="Server host address")
    parser.add_argument("--port", type=int, default=8000, help="Server port")
    parser.add_argument("--monitor_mode", action="store_true", help="Enable document monitoring mode")
    parser.add_argument("--enable_hybrid_index", action="store_true", help="Enable hybrid index")
    parser.add_argument("--disable_auto_window", action="store_true", help="Disable automatic window adaptation")
    parser.add_argument("--disable_segment_reorder", action="store_true", help="Disable segment reordering")
    parser.add_argument("--disable_inference_enhance", action="store_true", help="Disable inference enhancement")
    parser.add_argument("--inference_deep_thought", action="store_true", help="Enable deep thought in inference")
    parser.add_argument("--inference_slow_without_deep_thought", action="store_true", help="Enable slow inference without deep thought")
    parser.add_argument("--inference_compute_precision", type=int, default=64, help="Inference compute precision")
    parser.add_argument("--data_cells_max_num", type=int, default=2000, help="Maximum number of data cells to process")
    parser.add_argument("--recall_model", default="", help="Model used for document recall")
    parser.add_argument("--chunk_model", default="", help="Model used for document chunking")
    parser.add_argument("--qa_model", default="", help="Model used for question answering")

    args = parser.parse_args(input_args)
    return AutoCoderArgs(**vars(args)), args
162
+
163
async def main():
    """Entry point: parse flags, build the LLM (plus optional sub-models),
    and run the RAG MCP server over stdio."""
    args, cli_args = parse_args()

    llm = ByzerLLM()
    llm.setup_default_model_name(args.model)

    # Wire up the optional specialized sub-models only when the user
    # supplied a name for them on the command line.
    for role in ("recall_model", "chunk_model", "qa_model"):
        if getattr(cli_args, role):
            sub_llm = ByzerLLM()
            sub_llm.setup_default_model_name(getattr(args, role))
            llm.setup_sub_client(role, sub_llm)

    server = AutoCoderRAGMCP(llm=llm, args=args)
    await server.setup_server()
    await server.run()

if __name__ == "__main__":
    asyncio.run(main())
@@ -48,6 +48,7 @@ from autocoder.agent.auto_guess_query import AutoGuessQuery
48
48
  from autocoder.common.mcp_server import get_mcp_server, McpRequest, McpInstallRequest, McpRemoveRequest, McpListRequest, McpListRunningRequest, McpRefreshRequest
49
49
  import byzerllm
50
50
  from byzerllm.utils import format_str_jinja2
51
+ from autocoder.common.memory_manager import get_global_memory_file_paths
51
52
 
52
53
 
53
54
  class SymbolItem(BaseModel):
@@ -1657,6 +1658,9 @@ def commit(query: str):
1657
1658
  return_paths=True
1658
1659
  )
1659
1660
 
1661
+ if conf.get("enable_global_memory", "true") in ["true", "True",True]:
1662
+ yaml_config["urls"] += get_global_memory_file_paths()
1663
+
1660
1664
  # 临时保存yaml文件,然后读取yaml文件,转换为args
1661
1665
  temp_yaml = os.path.join("actions", f"{uuid.uuid4()}.yml")
1662
1666
  try:
@@ -1740,11 +1744,14 @@ def coding(query: str):
1740
1744
  converted_value = convert_config_value(key, value)
1741
1745
  if converted_value is not None:
1742
1746
  yaml_config[key] = converted_value
1743
-
1747
+
1744
1748
  yaml_config["urls"] = current_files + get_llm_friendly_package_docs(
1745
1749
  return_paths=True
1746
1750
  )
1747
1751
 
1752
+ if conf.get("enable_global_memory", "true") in ["true", "True",True]:
1753
+ yaml_config["urls"] += get_global_memory_file_paths()
1754
+
1748
1755
  # handle image
1749
1756
  v = Image.convert_image_paths_from(query)
1750
1757
  yaml_config["query"] = v
@@ -1794,7 +1801,7 @@ def coding(query: str):
1794
1801
 
1795
1802
  yaml_config[
1796
1803
  "context"
1797
- ] += f"下面是我们的历史对话,参考我们的历史对话从而更好的理解需求和修改代码。\n\n<history>\n"
1804
+ ] += f"下面是我们的历史对话,参考我们的历史对话从而更好的理解需求和修改代码: \n\n<history>\n"
1798
1805
  for conv in conversations:
1799
1806
  if conv["role"] == "user":
1800
1807
  yaml_config["context"] += f"用户: {conv['content']}\n"
@@ -1855,6 +1862,9 @@ def chat(query: str):
1855
1862
  return_paths=True
1856
1863
  )
1857
1864
 
1865
+ if conf.get("enable_global_memory", "true") in ["true", "True",True]:
1866
+ current_files += get_global_memory_file_paths()
1867
+
1858
1868
  yaml_config["urls"] = current_files
1859
1869
 
1860
1870
  if "emb_model" in conf:
@@ -1864,14 +1874,24 @@ def chat(query: str):
1864
1874
  if is_new:
1865
1875
  query = query.replace("/new", "", 1).strip()
1866
1876
 
1877
+ yaml_config["action"] = []
1878
+
1867
1879
  if "/mcp " in query:
1868
- yaml_config["action"] = "mcp"
1880
+ yaml_config["action"].append("mcp")
1869
1881
  query = query.replace("/mcp ", "", 1).strip()
1870
1882
 
1871
1883
  if "/rag " in query:
1872
- yaml_config["action"] = "rag"
1884
+ yaml_config["action"].append("rag")
1873
1885
  query = query.replace("/rag ", "", 1).strip()
1874
1886
 
1887
+ if "/copy" in query:
1888
+ yaml_config["action"].append("copy")
1889
+ query = query.replace("/copy", "", 1).strip()
1890
+
1891
+ if "/save" in query:
1892
+ yaml_config["action"].append("save")
1893
+ query = query.replace("/save", "", 1).strip()
1894
+
1875
1895
  is_review = query.strip().startswith("/review")
1876
1896
  if is_review:
1877
1897
  query = query.replace("/review", "", 1).strip()
@@ -348,7 +348,8 @@ class AutoCoderArgs(pydantic.BaseModel):
348
348
  data_cells_max_num: Optional[int] = 2000
349
349
  generate_times_same_model: Optional[int] = 1
350
350
 
351
- action: Optional[str] = None
351
+ action: List[str] = []
352
+ enable_global_memory: Optional[bool] = True
352
353
 
353
354
  class Config:
354
355
  protected_namespaces = ()
@@ -13,7 +13,7 @@ COMMANDS = {
13
13
  "/sd": {},
14
14
  },
15
15
  "/coding": {"/apply": {}, "/next": {}},
16
- "/chat": {"/new": {}, "/mcp": {}, "/rag": {}, "/review": {}, "/no_context": {}},
16
+ "/chat": {"/new": {}, "/save": {}, "/copy":{}, "/mcp": {}, "/rag": {}, "/review": {}, "/no_context": {}},
17
17
  "/mcp": {
18
18
  "/add": "",
19
19
  "/remove": "",
@@ -192,7 +192,7 @@ class McpHub:
192
192
  "PATH": os.environ.get("PATH", "")},
193
193
  )
194
194
 
195
- # Create transport using context manager
195
+ # Create transport using context manager
196
196
  transport_manager = stdio_client(server_params)
197
197
  transport = await transport_manager.__aenter__()
198
198
  try:
@@ -204,9 +204,9 @@ class McpHub:
204
204
  self.connections[name] = connection
205
205
 
206
206
  # Update server status and fetch capabilities
207
- server.status = "connected"
208
- server.tools = await self._fetch_tools(name)
209
- server.resources = await self._fetch_resources(name)
207
+ server.status = "connected"
208
+ server.tools = await self._fetch_tools(name)
209
+ server.resources = await self._fetch_resources(name)
210
210
  server.resource_templates = await self._fetch_resource_templates(name)
211
211
 
212
212
  except Exception as e: