auto-coder 0.1.353__py3-none-any.whl → 0.1.354__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of auto-coder might be problematic.
- {auto_coder-0.1.353.dist-info → auto_coder-0.1.354.dist-info}/METADATA +1 -1
- {auto_coder-0.1.353.dist-info → auto_coder-0.1.354.dist-info}/RECORD +41 -29
- autocoder/auto_coder_rag.py +37 -1
- autocoder/auto_coder_runner.py +8 -0
- autocoder/commands/auto_command.py +59 -131
- autocoder/commands/tools.py +1 -1
- autocoder/common/__init__.py +1 -1
- autocoder/common/conversations/__init__.py +52 -0
- autocoder/common/conversations/compatibility.py +303 -0
- autocoder/common/conversations/conversation_manager.py +502 -0
- autocoder/common/conversations/example.py +152 -0
- autocoder/common/file_monitor/__init__.py +5 -0
- autocoder/common/file_monitor/monitor.py +383 -0
- autocoder/common/ignorefiles/__init__.py +4 -0
- autocoder/common/ignorefiles/ignore_file_utils.py +103 -0
- autocoder/common/ignorefiles/test_ignore_file_utils.py +91 -0
- autocoder/common/rulefiles/__init__.py +15 -0
- autocoder/common/rulefiles/autocoderrules_utils.py +173 -0
- autocoder/common/save_formatted_log.py +54 -0
- autocoder/common/v2/agent/agentic_edit.py +7 -36
- autocoder/common/v2/agent/agentic_edit_tools/list_files_tool_resolver.py +1 -1
- autocoder/common/v2/agent/agentic_edit_tools/search_files_tool_resolver.py +73 -43
- autocoder/common/v2/code_editblock_manager.py +20 -8
- autocoder/index/index.py +1 -1
- autocoder/models.py +22 -9
- autocoder/rag/api_server.py +14 -2
- autocoder/rag/cache/simple_cache.py +63 -33
- autocoder/rag/loaders/docx_loader.py +1 -1
- autocoder/rag/loaders/filter_utils.py +133 -76
- autocoder/rag/loaders/image_loader.py +15 -3
- autocoder/rag/loaders/pdf_loader.py +2 -2
- autocoder/rag/long_context_rag.py +11 -0
- autocoder/rag/qa_conversation_strategy.py +5 -31
- autocoder/rag/utils.py +21 -2
- autocoder/utils/_markitdown.py +66 -25
- autocoder/utils/auto_coder_utils/chat_stream_out.py +1 -0
- autocoder/version.py +1 -1
- {auto_coder-0.1.353.dist-info → auto_coder-0.1.354.dist-info}/LICENSE +0 -0
- {auto_coder-0.1.353.dist-info → auto_coder-0.1.354.dist-info}/WHEEL +0 -0
- {auto_coder-0.1.353.dist-info → auto_coder-0.1.354.dist-info}/entry_points.txt +0 -0
- {auto_coder-0.1.353.dist-info → auto_coder-0.1.354.dist-info}/top_level.txt +0 -0
autocoder/common/rulefiles/autocoderrules_utils.py
ADDED
@@ -0,0 +1,173 @@
+import os
+from pathlib import Path
+from threading import Lock
+import threading
+from typing import Dict, List, Optional
+from loguru import logger
+
+# Try to import FileMonitor
+try:
+    from autocoder.common.file_monitor.monitor import FileMonitor, Change
+except ImportError:
+    # If the import fails, fall back to a no-op implementation
+    logger.warning("Warning: FileMonitor could not be imported; rule-file change monitoring will be unavailable")
+    FileMonitor = None
+    Change = None
+
+
+class AutocoderRulesManager:
+    """
+    Manages and monitors the rule files in the autocoderrules directory.
+
+    Implements the singleton pattern so there is only one rules-manager instance globally.
+    Watches the rule files and reloads them automatically when they change.
+    """
+    _instance = None
+    _lock = Lock()
+
+    def __new__(cls, project_root: Optional[str] = None):
+        if not cls._instance:
+            with cls._lock:
+                if not cls._instance:
+                    cls._instance = super(AutocoderRulesManager, cls).__new__(cls)
+                    cls._instance._initialized = False
+        return cls._instance
+
+    def __init__(self, project_root: Optional[str] = None):
+        if self._initialized:
+            return
+        self._initialized = True
+
+        self._rules: Dict[str, str] = {}  # Rule file contents: {file_path: content}
+        self._rules_dir: Optional[str] = None  # Rules directory currently in use
+        self._file_monitor = None  # FileMonitor instance
+        self._monitored_dirs: List[str] = []  # Directories being monitored
+        self._project_root = project_root if project_root is not None else os.getcwd()  # Project root directory
+
+        # Load the rules
+        self._load_rules()
+        # Set up file monitoring
+        self._setup_file_monitor()
+
+    def _load_rules(self):
+        """
+        Load rule files in priority order.
+        Priority order:
+        1. .autocoderrules/
+        2. .auto-coder/.autocoderrules/
+        3. .auto-coder/autocoderrules/
+        """
+        self._rules = {}
+        project_root = self._project_root
+
+        # Candidate rules directories, in priority order
+        rules_dirs = [
+            os.path.join(project_root, ".autocoderrules"),
+            os.path.join(project_root, ".auto-coder", ".autocoderrules"),
+            os.path.join(project_root, ".auto-coder", "autocoderrules")
+        ]
+
+        # Pick the first directory that exists, by priority
+        found_dir = None
+        for rules_dir in rules_dirs:
+            if os.path.isdir(rules_dir):
+                found_dir = rules_dir
+                break
+
+        if not found_dir:
+            logger.info("No rules directory found")
+            return
+
+        self._rules_dir = found_dir
+        logger.info(f"Using rules directory: {self._rules_dir}")
+
+        # Load every .md file in the directory
+        try:
+            for fname in os.listdir(self._rules_dir):
+                if fname.endswith(".md"):
+                    fpath = os.path.join(self._rules_dir, fname)
+                    try:
+                        with open(fpath, "r", encoding="utf-8") as f:
+                            content = f.read()
+                        self._rules[fpath] = content
+                        logger.info(f"Loaded rule file: {fpath}")
+                    except Exception as e:
+                        logger.info(f"Error loading rule file {fpath}: {e}")
+                        continue
+        except Exception as e:
+            logger.info(f"Error reading rules directory {self._rules_dir}: {e}")
+
+    def _setup_file_monitor(self):
+        """Set up file monitoring so rules are reloaded when rule files or directories change"""
+        if FileMonitor is None or not self._rules_dir:
+            return
+
+        try:
+            # Project root directory
+            project_root = self._project_root
+
+            # Create the FileMonitor instance
+            self._file_monitor = FileMonitor(root_dir=project_root)
+
+            # Monitor every candidate rules directory
+            self._monitored_dirs = [
+                os.path.join(project_root, ".autocoderrules"),
+                os.path.join(project_root, ".auto-coder", ".autocoderrules"),
+                os.path.join(project_root, ".auto-coder", "autocoderrules")
+            ]
+
+            # Register directory watches
+            for dir_path in self._monitored_dirs:
+                # Create the directory if it does not exist
+                os.makedirs(dir_path, exist_ok=True)
+                # Register the watch
+                self._file_monitor.register(dir_path, self._on_rules_changed)
+                logger.info(f"Registered rules directory watch: {dir_path}")
+
+            # Start monitoring
+            if not self._file_monitor.is_running():
+                self._file_monitor.start()
+                logger.info("Rule file monitoring started")
+
+        except Exception as e:
+            logger.warning(f"Error setting up rule file monitoring: {e}")
+
+    def _on_rules_changed(self, change_type: Change, changed_path: str):
+        """Callback invoked when a rule file or directory changes"""
+        # Check whether the change is rules-related
+        is_rule_related = False
+
+        # Is it an .md file?
+        if changed_path.endswith(".md"):
+            # Is the file inside one of the monitored directories?
+            for dir_path in self._monitored_dirs:
+                if os.path.abspath(changed_path).startswith(os.path.abspath(dir_path)):
+                    is_rule_related = True
+                    break
+        else:
+            # Is it one of the monitored directories itself?
+            for dir_path in self._monitored_dirs:
+                if os.path.abspath(changed_path) == os.path.abspath(dir_path):
+                    is_rule_related = True
+                    break
+
+        if is_rule_related:
+            logger.info(f"Detected rules-related change ({change_type.name}): {changed_path}")
+            # Reload the rules
+            self._load_rules()
+            logger.info("Rules reloaded")
+
+    def get_rules(self) -> Dict[str, str]:
+        """Return the contents of all rule files"""
+        return self._rules.copy()
+
+
+# Module-level singleton
+_rules_manager = None
+
+def get_rules(project_root: Optional[str] = None) -> Dict[str, str]:
+    """Return the contents of all rule files; an explicit project root may be given"""
+    global _rules_manager
+    if _rules_manager is None:
+        _rules_manager = AutocoderRulesManager(project_root=project_root)
+    return _rules_manager.get_rules()
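Not shown in the diff but useful for orientation: the module's public entry point is the module-level get_rules(). A minimal usage sketch, where the project path and rule file are assumptions for illustration:

from autocoder.common.rulefiles.autocoderrules_utils import get_rules

# Hypothetical project containing .autocoderrules/coding_style.md
rules = get_rules(project_root="/path/to/project")  # {file_path: content}
for path, content in rules.items():
    print(path, len(content))

Because the manager is a singleton, the project_root passed on the first call wins; later calls with a different root return the already-initialized instance's rules.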
autocoder/common/save_formatted_log.py
ADDED
@@ -0,0 +1,54 @@
+import os
+import json
+import uuid
+from datetime import datetime
+
+def save_formatted_log(project_root, json_text, suffix):
+    """
+    Save a JSON log as a formatted markdown file under project_root/.cache/logs.
+    Filename: <YYYYmmdd_HHMMSS>_<uuid>_<suffix>.md
+    Args:
+        project_root (str): The root directory of the project.
+        json_text (str): The JSON string to be formatted and saved.
+        suffix (str): The suffix for the filename.
+    """
+    # Parse JSON
+    try:
+        data = json.loads(json_text)
+    except Exception as e:
+        raise ValueError(f"Invalid JSON provided: {e}")
+
+    # Format as markdown with recursive depth
+    def to_markdown(obj, level=1):
+        lines = []
+        if isinstance(obj, dict):
+            for key, value in obj.items():
+                lines.append(f"{'#' * (level + 1)} {key}\n")
+                lines.extend(to_markdown(value, level + 1))
+        elif isinstance(obj, list):
+            for idx, item in enumerate(obj, 1):
+                lines.append(f"{'#' * (level + 1)} Item {idx}\n")
+                lines.extend(to_markdown(item, level + 1))
+        else:
+            lines.append(str(obj) + "\n")
+        return lines
+
+    md_lines = ["# Log Entry\n"]
+    md_lines.extend(to_markdown(data, 1))
+    md_content = "\n".join(md_lines)
+
+    # Prepare directory
+    logs_dir = os.path.join(project_root, ".cache", "logs")
+    os.makedirs(logs_dir, exist_ok=True)
+
+    # Prepare filename
+    now = datetime.now().strftime("%Y%m%d_%H%M%S")
+    unique_id = str(uuid.uuid4())
+    filename = f"{now}_{unique_id}_{suffix}.md"
+    filepath = os.path.join(logs_dir, filename)
+
+    # Save file
+    with open(filepath, "w", encoding="utf-8") as f:
+        f.write(md_content)
+
+    return filepath
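A minimal sketch of calling the new helper; the payload and project root are made up:

import json
from autocoder.common.save_formatted_log import save_formatted_log

payload = json.dumps({"query": "refactor", "files": ["a.py", "b.py"]})
path = save_formatted_log("/path/to/project", payload, suffix="agentic_conversation")
# -> /path/to/project/.cache/logs/<YYYYmmdd_HHMMSS>_<uuid>_agentic_conversation.md

The function raises ValueError on invalid JSON, so callers logging arbitrary strings should serialize with json.dumps first.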
autocoder/common/v2/agent/agentic_edit.py
CHANGED
@@ -1,6 +1,3 @@
-from autocoder.common.v2.agent.agentic_edit_conversation import AgenticConversation
-from enum import Enum
-from enum import Enum
 import json
 import os
 import time
@@ -55,7 +52,7 @@ from autocoder.common.v2.agent.agentic_edit_tools import ( # Import specific re
     AttemptCompletionToolResolver, PlanModeRespondToolResolver, UseMcpToolResolver,
     ListPackageInfoToolResolver
 )
-
+from autocoder.common.rulefiles.autocoderrules_utils import get_rules
 from autocoder.common.v2.agent.agentic_edit_types import (AgenticEditRequest, ToolResult,
                                                           MemoryConfig, CommandConfig, BaseTool,
                                                           ExecuteCommandTool, ReadFileTool,
@@ -119,12 +116,7 @@ class AgenticEdit:
         self.memory_config = memory_config
         self.command_config = command_config # Note: command_config might be unused now
         self.project_type_analyzer = ProjectTypeAnalyzer(
-            args=args, llm=self.llm)
-
-        self.conversation_manager = AgenticConversation(
-            args, self.conversation_history, conversation_name=conversation_name)
-        # History is currently disabled, so clear it
-        self.conversation_manager.clear_history()
+            args=args, llm=self.llm)

         self.shadow_manager = ShadowManager(
             args.source_dir, args.event_file, args.ignore_clean_shadows)
@@ -687,21 +679,8 @@ class AgenticEdit:
         {% endif %}
         """
         import os
-        extra_docs =
-
-            ".auto-coder", "autocoderrules")
-        if os.path.isdir(rules_dir):
-            for fname in os.listdir(rules_dir):
-                if fname.endswith(".md"):
-                    fpath = os.path.join(rules_dir, fname)
-                    try:
-                        with open(fpath, "r", encoding="utf-8") as f:
-                            content = f.read()
-                        key = fpath
-                        extra_docs[key] = content
-                    except Exception:
-                        continue
-
+        extra_docs = get_rules()
+
         env_info = detect_env()
         shell_type = "bash"
         if shells.is_running_in_cmd():
@@ -793,12 +772,10 @@ Below are some files the user is focused on, and the content is up to date. Thes
             "role":"assistant","content":"Ok"
         })

-        logger.info("Adding conversation history")
-        conversations.extend(self.conversation_manager.get_history())
+        logger.info("Adding conversation history")
         conversations.append({
             "role": "user", "content": request.user_input
-        })
-        self.conversation_manager.add_user_message(request.user_input)
+        })

         logger.info(
             f"Initial conversation history size: {len(conversations)}")
@@ -856,9 +833,7 @@ Below are some files the user is focused on, and the content is up to date. Thes
                     conversations.append({
                         "role": "assistant",
                         "content": assistant_buffer + tool_xml
-                    })
-                    self.conversation_manager.add_assistant_message(
-                        assistant_buffer + tool_xml)
+                    })
                     assistant_buffer = ""  # Reset buffer after tool call

                     yield event  # Yield the ToolCallEvent for display
@@ -941,7 +916,6 @@ Below are some files the user is focused on, and the content is up to date. Thes
                         "role": "user",  # Simulating the user providing the tool result
                         "content": error_xml
                     })
-                    self.conversation_manager.add_user_message(error_xml)
                     logger.info(
                         f"Added tool result to conversations for tool {type(tool_obj).__name__}")
                     logger.info(f"Breaking LLM cycle after executing tool: {tool_name}")
@@ -968,12 +942,9 @@ Below are some files the user is focused on, and the content is up to date. Thes
                 logger.info("Adding new assistant message")
                 conversations.append(
                     {"role": "assistant", "content": assistant_buffer})
-                self.conversation_manager.add_assistant_message(
-                    assistant_buffer)
             elif last_message["role"] == "assistant":
                 logger.info("Appending to existing assistant message")
                 last_message["content"] += assistant_buffer
-                self.conversation_manager.append_to_last_message(assistant_buffer)
             # If the loop ends without AttemptCompletion, it means the LLM finished talking
             # without signaling completion. We might just stop or yield a final message.
             # Let's assume it stops here.
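The net effect of these hunks is that agentic_edit.py drops the AgenticConversation-based history bookkeeping and sources its extra_docs from the shared rules manager instead of an inline directory walk. A sketch of what the new call returns and how a prompt section could be assembled from it; the section formatting here is illustrative, not the actual template:

from autocoder.common.rulefiles.autocoderrules_utils import get_rules

extra_docs = get_rules()  # {rule_file_path: markdown_content}
rules_section = "\n\n".join(
    f"# Rule: {path}\n{content}" for path, content in extra_docs.items()
)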
autocoder/common/v2/agent/agentic_edit_tools/list_files_tool_resolver.py
CHANGED
@@ -6,7 +6,7 @@ from loguru import logger
 import typing
 from autocoder.common import AutoCoderArgs

-from autocoder.ignorefiles.ignore_file_utils import should_ignore
+from autocoder.common.ignorefiles.ignore_file_utils import should_ignore

 if typing.TYPE_CHECKING:
     from autocoder.common.v2.agent.agentic_edit import AgenticEdit
autocoder/common/v2/agent/agentic_edit_tools/search_files_tool_resolver.py
CHANGED
@@ -1,4 +1,3 @@
-
 import os
 import re
 import glob
@@ -9,7 +8,7 @@ from loguru import logger
 from autocoder.common import AutoCoderArgs
 import typing

-from autocoder.ignorefiles.ignore_file_utils import should_ignore
+from autocoder.common.ignorefiles.ignore_file_utils import should_ignore

 if typing.TYPE_CHECKING:
     from autocoder.common.v2.agent.agentic_edit import AgenticEdit
@@ -33,14 +32,13 @@ class SearchFilesToolResolver(BaseToolResolver):
         if not absolute_search_path.startswith(absolute_source_dir):
             return ToolResult(success=False, message=f"Error: Access denied. Attempted to search outside the project directory: {search_path_str}")

-        #
-        search_base_path = absolute_search_path
+        # Check if shadow directory exists
         shadow_exists = False
+        shadow_dir_path = None
         if self.shadow_manager:
             try:
                 shadow_dir_path = self.shadow_manager.to_shadow_path(absolute_search_path)
                 if os.path.exists(shadow_dir_path) and os.path.isdir(shadow_dir_path):
-                    search_base_path = shadow_dir_path
                     shadow_exists = True
             except Exception as e:
                 logger.warning(f"Error checking shadow path for {absolute_search_path}: {e}")
@@ -50,51 +48,83 @@ class SearchFilesToolResolver(BaseToolResolver):
             return ToolResult(success=False, message=f"Error: Search path not found: {search_path_str}")
         if os.path.exists(absolute_search_path) and not os.path.isdir(absolute_search_path):
             return ToolResult(success=False, message=f"Error: Search path is not a directory: {search_path_str}")
-        if shadow_exists and not os.path.isdir(
-            return ToolResult(success=False, message=f"Error: Shadow search path is not a directory: {
+        if shadow_exists and not os.path.isdir(shadow_dir_path):
+            return ToolResult(success=False, message=f"Error: Shadow search path is not a directory: {shadow_dir_path}")

-        results = []
         try:
             compiled_regex = re.compile(regex_pattern)
+
+            # Helper function to search in a directory
+            def search_in_dir(base_dir, is_shadow=False):
+                search_results = []
+                search_glob_pattern = os.path.join(base_dir, "**", file_pattern)
+
+                logger.info(f"Searching for regex '{regex_pattern}' in files matching '{file_pattern}' under '{base_dir}' (shadow: {is_shadow}) with ignore rules applied.")
+
+                for filepath in glob.glob(search_glob_pattern, recursive=True):
+                    abs_path = os.path.abspath(filepath)
+                    if should_ignore(abs_path):
+                        continue

+                    if os.path.isfile(filepath):
+                        try:
+                            with open(filepath, 'r', encoding='utf-8', errors='replace') as f:
+                                lines = f.readlines()
+                            for i, line in enumerate(lines):
+                                if compiled_regex.search(line):
+                                    context_start = max(0, i - 2)
+                                    context_end = min(len(lines), i + 3)
+                                    context = "".join([f"{j+1}: {lines[j]}" for j in range(context_start, context_end)])
+
+                                    if is_shadow and self.shadow_manager:
+                                        try:
+                                            abs_project_path = self.shadow_manager.from_shadow_path(filepath)
+                                            relative_path = os.path.relpath(abs_project_path, source_dir)
+                                        except Exception:
+                                            relative_path = os.path.relpath(filepath, source_dir)
+                                    else:
                                         relative_path = os.path.relpath(filepath, source_dir)
+
+                                    search_results.append({
+                                        "path": relative_path,
+                                        "line_number": i + 1,
+                                        "match_line": line.strip(),
+                                        "context": context.strip()
+                                    })
+                        except Exception as e:
+                            logger.warning(f"Could not read or process file {filepath}: {e}")
+                            continue
+
+                return search_results
+
+            # Search in both directories and merge results
+            shadow_results = []
+            source_results = []
+
+            if shadow_exists:
+                shadow_results = search_in_dir(shadow_dir_path, is_shadow=True)
+
+            if os.path.exists(absolute_search_path) and os.path.isdir(absolute_search_path):
+                source_results = search_in_dir(absolute_search_path, is_shadow=False)
+
+            # Merge results, prioritizing shadow results
+            # Create a dictionary for quick lookup
+            results_dict = {}
+            for result in source_results:
+                key = (result["path"], result["line_number"])
+                results_dict[key] = result
+
+            # Override with shadow results
+            for result in shadow_results:
+                key = (result["path"], result["line_number"])
+                results_dict[key] = result
+
+            # Convert back to list
+            merged_results = list(results_dict.values())

-            message = f"Search completed. Found {len(
+            message = f"Search completed. Found {len(merged_results)} matches."
             logger.info(message)
-            return ToolResult(success=True, message=message, content=
+            return ToolResult(success=True, message=message, content=merged_results)

         except re.error as e:
             logger.error(f"Invalid regex pattern '{regex_pattern}': {e}")
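A standalone sketch of the merge policy introduced above: hits are keyed by (path, line_number), and shadow-directory hits override source-tree hits for the same key. The sample data is made up:

source_results = [{"path": "a.py", "line_number": 3, "match_line": "old"}]
shadow_results = [{"path": "a.py", "line_number": 3, "match_line": "edited"}]

results_dict = {}
for result in source_results + shadow_results:  # shadow applied last, so it wins
    results_dict[(result["path"], result["line_number"])] = result

merged = list(results_dict.values())
assert merged[0]["match_line"] == "edited"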
autocoder/common/v2/code_editblock_manager.py
CHANGED
@@ -396,24 +396,36 @@ class CodeEditBlockManager:
     def _format_blocks(merge: MergeCodeWithoutEffect) -> Tuple[str, str]:
         unmerged_formatted_text = ""
         for file_path, head, update in merge.failed_blocks:
-            unmerged_formatted_text += "```lang
-            unmerged_formatted_text +=
-            unmerged_formatted_text += "
+            unmerged_formatted_text += "```lang"
+            unmerged_formatted_text += "\n"
+            unmerged_formatted_text += f"##File: {file_path}"
+            unmerged_formatted_text += "\n"
+            unmerged_formatted_text += "<<<<<<< SEARCH"
+            unmerged_formatted_text += "\n"
             unmerged_formatted_text += head
-            unmerged_formatted_text += "
+            unmerged_formatted_text += "\n"
+            unmerged_formatted_text += "======="
+            unmerged_formatted_text += "\n"
             unmerged_formatted_text += update
-            unmerged_formatted_text += "
+            unmerged_formatted_text += "\n"
+            unmerged_formatted_text += ">>>>>>> REPLACE"
+            unmerged_formatted_text += "\n"
             unmerged_formatted_text += "```"
             unmerged_formatted_text += "\n"

         merged_formatted_text = ""
         if merge.merged_blocks:
             for file_path, head, update in merge.merged_blocks:
-                merged_formatted_text += "```lang
-                merged_formatted_text +=
+                merged_formatted_text += "```lang"
+                merged_formatted_text += "\n"
+                merged_formatted_text += f"##File: {file_path}"
+                merged_formatted_text += "\n"
                 merged_formatted_text += head
-                merged_formatted_text += "
+                merged_formatted_text += "\n"
+                merged_formatted_text += "======="
+                merged_formatted_text += "\n"
                 merged_formatted_text += update
+                merged_formatted_text += "\n"
                 merged_formatted_text += "```"
                 merged_formatted_text += "\n"
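With the truncated string literals restored, _format_blocks emits complete SEARCH/REPLACE editblocks. For one failed block the output looks roughly like the following, with the file name and contents illustrative:

```lang
##File: src/example.py
<<<<<<< SEARCH
<head>
=======
<update>
>>>>>>> REPLACE
```

Merged blocks use the same layout but keep only the ======= separator between head and update, without the SEARCH/REPLACE conflict markers.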
autocoder/index/index.py
CHANGED
@@ -462,7 +462,7 @@ class IndexManager:
     def filter_exclude_files(self, file_path, exclude_patterns):
         # Add filtering via ignore_file_utils
         try:
-            from
+            from autocoder.common.ignorefiles import ignore_file_utils
             if ignore_file_utils.should_ignore(file_path):
                 return True
         except Exception:
autocoder/models.py
CHANGED
@@ -60,22 +60,35 @@ default_models_list = [
         "max_output_tokens": 8096
     },
     {
-        "name": "
+        "name": "openai/gpt-4.1-mini",
         "description": "",
-        "model_name": "
+        "model_name": "openai/gpt-4.1-mini",
         "model_type": "saas/openai",
         "base_url": "https://openrouter.ai/api/v1",
         "api_key_path": "",
         "is_reasoning": False,
-        "input_price":
-        "output_price":
+        "input_price": 2.8,
+        "output_price": 11.2,
         "average_speed": 0.0,
-        "max_output_tokens": 8096*
+        "max_output_tokens": 8096*3
     },
     {
-        "name": "
+        "name": "openai/gpt-4.1",
         "description": "",
-        "model_name": "
+        "model_name": "openai/gpt-4.1",
+        "model_type": "saas/openai",
+        "base_url": "https://openrouter.ai/api/v1",
+        "api_key_path": "",
+        "is_reasoning": False,
+        "input_price": 14.0,
+        "output_price": 42.0,
+        "average_speed": 0.0,
+        "max_output_tokens": 8096*3
+    },
+    {
+        "name": "openai/gpt-4.1-nano",
+        "description": "",
+        "model_name": "openai/gpt-4.1-nano",
         "model_type": "saas/openai",
         "base_url": "https://openrouter.ai/api/v1",
         "api_key_path": "",
@@ -83,8 +96,8 @@ default_models_list = [
         "input_price": 0.0,
         "output_price": 0.0,
         "average_speed": 0.0,
-        "max_output_tokens": 8096*
-    },
+        "max_output_tokens": 8096*3
+    },
     {
         "name": "openrouter/google/gemini-2.5-pro-preview-03-25",
         "description": "",
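For reference, 8096*3 evaluates to 24288, so the new GPT-4.1 entries triple the per-response output-token cap relative to the default 8096. The diff does not state the units for input_price and output_price; presumably they follow the convention of the surrounding entries.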
autocoder/rag/api_server.py
CHANGED
@@ -187,9 +187,16 @@ async def serve_static_file(full_path: str, request: Request):
     # Use the normalized path directly
     file_path = os.path.join("/", os.path.normpath(unquote(full_path)))

+    # Fetch the allowed static-file directory
+    allowed_static_abs = request.app.state.allowed_static_abs
+    logger.info(f"==allowed_static_abs==: {allowed_static_abs}")
+
+    if file_path.startswith(("/_images","_images")):
+        file_path = os.path.join(allowed_static_abs, file_path)
+
     # Check whether the file exists
     if not os.path.exists(file_path):
-        raise FileNotFoundError(f"File not found: {file_path}")
+        raise FileNotFoundError(f"File not found: {file_path}")

     # If Nginx X-Accel-Redirect is enabled, use the X-Accel feature
     if hasattr(request.app.state, "enable_nginx_x_accel") and request.app.state.enable_nginx_x_accel:
@@ -273,6 +280,9 @@ def serve(llm:ByzerLLM, args: ServerArgs):
     allowed_static_abs = os.path.abspath(allowed_static_dir)
     logger.info(f"Static files root directory: {allowed_static_abs}")

+    # Store the allowed static-file directory on the application state
+    router_app.state.allowed_static_abs = allowed_static_abs
+
     router_app.add_middleware(
         CORSMiddleware,
         allow_origins=args.allowed_origins,
@@ -309,9 +319,11 @@ def serve(llm:ByzerLLM, args: ServerArgs):

         # Check if path is in allowed directory
         abs_path = os.path.abspath(os.path.join("/", normalized_path))
+        if abs_path.startswith("/_images"):
+            return await call_next(request)

         # Use the precomputed allowed_static_abs
-        is_allowed = abs_path.startswith(allowed_static_abs)
+        is_allowed = abs_path.startswith(request.app.state.allowed_static_abs)

         if not is_allowed:
             logger.warning(f"Unauthorized path access: {abs_path}")