auto-coder-web 0.1.64__py3-none-any.whl → 0.1.66__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- auto_coder_web/common_router/active_context_router.py +152 -0
- auto_coder_web/common_router/file_router.py +45 -1
- auto_coder_web/common_router/filecacher.py +139 -0
- auto_coder_web/file_cacher/__init__.py +0 -0
- auto_coder_web/file_cacher/filecacher.py +195 -0
- auto_coder_web/proxy.py +4 -0
- auto_coder_web/routers/auto_router.py +67 -2
- auto_coder_web/routers/coding_router.py +1 -1
- auto_coder_web/version.py +1 -1
- auto_coder_web/web/assets/main-C1svZAzf.css +32 -0
- auto_coder_web/web/assets/main.js +376 -375
- auto_coder_web/web/index.html +1 -1
- {auto_coder_web-0.1.64.dist-info → auto_coder_web-0.1.66.dist-info}/METADATA +3 -2
- {auto_coder_web-0.1.64.dist-info → auto_coder_web-0.1.66.dist-info}/RECORD +17 -13
- auto_coder_web/web/assets/main-DJWilGUK.css +0 -32
- {auto_coder_web-0.1.64.dist-info → auto_coder_web-0.1.66.dist-info}/WHEEL +0 -0
- {auto_coder_web-0.1.64.dist-info → auto_coder_web-0.1.66.dist-info}/entry_points.txt +0 -0
- {auto_coder_web-0.1.64.dist-info → auto_coder_web-0.1.66.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,152 @@
|
|
1
|
+
from fastapi import APIRouter, HTTPException
|
2
|
+
from pydantic import BaseModel, Field
|
3
|
+
from typing import List, Optional, Dict, Any
|
4
|
+
from autocoder.memory.active_context_manager import ActiveContextManager
|
5
|
+
from autocoder.auto_coder_runner import get_final_config
|
6
|
+
from autocoder.common.action_yml_file_manager import ActionYmlFileManager
|
7
|
+
import threading
|
8
|
+
|
9
|
+
router = APIRouter()
|
10
|
+
|
11
|
+
class TaskInfo(BaseModel):
    """One active-context task as returned by /api/active-context/tasks.

    Field descriptions are runtime data (they surface in the generated
    OpenAPI schema consumed by the existing UI) and are left unchanged.
    """
    task_id: str = Field(..., description="任务ID")
    status: str = Field(..., description="任务状态")
    start_time: Optional[str] = Field(None, description="任务开始时间")
    completion_time: Optional[str] = Field(None, description="任务完成时间")
    file_name: Optional[str] = Field(None, description="关联的文件名")
    total_tokens: int = Field(0, description="总token数")
    input_tokens: int = Field(0, description="输入token数")
    output_tokens: int = Field(0, description="输出token数")
    cost: float = Field(0.0, description="费用")
    processed_dirs: Optional[List[str]] = Field(None, description="已处理的目录列表")
    error: Optional[str] = Field(None, description="错误信息")
|
23
|
+
|
24
|
+
class TaskListResponse(BaseModel):
    """Response body for the task-listing endpoint (the endpoint returns at
    most the latest 50 tasks)."""
    tasks: List[TaskInfo] = Field(default_factory=list, description="任务列表")
|
26
|
+
|
27
|
+
|
28
|
+
_active_context_manager_lock = threading.Lock()
_active_context_manager_instance: Optional[ActiveContextManager] = None

def get_active_context_manager() -> ActiveContextManager:
    """Return the process-wide ActiveContextManager, creating it lazily.

    Creation is guarded by a module-level lock so concurrent requests
    share a single instance. LLM construction is best-effort: if it
    fails, the manager is still created with ``llm=None``.
    """
    global _active_context_manager_instance
    with _active_context_manager_lock:
        if _active_context_manager_instance is None:
            args = get_final_config()
            try:
                from autocoder.utils.llms import get_single_llm
                llm = get_single_llm(args.model, product_mode=args.product_mode)
            except Exception:
                # Best-effort: the manager can operate without an LLM.
                llm = None
            _active_context_manager_instance = ActiveContextManager(llm, args.source_dir)
    return _active_context_manager_instance
|
47
|
+
|
48
|
+
@router.get("/api/active-context/tasks", response_model=TaskListResponse)
async def list_active_context_tasks():
    """
    Return the latest 50 active-context tasks, sorted by start time (descending).

    Tasks still in "running"/"queued" state for more than 10 minutes are
    automatically marked as failed before being returned.

    Raises:
        HTTPException: 500 if the underlying task store cannot be read.
    """
    import datetime

    def _parse_dt(value):
        """Best-effort conversion of a task timestamp to datetime; None if impossible."""
        if isinstance(value, datetime.datetime):
            return value
        if isinstance(value, str):
            try:
                return datetime.datetime.strptime(value, "%Y-%m-%d %H:%M:%S")
            except ValueError:
                return None
        return None

    def _format_dt(value):
        """Render a task timestamp as a string (pass strings through), or None."""
        if isinstance(value, str):
            return value
        if value:
            try:
                return value.strftime("%Y-%m-%d %H:%M:%S")
            except Exception:
                return str(value)
        return None

    def _sort_key(task):
        # Prefer start_time, fall back to completion_time. Normalizing to a
        # POSIX timestamp keeps the key homogeneous (float): the previous
        # implementation could return datetime, raw str or int 0 for
        # different tasks, making sorted() raise TypeError.
        dt = _parse_dt(task.get('start_time')) or _parse_dt(task.get('completion_time'))
        return dt.timestamp() if dt else 0.0

    try:
        manager = get_active_context_manager()
        all_tasks_raw = manager.get_all_tasks()

        # Newest first, keep the latest 50.
        latest_tasks = sorted(all_tasks_raw, key=_sort_key, reverse=True)[:50]

        now = datetime.datetime.now()

        # Auto-fail tasks that have been running/queued for over 10 minutes.
        for t in latest_tasks:
            if t.get("status", "") in ("running", "queued"):
                start_time_dt = _parse_dt(t.get("start_time"))
                if start_time_dt and (now - start_time_dt).total_seconds() > 600:
                    t["status"] = "failed"
                    t["error"] = "Timeout: Task exceeded 10 minutes and was automatically marked as failed."

        tasks = [
            TaskInfo(
                task_id=t.get("task_id", ""),
                status=t.get("status", ""),
                start_time=_format_dt(t.get('start_time')),
                completion_time=_format_dt(t.get('completion_time')),
                file_name=t.get("file_name", ""),
                total_tokens=t.get("total_tokens", 0),
                input_tokens=t.get("input_tokens", 0),
                output_tokens=t.get("output_tokens", 0),
                cost=t.get("cost", 0.0),
                processed_dirs=t.get("processed_dirs", []),
                error=t.get("error", None),
            )
            for t in latest_tasks
        ]
        return TaskListResponse(tasks=tasks)
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Failed to get active context tasks: {str(e)}")
|
@@ -7,9 +7,15 @@ from auto_coder_web.file_manager import (
|
|
7
7
|
get_directory_tree_async,
|
8
8
|
read_file_content_async,
|
9
9
|
)
|
10
|
+
from pydantic import BaseModel
|
11
|
+
from typing import List
|
10
12
|
|
11
13
|
router = APIRouter()
|
12
14
|
|
15
|
+
class FileInfo(BaseModel):
    """A single file entry returned by /api/list-files."""
    name: str  # basename of the file
    path: str  # path as derived from the queried dir_path
|
18
|
+
|
13
19
|
async def get_project_path(request: Request) -> str:
|
14
20
|
"""获取项目路径作为依赖"""
|
15
21
|
return request.app.state.project_path
|
@@ -101,4 +107,42 @@ async def get_file_content(
|
|
101
107
|
raise HTTPException(
|
102
108
|
status_code=404, detail="File not found or cannot be read")
|
103
109
|
|
104
|
-
return {"content": content}
|
110
|
+
return {"content": content}
|
111
|
+
|
112
|
+
|
113
|
+
@router.get("/api/list-files", response_model=List[FileInfo])
async def list_files_in_directory(
    dir_path: str
):
    """
    List all files (not directories) under the specified directory.
    If dir_path is a file, return info of that file.
    """
    if not await aiofiles.os.path.exists(dir_path):
        raise HTTPException(status_code=404, detail="Path not found")

    # A plain file is returned as a one-element listing.
    if await aiofiles.os.path.isfile(dir_path):
        return [FileInfo(name=os.path.basename(dir_path), path=dir_path)]

    # Anything that is neither file nor directory is rejected.
    if not await aiofiles.os.path.isdir(dir_path):
        raise HTTPException(status_code=400, detail="Provided path is neither a directory nor a file")

    try:
        names = await aiofiles.os.listdir(dir_path)
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))

    files = []
    for name in names:
        candidate = os.path.join(dir_path, name)
        try:
            if await aiofiles.os.path.isfile(candidate):
                files.append(FileInfo(name=name, path=candidate))
        except Exception:
            # Skip entries that vanish or cannot be inspected.
            continue

    return files
|
@@ -0,0 +1,139 @@
|
|
1
|
+
import os
|
2
|
+
import json
|
3
|
+
import threading
|
4
|
+
import time
|
5
|
+
from watchdog.observers import Observer
|
6
|
+
from watchdog.events import FileSystemEventHandler
|
7
|
+
|
8
|
+
|
9
|
+
class FileCacheHandler(FileSystemEventHandler):
    """Watchdog handler that mirrors file events into a FileCacher's index."""

    def __init__(self, cacher):
        super().__init__()
        self.cacher = cacher

    def on_created(self, event):
        if event.is_directory:
            return
        self.cacher._update_file(event.src_path)

    def on_modified(self, event):
        if event.is_directory:
            return
        self.cacher._update_file(event.src_path)

    def on_deleted(self, event):
        if event.is_directory:
            return
        self.cacher._remove_file(event.src_path)

    def on_moved(self, event):
        if event.is_directory:
            return
        # A move is a removal at the old path plus an add at the new one.
        self.cacher._remove_file(event.src_path)
        self.cacher._update_file(event.dest_path)
|
30
|
+
|
31
|
+
|
32
|
+
class FileCacher:
    """In-memory index of a project's files, kept current via watchdog events.

    Entries map project-relative paths to a metadata dict (mtime, size,
    absolute path, basename). The index is persisted under
    ``.auto-coder/cache/file_cache.json``.
    """

    def __init__(self, project_path):
        self.project_path = project_path
        self.index_file = os.path.join(project_path, ".auto-coder", "cache", "file_cache.json")
        self.file_info = {}  # project-relative path -> metadata dict
        self.ready = False
        self.lock = threading.RLock()
        self.observer = None

    def start(self):
        """Begin building the index (then watching for changes) in the background."""
        worker = threading.Thread(target=self._build_cache_thread, daemon=True)
        worker.start()

    def _build_cache_thread(self):
        """Background worker: build the index, persist it, then start watchdog."""
        try:
            self._build_cache()
        finally:
            # Even a partial build is usable; never leave ready stuck at False.
            self.ready = True
            self._save_cache()
            self._start_watchdog()

    def _build_cache(self):
        """Walk the project tree and index every regular file."""
        skipped = {".git", "node_modules", "dist", "build", "__pycache__", ".venv", ".auto-coder"}
        for root, subdirs, filenames in os.walk(self.project_path, followlinks=True):
            # Prune excluded and hidden directories in place so os.walk skips them.
            subdirs[:] = [d for d in subdirs if d not in skipped and not d.startswith('.')]
            for filename in filenames:
                self._update_file(os.path.join(root, filename))

    def _update_file(self, abs_path):
        """Insert or refresh the metadata entry for one file; errors are ignored."""
        try:
            if not os.path.isfile(abs_path):
                return
            meta = os.stat(abs_path)
            key = os.path.relpath(abs_path, self.project_path)
            with self.lock:
                self.file_info[key] = {
                    "mtime": meta.st_mtime,
                    "size": meta.st_size,
                    "abs_path": abs_path,
                    "name": os.path.basename(abs_path),
                }
        except Exception:
            pass  # best-effort indexing

    def _remove_file(self, abs_path):
        """Drop the entry for a removed file; errors are ignored."""
        try:
            key = os.path.relpath(abs_path, self.project_path)
            with self.lock:
                self.file_info.pop(key, None)
        except Exception:
            pass

    def _start_watchdog(self):
        """Attach a watchdog observer so filesystem changes update the index."""
        self.observer = Observer()
        self.observer.schedule(FileCacheHandler(self), self.project_path, recursive=True)
        self.observer.daemon = True
        self.observer.start()

    def stop(self):
        """Shut down the watchdog observer if it was started."""
        if self.observer:
            self.observer.stop()
            self.observer.join()

    def _save_cache(self):
        """Persist the index to disk; failures are ignored."""
        try:
            os.makedirs(os.path.dirname(self.index_file), exist_ok=True)
            with open(self.index_file, 'w', encoding='utf-8') as f:
                json.dump(self.file_info, f)
        except Exception:
            pass

    def load_cache(self):
        """Load a previously saved index from disk, if any; failures are ignored."""
        try:
            if os.path.exists(self.index_file):
                with open(self.index_file, 'r', encoding='utf-8') as f:
                    self.file_info = json.load(f)
                self.ready = True
        except Exception:
            pass

    def search_files(self, patterns):
        """Return project-relative paths whose file name matches any pattern.

        An empty-string pattern matches every file.

        :param patterns: list[str] of substrings matched against file names
        :return: list[str] of matching relative paths
        """
        with self.lock:
            hits = {
                rel
                for rel, meta in self.file_info.items()
                if any(p == "" or p in meta.get("name", "") for p in patterns)
            }
        return list(hits)
|
File without changes
|
@@ -0,0 +1,195 @@
|
|
1
|
+
import os
|
2
|
+
import json
|
3
|
+
import threading
|
4
|
+
import time
|
5
|
+
from watchdog.observers import Observer
|
6
|
+
from watchdog.events import FileSystemEventHandler
|
7
|
+
from pydantic import BaseModel
|
8
|
+
|
9
|
+
class FileCacheResult(BaseModel):
    """Result of FileCacher.search_files.

    miss is True when the index is not ready yet (files will be empty);
    files holds matching project-relative paths.
    """
    miss: bool
    files: list[str]
|
12
|
+
|
13
|
+
class FileCacheHandler(FileSystemEventHandler):
    """Routes watchdog filesystem events to the owning FileCacher."""

    def __init__(self, cacher):
        super().__init__()
        self.cacher = cacher

    def _on_file_event(self, event, *actions):
        # Directories are handled implicitly through their contained files.
        if event.is_directory:
            return
        for action, path in actions:
            action(path)

    def on_created(self, event):
        self._on_file_event(event, (self.cacher._update_file, event.src_path))

    def on_modified(self, event):
        self._on_file_event(event, (self.cacher._update_file, event.src_path))

    def on_deleted(self, event):
        self._on_file_event(event, (self.cacher._remove_file, event.src_path))

    def on_moved(self, event):
        # A move is a removal at the source plus an add at the destination.
        self._on_file_event(event,
                            (self.cacher._remove_file, event.src_path),
                            (self.cacher._update_file, event.dest_path))
|
34
|
+
|
35
|
+
|
36
|
+
class FileCacher:
    """In-memory index of project files kept fresh via a watchdog observer.

    The index maps project-relative paths to metadata (mtime, size, absolute
    path, basename) and is persisted under .auto-coder/cache/file_cache.json.
    """

    def __init__(self, project_path):
        self.project_path = project_path
        self.index_file = os.path.join(
            project_path, ".auto-coder", "cache", "file_cache.json")
        # key: path relative to project_path, value: metadata dict
        self.file_info = {}
        self.ready = False
        self.lock = threading.RLock()
        self.observer = None

    def start(self):
        """启动缓存构建和监控"""
        # 启动索引构建线程
        t = threading.Thread(target=self._build_cache_thread, daemon=True)
        t.start()

    def _build_cache_thread(self):
        """后台构建索引并启动watchdog监控"""
        try:
            self._build_cache()
        finally:
            # Mark ready even after a partial build so searches are never
            # blocked forever.
            self.ready = True
            self._save_cache()
            self._start_watchdog()

    def _build_cache(self):
        """遍历项目目录,构建初始缓存"""
        exclude_dirs = {".git", "node_modules", "dist",
                        "build", "__pycache__", ".venv", ".auto-coder"}
        for root, dirs, files in os.walk(self.project_path, followlinks=True):
            # 过滤目录 (prune in place so os.walk skips them)
            dirs[:] = [
                d for d in dirs if d not in exclude_dirs and not d.startswith('.')]
            for f in files:
                abs_path = os.path.join(root, f)
                self._update_file(abs_path)

    def _update_file(self, abs_path):
        """添加或更新单个文件的缓存信息 (best-effort: errors are ignored)"""
        try:
            if not os.path.isfile(abs_path):
                return
            stat = os.stat(abs_path)
            rel_path = os.path.relpath(abs_path, self.project_path)
            with self.lock:
                self.file_info[rel_path] = {
                    "mtime": stat.st_mtime,
                    "size": stat.st_size,
                    "abs_path": abs_path,
                    "name": os.path.basename(abs_path),
                }
        except Exception:
            pass  # ignore errors

    def _remove_file(self, abs_path):
        """删除单个文件的缓存信息 (best-effort: errors are ignored)"""
        try:
            rel_path = os.path.relpath(abs_path, self.project_path)
            with self.lock:
                if rel_path in self.file_info:
                    del self.file_info[rel_path]
        except Exception:
            pass

    def _start_watchdog(self):
        """启动watchdog监控项目目录变更"""
        event_handler = FileCacheHandler(self)
        self.observer = Observer()
        self.observer.schedule(
            event_handler, self.project_path, recursive=True)
        self.observer.daemon = True
        self.observer.start()

    def stop(self):
        """停止监控"""
        if self.observer:
            self.observer.stop()
            self.observer.join()

    def _save_cache(self):
        """将缓存写入磁盘 (best-effort: errors are ignored)"""
        try:
            cache_dir = os.path.dirname(self.index_file)
            os.makedirs(cache_dir, exist_ok=True)
            with open(self.index_file, 'w', encoding='utf-8') as f:
                json.dump(self.file_info, f)
        except Exception:
            pass

    def load_cache(self):
        """尝试加载磁盘缓存 (best-effort: errors are ignored)"""
        try:
            if os.path.exists(self.index_file):
                with open(self.index_file, 'r', encoding='utf-8') as f:
                    self.file_info = json.load(f)
                self.ready = True
        except Exception:
            pass

    def search_files(self, patterns):
        """
        根据模式列表查找匹配文件
        :param patterns: list[str] — substrings matched against file names,
            or concrete (relative/absolute) file paths
        :return: FileCacheResult(miss=True/False, files=List[str])
        """
        if not self.ready:
            # 索引未准备好,返回 miss = True,空文件列表
            # BUG FIX: FileCacheResult is a module-level class, not an
            # attribute of FileCacher; `self.FileCacheResult` raised
            # AttributeError at runtime.
            return FileCacheResult(miss=True, files=[])

        matched = set()
        default_exclude_dirs = [".git", "node_modules", "dist", "build", "__pycache__", ".venv", ".auto-coder"]
        project_root = self.project_path

        def should_exclude_path(path: str) -> bool:
            """检查路径是否应该被排除(路径中包含排除目录或以.开头的目录/文件)"""
            # 处理相对/绝对路径
            rel_path = path
            if os.path.isabs(path):
                try:
                    rel_path = os.path.relpath(path, project_root)
                except ValueError:
                    rel_path = path

            # 检查文件或目录本身是否以.开头
            if os.path.basename(rel_path).startswith('.'):
                return True

            # 检查路径中是否包含排除目录
            path_parts = rel_path.split(os.sep)
            return any(part in default_exclude_dirs or part.startswith('.') for part in path_parts)

        with self.lock:
            # 如果没有提供有效模式,返回过滤后的缓存列表
            if not patterns or (len(patterns) == 1 and patterns[0] == ""):
                for rel_path in self.file_info:
                    if not should_exclude_path(rel_path):
                        matched.add(rel_path)
                return FileCacheResult(miss=False, files=list(matched))

            for pattern in patterns:
                # 1. 在缓存中匹配文件名
                for rel_path, info in self.file_info.items():
                    if should_exclude_path(rel_path):
                        continue
                    if pattern in info.get("name", ""):
                        matched.add(rel_path)

                # 2. 如果pattern本身是存在的文件路径(绝对或相对)
                abs_pattern_path = pattern
                if not os.path.isabs(abs_pattern_path):
                    abs_pattern_path = os.path.join(project_root, pattern)
                if os.path.exists(abs_pattern_path) and os.path.isfile(abs_pattern_path) and not should_exclude_path(abs_pattern_path):
                    try:
                        rel_p = os.path.relpath(abs_pattern_path, project_root)
                        matched.add(rel_p)
                    except ValueError:
                        matched.add(abs_pattern_path)
            return FileCacheResult(miss=False, files=list(matched))
|
auto_coder_web/proxy.py
CHANGED
@@ -22,9 +22,11 @@ from auto_coder_web.auto_coder_runner_wrapper import AutoCoderRunnerWrapper
|
|
22
22
|
from auto_coder_web.routers import todo_router, settings_router, auto_router, commit_router, chat_router, coding_router, index_router, config_router, upload_router, rag_router, editable_preview_router,mcp_router
|
23
23
|
from auto_coder_web.expert_routers import history_router
|
24
24
|
from auto_coder_web.common_router import completions_router, file_router, auto_coder_conf_router, chat_list_router, file_group_router, model_router, compiler_router
|
25
|
+
from auto_coder_web.common_router import active_context_router
|
25
26
|
from rich.console import Console
|
26
27
|
from loguru import logger
|
27
28
|
from auto_coder_web.lang import get_message
|
29
|
+
from auto_coder_web.file_cacher.filecacher import FileCacher
|
28
30
|
|
29
31
|
class ProxyServer:
|
30
32
|
def __init__(self, project_path: str, quick: bool = False, product_mode: str = "pro"):
|
@@ -79,6 +81,7 @@ class ProxyServer:
|
|
79
81
|
self.app.state.project_path = self.project_path
|
80
82
|
# Store auto_coder_runner in app state for dependency injection
|
81
83
|
self.app.state.auto_coder_runner = self.auto_coder_runner
|
84
|
+
# self.app.state.file_cacher = FileCacher(self.project_path)
|
82
85
|
# Store initialization status
|
83
86
|
self.app.state.is_initialized = self.is_initialized
|
84
87
|
|
@@ -102,6 +105,7 @@ class ProxyServer:
|
|
102
105
|
self.app.include_router(rag_router.router)
|
103
106
|
self.app.include_router(editable_preview_router.router)
|
104
107
|
self.app.include_router(mcp_router.router)
|
108
|
+
self.app.include_router(active_context_router.router)
|
105
109
|
# self.app.include_router(rag_router.router)
|
106
110
|
|
107
111
|
@self.app.on_event("shutdown")
|
@@ -15,6 +15,7 @@ from autocoder.events.event_types import EventType
|
|
15
15
|
from byzerllm.utils.langutil import asyncfy_with_semaphore
|
16
16
|
from autocoder.common.global_cancel import global_cancel, CancelRequestedException
|
17
17
|
from loguru import logger
|
18
|
+
import byzerllm
|
18
19
|
router = APIRouter()
|
19
20
|
|
20
21
|
# 创建线程池
|
@@ -22,6 +23,8 @@ cancel_thread_pool = ThreadPoolExecutor(max_workers=5)
|
|
22
23
|
|
23
24
|
class AutoCommandRequest(BaseModel):
|
24
25
|
command: str
|
26
|
+
include_conversation_history: bool = False
|
27
|
+
buildin_conversation_history: bool = False
|
25
28
|
|
26
29
|
class EventPollRequest(BaseModel):
|
27
30
|
event_file_id:str
|
@@ -58,6 +61,26 @@ def ensure_task_dir(project_path: str) -> str:
|
|
58
61
|
os.makedirs(task_dir, exist_ok=True)
|
59
62
|
return task_dir
|
60
63
|
|
64
|
+
# byzerllm prompt template: the decorator renders the docstring below as a
# Jinja2 template over the given arguments and sends it to the LLM.
# NOTE: the docstring text IS runtime data (the actual prompt, in Chinese);
# it must not be edited or translated casually.
@byzerllm.prompt()
def coding_prompt(messages: List[Dict[str, Any]], query: str):
    '''
    下面是我们已经产生的一个消息列表,其中 USER_RESPONSE 表示用户的输入,RESULT 你的输出:
    <messages>
    {% for message in messages %}
    <message>
    <type>{{ message.type }}</type>
    <content>{{ message.content }}</content>
    </message>
    {% endfor %}
    </messages>

    下面是用户的最新需求:
    <request>
    {{ query }}
    </request>
    '''
|
82
|
+
|
83
|
+
|
61
84
|
|
62
85
|
@router.post("/api/auto-command")
|
63
86
|
async def auto_command(request: AutoCommandRequest, project_path: str = Depends(get_project_path)):
|
@@ -75,8 +98,50 @@ async def auto_command(request: AutoCommandRequest, project_path: str = Depends(
|
|
75
98
|
wrapper = AutoCoderRunnerWrapper(project_path)
|
76
99
|
wrapper.configure_wrapper(f"event_file:{event_file}")
|
77
100
|
|
78
|
-
|
79
|
-
|
101
|
+
|
102
|
+
if request.include_conversation_history:
|
103
|
+
# 获取当前会话名称
|
104
|
+
current_session_file = os.path.join(project_path, ".auto-coder", "auto-coder.web", "current-session.json")
|
105
|
+
current_session_name = ""
|
106
|
+
if os.path.exists(current_session_file):
|
107
|
+
try:
|
108
|
+
with open(current_session_file, 'r',encoding="utf-8") as f:
|
109
|
+
session_data = json.load(f)
|
110
|
+
current_session_name = session_data.get("session_name", "")
|
111
|
+
except Exception as e:
|
112
|
+
logger.error(f"Error reading current session: {str(e)}")
|
113
|
+
|
114
|
+
# 获取历史消息
|
115
|
+
messages = []
|
116
|
+
if current_session_name:
|
117
|
+
chat_list_file = os.path.join(project_path, ".auto-coder", "auto-coder.web", "chat-lists", f"{current_session_name}.json")
|
118
|
+
if os.path.exists(chat_list_file):
|
119
|
+
try:
|
120
|
+
with open(chat_list_file, 'r', encoding="utf-8") as f:
|
121
|
+
chat_data = json.load(f)
|
122
|
+
# 从聊天历史中提取消息
|
123
|
+
for msg in chat_data.get("messages", []):
|
124
|
+
# 只保留用户和中间结果信息
|
125
|
+
if msg.get("type","") not in ["USER_RESPONSE","RESULT"]:
|
126
|
+
continue
|
127
|
+
|
128
|
+
if msg.get("contentType","") in ["token_stat"]:
|
129
|
+
continue
|
130
|
+
|
131
|
+
messages.append(msg)
|
132
|
+
except Exception as e:
|
133
|
+
logger.error(f"Error reading chat history: {str(e)}")
|
134
|
+
|
135
|
+
# 构建提示信息
|
136
|
+
prompt_text = ""
|
137
|
+
if messages:
|
138
|
+
# 调用coding_prompt生成包含历史消息的提示
|
139
|
+
prompt_text = prompt_text + coding_prompt.prompt(messages, request.command)
|
140
|
+
else:
|
141
|
+
prompt_text = request.command
|
142
|
+
|
143
|
+
# 调用auto_command_wrapper方法
|
144
|
+
result = wrapper.auto_command_wrapper(prompt_text, {
|
80
145
|
"event_file_id": file_id
|
81
146
|
})
|
82
147
|
get_event_manager(event_file).write_completion(
|
@@ -95,7 +95,7 @@ async def coding_command(request: CodingCommandRequest, project_path: str = Depe
|
|
95
95
|
current_session_name = ""
|
96
96
|
if os.path.exists(current_session_file):
|
97
97
|
try:
|
98
|
-
with open(current_session_file, 'r') as f:
|
98
|
+
with open(current_session_file, 'r',encoding="utf-8") as f:
|
99
99
|
session_data = json.load(f)
|
100
100
|
current_session_name = session_data.get("session_name", "")
|
101
101
|
except Exception as e:
|
auto_coder_web/version.py
CHANGED
@@ -1 +1 @@
|
|
1
|
-
__version__ = "0.1.
|
1
|
+
__version__ = "0.1.66"
|