auto-coder 0.1.362-py3-none-any.whl → 0.1.364-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of auto-coder might be problematic.
- {auto_coder-0.1.362.dist-info → auto_coder-0.1.364.dist-info}/METADATA +2 -2
- {auto_coder-0.1.362.dist-info → auto_coder-0.1.364.dist-info}/RECORD +65 -22
- autocoder/agent/base_agentic/__init__.py +0 -0
- autocoder/agent/base_agentic/agent_hub.py +169 -0
- autocoder/agent/base_agentic/agentic_lang.py +112 -0
- autocoder/agent/base_agentic/agentic_tool_display.py +180 -0
- autocoder/agent/base_agentic/base_agent.py +1582 -0
- autocoder/agent/base_agentic/default_tools.py +683 -0
- autocoder/agent/base_agentic/test_base_agent.py +82 -0
- autocoder/agent/base_agentic/tool_registry.py +425 -0
- autocoder/agent/base_agentic/tools/__init__.py +12 -0
- autocoder/agent/base_agentic/tools/ask_followup_question_tool_resolver.py +72 -0
- autocoder/agent/base_agentic/tools/attempt_completion_tool_resolver.py +37 -0
- autocoder/agent/base_agentic/tools/base_tool_resolver.py +35 -0
- autocoder/agent/base_agentic/tools/example_tool_resolver.py +46 -0
- autocoder/agent/base_agentic/tools/execute_command_tool_resolver.py +72 -0
- autocoder/agent/base_agentic/tools/list_files_tool_resolver.py +110 -0
- autocoder/agent/base_agentic/tools/plan_mode_respond_tool_resolver.py +35 -0
- autocoder/agent/base_agentic/tools/read_file_tool_resolver.py +54 -0
- autocoder/agent/base_agentic/tools/replace_in_file_tool_resolver.py +156 -0
- autocoder/agent/base_agentic/tools/search_files_tool_resolver.py +134 -0
- autocoder/agent/base_agentic/tools/talk_to_group_tool_resolver.py +96 -0
- autocoder/agent/base_agentic/tools/talk_to_tool_resolver.py +79 -0
- autocoder/agent/base_agentic/tools/use_mcp_tool_resolver.py +44 -0
- autocoder/agent/base_agentic/tools/write_to_file_tool_resolver.py +58 -0
- autocoder/agent/base_agentic/types.py +189 -0
- autocoder/agent/base_agentic/utils.py +100 -0
- autocoder/auto_coder_runner.py +6 -4
- autocoder/chat/conf_command.py +11 -10
- autocoder/common/__init__.py +2 -0
- autocoder/common/file_checkpoint/__init__.py +21 -0
- autocoder/common/file_checkpoint/backup.py +264 -0
- autocoder/common/file_checkpoint/examples.py +217 -0
- autocoder/common/file_checkpoint/manager.py +404 -0
- autocoder/common/file_checkpoint/models.py +156 -0
- autocoder/common/file_checkpoint/store.py +383 -0
- autocoder/common/file_checkpoint/test_backup.py +242 -0
- autocoder/common/file_checkpoint/test_manager.py +570 -0
- autocoder/common/file_checkpoint/test_models.py +360 -0
- autocoder/common/file_checkpoint/test_store.py +327 -0
- autocoder/common/file_checkpoint/test_utils.py +297 -0
- autocoder/common/file_checkpoint/utils.py +119 -0
- autocoder/common/rulefiles/autocoderrules_utils.py +138 -55
- autocoder/common/save_formatted_log.py +76 -5
- autocoder/common/v2/agent/agentic_edit.py +339 -216
- autocoder/common/v2/agent/agentic_edit_tools/read_file_tool_resolver.py +2 -2
- autocoder/common/v2/agent/agentic_edit_tools/replace_in_file_tool_resolver.py +100 -5
- autocoder/common/v2/agent/agentic_edit_tools/test_write_to_file_tool_resolver.py +322 -0
- autocoder/common/v2/agent/agentic_edit_tools/write_to_file_tool_resolver.py +160 -10
- autocoder/common/v2/agent/agentic_edit_types.py +1 -2
- autocoder/common/v2/agent/agentic_tool_display.py +2 -3
- autocoder/compilers/normal_compiler.py +64 -0
- autocoder/events/event_manager_singleton.py +133 -4
- autocoder/linters/normal_linter.py +373 -0
- autocoder/linters/python_linter.py +4 -2
- autocoder/rag/long_context_rag.py +424 -397
- autocoder/rag/test_doc_filter.py +393 -0
- autocoder/rag/test_long_context_rag.py +473 -0
- autocoder/rag/test_token_limiter.py +342 -0
- autocoder/shadows/shadow_manager.py +1 -3
- autocoder/version.py +1 -1
- {auto_coder-0.1.362.dist-info → auto_coder-0.1.364.dist-info}/LICENSE +0 -0
- {auto_coder-0.1.362.dist-info → auto_coder-0.1.364.dist-info}/WHEEL +0 -0
- {auto_coder-0.1.362.dist-info → auto_coder-0.1.364.dist-info}/entry_points.txt +0 -0
- {auto_coder-0.1.362.dist-info → auto_coder-0.1.364.dist-info}/top_level.txt +0 -0
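Only the two largest additions, autocoder/common/file_checkpoint/store.py (+383) and autocoder/common/file_checkpoint/test_backup.py (+242), are reproduced in full below. If you want to regenerate a comparison like this locally, the two wheels can be downloaded from the registry (for example with pip download auto-coder==0.1.362 --no-deps and pip download auto-coder==0.1.364 --no-deps) and diffed as ZIP archives. A minimal sketch in Python; the wheel filenames here are assumptions and should be adjusted to whatever pip actually downloads:

# Sketch: reproduce a file-by-file wheel comparison locally.
# Assumes both wheels are already in the working directory; the exact
# filenames below are assumptions, not taken from this page.
import difflib
import zipfile

OLD_WHEEL = "auto_coder-0.1.362-py3-none-any.whl"
NEW_WHEEL = "auto_coder-0.1.364-py3-none-any.whl"

with zipfile.ZipFile(OLD_WHEEL) as old, zipfile.ZipFile(NEW_WHEEL) as new:
    old_names, new_names = set(old.namelist()), set(new.namelist())
    for name in sorted(old_names | new_names):
        old_lines = old.read(name).decode("utf-8", "replace").splitlines() if name in old_names else []
        new_lines = new.read(name).decode("utf-8", "replace").splitlines() if name in new_names else []
        for line in difflib.unified_diff(old_lines, new_lines, fromfile=name, tofile=name, lineterm=""):
            print(line)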
autocoder/common/file_checkpoint/store.py (new file, 383 lines added)
@@ -0,0 +1,383 @@
+"""
+File change store.
+
+Stores and manages file change history records; supports querying change records by group, by time, and so on.
+"""
+
+import os
+import json
+import sqlite3
+import logging
+import threading
+from typing import Dict, List, Optional, Tuple, Any
+from datetime import datetime
+
+from autocoder.common.file_checkpoint.models import ChangeRecord
+
+logger = logging.getLogger(__name__)
+
+
+class FileChangeStore:
+    """Stores and manages file change history records."""
+
+    def __init__(self, store_dir: Optional[str] = None, max_history: int = 50):
+        """
+        Initialize the change store.
+
+        Args:
+            store_dir: Storage directory; if None, the default directory is used
+            max_history: Maximum number of history versions to keep
+        """
+        if store_dir is None:
+            # The default storage directory is .auto-coder/checkpoint under the project root
+            store_dir = os.path.join(os.getcwd(), ".auto-coder", "checkpoint")
+
+        self.store_dir = store_dir
+        self.max_history = max_history
+        self.db_file = os.path.join(store_dir, "changes.db")
+        self.lock = threading.RLock()
+
+        # Make sure the storage directory exists
+        os.makedirs(store_dir, exist_ok=True)
+
+        # Initialize the database
+        self._init_db()
+
+    def save_change(self, change_record: ChangeRecord) -> str:
+        """
+        Save a change record.
+
+        Args:
+            change_record: The change record object
+
+        Returns:
+            str: The change record ID
+        """
+        with self.lock:
+            try:
+                # Save the change record to the database
+                conn = self._get_db_connection()
+                cursor = conn.cursor()
+
+                cursor.execute(
+                    """
+                    INSERT INTO changes
+                    (change_id, timestamp, file_path, backup_id, is_new, is_deletion, group_id, data)
+                    VALUES (?, ?, ?, ?, ?, ?, ?, ?)
+                    """,
+                    (
+                        change_record.change_id,
+                        change_record.timestamp,
+                        change_record.file_path,
+                        change_record.backup_id,
+                        int(change_record.is_new),
+                        int(change_record.is_deletion),
+                        change_record.group_id,
+                        json.dumps(change_record.to_dict())
+                    )
+                )
+
+                conn.commit()
+
+                # Save the change record to a JSON file
+                self._save_change_to_file(change_record)
+
+                # Clean up stale history records
+                self._clean_old_history()
+
+                logger.debug(f"Saved change record {change_record.change_id}")
+                return change_record.change_id
+
+            except Exception as e:
+                logger.error(f"Failed to save change record: {str(e)}")
+                raise
+
+    def get_change(self, change_id: str) -> Optional[ChangeRecord]:
+        """
+        Get the change record with the given ID.
+
+        Args:
+            change_id: The change record ID
+
+        Returns:
+            ChangeRecord: The change record object, or None if it does not exist
+        """
+        with self.lock:
+            try:
+                conn = self._get_db_connection()
+                cursor = conn.cursor()
+
+                cursor.execute(
+                    "SELECT data FROM changes WHERE change_id = ?",
+                    (change_id,)
+                )
+
+                row = cursor.fetchone()
+                if row is None:
+                    return None
+
+                data = json.loads(row[0])
+                return ChangeRecord.from_dict(data)
+
+            except Exception as e:
+                logger.error(f"Failed to get change record {change_id}: {str(e)}")
+                return None
+
+    def get_changes_by_group(self, group_id: str) -> List[ChangeRecord]:
+        """
+        Get all change records for the given group.
+
+        Args:
+            group_id: The change group ID
+
+        Returns:
+            List[ChangeRecord]: The list of change records
+        """
+        with self.lock:
+            try:
+                conn = self._get_db_connection()
+                cursor = conn.cursor()
+
+                cursor.execute(
+                    "SELECT data FROM changes WHERE group_id = ? ORDER BY timestamp DESC",
+                    (group_id,)
+                )
+
+                changes = []
+                for row in cursor.fetchall():
+                    data = json.loads(row[0])
+                    changes.append(ChangeRecord.from_dict(data))
+
+                return changes
+
+            except Exception as e:
+                logger.error(f"Failed to get records for change group {group_id}: {str(e)}")
+                return []
+
+    def get_latest_changes(self, limit: int = 10) -> List[ChangeRecord]:
+        """
+        Get the most recent change records.
+
+        Args:
+            limit: Maximum number of records to return
+
+        Returns:
+            List[ChangeRecord]: The list of change records
+        """
+        with self.lock:
+            try:
+                conn = self._get_db_connection()
+                cursor = conn.cursor()
+
+                cursor.execute(
+                    "SELECT data FROM changes ORDER BY timestamp DESC LIMIT ?",
+                    (limit,)
+                )
+
+                changes = []
+                for row in cursor.fetchall():
+                    data = json.loads(row[0])
+                    changes.append(ChangeRecord.from_dict(data))
+
+                return changes
+
+            except Exception as e:
+                logger.error(f"Failed to get the most recent change records: {str(e)}")
+                return []
+
+    def get_changes_by_file(self, file_path: str, limit: int = 10) -> List[ChangeRecord]:
+        """
+        Get the change records for the given file.
+
+        Args:
+            file_path: The file path
+            limit: Maximum number of records to return
+
+        Returns:
+            List[ChangeRecord]: The list of change records
+        """
+        with self.lock:
+            try:
+                conn = self._get_db_connection()
+                cursor = conn.cursor()
+
+                cursor.execute(
+                    "SELECT data FROM changes WHERE file_path = ? ORDER BY timestamp DESC LIMIT ?",
+                    (file_path, limit)
+                )
+
+                changes = []
+                for row in cursor.fetchall():
+                    data = json.loads(row[0])
+                    changes.append(ChangeRecord.from_dict(data))
+
+                return changes
+
+            except Exception as e:
+                logger.error(f"Failed to get change records for file {file_path}: {str(e)}")
+                return []
+
+    def delete_change(self, change_id: str) -> bool:
+        """
+        Delete the given change record.
+
+        Args:
+            change_id: The change record ID
+
+        Returns:
+            bool: Whether the deletion succeeded
+        """
+        with self.lock:
+            try:
+                # Get the change record
+                change = self.get_change(change_id)
+                if change is None:
+                    return False
+
+                # Delete the record from the database
+                conn = self._get_db_connection()
+                cursor = conn.cursor()
+
+                cursor.execute(
+                    "DELETE FROM changes WHERE change_id = ?",
+                    (change_id,)
+                )
+
+                conn.commit()
+
+                # Delete the JSON file
+                json_file = os.path.join(self.store_dir, f"{change_id}.json")
+                if os.path.exists(json_file):
+                    os.remove(json_file)
+
+                logger.debug(f"Deleted change record {change_id}")
+                return True
+
+            except Exception as e:
+                logger.error(f"Failed to delete change record {change_id}: {str(e)}")
+                return False
+
+    def get_change_groups(self, limit: int = 10) -> List[Tuple[str, float, int]]:
+        """
+        Get the list of change groups.
+
+        Args:
+            limit: Maximum number of groups to return
+
+        Returns:
+            List[Tuple[str, float, int]]: A list of (group ID, latest timestamp, change count) tuples
+        """
+        with self.lock:
+            try:
+                conn = self._get_db_connection()
+                cursor = conn.cursor()
+
+                cursor.execute(
+                    """
+                    SELECT group_id, MAX(timestamp) as latest_time, COUNT(*) as count
+                    FROM changes
+                    WHERE group_id IS NOT NULL
+                    GROUP BY group_id
+                    ORDER BY latest_time DESC
+                    LIMIT ?
+                    """,
+                    (limit,)
+                )
+
+                groups = []
+                for row in cursor.fetchall():
+                    groups.append((row[0], row[1], row[2]))
+
+                return groups
+
+            except Exception as e:
+                logger.error(f"Failed to get the list of change groups: {str(e)}")
+                return []
+
+    def _init_db(self) -> None:
+        """Initialize the database."""
+        try:
+            conn = self._get_db_connection()
+            cursor = conn.cursor()
+
+            # Create the change record table
+            cursor.execute(
+                """
+                CREATE TABLE IF NOT EXISTS changes (
+                    change_id TEXT PRIMARY KEY,
+                    timestamp REAL NOT NULL,
+                    file_path TEXT NOT NULL,
+                    backup_id TEXT,
+                    is_new INTEGER NOT NULL DEFAULT 0,
+                    is_deletion INTEGER NOT NULL DEFAULT 0,
+                    group_id TEXT,
+                    data TEXT NOT NULL
+                )
+                """
+            )
+
+            # Create indexes
+            cursor.execute(
+                "CREATE INDEX IF NOT EXISTS idx_changes_timestamp ON changes (timestamp)"
+            )
+            cursor.execute(
+                "CREATE INDEX IF NOT EXISTS idx_changes_file_path ON changes (file_path)"
+            )
+            cursor.execute(
+                "CREATE INDEX IF NOT EXISTS idx_changes_group_id ON changes (group_id)"
+            )
+
+            conn.commit()
+
+        except Exception as e:
+            logger.error(f"Failed to initialize the database: {str(e)}")
+            raise
+
+    def _get_db_connection(self) -> sqlite3.Connection:
+        """Get a database connection."""
+        conn = sqlite3.connect(self.db_file)
+        conn.row_factory = sqlite3.Row
+        return conn
+
+    def _save_change_to_file(self, change_record: ChangeRecord) -> None:
+        """Save the change record to a JSON file."""
+        try:
+            json_file = os.path.join(self.store_dir, f"{change_record.change_id}.json")
+
+            with open(json_file, 'w', encoding='utf-8') as f:
+                json.dump(change_record.to_dict(), f, indent=2)
+
+        except Exception as e:
+            logger.error(f"Failed to save change record to file: {str(e)}")
+
+    def _clean_old_history(self) -> None:
+        """Clean up stale history records."""
+        try:
+            conn = self._get_db_connection()
+            cursor = conn.cursor()
+
+            # Get the total number of records
+            cursor.execute("SELECT COUNT(*) FROM changes")
+            total_count = cursor.fetchone()[0]
+
+            # If the record count exceeds the maximum, delete the oldest records
+            if total_count > self.max_history:
+                # Compute how many records need to be deleted
+                delete_count = total_count - self.max_history
+
+                # Get the IDs of the records to delete
+                cursor.execute(
+                    "SELECT change_id FROM changes ORDER BY timestamp ASC LIMIT ?",
+                    (delete_count,)
+                )
+
+                change_ids = [row[0] for row in cursor.fetchall()]
+
+                # Delete the records
+                for change_id in change_ids:
+                    self.delete_change(change_id)
+
+                logger.debug(f"Cleaned up {len(change_ids)} stale change records")
+
+        except Exception as e:
+            logger.error(f"Failed to clean up stale history records: {str(e)}")
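The store module above keeps every change record twice: as a row in a SQLite table under .auto-coder/checkpoint/changes.db and as a standalone JSON file named after the change ID, pruning the oldest rows once max_history is exceeded. A minimal usage sketch follows; FileChangeStore and its method names come straight from the diff, but ChangeRecord is defined in models.py (not shown in this hunk), so the constructor arguments below are an assumption that simply mirrors the table columns:

# Sketch only. ChangeRecord's real constructor lives in
# autocoder/common/file_checkpoint/models.py, which this hunk does not show;
# the keyword arguments below are assumed to mirror the 'changes' table columns.
import time
import uuid

from autocoder.common.file_checkpoint.models import ChangeRecord
from autocoder.common.file_checkpoint.store import FileChangeStore

store = FileChangeStore(store_dir="/tmp/checkpoint-demo", max_history=50)

record = ChangeRecord(                 # assumed signature
    change_id=str(uuid.uuid4()),
    timestamp=time.time(),
    file_path="src/example.py",
    backup_id=None,
    is_new=True,
    is_deletion=False,
    group_id="demo-group",
)

store.save_change(record)                        # SQLite row + <change_id>.json
for rec in store.get_changes_by_group("demo-group"):
    print(rec.file_path)
print(store.get_latest_changes(limit=5))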
autocoder/common/file_checkpoint/test_backup.py (new file, 242 lines added)
@@ -0,0 +1,242 @@
+import pytest
+import os
+import json
+import tempfile
+import shutil
+from datetime import datetime, timedelta
+from pathlib import Path
+
+from autocoder.common.file_checkpoint.backup import FileBackupManager
+
+@pytest.fixture
+def temp_test_dir():
+    """Provide a temporary test directory."""
+    temp_dir = tempfile.mkdtemp()
+    yield temp_dir
+    shutil.rmtree(temp_dir)
+
+@pytest.fixture
+def temp_backup_dir():
+    """Provide a temporary backup directory."""
+    temp_dir = tempfile.mkdtemp()
+    yield temp_dir
+    shutil.rmtree(temp_dir)
+
+@pytest.fixture
+def sample_file(temp_test_dir):
+    """Create a sample file for testing."""
+    file_path = os.path.join(temp_test_dir, "sample.txt")
+    with open(file_path, 'w', encoding='utf-8') as f:
+        f.write("This is the content of a test file")
+    return file_path
+
+class TestFileBackupManager:
+    """Unit tests for the FileBackupManager class."""
+
+    def test_init_with_custom_dir(self, temp_backup_dir):
+        """Test initialization with a custom directory."""
+        manager = FileBackupManager(backup_dir=temp_backup_dir)
+
+        assert manager.backup_dir == temp_backup_dir
+        assert os.path.exists(temp_backup_dir)
+
+    def test_init_with_default_dir(self, monkeypatch):
+        """Test initialization with the default directory."""
+        # Create a temporary home directory
+        temp_home = tempfile.mkdtemp()
+        try:
+            # Mock the user's home directory
+            monkeypatch.setattr(os.path, 'expanduser', lambda path: temp_home)
+
+            manager = FileBackupManager()
+
+            expected_dir = os.path.join(temp_home, ".autocoder", "backups")
+            assert manager.backup_dir == expected_dir
+            assert os.path.exists(expected_dir)
+        finally:
+            # Clean up the temporary directory
+            if os.path.exists(temp_home):
+                shutil.rmtree(temp_home)
+
+    def test_backup_file(self, temp_backup_dir, sample_file):
+        """Test the file backup functionality."""
+        manager = FileBackupManager(backup_dir=temp_backup_dir)
+
+        # Back up the file
+        backup_id = manager.backup_file(sample_file)
+
+        # Check the backup ID
+        assert backup_id is not None
+        assert len(backup_id) > 0
+
+        # Check that the backup file exists
+        backup_file_path = os.path.join(temp_backup_dir, backup_id)
+        assert os.path.exists(backup_file_path)
+
+        # Check the backup file content
+        with open(backup_file_path, 'r', encoding='utf-8') as f:
+            content = f.read()
+        assert content == "This is the content of a test file"
+
+        # Check the metadata
+        assert backup_id in manager.metadata
+        assert manager.metadata[backup_id]["original_path"] == sample_file
+        assert "timestamp" in manager.metadata[backup_id]
+        assert "size" in manager.metadata[backup_id]
+
+    def test_backup_nonexistent_file(self, temp_backup_dir):
+        """Test backing up a nonexistent file."""
+        manager = FileBackupManager(backup_dir=temp_backup_dir)
+
+        # Try to back up a nonexistent file
+        backup_id = manager.backup_file("nonexistent_file.txt")
+
+        # Should return None
+        assert backup_id is None
+
+    def test_restore_file(self, temp_backup_dir, temp_test_dir, sample_file):
+        """Test the file restore functionality."""
+        manager = FileBackupManager(backup_dir=temp_backup_dir)
+
+        # Back up the file
+        backup_id = manager.backup_file(sample_file)
+
+        # Modify the original file
+        with open(sample_file, 'w', encoding='utf-8') as f:
+            f.write("Modified content")
+
+        # Restore to a new location
+        restore_path = os.path.join(temp_test_dir, "restored.txt")
+        success = manager.restore_file(restore_path, backup_id)
+
+        # Check the restore result
+        assert success is True
+        assert os.path.exists(restore_path)
+
+        # Check the restored file content
+        with open(restore_path, 'r', encoding='utf-8') as f:
+            content = f.read()
+        assert content == "This is the content of a test file"
+
+    def test_restore_with_invalid_backup_id(self, temp_backup_dir, temp_test_dir):
+        """Test restoring a file with an invalid backup ID."""
+        manager = FileBackupManager(backup_dir=temp_backup_dir)
+
+        # Try to restore a nonexistent backup
+        restore_path = os.path.join(temp_test_dir, "restored.txt")
+        success = manager.restore_file(restore_path, "invalid_backup_id")
+
+        # Should fail
+        assert success is False
+        assert not os.path.exists(restore_path)
+
+    def test_get_backup_content(self, temp_backup_dir, sample_file):
+        """Test getting the content of a backup."""
+        manager = FileBackupManager(backup_dir=temp_backup_dir)
+
+        # Back up the file
+        backup_id = manager.backup_file(sample_file)
+
+        # Get the backup content
+        content = manager.get_backup_content(backup_id)
+
+        # Check the content
+        assert content == "This is the content of a test file"
+
+    def test_get_backup_content_with_invalid_id(self, temp_backup_dir):
+        """Test getting content with an invalid backup ID."""
+        manager = FileBackupManager(backup_dir=temp_backup_dir)
+
+        # Try to get the content of a nonexistent backup
+        content = manager.get_backup_content("invalid_backup_id")
+
+        # Should return None
+        assert content is None
+
+    def test_delete_backup(self, temp_backup_dir, sample_file):
+        """Test deleting a backup."""
+        manager = FileBackupManager(backup_dir=temp_backup_dir)
+
+        # Back up the file
+        backup_id = manager.backup_file(sample_file)
+        backup_file_path = os.path.join(temp_backup_dir, backup_id)
+
+        # Check that the backup file exists
+        assert os.path.exists(backup_file_path)
+        assert backup_id in manager.metadata
+
+        # Delete the backup
+        success = manager.delete_backup(backup_id)
+
+        # Check the deletion result
+        assert success is True
+        assert not os.path.exists(backup_file_path)
+        assert backup_id not in manager.metadata
+
+    def test_delete_nonexistent_backup(self, temp_backup_dir):
+        """Test deleting a nonexistent backup."""
+        manager = FileBackupManager(backup_dir=temp_backup_dir)
+
+        # Try to delete a nonexistent backup
+        success = manager.delete_backup("nonexistent_backup_id")
+
+        # Should return False
+        assert success is False
+
+    def test_get_backups_for_file(self, temp_backup_dir, sample_file):
+        """Test getting all backups for a given file."""
+        manager = FileBackupManager(backup_dir=temp_backup_dir)
+
+        # Create multiple backups
+        backup_ids = []
+        for i in range(3):
+            backup_id = manager.backup_file(sample_file)
+            backup_ids.append(backup_id)
+
+        # Get the list of backups for the file
+        backups = manager.get_backups_for_file(sample_file)
+
+        # Check the backup list
+        assert len(backups) == 3
+        for backup_id, timestamp in backups:
+            assert backup_id in backup_ids
+            assert isinstance(timestamp, float)
+
+    @pytest.mark.parametrize("max_age_days", [1, 7, 30])
+    def test_clean_old_backups(self, temp_backup_dir, sample_file, max_age_days):
+        """Test cleaning up old backups."""
+        manager = FileBackupManager(backup_dir=temp_backup_dir)
+
+        # Create a backup
+        backup_id = manager.backup_file(sample_file)
+
+        # Change the backup's timestamp to a time in the past
+        old_timestamp = (datetime.now() - timedelta(days=max_age_days+1)).timestamp()
+        manager.metadata[backup_id]["timestamp"] = old_timestamp
+        manager._save_metadata()
+
+        # Create a new backup
+        new_backup_id = manager.backup_file(sample_file)
+
+        # Clean up old backups
+        cleaned_count = manager.clean_old_backups(max_age_days)
+
+        # Check the cleanup result
+        assert cleaned_count == 1
+        assert backup_id not in manager.metadata
+        assert not os.path.exists(os.path.join(temp_backup_dir, backup_id))
+        assert new_backup_id in manager.metadata
+        assert os.path.exists(os.path.join(temp_backup_dir, new_backup_id))
+
+    def test_metadata_persistence(self, temp_backup_dir, sample_file):
+        """Test metadata persistence."""
+        # Create a backup
+        manager1 = FileBackupManager(backup_dir=temp_backup_dir)
+        backup_id = manager1.backup_file(sample_file)
+
+        # Create another manager instance; it should load the existing metadata
+        manager2 = FileBackupManager(backup_dir=temp_backup_dir)
+
+        # Check that the metadata was loaded correctly
+        assert backup_id in manager2.metadata
+        assert manager2.metadata[backup_id]["original_path"] == sample_file