auto-coder 0.1.330__py3-none-any.whl → 0.1.332__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of auto-coder might be problematic.
- {auto_coder-0.1.330.dist-info → auto_coder-0.1.332.dist-info}/METADATA +1 -1
- {auto_coder-0.1.330.dist-info → auto_coder-0.1.332.dist-info}/RECORD +47 -45
- autocoder/agent/agentic_filter.py +928 -0
- autocoder/agent/project_reader.py +1 -14
- autocoder/auto_coder.py +6 -47
- autocoder/auto_coder_runner.py +2 -0
- autocoder/command_args.py +1 -6
- autocoder/commands/auto_command.py +1 -1
- autocoder/commands/tools.py +68 -16
- autocoder/common/__init__.py +8 -3
- autocoder/common/auto_coder_lang.py +21 -1
- autocoder/common/code_auto_generate.py +6 -160
- autocoder/common/code_auto_generate_diff.py +5 -111
- autocoder/common/code_auto_generate_editblock.py +5 -95
- autocoder/common/code_auto_generate_strict_diff.py +6 -112
- autocoder/common/code_auto_merge_editblock.py +1 -45
- autocoder/common/code_modification_ranker.py +6 -2
- autocoder/common/command_templates.py +2 -9
- autocoder/common/conf_utils.py +36 -0
- autocoder/common/stream_out_type.py +7 -2
- autocoder/common/types.py +3 -2
- autocoder/common/v2/code_auto_generate.py +6 -4
- autocoder/common/v2/code_auto_generate_diff.py +4 -3
- autocoder/common/v2/code_auto_generate_editblock.py +9 -4
- autocoder/common/v2/code_auto_generate_strict_diff.py +182 -14
- autocoder/common/v2/code_auto_merge_diff.py +560 -306
- autocoder/common/v2/code_auto_merge_editblock.py +12 -45
- autocoder/common/v2/code_auto_merge_strict_diff.py +76 -7
- autocoder/common/v2/code_diff_manager.py +73 -6
- autocoder/common/v2/code_editblock_manager.py +534 -82
- autocoder/dispacher/actions/action.py +15 -28
- autocoder/dispacher/actions/plugins/action_regex_project.py +5 -9
- autocoder/helper/project_creator.py +0 -1
- autocoder/index/entry.py +35 -53
- autocoder/index/filter/normal_filter.py +0 -16
- autocoder/lang.py +2 -4
- autocoder/linters/shadow_linter.py +4 -0
- autocoder/pyproject/__init__.py +2 -19
- autocoder/rag/cache/simple_cache.py +31 -6
- autocoder/regexproject/__init__.py +4 -22
- autocoder/suffixproject/__init__.py +6 -24
- autocoder/tsproject/__init__.py +5 -22
- autocoder/version.py +1 -1
- {auto_coder-0.1.330.dist-info → auto_coder-0.1.332.dist-info}/LICENSE +0 -0
- {auto_coder-0.1.330.dist-info → auto_coder-0.1.332.dist-info}/WHEEL +0 -0
- {auto_coder-0.1.330.dist-info → auto_coder-0.1.332.dist-info}/entry_points.txt +0 -0
- {auto_coder-0.1.330.dist-info → auto_coder-0.1.332.dist-info}/top_level.txt +0 -0
autocoder/dispacher/actions/action.py
CHANGED
@@ -115,7 +115,7 @@ class ActionTSProject(BaseAction):
 
         global_cancel.check_and_raise()
 
-        if args.enable_auto_fix_lint and args.execute and args.auto_merge=="editblock":
+        if (args.enable_auto_fix_merge or args.enable_auto_fix_lint) and args.execute and args.auto_merge=="editblock":
             code_merge_manager = CodeEditBlockManager(llm=self.llm, args=self.args,action=self)
             code_merge_manager.run(query=args.query, source_code_list=source_code_list)
             return
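The same one-line gate change repeats in ActionPyProject, ActionSuffixProject and ActionRegexProject below: the CodeEditBlockManager path now also triggers when only the merge auto-fix is enabled. A minimal sketch of the new gate, using a hypothetical `Args` stand-in rather than the real AutoCoderArgs:

```python
from dataclasses import dataclass

# Hypothetical stand-in for AutoCoderArgs; field names match the diff.
@dataclass
class Args:
    enable_auto_fix_merge: bool = False
    enable_auto_fix_lint: bool = False
    execute: bool = True
    auto_merge: str = "editblock"

def should_use_editblock_manager(args: Args) -> bool:
    # 0.1.330 gated only on enable_auto_fix_lint; 0.1.332 also accepts
    # enable_auto_fix_merge, so either auto-fix flag routes generation
    # through CodeEditBlockManager.
    return ((args.enable_auto_fix_merge or args.enable_auto_fix_lint)
            and args.execute and args.auto_merge == "editblock")

assert should_use_editblock_manager(Args(enable_auto_fix_merge=True))
assert not should_use_editblock_manager(Args())
```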
@@ -138,15 +138,10 @@ class ActionTSProject(BaseAction):
         else:
             generate = CodeAutoGenerate(
                 llm=self.llm, args=self.args, action=self)
-
-        if self.args.enable_multi_round_generate:
-            generate_result = generate.multi_round_run(
-                query=args.query, source_code_list=source_code_list
-            )
-        else:
-            generate_result = generate.single_round_run(
-                query=args.query, source_code_list=source_code_list
-            )
+
+        generate_result = generate.single_round_run(
+            query=args.query, source_code_list=source_code_list
+        )
         elapsed_time = time.time() - start_time
         speed = generate_result.metadata.get(
             'generated_tokens_count', 0) / elapsed_time if elapsed_time > 0 else 0
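This hunk drops the enable_multi_round_generate branch, so generation always goes through single_round_run; the three sibling action classes below get the same cleanup, and the enable_multi_round_generate descriptions disappear from autocoder/lang.py further down. A before/after sketch of the control flow, with a stub standing in for CodeAutoGenerate:

```python
from types import SimpleNamespace

# Stub standing in for autocoder's CodeAutoGenerate; only the two call
# shapes visible in the diff are reproduced.
class StubGenerator:
    def single_round_run(self, query, source_code_list):
        return SimpleNamespace(metadata={"generated_tokens_count": 128})

    def multi_round_run(self, query, source_code_list):
        return SimpleNamespace(metadata={"generated_tokens_count": 512})

def generate_0_1_330(generate, args, sources):
    # Old behaviour: a config flag selected multi- vs single-round generation.
    if args.enable_multi_round_generate:
        return generate.multi_round_run(query=args.query, source_code_list=sources)
    return generate.single_round_run(query=args.query, source_code_list=sources)

def generate_0_1_332(generate, args, sources):
    # New behaviour: the branch is gone; single_round_run is the only path.
    return generate.single_round_run(query=args.query, source_code_list=sources)

args = SimpleNamespace(query="add retry logic", enable_multi_round_generate=True)
print(generate_0_1_332(StubGenerator(), args, []).metadata)
```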
@@ -274,7 +269,7 @@ class ActionPyProject(BaseAction):
 
         global_cancel.check_and_raise()
 
-        if args.enable_auto_fix_lint and args.execute and args.auto_merge=="editblock":
+        if (args.enable_auto_fix_merge or args.enable_auto_fix_lint) and args.execute and args.auto_merge=="editblock":
             code_merge_manager = CodeEditBlockManager(llm=self.llm, args=self.args,action=self)
             code_merge_manager.run(query=args.query, source_code_list=source_code_list)
             return
@@ -297,15 +292,11 @@ class ActionPyProject(BaseAction):
         else:
             generate = CodeAutoGenerate(
                 llm=self.llm, args=self.args, action=self)
+
+        generate_result = generate.single_round_run(
+            query=args.query, source_code_list=source_code_list
+        )
 
-        if self.args.enable_multi_round_generate:
-            generate_result = generate.multi_round_run(
-                query=args.query, source_code_list=source_code_list
-            )
-        else:
-            generate_result = generate.single_round_run(
-                query=args.query, source_code_list=source_code_list
-            )
         elapsed_time = time.time() - start_time
         speed = generate_result.metadata.get(
             'generated_tokens_count', 0) / elapsed_time if elapsed_time > 0 else 0
@@ -426,7 +417,7 @@ class ActionSuffixProject(BaseAction):
 
         global_cancel.check_and_raise()
 
-        if args.enable_auto_fix_lint and args.execute and args.auto_merge=="editblock":
+        if (args.enable_auto_fix_merge or args.enable_auto_fix_lint) and args.execute and args.auto_merge=="editblock":
             code_merge_manager = CodeEditBlockManager(llm=self.llm, args=self.args,action=self)
             code_merge_manager.run(query=args.query, source_code_list=source_code_list)
             return
@@ -449,14 +440,10 @@ class ActionSuffixProject(BaseAction):
         else:
             generate = CodeAutoGenerate(
                 llm=self.llm, args=self.args, action=self)
-        if self.args.enable_multi_round_generate:
-            generate_result = generate.multi_round_run(
-                query=args.query, source_code_list=source_code_list
-            )
-        else:
-            generate_result = generate.single_round_run(
-                query=args.query, source_code_list=source_code_list
-            )
+
+        generate_result = generate.single_round_run(
+            query=args.query, source_code_list=source_code_list
+        )
 
         elapsed_time = time.time() - start_time
         speed = generate_result.metadata.get(
autocoder/dispacher/actions/plugins/action_regex_project.py
CHANGED
@@ -69,7 +69,7 @@ class ActionRegexProject:
 
         global_cancel.check_and_raise()
 
-        if args.enable_auto_fix_lint and args.execute and args.auto_merge=="editblock":
+        if (args.enable_auto_fix_merge or args.enable_auto_fix_lint) and args.execute and args.auto_merge=="editblock":
             code_merge_manager = CodeEditBlockManager(llm=self.llm, args=self.args,action=self)
             code_merge_manager.run(query=args.query, source_code_list=source_code_list)
             return
@@ -91,14 +91,10 @@ class ActionRegexProject:
             )
         else:
             generate = CodeAutoGenerate(llm=self.llm, args=self.args, action=self)
-        if self.args.enable_multi_round_generate:
-            generate_result = generate.multi_round_run(
-                query=args.query, source_code_list=source_code_list
-            )
-        else:
-            generate_result = generate.single_round_run(
-                query=args.query, source_code_list=source_code_list
-            )
+
+        generate_result = generate.single_round_run(
+            query=args.query, source_code_list=source_code_list
+        )
 
         elapsed_time = time.time() - start_time
         speed = generate_result.metadata.get('generated_tokens_count', 0) / elapsed_time if elapsed_time > 0 else 0
autocoder/index/entry.py
CHANGED
@@ -30,6 +30,7 @@ from autocoder.common.action_yml_file_manager import ActionYmlFileManager
 
 from autocoder.events.event_manager_singleton import get_event_manager
 from autocoder.events import event_content as EventContentCreator
+from autocoder.agent.agentic_filter import AgenticFilter
 
 
 def build_index_and_filter_files(
@@ -86,15 +87,6 @@ def build_index_and_filter_files(
 
     if not args.skip_build_index and llm:
         # Phase 2: Build index
-        if args.request_id and not args.skip_events:
-            queue_communicate.send_event(
-                request_id=args.request_id,
-                event=CommunicateEvent(
-                    event_type=CommunicateEventType.CODE_INDEX_BUILD_START.value,
-                    data=json.dumps({"total_files": len(sources)})
-                )
-            )
-
         printer.print_in_terminal("phase2_building_index")
         phase_start = time.monotonic()
         index_manager = IndexManager(llm=llm, sources=sources, args=args)
@@ -103,33 +95,46 @@ def build_index_and_filter_files(
         phase_end = time.monotonic()
         stats["timings"]["build_index"] = phase_end - phase_start
 
-        if args.request_id and not args.skip_events:
-            queue_communicate.send_event(
-                request_id=args.request_id,
-                event=CommunicateEvent(
-                    event_type=CommunicateEventType.CODE_INDEX_BUILD_END.value,
-                    data=json.dumps({
-                        "indexed_files": stats["indexed_files"],
-                        "build_index_time": stats["timings"]["build_index"],
-                    })
-                )
-            )
-
     if not args.skip_filter_index and args.index_filter_model:
+
         model_name = getattr(
             index_manager.index_filter_llm, 'default_model_name', None)
         if not model_name:
             model_name = "unknown(without default model name)"
-        printer.print_in_terminal(
-            "quick_filter_start", style="blue", model_name=model_name)
-        quick_filter = QuickFilter(index_manager, stats, sources)
-        quick_filter_result = quick_filter.filter(
-            index_manager.read_index(), args.query)
 
-
+        if args.enable_agentic_filter:
+            from autocoder.agent.agentic_filter import AgenticFilterRequest, AgenticFilter, CommandConfig, MemoryConfig
+            from autocoder.common.conf_utils import load_memory
+
+            _memory = load_memory(args)
+
+            def save_memory_func():
+                pass
 
-
-
+            tuner = AgenticFilter(index_manager.index_filter_llm,
+                                  args=args,
+                                  conversation_history=[],
+                                  memory_config=MemoryConfig(
+                                      memory=_memory, save_memory_func=save_memory_func),
+                                  command_config=None)
+            response = tuner.analyze(
+                AgenticFilterRequest(user_input=args.query))
+            if response:
+                for file in response.files:
+                    final_files[file.path] = TargetFile(
+                        file_path=file.path, reason="Agentic Filter")
+        else:
+            printer.print_in_terminal(
+                "quick_filter_start", style="blue", model_name=model_name)
+
+            quick_filter = QuickFilter(index_manager, stats, sources)
+            quick_filter_result = quick_filter.filter(
+                index_manager.read_index(), args.query)
+
+            final_files.update(quick_filter_result.files)
+
+            if quick_filter_result.file_positions:
+                file_positions.update(quick_filter_result.file_positions)
 
     if not args.skip_filter_index and not args.index_filter_model:
         model_name = getattr(index_manager.llm, 'default_model_name', None)
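The hunk above adds an agentic file-selection path: when args.enable_agentic_filter is set, AgenticFilter.analyze() picks the files and each is recorded as a TargetFile with reason "Agentic Filter"; otherwise the existing QuickFilter flow runs unchanged. A minimal sketch of the toggle, with stubs in place of both filters:

```python
from types import SimpleNamespace

# Stubs standing in for AgenticFilter (autocoder.agent.agentic_filter)
# and QuickFilter; only the call shapes visible in the diff are kept.
class StubAgenticFilter:
    def analyze(self, request):
        return SimpleNamespace(files=[SimpleNamespace(path="src/auth.py")])

class StubQuickFilter:
    def filter(self, index, query):
        return SimpleNamespace(files={"src/util.py": ("TargetFile", "Quick Filter")},
                               file_positions={})

def select_files(args):
    final_files, file_positions = {}, {}
    if args.enable_agentic_filter:
        # New path: an agent analyzes the query and returns candidate files.
        response = StubAgenticFilter().analyze(SimpleNamespace(user_input=args.query))
        if response:
            for f in response.files:
                final_files[f.path] = ("TargetFile", "Agentic Filter")
    else:
        # Existing path: index-based quick filtering.
        result = StubQuickFilter().filter(index=None, query=args.query)
        final_files.update(result.files)
        file_positions.update(result.file_positions)
    return final_files

print(select_files(SimpleNamespace(enable_agentic_filter=True, query="fix login")))
```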
@@ -313,17 +318,6 @@ def build_index_and_filter_files(
             temp_sources, [{"role": "user", "content": args.query}], args.context_prune_strategy)
         source_code_list.sources = pruned_files
 
-        if args.request_id and not args.skip_events:
-            queue_communicate.send_event(
-                request_id=args.request_id,
-                event=CommunicateEvent(
-                    event_type=CommunicateEventType.CODE_INDEX_FILTER_FILE_SELECTED.value,
-                    data=json.dumps([
-                        (file.module_name, "") for file in source_code_list.sources
-                    ])
-                )
-            )
-
     stats["final_files"] = len(source_code_list.sources)
     phase_end = time.monotonic()
     stats["timings"]["prepare_output"] = phase_end - phase_start
@@ -374,18 +368,6 @@ def build_index_and_filter_files(
     # }
     # )
 
-    if args.request_id and not args.skip_events:
-        queue_communicate.send_event(
-            request_id=args.request_id,
-            event=CommunicateEvent(
-                event_type=CommunicateEventType.CODE_INDEX_FILTER_END.value,
-                data=json.dumps({
-                    "filtered_files": stats["final_files"],
-                    "filter_time": total_filter_time
-                })
-            )
-        )
-
     get_event_manager(args.event_file).write_result(
         EventContentCreator.create_result(
             content=EventContentCreator.ResultContextUsedContent(
@@ -395,7 +377,7 @@ def build_index_and_filter_files(
             ).to_dict()
         ),
         metadata=EventMetadata(
-            action_file=args.file
+            action_file=args.file
         ).to_dict()
     )
 
autocoder/index/filter/normal_filter.py
CHANGED
@@ -44,14 +44,6 @@ class NormalFilter():
 
         final_files: Dict[str, TargetFile] = {}
         if not self.args.skip_filter_index:
-            if self.args.request_id and not self.args.skip_events:
-                queue_communicate.send_event(
-                    request_id=self.args.request_id,
-                    event=CommunicateEvent(
-                        event_type=CommunicateEventType.CODE_INDEX_FILTER_START.value,
-                        data=json.dumps({})
-                    )
-                )
             # Phase 3: Level 1 filtering - Query-based
             logger.info(
                 "Phase 3: Performing Level 1 filtering (query-based)...")
@@ -71,14 +63,6 @@ class NormalFilter():
         if target_files is not None and self.args.index_filter_level >= 2:
             logger.info(
                 "Phase 4: Performing Level 2 filtering (related files)...")
-            if self.args.request_id and not self.args.skip_events:
-                queue_communicate.send_event(
-                    request_id=self.args.request_id,
-                    event=CommunicateEvent(
-                        event_type=CommunicateEventType.CODE_INDEX_FILTER_START.value,
-                        data=json.dumps({})
-                    )
-                )
             phase_start = time.monotonic()
             related_files = self.index_manager.get_related_files(
                 [file.file_path for file in target_files.file_list]
autocoder/lang.py
CHANGED
@@ -48,8 +48,7 @@ lang_desc = {
     "urls_use_model":"Whether to use model to processing content in urls. Default is False",
     "ray_address": "The address of the Ray cluster to connect to. Default is 'auto'",
     "enable_rag_search":"Whether to enable retrieval augmented generation using search. Default is False",
-    "enable_rag_context":"Whether to enable retrieval augmented generation using context. Default is False",
-    "enable_multi_round_generate":"Whether to enable multi-round conversation for generation. Default is False",
+    "enable_rag_context":"Whether to enable retrieval augmented generation using context. Default is False",
     "index_model_max_length":"The maximum length of the generated code by the index model. Default is 0, which means using the value of model_max_length",
     "index_model_max_input_length":"The maximum length of the input to the index model. Default is 0, which means using the value of model_max_input_length",
     "index_model_anti_quota_limit":"Time to wait in seconds after each API request for the index model. Default is 0, which means using the value of anti_quota_limit",
@@ -132,8 +131,7 @@ lang_desc = {
     "urls_use_model":"是否使用模型处理urls中的内容。默认为False",
     "ray_address": "要连接的Ray集群的地址。默认为'auto'",
     "enable_rag_search":"是否开启使用搜索的检索增强生成。默认为False",
-    "enable_rag_context":"是否开启使用上下文的检索增强生成。默认为False",
-    "enable_multi_round_generate":"是否开启多轮对话生成。默认为False",
+    "enable_rag_context":"是否开启使用上下文的检索增强生成。默认为False",
     "index_model_max_length":"索引模型生成代码的最大长度。默认为0,表示使用model_max_length的值",
     "index_model_max_input_length":"索引模型的最大输入长度。默认为0,表示使用model_max_input_length的值",
     "index_model_anti_quota_limit": "每次索引模型API请求后等待的秒数。默认为0,表示使用anti_quota_limit的值",
autocoder/linters/shadow_linter.py
CHANGED
@@ -16,6 +16,7 @@ from autocoder.linters.models import (
     IssuePosition,
     IssueSeverity
 )
+from loguru import logger as global_logger
 
 class ShadowLinter:
     """
@@ -32,6 +33,7 @@ class ShadowLinter:
         """
         self.shadow_manager = shadow_manager
         self.verbose = verbose
+        self.logger = global_logger.bind(name="ShadowLinter")
 
     def lint_shadow_file(self, shadow_path: str, fix: bool = False) -> FileLintResult:
         """
@@ -117,8 +119,10 @@ class ShadowLinter:
 
         # 处理每个影子文件
         for shadow_path in shadow_files:
+            self.logger.info(f"正在检查文件: {shadow_path}")
             try:
                 file_result = self.lint_shadow_file(shadow_path, fix=fix)
+                self.logger.info(f"检查完成: {shadow_path}")
                 # lint_shadow_file现在总是返回有效的FileLintResult,不再需要检查None
                 project_path = self.shadow_manager.from_shadow_path(shadow_path)
 
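ShadowLinter now binds a named loguru logger once in __init__ and logs before and after each shadow file is linted. loguru's bind() returns a child logger whose records all carry the extra field, so handlers can filter or format on it. A small sketch of the pattern (generic class name, not the real linter):

```python
from loguru import logger as global_logger

class Worker:
    def __init__(self):
        # bind() stamps every record from this logger with
        # extra={"name": "Worker"}; handlers can key off that field.
        self.logger = global_logger.bind(name="Worker")

    def process(self, path: str) -> None:
        self.logger.info(f"checking file: {path}")
        # ... do the actual work here ...
        self.logger.info(f"done: {path}")

Worker().process("src/app.py")
```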
autocoder/pyproject/__init__.py
CHANGED
@@ -233,15 +233,7 @@ class PyProject:
     def get_rag_source_codes(self):
         if not self.args.enable_rag_search and not self.args.enable_rag_context:
             return []
-
-        if self.args.request_id and not self.args.skip_events:
-            _ = queue_communicate.send_event(
-                request_id=self.args.request_id,
-                event=CommunicateEvent(
-                    event_type=CommunicateEventType.CODE_RAG_SEARCH_START.value,
-                    data=json.dumps({},ensure_ascii=False)
-                )
-            )
+
         else:
             console = Console()
             console.print(f"\n[bold blue]Starting RAG search for:[/bold blue] {self.args.query}")
@@ -250,16 +242,7 @@ class PyProject:
             rag = RAGFactory.get_rag(self.llm, self.args, "")
             docs = rag.search(self.args.query)
             for doc in docs:
-                doc.tag = "RAG"
-
-            if self.args.request_id and not self.args.skip_events:
-                _ = queue_communicate.send_event(
-                    request_id=self.args.request_id,
-                    event=CommunicateEvent(
-                        event_type=CommunicateEventType.CODE_RAG_SEARCH_END.value,
-                        data=json.dumps({},ensure_ascii=False)
-                    )
-                )
+                doc.tag = "RAG"
         else:
             console = Console()
             console.print(f"[bold green]Found {len(docs)} relevant documents[/bold green]")
autocoder/rag/cache/simple_cache.py
CHANGED
@@ -45,7 +45,7 @@ def generate_content_md5(content: Union[str, bytes]) -> str:
 
 
 class AutoCoderRAGAsyncUpdateQueue(BaseCacheManager):
-    def __init__(self, path: str, ignore_spec, required_exts: list):
+    def __init__(self, path: str, ignore_spec, required_exts: list, update_interval: int = 5):
         """
         初始化异步更新队列,用于管理代码文件的缓存。
 
@@ -53,6 +53,7 @@ class AutoCoderRAGAsyncUpdateQueue(BaseCacheManager):
             path: 需要索引的代码库根目录
             ignore_spec: 指定哪些文件/目录应被忽略的规则
             required_exts: 需要处理的文件扩展名列表
+            update_interval: 自动触发更新的时间间隔(秒),默认为5秒
 
         缓存结构 (self.cache):
         self.cache 是一个字典,其结构如下:
@@ -90,13 +91,22 @@ class AutoCoderRAGAsyncUpdateQueue(BaseCacheManager):
         self.path = path
         self.ignore_spec = ignore_spec
         self.required_exts = required_exts
+        self.update_interval = update_interval
         self.queue = []
         self.cache = {}  # 初始化为空字典,稍后通过 read_cache() 填充
         self.lock = threading.Lock()
         self.stop_event = threading.Event()
-
-
-        self.
+
+        # 启动处理队列的线程
+        self.queue_thread = threading.Thread(target=self._process_queue)
+        self.queue_thread.daemon = True
+        self.queue_thread.start()
+
+        # 启动定时触发更新的线程
+        self.update_thread = threading.Thread(target=self._periodic_update)
+        self.update_thread.daemon = True
+        self.update_thread.start()
+
         self.cache = self.read_cache()
 
     def _process_queue(self):
@@ -106,10 +116,25 @@ class AutoCoderRAGAsyncUpdateQueue(BaseCacheManager):
             except Exception as e:
                 logger.error(f"Error in process_queue: {e}")
             time.sleep(1)  # 避免过于频繁的检查
+
+    def _periodic_update(self):
+        """定时触发文件更新检查"""
+        while not self.stop_event.is_set():
+            try:
+                logger.debug(f"Periodic update triggered (every {self.update_interval}s)")
+                # 如果没有被初始化过,不会增量触发
+                if not self.cache:
+                    time.sleep(self.update_interval)
+                    continue
+                self.trigger_update()
+            except Exception as e:
+                logger.error(f"Error in periodic update: {e}")
+            time.sleep(self.update_interval)
 
     def stop(self):
         self.stop_event.set()
-        self.
+        self.queue_thread.join()
+        self.update_thread.join()
 
     def fileinfo_to_tuple(self, file_info: FileInfo) -> Tuple[str, str, float, str]:
         return (file_info.file_path, file_info.relative_path, file_info.modify_time, file_info.file_md5)
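The cache manager now owns two daemon threads: the existing queue processor and a new one that calls trigger_update() every update_interval seconds, but only once the cache has been populated; get_cache() no longer triggers updates itself (see the next hunk). A stripped-down sketch of the periodic-update pattern, with generic names rather than the real class:

```python
import threading
import time

class PeriodicUpdater:
    def __init__(self, update_interval: int = 5):
        self.update_interval = update_interval
        self.cache = {}
        self.stop_event = threading.Event()
        # Daemon thread so it never blocks interpreter shutdown.
        self.update_thread = threading.Thread(target=self._periodic_update)
        self.update_thread.daemon = True
        self.update_thread.start()

    def trigger_update(self):
        print("checking files for changes...")

    def _periodic_update(self):
        while not self.stop_event.is_set():
            # Skip incremental updates until the cache is first populated,
            # mirroring the `if not self.cache` guard in the diff.
            if self.cache:
                self.trigger_update()
            time.sleep(self.update_interval)

    def stop(self):
        self.stop_event.set()
        self.update_thread.join()

u = PeriodicUpdater(update_interval=1)
u.cache["warm"] = True
time.sleep(2.5)  # let a couple of update ticks fire
u.stop()
```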
@@ -285,7 +310,7 @@ class AutoCoderRAGAsyncUpdateQueue(BaseCacheManager):
 
     def get_cache(self, options: Optional[Dict[str, Any]] = None):
         self.load_first()
-
+        # 不再在这里触发更新,因为已经有定时线程在处理
        return self.cache
 
     def get_all_files(self) -> List[Tuple[str, str, float]]:
autocoder/regexproject/__init__.py
CHANGED
@@ -121,17 +121,8 @@ class RegexProject:
         if not self.args.enable_rag_search and not self.args.enable_rag_context:
             return []
 
-        if self.args.request_id and not self.args.skip_events:
-            _ = queue_communicate.send_event(
-                request_id=self.args.request_id,
-                event=CommunicateEvent(
-                    event_type=CommunicateEventType.CODE_RAG_SEARCH_START.value,
-                    data=json.dumps({},ensure_ascii=False)
-                )
-            )
-        else:
-            console = Console()
-            console.print(f"\n[bold blue]Starting RAG search for:[/bold blue] {self.args.query}")
+        console = Console()
+        console.print(f"\n[bold blue]Starting RAG search for:[/bold blue] {self.args.query}")
 
         from autocoder.rag.rag_entry import RAGFactory
         rag = RAGFactory.get_rag(self.llm, self.args, "")
@@ -139,17 +130,8 @@ class RegexProject:
         for doc in docs:
             doc.tag = "RAG"
 
-        if self.args.request_id and not self.args.skip_events:
-            _ = queue_communicate.send_event(
-                request_id=self.args.request_id,
-                event=CommunicateEvent(
-                    event_type=CommunicateEventType.CODE_RAG_SEARCH_END.value,
-                    data=json.dumps({},ensure_ascii=False)
-                )
-            )
-        else:
-            console = Console()
-            console.print(f"[bold green]Found {len(docs)} relevant documents[/bold green]")
+        console = Console()
+        console.print(f"[bold green]Found {len(docs)} relevant documents[/bold green]")
 
         return docs
 
autocoder/suffixproject/__init__.py
CHANGED
@@ -162,36 +162,18 @@ class SuffixProject:
     def get_rag_source_codes(self):
         if not self.args.enable_rag_search and not self.args.enable_rag_context:
             return []
-
-        if self.args.request_id and not self.args.skip_events:
-            _ = queue_communicate.send_event(
-                request_id=self.args.request_id,
-                event=CommunicateEvent(
-                    event_type=CommunicateEventType.CODE_RAG_SEARCH_START.value,
-                    data=json.dumps({},ensure_ascii=False)
-                )
-            )
-        else:
-            console = Console()
-            console.print(f"\n[bold blue]Starting RAG search for:[/bold blue] {self.args.query}")
+
+        console = Console()
+        console.print(f"\n[bold blue]Starting RAG search for:[/bold blue] {self.args.query}")
 
         from autocoder.rag.rag_entry import RAGFactory
         rag = RAGFactory.get_rag(self.llm, self.args, "")
         docs = rag.search(self.args.query)
         for doc in docs:
             doc.tag = "RAG"
-
-        if self.args.request_id and not self.args.skip_events:
-            _ = queue_communicate.send_event(
-                request_id=self.args.request_id,
-                event=CommunicateEvent(
-                    event_type=CommunicateEventType.CODE_RAG_SEARCH_END.value,
-                    data=json.dumps({},ensure_ascii=False)
-                )
-            )
-        else:
-            console = Console()
-            console.print(f"[bold green]Found {len(docs)} relevant documents[/bold green]")
+
+        console = Console()
+        console.print(f"[bold green]Found {len(docs)} relevant documents[/bold green]")
 
         return docs
 
autocoder/tsproject/__init__.py
CHANGED
@@ -198,17 +198,8 @@ class TSProject:
         if not self.args.enable_rag_search and not self.args.enable_rag_context:
             return []
 
-        if self.args.request_id and not self.args.skip_events:
-            _ = queue_communicate.send_event(
-                request_id=self.args.request_id,
-                event=CommunicateEvent(
-                    event_type=CommunicateEventType.CODE_RAG_SEARCH_START.value,
-                    data=json.dumps({},ensure_ascii=False)
-                )
-            )
-        else:
-            console = Console()
-            console.print(f"\n[bold blue]Starting RAG search for:[/bold blue] {self.args.query}")
+        console = Console()
+        console.print(f"\n[bold blue]Starting RAG search for:[/bold blue] {self.args.query}")
 
         from autocoder.rag.rag_entry import RAGFactory
         rag = RAGFactory.get_rag(self.llm, self.args, "")
@@ -216,17 +207,9 @@ class TSProject:
         for doc in docs:
             doc.tag = "RAG"
 
-        if self.args.request_id and not self.args.skip_events:
-            _ = queue_communicate.send_event(
-                request_id=self.args.request_id,
-                event=CommunicateEvent(
-                    event_type=CommunicateEventType.CODE_RAG_SEARCH_END.value,
-                    data=json.dumps({},ensure_ascii=False)
-                )
-            )
-        else:
-            console = Console()
-            console.print(f"[bold green]Found {len(docs)} relevant documents[/bold green]")
+
+        console = Console()
+        console.print(f"[bold green]Found {len(docs)} relevant documents[/bold green]")
 
         return docs
 
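Across TSProject, SuffixProject, RegexProject and PyProject, get_rag_source_codes drops the queue_communicate start/end events and keeps only the rich Console progress output around the RAG search. A sketch of the surviving flow, with a stub in place of RAGFactory:

```python
from types import SimpleNamespace
from rich.console import Console

# Stub standing in for RAGFactory.get_rag(...).search(...); the real
# entry point is autocoder.rag.rag_entry.RAGFactory.
class StubRAG:
    def search(self, query):
        return [SimpleNamespace(tag=None, content=f"doc about {query}")]

def get_rag_source_codes(args):
    if not args.enable_rag_search and not args.enable_rag_context:
        return []

    console = Console()
    console.print(f"\n[bold blue]Starting RAG search for:[/bold blue] {args.query}")

    docs = StubRAG().search(args.query)
    for doc in docs:
        doc.tag = "RAG"

    console.print(f"[bold green]Found {len(docs)} relevant documents[/bold green]")
    return docs

args = SimpleNamespace(enable_rag_search=True, enable_rag_context=False,
                       query="auth flow")
print(get_rag_source_codes(args))
```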
autocoder/version.py
CHANGED
@@ -1 +1 @@
-__version__ = "0.1.330"
+__version__ = "0.1.332"
{auto_coder-0.1.330.dist-info → auto_coder-0.1.332.dist-info}/LICENSE: file without changes
{auto_coder-0.1.330.dist-info → auto_coder-0.1.332.dist-info}/WHEEL: file without changes
{auto_coder-0.1.330.dist-info → auto_coder-0.1.332.dist-info}/entry_points.txt: file without changes
{auto_coder-0.1.330.dist-info → auto_coder-0.1.332.dist-info}/top_level.txt: file without changes