auto-coder 0.1.354__py3-none-any.whl → 0.1.356__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of auto-coder might be problematic.
- {auto_coder-0.1.354.dist-info → auto_coder-0.1.356.dist-info}/METADATA +1 -1
- {auto_coder-0.1.354.dist-info → auto_coder-0.1.356.dist-info}/RECORD +40 -35
- autocoder/agent/agentic_filter.py +1 -1
- autocoder/agent/auto_learn.py +631 -0
- autocoder/auto_coder.py +8 -0
- autocoder/auto_coder_runner.py +59 -87
- autocoder/chat/conf_command.py +270 -0
- autocoder/chat/models_command.py +485 -0
- autocoder/chat/rules_command.py +458 -0
- autocoder/chat_auto_coder.py +34 -24
- autocoder/chat_auto_coder_lang.py +156 -2
- autocoder/commands/auto_command.py +1 -1
- autocoder/commands/auto_web.py +1 -1
- autocoder/common/__init__.py +2 -0
- autocoder/common/auto_coder_lang.py +9 -1
- autocoder/common/command_completer.py +58 -12
- autocoder/common/command_completer_v2.py +615 -0
- autocoder/common/global_cancel.py +53 -16
- autocoder/common/rulefiles/autocoderrules_utils.py +83 -0
- autocoder/common/v2/agent/agentic_edit.py +4 -4
- autocoder/common/v2/code_agentic_editblock_manager.py +9 -9
- autocoder/common/v2/code_diff_manager.py +2 -2
- autocoder/common/v2/code_editblock_manager.py +11 -10
- autocoder/common/v2/code_strict_diff_manager.py +3 -2
- autocoder/dispacher/actions/action.py +6 -6
- autocoder/dispacher/actions/plugins/action_regex_project.py +2 -2
- autocoder/events/event_manager_singleton.py +1 -1
- autocoder/index/index.py +2 -2
- autocoder/rag/cache/local_byzer_storage_cache.py +1 -1
- autocoder/rag/cache/local_duckdb_storage_cache.py +8 -0
- autocoder/rag/loaders/image_loader.py +25 -13
- autocoder/rag/long_context_rag.py +2 -2
- autocoder/utils/auto_coder_utils/chat_stream_out.py +3 -4
- autocoder/utils/model_provider_selector.py +14 -2
- autocoder/utils/thread_utils.py +9 -27
- autocoder/version.py +1 -1
- {auto_coder-0.1.354.dist-info → auto_coder-0.1.356.dist-info}/LICENSE +0 -0
- {auto_coder-0.1.354.dist-info → auto_coder-0.1.356.dist-info}/WHEEL +0 -0
- {auto_coder-0.1.354.dist-info → auto_coder-0.1.356.dist-info}/entry_points.txt +0 -0
- {auto_coder-0.1.354.dist-info → auto_coder-0.1.356.dist-info}/top_level.txt +0 -0
autocoder/rag/long_context_rag.py
CHANGED

@@ -27,7 +27,7 @@ from autocoder.rag.relevant_utils import (
     DocFilterResult
 )
 from autocoder.rag.token_checker import check_token_limit
-from autocoder.rag.token_counter import RemoteTokenCounter, TokenCounter
+from autocoder.rag.token_counter import RemoteTokenCounter, TokenCounter,count_tokens
 from autocoder.rag.token_limiter import TokenLimiter
 from tokenizers import Tokenizer
 from autocoder.rag.variable_holder import VariableHolder

@@ -782,7 +782,7 @@ class LongContextRAG:
         )

         # 记录令牌统计
-        request_tokens = sum([doc.
+        request_tokens = sum([count_tokens(doc.source_code) for doc in relevant_docs])
         target_model = target_llm.default_model_name
         logger.info(
             f"=== LLM Request ===\n"
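Note: the long_context_rag.py change replaces the old per-document expression (truncated in this diff) with an explicit count_tokens call over each document's source_code. The sketch below only illustrates that summing pattern; SourceDoc and the whitespace-based counter are hypothetical stand-ins, since the real count_tokens in autocoder.rag.token_counter is tokenizer-backed and its exact signature is not shown here.

from dataclasses import dataclass
from typing import List


@dataclass
class SourceDoc:
    # Hypothetical stand-in for the RAG document objects used in LongContextRAG.
    module_name: str
    source_code: str


def count_tokens(text: str) -> int:
    # Placeholder counter for illustration; the real function is tokenizer-backed.
    return len(text.split())


def total_request_tokens(relevant_docs: List[SourceDoc]) -> int:
    # Mirrors the new line in the diff: sum token counts across all relevant docs.
    return sum(count_tokens(doc.source_code) for doc in relevant_docs)


if __name__ == "__main__":
    docs = [
        SourceDoc("a.py", "def add(a, b):\n    return a + b"),
        SourceDoc("b.py", "print('hello world')"),
    ]
    print(total_request_tokens(docs))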
autocoder/utils/auto_coder_utils/chat_stream_out.py
CHANGED

@@ -230,9 +230,8 @@ def stream_out(
         refresh_per_second=4,
         console=console
     ) as live:
-        for res in stream_generator:
-            global_cancel.check_and_raise(
-
+        for res in stream_generator:
+            global_cancel.check_and_raise(args.event_file)
             last_meta = res[1]
             content = res[0]

@@ -241,7 +240,7 @@ def stream_out(
             reasoning_content = last_meta.reasoning_content

             if reasoning_content == "" and content == "":
-                continue
+                continue

             if first_token_time == 0.0:
                 first_token_time = time.time() - first_token_time_start
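Note: the stream_out change passes args.event_file into global_cancel.check_and_raise on every streamed chunk, and the second hunk only re-indents the continue that skips chunks with neither content nor reasoning_content. A minimal stand-alone sketch of that consumption loop follows. In the real code each item is a (content, meta) pair where meta carries reasoning_content; the flat (content, reasoning_content) tuples, the cancel-flag dict, and CancelRequested below are simplifications for illustration only.

import time
from typing import Dict, Iterable, Optional, Tuple


class CancelRequested(Exception):
    # Hypothetical stand-in for auto-coder's cancellation exception.
    pass


def check_and_raise(cancel_flag: Dict[str, bool], key: Optional[str]) -> None:
    # Stand-in for global_cancel.check_and_raise(args.event_file):
    # raise as soon as a cancel has been recorded for this key.
    if cancel_flag.get(key):
        raise CancelRequested(key)


def consume_stream(stream: Iterable[Tuple[str, str]],
                   cancel_flag: Dict[str, bool],
                   key: str) -> float:
    # Mirrors the stream_out loop: check for cancellation on every chunk,
    # skip empty chunks, and record the time to the first non-empty token.
    first_token_time = 0.0
    start = time.time()
    for content, reasoning_content in stream:
        check_and_raise(cancel_flag, key)
        if reasoning_content == "" and content == "":
            continue
        if first_token_time == 0.0:
            first_token_time = time.time() - start
        print(content, end="")
    return first_token_time


if __name__ == "__main__":
    chunks = [("", ""), ("Hello ", ""), ("world", "")]
    latency = consume_stream(iter(chunks), cancel_flag={}, key="event-1")
    print(f"\nfirst token after {latency:.4f}s")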
autocoder/utils/model_provider_selector.py
CHANGED

@@ -26,7 +26,7 @@ PROVIDER_INFO_LIST = [
         name="volcano",
         endpoint="https://ark.cn-beijing.volces.com/api/v3",
         r1_model="deepseek-r1-250120",
-        v3_model="deepseek-v3-
+        v3_model="deepseek-v3-250324",
         api_key="",
         r1_input_price=2.0,
         r1_output_price=8.0,

@@ -55,6 +55,17 @@ PROVIDER_INFO_LIST = [
         v3_input_price=2.0,
         v3_output_price=8.0,
     ),
+    ProviderInfo(
+        name="openrouter",
+        endpoint="https://openrouter.ai/api/v1",
+        r1_model="deepseek/deepseek-r1",
+        v3_model="deepseek/deepseek-chat-v3-0324",
+        api_key="",
+        r1_input_price=0.0,
+        r1_output_price=0.0,
+        v3_input_price=0.0,
+        v3_output_price=0.0,
+    )
 ]

 dialog_style = Style.from_dict({

@@ -147,7 +158,8 @@ class ModelProviderSelector:
             values=[
                 ("volcano", self.printer.get_message_from_key("model_provider_volcano")),
                 ("siliconflow", self.printer.get_message_from_key("model_provider_siliconflow")),
-                ("deepseek", self.printer.get_message_from_key("model_provider_deepseek"))
+                ("deepseek", self.printer.get_message_from_key("model_provider_deepseek")),
+                ("openrouter", self.printer.get_message_from_key("model_provider_openrouter"))
             ],
             style=dialog_style
         ).run()
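Note: these hunks pin the volcano v3 model to deepseek-v3-250324 and register a new openrouter entry alongside the existing volcano, siliconflow, and deepseek choices in the selector dialog. A rough sketch of what a ProviderInfo record and a name lookup might look like follows; the field names come from the keyword arguments in the diff, but the class definition itself is not shown there, so treat this dataclass and the find_provider helper as assumptions for illustration.

from dataclasses import dataclass
from typing import List, Optional


@dataclass
class ProviderInfo:
    # Field names follow the keyword arguments used in PROVIDER_INFO_LIST;
    # the real class in model_provider_selector.py may carry more fields.
    name: str
    endpoint: str
    r1_model: str
    v3_model: str
    api_key: str = ""
    r1_input_price: float = 0.0
    r1_output_price: float = 0.0
    v3_input_price: float = 0.0
    v3_output_price: float = 0.0


PROVIDER_INFO_LIST: List[ProviderInfo] = [
    ProviderInfo(
        name="openrouter",
        endpoint="https://openrouter.ai/api/v1",
        r1_model="deepseek/deepseek-r1",
        v3_model="deepseek/deepseek-chat-v3-0324",
    ),
]


def find_provider(name: str) -> Optional[ProviderInfo]:
    # Simple lookup by provider name, e.g. after the user picks one in the dialog.
    return next((p for p in PROVIDER_INFO_LIST if p.name == name), None)


if __name__ == "__main__":
    provider = find_provider("openrouter")
    print(provider.endpoint if provider else "unknown provider")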
autocoder/utils/thread_utils.py
CHANGED

@@ -36,9 +36,7 @@ def run_in_raw_thread(token: Optional[str] = None, context: Optional[Dict[str, A
     def wrapper(*args, **kwargs):
         # Store thread results
         result = []
-        exception_raised = [None]  # 存储工作线程中的异常
-        thread_token = token
-        thread_context = context or {}
+        exception_raised = [None]  # 存储工作线程中的异常
         thread_terminated = threading.Event()  # 用于标记线程是否已终止

         def worker():

@@ -53,9 +51,7 @@ def run_in_raw_thread(token: Optional[str] = None, context: Optional[Dict[str, A
             except Exception as e:
                 # 存储其他异常
                 exception_raised[0] = e
-            finally:
-                # 无论如何执行完毕后,重置取消标志并标记线程已终止
-                global_cancel.reset(thread_token)
+            finally:
                 thread_terminated.set()

         # Create and start thread with a meaningful name

@@ -72,18 +68,7 @@ def run_in_raw_thread(token: Optional[str] = None, context: Optional[Dict[str, A

             while thread.is_alive():
                 # 每次等待较短时间,以便能够及时响应中断
-                thread.join(0.1)
-
-                # 检查是否已经超过最大等待时间(仅适用于已取消的情况)
-                elapsed_time = time.time() - wait_start_time
-                if cancelled_by_keyboard and elapsed_time > max_wait_time:
-                    printer.print_in_terminal("force_terminating_thread")
-                    break
-
-                # 检查线程间的取消请求
-                if global_cancel.is_requested(thread_token):
-                    # 传播取消请求到工作线程
-                    raise CancelRequestedException(thread_token)
+                thread.join(0.1)

             # 如果工作线程出现了异常,在主线程中重新抛出
             if exception_raised[0] is not None:

@@ -92,15 +77,12 @@ def run_in_raw_thread(token: Optional[str] = None, context: Optional[Dict[str, A
             # 返回结果
             return result[0] if result else None

-        except KeyboardInterrupt:
-            #
-
-
-            global_cancel.
-            printer.print_in_terminal("cancellation_requested")
-
-            # 标记为键盘中断取消
-            cancelled_by_keyboard = True
+        except KeyboardInterrupt:
+            # 取消所有任务
+            for token in global_cancel.get_active_tokens():
+                print(f"Cancelling job: {token}")
+            global_cancel.set_active_tokens()
+            printer.print_in_terminal("cancellation_requested")
             wait_start_time = time.time()

             # 等待线程终止或检测到取消
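Note: in thread_utils.py the per-call token/context bookkeeping and the forced-timeout logic are removed; the join loop now just polls in 0.1 s slices so Ctrl+C stays responsive, and the KeyboardInterrupt handler cancels every active token instead of only its own. Below is a small stand-alone sketch of that shape. CancelRegistry is a hypothetical stand-in: the method names get_active_tokens and set_active_tokens appear in the diff, but the real autocoder.common.global_cancel API is not shown here, so the registry and its cancel_all method are assumptions.

import threading
import time
from typing import Callable, Set


class CancelRegistry:
    # Hypothetical stand-in for a token-keyed cancel registry: tracks job tokens
    # and lets a Ctrl+C handler flag all of them for cancellation at once.
    def __init__(self) -> None:
        self._active: Set[str] = set()
        self._cancelled: Set[str] = set()
        self._lock = threading.Lock()

    def register(self, token: str) -> None:
        with self._lock:
            self._active.add(token)

    def get_active_tokens(self) -> Set[str]:
        with self._lock:
            return set(self._active)

    def cancel_all(self) -> None:
        with self._lock:
            self._cancelled |= self._active

    def is_cancelled(self, token: str) -> bool:
        with self._lock:
            return token in self._cancelled


def run_in_thread(fn: Callable[[], None], token: str, registry: CancelRegistry) -> None:
    # Mirrors the run_in_raw_thread pattern: run fn in a worker thread, join in
    # short slices so KeyboardInterrupt is handled promptly, then cancel every
    # active job when the user interrupts.
    registry.register(token)
    thread = threading.Thread(target=fn, name=f"job-{token}", daemon=True)
    thread.start()
    try:
        while thread.is_alive():
            thread.join(0.1)
    except KeyboardInterrupt:
        for active in registry.get_active_tokens():
            print(f"Cancelling job: {active}")
        registry.cancel_all()


if __name__ == "__main__":
    reg = CancelRegistry()
    run_in_thread(lambda: time.sleep(0.3), token="demo", registry=reg)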
autocoder/version.py
CHANGED
@@ -1 +1 @@
-__version__ = "0.1.354"
+__version__ = "0.1.356"
{auto_coder-0.1.354.dist-info → auto_coder-0.1.356.dist-info}/LICENSE
File without changes
{auto_coder-0.1.354.dist-info → auto_coder-0.1.356.dist-info}/WHEEL
File without changes
{auto_coder-0.1.354.dist-info → auto_coder-0.1.356.dist-info}/entry_points.txt
File without changes
{auto_coder-0.1.354.dist-info → auto_coder-0.1.356.dist-info}/top_level.txt
File without changes