auto-coder 0.1.258-py3-none-any.whl → 0.1.260-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of auto-coder might be problematic.

Files changed (30)
  1. {auto_coder-0.1.258.dist-info → auto_coder-0.1.260.dist-info}/METADATA +1 -1
  2. {auto_coder-0.1.258.dist-info → auto_coder-0.1.260.dist-info}/RECORD +30 -25
  3. autocoder/auto_coder.py +25 -23
  4. autocoder/auto_coder_rag.py +7 -7
  5. autocoder/auto_coder_rag_client_mcp.py +1 -1
  6. autocoder/chat_auto_coder.py +471 -523
  7. autocoder/chat_auto_coder_lang.py +2 -0
  8. autocoder/commands/__init__.py +0 -0
  9. autocoder/commands/auto_command.py +1145 -0
  10. autocoder/commands/tools.py +533 -0
  11. autocoder/common/auto_coder_lang.py +34 -6
  12. autocoder/common/auto_configure.py +304 -0
  13. autocoder/common/code_modification_ranker.py +8 -7
  14. autocoder/common/command_completer.py +566 -0
  15. autocoder/common/command_templates.py +7 -3
  16. autocoder/common/git_utils.py +82 -1
  17. autocoder/common/result_manager.py +115 -0
  18. autocoder/common/utils_code_auto_generate.py +2 -2
  19. autocoder/dispacher/actions/action.py +8 -4
  20. autocoder/dispacher/actions/plugins/action_regex_project.py +2 -1
  21. autocoder/index/entry.py +1 -1
  22. autocoder/index/filter/quick_filter.py +14 -2
  23. autocoder/rag/raw_rag.py +1 -1
  24. autocoder/utils/auto_coder_utils/chat_stream_out.py +13 -6
  25. autocoder/utils/thread_utils.py +4 -0
  26. autocoder/version.py +1 -1
  27. {auto_coder-0.1.258.dist-info → auto_coder-0.1.260.dist-info}/LICENSE +0 -0
  28. {auto_coder-0.1.258.dist-info → auto_coder-0.1.260.dist-info}/WHEEL +0 -0
  29. {auto_coder-0.1.258.dist-info → auto_coder-0.1.260.dist-info}/entry_points.txt +0 -0
  30. {auto_coder-0.1.258.dist-info → auto_coder-0.1.260.dist-info}/top_level.txt +0 -0
@@ -22,6 +22,7 @@ from prompt_toolkit.completion import WordCompleter, Completer, Completion
22
22
  from prompt_toolkit.shortcuts import confirm
23
23
  from autocoder.common import AutoCoderArgs
24
24
  from pydantic import Field, BaseModel
25
+ from autocoder.common.result_manager import ResultManager
25
26
  from autocoder.version import __version__
26
27
  from autocoder.auto_coder import main as auto_coder_main
27
28
  from autocoder.common.command_completer import CommandTextParser
@@ -55,6 +56,7 @@ from autocoder.utils.llms import get_single_llm
55
56
  import pkg_resources
56
57
  from autocoder.common.printer import Printer
57
58
  from autocoder.utils.thread_utils import run_in_thread,run_in_raw_thread
59
+ from autocoder.common.command_completer import CommandCompleter,FileSystemModel as CCFileSystemModel,MemoryConfig as CCMemoryModel
58
60
 
59
61
  class SymbolItem(BaseModel):
60
62
  symbol_name: str
@@ -74,11 +76,17 @@ def parse_arguments():
74
76
  help="Enter the auto-coder.chat without initializing the system",
75
77
  )
76
78
 
79
+ parser.add_argument(
80
+ "--skip_provider_selection",
81
+ action="store_true",
82
+ help="Skip the provider selection",
83
+ )
84
+
77
85
  parser.add_argument(
78
86
  "--product_mode",
79
87
  type=str,
80
- default="pro",
81
- help="The mode of the auto-coder.chat, lite/pro default is pro",
88
+ default="lite",
89
+ help="The mode of the auto-coder.chat, lite/pro default is lite",
82
90
  )
83
91
 
84
92
  parser.add_argument("--lite", action="store_true", help="Lite mode")
@@ -132,6 +140,7 @@ commands = [
132
140
  "/design",
133
141
  "/mcp",
134
142
  "/models",
143
+ "/auto",
135
144
  ]
136
145
 
137
146
 
@@ -252,7 +261,12 @@ def configure_project_type():
252
261
 
253
262
 
254
263
  def initialize_system(args):
264
+ from autocoder.utils.model_provider_selector import ModelProviderSelector
265
+ from autocoder import models as models_module
255
266
  print(f"\n\033[1;34m{get_message('initializing')}\033[0m")
267
+
268
+ first_time = [False]
269
+ configure_success = [False]
256
270
 
257
271
  def print_status(message, status):
258
272
  if status == "success":
@@ -264,10 +278,9 @@ def initialize_system(args):
264
278
  else:
265
279
  print(f" {message}")
266
280
 
267
- def init_project():
268
- first_time = False
281
+ def init_project():
269
282
  if not os.path.exists(".auto-coder"):
270
- first_time = True
283
+ first_time[0] = True
271
284
  print_status(get_message("not_initialized"), "warning")
272
285
  init_choice = input(
273
286
  f" {get_message('init_prompt')}").strip().lower()
@@ -290,140 +303,150 @@ def initialize_system(args):
290
303
  print_status(get_message("created_dir").format(
291
304
  base_persist_dir), "success")
292
305
 
293
- if first_time:
306
+ if first_time[0]:
294
307
  configure_project_type()
308
+ configure_success[0] = True
295
309
 
296
310
  print_status(get_message("init_complete"), "success")
297
311
 
298
312
  init_project()
299
313
 
300
- if args.product_mode == "lite":
301
- from autocoder.utils.model_provider_selector import ModelProviderSelector
302
- from autocoder import models as models_module
303
- if not models_module.check_model_exists("v3_chat") or not models_module.check_model_exists("r1_chat"):
304
- model_provider_selector = ModelProviderSelector()
305
- model_provider_info = model_provider_selector.select_provider()
306
- if model_provider_info is not None:
307
- models_json_list = model_provider_selector.to_models_json(model_provider_info)
308
- models_module.add_and_activate_models(models_json_list)
309
- r1_model = models_json_list[0]['name']
310
- v3_model = models_json_list[1]['name']
311
- configure(f"model:{v3_model}", skip_print=True)
312
- configure(f"chat_model:{r1_model}", skip_print=True)
313
- configure(f"generate_rerank_model:{r1_model}", skip_print=True)
314
- configure(f"code_model:{v3_model}", skip_print=True)
315
- configure(f"index_filter_model:{r1_model}", skip_print=True)
316
-
317
- if args.product_mode == "pro":
318
- # Check if Ray is running
319
- print_status(get_message("checking_ray"), "")
320
- ray_status = subprocess.run(
321
- ["ray", "status"], capture_output=True, text=True)
322
- if ray_status.returncode != 0:
323
- print_status(get_message("ray_not_running"), "warning")
314
+ if not args.skip_provider_selection and first_time[0]:
315
+ if args.product_mode == "lite":
316
+ ## If the project is already configured, there is no need to select again
317
+ if first_time[0]:
318
+ if not models_module.check_model_exists("v3_chat") or not models_module.check_model_exists("r1_chat"):
319
+ model_provider_selector = ModelProviderSelector()
320
+ model_provider_info = model_provider_selector.select_provider()
321
+ if model_provider_info is not None:
322
+ models_json_list = model_provider_selector.to_models_json(model_provider_info)
323
+ models_module.add_and_activate_models(models_json_list)
324
+
325
+ if args.product_mode == "pro":
326
+ # Check if Ray is running
327
+ print_status(get_message("checking_ray"), "")
328
+ ray_status = subprocess.run(
329
+ ["ray", "status"], capture_output=True, text=True)
330
+ if ray_status.returncode != 0:
331
+ print_status(get_message("ray_not_running"), "warning")
332
+ try:
333
+ subprocess.run(["ray", "start", "--head"], check=True)
334
+ print_status(get_message("ray_start_success"), "success")
335
+ except subprocess.CalledProcessError:
336
+ print_status(get_message("ray_start_fail"), "error")
337
+ return
338
+ else:
339
+ print_status(get_message("ray_running"), "success")
340
+
341
+ # Check if deepseek_chat model is available
342
+ print_status(get_message("checking_model"), "")
324
343
  try:
325
- subprocess.run(["ray", "start", "--head"], check=True)
326
- print_status(get_message("ray_start_success"), "success")
344
+ result = subprocess.run(
345
+ ["easy-byzerllm", "chat", "v3_chat", "你好"],
346
+ capture_output=True,
347
+ text=True,
348
+ timeout=30,
349
+ )
350
+ if result.returncode == 0:
351
+ print_status(get_message("model_available"), "success")
352
+ init_project()
353
+ print_status(get_message("init_complete_final"), "success")
354
+ return
355
+ except subprocess.TimeoutExpired:
356
+ print_status(get_message("model_timeout"), "error")
327
357
  except subprocess.CalledProcessError:
328
- print_status(get_message("ray_start_fail"), "error")
329
- return
330
- else:
331
- print_status(get_message("ray_running"), "success")
358
+ print_status(get_message("model_error"), "error")
359
+
360
+ # If deepseek_chat is not available
361
+ print_status(get_message("model_not_available"), "warning")
362
+ api_key = prompt(HTML(f"<b>{get_message('enter_api_key')} </b>"))
363
+
364
+ print_status(get_message("deploying_model").format("Deepseek官方"), "")
365
+ deploy_cmd = [
366
+ "byzerllm",
367
+ "deploy",
368
+ "--pretrained_model_type",
369
+ "saas/openai",
370
+ "--cpus_per_worker",
371
+ "0.001",
372
+ "--gpus_per_worker",
373
+ "0",
374
+ "--worker_concurrency",
375
+ "1000",
376
+ "--num_workers",
377
+ "1",
378
+ "--infer_params",
379
+ f"saas.base_url=https://api.deepseek.com/v1 saas.api_key={api_key} saas.model=deepseek-chat",
380
+ "--model",
381
+ "v3_chat",
382
+ ]
332
383
 
333
- # Check if deepseek_chat model is available
334
- print_status(get_message("checking_model"), "")
335
- try:
336
- result = subprocess.run(
337
- ["easy-byzerllm", "chat", "deepseek_chat", "你好"],
338
- capture_output=True,
339
- text=True,
340
- timeout=30,
341
- )
342
- if result.returncode == 0:
343
- print_status(get_message("model_available"), "success")
344
- init_project()
345
- print_status(get_message("init_complete_final"), "success")
384
+ try:
385
+ subprocess.run(deploy_cmd, check=True)
386
+ print_status(get_message("deploy_complete"), "success")
387
+ except subprocess.CalledProcessError:
388
+ print_status(get_message("deploy_fail"), "error")
346
389
  return
347
- except subprocess.TimeoutExpired:
348
- print_status(get_message("model_timeout"), "error")
349
- except subprocess.CalledProcessError:
350
- print_status(get_message("model_error"), "error")
351
-
352
- # If deepseek_chat is not available
353
- print_status(get_message("model_not_available"), "warning")
354
- api_key = prompt(HTML(f"<b>{get_message('enter_api_key')} </b>"))
355
-
356
- print_status(get_message("deploying_model").format("Deepseek官方"), "")
357
- deploy_cmd = [
358
- "byzerllm",
359
- "deploy",
360
- "--pretrained_model_type",
361
- "saas/openai",
362
- "--cpus_per_worker",
363
- "0.001",
364
- "--gpus_per_worker",
365
- "0",
366
- "--worker_concurrency",
367
- "1000",
368
- "--num_workers",
369
- "1",
370
- "--infer_params",
371
- f"saas.base_url=https://api.deepseek.com/v1 saas.api_key={api_key} saas.model=deepseek-chat",
372
- "--model",
373
- "deepseek_chat",
374
- ]
375
-
376
- try:
377
- subprocess.run(deploy_cmd, check=True)
378
- print_status(get_message("deploy_complete"), "success")
379
- except subprocess.CalledProcessError:
380
- print_status(get_message("deploy_fail"), "error")
381
- return
382
-
383
-
384
- deploy_cmd = [
385
- "byzerllm",
386
- "deploy",
387
- "--pretrained_model_type",
388
- "saas/reasoning_openai",
389
- "--cpus_per_worker",
390
- "0.001",
391
- "--gpus_per_worker",
392
- "0",
393
- "--worker_concurrency",
394
- "1000",
395
- "--num_workers",
396
- "1",
397
- "--infer_params",
398
- f"saas.base_url=https://api.deepseek.com/v1 saas.api_key={api_key} saas.model=deepseek-reasoner",
399
- "--model",
400
- "deepseek_r1_chat",
401
- ]
390
+
402
391
 
403
- try:
404
- subprocess.run(deploy_cmd, check=True)
405
- print_status(get_message("deploy_complete"), "success")
406
- except subprocess.CalledProcessError:
407
- print_status(get_message("deploy_fail"), "error")
408
- return
392
+ deploy_cmd = [
393
+ "byzerllm",
394
+ "deploy",
395
+ "--pretrained_model_type",
396
+ "saas/reasoning_openai",
397
+ "--cpus_per_worker",
398
+ "0.001",
399
+ "--gpus_per_worker",
400
+ "0",
401
+ "--worker_concurrency",
402
+ "1000",
403
+ "--num_workers",
404
+ "1",
405
+ "--infer_params",
406
+ f"saas.base_url=https://api.deepseek.com/v1 saas.api_key={api_key} saas.model=deepseek-reasoner",
407
+ "--model",
408
+ "r1_chat",
409
+ ]
409
410
 
410
- # Validate the deployment
411
- print_status(get_message("validating_deploy"), "")
412
- try:
413
- validation_result = subprocess.run(
414
- ["easy-byzerllm", "chat", "deepseek_chat", "你好"],
415
- capture_output=True,
416
- text=True,
417
- timeout=30,
418
- check=True,
419
- )
420
- print_status(get_message("validation_success"), "success")
421
- except (subprocess.TimeoutExpired, subprocess.CalledProcessError):
422
- print_status(get_message("validation_fail"), "error")
423
- print_status(get_message("manual_start"), "warning")
424
- print_status("easy-byzerllm chat deepseek_chat 你好", "")
411
+ try:
412
+ subprocess.run(deploy_cmd, check=True)
413
+ print_status(get_message("deploy_complete"), "success")
414
+ except subprocess.CalledProcessError:
415
+ print_status(get_message("deploy_fail"), "error")
416
+ return
425
417
 
426
- print_status(get_message("init_complete_final"), "success")
418
+ # Validate the deployment
419
+ print_status(get_message("validating_deploy"), "")
420
+ try:
421
+ validation_result = subprocess.run(
422
+ ["easy-byzerllm", "chat", "v3_chat", "你好"],
423
+ capture_output=True,
424
+ text=True,
425
+ timeout=30,
426
+ check=True,
427
+ )
428
+ print_status(get_message("validation_success"), "success")
429
+ except (subprocess.TimeoutExpired, subprocess.CalledProcessError):
430
+ print_status(get_message("validation_fail"), "error")
431
+ print_status(get_message("manual_start"), "warning")
432
+ print_status("easy-byzerllm chat v3_chat 你好", "")
433
+
434
+ print_status(get_message("init_complete_final"), "success")
435
+ configure_success[0] = True
436
+
437
+ if first_time[0] and args.product_mode == "pro" and configure_success[0]:
438
+ configure(f"model:v3_chat", skip_print=True)
439
+ configure(f"chat_model:r1_chat", skip_print=True)
440
+ configure(f"generate_rerank_model:r1_chat", skip_print=True)
441
+ configure(f"code_model:v3_chat", skip_print=True)
442
+ configure(f"index_filter_model:r1_chat", skip_print=True)
443
+
444
+ if first_time[0] and args.product_mode == "lite" and models_module.check_model_exists("v3_chat"):
445
+ configure(f"model:v3_chat", skip_print=True)
446
+ configure(f"chat_model:r1_chat", skip_print=True)
447
+ configure(f"generate_rerank_model:r1_chat", skip_print=True)
448
+ configure(f"code_model:v3_chat", skip_print=True)
449
+ configure(f"index_filter_model:r1_chat", skip_print=True)
427
450
 
428
451
 
429
452
  def convert_yaml_config_to_str(yaml_config):
@@ -610,384 +633,6 @@ def get_symbol_list() -> List[SymbolItem]:
610
633
  return list_of_symbols
611
634
 
612
635
 
613
- class CommandCompleter(Completer):
614
- def __init__(self, commands):
615
- self.commands = commands
616
- self.all_file_names = get_all_file_names_in_project()
617
- self.all_files = get_all_file_in_project()
618
- self.all_dir_names = get_all_dir_names_in_project()
619
- self.all_files_with_dot = get_all_file_in_project_with_dot()
620
- self.symbol_list = get_symbol_list()
621
- self.current_file_names = []
622
-
623
- def get_completions(self, document, complete_event):
624
- text = document.text_before_cursor
625
- words = text.split()
626
-
627
- if len(words) > 0:
628
- if words[0] == "/mode":
629
- left_word = text[len("/mode"):]
630
- for mode in ["normal", "auto_detect", "voice_input"]:
631
- if mode.startswith(left_word.strip()):
632
- yield Completion(mode, start_position=-len(left_word.strip()))
633
-
634
- if words[0] == "/add_files":
635
- new_text = text[len("/add_files"):]
636
- parser = CommandTextParser(new_text, words[0])
637
- parser.add_files()
638
- current_word = parser.current_word()
639
-
640
- if parser.last_sub_command() == "/refresh":
641
- return
642
-
643
- for command in parser.get_sub_commands():
644
- if command.startswith(current_word):
645
- yield Completion(command, start_position=-len(current_word))
646
-
647
- if parser.first_sub_command() == "/group" and (
648
- parser.last_sub_command() == "/group"
649
- or parser.last_sub_command() == "/drop"
650
- ):
651
- group_names = memory["current_files"]["groups"].keys()
652
- if "," in current_word:
653
- current_word = current_word.split(",")[-1]
654
-
655
- for group_name in group_names:
656
- if group_name.startswith(current_word):
657
- yield Completion(
658
- group_name, start_position=-len(current_word)
659
- )
660
-
661
- if parser.first_sub_command() != "/group":
662
- if current_word and current_word.startswith("."):
663
- for file_name in self.all_files_with_dot:
664
- if file_name.startswith(current_word):
665
- yield Completion(
666
- file_name, start_position=-
667
- len(current_word)
668
- )
669
- else:
670
- for file_name in self.all_file_names:
671
- if file_name.startswith(current_word):
672
- yield Completion(
673
- file_name, start_position=-
674
- len(current_word)
675
- )
676
- for file_name in self.all_files:
677
- if current_word and current_word in file_name:
678
- yield Completion(
679
- file_name, start_position=-
680
- len(current_word)
681
- )
682
- elif words[0] in ["/chat", "/coding"]:
683
- image_extensions = (
684
- ".png",
685
- ".jpg",
686
- ".jpeg",
687
- ".gif",
688
- ".bmp",
689
- ".tiff",
690
- ".tif",
691
- ".webp",
692
- ".svg",
693
- ".ico",
694
- ".heic",
695
- ".heif",
696
- ".raw",
697
- ".cr2",
698
- ".nef",
699
- ".arw",
700
- ".dng",
701
- ".orf",
702
- ".rw2",
703
- ".pef",
704
- ".srw",
705
- ".eps",
706
- ".ai",
707
- ".psd",
708
- ".xcf",
709
- )
710
- new_text = text[len(words[0]):]
711
- parser = CommandTextParser(new_text, words[0])
712
-
713
- parser.coding()
714
- current_word = parser.current_word()
715
-
716
- if len(new_text.strip()) == 0 or new_text.strip() == "/":
717
- for command in parser.get_sub_commands():
718
- if command.startswith(current_word):
719
- yield Completion(command, start_position=-len(current_word))
720
-
721
- all_tags = parser.tags
722
-
723
- if current_word.startswith("@"):
724
- name = current_word[1:]
725
- target_set = set()
726
-
727
- for file_name in self.current_file_names:
728
- base_file_name = os.path.basename(file_name)
729
- if name in base_file_name:
730
- target_set.add(base_file_name)
731
- path_parts = file_name.split(os.sep)
732
- display_name = (
733
- os.sep.join(path_parts[-3:])
734
- if len(path_parts) > 3
735
- else file_name
736
- )
737
- relative_path = os.path.relpath(
738
- file_name, project_root)
739
- yield Completion(
740
- relative_path,
741
- start_position=-len(name),
742
- display=f"{display_name} (in active files)",
743
- )
744
-
745
- for file_name in self.all_file_names:
746
- if file_name.startswith(name) and file_name not in target_set:
747
- target_set.add(file_name)
748
-
749
- path_parts = file_name.split(os.sep)
750
- display_name = (
751
- os.sep.join(path_parts[-3:])
752
- if len(path_parts) > 3
753
- else file_name
754
- )
755
- relative_path = os.path.relpath(
756
- file_name, project_root)
757
-
758
- yield Completion(
759
- relative_path,
760
- start_position=-len(name),
761
- display=f"{display_name}",
762
- )
763
-
764
- for file_name in self.all_files:
765
- if name in file_name and file_name not in target_set:
766
- path_parts = file_name.split(os.sep)
767
- display_name = (
768
- os.sep.join(path_parts[-3:])
769
- if len(path_parts) > 3
770
- else file_name
771
- )
772
- relative_path = os.path.relpath(
773
- file_name, project_root)
774
- yield Completion(
775
- relative_path,
776
- start_position=-len(name),
777
- display=f"{display_name}",
778
- )
779
-
780
- if current_word.startswith("@@"):
781
- name = current_word[2:]
782
- for symbol in self.symbol_list:
783
- if name in symbol.symbol_name:
784
- file_name = symbol.file_name
785
- path_parts = file_name.split(os.sep)
786
- display_name = (
787
- os.sep.join(path_parts[-3:])
788
- if len(path_parts) > 3
789
- else symbol.symbol_name
790
- )
791
- relative_path = os.path.relpath(
792
- file_name, project_root)
793
- yield Completion(
794
- f"{symbol.symbol_name}(location: {relative_path})",
795
- start_position=-len(name),
796
- display=f"{symbol.symbol_name} ({display_name}/{symbol.symbol_type})",
797
- )
798
-
799
- tags = [tag for tag in parser.tags]
800
-
801
- if current_word.startswith("<"):
802
- name = current_word[1:]
803
- for tag in ["<img>", "</img>"]:
804
- if all_tags and all_tags[-1].start_tag == "<img>":
805
- if tag.startswith(name):
806
- yield Completion(
807
- "</img>", start_position=-len(current_word)
808
- )
809
- elif tag.startswith(name):
810
- yield Completion(tag, start_position=-len(current_word))
811
-
812
- if tags and tags[-1].start_tag == "<img>" and tags[-1].end_tag == "":
813
- raw_file_name = tags[0].content
814
- file_name = raw_file_name.strip()
815
- parent_dir = os.path.dirname(file_name)
816
- file_basename = os.path.basename(file_name)
817
- search_dir = parent_dir if parent_dir else "."
818
- for root, dirs, files in os.walk(search_dir):
819
- # Only process direct subdirectories
820
- if root != search_dir:
821
- continue
822
-
823
- # Autocomplete subdirectories
824
- for dir in dirs:
825
- full_path = os.path.join(root, dir)
826
- if full_path.startswith(file_name):
827
- relative_path = os.path.relpath(
828
- full_path, search_dir)
829
- yield Completion(
830
- relative_path,
831
- start_position=-len(file_basename),
832
- )
833
-
834
- # Autocomplete files
835
- for file in files:
836
- if file.lower().endswith(
837
- image_extensions
838
- ) and file.startswith(file_basename):
839
- full_path = os.path.join(root, file)
840
- relative_path = os.path.relpath(
841
- full_path, search_dir)
842
- yield Completion(
843
- relative_path,
844
- start_position=-len(file_basename),
845
- )
846
-
847
- # Only process one level of subdirectories, then exit the loop
848
- break
849
-
850
- elif words[0] == "/remove_files":
851
- new_words = text[len("/remove_files"):].strip().split(",")
852
-
853
- is_at_space = text[-1] == " "
854
- last_word = new_words[-2] if len(new_words) > 1 else ""
855
- current_word = new_words[-1] if new_words else ""
856
-
857
- if is_at_space:
858
- last_word = current_word
859
- current_word = ""
860
-
861
- # /remove_files /all [cursor] or /remove_files /all p[cursor]
862
- if not last_word and not current_word:
863
- if "/all".startswith(current_word):
864
- yield Completion("/all", start_position=-len(current_word))
865
- for file_name in self.current_file_names:
866
- yield Completion(file_name, start_position=-len(current_word))
867
-
868
- # /remove_files /a[cursor] or /remove_files p[cursor]
869
- if current_word:
870
- if "/all".startswith(current_word):
871
- yield Completion("/all", start_position=-len(current_word))
872
- for file_name in self.current_file_names:
873
- if current_word and current_word in file_name:
874
- yield Completion(
875
- file_name, start_position=-len(current_word)
876
- )
877
- elif words[0] == "/exclude_dirs":
878
- new_words = text[len("/exclude_dirs"):].strip().split(",")
879
- current_word = new_words[-1]
880
-
881
- for file_name in self.all_dir_names:
882
- if current_word and current_word in file_name:
883
- yield Completion(file_name, start_position=-len(current_word))
884
-
885
- elif words[0] == "/lib":
886
- new_text = text[len("/lib"):]
887
- parser = CommandTextParser(new_text, words[0])
888
- parser.lib()
889
- current_word = parser.current_word()
890
-
891
- for command in parser.get_sub_commands():
892
- if command.startswith(current_word):
893
- yield Completion(command, start_position=-len(current_word))
894
-
895
- if parser.last_sub_command() in ["/add", "/remove", "/get"]:
896
- for lib_name in memory.get("libs", {}).keys():
897
- if lib_name.startswith(current_word):
898
- yield Completion(
899
- lib_name, start_position=-len(current_word)
900
- )
901
- elif words[0] == "/mcp":
902
- new_text = text[len("/mcp"):]
903
- parser = CommandTextParser(new_text, words[0])
904
- parser.lib()
905
- current_word = parser.current_word()
906
- for command in parser.get_sub_commands():
907
- if command.startswith(current_word):
908
- yield Completion(command, start_position=-len(current_word))
909
- elif words[0] == "/models":
910
- new_text = text[len("/models"):]
911
- parser = CommandTextParser(new_text, words[0])
912
- parser.lib()
913
- current_word = parser.current_word()
914
- for command in parser.get_sub_commands():
915
- if command.startswith(current_word):
916
- yield Completion(command, start_position=-len(current_word))
917
-
918
- elif words[0] == "/coding":
919
- new_text = text[len("/coding"):]
920
- parser = CommandTextParser(new_text, words[0])
921
- parser.lib()
922
- current_word = parser.current_word()
923
- for command in parser.get_sub_commands():
924
- if command.startswith(current_word):
925
- yield Completion(command, start_position=-len(current_word))
926
-
927
- elif words[0] == "/conf":
928
- new_words = text[len("/conf"):].strip().split()
929
- is_at_space = text[-1] == " "
930
- last_word = new_words[-2] if len(new_words) > 1 else ""
931
- current_word = new_words[-1] if new_words else ""
932
- completions = []
933
-
934
- if is_at_space:
935
- last_word = current_word
936
- current_word = ""
937
-
938
- # /conf /drop [curor] or /conf /drop p[cursor]
939
- if last_word == "/drop":
940
- completions = [
941
- field_name
942
- for field_name in memory["conf"].keys()
943
- if field_name.startswith(current_word)
944
- ]
945
- # /conf [curosr]
946
- elif not last_word and not current_word:
947
- completions = [
948
- "/drop"] if "/drop".startswith(current_word) else []
949
- completions += [
950
- field_name + ":"
951
- for field_name in AutoCoderArgs.model_fields.keys()
952
- if field_name.startswith(current_word)
953
- ]
954
- # /conf p[cursor]
955
- elif not last_word and current_word:
956
- completions = [
957
- "/drop"] if "/drop".startswith(current_word) else []
958
- completions += [
959
- field_name + ":"
960
- for field_name in AutoCoderArgs.model_fields.keys()
961
- if field_name.startswith(current_word)
962
- ]
963
-
964
- for completion in completions:
965
- yield Completion(completion, start_position=-len(current_word))
966
-
967
- else:
968
- for command in self.commands:
969
- if command.startswith(text):
970
- yield Completion(command, start_position=-len(text))
971
-
972
- else:
973
- for command in self.commands:
974
- if command.startswith(text):
975
- yield Completion(command, start_position=-len(text))
976
-
977
- def update_current_files(self, files):
978
- self.current_file_names = [f for f in files]
979
-
980
- def refresh_files(self):
981
- self.all_file_names = get_all_file_names_in_project()
982
- self.all_files = get_all_file_in_project()
983
- self.all_dir_names = get_all_dir_names_in_project()
984
- self.all_files_with_dot = get_all_file_in_project_with_dot()
985
- self.symbol_list = get_symbol_list()
986
-
987
-
988
- completer = CommandCompleter(commands)
989
-
990
-
991
636
  def save_memory():
992
637
  with open(os.path.join(base_persist_dir, "memory.json"), "w") as f:
993
638
  json.dump(memory, f, indent=2, ensure_ascii=False)
@@ -1003,6 +648,21 @@ def load_memory():
1003
648
  completer.update_current_files(memory["current_files"]["files"])
1004
649
 
1005
650
 
651
+ completer = CommandCompleter(commands,
652
+ file_system_model=CCFileSystemModel(project_root=project_root,
653
+ defaut_exclude_dirs=defaut_exclude_dirs,
654
+ get_all_file_names_in_project=get_all_file_names_in_project,
655
+ get_all_file_in_project=get_all_file_in_project,
656
+ get_all_dir_names_in_project=get_all_dir_names_in_project,
657
+ get_all_file_in_project_with_dot=get_all_file_in_project_with_dot,
658
+ get_symbol_list=get_symbol_list
659
+ ),
660
+ memory_model=CCMemoryModel(memory=memory,
661
+ save_memory_func=save_memory))
662
+
663
+
664
+
665
+
1006
666
  def print_conf(content:Dict[str,Any]):
1007
667
  """Display configuration dictionary in a Rich table format with enhanced visual styling.
1008
668
 
@@ -1049,6 +709,7 @@ def print_conf(content:Dict[str,Any]):
1049
709
  ))
1050
710
 
1051
711
  def revert():
712
+ result_manager = ResultManager()
1052
713
  last_yaml_file = get_last_yaml_file("actions")
1053
714
  if last_yaml_file:
1054
715
  file_path = os.path.join("actions", last_yaml_file)
@@ -1058,16 +719,24 @@ def revert():
1058
719
  s = output.getvalue()
1059
720
  print(s, flush=True)
1060
721
  if "Successfully reverted changes" in s:
722
+ result_manager.append(content=s, meta={"action": "revert","success":False, "input":{
723
+ }})
1061
724
  print(
1062
725
  "Reverted the last chat action successfully. Remove the yaml file {file_path}"
1063
726
  )
1064
727
  os.remove(file_path)
728
+ else:
729
+ result_manager.append(content=s, meta={"action": "revert","success":False, "input":{
730
+ }})
1065
731
  else:
732
+ result_manager.append(content="No previous chat action found to revert.", meta={"action": "revert","success":False, "input":{
733
+ }})
1066
734
  print("No previous chat action found to revert.")
1067
735
 
1068
736
 
1069
737
  def add_files(args: List[str]):
1070
-
738
+
739
+ result_manager = ResultManager()
1071
740
  if "groups" not in memory["current_files"]:
1072
741
  memory["current_files"]["groups"] = {}
1073
742
  if "groups_info" not in memory["current_files"]:
@@ -1082,6 +751,8 @@ def add_files(args: List[str]):
1082
751
 
1083
752
  if not args:
1084
753
  printer.print_in_terminal("add_files_no_args", style="red")
754
+ result_manager.append(content=printer.get_message_from_key("add_files_no_args"),
755
+ meta={"action": "add_files","success":False, "input":{ "args": args}})
1085
756
  return
1086
757
 
1087
758
  if args[0] == "/refresh":
@@ -1091,6 +762,8 @@ def add_files(args: List[str]):
1091
762
  Panel("Refreshed file list.",
1092
763
  title="Files Refreshed", border_style="green")
1093
764
  )
765
+ result_manager.append(content="Files refreshed.",
766
+ meta={"action": "add_files","success":True, "input":{ "args": args}})
1094
767
  return
1095
768
 
1096
769
  if args[0] == "/group":
@@ -1100,6 +773,8 @@ def add_files(args: List[str]):
1100
773
  Panel("No groups defined.", title="Groups",
1101
774
  border_style="yellow")
1102
775
  )
776
+ result_manager.append(content="No groups defined.",
777
+ meta={"action": "add_files","success":False, "input":{ "args": args}})
1103
778
  else:
1104
779
  table = Table(
1105
780
  title="Defined Groups",
@@ -1130,6 +805,8 @@ def add_files(args: List[str]):
1130
805
  end_section=(i == len(groups) - 1),
1131
806
  )
1132
807
  console.print(Panel(table, border_style="blue"))
808
+ result_manager.append(content="Defined groups.",
809
+ meta={"action": "add_files","success":True, "input":{ "args": args}})
1133
810
  elif len(args) >= 2 and args[1] == "/reset":
1134
811
  memory["current_files"]["current_groups"] = []
1135
812
  console.print(
@@ -1139,6 +816,8 @@ def add_files(args: List[str]):
1139
816
  border_style="green",
1140
817
  )
1141
818
  )
819
+ result_manager.append(content="Active group names have been reset. If you want to clear the active files, you should use the command /remove_files /all.",
820
+ meta={"action": "add_files","success":True, "input":{ "args": args}})
1142
821
  elif len(args) >= 3 and args[1] == "/add":
1143
822
  group_name = args[2]
1144
823
  groups[group_name] = memory["current_files"]["files"].copy()
@@ -1149,6 +828,9 @@ def add_files(args: List[str]):
1149
828
  border_style="green",
1150
829
  )
1151
830
  )
831
+ result_manager.append(content=f"Added group '{group_name}' with current files.",
832
+ meta={"action": "add_files","success":True, "input":{ "args": args}})
833
+
1152
834
  elif len(args) >= 3 and args[1] == "/drop":
1153
835
  group_name = args[2]
1154
836
  if group_name in groups:
@@ -1165,6 +847,8 @@ def add_files(args: List[str]):
1165
847
  border_style="green",
1166
848
  )
1167
849
  )
850
+ result_manager.append(content=f"Dropped group '{group_name}'.",
851
+ meta={"action": "add_files","success":True, "input":{ "args": args}})
1168
852
  else:
1169
853
  console.print(
1170
854
  Panel(
@@ -1173,6 +857,8 @@ def add_files(args: List[str]):
1173
857
  border_style="red",
1174
858
  )
1175
859
  )
860
+ result_manager.append(content=f"Group '{group_name}' not found.",
861
+ meta={"action": "add_files","success":False, "input":{ "args": args}})
1176
862
  elif len(args) == 3 and args[1] == "/set":
1177
863
  group_name = args[2]
1178
864
 
@@ -1244,6 +930,8 @@ def add_files(args: List[str]):
1244
930
  border_style="red",
1245
931
  )
1246
932
  )
933
+ result_manager.append(content=f"Group(s) not found: {', '.join(missing_groups)}",
934
+ meta={"action": "add_files","success":False, "input":{ "args": args}})
1247
935
 
1248
936
  if merged_files:
1249
937
  memory["current_files"]["files"] = list(merged_files)
@@ -1279,6 +967,8 @@ def add_files(args: List[str]):
1279
967
  border_style="green",
1280
968
  )
1281
969
  )
970
+ result_manager.append(content=f"Active groups: {', '.join(memory['current_files']['current_groups'])}",
971
+ meta={"action": "add_files","success":True, "input":{ "args": args}})
1282
972
  elif not missing_groups:
1283
973
  console.print(
1284
974
  Panel(
@@ -1286,7 +976,9 @@ def add_files(args: List[str]):
1286
976
  title="No Files Added",
1287
977
  border_style="yellow",
1288
978
  )
1289
- )
979
+ )
980
+ result_manager.append(content="No files in the specified groups.",
981
+ meta={"action": "add_files","success":False, "input":{ "args": args}})
1290
982
  else:
1291
983
  existing_files = memory["current_files"]["files"]
1292
984
  matched_files = find_files_in_project(args)
@@ -1308,9 +1000,13 @@ def add_files(args: List[str]):
1308
1000
  i == len(files_to_add) - 1
1309
1001
  ), # Do not add a separator after the last row
1310
1002
  )
1311
- console.print(Panel(table, border_style="green"))
1003
+ console.print(Panel(table, border_style="green"))
1004
+ result_manager.append(content=f"Added files: {', '.join(files_to_add)}",
1005
+ meta={"action": "add_files","success":True, "input":{ "args": args}})
1312
1006
  else:
1313
1007
  printer.print_in_terminal("add_files_matched", style="yellow")
1008
+ result_manager.append(content=f"No files matched.",
1009
+ meta={"action": "add_files","success":False, "input":{ "args": args}})
1314
1010
 
1315
1011
  completer.update_current_files(memory["current_files"]["files"])
1316
1012
  save_memory()
@@ -1319,11 +1015,14 @@ def add_files(args: List[str]):
1319
1015
  def remove_files(file_names: List[str]):
1320
1016
  project_root = os.getcwd()
1321
1017
  printer = Printer()
1018
+ result_manager = ResultManager()
1322
1019
 
1323
1020
  if "/all" in file_names:
1324
1021
  memory["current_files"]["files"] = []
1325
1022
  memory["current_files"]["current_groups"] = []
1326
1023
  printer.print_in_terminal("remove_files_all", style="green")
1024
+ result_manager.append(content="All files removed.",
1025
+ meta={"action": "remove_files","success":True, "input":{ "file_names": file_names}})
1327
1026
  else:
1328
1027
  removed_files = []
1329
1028
  for file in memory["current_files"]["files"]:
@@ -1346,9 +1045,13 @@ def remove_files(file_names: List[str]):
1346
1045
  console = Console()
1347
1046
  console.print(
1348
1047
  Panel(table, border_style="green",
1349
- title=printer.get_message_from_key("files_removed")))
1048
+ title=printer.get_message_from_key("files_removed")))
1049
+ result_manager.append(content=f"Removed files: {', '.join(removed_files)}",
1050
+ meta={"action": "remove_files","success":True, "input":{ "file_names": file_names}})
1350
1051
  else:
1351
1052
  printer.print_in_terminal("remove_files_none", style="yellow")
1053
+ result_manager.append(content=printer.get_message_from_key("remove_files_none"),
1054
+ meta={"action": "remove_files","success":False, "input":{ "file_names": file_names}})
1352
1055
 
1353
1056
  completer.update_current_files(memory["current_files"]["files"])
1354
1057
  save_memory()
@@ -1870,6 +1573,8 @@ def coding(query: str):
1870
1573
 
1871
1574
  yaml_content = convert_yaml_config_to_str(yaml_config=yaml_config)
1872
1575
 
1576
+ md5 = hashlib.md5(yaml_content.encode("utf-8")).hexdigest()
1577
+
1873
1578
  execute_file = os.path.join("actions", latest_yaml_file)
1874
1579
  with open(os.path.join(execute_file), "w") as f:
1875
1580
  f.write(yaml_content)
@@ -1877,6 +1582,10 @@ def coding(query: str):
1877
1582
  def execute_chat():
1878
1583
  cmd = ["--file", execute_file]
1879
1584
  auto_coder_main(cmd)
1585
+ result_manager = ResultManager()
1586
+ result_manager.append(content="", meta={"commit_message": f"auto_coder_{latest_yaml_file}_{md5}","action": "coding", "input":{
1587
+ "query": query
1588
+ }})
1880
1589
 
1881
1590
  execute_chat()
1882
1591
  else:
@@ -2158,7 +1867,7 @@ def generate_shell_command(input_text):
2158
1867
  finally:
2159
1868
  os.remove(execute_file)
2160
1869
 
2161
- def manage_models(params, query: str):
1870
+ def manage_models(query: str):
2162
1871
  """
2163
1872
  Handle /models subcommands:
2164
1873
  /models /list - List all models (default + custom)
@@ -2169,7 +1878,8 @@ def manage_models(params, query: str):
2169
1878
  printer = Printer()
2170
1879
  console = Console()
2171
1880
 
2172
- if params.product_mode != "lite":
1881
+ product_mode = memory.get("product_mode", "lite")
1882
+ if product_mode != "lite":
2173
1883
  printer.print_in_terminal("models_lite_only", style="red")
2174
1884
  return
2175
1885
 
@@ -2220,7 +1930,8 @@ def manage_models(params, query: str):
2220
1930
 
2221
1931
  if not subcmd:
2222
1932
  printer.print_in_terminal("models_usage")
2223
-
1933
+
1934
+ result_manager = ResultManager()
2224
1935
  if subcmd == "/list":
2225
1936
  if models_data:
2226
1937
  # Sort models by speed (average_speed)
@@ -2257,8 +1968,21 @@ def manage_models(params, query: str):
2257
1968
  f"{m.get('average_speed', 0.0):.3f}"
2258
1969
  )
2259
1970
  console.print(table)
1971
+ result_manager.add_result(content=json.dumps(sorted_models,ensure_ascii=False),meta={
1972
+ "action": "models",
1973
+ "input": {
1974
+ "query": query
1975
+ }
1976
+ })
1977
+
2260
1978
  else:
2261
1979
  printer.print_in_terminal("models_no_models", style="yellow")
1980
+ result_manager.add_result(content="No models found",meta={
1981
+ "action": "models",
1982
+ "input": {
1983
+ "query": query
1984
+ }
1985
+ })
2262
1986
 
2263
1987
  elif subcmd == "/input_price":
2264
1988
  args = query.strip().split()
@@ -2268,11 +1992,35 @@ def manage_models(params, query: str):
2268
1992
  price = float(args[1])
2269
1993
  if models_module.update_model_input_price(name, price):
2270
1994
  printer.print_in_terminal("models_input_price_updated", style="green", name=name, price=price)
1995
+ result_manager.add_result(content=f"models_input_price_updated: {name} {price}",meta={
1996
+ "action": "models",
1997
+ "input": {
1998
+ "query": query
1999
+ }
2000
+ })
2271
2001
  else:
2272
2002
  printer.print_in_terminal("models_not_found", style="red", name=name)
2003
+ result_manager.add_result(content=f"models_not_found: {name}",meta={
2004
+ "action": "models",
2005
+ "input": {
2006
+ "query": query
2007
+ }
2008
+ })
2273
2009
  except ValueError as e:
2010
+ result_manager.add_result(content=f"models_invalid_price: {str(e)}",meta={
2011
+ "action": "models",
2012
+ "input": {
2013
+ "query": query
2014
+ }
2015
+ })
2274
2016
  printer.print_in_terminal("models_invalid_price", style="red", error=str(e))
2275
2017
  else:
2018
+ result_manager.add_result(content=printer.get_message_from_key("models_input_price_usage"),meta={
2019
+ "action": "models",
2020
+ "input": {
2021
+ "query": query
2022
+ }
2023
+ })
2276
2024
  printer.print_in_terminal("models_input_price_usage", style="red")
2277
2025
 
2278
2026
  elif subcmd == "/output_price":
@@ -2283,11 +2031,35 @@ def manage_models(params, query: str):
2283
2031
  price = float(args[1])
2284
2032
  if models_module.update_model_output_price(name, price):
2285
2033
  printer.print_in_terminal("models_output_price_updated", style="green", name=name, price=price)
2034
+ result_manager.add_result(content=f"models_output_price_updated: {name} {price}",meta={
2035
+ "action": "models",
2036
+ "input": {
2037
+ "query": query
2038
+ }
2039
+ })
2286
2040
  else:
2287
2041
  printer.print_in_terminal("models_not_found", style="red", name=name)
2042
+ result_manager.add_result(content=f"models_not_found: {name}",meta={
2043
+ "action": "models",
2044
+ "input": {
2045
+ "query": query
2046
+ }
2047
+ })
2288
2048
  except ValueError as e:
2289
2049
  printer.print_in_terminal("models_invalid_price", style="red", error=str(e))
2050
+ result_manager.add_result(content=f"models_invalid_price: {str(e)}",meta={
2051
+ "action": "models",
2052
+ "input": {
2053
+ "query": query
2054
+ }
2055
+ })
2290
2056
  else:
2057
+ result_manager.add_result(content=printer.get_message_from_key("models_output_price_usage"),meta={
2058
+ "action": "models",
2059
+ "input": {
2060
+ "query": query
2061
+ }
2062
+ })
2291
2063
  printer.print_in_terminal("models_output_price_usage", style="red")
2292
2064
 
2293
2065
  elif subcmd == "/speed":
@@ -2298,11 +2070,35 @@ def manage_models(params, query: str):
2298
2070
  speed = float(args[1])
2299
2071
  if models_module.update_model_speed(name, speed):
2300
2072
  printer.print_in_terminal("models_speed_updated", style="green", name=name, speed=speed)
2073
+ result_manager.add_result(content=f"models_speed_updated: {name} {speed}",meta={
2074
+ "action": "models",
2075
+ "input": {
2076
+ "query": query
2077
+ }
2078
+ })
2301
2079
  else:
2302
2080
  printer.print_in_terminal("models_not_found", style="red", name=name)
2081
+ result_manager.add_result(content=f"models_not_found: {name}",meta={
2082
+ "action": "models",
2083
+ "input": {
2084
+ "query": query
2085
+ }
2086
+ })
2303
2087
  except ValueError as e:
2304
2088
  printer.print_in_terminal("models_invalid_speed", style="red", error=str(e))
2089
+ result_manager.add_result(content=f"models_invalid_speed: {str(e)}",meta={
2090
+ "action": "models",
2091
+ "input": {
2092
+ "query": query
2093
+ }
2094
+ })
2305
2095
  else:
2096
+ result_manager.add_result(content=printer.get_message_from_key("models_speed_usage"),meta={
2097
+ "action": "models",
2098
+ "input": {
2099
+ "query": query
2100
+ }
2101
+ })
2306
2102
  printer.print_in_terminal("models_speed_usage", style="red")
2307
2103
 
2308
2104
  elif subcmd == "/speed-test":
@@ -2323,7 +2119,14 @@ def manage_models(params, query: str):
2323
2119
  if args and args[0].isdigit():
2324
2120
  test_rounds = int(args[0])
2325
2121
 
2326
- render_speed_test_in_terminal(params.product_mode, test_rounds,enable_long_context=enable_long_context)
2122
+ render_speed_test_in_terminal(product_mode, test_rounds,enable_long_context=enable_long_context)
2123
+ ## To be optimized: fetch detailed data
2124
+ result_manager.add_result(content="models test success",meta={
2125
+ "action": "models",
2126
+ "input": {
2127
+ "query": query
2128
+ }
2129
+ })
2327
2130
 
2328
2131
  elif subcmd == "/add":
2329
2132
  # Support both simplified and legacy formats
@@ -2333,11 +2136,29 @@ def manage_models(params, query: str):
2333
2136
  name, api_key = args[0], args[1]
2334
2137
  result = models_module.update_model_with_api_key(name, api_key)
2335
2138
  if result:
2139
+ result_manager.add_result(content=f"models_added: {name}",meta={
2140
+ "action": "models",
2141
+ "input": {
2142
+ "query": query
2143
+ }
2144
+ })
2336
2145
  printer.print_in_terminal("models_added", style="green", name=name)
2337
2146
  else:
2147
+ result_manager.add_result(content=f"models_add_failed: {name}",meta={
2148
+ "action": "models",
2149
+ "input": {
2150
+ "query": query
2151
+ }
2152
+ })
2338
2153
  printer.print_in_terminal("models_add_failed", style="red", name=name)
2339
2154
  else:
2340
2155
  printer.print_in_terminal("models_add_usage", style="red")
2156
+ result_manager.add_result(content=printer.get_message_from_key("models_add_usage"),meta={
2157
+ "action": "models",
2158
+ "input": {
2159
+ "query": query
2160
+ }
2161
+ })
2341
2162
 
2342
2163
  elif subcmd == "/add_model":
2343
2164
  # Parse key=value pairs: /models /add_model name=abc base_url=http://xx ...
@@ -2359,6 +2180,12 @@ def manage_models(params, query: str):
2359
2180
  # Check duplication
2360
2181
  if any(m["name"] == data_dict["name"] for m in models_data):
2361
2182
  printer.print_in_terminal("models_add_model_exists", style="yellow", name=data_dict["name"])
2183
+ result_manager.add_result(content=printer.get_message_from_key("models_add_model_exists",name=data_dict["name"]),meta={
2184
+ "action": "models",
2185
+ "input": {
2186
+ "query": query
2187
+ }
2188
+ })
2362
2189
  return
2363
2190
 
2364
2191
  # Create model with defaults
@@ -2375,22 +2202,51 @@ def manage_models(params, query: str):
2375
2202
  models_data.append(final_model)
2376
2203
  models_module.save_models(models_data)
2377
2204
  printer.print_in_terminal("models_add_model_success", style="green", name=data_dict["name"])
2205
+ result_manager.add_result(content=f"models_add_model_success: {data_dict['name']}",meta={
2206
+ "action": "models",
2207
+ "input": {
2208
+ "query": query
2209
+ }
2210
+ })
2378
2211
 
2379
2212
  elif subcmd == "/remove":
2380
2213
  args = query.strip().split(" ")
2381
2214
  if len(args) < 1:
2382
2215
  printer.print_in_terminal("models_add_usage", style="red")
2216
+ result_manager.add_result(content=printer.get_message_from_key("models_add_usage"),meta={
2217
+ "action": "models",
2218
+ "input": {
2219
+ "query": query
2220
+ }
2221
+ })
2383
2222
  return
2384
2223
  name = args[0]
2385
2224
  filtered_models = [m for m in models_data if m["name"] != name]
2386
2225
  if len(filtered_models) == len(models_data):
2387
2226
  printer.print_in_terminal("models_add_model_remove", style="yellow", name=name)
2227
+ result_manager.add_result(content=printer.get_message_from_key("models_add_model_remove",name=name),meta={
2228
+ "action": "models",
2229
+ "input": {
2230
+ "query": query
2231
+ }
2232
+ })
2388
2233
  return
2389
2234
  models_module.save_models(filtered_models)
2390
2235
  printer.print_in_terminal("models_add_model_removed", style="green", name=name)
2391
-
2236
+ result_manager.add_result(content=printer.get_message_from_key("models_add_model_removed",name=name),meta={
2237
+ "action": "models",
2238
+ "input": {
2239
+ "query": query
2240
+ }
2241
+ })
2392
2242
  else:
2393
2243
  printer.print_in_terminal("models_unknown_subcmd", style="yellow", subcmd=subcmd)
2244
+ result_manager.add_result(content=printer.get_message_from_key("models_unknown_subcmd",subcmd=subcmd),meta={
2245
+ "action": "models",
2246
+ "input": {
2247
+ "query": query
2248
+ }
2249
+ })
2394
2250
 
2395
2251
  def exclude_dirs(dir_names: List[str]):
2396
2252
  new_dirs = dir_names
@@ -2432,6 +2288,41 @@ def index_build():
2432
2288
  os.remove(yaml_file)
2433
2289
 
2434
2290
 
2291
+ def get_final_config()->AutoCoderArgs:
2292
+ conf = memory.get("conf", {})
2293
+ yaml_config = {
2294
+ "include_file": ["./base/base.yml"],
2295
+ "auto_merge": conf.get("auto_merge", "editblock"),
2296
+ "human_as_model": conf.get("human_as_model", "false") == "true",
2297
+ "skip_build_index": conf.get("skip_build_index", "true") == "true",
2298
+ "skip_confirm": conf.get("skip_confirm", "true") == "true",
2299
+ "silence": conf.get("silence", "true") == "true",
2300
+ "include_project_structure": conf.get("include_project_structure", "true")
2301
+ == "true",
2302
+ }
2303
+ for key, value in conf.items():
2304
+ converted_value = convert_config_value(key, value)
2305
+ if converted_value is not None:
2306
+ yaml_config[key] = converted_value
2307
+
2308
+ temp_yaml = os.path.join("actions", f"{uuid.uuid4()}.yml")
2309
+ try:
2310
+ with open(temp_yaml, "w") as f:
2311
+ f.write(convert_yaml_config_to_str(yaml_config=yaml_config))
2312
+ args = convert_yaml_to_config(temp_yaml)
2313
+ finally:
2314
+ if os.path.exists(temp_yaml):
2315
+ os.remove(temp_yaml)
2316
+ return args
2317
+
2318
+ def help(query: str):
2319
+ from autocoder.common.auto_configure import ConfigAutoTuner,MemoryConfig,AutoConfigRequest
2320
+ args = get_final_config()
2321
+ product_mode = memory.get("product_mode", "lite")
2322
+ llm = get_single_llm(args.chat_model or args.model, product_mode=product_mode)
2323
+ auto_config_tuner = ConfigAutoTuner(llm=llm, memory_config=MemoryConfig(memory=memory, save_memory_func=save_memory))
2324
+ auto_config_tuner.tune(AutoConfigRequest(query=query))
2325
+
2435
2326
  @run_in_raw_thread()
2436
2327
  def index_query(query: str):
2437
2328
  conf = memory.get("conf", {})
@@ -2614,6 +2505,58 @@ def lib_command(args: List[str]):
2614
2505
  else:
2615
2506
  console.print(f"Unknown subcommand: {subcommand}")
2616
2507
 
2508
+ @run_in_raw_thread()
2509
+ def auto_command(params,query: str):
2510
+ """处理/auto指令"""
2511
+ from autocoder.commands.auto_command import CommandAutoTuner, AutoCommandRequest, CommandConfig, MemoryConfig
2512
+ args = get_final_config()
2513
+ # help(query)
2514
+
2515
+ # Prepare the request parameters
2516
+ request = AutoCommandRequest(
2517
+ user_input=query
2518
+ )
2519
+
2520
+ # Initialize the tuner
2521
+ llm = get_single_llm(args.chat_model or args.model,product_mode=args.product_mode)
2522
+ tuner = CommandAutoTuner(llm,
2523
+ args=args,
2524
+ memory_config=MemoryConfig(memory=memory, save_memory_func=save_memory),
2525
+ command_config=CommandConfig(
2526
+ add_files=add_files,
2527
+ remove_files=remove_files,
2528
+ list_files=list_files,
2529
+ conf=configure,
2530
+ revert=revert,
2531
+ commit=commit,
2532
+ help=help,
2533
+ exclude_dirs=exclude_dirs,
2534
+ ask=ask,
2535
+ chat=chat,
2536
+ coding=coding,
2537
+ design=design,
2538
+ summon=summon,
2539
+ lib=lib_command,
2540
+ mcp=mcp,
2541
+ models=manage_models,
2542
+ index_build=index_build,
2543
+ index_query=index_query,
2544
+ execute_shell_command=execute_shell_command,
2545
+ generate_shell_command=generate_shell_command
2546
+ ))
2547
+
2548
+ # Generate suggestions
2549
+ response = tuner.analyze(request)
2550
+
2551
+ # Display the suggestions
2552
+ console = Console()
2553
+ console.print(Panel(
2554
+ Markdown(response.reasoning or ""),
2555
+ title="Reasoning",
2556
+ border_style="blue",
2557
+ padding=(1, 2)
2558
+ ))
2559
+
2617
2560
 
2618
2561
  def main():
2619
2562
  from autocoder.rag.variable_holder import VariableHolder
@@ -2671,7 +2614,7 @@ def main():
2671
2614
  @kb.add("c-k")
2672
2615
  def _(event):
2673
2616
  if "mode" not in memory:
2674
- memory["mode"] = "normal"
2617
+ memory["mode"] = "auto_detect"
2675
2618
 
2676
2619
  current_mode = memory["mode"]
2677
2620
  if current_mode == "normal":
@@ -2695,7 +2638,7 @@ def main():
2695
2638
 
2696
2639
  def get_bottom_toolbar():
2697
2640
  if "mode" not in memory:
2698
- memory["mode"] = "normal"
2641
+ memory["mode"] = "auto_detect"
2699
2642
  mode = memory["mode"]
2700
2643
  human_as_model = memory["conf"].get("human_as_model", "false")
2701
2644
  if mode not in MODES:
@@ -2757,7 +2700,7 @@ def main():
2757
2700
  new_prompt = ""
2758
2701
 
2759
2702
  if "mode" not in memory:
2760
- memory["mode"] = "normal"
2703
+ memory["mode"] = "auto_detect"
2761
2704
 
2762
2705
  # 处理 user_input 的空格
2763
2706
  if user_input:
@@ -2770,11 +2713,8 @@ def main():
2770
2713
  and user_input
2771
2714
  and not user_input.startswith("/")
2772
2715
  ):
2773
- shell_script = generate_shell_command(user_input)
2774
- if confirm(get_message("confirm_execute")):
2775
- execute_shell_command(shell_script)
2776
- else:
2777
- continue
2716
+ auto_command(ARGS,user_input)
2717
+
2778
2718
  elif memory["mode"] == "voice_input" and not user_input.startswith("/"):
2779
2719
  text = voice_input()
2780
2720
  new_prompt = "/coding " + text
@@ -2805,7 +2745,7 @@ def main():
2805
2745
  if not query:
2806
2746
  print("Please enter your query.")
2807
2747
  else:
2808
- manage_models(ARGS,query)
2748
+ manage_models(query)
2809
2749
 
2810
2750
  elif user_input.startswith("/mode"):
2811
2751
  conf = user_input[len("/mode"):].strip()
@@ -2826,7 +2766,12 @@ def main():
2826
2766
  query = user_input[len("/commit"):].strip()
2827
2767
  commit(query)
2828
2768
  elif user_input.startswith("/help"):
2829
- show_help()
2769
+ query = user_input[len("/help"):].strip()
2770
+ if not query:
2771
+ show_help()
2772
+ else:
2773
+ help(query)
2774
+
2830
2775
  elif user_input.startswith("/exclude_dirs"):
2831
2776
  dir_names = user_input[len(
2832
2777
  "/exclude_dirs"):].strip().split(",")
@@ -2879,6 +2824,9 @@ def main():
2879
2824
  else:
2880
2825
  mcp(query)
2881
2826
 
2827
+ elif user_input.startswith("/auto"):
2828
+ query = user_input[len("/auto"):].strip()
2829
+ auto_command(ARGS,query)
2882
2830
  elif user_input.startswith("/debug"):
2883
2831
  code = user_input[len("/debug"):].strip()
2884
2832
  try: