autocoder-nano 0.1.34__py3-none-any.whl → 0.1.36__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the changes between those versions as they appear in the public registry.
@@ -10,6 +10,7 @@ import uuid

  from autocoder_nano.agent.agentic_edit import AgenticEdit
  from autocoder_nano.agent.agentic_edit_types import AgenticEditRequest
+ from autocoder_nano.chat import stream_chat_display
  from autocoder_nano.edit import Dispacher
  from autocoder_nano.helper import show_help
  from autocoder_nano.index.entry import build_index_and_filter_files
@@ -75,7 +76,7 @@ memory = {
  }


- args: AutoCoderArgs = AutoCoderArgs()
+ # args: AutoCoderArgs = AutoCoderArgs()


  def get_all_file_names_in_project() -> List[str]:
@@ -215,6 +216,40 @@ completer = CommandCompleter(
  )


+ def get_final_config(query: str, delete_execute_file: bool = False) -> AutoCoderArgs | None:
+ conf = memory.get("conf", {})
+ yaml_config = {
+ "include_file": ["./base/base.yml"],
+ "skip_build_index": conf.get("skip_build_index", "true") == "true",
+ "skip_confirm": conf.get("skip_confirm", "true") == "true",
+ "chat_model": conf.get("chat_model", ""),
+ "code_model": conf.get("code_model", ""),
+ "auto_merge": conf.get("auto_merge", "editblock"),
+ "exclude_files": memory.get("exclude_files", [])
+ }
+ current_files = memory["current_files"]["files"]
+ yaml_config["urls"] = current_files
+ yaml_config["query"] = query
+
+ # 如果 conf 中有设置, 则以 conf 配置为主
+ for key, value in conf.items():
+ converted_value = convert_config_value(key, value)
+ if converted_value is not None:
+ yaml_config[key] = converted_value
+
+ execute_file = os.path.join(project_root, "actions", f"{uuid.uuid4()}.yml")
+ try:
+ yaml_content = convert_yaml_config_to_str(yaml_config=yaml_config)
+ with open(os.path.join(execute_file), "w") as f: # 保存此次查询的细节
+ f.write(yaml_content)
+ args = convert_yaml_to_config(execute_file) # 更新到args
+ finally:
+ if delete_execute_file:
+ if os.path.exists(execute_file):
+ os.remove(execute_file)
+ return args
+
+
  def exclude_dirs(dir_names: List[str]):
  new_dirs = dir_names
  existing_dirs = memory.get("exclude_dirs", [])
@@ -281,7 +316,7 @@ def exclude_files(query: str):


  def index_command(llm):
- update_config_to_args(query="", delete_execute_file=True)
+ args = get_final_config(query="", delete_execute_file=True)

  source_dir = os.path.abspath(args.source_dir)
  printer.print_text(f"开始对目录 {source_dir} 中的源代码进行索引", style="green")
@@ -359,7 +394,7 @@ def index_import(import_path: str):


  def index_query_command(query: str, llm: AutoLLM):
- update_config_to_args(query=query, delete_execute_file=True)
+ args = get_final_config(query=query, delete_execute_file=True)

  # args.query = query
  if args.project_type == "py":
@@ -389,12 +424,6 @@ def index_query_command(query: str, llm: AutoLLM):
  panel=True
  )

- # headers = TargetFile.model_fields.keys()
- # table_data = wrap_text_in_table(
- # [[getattr(file_item, name) for name in headers] for file_item in all_results]
- # )
- # table_output = tabulate.tabulate(table_data, headers, tablefmt="grid")
- # print(table_output, flush=True)
  printer.print_table_compact(
  headers=["文件路径", "原因"],
  data=[[_target_file.file_path, _target_file.reason] for _target_file in all_results],
@@ -415,7 +444,8 @@ def convert_yaml_config_to_str(yaml_config):


  def convert_yaml_to_config(yaml_file: str | dict | AutoCoderArgs):
- global args
+ # global args
+ args = AutoCoderArgs()
  config = {}
  if isinstance(yaml_file, str):
  args.file = yaml_file
@@ -452,42 +482,6 @@ def convert_config_value(key, value):
  return None


- def update_config_to_args(query, delete_execute_file: bool = False):
- conf = memory.get("conf", {})
-
- # 默认 chat 配置
- yaml_config = {
- "include_file": ["./base/base.yml"],
- "skip_build_index": conf.get("skip_build_index", "true") == "true",
- "skip_confirm": conf.get("skip_confirm", "true") == "true",
- "chat_model": conf.get("chat_model", ""),
- "code_model": conf.get("code_model", ""),
- "auto_merge": conf.get("auto_merge", "editblock"),
- "exclude_files": memory.get("exclude_files", [])
- }
- current_files = memory["current_files"]["files"]
- yaml_config["urls"] = current_files
- yaml_config["query"] = query
-
- # 如果 conf 中有设置, 则以 conf 配置为主
- for key, value in conf.items():
- converted_value = convert_config_value(key, value)
- if converted_value is not None:
- yaml_config[key] = converted_value
-
- yaml_content = convert_yaml_config_to_str(yaml_config=yaml_config)
- execute_file = os.path.join(args.source_dir, "actions", f"{uuid.uuid4()}.yml")
-
- with open(os.path.join(execute_file), "w") as f: # 保存此次查询的细节
- f.write(yaml_content)
-
- convert_yaml_to_config(execute_file) # 更新到args
-
- if delete_execute_file:
- if os.path.exists(execute_file):
- os.remove(execute_file)
-
-
  def print_chat_history(history, max_entries=5):
  recent_history = history[-max_entries:]
  data_list = []
@@ -519,7 +513,7 @@ def code_review(query: str) -> str:


  def chat(query: str, llm: AutoLLM):
- update_config_to_args(query)
+ args = get_final_config(query)

  is_history = query.strip().startswith("/history")
  is_new = "/new" in query
@@ -610,50 +604,52 @@ def chat(query: str, llm: AutoLLM):

  loaded_conversations = pre_conversations + chat_history["ask_conversation"]

- v = chat_llm.stream_chat_ai(conversations=loaded_conversations, model=args.chat_model)
-
- MAX_HISTORY_LINES = 15 # 最大保留历史行数
- lines_buffer = []
- current_line = ""
- assistant_response = ""
-
- try:
- with Live(Panel("", title="Response", style="cyan"), refresh_per_second=12) as live:
- for chunk in v:
- if chunk.choices and chunk.choices[0].delta.content:
- content = chunk.choices[0].delta.content
- assistant_response += content
-
- # 处理换行符分割
- parts = (current_line + content).split('\n')
-
- # 最后一部分是未完成的新行
- if len(parts) > 1:
- # 将完整行加入缓冲区
- lines_buffer.extend(parts[:-1])
- # 保留最近N行历史
- if len(lines_buffer) > MAX_HISTORY_LINES:
- del lines_buffer[0: len(lines_buffer) - MAX_HISTORY_LINES]
- # 更新当前行(最后未完成的部分)
- current_line = parts[-1]
- # 构建显示内容 = 历史行 + 当前行
- display_content = '\n'.join(lines_buffer[-MAX_HISTORY_LINES:] + [current_line])
-
- live.update(
- Panel(Markdown(display_content), title="模型返回", border_style="cyan",
- height=min(25, live.console.height - 4))
- )
-
- # 处理最后未换行的内容
- if current_line:
- lines_buffer.append(current_line)
-
- # 最终完整渲染
- live.update(
- Panel(Markdown(assistant_response), title="模型返回", border_style="dim blue")
- )
- except Exception as e:
- printer.print_panel(Text(f"{str(e)}", style="red"), title="模型返回", center=True)
+ assistant_response = stream_chat_display(chat_llm=llm, args=args, conversations=loaded_conversations)
+
+ # v = chat_llm.stream_chat_ai(conversations=loaded_conversations, model=args.chat_model)
+ #
+ # MAX_HISTORY_LINES = 15 # 最大保留历史行数
+ # lines_buffer = []
+ # current_line = ""
+ # assistant_response = ""
+ #
+ # try:
+ # with Live(Panel("", title="Response", style="cyan"), refresh_per_second=12) as live:
+ # for chunk in v:
+ # if chunk.choices and chunk.choices[0].delta.content:
+ # content = chunk.choices[0].delta.content
+ # assistant_response += content
+ #
+ # # 处理换行符分割
+ # parts = (current_line + content).split('\n')
+ #
+ # # 最后一部分是未完成的新行
+ # if len(parts) > 1:
+ # # 将完整行加入缓冲区
+ # lines_buffer.extend(parts[:-1])
+ # # 保留最近N行历史
+ # if len(lines_buffer) > MAX_HISTORY_LINES:
+ # del lines_buffer[0: len(lines_buffer) - MAX_HISTORY_LINES]
+ # # 更新当前行(最后未完成的部分)
+ # current_line = parts[-1]
+ # # 构建显示内容 = 历史行 + 当前行
+ # display_content = '\n'.join(lines_buffer[-MAX_HISTORY_LINES:] + [current_line])
+ #
+ # live.update(
+ # Panel(Markdown(display_content), title="模型返回", border_style="cyan",
+ # height=min(25, live.console.height - 4))
+ # )
+ #
+ # # 处理最后未换行的内容
+ # if current_line:
+ # lines_buffer.append(current_line)
+ #
+ # # 最终完整渲染
+ # live.update(
+ # Panel(Markdown(assistant_response), title="模型返回", border_style="dim blue")
+ # )
+ # except Exception as e:
+ # printer.print_panel(Text(f"{str(e)}", style="red"), title="模型返回", center=True)

  chat_history["ask_conversation"].append({"role": "assistant", "content": assistant_response})

@@ -664,19 +660,19 @@ def chat(query: str, llm: AutoLLM):
  return


- def init_project():
- if not args.project_type:
+ def init_project(project_type):
+ if not project_type:
  printer.print_text(
  f"请指定项目类型。可选的项目类型包括:py|ts| 或文件扩展名(例如:.java,.scala), 多个扩展名逗号分隔.", style="green"
  )
  return
- os.makedirs(os.path.join(args.source_dir, "actions"), exist_ok=True)
- os.makedirs(os.path.join(args.source_dir, ".auto-coder"), exist_ok=True)
- os.makedirs(os.path.join(args.source_dir, ".auto-coder", "autocoderrules"), exist_ok=True)
- source_dir = os.path.abspath(args.source_dir)
+ os.makedirs(os.path.join(project_root, "actions"), exist_ok=True)
+ os.makedirs(os.path.join(project_root, ".auto-coder"), exist_ok=True)
+ os.makedirs(os.path.join(project_root, ".auto-coder", "autocoderrules"), exist_ok=True)
+ source_dir = os.path.abspath(project_root)
  create_actions(
  source_dir=source_dir,
- params={"project_type": args.project_type,
+ params={"project_type": project_type,
  "source_dir": source_dir},
  )

@@ -686,7 +682,7 @@ def init_project():
  f.write("\nactions/")
  f.write("\noutput.txt")

- printer.print_text(f"已在 {os.path.abspath(args.source_dir)} 成功初始化 autocoder-nano 项目", style="green")
+ printer.print_text(f"已在 {os.path.abspath(project_root)} 成功初始化 autocoder-nano 项目", style="green")
  return


@@ -739,7 +735,7 @@ def load_include_files(config, base_path, max_depth=10, current_depth=0):

  def prepare_chat_yaml():
  # auto_coder_main(["next", "chat_action"]) 准备聊天 yaml 文件
- actions_dir = os.path.join(args.source_dir, "actions")
+ actions_dir = os.path.join(project_root, "actions")
  if not os.path.exists(actions_dir):
  printer.print_text("当前目录中未找到 actions 目录。请执行初始化 AutoCoder Nano", style="yellow")
  return
@@ -786,7 +782,7 @@ def coding(query: str, llm: AutoLLM):
  current_files = memory["current_files"]["files"]

  prepare_chat_yaml() # 复制上一个序号的 yaml 文件, 生成一个新的聊天 yaml 文件
- latest_yaml_file = get_last_yaml_file(os.path.join(args.source_dir, "actions"))
+ latest_yaml_file = get_last_yaml_file(os.path.join(project_root, "actions"))

  if latest_yaml_file:
  yaml_config = {
@@ -808,7 +804,7 @@ def coding(query: str, llm: AutoLLM):
  yaml_config["query"] = query

  if is_apply:
- memory_dir = os.path.join(args.source_dir, ".auto-coder", "memory")
+ memory_dir = os.path.join(project_root, ".auto-coder", "memory")
  os.makedirs(memory_dir, exist_ok=True)
  memory_file = os.path.join(memory_dir, "chat_history.json")

@@ -837,25 +833,26 @@ def coding(query: str, llm: AutoLLM):
  yaml_config["context"] += f"你: {conv['content']}\n"
  yaml_config["context"] += "</history>\n"

- if args.enable_rules:
- rules_dir_path = os.path.join(project_root, ".auto-coder", "autocoderrules")
- printer.print_text("已开启 Rules 模式", style="green")
- yaml_config["context"] += f"下面是我们对代码进行深入分析,提取具有通用价值的功能模式和设计模式,可在其他需求中复用的Rules\n"
- yaml_config["context"] += "你在编写代码时可以参考以下Rules\n"
- yaml_config["context"] += "<rules>\n"
- for rules_name in os.listdir(rules_dir_path):
- printer.print_text(f"正在加载 Rules:{rules_name}", style="green")
- rules_file_path = os.path.join(rules_dir_path, rules_name)
- with open(rules_file_path, "r") as fp:
- yaml_config["context"] += f"{fp.read()}\n"
- yaml_config["context"] += "</rules>\n"
+ # todo:暂时注释,后续通过一个 is_rules 的参数来控制
+ # if args.enable_rules:
+ # rules_dir_path = os.path.join(project_root, ".auto-coder", "autocoderrules")
+ # printer.print_text("已开启 Rules 模式", style="green")
+ # yaml_config["context"] += f"下面是我们对代码进行深入分析,提取具有通用价值的功能模式和设计模式,可在其他需求中复用的Rules\n"
+ # yaml_config["context"] += "你在编写代码时可以参考以下Rules\n"
+ # yaml_config["context"] += "<rules>\n"
+ # for rules_name in os.listdir(rules_dir_path):
+ # printer.print_text(f"正在加载 Rules:{rules_name}", style="green")
+ # rules_file_path = os.path.join(rules_dir_path, rules_name)
+ # with open(rules_file_path, "r") as fp:
+ # yaml_config["context"] += f"{fp.read()}\n"
+ # yaml_config["context"] += "</rules>\n"

  yaml_config["file"] = latest_yaml_file
  yaml_content = convert_yaml_config_to_str(yaml_config=yaml_config)
- execute_file = os.path.join(args.source_dir, "actions", latest_yaml_file)
+ execute_file = os.path.join(project_root, "actions", latest_yaml_file)
  with open(os.path.join(execute_file), "w") as f:
  f.write(yaml_content)
- convert_yaml_to_config(execute_file)
+ args = convert_yaml_to_config(execute_file)

  dispacher = Dispacher(args=args, llm=llm)
  dispacher.dispach()
@@ -866,7 +863,7 @@ def coding(query: str, llm: AutoLLM):
  completer.refresh_files()


- def execute_revert():
+ def execute_revert(args: AutoCoderArgs):
  repo_path = args.source_dir

  file_content = open(args.file).read()
@@ -883,11 +880,11 @@ def execute_revert():


  def revert():
- last_yaml_file = get_last_yaml_file(os.path.join(args.source_dir, "actions"))
+ last_yaml_file = get_last_yaml_file(os.path.join(project_root, "actions"))
  if last_yaml_file:
- file_path = os.path.join(args.source_dir, "actions", last_yaml_file)
- convert_yaml_to_config(file_path)
- execute_revert()
+ file_path = os.path.join(project_root, "actions", last_yaml_file)
+ args = convert_yaml_to_config(file_path)
+ execute_revert(args)
  else:
  printer.print_text(f"No previous chat action found to revert.", style="yellow")

@@ -910,15 +907,15 @@ def print_commit_info(commit_result: CommitResult):


  def commit_info(query: str, llm: AutoLLM):
- repo_path = args.source_dir
+ repo_path = project_root
  prepare_chat_yaml() # 复制上一个序号的 yaml 文件, 生成一个新的聊天 yaml 文件

- latest_yaml_file = get_last_yaml_file(os.path.join(args.source_dir, "actions"))
+ latest_yaml_file = get_last_yaml_file(os.path.join(project_root, "actions"))
  execute_file = None

  if latest_yaml_file:
  try:
- execute_file = os.path.join(args.source_dir, "actions", latest_yaml_file)
+ execute_file = os.path.join(project_root, "actions", latest_yaml_file)
  conf = memory.get("conf", {})
  yaml_config = {
  "include_file": ["./base/base.yml"],
@@ -937,11 +934,11 @@ def commit_info(query: str, llm: AutoLLM):
  yaml_config["urls"] = current_files

  # 临时保存yaml文件,然后读取yaml文件,更新args
- temp_yaml = os.path.join(args.source_dir, "actions", f"{uuid.uuid4()}.yml")
+ temp_yaml = os.path.join(project_root, "actions", f"{uuid.uuid4()}.yml")
  try:
  with open(temp_yaml, "w", encoding="utf-8") as f:
  f.write(convert_yaml_config_to_str(yaml_config=yaml_config))
- convert_yaml_to_config(temp_yaml)
+ args = convert_yaml_to_config(temp_yaml)
  finally:
  if os.path.exists(temp_yaml):
  os.remove(temp_yaml)
@@ -982,7 +979,7 @@ def commit_info(query: str, llm: AutoLLM):


  def agentic_edit(query: str, llm: AutoLLM):
- update_config_to_args(query=query, delete_execute_file=True)
+ args = get_final_config(query=query, delete_execute_file=True)

  sources = SourceCodeList([])
  agentic_editor = AgenticEdit(
@@ -1032,7 +1029,7 @@ def _generate_shell_script(user_input: str) -> str:


  def generate_shell_command(input_text: str, llm: AutoLLM) -> str | None:
- update_config_to_args(query=input_text, delete_execute_file=True)
+ args = get_final_config(query=input_text, delete_execute_file=True)

  try:
  printer.print_panel(
@@ -1274,13 +1271,18 @@ def initialize_system():

  def _init_project():
  first_time = False
- if not os.path.exists(os.path.join(args.source_dir, ".auto-coder")):
+ if not os.path.exists(os.path.join(project_root, ".auto-coder")):
  first_time = True
  printer.print_text("当前目录未初始化为auto-coder项目.", style="yellow")
  init_choice = input(f" 是否现在初始化项目?(y/n): ").strip().lower()
  if init_choice == "y":
  try:
- init_project()
+ if first_time: # 首次启动,配置项目类型
+ if not os.path.exists(base_persist_dir):
+ os.makedirs(base_persist_dir, exist_ok=True)
+ printer.print_text("创建目录:{}".format(base_persist_dir), style="green")
+ project_type = configure_project_type()
+ init_project(project_type)
  printer.print_text("项目初始化成功.", style="green")
  except Exception as e:
  printer.print_text(f"项目初始化失败, {str(e)}.", style="red")
@@ -1289,12 +1291,12 @@ def initialize_system():
  printer.print_text("退出而不初始化.", style="yellow")
  exit(1)

- if not os.path.exists(base_persist_dir):
- os.makedirs(base_persist_dir, exist_ok=True)
- printer.print_text("创建目录:{}".format(base_persist_dir), style="green")
+ # if not os.path.exists(base_persist_dir):
+ # os.makedirs(base_persist_dir, exist_ok=True)
+ # printer.print_text("创建目录:{}".format(base_persist_dir), style="green")

- if first_time: # 首次启动,配置项目类型
- configure_project_type()
+ # if first_time: # 首次启动,配置项目类型
+ # project_type = configure_project_type()

  printer.print_text("项目初始化完成.", style="green")

@@ -1656,7 +1658,7 @@ def rules(query_args: List[str], llm: AutoLLM):
  /rules /analyze - 分析当前文件,可选提供查询内容
  /rules /commit <提交ID> - 分析特定提交,必须提供提交ID和查询内容
  """
- update_config_to_args(query="", delete_execute_file=True)
+ args = get_final_config(query="", delete_execute_file=True)
  rules_dir_path = os.path.join(project_root, ".auto-coder", "autocoderrules")
  if query_args[0] == "/list":
  printer.print_table_compact(
@@ -1806,20 +1808,11 @@ def main():
  memory["mode"] = "normal"
  event.app.invalidate()

- def _update_bottom_toolbar(toolbar_arg):
- if toolbar_arg in memory['conf']:
- return memory['conf'][toolbar_arg]
- return args.model_dump()[toolbar_arg]
-
  def get_bottom_toolbar():
  if "mode" not in memory:
  memory["mode"] = "normal"
  mode = memory["mode"]
- skip_build_toolbar = _update_bottom_toolbar('skip_build_index')
- skip_filter_toolbar = _update_bottom_toolbar('skip_filter_index')
- index_filter_toolbar = _update_bottom_toolbar('index_filter_level')
- return (f" 当前模式: {MODES[mode]} (ctl+k 切换模式) | 跳过索引: {skip_build_toolbar} "
- f"| 跳过过滤: {skip_filter_toolbar} | 过滤等级: {index_filter_toolbar}")
+ return f" 当前模式: {MODES[mode]} (ctl+k 切换模式) | 当前项目: {project_root}"

  session = PromptSession(
  history=InMemoryHistory(),
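The auto_coder_nano.py hunks above retire the module-level `args: AutoCoderArgs` global: `update_config_to_args` is dropped in favour of `get_final_config`, `convert_yaml_to_config` now returns a fresh `AutoCoderArgs`, and each command binds its own `args` locally before handing it to collaborators. A minimal sketch of the resulting call pattern (not part of the diff; `execute_file` and `llm` are assumed to be prepared as in `coding()`):

```python
# Hedged sketch of the 0.1.36 flow: thread AutoCoderArgs explicitly instead of
# mutating a module-level global. execute_file and llm are assumed to exist.
args = convert_yaml_to_config(execute_file)  # parse the action YAML into AutoCoderArgs
dispacher = Dispacher(args=args, llm=llm)    # dependencies passed in explicitly
dispacher.dispach()
```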
@@ -0,0 +1,64 @@
+ from rich.live import Live
+ from rich.panel import Panel
+ from rich.markdown import Markdown
+ from rich.text import Text
+
+ from autocoder_nano.llm_client import AutoLLM
+ from autocoder_nano.llm_types import AutoCoderArgs
+ from autocoder_nano.utils.printer_utils import Printer
+
+
+ printer = Printer
+
+
+ def stream_chat_display(
+ chat_llm: AutoLLM, args: AutoCoderArgs, conversations: list[dict], max_history_lines: int = 15, max_height: int = 25
+ ) -> str:
+ v = chat_llm.stream_chat_ai(conversations=conversations, model=args.chat_model)
+
+ lines_buffer = []
+ assistant_response = ""
+ current_line = ""
+
+ try:
+ with Live(Panel("", title="Response", style="cyan"), refresh_per_second=12) as live:
+ for chunk in v:
+ if chunk.choices and chunk.choices[0].delta.content:
+ content = chunk.choices[0].delta.content
+ assistant_response += content
+
+ # 处理换行符分割
+ parts = (current_line + content).split('\n')
+
+ # 最后一部分是未完成的新行
+ if len(parts) > 1:
+ # 将完整行加入缓冲区
+ lines_buffer.extend(parts[:-1])
+ # 保留最近N行历史
+ if len(lines_buffer) > max_history_lines:
+ del lines_buffer[0: len(lines_buffer) - max_history_lines]
+ # 更新当前行(最后未完成的部分)
+ current_line = parts[-1]
+ # 构建显示内容 = 历史行 + 当前行
+ display_content = '\n'.join(lines_buffer[-max_history_lines:] + [current_line])
+
+ live.update(
+ Panel(Markdown(display_content), title="模型返回", border_style="cyan",
+ height=min(max_height, live.console.height - 4))
+ )
+
+ # 处理最后未换行的内容
+ if current_line:
+ lines_buffer.append(current_line)
+
+ # 最终完整渲染
+ live.update(
+ Panel(Markdown(assistant_response), title="模型返回", border_style="dim blue")
+ )
+ except Exception as e:
+ printer.print_panel(Text(f"{str(e)}", style="red"), title="模型返回", center=True)
+
+ return assistant_response
+
+
+ __all__ = ["stream_chat_display"]
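The new `autocoder_nano.chat` package above extracts the Rich `Live` streaming loop out of `chat()` into a reusable `stream_chat_display` helper that returns the accumulated assistant text. A hedged usage sketch mirroring the call site added in auto_coder_nano.py (illustrative only; the `AutoLLM` client and `AutoCoderArgs` are assumed to be configured elsewhere):

```python
# Illustrative only, not part of the diff. Assumes `llm` is a configured AutoLLM
# whose chat_model sub-client is registered, and `args` is an AutoCoderArgs.
from autocoder_nano.chat import stream_chat_display

conversations = [{"role": "user", "content": "Summarize the entry points of this project"}]
assistant_response = stream_chat_display(
    chat_llm=llm,                 # streams via llm.stream_chat_ai(...)
    args=args,                    # supplies args.chat_model
    conversations=conversations,  # OpenAI-style message list
)
print(len(assistant_response))    # full text accumulated from the stream
```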
@@ -0,0 +1,4 @@
+ from autocoder_nano.index.entry import build_index_and_filter_files
+
+
+ __all__ = ["build_index_and_filter_files"]
@@ -41,10 +41,8 @@ class AutoLLM:
  model = self.default_model_name

  model_name = self.sub_clients[model]["model_name"]
- printer.print_card(
- title="模型调用",
- content=f"调用函数: stream_chat_ai\n使用模型: {model}\n模型名称: {model_name}",
- width=60
+ printer.print_key_value(
+ {"调用函数": "stream_chat_ai", "使用模型": f"{model}", "模型名称": f"{model_name}"}, title="模型调用"
  )
  request = LLMRequest(
  model=model_name,
@@ -65,6 +63,10 @@ class AutoLLM:
  client: OpenAI = self.sub_clients[model]["client"]
  model_name = self.sub_clients[model]["model_name"]

+ printer.print_key_value(
+ {"调用函数": "stream_chat_ai_ex", "使用模型": f"{model}", "模型名称": f"{model_name}"}, title="模型调用"
+ )
+
  request = LLMRequest(
  model=model_name,
  messages=conversations,
@@ -185,10 +187,8 @@ class AutoLLM:
  conversations = [{"role": "user", "content": conversations}]

  model_name = self.sub_clients[model]["model_name"]
- printer.print_card(
- title="模型调用",
- content=f"调用函数: chat_ai\n使用模型: {model}\n模型名称: {model_name}",
- width=60
+ printer.print_key_value(
+ {"调用函数": "chat_ai", "使用模型": f"{model}", "模型名称": f"{model_name}"}, title="模型调用"
  )
  request = LLMRequest(
  model=model_name,
@@ -230,11 +230,9 @@ class AutoLLM:
  model = self.default_model_name

  model_name = self.sub_clients[model]["model_name"]
- printer.print_card(
- title="模型调用",
- content=f"调用函数: embedding\n使用模型: {model}\n模型名称: {model_name}",
- width=60
- )
+ # printer.print_key_value(
+ # {"调用函数": "embedding", "使用模型": f"{model}", "模型名称": f"{model_name}"}, title="模型调用"
+ # )

  res = self.sub_clients[model]["client"].embeddings.create(
  model=model_name,
@@ -0,0 +1,131 @@
+ import os
+ import uuid
+
+ import yaml
+ from jinja2 import Template
+
+ from autocoder_nano.llm_types import AutoCoderArgs
+ from autocoder_nano.utils.printer_utils import Printer
+
+
+ printer = Printer()
+
+
+ def convert_yaml_config_to_str(yaml_config):
+ yaml_content = yaml.safe_dump(
+ yaml_config,
+ allow_unicode=True,
+ default_flow_style=False,
+ default_style=None,
+ )
+ return yaml_content
+
+
+ def convert_config_value(key, value):
+ field_info = AutoCoderArgs.model_fields.get(key)
+ if field_info:
+ if value.lower() in ["true", "false"]:
+ return value.lower() == "true"
+ elif "int" in str(field_info.annotation):
+ return int(value)
+ elif "float" in str(field_info.annotation):
+ return float(value)
+ else:
+ return value
+ else:
+ printer.print_text(f"无效的配置项: {key}", style="red")
+ return None
+
+
+ def resolve_include_path(base_path, include_path):
+ if include_path.startswith(".") or include_path.startswith(".."):
+ full_base_path = os.path.abspath(base_path)
+ parent_dir = os.path.dirname(full_base_path)
+ return os.path.abspath(os.path.join(parent_dir, include_path))
+ else:
+ return include_path
+
+
+ def load_include_files(config, base_path, max_depth=10, current_depth=0):
+ if current_depth >= max_depth:
+ raise ValueError(
+ f"Exceeded maximum include depth of {max_depth},you may have a circular dependency in your include files."
+ )
+ if "include_file" in config:
+ include_files = config["include_file"]
+ if not isinstance(include_files, list):
+ include_files = [include_files]
+
+ for include_file in include_files:
+ abs_include_path = resolve_include_path(base_path, include_file)
+ # printer.print_text(f"正在加载 Include file: {abs_include_path}", style="green")
+ with open(abs_include_path, "r") as f:
+ include_config = yaml.safe_load(f)
+ if not include_config:
+ printer.print_text(f"Include file {abs_include_path} 为空,跳过处理.", style="green")
+ continue
+ config.update(
+ {
+ **load_include_files(include_config, abs_include_path, max_depth, current_depth + 1),
+ **config,
+ }
+ )
+ del config["include_file"]
+ return config
+
+
+ def convert_yaml_to_config(yaml_file: str | dict | AutoCoderArgs):
+ # global args
+ args = AutoCoderArgs()
+ config = {}
+ if isinstance(yaml_file, str):
+ args.file = yaml_file
+ with open(yaml_file, "r") as f:
+ config = yaml.safe_load(f)
+ config = load_include_files(config, yaml_file)
+ if isinstance(yaml_file, dict):
+ config = yaml_file
+ if isinstance(yaml_file, AutoCoderArgs):
+ config = yaml_file.model_dump()
+ for key, value in config.items():
+ if key != "file": # 排除 --file 参数本身
+ # key: ENV {{VARIABLE_NAME}}
+ if isinstance(value, str) and value.startswith("ENV"):
+ template = Template(value.removeprefix("ENV").strip())
+ value = template.render(os.environ)
+ setattr(args, key, value)
+ return args
+
+
+ def get_final_config(project_root: str, memory: dict, query: str, delete_execute_file: bool = False) -> AutoCoderArgs:
+ conf = memory.get("conf", {})
+ yaml_config = {
+ "include_file": ["./base/base.yml"],
+ "skip_build_index": conf.get("skip_build_index", "true") == "true",
+ "skip_confirm": conf.get("skip_confirm", "true") == "true",
+ "chat_model": conf.get("chat_model", ""),
+ "code_model": conf.get("code_model", ""),
+ "auto_merge": conf.get("auto_merge", "editblock"),
+ "exclude_files": memory.get("exclude_files", [])
+ }
+ current_files = memory["current_files"]["files"]
+ yaml_config["urls"] = current_files
+ yaml_config["query"] = query
+
+ # 如果 conf 中有设置, 则以 conf 配置为主
+ for key, value in conf.items():
+ converted_value = convert_config_value(key, value)
+ if converted_value is not None:
+ yaml_config[key] = converted_value
+
+ execute_file = os.path.join(project_root, "actions", f"{uuid.uuid4()}.yml")
+ try:
+ yaml_content = convert_yaml_config_to_str(yaml_config=yaml_config)
+ with open(os.path.join(execute_file), "w") as f: # 保存此次查询的细节
+ f.write(yaml_content)
+ args = convert_yaml_to_config(execute_file) # 更新到args
+ finally:
+ if delete_execute_file:
+ if os.path.exists(execute_file):
+ os.remove(execute_file)
+ return args
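The new `utils/config_utils.py` module above gathers the YAML configuration helpers behind a functional interface: `get_final_config` merges the session `memory["conf"]` into a temporary action YAML under `<project_root>/actions/` and returns the parsed `AutoCoderArgs`. A hedged sketch of how it might be called (values are hypothetical; `actions/base/base.yml` must already exist for the `include_file` resolution to succeed):

```python
# Illustrative only, not part of the diff. The memory dict mirrors the session
# state kept by auto_coder_nano.py; model name and project path are hypothetical.
from autocoder_nano.utils.config_utils import get_final_config

memory = {
    "conf": {"chat_model": "my-chat-model", "skip_build_index": "true"},
    "current_files": {"files": []},
    "exclude_files": [],
}
args = get_final_config(
    project_root="/path/to/project",  # needs actions/base/base.yml on disk
    memory=memory,
    query="Refactor the main entry point",
    delete_execute_file=True,         # remove the temporary actions/<uuid>.yml afterwards
)
print(args.chat_model, args.query)
```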
autocoder_nano/version.py CHANGED
@@ -1,3 +1,3 @@
- __version__ = "0.1.34"
+ __version__ = "0.1.36"
  __author__ = "moofs"
  __license__ = "Apache License 2.0"
@@ -1,6 +1,6 @@
  Metadata-Version: 2.2
  Name: autocoder_nano
- Version: 0.1.34
+ Version: 0.1.36
  Summary: AutoCoder Nano
  Author: moofs
  Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
@@ -1,16 +1,16 @@
  autocoder_nano/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- autocoder_nano/auto_coder_nano.py,sha256=sLMu8QPfdGehTNs5-538PLst-OuheA3VT2WovbShZeA,80482
+ autocoder_nano/auto_coder_nano.py,sha256=75BNVyuktY2gdH8dXKBGMa-DVePWORf_5kgN5OvBODc,80571
  autocoder_nano/auto_coder_nano_rag.py,sha256=9BtNZ6nC5D5SPTIuziXZOfouCBLOMNzvJMTdDPQEgO8,10436
  autocoder_nano/auto_coder_nano_ui.py,sha256=ZBskcIJMeTJY7_JipGJaee58G9fUJaOv3LV4hptLc6c,12669
  autocoder_nano/file_utils.py,sha256=iGbkbQ191nKL4aNufdexYYYQSDM1XrDC9Uxp_PIbawY,661
  autocoder_nano/git_utils.py,sha256=zAhXi5WFHklpkoPH04kkXpQjQJv1CEHbXE-O3tqyNlA,23194
  autocoder_nano/helper.py,sha256=LbieDBKp408x9g4GHCvcujUgMgxDTV9owGHIBYpT1ww,6643
- autocoder_nano/llm_client.py,sha256=aB6ZCZnMF28RvoPuqORZAneMpvIy16mPrB7-1vVKTao,11207
+ autocoder_nano/llm_client.py,sha256=pJkEpAaPJgz7Yzzr1h-YUveiVidH8YNpLmdfwuM5t_I,11318
  autocoder_nano/llm_prompt.py,sha256=ViWUfCZp0gDESAAPHBhZc2WhHiFUHIxK6a2xbFu0sjU,10864
  autocoder_nano/llm_types.py,sha256=T0ugeWdwejy6BJaQrAlk8Pk5qweW2xbggxzHaSpTBOg,11588
  autocoder_nano/sys_utils.py,sha256=Sn6kr5diaEkVWbYDBrtenr9zw32jVIWvsAReY7_uEd0,1638
  autocoder_nano/templates.py,sha256=fqlRtnx6HvPE4CbdnPcnLBB-flPwufwcGRpsFD3aW2c,4271
- autocoder_nano/version.py,sha256=5nUcgFDfoA9a2PSTl0F1jjOQAPSbI7_EyL0XXwMNT-k,79
+ autocoder_nano/version.py,sha256=w6lOdNu-e-YHiAO2e5DN0oFTe4239b-ik57CnftvY_w,79
  autocoder_nano/agent/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  autocoder_nano/agent/agent_base.py,sha256=O5Hq6VnoqrXnBE_oXZHXlbmSRdOEe28H65bJ1WhAQjg,16377
  autocoder_nano/agent/agentic_edit.py,sha256=I1HjRhMabDmtfxcCOKawUJV0wU1GNzKtof19_GNgAjU,88749
@@ -48,6 +48,7 @@ autocoder_nano/app/templates/partials/examples.html,sha256=_i7TfpcRqW-IvI69vVXYe
  autocoder_nano/app/templates/partials/header.html,sha256=txCMUmFFWSEDz5xxQwt8oBko8Y_b1bSsVASVOMCsILo,300
  autocoder_nano/app/templates/partials/input.html,sha256=8CY3JcHaA4nPZ2Vu4ragdYZzzodvF0isQiOGHtdQs6k,1956
  autocoder_nano/app/templates/partials/message.html,sha256=HWEh_j_yJAbP7zFs6jt88BDzkP7dG6VgPUbS2MT5Ax4,1548
+ autocoder_nano/chat/__init__.py,sha256=FuXp0tcnegngct9Jp8HbgwFkwnhxMirwNFHtoa_vACw,2441
  autocoder_nano/data/tokenizer.json,sha256=7Lb5_DaYlDRvBRH0B0ynXO5c1fOwbQLxujX805-OEh0,7847602
  autocoder_nano/edit/__init__.py,sha256=QPMuW7tBTUe0Q00gUPJEmdxWqvunqko9_dsim0ncr7c,623
  autocoder_nano/edit/actions.py,sha256=N4qzSIE7Ifm7r6Sk-HbgWmbDqMP6jfrpByfpV7rbEo8,6480
@@ -56,7 +57,7 @@ autocoder_nano/edit/code/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZ
  autocoder_nano/edit/code/generate_editblock.py,sha256=s-VTZK0G1OhjEyZXqyjj4sY48fOo02EvHhaxTIw4ytY,13110
  autocoder_nano/edit/code/merge_editblock.py,sha256=Vk-FOVvaEzKcSRDMyMyR_M77kqj-s5-zejChn4QwLAY,17557
  autocoder_nano/edit/code/modification_ranker.py,sha256=hnF1acqAzPYKm9hEFxobJHfGGDdM-GclZLxvtt83lGA,3431
- autocoder_nano/index/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ autocoder_nano/index/__init__.py,sha256=r8HvwfyKJxnXMtAuxXIdbU0CWBGwJNlxIB4VZaUlBbo,112
  autocoder_nano/index/entry.py,sha256=S71dfnYC201eQLXwqNCo_Y83ImI1ZxuJ0_m2hz5nCJc,7729
  autocoder_nano/index/index_manager.py,sha256=ek7AqU8M-Snl5qZYhO_U0SEK3-y1u5OOxD9z-LdDesE,15619
  autocoder_nano/index/symbols_utils.py,sha256=z_16X6BozTfmric1uU-r2GqzDabJ5ChfAOB4lo7i-_8,1450
@@ -86,12 +87,13 @@ autocoder_nano/tools/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hS
  autocoder_nano/tools/http_tools.py,sha256=04Tmg8BTwfsw7_-fKBDHv787XU4yQ5UtQSDj0zJBIUc,3189
  autocoder_nano/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  autocoder_nano/utils/completer_utils.py,sha256=MGA3r5pAvDhp1vNGGCyjHWDtqXnd-CF4zPw7uawSzNM,25556
+ autocoder_nano/utils/config_utils.py,sha256=r5n0De4mz5sL_nj-CeT_F5TxtgWQIN5vv0Z5FiP8GXA,4800
  autocoder_nano/utils/formatted_log_utils.py,sha256=1d3xvZ1Bo3-I1wQOMdXpwsMX5cl2FWkmpgHGHvTPEvI,5457
  autocoder_nano/utils/printer_utils.py,sha256=6rGHihCh8DDESWs6qWqwsf3B6qaeM_CNx6crzkl9UCk,15303
  autocoder_nano/utils/shell_utils.py,sha256=llVTrOrmS1RH2ws7W69tofVtf53Kq04uh-sURphejrU,2477
- autocoder_nano-0.1.34.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
- autocoder_nano-0.1.34.dist-info/METADATA,sha256=aQAlr7vwMo7UlMq5cQ79wZwAwLiSpFlMKQQUiKfP6ic,13591
- autocoder_nano-0.1.34.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
- autocoder_nano-0.1.34.dist-info/entry_points.txt,sha256=Dj8gGZ_AgLy8ANqr2do_DJjpsR3JMh-ztsrUXo4Vn5Q,194
- autocoder_nano-0.1.34.dist-info/top_level.txt,sha256=D7s34cwIs1F4EAjRRDvO_zTHtUz1Z7UVccFUNlJn7HI,15
- autocoder_nano-0.1.34.dist-info/RECORD,,
+ autocoder_nano-0.1.36.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+ autocoder_nano-0.1.36.dist-info/METADATA,sha256=7-k89fLfFGXd67c6xAaIvmZV2Q1vix4B5YecTSn6Qcw,13591
+ autocoder_nano-0.1.36.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
+ autocoder_nano-0.1.36.dist-info/entry_points.txt,sha256=Dj8gGZ_AgLy8ANqr2do_DJjpsR3JMh-ztsrUXo4Vn5Q,194
+ autocoder_nano-0.1.36.dist-info/top_level.txt,sha256=D7s34cwIs1F4EAjRRDvO_zTHtUz1Z7UVccFUNlJn7HI,15
+ autocoder_nano-0.1.36.dist-info/RECORD,,