auto-coder 0.1.264__py3-none-any.whl → 0.1.266__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.



Files changed (53)
  1. {auto_coder-0.1.264.dist-info → auto_coder-0.1.266.dist-info}/METADATA +2 -2
  2. {auto_coder-0.1.264.dist-info → auto_coder-0.1.266.dist-info}/RECORD +53 -51
  3. autocoder/agent/planner.py +4 -4
  4. autocoder/auto_coder.py +26 -21
  5. autocoder/auto_coder_server.py +7 -7
  6. autocoder/chat_auto_coder.py +150 -49
  7. autocoder/commands/auto_command.py +83 -5
  8. autocoder/commands/tools.py +48 -50
  9. autocoder/common/__init__.py +0 -1
  10. autocoder/common/auto_coder_lang.py +43 -3
  11. autocoder/common/code_auto_generate.py +3 -3
  12. autocoder/common/code_auto_generate_diff.py +3 -6
  13. autocoder/common/code_auto_generate_editblock.py +3 -3
  14. autocoder/common/code_auto_generate_strict_diff.py +3 -3
  15. autocoder/common/code_auto_merge_diff.py +37 -3
  16. autocoder/common/code_auto_merge_editblock.py +43 -1
  17. autocoder/common/code_auto_merge_strict_diff.py +39 -4
  18. autocoder/common/command_completer.py +3 -0
  19. autocoder/common/command_generator.py +24 -8
  20. autocoder/common/command_templates.py +2 -2
  21. autocoder/common/conf_import_export.py +105 -0
  22. autocoder/common/conf_validator.py +1 -1
  23. autocoder/common/files.py +41 -2
  24. autocoder/common/image_to_page.py +11 -11
  25. autocoder/common/index_import_export.py +38 -18
  26. autocoder/common/mcp_hub.py +3 -3
  27. autocoder/common/mcp_server.py +2 -2
  28. autocoder/common/shells.py +254 -13
  29. autocoder/common/stats_panel.py +126 -0
  30. autocoder/dispacher/actions/action.py +6 -18
  31. autocoder/dispacher/actions/copilot.py +2 -2
  32. autocoder/dispacher/actions/plugins/action_regex_project.py +1 -3
  33. autocoder/dispacher/actions/plugins/action_translate.py +1 -1
  34. autocoder/index/entry.py +2 -2
  35. autocoder/index/filter/normal_filter.py +1 -1
  36. autocoder/index/filter/quick_filter.py +1 -1
  37. autocoder/index/index.py +5 -5
  38. autocoder/models.py +2 -2
  39. autocoder/pyproject/__init__.py +5 -5
  40. autocoder/rag/cache/byzer_storage_cache.py +4 -4
  41. autocoder/rag/cache/file_monitor_cache.py +2 -2
  42. autocoder/rag/cache/simple_cache.py +4 -4
  43. autocoder/rag/long_context_rag.py +2 -2
  44. autocoder/regexproject/__init__.py +3 -2
  45. autocoder/suffixproject/__init__.py +3 -2
  46. autocoder/tsproject/__init__.py +3 -2
  47. autocoder/utils/conversation_store.py +1 -1
  48. autocoder/utils/operate_config_api.py +3 -3
  49. autocoder/version.py +1 -1
  50. {auto_coder-0.1.264.dist-info → auto_coder-0.1.266.dist-info}/LICENSE +0 -0
  51. {auto_coder-0.1.264.dist-info → auto_coder-0.1.266.dist-info}/WHEEL +0 -0
  52. {auto_coder-0.1.264.dist-info → auto_coder-0.1.266.dist-info}/entry_points.txt +0 -0
  53. {auto_coder-0.1.264.dist-info → auto_coder-0.1.266.dist-info}/top_level.txt +0 -0
autocoder/rag/cache/byzer_storage_cache.py CHANGED
@@ -126,7 +126,7 @@ class ByzerStorageCache(BaseCacheManager):
  """Load cache from file"""
  if os.path.exists(self.cache_file):
  try:
- with open(self.cache_file, "r") as f:
+ with open(self.cache_file, "r",encoding="utf-8") as f:
  lines = f.readlines()
  cache = {}
  for line in lines:
@@ -147,7 +147,7 @@ class ByzerStorageCache(BaseCacheManager):

  if not fcntl:
  try:
- with open(cache_file, "w") as f:
+ with open(cache_file, "w",encoding="utf-8") as f:
  for data in self.cache.values():
  json.dump(data, f, ensure_ascii=False)
  f.write("\n")
@@ -155,12 +155,12 @@ class ByzerStorageCache(BaseCacheManager):
  logger.error(f"Error writing cache file: {str(e)}")
  else:
  lock_file = cache_file + ".lock"
- with open(lock_file, "w") as lockf:
+ with open(lock_file, "w",encoding="utf-8") as lockf:
  try:
  # Acquire the file lock
  fcntl.flock(lockf, fcntl.LOCK_EX | fcntl.LOCK_NB)
  # Write the cache file
- with open(cache_file, "w") as f:
+ with open(cache_file, "w",encoding="utf-8") as f:
  for data in self.cache.values():
  json.dump(data, f, ensure_ascii=False)
  f.write("\n")
autocoder/rag/cache/file_monitor_cache.py CHANGED
@@ -106,11 +106,11 @@ class AutoCoderRAGDocListener(BaseCacheManager):
  gitignore_path = os.path.join(self.path, ".gitignore")

  if os.path.exists(serveignore_path):
- with open(serveignore_path, "r") as ignore_file:
+ with open(serveignore_path, "r",encoding="utf-8") as ignore_file:
  patterns = ignore_file.readlines()
  return [pattern.strip() for pattern in patterns]
  elif os.path.exists(gitignore_path):
- with open(gitignore_path, "r") as ignore_file:
+ with open(gitignore_path, "r",encoding="utf-8") as ignore_file:
  patterns = ignore_file.readlines()
  return [pattern.strip() for pattern in patterns]
  return []
autocoder/rag/cache/simple_cache.py CHANGED
@@ -160,7 +160,7 @@ class AutoCoderRAGAsyncUpdateQueue(BaseCacheManager):

  cache = {}
  if os.path.exists(cache_file):
- with open(cache_file, "r") as f:
+ with open(cache_file, "r",encoding="utf-8") as f:
  for line in f:
  data = json.loads(line)
  cache[data["file_path"]] = data
@@ -171,7 +171,7 @@ class AutoCoderRAGAsyncUpdateQueue(BaseCacheManager):
  cache_file = os.path.join(cache_dir, "cache.jsonl")

  if not fcntl:
- with open(cache_file, "w") as f:
+ with open(cache_file, "w",encoding="utf-8") as f:
  for data in self.cache.values():
  try:
  json.dump(data, f, ensure_ascii=False)
@@ -181,12 +181,12 @@ class AutoCoderRAGAsyncUpdateQueue(BaseCacheManager):
  f"Failed to write {data['file_path']} to .cache/cache.jsonl: {e}")
  else:
  lock_file = cache_file + ".lock"
- with open(lock_file, "w") as lockf:
+ with open(lock_file, "w",encoding="utf-8") as lockf:
  try:
  # Acquire the file lock
  fcntl.flock(lockf, fcntl.LOCK_EX | fcntl.LOCK_NB)
  # Write the cache file
- with open(cache_file, "w") as f:
+ with open(cache_file, "w",encoding="utf-8") as f:
  for data in self.cache.values():
  try:
  json.dump(data, f, ensure_ascii=False)
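
These cache hunks all sit inside the same write path, visible in the surrounding context lines: when the POSIX-only `fcntl` module is importable, the JSONL dump is guarded by an exclusive lock on a sidecar `.lock` file; otherwise the file is written directly. A minimal, self-contained sketch of that pattern, assuming a hypothetical `write_cache` helper and an in-memory `cache` dict in place of the real cache managers:

```python
import json
import os

try:
    import fcntl  # POSIX-only; not available on Windows
except ImportError:
    fcntl = None


def write_cache(cache: dict, cache_dir: str = ".cache") -> None:
    """Dump cache entries as JSON lines, taking an exclusive lock when fcntl exists."""
    os.makedirs(cache_dir, exist_ok=True)
    cache_file = os.path.join(cache_dir, "cache.jsonl")

    def dump(f) -> None:
        for data in cache.values():
            json.dump(data, f, ensure_ascii=False)
            f.write("\n")

    if not fcntl:
        # No advisory locking available (e.g. Windows): write directly.
        with open(cache_file, "w", encoding="utf-8") as f:
            dump(f)
        return

    lock_file = cache_file + ".lock"
    with open(lock_file, "w", encoding="utf-8") as lockf:
        # Exclusive, non-blocking lock; raises BlockingIOError if another writer holds it.
        fcntl.flock(lockf, fcntl.LOCK_EX | fcntl.LOCK_NB)
        try:
            with open(cache_file, "w", encoding="utf-8") as f:
                dump(f)
        finally:
            fcntl.flock(lockf, fcntl.LOCK_UN)


if __name__ == "__main__":
    write_cache({"a.py": {"file_path": "a.py", "md5": "d41d8cd9"}})
```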
autocoder/rag/long_context_rag.py CHANGED
@@ -245,10 +245,10 @@ class LongContextRAG:
  gitignore_path = os.path.join(self.path, ".gitignore")

  if os.path.exists(serveignore_path):
- with open(serveignore_path, "r") as ignore_file:
+ with open(serveignore_path, "r",encoding="utf-8") as ignore_file:
  return pathspec.PathSpec.from_lines("gitwildmatch", ignore_file)
  elif os.path.exists(gitignore_path):
- with open(gitignore_path, "r") as ignore_file:
+ with open(gitignore_path, "r",encoding="utf-8") as ignore_file:
  return pathspec.PathSpec.from_lines("gitwildmatch", ignore_file)
  return None

autocoder/regexproject/__init__.py CHANGED
@@ -78,7 +78,8 @@ class RegexProject:
  raise ValueError("Invalid project_type format. Expected 'regex//<pattern>'")

  def output(self):
- return open(self.target_file, "r").read()
+ with open(self.target_file, "r",encoding="utf-8") as file:
+ return file.read()

  def is_regex_match(self, file_path):
  return re.search(self.regex_pattern, file_path) is not None
@@ -231,7 +232,7 @@ class RegexProject:
  self.clone_repository()

  if self.target_file:
- with open(self.target_file, "w") as file:
+ with open(self.target_file, "w",encoding="utf-8") as file:
  for code in self.get_source_codes():
  self.sources.append(code)
  file.write(f"##File: {code.module_name}\n")
autocoder/suffixproject/__init__.py CHANGED
@@ -114,7 +114,8 @@ class SuffixProject:
  return False

  def output(self):
- return open(self.target_file, "r").read()
+ with open(self.target_file, "r",encoding="utf-8") as file:
+ return file.read()

  def is_suffix_file(self, file_path):
  return any([file_path.endswith(suffix) for suffix in self.suffixs])
@@ -273,7 +274,7 @@ class SuffixProject:
  self.clone_repository()

  if self.target_file:
- with open(self.target_file, "w") as file:
+ with open(self.target_file, "w",encoding="utf-8") as file:
  for code in self.get_source_codes():
  self.sources.append(code)
  file.write(f"##File: {code.module_name}\n")
autocoder/tsproject/__init__.py CHANGED
@@ -106,7 +106,8 @@ class TSProject:
  return False

  def output(self):
- return open(self.target_file, "r").read()
+ with open(self.target_file, "r",encoding="utf-8") as file:
+ return file.read()

  def read_file_content(self, file_path):
  return FileUtils.read_file(file_path)
@@ -308,7 +309,7 @@ class TSProject:
  self.clone_repository()

  if self.target_file:
- with open(self.target_file, "w") as file:
+ with open(self.target_file, "w",encoding="utf-8") as file:
  for code in self.get_rest_source_codes():
  self.sources.append(code)
  file.write(f"##File: {code.module_name}\n")
autocoder/utils/conversation_store.py CHANGED
@@ -34,7 +34,7 @@ def load_code_model_conversation_from_store(args: AutoCoderArgs):
  return []

  conversations = []
- with open(conversation_file, "r") as f:
+ with open(conversation_file, "r",encoding="utf-8") as f:
  for line in f:
  conversations.append(json.loads(line))

autocoder/utils/operate_config_api.py CHANGED
@@ -15,7 +15,7 @@ import hashlib
  def convert_yaml_to_config(yaml_file: str):

  args = AutoCoderArgs()
- with open(yaml_file, "r") as f:
+ with open(yaml_file, "r",encoding="utf-8") as f:
  config = yaml.safe_load(f)
  config = load_include_files(config, yaml_file)
  for key, value in config.items():
@@ -75,7 +75,7 @@ def get_llm_friendly_package_docs(memory,
  if return_paths:
  docs.append(file_path)
  else:
- with open(file_path, "r") as f:
+ with open(file_path, "r",encoding="utf-8") as f:
  docs.append(f.read())

  return docs
@@ -130,7 +130,7 @@ def get_llm(memory, model:Optional[str]=None):
  # Temporarily save a yaml file, then read it back and convert it to args
  temp_yaml = os.path.join("actions", f"{uuid.uuid4()}.yml")
  try:
- with open(temp_yaml, "w") as f:
+ with open(temp_yaml, "w",encoding="utf-8") as f:
  f.write(convert_yaml_config_to_str(
  yaml_config=yaml_config))
  args = convert_yaml_to_config(temp_yaml)
autocoder/version.py CHANGED
@@ -1 +1 @@
- __version__ = "0.1.264"
+ __version__ = "0.1.266"
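
Taken together, nearly every hunk in 0.1.266 applies the same fix: text-mode `open()` calls gain an explicit `encoding="utf-8"`. Without it, Python falls back to `locale.getpreferredencoding()`, which is platform-dependent (commonly a legacy code page such as cp936 on Chinese-locale Windows), so files written on one machine may fail to read on another. A small sketch of that failure mode, using a hypothetical temporary file:

```python
import locale
import os
import tempfile

# What text-mode open() falls back to when no encoding is passed.
print("locale default:", locale.getpreferredencoding(False))

fd, path = tempfile.mkstemp(suffix=".jsonl")
os.close(fd)

# Writing with an explicit encoding makes the bytes on disk deterministic...
with open(path, "w", encoding="utf-8") as f:
    f.write('{"file_path": "示例.py"}\n')

# ...while reading them back with the locale default (pre-0.1.266 style) may
# raise UnicodeDecodeError or silently produce mojibake on non-UTF-8 locales.
try:
    with open(path, "r") as f:
        print("locale-default read:", f.read().strip())
except UnicodeDecodeError as e:
    print("locale-default read failed:", e)

# The 0.1.266 style reads the same bytes back unambiguously.
with open(path, "r", encoding="utf-8") as f:
    print("utf-8 read:", f.read().strip())

os.remove(path)
```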