auto-coder 0.1.197__py3-none-any.whl → 0.1.199__py3-none-any.whl

This diff shows the changes between two publicly released versions of this package, as published to one of the supported registries. It is provided for informational purposes only and reflects the package contents exactly as they appear in the registry.

Potentially problematic release.


This version of auto-coder might be problematic; consult the registry's advisory page for this release for more details.

autocoder/command_args.py CHANGED
@@ -102,6 +102,7 @@ def parse_args(input_args: Optional[List[str]] = None) -> AutoCoderArgs:
102
102
  "--print_request", action="store_true", help=desc["print_request"]
103
103
  )
104
104
  parser.add_argument("--code_model", default="", help=desc["code_model"])
105
+ parser.add_argument("--system_prompt", default="", help=desc["system_prompt"])
105
106
  parser.add_argument("--planner_model", default="",
106
107
  help=desc["planner_model"])
107
108
  parser.add_argument(
@@ -98,6 +98,9 @@ class EnvInfo(pydantic.BaseModel):
98
98
  conda_env: Optional[str]
99
99
  virtualenv: Optional[str]
100
100
  has_bash: bool
101
+ default_shell: Optional[str]
102
+ home_dir: Optional[str]
103
+ cwd: Optional[str]
101
104
 
102
105
 
103
106
  def has_sufficient_content(file_content, min_line_count=10):
@@ -155,8 +158,19 @@ def detect_env() -> EnvInfo:
155
158
  )
156
159
 
157
160
  conda_env = os.environ.get("CONDA_DEFAULT_ENV")
158
-
159
161
  virtualenv = os.environ.get("VIRTUAL_ENV")
162
+
163
+ # Get default shell
164
+ if os_name == "win32":
165
+ default_shell = os.environ.get("COMSPEC", "cmd.exe")
166
+ else:
167
+ default_shell = os.environ.get("SHELL", "/bin/sh")
168
+
169
+ # Get home directory
170
+ home_dir = os.path.expanduser("~")
171
+
172
+ # Get current working directory
173
+ cwd = os.getcwd()
160
174
 
161
175
  has_bash = True
162
176
  try:
@@ -171,6 +185,9 @@ def detect_env() -> EnvInfo:
171
185
  conda_env=conda_env,
172
186
  virtualenv=virtualenv,
173
187
  has_bash=has_bash,
188
+ default_shell=default_shell,
189
+ home_dir=home_dir,
190
+ cwd=cwd,
174
191
  )
175
192
 
176
193
 
@@ -231,6 +248,7 @@ class AutoCoderArgs(pydantic.BaseModel):
231
248
  sd_model: Optional[str] = ""
232
249
  emb_model: Optional[str] = ""
233
250
  code_model: Optional[str] = ""
251
+ system_prompt: Optional[str] = ""
234
252
  text2voice_model: Optional[str] = ""
235
253
  voice2text_model: Optional[str] = ""
236
254
 
@@ -2,6 +2,8 @@ from typing import List, Dict, Tuple
2
2
  from autocoder.common.types import Mode
3
3
  from autocoder.common import AutoCoderArgs
4
4
  import byzerllm
5
+ from autocoder.utils.queue_communicate import queue_communicate, CommunicateEvent, CommunicateEventType
6
+ from autocoder.common import sys_prompt
5
7
 
6
8
 
7
9
  class CodeAutoGenerate:
@@ -146,6 +148,15 @@ class CodeAutoGenerate:
146
148
  ) -> Tuple[str, Dict[str, str]]:
147
149
  llm_config = {"human_as_model": self.args.human_as_model}
148
150
 
151
+ if self.args.request_id and not self.args.skip_events:
152
+ queue_communicate.send_event_no_wait(
153
+ request_id=self.args.request_id,
154
+ event=CommunicateEvent(
155
+ event_type=CommunicateEventType.CODE_GENERATE_START.value,
156
+ data=query,
157
+ ),
158
+ )
159
+
149
160
  if self.args.template == "common":
150
161
  init_prompt = self.single_round_instruction.prompt(
151
162
  instruction=query, content=source_content, context=self.args.context
@@ -158,10 +169,28 @@ class CodeAutoGenerate:
158
169
  with open(self.args.target_file, "w") as file:
159
170
  file.write(init_prompt)
160
171
 
161
- conversations = [{"role": "user", "content": init_prompt}]
172
+ conversations = []
173
+
174
+ if self.args.system_prompt and self.args.system_prompt.strip() == "claude":
175
+ conversations.append({"role": "system", "content": sys_prompt.prompt()})
176
+ elif self.args.system_prompt:
177
+ conversations.append({"role": "system", "content": self.args.system_prompt})
178
+
179
+ conversations.append({"role": "user", "content": init_prompt})
180
+
162
181
 
163
182
  t = self.llm.chat_oai(conversations=conversations, llm_config=llm_config)
164
183
  conversations.append({"role": "assistant", "content": t[0].output})
184
+
185
+ if self.args.request_id and not self.args.skip_events:
186
+ queue_communicate.send_event_no_wait(
187
+ request_id=self.args.request_id,
188
+ event=CommunicateEvent(
189
+ event_type=CommunicateEventType.CODE_GENERATE_END.value,
190
+ data="",
191
+ ),
192
+ )
193
+
165
194
  return [t[0].output], conversations
166
195
 
167
196
  def multi_round_run(
@@ -2,7 +2,8 @@ from typing import List, Dict, Tuple
2
2
  from autocoder.common.types import Mode
3
3
  from autocoder.common import AutoCoderArgs
4
4
  import byzerllm
5
-
5
+ from autocoder.utils.queue_communicate import queue_communicate, CommunicateEvent, CommunicateEventType
6
+ from autocoder.common import sys_prompt
6
7
 
7
8
  class CodeAutoGenerateDiff:
8
9
  def __init__(
@@ -289,6 +290,15 @@ class CodeAutoGenerateDiff:
289
290
  ) -> Tuple[str, Dict[str, str]]:
290
291
  llm_config = {"human_as_model": self.args.human_as_model}
291
292
 
293
+ if self.args.request_id and not self.args.skip_events:
294
+ queue_communicate.send_event_no_wait(
295
+ request_id=self.args.request_id,
296
+ event=CommunicateEvent(
297
+ event_type=CommunicateEventType.CODE_GENERATE_START.value,
298
+ data=query,
299
+ ),
300
+ )
301
+
292
302
  init_prompt = self.single_round_instruction.prompt(
293
303
  instruction=query, content=source_content, context=self.args.context
294
304
  )
@@ -296,10 +306,24 @@ class CodeAutoGenerateDiff:
296
306
  with open(self.args.target_file, "w") as file:
297
307
  file.write(init_prompt)
298
308
 
299
- conversations = [{"role": "user", "content": init_prompt}]
309
+ conversations = []
310
+ if self.args.system_prompt and self.args.system_prompt.strip() == "claude":
311
+ conversations.append({"role": "system", "content": sys_prompt.prompt()})
312
+ else:
313
+ conversations.append({"role": "user", "content": init_prompt})
300
314
 
301
315
  t = self.llm.chat_oai(conversations=conversations, llm_config=llm_config)
302
316
  conversations.append({"role": "assistant", "content": t[0].output})
317
+
318
+ if self.args.request_id and not self.args.skip_events:
319
+ queue_communicate.send_event_no_wait(
320
+ request_id=self.args.request_id,
321
+ event=CommunicateEvent(
322
+ event_type=CommunicateEventType.CODE_GENERATE_END.value,
323
+ data="",
324
+ ),
325
+ )
326
+
303
327
  return [t[0].output], conversations
304
328
 
305
329
  def multi_round_run(
@@ -3,6 +3,12 @@ from autocoder.common.types import Mode
3
3
  from autocoder.common import AutoCoderArgs
4
4
  import byzerllm
5
5
  from autocoder.common import sys_prompt
6
+ from autocoder.utils.queue_communicate import (
7
+ queue_communicate,
8
+ CommunicateEvent,
9
+ CommunicateEventType,
10
+ )
11
+ import json
6
12
 
7
13
 
8
14
  class CodeAutoGenerateEditBlock:
@@ -44,7 +50,7 @@ class CodeAutoGenerateEditBlock:
44
50
  """
45
51
 
46
52
  @byzerllm.prompt(llm=lambda self: self.llm)
47
- def multi_round_instruction(self, instruction: str, content: str,context:str="") -> str:
53
+ def multi_round_instruction(self, instruction: str, content: str, context: str = "") -> str:
48
54
  """
49
55
  如果你需要生成代码,对于每个需要更改的文件,你需要按 *SEARCH/REPLACE block* 的格式进行生成。
50
56
 
@@ -141,7 +147,7 @@ class CodeAutoGenerateEditBlock:
141
147
 
142
148
  Here are the *SEARCH/REPLACE* blocks:
143
149
 
144
-
150
+
145
151
  {{ fence_0 }}python
146
152
  ##File: /tmp/projects/mathweb/hello.py
147
153
  <<<<<<< SEARCH
@@ -152,7 +158,7 @@ class CodeAutoGenerateEditBlock:
152
158
  print("hello")
153
159
  >>>>>>> REPLACE
154
160
  {{ fence_1 }}
155
-
161
+
156
162
  {{ fence_0 }}python
157
163
  ##File: /tmp/projects/mathweb/main.py
158
164
  <<<<<<< SEARCH
@@ -206,7 +212,7 @@ class CodeAutoGenerateEditBlock:
206
212
  }
207
213
 
208
214
  @byzerllm.prompt(llm=lambda self: self.llm)
209
- def single_round_instruction(self, instruction: str, content: str, context:str="") -> str:
215
+ def single_round_instruction(self, instruction: str, content: str, context: str = "") -> str:
210
216
  """
211
217
  如果你需要生成代码,对于每个需要更改的文件,你需要按 *SEARCH/REPLACE block* 的格式进行生成。
212
218
 
@@ -302,7 +308,7 @@ class CodeAutoGenerateEditBlock:
302
308
  2. Remove hello() from main.py and replace it with an import.
303
309
 
304
310
  Here are the *SEARCH/REPLACE* blocks:
305
-
311
+
306
312
  {{ fence_0 }}python
307
313
  ##File: /tmp/projects/mathweb/hello.py
308
314
  <<<<<<< SEARCH
@@ -313,7 +319,7 @@ class CodeAutoGenerateEditBlock:
313
319
  print("hello")
314
320
  >>>>>>> REPLACE
315
321
  {{ fence_1 }}
316
-
322
+
317
323
  {{ fence_0 }}python
318
324
  ##File: /tmp/projects/mathweb/main.py
319
325
  <<<<<<< SEARCH
@@ -353,7 +359,7 @@ class CodeAutoGenerateEditBlock:
353
359
  "fence_0": self.fence_0,
354
360
  "fence_1": self.fence_1,
355
361
  }
356
-
362
+
357
363
  return {
358
364
  "structure": (
359
365
  self.action.pp.get_tree_like_directory_structure()
@@ -371,7 +377,7 @@ class CodeAutoGenerateEditBlock:
371
377
 
372
378
  if self.args.template == "common":
373
379
  init_prompt = self.single_round_instruction.prompt(
374
- instruction=query, content=source_content,context=self.args.context
380
+ instruction=query, content=source_content, context=self.args.context
375
381
  )
376
382
  elif self.args.template == "auto_implement":
377
383
  init_prompt = self.auto_implement_function.prompt(
@@ -380,15 +386,42 @@ class CodeAutoGenerateEditBlock:
380
386
 
381
387
  with open(self.args.target_file, "w") as file:
382
388
  file.write(init_prompt)
383
-
384
389
 
385
390
  conversations = []
386
- # conversations.append({"role": "system", "content": sys_prompt.prompt()})
391
+
392
+ if self.args.system_prompt and self.args.system_prompt.strip() == "claude":
393
+ conversations.append({"role": "system", "content": sys_prompt.prompt()})
394
+ elif self.args.system_prompt:
395
+ conversations.append({"role": "system", "content": self.args.system_prompt})
396
+
387
397
  conversations.append({"role": "user", "content": init_prompt})
388
398
 
389
- t = self.llm.chat_oai(conversations=conversations, llm_config=llm_config)
390
- conversations.append({"role": "assistant", "content": t[0].output})
391
- return [t[0].output], conversations
399
+ if self.args.request_id and not self.args.skip_events:
400
+ _ = queue_communicate.send_event(
401
+ request_id=self.args.request_id,
402
+ event=CommunicateEvent(
403
+ event_type=CommunicateEventType.CODE_GENERATE_START.value,
404
+ data=json.dumps({}, ensure_ascii=False),
405
+ ),
406
+ )
407
+
408
+ t = self.llm.chat_oai(
409
+ conversations=conversations, llm_config=llm_config)
410
+ conversations.append({"role": "assistant", "content": t[0].output})
411
+
412
+ _ = queue_communicate.send_event(
413
+ request_id=self.args.request_id,
414
+ event=CommunicateEvent(
415
+ event_type=CommunicateEventType.CODE_GENERATE_END.value,
416
+ data=json.dumps({}, ensure_ascii=False),
417
+ ),
418
+ )
419
+ return [t[0].output], conversations
420
+ else:
421
+ t = self.llm.chat_oai(
422
+ conversations=conversations, llm_config=llm_config)
423
+ conversations.append({"role": "assistant", "content": t[0].output})
424
+ return [t[0].output], conversations
392
425
 
393
426
  def multi_round_run(
394
427
  self, query: str, source_content: str, max_steps: int = 10
@@ -398,7 +431,7 @@ class CodeAutoGenerateEditBlock:
398
431
 
399
432
  if self.args.template == "common":
400
433
  init_prompt = self.multi_round_instruction.prompt(
401
- instruction=query, content=source_content,context=self.args.context
434
+ instruction=query, content=source_content, context=self.args.context
402
435
  )
403
436
  elif self.args.template == "auto_implement":
404
437
  init_prompt = self.auto_implement_function.prompt(
@@ -412,7 +445,8 @@ class CodeAutoGenerateEditBlock:
412
445
  with open(self.args.target_file, "w") as file:
413
446
  file.write(init_prompt)
414
447
 
415
- t = self.llm.chat_oai(conversations=conversations, llm_config=llm_config)
448
+ t = self.llm.chat_oai(conversations=conversations,
449
+ llm_config=llm_config)
416
450
 
417
451
  result.append(t[0].output)
418
452
 
@@ -430,7 +464,8 @@ class CodeAutoGenerateEditBlock:
430
464
  with open(self.args.target_file, "w") as file:
431
465
  file.write("继续")
432
466
 
433
- t = self.llm.chat_oai(conversations=conversations, llm_config=llm_config)
467
+ t = self.llm.chat_oai(
468
+ conversations=conversations, llm_config=llm_config)
434
469
 
435
470
  result.append(t[0].output)
436
471
  conversations.append({"role": "assistant", "content": t[0].output})
@@ -2,7 +2,8 @@ from typing import List, Dict, Tuple
2
2
  from autocoder.common.types import Mode
3
3
  from autocoder.common import AutoCoderArgs
4
4
  import byzerllm
5
-
5
+ from autocoder.utils.queue_communicate import queue_communicate, CommunicateEvent, CommunicateEventType
6
+ from autocoder.common import sys_prompt
6
7
 
7
8
  class CodeAutoGenerateStrictDiff:
8
9
  def __init__(
@@ -260,6 +261,15 @@ class CodeAutoGenerateStrictDiff:
260
261
  ) -> Tuple[str, Dict[str, str]]:
261
262
  llm_config = {"human_as_model": self.args.human_as_model}
262
263
 
264
+ if self.args.request_id and not self.args.skip_events:
265
+ queue_communicate.send_event_no_wait(
266
+ request_id=self.args.request_id,
267
+ event=CommunicateEvent(
268
+ event_type=CommunicateEventType.CODE_GENERATE_START.value,
269
+ data=query,
270
+ ),
271
+ )
272
+
263
273
  init_prompt = self.single_round_instruction.prompt(
264
274
  instruction=query, content=source_content, context=self.args.context
265
275
  )
@@ -267,10 +277,26 @@ class CodeAutoGenerateStrictDiff:
267
277
  with open(self.args.target_file, "w") as file:
268
278
  file.write(init_prompt)
269
279
 
270
- conversations = [{"role": "user", "content": init_prompt}]
280
+ conversations = []
281
+ if self.args.system_prompt and self.args.system_prompt.strip() == "claude":
282
+ conversations.append({"role": "system", "content": sys_prompt.prompt()})
283
+ elif self.args.system_prompt:
284
+ conversations.append({"role": "system", "content": self.args.system_prompt})
285
+
286
+ conversations.append({"role": "user", "content": init_prompt})
271
287
 
272
288
  t = self.llm.chat_oai(conversations=conversations, llm_config=llm_config)
273
289
  conversations.append({"role": "assistant", "content": t[0].output})
290
+
291
+ if self.args.request_id and not self.args.skip_events:
292
+ queue_communicate.send_event_no_wait(
293
+ request_id=self.args.request_id,
294
+ event=CommunicateEvent(
295
+ event_type=CommunicateEventType.CODE_GENERATE_END.value,
296
+ data="",
297
+ ),
298
+ )
299
+
274
300
  return [t[0].output], conversations
275
301
 
276
302
  def multi_round_run(
@@ -207,15 +207,17 @@ class CodeAutoMergeEditBlock:
207
207
  codes = self.get_edits(content)
208
208
  changes_to_make = []
209
209
  changes_made = False
210
- unmerged_blocks = []
210
+ unmerged_blocks = []
211
+ merged_blocks = []
211
212
 
212
213
  # First, check if there are any changes to be made
213
214
  file_content_mapping = {}
214
215
  for block in codes:
215
216
  file_path, head, update = block
216
217
  if not os.path.exists(file_path):
217
- changes_to_make.append((file_path, None, update))
218
+ changes_to_make.append((file_path, None, update))
218
219
  file_content_mapping[file_path] = update
220
+ merged_blocks.append((file_path, "", update, 1))
219
221
  changes_made = True
220
222
  else:
221
223
  if file_path not in file_content_mapping:
@@ -232,7 +234,8 @@ class CodeAutoMergeEditBlock:
232
234
  changes_to_make.append(
233
235
  (file_path, existing_content, new_content))
234
236
  file_content_mapping[file_path] = new_content
235
- changes_made = True
237
+ merged_blocks.append((file_path, head, update, 1))
238
+ changes_made = True
236
239
  else:
237
240
  # If the SEARCH BLOCK is not found exactly, then try to use
238
241
  # the similarity ratio to find the best matching block
@@ -247,7 +250,8 @@ class CodeAutoMergeEditBlock:
247
250
  (file_path, existing_content, new_content)
248
251
  )
249
252
  file_content_mapping[file_path] = new_content
250
- changes_made = True
253
+ merged_blocks.append((file_path, head, update, similarity))
254
+ changes_made = True
251
255
  else:
252
256
  unmerged_blocks.append(
253
257
  (file_path, head, update, similarity))
@@ -309,12 +313,14 @@ class CodeAutoMergeEditBlock:
309
313
  if self.args.request_id and not self.args.skip_events:
310
314
  # collect modified files
311
315
  event_data = []
312
- for file_path, old_block, new_block in changes_to_make:
316
+ for code in merged_blocks:
317
+ file_path, head, update, similarity = code
313
318
  event_data.append(
314
319
  {
315
- "file_path": file_path,
316
- "old_block": old_block,
317
- "new_block": new_block,
320
+ "file_path": file_path,
321
+ "head": head,
322
+ "update": update,
323
+ "similarity": similarity,
318
324
  }
319
325
  )
320
326
 
@@ -2,11 +2,11 @@ import byzerllm
2
2
  import datetime
3
3
 
4
4
  @byzerllm.prompt()
5
- def sys_prompt():
5
+ def claude_sys_prompt():
6
6
  '''
7
7
  The assistant is Claude, created by Anthropic.
8
8
 
9
- The current date is {{currentDateTime}}.
9
+ The current date is {{ currentDateTime }}.
10
10
 
11
11
  Claude’s knowledge base was last updated in April 2024. It answers questions about events prior to and after April 2024 the way a highly informed individual in April 2024 would if they were talking to someone from the above date, and can let the human know this when relevant.
12
12
 
autocoder/lang.py CHANGED
@@ -61,7 +61,8 @@ lang_desc = {
61
61
  "screenshot_desc": "Generate screenshots of a webpage",
62
62
  "screenshot_url": "The URL of the webpage to capture",
63
63
  "screenshot_output": "The directory to save the screenshots",
64
- "code_model": "The name of the code model to use. Default is empty",
64
+ "code_model": "The name of the code model to use. Default is empty",
65
+ "system_prompt": "The system prompt for the model. Default is empty",
65
66
  "planner_model": "The name of the planner model to use. Default is empty",
66
67
  "designer_model": "The name of the designer model to use. Default is empty",
67
68
  "query_prefix": "The query prefix",
@@ -141,6 +142,7 @@ lang_desc = {
141
142
  "screenshot_url": "要捕获的网页的URL",
142
143
  "screenshot_output": "保存截图的目录",
143
144
  "code_model": "要使用的代码模型的名称。默认为空",
145
+ "system_prompt": "模型使用的系统提示词。默认为空",
144
146
  "next_desc": "基于上一个action文件创建一个新的action文件",
145
147
  "planner_model": "要使用的规划模型的名称。默认为空",
146
148
  "designer_model": "要使用的设计模型的名称。默认为空",
@@ -14,6 +14,8 @@ class CommunicateEventType(Enum):
14
14
  CODE_UNMERGE_RESULT = "code_unmerge_result"
15
15
  CODE_START = "code_start"
16
16
  CODE_END = "code_end"
17
+ CODE_GENERATE_START = "code_generate_start"
18
+ CODE_GENERATE_END = "code_generate_end"
17
19
  CODE_HUMAN_AS_MODEL = "code_human_as_model"
18
20
  ASK_HUMAN = "ask_human"
19
21
  CODE_ERROR = "code_error"
autocoder/version.py CHANGED
@@ -1 +1 @@
1
- __version__ = "0.1.197"
1
+ __version__ = "0.1.199"