auto-coder 0.1.209__py3-none-any.whl → 0.1.212__py3-none-any.whl

This diff shows the contents of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.

Potentially problematic release.


This version of auto-coder might be problematic.

@@ -5,6 +5,7 @@ from autocoder.common import (
     split_code_into_segments,
     SourceCode,
 )
+from autocoder.common.buildin_tokenizer import BuildinTokenizer
 from autocoder.pyproject import PyProject, Level1PyProject
 from autocoder.tsproject import TSProject
 from autocoder.suffixproject import SuffixProject
@@ -23,9 +24,19 @@ import os
 from autocoder.common.image_to_page import ImageToPage, ImageToPageDirectly
 from autocoder.utils.conversation_store import store_code_model_conversation
 from loguru import logger
+import time
 
 
-class ActionTSProject:
+class BaseAction:
+    def _get_content_length(self, content: str) -> int:
+        try:
+            tokenizer = BuildinTokenizer()
+            return tokenizer.count_tokens(content)
+        except Exception as e:
+            logger.warning(f"Failed to use tokenizer to count tokens, fallback to len(): {e}")
+            return len(content)
+
+class ActionTSProject(BaseAction):
     def __init__(
         self, args: AutoCoderArgs, llm: Optional[byzerllm.ByzerLLM] = None
     ) -> None:
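
The new `BaseAction._get_content_length` helper prefers an exact token count from the bundled tokenizer and falls back to a character count if the tokenizer fails. A minimal standalone sketch of the same pattern, assuming `BuildinTokenizer().count_tokens(...)` behaves as shown in this diff (the `DemoAction` class and the 120000-token limit below are illustrative, not part of the package):

```python
from loguru import logger

from autocoder.common.buildin_tokenizer import BuildinTokenizer


class DemoAction:
    def _get_content_length(self, content: str) -> int:
        # Prefer a real token count; fall back to len() if the tokenizer fails.
        try:
            return BuildinTokenizer().count_tokens(content)
        except Exception as e:
            logger.warning(f"Failed to use tokenizer to count tokens, fallback to len(): {e}")
            return len(content)

    def check_budget(self, content: str, model_max_input_length: int = 120000) -> None:
        # Mirrors the guard added in the hunks below: warn (without truncating)
        # when the prompt exceeds the model's input budget.
        content_length = self._get_content_length(content)
        if content_length > model_max_input_length:
            logger.warning(
                f"Content(send to model) is {content_length} tokens, which is larger "
                f"than the maximum input length {model_max_input_length}"
            )
```
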
@@ -74,14 +85,15 @@ class ActionTSProject:
         args = self.args
 
         if args.execute and self.llm and not args.human_as_model:
-            if len(content) > self.args.model_max_input_length:
+            content_length = self._get_content_length(content)
+            if content_length > self.args.model_max_input_length:
                 logger.warning(
-                    f"Content length is {len(content)}, which is larger than the maximum input length {self.args.model_max_input_length}. chunk it..."
-                )
-                content = content[: self.args.model_max_input_length]
+                    f"Content(send to model) is {content_length} tokens, which is larger than the maximum input length {self.args.model_max_input_length}"
+                )
 
-        if args.execute:
+        if args.execute:
             logger.info("Auto generate the code...")
+            start_time = time.time()
             if args.auto_merge == "diff":
                 generate = CodeAutoGenerateDiff(
                     llm=self.llm, args=self.args, action=self
@@ -104,6 +116,7 @@ class ActionTSProject:
                 generate_result = generate.single_round_run(
                     query=args.query, source_content=content
                 )
+            logger.info(f"Code generation completed in {time.time() - start_time:.2f} seconds")
         merge_result = None
         if args.execute and args.auto_merge:
             logger.info("Auto merge the code...")
@@ -141,7 +154,7 @@ class ActionTSProject:
             file.write(content)
 
 
-class ActionPyScriptProject:
+class ActionPyScriptProject(BaseAction):
     def __init__(
         self, args: AutoCoderArgs, llm: Optional[byzerllm.ByzerLLM] = None
     ) -> None:
@@ -162,6 +175,8 @@ class ActionPyScriptProject:
     def process_content(self, content: str):
         args = self.args
         if args.execute:
+            logger.info("Auto generate the code...")
+            start_time = time.time()
             if args.auto_merge == "diff":
                 generate = CodeAutoGenerateDiff(
                     llm=self.llm, args=self.args, action=self
@@ -184,6 +199,8 @@ class ActionPyScriptProject:
                 generate_result = generate.single_round_run(
                     query=args.query, source_content=content
                 )
+
+            logger.info(f"Code generation completed in {time.time() - start_time:.2f} seconds")
         merge_result = None
         if args.execute and args.auto_merge:
             logger.info("Auto merge the code...")
@@ -218,11 +235,13 @@ class ActionPyScriptProject:
                 model=self.llm.default_model_name,
             )
 
+        end_time = time.time()
+        logger.info(f"Code generation completed in {end_time - start_time:.2f} seconds")
         with open(self.args.target_file, "w") as file:
             file.write(content)
 
 
-class ActionPyProject:
+class ActionPyProject(BaseAction):
     def __init__(
         self, args: AutoCoderArgs, llm: Optional[byzerllm.ByzerLLM] = None
     ) -> None:
@@ -250,15 +269,15 @@ class ActionPyProject:
         args = self.args
 
         if args.execute and self.llm and not args.human_as_model:
-            if len(content) > self.args.model_max_input_length:
+            content_length = self._get_content_length(content)
+            if content_length > self.args.model_max_input_length:
                 logger.warning(
-                    f'''Content length is {len(content)}(you may collect too much files), which is larger than the maximum input length {self.args.model_max_input_length}. chunk it...'''
+                    f'''Content(send to model) is {content_length} tokens (you may collect too much files), which is larger than the maximum input length {self.args.model_max_input_length}'''
                 )
-                content = content[: self.args.model_max_input_length]
 
         if args.execute:
             logger.info("Auto generate the code...")
-
+            start_time = time.time()
             if args.auto_merge == "diff":
                 generate = CodeAutoGenerateDiff(
                     llm=self.llm, args=self.args, action=self
@@ -283,7 +302,7 @@ class ActionPyProject:
                 generate_result = generate.single_round_run(
                     query=args.query, source_content=content
                 )
-
+            logger.info(f"Code generation completed in {time.time() - start_time:.2f} seconds")
         merge_result = None
         if args.execute and args.auto_merge:
             logger.info("Auto merge the code...")
@@ -316,13 +335,12 @@ class ActionPyProject:
                 instruction=self.args.query,
                 conversations=generate_result.conversations[0],
                 model=self.llm.default_model_name,
-            )
-
+            )
         with open(args.target_file, "w") as file:
             file.write(content)
 
 
-class ActionSuffixProject:
+class ActionSuffixProject(BaseAction):
     def __init__(
         self, args: AutoCoderArgs, llm: Optional[byzerllm.ByzerLLM] = None
     ) -> None:
@@ -346,14 +364,15 @@ class ActionSuffixProject:
         args = self.args
 
         if args.execute and self.llm and not args.human_as_model:
-            if len(content) > self.args.model_max_input_length:
+            content_length = self._get_content_length(content)
+            if content_length > self.args.model_max_input_length:
                 logger.warning(
-                    f"Content length is {len(content)}, which is larger than the maximum input length {self.args.model_max_input_length}. chunk it..."
-                )
-                content = content[: self.args.model_max_input_length]
+                    f"Content(send to model) is {content_length} tokens, which is larger than the maximum input length {self.args.model_max_input_length}"
+                )
 
         if args.execute:
             logger.info("Auto generate the code...")
+            start_time = time.time()
             if args.auto_merge == "diff":
                 generate = CodeAutoGenerateDiff(
                     llm=self.llm, args=self.args, action=self
@@ -377,7 +396,7 @@ class ActionSuffixProject:
                     query=args.query, source_content=content
                 )
 
-
+            logger.info(f"Code generation completed in {time.time() - start_time:.2f} seconds")
         merge_result = None
         if args.execute and args.auto_merge:
             logger.info("Auto merge the code...")
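
Each action's `process_content` now wraps code generation with the same timing instrumentation: record `time.time()` before the generation call and log the elapsed seconds afterwards. A minimal standalone sketch of that pattern (the `run_generation` function below is a hypothetical stand-in for the `CodeAutoGenerate*` calls in this diff):

```python
import time

from loguru import logger


def run_generation(query: str) -> str:
    # Hypothetical stand-in for generate.single_round_run(...) / multi_round_run(...).
    return f"generated code for: {query}"


def timed_generation(query: str) -> str:
    logger.info("Auto generate the code...")
    start_time = time.time()
    result = run_generation(query)
    logger.info(f"Code generation completed in {time.time() - start_time:.2f} seconds")
    return result
```
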
autocoder/version.py CHANGED
@@ -1 +1 @@
-__version__ = "0.1.209"
+__version__ = "0.1.212"