auto-coder 0.1.284__py3-none-any.whl → 0.1.286__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of auto-coder might be problematic; consult the registry's advisory page for more details.

@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: auto-coder
3
- Version: 0.1.284
3
+ Version: 0.1.286
4
4
  Summary: AutoCoder: AutoCoder
5
5
  Author: allwefantasy
6
6
  Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
@@ -11,8 +11,8 @@ autocoder/chat_auto_coder.py,sha256=G7_CIgDOTdGGPzRQDo0hEOh5p8A36oJQaYJc514xBkk,
11
11
  autocoder/chat_auto_coder_lang.py,sha256=ShOQVOnMA-WlT-fB9OrOer-xQkbcWxJGl-WMPuZcUkM,19572
12
12
  autocoder/command_args.py,sha256=9aYJ-AmPxP1sQh6ciw04FWHjSn31f2W9afXFwo8wgx4,30441
13
13
  autocoder/lang.py,sha256=U6AjVV8Rs1uLyjFCZ8sT6WWuNUxMBqkXXIOs4S120uk,14511
14
- autocoder/models.py,sha256=PlG1tKHSHwB57cKLOl5gTl5yTzFUDzCgeHPJU3N9F6Q,9106
15
- autocoder/version.py,sha256=CdPfaa9UyiMW7CWw6BaV5azX5klLdPvg_B_GcYjlyFk,23
14
+ autocoder/models.py,sha256=AyoZ-Pzy0oyYUmWCxOIRiOImsqboSfRET7LO9-UOuxI,11172
15
+ autocoder/version.py,sha256=IEWJd9XFdzl6BiBXmhjT4zjl4SdS7u6oVsHUCg0Zq0A,23
16
16
  autocoder/agent/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
17
17
  autocoder/agent/auto_demand_organizer.py,sha256=NWSAEsEk94vT3lGjfo25kKLMwYdPcpy9e-i21txPasQ,6942
18
18
  autocoder/agent/auto_filegroup.py,sha256=CW7bqp0FW1GIEMnl-blyAc2UGT7O9Mom0q66ITz1ckM,6635
@@ -131,7 +131,7 @@ autocoder/rag/cache/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSu
131
131
  autocoder/rag/cache/base_cache.py,sha256=EaYYYbclMBvnlOUoM7qonnluwZX5oSvUjdvGvFun8_8,742
132
132
  autocoder/rag/cache/byzer_storage_cache.py,sha256=gK90pf741CgccCzQ73urBorCqVyAfwU1FAqMtSorWVk,17232
133
133
  autocoder/rag/cache/file_monitor_cache.py,sha256=2TnOW8Y81Zc0WA1upRrkmQH18IMdv40CeNccmnTvd3c,4981
134
- autocoder/rag/cache/local_byzer_storage_cache.py,sha256=Uhmu5JK0tfZ8NvlcjJzcwtQRhZDpbGp_U6qLXZxVwss,17495
134
+ autocoder/rag/cache/local_byzer_storage_cache.py,sha256=kgGMYP4MIc4upXcwTmeGbWYW4dDnt-hhJKmnjsFPKU4,18977
135
135
  autocoder/rag/cache/simple_cache.py,sha256=8FMmBAfhAPcdSNUWC6Ga43LBFGXD-klwabVbzm_bciI,9347
136
136
  autocoder/rag/loaders/__init__.py,sha256=EQHEZ5Cmz-mGP2SllUTvcIbYCnF7W149dNpNItfs0yE,304
137
137
  autocoder/rag/loaders/docx_loader.py,sha256=ZswPqiiLngUEpzLhNNm1nmwEYV7ZHFEfIoXoG7c5GDU,614
@@ -169,9 +169,9 @@ autocoder/utils/types.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
169
169
  autocoder/utils/auto_coder_utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
170
170
  autocoder/utils/auto_coder_utils/chat_stream_out.py,sha256=lkJ_A-sYU36JMzjFWkk3pR6uos8oZHYt9GPsPe_CPAo,11766
171
171
  autocoder/utils/chat_auto_coder_utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
172
- auto_coder-0.1.284.dist-info/LICENSE,sha256=HrhfyXIkWY2tGFK11kg7vPCqhgh5DcxleloqdhrpyMY,11558
173
- auto_coder-0.1.284.dist-info/METADATA,sha256=NYN7m8jbf2aikPm1nXpD_hTBHMJOmVcpb0-Y8DbhveE,2643
174
- auto_coder-0.1.284.dist-info/WHEEL,sha256=GV9aMThwP_4oNCtvEC2ec3qUYutgWeAzklro_0m4WJQ,91
175
- auto_coder-0.1.284.dist-info/entry_points.txt,sha256=0nzHtHH4pNcM7xq4EBA2toS28Qelrvcbrr59GqD_0Ak,350
176
- auto_coder-0.1.284.dist-info/top_level.txt,sha256=Jqc0_uJSw2GwoFQAa9iJxYns-2mWla-9ok_Y3Gcznjk,10
177
- auto_coder-0.1.284.dist-info/RECORD,,
172
+ auto_coder-0.1.286.dist-info/LICENSE,sha256=HrhfyXIkWY2tGFK11kg7vPCqhgh5DcxleloqdhrpyMY,11558
173
+ auto_coder-0.1.286.dist-info/METADATA,sha256=U258OIH53mmFGbXDNu1KEIiI2lWExnsf-S1X8u1r0NY,2643
174
+ auto_coder-0.1.286.dist-info/WHEEL,sha256=GV9aMThwP_4oNCtvEC2ec3qUYutgWeAzklro_0m4WJQ,91
175
+ auto_coder-0.1.286.dist-info/entry_points.txt,sha256=0nzHtHH4pNcM7xq4EBA2toS28Qelrvcbrr59GqD_0Ak,350
176
+ auto_coder-0.1.286.dist-info/top_level.txt,sha256=Jqc0_uJSw2GwoFQAa9iJxYns-2mWla-9ok_Y3Gcznjk,10
177
+ auto_coder-0.1.286.dist-info/RECORD,,
autocoder/models.py CHANGED
@@ -286,3 +286,61 @@ def update_model_with_api_key(name: str, api_key: str) -> Dict:
286
286
 
287
287
  return found_model
288
288
 
289
+ def update_model(name: str, model_data: Dict) -> Dict:
290
+ """
291
+ 更新模型信息
292
+
293
+ Args:
294
+ name: 要更新的模型名称
295
+ model_data: 包含模型新信息的字典,可以包含以下字段:
296
+ - name: 模型名称
297
+ - description: 模型描述
298
+ - model_name: 模型实际名称
299
+ - model_type: 模型类型
300
+ - base_url: 基础URL
301
+ - api_key: API密钥
302
+ - is_reasoning: 是否为推理模型
303
+ - input_price: 输入价格
304
+ - output_price: 输出价格
305
+
306
+ Returns:
307
+ Dict: 更新后的模型信息,如果未找到则返回None
308
+ """
309
+ models = load_models()
310
+
311
+ # 查找要更新的模型
312
+ found = False
313
+ for i, model in enumerate(models):
314
+ if model["name"] == name:
315
+ # 更新模型字段
316
+ if "description" in model_data:
317
+ model["description"] = model_data["description"]
318
+ if "model_name" in model_data:
319
+ model["model_name"] = model_data["model_name"]
320
+ if "model_type" in model_data:
321
+ model["model_type"] = model_data["model_type"]
322
+ if "base_url" in model_data:
323
+ model["base_url"] = model_data["base_url"]
324
+ if "is_reasoning" in model_data:
325
+ model["is_reasoning"] = model_data["is_reasoning"]
326
+ if "input_price" in model_data:
327
+ model["input_price"] = float(model_data["input_price"])
328
+ if "output_price" in model_data:
329
+ model["output_price"] = float(model_data["output_price"])
330
+
331
+ # 保存更新后的模型
332
+ models[i] = model
333
+ found = True
334
+
335
+ # 如果提供了API密钥,则更新
336
+ if "api_key" in model_data and model_data["api_key"]:
337
+ update_model_with_api_key(name, model_data["api_key"])
338
+
339
+ break
340
+
341
+ if found:
342
+ save_models(models)
343
+ return models[i]
344
+
345
+ return None
346
+
@@ -66,13 +66,15 @@ class LocalByzerStorageCache(BaseCacheManager):
66
66
  required_exts,
67
67
  extra_params: Optional[AutoCoderArgs] = None,
68
68
  emb_llm: Union[ByzerLLM, SimpleByzerLLM] = None,
69
+ host: str = "127.0.0.1",
70
+ port: int = 33333,
69
71
  ):
70
72
  self.path = path
71
73
  self.ignore_spec = ignore_spec
72
74
  self.required_exts = required_exts
73
75
  self.rag_build_name = extra_params.rag_build_name
74
76
  self.storage = LocalByzerStorage("byzerai_store",
75
- "rag_test", self.rag_build_name, host="127.0.0.1", port=33333,emb_llm=emb_llm)
77
+ "rag_test", self.rag_build_name, host=host, port=port,emb_llm=emb_llm)
76
78
  self.queue = []
77
79
  self.chunk_size = 1000
78
80
  self._init_schema()
@@ -397,6 +399,27 @@ class LocalByzerStorageCache(BaseCacheManager):
397
399
 
398
400
  results = query_builder.execute()
399
401
 
402
+ logger.info(f"从缓存获取: {len(results)} 条数据")
403
+ # Preview first 5 results with all fields but limited content size
404
+ preview_results = []
405
+ for r in results[:5]:
406
+ # Create a copy of the entire result
407
+ preview = r.copy()
408
+ # Similarly limit raw_content if it exists
409
+ if "raw_content" in preview and isinstance(preview["raw_content"], str):
410
+ preview["raw_content"] = preview["raw_content"][:100] + "..." if len(preview["raw_content"]) > 100 else preview["raw_content"]
411
+ preview_results.append(preview)
412
+ logger.info(f"预览前5条数据:")
413
+
414
+ for r in preview_results:
415
+ logger.info(f"文件路径: {r['file_path']}")
416
+ logger.info(f"原始内容: {r['raw_content']}")
417
+ # 打印其他字段
418
+ for k, v in r.items():
419
+ if k not in ["file_path", "raw_content"]:
420
+ logger.info(f"{k}: {v}")
421
+ logger.info("-"*100)
422
+
400
423
  # Group results by file_path and reconstruct documents while preserving order
401
424
  # 这里还可以有排序优化,综合考虑一篇内容出现的次数以及排序位置
402
425
  file_paths = []
@@ -414,10 +437,12 @@ class LocalByzerStorageCache(BaseCacheManager):
414
437
  cached_data = self.cache[file_path]
415
438
  for doc in cached_data.content:
416
439
  if total_tokens + doc["tokens"] > self.max_output_tokens:
440
+ logger.info(f"用户tokens设置为:{self.max_output_tokens},累计tokens: {total_tokens} 当前文件: {file_path} tokens: {doc['tokens']},数据条数变化: {len(results)} -> {len(result)}")
417
441
  return result
418
442
  total_tokens += doc["tokens"]
419
443
  result[file_path] = cached_data.model_dump()
420
-
444
+
445
+ logger.info(f"用户tokens设置为:{self.max_output_tokens},累计tokens: {total_tokens},数据条数变化: {len(results)} -> {len(result)}")
421
446
  return result
422
447
 
423
448
  def get_all_files(self) -> List[FileInfo]:
autocoder/version.py CHANGED
@@ -1 +1 @@
1
- __version__ = "0.1.284"
1
+ __version__ = "0.1.286"