auto-coder 0.1.213-py3-none-any.whl → 0.1.214-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of auto-coder might be problematic; see the registry's advisory page for more details.

@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: auto-coder
3
- Version: 0.1.213
3
+ Version: 0.1.214
4
4
  Summary: AutoCoder: AutoCoder
5
5
  Author: allwefantasy
6
6
  Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
@@ -4,11 +4,11 @@ autocoder/auto_coder_lang.py,sha256=Rtupq6N3_HT7JRhDKdgCBcwRaiAnyCOR_Gsp4jUomrI,
4
4
  autocoder/auto_coder_rag.py,sha256=hU7NmCUIM__918p1RbGFlJKPVEqdr4NXjD6b1dBgEVU,21743
5
5
  autocoder/auto_coder_server.py,sha256=XU9b4SBH7zjPPXaTWWHV4_zJm-XYa6njuLQaplYJH_c,20290
6
6
  autocoder/benchmark.py,sha256=Ypomkdzd1T3GE6dRICY3Hj547dZ6_inqJbBJIp5QMco,4423
7
- autocoder/chat_auto_coder.py,sha256=WhaS6uiG5lIBrnif_Sa_M4aTBMTiVn8-eGhlDWjtdBc,91548
8
- autocoder/chat_auto_coder_lang.py,sha256=zU9VRY-l80fZnLJ0Op8A3wq27UhQHh9WcpSYU4SmnqU,8708
7
+ autocoder/chat_auto_coder.py,sha256=xg8o5Zh3Uhc9Cq4TeR2bCcyU7LiwSf8X0izyvQZyi8c,90725
8
+ autocoder/chat_auto_coder_lang.py,sha256=ReWukXKVvuzVvpbYk5O9kc1ev7XNmAv3DnuQhmpLmnc,8717
9
9
  autocoder/command_args.py,sha256=BpMbPceBzjCftPB0yOVsSmTmt61xS3gtc1WGKtcDHQs,30449
10
10
  autocoder/lang.py,sha256=U6AjVV8Rs1uLyjFCZ8sT6WWuNUxMBqkXXIOs4S120uk,14511
11
- autocoder/version.py,sha256=V-hdG0k1-v5-WK9KQgzySDX22RDvdwG4vsIdj8042Cg,24
11
+ autocoder/version.py,sha256=dPm49_fp-JGXzV8qtohQKPW6OPf1b4qK-yxv3G3f9UU,24
12
12
  autocoder/agent/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
13
13
  autocoder/agent/auto_demand_organizer.py,sha256=NWSAEsEk94vT3lGjfo25kKLMwYdPcpy9e-i21txPasQ,6942
14
14
  autocoder/agent/auto_filegroup.py,sha256=CW7bqp0FW1GIEMnl-blyAc2UGT7O9Mom0q66ITz1ckM,6635
@@ -119,9 +119,9 @@ autocoder/utils/request_event_queue.py,sha256=r3lo5qGsB1dIjzVQ05dnr0z_9Z3zOkBdP1
119
119
  autocoder/utils/request_queue.py,sha256=nwp6PMtgTCiuwJI24p8OLNZjUiprC-TsefQrhMI-yPE,3889
120
120
  autocoder/utils/rest.py,sha256=HawagAap3wMIDROGhY1730zSZrJR_EycODAA5qOj83c,8807
121
121
  autocoder/utils/tests.py,sha256=BqphrwyycGAvs-5mhH8pKtMZdObwhFtJ5MC_ZAOiLq8,1340
122
- auto_coder-0.1.213.dist-info/LICENSE,sha256=HrhfyXIkWY2tGFK11kg7vPCqhgh5DcxleloqdhrpyMY,11558
123
- auto_coder-0.1.213.dist-info/METADATA,sha256=7JrELvSZVoZAD3daumw7vNKywrDnYZaqtw8QkuFczEE,2590
124
- auto_coder-0.1.213.dist-info/WHEEL,sha256=GV9aMThwP_4oNCtvEC2ec3qUYutgWeAzklro_0m4WJQ,91
125
- auto_coder-0.1.213.dist-info/entry_points.txt,sha256=0nzHtHH4pNcM7xq4EBA2toS28Qelrvcbrr59GqD_0Ak,350
126
- auto_coder-0.1.213.dist-info/top_level.txt,sha256=Jqc0_uJSw2GwoFQAa9iJxYns-2mWla-9ok_Y3Gcznjk,10
127
- auto_coder-0.1.213.dist-info/RECORD,,
122
+ auto_coder-0.1.214.dist-info/LICENSE,sha256=HrhfyXIkWY2tGFK11kg7vPCqhgh5DcxleloqdhrpyMY,11558
123
+ auto_coder-0.1.214.dist-info/METADATA,sha256=aFfxGy747osG-wPwJjDoiPCbMCLHZyu2G2FprgZr03s,2590
124
+ auto_coder-0.1.214.dist-info/WHEEL,sha256=GV9aMThwP_4oNCtvEC2ec3qUYutgWeAzklro_0m4WJQ,91
125
+ auto_coder-0.1.214.dist-info/entry_points.txt,sha256=0nzHtHH4pNcM7xq4EBA2toS28Qelrvcbrr59GqD_0Ak,350
126
+ auto_coder-0.1.214.dist-info/top_level.txt,sha256=Jqc0_uJSw2GwoFQAa9iJxYns-2mWla-9ok_Y3Gcznjk,10
127
+ auto_coder-0.1.214.dist-info/RECORD,,
@@ -314,54 +314,29 @@ def initialize_system():
314
314
  except subprocess.CalledProcessError:
315
315
  print_status(get_message("model_error"), "error")
316
316
 
317
- # If deepseek_chat is not available, prompt user to choose a provider
318
- print_status(get_message("model_not_available"), "warning")
319
- choice = radiolist_dialog(
320
- title=get_message("provider_selection"),
321
- text=get_message("provider_selection"),
322
- values=[
323
- ("1", "硅基流动(https://siliconflow.cn)"),
324
- ("2", "Deepseek官方(https://www.deepseek.com/)"),
325
- ],
326
- ).run()
327
-
328
- if choice is None:
329
- print_status(get_message("no_provider"), "error")
330
- return
331
-
317
+ # If deepseek_chat is not available
318
+ print_status(get_message("model_not_available"), "warning")
332
319
  api_key = prompt(HTML(f"<b>{get_message('enter_api_key')} </b>"))
333
-
334
- if choice == "1":
335
- print_status(get_message("deploying_model").format("硅基流动"), "")
336
- deploy_cmd = [
337
- "easy-byzerllm",
338
- "deploy",
339
- "deepseek-ai/deepseek-v2-chat",
340
- "--token",
341
- api_key,
342
- "--alias",
343
- "deepseek_chat",
344
- ]
345
- else:
346
- print_status(get_message("deploying_model").format("Deepseek官方"), "")
347
- deploy_cmd = [
348
- "byzerllm",
349
- "deploy",
350
- "--pretrained_model_type",
351
- "saas/openai",
352
- "--cpus_per_worker",
353
- "0.001",
354
- "--gpus_per_worker",
355
- "0",
356
- "--worker_concurrency",
357
- "1000",
358
- "--num_workers",
359
- "1",
360
- "--infer_params",
361
- f"saas.base_url=https://api.deepseek.com/v1 saas.api_key={api_key} saas.model=deepseek-chat",
362
- "--model",
363
- "deepseek_chat",
364
- ]
320
+
321
+ print_status(get_message("deploying_model").format("Deepseek官方"), "")
322
+ deploy_cmd = [
323
+ "byzerllm",
324
+ "deploy",
325
+ "--pretrained_model_type",
326
+ "saas/openai",
327
+ "--cpus_per_worker",
328
+ "0.001",
329
+ "--gpus_per_worker",
330
+ "0",
331
+ "--worker_concurrency",
332
+ "1000",
333
+ "--num_workers",
334
+ "1",
335
+ "--infer_params",
336
+ f"saas.base_url=https://api.deepseek.com/v1 saas.api_key={api_key} saas.model=deepseek-chat",
337
+ "--model",
338
+ "deepseek_chat",
339
+ ]
365
340
 
366
341
  try:
367
342
  subprocess.run(deploy_cmd, check=True)
@@ -20,10 +20,10 @@ MESSAGES = {
20
20
  "model_available": "deepseek_chat model is available.",
21
21
  "model_timeout": "Command timed out. deepseek_chat model might not be available.",
22
22
  "model_error": "Error occurred while checking deepseek_chat model.",
23
- "model_not_available": "deepseek_chat model is not available. Please choose a provider:",
23
+ "model_not_available": "deepseek_chat model is not available.",
24
24
  "provider_selection": "Select a provider for deepseek_chat model:",
25
25
  "no_provider": "No provider selected. Exiting initialization.",
26
- "enter_api_key": "Please enter your API key: ",
26
+ "enter_api_key": "Please enter your API key(https://www.deepseek.com/): ",
27
27
  "deploying_model": "Deploying deepseek_chat model using {}...",
28
28
  "deploy_complete": "Deployment completed.",
29
29
  "deploy_fail": "Deployment failed. Please try again or deploy manually.",
@@ -86,10 +86,10 @@ MESSAGES = {
86
86
  "model_available": "deepseek_chat模型可用。",
87
87
  "model_timeout": "命令超时。deepseek_chat模型可能不可用。",
88
88
  "model_error": "检查deepseek_chat模型时出错。",
89
- "model_not_available": "deepseek_chat模型不可用。请选择一个提供商:",
89
+ "model_not_available": "deepseek_chat模型不可用。",
90
90
  "provider_selection": "为deepseek_chat模型选择一个提供商:",
91
91
  "no_provider": "未选择提供商。退出初始化。",
92
- "enter_api_key": "请输入您的API密钥:",
92
+ "enter_api_key": "请输入您的API密钥(https://www.deepseek.com/):",
93
93
  "deploying_model": "正在使用{}部署deepseek_chat模型...",
94
94
  "deploy_complete": "部署完成。",
95
95
  "deploy_fail": "部署失败。请重试或手动部署。",
autocoder/version.py CHANGED
@@ -1 +1 @@
1
- __version__ = "0.1.213"
1
+ __version__ = "0.1.214"