pro-craft 0.1.16.tar.gz → 0.1.18.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of pro-craft might be problematic.
- {pro_craft-0.1.16 → pro_craft-0.1.18}/PKG-INFO +1 -1
- {pro_craft-0.1.16 → pro_craft-0.1.18}/pyproject.toml +1 -1
- {pro_craft-0.1.16 → pro_craft-0.1.18}/src/pro_craft/prompt_craft/async_.py +7 -2
- {pro_craft-0.1.16 → pro_craft-0.1.18}/src/pro_craft/prompt_craft/sync.py +26 -19
- {pro_craft-0.1.16 → pro_craft-0.1.18}/src/pro_craft/server/router/prompt.py +23 -1
- {pro_craft-0.1.16 → pro_craft-0.1.18}/src/pro_craft.egg-info/PKG-INFO +1 -1
- {pro_craft-0.1.16 → pro_craft-0.1.18}/README.md +0 -0
- {pro_craft-0.1.16 → pro_craft-0.1.18}/setup.cfg +0 -0
- {pro_craft-0.1.16 → pro_craft-0.1.18}/src/pro_craft/__init__.py +0 -0
- {pro_craft-0.1.16 → pro_craft-0.1.18}/src/pro_craft/code_helper/coder.py +0 -0
- {pro_craft-0.1.16 → pro_craft-0.1.18}/src/pro_craft/code_helper/designer.py +0 -0
- {pro_craft-0.1.16 → pro_craft-0.1.18}/src/pro_craft/database.py +0 -0
- {pro_craft-0.1.16 → pro_craft-0.1.18}/src/pro_craft/file_manager.py +0 -0
- {pro_craft-0.1.16 → pro_craft-0.1.18}/src/pro_craft/log.py +0 -0
- {pro_craft-0.1.16 → pro_craft-0.1.18}/src/pro_craft/prompt_craft/__init__.py +0 -0
- {pro_craft-0.1.16 → pro_craft-0.1.18}/src/pro_craft/prompt_craft/evals.py +0 -0
- {pro_craft-0.1.16 → pro_craft-0.1.18}/src/pro_craft/prompt_craft/new.py +0 -0
- {pro_craft-0.1.16 → pro_craft-0.1.18}/src/pro_craft/server/mcp/__init__.py +0 -0
- {pro_craft-0.1.16 → pro_craft-0.1.18}/src/pro_craft/server/mcp/prompt.py +0 -0
- {pro_craft-0.1.16 → pro_craft-0.1.18}/src/pro_craft/server/router/__init__.py +0 -0
- {pro_craft-0.1.16 → pro_craft-0.1.18}/src/pro_craft/utils.py +0 -0
- {pro_craft-0.1.16 → pro_craft-0.1.18}/src/pro_craft.egg-info/SOURCES.txt +0 -0
- {pro_craft-0.1.16 → pro_craft-0.1.18}/src/pro_craft.egg-info/dependency_links.txt +0 -0
- {pro_craft-0.1.16 → pro_craft-0.1.18}/src/pro_craft.egg-info/requires.txt +0 -0
- {pro_craft-0.1.16 → pro_craft-0.1.18}/src/pro_craft.egg-info/top_level.txt +0 -0
- {pro_craft-0.1.16 → pro_craft-0.1.18}/tests/test22.py +0 -0
- {pro_craft-0.1.16 → pro_craft-0.1.18}/tests/test_coder.py +0 -0
- {pro_craft-0.1.16 → pro_craft-0.1.18}/tests/test_designer.py +0 -0
src/pro_craft/prompt_craft/async_.py

@@ -241,7 +241,8 @@ class AsyncIntel():
             chat_history = s_prompt or system_result
             await self.save_prompt_increment_version(prompt_id,
                                                      new_prompt = chat_history,
-
+                                                     use_case = " summary ",
+                                                     score = 60,
                                                      session = session)

     async def prompt_finetune_to_sql(
@@ -290,7 +291,8 @@ class AsyncIntel():
             new_prompt = prompt
             await self.save_prompt_increment_version(prompt_id = prompt_id,
                                                      new_prompt = new_prompt,
-
+                                                     use_case = " finetune ",
+                                                     score = 60,
                                                      session = session)


@@ -344,6 +346,7 @@ class AsyncIntel():
                 prompt_id = prompt_id,
                 new_prompt = "做一些处理",
                 use_case = input_,
+                score = 60,
                 session = session
             )
             ai_result = await self.intellect_remove(input_data = input_data,
@@ -390,6 +393,7 @@ class AsyncIntel():
                 chat_history = input_prompt + "\nassistant:\n" + ai_result # 用聊天记录作为完整提示词
                 await self.save_prompt_increment_version(prompt_id, chat_history,
                                                          use_case = input_,
+                                                         score = 60,
                                                          session = session)

             elif result_obj.action_type == "summary":
@@ -508,6 +512,7 @@ class AsyncIntel():
                 chat_history = input_prompt + "\nassistant:\n" + ai_result # 用聊天记录作为完整提示词
                 await self.save_prompt_increment_version(prompt_id, chat_history,
                                                          use_case = input_,
+                                                         score = 60,
                                                          session = session)

             elif result_obj.action_type == "summary":
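Across the AsyncIntel hunks above, the only functional change is that each save_prompt_increment_version call now also passes score = 60 (and, in the first two hunks, an explicit use_case). The keyword interface below is merely inferred from the call sites visible in this diff; it is an assumption for orientation, not the package's actual definition:

    # Hypothetical interface inferred from the call sites in this diff only.
    class PromptStore:
        async def save_prompt_increment_version(self, prompt_id, new_prompt,
                                                 use_case="", action_type="inference",
                                                 demand="", score=60, session=None):
            """Persist a new prompt version with its use case and quality score."""
            ...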
src/pro_craft/prompt_craft/sync.py

@@ -397,14 +397,10 @@ class Intel():
                 prompt_id = prompt_id,
                 new_prompt = "做一些处理",
                 use_case = input_,
+                score = 60,
                 session = session
             )
-            ai_result =
-                output_format = output_format,
-                prompt_id = prompt_id,
-                version = version,
-                inference_save_case = inference_save_case
-            )
+            ai_result = "初始化完成"
             return ai_result
         prompt = result_obj.prompt
         if result_obj.action_type == "inference":
@@ -423,24 +419,35 @@ class Intel():
             # 则训练推广

             # 新版本 默人修改会 inference 状态
-
+            prompt = result_obj.prompt
             before_input = result_obj.use_case
             demand = result_obj.demand
+
+            # assert demand
+            # # 注意, 这里的调整要求使用最初的那个输入, 最好一口气调整好
+
+            # if input_ == before_input: # 输入没变, 说明还是针对同一个输入进行讨论
+            #     # input_prompt = chat_history + "\nuser:" + demand
+            #     input_prompt = chat_history + "\nuser:" + demand + output_format
+            # else:
+            #     # input_prompt = chat_history + "\nuser:" + demand + "\n-----input----\n" + input_
+            #     input_prompt = chat_history + "\nuser:" + demand + output_format + "\n-----input----\n" + input_

+            # ai_result = self.llm.product(input_prompt)
+            # chat_history = input_prompt + "\nassistant:\n" + ai_result # 用聊天记录作为完整提示词

-
-
-            chat_history = prompt
-            if input_ == before_input: # 输入没变, 说明还是针对同一个输入进行讨论
-                # input_prompt = chat_history + "\nuser:" + demand
-                input_prompt = chat_history + "\nuser:" + demand + output_format
+            if input_ == before_input:
+                new_prompt = prompt + "\nuser:" + demand
             else:
-
-
-
-
-
-
+                new_prompt = prompt + "\nuser:" + input_
+
+            ai_result = self.llm.product(new_prompt + output_format)
+
+            save_new_prompt = new_prompt + "\nassistant:\n" + ai_result
+
+
+            self.save_prompt_increment_version(prompt_id,
+                                               new_prompt=save_new_prompt,
                                                use_case = input_,
                                                score = 60,
                                                session = session)
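Net effect of the sync.py hunk above: the chat_history-based assembly is dropped, a fresh new_prompt is built from the stored prompt plus either the follow-up demand (same input as last round) or the new input, the model is called once, and the resulting transcript is saved with a fixed score of 60. A minimal standalone sketch of that flow follows; llm_product and save_version are stand-in stubs for illustration, not pro_craft APIs:

    # Standalone sketch of the new flow; llm_product / save_version are stubs.
    def patch_prompt(prompt: str, before_input: str, input_: str,
                     demand: str, output_format: str) -> str:
        # Same input as last round: treat the demand as a follow-up turn;
        # otherwise restart the conversation from the new input.
        if input_ == before_input:
            new_prompt = prompt + "\nuser:" + demand
        else:
            new_prompt = prompt + "\nuser:" + input_

        ai_result = llm_product(new_prompt + output_format)          # stand-in for self.llm.product
        save_new_prompt = new_prompt + "\nassistant:\n" + ai_result  # keep the full transcript
        save_version(new_prompt=save_new_prompt, use_case=input_, score=60)
        return ai_result

    def llm_product(text: str) -> str:
        return "stub reply"

    def save_version(**kwargs) -> None:
        pass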
src/pro_craft/server/router/prompt.py

@@ -29,7 +29,8 @@ def create_router(database_url: str,
         tags=["prompt"] # 这里使用 Depends 确保每次请求都验证
     )

-    @router.get("/push_order"
+    @router.get("/push_order",
+                description="可选 train,inference,summary,finetune,patch",)
     async def push_order(demand: str, prompt_id: str, action_type: str = "train"):
        result = intels.push_action_order(
            demand=demand,
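The new description string only documents the allowed action_type values (train, inference, summary, finetune, patch) in the route's OpenAPI docs; the handler itself is unchanged. Assuming the router is mounted on a local FastAPI app at port 8000 (host, port, and the example values below are assumptions), a call selecting one of those actions could look like:

    import httpx

    # Base URL and example parameters are illustrative assumptions.
    resp = httpx.get(
        "http://localhost:8000/push_order",
        params={"demand": "tighten the summary", "prompt_id": "demo-prompt", "action_type": "summary"},
    )
    print(resp.json())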
@@ -52,4 +53,25 @@ def create_router(database_url: str,
         result = intels.sync_prompt_data_to_database(slave_database_url)
         return {"message": "success","result":result}

+
+    @router.get("/roll_back")
+    async def roll_back(prompt_id:str,version:str):
+        with create_session(intels.engine) as session:
+            result = intels.get_prompts_from_sql(
+                prompt_id=prompt_id,
+                version = version,
+                session=session
+            )
+            assert result.version == version
+            intels.save_prompt_increment_version(
+                prompt_id = prompt_id,
+                new_prompt = result.prompt,
+                use_case = result.use_case,
+                action_type = "inference",
+                demand = "",
+                score = 61,
+                session = session)
+        return {"message": "success","result":result.version}
+
     return router
+
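The new /roll_back route restores an older stored version by re-saving it as a fresh increment (action_type "inference", score 61) rather than deleting newer rows. Assuming the same local deployment as in the previous example, rolling a prompt back to a specific version could look like:

    import httpx

    # Host, port and the example prompt_id/version values are assumptions.
    resp = httpx.get(
        "http://localhost:8000/roll_back",
        params={"prompt_id": "demo-prompt", "version": "3"},
    )
    print(resp.json())  # {"message": "success", "result": "3"} if that version exists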