pro-craft 0.1.9__py3-none-any.whl → 0.1.10__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of pro-craft might be problematic.
- pro_craft/prompt_helper.py +66 -2
- pro_craft/prompt_helper_async.py +69 -39
- pro_craft/utils.py +1 -1
- {pro_craft-0.1.9.dist-info → pro_craft-0.1.10.dist-info}/METADATA +2 -1
- {pro_craft-0.1.9.dist-info → pro_craft-0.1.10.dist-info}/RECORD +7 -7
- {pro_craft-0.1.9.dist-info → pro_craft-0.1.10.dist-info}/WHEEL +0 -0
- {pro_craft-0.1.9.dist-info → pro_craft-0.1.10.dist-info}/top_level.txt +0 -0
pro_craft/prompt_helper.py
CHANGED
@@ -379,7 +379,16 @@ class Intel():
                             prompt_id: str,
                             version: str = None,
                             inference_save_case = True,
+                            push_patch = False,
                             ):
+        """
+        Usage guide:
+        1 Train: work through a single example with plenty of back-and-forth to lay the groundwork
+        2 Summarize: condense what was worked out into a complete prompt
+        3 Infer: use it in deployment
+        4 Fine-tune: adjust for formatting-type issues
+        5 Patch: for problems fine-tuning cannot fix, try a patch
+        """
         if isinstance(input_data,dict):
             input_ = json.dumps(input_data,ensure_ascii=False)
         elif isinstance(input_data,str):
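The new push_patch flag pairs with step 5 of the usage guide: when a stored prompt's action_type is "patch" (handled in the hunk below), the saved demand is appended to the prompt, and push_patch=True persists that combined text as a new prompt version with a score of 60. A minimal sketch of such a call; the method name is not visible in this hunk, so intellect_remove_format is assumed here as the synchronous counterpart of the aintellect_remove_format referenced later in this diff:

```python
from pydantic import BaseModel
from pro_craft.prompt_helper import Intel

class Extracted(BaseModel):   # illustrative output schema, not part of the package
    material: str

intel = Intel()               # constructor arguments omitted; they depend on your setup

result = intel.intellect_remove_format(   # assumed method name, see note above
    input_data={"text": "..."},           # dicts are json.dumps'ed, strings pass through
    OutputFormat=Extracted,
    prompt_id="some-prompt-id",           # placeholder id
    version=None,
    inference_save_case=True,
    push_patch=True,                      # re-save prompt + demand as a new version (score 60)
)
```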
@@ -459,8 +468,18 @@ class Intel():
                     session = session
                 )
                 ai_result = self.llm.product(prompt + output_format + "\n-----input----\n" + input_)
-            elif result_obj.action_type == "
-
+            elif result_obj.action_type == "patch":
+
+                demand = result_obj.demand
+                assert demand
+
+                chat_history = prompt + demand
+                ai_result = self.llm.product(chat_history + output_format + "\n-----input----\n" + input_)
+                if push_patch:
+                    self.save_prompt_increment_version(prompt_id, chat_history,
+                                                       use_case = input_,
+                                                       score = 60,
+                                                       session = session)
             else:
                 raise
 
@@ -536,3 +555,48 @@ class Intel():
             return wrapper
         return outer_packing
 
+    def biger(self,tasks):
+        """
+        Write the following tasks:
+        Task 1: extract knowledge fragments from the input text
+        Task 2: summarize the fragments into knowledge points
+        Task 3: tag the knowledge points
+        Task 4: score each knowledge point from 1 to 10
+        """
+
+        system_prompt = """
+        根据需求, 以这个为模板, 编写这个程序
+
+        from procraft.prompt_helper import Intel, IntellectType
+        intels = Intel()
+
+        task_1 = "素材提取-从文本中提取素材"
+
+        class Varit(BaseModel):
+            material : str
+            protagonist: str
+
+        task_2 = "素材提取-验证素材的正确性"
+
+        class Varit2(BaseModel):
+            material : str
+            real : str
+
+        result0 = "输入"
+
+        result1 = await intels.aintellect_remove_format(input_data = result0,
+                                                        OutputFormat = Varit,
+                                                        prompt_id = task_1,
+                                                        version = None,
+                                                        inference_save_case = True)
+
+        result2 = await intels.aintellect_remove_format(input_data = result1,
+                                                        OutputFormat = Varit2,
+                                                        prompt_id = task_2,
+                                                        version = None,
+                                                        inference_save_case = True)
+
+        print(result2)
+
+        """
+        return self.llm.product(system_prompt + tasks)
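biger() simply concatenates the embedded template with the caller's task description and asks the LLM to generate matching pipeline code. A small sketch of how it might be invoked; the Intel constructor arguments are omitted and depend on your environment:

```python
from pro_craft.prompt_helper import Intel

intel = Intel()   # constructor arguments omitted

tasks = """
任务1 从输入文本中提取知识片段
任务2 将知识片段总结为知识点
"""

generated_code = intel.biger(tasks)   # returns the LLM's code suggestion as a string
print(generated_code)
```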
pro_craft/prompt_helper_async.py
CHANGED
@@ -26,6 +26,10 @@ logger = Log.logger
 editing_log = logger.debug
 import re
 
+from sqlalchemy import select, desc
+
+
+
 def fix_broken_json_string(broken_json_str):
     # remove the BOM
     broken_json_str = broken_json_str.lstrip('\ufeff')
@@ -64,26 +68,16 @@ class AsyncIntel():
                  ):
         database_url = database_url or os.getenv("database_url")
         assert database_url
-
+
+        # async_engine
+        self.async_engine = create_async_engine(database_url, echo=True,
                                                 pool_size=10,        # connections kept in the pool
                                                 max_overflow=20,     # extra connections allowed when pool_size is exhausted
                                                 pool_recycle=3600,   # recycle connections every hour
                                                 pool_pre_ping=True,  # check connection liveness before use
                                                 pool_timeout=30      # maximum seconds to wait for a pooled connection
-
-
-        # async_engine
-        # self.async_engine = create_async_engine(database_url, echo=False,
-        #                                         pool_size=10,        # connections kept in the pool
-        #                                         max_overflow=20,     # extra connections allowed when pool_size is exhausted
-        #                                         pool_recycle=3600,   # recycle connections every hour
-        #                                         pool_pre_ping=True,  # check connection liveness before use
-        #                                         pool_timeout=30      # maximum seconds to wait for a pooled connection
-        # )
-
-
-        PromptBase.metadata.create_all(self.engine)
-
+                                                )
+
         if model_name in ["gemini-2.5-flash-preview-05-20-nothinking",]:
             self.llm = BianXieAdapter(model_name = model_name)
         elif model_name in ["doubao-1-5-pro-256k-250115",]:
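The async engine is now created unconditionally in __init__, and METADATA later in this diff adds aiomysql, which suggests database_url is expected to be an async MySQL DSN. A sketch, assuming the constructor accepts database_url and model_name keywords as its body implies; host and credentials below are placeholders:

```python
from pro_craft.prompt_helper_async import AsyncIntel

# the "+aiomysql" suffix tells create_async_engine to use the aiomysql driver
intel = AsyncIntel(
    database_url="mysql+aiomysql://user:password@localhost:3306/prompts",
    model_name="doubao-1-5-pro-256k-250115",
)
```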
@@ -92,18 +86,26 @@ class AsyncIntel():
             print('Use BianXieAdapter')
             self.llm = BianXieAdapter()
 
+    async def create_database(self):
+        async with self.async_engine.begin() as conn:
+            await conn.run_sync(PromptBase.metadata.create_all)
 
     async def _aget_latest_prompt_version(self,target_prompt_id,session):
         """
         Fetch the latest version of the given prompt_id, determined by creation time.
         """
-
-        result = session.query(Prompt).filter(
+        stmt = select(Prompt).filter(
             Prompt.prompt_id == target_prompt_id
         ).order_by(
-            Prompt.timestamp.desc()
-            Prompt.version.desc()
-        )
+            desc(Prompt.timestamp),  # use sqlalchemy.desc() for descending order
+            desc(Prompt.version)     # use sqlalchemy.desc() for descending order
+        )
+
+        result = await session.execute(stmt)
+        # get the first model instance from the Result object:
+        # .scalars() extracts the first column of each row (here the Prompt object itself)
+        # .first() returns the first result
+        result = result.scalars().first()
 
         if result:
             editing_log(f"找到 prompt_id '{target_prompt_id}' 的最新版本 (基于时间): {result.version}")
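Because PromptBase.metadata.create_all(self.engine) no longer runs in __init__, creating the tables is now an explicit, awaitable step via the new create_database(). A one-time startup sketch, reusing the intel instance from the sketch above:

```python
import asyncio

async def init_schema() -> None:
    # replaces the synchronous create_all() call that __init__ used to make
    await intel.create_database()

asyncio.run(init_schema())
```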
@@ -111,7 +113,6 @@ class AsyncIntel():
             editing_log(f"未找到 prompt_id '{target_prompt_id}' 的任何版本。")
         return result
 
-
     async def _aget_specific_prompt_version(self,target_prompt_id, target_version,session):
         """
         Fetch the data for a given prompt_id and a specific version.
@@ -126,16 +127,19 @@ class AsyncIntel():
             dict or None: if found, a dict containing the id, prompt_id, version, timestamp and prompt fields;
                           otherwise None.
         """
+        stmt = select(Prompt).filter(
+            Prompt.prompt_id == target_prompt_id,
+            Prompt.version == target_version
+        )
+        result = await session.execute(stmt)
 
-
-
-
-        ).first() # (prompt_id, version) is unique, so first() is enough
-        if result:
+        specific_prompt = result.scalars().one_or_none()
+
+        if specific_prompt:
             editing_log(f"找到 prompt_id '{target_prompt_id}', 版本 '{target_version}' 的提示词数据。")
         else:
             editing_log(f"未找到 prompt_id '{target_prompt_id}', 版本 '{target_version}' 的提示词数据。")
-            return
+        return specific_prompt
 
     async def aget_prompts_from_sql(self,
                                     prompt_id: str,
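Both getters now follow the same SQLAlchemy 2.0 async pattern: build a select(), await session.execute(), then unwrap ORM instances with .scalars(). A generic sketch of the migration, with Prompt standing in for the mapped class defined elsewhere in the package:

```python
from sqlalchemy import select, desc
# Prompt is assumed to be the ORM-mapped class used throughout this module

async def latest_prompt(session, prompt_id):
    # 1.x style: session.query(Prompt).filter(...).order_by(...).first()
    stmt = (
        select(Prompt)
        .filter(Prompt.prompt_id == prompt_id)
        .order_by(desc(Prompt.timestamp), desc(Prompt.version))
    )
    result = await session.execute(stmt)   # AsyncSession.execute returns a Result
    return result.scalars().first()        # ORM objects, not Row tuples
```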
@@ -192,7 +196,7 @@ class AsyncIntel():
         )
 
         session.add(prompt1)
-        session.commit() # commit the transaction and write the data to the database
+        await session.commit() # commit the transaction and write the data to the database
 
     async def asave_use_case_by_sql(self,
                                     prompt_id: str,
@@ -211,7 +215,7 @@ class AsyncIntel():
         )
 
         session.add(use_case)
-        session.commit() # commit the transaction and write the data to the database
+        await session.commit() # commit the transaction and write the data to the database
 
     async def summary_to_sql(
             self,
@@ -298,8 +302,8 @@ class AsyncIntel():
         Push the state to be modified up to the database  # 1
         """
         # check whether it already exists
-
-
+        async with create_async_session(self.async_engine) as session:
+
             latest_prompt = await self.aget_prompts_from_sql(prompt_id=prompt_id,session=session)
 
             await self.asave_prompt_increment_version(prompt_id=latest_prompt.prompt_id,
@@ -321,15 +325,15 @@ class AsyncIntel():
                                          prompt_id: str,
                                          version: str = None,
                                          inference_save_case = True,
+                                         push_patch = False,
                                          ):
         if isinstance(input_data,dict):
             input_ = json.dumps(input_data,ensure_ascii=False)
         elif isinstance(input_data,str):
             input_ = input_data
-
 
         # query the database for the latest prompt object
-        with
+        async with create_async_session(self.async_engine) as session:
             result_obj = await self.aget_prompts_from_sql(prompt_id=prompt_id,session=session)
 
 
@@ -403,12 +407,23 @@ class AsyncIntel():
                     session = session
                 )
                 ai_result = await self.llm.aproduct(prompt + output_format + "\n-----input----\n" + input_)
-            elif result_obj.action_type == "
-
+            elif result_obj.action_type == "patch":
+
+                demand = result_obj.demand
+                assert demand
+
+                chat_history = prompt + demand
+                ai_result = await self.llm.aproduct(chat_history + output_format + "\n-----input----\n" + input_)
+
+                if push_patch:
+                    self.save_prompt_increment_version(prompt_id, chat_history,
+                                                       use_case = input_,
+                                                       score = 60,
+                                                       session = session)
+
             else:
                 raise
 
-
         return ai_result
 
     async def aintellect_stream_remove(self,
@@ -417,6 +432,7 @@ class AsyncIntel():
                                       prompt_id: str,
                                       version: str = None,
                                       inference_save_case = True,
+                                      push_patch = False,
                                       ):
         if isinstance(input_data,dict):
             input_ = json.dumps(input_data,ensure_ascii=False)
@@ -425,7 +441,7 @@ class AsyncIntel():
 
 
         # query the database for the latest prompt object
-        with create_session(self.
+        with create_session(self.async_engine) as session:
             result_obj = await self.aget_prompts_from_sql(prompt_id=prompt_id,session=session)
 
         '''
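aintellect_stream_remove is an async generator that yields the model's output piece by piece, and in the new patch branch below it can also re-save the patched prompt. A consumption sketch showing only the parameters visible in this diff; the real signature may require more (for example an output format):

```python
async def collect(intel):
    chunks = []
    async for word in intel.aintellect_stream_remove(
        input_data={"text": "..."},
        prompt_id="some-prompt-id",   # placeholder id
        version=None,
        inference_save_case=True,
        push_patch=False,
    ):
        chunks.append(word)           # each yielded chunk of the streamed answer
    return "".join(chunks)
```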
@@ -520,8 +536,22 @@ class AsyncIntel():
                     ai_result += word
                     yield word
 
-            elif result_obj.action_type == "
-
+            elif result_obj.action_type == "patch":
+
+                demand = result_obj.demand
+                assert demand
+
+                chat_history = prompt + demand
+                ai_generate_result = self.llm.aproduct_stream(chat_history + output_format + "\n-----input----\n" + input_)
+                ai_result = ""
+                async for word in ai_generate_result:
+                    ai_result += word
+                    yield word
+                if push_patch:
+                    self.save_prompt_increment_version(prompt_id, chat_history,
+                                                       use_case = input_,
+                                                       score = 60,
+                                                       session = session)
             else:
                 raise
 
pro_craft/utils.py
CHANGED
@@ -150,7 +150,7 @@ async def create_async_session(async_engine):
     session = Session()
     try:
         yield session
-        await session.commit() # auto-commit the transaction on success
+        # await session.commit() # auto-commit the transaction on success
 
     except Exception as e:
         print(f"An error occurred: {e}")
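With the auto-commit removed from create_async_session, callers own the transaction boundary, which is why the save methods above gained explicit await session.commit() calls. The resulting calling convention, sketched with a placeholder object:

```python
async with create_async_session(async_engine) as session:
    session.add(some_object)    # some_object is a placeholder ORM instance
    await session.commit()      # explicit commit; the context manager no longer does this
```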
{pro_craft-0.1.9.dist-info → pro_craft-0.1.10.dist-info}/METADATA
CHANGED

@@ -1,9 +1,10 @@
 Metadata-Version: 2.4
 Name: pro-craft
-Version: 0.1.9
+Version: 0.1.10
 Summary: Add your description here
 Requires-Python: >=3.12
 Description-Content-Type: text/markdown
+Requires-Dist: aiomysql>=0.2.0
 Requires-Dist: anyio>=4.11.0
 Requires-Dist: db-help>=0.2.2
 Requires-Dist: fastapi>=0.119.0
{pro_craft-0.1.9.dist-info → pro_craft-0.1.10.dist-info}/RECORD
CHANGED

@@ -4,10 +4,10 @@ pro_craft/designer.py,sha256=3gyCqrjcw61sHzDjUPKhL1LOAE8xWLLbNT8NlK2mFLc,4739
 pro_craft/evals.py,sha256=1T86jur4k3cLk43j1GyAW4JS0nPNfl6P0ZOQmu-SgpA,1928
 pro_craft/file_manager.py,sha256=2j7lCt9L4mtvAy8_76ibTthXLwKKmVatWIB3DSvQM7U,3805
 pro_craft/log.py,sha256=MZf9jCZsiRoAq8v4FxVnJqeSXxgzAiiKf7mxz6bFtwM,4263
-pro_craft/prompt_helper.py,sha256=
-pro_craft/prompt_helper_async.py,sha256=
+pro_craft/prompt_helper.py,sha256=1EDGP2sKoUCrLXiR0kD2AqzT9LcFYWO8S2FyaZlBoxc,26150
+pro_craft/prompt_helper_async.py,sha256=Ej0JXwev0FmPSoOE5iEXtZxQ3pXoqcaulyyUuC7WyC4,27859
 pro_craft/server.py,sha256=fPAosQIU0d7gxICiALl8u6QwbLI4cawVFyoRYebRES0,2827
-pro_craft/utils.py,sha256=
+pro_craft/utils.py,sha256=cpvwk68mD9hYY8WCq2JXzfrrXqpshiscz_OSav4tC7U,5687
 pro_craft/code_helper/coder.py,sha256=NXglF1KiPtGe4HZN0MZvFJ8p9Iyd5kzIt72DQGgRwXA,24715
 pro_craft/server/__main__.py,sha256=LDTERPMe7RKj3eifVRo9aO9fNXdd16W5Hzr1APd04T0,4227
 pro_craft/server/models.py,sha256=CiUK8e73Bl7fo7ZbnwNTLYLeD4pb1fHMzWR13d3Y6vs,112

@@ -17,7 +17,7 @@ pro_craft/server/mcp/math.py,sha256=OOzGXx64nK4bOVlu33PtVddcCQ9ilqA3Em9yxjSX9cg,
 pro_craft/server/mcp/resource.py,sha256=z94jP3qZofO-1lZCM3TuOfLajw41HARs1ojXab1ymas,776
 pro_craft/server/mcp/weather.py,sha256=RAGuf4sgjlTQSfRRZ1Fo18JnuMQRS_Db9p6AqBQrl8E,455
 pro_craft/server/router/recommended.py,sha256=IAZFdmb8HSl2_TOJeuv5uOKIX47XyX4p4sEwxG-0vt0,9968
-pro_craft-0.1.
-pro_craft-0.1.
-pro_craft-0.1.
-pro_craft-0.1.
+pro_craft-0.1.10.dist-info/METADATA,sha256=d0eKMauNY8YEx9A8ZAtIvGXEVYTER04UwYCscH7ifHY,1800
+pro_craft-0.1.10.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+pro_craft-0.1.10.dist-info/top_level.txt,sha256=yqYDHArnYMWpeCxkmGRwlL6sJtxiOUnYylLDx9EOgFg,10
+pro_craft-0.1.10.dist-info/RECORD,,
{pro_craft-0.1.9.dist-info → pro_craft-0.1.10.dist-info}/WHEEL
File without changes

{pro_craft-0.1.9.dist-info → pro_craft-0.1.10.dist-info}/top_level.txt
File without changes