pro-craft 0.1.32__tar.gz → 0.1.34__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of pro-craft might be problematic. Click here for more details.

Files changed (28)
  1. {pro_craft-0.1.32 → pro_craft-0.1.34}/PKG-INFO +1 -1
  2. {pro_craft-0.1.32 → pro_craft-0.1.34}/pyproject.toml +1 -1
  3. {pro_craft-0.1.32 → pro_craft-0.1.34}/src/pro_craft/database.py +1 -1
  4. {pro_craft-0.1.32 → pro_craft-0.1.34}/src/pro_craft/prompt_craft/async_ copy.py +19 -5
  5. {pro_craft-0.1.32 → pro_craft-0.1.34}/src/pro_craft/prompt_craft/async_.py +20 -5
  6. {pro_craft-0.1.32 → pro_craft-0.1.34}/src/pro_craft/prompt_craft/sync.py +2 -3
  7. {pro_craft-0.1.32 → pro_craft-0.1.34}/src/pro_craft.egg-info/PKG-INFO +1 -1
  8. {pro_craft-0.1.32 → pro_craft-0.1.34}/README.md +0 -0
  9. {pro_craft-0.1.32 → pro_craft-0.1.34}/setup.cfg +0 -0
  10. {pro_craft-0.1.32 → pro_craft-0.1.34}/src/pro_craft/__init__.py +0 -0
  11. {pro_craft-0.1.32 → pro_craft-0.1.34}/src/pro_craft/code_helper/coder.py +0 -0
  12. {pro_craft-0.1.32 → pro_craft-0.1.34}/src/pro_craft/code_helper/designer.py +0 -0
  13. {pro_craft-0.1.32 → pro_craft-0.1.34}/src/pro_craft/file_manager.py +0 -0
  14. {pro_craft-0.1.32 → pro_craft-0.1.34}/src/pro_craft/log.py +0 -0
  15. {pro_craft-0.1.32 → pro_craft-0.1.34}/src/pro_craft/prompt_craft/__init__.py +0 -0
  16. {pro_craft-0.1.32 → pro_craft-0.1.34}/src/pro_craft/prompt_craft/new.py +0 -0
  17. {pro_craft-0.1.32 → pro_craft-0.1.34}/src/pro_craft/server/mcp/__init__.py +0 -0
  18. {pro_craft-0.1.32 → pro_craft-0.1.34}/src/pro_craft/server/mcp/prompt.py +0 -0
  19. {pro_craft-0.1.32 → pro_craft-0.1.34}/src/pro_craft/server/router/__init__.py +0 -0
  20. {pro_craft-0.1.32 → pro_craft-0.1.34}/src/pro_craft/server/router/prompt.py +0 -0
  21. {pro_craft-0.1.32 → pro_craft-0.1.34}/src/pro_craft/utils.py +0 -0
  22. {pro_craft-0.1.32 → pro_craft-0.1.34}/src/pro_craft.egg-info/SOURCES.txt +0 -0
  23. {pro_craft-0.1.32 → pro_craft-0.1.34}/src/pro_craft.egg-info/dependency_links.txt +0 -0
  24. {pro_craft-0.1.32 → pro_craft-0.1.34}/src/pro_craft.egg-info/requires.txt +0 -0
  25. {pro_craft-0.1.32 → pro_craft-0.1.34}/src/pro_craft.egg-info/top_level.txt +0 -0
  26. {pro_craft-0.1.32 → pro_craft-0.1.34}/tests/test22.py +0 -0
  27. {pro_craft-0.1.32 → pro_craft-0.1.34}/tests/test_coder.py +0 -0
  28. {pro_craft-0.1.32 → pro_craft-0.1.34}/tests/test_designer.py +0 -0
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: pro-craft
-Version: 0.1.32
+Version: 0.1.34
 Summary: Add your description here
 Requires-Python: >=3.12
 Description-Content-Type: text/markdown
@@ -1,6 +1,6 @@
 [project]
 name = "pro-craft"
-version = "0.1.32"
+version = "0.1.34"
 description = "Add your description here"
 readme = "README.md"
 requires-python = ">=3.12"
@@ -235,7 +235,7 @@ class UseCase(PromptBase):

     faired_time = Column(
         Integer,
-        nullable=True, # 不能为空
+        nullable=False, # 不能为空
         comment="失败次数"
     )

@@ -162,11 +162,10 @@ class AsyncIntel():

         if model_name in ["gemini-2.5-flash-preview-05-20-nothinking",]:
             self.llm = BianXieAdapter(model_name = model_name)
-        elif model_name in ["doubao-1-5-pro-256k-250115",]:
+        elif model_name in ["doubao-1-5-pro-256k-250115","doubao-1-5-pro-32k-250115"]:
             self.llm = ArkAdapter(model_name = model_name)
         else:
-            print('Use BianXieAdapter')
-            self.llm = BianXieAdapter()
+            raise Exception("error llm name")

     async def create_specific_database(self):
         tables_to_create_names = ["ai_prompts","ai_usecase"]
@@ -400,20 +399,30 @@ class AsyncIntel():
     async def save_use_case_by_sql(self,
                                    prompt_id: str,
                                    use_case:str = "",
+                                   timestamp = "",
                                    output = "",
                                    solution: str = "",
+                                   faired_time = 0,
                                    session = None
                                    ):
+
         """
         从sql保存提示词
         """
         #TODO 存之前保证数据库中相同的prompt_id中没有重复的use_case
+        use_cases = await self.get_use_case_by_sql(target_prompt_id = prompt_id,
+                                                   session = session)
+        for use_case_old in use_cases:
+            if use_case == use_case_old.use_case:
+                print("用例已经存在")
+                return

-        use_case = UseCase(prompt_id=prompt_id,
+        use_case = UseCase(prompt_id=prompt_id,
                            use_case = use_case,
+                           timestamp = timestamp,
                            output = output,
                            solution = solution,
-                           faired_time = 1,
+                           faired_time = faired_time,
                            )

         session.add(use_case)
@@ -559,8 +568,10 @@ class AsyncIntel():
         if inference_save_case:
             await self.save_use_case_by_sql(prompt_id,
                                             use_case = input_,
+                                            timestamp = datetime.now(),
                                             output = ai_result,
                                             solution = "备注/理想回复",
+                                            faired_time = 0,
                                             session = session,
                                             )

@@ -651,6 +662,7 @@ class AsyncIntel():
                 session = session
             )
             ai_result = await self.llm.aproduct(prompt + output_format + "\nuser:" + input_)
+
         elif result_obj.action_type == "patch":
             demand = result_obj.demand
             assert demand
@@ -713,8 +725,10 @@ class AsyncIntel():
         if inference_save_case:
             await self.save_use_case_by_sql(prompt_id,
                                             use_case = input_,
+                                            timestamp = datetime.now(),
                                             output = ai_result,
                                             solution = "备注/理想回复",
+                                            faired_time = 0,
                                             session = session,
                                             )

@@ -162,11 +162,10 @@ class AsyncIntel():

         if model_name in ["gemini-2.5-flash-preview-05-20-nothinking",]:
             self.llm = BianXieAdapter(model_name = model_name)
-        elif model_name in ["doubao-1-5-pro-256k-250115",]:
+        elif model_name in ["doubao-1-5-pro-256k-250115","doubao-1-5-pro-32k-250115"]:
             self.llm = ArkAdapter(model_name = model_name)
         else:
-            print('Use BianXieAdapter')
-            self.llm = BianXieAdapter()
+            raise Exception("error llm name")

     async def create_specific_database(self):
         tables_to_create_names = ["ai_prompts","ai_usecase"]
@@ -400,19 +399,30 @@ class AsyncIntel():
     async def save_use_case_by_sql(self,
                                    prompt_id: str,
                                    use_case:str = "",
+                                   timestamp = "",
                                    output = "",
                                    solution: str = "",
+                                   faired_time = 0,
                                    session = None
                                    ):
+
         """
         从sql保存提示词
         """
         #TODO 存之前保证数据库中相同的prompt_id中没有重复的use_case
-
-        use_case = UseCase(prompt_id=prompt_id,
+        use_cases = await self.get_use_case_by_sql(target_prompt_id = prompt_id,
+                                                   session = session)
+        for use_case_old in use_cases:
+            if use_case == use_case_old.use_case:
+                print("用例已经存在")
+                return
+
+        use_case = UseCase(prompt_id=prompt_id,
                            use_case = use_case,
+                           timestamp = timestamp,
                            output = output,
                            solution = solution,
-                           faired_time = 1,
+                           faired_time = faired_time,
                            )

         session.add(use_case)
@@ -558,8 +568,10 @@ class AsyncIntel():
         if inference_save_case:
             await self.save_use_case_by_sql(prompt_id,
                                             use_case = input_,
+                                            timestamp = datetime.now(),
                                             output = ai_result,
                                             solution = "备注/理想回复",
+                                            faired_time = 0,
                                             session = session,
                                             )

@@ -650,6 +662,7 @@ class AsyncIntel():
                 session = session
             )
             ai_result = await self.llm.aproduct(prompt + output_format + "\nuser:" + input_)
+
         elif result_obj.action_type == "patch":
             demand = result_obj.demand
             assert demand
@@ -712,8 +725,10 @@ class AsyncIntel():
         if inference_save_case:
             await self.save_use_case_by_sql(prompt_id,
                                             use_case = input_,
+                                            timestamp = datetime.now(),
                                             output = ai_result,
                                             solution = "备注/理想回复",
+                                            faired_time = 0,
                                             session = session,
                                             )

@@ -86,11 +86,10 @@ class Intel():

         if model_name in ["gemini-2.5-flash-preview-05-20-nothinking",]:
             self.llm = BianXieAdapter(model_name = model_name)
-        elif model_name in ["doubao-1-5-pro-256k-250115",]:
+        elif model_name in ["doubao-1-5-pro-256k-250115","doubao-1-5-pro-32k-250115"]:
             self.llm = ArkAdapter(model_name = model_name)
         else:
-            print('Use BianXieAdapter')
-            self.llm = BianXieAdapter()
+            raise Exception("error llm name")
         self.logger = logger or pro_craft_logger

     def _get_latest_prompt_version(self,target_prompt_id,session):
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: pro-craft
-Version: 0.1.32
+Version: 0.1.34
 Summary: Add your description here
 Requires-Python: >=3.12
 Description-Content-Type: text/markdown
File without changes
File without changes
File without changes