pop-python 1.0.2__tar.gz → 1.0.4__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (33)
  1. {pop_python-1.0.2 → pop_python-1.0.4}/PKG-INFO +1 -1
  2. {pop_python-1.0.2 → pop_python-1.0.4}/POP/LLMClient.py +12 -5
  3. {pop_python-1.0.2 → pop_python-1.0.4}/POP/POP.py +18 -10
  4. {pop_python-1.0.2 → pop_python-1.0.4}/pop_python.egg-info/PKG-INFO +1 -1
  5. {pop_python-1.0.2 → pop_python-1.0.4}/setup.py +1 -1
  6. {pop_python-1.0.2 → pop_python-1.0.4}/LICENSE +0 -0
  7. {pop_python-1.0.2 → pop_python-1.0.4}/MANIFEST.in +0 -0
  8. {pop_python-1.0.2 → pop_python-1.0.4}/POP/Embedder.py +0 -0
  9. {pop_python-1.0.2 → pop_python-1.0.4}/POP/__init__.py +0 -0
  10. {pop_python-1.0.2 → pop_python-1.0.4}/POP/prompts/2024-11-19-content_finder.md +0 -0
  11. {pop_python-1.0.2 → pop_python-1.0.4}/POP/prompts/2024-11-19-get_content.md +0 -0
  12. {pop_python-1.0.2 → pop_python-1.0.4}/POP/prompts/2024-11-19-get_title_and_url.md +0 -0
  13. {pop_python-1.0.2 → pop_python-1.0.4}/POP/prompts/CLI_AI_helper.md +0 -0
  14. {pop_python-1.0.2 → pop_python-1.0.4}/POP/prompts/content_finder.md +0 -0
  15. {pop_python-1.0.2 → pop_python-1.0.4}/POP/prompts/corpus_splitter.md +0 -0
  16. {pop_python-1.0.2 → pop_python-1.0.4}/POP/prompts/fabric-improve_prompt.md +0 -0
  17. {pop_python-1.0.2 → pop_python-1.0.4}/POP/prompts/function_code_generator.md +0 -0
  18. {pop_python-1.0.2 → pop_python-1.0.4}/POP/prompts/function_description_generator.md +0 -0
  19. {pop_python-1.0.2 → pop_python-1.0.4}/POP/prompts/get_content.md +0 -0
  20. {pop_python-1.0.2 → pop_python-1.0.4}/POP/prompts/get_title_and_url.md +0 -0
  21. {pop_python-1.0.2 → pop_python-1.0.4}/POP/prompts/json_formatter_prompt.md +0 -0
  22. {pop_python-1.0.2 → pop_python-1.0.4}/POP/prompts/openai-function_description_generator.md +0 -0
  23. {pop_python-1.0.2 → pop_python-1.0.4}/POP/prompts/openai-json_schema_generator.md +0 -0
  24. {pop_python-1.0.2 → pop_python-1.0.4}/POP/prompts/openai-prompt_generator.md +0 -0
  25. {pop_python-1.0.2 → pop_python-1.0.4}/POP/schemas/biomedical_ner_extractor.json +0 -0
  26. {pop_python-1.0.2 → pop_python-1.0.4}/POP/schemas/entity_extraction_per_sentence.json +0 -0
  27. {pop_python-1.0.2 → pop_python-1.0.4}/README.md +0 -0
  28. {pop_python-1.0.2 → pop_python-1.0.4}/pop_python.egg-info/SOURCES.txt +0 -0
  29. {pop_python-1.0.2 → pop_python-1.0.4}/pop_python.egg-info/dependency_links.txt +0 -0
  30. {pop_python-1.0.2 → pop_python-1.0.4}/pop_python.egg-info/requires.txt +0 -0
  31. {pop_python-1.0.2 → pop_python-1.0.4}/pop_python.egg-info/top_level.txt +0 -0
  32. {pop_python-1.0.2 → pop_python-1.0.4}/pyproject.toml +0 -0
  33. {pop_python-1.0.2 → pop_python-1.0.4}/setup.cfg +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: pop-python
3
- Version: 1.0.2
3
+ Version: 1.0.4
4
4
  Summary: Prompt Oriented Programming (POP): reusable, composable prompt functions for LLMs.
5
5
  Home-page: https://github.com/sgt1796/POP
6
6
  Author: Guotai Shen
@@ -84,11 +84,11 @@ class OpenAIClient(LLMClient):
84
84
  else:
85
85
  request_payload["response_format"] = {"type": "json_schema", "json_schema": fmt}
86
86
 
87
- # Handle function tools
87
+ # Handle function tools (already OpenAI-style)
88
88
  tools = kwargs.get("tools", None)
89
89
  if tools:
90
- request_payload["tools"] = [{"type": "function", "function": tool} for tool in tools]
91
- request_payload["tool_choice"] = "auto"
90
+ request_payload["tools"] = tools
91
+ request_payload["tool_choice"] = kwargs.get("tool_choice", "auto")
92
92
 
93
93
  # Temporary patch for models not supporting system roles
94
94
  if model == "o1-mini" and request_payload["messages"] and request_payload["messages"][0]["role"] == "system":
@@ -140,6 +140,12 @@ class DeepseekClient(LLMClient):
140
140
  role = msg.get("role", "user")
141
141
  request_payload["messages"].append({"role": role, "content": content})
142
142
 
143
+ # tools: OpenAI-style list
144
+ tools = kwargs.get("tools", None)
145
+ if tools:
146
+ request_payload["tools"] = tools
147
+ request_payload["tool_choice"] = kwargs.get("tool_choice", "auto")
148
+
143
149
  # Execute request
144
150
  try:
145
151
  response = self.client.chat.completions.create(**request_payload)
@@ -291,7 +297,8 @@ class DoubaoClient(LLMClient):
291
297
  # Tools (function calling)
292
298
  tools = kwargs.get("tools")
293
299
  if tools:
294
- raise NotImplementedError("DoubaoClient does not support tools yet.")
300
+ payload["tools"] = tools
301
+ payload["tool_choice"] = kwargs.get("tool_choice", "auto")
295
302
  try:
296
303
  response = self.client.chat.completions.create(**payload)
297
304
  except Exception as e:
@@ -400,4 +407,4 @@ class OllamaClient(LLMClient):
400
407
  class Response:
401
408
  def __init__(self, content): self.choices = [Choice(Message(content))]
402
409
 
403
- return Response(content)
410
+ return Response(content)
@@ -3,7 +3,7 @@ import json
3
3
  import requests
4
4
  from dotenv import load_dotenv
5
5
  from os import getenv, path
6
- from LLMClient import LLMClient, OpenAIClient, GeminiClient, DeepseekClient, LocalPyTorchClient, DoubaoClient, OllamaClient
6
+ from .LLMClient import LLMClient, OpenAIClient, GeminiClient, DeepseekClient, LocalPyTorchClient, DoubaoClient, OllamaClient
7
7
 
8
8
  # Load environment variables
9
9
  load_dotenv()
@@ -70,6 +70,7 @@ class PromptFunction:
70
70
  - sys: Additional system instructions.
71
71
  - fmt: Response format/schema.
72
72
  - tools: List of function tools to use (for function calling).
73
+ - tool_choice
73
74
  - temp: Temperature.
74
75
  - ADD_BEFORE: Text to prepend.
75
76
  - ADD_AFTER: Text to append.
@@ -87,6 +88,9 @@ class PromptFunction:
87
88
  tools = kwargs.pop("tools", None)
88
89
  temp = kwargs.pop("temp", self.temperature)
89
90
  images = kwargs.pop("images", None)
91
+ tool_choice = kwargs.pop("tool_choice", None)
92
+ if tools and not tool_choice:
93
+ tool_choice = "auto"
90
94
 
91
95
  # Prepare the prompt with dynamic injections.
92
96
  formatted_prompt = self._prepare_prompt(*args, **kwargs)
@@ -105,15 +109,19 @@ class PromptFunction:
105
109
  messages = [system_message, user_message]
106
110
 
107
111
  try:
108
- # Call the LLM client.
109
- raw_response = self.client.chat_completion(
110
- messages=messages,
111
- model=model,
112
- temperature=temp,
113
- response_format=fmt,
114
- tools=tools,
115
- images=images
116
- )
112
+ # Call the LLM client. Always include tool_choice when tools are provided.
113
+ call_kwargs = {
114
+ "messages": messages,
115
+ "model": model,
116
+ "temperature": temp,
117
+ "response_format": fmt,
118
+ "tools": tools,
119
+ "images": images,
120
+ # Always include tool_choice key so callers can assert on it
121
+ "tool_choice": tool_choice,
122
+ }
123
+
124
+ raw_response = self.client.chat_completion(**call_kwargs)
117
125
  except Exception as e:
118
126
  verbose = True
119
127
  if verbose:
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: pop-python
3
- Version: 1.0.2
3
+ Version: 1.0.4
4
4
  Summary: Prompt Oriented Programming (POP): reusable, composable prompt functions for LLMs.
5
5
  Home-page: https://github.com/sgt1796/POP
6
6
  Author: Guotai Shen
@@ -7,7 +7,7 @@ long_description = (this_dir / "README.md").read_text(encoding="utf-8")
7
7
  setup(
8
8
  # PyPI project name
9
9
  name="pop-python",
10
- version="1.0.2", # update as needed
10
+ version="1.0.4", # update as needed
11
11
 
12
12
  author="Guotai Shen",
13
13
  author_email="sgt1796@gmail.com",
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes