pycoze 0.1.84__py3-none-any.whl → 0.1.86__py3-none-any.whl

pycoze/bot/agent/agent.py CHANGED
@@ -83,6 +83,7 @@ if __name__ == "__main__":
         tools=[python_tool],
         llm=chat,
         assistant_message="请以女友的口吻回答,输出不小于100字,可以随便说点其他的",
+        support_tools=True,
     )

     inputs = [HumanMessage(content="计算根号7+根号88")]
pycoze/bot/agent/agent_types/openai_func_call_agent.py CHANGED
@@ -4,7 +4,7 @@ import json
 from langchain.tools import BaseTool
 from langchain_core.utils.function_calling import convert_to_openai_tool
 from langchain_core.language_models.base import LanguageModelLike
-from langchain_core.messages import SystemMessage, ToolMessage
+from langchain_core.messages import SystemMessage, ToolMessage, HumanMessage
 from langgraph.graph import END
 from langgraph.graph.message import MessageGraph
 from langgraph.prebuilt import ToolExecutor, ToolInvocation
@@ -31,7 +31,11 @@ def get_all_markdown_json(content):


 def create_openai_func_call_agent_executor(
-    tools: list[BaseTool], llm: LanguageModelLike, system_message: str, **kwargs
+    tools: list[BaseTool],
+    llm: LanguageModelLike,
+    system_message: str,
+    support_tools: str,
+    **kwargs
 ):

     async def _get_messages(messages):
@@ -58,7 +62,6 @@ def create_openai_func_call_agent_executor(
     def should_continue(messages):
         # If there is no FuncCall, then we finish
         last_message = messages[-1]
-        print("last_message", last_message)
         if last_message.content.strip().endswith("```"):
             last_message.content = last_message.content + "\n\n"  # avoid hurting readability
         # if not last_message.tool_calls:
@@ -82,7 +85,6 @@ def create_openai_func_call_agent_executor(
         tool_calls = []
         for tool_call in all_json:
             if "name" not in tool_call or "parameters" not in tool_call:
-                print("end")
                 return "end"
             tool_call["arguments"] = json.dumps(tool_call["parameters"])
             tool_call.pop("parameters")
@@ -167,44 +169,32 @@ def create_openai_func_call_agent_executor(
                 additional_kwargs={"name": tool_call["function"]["name"]},
             )
             tool_messages.append(message)
+        if not support_tools:
+            # HumanMessage
+            tool_msgs_str = repr(tool_messages)
+            tool_messages = [
+                HumanMessage(content="工具调用结束,结果如下:\n" + tool_msgs_str)
+            ]
         return tool_messages

     workflow = MessageGraph()

     # Define the two nodes we will cycle between
     workflow.add_node("agent", agent)
-    workflow.add_node("action", call_tool)
+    workflow.add_node("call_tool", call_tool)

-    # Set the entrypoint as `agent`
-    # This means that this node is the first one called
     workflow.set_entry_point("agent")

-    # We now add a conditional edge
     workflow.add_conditional_edges(
-        # First, we define the start node. We use `agent`.
-        # This means these are the edges taken after the `agent` node is called.
         "agent",
-        # Next, we pass in the function that will determine which node is called next.
         should_continue,
-        # Finally we pass in a mapping.
-        # The keys are strings, and the values are other nodes.
-        # END is a special node marking that the graph should finish.
-        # What will happen is we will call `should_continue`, and then the output of that
-        # will be matched against the keys in this mapping.
-        # Based on which one it matches, that node will then be called.
         {
-            # If `tools`, then we call the tool node.
-            "continue": "action",
-            # Otherwise we finish.
+            "continue": "call_tool",
             "end": END,
         },
     )

-    # We now add a normal edge from `tools` to `agent`.
-    # This means that after `tools` is called, `agent` node is called next.
-    workflow.add_edge("action", "agent")
+    # After the tool call finishes, invoke the agent again
+    workflow.add_edge("call_tool", "agent")

-    # Finally, we compile it!
-    # This compiles it into a LangChain Runnable,
-    # meaning you can use it as you would any other runnable
     return workflow.compile()
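
The `support_tools` branch added to `call_tool` above is the core of this release: when the configured model has no native function calling, the per-call `ToolMessage` results are collapsed into a single `HumanMessage` so they re-enter the loop as ordinary chat text. Below is a minimal, self-contained sketch of that fallback pattern; `wrap_tool_results` and the sample results are illustrative and not part of pycoze.

from langchain_core.messages import HumanMessage, ToolMessage

def wrap_tool_results(tool_messages: list[ToolMessage], support_tools: bool) -> list:
    """Return tool results in a form the target model can consume."""
    if support_tools:
        # Models with native tool calling can read ToolMessage objects directly.
        return tool_messages
    # Otherwise collapse everything into one human-readable message,
    # mirroring the fallback added to call_tool in this diff.
    return [HumanMessage(content="Tool calls finished, results:\n" + repr(tool_messages))]

# Illustrative usage with two fake tool results:
results = [
    ToolMessage(content="2.6457513110645907", tool_call_id="call_1"),
    ToolMessage(content="9.38083151964686", tool_call_id="call_2"),
]
print(wrap_tool_results(results, support_tools=False)[0].content)

The same hunk also renames the graph node from "action" to "call_tool" and drops the tutorial comments; the agent → call_tool → agent cycle itself is unchanged.
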
pycoze/bot/agent/assistant.py CHANGED
@@ -18,10 +18,11 @@ class Runnable(RunnableBinding):
         tools: Sequence[BaseTool],
         llm: LanguageModelLike,
         assistant_message: str,
+        support_tools: bool
     ) -> None:

         agent_executor = create_openai_func_call_agent_executor(
-            tools, llm, assistant_message
+            tools, llm, assistant_message, support_tools
         )
         agent_executor = agent_executor.with_config({"recursion_limit": 50})
         super().__init__(
pycoze/bot/bot.py CHANGED
@@ -9,6 +9,9 @@ from langchain_core.utils.function_calling import convert_to_openai_tool

 params = utils.arg.read_params_file()
 llm_file = params["appPath"] + "/JsonStorage/llm.json"
+with open(llm_file, "r", encoding="utf-8") as f:
+    cfg = json.load(f)
+support_tools = not cfg["model"].startswith("yi-")


 def load_role_setting(bot_setting_file: str):
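
This new module-level block loads llm.json once at import time and derives `support_tools` from the model name: any model whose name starts with "yi-" is treated as lacking native tool calling and is routed through the prompt-based JSON fallback instead. A rough sketch of the expected config shape and the derivation, with placeholder values (only the file path and the apiKey/baseURL/model keys come from the diff):

import json

# Hypothetical JsonStorage/llm.json contents (placeholder values):
# {
#     "apiKey": "sk-...",
#     "baseURL": "https://api.example.com/v1",
#     "model": "yi-large"
# }

def load_llm_config(llm_file: str) -> tuple[dict, bool]:
    """Read the LLM config and report whether native tool calls can be used."""
    with open(llm_file, "r", encoding="utf-8") as f:
        cfg = json.load(f)
    # Models named "yi-..." get the prompt-based fallback.
    return cfg, not cfg["model"].startswith("yi-")
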
@@ -29,28 +32,23 @@ def load_tools(bot_setting_file):
 def agent_chat(bot_setting_file, history):
     role_setting = load_role_setting(bot_setting_file)
     tools = load_tools(bot_setting_file)
-    with open(llm_file, "r", encoding="utf-8") as f:
-        cfg = json.load(f)
-    chat = ChatOpenAI(
-        api_key=cfg["apiKey"],
-        base_url=cfg["baseURL"],
-        model=cfg["model"],
-        temperature=(
-            role_setting["temperature"] * 2
-            if cfg["model"].startswith("deepseek") or cfg["model"].startswith("yi-")
-            else role_setting["temperature"]
-        ),
-        stop_sequences=[
-            "tool▁calls▁end",
-            "tool▁call▁end",
-        ],  # disable deepseek's tool-call markers, otherwise it fabricates tool-call processes and results
-    )
+
+    chat = ChatOpenAI(
+        api_key=cfg["apiKey"],
+        base_url=cfg["baseURL"],
+        model=cfg["model"],
+        temperature=(
+            role_setting["temperature"] * 2
+            if cfg["model"].startswith("deepseek")
+            else role_setting["temperature"]
+        ),
+        stop_sequences=[
+            "tool▁calls▁end",
+            "tool▁call▁end",
+        ],  # disable deepseek's tool-call markers, otherwise it fabricates tool-call processes and results
+    )
     prompt = role_setting["prompt"]
-    if (
-        cfg["model"].startswith("deepseek")
-        or cfg["model"].startswith("yi-")
-        and len(tools) > 0
-    ):
+    if cfg["model"].startswith("deepseek") or not support_tools and len(tools) > 0:
         prompt += """
 如果需要调用工具,请使用以正确的json格式进行结尾(务必保证json格式正确,不要出现反斜杠未转义等问题):
 ```json
@@ -66,6 +64,7 @@ def agent_chat(bot_setting_file, history):
         tools=tools,
         llm=chat,
         assistant_message=prompt,
+        support_tools=support_tools,
     )
     return asyncio.run(run_agent(agent, history))

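
With `support_tools` false, the extra prompt above asks the model to end its reply with a fenced ```json block describing the tool call, which the agent side then parses back out (see `get_all_markdown_json` in openai_func_call_agent.py). The snippet below is not pycoze's parser, just a simplified stand-in showing how such a block can be extracted and validated:

import json
import re

def extract_markdown_json(content: str) -> list[dict]:
    """Pull every fenced ```json block out of a model reply and parse it."""
    blocks = re.findall(r"```json\s*(.*?)\s*```", content, flags=re.DOTALL)
    calls = []
    for block in blocks:
        try:
            calls.append(json.loads(block))
        except json.JSONDecodeError:
            continue  # ignore malformed blocks rather than failing the turn
    return calls

# Illustrative reply in the format the added prompt requests:
reply = 'Sure.\n```json\n{"name": "python_tool", "parameters": {"code": "print(7 ** 0.5)"}}\n```'
print(extract_markdown_json(reply))
# [{'name': 'python_tool', 'parameters': {'code': 'print(7 ** 0.5)'}}]
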
pycoze-0.1.84.dist-info/METADATA → pycoze-0.1.86.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: pycoze
-Version: 0.1.84
+Version: 0.1.86
 Summary: Package for pycoze only!
 Author: Yuan Jie Xiong
 Author-email: aiqqqqqqq@qq.com
pycoze-0.1.84.dist-info/RECORD → pycoze-0.1.86.dist-info/RECORD RENAMED
@@ -6,13 +6,13 @@ pycoze/ai/__init__.py,sha256=Smivpb8qbRnzWkzKRe2IxsmKP5Dh8EvngDFdkD_DVLo,73
 pycoze/ai/comfyui.py,sha256=u75tZywkuXiOdm7XST2kBAaveJKpPvY_qTQr_TN9sXk,795
 pycoze/ai/vram_reserve.py,sha256=s55Cy-Q5mTq-k5oIPbAFwCfrjatjN0QTjQxW7WBTPZI,5738
 pycoze/bot/__init__.py,sha256=6HHMxDQVOyZM9dtSjQm9tjGnhj4h7CixD0JOvEwTi48,41
-pycoze/bot/bot.py,sha256=t5wtxigZO23qVhVrXBNVu1WYc8jBAvbWWBH1QNwmjXU,3006
+pycoze/bot/bot.py,sha256=ccsyiy_knL5pjRLgzhbUU9iXk01GZnNZFCzY3_bxZaQ,2952
 pycoze/bot/agent/__init__.py,sha256=YR9vpkEQn1e4937r_xFPJXUCPBEJ0SFzEQDBe2x3-YA,157
-pycoze/bot/agent/agent.py,sha256=3504Q6IlgNirLma_bLcIxkBjTvmAWSjKzv6da7A6c2Y,3307
-pycoze/bot/agent/assistant.py,sha256=eEcu8aS0m-Kmkv5QttnVeirJrsjqqUJ6iwLZsvwlSMY,1049
+pycoze/bot/agent/agent.py,sha256=EzgV29nAb0Obc2yQaAa0xph2KSyQ5njtNxj9ZkwnVgA,3335
+pycoze/bot/agent/assistant.py,sha256=XHg3oA4plRzWXow9uwuEoNc2xMYZF8tKDfCaBGVadWI,1092
 pycoze/bot/agent/chat.py,sha256=kc0qgcrBSXdiMy49JwThZTV-0PAvzAhiUvbI5ILiSnU,571
 pycoze/bot/agent/agent_types/__init__.py,sha256=XNvKWq9REE5Wzjm0OZi3CKIQF2UZ9PZkeUuxgFJbrfc,128
-pycoze/bot/agent/agent_types/openai_func_call_agent.py,sha256=0llTsPAalGBhVoOLAW4YrLEGPp6-I467i-ZfnpnUr6E,8347
+pycoze/bot/agent/agent_types/openai_func_call_agent.py,sha256=Bt4qyH3OsJftkluTlSOE0d0UVFYlY4OuXP9HdgarCHo,7505
 pycoze/ui/__init__.py,sha256=7xAfL2lfG7-jllPJEZUJO89xUE9sNzvo1y0WmBswjBI,458
 pycoze/ui/base.py,sha256=SCXVDK7PpMaBv6ovvabHcfRq_d2AWM0BRyxpNhuJN5A,1285
 pycoze/ui/color.py,sha256=cT9Ib8uNzkOKxyW0IwVj46o4LwdB1xgNCj1_Rou9d_4,854
@@ -21,8 +21,8 @@ pycoze/ui/ui_def.py,sha256=UhhU_yB3GV9ISbvTWT48hsHPHI250BhMILh6bu5Uioo,4206
 pycoze/utils/__init__.py,sha256=TNJhFfY7JYdLlzuP9GvgxfNXUtbgH_NUUJSqHXCxJn4,78
 pycoze/utils/arg.py,sha256=kA3KBQzXc2WlH5XbF8kfikfpqljiKaW7oY_GE4Qyffc,753
 pycoze/utils/text_or_file.py,sha256=gpxZVWt2DW6YiEg_MnMuwg36VNf3TX383QD_1oZNB0Y,551
-pycoze-0.1.84.dist-info/LICENSE,sha256=QStd_Qsd0-kAam_-sOesCIp_uKrGWeoKwt9M49NVkNU,1090
-pycoze-0.1.84.dist-info/METADATA,sha256=vFVict4MOpuC5FlPLdPcY0UoACRx4EYmsnm7ZSW6r8Y,719
-pycoze-0.1.84.dist-info/WHEEL,sha256=eOLhNAGa2EW3wWl_TU484h7q1UNgy0JXjjoqKoxAAQc,92
-pycoze-0.1.84.dist-info/top_level.txt,sha256=76dPeDhKvOCleL3ZC5gl1-y4vdS1tT_U1hxWVAn7sFo,7
-pycoze-0.1.84.dist-info/RECORD,,
+pycoze-0.1.86.dist-info/LICENSE,sha256=QStd_Qsd0-kAam_-sOesCIp_uKrGWeoKwt9M49NVkNU,1090
+pycoze-0.1.86.dist-info/METADATA,sha256=pQEipyeUZJ2PZCS7PUubP0gox0b2CMmJwWZk840YVTY,719
+pycoze-0.1.86.dist-info/WHEEL,sha256=eOLhNAGa2EW3wWl_TU484h7q1UNgy0JXjjoqKoxAAQc,92
+pycoze-0.1.86.dist-info/top_level.txt,sha256=76dPeDhKvOCleL3ZC5gl1-y4vdS1tT_U1hxWVAn7sFo,7
+pycoze-0.1.86.dist-info/RECORD,,