pycoze 0.1.84__py3-none-any.whl → 0.1.86__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pycoze/bot/agent/agent.py +1 -0
- pycoze/bot/agent/agent_types/openai_func_call_agent.py +16 -26
- pycoze/bot/agent/assistant.py +2 -1
- pycoze/bot/bot.py +20 -21
- {pycoze-0.1.84.dist-info → pycoze-0.1.86.dist-info}/METADATA +1 -1
- {pycoze-0.1.84.dist-info → pycoze-0.1.86.dist-info}/RECORD +9 -9
- {pycoze-0.1.84.dist-info → pycoze-0.1.86.dist-info}/LICENSE +0 -0
- {pycoze-0.1.84.dist-info → pycoze-0.1.86.dist-info}/WHEEL +0 -0
- {pycoze-0.1.84.dist-info → pycoze-0.1.86.dist-info}/top_level.txt +0 -0
pycoze/bot/agent/agent.py
CHANGED
@@ -4,7 +4,7 @@ import json
 from langchain.tools import BaseTool
 from langchain_core.utils.function_calling import convert_to_openai_tool
 from langchain_core.language_models.base import LanguageModelLike
-from langchain_core.messages import SystemMessage, ToolMessage
+from langchain_core.messages import SystemMessage, ToolMessage, HumanMessage
 from langgraph.graph import END
 from langgraph.graph.message import MessageGraph
 from langgraph.prebuilt import ToolExecutor, ToolInvocation
@@ -31,7 +31,11 @@ def get_all_markdown_json(content):


 def create_openai_func_call_agent_executor(
-    tools: list[BaseTool],
+    tools: list[BaseTool],
+    llm: LanguageModelLike,
+    system_message: str,
+    support_tools: str,
+    **kwargs
 ):

     async def _get_messages(messages):
@@ -58,7 +62,6 @@ def create_openai_func_call_agent_executor(
     def should_continue(messages):
         # If there is no FuncCall, then we finish
         last_message = messages[-1]
-        print("last_message", last_message)
         if last_message.content.strip().endswith("```"):
             last_message.content = last_message.content + "\n\n"  # 避免影响阅读
         # if not last_message.tool_calls:
@@ -82,7 +85,6 @@ def create_openai_func_call_agent_executor(
         tool_calls = []
         for tool_call in all_json:
             if "name" not in tool_call or "parameters" not in tool_call:
-                print("end")
                 return "end"
             tool_call["arguments"] = json.dumps(tool_call["parameters"])
             tool_call.pop("parameters")
@@ -167,44 +169,32 @@ def create_openai_func_call_agent_executor(
                 additional_kwargs={"name": tool_call["function"]["name"]},
             )
             tool_messages.append(message)
+        if not support_tools:
+            # HumanMessage
+            tool_msgs_str = repr(tool_messages)
+            tool_messages = [
+                HumanMessage(content="工具调用结束,结果如下:\n" + tool_msgs_str)
+            ]
         return tool_messages

     workflow = MessageGraph()

     # Define the two nodes we will cycle between
     workflow.add_node("agent", agent)
-    workflow.add_node("
+    workflow.add_node("call_tool", call_tool)

-    # Set the entrypoint as `agent`
-    # This means that this node is the first one called
     workflow.set_entry_point("agent")

-    # We now add a conditional edge
     workflow.add_conditional_edges(
-        # First, we define the start node. We use `agent`.
-        # This means these are the edges taken after the `agent` node is called.
         "agent",
-        # Next, we pass in the function that will determine which node is called next.
         should_continue,
-        # Finally we pass in a mapping.
-        # The keys are strings, and the values are other nodes.
-        # END is a special node marking that the graph should finish.
-        # What will happen is we will call `should_continue`, and then the output of that
-        # will be matched against the keys in this mapping.
-        # Based on which one it matches, that node will then be called.
         {
-
-            "continue": "action",
-            # Otherwise we finish.
+            "continue": "call_tool",
             "end": END,
         },
     )

-    #
-
-    workflow.add_edge("action", "agent")
+    # 调用完工具后,再次调用agent
+    workflow.add_edge("call_tool", "agent")

-    # Finally, we compile it!
-    # This compiles it into a LangChain Runnable,
-    # meaning you can use it as you would any other runnable
     return workflow.compile()
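The key change in this executor is the new `support_tools` branch in `call_tool`: when the configured model has no native function calling, the collected `ToolMessage` results are collapsed into a single `HumanMessage` (the Chinese string "工具调用结束,结果如下:" means "tool call finished, results as follows:") before being fed back to the agent node; the same hunk also renames the tool node from `action` to `call_tool` and rewires both edges. The sketch below is not the package's own code, only an illustration of that fallback under the stated assumption that `langchain_core` is installed; the helper name `wrap_tool_results` and the sample `fake_tool_results` value are made up.

```python
# Minimal sketch of the support_tools fallback (assumes langchain_core is installed).
from langchain_core.messages import HumanMessage, ToolMessage


def wrap_tool_results(tool_messages, support_tools: bool):
    # With native tool-call support, the ToolMessages are returned untouched.
    if support_tools:
        return tool_messages
    # Without it, fold every result into one ordinary user turn, mirroring the
    # repr(tool_messages) approach in the diff above.
    return [HumanMessage(content="Tool call finished, results:\n" + repr(tool_messages))]


fake_tool_results = [ToolMessage(content="42", tool_call_id="call_0")]  # hypothetical result
print(wrap_tool_results(fake_tool_results, support_tools=False)[0].content)
```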
pycoze/bot/agent/assistant.py
CHANGED
@@ -18,10 +18,11 @@ class Runnable(RunnableBinding):
         tools: Sequence[BaseTool],
         llm: LanguageModelLike,
         assistant_message: str,
+        support_tools: bool
     ) -> None:

         agent_executor = create_openai_func_call_agent_executor(
-            tools, llm, assistant_message
+            tools, llm, assistant_message, support_tools
         )
         agent_executor = agent_executor.with_config({"recursion_limit": 50})
         super().__init__(
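`Runnable` simply threads the new flag through to the executor factory. A hedged usage sketch follows, assuming pycoze 0.1.86 and `langchain_openai` are installed and mirroring the call site shown in the bot.py diff below; the API key, base URL, and model are placeholders, not project configuration.

```python
# Usage sketch only; placeholder credentials and model.
from langchain_openai import ChatOpenAI
from pycoze.bot.agent.assistant import Runnable

llm = ChatOpenAI(
    api_key="sk-placeholder",
    base_url="https://example.invalid/v1",
    model="gpt-4o-mini",
)
agent = Runnable(
    tools=[],                       # whatever BaseTool instances the bot loaded
    llm=llm,
    assistant_message="You are a helpful assistant.",
    support_tools=True,             # False routes tool output through a HumanMessage
)
```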
pycoze/bot/bot.py
CHANGED
@@ -9,6 +9,9 @@ from langchain_core.utils.function_calling import convert_to_openai_tool

 params = utils.arg.read_params_file()
 llm_file = params["appPath"] + "/JsonStorage/llm.json"
+with open(llm_file, "r", encoding="utf-8") as f:
+    cfg = json.load(f)
+support_tools = not cfg["model"].startswith("yi-")


 def load_role_setting(bot_setting_file: str):
@@ -29,28 +32,23 @@ def load_tools(bot_setting_file: str):
 def agent_chat(bot_setting_file, history):
     role_setting = load_role_setting(bot_setting_file)
     tools = load_tools(bot_setting_file)
-
-
-
-
-
-
-    temperature
-
-
-
-
-
-
-
-
-    )
+
+    chat = ChatOpenAI(
+        api_key=cfg["apiKey"],
+        base_url=cfg["baseURL"],
+        model=cfg["model"],
+        temperature=(
+            role_setting["temperature"] * 2
+            if cfg["model"].startswith("deepseek")
+            else role_setting["temperature"]
+        ),
+        stop_sequences=[
+            "tool▁calls▁end",
+            "tool▁call▁end",
+        ],  # 停用deepseek的工具调用标记,不然会虚构工具调用过程和结果
+    )
     prompt = role_setting["prompt"]
-    if (
-        cfg["model"].startswith("deepseek")
-        or cfg["model"].startswith("yi-")
-        and len(tools) > 0
-    ):
+    if cfg["model"].startswith("deepseek") or not support_tools and len(tools) > 0:
         prompt += """
如果需要调用工具,请使用以正确的json格式进行结尾(务必保证json格式正确,不要出现反斜杠未转义等问题):
```json
@@ -66,6 +64,7 @@ def agent_chat(bot_setting_file, history):
         tools=tools,
         llm=chat,
         assistant_message=prompt,
+        support_tools=support_tools,
     )
     return asyncio.run(run_agent(agent, history))

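bot.py now derives its behaviour from llm.json at import time: `support_tools` is disabled for `yi-*` models, deepseek models get a doubled temperature, and two deepseek tool-call marker strings are registered as stop sequences (the inline Chinese comment explains this is to keep the model from fabricating a tool-call transcript and results). Below is a small self-contained sketch of that detection logic; the sample JSON stands in for the real appPath/JsonStorage/llm.json and is an assumption, as are the helper names.

```python
# Sketch of the config-driven capability detection added in bot.py.
import json

# Placeholder config; the apiKey/baseURL/model keys match the diff above.
cfg = json.loads('{"apiKey": "sk-placeholder", "baseURL": "https://example.invalid/v1", "model": "yi-large"}')


def detect_support_tools(cfg: dict) -> bool:
    # yi-* models fall back to the HumanMessage wrapping shown earlier.
    return not cfg["model"].startswith("yi-")


def effective_temperature(cfg: dict, temperature: float) -> float:
    # deepseek models run at double the configured temperature.
    return temperature * 2 if cfg["model"].startswith("deepseek") else temperature


print(detect_support_tools(cfg))                               # False for yi-*
print(effective_temperature({"model": "deepseek-chat"}, 0.5))  # 1.0
```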
{pycoze-0.1.84.dist-info → pycoze-0.1.86.dist-info}/RECORD
CHANGED
@@ -6,13 +6,13 @@ pycoze/ai/__init__.py,sha256=Smivpb8qbRnzWkzKRe2IxsmKP5Dh8EvngDFdkD_DVLo,73
 pycoze/ai/comfyui.py,sha256=u75tZywkuXiOdm7XST2kBAaveJKpPvY_qTQr_TN9sXk,795
 pycoze/ai/vram_reserve.py,sha256=s55Cy-Q5mTq-k5oIPbAFwCfrjatjN0QTjQxW7WBTPZI,5738
 pycoze/bot/__init__.py,sha256=6HHMxDQVOyZM9dtSjQm9tjGnhj4h7CixD0JOvEwTi48,41
-pycoze/bot/bot.py,sha256=
+pycoze/bot/bot.py,sha256=ccsyiy_knL5pjRLgzhbUU9iXk01GZnNZFCzY3_bxZaQ,2952
 pycoze/bot/agent/__init__.py,sha256=YR9vpkEQn1e4937r_xFPJXUCPBEJ0SFzEQDBe2x3-YA,157
-pycoze/bot/agent/agent.py,sha256=
-pycoze/bot/agent/assistant.py,sha256=
+pycoze/bot/agent/agent.py,sha256=EzgV29nAb0Obc2yQaAa0xph2KSyQ5njtNxj9ZkwnVgA,3335
+pycoze/bot/agent/assistant.py,sha256=XHg3oA4plRzWXow9uwuEoNc2xMYZF8tKDfCaBGVadWI,1092
 pycoze/bot/agent/chat.py,sha256=kc0qgcrBSXdiMy49JwThZTV-0PAvzAhiUvbI5ILiSnU,571
 pycoze/bot/agent/agent_types/__init__.py,sha256=XNvKWq9REE5Wzjm0OZi3CKIQF2UZ9PZkeUuxgFJbrfc,128
-pycoze/bot/agent/agent_types/openai_func_call_agent.py,sha256=
+pycoze/bot/agent/agent_types/openai_func_call_agent.py,sha256=Bt4qyH3OsJftkluTlSOE0d0UVFYlY4OuXP9HdgarCHo,7505
 pycoze/ui/__init__.py,sha256=7xAfL2lfG7-jllPJEZUJO89xUE9sNzvo1y0WmBswjBI,458
 pycoze/ui/base.py,sha256=SCXVDK7PpMaBv6ovvabHcfRq_d2AWM0BRyxpNhuJN5A,1285
 pycoze/ui/color.py,sha256=cT9Ib8uNzkOKxyW0IwVj46o4LwdB1xgNCj1_Rou9d_4,854
@@ -21,8 +21,8 @@ pycoze/ui/ui_def.py,sha256=UhhU_yB3GV9ISbvTWT48hsHPHI250BhMILh6bu5Uioo,4206
 pycoze/utils/__init__.py,sha256=TNJhFfY7JYdLlzuP9GvgxfNXUtbgH_NUUJSqHXCxJn4,78
 pycoze/utils/arg.py,sha256=kA3KBQzXc2WlH5XbF8kfikfpqljiKaW7oY_GE4Qyffc,753
 pycoze/utils/text_or_file.py,sha256=gpxZVWt2DW6YiEg_MnMuwg36VNf3TX383QD_1oZNB0Y,551
-pycoze-0.1.
-pycoze-0.1.
-pycoze-0.1.
-pycoze-0.1.
-pycoze-0.1.
+pycoze-0.1.86.dist-info/LICENSE,sha256=QStd_Qsd0-kAam_-sOesCIp_uKrGWeoKwt9M49NVkNU,1090
+pycoze-0.1.86.dist-info/METADATA,sha256=pQEipyeUZJ2PZCS7PUubP0gox0b2CMmJwWZk840YVTY,719
+pycoze-0.1.86.dist-info/WHEEL,sha256=eOLhNAGa2EW3wWl_TU484h7q1UNgy0JXjjoqKoxAAQc,92
+pycoze-0.1.86.dist-info/top_level.txt,sha256=76dPeDhKvOCleL3ZC5gl1-y4vdS1tT_U1hxWVAn7sFo,7
+pycoze-0.1.86.dist-info/RECORD,,
{pycoze-0.1.84.dist-info → pycoze-0.1.86.dist-info}/LICENSE
File without changes
{pycoze-0.1.84.dist-info → pycoze-0.1.86.dist-info}/WHEEL
File without changes
{pycoze-0.1.84.dist-info → pycoze-0.1.86.dist-info}/top_level.txt
File without changes