pycoze 0.1.240__py3-none-any.whl → 0.1.242__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pycoze/bot/agent/agent.py +3 -2
- pycoze/bot/agent/agent_types/openai_func_call_agent.py +52 -2
- pycoze/bot/agent/assistant.py +2 -1
- pycoze/bot/agent_chat.py +30 -12
- pycoze/bot/bot.py +1 -0
- {pycoze-0.1.240.dist-info → pycoze-0.1.242.dist-info}/METADATA +1 -1
- {pycoze-0.1.240.dist-info → pycoze-0.1.242.dist-info}/RECORD +10 -10
- {pycoze-0.1.240.dist-info → pycoze-0.1.242.dist-info}/LICENSE +0 -0
- {pycoze-0.1.240.dist-info → pycoze-0.1.242.dist-info}/WHEEL +0 -0
- {pycoze-0.1.240.dist-info → pycoze-0.1.242.dist-info}/top_level.txt +0 -0
pycoze/bot/agent/agent.py
CHANGED
@@ -13,7 +13,7 @@ from langchain_core.agents import AgentFinish
 from .agent_types.const import HumanToolString
 
 
-async def run_agent(agent, inputs: list):
+async def run_agent(agent, inputs: list, tool_compatibility_mode: bool):
     exist_ids = set()
     content_list = []
     async for event in agent.astream_events(inputs, version="v2"):
@@ -29,7 +29,7 @@ async def run_agent(agent, inputs: list):
             input_list = event["data"]["input"]
             for msg in input_list:
                 if isinstance(msg, HumanMessage) or isinstance(msg, SystemMessage):
-                    if not msg.content.startswith(HumanToolString):
+                    if not tool_compatibility_mode or not msg.content.startswith(HumanToolString):
                         content_list = [] # 防止内容重复
                 if isinstance(msg, AIMessage) and not isinstance(
                     msg, AIMessageChunk
@@ -84,6 +84,7 @@ if __name__ == "__main__":
         tools=[python_tool],
         llm=chat,
         assistant_message="请以女友的口吻回答,输出不小于100字,可以随便说点其他的",
+        tool_compatibility_mode=False,
     )
 
     inputs = [HumanMessage(content="计算根号7+根号88")]
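The `__main__` example above now passes the new flag explicitly, and so must every other caller of `run_agent`. A minimal, self-contained sketch of the updated wiring, assuming `Runnable` and `run_agent` are importable the same way agent_chat.py imports them; the model name, API key, empty tool list, and assistant message are illustrative placeholders, not values from the package:

```python
import asyncio

from langchain_core.messages import HumanMessage
from langchain_openai import ChatOpenAI
from pycoze.bot.agent import Runnable, run_agent

# Placeholder model and credentials; in the package these come from llm.json.
chat = ChatOpenAI(model="gpt-4o-mini", api_key="sk-...")

agent = Runnable(
    agent_execution_mode="FuncCall",
    tools=[],  # e.g. [python_tool] in the original __main__ example
    llm=chat,
    assistant_message="You are a helpful assistant.",
    tool_compatibility_mode=False,  # new parameter in 0.1.242
)

inputs = [HumanMessage(content="calculate sqrt(7) + sqrt(88)")]
result = asyncio.run(run_agent(agent, inputs, tool_compatibility_mode=False))
```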
pycoze/bot/agent/agent_types/openai_func_call_agent.py
CHANGED
@@ -34,13 +34,48 @@ def get_tools(last_message):
     if "tool_calls" in last_message.additional_kwargs:
         return last_message.additional_kwargs["tool_calls"]
     else:
-
+        tool_calls = None
+        if '"name"' in last_message.content and '"parameters":' in last_message.content:
+            print("name 和 paremeters 模式")
+            all_json = get_all_markdown_json(last_message.content)
+            tool_calls = []
+            for tool_call in all_json:
+                if "name" not in tool_call or "parameters" not in tool_call:
+                    return "end"
+                tool_call["arguments"] = json.dumps(tool_call["parameters"])
+                tool_call.pop("parameters")
+                tool_calls.append(
+                    {
+                        "function": tool_call,
+                        "id": random.randint(0, 1000000),
+                    }
+                )
+        if "<|tool▁sep|>" in last_message.content:
+            print("deepseek的bug: <|tool▁sep|> 模式")
+            name = (
+                last_message.content.split("<|tool▁sep|>")[1].split("```")[0].strip()
+            )
+            all_json = get_all_markdown_json(last_message.content)
+            tool_calls = []
+            for argument in all_json:
+                tool_calls.append(
+                    {
+                        "function": {
+                            "name": name,
+                            "arguments": json.dumps(argument),
+                        },
+                        "id": random.randint(0, 1000000),
+                    }
+                )
+
+        return tool_calls
 
 
 def create_openai_func_call_agent_executor(
     tools: list[BaseTool],
     llm: LanguageModelLike,
     system_message: str,
+    tool_compatibility_mode: str,
     **kwargs
 ):
 
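The new `else` branch recovers tool calls from models that cannot emit native `tool_calls` and instead print a fenced JSON block containing `name` and `parameters`. The package's own `get_all_markdown_json` helper is not shown in this diff, so the sketch below uses a hypothetical regex-based stand-in to illustrate how such payloads are mapped onto OpenAI-style `tool_calls` entries:

```python
import json
import random
import re

FENCE = "```"  # written this way only to avoid clashing with the surrounding formatting


def get_all_markdown_json_sketch(content: str) -> list[dict]:
    """Hypothetical stand-in for pycoze's get_all_markdown_json helper:
    extract every fenced JSON block from a reply and parse it."""
    pattern = FENCE + r"json\s*(.*?)" + FENCE
    parsed = []
    for block in re.findall(pattern, content, re.DOTALL):
        try:
            parsed.append(json.loads(block))
        except json.JSONDecodeError:
            continue  # skip malformed blocks rather than failing the whole turn
    return parsed


# Mirror of the fallback in get_tools(): map {"name", "parameters"} payloads
# printed as plain text onto OpenAI-style tool_calls entries.
reply = (
    "I will call a tool:\n"
    + FENCE + "json\n"
    + '{"name": "python", "parameters": {"code": "1 + 1"}}\n'
    + FENCE
)
tool_calls = []
for item in get_all_markdown_json_sketch(reply):
    if "name" in item and "parameters" in item:
        tool_calls.append(
            {
                "function": {"name": item["name"], "arguments": json.dumps(item["parameters"])},
                "id": random.randint(0, 1000000),
            }
        )
print(tool_calls)
```

The deepseek `<|tool▁sep|>` branch follows the same pattern, except the function name is recovered from the separator token rather than from the JSON payload itself.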
@@ -75,6 +110,8 @@ def create_openai_func_call_agent_executor(
                 return 'continue'
         return 'end'
 
+
+
     # Define the function to execute tools
     async def call_tool(messages):
         actions: list[ToolInvocation] = []
@@ -84,6 +121,14 @@ def create_openai_func_call_agent_executor(
         for tool_call in get_tools(last_message):
            function = tool_call["function"]
            function_name = function["name"]
+            if function_name == "a_delay_function":
+                return [
+                    ToolMessage(
+                        tool_call_id=tool_call["id"],
+                        content="a_delay_function只是一个占位符,请忽略重新调用工具",
+                        additional_kwargs={"name": tool_call["function"]["name"]},
+                    )
+                ]
 
            _tool_input = json.loads(function["arguments"] or "{}")
            # We construct an ToolInvocation from the function_call
@@ -111,7 +156,12 @@ def create_openai_func_call_agent_executor(
                additional_kwargs={"name": tool_call["function"]["name"]},
            )
            tool_messages.append(message)
-
+        if tool_compatibility_mode:
+            # HumanMessage
+            tool_msgs_str = repr(tool_messages)
+            tool_messages = [
+                HumanMessage(content=HumanToolString + tool_msgs_str)
+            ]
         return tool_messages
 
     workflow = MessageGraph()
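When `tool_compatibility_mode` is on, tool results are not returned as `ToolMessage` objects at all; they are collapsed into a single `HumanMessage` prefixed with `HumanToolString`, which is exactly the prefix the updated `run_agent` guard in agent.py checks before clearing its accumulated content. A small sketch, assuming a placeholder value for `HumanToolString` (the real constant lives in agent_types/const.py and is not shown in this diff):

```python
from langchain_core.messages import HumanMessage, ToolMessage

# Placeholder value; the real constant is defined in pycoze/bot/agent/agent_types/const.py.
HumanToolString = "Tool call results: "

tool_messages = [
    ToolMessage(
        tool_call_id="1",
        content="2",
        additional_kwargs={"name": "python"},
    )
]

tool_compatibility_mode = True
if tool_compatibility_mode:
    # Models without native tool-call support receive the results as a
    # specially prefixed human message instead of ToolMessage objects.
    tool_messages = [HumanMessage(content=HumanToolString + repr(tool_messages))]

print(tool_messages[0].content)
```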
pycoze/bot/agent/assistant.py
CHANGED
@@ -18,10 +18,11 @@ class Runnable(RunnableBinding):
         tools: Sequence[BaseTool],
         llm: LanguageModelLike,
         assistant_message: str,
+        tool_compatibility_mode: bool
     ) -> None:
 
         agent_executor = create_openai_func_call_agent_executor(
-            tools, llm, assistant_message
+            tools, llm, assistant_message, tool_compatibility_mode
         )
         agent_executor = agent_executor.with_config({"recursion_limit": 50})
         super().__init__(
pycoze/bot/agent_chat.py
CHANGED
@@ -1,16 +1,16 @@
 import json
 from langchain_openai import ChatOpenAI
-from .agent import run_agent, Runnable, CHAT_DATA, clear_chat_data
+from .agent import run_agent, Runnable, output, CHAT_DATA, clear_chat_data
 import asyncio
 from pycoze import utils
 from pycoze.reference.bot import ref_bot
 from pycoze.reference.tool import ref_tools
 from pycoze.reference.workflow import ref_workflow
+from langchain_core.utils.function_calling import convert_to_openai_tool
 import os
 
 cfg = utils.read_json_file("llm.json")
 
-
 def load_role_setting(bot_setting_file: str):
     with open(bot_setting_file, "r", encoding="utf-8") as f:
         return json.load(f)
@@ -42,10 +42,10 @@ async def check_interrupt_file(interval, interrupt_file,agent_task):
             agent_task.cancel()
             break
 
-async def run_with_interrupt_check(agent, history, interrupt_file, check_interval=1):
+async def run_with_interrupt_check(agent, history, tool_compatibility_mode, interrupt_file, check_interval=1):
     clear_chat_data()
     try:
-        agent_task = asyncio.create_task(run_agent(agent, history))
+        agent_task = asyncio.create_task(run_agent(agent, history, tool_compatibility_mode))
         check_task = asyncio.create_task(check_interrupt_file(check_interval, interrupt_file, agent_task))
         result = await agent_task
         return result
@@ -74,25 +74,43 @@ async def agent_chat(bot_setting_file, history):
         api_key=cfg["apiKey"],
         base_url=cfg["baseURL"],
         model=cfg["model"],
-        temperature=(
-
-
-
-
+        temperature=(
+            role_setting["temperature"] * 2
+            if cfg["model"].startswith("deepseek")
+            else role_setting["temperature"]
+        ),
+        stop_sequences=[
+            "tool▁calls▁end",
+            "tool▁call▁end",
+        ], # 停用deepseek的工具调用标记,不然会虚构工具调用过程和结果
     )
     prompt = role_setting["prompt"]
-
+    if (
+        cfg["toolCompatibilityMode"] and len(abilities) > 0
+    ):
+        prompt += """
+作为一个AI,你如果不确定结果,请务必使用工具查询。
+你可以通过下面的方式使用工具,并耐心等待工具返回结果。
+如果你需要调用工具,请使用以正确markdown中的json代码格式进行结尾(务必保证json格式正确,不要出现反斜杠未转义等问题):
+```json
+{"name": 函数名, "parameters": 参数词典}
+```
+"""
+        prompt += "\nAvailable functions:\n"
+        for t in abilities:
+            prompt += f"\n```json\n{json.dumps(convert_to_openai_tool(t))}\n```"
     agent = Runnable(
         agent_execution_mode="FuncCall",
         tools=abilities,
         llm=chat,
         assistant_message=prompt,
+        tool_compatibility_mode=cfg["toolCompatibilityMode"],
     )
     params = utils.read_params_file()
     if "interruptFile" in params:
         interrupt_file_path = params["interruptFile"]
-        result = await run_with_interrupt_check(agent, history,interrupt_file_path)
+        result = await run_with_interrupt_check(agent, history, cfg["toolCompatibilityMode"], interrupt_file_path)
     else:
-        result = await run_agent(agent, history)
+        result = await run_agent(agent, history, cfg["toolCompatibilityMode"])
     return result
 
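In compatibility mode the prompt itself has to carry the tool schemas, which is what the new `convert_to_openai_tool` import is for: each ability is serialized to its OpenAI function-calling JSON and appended to the system prompt. A minimal sketch with a stand-in tool in place of the abilities resolved from ref_bot/ref_tools/ref_workflow, and with a loose English rendering of the Chinese instructions added in the diff:

```python
import json

from langchain_core.tools import tool
from langchain_core.utils.function_calling import convert_to_openai_tool

FENCE = "```"  # written this way only to avoid clashing with the surrounding formatting


@tool
def add(a: int, b: int) -> int:
    """Add two integers."""
    return a + b


abilities = [add]  # stand-in for the abilities loaded from the bot setting
prompt = "You are a helpful assistant."
tool_compatibility_mode = True  # stand-in for cfg["toolCompatibilityMode"]

if tool_compatibility_mode and len(abilities) > 0:
    # Roughly: "If you are unsure of a result, query a tool and wait for its
    # output; to call a tool, end your reply with a correctly formatted JSON
    # code block of the form {"name": ..., "parameters": ...}."
    prompt += (
        "\nIf you are unsure of a result, query a tool and wait for its output. "
        'To call a tool, end your reply with a JSON code block of the form '
        '{"name": <function name>, "parameters": <argument dict>}.'
    )
    prompt += "\nAvailable functions:\n"
    for t in abilities:
        prompt += f"\n{FENCE}json\n{json.dumps(convert_to_openai_tool(t))}\n{FENCE}"

print(prompt)
```

The same hunk also doubles the configured temperature for deepseek models and adds the `tool▁calls▁end` / `tool▁call▁end` stop sequences; per the inline comment, this disables deepseek's tool-call markers so the model cannot fabricate a tool-call transcript and its results.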
pycoze/bot/bot.py
CHANGED
{pycoze-0.1.240.dist-info → pycoze-0.1.242.dist-info}/RECORD
CHANGED
@@ -6,15 +6,15 @@ pycoze/ai/llm/chat.py,sha256=izriC7nCp5qeJRqcUVQBVqTHiH6MJS77ROzGBJufdNI,5133
 pycoze/ai/llm/text_to_image_prompt.py,sha256=0bx2C_YRvjAo7iphHGp1-pmGKsKqwur7dM0t3SiA8kA,3398
 pycoze/ai/llm/think.py,sha256=sUgTBdGzcZtL3r-Wx8M3lDuVUmDVz8g3qC0VU8uiKAI,5143
 pycoze/bot/__init__.py,sha256=6HHMxDQVOyZM9dtSjQm9tjGnhj4h7CixD0JOvEwTi48,41
-pycoze/bot/agent_chat.py,sha256=
-pycoze/bot/bot.py,sha256=
+pycoze/bot/agent_chat.py,sha256=LPJy_eEQ8aYgXbZRNMs9ndbsA3uakzu9UnVw9dt0Bes,4383
+pycoze/bot/bot.py,sha256=fmcgnWcYTFeOxDuAwqWhFhOJzv4mAyJGLqbod-nkhJE,862
 pycoze/bot/agent/__init__.py,sha256=3wE8_FFQS8j2BY-g9Cr-onV0POEvDRZaw_NCzpqrNus,265
-pycoze/bot/agent/agent.py,sha256=
-pycoze/bot/agent/assistant.py,sha256=
+pycoze/bot/agent/agent.py,sha256=chUgNZh6v6375L_Y2dBEAaLJyfmw4SygYjVVrDN8VIk,3548
+pycoze/bot/agent/assistant.py,sha256=3iLxnRvf_ia0cP-FHK5Fv4ylltlnzPq1KscRCFYqjkc,1147
 pycoze/bot/agent/chat.py,sha256=mubOCAHvA6VtyE6N40elI6KrP6A69uB_G6ihE3G_Vi4,860
 pycoze/bot/agent/agent_types/__init__.py,sha256=zmU2Kmrv5mCdfg-QlPn2H6pWxbGeq8s7YTqLhpzJC6k,179
 pycoze/bot/agent/agent_types/const.py,sha256=BfUKPrhAHREoMLHuFNG2bCIEkC1-f7K0LEqNg4RwiRE,70
-pycoze/bot/agent/agent_types/openai_func_call_agent.py,sha256=
+pycoze/bot/agent/agent_types/openai_func_call_agent.py,sha256=SnEm5MODHn2uMsaMNqgzULM_91vqLHC0TU6ovwCOqLU,6675
 pycoze/reference/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 pycoze/reference/bot.py,sha256=BDflTV3zYoZqWnJpD5lMM_1vU_5b20M3XiFt1p-RHWM,2427
 pycoze/reference/lib.py,sha256=0xQJTLTHedGzQBsjuTFNBVqYc4-8Yl65gGCrAhWyOX8,2155
@@ -29,8 +29,8 @@ pycoze/utils/__init__.py,sha256=Gi5EnrWZGMD2JRejgV4c_VLCXyvA2wwBFI_niDF5MUE,110
 pycoze/utils/arg.py,sha256=GtfGbMTMdaK75Fwh6MpUe1pCA5X6Ep4LFG7a72YrzjI,525
 pycoze/utils/env.py,sha256=W04lhvTHhAAC6EldP6kk2xrctqtu8K6kl1vDLZDNeh8,561
 pycoze/utils/text_or_file.py,sha256=gpxZVWt2DW6YiEg_MnMuwg36VNf3TX383QD_1oZNB0Y,551
-pycoze-0.1.
-pycoze-0.1.
-pycoze-0.1.
-pycoze-0.1.
-pycoze-0.1.
+pycoze-0.1.242.dist-info/LICENSE,sha256=QStd_Qsd0-kAam_-sOesCIp_uKrGWeoKwt9M49NVkNU,1090
+pycoze-0.1.242.dist-info/METADATA,sha256=DfYIZnLI-LsmIE2Za_6tgTSCc6L2__VT81OYsMKZ-Hg,699
+pycoze-0.1.242.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
+pycoze-0.1.242.dist-info/top_level.txt,sha256=76dPeDhKvOCleL3ZC5gl1-y4vdS1tT_U1hxWVAn7sFo,7
+pycoze-0.1.242.dist-info/RECORD,,
{pycoze-0.1.240.dist-info → pycoze-0.1.242.dist-info}/LICENSE
File without changes
{pycoze-0.1.240.dist-info → pycoze-0.1.242.dist-info}/WHEEL
File without changes
{pycoze-0.1.240.dist-info → pycoze-0.1.242.dist-info}/top_level.txt
File without changes