pycoze 0.1.308__py3-none-any.whl → 0.1.309__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pycoze/bot/__init__.py +1 -1
- pycoze/bot/chat.py +98 -0
- pycoze/bot/chat_base.py +243 -0
- pycoze/bot/lib.py +235 -0
- pycoze/bot/{agent/chat.py → message.py} +0 -1
- pycoze/bot/tools.py +279 -0
- pycoze/reference/lib.py +1 -1
- {pycoze-0.1.308.dist-info → pycoze-0.1.309.dist-info}/METADATA +1 -1
- {pycoze-0.1.308.dist-info → pycoze-0.1.309.dist-info}/RECORD +12 -16
- pycoze/bot/agent/__init__.py +0 -5
- pycoze/bot/agent/agent.py +0 -95
- pycoze/bot/agent/agent_types/__init__.py +0 -4
- pycoze/bot/agent/agent_types/const.py +0 -1
- pycoze/bot/agent/agent_types/openai_func_call_agent.py +0 -181
- pycoze/bot/agent/assistant.py +0 -35
- pycoze/bot/agent_chat.py +0 -110
- pycoze/bot/bot.py +0 -23
- {pycoze-0.1.308.dist-info → pycoze-0.1.309.dist-info}/LICENSE +0 -0
- {pycoze-0.1.308.dist-info → pycoze-0.1.309.dist-info}/WHEEL +0 -0
- {pycoze-0.1.308.dist-info → pycoze-0.1.309.dist-info}/top_level.txt +0 -0
pycoze/bot/agent_chat.py
DELETED
@@ -1,110 +0,0 @@
|
|
1
|
-
import json
|
2
|
-
from langchain_openai import ChatOpenAI
|
3
|
-
from .agent import run_agent, Runnable,CHAT_DATA, clear_chat_data
|
4
|
-
import asyncio
|
5
|
-
from pycoze import utils
|
6
|
-
from pycoze.reference.bot import ref_bot
|
7
|
-
from pycoze.reference.tool import ref_tools
|
8
|
-
from pycoze.reference.workflow import ref_workflow
|
9
|
-
from langchain_core.utils.function_calling import convert_to_openai_tool
|
10
|
-
import os
|
11
|
-
|
12
|
-
cfg = utils.read_json_file("llm.json")
|
13
|
-
|
14
|
-
def load_bot_setting(bot_setting_file: str):
    """Read a bot-setting JSON file and return the parsed settings object."""
    with open(bot_setting_file, "r", encoding="utf-8") as fp:
        return json.loads(fp.read())
|
17
|
-
|
18
|
-
|
19
|
-
def load_abilities(bot_setting_file: str):
    """Collect the agent tools referenced by a bot-setting file.

    Resolves every referenced bot, tool and workflow id into an
    agent-tool object. Bots/workflows that fail to resolve (the ref_*
    helper returns a falsy value) are skipped silently.
    """
    # Reuse the shared loader instead of re-implementing the file read.
    bot_setting = load_bot_setting(bot_setting_file)

    abilities = []
    # Missing keys mean "no references of that kind" rather than KeyError.
    for bot_id in bot_setting.get("bots", []):
        bot = ref_bot(bot_id, as_agent_tool=True)
        if bot:
            abilities.append(bot)
    for tool_id in bot_setting.get("tools", []):
        abilities.extend(ref_tools(tool_id, as_agent_tool=True))
    for workflow_id in bot_setting.get("workflows", []):
        workflow = ref_workflow(workflow_id, as_agent_tool=True)
        if workflow:
            abilities.append(workflow)
    return abilities
|
35
|
-
|
36
|
-
|
37
|
-
async def check_interrupt_file(interval, interrupt_file, agent_task):
    """Poll for *interrupt_file* every *interval* seconds.

    When the file appears it is deleted, *agent_task* is cancelled, and
    the polling stops. Sleeps before each check, so an already-present
    file is only noticed after the first interval.
    """
    found = False
    while not found:
        await asyncio.sleep(interval)
        found = os.path.exists(interrupt_file)
    os.remove(interrupt_file)
    agent_task.cancel()
|
44
|
-
|
45
|
-
async def run_with_interrupt_check(agent, history, tool_compatibility_mode, interrupt_file, check_interval=1):
    """Run the agent while watching *interrupt_file*; cancel if it appears.

    Returns the agent's result, the partial CHAT_DATA['info'] when the run
    is interrupted, or None on unexpected errors (best-effort: the
    traceback is printed, not re-raised).
    """
    clear_chat_data()
    # Pre-bind so the finally-block never hits a NameError when
    # create_task itself raises before an assignment happens.
    agent_task = None
    check_task = None
    try:
        agent_task = asyncio.create_task(run_agent(agent, history, tool_compatibility_mode))
        check_task = asyncio.create_task(check_interrupt_file(check_interval, interrupt_file, agent_task))
        return await agent_task
    except asyncio.CancelledError:
        # Interrupted by the watcher: return whatever partial output exists.
        return CHAT_DATA['info']
    except Exception:
        import traceback
        print(traceback.format_exc())
        return None  # deliberate best-effort: signal failure with None
    finally:
        if agent_task is not None and not agent_task.done():
            agent_task.cancel()
        # Ensure the watcher task is also torn down, even on exceptions.
        if check_task is not None:
            if not check_task.done():
                check_task.cancel()
            try:
                await check_task
            except asyncio.CancelledError:
                pass  # ignore the cancellation error
|
68
|
-
|
69
|
-
async def agent_chat(bot_setting_file, history):
    """Run one agent chat turn over the given message *history*.

    Loads the bot settings and referenced abilities, builds the LLM and
    agent, then runs the agent — polling an interrupt file when the
    process params provide an "interruptFile" path.

    Returns the agent's final answer (or a partial result / None on
    interrupt or error, as produced by run_with_interrupt_check).
    """
    bot_setting = load_bot_setting(bot_setting_file)
    # NOTE(review): load_abilities re-reads the same settings file.
    abilities = load_abilities(bot_setting_file)

    chat = ChatOpenAI(
        api_key=cfg["apiKey"],
        base_url=cfg["baseURL"],
        model=cfg["model"],
        temperature=bot_setting["temperature"],
        stop_sequences=[
            "tool▁calls▁end",
            "tool▁call▁end",
        ],  # otherwise the model fabricates tool-call processes and results
    )
    prompt = bot_setting["prompt"]
    if cfg["toolCompatibilityMode"] and len(abilities) > 0:
        # Compatibility mode: the model lacks native function calling, so
        # tool usage is described in the prompt as a JSON code-block protocol.
        prompt += """
作为一个AI,你如果不确定结果,请务必使用工具查询。
你可以通过下面的方式使用工具,并耐心等待工具返回结果。
如果你需要调用工具,请使用以正确markdown中的json代码格式进行结尾(务必保证json格式正确,不要出现反斜杠未转义等问题):
```json
{"name": 函数名, "parameters": 参数词典}
```
"""
        prompt += "\nAvailable functions:\n"
        for t in abilities:
            prompt += f"\n```json\n{json.dumps(convert_to_openai_tool(t))}\n```"
    agent = Runnable(
        agent_execution_mode="FuncCall",
        tools=abilities,
        llm=chat,
        assistant_message=prompt,
        tool_compatibility_mode=cfg["toolCompatibilityMode"],
    )
    params = utils.params
    if "interruptFile" in params:
        interrupt_file_path = params["interruptFile"]
        result = await run_with_interrupt_check(agent, history, cfg["toolCompatibilityMode"], interrupt_file_path)
    else:
        result = await run_agent(agent, history, cfg["toolCompatibilityMode"])
    return result
|
110
|
-
|
pycoze/bot/bot.py
DELETED
@@ -1,23 +0,0 @@
|
|
1
|
-
from langchain_core.messages import HumanMessage, AIMessage
|
2
|
-
import json
|
3
|
-
from .agent import INPUT_MESSAGE, output, CHAT_DATA, clear_chat_data
|
4
|
-
from .agent_chat import agent_chat
|
5
|
-
import asyncio
|
6
|
-
|
7
|
-
|
8
|
-
def chat(bot_setting_file: str):
    """Interactive loop: read INPUT_MESSAGE-prefixed JSON lines from stdin,
    run one agent turn per message, and emit each assistant reply."""
    history = []
    while True:
        line = input()
        if not line.startswith(INPUT_MESSAGE):
            raise ValueError("Invalid message")
        payload = json.loads(line[len(INPUT_MESSAGE) :])
        history.append(HumanMessage(payload["content"]))
        reply = asyncio.run(agent_chat(bot_setting_file, history))
        output("assistant", reply)
        history.append(AIMessage(reply))
|
19
|
-
|
20
|
-
|
21
|
-
def get_chat_response(bot_setting_file: str, input_text: str):
    """Run a single-turn chat for *input_text* and return the reply."""
    return asyncio.run(agent_chat(bot_setting_file, [HumanMessage(input_text)]))
|
File without changes
|
File without changes
|
File without changes
|