pycoze 0.1.35__py3-none-any.whl → 0.1.36__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
pycoze/bot/agent/agent.py CHANGED
@@ -3,13 +3,12 @@ import json
 from langchain_openai import ChatOpenAI
 from .chat import info
 from .assistant import Runnable
-from langchain_core.messages import HumanMessage, AIMessage,AIMessageChunk
+from langchain_core.messages import HumanMessage, AIMessage, AIMessageChunk
 from langchain_core.agents import AgentFinish


-
 async def run_agent(agent, inputs: list):
-    if agent.agent_execution_mode == 'FuncCall':
+    if agent.agent_execution_mode == "FuncCall":
         exist_ids = set()
         content_list = []
         async for event in agent.astream_events(inputs, version="v2"):
@@ -22,9 +21,17 @@ async def run_agent(agent, inputs: list):
             elif kind == "on_chain_start":
                 data = event["data"]
                 if "input" in data:
-                    input_list = data["input"] if isinstance(data["input"], list) else [data["input"]]
+                    input_list = (
+                        data["input"]
+                        if isinstance(data["input"], list)
+                        else [data["input"]]
+                    )
+                    if len(input_list) == 0:
+                        continue
                     msg = input_list[-1]
-                    if isinstance(msg, AIMessage) and not isinstance(msg, AIMessageChunk):
+                    if isinstance(msg, AIMessage) and not isinstance(
+                        msg, AIMessageChunk
+                    ):
                         if "tool_calls" in msg.additional_kwargs:
                             tool_calls = msg.additional_kwargs["tool_calls"]
                             for t in tool_calls:
@@ -32,41 +39,49 @@ async def run_agent(agent, inputs: list):
                                     continue
                                 exist_ids.add(t["id"])
                                 tool = t["function"]["name"]
-                                info("assistant", f"[调用工具:{tool}]")
+                                info("assistant", f"\n[调用工具:{tool}]\n\n")

         return "".join(content_list)
     else:
-        assert agent.agent_execution_mode == 'ReAct'
-        inputs_msg = {'input': inputs[-1].content,'chat_history': inputs[:-1]}
+        assert agent.agent_execution_mode == "ReAct"
+        inputs_msg = {"input": inputs[-1].content, "chat_history": inputs[:-1]}
         use_tools = []
         async for event in agent.astream_events(inputs_msg, version="v2"):
             kind = event["event"]
             result = None
             if kind == "on_chain_end":
-                if 'data' in event:
-                    if 'output' in event['data']:
-                        output = event['data']['output']
-                        if 'agent_outcome' in output and "input" in output:
-                            outcome = output['agent_outcome']
+                if "data" in event:
+                    if "output" in event["data"]:
+                        output = event["data"]["output"]
+                        if "agent_outcome" in output and "input" in output:
+                            outcome = output["agent_outcome"]
                             if isinstance(outcome, AgentFinish):
-                                result = outcome.return_values['output']
+                                result = outcome.return_values["output"]
             elif kind == "on_tool_start":
-                use_tools.append(event['name'])
-                info("assistant", f"[调用工具:{use_tools}]")
+                use_tools.append(event["name"])
+                info("assistant", f"\n[调用工具:{use_tools}]\n\n")
         return result


 if __name__ == "__main__":
     from langchain_experimental.tools import PythonREPLTool
+
     llm_file = r"C:\Users\aiqqq\AppData\Roaming\pycoze\JsonStorage\llm.json"
     with open(llm_file, "r", encoding="utf-8") as f:
         cfg = json.load(f)
-    chat = ChatOpenAI(api_key=cfg["apiKey"], base_url=cfg['baseURL'], model=cfg["model"], temperature=0)
+    chat = ChatOpenAI(
+        api_key=cfg["apiKey"],
+        base_url=cfg["baseURL"],
+        model=cfg["model"],
+        temperature=0,
+    )
     python_tool = PythonREPLTool()
-    agent = Runnable(agent_execution_mode='FuncCall', # 'FuncCall' or 'ReAct',大模型支持FuncCall的话就用FuncCall
-                     tools=[python_tool],
-                     llm=chat,
-                     assistant_message="请以女友的口吻回答,输出不小于100字,可以随便说点其他的",)
+    agent = Runnable(
+        agent_execution_mode="FuncCall",  # 'FuncCall' or 'ReAct',大模型支持FuncCall的话就用FuncCall
+        tools=[python_tool],
+        llm=chat,
+        assistant_message="请以女友的口吻回答,输出不小于100字,可以随便说点其他的",
+    )

     inputs = [HumanMessage(content="计算根号7+根号88")]
     print(asyncio.run(run_agent(agent, inputs)))
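Note: the agent.py changes are mostly Black-style reformatting plus two behavioral fixes: an empty-input guard before indexing input_list[-1], and surrounding the "[调用工具:...]" ("calling tool: ...") notices with newlines. Below is a minimal standalone sketch of the guard and the tool-call de-duplication, using plain dicts instead of LangChain messages; announce_tool_calls and the sample events are illustrative, not pycoze API.

def announce_tool_calls(events):
    exist_ids = set()
    announced = []
    for event in events:
        if event["event"] != "on_chain_start":
            continue
        data = event.get("data", {})
        if "input" not in data:
            continue
        input_list = data["input"] if isinstance(data["input"], list) else [data["input"]]
        if len(input_list) == 0:  # guard added in 0.1.36: skip events with no input
            continue
        msg = input_list[-1]
        for t in msg.get("tool_calls", []):
            if t["id"] in exist_ids:  # announce each tool call only once
                continue
            exist_ids.add(t["id"])
            announced.append(t["function"]["name"])
    return announced


events = [
    {"event": "on_chain_start", "data": {"input": []}},  # would previously hit input_list[-1]
    {
        "event": "on_chain_start",
        "data": {"input": [{"tool_calls": [{"id": 1, "function": {"name": "python_repl"}}]}]},
    },
]
print(announce_tool_calls(events))  # ['python_repl']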
pycoze/bot/agent/agent_types/openai_func_call_agent.py CHANGED
@@ -8,7 +8,26 @@ from langchain_core.messages import SystemMessage, ToolMessage
 from langgraph.graph import END
 from langgraph.graph.message import MessageGraph
 from langgraph.prebuilt import ToolExecutor, ToolInvocation
-from typing import Any
+import re
+import json
+import random
+
+
+def get_all_markdown_json(content):
+    # Find all markdown json blocks
+    markdown_json_blocks = re.findall(r"```json(.*?)```", content, re.DOTALL)
+    json_list = []
+
+    for block in markdown_json_blocks:
+        try:
+            # Remove any leading/trailing whitespace and parse the JSON
+            json_data = json.loads(block.strip())
+            json_list.append(json_data)
+        except json.JSONDecodeError:
+            # If the block is not valid JSON, skip it
+            continue
+
+    return json_list


 def create_openai_func_call_agent_executor(
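Note: get_all_markdown_json is self-contained, so it can be exercised directly. A small usage sketch follows; the reply string is made up for illustration, and invalid JSON blocks are silently skipped.

import re
import json


def get_all_markdown_json(content):
    # Same helper as added above: pull every ```json ... ``` block and parse it.
    markdown_json_blocks = re.findall(r"```json(.*?)```", content, re.DOTALL)
    json_list = []
    for block in markdown_json_blocks:
        try:
            json_list.append(json.loads(block.strip()))
        except json.JSONDecodeError:
            continue
    return json_list


reply = (
    "Let me call the tool:\n"
    '```json\n{"name": "python_repl", "parameters": {"code": "print(1)"}}\n```\n'
    "```json\nnot valid json\n```"
)
print(get_all_markdown_json(reply))
# [{'name': 'python_repl', 'parameters': {'code': 'print(1)'}}]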
@@ -39,6 +58,69 @@ def create_openai_func_call_agent_executor(
     def should_continue(messages):
         # If there is no FuncCall, then we finish
         last_message = messages[-1]
+        if last_message.content.strip().endswith("```"):
+            print("添加末尾换行")
+            last_message.content += "\n\n"  # 避免影响阅读
+        if not last_message.tool_calls:
+            if (
+                "接下来我将" in last_message.content
+                or "接下来,我将" in last_message.content
+            ):
+                print("deepseek的bug: “接下来我将” 模式,使用a_delay_function骗过llm")
+                last_message.additional_kwargs["tool_calls"] = (
+                    last_message.tool_calls
+                ) = [
+                    {
+                        "function": {"name": "a_delay_function", "arguments": "{}"},
+                        "id": random.randint(0, 1000000),
+                    }
+                ]
+                return "continue"
+            if (
+                '"name"' in last_message.content
+                and '"parameters":' in last_message.content
+            ):
+                print("deepseek的bug: name 和 paremeters 模式")
+                all_json = get_all_markdown_json(last_message.content)
+                tool_calls = []
+                for tool_call in all_json:
+                    if "name" not in tool_call or "parameters" not in tool_call:
+                        return "end"
+                    tool_call["arguments"] = json.dumps(tool_call["parameters"])
+                    tool_call.pop("parameters")
+                    tool_calls.append(
+                        {
+                            "function": tool_call,
+                            "id": random.randint(0, 1000000),
+                        }
+                    )
+                last_message.tool_calls = tool_calls
+                last_message.additional_kwargs["tool_calls"] = tool_calls
+                return "continue"
+            if "<|tool▁sep|>" in last_message.content:
+                print("deepseek的bug: <|tool▁sep|> 模式")
+                name = (
+                    last_message.content.split("<|tool▁sep|>")[1]
+                    .split("```")[0]
+                    .strip()
+                )
+                all_json = get_all_markdown_json(last_message.content)
+                tool_calls = []
+                for argument in all_json:
+                    tool_calls.append(
+                        {
+                            "function": {
+                                "name": name,
+                                "arguments": json.dumps(argument),
+                            },
+                            "id": random.randint(0, 1000000),
+                        }
+                    )
+
+                last_message.additional_kwargs["tool_calls"] = tool_calls
+                last_message.tool_calls = tool_calls
+                return "continue"
+
         if not last_message.tool_calls:
             return "end"
         # Otherwise if there is, we continue
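Note: the block above is a set of workarounds for models (the print statements blame deepseek) that describe tool calls in plain text instead of emitting real function calls: a trailing ``` gets newlines appended, the "接下来我将" ("next, I will ...") pattern is answered with a fake a_delay_function call, and JSON written as {"name": ..., "parameters": ...} markdown blocks or behind a <|tool▁sep|> marker is reshaped into OpenAI-style tool_calls. A simplified standalone sketch of that reshaping step follows; json_blocks_to_tool_calls is an illustrative name, not pycoze code, and it builds new dicts rather than mutating the parsed blocks.

import json
import random


def json_blocks_to_tool_calls(all_json):
    # Reshape [{"name": ..., "parameters": {...}}, ...] into OpenAI-style
    # tool_calls entries, mirroring the '"name" / "parameters"' branch above.
    tool_calls = []
    for block in all_json:
        if "name" not in block or "parameters" not in block:
            return None  # the real code returns "end" and gives up
        tool_calls.append(
            {
                "function": {
                    "name": block["name"],
                    "arguments": json.dumps(block["parameters"]),
                },
                "id": random.randint(0, 1000000),
            }
        )
    return tool_calls


blocks = [{"name": "python_repl", "parameters": {"code": "print(7 ** 0.5)"}}]
print(json_blocks_to_tool_calls(blocks))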
@@ -54,6 +136,15 @@
         for tool_call in last_message.additional_kwargs["tool_calls"]:
             function = tool_call["function"]
             function_name = function["name"]
+            if function_name == "a_delay_function":
+                return [
+                    ToolMessage(
+                        tool_call_id=tool_call["id"],
+                        content="a_delay_function只是一个占位符,请忽略重新调用工具",
+                        additional_kwargs={"name": tool_call["function"]["name"]},
+                    )
+                ]
+
             _tool_input = json.loads(function["arguments"] or "{}")
             # We construct an ToolInvocation from the function_call
             actions.append(
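Note: a_delay_function is never a real tool. When the synthesized call reaches the tool-execution step, it is answered with a canned ToolMessage whose content ("a_delay_function只是一个占位符,请忽略重新调用工具") roughly means "a_delay_function is just a placeholder, ignore it and call the tool again". A standalone sketch of that short-circuit follows, using plain dicts instead of langchain_core's ToolMessage; run_tool_calls is an illustrative name, not pycoze API.

def run_tool_calls(tool_calls):
    for tool_call in tool_calls:
        function_name = tool_call["function"]["name"]
        if function_name == "a_delay_function":
            # Short-circuit: answer the placeholder instead of executing a tool.
            return [
                {
                    "tool_call_id": tool_call["id"],
                    "content": "a_delay_function is only a placeholder; ignore it and call a real tool",
                    "name": function_name,
                }
            ]
        # ... real tool invocation would happen here ...
    return []


print(run_tool_calls([{"id": 42, "function": {"name": "a_delay_function", "arguments": "{}"}}]))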
pycoze/bot/bot.py CHANGED
@@ -35,11 +35,17 @@ def agent_chat(bot_setting_file, history):
         api_key=cfg["apiKey"],
         base_url=cfg["baseURL"],
         model=cfg["model"],
-        temperature=role_setting["temperature"],
+        temperature=(
+            role_setting["temperature"] * 2
+            if cfg["model"].startswith("deepseek")
+            else role_setting["temperature"]
+        ),
     )

     agent = Runnable(
-        agent_execution_mode="FuncCall",  # 'FuncCall' or 'ReAct',大模型支持FuncCall的话就用FuncCall
+        agent_execution_mode=(
+            "ReAct" if cfg["model"] in ["command-r"] else "FuncCall"
+        ),  # 'FuncCall' or 'ReAct',大模型支持FuncCall的话就用FuncCall
         tools=tools,
         llm=chat,
         assistant_message=role_setting["prompt"],
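Note: bot.py now derives two settings from the configured model name: deepseek models get the configured temperature doubled, and command-r is routed to the ReAct executor instead of FuncCall (presumably because its function calling does not work well with this executor; the inline comment only says to use FuncCall when the model supports it). A pure-function sketch of the same selection logic; select_agent_settings is an illustrative name, not pycoze API.

def select_agent_settings(model: str, temperature: float) -> dict:
    # Mirror the two conditionals introduced in bot.py's agent_chat.
    return {
        "temperature": temperature * 2 if model.startswith("deepseek") else temperature,
        "agent_execution_mode": "ReAct" if model in ["command-r"] else "FuncCall",
    }


print(select_agent_settings("deepseek-chat", 0.5))  # {'temperature': 1.0, 'agent_execution_mode': 'FuncCall'}
print(select_agent_settings("command-r", 0.5))      # {'temperature': 0.5, 'agent_execution_mode': 'ReAct'}
print(select_agent_settings("gpt-4o", 0.5))         # {'temperature': 0.5, 'agent_execution_mode': 'FuncCall'}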
{pycoze-0.1.35.dist-info → pycoze-0.1.36.dist-info}/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: pycoze
-Version: 0.1.35
+Version: 0.1.36
 Summary: Package for pycoze only!
 Author: Yuan Jie Xiong
 Author-email: aiqqqqqqq@qq.com
{pycoze-0.1.35.dist-info → pycoze-0.1.36.dist-info}/RECORD RENAMED
@@ -2,13 +2,13 @@ pycoze/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 pycoze/module.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 pycoze/bot/__init__.py,sha256=pciDtfcIXda7iFt9uI5Fpm0JKpGBhdXHmJv4966WTVU,21
 pycoze/bot/base.py,sha256=GWYDVGGtiCpk6gv-163cAbDid_IsnMe5jTj7eZUMJQU,2679
-pycoze/bot/bot.py,sha256=45HHv9PmUDHfP0pg5zZi23Io7bCZetJSneTZfFaSjow,1838
+pycoze/bot/bot.py,sha256=RPSlKy9YjKrhrTHiaefqvORXD8PTZ6BC-HOw2w5CcEY,2048
 pycoze/bot/agent/__init__.py,sha256=IaYqQCJ3uBor92JdOxI_EY4HtYOHgej8lijr3UrN1Vc,161
-pycoze/bot/agent/agent.py,sha256=vDbTCorUL6Eh2Az4uzwGsLa3Hp4EPcOkq62JpQXt8-s,3435
+pycoze/bot/agent/agent.py,sha256=hxuNNdSrNUhw6FS7xwT4LWFsAS3adOkdspxibL8oOro,3625
 pycoze/bot/agent/assistant.py,sha256=QLeWaPi415P9jruYOm8qcIbC94cXXAhJYmLTkyC9NTQ,1267
 pycoze/bot/agent/chat.py,sha256=kc0qgcrBSXdiMy49JwThZTV-0PAvzAhiUvbI5ILiSnU,571
 pycoze/bot/agent/agent_types/__init__.py,sha256=W2jTNMLqUMqgCMG0Tw0d8n7WpsbsnIonqaPR-YLegLU,210
-pycoze/bot/agent/agent_types/openai_func_call_agent.py,sha256=LBCkcPbFHpI9ijrze2X15O-9tXxCbi7IKbJRtrBQjwg,4504
+pycoze/bot/agent/agent_types/openai_func_call_agent.py,sha256=GqWTvG4B6JFSpzvo44mXDTiARgGm0lp2t9sUmg_sLac,8225
 pycoze/bot/agent/agent_types/react_agent.py,sha256=AnjHwHXVwLAm77ndglJGi4rQhqDGWaLuUfl46uZVSzM,6749
 pycoze/bot/agent/agent_types/react_prompt.py,sha256=jyovokGaPzNIe5bvTRvn0gmsWLx5kpDIPmRwmEMCl-M,2142
 pycoze/gpu/__init__.py,sha256=cuxwDdz2Oo-VcwZ50FtFtEIJXdqoz2el-n0QpSt_NMc,75
@@ -21,8 +21,8 @@ pycoze/ui/ui_def.py,sha256=CNFYH8NC-WYmbceIPpxsRr9H6O006pMKukx7U-BOE1Q,3744
 pycoze/utils/__init__.py,sha256=KExBkotf23dr2NfTEouWke5nJB1q2IuDXgHrmuyd95k,73
 pycoze/utils/arg.py,sha256=rRujm1zKc0XlnNlpIJ6JAAaFiTzDGmL_RliIpSc5OD8,724
 pycoze/utils/text_or_file.py,sha256=gpxZVWt2DW6YiEg_MnMuwg36VNf3TX383QD_1oZNB0Y,551
-pycoze-0.1.35.dist-info/LICENSE,sha256=QStd_Qsd0-kAam_-sOesCIp_uKrGWeoKwt9M49NVkNU,1090
-pycoze-0.1.35.dist-info/METADATA,sha256=zNyvESPcN0ZDZrIb8r_b50-l8FgGWVSJ33p5WeshqW8,719
-pycoze-0.1.35.dist-info/WHEEL,sha256=UvcQYKBHoFqaQd6LKyqHw9fxEolWLQnlzP0h_LgJAfI,91
-pycoze-0.1.35.dist-info/top_level.txt,sha256=76dPeDhKvOCleL3ZC5gl1-y4vdS1tT_U1hxWVAn7sFo,7
-pycoze-0.1.35.dist-info/RECORD,,
+pycoze-0.1.36.dist-info/LICENSE,sha256=QStd_Qsd0-kAam_-sOesCIp_uKrGWeoKwt9M49NVkNU,1090
+pycoze-0.1.36.dist-info/METADATA,sha256=HLdkCWlqoLq6TsNpE9hTZ4exHEy-SZ-QeUjLMYGGgHs,719
+pycoze-0.1.36.dist-info/WHEEL,sha256=eOLhNAGa2EW3wWl_TU484h7q1UNgy0JXjjoqKoxAAQc,92
+pycoze-0.1.36.dist-info/top_level.txt,sha256=76dPeDhKvOCleL3ZC5gl1-y4vdS1tT_U1hxWVAn7sFo,7
+pycoze-0.1.36.dist-info/RECORD,,
{pycoze-0.1.35.dist-info → pycoze-0.1.36.dist-info}/WHEEL RENAMED
@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: setuptools (74.0.0)
+Generator: bdist_wheel (0.44.0)
 Root-Is-Purelib: true
 Tag: py3-none-any
