langchain-dev-utils 1.3.2__tar.gz → 1.3.3__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {langchain_dev_utils-1.3.2 → langchain_dev_utils-1.3.3}/PKG-INFO +1 -1
- {langchain_dev_utils-1.3.2 → langchain_dev_utils-1.3.3}/pyproject.toml +1 -1
- langchain_dev_utils-1.3.3/src/langchain_dev_utils/__init__.py +1 -0
- langchain_dev_utils-1.3.3/src/langchain_dev_utils/agents/__init__.py +4 -0
- {langchain_dev_utils-1.3.2 → langchain_dev_utils-1.3.3}/src/langchain_dev_utils/agents/middleware/handoffs.py +17 -2
- langchain_dev_utils-1.3.3/src/langchain_dev_utils/agents/wrap.py +274 -0
- {langchain_dev_utils-1.3.2 → langchain_dev_utils-1.3.3}/tests/test_handoffs_middleware.py +34 -38
- {langchain_dev_utils-1.3.2 → langchain_dev_utils-1.3.3}/tests/test_wrap_agent.py +83 -2
- langchain_dev_utils-1.3.2/src/langchain_dev_utils/__init__.py +0 -1
- langchain_dev_utils-1.3.2/src/langchain_dev_utils/agents/__init__.py +0 -4
- langchain_dev_utils-1.3.2/src/langchain_dev_utils/agents/wrap.py +0 -140
- {langchain_dev_utils-1.3.2 → langchain_dev_utils-1.3.3}/.gitignore +0 -0
- {langchain_dev_utils-1.3.2 → langchain_dev_utils-1.3.3}/.python-version +0 -0
- {langchain_dev_utils-1.3.2 → langchain_dev_utils-1.3.3}/.vscode/settings.json +0 -0
- {langchain_dev_utils-1.3.2 → langchain_dev_utils-1.3.3}/LICENSE +0 -0
- {langchain_dev_utils-1.3.2 → langchain_dev_utils-1.3.3}/README.md +0 -0
- {langchain_dev_utils-1.3.2 → langchain_dev_utils-1.3.3}/README_cn.md +0 -0
- {langchain_dev_utils-1.3.2 → langchain_dev_utils-1.3.3}/src/langchain_dev_utils/_utils.py +0 -0
- {langchain_dev_utils-1.3.2 → langchain_dev_utils-1.3.3}/src/langchain_dev_utils/agents/factory.py +0 -0
- {langchain_dev_utils-1.3.2 → langchain_dev_utils-1.3.3}/src/langchain_dev_utils/agents/file_system.py +0 -0
- {langchain_dev_utils-1.3.2 → langchain_dev_utils-1.3.3}/src/langchain_dev_utils/agents/middleware/__init__.py +0 -0
- {langchain_dev_utils-1.3.2 → langchain_dev_utils-1.3.3}/src/langchain_dev_utils/agents/middleware/format_prompt.py +0 -0
- {langchain_dev_utils-1.3.2 → langchain_dev_utils-1.3.3}/src/langchain_dev_utils/agents/middleware/model_fallback.py +0 -0
- {langchain_dev_utils-1.3.2 → langchain_dev_utils-1.3.3}/src/langchain_dev_utils/agents/middleware/model_router.py +0 -0
- {langchain_dev_utils-1.3.2 → langchain_dev_utils-1.3.3}/src/langchain_dev_utils/agents/middleware/plan.py +0 -0
- {langchain_dev_utils-1.3.2 → langchain_dev_utils-1.3.3}/src/langchain_dev_utils/agents/middleware/summarization.py +0 -0
- {langchain_dev_utils-1.3.2 → langchain_dev_utils-1.3.3}/src/langchain_dev_utils/agents/middleware/tool_call_repair.py +0 -0
- {langchain_dev_utils-1.3.2 → langchain_dev_utils-1.3.3}/src/langchain_dev_utils/agents/middleware/tool_emulator.py +0 -0
- {langchain_dev_utils-1.3.2 → langchain_dev_utils-1.3.3}/src/langchain_dev_utils/agents/middleware/tool_selection.py +0 -0
- {langchain_dev_utils-1.3.2 → langchain_dev_utils-1.3.3}/src/langchain_dev_utils/agents/plan.py +0 -0
- {langchain_dev_utils-1.3.2 → langchain_dev_utils-1.3.3}/src/langchain_dev_utils/chat_models/__init__.py +0 -0
- {langchain_dev_utils-1.3.2 → langchain_dev_utils-1.3.3}/src/langchain_dev_utils/chat_models/adapters/__init__.py +0 -0
- {langchain_dev_utils-1.3.2 → langchain_dev_utils-1.3.3}/src/langchain_dev_utils/chat_models/adapters/openai_compatible.py +0 -0
- {langchain_dev_utils-1.3.2 → langchain_dev_utils-1.3.3}/src/langchain_dev_utils/chat_models/base.py +0 -0
- {langchain_dev_utils-1.3.2 → langchain_dev_utils-1.3.3}/src/langchain_dev_utils/chat_models/types.py +0 -0
- {langchain_dev_utils-1.3.2 → langchain_dev_utils-1.3.3}/src/langchain_dev_utils/embeddings/__init__.py +0 -0
- {langchain_dev_utils-1.3.2 → langchain_dev_utils-1.3.3}/src/langchain_dev_utils/embeddings/base.py +0 -0
- {langchain_dev_utils-1.3.2 → langchain_dev_utils-1.3.3}/src/langchain_dev_utils/message_convert/__init__.py +0 -0
- {langchain_dev_utils-1.3.2 → langchain_dev_utils-1.3.3}/src/langchain_dev_utils/message_convert/content.py +0 -0
- {langchain_dev_utils-1.3.2 → langchain_dev_utils-1.3.3}/src/langchain_dev_utils/message_convert/format.py +0 -0
- {langchain_dev_utils-1.3.2 → langchain_dev_utils-1.3.3}/src/langchain_dev_utils/pipeline/__init__.py +0 -0
- {langchain_dev_utils-1.3.2 → langchain_dev_utils-1.3.3}/src/langchain_dev_utils/pipeline/parallel.py +0 -0
- {langchain_dev_utils-1.3.2 → langchain_dev_utils-1.3.3}/src/langchain_dev_utils/pipeline/sequential.py +0 -0
- {langchain_dev_utils-1.3.2 → langchain_dev_utils-1.3.3}/src/langchain_dev_utils/pipeline/types.py +0 -0
- {langchain_dev_utils-1.3.2 → langchain_dev_utils-1.3.3}/src/langchain_dev_utils/py.typed +0 -0
- {langchain_dev_utils-1.3.2 → langchain_dev_utils-1.3.3}/src/langchain_dev_utils/tool_calling/__init__.py +0 -0
- {langchain_dev_utils-1.3.2 → langchain_dev_utils-1.3.3}/src/langchain_dev_utils/tool_calling/human_in_the_loop.py +0 -0
- {langchain_dev_utils-1.3.2 → langchain_dev_utils-1.3.3}/src/langchain_dev_utils/tool_calling/utils.py +0 -0
- {langchain_dev_utils-1.3.2 → langchain_dev_utils-1.3.3}/tests/__init__.py +0 -0
- {langchain_dev_utils-1.3.2 → langchain_dev_utils-1.3.3}/tests/test_agent.py +0 -0
- {langchain_dev_utils-1.3.2 → langchain_dev_utils-1.3.3}/tests/test_chat_models.py +0 -0
- {langchain_dev_utils-1.3.2 → langchain_dev_utils-1.3.3}/tests/test_human_in_the_loop.py +0 -0
- {langchain_dev_utils-1.3.2 → langchain_dev_utils-1.3.3}/tests/test_load_embbeding.py +0 -0
- {langchain_dev_utils-1.3.2 → langchain_dev_utils-1.3.3}/tests/test_load_model.py +0 -0
- {langchain_dev_utils-1.3.2 → langchain_dev_utils-1.3.3}/tests/test_messages.py +0 -0
- {langchain_dev_utils-1.3.2 → langchain_dev_utils-1.3.3}/tests/test_model_tool_emulator.py +0 -0
- {langchain_dev_utils-1.3.2 → langchain_dev_utils-1.3.3}/tests/test_pipline.py +0 -0
- {langchain_dev_utils-1.3.2 → langchain_dev_utils-1.3.3}/tests/test_plan_middleware.py +0 -0
- {langchain_dev_utils-1.3.2 → langchain_dev_utils-1.3.3}/tests/test_router_model.py +0 -0
- {langchain_dev_utils-1.3.2 → langchain_dev_utils-1.3.3}/tests/test_tool_call_repair.py +0 -0
- {langchain_dev_utils-1.3.2 → langchain_dev_utils-1.3.3}/tests/test_tool_calling.py +0 -0
- {langchain_dev_utils-1.3.2 → langchain_dev_utils-1.3.3}/tests/utils/__init__.py +0 -0
- {langchain_dev_utils-1.3.2 → langchain_dev_utils-1.3.3}/tests/utils/register.py +0 -0
- {langchain_dev_utils-1.3.2 → langchain_dev_utils-1.3.3}/uv.lock +0 -0

{langchain_dev_utils-1.3.2 → langchain_dev_utils-1.3.3}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: langchain-dev-utils
-Version: 1.3.2
+Version: 1.3.3
 Summary: A practical utility library for LangChain and LangGraph development
 Project-URL: Source Code, https://github.com/TBice123123/langchain-dev-utils
 Project-URL: repository, https://github.com/TBice123123/langchain-dev-utils

langchain_dev_utils-1.3.3/src/langchain_dev_utils/__init__.py
@@ -0,0 +1 @@
+__version__ = "1.3.3"

{langchain_dev_utils-1.3.2 → langchain_dev_utils-1.3.3}/src/langchain_dev_utils/agents/middleware/handoffs.py
@@ -78,7 +78,19 @@ def _transform_agent_config(
         dict[str, AgentConfig]: The transformed agent config.
     """

+    new_config = {}
     for agent_name, _cfg in config.items():
+        new_config[agent_name] = {}
+
+        if "model" in _cfg:
+            new_config[agent_name]["model"] = _cfg["model"]
+        if "prompt" in _cfg:
+            new_config[agent_name]["prompt"] = _cfg["prompt"]
+        if "default" in _cfg:
+            new_config[agent_name]["default"] = _cfg["default"]
+        if "tools" in _cfg:
+            new_config[agent_name]["tools"] = _cfg["tools"]
+
         handoffs = _cfg.get("handoffs", [])
         if handoffs == "all":
             handoff_tools = [
@@ -102,8 +114,11 @@ def _transform_agent_config(
                 ]
             ]

-
-
+        new_config[agent_name]["tools"] = [
+            *new_config[agent_name].get("tools", []),
+            *handoff_tools,
+        ]
+    return new_config


 class HandoffAgentMiddleware(AgentMiddleware):
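
Net effect of the handoffs.py change: _transform_agent_config no longer works on the caller's config in place; it builds a fresh dict, copies only the keys it understands (model, prompt, default, tools), and appends the generated handoff tools to each agent's tool list. The following is a minimal standalone sketch of that copy-and-merge step, using plain dicts and string placeholders instead of the library's real tool objects; it is illustrative only, not the package's code.

from typing import Any


def transform_config_sketch(
    config: dict[str, dict[str, Any]],
    handoff_tools_by_agent: dict[str, list[str]],
) -> dict[str, dict[str, Any]]:
    """Illustrative only: copy recognized keys, then append precomputed handoff tools."""
    new_config: dict[str, dict[str, Any]] = {}
    for agent_name, cfg in config.items():
        new_config[agent_name] = {}
        for key in ("model", "prompt", "default", "tools"):
            if key in cfg:
                new_config[agent_name][key] = cfg[key]
        # Merge handoff tools without touching the caller's original config.
        new_config[agent_name]["tools"] = [
            *new_config[agent_name].get("tools", []),
            *handoff_tools_by_agent.get(agent_name, []),
        ]
    return new_config


# "transfer_to_talk_agent" is a placeholder name for the generated handoff tool.
print(
    transform_config_sketch(
        {"time_agent": {"prompt": "You are a time assistant.", "handoffs": ["talk_agent"]}},
        {"time_agent": ["transfer_to_talk_agent"]},
    )
)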

langchain_dev_utils-1.3.3/src/langchain_dev_utils/agents/wrap.py
@@ -0,0 +1,274 @@
+import asyncio
+from typing import Any, Awaitable, Callable, Optional, cast
+
+from langchain.tools import ToolRuntime
+from langchain_core.messages import AnyMessage, HumanMessage
+from langchain_core.tools import BaseTool, StructuredTool
+from langgraph.graph.state import CompiledStateGraph
+
+from langchain_dev_utils.message_convert import format_sequence
+
+
+def _process_input(request: str, runtime: ToolRuntime) -> str:
+    return request
+
+
+def _process_output(
+    request: str, response: list[AnyMessage], runtime: ToolRuntime
+) -> Any:
+    return response[-1].content
+
+
+def wrap_agent_as_tool(
+    agent: CompiledStateGraph,
+    tool_name: Optional[str] = None,
+    tool_description: Optional[str] = None,
+    pre_input_hooks: Optional[
+        tuple[
+            Callable[[str, ToolRuntime], str],
+            Callable[[str, ToolRuntime], Awaitable[str]],
+        ]
+        | Callable[[str, ToolRuntime], str]
+    ] = None,
+    post_output_hooks: Optional[
+        tuple[
+            Callable[[str, list[AnyMessage], ToolRuntime], Any],
+            Callable[[str, list[AnyMessage], ToolRuntime], Awaitable[Any]],
+        ]
+        | Callable[[str, list[AnyMessage], ToolRuntime], Any]
+    ] = None,
+) -> BaseTool:
+    """Wraps an agent as a tool
+
+    Args:
+        agent: The agent to wrap
+        tool_name: The name of the tool
+        tool_description: The description of the tool
+        pre_input_hooks: Hooks to run before the input is processed
+        post_output_hooks: Hooks to run after the output is processed
+
+    Returns:
+        BaseTool: The wrapped agent as a tool
+
+    Example:
+        >>> from langchain_dev_utils.agents import wrap_agent_as_tool, create_agent
+        >>>
+        >>> call_time_agent_tool = wrap_agent_as_tool(
+        ...     time_agent,
+        ...     tool_name="call_time_agent",
+        ...     tool_description="Used to invoke the time sub-agent to perform time-related tasks"
+        ... )
+        >>>
+        >>> agent = create_agent("vllm:qwen3-4b", tools=[call_time_agent_tool], name="agent")
+
+        >>> response = agent.invoke({"messages": [HumanMessage(content="What time is it now?")]})
+        >>> response
+    """
+    if agent.name is None:
+        raise ValueError("Agent name must not be None")
+
+    process_input = _process_input
+    process_input_async = _process_input
+    process_output = _process_output
+    process_output_async = _process_output
+
+    if pre_input_hooks:
+        if isinstance(pre_input_hooks, tuple):
+            process_input = pre_input_hooks[0]
+            process_input_async = pre_input_hooks[1]
+        else:
+            process_input = pre_input_hooks
+            process_input_async = pre_input_hooks
+
+    if post_output_hooks:
+        if isinstance(post_output_hooks, tuple):
+            process_output = post_output_hooks[0]
+            process_output_async = post_output_hooks[1]
+        else:
+            process_output = post_output_hooks
+            process_output_async = post_output_hooks
+
+    def call_agent(
+        request: str,
+        runtime: ToolRuntime,
+    ) -> str:
+        request = process_input(request, runtime) if process_input else request
+
+        messages = [HumanMessage(content=request)]
+        response = agent.invoke({"messages": messages})
+
+        response = process_output(request, response["messages"], runtime)
+        return response
+
+    async def acall_agent(
+        request: str,
+        runtime: ToolRuntime,
+    ) -> str:
+        if asyncio.iscoroutinefunction(process_input_async):
+            request = await process_input_async(request, runtime)
+        else:
+            request = cast(str, process_input_async(request, runtime))
+
+        messages = [HumanMessage(content=request)]
+        response = await agent.ainvoke({"messages": messages})
+
+        if asyncio.iscoroutinefunction(process_output_async):
+            response = await process_output_async(
+                request, response["messages"], runtime
+            )
+        else:
+            response = process_output(request, response["messages"], runtime)
+
+        return response
+
+    if tool_name is None:
+        tool_name = f"transfor_to_{agent.name}"
+        if not tool_name.endswith("_agent"):
+            tool_name += "_agent"
+
+    if tool_description is None:
+        tool_description = f"This tool transforms input to {agent.name}"
+
+    return StructuredTool.from_function(
+        func=call_agent,
+        coroutine=acall_agent,
+        name=tool_name,
+        description=tool_description,
+    )
+
+
+def wrap_all_agents_as_tool(
+    agents: list[CompiledStateGraph],
+    tool_name: Optional[str] = None,
+    tool_description: Optional[str] = None,
+    pre_input_hooks: Optional[
+        tuple[
+            Callable[[str, ToolRuntime], str],
+            Callable[[str, ToolRuntime], Awaitable[str]],
+        ]
+        | Callable[[str, ToolRuntime], str]
+    ] = None,
+    post_output_hooks: Optional[
+        tuple[
+            Callable[[str, list[AnyMessage], ToolRuntime], Any],
+            Callable[[str, list[AnyMessage], ToolRuntime], Awaitable[Any]],
+        ]
+        | Callable[[str, list[AnyMessage], ToolRuntime], Any]
+    ] = None,
+) -> BaseTool:
+    """Wraps all agents as single tool
+
+    Args:
+        agents: The agents to wrap
+        tool_name: The name of the tool, default to "task"
+        tool_description: The description of the tool
+        pre_input_hooks: Hooks to run before the input is processed
+        post_output_hooks: Hooks to run after the output is processed
+
+    Returns:
+        BaseTool: The wrapped agents as single tool
+
+    Example:
+        >>> from langchain_dev_utils.agents import wrap_all_agents_as_tool, create_agent
+        >>>
+        >>> call_time_agent_tool = wrap_all_agents_as_tool(
+        ...     [time_agent,weather_agent],
+        ...     tool_name="call_sub_agents",
+        ...     tool_description="Used to invoke the sub-agents to perform tasks"
+        ... )
+        >>>
+        >>> agent = create_agent("vllm:qwen3-4b", tools=[call_sub_agents_tool], name="agent")
+
+        >>> response = agent.invoke({"messages": [HumanMessage(content="What time is it now?")]})
+        >>> response
+    """
+    if len(agents) <= 1:
+        raise ValueError("At least more than one agent must be provided")
+
+    agents_map = {}
+
+    for agent in agents:
+        if agent.name is None:
+            raise ValueError("Agent name must not be provided")
+        if agent.name in agents_map:
+            raise ValueError("Agent name must be unique")
+        agents_map[agent.name] = agent
+
+    process_input = _process_input
+    process_input_async = _process_input
+    process_output = _process_output
+    process_output_async = _process_output
+
+    if pre_input_hooks:
+        if isinstance(pre_input_hooks, tuple):
+            process_input = pre_input_hooks[0]
+            process_input_async = pre_input_hooks[1]
+        else:
+            process_input = pre_input_hooks
+            process_input_async = pre_input_hooks
+
+    if post_output_hooks:
+        if isinstance(post_output_hooks, tuple):
+            process_output = post_output_hooks[0]
+            process_output_async = post_output_hooks[1]
+        else:
+            process_output = post_output_hooks
+            process_output_async = post_output_hooks
+
+    def call_agent(
+        agent_name: str,
+        description: str,
+        runtime: ToolRuntime,
+    ) -> str:
+        task_description = (
+            process_input(description, runtime) if process_input else description
+        )
+
+        if agent_name not in agents_map:
+            raise ValueError(f"Agent {agent_name} not found")
+
+        messages = [HumanMessage(content=task_description)]
+        response = agents_map[agent_name].invoke({"messages": messages})
+
+        response = process_output(task_description, response["messages"], runtime)
+        return response
+
+    async def acall_agent(
+        agent_name: str,
+        description: str,
+        runtime: ToolRuntime,
+    ) -> str:
+        if asyncio.iscoroutinefunction(process_input_async):
+            task_description = await process_input_async(description, runtime)
+        else:
+            task_description = cast(str, process_input_async(description, runtime))
+
+        if agent_name not in agents_map:
+            raise ValueError(f"Agent {agent_name} not found")
+
+        messages = [HumanMessage(content=task_description)]
+        response = await agents_map[agent_name].ainvoke({"messages": messages})
+
+        if asyncio.iscoroutinefunction(process_output_async):
+            response = await process_output_async(
+                task_description, response["messages"], runtime
+            )
+        else:
+            response = process_output(task_description, response["messages"], runtime)
+
+        return response
+
+    if tool_name is None:
+        tool_name = "task"
+
+    if tool_description is None:
+        tool_description = (
+            "Launch an ephemeral subagent for a task.\nAvailable agents:\n "
+            + format_sequence(list(agents_map.keys()), with_num=True)
+        )
+    return StructuredTool.from_function(
+        func=call_agent,
+        coroutine=acall_agent,
+        name=tool_name,
+        description=tool_description,
+    )
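
For orientation, typical use of the two new wrappers looks roughly like this. The sketch below follows the docstrings above and the conventions of the test files (create_agent from langchain.agents, the deepseek model string, a toy get_time tool); treat the model identifiers as placeholders for whichever providers you actually have registered.

from langchain.agents import create_agent
from langchain.tools import tool
from langchain_core.messages import HumanMessage

from langchain_dev_utils.agents import wrap_agent_as_tool, wrap_all_agents_as_tool


@tool
def get_time() -> str:
    """Get the current time."""
    return "The current time is 10:00 AM"


# Wrap a single sub-agent as a callable tool for a parent agent.
time_agent = create_agent(model="deepseek:deepseek-chat", tools=[get_time], name="time_agent")
call_time_agent = wrap_agent_as_tool(
    time_agent,
    tool_name="call_time_agent",
    tool_description="Invoke the time sub-agent for time-related tasks",
)

# Or expose several sub-agents behind one dispatch tool; the model selects the
# target sub-agent via the tool's agent_name argument.
weather_agent = create_agent(model="deepseek:deepseek-chat", tools=[], name="weather_agent")
task_tool = wrap_all_agents_as_tool([time_agent, weather_agent], tool_name="call_sub_agents")

main_agent = create_agent(model="deepseek:deepseek-chat", tools=[call_time_agent, task_tool])
response = main_agent.invoke({"messages": [HumanMessage(content="What time is it now?")]})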

{langchain_dev_utils-1.3.2 → langchain_dev_utils-1.3.3}/tests/test_handoffs_middleware.py
@@ -25,46 +25,43 @@ def run_code(code: str) -> str:
     return "Running code successfully"


-    (removed lines 28-61 were not rendered in the source diff view)
-    }
-    return agents_config, custom_tool_descriptions
+agents_config: dict[str, AgentConfig] = {
+    "time_agent": {
+        "model": "zai:glm-4.5",
+        "prompt": (
+            "You are a time assistant. You can answer users' time-related questions. "
+            "If the current question is not time-related, please transfer to another assistant."
+        ),
+        "tools": [get_current_time],
+        "handoffs": ["talk_agent"],
+    },
+    "talk_agent": {
+        "prompt": (
+            "You are a conversational assistant. You can answer users' questions. "
+            "If the current question is a time query, please transfer to the time assistant; "
+            "if it's a code-related question, please transfer to the code assistant."
+        ),
+        "default": True,
+        "handoffs": "all",
+    },
+    "code_agent": {
+        "model": load_chat_model("dashscope:qwen3-coder-plus"),
+        "prompt": (
+            "You are a code assistant. You can answer users' code-related questions. "
+            "If the current question is not code-related, please transfer to another assistant."
+        ),
+        "tools": [run_code],
+        "handoffs": ["talk_agent"],
+    },
+}
+custom_tool_descriptions: dict[str, str] = {
+    "time_agent": "transfer to the time agent to answer time-related questions",
+    "talk_agent": "transfer to the talk agent to answer user questions",
+    "code_agent": "transfer to the code agent to answer code-related questions",
+}


 def test_handoffs_middleware():
-    agents_config, custom_tool_descriptions = get_config()
     agent = create_agent(
         model="dashscope:qwen3-max",
         middleware=[HandoffAgentMiddleware(agents_config, custom_tool_descriptions)],
@@ -103,7 +100,6 @@ def test_handoffs_middleware():


 async def test_handoffs_middleware_async():
-    agents_config, custom_tool_descriptions = get_config()
     agent = create_agent(
         model="dashscope:qwen3-max",
         middleware=[HandoffAgentMiddleware(agents_config, custom_tool_descriptions)],
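
For readers skimming the diff, the test boils down to the wiring below; this is a condensed sketch based on the test above, not new package behavior. The import path for HandoffAgentMiddleware is assumed from the package layout (agents/middleware/handoffs.py), and the provider strings are the test's own and require those providers to be configured in your environment.

from langchain.agents import create_agent
from langchain_core.messages import HumanMessage

# Assumed export location based on the package layout shown in the file list.
from langchain_dev_utils.agents.middleware import HandoffAgentMiddleware

# Minimal two-agent handoff setup following the test's config shape.
agents_config = {
    "time_agent": {
        "model": "zai:glm-4.5",
        "prompt": "You are a time assistant.",
        "handoffs": ["talk_agent"],
    },
    "talk_agent": {
        "prompt": "You are a conversational assistant.",
        "default": True,
        "handoffs": "all",
    },
}
custom_tool_descriptions = {
    "time_agent": "transfer to the time agent to answer time-related questions",
    "talk_agent": "transfer to the talk agent to answer user questions",
}

agent = create_agent(
    model="dashscope:qwen3-max",
    middleware=[HandoffAgentMiddleware(agents_config, custom_tool_descriptions)],
)
response = agent.invoke({"messages": [HumanMessage(content="What time is it now?")]})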

{langchain_dev_utils-1.3.2 → langchain_dev_utils-1.3.3}/tests/test_wrap_agent.py
@@ -5,7 +5,7 @@ from langchain.agents import create_agent
 from langchain.tools import ToolRuntime, tool
 from langchain_core.messages import HumanMessage, ToolMessage

-from langchain_dev_utils.agents
+from langchain_dev_utils.agents import wrap_agent_as_tool, wrap_all_agents_as_tool


 @tool
@@ -14,6 +14,12 @@ def get_time() -> str:
     return "The current time is 10:00 AM"


+@tool
+def get_weather(city: str) -> str:
+    """Get the current weather."""
+    return f"The current weather in {city} is sunny"
+
+
 def process_input(request: str, runtime: ToolRuntime) -> str:
     return "<task_description>" + request + "</task_description>"

@@ -37,7 +43,9 @@ async def process_output_async(


 def test_wrap_agent():
-    agent = create_agent(
+    agent = create_agent(
+        model="deepseek:deepseek-chat", tools=[get_time], name="time_agent"
+    )
     call_agent_tool = wrap_agent_as_tool(
         agent, "call_time_agent", "call the agent to query the time"
     )
@@ -100,3 +108,76 @@ async def test_wrap_agent_async(

     assert cast(str, msg.content).startswith("<task_response>")
     assert cast(str, msg.content).endswith("</task_response>")
+
+
+def test_wrap_all_agents():
+    time_agent = create_agent(
+        model="deepseek:deepseek-chat", tools=[get_time], name="time_agent"
+    )
+    weather_agent = create_agent(
+        model="deepseek:deepseek-chat", tools=[get_weather], name="weather_agent"
+    )
+    call_agent_tool = wrap_all_agents_as_tool(
+        [time_agent, weather_agent], "call_sub_agents"
+    )
+    assert call_agent_tool.name == "call_sub_agents"
+
+    main_agent = create_agent(model="deepseek:deepseek-chat", tools=[call_agent_tool])
+    response = main_agent.invoke(
+        {"messages": [HumanMessage(content="What time is it now?")]}
+    )
+
+    msg = None
+    for message in response["messages"]:
+        if isinstance(message, ToolMessage) and message.name == "call_sub_agents":
+            msg = message
+            break
+    assert msg is not None
+
+
+@pytest.mark.asyncio
+@pytest.mark.parametrize(
+    "pre_input_hooks,post_output_hooks",
+    [
+        (
+            process_input,
+            process_output,
+        ),
+        (
+            (process_input, process_input_async),
+            (process_output, process_output_async),
+        ),
+    ],
+)
+async def test_wrap_all_agents_async(
+    pre_input_hooks: Any,
+    post_output_hooks: Any,
+):
+    time_agent = create_agent(
+        model="deepseek:deepseek-chat", tools=[get_time], name="time_agent"
+    )
+    weather_agent = create_agent(
+        model="deepseek:deepseek-chat", tools=[get_weather], name="weather_agent"
+    )
+    call_agent_tool = wrap_all_agents_as_tool(
+        [time_agent, weather_agent],
+        "call_sub_agents",
+        pre_input_hooks=pre_input_hooks,
+        post_output_hooks=post_output_hooks,
+    )
+    assert call_agent_tool.name == "call_sub_agents"
+
+    main_agent = create_agent(model="deepseek:deepseek-chat", tools=[call_agent_tool])
+    response = await main_agent.ainvoke(
+        {"messages": [HumanMessage(content="What time is it now?")]}
+    )
+
+    msg = None
+    for message in response["messages"]:
+        if isinstance(message, ToolMessage) and message.name == "call_sub_agents":
+            msg = message
+            break
+    assert msg is not None
+
+    assert cast(str, msg.content).startswith("<task_response>")
+    assert cast(str, msg.content).endswith("</task_response>")
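
The new async test also exercises the hook contract shared by wrap_agent_as_tool and wrap_all_agents_as_tool: pre_input_hooks rewrites the request string before it reaches the sub-agent, post_output_hooks turns the sub-agent's message list into the tool's return value, and each may be passed as a single sync callable or as a (sync, async) tuple. The following is a sketch of a hook pair in that shape, with tag strings mirroring the test helpers; the bodies are illustrative, not copied from the package.

from typing import Any

from langchain.tools import ToolRuntime
from langchain_core.messages import AnyMessage


def process_input(request: str, runtime: ToolRuntime) -> str:
    # Rewrite the outgoing request before the sub-agent sees it.
    return "<task_description>" + request + "</task_description>"


async def process_input_async(request: str, runtime: ToolRuntime) -> str:
    return "<task_description>" + request + "</task_description>"


def process_output(request: str, response: list[AnyMessage], runtime: ToolRuntime) -> Any:
    # Reduce the sub-agent's full message list to the tool's return value.
    return "<task_response>" + str(response[-1].content) + "</task_response>"


async def process_output_async(
    request: str, response: list[AnyMessage], runtime: ToolRuntime
) -> Any:
    return "<task_response>" + str(response[-1].content) + "</task_response>"


# Passed as (sync, async) tuples so both the .invoke() and .ainvoke() paths are covered:
# wrap_agent_as_tool(agent,
#                    pre_input_hooks=(process_input, process_input_async),
#                    post_output_hooks=(process_output, process_output_async))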

langchain_dev_utils-1.3.2/src/langchain_dev_utils/__init__.py
@@ -1 +0,0 @@
-__version__ = "1.3.2"

langchain_dev_utils-1.3.2/src/langchain_dev_utils/agents/wrap.py
@@ -1,140 +0,0 @@
-import asyncio
-from typing import Any, Awaitable, Callable, Optional, cast
-
-from langchain.tools import ToolRuntime
-from langchain_core.messages import AnyMessage, HumanMessage
-from langchain_core.tools import BaseTool, StructuredTool
-from langgraph.graph.state import CompiledStateGraph
-from pydantic import BaseModel, Field
-
-
-class AgentToolInput(BaseModel):
-    request: str = Field(description="The input to the agent")
-
-
-def _process_input(request: str, runtime: ToolRuntime) -> str:
-    return request
-
-
-def _process_output(
-    request: str, response: list[AnyMessage], runtime: ToolRuntime
-) -> Any:
-    return response[-1].content
-
-
-def wrap_agent_as_tool(
-    agent: CompiledStateGraph,
-    tool_name: Optional[str] = None,
-    tool_description: Optional[str] = None,
-    pre_input_hooks: Optional[
-        tuple[
-            Callable[[str, ToolRuntime], str],
-            Callable[[str, ToolRuntime], Awaitable[str]],
-        ]
-        | Callable[[str, ToolRuntime], str]
-    ] = None,
-    post_output_hooks: Optional[
-        tuple[
-            Callable[[str, list[AnyMessage], ToolRuntime], Any],
-            Callable[[str, list[AnyMessage], ToolRuntime], Awaitable[Any]],
-        ]
-        | Callable[[str, list[AnyMessage], ToolRuntime], Any]
-    ] = None,
-) -> BaseTool:
-    """Wraps an agent as a tool
-
-    Args:
-        agent: The agent to wrap
-        tool_name: The name of the tool
-        tool_description: The description of the tool
-        pre_input_hooks: Hooks to run before the input is processed
-        post_output_hooks: Hooks to run after the output is processed
-
-    Returns:
-        BaseTool: The wrapped agent as a tool
-
-    Example:
-        >>> from langchain_dev_utils.agents import wrap_agent_as_tool, create_agent
-        >>>
-        >>> call_time_agent_tool = wrap_agent_as_tool(
-        ...     time_agent,
-        ...     tool_name="call_time_agent",
-        ...     tool_description="Used to invoke the time sub-agent to perform time-related tasks"
-        ... )
-        >>>
-        >>> agent = create_agent("vllm:qwen3-4b", tools=[call_time_agent_tool], name="agent")
-
-        >>> response = agent.invoke({"messages": [HumanMessage(content="What time is it now?")]})
-        >>> response
-    """
-    if agent.name is None:
-        raise ValueError("Agent name must not be None")
-
-    process_input = _process_input
-    process_input_async = _process_input
-    process_output = _process_output
-    process_output_async = _process_output
-
-    if pre_input_hooks:
-        if isinstance(pre_input_hooks, tuple):
-            process_input = pre_input_hooks[0]
-            process_input_async = pre_input_hooks[1]
-        else:
-            process_input = pre_input_hooks
-            process_input_async = pre_input_hooks
-
-    if post_output_hooks:
-        if isinstance(post_output_hooks, tuple):
-            process_output = post_output_hooks[0]
-            process_output_async = post_output_hooks[1]
-        else:
-            process_output = post_output_hooks
-            process_output_async = post_output_hooks
-
-    def call_agent(
-        request: str,
-        runtime: ToolRuntime,
-    ) -> str:
-        request = process_input(request, runtime) if process_input else request
-
-        messages = [HumanMessage(content=request)]
-        response = agent.invoke({"messages": messages})
-
-        response = process_output(request, response["messages"], runtime)
-        return response
-
-    async def acall_agent(
-        request: str,
-        runtime: ToolRuntime,
-    ) -> str:
-        if asyncio.iscoroutinefunction(process_input_async):
-            request = await process_input_async(request, runtime)
-        else:
-            request = cast(str, process_input_async(request, runtime))
-
-        messages = [HumanMessage(content=request)]
-        response = await agent.ainvoke({"messages": messages})
-
-        if asyncio.iscoroutinefunction(process_output_async):
-            response = await process_output_async(
-                request, response["messages"], runtime
-            )
-        else:
-            response = process_output(request, response["messages"], runtime)
-
-        return response
-
-    if tool_name is None:
-        tool_name = f"transfor_to_{agent.name}"
-        if not tool_name.endswith("_agent"):
-            tool_name += "_agent"
-
-    if tool_description is None:
-        tool_description = f"This tool transforms input to {agent.name}"
-
-    return StructuredTool.from_function(
-        func=call_agent,
-        coroutine=acall_agent,
-        name=tool_name,
-        description=tool_description,
-    )