langchain-dev-utils 1.2.16__tar.gz → 1.3.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {langchain_dev_utils-1.2.16 → langchain_dev_utils-1.3.1}/PKG-INFO +3 -2
- {langchain_dev_utils-1.2.16 → langchain_dev_utils-1.3.1}/pyproject.toml +47 -50
- langchain_dev_utils-1.3.1/src/langchain_dev_utils/__init__.py +1 -0
- {langchain_dev_utils-1.2.16 → langchain_dev_utils-1.3.1}/src/langchain_dev_utils/agents/factory.py +1 -1
- {langchain_dev_utils-1.2.16 → langchain_dev_utils-1.3.1}/src/langchain_dev_utils/agents/middleware/__init__.py +3 -0
- langchain_dev_utils-1.3.1/src/langchain_dev_utils/agents/middleware/handoffs.py +138 -0
- {langchain_dev_utils-1.2.16 → langchain_dev_utils-1.3.1}/src/langchain_dev_utils/agents/middleware/model_fallback.py +2 -2
- {langchain_dev_utils-1.2.16 → langchain_dev_utils-1.3.1}/src/langchain_dev_utils/chat_models/adapters/openai_compatible.py +3 -4
- {langchain_dev_utils-1.2.16 → langchain_dev_utils-1.3.1}/tests/__init__.py +0 -1
- langchain_dev_utils-1.3.1/tests/test_handoffs_middleware.py +75 -0
- {langchain_dev_utils-1.2.16 → langchain_dev_utils-1.3.1}/uv.lock +131 -131
- langchain_dev_utils-1.2.16/src/langchain_dev_utils/__init__.py +0 -1
- {langchain_dev_utils-1.2.16 → langchain_dev_utils-1.3.1}/.gitignore +0 -0
- {langchain_dev_utils-1.2.16 → langchain_dev_utils-1.3.1}/.python-version +0 -0
- {langchain_dev_utils-1.2.16 → langchain_dev_utils-1.3.1}/.vscode/settings.json +0 -0
- {langchain_dev_utils-1.2.16 → langchain_dev_utils-1.3.1}/LICENSE +0 -0
- {langchain_dev_utils-1.2.16 → langchain_dev_utils-1.3.1}/README.md +0 -0
- {langchain_dev_utils-1.2.16 → langchain_dev_utils-1.3.1}/README_cn.md +0 -0
- {langchain_dev_utils-1.2.16 → langchain_dev_utils-1.3.1}/src/langchain_dev_utils/_utils.py +0 -0
- {langchain_dev_utils-1.2.16 → langchain_dev_utils-1.3.1}/src/langchain_dev_utils/agents/__init__.py +0 -0
- {langchain_dev_utils-1.2.16 → langchain_dev_utils-1.3.1}/src/langchain_dev_utils/agents/file_system.py +0 -0
- {langchain_dev_utils-1.2.16 → langchain_dev_utils-1.3.1}/src/langchain_dev_utils/agents/middleware/format_prompt.py +0 -0
- {langchain_dev_utils-1.2.16 → langchain_dev_utils-1.3.1}/src/langchain_dev_utils/agents/middleware/model_router.py +0 -0
- {langchain_dev_utils-1.2.16 → langchain_dev_utils-1.3.1}/src/langchain_dev_utils/agents/middleware/plan.py +0 -0
- {langchain_dev_utils-1.2.16 → langchain_dev_utils-1.3.1}/src/langchain_dev_utils/agents/middleware/summarization.py +0 -0
- {langchain_dev_utils-1.2.16 → langchain_dev_utils-1.3.1}/src/langchain_dev_utils/agents/middleware/tool_call_repair.py +0 -0
- {langchain_dev_utils-1.2.16 → langchain_dev_utils-1.3.1}/src/langchain_dev_utils/agents/middleware/tool_emulator.py +0 -0
- {langchain_dev_utils-1.2.16 → langchain_dev_utils-1.3.1}/src/langchain_dev_utils/agents/middleware/tool_selection.py +0 -0
- {langchain_dev_utils-1.2.16 → langchain_dev_utils-1.3.1}/src/langchain_dev_utils/agents/plan.py +0 -0
- {langchain_dev_utils-1.2.16 → langchain_dev_utils-1.3.1}/src/langchain_dev_utils/agents/wrap.py +0 -0
- {langchain_dev_utils-1.2.16 → langchain_dev_utils-1.3.1}/src/langchain_dev_utils/chat_models/__init__.py +0 -0
- {langchain_dev_utils-1.2.16 → langchain_dev_utils-1.3.1}/src/langchain_dev_utils/chat_models/adapters/__init__.py +0 -0
- {langchain_dev_utils-1.2.16 → langchain_dev_utils-1.3.1}/src/langchain_dev_utils/chat_models/base.py +0 -0
- {langchain_dev_utils-1.2.16 → langchain_dev_utils-1.3.1}/src/langchain_dev_utils/chat_models/types.py +0 -0
- {langchain_dev_utils-1.2.16 → langchain_dev_utils-1.3.1}/src/langchain_dev_utils/embeddings/__init__.py +0 -0
- {langchain_dev_utils-1.2.16 → langchain_dev_utils-1.3.1}/src/langchain_dev_utils/embeddings/base.py +0 -0
- {langchain_dev_utils-1.2.16 → langchain_dev_utils-1.3.1}/src/langchain_dev_utils/message_convert/__init__.py +0 -0
- {langchain_dev_utils-1.2.16 → langchain_dev_utils-1.3.1}/src/langchain_dev_utils/message_convert/content.py +0 -0
- {langchain_dev_utils-1.2.16 → langchain_dev_utils-1.3.1}/src/langchain_dev_utils/message_convert/format.py +0 -0
- {langchain_dev_utils-1.2.16 → langchain_dev_utils-1.3.1}/src/langchain_dev_utils/pipeline/__init__.py +0 -0
- {langchain_dev_utils-1.2.16 → langchain_dev_utils-1.3.1}/src/langchain_dev_utils/pipeline/parallel.py +0 -0
- {langchain_dev_utils-1.2.16 → langchain_dev_utils-1.3.1}/src/langchain_dev_utils/pipeline/sequential.py +0 -0
- {langchain_dev_utils-1.2.16 → langchain_dev_utils-1.3.1}/src/langchain_dev_utils/pipeline/types.py +0 -0
- {langchain_dev_utils-1.2.16 → langchain_dev_utils-1.3.1}/src/langchain_dev_utils/py.typed +0 -0
- {langchain_dev_utils-1.2.16 → langchain_dev_utils-1.3.1}/src/langchain_dev_utils/tool_calling/__init__.py +0 -0
- {langchain_dev_utils-1.2.16 → langchain_dev_utils-1.3.1}/src/langchain_dev_utils/tool_calling/human_in_the_loop.py +0 -0
- {langchain_dev_utils-1.2.16 → langchain_dev_utils-1.3.1}/src/langchain_dev_utils/tool_calling/utils.py +0 -0
- {langchain_dev_utils-1.2.16 → langchain_dev_utils-1.3.1}/tests/test_agent.py +0 -0
- {langchain_dev_utils-1.2.16 → langchain_dev_utils-1.3.1}/tests/test_chat_models.py +0 -0
- {langchain_dev_utils-1.2.16 → langchain_dev_utils-1.3.1}/tests/test_human_in_the_loop.py +0 -0
- {langchain_dev_utils-1.2.16 → langchain_dev_utils-1.3.1}/tests/test_load_embbeding.py +0 -0
- {langchain_dev_utils-1.2.16 → langchain_dev_utils-1.3.1}/tests/test_load_model.py +0 -0
- {langchain_dev_utils-1.2.16 → langchain_dev_utils-1.3.1}/tests/test_messages.py +0 -0
- {langchain_dev_utils-1.2.16 → langchain_dev_utils-1.3.1}/tests/test_model_tool_emulator.py +0 -0
- {langchain_dev_utils-1.2.16 → langchain_dev_utils-1.3.1}/tests/test_pipline.py +0 -0
- {langchain_dev_utils-1.2.16 → langchain_dev_utils-1.3.1}/tests/test_plan_middleware.py +0 -0
- {langchain_dev_utils-1.2.16 → langchain_dev_utils-1.3.1}/tests/test_router_model.py +0 -0
- {langchain_dev_utils-1.2.16 → langchain_dev_utils-1.3.1}/tests/test_tool_call_repair.py +0 -0
- {langchain_dev_utils-1.2.16 → langchain_dev_utils-1.3.1}/tests/test_tool_calling.py +0 -0
- {langchain_dev_utils-1.2.16 → langchain_dev_utils-1.3.1}/tests/test_wrap_agent.py +0 -0
- {langchain_dev_utils-1.2.16 → langchain_dev_utils-1.3.1}/tests/utils/__init__.py +0 -0
- {langchain_dev_utils-1.2.16 → langchain_dev_utils-1.3.1}/tests/utils/register.py +0 -0
{langchain_dev_utils-1.2.16 → langchain_dev_utils-1.3.1}/PKG-INFO
RENAMED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: langchain-dev-utils
-Version: 1.2.16
+Version: 1.3.1
 Summary: A practical utility library for LangChain and LangGraph development
 Project-URL: Source Code, https://github.com/TBice123123/langchain-dev-utils
 Project-URL: repository, https://github.com/TBice123123/langchain-dev-utils
@@ -8,7 +8,8 @@ Project-URL: documentation, https://tbice123123.github.io/langchain-dev-utils
 Author-email: tiebingice <tiebingice123@outlook.com>
 License-File: LICENSE
 Requires-Python: >=3.11
-Requires-Dist: langchain>=1.
+Requires-Dist: langchain-core>=1.2.5
+Requires-Dist: langchain>=1.2.0
 Requires-Dist: langgraph>=1.0.0
 Provides-Extra: standard
 Requires-Dist: json-repair>=0.53.1; extra == 'standard'
{langchain_dev_utils-1.2.16 → langchain_dev_utils-1.3.1}/pyproject.toml
RENAMED

@@ -1,50 +1,47 @@
-[project]
-name = "langchain-dev-utils"
-version = "1.2.16"
-description = "A practical utility library for LangChain and LangGraph development"
-readme = "README.md"
-authors = [{ name = "tiebingice", email = "tiebingice123@outlook.com" }]
-requires-python = ">=3.11"
-dependencies = [
-…
-]
-…
-[tool.ruff.lint]
-select = ["E", "F", "I", "PGH003", "T201"]
-ignore = ["E501"]
+[project]
+name = "langchain-dev-utils"
+version = "1.3.1"
+description = "A practical utility library for LangChain and LangGraph development"
+readme = "README.md"
+authors = [{ name = "tiebingice", email = "tiebingice123@outlook.com" }]
+requires-python = ">=3.11"
+dependencies = ["langchain>=1.2.0", "langchain-core>=1.2.5", "langgraph>=1.0.0"]
+
+[project.urls]
+"Source Code" = "https://github.com/TBice123123/langchain-dev-utils"
+repository = "https://github.com/TBice123123/langchain-dev-utils"
+documentation = "https://tbice123123.github.io/langchain-dev-utils"
+
+
+[project.optional-dependencies]
+standard = ["json-repair>=0.53.1", "langchain-openai"]
+
+[build-system]
+requires = ["hatchling"]
+build-backend = "hatchling.build"
+
+[tool.hatch.build]
+exclude = ["/data", "/docs", "mkdocs.yml"]
+
+[tool.pytest.ini_options]
+asyncio_mode = "auto"
+testpaths = ["tests"]
+python_files = ["test_*.py"]
+python_functions = ["test_*"]
+
+[dependency-groups]
+dev = ["langchain-model-profiles>=0.0.5", "ruff>=0.14.5"]
+docs = ["mkdocs-material>=9.7.0", "mkdocs-static-i18n>=1.3.0"]
+tests = [
+    "python-dotenv>=1.1.1",
+    "langchain-tests>=1.0.0",
+    "langchain-deepseek>=1.0.0",
+    "langchain-qwq>=0.3.0",
+    "langchain-ollama>=1.0.0",
+    "langchain-community>=0.4.1",
+]
+
+
+[tool.ruff.lint]
+select = ["E", "F", "I", "PGH003", "T201"]
+ignore = ["E501"]
langchain_dev_utils-1.3.1/src/langchain_dev_utils/__init__.py

@@ -0,0 +1 @@
+__version__ = "1.3.1"
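With the new top-level `__init__.py`, the installed version can now be read programmatically. A minimal sketch, assuming `langchain-dev-utils==1.3.1` is installed:

```python
# Minimal sketch: the __version__ attribute is new in 1.3.1 (added above).
import langchain_dev_utils

assert langchain_dev_utils.__version__ == "1.3.1"
```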
{langchain_dev_utils-1.2.16 → langchain_dev_utils-1.3.1}/src/langchain_dev_utils/agents/factory.py
RENAMED

@@ -5,9 +5,9 @@ from langchain.agents.middleware.types import (
     AgentMiddleware,
     AgentState,
     ResponseT,
+    StateT_co,
     _InputAgentState,
     _OutputAgentState,
-    StateT_co,
 )
 from langchain.agents.structured_output import ResponseFormat
 from langchain_core.messages import SystemMessage
{langchain_dev_utils-1.2.16 → langchain_dev_utils-1.3.1}/src/langchain_dev_utils/agents/middleware/__init__.py
RENAMED

@@ -1,4 +1,5 @@
 from .format_prompt import format_prompt
+from .handoffs import HandoffsAgentMiddleware, create_handoffs_tool
 from .model_fallback import ModelFallbackMiddleware
 from .model_router import ModelRouterMiddleware
 from .plan import (
@@ -24,4 +25,6 @@ __all__ = [
     "ModelRouterMiddleware",
     "ToolCallRepairMiddleware",
     "format_prompt",
+    "create_handoffs_tool",
+    "HandoffsAgentMiddleware",
 ]
langchain_dev_utils-1.3.1/src/langchain_dev_utils/agents/middleware/handoffs.py

@@ -0,0 +1,138 @@
+from typing import Any, Awaitable, Callable
+
+from langchain.agents import AgentState
+from langchain.agents.middleware import AgentMiddleware, ModelRequest, ModelResponse
+from langchain.agents.middleware.types import ModelCallResult
+from langchain.tools import BaseTool, ToolRuntime, tool
+from langchain_core.language_models.chat_models import BaseChatModel
+from langchain_core.messages import SystemMessage, ToolMessage
+from langgraph.types import Command
+from typing_extensions import NotRequired, Optional, TypedDict
+
+from langchain_dev_utils.chat_models import load_chat_model
+
+
+class MultiAgentState(AgentState):
+    active_agent: NotRequired[str]
+
+
+class AgentConfig(TypedDict):
+    model: NotRequired[str | BaseChatModel]
+    prompt: str | SystemMessage
+    tools: list[BaseTool | dict[str, Any]]
+    default: NotRequired[bool]
+
+
+def create_handoffs_tool(
+    agent_name: str,
+    tool_name: Optional[str] = None,
+    tool_description: Optional[str] = None,
+):
+    """Create a tool for handoffs to a specified agent.
+
+    Args:
+        agent_name (str): The name of the agent to transfer to.
+        tool_name (Optional[str], optional): The name of the tool. Defaults to None.
+        tool_description (Optional[str], optional): The description of the tool. Defaults to None.
+
+    Returns:
+        BaseTool: A tool instance for handoffs to the specified agent.
+
+    Example:
+        Basic usage
+        >>> from langchain_dev_utils.agents.middleware import create_handoffs_tool
+        >>> handoffs_tool = create_handoffs_tool("time_agent")
+    """
+    if tool_name is None:
+        tool_name = f"transfer_to_{agent_name}"
+        if not tool_name.endswith("_agent"):
+            tool_name += "_agent"
+
+    if tool_description is None:
+        tool_description = f"Transfer to the {agent_name}"
+
+    @tool(name_or_callable=tool_name, description=tool_description)
+    def handoffs_tool(runtime: ToolRuntime) -> Command:
+        return Command(
+            update={
+                "messages": [
+                    ToolMessage(
+                        content=f"Transferred to {agent_name}",
+                        tool_call_id=runtime.tool_call_id,
+                    )
+                ],
+                "active_agent": agent_name,
+            }
+        )
+
+    return handoffs_tool
+
+
+def _get_default_active_agent(state: dict[str, AgentConfig]) -> Optional[str]:
+    for agent_name, config in state.items():
+        if config.get("default", False):
+            return agent_name
+    return None
+
+
+class HandoffsAgentMiddleware(AgentMiddleware):
+    """Agent middleware for switching between multiple agents.
+    This middleware dynamically replaces model call parameters based on the currently active agent configuration, enabling seamless switching between different agents.
+
+    Args:
+        agents_config (dict[str, AgentConfig]): A dictionary of agent configurations.
+
+    Examples:
+        ```python
+        from langchain_dev_utils.agents.middleware import HandoffsAgentMiddleware
+        middleware = HandoffsAgentMiddleware(agents_config)
+        ```
+    """
+
+    state_schema = MultiAgentState
+
+    def __init__(self, agents_config: dict[str, AgentConfig]):
+        default_agent_name = _get_default_active_agent(agents_config)
+        if default_agent_name is None:
+            raise ValueError(
+                "No default agent found, you must set one by set default=True"
+            )
+        self.default_agent_name = default_agent_name
+        self.agents_config = agents_config
+
+    def _get_active_agent_config(self, request: ModelRequest) -> dict[str, Any]:
+        active_agent_name = request.state.get("active_agent", self.default_agent_name)
+
+        _config = self.agents_config[active_agent_name]
+
+        params = {}
+        if _config.get("model"):
+            model = _config.get("model")
+            if isinstance(model, str):
+                model = load_chat_model(model)
+            params["model"] = model
+        if _config.get("prompt"):
+            params["system_prompt"] = _config.get("prompt")
+        if _config.get("tools"):
+            params["tools"] = _config.get("tools")
+        return params
+
+    def wrap_model_call(
+        self, request: ModelRequest, handler: Callable[[ModelRequest], ModelResponse]
+    ) -> ModelCallResult:
+        override_kwargs = self._get_active_agent_config(request)
+        if override_kwargs:
+            return handler(request.override(**override_kwargs))
+        else:
+            return handler(request)
+
+    async def awrap_model_call(
+        self,
+        request: ModelRequest,
+        handler: Callable[[ModelRequest], Awaitable[ModelResponse]],
+    ) -> ModelCallResult:
+        override_kwargs = self._get_active_agent_config(request)
+        if override_kwargs:
+            return await handler(request.override(**override_kwargs))
+        else:
+            return await handler(request)
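The new pieces work together: each `create_handoffs_tool` call produces a tool that writes `active_agent` into the graph state, and `HandoffsAgentMiddleware` then overrides the model, system prompt, and tools of every model call with the active agent's configuration. A minimal usage sketch, assuming the same `provider:model` strings and provider registration used by the package's own tests further below (`dashscope:qwen-flash` and the `add` tool are illustrative only):

```python
# Minimal sketch of wiring HandoffsAgentMiddleware into create_agent.
# Model strings and the `add` tool are illustrative; providers such as
# "dashscope" must already be registered (the tests do this via
# tests/utils/register.py).
from langchain.tools import tool
from langchain_core.messages import HumanMessage

from langchain_dev_utils.agents import create_agent
from langchain_dev_utils.agents.middleware import (
    HandoffsAgentMiddleware,
    create_handoffs_tool,
)
from langchain_dev_utils.agents.middleware.handoffs import AgentConfig

# Handoff tools update state["active_agent"] and emit a ToolMessage.
transfer_to_math_agent = create_handoffs_tool("math_agent")
transfer_to_chat_agent = create_handoffs_tool("chat_agent")


@tool
def add(a: int, b: int) -> int:
    """Add two integers."""
    return a + b


agents_config: dict[str, AgentConfig] = {
    "math_agent": {
        "prompt": "You are a math assistant.",
        "tools": [add, transfer_to_chat_agent],
    },
    "chat_agent": {
        "prompt": "You are a general-purpose assistant.",
        "tools": [transfer_to_math_agent],
        "default": True,  # exactly one agent must set default=True
    },
}

agent = create_agent(
    model="dashscope:qwen-flash",  # illustrative provider:model string
    middleware=[HandoffsAgentMiddleware(agents_config)],
    tools=[add, transfer_to_math_agent, transfer_to_chat_agent],
)

result = agent.invoke({"messages": [HumanMessage("What is 2 + 3?")]})
```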
{langchain_dev_utils-1.2.16 → langchain_dev_utils-1.3.1}/src/langchain_dev_utils/agents/middleware/model_fallback.py
RENAMED

@@ -22,7 +22,7 @@ class ModelFallbackMiddleware(_ModelFallbackMiddleware):

         fallback = ModelFallbackMiddleware(
             "vllm:qwen3-8b", ## Try first on error
-            "
+            "vllm:gpt-oss-20b", #Then this
         )

         agent = create_agent(
@@ -30,7 +30,7 @@ class ModelFallbackMiddleware(_ModelFallbackMiddleware):
             middleware=[fallback],
         )

-        # If primary fails: tries qwen3-8b, then
+        # If primary fails: tries qwen3-8b, then gpt-oss-20b
         result = await agent.invoke({"messages": [HumanMessage("Hello")]})
         ```
     """
{langchain_dev_utils-1.2.16 → langchain_dev_utils-1.3.1}/src/langchain_dev_utils/chat_models/adapters/openai_compatible.py
RENAMED

@@ -126,13 +126,12 @@ class _BaseChatOpenAICompatible(BaseChatOpenAI):
     Base template class for OpenAI-compatible chat model implementations.

     This class provides a foundation for integrating various LLM providers that
-    offer OpenAI-compatible APIs
-    and many others). It enhances the base OpenAI functionality by:
+    offer OpenAI-compatible APIs. It enhances the base OpenAI functionality by:

     **1. Supports output of more types of reasoning content (reasoning_content)**
     ChatOpenAI can only output reasoning content natively supported by official
     OpenAI models, while OpenAICompatibleChatModel can output reasoning content
-    from other model providers
+    from other model providers.

     **2. Dynamically adapts to choose the most suitable structured-output method**
     OpenAICompatibleChatModel adds method="auto" (default), which selects the best
@@ -593,7 +592,7 @@ def _create_openai_compatible_model(
     configuring environment variable mappings and default base URLs specific to each provider.

     Args:
-        provider: Provider identifier (e.g
+        provider: Provider identifier (e.g.`vllm`)
         base_url: Default API base URL for the provider
         compatibility_options: Optional configuration for the provider

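The docstring above describes two behaviors of the OpenAI-compatible adapter: surfacing `reasoning_content` from non-OpenAI providers and auto-selecting a structured-output method. A minimal sketch of how that might be exercised, assuming a `vllm` provider has already been registered with this package; the model name and schema are illustrative:

```python
# Illustrative sketch only: exercising an OpenAI-compatible model loaded via
# langchain-dev-utils. Assumes a "vllm" provider was registered beforehand
# (the tests register their providers in tests/utils/register.py).
from pydantic import BaseModel

from langchain_dev_utils.chat_models import load_chat_model


class CityTemp(BaseModel):
    city: str
    temperature_c: float


model = load_chat_model("vllm:qwen3-8b")  # illustrative provider:model string

# method="auto" is the adapter's default described above, so
# with_structured_output() lets it pick the structured-output strategy the
# provider supports best (tool calling vs. JSON-based output).
structured_model = model.with_structured_output(CityTemp)
result = structured_model.invoke("Report the current temperature in Paris.")
```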
langchain_dev_utils-1.3.1/tests/test_handoffs_middleware.py

@@ -0,0 +1,75 @@
+from datetime import datetime
+from langchain.tools import tool
+from langchain_core.messages import HumanMessage, ToolMessage
+from langchain_dev_utils.agents import create_agent
+from langchain_dev_utils.agents.middleware import (
+    HandoffsAgentMiddleware,
+    create_handoffs_tool,
+)
+from langchain_dev_utils.agents.middleware.handoffs import AgentConfig
+from tests.utils.register import register_all_model_providers
+
+register_all_model_providers()
+
+transfer_time_agent = create_handoffs_tool("time_agent")
+transfer_talk_agent = create_handoffs_tool("talk_agent")
+
+
+@tool
+def get_current_time() -> str:
+    """Get the current time."""
+    return datetime.now().strftime("%Y-%m-%d %H:%M:%S")
+
+
+agents_config: dict[str, AgentConfig] = {
+    "time_agent": {
+        "model": "zai:glm-4.5",
+        "prompt": "你是一个时间助手,你可以回答用户的时间相关问题",  # "You are a time assistant; you can answer the user's time-related questions"
+        "tools": [transfer_talk_agent, get_current_time],
+    },
+    "talk_agent": {
+        "prompt": "你是一个对话助手,你可以回答用户的问题",  # "You are a conversation assistant; you can answer the user's questions"
+        "tools": [transfer_time_agent],
+        "default": True,
+    },
+}
+
+
+def test_handoffs_middleware():
+    agent = create_agent(
+        model="dashscope:qwen-flash",
+        middleware=[HandoffsAgentMiddleware(agents_config)],
+        tools=[
+            get_current_time,
+            transfer_time_agent,
+            transfer_talk_agent,
+        ],
+    )
+
+    response = agent.invoke({"messages": [HumanMessage(content="get current time")]})
+
+    assert response
+    assert response["messages"][-1].response_metadata.get("model_name") == "glm-4.5"
+    assert isinstance(response["messages"][-2], ToolMessage)
+    assert "active_agent" in response and response["active_agent"] == "time_agent"
+
+
+async def test_handoffs_middleware_async():
+    agent = create_agent(
+        model="dashscope:qwen-flash",
+        middleware=[HandoffsAgentMiddleware(agents_config)],
+        tools=[
+            get_current_time,
+            transfer_time_agent,
+            transfer_talk_agent,
+        ],
+    )
+
+    response = await agent.ainvoke(
+        {"messages": [HumanMessage(content="get current time")]}
+    )
+
+    assert response
+    assert response["messages"][-1].response_metadata.get("model_name") == "glm-4.5"
+    assert isinstance(response["messages"][-2], ToolMessage)
+    assert "active_agent" in response and response["active_agent"] == "time_agent"