langchain-dev-utils 1.3.6.tar.gz → 1.4.0.tar.gz

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
Files changed (73)
  1. {langchain_dev_utils-1.3.6 → langchain_dev_utils-1.4.0}/PKG-INFO +7 -6
  2. {langchain_dev_utils-1.3.6 → langchain_dev_utils-1.4.0}/README.md +6 -5
  3. {langchain_dev_utils-1.3.6 → langchain_dev_utils-1.4.0}/README_cn.md +6 -5
  4. {langchain_dev_utils-1.3.6 → langchain_dev_utils-1.4.0}/pyproject.toml +51 -47
  5. langchain_dev_utils-1.4.0/src/langchain_dev_utils/__init__.py +1 -0
  6. {langchain_dev_utils-1.3.6 → langchain_dev_utils-1.4.0}/src/langchain_dev_utils/_utils.py +19 -1
  7. {langchain_dev_utils-1.3.6 → langchain_dev_utils-1.4.0}/src/langchain_dev_utils/agents/middleware/handoffs.py +30 -20
  8. {langchain_dev_utils-1.3.6 → langchain_dev_utils-1.4.0}/src/langchain_dev_utils/agents/middleware/model_router.py +9 -12
  9. {langchain_dev_utils-1.3.6 → langchain_dev_utils-1.4.0}/src/langchain_dev_utils/agents/middleware/plan.py +13 -18
  10. {langchain_dev_utils-1.3.6 → langchain_dev_utils-1.4.0}/src/langchain_dev_utils/agents/wrap.py +20 -7
  11. {langchain_dev_utils-1.3.6 → langchain_dev_utils-1.4.0}/src/langchain_dev_utils/chat_models/adapters/openai_compatible.py +3 -2
  12. langchain_dev_utils-1.4.0/src/langchain_dev_utils/graph/__init__.py +7 -0
  13. langchain_dev_utils-1.4.0/src/langchain_dev_utils/graph/parallel.py +119 -0
  14. langchain_dev_utils-1.4.0/src/langchain_dev_utils/graph/sequential.py +78 -0
  15. langchain_dev_utils-1.4.0/src/langchain_dev_utils/graph/types.py +3 -0
  16. {langchain_dev_utils-1.3.6 → langchain_dev_utils-1.4.0}/src/langchain_dev_utils/message_convert/format.py +34 -1
  17. {langchain_dev_utils-1.3.6 → langchain_dev_utils-1.4.0}/src/langchain_dev_utils/pipeline/parallel.py +6 -0
  18. {langchain_dev_utils-1.3.6 → langchain_dev_utils-1.4.0}/src/langchain_dev_utils/pipeline/sequential.py +6 -0
  19. langchain_dev_utils-1.4.0/tests/test_graph.py +64 -0
  20. {langchain_dev_utils-1.3.6 → langchain_dev_utils-1.4.0}/tests/test_handoffs_middleware.py +51 -4
  21. {langchain_dev_utils-1.3.6 → langchain_dev_utils-1.4.0}/tests/test_load_model.py +0 -11
  22. {langchain_dev_utils-1.3.6 → langchain_dev_utils-1.4.0}/tests/test_messages.py +16 -10
  23. {langchain_dev_utils-1.3.6 → langchain_dev_utils-1.4.0}/tests/test_wrap_agent.py +37 -4
  24. {langchain_dev_utils-1.3.6 → langchain_dev_utils-1.4.0}/uv.lock +4047 -2883
  25. langchain_dev_utils-1.3.6/src/langchain_dev_utils/__init__.py +0 -1
  26. langchain_dev_utils-1.3.6/src/langchain_dev_utils/agents/file_system.py +0 -252
  27. langchain_dev_utils-1.3.6/src/langchain_dev_utils/agents/plan.py +0 -188
  28. langchain_dev_utils-1.3.6/tests/test_pipline.py +0 -76
  29. {langchain_dev_utils-1.3.6 → langchain_dev_utils-1.4.0}/.gitignore +0 -0
  30. {langchain_dev_utils-1.3.6 → langchain_dev_utils-1.4.0}/.python-version +0 -0
  31. {langchain_dev_utils-1.3.6 → langchain_dev_utils-1.4.0}/.vscode/settings.json +0 -0
  32. {langchain_dev_utils-1.3.6 → langchain_dev_utils-1.4.0}/LICENSE +0 -0
  33. {langchain_dev_utils-1.3.6 → langchain_dev_utils-1.4.0}/src/langchain_dev_utils/agents/__init__.py +0 -0
  34. {langchain_dev_utils-1.3.6 → langchain_dev_utils-1.4.0}/src/langchain_dev_utils/agents/factory.py +0 -0
  35. {langchain_dev_utils-1.3.6 → langchain_dev_utils-1.4.0}/src/langchain_dev_utils/agents/middleware/__init__.py +0 -0
  36. {langchain_dev_utils-1.3.6 → langchain_dev_utils-1.4.0}/src/langchain_dev_utils/agents/middleware/format_prompt.py +0 -0
  37. {langchain_dev_utils-1.3.6 → langchain_dev_utils-1.4.0}/src/langchain_dev_utils/agents/middleware/model_fallback.py +0 -0
  38. {langchain_dev_utils-1.3.6 → langchain_dev_utils-1.4.0}/src/langchain_dev_utils/agents/middleware/summarization.py +0 -0
  39. {langchain_dev_utils-1.3.6 → langchain_dev_utils-1.4.0}/src/langchain_dev_utils/agents/middleware/tool_call_repair.py +0 -0
  40. {langchain_dev_utils-1.3.6 → langchain_dev_utils-1.4.0}/src/langchain_dev_utils/agents/middleware/tool_emulator.py +0 -0
  41. {langchain_dev_utils-1.3.6 → langchain_dev_utils-1.4.0}/src/langchain_dev_utils/agents/middleware/tool_selection.py +0 -0
  42. {langchain_dev_utils-1.3.6 → langchain_dev_utils-1.4.0}/src/langchain_dev_utils/chat_models/__init__.py +0 -0
  43. {langchain_dev_utils-1.3.6 → langchain_dev_utils-1.4.0}/src/langchain_dev_utils/chat_models/adapters/__init__.py +0 -0
  44. {langchain_dev_utils-1.3.6 → langchain_dev_utils-1.4.0}/src/langchain_dev_utils/chat_models/adapters/create_utils.py +0 -0
  45. {langchain_dev_utils-1.3.6 → langchain_dev_utils-1.4.0}/src/langchain_dev_utils/chat_models/adapters/register_profiles.py +0 -0
  46. {langchain_dev_utils-1.3.6 → langchain_dev_utils-1.4.0}/src/langchain_dev_utils/chat_models/base.py +0 -0
  47. {langchain_dev_utils-1.3.6 → langchain_dev_utils-1.4.0}/src/langchain_dev_utils/chat_models/types.py +0 -0
  48. {langchain_dev_utils-1.3.6 → langchain_dev_utils-1.4.0}/src/langchain_dev_utils/embeddings/__init__.py +0 -0
  49. {langchain_dev_utils-1.3.6 → langchain_dev_utils-1.4.0}/src/langchain_dev_utils/embeddings/adapters/__init__.py +0 -0
  50. {langchain_dev_utils-1.3.6 → langchain_dev_utils-1.4.0}/src/langchain_dev_utils/embeddings/adapters/create_utils.py +0 -0
  51. {langchain_dev_utils-1.3.6 → langchain_dev_utils-1.4.0}/src/langchain_dev_utils/embeddings/adapters/openai_compatible.py +0 -0
  52. {langchain_dev_utils-1.3.6 → langchain_dev_utils-1.4.0}/src/langchain_dev_utils/embeddings/base.py +0 -0
  53. {langchain_dev_utils-1.3.6 → langchain_dev_utils-1.4.0}/src/langchain_dev_utils/message_convert/__init__.py +0 -0
  54. {langchain_dev_utils-1.3.6 → langchain_dev_utils-1.4.0}/src/langchain_dev_utils/message_convert/content.py +0 -0
  55. {langchain_dev_utils-1.3.6 → langchain_dev_utils-1.4.0}/src/langchain_dev_utils/pipeline/__init__.py +0 -0
  56. {langchain_dev_utils-1.3.6 → langchain_dev_utils-1.4.0}/src/langchain_dev_utils/pipeline/types.py +0 -0
  57. {langchain_dev_utils-1.3.6 → langchain_dev_utils-1.4.0}/src/langchain_dev_utils/py.typed +0 -0
  58. {langchain_dev_utils-1.3.6 → langchain_dev_utils-1.4.0}/src/langchain_dev_utils/tool_calling/__init__.py +0 -0
  59. {langchain_dev_utils-1.3.6 → langchain_dev_utils-1.4.0}/src/langchain_dev_utils/tool_calling/human_in_the_loop.py +0 -0
  60. {langchain_dev_utils-1.3.6 → langchain_dev_utils-1.4.0}/src/langchain_dev_utils/tool_calling/utils.py +0 -0
  61. {langchain_dev_utils-1.3.6 → langchain_dev_utils-1.4.0}/tests/__init__.py +0 -0
  62. {langchain_dev_utils-1.3.6 → langchain_dev_utils-1.4.0}/tests/test_agent.py +0 -0
  63. {langchain_dev_utils-1.3.6 → langchain_dev_utils-1.4.0}/tests/test_chat_models.py +0 -0
  64. {langchain_dev_utils-1.3.6 → langchain_dev_utils-1.4.0}/tests/test_embedding.py +0 -0
  65. {langchain_dev_utils-1.3.6 → langchain_dev_utils-1.4.0}/tests/test_human_in_the_loop.py +0 -0
  66. {langchain_dev_utils-1.3.6 → langchain_dev_utils-1.4.0}/tests/test_load_embbeding.py +0 -0
  67. {langchain_dev_utils-1.3.6 → langchain_dev_utils-1.4.0}/tests/test_model_tool_emulator.py +0 -0
  68. {langchain_dev_utils-1.3.6 → langchain_dev_utils-1.4.0}/tests/test_plan_middleware.py +0 -0
  69. {langchain_dev_utils-1.3.6 → langchain_dev_utils-1.4.0}/tests/test_router_model.py +0 -0
  70. {langchain_dev_utils-1.3.6 → langchain_dev_utils-1.4.0}/tests/test_tool_call_repair.py +0 -0
  71. {langchain_dev_utils-1.3.6 → langchain_dev_utils-1.4.0}/tests/test_tool_calling.py +0 -0
  72. {langchain_dev_utils-1.3.6 → langchain_dev_utils-1.4.0}/tests/utils/__init__.py +0 -0
  73. {langchain_dev_utils-1.3.6 → langchain_dev_utils-1.4.0}/tests/utils/register.py +0 -0
PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: langchain-dev-utils
- Version: 1.3.6
+ Version: 1.4.0
  Summary: A practical utility library for LangChain and LangGraph development
  Project-URL: Source Code, https://github.com/TBice123123/langchain-dev-utils
  Project-URL: repository, https://github.com/TBice123123/langchain-dev-utils
@@ -27,10 +27,11 @@ Description-Content-Type: text/markdown
  <a href="https://tbice123123.github.io/langchain-dev-utils/zh/">中文</a>
  </p>

- [![PyPI](https://img.shields.io/pypi/v/langchain-dev-utils.svg?color=%2334D058&label=pypi%20package)](https://pypi.org/project/langchain-dev-utils/)
- [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT)
- [![Python](https://img.shields.io/badge/python-3.11|3.12|3.13|3.14-%2334D058)](https://www.python.org/downloads)
- [![Downloads](https://static.pepy.tech/badge/langchain-dev-utils/month)](https://pepy.tech/project/langchain-dev-utils)
+ [![GitHub Repo](https://img.shields.io/badge/GitHub-Repo-black.svg?logo=github)](https://github.com/TBice123123/langchain-dev-utils)
+ [![PyPI](https://img.shields.io/pypi/v/langchain-dev-utils.svg?color=%2334D058&label=pypi%20package&logo=python)](https://pypi.org/project/langchain-dev-utils/)
+ [![Python Version](https://img.shields.io/badge/python-3.11%2B-blue.svg?logo=python&label=Python)](https://python.org)
+ [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg?label=License)](https://opensource.org/licenses/MIT)
+ [![Last Commit](https://img.shields.io/github/last-commit/TBice123123/langchain-dev-utils)](https://github.com/TBice123123/langchain-dev-utils)
  [![Documentation](https://img.shields.io/badge/docs-latest-blue)](https://tbice123123.github.io/langchain-dev-utils)

  > This is the English version. For the Chinese version, please visit [中文版本](https://github.com/TBice123123/langchain-dev-utils/blob/master/README_cn.md)
@@ -51,7 +52,7 @@ Tired of writing repetitive code in LangChain development? `langchain-dev-utils`
  - **💬 Flexible message handling** - Support for chain-of-thought concatenation, streaming processing, and message formatting
  - **🛠️ Powerful tool calling** - Built-in tool call detection, parameter parsing, and human review functionality
  - **🤖 Efficient Agent development** - Simplify agent creation process, expand more common middleware
- - **📊 Flexible state graph composition** - Support for serial and parallel composition of multiple StateGraphs
+ - **📊 Convenient State Graph Construction** - Provides pre-built functions to easily create sequential or parallel state graphs

  ## ⚡ Quick Start

README.md
@@ -9,10 +9,11 @@
  <a href="https://tbice123123.github.io/langchain-dev-utils/zh/">中文</a>
  </p>

- [![PyPI](https://img.shields.io/pypi/v/langchain-dev-utils.svg?color=%2334D058&label=pypi%20package)](https://pypi.org/project/langchain-dev-utils/)
- [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT)
- [![Python](https://img.shields.io/badge/python-3.11|3.12|3.13|3.14-%2334D058)](https://www.python.org/downloads)
- [![Downloads](https://static.pepy.tech/badge/langchain-dev-utils/month)](https://pepy.tech/project/langchain-dev-utils)
+ [![GitHub Repo](https://img.shields.io/badge/GitHub-Repo-black.svg?logo=github)](https://github.com/TBice123123/langchain-dev-utils)
+ [![PyPI](https://img.shields.io/pypi/v/langchain-dev-utils.svg?color=%2334D058&label=pypi%20package&logo=python)](https://pypi.org/project/langchain-dev-utils/)
+ [![Python Version](https://img.shields.io/badge/python-3.11%2B-blue.svg?logo=python&label=Python)](https://python.org)
+ [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg?label=License)](https://opensource.org/licenses/MIT)
+ [![Last Commit](https://img.shields.io/github/last-commit/TBice123123/langchain-dev-utils)](https://github.com/TBice123123/langchain-dev-utils)
  [![Documentation](https://img.shields.io/badge/docs-latest-blue)](https://tbice123123.github.io/langchain-dev-utils)

  > This is the English version. For the Chinese version, please visit [中文版本](https://github.com/TBice123123/langchain-dev-utils/blob/master/README_cn.md)
@@ -33,7 +34,7 @@ Tired of writing repetitive code in LangChain development? `langchain-dev-utils`
  - **💬 Flexible message handling** - Support for chain-of-thought concatenation, streaming processing, and message formatting
  - **🛠️ Powerful tool calling** - Built-in tool call detection, parameter parsing, and human review functionality
  - **🤖 Efficient Agent development** - Simplify agent creation process, expand more common middleware
- - **📊 Flexible state graph composition** - Support for serial and parallel composition of multiple StateGraphs
+ - **📊 Convenient State Graph Construction** - Provides pre-built functions to easily create sequential or parallel state graphs

  ## ⚡ Quick Start

README_cn.md
@@ -9,10 +9,11 @@
  <a href="https://tbice123123.github.io/langchain-dev-utils/zh/">中文</a>
  </p>

- [![PyPI](https://img.shields.io/pypi/v/langchain-dev-utils.svg?color=%2334D058&label=pypi%20package)](https://pypi.org/project/langchain-dev-utils/)
- [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT)
- [![Python](https://img.shields.io/badge/python-3.11|3.12|3.13|3.14-%2334D058)](https://www.python.org/downloads)
- [![Downloads](https://static.pepy.tech/badge/langchain-dev-utils/month)](https://pepy.tech/project/langchain-dev-utils)
+ [![GitHub Repo](https://img.shields.io/badge/GitHub-Repo-black.svg?logo=github)](https://github.com/TBice123123/langchain-dev-utils)
+ [![PyPI](https://img.shields.io/pypi/v/langchain-dev-utils.svg?color=%2334D058&label=pypi%20package&logo=python)](https://pypi.org/project/langchain-dev-utils/)
+ [![Python Version](https://img.shields.io/badge/python-3.11%2B-blue.svg?logo=python&label=Python)](https://python.org)
+ [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg?label=License)](https://opensource.org/licenses/MIT)
+ [![Last Commit](https://img.shields.io/github/last-commit/TBice123123/langchain-dev-utils)](https://github.com/TBice123123/langchain-dev-utils)
  [![Documentation](https://img.shields.io/badge/docs-latest-blue)](https://tbice123123.github.io/langchain-dev-utils/zh/)

  > 当前为中文版,英文版请访问[English Version](https://github.com/TBice123123/langchain-dev-utils/blob/master/README.md)
@@ -33,7 +34,7 @@
  - **💬 灵活的消息处理** - 支持思维链拼接、流式处理和消息格式化
  - **🛠️ 强大的工具调用** - 内置工具调用检测、参数解析和人工审核功能
  - **🤖 高效的 Agent 开发** - 简化智能体创建流程,扩充更多的常用中间件
- - **📊 灵活的状态图组合** - 支持串行和并行方式组合多个 StateGraph
+ - **📊 便捷的状态图构建** - 提供预构建函数方便构建顺序或者并行的状态图

  ## ⚡ 快速开始

pyproject.toml
@@ -1,47 +1,51 @@
- [project]
- name = "langchain-dev-utils"
- version = "1.3.6"
- description = "A practical utility library for LangChain and LangGraph development"
- readme = "README.md"
- authors = [{ name = "tiebingice", email = "tiebingice123@outlook.com" }]
- requires-python = ">=3.11"
- dependencies = ["langchain>=1.2.0", "langchain-core>=1.2.5", "langgraph>=1.0.0"]
-
- [project.urls]
- "Source Code" = "https://github.com/TBice123123/langchain-dev-utils"
- repository = "https://github.com/TBice123123/langchain-dev-utils"
- documentation = "https://tbice123123.github.io/langchain-dev-utils"
-
-
- [project.optional-dependencies]
- standard = ["json-repair>=0.53.1", "langchain-openai"]
-
- [build-system]
- requires = ["hatchling"]
- build-backend = "hatchling.build"
-
- [tool.hatch.build]
- exclude = ["/data", "/docs", "mkdocs.yml"]
-
- [tool.pytest.ini_options]
- asyncio_mode = "auto"
- testpaths = ["tests"]
- python_files = ["test_*.py"]
- python_functions = ["test_*"]
-
- [dependency-groups]
- dev = ["langchain-model-profiles>=0.0.5", "ruff>=0.14.5"]
- docs = ["mkdocs-material>=9.7.0", "mkdocs-static-i18n>=1.3.0"]
- tests = [
-     "python-dotenv>=1.1.1",
-     "langchain-tests>=1.0.0",
-     "langchain-deepseek>=1.0.0",
-     "langchain-qwq>=0.3.0",
-     "langchain-ollama>=1.0.0",
-     "langchain-community>=0.4.1",
- ]
-
-
- [tool.ruff.lint]
- select = ["E", "F", "I", "PGH003", "T201"]
- ignore = ["E501"]
+ [project]
+ name = "langchain-dev-utils"
+ version = "1.4.0"
+ description = "A practical utility library for LangChain and LangGraph development"
+ readme = "README.md"
+ authors = [{ name = "tiebingice", email = "tiebingice123@outlook.com" }]
+ requires-python = ">=3.11"
+ dependencies = ["langchain>=1.2.0", "langchain-core>=1.2.5", "langgraph>=1.0.0"]
+
+ [project.urls]
+ "Source Code" = "https://github.com/TBice123123/langchain-dev-utils"
+ repository = "https://github.com/TBice123123/langchain-dev-utils"
+ documentation = "https://tbice123123.github.io/langchain-dev-utils"
+
+
+ [project.optional-dependencies]
+ standard = ["json-repair>=0.53.1", "langchain-openai"]
+
+ [build-system]
+ requires = ["hatchling"]
+ build-backend = "hatchling.build"
+
+ [tool.hatch.build]
+ exclude = ["/data", "/docs", "mkdocs.yml"]
+
+ [tool.pytest.ini_options]
+ asyncio_mode = "auto"
+ testpaths = ["tests"]
+ python_files = ["test_*.py"]
+ python_functions = ["test_*"]
+
+ [dependency-groups]
+ dev = ["langchain-model-profiles>=0.0.5", "ruff>=0.14.5"]
+ docs = [
+     "jupyter>=1.1.1",
+     "mkdocs-material>=9.7.0",
+     "mkdocs-static-i18n>=1.3.0",
+ ]
+ tests = [
+     "python-dotenv>=1.1.1",
+     "langchain-tests>=1.0.0",
+     "langchain-deepseek>=1.0.0",
+     "langchain-qwq>=0.3.0",
+     "langchain-ollama>=1.0.0",
+     "langchain-community>=0.4.1",
+ ]
+
+
+ [tool.ruff.lint]
+ select = ["E", "F", "I", "PGH003", "T201"]
+ ignore = ["E501"]
src/langchain_dev_utils/__init__.py (new file)
@@ -0,0 +1 @@
+ __version__ = "1.4.0"
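
The new top-level `__init__.py` does nothing beyond exposing the release number, so the version can now be read at runtime:

```python
# Reads the module-level __version__ added in 1.4.0.
import langchain_dev_utils

print(langchain_dev_utils.__version__)  # "1.4.0"
```
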
src/langchain_dev_utils/_utils.py
@@ -1,9 +1,27 @@
  from importlib import util
- from typing import Literal, Optional
+ from typing import Literal, Optional, cast

+ from langgraph.graph import StateGraph
+ from langgraph.graph.state import StateNode
  from pydantic import BaseModel


+ def _transform_node_to_tuple(
+     node: StateNode | tuple[str, StateNode],
+ ) -> tuple[str, StateNode]:
+     if not isinstance(node, tuple):
+         if isinstance(node, StateGraph):
+             node = node.compile()
+             name = node.name
+             return name, node
+         name = cast(str, getattr(node, "name", getattr(node, "__name__", None)))
+         if name is None:
+             raise ValueError("Node name must be provided if action is not a function")
+         return name, node
+     else:
+         return node
+
+
  def _check_pkg_install(
      pkg: Literal["langchain_openai", "json_repair"],
  ) -> None:
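
The new `_transform_node_to_tuple` helper normalizes the node values accepted by the graph builders introduced in this release. A minimal sketch of the behavior visible in the hunk above; it is a private helper, and the state schema and node names below are purely illustrative:

```python
# Sketch of the normalization rules shown above: a (name, node) tuple passes
# through, an uncompiled StateGraph is compiled and keyed by its name, and a
# plain function is keyed by its __name__.
from typing import TypedDict

from langgraph.graph import StateGraph

from langchain_dev_utils._utils import _transform_node_to_tuple


class State(TypedDict):
    value: int


def increment(state: State) -> dict:
    return {"value": state["value"] + 1}


sub = StateGraph(State)
sub.add_node("increment", increment)
sub.add_edge("__start__", "increment")

print(_transform_node_to_tuple(increment))              # ("increment", <function increment>)
print(_transform_node_to_tuple(("step_1", increment)))  # tuple passes through unchanged
name, compiled = _transform_node_to_tuple(sub)          # compiled subgraph keyed by its name
```
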
src/langchain_dev_utils/agents/middleware/handoffs.py
@@ -1,4 +1,4 @@
- from typing import Any, Awaitable, Callable, Literal
+ from typing import Any, Awaitable, Callable, Literal, cast

  from langchain.agents import AgentState
  from langchain.agents.middleware import AgentMiddleware, ModelRequest, ModelResponse
@@ -128,6 +128,7 @@ class HandoffAgentMiddleware(AgentMiddleware):
      Args:
          agents_config (dict[str, AgentConfig]): A dictionary of agent configurations.
          custom_handoffs_tool_descriptions (Optional[dict[str, str]]): A dictionary of custom tool descriptions for handoffs tools. Defaults to None.
+         handoffs_tool_overrides (Optional[dict[str, BaseTool]]): A dictionary of handoffs tools to override. Defaults to None.

      Examples:
          ```python
@@ -142,6 +143,7 @@ class HandoffAgentMiddleware(AgentMiddleware):
          self,
          agents_config: dict[str, AgentConfig],
          custom_handoffs_tool_descriptions: Optional[dict[str, str]] = None,
+         handoffs_tool_overrides: Optional[dict[str, BaseTool]] = None,
      ) -> None:
          default_agent_name = _get_default_active_agent(agents_config)
          if default_agent_name is None:
@@ -152,13 +154,23 @@
          if custom_handoffs_tool_descriptions is None:
              custom_handoffs_tool_descriptions = {}

-         handoffs_tools = [
-             _create_handoffs_tool(
-                 agent_name,
-                 custom_handoffs_tool_descriptions.get(agent_name),
-             )
-             for agent_name in agents_config.keys()
-         ]
+         if handoffs_tool_overrides is None:
+             handoffs_tool_overrides = {}
+
+         handoffs_tools = []
+         for agent_name in agents_config.keys():
+             if not handoffs_tool_overrides.get(agent_name):
+                 handoffs_tools.append(
+                     _create_handoffs_tool(
+                         agent_name,
+                         custom_handoffs_tool_descriptions.get(agent_name),
+                     )
+                 )
+             else:
+                 handoffs_tools.append(
+                     cast(BaseTool, handoffs_tool_overrides.get(agent_name))
+                 )
+
          self.default_agent_name = default_agent_name
          self.agents_config = _transform_agent_config(
              agents_config,
@@ -166,7 +178,7 @@
          )
          self.tools = handoffs_tools

-     def _get_active_agent_config(self, request: ModelRequest) -> dict[str, Any]:
+     def _get_override_request(self, request: ModelRequest) -> ModelRequest:
          active_agent_name = request.state.get("active_agent", self.default_agent_name)

          _config = self.agents_config[active_agent_name]
@@ -181,24 +193,22 @@
              params["system_prompt"] = _config.get("prompt")
          if _config.get("tools"):
              params["tools"] = _config.get("tools")
-         return params
+
+         if params:
+             return request.override(**params)
+         else:
+             return request

      def wrap_model_call(
          self, request: ModelRequest, handler: Callable[[ModelRequest], ModelResponse]
      ) -> ModelCallResult:
-         override_kwargs = self._get_active_agent_config(request)
-         if override_kwargs:
-             return handler(request.override(**override_kwargs))
-         else:
-             return handler(request)
+         override_request = self._get_override_request(request)
+         return handler(override_request)

      async def awrap_model_call(
          self,
          request: ModelRequest,
          handler: Callable[[ModelRequest], Awaitable[ModelResponse]],
      ) -> ModelCallResult:
-         override_kwargs = self._get_active_agent_config(request)
-         if override_kwargs:
-             return await handler(request.override(**override_kwargs))
-         else:
-             return await handler(request)
+         override_request = self._get_override_request(request)
+         return await handler(override_request)
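
The new `handoffs_tool_overrides` parameter lets a caller replace the auto-generated handoff tool for selected agents while the rest are still created by `_create_handoffs_tool`. A hedged sketch of how this might be wired up; the `agents_config` field names (`prompt`, `default`) and the import path are assumptions inferred from the hunks and the file list above, not from documented API:

```python
# Hypothetical usage sketch: override the generated handoff tool for one agent.
# Field names inside agents_config are assumptions based on the hunks above.
from langchain_core.tools import tool

from langchain_dev_utils.agents.middleware.handoffs import HandoffAgentMiddleware


@tool("transfer_to_researcher")
def transfer_to_researcher() -> str:
    """Hand the conversation off to the researcher agent."""
    # The middleware routes on the tool call itself, so the body only needs
    # to acknowledge the transfer.
    return "Transferring to researcher."


middleware = HandoffAgentMiddleware(
    agents_config={
        "writer": {"prompt": "You write the final answer.", "default": True},
        "researcher": {"prompt": "You gather supporting facts."},
    },
    handoffs_tool_overrides={"researcher": transfer_to_researcher},
)
```
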
src/langchain_dev_utils/agents/middleware/model_router.py
@@ -150,7 +150,7 @@ class ModelRouterMiddleware(AgentMiddleware):
          model_name = await self._aselect_model(state["messages"])
          return {"router_model_selection": model_name}

-     def _get_override_kwargs(self, request: ModelRequest) -> dict[str, Any]:
+     def _get_override_request(self, request: ModelRequest) -> ModelRequest:
          model_dict = {
              item["model_name"]: {
                  "tools": item.get("tools", None),
@@ -180,24 +180,21 @@
                      content=model_values["system_prompt"]
                  )

-         return override_kwargs
+         if override_kwargs:
+             return request.override(**override_kwargs)
+         else:
+             return request

      def wrap_model_call(
          self, request: ModelRequest, handler: Callable[[ModelRequest], ModelResponse]
      ) -> ModelCallResult:
-         override_kwargs = self._get_override_kwargs(request)
-         if override_kwargs:
-             return handler(request.override(**override_kwargs))
-         else:
-             return handler(request)
+         override_request = self._get_override_request(request)
+         return handler(override_request)

      async def awrap_model_call(
          self,
          request: ModelRequest,
          handler: Callable[[ModelRequest], Awaitable[ModelResponse]],
      ) -> ModelCallResult:
-         override_kwargs = self._get_override_kwargs(request)
-         if override_kwargs:
-             return await handler(request.override(**override_kwargs))
-         else:
-             return await handler(request)
+         override_request = self._get_override_request(request)
+         return await handler(override_request)
src/langchain_dev_utils/agents/middleware/plan.py
@@ -335,12 +335,8 @@ class PlanMiddleware(AgentMiddleware):
          self.system_prompt = system_prompt
          self.tools = tools

-     def wrap_model_call(
-         self,
-         request: ModelRequest,
-         handler: Callable[[ModelRequest], ModelResponse],
-     ) -> ModelCallResult:
-         """Update the system message to include the plan system prompt."""
+     def _get_override_request(self, request: ModelRequest) -> ModelRequest:
+         """Add the plan system prompt to the system message."""
          if request.system_message is not None:
              new_system_content = [
                  *request.system_message.content_blocks,
@@ -351,7 +347,15 @@
          new_system_message = SystemMessage(
              content=cast("list[str | dict[str, str]]", new_system_content)
          )
-         return handler(request.override(system_message=new_system_message))
+         return request.override(system_message=new_system_message)
+
+     def wrap_model_call(
+         self,
+         request: ModelRequest,
+         handler: Callable[[ModelRequest], ModelResponse],
+     ) -> ModelCallResult:
+         override_request = self._get_override_request(request)
+         return handler(override_request)

      async def awrap_model_call(
          self,
@@ -359,14 +363,5 @@
          handler: Callable[[ModelRequest], Awaitable[ModelResponse]],
      ) -> ModelCallResult:
          """Update the system message to include the plan system prompt."""
-         if request.system_message is not None:
-             new_system_content = [
-                 *request.system_message.content_blocks,
-                 {"type": "text", "text": f"\n\n{self.system_prompt}"},
-             ]
-         else:
-             new_system_content = [{"type": "text", "text": self.system_prompt}]
-         new_system_message = SystemMessage(
-             content=cast("list[str | dict[str, str]]", new_system_content)
-         )
-         return await handler(request.override(system_message=new_system_message))
+         override_request = self._get_override_request(request)
+         return await handler(override_request)
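
The handoffs, model-router, and plan middleware hunks above all apply the same refactor: the possibly-overridden `ModelRequest` is computed once in a `_get_override_request` helper and reused by both `wrap_model_call` and `awrap_model_call`, instead of duplicating the override logic in each hook. The stand-alone sketch below restates that shape in plain Python; `Request` is a stand-in, not LangChain's `ModelRequest`:

```python
# Distilled pattern only -- not the library's classes. One helper decides whether
# the request changes; the sync and async wrappers both delegate to it.
from dataclasses import dataclass, replace
from typing import Awaitable, Callable


@dataclass(frozen=True)
class Request:  # stand-in for ModelRequest
    system_prompt: str


class OverrideMiddleware:
    def __init__(self, extra_prompt: str | None = None) -> None:
        self.extra_prompt = extra_prompt

    def _get_override_request(self, request: Request) -> Request:
        # Single source of truth for the override decision.
        if self.extra_prompt:
            return replace(
                request, system_prompt=f"{request.system_prompt}\n\n{self.extra_prompt}"
            )
        return request

    def wrap_model_call(self, request: Request, handler: Callable[[Request], str]) -> str:
        return handler(self._get_override_request(request))

    async def awrap_model_call(
        self, request: Request, handler: Callable[[Request], Awaitable[str]]
    ) -> str:
        return await handler(self._get_override_request(request))
```
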
src/langchain_dev_utils/agents/wrap.py
@@ -1,12 +1,13 @@
- import asyncio
- from typing import Any, Awaitable, Callable, Optional
+ import inspect
+ from typing import Any, Awaitable, Callable, Optional, cast

  from langchain.tools import ToolRuntime
- from langchain_core.messages import HumanMessage
+ from langchain_core.messages import AIMessage, HumanMessage
  from langchain_core.tools import BaseTool, StructuredTool
  from langgraph.graph.state import CompiledStateGraph

  from langchain_dev_utils.message_convert import format_sequence
+ from langchain_dev_utils.tool_calling import parse_tool_calling


  def _process_input(request: str, runtime: ToolRuntime) -> str:
@@ -19,6 +20,18 @@ def _process_output(
      return response["messages"][-1].content


+ def get_subagent_name(runtime: ToolRuntime) -> str:
+     messages = runtime.state.get("messages", [])
+     last_ai_msg = cast(
+         AIMessage,
+         next((msg for msg in reversed(messages) if isinstance(msg, AIMessage)), None),
+     )
+
+     _, args = parse_tool_calling(last_ai_msg, first_tool_call_only=True)
+     args = cast(dict[str, Any], args)
+     return args["agent_name"]
+
+
  def wrap_agent_as_tool(
      agent: CompiledStateGraph,
      tool_name: Optional[str] = None,
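
The new `get_subagent_name` helper recovers which sub-agent was requested by parsing the first tool call on the most recent `AIMessage` in the runtime state. A hedged usage sketch; the hook signature `(request, response, runtime)` is inferred from the surrounding hunks, and the import path simply follows the file layout rather than a documented public export:

```python
# Hypothetical output hook that labels a sub-agent's reply with the agent that
# produced it. Signature and import path are assumptions, as noted above.
from typing import Any

from langchain.tools import ToolRuntime

from langchain_dev_utils.agents.wrap import get_subagent_name


def label_output(request: str, response: dict[str, Any], runtime: ToolRuntime) -> str:
    agent_name = get_subagent_name(runtime)  # parses the last AIMessage's first tool call
    return f"[{agent_name}] {response['messages'][-1].content}"
```
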
@@ -115,7 +128,7 @@ def wrap_agent_as_tool(
          request: str,
          runtime: ToolRuntime,
      ):
-         if asyncio.iscoroutinefunction(process_input_async):
+         if inspect.iscoroutinefunction(process_input_async):
              _processed_input = await process_input_async(request, runtime)
          else:
              _processed_input = (
@@ -135,7 +148,7 @@ def wrap_agent_as_tool(

          response = await agent.ainvoke(agent_input)

-         if asyncio.iscoroutinefunction(process_output_async):
+         if inspect.iscoroutinefunction(process_output_async):
              response = await process_output_async(request, response, runtime)
          else:
              response = (
@@ -277,7 +290,7 @@ def wrap_all_agents_as_tool(
          if agent_name not in agents_map:
              raise ValueError(f"Agent {agent_name} not found")

-         if asyncio.iscoroutinefunction(process_input_async):
+         if inspect.iscoroutinefunction(process_input_async):
              _processed_input = await process_input_async(description, runtime)
          else:
              _processed_input = (
@@ -297,7 +310,7 @@ def wrap_all_agents_as_tool(

          response = await agents_map[agent_name].ainvoke(agent_input)

-         if asyncio.iscoroutinefunction(process_output_async):
+         if inspect.iscoroutinefunction(process_output_async):
              response = await process_output_async(description, response, runtime)
          else:
              response = (
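
The remaining `wrap.py` hunks swap `asyncio.iscoroutinefunction` for `inspect.iscoroutinefunction`. For the hooks checked here the answer is the same; the `asyncio` variant is deprecated as of Python 3.14, so the `inspect` spelling is the forward-compatible one. A quick illustration:

```python
# inspect.iscoroutinefunction distinguishes async from sync callables, which is
# all the wrappers above need when deciding whether to await a user hook.
import inspect


async def async_hook(request: str) -> str:
    return request


def sync_hook(request: str) -> str:
    return request


assert inspect.iscoroutinefunction(async_hook)
assert not inspect.iscoroutinefunction(sync_hook)
```
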
src/langchain_dev_utils/chat_models/adapters/openai_compatible.py
@@ -218,14 +218,15 @@ class _BaseChatOpenAICompatible(BaseChatOpenAI):
          stop: list[str] | None = None,
          **kwargs: Any,
      ) -> dict:
+         if stop is not None:
+             kwargs["stop"] = stop
+
          payload = {**self._default_params, **kwargs}

          if self._use_responses_api(payload):
              return super()._get_request_payload(input_, stop=stop, **kwargs)

          messages = self._convert_input(input_).to_messages()
-         if stop is not None:
-             kwargs["stop"] = stop

          payload_messages = []
          last_human_index = -1
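
The `openai_compatible.py` hunk moves the `stop` handling ahead of the payload merge: previously `stop` was copied into `kwargs` only after `payload` had been built, so it never reached the merged request. A generic illustration of the ordering issue (not the adapter's real code):

```python
# A key added to kwargs after the merge never appears in the merged payload.
defaults = {"temperature": 0.7}

# Old order: stop is set too late and silently dropped.
kwargs: dict = {}
payload = {**defaults, **kwargs}
kwargs["stop"] = ["\n\n"]
assert "stop" not in payload

# New order: stop is set before the merge and reaches the payload.
kwargs = {"stop": ["\n\n"]}
payload = {**defaults, **kwargs}
assert payload["stop"] == ["\n\n"]
```
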
src/langchain_dev_utils/graph/__init__.py (new file)
@@ -0,0 +1,7 @@
+ from .parallel import create_parallel_graph
+ from .sequential import create_sequential_graph
+
+ __all__ = [
+     "create_parallel_graph",
+     "create_sequential_graph",
+ ]
src/langchain_dev_utils/graph/parallel.py (new file)
@@ -0,0 +1,119 @@
+ from typing import Awaitable, Callable, Optional, Union, cast
+
+ from langgraph.cache.base import BaseCache
+ from langgraph.graph import StateGraph
+ from langgraph.graph.state import CompiledStateGraph, StateNode
+ from langgraph.store.base import BaseStore
+ from langgraph.types import Checkpointer, Send
+ from langgraph.typing import ContextT, InputT, OutputT, StateT
+
+ from langchain_dev_utils._utils import _transform_node_to_tuple
+
+ from .types import Node
+
+
+ def create_parallel_graph(
+     nodes: list[Node],
+     state_schema: type[StateT],
+     graph_name: Optional[str] = None,
+     branches_fn: Optional[
+         Union[
+             Callable[..., list[Send]],
+             Callable[..., Awaitable[list[Send]]],
+         ]
+     ] = None,
+     context_schema: type[ContextT] | None = None,
+     input_schema: type[InputT] | None = None,
+     output_schema: type[OutputT] | None = None,
+     checkpointer: Checkpointer | None = None,
+     store: BaseStore | None = None,
+     cache: BaseCache | None = None,
+ ) -> CompiledStateGraph[StateT, ContextT, InputT, OutputT]:
+     """
+     Create a parallel graph from a list of nodes.
+
+     This function lets you build a parallel StateGraph simply by writing the corresponding Nodes.
+
+     Args:
+         nodes: List of nodes to execute in parallel
+         state_schema: state schema of the final state graph
+         graph_name: Name of the final state graph
+         branches_fn: Optional function to determine which nodes to execute
+             in parallel
+         context_schema: context schema of the final state graph
+         input_schema: input schema of the final state graph
+         output_schema: output schema of the final state graph
+         checkpointer: Optional LangGraph checkpointer for the final state graph
+         store: Optional LangGraph store for the final state graph
+         cache: Optional LangGraph cache for the final state graph
+
+     Returns:
+         CompiledStateGraph[StateT, ContextT, InputT, OutputT]: Compiled state graph
+
+     Example:
+         # Basic parallel pipeline: multiple specialized agents run concurrently
+         >>> from langchain_dev_utils.graph import create_parallel_graph
+         >>>
+         >>> graph = create_parallel_graph(
+         ...     nodes=[
+         ...         node1, node2, node3
+         ...     ],
+         ...     state_schema=StateT,
+         ...     graph_name="parallel_graph",
+         ... )
+         >>>
+         >>> response = graph.invoke({"messages": [HumanMessage("Hello")]})
+
+         # Dynamic parallel pipeline: decide which nodes to run based on conditional branches
+         >>> graph = create_parallel_graph(
+         ...     nodes=[
+         ...         node1, node2, node3
+         ...     ],
+         ...     state_schema=StateT,
+         ...     branches_fn=lambda state: [
+         ...         Send("node1", arg={"messages": [HumanMessage("Hello")]}),
+         ...         Send("node2", arg={"messages": [HumanMessage("Hello")]}),
+         ...     ],
+         ...     graph_name="parallel_graph",
+         ... )
+         >>>
+         >>> response = graph.invoke({"messages": [HumanMessage("Hello")]})
+     """
+     graph = StateGraph(
+         state_schema=state_schema,
+         context_schema=context_schema,
+         input_schema=input_schema,
+         output_schema=output_schema,
+     )
+
+     node_list: list[tuple[str, StateNode]] = []
+
+     for node in nodes:
+         node_list.append(_transform_node_to_tuple(node))
+
+     if branches_fn:
+         for name, node in node_list:
+             node = cast(StateNode[StateT, ContextT], node)
+             graph.add_node(name, node)
+         graph.add_conditional_edges(
+             "__start__",
+             branches_fn,
+             [node_name for node_name, _ in node_list],
+         )
+         return graph.compile(
+             name=graph_name or "parallel graph",
+             checkpointer=checkpointer,
+             store=store,
+             cache=cache,
+         )
+     else:
+         for node_name, node in node_list:
+             node = cast(StateNode[StateT, ContextT], node)
+             graph.add_node(node_name, node)
+             graph.add_edge("__start__", node_name)
+         return graph.compile(
+             name=graph_name or "parallel graph",
+             checkpointer=checkpointer,
+             store=store,
+             cache=cache,
+         )