langchain-dev-utils 1.3.3__tar.gz → 1.3.4__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (69) hide show
  1. {langchain_dev_utils-1.3.3 → langchain_dev_utils-1.3.4}/.gitignore +15 -15
  2. {langchain_dev_utils-1.3.3 → langchain_dev_utils-1.3.4}/.python-version +1 -1
  3. {langchain_dev_utils-1.3.3 → langchain_dev_utils-1.3.4}/PKG-INFO +1 -1
  4. {langchain_dev_utils-1.3.3 → langchain_dev_utils-1.3.4}/README_cn.md +85 -85
  5. {langchain_dev_utils-1.3.3 → langchain_dev_utils-1.3.4}/pyproject.toml +47 -47
  6. langchain_dev_utils-1.3.4/src/langchain_dev_utils/__init__.py +1 -0
  7. {langchain_dev_utils-1.3.3 → langchain_dev_utils-1.3.4}/src/langchain_dev_utils/agents/wrap.py +1 -1
  8. langchain_dev_utils-1.3.4/src/langchain_dev_utils/chat_models/adapters/__init__.py +3 -0
  9. langchain_dev_utils-1.3.4/src/langchain_dev_utils/chat_models/adapters/create_utils.py +53 -0
  10. {langchain_dev_utils-1.3.3 → langchain_dev_utils-1.3.4}/src/langchain_dev_utils/chat_models/adapters/openai_compatible.py +23 -4
  11. langchain_dev_utils-1.3.4/src/langchain_dev_utils/chat_models/adapters/register_profiles.py +15 -0
  12. {langchain_dev_utils-1.3.3 → langchain_dev_utils-1.3.4}/src/langchain_dev_utils/chat_models/base.py +4 -11
  13. langchain_dev_utils-1.3.4/src/langchain_dev_utils/embeddings/adapters/__init__.py +3 -0
  14. langchain_dev_utils-1.3.4/src/langchain_dev_utils/embeddings/adapters/create_utils.py +45 -0
  15. langchain_dev_utils-1.3.4/src/langchain_dev_utils/embeddings/adapters/openai_compatible.py +75 -0
  16. {langchain_dev_utils-1.3.3 → langchain_dev_utils-1.3.4}/src/langchain_dev_utils/embeddings/base.py +11 -25
  17. {langchain_dev_utils-1.3.3 → langchain_dev_utils-1.3.4}/src/langchain_dev_utils/message_convert/__init__.py +15 -15
  18. {langchain_dev_utils-1.3.3 → langchain_dev_utils-1.3.4}/src/langchain_dev_utils/message_convert/format.py +69 -69
  19. {langchain_dev_utils-1.3.3 → langchain_dev_utils-1.3.4}/tests/test_chat_models.py +191 -185
  20. langchain_dev_utils-1.3.4/tests/test_embedding.py +22 -0
  21. {langchain_dev_utils-1.3.3 → langchain_dev_utils-1.3.4}/tests/test_handoffs_middleware.py +0 -1
  22. {langchain_dev_utils-1.3.3 → langchain_dev_utils-1.3.4}/tests/test_load_model.py +104 -104
  23. {langchain_dev_utils-1.3.3 → langchain_dev_utils-1.3.4}/tests/test_messages.py +164 -164
  24. {langchain_dev_utils-1.3.3 → langchain_dev_utils-1.3.4}/tests/test_plan_middleware.py +68 -68
  25. {langchain_dev_utils-1.3.3 → langchain_dev_utils-1.3.4}/tests/test_wrap_agent.py +15 -12
  26. {langchain_dev_utils-1.3.3 → langchain_dev_utils-1.3.4}/uv.lock +2883 -2883
  27. langchain_dev_utils-1.3.3/src/langchain_dev_utils/__init__.py +0 -1
  28. langchain_dev_utils-1.3.3/tests/utils/__init__.py +0 -0
  29. {langchain_dev_utils-1.3.3 → langchain_dev_utils-1.3.4}/.vscode/settings.json +0 -0
  30. {langchain_dev_utils-1.3.3 → langchain_dev_utils-1.3.4}/LICENSE +0 -0
  31. {langchain_dev_utils-1.3.3 → langchain_dev_utils-1.3.4}/README.md +0 -0
  32. {langchain_dev_utils-1.3.3 → langchain_dev_utils-1.3.4}/src/langchain_dev_utils/_utils.py +0 -0
  33. {langchain_dev_utils-1.3.3 → langchain_dev_utils-1.3.4}/src/langchain_dev_utils/agents/__init__.py +0 -0
  34. {langchain_dev_utils-1.3.3 → langchain_dev_utils-1.3.4}/src/langchain_dev_utils/agents/factory.py +0 -0
  35. {langchain_dev_utils-1.3.3 → langchain_dev_utils-1.3.4}/src/langchain_dev_utils/agents/file_system.py +0 -0
  36. {langchain_dev_utils-1.3.3 → langchain_dev_utils-1.3.4}/src/langchain_dev_utils/agents/middleware/__init__.py +0 -0
  37. {langchain_dev_utils-1.3.3 → langchain_dev_utils-1.3.4}/src/langchain_dev_utils/agents/middleware/format_prompt.py +0 -0
  38. {langchain_dev_utils-1.3.3 → langchain_dev_utils-1.3.4}/src/langchain_dev_utils/agents/middleware/handoffs.py +0 -0
  39. {langchain_dev_utils-1.3.3 → langchain_dev_utils-1.3.4}/src/langchain_dev_utils/agents/middleware/model_fallback.py +0 -0
  40. {langchain_dev_utils-1.3.3 → langchain_dev_utils-1.3.4}/src/langchain_dev_utils/agents/middleware/model_router.py +0 -0
  41. {langchain_dev_utils-1.3.3 → langchain_dev_utils-1.3.4}/src/langchain_dev_utils/agents/middleware/plan.py +0 -0
  42. {langchain_dev_utils-1.3.3 → langchain_dev_utils-1.3.4}/src/langchain_dev_utils/agents/middleware/summarization.py +0 -0
  43. {langchain_dev_utils-1.3.3 → langchain_dev_utils-1.3.4}/src/langchain_dev_utils/agents/middleware/tool_call_repair.py +0 -0
  44. {langchain_dev_utils-1.3.3 → langchain_dev_utils-1.3.4}/src/langchain_dev_utils/agents/middleware/tool_emulator.py +0 -0
  45. {langchain_dev_utils-1.3.3 → langchain_dev_utils-1.3.4}/src/langchain_dev_utils/agents/middleware/tool_selection.py +0 -0
  46. {langchain_dev_utils-1.3.3 → langchain_dev_utils-1.3.4}/src/langchain_dev_utils/agents/plan.py +0 -0
  47. {langchain_dev_utils-1.3.3 → langchain_dev_utils-1.3.4}/src/langchain_dev_utils/chat_models/__init__.py +0 -0
  48. {langchain_dev_utils-1.3.3 → langchain_dev_utils-1.3.4}/src/langchain_dev_utils/chat_models/types.py +0 -0
  49. {langchain_dev_utils-1.3.3 → langchain_dev_utils-1.3.4}/src/langchain_dev_utils/embeddings/__init__.py +0 -0
  50. {langchain_dev_utils-1.3.3 → langchain_dev_utils-1.3.4}/src/langchain_dev_utils/message_convert/content.py +0 -0
  51. {langchain_dev_utils-1.3.3 → langchain_dev_utils-1.3.4}/src/langchain_dev_utils/pipeline/__init__.py +0 -0
  52. {langchain_dev_utils-1.3.3 → langchain_dev_utils-1.3.4}/src/langchain_dev_utils/pipeline/parallel.py +0 -0
  53. {langchain_dev_utils-1.3.3 → langchain_dev_utils-1.3.4}/src/langchain_dev_utils/pipeline/sequential.py +0 -0
  54. {langchain_dev_utils-1.3.3 → langchain_dev_utils-1.3.4}/src/langchain_dev_utils/pipeline/types.py +0 -0
  55. {langchain_dev_utils-1.3.3 → langchain_dev_utils-1.3.4}/src/langchain_dev_utils/py.typed +0 -0
  56. {langchain_dev_utils-1.3.3 → langchain_dev_utils-1.3.4}/src/langchain_dev_utils/tool_calling/__init__.py +0 -0
  57. {langchain_dev_utils-1.3.3 → langchain_dev_utils-1.3.4}/src/langchain_dev_utils/tool_calling/human_in_the_loop.py +0 -0
  58. {langchain_dev_utils-1.3.3 → langchain_dev_utils-1.3.4}/src/langchain_dev_utils/tool_calling/utils.py +0 -0
  59. {langchain_dev_utils-1.3.3 → langchain_dev_utils-1.3.4}/tests/__init__.py +0 -0
  60. {langchain_dev_utils-1.3.3 → langchain_dev_utils-1.3.4}/tests/test_agent.py +0 -0
  61. {langchain_dev_utils-1.3.3 → langchain_dev_utils-1.3.4}/tests/test_human_in_the_loop.py +0 -0
  62. {langchain_dev_utils-1.3.3 → langchain_dev_utils-1.3.4}/tests/test_load_embbeding.py +0 -0
  63. {langchain_dev_utils-1.3.3 → langchain_dev_utils-1.3.4}/tests/test_model_tool_emulator.py +0 -0
  64. {langchain_dev_utils-1.3.3 → langchain_dev_utils-1.3.4}/tests/test_pipline.py +0 -0
  65. {langchain_dev_utils-1.3.3 → langchain_dev_utils-1.3.4}/tests/test_router_model.py +0 -0
  66. {langchain_dev_utils-1.3.3 → langchain_dev_utils-1.3.4}/tests/test_tool_call_repair.py +0 -0
  67. {langchain_dev_utils-1.3.3 → langchain_dev_utils-1.3.4}/tests/test_tool_calling.py +0 -0
  68. {langchain_dev_utils-1.3.3/src/langchain_dev_utils/chat_models/adapters → langchain_dev_utils-1.3.4/tests/utils}/__init__.py +0 -0
  69. {langchain_dev_utils-1.3.3 → langchain_dev_utils-1.3.4}/tests/utils/register.py +0 -0
@@ -1,16 +1,16 @@
1
- # Python-generated files
2
- __pycache__/
3
- *.py[oc]
4
- build/
5
- dist/
6
- wheels/
7
- *.egg-info
8
-
9
- # Virtual environments
10
- .venv
11
- .env
12
- .benchmarks
13
- data/
14
- node_modules
15
- dist
1
+ # Python-generated files
2
+ __pycache__/
3
+ *.py[oc]
4
+ build/
5
+ dist/
6
+ wheels/
7
+ *.egg-info
8
+
9
+ # Virtual environments
10
+ .venv
11
+ .env
12
+ .benchmarks
13
+ data/
14
+ node_modules
15
+ dist
16
16
  site/
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: langchain-dev-utils
3
- Version: 1.3.3
3
+ Version: 1.3.4
4
4
  Summary: A practical utility library for LangChain and LangGraph development
5
5
  Project-URL: Source Code, https://github.com/TBice123123/langchain-dev-utils
6
6
  Project-URL: repository, https://github.com/TBice123123/langchain-dev-utils
@@ -1,86 +1,86 @@
1
- # 🦜️🧰 langchain-dev-utils
2
-
3
- <p align="center">
4
- <em>🚀 专为 LangChain 和 LangGraph 开发者打造的高效工具库</em>
5
- </p>
6
-
7
- <p align="center">
8
- 📚 <a href="https://tbice123123.github.io/langchain-dev-utils/">English</a> •
9
- <a href="https://tbice123123.github.io/langchain-dev-utils/zh/">中文</a>
10
- </p>
11
-
12
- [![PyPI](https://img.shields.io/pypi/v/langchain-dev-utils.svg?color=%2334D058&label=pypi%20package)](https://pypi.org/project/langchain-dev-utils/)
13
- [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT)
14
- [![Python](https://img.shields.io/badge/python-3.11|3.12|3.13|3.14-%2334D058)](https://www.python.org/downloads)
15
- [![Downloads](https://static.pepy.tech/badge/langchain-dev-utils/month)](https://pepy.tech/project/langchain-dev-utils)
16
- [![Documentation](https://img.shields.io/badge/docs-latest-blue)](https://tbice123123.github.io/langchain-dev-utils/zh/)
17
-
18
- > 当前为中文版,英文版请访问[English Version](https://github.com/TBice123123/langchain-dev-utils/blob/master/README.md)
19
-
20
- ## ✨ 为什么选择 langchain-dev-utils?
21
-
22
- 厌倦了在 LangChain 开发中编写重复代码?`langchain-dev-utils` 正是您需要的解决方案!这个轻量但功能强大的工具库专为提升 LangChain 和 LangGraph 开发体验而设计,帮助您:
23
-
24
- - ⚡ **提升开发效率** - 减少样板代码,让您专注于核心功能
25
- - 🧩 **简化复杂流程** - 轻松管理多模型、多工具和多智能体应用
26
- - 🔧 **增强代码质量** - 提高一致性和可读性,减少维护成本
27
- - 🎯 **加速原型开发** - 快速实现想法,更快迭代验证
28
-
29
-
30
- ## 🎯 核心功能
31
-
32
- - **🔌 统一的模型管理** - 通过字符串指定模型提供商,轻松切换和组合不同模型
33
- - **💬 灵活的消息处理** - 支持思维链拼接、流式处理和消息格式化
34
- - **🛠️ 强大的工具调用** - 内置工具调用检测、参数解析和人工审核功能
35
- - **🤖 高效的 Agent 开发** - 简化智能体创建流程,扩充更多的常用中间件
36
- - **📊 灵活的状态图组合** - 支持串行和并行方式组合多个 StateGraph
37
-
38
- ## ⚡ 快速开始
39
-
40
- **1. 安装 `langchain-dev-utils`**
41
-
42
- ```bash
43
- pip install -U "langchain-dev-utils[standard]"
44
- ```
45
-
46
- **2. 开始使用**
47
-
48
- ```python
49
- from langchain.tools import tool
50
- from langchain_core.messages import HumanMessage
51
- from langchain_dev_utils.chat_models import register_model_provider, load_chat_model
52
- from langchain_dev_utils.agents import create_agent
53
-
54
- # 注册模型提供商
55
- register_model_provider("vllm", "openai-compatible", base_url="http://localhost:8000/v1")
56
-
57
- @tool
58
- def get_current_weather(location: str) -> str:
59
- """获取指定地点的当前天气"""
60
- return f"25度,{location}"
61
-
62
- # 使用字符串动态加载模型
63
- model = load_chat_model("vllm:qwen3-4b")
64
- response = model.invoke("你好")
65
- print(response)
66
-
67
- # 创建智能体
68
- agent = create_agent("vllm:qwen3-4b", tools=[get_current_weather])
69
- response = agent.invoke({"messages": [HumanMessage(content="今天纽约的天气如何?")]})
70
- print(response)
71
- ```
72
-
73
- **获取更多的本库功能,请访问[完整文档](https://tbice123123.github.io/langchain-dev-utils/zh/)**
74
-
75
-
76
- ## 🛠️ GitHub 仓库
77
-
78
- 访问 [GitHub 仓库](https://github.com/TBice123123/langchain-dev-utils) 查看源代码和问题。
79
-
80
-
81
- ---
82
-
83
- <div align="center">
84
- <p>由 ❤️ 和 ☕ 驱动开发</p>
85
- <p>如果这个项目对您有帮助,请给我们一个 ⭐️</p>
1
+ # 🦜️🧰 langchain-dev-utils
2
+
3
+ <p align="center">
4
+ <em>🚀 专为 LangChain 和 LangGraph 开发者打造的高效工具库</em>
5
+ </p>
6
+
7
+ <p align="center">
8
+ 📚 <a href="https://tbice123123.github.io/langchain-dev-utils/">English</a> •
9
+ <a href="https://tbice123123.github.io/langchain-dev-utils/zh/">中文</a>
10
+ </p>
11
+
12
+ [![PyPI](https://img.shields.io/pypi/v/langchain-dev-utils.svg?color=%2334D058&label=pypi%20package)](https://pypi.org/project/langchain-dev-utils/)
13
+ [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT)
14
+ [![Python](https://img.shields.io/badge/python-3.11|3.12|3.13|3.14-%2334D058)](https://www.python.org/downloads)
15
+ [![Downloads](https://static.pepy.tech/badge/langchain-dev-utils/month)](https://pepy.tech/project/langchain-dev-utils)
16
+ [![Documentation](https://img.shields.io/badge/docs-latest-blue)](https://tbice123123.github.io/langchain-dev-utils/zh/)
17
+
18
+ > 当前为中文版,英文版请访问[English Version](https://github.com/TBice123123/langchain-dev-utils/blob/master/README.md)
19
+
20
+ ## ✨ 为什么选择 langchain-dev-utils?
21
+
22
+ 厌倦了在 LangChain 开发中编写重复代码?`langchain-dev-utils` 正是您需要的解决方案!这个轻量但功能强大的工具库专为提升 LangChain 和 LangGraph 开发体验而设计,帮助您:
23
+
24
+ - ⚡ **提升开发效率** - 减少样板代码,让您专注于核心功能
25
+ - 🧩 **简化复杂流程** - 轻松管理多模型、多工具和多智能体应用
26
+ - 🔧 **增强代码质量** - 提高一致性和可读性,减少维护成本
27
+ - 🎯 **加速原型开发** - 快速实现想法,更快迭代验证
28
+
29
+
30
+ ## 🎯 核心功能
31
+
32
+ - **🔌 统一的模型管理** - 通过字符串指定模型提供商,轻松切换和组合不同模型
33
+ - **💬 灵活的消息处理** - 支持思维链拼接、流式处理和消息格式化
34
+ - **🛠️ 强大的工具调用** - 内置工具调用检测、参数解析和人工审核功能
35
+ - **🤖 高效的 Agent 开发** - 简化智能体创建流程,扩充更多的常用中间件
36
+ - **📊 灵活的状态图组合** - 支持串行和并行方式组合多个 StateGraph
37
+
38
+ ## ⚡ 快速开始
39
+
40
+ **1. 安装 `langchain-dev-utils`**
41
+
42
+ ```bash
43
+ pip install -U "langchain-dev-utils[standard]"
44
+ ```
45
+
46
+ **2. 开始使用**
47
+
48
+ ```python
49
+ from langchain.tools import tool
50
+ from langchain_core.messages import HumanMessage
51
+ from langchain_dev_utils.chat_models import register_model_provider, load_chat_model
52
+ from langchain_dev_utils.agents import create_agent
53
+
54
+ # 注册模型提供商
55
+ register_model_provider("vllm", "openai-compatible", base_url="http://localhost:8000/v1")
56
+
57
+ @tool
58
+ def get_current_weather(location: str) -> str:
59
+ """获取指定地点的当前天气"""
60
+ return f"25度,{location}"
61
+
62
+ # 使用字符串动态加载模型
63
+ model = load_chat_model("vllm:qwen3-4b")
64
+ response = model.invoke("你好")
65
+ print(response)
66
+
67
+ # 创建智能体
68
+ agent = create_agent("vllm:qwen3-4b", tools=[get_current_weather])
69
+ response = agent.invoke({"messages": [HumanMessage(content="今天纽约的天气如何?")]})
70
+ print(response)
71
+ ```
72
+
73
+ **获取更多的本库功能,请访问[完整文档](https://tbice123123.github.io/langchain-dev-utils/zh/)**
74
+
75
+
76
+ ## 🛠️ GitHub 仓库
77
+
78
+ 访问 [GitHub 仓库](https://github.com/TBice123123/langchain-dev-utils) 查看源代码和问题。
79
+
80
+
81
+ ---
82
+
83
+ <div align="center">
84
+ <p>由 ❤️ 和 ☕ 驱动开发</p>
85
+ <p>如果这个项目对您有帮助,请给我们一个 ⭐️</p>
86
86
  </div>
@@ -1,47 +1,47 @@
1
- [project]
2
- name = "langchain-dev-utils"
3
- version = "1.3.3"
4
- description = "A practical utility library for LangChain and LangGraph development"
5
- readme = "README.md"
6
- authors = [{ name = "tiebingice", email = "tiebingice123@outlook.com" }]
7
- requires-python = ">=3.11"
8
- dependencies = ["langchain>=1.2.0", "langchain-core>=1.2.5", "langgraph>=1.0.0"]
9
-
10
- [project.urls]
11
- "Source Code" = "https://github.com/TBice123123/langchain-dev-utils"
12
- repository = "https://github.com/TBice123123/langchain-dev-utils"
13
- documentation = "https://tbice123123.github.io/langchain-dev-utils"
14
-
15
-
16
- [project.optional-dependencies]
17
- standard = ["json-repair>=0.53.1", "langchain-openai"]
18
-
19
- [build-system]
20
- requires = ["hatchling"]
21
- build-backend = "hatchling.build"
22
-
23
- [tool.hatch.build]
24
- exclude = ["/data", "/docs", "mkdocs.yml"]
25
-
26
- [tool.pytest.ini_options]
27
- asyncio_mode = "auto"
28
- testpaths = ["tests"]
29
- python_files = ["test_*.py"]
30
- python_functions = ["test_*"]
31
-
32
- [dependency-groups]
33
- dev = ["langchain-model-profiles>=0.0.5", "ruff>=0.14.5"]
34
- docs = ["mkdocs-material>=9.7.0", "mkdocs-static-i18n>=1.3.0"]
35
- tests = [
36
- "python-dotenv>=1.1.1",
37
- "langchain-tests>=1.0.0",
38
- "langchain-deepseek>=1.0.0",
39
- "langchain-qwq>=0.3.0",
40
- "langchain-ollama>=1.0.0",
41
- "langchain-community>=0.4.1",
42
- ]
43
-
44
-
45
- [tool.ruff.lint]
46
- select = ["E", "F", "I", "PGH003", "T201"]
47
- ignore = ["E501"]
1
+ [project]
2
+ name = "langchain-dev-utils"
3
+ version = "1.3.4"
4
+ description = "A practical utility library for LangChain and LangGraph development"
5
+ readme = "README.md"
6
+ authors = [{ name = "tiebingice", email = "tiebingice123@outlook.com" }]
7
+ requires-python = ">=3.11"
8
+ dependencies = ["langchain>=1.2.0", "langchain-core>=1.2.5", "langgraph>=1.0.0"]
9
+
10
+ [project.urls]
11
+ "Source Code" = "https://github.com/TBice123123/langchain-dev-utils"
12
+ repository = "https://github.com/TBice123123/langchain-dev-utils"
13
+ documentation = "https://tbice123123.github.io/langchain-dev-utils"
14
+
15
+
16
+ [project.optional-dependencies]
17
+ standard = ["json-repair>=0.53.1", "langchain-openai"]
18
+
19
+ [build-system]
20
+ requires = ["hatchling"]
21
+ build-backend = "hatchling.build"
22
+
23
+ [tool.hatch.build]
24
+ exclude = ["/data", "/docs", "mkdocs.yml"]
25
+
26
+ [tool.pytest.ini_options]
27
+ asyncio_mode = "auto"
28
+ testpaths = ["tests"]
29
+ python_files = ["test_*.py"]
30
+ python_functions = ["test_*"]
31
+
32
+ [dependency-groups]
33
+ dev = ["langchain-model-profiles>=0.0.5", "ruff>=0.14.5"]
34
+ docs = ["mkdocs-material>=9.7.0", "mkdocs-static-i18n>=1.3.0"]
35
+ tests = [
36
+ "python-dotenv>=1.1.1",
37
+ "langchain-tests>=1.0.0",
38
+ "langchain-deepseek>=1.0.0",
39
+ "langchain-qwq>=0.3.0",
40
+ "langchain-ollama>=1.0.0",
41
+ "langchain-community>=0.4.1",
42
+ ]
43
+
44
+
45
+ [tool.ruff.lint]
46
+ select = ["E", "F", "I", "PGH003", "T201"]
47
+ ignore = ["E501"]
@@ -0,0 +1 @@
1
+ __version__ = "1.3.4"
@@ -171,7 +171,7 @@ def wrap_all_agents_as_tool(
171
171
  Example:
172
172
  >>> from langchain_dev_utils.agents import wrap_all_agents_as_tool, create_agent
173
173
  >>>
174
- >>> call_time_agent_tool = wrap_all_agents_as_tool(
174
+ >>> call_agent_tool = wrap_all_agents_as_tool(
175
175
  ... [time_agent,weather_agent],
176
176
  ... tool_name="call_sub_agents",
177
177
  ... tool_description="Used to invoke the sub-agents to perform tasks"
@@ -0,0 +1,3 @@
1
+ from .create_utils import create_openai_compatible_model
2
+
3
+ __all__ = ["create_openai_compatible_model"]
@@ -0,0 +1,53 @@
1
+ from typing import Any, Optional, cast
2
+
3
+ from langchain_core.utils import from_env
4
+
5
+ from langchain_dev_utils._utils import _check_pkg_install
6
+
7
+ from ..types import CompatibilityOptions
8
+
9
+
10
+ def create_openai_compatible_model(
11
+ model_provider: str,
12
+ base_url: Optional[str] = None,
13
+ compatibility_options: Optional[CompatibilityOptions] = None,
14
+ model_profiles: Optional[dict[str, dict[str, Any]]] = None,
15
+ chat_model_cls_name: Optional[str] = None,
16
+ ):
17
+ """Factory function for creating provider-specific OpenAI-compatible model classes.
18
+
19
+ Dynamically generates model classes for different OpenAI-compatible providers,
20
+ configuring environment variable mappings and default base URLs specific to each provider.
21
+
22
+ Args:
23
+ model_provider (str): Identifier for the OpenAI-compatible provider (e.g. `vllm`, `moonshot`)
24
+ base_url (Optional[str], optional): Default API base URL for the provider. Defaults to None. If not provided, will try to use the environment variable.
25
+ compatibility_options (Optional[CompatibilityOptions], optional): Optional configuration for compatibility options with the provider. Defaults to None.
26
+ model_profiles (Optional[dict[str, dict[str, Any]]], optional): Optional model profiles for the provider. Defaults to None.
27
+ chat_model_cls_name (Optional[str], optional): Optional custom class name for the generated model. Defaults to None.
28
+ Returns:
29
+ Type[_BaseChatOpenAICompatible]: Configured model class ready for instantiation with provider-specific settings
30
+
31
+ Examples:
32
+ >>> from langchain_dev_utils.chat_models.adapters import create_openai_compatible_model
33
+ >>> ChatVLLM = create_openai_compatible_model(
34
+ ... "vllm",
35
+ ... base_url="http://localhost:8000",
36
+ ... chat_model_cls_name="ChatVLLM",
37
+ ... )
38
+ >>> model = ChatVLLM(model="qwen3-4b")
39
+ >>> model.invoke("hello")
40
+ """
41
+ _check_pkg_install("langchain_openai")
42
+ from .openai_compatible import _create_openai_compatible_model
43
+
44
+ base_url = (
45
+ base_url or from_env(f"{model_provider.upper()}_API_BASE", default=None)()
46
+ )
47
+ return _create_openai_compatible_model(
48
+ chat_model_cls_name=chat_model_cls_name,
49
+ provider=model_provider,
50
+ base_url=cast(str, base_url),
51
+ compatibility_options=compatibility_options,
52
+ profiles=model_profiles,
53
+ )
@@ -12,6 +12,7 @@ from typing import (
12
12
  Type,
13
13
  TypeVar,
14
14
  Union,
15
+ cast,
15
16
  )
16
17
 
17
18
  import openai
@@ -19,7 +20,11 @@ from langchain_core.callbacks import (
19
20
  AsyncCallbackManagerForLLMRun,
20
21
  CallbackManagerForLLMRun,
21
22
  )
22
- from langchain_core.language_models import LangSmithParams, LanguageModelInput
23
+ from langchain_core.language_models import (
24
+ LangSmithParams,
25
+ LanguageModelInput,
26
+ ModelProfile,
27
+ )
23
28
  from langchain_core.messages import (
24
29
  AIMessage,
25
30
  AIMessageChunk,
@@ -51,6 +56,10 @@ from ..types import (
51
56
  ResponseFormatType,
52
57
  ToolChoiceType,
53
58
  )
59
+ from .register_profiles import (
60
+ _get_profile_by_provider_and_model,
61
+ _register_profile_with_provider,
62
+ )
54
63
 
55
64
  _BM = TypeVar("_BM", bound=BaseModel)
56
65
  _DictOrPydanticClass = Union[dict[str, Any], type[_BM], type]
@@ -152,7 +161,7 @@ class _BaseChatOpenAICompatible(BaseChatOpenAI):
152
161
  Note: This is a template class and should not be exported or instantiated
153
162
  directly. Instead, use it as a base class and provide the specific provider
154
163
  name through inheritance or the factory function
155
- `_create_openai_compatible_model()`.
164
+ `create_openai_compatible_model()`.
156
165
  """
157
166
 
158
167
  model_name: str = Field(alias="model", default="openai compatible model")
@@ -283,7 +292,10 @@ class _BaseChatOpenAICompatible(BaseChatOpenAI):
283
292
  def _set_model_profile(self) -> Self:
284
293
  """Set model profile if not overridden."""
285
294
  if self.profile is None:
286
- self.profile = {}
295
+ self.profile = cast(
296
+ ModelProfile,
297
+ _get_profile_by_provider_and_model(self._provider, self.model_name),
298
+ )
287
299
  return self
288
300
 
289
301
  def _create_chat_result(
@@ -578,6 +590,8 @@ def _create_openai_compatible_model(
578
590
  provider: str,
579
591
  base_url: str,
580
592
  compatibility_options: Optional[CompatibilityOptions] = None,
593
+ profiles: Optional[dict[str, dict[str, Any]]] = None,
594
+ chat_model_cls_name: Optional[str] = None,
581
595
  ) -> Type[_BaseChatOpenAICompatible]:
582
596
  """Factory function for creating provider-specific OpenAI-compatible model classes.
583
597
 
@@ -588,14 +602,19 @@ def _create_openai_compatible_model(
588
602
  provider: Provider identifier (e.g.`vllm`)
589
603
  base_url: Default API base URL for the provider
590
604
  compatibility_options: Optional configuration for the provider
605
+ profiles: Optional profiles for the provider
606
+ chat_model_cls_name: Optional name for the model class
591
607
 
592
608
  Returns:
593
609
  Configured model class ready for instantiation with provider-specific settings
594
610
  """
595
- chat_model_cls_name = f"Chat{provider.title()}"
611
+ chat_model_cls_name = chat_model_cls_name or f"Chat{provider.title()}"
596
612
  if compatibility_options is None:
597
613
  compatibility_options = {}
598
614
 
615
+ if profiles is not None:
616
+ _register_profile_with_provider(provider, profiles)
617
+
599
618
  return create_model(
600
619
  chat_model_cls_name,
601
620
  __base__=_BaseChatOpenAICompatible,
@@ -0,0 +1,15 @@
1
+ from typing import Any
2
+
3
+ _PROFILES = {}
4
+
5
+
6
+ def _register_profile_with_provider(
7
+ provider_name: str, profile: dict[str, Any]
8
+ ) -> None:
9
+ _PROFILES.update({provider_name: profile})
10
+
11
+
12
+ def _get_profile_by_provider_and_model(
13
+ provider_name: str, model_name: str
14
+ ) -> dict[str, Any]:
15
+ return _PROFILES.get(provider_name, {}).get(model_name, {})
@@ -141,19 +141,12 @@ def register_model_provider(
141
141
  "when chat_model is a string, the value must be 'openai-compatible'"
142
142
  )
143
143
  chat_model = _create_openai_compatible_model(
144
- provider_name,
145
- base_url,
144
+ provider=provider_name,
145
+ base_url=base_url,
146
146
  compatibility_options=compatibility_options,
147
+ profiles=model_profiles,
147
148
  )
148
- _MODEL_PROVIDERS_DICT.update(
149
- {
150
- provider_name: {
151
- "chat_model": chat_model,
152
- "base_url": base_url,
153
- "model_profiles": model_profiles,
154
- }
155
- }
156
- )
149
+ _MODEL_PROVIDERS_DICT.update({provider_name: {"chat_model": chat_model}})
157
150
  else:
158
151
  if base_url is not None:
159
152
  _MODEL_PROVIDERS_DICT.update(
@@ -0,0 +1,3 @@
1
+ from .create_utils import create_openai_compatible_embedding
2
+
3
+ __all__ = ["create_openai_compatible_embedding"]
@@ -0,0 +1,45 @@
1
+ from typing import Optional, cast
2
+
3
+ from langchain_core.utils import from_env
4
+
5
+ from langchain_dev_utils._utils import _check_pkg_install
6
+
7
+
8
+ def create_openai_compatible_embedding(
9
+ embedding_provider: str,
10
+ base_url: Optional[str] = None,
11
+ embedding_model_cls_name: Optional[str] = None,
12
+ ):
13
+ """Factory function for creating provider-specific OpenAI-compatible embedding classes.
14
+
15
+ Dynamically generates embedding classes for different OpenAI-compatible providers,
16
+ configuring environment variable mappings and default base URLs specific to each provider.
17
+
18
+ Args:
19
+ embedding_provider (str): Identifier for the OpenAI-compatible provider (e.g. `vllm`, `moonshot`)
20
+ base_url (Optional[str], optional): Default API base URL for the provider. Defaults to None. If not provided, will try to use the environment variable.
21
+ embedding_model_cls_name (Optional[str], optional): Optional custom class name for the generated embedding. Defaults to None.
22
+ Returns:
23
+ Type[_BaseEmbeddingOpenAICompatible]: Configured embedding class ready for instantiation with provider-specific settings
24
+
25
+ Examples:
26
+ >>> from langchain_dev_utils.embeddings.adapters import create_openai_compatible_embedding
27
+ >>> VLLMEmbedding = create_openai_compatible_embedding(
28
+ ... "vllm",
29
+ ... base_url="http://localhost:8000",
30
+ ... embedding_model_cls_name="VLLMEmbedding",
31
+ ... )
32
+ >>> model = VLLMEmbedding(model="qwen3-embedding-8b")
33
+ >>> model.embed_query("hello")
34
+ """
35
+ _check_pkg_install("langchain_openai")
36
+ from .openai_compatible import _create_openai_compatible_embedding
37
+
38
+ base_url = (
39
+ base_url or from_env(f"{embedding_provider.upper()}_API_BASE", default=None)()
40
+ )
41
+ return _create_openai_compatible_embedding(
42
+ provider=embedding_provider,
43
+ base_url=cast(str, base_url),
44
+ embeddings_cls_name=embedding_model_cls_name,
45
+ )
@@ -0,0 +1,75 @@
1
+ from typing import Optional, Type
2
+
3
+ from langchain_core.utils import from_env, secret_from_env
4
+ from langchain_openai.embeddings import OpenAIEmbeddings
5
+ from pydantic import Field, SecretStr, create_model
6
+
7
+
8
+ class _BaseEmbeddingOpenAICompatible(OpenAIEmbeddings):
9
+ """Base class for OpenAI-Compatible embeddings.
10
+
11
+ This class extends the OpenAIEmbeddings class to support
12
+ custom API keys and base URLs for OpenAI-Compatible models.
13
+
14
+ Note: This is a template class and should not be exported or instantiated
15
+ directly. Instead, use it as a base class and provide the specific provider
16
+ name through inheritance or the factory function
17
+ `create_openai_compatible_embedding()`.
18
+ """
19
+
20
+ openai_api_key: Optional[SecretStr] = Field(
21
+ default_factory=secret_from_env("OPENAI_COMPATIBLE_API_KEY", default=None),
22
+ alias="api_key",
23
+ )
24
+ """OpenAI Compatible API key"""
25
+ openai_api_base: str = Field(
26
+ default_factory=from_env("OPENAI_COMPATIBLE_API_BASE", default=""),
27
+ alias="base_url",
28
+ )
29
+ """OpenAI Compatible API base URL"""
30
+
31
+ check_embedding_ctx_length: bool = False
32
+ """Whether to check the token length of inputs and automatically split inputs
33
+ longer than embedding_ctx_length. Defaults to False. """
34
+
35
+
36
+ def _create_openai_compatible_embedding(
37
+ provider: str,
38
+ base_url: str,
39
+ embeddings_cls_name: Optional[str] = None,
40
+ ) -> Type[_BaseEmbeddingOpenAICompatible]:
41
+ """Factory function for creating provider-specific OpenAI-compatible embeddings classes.
42
+
43
+ Dynamically generates embeddings classes for different OpenAI-compatible providers,
44
+ configuring environment variable mappings and default base URLs specific to each provider.
45
+
46
+ Args:
47
+ provider: Provider identifier (e.g.`vllm`)
48
+ base_url: Default API base URL for the provider
49
+ embeddings_cls_name: Optional custom class name for the generated embeddings. Defaults to None.
50
+
51
+ Returns:
52
+ Configured embeddings class ready for instantiation with provider-specific settings
53
+ """
54
+ embeddings_cls_name = embeddings_cls_name or f"{provider.title()}Embeddings"
55
+
56
+ return create_model(
57
+ embeddings_cls_name,
58
+ __base__=_BaseEmbeddingOpenAICompatible,
59
+ openai_api_base=(
60
+ str,
61
+ Field(
62
+ default_factory=from_env(
63
+ f"{provider.upper()}_API_BASE", default=base_url
64
+ ),
65
+ ),
66
+ ),
67
+ openai_api_key=(
68
+ str,
69
+ Field(
70
+ default_factory=secret_from_env(
71
+ f"{provider.upper()}_API_KEY", default=None
72
+ ),
73
+ ),
74
+ ),
75
+ )