langchain-dev-utils 1.2.14.tar.gz → 1.2.15.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (58)
  1. {langchain_dev_utils-1.2.14 → langchain_dev_utils-1.2.15}/PKG-INFO +26 -26
  2. {langchain_dev_utils-1.2.14 → langchain_dev_utils-1.2.15}/README.md +32 -32
  3. {langchain_dev_utils-1.2.14 → langchain_dev_utils-1.2.15}/README_cn.md +32 -32
  4. {langchain_dev_utils-1.2.14 → langchain_dev_utils-1.2.15}/pyproject.toml +1 -1
  5. langchain_dev_utils-1.2.15/src/langchain_dev_utils/__init__.py +1 -0
  6. {langchain_dev_utils-1.2.14 → langchain_dev_utils-1.2.15}/src/langchain_dev_utils/agents/middleware/model_router.py +9 -4
  7. langchain_dev_utils-1.2.14/src/langchain_dev_utils/__init__.py +0 -1
  8. {langchain_dev_utils-1.2.14 → langchain_dev_utils-1.2.15}/.gitignore +0 -0
  9. {langchain_dev_utils-1.2.14 → langchain_dev_utils-1.2.15}/.python-version +0 -0
  10. {langchain_dev_utils-1.2.14 → langchain_dev_utils-1.2.15}/.vscode/settings.json +0 -0
  11. {langchain_dev_utils-1.2.14 → langchain_dev_utils-1.2.15}/LICENSE +0 -0
  12. {langchain_dev_utils-1.2.14 → langchain_dev_utils-1.2.15}/src/langchain_dev_utils/_utils.py +0 -0
  13. {langchain_dev_utils-1.2.14 → langchain_dev_utils-1.2.15}/src/langchain_dev_utils/agents/__init__.py +0 -0
  14. {langchain_dev_utils-1.2.14 → langchain_dev_utils-1.2.15}/src/langchain_dev_utils/agents/factory.py +0 -0
  15. {langchain_dev_utils-1.2.14 → langchain_dev_utils-1.2.15}/src/langchain_dev_utils/agents/file_system.py +0 -0
  16. {langchain_dev_utils-1.2.14 → langchain_dev_utils-1.2.15}/src/langchain_dev_utils/agents/middleware/__init__.py +0 -0
  17. {langchain_dev_utils-1.2.14 → langchain_dev_utils-1.2.15}/src/langchain_dev_utils/agents/middleware/format_prompt.py +0 -0
  18. {langchain_dev_utils-1.2.14 → langchain_dev_utils-1.2.15}/src/langchain_dev_utils/agents/middleware/model_fallback.py +0 -0
  19. {langchain_dev_utils-1.2.14 → langchain_dev_utils-1.2.15}/src/langchain_dev_utils/agents/middleware/plan.py +0 -0
  20. {langchain_dev_utils-1.2.14 → langchain_dev_utils-1.2.15}/src/langchain_dev_utils/agents/middleware/summarization.py +0 -0
  21. {langchain_dev_utils-1.2.14 → langchain_dev_utils-1.2.15}/src/langchain_dev_utils/agents/middleware/tool_call_repair.py +0 -0
  22. {langchain_dev_utils-1.2.14 → langchain_dev_utils-1.2.15}/src/langchain_dev_utils/agents/middleware/tool_emulator.py +0 -0
  23. {langchain_dev_utils-1.2.14 → langchain_dev_utils-1.2.15}/src/langchain_dev_utils/agents/middleware/tool_selection.py +0 -0
  24. {langchain_dev_utils-1.2.14 → langchain_dev_utils-1.2.15}/src/langchain_dev_utils/agents/plan.py +0 -0
  25. {langchain_dev_utils-1.2.14 → langchain_dev_utils-1.2.15}/src/langchain_dev_utils/agents/wrap.py +0 -0
  26. {langchain_dev_utils-1.2.14 → langchain_dev_utils-1.2.15}/src/langchain_dev_utils/chat_models/__init__.py +0 -0
  27. {langchain_dev_utils-1.2.14 → langchain_dev_utils-1.2.15}/src/langchain_dev_utils/chat_models/adapters/__init__.py +0 -0
  28. {langchain_dev_utils-1.2.14 → langchain_dev_utils-1.2.15}/src/langchain_dev_utils/chat_models/adapters/openai_compatible.py +0 -0
  29. {langchain_dev_utils-1.2.14 → langchain_dev_utils-1.2.15}/src/langchain_dev_utils/chat_models/base.py +0 -0
  30. {langchain_dev_utils-1.2.14 → langchain_dev_utils-1.2.15}/src/langchain_dev_utils/chat_models/types.py +0 -0
  31. {langchain_dev_utils-1.2.14 → langchain_dev_utils-1.2.15}/src/langchain_dev_utils/embeddings/__init__.py +0 -0
  32. {langchain_dev_utils-1.2.14 → langchain_dev_utils-1.2.15}/src/langchain_dev_utils/embeddings/base.py +0 -0
  33. {langchain_dev_utils-1.2.14 → langchain_dev_utils-1.2.15}/src/langchain_dev_utils/message_convert/__init__.py +0 -0
  34. {langchain_dev_utils-1.2.14 → langchain_dev_utils-1.2.15}/src/langchain_dev_utils/message_convert/content.py +0 -0
  35. {langchain_dev_utils-1.2.14 → langchain_dev_utils-1.2.15}/src/langchain_dev_utils/message_convert/format.py +0 -0
  36. {langchain_dev_utils-1.2.14 → langchain_dev_utils-1.2.15}/src/langchain_dev_utils/pipeline/__init__.py +0 -0
  37. {langchain_dev_utils-1.2.14 → langchain_dev_utils-1.2.15}/src/langchain_dev_utils/pipeline/parallel.py +0 -0
  38. {langchain_dev_utils-1.2.14 → langchain_dev_utils-1.2.15}/src/langchain_dev_utils/pipeline/sequential.py +0 -0
  39. {langchain_dev_utils-1.2.14 → langchain_dev_utils-1.2.15}/src/langchain_dev_utils/pipeline/types.py +0 -0
  40. {langchain_dev_utils-1.2.14 → langchain_dev_utils-1.2.15}/src/langchain_dev_utils/py.typed +0 -0
  41. {langchain_dev_utils-1.2.14 → langchain_dev_utils-1.2.15}/src/langchain_dev_utils/tool_calling/__init__.py +0 -0
  42. {langchain_dev_utils-1.2.14 → langchain_dev_utils-1.2.15}/src/langchain_dev_utils/tool_calling/human_in_the_loop.py +0 -0
  43. {langchain_dev_utils-1.2.14 → langchain_dev_utils-1.2.15}/src/langchain_dev_utils/tool_calling/utils.py +0 -0
  44. {langchain_dev_utils-1.2.14 → langchain_dev_utils-1.2.15}/tests/__init__.py +0 -0
  45. {langchain_dev_utils-1.2.14 → langchain_dev_utils-1.2.15}/tests/test_agent.py +0 -0
  46. {langchain_dev_utils-1.2.14 → langchain_dev_utils-1.2.15}/tests/test_chat_models.py +0 -0
  47. {langchain_dev_utils-1.2.14 → langchain_dev_utils-1.2.15}/tests/test_human_in_the_loop.py +0 -0
  48. {langchain_dev_utils-1.2.14 → langchain_dev_utils-1.2.15}/tests/test_load_embbeding.py +0 -0
  49. {langchain_dev_utils-1.2.14 → langchain_dev_utils-1.2.15}/tests/test_load_model.py +0 -0
  50. {langchain_dev_utils-1.2.14 → langchain_dev_utils-1.2.15}/tests/test_messages.py +0 -0
  51. {langchain_dev_utils-1.2.14 → langchain_dev_utils-1.2.15}/tests/test_model_tool_emulator.py +0 -0
  52. {langchain_dev_utils-1.2.14 → langchain_dev_utils-1.2.15}/tests/test_pipline.py +0 -0
  53. {langchain_dev_utils-1.2.14 → langchain_dev_utils-1.2.15}/tests/test_plan_middleware.py +0 -0
  54. {langchain_dev_utils-1.2.14 → langchain_dev_utils-1.2.15}/tests/test_router_model.py +0 -0
  55. {langchain_dev_utils-1.2.14 → langchain_dev_utils-1.2.15}/tests/test_tool_call_repair.py +0 -0
  56. {langchain_dev_utils-1.2.14 → langchain_dev_utils-1.2.15}/tests/test_tool_calling.py +0 -0
  57. {langchain_dev_utils-1.2.14 → langchain_dev_utils-1.2.15}/tests/test_wrap_agent.py +0 -0
  58. {langchain_dev_utils-1.2.14 → langchain_dev_utils-1.2.15}/uv.lock +0 -0
{langchain_dev_utils-1.2.14 → langchain_dev_utils-1.2.15}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: langchain-dev-utils
-Version: 1.2.14
+Version: 1.2.15
 Summary: A practical utility library for LangChain and LangGraph development
 Project-URL: Source Code, https://github.com/TBice123123/langchain-dev-utils
 Project-URL: repository, https://github.com/TBice123123/langchain-dev-utils
@@ -54,38 +54,38 @@ Tired of writing repetitive code in LangChain development? `langchain-dev-utils`

 ## ⚡ Quick Start

-1. Install `langchain-dev-utils`:
+**1. Install `langchain-dev-utils`**

-```bash
-pip install -U langchain[openai] langchain-dev-utils
-```
+```bash
+pip install -U "langchain-dev-utils[standard]"
+```

-2. Start using:
+**2. Start using**

-```python
-from langchain.tools import tool
-from langchain_core.messages import HumanMessage
-from langchain_dev_utils.chat_models import register_model_provider, load_chat_model
-from langchain_dev_utils.agents import create_agent
+```python
+from langchain.tools import tool
+from langchain_core.messages import HumanMessage
+from langchain_dev_utils.chat_models import register_model_provider, load_chat_model
+from langchain_dev_utils.agents import create_agent

-# Register model provider
-register_model_provider("vllm", "openai-compatible", "http://localhost:8000/v1")
+# Register model provider
+register_model_provider("vllm", "openai-compatible", base_url="http://localhost:8000/v1")

-@tool
-def get_current_weather(location: str) -> str:
-    """Get the current weather for the specified location"""
-    return f"25 degrees, {location}"
+@tool
+def get_current_weather(location: str) -> str:
+    """Get the current weather for the specified location"""
+    return f"25 degrees, {location}"

-# Dynamically load model using string
-model = load_chat_model("vllm:qwen3-4b")
-response = model.invoke("Hello")
-print(response)
+# Dynamically load model using string
+model = load_chat_model("vllm:qwen3-4b")
+response = model.invoke("Hello")
+print(response)

-# Create agent
-agent = create_agent("vllm:qwen3-4b", tools=[get_current_weather])
-response = agent.invoke({"messages": [HumanMessage(content="What's the weather like in New York today?")]})
-print(response)
-```
+# Create agent
+agent = create_agent("vllm:qwen3-4b", tools=[get_current_weather])
+response = agent.invoke({"messages": [HumanMessage(content="What's the weather like in New York today?")]})
+print(response)
+```

 **For more features of this library, please visit the [full documentation](https://tbice123123.github.io/langchain-dev-utils/)**

{langchain_dev_utils-1.2.14 → langchain_dev_utils-1.2.15}/README.md
@@ -37,38 +37,38 @@ Tired of writing repetitive code in LangChain development? `langchain-dev-utils`

 ## ⚡ Quick Start

-1. Install `langchain-dev-utils`:
-
-```bash
-pip install -U langchain[openai] langchain-dev-utils
-```
-
-2. Start using:
-
-```python
-from langchain.tools import tool
-from langchain_core.messages import HumanMessage
-from langchain_dev_utils.chat_models import register_model_provider, load_chat_model
-from langchain_dev_utils.agents import create_agent
-
-# Register model provider
-register_model_provider("vllm", "openai-compatible", "http://localhost:8000/v1")
-
-@tool
-def get_current_weather(location: str) -> str:
-    """Get the current weather for the specified location"""
-    return f"25 degrees, {location}"
-
-# Dynamically load model using string
-model = load_chat_model("vllm:qwen3-4b")
-response = model.invoke("Hello")
-print(response)
-
-# Create agent
-agent = create_agent("vllm:qwen3-4b", tools=[get_current_weather])
-response = agent.invoke({"messages": [HumanMessage(content="What's the weather like in New York today?")]})
-print(response)
-```
+**1. Install `langchain-dev-utils`**
+
+```bash
+pip install -U "langchain-dev-utils[standard]"
+```
+
+**2. Start using**
+
+```python
+from langchain.tools import tool
+from langchain_core.messages import HumanMessage
+from langchain_dev_utils.chat_models import register_model_provider, load_chat_model
+from langchain_dev_utils.agents import create_agent
+
+# Register model provider
+register_model_provider("vllm", "openai-compatible", base_url="http://localhost:8000/v1")
+
+@tool
+def get_current_weather(location: str) -> str:
+    """Get the current weather for the specified location"""
+    return f"25 degrees, {location}"
+
+# Dynamically load model using string
+model = load_chat_model("vllm:qwen3-4b")
+response = model.invoke("Hello")
+print(response)
+
+# Create agent
+agent = create_agent("vllm:qwen3-4b", tools=[get_current_weather])
+response = agent.invoke({"messages": [HumanMessage(content="What's the weather like in New York today?")]})
+print(response)
+```

 **For more features of this library, please visit the [full documentation](https://tbice123123.github.io/langchain-dev-utils/)**

{langchain_dev_utils-1.2.14 → langchain_dev_utils-1.2.15}/README_cn.md
@@ -37,38 +37,38 @@

 ## ⚡ 快速开始

-1. 安装 `langchain-dev-utils`:
-
-```bash
-pip install -U langchain[openai] langchain-dev-utils
-```
-
-2. 开始使用:
-
-```python
-from langchain.tools import tool
-from langchain_core.messages import HumanMessage
-from langchain_dev_utils.chat_models import register_model_provider, load_chat_model
-from langchain_dev_utils.agents import create_agent
-
-# 注册模型提供商
-register_model_provider("vllm", "openai-compatible", "http://localhost:8000/v1")
-
-@tool
-def get_current_weather(location: str) -> str:
-    """获取指定地点的当前天气"""
-    return f"25度,{location}"
-
-# 使用字符串动态加载模型
-model = load_chat_model("vllm:qwen3-4b")
-response = model.invoke("你好")
-print(response)
-
-# 创建智能体
-agent = create_agent("vllm:qwen3-4b", tools=[get_current_weather])
-response = agent.invoke({"messages": [HumanMessage(content="今天纽约的天气如何?")]})
-print(response)
-```
+**1. 安装 `langchain-dev-utils`**
+
+```bash
+pip install -U "langchain-dev-utils[standard]"
+```
+
+**2. 开始使用**
+
+```python
+from langchain.tools import tool
+from langchain_core.messages import HumanMessage
+from langchain_dev_utils.chat_models import register_model_provider, load_chat_model
+from langchain_dev_utils.agents import create_agent
+
+# 注册模型提供商
+register_model_provider("vllm", "openai-compatible", base_url="http://localhost:8000/v1")
+
+@tool
+def get_current_weather(location: str) -> str:
+    """获取指定地点的当前天气"""
+    return f"25度,{location}"
+
+# 使用字符串动态加载模型
+model = load_chat_model("vllm:qwen3-4b")
+response = model.invoke("你好")
+print(response)
+
+# 创建智能体
+agent = create_agent("vllm:qwen3-4b", tools=[get_current_weather])
+response = agent.invoke({"messages": [HumanMessage(content="今天纽约的天气如何?")]})
+print(response)
+```

 **获取更多的本库功能,请访问[完整文档](https://tbice123123.github.io/langchain-dev-utils/zh/)**

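The three hunks above (PKG-INFO, README.md, README_cn.md) carry the same two user-facing changes: the install command now uses the package's own `[standard]` extra instead of `langchain[openai]`, and `base_url` is passed to `register_model_provider` as a keyword argument rather than positionally. A minimal sketch of the same registration pattern against another OpenAI-compatible endpoint; the provider name, port, and model name below are illustrative assumptions, not values taken from this diff:

    # Illustrative sketch: any OpenAI-compatible server can be registered the
    # way the updated README registers vLLM. "ollama", the port, and "llama3.1"
    # are assumptions for the example only.
    from langchain_dev_utils.chat_models import register_model_provider, load_chat_model

    register_model_provider("ollama", "openai-compatible", base_url="http://localhost:11434/v1")
    model = load_chat_model("ollama:llama3.1")
    print(model.invoke("Hello").content)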
{langchain_dev_utils-1.2.14 → langchain_dev_utils-1.2.15}/pyproject.toml
@@ -1,6 +1,6 @@
 [project]
 name = "langchain-dev-utils"
-version = "1.2.14"
+version = "1.2.15"
 description = "A practical utility library for LangChain and LangGraph development"
 readme = "README.md"
 authors = [{ name = "tiebingice", email = "tiebingice123@outlook.com" }]
langchain_dev_utils-1.2.15/src/langchain_dev_utils/__init__.py
@@ -0,0 +1 @@
+__version__ = "1.2.15"
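The top-level `__init__.py` now carries the version string, so it can be read at runtime. A trivial sketch, assuming nothing beyond the single added line:

    import langchain_dev_utils

    # Version attribute introduced by the line added above.
    print(langchain_dev_utils.__version__)  # "1.2.15"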
{langchain_dev_utils-1.2.14 → langchain_dev_utils-1.2.15}/src/langchain_dev_utils/agents/middleware/model_router.py
@@ -19,6 +19,7 @@ class ModelDict(TypedDict):
     model_description: str
     tools: NotRequired[list[BaseTool | dict[str, Any]]]
     model_kwargs: NotRequired[dict[str, Any]]
+    model_instance: NotRequired[BaseChatModel]
     model_system_prompt: NotRequired[str]


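With the new `model_instance` field, a `model_list` entry can carry an already constructed `BaseChatModel` instead of a name to be resolved through `load_chat_model`. A hedged sketch of such an entry; the keys follow the `ModelDict` fields shown above (plus `model_name`, which the class docstring lists), while the `ChatOpenAI` model and its settings are illustrative assumptions:

    from langchain_openai import ChatOpenAI

    # Illustrative entry only: "model_instance" (new in 1.2.15) supplies a
    # pre-built chat model; the remaining keys mirror the ModelDict fields.
    coder_entry = {
        "model_name": "coder-model",
        "model_description": "Use for code generation and refactoring tasks",
        "model_instance": ChatOpenAI(model="gpt-4.1-mini", temperature=0),
        "model_system_prompt": "You are a careful coding assistant.",
    }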
@@ -76,7 +77,7 @@ class ModelRouterMiddleware(AgentMiddleware):
             model name or a BaseChatModel instance
         model_list: List of available routing models, each containing model_name,
             model_description, tools(Optional), model_kwargs(Optional),
-            model_system_prompt(Optional)
+            model_instance(Optional), model_system_prompt(Optional)
         router_prompt: Routing prompt template, uses default template if None

     Examples:
@@ -155,6 +156,7 @@ class ModelRouterMiddleware(AgentMiddleware):
                 "tools": item.get("tools", None),
                 "kwargs": item.get("model_kwargs", None),
                 "system_prompt": item.get("model_system_prompt", None),
+                "model_instance": item.get("model_instance", None),
             }
             for item in self.model_list
         }
@@ -163,10 +165,13 @@ class ModelRouterMiddleware(AgentMiddleware):
         override_kwargs = {}
         if select_model_name != "default-model" and select_model_name in model_dict:
             model_values = model_dict.get(select_model_name, {})
-            if model_values["kwargs"] is not None:
-                model = load_chat_model(select_model_name, **model_values["kwargs"])
+            if model_values["model_instance"] is not None:
+                model = model_values["model_instance"]
             else:
-                model = load_chat_model(select_model_name)
+                if model_values["kwargs"] is not None:
+                    model = load_chat_model(select_model_name, **model_values["kwargs"])
+                else:
+                    model = load_chat_model(select_model_name)
             override_kwargs["model"] = model
             if model_values["tools"] is not None:
                 override_kwargs["tools"] = model_values["tools"]
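The routing logic now checks `model_instance` first: if the selected entry provides one, that object is used directly, and `load_chat_model` (with or without `model_kwargs`) remains the fallback. A hedged usage sketch; the positional routing-model argument, the `middleware=` parameter of `create_agent`, and the import path of `ModelRouterMiddleware` are assumptions inferred from the file layout and docstring above, not confirmed by this diff:

    from langchain_dev_utils.agents import create_agent
    from langchain_dev_utils.agents.middleware import ModelRouterMiddleware
    from langchain_openai import ChatOpenAI

    # Sketch of the new selection order: the "coder-model" entry is used as-is
    # via model_instance, while the "vllm:qwen3-8b" entry is still loaded by
    # name with its model_kwargs. Names and settings here are illustrative.
    router = ModelRouterMiddleware(
        "vllm:qwen3-4b",  # model that performs the routing decision
        model_list=[
            {
                "model_name": "vllm:qwen3-8b",
                "model_description": "General chat and long-form writing",
                "model_kwargs": {"temperature": 0.7},
            },
            {
                "model_name": "coder-model",
                "model_description": "Code generation and refactoring",
                "model_instance": ChatOpenAI(model="gpt-4.1-mini"),
            },
        ],
    )

    agent = create_agent("vllm:qwen3-4b", middleware=[router])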
langchain_dev_utils-1.2.14/src/langchain_dev_utils/__init__.py
@@ -1 +0,0 @@
-__version__ = "1.2.14"