langchain-dev-utils 1.2.2__tar.gz → 1.2.4__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/.gitignore +1 -0
- {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/PKG-INFO +6 -5
- {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/README.md +5 -4
- {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/README_cn.md +3 -1
- {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/pyproject.toml +4 -2
- langchain_dev_utils-1.2.4/src/langchain_dev_utils/__init__.py +1 -0
- {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/src/langchain_dev_utils/agents/middleware/model_fallback.py +1 -1
- {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/src/langchain_dev_utils/agents/middleware/plan.py +5 -1
- {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/src/langchain_dev_utils/chat_models/base.py +26 -2
- {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/tests/test_load_model.py +26 -2
- {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/uv.lock +32 -17
- langchain_dev_utils-1.2.2/src/langchain_dev_utils/__init__.py +0 -1
- {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/.python-version +0 -0
- {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/.vscode/settings.json +0 -0
- {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/LICENSE +0 -0
- {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/src/langchain_dev_utils/agents/__init__.py +0 -0
- {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/src/langchain_dev_utils/agents/factory.py +0 -0
- {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/src/langchain_dev_utils/agents/file_system.py +0 -0
- {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/src/langchain_dev_utils/agents/middleware/__init__.py +0 -0
- {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/src/langchain_dev_utils/agents/middleware/model_router.py +0 -0
- {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/src/langchain_dev_utils/agents/middleware/summarization.py +0 -0
- {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/src/langchain_dev_utils/agents/middleware/tool_emulator.py +0 -0
- {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/src/langchain_dev_utils/agents/middleware/tool_selection.py +0 -0
- {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/src/langchain_dev_utils/agents/plan.py +0 -0
- {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/src/langchain_dev_utils/agents/wrap.py +0 -0
- {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/src/langchain_dev_utils/chat_models/__init__.py +0 -0
- {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/src/langchain_dev_utils/chat_models/adapters/__init__.py +0 -0
- {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/src/langchain_dev_utils/chat_models/adapters/openai_compatible.py +0 -0
- {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/src/langchain_dev_utils/chat_models/types.py +0 -0
- {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/src/langchain_dev_utils/embeddings/__init__.py +0 -0
- {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/src/langchain_dev_utils/embeddings/base.py +0 -0
- {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/src/langchain_dev_utils/message_convert/__init__.py +0 -0
- {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/src/langchain_dev_utils/message_convert/content.py +0 -0
- {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/src/langchain_dev_utils/message_convert/format.py +0 -0
- {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/src/langchain_dev_utils/pipeline/__init__.py +0 -0
- {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/src/langchain_dev_utils/pipeline/parallel.py +0 -0
- {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/src/langchain_dev_utils/pipeline/sequential.py +0 -0
- {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/src/langchain_dev_utils/pipeline/types.py +0 -0
- {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/src/langchain_dev_utils/py.typed +0 -0
- {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/src/langchain_dev_utils/tool_calling/__init__.py +0 -0
- {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/src/langchain_dev_utils/tool_calling/human_in_the_loop.py +0 -0
- {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/src/langchain_dev_utils/tool_calling/utils.py +0 -0
- {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/tests/test_agent.py +0 -0
- {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/tests/test_chat_models.py +0 -0
- {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/tests/test_human_in_the_loop.py +0 -0
- {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/tests/test_load_embbeding.py +0 -0
- {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/tests/test_messages.py +0 -0
- {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/tests/test_model_tool_emulator.py +0 -0
- {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/tests/test_pipline.py +0 -0
- {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/tests/test_plan_middleware.py +0 -0
- {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/tests/test_router_model.py +0 -0
- {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/tests/test_tool_calling.py +0 -0
- {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/tests/test_wrap_agent.py +0 -0
{langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/PKG-INFO RENAMED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: langchain-dev-utils
-Version: 1.2.2
+Version: 1.2.4
 Summary: A practical utility library for LangChain and LangGraph development
 Project-URL: Source Code, https://github.com/TBice123123/langchain-dev-utils
 Project-URL: repository, https://github.com/TBice123123/langchain-dev-utils
@@ -62,6 +62,7 @@ Mainly consists of the following two functions:
 - `provider_name`: Model provider name, used as an identifier for subsequent model loading
 - `chat_model`: Chat model, can be a ChatModel or a string (currently supports "openai-compatible")
 - `base_url`: The API address of the model provider (optional, valid for both types of `chat_model`, but mainly used when `chat_model` is a string and is "openai-compatible")
+- `provider_profile`: Model provider's model configuration file (optional, valid for both types of `chat_model`); finally, it will read the corresponding model configuration parameters based on `model_name` and set them to `model.profile`.
 - `provider_config`: Relevant configuration for the model provider (optional, valid when `chat_model` is a string and is "openai-compatible"), can configure some provider-related parameters, such as whether to support structured output in json_mode, list of supported tool_choices, etc.
 
 `load_chat_model` parameter description:
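The new `provider_profile` option documented in the hunk above can be exercised roughly as follows. This is a minimal sketch based on this README text and on `tests/test_load_model.py` further down in this diff; the provider name, endpoint, model names, and profile fields are placeholder assumptions, and `register_model_provider` is assumed to be importable from `langchain_dev_utils.chat_models` alongside the functions imported in that test file.

```python
from langchain_dev_utils.chat_models import load_chat_model, register_model_provider

# Hypothetical profile table: model name -> profile dict, matching the
# dict[str, dict[str, Any]] type hint added in chat_models/base.py below.
MY_PROFILES = {
    "my-model-small": {"max_input_tokens": 32_768, "tool_calling": True},
    "my-model-large": {"max_input_tokens": 131_072, "tool_calling": True},
}

register_model_provider(
    provider_name="my-provider",            # identifier used in "provider:model" strings
    chat_model="openai-compatible",         # or a ChatModel class
    base_url="https://api.example.com/v1",  # placeholder endpoint; credentials omitted
    provider_profile=MY_PROFILES,
)

model = load_chat_model("my-provider:my-model-small")
print(model.profile)  # the MY_PROFILES entry keyed by the model name
```

If the requested model name has no entry in `provider_profile`, the model is still loaded; it simply gets no profile attached (see the `_load_chat_model_helper` hunk later in this diff).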
@@ -171,7 +172,7 @@ text = format_sequence([
 ], separator="\n", with_num=True)
 ```
 
-**For more information about message conversion, please refer to**: [Message
+**For more information about message conversion, please refer to**: [Message Process](https://tbice123123.github.io/langchain-dev-utils-docs/en/message-conversion/message.html), [Formatting List Content](https://tbice123123.github.io/langchain-dev-utils-docs/en/message-conversion/format.html)
 
 ### 3. **Tool Calling**
 
@@ -231,7 +232,7 @@ def get_current_time() -> str:
     return str(datetime.datetime.now().timestamp())
 ```
 
-**For more information about tool calling, please refer to**: [Add Human-in-the-Loop Support](https://tbice123123.github.io/langchain-dev-utils-docs/en/tool-calling/human-in-the-loop.html), [Tool Call
+**For more information about tool calling, please refer to**: [Add Human-in-the-Loop Support](https://tbice123123.github.io/langchain-dev-utils-docs/en/tool-calling/human-in-the-loop.html), [Tool Call Handling](https://tbice123123.github.io/langchain-dev-utils-docs/en/tool-calling/tool.html)
 
 ### 4. **Agent Development**
 
@@ -278,7 +279,7 @@ response = agent.invoke({"messages": [{"role": "user", "content": "Give me a tra
 print(response)
 ```
 
-**For more information about agent development and all built-in middleware, please refer to**: [
+**For more information about agent development and all built-in middleware, please refer to**: [Pre-built Agent Functions](https://tbice123123.github.io/langchain-dev-utils-docs/en/agent-development/prebuilt.html), [Middleware](https://tbice123123.github.io/langchain-dev-utils-docs/en/agent-development/middleware.html)
 
 ### 5. **State Graph Orchestration**
 
@@ -391,7 +392,7 @@ response = graph.invoke({"messages": [HumanMessage("Hello")]})
 print(response)
 ```
 
-**For more information about state graph orchestration, please refer to**: [State Graph Orchestration
+**For more information about state graph orchestration, please refer to**: [State Graph Orchestration](https://tbice123123.github.io/langchain-dev-utils-docs/en/graph-orchestration/pipeline.html)
 
 ## 💬 Join the Community
 
{langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/README.md RENAMED

@@ -46,6 +46,7 @@ Mainly consists of the following two functions:
 - `provider_name`: Model provider name, used as an identifier for subsequent model loading
 - `chat_model`: Chat model, can be a ChatModel or a string (currently supports "openai-compatible")
 - `base_url`: The API address of the model provider (optional, valid for both types of `chat_model`, but mainly used when `chat_model` is a string and is "openai-compatible")
+- `provider_profile`: Model provider's model configuration file (optional, valid for both types of `chat_model`); finally, it will read the corresponding model configuration parameters based on `model_name` and set them to `model.profile`.
 - `provider_config`: Relevant configuration for the model provider (optional, valid when `chat_model` is a string and is "openai-compatible"), can configure some provider-related parameters, such as whether to support structured output in json_mode, list of supported tool_choices, etc.
 
 `load_chat_model` parameter description:
@@ -155,7 +156,7 @@ text = format_sequence([
 ], separator="\n", with_num=True)
 ```
 
-**For more information about message conversion, please refer to**: [Message
+**For more information about message conversion, please refer to**: [Message Process](https://tbice123123.github.io/langchain-dev-utils-docs/en/message-conversion/message.html), [Formatting List Content](https://tbice123123.github.io/langchain-dev-utils-docs/en/message-conversion/format.html)
 
 ### 3. **Tool Calling**
 
@@ -215,7 +216,7 @@ def get_current_time() -> str:
     return str(datetime.datetime.now().timestamp())
 ```
 
-**For more information about tool calling, please refer to**: [Add Human-in-the-Loop Support](https://tbice123123.github.io/langchain-dev-utils-docs/en/tool-calling/human-in-the-loop.html), [Tool Call
+**For more information about tool calling, please refer to**: [Add Human-in-the-Loop Support](https://tbice123123.github.io/langchain-dev-utils-docs/en/tool-calling/human-in-the-loop.html), [Tool Call Handling](https://tbice123123.github.io/langchain-dev-utils-docs/en/tool-calling/tool.html)
 
 ### 4. **Agent Development**
 
@@ -262,7 +263,7 @@ response = agent.invoke({"messages": [{"role": "user", "content": "Give me a tra
 print(response)
 ```
 
-**For more information about agent development and all built-in middleware, please refer to**: [
+**For more information about agent development and all built-in middleware, please refer to**: [Pre-built Agent Functions](https://tbice123123.github.io/langchain-dev-utils-docs/en/agent-development/prebuilt.html), [Middleware](https://tbice123123.github.io/langchain-dev-utils-docs/en/agent-development/middleware.html)
 
 ### 5. **State Graph Orchestration**
 
@@ -375,7 +376,7 @@ response = graph.invoke({"messages": [HumanMessage("Hello")]})
 print(response)
 ```
 
-**For more information about state graph orchestration, please refer to**: [State Graph Orchestration
+**For more information about state graph orchestration, please refer to**: [State Graph Orchestration](https://tbice123123.github.io/langchain-dev-utils-docs/en/graph-orchestration/pipeline.html)
 
 ## 💬 Join the Community
 
{langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/README_cn.md RENAMED

@@ -46,8 +46,10 @@ pip install -U langchain-dev-utils[standard]
 - `provider_name`: Model provider name, used as an identifier for subsequent model loading
 - `chat_model`: Chat model, can be a ChatModel or a string (currently supports "openai-compatible")
 - `base_url`: The API address of the model provider (optional, valid for both types of `chat_model`, but mainly used when `chat_model` is a string and is "openai-compatible")
+- `provider_profile`: Model provider's model configuration file (optional, valid for both types of `chat_model`); finally, it will read the corresponding model configuration parameters based on `model_name` and set them to `model.profile`.
 - `provider_config`: Relevant configuration for the model provider (optional, valid when `chat_model` is a string and is "openai-compatible"), can configure some provider-related parameters, such as whether the json_mode structured output mode is supported, the list of supported tool_choice values, etc.
 
+
 `load_chat_model` parameter description:
 
 - `model`: Chat model name, of type str
@@ -375,7 +377,7 @@ response = graph.invoke({"messages": [HumanMessage("你好")]})
 print(response)
 ```
 
-**For more information about state graph orchestration, please refer to**: [
+**For more information about state graph orchestration, please refer to**: [State Graph Orchestration](https://tbice123123.github.io/langchain-dev-utils-docs/zh/graph-orchestration/pipeline.html)
 
 ## 💬 Join the Community
 
{langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/pyproject.toml RENAMED

@@ -1,6 +1,6 @@
 [project]
 name = "langchain-dev-utils"
-version = "1.2.2"
+version = "1.2.4"
 description = "A practical utility library for LangChain and LangGraph development"
 readme = "README.md"
 authors = [{ name = "tiebingice", email = "tiebingice123@outlook.com" }]
@@ -20,6 +20,8 @@ standard = ["langchain-openai"]
 requires = ["hatchling"]
 build-backend = "hatchling.build"
 
+[tool.uv.build-backend]
+source-exclude = ["/data"]
 
 [tool.pytest.ini_options]
 asyncio_mode = "auto"
@@ -28,7 +30,7 @@ python_files = ["test_*.py"]
 python_functions = ["test_*"]
 
 [dependency-groups]
-dev = ["ruff>=0.14.5"]
+dev = ["langchain-model-profiles>=0.0.5", "ruff>=0.14.5"]
 tests = [
     "python-dotenv>=1.1.1",
     "langchain-tests>=1.0.0",
langchain_dev_utils-1.2.4/src/langchain_dev_utils/__init__.py ADDED

@@ -0,0 +1 @@
+__version__ = "1.2.4"
{langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/src/langchain_dev_utils/agents/middleware/model_fallback.py RENAMED

@@ -17,7 +17,7 @@ class ModelFallbackMiddleware(_ModelFallbackMiddleware):
 
     Example:
     ```python
-    from langchain_dev_utils.agents.middleware
+    from langchain_dev_utils.agents.middleware import ModelFallbackMiddleware
     from langchain_dev_utils.agents import create_agent
 
     fallback = ModelFallbackMiddleware(
{langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/src/langchain_dev_utils/agents/middleware/plan.py RENAMED

@@ -250,6 +250,8 @@ _PLAN_SYSTEM_PROMPT_NOT_READ_PLAN = """You can manage task plans using two simpl
 ## finish_sub_plan
 - Call it **only when the current task is 100% done**. It automatically marks it `"done"` and promotes the next `"pending"` task to `"in_progress"`. No parameters needed. Never use it mid-task or if anything’s incomplete.
 Keep plans lean, update immediately, and never batch completions.
+
+**Note**: Make sure that all tasks end up with the status `"done"`.
 """
 
 _PLAN_SYSTEM_PROMPT = """You can manage task plans using three simple tools:
@@ -263,6 +265,8 @@ _PLAN_SYSTEM_PROMPT = """You can manage task plans using three simple tools:
 ## read_plan
 - Retrieve the full current plan list with statuses, especially when you forget which sub-plan you're supposed to execute next.
 - No parameters required—returns a current plan list with statuses.
+
+**Note**: Make sure that all tasks end up with the status `"done"`.
 """
 
 
@@ -290,7 +294,7 @@ class PlanMiddleware(AgentMiddleware):
         message_key: The key of the message to be updated. Defaults to "messages".
     Example:
     ```python
-    from langchain_dev_utils.agents.middleware
+    from langchain_dev_utils.agents.middleware import PlanMiddleware
     from langchain_dev_utils.agents import create_agent
 
     agent = create_agent("vllm:qwen3-4b", middleware=[PlanMiddleware()])
{langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/src/langchain_dev_utils/chat_models/base.py RENAMED

@@ -14,6 +14,7 @@ class ChatModelProvider(TypedDict):
     provider_name: str
     chat_model: ChatModelType
     base_url: NotRequired[str]
+    provider_profile: NotRequired[dict[str, dict[str, Any]]]
     provider_config: NotRequired[ProviderConfig]
 
 
@@ -94,6 +95,11 @@ def _load_chat_model_helper(
         url_key = _get_base_url_field_name(chat_model)
         if url_key:
             kwargs.update({url_key: base_url})
+        if provider_profile := _MODEL_PROVIDERS_DICT[model_provider].get(
+            "provider_profile"
+        ):
+            if model in provider_profile:
+                kwargs.update({"profile": provider_profile[model]})
         return chat_model(model=model, **kwargs)
 
     return _init_chat_model_helper(model, model_provider=model_provider, **kwargs)
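In the hunk above, a profile is attached only when the requested model name has an entry in the provider's registered `provider_profile` mapping; otherwise the chat model is constructed without a `profile` keyword. A standalone sketch of that lookup rule (the names and values below are hypothetical and this is not the library's own code):

```python
from typing import Any, Optional

def resolve_profile(
    provider_profile: Optional[dict[str, dict[str, Any]]], model: str
) -> Optional[dict[str, Any]]:
    """Mirror the lookup added above: return a profile only on an exact model-name match."""
    if provider_profile and model in provider_profile:
        return provider_profile[model]
    return None  # no entry -> no "profile" kwarg is passed to the chat model

profiles = {"model-a": {"max_input_tokens": 8192}}
assert resolve_profile(profiles, "model-a") == {"max_input_tokens": 8192}
assert resolve_profile(profiles, "model-b") is None  # unlisted model: profile left unset
assert resolve_profile(None, "model-a") is None      # provider registered without profiles
```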
@@ -103,6 +109,7 @@ def register_model_provider(
     provider_name: str,
     chat_model: ChatModelType,
     base_url: Optional[str] = None,
+    provider_profile: Optional[dict[str, dict[str, Any]]] = None,
     provider_config: Optional[ProviderConfig] = None,
 ):
     """Register a new model provider.
@@ -115,6 +122,7 @@
         provider_name: Name of the provider to register
         chat_model: Either a BaseChatModel class or a string identifier for a supported provider
         base_url: The API address of the model provider (optional, valid for both types of `chat_model`, but mainly used when `chat_model` is a string and is "openai-compatible")
+        provider_profile: Model provider's model configuration file (optional, valid for both types of `chat_model`); finally, it will read the corresponding model configuration parameters based on `model_name` and set them to `model.profile`.
         provider_config: The configuration of the model provider (Optional parameter;effective only when `chat_model` is a string and is "openai-compatible".)
             It can be configured to configure some related parameters of the provider, such as whether to support json_mode structured output mode, the list of supported tool_choice
     Raises:
@@ -164,16 +172,30 @@
                     "chat_model": chat_model,
                     "provider_config": provider_config,
                     "base_url": base_url,
+                    "provider_profile": provider_profile,
                 }
             }
         )
     else:
         if base_url is not None:
             _MODEL_PROVIDERS_DICT.update(
-                {
+                {
+                    provider_name: {
+                        "chat_model": chat_model,
+                        "base_url": base_url,
+                        "provider_profile": provider_profile,
+                    }
+                }
             )
         else:
-            _MODEL_PROVIDERS_DICT.update(
+            _MODEL_PROVIDERS_DICT.update(
+                {
+                    provider_name: {
+                        "chat_model": chat_model,
+                        "provider_profile": provider_profile,
+                    }
+                }
+            )
 
 
 def batch_register_model_provider(
@@ -189,6 +211,7 @@ def batch_register_model_provider(
         - provider_name: Name of the provider to register
         - chat_model: Either a BaseChatModel class or a string identifier for a supported provider
         - base_url: The API address of the model provider (optional, valid for both types of `chat_model`, but mainly used when `chat_model` is a string and is "openai-compatible")
+        - provider_profile: Model provider's model configuration file (optional, valid for both types of `chat_model`); finally, it will read the corresponding model configuration parameters based on `model_name` and set them to `model.profile`.
         - provider_config: The configuration of the model provider(Optional parameter; effective only when `chat_model` is a string and is "openai-compatible".)
             It can be configured to configure some related parameters of the provider, such as whether to support json_mode structured output mode, the list of supported tool_choice
 
@@ -222,6 +245,7 @@
             provider["provider_name"],
             provider["chat_model"],
             provider.get("base_url"),
+            provider_profile=provider.get("provider_profile"),
             provider_config=provider.get("provider_config"),
         )
 
{langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/tests/test_load_model.py RENAMED

@@ -7,6 +7,8 @@ from langchain_dev_utils.chat_models import (
     batch_register_model_provider,
     load_chat_model,
 )
+from data.alibaba._profiles import _PROFILES as ALI_PROFILES
+from data.zhipuai._profiles import _PROFILES as ZAI_PROFILES
 
 load_dotenv()
 
@@ -15,8 +17,13 @@ batch_register_model_provider(
         {
             "provider_name": "dashscope",
             "chat_model": ChatQwen,
+            "provider_profile": ALI_PROFILES,
+        },
+        {
+            "provider_name": "zai",
+            "chat_model": "openai-compatible",
+            "provider_profile": ZAI_PROFILES,
         },
-        {"provider_name": "zai", "chat_model": "openai-compatible"},
     ]
 )
 
@@ -98,5 +105,22 @@ def test_model_with_reasoning(reasoning_model: BaseChatModel):
 
 @pytest.mark.asyncio
 async def test_model_with_reasoning_async(reasoning_model: BaseChatModel):
-    response = await reasoning_model.ainvoke("hello
+    response = await reasoning_model.ainvoke("hello?")
     assert response.additional_kwargs.get("reasoning_content")
+
+
+def test_model_profile():
+    model = load_chat_model("dashscope:qwen-flash")
+    assert model.profile == ALI_PROFILES["qwen-flash"]
+
+    model = load_chat_model("dashscope:qwen-max")
+    assert model.profile == ALI_PROFILES["qwen-max"]
+
+    model = load_chat_model("dashscope:qwen3-vl-235b-a22b")
+    assert model.profile == ALI_PROFILES["qwen3-vl-235b-a22b"]
+
+    model = load_chat_model("zai:glm-4.6")
+    assert model.profile == ZAI_PROFILES["glm-4.6"]
+
+    model = load_chat_model("zai:glm-4.5v")
+    assert model.profile == ZAI_PROFILES["glm-4.5v"]
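The `_PROFILES` tables imported at the top of this test live in a local `data/` package that the new `source-exclude = ["/data"]` entry in pyproject.toml appears intended to keep out of the published sdist, so their exact contents are not visible in this diff. Going only by the `dict[str, dict[str, Any]]` type of `provider_profile`, a stand-in might look like the sketch below; the model names come from the test itself, while the per-model fields are purely illustrative assumptions.

```python
from typing import Any

# Hypothetical stand-in for data/alibaba/_profiles.py; not shipped with the package.
_PROFILES: dict[str, dict[str, Any]] = {
    "qwen-flash": {"max_input_tokens": 1_000_000, "tool_calling": True},
    "qwen-max": {"max_input_tokens": 32_768, "tool_calling": True},
    "qwen3-vl-235b-a22b": {"max_input_tokens": 131_072, "image_inputs": True},
}
```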
{langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/uv.lock RENAMED

@@ -418,11 +418,10 @@ wheels = [
 
 [[package]]
 name = "langchain-dev-utils"
-version = "1.2.
+version = "1.2.3"
 source = { editable = "." }
 dependencies = [
     { name = "langchain" },
-    { name = "langchain-deepseek" },
     { name = "langgraph" },
 ]
 
@@ -433,6 +432,7 @@ standard = [
 
 [package.dev-dependencies]
 dev = [
+    { name = "langchain-model-profiles" },
     { name = "ruff" },
 ]
 tests = [
@@ -445,15 +445,17 @@ tests = [
 
 [package.metadata]
 requires-dist = [
-    { name = "langchain", specifier = ">=1.
-    { name = "langchain-deepseek", specifier = ">=1.0.1" },
+    { name = "langchain", specifier = ">=1.1.0" },
     { name = "langchain-openai", marker = "extra == 'standard'" },
     { name = "langgraph", specifier = ">=1.0.0" },
 ]
 provides-extras = ["standard"]
 
 [package.metadata.requires-dev]
-dev = [
+dev = [
+    { name = "langchain-model-profiles", specifier = ">=0.0.5" },
+    { name = "ruff", specifier = ">=0.14.5" },
+]
 tests = [
     { name = "langchain-deepseek", specifier = ">=1.0.0" },
     { name = "langchain-ollama", specifier = ">=1.0.0" },
@@ -462,6 +464,19 @@ tests = [
     { name = "python-dotenv", specifier = ">=1.1.1" },
 ]
 
+[[package]]
+name = "langchain-model-profiles"
+version = "0.0.5"
+source = { registry = "https://pypi.mirrors.ustc.edu.cn/simple/" }
+dependencies = [
+    { name = "httpx" },
+    { name = "typing-extensions" },
+]
+sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/43/96/359105e3fb0161b5ba2e23daccc9c8b1c7cfe1dff1a05724e52a176ef3d1/langchain_model_profiles-0.0.5.tar.gz", hash = "sha256:21a4dfaa04e2200a2f52b00312bdfc62236579e396ebde2d483ca02127cc4f5c", size = 123467, upload-time = "2025-11-21T21:13:49.66Z" }
+wheels = [
+    { url = "https://mirrors.ustc.edu.cn/pypi/packages/6a/7f/1a537c74c6ed8794c8f6ebd2fadbeed48a2911acee14efc636791a1ecb08/langchain_model_profiles-0.0.5-py3-none-any.whl", hash = "sha256:f90826c70904783ffc0450835f5dcd419d4d0d4633c4efa5bb92d49296e85524", size = 6182, upload-time = "2025-11-21T21:13:48.671Z" },
+]
+
 [[package]]
 name = "langchain-ollama"
 version = "1.0.0"
@@ -527,7 +542,7 @@ wheels = [
 
 [[package]]
 name = "langgraph"
-version = "1.0.
+version = "1.0.4"
 source = { registry = "https://pypi.mirrors.ustc.edu.cn/simple/" }
 dependencies = [
     { name = "langchain-core" },
@@ -537,9 +552,9 @@ dependencies = [
     { name = "pydantic" },
     { name = "xxhash" },
 ]
-sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/
+sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/d6/3c/af87902d300c1f467165558c8966d8b1e1f896dace271d3f35a410a5c26a/langgraph-1.0.4.tar.gz", hash = "sha256:86d08e25d7244340f59c5200fa69fdd11066aa999b3164b531e2a20036fac156", size = 484397, upload-time = "2025-11-25T20:31:48.608Z" }
 wheels = [
-    { url = "https://mirrors.ustc.edu.cn/pypi/packages/
+    { url = "https://mirrors.ustc.edu.cn/pypi/packages/14/52/4eb25a3f60399da34ba34adff1b3e324cf0d87eb7a08cebf1882a9b5e0d5/langgraph-1.0.4-py3-none-any.whl", hash = "sha256:b1a835ceb0a8d69b9db48075e1939e28b1ad70ee23fa3fa8f90149904778bacf", size = 157271, upload-time = "2025-11-25T20:31:47.518Z" },
 ]
 
 [[package]]
@@ -570,20 +585,20 @@ wheels = [
 
 [[package]]
 name = "langgraph-sdk"
-version = "0.2.
+version = "0.2.10"
 source = { registry = "https://pypi.mirrors.ustc.edu.cn/simple/" }
 dependencies = [
     { name = "httpx" },
     { name = "orjson" },
 ]
-sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/
+sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/cb/0f/88772be3301cc5ad495e77705538edbcbf7f2ccf38d21555fa26131203aa/langgraph_sdk-0.2.10.tar.gz", hash = "sha256:ab58331504fbea28e6322037aa362929799b4e9106663ac1dbd7c5ac44558933", size = 113432, upload-time = "2025-11-24T21:31:57.268Z" }
 wheels = [
-    { url = "https://mirrors.ustc.edu.cn/pypi/packages/
+    { url = "https://mirrors.ustc.edu.cn/pypi/packages/8b/cc/ff4ba17253d31981b047f4be52cc51a19fa28dd2dd16a880c0c595bd66bd/langgraph_sdk-0.2.10-py3-none-any.whl", hash = "sha256:9aef403663726085de6851e4e50459df9562069bd316dd0261eb359f776fd0ef", size = 58430, upload-time = "2025-11-24T21:31:56.052Z" },
 ]
 
 [[package]]
 name = "langsmith"
-version = "0.4.
+version = "0.4.49"
 source = { registry = "https://pypi.mirrors.ustc.edu.cn/simple/" }
 dependencies = [
     { name = "httpx" },
@@ -594,9 +609,9 @@ dependencies = [
     { name = "requests-toolbelt" },
     { name = "zstandard" },
 ]
-sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/
+sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/2d/69/85ae805ecbc1300d486136329b3cb1702483c0afdaf81da95947dd83884a/langsmith-0.4.49.tar.gz", hash = "sha256:4a16ef6f3a9b20c5471884991a12ff37d81f2c13a50660cfe27fa79a7ca2c1b0", size = 987017, upload-time = "2025-11-26T21:45:16.338Z" }
 wheels = [
-    { url = "https://mirrors.ustc.edu.cn/pypi/packages/
+    { url = "https://mirrors.ustc.edu.cn/pypi/packages/31/79/59ecf7dceafd655ed20270a0f595d9e8e13895231cebcfbff9b6eec51fc4/langsmith-0.4.49-py3-none-any.whl", hash = "sha256:95f84edcd8e74ed658e4a3eb7355b530f35cb08a9a8865dbfde6740e4b18323c", size = 410905, upload-time = "2025-11-26T21:45:14.606Z" },
 ]
 
 [[package]]
@@ -1102,7 +1117,7 @@ wheels = [
 
 [[package]]
 name = "pydantic"
-version = "2.12.
+version = "2.12.5"
 source = { registry = "https://pypi.mirrors.ustc.edu.cn/simple/" }
 dependencies = [
     { name = "annotated-types" },
@@ -1110,9 +1125,9 @@ dependencies = [
     { name = "typing-extensions" },
     { name = "typing-inspection" },
 ]
-sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/
+sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/69/44/36f1a6e523abc58ae5f928898e4aca2e0ea509b5aa6f6f392a5d882be928/pydantic-2.12.5.tar.gz", hash = "sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49", size = 821591, upload-time = "2025-11-26T15:11:46.471Z" }
 wheels = [
-    { url = "https://mirrors.ustc.edu.cn/pypi/packages/
+    { url = "https://mirrors.ustc.edu.cn/pypi/packages/5a/87/b70ad306ebb6f9b585f114d0ac2137d792b48be34d732d60e597c2f8465a/pydantic-2.12.5-py3-none-any.whl", hash = "sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d", size = 463580, upload-time = "2025-11-26T15:11:44.605Z" },
 ]
 
 [[package]]
langchain_dev_utils-1.2.2/src/langchain_dev_utils/__init__.py DELETED

@@ -1 +0,0 @@
-__version__ = "1.2.2"