langchain-dev-utils 1.2.2__tar.gz → 1.2.4__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (53)
  1. {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/.gitignore +1 -0
  2. {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/PKG-INFO +6 -5
  3. {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/README.md +5 -4
  4. {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/README_cn.md +3 -1
  5. {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/pyproject.toml +4 -2
  6. langchain_dev_utils-1.2.4/src/langchain_dev_utils/__init__.py +1 -0
  7. {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/src/langchain_dev_utils/agents/middleware/model_fallback.py +1 -1
  8. {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/src/langchain_dev_utils/agents/middleware/plan.py +5 -1
  9. {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/src/langchain_dev_utils/chat_models/base.py +26 -2
  10. {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/tests/test_load_model.py +26 -2
  11. {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/uv.lock +32 -17
  12. langchain_dev_utils-1.2.2/src/langchain_dev_utils/__init__.py +0 -1
  13. {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/.python-version +0 -0
  14. {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/.vscode/settings.json +0 -0
  15. {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/LICENSE +0 -0
  16. {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/src/langchain_dev_utils/agents/__init__.py +0 -0
  17. {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/src/langchain_dev_utils/agents/factory.py +0 -0
  18. {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/src/langchain_dev_utils/agents/file_system.py +0 -0
  19. {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/src/langchain_dev_utils/agents/middleware/__init__.py +0 -0
  20. {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/src/langchain_dev_utils/agents/middleware/model_router.py +0 -0
  21. {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/src/langchain_dev_utils/agents/middleware/summarization.py +0 -0
  22. {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/src/langchain_dev_utils/agents/middleware/tool_emulator.py +0 -0
  23. {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/src/langchain_dev_utils/agents/middleware/tool_selection.py +0 -0
  24. {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/src/langchain_dev_utils/agents/plan.py +0 -0
  25. {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/src/langchain_dev_utils/agents/wrap.py +0 -0
  26. {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/src/langchain_dev_utils/chat_models/__init__.py +0 -0
  27. {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/src/langchain_dev_utils/chat_models/adapters/__init__.py +0 -0
  28. {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/src/langchain_dev_utils/chat_models/adapters/openai_compatible.py +0 -0
  29. {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/src/langchain_dev_utils/chat_models/types.py +0 -0
  30. {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/src/langchain_dev_utils/embeddings/__init__.py +0 -0
  31. {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/src/langchain_dev_utils/embeddings/base.py +0 -0
  32. {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/src/langchain_dev_utils/message_convert/__init__.py +0 -0
  33. {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/src/langchain_dev_utils/message_convert/content.py +0 -0
  34. {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/src/langchain_dev_utils/message_convert/format.py +0 -0
  35. {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/src/langchain_dev_utils/pipeline/__init__.py +0 -0
  36. {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/src/langchain_dev_utils/pipeline/parallel.py +0 -0
  37. {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/src/langchain_dev_utils/pipeline/sequential.py +0 -0
  38. {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/src/langchain_dev_utils/pipeline/types.py +0 -0
  39. {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/src/langchain_dev_utils/py.typed +0 -0
  40. {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/src/langchain_dev_utils/tool_calling/__init__.py +0 -0
  41. {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/src/langchain_dev_utils/tool_calling/human_in_the_loop.py +0 -0
  42. {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/src/langchain_dev_utils/tool_calling/utils.py +0 -0
  43. {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/tests/test_agent.py +0 -0
  44. {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/tests/test_chat_models.py +0 -0
  45. {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/tests/test_human_in_the_loop.py +0 -0
  46. {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/tests/test_load_embbeding.py +0 -0
  47. {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/tests/test_messages.py +0 -0
  48. {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/tests/test_model_tool_emulator.py +0 -0
  49. {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/tests/test_pipline.py +0 -0
  50. {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/tests/test_plan_middleware.py +0 -0
  51. {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/tests/test_router_model.py +0 -0
  52. {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/tests/test_tool_calling.py +0 -0
  53. {langchain_dev_utils-1.2.2 → langchain_dev_utils-1.2.4}/tests/test_wrap_agent.py +0 -0
.gitignore
@@ -10,3 +10,4 @@ wheels/
  .venv
  .env
  .benchmarks
+ data/
PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: langchain-dev-utils
- Version: 1.2.2
+ Version: 1.2.4
  Summary: A practical utility library for LangChain and LangGraph development
  Project-URL: Source Code, https://github.com/TBice123123/langchain-dev-utils
  Project-URL: repository, https://github.com/TBice123123/langchain-dev-utils
@@ -62,6 +62,7 @@ Mainly consists of the following two functions:
  - `provider_name`: Model provider name, used as an identifier for subsequent model loading
  - `chat_model`: Chat model, can be a ChatModel or a string (currently supports "openai-compatible")
  - `base_url`: The API address of the model provider (optional, valid for both types of `chat_model`, but mainly used when `chat_model` is a string and is "openai-compatible")
+ - `provider_profile`: Model provider's model configuration file (optional, valid for both types of `chat_model`); finally, it will read the corresponding model configuration parameters based on `model_name` and set them to `model.profile`.
  - `provider_config`: Relevant configuration for the model provider (optional, valid when `chat_model` is a string and is "openai-compatible"), can configure some provider-related parameters, such as whether to support structured output in json_mode, list of supported tool_choices, etc.

  `load_chat_model` parameter description:
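
To make the new `provider_profile` option concrete, here is a minimal sketch pieced together from the parameter description above, the `chat_models/base.py` changes, and the updated `tests/test_load_model.py` further down. It assumes `register_model_provider` is exported alongside `load_chat_model`; the profile dictionary, its keys, and the base URL are illustrative placeholders, not values shipped with the package.

```python
from langchain_dev_utils.chat_models import register_model_provider, load_chat_model

# Hypothetical per-model profile data, keyed by model name (dict[str, dict[str, Any]]).
# Real projects might source this from a package such as langchain-model-profiles,
# which 1.2.4 adds as a dev dependency.
my_profiles = {
    "qwen-flash": {"max_input_tokens": 1_000_000, "tool_calling": True},
}

# Register an OpenAI-compatible provider and attach the profile mapping.
register_model_provider(
    provider_name="dashscope",
    chat_model="openai-compatible",
    base_url="https://dashscope.aliyuncs.com/compatible-mode/v1",  # placeholder URL
    provider_profile=my_profiles,
)

# If the requested model name has an entry in provider_profile, that entry is
# passed through to the loaded model as its `profile` attribute.
model = load_chat_model("dashscope:qwen-flash")
assert model.profile == my_profiles["qwen-flash"]
```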
@@ -171,7 +172,7 @@ text = format_sequence([
  ], separator="\n", with_num=True)
  ```

- **For more information about message conversion, please refer to**: [Message Processing](https://tbice123123.github.io/langchain-dev-utils-docs/en/message-conversion/message.html), [Format List Content](https://tbice123123.github.io/langchain-dev-utils-docs/en/message-conversion/format.html)
+ **For more information about message conversion, please refer to**: [Message Process](https://tbice123123.github.io/langchain-dev-utils-docs/en/message-conversion/message.html), [Formatting List Content](https://tbice123123.github.io/langchain-dev-utils-docs/en/message-conversion/format.html)

  ### 3. **Tool Calling**

@@ -231,7 +232,7 @@ def get_current_time() -> str:
  return str(datetime.datetime.now().timestamp())
  ```

- **For more information about tool calling, please refer to**: [Add Human-in-the-Loop Support](https://tbice123123.github.io/langchain-dev-utils-docs/en/tool-calling/human-in-the-loop.html), [Tool Call Processing](https://tbice123123.github.io/langchain-dev-utils-docs/en/tool-calling/tool.html)
+ **For more information about tool calling, please refer to**: [Add Human-in-the-Loop Support](https://tbice123123.github.io/langchain-dev-utils-docs/en/tool-calling/human-in-the-loop.html), [Tool Call Handling](https://tbice123123.github.io/langchain-dev-utils-docs/en/tool-calling/tool.html)

  ### 4. **Agent Development**

@@ -278,7 +279,7 @@ response = agent.invoke({"messages": [{"role": "user", "content": "Give me a tra
  print(response)
  ```

- **For more information about agent development and all built-in middleware, please refer to**: [Prebuilt Agent Functions](https://tbice123123.github.io/langchain-dev-utils-docs/en/agent-development/prebuilt.html), [Middleware](https://tbice123123.github.io/langchain-dev-utils-docs/en/agent-development/middleware.html)
+ **For more information about agent development and all built-in middleware, please refer to**: [Pre-built Agent Functions](https://tbice123123.github.io/langchain-dev-utils-docs/en/agent-development/prebuilt.html), [Middleware](https://tbice123123.github.io/langchain-dev-utils-docs/en/agent-development/middleware.html)

  ### 5. **State Graph Orchestration**

@@ -391,7 +392,7 @@ response = graph.invoke({"messages": [HumanMessage("Hello")]})
  print(response)
  ```

- **For more information about state graph orchestration, please refer to**: [State Graph Orchestration Pipeline](https://tbice123123.github.io/langchain-dev-utils-docs/en/graph-orchestration/pipeline.html)
+ **For more information about state graph orchestration, please refer to**: [State Graph Orchestration](https://tbice123123.github.io/langchain-dev-utils-docs/en/graph-orchestration/pipeline.html)

  ## 💬 Join the Community

README.md
@@ -46,6 +46,7 @@ Mainly consists of the following two functions:
  - `provider_name`: Model provider name, used as an identifier for subsequent model loading
  - `chat_model`: Chat model, can be a ChatModel or a string (currently supports "openai-compatible")
  - `base_url`: The API address of the model provider (optional, valid for both types of `chat_model`, but mainly used when `chat_model` is a string and is "openai-compatible")
+ - `provider_profile`: Model provider's model configuration file (optional, valid for both types of `chat_model`); finally, it will read the corresponding model configuration parameters based on `model_name` and set them to `model.profile`.
  - `provider_config`: Relevant configuration for the model provider (optional, valid when `chat_model` is a string and is "openai-compatible"), can configure some provider-related parameters, such as whether to support structured output in json_mode, list of supported tool_choices, etc.

  `load_chat_model` parameter description:
@@ -155,7 +156,7 @@ text = format_sequence([
  ], separator="\n", with_num=True)
  ```

- **For more information about message conversion, please refer to**: [Message Processing](https://tbice123123.github.io/langchain-dev-utils-docs/en/message-conversion/message.html), [Format List Content](https://tbice123123.github.io/langchain-dev-utils-docs/en/message-conversion/format.html)
+ **For more information about message conversion, please refer to**: [Message Process](https://tbice123123.github.io/langchain-dev-utils-docs/en/message-conversion/message.html), [Formatting List Content](https://tbice123123.github.io/langchain-dev-utils-docs/en/message-conversion/format.html)

  ### 3. **Tool Calling**

@@ -215,7 +216,7 @@ def get_current_time() -> str:
  return str(datetime.datetime.now().timestamp())
  ```

- **For more information about tool calling, please refer to**: [Add Human-in-the-Loop Support](https://tbice123123.github.io/langchain-dev-utils-docs/en/tool-calling/human-in-the-loop.html), [Tool Call Processing](https://tbice123123.github.io/langchain-dev-utils-docs/en/tool-calling/tool.html)
+ **For more information about tool calling, please refer to**: [Add Human-in-the-Loop Support](https://tbice123123.github.io/langchain-dev-utils-docs/en/tool-calling/human-in-the-loop.html), [Tool Call Handling](https://tbice123123.github.io/langchain-dev-utils-docs/en/tool-calling/tool.html)

  ### 4. **Agent Development**

@@ -262,7 +263,7 @@ response = agent.invoke({"messages": [{"role": "user", "content": "Give me a tra
  print(response)
  ```

- **For more information about agent development and all built-in middleware, please refer to**: [Prebuilt Agent Functions](https://tbice123123.github.io/langchain-dev-utils-docs/en/agent-development/prebuilt.html), [Middleware](https://tbice123123.github.io/langchain-dev-utils-docs/en/agent-development/middleware.html)
+ **For more information about agent development and all built-in middleware, please refer to**: [Pre-built Agent Functions](https://tbice123123.github.io/langchain-dev-utils-docs/en/agent-development/prebuilt.html), [Middleware](https://tbice123123.github.io/langchain-dev-utils-docs/en/agent-development/middleware.html)

  ### 5. **State Graph Orchestration**

@@ -375,7 +376,7 @@ response = graph.invoke({"messages": [HumanMessage("Hello")]})
  print(response)
  ```

- **For more information about state graph orchestration, please refer to**: [State Graph Orchestration Pipeline](https://tbice123123.github.io/langchain-dev-utils-docs/en/graph-orchestration/pipeline.html)
+ **For more information about state graph orchestration, please refer to**: [State Graph Orchestration](https://tbice123123.github.io/langchain-dev-utils-docs/en/graph-orchestration/pipeline.html)

  ## 💬 Join the Community

README_cn.md
@@ -46,8 +46,10 @@ pip install -U langchain-dev-utils[standard]
  - `provider_name`: Model provider name, used as an identifier for subsequent model loading
  - `chat_model`: Chat model, can be a ChatModel or a string (currently supports "openai-compatible")
  - `base_url`: The API address of the model provider (optional, valid for both types of `chat_model`, but mainly used when `chat_model` is a string and is "openai-compatible")
+ - `provider_profile`: Model provider's model configuration file (optional, valid for both types of `chat_model`); finally, it will read the corresponding model configuration parameters based on `model_name` and set them to `model.profile`.
  - `provider_config`: Relevant configuration for the model provider (optional, valid when `chat_model` is a string and is "openai-compatible"); can configure some provider-related parameters, such as whether the json_mode structured output method is supported, the list of supported tool_choice values, etc.

+
  `load_chat_model` parameter description:

  - `model`: Chat model name, of type str
@@ -375,7 +377,7 @@ response = graph.invoke({"messages": [HumanMessage("你好")]})
  print(response)
  ```

- **For more information about state graph orchestration, please refer to**: [State Graph Orchestration Pipeline](https://tbice123123.github.io/langchain-dev-utils-docs/zh/graph-orchestration/pipeline.html)
+ **For more information about state graph orchestration, please refer to**: [State Graph Orchestration](https://tbice123123.github.io/langchain-dev-utils-docs/zh/graph-orchestration/pipeline.html)

  ## 💬 Join the Community

pyproject.toml
@@ -1,6 +1,6 @@
  [project]
  name = "langchain-dev-utils"
- version = "1.2.2"
+ version = "1.2.4"
  description = "A practical utility library for LangChain and LangGraph development"
  readme = "README.md"
  authors = [{ name = "tiebingice", email = "tiebingice123@outlook.com" }]
@@ -20,6 +20,8 @@ standard = ["langchain-openai"]
  requires = ["hatchling"]
  build-backend = "hatchling.build"

+ [tool.uv.build-backend]
+ source-exclude = ["/data"]

  [tool.pytest.ini_options]
  asyncio_mode = "auto"
@@ -28,7 +30,7 @@ python_files = ["test_*.py"]
  python_functions = ["test_*"]

  [dependency-groups]
- dev = ["ruff>=0.14.5"]
+ dev = ["langchain-model-profiles>=0.0.5", "ruff>=0.14.5"]
  tests = [
  "python-dotenv>=1.1.1",
  "langchain-tests>=1.0.0",
langchain_dev_utils-1.2.4/src/langchain_dev_utils/__init__.py
@@ -0,0 +1 @@
+ __version__ = "1.2.4"
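
Since the package `__init__.py` exposes `__version__`, the installed version can be checked at runtime; a trivial sketch:

```python
import langchain_dev_utils

# Prints the release string defined in __init__.py, e.g. "1.2.4" for this version.
print(langchain_dev_utils.__version__)
```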
src/langchain_dev_utils/agents/middleware/model_fallback.py
@@ -17,7 +17,7 @@ class ModelFallbackMiddleware(_ModelFallbackMiddleware):

  Example:
  ```python
- from langchain_dev_utils.agents.middleware.model_fallback import ModelFallbackMiddleware
+ from langchain_dev_utils.agents.middleware import ModelFallbackMiddleware
  from langchain_dev_utils.agents import create_agent

  fallback = ModelFallbackMiddleware(
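
The docstring example above is cut off at the hunk boundary. As a rough sketch only, assuming the constructor keeps the upstream LangChain shape of taking fallback models in priority order, usage might look like the following; the model identifiers are placeholders for providers registered via `register_model_provider`:

```python
from langchain_dev_utils.agents.middleware import ModelFallbackMiddleware
from langchain_dev_utils.agents import create_agent

# Assumed behavior: fallback models are tried in order when the agent's
# primary model raises an error. The provider:model strings are placeholders.
fallback = ModelFallbackMiddleware(
    "dashscope:qwen-flash",  # tried first on failure
    "zai:glm-4.6",           # tried next
)

agent = create_agent("vllm:qwen3-4b", middleware=[fallback])
```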
src/langchain_dev_utils/agents/middleware/plan.py
@@ -250,6 +250,8 @@ _PLAN_SYSTEM_PROMPT_NOT_READ_PLAN = """You can manage task plans using two simpl
  ## finish_sub_plan
  - Call it **only when the current task is 100% done**. It automatically marks it `"done"` and promotes the next `"pending"` task to `"in_progress"`. No parameters needed. Never use it mid-task or if anything’s incomplete.
  Keep plans lean, update immediately, and never batch completions.
+
+ **Note**: Make sure that all tasks end up with the status `"done"`.
  """

  _PLAN_SYSTEM_PROMPT = """You can manage task plans using three simple tools:
@@ -263,6 +265,8 @@ _PLAN_SYSTEM_PROMPT = """You can manage task plans using three simple tools:
  ## read_plan
  - Retrieve the full current plan list with statuses, especially when you forget which sub-plan you're supposed to execute next.
  - No parameters required—returns a current plan list with statuses.
+
+ **Note**: Make sure that all tasks end up with the status `"done"`.
  """


@@ -290,7 +294,7 @@ class PlanMiddleware(AgentMiddleware):
  message_key: The key of the message to be updated. Defaults to "messages".
  Example:
  ```python
- from langchain_dev_utils.agents.middleware.plan import PlanMiddleware
+ from langchain_dev_utils.agents.middleware import PlanMiddleware
  from langchain_dev_utils.agents import create_agent

  agent = create_agent("vllm:qwen3-4b", middleware=[PlanMiddleware()])
src/langchain_dev_utils/chat_models/base.py
@@ -14,6 +14,7 @@ class ChatModelProvider(TypedDict):
  provider_name: str
  chat_model: ChatModelType
  base_url: NotRequired[str]
+ provider_profile: NotRequired[dict[str, dict[str, Any]]]
  provider_config: NotRequired[ProviderConfig]


@@ -94,6 +95,11 @@ def _load_chat_model_helper(
  url_key = _get_base_url_field_name(chat_model)
  if url_key:
  kwargs.update({url_key: base_url})
+ if provider_profile := _MODEL_PROVIDERS_DICT[model_provider].get(
+ "provider_profile"
+ ):
+ if model in provider_profile:
+ kwargs.update({"profile": provider_profile[model]})
  return chat_model(model=model, **kwargs)

  return _init_chat_model_helper(model, model_provider=model_provider, **kwargs)
@@ -103,6 +109,7 @@ def register_model_provider(
  provider_name: str,
  chat_model: ChatModelType,
  base_url: Optional[str] = None,
+ provider_profile: Optional[dict[str, dict[str, Any]]] = None,
  provider_config: Optional[ProviderConfig] = None,
  ):
  """Register a new model provider.
@@ -115,6 +122,7 @@ def register_model_provider(
  provider_name: Name of the provider to register
  chat_model: Either a BaseChatModel class or a string identifier for a supported provider
  base_url: The API address of the model provider (optional, valid for both types of `chat_model`, but mainly used when `chat_model` is a string and is "openai-compatible")
+ provider_profile: Model provider's model configuration file (optional, valid for both types of `chat_model`); finally, it will read the corresponding model configuration parameters based on `model_name` and set them to `model.profile`.
  provider_config: The configuration of the model provider (Optional parameter;effective only when `chat_model` is a string and is "openai-compatible".)
  It can be configured to configure some related parameters of the provider, such as whether to support json_mode structured output mode, the list of supported tool_choice
  Raises:
@@ -164,16 +172,30 @@ def register_model_provider(
  "chat_model": chat_model,
  "provider_config": provider_config,
  "base_url": base_url,
+ "provider_profile": provider_profile,
  }
  }
  )
  else:
  if base_url is not None:
  _MODEL_PROVIDERS_DICT.update(
- {provider_name: {"chat_model": chat_model, "base_url": base_url}}
+ {
+ provider_name: {
+ "chat_model": chat_model,
+ "base_url": base_url,
+ "provider_profile": provider_profile,
+ }
+ }
  )
  else:
- _MODEL_PROVIDERS_DICT.update({provider_name: {"chat_model": chat_model}})
+ _MODEL_PROVIDERS_DICT.update(
+ {
+ provider_name: {
+ "chat_model": chat_model,
+ "provider_profile": provider_profile,
+ }
+ }
+ )


  def batch_register_model_provider(
@@ -189,6 +211,7 @@ def batch_register_model_provider(
  - provider_name: Name of the provider to register
  - chat_model: Either a BaseChatModel class or a string identifier for a supported provider
  - base_url: The API address of the model provider (optional, valid for both types of `chat_model`, but mainly used when `chat_model` is a string and is "openai-compatible")
+ - provider_profile: Model provider's model configuration file (optional, valid for both types of `chat_model`); finally, it will read the corresponding model configuration parameters based on `model_name` and set them to `model.profile`.
  - provider_config: The configuration of the model provider(Optional parameter; effective only when `chat_model` is a string and is "openai-compatible".)
  It can be configured to configure some related parameters of the provider, such as whether to support json_mode structured output mode, the list of supported tool_choice

@@ -222,6 +245,7 @@ def batch_register_model_provider(
  provider["provider_name"],
  provider["chat_model"],
  provider.get("base_url"),
+ provider_profile=provider.get("provider_profile"),
  provider_config=provider.get("provider_config"),
  )

tests/test_load_model.py
@@ -7,6 +7,8 @@ from langchain_dev_utils.chat_models import (
  batch_register_model_provider,
  load_chat_model,
  )
+ from data.alibaba._profiles import _PROFILES as ALI_PROFILES
+ from data.zhipuai._profiles import _PROFILES as ZAI_PROFILES

  load_dotenv()

@@ -15,8 +17,13 @@ batch_register_model_provider(
  {
  "provider_name": "dashscope",
  "chat_model": ChatQwen,
+ "provider_profile": ALI_PROFILES,
+ },
+ {
+ "provider_name": "zai",
+ "chat_model": "openai-compatible",
+ "provider_profile": ZAI_PROFILES,
  },
- {"provider_name": "zai", "chat_model": "openai-compatible"},
  ]
  )

@@ -98,5 +105,22 @@ def test_model_with_reasoning(reasoning_model: BaseChatModel):

  @pytest.mark.asyncio
  async def test_model_with_reasoning_async(reasoning_model: BaseChatModel):
- response = await reasoning_model.ainvoke("hello?")
+ response = await reasoning_model.ainvoke("hello")
  assert response.additional_kwargs.get("reasoning_content")
+
+
+ def test_model_profile():
+ model = load_chat_model("dashscope:qwen-flash")
+ assert model.profile == ALI_PROFILES["qwen-flash"]
+
+ model = load_chat_model("dashscope:qwen-max")
+ assert model.profile == ALI_PROFILES["qwen-max"]
+
+ model = load_chat_model("dashscope:qwen3-vl-235b-a22b")
+ assert model.profile == ALI_PROFILES["qwen3-vl-235b-a22b"]
+
+ model = load_chat_model("zai:glm-4.6")
+ assert model.profile == ZAI_PROFILES["glm-4.6"]
+
+ model = load_chat_model("zai:glm-4.5v")
+ assert model.profile == ZAI_PROFILES["glm-4.5v"]
uv.lock
@@ -418,11 +418,10 @@ wheels = [

  [[package]]
  name = "langchain-dev-utils"
- version = "1.2.1"
+ version = "1.2.3"
  source = { editable = "." }
  dependencies = [
  { name = "langchain" },
- { name = "langchain-deepseek" },
  { name = "langgraph" },
  ]

@@ -433,6 +432,7 @@ standard = [

  [package.dev-dependencies]
  dev = [
+ { name = "langchain-model-profiles" },
  { name = "ruff" },
  ]
  tests = [
@@ -445,15 +445,17 @@ tests = [

  [package.metadata]
  requires-dist = [
- { name = "langchain", specifier = ">=1.0.0" },
- { name = "langchain-deepseek", specifier = ">=1.0.1" },
+ { name = "langchain", specifier = ">=1.1.0" },
  { name = "langchain-openai", marker = "extra == 'standard'" },
  { name = "langgraph", specifier = ">=1.0.0" },
  ]
  provides-extras = ["standard"]

  [package.metadata.requires-dev]
- dev = [{ name = "ruff", specifier = ">=0.14.5" }]
+ dev = [
+ { name = "langchain-model-profiles", specifier = ">=0.0.5" },
+ { name = "ruff", specifier = ">=0.14.5" },
+ ]
  tests = [
  { name = "langchain-deepseek", specifier = ">=1.0.0" },
  { name = "langchain-ollama", specifier = ">=1.0.0" },
@@ -462,6 +464,19 @@ tests = [
  { name = "python-dotenv", specifier = ">=1.1.1" },
  ]

+ [[package]]
+ name = "langchain-model-profiles"
+ version = "0.0.5"
+ source = { registry = "https://pypi.mirrors.ustc.edu.cn/simple/" }
+ dependencies = [
+ { name = "httpx" },
+ { name = "typing-extensions" },
+ ]
+ sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/43/96/359105e3fb0161b5ba2e23daccc9c8b1c7cfe1dff1a05724e52a176ef3d1/langchain_model_profiles-0.0.5.tar.gz", hash = "sha256:21a4dfaa04e2200a2f52b00312bdfc62236579e396ebde2d483ca02127cc4f5c", size = 123467, upload-time = "2025-11-21T21:13:49.66Z" }
+ wheels = [
+ { url = "https://mirrors.ustc.edu.cn/pypi/packages/6a/7f/1a537c74c6ed8794c8f6ebd2fadbeed48a2911acee14efc636791a1ecb08/langchain_model_profiles-0.0.5-py3-none-any.whl", hash = "sha256:f90826c70904783ffc0450835f5dcd419d4d0d4633c4efa5bb92d49296e85524", size = 6182, upload-time = "2025-11-21T21:13:48.671Z" },
+ ]
+
  [[package]]
  name = "langchain-ollama"
  version = "1.0.0"
@@ -527,7 +542,7 @@ wheels = [

  [[package]]
  name = "langgraph"
- version = "1.0.3"
+ version = "1.0.4"
  source = { registry = "https://pypi.mirrors.ustc.edu.cn/simple/" }
  dependencies = [
  { name = "langchain-core" },
@@ -537,9 +552,9 @@ dependencies = [
  { name = "pydantic" },
  { name = "xxhash" },
  ]
- sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/a7/55/70f2d11d33b0310d3e48d8e049825b4a34a1c822d48f6448ae548d2cd0f8/langgraph-1.0.3.tar.gz", hash = "sha256:873a6aae6be054ef52a05c463be363a46da9711405b1b14454d595f543b68335", size = 483302, upload-time = "2025-11-10T17:41:45.425Z" }
+ sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/d6/3c/af87902d300c1f467165558c8966d8b1e1f896dace271d3f35a410a5c26a/langgraph-1.0.4.tar.gz", hash = "sha256:86d08e25d7244340f59c5200fa69fdd11066aa999b3164b531e2a20036fac156", size = 484397, upload-time = "2025-11-25T20:31:48.608Z" }
  wheels = [
- { url = "https://mirrors.ustc.edu.cn/pypi/packages/84/a3/fdf6ecd0e44cb02d20afe7d0fb64c748a749f4b2e011bf9a785a32642367/langgraph-1.0.3-py3-none-any.whl", hash = "sha256:4a75146f09bd0d127a724876f4244f460c4c66353a993641bd641ed710cd010f", size = 156845, upload-time = "2025-11-10T17:41:43.868Z" },
+ { url = "https://mirrors.ustc.edu.cn/pypi/packages/14/52/4eb25a3f60399da34ba34adff1b3e324cf0d87eb7a08cebf1882a9b5e0d5/langgraph-1.0.4-py3-none-any.whl", hash = "sha256:b1a835ceb0a8d69b9db48075e1939e28b1ad70ee23fa3fa8f90149904778bacf", size = 157271, upload-time = "2025-11-25T20:31:47.518Z" },
  ]

  [[package]]
@@ -570,20 +585,20 @@ wheels = [

  [[package]]
  name = "langgraph-sdk"
- version = "0.2.9"
+ version = "0.2.10"
  source = { registry = "https://pypi.mirrors.ustc.edu.cn/simple/" }
  dependencies = [
  { name = "httpx" },
  { name = "orjson" },
  ]
- sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/23/d8/40e01190a73c564a4744e29a6c902f78d34d43dad9b652a363a92a67059c/langgraph_sdk-0.2.9.tar.gz", hash = "sha256:b3bd04c6be4fa382996cd2be8fbc1e7cc94857d2bc6b6f4599a7f2a245975303", size = 99802, upload-time = "2025-09-20T18:49:14.734Z" }
+ sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/cb/0f/88772be3301cc5ad495e77705538edbcbf7f2ccf38d21555fa26131203aa/langgraph_sdk-0.2.10.tar.gz", hash = "sha256:ab58331504fbea28e6322037aa362929799b4e9106663ac1dbd7c5ac44558933", size = 113432, upload-time = "2025-11-24T21:31:57.268Z" }
  wheels = [
- { url = "https://mirrors.ustc.edu.cn/pypi/packages/66/05/b2d34e16638241e6f27a6946d28160d4b8b641383787646d41a3727e0896/langgraph_sdk-0.2.9-py3-none-any.whl", hash = "sha256:fbf302edadbf0fb343596f91c597794e936ef68eebc0d3e1d358b6f9f72a1429", size = 56752, upload-time = "2025-09-20T18:49:13.346Z" },
+ { url = "https://mirrors.ustc.edu.cn/pypi/packages/8b/cc/ff4ba17253d31981b047f4be52cc51a19fa28dd2dd16a880c0c595bd66bd/langgraph_sdk-0.2.10-py3-none-any.whl", hash = "sha256:9aef403663726085de6851e4e50459df9562069bd316dd0261eb359f776fd0ef", size = 58430, upload-time = "2025-11-24T21:31:56.052Z" },
  ]

  [[package]]
  name = "langsmith"
- version = "0.4.47"
+ version = "0.4.49"
  source = { registry = "https://pypi.mirrors.ustc.edu.cn/simple/" }
  dependencies = [
  { name = "httpx" },
@@ -594,9 +609,9 @@ dependencies = [
  { name = "requests-toolbelt" },
  { name = "zstandard" },
  ]
- sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/ec/dd/d69922b79fb692b9736206574e5ba69b6354080cf1cc9796449d9fe61f9a/langsmith-0.4.47.tar.gz", hash = "sha256:6a576405696ee97147ccb96c9ae5c9437430500a5d118bd447ec2d1f8cf26de1", size = 986584, upload-time = "2025-11-24T16:02:00.914Z" }
+ sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/2d/69/85ae805ecbc1300d486136329b3cb1702483c0afdaf81da95947dd83884a/langsmith-0.4.49.tar.gz", hash = "sha256:4a16ef6f3a9b20c5471884991a12ff37d81f2c13a50660cfe27fa79a7ca2c1b0", size = 987017, upload-time = "2025-11-26T21:45:16.338Z" }
  wheels = [
- { url = "https://mirrors.ustc.edu.cn/pypi/packages/80/1a/0c84f7096d41d64425d29db549c8d6fe075f925a5f2022e8087d01d862c2/langsmith-0.4.47-py3-none-any.whl", hash = "sha256:b9e514611d4e1570e33595d33ccb1fe6eda9f96c5f961095a138651f746c1ef5", size = 411207, upload-time = "2025-11-24T16:01:59.123Z" },
+ { url = "https://mirrors.ustc.edu.cn/pypi/packages/31/79/59ecf7dceafd655ed20270a0f595d9e8e13895231cebcfbff9b6eec51fc4/langsmith-0.4.49-py3-none-any.whl", hash = "sha256:95f84edcd8e74ed658e4a3eb7355b530f35cb08a9a8865dbfde6740e4b18323c", size = 410905, upload-time = "2025-11-26T21:45:14.606Z" },
  ]

  [[package]]
@@ -1102,7 +1117,7 @@ wheels = [

  [[package]]
  name = "pydantic"
- version = "2.12.4"
+ version = "2.12.5"
  source = { registry = "https://pypi.mirrors.ustc.edu.cn/simple/" }
  dependencies = [
  { name = "annotated-types" },
@@ -1110,9 +1125,9 @@ dependencies = [
  { name = "typing-extensions" },
  { name = "typing-inspection" },
  ]
- sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/96/ad/a17bc283d7d81837c061c49e3eaa27a45991759a1b7eae1031921c6bd924/pydantic-2.12.4.tar.gz", hash = "sha256:0f8cb9555000a4b5b617f66bfd2566264c4984b27589d3b845685983e8ea85ac", size = 821038, upload-time = "2025-11-05T10:50:08.59Z" }
+ sdist = { url = "https://mirrors.ustc.edu.cn/pypi/packages/69/44/36f1a6e523abc58ae5f928898e4aca2e0ea509b5aa6f6f392a5d882be928/pydantic-2.12.5.tar.gz", hash = "sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49", size = 821591, upload-time = "2025-11-26T15:11:46.471Z" }
  wheels = [
- { url = "https://mirrors.ustc.edu.cn/pypi/packages/82/2f/e68750da9b04856e2a7ec56fc6f034a5a79775e9b9a81882252789873798/pydantic-2.12.4-py3-none-any.whl", hash = "sha256:92d3d202a745d46f9be6df459ac5a064fdaa3c1c4cd8adcfa332ccf3c05f871e", size = 463400, upload-time = "2025-11-05T10:50:06.732Z" },
+ { url = "https://mirrors.ustc.edu.cn/pypi/packages/5a/87/b70ad306ebb6f9b585f114d0ac2137d792b48be34d732d60e597c2f8465a/pydantic-2.12.5-py3-none-any.whl", hash = "sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d", size = 463580, upload-time = "2025-11-26T15:11:44.605Z" },
  ]

langchain_dev_utils-1.2.2/src/langchain_dev_utils/__init__.py
@@ -1 +0,0 @@
- __version__ = "1.2.2"