langchain-dev-utils 1.1.12__py3-none-any.whl → 1.1.13__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- langchain_dev_utils/__init__.py +1 -1
- langchain_dev_utils/agents/middleware/plan.py +19 -7
- langchain_dev_utils/chat_models/adapters/openai_compatible.py +1 -1
- langchain_dev_utils/chat_models/base.py +45 -6
- langchain_dev_utils/embeddings/base.py +52 -8
- {langchain_dev_utils-1.1.12.dist-info → langchain_dev_utils-1.1.13.dist-info}/METADATA +3 -3
- {langchain_dev_utils-1.1.12.dist-info → langchain_dev_utils-1.1.13.dist-info}/RECORD +9 -9
- {langchain_dev_utils-1.1.12.dist-info → langchain_dev_utils-1.1.13.dist-info}/WHEEL +0 -0
- {langchain_dev_utils-1.1.12.dist-info → langchain_dev_utils-1.1.13.dist-info}/licenses/LICENSE +0 -0
langchain_dev_utils/__init__.py
CHANGED
```diff
@@ -1 +1 @@
-__version__ = "1.1.12"
+__version__ = "1.1.13"
```
langchain_dev_utils/agents/middleware/plan.py
CHANGED

````diff
@@ -244,9 +244,10 @@ def create_read_plan_tool(
 
 _PLAN_SYSTEM_PROMPT_NOT_READ_PLAN = """You can manage task plans using two simple tools:
 
-
+## write_plan
 - Use it to break complex tasks (3+ steps) into a clear, actionable list. Only include next steps to execute — the first becomes `"in_progress"`, the rest `"pending"`. Don’t use it for simple tasks (<3 steps).
-
+
+## finish_sub_plan
 - Call it **only when the current task is 100% done**. It automatically marks it `"done"` and promotes the next `"pending"` task to `"in_progress"`. No parameters needed. Never use it mid-task or if anything’s incomplete.
 Keep plans lean, update immediately, and never batch completions.
 """
@@ -277,9 +278,15 @@ class PlanMiddleware(AgentMiddleware):
 
     Args:
         system_prompt: Custom system prompt to guide the agent on using the plan tool.
-            If not provided, uses the default `
-
-
+            If not provided, uses the default `_PLAN_SYSTEM_PROMPT` or `_PLAN_SYSTEM_PROMPT_NOT_READ_PLAN` based on the `use_read_plan_tool` parameter.
+        write_plan_tool_description: Description of the `write_plan` tool.
+            If not provided, uses the default `_DEFAULT_WRITE_PLAN_TOOL_DESCRIPTION`.
+        finish_sub_plan_tool_description: Description of the `finish_sub_plan` tool.
+            If not provided, uses the default `_DEFAULT_FINISH_SUB_PLAN_TOOL_DESCRIPTION`.
+        read_plan_tool_description: Description of the `read_plan` tool.
+            If not provided, uses the default `_DEFAULT_READ_PLAN_TOOL_DESCRIPTION`.
+        use_read_plan_tool: Whether to use the `read_plan` tool.
+            If not provided, uses the default `True`.
     Example:
         ```python
         from langchain_dev_utils.agents.middleware.plan import PlanMiddleware
@@ -304,6 +311,7 @@ class PlanMiddleware(AgentMiddleware):
         finish_sub_plan_tool_description: Optional[str] = None,
         read_plan_tool_description: Optional[str] = None,
         use_read_plan_tool: bool = True,
+        message_key: Optional[str] = None,
     ) -> None:
         super().__init__()
 
@@ -319,8 +327,12 @@
         )
 
         tools = [
-            create_write_plan_tool(
-
+            create_write_plan_tool(
+                description=write_plan_tool_description, message_key=message_key
+            ),
+            create_finish_sub_plan_tool(
+                description=finish_sub_plan_tool_description, message_key=message_key
+            ),
         ]
 
         if use_read_plan_tool:
````
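For orientation, a minimal usage sketch of the widened constructor follows. The parameter names come from the hunks above; the description strings and the `message_key` value are purely illustrative, and how `message_key` is interpreted is an assumption to verify against `create_write_plan_tool`/`create_finish_sub_plan_tool`.

```python
from langchain_dev_utils.agents.middleware.plan import PlanMiddleware

# Illustrative values only; parameter names are taken from the diff above.
plan_middleware = PlanMiddleware(
    write_plan_tool_description="Write or replace the current task plan.",
    finish_sub_plan_tool_description="Mark the in-progress sub-task as done.",
    use_read_plan_tool=False,    # skip registering the read_plan tool
    message_key="plan_updates",  # new in 1.1.13; forwarded to both tool factories
)
```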
langchain_dev_utils/chat_models/adapters/openai_compatible.py
CHANGED

```diff
@@ -46,7 +46,7 @@ _DictOrPydantic = Union[dict, _BM]
 
 
 class _ModelProviderConfigType(BaseModel):
-    supported_tool_choice: ToolChoiceType = Field(
+    supported_tool_choice: ToolChoiceType = Field(default_factory=list)
     keep_reasoning_content: bool = Field(default=False)
     support_json_mode: bool = Field(default=False)
 
```
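The single changed line above gives `supported_tool_choice` a `default_factory=list` default. As a minimal, self-contained illustration of why `default_factory` is the idiomatic way to default a mutable field in Pydantic (the class below is a simplified stand-in for `_ModelProviderConfigType`, not the package's own code):

```python
from pydantic import BaseModel, Field


class ProviderConfigSketch(BaseModel):
    # default_factory builds a fresh list per instance, so instances
    # never share (and accidentally mutate) a single default object.
    supported_tool_choice: list[str] = Field(default_factory=list)


a = ProviderConfigSketch()
b = ProviderConfigSketch()
a.supported_tool_choice.append("auto")
assert b.supported_tool_choice == []  # b keeps its own empty list
```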
langchain_dev_utils/chat_models/base.py
CHANGED

```diff
@@ -1,10 +1,11 @@
-import os
 from typing import Any, NotRequired, Optional, TypedDict, cast
 
 from langchain.chat_models.base import _SUPPORTED_PROVIDERS, _init_chat_model_helper
 from langchain_core.language_models.chat_models import BaseChatModel
+from langchain_core.utils import from_env
 
 from .types import ChatModelType, ToolChoiceType
+from pydantic import BaseModel
 
 _MODEL_PROVIDERS_DICT = {}
 
@@ -22,6 +23,34 @@ class ChatModelProvider(TypedDict):
     provider_config: NotRequired[ProviderConfig]
 
 
+def _get_base_url_field_name(model_cls: type[BaseModel]) -> str | None:
+    """
+    Return 'base_url' if the model has a field named or aliased as 'base_url',
+    else return 'api_base' if it has a field named or aliased as 'api_base',
+    else return None.
+    The return value is always either 'base_url', 'api_base', or None.
+    """
+    model_fields = model_cls.model_fields
+
+    # try model_fields first
+    if "base_url" in model_fields:
+        return "base_url"
+
+    if "api_base" in model_fields:
+        return "api_base"
+
+    # then try aliases
+    for field_info in model_fields.values():
+        if field_info.alias == "base_url":
+            return "base_url"
+
+    for field_info in model_fields.values():
+        if field_info.alias == "api_base":
+            return "api_base"
+
+    return None
+
+
 def _parse_model(model: str, model_provider: Optional[str]) -> tuple[str, str]:
     """Parse model string and provider.
 
@@ -71,6 +100,11 @@ def _load_chat_model_helper(
             "provider_config"
         ):
             kwargs.update({"provider_config": provider_config})
+
+        if base_url := _MODEL_PROVIDERS_DICT[model_provider].get("base_url"):
+            url_key = _get_base_url_field_name(chat_model)
+            if url_key:
+                kwargs.update({url_key: base_url})
         return chat_model(model=model, **kwargs)
 
     return _init_chat_model_helper(model, model_provider=model_provider, **kwargs)
@@ -91,7 +125,7 @@ def register_model_provider(
     Args:
         provider_name: Name of the provider to register
         chat_model: Either a BaseChatModel class or a string identifier for a supported provider
-        base_url:
+        base_url: The API address of the model provider (optional, valid for both types of `chat_model`, but mainly used when `chat_model` is a string and is "openai-compatible")
         provider_config: The configuration of the model provider (Optional parameter;effective only when `chat_model` is a string and is "openai-compatible".)
             It can be configured to configure some related parameters of the provider, such as whether to support json_mode structured output mode, the list of supported tool_choice
     Raises:
@@ -113,6 +147,7 @@ def register_model_provider(
         >>> model = load_chat_model(model="vllm:qwen3-4b")
         >>> model.invoke("Hello")
     """
+    base_url = base_url or from_env(f"{provider_name.upper()}_API_BASE", default=None)()
     if isinstance(chat_model, str):
         try:
             from .adapters.openai_compatible import _create_openai_compatible_model
@@ -120,8 +155,6 @@ def register_model_provider(
             raise ImportError(
                 "Please install langchain_dev_utils[standard],when chat_model is a 'openai-compatible'"
             )
-
-        base_url = base_url or os.getenv(f"{provider_name.upper()}_API_BASE")
         if base_url is None:
             raise ValueError(
                 f"base_url must be provided or set {provider_name.upper()}_API_BASE environment variable when chat_model is a string"
@@ -140,11 +173,17 @@
                 provider_name: {
                     "chat_model": chat_model,
                     "provider_config": provider_config,
+                    "base_url": base_url,
                 }
             }
         )
     else:
-
+        if base_url is not None:
+            _MODEL_PROVIDERS_DICT.update(
+                {provider_name: {"chat_model": chat_model, "base_url": base_url}}
+            )
+        else:
+            _MODEL_PROVIDERS_DICT.update({provider_name: {"chat_model": chat_model}})
 
 
 def batch_register_model_provider(
@@ -159,7 +198,7 @@ def batch_register_model_provider(
        providers: List of ChatModelProvider dictionaries, each containing:
            - provider_name: Name of the provider to register
            - chat_model: Either a BaseChatModel class or a string identifier for a supported provider
-           - base_url:
+           - base_url: The API address of the model provider (optional, valid for both types of `chat_model`, but mainly used when `chat_model` is a string and is "openai-compatible")
            - provider_config: The configuration of the model provider(Optional parameter; effective only when `chat_model` is a string and is "openai-compatible".)
            It can be configured to configure some related parameters of the provider, such as whether to support json_mode structured output mode, the list of supported tool_choice
 
```
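The new `_get_base_url_field_name` helper is what lets a registered `base_url` be bound to chat-model classes that spell the field differently. A small sketch of the lookup order it implements; the toy model classes are hypothetical, and the helper itself is a private function of this module:

```python
from pydantic import BaseModel, Field

from langchain_dev_utils.chat_models.base import _get_base_url_field_name


class UsesBaseUrl(BaseModel):
    base_url: str | None = None


class UsesApiBaseAlias(BaseModel):
    # stored as "endpoint" but exposed under the alias "api_base"
    endpoint: str | None = Field(default=None, alias="api_base")


assert _get_base_url_field_name(UsesBaseUrl) == "base_url"       # direct field name wins
assert _get_base_url_field_name(UsesApiBaseAlias) == "api_base"  # falls back to aliases
assert _get_base_url_field_name(BaseModel) is None               # nothing to bind to
```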
langchain_dev_utils/embeddings/base.py
CHANGED

```diff
@@ -1,7 +1,8 @@
-import os
 from typing import Any, Literal, NotRequired, Optional, TypedDict, Union
 
 from langchain.embeddings.base import Embeddings, _SUPPORTED_PROVIDERS, init_embeddings
+from langchain_core.utils import from_env, secret_from_env
+from pydantic import BaseModel
 
 _EMBEDDINGS_PROVIDERS_DICT = {}
 
@@ -14,6 +15,34 @@ class EmbeddingProvider(TypedDict):
     base_url: NotRequired[str]
 
 
+def _get_base_url_field_name(model_cls: type[BaseModel]) -> str | None:
+    """
+    Return 'base_url' if the model has a field named or aliased as 'base_url',
+    else return 'api_base' if it has a field named or aliased as 'api_base',
+    else return None.
+    The return value is always either 'base_url', 'api_base', or None.
+    """
+    model_fields = model_cls.model_fields
+
+    # try model_fields first
+    if "base_url" in model_fields:
+        return "base_url"
+
+    if "api_base" in model_fields:
+        return "api_base"
+
+    # then try aliases
+    for field_info in model_fields.values():
+        if field_info.alias == "base_url":
+            return "base_url"
+
+    for field_info in model_fields.values():
+        if field_info.alias == "api_base":
+            return "api_base"
+
+    return None
+
+
 def _parse_model_string(model_name: str) -> tuple[str, str]:
     """Parse model string into provider and model name.
 
@@ -56,7 +85,7 @@ def register_embeddings_provider(
     Args:
         provider_name: Name of the provider to register
         embeddings_model: Either an Embeddings class or a string identifier for a supported provider
-        base_url:
+        base_url: The API address of the Embedding model provider (optional, valid for both types of `embeddings_model`, but mainly used when `embeddings_model` is a string and is "openai-compatible")
 
     Raises:
         ValueError: If base_url is not provided when embeddings_model is a string
@@ -77,8 +106,9 @@ def register_embeddings_provider(
         >>> embeddings = load_embeddings("vllm:qwen3-embedding-4b")
         >>> embeddings.embed_query("hello world")
     """
+
+    base_url = base_url or from_env(f"{provider_name.upper()}_API_BASE", default=None)()
     if isinstance(embeddings_model, str):
-        base_url = base_url or os.getenv(f"{provider_name.upper()}_API_BASE")
         if base_url is None:
             raise ValueError(
                 f"base_url must be provided or set {provider_name.upper()}_API_BASE environment variable when embeddings_model is a string"
@@ -98,9 +128,19 @@ def register_embeddings_provider(
             }
         )
     else:
-
-
-
+        if base_url is not None:
+            _EMBEDDINGS_PROVIDERS_DICT.update(
+                {
+                    provider_name: {
+                        "embeddings_model": embeddings_model,
+                        "base_url": base_url,
+                    }
+                }
+            )
+        else:
+            _EMBEDDINGS_PROVIDERS_DICT.update(
+                {provider_name: {"embeddings_model": embeddings_model}}
+            )
 
 
 def batch_register_embeddings_provider(
@@ -115,7 +155,7 @@ def batch_register_embeddings_provider(
        providers: List of EmbeddingProvider dictionaries, each containing:
            - provider_name: str - Provider name
            - embeddings_model: Union[Type[Embeddings], str] - Model class or provider string
-           - base_url:
+           - base_url: The API address of the Embedding model provider (optional, valid for both types of `embeddings_model`, but mainly used when `embeddings_model` is a string and is "openai-compatible")
 
     Raises:
         ValueError: If any of the providers are invalid
@@ -186,7 +226,7 @@ def load_embeddings(
         embeddings = _EMBEDDINGS_PROVIDERS_DICT[provider]["embeddings_model"]
         if isinstance(embeddings, str):
             if not (api_key := kwargs.get("api_key")):
-                api_key =
+                api_key = secret_from_env(f"{provider.upper()}_API_KEY", default=None)()
             if not api_key:
                 raise ValueError(
                     f"API key for {provider} not found. Please set it in the environment."
@@ -203,4 +243,8 @@ def load_embeddings(
                 **kwargs,
             )
         else:
+            if base_url := _EMBEDDINGS_PROVIDERS_DICT[provider].get("base_url"):
+                url_key = _get_base_url_field_name(embeddings)
+                if url_key is not None:
+                    kwargs.update({url_key: base_url})
             return embeddings(model=model, **kwargs)
```
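Combined with the chat-model changes, registration can now pick up `base_url` from a `{PROVIDER}_API_BASE` environment variable before the `isinstance` check, and `load_embeddings` resolves the key via `secret_from_env`. A hedged end-to-end sketch, assuming a local OpenAI-compatible embedding server (URL, key, and model name are placeholders) and that the `langchain_dev_utils[standard]` extra is installed:

```python
import os

from langchain_dev_utils.embeddings.base import (
    load_embeddings,
    register_embeddings_provider,
)

# Placeholder endpoint and key; any OpenAI-compatible server works the same way.
os.environ["VLLM_API_BASE"] = "http://localhost:8000/v1"
os.environ["VLLM_API_KEY"] = "sk-anything"

# base_url is not passed, so it is resolved from VLLM_API_BASE at registration time.
register_embeddings_provider("vllm", "openai-compatible")

# api_key is resolved from VLLM_API_KEY via secret_from_env at load time.
embeddings = load_embeddings("vllm:qwen3-embedding-4b")
embeddings.embed_query("hello world")
```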
{langchain_dev_utils-1.1.12.dist-info → langchain_dev_utils-1.1.13.dist-info}/METADATA
CHANGED

```diff
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: langchain-dev-utils
-Version: 1.1.12
+Version: 1.1.13
 Summary: A practical utility library for LangChain and LangGraph development
 Project-URL: Source Code, https://github.com/TBice123123/langchain-dev-utils
 Project-URL: repository, https://github.com/TBice123123/langchain-dev-utils
@@ -61,7 +61,7 @@ Mainly consists of the following two functions:
 
 - `provider_name`: Model provider name, used as an identifier for subsequent model loading
 - `chat_model`: Chat model, can be a ChatModel or a string (currently supports "openai-compatible")
-- `base_url`: API address of the model provider (optional, valid when `chat_model` is a string and is "openai-compatible")
+- `base_url`: The API address of the model provider (optional, valid for both types of `chat_model`, but mainly used when `chat_model` is a string and is "openai-compatible")
 - `provider_config`: Relevant configuration for the model provider (optional, valid when `chat_model` is a string and is "openai-compatible"), can configure some provider-related parameters, such as whether to support structured output in json_mode, list of supported tool_choices, etc.
 
 `load_chat_model` parameter description:
@@ -101,7 +101,7 @@ Mainly consists of the following two functions:
 
 - `provider_name`: Embedding model provider name, used as an identifier for subsequent model loading
 - `embeddings_model`: Embedding model, can be Embeddings or a string (currently supports "openai-compatible")
-- `base_url`: API address of the model provider (optional, valid when `embeddings_model` is a string and is "openai-compatible")
+- `base_url`: The API address of the Embedding model provider (optional, valid for both types of `embeddings_model`, but mainly used when `embeddings_model` is a string and is "openai-compatible")
 
 `load_embeddings` parameter description:
 
```
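The README lines above document the relaxed `base_url` semantics; the batch registration helper accepts the same keys. A short sketch of `batch_register_model_provider` with the documented dictionary fields (provider names, URLs, and model strings are placeholders; the import path mirrors the modules shown in this diff, and the "openai-compatible" path still requires the `langchain_dev_utils[standard]` extra):

```python
from langchain_dev_utils.chat_models.base import (
    batch_register_model_provider,
    load_chat_model,
)

batch_register_model_provider(
    [
        # Keys follow the ChatModelProvider docstring: provider_name, chat_model, base_url.
        {
            "provider_name": "vllm",
            "chat_model": "openai-compatible",
            "base_url": "http://localhost:8000/v1",
        },
        {
            "provider_name": "sglang",
            "chat_model": "openai-compatible",
            "base_url": "http://localhost:30000/v1",
        },
    ]
)

model = load_chat_model(model="vllm:qwen3-4b")  # model name reused from the docstring example
```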
{langchain_dev_utils-1.1.12.dist-info → langchain_dev_utils-1.1.13.dist-info}/RECORD
CHANGED

```diff
@@ -1,4 +1,4 @@
-langchain_dev_utils/__init__.py,sha256=
+langchain_dev_utils/__init__.py,sha256=fHD3CDKZLpB_vekISdHB54mQUzaieloAPZEREmkbiRQ,23
 langchain_dev_utils/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 langchain_dev_utils/agents/__init__.py,sha256=e17SMQdJIQngbUCr2N1tY-yw0tD3tEnH7PSvyDmVPeQ,127
 langchain_dev_utils/agents/factory.py,sha256=pQeqz_ZlU43Os5gKlRu5-iCLTslPWEqWJzuGxpKhcRo,3904
@@ -8,17 +8,17 @@ langchain_dev_utils/agents/wrap.py,sha256=4BWksU9DRz8c3ZHQiUi4GHwGhNysDLNs8pmLWV
 langchain_dev_utils/agents/middleware/__init__.py,sha256=cjrb8Rue5uukl9pKPF7CjSrHtcYsUBj3Mdvv2szlp7E,679
 langchain_dev_utils/agents/middleware/model_fallback.py,sha256=cvTj_sOw3r4B4ErMAVdsrniMImWnUpLMECmQErxdsUU,1688
 langchain_dev_utils/agents/middleware/model_router.py,sha256=YkaPpYmIZaGj--YlUjm7dVcNzRt3Au317eor4SDYsQs,8799
-langchain_dev_utils/agents/middleware/plan.py,sha256=
+langchain_dev_utils/agents/middleware/plan.py,sha256=pVABuihOo-TGuPwJA_AdpBa6eodbdZalXozl_YcMsHc,15198
 langchain_dev_utils/agents/middleware/summarization.py,sha256=Ws-_cxSQQfa5rn5Spq1gSLpgIleUCno3QmWRvN4-u9E,2213
 langchain_dev_utils/agents/middleware/tool_emulator.py,sha256=u9rV24yUB-dyc1uUfUe74B1wOGVI3TZRwxkE1bvGm18,2025
 langchain_dev_utils/agents/middleware/tool_selection.py,sha256=ZqdyK4Yhp2u3GM6B_D6U7Srca9vy1o7s6N_LrV24-dQ,3107
 langchain_dev_utils/chat_models/__init__.py,sha256=YSLUyHrWEEj4y4DtGFCOnDW02VIYZdfAH800m4Klgeg,224
-langchain_dev_utils/chat_models/base.py,sha256=
+langchain_dev_utils/chat_models/base.py,sha256=BagUNjqWwTZ2vJ-uHPQ0vyC6nYXOdFJidV_73jlPFG8,11232
 langchain_dev_utils/chat_models/types.py,sha256=oPXFsfho9amnwek5v3ey8LcnsfKVzecWSJcKVBG4ETc,261
 langchain_dev_utils/chat_models/adapters/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-langchain_dev_utils/chat_models/adapters/openai_compatible.py,sha256=
+langchain_dev_utils/chat_models/adapters/openai_compatible.py,sha256=6ZTRCFqgW8fk8nbZs0OarmuHP5M6wr-0mbFogZuLTWY,18409
 langchain_dev_utils/embeddings/__init__.py,sha256=zbEOaV86TUi9Zrg_dH9dpdgacWg31HMJTlTQknA9EKk,244
-langchain_dev_utils/embeddings/base.py,sha256=
+langchain_dev_utils/embeddings/base.py,sha256=OFXgaLO6DsadSITUmtrDvJg_-042lrrDwY5vnS9_do8,9574
 langchain_dev_utils/message_convert/__init__.py,sha256=xwjaQ1oJoc80xy70oQI4uW3gAmgV5JymJd5hgnA6s3g,458
 langchain_dev_utils/message_convert/content.py,sha256=ApmQ7fUUBO3Ihjm2hYSWd4GrU_CvrjbWla-MA7DAFRc,7758
 langchain_dev_utils/message_convert/format.py,sha256=fh4GyyuZBTMrHeCEwdu9fOh5n8tdli1vDF44jK1i-tI,2373
@@ -29,7 +29,7 @@ langchain_dev_utils/pipeline/types.py,sha256=T3aROKKXeWvd0jcH5XkgMDQfEkLfPaiOhhV
 langchain_dev_utils/tool_calling/__init__.py,sha256=mu_WxKMcu6RoTf4vkTPbA1WSBSNc6YIqyBtOQ6iVQj4,322
 langchain_dev_utils/tool_calling/human_in_the_loop.py,sha256=nbaON9806pv5tpMRQUA_Ch3HJA5HBFgzZR7kQRf6PiY,9819
 langchain_dev_utils/tool_calling/utils.py,sha256=3cNv_Zx32KxdsGn8IkxjWUzxYEEwVJeJgTZTbfSg0pA,2751
-langchain_dev_utils-1.1.
-langchain_dev_utils-1.1.
-langchain_dev_utils-1.1.
-langchain_dev_utils-1.1.
+langchain_dev_utils-1.1.13.dist-info/METADATA,sha256=m205M6P2wNSDHHHNCVuSE-1SBMD1oyiMHRYYwHbJyEA,16264
+langchain_dev_utils-1.1.13.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+langchain_dev_utils-1.1.13.dist-info/licenses/LICENSE,sha256=AWAOzNEcsvCEzHOF0qby5OKxviVH_eT9Yce1sgJTico,1084
+langchain_dev_utils-1.1.13.dist-info/RECORD,,
```
{langchain_dev_utils-1.1.12.dist-info → langchain_dev_utils-1.1.13.dist-info}/WHEEL
RENAMED
File without changes

{langchain_dev_utils-1.1.12.dist-info → langchain_dev_utils-1.1.13.dist-info}/licenses/LICENSE
RENAMED
File without changes