langchain 0.3.0.dev1__py3-none-any.whl → 0.3.0.dev2__py3-none-any.whl
This diff compares the contents of two publicly available package versions as they appear in their respective public registries. It is provided for informational purposes only.
Note: this version of langchain has been flagged as potentially problematic.
- langchain/agents/agent.py +2 -2
- langchain/chains/api/base.py +1 -1
- langchain/chains/base.py +3 -2
- langchain/chains/conversational_retrieval/base.py +1 -1
- langchain/chains/elasticsearch_database/base.py +1 -1
- langchain/chains/hyde/base.py +1 -1
- langchain/chains/moderation.py +2 -2
- langchain/chains/openai_functions/base.py +2 -2
- langchain/chains/openai_functions/extraction.py +7 -3
- langchain/chains/openai_functions/qa_with_structure.py +4 -1
- langchain/chains/openai_functions/tagging.py +5 -2
- langchain/chains/openai_tools/extraction.py +1 -1
- langchain/chains/structured_output/base.py +7 -7
- langchain/chat_models/base.py +1 -1
- langchain/memory/combined.py +5 -3
- langchain/output_parsers/pandas_dataframe.py +3 -2
- langchain/output_parsers/yaml.py +4 -1
- langchain/pydantic_v1/__init__.py +20 -0
- langchain/pydantic_v1/dataclasses.py +20 -0
- langchain/pydantic_v1/main.py +20 -0
- langchain/retrievers/document_compressors/embeddings_filter.py +1 -1
- {langchain-0.3.0.dev1.dist-info → langchain-0.3.0.dev2.dist-info}/METADATA +3 -3
- {langchain-0.3.0.dev1.dist-info → langchain-0.3.0.dev2.dist-info}/RECORD +26 -26
- {langchain-0.3.0.dev1.dist-info → langchain-0.3.0.dev2.dist-info}/LICENSE +0 -0
- {langchain-0.3.0.dev1.dist-info → langchain-0.3.0.dev2.dist-info}/WHEEL +0 -0
- {langchain-0.3.0.dev1.dist-info → langchain-0.3.0.dev2.dist-info}/entry_points.txt +0 -0
langchain/agents/agent.py
CHANGED
@@ -176,7 +176,7 @@ class BaseSingleActionAgent(BaseModel):
         Returns:
             Dict: Dictionary representation of agent.
         """
-        _dict = super().dict()
+        _dict = super().model_dump()
         try:
             _type = self._agent_type
         except NotImplementedError:
@@ -324,7 +324,7 @@ class BaseMultiActionAgent(BaseModel):

     def dict(self, **kwargs: Any) -> Dict:
         """Return dictionary representation of agent."""
-        _dict = super().dict()
+        _dict = super().model_dump()
         try:
             _dict["_type"] = str(self._agent_type)
         except NotImplementedError:
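These hunks are part of the pydantic v2 migration: the v1 `.dict()` serializer is replaced by `.model_dump()`. A minimal sketch of the renamed call, using a hypothetical model rather than the langchain classes:

    from pydantic import BaseModel

    class AgentConfig(BaseModel):  # hypothetical model, not a langchain class
        name: str = "zero-shot"
        max_iterations: int = 15

    config = AgentConfig()
    # pydantic v2 renames .dict() to .model_dump(); .dict() still works but is deprecated
    print(config.model_dump())  # {'name': 'zero-shot', 'max_iterations': 15}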
langchain/chains/api/base.py
CHANGED
@@ -198,7 +198,7 @@ try:
         api_docs: str
         question_key: str = "question"  #: :meta private:
         output_key: str = "output"  #: :meta private:
-        limit_to_domains: Optional[Sequence[str]]
+        limit_to_domains: Optional[Sequence[str]] = Field(default_factory=list)
         """Use to limit the domains that can be accessed by the API chain.

         * For example, to limit to just the domain `https://www.example.com`, set
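This change reflects a pydantic v2 behavior difference: an `Optional` annotation no longer implies an implicit default, so a field that should stay optional needs an explicit default such as `Field(default_factory=list)` or `= None`. A small sketch with a hypothetical model:

    from typing import Optional, Sequence
    from pydantic import BaseModel, Field

    class ApiSettings(BaseModel):  # hypothetical stand-in, not the langchain APIChain
        # Without an explicit default, pydantic v2 would treat this field as required.
        limit_to_domains: Optional[Sequence[str]] = Field(default_factory=list)

    print(ApiSettings().limit_to_domains)  # []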
langchain/chains/base.py
CHANGED
@@ -32,8 +32,8 @@ from pydantic import (
     BaseModel,
     ConfigDict,
     Field,
+    field_validator,
     model_validator,
-    validator,
 )

 from langchain.schema import RUN_KEY
@@ -248,7 +248,8 @@ class Chain(RunnableSerializable[Dict[str, Any], Dict[str, Any]], ABC):
         values["callbacks"] = values.pop("callback_manager", None)
         return values

-    @validator("verbose", pre=True)
+    @field_validator("verbose", mode="before")
+    @classmethod
     def set_verbose(cls, verbose: Optional[bool]) -> bool:
         """Set the chain verbosity.

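The validator migration above follows the standard pydantic v2 pattern: `@validator(..., pre=True)` becomes `@field_validator(..., mode="before")` and the method is explicitly marked as a classmethod. A minimal sketch with a hypothetical model:

    from typing import Optional
    from pydantic import BaseModel, field_validator

    class Settings(BaseModel):  # hypothetical stand-in for the Chain config
        verbose: bool = False

        @field_validator("verbose", mode="before")
        @classmethod
        def set_verbose(cls, verbose: Optional[bool]) -> bool:
            # mode="before" runs on the raw input, like pre=True in pydantic v1
            return bool(verbose) if verbose is not None else False

    print(Settings(verbose=None).verbose)  # False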
langchain/chains/conversational_retrieval/base.py
CHANGED
@@ -92,7 +92,7 @@ class BaseConversationalRetrievalChain(Chain):
     get_chat_history: Optional[Callable[[List[CHAT_TURN_TYPE]], str]] = None
     """An optional function to get a string of the chat history.
     If None is provided, will use a default."""
-    response_if_no_docs_found: Optional[str]
+    response_if_no_docs_found: Optional[str] = None
     """If specified, the chain will return a fixed response if no docs
     are found for the question. """

langchain/chains/elasticsearch_database/base.py
CHANGED
@@ -40,7 +40,7 @@ class ElasticsearchDatabaseChain(Chain):
     """Chain for creating the ES query."""
     answer_chain: Runnable
     """Chain for answering the user question."""
-    database: Any
+    database: Any = None
     """Elasticsearch database to connect to of type elasticsearch.Elasticsearch."""
     top_k: int = 10
     """Number of results to return from the query"""
langchain/chains/hyde/base.py
CHANGED
@@ -38,7 +38,7 @@ class HypotheticalDocumentEmbedder(Chain, Embeddings):
     @property
     def input_keys(self) -> List[str]:
         """Input keys for Hyde's LLM chain."""
-        return self.llm_chain.input_schema.schema()["required"]
+        return self.llm_chain.input_schema.model_json_schema()["required"]

     @property
     def output_keys(self) -> List[str]:
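Here `.schema()` becomes `.model_json_schema()`, the pydantic v2 name for the JSON-schema export; the `required` key still lists fields without defaults. A sketch with a hypothetical input model:

    from pydantic import BaseModel

    class PromptInput(BaseModel):  # hypothetical input schema
        question: str
        context: str = ""

    # Only fields without defaults appear in the JSON-schema "required" list.
    print(PromptInput.model_json_schema()["required"])  # ['question']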
langchain/chains/moderation.py
CHANGED
@@ -28,8 +28,8 @@ class OpenAIModerationChain(Chain):
             moderation = OpenAIModerationChain()
     """

-    client: Any  #: :meta private:
-    async_client: Any  #: :meta private:
+    client: Any = None  #: :meta private:
+    async_client: Any = None  #: :meta private:
     model_name: Optional[str] = None
     """Moderation model name to use."""
     error: bool = False
langchain/chains/openai_functions/base.py
CHANGED
@@ -93,7 +93,7 @@ def create_openai_fn_chain(
            from langchain_community.chat_models import ChatOpenAI
            from langchain_core.prompts import ChatPromptTemplate

-           from
+           from pydantic import BaseModel, Field


            class RecordPerson(BaseModel):
@@ -183,7 +183,7 @@ def create_structured_output_chain(
            from langchain_community.chat_models import ChatOpenAI
            from langchain_core.prompts import ChatPromptTemplate

-           from
+           from pydantic import BaseModel, Field

            class Dog(BaseModel):
                \"\"\"Identifying information about a dog.\"\"\"
langchain/chains/openai_functions/extraction.py
CHANGED
@@ -61,7 +61,7 @@ Passage:
    removal="1.0",
    alternative=(
        """
-           from
+           from pydantic import BaseModel, Field
            from langchain_anthropic import ChatAnthropic

            class Joke(BaseModel):
@@ -131,7 +131,7 @@ def create_extraction_chain(
    removal="1.0",
    alternative=(
        """
-           from
+           from pydantic import BaseModel, Field
            from langchain_anthropic import ChatAnthropic

            class Joke(BaseModel):
@@ -172,7 +172,11 @@ def create_extraction_chain_pydantic(
    class PydanticSchema(BaseModel):
        info: List[pydantic_schema]  # type: ignore

-    openai_schema = pydantic_schema.schema()
+    if hasattr(pydantic_schema, "model_json_schema"):
+        openai_schema = pydantic_schema.model_json_schema()
+    else:
+        openai_schema = pydantic_schema.schema()
+
     openai_schema = _resolve_schema_references(
         openai_schema, openai_schema.get("definitions", {})
     )
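The `hasattr` check added above lets the chain accept both pydantic v2 models (which expose `model_json_schema()`) and `pydantic.v1`-style models (which only have `.schema()`). A standalone sketch of the same fallback, with a hypothetical schema class:

    from pydantic import BaseModel

    def schema_of(model: type) -> dict:
        # Prefer the pydantic v2 API, fall back to the v1-style method.
        if hasattr(model, "model_json_schema"):
            return model.model_json_schema()
        return model.schema()

    class Person(BaseModel):  # hypothetical schema
        name: str
        age: int

    print(sorted(schema_of(Person)["properties"]))  # ['age', 'name']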
langchain/chains/openai_functions/qa_with_structure.py
CHANGED
@@ -72,7 +72,10 @@ def create_qa_with_structure_chain(
            f"Should be one of `pydantic` or `base`."
        )
    if isinstance(schema, type) and is_basemodel_subclass(schema):
-        schema_dict = cast(dict, schema.schema())
+        if hasattr(schema, "model_json_schema"):
+            schema_dict = cast(dict, schema.model_json_schema())
+        else:
+            schema_dict = cast(dict, schema.schema())
    else:
        schema_dict = cast(dict, schema)
    function = {
langchain/chains/openai_functions/tagging.py
CHANGED
@@ -130,7 +130,7 @@ def create_tagging_chain_pydantic(

    .. code-block:: python

-           from
+           from pydantic import BaseModel, Field
            from langchain_anthropic import ChatAnthropic

            class Joke(BaseModel):
@@ -156,7 +156,10 @@ def create_tagging_chain_pydantic(
    Returns:
        Chain (LLMChain) that can be used to extract information from a passage.
    """
-    openai_schema = pydantic_schema.schema()
+    if hasattr(pydantic_schema, "model_json_schema"):
+        openai_schema = pydantic_schema.model_json_schema()
+    else:
+        openai_schema = pydantic_schema.schema()
    function = _get_tagging_function(openai_schema)
    prompt = prompt or ChatPromptTemplate.from_template(_TAGGING_TEMPLATE)
    output_parser = PydanticOutputFunctionsParser(pydantic_schema=pydantic_schema)
langchain/chains/openai_tools/extraction.py
CHANGED
@@ -32,7 +32,7 @@ If a property is not present and is not required in the function parameters, do
    removal="1.0",
    alternative=(
        """
-           from
+           from pydantic import BaseModel, Field
            from langchain_anthropic import ChatAnthropic

            class Joke(BaseModel):
langchain/chains/structured_output/base.py
CHANGED
@@ -44,7 +44,7 @@ from pydantic import BaseModel
    removal="1.0",
    alternative=(
        """
-           from
+           from pydantic import BaseModel, Field
            from langchain_anthropic import ChatAnthropic

            class Joke(BaseModel):
@@ -108,7 +108,7 @@ def create_openai_fn_runnable(

            from langchain.chains.structured_output import create_openai_fn_runnable
            from langchain_openai import ChatOpenAI
-           from
+           from pydantic import BaseModel, Field


            class RecordPerson(BaseModel):
@@ -162,7 +162,7 @@ def create_openai_fn_runnable(
    removal="1.0",
    alternative=(
        """
-           from
+           from pydantic import BaseModel, Field
            from langchain_anthropic import ChatAnthropic

            class Joke(BaseModel):
@@ -237,7 +237,7 @@ def create_structured_output_runnable(

            from langchain.chains import create_structured_output_runnable
            from langchain_openai import ChatOpenAI
-           from
+           from pydantic import BaseModel, Field


            class RecordDog(BaseModel):
@@ -318,7 +318,7 @@ def create_structured_output_runnable(

            from langchain.chains import create_structured_output_runnable
            from langchain_openai import ChatOpenAI
-           from
+           from pydantic import BaseModel, Field

            class Dog(BaseModel):
                '''Identifying information about a dog.'''
@@ -340,7 +340,7 @@ def create_structured_output_runnable(
            from langchain.chains import create_structured_output_runnable
            from langchain_openai import ChatOpenAI
            from langchain_core.prompts import ChatPromptTemplate
-           from
+           from pydantic import BaseModel, Field

            class Dog(BaseModel):
                '''Identifying information about a dog.'''
@@ -366,7 +366,7 @@ def create_structured_output_runnable(
            from langchain.chains import create_structured_output_runnable
            from langchain_openai import ChatOpenAI
            from langchain_core.prompts import ChatPromptTemplate
-           from
+           from pydantic import BaseModel, Field

            class Dog(BaseModel):
                '''Identifying information about a dog.'''
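All of the hunks above only touch docstring examples, swapping the old pydantic shim import for a direct `from pydantic import BaseModel, Field`; the model definitions in those examples are unchanged. A representative sketch of the updated style (field names are illustrative):

    from typing import Optional
    from pydantic import BaseModel, Field

    class Joke(BaseModel):
        """Joke to tell user."""

        setup: str = Field(description="question to set up a joke")
        punchline: str = Field(description="answer to resolve the joke")
        rating: Optional[int] = Field(default=None, description="how funny the joke is, 1 to 10")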
langchain/chat_models/base.py
CHANGED
@@ -241,7 +241,7 @@ def init_chat_model(

            # pip install langchain langchain-openai langchain-anthropic
            from langchain.chat_models import init_chat_model
-           from
+           from pydantic import BaseModel, Field

            class GetWeather(BaseModel):
                '''Get the current weather in a given location'''
langchain/memory/combined.py
CHANGED
@@ -2,7 +2,7 @@ import warnings
 from typing import Any, Dict, List, Set

 from langchain_core.memory import BaseMemory
-from pydantic import validator
+from pydantic import field_validator

 from langchain.memory.chat_memory import BaseChatMemory

@@ -13,7 +13,8 @@ class CombinedMemory(BaseMemory):
     memories: List[BaseMemory]
     """For tracking all the memories that should be accessed."""

-    @validator("memories")
+    @field_validator("memories")
+    @classmethod
     def check_repeated_memory_variable(
         cls, value: List[BaseMemory]
     ) -> List[BaseMemory]:
@@ -29,7 +30,8 @@ class CombinedMemory(BaseMemory):

         return value

-    @validator("memories")
+    @field_validator("memories")
+    @classmethod
     def check_input_key(cls, value: List[BaseMemory]) -> List[BaseMemory]:
         """Check that if memories are of type BaseChatMemory that input keys exist."""
         for val in value:
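The same `@field_validator` plus `@classmethod` pattern is applied to both validators on the `memories` field. A compact sketch of a list-field validator in that style, using a hypothetical model:

    from typing import List
    from pydantic import BaseModel, field_validator

    class Combined(BaseModel):  # hypothetical stand-in for CombinedMemory
        memories: List[str]

        @field_validator("memories")
        @classmethod
        def check_repeated(cls, value: List[str]) -> List[str]:
            # Reject duplicates, mirroring the repeated-variable check above.
            if len(value) != len(set(value)):
                raise ValueError("memories must be unique")
            return value

    print(Combined(memories=["chat", "summary"]).memories)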
langchain/output_parsers/pandas_dataframe.py
CHANGED
@@ -3,7 +3,7 @@ from typing import Any, Dict, List, Tuple, Union

 from langchain_core.exceptions import OutputParserException
 from langchain_core.output_parsers.base import BaseOutputParser
-from pydantic import validator
+from pydantic import field_validator

 from langchain.output_parsers.format_instructions import (
     PANDAS_DATAFRAME_FORMAT_INSTRUCTIONS,
@@ -16,7 +16,8 @@ class PandasDataFrameOutputParser(BaseOutputParser[Dict[str, Any]]):
     """The Pandas DataFrame to parse."""
     dataframe: Any

-    @validator("dataframe")
+    @field_validator("dataframe")
+    @classmethod
     def validate_dataframe(cls, val: Any) -> Any:
         import pandas as pd

langchain/output_parsers/yaml.py
CHANGED
@@ -35,7 +35,10 @@ class YamlOutputParser(BaseOutputParser[T]):
                yaml_str = text

            json_object = yaml.safe_load(yaml_str)
-           return self.pydantic_object.parse_obj(json_object)
+           if hasattr(self.pydantic_object, "model_validate"):
+               return self.pydantic_object.model_validate(json_object)
+           else:
+               return self.pydantic_object.parse_obj(json_object)

        except (yaml.YAMLError, ValidationError) as e:
            name = self.pydantic_object.__name__
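The parser now prefers pydantic v2's `model_validate()` and falls back to the v1 `parse_obj()` classmethod, so it keeps working with `pydantic.v1` models. A sketch of the same branch with a hypothetical target model:

    from pydantic import BaseModel

    class Answer(BaseModel):  # hypothetical target model
        text: str
        score: float

    data = {"text": "42", "score": 0.9}
    if hasattr(Answer, "model_validate"):
        answer = Answer.model_validate(data)   # pydantic v2 path
    else:
        answer = Answer.parse_obj(data)        # pydantic v1 / pydantic.v1 path
    print(answer)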
langchain/pydantic_v1/__init__.py
CHANGED
@@ -1,5 +1,7 @@
 from importlib import metadata

+from langchain_core._api import warn_deprecated
+
 ## Create namespaces for pydantic v1 and v2.
 # This code must stay at the top of the file before other modules may
 # attempt to import pydantic since it adds pydantic_v1 and pydantic_v2 to sys.modules.
@@ -21,3 +23,21 @@ try:
     _PYDANTIC_MAJOR_VERSION: int = int(metadata.version("pydantic").split(".")[0])
 except metadata.PackageNotFoundError:
     _PYDANTIC_MAJOR_VERSION = 0
+
+warn_deprecated(
+    "0.3.0",
+    removal="1.0.0",
+    alternative="pydantic.v1 or pydantic",
+    message=(
+        "As of langchain-core 0.3.0, LangChain uses pydantic v2 internally. "
+        "The langchain.pydantic_v1 module was a "
+        "compatibility shim for pydantic v1, and should no longer be used. "
+        "Please update the code to import from Pydantic directly.\n\n"
+        "For example, replace imports like: "
+        "`from langchain.pydantic_v1 import BaseModel`\n"
+        "with: `from pydantic import BaseModel`\n"
+        "or the v1 compatibility namespace if you are working in a code base "
+        "that has not been fully upgraded to pydantic 2 yet. "
+        "\tfrom pydantic.v1 import BaseModel\n"
+    ),
+)
langchain/pydantic_v1/dataclasses.py
CHANGED
@@ -1,4 +1,24 @@
+from langchain_core._api import warn_deprecated
+
 try:
     from pydantic.v1.dataclasses import *  # noqa: F403
 except ImportError:
     from pydantic.dataclasses import *  # type: ignore # noqa: F403
+
+warn_deprecated(
+    "0.3.0",
+    removal="1.0.0",
+    alternative="pydantic.v1 or pydantic",
+    message=(
+        "As of langchain-core 0.3.0, LangChain uses pydantic v2 internally. "
+        "The langchain.pydantic_v1 module was a "
+        "compatibility shim for pydantic v1, and should no longer be used. "
+        "Please update the code to import from Pydantic directly.\n\n"
+        "For example, replace imports like: "
+        "`from langchain.pydantic_v1 import BaseModel`\n"
+        "with: `from pydantic import BaseModel`\n"
+        "or the v1 compatibility namespace if you are working in a code base "
+        "that has not been fully upgraded to pydantic 2 yet. "
+        "\tfrom pydantic.v1 import BaseModel\n"
+    ),
+)
langchain/pydantic_v1/main.py
CHANGED
@@ -1,4 +1,24 @@
+from langchain_core._api import warn_deprecated
+
 try:
     from pydantic.v1.main import *  # noqa: F403
 except ImportError:
     from pydantic.main import *  # type: ignore # noqa: F403
+
+warn_deprecated(
+    "0.3.0",
+    removal="1.0.0",
+    alternative="pydantic.v1 or pydantic",
+    message=(
+        "As of langchain-core 0.3.0, LangChain uses pydantic v2 internally. "
+        "The langchain.pydantic_v1 module was a "
+        "compatibility shim for pydantic v1, and should no longer be used. "
+        "Please update the code to import from Pydantic directly.\n\n"
+        "For example, replace imports like: "
+        "`from langchain.pydantic_v1 import BaseModel`\n"
+        "with: `from pydantic import BaseModel`\n"
+        "or the v1 compatibility namespace if you are working in a code base "
+        "that has not been fully upgraded to pydantic 2 yet. "
+        "\tfrom pydantic.v1 import BaseModel\n"
+    ),
+)
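All three shim modules now emit the same deprecation warning on import, pointing users at plain pydantic (or its `pydantic.v1` namespace). A sketch of the migration the message asks for, assuming the code base is already on pydantic 2:

    # Deprecated shim import:
    #     from langchain.pydantic_v1 import BaseModel
    # Preferred replacement once the code base targets pydantic 2:
    from pydantic import BaseModel
    # Or, for code still written against the v1 API:
    from pydantic.v1 import BaseModel as BaseModelV1

    class ExampleConfig(BaseModel):  # hypothetical model
        name: str = "example"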
langchain/retrievers/document_compressors/embeddings_filter.py
CHANGED
@@ -36,7 +36,7 @@ class EmbeddingsFilter(BaseDocumentCompressor):
     k: Optional[int] = 20
     """The number of relevant documents to return. Can be set to None, in which case
     `similarity_threshold` must be specified. Defaults to 20."""
-    similarity_threshold: Optional[float]
+    similarity_threshold: Optional[float] = None
     """Threshold for determining when two documents are similar enough
     to be considered redundant. Defaults to None, must be specified if `k` is set
     to None."""
{langchain-0.3.0.dev1.dist-info → langchain-0.3.0.dev2.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: langchain
-Version: 0.3.0.dev1
+Version: 0.3.0.dev2
 Summary: Building applications with LLMs through composability
 Home-page: https://github.com/langchain-ai/langchain
 License: MIT
@@ -15,12 +15,12 @@ Requires-Dist: PyYAML (>=5.3)
 Requires-Dist: SQLAlchemy (>=1.4,<3)
 Requires-Dist: aiohttp (>=3.8.3,<4.0.0)
 Requires-Dist: async-timeout (>=4.0.0,<5.0.0) ; python_version < "3.11"
-Requires-Dist: langchain-core (>=0.3.0.
+Requires-Dist: langchain-core (>=0.3.0.dev5,<0.4.0)
 Requires-Dist: langchain-text-splitters (>=0.3.0.dev1,<0.4.0)
 Requires-Dist: langsmith (>=0.1.17,<0.2.0)
 Requires-Dist: numpy (>=1,<2) ; python_version < "3.12"
 Requires-Dist: numpy (>=1.26.0,<2.0.0) ; python_version >= "3.12"
-Requires-Dist: pydantic (>=
+Requires-Dist: pydantic (>=2.7.4,<3.0.0)
 Requires-Dist: requests (>=2,<3)
 Requires-Dist: tenacity (>=8.1.0,<9.0.0,!=8.4.0)
 Project-URL: Repository, https://github.com/langchain-ai/langchain
{langchain-0.3.0.dev1.dist-info → langchain-0.3.0.dev2.dist-info}/RECORD
CHANGED
@@ -7,7 +7,7 @@ langchain/_api/path.py,sha256=ovJP6Pcf7L_KaKvMMet9G9OzfLTb-sZV2pEw3Tp7o3I,122
 langchain/adapters/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 langchain/adapters/openai.py,sha256=kWvS_DdRtpcc49vDY8zLUo3BrtXA3a89bLJu3Sksvaw,1996
 langchain/agents/__init__.py,sha256=JQJ3VlqRMRpHbjR-pkzy1yowJkdEmsQEPXTptkyHc-o,6282
-langchain/agents/agent.py,sha256=
+langchain/agents/agent.py,sha256=g2LViJuyIryV5y-2W9QdTz-v3S2P750lbmNjfkU3_O8,62361
 langchain/agents/agent_iterator.py,sha256=Zr0aikktn-aotTvoaVGJxXIBIkHidIQGkfQIKvovkxg,16454
 langchain/agents/agent_toolkits/__init__.py,sha256=N0ylx2gzwaOqaoHRXQs9jvYNIzrnTM-2rgjNkCU5UII,7370
 langchain/agents/agent_toolkits/ainetwork/__init__.py,sha256=henfKntuAEjG1KoN-Hk1IHy3fFGCYPWLEuZtF2bIdZI,25
@@ -201,7 +201,7 @@ langchain/callbacks/wandb_callback.py,sha256=mWcDRVTlUnzQGhN2BMiGhPsKw5uyB2qDQ_L
 langchain/callbacks/whylabs_callback.py,sha256=N36XACtHYNgFSSYrNbfXiZ4nxSdwSrIE5e6xwxukrPc,688
 langchain/chains/__init__.py,sha256=xsRWTwsP3mTejfnKTzsTKRwpYT5xthXZAde30M_118U,5092
 langchain/chains/api/__init__.py,sha256=d8xBEQqFVNOMTm4qXNz5YiYkvA827Ayyd4XCG1KP-z4,84
-langchain/chains/api/base.py,sha256=
+langchain/chains/api/base.py,sha256=OBdwr1T1ll4D3uSmEPjSf5jMugwTqM_aeZbGmiracZI,15221
 langchain/chains/api/news_docs.py,sha256=9vzx5nSPwe_cjFV8cemlfMp4EX8wiZe2eXBuRik2Vdg,2452
 langchain/chains/api/open_meteo_docs.py,sha256=8pLSX24K37lcgq3jmgfThcuiz7WY3zkub_V6dtsqc18,3399
 langchain/chains/api/openapi/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -212,7 +212,7 @@ langchain/chains/api/openapi/response_chain.py,sha256=7vHhIF1-3JUgOXeyWb9CAkG0Ji
 langchain/chains/api/podcast_docs.py,sha256=mPW1GrX0X6kaGuGpVYFXNvSoLNoUFse8CaoJSUSa4KU,1920
 langchain/chains/api/prompt.py,sha256=YERLepjWuo2J4wg40DWWfHH4Tsm-9eab-cIllHFxMk4,1031
 langchain/chains/api/tmdb_docs.py,sha256=8yoowa2d53-oytU0dycV-0w9wRe9xOXAPz-s8gQ6EpE,1537
-langchain/chains/base.py,sha256=
+langchain/chains/base.py,sha256=cVee_nkH2qIvsOGi1S5PyUhdXfG4HJ_P1lEbjl0YsS0,30694
 langchain/chains/chat_vector_db/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 langchain/chains/chat_vector_db/prompts.py,sha256=4YM7z5Wi8ftJEVj3ZG8YOcudYwGHCNvQh4Gf_6592yc,694
 langchain/chains/combine_documents/__init__.py,sha256=tJZmkLOD4JGjh9OxkCdTMUzbBCb-47fHLyklQo6ida4,367
@@ -232,10 +232,10 @@ langchain/chains/conversation/base.py,sha256=6pxKkGYDiK_pj0HB_yrSMJgOdjInoO6Ouol
 langchain/chains/conversation/memory.py,sha256=KoKmk5FjPEkioolvmFxcJgRr2wRdWIe1LNBHCtGgUKo,1396
 langchain/chains/conversation/prompt.py,sha256=84xC4dy8yNiCSICT4b6UvZdQXpPifMVw1hf7WnFAVkw,913
 langchain/chains/conversational_retrieval/__init__.py,sha256=hq7jx-kmg3s8qLYnV7gPmzVIPcGqW69H6cXIjklvGjY,49
-langchain/chains/conversational_retrieval/base.py,sha256=
+langchain/chains/conversational_retrieval/base.py,sha256=exiaFjIDLk9VoAf15qhMuBeokmtHP6AYGsm9vcmQ4hM,21053
 langchain/chains/conversational_retrieval/prompts.py,sha256=kJITwauXq7dYKnSBoL2EcDTqAnJZlWF_GzJ9C55ZEv8,720
 langchain/chains/elasticsearch_database/__init__.py,sha256=B3Zxy8mxTb4bfMGHC__26BFkvT_6bPisS4rPIFiFWdU,126
-langchain/chains/elasticsearch_database/base.py,sha256=
+langchain/chains/elasticsearch_database/base.py,sha256=Rw6z9x---84WsVKP2L-YI-VehgP3VtI70kc0BfJv9Js,8248
 langchain/chains/elasticsearch_database/prompts.py,sha256=XTRDvnAMwGLlQh9vE0Ju8Nh39Ro7zjzZg13mY36pzNw,1425
 langchain/chains/ernie_functions/__init__.py,sha256=X_gOa8GIjyV6tAS32A1BLv6q08ufSms-tffwgtSyIDA,1514
 langchain/chains/ernie_functions/base.py,sha256=SGs_-yi0qa7cxgkiu2EsoYQF4_fKQUZkxncrp1KiMbU,1730
@@ -260,7 +260,7 @@ langchain/chains/graph_qa/prompts.py,sha256=dqfI2CSw5xDR3SvIsFSxq2jwOFp-CcGF3WDj
 langchain/chains/graph_qa/sparql.py,sha256=wIAy-nymiftBnW3kExycpGOMyFveD1QBrETlfcnlyuE,665
 langchain/chains/history_aware_retriever.py,sha256=a92vlxlq0PaOubc_b4jj_WwGivk4Tyi1xzSBKaTOx4g,2662
 langchain/chains/hyde/__init__.py,sha256=mZ-cb7slBdlK5aG2R_NegBzNCXToHR-tdmfIIA6lKvQ,75
-langchain/chains/hyde/base.py,sha256=
+langchain/chains/hyde/base.py,sha256=Rc5u4JD3M3CaVsK4PwAVF67ooklcz9H3Fjy5ySoJuyY,3619
 langchain/chains/hyde/prompts.py,sha256=U4LfozneOyHDIKd8rCbnGSQK84YvZqAtpf5EL435Ol8,1913
 langchain/chains/llm.py,sha256=cP1QLNpxAHvdA0AKvQVjlZg8vSc_MafX4q1C_v5GFp4,15504
 langchain/chains/llm_bash/__init__.py,sha256=qvRpa5tj09akj4DLVZoKvWK8-oJrUxc5-7ooAP3mO18,453
@@ -280,21 +280,21 @@ langchain/chains/llm_summarization_checker/prompts/revise_summary.txt,sha256=nSS
 langchain/chains/llm_symbolic_math/__init__.py,sha256=KQ6bFiFMsqs8PNtU-oo6l-czNBBwQUn2rEirz3gt-w8,470
 langchain/chains/loading.py,sha256=57shFurz0r_FDoUSTcD5Hv7cZl4Rr2G2A_gT-p7XHCE,28829
 langchain/chains/mapreduce.py,sha256=QTb-7lxao99O7NySK_xpA5d_5iQZJHJdH1Gcv-EgDCY,4097
-langchain/chains/moderation.py,sha256=
+langchain/chains/moderation.py,sha256=ezUrzTOI6uGynyglpSuGom2gK26bKtkER2UuMG4yJWQ,4427
 langchain/chains/natbot/__init__.py,sha256=ACF2TYNK_CTfvmdLlG5Ry0_j9D6ZfjgfQxmeKe1BAIg,96
 langchain/chains/natbot/base.py,sha256=zl_sf4dgS8dFRYTY83cdaMXq1oqEsB-ddlb7RHx5SUM,5286
 langchain/chains/natbot/crawler.py,sha256=E1mQUEsg8Jj6Eth-LBUcMU-Zc88JEA3a79kMhHkKO08,16050
 langchain/chains/natbot/prompt.py,sha256=zB95SYLG5_12ABFFGDtDi8vVP9DSdPoP8UCjrar_4TI,4989
 langchain/chains/openai_functions/__init__.py,sha256=o8B_I98nFTlFPkF6FPpLyt8pU3EfEPHADHr9xY5V1O0,1489
-langchain/chains/openai_functions/base.py,sha256=
+langchain/chains/openai_functions/base.py,sha256=jfgnAuire9OLOL0kLqKScpjdBEXKYzyNx-Xz0xKArMA,10115
 langchain/chains/openai_functions/citation_fuzzy_match.py,sha256=cd9kh6DKMKS-eCskWFcJmDQLOemne1SMe4pKHbJ-Mvc,5344
-langchain/chains/openai_functions/extraction.py,sha256=
+langchain/chains/openai_functions/extraction.py,sha256=2P99EoAb8iipW8TNJwNG2gUzgpWYSCZAvPU-kgUNfqU,7390
 langchain/chains/openai_functions/openapi.py,sha256=oqNFnLboLyFykkjHGvXR9Bd-7tjx7EjkNZnxXh5ISoc,14954
-langchain/chains/openai_functions/qa_with_structure.py,sha256=
-langchain/chains/openai_functions/tagging.py,sha256=
+langchain/chains/openai_functions/qa_with_structure.py,sha256=3JRjX4ylqF4eaIOZ5umcH1hPYiWsYyH9PVH2nF-NfFw,4851
+langchain/chains/openai_functions/tagging.py,sha256=jY6Rk_dJdYtKP5nrZJ7f-oCsMbBLUivfTVxcreZ7PNI,6524
 langchain/chains/openai_functions/utils.py,sha256=GDhYjszQGut1UcJ-dyPvkwiT8gHOV0IejRuIfN7_fhw,1255
 langchain/chains/openai_tools/__init__.py,sha256=xX0If1Nx_ocEOI56EGxCI0v0RZ1_VUegzyODAj0RLVU,134
-langchain/chains/openai_tools/extraction.py,sha256=
+langchain/chains/openai_tools/extraction.py,sha256=sG8qUQKa7f-6JcbH1OWgpTtuUYV-3B-wBZJTDpp101E,3399
 langchain/chains/prompt_selector.py,sha256=Ual6G-PFeZ5jZkeOXnLCYwffE1CFaOmAIHYu0tim6ps,1997
 langchain/chains/qa_generation/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 langchain/chains/qa_generation/base.py,sha256=obAA1qtCavCO3zkskRtB62is-geDOJG526T1N6tJbts,4187
@@ -336,7 +336,7 @@ langchain/chains/sql_database/__init__.py,sha256=jQotWN4EWMD98Jk-f7rqh5YtbXbP9XX
 langchain/chains/sql_database/prompt.py,sha256=W0xFqVZ18PzxmutnIBJrocXus8_QBByrKtxg8CjGaYw,15458
 langchain/chains/sql_database/query.py,sha256=h-QP5ESatTFj8t7sGsHppXSchy3ZGL1U1afza-Lo8fc,5421
 langchain/chains/structured_output/__init__.py,sha256=-6nFe-gznavFc3XCMv8XkEzuXoto2rI8Q-bcruVPOR8,204
-langchain/chains/structured_output/base.py,sha256=
+langchain/chains/structured_output/base.py,sha256=jsrF_WQe55gVhZzRGSY7DCetdR91IXdkItK_O_IhovA,25461
 langchain/chains/summarize/__init__.py,sha256=mg1lKtH_x-oJ5qvKY6OD7g9kkqbjMVbL3l3OhfozSQM,151
 langchain/chains/summarize/chain.py,sha256=QA3EgTnT067OLm5waUv_3oiI1mS3KD_uvFkHlns-Jxo,6193
 langchain/chains/summarize/map_reduce_prompt.py,sha256=HZSitW2_WhJINN-_YJCzU6zJXbPuMr5zFek31AzutuQ,238
@@ -360,7 +360,7 @@ langchain/chat_models/azure_openai.py,sha256=aRNol2PNC49PmvdZnwjhQeMFRDOOelPNAXz
 langchain/chat_models/azureml_endpoint.py,sha256=6mxXm8UFXataLp0NYRGA88V3DpiNKPo095u_JGj7XGE,863
 langchain/chat_models/baichuan.py,sha256=3-GveFoF5ZNyLdRNK6V4i3EDDjdseOTFWbCMhDbtO9w,643
 langchain/chat_models/baidu_qianfan_endpoint.py,sha256=CZrX2SMpbE9H7wBXNC6rGvw-YqQl9zjuJrClYQxEzuI,715
-langchain/chat_models/base.py,sha256=
+langchain/chat_models/base.py,sha256=oBNuZSGqjjd9aMGNVmP7di14rFr4HMzqu-WFyVhwxB8,31024
 langchain/chat_models/bedrock.py,sha256=HRV3T_0mEnZ8LvJJqAA_UVpt-_03G715oIgomRJw55M,757
 langchain/chat_models/cohere.py,sha256=EYOECHX-nKRhZVfCfmFGZ2lr51PzaB5OvOEqmBCu1fI,633
 langchain/chat_models/databricks.py,sha256=5_QkC5lG4OldaHC2FS0XylirJouyZx1YT95SKwc12M0,653
@@ -797,7 +797,7 @@ langchain/memory/chat_message_histories/streamlit.py,sha256=gGwDE9T3hF3c5ojd-jPL
 langchain/memory/chat_message_histories/upstash_redis.py,sha256=M-sV600Ey7erOjRQTjzT5C_bt2mLw6RcJtX07YnoluQ,724
 langchain/memory/chat_message_histories/xata.py,sha256=mu8boSJYSS5TUp2qj8k210ZnZ2tqjyuRj_SHPH_g4qw,683
 langchain/memory/chat_message_histories/zep.py,sha256=v2dAHGuV1HANCmxsVZSnXZAzRwIgOmwJ4HxvIM74fYM,680
-langchain/memory/combined.py,sha256=
+langchain/memory/combined.py,sha256=poPw4QbtfjlQcZK_xWseF4wXxGWtl6XGCgMdWQaK0fs,2946
 langchain/memory/entity.py,sha256=PSYJ5q8Xb04_uhKvsL00OgTWVY5_Y4cJCzqxIDu6rV0,15893
 langchain/memory/kg.py,sha256=DNerFp7WY8z6igywdH7KAuq3W2O1DVoPMBsGvw5WebQ,645
 langchain/memory/motorhead_memory.py,sha256=OXjtlAQi1ioRXdM3GVcYmReynkKn8Vm1e5TruqecUR8,658
@@ -825,7 +825,7 @@ langchain/output_parsers/list.py,sha256=D35r0U51Xy5wHn-VcWxr97Ftul4UqszmyLetDi4s
 langchain/output_parsers/loading.py,sha256=YD3RZ8TTBVtVTXdV14xpj_RNZqrJgclk9J9fHQI7YIA,702
 langchain/output_parsers/openai_functions.py,sha256=XmqUCySXGsaHugtItczb8K71lrQIfMNYvAofP9ZEF7U,364
 langchain/output_parsers/openai_tools.py,sha256=beZWrEXyOyGMVWJ7lWE7xxEgbfQCuQnHligdxuEQxng,229
-langchain/output_parsers/pandas_dataframe.py,sha256=
+langchain/output_parsers/pandas_dataframe.py,sha256=2M6UNphkRmJ10SS458lLoAp-VqCwqBERdaEcn1-yJvA,6575
 langchain/output_parsers/prompts.py,sha256=zVhB4xjeWW3MKm4ZM8RfIiPUMg06SJAhYVmCa3jCNS8,508
 langchain/output_parsers/pydantic.py,sha256=uxbrfdyPnZxfdDvmuDr3QOmBFMwML3SfMDEmAKqmyvA,99
 langchain/output_parsers/rail_parser.py,sha256=iHmX3ux2jE2k0MsLqe5XCrJ1eQOBBfZtRbRzQoYPTfU,691
@@ -834,7 +834,7 @@ langchain/output_parsers/regex_dict.py,sha256=UK6iL4Hx-q6UlPNEGLAnbh7_8-IwtXY2V1
 langchain/output_parsers/retry.py,sha256=QPLKiY5uSU8QcnfW067qYVCWqvaktsH5ulR0bo0qgoM,10457
 langchain/output_parsers/structured.py,sha256=R38VNhDr-xD9zM30q71h31ApZofi9UaAkMW7xCz6S2U,3147
 langchain/output_parsers/xml.py,sha256=WDHazWjxO-nDAzxkBJrd1tGINVrzo4mH2-Qgqtz9Y2w,93
-langchain/output_parsers/yaml.py,sha256=
+langchain/output_parsers/yaml.py,sha256=jKxg4cBFF6LCfoIexu9Q4M4LX7MQzb7QbMRT4_bZ5Y0,2409
 langchain/prompts/__init__.py,sha256=TrRYiHB4qLiB8Ai4OohIijntIy_Xd5Y76cbZjPxjWNI,3153
 langchain/prompts/base.py,sha256=QATYkT1NM2-QElHrC4qapaOm3FDxDOgPCdJixuziSbM,565
 langchain/prompts/chat.py,sha256=ohOf8VGpdG2FaEBCzSLB0YPdT_8LmBwQGnb1pYVlZFc,1045
@@ -849,9 +849,9 @@ langchain/prompts/loading.py,sha256=i5tFvi3So9-joanAD2rwsp3jZq0nLBCgJ6fO7uFLcPw,
 langchain/prompts/pipeline.py,sha256=vTdOcggYTfRc4VV2ob-19fsU_iSc96USyazS2EKxthk,133
 langchain/prompts/prompt.py,sha256=Q8sBG8MMTlIq_ErEbIsY0dnXkSCthAr8ntpAu3ZR6X8,153
 langchain/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-langchain/pydantic_v1/__init__.py,sha256=
-langchain/pydantic_v1/dataclasses.py,sha256=
-langchain/pydantic_v1/main.py,sha256=
+langchain/pydantic_v1/__init__.py,sha256=DzV2kHeGQa6WfZpdTdCH_AnXW225R2M0HASASDeYEQY,1700
+langchain/pydantic_v1/dataclasses.py,sha256=Zy38SPrzy9fAkiv0ILI54vrAo43AnxWwbhJBTfJV3rs,937
+langchain/pydantic_v1/main.py,sha256=EP5W5rbEMq1WXA3q1P6o22W8Wi--CzKD2xQQRSKxv-s,923
 langchain/python.py,sha256=TxVqzUU1IjM8WSmM73FEw5KxpEWhXG4OKq8sAJ9yJnU,555
 langchain/requests.py,sha256=PezKhBbDty3VXl5vl7K6aKacNFcIvFGy22SgtaW0AYQ,906
 langchain/retrievers/__init__.py,sha256=aEtuBB68EchIUnvcFitbwaLAorbdg06Eyu1m2PgClJI,6661
@@ -875,7 +875,7 @@ langchain/retrievers/document_compressors/chain_filter_prompt.py,sha256=FTQRPiEs
 langchain/retrievers/document_compressors/cohere_rerank.py,sha256=uo9rRozAvgLM9sUEcE929SnnTvE3CHBDiEhf_S0UufQ,4508
 langchain/retrievers/document_compressors/cross_encoder.py,sha256=_Z7SoPSfOUSk-rNIHX2lQgYV0TgVMKf3F9AnTH7EFiM,393
 langchain/retrievers/document_compressors/cross_encoder_rerank.py,sha256=ThgVrX8NeXFzE4eoftBoa1yz-sBJiDb-JISQa9Hep2k,1542
-langchain/retrievers/document_compressors/embeddings_filter.py,sha256=
+langchain/retrievers/document_compressors/embeddings_filter.py,sha256=8gIQY88ycf5BMRtulwedTWLkwAGp5kMEHR_nhXZz1Ms,5193
 langchain/retrievers/document_compressors/flashrank_rerank.py,sha256=Eo86fJ_T2IbEEeCkI_5rb3Ao4gsdenv-_Ukt33MuMko,709
 langchain/retrievers/document_compressors/listwise_rerank.py,sha256=i3dCqXBF27_sHPGxWOlCkVjt4s85QM0ikHZtPp2LpDs,5127
 langchain/retrievers/elastic_search_bm25.py,sha256=eRboOkRQj-_E53gUQIZzxQ1bX0-uEMv7LAQSD7K7Qf8,665
@@ -1335,8 +1335,8 @@ langchain/vectorstores/xata.py,sha256=HW_Oi5Hz8rH2JaUhRNWQ-3hLYmNzD8eAz6K5YqPArm
 langchain/vectorstores/yellowbrick.py,sha256=-lnjGcRE8Q1nEPOTdbKYTw5noS2cy2ce1ePOU804-_o,624
 langchain/vectorstores/zep.py,sha256=RJ2auxoA6uHHLEZknw3_jeFmYJYVt-PWKMBcNMGV6TM,798
 langchain/vectorstores/zilliz.py,sha256=XhPPIUfKPFJw0_svCoBgCnNkkBLoRVVcyuMfOnE5IxU,609
-langchain-0.3.0.
-langchain-0.3.0.
-langchain-0.3.0.
-langchain-0.3.0.
-langchain-0.3.0.
+langchain-0.3.0.dev2.dist-info/LICENSE,sha256=TsZ-TKbmch26hJssqCJhWXyGph7iFLvyFBYAa3stBHg,1067
+langchain-0.3.0.dev2.dist-info/METADATA,sha256=i2ylk8mAsL58w2_ThDHa2g-_frzPaFpm6BjtsaFiMH0,7093
+langchain-0.3.0.dev2.dist-info/WHEEL,sha256=FMvqSimYX_P7y0a7UY-_Mc83r5zkBZsCYPm7Lr0Bsq4,88
+langchain-0.3.0.dev2.dist-info/entry_points.txt,sha256=IgKjoXnkkVC8Nm7ggiFMCNAk01ua6RVTb9cmZTVNm5w,58
+langchain-0.3.0.dev2.dist-info/RECORD,,
{langchain-0.3.0.dev1.dist-info → langchain-0.3.0.dev2.dist-info}/LICENSE
File without changes
{langchain-0.3.0.dev1.dist-info → langchain-0.3.0.dev2.dist-info}/WHEEL
File without changes
{langchain-0.3.0.dev1.dist-info → langchain-0.3.0.dev2.dist-info}/entry_points.txt
File without changes