langchain 0.2.11__py3-none-any.whl → 0.2.13__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- langchain/agents/agent.py +5 -9
- langchain/agents/agent_toolkits/vectorstore/base.py +114 -2
- langchain/agents/agent_toolkits/vectorstore/toolkit.py +0 -6
- langchain/agents/chat/output_parser.py +2 -2
- langchain/agents/initialize.py +1 -1
- langchain/agents/loading.py +2 -2
- langchain/agents/mrkl/base.py +1 -1
- langchain/agents/openai_assistant/base.py +2 -2
- langchain/agents/openai_functions_agent/base.py +1 -1
- langchain/agents/openai_functions_multi_agent/base.py +1 -1
- langchain/agents/output_parsers/react_json_single_input.py +2 -2
- langchain/agents/structured_chat/output_parser.py +2 -2
- langchain/chains/__init__.py +1 -0
- langchain/chains/api/base.py +121 -1
- langchain/chains/base.py +0 -2
- langchain/chains/combine_documents/map_reduce.py +2 -4
- langchain/chains/combine_documents/map_rerank.py +4 -6
- langchain/chains/combine_documents/reduce.py +1 -4
- langchain/chains/combine_documents/refine.py +2 -4
- langchain/chains/combine_documents/stuff.py +12 -4
- langchain/chains/conversation/base.py +2 -4
- langchain/chains/conversational_retrieval/base.py +4 -6
- langchain/chains/elasticsearch_database/base.py +16 -20
- langchain/chains/example_generator.py +3 -4
- langchain/chains/flare/base.py +1 -1
- langchain/chains/hyde/base.py +1 -4
- langchain/chains/llm.py +2 -4
- langchain/chains/llm_checker/base.py +12 -4
- langchain/chains/llm_math/base.py +2 -4
- langchain/chains/llm_summarization_checker/base.py +12 -4
- langchain/chains/loading.py +17 -0
- langchain/chains/mapreduce.py +12 -4
- langchain/chains/natbot/base.py +2 -4
- langchain/chains/openai_functions/__init__.py +2 -0
- langchain/chains/openai_functions/citation_fuzzy_match.py +54 -1
- langchain/chains/openai_functions/openapi.py +88 -1
- langchain/chains/openai_functions/qa_with_structure.py +19 -0
- langchain/chains/openai_functions/tagging.py +81 -0
- langchain/chains/qa_with_sources/base.py +21 -4
- langchain/chains/qa_with_sources/loading.py +16 -0
- langchain/chains/query_constructor/base.py +8 -2
- langchain/chains/query_constructor/schema.py +0 -2
- langchain/chains/question_answering/chain.py +15 -0
- langchain/chains/retrieval_qa/base.py +30 -6
- langchain/chains/router/base.py +1 -4
- langchain/chains/router/embedding_router.py +1 -4
- langchain/chains/router/llm_router.py +76 -1
- langchain/chains/router/multi_prompt.py +76 -1
- langchain/chains/sequential.py +3 -7
- langchain/chains/structured_output/base.py +1 -1
- langchain/chat_models/base.py +26 -3
- langchain/evaluation/agents/trajectory_eval_chain.py +3 -6
- langchain/evaluation/comparison/eval_chain.py +2 -4
- langchain/evaluation/criteria/eval_chain.py +2 -4
- langchain/evaluation/embedding_distance/base.py +3 -4
- langchain/evaluation/parsing/json_schema.py +1 -1
- langchain/evaluation/qa/eval_chain.py +2 -7
- langchain/evaluation/schema.py +8 -8
- langchain/evaluation/scoring/eval_chain.py +2 -4
- langchain/evaluation/string_distance/base.py +7 -6
- langchain/hub.py +60 -26
- langchain/indexes/vectorstore.py +3 -7
- langchain/memory/buffer.py +2 -2
- langchain/memory/entity.py +0 -2
- langchain/memory/summary.py +12 -2
- langchain/memory/summary_buffer.py +2 -2
- langchain/output_parsers/combining.py +2 -2
- langchain/output_parsers/enum.py +2 -2
- langchain/output_parsers/fix.py +4 -5
- langchain/output_parsers/retry.py +3 -3
- langchain/retrievers/contextual_compression.py +0 -2
- langchain/retrievers/document_compressors/base.py +0 -2
- langchain/retrievers/document_compressors/chain_filter.py +1 -1
- langchain/retrievers/document_compressors/cohere_rerank.py +2 -4
- langchain/retrievers/document_compressors/cross_encoder_rerank.py +1 -4
- langchain/retrievers/document_compressors/embeddings_filter.py +3 -4
- langchain/retrievers/document_compressors/listwise_rerank.py +1 -1
- langchain/retrievers/multi_query.py +4 -2
- langchain/retrievers/re_phraser.py +1 -1
- langchain/retrievers/self_query/base.py +11 -3
- langchain/retrievers/time_weighted_retriever.py +0 -2
- {langchain-0.2.11.dist-info → langchain-0.2.13.dist-info}/METADATA +2 -2
- {langchain-0.2.11.dist-info → langchain-0.2.13.dist-info}/RECORD +86 -86
- {langchain-0.2.11.dist-info → langchain-0.2.13.dist-info}/LICENSE +0 -0
- {langchain-0.2.11.dist-info → langchain-0.2.13.dist-info}/WHEEL +0 -0
- {langchain-0.2.11.dist-info → langchain-0.2.13.dist-info}/entry_points.txt +0 -0
langchain/output_parsers/fix.py
CHANGED
|
@@ -3,10 +3,9 @@ from __future__ import annotations
|
|
|
3
3
|
from typing import Any, TypeVar, Union
|
|
4
4
|
|
|
5
5
|
from langchain_core.exceptions import OutputParserException
|
|
6
|
-
from langchain_core.language_models import BaseLanguageModel
|
|
7
|
-
from langchain_core.output_parsers import BaseOutputParser
|
|
6
|
+
from langchain_core.output_parsers import BaseOutputParser, StrOutputParser
|
|
8
7
|
from langchain_core.prompts import BasePromptTemplate
|
|
9
|
-
from langchain_core.runnables import RunnableSerializable
|
|
8
|
+
from langchain_core.runnables import Runnable, RunnableSerializable
|
|
10
9
|
from typing_extensions import TypedDict
|
|
11
10
|
|
|
12
11
|
from langchain.output_parsers.prompts import NAIVE_FIX_PROMPT
|
|
@@ -42,7 +41,7 @@ class OutputFixingParser(BaseOutputParser[T]):
|
|
|
42
41
|
@classmethod
|
|
43
42
|
def from_llm(
|
|
44
43
|
cls,
|
|
45
|
-
llm: BaseLanguageModel,
|
|
44
|
+
llm: Runnable,
|
|
46
45
|
parser: BaseOutputParser[T],
|
|
47
46
|
prompt: BasePromptTemplate = NAIVE_FIX_PROMPT,
|
|
48
47
|
max_retries: int = 1,
|
|
@@ -58,7 +57,7 @@ class OutputFixingParser(BaseOutputParser[T]):
|
|
|
58
57
|
Returns:
|
|
59
58
|
OutputFixingParser
|
|
60
59
|
"""
|
|
61
|
-
chain = prompt | llm
|
|
60
|
+
chain = prompt | llm | StrOutputParser()
|
|
62
61
|
return cls(parser=parser, retry_chain=chain, max_retries=max_retries)
|
|
63
62
|
|
|
64
63
|
def parse(self, completion: str) -> T:
|
|
@@ -4,7 +4,7 @@ from typing import Any, TypeVar, Union
|
|
|
4
4
|
|
|
5
5
|
from langchain_core.exceptions import OutputParserException
|
|
6
6
|
from langchain_core.language_models import BaseLanguageModel
|
|
7
|
-
from langchain_core.output_parsers import BaseOutputParser
|
|
7
|
+
from langchain_core.output_parsers import BaseOutputParser, StrOutputParser
|
|
8
8
|
from langchain_core.prompt_values import PromptValue
|
|
9
9
|
from langchain_core.prompts import BasePromptTemplate, PromptTemplate
|
|
10
10
|
from langchain_core.runnables import RunnableSerializable
|
|
@@ -82,7 +82,7 @@ class RetryOutputParser(BaseOutputParser[T]):
|
|
|
82
82
|
Returns:
|
|
83
83
|
RetryOutputParser
|
|
84
84
|
"""
|
|
85
|
-
chain = prompt | llm
|
|
85
|
+
chain = prompt | llm | StrOutputParser()
|
|
86
86
|
return cls(parser=parser, retry_chain=chain, max_retries=max_retries)
|
|
87
87
|
|
|
88
88
|
def parse_with_prompt(self, completion: str, prompt_value: PromptValue) -> T:
|
|
@@ -214,7 +214,7 @@ class RetryWithErrorOutputParser(BaseOutputParser[T]):
|
|
|
214
214
|
Returns:
|
|
215
215
|
A RetryWithErrorOutputParser.
|
|
216
216
|
"""
|
|
217
|
-
chain = prompt | llm
|
|
217
|
+
chain = prompt | llm | StrOutputParser()
|
|
218
218
|
return cls(parser=parser, retry_chain=chain, max_retries=max_retries)
|
|
219
219
|
|
|
220
220
|
def parse_with_prompt(self, completion: str, prompt_value: PromptValue) -> T:
|
|
@@ -16,8 +16,6 @@ class DocumentCompressorPipeline(BaseDocumentCompressor):
|
|
|
16
16
|
"""List of document filters that are chained together and run in sequence."""
|
|
17
17
|
|
|
18
18
|
class Config:
|
|
19
|
-
"""Configuration for this pydantic object."""
|
|
20
|
-
|
|
21
19
|
arbitrary_types_allowed = True
|
|
22
20
|
|
|
23
21
|
def compress_documents(
|
|
@@ -104,7 +104,7 @@ class LLMChainFilter(BaseDocumentCompressor):
|
|
|
104
104
|
Args:
|
|
105
105
|
llm: The language model to use for filtering.
|
|
106
106
|
prompt: The prompt to use for the filter.
|
|
107
|
-
|
|
107
|
+
kwargs: Additional arguments to pass to the constructor.
|
|
108
108
|
|
|
109
109
|
Returns:
|
|
110
110
|
A LLMChainFilter that uses the given language model.
|
|
@@ -6,7 +6,7 @@ from typing import Any, Dict, List, Optional, Sequence, Union
|
|
|
6
6
|
from langchain_core._api.deprecation import deprecated
|
|
7
7
|
from langchain_core.callbacks.manager import Callbacks
|
|
8
8
|
from langchain_core.documents import Document
|
|
9
|
-
from langchain_core.pydantic_v1 import Extra, root_validator
|
|
9
|
+
from langchain_core.pydantic_v1 import root_validator
|
|
10
10
|
from langchain_core.utils import get_from_dict_or_env
|
|
11
11
|
|
|
12
12
|
from langchain.retrievers.document_compressors.base import BaseDocumentCompressor
|
|
@@ -31,10 +31,8 @@ class CohereRerank(BaseDocumentCompressor):
|
|
|
31
31
|
"""Identifier for the application making the request."""
|
|
32
32
|
|
|
33
33
|
class Config:
|
|
34
|
-
"""Configuration for this pydantic object."""
|
|
35
|
-
|
|
36
|
-
extra = Extra.forbid
|
|
37
34
|
arbitrary_types_allowed = True
|
|
35
|
+
extra = "forbid"
|
|
38
36
|
|
|
39
37
|
@root_validator(pre=True)
|
|
40
38
|
def validate_environment(cls, values: Dict) -> Dict:
|
|
@@ -5,7 +5,6 @@ from typing import Optional, Sequence
|
|
|
5
5
|
|
|
6
6
|
from langchain_core.callbacks import Callbacks
|
|
7
7
|
from langchain_core.documents import BaseDocumentCompressor, Document
|
|
8
|
-
from langchain_core.pydantic_v1 import Extra
|
|
9
8
|
|
|
10
9
|
from langchain.retrievers.document_compressors.cross_encoder import BaseCrossEncoder
|
|
11
10
|
|
|
@@ -20,10 +19,8 @@ class CrossEncoderReranker(BaseDocumentCompressor):
|
|
|
20
19
|
"""Number of documents to return."""
|
|
21
20
|
|
|
22
21
|
class Config:
|
|
23
|
-
"""Configuration for this pydantic object."""
|
|
24
|
-
|
|
25
|
-
extra = Extra.forbid
|
|
26
22
|
arbitrary_types_allowed = True
|
|
23
|
+
extra = "forbid"
|
|
27
24
|
|
|
28
25
|
def compress_documents(
|
|
29
26
|
self,
|
|
@@ -4,7 +4,8 @@ import numpy as np
|
|
|
4
4
|
from langchain_core.callbacks.manager import Callbacks
|
|
5
5
|
from langchain_core.documents import Document
|
|
6
6
|
from langchain_core.embeddings import Embeddings
|
|
7
|
-
from langchain_core.pydantic_v1 import Field, root_validator
|
|
7
|
+
from langchain_core.pydantic_v1 import Field
|
|
8
|
+
from langchain_core.utils import pre_init
|
|
8
9
|
|
|
9
10
|
from langchain.retrievers.document_compressors.base import (
|
|
10
11
|
BaseDocumentCompressor,
|
|
@@ -41,11 +42,9 @@ class EmbeddingsFilter(BaseDocumentCompressor):
|
|
|
41
42
|
to None."""
|
|
42
43
|
|
|
43
44
|
class Config:
|
|
44
|
-
"""Configuration for this pydantic object."""
|
|
45
|
-
|
|
46
45
|
arbitrary_types_allowed = True
|
|
47
46
|
|
|
48
|
-
@root_validator(pre=True)
|
|
47
|
+
@pre_init
|
|
49
48
|
def validate_params(cls, values: Dict) -> Dict:
|
|
50
49
|
"""Validate similarity parameters."""
|
|
51
50
|
if values["k"] is None and values["similarity_threshold"] is None:
|
|
@@ -105,7 +105,7 @@ class LLMListwiseRerank(BaseDocumentCompressor):
|
|
|
105
105
|
llm: The language model to use for filtering. **Must implement
|
|
106
106
|
BaseLanguageModel.with_structured_output().**
|
|
107
107
|
prompt: The prompt to use for the filter.
|
|
108
|
-
|
|
108
|
+
kwargs: Additional arguments to pass to the constructor.
|
|
109
109
|
|
|
110
110
|
Returns:
|
|
111
111
|
A LLMListwiseRerank document compressor that uses the given language model.
|
|
@@ -72,6 +72,8 @@ class MultiQueryRetriever(BaseRetriever):
|
|
|
72
72
|
Args:
|
|
73
73
|
retriever: retriever to query documents from
|
|
74
74
|
llm: llm for query generation using DEFAULT_QUERY_PROMPT
|
|
75
|
+
prompt: The prompt which aims to generate several different versions
|
|
76
|
+
of the given user query
|
|
75
77
|
include_original: Whether to include the original query in the list of
|
|
76
78
|
generated queries.
|
|
77
79
|
|
|
@@ -95,7 +97,7 @@ class MultiQueryRetriever(BaseRetriever):
|
|
|
95
97
|
"""Get relevant documents given a user query.
|
|
96
98
|
|
|
97
99
|
Args:
|
|
98
|
-
|
|
100
|
+
query: user query
|
|
99
101
|
|
|
100
102
|
Returns:
|
|
101
103
|
Unique union of relevant documents from all generated queries
|
|
@@ -158,7 +160,7 @@ class MultiQueryRetriever(BaseRetriever):
|
|
|
158
160
|
"""Get relevant documents given a user query.
|
|
159
161
|
|
|
160
162
|
Args:
|
|
161
|
-
|
|
163
|
+
query: user query
|
|
162
164
|
|
|
163
165
|
Returns:
|
|
164
166
|
Unique union of relevant documents from all generated queries
|
|
@@ -64,7 +64,7 @@ class RePhraseQueryRetriever(BaseRetriever):
|
|
|
64
64
|
*,
|
|
65
65
|
run_manager: CallbackManagerForRetrieverRun,
|
|
66
66
|
) -> List[Document]:
|
|
67
|
-
"""Get
|
|
67
|
+
"""Get relevant documents given a user question.
|
|
68
68
|
|
|
69
69
|
Args:
|
|
70
70
|
query: user question
|
|
@@ -177,6 +177,16 @@ def _get_builtin_translator(vectorstore: VectorStore) -> Visitor:
|
|
|
177
177
|
if isinstance(vectorstore, PGVector):
|
|
178
178
|
return NewPGVectorTranslator()
|
|
179
179
|
|
|
180
|
+
try:
|
|
181
|
+
# Added in langchain-community==0.2.11
|
|
182
|
+
from langchain_community.query_constructors.hanavector import HanaTranslator
|
|
183
|
+
from langchain_community.vectorstores import HanaDB
|
|
184
|
+
except ImportError:
|
|
185
|
+
pass
|
|
186
|
+
else:
|
|
187
|
+
if isinstance(vectorstore, HanaDB):
|
|
188
|
+
return HanaTranslator()
|
|
189
|
+
|
|
180
190
|
raise ValueError(
|
|
181
191
|
f"Self query retriever with Vector Store type {vectorstore.__class__}"
|
|
182
192
|
f" not supported."
|
|
@@ -205,10 +215,8 @@ class SelfQueryRetriever(BaseRetriever):
|
|
|
205
215
|
"""Use original query instead of the revised new query from LLM"""
|
|
206
216
|
|
|
207
217
|
class Config:
|
|
208
|
-
"""Configuration for this pydantic object."""
|
|
209
|
-
|
|
210
|
-
arbitrary_types_allowed = True
|
|
211
218
|
allow_population_by_field_name = True
|
|
219
|
+
arbitrary_types_allowed = True
|
|
212
220
|
|
|
213
221
|
@root_validator(pre=True)
|
|
214
222
|
def validate_translator(cls, values: Dict) -> Dict:
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.1
|
|
2
2
|
Name: langchain
|
|
3
|
-
Version: 0.2.11
|
|
3
|
+
Version: 0.2.13
|
|
4
4
|
Summary: Building applications with LLMs through composability
|
|
5
5
|
Home-page: https://github.com/langchain-ai/langchain
|
|
6
6
|
License: MIT
|
|
@@ -15,7 +15,7 @@ Requires-Dist: PyYAML (>=5.3)
|
|
|
15
15
|
Requires-Dist: SQLAlchemy (>=1.4,<3)
|
|
16
16
|
Requires-Dist: aiohttp (>=3.8.3,<4.0.0)
|
|
17
17
|
Requires-Dist: async-timeout (>=4.0.0,<5.0.0) ; python_version < "3.11"
|
|
18
|
-
Requires-Dist: langchain-core (>=0.2.23,<0.3.0)
|
|
18
|
+
Requires-Dist: langchain-core (>=0.2.30,<0.3.0)
|
|
19
19
|
Requires-Dist: langchain-text-splitters (>=0.2.0,<0.3.0)
|
|
20
20
|
Requires-Dist: langsmith (>=0.1.17,<0.2.0)
|
|
21
21
|
Requires-Dist: numpy (>=1,<2) ; python_version < "3.12"
|