langchain-0.2.15-py3-none-any.whl → langchain-0.3.0.dev1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of langchain might be problematic (it is a pre-release development build, 0.3.0.dev1); see the registry's advisory page for more details.

Files changed (92):
  1. langchain/agents/agent.py +21 -17
  2. langchain/agents/agent_toolkits/vectorstore/toolkit.py +10 -7
  3. langchain/agents/chat/base.py +1 -1
  4. langchain/agents/conversational/base.py +1 -1
  5. langchain/agents/conversational_chat/base.py +1 -1
  6. langchain/agents/mrkl/base.py +1 -1
  7. langchain/agents/openai_assistant/base.py +8 -7
  8. langchain/agents/openai_functions_agent/base.py +6 -5
  9. langchain/agents/openai_functions_multi_agent/base.py +6 -5
  10. langchain/agents/openai_tools/base.py +8 -3
  11. langchain/agents/react/base.py +1 -1
  12. langchain/agents/self_ask_with_search/base.py +1 -1
  13. langchain/agents/structured_chat/base.py +1 -1
  14. langchain/agents/structured_chat/output_parser.py +1 -1
  15. langchain/chains/api/base.py +13 -11
  16. langchain/chains/base.py +13 -5
  17. langchain/chains/combine_documents/base.py +1 -1
  18. langchain/chains/combine_documents/map_reduce.py +14 -10
  19. langchain/chains/combine_documents/map_rerank.py +17 -14
  20. langchain/chains/combine_documents/reduce.py +5 -3
  21. langchain/chains/combine_documents/refine.py +11 -8
  22. langchain/chains/combine_documents/stuff.py +8 -6
  23. langchain/chains/constitutional_ai/models.py +1 -1
  24. langchain/chains/conversation/base.py +13 -11
  25. langchain/chains/conversational_retrieval/base.py +9 -7
  26. langchain/chains/elasticsearch_database/base.py +10 -8
  27. langchain/chains/flare/base.py +5 -2
  28. langchain/chains/hyde/base.py +5 -3
  29. langchain/chains/llm.py +5 -4
  30. langchain/chains/llm_checker/base.py +8 -6
  31. langchain/chains/llm_math/base.py +8 -6
  32. langchain/chains/llm_summarization_checker/base.py +8 -6
  33. langchain/chains/mapreduce.py +5 -3
  34. langchain/chains/moderation.py +12 -10
  35. langchain/chains/natbot/base.py +8 -6
  36. langchain/chains/openai_functions/base.py +1 -1
  37. langchain/chains/openai_functions/citation_fuzzy_match.py +1 -1
  38. langchain/chains/openai_functions/extraction.py +1 -1
  39. langchain/chains/openai_functions/qa_with_structure.py +1 -1
  40. langchain/chains/openai_tools/extraction.py +1 -1
  41. langchain/chains/prompt_selector.py +1 -1
  42. langchain/chains/qa_generation/base.py +1 -1
  43. langchain/chains/qa_with_sources/base.py +8 -6
  44. langchain/chains/qa_with_sources/retrieval.py +1 -1
  45. langchain/chains/qa_with_sources/vector_db.py +4 -3
  46. langchain/chains/query_constructor/schema.py +5 -4
  47. langchain/chains/retrieval_qa/base.py +12 -9
  48. langchain/chains/router/base.py +5 -3
  49. langchain/chains/router/embedding_router.py +5 -3
  50. langchain/chains/router/llm_router.py +6 -5
  51. langchain/chains/sequential.py +17 -13
  52. langchain/chains/structured_output/base.py +1 -1
  53. langchain/chains/transform.py +1 -1
  54. langchain/chat_models/base.py +1 -1
  55. langchain/evaluation/agents/trajectory_eval_chain.py +4 -3
  56. langchain/evaluation/comparison/eval_chain.py +4 -3
  57. langchain/evaluation/criteria/eval_chain.py +4 -3
  58. langchain/evaluation/embedding_distance/base.py +4 -3
  59. langchain/evaluation/qa/eval_chain.py +7 -4
  60. langchain/evaluation/qa/generate_chain.py +1 -1
  61. langchain/evaluation/scoring/eval_chain.py +4 -3
  62. langchain/evaluation/string_distance/base.py +1 -1
  63. langchain/indexes/vectorstore.py +9 -7
  64. langchain/memory/chat_memory.py +1 -1
  65. langchain/memory/combined.py +1 -1
  66. langchain/memory/entity.py +4 -3
  67. langchain/memory/summary.py +1 -1
  68. langchain/memory/vectorstore.py +1 -1
  69. langchain/memory/vectorstore_token_buffer_memory.py +1 -1
  70. langchain/output_parsers/fix.py +3 -2
  71. langchain/output_parsers/pandas_dataframe.py +1 -1
  72. langchain/output_parsers/retry.py +4 -3
  73. langchain/output_parsers/structured.py +1 -1
  74. langchain/output_parsers/yaml.py +1 -1
  75. langchain/retrievers/contextual_compression.py +4 -2
  76. langchain/retrievers/document_compressors/base.py +4 -2
  77. langchain/retrievers/document_compressors/chain_extract.py +4 -2
  78. langchain/retrievers/document_compressors/chain_filter.py +4 -2
  79. langchain/retrievers/document_compressors/cohere_rerank.py +8 -6
  80. langchain/retrievers/document_compressors/cross_encoder_rerank.py +5 -3
  81. langchain/retrievers/document_compressors/embeddings_filter.py +4 -3
  82. langchain/retrievers/document_compressors/listwise_rerank.py +4 -3
  83. langchain/retrievers/ensemble.py +4 -3
  84. langchain/retrievers/multi_vector.py +5 -4
  85. langchain/retrievers/self_query/base.py +19 -8
  86. langchain/retrievers/time_weighted_retriever.py +4 -3
  87. langchain/smith/evaluation/config.py +7 -5
  88. {langchain-0.2.15.dist-info → langchain-0.3.0.dev1.dist-info}/METADATA +4 -4
  89. {langchain-0.2.15.dist-info → langchain-0.3.0.dev1.dist-info}/RECORD +92 -92
  90. {langchain-0.2.15.dist-info → langchain-0.3.0.dev1.dist-info}/LICENSE +0 -0
  91. {langchain-0.2.15.dist-info → langchain-0.3.0.dev1.dist-info}/WHEEL +0 -0
  92. {langchain-0.2.15.dist-info → langchain-0.3.0.dev1.dist-info}/entry_points.txt +0 -0
@@ -7,6 +7,7 @@ from langchain_core.documents import (
     BaseDocumentTransformer,
     Document,
 )
+from pydantic import ConfigDict


 class DocumentCompressorPipeline(BaseDocumentCompressor):
@@ -15,8 +16,9 @@ class DocumentCompressorPipeline(BaseDocumentCompressor):
     transformers: List[Union[BaseDocumentTransformer, BaseDocumentCompressor]]
     """List of document filters that are chained together and run in sequence."""

-    class Config:
-        arbitrary_types_allowed = True
+    model_config = ConfigDict(
+        arbitrary_types_allowed=True,
+    )

     def compress_documents(
         self,
@@ -11,6 +11,7 @@ from langchain_core.language_models import BaseLanguageModel
 from langchain_core.output_parsers import BaseOutputParser, StrOutputParser
 from langchain_core.prompts import PromptTemplate
 from langchain_core.runnables import Runnable
+from pydantic import ConfigDict

 from langchain.chains.llm import LLMChain
 from langchain.retrievers.document_compressors.base import BaseDocumentCompressor
@@ -56,8 +57,9 @@ class LLMChainExtractor(BaseDocumentCompressor):
     get_input: Callable[[str, Document], dict] = default_get_input
     """Callable for constructing the chain input from the query and a Document."""

-    class Config:
-        arbitrary_types_allowed = True
+    model_config = ConfigDict(
+        arbitrary_types_allowed=True,
+    )

     def compress_documents(
         self,
@@ -9,6 +9,7 @@ from langchain_core.output_parsers import StrOutputParser
 from langchain_core.prompts import BasePromptTemplate, PromptTemplate
 from langchain_core.runnables import Runnable
 from langchain_core.runnables.config import RunnableConfig
+from pydantic import ConfigDict

 from langchain.chains import LLMChain
 from langchain.output_parsers.boolean import BooleanOutputParser
@@ -41,8 +42,9 @@ class LLMChainFilter(BaseDocumentCompressor):
     get_input: Callable[[str, Document], dict] = default_get_input
     """Callable for constructing the chain input from the query and a Document."""

-    class Config:
-        arbitrary_types_allowed = True
+    model_config = ConfigDict(
+        arbitrary_types_allowed=True,
+    )

     def compress_documents(
         self,
@@ -6,8 +6,8 @@ from typing import Any, Dict, List, Optional, Sequence, Union
 from langchain_core._api.deprecation import deprecated
 from langchain_core.callbacks.manager import Callbacks
 from langchain_core.documents import Document
-from langchain_core.pydantic_v1 import root_validator
 from langchain_core.utils import get_from_dict_or_env
+from pydantic import ConfigDict, model_validator

 from langchain.retrievers.document_compressors.base import BaseDocumentCompressor

@@ -30,12 +30,14 @@ class CohereRerank(BaseDocumentCompressor):
     user_agent: str = "langchain"
     """Identifier for the application making the request."""

-    class Config:
-        arbitrary_types_allowed = True
-        extra = "forbid"
+    model_config = ConfigDict(
+        arbitrary_types_allowed=True,
+        extra="forbid",
+    )

-    @root_validator(pre=True)
-    def validate_environment(cls, values: Dict) -> Dict:
+    @model_validator(mode="before")
+    @classmethod
+    def validate_environment(cls, values: Dict) -> Any:
         """Validate that api key and python package exists in environment."""
         if not values.get("client"):
             try:
@@ -5,6 +5,7 @@ from typing import Optional, Sequence

 from langchain_core.callbacks import Callbacks
 from langchain_core.documents import BaseDocumentCompressor, Document
+from pydantic import ConfigDict

 from langchain.retrievers.document_compressors.cross_encoder import BaseCrossEncoder

@@ -18,9 +19,10 @@ class CrossEncoderReranker(BaseDocumentCompressor):
     top_n: int = 3
     """Number of documents to return."""

-    class Config:
-        arbitrary_types_allowed = True
-        extra = "forbid"
+    model_config = ConfigDict(
+        arbitrary_types_allowed=True,
+        extra="forbid",
+    )

     def compress_documents(
         self,
@@ -4,8 +4,8 @@ import numpy as np
 from langchain_core.callbacks.manager import Callbacks
 from langchain_core.documents import Document
 from langchain_core.embeddings import Embeddings
-from langchain_core.pydantic_v1 import Field
 from langchain_core.utils import pre_init
+from pydantic import ConfigDict, Field

 from langchain.retrievers.document_compressors.base import (
     BaseDocumentCompressor,
@@ -41,8 +41,9 @@ class EmbeddingsFilter(BaseDocumentCompressor):
     to be considered redundant. Defaults to None, must be specified if `k` is set
     to None."""

-    class Config:
-        arbitrary_types_allowed = True
+    model_config = ConfigDict(
+        arbitrary_types_allowed=True,
+    )

     @pre_init
     def validate_params(cls, values: Dict) -> Dict:
@@ -6,8 +6,8 @@ from langchain_core.callbacks import Callbacks
 from langchain_core.documents import BaseDocumentCompressor, Document
 from langchain_core.language_models import BaseLanguageModel
 from langchain_core.prompts import BasePromptTemplate, ChatPromptTemplate
-from langchain_core.pydantic_v1 import BaseModel, Field
 from langchain_core.runnables import Runnable, RunnableLambda, RunnablePassthrough
+from pydantic import BaseModel, ConfigDict, Field

 _default_system_tmpl = """{context}

@@ -76,8 +76,9 @@ class LLMListwiseRerank(BaseDocumentCompressor):
     top_n: int = 3
     """Number of documents to return."""

-    class Config:
-        arbitrary_types_allowed = True
+    model_config = ConfigDict(
+        arbitrary_types_allowed=True,
+    )

     def compress_documents(
         self,
@@ -25,7 +25,6 @@ from langchain_core.callbacks import (
 )
 from langchain_core.documents import Document
 from langchain_core.load.dump import dumpd
-from langchain_core.pydantic_v1 import root_validator
 from langchain_core.retrievers import BaseRetriever, RetrieverLike
 from langchain_core.runnables import RunnableConfig
 from langchain_core.runnables.config import ensure_config, patch_config
@@ -33,6 +32,7 @@ from langchain_core.runnables.utils import (
     ConfigurableFieldSpec,
     get_unique_config_specs,
 )
+from pydantic import model_validator

 T = TypeVar("T")
 H = TypeVar("H", bound=Hashable)
@@ -83,8 +83,9 @@ class EnsembleRetriever(BaseRetriever):
             spec for retriever in self.retrievers for spec in retriever.config_specs
         )

-    @root_validator(pre=True)
-    def set_weights(cls, values: Dict[str, Any]) -> Dict[str, Any]:
+    @model_validator(mode="before")
+    @classmethod
+    def set_weights(cls, values: Dict[str, Any]) -> Any:
         if not values.get("weights"):
             n_retrievers = len(values["retrievers"])
             values["weights"] = [1 / n_retrievers] * n_retrievers
@@ -1,15 +1,15 @@
 from enum import Enum
-from typing import Dict, List, Optional
+from typing import Any, Dict, List, Optional

 from langchain_core.callbacks import (
     AsyncCallbackManagerForRetrieverRun,
     CallbackManagerForRetrieverRun,
 )
 from langchain_core.documents import Document
-from langchain_core.pydantic_v1 import Field, root_validator
 from langchain_core.retrievers import BaseRetriever
 from langchain_core.stores import BaseStore, ByteStore
 from langchain_core.vectorstores import VectorStore
+from pydantic import Field, model_validator

 from langchain.storage._lc_store import create_kv_docstore

@@ -41,8 +41,9 @@ class MultiVectorRetriever(BaseRetriever):
     search_type: SearchType = SearchType.similarity
     """Type of search to perform (similarity / mmr)"""

-    @root_validator(pre=True)
-    def shim_docstore(cls, values: Dict) -> Dict:
+    @model_validator(mode="before")
+    @classmethod
+    def shim_docstore(cls, values: Dict) -> Any:
         byte_store = values.get("byte_store")
         docstore = values.get("docstore")
         if byte_store is not None:
@@ -9,11 +9,11 @@ from langchain_core.callbacks.manager import (
 )
 from langchain_core.documents import Document
 from langchain_core.language_models import BaseLanguageModel
-from langchain_core.pydantic_v1 import Field, root_validator
 from langchain_core.retrievers import BaseRetriever
 from langchain_core.runnables import Runnable
 from langchain_core.structured_query import StructuredQuery, Visitor
 from langchain_core.vectorstores import VectorStore
+from pydantic import ConfigDict, Field, model_validator

 from langchain.chains.query_constructor.base import load_query_constructor_runnable
 from langchain.chains.query_constructor.schema import AttributeInfo
@@ -48,6 +48,7 @@ def _get_builtin_translator(vectorstore: VectorStore) -> Visitor:
         MongoDBAtlasTranslator,
     )
     from langchain_community.query_constructors.myscale import MyScaleTranslator
+    from langchain_community.query_constructors.neo4j import Neo4jTranslator
     from langchain_community.query_constructors.opensearch import OpenSearchTranslator
     from langchain_community.query_constructors.pgvector import PGVectorTranslator
     from langchain_community.query_constructors.pinecone import PineconeTranslator
@@ -70,6 +71,7 @@ def _get_builtin_translator(vectorstore: VectorStore) -> Visitor:
         Dingo,
         Milvus,
         MyScale,
+        Neo4jVector,
         OpenSearchVectorSearch,
         PGVector,
         Qdrant,
@@ -111,11 +113,10 @@ def _get_builtin_translator(vectorstore: VectorStore) -> Visitor:
         TimescaleVector: TimescaleVectorTranslator,
         OpenSearchVectorSearch: OpenSearchTranslator,
         CommunityMongoDBAtlasVectorSearch: MongoDBAtlasTranslator,
+        Neo4jVector: Neo4jTranslator,
     }
     if isinstance(vectorstore, DatabricksVectorSearch):
         return DatabricksVectorSearchTranslator()
-    if isinstance(vectorstore, Qdrant):
-        return QdrantTranslator(metadata_key=vectorstore.metadata_payload_key)
     elif isinstance(vectorstore, MyScale):
         return MyScaleTranslator(metadata_key=vectorstore.metadata_column)
     elif isinstance(vectorstore, Redis):
@@ -177,6 +178,14 @@ def _get_builtin_translator(vectorstore: VectorStore) -> Visitor:
     if isinstance(vectorstore, PGVector):
         return NewPGVectorTranslator()

+    try:
+        from langchain_qdrant import QdrantVectorStore
+    except ImportError:
+        pass
+    else:
+        if isinstance(vectorstore, QdrantVectorStore):
+            return QdrantTranslator(metadata_key=vectorstore.metadata_payload_key)
+
     try:
         # Added in langchain-community==0.2.11
         from langchain_community.query_constructors.hanavector import HanaTranslator
@@ -214,12 +223,14 @@ class SelfQueryRetriever(BaseRetriever):
     use_original_query: bool = False
     """Use original query instead of the revised new query from LLM"""

-    class Config:
-        allow_population_by_field_name = True
-        arbitrary_types_allowed = True
+    model_config = ConfigDict(
+        populate_by_name=True,
+        arbitrary_types_allowed=True,
+    )

-    @root_validator(pre=True)
-    def validate_translator(cls, values: Dict) -> Dict:
+    @model_validator(mode="before")
+    @classmethod
+    def validate_translator(cls, values: Dict) -> Any:
         """Validate translator."""
         if "structured_query_translator" not in values:
             values["structured_query_translator"] = _get_builtin_translator(
@@ -7,9 +7,9 @@ from langchain_core.callbacks import (
     CallbackManagerForRetrieverRun,
 )
 from langchain_core.documents import Document
-from langchain_core.pydantic_v1 import Field
 from langchain_core.retrievers import BaseRetriever
 from langchain_core.vectorstores import VectorStore
+from pydantic import ConfigDict, Field


 def _get_hours_passed(time: datetime.datetime, ref_time: datetime.datetime) -> float:
@@ -46,8 +46,9 @@ class TimeWeightedVectorStoreRetriever(BaseRetriever):
     None assigns no salience to documents not fetched from the vector store.
     """

-    class Config:
-        arbitrary_types_allowed = True
+    model_config = ConfigDict(
+        arbitrary_types_allowed=True,
+    )

     def _document_get_date(self, field: str, document: Document) -> datetime.datetime:
         """Return the value of the date field of a document."""
@@ -5,10 +5,10 @@ from typing import Any, Callable, Dict, List, Optional, Sequence, Union
 from langchain_core.embeddings import Embeddings
 from langchain_core.language_models import BaseLanguageModel
 from langchain_core.prompts import BasePromptTemplate
-from langchain_core.pydantic_v1 import BaseModel, Field
 from langsmith import RunEvaluator
 from langsmith.evaluation.evaluator import EvaluationResult, EvaluationResults
 from langsmith.schemas import Example, Run
+from pydantic import BaseModel, ConfigDict, Field

 from langchain.evaluation.criteria.eval_chain import CRITERIA_TYPE
 from langchain.evaluation.embedding_distance.base import (
@@ -156,8 +156,9 @@ class RunEvalConfig(BaseModel):
     eval_llm: Optional[BaseLanguageModel] = None
     """The language model to pass to any evaluators that require one."""

-    class Config:
-        arbitrary_types_allowed = True
+    model_config = ConfigDict(
+        arbitrary_types_allowed=True,
+    )

     class Criteria(SingleKeyEvalConfig):
         """Configuration for a reference-free criteria evaluator.
@@ -217,8 +218,9 @@ class RunEvalConfig(BaseModel):
         embeddings: Optional[Embeddings] = None
         distance_metric: Optional[EmbeddingDistanceEnum] = None

-        class Config:
-            arbitrary_types_allowed = True
+        model_config = ConfigDict(
+            arbitrary_types_allowed=True,
+        )

     class StringDistance(SingleKeyEvalConfig):
         """Configuration for a string distance evaluator.
@@ -1,10 +1,10 @@
 Metadata-Version: 2.1
 Name: langchain
-Version: 0.2.15
+Version: 0.3.0.dev1
 Summary: Building applications with LLMs through composability
 Home-page: https://github.com/langchain-ai/langchain
 License: MIT
-Requires-Python: >=3.8.1,<4.0
+Requires-Python: >=3.9,<4.0
 Classifier: License :: OSI Approved :: MIT License
 Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3.9
@@ -15,8 +15,8 @@ Requires-Dist: PyYAML (>=5.3)
 Requires-Dist: SQLAlchemy (>=1.4,<3)
 Requires-Dist: aiohttp (>=3.8.3,<4.0.0)
 Requires-Dist: async-timeout (>=4.0.0,<5.0.0) ; python_version < "3.11"
-Requires-Dist: langchain-core (>=0.2.35,<0.3.0)
-Requires-Dist: langchain-text-splitters (>=0.2.0,<0.3.0)
+Requires-Dist: langchain-core (>=0.3.0.dev2,<0.4.0)
+Requires-Dist: langchain-text-splitters (>=0.3.0.dev1,<0.4.0)
 Requires-Dist: langsmith (>=0.1.17,<0.2.0)
 Requires-Dist: numpy (>=1,<2) ; python_version < "3.12"
 Requires-Dist: numpy (>=1.26.0,<2.0.0) ; python_version >= "3.12"