langchain 0.2.16__py3-none-any.whl → 0.3.0.dev1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (91)
  1. langchain/agents/agent.py +21 -17
  2. langchain/agents/agent_toolkits/vectorstore/toolkit.py +10 -7
  3. langchain/agents/chat/base.py +1 -1
  4. langchain/agents/conversational/base.py +1 -1
  5. langchain/agents/conversational_chat/base.py +1 -1
  6. langchain/agents/mrkl/base.py +1 -1
  7. langchain/agents/openai_assistant/base.py +8 -7
  8. langchain/agents/openai_functions_agent/base.py +6 -5
  9. langchain/agents/openai_functions_multi_agent/base.py +6 -5
  10. langchain/agents/react/base.py +1 -1
  11. langchain/agents/self_ask_with_search/base.py +1 -1
  12. langchain/agents/structured_chat/base.py +1 -1
  13. langchain/agents/structured_chat/output_parser.py +1 -1
  14. langchain/chains/api/base.py +13 -11
  15. langchain/chains/base.py +13 -5
  16. langchain/chains/combine_documents/base.py +1 -1
  17. langchain/chains/combine_documents/map_reduce.py +14 -10
  18. langchain/chains/combine_documents/map_rerank.py +17 -14
  19. langchain/chains/combine_documents/reduce.py +5 -3
  20. langchain/chains/combine_documents/refine.py +11 -8
  21. langchain/chains/combine_documents/stuff.py +8 -6
  22. langchain/chains/constitutional_ai/models.py +1 -1
  23. langchain/chains/conversation/base.py +13 -11
  24. langchain/chains/conversational_retrieval/base.py +9 -7
  25. langchain/chains/elasticsearch_database/base.py +10 -8
  26. langchain/chains/flare/base.py +1 -1
  27. langchain/chains/hyde/base.py +5 -3
  28. langchain/chains/llm.py +5 -4
  29. langchain/chains/llm_checker/base.py +8 -6
  30. langchain/chains/llm_math/base.py +8 -6
  31. langchain/chains/llm_summarization_checker/base.py +8 -6
  32. langchain/chains/mapreduce.py +5 -3
  33. langchain/chains/moderation.py +4 -3
  34. langchain/chains/natbot/base.py +8 -6
  35. langchain/chains/openai_functions/base.py +1 -1
  36. langchain/chains/openai_functions/citation_fuzzy_match.py +1 -1
  37. langchain/chains/openai_functions/extraction.py +1 -1
  38. langchain/chains/openai_functions/qa_with_structure.py +1 -1
  39. langchain/chains/openai_tools/extraction.py +1 -1
  40. langchain/chains/prompt_selector.py +1 -1
  41. langchain/chains/qa_generation/base.py +1 -1
  42. langchain/chains/qa_with_sources/base.py +8 -6
  43. langchain/chains/qa_with_sources/retrieval.py +1 -1
  44. langchain/chains/qa_with_sources/vector_db.py +4 -3
  45. langchain/chains/query_constructor/schema.py +5 -4
  46. langchain/chains/retrieval_qa/base.py +12 -9
  47. langchain/chains/router/base.py +5 -3
  48. langchain/chains/router/embedding_router.py +5 -3
  49. langchain/chains/router/llm_router.py +6 -5
  50. langchain/chains/sequential.py +17 -13
  51. langchain/chains/structured_output/base.py +1 -1
  52. langchain/chains/transform.py +1 -1
  53. langchain/chat_models/base.py +1 -1
  54. langchain/evaluation/agents/trajectory_eval_chain.py +4 -3
  55. langchain/evaluation/comparison/eval_chain.py +4 -3
  56. langchain/evaluation/criteria/eval_chain.py +4 -3
  57. langchain/evaluation/embedding_distance/base.py +4 -3
  58. langchain/evaluation/qa/eval_chain.py +7 -4
  59. langchain/evaluation/qa/generate_chain.py +1 -1
  60. langchain/evaluation/scoring/eval_chain.py +4 -3
  61. langchain/evaluation/string_distance/base.py +1 -1
  62. langchain/indexes/vectorstore.py +9 -7
  63. langchain/memory/chat_memory.py +1 -1
  64. langchain/memory/combined.py +1 -1
  65. langchain/memory/entity.py +4 -3
  66. langchain/memory/summary.py +1 -1
  67. langchain/memory/vectorstore.py +1 -1
  68. langchain/memory/vectorstore_token_buffer_memory.py +1 -1
  69. langchain/output_parsers/fix.py +3 -2
  70. langchain/output_parsers/pandas_dataframe.py +1 -1
  71. langchain/output_parsers/retry.py +4 -3
  72. langchain/output_parsers/structured.py +1 -1
  73. langchain/output_parsers/yaml.py +1 -1
  74. langchain/retrievers/contextual_compression.py +4 -2
  75. langchain/retrievers/document_compressors/base.py +4 -2
  76. langchain/retrievers/document_compressors/chain_extract.py +4 -2
  77. langchain/retrievers/document_compressors/chain_filter.py +4 -2
  78. langchain/retrievers/document_compressors/cohere_rerank.py +8 -6
  79. langchain/retrievers/document_compressors/cross_encoder_rerank.py +5 -3
  80. langchain/retrievers/document_compressors/embeddings_filter.py +4 -3
  81. langchain/retrievers/document_compressors/listwise_rerank.py +4 -3
  82. langchain/retrievers/ensemble.py +4 -3
  83. langchain/retrievers/multi_vector.py +5 -4
  84. langchain/retrievers/self_query/base.py +8 -6
  85. langchain/retrievers/time_weighted_retriever.py +4 -3
  86. langchain/smith/evaluation/config.py +7 -5
  87. {langchain-0.2.16.dist-info → langchain-0.3.0.dev1.dist-info}/METADATA +4 -4
  88. {langchain-0.2.16.dist-info → langchain-0.3.0.dev1.dist-info}/RECORD +91 -91
  89. {langchain-0.2.16.dist-info → langchain-0.3.0.dev1.dist-info}/LICENSE +0 -0
  90. {langchain-0.2.16.dist-info → langchain-0.3.0.dev1.dist-info}/WHEEL +0 -0
  91. {langchain-0.2.16.dist-info → langchain-0.3.0.dev1.dist-info}/entry_points.txt +0 -0
@@ -5,6 +5,7 @@ from typing import Optional, Sequence
5
5
 
6
6
  from langchain_core.callbacks import Callbacks
7
7
  from langchain_core.documents import BaseDocumentCompressor, Document
8
+ from pydantic import ConfigDict
8
9
 
9
10
  from langchain.retrievers.document_compressors.cross_encoder import BaseCrossEncoder
10
11
 
@@ -18,9 +19,10 @@ class CrossEncoderReranker(BaseDocumentCompressor):
18
19
  top_n: int = 3
19
20
  """Number of documents to return."""
20
21
 
21
- class Config:
22
- arbitrary_types_allowed = True
23
- extra = "forbid"
22
+ model_config = ConfigDict(
23
+ arbitrary_types_allowed=True,
24
+ extra="forbid",
25
+ )
24
26
 
25
27
  def compress_documents(
26
28
  self,
@@ -4,8 +4,8 @@ import numpy as np
4
4
  from langchain_core.callbacks.manager import Callbacks
5
5
  from langchain_core.documents import Document
6
6
  from langchain_core.embeddings import Embeddings
7
- from langchain_core.pydantic_v1 import Field
8
7
  from langchain_core.utils import pre_init
8
+ from pydantic import ConfigDict, Field
9
9
 
10
10
  from langchain.retrievers.document_compressors.base import (
11
11
  BaseDocumentCompressor,
@@ -41,8 +41,9 @@ class EmbeddingsFilter(BaseDocumentCompressor):
41
41
  to be considered redundant. Defaults to None, must be specified if `k` is set
42
42
  to None."""
43
43
 
44
- class Config:
45
- arbitrary_types_allowed = True
44
+ model_config = ConfigDict(
45
+ arbitrary_types_allowed=True,
46
+ )
46
47
 
47
48
  @pre_init
48
49
  def validate_params(cls, values: Dict) -> Dict:
@@ -6,8 +6,8 @@ from langchain_core.callbacks import Callbacks
6
6
  from langchain_core.documents import BaseDocumentCompressor, Document
7
7
  from langchain_core.language_models import BaseLanguageModel
8
8
  from langchain_core.prompts import BasePromptTemplate, ChatPromptTemplate
9
- from langchain_core.pydantic_v1 import BaseModel, Field
10
9
  from langchain_core.runnables import Runnable, RunnableLambda, RunnablePassthrough
10
+ from pydantic import BaseModel, ConfigDict, Field
11
11
 
12
12
  _default_system_tmpl = """{context}
13
13
 
@@ -76,8 +76,9 @@ class LLMListwiseRerank(BaseDocumentCompressor):
76
76
  top_n: int = 3
77
77
  """Number of documents to return."""
78
78
 
79
- class Config:
80
- arbitrary_types_allowed = True
79
+ model_config = ConfigDict(
80
+ arbitrary_types_allowed=True,
81
+ )
81
82
 
82
83
  def compress_documents(
83
84
  self,
@@ -25,7 +25,6 @@ from langchain_core.callbacks import (
25
25
  )
26
26
  from langchain_core.documents import Document
27
27
  from langchain_core.load.dump import dumpd
28
- from langchain_core.pydantic_v1 import root_validator
29
28
  from langchain_core.retrievers import BaseRetriever, RetrieverLike
30
29
  from langchain_core.runnables import RunnableConfig
31
30
  from langchain_core.runnables.config import ensure_config, patch_config
@@ -33,6 +32,7 @@ from langchain_core.runnables.utils import (
33
32
  ConfigurableFieldSpec,
34
33
  get_unique_config_specs,
35
34
  )
35
+ from pydantic import model_validator
36
36
 
37
37
  T = TypeVar("T")
38
38
  H = TypeVar("H", bound=Hashable)
@@ -83,8 +83,9 @@ class EnsembleRetriever(BaseRetriever):
83
83
  spec for retriever in self.retrievers for spec in retriever.config_specs
84
84
  )
85
85
 
86
- @root_validator(pre=True)
87
- def set_weights(cls, values: Dict[str, Any]) -> Dict[str, Any]:
86
+ @model_validator(mode="before")
87
+ @classmethod
88
+ def set_weights(cls, values: Dict[str, Any]) -> Any:
88
89
  if not values.get("weights"):
89
90
  n_retrievers = len(values["retrievers"])
90
91
  values["weights"] = [1 / n_retrievers] * n_retrievers
@@ -1,15 +1,15 @@
1
1
  from enum import Enum
2
- from typing import Dict, List, Optional
2
+ from typing import Any, Dict, List, Optional
3
3
 
4
4
  from langchain_core.callbacks import (
5
5
  AsyncCallbackManagerForRetrieverRun,
6
6
  CallbackManagerForRetrieverRun,
7
7
  )
8
8
  from langchain_core.documents import Document
9
- from langchain_core.pydantic_v1 import Field, root_validator
10
9
  from langchain_core.retrievers import BaseRetriever
11
10
  from langchain_core.stores import BaseStore, ByteStore
12
11
  from langchain_core.vectorstores import VectorStore
12
+ from pydantic import Field, model_validator
13
13
 
14
14
  from langchain.storage._lc_store import create_kv_docstore
15
15
 
@@ -41,8 +41,9 @@ class MultiVectorRetriever(BaseRetriever):
41
41
  search_type: SearchType = SearchType.similarity
42
42
  """Type of search to perform (similarity / mmr)"""
43
43
 
44
- @root_validator(pre=True)
45
- def shim_docstore(cls, values: Dict) -> Dict:
44
+ @model_validator(mode="before")
45
+ @classmethod
46
+ def shim_docstore(cls, values: Dict) -> Any:
46
47
  byte_store = values.get("byte_store")
47
48
  docstore = values.get("docstore")
48
49
  if byte_store is not None:
@@ -9,11 +9,11 @@ from langchain_core.callbacks.manager import (
9
9
  )
10
10
  from langchain_core.documents import Document
11
11
  from langchain_core.language_models import BaseLanguageModel
12
- from langchain_core.pydantic_v1 import Field, root_validator
13
12
  from langchain_core.retrievers import BaseRetriever
14
13
  from langchain_core.runnables import Runnable
15
14
  from langchain_core.structured_query import StructuredQuery, Visitor
16
15
  from langchain_core.vectorstores import VectorStore
16
+ from pydantic import ConfigDict, Field, model_validator
17
17
 
18
18
  from langchain.chains.query_constructor.base import load_query_constructor_runnable
19
19
  from langchain.chains.query_constructor.schema import AttributeInfo
@@ -223,12 +223,14 @@ class SelfQueryRetriever(BaseRetriever):
223
223
  use_original_query: bool = False
224
224
  """Use original query instead of the revised new query from LLM"""
225
225
 
226
- class Config:
227
- allow_population_by_field_name = True
228
- arbitrary_types_allowed = True
226
+ model_config = ConfigDict(
227
+ populate_by_name=True,
228
+ arbitrary_types_allowed=True,
229
+ )
229
230
 
230
- @root_validator(pre=True)
231
- def validate_translator(cls, values: Dict) -> Dict:
231
+ @model_validator(mode="before")
232
+ @classmethod
233
+ def validate_translator(cls, values: Dict) -> Any:
232
234
  """Validate translator."""
233
235
  if "structured_query_translator" not in values:
234
236
  values["structured_query_translator"] = _get_builtin_translator(
@@ -7,9 +7,9 @@ from langchain_core.callbacks import (
7
7
  CallbackManagerForRetrieverRun,
8
8
  )
9
9
  from langchain_core.documents import Document
10
- from langchain_core.pydantic_v1 import Field
11
10
  from langchain_core.retrievers import BaseRetriever
12
11
  from langchain_core.vectorstores import VectorStore
12
+ from pydantic import ConfigDict, Field
13
13
 
14
14
 
15
15
  def _get_hours_passed(time: datetime.datetime, ref_time: datetime.datetime) -> float:
@@ -46,8 +46,9 @@ class TimeWeightedVectorStoreRetriever(BaseRetriever):
46
46
  None assigns no salience to documents not fetched from the vector store.
47
47
  """
48
48
 
49
- class Config:
50
- arbitrary_types_allowed = True
49
+ model_config = ConfigDict(
50
+ arbitrary_types_allowed=True,
51
+ )
51
52
 
52
53
  def _document_get_date(self, field: str, document: Document) -> datetime.datetime:
53
54
  """Return the value of the date field of a document."""
@@ -5,10 +5,10 @@ from typing import Any, Callable, Dict, List, Optional, Sequence, Union
5
5
  from langchain_core.embeddings import Embeddings
6
6
  from langchain_core.language_models import BaseLanguageModel
7
7
  from langchain_core.prompts import BasePromptTemplate
8
- from langchain_core.pydantic_v1 import BaseModel, Field
9
8
  from langsmith import RunEvaluator
10
9
  from langsmith.evaluation.evaluator import EvaluationResult, EvaluationResults
11
10
  from langsmith.schemas import Example, Run
11
+ from pydantic import BaseModel, ConfigDict, Field
12
12
 
13
13
  from langchain.evaluation.criteria.eval_chain import CRITERIA_TYPE
14
14
  from langchain.evaluation.embedding_distance.base import (
@@ -156,8 +156,9 @@ class RunEvalConfig(BaseModel):
156
156
  eval_llm: Optional[BaseLanguageModel] = None
157
157
  """The language model to pass to any evaluators that require one."""
158
158
 
159
- class Config:
160
- arbitrary_types_allowed = True
159
+ model_config = ConfigDict(
160
+ arbitrary_types_allowed=True,
161
+ )
161
162
 
162
163
  class Criteria(SingleKeyEvalConfig):
163
164
  """Configuration for a reference-free criteria evaluator.
@@ -217,8 +218,9 @@ class RunEvalConfig(BaseModel):
217
218
  embeddings: Optional[Embeddings] = None
218
219
  distance_metric: Optional[EmbeddingDistanceEnum] = None
219
220
 
220
- class Config:
221
- arbitrary_types_allowed = True
221
+ model_config = ConfigDict(
222
+ arbitrary_types_allowed=True,
223
+ )
222
224
 
223
225
  class StringDistance(SingleKeyEvalConfig):
224
226
  """Configuration for a string distance evaluator.
@@ -1,10 +1,10 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: langchain
3
- Version: 0.2.16
3
+ Version: 0.3.0.dev1
4
4
  Summary: Building applications with LLMs through composability
5
5
  Home-page: https://github.com/langchain-ai/langchain
6
6
  License: MIT
7
- Requires-Python: >=3.8.1,<4.0
7
+ Requires-Python: >=3.9,<4.0
8
8
  Classifier: License :: OSI Approved :: MIT License
9
9
  Classifier: Programming Language :: Python :: 3
10
10
  Classifier: Programming Language :: Python :: 3.9
@@ -15,8 +15,8 @@ Requires-Dist: PyYAML (>=5.3)
15
15
  Requires-Dist: SQLAlchemy (>=1.4,<3)
16
16
  Requires-Dist: aiohttp (>=3.8.3,<4.0.0)
17
17
  Requires-Dist: async-timeout (>=4.0.0,<5.0.0) ; python_version < "3.11"
18
- Requires-Dist: langchain-core (>=0.2.38,<0.3.0)
19
- Requires-Dist: langchain-text-splitters (>=0.2.0,<0.3.0)
18
+ Requires-Dist: langchain-core (>=0.3.0.dev2,<0.4.0)
19
+ Requires-Dist: langchain-text-splitters (>=0.3.0.dev1,<0.4.0)
20
20
  Requires-Dist: langsmith (>=0.1.17,<0.2.0)
21
21
  Requires-Dist: numpy (>=1,<2) ; python_version < "3.12"
22
22
  Requires-Dist: numpy (>=1.26.0,<2.0.0) ; python_version >= "3.12"