langchain 0.2.12__py3-none-any.whl → 0.2.14__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as published to one of the supported public registries. It is provided for informational purposes only and reflects the packages exactly as they appear in those registries.
Files changed (98)
  1. langchain/_api/module_import.py +2 -2
  2. langchain/agents/__init__.py +4 -3
  3. langchain/agents/agent.py +7 -11
  4. langchain/agents/agent_toolkits/__init__.py +1 -1
  5. langchain/agents/agent_toolkits/vectorstore/base.py +114 -2
  6. langchain/agents/agent_toolkits/vectorstore/toolkit.py +2 -7
  7. langchain/agents/agent_types.py +1 -1
  8. langchain/agents/chat/base.py +1 -1
  9. langchain/agents/conversational/base.py +1 -1
  10. langchain/agents/conversational_chat/base.py +1 -1
  11. langchain/agents/initialize.py +2 -2
  12. langchain/agents/json_chat/base.py +1 -1
  13. langchain/agents/loading.py +4 -4
  14. langchain/agents/mrkl/base.py +4 -4
  15. langchain/agents/openai_assistant/base.py +2 -2
  16. langchain/agents/openai_functions_agent/base.py +2 -2
  17. langchain/agents/openai_functions_multi_agent/base.py +2 -2
  18. langchain/agents/react/agent.py +1 -1
  19. langchain/agents/react/base.py +4 -4
  20. langchain/agents/self_ask_with_search/base.py +2 -2
  21. langchain/agents/structured_chat/base.py +3 -2
  22. langchain/agents/tools.py +2 -2
  23. langchain/agents/xml/base.py +2 -2
  24. langchain/chains/__init__.py +1 -0
  25. langchain/chains/api/base.py +121 -1
  26. langchain/chains/base.py +5 -7
  27. langchain/chains/combine_documents/map_reduce.py +2 -4
  28. langchain/chains/combine_documents/map_rerank.py +4 -6
  29. langchain/chains/combine_documents/reduce.py +1 -4
  30. langchain/chains/combine_documents/refine.py +2 -4
  31. langchain/chains/combine_documents/stuff.py +12 -4
  32. langchain/chains/conversation/base.py +2 -4
  33. langchain/chains/conversational_retrieval/base.py +5 -7
  34. langchain/chains/elasticsearch_database/base.py +16 -20
  35. langchain/chains/example_generator.py +3 -4
  36. langchain/chains/flare/base.py +1 -1
  37. langchain/chains/hyde/base.py +1 -4
  38. langchain/chains/llm.py +2 -4
  39. langchain/chains/llm_checker/base.py +12 -4
  40. langchain/chains/llm_math/base.py +2 -4
  41. langchain/chains/llm_summarization_checker/base.py +12 -4
  42. langchain/chains/loading.py +17 -0
  43. langchain/chains/mapreduce.py +12 -4
  44. langchain/chains/natbot/base.py +2 -4
  45. langchain/chains/openai_functions/__init__.py +2 -0
  46. langchain/chains/openai_functions/base.py +2 -2
  47. langchain/chains/openai_functions/citation_fuzzy_match.py +54 -1
  48. langchain/chains/openai_functions/extraction.py +2 -2
  49. langchain/chains/openai_functions/openapi.py +88 -1
  50. langchain/chains/openai_functions/qa_with_structure.py +19 -0
  51. langchain/chains/openai_functions/tagging.py +81 -0
  52. langchain/chains/openai_tools/extraction.py +1 -1
  53. langchain/chains/qa_with_sources/base.py +21 -4
  54. langchain/chains/qa_with_sources/loading.py +16 -0
  55. langchain/chains/query_constructor/base.py +8 -2
  56. langchain/chains/query_constructor/schema.py +0 -2
  57. langchain/chains/question_answering/chain.py +15 -0
  58. langchain/chains/retrieval_qa/base.py +30 -6
  59. langchain/chains/router/base.py +1 -4
  60. langchain/chains/router/embedding_router.py +1 -4
  61. langchain/chains/router/llm_router.py +76 -1
  62. langchain/chains/router/multi_prompt.py +76 -1
  63. langchain/chains/sequential.py +3 -7
  64. langchain/chains/structured_output/base.py +3 -3
  65. langchain/chat_models/base.py +8 -10
  66. langchain/evaluation/agents/trajectory_eval_chain.py +2 -4
  67. langchain/evaluation/comparison/eval_chain.py +2 -4
  68. langchain/evaluation/criteria/eval_chain.py +2 -4
  69. langchain/evaluation/embedding_distance/base.py +0 -2
  70. langchain/evaluation/parsing/json_schema.py +1 -1
  71. langchain/evaluation/qa/eval_chain.py +2 -7
  72. langchain/evaluation/schema.py +8 -8
  73. langchain/evaluation/scoring/eval_chain.py +2 -4
  74. langchain/evaluation/string_distance/base.py +4 -4
  75. langchain/hub.py +60 -26
  76. langchain/indexes/vectorstore.py +3 -7
  77. langchain/memory/entity.py +0 -2
  78. langchain/memory/summary.py +9 -0
  79. langchain/output_parsers/retry.py +1 -1
  80. langchain/retrievers/contextual_compression.py +0 -2
  81. langchain/retrievers/document_compressors/base.py +0 -2
  82. langchain/retrievers/document_compressors/chain_filter.py +1 -1
  83. langchain/retrievers/document_compressors/cohere_rerank.py +3 -5
  84. langchain/retrievers/document_compressors/cross_encoder_rerank.py +1 -4
  85. langchain/retrievers/document_compressors/embeddings_filter.py +0 -2
  86. langchain/retrievers/document_compressors/listwise_rerank.py +1 -1
  87. langchain/retrievers/multi_query.py +4 -2
  88. langchain/retrievers/re_phraser.py +1 -1
  89. langchain/retrievers/self_query/base.py +1 -3
  90. langchain/retrievers/time_weighted_retriever.py +0 -2
  91. langchain/tools/__init__.py +14 -5
  92. langchain/tools/render.py +0 -2
  93. langchain/tools/retriever.py +0 -4
  94. {langchain-0.2.12.dist-info → langchain-0.2.14.dist-info}/METADATA +2 -2
  95. {langchain-0.2.12.dist-info → langchain-0.2.14.dist-info}/RECORD +98 -98
  96. {langchain-0.2.12.dist-info → langchain-0.2.14.dist-info}/LICENSE +0 -0
  97. {langchain-0.2.12.dist-info → langchain-0.2.14.dist-info}/WHEEL +0 -0
  98. {langchain-0.2.12.dist-info → langchain-0.2.14.dist-info}/entry_points.txt +0 -0
langchain/hub.py CHANGED
@@ -3,27 +3,37 @@
 from __future__ import annotations
 
 import json
-from typing import TYPE_CHECKING, Any, Optional
+from typing import Any, Optional, Sequence
 
 from langchain_core.load.dump import dumps
 from langchain_core.load.load import loads
 from langchain_core.prompts import BasePromptTemplate
 
-if TYPE_CHECKING:
-    from langchainhub import Client
 
-
-def _get_client(api_url: Optional[str] = None, api_key: Optional[str] = None) -> Client:
+def _get_client(
+    api_key: Optional[str] = None,
+    api_url: Optional[str] = None,
+) -> Any:
     try:
-        from langchainhub import Client
-    except ImportError as e:
-        raise ImportError(
-            "Could not import langchainhub, please install with `pip install "
-            "langchainhub`."
-        ) from e
+        from langsmith import Client as LangSmithClient
+
+        ls_client = LangSmithClient(api_url, api_key=api_key)
+        if hasattr(ls_client, "push_prompt") and hasattr(ls_client, "pull_prompt"):
+            return ls_client
+        else:
+            from langchainhub import Client as LangChainHubClient
 
-    # Client logic will also attempt to load URL/key from environment variables
-    return Client(api_url, api_key=api_key)
+            return LangChainHubClient(api_url, api_key=api_key)
+    except ImportError:
+        try:
+            from langchainhub import Client as LangChainHubClient
+
+            return LangChainHubClient(api_url, api_key=api_key)
+        except ImportError as e:
+            raise ImportError(
+                "Could not import langsmith or langchainhub (deprecated),"
+                "please install with `pip install langsmith`."
+            ) from e
 
 
 def push(
@@ -32,27 +42,43 @@ def push(
     *,
     api_url: Optional[str] = None,
     api_key: Optional[str] = None,
-    parent_commit_hash: Optional[str] = "latest",
-    new_repo_is_public: bool = True,
-    new_repo_description: str = "",
+    parent_commit_hash: Optional[str] = None,
+    new_repo_is_public: bool = False,
+    new_repo_description: Optional[str] = None,
+    readme: Optional[str] = None,
+    tags: Optional[Sequence[str]] = None,
 ) -> str:
     """
     Push an object to the hub and returns the URL it can be viewed at in a browser.
 
-    :param repo_full_name: The full name of the repo to push to in the format of
-        `owner/repo`.
+    :param repo_full_name: The full name of the prompt to push to in the format of
+        `owner/prompt_name` or `prompt_name`.
     :param object: The LangChain to serialize and push to the hub.
     :param api_url: The URL of the LangChain Hub API. Defaults to the hosted API service
         if you have an api key set, or a localhost instance if not.
     :param api_key: The API key to use to authenticate with the LangChain Hub API.
     :param parent_commit_hash: The commit hash of the parent commit to push to. Defaults
         to the latest commit automatically.
-    :param new_repo_is_public: Whether the repo should be public. Defaults to
-        True (Public by default).
-    :param new_repo_description: The description of the repo. Defaults to an empty
+    :param new_repo_is_public: Whether the prompt should be public. Defaults to
+        False (Private by default).
+    :param new_repo_description: The description of the prompt. Defaults to an empty
        string.
     """
-    client = _get_client(api_url=api_url, api_key=api_key)
+    client = _get_client(api_key=api_key, api_url=api_url)
+
+    # Then it's langsmith
+    if hasattr(client, "push_prompt"):
+        return client.push_prompt(
+            repo_full_name,
+            object=object,
+            parent_commit_hash=parent_commit_hash,
+            is_public=new_repo_is_public,
+            description=new_repo_description,
+            readme=readme,
+            tags=tags,
+        )
+
+    # Then it's langchainhub
     manifest_json = dumps(object)
     message = client.push(
         repo_full_name,
@@ -67,20 +93,28 @@ def push(
 def pull(
     owner_repo_commit: str,
     *,
+    include_model: Optional[bool] = None,
     api_url: Optional[str] = None,
     api_key: Optional[str] = None,
 ) -> Any:
     """
     Pull an object from the hub and returns it as a LangChain object.
 
-    :param owner_repo_commit: The full name of the repo to pull from in the format of
-        `owner/repo:commit_hash`.
+    :param owner_repo_commit: The full name of the prompt to pull from in the format of
+        `owner/prompt_name:commit_hash` or `owner/prompt_name`
+        or just `prompt_name` if it's your own prompt.
     :param api_url: The URL of the LangChain Hub API. Defaults to the hosted API service
         if you have an api key set, or a localhost instance if not.
     :param api_key: The API key to use to authenticate with the LangChain Hub API.
     """
-    client = _get_client(api_url=api_url, api_key=api_key)
+    client = _get_client(api_key=api_key, api_url=api_url)
+
+    # Then it's langsmith
+    if hasattr(client, "pull_prompt"):
+        response = client.pull_prompt(owner_repo_commit, include_model=include_model)
+        return response
 
+    # Then it's langchainhub
     if hasattr(client, "pull_repo"):
         # >= 0.1.15
         res_dict = client.pull_repo(owner_repo_commit)
@@ -93,6 +127,6 @@ def pull(
         obj.metadata["lc_hub_commit_hash"] = res_dict["commit_hash"]
         return obj
 
-    # Then it's < 0.1.15
+    # Then it's < 0.1.15 langchainhub
     resp: str = client.pull(owner_repo_commit)
     return loads(resp)
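A minimal usage sketch of the updated helpers, assuming `langsmith` is installed; the prompt name, template text, and tag are illustrative, not values from the package:

from langchain import hub
from langchain_core.prompts import ChatPromptTemplate

prompt = ChatPromptTemplate.from_template("Summarize the following text:\n\n{text}")

# push() now defaults to a private prompt (new_repo_is_public=False) and accepts
# readme/tags, which are forwarded to langsmith's push_prompt when available.
url = hub.push("my-summarizer", prompt, tags=["summarization"])

# pull() forwards include_model to langsmith's pull_prompt; with the older
# langchainhub client it falls back to pull_repo()/pull() as before.
pulled = hub.pull("my-summarizer", include_model=False)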
langchain/indexes/vectorstore.py CHANGED
@@ -4,7 +4,7 @@ from langchain_core.document_loaders import BaseLoader
 from langchain_core.documents import Document
 from langchain_core.embeddings import Embeddings
 from langchain_core.language_models import BaseLanguageModel
-from langchain_core.pydantic_v1 import BaseModel, Extra, Field
+from langchain_core.pydantic_v1 import BaseModel, Field
 from langchain_core.vectorstores import VectorStore
 from langchain_text_splitters import RecursiveCharacterTextSplitter, TextSplitter
 
@@ -22,10 +22,8 @@ class VectorStoreIndexWrapper(BaseModel):
     vectorstore: VectorStore
 
     class Config:
-        """Configuration for this pydantic object."""
-
-        extra = Extra.forbid
         arbitrary_types_allowed = True
+        extra = "forbid"
 
     def query(
         self,
@@ -145,10 +143,8 @@ class VectorstoreIndexCreator(BaseModel):
     vectorstore_kwargs: dict = Field(default_factory=dict)
 
     class Config:
-        """Configuration for this pydantic object."""
-
-        extra = Extra.forbid
         arbitrary_types_allowed = True
+        extra = "forbid"
 
     def from_loaders(self, loaders: List[BaseLoader]) -> VectorStoreIndexWrapper:
         """Create a vectorstore index from loaders."""
langchain/memory/entity.py CHANGED
@@ -246,8 +246,6 @@ class SQLiteEntityStore(BaseEntityStore):
     conn: Any = None
 
     class Config:
-        """Configuration for this pydantic object."""
-
         arbitrary_types_allowed = True
 
     def __init__(
langchain/memory/summary.py CHANGED
@@ -2,6 +2,7 @@ from __future__ import annotations
 
 from typing import Any, Dict, List, Type
 
+from langchain_core._api import deprecated
 from langchain_core.chat_history import BaseChatMessageHistory
 from langchain_core.language_models import BaseLanguageModel
 from langchain_core.messages import BaseMessage, SystemMessage, get_buffer_string
@@ -14,6 +15,14 @@ from langchain.memory.chat_memory import BaseChatMemory
 from langchain.memory.prompt import SUMMARY_PROMPT
 
 
+@deprecated(
+    since="0.2.12",
+    removal="1.0",
+    message=(
+        "Refer here for how to incorporate summaries of conversation history: "
+        "https://langchain-ai.github.io/langgraph/how-tos/memory/add-summary-conversation-history/"  # noqa: E501
+    ),
+)
 class SummarizerMixin(BaseModel):
     """Mixin for summarizer."""
 
langchain/output_parsers/retry.py CHANGED
@@ -214,7 +214,7 @@ class RetryWithErrorOutputParser(BaseOutputParser[T]):
         Returns:
             A RetryWithErrorOutputParser.
         """
-        chain = prompt | llm
+        chain = prompt | llm | StrOutputParser()
         return cls(parser=parser, retry_chain=chain, max_retries=max_retries)
 
     def parse_with_prompt(self, completion: str, prompt_value: PromptValue) -> T:
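The retry chain now ends in a `StrOutputParser`, so chat-model output is reduced to plain text before the wrapped parser retries. A sketch of the same composition in isolation; the prompt wording and fake model response are placeholders, not the library's actual retry prompt:

from langchain_core.language_models.fake_chat_models import FakeListChatModel
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate

# Placeholder retry prompt with the same variable names the retry parsers use.
prompt = ChatPromptTemplate.from_template(
    "Prompt:\n{prompt}\nCompletion:\n{completion}\nError:\n{error}\nFix the completion:"
)
llm = FakeListChatModel(responses=['{"action": "search"}'])  # stand-in for a real chat model

# Without StrOutputParser the chain returns an AIMessage; with it, a plain str,
# which is what the wrapped parser expects when it retries.
retry_chain = prompt | llm | StrOutputParser()
fixed: str = retry_chain.invoke(
    {"prompt": "...", "completion": "bad json", "error": "Invalid JSON"}
)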
langchain/retrievers/contextual_compression.py CHANGED
@@ -22,8 +22,6 @@ class ContextualCompressionRetriever(BaseRetriever):
     """Base Retriever to use for getting relevant documents."""
 
     class Config:
-        """Configuration for this pydantic object."""
-
         arbitrary_types_allowed = True
 
     def _get_relevant_documents(
langchain/retrievers/document_compressors/base.py CHANGED
@@ -16,8 +16,6 @@ class DocumentCompressorPipeline(BaseDocumentCompressor):
     """List of document filters that are chained together and run in sequence."""
 
     class Config:
-        """Configuration for this pydantic object."""
-
         arbitrary_types_allowed = True
 
     def compress_documents(
langchain/retrievers/document_compressors/chain_filter.py CHANGED
@@ -104,7 +104,7 @@ class LLMChainFilter(BaseDocumentCompressor):
         Args:
             llm: The language model to use for filtering.
             prompt: The prompt to use for the filter.
-            **kwargs: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
 
         Returns:
             A LLMChainFilter that uses the given language model.
langchain/retrievers/document_compressors/cohere_rerank.py CHANGED
@@ -6,14 +6,14 @@ from typing import Any, Dict, List, Optional, Sequence, Union
 from langchain_core._api.deprecation import deprecated
 from langchain_core.callbacks.manager import Callbacks
 from langchain_core.documents import Document
-from langchain_core.pydantic_v1 import Extra, root_validator
+from langchain_core.pydantic_v1 import root_validator
 from langchain_core.utils import get_from_dict_or_env
 
 from langchain.retrievers.document_compressors.base import BaseDocumentCompressor
 
 
 @deprecated(
-    since="0.0.30", removal="0.3.0", alternative_import="langchain_cohere.CohereRerank"
+    since="0.0.30", removal="1.0", alternative_import="langchain_cohere.CohereRerank"
 )
 class CohereRerank(BaseDocumentCompressor):
     """Document compressor that uses `Cohere Rerank API`."""
@@ -31,10 +31,8 @@ class CohereRerank(BaseDocumentCompressor):
     """Identifier for the application making the request."""
 
     class Config:
-        """Configuration for this pydantic object."""
-
-        extra = Extra.forbid
         arbitrary_types_allowed = True
+        extra = "forbid"
 
     @root_validator(pre=True)
     def validate_environment(cls, values: Dict) -> Dict:
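The class stays deprecated in favor of the `langchain-cohere` integration; only the removal target moves from 0.3.0 to 1.0. A sketch of the import swap the decorator's `alternative_import` points at, assuming `pip install langchain-cohere` and a Cohere API key; the model name is illustrative:

# Deprecated path (still works until removal, but emits a deprecation warning):
# from langchain.retrievers.document_compressors import CohereRerank

# Recommended replacement, matching alternative_import="langchain_cohere.CohereRerank":
from langchain_cohere import CohereRerank

reranker = CohereRerank(model="rerank-english-v3.0")  # model name is illustrative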
langchain/retrievers/document_compressors/cross_encoder_rerank.py CHANGED
@@ -5,7 +5,6 @@ from typing import Optional, Sequence
 
 from langchain_core.callbacks import Callbacks
 from langchain_core.documents import BaseDocumentCompressor, Document
-from langchain_core.pydantic_v1 import Extra
 
 from langchain.retrievers.document_compressors.cross_encoder import BaseCrossEncoder
 
@@ -20,10 +19,8 @@ class CrossEncoderReranker(BaseDocumentCompressor):
     """Number of documents to return."""
 
     class Config:
-        """Configuration for this pydantic object."""
-
-        extra = Extra.forbid
         arbitrary_types_allowed = True
+        extra = "forbid"
 
     def compress_documents(
         self,
langchain/retrievers/document_compressors/embeddings_filter.py CHANGED
@@ -42,8 +42,6 @@ class EmbeddingsFilter(BaseDocumentCompressor):
     to None."""
 
     class Config:
-        """Configuration for this pydantic object."""
-
         arbitrary_types_allowed = True
 
     @pre_init
langchain/retrievers/document_compressors/listwise_rerank.py CHANGED
@@ -105,7 +105,7 @@ class LLMListwiseRerank(BaseDocumentCompressor):
             llm: The language model to use for filtering. **Must implement
                 BaseLanguageModel.with_structured_output().**
             prompt: The prompt to use for the filter.
-            **kwargs: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
 
         Returns:
             A LLMListwiseRerank document compressor that uses the given language model.
langchain/retrievers/multi_query.py CHANGED
@@ -72,6 +72,8 @@ class MultiQueryRetriever(BaseRetriever):
         Args:
             retriever: retriever to query documents from
             llm: llm for query generation using DEFAULT_QUERY_PROMPT
+            prompt: The prompt which aims to generate several different versions
+                of the given user query
             include_original: Whether to include the original query in the list of
                 generated queries.
 
@@ -95,7 +97,7 @@ class MultiQueryRetriever(BaseRetriever):
         """Get relevant documents given a user query.
 
         Args:
-            question: user query
+            query: user query
 
         Returns:
             Unique union of relevant documents from all generated queries
@@ -158,7 +160,7 @@ class MultiQueryRetriever(BaseRetriever):
         """Get relevant documents given a user query.
 
         Args:
-            question: user query
+            query: user query
 
         Returns:
             Unique union of relevant documents from all generated queries
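The `from_llm` docstring now documents the `prompt` override alongside `retriever`, `llm`, and `include_original`. A sketch of passing an explicit prompt; the wrapper function and prompt wording are illustrative, and `from_llm`'s default remains DEFAULT_QUERY_PROMPT:

from langchain_core.language_models import BaseLanguageModel
from langchain_core.prompts import PromptTemplate
from langchain_core.retrievers import BaseRetriever
from langchain.retrievers.multi_query import MultiQueryRetriever

# The generated variants are parsed line by line, hence the "one per line" instruction.
CUSTOM_PROMPT = PromptTemplate.from_template(
    "Generate three alternative phrasings of this question, one per line:\n{question}"
)


def build_multi_query(
    base_retriever: BaseRetriever, llm: BaseLanguageModel
) -> MultiQueryRetriever:
    """Wrap an existing retriever so each user query is expanded into several variants."""
    return MultiQueryRetriever.from_llm(
        retriever=base_retriever,
        llm=llm,
        prompt=CUSTOM_PROMPT,       # the parameter the updated docstring now documents
        include_original=True,
    )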
langchain/retrievers/re_phraser.py CHANGED
@@ -64,7 +64,7 @@ class RePhraseQueryRetriever(BaseRetriever):
         *,
         run_manager: CallbackManagerForRetrieverRun,
     ) -> List[Document]:
-        """Get relevated documents given a user question.
+        """Get relevant documents given a user question.
 
         Args:
             query: user question
langchain/retrievers/self_query/base.py CHANGED
@@ -215,10 +215,8 @@ class SelfQueryRetriever(BaseRetriever):
     """Use original query instead of the revised new query from LLM"""
 
     class Config:
-        """Configuration for this pydantic object."""
-
-        arbitrary_types_allowed = True
         allow_population_by_field_name = True
+        arbitrary_types_allowed = True
 
     @root_validator(pre=True)
     def validate_translator(cls, values: Dict) -> Dict:
langchain/retrievers/time_weighted_retriever.py CHANGED
@@ -47,8 +47,6 @@ class TimeWeightedVectorStoreRetriever(BaseRetriever):
     """
 
     class Config:
-        """Configuration for this pydantic object."""
-
         arbitrary_types_allowed = True
 
     def _document_get_date(self, field: str, document: Document) -> datetime.datetime:
langchain/tools/__init__.py CHANGED
@@ -21,7 +21,16 @@ import warnings
 from typing import Any
 
 from langchain_core._api import LangChainDeprecationWarning
-from langchain_core.tools import BaseTool, StructuredTool, Tool, tool
+from langchain_core.tools import (
+    BaseTool as BaseTool,
+)
+from langchain_core.tools import (
+    StructuredTool as StructuredTool,
+)
+from langchain_core.tools import (
+    Tool as Tool,
+)
+from langchain_core.tools.convert import tool as tool
 
 from langchain._api.interactive_env import is_interactive_env
 
@@ -75,6 +84,10 @@ def __getattr__(name: str) -> Any:
 
 
 __all__ = [
+    "StructuredTool",
+    "BaseTool",
+    "tool",
+    "Tool",
     "AINAppOps",
     "AINOwnerOps",
     "AINRuleOps",
@@ -92,7 +105,6 @@ __all__ = [
     "BaseRequestsTool",
     "BaseSQLDatabaseTool",
     "BaseSparkSQLTool",
-    "BaseTool",
     "BearlyInterpreterTool",
     "BingSearchResults",
     "BingSearchRun",
@@ -181,8 +193,6 @@ __all__ = [
     "StdInInquireTool",
     "StackExchangeTool",
     "SteamshipImageGenerationTool",
-    "StructuredTool",
-    "Tool",
     "VectorStoreQATool",
     "VectorStoreQAWithSourcesTool",
     "WikipediaQueryRun",
@@ -193,5 +203,4 @@ __all__ = [
     "ZapierNLAListActions",
     "ZapierNLARunAction",
     "format_tool_to_openai_function",
-    "tool",
 ]
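With the `X as X` re-exports above (the form type checkers treat as explicit public re-exports) and the four names moved to the top of `__all__`, the core tool primitives import cleanly from `langchain.tools`. A small sketch; the decorated function and the Tool instance are illustrative:

from langchain.tools import BaseTool, StructuredTool, Tool, tool


@tool
def word_count(text: str) -> int:
    """Count the words in a piece of text."""
    return len(text.split())


# Legacy single-input style, constructed directly from the re-exported Tool class.
legacy = Tool(name="echo", description="Echo the input back.", func=lambda s: s)

# @tool builds a StructuredTool, which is also a BaseTool.
assert isinstance(word_count, StructuredTool) and isinstance(legacy, BaseTool)
print(word_count.invoke({"text": "hello there world"}))  # 3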
langchain/tools/render.py CHANGED
@@ -7,7 +7,6 @@ This module contains various ways to render tools.
 
 # For backwards compatibility
 from langchain_core.tools import (
-    ToolsRenderer,
     render_text_description,
     render_text_description_and_args,
 )
@@ -17,7 +16,6 @@ from langchain_core.utils.function_calling import (
 )
 
 __all__ = [
-    "ToolsRenderer",
     "render_text_description",
     "render_text_description_and_args",
     "format_tool_to_openai_tool",
langchain/tools/retriever.py CHANGED
@@ -1,14 +1,10 @@
 from langchain_core.tools import (
-    RetrieverInput,
-    ToolsRenderer,
     create_retriever_tool,
     render_text_description,
     render_text_description_and_args,
 )
 
 __all__ = [
-    "RetrieverInput",
-    "ToolsRenderer",
     "create_retriever_tool",
     "render_text_description",
     "render_text_description_and_args",
{langchain-0.2.12.dist-info → langchain-0.2.14.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: langchain
-Version: 0.2.12
+Version: 0.2.14
 Summary: Building applications with LLMs through composability
 Home-page: https://github.com/langchain-ai/langchain
 License: MIT
@@ -15,7 +15,7 @@ Requires-Dist: PyYAML (>=5.3)
 Requires-Dist: SQLAlchemy (>=1.4,<3)
 Requires-Dist: aiohttp (>=3.8.3,<4.0.0)
 Requires-Dist: async-timeout (>=4.0.0,<5.0.0) ; python_version < "3.11"
-Requires-Dist: langchain-core (>=0.2.27,<0.3.0)
+Requires-Dist: langchain-core (>=0.2.32,<0.3.0)
 Requires-Dist: langchain-text-splitters (>=0.2.0,<0.3.0)
 Requires-Dist: langsmith (>=0.1.17,<0.2.0)
 Requires-Dist: numpy (>=1,<2) ; python_version < "3.12"