langchain 0.2.15__py3-none-any.whl → 0.2.16__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as published to their public registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in that registry.
--- langchain/agents/openai_tools/base.py (0.2.15)
+++ langchain/agents/openai_tools/base.py (0.2.16)
@@ -1,4 +1,4 @@
-from typing import Sequence
+from typing import Optional, Sequence
 
 from langchain_core.language_models import BaseLanguageModel
 from langchain_core.prompts.chat import ChatPromptTemplate
@@ -13,7 +13,10 @@ from langchain.agents.output_parsers.openai_tools import OpenAIToolsAgentOutputP
 
 
 def create_openai_tools_agent(
-    llm: BaseLanguageModel, tools: Sequence[BaseTool], prompt: ChatPromptTemplate
+    llm: BaseLanguageModel,
+    tools: Sequence[BaseTool],
+    prompt: ChatPromptTemplate,
+    strict: Optional[bool] = None,
 ) -> Runnable:
     """Create an agent that uses OpenAI tools.
 
@@ -87,7 +90,9 @@ def create_openai_tools_agent(
     if missing_vars:
         raise ValueError(f"Prompt missing required variables: {missing_vars}")
 
-    llm_with_tools = llm.bind(tools=[convert_to_openai_tool(tool) for tool in tools])
+    llm_with_tools = llm.bind(
+        tools=[convert_to_openai_tool(tool, strict=strict) for tool in tools]
+    )
 
     agent = (
         RunnablePassthrough.assign(
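
These two hunks add an optional `strict` parameter to `create_openai_tools_agent` and forward it to `convert_to_openai_tool`, letting callers opt into OpenAI's strict schema validation for tool calls. A minimal usage sketch (the model name and the example tool are placeholders, not part of the diff):

    from langchain.agents import AgentExecutor, create_openai_tools_agent
    from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
    from langchain_core.tools import tool
    from langchain_openai import ChatOpenAI

    @tool
    def add(a: int, b: int) -> int:
        """Add two integers."""
        return a + b

    prompt = ChatPromptTemplate.from_messages(
        [
            ("system", "You are a helpful assistant."),
            ("human", "{input}"),
            MessagesPlaceholder("agent_scratchpad"),
        ]
    )

    # strict=None keeps the previous behavior; strict=True asks
    # convert_to_openai_tool to emit strict tool schemas.
    agent = create_openai_tools_agent(
        ChatOpenAI(model="gpt-4o-mini"),  # placeholder model name
        [add],
        prompt,
        strict=True,
    )
    executor = AgentExecutor(agent=agent, tools=[add])
    executor.invoke({"input": "What is 2 + 3?"})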
--- langchain/chains/flare/base.py (0.2.15)
+++ langchain/chains/flare/base.py (0.2.16)
@@ -73,7 +73,10 @@ def _low_confidence_spans(
 
 class FlareChain(Chain):
     """Chain that combines a retriever, a question generator,
-    and a response generator."""
+    and a response generator.
+
+    See [Active Retrieval Augmented Generation](https://arxiv.org/abs/2305.06983) paper.
+    """
 
     question_generator_chain: Runnable
     """Chain that generates questions from uncertain spans."""
--- langchain/chains/moderation.py (0.2.15)
+++ langchain/chains/moderation.py (0.2.16)
@@ -38,7 +38,7 @@ class OpenAIModerationChain(Chain):
     output_key: str = "output"  #: :meta private:
     openai_api_key: Optional[str] = None
     openai_organization: Optional[str] = None
-    _openai_pre_1_0: bool = Field(default=None)
+    openai_pre_1_0: bool = Field(default=None)
 
     @root_validator(pre=True)
     def validate_environment(cls, values: Dict) -> Dict:
@@ -58,16 +58,17 @@ class OpenAIModerationChain(Chain):
             openai.api_key = openai_api_key
             if openai_organization:
                 openai.organization = openai_organization
-            values["_openai_pre_1_0"] = False
+            values["openai_pre_1_0"] = False
             try:
                 check_package_version("openai", gte_version="1.0")
             except ValueError:
-                values["_openai_pre_1_0"] = True
-            if values["_openai_pre_1_0"]:
+                values["openai_pre_1_0"] = True
+            if values["openai_pre_1_0"]:
                 values["client"] = openai.Moderation
             else:
                 values["client"] = openai.OpenAI()
                 values["async_client"] = openai.AsyncOpenAI()
+
         except ImportError:
             raise ImportError(
                 "Could not import openai python package. "
@@ -92,7 +93,7 @@ class OpenAIModerationChain(Chain):
         return [self.output_key]
 
     def _moderate(self, text: str, results: Any) -> str:
-        if self._openai_pre_1_0:
+        if self.openai_pre_1_0:
             condition = results["flagged"]
         else:
             condition = results.flagged
@@ -110,7 +111,7 @@ class OpenAIModerationChain(Chain):
         run_manager: Optional[CallbackManagerForChainRun] = None,
     ) -> Dict[str, Any]:
         text = inputs[self.input_key]
-        if self._openai_pre_1_0:
+        if self.openai_pre_1_0:
             results = self.client.create(text)
             output = self._moderate(text, results["results"][0])
         else:
@@ -123,7 +124,7 @@ class OpenAIModerationChain(Chain):
         inputs: Dict[str, Any],
         run_manager: Optional[AsyncCallbackManagerForChainRun] = None,
     ) -> Dict[str, Any]:
-        if self._openai_pre_1_0:
+        if self.openai_pre_1_0:
             return await super()._acall(inputs, run_manager=run_manager)
         text = inputs[self.input_key]
         results = await self.async_client.moderations.create(input=text)
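
These hunks rename the field `_openai_pre_1_0` to `openai_pre_1_0`, making the version probe a regular public model field rather than an underscore-prefixed name (which pydantic treats as private). A minimal sketch of the chain in use, assuming openai>=1.0 is installed and OPENAI_API_KEY is set in the environment:

    from langchain.chains import OpenAIModerationChain

    moderation = OpenAIModerationChain()

    # The version probe now lives on the public field set by validate_environment.
    print(moderation.openai_pre_1_0)  # False when openai>=1.0 is installed

    result = moderation.invoke({"input": "Some text to screen."})
    print(result["output"])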
--- langchain/retrievers/self_query/base.py (0.2.15)
+++ langchain/retrievers/self_query/base.py (0.2.16)
@@ -48,6 +48,7 @@ def _get_builtin_translator(vectorstore: VectorStore) -> Visitor:
         MongoDBAtlasTranslator,
     )
     from langchain_community.query_constructors.myscale import MyScaleTranslator
+    from langchain_community.query_constructors.neo4j import Neo4jTranslator
     from langchain_community.query_constructors.opensearch import OpenSearchTranslator
     from langchain_community.query_constructors.pgvector import PGVectorTranslator
     from langchain_community.query_constructors.pinecone import PineconeTranslator
@@ -70,6 +71,7 @@ def _get_builtin_translator(vectorstore: VectorStore) -> Visitor:
         Dingo,
         Milvus,
         MyScale,
+        Neo4jVector,
         OpenSearchVectorSearch,
         PGVector,
         Qdrant,
@@ -111,11 +113,10 @@ def _get_builtin_translator(vectorstore: VectorStore) -> Visitor:
         TimescaleVector: TimescaleVectorTranslator,
         OpenSearchVectorSearch: OpenSearchTranslator,
         CommunityMongoDBAtlasVectorSearch: MongoDBAtlasTranslator,
+        Neo4jVector: Neo4jTranslator,
     }
     if isinstance(vectorstore, DatabricksVectorSearch):
         return DatabricksVectorSearchTranslator()
-    if isinstance(vectorstore, Qdrant):
-        return QdrantTranslator(metadata_key=vectorstore.metadata_payload_key)
     elif isinstance(vectorstore, MyScale):
         return MyScaleTranslator(metadata_key=vectorstore.metadata_column)
     elif isinstance(vectorstore, Redis):
@@ -177,6 +178,14 @@ def _get_builtin_translator(vectorstore: VectorStore) -> Visitor:
     if isinstance(vectorstore, PGVector):
         return NewPGVectorTranslator()
 
+    try:
+        from langchain_qdrant import QdrantVectorStore
+    except ImportError:
+        pass
+    else:
+        if isinstance(vectorstore, QdrantVectorStore):
+            return QdrantTranslator(metadata_key=vectorstore.metadata_payload_key)
+
     try:
         # Added in langchain-community==0.2.11
         from langchain_community.query_constructors.hanavector import HanaTranslator
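
Two changes in `_get_builtin_translator`: `Neo4jVector` now maps to the new `Neo4jTranslator`, and the dedicated check for the community `Qdrant` class is removed in favor of detecting the partner package's `QdrantVectorStore` through a lazy, optional import of `langchain_qdrant`. A hedged sketch of self-querying over a Neo4j vector store, where the translator is now picked up automatically; the connection details, index name, and metadata schema are placeholders, and a reachable Neo4j instance plus an OpenAI API key are assumed:

    from langchain.chains.query_constructor.base import AttributeInfo
    from langchain.retrievers.self_query.base import SelfQueryRetriever
    from langchain_community.vectorstores import Neo4jVector
    from langchain_openai import ChatOpenAI, OpenAIEmbeddings

    # Placeholder connection settings for an existing Neo4j vector index.
    store = Neo4jVector.from_existing_index(
        OpenAIEmbeddings(),
        url="bolt://localhost:7687",
        username="neo4j",
        password="password",
        index_name="movies",
    )

    retriever = SelfQueryRetriever.from_llm(
        llm=ChatOpenAI(temperature=0),
        vectorstore=store,
        document_contents="Brief summaries of movies",
        metadata_field_info=[
            AttributeInfo(name="genre", description="The movie genre", type="string"),
        ],
        # No structured_query_translator needed: Neo4jVector now resolves
        # to Neo4jTranslator via the translator mapping shown above.
    )
    retriever.invoke("a science fiction film about space travel")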
--- langchain-0.2.15.dist-info/METADATA
+++ langchain-0.2.16.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: langchain
-Version: 0.2.15
+Version: 0.2.16
 Summary: Building applications with LLMs through composability
 Home-page: https://github.com/langchain-ai/langchain
 License: MIT
@@ -15,7 +15,7 @@ Requires-Dist: PyYAML (>=5.3)
 Requires-Dist: SQLAlchemy (>=1.4,<3)
 Requires-Dist: aiohttp (>=3.8.3,<4.0.0)
 Requires-Dist: async-timeout (>=4.0.0,<5.0.0) ; python_version < "3.11"
-Requires-Dist: langchain-core (>=0.2.35,<0.3.0)
+Requires-Dist: langchain-core (>=0.2.38,<0.3.0)
 Requires-Dist: langchain-text-splitters (>=0.2.0,<0.3.0)
 Requires-Dist: langsmith (>=0.1.17,<0.2.0)
 Requires-Dist: numpy (>=1,<2) ; python_version < "3.12"
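
The metadata bumps the package version and raises the langchain-core floor from 0.2.35 to 0.2.38, presumably so that `convert_to_openai_tool` accepts the `strict` argument used above. A quick way to confirm an installed environment satisfies the new constraint:

    from importlib.metadata import version

    # Expect 0.2.16 and a langchain-core in the [0.2.38, 0.3.0) range.
    print(version("langchain"))
    print(version("langchain-core"))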
--- langchain-0.2.15.dist-info/RECORD
+++ langchain-0.2.16.dist-info/RECORD
@@ -118,7 +118,7 @@ langchain/agents/openai_functions_agent/base.py,sha256=U1wNxqfqcYciELfiJU6P-B0BR
 langchain/agents/openai_functions_multi_agent/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 langchain/agents/openai_functions_multi_agent/base.py,sha256=UEAY1l2DeVxVwMSWA1UqvXnKVnQ6cpLUVsaUaFv3B40,12658
 langchain/agents/openai_tools/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-langchain/agents/openai_tools/base.py,sha256=z8H7DgYhuiH54qDmNZ5QHvoJ0bvuVelGUQ1bv-nMwqE,3513
+langchain/agents/openai_tools/base.py,sha256=8a5x0l2FFEv9juNTCRDpzgsLNg4W3sUzD4kT10JySNg,3596
 langchain/agents/output_parsers/__init__.py,sha256=Zzsf8moY-juhKCrnBDUhwgKQtW12cNBkua5faqbAlQA,1374
 langchain/agents/output_parsers/json.py,sha256=sW9e8fG4VlPnMn53dWIwSgnyRBUYs4ULFymrhW92sWQ,1846
 langchain/agents/output_parsers/openai_functions.py,sha256=MjNEFVCxYgS6Efr3HX4rR1zoks2vJxoV8FCUa240jPQ,3467
@@ -241,7 +241,7 @@ langchain/chains/ernie_functions/__init__.py,sha256=X_gOa8GIjyV6tAS32A1BLv6q08uf
 langchain/chains/ernie_functions/base.py,sha256=SGs_-yi0qa7cxgkiu2EsoYQF4_fKQUZkxncrp1KiMbU,1730
 langchain/chains/example_generator.py,sha256=QDY7l9hO-RkTZGMMhVUfbZRf__eacdMGOPQXP3Yshrg,757
 langchain/chains/flare/__init__.py,sha256=ufb8LMpEVUzTDflcNiJJyKCG9e4EVGAvz5e7h7f0Z1c,51
-langchain/chains/flare/base.py,sha256=mn0xPowvgv3IxbNo6E6-HcashFPChVk5ZBPlRoPmPcA,8421
+langchain/chains/flare/base.py,sha256=ZB8n4NYc6bzxxcJ4Cc-GHocR46ck0Oe69oGBjFjAufE,8516
 langchain/chains/flare/prompts.py,sha256=6ypb3UrOwd4YFy1W8LjBwNVgZLYb-W1U1hme5IdPpDE,1471
 langchain/chains/graph_qa/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 langchain/chains/graph_qa/arangodb.py,sha256=FdkfnDwKnmWinqYObKK-ZPDO_AFZr3PAiRKDEGJxK_A,669
@@ -280,7 +280,7 @@ langchain/chains/llm_summarization_checker/prompts/revise_summary.txt,sha256=nSS
 langchain/chains/llm_symbolic_math/__init__.py,sha256=KQ6bFiFMsqs8PNtU-oo6l-czNBBwQUn2rEirz3gt-w8,470
 langchain/chains/loading.py,sha256=57shFurz0r_FDoUSTcD5Hv7cZl4Rr2G2A_gT-p7XHCE,28829
 langchain/chains/mapreduce.py,sha256=9E3uwPXak68csHIcUg6T78D2EIKpH7Gz7CoZy8DYoGE,4048
-langchain/chains/moderation.py,sha256=e9c3nPY83gOq9qXRqC2i4EVUXva79jxKdKAX1nKZMTY,4414
+langchain/chains/moderation.py,sha256=jfeJpQgbmPT0PQcILUpe0j6Hp90xGzJiJyEQnB98Bcs,4408
 langchain/chains/natbot/__init__.py,sha256=ACF2TYNK_CTfvmdLlG5Ry0_j9D6ZfjgfQxmeKe1BAIg,96
 langchain/chains/natbot/base.py,sha256=y_WzKic2F6oaup5NS33OAo0LTQklyRJd9wplIZHRvAw,5252
 langchain/chains/natbot/crawler.py,sha256=E1mQUEsg8Jj6Eth-LBUcMU-Zc88JEA3a79kMhHkKO08,16050
@@ -901,7 +901,7 @@ langchain/retrievers/re_phraser.py,sha256=5H2CAhUNl95wLY2IZf155hlCGr_wgDq7Y1DcYq
 langchain/retrievers/remote_retriever.py,sha256=f1jPII31IkNrhkH1LvlUlNLRQNMKNvgE_7qHa3o3P04,659
 langchain/retrievers/self_query/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 langchain/retrievers/self_query/astradb.py,sha256=lxlkYOr8xicH7MNyQKIg3Wc-XwhVpKGBn7maqYyR3Hk,670
-langchain/retrievers/self_query/base.py,sha256=YQbs6BAg6m2Hj5coyjDs9D9izB0YDZfESalI0gz80DE,13436
+langchain/retrievers/self_query/base.py,sha256=nXPuSdEMEsUQyu91qm7Bn5ayQQZswi_44jQQfU3bPyM,13731
 langchain/retrievers/self_query/chroma.py,sha256=F0u_3Id1J1hIYM2D8_oNL2JJVetTFDyqW6fuGhjZ0ew,665
 langchain/retrievers/self_query/dashvector.py,sha256=CJAJQuJYNmw_GUIwwlPx3Scu1uDESTnFF-CzZEwFRRg,685
 langchain/retrievers/self_query/databricks_vector_search.py,sha256=S9V-XRfG6taeW3yRx_NZs4h-R4TiyHLnuJTIZa5rsqM,782
@@ -1335,8 +1335,8 @@ langchain/vectorstores/xata.py,sha256=HW_Oi5Hz8rH2JaUhRNWQ-3hLYmNzD8eAz6K5YqPArm
 langchain/vectorstores/yellowbrick.py,sha256=-lnjGcRE8Q1nEPOTdbKYTw5noS2cy2ce1ePOU804-_o,624
 langchain/vectorstores/zep.py,sha256=RJ2auxoA6uHHLEZknw3_jeFmYJYVt-PWKMBcNMGV6TM,798
 langchain/vectorstores/zilliz.py,sha256=XhPPIUfKPFJw0_svCoBgCnNkkBLoRVVcyuMfOnE5IxU,609
-langchain-0.2.15.dist-info/LICENSE,sha256=TsZ-TKbmch26hJssqCJhWXyGph7iFLvyFBYAa3stBHg,1067
-langchain-0.2.15.dist-info/METADATA,sha256=SCvMJEs2Mf3DzxDsTkUpUXZInSAdYwn2PfB3lB4x6P0,7074
-langchain-0.2.15.dist-info/WHEEL,sha256=FMvqSimYX_P7y0a7UY-_Mc83r5zkBZsCYPm7Lr0Bsq4,88
-langchain-0.2.15.dist-info/entry_points.txt,sha256=IgKjoXnkkVC8Nm7ggiFMCNAk01ua6RVTb9cmZTVNm5w,58
-langchain-0.2.15.dist-info/RECORD,,
+langchain-0.2.16.dist-info/LICENSE,sha256=TsZ-TKbmch26hJssqCJhWXyGph7iFLvyFBYAa3stBHg,1067
+langchain-0.2.16.dist-info/METADATA,sha256=IAi2GE2-duv8RTkyKEisQDPCOUJQXcQc2-cOReTlxn4,7074
+langchain-0.2.16.dist-info/WHEEL,sha256=FMvqSimYX_P7y0a7UY-_Mc83r5zkBZsCYPm7Lr0Bsq4,88
+langchain-0.2.16.dist-info/entry_points.txt,sha256=IgKjoXnkkVC8Nm7ggiFMCNAk01ua6RVTb9cmZTVNm5w,58
+langchain-0.2.16.dist-info/RECORD,,