langchain 0.3.19__py3-none-any.whl → 0.3.20__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of langchain might be problematic. See the package's listing on its registry page for more details.

@@ -652,7 +652,7 @@ class OpenAIAssistantRunnable(RunnableSerializable[Dict, OutputType]):
652
652
  self, intermediate_steps: List[Tuple[OpenAIAssistantAction, str]]
653
653
  ) -> dict:
654
654
  last_action, last_output = intermediate_steps[-1]
655
- run = await self._wait_for_run(last_action.run_id, last_action.thread_id)
655
+ run = self._wait_for_run(last_action.run_id, last_action.thread_id)
656
656
  required_tool_call_ids = set()
657
657
  if run.required_action:
658
658
  required_tool_call_ids = {
@@ -9,7 +9,7 @@ ESQuery:"""
9
9
 
10
10
  DEFAULT_DSL_TEMPLATE = """Given an input question, create a syntactically correct Elasticsearch query to run. Unless the user specifies in their question a specific number of examples they wish to obtain, always limit your query to at most {top_k} results. You can order the results by a relevant column to return the most interesting examples in the database.
11
11
 
12
- Unless told to do not query for all the columns from a specific index, only ask for a the few relevant columns given the question.
12
+ Unless told to do not query for all the columns from a specific index, only ask for a few relevant columns given the question.
13
13
 
14
14
  Pay attention to use only the column names that you can see in the mapping description. Be careful to not query for columns that do not exist. Also, pay attention to which column is in which index. Return the query as valid json.
15
15
 
@@ -1,9 +1,9 @@
1
1
  from __future__ import annotations
2
2
 
3
+ import logging
3
4
  import re
4
5
  from typing import Any, Dict, List, Optional, Sequence, Tuple
5
6
 
6
- import numpy as np
7
7
  from langchain_core.callbacks import (
8
8
  CallbackManagerForChainRun,
9
9
  )
@@ -23,6 +23,8 @@ from langchain.chains.flare.prompts import (
23
23
  )
24
24
  from langchain.chains.llm import LLMChain
25
25
 
26
+ logger = logging.getLogger(__name__)
27
+
26
28
 
27
29
  def _extract_tokens_and_log_probs(response: AIMessage) -> Tuple[List[str], List[float]]:
28
30
  """Extract tokens and log probabilities from chat model response."""
@@ -57,7 +59,24 @@ def _low_confidence_spans(
57
59
  min_token_gap: int,
58
60
  num_pad_tokens: int,
59
61
  ) -> List[str]:
60
- _low_idx = np.where(np.exp(log_probs) < min_prob)[0]
62
+ try:
63
+ import numpy as np
64
+
65
+ _low_idx = np.where(np.exp(log_probs) < min_prob)[0]
66
+ except ImportError:
67
+ logger.warning(
68
+ "NumPy not found in the current Python environment. FlareChain will use a "
69
+ "pure Python implementation for internal calculations, which may "
70
+ "significantly impact performance, especially for large datasets. For "
71
+ "optimal speed and efficiency, consider installing NumPy: pip install numpy"
72
+ )
73
+ import math
74
+
75
+ _low_idx = [ # type: ignore[assignment]
76
+ idx
77
+ for idx, log_prob in enumerate(log_probs)
78
+ if math.exp(log_prob) < min_prob
79
+ ]
61
80
  low_idx = [i for i in _low_idx if re.search(r"\w", tokens[i])]
62
81
  if len(low_idx) == 0:
63
82
  return []
@@ -5,9 +5,9 @@ https://arxiv.org/abs/2212.10496
5
5
 
6
6
  from __future__ import annotations
7
7
 
8
+ import logging
8
9
  from typing import Any, Dict, List, Optional
9
10
 
10
- import numpy as np
11
11
  from langchain_core.callbacks import CallbackManagerForChainRun
12
12
  from langchain_core.embeddings import Embeddings
13
13
  from langchain_core.language_models import BaseLanguageModel
@@ -20,6 +20,8 @@ from langchain.chains.base import Chain
20
20
  from langchain.chains.hyde.prompts import PROMPT_MAP
21
21
  from langchain.chains.llm import LLMChain
22
22
 
23
+ logger = logging.getLogger(__name__)
24
+
23
25
 
24
26
  class HypotheticalDocumentEmbedder(Chain, Embeddings):
25
27
  """Generate hypothetical document for query, and then embed that.
@@ -54,7 +56,22 @@ class HypotheticalDocumentEmbedder(Chain, Embeddings):
54
56
 
55
57
  def combine_embeddings(self, embeddings: List[List[float]]) -> List[float]:
56
58
  """Combine embeddings into final embeddings."""
57
- return list(np.array(embeddings).mean(axis=0))
59
+ try:
60
+ import numpy as np
61
+
62
+ return list(np.array(embeddings).mean(axis=0))
63
+ except ImportError:
64
+ logger.warning(
65
+ "NumPy not found in the current Python environment. "
66
+ "HypotheticalDocumentEmbedder will use a pure Python implementation "
67
+ "for internal calculations, which may significantly impact "
68
+ "performance, especially for large datasets. For optimal speed and "
69
+ "efficiency, consider installing NumPy: pip install numpy"
70
+ )
71
+ if not embeddings:
72
+ return []
73
+ num_vectors = len(embeddings)
74
+ return [sum(dim_values) / num_vectors for dim_values in zip(*embeddings)]
58
75
 
59
76
  def embed_query(self, text: str) -> List[float]:
60
77
  """Generate a hypothetical document and embedded it."""
@@ -10,7 +10,7 @@ Question: {input}"""
10
10
 
11
11
  _DEFAULT_TEMPLATE = """Given an input question, first create a syntactically correct {dialect} query to run, then look at the results of the query and return the answer. Unless the user specifies in his question a specific number of examples he wishes to obtain, always limit your query to at most {top_k} results. You can order the results by a relevant column to return the most interesting examples in the database.
12
12
 
13
- Never query for all the columns from a specific table, only ask for a the few relevant columns given the question.
13
+ Never query for all the columns from a specific table, only ask for a few relevant columns given the question.
14
14
 
15
15
  Pay attention to use only the column names that you can see in the schema description. Be careful to not query for columns that do not exist. Also, pay attention to which column is in which table.
16
16
 
@@ -875,7 +875,7 @@ class _ConfigurableModel(Runnable[LanguageModelInput, Any]):
875
875
  input: Any,
876
876
  config: Optional[RunnableConfig] = None,
877
877
  *,
878
- version: Literal["v1", "v2"],
878
+ version: Literal["v1", "v2"] = "v2",
879
879
  include_names: Optional[Sequence[str]] = None,
880
880
  include_types: Optional[Sequence[str]] = None,
881
881
  include_tags: Optional[Sequence[str]] = None,
@@ -1,9 +1,11 @@
1
1
  """A chain for comparing the output of two models using embeddings."""
2
2
 
3
+ import functools
4
+ import logging
3
5
  from enum import Enum
6
+ from importlib import util
4
7
  from typing import Any, Dict, List, Optional
5
8
 
6
- import numpy as np
7
9
  from langchain_core.callbacks.manager import (
8
10
  AsyncCallbackManagerForChainRun,
9
11
  CallbackManagerForChainRun,
@@ -18,6 +20,34 @@ from langchain.evaluation.schema import PairwiseStringEvaluator, StringEvaluator
18
20
  from langchain.schema import RUN_KEY
19
21
 
20
22
 
23
+ def _import_numpy() -> Any:
24
+ try:
25
+ import numpy as np
26
+
27
+ return np
28
+ except ImportError as e:
29
+ raise ImportError(
30
+ "Could not import numpy, please install with `pip install numpy`."
31
+ ) from e
32
+
33
+
34
+ logger = logging.getLogger(__name__)
35
+
36
+
37
+ @functools.lru_cache(maxsize=1)
38
+ def _check_numpy() -> bool:
39
+ if bool(util.find_spec("numpy")):
40
+ return True
41
+ logger.warning(
42
+ "NumPy not found in the current Python environment. "
43
+ "langchain will use a pure Python implementation for embedding distance "
44
+ "operations, which may significantly impact performance, especially for large "
45
+ "datasets. For optimal speed and efficiency, consider installing NumPy: "
46
+ "pip install numpy"
47
+ )
48
+ return False
49
+
50
+
21
51
  def _embedding_factory() -> Embeddings:
22
52
  """Create an Embeddings object.
23
53
  Returns:
@@ -158,7 +188,7 @@ class _EmbeddingDistanceChainMixin(Chain):
158
188
  raise ValueError(f"Invalid metric: {metric}")
159
189
 
160
190
  @staticmethod
161
- def _cosine_distance(a: np.ndarray, b: np.ndarray) -> np.ndarray:
191
+ def _cosine_distance(a: Any, b: Any) -> Any:
162
192
  """Compute the cosine distance between two vectors.
163
193
 
164
194
  Args:
@@ -179,7 +209,7 @@ class _EmbeddingDistanceChainMixin(Chain):
179
209
  return 1.0 - cosine_similarity(a, b)
180
210
 
181
211
  @staticmethod
182
- def _euclidean_distance(a: np.ndarray, b: np.ndarray) -> np.floating:
212
+ def _euclidean_distance(a: Any, b: Any) -> Any:
183
213
  """Compute the Euclidean distance between two vectors.
184
214
 
185
215
  Args:
@@ -189,10 +219,15 @@ class _EmbeddingDistanceChainMixin(Chain):
189
219
  Returns:
190
220
  np.floating: The Euclidean distance.
191
221
  """
192
- return np.linalg.norm(a - b)
222
+ if _check_numpy():
223
+ import numpy as np
224
+
225
+ return np.linalg.norm(a - b)
226
+
227
+ return sum((x - y) * (x - y) for x, y in zip(a, b)) ** 0.5
193
228
 
194
229
  @staticmethod
195
- def _manhattan_distance(a: np.ndarray, b: np.ndarray) -> np.floating:
230
+ def _manhattan_distance(a: Any, b: Any) -> Any:
196
231
  """Compute the Manhattan distance between two vectors.
197
232
 
198
233
  Args:
@@ -202,10 +237,14 @@ class _EmbeddingDistanceChainMixin(Chain):
202
237
  Returns:
203
238
  np.floating: The Manhattan distance.
204
239
  """
205
- return np.sum(np.abs(a - b))
240
+ if _check_numpy():
241
+ np = _import_numpy()
242
+ return np.sum(np.abs(a - b))
243
+
244
+ return sum(abs(x - y) for x, y in zip(a, b))
206
245
 
207
246
  @staticmethod
208
- def _chebyshev_distance(a: np.ndarray, b: np.ndarray) -> np.floating:
247
+ def _chebyshev_distance(a: Any, b: Any) -> Any:
209
248
  """Compute the Chebyshev distance between two vectors.
210
249
 
211
250
  Args:
@@ -215,10 +254,14 @@ class _EmbeddingDistanceChainMixin(Chain):
215
254
  Returns:
216
255
  np.floating: The Chebyshev distance.
217
256
  """
218
- return np.max(np.abs(a - b))
257
+ if _check_numpy():
258
+ np = _import_numpy()
259
+ return np.max(np.abs(a - b))
260
+
261
+ return max(abs(x - y) for x, y in zip(a, b))
219
262
 
220
263
  @staticmethod
221
- def _hamming_distance(a: np.ndarray, b: np.ndarray) -> np.floating:
264
+ def _hamming_distance(a: Any, b: Any) -> Any:
222
265
  """Compute the Hamming distance between two vectors.
223
266
 
224
267
  Args:
@@ -228,9 +271,13 @@ class _EmbeddingDistanceChainMixin(Chain):
228
271
  Returns:
229
272
  np.floating: The Hamming distance.
230
273
  """
231
- return np.mean(a != b)
274
+ if _check_numpy():
275
+ np = _import_numpy()
276
+ return np.mean(a != b)
232
277
 
233
- def _compute_score(self, vectors: np.ndarray) -> float:
278
+ return sum(1 for x, y in zip(a, b) if x != y) / len(a)
279
+
280
+ def _compute_score(self, vectors: Any) -> float:
234
281
  """Compute the score based on the distance metric.
235
282
 
236
283
  Args:
@@ -240,8 +287,11 @@ class _EmbeddingDistanceChainMixin(Chain):
240
287
  float: The computed score.
241
288
  """
242
289
  metric = self._get_metric(self.distance_metric)
243
- score = metric(vectors[0].reshape(1, -1), vectors[1].reshape(1, -1)).item()
244
- return score
290
+ if _check_numpy() and isinstance(vectors, _import_numpy().ndarray):
291
+ score = metric(vectors[0].reshape(1, -1), vectors[1].reshape(1, -1)).item()
292
+ else:
293
+ score = metric(vectors[0], vectors[1])
294
+ return float(score)
245
295
 
246
296
 
247
297
  class EmbeddingDistanceEvalChain(_EmbeddingDistanceChainMixin, StringEvaluator):
@@ -292,9 +342,12 @@ class EmbeddingDistanceEvalChain(_EmbeddingDistanceChainMixin, StringEvaluator):
292
342
  Returns:
293
343
  Dict[str, Any]: The computed score.
294
344
  """
295
- vectors = np.array(
296
- self.embeddings.embed_documents([inputs["prediction"], inputs["reference"]])
345
+ vectors = self.embeddings.embed_documents(
346
+ [inputs["prediction"], inputs["reference"]]
297
347
  )
348
+ if _check_numpy():
349
+ np = _import_numpy()
350
+ vectors = np.array(vectors)
298
351
  score = self._compute_score(vectors)
299
352
  return {"score": score}
300
353
 
@@ -313,13 +366,15 @@ class EmbeddingDistanceEvalChain(_EmbeddingDistanceChainMixin, StringEvaluator):
313
366
  Returns:
314
367
  Dict[str, Any]: The computed score.
315
368
  """
316
- embedded = await self.embeddings.aembed_documents(
369
+ vectors = await self.embeddings.aembed_documents(
317
370
  [
318
371
  inputs["prediction"],
319
372
  inputs["reference"],
320
373
  ]
321
374
  )
322
- vectors = np.array(embedded)
375
+ if _check_numpy():
376
+ np = _import_numpy()
377
+ vectors = np.array(vectors)
323
378
  score = self._compute_score(vectors)
324
379
  return {"score": score}
325
380
 
@@ -432,14 +487,15 @@ class PairwiseEmbeddingDistanceEvalChain(
432
487
  Returns:
433
488
  Dict[str, Any]: The computed score.
434
489
  """
435
- vectors = np.array(
436
- self.embeddings.embed_documents(
437
- [
438
- inputs["prediction"],
439
- inputs["prediction_b"],
440
- ]
441
- )
490
+ vectors = self.embeddings.embed_documents(
491
+ [
492
+ inputs["prediction"],
493
+ inputs["prediction_b"],
494
+ ]
442
495
  )
496
+ if _check_numpy():
497
+ np = _import_numpy()
498
+ vectors = np.array(vectors)
443
499
  score = self._compute_score(vectors)
444
500
  return {"score": score}
445
501
 
@@ -458,13 +514,15 @@ class PairwiseEmbeddingDistanceEvalChain(
458
514
  Returns:
459
515
  Dict[str, Any]: The computed score.
460
516
  """
461
- embedded = await self.embeddings.aembed_documents(
517
+ vectors = await self.embeddings.aembed_documents(
462
518
  [
463
519
  inputs["prediction"],
464
520
  inputs["prediction_b"],
465
521
  ]
466
522
  )
467
- vectors = np.array(embedded)
523
+ if _check_numpy():
524
+ np = _import_numpy()
525
+ vectors = np.array(vectors)
468
526
  score = self._compute_score(vectors)
469
527
  return {"score": score}
470
528
 
@@ -1,6 +1,5 @@
1
1
  from typing import Callable, Dict, Optional, Sequence
2
2
 
3
- import numpy as np
4
3
  from langchain_core.callbacks.manager import Callbacks
5
4
  from langchain_core.documents import Document
6
5
  from langchain_core.embeddings import Embeddings
@@ -69,6 +68,13 @@ class EmbeddingsFilter(BaseDocumentCompressor):
69
68
  "To use please install langchain-community "
70
69
  "with `pip install langchain-community`."
71
70
  )
71
+
72
+ try:
73
+ import numpy as np
74
+ except ImportError as e:
75
+ raise ImportError(
76
+ "Could not import numpy, please install with `pip install numpy`."
77
+ ) from e
72
78
  stateful_documents = get_stateful_documents(documents)
73
79
  embedded_documents = _get_embeddings_from_stateful_docs(
74
80
  self.embeddings, stateful_documents
@@ -104,6 +110,13 @@ class EmbeddingsFilter(BaseDocumentCompressor):
104
110
  "To use please install langchain-community "
105
111
  "with `pip install langchain-community`."
106
112
  )
113
+
114
+ try:
115
+ import numpy as np
116
+ except ImportError as e:
117
+ raise ImportError(
118
+ "Could not import numpy, please install with `pip install numpy`."
119
+ ) from e
107
120
  stateful_documents = get_stateful_documents(documents)
108
121
  embedded_documents = await _aget_embeddings_from_stateful_docs(
109
122
  self.embeddings, stateful_documents
@@ -1,23 +1,19 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: langchain
3
- Version: 0.3.19
3
+ Version: 0.3.20
4
4
  Summary: Building applications with LLMs through composability
5
5
  License: MIT
6
6
  Project-URL: Source Code, https://github.com/langchain-ai/langchain/tree/master/libs/langchain
7
7
  Project-URL: Release Notes, https://github.com/langchain-ai/langchain/releases?q=tag%3A%22langchain%3D%3D0%22&expanded=true
8
8
  Project-URL: repository, https://github.com/langchain-ai/langchain
9
9
  Requires-Python: <4.0,>=3.9
10
- Requires-Dist: langchain-core<1.0.0,>=0.3.35
10
+ Requires-Dist: langchain-core<1.0.0,>=0.3.41
11
11
  Requires-Dist: langchain-text-splitters<1.0.0,>=0.3.6
12
12
  Requires-Dist: langsmith<0.4,>=0.1.17
13
13
  Requires-Dist: pydantic<3.0.0,>=2.7.4
14
14
  Requires-Dist: SQLAlchemy<3,>=1.4
15
15
  Requires-Dist: requests<3,>=2
16
16
  Requires-Dist: PyYAML>=5.3
17
- Requires-Dist: aiohttp<4.0.0,>=3.8.3
18
- Requires-Dist: tenacity!=8.4.0,<10,>=8.1.0
19
- Requires-Dist: numpy<2,>=1.26.4; python_version < "3.12"
20
- Requires-Dist: numpy<3,>=1.26.2; python_version >= "3.12"
21
17
  Requires-Dist: async-timeout<5.0.0,>=4.0.0; python_version < "3.11"
22
18
  Provides-Extra: community
23
19
  Requires-Dist: langchain-community; extra == "community"
@@ -1,7 +1,7 @@
1
- langchain-0.3.19.dist-info/METADATA,sha256=S-FF3oq7oPneNryroGuJczaeyecMeokCY5IhNPTKwms,7875
2
- langchain-0.3.19.dist-info/WHEEL,sha256=thaaA2w1JzcGC48WYufAs8nrYZjJm8LqNfnXFOFyCC4,90
3
- langchain-0.3.19.dist-info/entry_points.txt,sha256=hLMwTN6pPNCY0cYtYmCYgY-piFzDb17o6ZrDC6IpdQU,75
4
- langchain-0.3.19.dist-info/licenses/LICENSE,sha256=TsZ-TKbmch26hJssqCJhWXyGph7iFLvyFBYAa3stBHg,1067
1
+ langchain-0.3.20.dist-info/METADATA,sha256=Igkqqj4LLz-IQexFHUYfh-pYdrua3qzGbvVytVr9MA8,7680
2
+ langchain-0.3.20.dist-info/WHEEL,sha256=thaaA2w1JzcGC48WYufAs8nrYZjJm8LqNfnXFOFyCC4,90
3
+ langchain-0.3.20.dist-info/entry_points.txt,sha256=hLMwTN6pPNCY0cYtYmCYgY-piFzDb17o6ZrDC6IpdQU,75
4
+ langchain-0.3.20.dist-info/licenses/LICENSE,sha256=TsZ-TKbmch26hJssqCJhWXyGph7iFLvyFBYAa3stBHg,1067
5
5
  langchain/__init__.py,sha256=4cqV-N_QJnfjk52DqtR2e72vsmJC1R6PkflvRdLjZQI,13709
6
6
  langchain/_api/__init__.py,sha256=0FuHuMNUBMrst1Y1nm5yZzQr2xbLmb7rxMsimqKBXhs,733
7
7
  langchain/_api/deprecation.py,sha256=K9VCkmMs_ebfd_wCJppKq4Ahw-mlXkukbsQ69iQVxT0,1246
@@ -115,7 +115,7 @@ langchain/agents/mrkl/base.py,sha256=yonYGfgMkTixmrknWROMjwjddiUCgmWEkfIaWVlJdAU
115
115
  langchain/agents/mrkl/output_parser.py,sha256=YQGSjQq5pR4kFUg1HrOS3laV6xgtHgtIOQ_TtJY0UFI,3720
116
116
  langchain/agents/mrkl/prompt.py,sha256=2dTMP2lAWiLvCtuEijgQRjbKDlbPEnmx77duMwdJ7e4,641
117
117
  langchain/agents/openai_assistant/__init__.py,sha256=Xssaqoxrix3hn1gKSOLmDRQzTxAoJk0ProGXmXQe8Mw,114
118
- langchain/agents/openai_assistant/base.py,sha256=pq-ttrSp6HWZA9qtw_yOCAvAhQWG90ZA_gt77omNKeM,30169
118
+ langchain/agents/openai_assistant/base.py,sha256=VW4CnD3IrQBnebzh11hEqEbIoNuAyiT6PumVKMJ2_Ok,30163
119
119
  langchain/agents/openai_functions_agent/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
120
120
  langchain/agents/openai_functions_agent/agent_token_buffer_memory.py,sha256=G5vrWDbv3oWojxafiW2qSae7Z7WUdZugI-ywjTP0zZ4,3790
121
121
  langchain/agents/openai_functions_agent/base.py,sha256=katIW0vE87B7ezm9WU_fEMfeHSQPHZptM0zppQfnY-4,13474
@@ -240,12 +240,12 @@ langchain/chains/conversational_retrieval/base.py,sha256=exiaFjIDLk9VoAf15qhMuBe
240
240
  langchain/chains/conversational_retrieval/prompts.py,sha256=kJITwauXq7dYKnSBoL2EcDTqAnJZlWF_GzJ9C55ZEv8,720
241
241
  langchain/chains/elasticsearch_database/__init__.py,sha256=B3Zxy8mxTb4bfMGHC__26BFkvT_6bPisS4rPIFiFWdU,126
242
242
  langchain/chains/elasticsearch_database/base.py,sha256=Rw6z9x---84WsVKP2L-YI-VehgP3VtI70kc0BfJv9Js,8248
243
- langchain/chains/elasticsearch_database/prompts.py,sha256=XTRDvnAMwGLlQh9vE0Ju8Nh39Ro7zjzZg13mY36pzNw,1425
243
+ langchain/chains/elasticsearch_database/prompts.py,sha256=N6X__jKt0yoA4kFfW-lXxJyP7Wsmef9AVDaxScql1yU,1421
244
244
  langchain/chains/ernie_functions/__init__.py,sha256=X_gOa8GIjyV6tAS32A1BLv6q08ufSms-tffwgtSyIDA,1514
245
245
  langchain/chains/ernie_functions/base.py,sha256=SGs_-yi0qa7cxgkiu2EsoYQF4_fKQUZkxncrp1KiMbU,1730
246
246
  langchain/chains/example_generator.py,sha256=QDY7l9hO-RkTZGMMhVUfbZRf__eacdMGOPQXP3Yshrg,757
247
247
  langchain/chains/flare/__init__.py,sha256=ufb8LMpEVUzTDflcNiJJyKCG9e4EVGAvz5e7h7f0Z1c,51
248
- langchain/chains/flare/base.py,sha256=TeOb9K7NrZS4F7ScrTpxc9-5E2vJ1vOAQA9odFwaQB8,8531
248
+ langchain/chains/flare/base.py,sha256=I-wyBjCe3j_mJs50DEWNaiCIFDrEhbIhyVwTPAyi8uU,9199
249
249
  langchain/chains/flare/prompts.py,sha256=6ypb3UrOwd4YFy1W8LjBwNVgZLYb-W1U1hme5IdPpDE,1471
250
250
  langchain/chains/graph_qa/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
251
251
  langchain/chains/graph_qa/arangodb.py,sha256=FdkfnDwKnmWinqYObKK-ZPDO_AFZr3PAiRKDEGJxK_A,669
@@ -264,7 +264,7 @@ langchain/chains/graph_qa/prompts.py,sha256=dqfI2CSw5xDR3SvIsFSxq2jwOFp-CcGF3WDj
264
264
  langchain/chains/graph_qa/sparql.py,sha256=wIAy-nymiftBnW3kExycpGOMyFveD1QBrETlfcnlyuE,665
265
265
  langchain/chains/history_aware_retriever.py,sha256=a92vlxlq0PaOubc_b4jj_WwGivk4Tyi1xzSBKaTOx4g,2662
266
266
  langchain/chains/hyde/__init__.py,sha256=mZ-cb7slBdlK5aG2R_NegBzNCXToHR-tdmfIIA6lKvQ,75
267
- langchain/chains/hyde/base.py,sha256=Rc5u4JD3M3CaVsK4PwAVF67ooklcz9H3Fjy5ySoJuyY,3619
267
+ langchain/chains/hyde/base.py,sha256=tiriie9bJVaE4XgBoP-FYhDLtvK1SlZpKR0ykJkNSCg,4350
268
268
  langchain/chains/hyde/prompts.py,sha256=U4LfozneOyHDIKd8rCbnGSQK84YvZqAtpf5EL435Ol8,1913
269
269
  langchain/chains/llm.py,sha256=tzLw3OLgBDsHwDNAHV5IP3avRSy8EfZhPnR6tFNJmes,15515
270
270
  langchain/chains/llm_bash/__init__.py,sha256=qvRpa5tj09akj4DLVZoKvWK8-oJrUxc5-7ooAP3mO18,453
@@ -337,7 +337,7 @@ langchain/chains/router/multi_retrieval_prompt.py,sha256=VUYGLWbwGiv03aSMW5sjdGN
337
337
  langchain/chains/router/multi_retrieval_qa.py,sha256=tjIhHEbOwtF3CLq0qQ8Kd78ao5BXRKZLsm9UlmHrdtQ,4254
338
338
  langchain/chains/sequential.py,sha256=a9i0IGsjji57oJg-1QHJqSVcbMpdyqasYPGaeG3OU5I,7499
339
339
  langchain/chains/sql_database/__init__.py,sha256=jQotWN4EWMD98Jk-f7rqh5YtbXbP9XXA0ypLGq8NgrM,47
340
- langchain/chains/sql_database/prompt.py,sha256=W0xFqVZ18PzxmutnIBJrocXus8_QBByrKtxg8CjGaYw,15458
340
+ langchain/chains/sql_database/prompt.py,sha256=q3C6BbmWtNYXWV-9qHnyux5trsM3fjlRLuYNPTlpdR4,15454
341
341
  langchain/chains/sql_database/query.py,sha256=h-QP5ESatTFj8t7sGsHppXSchy3ZGL1U1afza-Lo8fc,5421
342
342
  langchain/chains/structured_output/__init__.py,sha256=-6nFe-gznavFc3XCMv8XkEzuXoto2rI8Q-bcruVPOR8,204
343
343
  langchain/chains/structured_output/base.py,sha256=jsrF_WQe55gVhZzRGSY7DCetdR91IXdkItK_O_IhovA,25461
@@ -364,7 +364,7 @@ langchain/chat_models/azure_openai.py,sha256=aRNol2PNC49PmvdZnwjhQeMFRDOOelPNAXz
364
364
  langchain/chat_models/azureml_endpoint.py,sha256=6mxXm8UFXataLp0NYRGA88V3DpiNKPo095u_JGj7XGE,863
365
365
  langchain/chat_models/baichuan.py,sha256=3-GveFoF5ZNyLdRNK6V4i3EDDjdseOTFWbCMhDbtO9w,643
366
366
  langchain/chat_models/baidu_qianfan_endpoint.py,sha256=CZrX2SMpbE9H7wBXNC6rGvw-YqQl9zjuJrClYQxEzuI,715
367
- langchain/chat_models/base.py,sha256=--ERckyG_jME7_OKslfMvV_t58df8ce4iu4xtPWPUz4,34561
367
+ langchain/chat_models/base.py,sha256=YK84iOnFy64l_6yO1voeO-0toP5nxhIBaTDNQyV2Rgs,34568
368
368
  langchain/chat_models/bedrock.py,sha256=HRV3T_0mEnZ8LvJJqAA_UVpt-_03G715oIgomRJw55M,757
369
369
  langchain/chat_models/cohere.py,sha256=EYOECHX-nKRhZVfCfmFGZ2lr51PzaB5OvOEqmBCu1fI,633
370
370
  langchain/chat_models/databricks.py,sha256=5_QkC5lG4OldaHC2FS0XylirJouyZx1YT95SKwc12M0,653
@@ -639,7 +639,7 @@ langchain/evaluation/criteria/__init__.py,sha256=FE5qrrz5JwWXJWXCzdyNRevEPfmmfBf
639
639
  langchain/evaluation/criteria/eval_chain.py,sha256=JkBEsgNPymOT3OqTSveRAsIr2Sk1O1oWjJZ664t0BuM,21279
640
640
  langchain/evaluation/criteria/prompt.py,sha256=6OgXmdvlYVzRMeAxa1fYGIxqeNAz1NkFCZ6ezLgUnZM,1756
641
641
  langchain/evaluation/embedding_distance/__init__.py,sha256=YLtGUI4ZMxjsn2Q0dGZ-R9YMFgZsarfJv9qzNEnrLQs,324
642
- langchain/evaluation/embedding_distance/base.py,sha256=ra149EghJgnQi4k3n2yqA4zijnR7a712MUzVRObuoGg,17330
642
+ langchain/evaluation/embedding_distance/base.py,sha256=cO5ZdmCL7hhDFk0AAoq--qUuwUduSW0mO9uFh8FSvkQ,18944
643
643
  langchain/evaluation/exact_match/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
644
644
  langchain/evaluation/exact_match/base.py,sha256=BykyjgKQ94391eDODzn3m1RXao9ZSXtc9wiww_fysXI,2751
645
645
  langchain/evaluation/loading.py,sha256=vKg-AbszUMqsC9ptLr5C2SUgHbb3fSIvsI-mwxoUoxE,7371
@@ -879,7 +879,7 @@ langchain/retrievers/document_compressors/chain_filter_prompt.py,sha256=FTQRPiEs
879
879
  langchain/retrievers/document_compressors/cohere_rerank.py,sha256=7U35vqEdslr43q8H74CUzcDvbXuZqLnK8-MH8VrlKWo,4567
880
880
  langchain/retrievers/document_compressors/cross_encoder.py,sha256=_Z7SoPSfOUSk-rNIHX2lQgYV0TgVMKf3F9AnTH7EFiM,393
881
881
  langchain/retrievers/document_compressors/cross_encoder_rerank.py,sha256=ThgVrX8NeXFzE4eoftBoa1yz-sBJiDb-JISQa9Hep2k,1542
882
- langchain/retrievers/document_compressors/embeddings_filter.py,sha256=_04uA8wOw5Eb5rzlu-6rLqxi9u7kqeD8t4xd9VsB_PA,5217
882
+ langchain/retrievers/document_compressors/embeddings_filter.py,sha256=B1l7vNptrNAtWQENP3ZMW6lXyNZ7bu-gsnY4mP16QXw,5624
883
883
  langchain/retrievers/document_compressors/flashrank_rerank.py,sha256=Eo86fJ_T2IbEEeCkI_5rb3Ao4gsdenv-_Ukt33MuMko,709
884
884
  langchain/retrievers/document_compressors/listwise_rerank.py,sha256=i3dCqXBF27_sHPGxWOlCkVjt4s85QM0ikHZtPp2LpDs,5127
885
885
  langchain/retrievers/elastic_search_bm25.py,sha256=eRboOkRQj-_E53gUQIZzxQ1bX0-uEMv7LAQSD7K7Qf8,665
@@ -1339,4 +1339,4 @@ langchain/vectorstores/xata.py,sha256=HW_Oi5Hz8rH2JaUhRNWQ-3hLYmNzD8eAz6K5YqPArm
1339
1339
  langchain/vectorstores/yellowbrick.py,sha256=-lnjGcRE8Q1nEPOTdbKYTw5noS2cy2ce1ePOU804-_o,624
1340
1340
  langchain/vectorstores/zep.py,sha256=RJ2auxoA6uHHLEZknw3_jeFmYJYVt-PWKMBcNMGV6TM,798
1341
1341
  langchain/vectorstores/zilliz.py,sha256=XhPPIUfKPFJw0_svCoBgCnNkkBLoRVVcyuMfOnE5IxU,609
1342
- langchain-0.3.19.dist-info/RECORD,,
1342
+ langchain-0.3.20.dist-info/RECORD,,