langchain-core 0.4.0.dev0__py3-none-any.whl → 1.0.0__py3-none-any.whl

This diff shows the changes between two publicly released versions of this package, as they appear in their public registry. It is provided for informational purposes only.

Potentially problematic release.
Files changed (172)
  1. langchain_core/__init__.py +1 -1
  2. langchain_core/_api/__init__.py +3 -4
  3. langchain_core/_api/beta_decorator.py +45 -70
  4. langchain_core/_api/deprecation.py +80 -80
  5. langchain_core/_api/path.py +22 -8
  6. langchain_core/_import_utils.py +10 -4
  7. langchain_core/agents.py +25 -21
  8. langchain_core/caches.py +53 -63
  9. langchain_core/callbacks/__init__.py +1 -8
  10. langchain_core/callbacks/base.py +341 -348
  11. langchain_core/callbacks/file.py +55 -44
  12. langchain_core/callbacks/manager.py +546 -683
  13. langchain_core/callbacks/stdout.py +29 -30
  14. langchain_core/callbacks/streaming_stdout.py +35 -36
  15. langchain_core/callbacks/usage.py +65 -70
  16. langchain_core/chat_history.py +48 -55
  17. langchain_core/document_loaders/base.py +46 -21
  18. langchain_core/document_loaders/langsmith.py +39 -36
  19. langchain_core/documents/__init__.py +0 -1
  20. langchain_core/documents/base.py +96 -74
  21. langchain_core/documents/compressor.py +12 -9
  22. langchain_core/documents/transformers.py +29 -28
  23. langchain_core/embeddings/fake.py +56 -57
  24. langchain_core/env.py +2 -3
  25. langchain_core/example_selectors/base.py +12 -0
  26. langchain_core/example_selectors/length_based.py +1 -1
  27. langchain_core/example_selectors/semantic_similarity.py +21 -25
  28. langchain_core/exceptions.py +15 -9
  29. langchain_core/globals.py +4 -163
  30. langchain_core/indexing/api.py +132 -125
  31. langchain_core/indexing/base.py +64 -67
  32. langchain_core/indexing/in_memory.py +26 -6
  33. langchain_core/language_models/__init__.py +15 -27
  34. langchain_core/language_models/_utils.py +267 -117
  35. langchain_core/language_models/base.py +92 -177
  36. langchain_core/language_models/chat_models.py +547 -407
  37. langchain_core/language_models/fake.py +11 -11
  38. langchain_core/language_models/fake_chat_models.py +72 -118
  39. langchain_core/language_models/llms.py +168 -242
  40. langchain_core/load/dump.py +8 -11
  41. langchain_core/load/load.py +32 -28
  42. langchain_core/load/mapping.py +2 -4
  43. langchain_core/load/serializable.py +50 -56
  44. langchain_core/messages/__init__.py +36 -51
  45. langchain_core/messages/ai.py +377 -150
  46. langchain_core/messages/base.py +239 -47
  47. langchain_core/messages/block_translators/__init__.py +111 -0
  48. langchain_core/messages/block_translators/anthropic.py +470 -0
  49. langchain_core/messages/block_translators/bedrock.py +94 -0
  50. langchain_core/messages/block_translators/bedrock_converse.py +297 -0
  51. langchain_core/messages/block_translators/google_genai.py +530 -0
  52. langchain_core/messages/block_translators/google_vertexai.py +21 -0
  53. langchain_core/messages/block_translators/groq.py +143 -0
  54. langchain_core/messages/block_translators/langchain_v0.py +301 -0
  55. langchain_core/messages/block_translators/openai.py +1010 -0
  56. langchain_core/messages/chat.py +2 -3
  57. langchain_core/messages/content.py +1423 -0
  58. langchain_core/messages/function.py +7 -7
  59. langchain_core/messages/human.py +44 -38
  60. langchain_core/messages/modifier.py +3 -2
  61. langchain_core/messages/system.py +40 -27
  62. langchain_core/messages/tool.py +160 -58
  63. langchain_core/messages/utils.py +527 -638
  64. langchain_core/output_parsers/__init__.py +1 -14
  65. langchain_core/output_parsers/base.py +68 -104
  66. langchain_core/output_parsers/json.py +13 -17
  67. langchain_core/output_parsers/list.py +11 -33
  68. langchain_core/output_parsers/openai_functions.py +56 -74
  69. langchain_core/output_parsers/openai_tools.py +68 -109
  70. langchain_core/output_parsers/pydantic.py +15 -13
  71. langchain_core/output_parsers/string.py +6 -2
  72. langchain_core/output_parsers/transform.py +17 -60
  73. langchain_core/output_parsers/xml.py +34 -44
  74. langchain_core/outputs/__init__.py +1 -1
  75. langchain_core/outputs/chat_generation.py +26 -11
  76. langchain_core/outputs/chat_result.py +1 -3
  77. langchain_core/outputs/generation.py +17 -6
  78. langchain_core/outputs/llm_result.py +15 -8
  79. langchain_core/prompt_values.py +29 -123
  80. langchain_core/prompts/__init__.py +3 -27
  81. langchain_core/prompts/base.py +48 -63
  82. langchain_core/prompts/chat.py +259 -288
  83. langchain_core/prompts/dict.py +19 -11
  84. langchain_core/prompts/few_shot.py +84 -90
  85. langchain_core/prompts/few_shot_with_templates.py +14 -12
  86. langchain_core/prompts/image.py +19 -14
  87. langchain_core/prompts/loading.py +6 -8
  88. langchain_core/prompts/message.py +7 -8
  89. langchain_core/prompts/prompt.py +42 -43
  90. langchain_core/prompts/string.py +37 -16
  91. langchain_core/prompts/structured.py +43 -46
  92. langchain_core/rate_limiters.py +51 -60
  93. langchain_core/retrievers.py +52 -192
  94. langchain_core/runnables/base.py +1727 -1683
  95. langchain_core/runnables/branch.py +52 -73
  96. langchain_core/runnables/config.py +89 -103
  97. langchain_core/runnables/configurable.py +128 -130
  98. langchain_core/runnables/fallbacks.py +93 -82
  99. langchain_core/runnables/graph.py +127 -127
  100. langchain_core/runnables/graph_ascii.py +63 -41
  101. langchain_core/runnables/graph_mermaid.py +87 -70
  102. langchain_core/runnables/graph_png.py +31 -36
  103. langchain_core/runnables/history.py +145 -161
  104. langchain_core/runnables/passthrough.py +141 -144
  105. langchain_core/runnables/retry.py +84 -68
  106. langchain_core/runnables/router.py +33 -37
  107. langchain_core/runnables/schema.py +79 -72
  108. langchain_core/runnables/utils.py +95 -139
  109. langchain_core/stores.py +85 -131
  110. langchain_core/structured_query.py +11 -15
  111. langchain_core/sys_info.py +31 -32
  112. langchain_core/tools/__init__.py +1 -14
  113. langchain_core/tools/base.py +221 -247
  114. langchain_core/tools/convert.py +144 -161
  115. langchain_core/tools/render.py +10 -10
  116. langchain_core/tools/retriever.py +12 -19
  117. langchain_core/tools/simple.py +52 -29
  118. langchain_core/tools/structured.py +56 -60
  119. langchain_core/tracers/__init__.py +1 -9
  120. langchain_core/tracers/_streaming.py +6 -7
  121. langchain_core/tracers/base.py +103 -112
  122. langchain_core/tracers/context.py +29 -48
  123. langchain_core/tracers/core.py +142 -105
  124. langchain_core/tracers/evaluation.py +30 -34
  125. langchain_core/tracers/event_stream.py +162 -117
  126. langchain_core/tracers/langchain.py +34 -36
  127. langchain_core/tracers/log_stream.py +87 -49
  128. langchain_core/tracers/memory_stream.py +3 -3
  129. langchain_core/tracers/root_listeners.py +18 -34
  130. langchain_core/tracers/run_collector.py +8 -20
  131. langchain_core/tracers/schemas.py +0 -125
  132. langchain_core/tracers/stdout.py +3 -3
  133. langchain_core/utils/__init__.py +1 -4
  134. langchain_core/utils/_merge.py +47 -9
  135. langchain_core/utils/aiter.py +70 -66
  136. langchain_core/utils/env.py +12 -9
  137. langchain_core/utils/function_calling.py +139 -206
  138. langchain_core/utils/html.py +7 -8
  139. langchain_core/utils/input.py +6 -6
  140. langchain_core/utils/interactive_env.py +6 -2
  141. langchain_core/utils/iter.py +48 -45
  142. langchain_core/utils/json.py +14 -4
  143. langchain_core/utils/json_schema.py +159 -43
  144. langchain_core/utils/mustache.py +32 -25
  145. langchain_core/utils/pydantic.py +67 -40
  146. langchain_core/utils/strings.py +5 -5
  147. langchain_core/utils/usage.py +1 -1
  148. langchain_core/utils/utils.py +104 -62
  149. langchain_core/vectorstores/base.py +131 -179
  150. langchain_core/vectorstores/in_memory.py +113 -182
  151. langchain_core/vectorstores/utils.py +23 -17
  152. langchain_core/version.py +1 -1
  153. langchain_core-1.0.0.dist-info/METADATA +68 -0
  154. langchain_core-1.0.0.dist-info/RECORD +172 -0
  155. {langchain_core-0.4.0.dev0.dist-info → langchain_core-1.0.0.dist-info}/WHEEL +1 -1
  156. langchain_core/beta/__init__.py +0 -1
  157. langchain_core/beta/runnables/__init__.py +0 -1
  158. langchain_core/beta/runnables/context.py +0 -448
  159. langchain_core/memory.py +0 -116
  160. langchain_core/messages/content_blocks.py +0 -1435
  161. langchain_core/prompts/pipeline.py +0 -133
  162. langchain_core/pydantic_v1/__init__.py +0 -30
  163. langchain_core/pydantic_v1/dataclasses.py +0 -23
  164. langchain_core/pydantic_v1/main.py +0 -23
  165. langchain_core/tracers/langchain_v1.py +0 -23
  166. langchain_core/utils/loading.py +0 -31
  167. langchain_core/v1/__init__.py +0 -1
  168. langchain_core/v1/chat_models.py +0 -1047
  169. langchain_core/v1/messages.py +0 -755
  170. langchain_core-0.4.0.dev0.dist-info/METADATA +0 -108
  171. langchain_core-0.4.0.dev0.dist-info/RECORD +0 -177
  172. langchain_core-0.4.0.dev0.dist-info/entry_points.txt +0 -4
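
The deletions at the bottom of the list remove, among other things, the long-deprecated `langchain_core.pydantic_v1` compatibility shim. A minimal migration sketch for downstream code that still imported the shim; the `Ticket` model is hypothetical, shown only to illustrate the import change:

```python
# Before (0.x): imports went through the langchain_core.pydantic_v1 shim.
# from langchain_core.pydantic_v1 import BaseModel, Field

# After (1.0): import directly from pydantic v2. The Ticket model is
# illustrative only, not part of the package.
from pydantic import BaseModel, Field


class Ticket(BaseModel):
    title: str = Field(description="Short summary of the issue")
    priority: int = Field(default=1, ge=1, le=5)


print(Ticket(title="Broken import").model_dump())
```
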
langchain_core/documents/transformers.py CHANGED
@@ -20,34 +20,35 @@ class BaseDocumentTransformer(ABC):
         sequence of transformed Documents.
 
     Example:
-        .. code-block:: python
-
-            class EmbeddingsRedundantFilter(BaseDocumentTransformer, BaseModel):
-                embeddings: Embeddings
-                similarity_fn: Callable = cosine_similarity
-                similarity_threshold: float = 0.95
-
-                class Config:
-                    arbitrary_types_allowed = True
-
-                def transform_documents(
-                    self, documents: Sequence[Document], **kwargs: Any
-                ) -> Sequence[Document]:
-                    stateful_documents = get_stateful_documents(documents)
-                    embedded_documents = _get_embeddings_from_stateful_docs(
-                        self.embeddings, stateful_documents
-                    )
-                    included_idxs = _filter_similar_embeddings(
-                        embedded_documents, self.similarity_fn, self.similarity_threshold
-                    )
-                    return [stateful_documents[i] for i in sorted(included_idxs)]
-
-                async def atransform_documents(
-                    self, documents: Sequence[Document], **kwargs: Any
-                ) -> Sequence[Document]:
-                    raise NotImplementedError
-
-    """  # noqa: E501
+        ```python
+        class EmbeddingsRedundantFilter(BaseDocumentTransformer, BaseModel):
+            embeddings: Embeddings
+            similarity_fn: Callable = cosine_similarity
+            similarity_threshold: float = 0.95
+
+            class Config:
+                arbitrary_types_allowed = True
+
+            def transform_documents(
+                self, documents: Sequence[Document], **kwargs: Any
+            ) -> Sequence[Document]:
+                stateful_documents = get_stateful_documents(documents)
+                embedded_documents = _get_embeddings_from_stateful_docs(
+                    self.embeddings, stateful_documents
+                )
+                included_idxs = _filter_similar_embeddings(
+                    embedded_documents,
+                    self.similarity_fn,
+                    self.similarity_threshold,
+                )
+                return [stateful_documents[i] for i in sorted(included_idxs)]
+
+            async def atransform_documents(
+                self, documents: Sequence[Document], **kwargs: Any
+            ) -> Sequence[Document]:
+                raise NotImplementedError
+        ```
+    """
 
     @abstractmethod
     def transform_documents(
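
The docstring example above depends on helpers that are not defined in the hunk (`get_stateful_documents` and friends). Here is a self-contained sketch of the same interface, assuming only that langchain-core is installed and that `atransform_documents` keeps its default executor-based implementation; `DropEmptyDocuments` is hypothetical:

```python
# Minimal BaseDocumentTransformer subclass: only the synchronous
# transform_documents should be required, assuming the async variant
# falls back to running it in an executor. DropEmptyDocuments is an
# illustrative class, not part of the package.
from collections.abc import Sequence
from typing import Any

from langchain_core.documents import Document
from langchain_core.documents.transformers import BaseDocumentTransformer


class DropEmptyDocuments(BaseDocumentTransformer):
    """Drop documents whose page_content is empty or whitespace."""

    def transform_documents(
        self, documents: Sequence[Document], **kwargs: Any
    ) -> Sequence[Document]:
        return [doc for doc in documents if doc.page_content.strip()]


docs = [Document(page_content="hello"), Document(page_content="   ")]
print(DropEmptyDocuments().transform_documents(docs))  # keeps only "hello"
```
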
langchain_core/embeddings/fake.py CHANGED
@@ -1,6 +1,7 @@
 """Module contains a few fake embedding models for testing purposes."""
 
 # Please do not add additional fake embedding model implementations here.
+import contextlib
 import hashlib
 
 from pydantic import BaseModel
@@ -8,53 +9,53 @@ from typing_extensions import override
 
 from langchain_core.embeddings import Embeddings
 
+with contextlib.suppress(ImportError):
+    import numpy as np
+
 
 class FakeEmbeddings(Embeddings, BaseModel):
     """Fake embedding model for unit testing purposes.
 
     This embedding model creates embeddings by sampling from a normal distribution.
 
-    Do not use this outside of testing, as it is not a real embedding model.
+    !!! warning
+        Do not use this outside of testing, as it is not a real embedding model.
 
     Instantiate:
-        .. code-block:: python
+        ```python
+        from langchain_core.embeddings import FakeEmbeddings
 
-            from langchain_core.embeddings import FakeEmbeddings
-            embed = FakeEmbeddings(size=100)
+        embed = FakeEmbeddings(size=100)
+        ```
 
     Embed single text:
-        .. code-block:: python
-
-            input_text = "The meaning of life is 42"
-            vector = embed.embed_query(input_text)
-            print(vector[:3])
-
-        .. code-block:: python
-
-            [-0.700234640213188, -0.581266257710429, -1.1328482266445354]
+        ```python
+        input_text = "The meaning of life is 42"
+        vector = embed.embed_query(input_text)
+        print(vector[:3])
+        ```
+        ```python
+        [-0.700234640213188, -0.581266257710429, -1.1328482266445354]
+        ```
 
     Embed multiple texts:
-        .. code-block:: python
-
-            input_texts = ["Document 1...", "Document 2..."]
-            vectors = embed.embed_documents(input_texts)
-            print(len(vectors))
-            # The first 3 coordinates for the first vector
-            print(vectors[0][:3])
-
-        .. code-block:: python
-
-            2
-            [-0.5670477847544458, -0.31403828652395727, -0.5840547508955257]
-
+        ```python
+        input_texts = ["Document 1...", "Document 2..."]
+        vectors = embed.embed_documents(input_texts)
+        print(len(vectors))
+        # The first 3 coordinates for the first vector
+        print(vectors[0][:3])
+        ```
+        ```python
+        2
+        [-0.5670477847544458, -0.31403828652395727, -0.5840547508955257]
+        ```
     """
 
     size: int
     """The size of the embedding vector."""
 
     def _get_embedding(self) -> list[float]:
-        import numpy as np
-
         return list(np.random.default_rng().normal(size=self.size))
 
     @override
@@ -72,52 +73,50 @@ class DeterministicFakeEmbedding(Embeddings, BaseModel):
     This embedding model creates embeddings by sampling from a normal distribution
     with a seed based on the hash of the text.
 
-    Do not use this outside of testing, as it is not a real embedding model.
+    !!! warning
+        Do not use this outside of testing, as it is not a real embedding model.
 
     Instantiate:
-        .. code-block:: python
+        ```python
+        from langchain_core.embeddings import DeterministicFakeEmbedding
 
-            from langchain_core.embeddings import DeterministicFakeEmbedding
-            embed = DeterministicFakeEmbedding(size=100)
+        embed = DeterministicFakeEmbedding(size=100)
+        ```
 
     Embed single text:
-        .. code-block:: python
-
-            input_text = "The meaning of life is 42"
-            vector = embed.embed_query(input_text)
-            print(vector[:3])
-
-        .. code-block:: python
-
-            [-0.700234640213188, -0.581266257710429, -1.1328482266445354]
+        ```python
+        input_text = "The meaning of life is 42"
+        vector = embed.embed_query(input_text)
+        print(vector[:3])
+        ```
+        ```python
+        [-0.700234640213188, -0.581266257710429, -1.1328482266445354]
+        ```
 
     Embed multiple texts:
-        .. code-block:: python
-
-            input_texts = ["Document 1...", "Document 2..."]
-            vectors = embed.embed_documents(input_texts)
-            print(len(vectors))
-            # The first 3 coordinates for the first vector
-            print(vectors[0][:3])
-
-        .. code-block:: python
-
-            2
-            [-0.5670477847544458, -0.31403828652395727, -0.5840547508955257]
-
+        ```python
+        input_texts = ["Document 1...", "Document 2..."]
+        vectors = embed.embed_documents(input_texts)
+        print(len(vectors))
+        # The first 3 coordinates for the first vector
+        print(vectors[0][:3])
+        ```
+        ```python
+        2
+        [-0.5670477847544458, -0.31403828652395727, -0.5840547508955257]
+        ```
     """
 
     size: int
    """The size of the embedding vector."""
 
     def _get_embedding(self, seed: int) -> list[float]:
-        import numpy as np
-
         # set the seed for the random generator
         rng = np.random.default_rng(seed)
         return list(rng.normal(size=self.size))
 
-    def _get_seed(self, text: str) -> int:
+    @staticmethod
+    def _get_seed(text: str) -> int:
         """Get a seed for the random generator, using the hash of the text."""
         return int(hashlib.sha256(text.encode("utf-8")).hexdigest(), 16) % 10**8
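
The `contextlib.suppress(ImportError)` guard above replaces the per-method lazy imports: numpy is now imported once at module load, and its absence only surfaces when an embedding is actually requested. A standalone sketch of the pattern; the `sample_embedding` helper is illustrative, not part of the package:

```python
import contextlib

# Optional dependency: if numpy is missing, the module still imports,
# and the failure is deferred to first use.
with contextlib.suppress(ImportError):
    import numpy as np


def sample_embedding(size: int) -> list[float]:
    if "np" not in globals():
        msg = "numpy is required to generate sample embeddings"
        raise ImportError(msg)
    return list(np.random.default_rng().normal(size=size))


print(len(sample_embedding(8)))  # 8, when numpy is installed
```
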
 
langchain_core/env.py CHANGED
@@ -3,6 +3,8 @@
 import platform
 from functools import lru_cache
 
+from langchain_core import __version__
+
 
 @lru_cache(maxsize=1)
 def get_runtime_environment() -> dict:
@@ -11,9 +13,6 @@ def get_runtime_environment() -> dict:
     Returns:
         A dictionary with information about the runtime environment.
     """
-    # Lazy import to avoid circular imports
-    from langchain_core import __version__
-
     return {
         "library_version": __version__,
         "library": "langchain-core",
langchain_core/example_selectors/base.py CHANGED
@@ -16,6 +16,9 @@ class BaseExampleSelector(ABC):
         Args:
             example: A dictionary with keys as input variables
                 and values as their values.
+
+        Returns:
+            Any return value.
         """
 
     async def aadd_example(self, example: dict[str, str]) -> Any:
@@ -24,6 +27,9 @@ class BaseExampleSelector(ABC):
         Args:
             example: A dictionary with keys as input variables
                 and values as their values.
+
+        Returns:
+            Any return value.
         """
         return await run_in_executor(None, self.add_example, example)
 
@@ -34,6 +40,9 @@ class BaseExampleSelector(ABC):
         Args:
             input_variables: A dictionary with keys as input variables
                 and values as their values.
+
+        Returns:
+            A list of examples.
         """
 
     async def aselect_examples(self, input_variables: dict[str, str]) -> list[dict]:
@@ -42,5 +51,8 @@ class BaseExampleSelector(ABC):
         Args:
             input_variables: A dictionary with keys as input variables
                 and values as their values.
+
+        Returns:
+            A list of examples.
         """
         return await run_in_executor(None, self.select_examples, input_variables)
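
To make the documented contract concrete, a minimal custom selector sketch, assuming langchain-core is installed; `FirstKExampleSelector` is hypothetical. Only `add_example` and `select_examples` need to be implemented, since the async variants fall back to `run_in_executor`:

```python
from langchain_core.example_selectors import BaseExampleSelector


class FirstKExampleSelector(BaseExampleSelector):
    """Illustrative selector that returns the first k stored examples."""

    def __init__(self, k: int = 2) -> None:
        self.k = k
        self.examples: list[dict[str, str]] = []

    def add_example(self, example: dict[str, str]) -> None:
        self.examples.append(example)

    def select_examples(self, input_variables: dict[str, str]) -> list[dict]:
        # Ignore the input and return the first k stored examples.
        return self.examples[: self.k]


selector = FirstKExampleSelector(k=1)
selector.add_example({"input": "hi", "output": "hello"})
print(selector.select_examples({"input": "anything"}))
```
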
langchain_core/example_selectors/length_based.py CHANGED
@@ -1,7 +1,7 @@
 """Select examples based on length."""
 
 import re
-from typing import Callable
+from collections.abc import Callable
 
 from pydantic import BaseModel, Field, model_validator
 from typing_extensions import Self
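
This one-line change is part of a release-wide typing cleanup that is also visible in the next two files: `typing.Callable` moves to `collections.abc.Callable`, and `Optional[X]` becomes `X | None`. A sketch of the equivalence; both functions behave identically at runtime, and the names are illustrative:

```python
from collections.abc import Callable  # preferred over typing.Callable (PEP 585)
from typing import Optional


def old_style(fn: Optional[Callable[[str], str]] = None) -> Optional[str]:
    return fn("x") if fn else None


def new_style(fn: Callable[[str], str] | None = None) -> str | None:
    # PEP 604 union syntax; requires Python 3.10+ at runtime.
    return fn("x") if fn else None


print(old_style(str.upper), new_style(str.upper))  # X X
```
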
langchain_core/example_selectors/semantic_similarity.py CHANGED
@@ -3,7 +3,7 @@
 from __future__ import annotations
 
 from abc import ABC
-from typing import TYPE_CHECKING, Any, Optional
+from typing import TYPE_CHECKING, Any
 
 from pydantic import BaseModel, ConfigDict
 
@@ -35,12 +35,12 @@ class _VectorStoreExampleSelector(BaseExampleSelector, BaseModel, ABC):
     """VectorStore that contains information about examples."""
     k: int = 4
     """Number of examples to select."""
-    example_keys: Optional[list[str]] = None
+    example_keys: list[str] | None = None
     """Optional keys to filter examples to."""
-    input_keys: Optional[list[str]] = None
+    input_keys: list[str] | None = None
     """Optional keys to filter input to. If provided, the search is based on
     the input variables instead of all variables."""
-    vectorstore_kwargs: Optional[dict[str, Any]] = None
+    vectorstore_kwargs: dict[str, Any] | None = None
     """Extra arguments passed to similarity_search function of the vectorstore."""
 
     model_config = ConfigDict(
@@ -49,9 +49,7 @@ class _VectorStoreExampleSelector(BaseExampleSelector, BaseModel, ABC):
     )
 
     @staticmethod
-    def _example_to_text(
-        example: dict[str, str], input_keys: Optional[list[str]]
-    ) -> str:
+    def _example_to_text(example: dict[str, str], input_keys: list[str] | None) -> str:
         if input_keys:
             return " ".join(sorted_values({key: example[key] for key in input_keys}))
         return " ".join(sorted_values(example))
@@ -142,10 +140,10 @@ class SemanticSimilarityExampleSelector(_VectorStoreExampleSelector):
         embeddings: Embeddings,
         vectorstore_cls: type[VectorStore],
         k: int = 4,
-        input_keys: Optional[list[str]] = None,
+        input_keys: list[str] | None = None,
         *,
-        example_keys: Optional[list[str]] = None,
-        vectorstore_kwargs: Optional[dict] = None,
+        example_keys: list[str] | None = None,
+        vectorstore_kwargs: dict | None = None,
         **vectorstore_cls_kwargs: Any,
     ) -> SemanticSimilarityExampleSelector:
         """Create k-shot example selector using example list and embeddings.
@@ -156,7 +154,7 @@ class SemanticSimilarityExampleSelector(_VectorStoreExampleSelector):
             examples: List of examples to use in the prompt.
             embeddings: An initialized embedding API interface, e.g. OpenAIEmbeddings().
             vectorstore_cls: A vector store DB interface class, e.g. FAISS.
-            k: Number of examples to select. Default is 4.
+            k: Number of examples to select.
             input_keys: If provided, the search is based on the input variables
                 instead of all variables.
             example_keys: If provided, keys to filter examples to.
@@ -186,10 +184,10 @@ class SemanticSimilarityExampleSelector(_VectorStoreExampleSelector):
         embeddings: Embeddings,
         vectorstore_cls: type[VectorStore],
         k: int = 4,
-        input_keys: Optional[list[str]] = None,
+        input_keys: list[str] | None = None,
         *,
-        example_keys: Optional[list[str]] = None,
-        vectorstore_kwargs: Optional[dict] = None,
+        example_keys: list[str] | None = None,
+        vectorstore_kwargs: dict | None = None,
         **vectorstore_cls_kwargs: Any,
     ) -> SemanticSimilarityExampleSelector:
         """Async create k-shot example selector using example list and embeddings.
@@ -200,7 +198,7 @@ class SemanticSimilarityExampleSelector(_VectorStoreExampleSelector):
             examples: List of examples to use in the prompt.
             embeddings: An initialized embedding API interface, e.g. OpenAIEmbeddings().
             vectorstore_cls: A vector store DB interface class, e.g. FAISS.
-            k: Number of examples to select. Default is 4.
+            k: Number of examples to select.
             input_keys: If provided, the search is based on the input variables
                 instead of all variables.
             example_keys: If provided, keys to filter examples to.
@@ -273,10 +271,10 @@ class MaxMarginalRelevanceExampleSelector(_VectorStoreExampleSelector):
         embeddings: Embeddings,
         vectorstore_cls: type[VectorStore],
         k: int = 4,
-        input_keys: Optional[list[str]] = None,
+        input_keys: list[str] | None = None,
         fetch_k: int = 20,
-        example_keys: Optional[list[str]] = None,
-        vectorstore_kwargs: Optional[dict] = None,
+        example_keys: list[str] | None = None,
+        vectorstore_kwargs: dict | None = None,
         **vectorstore_cls_kwargs: Any,
     ) -> MaxMarginalRelevanceExampleSelector:
         """Create k-shot example selector using example list and embeddings.
@@ -287,9 +285,8 @@ class MaxMarginalRelevanceExampleSelector(_VectorStoreExampleSelector):
             examples: List of examples to use in the prompt.
             embeddings: An initialized embedding API interface, e.g. OpenAIEmbeddings().
             vectorstore_cls: A vector store DB interface class, e.g. FAISS.
-            k: Number of examples to select. Default is 4.
+            k: Number of examples to select.
             fetch_k: Number of Documents to fetch to pass to MMR algorithm.
-                Default is 20.
             input_keys: If provided, the search is based on the input variables
                 instead of all variables.
             example_keys: If provided, keys to filter examples to.
@@ -321,10 +318,10 @@ class MaxMarginalRelevanceExampleSelector(_VectorStoreExampleSelector):
         vectorstore_cls: type[VectorStore],
         *,
         k: int = 4,
-        input_keys: Optional[list[str]] = None,
+        input_keys: list[str] | None = None,
         fetch_k: int = 20,
-        example_keys: Optional[list[str]] = None,
-        vectorstore_kwargs: Optional[dict] = None,
+        example_keys: list[str] | None = None,
+        vectorstore_kwargs: dict | None = None,
         **vectorstore_cls_kwargs: Any,
     ) -> MaxMarginalRelevanceExampleSelector:
         """Create k-shot example selector using example list and embeddings.
@@ -335,9 +332,8 @@ class MaxMarginalRelevanceExampleSelector(_VectorStoreExampleSelector):
             examples: List of examples to use in the prompt.
             embeddings: An initialized embedding API interface, e.g. OpenAIEmbeddings().
             vectorstore_cls: A vector store DB interface class, e.g. FAISS.
-            k: Number of examples to select. Default is 4.
+            k: Number of examples to select.
             fetch_k: Number of Documents to fetch to pass to MMR algorithm.
-                Default is 20.
             input_keys: If provided, the search is based on the input variables
                 instead of all variables.
             example_keys: If provided, keys to filter examples to.
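
A usage sketch of the `from_examples` signature shown above, wired to the package's own fakes so it runs without external services; it assumes langchain-core and numpy are installed, and the example data is illustrative:

```python
from langchain_core.embeddings import DeterministicFakeEmbedding
from langchain_core.example_selectors import SemanticSimilarityExampleSelector
from langchain_core.vectorstores import InMemoryVectorStore

examples = [
    {"input": "happy", "output": "sad"},
    {"input": "tall", "output": "short"},
]

# k=1 selects the single most similar stored example for a given input.
selector = SemanticSimilarityExampleSelector.from_examples(
    examples,
    DeterministicFakeEmbedding(size=16),
    InMemoryVectorStore,
    k=1,
)
print(selector.select_examples({"input": "cheerful"}))
```
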
langchain_core/exceptions.py CHANGED
@@ -1,7 +1,7 @@
 """Custom **exceptions** for LangChain."""
 
 from enum import Enum
-from typing import Any, Optional
+from typing import Any
 
 
 class LangChainException(Exception):  # noqa: N818
@@ -16,7 +16,7 @@ class OutputParserException(ValueError, LangChainException):  # noqa: N818
     """Exception that output parsers should raise to signify a parsing error.
 
     This exists to differentiate parsing errors from other code or execution errors
-    that also may arise inside the output parser. OutputParserExceptions will be
+    that also may arise inside the output parser. `OutputParserException` will be
     available to catch and handle in ways to fix the parsing error, while other
     errors will be raised.
     """
@@ -24,24 +24,27 @@ class OutputParserException(ValueError, LangChainException):  # noqa: N818
     def __init__(
         self,
         error: Any,
-        observation: Optional[str] = None,
-        llm_output: Optional[str] = None,
+        observation: str | None = None,
+        llm_output: str | None = None,
         send_to_llm: bool = False,  # noqa: FBT001,FBT002
     ):
-        """Create an OutputParserException.
+        """Create an `OutputParserException`.
 
         Args:
             error: The error that's being re-raised or an error message.
             observation: String explanation of error which can be passed to a
-                model to try and remediate the issue. Defaults to None.
+                model to try and remediate the issue.
             llm_output: String model output which is error-ing.
-                Defaults to None.
+
             send_to_llm: Whether to send the observation and llm_output back to an Agent
-                after an OutputParserException has been raised.
+                after an `OutputParserException` has been raised.
                 This gives the underlying model driving the agent the context that the
                 previous output was improperly structured, in the hopes that it will
                 update the output to the correct format.
-                Defaults to False.
+
+        Raises:
+            ValueError: If `send_to_llm` is True but either observation or
+                `llm_output` are not provided.
         """
         if isinstance(error, str):
             error = create_message(
@@ -77,6 +80,9 @@ def create_message(*, message: str, error_code: ErrorCode) -> str:
     Args:
         message: The message to display.
         error_code: The error code to display.
+
+    Returns:
+        The full message with the troubleshooting link.
     """
     return (
         f"{message}\n"