langchain-core 1.0.0a6__py3-none-any.whl → 1.0.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (165)
  1. langchain_core/__init__.py +1 -1
  2. langchain_core/_api/__init__.py +3 -4
  3. langchain_core/_api/beta_decorator.py +23 -26
  4. langchain_core/_api/deprecation.py +51 -64
  5. langchain_core/_api/path.py +3 -6
  6. langchain_core/_import_utils.py +3 -4
  7. langchain_core/agents.py +20 -22
  8. langchain_core/caches.py +65 -66
  9. langchain_core/callbacks/__init__.py +1 -8
  10. langchain_core/callbacks/base.py +321 -336
  11. langchain_core/callbacks/file.py +44 -44
  12. langchain_core/callbacks/manager.py +436 -513
  13. langchain_core/callbacks/stdout.py +29 -30
  14. langchain_core/callbacks/streaming_stdout.py +32 -32
  15. langchain_core/callbacks/usage.py +60 -57
  16. langchain_core/chat_history.py +53 -68
  17. langchain_core/document_loaders/base.py +27 -25
  18. langchain_core/document_loaders/blob_loaders.py +1 -1
  19. langchain_core/document_loaders/langsmith.py +44 -48
  20. langchain_core/documents/__init__.py +23 -3
  21. langchain_core/documents/base.py +98 -90
  22. langchain_core/documents/compressor.py +10 -10
  23. langchain_core/documents/transformers.py +34 -35
  24. langchain_core/embeddings/fake.py +50 -54
  25. langchain_core/example_selectors/length_based.py +1 -1
  26. langchain_core/example_selectors/semantic_similarity.py +28 -32
  27. langchain_core/exceptions.py +21 -20
  28. langchain_core/globals.py +3 -151
  29. langchain_core/indexing/__init__.py +1 -1
  30. langchain_core/indexing/api.py +121 -126
  31. langchain_core/indexing/base.py +73 -75
  32. langchain_core/indexing/in_memory.py +4 -6
  33. langchain_core/language_models/__init__.py +14 -29
  34. langchain_core/language_models/_utils.py +58 -61
  35. langchain_core/language_models/base.py +53 -162
  36. langchain_core/language_models/chat_models.py +298 -387
  37. langchain_core/language_models/fake.py +11 -11
  38. langchain_core/language_models/fake_chat_models.py +42 -36
  39. langchain_core/language_models/llms.py +125 -235
  40. langchain_core/load/dump.py +9 -12
  41. langchain_core/load/load.py +18 -28
  42. langchain_core/load/mapping.py +2 -4
  43. langchain_core/load/serializable.py +42 -40
  44. langchain_core/messages/__init__.py +10 -16
  45. langchain_core/messages/ai.py +148 -148
  46. langchain_core/messages/base.py +53 -51
  47. langchain_core/messages/block_translators/__init__.py +19 -22
  48. langchain_core/messages/block_translators/anthropic.py +6 -6
  49. langchain_core/messages/block_translators/bedrock_converse.py +5 -5
  50. langchain_core/messages/block_translators/google_genai.py +10 -7
  51. langchain_core/messages/block_translators/google_vertexai.py +4 -32
  52. langchain_core/messages/block_translators/groq.py +117 -21
  53. langchain_core/messages/block_translators/langchain_v0.py +5 -5
  54. langchain_core/messages/block_translators/openai.py +11 -11
  55. langchain_core/messages/chat.py +2 -6
  56. langchain_core/messages/content.py +337 -328
  57. langchain_core/messages/function.py +6 -10
  58. langchain_core/messages/human.py +24 -31
  59. langchain_core/messages/modifier.py +2 -2
  60. langchain_core/messages/system.py +19 -29
  61. langchain_core/messages/tool.py +74 -90
  62. langchain_core/messages/utils.py +474 -504
  63. langchain_core/output_parsers/__init__.py +13 -10
  64. langchain_core/output_parsers/base.py +61 -61
  65. langchain_core/output_parsers/format_instructions.py +9 -4
  66. langchain_core/output_parsers/json.py +12 -10
  67. langchain_core/output_parsers/list.py +21 -23
  68. langchain_core/output_parsers/openai_functions.py +49 -47
  69. langchain_core/output_parsers/openai_tools.py +16 -21
  70. langchain_core/output_parsers/pydantic.py +13 -14
  71. langchain_core/output_parsers/string.py +5 -5
  72. langchain_core/output_parsers/transform.py +15 -17
  73. langchain_core/output_parsers/xml.py +35 -34
  74. langchain_core/outputs/__init__.py +1 -1
  75. langchain_core/outputs/chat_generation.py +18 -18
  76. langchain_core/outputs/chat_result.py +1 -3
  77. langchain_core/outputs/generation.py +10 -11
  78. langchain_core/outputs/llm_result.py +10 -10
  79. langchain_core/prompt_values.py +11 -17
  80. langchain_core/prompts/__init__.py +3 -27
  81. langchain_core/prompts/base.py +48 -56
  82. langchain_core/prompts/chat.py +275 -325
  83. langchain_core/prompts/dict.py +5 -5
  84. langchain_core/prompts/few_shot.py +81 -88
  85. langchain_core/prompts/few_shot_with_templates.py +11 -13
  86. langchain_core/prompts/image.py +12 -14
  87. langchain_core/prompts/loading.py +4 -6
  88. langchain_core/prompts/message.py +3 -3
  89. langchain_core/prompts/prompt.py +24 -39
  90. langchain_core/prompts/string.py +26 -10
  91. langchain_core/prompts/structured.py +49 -53
  92. langchain_core/rate_limiters.py +51 -60
  93. langchain_core/retrievers.py +61 -198
  94. langchain_core/runnables/base.py +1476 -1626
  95. langchain_core/runnables/branch.py +53 -57
  96. langchain_core/runnables/config.py +72 -89
  97. langchain_core/runnables/configurable.py +120 -137
  98. langchain_core/runnables/fallbacks.py +83 -79
  99. langchain_core/runnables/graph.py +91 -97
  100. langchain_core/runnables/graph_ascii.py +27 -28
  101. langchain_core/runnables/graph_mermaid.py +38 -50
  102. langchain_core/runnables/graph_png.py +15 -16
  103. langchain_core/runnables/history.py +135 -148
  104. langchain_core/runnables/passthrough.py +124 -150
  105. langchain_core/runnables/retry.py +46 -51
  106. langchain_core/runnables/router.py +25 -30
  107. langchain_core/runnables/schema.py +75 -80
  108. langchain_core/runnables/utils.py +60 -67
  109. langchain_core/stores.py +85 -121
  110. langchain_core/structured_query.py +8 -8
  111. langchain_core/sys_info.py +27 -29
  112. langchain_core/tools/__init__.py +1 -14
  113. langchain_core/tools/base.py +284 -229
  114. langchain_core/tools/convert.py +160 -155
  115. langchain_core/tools/render.py +10 -10
  116. langchain_core/tools/retriever.py +12 -11
  117. langchain_core/tools/simple.py +19 -24
  118. langchain_core/tools/structured.py +32 -39
  119. langchain_core/tracers/__init__.py +1 -9
  120. langchain_core/tracers/base.py +97 -99
  121. langchain_core/tracers/context.py +29 -52
  122. langchain_core/tracers/core.py +49 -53
  123. langchain_core/tracers/evaluation.py +11 -11
  124. langchain_core/tracers/event_stream.py +65 -64
  125. langchain_core/tracers/langchain.py +21 -21
  126. langchain_core/tracers/log_stream.py +45 -45
  127. langchain_core/tracers/memory_stream.py +3 -3
  128. langchain_core/tracers/root_listeners.py +16 -16
  129. langchain_core/tracers/run_collector.py +2 -4
  130. langchain_core/tracers/schemas.py +0 -129
  131. langchain_core/tracers/stdout.py +3 -3
  132. langchain_core/utils/__init__.py +1 -4
  133. langchain_core/utils/_merge.py +2 -2
  134. langchain_core/utils/aiter.py +57 -61
  135. langchain_core/utils/env.py +9 -9
  136. langchain_core/utils/function_calling.py +89 -186
  137. langchain_core/utils/html.py +7 -8
  138. langchain_core/utils/input.py +6 -6
  139. langchain_core/utils/interactive_env.py +1 -1
  140. langchain_core/utils/iter.py +36 -40
  141. langchain_core/utils/json.py +4 -3
  142. langchain_core/utils/json_schema.py +9 -9
  143. langchain_core/utils/mustache.py +8 -10
  144. langchain_core/utils/pydantic.py +33 -35
  145. langchain_core/utils/strings.py +6 -9
  146. langchain_core/utils/usage.py +1 -1
  147. langchain_core/utils/utils.py +66 -62
  148. langchain_core/vectorstores/base.py +182 -216
  149. langchain_core/vectorstores/in_memory.py +101 -176
  150. langchain_core/vectorstores/utils.py +5 -5
  151. langchain_core/version.py +1 -1
  152. langchain_core-1.0.3.dist-info/METADATA +69 -0
  153. langchain_core-1.0.3.dist-info/RECORD +172 -0
  154. {langchain_core-1.0.0a6.dist-info → langchain_core-1.0.3.dist-info}/WHEEL +1 -1
  155. langchain_core/memory.py +0 -120
  156. langchain_core/messages/block_translators/ollama.py +0 -47
  157. langchain_core/prompts/pipeline.py +0 -138
  158. langchain_core/pydantic_v1/__init__.py +0 -30
  159. langchain_core/pydantic_v1/dataclasses.py +0 -23
  160. langchain_core/pydantic_v1/main.py +0 -23
  161. langchain_core/tracers/langchain_v1.py +0 -31
  162. langchain_core/utils/loading.py +0 -35
  163. langchain_core-1.0.0a6.dist-info/METADATA +0 -67
  164. langchain_core-1.0.0a6.dist-info/RECORD +0 -181
  165. langchain_core-1.0.0a6.dist-info/entry_points.txt +0 -4
langchain_core/runnables/configurable.py

@@ -7,6 +7,7 @@ import threading
 from abc import abstractmethod
 from collections.abc import (
     AsyncIterator,
+    Callable,
     Iterator,
     Sequence,
 )
@@ -14,9 +15,6 @@ from functools import wraps
 from typing import (
     TYPE_CHECKING,
     Any,
-    Callable,
-    Optional,
-    Union,
     cast,
 )
 from weakref import WeakValueDictionary
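The two import hunks above reflect a pattern repeated through the rest of this file: `Callable` now comes from `collections.abc`, and `Optional[...]`/`Union[...]` annotations are rewritten as PEP 604 unions. A minimal illustrative sketch of the before/after style (the function and names are hypothetical, not taken from the package):

```python
from collections.abc import Callable  # 1.0.3 imports Callable from collections.abc

# Old annotation style (1.0.0a6)  ->  new style (1.0.3)
#   Optional[RunnableConfig]      ->  RunnableConfig | None
#   Union[Output, Exception]      ->  Output | Exception
def apply(callback: Callable[[int], int], value: int | None = None) -> int | None:
    """Hypothetical helper; only the annotation style mirrors the change above."""
    return callback(value) if value is not None else None
```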
@@ -51,14 +49,14 @@ if TYPE_CHECKING:
 class DynamicRunnable(RunnableSerializable[Input, Output]):
     """Serializable Runnable that can be dynamically configured.
 
-    A DynamicRunnable should be initiated using the ``configurable_fields`` or
-    ``configurable_alternatives`` method of a Runnable.
+    A DynamicRunnable should be initiated using the `configurable_fields` or
+    `configurable_alternatives` method of a Runnable.
     """
 
     default: RunnableSerializable[Input, Output]
     """The default Runnable to use."""
 
-    config: Optional[RunnableConfig] = None
+    config: RunnableConfig | None = None
     """The configuration to use."""
 
     model_config = ConfigDict(
@@ -74,10 +72,10 @@ class DynamicRunnable(RunnableSerializable[Input, Output]):
     @classmethod
     @override
     def get_lc_namespace(cls) -> list[str]:
-        """Get the namespace of the langchain object.
+        """Get the namespace of the LangChain object.
 
         Returns:
-            ``["langchain", "schema", "runnable"]``
+            `["langchain", "schema", "runnable"]`
         """
         return ["langchain", "schema", "runnable"]
 
@@ -92,28 +90,26 @@ class DynamicRunnable(RunnableSerializable[Input, Output]):
         return self.default.OutputType
 
     @override
-    def get_input_schema(
-        self, config: Optional[RunnableConfig] = None
-    ) -> type[BaseModel]:
+    def get_input_schema(self, config: RunnableConfig | None = None) -> type[BaseModel]:
         runnable, config = self.prepare(config)
         return runnable.get_input_schema(config)
 
     @override
     def get_output_schema(
-        self, config: Optional[RunnableConfig] = None
+        self, config: RunnableConfig | None = None
     ) -> type[BaseModel]:
         runnable, config = self.prepare(config)
         return runnable.get_output_schema(config)
 
     @override
-    def get_graph(self, config: Optional[RunnableConfig] = None) -> Graph:
+    def get_graph(self, config: RunnableConfig | None = None) -> Graph:
         runnable, config = self.prepare(config)
         return runnable.get_graph(config)
 
     @override
     def with_config(
         self,
-        config: Optional[RunnableConfig] = None,
+        config: RunnableConfig | None = None,
         # Sadly Unpack is not well supported by mypy so this will have to be untyped
         **kwargs: Any,
     ) -> Runnable[Input, Output]:
@@ -122,16 +118,15 @@ class DynamicRunnable(RunnableSerializable[Input, Output]):
         )
 
     def prepare(
-        self, config: Optional[RunnableConfig] = None
+        self, config: RunnableConfig | None = None
     ) -> tuple[Runnable[Input, Output], RunnableConfig]:
         """Prepare the Runnable for invocation.
 
         Args:
-            config: The configuration to use. Defaults to None.
+            config: The configuration to use.
 
         Returns:
-            tuple[Runnable[Input, Output], RunnableConfig]: The prepared Runnable and
-                configuration.
+            The prepared Runnable and configuration.
         """
         runnable: Runnable[Input, Output] = self
         while isinstance(runnable, DynamicRunnable):
@@ -140,19 +135,19 @@ class DynamicRunnable(RunnableSerializable[Input, Output]):
 
     @abstractmethod
     def _prepare(
-        self, config: Optional[RunnableConfig] = None
+        self, config: RunnableConfig | None = None
     ) -> tuple[Runnable[Input, Output], RunnableConfig]: ...
 
     @override
     def invoke(
-        self, input: Input, config: Optional[RunnableConfig] = None, **kwargs: Any
+        self, input: Input, config: RunnableConfig | None = None, **kwargs: Any
     ) -> Output:
         runnable, config = self.prepare(config)
         return runnable.invoke(input, config, **kwargs)
 
     @override
     async def ainvoke(
-        self, input: Input, config: Optional[RunnableConfig] = None, **kwargs: Any
+        self, input: Input, config: RunnableConfig | None = None, **kwargs: Any
     ) -> Output:
         runnable, config = self.prepare(config)
         return await runnable.ainvoke(input, config, **kwargs)
@@ -161,10 +156,10 @@ class DynamicRunnable(RunnableSerializable[Input, Output]):
     def batch(
         self,
         inputs: list[Input],
-        config: Optional[Union[RunnableConfig, list[RunnableConfig]]] = None,
+        config: RunnableConfig | list[RunnableConfig] | None = None,
         *,
         return_exceptions: bool = False,
-        **kwargs: Optional[Any],
+        **kwargs: Any | None,
    ) -> list[Output]:
         configs = get_config_list(config, len(inputs))
         prepared = [self.prepare(c) for c in configs]
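For context on the `batch` signature above: `config` may be a single `RunnableConfig` or one config per input, which `get_config_list` expands before each input is prepared. A hedged, self-contained sketch using only `langchain-core` (the prompt setup mirrors the docstring examples later in this file; variable names are illustrative):

```python
from langchain_core.prompts import PromptTemplate
from langchain_core.runnables import ConfigurableField

# A configurable prompt with a "joke" default and a "poem" alternative.
prompt = PromptTemplate.from_template(
    "Tell me a joke about {topic}"
).configurable_alternatives(
    ConfigurableField(id="prompt"),
    default_key="joke",
    poem=PromptTemplate.from_template("Write a short poem about {topic}"),
)

# One config per input: the first element uses the default, the second the alternative.
results = prompt.batch(
    [{"topic": "bears"}, {"topic": "rivers"}],
    config=[
        {"configurable": {"prompt": "joke"}},
        {"configurable": {"prompt": "poem"}},
    ],
)
print([r.to_string() for r in results])
```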
@@ -183,7 +178,7 @@ class DynamicRunnable(RunnableSerializable[Input, Output]):
         def invoke(
             prepared: tuple[Runnable[Input, Output], RunnableConfig],
             input_: Input,
-        ) -> Union[Output, Exception]:
+        ) -> Output | Exception:
             bound, config = prepared
             if return_exceptions:
                 try:
@@ -204,10 +199,10 @@ class DynamicRunnable(RunnableSerializable[Input, Output]):
     async def abatch(
         self,
         inputs: list[Input],
-        config: Optional[Union[RunnableConfig, list[RunnableConfig]]] = None,
+        config: RunnableConfig | list[RunnableConfig] | None = None,
         *,
         return_exceptions: bool = False,
-        **kwargs: Optional[Any],
+        **kwargs: Any | None,
     ) -> list[Output]:
         configs = get_config_list(config, len(inputs))
         prepared = [self.prepare(c) for c in configs]
@@ -226,7 +221,7 @@ class DynamicRunnable(RunnableSerializable[Input, Output]):
         async def ainvoke(
             prepared: tuple[Runnable[Input, Output], RunnableConfig],
             input_: Input,
-        ) -> Union[Output, Exception]:
+        ) -> Output | Exception:
             bound, config = prepared
             if return_exceptions:
                 try:
@@ -243,8 +238,8 @@ class DynamicRunnable(RunnableSerializable[Input, Output]):
     def stream(
         self,
         input: Input,
-        config: Optional[RunnableConfig] = None,
-        **kwargs: Optional[Any],
+        config: RunnableConfig | None = None,
+        **kwargs: Any | None,
     ) -> Iterator[Output]:
         runnable, config = self.prepare(config)
         return runnable.stream(input, config, **kwargs)
@@ -253,8 +248,8 @@ class DynamicRunnable(RunnableSerializable[Input, Output]):
     async def astream(
         self,
         input: Input,
-        config: Optional[RunnableConfig] = None,
-        **kwargs: Optional[Any],
+        config: RunnableConfig | None = None,
+        **kwargs: Any | None,
     ) -> AsyncIterator[Output]:
         runnable, config = self.prepare(config)
         async for chunk in runnable.astream(input, config, **kwargs):
@@ -264,8 +259,8 @@ class DynamicRunnable(RunnableSerializable[Input, Output]):
     def transform(
         self,
         input: Iterator[Input],
-        config: Optional[RunnableConfig] = None,
-        **kwargs: Optional[Any],
+        config: RunnableConfig | None = None,
+        **kwargs: Any | None,
     ) -> Iterator[Output]:
         runnable, config = self.prepare(config)
         return runnable.transform(input, config, **kwargs)
@@ -274,8 +269,8 @@ class DynamicRunnable(RunnableSerializable[Input, Output]):
     async def atransform(
         self,
         input: AsyncIterator[Input],
-        config: Optional[RunnableConfig] = None,
-        **kwargs: Optional[Any],
+        config: RunnableConfig | None = None,
+        **kwargs: Any | None,
     ) -> AsyncIterator[Output]:
         runnable, config = self.prepare(config)
         async for chunk in runnable.atransform(input, config, **kwargs):
@@ -328,58 +323,56 @@ class RunnableConfigurableFields(DynamicRunnable[Input, Output]):
 
     Here is an example of using a RunnableConfigurableFields with LLMs:
 
-    .. code-block:: python
-
-        from langchain_core.prompts import PromptTemplate
-        from langchain_core.runnables import ConfigurableField
-        from langchain_openai import ChatOpenAI
+    ```python
+    from langchain_core.prompts import PromptTemplate
+    from langchain_core.runnables import ConfigurableField
+    from langchain_openai import ChatOpenAI
 
-        model = ChatOpenAI(temperature=0).configurable_fields(
-            temperature=ConfigurableField(
-                id="temperature",
-                name="LLM Temperature",
-                description="The temperature of the LLM",
-            )
+    model = ChatOpenAI(temperature=0).configurable_fields(
+        temperature=ConfigurableField(
+            id="temperature",
+            name="LLM Temperature",
+            description="The temperature of the LLM",
         )
-        # This creates a RunnableConfigurableFields for a chat model.
-
-        # When invoking the created RunnableSequence, you can pass in the
-        # value for your ConfigurableField's id which in this case
-        # will be change in temperature
+    )
+    # This creates a RunnableConfigurableFields for a chat model.
 
-        prompt = PromptTemplate.from_template("Pick a random number above {x}")
-        chain = prompt | model
+    # When invoking the created RunnableSequence, you can pass in the
+    # value for your ConfigurableField's id which in this case
+    # will be change in temperature
 
-        chain.invoke({"x": 0})
-        chain.invoke({"x": 0}, config={"configurable": {"temperature": 0.9}})
+    prompt = PromptTemplate.from_template("Pick a random number above {x}")
+    chain = prompt | model
 
+    chain.invoke({"x": 0})
+    chain.invoke({"x": 0}, config={"configurable": {"temperature": 0.9}})
+    ```
 
     Here is an example of using a RunnableConfigurableFields with HubRunnables:
 
-    .. code-block:: python
-
-        from langchain_core.prompts import PromptTemplate
-        from langchain_core.runnables import ConfigurableField
-        from langchain_openai import ChatOpenAI
-        from langchain.runnables.hub import HubRunnable
-
-        prompt = HubRunnable("rlm/rag-prompt").configurable_fields(
-            owner_repo_commit=ConfigurableField(
-                id="hub_commit",
-                name="Hub Commit",
-                description="The Hub commit to pull from",
-            )
+    ```python
+    from langchain_core.prompts import PromptTemplate
+    from langchain_core.runnables import ConfigurableField
+    from langchain_openai import ChatOpenAI
+    from langchain.runnables.hub import HubRunnable
+
+    prompt = HubRunnable("rlm/rag-prompt").configurable_fields(
+        owner_repo_commit=ConfigurableField(
+            id="hub_commit",
+            name="Hub Commit",
+            description="The Hub commit to pull from",
         )
+    )
 
-        prompt.invoke({"question": "foo", "context": "bar"})
+    prompt.invoke({"question": "foo", "context": "bar"})
 
-        # Invoking prompt with `with_config` method
-
-        prompt.invoke(
-            {"question": "foo", "context": "bar"},
-            config={"configurable": {"hub_commit": "rlm/rag-prompt-llama"}},
-        )
+    # Invoking prompt with `with_config` method
 
+    prompt.invoke(
+        {"question": "foo", "context": "bar"},
+        config={"configurable": {"hub_commit": "rlm/rag-prompt-llama"}},
+    )
+    ```
     """
 
     fields: dict[str, AnyConfigurableField]
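The docstring above demonstrates `configurable_fields` with `ChatOpenAI`; the same mechanism can be exercised with only `langchain-core` installed. A hedged sketch, assuming `FakeListChatModel` is available from `langchain_core.language_models` (it is used here purely as a stand-in; any field declared on a model class can be made configurable the same way):

```python
from langchain_core.language_models import FakeListChatModel
from langchain_core.runnables import ConfigurableField

model = FakeListChatModel(responses=["default answer"]).configurable_fields(
    responses=ConfigurableField(
        id="responses",
        name="Canned responses",
        description="The scripted replies returned by the fake model",
    )
)

# Without configuration, the default field value is used...
print(model.invoke("hi").content)  # -> default answer

# ...and a per-invocation config swaps the field in a fresh copy of the model.
print(
    model.invoke(
        "hi", config={"configurable": {"responses": ["configured answer"]}}
    ).content
)  # -> configured answer
```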
@@ -390,7 +383,7 @@ class RunnableConfigurableFields(DynamicRunnable[Input, Output]):
         """Get the configuration specs for the RunnableConfigurableFields.
 
         Returns:
-            list[ConfigurableFieldSpec]: The configuration specs.
+            The configuration specs.
         """
         config_specs = []
 
@@ -425,7 +418,7 @@ class RunnableConfigurableFields(DynamicRunnable[Input, Output]):
         return self.default.configurable_fields(**{**self.fields, **kwargs})
 
     def _prepare(
-        self, config: Optional[RunnableConfig] = None
+        self, config: RunnableConfig | None = None
     ) -> tuple[Runnable[Input, Output], RunnableConfig]:
         config = ensure_config(config)
         specs_by_id = {spec.id: (key, spec) for key, spec in self.fields.items()}
@@ -472,9 +465,7 @@ class StrEnum(str, enum.Enum):
 
 
 _enums_for_spec: WeakValueDictionary[
-    Union[
-        ConfigurableFieldSingleOption, ConfigurableFieldMultiOption, ConfigurableField
-    ],
+    ConfigurableFieldSingleOption | ConfigurableFieldMultiOption | ConfigurableField,
     type[StrEnum],
 ] = WeakValueDictionary()
 
@@ -484,67 +475,59 @@ _enums_for_spec_lock = threading.Lock()
 class RunnableConfigurableAlternatives(DynamicRunnable[Input, Output]):
     """Runnable that can be dynamically configured.
 
-    A RunnableConfigurableAlternatives should be initiated using the
+    A `RunnableConfigurableAlternatives` should be initiated using the
     `configurable_alternatives` method of a Runnable or can be
     initiated directly as well.
 
-    Here is an example of using a RunnableConfigurableAlternatives that uses
+    Here is an example of using a `RunnableConfigurableAlternatives` that uses
     alternative prompts to illustrate its functionality:
 
-    .. code-block:: python
-
-        from langchain_core.runnables import ConfigurableField
-        from langchain_openai import ChatOpenAI
-
-        # This creates a RunnableConfigurableAlternatives for Prompt Runnable
-        # with two alternatives.
-        prompt = PromptTemplate.from_template(
-            "Tell me a joke about {topic}"
-        ).configurable_alternatives(
-            ConfigurableField(id="prompt"),
-            default_key="joke",
-            poem=PromptTemplate.from_template("Write a short poem about {topic}"),
-        )
+    ```python
+    from langchain_core.runnables import ConfigurableField
+    from langchain_openai import ChatOpenAI
+
+    # This creates a RunnableConfigurableAlternatives for Prompt Runnable
+    # with two alternatives.
+    prompt = PromptTemplate.from_template(
+        "Tell me a joke about {topic}"
+    ).configurable_alternatives(
+        ConfigurableField(id="prompt"),
+        default_key="joke",
+        poem=PromptTemplate.from_template("Write a short poem about {topic}"),
+    )
 
-        # When invoking the created RunnableSequence, you can pass in the
-        # value for your ConfigurableField's id which in this case will either be
-        # `joke` or `poem`.
-        chain = prompt | ChatOpenAI(model="gpt-3.5-turbo-0125", temperature=0)
+    # When invoking the created RunnableSequence, you can pass in the
+    # value for your ConfigurableField's id which in this case will either be
+    # `joke` or `poem`.
+    chain = prompt | ChatOpenAI(model="gpt-3.5-turbo-0125", temperature=0)
 
-        # The `with_config` method brings in the desired Prompt Runnable in your
-        # Runnable Sequence.
-        chain.with_config(configurable={"prompt": "poem"}).invoke(
-            {"topic": "bears"}
-        )
+    # The `with_config` method brings in the desired Prompt Runnable in your
+    # Runnable Sequence.
+    chain.with_config(configurable={"prompt": "poem"}).invoke({"topic": "bears"})
+    ```
 
-
-    Equivalently, you can initialize RunnableConfigurableAlternatives directly
+    Equivalently, you can initialize `RunnableConfigurableAlternatives` directly
     and use in LCEL in the same way:
 
-    .. code-block:: python
-
-        from langchain_core.runnables import ConfigurableField
-        from langchain_core.runnables.configurable import (
-            RunnableConfigurableAlternatives,
-        )
-        from langchain_openai import ChatOpenAI
-
-        prompt = RunnableConfigurableAlternatives(
-            which=ConfigurableField(id="prompt"),
-            default=PromptTemplate.from_template("Tell me a joke about {topic}"),
-            default_key="joke",
-            prefix_keys=False,
-            alternatives={
-                "poem": PromptTemplate.from_template(
-                    "Write a short poem about {topic}"
-                )
-            },
-        )
-        chain = prompt | ChatOpenAI(model="gpt-3.5-turbo-0125", temperature=0)
-        chain.with_config(configurable={"prompt": "poem"}).invoke(
-            {"topic": "bears"}
-        )
-
+    ```python
+    from langchain_core.runnables import ConfigurableField
+    from langchain_core.runnables.configurable import (
+        RunnableConfigurableAlternatives,
+    )
+    from langchain_openai import ChatOpenAI
+
+    prompt = RunnableConfigurableAlternatives(
+        which=ConfigurableField(id="prompt"),
+        default=PromptTemplate.from_template("Tell me a joke about {topic}"),
+        default_key="joke",
+        prefix_keys=False,
+        alternatives={
+            "poem": PromptTemplate.from_template("Write a short poem about {topic}")
+        },
+    )
+    chain = prompt | ChatOpenAI(model="gpt-3.5-turbo-0125", temperature=0)
+    chain.with_config(configurable={"prompt": "poem"}).invoke({"topic": "bears"})
+    ```
     """
 
     which: ConfigurableField
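As a complement to the docstring above, the two ways of selecting an alternative (a per-call `config` versus binding it ahead of time with `with_config`) can be shown with prompts alone, so the sketch below runs without `langchain_openai`. It reuses the constructor arguments from the example and is illustrative rather than canonical:

```python
from langchain_core.prompts import PromptTemplate
from langchain_core.runnables import ConfigurableField
from langchain_core.runnables.configurable import RunnableConfigurableAlternatives

prompt = RunnableConfigurableAlternatives(
    which=ConfigurableField(id="prompt"),
    default=PromptTemplate.from_template("Tell me a joke about {topic}"),
    default_key="joke",
    prefix_keys=False,
    alternatives={
        "poem": PromptTemplate.from_template("Write a short poem about {topic}")
    },
)

# The default key ("joke") applies when no configuration is given.
print(prompt.invoke({"topic": "bears"}).to_string())

# Select the alternative for a single call...
print(
    prompt.invoke(
        {"topic": "bears"}, config={"configurable": {"prompt": "poem"}}
    ).to_string()
)

# ...or bind the selection up front and reuse it.
poem_prompt = prompt.with_config(configurable={"prompt": "poem"})
print(poem_prompt.invoke({"topic": "bears"}).to_string())
```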
@@ -552,12 +535,12 @@ class RunnableConfigurableAlternatives(DynamicRunnable[Input, Output]):
 
     alternatives: dict[
         str,
-        Union[Runnable[Input, Output], Callable[[], Runnable[Input, Output]]],
+        Runnable[Input, Output] | Callable[[], Runnable[Input, Output]],
     ]
     """The alternatives to choose from."""
 
     default_key: str = "default"
-    """The enum value to use for the default option. Defaults to ``'default'``."""
+    """The enum value to use for the default option."""
 
     prefix_keys: bool
     """Whether to prefix configurable fields of each alternative with a namespace
@@ -626,7 +609,7 @@ class RunnableConfigurableAlternatives(DynamicRunnable[Input, Output]):
         )
 
     def _prepare(
-        self, config: Optional[RunnableConfig] = None
+        self, config: RunnableConfig | None = None
     ) -> tuple[Runnable[Input, Output], RunnableConfig]:
         config = ensure_config(config)
         which = config.get("configurable", {}).get(self.which.id, self.default_key)
@@ -672,7 +655,7 @@ def prefix_config_spec(
        prefix: The prefix to add.
 
     Returns:
-        ConfigurableFieldSpec: The prefixed ConfigurableFieldSpec.
+        The prefixed ConfigurableFieldSpec.
     """
     return (
         ConfigurableFieldSpec(
@@ -689,8 +672,8 @@
 
 
 def make_options_spec(
-    spec: Union[ConfigurableFieldSingleOption, ConfigurableFieldMultiOption],
-    description: Optional[str],
+    spec: ConfigurableFieldSingleOption | ConfigurableFieldMultiOption,
+    description: str | None,
 ) -> ConfigurableFieldSpec:
     """Make options spec.