langchain-core 0.4.0.dev0__py3-none-any.whl → 1.0.0__py3-none-any.whl

This diff compares the contents of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.

Note: this version of langchain-core has been flagged as potentially problematic.

Files changed (172)
  1. langchain_core/__init__.py +1 -1
  2. langchain_core/_api/__init__.py +3 -4
  3. langchain_core/_api/beta_decorator.py +45 -70
  4. langchain_core/_api/deprecation.py +80 -80
  5. langchain_core/_api/path.py +22 -8
  6. langchain_core/_import_utils.py +10 -4
  7. langchain_core/agents.py +25 -21
  8. langchain_core/caches.py +53 -63
  9. langchain_core/callbacks/__init__.py +1 -8
  10. langchain_core/callbacks/base.py +341 -348
  11. langchain_core/callbacks/file.py +55 -44
  12. langchain_core/callbacks/manager.py +546 -683
  13. langchain_core/callbacks/stdout.py +29 -30
  14. langchain_core/callbacks/streaming_stdout.py +35 -36
  15. langchain_core/callbacks/usage.py +65 -70
  16. langchain_core/chat_history.py +48 -55
  17. langchain_core/document_loaders/base.py +46 -21
  18. langchain_core/document_loaders/langsmith.py +39 -36
  19. langchain_core/documents/__init__.py +0 -1
  20. langchain_core/documents/base.py +96 -74
  21. langchain_core/documents/compressor.py +12 -9
  22. langchain_core/documents/transformers.py +29 -28
  23. langchain_core/embeddings/fake.py +56 -57
  24. langchain_core/env.py +2 -3
  25. langchain_core/example_selectors/base.py +12 -0
  26. langchain_core/example_selectors/length_based.py +1 -1
  27. langchain_core/example_selectors/semantic_similarity.py +21 -25
  28. langchain_core/exceptions.py +15 -9
  29. langchain_core/globals.py +4 -163
  30. langchain_core/indexing/api.py +132 -125
  31. langchain_core/indexing/base.py +64 -67
  32. langchain_core/indexing/in_memory.py +26 -6
  33. langchain_core/language_models/__init__.py +15 -27
  34. langchain_core/language_models/_utils.py +267 -117
  35. langchain_core/language_models/base.py +92 -177
  36. langchain_core/language_models/chat_models.py +547 -407
  37. langchain_core/language_models/fake.py +11 -11
  38. langchain_core/language_models/fake_chat_models.py +72 -118
  39. langchain_core/language_models/llms.py +168 -242
  40. langchain_core/load/dump.py +8 -11
  41. langchain_core/load/load.py +32 -28
  42. langchain_core/load/mapping.py +2 -4
  43. langchain_core/load/serializable.py +50 -56
  44. langchain_core/messages/__init__.py +36 -51
  45. langchain_core/messages/ai.py +377 -150
  46. langchain_core/messages/base.py +239 -47
  47. langchain_core/messages/block_translators/__init__.py +111 -0
  48. langchain_core/messages/block_translators/anthropic.py +470 -0
  49. langchain_core/messages/block_translators/bedrock.py +94 -0
  50. langchain_core/messages/block_translators/bedrock_converse.py +297 -0
  51. langchain_core/messages/block_translators/google_genai.py +530 -0
  52. langchain_core/messages/block_translators/google_vertexai.py +21 -0
  53. langchain_core/messages/block_translators/groq.py +143 -0
  54. langchain_core/messages/block_translators/langchain_v0.py +301 -0
  55. langchain_core/messages/block_translators/openai.py +1010 -0
  56. langchain_core/messages/chat.py +2 -3
  57. langchain_core/messages/content.py +1423 -0
  58. langchain_core/messages/function.py +7 -7
  59. langchain_core/messages/human.py +44 -38
  60. langchain_core/messages/modifier.py +3 -2
  61. langchain_core/messages/system.py +40 -27
  62. langchain_core/messages/tool.py +160 -58
  63. langchain_core/messages/utils.py +527 -638
  64. langchain_core/output_parsers/__init__.py +1 -14
  65. langchain_core/output_parsers/base.py +68 -104
  66. langchain_core/output_parsers/json.py +13 -17
  67. langchain_core/output_parsers/list.py +11 -33
  68. langchain_core/output_parsers/openai_functions.py +56 -74
  69. langchain_core/output_parsers/openai_tools.py +68 -109
  70. langchain_core/output_parsers/pydantic.py +15 -13
  71. langchain_core/output_parsers/string.py +6 -2
  72. langchain_core/output_parsers/transform.py +17 -60
  73. langchain_core/output_parsers/xml.py +34 -44
  74. langchain_core/outputs/__init__.py +1 -1
  75. langchain_core/outputs/chat_generation.py +26 -11
  76. langchain_core/outputs/chat_result.py +1 -3
  77. langchain_core/outputs/generation.py +17 -6
  78. langchain_core/outputs/llm_result.py +15 -8
  79. langchain_core/prompt_values.py +29 -123
  80. langchain_core/prompts/__init__.py +3 -27
  81. langchain_core/prompts/base.py +48 -63
  82. langchain_core/prompts/chat.py +259 -288
  83. langchain_core/prompts/dict.py +19 -11
  84. langchain_core/prompts/few_shot.py +84 -90
  85. langchain_core/prompts/few_shot_with_templates.py +14 -12
  86. langchain_core/prompts/image.py +19 -14
  87. langchain_core/prompts/loading.py +6 -8
  88. langchain_core/prompts/message.py +7 -8
  89. langchain_core/prompts/prompt.py +42 -43
  90. langchain_core/prompts/string.py +37 -16
  91. langchain_core/prompts/structured.py +43 -46
  92. langchain_core/rate_limiters.py +51 -60
  93. langchain_core/retrievers.py +52 -192
  94. langchain_core/runnables/base.py +1727 -1683
  95. langchain_core/runnables/branch.py +52 -73
  96. langchain_core/runnables/config.py +89 -103
  97. langchain_core/runnables/configurable.py +128 -130
  98. langchain_core/runnables/fallbacks.py +93 -82
  99. langchain_core/runnables/graph.py +127 -127
  100. langchain_core/runnables/graph_ascii.py +63 -41
  101. langchain_core/runnables/graph_mermaid.py +87 -70
  102. langchain_core/runnables/graph_png.py +31 -36
  103. langchain_core/runnables/history.py +145 -161
  104. langchain_core/runnables/passthrough.py +141 -144
  105. langchain_core/runnables/retry.py +84 -68
  106. langchain_core/runnables/router.py +33 -37
  107. langchain_core/runnables/schema.py +79 -72
  108. langchain_core/runnables/utils.py +95 -139
  109. langchain_core/stores.py +85 -131
  110. langchain_core/structured_query.py +11 -15
  111. langchain_core/sys_info.py +31 -32
  112. langchain_core/tools/__init__.py +1 -14
  113. langchain_core/tools/base.py +221 -247
  114. langchain_core/tools/convert.py +144 -161
  115. langchain_core/tools/render.py +10 -10
  116. langchain_core/tools/retriever.py +12 -19
  117. langchain_core/tools/simple.py +52 -29
  118. langchain_core/tools/structured.py +56 -60
  119. langchain_core/tracers/__init__.py +1 -9
  120. langchain_core/tracers/_streaming.py +6 -7
  121. langchain_core/tracers/base.py +103 -112
  122. langchain_core/tracers/context.py +29 -48
  123. langchain_core/tracers/core.py +142 -105
  124. langchain_core/tracers/evaluation.py +30 -34
  125. langchain_core/tracers/event_stream.py +162 -117
  126. langchain_core/tracers/langchain.py +34 -36
  127. langchain_core/tracers/log_stream.py +87 -49
  128. langchain_core/tracers/memory_stream.py +3 -3
  129. langchain_core/tracers/root_listeners.py +18 -34
  130. langchain_core/tracers/run_collector.py +8 -20
  131. langchain_core/tracers/schemas.py +0 -125
  132. langchain_core/tracers/stdout.py +3 -3
  133. langchain_core/utils/__init__.py +1 -4
  134. langchain_core/utils/_merge.py +47 -9
  135. langchain_core/utils/aiter.py +70 -66
  136. langchain_core/utils/env.py +12 -9
  137. langchain_core/utils/function_calling.py +139 -206
  138. langchain_core/utils/html.py +7 -8
  139. langchain_core/utils/input.py +6 -6
  140. langchain_core/utils/interactive_env.py +6 -2
  141. langchain_core/utils/iter.py +48 -45
  142. langchain_core/utils/json.py +14 -4
  143. langchain_core/utils/json_schema.py +159 -43
  144. langchain_core/utils/mustache.py +32 -25
  145. langchain_core/utils/pydantic.py +67 -40
  146. langchain_core/utils/strings.py +5 -5
  147. langchain_core/utils/usage.py +1 -1
  148. langchain_core/utils/utils.py +104 -62
  149. langchain_core/vectorstores/base.py +131 -179
  150. langchain_core/vectorstores/in_memory.py +113 -182
  151. langchain_core/vectorstores/utils.py +23 -17
  152. langchain_core/version.py +1 -1
  153. langchain_core-1.0.0.dist-info/METADATA +68 -0
  154. langchain_core-1.0.0.dist-info/RECORD +172 -0
  155. {langchain_core-0.4.0.dev0.dist-info → langchain_core-1.0.0.dist-info}/WHEEL +1 -1
  156. langchain_core/beta/__init__.py +0 -1
  157. langchain_core/beta/runnables/__init__.py +0 -1
  158. langchain_core/beta/runnables/context.py +0 -448
  159. langchain_core/memory.py +0 -116
  160. langchain_core/messages/content_blocks.py +0 -1435
  161. langchain_core/prompts/pipeline.py +0 -133
  162. langchain_core/pydantic_v1/__init__.py +0 -30
  163. langchain_core/pydantic_v1/dataclasses.py +0 -23
  164. langchain_core/pydantic_v1/main.py +0 -23
  165. langchain_core/tracers/langchain_v1.py +0 -23
  166. langchain_core/utils/loading.py +0 -31
  167. langchain_core/v1/__init__.py +0 -1
  168. langchain_core/v1/chat_models.py +0 -1047
  169. langchain_core/v1/messages.py +0 -755
  170. langchain_core-0.4.0.dev0.dist-info/METADATA +0 -108
  171. langchain_core-0.4.0.dev0.dist-info/RECORD +0 -177
  172. langchain_core-0.4.0.dev0.dist-info/entry_points.txt +0 -4
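
The hunks below are from `langchain_core/runnables/configurable.py` (item 97 in the list above). A change that repeats throughout the file, and across the release, is the move from `typing.Optional`/`Union` annotations to PEP 604 unions, together with per-field docstrings replacing `Parameters:` blocks. A minimal sketch of the annotation change (the helper below is hypothetical and not part of the package):

```python
from langchain_core.runnables import RunnableConfig

# Pre-1.0 style (removed in the diff below):
#   def prepare(self, config: Optional[RunnableConfig] = None) -> ...
# 1.0 style uses PEP 604 unions, so the typing.Optional/Union imports go away.


def ensure_config_dict(config: RunnableConfig | None = None) -> RunnableConfig:
    """Hypothetical helper: treat a missing config as an empty one."""
    return config if config is not None else {}
```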
@@ -7,6 +7,7 @@ import threading
 from abc import abstractmethod
 from collections.abc import (
     AsyncIterator,
+    Callable,
     Iterator,
     Sequence,
 )
@@ -14,9 +15,6 @@ from functools import wraps
 from typing import (
     TYPE_CHECKING,
     Any,
-    Callable,
-    Optional,
-    Union,
     cast,
 )
 from weakref import WeakValueDictionary
@@ -53,15 +51,13 @@ class DynamicRunnable(RunnableSerializable[Input, Output]):
 
     A DynamicRunnable should be initiated using the `configurable_fields` or
     `configurable_alternatives` method of a Runnable.
-
-    Parameters:
-        default: The default Runnable to use.
-        config: The configuration to use.
     """
 
     default: RunnableSerializable[Input, Output]
+    """The default Runnable to use."""
 
-    config: Optional[RunnableConfig] = None
+    config: RunnableConfig | None = None
+    """The configuration to use."""
 
     model_config = ConfigDict(
         arbitrary_types_allowed=True,
@@ -70,11 +66,17 @@ class DynamicRunnable(RunnableSerializable[Input, Output]):
     @classmethod
     @override
     def is_lc_serializable(cls) -> bool:
+        """Return True as this class is serializable."""
         return True
 
     @classmethod
     @override
     def get_lc_namespace(cls) -> list[str]:
+        """Get the namespace of the LangChain object.
+
+        Returns:
+            `["langchain", "schema", "runnable"]`
+        """
         return ["langchain", "schema", "runnable"]
 
     @property
@@ -88,28 +90,26 @@ class DynamicRunnable(RunnableSerializable[Input, Output]):
         return self.default.OutputType
 
     @override
-    def get_input_schema(
-        self, config: Optional[RunnableConfig] = None
-    ) -> type[BaseModel]:
+    def get_input_schema(self, config: RunnableConfig | None = None) -> type[BaseModel]:
         runnable, config = self.prepare(config)
         return runnable.get_input_schema(config)
 
     @override
     def get_output_schema(
-        self, config: Optional[RunnableConfig] = None
+        self, config: RunnableConfig | None = None
     ) -> type[BaseModel]:
         runnable, config = self.prepare(config)
         return runnable.get_output_schema(config)
 
     @override
-    def get_graph(self, config: Optional[RunnableConfig] = None) -> Graph:
+    def get_graph(self, config: RunnableConfig | None = None) -> Graph:
         runnable, config = self.prepare(config)
         return runnable.get_graph(config)
 
     @override
     def with_config(
         self,
-        config: Optional[RunnableConfig] = None,
+        config: RunnableConfig | None = None,
         # Sadly Unpack is not well supported by mypy so this will have to be untyped
         **kwargs: Any,
     ) -> Runnable[Input, Output]:
@@ -118,16 +118,15 @@ class DynamicRunnable(RunnableSerializable[Input, Output]):
         )
 
     def prepare(
-        self, config: Optional[RunnableConfig] = None
+        self, config: RunnableConfig | None = None
     ) -> tuple[Runnable[Input, Output], RunnableConfig]:
         """Prepare the Runnable for invocation.
 
         Args:
-            config: The configuration to use. Defaults to None.
+            config: The configuration to use.
 
         Returns:
-            tuple[Runnable[Input, Output], RunnableConfig]: The prepared Runnable and
-                configuration.
+            The prepared Runnable and configuration.
         """
         runnable: Runnable[Input, Output] = self
         while isinstance(runnable, DynamicRunnable):
@@ -136,19 +135,19 @@ class DynamicRunnable(RunnableSerializable[Input, Output]):
 
     @abstractmethod
     def _prepare(
-        self, config: Optional[RunnableConfig] = None
+        self, config: RunnableConfig | None = None
     ) -> tuple[Runnable[Input, Output], RunnableConfig]: ...
 
     @override
     def invoke(
-        self, input: Input, config: Optional[RunnableConfig] = None, **kwargs: Any
+        self, input: Input, config: RunnableConfig | None = None, **kwargs: Any
     ) -> Output:
         runnable, config = self.prepare(config)
         return runnable.invoke(input, config, **kwargs)
 
     @override
     async def ainvoke(
-        self, input: Input, config: Optional[RunnableConfig] = None, **kwargs: Any
+        self, input: Input, config: RunnableConfig | None = None, **kwargs: Any
     ) -> Output:
         runnable, config = self.prepare(config)
         return await runnable.ainvoke(input, config, **kwargs)
@@ -157,10 +156,10 @@ class DynamicRunnable(RunnableSerializable[Input, Output]):
     def batch(
         self,
         inputs: list[Input],
-        config: Optional[Union[RunnableConfig, list[RunnableConfig]]] = None,
+        config: RunnableConfig | list[RunnableConfig] | None = None,
         *,
         return_exceptions: bool = False,
-        **kwargs: Optional[Any],
+        **kwargs: Any | None,
     ) -> list[Output]:
         configs = get_config_list(config, len(inputs))
         prepared = [self.prepare(c) for c in configs]
@@ -179,7 +178,7 @@ class DynamicRunnable(RunnableSerializable[Input, Output]):
         def invoke(
             prepared: tuple[Runnable[Input, Output], RunnableConfig],
             input_: Input,
-        ) -> Union[Output, Exception]:
+        ) -> Output | Exception:
             bound, config = prepared
             if return_exceptions:
                 try:
@@ -200,10 +199,10 @@ class DynamicRunnable(RunnableSerializable[Input, Output]):
     async def abatch(
         self,
         inputs: list[Input],
-        config: Optional[Union[RunnableConfig, list[RunnableConfig]]] = None,
+        config: RunnableConfig | list[RunnableConfig] | None = None,
         *,
         return_exceptions: bool = False,
-        **kwargs: Optional[Any],
+        **kwargs: Any | None,
     ) -> list[Output]:
         configs = get_config_list(config, len(inputs))
         prepared = [self.prepare(c) for c in configs]
@@ -222,7 +221,7 @@ class DynamicRunnable(RunnableSerializable[Input, Output]):
         async def ainvoke(
             prepared: tuple[Runnable[Input, Output], RunnableConfig],
             input_: Input,
-        ) -> Union[Output, Exception]:
+        ) -> Output | Exception:
             bound, config = prepared
             if return_exceptions:
                 try:
@@ -239,8 +238,8 @@ class DynamicRunnable(RunnableSerializable[Input, Output]):
     def stream(
         self,
         input: Input,
-        config: Optional[RunnableConfig] = None,
-        **kwargs: Optional[Any],
+        config: RunnableConfig | None = None,
+        **kwargs: Any | None,
     ) -> Iterator[Output]:
         runnable, config = self.prepare(config)
         return runnable.stream(input, config, **kwargs)
@@ -249,8 +248,8 @@ class DynamicRunnable(RunnableSerializable[Input, Output]):
     async def astream(
         self,
         input: Input,
-        config: Optional[RunnableConfig] = None,
-        **kwargs: Optional[Any],
+        config: RunnableConfig | None = None,
+        **kwargs: Any | None,
     ) -> AsyncIterator[Output]:
         runnable, config = self.prepare(config)
         async for chunk in runnable.astream(input, config, **kwargs):
@@ -260,8 +259,8 @@ class DynamicRunnable(RunnableSerializable[Input, Output]):
     def transform(
         self,
         input: Iterator[Input],
-        config: Optional[RunnableConfig] = None,
-        **kwargs: Optional[Any],
+        config: RunnableConfig | None = None,
+        **kwargs: Any | None,
     ) -> Iterator[Output]:
         runnable, config = self.prepare(config)
         return runnable.transform(input, config, **kwargs)
@@ -270,8 +269,8 @@ class DynamicRunnable(RunnableSerializable[Input, Output]):
     async def atransform(
         self,
         input: AsyncIterator[Input],
-        config: Optional[RunnableConfig] = None,
-        **kwargs: Optional[Any],
+        config: RunnableConfig | None = None,
+        **kwargs: Any | None,
     ) -> AsyncIterator[Output]:
         runnable, config = self.prepare(config)
         async for chunk in runnable.atransform(input, config, **kwargs):
@@ -322,73 +321,69 @@ class RunnableConfigurableFields(DynamicRunnable[Input, Output]):
     A RunnableConfigurableFields should be initiated using the
     `configurable_fields` method of a Runnable.
 
-    Parameters:
-        fields: The configurable fields to use.
-
     Here is an example of using a RunnableConfigurableFields with LLMs:
 
-    .. code-block:: python
-
-        from langchain_core.prompts import PromptTemplate
-        from langchain_core.runnables import ConfigurableField
-        from langchain_openai import ChatOpenAI
+    ```python
+    from langchain_core.prompts import PromptTemplate
+    from langchain_core.runnables import ConfigurableField
+    from langchain_openai import ChatOpenAI
 
-        model = ChatOpenAI(temperature=0).configurable_fields(
-            temperature=ConfigurableField(
-                id="temperature",
-                name="LLM Temperature",
-                description="The temperature of the LLM",
-            )
+    model = ChatOpenAI(temperature=0).configurable_fields(
+        temperature=ConfigurableField(
+            id="temperature",
+            name="LLM Temperature",
+            description="The temperature of the LLM",
         )
-        # This creates a RunnableConfigurableFields for a chat model.
-
-        # When invoking the created RunnableSequence, you can pass in the
-        # value for your ConfigurableField's id which in this case
-        # will be change in temperature
+    )
+    # This creates a RunnableConfigurableFields for a chat model.
 
-        prompt = PromptTemplate.from_template("Pick a random number above {x}")
-        chain = prompt | model
+    # When invoking the created RunnableSequence, you can pass in the
+    # value for your ConfigurableField's id which in this case
+    # will be change in temperature
 
-        chain.invoke({"x": 0})
-        chain.invoke({"x": 0}, config={"configurable": {"temperature": 0.9}})
+    prompt = PromptTemplate.from_template("Pick a random number above {x}")
+    chain = prompt | model
 
+    chain.invoke({"x": 0})
+    chain.invoke({"x": 0}, config={"configurable": {"temperature": 0.9}})
+    ```
 
     Here is an example of using a RunnableConfigurableFields with HubRunnables:
 
-    .. code-block:: python
-
-        from langchain_core.prompts import PromptTemplate
-        from langchain_core.runnables import ConfigurableField
-        from langchain_openai import ChatOpenAI
-        from langchain.runnables.hub import HubRunnable
-
-        prompt = HubRunnable("rlm/rag-prompt").configurable_fields(
-            owner_repo_commit=ConfigurableField(
-                id="hub_commit",
-                name="Hub Commit",
-                description="The Hub commit to pull from",
-            )
+    ```python
+    from langchain_core.prompts import PromptTemplate
+    from langchain_core.runnables import ConfigurableField
+    from langchain_openai import ChatOpenAI
+    from langchain.runnables.hub import HubRunnable
+
+    prompt = HubRunnable("rlm/rag-prompt").configurable_fields(
+        owner_repo_commit=ConfigurableField(
+            id="hub_commit",
+            name="Hub Commit",
+            description="The Hub commit to pull from",
        )
+    )
 
-        prompt.invoke({"question": "foo", "context": "bar"})
+    prompt.invoke({"question": "foo", "context": "bar"})
 
-        # Invoking prompt with `with_config` method
-
-        prompt.invoke(
-            {"question": "foo", "context": "bar"},
-            config={"configurable": {"hub_commit": "rlm/rag-prompt-llama"}},
-        )
+    # Invoking prompt with `with_config` method
 
+    prompt.invoke(
+        {"question": "foo", "context": "bar"},
+        config={"configurable": {"hub_commit": "rlm/rag-prompt-llama"}},
+    )
+    ```
     """
 
     fields: dict[str, AnyConfigurableField]
+    """The configurable fields to use."""
 
     @property
     def config_specs(self) -> list[ConfigurableFieldSpec]:
         """Get the configuration specs for the RunnableConfigurableFields.
 
         Returns:
-            list[ConfigurableFieldSpec]: The configuration specs.
+            The configuration specs.
         """
         config_specs = []
 
@@ -423,7 +418,7 @@ class RunnableConfigurableFields(DynamicRunnable[Input, Output]):
         return self.default.configurable_fields(**{**self.fields, **kwargs})
 
     def _prepare(
-        self, config: Optional[RunnableConfig] = None
+        self, config: RunnableConfig | None = None
     ) -> tuple[Runnable[Input, Output], RunnableConfig]:
         config = ensure_config(config)
         specs_by_id = {spec.id: (key, spec) for key, spec in self.fields.items()}
@@ -470,9 +465,7 @@ class StrEnum(str, enum.Enum):
 
 
 _enums_for_spec: WeakValueDictionary[
-    Union[
-        ConfigurableFieldSingleOption, ConfigurableFieldMultiOption, ConfigurableField
-    ],
+    ConfigurableFieldSingleOption | ConfigurableFieldMultiOption | ConfigurableField,
     type[StrEnum],
 ] = WeakValueDictionary()
 
@@ -489,63 +482,65 @@ class RunnableConfigurableAlternatives(DynamicRunnable[Input, Output]):
     Here is an example of using a RunnableConfigurableAlternatives that uses
     alternative prompts to illustrate its functionality:
 
-    .. code-block:: python
-
-        from langchain_core.runnables import ConfigurableField
-        from langchain_openai import ChatOpenAI
-
-        # This creates a RunnableConfigurableAlternatives for Prompt Runnable
-        # with two alternatives.
-        prompt = PromptTemplate.from_template(
-            "Tell me a joke about {topic}"
-        ).configurable_alternatives(
-            ConfigurableField(id="prompt"),
-            default_key="joke",
-            poem=PromptTemplate.from_template("Write a short poem about {topic}")
-        )
-
-        # When invoking the created RunnableSequence, you can pass in the
-        # value for your ConfigurableField's id which in this case will either be
-        # `joke` or `poem`.
-        chain = prompt | ChatOpenAI(model="gpt-3.5-turbo-0125", temperature=0)
+    ```python
+    from langchain_core.runnables import ConfigurableField
+    from langchain_openai import ChatOpenAI
+
+    # This creates a RunnableConfigurableAlternatives for Prompt Runnable
+    # with two alternatives.
+    prompt = PromptTemplate.from_template(
+        "Tell me a joke about {topic}"
+    ).configurable_alternatives(
+        ConfigurableField(id="prompt"),
+        default_key="joke",
+        poem=PromptTemplate.from_template("Write a short poem about {topic}"),
+    )
 
-        # The `with_config` method brings in the desired Prompt Runnable in your
-        # Runnable Sequence.
-        chain.with_config(configurable={"prompt": "poem"}).invoke({"topic": "bears"})
+    # When invoking the created RunnableSequence, you can pass in the
+    # value for your ConfigurableField's id which in this case will either be
+    # `joke` or `poem`.
+    chain = prompt | ChatOpenAI(model="gpt-3.5-turbo-0125", temperature=0)
 
+    # The `with_config` method brings in the desired Prompt Runnable in your
+    # Runnable Sequence.
+    chain.with_config(configurable={"prompt": "poem"}).invoke({"topic": "bears"})
+    ```
 
     Equivalently, you can initialize RunnableConfigurableAlternatives directly
     and use in LCEL in the same way:
 
-    .. code-block:: python
-
-        from langchain_core.runnables import ConfigurableField
-        from langchain_core.runnables.configurable import RunnableConfigurableAlternatives
-        from langchain_openai import ChatOpenAI
-
-        prompt = RunnableConfigurableAlternatives(
-            which=ConfigurableField(id='prompt'),
-            default=PromptTemplate.from_template("Tell me a joke about {topic}"),
-            default_key='joke',
-            prefix_keys=False,
-            alternatives={"poem":PromptTemplate.from_template("Write a short poem about {topic}")}
-        )
-        chain = prompt | ChatOpenAI(model="gpt-3.5-turbo-0125", temperature=0)
-        chain.with_config(configurable={"prompt": "poem"}).invoke({"topic": "bears"})
-
-    """  # noqa: E501
+    ```python
+    from langchain_core.runnables import ConfigurableField
+    from langchain_core.runnables.configurable import (
+        RunnableConfigurableAlternatives,
+    )
+    from langchain_openai import ChatOpenAI
+
+    prompt = RunnableConfigurableAlternatives(
+        which=ConfigurableField(id="prompt"),
+        default=PromptTemplate.from_template("Tell me a joke about {topic}"),
+        default_key="joke",
+        prefix_keys=False,
+        alternatives={
+            "poem": PromptTemplate.from_template("Write a short poem about {topic}")
+        },
+    )
+    chain = prompt | ChatOpenAI(model="gpt-3.5-turbo-0125", temperature=0)
+    chain.with_config(configurable={"prompt": "poem"}).invoke({"topic": "bears"})
+    ```
+    """
 
     which: ConfigurableField
     """The ConfigurableField to use to choose between alternatives."""
 
     alternatives: dict[
         str,
-        Union[Runnable[Input, Output], Callable[[], Runnable[Input, Output]]],
+        Runnable[Input, Output] | Callable[[], Runnable[Input, Output]],
    ]
     """The alternatives to choose from."""
 
     default_key: str = "default"
-    """The enum value to use for the default option. Defaults to ``'default'``."""
+    """The enum value to use for the default option."""
 
     prefix_keys: bool
     """Whether to prefix configurable fields of each alternative with a namespace
@@ -614,7 +609,7 @@ class RunnableConfigurableAlternatives(DynamicRunnable[Input, Output]):
         )
 
     def _prepare(
-        self, config: Optional[RunnableConfig] = None
+        self, config: RunnableConfig | None = None
     ) -> tuple[Runnable[Input, Output], RunnableConfig]:
         config = ensure_config(config)
         which = config.get("configurable", {}).get(self.which.id, self.default_key)
@@ -660,7 +655,7 @@ def prefix_config_spec(
         prefix: The prefix to add.
 
     Returns:
-        ConfigurableFieldSpec: The prefixed ConfigurableFieldSpec.
+        The prefixed ConfigurableFieldSpec.
     """
     return (
         ConfigurableFieldSpec(
@@ -677,10 +672,13 @@ def prefix_config_spec(
 
 
 def make_options_spec(
-    spec: Union[ConfigurableFieldSingleOption, ConfigurableFieldMultiOption],
-    description: Optional[str],
+    spec: ConfigurableFieldSingleOption | ConfigurableFieldMultiOption,
+    description: str | None,
 ) -> ConfigurableFieldSpec:
-    """Make a ConfigurableFieldSpec for a ConfigurableFieldSingleOption or ConfigurableFieldMultiOption.
+    """Make options spec.
+
+    Make a ConfigurableFieldSpec for a ConfigurableFieldSingleOption or
+    ConfigurableFieldMultiOption.
 
     Args:
         spec: The ConfigurableFieldSingleOption or ConfigurableFieldMultiOption.
@@ -688,7 +686,7 @@ def make_options_spec(
 
     Returns:
         The ConfigurableFieldSpec.
-    """  # noqa: E501
+    """
     with _enums_for_spec_lock:
         if enum := _enums_for_spec.get(spec):
             pass
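
For context on the `prepare()` signature change above, here is a small sketch (it uses only public `langchain_core.runnables` exports; the toy chain itself is made up) of how a `DynamicRunnable` built via `configurable_alternatives` resolves to a concrete runnable plus config before delegating:

```python
from langchain_core.runnables import (
    ConfigurableField,
    RunnableLambda,
    RunnablePassthrough,
)

# Toy chain: identity by default, upper-casing as a configurable alternative.
alt = RunnablePassthrough().configurable_alternatives(
    ConfigurableField(id="mode"),
    default_key="identity",
    shout=RunnableLambda(lambda s: s.upper()),
)

# prepare() walks the DynamicRunnable and returns the selected runnable
# together with the resolved config; invoke()/stream() delegate to it.
runnable, config = alt.prepare({"configurable": {"mode": "shout"}})
print(runnable.invoke("hello"))  # HELLO
print(alt.invoke("hello"))  # hello (default alternative)
```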