langchain-core 1.0.0a6__py3-none-any.whl → 1.0.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (165)
  1. langchain_core/__init__.py +1 -1
  2. langchain_core/_api/__init__.py +3 -4
  3. langchain_core/_api/beta_decorator.py +23 -26
  4. langchain_core/_api/deprecation.py +51 -64
  5. langchain_core/_api/path.py +3 -6
  6. langchain_core/_import_utils.py +3 -4
  7. langchain_core/agents.py +55 -48
  8. langchain_core/caches.py +65 -66
  9. langchain_core/callbacks/__init__.py +1 -8
  10. langchain_core/callbacks/base.py +321 -336
  11. langchain_core/callbacks/file.py +44 -44
  12. langchain_core/callbacks/manager.py +454 -514
  13. langchain_core/callbacks/stdout.py +29 -30
  14. langchain_core/callbacks/streaming_stdout.py +32 -32
  15. langchain_core/callbacks/usage.py +60 -57
  16. langchain_core/chat_history.py +53 -68
  17. langchain_core/document_loaders/base.py +27 -25
  18. langchain_core/document_loaders/blob_loaders.py +1 -1
  19. langchain_core/document_loaders/langsmith.py +44 -48
  20. langchain_core/documents/__init__.py +23 -3
  21. langchain_core/documents/base.py +102 -94
  22. langchain_core/documents/compressor.py +10 -10
  23. langchain_core/documents/transformers.py +34 -35
  24. langchain_core/embeddings/fake.py +50 -54
  25. langchain_core/example_selectors/length_based.py +2 -2
  26. langchain_core/example_selectors/semantic_similarity.py +28 -32
  27. langchain_core/exceptions.py +21 -20
  28. langchain_core/globals.py +3 -151
  29. langchain_core/indexing/__init__.py +1 -1
  30. langchain_core/indexing/api.py +121 -126
  31. langchain_core/indexing/base.py +73 -75
  32. langchain_core/indexing/in_memory.py +4 -6
  33. langchain_core/language_models/__init__.py +14 -29
  34. langchain_core/language_models/_utils.py +58 -61
  35. langchain_core/language_models/base.py +82 -172
  36. langchain_core/language_models/chat_models.py +329 -402
  37. langchain_core/language_models/fake.py +11 -11
  38. langchain_core/language_models/fake_chat_models.py +42 -36
  39. langchain_core/language_models/llms.py +189 -269
  40. langchain_core/load/dump.py +9 -12
  41. langchain_core/load/load.py +18 -28
  42. langchain_core/load/mapping.py +2 -4
  43. langchain_core/load/serializable.py +42 -40
  44. langchain_core/messages/__init__.py +10 -16
  45. langchain_core/messages/ai.py +148 -148
  46. langchain_core/messages/base.py +53 -51
  47. langchain_core/messages/block_translators/__init__.py +19 -22
  48. langchain_core/messages/block_translators/anthropic.py +6 -6
  49. langchain_core/messages/block_translators/bedrock_converse.py +5 -5
  50. langchain_core/messages/block_translators/google_genai.py +10 -7
  51. langchain_core/messages/block_translators/google_vertexai.py +4 -32
  52. langchain_core/messages/block_translators/groq.py +117 -21
  53. langchain_core/messages/block_translators/langchain_v0.py +5 -5
  54. langchain_core/messages/block_translators/openai.py +11 -11
  55. langchain_core/messages/chat.py +2 -6
  56. langchain_core/messages/content.py +339 -330
  57. langchain_core/messages/function.py +6 -10
  58. langchain_core/messages/human.py +24 -31
  59. langchain_core/messages/modifier.py +2 -2
  60. langchain_core/messages/system.py +19 -29
  61. langchain_core/messages/tool.py +74 -90
  62. langchain_core/messages/utils.py +484 -510
  63. langchain_core/output_parsers/__init__.py +13 -10
  64. langchain_core/output_parsers/base.py +61 -61
  65. langchain_core/output_parsers/format_instructions.py +9 -4
  66. langchain_core/output_parsers/json.py +12 -10
  67. langchain_core/output_parsers/list.py +21 -23
  68. langchain_core/output_parsers/openai_functions.py +49 -47
  69. langchain_core/output_parsers/openai_tools.py +30 -23
  70. langchain_core/output_parsers/pydantic.py +13 -14
  71. langchain_core/output_parsers/string.py +5 -5
  72. langchain_core/output_parsers/transform.py +15 -17
  73. langchain_core/output_parsers/xml.py +35 -34
  74. langchain_core/outputs/__init__.py +1 -1
  75. langchain_core/outputs/chat_generation.py +18 -18
  76. langchain_core/outputs/chat_result.py +1 -3
  77. langchain_core/outputs/generation.py +16 -16
  78. langchain_core/outputs/llm_result.py +10 -10
  79. langchain_core/prompt_values.py +13 -19
  80. langchain_core/prompts/__init__.py +3 -27
  81. langchain_core/prompts/base.py +81 -86
  82. langchain_core/prompts/chat.py +308 -351
  83. langchain_core/prompts/dict.py +6 -6
  84. langchain_core/prompts/few_shot.py +81 -88
  85. langchain_core/prompts/few_shot_with_templates.py +11 -13
  86. langchain_core/prompts/image.py +12 -14
  87. langchain_core/prompts/loading.py +4 -6
  88. langchain_core/prompts/message.py +7 -7
  89. langchain_core/prompts/prompt.py +24 -39
  90. langchain_core/prompts/string.py +26 -10
  91. langchain_core/prompts/structured.py +49 -53
  92. langchain_core/rate_limiters.py +51 -60
  93. langchain_core/retrievers.py +61 -198
  94. langchain_core/runnables/base.py +1551 -1656
  95. langchain_core/runnables/branch.py +68 -70
  96. langchain_core/runnables/config.py +72 -89
  97. langchain_core/runnables/configurable.py +145 -161
  98. langchain_core/runnables/fallbacks.py +102 -96
  99. langchain_core/runnables/graph.py +91 -97
  100. langchain_core/runnables/graph_ascii.py +27 -28
  101. langchain_core/runnables/graph_mermaid.py +42 -51
  102. langchain_core/runnables/graph_png.py +43 -16
  103. langchain_core/runnables/history.py +175 -177
  104. langchain_core/runnables/passthrough.py +151 -167
  105. langchain_core/runnables/retry.py +46 -51
  106. langchain_core/runnables/router.py +30 -35
  107. langchain_core/runnables/schema.py +75 -80
  108. langchain_core/runnables/utils.py +60 -67
  109. langchain_core/stores.py +85 -121
  110. langchain_core/structured_query.py +8 -8
  111. langchain_core/sys_info.py +29 -29
  112. langchain_core/tools/__init__.py +1 -14
  113. langchain_core/tools/base.py +306 -245
  114. langchain_core/tools/convert.py +160 -155
  115. langchain_core/tools/render.py +10 -10
  116. langchain_core/tools/retriever.py +12 -11
  117. langchain_core/tools/simple.py +19 -24
  118. langchain_core/tools/structured.py +32 -39
  119. langchain_core/tracers/__init__.py +1 -9
  120. langchain_core/tracers/base.py +97 -99
  121. langchain_core/tracers/context.py +29 -52
  122. langchain_core/tracers/core.py +49 -53
  123. langchain_core/tracers/evaluation.py +11 -11
  124. langchain_core/tracers/event_stream.py +65 -64
  125. langchain_core/tracers/langchain.py +21 -21
  126. langchain_core/tracers/log_stream.py +45 -45
  127. langchain_core/tracers/memory_stream.py +3 -3
  128. langchain_core/tracers/root_listeners.py +16 -16
  129. langchain_core/tracers/run_collector.py +2 -4
  130. langchain_core/tracers/schemas.py +0 -129
  131. langchain_core/tracers/stdout.py +3 -3
  132. langchain_core/utils/__init__.py +1 -4
  133. langchain_core/utils/_merge.py +2 -2
  134. langchain_core/utils/aiter.py +57 -61
  135. langchain_core/utils/env.py +9 -9
  136. langchain_core/utils/function_calling.py +94 -188
  137. langchain_core/utils/html.py +7 -8
  138. langchain_core/utils/input.py +9 -6
  139. langchain_core/utils/interactive_env.py +1 -1
  140. langchain_core/utils/iter.py +36 -40
  141. langchain_core/utils/json.py +4 -3
  142. langchain_core/utils/json_schema.py +9 -9
  143. langchain_core/utils/mustache.py +8 -10
  144. langchain_core/utils/pydantic.py +35 -37
  145. langchain_core/utils/strings.py +6 -9
  146. langchain_core/utils/usage.py +1 -1
  147. langchain_core/utils/utils.py +66 -62
  148. langchain_core/vectorstores/base.py +182 -216
  149. langchain_core/vectorstores/in_memory.py +101 -176
  150. langchain_core/vectorstores/utils.py +5 -5
  151. langchain_core/version.py +1 -1
  152. langchain_core-1.0.4.dist-info/METADATA +69 -0
  153. langchain_core-1.0.4.dist-info/RECORD +172 -0
  154. {langchain_core-1.0.0a6.dist-info → langchain_core-1.0.4.dist-info}/WHEEL +1 -1
  155. langchain_core/memory.py +0 -120
  156. langchain_core/messages/block_translators/ollama.py +0 -47
  157. langchain_core/prompts/pipeline.py +0 -138
  158. langchain_core/pydantic_v1/__init__.py +0 -30
  159. langchain_core/pydantic_v1/dataclasses.py +0 -23
  160. langchain_core/pydantic_v1/main.py +0 -23
  161. langchain_core/tracers/langchain_v1.py +0 -31
  162. langchain_core/utils/loading.py +0 -35
  163. langchain_core-1.0.0a6.dist-info/METADATA +0 -67
  164. langchain_core-1.0.0a6.dist-info/RECORD +0 -181
  165. langchain_core-1.0.0a6.dist-info/entry_points.txt +0 -4
langchain_core/runnables/fallbacks.py
@@ -5,7 +5,7 @@ import inspect
  import typing
  from collections.abc import AsyncIterator, Iterator, Sequence
  from functools import wraps
- from typing import TYPE_CHECKING, Any, Optional, Union, cast
+ from typing import TYPE_CHECKING, Any, cast

  from pydantic import BaseModel, ConfigDict
  from typing_extensions import override
@@ -35,61 +35,59 @@ if TYPE_CHECKING:


  class RunnableWithFallbacks(RunnableSerializable[Input, Output]):
- """Runnable that can fallback to other Runnables if it fails.
+ """`Runnable` that can fallback to other `Runnable`s if it fails.

  External APIs (e.g., APIs for a language model) may at times experience
  degraded performance or even downtime.

- In these cases, it can be useful to have a fallback Runnable that can be
- used in place of the original Runnable (e.g., fallback to another LLM provider).
+ In these cases, it can be useful to have a fallback `Runnable` that can be
+ used in place of the original `Runnable` (e.g., fallback to another LLM provider).

- Fallbacks can be defined at the level of a single Runnable, or at the level
- of a chain of Runnables. Fallbacks are tried in order until one succeeds or
+ Fallbacks can be defined at the level of a single `Runnable`, or at the level
+ of a chain of `Runnable`s. Fallbacks are tried in order until one succeeds or
  all fail.

- While you can instantiate a ``RunnableWithFallbacks`` directly, it is usually
- more convenient to use the ``with_fallbacks`` method on a Runnable.
+ While you can instantiate a `RunnableWithFallbacks` directly, it is usually
+ more convenient to use the `with_fallbacks` method on a `Runnable`.

  Example:
+ ```python
+ from langchain_core.chat_models.openai import ChatOpenAI
+ from langchain_core.chat_models.anthropic import ChatAnthropic

- .. code-block:: python
-
- from langchain_core.chat_models.openai import ChatOpenAI
- from langchain_core.chat_models.anthropic import ChatAnthropic
-
- model = ChatAnthropic(model="claude-3-haiku-20240307").with_fallbacks(
- [ChatOpenAI(model="gpt-3.5-turbo-0125")]
- )
- # Will usually use ChatAnthropic, but fallback to ChatOpenAI
- # if ChatAnthropic fails.
- model.invoke("hello")
-
- # And you can also use fallbacks at the level of a chain.
- # Here if both LLM providers fail, we'll fallback to a good hardcoded
- # response.
+ model = ChatAnthropic(model="claude-3-haiku-20240307").with_fallbacks(
+ [ChatOpenAI(model="gpt-3.5-turbo-0125")]
+ )
+ # Will usually use ChatAnthropic, but fallback to ChatOpenAI
+ # if ChatAnthropic fails.
+ model.invoke("hello")

- from langchain_core.prompts import PromptTemplate
- from langchain_core.output_parser import StrOutputParser
- from langchain_core.runnables import RunnableLambda
+ # And you can also use fallbacks at the level of a chain.
+ # Here if both LLM providers fail, we'll fallback to a good hardcoded
+ # response.

+ from langchain_core.prompts import PromptTemplate
+ from langchain_core.output_parser import StrOutputParser
+ from langchain_core.runnables import RunnableLambda

- def when_all_is_lost(inputs):
- return (
- "Looks like our LLM providers are down. "
- "Here's a nice 🦜️ emoji for you instead."
- )

+ def when_all_is_lost(inputs):
+ return (
+ "Looks like our LLM providers are down. "
+ "Here's a nice 🦜️ emoji for you instead."
+ )

- chain_with_fallback = (
- PromptTemplate.from_template("Tell me a joke about {topic}")
- | model
- | StrOutputParser()
- ).with_fallbacks([RunnableLambda(when_all_is_lost)])

+ chain_with_fallback = (
+ PromptTemplate.from_template("Tell me a joke about {topic}")
+ | model
+ | StrOutputParser()
+ ).with_fallbacks([RunnableLambda(when_all_is_lost)])
+ ```
  """

  runnable: Runnable[Input, Output]
- """The Runnable to run first."""
+ """The `Runnable` to run first."""
  fallbacks: Sequence[Runnable[Input, Output]]
  """A sequence of fallbacks to try."""
  exceptions_to_handle: tuple[type[BaseException], ...] = (Exception,)
@@ -97,11 +95,14 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]):

  Any exception that is not a subclass of these exceptions will be raised immediately.
  """
- exception_key: Optional[str] = None
- """If string is specified then handled exceptions will be passed to fallbacks as
- part of the input under the specified key. If None, exceptions
- will not be passed to fallbacks. If used, the base Runnable and its fallbacks
- must accept a dictionary as input."""
+ exception_key: str | None = None
+ """If `string` is specified then handled exceptions will be passed to fallbacks as
+ part of the input under the specified key.
+
+ If `None`, exceptions will not be passed to fallbacks.
+
+ If used, the base `Runnable` and its fallbacks must accept a dictionary as input.
+ """

  model_config = ConfigDict(
  arbitrary_types_allowed=True,
@@ -118,14 +119,12 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]):
  return self.runnable.OutputType

  @override
- def get_input_schema(
- self, config: Optional[RunnableConfig] = None
- ) -> type[BaseModel]:
+ def get_input_schema(self, config: RunnableConfig | None = None) -> type[BaseModel]:
  return self.runnable.get_input_schema(config)

  @override
  def get_output_schema(
- self, config: Optional[RunnableConfig] = None
+ self, config: RunnableConfig | None = None
  ) -> type[BaseModel]:
  return self.runnable.get_output_schema(config)

@@ -141,32 +140,32 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]):
  @classmethod
  @override
  def is_lc_serializable(cls) -> bool:
- """Return True as this class is serializable."""
+ """Return `True` as this class is serializable."""
  return True

  @classmethod
  @override
  def get_lc_namespace(cls) -> list[str]:
- """Get the namespace of the langchain object.
+ """Get the namespace of the LangChain object.

  Returns:
- ``["langchain", "schema", "runnable"]``
+ `["langchain", "schema", "runnable"]`
  """
  return ["langchain", "schema", "runnable"]

  @property
  def runnables(self) -> Iterator[Runnable[Input, Output]]:
- """Iterator over the Runnable and its fallbacks.
+ """Iterator over the `Runnable` and its fallbacks.

  Yields:
- The Runnable then its fallbacks.
+ The `Runnable` then its fallbacks.
  """
  yield self.runnable
  yield from self.fallbacks

  @override
  def invoke(
- self, input: Input, config: Optional[RunnableConfig] = None, **kwargs: Any
+ self, input: Input, config: RunnableConfig | None = None, **kwargs: Any
  ) -> Output:
  if self.exception_key is not None and not isinstance(input, dict):
  msg = (
@@ -218,8 +217,8 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]):
  async def ainvoke(
  self,
  input: Input,
- config: Optional[RunnableConfig] = None,
- **kwargs: Optional[Any],
+ config: RunnableConfig | None = None,
+ **kwargs: Any | None,
  ) -> Output:
  if self.exception_key is not None and not isinstance(input, dict):
  msg = (
@@ -268,10 +267,10 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]):
  def batch(
  self,
  inputs: list[Input],
- config: Optional[Union[RunnableConfig, list[RunnableConfig]]] = None,
+ config: RunnableConfig | list[RunnableConfig] | None = None,
  *,
  return_exceptions: bool = False,
- **kwargs: Optional[Any],
+ **kwargs: Any | None,
  ) -> list[Output]:
  if self.exception_key is not None and not all(
  isinstance(input_, dict) for input_ in inputs
@@ -307,7 +306,9 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]):
  name=config.get("run_name") or self.get_name(),
  run_id=config.pop("run_id", None),
  )
- for cm, input_, config in zip(callback_managers, inputs, configs)
+ for cm, input_, config in zip(
+ callback_managers, inputs, configs, strict=False
+ )
  ]

  to_return: dict[int, Any] = {}
@@ -325,7 +326,9 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]):
  return_exceptions=True,
  **kwargs,
  )
- for (i, input_), output in zip(sorted(run_again.copy().items()), outputs):
+ for (i, input_), output in zip(
+ sorted(run_again.copy().items()), outputs, strict=False
+ ):
  if isinstance(output, BaseException) and not isinstance(
  output, self.exceptions_to_handle
  ):
@@ -360,10 +363,10 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]):
  async def abatch(
  self,
  inputs: list[Input],
- config: Optional[Union[RunnableConfig, list[RunnableConfig]]] = None,
+ config: RunnableConfig | list[RunnableConfig] | None = None,
  *,
  return_exceptions: bool = False,
- **kwargs: Optional[Any],
+ **kwargs: Any | None,
  ) -> list[Output]:
  if self.exception_key is not None and not all(
  isinstance(input_, dict) for input_ in inputs
@@ -400,11 +403,13 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]):
  name=config.get("run_name") or self.get_name(),
  run_id=config.pop("run_id", None),
  )
- for cm, input_, config in zip(callback_managers, inputs, configs)
+ for cm, input_, config in zip(
+ callback_managers, inputs, configs, strict=False
+ )
  )
  )

- to_return: dict[int, Union[Output, BaseException]] = {}
+ to_return: dict[int, Output | BaseException] = {}
  run_again = dict(enumerate(inputs))
  handled_exceptions: dict[int, BaseException] = {}
  first_to_raise = None
@@ -420,7 +425,9 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]):
  **kwargs,
  )

- for (i, input_), output in zip(sorted(run_again.copy().items()), outputs):
+ for (i, input_), output in zip(
+ sorted(run_again.copy().items()), outputs, strict=False
+ ):
  if isinstance(output, BaseException) and not isinstance(
  output, self.exceptions_to_handle
  ):
@@ -460,8 +467,8 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]):
  def stream(
  self,
  input: Input,
- config: Optional[RunnableConfig] = None,
- **kwargs: Optional[Any],
+ config: RunnableConfig | None = None,
+ **kwargs: Any | None,
  ) -> Iterator[Output]:
  if self.exception_key is not None and not isinstance(input, dict):
  msg = (
@@ -507,7 +514,7 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]):
  raise first_error

  yield chunk
- output: Optional[Output] = chunk
+ output: Output | None = chunk
  try:
  for chunk in stream:
  yield chunk
@@ -524,8 +531,8 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]):
  async def astream(
  self,
  input: Input,
- config: Optional[RunnableConfig] = None,
- **kwargs: Optional[Any],
+ config: RunnableConfig | None = None,
+ **kwargs: Any | None,
  ) -> AsyncIterator[Output]:
  if self.exception_key is not None and not isinstance(input, dict):
  msg = (
@@ -571,7 +578,7 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]):
  raise first_error

  yield chunk
- output: Optional[Output] = chunk
+ output: Output | None = chunk
  try:
  async for chunk in stream:
  yield chunk
@@ -585,37 +592,36 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]):
  await run_manager.on_chain_end(output)

  def __getattr__(self, name: str) -> Any:
- """Get an attribute from the wrapped Runnable and its fallbacks.
+ """Get an attribute from the wrapped `Runnable` and its fallbacks.

  Returns:
- If the attribute is anything other than a method that outputs a Runnable,
- returns getattr(self.runnable, name). If the attribute is a method that
- does return a new Runnable (e.g. llm.bind_tools([...]) outputs a new
- RunnableBinding) then self.runnable and each of the runnables in
- self.fallbacks is replaced with getattr(x, name).
+ If the attribute is anything other than a method that outputs a `Runnable`,
+ returns `getattr(self.runnable, name)`. If the attribute is a method that
+ does return a new `Runnable` (e.g. `model.bind_tools([...])` outputs a new
+ `RunnableBinding`) then `self.runnable` and each of the runnables in
+ `self.fallbacks` is replaced with `getattr(x, name)`.

  Example:
- .. code-block:: python
-
- from langchain_openai import ChatOpenAI
- from langchain_anthropic import ChatAnthropic
-
- gpt_4o = ChatOpenAI(model="gpt-4o")
- claude_3_sonnet = ChatAnthropic(model="claude-3-7-sonnet-20250219")
- llm = gpt_4o.with_fallbacks([claude_3_sonnet])
-
- llm.model_name
- # -> "gpt-4o"
-
- # .bind_tools() is called on both ChatOpenAI and ChatAnthropic
- # Equivalent to:
- # gpt_4o.bind_tools([...]).with_fallbacks([claude_3_sonnet.bind_tools([...])])
- llm.bind_tools([...])
- # -> RunnableWithFallbacks(
- runnable=RunnableBinding(bound=ChatOpenAI(...), kwargs={"tools": [...]}),
- fallbacks=[RunnableBinding(bound=ChatAnthropic(...), kwargs={"tools": [...]})],
- )
-
+ ```python
+ from langchain_openai import ChatOpenAI
+ from langchain_anthropic import ChatAnthropic
+
+ gpt_4o = ChatOpenAI(model="gpt-4o")
+ claude_3_sonnet = ChatAnthropic(model="claude-sonnet-4-5-20250929")
+ model = gpt_4o.with_fallbacks([claude_3_sonnet])
+
+ model.model_name
+ # -> "gpt-4o"
+
+ # .bind_tools() is called on both ChatOpenAI and ChatAnthropic
+ # Equivalent to:
+ # gpt_4o.bind_tools([...]).with_fallbacks([claude_3_sonnet.bind_tools([...])])
+ model.bind_tools([...])
+ # -> RunnableWithFallbacks(
+ runnable=RunnableBinding(bound=ChatOpenAI(...), kwargs={"tools": [...]}),
+ fallbacks=[RunnableBinding(bound=ChatAnthropic(...), kwargs={"tools": [...]})],
+ )
+ ```
  """ # noqa: E501
  attr = getattr(self.runnable, name)
  if _returns_runnable(attr):
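
As context for the `exception_key` docstring reworked above, here is a minimal sketch of that behavior. The `flaky`/`backup` functions and the `"exception"` key name are illustrative assumptions, not part of the package; only `RunnableLambda` and `with_fallbacks` come from `langchain_core`.

```python
# Minimal sketch (illustrative, not from the diff): exception_key places the
# handled exception into the fallback's input dict under the given key.
from langchain_core.runnables import RunnableLambda


def flaky(inputs: dict) -> str:
    # Stand-in for a primary runnable whose provider is down.
    raise RuntimeError("primary provider unavailable")


def backup(inputs: dict) -> str:
    # With exception_key="exception", the handled exception is available
    # under that key when the fallback runs.
    return f"fallback saw: {inputs['exception']}"


chain = RunnableLambda(flaky).with_fallbacks(
    [RunnableLambda(backup)],
    exception_key="exception",
)

# Because exception_key is set, the base runnable and its fallbacks must
# accept a dictionary as input.
print(chain.invoke({"question": "hello"}))
# -> "fallback saw: primary provider unavailable"
```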