langchain-core 0.4.0.dev0__py3-none-any.whl → 1.0.0__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.

Potentially problematic release: this version of langchain-core might be problematic.

Files changed (172)
  1. langchain_core/__init__.py +1 -1
  2. langchain_core/_api/__init__.py +3 -4
  3. langchain_core/_api/beta_decorator.py +45 -70
  4. langchain_core/_api/deprecation.py +80 -80
  5. langchain_core/_api/path.py +22 -8
  6. langchain_core/_import_utils.py +10 -4
  7. langchain_core/agents.py +25 -21
  8. langchain_core/caches.py +53 -63
  9. langchain_core/callbacks/__init__.py +1 -8
  10. langchain_core/callbacks/base.py +341 -348
  11. langchain_core/callbacks/file.py +55 -44
  12. langchain_core/callbacks/manager.py +546 -683
  13. langchain_core/callbacks/stdout.py +29 -30
  14. langchain_core/callbacks/streaming_stdout.py +35 -36
  15. langchain_core/callbacks/usage.py +65 -70
  16. langchain_core/chat_history.py +48 -55
  17. langchain_core/document_loaders/base.py +46 -21
  18. langchain_core/document_loaders/langsmith.py +39 -36
  19. langchain_core/documents/__init__.py +0 -1
  20. langchain_core/documents/base.py +96 -74
  21. langchain_core/documents/compressor.py +12 -9
  22. langchain_core/documents/transformers.py +29 -28
  23. langchain_core/embeddings/fake.py +56 -57
  24. langchain_core/env.py +2 -3
  25. langchain_core/example_selectors/base.py +12 -0
  26. langchain_core/example_selectors/length_based.py +1 -1
  27. langchain_core/example_selectors/semantic_similarity.py +21 -25
  28. langchain_core/exceptions.py +15 -9
  29. langchain_core/globals.py +4 -163
  30. langchain_core/indexing/api.py +132 -125
  31. langchain_core/indexing/base.py +64 -67
  32. langchain_core/indexing/in_memory.py +26 -6
  33. langchain_core/language_models/__init__.py +15 -27
  34. langchain_core/language_models/_utils.py +267 -117
  35. langchain_core/language_models/base.py +92 -177
  36. langchain_core/language_models/chat_models.py +547 -407
  37. langchain_core/language_models/fake.py +11 -11
  38. langchain_core/language_models/fake_chat_models.py +72 -118
  39. langchain_core/language_models/llms.py +168 -242
  40. langchain_core/load/dump.py +8 -11
  41. langchain_core/load/load.py +32 -28
  42. langchain_core/load/mapping.py +2 -4
  43. langchain_core/load/serializable.py +50 -56
  44. langchain_core/messages/__init__.py +36 -51
  45. langchain_core/messages/ai.py +377 -150
  46. langchain_core/messages/base.py +239 -47
  47. langchain_core/messages/block_translators/__init__.py +111 -0
  48. langchain_core/messages/block_translators/anthropic.py +470 -0
  49. langchain_core/messages/block_translators/bedrock.py +94 -0
  50. langchain_core/messages/block_translators/bedrock_converse.py +297 -0
  51. langchain_core/messages/block_translators/google_genai.py +530 -0
  52. langchain_core/messages/block_translators/google_vertexai.py +21 -0
  53. langchain_core/messages/block_translators/groq.py +143 -0
  54. langchain_core/messages/block_translators/langchain_v0.py +301 -0
  55. langchain_core/messages/block_translators/openai.py +1010 -0
  56. langchain_core/messages/chat.py +2 -3
  57. langchain_core/messages/content.py +1423 -0
  58. langchain_core/messages/function.py +7 -7
  59. langchain_core/messages/human.py +44 -38
  60. langchain_core/messages/modifier.py +3 -2
  61. langchain_core/messages/system.py +40 -27
  62. langchain_core/messages/tool.py +160 -58
  63. langchain_core/messages/utils.py +527 -638
  64. langchain_core/output_parsers/__init__.py +1 -14
  65. langchain_core/output_parsers/base.py +68 -104
  66. langchain_core/output_parsers/json.py +13 -17
  67. langchain_core/output_parsers/list.py +11 -33
  68. langchain_core/output_parsers/openai_functions.py +56 -74
  69. langchain_core/output_parsers/openai_tools.py +68 -109
  70. langchain_core/output_parsers/pydantic.py +15 -13
  71. langchain_core/output_parsers/string.py +6 -2
  72. langchain_core/output_parsers/transform.py +17 -60
  73. langchain_core/output_parsers/xml.py +34 -44
  74. langchain_core/outputs/__init__.py +1 -1
  75. langchain_core/outputs/chat_generation.py +26 -11
  76. langchain_core/outputs/chat_result.py +1 -3
  77. langchain_core/outputs/generation.py +17 -6
  78. langchain_core/outputs/llm_result.py +15 -8
  79. langchain_core/prompt_values.py +29 -123
  80. langchain_core/prompts/__init__.py +3 -27
  81. langchain_core/prompts/base.py +48 -63
  82. langchain_core/prompts/chat.py +259 -288
  83. langchain_core/prompts/dict.py +19 -11
  84. langchain_core/prompts/few_shot.py +84 -90
  85. langchain_core/prompts/few_shot_with_templates.py +14 -12
  86. langchain_core/prompts/image.py +19 -14
  87. langchain_core/prompts/loading.py +6 -8
  88. langchain_core/prompts/message.py +7 -8
  89. langchain_core/prompts/prompt.py +42 -43
  90. langchain_core/prompts/string.py +37 -16
  91. langchain_core/prompts/structured.py +43 -46
  92. langchain_core/rate_limiters.py +51 -60
  93. langchain_core/retrievers.py +52 -192
  94. langchain_core/runnables/base.py +1727 -1683
  95. langchain_core/runnables/branch.py +52 -73
  96. langchain_core/runnables/config.py +89 -103
  97. langchain_core/runnables/configurable.py +128 -130
  98. langchain_core/runnables/fallbacks.py +93 -82
  99. langchain_core/runnables/graph.py +127 -127
  100. langchain_core/runnables/graph_ascii.py +63 -41
  101. langchain_core/runnables/graph_mermaid.py +87 -70
  102. langchain_core/runnables/graph_png.py +31 -36
  103. langchain_core/runnables/history.py +145 -161
  104. langchain_core/runnables/passthrough.py +141 -144
  105. langchain_core/runnables/retry.py +84 -68
  106. langchain_core/runnables/router.py +33 -37
  107. langchain_core/runnables/schema.py +79 -72
  108. langchain_core/runnables/utils.py +95 -139
  109. langchain_core/stores.py +85 -131
  110. langchain_core/structured_query.py +11 -15
  111. langchain_core/sys_info.py +31 -32
  112. langchain_core/tools/__init__.py +1 -14
  113. langchain_core/tools/base.py +221 -247
  114. langchain_core/tools/convert.py +144 -161
  115. langchain_core/tools/render.py +10 -10
  116. langchain_core/tools/retriever.py +12 -19
  117. langchain_core/tools/simple.py +52 -29
  118. langchain_core/tools/structured.py +56 -60
  119. langchain_core/tracers/__init__.py +1 -9
  120. langchain_core/tracers/_streaming.py +6 -7
  121. langchain_core/tracers/base.py +103 -112
  122. langchain_core/tracers/context.py +29 -48
  123. langchain_core/tracers/core.py +142 -105
  124. langchain_core/tracers/evaluation.py +30 -34
  125. langchain_core/tracers/event_stream.py +162 -117
  126. langchain_core/tracers/langchain.py +34 -36
  127. langchain_core/tracers/log_stream.py +87 -49
  128. langchain_core/tracers/memory_stream.py +3 -3
  129. langchain_core/tracers/root_listeners.py +18 -34
  130. langchain_core/tracers/run_collector.py +8 -20
  131. langchain_core/tracers/schemas.py +0 -125
  132. langchain_core/tracers/stdout.py +3 -3
  133. langchain_core/utils/__init__.py +1 -4
  134. langchain_core/utils/_merge.py +47 -9
  135. langchain_core/utils/aiter.py +70 -66
  136. langchain_core/utils/env.py +12 -9
  137. langchain_core/utils/function_calling.py +139 -206
  138. langchain_core/utils/html.py +7 -8
  139. langchain_core/utils/input.py +6 -6
  140. langchain_core/utils/interactive_env.py +6 -2
  141. langchain_core/utils/iter.py +48 -45
  142. langchain_core/utils/json.py +14 -4
  143. langchain_core/utils/json_schema.py +159 -43
  144. langchain_core/utils/mustache.py +32 -25
  145. langchain_core/utils/pydantic.py +67 -40
  146. langchain_core/utils/strings.py +5 -5
  147. langchain_core/utils/usage.py +1 -1
  148. langchain_core/utils/utils.py +104 -62
  149. langchain_core/vectorstores/base.py +131 -179
  150. langchain_core/vectorstores/in_memory.py +113 -182
  151. langchain_core/vectorstores/utils.py +23 -17
  152. langchain_core/version.py +1 -1
  153. langchain_core-1.0.0.dist-info/METADATA +68 -0
  154. langchain_core-1.0.0.dist-info/RECORD +172 -0
  155. {langchain_core-0.4.0.dev0.dist-info → langchain_core-1.0.0.dist-info}/WHEEL +1 -1
  156. langchain_core/beta/__init__.py +0 -1
  157. langchain_core/beta/runnables/__init__.py +0 -1
  158. langchain_core/beta/runnables/context.py +0 -448
  159. langchain_core/memory.py +0 -116
  160. langchain_core/messages/content_blocks.py +0 -1435
  161. langchain_core/prompts/pipeline.py +0 -133
  162. langchain_core/pydantic_v1/__init__.py +0 -30
  163. langchain_core/pydantic_v1/dataclasses.py +0 -23
  164. langchain_core/pydantic_v1/main.py +0 -23
  165. langchain_core/tracers/langchain_v1.py +0 -23
  166. langchain_core/utils/loading.py +0 -31
  167. langchain_core/v1/__init__.py +0 -1
  168. langchain_core/v1/chat_models.py +0 -1047
  169. langchain_core/v1/messages.py +0 -755
  170. langchain_core-0.4.0.dev0.dist-info/METADATA +0 -108
  171. langchain_core-0.4.0.dev0.dist-info/RECORD +0 -177
  172. langchain_core-0.4.0.dev0.dist-info/entry_points.txt +0 -4
@@ -5,11 +5,12 @@ import inspect
  import typing
  from collections.abc import AsyncIterator, Iterator, Sequence
  from functools import wraps
- from typing import TYPE_CHECKING, Any, Optional, Union
+ from typing import TYPE_CHECKING, Any, cast

  from pydantic import BaseModel, ConfigDict
  from typing_extensions import override

+ from langchain_core.callbacks.manager import AsyncCallbackManager, CallbackManager
  from langchain_core.runnables.base import Runnable, RunnableSerializable
  from langchain_core.runnables.config import (
  RunnableConfig,
@@ -46,41 +47,43 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]):
  of a chain of Runnables. Fallbacks are tried in order until one succeeds or
  all fail.

- While you can instantiate a ``RunnableWithFallbacks`` directly, it is usually
- more convenient to use the ``with_fallbacks`` method on a Runnable.
+ While you can instantiate a `RunnableWithFallbacks` directly, it is usually
+ more convenient to use the `with_fallbacks` method on a Runnable.

  Example:
+ ```python
+ from langchain_core.chat_models.openai import ChatOpenAI
+ from langchain_core.chat_models.anthropic import ChatAnthropic

- .. code-block:: python
-
- from langchain_core.chat_models.openai import ChatOpenAI
- from langchain_core.chat_models.anthropic import ChatAnthropic
+ model = ChatAnthropic(model="claude-3-haiku-20240307").with_fallbacks(
+ [ChatOpenAI(model="gpt-3.5-turbo-0125")]
+ )
+ # Will usually use ChatAnthropic, but fallback to ChatOpenAI
+ # if ChatAnthropic fails.
+ model.invoke("hello")

- model = ChatAnthropic(
- model="claude-3-haiku-20240307"
- ).with_fallbacks([ChatOpenAI(model="gpt-3.5-turbo-0125")])
- # Will usually use ChatAnthropic, but fallback to ChatOpenAI
- # if ChatAnthropic fails.
- model.invoke('hello')
+ # And you can also use fallbacks at the level of a chain.
+ # Here if both LLM providers fail, we'll fallback to a good hardcoded
+ # response.

- # And you can also use fallbacks at the level of a chain.
- # Here if both LLM providers fail, we'll fallback to a good hardcoded
- # response.
+ from langchain_core.prompts import PromptTemplate
+ from langchain_core.output_parser import StrOutputParser
+ from langchain_core.runnables import RunnableLambda

- from langchain_core.prompts import PromptTemplate
- from langchain_core.output_parser import StrOutputParser
- from langchain_core.runnables import RunnableLambda

- def when_all_is_lost(inputs):
- return ("Looks like our LLM providers are down. "
- "Here's a nice 🦜️ emoji for you instead.")
+ def when_all_is_lost(inputs):
+ return (
+ "Looks like our LLM providers are down. "
+ "Here's a nice 🦜️ emoji for you instead."
+ )

- chain_with_fallback = (
- PromptTemplate.from_template('Tell me a joke about {topic}')
- | model
- | StrOutputParser()
- ).with_fallbacks([RunnableLambda(when_all_is_lost)])

+ chain_with_fallback = (
+ PromptTemplate.from_template("Tell me a joke about {topic}")
+ | model
+ | StrOutputParser()
+ ).with_fallbacks([RunnableLambda(when_all_is_lost)])
+ ```
  """

  runnable: Runnable[Input, Output]
@@ -92,9 +95,9 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]):

  Any exception that is not a subclass of these exceptions will be raised immediately.
  """
- exception_key: Optional[str] = None
- """If string is specified then handled exceptions will be passed to fallbacks as
- part of the input under the specified key. If None, exceptions
+ exception_key: str | None = None
+ """If `string` is specified then handled exceptions will be passed to fallbacks as
+ part of the input under the specified key. If `None`, exceptions
  will not be passed to fallbacks. If used, the base Runnable and its fallbacks
  must accept a dictionary as input."""

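For context on the `exception_key` field documented in the hunk above, here is a minimal sketch (not part of this diff) of how a handled exception is forwarded to a fallback; the `unreliable` and `recover` helpers are hypothetical:

```python
from langchain_core.runnables import RunnableLambda


def unreliable(inputs: dict) -> str:
    # Primary runnable that always fails, for demonstration purposes.
    raise ValueError("primary runnable failed")


def recover(inputs: dict) -> str:
    # Because exception_key="exception" below, the handled exception is
    # added to the input dict under that key before the fallback runs.
    return f"Recovered from: {inputs['exception']}"


chain = RunnableLambda(unreliable).with_fallbacks(
    [RunnableLambda(recover)],
    exception_key="exception",
)

# Input must be a dict whenever exception_key is used.
print(chain.invoke({"topic": "hello"}))
# -> Recovered from: primary runnable failed
```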
@@ -113,14 +116,12 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]):
  return self.runnable.OutputType

  @override
- def get_input_schema(
- self, config: Optional[RunnableConfig] = None
- ) -> type[BaseModel]:
+ def get_input_schema(self, config: RunnableConfig | None = None) -> type[BaseModel]:
  return self.runnable.get_input_schema(config)

  @override
  def get_output_schema(
- self, config: Optional[RunnableConfig] = None
+ self, config: RunnableConfig | None = None
  ) -> type[BaseModel]:
  return self.runnable.get_output_schema(config)

@@ -136,26 +137,32 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]):
  @classmethod
  @override
  def is_lc_serializable(cls) -> bool:
+ """Return True as this class is serializable."""
  return True

  @classmethod
  @override
  def get_lc_namespace(cls) -> list[str]:
- """Get the namespace of the langchain object.
+ """Get the namespace of the LangChain object.

- Defaults to ["langchain", "schema", "runnable"].
+ Returns:
+ `["langchain", "schema", "runnable"]`
  """
  return ["langchain", "schema", "runnable"]

  @property
  def runnables(self) -> Iterator[Runnable[Input, Output]]:
- """Iterator over the Runnable and its fallbacks."""
+ """Iterator over the Runnable and its fallbacks.
+
+ Yields:
+ The Runnable then its fallbacks.
+ """
  yield self.runnable
  yield from self.fallbacks

  @override
  def invoke(
- self, input: Input, config: Optional[RunnableConfig] = None, **kwargs: Any
+ self, input: Input, config: RunnableConfig | None = None, **kwargs: Any
  ) -> Output:
  if self.exception_key is not None and not isinstance(input, dict):
  msg = (
@@ -207,8 +214,8 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]):
  async def ainvoke(
  self,
  input: Input,
- config: Optional[RunnableConfig] = None,
- **kwargs: Optional[Any],
+ config: RunnableConfig | None = None,
+ **kwargs: Any | None,
  ) -> Output:
  if self.exception_key is not None and not isinstance(input, dict):
  msg = (
@@ -257,13 +264,11 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]):
  def batch(
  self,
  inputs: list[Input],
- config: Optional[Union[RunnableConfig, list[RunnableConfig]]] = None,
+ config: RunnableConfig | list[RunnableConfig] | None = None,
  *,
  return_exceptions: bool = False,
- **kwargs: Optional[Any],
+ **kwargs: Any | None,
  ) -> list[Output]:
- from langchain_core.callbacks.manager import CallbackManager
-
  if self.exception_key is not None and not all(
  isinstance(input_, dict) for input_ in inputs
  ):
@@ -298,7 +303,9 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]):
  name=config.get("run_name") or self.get_name(),
  run_id=config.pop("run_id", None),
  )
- for cm, input_, config in zip(callback_managers, inputs, configs)
+ for cm, input_, config in zip(
+ callback_managers, inputs, configs, strict=False
+ )
  ]

  to_return: dict[int, Any] = {}
@@ -316,7 +323,9 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]):
  return_exceptions=True,
  **kwargs,
  )
- for (i, input_), output in zip(sorted(run_again.copy().items()), outputs):
+ for (i, input_), output in zip(
+ sorted(run_again.copy().items()), outputs, strict=False
+ ):
  if isinstance(output, BaseException) and not isinstance(
  output, self.exceptions_to_handle
  ):
@@ -351,13 +360,11 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]):
  async def abatch(
  self,
  inputs: list[Input],
- config: Optional[Union[RunnableConfig, list[RunnableConfig]]] = None,
+ config: RunnableConfig | list[RunnableConfig] | None = None,
  *,
  return_exceptions: bool = False,
- **kwargs: Optional[Any],
+ **kwargs: Any | None,
  ) -> list[Output]:
- from langchain_core.callbacks.manager import AsyncCallbackManager
-
  if self.exception_key is not None and not all(
  isinstance(input_, dict) for input_ in inputs
  ):
@@ -393,11 +400,13 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]):
  name=config.get("run_name") or self.get_name(),
  run_id=config.pop("run_id", None),
  )
- for cm, input_, config in zip(callback_managers, inputs, configs)
+ for cm, input_, config in zip(
+ callback_managers, inputs, configs, strict=False
+ )
  )
  )

- to_return = {}
+ to_return: dict[int, Output | BaseException] = {}
  run_again = dict(enumerate(inputs))
  handled_exceptions: dict[int, BaseException] = {}
  first_to_raise = None
@@ -413,7 +422,9 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]):
  **kwargs,
  )

- for (i, input_), output in zip(sorted(run_again.copy().items()), outputs):
+ for (i, input_), output in zip(
+ sorted(run_again.copy().items()), outputs, strict=False
+ ):
  if isinstance(output, BaseException) and not isinstance(
  output, self.exceptions_to_handle
  ):
@@ -447,14 +458,14 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]):
  if not return_exceptions and sorted_handled_exceptions:
  raise sorted_handled_exceptions[0][1]
  to_return.update(handled_exceptions)
- return [output for _, output in sorted(to_return.items())] # type: ignore[misc]
+ return [cast("Output", output) for _, output in sorted(to_return.items())]

  @override
  def stream(
  self,
  input: Input,
- config: Optional[RunnableConfig] = None,
- **kwargs: Optional[Any],
+ config: RunnableConfig | None = None,
+ **kwargs: Any | None,
  ) -> Iterator[Output]:
  if self.exception_key is not None and not isinstance(input, dict):
  msg = (
@@ -500,7 +511,7 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]):
  raise first_error

  yield chunk
- output: Optional[Output] = chunk
+ output: Output | None = chunk
  try:
  for chunk in stream:
  yield chunk
@@ -517,8 +528,8 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]):
  async def astream(
  self,
  input: Input,
- config: Optional[RunnableConfig] = None,
- **kwargs: Optional[Any],
+ config: RunnableConfig | None = None,
+ **kwargs: Any | None,
  ) -> AsyncIterator[Output]:
  if self.exception_key is not None and not isinstance(input, dict):
  msg = (
@@ -564,12 +575,12 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]):
  raise first_error

  yield chunk
- output: Optional[Output] = chunk
+ output: Output | None = chunk
  try:
  async for chunk in stream:
  yield chunk
  try:
- output = output + chunk
+ output = output + chunk # type: ignore[operator]
  except TypeError:
  output = None
  except BaseException as e:
@@ -583,32 +594,32 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]):
  Returns:
  If the attribute is anything other than a method that outputs a Runnable,
  returns getattr(self.runnable, name). If the attribute is a method that
- does return a new Runnable (e.g. llm.bind_tools([...]) outputs a new
+ does return a new Runnable (e.g. model.bind_tools([...]) outputs a new
  RunnableBinding) then self.runnable and each of the runnables in
  self.fallbacks is replaced with getattr(x, name).

  Example:
- .. code-block:: python
-
- from langchain_openai import ChatOpenAI
- from langchain_anthropic import ChatAnthropic
-
- gpt_4o = ChatOpenAI(model="gpt-4o")
- claude_3_sonnet = ChatAnthropic(model="claude-3-7-sonnet-20250219")
- llm = gpt_4o.with_fallbacks([claude_3_sonnet])
-
- llm.model_name
- # -> "gpt-4o"
-
- # .bind_tools() is called on both ChatOpenAI and ChatAnthropic
- # Equivalent to:
- # gpt_4o.bind_tools([...]).with_fallbacks([claude_3_sonnet.bind_tools([...])])
- llm.bind_tools([...])
- # -> RunnableWithFallbacks(
- runnable=RunnableBinding(bound=ChatOpenAI(...), kwargs={"tools": [...]}),
- fallbacks=[RunnableBinding(bound=ChatAnthropic(...), kwargs={"tools": [...]})],
- )
+ ```python
+ from langchain_openai import ChatOpenAI
+ from langchain_anthropic import ChatAnthropic
+
+ gpt_4o = ChatOpenAI(model="gpt-4o")
+ claude_3_sonnet = ChatAnthropic(model="claude-3-7-sonnet-20250219")
+ model = gpt_4o.with_fallbacks([claude_3_sonnet])
+
+ model.model_name
+ # -> "gpt-4o"
+
+ # .bind_tools() is called on both ChatOpenAI and ChatAnthropic
+ # Equivalent to:
+ # gpt_4o.bind_tools([...]).with_fallbacks([claude_3_sonnet.bind_tools([...])])
+ model.bind_tools([...])
+ # -> RunnableWithFallbacks(
+ runnable=RunnableBinding(bound=ChatOpenAI(...), kwargs={"tools": [...]}),
+ fallbacks=[RunnableBinding(bound=ChatAnthropic(...), kwargs={"tools": [...]})],
+ )

+ ```
  """ # noqa: E501
  attr = getattr(self.runnable, name)
  if _returns_runnable(attr):