langchain-core 0.4.0.dev0__py3-none-any.whl → 1.0.0__py3-none-any.whl
This diff compares two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
Potentially problematic release: this version of langchain-core has been flagged for review.
- langchain_core/__init__.py +1 -1
- langchain_core/_api/__init__.py +3 -4
- langchain_core/_api/beta_decorator.py +45 -70
- langchain_core/_api/deprecation.py +80 -80
- langchain_core/_api/path.py +22 -8
- langchain_core/_import_utils.py +10 -4
- langchain_core/agents.py +25 -21
- langchain_core/caches.py +53 -63
- langchain_core/callbacks/__init__.py +1 -8
- langchain_core/callbacks/base.py +341 -348
- langchain_core/callbacks/file.py +55 -44
- langchain_core/callbacks/manager.py +546 -683
- langchain_core/callbacks/stdout.py +29 -30
- langchain_core/callbacks/streaming_stdout.py +35 -36
- langchain_core/callbacks/usage.py +65 -70
- langchain_core/chat_history.py +48 -55
- langchain_core/document_loaders/base.py +46 -21
- langchain_core/document_loaders/langsmith.py +39 -36
- langchain_core/documents/__init__.py +0 -1
- langchain_core/documents/base.py +96 -74
- langchain_core/documents/compressor.py +12 -9
- langchain_core/documents/transformers.py +29 -28
- langchain_core/embeddings/fake.py +56 -57
- langchain_core/env.py +2 -3
- langchain_core/example_selectors/base.py +12 -0
- langchain_core/example_selectors/length_based.py +1 -1
- langchain_core/example_selectors/semantic_similarity.py +21 -25
- langchain_core/exceptions.py +15 -9
- langchain_core/globals.py +4 -163
- langchain_core/indexing/api.py +132 -125
- langchain_core/indexing/base.py +64 -67
- langchain_core/indexing/in_memory.py +26 -6
- langchain_core/language_models/__init__.py +15 -27
- langchain_core/language_models/_utils.py +267 -117
- langchain_core/language_models/base.py +92 -177
- langchain_core/language_models/chat_models.py +547 -407
- langchain_core/language_models/fake.py +11 -11
- langchain_core/language_models/fake_chat_models.py +72 -118
- langchain_core/language_models/llms.py +168 -242
- langchain_core/load/dump.py +8 -11
- langchain_core/load/load.py +32 -28
- langchain_core/load/mapping.py +2 -4
- langchain_core/load/serializable.py +50 -56
- langchain_core/messages/__init__.py +36 -51
- langchain_core/messages/ai.py +377 -150
- langchain_core/messages/base.py +239 -47
- langchain_core/messages/block_translators/__init__.py +111 -0
- langchain_core/messages/block_translators/anthropic.py +470 -0
- langchain_core/messages/block_translators/bedrock.py +94 -0
- langchain_core/messages/block_translators/bedrock_converse.py +297 -0
- langchain_core/messages/block_translators/google_genai.py +530 -0
- langchain_core/messages/block_translators/google_vertexai.py +21 -0
- langchain_core/messages/block_translators/groq.py +143 -0
- langchain_core/messages/block_translators/langchain_v0.py +301 -0
- langchain_core/messages/block_translators/openai.py +1010 -0
- langchain_core/messages/chat.py +2 -3
- langchain_core/messages/content.py +1423 -0
- langchain_core/messages/function.py +7 -7
- langchain_core/messages/human.py +44 -38
- langchain_core/messages/modifier.py +3 -2
- langchain_core/messages/system.py +40 -27
- langchain_core/messages/tool.py +160 -58
- langchain_core/messages/utils.py +527 -638
- langchain_core/output_parsers/__init__.py +1 -14
- langchain_core/output_parsers/base.py +68 -104
- langchain_core/output_parsers/json.py +13 -17
- langchain_core/output_parsers/list.py +11 -33
- langchain_core/output_parsers/openai_functions.py +56 -74
- langchain_core/output_parsers/openai_tools.py +68 -109
- langchain_core/output_parsers/pydantic.py +15 -13
- langchain_core/output_parsers/string.py +6 -2
- langchain_core/output_parsers/transform.py +17 -60
- langchain_core/output_parsers/xml.py +34 -44
- langchain_core/outputs/__init__.py +1 -1
- langchain_core/outputs/chat_generation.py +26 -11
- langchain_core/outputs/chat_result.py +1 -3
- langchain_core/outputs/generation.py +17 -6
- langchain_core/outputs/llm_result.py +15 -8
- langchain_core/prompt_values.py +29 -123
- langchain_core/prompts/__init__.py +3 -27
- langchain_core/prompts/base.py +48 -63
- langchain_core/prompts/chat.py +259 -288
- langchain_core/prompts/dict.py +19 -11
- langchain_core/prompts/few_shot.py +84 -90
- langchain_core/prompts/few_shot_with_templates.py +14 -12
- langchain_core/prompts/image.py +19 -14
- langchain_core/prompts/loading.py +6 -8
- langchain_core/prompts/message.py +7 -8
- langchain_core/prompts/prompt.py +42 -43
- langchain_core/prompts/string.py +37 -16
- langchain_core/prompts/structured.py +43 -46
- langchain_core/rate_limiters.py +51 -60
- langchain_core/retrievers.py +52 -192
- langchain_core/runnables/base.py +1727 -1683
- langchain_core/runnables/branch.py +52 -73
- langchain_core/runnables/config.py +89 -103
- langchain_core/runnables/configurable.py +128 -130
- langchain_core/runnables/fallbacks.py +93 -82
- langchain_core/runnables/graph.py +127 -127
- langchain_core/runnables/graph_ascii.py +63 -41
- langchain_core/runnables/graph_mermaid.py +87 -70
- langchain_core/runnables/graph_png.py +31 -36
- langchain_core/runnables/history.py +145 -161
- langchain_core/runnables/passthrough.py +141 -144
- langchain_core/runnables/retry.py +84 -68
- langchain_core/runnables/router.py +33 -37
- langchain_core/runnables/schema.py +79 -72
- langchain_core/runnables/utils.py +95 -139
- langchain_core/stores.py +85 -131
- langchain_core/structured_query.py +11 -15
- langchain_core/sys_info.py +31 -32
- langchain_core/tools/__init__.py +1 -14
- langchain_core/tools/base.py +221 -247
- langchain_core/tools/convert.py +144 -161
- langchain_core/tools/render.py +10 -10
- langchain_core/tools/retriever.py +12 -19
- langchain_core/tools/simple.py +52 -29
- langchain_core/tools/structured.py +56 -60
- langchain_core/tracers/__init__.py +1 -9
- langchain_core/tracers/_streaming.py +6 -7
- langchain_core/tracers/base.py +103 -112
- langchain_core/tracers/context.py +29 -48
- langchain_core/tracers/core.py +142 -105
- langchain_core/tracers/evaluation.py +30 -34
- langchain_core/tracers/event_stream.py +162 -117
- langchain_core/tracers/langchain.py +34 -36
- langchain_core/tracers/log_stream.py +87 -49
- langchain_core/tracers/memory_stream.py +3 -3
- langchain_core/tracers/root_listeners.py +18 -34
- langchain_core/tracers/run_collector.py +8 -20
- langchain_core/tracers/schemas.py +0 -125
- langchain_core/tracers/stdout.py +3 -3
- langchain_core/utils/__init__.py +1 -4
- langchain_core/utils/_merge.py +47 -9
- langchain_core/utils/aiter.py +70 -66
- langchain_core/utils/env.py +12 -9
- langchain_core/utils/function_calling.py +139 -206
- langchain_core/utils/html.py +7 -8
- langchain_core/utils/input.py +6 -6
- langchain_core/utils/interactive_env.py +6 -2
- langchain_core/utils/iter.py +48 -45
- langchain_core/utils/json.py +14 -4
- langchain_core/utils/json_schema.py +159 -43
- langchain_core/utils/mustache.py +32 -25
- langchain_core/utils/pydantic.py +67 -40
- langchain_core/utils/strings.py +5 -5
- langchain_core/utils/usage.py +1 -1
- langchain_core/utils/utils.py +104 -62
- langchain_core/vectorstores/base.py +131 -179
- langchain_core/vectorstores/in_memory.py +113 -182
- langchain_core/vectorstores/utils.py +23 -17
- langchain_core/version.py +1 -1
- langchain_core-1.0.0.dist-info/METADATA +68 -0
- langchain_core-1.0.0.dist-info/RECORD +172 -0
- {langchain_core-0.4.0.dev0.dist-info → langchain_core-1.0.0.dist-info}/WHEEL +1 -1
- langchain_core/beta/__init__.py +0 -1
- langchain_core/beta/runnables/__init__.py +0 -1
- langchain_core/beta/runnables/context.py +0 -448
- langchain_core/memory.py +0 -116
- langchain_core/messages/content_blocks.py +0 -1435
- langchain_core/prompts/pipeline.py +0 -133
- langchain_core/pydantic_v1/__init__.py +0 -30
- langchain_core/pydantic_v1/dataclasses.py +0 -23
- langchain_core/pydantic_v1/main.py +0 -23
- langchain_core/tracers/langchain_v1.py +0 -23
- langchain_core/utils/loading.py +0 -31
- langchain_core/v1/__init__.py +0 -1
- langchain_core/v1/chat_models.py +0 -1047
- langchain_core/v1/messages.py +0 -755
- langchain_core-0.4.0.dev0.dist-info/METADATA +0 -108
- langchain_core-0.4.0.dev0.dist-info/RECORD +0 -177
- langchain_core-0.4.0.dev0.dist-info/entry_points.txt +0 -4
langchain_core/runnables/fallbacks.py

```diff
@@ -5,11 +5,12 @@ import inspect
 import typing
 from collections.abc import AsyncIterator, Iterator, Sequence
 from functools import wraps
-from typing import TYPE_CHECKING, Any,
+from typing import TYPE_CHECKING, Any, cast

 from pydantic import BaseModel, ConfigDict
 from typing_extensions import override

+from langchain_core.callbacks.manager import AsyncCallbackManager, CallbackManager
 from langchain_core.runnables.base import Runnable, RunnableSerializable
 from langchain_core.runnables.config import (
     RunnableConfig,
@@ -46,41 +47,43 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]):
     of a chain of Runnables. Fallbacks are tried in order until one succeeds or
     all fail.

-    While you can instantiate a
-    more convenient to use the
+    While you can instantiate a `RunnableWithFallbacks` directly, it is usually
+    more convenient to use the `with_fallbacks` method on a Runnable.

     Example:
+        ```python
+        from langchain_core.chat_models.openai import ChatOpenAI
+        from langchain_core.chat_models.anthropic import ChatAnthropic

-
-
-
-
+        model = ChatAnthropic(model="claude-3-haiku-20240307").with_fallbacks(
+            [ChatOpenAI(model="gpt-3.5-turbo-0125")]
+        )
+        # Will usually use ChatAnthropic, but fallback to ChatOpenAI
+        # if ChatAnthropic fails.
+        model.invoke("hello")

-
-
-
-        # Will usually use ChatAnthropic, but fallback to ChatOpenAI
-        # if ChatAnthropic fails.
-        model.invoke('hello')
+        # And you can also use fallbacks at the level of a chain.
+        # Here if both LLM providers fail, we'll fallback to a good hardcoded
+        # response.

-
-
-
+        from langchain_core.prompts import PromptTemplate
+        from langchain_core.output_parser import StrOutputParser
+        from langchain_core.runnables import RunnableLambda

-        from langchain_core.prompts import PromptTemplate
-        from langchain_core.output_parser import StrOutputParser
-        from langchain_core.runnables import RunnableLambda

-
-
-
+        def when_all_is_lost(inputs):
+            return (
+                "Looks like our LLM providers are down. "
+                "Here's a nice 🦜️ emoji for you instead."
+            )

-        chain_with_fallback = (
-            PromptTemplate.from_template('Tell me a joke about {topic}')
-            | model
-            | StrOutputParser()
-        ).with_fallbacks([RunnableLambda(when_all_is_lost)])

+        chain_with_fallback = (
+            PromptTemplate.from_template("Tell me a joke about {topic}")
+            | model
+            | StrOutputParser()
+        ).with_fallbacks([RunnableLambda(when_all_is_lost)])
+        ```
     """

     runnable: Runnable[Input, Output]
@@ -92,9 +95,9 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]):

     Any exception that is not a subclass of these exceptions will be raised immediately.
     """
-    exception_key:
-    """If string is specified then handled exceptions will be passed to fallbacks as
-    part of the input under the specified key. If None
+    exception_key: str | None = None
+    """If `string` is specified then handled exceptions will be passed to fallbacks as
+    part of the input under the specified key. If `None`, exceptions
     will not be passed to fallbacks. If used, the base Runnable and its fallbacks
     must accept a dictionary as input."""

@@ -113,14 +116,12 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]):
         return self.runnable.OutputType

     @override
-    def get_input_schema(
-        self, config: Optional[RunnableConfig] = None
-    ) -> type[BaseModel]:
+    def get_input_schema(self, config: RunnableConfig | None = None) -> type[BaseModel]:
         return self.runnable.get_input_schema(config)

     @override
     def get_output_schema(
-        self, config:
+        self, config: RunnableConfig | None = None
     ) -> type[BaseModel]:
         return self.runnable.get_output_schema(config)

@@ -136,26 +137,32 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]):
     @classmethod
     @override
     def is_lc_serializable(cls) -> bool:
+        """Return True as this class is serializable."""
         return True

     @classmethod
     @override
     def get_lc_namespace(cls) -> list[str]:
-        """Get the namespace of the
+        """Get the namespace of the LangChain object.

-
+        Returns:
+            `["langchain", "schema", "runnable"]`
         """
         return ["langchain", "schema", "runnable"]

     @property
     def runnables(self) -> Iterator[Runnable[Input, Output]]:
-        """Iterator over the Runnable and its fallbacks.
+        """Iterator over the Runnable and its fallbacks.
+
+        Yields:
+            The Runnable then its fallbacks.
+        """
         yield self.runnable
         yield from self.fallbacks

     @override
     def invoke(
-        self, input: Input, config:
+        self, input: Input, config: RunnableConfig | None = None, **kwargs: Any
     ) -> Output:
         if self.exception_key is not None and not isinstance(input, dict):
             msg = (
@@ -207,8 +214,8 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]):
     async def ainvoke(
         self,
         input: Input,
-        config:
-        **kwargs:
+        config: RunnableConfig | None = None,
+        **kwargs: Any | None,
     ) -> Output:
         if self.exception_key is not None and not isinstance(input, dict):
             msg = (
@@ -257,13 +264,11 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]):
     def batch(
         self,
         inputs: list[Input],
-        config:
+        config: RunnableConfig | list[RunnableConfig] | None = None,
         *,
         return_exceptions: bool = False,
-        **kwargs:
+        **kwargs: Any | None,
     ) -> list[Output]:
-        from langchain_core.callbacks.manager import CallbackManager
-
         if self.exception_key is not None and not all(
             isinstance(input_, dict) for input_ in inputs
         ):
@@ -298,7 +303,9 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]):
                 name=config.get("run_name") or self.get_name(),
                 run_id=config.pop("run_id", None),
             )
-            for cm, input_, config in zip(
+            for cm, input_, config in zip(
+                callback_managers, inputs, configs, strict=False
+            )
         ]

         to_return: dict[int, Any] = {}
@@ -316,7 +323,9 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]):
                 return_exceptions=True,
                 **kwargs,
             )
-            for (i, input_), output in zip(
+            for (i, input_), output in zip(
+                sorted(run_again.copy().items()), outputs, strict=False
+            ):
                 if isinstance(output, BaseException) and not isinstance(
                     output, self.exceptions_to_handle
                 ):
@@ -351,13 +360,11 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]):
     async def abatch(
         self,
         inputs: list[Input],
-        config:
+        config: RunnableConfig | list[RunnableConfig] | None = None,
         *,
         return_exceptions: bool = False,
-        **kwargs:
+        **kwargs: Any | None,
     ) -> list[Output]:
-        from langchain_core.callbacks.manager import AsyncCallbackManager
-
         if self.exception_key is not None and not all(
             isinstance(input_, dict) for input_ in inputs
         ):
@@ -393,11 +400,13 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]):
                     name=config.get("run_name") or self.get_name(),
                     run_id=config.pop("run_id", None),
                 )
-                for cm, input_, config in zip(
+                for cm, input_, config in zip(
+                    callback_managers, inputs, configs, strict=False
+                )
             )
         )

-        to_return = {}
+        to_return: dict[int, Output | BaseException] = {}
         run_again = dict(enumerate(inputs))
         handled_exceptions: dict[int, BaseException] = {}
         first_to_raise = None
@@ -413,7 +422,9 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]):
                 **kwargs,
             )

-            for (i, input_), output in zip(
+            for (i, input_), output in zip(
+                sorted(run_again.copy().items()), outputs, strict=False
+            ):
                 if isinstance(output, BaseException) and not isinstance(
                     output, self.exceptions_to_handle
                 ):
@@ -447,14 +458,14 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]):
         if not return_exceptions and sorted_handled_exceptions:
             raise sorted_handled_exceptions[0][1]
         to_return.update(handled_exceptions)
-        return [output for _, output in sorted(to_return.items())]
+        return [cast("Output", output) for _, output in sorted(to_return.items())]

     @override
     def stream(
         self,
         input: Input,
-        config:
-        **kwargs:
+        config: RunnableConfig | None = None,
+        **kwargs: Any | None,
     ) -> Iterator[Output]:
         if self.exception_key is not None and not isinstance(input, dict):
             msg = (
@@ -500,7 +511,7 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]):
             raise first_error

         yield chunk
-        output:
+        output: Output | None = chunk
         try:
             for chunk in stream:
                 yield chunk
@@ -517,8 +528,8 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]):
     async def astream(
         self,
         input: Input,
-        config:
-        **kwargs:
+        config: RunnableConfig | None = None,
+        **kwargs: Any | None,
     ) -> AsyncIterator[Output]:
         if self.exception_key is not None and not isinstance(input, dict):
             msg = (
@@ -564,12 +575,12 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]):
             raise first_error

         yield chunk
-        output:
+        output: Output | None = chunk
         try:
             async for chunk in stream:
                 yield chunk
                 try:
-                    output = output + chunk
+                    output = output + chunk  # type: ignore[operator]
                 except TypeError:
                     output = None
         except BaseException as e:
@@ -583,32 +594,32 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]):
         Returns:
             If the attribute is anything other than a method that outputs a Runnable,
             returns getattr(self.runnable, name). If the attribute is a method that
-            does return a new Runnable (e.g.
+            does return a new Runnable (e.g. model.bind_tools([...]) outputs a new
             RunnableBinding) then self.runnable and each of the runnables in
             self.fallbacks is replaced with getattr(x, name).

         Example:
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-            )
+            ```python
+            from langchain_openai import ChatOpenAI
+            from langchain_anthropic import ChatAnthropic
+
+            gpt_4o = ChatOpenAI(model="gpt-4o")
+            claude_3_sonnet = ChatAnthropic(model="claude-3-7-sonnet-20250219")
+            model = gpt_4o.with_fallbacks([claude_3_sonnet])
+
+            model.model_name
+            # -> "gpt-4o"
+
+            # .bind_tools() is called on both ChatOpenAI and ChatAnthropic
+            # Equivalent to:
+            # gpt_4o.bind_tools([...]).with_fallbacks([claude_3_sonnet.bind_tools([...])])
+            model.bind_tools([...])
+            # -> RunnableWithFallbacks(
+                runnable=RunnableBinding(bound=ChatOpenAI(...), kwargs={"tools": [...]}),
+                fallbacks=[RunnableBinding(bound=ChatAnthropic(...), kwargs={"tools": [...]})],
+            )

+            ```
         """  # noqa: E501
         attr = getattr(self.runnable, name)
         if _returns_runnable(attr):
```