langchain-core 1.0.0a1__py3-none-any.whl → 1.0.0a3__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
Potentially problematic release: this version of langchain-core might be problematic (see the registry page for details).
- langchain_core/_api/beta_decorator.py +17 -40
- langchain_core/_api/deprecation.py +20 -7
- langchain_core/_api/path.py +19 -2
- langchain_core/_import_utils.py +7 -0
- langchain_core/agents.py +10 -6
- langchain_core/callbacks/base.py +28 -15
- langchain_core/callbacks/manager.py +81 -69
- langchain_core/callbacks/usage.py +4 -2
- langchain_core/chat_history.py +29 -21
- langchain_core/document_loaders/base.py +34 -9
- langchain_core/document_loaders/langsmith.py +3 -0
- langchain_core/documents/base.py +35 -10
- langchain_core/documents/transformers.py +4 -2
- langchain_core/embeddings/fake.py +8 -5
- langchain_core/env.py +2 -3
- langchain_core/example_selectors/base.py +12 -0
- langchain_core/exceptions.py +7 -0
- langchain_core/globals.py +17 -28
- langchain_core/indexing/api.py +57 -45
- langchain_core/indexing/base.py +5 -8
- langchain_core/indexing/in_memory.py +23 -3
- langchain_core/language_models/__init__.py +6 -2
- langchain_core/language_models/_utils.py +28 -4
- langchain_core/language_models/base.py +33 -21
- langchain_core/language_models/chat_models.py +103 -29
- langchain_core/language_models/fake_chat_models.py +5 -7
- langchain_core/language_models/llms.py +54 -20
- langchain_core/load/dump.py +2 -3
- langchain_core/load/load.py +15 -1
- langchain_core/load/serializable.py +38 -43
- langchain_core/memory.py +7 -3
- langchain_core/messages/__init__.py +7 -17
- langchain_core/messages/ai.py +41 -34
- langchain_core/messages/base.py +16 -7
- langchain_core/messages/block_translators/__init__.py +10 -8
- langchain_core/messages/block_translators/anthropic.py +3 -1
- langchain_core/messages/block_translators/bedrock.py +3 -1
- langchain_core/messages/block_translators/bedrock_converse.py +3 -1
- langchain_core/messages/block_translators/google_genai.py +3 -1
- langchain_core/messages/block_translators/google_vertexai.py +3 -1
- langchain_core/messages/block_translators/groq.py +3 -1
- langchain_core/messages/block_translators/langchain_v0.py +3 -136
- langchain_core/messages/block_translators/ollama.py +3 -1
- langchain_core/messages/block_translators/openai.py +252 -10
- langchain_core/messages/content.py +26 -124
- langchain_core/messages/human.py +2 -13
- langchain_core/messages/system.py +2 -6
- langchain_core/messages/tool.py +34 -14
- langchain_core/messages/utils.py +189 -74
- langchain_core/output_parsers/base.py +5 -2
- langchain_core/output_parsers/json.py +4 -4
- langchain_core/output_parsers/list.py +7 -22
- langchain_core/output_parsers/openai_functions.py +3 -0
- langchain_core/output_parsers/openai_tools.py +6 -1
- langchain_core/output_parsers/pydantic.py +4 -0
- langchain_core/output_parsers/string.py +5 -1
- langchain_core/output_parsers/xml.py +19 -19
- langchain_core/outputs/chat_generation.py +18 -7
- langchain_core/outputs/generation.py +14 -3
- langchain_core/outputs/llm_result.py +8 -1
- langchain_core/prompt_values.py +10 -4
- langchain_core/prompts/base.py +6 -11
- langchain_core/prompts/chat.py +88 -60
- langchain_core/prompts/dict.py +16 -8
- langchain_core/prompts/few_shot.py +9 -11
- langchain_core/prompts/few_shot_with_templates.py +5 -1
- langchain_core/prompts/image.py +12 -5
- langchain_core/prompts/loading.py +2 -2
- langchain_core/prompts/message.py +5 -6
- langchain_core/prompts/pipeline.py +13 -8
- langchain_core/prompts/prompt.py +22 -8
- langchain_core/prompts/string.py +18 -10
- langchain_core/prompts/structured.py +7 -2
- langchain_core/rate_limiters.py +2 -2
- langchain_core/retrievers.py +7 -6
- langchain_core/runnables/base.py +387 -246
- langchain_core/runnables/branch.py +11 -28
- langchain_core/runnables/config.py +20 -17
- langchain_core/runnables/configurable.py +34 -19
- langchain_core/runnables/fallbacks.py +20 -13
- langchain_core/runnables/graph.py +48 -38
- langchain_core/runnables/graph_ascii.py +40 -17
- langchain_core/runnables/graph_mermaid.py +54 -25
- langchain_core/runnables/graph_png.py +27 -31
- langchain_core/runnables/history.py +55 -58
- langchain_core/runnables/passthrough.py +44 -21
- langchain_core/runnables/retry.py +44 -23
- langchain_core/runnables/router.py +9 -8
- langchain_core/runnables/schema.py +9 -0
- langchain_core/runnables/utils.py +53 -90
- langchain_core/stores.py +19 -31
- langchain_core/sys_info.py +9 -8
- langchain_core/tools/base.py +36 -27
- langchain_core/tools/convert.py +25 -14
- langchain_core/tools/simple.py +36 -8
- langchain_core/tools/structured.py +25 -12
- langchain_core/tracers/base.py +2 -2
- langchain_core/tracers/context.py +5 -1
- langchain_core/tracers/core.py +110 -46
- langchain_core/tracers/evaluation.py +22 -26
- langchain_core/tracers/event_stream.py +97 -42
- langchain_core/tracers/langchain.py +12 -3
- langchain_core/tracers/langchain_v1.py +10 -2
- langchain_core/tracers/log_stream.py +56 -17
- langchain_core/tracers/root_listeners.py +4 -20
- langchain_core/tracers/run_collector.py +6 -16
- langchain_core/tracers/schemas.py +5 -1
- langchain_core/utils/aiter.py +14 -6
- langchain_core/utils/env.py +3 -0
- langchain_core/utils/function_calling.py +46 -20
- langchain_core/utils/interactive_env.py +6 -2
- langchain_core/utils/iter.py +12 -5
- langchain_core/utils/json.py +12 -3
- langchain_core/utils/json_schema.py +156 -40
- langchain_core/utils/loading.py +5 -1
- langchain_core/utils/mustache.py +25 -16
- langchain_core/utils/pydantic.py +38 -9
- langchain_core/utils/utils.py +25 -9
- langchain_core/vectorstores/base.py +7 -20
- langchain_core/vectorstores/in_memory.py +20 -14
- langchain_core/vectorstores/utils.py +18 -12
- langchain_core/version.py +1 -1
- langchain_core-1.0.0a3.dist-info/METADATA +77 -0
- langchain_core-1.0.0a3.dist-info/RECORD +181 -0
- langchain_core/beta/__init__.py +0 -1
- langchain_core/beta/runnables/__init__.py +0 -1
- langchain_core/beta/runnables/context.py +0 -448
- langchain_core-1.0.0a1.dist-info/METADATA +0 -106
- langchain_core-1.0.0a1.dist-info/RECORD +0 -184
- {langchain_core-1.0.0a1.dist-info → langchain_core-1.0.0a3.dist-info}/WHEEL +0 -0
- {langchain_core-1.0.0a1.dist-info → langchain_core-1.0.0a3.dist-info}/entry_points.txt +0 -0
@@ -96,22 +96,22 @@ class RunnablePassthrough(RunnableSerializable[Other, Other]):
         )

         runnable = RunnableParallel(
-            origin=RunnablePassthrough(),
-            modified=lambda x: x+1
+            origin=RunnablePassthrough(), modified=lambda x: x + 1
         )

-        runnable.invoke(1)
+        runnable.invoke(1)  # {'origin': 1, 'modified': 2}


-        def fake_llm(prompt: str) -> str:
+        def fake_llm(prompt: str) -> str:  # Fake LLM for the example
             return "completion"

+
         chain = RunnableLambda(fake_llm) | {
-
-
+            "original": RunnablePassthrough(),  # Original LLM output
+            "parsed": lambda text: text[::-1],  # Parsing logic
         }

-        chain.invoke(
+        chain.invoke("hello")  # {'original': 'completion', 'parsed': 'noitelpmoc'}

     In some cases, it may be useful to pass the input through while adding some
     keys to the output. In this case, you can use the `assign` method:

@@ -120,17 +120,19 @@ class RunnablePassthrough(RunnableSerializable[Other, Other]):

         from langchain_core.runnables import RunnablePassthrough

-
+
+        def fake_llm(prompt: str) -> str:  # Fake LLM for the example
             return "completion"

+
         runnable = {
-
-
+            "llm1": fake_llm,
+            "llm2": fake_llm,
         } | RunnablePassthrough.assign(
-            total_chars=lambda inputs: len(inputs[
+            total_chars=lambda inputs: len(inputs["llm1"] + inputs["llm2"])
         )

-        runnable.invoke(
+        runnable.invoke("hello")
         # {'llm1': 'completion', 'llm2': 'completion', 'total_chars': 20}

     """

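Pulled together, the updated `assign` example from this hunk runs as a standalone snippet roughly like the following sketch (the `print` call and wrapper layout are mine; the output assumes both fake LLMs return "completion"):

    from langchain_core.runnables import RunnablePassthrough


    def fake_llm(prompt: str) -> str:  # stand-in for a real model call
        return "completion"


    # The dict is coerced into a RunnableParallel; assign() merges an extra key
    # into that parallel output.
    runnable = {
        "llm1": fake_llm,
        "llm2": fake_llm,
    } | RunnablePassthrough.assign(
        total_chars=lambda inputs: len(inputs["llm1"] + inputs["llm2"])
    )

    print(runnable.invoke("hello"))
    # {'llm1': 'completion', 'llm2': 'completion', 'total_chars': 20}
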
@@ -191,11 +193,16 @@ class RunnablePassthrough(RunnableSerializable[Other, Other]):
     @classmethod
     @override
     def is_lc_serializable(cls) -> bool:
+        """Return True as this class is serializable."""
         return True

     @classmethod
-    @override
     def get_lc_namespace(cls) -> list[str]:
+        """Get the namespace of the langchain object.
+
+        Returns:
+            ``["langchain", "schema", "runnable"]``
+        """
         return ["langchain", "schema", "runnable"]

     @property

@@ -378,11 +385,15 @@ class RunnableAssign(RunnableSerializable[dict[str, Any], dict[str, Any]]):
         )
         from langchain_core.runnables.base import RunnableLambda

+
         def add_ten(x: dict[str, int]) -> dict[str, int]:
             return {"added": x["input"] + 10}

+
         mapper = RunnableParallel(
-            {
+            {
+                "add_step": RunnableLambda(add_ten),
+            }
         )

         runnable_assign = RunnableAssign(mapper)

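For context, the `RunnableAssign` example above can be completed into a runnable sketch like the one below. The invoke call and expected output are my reading of `RunnableAssign` merging the mapper's results into the input dict, not lines from the diff:

    from langchain_core.runnables import RunnableAssign, RunnableLambda, RunnableParallel


    def add_ten(x: dict[str, int]) -> dict[str, int]:
        return {"added": x["input"] + 10}


    mapper = RunnableParallel({"add_step": RunnableLambda(add_ten)})
    runnable_assign = RunnableAssign(mapper)

    # The original input keys pass through; the mapper output is merged in.
    print(runnable_assign.invoke({"input": 5}))
    # expected: {'input': 5, 'add_step': {'added': 15}}
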
@@ -411,11 +422,17 @@ class RunnableAssign(RunnableSerializable[dict[str, Any], dict[str, Any]]):
     @classmethod
     @override
     def is_lc_serializable(cls) -> bool:
+        """Return True as this class is serializable."""
         return True

     @classmethod
     @override
     def get_lc_namespace(cls) -> list[str]:
+        """Get the namespace of the langchain object.
+
+        Returns:
+            ``["langchain", "schema", "runnable"]``
+        """
         return ["langchain", "schema", "runnable"]

     @override

@@ -688,13 +705,13 @@ class RunnablePick(RunnableSerializable[dict[str, Any], dict[str, Any]]):
         from langchain_core.runnables.passthrough import RunnablePick

         input_data = {
-
-
-
-
+            "name": "John",
+            "age": 30,
+            "city": "New York",
+            "country": "USA",
         }

-        runnable = RunnablePick(keys=[
+        runnable = RunnablePick(keys=["name", "age"])

         output_data = runnable.invoke(input_data)

@@ -715,12 +732,17 @@ class RunnablePick(RunnableSerializable[dict[str, Any], dict[str, Any]]):
     @classmethod
     @override
     def is_lc_serializable(cls) -> bool:
+        """Return True as this class is serializable."""
         return True

     @classmethod
     @override
     def get_lc_namespace(cls) -> list[str]:
-        """Get the namespace of the langchain object.
+        """Get the namespace of the langchain object.
+
+        Returns:
+            ``["langchain", "schema", "runnable"]``
+        """
         return ["langchain", "schema", "runnable"]

     @override

@@ -730,7 +752,8 @@ class RunnablePick(RunnableSerializable[dict[str, Any], dict[str, Any]]):
         name = (
             name
             or self.name
-            or
+            or "RunnablePick"
+            f"<{','.join([self.keys] if isinstance(self.keys, str) else self.keys)}>"
         )
         return super().get_name(suffix, name=name)

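Reading the rebuilt `get_name` logic above: when no explicit name is set, the picked keys are folded into the runnable's name. A small sketch; the exact string is inferred from the f-string shown, so treat it as illustrative:

    from langchain_core.runnables import RunnablePick

    picker = RunnablePick(keys=["name", "age"])
    # Falls through to "RunnablePick" plus the joined keys,
    # i.e. something like "RunnablePick<name,age>".
    print(picker.get_name())
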
@@ -66,17 +66,21 @@ class RunnableRetry(RunnableBindingBase[Input, Output]): # type: ignore[no-rede

         import time

+
         def foo(input) -> None:
             '''Fake function that raises an exception.'''
             raise ValueError(f"Invoking foo failed. At time {time.time()}")

+
         runnable = RunnableLambda(foo)

         runnable_with_retries = runnable.with_retry(
-            retry_if_exception_type=(ValueError,),
-            wait_exponential_jitter=True,
-            stop_after_attempt=2,
-            exponential_jitter_params={
+            retry_if_exception_type=(ValueError,),  # Retry only on ValueError
+            wait_exponential_jitter=True,  # Add jitter to the exponential backoff
+            stop_after_attempt=2,  # Try twice
+            exponential_jitter_params={
+                "initial": 2
+            },  # if desired, customize backoff
         )

         # The method invocation above is equivalent to the longer form below:

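The docstring example above always fails; to see the same `with_retry` options succeed, here is a hedged variant with a function that fails once and then recovers (the flaky function and its counter are illustrative, not from the diff):

    from langchain_core.runnables import RunnableLambda

    attempts = {"count": 0}


    def flaky(_: str) -> str:
        attempts["count"] += 1
        if attempts["count"] < 2:
            raise ValueError("transient failure")
        return "ok"


    runnable_with_retries = RunnableLambda(flaky).with_retry(
        retry_if_exception_type=(ValueError,),  # retry only on ValueError
        stop_after_attempt=2,  # first call fails, second succeeds
    )

    print(runnable_with_retries.invoke("hello"))  # "ok"
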
@@ -111,7 +115,7 @@ class RunnableRetry(RunnableBindingBase[Input, Output]): # type: ignore[no-rede
         chain = template | model
         retryable_chain = chain.with_retry()

-    """
+    """

     retry_exception_types: tuple[type[BaseException], ...] = (Exception,)
     """The exception types to retry on. By default all exceptions are retried.

@@ -234,31 +238,40 @@ class RunnableRetry(RunnableBindingBase[Input, Output]): # type: ignore[no-rede
     ) -> list[Union[Output, Exception]]:
         results_map: dict[int, Output] = {}

-        def pending(iterable: list[U]) -> list[U]:
-            return [item for idx, item in enumerate(iterable) if idx not in results_map]
-
         not_set: list[Output] = []
         result = not_set
         try:
             for attempt in self._sync_retrying():
                 with attempt:
-                    #
+                    # Retry for inputs that have not yet succeeded
+                    # Determine which original indices remain.
+                    remaining_indices = [
+                        i for i in range(len(inputs)) if i not in results_map
+                    ]
+                    if not remaining_indices:
+                        break
+                    pending_inputs = [inputs[i] for i in remaining_indices]
+                    pending_configs = [config[i] for i in remaining_indices]
+                    pending_run_managers = [run_manager[i] for i in remaining_indices]
+                    # Invoke underlying batch only on remaining elements.
                     result = super().batch(
-
+                        pending_inputs,
                         self._patch_config_list(
-
+                            pending_configs, pending_run_managers, attempt.retry_state
                         ),
                         return_exceptions=True,
                         **kwargs,
                     )
-                    # Register the results of the inputs that have succeeded
+                    # Register the results of the inputs that have succeeded, mapping
+                    # back to their original indices.
                     first_exception = None
-                    for
+                    for offset, r in enumerate(result):
                         if isinstance(r, Exception):
                             if not first_exception:
                                 first_exception = r
                             continue
-
+                        orig_idx = remaining_indices[offset]
+                        results_map[orig_idx] = r
                     # If any exception occurred, raise it, to retry the failed ones
                     if first_exception:
                         raise first_exception

@@ -301,31 +314,39 @@ class RunnableRetry(RunnableBindingBase[Input, Output]): # type: ignore[no-rede
     ) -> list[Union[Output, Exception]]:
         results_map: dict[int, Output] = {}

-        def pending(iterable: list[U]) -> list[U]:
-            return [item for idx, item in enumerate(iterable) if idx not in results_map]
-
         not_set: list[Output] = []
         result = not_set
         try:
             async for attempt in self._async_retrying():
                 with attempt:
-                    #
+                    # Retry for inputs that have not yet succeeded
+                    # Determine which original indices remain.
+                    remaining_indices = [
+                        i for i in range(len(inputs)) if i not in results_map
+                    ]
+                    if not remaining_indices:
+                        break
+                    pending_inputs = [inputs[i] for i in remaining_indices]
+                    pending_configs = [config[i] for i in remaining_indices]
+                    pending_run_managers = [run_manager[i] for i in remaining_indices]
                     result = await super().abatch(
-
+                        pending_inputs,
                         self._patch_config_list(
-
+                            pending_configs, pending_run_managers, attempt.retry_state
                         ),
                         return_exceptions=True,
                         **kwargs,
                     )
-                    # Register the results of the inputs that have succeeded
+                    # Register the results of the inputs that have succeeded, mapping
+                    # back to their original indices.
                     first_exception = None
-                    for
+                    for offset, r in enumerate(result):
                         if isinstance(r, Exception):
                             if not first_exception:
                                 first_exception = r
                             continue
-
+                        orig_idx = remaining_indices[offset]
+                        results_map[orig_idx] = r
                     # If any exception occurred, raise it, to retry the failed ones
                     if first_exception:
                         raise first_exception

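Both hunks replace the old `pending()` closure with explicit index bookkeeping: successes are tracked by their original position and only the still-missing inputs are re-submitted on each retry. Stripped of the Runnable machinery, the pattern looks roughly like this (a plain-Python sketch, not the library code):

    def batch_with_retries(inputs, call_batch, max_attempts=3):
        """Retry a batch call, re-submitting only inputs that have not yet succeeded."""
        results_map = {}  # original index -> successful result
        first_exception = None
        for _ in range(max_attempts):
            remaining = [i for i in range(len(inputs)) if i not in results_map]
            if not remaining:
                break
            # call_batch returns either a result or an Exception per submitted input
            outputs = call_batch([inputs[i] for i in remaining])
            first_exception = None
            for orig_idx, result in zip(remaining, outputs):
                if isinstance(result, Exception):
                    first_exception = first_exception or result
                    continue
                results_map[orig_idx] = result
            if first_exception is None:
                break
        if first_exception is not None:
            raise first_exception
        return [results_map[i] for i in range(len(inputs))]
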
@@ -38,15 +38,12 @@ if TYPE_CHECKING:


 class RouterInput(TypedDict):
-    """Router input.
-
-    Attributes:
-        key: The key to route on.
-        input: The input to pass to the selected Runnable.
-    """
+    """Router input."""

     key: str
+    """The key to route on."""
     input: Any
+    """The input to pass to the selected Runnable."""


 class RouterRunnable(RunnableSerializable[RouterInput, Output]):

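The two `RouterInput` keys drive `RouterRunnable` dispatch: `key` selects a runnable and `input` is forwarded to it. A minimal sketch of that contract (the example runnables here are illustrative):

    from langchain_core.runnables import RouterRunnable, RunnableLambda

    router = RouterRunnable(
        runnables={
            "double": RunnableLambda(lambda x: x * 2),
            "negate": RunnableLambda(lambda x: -x),
        }
    )

    # "key" picks the runnable, "input" is what it receives.
    print(router.invoke({"key": "double", "input": 3}))  # 6
    print(router.invoke({"key": "negate", "input": 3}))  # -3
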
@@ -98,13 +95,17 @@ class RouterRunnable(RunnableSerializable[RouterInput, Output]):
     @classmethod
     @override
     def is_lc_serializable(cls) -> bool:
-        """Return
+        """Return True as this class is serializable."""
         return True

     @classmethod
     @override
     def get_lc_namespace(cls) -> list[str]:
-        """Get the namespace of the langchain object.
+        """Get the namespace of the langchain object.
+
+        Returns:
+            ``["langchain", "schema", "runnable"]``
+        """
         return ["langchain", "schema", "runnable"]

     @override

@@ -23,6 +23,13 @@ class EventData(TypedDict, total=False):
     won't be known until the *END* of the Runnable when it has finished streaming
     its inputs.
     """
+    error: NotRequired[BaseException]
+    """The error that occurred during the execution of the Runnable.
+
+    This field is only available if the Runnable raised an exception.
+
+    .. versionadded:: 1.0.0
+    """
     output: Any
     """The output of the Runnable that generated the event.

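Since `EventData` is declared `total=False` and the new `error` field is `NotRequired`, consumers of the event stream should read it defensively. A hedged sketch of handler code (the function name is illustrative, not part of the diff):

    from langchain_core.runnables.schema import StreamEvent


    def report_failure(event: StreamEvent) -> None:
        # "error" is only present when the Runnable raised, so use .get().
        error = event["data"].get("error")
        if error is not None:
            print(f"{event['name']} failed with {error!r}")
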
@@ -51,9 +58,11 @@ class BaseStreamEvent(TypedDict):

         from langchain_core.runnables import RunnableLambda

+
         async def reverse(s: str) -> str:
             return s[::-1]

+
         chain = RunnableLambda(func=reverse)

         events = [event async for event in chain.astream_events("hello")]