langchain-core 0.3.71__py3-none-any.whl → 0.4.0.dev0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (84)
  1. langchain_core/__init__.py +1 -1
  2. langchain_core/_api/beta_decorator.py +1 -0
  3. langchain_core/_api/deprecation.py +2 -0
  4. langchain_core/beta/runnables/context.py +1 -0
  5. langchain_core/callbacks/base.py +23 -14
  6. langchain_core/callbacks/file.py +1 -0
  7. langchain_core/callbacks/manager.py +145 -19
  8. langchain_core/callbacks/streaming_stdout.py +4 -3
  9. langchain_core/callbacks/usage.py +15 -3
  10. langchain_core/chat_history.py +1 -0
  11. langchain_core/document_loaders/langsmith.py +2 -1
  12. langchain_core/documents/base.py +2 -0
  13. langchain_core/embeddings/fake.py +2 -0
  14. langchain_core/indexing/api.py +10 -0
  15. langchain_core/language_models/_utils.py +37 -0
  16. langchain_core/language_models/base.py +4 -1
  17. langchain_core/language_models/chat_models.py +48 -27
  18. langchain_core/language_models/fake_chat_models.py +71 -1
  19. langchain_core/language_models/llms.py +1 -0
  20. langchain_core/memory.py +1 -0
  21. langchain_core/messages/__init__.py +54 -0
  22. langchain_core/messages/ai.py +31 -18
  23. langchain_core/messages/content_blocks.py +1349 -69
  24. langchain_core/messages/human.py +1 -0
  25. langchain_core/messages/modifier.py +1 -1
  26. langchain_core/messages/tool.py +8 -83
  27. langchain_core/messages/utils.py +221 -6
  28. langchain_core/output_parsers/base.py +51 -14
  29. langchain_core/output_parsers/json.py +5 -2
  30. langchain_core/output_parsers/list.py +7 -2
  31. langchain_core/output_parsers/openai_functions.py +29 -5
  32. langchain_core/output_parsers/openai_tools.py +90 -47
  33. langchain_core/output_parsers/pydantic.py +3 -2
  34. langchain_core/output_parsers/transform.py +53 -12
  35. langchain_core/output_parsers/xml.py +14 -5
  36. langchain_core/outputs/llm_result.py +4 -1
  37. langchain_core/prompt_values.py +111 -7
  38. langchain_core/prompts/base.py +4 -0
  39. langchain_core/prompts/chat.py +3 -0
  40. langchain_core/prompts/few_shot.py +1 -0
  41. langchain_core/prompts/few_shot_with_templates.py +1 -0
  42. langchain_core/prompts/image.py +1 -0
  43. langchain_core/prompts/pipeline.py +1 -0
  44. langchain_core/prompts/prompt.py +1 -0
  45. langchain_core/prompts/structured.py +1 -0
  46. langchain_core/rate_limiters.py +1 -0
  47. langchain_core/retrievers.py +3 -0
  48. langchain_core/runnables/base.py +75 -57
  49. langchain_core/runnables/branch.py +1 -0
  50. langchain_core/runnables/config.py +2 -2
  51. langchain_core/runnables/configurable.py +2 -1
  52. langchain_core/runnables/fallbacks.py +3 -7
  53. langchain_core/runnables/graph.py +5 -3
  54. langchain_core/runnables/graph_ascii.py +1 -0
  55. langchain_core/runnables/graph_mermaid.py +1 -0
  56. langchain_core/runnables/history.py +1 -0
  57. langchain_core/runnables/passthrough.py +3 -0
  58. langchain_core/runnables/retry.py +1 -0
  59. langchain_core/runnables/router.py +1 -0
  60. langchain_core/runnables/schema.py +1 -0
  61. langchain_core/stores.py +3 -0
  62. langchain_core/tools/base.py +43 -11
  63. langchain_core/tools/convert.py +25 -3
  64. langchain_core/tools/retriever.py +8 -1
  65. langchain_core/tools/structured.py +10 -1
  66. langchain_core/tracers/base.py +14 -7
  67. langchain_core/tracers/context.py +1 -1
  68. langchain_core/tracers/core.py +27 -4
  69. langchain_core/tracers/event_stream.py +14 -3
  70. langchain_core/tracers/langchain.py +14 -3
  71. langchain_core/tracers/log_stream.py +4 -1
  72. langchain_core/utils/aiter.py +5 -0
  73. langchain_core/utils/function_calling.py +2 -1
  74. langchain_core/utils/iter.py +1 -0
  75. langchain_core/utils/json_schema.py +1 -1
  76. langchain_core/v1/__init__.py +1 -0
  77. langchain_core/v1/chat_models.py +1047 -0
  78. langchain_core/v1/messages.py +755 -0
  79. langchain_core/vectorstores/base.py +1 -0
  80. langchain_core/version.py +1 -1
  81. {langchain_core-0.3.71.dist-info → langchain_core-0.4.0.dev0.dist-info}/METADATA +1 -1
  82. {langchain_core-0.3.71.dist-info → langchain_core-0.4.0.dev0.dist-info}/RECORD +84 -81
  83. {langchain_core-0.3.71.dist-info → langchain_core-0.4.0.dev0.dist-info}/WHEEL +0 -0
  84. {langchain_core-0.3.71.dist-info → langchain_core-0.4.0.dev0.dist-info}/entry_points.txt +0 -0
langchain_core/runnables/base.py CHANGED
@@ -97,10 +97,7 @@ if TYPE_CHECKING:
  from langchain_core.runnables.retry import ExponentialJitterParams
  from langchain_core.runnables.schema import StreamEvent
  from langchain_core.tools import BaseTool
- from langchain_core.tracers.log_stream import (
- RunLog,
- RunLogPatch,
- )
+ from langchain_core.tracers.log_stream import RunLog, RunLogPatch
  from langchain_core.tracers.root_listeners import AsyncListener
  from langchain_core.tracers.schemas import Run

@@ -236,6 +233,7 @@ class Runnable(ABC, Generic[Input, Output]):
  )

  For a UI (and much more) checkout LangSmith: https://docs.smith.langchain.com/
+
  """ # noqa: E501

  name: Optional[str]
@@ -391,6 +389,7 @@ class Runnable(ABC, Generic[Input, Output]):
  print(runnable.get_input_jsonschema())

  .. versionadded:: 0.3.0
+
  """
  return self.get_input_schema(config).model_json_schema()

@@ -464,6 +463,7 @@ class Runnable(ABC, Generic[Input, Output]):
  print(runnable.get_output_jsonschema())

  .. versionadded:: 0.3.0
+
  """
  return self.get_output_schema(config).model_json_schema()

@@ -620,6 +620,7 @@ class Runnable(ABC, Generic[Input, Output]):
  sequence.batch([1, 2, 3])
  await sequence.abatch([1, 2, 3])
  # -> [4, 6, 8]
+
  """
  return RunnableSequence(self, *others, name=name)

@@ -727,7 +728,7 @@ class Runnable(ABC, Generic[Input, Output]):
  @abstractmethod
  def invoke(
  self,
- input: Input, # noqa: A002
+ input: Input,
  config: Optional[RunnableConfig] = None,
  **kwargs: Any,
  ) -> Output:
@@ -736,10 +737,10 @@ class Runnable(ABC, Generic[Input, Output]):
  Args:
  input: The input to the Runnable.
  config: A config to use when invoking the Runnable.
- The config supports standard keys like 'tags', 'metadata' for tracing
- purposes, 'max_concurrency' for controlling how much work to do
- in parallel, and other keys. Please refer to the RunnableConfig
- for more details.
+ The config supports standard keys like ``'tags'``, ``'metadata'`` for
+ tracing purposes, ``'max_concurrency'`` for controlling how much work to
+ do in parallel, and other keys. Please refer to the RunnableConfig
+ for more details. Defaults to None.

  Returns:
  The output of the Runnable.
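For reference, the keys named in the updated docstring are plain entries on the ``RunnableConfig`` dict. A minimal sketch using only the public ``Runnable`` API (the runnable and values below are made up for illustration):

```python
from langchain_core.runnables import RunnableLambda

add_one = RunnableLambda(lambda x: x + 1)

# 'tags' and 'metadata' are attached to the traced run for this invocation.
add_one.invoke(1, config={"tags": ["demo"], "metadata": {"source": "docs"}})  # -> 2
```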
@@ -747,7 +748,7 @@ class Runnable(ABC, Generic[Input, Output]):

  async def ainvoke(
  self,
- input: Input, # noqa: A002
+ input: Input,
  config: Optional[RunnableConfig] = None,
  **kwargs: Any,
  ) -> Output:
@@ -885,9 +886,9 @@ class Runnable(ABC, Generic[Input, Output]):
  Args:
  inputs: A list of inputs to the Runnable.
  config: A config to use when invoking the Runnable.
- The config supports standard keys like 'tags', 'metadata' for tracing
- purposes, 'max_concurrency' for controlling how much work to do
- in parallel, and other keys. Please refer to the RunnableConfig
+ The config supports standard keys like ``'tags'``, ``'metadata'`` for
+ tracing purposes, ``'max_concurrency'`` for controlling how much work to
+ do in parallel, and other keys. Please refer to the RunnableConfig
  for more details. Defaults to None.
  return_exceptions: Whether to return exceptions instead of raising them.
  Defaults to False.
@@ -950,10 +951,10 @@ class Runnable(ABC, Generic[Input, Output]):
  Args:
  inputs: A list of inputs to the Runnable.
  config: A config to use when invoking the Runnable.
- The config supports standard keys like 'tags', 'metadata' for tracing
- purposes, 'max_concurrency' for controlling how much work to do
- in parallel, and other keys. Please refer to the RunnableConfig
- for more details. Defaults to None. Defaults to None.
+ The config supports standard keys like ``'tags'``, ``'metadata'`` for
+ tracing purposes, ``'max_concurrency'`` for controlling how much work to
+ do in parallel, and other keys. Please refer to the RunnableConfig
+ for more details. Defaults to None.
  return_exceptions: Whether to return exceptions instead of raising them.
  Defaults to False.
  kwargs: Additional keyword arguments to pass to the Runnable.
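The two knobs described in this docstring, ``max_concurrency`` and ``return_exceptions``, are sketched below; the function and inputs are hypothetical:

```python
from langchain_core.runnables import RunnableLambda

def divide_ten_by(x: int) -> int:
    if x == 0:
        raise ValueError("zero not allowed")
    return 10 // x

runnable = RunnableLambda(divide_ten_by)

# 'max_concurrency' caps the number of parallel workers; return_exceptions=True
# places the error in the result list instead of aborting the whole batch.
runnable.batch([5, 0, 2], config={"max_concurrency": 2}, return_exceptions=True)
# -> [2, ValueError('zero not allowed'), 5]
```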
@@ -995,7 +996,7 @@ class Runnable(ABC, Generic[Input, Output]):

  def stream(
  self,
- input: Input, # noqa: A002
+ input: Input,
  config: Optional[RunnableConfig] = None,
  **kwargs: Optional[Any],
  ) -> Iterator[Output]:
@@ -1015,7 +1016,7 @@ class Runnable(ABC, Generic[Input, Output]):

  async def astream(
  self,
- input: Input, # noqa: A002
+ input: Input,
  config: Optional[RunnableConfig] = None,
  **kwargs: Optional[Any],
  ) -> AsyncIterator[Output]:
@@ -1069,7 +1070,7 @@ class Runnable(ABC, Generic[Input, Output]):

  async def astream_log(
  self,
- input: Any, # noqa: A002
+ input: Any,
  config: Optional[RunnableConfig] = None,
  *,
  diff: bool = True,
@@ -1140,7 +1141,7 @@ class Runnable(ABC, Generic[Input, Output]):

  async def astream_events(
  self,
- input: Any, # noqa: A002
+ input: Any,
  config: Optional[RunnableConfig] = None,
  *,
  version: Literal["v1", "v2"] = "v2",
@@ -1361,6 +1362,7 @@ class Runnable(ABC, Generic[Input, Output]):

  Raises:
  NotImplementedError: If the version is not `v1` or `v2`.
+
  """ # noqa: E501
  from langchain_core.tracers.event_stream import (
  _astream_events_implementation_v1,
@@ -1405,7 +1407,7 @@ class Runnable(ABC, Generic[Input, Output]):

  def transform(
  self,
- input: Iterator[Input], # noqa: A002
+ input: Iterator[Input],
  config: Optional[RunnableConfig] = None,
  **kwargs: Optional[Any],
  ) -> Iterator[Output]:
@@ -1447,7 +1449,7 @@ class Runnable(ABC, Generic[Input, Output]):

  async def atransform(
  self,
- input: AsyncIterator[Input], # noqa: A002
+ input: AsyncIterator[Input],
  config: Optional[RunnableConfig] = None,
  **kwargs: Optional[Any],
  ) -> AsyncIterator[Output]:
@@ -1569,18 +1571,17 @@ class Runnable(ABC, Generic[Input, Output]):
  ) -> Runnable[Input, Output]:
  """Bind lifecycle listeners to a Runnable, returning a new Runnable.

- on_start: Called before the Runnable starts running, with the Run object.
- on_end: Called after the Runnable finishes running, with the Run object.
- on_error: Called if the Runnable throws an error, with the Run object.
-
  The Run object contains information about the run, including its id,
  type, input, output, error, start_time, end_time, and any tags or metadata
  added to the run.

  Args:
- on_start: Called before the Runnable starts running. Defaults to None.
- on_end: Called after the Runnable finishes running. Defaults to None.
- on_error: Called if the Runnable throws an error. Defaults to None.
+ on_start: Called before the Runnable starts running, with the Run object.
+ Defaults to None.
+ on_end: Called after the Runnable finishes running, with the Run object.
+ Defaults to None.
+ on_error: Called if the Runnable throws an error, with the Run object.
+ Defaults to None.

  Returns:
  A new Runnable with the listeners bound.
@@ -1608,6 +1609,7 @@ class Runnable(ABC, Generic[Input, Output]):
  on_end=fn_end
  )
  chain.invoke(2)
+
  """
  from langchain_core.tracers.root_listeners import RootListenersTracer

@@ -1636,21 +1638,17 @@ class Runnable(ABC, Generic[Input, Output]):
  ) -> Runnable[Input, Output]:
  """Bind async lifecycle listeners to a Runnable, returning a new Runnable.

- on_start: Asynchronously called before the Runnable starts running.
- on_end: Asynchronously called after the Runnable finishes running.
- on_error: Asynchronously called if the Runnable throws an error.
-
  The Run object contains information about the run, including its id,
  type, input, output, error, start_time, end_time, and any tags or metadata
  added to the run.

  Args:
- on_start: Asynchronously called before the Runnable starts running.
- Defaults to None.
- on_end: Asynchronously called after the Runnable finishes running.
- Defaults to None.
- on_error: Asynchronously called if the Runnable throws an error.
- Defaults to None.
+ on_start: Called asynchronously before the Runnable starts running,
+ with the Run object. Defaults to None.
+ on_end: Called asynchronously after the Runnable finishes running,
+ with the Run object. Defaults to None.
+ on_error: Called asynchronously if the Runnable throws an error,
+ with the Run object. Defaults to None.

  Returns:
  A new Runnable with the listeners bound.
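The async variant mirrors the sync one; a minimal sketch of wiring async listeners, with made-up callback names (``with_alisteners``, ``Run``, and ``ainvoke`` are the public API):

```python
import asyncio

from langchain_core.runnables import RunnableLambda
from langchain_core.tracers.schemas import Run

async def log_start(run: Run) -> None:
    print("started:", run.id)

async def log_end(run: Run) -> None:
    print("finished:", run.id)

chain = RunnableLambda(lambda x: x * 2).with_alisteners(on_start=log_start, on_end=log_end)
asyncio.run(chain.ainvoke(3))  # logs the start and end of the run, returns 6
```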
@@ -1830,6 +1828,7 @@ class Runnable(ABC, Generic[Input, Output]):

  runnable = RunnableLambda(_lambda)
  print(runnable.map().invoke([1, 2, 3])) # [2, 3, 4]
+
  """
  return RunnableEach(bound=self)

@@ -2362,6 +2361,7 @@ class Runnable(ABC, Generic[Input, Output]):
  name: Optional[str] = None,
  description: Optional[str] = None,
  arg_types: Optional[dict[str, type]] = None,
+ message_version: Literal["v0", "v1"] = "v0",
  ) -> BaseTool:
  """Create a BaseTool from a Runnable.

@@ -2377,6 +2377,11 @@ class Runnable(ABC, Generic[Input, Output]):
  name: The name of the tool. Defaults to None.
  description: The description of the tool. Defaults to None.
  arg_types: A dictionary of argument names to types. Defaults to None.
+ message_version: Version of ToolMessage to return given
+ :class:`~langchain_core.messages.content_blocks.ToolCall` input.
+
+ If ``"v0"``, output will be a v0 :class:`~langchain_core.messages.tool.ToolMessage`.
+ If ``"v1"``, output will be a v1 :class:`~langchain_core.messages.v1.ToolMessage`.

  Returns:
  A BaseTool instance.
@@ -2451,7 +2456,8 @@ class Runnable(ABC, Generic[Input, Output]):
  as_tool.invoke("b")

  .. versionadded:: 0.2.14
- """
+
+ """ # noqa: E501
  # Avoid circular import
  from langchain_core.tools import convert_runnable_to_tool

@@ -2461,6 +2467,7 @@ class Runnable(ABC, Generic[Input, Output]):
  name=name,
  description=description,
  arg_types=arg_types,
+ message_version=message_version,
  )

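A sketch of the new parameter in use; the tool below is made up, and ``message_version`` only changes the return type when the tool is invoked with a ``ToolCall``:

```python
from langchain_core.runnables import RunnableLambda

def shout(text: str) -> str:
    return text.upper()

as_tool = RunnableLambda(shout).as_tool(
    name="shout",
    description="Upper-case the input text.",
    message_version="v1",  # opt in to the v1 ToolMessage for ToolCall inputs
)
as_tool.invoke("hi")  # plain (non-ToolCall) input still returns the raw output: "HI"
```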
@@ -2522,6 +2529,7 @@ class RunnableSerializable(Serializable, Runnable[Input, Output]):
  configurable={"output_token_number": 200}
  ).invoke("tell me something about chess").content
  )
+
  """
  from langchain_core.runnables.configurable import RunnableConfigurableFields

@@ -2550,7 +2558,7 @@ class RunnableSerializable(Serializable, Runnable[Input, Output]):
  which: The ConfigurableField instance that will be used to select the
  alternative.
  default_key: The default key to use if no alternative is selected.
- Defaults to "default".
+ Defaults to ``'default'``.
  prefix_keys: Whether to prefix the keys with the ConfigurableField id.
  Defaults to False.
  **kwargs: A dictionary of keys to Runnable instances or callables that
@@ -2566,7 +2574,7 @@ class RunnableSerializable(Serializable, Runnable[Input, Output]):
  from langchain_openai import ChatOpenAI

  model = ChatAnthropic(
- model_name="claude-3-sonnet-20240229"
+ model_name="claude-3-7-sonnet-20250219"
  ).configurable_alternatives(
  ConfigurableField(id="llm"),
  default_key="anthropic",
@@ -2582,6 +2590,7 @@ class RunnableSerializable(Serializable, Runnable[Input, Output]):
  configurable={"llm": "openai"}
  ).invoke("which organization created you?").content
  )
+
  """
  from langchain_core.runnables.configurable import (
  RunnableConfigurableAlternatives,
@@ -2746,6 +2755,7 @@ class RunnableSequence(RunnableSerializable[Input, Output]):
  async for chunk in chain.astream({'topic': 'colors'}):
  print('-') # noqa: T201
  print(chunk, sep='', flush=True) # noqa: T201
+
  """

  # The steps are broken into first, middle and last, solely for type checking
@@ -3544,6 +3554,7 @@ class RunnableParallel(RunnableSerializable[Input, dict[str, Any]]):
  for key in chunk:
  output[key] = output[key] + chunk[key].content
  print(output) # noqa: T201
+
  """

  steps__: Mapping[str, Runnable[Input, Any]]
@@ -4066,6 +4077,7 @@ class RunnableGenerator(Runnable[Input, Output]):

  runnable = chant_chain | RunnableLambda(reverse_generator)
  "".join(runnable.stream({"topic": "waste"})) # ".elcycer ,esuer ,ecudeR"
+
  """

  def __init__(
@@ -4326,6 +4338,7 @@ class RunnableLambda(Runnable[Input, Output]):
  runnable = RunnableLambda(add_one, afunc=add_one_async)
  runnable.invoke(1) # Uses add_one
  await runnable.ainvoke(1) # Uses add_one_async
+
  """

  def __init__(
@@ -5180,6 +5193,7 @@ class RunnableEach(RunnableEachBase[Input, Output]):
  {'topic':'Art'},
  {'topic':'Biology'}])
  print(output) # noqa: T201
+
  """

  @override
@@ -5215,6 +5229,10 @@ class RunnableEach(RunnableEachBase[Input, Output]):
  ) -> RunnableEach[Input, Output]:
  """Bind lifecycle listeners to a Runnable, returning a new Runnable.

+ The Run object contains information about the run, including its id,
+ type, input, output, error, start_time, end_time, and any tags or metadata
+ added to the run.
+
  Args:
  on_start: Called before the Runnable starts running, with the Run object.
  Defaults to None.
@@ -5225,10 +5243,6 @@ class RunnableEach(RunnableEachBase[Input, Output]):

  Returns:
  A new Runnable with the listeners bound.
-
- The Run object contains information about the run, including its id,
- type, input, output, error, start_time, end_time, and any tags or metadata
- added to the run.
  """
  return RunnableEach(
  bound=self.bound.with_listeners(
@@ -5245,20 +5259,20 @@ class RunnableEach(RunnableEachBase[Input, Output]):
  ) -> RunnableEach[Input, Output]:
  """Bind async lifecycle listeners to a Runnable, returning a new Runnable.

+ The Run object contains information about the run, including its id,
+ type, input, output, error, start_time, end_time, and any tags or metadata
+ added to the run.
+
  Args:
  on_start: Called asynchronously before the Runnable starts running,
- with the Run object. Defaults to None.
+ with the Run object. Defaults to None.
  on_end: Called asynchronously after the Runnable finishes running,
- with the Run object. Defaults to None.
+ with the Run object. Defaults to None.
  on_error: Called asynchronously if the Runnable throws an error,
- with the Run object. Defaults to None.
+ with the Run object. Defaults to None.

  Returns:
  A new Runnable with the listeners bound.
-
- The Run object contains information about the run, including its id,
- type, input, output, error, start_time, end_time, and any tags or metadata
- added to the run.
  """
  return RunnableEach(
  bound=self.bound.with_alisteners(
@@ -5714,6 +5728,7 @@ class RunnableBinding(RunnableBindingBase[Input, Output]):
  kwargs={'stop': ['-']} # <-- Note the additional kwargs
  )
  runnable_binding.invoke('Say "Parrot-MAGIC"') # Should return `Parrot`
+
  """

  @override
@@ -5768,6 +5783,10 @@ class RunnableBinding(RunnableBindingBase[Input, Output]):
  ) -> Runnable[Input, Output]:
  """Bind lifecycle listeners to a Runnable, returning a new Runnable.

+ The Run object contains information about the run, including its id,
+ type, input, output, error, start_time, end_time, and any tags or metadata
+ added to the run.
+
  Args:
  on_start: Called before the Runnable starts running, with the Run object.
  Defaults to None.
@@ -5777,9 +5796,7 @@ class RunnableBinding(RunnableBindingBase[Input, Output]):
  Defaults to None.

  Returns:
- The Runnable object contains information about the run, including its id,
- type, input, output, error, start_time, end_time, and any tags or metadata
- added to the run.
+ A new Runnable with the listeners bound.
  """
  from langchain_core.tracers.root_listeners import RootListenersTracer

@@ -5992,5 +6009,6 @@ def chain(

  for chunk in llm.stream(formatted):
  yield chunk
+
  """
  return RunnableLambda(func)
langchain_core/runnables/branch.py CHANGED
@@ -63,6 +63,7 @@ class RunnableBranch(RunnableSerializable[Input, Output]):

  branch.invoke("hello") # "HELLO"
  branch.invoke(None) # "goodbye"
+
  """

  branches: Sequence[tuple[Runnable[Input, bool], Runnable[Input, Output]]]
langchain_core/runnables/config.py CHANGED
@@ -402,7 +402,7 @@ def call_func_with_variable_args(
  Callable[[Input, CallbackManagerForChainRun], Output],
  Callable[[Input, CallbackManagerForChainRun, RunnableConfig], Output],
  ],
- input: Input, # noqa: A002
+ input: Input,
  config: RunnableConfig,
  run_manager: Optional[CallbackManagerForChainRun] = None,
  **kwargs: Any,
@@ -439,7 +439,7 @@ def acall_func_with_variable_args(
  Awaitable[Output],
  ],
  ],
- input: Input, # noqa: A002
+ input: Input,
  config: RunnableConfig,
  run_manager: Optional[AsyncCallbackManagerForChainRun] = None,
  **kwargs: Any,
langchain_core/runnables/configurable.py CHANGED
@@ -378,6 +378,7 @@ class RunnableConfigurableFields(DynamicRunnable[Input, Output]):
  {"question": "foo", "context": "bar"},
  config={"configurable": {"hub_commit": "rlm/rag-prompt-llama"}},
  )
+
  """

  fields: dict[str, AnyConfigurableField]
@@ -544,7 +545,7 @@ class RunnableConfigurableAlternatives(DynamicRunnable[Input, Output]):
  """The alternatives to choose from."""

  default_key: str = "default"
- """The enum value to use for the default option. Defaults to "default"."""
+ """The enum value to use for the default option. Defaults to ``'default'``."""

  prefix_keys: bool
  """Whether to prefix configurable fields of each alternative with a namespace
langchain_core/runnables/fallbacks.py CHANGED
@@ -5,12 +5,7 @@ import inspect
  import typing
  from collections.abc import AsyncIterator, Iterator, Sequence
  from functools import wraps
- from typing import (
- TYPE_CHECKING,
- Any,
- Optional,
- Union,
- )
+ from typing import TYPE_CHECKING, Any, Optional, Union

  from pydantic import BaseModel, ConfigDict
  from typing_extensions import override
@@ -85,6 +80,7 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]):
  | model
  | StrOutputParser()
  ).with_fallbacks([RunnableLambda(when_all_is_lost)])
+
  """

  runnable: Runnable[Input, Output]
@@ -598,7 +594,7 @@ class RunnableWithFallbacks(RunnableSerializable[Input, Output]):
  from langchain_anthropic import ChatAnthropic

  gpt_4o = ChatOpenAI(model="gpt-4o")
- claude_3_sonnet = ChatAnthropic(model="claude-3-sonnet-20240229")
+ claude_3_sonnet = ChatAnthropic(model="claude-3-7-sonnet-20250219")
  llm = gpt_4o.with_fallbacks([claude_3_sonnet])

  llm.model_name
langchain_core/runnables/graph.py CHANGED
@@ -114,7 +114,7 @@ class Node(NamedTuple):
  def copy(
  self,
  *,
- id: Optional[str] = None, # noqa: A002
+ id: Optional[str] = None,
  name: Optional[str] = None,
  ) -> Node:
  """Return a copy of the node with optional new id and name.
@@ -187,7 +187,7 @@ class MermaidDrawMethod(Enum):


  def node_data_str(
- id: str, # noqa: A002
+ id: str,
  data: Union[type[BaseModel], RunnableType, None],
  ) -> str:
  """Convert the data of a node to a string.
@@ -328,7 +328,7 @@ class Graph:
  def add_node(
  self,
  data: Union[type[BaseModel], RunnableType, None],
- id: Optional[str] = None, # noqa: A002
+ id: Optional[str] = None,
  *,
  metadata: Optional[dict[str, Any]] = None,
  ) -> Node:
@@ -611,6 +611,7 @@ class Graph:

  Returns:
  The Mermaid syntax string.
+
  """
  from langchain_core.runnables.graph_mermaid import draw_mermaid

@@ -681,6 +682,7 @@ class Graph:

  Returns:
  The PNG image as bytes.
+
  """
  from langchain_core.runnables.graph_mermaid import draw_mermaid_png

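Both drawing helpers are reachable from any runnable through its graph; a small sketch, with an arbitrary chain:

```python
from langchain_core.runnables import RunnableLambda

chain = RunnableLambda(lambda x: x + 1) | RunnableLambda(lambda x: x * 2)
graph = chain.get_graph()

print(graph.draw_ascii())    # plain-text box diagram (needs the optional grandalf package)
print(graph.draw_mermaid())  # Mermaid syntax string
# graph.draw_mermaid_png() returns the same diagram as PNG bytes (needs an external renderer)
```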
686
688
 
langchain_core/runnables/graph_ascii.py CHANGED
@@ -263,6 +263,7 @@ def draw_ascii(vertices: Mapping[str, str], edges: Sequence[LangEdge]) -> str:
  +---+ +---+
  | 3 | | 4 |
  +---+ +---+
+
  """
  # NOTE: coordinates might me negative, so we need to shift
  # everything to the positive plane before we actually draw it.
langchain_core/runnables/graph_mermaid.py CHANGED
@@ -70,6 +70,7 @@ def draw_mermaid(

  Returns:
  str: Mermaid graph syntax.
+
  """
  # Initialize Mermaid graph configuration
  original_frontmatter_config = frontmatter_config or {}
langchain_core/runnables/history.py CHANGED
@@ -311,6 +311,7 @@ class RunnableWithMessageHistory(RunnableBindingBase):
  into the get_session_history factory.
  **kwargs: Arbitrary additional kwargs to pass to parent class
  ``RunnableBindingBase`` init.
+
  """
  history_chain: Runnable = RunnableLambda(
  self._enter_history, self._aenter_history
langchain_core/runnables/passthrough.py CHANGED
@@ -132,6 +132,7 @@ class RunnablePassthrough(RunnableSerializable[Other, Other]):

  runnable.invoke('hello')
  # {'llm1': 'completion', 'llm2': 'completion', 'total_chars': 20}
+
  """

  input_type: Optional[type[Other]] = None
@@ -393,6 +394,7 @@ class RunnableAssign(RunnableSerializable[dict[str, Any], dict[str, Any]]):
  # Asynchronous example
  await runnable_assign.ainvoke({"input": 5})
  # returns {'input': 5, 'add_step': {'added': 15}}
+
  """

  mapper: RunnableParallel
@@ -697,6 +699,7 @@ class RunnablePick(RunnableSerializable[dict[str, Any], dict[str, Any]]):
  output_data = runnable.invoke(input_data)

  print(output_data) # Output: {'name': 'John', 'age': 30}
+
  """

  keys: Union[str, list[str]]
langchain_core/runnables/retry.py CHANGED
@@ -110,6 +110,7 @@ class RunnableRetry(RunnableBindingBase[Input, Output]):
  # Bad
  chain = template | model
  retryable_chain = chain.with_retry()
+
  """ # noqa: E501

  retry_exception_types: tuple[type[BaseException], ...] = (Exception,)
langchain_core/runnables/router.py CHANGED
@@ -66,6 +66,7 @@ class RouterRunnable(RunnableSerializable[RouterInput, Output]):

  router = RouterRunnable(runnables={"add": add, "square": square})
  router.invoke({"key": "square", "input": 3})
+
  """

  runnables: Mapping[str, Runnable[Any, Output]]
langchain_core/runnables/schema.py CHANGED
@@ -83,6 +83,7 @@ class BaseStreamEvent(TypedDict):
  "tags": [],
  },
  ]
+
  """

  event: str
langchain_core/stores.py CHANGED
@@ -76,6 +76,7 @@ class BaseStore(ABC, Generic[K, V]):
  for key in self.store.keys():
  if key.startswith(prefix):
  yield key
+
  """

  @abstractmethod
@@ -302,6 +303,7 @@ class InMemoryStore(InMemoryBaseStore[Any]):
  # ['key2']
  list(store.yield_keys(prefix='k'))
  # ['key2']
+
  """


@@ -327,6 +329,7 @@ class InMemoryByteStore(InMemoryBaseStore[bytes]):
  # ['key2']
  list(store.yield_keys(prefix='k'))
  # ['key2']
+
  """
