langchain-core 0.4.0.dev0__py3-none-any.whl → 1.0.0a1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of langchain-core might be problematic. See the package registry page for more details.

Files changed (74)
  1. langchain_core/_api/beta_decorator.py +2 -2
  2. langchain_core/_api/deprecation.py +1 -1
  3. langchain_core/beta/runnables/context.py +1 -1
  4. langchain_core/callbacks/base.py +14 -23
  5. langchain_core/callbacks/file.py +13 -2
  6. langchain_core/callbacks/manager.py +74 -157
  7. langchain_core/callbacks/streaming_stdout.py +3 -4
  8. langchain_core/callbacks/usage.py +2 -12
  9. langchain_core/chat_history.py +6 -6
  10. langchain_core/documents/base.py +1 -1
  11. langchain_core/documents/compressor.py +9 -6
  12. langchain_core/indexing/base.py +2 -2
  13. langchain_core/language_models/_utils.py +230 -101
  14. langchain_core/language_models/base.py +35 -23
  15. langchain_core/language_models/chat_models.py +245 -53
  16. langchain_core/language_models/fake_chat_models.py +28 -81
  17. langchain_core/load/dump.py +3 -4
  18. langchain_core/messages/__init__.py +38 -22
  19. langchain_core/messages/ai.py +188 -30
  20. langchain_core/messages/base.py +164 -25
  21. langchain_core/messages/block_translators/__init__.py +89 -0
  22. langchain_core/messages/block_translators/anthropic.py +451 -0
  23. langchain_core/messages/block_translators/bedrock.py +45 -0
  24. langchain_core/messages/block_translators/bedrock_converse.py +47 -0
  25. langchain_core/messages/block_translators/google_genai.py +45 -0
  26. langchain_core/messages/block_translators/google_vertexai.py +47 -0
  27. langchain_core/messages/block_translators/groq.py +45 -0
  28. langchain_core/messages/block_translators/langchain_v0.py +297 -0
  29. langchain_core/messages/block_translators/ollama.py +45 -0
  30. langchain_core/messages/block_translators/openai.py +586 -0
  31. langchain_core/messages/{content_blocks.py → content.py} +346 -213
  32. langchain_core/messages/human.py +29 -9
  33. langchain_core/messages/system.py +29 -9
  34. langchain_core/messages/tool.py +94 -13
  35. langchain_core/messages/utils.py +32 -234
  36. langchain_core/output_parsers/base.py +14 -50
  37. langchain_core/output_parsers/json.py +2 -5
  38. langchain_core/output_parsers/list.py +2 -7
  39. langchain_core/output_parsers/openai_functions.py +5 -28
  40. langchain_core/output_parsers/openai_tools.py +49 -90
  41. langchain_core/output_parsers/pydantic.py +2 -3
  42. langchain_core/output_parsers/transform.py +12 -53
  43. langchain_core/output_parsers/xml.py +9 -17
  44. langchain_core/prompt_values.py +8 -112
  45. langchain_core/prompts/chat.py +1 -3
  46. langchain_core/runnables/base.py +500 -451
  47. langchain_core/runnables/branch.py +1 -1
  48. langchain_core/runnables/fallbacks.py +4 -4
  49. langchain_core/runnables/history.py +1 -1
  50. langchain_core/runnables/passthrough.py +3 -3
  51. langchain_core/runnables/retry.py +1 -1
  52. langchain_core/runnables/router.py +1 -1
  53. langchain_core/structured_query.py +3 -7
  54. langchain_core/tools/base.py +14 -41
  55. langchain_core/tools/convert.py +2 -22
  56. langchain_core/tools/retriever.py +1 -8
  57. langchain_core/tools/structured.py +2 -10
  58. langchain_core/tracers/_streaming.py +6 -7
  59. langchain_core/tracers/base.py +7 -14
  60. langchain_core/tracers/core.py +4 -27
  61. langchain_core/tracers/event_stream.py +4 -15
  62. langchain_core/tracers/langchain.py +3 -14
  63. langchain_core/tracers/log_stream.py +2 -3
  64. langchain_core/utils/_merge.py +45 -7
  65. langchain_core/utils/function_calling.py +22 -9
  66. langchain_core/utils/utils.py +29 -0
  67. langchain_core/version.py +1 -1
  68. {langchain_core-0.4.0.dev0.dist-info → langchain_core-1.0.0a1.dist-info}/METADATA +7 -9
  69. {langchain_core-0.4.0.dev0.dist-info → langchain_core-1.0.0a1.dist-info}/RECORD +71 -64
  70. langchain_core/v1/__init__.py +0 -1
  71. langchain_core/v1/chat_models.py +0 -1047
  72. langchain_core/v1/messages.py +0 -755
  73. {langchain_core-0.4.0.dev0.dist-info → langchain_core-1.0.0a1.dist-info}/WHEEL +0 -0
  74. {langchain_core-0.4.0.dev0.dist-info → langchain_core-1.0.0a1.dist-info}/entry_points.txt +0 -0
@@ -11,15 +11,7 @@ from abc import ABC, abstractmethod
11
11
  from concurrent.futures import ThreadPoolExecutor
12
12
  from contextlib import asynccontextmanager, contextmanager
13
13
  from contextvars import copy_context
14
- from typing import (
15
- TYPE_CHECKING,
16
- Any,
17
- Callable,
18
- Optional,
19
- TypeVar,
20
- Union,
21
- cast,
22
- )
14
+ from typing import TYPE_CHECKING, Any, Callable, Optional, TypeVar, Union, cast
23
15
  from uuid import UUID
24
16
 
25
17
  from langsmith.run_helpers import get_tracing_context
@@ -37,16 +29,8 @@ from langchain_core.callbacks.base import (
37
29
  )
38
30
  from langchain_core.callbacks.stdout import StdOutCallbackHandler
39
31
  from langchain_core.messages import BaseMessage, get_buffer_string
40
- from langchain_core.messages.utils import convert_from_v1_message
41
- from langchain_core.outputs import ChatGeneration, ChatGenerationChunk, LLMResult
42
32
  from langchain_core.tracers.schemas import Run
43
33
  from langchain_core.utils.env import env_var_is_set
44
- from langchain_core.v1.messages import (
45
- AIMessage,
46
- AIMessageChunk,
47
- MessageV1,
48
- MessageV1Types,
49
- )
50
34
 
51
35
  if TYPE_CHECKING:
52
36
  from collections.abc import AsyncGenerator, Coroutine, Generator, Sequence
@@ -55,7 +39,7 @@ if TYPE_CHECKING:
55
39
 
56
40
  from langchain_core.agents import AgentAction, AgentFinish
57
41
  from langchain_core.documents import Document
58
- from langchain_core.outputs import GenerationChunk
42
+ from langchain_core.outputs import ChatGenerationChunk, GenerationChunk, LLMResult
59
43
  from langchain_core.runnables.config import RunnableConfig
60
44
 
61
45
  logger = logging.getLogger(__name__)
@@ -100,7 +84,8 @@ def trace_as_chain_group(
100
84
  metadata (dict[str, Any], optional): The metadata to apply to all runs.
101
85
  Defaults to None.
102
86
 
103
- Note: must have LANGCHAIN_TRACING_V2 env var set to true to see the trace in LangSmith.
87
+ .. note:
88
+ Must have ``LANGCHAIN_TRACING_V2`` env var set to true to see the trace in LangSmith.
104
89
 
105
90
  Returns:
106
91
  CallbackManagerForChainGroup: The callback manager for the chain group.
@@ -185,7 +170,8 @@ async def atrace_as_chain_group(
185
170
  Returns:
186
171
  AsyncCallbackManager: The async callback manager for the chain group.
187
172
 
188
- Note: must have LANGCHAIN_TRACING_V2 env var set to true to see the trace in LangSmith.
173
+ .. note:
174
+ Must have ``LANGCHAIN_TRACING_V2`` env var set to true to see the trace in LangSmith.
189
175
 
190
176
  Example:
191
177
  .. code-block:: python
@@ -242,6 +228,7 @@ def shielded(func: Func) -> Func:
242
228
 
243
229
  Returns:
244
230
  Callable: The shielded function
231
+
245
232
  """
246
233
 
247
234
  @functools.wraps(func)
@@ -251,46 +238,6 @@ def shielded(func: Func) -> Func:
251
238
  return cast("Func", wrapped)
252
239
 
253
240
 
254
- def _convert_llm_events(
255
- event_name: str, args: tuple[Any, ...], kwargs: dict[str, Any]
256
- ) -> tuple[tuple[Any, ...], dict[str, Any]]:
257
- args_list = list(args)
258
- if (
259
- event_name == "on_chat_model_start"
260
- and isinstance(args_list[1], list)
261
- and args_list[1]
262
- and isinstance(args_list[1][0], MessageV1Types)
263
- ):
264
- batch = [
265
- convert_from_v1_message(item)
266
- for item in args_list[1]
267
- if isinstance(item, MessageV1Types)
268
- ]
269
- args_list[1] = [batch]
270
- elif (
271
- event_name == "on_llm_new_token"
272
- and "chunk" in kwargs
273
- and isinstance(kwargs["chunk"], MessageV1Types)
274
- ):
275
- chunk = kwargs["chunk"]
276
- kwargs["chunk"] = ChatGenerationChunk(text=chunk.text, message=chunk)
277
- elif event_name == "on_llm_end" and isinstance(args_list[0], MessageV1Types):
278
- args_list[0] = LLMResult(
279
- generations=[
280
- [
281
- ChatGeneration(
282
- text=args_list[0].text,
283
- message=convert_from_v1_message(args_list[0]),
284
- )
285
- ]
286
- ]
287
- )
288
- else:
289
- pass
290
-
291
- return tuple(args_list), kwargs
292
-
293
-
294
241
  def handle_event(
295
242
  handlers: list[BaseCallbackHandler],
296
243
  event_name: str,
@@ -300,15 +247,17 @@ def handle_event(
300
247
  ) -> None:
301
248
  """Generic event handler for CallbackManager.
302
249
 
303
- Note: This function is used by LangServe to handle events.
250
+ .. note::
251
+ This function is used by ``LangServe`` to handle events.
304
252
 
305
253
  Args:
306
254
  handlers: The list of handlers that will handle the event.
307
- event_name: The name of the event (e.g., "on_llm_start").
255
+ event_name: The name of the event (e.g., ``'on_llm_start'``).
308
256
  ignore_condition_name: Name of the attribute defined on handler
309
257
  that if True will cause the handler to be skipped for the given event.
310
258
  *args: The arguments to pass to the event handler.
311
259
  **kwargs: The keyword arguments to pass to the event handler
260
+
312
261
  """
313
262
  coros: list[Coroutine[Any, Any, Any]] = []
314
263
 
@@ -319,8 +268,6 @@ def handle_event(
319
268
  if ignore_condition_name is None or not getattr(
320
269
  handler, ignore_condition_name
321
270
  ):
322
- if not handler.accepts_new_messages:
323
- args, kwargs = _convert_llm_events(event_name, args, kwargs)
324
271
  event = getattr(handler, event_name)(*args, **kwargs)
325
272
  if asyncio.iscoroutine(event):
326
273
  coros.append(event)
@@ -415,8 +362,6 @@ async def _ahandle_event_for_handler(
415
362
  ) -> None:
416
363
  try:
417
364
  if ignore_condition_name is None or not getattr(handler, ignore_condition_name):
418
- if not handler.accepts_new_messages:
419
- args, kwargs = _convert_llm_events(event_name, args, kwargs)
420
365
  event = getattr(handler, event_name)
421
366
  if asyncio.iscoroutinefunction(event):
422
367
  await event(*args, **kwargs)
@@ -467,17 +412,19 @@ async def ahandle_event(
467
412
  *args: Any,
468
413
  **kwargs: Any,
469
414
  ) -> None:
470
- """Async generic event handler for AsyncCallbackManager.
415
+ """Async generic event handler for ``AsyncCallbackManager``.
471
416
 
472
- Note: This function is used by LangServe to handle events.
417
+ .. note::
418
+ This function is used by ``LangServe`` to handle events.
473
419
 
474
420
  Args:
475
421
  handlers: The list of handlers that will handle the event.
476
- event_name: The name of the event (e.g., "on_llm_start").
422
+ event_name: The name of the event (e.g., ``'on_llm_start'``).
477
423
  ignore_condition_name: Name of the attribute defined on handler
478
424
  that if True will cause the handler to be skipped for the given event.
479
425
  *args: The arguments to pass to the event handler.
480
426
  **kwargs: The keyword arguments to pass to the event handler.
427
+
481
428
  """
482
429
  for handler in [h for h in handlers if h.run_inline]:
483
430
  await _ahandle_event_for_handler(
@@ -529,6 +476,7 @@ class BaseRunManager(RunManagerMixin):
529
476
  Defaults to None.
530
477
  inheritable_metadata (Optional[dict[str, Any]]): The inheritable metadata.
531
478
  Defaults to None.
479
+
532
480
  """
533
481
  self.run_id = run_id
534
482
  self.handlers = handlers
@@ -545,6 +493,7 @@ class BaseRunManager(RunManagerMixin):
545
493
 
546
494
  Returns:
547
495
  BaseRunManager: The noop manager.
496
+
548
497
  """
549
498
  return cls(
550
499
  run_id=uuid.uuid4(),
@@ -597,6 +546,7 @@ class RunManager(BaseRunManager):
597
546
  Args:
598
547
  retry_state (RetryCallState): The retry state.
599
548
  **kwargs (Any): Additional keyword arguments.
549
+
600
550
  """
601
551
  if not self.handlers:
602
552
  return
@@ -624,6 +574,7 @@ class ParentRunManager(RunManager):
624
574
 
625
575
  Returns:
626
576
  CallbackManager: The child callback manager.
577
+
627
578
  """
628
579
  manager = CallbackManager(handlers=[], parent_run_id=self.run_id)
629
580
  manager.set_handlers(self.inheritable_handlers)
@@ -643,6 +594,7 @@ class AsyncRunManager(BaseRunManager, ABC):
643
594
 
644
595
  Returns:
645
596
  RunManager: The sync RunManager.
597
+
646
598
  """
647
599
 
648
600
  async def on_text(
@@ -658,6 +610,7 @@ class AsyncRunManager(BaseRunManager, ABC):
658
610
 
659
611
  Returns:
660
612
  Any: The result of the callback.
613
+
661
614
  """
662
615
  if not self.handlers:
663
616
  return
@@ -682,6 +635,7 @@ class AsyncRunManager(BaseRunManager, ABC):
682
635
  Args:
683
636
  retry_state (RetryCallState): The retry state.
684
637
  **kwargs (Any): Additional keyword arguments.
638
+
685
639
  """
686
640
  if not self.handlers:
687
641
  return
@@ -709,6 +663,7 @@ class AsyncParentRunManager(AsyncRunManager):
709
663
 
710
664
  Returns:
711
665
  AsyncCallbackManager: The child callback manager.
666
+
712
667
  """
713
668
  manager = AsyncCallbackManager(handlers=[], parent_run_id=self.run_id)
714
669
  manager.set_handlers(self.inheritable_handlers)
@@ -726,9 +681,7 @@ class CallbackManagerForLLMRun(RunManager, LLMManagerMixin):
726
681
  self,
727
682
  token: str,
728
683
  *,
729
- chunk: Optional[
730
- Union[GenerationChunk, ChatGenerationChunk, AIMessageChunk]
731
- ] = None,
684
+ chunk: Optional[Union[GenerationChunk, ChatGenerationChunk]] = None,
732
685
  **kwargs: Any,
733
686
  ) -> None:
734
687
  """Run when LLM generates a new token.
@@ -738,6 +691,7 @@ class CallbackManagerForLLMRun(RunManager, LLMManagerMixin):
738
691
  chunk (Optional[Union[GenerationChunk, ChatGenerationChunk]], optional):
739
692
  The chunk. Defaults to None.
740
693
  **kwargs (Any): Additional keyword arguments.
694
+
741
695
  """
742
696
  if not self.handlers:
743
697
  return
@@ -753,12 +707,13 @@ class CallbackManagerForLLMRun(RunManager, LLMManagerMixin):
753
707
  **kwargs,
754
708
  )
755
709
 
756
- def on_llm_end(self, response: Union[LLMResult, AIMessage], **kwargs: Any) -> None:
710
+ def on_llm_end(self, response: LLMResult, **kwargs: Any) -> None:
757
711
  """Run when LLM ends running.
758
712
 
759
713
  Args:
760
- response (LLMResult | AIMessage): The LLM result.
714
+ response (LLMResult): The LLM result.
761
715
  **kwargs (Any): Additional keyword arguments.
716
+
762
717
  """
763
718
  if not self.handlers:
764
719
  return
@@ -783,8 +738,8 @@ class CallbackManagerForLLMRun(RunManager, LLMManagerMixin):
783
738
  Args:
784
739
  error (Exception or KeyboardInterrupt): The error.
785
740
  kwargs (Any): Additional keyword arguments.
786
- - response (LLMResult | AIMessage): The response which was generated
787
- before the error occurred.
741
+ - response (LLMResult): The response which was generated before
742
+ the error occurred.
788
743
  """
789
744
  if not self.handlers:
790
745
  return
@@ -808,6 +763,7 @@ class AsyncCallbackManagerForLLMRun(AsyncRunManager, LLMManagerMixin):
808
763
 
809
764
  Returns:
810
765
  CallbackManagerForLLMRun: The sync RunManager.
766
+
811
767
  """
812
768
  return CallbackManagerForLLMRun(
813
769
  run_id=self.run_id,
@@ -824,9 +780,7 @@ class AsyncCallbackManagerForLLMRun(AsyncRunManager, LLMManagerMixin):
824
780
  self,
825
781
  token: str,
826
782
  *,
827
- chunk: Optional[
828
- Union[GenerationChunk, ChatGenerationChunk, AIMessageChunk]
829
- ] = None,
783
+ chunk: Optional[Union[GenerationChunk, ChatGenerationChunk]] = None,
830
784
  **kwargs: Any,
831
785
  ) -> None:
832
786
  """Run when LLM generates a new token.
@@ -836,6 +790,7 @@ class AsyncCallbackManagerForLLMRun(AsyncRunManager, LLMManagerMixin):
836
790
  chunk (Optional[Union[GenerationChunk, ChatGenerationChunk]], optional):
837
791
  The chunk. Defaults to None.
838
792
  **kwargs (Any): Additional keyword arguments.
793
+
839
794
  """
840
795
  if not self.handlers:
841
796
  return
@@ -852,14 +807,13 @@ class AsyncCallbackManagerForLLMRun(AsyncRunManager, LLMManagerMixin):
852
807
  )
853
808
 
854
809
  @shielded
855
- async def on_llm_end(
856
- self, response: Union[LLMResult, AIMessage], **kwargs: Any
857
- ) -> None:
810
+ async def on_llm_end(self, response: LLMResult, **kwargs: Any) -> None:
858
811
  """Run when LLM ends running.
859
812
 
860
813
  Args:
861
- response (LLMResult | AIMessage): The LLM result.
814
+ response (LLMResult): The LLM result.
862
815
  **kwargs (Any): Additional keyword arguments.
816
+
863
817
  """
864
818
  if not self.handlers:
865
819
  return
@@ -885,8 +839,11 @@ class AsyncCallbackManagerForLLMRun(AsyncRunManager, LLMManagerMixin):
885
839
  Args:
886
840
  error (Exception or KeyboardInterrupt): The error.
887
841
  kwargs (Any): Additional keyword arguments.
888
- - response (LLMResult | AIMessage): The response which was generated
889
- before the error occurred.
842
+ - response (LLMResult): The response which was generated before
843
+ the error occurred.
844
+
845
+
846
+
890
847
  """
891
848
  if not self.handlers:
892
849
  return
@@ -911,6 +868,7 @@ class CallbackManagerForChainRun(ParentRunManager, ChainManagerMixin):
911
868
  Args:
912
869
  outputs (Union[dict[str, Any], Any]): The outputs of the chain.
913
870
  **kwargs (Any): Additional keyword arguments.
871
+
914
872
  """
915
873
  if not self.handlers:
916
874
  return
@@ -935,6 +893,7 @@ class CallbackManagerForChainRun(ParentRunManager, ChainManagerMixin):
935
893
  Args:
936
894
  error (Exception or KeyboardInterrupt): The error.
937
895
  **kwargs (Any): Additional keyword arguments.
896
+
938
897
  """
939
898
  if not self.handlers:
940
899
  return
@@ -958,6 +917,7 @@ class CallbackManagerForChainRun(ParentRunManager, ChainManagerMixin):
958
917
 
959
918
  Returns:
960
919
  Any: The result of the callback.
920
+
961
921
  """
962
922
  if not self.handlers:
963
923
  return
@@ -981,6 +941,7 @@ class CallbackManagerForChainRun(ParentRunManager, ChainManagerMixin):
981
941
 
982
942
  Returns:
983
943
  Any: The result of the callback.
944
+
984
945
  """
985
946
  if not self.handlers:
986
947
  return
@@ -1025,6 +986,7 @@ class AsyncCallbackManagerForChainRun(AsyncParentRunManager, ChainManagerMixin):
1025
986
  Args:
1026
987
  outputs (Union[dict[str, Any], Any]): The outputs of the chain.
1027
988
  **kwargs (Any): Additional keyword arguments.
989
+
1028
990
  """
1029
991
  if not self.handlers:
1030
992
  return
@@ -1050,6 +1012,7 @@ class AsyncCallbackManagerForChainRun(AsyncParentRunManager, ChainManagerMixin):
1050
1012
  Args:
1051
1013
  error (Exception or KeyboardInterrupt): The error.
1052
1014
  **kwargs (Any): Additional keyword arguments.
1015
+
1053
1016
  """
1054
1017
  if not self.handlers:
1055
1018
  return
@@ -1073,6 +1036,7 @@ class AsyncCallbackManagerForChainRun(AsyncParentRunManager, ChainManagerMixin):
1073
1036
 
1074
1037
  Returns:
1075
1038
  Any: The result of the callback.
1039
+
1076
1040
  """
1077
1041
  if not self.handlers:
1078
1042
  return
@@ -1096,6 +1060,7 @@ class AsyncCallbackManagerForChainRun(AsyncParentRunManager, ChainManagerMixin):
1096
1060
 
1097
1061
  Returns:
1098
1062
  Any: The result of the callback.
1063
+
1099
1064
  """
1100
1065
  if not self.handlers:
1101
1066
  return
@@ -1124,6 +1089,7 @@ class CallbackManagerForToolRun(ParentRunManager, ToolManagerMixin):
1124
1089
  Args:
1125
1090
  output (Any): The output of the tool.
1126
1091
  **kwargs (Any): The keyword arguments to pass to the event handler
1092
+
1127
1093
  """
1128
1094
  if not self.handlers:
1129
1095
  return
@@ -1148,6 +1114,7 @@ class CallbackManagerForToolRun(ParentRunManager, ToolManagerMixin):
1148
1114
  Args:
1149
1115
  error (Exception or KeyboardInterrupt): The error.
1150
1116
  **kwargs (Any): Additional keyword arguments.
1117
+
1151
1118
  """
1152
1119
  if not self.handlers:
1153
1120
  return
@@ -1189,6 +1156,7 @@ class AsyncCallbackManagerForToolRun(AsyncParentRunManager, ToolManagerMixin):
1189
1156
  Args:
1190
1157
  output (Any): The output of the tool.
1191
1158
  **kwargs (Any): Additional keyword arguments.
1159
+
1192
1160
  """
1193
1161
  if not self.handlers:
1194
1162
  return
@@ -1213,6 +1181,7 @@ class AsyncCallbackManagerForToolRun(AsyncParentRunManager, ToolManagerMixin):
1213
1181
  Args:
1214
1182
  error (Exception or KeyboardInterrupt): The error.
1215
1183
  **kwargs (Any): Additional keyword arguments.
1184
+
1216
1185
  """
1217
1186
  if not self.handlers:
1218
1187
  return
@@ -1241,6 +1210,7 @@ class CallbackManagerForRetrieverRun(ParentRunManager, RetrieverManagerMixin):
1241
1210
  Args:
1242
1211
  documents (Sequence[Document]): The retrieved documents.
1243
1212
  **kwargs (Any): Additional keyword arguments.
1213
+
1244
1214
  """
1245
1215
  if not self.handlers:
1246
1216
  return
@@ -1265,6 +1235,7 @@ class CallbackManagerForRetrieverRun(ParentRunManager, RetrieverManagerMixin):
1265
1235
  Args:
1266
1236
  error (BaseException): The error.
1267
1237
  **kwargs (Any): Additional keyword arguments.
1238
+
1268
1239
  """
1269
1240
  if not self.handlers:
1270
1241
  return
@@ -1291,6 +1262,7 @@ class AsyncCallbackManagerForRetrieverRun(
1291
1262
 
1292
1263
  Returns:
1293
1264
  CallbackManagerForRetrieverRun: The sync RunManager.
1265
+
1294
1266
  """
1295
1267
  return CallbackManagerForRetrieverRun(
1296
1268
  run_id=self.run_id,
@@ -1312,6 +1284,7 @@ class AsyncCallbackManagerForRetrieverRun(
1312
1284
  Args:
1313
1285
  documents (Sequence[Document]): The retrieved documents.
1314
1286
  **kwargs (Any): Additional keyword arguments.
1287
+
1315
1288
  """
1316
1289
  if not self.handlers:
1317
1290
  return
@@ -1337,6 +1310,7 @@ class AsyncCallbackManagerForRetrieverRun(
1337
1310
  Args:
1338
1311
  error (BaseException): The error.
1339
1312
  **kwargs (Any): Additional keyword arguments.
1313
+
1340
1314
  """
1341
1315
  if not self.handlers:
1342
1316
  return
@@ -1373,6 +1347,7 @@ class CallbackManager(BaseCallbackManager):
1373
1347
  Returns:
1374
1348
  list[CallbackManagerForLLMRun]: A callback manager for each
1375
1349
  prompt as an LLM run.
1350
+
1376
1351
  """
1377
1352
  managers = []
1378
1353
  for i, prompt in enumerate(prompts):
@@ -1409,7 +1384,7 @@ class CallbackManager(BaseCallbackManager):
1409
1384
  def on_chat_model_start(
1410
1385
  self,
1411
1386
  serialized: dict[str, Any],
1412
- messages: Union[list[list[BaseMessage]], list[MessageV1]],
1387
+ messages: list[list[BaseMessage]],
1413
1388
  run_id: Optional[UUID] = None,
1414
1389
  **kwargs: Any,
1415
1390
  ) -> list[CallbackManagerForLLMRun]:
@@ -1417,40 +1392,15 @@ class CallbackManager(BaseCallbackManager):
1417
1392
 
1418
1393
  Args:
1419
1394
  serialized (dict[str, Any]): The serialized LLM.
1420
- messages (list[list[BaseMessage | MessageV1]]): The list of messages.
1395
+ messages (list[list[BaseMessage]]): The list of messages.
1421
1396
  run_id (UUID, optional): The ID of the run. Defaults to None.
1422
1397
  **kwargs (Any): Additional keyword arguments.
1423
1398
 
1424
1399
  Returns:
1425
1400
  list[CallbackManagerForLLMRun]: A callback manager for each
1426
1401
  list of messages as an LLM run.
1402
+
1427
1403
  """
1428
- if messages and isinstance(messages[0], MessageV1Types):
1429
- run_id_ = run_id if run_id is not None else uuid.uuid4()
1430
- handle_event(
1431
- self.handlers,
1432
- "on_chat_model_start",
1433
- "ignore_chat_model",
1434
- serialized,
1435
- messages,
1436
- run_id=run_id_,
1437
- parent_run_id=self.parent_run_id,
1438
- tags=self.tags,
1439
- metadata=self.metadata,
1440
- **kwargs,
1441
- )
1442
- return [
1443
- CallbackManagerForLLMRun(
1444
- run_id=run_id_,
1445
- handlers=self.handlers,
1446
- inheritable_handlers=self.inheritable_handlers,
1447
- parent_run_id=self.parent_run_id,
1448
- tags=self.tags,
1449
- inheritable_tags=self.inheritable_tags,
1450
- metadata=self.metadata,
1451
- inheritable_metadata=self.inheritable_metadata,
1452
- )
1453
- ]
1454
1404
  managers = []
1455
1405
  for message_list in messages:
1456
1406
  if run_id is not None:
@@ -1503,6 +1453,7 @@ class CallbackManager(BaseCallbackManager):
1503
1453
 
1504
1454
  Returns:
1505
1455
  CallbackManagerForChainRun: The callback manager for the chain run.
1456
+
1506
1457
  """
1507
1458
  if run_id is None:
1508
1459
  run_id = uuid.uuid4()
@@ -1557,6 +1508,7 @@ class CallbackManager(BaseCallbackManager):
1557
1508
 
1558
1509
  Returns:
1559
1510
  CallbackManagerForToolRun: The callback manager for the tool run.
1511
+
1560
1512
  """
1561
1513
  if run_id is None:
1562
1514
  run_id = uuid.uuid4()
@@ -1603,6 +1555,7 @@ class CallbackManager(BaseCallbackManager):
1603
1555
  run_id (UUID, optional): The ID of the run. Defaults to None.
1604
1556
  parent_run_id (UUID, optional): The ID of the parent run. Defaults to None.
1605
1557
  **kwargs (Any): Additional keyword arguments.
1558
+
1606
1559
  """
1607
1560
  if run_id is None:
1608
1561
  run_id = uuid.uuid4()
@@ -1650,6 +1603,7 @@ class CallbackManager(BaseCallbackManager):
1650
1603
  run_id: The ID of the run. Defaults to None.
1651
1604
 
1652
1605
  .. versionadded:: 0.2.14
1606
+
1653
1607
  """
1654
1608
  if not self.handlers:
1655
1609
  return
@@ -1704,6 +1658,7 @@ class CallbackManager(BaseCallbackManager):
1704
1658
 
1705
1659
  Returns:
1706
1660
  CallbackManager: The configured callback manager.
1661
+
1707
1662
  """
1708
1663
  return _configure(
1709
1664
  cls,
@@ -1738,6 +1693,7 @@ class CallbackManagerForChainGroup(CallbackManager):
1738
1693
  parent_run_id (Optional[UUID]): The ID of the parent run. Defaults to None.
1739
1694
  parent_run_manager (CallbackManagerForChainRun): The parent run manager.
1740
1695
  **kwargs (Any): Additional keyword arguments.
1696
+
1741
1697
  """
1742
1698
  super().__init__(
1743
1699
  handlers,
@@ -1826,6 +1782,7 @@ class CallbackManagerForChainGroup(CallbackManager):
1826
1782
  Args:
1827
1783
  outputs (Union[dict[str, Any], Any]): The outputs of the chain.
1828
1784
  **kwargs (Any): Additional keyword arguments.
1785
+
1829
1786
  """
1830
1787
  self.ended = True
1831
1788
  return self.parent_run_manager.on_chain_end(outputs, **kwargs)
@@ -1840,6 +1797,7 @@ class CallbackManagerForChainGroup(CallbackManager):
1840
1797
  Args:
1841
1798
  error (Exception or KeyboardInterrupt): The error.
1842
1799
  **kwargs (Any): Additional keyword arguments.
1800
+
1843
1801
  """
1844
1802
  self.ended = True
1845
1803
  return self.parent_run_manager.on_chain_error(error, **kwargs)
@@ -1945,7 +1903,7 @@ class AsyncCallbackManager(BaseCallbackManager):
1945
1903
  async def on_chat_model_start(
1946
1904
  self,
1947
1905
  serialized: dict[str, Any],
1948
- messages: Union[list[list[BaseMessage]], list[MessageV1]],
1906
+ messages: list[list[BaseMessage]],
1949
1907
  run_id: Optional[UUID] = None,
1950
1908
  **kwargs: Any,
1951
1909
  ) -> list[AsyncCallbackManagerForLLMRun]:
@@ -1953,7 +1911,7 @@ class AsyncCallbackManager(BaseCallbackManager):
1953
1911
 
1954
1912
  Args:
1955
1913
  serialized (dict[str, Any]): The serialized LLM.
1956
- messages (list[list[BaseMessage | MessageV1]]): The list of messages.
1914
+ messages (list[list[BaseMessage]]): The list of messages.
1957
1915
  run_id (UUID, optional): The ID of the run. Defaults to None.
1958
1916
  **kwargs (Any): Additional keyword arguments.
1959
1917
 
@@ -1962,51 +1920,10 @@ class AsyncCallbackManager(BaseCallbackManager):
1962
1920
  async callback managers, one for each LLM Run
1963
1921
  corresponding to each inner message list.
1964
1922
  """
1965
- if messages and isinstance(messages[0], MessageV1Types):
1966
- run_id_ = run_id if run_id is not None else uuid.uuid4()
1967
- inline_tasks = []
1968
- non_inline_tasks = []
1969
- for handler in self.handlers:
1970
- task = ahandle_event(
1971
- [handler],
1972
- "on_chat_model_start",
1973
- "ignore_chat_model",
1974
- serialized,
1975
- messages,
1976
- run_id=run_id_,
1977
- parent_run_id=self.parent_run_id,
1978
- tags=self.tags,
1979
- metadata=self.metadata,
1980
- **kwargs,
1981
- )
1982
- if handler.run_inline:
1983
- inline_tasks.append(task)
1984
- else:
1985
- non_inline_tasks.append(task)
1986
- managers = [
1987
- AsyncCallbackManagerForLLMRun(
1988
- run_id=run_id_,
1989
- handlers=self.handlers,
1990
- inheritable_handlers=self.inheritable_handlers,
1991
- parent_run_id=self.parent_run_id,
1992
- tags=self.tags,
1993
- inheritable_tags=self.inheritable_tags,
1994
- metadata=self.metadata,
1995
- inheritable_metadata=self.inheritable_metadata,
1996
- )
1997
- ]
1998
- # Run inline tasks sequentially
1999
- for task in inline_tasks:
2000
- await task
2001
-
2002
- # Run non-inline tasks concurrently
2003
- if non_inline_tasks:
2004
- await asyncio.gather(*non_inline_tasks)
2005
-
2006
- return managers
2007
1923
  inline_tasks = []
2008
1924
  non_inline_tasks = []
2009
1925
  managers = []
1926
+
2010
1927
  for message_list in messages:
2011
1928
  if run_id is not None:
2012
1929
  run_id_ = run_id
@@ -3,7 +3,7 @@
3
3
  from __future__ import annotations
4
4
 
5
5
  import sys
6
- from typing import TYPE_CHECKING, Any, Union
6
+ from typing import TYPE_CHECKING, Any
7
7
 
8
8
  from typing_extensions import override
9
9
 
@@ -13,7 +13,6 @@ if TYPE_CHECKING:
13
13
  from langchain_core.agents import AgentAction, AgentFinish
14
14
  from langchain_core.messages import BaseMessage
15
15
  from langchain_core.outputs import LLMResult
16
- from langchain_core.v1.messages import AIMessage, MessageV1
17
16
 
18
17
 
19
18
  class StreamingStdOutCallbackHandler(BaseCallbackHandler):
@@ -33,7 +32,7 @@ class StreamingStdOutCallbackHandler(BaseCallbackHandler):
33
32
  def on_chat_model_start(
34
33
  self,
35
34
  serialized: dict[str, Any],
36
- messages: Union[list[list[BaseMessage]], list[MessageV1]],
35
+ messages: list[list[BaseMessage]],
37
36
  **kwargs: Any,
38
37
  ) -> None:
39
38
  """Run when LLM starts running.
@@ -55,7 +54,7 @@ class StreamingStdOutCallbackHandler(BaseCallbackHandler):
55
54
  sys.stdout.write(token)
56
55
  sys.stdout.flush()
57
56
 
58
- def on_llm_end(self, response: Union[LLMResult, AIMessage], **kwargs: Any) -> None:
57
+ def on_llm_end(self, response: LLMResult, **kwargs: Any) -> None:
59
58
  """Run when LLM ends running.
60
59
 
61
60
  Args: