langchain-core 0.3.74__py3-none-any.whl → 0.3.76__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of langchain-core might be problematic. Click here for more details.

Files changed (122)
  1. langchain_core/_api/beta_decorator.py +18 -41
  2. langchain_core/_api/deprecation.py +20 -7
  3. langchain_core/_api/path.py +19 -2
  4. langchain_core/_import_utils.py +7 -0
  5. langchain_core/agents.py +10 -6
  6. langchain_core/beta/runnables/context.py +2 -3
  7. langchain_core/callbacks/base.py +11 -4
  8. langchain_core/callbacks/file.py +13 -2
  9. langchain_core/callbacks/manager.py +129 -78
  10. langchain_core/callbacks/usage.py +4 -2
  11. langchain_core/chat_history.py +10 -12
  12. langchain_core/document_loaders/base.py +34 -9
  13. langchain_core/document_loaders/langsmith.py +3 -0
  14. langchain_core/documents/base.py +36 -11
  15. langchain_core/documents/compressor.py +9 -6
  16. langchain_core/documents/transformers.py +4 -2
  17. langchain_core/embeddings/fake.py +8 -5
  18. langchain_core/env.py +2 -3
  19. langchain_core/example_selectors/base.py +12 -0
  20. langchain_core/exceptions.py +7 -0
  21. langchain_core/globals.py +17 -28
  22. langchain_core/indexing/api.py +56 -44
  23. langchain_core/indexing/base.py +7 -10
  24. langchain_core/indexing/in_memory.py +23 -3
  25. langchain_core/language_models/__init__.py +3 -2
  26. langchain_core/language_models/base.py +64 -39
  27. langchain_core/language_models/chat_models.py +130 -42
  28. langchain_core/language_models/fake_chat_models.py +10 -11
  29. langchain_core/language_models/llms.py +49 -17
  30. langchain_core/load/dump.py +5 -7
  31. langchain_core/load/load.py +15 -1
  32. langchain_core/load/serializable.py +38 -43
  33. langchain_core/memory.py +7 -3
  34. langchain_core/messages/ai.py +36 -16
  35. langchain_core/messages/base.py +13 -6
  36. langchain_core/messages/content_blocks.py +23 -2
  37. langchain_core/messages/human.py +2 -6
  38. langchain_core/messages/modifier.py +1 -1
  39. langchain_core/messages/system.py +2 -6
  40. langchain_core/messages/tool.py +36 -16
  41. langchain_core/messages/utils.py +198 -87
  42. langchain_core/output_parsers/base.py +5 -2
  43. langchain_core/output_parsers/json.py +4 -4
  44. langchain_core/output_parsers/list.py +7 -22
  45. langchain_core/output_parsers/openai_functions.py +3 -0
  46. langchain_core/output_parsers/openai_tools.py +8 -1
  47. langchain_core/output_parsers/pydantic.py +4 -0
  48. langchain_core/output_parsers/string.py +5 -1
  49. langchain_core/output_parsers/transform.py +2 -2
  50. langchain_core/output_parsers/xml.py +23 -22
  51. langchain_core/outputs/chat_generation.py +18 -7
  52. langchain_core/outputs/generation.py +14 -3
  53. langchain_core/outputs/llm_result.py +8 -1
  54. langchain_core/prompt_values.py +10 -4
  55. langchain_core/prompts/base.py +4 -9
  56. langchain_core/prompts/chat.py +88 -61
  57. langchain_core/prompts/dict.py +16 -8
  58. langchain_core/prompts/few_shot.py +9 -11
  59. langchain_core/prompts/few_shot_with_templates.py +5 -1
  60. langchain_core/prompts/image.py +12 -5
  61. langchain_core/prompts/message.py +5 -6
  62. langchain_core/prompts/pipeline.py +13 -8
  63. langchain_core/prompts/prompt.py +22 -8
  64. langchain_core/prompts/string.py +18 -10
  65. langchain_core/prompts/structured.py +7 -2
  66. langchain_core/rate_limiters.py +2 -2
  67. langchain_core/retrievers.py +7 -6
  68. langchain_core/runnables/base.py +842 -567
  69. langchain_core/runnables/branch.py +15 -20
  70. langchain_core/runnables/config.py +11 -17
  71. langchain_core/runnables/configurable.py +34 -19
  72. langchain_core/runnables/fallbacks.py +24 -17
  73. langchain_core/runnables/graph.py +47 -40
  74. langchain_core/runnables/graph_ascii.py +40 -17
  75. langchain_core/runnables/graph_mermaid.py +27 -15
  76. langchain_core/runnables/graph_png.py +27 -31
  77. langchain_core/runnables/history.py +56 -59
  78. langchain_core/runnables/passthrough.py +47 -24
  79. langchain_core/runnables/retry.py +10 -6
  80. langchain_core/runnables/router.py +10 -9
  81. langchain_core/runnables/schema.py +2 -0
  82. langchain_core/runnables/utils.py +51 -89
  83. langchain_core/stores.py +13 -25
  84. langchain_core/structured_query.py +3 -7
  85. langchain_core/sys_info.py +9 -8
  86. langchain_core/tools/base.py +30 -23
  87. langchain_core/tools/convert.py +24 -13
  88. langchain_core/tools/simple.py +35 -3
  89. langchain_core/tools/structured.py +26 -3
  90. langchain_core/tracers/_streaming.py +6 -7
  91. langchain_core/tracers/base.py +2 -2
  92. langchain_core/tracers/context.py +5 -1
  93. langchain_core/tracers/core.py +109 -39
  94. langchain_core/tracers/evaluation.py +22 -26
  95. langchain_core/tracers/event_stream.py +41 -28
  96. langchain_core/tracers/langchain.py +12 -3
  97. langchain_core/tracers/langchain_v1.py +10 -2
  98. langchain_core/tracers/log_stream.py +57 -18
  99. langchain_core/tracers/root_listeners.py +4 -20
  100. langchain_core/tracers/run_collector.py +6 -16
  101. langchain_core/tracers/schemas.py +5 -1
  102. langchain_core/utils/aiter.py +14 -6
  103. langchain_core/utils/env.py +3 -0
  104. langchain_core/utils/function_calling.py +49 -30
  105. langchain_core/utils/interactive_env.py +6 -2
  106. langchain_core/utils/iter.py +11 -3
  107. langchain_core/utils/json.py +5 -2
  108. langchain_core/utils/json_schema.py +15 -5
  109. langchain_core/utils/loading.py +5 -1
  110. langchain_core/utils/mustache.py +24 -15
  111. langchain_core/utils/pydantic.py +32 -4
  112. langchain_core/utils/utils.py +24 -8
  113. langchain_core/vectorstores/base.py +7 -20
  114. langchain_core/vectorstores/in_memory.py +18 -12
  115. langchain_core/vectorstores/utils.py +18 -12
  116. langchain_core/version.py +1 -1
  117. langchain_core-0.3.76.dist-info/METADATA +77 -0
  118. langchain_core-0.3.76.dist-info/RECORD +174 -0
  119. langchain_core-0.3.74.dist-info/METADATA +0 -108
  120. langchain_core-0.3.74.dist-info/RECORD +0 -174
  121. {langchain_core-0.3.74.dist-info → langchain_core-0.3.76.dist-info}/WHEEL +0 -0
  122. {langchain_core-0.3.74.dist-info → langchain_core-0.3.76.dist-info}/entry_points.txt +0 -0
@@ -71,7 +71,7 @@ class _TracerCore(ABC):
71
71
  for streaming events.
72
72
  - 'original+chat' is a format that is the same as 'original'
73
73
  except it does NOT raise an attribute error on_chat_model_start
74
- kwargs: Additional keyword arguments that will be passed to
74
+ **kwargs: Additional keyword arguments that will be passed to
75
75
  the superclass.
76
76
  """
77
77
  super().__init__(**kwargs)
@@ -82,7 +82,7 @@ class _TracerCore(ABC):
82
82
  """Map of run ID to (trace_id, dotted_order). Cleared when tracer GCed."""
83
83
 
84
84
  @abstractmethod
85
- def _persist_run(self, run: Run) -> Union[None, Coroutine[Any, Any, None]]:
85
+ def _persist_run(self, run: Run) -> Union[Coroutine[Any, Any, None], None]:
86
86
  """Persist a run."""
87
87
 
88
88
  @staticmethod
@@ -108,7 +108,7 @@ class _TracerCore(ABC):
108
108
  except: # noqa: E722
109
109
  return msg
110
110
 
111
- def _start_trace(self, run: Run) -> Union[None, Coroutine[Any, Any, None]]: # type: ignore[return]
111
+ def _start_trace(self, run: Run) -> Union[Coroutine[Any, Any, None], None]: # type: ignore[return]
112
112
  current_dotted_order = run.start_time.strftime("%Y%m%dT%H%M%S%fZ") + str(run.id)
113
113
  if run.parent_run_id:
114
114
  if parent := self.order_map.get(run.parent_run_id):
@@ -531,27 +531,43 @@ class _TracerCore(ABC):
531
531
  return retrieval_run
532
532
 
533
533
  def __deepcopy__(self, memo: dict) -> _TracerCore:
534
- """Deepcopy the tracer."""
534
+ """Return self deepcopied."""
535
535
  return self
536
536
 
537
537
  def __copy__(self) -> _TracerCore:
538
- """Copy the tracer."""
538
+ """Return self copied."""
539
539
  return self
540
540
 
541
- def _end_trace(self, run: Run) -> Union[None, Coroutine[Any, Any, None]]: # noqa: ARG002
542
- """End a trace for a run."""
541
+ def _end_trace(self, run: Run) -> Union[Coroutine[Any, Any, None], None]: # noqa: ARG002
542
+ """End a trace for a run.
543
+
544
+ Args:
545
+ run: The run.
546
+ """
543
547
  return None
544
548
 
545
- def _on_run_create(self, run: Run) -> Union[None, Coroutine[Any, Any, None]]: # noqa: ARG002
546
- """Process a run upon creation."""
549
+ def _on_run_create(self, run: Run) -> Union[Coroutine[Any, Any, None], None]: # noqa: ARG002
550
+ """Process a run upon creation.
551
+
552
+ Args:
553
+ run: The created run.
554
+ """
547
555
  return None
548
556
 
549
- def _on_run_update(self, run: Run) -> Union[None, Coroutine[Any, Any, None]]: # noqa: ARG002
550
- """Process a run upon update."""
557
+ def _on_run_update(self, run: Run) -> Union[Coroutine[Any, Any, None], None]: # noqa: ARG002
558
+ """Process a run upon update.
559
+
560
+ Args:
561
+ run: The updated run.
562
+ """
551
563
  return None
552
564
 
553
- def _on_llm_start(self, run: Run) -> Union[None, Coroutine[Any, Any, None]]: # noqa: ARG002
554
- """Process the LLM Run upon start."""
565
+ def _on_llm_start(self, run: Run) -> Union[Coroutine[Any, Any, None], None]: # noqa: ARG002
566
+ """Process the LLM Run upon start.
567
+
568
+ Args:
569
+ run: The LLM run.
570
+ """
555
571
  return None
556
572
 
557
573
  def _on_llm_new_token(
@@ -559,54 +575,108 @@ class _TracerCore(ABC):
559
575
  run: Run, # noqa: ARG002
560
576
  token: str, # noqa: ARG002
561
577
  chunk: Optional[Union[GenerationChunk, ChatGenerationChunk]], # noqa: ARG002
562
- ) -> Union[None, Coroutine[Any, Any, None]]:
563
- """Process new LLM token."""
578
+ ) -> Union[Coroutine[Any, Any, None], None]:
579
+ """Process new LLM token.
580
+
581
+ Args:
582
+ run: The LLM run.
583
+ token: The new token.
584
+ chunk: Optional chunk.
585
+ """
564
586
  return None
565
587
 
566
- def _on_llm_end(self, run: Run) -> Union[None, Coroutine[Any, Any, None]]: # noqa: ARG002
567
- """Process the LLM Run."""
588
+ def _on_llm_end(self, run: Run) -> Union[Coroutine[Any, Any, None], None]: # noqa: ARG002
589
+ """Process the LLM Run.
590
+
591
+ Args:
592
+ run: The LLM run.
593
+ """
568
594
  return None
569
595
 
570
- def _on_llm_error(self, run: Run) -> Union[None, Coroutine[Any, Any, None]]: # noqa: ARG002
571
- """Process the LLM Run upon error."""
596
+ def _on_llm_error(self, run: Run) -> Union[Coroutine[Any, Any, None], None]: # noqa: ARG002
597
+ """Process the LLM Run upon error.
598
+
599
+ Args:
600
+ run: The LLM run.
601
+ """
572
602
  return None
573
603
 
574
- def _on_chain_start(self, run: Run) -> Union[None, Coroutine[Any, Any, None]]: # noqa: ARG002
575
- """Process the Chain Run upon start."""
604
+ def _on_chain_start(self, run: Run) -> Union[Coroutine[Any, Any, None], None]: # noqa: ARG002
605
+ """Process the Chain Run upon start.
606
+
607
+ Args:
608
+ run: The chain run.
609
+ """
576
610
  return None
577
611
 
578
- def _on_chain_end(self, run: Run) -> Union[None, Coroutine[Any, Any, None]]: # noqa: ARG002
579
- """Process the Chain Run."""
612
+ def _on_chain_end(self, run: Run) -> Union[Coroutine[Any, Any, None], None]: # noqa: ARG002
613
+ """Process the Chain Run.
614
+
615
+ Args:
616
+ run: The chain run.
617
+ """
580
618
  return None
581
619
 
582
- def _on_chain_error(self, run: Run) -> Union[None, Coroutine[Any, Any, None]]: # noqa: ARG002
583
- """Process the Chain Run upon error."""
620
+ def _on_chain_error(self, run: Run) -> Union[Coroutine[Any, Any, None], None]: # noqa: ARG002
621
+ """Process the Chain Run upon error.
622
+
623
+ Args:
624
+ run: The chain run.
625
+ """
584
626
  return None
585
627
 
586
- def _on_tool_start(self, run: Run) -> Union[None, Coroutine[Any, Any, None]]: # noqa: ARG002
587
- """Process the Tool Run upon start."""
628
+ def _on_tool_start(self, run: Run) -> Union[Coroutine[Any, Any, None], None]: # noqa: ARG002
629
+ """Process the Tool Run upon start.
630
+
631
+ Args:
632
+ run: The tool run.
633
+ """
588
634
  return None
589
635
 
590
- def _on_tool_end(self, run: Run) -> Union[None, Coroutine[Any, Any, None]]: # noqa: ARG002
591
- """Process the Tool Run."""
636
+ def _on_tool_end(self, run: Run) -> Union[Coroutine[Any, Any, None], None]: # noqa: ARG002
637
+ """Process the Tool Run.
638
+
639
+ Args:
640
+ run: The tool run.
641
+ """
592
642
  return None
593
643
 
594
- def _on_tool_error(self, run: Run) -> Union[None, Coroutine[Any, Any, None]]: # noqa: ARG002
595
- """Process the Tool Run upon error."""
644
+ def _on_tool_error(self, run: Run) -> Union[Coroutine[Any, Any, None], None]: # noqa: ARG002
645
+ """Process the Tool Run upon error.
646
+
647
+ Args:
648
+ run: The tool run.
649
+ """
596
650
  return None
597
651
 
598
- def _on_chat_model_start(self, run: Run) -> Union[None, Coroutine[Any, Any, None]]: # noqa: ARG002
599
- """Process the Chat Model Run upon start."""
652
+ def _on_chat_model_start(self, run: Run) -> Union[Coroutine[Any, Any, None], None]: # noqa: ARG002
653
+ """Process the Chat Model Run upon start.
654
+
655
+ Args:
656
+ run: The chat model run.
657
+ """
600
658
  return None
601
659
 
602
- def _on_retriever_start(self, run: Run) -> Union[None, Coroutine[Any, Any, None]]: # noqa: ARG002
603
- """Process the Retriever Run upon start."""
660
+ def _on_retriever_start(self, run: Run) -> Union[Coroutine[Any, Any, None], None]: # noqa: ARG002
661
+ """Process the Retriever Run upon start.
662
+
663
+ Args:
664
+ run: The retriever run.
665
+ """
604
666
  return None
605
667
 
606
- def _on_retriever_end(self, run: Run) -> Union[None, Coroutine[Any, Any, None]]: # noqa: ARG002
607
- """Process the Retriever Run."""
668
+ def _on_retriever_end(self, run: Run) -> Union[Coroutine[Any, Any, None], None]: # noqa: ARG002
669
+ """Process the Retriever Run.
670
+
671
+ Args:
672
+ run: The retriever run.
673
+ """
608
674
  return None
609
675
 
610
- def _on_retriever_error(self, run: Run) -> Union[None, Coroutine[Any, Any, None]]: # noqa: ARG002
611
- """Process the Retriever Run upon error."""
676
+ def _on_retriever_error(self, run: Run) -> Union[Coroutine[Any, Any, None], None]: # noqa: ARG002
677
+ """Process the Retriever Run upon error.
678
+
679
+ Args:
680
+ run: The retriever run.
681
+ """
612
682
  return None
@@ -38,24 +38,27 @@ class EvaluatorCallbackHandler(BaseTracer):
38
38
  """Tracer that runs a run evaluator whenever a run is persisted.
39
39
 
40
40
  Attributes:
41
- example_id : Union[UUID, None]
42
- The example ID associated with the runs.
43
41
  client : Client
44
42
  The LangSmith client instance used for evaluating the runs.
45
- evaluators : Sequence[RunEvaluator]
46
- The sequence of run evaluators to be executed.
47
- executor : ThreadPoolExecutor
48
- The thread pool executor used for running the evaluators.
49
- futures : set[Future]
50
- The set of futures representing the running evaluators.
51
- skip_unfinished : bool
52
- Whether to skip runs that are not finished or raised
53
- an error.
54
- project_name : Optional[str]
55
- The LangSmith project name to be organize eval chain runs under.
56
43
  """
57
44
 
58
45
  name: str = "evaluator_callback_handler"
46
+ example_id: Optional[UUID] = None
47
+ """The example ID associated with the runs."""
48
+ client: langsmith.Client
49
+ """The LangSmith client instance used for evaluating the runs."""
50
+ evaluators: Sequence[langsmith.RunEvaluator] = ()
51
+ """The sequence of run evaluators to be executed."""
52
+ executor: Optional[ThreadPoolExecutor] = None
53
+ """The thread pool executor used for running the evaluators."""
54
+ futures: weakref.WeakSet[Future] = weakref.WeakSet()
55
+ """The set of futures representing the running evaluators."""
56
+ skip_unfinished: bool = True
57
+ """Whether to skip runs that are not finished or raised an error."""
58
+ project_name: Optional[str] = None
59
+ """The LangSmith project name to be organize eval chain runs under."""
60
+ logged_eval_results: dict[tuple[str, str], list[EvaluationResult]]
61
+ lock: threading.Lock
59
62
 
60
63
  def __init__(
61
64
  self,
@@ -91,7 +94,7 @@ class EvaluatorCallbackHandler(BaseTracer):
91
94
  self.client = client or langchain_tracer.get_client()
92
95
  self.evaluators = evaluators
93
96
  if max_concurrency is None:
94
- self.executor: Optional[ThreadPoolExecutor] = _get_executor()
97
+ self.executor = _get_executor()
95
98
  elif max_concurrency > 0:
96
99
  self.executor = ThreadPoolExecutor(max_workers=max_concurrency)
97
100
  weakref.finalize(
@@ -100,10 +103,10 @@ class EvaluatorCallbackHandler(BaseTracer):
100
103
  )
101
104
  else:
102
105
  self.executor = None
103
- self.futures: weakref.WeakSet[Future] = weakref.WeakSet()
106
+ self.futures = weakref.WeakSet()
104
107
  self.skip_unfinished = skip_unfinished
105
108
  self.project_name = project_name
106
- self.logged_eval_results: dict[tuple[str, str], list[EvaluationResult]] = {}
109
+ self.logged_eval_results = {}
107
110
  self.lock = threading.Lock()
108
111
  _TRACERS.add(self)
109
112
 
@@ -111,12 +114,8 @@ class EvaluatorCallbackHandler(BaseTracer):
111
114
  """Evaluate the run in the project.
112
115
 
113
116
  Args:
114
- ----------
115
- run : Run
116
- The run to be evaluated.
117
- evaluator : RunEvaluator
118
- The evaluator to use for evaluating the run.
119
-
117
+ run: The run to be evaluated.
118
+ evaluator: The evaluator to use for evaluating the run.
120
119
  """
121
120
  try:
122
121
  if self.project_name is None:
@@ -202,10 +201,7 @@ class EvaluatorCallbackHandler(BaseTracer):
202
201
  """Run the evaluator on the run.
203
202
 
204
203
  Args:
205
- ----------
206
- run : Run
207
- The run to be evaluated.
208
-
204
+ run: The run to be evaluated.
209
205
  """
210
206
  if self.skip_unfinished and not run.outputs:
211
207
  logger.debug("Skipping unfinished run %s", run.id)
@@ -9,21 +9,23 @@ from typing import (
9
9
  TYPE_CHECKING,
10
10
  Any,
11
11
  Optional,
12
+ TypedDict,
12
13
  TypeVar,
13
14
  Union,
14
15
  cast,
15
16
  )
16
17
  from uuid import UUID, uuid4
17
18
 
18
- from typing_extensions import NotRequired, TypedDict, override
19
+ from typing_extensions import NotRequired, override
19
20
 
20
- from langchain_core.callbacks.base import AsyncCallbackHandler
21
+ from langchain_core.callbacks.base import AsyncCallbackHandler, BaseCallbackManager
21
22
  from langchain_core.messages import AIMessageChunk, BaseMessage, BaseMessageChunk
22
23
  from langchain_core.outputs import (
23
24
  ChatGenerationChunk,
24
25
  GenerationChunk,
25
26
  LLMResult,
26
27
  )
28
+ from langchain_core.runnables import ensure_config
27
29
  from langchain_core.runnables.schema import (
28
30
  CustomStreamEvent,
29
31
  EventData,
@@ -36,6 +38,11 @@ from langchain_core.runnables.utils import (
36
38
  _RootEventFilter,
37
39
  )
38
40
  from langchain_core.tracers._streaming import _StreamingCallbackHandler
41
+ from langchain_core.tracers.log_stream import (
42
+ LogStreamCallbackHandler,
43
+ RunLog,
44
+ _astream_log_implementation,
45
+ )
39
46
  from langchain_core.tracers.memory_stream import _MemoryStream
40
47
  from langchain_core.utils.aiter import aclosing, py_anext
41
48
 
@@ -53,22 +60,20 @@ class RunInfo(TypedDict):
53
60
  """Information about a run.
54
61
 
55
62
  This is used to keep track of the metadata associated with a run.
56
-
57
- Parameters:
58
- name: The name of the run.
59
- tags: The tags associated with the run.
60
- metadata: The metadata associated with the run.
61
- run_type: The type of the run.
62
- inputs: The inputs to the run.
63
- parent_run_id: The ID of the parent run.
64
63
  """
65
64
 
66
65
  name: str
66
+ """The name of the run."""
67
67
  tags: list[str]
68
+ """The tags associated with the run."""
68
69
  metadata: dict[str, Any]
70
+ """The metadata associated with the run."""
69
71
  run_type: str
72
+ """The type of the run."""
70
73
  inputs: NotRequired[Any]
74
+ """The inputs to the run."""
71
75
  parent_run_id: Optional[UUID]
76
+ """The ID of the parent run."""
72
77
 
73
78
 
74
79
  def _assign_name(name: Optional[str], serialized: Optional[dict[str, Any]]) -> str:
@@ -155,7 +160,11 @@ class _AstreamEventsCallbackHandler(AsyncCallbackHandler, _StreamingCallbackHand
155
160
  self.send_stream.send_nowait(event)
156
161
 
157
162
  def __aiter__(self) -> AsyncIterator[Any]:
158
- """Iterate over the receive stream."""
163
+ """Iterate over the receive stream.
164
+
165
+ Returns:
166
+ An async iterator over the receive stream.
167
+ """
159
168
  return self.receive_stream.__aiter__()
160
169
 
161
170
  async def tap_output_aiter(
@@ -412,7 +421,6 @@ class _AstreamEventsCallbackHandler(AsyncCallbackHandler, _StreamingCallbackHand
412
421
  parent_run_id: Optional[UUID] = None,
413
422
  **kwargs: Any,
414
423
  ) -> None:
415
- """Run on new LLM token. Only available when streaming is enabled."""
416
424
  run_info = self.run_map.get(run_id)
417
425
  chunk_: Union[GenerationChunk, BaseMessageChunk]
418
426
 
@@ -458,7 +466,15 @@ class _AstreamEventsCallbackHandler(AsyncCallbackHandler, _StreamingCallbackHand
458
466
  async def on_llm_end(
459
467
  self, response: LLMResult, *, run_id: UUID, **kwargs: Any
460
468
  ) -> None:
461
- """End a trace for an LLM run."""
469
+ """End a trace for an LLM run.
470
+
471
+ Args:
472
+ response (LLMResult): The response which was generated.
473
+ run_id (UUID): The run ID. This is the ID of the current run.
474
+
475
+ Raises:
476
+ ValueError: If the run type is not ``'llm'`` or ``'chat_model'``.
477
+ """
462
478
  run_info = self.run_map.pop(run_id)
463
479
  inputs_ = run_info["inputs"]
464
480
 
@@ -636,7 +652,15 @@ class _AstreamEventsCallbackHandler(AsyncCallbackHandler, _StreamingCallbackHand
636
652
 
637
653
  @override
638
654
  async def on_tool_end(self, output: Any, *, run_id: UUID, **kwargs: Any) -> None:
639
- """End a trace for a tool run."""
655
+ """End a trace for a tool run.
656
+
657
+ Args:
658
+ output: The output of the tool.
659
+ run_id: The run ID. This is the ID of the current run.
660
+
661
+ Raises:
662
+ AssertionError: If the run ID is a tool call and does not have inputs
663
+ """
640
664
  run_info = self.run_map.pop(run_id)
641
665
  if "inputs" not in run_info:
642
666
  msg = (
@@ -730,11 +754,11 @@ class _AstreamEventsCallbackHandler(AsyncCallbackHandler, _StreamingCallbackHand
730
754
  )
731
755
 
732
756
  def __deepcopy__(self, memo: dict) -> _AstreamEventsCallbackHandler:
733
- """Deepcopy the tracer."""
757
+ """Return self."""
734
758
  return self
735
759
 
736
760
  def __copy__(self) -> _AstreamEventsCallbackHandler:
737
- """Copy the tracer."""
761
+ """Return self."""
738
762
  return self
739
763
 
740
764
 
@@ -751,14 +775,6 @@ async def _astream_events_implementation_v1(
751
775
  exclude_tags: Optional[Sequence[str]] = None,
752
776
  **kwargs: Any,
753
777
  ) -> AsyncIterator[StandardStreamEvent]:
754
- from langchain_core.runnables import ensure_config
755
- from langchain_core.runnables.utils import _RootEventFilter
756
- from langchain_core.tracers.log_stream import (
757
- LogStreamCallbackHandler,
758
- RunLog,
759
- _astream_log_implementation,
760
- )
761
-
762
778
  stream = LogStreamCallbackHandler(
763
779
  auto_close=False,
764
780
  include_names=include_names,
@@ -936,9 +952,6 @@ async def _astream_events_implementation_v2(
936
952
  **kwargs: Any,
937
953
  ) -> AsyncIterator[StandardStreamEvent]:
938
954
  """Implementation of the astream events API for V2 runnables."""
939
- from langchain_core.callbacks.base import BaseCallbackManager
940
- from langchain_core.runnables import ensure_config
941
-
942
955
  event_streamer = _AstreamEventsCallbackHandler(
943
956
  include_names=include_names,
944
957
  include_types=include_types,
@@ -999,7 +1012,7 @@ async def _astream_events_implementation_v2(
999
1012
  continue
1000
1013
 
1001
1014
  # If it's the end event corresponding to the root runnable
1002
- # we dont include the input in the event since it's guaranteed
1015
+ # we don't include the input in the event since it's guaranteed
1003
1016
  # to be included in the first event.
1004
1017
  if (
1005
1018
  event["run_id"] == first_event_run_id
@@ -8,7 +8,7 @@ from datetime import datetime, timezone
8
8
  from typing import TYPE_CHECKING, Any, Optional, Union
9
9
  from uuid import UUID
10
10
 
11
- from langsmith import Client
11
+ from langsmith import Client, get_tracing_context
12
12
  from langsmith import run_trees as rt
13
13
  from langsmith import utils as ls_utils
14
14
  from tenacity import (
@@ -53,7 +53,11 @@ def wait_for_all_tracers() -> None:
53
53
 
54
54
 
55
55
  def get_client() -> Client:
56
- """Get the client."""
56
+ """Get the client.
57
+
58
+ Returns:
59
+ The LangSmith client.
60
+ """
57
61
  return rt.get_cached_client()
58
62
 
59
63
 
@@ -109,6 +113,8 @@ class LangChainTracer(BaseTracer):
109
113
  super()._start_trace(run)
110
114
  if run.ls_client is None:
111
115
  run.ls_client = self.client
116
+ if get_tracing_context().get("enabled") is False:
117
+ run.extra["__disabled"] = True
112
118
 
113
119
  def on_chat_model_start(
114
120
  self,
@@ -201,6 +207,8 @@ class LangChainTracer(BaseTracer):
201
207
 
202
208
  def _persist_run_single(self, run: Run) -> None:
203
209
  """Persist a run."""
210
+ if run.extra.get("__disabled"):
211
+ return
204
212
  try:
205
213
  run.extra["runtime"] = get_runtime_environment()
206
214
  run.tags = self._get_tags(run)
@@ -214,6 +222,8 @@ class LangChainTracer(BaseTracer):
214
222
 
215
223
  def _update_run_single(self, run: Run) -> None:
216
224
  """Update a run."""
225
+ if run.extra.get("__disabled"):
226
+ return
217
227
  try:
218
228
  run.patch(exclude_inputs=run.extra.get("inputs_is_truthy", False))
219
229
  except Exception as e:
@@ -235,7 +245,6 @@ class LangChainTracer(BaseTracer):
235
245
  chunk: Optional[Union[GenerationChunk, ChatGenerationChunk]] = None,
236
246
  parent_run_id: Optional[UUID] = None,
237
247
  ) -> Run:
238
- """Append token event to LLM run and return the run."""
239
248
  run_id_str = str(run_id)
240
249
  if run_id_str not in self.run_has_token_event_map:
241
250
  self.run_has_token_event_map[run_id_str] = True
@@ -7,7 +7,11 @@ from typing import Any
7
7
 
8
8
 
9
9
  def get_headers(*args: Any, **kwargs: Any) -> Any: # noqa: ARG001
10
- """Throw an error because this has been replaced by get_headers."""
10
+ """Throw an error because this has been replaced by get_headers.
11
+
12
+ Raises:
13
+ RuntimeError: Always, because this function is deprecated.
14
+ """
11
15
  msg = (
12
16
  "get_headers for LangChainTracerV1 is no longer supported. "
13
17
  "Please use LangChainTracer instead."
@@ -16,7 +20,11 @@ def get_headers(*args: Any, **kwargs: Any) -> Any: # noqa: ARG001
16
20
 
17
21
 
18
22
  def LangChainTracerV1(*args: Any, **kwargs: Any) -> Any: # noqa: N802,ARG001
19
- """Throw an error because this has been replaced by LangChainTracer."""
23
+ """Throw an error because this has been replaced by ``LangChainTracer``.
24
+
25
+ Raises:
26
+ RuntimeError: Always, because this class is deprecated.
27
+ """
20
28
  msg = (
21
29
  "LangChainTracerV1 is no longer supported. Please use LangChainTracer instead."
22
30
  )