langchain-core 1.0.0a6__py3-none-any.whl → 1.0.0a8__py3-none-any.whl

This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registry.

Potentially problematic release: this version of langchain-core might be problematic.

Files changed (131)
  1. langchain_core/_api/__init__.py +3 -3
  2. langchain_core/_api/beta_decorator.py +6 -6
  3. langchain_core/_api/deprecation.py +21 -29
  4. langchain_core/_api/path.py +3 -6
  5. langchain_core/_import_utils.py +2 -3
  6. langchain_core/agents.py +10 -11
  7. langchain_core/caches.py +7 -7
  8. langchain_core/callbacks/base.py +91 -91
  9. langchain_core/callbacks/file.py +11 -11
  10. langchain_core/callbacks/manager.py +86 -89
  11. langchain_core/callbacks/stdout.py +8 -8
  12. langchain_core/callbacks/usage.py +4 -4
  13. langchain_core/chat_history.py +5 -5
  14. langchain_core/document_loaders/base.py +2 -2
  15. langchain_core/document_loaders/langsmith.py +15 -15
  16. langchain_core/documents/base.py +16 -16
  17. langchain_core/documents/compressor.py +4 -4
  18. langchain_core/example_selectors/length_based.py +1 -1
  19. langchain_core/example_selectors/semantic_similarity.py +17 -19
  20. langchain_core/exceptions.py +3 -3
  21. langchain_core/globals.py +3 -151
  22. langchain_core/indexing/api.py +44 -43
  23. langchain_core/indexing/base.py +30 -30
  24. langchain_core/indexing/in_memory.py +3 -3
  25. langchain_core/language_models/_utils.py +5 -7
  26. langchain_core/language_models/base.py +18 -132
  27. langchain_core/language_models/chat_models.py +118 -227
  28. langchain_core/language_models/fake.py +11 -11
  29. langchain_core/language_models/fake_chat_models.py +35 -29
  30. langchain_core/language_models/llms.py +91 -201
  31. langchain_core/load/dump.py +1 -1
  32. langchain_core/load/load.py +11 -12
  33. langchain_core/load/mapping.py +2 -4
  34. langchain_core/load/serializable.py +2 -4
  35. langchain_core/messages/ai.py +17 -20
  36. langchain_core/messages/base.py +23 -25
  37. langchain_core/messages/block_translators/__init__.py +2 -5
  38. langchain_core/messages/block_translators/anthropic.py +3 -3
  39. langchain_core/messages/block_translators/bedrock_converse.py +2 -2
  40. langchain_core/messages/block_translators/langchain_v0.py +2 -2
  41. langchain_core/messages/block_translators/openai.py +6 -6
  42. langchain_core/messages/content.py +120 -124
  43. langchain_core/messages/human.py +7 -7
  44. langchain_core/messages/system.py +7 -7
  45. langchain_core/messages/tool.py +24 -24
  46. langchain_core/messages/utils.py +67 -79
  47. langchain_core/output_parsers/base.py +12 -14
  48. langchain_core/output_parsers/json.py +4 -4
  49. langchain_core/output_parsers/list.py +3 -5
  50. langchain_core/output_parsers/openai_functions.py +3 -3
  51. langchain_core/output_parsers/openai_tools.py +3 -3
  52. langchain_core/output_parsers/pydantic.py +2 -2
  53. langchain_core/output_parsers/transform.py +13 -15
  54. langchain_core/output_parsers/xml.py +7 -9
  55. langchain_core/outputs/chat_generation.py +4 -4
  56. langchain_core/outputs/chat_result.py +1 -3
  57. langchain_core/outputs/generation.py +2 -2
  58. langchain_core/outputs/llm_result.py +5 -5
  59. langchain_core/prompts/__init__.py +1 -5
  60. langchain_core/prompts/base.py +10 -15
  61. langchain_core/prompts/chat.py +31 -82
  62. langchain_core/prompts/dict.py +2 -2
  63. langchain_core/prompts/few_shot.py +5 -5
  64. langchain_core/prompts/few_shot_with_templates.py +4 -4
  65. langchain_core/prompts/loading.py +3 -5
  66. langchain_core/prompts/prompt.py +4 -16
  67. langchain_core/prompts/string.py +2 -1
  68. langchain_core/prompts/structured.py +16 -23
  69. langchain_core/rate_limiters.py +3 -4
  70. langchain_core/retrievers.py +14 -14
  71. langchain_core/runnables/base.py +928 -1042
  72. langchain_core/runnables/branch.py +36 -40
  73. langchain_core/runnables/config.py +27 -35
  74. langchain_core/runnables/configurable.py +108 -124
  75. langchain_core/runnables/fallbacks.py +76 -72
  76. langchain_core/runnables/graph.py +39 -45
  77. langchain_core/runnables/graph_ascii.py +9 -11
  78. langchain_core/runnables/graph_mermaid.py +18 -19
  79. langchain_core/runnables/graph_png.py +8 -9
  80. langchain_core/runnables/history.py +114 -127
  81. langchain_core/runnables/passthrough.py +113 -139
  82. langchain_core/runnables/retry.py +43 -48
  83. langchain_core/runnables/router.py +23 -28
  84. langchain_core/runnables/schema.py +42 -44
  85. langchain_core/runnables/utils.py +28 -31
  86. langchain_core/stores.py +9 -13
  87. langchain_core/structured_query.py +8 -8
  88. langchain_core/tools/base.py +62 -115
  89. langchain_core/tools/convert.py +31 -35
  90. langchain_core/tools/render.py +1 -1
  91. langchain_core/tools/retriever.py +4 -4
  92. langchain_core/tools/simple.py +13 -17
  93. langchain_core/tools/structured.py +12 -15
  94. langchain_core/tracers/base.py +62 -64
  95. langchain_core/tracers/context.py +17 -35
  96. langchain_core/tracers/core.py +49 -53
  97. langchain_core/tracers/evaluation.py +11 -11
  98. langchain_core/tracers/event_stream.py +58 -60
  99. langchain_core/tracers/langchain.py +13 -13
  100. langchain_core/tracers/log_stream.py +22 -24
  101. langchain_core/tracers/root_listeners.py +14 -14
  102. langchain_core/tracers/run_collector.py +2 -4
  103. langchain_core/tracers/schemas.py +8 -8
  104. langchain_core/tracers/stdout.py +2 -1
  105. langchain_core/utils/__init__.py +0 -3
  106. langchain_core/utils/_merge.py +2 -2
  107. langchain_core/utils/aiter.py +24 -28
  108. langchain_core/utils/env.py +4 -4
  109. langchain_core/utils/function_calling.py +31 -41
  110. langchain_core/utils/html.py +3 -4
  111. langchain_core/utils/input.py +3 -3
  112. langchain_core/utils/iter.py +15 -19
  113. langchain_core/utils/json.py +3 -2
  114. langchain_core/utils/json_schema.py +6 -6
  115. langchain_core/utils/mustache.py +3 -5
  116. langchain_core/utils/pydantic.py +16 -18
  117. langchain_core/utils/usage.py +1 -1
  118. langchain_core/utils/utils.py +29 -29
  119. langchain_core/vectorstores/base.py +18 -21
  120. langchain_core/vectorstores/in_memory.py +14 -87
  121. langchain_core/vectorstores/utils.py +2 -2
  122. langchain_core/version.py +1 -1
  123. {langchain_core-1.0.0a6.dist-info → langchain_core-1.0.0a8.dist-info}/METADATA +10 -21
  124. langchain_core-1.0.0a8.dist-info/RECORD +176 -0
  125. {langchain_core-1.0.0a6.dist-info → langchain_core-1.0.0a8.dist-info}/WHEEL +1 -1
  126. langchain_core/messages/block_translators/ollama.py +0 -47
  127. langchain_core/prompts/pipeline.py +0 -138
  128. langchain_core/tracers/langchain_v1.py +0 -31
  129. langchain_core/utils/loading.py +0 -35
  130. langchain_core-1.0.0a6.dist-info/RECORD +0 -181
  131. langchain_core-1.0.0a6.dist-info/entry_points.txt +0 -4
langchain_core/tools/simple.py

@@ -2,14 +2,11 @@
 
 from __future__ import annotations
 
-from collections.abc import Awaitable
+from collections.abc import Awaitable, Callable
 from inspect import signature
 from typing import (
     TYPE_CHECKING,
     Any,
-    Callable,
-    Optional,
-    Union,
 )
 
 from typing_extensions import override
@@ -34,9 +31,9 @@ class Tool(BaseTool):
     """Tool that takes in function or coroutine directly."""
 
     description: str = ""
-    func: Optional[Callable[..., str]]
+    func: Callable[..., str] | None
     """The function to run when the tool is called."""
-    coroutine: Optional[Callable[..., Awaitable[str]]] = None
+    coroutine: Callable[..., Awaitable[str]] | None = None
    """The asynchronous version of the function."""
 
     # --- Runnable ---
@@ -44,8 +41,8 @@ class Tool(BaseTool):
     @override
     async def ainvoke(
         self,
-        input: Union[str, dict, ToolCall],
-        config: Optional[RunnableConfig] = None,
+        input: str | dict | ToolCall,
+        config: RunnableConfig | None = None,
         **kwargs: Any,
     ) -> Any:
         if not self.coroutine:
@@ -70,7 +67,7 @@ class Tool(BaseTool):
         return {"tool_input": {"type": "string"}}
 
     def _to_args_and_kwargs(
-        self, tool_input: Union[str, dict], tool_call_id: Optional[str]
+        self, tool_input: str | dict, tool_call_id: str | None
     ) -> tuple[tuple, dict]:
         """Convert tool input to pydantic model.
 
@@ -101,7 +98,7 @@ class Tool(BaseTool):
         self,
         *args: Any,
         config: RunnableConfig,
-        run_manager: Optional[CallbackManagerForToolRun] = None,
+        run_manager: CallbackManagerForToolRun | None = None,
         **kwargs: Any,
     ) -> Any:
         """Use the tool.
@@ -128,7 +125,7 @@ class Tool(BaseTool):
         self,
         *args: Any,
         config: RunnableConfig,
-        run_manager: Optional[AsyncCallbackManagerForToolRun] = None,
+        run_manager: AsyncCallbackManagerForToolRun | None = None,
         **kwargs: Any,
     ) -> Any:
         """Use the tool asynchronously.
@@ -157,7 +154,7 @@ class Tool(BaseTool):
 
     # TODO: this is for backwards compatibility, remove in future
     def __init__(
-        self, name: str, func: Optional[Callable], description: str, **kwargs: Any
+        self, name: str, func: Callable | None, description: str, **kwargs: Any
     ) -> None:
         """Initialize tool."""
         super().__init__(name=name, func=func, description=description, **kwargs)
@@ -165,14 +162,13 @@ class Tool(BaseTool):
     @classmethod
     def from_function(
         cls,
-        func: Optional[Callable],
+        func: Callable | None,
         name: str,  # We keep these required to support backwards compatibility
         description: str,
         return_direct: bool = False,  # noqa: FBT001,FBT002
-        args_schema: Optional[ArgsSchema] = None,
-        coroutine: Optional[
-            Callable[..., Awaitable[Any]]
-        ] = None,  # This is last for compatibility, but should be after func
+        args_schema: ArgsSchema | None = None,
+        coroutine: Callable[..., Awaitable[Any]]
+        | None = None,  # This is last for compatibility, but should be after func
         **kwargs: Any,
     ) -> Tool:
         """Initialize tool from a function.
langchain_core/tools/structured.py

@@ -3,16 +3,13 @@
 from __future__ import annotations
 
 import textwrap
-from collections.abc import Awaitable
+from collections.abc import Awaitable, Callable
 from inspect import signature
 from typing import (
     TYPE_CHECKING,
     Annotated,
     Any,
-    Callable,
     Literal,
-    Optional,
-    Union,
 )
 
 from pydantic import Field, SkipValidation
@@ -44,9 +41,9 @@ class StructuredTool(BaseTool):
         ..., description="The tool schema."
     )
     """The input arguments' schema."""
-    func: Optional[Callable[..., Any]] = None
+    func: Callable[..., Any] | None = None
     """The function to run when the tool is called."""
-    coroutine: Optional[Callable[..., Awaitable[Any]]] = None
+    coroutine: Callable[..., Awaitable[Any]] | None = None
     """The asynchronous version of the function."""
 
     # --- Runnable ---
@@ -55,8 +52,8 @@ class StructuredTool(BaseTool):
     @override
     async def ainvoke(
         self,
-        input: Union[str, dict, ToolCall],
-        config: Optional[RunnableConfig] = None,
+        input: str | dict | ToolCall,
+        config: RunnableConfig | None = None,
         **kwargs: Any,
     ) -> Any:
         if not self.coroutine:
@@ -71,7 +68,7 @@ class StructuredTool(BaseTool):
         self,
         *args: Any,
         config: RunnableConfig,
-        run_manager: Optional[CallbackManagerForToolRun] = None,
+        run_manager: CallbackManagerForToolRun | None = None,
         **kwargs: Any,
     ) -> Any:
         """Use the tool.
@@ -98,7 +95,7 @@ class StructuredTool(BaseTool):
         self,
         *args: Any,
         config: RunnableConfig,
-        run_manager: Optional[AsyncCallbackManagerForToolRun] = None,
+        run_manager: AsyncCallbackManagerForToolRun | None = None,
         **kwargs: Any,
     ) -> Any:
         """Use the tool asynchronously.
@@ -128,12 +125,12 @@ class StructuredTool(BaseTool):
     @classmethod
     def from_function(
         cls,
-        func: Optional[Callable] = None,
-        coroutine: Optional[Callable[..., Awaitable[Any]]] = None,
-        name: Optional[str] = None,
-        description: Optional[str] = None,
+        func: Callable | None = None,
+        coroutine: Callable[..., Awaitable[Any]] | None = None,
+        name: str | None = None,
+        description: str | None = None,
         return_direct: bool = False,  # noqa: FBT001,FBT002
-        args_schema: Optional[ArgsSchema] = None,
+        args_schema: ArgsSchema | None = None,
         infer_schema: bool = True,  # noqa: FBT001,FBT002
         *,
         response_format: Literal["content", "content_and_artifact"] = "content",
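
StructuredTool gets the identical treatment, and from_function keeps its None defaults for func, coroutine, name, description, and args_schema, just spelled as unions. A short usage sketch against that signature, with a made-up multiply function (name and schema are inferred from the function while infer_schema stays True):

    from langchain_core.tools import StructuredTool


    def multiply(a: int, b: int) -> int:
        """Multiply two integers."""
        return a * b


    # description is passed explicitly here; from_function can also fall back to the docstring.
    tool = StructuredTool.from_function(func=multiply, description="Multiply two integers.")
    print(tool.invoke({"a": 6, "b": 7}))  # -> 42
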
langchain_core/tracers/base.py

@@ -8,8 +8,6 @@ from abc import ABC, abstractmethod
 from typing import (
     TYPE_CHECKING,
     Any,
-    Optional,
-    Union,
 )
 
 from typing_extensions import override
@@ -57,10 +55,10 @@ class BaseTracer(_TracerCore, BaseCallbackHandler, ABC):
         messages: list[list[BaseMessage]],
         *,
         run_id: UUID,
-        tags: Optional[list[str]] = None,
-        parent_run_id: Optional[UUID] = None,
-        metadata: Optional[dict[str, Any]] = None,
-        name: Optional[str] = None,
+        tags: list[str] | None = None,
+        parent_run_id: UUID | None = None,
+        metadata: dict[str, Any] | None = None,
+        name: str | None = None,
         **kwargs: Any,
     ) -> Run:
         """Start a trace for an LLM run.
@@ -98,10 +96,10 @@ class BaseTracer(_TracerCore, BaseCallbackHandler, ABC):
         prompts: list[str],
         *,
         run_id: UUID,
-        tags: Optional[list[str]] = None,
-        parent_run_id: Optional[UUID] = None,
-        metadata: Optional[dict[str, Any]] = None,
-        name: Optional[str] = None,
+        tags: list[str] | None = None,
+        parent_run_id: UUID | None = None,
+        metadata: dict[str, Any] | None = None,
+        name: str | None = None,
         **kwargs: Any,
     ) -> Run:
         """Start a trace for an LLM run.
@@ -138,9 +136,9 @@ class BaseTracer(_TracerCore, BaseCallbackHandler, ABC):
         self,
         token: str,
         *,
-        chunk: Optional[Union[GenerationChunk, ChatGenerationChunk]] = None,
+        chunk: GenerationChunk | ChatGenerationChunk | None = None,
         run_id: UUID,
-        parent_run_id: Optional[UUID] = None,
+        parent_run_id: UUID | None = None,
         **kwargs: Any,
     ) -> Run:
         """Run on new LLM token. Only available when streaming is enabled.
@@ -244,11 +242,11 @@ class BaseTracer(_TracerCore, BaseCallbackHandler, ABC):
         inputs: dict[str, Any],
         *,
         run_id: UUID,
-        tags: Optional[list[str]] = None,
-        parent_run_id: Optional[UUID] = None,
-        metadata: Optional[dict[str, Any]] = None,
-        run_type: Optional[str] = None,
-        name: Optional[str] = None,
+        tags: list[str] | None = None,
+        parent_run_id: UUID | None = None,
+        metadata: dict[str, Any] | None = None,
+        run_type: str | None = None,
+        name: str | None = None,
         **kwargs: Any,
     ) -> Run:
         """Start a trace for a chain run.
@@ -288,7 +286,7 @@ class BaseTracer(_TracerCore, BaseCallbackHandler, ABC):
         outputs: dict[str, Any],
         *,
         run_id: UUID,
-        inputs: Optional[dict[str, Any]] = None,
+        inputs: dict[str, Any] | None = None,
         **kwargs: Any,
     ) -> Run:
         """End a trace for a chain run.
@@ -316,7 +314,7 @@ class BaseTracer(_TracerCore, BaseCallbackHandler, ABC):
         self,
         error: BaseException,
         *,
-        inputs: Optional[dict[str, Any]] = None,
+        inputs: dict[str, Any] | None = None,
         run_id: UUID,
         **kwargs: Any,
     ) -> Run:
@@ -346,11 +344,11 @@ class BaseTracer(_TracerCore, BaseCallbackHandler, ABC):
         input_str: str,
         *,
         run_id: UUID,
-        tags: Optional[list[str]] = None,
-        parent_run_id: Optional[UUID] = None,
-        metadata: Optional[dict[str, Any]] = None,
-        name: Optional[str] = None,
-        inputs: Optional[dict[str, Any]] = None,
+        tags: list[str] | None = None,
+        parent_run_id: UUID | None = None,
+        metadata: dict[str, Any] | None = None,
+        name: str | None = None,
+        inputs: dict[str, Any] | None = None,
         **kwargs: Any,
     ) -> Run:
         """Start a trace for a tool run.
@@ -436,10 +434,10 @@ class BaseTracer(_TracerCore, BaseCallbackHandler, ABC):
         query: str,
         *,
         run_id: UUID,
-        parent_run_id: Optional[UUID] = None,
-        tags: Optional[list[str]] = None,
-        metadata: Optional[dict[str, Any]] = None,
-        name: Optional[str] = None,
+        parent_run_id: UUID | None = None,
+        tags: list[str] | None = None,
+        metadata: dict[str, Any] | None = None,
+        name: str | None = None,
         **kwargs: Any,
     ) -> Run:
         """Run when the Retriever starts running.
@@ -565,10 +563,10 @@ class AsyncBaseTracer(_TracerCore, AsyncCallbackHandler, ABC):
         messages: list[list[BaseMessage]],
         *,
         run_id: UUID,
-        parent_run_id: Optional[UUID] = None,
-        tags: Optional[list[str]] = None,
-        metadata: Optional[dict[str, Any]] = None,
-        name: Optional[str] = None,
+        parent_run_id: UUID | None = None,
+        tags: list[str] | None = None,
+        metadata: dict[str, Any] | None = None,
+        name: str | None = None,
         **kwargs: Any,
     ) -> Any:
         chat_model_run = self._create_chat_model_run(
@@ -595,9 +593,9 @@ class AsyncBaseTracer(_TracerCore, AsyncCallbackHandler, ABC):
         prompts: list[str],
         *,
         run_id: UUID,
-        parent_run_id: Optional[UUID] = None,
-        tags: Optional[list[str]] = None,
-        metadata: Optional[dict[str, Any]] = None,
+        parent_run_id: UUID | None = None,
+        tags: list[str] | None = None,
+        metadata: dict[str, Any] | None = None,
         **kwargs: Any,
     ) -> None:
         llm_run = self._create_llm_run(
@@ -617,9 +615,9 @@ class AsyncBaseTracer(_TracerCore, AsyncCallbackHandler, ABC):
         self,
         token: str,
         *,
-        chunk: Optional[Union[GenerationChunk, ChatGenerationChunk]] = None,
+        chunk: GenerationChunk | ChatGenerationChunk | None = None,
         run_id: UUID,
-        parent_run_id: Optional[UUID] = None,
+        parent_run_id: UUID | None = None,
         **kwargs: Any,
     ) -> None:
         llm_run = self._llm_run_with_token_event(
@@ -649,8 +647,8 @@ class AsyncBaseTracer(_TracerCore, AsyncCallbackHandler, ABC):
         response: LLMResult,
         *,
         run_id: UUID,
-        parent_run_id: Optional[UUID] = None,
-        tags: Optional[list[str]] = None,
+        parent_run_id: UUID | None = None,
+        tags: list[str] | None = None,
         **kwargs: Any,
     ) -> None:
         llm_run = self._complete_llm_run(
@@ -666,8 +664,8 @@ class AsyncBaseTracer(_TracerCore, AsyncCallbackHandler, ABC):
         error: BaseException,
         *,
         run_id: UUID,
-        parent_run_id: Optional[UUID] = None,
-        tags: Optional[list[str]] = None,
+        parent_run_id: UUID | None = None,
+        tags: list[str] | None = None,
         **kwargs: Any,
     ) -> None:
         llm_run = self._errored_llm_run(
@@ -684,11 +682,11 @@ class AsyncBaseTracer(_TracerCore, AsyncCallbackHandler, ABC):
         inputs: dict[str, Any],
         *,
         run_id: UUID,
-        tags: Optional[list[str]] = None,
-        parent_run_id: Optional[UUID] = None,
-        metadata: Optional[dict[str, Any]] = None,
-        run_type: Optional[str] = None,
-        name: Optional[str] = None,
+        tags: list[str] | None = None,
+        parent_run_id: UUID | None = None,
+        metadata: dict[str, Any] | None = None,
+        run_type: str | None = None,
+        name: str | None = None,
         **kwargs: Any,
     ) -> None:
         chain_run = self._create_chain_run(
@@ -711,7 +709,7 @@ class AsyncBaseTracer(_TracerCore, AsyncCallbackHandler, ABC):
         outputs: dict[str, Any],
         *,
         run_id: UUID,
-        inputs: Optional[dict[str, Any]] = None,
+        inputs: dict[str, Any] | None = None,
         **kwargs: Any,
     ) -> None:
         chain_run = self._complete_chain_run(
@@ -727,7 +725,7 @@ class AsyncBaseTracer(_TracerCore, AsyncCallbackHandler, ABC):
         self,
         error: BaseException,
         *,
-        inputs: Optional[dict[str, Any]] = None,
+        inputs: dict[str, Any] | None = None,
         run_id: UUID,
         **kwargs: Any,
     ) -> None:
@@ -746,11 +744,11 @@ class AsyncBaseTracer(_TracerCore, AsyncCallbackHandler, ABC):
         input_str: str,
         *,
         run_id: UUID,
-        tags: Optional[list[str]] = None,
-        parent_run_id: Optional[UUID] = None,
-        metadata: Optional[dict[str, Any]] = None,
-        name: Optional[str] = None,
-        inputs: Optional[dict[str, Any]] = None,
+        tags: list[str] | None = None,
+        parent_run_id: UUID | None = None,
+        metadata: dict[str, Any] | None = None,
+        name: str | None = None,
+        inputs: dict[str, Any] | None = None,
         **kwargs: Any,
     ) -> None:
         tool_run = self._create_tool_run(
@@ -787,8 +785,8 @@ class AsyncBaseTracer(_TracerCore, AsyncCallbackHandler, ABC):
         error: BaseException,
         *,
         run_id: UUID,
-        parent_run_id: Optional[UUID] = None,
-        tags: Optional[list[str]] = None,
+        parent_run_id: UUID | None = None,
+        tags: list[str] | None = None,
         **kwargs: Any,
     ) -> None:
         tool_run = self._errored_tool_run(
@@ -805,10 +803,10 @@ class AsyncBaseTracer(_TracerCore, AsyncCallbackHandler, ABC):
         query: str,
         *,
         run_id: UUID,
-        parent_run_id: Optional[UUID] = None,
-        tags: Optional[list[str]] = None,
-        metadata: Optional[dict[str, Any]] = None,
-        name: Optional[str] = None,
+        parent_run_id: UUID | None = None,
+        tags: list[str] | None = None,
+        metadata: dict[str, Any] | None = None,
+        name: str | None = None,
         **kwargs: Any,
     ) -> None:
         retriever_run = self._create_retrieval_run(
@@ -832,8 +830,8 @@ class AsyncBaseTracer(_TracerCore, AsyncCallbackHandler, ABC):
         error: BaseException,
         *,
         run_id: UUID,
-        parent_run_id: Optional[UUID] = None,
-        tags: Optional[list[str]] = None,
+        parent_run_id: UUID | None = None,
+        tags: list[str] | None = None,
         **kwargs: Any,
     ) -> None:
         retrieval_run = self._errored_retrieval_run(
@@ -852,8 +850,8 @@ class AsyncBaseTracer(_TracerCore, AsyncCallbackHandler, ABC):
         documents: Sequence[Document],
         *,
         run_id: UUID,
-        parent_run_id: Optional[UUID] = None,
-        tags: Optional[list[str]] = None,
+        parent_run_id: UUID | None = None,
+        tags: list[str] | None = None,
         **kwargs: Any,
     ) -> None:
         retrieval_run = self._complete_retrieval_run(
@@ -882,7 +880,7 @@ class AsyncBaseTracer(_TracerCore, AsyncCallbackHandler, ABC):
         self,
         run: Run,
         token: str,
-        chunk: Optional[Union[GenerationChunk, ChatGenerationChunk]],
+        chunk: GenerationChunk | ChatGenerationChunk | None,
     ) -> None:
         """Process new LLM token."""
 
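
The BaseTracer and AsyncBaseTracer hunks above touch only parameter annotations; the callback surface itself is unchanged. For orientation, a rough sketch of a concrete tracer, assuming _persist_run is still the one abstract hook a subclass must supply (the PrintTracer name is made up):

    from typing_extensions import override

    from langchain_core.tracers.base import BaseTracer
    from langchain_core.tracers.schemas import Run


    class PrintTracer(BaseTracer):
        """Toy tracer for illustration: prints each run handed to _persist_run."""

        @override
        def _persist_run(self, run: Run) -> None:
            print(run.run_type, run.name, "error" if run.error else "ok")


    # Attach it like any other callback handler, e.g.:
    #   some_runnable.invoke(value, config={"callbacks": [PrintTracer()]})
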
langchain_core/tracers/context.py

@@ -8,8 +8,6 @@ from typing import (
     TYPE_CHECKING,
     Any,
     Literal,
-    Optional,
-    Union,
     cast,
 )
 from uuid import UUID
@@ -27,40 +25,24 @@ if TYPE_CHECKING:
 
     from langchain_core.callbacks.base import BaseCallbackHandler, Callbacks
     from langchain_core.callbacks.manager import AsyncCallbackManager, CallbackManager
-    from langchain_core.tracers.schemas import TracerSessionV1
 
 # for backwards partial compatibility if this is imported by users but unused
 tracing_callback_var: Any = None
-tracing_v2_callback_var: ContextVar[Optional[LangChainTracer]] = ContextVar(
+tracing_v2_callback_var: ContextVar[LangChainTracer | None] = ContextVar(
     "tracing_callback_v2", default=None
 )
-run_collector_var: ContextVar[Optional[RunCollectorCallbackHandler]] = ContextVar(
+run_collector_var: ContextVar[RunCollectorCallbackHandler | None] = ContextVar(
     "run_collector", default=None
 )
 
 
-@contextmanager
-def tracing_enabled(
-    session_name: str = "default",  # noqa: ARG001
-) -> Generator[TracerSessionV1, None, None]:
-    """Throw an error because this has been replaced by ``tracing_v2_enabled``.
-
-    Raises:
-        RuntimeError: Always, because this function is deprecated.
-    """
-    msg = (
-        "tracing_enabled is no longer supported. Please use tracing_enabled_v2 instead."
-    )
-    raise RuntimeError(msg)
-
-
 @contextmanager
 def tracing_v2_enabled(
-    project_name: Optional[str] = None,
+    project_name: str | None = None,
     *,
-    example_id: Optional[Union[str, UUID]] = None,
-    tags: Optional[list[str]] = None,
-    client: Optional[LangSmithClient] = None,
+    example_id: str | UUID | None = None,
+    tags: list[str] | None = None,
+    client: LangSmithClient | None = None,
 ) -> Generator[LangChainTracer, None, None]:
     """Instruct LangChain to log all runs in context to LangSmith.
 
@@ -123,9 +105,9 @@ def collect_runs() -> Generator[RunCollectorCallbackHandler, None, None]:
 
 
 def _get_trace_callbacks(
-    project_name: Optional[str] = None,
-    example_id: Optional[Union[str, UUID]] = None,
-    callback_manager: Optional[Union[CallbackManager, AsyncCallbackManager]] = None,
+    project_name: str | None = None,
+    example_id: str | UUID | None = None,
+    callback_manager: CallbackManager | AsyncCallbackManager | None = None,
 ) -> Callbacks:
     if _tracing_v2_is_enabled():
         project_name_ = project_name or _get_tracer_project()
@@ -149,7 +131,7 @@ def _get_trace_callbacks(
     return cb
 
 
-def _tracing_v2_is_enabled() -> Union[bool, Literal["local"]]:
+def _tracing_v2_is_enabled() -> bool | Literal["local"]:
     if tracing_v2_callback_var.get() is not None:
         return True
     return ls_utils.tracing_is_enabled()
@@ -180,19 +162,19 @@ def _get_tracer_project() -> str:
 
 _configure_hooks: list[
     tuple[
-        ContextVar[Optional[BaseCallbackHandler]],
+        ContextVar[BaseCallbackHandler | None],
         bool,
-        Optional[type[BaseCallbackHandler]],
-        Optional[str],
+        type[BaseCallbackHandler] | None,
+        str | None,
     ]
 ] = []
 
 
 def register_configure_hook(
-    context_var: ContextVar[Optional[Any]],
+    context_var: ContextVar[Any | None],
     inheritable: bool,  # noqa: FBT001
-    handle_class: Optional[type[BaseCallbackHandler]] = None,
-    env_var: Optional[str] = None,
+    handle_class: type[BaseCallbackHandler] | None = None,
+    env_var: str | None = None,
 ) -> None:
     """Register a configure hook.
 
@@ -215,7 +197,7 @@ def register_configure_hook(
         (
             # the typings of ContextVar do not have the generic arg set as covariant
            # so we have to cast it
-            cast("ContextVar[Optional[BaseCallbackHandler]]", context_var),
+            cast("ContextVar[BaseCallbackHandler | None]", context_var),
             inheritable,
             handle_class,
             env_var,
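
Beyond the annotation changes, this hunk deletes the tracing_enabled stub (which did nothing but raise RuntimeError pointing at its replacement) together with the now-unused TracerSessionV1 import, leaving tracing_v2_enabled and collect_runs as the supported context managers. A usage sketch, assuming LangSmith credentials are configured in the environment; my_chain is a placeholder for any Runnable:

    from langchain_core.runnables import RunnableLambda
    from langchain_core.tracers.context import collect_runs, tracing_v2_enabled

    my_chain = RunnableLambda(lambda x: x + 1)  # stand-in for any Runnable

    # project_name is now annotated str | None; omitting it uses the default project.
    with tracing_v2_enabled(project_name="my-project"):
        with collect_runs() as run_collector:
            my_chain.invoke(1)
        # collect_runs also gathers the traced runs locally, independent of the upload.
        print(len(run_collector.traced_runs))
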