langchain-core 1.0.0a5__py3-none-any.whl → 1.0.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (165)
  1. langchain_core/__init__.py +1 -1
  2. langchain_core/_api/__init__.py +3 -4
  3. langchain_core/_api/beta_decorator.py +23 -26
  4. langchain_core/_api/deprecation.py +51 -64
  5. langchain_core/_api/path.py +3 -6
  6. langchain_core/_import_utils.py +3 -4
  7. langchain_core/agents.py +20 -22
  8. langchain_core/caches.py +65 -66
  9. langchain_core/callbacks/__init__.py +1 -8
  10. langchain_core/callbacks/base.py +321 -336
  11. langchain_core/callbacks/file.py +44 -44
  12. langchain_core/callbacks/manager.py +436 -513
  13. langchain_core/callbacks/stdout.py +29 -30
  14. langchain_core/callbacks/streaming_stdout.py +32 -32
  15. langchain_core/callbacks/usage.py +60 -57
  16. langchain_core/chat_history.py +53 -68
  17. langchain_core/document_loaders/base.py +27 -25
  18. langchain_core/document_loaders/blob_loaders.py +1 -1
  19. langchain_core/document_loaders/langsmith.py +44 -48
  20. langchain_core/documents/__init__.py +23 -3
  21. langchain_core/documents/base.py +98 -90
  22. langchain_core/documents/compressor.py +10 -10
  23. langchain_core/documents/transformers.py +34 -35
  24. langchain_core/embeddings/fake.py +50 -54
  25. langchain_core/example_selectors/length_based.py +1 -1
  26. langchain_core/example_selectors/semantic_similarity.py +28 -32
  27. langchain_core/exceptions.py +21 -20
  28. langchain_core/globals.py +3 -151
  29. langchain_core/indexing/__init__.py +1 -1
  30. langchain_core/indexing/api.py +121 -126
  31. langchain_core/indexing/base.py +73 -75
  32. langchain_core/indexing/in_memory.py +4 -6
  33. langchain_core/language_models/__init__.py +14 -29
  34. langchain_core/language_models/_utils.py +58 -61
  35. langchain_core/language_models/base.py +53 -162
  36. langchain_core/language_models/chat_models.py +298 -387
  37. langchain_core/language_models/fake.py +11 -11
  38. langchain_core/language_models/fake_chat_models.py +42 -36
  39. langchain_core/language_models/llms.py +125 -235
  40. langchain_core/load/dump.py +9 -12
  41. langchain_core/load/load.py +18 -28
  42. langchain_core/load/mapping.py +2 -4
  43. langchain_core/load/serializable.py +42 -40
  44. langchain_core/messages/__init__.py +10 -16
  45. langchain_core/messages/ai.py +148 -148
  46. langchain_core/messages/base.py +58 -52
  47. langchain_core/messages/block_translators/__init__.py +27 -17
  48. langchain_core/messages/block_translators/anthropic.py +6 -6
  49. langchain_core/messages/block_translators/bedrock_converse.py +5 -5
  50. langchain_core/messages/block_translators/google_genai.py +505 -20
  51. langchain_core/messages/block_translators/google_vertexai.py +4 -32
  52. langchain_core/messages/block_translators/groq.py +117 -21
  53. langchain_core/messages/block_translators/langchain_v0.py +5 -5
  54. langchain_core/messages/block_translators/openai.py +11 -11
  55. langchain_core/messages/chat.py +2 -6
  56. langchain_core/messages/content.py +337 -328
  57. langchain_core/messages/function.py +6 -10
  58. langchain_core/messages/human.py +24 -31
  59. langchain_core/messages/modifier.py +2 -2
  60. langchain_core/messages/system.py +19 -29
  61. langchain_core/messages/tool.py +74 -90
  62. langchain_core/messages/utils.py +474 -504
  63. langchain_core/output_parsers/__init__.py +13 -10
  64. langchain_core/output_parsers/base.py +61 -61
  65. langchain_core/output_parsers/format_instructions.py +9 -4
  66. langchain_core/output_parsers/json.py +12 -10
  67. langchain_core/output_parsers/list.py +21 -23
  68. langchain_core/output_parsers/openai_functions.py +49 -47
  69. langchain_core/output_parsers/openai_tools.py +16 -21
  70. langchain_core/output_parsers/pydantic.py +13 -14
  71. langchain_core/output_parsers/string.py +5 -5
  72. langchain_core/output_parsers/transform.py +15 -17
  73. langchain_core/output_parsers/xml.py +35 -34
  74. langchain_core/outputs/__init__.py +1 -1
  75. langchain_core/outputs/chat_generation.py +18 -18
  76. langchain_core/outputs/chat_result.py +1 -3
  77. langchain_core/outputs/generation.py +10 -11
  78. langchain_core/outputs/llm_result.py +10 -10
  79. langchain_core/prompt_values.py +11 -17
  80. langchain_core/prompts/__init__.py +3 -27
  81. langchain_core/prompts/base.py +48 -56
  82. langchain_core/prompts/chat.py +275 -325
  83. langchain_core/prompts/dict.py +5 -5
  84. langchain_core/prompts/few_shot.py +81 -88
  85. langchain_core/prompts/few_shot_with_templates.py +11 -13
  86. langchain_core/prompts/image.py +12 -14
  87. langchain_core/prompts/loading.py +4 -6
  88. langchain_core/prompts/message.py +3 -3
  89. langchain_core/prompts/prompt.py +24 -39
  90. langchain_core/prompts/string.py +26 -10
  91. langchain_core/prompts/structured.py +49 -53
  92. langchain_core/rate_limiters.py +51 -60
  93. langchain_core/retrievers.py +61 -198
  94. langchain_core/runnables/base.py +1478 -1630
  95. langchain_core/runnables/branch.py +53 -57
  96. langchain_core/runnables/config.py +72 -89
  97. langchain_core/runnables/configurable.py +120 -137
  98. langchain_core/runnables/fallbacks.py +83 -79
  99. langchain_core/runnables/graph.py +91 -97
  100. langchain_core/runnables/graph_ascii.py +27 -28
  101. langchain_core/runnables/graph_mermaid.py +38 -50
  102. langchain_core/runnables/graph_png.py +15 -16
  103. langchain_core/runnables/history.py +135 -148
  104. langchain_core/runnables/passthrough.py +124 -150
  105. langchain_core/runnables/retry.py +46 -51
  106. langchain_core/runnables/router.py +25 -30
  107. langchain_core/runnables/schema.py +75 -80
  108. langchain_core/runnables/utils.py +60 -67
  109. langchain_core/stores.py +85 -121
  110. langchain_core/structured_query.py +8 -8
  111. langchain_core/sys_info.py +27 -29
  112. langchain_core/tools/__init__.py +1 -14
  113. langchain_core/tools/base.py +285 -229
  114. langchain_core/tools/convert.py +160 -155
  115. langchain_core/tools/render.py +10 -10
  116. langchain_core/tools/retriever.py +12 -11
  117. langchain_core/tools/simple.py +19 -24
  118. langchain_core/tools/structured.py +32 -39
  119. langchain_core/tracers/__init__.py +1 -9
  120. langchain_core/tracers/base.py +97 -99
  121. langchain_core/tracers/context.py +29 -52
  122. langchain_core/tracers/core.py +49 -53
  123. langchain_core/tracers/evaluation.py +11 -11
  124. langchain_core/tracers/event_stream.py +65 -64
  125. langchain_core/tracers/langchain.py +21 -21
  126. langchain_core/tracers/log_stream.py +45 -45
  127. langchain_core/tracers/memory_stream.py +3 -3
  128. langchain_core/tracers/root_listeners.py +16 -16
  129. langchain_core/tracers/run_collector.py +2 -4
  130. langchain_core/tracers/schemas.py +0 -129
  131. langchain_core/tracers/stdout.py +3 -3
  132. langchain_core/utils/__init__.py +1 -4
  133. langchain_core/utils/_merge.py +2 -2
  134. langchain_core/utils/aiter.py +57 -61
  135. langchain_core/utils/env.py +9 -9
  136. langchain_core/utils/function_calling.py +89 -186
  137. langchain_core/utils/html.py +7 -8
  138. langchain_core/utils/input.py +6 -6
  139. langchain_core/utils/interactive_env.py +1 -1
  140. langchain_core/utils/iter.py +36 -40
  141. langchain_core/utils/json.py +4 -3
  142. langchain_core/utils/json_schema.py +9 -9
  143. langchain_core/utils/mustache.py +8 -10
  144. langchain_core/utils/pydantic.py +33 -35
  145. langchain_core/utils/strings.py +6 -9
  146. langchain_core/utils/usage.py +1 -1
  147. langchain_core/utils/utils.py +66 -62
  148. langchain_core/vectorstores/base.py +182 -216
  149. langchain_core/vectorstores/in_memory.py +101 -176
  150. langchain_core/vectorstores/utils.py +5 -5
  151. langchain_core/version.py +1 -1
  152. langchain_core-1.0.3.dist-info/METADATA +69 -0
  153. langchain_core-1.0.3.dist-info/RECORD +172 -0
  154. {langchain_core-1.0.0a5.dist-info → langchain_core-1.0.3.dist-info}/WHEEL +1 -1
  155. langchain_core/memory.py +0 -120
  156. langchain_core/messages/block_translators/ollama.py +0 -47
  157. langchain_core/prompts/pipeline.py +0 -138
  158. langchain_core/pydantic_v1/__init__.py +0 -30
  159. langchain_core/pydantic_v1/dataclasses.py +0 -23
  160. langchain_core/pydantic_v1/main.py +0 -23
  161. langchain_core/tracers/langchain_v1.py +0 -31
  162. langchain_core/utils/loading.py +0 -35
  163. langchain_core-1.0.0a5.dist-info/METADATA +0 -77
  164. langchain_core-1.0.0a5.dist-info/RECORD +0 -181
  165. langchain_core-1.0.0a5.dist-info/entry_points.txt +0 -4
langchain_core/tracers/langchain.py

@@ -5,7 +5,7 @@ from __future__ import annotations
  import logging
  from concurrent.futures import ThreadPoolExecutor
  from datetime import datetime, timezone
- from typing import TYPE_CHECKING, Any, Optional, Union
+ from typing import TYPE_CHECKING, Any
  from uuid import UUID

  from langsmith import Client, get_tracing_context
@@ -30,7 +30,7 @@ if TYPE_CHECKING:

  logger = logging.getLogger(__name__)
  _LOGGED = set()
- _EXECUTOR: Optional[ThreadPoolExecutor] = None
+ _EXECUTOR: ThreadPoolExecutor | None = None


  def log_error_once(method: str, exception: Exception) -> None:
@@ -76,10 +76,10 @@ class LangChainTracer(BaseTracer):

  def __init__(
  self,
- example_id: Optional[Union[UUID, str]] = None,
- project_name: Optional[str] = None,
- client: Optional[Client] = None,
- tags: Optional[list[str]] = None,
+ example_id: UUID | str | None = None,
+ project_name: str | None = None,
+ client: Client | None = None,
+ tags: list[str] | None = None,
  **kwargs: Any,
  ) -> None:
  """Initialize the LangChain tracer.
@@ -89,7 +89,7 @@ class LangChainTracer(BaseTracer):
  project_name: The project name. Defaults to the tracer project.
  client: The client. Defaults to the global client.
  tags: The tags. Defaults to an empty list.
- kwargs: Additional keyword arguments.
+ **kwargs: Additional keyword arguments.
  """
  super().__init__(**kwargs)
  self.example_id = (
@@ -98,7 +98,7 @@ class LangChainTracer(BaseTracer):
  self.project_name = project_name or ls_utils.get_tracer_project()
  self.client = client or get_client()
  self.tags = tags or []
- self.latest_run: Optional[Run] = None
+ self.latest_run: Run | None = None
  self.run_has_token_event_map: dict[str, bool] = {}

  def _start_trace(self, run: Run) -> None:
@@ -122,10 +122,10 @@ class LangChainTracer(BaseTracer):
  messages: list[list[BaseMessage]],
  *,
  run_id: UUID,
- tags: Optional[list[str]] = None,
- parent_run_id: Optional[UUID] = None,
- metadata: Optional[dict[str, Any]] = None,
- name: Optional[str] = None,
+ tags: list[str] | None = None,
+ parent_run_id: UUID | None = None,
+ metadata: dict[str, Any] | None = None,
+ name: str | None = None,
  **kwargs: Any,
  ) -> Run:
  """Start a trace for an LLM run.
@@ -134,14 +134,14 @@ class LangChainTracer(BaseTracer):
  serialized: The serialized model.
  messages: The messages.
  run_id: The run ID.
- tags: The tags. Defaults to None.
- parent_run_id: The parent run ID. Defaults to None.
- metadata: The metadata. Defaults to None.
- name: The name. Defaults to None.
- kwargs: Additional keyword arguments.
+ tags: The tags.
+ parent_run_id: The parent run ID.
+ metadata: The metadata.
+ name: The name.
+ **kwargs: Additional keyword arguments.

  Returns:
- Run: The run.
+ The run.
  """
  start_time = datetime.now(timezone.utc)
  if metadata:
@@ -175,7 +175,7 @@ class LangChainTracer(BaseTracer):
  """Get the LangSmith root run URL.

  Returns:
- str: The LangSmith root run URL.
+ The LangSmith root run URL.

  Raises:
  ValueError: If no traced run is found.
@@ -242,8 +242,8 @@ class LangChainTracer(BaseTracer):
  self,
  token: str,
  run_id: UUID,
- chunk: Optional[Union[GenerationChunk, ChatGenerationChunk]] = None,
- parent_run_id: Optional[UUID] = None,
+ chunk: GenerationChunk | ChatGenerationChunk | None = None,
+ parent_run_id: UUID | None = None,
  ) -> Run:
  run_id_str = str(run_id)
  if run_id_str not in self.run_has_token_event_map:
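Editor's note: the recurring change in this file, and across most of the diff, is replacing `typing.Optional`/`typing.Union` with PEP 604 union syntax (`X | None`, `A | B`). A minimal sketch of the pattern, assuming Python 3.10+ (on older interpreters the syntax still works inside annotations when `from __future__ import annotations` is active, as it is in this module); `fetch_run` is an invented example, not an API of this package:

    from __future__ import annotations  # postponed evaluation; PEP 604 safe in annotations pre-3.10

    from uuid import UUID

    # Old style:  def fetch_run(run_id: Optional[Union[UUID, str]] = None) -> Optional[dict]:
    def fetch_run(run_id: UUID | str | None = None) -> dict | None:
        # None short-circuits exactly as with Optional[...]
        if run_id is None:
            return None
        return {"id": str(run_id)}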
langchain_core/tracers/log_stream.py

@@ -12,9 +12,7 @@ from typing import (
  TYPE_CHECKING,
  Any,
  Literal,
- Optional,
  TypeVar,
- Union,
  overload,
  )

@@ -59,13 +57,13 @@ class LogEntry(TypedDict):
  """List of LLM tokens streamed by this run, if applicable."""
  streamed_output: list[Any]
  """List of output chunks streamed by this run, if available."""
- inputs: NotRequired[Optional[Any]]
+ inputs: NotRequired[Any | None]
  """Inputs to this run. Not available currently via astream_log."""
- final_output: Optional[Any]
+ final_output: Any | None
  """Final output of this run.

  Only available after the run has finished successfully."""
- end_time: Optional[str]
+ end_time: str | None
  """ISO-8601 timestamp of when the run ended.
  Only available after the run has finished."""

@@ -77,7 +75,7 @@ class RunState(TypedDict):
  """ID of the run."""
  streamed_output: list[Any]
  """List of output chunks streamed by Runnable.stream()"""
- final_output: Optional[Any]
+ final_output: Any | None
  """Final output of the run, usually the result of aggregating (`+`) streamed_output.
  Updated throughout the run when supported by the Runnable."""

@@ -98,10 +96,10 @@ class RunLogPatch:
  """Patch to the run log."""

  ops: list[dict[str, Any]]
- """List of jsonpatch operations, which describe how to create the run state
+ """List of JSONPatch operations, which describe how to create the run state
  from an empty dict. This is the minimal representation of the log, designed to
  be serialized as JSON and sent over the wire to reconstruct the log on the other
- side. Reconstruction of the state can be done with any jsonpatch-compliant library,
+ side. Reconstruction of the state can be done with any JSONPatch-compliant library,
  see https://jsonpatch.com for more information."""

  def __init__(self, *ops: dict[str, Any]) -> None:
@@ -112,17 +110,17 @@ class RunLogPatch:
  """
  self.ops = list(ops)

- def __add__(self, other: Union[RunLogPatch, Any]) -> RunLog:
- """Combine two ``RunLogPatch`` instances.
+ def __add__(self, other: RunLogPatch | Any) -> RunLog:
+ """Combine two `RunLogPatch` instances.

  Args:
- other: The other ``RunLogPatch`` to combine with.
+ other: The other `RunLogPatch` to combine with.

  Raises:
- TypeError: If the other object is not a ``RunLogPatch``.
+ TypeError: If the other object is not a `RunLogPatch`.

  Returns:
- A new ``RunLog`` representing the combination of the two.
+ A new `RunLog` representing the combination of the two.
  """
  if type(other) is RunLogPatch:
  ops = self.ops + other.ops
@@ -160,17 +158,17 @@ class RunLog(RunLogPatch):
  super().__init__(*ops)
  self.state = state

- def __add__(self, other: Union[RunLogPatch, Any]) -> RunLog:
- """Combine two ``RunLog``s.
+ def __add__(self, other: RunLogPatch | Any) -> RunLog:
+ """Combine two `RunLog`s.

  Args:
- other: The other ``RunLog`` or ``RunLogPatch`` to combine with.
+ other: The other `RunLog` or `RunLogPatch` to combine with.

  Raises:
- TypeError: If the other object is not a ``RunLog`` or ``RunLogPatch``.
+ TypeError: If the other object is not a `RunLog` or `RunLogPatch`.

  Returns:
- A new ``RunLog`` representing the combination of the two.
+ A new `RunLog` representing the combination of the two.
  """
  if type(other) is RunLogPatch:
  ops = self.ops + other.ops
@@ -186,13 +184,13 @@ class RunLog(RunLogPatch):

  @override
  def __eq__(self, other: object) -> bool:
- """Check if two ``RunLog``s are equal.
+ """Check if two `RunLog`s are equal.

  Args:
- other: The other ``RunLog`` to compare to.
+ other: The other `RunLog` to compare to.

  Returns:
- True if the ``RunLog``s are equal, False otherwise.
+ `True` if the `RunLog`s are equal, `False` otherwise.
  """
  # First compare that the state is the same
  if not isinstance(other, RunLog):
@@ -215,12 +213,12 @@ class LogStreamCallbackHandler(BaseTracer, _StreamingCallbackHandler):
  self,
  *,
  auto_close: bool = True,
- include_names: Optional[Sequence[str]] = None,
- include_types: Optional[Sequence[str]] = None,
- include_tags: Optional[Sequence[str]] = None,
- exclude_names: Optional[Sequence[str]] = None,
- exclude_types: Optional[Sequence[str]] = None,
- exclude_tags: Optional[Sequence[str]] = None,
+ include_names: Sequence[str] | None = None,
+ include_types: Sequence[str] | None = None,
+ include_tags: Sequence[str] | None = None,
+ exclude_names: Sequence[str] | None = None,
+ exclude_types: Sequence[str] | None = None,
+ exclude_tags: Sequence[str] | None = None,
  # Schema format is for internal use only.
  _schema_format: Literal["original", "streaming_events"] = "streaming_events",
  ) -> None:
@@ -266,14 +264,17 @@ class LogStreamCallbackHandler(BaseTracer, _StreamingCallbackHandler):
  self.exclude_types = exclude_types
  self.exclude_tags = exclude_tags

- loop = asyncio.get_event_loop()
+ try:
+ loop = asyncio.get_event_loop()
+ except RuntimeError:
+ loop = asyncio.new_event_loop()
  memory_stream = _MemoryStream[RunLogPatch](loop)
  self.lock = threading.Lock()
  self.send_stream = memory_stream.get_send_stream()
  self.receive_stream = memory_stream.get_receive_stream()
  self._key_map_by_run_id: dict[UUID, str] = {}
  self._counter_map_by_name: dict[str, int] = defaultdict(int)
- self.root_id: Optional[UUID] = None
+ self.root_id: UUID | None = None

  def __aiter__(self) -> AsyncIterator[RunLogPatch]:
  """Iterate over the stream of run logs.
@@ -290,8 +291,7 @@ class LogStreamCallbackHandler(BaseTracer, _StreamingCallbackHandler):
  *ops: The operations to send to the stream.

  Returns:
- bool: True if the patch was sent successfully, False if the stream
- is closed.
+ `True` if the patch was sent successfully, False if the stream is closed.
  """
  # We will likely want to wrap this in try / except at some point
  # to handle exceptions that might arise at run time.
@@ -310,7 +310,7 @@ class LogStreamCallbackHandler(BaseTracer, _StreamingCallbackHandler):
  output: The output async iterator.

  Yields:
- T: The output value.
+ The output value.
  """
  async for chunk in output:
  # root run is handled in .astream_log()
@@ -341,7 +341,7 @@ class LogStreamCallbackHandler(BaseTracer, _StreamingCallbackHandler):
  output: The output iterator.

  Yields:
- T: The output value.
+ The output value.
  """
  for chunk in output:
  # root run is handled in .astream_log()
@@ -371,7 +371,7 @@ class LogStreamCallbackHandler(BaseTracer, _StreamingCallbackHandler):
  run: The Run to check.

  Returns:
- bool: True if the run should be included, False otherwise.
+ `True` if the run should be included, `False` otherwise.
  """
  if run.id == self.root_id:
  return False
@@ -515,7 +515,7 @@ class LogStreamCallbackHandler(BaseTracer, _StreamingCallbackHandler):
  self,
  run: Run,
  token: str,
- chunk: Optional[Union[GenerationChunk, ChatGenerationChunk]],
+ chunk: GenerationChunk | ChatGenerationChunk | None,
  ) -> None:
  """Process new LLM token."""
  index = self._key_map_by_run_id.get(run.id)
@@ -541,7 +541,7 @@ class LogStreamCallbackHandler(BaseTracer, _StreamingCallbackHandler):

  def _get_standardized_inputs(
  run: Run, schema_format: Literal["original", "streaming_events"]
- ) -> Optional[dict[str, Any]]:
+ ) -> dict[str, Any] | None:
  """Extract standardized inputs from a run.

  Standardizes the inputs based on the type of the runnable used.
@@ -583,7 +583,7 @@ def _get_standardized_inputs(

  def _get_standardized_outputs(
  run: Run, schema_format: Literal["original", "streaming_events", "original+chat"]
- ) -> Optional[Any]:
+ ) -> Any | None:
  """Extract standardized output from a run.

  Standardizes the outputs based on the type of the runnable used.
@@ -617,7 +617,7 @@ def _get_standardized_outputs(
  def _astream_log_implementation(
  runnable: Runnable[Input, Output],
  value: Any,
- config: Optional[RunnableConfig] = None,
+ config: RunnableConfig | None = None,
  *,
  stream: LogStreamCallbackHandler,
  diff: Literal[True] = True,
@@ -630,7 +630,7 @@ def _astream_log_implementation(
  def _astream_log_implementation(
  runnable: Runnable[Input, Output],
  value: Any,
- config: Optional[RunnableConfig] = None,
+ config: RunnableConfig | None = None,
  *,
  stream: LogStreamCallbackHandler,
  diff: Literal[False],
@@ -642,13 +642,13 @@ def _astream_log_implementation(
  async def _astream_log_implementation(
  runnable: Runnable[Input, Output],
  value: Any,
- config: Optional[RunnableConfig] = None,
+ config: RunnableConfig | None = None,
  *,
  stream: LogStreamCallbackHandler,
  diff: bool = True,
  with_streamed_output_list: bool = True,
  **kwargs: Any,
- ) -> Union[AsyncIterator[RunLogPatch], AsyncIterator[RunLog]]:
+ ) -> AsyncIterator[RunLogPatch] | AsyncIterator[RunLog]:
  """Implementation of astream_log for a given runnable.

  The implementation has been factored out (at least temporarily) as both
@@ -661,7 +661,7 @@ async def _astream_log_implementation(
  stream: The stream to send the run logs to.
  diff: Whether to yield run log patches (True) or full run logs (False).
  with_streamed_output_list: Whether to include a list of all streamed
- outputs in each patch. If False, only the final output will be included
+ outputs in each patch. If `False`, only the final output will be included
  in the patches.
  **kwargs: Additional keyword arguments to pass to the runnable.

@@ -669,7 +669,7 @@ async def _astream_log_implementation(
  ValueError: If the callbacks in the config are of an unexpected type.

  Yields:
- The run log patches or states, depending on the value of ``diff``.
+ The run log patches or states, depending on the value of `diff`.
  """
  # Assign the stream handler to the config
  config = ensure_config(config)
@@ -693,8 +693,8 @@ async def _astream_log_implementation(
  # add each chunk to the output stream
  async def consume_astream() -> None:
  try:
- prev_final_output: Optional[Output] = None
- final_output: Optional[Output] = None
+ prev_final_output: Output | None = None
+ final_output: Output | None = None

  async for chunk in runnable.astream(value, config, **kwargs):
  prev_final_output = final_output
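Editor's note: the `RunLogPatch.__add__` / `RunLog.__add__` docstring changes above describe the documented way to fold the patch stream from `Runnable.astream_log` into a single state. A hedged usage sketch; `chain` stands in for any `Runnable` and is not defined here:

    # Accumulate RunLogPatch objects into a RunLog via the `+` operator.
    async def collect_final_output(chain, question: str):
        patches = [patch async for patch in chain.astream_log(question)]
        run_log = patches[0]
        for patch in patches[1:]:
            # RunLogPatch + RunLogPatch and RunLog + RunLogPatch both yield a RunLog
            run_log = run_log + patch
        # Assumes at least two patches were streamed, which astream_log produces in practice;
        # run_log.state is then the RunState TypedDict shown above.
        return run_log.state["final_output"]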
langchain_core/tracers/memory_stream.py

@@ -5,7 +5,7 @@ channel. The writer and reader can be in the same event loop or in different eve
  loops. When they're in different event loops, they will also be in different
  threads.

- This is useful in situations when there's a mix of synchronous and asynchronous
+ Useful in situations when there's a mix of synchronous and asynchronous
  used in the code.
  """

@@ -130,7 +130,7 @@ class _MemoryStream(Generic[T]):
  """Get a writer for the channel.

  Returns:
- _SendStream: The writer for the channel.
+ The writer for the channel.
  """
  return _SendStream[T](
  reader_loop=self._loop, queue=self._queue, done=self._done
@@ -140,6 +140,6 @@ class _MemoryStream(Generic[T]):
  """Get a reader for the channel.

  Returns:
- _ReceiveStream: The reader for the channel.
+ The reader for the channel.
  """
  return _ReceiveStream[T](queue=self._queue, done=self._done)
langchain_core/tracers/root_listeners.py

@@ -1,7 +1,7 @@
  """Tracers that call listeners."""

- from collections.abc import Awaitable
- from typing import TYPE_CHECKING, Callable, Optional, Union
+ from collections.abc import Awaitable, Callable
+ from typing import TYPE_CHECKING

  from langchain_core.runnables.config import (
  RunnableConfig,
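Editor's note: moving `Callable` from `typing` to `collections.abc` follows PEP 585; the `typing` alias has been documented as deprecated since Python 3.9, when `collections.abc.Callable` became subscriptable, so the type aliases in the next hunk keep exactly the same shape. A small illustrative sketch (the names are made up, not from this module):

    from collections.abc import Awaitable, Callable

    # Parameterized exactly like the old typing.Callable
    Handler = Callable[[str], None]
    AsyncHandler = Callable[[str], Awaitable[None]]

    def notify(handler: Handler, message: str) -> None:
        handler(message)

    notify(print, "run finished")  # print satisfies Callable[[str], None] for this call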
@@ -14,25 +14,25 @@ from langchain_core.tracers.schemas import Run
  if TYPE_CHECKING:
  from uuid import UUID

- Listener = Union[Callable[[Run], None], Callable[[Run, RunnableConfig], None]]
- AsyncListener = Union[
- Callable[[Run], Awaitable[None]], Callable[[Run, RunnableConfig], Awaitable[None]]
- ]
+ Listener = Callable[[Run], None] | Callable[[Run, RunnableConfig], None]
+ AsyncListener = (
+ Callable[[Run], Awaitable[None]] | Callable[[Run, RunnableConfig], Awaitable[None]]
+ )


  class RootListenersTracer(BaseTracer):
  """Tracer that calls listeners on run start, end, and error."""

  log_missing_parent = False
- """Whether to log a warning if the parent is missing. Default is False."""
+ """Whether to log a warning if the parent is missing."""

  def __init__(
  self,
  *,
  config: RunnableConfig,
- on_start: Optional[Listener],
- on_end: Optional[Listener],
- on_error: Optional[Listener],
+ on_start: Listener | None,
+ on_end: Listener | None,
+ on_error: Listener | None,
  ) -> None:
  """Initialize the tracer.

@@ -48,7 +48,7 @@ class RootListenersTracer(BaseTracer):
  self._arg_on_start = on_start
  self._arg_on_end = on_end
  self._arg_on_error = on_error
- self.root_id: Optional[UUID] = None
+ self.root_id: UUID | None = None

  def _persist_run(self, run: Run) -> None:
  # This is a legacy method only called once for an entire run tree
@@ -79,15 +79,15 @@ class AsyncRootListenersTracer(AsyncBaseTracer):
  """Async Tracer that calls listeners on run start, end, and error."""

  log_missing_parent = False
- """Whether to log a warning if the parent is missing. Default is False."""
+ """Whether to log a warning if the parent is missing."""

  def __init__(
  self,
  *,
  config: RunnableConfig,
- on_start: Optional[AsyncListener],
- on_end: Optional[AsyncListener],
- on_error: Optional[AsyncListener],
+ on_start: AsyncListener | None,
+ on_end: AsyncListener | None,
+ on_error: AsyncListener | None,
  ) -> None:
  """Initialize the tracer.

@@ -103,7 +103,7 @@ class AsyncRootListenersTracer(AsyncBaseTracer):
  self._arg_on_start = on_start
  self._arg_on_end = on_end
  self._arg_on_error = on_error
- self.root_id: Optional[UUID] = None
+ self.root_id: UUID | None = None

  async def _persist_run(self, run: Run) -> None:
  # This is a legacy method only called once for an entire run tree
langchain_core/tracers/run_collector.py

@@ -1,6 +1,6 @@
  """A tracer that collects all nested runs in a list."""

- from typing import Any, Optional, Union
+ from typing import Any
  from uuid import UUID

  from langchain_core.tracers.base import BaseTracer
@@ -15,9 +15,7 @@ class RunCollectorCallbackHandler(BaseTracer):

  name: str = "run-collector_callback_handler"

- def __init__(
- self, example_id: Optional[Union[UUID, str]] = None, **kwargs: Any
- ) -> None:
+ def __init__(self, example_id: UUID | str | None = None, **kwargs: Any) -> None:
  """Initialize the RunCollectorCallbackHandler.

  Args:
langchain_core/tracers/schemas.py

@@ -2,142 +2,13 @@

  from __future__ import annotations

- import warnings
- from datetime import datetime, timezone
- from typing import Any, Optional
- from uuid import UUID
-
  from langsmith import RunTree
- from langsmith.schemas import RunTypeEnum as RunTypeEnumDep
- from pydantic import PydanticDeprecationWarning
- from pydantic.v1 import BaseModel as BaseModelV1
- from pydantic.v1 import Field as FieldV1
-
- from langchain_core._api import deprecated
-
-
- @deprecated("0.1.0", alternative="Use string instead.", removal="1.0")
- def RunTypeEnum() -> type[RunTypeEnumDep]: # noqa: N802
- """``RunTypeEnum``.
-
- Returns:
- The ``RunTypeEnum`` class.
- """
- warnings.warn(
- "RunTypeEnum is deprecated. Please directly use a string instead"
- " (e.g. 'llm', 'chain', 'tool').",
- DeprecationWarning,
- stacklevel=2,
- )
- return RunTypeEnumDep
-
-
- @deprecated("0.1.0", removal="1.0")
- class TracerSessionV1Base(BaseModelV1):
- """Base class for TracerSessionV1."""
-
- start_time: datetime = FieldV1(default_factory=lambda: datetime.now(timezone.utc))
- name: Optional[str] = None
- extra: Optional[dict[str, Any]] = None
-
-
- @deprecated("0.1.0", removal="1.0")
- class TracerSessionV1Create(TracerSessionV1Base):
- """Create class for TracerSessionV1."""
-
-
- @deprecated("0.1.0", removal="1.0")
- class TracerSessionV1(TracerSessionV1Base):
- """TracerSessionV1 schema."""
-
- id: int
-
-
- @deprecated("0.1.0", removal="1.0")
- class TracerSessionBase(TracerSessionV1Base):
- """Base class for TracerSession."""
-
- tenant_id: UUID
-
-
- @deprecated("0.1.0", removal="1.0")
- class TracerSession(TracerSessionBase):
- """TracerSessionV1 schema for the V2 API."""
-
- id: UUID
-
-
- @deprecated("0.1.0", alternative="Run", removal="1.0")
- class BaseRun(BaseModelV1):
- """Base class for Run."""
-
- uuid: str
- parent_uuid: Optional[str] = None
- start_time: datetime = FieldV1(default_factory=lambda: datetime.now(timezone.utc))
- end_time: datetime = FieldV1(default_factory=lambda: datetime.now(timezone.utc))
- extra: Optional[dict[str, Any]] = None
- execution_order: int
- child_execution_order: int
- serialized: dict[str, Any]
- session_id: int
- error: Optional[str] = None
-
-
- @deprecated("0.1.0", alternative="Run", removal="1.0")
- class LLMRun(BaseRun):
- """Class for LLMRun."""
-
- prompts: list[str]
- # Temporarily, remove but we will completely remove LLMRun
- # response: Optional[LLMResult] = None
-
-
- @deprecated("0.1.0", alternative="Run", removal="1.0")
- class ChainRun(BaseRun):
- """Class for ChainRun."""
-
- inputs: dict[str, Any]
- outputs: Optional[dict[str, Any]] = None
- child_llm_runs: list[LLMRun] = FieldV1(default_factory=list)
- child_chain_runs: list[ChainRun] = FieldV1(default_factory=list)
- child_tool_runs: list[ToolRun] = FieldV1(default_factory=list)
-
-
- @deprecated("0.1.0", alternative="Run", removal="1.0")
- class ToolRun(BaseRun):
- """Class for ToolRun."""
-
- tool_input: str
- output: Optional[str] = None
- action: str
- child_llm_runs: list[LLMRun] = FieldV1(default_factory=list)
- child_chain_runs: list[ChainRun] = FieldV1(default_factory=list)
- child_tool_runs: list[ToolRun] = FieldV1(default_factory=list)
-

  # Begin V2 API Schemas

  Run = RunTree # For backwards compatibility


- # TODO: Update once langsmith moves to Pydantic V2 and we can swap Run.model_rebuild
- # for Run.update_forward_refs
- with warnings.catch_warnings():
- warnings.simplefilter("ignore", category=PydanticDeprecationWarning)
-
- ChainRun.update_forward_refs()
- ToolRun.update_forward_refs()
-
  __all__ = [
- "BaseRun",
- "ChainRun",
- "LLMRun",
  "Run",
- "RunTypeEnum",
- "ToolRun",
- "TracerSession",
- "TracerSessionBase",
- "TracerSessionV1",
- "TracerSessionV1Base",
- "TracerSessionV1Create",
  ]
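Editor's note: after this change `langchain_core.tracers.schemas` exports only `Run`, an alias for langsmith's `RunTree`; the long-deprecated V1 schemas (`BaseRun`, `LLMRun`, `ChainRun`, `ToolRun`, the `TracerSession*` classes, and `RunTypeEnum`) are removed outright. A hedged migration sketch; it assumes `RunTree`'s `run_type` string field from the langsmith SDK, which is what the removed `RunTypeEnum` deprecation message already pointed to:

    from langchain_core.tracers.schemas import Run  # Run is langsmith's RunTree

    def is_llm_run(run: Run) -> bool:
        # Run types are plain strings now ("llm", "chain", "tool", ...),
        # per the guidance in the removed RunTypeEnum warning.
        return run.run_type == "llm"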
langchain_core/tracers/stdout.py

@@ -1,7 +1,8 @@
  """Tracers that print to the console."""

  import json
- from typing import Any, Callable
+ from collections.abc import Callable
+ from typing import Any

  from langchain_core.tracers.base import BaseTracer
  from langchain_core.tracers.schemas import Run
@@ -48,8 +49,7 @@ class FunctionCallbackHandler(BaseTracer):
  """Tracer that calls a function with a single str parameter."""

  name: str = "function_callback_handler"
- """The name of the tracer. This is used to identify the tracer in the logs.
- Default is "function_callback_handler"."""
+ """The name of the tracer. This is used to identify the tracer in the logs."""

  def __init__(self, function: Callable[[str], None], **kwargs: Any) -> None:
  """Create a FunctionCallbackHandler.