langchain-core 1.0.0a1__py3-none-any.whl → 1.0.0a3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- langchain_core/_api/beta_decorator.py +17 -40
- langchain_core/_api/deprecation.py +20 -7
- langchain_core/_api/path.py +19 -2
- langchain_core/_import_utils.py +7 -0
- langchain_core/agents.py +10 -6
- langchain_core/callbacks/base.py +28 -15
- langchain_core/callbacks/manager.py +81 -69
- langchain_core/callbacks/usage.py +4 -2
- langchain_core/chat_history.py +29 -21
- langchain_core/document_loaders/base.py +34 -9
- langchain_core/document_loaders/langsmith.py +3 -0
- langchain_core/documents/base.py +35 -10
- langchain_core/documents/transformers.py +4 -2
- langchain_core/embeddings/fake.py +8 -5
- langchain_core/env.py +2 -3
- langchain_core/example_selectors/base.py +12 -0
- langchain_core/exceptions.py +7 -0
- langchain_core/globals.py +17 -28
- langchain_core/indexing/api.py +57 -45
- langchain_core/indexing/base.py +5 -8
- langchain_core/indexing/in_memory.py +23 -3
- langchain_core/language_models/__init__.py +6 -2
- langchain_core/language_models/_utils.py +28 -4
- langchain_core/language_models/base.py +33 -21
- langchain_core/language_models/chat_models.py +103 -29
- langchain_core/language_models/fake_chat_models.py +5 -7
- langchain_core/language_models/llms.py +54 -20
- langchain_core/load/dump.py +2 -3
- langchain_core/load/load.py +15 -1
- langchain_core/load/serializable.py +38 -43
- langchain_core/memory.py +7 -3
- langchain_core/messages/__init__.py +7 -17
- langchain_core/messages/ai.py +41 -34
- langchain_core/messages/base.py +16 -7
- langchain_core/messages/block_translators/__init__.py +10 -8
- langchain_core/messages/block_translators/anthropic.py +3 -1
- langchain_core/messages/block_translators/bedrock.py +3 -1
- langchain_core/messages/block_translators/bedrock_converse.py +3 -1
- langchain_core/messages/block_translators/google_genai.py +3 -1
- langchain_core/messages/block_translators/google_vertexai.py +3 -1
- langchain_core/messages/block_translators/groq.py +3 -1
- langchain_core/messages/block_translators/langchain_v0.py +3 -136
- langchain_core/messages/block_translators/ollama.py +3 -1
- langchain_core/messages/block_translators/openai.py +252 -10
- langchain_core/messages/content.py +26 -124
- langchain_core/messages/human.py +2 -13
- langchain_core/messages/system.py +2 -6
- langchain_core/messages/tool.py +34 -14
- langchain_core/messages/utils.py +189 -74
- langchain_core/output_parsers/base.py +5 -2
- langchain_core/output_parsers/json.py +4 -4
- langchain_core/output_parsers/list.py +7 -22
- langchain_core/output_parsers/openai_functions.py +3 -0
- langchain_core/output_parsers/openai_tools.py +6 -1
- langchain_core/output_parsers/pydantic.py +4 -0
- langchain_core/output_parsers/string.py +5 -1
- langchain_core/output_parsers/xml.py +19 -19
- langchain_core/outputs/chat_generation.py +18 -7
- langchain_core/outputs/generation.py +14 -3
- langchain_core/outputs/llm_result.py +8 -1
- langchain_core/prompt_values.py +10 -4
- langchain_core/prompts/base.py +6 -11
- langchain_core/prompts/chat.py +88 -60
- langchain_core/prompts/dict.py +16 -8
- langchain_core/prompts/few_shot.py +9 -11
- langchain_core/prompts/few_shot_with_templates.py +5 -1
- langchain_core/prompts/image.py +12 -5
- langchain_core/prompts/loading.py +2 -2
- langchain_core/prompts/message.py +5 -6
- langchain_core/prompts/pipeline.py +13 -8
- langchain_core/prompts/prompt.py +22 -8
- langchain_core/prompts/string.py +18 -10
- langchain_core/prompts/structured.py +7 -2
- langchain_core/rate_limiters.py +2 -2
- langchain_core/retrievers.py +7 -6
- langchain_core/runnables/base.py +387 -246
- langchain_core/runnables/branch.py +11 -28
- langchain_core/runnables/config.py +20 -17
- langchain_core/runnables/configurable.py +34 -19
- langchain_core/runnables/fallbacks.py +20 -13
- langchain_core/runnables/graph.py +48 -38
- langchain_core/runnables/graph_ascii.py +40 -17
- langchain_core/runnables/graph_mermaid.py +54 -25
- langchain_core/runnables/graph_png.py +27 -31
- langchain_core/runnables/history.py +55 -58
- langchain_core/runnables/passthrough.py +44 -21
- langchain_core/runnables/retry.py +44 -23
- langchain_core/runnables/router.py +9 -8
- langchain_core/runnables/schema.py +9 -0
- langchain_core/runnables/utils.py +53 -90
- langchain_core/stores.py +19 -31
- langchain_core/sys_info.py +9 -8
- langchain_core/tools/base.py +36 -27
- langchain_core/tools/convert.py +25 -14
- langchain_core/tools/simple.py +36 -8
- langchain_core/tools/structured.py +25 -12
- langchain_core/tracers/base.py +2 -2
- langchain_core/tracers/context.py +5 -1
- langchain_core/tracers/core.py +110 -46
- langchain_core/tracers/evaluation.py +22 -26
- langchain_core/tracers/event_stream.py +97 -42
- langchain_core/tracers/langchain.py +12 -3
- langchain_core/tracers/langchain_v1.py +10 -2
- langchain_core/tracers/log_stream.py +56 -17
- langchain_core/tracers/root_listeners.py +4 -20
- langchain_core/tracers/run_collector.py +6 -16
- langchain_core/tracers/schemas.py +5 -1
- langchain_core/utils/aiter.py +14 -6
- langchain_core/utils/env.py +3 -0
- langchain_core/utils/function_calling.py +46 -20
- langchain_core/utils/interactive_env.py +6 -2
- langchain_core/utils/iter.py +12 -5
- langchain_core/utils/json.py +12 -3
- langchain_core/utils/json_schema.py +156 -40
- langchain_core/utils/loading.py +5 -1
- langchain_core/utils/mustache.py +25 -16
- langchain_core/utils/pydantic.py +38 -9
- langchain_core/utils/utils.py +25 -9
- langchain_core/vectorstores/base.py +7 -20
- langchain_core/vectorstores/in_memory.py +20 -14
- langchain_core/vectorstores/utils.py +18 -12
- langchain_core/version.py +1 -1
- langchain_core-1.0.0a3.dist-info/METADATA +77 -0
- langchain_core-1.0.0a3.dist-info/RECORD +181 -0
- langchain_core/beta/__init__.py +0 -1
- langchain_core/beta/runnables/__init__.py +0 -1
- langchain_core/beta/runnables/context.py +0 -448
- langchain_core-1.0.0a1.dist-info/METADATA +0 -106
- langchain_core-1.0.0a1.dist-info/RECORD +0 -184
- {langchain_core-1.0.0a1.dist-info → langchain_core-1.0.0a3.dist-info}/WHEEL +0 -0
- {langchain_core-1.0.0a1.dist-info → langchain_core-1.0.0a3.dist-info}/entry_points.txt +0 -0

langchain_core/tracers/event_stream.py
CHANGED
@@ -9,21 +9,23 @@ from typing import (
     TYPE_CHECKING,
     Any,
     Optional,
+    TypedDict,
     TypeVar,
     Union,
     cast,
 )
 from uuid import UUID, uuid4

-from typing_extensions import NotRequired,
+from typing_extensions import NotRequired, override

-from langchain_core.callbacks.base import AsyncCallbackHandler
+from langchain_core.callbacks.base import AsyncCallbackHandler, BaseCallbackManager
 from langchain_core.messages import AIMessageChunk, BaseMessage, BaseMessageChunk
 from langchain_core.outputs import (
     ChatGenerationChunk,
     GenerationChunk,
     LLMResult,
 )
+from langchain_core.runnables import ensure_config
 from langchain_core.runnables.schema import (
     CustomStreamEvent,
     EventData,
@@ -36,6 +38,11 @@ from langchain_core.runnables.utils import (
     _RootEventFilter,
 )
 from langchain_core.tracers._streaming import _StreamingCallbackHandler
+from langchain_core.tracers.log_stream import (
+    LogStreamCallbackHandler,
+    RunLog,
+    _astream_log_implementation,
+)
 from langchain_core.tracers.memory_stream import _MemoryStream
 from langchain_core.utils.aiter import aclosing, py_anext

@@ -53,22 +60,20 @@ class RunInfo(TypedDict):
     """Information about a run.

     This is used to keep track of the metadata associated with a run.
-
-    Parameters:
-        name: The name of the run.
-        tags: The tags associated with the run.
-        metadata: The metadata associated with the run.
-        run_type: The type of the run.
-        inputs: The inputs to the run.
-        parent_run_id: The ID of the parent run.
     """

     name: str
+    """The name of the run."""
     tags: list[str]
+    """The tags associated with the run."""
     metadata: dict[str, Any]
+    """The metadata associated with the run."""
     run_type: str
+    """The type of the run."""
     inputs: NotRequired[Any]
+    """The inputs to the run."""
     parent_run_id: Optional[UUID]
+    """The ID of the parent run."""


 def _assign_name(name: Optional[str], serialized: Optional[dict[str, Any]]) -> str:
@@ -155,7 +160,11 @@ class _AstreamEventsCallbackHandler(AsyncCallbackHandler, _StreamingCallbackHand
         self.send_stream.send_nowait(event)

     def __aiter__(self) -> AsyncIterator[Any]:
-        """Iterate over the receive stream.
+        """Iterate over the receive stream.
+
+        Returns:
+            An async iterator over the receive stream.
+        """
         return self.receive_stream.__aiter__()

     async def tap_output_aiter(
@@ -215,7 +224,7 @@ class _AstreamEventsCallbackHandler(AsyncCallbackHandler, _StreamingCallbackHand
             yield chunk

     def tap_output_iter(self, run_id: UUID, output: Iterator[T]) -> Iterator[T]:
-        """Tap the output
+        """Tap the output iter.

         Args:
             run_id: The ID of the run.
@@ -306,7 +315,7 @@ class _AstreamEventsCallbackHandler(AsyncCallbackHandler, _StreamingCallbackHand
         name: Optional[str] = None,
         **kwargs: Any,
     ) -> None:
-        """Start a trace for
+        """Start a trace for a chat model run."""
         name_ = _assign_name(name, serialized)
         run_type = "chat_model"

@@ -348,7 +357,7 @@ class _AstreamEventsCallbackHandler(AsyncCallbackHandler, _StreamingCallbackHand
         name: Optional[str] = None,
         **kwargs: Any,
     ) -> None:
-        """Start a trace for
+        """Start a trace for a (non-chat model) LLM run."""
         name_ = _assign_name(name, serialized)
         run_type = "llm"

@@ -412,7 +421,10 @@ class _AstreamEventsCallbackHandler(AsyncCallbackHandler, _StreamingCallbackHand
         parent_run_id: Optional[UUID] = None,
         **kwargs: Any,
     ) -> None:
-        """Run on new
+        """Run on new output token. Only available when streaming is enabled.
+
+        For both chat models and non-chat models (legacy LLMs).
+        """
         run_info = self.run_map.get(run_id)
         chunk_: Union[GenerationChunk, BaseMessageChunk]

@@ -458,9 +470,15 @@ class _AstreamEventsCallbackHandler(AsyncCallbackHandler, _StreamingCallbackHand
     async def on_llm_end(
         self, response: LLMResult, *, run_id: UUID, **kwargs: Any
     ) -> None:
-        """End a trace for
+        """End a trace for a model run.
+
+        For both chat models and non-chat models (legacy LLMs).
+
+        Raises:
+            ValueError: If the run type is not ``'llm'`` or ``'chat_model'``.
+        """
         run_info = self.run_map.pop(run_id)
-        inputs_ = run_info
+        inputs_ = run_info.get("inputs")

         generations: Union[list[list[GenerationChunk]], list[list[ChatGenerationChunk]]]
         output: Union[dict, BaseMessage] = {}
@@ -592,6 +610,28 @@ class _AstreamEventsCallbackHandler(AsyncCallbackHandler, _StreamingCallbackHand
             run_type,
         )

+    def _get_tool_run_info_with_inputs(self, run_id: UUID) -> tuple[RunInfo, Any]:
+        """Get run info for a tool and extract inputs, with validation.
+
+        Args:
+            run_id: The run ID of the tool.
+
+        Returns:
+            A tuple of (run_info, inputs).
+
+        Raises:
+            AssertionError: If the run ID is a tool call and does not have inputs.
+        """
+        run_info = self.run_map.pop(run_id)
+        if "inputs" not in run_info:
+            msg = (
+                f"Run ID {run_id} is a tool call and is expected to have "
+                f"inputs associated with it."
+            )
+            raise AssertionError(msg)
+        inputs = run_info["inputs"]
+        return run_info, inputs
+
     @override
     async def on_tool_start(
         self,
@@ -634,17 +674,43 @@ class _AstreamEventsCallbackHandler(AsyncCallbackHandler, _StreamingCallbackHand
             "tool",
         )

+    @override
+    async def on_tool_error(
+        self,
+        error: BaseException,
+        *,
+        run_id: UUID,
+        parent_run_id: Optional[UUID] = None,
+        tags: Optional[list[str]] = None,
+        **kwargs: Any,
+    ) -> None:
+        """Run when tool errors."""
+        run_info, inputs = self._get_tool_run_info_with_inputs(run_id)
+
+        self._send(
+            {
+                "event": "on_tool_error",
+                "data": {
+                    "error": error,
+                    "input": inputs,
+                },
+                "run_id": str(run_id),
+                "name": run_info["name"],
+                "tags": run_info["tags"],
+                "metadata": run_info["metadata"],
+                "parent_ids": self._get_parent_ids(run_id),
+            },
+            "tool",
+        )
+
     @override
     async def on_tool_end(self, output: Any, *, run_id: UUID, **kwargs: Any) -> None:
-        """End a trace for a tool run.
-
-
-
-
-
-            )
-            raise AssertionError(msg)
-        inputs = run_info["inputs"]
+        """End a trace for a tool run.
+
+        Raises:
+            AssertionError: If the run ID is a tool call and does not have inputs
+        """
+        run_info, inputs = self._get_tool_run_info_with_inputs(run_id)

         self._send(
             {
@@ -718,7 +784,7 @@ class _AstreamEventsCallbackHandler(AsyncCallbackHandler, _StreamingCallbackHand
                 "event": "on_retriever_end",
                 "data": {
                     "output": documents,
-                    "input": run_info
+                    "input": run_info.get("inputs"),
                 },
                 "run_id": str(run_id),
                 "name": run_info["name"],
@@ -730,11 +796,11 @@ class _AstreamEventsCallbackHandler(AsyncCallbackHandler, _StreamingCallbackHand
         )

     def __deepcopy__(self, memo: dict) -> _AstreamEventsCallbackHandler:
-        """
+        """Return self."""
         return self

     def __copy__(self) -> _AstreamEventsCallbackHandler:
-        """
+        """Return self."""
         return self


@@ -751,14 +817,6 @@ async def _astream_events_implementation_v1(
     exclude_tags: Optional[Sequence[str]] = None,
     **kwargs: Any,
 ) -> AsyncIterator[StandardStreamEvent]:
-    from langchain_core.runnables import ensure_config
-    from langchain_core.runnables.utils import _RootEventFilter
-    from langchain_core.tracers.log_stream import (
-        LogStreamCallbackHandler,
-        RunLog,
-        _astream_log_implementation,
-    )
-
     stream = LogStreamCallbackHandler(
         auto_close=False,
         include_names=include_names,
@@ -838,12 +896,12 @@ async def _astream_events_implementation_v1(
                 # Usually they will NOT be available for components that operate
                 # on streams, since those components stream the input and
                 # don't know its final value until the end of the stream.
-                inputs = log_entry
+                inputs = log_entry.get("inputs")
                 if inputs is not None:
                     data["input"] = inputs

             if event_type == "end":
-                inputs = log_entry
+                inputs = log_entry.get("inputs")
                 if inputs is not None:
                     data["input"] = inputs

@@ -936,9 +994,6 @@ async def _astream_events_implementation_v2(
     **kwargs: Any,
 ) -> AsyncIterator[StandardStreamEvent]:
     """Implementation of the astream events API for V2 runnables."""
-    from langchain_core.callbacks.base import BaseCallbackManager
-    from langchain_core.runnables import ensure_config
-
     event_streamer = _AstreamEventsCallbackHandler(
         include_names=include_names,
         include_types=include_types,
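The new on_tool_error branch above means tool failures now surface as their own stream event instead of being dropped. The following standalone sketch (not part of the diff) shows how that would look from the public astream_events API, assuming @tool-decorated runnables and the event names behave as in recent langchain-core releases:

# Sketch only: event names and @tool behavior assumed from recent langchain-core.
import asyncio

from langchain_core.tools import tool


@tool
def always_fails(question: str) -> str:
    """A tool that always raises."""
    raise ValueError("boom")


async def main() -> None:
    try:
        async for event in always_fails.astream_events({"question": "hi"}):
            if event["event"] == "on_tool_error":
                print(event["name"], type(event["data"]["error"]).__name__)
    except ValueError:
        pass  # the original error still propagates after the event is emitted


asyncio.run(main())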
langchain_core/tracers/langchain.py
CHANGED
@@ -8,7 +8,7 @@ from datetime import datetime, timezone
 from typing import TYPE_CHECKING, Any, Optional, Union
 from uuid import UUID

-from langsmith import Client
+from langsmith import Client, get_tracing_context
 from langsmith import run_trees as rt
 from langsmith import utils as ls_utils
 from tenacity import (
@@ -53,7 +53,11 @@ def wait_for_all_tracers() -> None:


 def get_client() -> Client:
-    """Get the client.
+    """Get the client.
+
+    Returns:
+        The LangSmith client.
+    """
     return rt.get_cached_client()


@@ -109,6 +113,8 @@ class LangChainTracer(BaseTracer):
         super()._start_trace(run)
         if run.ls_client is None:
             run.ls_client = self.client
+        if get_tracing_context().get("enabled") is False:
+            run.extra["__disabled"] = True

     def on_chat_model_start(
         self,
@@ -201,6 +207,8 @@ class LangChainTracer(BaseTracer):

     def _persist_run_single(self, run: Run) -> None:
         """Persist a run."""
+        if run.extra.get("__disabled"):
+            return
         try:
             run.extra["runtime"] = get_runtime_environment()
             run.tags = self._get_tags(run)
@@ -214,6 +222,8 @@ class LangChainTracer(BaseTracer):

     def _update_run_single(self, run: Run) -> None:
         """Update a run."""
+        if run.extra.get("__disabled"):
+            return
         try:
             run.patch(exclude_inputs=run.extra.get("inputs_is_truthy", False))
         except Exception as e:
@@ -235,7 +245,6 @@ class LangChainTracer(BaseTracer):
         chunk: Optional[Union[GenerationChunk, ChatGenerationChunk]] = None,
         parent_run_id: Optional[UUID] = None,
     ) -> Run:
-        """Append token event to LLM run and return the run."""
         run_id_str = str(run_id)
         if run_id_str not in self.run_has_token_event_map:
             self.run_has_token_event_map[run_id_str] = True
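A rough sketch of the behavior the __disabled flag above enables: runs started while LangSmith tracing is turned off are never persisted or updated. This assumes tracing_context is exported from the langsmith package alongside get_tracing_context, as in current releases:

# Assumption: langsmith exposes tracing_context alongside get_tracing_context.
from langsmith import tracing_context

from langchain_core.runnables import RunnableLambda
from langchain_core.tracers.langchain import LangChainTracer

chain = RunnableLambda(lambda x: x + 1)
tracer = LangChainTracer()

with tracing_context(enabled=False):
    # Runs started here get extra["__disabled"] = True, so
    # _persist_run_single and _update_run_single return early.
    chain.invoke(1, config={"callbacks": [tracer]})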
langchain_core/tracers/langchain_v1.py
CHANGED
@@ -7,7 +7,11 @@ from typing import Any


 def get_headers(*args: Any, **kwargs: Any) -> Any: # noqa: ARG001
-    """Throw an error because this has been replaced by get_headers.
+    """Throw an error because this has been replaced by get_headers.
+
+    Raises:
+        RuntimeError: Always, because this function is deprecated.
+    """
     msg = (
         "get_headers for LangChainTracerV1 is no longer supported. "
         "Please use LangChainTracer instead."
@@ -16,7 +20,11 @@ def get_headers(*args: Any, **kwargs: Any) -> Any: # noqa: ARG001


 def LangChainTracerV1(*args: Any, **kwargs: Any) -> Any: # noqa: N802,ARG001
-    """Throw an error because this has been replaced by LangChainTracer
+    """Throw an error because this has been replaced by ``LangChainTracer``.
+
+    Raises:
+        RuntimeError: Always, because this class is deprecated.
+    """
     msg = (
         "LangChainTracerV1 is no longer supported. Please use LangChainTracer instead."
     )
langchain_core/tracers/log_stream.py
CHANGED
@@ -7,6 +7,7 @@ import contextlib
 import copy
 import threading
 from collections import defaultdict
+from pprint import pformat
 from typing import (
     TYPE_CHECKING,
     Any,
@@ -20,10 +21,11 @@ from typing import (
 import jsonpatch  # type: ignore[import-untyped]
 from typing_extensions import NotRequired, TypedDict, override

+from langchain_core.callbacks.base import BaseCallbackManager
 from langchain_core.load import dumps
 from langchain_core.load.load import load
 from langchain_core.outputs import ChatGenerationChunk, GenerationChunk
-from langchain_core.runnables import
+from langchain_core.runnables import RunnableConfig, ensure_config
 from langchain_core.tracers._streaming import _StreamingCallbackHandler
 from langchain_core.tracers.base import BaseTracer
 from langchain_core.tracers.memory_stream import _MemoryStream
@@ -32,6 +34,7 @@ if TYPE_CHECKING:
     from collections.abc import AsyncIterator, Iterator, Sequence
     from uuid import UUID

+    from langchain_core.runnables import Runnable
     from langchain_core.runnables.utils import Input, Output
     from langchain_core.tracers.schemas import Run

@@ -110,7 +113,17 @@ class RunLogPatch:
         self.ops = list(ops)

     def __add__(self, other: Union[RunLogPatch, Any]) -> RunLog:
-        """Combine two RunLogPatch instances.
+        """Combine two ``RunLogPatch`` instances.
+
+        Args:
+            other: The other ``RunLogPatch`` to combine with.
+
+        Raises:
+            TypeError: If the other object is not a ``RunLogPatch``.
+
+        Returns:
+            A new ``RunLog`` representing the combination of the two.
+        """
         if type(other) is RunLogPatch:
             ops = self.ops + other.ops
             state = jsonpatch.apply_patch(None, copy.deepcopy(ops))
@@ -121,8 +134,6 @@ class RunLogPatch:

     @override
     def __repr__(self) -> str:
-        from pprint import pformat
-
         # 1:-1 to get rid of the [] around the list
         return f"RunLogPatch({pformat(self.ops)[1:-1]})"

@@ -150,7 +161,17 @@ class RunLog(RunLogPatch):
         self.state = state

     def __add__(self, other: Union[RunLogPatch, Any]) -> RunLog:
-        """Combine two
+        """Combine two ``RunLog``s.
+
+        Args:
+            other: The other ``RunLog`` or ``RunLogPatch`` to combine with.
+
+        Raises:
+            TypeError: If the other object is not a ``RunLog`` or ``RunLogPatch``.
+
+        Returns:
+            A new ``RunLog`` representing the combination of the two.
+        """
         if type(other) is RunLogPatch:
             ops = self.ops + other.ops
             state = jsonpatch.apply_patch(self.state, other.ops)
@@ -161,13 +182,18 @@ class RunLog(RunLogPatch):

     @override
     def __repr__(self) -> str:
-        from pprint import pformat
-
         return f"RunLog({pformat(self.state)})"

     @override
     def __eq__(self, other: object) -> bool:
-        """Check if two
+        """Check if two ``RunLog``s are equal.
+
+        Args:
+            other: The other ``RunLog`` to compare to.
+
+        Returns:
+            True if the ``RunLog``s are equal, False otherwise.
+        """
         # First compare that the state is the same
         if not isinstance(other, RunLog):
             return False
@@ -250,7 +276,11 @@ class LogStreamCallbackHandler(BaseTracer, _StreamingCallbackHandler):
         self.root_id: Optional[UUID] = None

     def __aiter__(self) -> AsyncIterator[RunLogPatch]:
-        """Iterate over the stream of run logs.
+        """Iterate over the stream of run logs.
+
+        Returns:
+            An async iterator over the run log patches.
+        """
         return self.receive_stream.__aiter__()

     def send(self, *ops: dict[str, Any]) -> bool:
@@ -623,15 +653,24 @@ async def _astream_log_implementation(

     The implementation has been factored out (at least temporarily) as both
     astream_log and astream_events relies on it.
-    """
-    import jsonpatch
-
-    from langchain_core.callbacks.base import BaseCallbackManager
-    from langchain_core.tracers.log_stream import (
-        RunLog,
-        RunLogPatch,
-    )

+    Args:
+        runnable: The runnable to run in streaming mode.
+        value: The input to the runnable.
+        config: The config to pass to the runnable.
+        stream: The stream to send the run logs to.
+        diff: Whether to yield run log patches (True) or full run logs (False).
+        with_streamed_output_list: Whether to include a list of all streamed
+            outputs in each patch. If False, only the final output will be included
+            in the patches.
+        **kwargs: Additional keyword arguments to pass to the runnable.
+
+    Raises:
+        ValueError: If the callbacks in the config are of an unexpected type.
+
+    Yields:
+        The run log patches or states, depending on the value of ``diff``.
+    """
     # Assign the stream handler to the config
     config = ensure_config(config)
     callbacks = config.get("callbacks")
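For reference, the __add__ docstrings above describe the jsonpatch-style arithmetic that astream_log builds on. A minimal standalone sketch, with illustrative op payloads:

from langchain_core.tracers.log_stream import RunLog, RunLogPatch

# Two patches, each holding jsonpatch ops like those emitted by astream_log.
first = RunLogPatch({"op": "replace", "path": "", "value": {"logs": {}}})
second = RunLogPatch({"op": "add", "path": "/final_output", "value": {"answer": 42}})

# RunLogPatch + RunLogPatch applies both op lists to an empty state -> RunLog.
log = first + second
print(log.state)  # {'logs': {}, 'final_output': {'answer': 42}}

# RunLog + RunLogPatch patches the existing state; anything else raises TypeError.
log = log + RunLogPatch({"op": "replace", "path": "/final_output", "value": None})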
langchain_core/tracers/root_listeners.py
CHANGED
@@ -21,18 +21,10 @@ AsyncListener = Union[


 class RootListenersTracer(BaseTracer):
-    """Tracer that calls listeners on run start, end, and error.
-
-    Parameters:
-        log_missing_parent: Whether to log a warning if the parent is missing.
-            Default is False.
-        config: The runnable config.
-        on_start: The listener to call on run start.
-        on_end: The listener to call on run end.
-        on_error: The listener to call on run error.
-    """
+    """Tracer that calls listeners on run start, end, and error."""

     log_missing_parent = False
+    """Whether to log a warning if the parent is missing. Default is False."""

     def __init__(
         self,
@@ -84,18 +76,10 @@ class RootListenersTracer(BaseTracer):


 class AsyncRootListenersTracer(AsyncBaseTracer):
-    """Async Tracer that calls listeners on run start, end, and error.
-
-    Parameters:
-        log_missing_parent: Whether to log a warning if the parent is missing.
-            Default is False.
-        config: The runnable config.
-        on_start: The listener to call on run start.
-        on_end: The listener to call on run end.
-        on_error: The listener to call on run error.
-    """
+    """Async Tracer that calls listeners on run start, end, and error."""

     log_missing_parent = False
+    """Whether to log a warning if the parent is missing. Default is False."""

     def __init__(
         self,
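RootListenersTracer is the tracer behind Runnable.with_listeners; a short usage sketch (listener signatures assumed to accept the finished Run object):

from langchain_core.runnables import RunnableLambda


def announce_start(run) -> None:
    print("started:", run.name)


def announce_end(run) -> None:
    print("ended:", run.name, run.outputs)


chain = RunnableLambda(lambda x: x * 2).with_listeners(
    on_start=announce_start, on_end=announce_end
)
chain.invoke(3)  # listeners fire for the root run only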
langchain_core/tracers/run_collector.py
CHANGED
@@ -11,12 +11,6 @@ class RunCollectorCallbackHandler(BaseTracer):
     """Tracer that collects all nested runs in a list.

     This tracer is useful for inspection and evaluation purposes.
-
-    Parameters
-    ----------
-    name : str, default="run-collector_callback_handler"
-    example_id : Optional[Union[UUID, str]], default=None
-        The ID of the example being traced. It can be either a UUID or a string.
     """

     name: str = "run-collector_callback_handler"
@@ -26,12 +20,10 @@ class RunCollectorCallbackHandler(BaseTracer):
     ) -> None:
         """Initialize the RunCollectorCallbackHandler.

-
-
-
-
-        **kwargs : Any
-            Additional keyword arguments
+        Args:
+            example_id: The ID of the example being traced. (default: None).
+                It can be either a UUID or a string.
+            **kwargs: Additional keyword arguments.
         """
         super().__init__(**kwargs)
         self.example_id = (
@@ -42,10 +34,8 @@ class RunCollectorCallbackHandler(BaseTracer):
     def _persist_run(self, run: Run) -> None:
         """Persist a run by adding it to the traced_runs list.

-
-
-        run : Run
-            The run to be persisted.
+        Args:
+            run: The run to be persisted.
         """
         run_ = run.copy()
         run_.reference_example_id = self.example_id
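A brief usage sketch for the collector documented above:

from langchain_core.runnables import RunnableLambda
from langchain_core.tracers.run_collector import RunCollectorCallbackHandler

collector = RunCollectorCallbackHandler()
RunnableLambda(lambda x: x + 1).invoke(1, config={"callbacks": [collector]})

# Every nested run is appended to traced_runs for later inspection.
print([run.run_type for run in collector.traced_runs])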
langchain_core/tracers/schemas.py
CHANGED
@@ -18,7 +18,11 @@ from langchain_core._api import deprecated

 @deprecated("0.1.0", alternative="Use string instead.", removal="1.0")
 def RunTypeEnum() -> type[RunTypeEnumDep]:  # noqa: N802
-    """RunTypeEnum
+    """``RunTypeEnum``.
+
+    Returns:
+        The ``RunTypeEnum`` class.
+    """
     warnings.warn(
         "RunTypeEnum is deprecated. Please directly use a string instead"
         " (e.g. 'llm', 'chain', 'tool').",
langchain_core/utils/aiter.py
CHANGED
@@ -37,7 +37,7 @@ _no_default = object()
 # before 3.10, the builtin anext() was not available
 def py_anext(
     iterator: AsyncIterator[T], default: Union[T, Any] = _no_default
-) -> Awaitable[Union[T,
+) -> Awaitable[Union[T, Any, None]]:
     """Pure-Python implementation of anext() for testing purposes.

     Closely matches the builtin anext() C implementation.
@@ -94,7 +94,7 @@ class NoLock:
         exc_val: Optional[BaseException],
         exc_tb: Optional[TracebackType],
     ) -> bool:
-        """
+        """Return False, exception not suppressed."""
         return False


@@ -236,7 +236,11 @@ class Tee(Generic[T]):
         return self._children[item]

     def __iter__(self) -> Iterator[AsyncIterator[T]]:
-        """Iterate over the child iterators.
+        """Iterate over the child iterators.
+
+        Yields:
+            The child iterators.
+        """
         yield from self._children

     async def __aenter__(self) -> "Tee[T]":
@@ -249,7 +253,11 @@ class Tee(Generic[T]):
         exc_val: Optional[BaseException],
         exc_tb: Optional[TracebackType],
     ) -> bool:
-        """Close all child iterators.
+        """Close all child iterators.
+
+        Returns:
+            False, exceptions not suppressed.
+        """
         await self.aclose()
         return False

@@ -318,8 +326,8 @@ async def abatch_iterate(
         size: The size of the batch.
         iterable: The async iterable to batch.

-
-
+    Yields:
+        The batches.
     """
     batch: list[T] = []
     async for element in iterable:
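A quick sketch of abatch_iterate, whose Yields section is fixed above (argument order taken from the docstring):

import asyncio
from collections.abc import AsyncIterator

from langchain_core.utils.aiter import abatch_iterate


async def numbers() -> AsyncIterator[int]:
    for i in range(7):
        yield i


async def main() -> None:
    async for batch in abatch_iterate(3, numbers()):
        print(batch)  # [0, 1, 2] then [3, 4, 5] then [6]


asyncio.run(main())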
langchain_core/utils/env.py
CHANGED
@@ -39,6 +39,9 @@ def get_from_dict_or_env(
             in the dictionary.
         default: The default value to return if the key is not in the dictionary
             or the environment. Defaults to None.
+
+    Returns:
+        The dict value or the environment variable value.
     """
     if isinstance(key, (list, tuple)):
         for k in key: