langchain-core 0.3.75__py3-none-any.whl → 0.3.76__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of langchain-core might be problematic.
- langchain_core/_api/beta_decorator.py +17 -40
- langchain_core/_api/deprecation.py +19 -6
- langchain_core/_api/path.py +19 -2
- langchain_core/_import_utils.py +7 -0
- langchain_core/agents.py +10 -6
- langchain_core/beta/runnables/context.py +1 -2
- langchain_core/callbacks/base.py +11 -4
- langchain_core/callbacks/manager.py +81 -69
- langchain_core/callbacks/usage.py +4 -2
- langchain_core/chat_history.py +4 -6
- langchain_core/document_loaders/base.py +34 -9
- langchain_core/document_loaders/langsmith.py +3 -0
- langchain_core/documents/base.py +35 -10
- langchain_core/documents/transformers.py +4 -2
- langchain_core/embeddings/fake.py +8 -5
- langchain_core/env.py +2 -3
- langchain_core/example_selectors/base.py +12 -0
- langchain_core/exceptions.py +7 -0
- langchain_core/globals.py +17 -28
- langchain_core/indexing/api.py +56 -44
- langchain_core/indexing/base.py +5 -8
- langchain_core/indexing/in_memory.py +23 -3
- langchain_core/language_models/__init__.py +3 -2
- langchain_core/language_models/base.py +31 -20
- langchain_core/language_models/chat_models.py +94 -25
- langchain_core/language_models/fake_chat_models.py +5 -7
- langchain_core/language_models/llms.py +49 -17
- langchain_core/load/dump.py +2 -3
- langchain_core/load/load.py +15 -1
- langchain_core/load/serializable.py +38 -43
- langchain_core/memory.py +7 -3
- langchain_core/messages/ai.py +36 -19
- langchain_core/messages/base.py +13 -6
- langchain_core/messages/content_blocks.py +23 -2
- langchain_core/messages/human.py +2 -6
- langchain_core/messages/system.py +2 -6
- langchain_core/messages/tool.py +33 -13
- langchain_core/messages/utils.py +182 -72
- langchain_core/output_parsers/base.py +5 -2
- langchain_core/output_parsers/json.py +4 -4
- langchain_core/output_parsers/list.py +7 -22
- langchain_core/output_parsers/openai_functions.py +3 -0
- langchain_core/output_parsers/openai_tools.py +6 -1
- langchain_core/output_parsers/pydantic.py +4 -0
- langchain_core/output_parsers/string.py +5 -1
- langchain_core/output_parsers/xml.py +19 -19
- langchain_core/outputs/chat_generation.py +18 -7
- langchain_core/outputs/generation.py +14 -3
- langchain_core/outputs/llm_result.py +8 -1
- langchain_core/prompt_values.py +10 -4
- langchain_core/prompts/base.py +4 -9
- langchain_core/prompts/chat.py +87 -58
- langchain_core/prompts/dict.py +16 -8
- langchain_core/prompts/few_shot.py +9 -11
- langchain_core/prompts/few_shot_with_templates.py +5 -1
- langchain_core/prompts/image.py +12 -5
- langchain_core/prompts/message.py +5 -6
- langchain_core/prompts/pipeline.py +13 -8
- langchain_core/prompts/prompt.py +22 -8
- langchain_core/prompts/string.py +18 -10
- langchain_core/prompts/structured.py +7 -2
- langchain_core/rate_limiters.py +2 -2
- langchain_core/retrievers.py +7 -6
- langchain_core/runnables/base.py +402 -183
- langchain_core/runnables/branch.py +14 -19
- langchain_core/runnables/config.py +9 -15
- langchain_core/runnables/configurable.py +34 -19
- langchain_core/runnables/fallbacks.py +20 -13
- langchain_core/runnables/graph.py +44 -37
- langchain_core/runnables/graph_ascii.py +40 -17
- langchain_core/runnables/graph_mermaid.py +27 -15
- langchain_core/runnables/graph_png.py +27 -31
- langchain_core/runnables/history.py +55 -58
- langchain_core/runnables/passthrough.py +44 -21
- langchain_core/runnables/retry.py +9 -5
- langchain_core/runnables/router.py +9 -8
- langchain_core/runnables/schema.py +2 -0
- langchain_core/runnables/utils.py +51 -89
- langchain_core/stores.py +13 -25
- langchain_core/sys_info.py +9 -8
- langchain_core/tools/base.py +30 -23
- langchain_core/tools/convert.py +24 -13
- langchain_core/tools/simple.py +35 -3
- langchain_core/tools/structured.py +25 -2
- langchain_core/tracers/base.py +2 -2
- langchain_core/tracers/context.py +5 -1
- langchain_core/tracers/core.py +109 -39
- langchain_core/tracers/evaluation.py +22 -26
- langchain_core/tracers/event_stream.py +40 -27
- langchain_core/tracers/langchain.py +12 -3
- langchain_core/tracers/langchain_v1.py +10 -2
- langchain_core/tracers/log_stream.py +56 -17
- langchain_core/tracers/root_listeners.py +4 -20
- langchain_core/tracers/run_collector.py +6 -16
- langchain_core/tracers/schemas.py +5 -1
- langchain_core/utils/aiter.py +14 -6
- langchain_core/utils/env.py +3 -0
- langchain_core/utils/function_calling.py +37 -20
- langchain_core/utils/interactive_env.py +6 -2
- langchain_core/utils/iter.py +11 -3
- langchain_core/utils/json.py +5 -2
- langchain_core/utils/json_schema.py +15 -5
- langchain_core/utils/loading.py +5 -1
- langchain_core/utils/mustache.py +24 -15
- langchain_core/utils/pydantic.py +32 -4
- langchain_core/utils/utils.py +24 -8
- langchain_core/vectorstores/base.py +7 -20
- langchain_core/vectorstores/in_memory.py +18 -12
- langchain_core/vectorstores/utils.py +18 -12
- langchain_core/version.py +1 -1
- langchain_core-0.3.76.dist-info/METADATA +77 -0
- langchain_core-0.3.76.dist-info/RECORD +174 -0
- langchain_core-0.3.75.dist-info/METADATA +0 -106
- langchain_core-0.3.75.dist-info/RECORD +0 -174
- {langchain_core-0.3.75.dist-info → langchain_core-0.3.76.dist-info}/WHEEL +0 -0
- {langchain_core-0.3.75.dist-info → langchain_core-0.3.76.dist-info}/entry_points.txt +0 -0
@@ -38,24 +38,27 @@ class EvaluatorCallbackHandler(BaseTracer):
     """Tracer that runs a run evaluator whenever a run is persisted.
 
     Attributes:
-        example_id : Union[UUID, None]
-            The example ID associated with the runs.
         client : Client
             The LangSmith client instance used for evaluating the runs.
-        evaluators : Sequence[RunEvaluator]
-            The sequence of run evaluators to be executed.
-        executor : ThreadPoolExecutor
-            The thread pool executor used for running the evaluators.
-        futures : set[Future]
-            The set of futures representing the running evaluators.
-        skip_unfinished : bool
-            Whether to skip runs that are not finished or raised
-            an error.
-        project_name : Optional[str]
-            The LangSmith project name to be organize eval chain runs under.
     """
 
     name: str = "evaluator_callback_handler"
+    example_id: Optional[UUID] = None
+    """The example ID associated with the runs."""
+    client: langsmith.Client
+    """The LangSmith client instance used for evaluating the runs."""
+    evaluators: Sequence[langsmith.RunEvaluator] = ()
+    """The sequence of run evaluators to be executed."""
+    executor: Optional[ThreadPoolExecutor] = None
+    """The thread pool executor used for running the evaluators."""
+    futures: weakref.WeakSet[Future] = weakref.WeakSet()
+    """The set of futures representing the running evaluators."""
+    skip_unfinished: bool = True
+    """Whether to skip runs that are not finished or raised an error."""
+    project_name: Optional[str] = None
+    """The LangSmith project name to be organize eval chain runs under."""
+    logged_eval_results: dict[tuple[str, str], list[EvaluationResult]]
+    lock: threading.Lock
 
     def __init__(
         self,

@@ -91,7 +94,7 @@ class EvaluatorCallbackHandler(BaseTracer):
         self.client = client or langchain_tracer.get_client()
         self.evaluators = evaluators
         if max_concurrency is None:
-            self.executor
+            self.executor = _get_executor()
         elif max_concurrency > 0:
             self.executor = ThreadPoolExecutor(max_workers=max_concurrency)
             weakref.finalize(

@@ -100,10 +103,10 @@ class EvaluatorCallbackHandler(BaseTracer):
             )
         else:
             self.executor = None
-        self.futures
+        self.futures = weakref.WeakSet()
         self.skip_unfinished = skip_unfinished
         self.project_name = project_name
-        self.logged_eval_results
+        self.logged_eval_results = {}
         self.lock = threading.Lock()
         _TRACERS.add(self)
 

@@ -111,12 +114,8 @@ class EvaluatorCallbackHandler(BaseTracer):
         """Evaluate the run in the project.
 
         Args:
-
-
-                The run to be evaluated.
-            evaluator : RunEvaluator
-                The evaluator to use for evaluating the run.
-
+            run: The run to be evaluated.
+            evaluator: The evaluator to use for evaluating the run.
         """
         try:
             if self.project_name is None:

@@ -202,10 +201,7 @@ class EvaluatorCallbackHandler(BaseTracer):
         """Run the evaluator on the run.
 
         Args:
-
-            run : Run
-                The run to be evaluated.
-
+            run: The run to be evaluated.
         """
         if self.skip_unfinished and not run.outputs:
             logger.debug("Skipping unfinished run %s", run.id)
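Aside (illustration only, not part of the diff): the new ``futures: weakref.WeakSet[Future]`` attribute above relies on standard-library behavior in which a WeakSet drops members once no strong reference to them remains, so completed evaluator futures do not pile up. A minimal, self-contained sketch of that behavior:

import weakref
from concurrent.futures import Future

futures: "weakref.WeakSet[Future]" = weakref.WeakSet()

f = Future()          # stand-in for a submitted evaluator task
futures.add(f)
print(len(futures))   # 1 while a strong reference to f exists

f.set_result(None)
del f                 # drop the last strong reference
print(len(futures))   # usually 0 on CPython: the WeakSet does not keep the Future alive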
@@ -9,21 +9,23 @@ from typing import (
     TYPE_CHECKING,
     Any,
     Optional,
+    TypedDict,
     TypeVar,
     Union,
     cast,
 )
 from uuid import UUID, uuid4
 
-from typing_extensions import NotRequired,
+from typing_extensions import NotRequired, override
 
-from langchain_core.callbacks.base import AsyncCallbackHandler
+from langchain_core.callbacks.base import AsyncCallbackHandler, BaseCallbackManager
 from langchain_core.messages import AIMessageChunk, BaseMessage, BaseMessageChunk
 from langchain_core.outputs import (
     ChatGenerationChunk,
     GenerationChunk,
     LLMResult,
 )
+from langchain_core.runnables import ensure_config
 from langchain_core.runnables.schema import (
     CustomStreamEvent,
     EventData,

@@ -36,6 +38,11 @@ from langchain_core.runnables.utils import (
     _RootEventFilter,
 )
 from langchain_core.tracers._streaming import _StreamingCallbackHandler
+from langchain_core.tracers.log_stream import (
+    LogStreamCallbackHandler,
+    RunLog,
+    _astream_log_implementation,
+)
 from langchain_core.tracers.memory_stream import _MemoryStream
 from langchain_core.utils.aiter import aclosing, py_anext
 

@@ -53,22 +60,20 @@ class RunInfo(TypedDict):
     """Information about a run.
 
     This is used to keep track of the metadata associated with a run.
-
-    Parameters:
-        name: The name of the run.
-        tags: The tags associated with the run.
-        metadata: The metadata associated with the run.
-        run_type: The type of the run.
-        inputs: The inputs to the run.
-        parent_run_id: The ID of the parent run.
     """
 
     name: str
+    """The name of the run."""
     tags: list[str]
+    """The tags associated with the run."""
     metadata: dict[str, Any]
+    """The metadata associated with the run."""
     run_type: str
+    """The type of the run."""
     inputs: NotRequired[Any]
+    """The inputs to the run."""
     parent_run_id: Optional[UUID]
+    """The ID of the parent run."""
 
 
 def _assign_name(name: Optional[str], serialized: Optional[dict[str, Any]]) -> str:

@@ -155,7 +160,11 @@ class _AstreamEventsCallbackHandler(AsyncCallbackHandler, _StreamingCallbackHand
         self.send_stream.send_nowait(event)
 
     def __aiter__(self) -> AsyncIterator[Any]:
-        """Iterate over the receive stream.
+        """Iterate over the receive stream.
+
+        Returns:
+            An async iterator over the receive stream.
+        """
         return self.receive_stream.__aiter__()
 
     async def tap_output_aiter(

@@ -412,7 +421,6 @@ class _AstreamEventsCallbackHandler(AsyncCallbackHandler, _StreamingCallbackHand
         parent_run_id: Optional[UUID] = None,
         **kwargs: Any,
     ) -> None:
-        """Run on new LLM token. Only available when streaming is enabled."""
         run_info = self.run_map.get(run_id)
         chunk_: Union[GenerationChunk, BaseMessageChunk]
 

@@ -458,7 +466,15 @@ class _AstreamEventsCallbackHandler(AsyncCallbackHandler, _StreamingCallbackHand
     async def on_llm_end(
         self, response: LLMResult, *, run_id: UUID, **kwargs: Any
     ) -> None:
-        """End a trace for an LLM run.
+        """End a trace for an LLM run.
+
+        Args:
+            response (LLMResult): The response which was generated.
+            run_id (UUID): The run ID. This is the ID of the current run.
+
+        Raises:
+            ValueError: If the run type is not ``'llm'`` or ``'chat_model'``.
+        """
         run_info = self.run_map.pop(run_id)
         inputs_ = run_info["inputs"]
 

@@ -636,7 +652,15 @@ class _AstreamEventsCallbackHandler(AsyncCallbackHandler, _StreamingCallbackHand
 
     @override
     async def on_tool_end(self, output: Any, *, run_id: UUID, **kwargs: Any) -> None:
-        """End a trace for a tool run.
+        """End a trace for a tool run.
+
+        Args:
+            output: The output of the tool.
+            run_id: The run ID. This is the ID of the current run.
+
+        Raises:
+            AssertionError: If the run ID is a tool call and does not have inputs
+        """
         run_info = self.run_map.pop(run_id)
         if "inputs" not in run_info:
             msg = (

@@ -730,11 +754,11 @@ class _AstreamEventsCallbackHandler(AsyncCallbackHandler, _StreamingCallbackHand
         )
 
     def __deepcopy__(self, memo: dict) -> _AstreamEventsCallbackHandler:
-        """
+        """Return self."""
         return self
 
     def __copy__(self) -> _AstreamEventsCallbackHandler:
-        """
+        """Return self."""
         return self
 
 

@@ -751,14 +775,6 @@ async def _astream_events_implementation_v1(
     exclude_tags: Optional[Sequence[str]] = None,
     **kwargs: Any,
 ) -> AsyncIterator[StandardStreamEvent]:
-    from langchain_core.runnables import ensure_config
-    from langchain_core.runnables.utils import _RootEventFilter
-    from langchain_core.tracers.log_stream import (
-        LogStreamCallbackHandler,
-        RunLog,
-        _astream_log_implementation,
-    )
-
     stream = LogStreamCallbackHandler(
         auto_close=False,
         include_names=include_names,

@@ -936,9 +952,6 @@ async def _astream_events_implementation_v2(
     **kwargs: Any,
 ) -> AsyncIterator[StandardStreamEvent]:
     """Implementation of the astream events API for V2 runnables."""
-    from langchain_core.callbacks.base import BaseCallbackManager
-    from langchain_core.runnables import ensure_config
-
     event_streamer = _AstreamEventsCallbackHandler(
         include_names=include_names,
         include_types=include_types,
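Aside (illustration only, not part of the diff): the ``RunInfo`` hunk above moves the per-field descriptions into attribute docstrings while keeping ``inputs`` optional via ``NotRequired``. A small standalone sketch of that TypedDict pattern, using a hypothetical ``ExampleRunInfo`` that mirrors the same field shape:

from typing import Any, Optional
from uuid import UUID

from typing_extensions import NotRequired, TypedDict

class ExampleRunInfo(TypedDict):
    name: str
    tags: list[str]
    metadata: dict[str, Any]
    run_type: str
    inputs: NotRequired[Any]       # key may be omitted entirely
    parent_run_id: Optional[UUID]  # key is required, but its value may be None

# Valid without "inputs"; omitting "parent_run_id" would be a type-checker error.
info: ExampleRunInfo = {
    "name": "my_run",
    "tags": [],
    "metadata": {},
    "run_type": "chain",
    "parent_run_id": None,
}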
@@ -8,7 +8,7 @@ from datetime import datetime, timezone
 from typing import TYPE_CHECKING, Any, Optional, Union
 from uuid import UUID
 
-from langsmith import Client
+from langsmith import Client, get_tracing_context
 from langsmith import run_trees as rt
 from langsmith import utils as ls_utils
 from tenacity import (

@@ -53,7 +53,11 @@ def wait_for_all_tracers() -> None:
 
 
 def get_client() -> Client:
-    """Get the client.
+    """Get the client.
+
+    Returns:
+        The LangSmith client.
+    """
     return rt.get_cached_client()
 
 

@@ -109,6 +113,8 @@ class LangChainTracer(BaseTracer):
         super()._start_trace(run)
         if run.ls_client is None:
             run.ls_client = self.client
+        if get_tracing_context().get("enabled") is False:
+            run.extra["__disabled"] = True
 
     def on_chat_model_start(
         self,

@@ -201,6 +207,8 @@ class LangChainTracer(BaseTracer):
 
     def _persist_run_single(self, run: Run) -> None:
         """Persist a run."""
+        if run.extra.get("__disabled"):
+            return
         try:
             run.extra["runtime"] = get_runtime_environment()
             run.tags = self._get_tags(run)

@@ -214,6 +222,8 @@ class LangChainTracer(BaseTracer):
 
     def _update_run_single(self, run: Run) -> None:
         """Update a run."""
+        if run.extra.get("__disabled"):
+            return
         try:
             run.patch(exclude_inputs=run.extra.get("inputs_is_truthy", False))
         except Exception as e:

@@ -235,7 +245,6 @@ class LangChainTracer(BaseTracer):
         chunk: Optional[Union[GenerationChunk, ChatGenerationChunk]] = None,
         parent_run_id: Optional[UUID] = None,
     ) -> Run:
-        """Append token event to LLM run and return the run."""
         run_id_str = str(run_id)
         if run_id_str not in self.run_has_token_event_map:
             self.run_has_token_event_map[run_id_str] = True
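Aside (illustration only): the ``LangChainTracer`` hunks above mark a run with ``extra["__disabled"]`` when ``get_tracing_context()`` reports tracing as explicitly disabled, and the persist/update methods then return early. A minimal sketch of that guard, assuming only the ``get_tracing_context()`` call shown in the diff (the ``persist`` helper is hypothetical):

from langsmith import get_tracing_context

def tracing_disabled() -> bool:
    # The diff only relies on the "enabled" key being explicitly False
    # when tracing has been switched off for the current context.
    return get_tracing_context().get("enabled") is False

def persist(run_extra: dict) -> None:
    if run_extra.get("__disabled"):
        return  # mirrors the early return in _persist_run_single/_update_run_single
    ...  # otherwise, upload the run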
@@ -7,7 +7,11 @@ from typing import Any
 
 
 def get_headers(*args: Any, **kwargs: Any) -> Any: # noqa: ARG001
-    """Throw an error because this has been replaced by get_headers.
+    """Throw an error because this has been replaced by get_headers.
+
+    Raises:
+        RuntimeError: Always, because this function is deprecated.
+    """
     msg = (
         "get_headers for LangChainTracerV1 is no longer supported. "
         "Please use LangChainTracer instead."

@@ -16,7 +20,11 @@ def get_headers(*args: Any, **kwargs: Any) -> Any: # noqa: ARG001
 
 
 def LangChainTracerV1(*args: Any, **kwargs: Any) -> Any: # noqa: N802,ARG001
-    """Throw an error because this has been replaced by LangChainTracer
+    """Throw an error because this has been replaced by ``LangChainTracer``.
+
+    Raises:
+        RuntimeError: Always, because this class is deprecated.
+    """
     msg = (
         "LangChainTracerV1 is no longer supported. Please use LangChainTracer instead."
     )
@@ -7,6 +7,7 @@ import contextlib
 import copy
 import threading
 from collections import defaultdict
+from pprint import pformat
 from typing import (
     TYPE_CHECKING,
     Any,

@@ -20,10 +21,11 @@ from typing import (
 import jsonpatch  # type: ignore[import-untyped]
 from typing_extensions import NotRequired, TypedDict, override
 
+from langchain_core.callbacks.base import BaseCallbackManager
 from langchain_core.load import dumps
 from langchain_core.load.load import load
 from langchain_core.outputs import ChatGenerationChunk, GenerationChunk
-from langchain_core.runnables import
+from langchain_core.runnables import RunnableConfig, ensure_config
 from langchain_core.tracers._streaming import _StreamingCallbackHandler
 from langchain_core.tracers.base import BaseTracer
 from langchain_core.tracers.memory_stream import _MemoryStream

@@ -32,6 +34,7 @@ if TYPE_CHECKING:
     from collections.abc import AsyncIterator, Iterator, Sequence
     from uuid import UUID
 
+    from langchain_core.runnables import Runnable
     from langchain_core.runnables.utils import Input, Output
     from langchain_core.tracers.schemas import Run
 

@@ -110,7 +113,17 @@ class RunLogPatch:
         self.ops = list(ops)
 
     def __add__(self, other: Union[RunLogPatch, Any]) -> RunLog:
-        """Combine two RunLogPatch instances.
+        """Combine two ``RunLogPatch`` instances.
+
+        Args:
+            other: The other ``RunLogPatch`` to combine with.
+
+        Raises:
+            TypeError: If the other object is not a ``RunLogPatch``.
+
+        Returns:
+            A new ``RunLog`` representing the combination of the two.
+        """
         if type(other) is RunLogPatch:
             ops = self.ops + other.ops
             state = jsonpatch.apply_patch(None, copy.deepcopy(ops))

@@ -121,8 +134,6 @@ class RunLogPatch:
 
     @override
     def __repr__(self) -> str:
-        from pprint import pformat
-
         # 1:-1 to get rid of the [] around the list
         return f"RunLogPatch({pformat(self.ops)[1:-1]})"
 

@@ -150,7 +161,17 @@ class RunLog(RunLogPatch):
         self.state = state
 
     def __add__(self, other: Union[RunLogPatch, Any]) -> RunLog:
-        """Combine two
+        """Combine two ``RunLog``s.
+
+        Args:
+            other: The other ``RunLog`` or ``RunLogPatch`` to combine with.
+
+        Raises:
+            TypeError: If the other object is not a ``RunLog`` or ``RunLogPatch``.
+
+        Returns:
+            A new ``RunLog`` representing the combination of the two.
+        """
         if type(other) is RunLogPatch:
             ops = self.ops + other.ops
             state = jsonpatch.apply_patch(self.state, other.ops)

@@ -161,13 +182,18 @@ class RunLog(RunLogPatch):
 
     @override
     def __repr__(self) -> str:
-        from pprint import pformat
-
         return f"RunLog({pformat(self.state)})"
 
     @override
     def __eq__(self, other: object) -> bool:
-        """Check if two
+        """Check if two ``RunLog``s are equal.
+
+        Args:
+            other: The other ``RunLog`` to compare to.
+
+        Returns:
+            True if the ``RunLog``s are equal, False otherwise.
+        """
         # First compare that the state is the same
         if not isinstance(other, RunLog):
             return False

@@ -250,7 +276,11 @@ class LogStreamCallbackHandler(BaseTracer, _StreamingCallbackHandler):
         self.root_id: Optional[UUID] = None
 
     def __aiter__(self) -> AsyncIterator[RunLogPatch]:
-        """Iterate over the stream of run logs.
+        """Iterate over the stream of run logs.
+
+        Returns:
+            An async iterator over the run log patches.
+        """
         return self.receive_stream.__aiter__()
 
     def send(self, *ops: dict[str, Any]) -> bool:

@@ -623,15 +653,24 @@ async def _astream_log_implementation(
 
     The implementation has been factored out (at least temporarily) as both
     astream_log and astream_events relies on it.
-    """
-    import jsonpatch
-
-    from langchain_core.callbacks.base import BaseCallbackManager
-    from langchain_core.tracers.log_stream import (
-        RunLog,
-        RunLogPatch,
-    )
 
+    Args:
+        runnable: The runnable to run in streaming mode.
+        value: The input to the runnable.
+        config: The config to pass to the runnable.
+        stream: The stream to send the run logs to.
+        diff: Whether to yield run log patches (True) or full run logs (False).
+        with_streamed_output_list: Whether to include a list of all streamed
+            outputs in each patch. If False, only the final output will be included
+            in the patches.
+        **kwargs: Additional keyword arguments to pass to the runnable.
+
+    Raises:
+        ValueError: If the callbacks in the config are of an unexpected type.
+
+    Yields:
+        The run log patches or states, depending on the value of ``diff``.
+    """
     # Assign the stream handler to the config
     config = ensure_config(config)
     callbacks = config.get("callbacks")
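Aside (illustration only): ``RunLogPatch.__add__`` above builds a state with ``jsonpatch.apply_patch(None, ops)``. A standalone sketch of how a list of JSON Patch operations produces a state dict (the paths here are invented for the example):

import jsonpatch  # the same third-party library the module imports

ops = [
    {"op": "replace", "path": "", "value": {}},                     # start from an empty root
    {"op": "add", "path": "/streamed_output", "value": []},
    {"op": "add", "path": "/streamed_output/-", "value": "hello"},  # append to the list
]
state = jsonpatch.apply_patch(None, ops)
print(state)  # {'streamed_output': ['hello']}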
@@ -21,18 +21,10 @@ AsyncListener = Union[
 
 
 class RootListenersTracer(BaseTracer):
-    """Tracer that calls listeners on run start, end, and error.
-
-    Parameters:
-        log_missing_parent: Whether to log a warning if the parent is missing.
-            Default is False.
-        config: The runnable config.
-        on_start: The listener to call on run start.
-        on_end: The listener to call on run end.
-        on_error: The listener to call on run error.
-    """
+    """Tracer that calls listeners on run start, end, and error."""
 
     log_missing_parent = False
+    """Whether to log a warning if the parent is missing. Default is False."""
 
     def __init__(
         self,

@@ -84,18 +76,10 @@ class RootListenersTracer(BaseTracer):
 
 
 class AsyncRootListenersTracer(AsyncBaseTracer):
-    """Async Tracer that calls listeners on run start, end, and error.
-
-    Parameters:
-        log_missing_parent: Whether to log a warning if the parent is missing.
-            Default is False.
-        config: The runnable config.
-        on_start: The listener to call on run start.
-        on_end: The listener to call on run end.
-        on_error: The listener to call on run error.
-    """
+    """Async Tracer that calls listeners on run start, end, and error."""
 
     log_missing_parent = False
+    """Whether to log a warning if the parent is missing. Default is False."""
 
     def __init__(
         self,
@@ -11,12 +11,6 @@ class RunCollectorCallbackHandler(BaseTracer):
     """Tracer that collects all nested runs in a list.
 
     This tracer is useful for inspection and evaluation purposes.
-
-    Parameters
-    ----------
-    name : str, default="run-collector_callback_handler"
-    example_id : Optional[Union[UUID, str]], default=None
-        The ID of the example being traced. It can be either a UUID or a string.
     """
 
     name: str = "run-collector_callback_handler"

@@ -26,12 +20,10 @@ class RunCollectorCallbackHandler(BaseTracer):
     ) -> None:
         """Initialize the RunCollectorCallbackHandler.
 
-
-
-
-
-        **kwargs : Any
-            Additional keyword arguments
+        Args:
+            example_id: The ID of the example being traced. (default: None).
+                It can be either a UUID or a string.
+            **kwargs: Additional keyword arguments.
         """
         super().__init__(**kwargs)
         self.example_id = (

@@ -42,10 +34,8 @@ class RunCollectorCallbackHandler(BaseTracer):
     def _persist_run(self, run: Run) -> None:
         """Persist a run by adding it to the traced_runs list.
 
-
-
-        run : Run
-            The run to be persisted.
+        Args:
+            run: The run to be persisted.
         """
         run_ = run.copy()
         run_.reference_example_id = self.example_id
@@ -18,7 +18,11 @@ from langchain_core._api import deprecated
 
 @deprecated("0.1.0", alternative="Use string instead.", removal="1.0")
 def RunTypeEnum() -> type[RunTypeEnumDep]: # noqa: N802
-    """RunTypeEnum
+    """``RunTypeEnum``.
+
+    Returns:
+        The ``RunTypeEnum`` class.
+    """
     warnings.warn(
         "RunTypeEnum is deprecated. Please directly use a string instead"
         " (e.g. 'llm', 'chain', 'tool').",
langchain_core/utils/aiter.py  CHANGED

@@ -37,7 +37,7 @@ _no_default = object()
 # before 3.10, the builtin anext() was not available
 def py_anext(
     iterator: AsyncIterator[T], default: Union[T, Any] = _no_default
-) -> Awaitable[Union[T,
+) -> Awaitable[Union[T, Any, None]]:
     """Pure-Python implementation of anext() for testing purposes.
 
     Closely matches the builtin anext() C implementation.

@@ -94,7 +94,7 @@ class NoLock:
         exc_val: Optional[BaseException],
         exc_tb: Optional[TracebackType],
     ) -> bool:
-        """
+        """Return False, exception not suppressed."""
         return False
 
 

@@ -236,7 +236,11 @@ class Tee(Generic[T]):
         return self._children[item]
 
     def __iter__(self) -> Iterator[AsyncIterator[T]]:
-        """Iterate over the child iterators.
+        """Iterate over the child iterators.
+
+        Yields:
+            The child iterators.
+        """
         yield from self._children
 
     async def __aenter__(self) -> "Tee[T]":

@@ -249,7 +253,11 @@ class Tee(Generic[T]):
         exc_val: Optional[BaseException],
         exc_tb: Optional[TracebackType],
     ) -> bool:
-        """Close all child iterators.
+        """Close all child iterators.
+
+        Returns:
+            False, exceptions not suppressed.
+        """
         await self.aclose()
         return False
 

@@ -318,8 +326,8 @@ async def abatch_iterate(
         size: The size of the batch.
         iterable: The async iterable to batch.
 
-
-
+    Yields:
+        The batches.
     """
     batch: list[T] = []
     async for element in iterable:
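Aside (illustration only): the ``abatch_iterate`` docstring above now documents that the helper yields batches. A generic sketch of consuming an async iterable in fixed-size batches, written without assuming anything about the helper's exact signature:

import asyncio
from collections.abc import AsyncIterator

async def numbers(n: int) -> AsyncIterator[int]:
    for i in range(n):
        yield i

async def main() -> None:
    batch: list[int] = []
    async for element in numbers(7):
        batch.append(element)
        if len(batch) == 3:
            print(batch)  # [0, 1, 2] then [3, 4, 5]
            batch = []
    if batch:
        print(batch)      # [6], the final partial batch

asyncio.run(main())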
langchain_core/utils/env.py  CHANGED

@@ -39,6 +39,9 @@ def get_from_dict_or_env(
             in the dictionary.
         default: The default value to return if the key is not in the dictionary
             or the environment. Defaults to None.
+
+    Returns:
+        The dict value or the environment variable value.
     """
     if isinstance(key, (list, tuple)):
         for k in key: