mseep-agentops 0.4.18__py3-none-any.whl → 0.4.22__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agentops/__init__.py +0 -0
- agentops/client/api/base.py +28 -30
- agentops/client/api/versions/v3.py +29 -25
- agentops/client/api/versions/v4.py +87 -46
- agentops/client/client.py +98 -29
- agentops/client/http/README.md +87 -0
- agentops/client/http/http_client.py +126 -172
- agentops/config.py +8 -2
- agentops/instrumentation/OpenTelemetry.md +133 -0
- agentops/instrumentation/README.md +167 -0
- agentops/instrumentation/__init__.py +13 -1
- agentops/instrumentation/agentic/ag2/__init__.py +18 -0
- agentops/instrumentation/agentic/ag2/instrumentor.py +922 -0
- agentops/instrumentation/agentic/agno/__init__.py +19 -0
- agentops/instrumentation/agentic/agno/attributes/__init__.py +20 -0
- agentops/instrumentation/agentic/agno/attributes/agent.py +250 -0
- agentops/instrumentation/agentic/agno/attributes/metrics.py +214 -0
- agentops/instrumentation/agentic/agno/attributes/storage.py +158 -0
- agentops/instrumentation/agentic/agno/attributes/team.py +195 -0
- agentops/instrumentation/agentic/agno/attributes/tool.py +210 -0
- agentops/instrumentation/agentic/agno/attributes/workflow.py +254 -0
- agentops/instrumentation/agentic/agno/instrumentor.py +1313 -0
- agentops/instrumentation/agentic/crewai/LICENSE +201 -0
- agentops/instrumentation/agentic/crewai/NOTICE.md +10 -0
- agentops/instrumentation/agentic/crewai/__init__.py +6 -0
- agentops/instrumentation/agentic/crewai/crewai_span_attributes.py +335 -0
- agentops/instrumentation/agentic/crewai/instrumentation.py +535 -0
- agentops/instrumentation/agentic/crewai/version.py +1 -0
- agentops/instrumentation/agentic/google_adk/__init__.py +19 -0
- agentops/instrumentation/agentic/google_adk/instrumentor.py +68 -0
- agentops/instrumentation/agentic/google_adk/patch.py +767 -0
- agentops/instrumentation/agentic/haystack/__init__.py +1 -0
- agentops/instrumentation/agentic/haystack/instrumentor.py +186 -0
- agentops/instrumentation/agentic/langgraph/__init__.py +3 -0
- agentops/instrumentation/agentic/langgraph/attributes.py +54 -0
- agentops/instrumentation/agentic/langgraph/instrumentation.py +598 -0
- agentops/instrumentation/agentic/langgraph/version.py +1 -0
- agentops/instrumentation/agentic/openai_agents/README.md +156 -0
- agentops/instrumentation/agentic/openai_agents/SPANS.md +145 -0
- agentops/instrumentation/agentic/openai_agents/TRACING_API.md +144 -0
- agentops/instrumentation/agentic/openai_agents/__init__.py +30 -0
- agentops/instrumentation/agentic/openai_agents/attributes/common.py +549 -0
- agentops/instrumentation/agentic/openai_agents/attributes/completion.py +172 -0
- agentops/instrumentation/agentic/openai_agents/attributes/model.py +58 -0
- agentops/instrumentation/agentic/openai_agents/attributes/tokens.py +275 -0
- agentops/instrumentation/agentic/openai_agents/exporter.py +469 -0
- agentops/instrumentation/agentic/openai_agents/instrumentor.py +107 -0
- agentops/instrumentation/agentic/openai_agents/processor.py +58 -0
- agentops/instrumentation/agentic/smolagents/README.md +88 -0
- agentops/instrumentation/agentic/smolagents/__init__.py +12 -0
- agentops/instrumentation/agentic/smolagents/attributes/agent.py +354 -0
- agentops/instrumentation/agentic/smolagents/attributes/model.py +205 -0
- agentops/instrumentation/agentic/smolagents/instrumentor.py +286 -0
- agentops/instrumentation/agentic/smolagents/stream_wrapper.py +258 -0
- agentops/instrumentation/agentic/xpander/__init__.py +15 -0
- agentops/instrumentation/agentic/xpander/context.py +112 -0
- agentops/instrumentation/agentic/xpander/instrumentor.py +877 -0
- agentops/instrumentation/agentic/xpander/trace_probe.py +86 -0
- agentops/instrumentation/agentic/xpander/version.py +3 -0
- agentops/instrumentation/common/README.md +65 -0
- agentops/instrumentation/common/attributes.py +1 -2
- agentops/instrumentation/providers/anthropic/__init__.py +24 -0
- agentops/instrumentation/providers/anthropic/attributes/__init__.py +23 -0
- agentops/instrumentation/providers/anthropic/attributes/common.py +64 -0
- agentops/instrumentation/providers/anthropic/attributes/message.py +541 -0
- agentops/instrumentation/providers/anthropic/attributes/tools.py +231 -0
- agentops/instrumentation/providers/anthropic/event_handler_wrapper.py +90 -0
- agentops/instrumentation/providers/anthropic/instrumentor.py +146 -0
- agentops/instrumentation/providers/anthropic/stream_wrapper.py +436 -0
- agentops/instrumentation/providers/google_genai/README.md +33 -0
- agentops/instrumentation/providers/google_genai/__init__.py +24 -0
- agentops/instrumentation/providers/google_genai/attributes/__init__.py +25 -0
- agentops/instrumentation/providers/google_genai/attributes/chat.py +125 -0
- agentops/instrumentation/providers/google_genai/attributes/common.py +88 -0
- agentops/instrumentation/providers/google_genai/attributes/model.py +284 -0
- agentops/instrumentation/providers/google_genai/instrumentor.py +170 -0
- agentops/instrumentation/providers/google_genai/stream_wrapper.py +238 -0
- agentops/instrumentation/providers/ibm_watsonx_ai/__init__.py +28 -0
- agentops/instrumentation/providers/ibm_watsonx_ai/attributes/__init__.py +27 -0
- agentops/instrumentation/providers/ibm_watsonx_ai/attributes/attributes.py +277 -0
- agentops/instrumentation/providers/ibm_watsonx_ai/attributes/common.py +104 -0
- agentops/instrumentation/providers/ibm_watsonx_ai/instrumentor.py +162 -0
- agentops/instrumentation/providers/ibm_watsonx_ai/stream_wrapper.py +302 -0
- agentops/instrumentation/providers/mem0/__init__.py +45 -0
- agentops/instrumentation/providers/mem0/common.py +377 -0
- agentops/instrumentation/providers/mem0/instrumentor.py +270 -0
- agentops/instrumentation/providers/mem0/memory.py +430 -0
- agentops/instrumentation/providers/openai/__init__.py +21 -0
- agentops/instrumentation/providers/openai/attributes/__init__.py +7 -0
- agentops/instrumentation/providers/openai/attributes/common.py +55 -0
- agentops/instrumentation/providers/openai/attributes/response.py +607 -0
- agentops/instrumentation/providers/openai/config.py +36 -0
- agentops/instrumentation/providers/openai/instrumentor.py +312 -0
- agentops/instrumentation/providers/openai/stream_wrapper.py +941 -0
- agentops/instrumentation/providers/openai/utils.py +44 -0
- agentops/instrumentation/providers/openai/v0.py +176 -0
- agentops/instrumentation/providers/openai/v0_wrappers.py +483 -0
- agentops/instrumentation/providers/openai/wrappers/__init__.py +30 -0
- agentops/instrumentation/providers/openai/wrappers/assistant.py +277 -0
- agentops/instrumentation/providers/openai/wrappers/chat.py +259 -0
- agentops/instrumentation/providers/openai/wrappers/completion.py +109 -0
- agentops/instrumentation/providers/openai/wrappers/embeddings.py +94 -0
- agentops/instrumentation/providers/openai/wrappers/image_gen.py +75 -0
- agentops/instrumentation/providers/openai/wrappers/responses.py +191 -0
- agentops/instrumentation/providers/openai/wrappers/shared.py +81 -0
- agentops/instrumentation/utilities/concurrent_futures/__init__.py +10 -0
- agentops/instrumentation/utilities/concurrent_futures/instrumentation.py +206 -0
- agentops/integration/callbacks/dspy/__init__.py +11 -0
- agentops/integration/callbacks/dspy/callback.py +471 -0
- agentops/integration/callbacks/langchain/README.md +59 -0
- agentops/integration/callbacks/langchain/__init__.py +15 -0
- agentops/integration/callbacks/langchain/callback.py +791 -0
- agentops/integration/callbacks/langchain/utils.py +54 -0
- agentops/legacy/crewai.md +121 -0
- agentops/logging/instrument_logging.py +4 -0
- agentops/sdk/README.md +220 -0
- agentops/sdk/core.py +75 -32
- agentops/sdk/descriptors/classproperty.py +28 -0
- agentops/sdk/exporters.py +152 -33
- agentops/semconv/README.md +125 -0
- agentops/semconv/span_kinds.py +0 -2
- agentops/validation.py +102 -63
- {mseep_agentops-0.4.18.dist-info → mseep_agentops-0.4.22.dist-info}/METADATA +30 -40
- mseep_agentops-0.4.22.dist-info/RECORD +178 -0
- {mseep_agentops-0.4.18.dist-info → mseep_agentops-0.4.22.dist-info}/WHEEL +1 -2
- mseep_agentops-0.4.18.dist-info/RECORD +0 -94
- mseep_agentops-0.4.18.dist-info/top_level.txt +0 -2
- tests/conftest.py +0 -10
- tests/unit/client/__init__.py +0 -1
- tests/unit/client/test_http_adapter.py +0 -221
- tests/unit/client/test_http_client.py +0 -206
- tests/unit/conftest.py +0 -54
- tests/unit/sdk/__init__.py +0 -1
- tests/unit/sdk/instrumentation_tester.py +0 -207
- tests/unit/sdk/test_attributes.py +0 -392
- tests/unit/sdk/test_concurrent_instrumentation.py +0 -468
- tests/unit/sdk/test_decorators.py +0 -763
- tests/unit/sdk/test_exporters.py +0 -241
- tests/unit/sdk/test_factory.py +0 -1188
- tests/unit/sdk/test_internal_span_processor.py +0 -397
- tests/unit/sdk/test_resource_attributes.py +0 -35
- tests/unit/test_config.py +0 -82
- tests/unit/test_context_manager.py +0 -777
- tests/unit/test_events.py +0 -27
- tests/unit/test_host_env.py +0 -54
- tests/unit/test_init_py.py +0 -501
- tests/unit/test_serialization.py +0 -433
- tests/unit/test_session.py +0 -676
- tests/unit/test_user_agent.py +0 -34
- tests/unit/test_validation.py +0 -405
- {tests → agentops/instrumentation/agentic/openai_agents/attributes}/__init__.py +0 -0
- /tests/unit/__init__.py → /agentops/instrumentation/providers/openai/attributes/tools.py +0 -0
- {mseep_agentops-0.4.18.dist-info → mseep_agentops-0.4.22.dist-info}/licenses/LICENSE +0 -0
@@ -0,0 +1,791 @@
|
|
1
|
+
"""
|
2
|
+
LangChain callback handler for AgentOps.
|
3
|
+
|
4
|
+
This module provides the LangChain callback handler for AgentOps tracing and monitoring.
|
5
|
+
"""
|
6
|
+
|
7
|
+
from typing import Any, Dict, List, Optional, Union
|
8
|
+
|
9
|
+
from opentelemetry import trace
|
10
|
+
from opentelemetry.context import attach, detach
|
11
|
+
from opentelemetry.trace import SpanContext, set_span_in_context
|
12
|
+
|
13
|
+
from agentops.helpers.serialization import safe_serialize
|
14
|
+
from agentops.logging import logger
|
15
|
+
from agentops.sdk.core import tracer
|
16
|
+
from agentops.semconv import SpanKind, SpanAttributes, LangChainAttributes, LangChainAttributeValues, CoreAttributes
|
17
|
+
from agentops.integration.callbacks.langchain.utils import get_model_info
|
18
|
+
|
19
|
+
from langchain_core.callbacks.base import BaseCallbackHandler, AsyncCallbackHandler
|
20
|
+
from langchain_core.outputs import LLMResult
|
21
|
+
from langchain_core.agents import AgentAction, AgentFinish
|
22
|
+
|
23
|
+
|
24
|
+
class LangchainCallbackHandler(BaseCallbackHandler):
|
25
|
+
"""
|
26
|
+
AgentOps sync callback handler for Langchain.
|
27
|
+
|
28
|
+
This handler creates spans for LLM calls and other langchain operations,
|
29
|
+
maintaining proper parent-child relationships with session as root span.
|
30
|
+
|
31
|
+
Args:
|
32
|
+
api_key (str, optional): AgentOps API key
|
33
|
+
tags (List[str], optional): Tags to add to the session
|
34
|
+
auto_session (bool, optional): Whether to automatically create a session span
|
35
|
+
"""
|
36
|
+
|
37
|
+
def __init__(
|
38
|
+
self,
|
39
|
+
api_key: Optional[str] = None,
|
40
|
+
tags: Optional[List[str]] = None,
|
41
|
+
auto_session: bool = True,
|
42
|
+
):
|
43
|
+
"""Initialize the callback handler."""
|
44
|
+
self.active_spans = {}
|
45
|
+
self.api_key = api_key
|
46
|
+
self.tags = tags or []
|
47
|
+
self.session_span = None
|
48
|
+
self.session_token = None
|
49
|
+
self.context_tokens = {} # Store context tokens by run_id
|
50
|
+
self.token_counts = {} # Track token counts for streaming
|
51
|
+
|
52
|
+
# Initialize AgentOps
|
53
|
+
if auto_session:
|
54
|
+
self._initialize_agentops()
|
55
|
+
|
56
|
+
def _initialize_agentops(self):
|
57
|
+
"""Initialize AgentOps"""
|
58
|
+
import agentops
|
59
|
+
|
60
|
+
if not tracer.initialized:
|
61
|
+
init_kwargs = {
|
62
|
+
"auto_start_session": False,
|
63
|
+
"instrument_llm_calls": True,
|
64
|
+
}
|
65
|
+
|
66
|
+
if self.api_key:
|
67
|
+
init_kwargs["api_key"] = self.api_key
|
68
|
+
|
69
|
+
agentops.init(**init_kwargs)
|
70
|
+
logger.debug("AgentOps initialized from LangChain callback handler")
|
71
|
+
|
72
|
+
if not tracer.initialized:
|
73
|
+
logger.warning("AgentOps not initialized, session span will not be created")
|
74
|
+
return
|
75
|
+
|
76
|
+
otel_tracer = tracer.get_tracer()
|
77
|
+
|
78
|
+
span_name = f"session.{SpanKind.SESSION}"
|
79
|
+
|
80
|
+
attributes = {
|
81
|
+
SpanAttributes.AGENTOPS_SPAN_KIND: SpanKind.SESSION,
|
82
|
+
"session.tags": self.tags,
|
83
|
+
"agentops.operation.name": "session",
|
84
|
+
"span.kind": SpanKind.SESSION,
|
85
|
+
}
|
86
|
+
|
87
|
+
# Create a root session span
|
88
|
+
self.session_span = otel_tracer.start_span(span_name, attributes=attributes)
|
89
|
+
|
90
|
+
# Attach session span to the current context
|
91
|
+
self.session_token = attach(set_span_in_context(self.session_span))
|
92
|
+
|
93
|
+
logger.debug("Created session span as root span for LangChain")
|
94
|
+
|
95
|
+
def _create_span(
|
96
|
+
self,
|
97
|
+
operation_name: str,
|
98
|
+
span_kind: str,
|
99
|
+
run_id: Any = None,
|
100
|
+
attributes: Optional[Dict[str, Any]] = None,
|
101
|
+
parent_run_id: Optional[Any] = None,
|
102
|
+
):
|
103
|
+
"""
|
104
|
+
Create a span for the operation.
|
105
|
+
|
106
|
+
Args:
|
107
|
+
operation_name: Name of the operation
|
108
|
+
span_kind: Type of span
|
109
|
+
run_id: Unique identifier for the operation
|
110
|
+
attributes: Additional attributes for the span
|
111
|
+
parent_run_id: The run_id of the parent span if this is a child span
|
112
|
+
|
113
|
+
Returns:
|
114
|
+
The created span
|
115
|
+
"""
|
116
|
+
if not tracer.initialized:
|
117
|
+
logger.warning("AgentOps not initialized, spans will not be created")
|
118
|
+
return trace.NonRecordingSpan(SpanContext.INVALID)
|
119
|
+
|
120
|
+
otel_tracer = tracer.get_tracer()
|
121
|
+
|
122
|
+
span_name = f"{operation_name}.{span_kind}"
|
123
|
+
|
124
|
+
if attributes is None:
|
125
|
+
attributes = {}
|
126
|
+
|
127
|
+
attributes[SpanAttributes.AGENTOPS_SPAN_KIND] = span_kind
|
128
|
+
attributes["agentops.operation.name"] = operation_name
|
129
|
+
|
130
|
+
if run_id is None:
|
131
|
+
run_id = id(attributes)
|
132
|
+
|
133
|
+
parent_span = None
|
134
|
+
if parent_run_id is not None and parent_run_id in self.active_spans:
|
135
|
+
# Get parent span from active spans
|
136
|
+
parent_span = self.active_spans.get(parent_run_id)
|
137
|
+
# Create context with parent span
|
138
|
+
parent_ctx = set_span_in_context(parent_span)
|
139
|
+
# Start span with parent context
|
140
|
+
span = otel_tracer.start_span(span_name, context=parent_ctx, attributes=attributes)
|
141
|
+
logger.debug(f"Started span: {span_name} with parent: {parent_run_id}")
|
142
|
+
else:
|
143
|
+
# If no parent_run_id or parent not found, use session as parent
|
144
|
+
parent_ctx = set_span_in_context(self.session_span)
|
145
|
+
# Start span with session as parent context
|
146
|
+
span = otel_tracer.start_span(span_name, context=parent_ctx, attributes=attributes)
|
147
|
+
logger.debug(f"Started span: {span_name} with session as parent")
|
148
|
+
|
149
|
+
# Store span in active_spans
|
150
|
+
self.active_spans[run_id] = span
|
151
|
+
|
152
|
+
# Store token to detach later
|
153
|
+
token = attach(set_span_in_context(span))
|
154
|
+
self.context_tokens[run_id] = token
|
155
|
+
|
156
|
+
return span
|
157
|
+
|
158
|
+
def _end_span(self, run_id: Any):
|
159
|
+
"""
|
160
|
+
End the span associated with the run_id.
|
161
|
+
|
162
|
+
Args:
|
163
|
+
run_id: Unique identifier for the operation
|
164
|
+
"""
|
165
|
+
if run_id not in self.active_spans:
|
166
|
+
logger.warning(f"No span found for call {run_id}")
|
167
|
+
return
|
168
|
+
|
169
|
+
span = self.active_spans.pop(run_id)
|
170
|
+
token = self.context_tokens.pop(run_id, None)
|
171
|
+
|
172
|
+
if token is not None:
|
173
|
+
detach(token)
|
174
|
+
|
175
|
+
try:
|
176
|
+
span.end()
|
177
|
+
logger.debug(f"Ended span: {span.name}")
|
178
|
+
except Exception as e:
|
179
|
+
logger.warning(f"Error ending span: {e}")
|
180
|
+
|
181
|
+
# Clean up token counts if present
|
182
|
+
if run_id in self.token_counts:
|
183
|
+
del self.token_counts[run_id]
|
184
|
+
|
185
|
+
def on_llm_start(self, serialized: Dict[str, Any], prompts: List[str], **kwargs: Any) -> None:
|
186
|
+
"""Run when LLM starts running."""
|
187
|
+
try:
|
188
|
+
# Add null check for serialized
|
189
|
+
if serialized is None:
|
190
|
+
serialized = {}
|
191
|
+
|
192
|
+
model_info = get_model_info(serialized)
|
193
|
+
# Ensure default values if model_info returns unknown
|
194
|
+
model_name = model_info.get("model_name", "unknown")
|
195
|
+
|
196
|
+
attributes = {
|
197
|
+
# Use both standard and LangChain-specific attributes
|
198
|
+
SpanAttributes.LLM_REQUEST_MODEL: model_name,
|
199
|
+
LangChainAttributes.LLM_MODEL: model_name,
|
200
|
+
SpanAttributes.LLM_PROMPTS: safe_serialize(prompts),
|
201
|
+
LangChainAttributes.LLM_NAME: serialized.get("id", "unknown_llm"),
|
202
|
+
}
|
203
|
+
|
204
|
+
if "kwargs" in serialized:
|
205
|
+
for key, value in serialized["kwargs"].items():
|
206
|
+
if key == "temperature":
|
207
|
+
attributes[SpanAttributes.LLM_REQUEST_TEMPERATURE] = value
|
208
|
+
elif key == "max_tokens":
|
209
|
+
attributes[SpanAttributes.LLM_REQUEST_MAX_TOKENS] = value
|
210
|
+
elif key == "top_p":
|
211
|
+
attributes[SpanAttributes.LLM_REQUEST_TOP_P] = value
|
212
|
+
|
213
|
+
run_id = kwargs.get("run_id", id(serialized or {}))
|
214
|
+
parent_run_id = kwargs.get("parent_run_id", None)
|
215
|
+
|
216
|
+
# Initialize token count for streaming if needed
|
217
|
+
self.token_counts[run_id] = 0
|
218
|
+
|
219
|
+
# Log parent relationship for debugging
|
220
|
+
if parent_run_id:
|
221
|
+
logger.debug(f"LLM span with run_id {run_id} has parent {parent_run_id}")
|
222
|
+
|
223
|
+
self._create_span("llm", SpanKind.LLM, run_id, attributes, parent_run_id)
|
224
|
+
|
225
|
+
logger.debug(f"Started LLM span for {model_name}")
|
226
|
+
except Exception as e:
|
227
|
+
logger.warning(f"Error in on_llm_start: {e}")
|
228
|
+
|
229
|
+
def on_llm_end(self, response: LLMResult, **kwargs: Any) -> None:
|
230
|
+
"""Run when LLM ends running."""
|
231
|
+
try:
|
232
|
+
run_id = kwargs.get("run_id", id(response))
|
233
|
+
|
234
|
+
if run_id not in self.active_spans:
|
235
|
+
logger.warning(f"No span found for LLM call {run_id}")
|
236
|
+
return
|
237
|
+
|
238
|
+
span = self.active_spans.get(run_id)
|
239
|
+
|
240
|
+
if hasattr(response, "generations") and response.generations:
|
241
|
+
completions = []
|
242
|
+
for gen_list in response.generations:
|
243
|
+
for gen in gen_list:
|
244
|
+
if hasattr(gen, "text"):
|
245
|
+
completions.append(gen.text)
|
246
|
+
|
247
|
+
if completions:
|
248
|
+
try:
|
249
|
+
span.set_attribute(SpanAttributes.LLM_COMPLETIONS, safe_serialize(completions))
|
250
|
+
except Exception as e:
|
251
|
+
logger.warning(f"Failed to set completions: {e}")
|
252
|
+
|
253
|
+
if hasattr(response, "llm_output") and response.llm_output:
|
254
|
+
token_usage = response.llm_output.get("token_usage", {})
|
255
|
+
|
256
|
+
if "completion_tokens" in token_usage:
|
257
|
+
try:
|
258
|
+
span.set_attribute(SpanAttributes.LLM_USAGE_COMPLETION_TOKENS, token_usage["completion_tokens"])
|
259
|
+
except Exception as e:
|
260
|
+
logger.warning(f"Failed to set completion tokens: {e}")
|
261
|
+
|
262
|
+
if "prompt_tokens" in token_usage:
|
263
|
+
try:
|
264
|
+
span.set_attribute(SpanAttributes.LLM_USAGE_PROMPT_TOKENS, token_usage["prompt_tokens"])
|
265
|
+
except Exception as e:
|
266
|
+
logger.warning(f"Failed to set prompt tokens: {e}")
|
267
|
+
|
268
|
+
if "total_tokens" in token_usage:
|
269
|
+
try:
|
270
|
+
span.set_attribute(SpanAttributes.LLM_USAGE_TOTAL_TOKENS, token_usage["total_tokens"])
|
271
|
+
except Exception as e:
|
272
|
+
logger.warning(f"Failed to set total tokens: {e}")
|
273
|
+
|
274
|
+
# For streaming, record the total tokens streamed
|
275
|
+
if run_id in self.token_counts and self.token_counts[run_id] > 0:
|
276
|
+
try:
|
277
|
+
span.set_attribute(SpanAttributes.LLM_USAGE_STREAMING_TOKENS, self.token_counts[run_id])
|
278
|
+
except Exception as e:
|
279
|
+
logger.warning(f"Failed to set streaming tokens: {e}")
|
280
|
+
|
281
|
+
# End the span after setting all attributes
|
282
|
+
self._end_span(run_id)
|
283
|
+
|
284
|
+
except Exception as e:
|
285
|
+
logger.warning(f"Error in on_llm_end: {e}")
|
286
|
+
|
287
|
+
def on_chain_start(self, serialized: Dict[str, Any], inputs: Dict[str, Any], **kwargs: Any) -> None:
|
288
|
+
"""Run when chain starts running."""
|
289
|
+
try:
|
290
|
+
# Add null check for serialized
|
291
|
+
if serialized is None:
|
292
|
+
serialized = {}
|
293
|
+
|
294
|
+
chain_type = serialized.get("name", "unknown_chain")
|
295
|
+
|
296
|
+
attributes = {
|
297
|
+
LangChainAttributes.CHAIN_TYPE: chain_type,
|
298
|
+
LangChainAttributes.CHAIN_NAME: serialized.get("id", "unknown_chain"),
|
299
|
+
LangChainAttributes.CHAIN_VERBOSE: serialized.get("verbose", False),
|
300
|
+
"chain.inputs": safe_serialize(inputs),
|
301
|
+
}
|
302
|
+
|
303
|
+
# Add specific chain types
|
304
|
+
if "sequential" in chain_type.lower():
|
305
|
+
attributes[LangChainAttributes.CHAIN_KIND] = LangChainAttributeValues.CHAIN_KIND_SEQUENTIAL
|
306
|
+
elif "llm" in chain_type.lower():
|
307
|
+
attributes[LangChainAttributes.CHAIN_KIND] = LangChainAttributeValues.CHAIN_KIND_LLM
|
308
|
+
elif "router" in chain_type.lower():
|
309
|
+
attributes[LangChainAttributes.CHAIN_KIND] = LangChainAttributeValues.CHAIN_KIND_ROUTER
|
310
|
+
|
311
|
+
run_id = kwargs.get("run_id", id(serialized or {}))
|
312
|
+
parent_run_id = kwargs.get("parent_run_id", None)
|
313
|
+
|
314
|
+
# Log parent relationship for debugging
|
315
|
+
if parent_run_id:
|
316
|
+
logger.debug(f"Chain span with run_id {run_id} has parent {parent_run_id}")
|
317
|
+
|
318
|
+
self._create_span("chain", SpanKind.CHAIN, run_id, attributes, parent_run_id)
|
319
|
+
|
320
|
+
logger.debug(f"Started Chain span for {chain_type}")
|
321
|
+
except Exception as e:
|
322
|
+
logger.warning(f"Error in on_chain_start: {e}")
|
323
|
+
|
324
|
+
def on_chain_end(self, outputs: Dict[str, Any], **kwargs: Any) -> None:
|
325
|
+
"""Run when chain ends running."""
|
326
|
+
try:
|
327
|
+
run_id = kwargs.get("run_id", id(outputs))
|
328
|
+
|
329
|
+
if run_id not in self.active_spans:
|
330
|
+
logger.warning(f"No span found for chain call {run_id}")
|
331
|
+
return
|
332
|
+
|
333
|
+
span = self.active_spans.get(run_id)
|
334
|
+
|
335
|
+
try:
|
336
|
+
span.set_attribute("chain.outputs", safe_serialize(outputs))
|
337
|
+
except Exception as e:
|
338
|
+
logger.warning(f"Failed to set chain outputs: {e}")
|
339
|
+
|
340
|
+
# End the span after setting all attributes
|
341
|
+
self._end_span(run_id)
|
342
|
+
|
343
|
+
except Exception as e:
|
344
|
+
logger.warning(f"Error in on_chain_end: {e}")
|
345
|
+
|
346
|
+
def on_tool_start(self, serialized: Dict[str, Any], input_str: str, **kwargs: Any) -> None:
|
347
|
+
"""Run when tool starts running."""
|
348
|
+
try:
|
349
|
+
# Add null check for serialized
|
350
|
+
if serialized is None:
|
351
|
+
serialized = {}
|
352
|
+
|
353
|
+
tool_name = serialized.get("name", "unknown_tool")
|
354
|
+
|
355
|
+
attributes = {
|
356
|
+
LangChainAttributes.TOOL_NAME: tool_name,
|
357
|
+
LangChainAttributes.TOOL_DESCRIPTION: serialized.get("description", ""),
|
358
|
+
LangChainAttributes.TOOL_INPUT: input_str,
|
359
|
+
}
|
360
|
+
|
361
|
+
# Add more tool-specific attributes
|
362
|
+
if "return_direct" in serialized:
|
363
|
+
attributes[LangChainAttributes.TOOL_RETURN_DIRECT] = serialized["return_direct"]
|
364
|
+
|
365
|
+
if "args_schema" in serialized:
|
366
|
+
schema = serialized.get("args_schema")
|
367
|
+
if schema:
|
368
|
+
schema_str = str(schema)
|
369
|
+
if len(schema_str) < 1000: # Avoid extremely large attributes
|
370
|
+
attributes[LangChainAttributes.TOOL_ARGS_SCHEMA] = schema_str
|
371
|
+
|
372
|
+
run_id = kwargs.get("run_id", id(serialized or {}))
|
373
|
+
parent_run_id = kwargs.get("parent_run_id", None)
|
374
|
+
|
375
|
+
self._create_span("tool", SpanKind.TOOL, run_id, attributes, parent_run_id)
|
376
|
+
|
377
|
+
logger.debug(f"Started Tool span for {tool_name}")
|
378
|
+
except Exception as e:
|
379
|
+
logger.warning(f"Error in on_tool_start: {e}")
|
380
|
+
|
381
|
+
def on_tool_end(self, output: str, **kwargs: Any) -> None:
|
382
|
+
"""Run when tool ends running."""
|
383
|
+
try:
|
384
|
+
run_id = kwargs.get("run_id", id(output))
|
385
|
+
|
386
|
+
if run_id not in self.active_spans:
|
387
|
+
logger.warning(f"No span found for tool call {run_id}")
|
388
|
+
return
|
389
|
+
|
390
|
+
span = self.active_spans.get(run_id)
|
391
|
+
|
392
|
+
try:
|
393
|
+
span.set_attribute(
|
394
|
+
LangChainAttributes.TOOL_OUTPUT, output if isinstance(output, str) else safe_serialize(output)
|
395
|
+
)
|
396
|
+
except Exception as e:
|
397
|
+
logger.warning(f"Failed to set tool output: {e}")
|
398
|
+
|
399
|
+
# End the span after setting all attributes
|
400
|
+
self._end_span(run_id)
|
401
|
+
|
402
|
+
except Exception as e:
|
403
|
+
logger.warning(f"Error in on_tool_end: {e}")
|
404
|
+
|
405
|
+
def on_agent_action(self, action: AgentAction, **kwargs: Any) -> None:
|
406
|
+
"""Run on agent action."""
|
407
|
+
try:
|
408
|
+
tool = action.tool
|
409
|
+
tool_input = action.tool_input
|
410
|
+
log = action.log
|
411
|
+
|
412
|
+
attributes = {
|
413
|
+
LangChainAttributes.AGENT_ACTION_TOOL: tool,
|
414
|
+
LangChainAttributes.AGENT_ACTION_INPUT: safe_serialize(tool_input),
|
415
|
+
LangChainAttributes.AGENT_ACTION_LOG: log,
|
416
|
+
}
|
417
|
+
|
418
|
+
run_id = kwargs.get("run_id", id(action))
|
419
|
+
parent_run_id = kwargs.get("parent_run_id", None)
|
420
|
+
|
421
|
+
self._create_span("agent_action", SpanKind.AGENT_ACTION, run_id, attributes, parent_run_id)
|
422
|
+
|
423
|
+
logger.debug(f"Started Agent Action span for {tool}")
|
424
|
+
except Exception as e:
|
425
|
+
logger.warning(f"Error in on_agent_action: {e}")
|
426
|
+
|
427
|
+
def on_agent_finish(self, finish: AgentFinish, **kwargs: Any) -> None:
|
428
|
+
"""Run on agent end."""
|
429
|
+
try:
|
430
|
+
run_id = kwargs.get("run_id", id(finish))
|
431
|
+
|
432
|
+
if run_id not in self.active_spans:
|
433
|
+
logger.warning(f"No span found for agent finish {run_id}")
|
434
|
+
return
|
435
|
+
|
436
|
+
span = self.active_spans.get(run_id)
|
437
|
+
|
438
|
+
try:
|
439
|
+
span.set_attribute(LangChainAttributes.AGENT_FINISH_RETURN_VALUES, safe_serialize(finish.return_values))
|
440
|
+
except Exception as e:
|
441
|
+
logger.warning(f"Failed to set agent return values: {e}")
|
442
|
+
|
443
|
+
try:
|
444
|
+
span.set_attribute(LangChainAttributes.AGENT_FINISH_LOG, finish.log)
|
445
|
+
except Exception as e:
|
446
|
+
logger.warning(f"Failed to set agent log: {e}")
|
447
|
+
|
448
|
+
# End the span after setting all attributes
|
449
|
+
self._end_span(run_id)
|
450
|
+
|
451
|
+
except Exception as e:
|
452
|
+
logger.warning(f"Error in on_agent_finish: {e}")
|
453
|
+
|
454
|
+
def __del__(self):
|
455
|
+
"""Clean up resources when the handler is deleted."""
|
456
|
+
try:
|
457
|
+
# End any remaining spans
|
458
|
+
for run_id in list(self.active_spans.keys()):
|
459
|
+
try:
|
460
|
+
self._end_span(run_id)
|
461
|
+
except Exception as e:
|
462
|
+
logger.warning(f"Error ending span during cleanup: {e}")
|
463
|
+
|
464
|
+
# End session span and detach session token
|
465
|
+
if self.session_span:
|
466
|
+
try:
|
467
|
+
# Detach session token if exists
|
468
|
+
if hasattr(self, "session_token") and self.session_token:
|
469
|
+
detach(self.session_token)
|
470
|
+
|
471
|
+
self.session_span.end()
|
472
|
+
logger.debug("Ended session span")
|
473
|
+
except Exception as e:
|
474
|
+
logger.warning(f"Error ending session span: {e}")
|
475
|
+
|
476
|
+
except Exception as e:
|
477
|
+
logger.warning(f"Error in __del__: {e}")
|
478
|
+
|
479
|
+
def on_llm_new_token(self, token: str, **kwargs: Any) -> None:
|
480
|
+
"""Run on new token from LLM."""
|
481
|
+
try:
|
482
|
+
run_id = kwargs.get("run_id")
|
483
|
+
|
484
|
+
if not run_id:
|
485
|
+
logger.warning("No run_id provided for on_llm_new_token")
|
486
|
+
return
|
487
|
+
|
488
|
+
if run_id not in self.active_spans:
|
489
|
+
logger.warning(f"No span found for token in run {run_id}")
|
490
|
+
return
|
491
|
+
|
492
|
+
# Count tokens for later attribution
|
493
|
+
if run_id in self.token_counts:
|
494
|
+
self.token_counts[run_id] += 1
|
495
|
+
else:
|
496
|
+
self.token_counts[run_id] = 1
|
497
|
+
|
498
|
+
# We don't set attributes on each token because it's inefficient
|
499
|
+
# and can lead to "setting attribute on ended span" errors
|
500
|
+
# Instead, we count tokens and set the total at the end
|
501
|
+
|
502
|
+
except Exception as e:
|
503
|
+
logger.warning(f"Error in on_llm_new_token: {e}")
|
504
|
+
|
505
|
+
def on_chat_model_start(self, serialized: Dict[str, Any], messages: List[Any], **kwargs: Any) -> None:
|
506
|
+
"""Run when a chat model starts generating."""
|
507
|
+
try:
|
508
|
+
# Add null check for serialized
|
509
|
+
if serialized is None:
|
510
|
+
serialized = {}
|
511
|
+
|
512
|
+
model_info = get_model_info(serialized)
|
513
|
+
# Ensure default values if model_info returns unknown
|
514
|
+
model_name = model_info.get("model_name", "unknown")
|
515
|
+
|
516
|
+
# Extract message contents and roles
|
517
|
+
formatted_messages = []
|
518
|
+
roles = []
|
519
|
+
|
520
|
+
for message in messages:
|
521
|
+
if hasattr(message, "content") and hasattr(message, "type"):
|
522
|
+
formatted_messages.append({"content": message.content, "role": message.type})
|
523
|
+
roles.append(message.type)
|
524
|
+
|
525
|
+
attributes = {
|
526
|
+
# Use both standard and LangChain-specific attributes
|
527
|
+
SpanAttributes.LLM_REQUEST_MODEL: model_name,
|
528
|
+
LangChainAttributes.LLM_MODEL: model_name,
|
529
|
+
SpanAttributes.LLM_PROMPTS: safe_serialize(formatted_messages),
|
530
|
+
LangChainAttributes.LLM_NAME: serialized.get("id", "unknown_chat_model"),
|
531
|
+
LangChainAttributes.CHAT_MESSAGE_ROLES: safe_serialize(roles),
|
532
|
+
LangChainAttributes.CHAT_MODEL_TYPE: "chat",
|
533
|
+
}
|
534
|
+
|
535
|
+
# Add generation parameters
|
536
|
+
if "kwargs" in serialized:
|
537
|
+
for key, value in serialized["kwargs"].items():
|
538
|
+
if key == "temperature":
|
539
|
+
attributes[SpanAttributes.LLM_REQUEST_TEMPERATURE] = value
|
540
|
+
elif key == "max_tokens":
|
541
|
+
attributes[SpanAttributes.LLM_REQUEST_MAX_TOKENS] = value
|
542
|
+
elif key == "top_p":
|
543
|
+
attributes[SpanAttributes.LLM_REQUEST_TOP_P] = value
|
544
|
+
|
545
|
+
run_id = kwargs.get("run_id", id(serialized or {}))
|
546
|
+
parent_run_id = kwargs.get("parent_run_id", None)
|
547
|
+
|
548
|
+
# Initialize token count for streaming if needed
|
549
|
+
self.token_counts[run_id] = 0
|
550
|
+
|
551
|
+
self._create_span("chat_model", SpanKind.LLM, run_id, attributes, parent_run_id)
|
552
|
+
|
553
|
+
logger.debug(f"Started Chat Model span for {model_name}")
|
554
|
+
except Exception as e:
|
555
|
+
logger.warning(f"Error in on_chat_model_start: {e}")
|
556
|
+
|
557
|
+
def on_llm_error(self, error: Union[Exception, KeyboardInterrupt], **kwargs: Any) -> None:
    """Run when LLM errors: mark the matching span as failed and close it."""
    try:
        run_id = kwargs.get("run_id")

        # Without a matching active span there is nothing to annotate.
        if not run_id or run_id not in self.active_spans:
            logger.warning(f"No span found for LLM error {run_id}")
            return

        span = self.active_spans.get(run_id)

        # Attach error metadata; a failure here must not prevent span closure.
        try:
            error_attrs = (
                ("error", True),
                (CoreAttributes.ERROR_TYPE, error.__class__.__name__),
                (CoreAttributes.ERROR_MESSAGE, str(error)),
                (LangChainAttributes.LLM_ERROR, str(error)),
            )
            for attr_name, attr_value in error_attrs:
                span.set_attribute(attr_name, attr_value)
        except Exception as e:
            logger.warning(f"Failed to set error attributes: {e}")

        # Close the span now that the failure has been recorded.
        self._end_span(run_id)

    except Exception as e:
        logger.warning(f"Error in on_llm_error: {e}")
|
582
|
+
|
583
|
+
def on_chain_error(self, error: Union[Exception, KeyboardInterrupt], **kwargs: Any) -> None:
    """Run when chain errors: mark the matching span as failed and close it."""
    try:
        run_id = kwargs.get("run_id")

        # Without a matching active span there is nothing to annotate.
        if not run_id or run_id not in self.active_spans:
            logger.warning(f"No span found for chain error {run_id}")
            return

        span = self.active_spans.get(run_id)

        # Attach error metadata; a failure here must not prevent span closure.
        try:
            error_attrs = (
                ("error", True),
                (CoreAttributes.ERROR_TYPE, error.__class__.__name__),
                (CoreAttributes.ERROR_MESSAGE, str(error)),
                (LangChainAttributes.CHAIN_ERROR, str(error)),
            )
            for attr_name, attr_value in error_attrs:
                span.set_attribute(attr_name, attr_value)
        except Exception as e:
            logger.warning(f"Failed to set error attributes: {e}")

        # Close the span now that the failure has been recorded.
        self._end_span(run_id)

    except Exception as e:
        logger.warning(f"Error in on_chain_error: {e}")
|
608
|
+
|
609
|
+
def on_tool_error(self, error: Union[Exception, KeyboardInterrupt], **kwargs: Any) -> None:
    """Run when tool errors: mark the matching span as failed and close it."""
    try:
        run_id = kwargs.get("run_id")

        # Without a matching active span there is nothing to annotate.
        if not run_id or run_id not in self.active_spans:
            logger.warning(f"No span found for tool error {run_id}")
            return

        span = self.active_spans.get(run_id)

        # Attach error metadata; a failure here must not prevent span closure.
        try:
            error_attrs = (
                ("error", True),
                (CoreAttributes.ERROR_TYPE, error.__class__.__name__),
                (CoreAttributes.ERROR_MESSAGE, str(error)),
                (LangChainAttributes.TOOL_ERROR, str(error)),
            )
            for attr_name, attr_value in error_attrs:
                span.set_attribute(attr_name, attr_value)
        except Exception as e:
            logger.warning(f"Failed to set error attributes: {e}")

        # Close the span now that the failure has been recorded.
        self._end_span(run_id)

    except Exception as e:
        logger.warning(f"Error in on_tool_error: {e}")
|
634
|
+
|
635
|
+
def on_text(self, text: str, **kwargs: Any) -> None:
    """
    Run on arbitrary text.

    This can be used for logging or recording intermediate steps.
    """
    try:
        run_id = kwargs.get("run_id")
        parent_run_id = kwargs.get("parent_run_id")

        if run_id is None:
            # No run id: record the text as a one-off span that is opened
            # and closed immediately, keyed on the text object's identity.
            run_id = id(text)
            attributes = {LangChainAttributes.TEXT_CONTENT: text}
            self._create_span("text", SpanKind.TEXT, run_id, attributes, parent_run_id)
            self._end_span(run_id)
            return

        # Otherwise fold the text into the parent span, if one is active.
        if not (parent_run_id and parent_run_id in self.active_spans):
            return

        try:
            parent_span = self.active_spans[parent_run_id]

            # get_attribute may not exist on all span implementations;
            # fall back to treating the span as having no prior text.
            existing_text = ""
            try:
                existing_text = parent_span.get_attribute(LangChainAttributes.TEXT_CONTENT) or ""
            except Exception:
                pass

            combined = f"{existing_text}\n{text}" if existing_text else text
            parent_span.set_attribute(LangChainAttributes.TEXT_CONTENT, combined)
        except Exception as e:
            logger.warning(f"Failed to update parent span with text: {e}")
    except Exception as e:
        logger.warning(f"Error in on_text: {e}")
|
681
|
+
|
682
|
+
|
683
|
+
class AsyncLangchainCallbackHandler(AsyncCallbackHandler):
    """
    AgentOps async callback handler for Langchain.

    Every coroutine callback forwards to an internal synchronous
    ``LangchainCallbackHandler``, which owns span creation, parent-child
    wiring, and the session root span. This class only adapts that
    behavior to Langchain's async callback interface.

    Args:
        api_key (str, optional): AgentOps API key
        tags (List[str], optional): Tags to add to the session
        auto_session (bool, optional): Whether to automatically create a session span
    """

    def __init__(
        self,
        api_key: Optional[str] = None,
        tags: Optional[List[str]] = None,
        auto_session: bool = True,
    ):
        """Initialize the handler and its synchronous delegate."""
        self._sync_handler = LangchainCallbackHandler(api_key=api_key, tags=tags, auto_session=auto_session)

    @property
    def active_spans(self):
        """Active spans dictionary, owned by the sync delegate."""
        return self._sync_handler.active_spans

    @property
    def session_span(self):
        """Session root span, owned by the sync delegate."""
        return self._sync_handler.session_span

    async def on_llm_start(self, serialized: Dict[str, Any], prompts: List[str], **kwargs: Any) -> None:
        """Forward the LLM-start event to the sync delegate."""
        self._sync_handler.on_llm_start(serialized, prompts, **kwargs)

    async def on_llm_end(self, response: LLMResult, **kwargs: Any) -> None:
        """Forward the LLM-end event to the sync delegate."""
        self._sync_handler.on_llm_end(response, **kwargs)

    async def on_chain_start(self, serialized: Dict[str, Any], inputs: Dict[str, Any], **kwargs: Any) -> None:
        """Forward the chain-start event to the sync delegate."""
        self._sync_handler.on_chain_start(serialized, inputs, **kwargs)

    async def on_chain_end(self, outputs: Dict[str, Any], **kwargs: Any) -> None:
        """Forward the chain-end event to the sync delegate."""
        self._sync_handler.on_chain_end(outputs, **kwargs)

    async def on_tool_start(self, serialized: Dict[str, Any], input_str: str, **kwargs: Any) -> None:
        """Forward the tool-start event to the sync delegate."""
        self._sync_handler.on_tool_start(serialized, input_str, **kwargs)

    async def on_tool_end(self, output: str, **kwargs: Any) -> None:
        """Forward the tool-end event to the sync delegate."""
        self._sync_handler.on_tool_end(output, **kwargs)

    async def on_agent_action(self, action: AgentAction, **kwargs: Any) -> None:
        """Forward the agent-action event to the sync delegate."""
        self._sync_handler.on_agent_action(action, **kwargs)

    async def on_agent_finish(self, finish: AgentFinish, **kwargs: Any) -> None:
        """Forward the agent-finish event to the sync delegate."""
        self._sync_handler.on_agent_finish(finish, **kwargs)

    async def on_llm_new_token(self, token: str, **kwargs: Any) -> None:
        """Forward each streamed token to the sync delegate."""
        self._sync_handler.on_llm_new_token(token, **kwargs)

    async def on_chat_model_start(self, serialized: Dict[str, Any], messages: List[Any], **kwargs: Any) -> None:
        """Forward the chat-model-start event to the sync delegate."""
        self._sync_handler.on_chat_model_start(serialized, messages, **kwargs)

    async def on_llm_error(self, error: Union[Exception, KeyboardInterrupt], **kwargs: Any) -> None:
        """Forward the LLM-error event to the sync delegate."""
        self._sync_handler.on_llm_error(error, **kwargs)

    async def on_chain_error(self, error: Union[Exception, KeyboardInterrupt], **kwargs: Any) -> None:
        """Forward the chain-error event to the sync delegate."""
        self._sync_handler.on_chain_error(error, **kwargs)

    async def on_tool_error(self, error: Union[Exception, KeyboardInterrupt], **kwargs: Any) -> None:
        """Forward the tool-error event to the sync delegate."""
        self._sync_handler.on_tool_error(error, **kwargs)

    async def on_text(self, text: str, **kwargs: Any) -> None:
        """Forward the text event to the sync delegate."""
        self._sync_handler.on_text(text, **kwargs)

    def __del__(self):
        """Drop the delegate; its own finalizer performs span cleanup."""
        if hasattr(self, "_sync_handler"):
            del self._sync_handler
|