sentry-sdk 3.0.0a1__py2.py3-none-any.whl → 3.0.0a3__py2.py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the package versions as they appear in those registries.
Potentially problematic release: this version of sentry-sdk has been flagged as possibly problematic.
- sentry_sdk/__init__.py +2 -0
- sentry_sdk/_compat.py +5 -12
- sentry_sdk/_init_implementation.py +7 -7
- sentry_sdk/_log_batcher.py +17 -29
- sentry_sdk/_lru_cache.py +7 -9
- sentry_sdk/_queue.py +2 -4
- sentry_sdk/_types.py +11 -18
- sentry_sdk/_werkzeug.py +5 -7
- sentry_sdk/ai/monitoring.py +44 -31
- sentry_sdk/ai/utils.py +3 -4
- sentry_sdk/api.py +75 -87
- sentry_sdk/attachments.py +10 -12
- sentry_sdk/client.py +137 -155
- sentry_sdk/consts.py +430 -174
- sentry_sdk/crons/api.py +16 -17
- sentry_sdk/crons/decorator.py +25 -27
- sentry_sdk/debug.py +4 -6
- sentry_sdk/envelope.py +46 -112
- sentry_sdk/feature_flags.py +9 -15
- sentry_sdk/integrations/__init__.py +24 -19
- sentry_sdk/integrations/_asgi_common.py +15 -18
- sentry_sdk/integrations/_wsgi_common.py +22 -33
- sentry_sdk/integrations/aiohttp.py +32 -30
- sentry_sdk/integrations/anthropic.py +42 -37
- sentry_sdk/integrations/argv.py +3 -4
- sentry_sdk/integrations/ariadne.py +16 -18
- sentry_sdk/integrations/arq.py +21 -29
- sentry_sdk/integrations/asgi.py +63 -37
- sentry_sdk/integrations/asyncio.py +14 -16
- sentry_sdk/integrations/atexit.py +6 -10
- sentry_sdk/integrations/aws_lambda.py +26 -36
- sentry_sdk/integrations/beam.py +10 -18
- sentry_sdk/integrations/boto3.py +18 -16
- sentry_sdk/integrations/bottle.py +25 -34
- sentry_sdk/integrations/celery/__init__.py +41 -61
- sentry_sdk/integrations/celery/beat.py +23 -27
- sentry_sdk/integrations/celery/utils.py +15 -17
- sentry_sdk/integrations/chalice.py +8 -10
- sentry_sdk/integrations/clickhouse_driver.py +21 -31
- sentry_sdk/integrations/cloud_resource_context.py +9 -16
- sentry_sdk/integrations/cohere.py +27 -33
- sentry_sdk/integrations/dedupe.py +5 -8
- sentry_sdk/integrations/django/__init__.py +57 -72
- sentry_sdk/integrations/django/asgi.py +26 -34
- sentry_sdk/integrations/django/caching.py +23 -19
- sentry_sdk/integrations/django/middleware.py +17 -20
- sentry_sdk/integrations/django/signals_handlers.py +11 -10
- sentry_sdk/integrations/django/templates.py +19 -16
- sentry_sdk/integrations/django/transactions.py +16 -11
- sentry_sdk/integrations/django/views.py +6 -10
- sentry_sdk/integrations/dramatiq.py +21 -21
- sentry_sdk/integrations/excepthook.py +10 -10
- sentry_sdk/integrations/executing.py +3 -4
- sentry_sdk/integrations/falcon.py +27 -42
- sentry_sdk/integrations/fastapi.py +13 -16
- sentry_sdk/integrations/flask.py +31 -38
- sentry_sdk/integrations/gcp.py +13 -16
- sentry_sdk/integrations/gnu_backtrace.py +4 -6
- sentry_sdk/integrations/gql.py +16 -17
- sentry_sdk/integrations/graphene.py +13 -12
- sentry_sdk/integrations/grpc/__init__.py +19 -1
- sentry_sdk/integrations/grpc/aio/server.py +15 -14
- sentry_sdk/integrations/grpc/client.py +19 -9
- sentry_sdk/integrations/grpc/consts.py +2 -0
- sentry_sdk/integrations/grpc/server.py +12 -8
- sentry_sdk/integrations/httpx.py +9 -12
- sentry_sdk/integrations/huey.py +13 -20
- sentry_sdk/integrations/huggingface_hub.py +18 -18
- sentry_sdk/integrations/langchain.py +203 -113
- sentry_sdk/integrations/launchdarkly.py +13 -10
- sentry_sdk/integrations/litestar.py +37 -35
- sentry_sdk/integrations/logging.py +52 -65
- sentry_sdk/integrations/loguru.py +127 -57
- sentry_sdk/integrations/modules.py +3 -4
- sentry_sdk/integrations/openai.py +100 -88
- sentry_sdk/integrations/openai_agents/__init__.py +49 -0
- sentry_sdk/integrations/openai_agents/consts.py +1 -0
- sentry_sdk/integrations/openai_agents/patches/__init__.py +4 -0
- sentry_sdk/integrations/openai_agents/patches/agent_run.py +152 -0
- sentry_sdk/integrations/openai_agents/patches/models.py +52 -0
- sentry_sdk/integrations/openai_agents/patches/runner.py +42 -0
- sentry_sdk/integrations/openai_agents/patches/tools.py +84 -0
- sentry_sdk/integrations/openai_agents/spans/__init__.py +5 -0
- sentry_sdk/integrations/openai_agents/spans/agent_workflow.py +20 -0
- sentry_sdk/integrations/openai_agents/spans/ai_client.py +46 -0
- sentry_sdk/integrations/openai_agents/spans/execute_tool.py +47 -0
- sentry_sdk/integrations/openai_agents/spans/handoff.py +24 -0
- sentry_sdk/integrations/openai_agents/spans/invoke_agent.py +41 -0
- sentry_sdk/integrations/openai_agents/utils.py +201 -0
- sentry_sdk/integrations/openfeature.py +11 -6
- sentry_sdk/integrations/pure_eval.py +6 -10
- sentry_sdk/integrations/pymongo.py +13 -17
- sentry_sdk/integrations/pyramid.py +31 -36
- sentry_sdk/integrations/quart.py +23 -28
- sentry_sdk/integrations/ray.py +73 -64
- sentry_sdk/integrations/redis/__init__.py +7 -4
- sentry_sdk/integrations/redis/_async_common.py +25 -12
- sentry_sdk/integrations/redis/_sync_common.py +19 -13
- sentry_sdk/integrations/redis/modules/caches.py +17 -8
- sentry_sdk/integrations/redis/modules/queries.py +9 -8
- sentry_sdk/integrations/redis/rb.py +3 -2
- sentry_sdk/integrations/redis/redis.py +4 -4
- sentry_sdk/integrations/redis/redis_cluster.py +21 -13
- sentry_sdk/integrations/redis/redis_py_cluster_legacy.py +3 -2
- sentry_sdk/integrations/redis/utils.py +23 -24
- sentry_sdk/integrations/rq.py +13 -16
- sentry_sdk/integrations/rust_tracing.py +9 -6
- sentry_sdk/integrations/sanic.py +34 -46
- sentry_sdk/integrations/serverless.py +22 -27
- sentry_sdk/integrations/socket.py +27 -15
- sentry_sdk/integrations/spark/__init__.py +1 -0
- sentry_sdk/integrations/spark/spark_driver.py +45 -83
- sentry_sdk/integrations/spark/spark_worker.py +7 -11
- sentry_sdk/integrations/sqlalchemy.py +22 -19
- sentry_sdk/integrations/starlette.py +86 -90
- sentry_sdk/integrations/starlite.py +28 -34
- sentry_sdk/integrations/statsig.py +5 -4
- sentry_sdk/integrations/stdlib.py +28 -24
- sentry_sdk/integrations/strawberry.py +62 -49
- sentry_sdk/integrations/sys_exit.py +7 -11
- sentry_sdk/integrations/threading.py +12 -14
- sentry_sdk/integrations/tornado.py +28 -32
- sentry_sdk/integrations/trytond.py +4 -3
- sentry_sdk/integrations/typer.py +8 -6
- sentry_sdk/integrations/unleash.py +5 -4
- sentry_sdk/integrations/wsgi.py +47 -46
- sentry_sdk/logger.py +41 -10
- sentry_sdk/monitor.py +16 -28
- sentry_sdk/opentelemetry/consts.py +11 -4
- sentry_sdk/opentelemetry/contextvars_context.py +26 -16
- sentry_sdk/opentelemetry/propagator.py +38 -21
- sentry_sdk/opentelemetry/sampler.py +51 -34
- sentry_sdk/opentelemetry/scope.py +36 -37
- sentry_sdk/opentelemetry/span_processor.py +48 -58
- sentry_sdk/opentelemetry/tracing.py +58 -14
- sentry_sdk/opentelemetry/utils.py +186 -194
- sentry_sdk/profiler/continuous_profiler.py +108 -97
- sentry_sdk/profiler/transaction_profiler.py +70 -97
- sentry_sdk/profiler/utils.py +11 -15
- sentry_sdk/scope.py +251 -273
- sentry_sdk/scrubber.py +22 -26
- sentry_sdk/serializer.py +40 -54
- sentry_sdk/session.py +44 -61
- sentry_sdk/sessions.py +35 -49
- sentry_sdk/spotlight.py +15 -21
- sentry_sdk/tracing.py +121 -187
- sentry_sdk/tracing_utils.py +104 -122
- sentry_sdk/transport.py +131 -157
- sentry_sdk/utils.py +232 -309
- sentry_sdk/worker.py +16 -28
- {sentry_sdk-3.0.0a1.dist-info → sentry_sdk-3.0.0a3.dist-info}/METADATA +3 -3
- sentry_sdk-3.0.0a3.dist-info/RECORD +168 -0
- {sentry_sdk-3.0.0a1.dist-info → sentry_sdk-3.0.0a3.dist-info}/WHEEL +1 -1
- sentry_sdk-3.0.0a1.dist-info/RECORD +0 -154
- {sentry_sdk-3.0.0a1.dist-info → sentry_sdk-3.0.0a3.dist-info}/entry_points.txt +0 -0
- {sentry_sdk-3.0.0a1.dist-info → sentry_sdk-3.0.0a3.dist-info}/licenses/LICENSE +0 -0
- {sentry_sdk-3.0.0a1.dist-info → sentry_sdk-3.0.0a3.dist-info}/top_level.txt +0 -0
sentry_sdk/integrations/langchain.py

@@ -1,3 +1,5 @@
+from __future__ import annotations
+import itertools
 from collections import OrderedDict
 from functools import wraps
 
@@ -22,6 +24,8 @@ try:
     from langchain_core.callbacks import (
         manager,
         BaseCallbackHandler,
+        BaseCallbackManager,
+        Callbacks,
     )
     from langchain_core.agents import AgentAction, AgentFinish
 except ImportError:
@@ -58,40 +62,41 @@ class LangchainIntegration(Integration):
     max_spans = 1024
 
     def __init__(
-        self
-
-
+        self: LangchainIntegration,
+        include_prompts: bool = True,
+        max_spans: int = 1024,
+        tiktoken_encoding_name: Optional[str] = None,
+    ) -> None:
         self.include_prompts = include_prompts
         self.max_spans = max_spans
         self.tiktoken_encoding_name = tiktoken_encoding_name
 
     @staticmethod
-    def setup_once():
-        # type: () -> None
+    def setup_once() -> None:
         manager._configure = _wrap_configure(manager._configure)
 
 
 class WatchedSpan:
-    num_completion_tokens = 0
-    num_prompt_tokens = 0
-    no_collect_tokens = False
-    children
-    is_pipeline = False
-
-    def __init__(self, span):
-        # type: (Span) -> None
+    num_completion_tokens: int = 0
+    num_prompt_tokens: int = 0
+    no_collect_tokens: bool = False
+    children: List[WatchedSpan] = []
+    is_pipeline: bool = False
+
+    def __init__(self, span: Span) -> None:
         self.span = span
 
 
 class SentryLangchainCallback(BaseCallbackHandler):  # type: ignore[misc]
     """Base callback handler that can be used to handle callbacks from langchain."""
 
-
-
-
-
-
-
+    def __init__(
+        self,
+        max_span_map_size: int,
+        include_prompts: bool,
+        tiktoken_encoding_name: Optional[str] = None,
+    ) -> None:
+        self.span_map: OrderedDict[UUID, WatchedSpan] = OrderedDict()
         self.max_span_map_size = max_span_map_size
         self.include_prompts = include_prompts
 
@@ -101,21 +106,18 @@ class SentryLangchainCallback(BaseCallbackHandler):  # type: ignore[misc]
 
             self.tiktoken_encoding = tiktoken.get_encoding(tiktoken_encoding_name)
 
-    def count_tokens(self, s):
-        # type: (str) -> int
+    def count_tokens(self, s: str) -> int:
         if self.tiktoken_encoding is not None:
             return len(self.tiktoken_encoding.encode_ordinary(s))
         return 0
 
-    def gc_span_map(self):
-        # type: () -> None
+    def gc_span_map(self) -> None:
 
         while len(self.span_map) > self.max_span_map_size:
             run_id, watched_span = self.span_map.popitem(last=False)
             self._exit_span(watched_span, run_id)
 
-    def _handle_error(self, run_id, error):
-        # type: (UUID, Any) -> None
+    def _handle_error(self, run_id: UUID, error: Any) -> None:
         if not run_id or run_id not in self.span_map:
             return
 
@@ -127,14 +129,17 @@ class SentryLangchainCallback(BaseCallbackHandler):  # type: ignore[misc]
             span_data.span.finish()
         del self.span_map[run_id]
 
-    def _normalize_langchain_message(self, message):
-        # type: (BaseMessage) -> Any
+    def _normalize_langchain_message(self, message: BaseMessage) -> Any:
         parsed = {"content": message.content, "role": message.type}
         parsed.update(message.additional_kwargs)
         return parsed
 
-    def _create_span(
-
+    def _create_span(
+        self: SentryLangchainCallback,
+        run_id: UUID,
+        parent_id: Optional[Any],
+        **kwargs: Any,
+    ) -> WatchedSpan:
 
         parent_watched_span = self.span_map.get(parent_id) if parent_id else None
         sentry_span = sentry_sdk.start_span(
@@ -161,8 +166,9 @@ class SentryLangchainCallback(BaseCallbackHandler):  # type: ignore[misc]
         self.gc_span_map()
         return watched_span
 
-    def _exit_span(
-
+    def _exit_span(
+        self: SentryLangchainCallback, span_data: WatchedSpan, run_id: UUID
+    ) -> None:
 
         if span_data.is_pipeline:
             set_ai_pipeline_name(None)
@@ -172,17 +178,16 @@ class SentryLangchainCallback(BaseCallbackHandler):  # type: ignore[misc]
         del self.span_map[run_id]
 
     def on_llm_start(
-        self,
-        serialized,
-        prompts,
+        self: SentryLangchainCallback,
+        serialized: Dict[str, Any],
+        prompts: List[str],
         *,
-        run_id,
-        tags=None,
-        parent_run_id=None,
-        metadata=None,
-        **kwargs,
-    ):
-        # type: (SentryLangchainCallback, Dict[str, Any], List[str], UUID, Optional[List[str]], Optional[UUID], Optional[Dict[str, Any]], Any) -> Any
+        run_id: UUID,
+        tags: Optional[List[str]] = None,
+        parent_run_id: Optional[UUID] = None,
+        metadata: Optional[Dict[str, Any]] = None,
+        **kwargs: Any,
+    ) -> Any:
         """Run when LLM starts running."""
         with capture_internal_exceptions():
             if not run_id:
@@ -203,8 +208,14 @@ class SentryLangchainCallback(BaseCallbackHandler):  # type: ignore[misc]
                 if k in all_params:
                     set_data_normalized(span, v, all_params[k])
 
-    def on_chat_model_start(
-
+    def on_chat_model_start(
+        self: SentryLangchainCallback,
+        serialized: Dict[str, Any],
+        messages: List[List[BaseMessage]],
+        *,
+        run_id: UUID,
+        **kwargs: Any,
+    ) -> Any:
         """Run when Chat Model starts running."""
         with capture_internal_exceptions():
             if not run_id:
@@ -249,8 +260,9 @@ class SentryLangchainCallback(BaseCallbackHandler):  # type: ignore[misc]
                         message.content
                     ) + self.count_tokens(message.type)
 
-    def on_llm_new_token(
-
+    def on_llm_new_token(
+        self: SentryLangchainCallback, token: str, *, run_id: UUID, **kwargs: Any
+    ) -> Any:
         """Run on new LLM token. Only available when streaming is enabled."""
         with capture_internal_exceptions():
             if not run_id or run_id not in self.span_map:
@@ -260,8 +272,13 @@ class SentryLangchainCallback(BaseCallbackHandler):  # type: ignore[misc]
                 return
             span_data.num_completion_tokens += self.count_tokens(token)
 
-    def on_llm_end(
-
+    def on_llm_end(
+        self: SentryLangchainCallback,
+        response: LLMResult,
+        *,
+        run_id: UUID,
+        **kwargs: Any,
+    ) -> Any:
         """Run when LLM ends running."""
         with capture_internal_exceptions():
             if not run_id:
@@ -286,27 +303,38 @@ class SentryLangchainCallback(BaseCallbackHandler):  # type: ignore[misc]
             if token_usage:
                 record_token_usage(
                     span_data.span,
-                    token_usage.get("prompt_tokens"),
-                    token_usage.get("completion_tokens"),
-                    token_usage.get("total_tokens"),
+                    input_tokens=token_usage.get("prompt_tokens"),
+                    output_tokens=token_usage.get("completion_tokens"),
+                    total_tokens=token_usage.get("total_tokens"),
                 )
             else:
                 record_token_usage(
                     span_data.span,
-                    span_data.num_prompt_tokens,
-                    span_data.num_completion_tokens,
+                    input_tokens=span_data.num_prompt_tokens,
+                    output_tokens=span_data.num_completion_tokens,
                 )
 
             self._exit_span(span_data, run_id)
 
-    def on_llm_error(
-
+    def on_llm_error(
+        self: SentryLangchainCallback,
+        error: Union[Exception, KeyboardInterrupt],
+        *,
+        run_id: UUID,
+        **kwargs: Any,
+    ) -> Any:
         """Run when LLM errors."""
         with capture_internal_exceptions():
             self._handle_error(run_id, error)
 
-    def on_chain_start(
-
+    def on_chain_start(
+        self: SentryLangchainCallback,
+        serialized: Dict[str, Any],
+        inputs: Dict[str, Any],
+        *,
+        run_id: UUID,
+        **kwargs: Any,
+    ) -> Any:
         """Run when chain starts running."""
         with capture_internal_exceptions():
             if not run_id:
@@ -326,8 +354,13 @@ class SentryLangchainCallback(BaseCallbackHandler):  # type: ignore[misc]
             if metadata:
                 set_data_normalized(watched_span.span, SPANDATA.AI_METADATA, metadata)
 
-    def on_chain_end(
-
+    def on_chain_end(
+        self: SentryLangchainCallback,
+        outputs: Dict[str, Any],
+        *,
+        run_id: UUID,
+        **kwargs: Any,
+    ) -> Any:
         """Run when chain ends running."""
         with capture_internal_exceptions():
             if not run_id or run_id not in self.span_map:
@@ -338,13 +371,23 @@ class SentryLangchainCallback(BaseCallbackHandler):  # type: ignore[misc]
                 return
             self._exit_span(span_data, run_id)
 
-    def on_chain_error(
-
+    def on_chain_error(
+        self: SentryLangchainCallback,
+        error: Union[Exception, KeyboardInterrupt],
+        *,
+        run_id: UUID,
+        **kwargs: Any,
+    ) -> Any:
         """Run when chain errors."""
         self._handle_error(run_id, error)
 
-    def on_agent_action(
-
+    def on_agent_action(
+        self: SentryLangchainCallback,
+        action: AgentAction,
+        *,
+        run_id: UUID,
+        **kwargs: Any,
+    ) -> Any:
         with capture_internal_exceptions():
             if not run_id:
                 return
@@ -360,8 +403,13 @@ class SentryLangchainCallback(BaseCallbackHandler):  # type: ignore[misc]
                     watched_span.span, SPANDATA.AI_INPUT_MESSAGES, action.tool_input
                 )
 
-    def on_agent_finish(
-
+    def on_agent_finish(
+        self: SentryLangchainCallback,
+        finish: AgentFinish,
+        *,
+        run_id: UUID,
+        **kwargs: Any,
+    ) -> Any:
         with capture_internal_exceptions():
             if not run_id:
                 return
@@ -375,8 +423,14 @@ class SentryLangchainCallback(BaseCallbackHandler):  # type: ignore[misc]
             )
             self._exit_span(span_data, run_id)
 
-    def on_tool_start(
-
+    def on_tool_start(
+        self: SentryLangchainCallback,
+        serialized: Dict[str, Any],
+        input_str: str,
+        *,
+        run_id: UUID,
+        **kwargs: Any,
+    ) -> Any:
         """Run when tool starts running."""
         with capture_internal_exceptions():
             if not run_id:
@@ -399,8 +453,9 @@ class SentryLangchainCallback(BaseCallbackHandler):  # type: ignore[misc]
                     watched_span.span, SPANDATA.AI_METADATA, kwargs.get("metadata")
                 )
 
-    def on_tool_end(
-
+    def on_tool_end(
+        self: SentryLangchainCallback, output: str, *, run_id: UUID, **kwargs: Any
+    ) -> Any:
         """Run when tool ends running."""
         with capture_internal_exceptions():
             if not run_id or run_id not in self.span_map:
@@ -413,60 +468,95 @@ class SentryLangchainCallback(BaseCallbackHandler):  # type: ignore[misc]
             set_data_normalized(span_data.span, SPANDATA.AI_RESPONSES, output)
             self._exit_span(span_data, run_id)
 
-    def on_tool_error(
-
+    def on_tool_error(
+        self,
+        error: SentryLangchainCallback,
+        *args: Union[Exception, KeyboardInterrupt],
+        run_id: UUID,
+        **kwargs: Any,
+    ) -> Any:
         """Run when tool errors."""
         self._handle_error(run_id, error)
 
 
-def _wrap_configure(f):
-    # type: (Callable[..., Any]) -> Callable[..., Any]
+def _wrap_configure(f: Callable[..., Any]) -> Callable[..., Any]:
 
     @wraps(f)
-    def new_configure(
-
+    def new_configure(
+        callback_manager_cls: type,
+        inheritable_callbacks: Callbacks = None,
+        local_callbacks: Callbacks = None,
+        *args: Any,
+        **kwargs: Any,
+    ) -> Any:
 
         integration = sentry_sdk.get_client().get_integration(LangchainIntegration)
         if integration is None:
-            return f(
+            return f(
+                callback_manager_cls,
+                inheritable_callbacks,
+                local_callbacks,
+                *args,
+                **kwargs,
+            )
 
- ... (38 removed lines: previous new_configure body, not preserved in this rendering) ...
+        local_callbacks = local_callbacks or []
+
+        # Handle each possible type of local_callbacks. For each type, we
+        # extract the list of callbacks to check for SentryLangchainCallback,
+        # and define a function that would add the SentryLangchainCallback
+        # to the existing callbacks list.
+        if isinstance(local_callbacks, BaseCallbackManager):
+            callbacks_list = local_callbacks.handlers
+        elif isinstance(local_callbacks, BaseCallbackHandler):
+            callbacks_list = [local_callbacks]
+        elif isinstance(local_callbacks, list):
+            callbacks_list = local_callbacks
+        else:
+            logger.debug("Unknown callback type: %s", local_callbacks)
+            # Just proceed with original function call
+            return f(
+                callback_manager_cls,
+                inheritable_callbacks,
+                local_callbacks,
+                *args,
+                **kwargs,
+            )
+
+        # Handle each possible type of inheritable_callbacks.
+        if isinstance(inheritable_callbacks, BaseCallbackManager):
+            inheritable_callbacks_list = inheritable_callbacks.handlers
+        elif isinstance(inheritable_callbacks, list):
+            inheritable_callbacks_list = inheritable_callbacks
+        else:
+            inheritable_callbacks_list = []
+
+        if not any(
+            isinstance(cb, SentryLangchainCallback)
+            for cb in itertools.chain(callbacks_list, inheritable_callbacks_list)
+        ):
+            sentry_handler = SentryLangchainCallback(
+                integration.max_spans,
+                integration.include_prompts,
+                integration.tiktoken_encoding_name,
+            )
+            if isinstance(local_callbacks, BaseCallbackManager):
+                local_callbacks = local_callbacks.copy()
+                local_callbacks.handlers = [
+                    *local_callbacks.handlers,
+                    sentry_handler,
+                ]
+            elif isinstance(local_callbacks, BaseCallbackHandler):
+                local_callbacks = [local_callbacks, sentry_handler]
+            else:  # local_callbacks is a list
+                local_callbacks = [*local_callbacks, sentry_handler]
+
+        return f(
+            callback_manager_cls,
+            inheritable_callbacks,
+            local_callbacks,
+            *args,
+            **kwargs,
        )
 
     return new_configure
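
For orientation only: the a3 changes above move LangchainIntegration to inline annotations while keeping the same three options (include_prompts, max_spans, tiktoken_encoding_name). A minimal sketch of enabling the integration with those options, assuming a standard sentry_sdk.init() call; the DSN value is a placeholder:

    import sentry_sdk
    from sentry_sdk.integrations.langchain import LangchainIntegration

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
        integrations=[
            LangchainIntegration(
                include_prompts=True,  # attach prompts/responses to AI spans
                max_spans=1024,  # cap on entries kept in the callback's span map
                tiktoken_encoding_name=None,  # e.g. "cl100k_base" to count streamed tokens locally
            )
        ],
    )
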
sentry_sdk/integrations/launchdarkly.py

@@ -1,3 +1,4 @@
+from __future__ import annotations
 from typing import TYPE_CHECKING
 
 from sentry_sdk.feature_flags import add_feature_flag
@@ -20,8 +21,7 @@ except ImportError:
 class LaunchDarklyIntegration(Integration):
     identifier = "launchdarkly"
 
-    def __init__(self, ld_client=None):
-        # type: (LDClient | None) -> None
+    def __init__(self, ld_client: LDClient | None = None) -> None:
         """
         :param client: An initialized LDClient instance. If a client is not provided, this
             integration will attempt to use the shared global instance.
@@ -38,25 +38,28 @@ class LaunchDarklyIntegration(Integration):
             client.add_hook(LaunchDarklyHook())
 
     @staticmethod
-    def setup_once():
-        # type: () -> None
+    def setup_once() -> None:
         pass
 
 
 class LaunchDarklyHook(Hook):
 
     @property
-    def metadata(self):
-        # type: () -> Metadata
+    def metadata(self) -> Metadata:
         return Metadata(name="sentry-flag-auditor")
 
-    def after_evaluation(
-
+    def after_evaluation(
+        self,
+        series_context: EvaluationSeriesContext,
+        data: dict[Any, Any],
+        detail: EvaluationDetail,
+    ) -> dict[Any, Any]:
         if isinstance(detail.value, bool):
             add_feature_flag(series_context.key, detail.value)
 
         return data
 
-    def before_evaluation(
-
+    def before_evaluation(
+        self, series_context: EvaluationSeriesContext, data: dict[Any, Any]
+    ) -> dict[Any, Any]:
         return data  # No-op.
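
Similarly, a minimal sketch of enabling the LaunchDarkly integration with the updated signature; per the docstring above, omitting ld_client makes the integration fall back to the shared global LDClient instance. The DSN is again a placeholder:

    import sentry_sdk
    from sentry_sdk.integrations.launchdarkly import LaunchDarklyIntegration

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
        integrations=[
            # ld_client is optional; when omitted the integration uses the
            # shared global LDClient instance, as noted in the docstring above.
            LaunchDarklyIntegration()
        ],
    )
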