sentry-sdk 0.18.0__py2.py3-none-any.whl → 2.46.0__py2.py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- sentry_sdk/__init__.py +48 -6
- sentry_sdk/_compat.py +64 -56
- sentry_sdk/_init_implementation.py +84 -0
- sentry_sdk/_log_batcher.py +172 -0
- sentry_sdk/_lru_cache.py +47 -0
- sentry_sdk/_metrics_batcher.py +167 -0
- sentry_sdk/_queue.py +81 -19
- sentry_sdk/_types.py +311 -11
- sentry_sdk/_werkzeug.py +98 -0
- sentry_sdk/ai/__init__.py +7 -0
- sentry_sdk/ai/monitoring.py +137 -0
- sentry_sdk/ai/utils.py +144 -0
- sentry_sdk/api.py +409 -67
- sentry_sdk/attachments.py +75 -0
- sentry_sdk/client.py +849 -103
- sentry_sdk/consts.py +1389 -34
- sentry_sdk/crons/__init__.py +10 -0
- sentry_sdk/crons/api.py +62 -0
- sentry_sdk/crons/consts.py +4 -0
- sentry_sdk/crons/decorator.py +135 -0
- sentry_sdk/debug.py +12 -15
- sentry_sdk/envelope.py +112 -61
- sentry_sdk/feature_flags.py +71 -0
- sentry_sdk/hub.py +442 -386
- sentry_sdk/integrations/__init__.py +228 -58
- sentry_sdk/integrations/_asgi_common.py +108 -0
- sentry_sdk/integrations/_wsgi_common.py +131 -40
- sentry_sdk/integrations/aiohttp.py +221 -72
- sentry_sdk/integrations/anthropic.py +439 -0
- sentry_sdk/integrations/argv.py +4 -6
- sentry_sdk/integrations/ariadne.py +161 -0
- sentry_sdk/integrations/arq.py +247 -0
- sentry_sdk/integrations/asgi.py +237 -135
- sentry_sdk/integrations/asyncio.py +144 -0
- sentry_sdk/integrations/asyncpg.py +208 -0
- sentry_sdk/integrations/atexit.py +13 -18
- sentry_sdk/integrations/aws_lambda.py +233 -80
- sentry_sdk/integrations/beam.py +27 -35
- sentry_sdk/integrations/boto3.py +137 -0
- sentry_sdk/integrations/bottle.py +91 -69
- sentry_sdk/integrations/celery/__init__.py +529 -0
- sentry_sdk/integrations/celery/beat.py +293 -0
- sentry_sdk/integrations/celery/utils.py +43 -0
- sentry_sdk/integrations/chalice.py +35 -28
- sentry_sdk/integrations/clickhouse_driver.py +177 -0
- sentry_sdk/integrations/cloud_resource_context.py +280 -0
- sentry_sdk/integrations/cohere.py +274 -0
- sentry_sdk/integrations/dedupe.py +32 -8
- sentry_sdk/integrations/django/__init__.py +343 -89
- sentry_sdk/integrations/django/asgi.py +201 -22
- sentry_sdk/integrations/django/caching.py +204 -0
- sentry_sdk/integrations/django/middleware.py +80 -32
- sentry_sdk/integrations/django/signals_handlers.py +91 -0
- sentry_sdk/integrations/django/templates.py +69 -2
- sentry_sdk/integrations/django/transactions.py +39 -14
- sentry_sdk/integrations/django/views.py +69 -16
- sentry_sdk/integrations/dramatiq.py +226 -0
- sentry_sdk/integrations/excepthook.py +19 -13
- sentry_sdk/integrations/executing.py +5 -6
- sentry_sdk/integrations/falcon.py +128 -65
- sentry_sdk/integrations/fastapi.py +141 -0
- sentry_sdk/integrations/flask.py +114 -75
- sentry_sdk/integrations/gcp.py +67 -36
- sentry_sdk/integrations/gnu_backtrace.py +14 -22
- sentry_sdk/integrations/google_genai/__init__.py +301 -0
- sentry_sdk/integrations/google_genai/consts.py +16 -0
- sentry_sdk/integrations/google_genai/streaming.py +155 -0
- sentry_sdk/integrations/google_genai/utils.py +576 -0
- sentry_sdk/integrations/gql.py +162 -0
- sentry_sdk/integrations/graphene.py +151 -0
- sentry_sdk/integrations/grpc/__init__.py +168 -0
- sentry_sdk/integrations/grpc/aio/__init__.py +7 -0
- sentry_sdk/integrations/grpc/aio/client.py +95 -0
- sentry_sdk/integrations/grpc/aio/server.py +100 -0
- sentry_sdk/integrations/grpc/client.py +91 -0
- sentry_sdk/integrations/grpc/consts.py +1 -0
- sentry_sdk/integrations/grpc/server.py +66 -0
- sentry_sdk/integrations/httpx.py +178 -0
- sentry_sdk/integrations/huey.py +174 -0
- sentry_sdk/integrations/huggingface_hub.py +378 -0
- sentry_sdk/integrations/langchain.py +1132 -0
- sentry_sdk/integrations/langgraph.py +337 -0
- sentry_sdk/integrations/launchdarkly.py +61 -0
- sentry_sdk/integrations/litellm.py +287 -0
- sentry_sdk/integrations/litestar.py +315 -0
- sentry_sdk/integrations/logging.py +261 -85
- sentry_sdk/integrations/loguru.py +213 -0
- sentry_sdk/integrations/mcp.py +566 -0
- sentry_sdk/integrations/modules.py +6 -33
- sentry_sdk/integrations/openai.py +725 -0
- sentry_sdk/integrations/openai_agents/__init__.py +61 -0
- sentry_sdk/integrations/openai_agents/consts.py +1 -0
- sentry_sdk/integrations/openai_agents/patches/__init__.py +5 -0
- sentry_sdk/integrations/openai_agents/patches/agent_run.py +140 -0
- sentry_sdk/integrations/openai_agents/patches/error_tracing.py +77 -0
- sentry_sdk/integrations/openai_agents/patches/models.py +50 -0
- sentry_sdk/integrations/openai_agents/patches/runner.py +45 -0
- sentry_sdk/integrations/openai_agents/patches/tools.py +77 -0
- sentry_sdk/integrations/openai_agents/spans/__init__.py +5 -0
- sentry_sdk/integrations/openai_agents/spans/agent_workflow.py +21 -0
- sentry_sdk/integrations/openai_agents/spans/ai_client.py +42 -0
- sentry_sdk/integrations/openai_agents/spans/execute_tool.py +48 -0
- sentry_sdk/integrations/openai_agents/spans/handoff.py +19 -0
- sentry_sdk/integrations/openai_agents/spans/invoke_agent.py +86 -0
- sentry_sdk/integrations/openai_agents/utils.py +199 -0
- sentry_sdk/integrations/openfeature.py +35 -0
- sentry_sdk/integrations/opentelemetry/__init__.py +7 -0
- sentry_sdk/integrations/opentelemetry/consts.py +5 -0
- sentry_sdk/integrations/opentelemetry/integration.py +58 -0
- sentry_sdk/integrations/opentelemetry/propagator.py +117 -0
- sentry_sdk/integrations/opentelemetry/span_processor.py +391 -0
- sentry_sdk/integrations/otlp.py +82 -0
- sentry_sdk/integrations/pure_eval.py +20 -11
- sentry_sdk/integrations/pydantic_ai/__init__.py +47 -0
- sentry_sdk/integrations/pydantic_ai/consts.py +1 -0
- sentry_sdk/integrations/pydantic_ai/patches/__init__.py +4 -0
- sentry_sdk/integrations/pydantic_ai/patches/agent_run.py +215 -0
- sentry_sdk/integrations/pydantic_ai/patches/graph_nodes.py +110 -0
- sentry_sdk/integrations/pydantic_ai/patches/model_request.py +40 -0
- sentry_sdk/integrations/pydantic_ai/patches/tools.py +98 -0
- sentry_sdk/integrations/pydantic_ai/spans/__init__.py +3 -0
- sentry_sdk/integrations/pydantic_ai/spans/ai_client.py +246 -0
- sentry_sdk/integrations/pydantic_ai/spans/execute_tool.py +49 -0
- sentry_sdk/integrations/pydantic_ai/spans/invoke_agent.py +112 -0
- sentry_sdk/integrations/pydantic_ai/utils.py +223 -0
- sentry_sdk/integrations/pymongo.py +214 -0
- sentry_sdk/integrations/pyramid.py +71 -60
- sentry_sdk/integrations/quart.py +237 -0
- sentry_sdk/integrations/ray.py +165 -0
- sentry_sdk/integrations/redis/__init__.py +48 -0
- sentry_sdk/integrations/redis/_async_common.py +116 -0
- sentry_sdk/integrations/redis/_sync_common.py +119 -0
- sentry_sdk/integrations/redis/consts.py +19 -0
- sentry_sdk/integrations/redis/modules/__init__.py +0 -0
- sentry_sdk/integrations/redis/modules/caches.py +118 -0
- sentry_sdk/integrations/redis/modules/queries.py +65 -0
- sentry_sdk/integrations/redis/rb.py +32 -0
- sentry_sdk/integrations/redis/redis.py +69 -0
- sentry_sdk/integrations/redis/redis_cluster.py +107 -0
- sentry_sdk/integrations/redis/redis_py_cluster_legacy.py +50 -0
- sentry_sdk/integrations/redis/utils.py +148 -0
- sentry_sdk/integrations/rq.py +62 -52
- sentry_sdk/integrations/rust_tracing.py +284 -0
- sentry_sdk/integrations/sanic.py +248 -114
- sentry_sdk/integrations/serverless.py +13 -22
- sentry_sdk/integrations/socket.py +96 -0
- sentry_sdk/integrations/spark/spark_driver.py +115 -62
- sentry_sdk/integrations/spark/spark_worker.py +42 -50
- sentry_sdk/integrations/sqlalchemy.py +82 -37
- sentry_sdk/integrations/starlette.py +737 -0
- sentry_sdk/integrations/starlite.py +292 -0
- sentry_sdk/integrations/statsig.py +37 -0
- sentry_sdk/integrations/stdlib.py +100 -58
- sentry_sdk/integrations/strawberry.py +394 -0
- sentry_sdk/integrations/sys_exit.py +70 -0
- sentry_sdk/integrations/threading.py +142 -38
- sentry_sdk/integrations/tornado.py +68 -53
- sentry_sdk/integrations/trytond.py +15 -20
- sentry_sdk/integrations/typer.py +60 -0
- sentry_sdk/integrations/unleash.py +33 -0
- sentry_sdk/integrations/unraisablehook.py +53 -0
- sentry_sdk/integrations/wsgi.py +126 -125
- sentry_sdk/logger.py +96 -0
- sentry_sdk/metrics.py +81 -0
- sentry_sdk/monitor.py +120 -0
- sentry_sdk/profiler/__init__.py +49 -0
- sentry_sdk/profiler/continuous_profiler.py +730 -0
- sentry_sdk/profiler/transaction_profiler.py +839 -0
- sentry_sdk/profiler/utils.py +195 -0
- sentry_sdk/scope.py +1542 -112
- sentry_sdk/scrubber.py +177 -0
- sentry_sdk/serializer.py +152 -210
- sentry_sdk/session.py +177 -0
- sentry_sdk/sessions.py +202 -179
- sentry_sdk/spotlight.py +242 -0
- sentry_sdk/tracing.py +1202 -294
- sentry_sdk/tracing_utils.py +1236 -0
- sentry_sdk/transport.py +693 -189
- sentry_sdk/types.py +52 -0
- sentry_sdk/utils.py +1395 -228
- sentry_sdk/worker.py +30 -17
- sentry_sdk-2.46.0.dist-info/METADATA +268 -0
- sentry_sdk-2.46.0.dist-info/RECORD +189 -0
- {sentry_sdk-0.18.0.dist-info → sentry_sdk-2.46.0.dist-info}/WHEEL +1 -1
- sentry_sdk-2.46.0.dist-info/entry_points.txt +2 -0
- sentry_sdk-2.46.0.dist-info/licenses/LICENSE +21 -0
- sentry_sdk/_functools.py +0 -66
- sentry_sdk/integrations/celery.py +0 -275
- sentry_sdk/integrations/redis.py +0 -103
- sentry_sdk-0.18.0.dist-info/LICENSE +0 -9
- sentry_sdk-0.18.0.dist-info/METADATA +0 -66
- sentry_sdk-0.18.0.dist-info/RECORD +0 -65
- {sentry_sdk-0.18.0.dist-info → sentry_sdk-2.46.0.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,439 @@
|
|
|
1
|
+
from functools import wraps
|
|
2
|
+
from typing import TYPE_CHECKING
|
|
3
|
+
|
|
4
|
+
import sentry_sdk
|
|
5
|
+
from sentry_sdk.ai.monitoring import record_token_usage
|
|
6
|
+
from sentry_sdk.ai.utils import (
|
|
7
|
+
set_data_normalized,
|
|
8
|
+
normalize_message_roles,
|
|
9
|
+
truncate_and_annotate_messages,
|
|
10
|
+
get_start_span_function,
|
|
11
|
+
)
|
|
12
|
+
from sentry_sdk.consts import OP, SPANDATA, SPANSTATUS
|
|
13
|
+
from sentry_sdk.integrations import _check_minimum_version, DidNotEnable, Integration
|
|
14
|
+
from sentry_sdk.scope import should_send_default_pii
|
|
15
|
+
from sentry_sdk.tracing_utils import set_span_errored
|
|
16
|
+
from sentry_sdk.utils import (
|
|
17
|
+
capture_internal_exceptions,
|
|
18
|
+
event_from_exception,
|
|
19
|
+
package_version,
|
|
20
|
+
safe_serialize,
|
|
21
|
+
)
|
|
22
|
+
|
|
23
|
+
try:
|
|
24
|
+
try:
|
|
25
|
+
from anthropic import NotGiven
|
|
26
|
+
except ImportError:
|
|
27
|
+
NotGiven = None
|
|
28
|
+
|
|
29
|
+
try:
|
|
30
|
+
from anthropic import Omit
|
|
31
|
+
except ImportError:
|
|
32
|
+
Omit = None
|
|
33
|
+
|
|
34
|
+
from anthropic.resources import AsyncMessages, Messages
|
|
35
|
+
|
|
36
|
+
if TYPE_CHECKING:
|
|
37
|
+
from anthropic.types import MessageStreamEvent
|
|
38
|
+
except ImportError:
|
|
39
|
+
raise DidNotEnable("Anthropic not installed")
|
|
40
|
+
|
|
41
|
+
if TYPE_CHECKING:
|
|
42
|
+
from typing import Any, AsyncIterator, Iterator
|
|
43
|
+
from sentry_sdk.tracing import Span
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
class AnthropicIntegration(Integration):
    """Sentry integration for the Anthropic Python SDK.

    Monkeypatches ``Messages.create`` and ``AsyncMessages.create`` so that
    message-creation calls are traced and exceptions captured.
    """

    identifier = "anthropic"
    origin = f"auto.ai.{identifier}"

    def __init__(self, include_prompts=True):
        # type: (AnthropicIntegration, bool) -> None
        # When False, prompts and responses are never attached to span
        # data, even if send_default_pii is enabled.
        self.include_prompts = include_prompts

    @staticmethod
    def setup_once():
        # type: () -> None
        version = package_version("anthropic")
        _check_minimum_version(AnthropicIntegration, version)

        # Patch both the sync and async message-creation entry points.
        Messages.create = _wrap_message_create(Messages.create)
        AsyncMessages.create = _wrap_message_create_async(AsyncMessages.create)
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
def _capture_exception(exc):
    # type: (Any) -> None
    """Mark the current span as errored and report *exc* to Sentry.

    The exception is recorded as unhandled with mechanism type
    ``"anthropic"``.
    """
    set_span_errored()

    client_options = sentry_sdk.get_client().options
    mechanism = {"type": "anthropic", "handled": False}
    event, hint = event_from_exception(
        exc, client_options=client_options, mechanism=mechanism
    )
    sentry_sdk.capture_event(event, hint=hint)
|
|
74
|
+
|
|
75
|
+
|
|
76
|
+
def _get_token_usage(result):
|
|
77
|
+
# type: (Messages) -> tuple[int, int]
|
|
78
|
+
"""
|
|
79
|
+
Get token usage from the Anthropic response.
|
|
80
|
+
"""
|
|
81
|
+
input_tokens = 0
|
|
82
|
+
output_tokens = 0
|
|
83
|
+
if hasattr(result, "usage"):
|
|
84
|
+
usage = result.usage
|
|
85
|
+
if hasattr(usage, "input_tokens") and isinstance(usage.input_tokens, int):
|
|
86
|
+
input_tokens = usage.input_tokens
|
|
87
|
+
if hasattr(usage, "output_tokens") and isinstance(usage.output_tokens, int):
|
|
88
|
+
output_tokens = usage.output_tokens
|
|
89
|
+
|
|
90
|
+
return input_tokens, output_tokens
|
|
91
|
+
|
|
92
|
+
|
|
93
|
+
def _collect_ai_data(event, model, input_tokens, output_tokens, content_blocks):
    # type: (MessageStreamEvent, str | None, int, int, list[str]) -> tuple[str | None, int, int, list[str]]
    """
    Collect model information, token usage, and collect content blocks from the AI streaming response.

    Called once per streaming event; running totals are threaded through
    the arguments and returned so the caller can accumulate them.
    """
    with capture_internal_exceptions():
        if hasattr(event, "type"):
            if event.type == "message_start":
                # The opening event carries the model name and the token
                # usage counters for the request so far.
                usage = event.message.usage
                input_tokens += usage.input_tokens
                output_tokens += usage.output_tokens
                model = event.message.model or model
            elif event.type == "content_block_start":
                pass
            elif event.type == "content_block_delta":
                # Text deltas and tool-use deltas (partial JSON) both
                # contribute to the collected response content.
                if hasattr(event.delta, "text"):
                    content_blocks.append(event.delta.text)
                elif hasattr(event.delta, "partial_json"):
                    content_blocks.append(event.delta.partial_json)
            elif event.type == "content_block_stop":
                pass
            elif event.type == "message_delta":
                # Incremental output-token counts arrive on message_delta.
                output_tokens += event.usage.output_tokens

    return model, input_tokens, output_tokens, content_blocks
|
|
118
|
+
|
|
119
|
+
|
|
120
|
+
def _set_input_data(span, kwargs, integration):
    # type: (Span, dict[str, Any], AnthropicIntegration) -> None
    """
    Set input data for the span based on the provided keyword arguments for the anthropic message creation.

    Prompt messages are only attached when both send_default_pii and the
    integration's include_prompts flag allow it.
    """
    messages = kwargs.get("messages")
    if (
        messages is not None
        and len(messages) > 0
        and should_send_default_pii()
        and integration.include_prompts
    ):
        normalized_messages = []
        for message in messages:
            if (
                message.get("role") == "user"
                and "content" in message
                and isinstance(message["content"], (list, tuple))
            ):
                # Flatten tool results embedded in user messages into
                # dedicated "tool" role messages.
                for item in message["content"]:
                    if item.get("type") == "tool_result":
                        normalized_messages.append(
                            {
                                "role": "tool",
                                "content": {
                                    "tool_use_id": item.get("tool_use_id"),
                                    "output": item.get("content"),
                                },
                            }
                        )
            else:
                normalized_messages.append(message)

        role_normalized_messages = normalize_message_roles(normalized_messages)
        scope = sentry_sdk.get_current_scope()
        # Truncation may drop messages; annotation records that on the scope.
        messages_data = truncate_and_annotate_messages(
            role_normalized_messages, span, scope
        )
        if messages_data is not None:
            set_data_normalized(
                span, SPANDATA.GEN_AI_REQUEST_MESSAGES, messages_data, unpack=False
            )

    set_data_normalized(
        span, SPANDATA.GEN_AI_RESPONSE_STREAMING, kwargs.get("stream", False)
    )

    kwargs_keys_to_attributes = {
        "max_tokens": SPANDATA.GEN_AI_REQUEST_MAX_TOKENS,
        "model": SPANDATA.GEN_AI_REQUEST_MODEL,
        "temperature": SPANDATA.GEN_AI_REQUEST_TEMPERATURE,
        "top_k": SPANDATA.GEN_AI_REQUEST_TOP_K,
        "top_p": SPANDATA.GEN_AI_REQUEST_TOP_P,
    }
    for key, attribute in kwargs_keys_to_attributes.items():
        value = kwargs.get(key)

        # _is_given filters out anthropic's NotGiven/Omit sentinels.
        if value is not None and _is_given(value):
            set_data_normalized(span, attribute, value)

    # Input attributes: Tools
    tools = kwargs.get("tools")
    if tools is not None and _is_given(tools) and len(tools) > 0:
        set_data_normalized(
            span, SPANDATA.GEN_AI_REQUEST_AVAILABLE_TOOLS, safe_serialize(tools)
        )
|
|
186
|
+
|
|
187
|
+
|
|
188
|
+
def _set_output_data(
    span,
    integration,
    model,
    input_tokens,
    output_tokens,
    content_blocks,
    finish_span=False,
):
    # type: (Span, AnthropicIntegration, str | None, int | None, int | None, list[Any], bool) -> None
    """
    Set output data for the span based on the AI response."""
    span.set_data(SPANDATA.GEN_AI_RESPONSE_MODEL, model)
    # Response content is PII; only attach when explicitly allowed.
    if should_send_default_pii() and integration.include_prompts:
        output_messages = {
            "response": [],
            "tool": [],
        }  # type: (dict[str, list[Any]])

        # Split content blocks into plain text responses and tool calls.
        for output in content_blocks:
            if output["type"] == "text":
                output_messages["response"].append(output["text"])
            elif output["type"] == "tool_use":
                output_messages["tool"].append(output)

        if len(output_messages["tool"]) > 0:
            set_data_normalized(
                span,
                SPANDATA.GEN_AI_RESPONSE_TOOL_CALLS,
                output_messages["tool"],
                unpack=False,
            )

        if len(output_messages["response"]) > 0:
            set_data_normalized(
                span, SPANDATA.GEN_AI_RESPONSE_TEXT, output_messages["response"]
            )

    # Token usage is not PII and is always recorded.
    record_token_usage(
        span,
        input_tokens=input_tokens,
        output_tokens=output_tokens,
    )

    # The span was opened with span.__enter__() by the caller; close it
    # here once the (possibly streamed) response is fully consumed.
    if finish_span:
        span.__exit__(None, None, None)
|
|
234
|
+
|
|
235
|
+
|
|
236
|
+
def _sentry_patched_create_common(f, *args, **kwargs):
    # type: (Any, *Any, **Any) -> Any
    """Shared sync/async instrumentation for ``Messages.create``.

    Implemented as a generator so the same logic can drive both the sync
    and async wrappers: it yields ``(f, args, kwargs)`` once, the wrapper
    calls (or awaits) ``f`` and sends the result back, and this function
    then records output data and returns the result.
    """
    integration = kwargs.pop("integration")
    if integration is None:
        # Integration disabled: call through untouched.
        return f(*args, **kwargs)

    if "messages" not in kwargs:
        return f(*args, **kwargs)

    try:
        iter(kwargs["messages"])
    except TypeError:
        # Non-iterable messages: let anthropic handle the error itself.
        return f(*args, **kwargs)

    model = kwargs.get("model", "")

    span = get_start_span_function()(
        op=OP.GEN_AI_CHAT,
        name=f"chat {model}".strip(),
        origin=AnthropicIntegration.origin,
    )
    # Entered manually; closed by _set_output_data(finish_span=True) or
    # in the unknown-response branch below.
    span.__enter__()

    _set_input_data(span, kwargs, integration)

    # Hand control back to the wrapper to perform the actual call.
    result = yield f, args, kwargs

    with capture_internal_exceptions():
        # Non-streaming response: content is available immediately.
        if hasattr(result, "content"):
            input_tokens, output_tokens = _get_token_usage(result)

            content_blocks = []
            for content_block in result.content:
                if hasattr(content_block, "to_dict"):
                    content_blocks.append(content_block.to_dict())
                elif hasattr(content_block, "model_dump"):
                    content_blocks.append(content_block.model_dump())
                elif hasattr(content_block, "text"):
                    content_blocks.append({"type": "text", "text": content_block.text})

            _set_output_data(
                span=span,
                integration=integration,
                model=getattr(result, "model", None),
                input_tokens=input_tokens,
                output_tokens=output_tokens,
                content_blocks=content_blocks,
                finish_span=True,
            )

        # Streaming response
        elif hasattr(result, "_iterator"):
            # Wrap the underlying iterator so we can accumulate usage and
            # content as events pass through, then finish the span when
            # the stream is exhausted.
            old_iterator = result._iterator

            def new_iterator():
                # type: () -> Iterator[MessageStreamEvent]
                model = None
                input_tokens = 0
                output_tokens = 0
                content_blocks = []  # type: list[str]

                for event in old_iterator:
                    model, input_tokens, output_tokens, content_blocks = (
                        _collect_ai_data(
                            event, model, input_tokens, output_tokens, content_blocks
                        )
                    )
                    yield event

                _set_output_data(
                    span=span,
                    integration=integration,
                    model=model,
                    input_tokens=input_tokens,
                    output_tokens=output_tokens,
                    content_blocks=[{"text": "".join(content_blocks), "type": "text"}],
                    finish_span=True,
                )

            async def new_iterator_async():
                # type: () -> AsyncIterator[MessageStreamEvent]
                model = None
                input_tokens = 0
                output_tokens = 0
                content_blocks = []  # type: list[str]

                async for event in old_iterator:
                    model, input_tokens, output_tokens, content_blocks = (
                        _collect_ai_data(
                            event, model, input_tokens, output_tokens, content_blocks
                        )
                    )
                    yield event

                _set_output_data(
                    span=span,
                    integration=integration,
                    model=model,
                    input_tokens=input_tokens,
                    output_tokens=output_tokens,
                    content_blocks=[{"text": "".join(content_blocks), "type": "text"}],
                    finish_span=True,
                )

            # Pick the async wrapper only for async generators; the type
            # name check avoids importing inspect-level helpers here.
            if str(type(result._iterator)) == "<class 'async_generator'>":
                result._iterator = new_iterator_async()
            else:
                result._iterator = new_iterator()

        else:
            # Unrecognized response shape: mark it and close the span now.
            span.set_data("unknown_response", True)
            span.__exit__(None, None, None)

    return result
|
|
350
|
+
|
|
351
|
+
|
|
352
|
+
def _wrap_message_create(f):
    # type: (Any) -> Any
    """Wrap the synchronous ``Messages.create`` with Sentry instrumentation."""

    def _execute_sync(f, *args, **kwargs):
        # type: (Any, *Any, **Any) -> Any
        # Drive the shared instrumentation generator: it yields the call
        # to perform, we execute it and send the result back.
        gen = _sentry_patched_create_common(f, *args, **kwargs)

        try:
            f, args, kwargs = next(gen)
        except StopIteration as e:
            # Generator bailed out early (integration disabled / bad args);
            # its return value is the passthrough call result.
            return e.value

        try:
            try:
                result = f(*args, **kwargs)
            except Exception as exc:
                _capture_exception(exc)
                raise exc from None

            return gen.send(result)
        except StopIteration as e:
            # Normal completion: the generator returned the final result.
            return e.value

    @wraps(f)
    def _sentry_patched_create_sync(*args, **kwargs):
        # type: (*Any, **Any) -> Any
        integration = sentry_sdk.get_client().get_integration(AnthropicIntegration)
        kwargs["integration"] = integration

        try:
            return _execute_sync(f, *args, **kwargs)
        finally:
            # On error the span is never finished by _set_output_data, so
            # close it here to avoid leaking an open span.
            span = sentry_sdk.get_current_span()
            if span is not None and span.status == SPANSTATUS.ERROR:
                with capture_internal_exceptions():
                    span.__exit__(None, None, None)

    return _sentry_patched_create_sync
|
|
389
|
+
|
|
390
|
+
|
|
391
|
+
def _wrap_message_create_async(f):
    # type: (Any) -> Any
    """Wrap the asynchronous ``AsyncMessages.create`` with Sentry instrumentation."""

    async def _execute_async(f, *args, **kwargs):
        # type: (Any, *Any, **Any) -> Any
        # Drive the shared instrumentation generator: it yields the call
        # to perform, we await it and send the result back.
        gen = _sentry_patched_create_common(f, *args, **kwargs)

        try:
            f, args, kwargs = next(gen)
        except StopIteration as e:
            # Early bailout returns the un-instrumented coroutine; await it.
            return await e.value

        try:
            try:
                result = await f(*args, **kwargs)
            except Exception as exc:
                _capture_exception(exc)
                raise exc from None

            return gen.send(result)
        except StopIteration as e:
            # Normal completion: the generator returned the final result.
            return e.value

    @wraps(f)
    async def _sentry_patched_create_async(*args, **kwargs):
        # type: (*Any, **Any) -> Any
        integration = sentry_sdk.get_client().get_integration(AnthropicIntegration)
        kwargs["integration"] = integration

        try:
            return await _execute_async(f, *args, **kwargs)
        finally:
            # On error the span is never finished by _set_output_data, so
            # close it here to avoid leaking an open span.
            span = sentry_sdk.get_current_span()
            if span is not None and span.status == SPANSTATUS.ERROR:
                with capture_internal_exceptions():
                    span.__exit__(None, None, None)

    return _sentry_patched_create_async
|
|
428
|
+
|
|
429
|
+
|
|
430
|
+
def _is_given(obj):
    # type: (Any) -> bool
    """
    Check for givenness safely across different anthropic versions.

    Returns False when *obj* is one of anthropic's sentinel values
    (``NotGiven`` or ``Omit``, whichever the installed version provides).
    """
    for sentinel in (NotGiven, Omit):
        if sentinel is not None and isinstance(obj, sentinel):
            return False
    return True
|
sentry_sdk/integrations/argv.py
CHANGED
|
@@ -1,14 +1,12 @@
|
|
|
1
|
-
from __future__ import absolute_import
|
|
2
|
-
|
|
3
1
|
import sys
|
|
4
2
|
|
|
5
|
-
|
|
3
|
+
import sentry_sdk
|
|
6
4
|
from sentry_sdk.integrations import Integration
|
|
7
5
|
from sentry_sdk.scope import add_global_event_processor
|
|
8
6
|
|
|
9
|
-
from
|
|
7
|
+
from typing import TYPE_CHECKING
|
|
10
8
|
|
|
11
|
-
if
|
|
9
|
+
if TYPE_CHECKING:
|
|
12
10
|
from typing import Optional
|
|
13
11
|
|
|
14
12
|
from sentry_sdk._types import Event, Hint
|
|
@@ -23,7 +21,7 @@ class ArgvIntegration(Integration):
|
|
|
23
21
|
@add_global_event_processor
|
|
24
22
|
def processor(event, hint):
|
|
25
23
|
# type: (Event, Optional[Hint]) -> Optional[Event]
|
|
26
|
-
if
|
|
24
|
+
if sentry_sdk.get_client().get_integration(ArgvIntegration) is not None:
|
|
27
25
|
extra = event.setdefault("extra", {})
|
|
28
26
|
# If some event processor decided to set extra to e.g. an
|
|
29
27
|
# `int`, don't crash. Not here.
|
|
@@ -0,0 +1,161 @@
|
|
|
1
|
+
from importlib import import_module
|
|
2
|
+
|
|
3
|
+
import sentry_sdk
|
|
4
|
+
from sentry_sdk import get_client, capture_event
|
|
5
|
+
from sentry_sdk.integrations import _check_minimum_version, DidNotEnable, Integration
|
|
6
|
+
from sentry_sdk.integrations.logging import ignore_logger
|
|
7
|
+
from sentry_sdk.integrations._wsgi_common import request_body_within_bounds
|
|
8
|
+
from sentry_sdk.scope import should_send_default_pii
|
|
9
|
+
from sentry_sdk.utils import (
|
|
10
|
+
capture_internal_exceptions,
|
|
11
|
+
ensure_integration_enabled,
|
|
12
|
+
event_from_exception,
|
|
13
|
+
package_version,
|
|
14
|
+
)
|
|
15
|
+
|
|
16
|
+
try:
|
|
17
|
+
# importing like this is necessary due to name shadowing in ariadne
|
|
18
|
+
# (ariadne.graphql is also a function)
|
|
19
|
+
ariadne_graphql = import_module("ariadne.graphql")
|
|
20
|
+
except ImportError:
|
|
21
|
+
raise DidNotEnable("ariadne is not installed")
|
|
22
|
+
|
|
23
|
+
from typing import TYPE_CHECKING
|
|
24
|
+
|
|
25
|
+
if TYPE_CHECKING:
|
|
26
|
+
from typing import Any, Dict, List, Optional
|
|
27
|
+
from ariadne.types import GraphQLError, GraphQLResult, GraphQLSchema, QueryParser # type: ignore
|
|
28
|
+
from graphql.language.ast import DocumentNode
|
|
29
|
+
from sentry_sdk._types import Event, EventProcessor
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
class AriadneIntegration(Integration):
    """Sentry integration for the ariadne GraphQL library."""

    identifier = "ariadne"

    @staticmethod
    def setup_once():
        # type: () -> None
        version = package_version("ariadne")
        _check_minimum_version(AriadneIntegration, version)

        # Silence ariadne's own logger so errors captured below are not
        # also reported through the logging integration.
        ignore_logger("ariadne")

        _patch_graphql()
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
def _patch_graphql():
    # type: () -> None
    """Monkeypatch ariadne's parse/error-handling entry points.

    Request data is attached when queries are parsed; GraphQL errors are
    captured as unhandled events with the response attached.
    """
    old_parse_query = ariadne_graphql.parse_query
    old_handle_errors = ariadne_graphql.handle_graphql_errors
    old_handle_query_result = ariadne_graphql.handle_query_result

    @ensure_integration_enabled(AriadneIntegration, old_parse_query)
    def _sentry_patched_parse_query(context_value, query_parser, data):
        # type: (Optional[Any], Optional[QueryParser], Any) -> DocumentNode
        # Register the raw request payload for events on this scope.
        event_processor = _make_request_event_processor(data)
        sentry_sdk.get_isolation_scope().add_event_processor(event_processor)

        result = old_parse_query(context_value, query_parser, data)
        return result

    @ensure_integration_enabled(AriadneIntegration, old_handle_errors)
    def _sentry_patched_handle_graphql_errors(errors, *args, **kwargs):
        # type: (List[GraphQLError], Any, Any) -> GraphQLResult
        result = old_handle_errors(errors, *args, **kwargs)

        # result[1] is the serialized GraphQL response payload.
        event_processor = _make_response_event_processor(result[1])
        sentry_sdk.get_isolation_scope().add_event_processor(event_processor)

        client = get_client()
        if client.is_active():
            with capture_internal_exceptions():
                # Capture each GraphQL error as its own Sentry event.
                for error in errors:
                    event, hint = event_from_exception(
                        error,
                        client_options=client.options,
                        mechanism={
                            "type": AriadneIntegration.identifier,
                            "handled": False,
                        },
                    )
                    capture_event(event, hint=hint)

        return result

    @ensure_integration_enabled(AriadneIntegration, old_handle_query_result)
    def _sentry_patched_handle_query_result(result, *args, **kwargs):
        # type: (Any, Any, Any) -> GraphQLResult
        query_result = old_handle_query_result(result, *args, **kwargs)

        # query_result[1] is the serialized GraphQL response payload.
        event_processor = _make_response_event_processor(query_result[1])
        sentry_sdk.get_isolation_scope().add_event_processor(event_processor)

        client = get_client()
        if client.is_active():
            with capture_internal_exceptions():
                for error in result.errors or []:
                    event, hint = event_from_exception(
                        error,
                        client_options=client.options,
                        mechanism={
                            "type": AriadneIntegration.identifier,
                            "handled": False,
                        },
                    )
                    capture_event(event, hint=hint)

        return query_result

    ariadne_graphql.parse_query = _sentry_patched_parse_query  # type: ignore
    ariadne_graphql.handle_graphql_errors = _sentry_patched_handle_graphql_errors  # type: ignore
    ariadne_graphql.handle_query_result = _sentry_patched_handle_query_result  # type: ignore
|
|
112
|
+
|
|
113
|
+
|
|
114
|
+
def _make_request_event_processor(data):
    # type: (GraphQLSchema) -> EventProcessor
    """Add request data and api_target to events."""

    def inner(event, hint):
        # type: (Event, dict[str, Any]) -> Event
        if not isinstance(data, dict):
            return event

        with capture_internal_exceptions():
            try:
                content_length = int(
                    (data.get("headers") or {}).get("Content-Length", 0)
                )
            except (TypeError, ValueError):
                # Unparseable Content-Length: leave the event untouched.
                return event

            if should_send_default_pii() and request_body_within_bounds(
                get_client(), content_length
            ):
                request_info = event.setdefault("request", {})
                # Marks the body as GraphQL so Sentry renders it as such.
                request_info["api_target"] = "graphql"
                request_info["data"] = data

            elif event.get("request", {}).get("data"):
                # PII disallowed or body too large: strip any request body
                # another processor may already have attached.
                del event["request"]["data"]

        return event

    return inner
|
|
144
|
+
|
|
145
|
+
|
|
146
|
+
def _make_response_event_processor(response):
    # type: (Dict[str, Any]) -> EventProcessor
    """Add response data to the event's response context.

    The response payload (which may contain PII) is only attached when
    it carries errors and send_default_pii is enabled.
    """

    def inner(event, hint):
        # type: (Event, dict[str, Any]) -> Event
        with capture_internal_exceptions():
            if should_send_default_pii():
                if response.get("errors"):
                    event.setdefault("contexts", {})["response"] = {
                        "data": response,
                    }

        return event

    return inner
|