sentry-sdk 2.39.0__py2.py3-none-any.whl → 2.40.0__py2.py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
Potentially problematic release. This version of sentry-sdk might be problematic.
- sentry_sdk/client.py +6 -6
- sentry_sdk/consts.py +12 -2
- sentry_sdk/envelope.py +28 -14
- sentry_sdk/feature_flags.py +0 -1
- sentry_sdk/hub.py +17 -9
- sentry_sdk/integrations/__init__.py +1 -0
- sentry_sdk/integrations/asgi.py +3 -2
- sentry_sdk/integrations/dramatiq.py +89 -31
- sentry_sdk/integrations/grpc/aio/client.py +2 -1
- sentry_sdk/integrations/grpc/client.py +3 -4
- sentry_sdk/integrations/langchain.py +0 -1
- sentry_sdk/integrations/launchdarkly.py +0 -1
- sentry_sdk/integrations/litellm.py +251 -0
- sentry_sdk/integrations/litestar.py +4 -4
- sentry_sdk/integrations/openai_agents/spans/ai_client.py +4 -1
- sentry_sdk/integrations/openai_agents/utils.py +25 -1
- sentry_sdk/integrations/pure_eval.py +3 -1
- sentry_sdk/integrations/spark/spark_driver.py +2 -1
- sentry_sdk/integrations/sqlalchemy.py +2 -6
- sentry_sdk/integrations/starlette.py +1 -3
- sentry_sdk/integrations/starlite.py +4 -4
- sentry_sdk/integrations/wsgi.py +3 -2
- sentry_sdk/metrics.py +17 -11
- sentry_sdk/profiler/utils.py +2 -6
- sentry_sdk/scope.py +6 -3
- sentry_sdk/serializer.py +1 -3
- sentry_sdk/session.py +4 -2
- sentry_sdk/sessions.py +4 -2
- sentry_sdk/tracing.py +36 -7
- sentry_sdk/tracing_utils.py +1 -3
- sentry_sdk/transport.py +8 -9
- sentry_sdk/utils.py +5 -3
- {sentry_sdk-2.39.0.dist-info → sentry_sdk-2.40.0.dist-info}/METADATA +3 -1
- {sentry_sdk-2.39.0.dist-info → sentry_sdk-2.40.0.dist-info}/RECORD +38 -37
- {sentry_sdk-2.39.0.dist-info → sentry_sdk-2.40.0.dist-info}/WHEEL +0 -0
- {sentry_sdk-2.39.0.dist-info → sentry_sdk-2.40.0.dist-info}/entry_points.txt +0 -0
- {sentry_sdk-2.39.0.dist-info → sentry_sdk-2.40.0.dist-info}/licenses/LICENSE +0 -0
- {sentry_sdk-2.39.0.dist-info → sentry_sdk-2.40.0.dist-info}/top_level.txt +0 -0
sentry_sdk/integrations/litellm.py ADDED
@@ -0,0 +1,251 @@
+from typing import TYPE_CHECKING
+
+import sentry_sdk
+from sentry_sdk import consts
+from sentry_sdk.ai.monitoring import record_token_usage
+from sentry_sdk.ai.utils import get_start_span_function, set_data_normalized
+from sentry_sdk.consts import SPANDATA
+from sentry_sdk.integrations import DidNotEnable, Integration
+from sentry_sdk.scope import should_send_default_pii
+from sentry_sdk.utils import event_from_exception
+
+if TYPE_CHECKING:
+    from typing import Any, Dict
+    from datetime import datetime
+
+try:
+    import litellm  # type: ignore[import-not-found]
+except ImportError:
+    raise DidNotEnable("LiteLLM not installed")
+
+
+def _get_metadata_dict(kwargs):
+    # type: (Dict[str, Any]) -> Dict[str, Any]
+    """Get the metadata dictionary from the kwargs."""
+    litellm_params = kwargs.setdefault("litellm_params", {})
+
+    # we need this weird little dance, as metadata might be set but may be None initially
+    metadata = litellm_params.get("metadata")
+    if metadata is None:
+        metadata = {}
+        litellm_params["metadata"] = metadata
+    return metadata
+
+
+def _input_callback(kwargs):
+    # type: (Dict[str, Any]) -> None
+    """Handle the start of a request."""
+    integration = sentry_sdk.get_client().get_integration(LiteLLMIntegration)
+
+    if integration is None:
+        return
+
+    # Get key parameters
+    full_model = kwargs.get("model", "")
+    try:
+        model, provider, _, _ = litellm.get_llm_provider(full_model)
+    except Exception:
+        model = full_model
+        provider = "unknown"
+
+    messages = kwargs.get("messages", [])
+    operation = "chat" if messages else "embeddings"
+
+    # Start a new span/transaction
+    span = get_start_span_function()(
+        op=(
+            consts.OP.GEN_AI_CHAT
+            if operation == "chat"
+            else consts.OP.GEN_AI_EMBEDDINGS
+        ),
+        name=f"{operation} {model}",
+        origin=LiteLLMIntegration.origin,
+    )
+    span.__enter__()
+
+    # Store span for later
+    _get_metadata_dict(kwargs)["_sentry_span"] = span
+
+    # Set basic data
+    set_data_normalized(span, SPANDATA.GEN_AI_SYSTEM, provider)
+    set_data_normalized(span, SPANDATA.GEN_AI_OPERATION_NAME, operation)
+
+    # Record messages if allowed
+    if messages and should_send_default_pii() and integration.include_prompts:
+        set_data_normalized(
+            span, SPANDATA.GEN_AI_REQUEST_MESSAGES, messages, unpack=False
+        )
+
+    # Record other parameters
+    params = {
+        "model": SPANDATA.GEN_AI_REQUEST_MODEL,
+        "stream": SPANDATA.GEN_AI_RESPONSE_STREAMING,
+        "max_tokens": SPANDATA.GEN_AI_REQUEST_MAX_TOKENS,
+        "presence_penalty": SPANDATA.GEN_AI_REQUEST_PRESENCE_PENALTY,
+        "frequency_penalty": SPANDATA.GEN_AI_REQUEST_FREQUENCY_PENALTY,
+        "temperature": SPANDATA.GEN_AI_REQUEST_TEMPERATURE,
+        "top_p": SPANDATA.GEN_AI_REQUEST_TOP_P,
+    }
+    for key, attribute in params.items():
+        value = kwargs.get(key)
+        if value is not None:
+            set_data_normalized(span, attribute, value)
+
+    # Record LiteLLM-specific parameters
+    litellm_params = {
+        "api_base": kwargs.get("api_base"),
+        "api_version": kwargs.get("api_version"),
+        "custom_llm_provider": kwargs.get("custom_llm_provider"),
+    }
+    for key, value in litellm_params.items():
+        if value is not None:
+            set_data_normalized(span, f"gen_ai.litellm.{key}", value)
+
+
+def _success_callback(kwargs, completion_response, start_time, end_time):
+    # type: (Dict[str, Any], Any, datetime, datetime) -> None
+    """Handle successful completion."""
+
+    span = _get_metadata_dict(kwargs).get("_sentry_span")
+    if span is None:
+        return
+
+    integration = sentry_sdk.get_client().get_integration(LiteLLMIntegration)
+    if integration is None:
+        return
+
+    try:
+        # Record model information
+        if hasattr(completion_response, "model"):
+            set_data_normalized(
+                span, SPANDATA.GEN_AI_RESPONSE_MODEL, completion_response.model
+            )
+
+        # Record response content if allowed
+        if should_send_default_pii() and integration.include_prompts:
+            if hasattr(completion_response, "choices"):
+                response_messages = []
+                for choice in completion_response.choices:
+                    if hasattr(choice, "message"):
+                        if hasattr(choice.message, "model_dump"):
+                            response_messages.append(choice.message.model_dump())
+                        elif hasattr(choice.message, "dict"):
+                            response_messages.append(choice.message.dict())
+                        else:
+                            # Fallback for basic message objects
+                            msg = {}
+                            if hasattr(choice.message, "role"):
+                                msg["role"] = choice.message.role
+                            if hasattr(choice.message, "content"):
+                                msg["content"] = choice.message.content
+                            if hasattr(choice.message, "tool_calls"):
+                                msg["tool_calls"] = choice.message.tool_calls
+                            response_messages.append(msg)
+
+                if response_messages:
+                    set_data_normalized(
+                        span, SPANDATA.GEN_AI_RESPONSE_TEXT, response_messages
+                    )
+
+        # Record token usage
+        if hasattr(completion_response, "usage"):
+            usage = completion_response.usage
+            record_token_usage(
+                span,
+                input_tokens=getattr(usage, "prompt_tokens", None),
+                output_tokens=getattr(usage, "completion_tokens", None),
+                total_tokens=getattr(usage, "total_tokens", None),
+            )
+
+    finally:
+        # Always finish the span and clean up
+        span.__exit__(None, None, None)
+
+
+def _failure_callback(kwargs, exception, start_time, end_time):
+    # type: (Dict[str, Any], Exception, datetime, datetime) -> None
+    """Handle request failure."""
+    span = _get_metadata_dict(kwargs).get("_sentry_span")
+    if span is None:
+        return
+
+    try:
+        # Capture the exception
+        event, hint = event_from_exception(
+            exception,
+            client_options=sentry_sdk.get_client().options,
+            mechanism={"type": "litellm", "handled": False},
+        )
+        sentry_sdk.capture_event(event, hint=hint)
+    finally:
+        # Always finish the span and clean up
+        span.__exit__(type(exception), exception, None)
+
+
+class LiteLLMIntegration(Integration):
+    """
+    LiteLLM integration for Sentry.
+
+    This integration automatically captures LiteLLM API calls and sends them to Sentry
+    for monitoring and error tracking. It supports all 100+ LLM providers that LiteLLM
+    supports, including OpenAI, Anthropic, Google, Cohere, and many others.
+
+    Features:
+    - Automatic exception capture for all LiteLLM calls
+    - Token usage tracking across all providers
+    - Provider detection and attribution
+    - Input/output message capture (configurable)
+    - Streaming response support
+    - Cost tracking integration
+
+    Usage:
+
+    ```python
+    import litellm
+    import sentry_sdk
+
+    # Initialize Sentry with the LiteLLM integration
+    sentry_sdk.init(
+        dsn="your-dsn",
+        send_default_pii=True,
+        integrations=[
+            sentry_sdk.integrations.LiteLLMIntegration(
+                include_prompts=True  # Set to False to exclude message content
+            )
+        ]
+    )
+
+    # All LiteLLM calls will now be monitored
+    response = litellm.completion(
+        model="gpt-3.5-turbo",
+        messages=[{"role": "user", "content": "Hello!"}]
+    )
+    ```
+
+    Configuration:
+    - include_prompts (bool): Whether to include prompts and responses in spans.
+      Defaults to True. Set to False to exclude potentially sensitive data.
+    """
+
+    identifier = "litellm"
+    origin = f"auto.ai.{identifier}"
+
+    def __init__(self, include_prompts=True):
+        # type: (LiteLLMIntegration, bool) -> None
+        self.include_prompts = include_prompts
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        """Set up LiteLLM callbacks for monitoring."""
+        litellm.input_callback = litellm.input_callback or []
+        if _input_callback not in litellm.input_callback:
+            litellm.input_callback.append(_input_callback)
+
+        litellm.success_callback = litellm.success_callback or []
+        if _success_callback not in litellm.success_callback:
+            litellm.success_callback.append(_success_callback)
+
+        litellm.failure_callback = litellm.failure_callback or []
+        if _failure_callback not in litellm.failure_callback:
+            litellm.failure_callback.append(_failure_callback)
sentry_sdk/integrations/litestar.py CHANGED
@@ -1,4 +1,6 @@
 from collections.abc import Set
+from copy import deepcopy
+
 import sentry_sdk
 from sentry_sdk.consts import OP
 from sentry_sdk.integrations import (
@@ -222,9 +224,7 @@ def patch_http_route_handle():
             return await old_handle(self, scope, receive, send)

         sentry_scope = sentry_sdk.get_isolation_scope()
-        request = scope["app"].request_class(
-            scope=scope, receive=receive, send=send
-        )  # type: Request[Any, Any]
+        request = scope["app"].request_class(scope=scope, receive=receive, send=send)  # type: Request[Any, Any]
         extracted_request_data = ConnectionDataExtractor(
             parse_body=True, parse_query=True
         )(request)
@@ -262,7 +262,7 @@ def patch_http_route_handle():

             event.update(
                 {
-                    "request": request_info,
+                    "request": deepcopy(request_info),
                     "transaction": tx_name,
                     "transaction_info": tx_info,
                 }
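The substantive fix here (mirrored in starlite.py below) is `deepcopy(request_info)`: attaching the extracted request data to the event by reference means any later mutation of that dict would silently alter an event that was already captured. A standalone sketch of the aliasing problem, with illustrative names that are not SDK code:

```python
from copy import deepcopy

request_info = {"headers": {"x-request-id": "abc"}}

event_aliased = {"request": request_info}             # shares the dict
event_snapshot = {"request": deepcopy(request_info)}  # independent copy

request_info["headers"]["x-request-id"] = "xyz"  # mutation after "capture"

print(event_aliased["request"]["headers"]["x-request-id"])   # xyz (corrupted)
print(event_snapshot["request"]["headers"]["x-request-id"])  # abc (preserved)
```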
sentry_sdk/integrations/openai_agents/spans/ai_client.py CHANGED
@@ -7,6 +7,7 @@ from ..utils import (
     _set_input_data,
     _set_output_data,
     _set_usage_data,
+    _create_mcp_execute_tool_spans,
 )

 from typing import TYPE_CHECKING
@@ -28,12 +29,14 @@ def ai_client_span(agent, get_response_kwargs):
     # TODO-anton: remove hardcoded stuff and replace something that also works for embedding and so on
     span.set_data(SPANDATA.GEN_AI_OPERATION_NAME, "chat")

+    _set_agent_data(span, agent)
+
     return span


 def update_ai_client_span(span, agent, get_response_kwargs, result):
     # type: (sentry_sdk.tracing.Span, Agent, dict[str, Any], Any) -> None
-    _set_agent_data(span, agent)
     _set_usage_data(span, result.usage)
     _set_input_data(span, get_response_kwargs)
     _set_output_data(span, result)
+    _create_mcp_execute_tool_spans(span, result)
sentry_sdk/integrations/openai_agents/utils.py CHANGED
@@ -1,6 +1,6 @@
 import sentry_sdk
 from sentry_sdk.ai.utils import set_data_normalized
-from sentry_sdk.consts import SPANDATA
+from sentry_sdk.consts import SPANDATA, SPANSTATUS, OP
 from sentry_sdk.integrations import DidNotEnable
 from sentry_sdk.scope import should_send_default_pii
 from sentry_sdk.tracing_utils import set_span_errored
@@ -156,3 +156,27 @@ def _set_output_data(span, result):
         set_data_normalized(
             span, SPANDATA.GEN_AI_RESPONSE_TEXT, output_messages["response"]
         )
+
+
+def _create_mcp_execute_tool_spans(span, result):
+    # type: (sentry_sdk.tracing.Span, agents.Result) -> None
+    for output in result.output:
+        if output.__class__.__name__ == "McpCall":
+            with sentry_sdk.start_span(
+                op=OP.GEN_AI_EXECUTE_TOOL,
+                description=f"execute_tool {output.name}",
+                start_timestamp=span.start_timestamp,
+            ) as execute_tool_span:
+                set_data_normalized(execute_tool_span, SPANDATA.GEN_AI_TOOL_TYPE, "mcp")
+                set_data_normalized(
+                    execute_tool_span, SPANDATA.GEN_AI_TOOL_NAME, output.name
+                )
+                if should_send_default_pii():
+                    execute_tool_span.set_data(
+                        SPANDATA.GEN_AI_TOOL_INPUT, output.arguments
+                    )
+                    execute_tool_span.set_data(
+                        SPANDATA.GEN_AI_TOOL_OUTPUT, output.output
+                    )
+                if output.error:
+                    execute_tool_span.set_status(SPANSTATUS.ERROR)
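The new `_create_mcp_execute_tool_spans` helper duck-types on the class name `McpCall` rather than importing the type, then emits one `gen_ai.execute_tool` child span per MCP tool call, attaching arguments and output only when PII sending is allowed. A purely illustrative stand-in for the object shape the helper reads — the real `McpCall` comes from the openai-agents package, and these names are hypothetical:

```python
class McpCall:
    # Hypothetical stand-in exposing only the attributes the helper reads.
    name = "search_docs"
    arguments = '{"query": "sentry"}'
    output = "relevant docs text"
    error = None

class Result:
    # The helper iterates result.output looking for McpCall-named items.
    output = [McpCall()]

# _create_mcp_execute_tool_spans(span, Result()) would then open one child
# span roughly like: op="gen_ai.execute_tool", name="execute_tool search_docs",
# with tool type "mcp" and, if send_default_pii is enabled, the tool's
# arguments and output attached as span data.
```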
sentry_sdk/integrations/pure_eval.py CHANGED
@@ -116,7 +116,9 @@ def pure_eval_frame(frame):
         return (n.lineno, n.col_offset)

     nodes_before_stmt = [
-        node for node in nodes if start(node) < stmt.last_token.end  # type: ignore
+        node
+        for node in nodes
+        if start(node) < stmt.last_token.end  # type: ignore
     ]
     if nodes_before_stmt:
         # The position of the last node before or in the statement
sentry_sdk/integrations/sqlalchemy.py CHANGED
@@ -64,9 +64,7 @@ def _before_cursor_execute(
 @ensure_integration_enabled(SqlalchemyIntegration)
 def _after_cursor_execute(conn, cursor, statement, parameters, context, *args):
     # type: (Any, Any, Any, Any, Any, *Any) -> None
-    ctx_mgr = getattr(
-        context, "_sentry_sql_span_manager", None
-    )  # type: Optional[ContextManager[Any]]
+    ctx_mgr = getattr(context, "_sentry_sql_span_manager", None)  # type: Optional[ContextManager[Any]]

     if ctx_mgr is not None:
         context._sentry_sql_span_manager = None
@@ -92,9 +90,7 @@ def _handle_error(context, *args):
     # _after_cursor_execute does not get called for crashing SQL stmts. Judging
     # from SQLAlchemy codebase it does seem like any error coming into this
     # handler is going to be fatal.
-    ctx_mgr = getattr(
-        execution_context, "_sentry_sql_span_manager", None
-    )  # type: Optional[ContextManager[Any]]
+    ctx_mgr = getattr(execution_context, "_sentry_sql_span_manager", None)  # type: Optional[ContextManager[Any]]

     if ctx_mgr is not None:
         execution_context._sentry_sql_span_manager = None
sentry_sdk/integrations/starlette.py CHANGED
@@ -103,9 +103,7 @@ class StarletteIntegration(Integration):
         self.http_methods_to_capture = tuple(map(str.upper, http_methods_to_capture))

         if isinstance(failed_request_status_codes, Set):
-            self.failed_request_status_codes = (
-                failed_request_status_codes
-            )  # type: Container[int]
+            self.failed_request_status_codes = failed_request_status_codes  # type: Container[int]
         else:
             warnings.warn(
                 "Passing a list or None for failed_request_status_codes is deprecated. "
sentry_sdk/integrations/starlite.py CHANGED
@@ -1,3 +1,5 @@
+from copy import deepcopy
+
 import sentry_sdk
 from sentry_sdk.consts import OP
 from sentry_sdk.integrations import DidNotEnable, Integration
@@ -200,9 +202,7 @@ def patch_http_route_handle():
             return await old_handle(self, scope, receive, send)

         sentry_scope = sentry_sdk.get_isolation_scope()
-        request = scope["app"].request_class(
-            scope=scope, receive=receive, send=send
-        )  # type: Request[Any, Any]
+        request = scope["app"].request_class(scope=scope, receive=receive, send=send)  # type: Request[Any, Any]
         extracted_request_data = ConnectionDataExtractor(
             parse_body=True, parse_query=True
         )(request)
@@ -239,7 +239,7 @@ def patch_http_route_handle():

             event.update(
                 {
-                    "request": request_info,
+                    "request": deepcopy(request_info),
                     "transaction": tx_name,
                     "transaction_info": tx_info,
                 }
sentry_sdk/integrations/wsgi.py CHANGED
@@ -119,14 +119,15 @@ class SentryWsgiMiddleware:
                         origin=self.span_origin,
                     )

-                    with (
+                    transaction_context = (
                         sentry_sdk.start_transaction(
                             transaction,
                             custom_sampling_context={"wsgi_environ": environ},
                         )
                         if transaction is not None
                         else nullcontext()
-                    ):
+                    )
+                    with transaction_context:
                         try:
                             response = self.app(
                                 environ,
sentry_sdk/metrics.py CHANGED
@@ -159,7 +159,8 @@ class CounterMetric(Metric):
     __slots__ = ("value",)

     def __init__(
-        self, first  # type: MetricValue
+        self,
+        first,  # type: MetricValue
     ):
         # type: (...) -> None
         self.value = float(first)
@@ -170,7 +171,8 @@ class CounterMetric(Metric):
         return 1

     def add(
-        self, value  # type: MetricValue
+        self,
+        value,  # type: MetricValue
     ):
         # type: (...) -> None
         self.value += float(value)
@@ -190,7 +192,8 @@ class GaugeMetric(Metric):
     )

     def __init__(
-        self, first  # type: MetricValue
+        self,
+        first,  # type: MetricValue
     ):
         # type: (...) -> None
         first = float(first)
@@ -207,7 +210,8 @@ class GaugeMetric(Metric):
         return 5

     def add(
-        self, value  # type: MetricValue
+        self,
+        value,  # type: MetricValue
     ):
         # type: (...) -> None
         value = float(value)
@@ -232,7 +236,8 @@ class DistributionMetric(Metric):
     __slots__ = ("value",)

     def __init__(
-        self, first  # type: MetricValue
+        self,
+        first,  # type: MetricValue
     ):
         # type(...) -> None
         self.value = [float(first)]
@@ -243,7 +248,8 @@ class DistributionMetric(Metric):
         return len(self.value)

     def add(
-        self, value  # type: MetricValue
+        self,
+        value,  # type: MetricValue
     ):
         # type: (...) -> None
         self.value.append(float(value))
@@ -257,7 +263,8 @@ class SetMetric(Metric):
     __slots__ = ("value",)

     def __init__(
-        self, first  # type: MetricValue
+        self,
+        first,  # type: MetricValue
     ):
         # type: (...) -> None
         self.value = {first}
@@ -268,7 +275,8 @@ class SetMetric(Metric):
         return len(self.value)

     def add(
-        self, value  # type: MetricValue
+        self,
+        value,  # type: MetricValue
     ):
         # type: (...) -> None
         self.value.add(value)
@@ -373,9 +381,7 @@ class LocalAggregator:

     def __init__(self):
         # type: (...) -> None
-        self._measurements = (
-            {}
-        )  # type: Dict[Tuple[str, MetricTagsInternal], Tuple[float, float, int, float]]
+        self._measurements = {}  # type: Dict[Tuple[str, MetricTagsInternal], Tuple[float, float, int, float]]

     def add(
         self,
sentry_sdk/profiler/utils.py CHANGED
@@ -85,9 +85,7 @@ else:
         if (
             # the co_varnames start with the frame's positional arguments
             # and we expect the first to be `self` if its an instance method
-            co_varnames
-            and co_varnames[0] == "self"
-            and "self" in frame.f_locals
+            co_varnames and co_varnames[0] == "self" and "self" in frame.f_locals
         ):
             for cls in type(frame.f_locals["self"]).__mro__:
                 if name in cls.__dict__:
@@ -101,9 +99,7 @@ else:
         if (
             # the co_varnames start with the frame's positional arguments
             # and we expect the first to be `cls` if its a class method
-            co_varnames
-            and co_varnames[0] == "cls"
-            and "cls" in frame.f_locals
+            co_varnames and co_varnames[0] == "cls" and "cls" in frame.f_locals
         ):
             for cls in frame.f_locals["cls"].__mro__:
                 if name in cls.__dict__:
sentry_sdk/scope.py CHANGED
@@ -894,7 +894,8 @@ class Scope:
         self._contexts[key] = value

     def remove_context(
-        self, key  # type: str
+        self,
+        key,  # type: str
     ):
         # type: (...) -> None
         """Removes a context."""
@@ -910,7 +911,8 @@ class Scope:
         self._extras[key] = value

     def remove_extra(
-        self, key  # type: str
+        self,
+        key,  # type: str
     ):
         # type: (...) -> None
         """Removes a specific extra key."""
@@ -1321,7 +1323,8 @@ class Scope:
         self._force_auto_session_tracking = None

     def add_event_processor(
-        self, func  # type: EventProcessor
+        self,
+        func,  # type: EventProcessor
     ):
         # type: (...) -> None
         """Register a scope local event processor on the scope.
sentry_sdk/serializer.py CHANGED
@@ -128,9 +128,7 @@ def serialize(event, **kwargs):
     path = []  # type: List[Segment]
     meta_stack = []  # type: List[Dict[str, Any]]

-    keep_request_bodies = (
-        kwargs.pop("max_request_body_size", None) == "always"
-    )  # type: bool
+    keep_request_bodies = kwargs.pop("max_request_body_size", None) == "always"  # type: bool
     max_value_length = kwargs.pop("max_value_length", None)  # type: Optional[int]
     is_vars = kwargs.pop("is_vars", False)
     custom_repr = kwargs.pop("custom_repr", None)  # type: Callable[..., Optional[str]]
sentry_sdk/session.py CHANGED
@@ -130,7 +130,8 @@ class Session:
         self.status = status

     def close(
-        self, status=None  # type: Optional[SessionStatus]
+        self,
+        status=None,  # type: Optional[SessionStatus]
     ):
         # type: (...) -> Any
         if status is None and self.status == "ok":
@@ -139,7 +140,8 @@ class Session:
         self.update(status=status)

     def get_json_attrs(
-        self, with_user_info=True  # type: Optional[bool]
+        self,
+        with_user_info=True,  # type: Optional[bool]
     ):
         # type: (...) -> Any
         attrs = {}
sentry_sdk/sessions.py CHANGED
@@ -228,7 +228,8 @@ class SessionFlusher:
         return None

     def add_aggregate_session(
-        self, session  # type: Session
+        self,
+        session,  # type: Session
     ):
         # type: (...) -> None
         # NOTE on `session.did`:
@@ -259,7 +260,8 @@ class SessionFlusher:
         state["exited"] = state.get("exited", 0) + 1

     def add_session(
-        self, session  # type: Session
+        self,
+        session,  # type: Session
     ):
         # type: (...) -> None
         if session.session_mode == "request":
|