sentry-sdk 0.7.5__py2.py3-none-any.whl → 2.46.0__py2.py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- sentry_sdk/__init__.py +48 -30
- sentry_sdk/_compat.py +74 -61
- sentry_sdk/_init_implementation.py +84 -0
- sentry_sdk/_log_batcher.py +172 -0
- sentry_sdk/_lru_cache.py +47 -0
- sentry_sdk/_metrics_batcher.py +167 -0
- sentry_sdk/_queue.py +289 -0
- sentry_sdk/_types.py +338 -0
- sentry_sdk/_werkzeug.py +98 -0
- sentry_sdk/ai/__init__.py +7 -0
- sentry_sdk/ai/monitoring.py +137 -0
- sentry_sdk/ai/utils.py +144 -0
- sentry_sdk/api.py +496 -80
- sentry_sdk/attachments.py +75 -0
- sentry_sdk/client.py +1023 -103
- sentry_sdk/consts.py +1438 -66
- sentry_sdk/crons/__init__.py +10 -0
- sentry_sdk/crons/api.py +62 -0
- sentry_sdk/crons/consts.py +4 -0
- sentry_sdk/crons/decorator.py +135 -0
- sentry_sdk/debug.py +15 -14
- sentry_sdk/envelope.py +369 -0
- sentry_sdk/feature_flags.py +71 -0
- sentry_sdk/hub.py +611 -280
- sentry_sdk/integrations/__init__.py +276 -49
- sentry_sdk/integrations/_asgi_common.py +108 -0
- sentry_sdk/integrations/_wsgi_common.py +180 -44
- sentry_sdk/integrations/aiohttp.py +291 -42
- sentry_sdk/integrations/anthropic.py +439 -0
- sentry_sdk/integrations/argv.py +9 -8
- sentry_sdk/integrations/ariadne.py +161 -0
- sentry_sdk/integrations/arq.py +247 -0
- sentry_sdk/integrations/asgi.py +341 -0
- sentry_sdk/integrations/asyncio.py +144 -0
- sentry_sdk/integrations/asyncpg.py +208 -0
- sentry_sdk/integrations/atexit.py +17 -10
- sentry_sdk/integrations/aws_lambda.py +377 -62
- sentry_sdk/integrations/beam.py +176 -0
- sentry_sdk/integrations/boto3.py +137 -0
- sentry_sdk/integrations/bottle.py +221 -0
- sentry_sdk/integrations/celery/__init__.py +529 -0
- sentry_sdk/integrations/celery/beat.py +293 -0
- sentry_sdk/integrations/celery/utils.py +43 -0
- sentry_sdk/integrations/chalice.py +134 -0
- sentry_sdk/integrations/clickhouse_driver.py +177 -0
- sentry_sdk/integrations/cloud_resource_context.py +280 -0
- sentry_sdk/integrations/cohere.py +274 -0
- sentry_sdk/integrations/dedupe.py +48 -14
- sentry_sdk/integrations/django/__init__.py +584 -191
- sentry_sdk/integrations/django/asgi.py +245 -0
- sentry_sdk/integrations/django/caching.py +204 -0
- sentry_sdk/integrations/django/middleware.py +187 -0
- sentry_sdk/integrations/django/signals_handlers.py +91 -0
- sentry_sdk/integrations/django/templates.py +79 -5
- sentry_sdk/integrations/django/transactions.py +49 -22
- sentry_sdk/integrations/django/views.py +96 -0
- sentry_sdk/integrations/dramatiq.py +226 -0
- sentry_sdk/integrations/excepthook.py +50 -13
- sentry_sdk/integrations/executing.py +67 -0
- sentry_sdk/integrations/falcon.py +272 -0
- sentry_sdk/integrations/fastapi.py +141 -0
- sentry_sdk/integrations/flask.py +142 -88
- sentry_sdk/integrations/gcp.py +239 -0
- sentry_sdk/integrations/gnu_backtrace.py +99 -0
- sentry_sdk/integrations/google_genai/__init__.py +301 -0
- sentry_sdk/integrations/google_genai/consts.py +16 -0
- sentry_sdk/integrations/google_genai/streaming.py +155 -0
- sentry_sdk/integrations/google_genai/utils.py +576 -0
- sentry_sdk/integrations/gql.py +162 -0
- sentry_sdk/integrations/graphene.py +151 -0
- sentry_sdk/integrations/grpc/__init__.py +168 -0
- sentry_sdk/integrations/grpc/aio/__init__.py +7 -0
- sentry_sdk/integrations/grpc/aio/client.py +95 -0
- sentry_sdk/integrations/grpc/aio/server.py +100 -0
- sentry_sdk/integrations/grpc/client.py +91 -0
- sentry_sdk/integrations/grpc/consts.py +1 -0
- sentry_sdk/integrations/grpc/server.py +66 -0
- sentry_sdk/integrations/httpx.py +178 -0
- sentry_sdk/integrations/huey.py +174 -0
- sentry_sdk/integrations/huggingface_hub.py +378 -0
- sentry_sdk/integrations/langchain.py +1132 -0
- sentry_sdk/integrations/langgraph.py +337 -0
- sentry_sdk/integrations/launchdarkly.py +61 -0
- sentry_sdk/integrations/litellm.py +287 -0
- sentry_sdk/integrations/litestar.py +315 -0
- sentry_sdk/integrations/logging.py +307 -96
- sentry_sdk/integrations/loguru.py +213 -0
- sentry_sdk/integrations/mcp.py +566 -0
- sentry_sdk/integrations/modules.py +14 -31
- sentry_sdk/integrations/openai.py +725 -0
- sentry_sdk/integrations/openai_agents/__init__.py +61 -0
- sentry_sdk/integrations/openai_agents/consts.py +1 -0
- sentry_sdk/integrations/openai_agents/patches/__init__.py +5 -0
- sentry_sdk/integrations/openai_agents/patches/agent_run.py +140 -0
- sentry_sdk/integrations/openai_agents/patches/error_tracing.py +77 -0
- sentry_sdk/integrations/openai_agents/patches/models.py +50 -0
- sentry_sdk/integrations/openai_agents/patches/runner.py +45 -0
- sentry_sdk/integrations/openai_agents/patches/tools.py +77 -0
- sentry_sdk/integrations/openai_agents/spans/__init__.py +5 -0
- sentry_sdk/integrations/openai_agents/spans/agent_workflow.py +21 -0
- sentry_sdk/integrations/openai_agents/spans/ai_client.py +42 -0
- sentry_sdk/integrations/openai_agents/spans/execute_tool.py +48 -0
- sentry_sdk/integrations/openai_agents/spans/handoff.py +19 -0
- sentry_sdk/integrations/openai_agents/spans/invoke_agent.py +86 -0
- sentry_sdk/integrations/openai_agents/utils.py +199 -0
- sentry_sdk/integrations/openfeature.py +35 -0
- sentry_sdk/integrations/opentelemetry/__init__.py +7 -0
- sentry_sdk/integrations/opentelemetry/consts.py +5 -0
- sentry_sdk/integrations/opentelemetry/integration.py +58 -0
- sentry_sdk/integrations/opentelemetry/propagator.py +117 -0
- sentry_sdk/integrations/opentelemetry/span_processor.py +391 -0
- sentry_sdk/integrations/otlp.py +82 -0
- sentry_sdk/integrations/pure_eval.py +141 -0
- sentry_sdk/integrations/pydantic_ai/__init__.py +47 -0
- sentry_sdk/integrations/pydantic_ai/consts.py +1 -0
- sentry_sdk/integrations/pydantic_ai/patches/__init__.py +4 -0
- sentry_sdk/integrations/pydantic_ai/patches/agent_run.py +215 -0
- sentry_sdk/integrations/pydantic_ai/patches/graph_nodes.py +110 -0
- sentry_sdk/integrations/pydantic_ai/patches/model_request.py +40 -0
- sentry_sdk/integrations/pydantic_ai/patches/tools.py +98 -0
- sentry_sdk/integrations/pydantic_ai/spans/__init__.py +3 -0
- sentry_sdk/integrations/pydantic_ai/spans/ai_client.py +246 -0
- sentry_sdk/integrations/pydantic_ai/spans/execute_tool.py +49 -0
- sentry_sdk/integrations/pydantic_ai/spans/invoke_agent.py +112 -0
- sentry_sdk/integrations/pydantic_ai/utils.py +223 -0
- sentry_sdk/integrations/pymongo.py +214 -0
- sentry_sdk/integrations/pyramid.py +112 -68
- sentry_sdk/integrations/quart.py +237 -0
- sentry_sdk/integrations/ray.py +165 -0
- sentry_sdk/integrations/redis/__init__.py +48 -0
- sentry_sdk/integrations/redis/_async_common.py +116 -0
- sentry_sdk/integrations/redis/_sync_common.py +119 -0
- sentry_sdk/integrations/redis/consts.py +19 -0
- sentry_sdk/integrations/redis/modules/__init__.py +0 -0
- sentry_sdk/integrations/redis/modules/caches.py +118 -0
- sentry_sdk/integrations/redis/modules/queries.py +65 -0
- sentry_sdk/integrations/redis/rb.py +32 -0
- sentry_sdk/integrations/redis/redis.py +69 -0
- sentry_sdk/integrations/redis/redis_cluster.py +107 -0
- sentry_sdk/integrations/redis/redis_py_cluster_legacy.py +50 -0
- sentry_sdk/integrations/redis/utils.py +148 -0
- sentry_sdk/integrations/rq.py +95 -37
- sentry_sdk/integrations/rust_tracing.py +284 -0
- sentry_sdk/integrations/sanic.py +294 -123
- sentry_sdk/integrations/serverless.py +48 -19
- sentry_sdk/integrations/socket.py +96 -0
- sentry_sdk/integrations/spark/__init__.py +4 -0
- sentry_sdk/integrations/spark/spark_driver.py +316 -0
- sentry_sdk/integrations/spark/spark_worker.py +116 -0
- sentry_sdk/integrations/sqlalchemy.py +142 -0
- sentry_sdk/integrations/starlette.py +737 -0
- sentry_sdk/integrations/starlite.py +292 -0
- sentry_sdk/integrations/statsig.py +37 -0
- sentry_sdk/integrations/stdlib.py +235 -29
- sentry_sdk/integrations/strawberry.py +394 -0
- sentry_sdk/integrations/sys_exit.py +70 -0
- sentry_sdk/integrations/threading.py +158 -28
- sentry_sdk/integrations/tornado.py +84 -52
- sentry_sdk/integrations/trytond.py +50 -0
- sentry_sdk/integrations/typer.py +60 -0
- sentry_sdk/integrations/unleash.py +33 -0
- sentry_sdk/integrations/unraisablehook.py +53 -0
- sentry_sdk/integrations/wsgi.py +201 -119
- sentry_sdk/logger.py +96 -0
- sentry_sdk/metrics.py +81 -0
- sentry_sdk/monitor.py +120 -0
- sentry_sdk/profiler/__init__.py +49 -0
- sentry_sdk/profiler/continuous_profiler.py +730 -0
- sentry_sdk/profiler/transaction_profiler.py +839 -0
- sentry_sdk/profiler/utils.py +195 -0
- sentry_sdk/py.typed +0 -0
- sentry_sdk/scope.py +1713 -85
- sentry_sdk/scrubber.py +177 -0
- sentry_sdk/serializer.py +405 -0
- sentry_sdk/session.py +177 -0
- sentry_sdk/sessions.py +275 -0
- sentry_sdk/spotlight.py +242 -0
- sentry_sdk/tracing.py +1486 -0
- sentry_sdk/tracing_utils.py +1236 -0
- sentry_sdk/transport.py +806 -134
- sentry_sdk/types.py +52 -0
- sentry_sdk/utils.py +1625 -465
- sentry_sdk/worker.py +54 -25
- sentry_sdk-2.46.0.dist-info/METADATA +268 -0
- sentry_sdk-2.46.0.dist-info/RECORD +189 -0
- {sentry_sdk-0.7.5.dist-info → sentry_sdk-2.46.0.dist-info}/WHEEL +1 -1
- sentry_sdk-2.46.0.dist-info/entry_points.txt +2 -0
- sentry_sdk-2.46.0.dist-info/licenses/LICENSE +21 -0
- sentry_sdk/integrations/celery.py +0 -119
- sentry_sdk-0.7.5.dist-info/LICENSE +0 -9
- sentry_sdk-0.7.5.dist-info/METADATA +0 -36
- sentry_sdk-0.7.5.dist-info/RECORD +0 -39
- {sentry_sdk-0.7.5.dist-info → sentry_sdk-2.46.0.dist-info}/top_level.txt +0 -0
sentry_sdk/integrations/spark/spark_driver.py
@@ -0,0 +1,316 @@
import sentry_sdk
from sentry_sdk.integrations import Integration
from sentry_sdk.utils import capture_internal_exceptions, ensure_integration_enabled

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from typing import Any
    from typing import Optional

    from sentry_sdk._types import Event, Hint
    from pyspark import SparkContext


class SparkIntegration(Integration):
    identifier = "spark"

    @staticmethod
    def setup_once():
        # type: () -> None
        _setup_sentry_tracing()


def _set_app_properties():
    # type: () -> None
    """
    Set properties in driver that propagate to worker processes, allowing for workers to have access to those properties.
    This allows worker integration to have access to app_name and application_id.
    """
    from pyspark import SparkContext

    spark_context = SparkContext._active_spark_context
    if spark_context:
        spark_context.setLocalProperty(
            "sentry_app_name",
            spark_context.appName,
        )
        spark_context.setLocalProperty(
            "sentry_application_id",
            spark_context.applicationId,
        )


def _start_sentry_listener(sc):
    # type: (SparkContext) -> None
    """
    Start java gateway server to add custom `SparkListener`
    """
    from pyspark.java_gateway import ensure_callback_server_started

    gw = sc._gateway
    ensure_callback_server_started(gw)
    listener = SentryListener()
    sc._jsc.sc().addSparkListener(listener)


def _add_event_processor(sc):
    # type: (SparkContext) -> None
    scope = sentry_sdk.get_isolation_scope()

    @scope.add_event_processor
    def process_event(event, hint):
        # type: (Event, Hint) -> Optional[Event]
        with capture_internal_exceptions():
            if sentry_sdk.get_client().get_integration(SparkIntegration) is None:
                return event

            if sc._active_spark_context is None:
                return event

            event.setdefault("user", {}).setdefault("id", sc.sparkUser())

            event.setdefault("tags", {}).setdefault(
                "executor.id", sc._conf.get("spark.executor.id")
            )
            event["tags"].setdefault(
                "spark-submit.deployMode",
                sc._conf.get("spark.submit.deployMode"),
            )
            event["tags"].setdefault("driver.host", sc._conf.get("spark.driver.host"))
            event["tags"].setdefault("driver.port", sc._conf.get("spark.driver.port"))
            event["tags"].setdefault("spark_version", sc.version)
            event["tags"].setdefault("app_name", sc.appName)
            event["tags"].setdefault("application_id", sc.applicationId)
            event["tags"].setdefault("master", sc.master)
            event["tags"].setdefault("spark_home", sc.sparkHome)

            event.setdefault("extra", {}).setdefault("web_url", sc.uiWebUrl)

        return event


def _activate_integration(sc):
    # type: (SparkContext) -> None

    _start_sentry_listener(sc)
    _set_app_properties()
    _add_event_processor(sc)


def _patch_spark_context_init():
    # type: () -> None
    from pyspark import SparkContext

    spark_context_init = SparkContext._do_init

    @ensure_integration_enabled(SparkIntegration, spark_context_init)
    def _sentry_patched_spark_context_init(self, *args, **kwargs):
        # type: (SparkContext, *Any, **Any) -> Optional[Any]
        rv = spark_context_init(self, *args, **kwargs)
        _activate_integration(self)
        return rv

    SparkContext._do_init = _sentry_patched_spark_context_init


def _setup_sentry_tracing():
    # type: () -> None
    from pyspark import SparkContext

    if SparkContext._active_spark_context is not None:
        _activate_integration(SparkContext._active_spark_context)
        return
    _patch_spark_context_init()


class SparkListener:
    def onApplicationEnd(self, applicationEnd):  # noqa: N802,N803
        # type: (Any) -> None
        pass

    def onApplicationStart(self, applicationStart):  # noqa: N802,N803
        # type: (Any) -> None
        pass

    def onBlockManagerAdded(self, blockManagerAdded):  # noqa: N802,N803
        # type: (Any) -> None
        pass

    def onBlockManagerRemoved(self, blockManagerRemoved):  # noqa: N802,N803
        # type: (Any) -> None
        pass

    def onBlockUpdated(self, blockUpdated):  # noqa: N802,N803
        # type: (Any) -> None
        pass

    def onEnvironmentUpdate(self, environmentUpdate):  # noqa: N802,N803
        # type: (Any) -> None
        pass

    def onExecutorAdded(self, executorAdded):  # noqa: N802,N803
        # type: (Any) -> None
        pass

    def onExecutorBlacklisted(self, executorBlacklisted):  # noqa: N802,N803
        # type: (Any) -> None
        pass

    def onExecutorBlacklistedForStage(  # noqa: N802
        self,
        executorBlacklistedForStage,  # noqa: N803
    ):
        # type: (Any) -> None
        pass

    def onExecutorMetricsUpdate(self, executorMetricsUpdate):  # noqa: N802,N803
        # type: (Any) -> None
        pass

    def onExecutorRemoved(self, executorRemoved):  # noqa: N802,N803
        # type: (Any) -> None
        pass

    def onJobEnd(self, jobEnd):  # noqa: N802,N803
        # type: (Any) -> None
        pass

    def onJobStart(self, jobStart):  # noqa: N802,N803
        # type: (Any) -> None
        pass

    def onNodeBlacklisted(self, nodeBlacklisted):  # noqa: N802,N803
        # type: (Any) -> None
        pass

    def onNodeBlacklistedForStage(self, nodeBlacklistedForStage):  # noqa: N802,N803
        # type: (Any) -> None
        pass

    def onNodeUnblacklisted(self, nodeUnblacklisted):  # noqa: N802,N803
        # type: (Any) -> None
        pass

    def onOtherEvent(self, event):  # noqa: N802,N803
        # type: (Any) -> None
        pass

    def onSpeculativeTaskSubmitted(self, speculativeTask):  # noqa: N802,N803
        # type: (Any) -> None
        pass

    def onStageCompleted(self, stageCompleted):  # noqa: N802,N803
        # type: (Any) -> None
        pass

    def onStageSubmitted(self, stageSubmitted):  # noqa: N802,N803
        # type: (Any) -> None
        pass

    def onTaskEnd(self, taskEnd):  # noqa: N802,N803
        # type: (Any) -> None
        pass

    def onTaskGettingResult(self, taskGettingResult):  # noqa: N802,N803
        # type: (Any) -> None
        pass

    def onTaskStart(self, taskStart):  # noqa: N802,N803
        # type: (Any) -> None
        pass

    def onUnpersistRDD(self, unpersistRDD):  # noqa: N802,N803
        # type: (Any) -> None
        pass

    class Java:
        implements = ["org.apache.spark.scheduler.SparkListenerInterface"]


class SentryListener(SparkListener):
    def _add_breadcrumb(
        self,
        level,  # type: str
        message,  # type: str
        data=None,  # type: Optional[dict[str, Any]]
    ):
        # type: (...) -> None
        sentry_sdk.get_isolation_scope().add_breadcrumb(
            level=level, message=message, data=data
        )

    def onJobStart(self, jobStart):  # noqa: N802,N803
        # type: (Any) -> None
        sentry_sdk.get_isolation_scope().clear_breadcrumbs()

        message = "Job {} Started".format(jobStart.jobId())
        self._add_breadcrumb(level="info", message=message)
        _set_app_properties()

    def onJobEnd(self, jobEnd):  # noqa: N802,N803
        # type: (Any) -> None
        level = ""
        message = ""
        data = {"result": jobEnd.jobResult().toString()}

        if jobEnd.jobResult().toString() == "JobSucceeded":
            level = "info"
            message = "Job {} Ended".format(jobEnd.jobId())
        else:
            level = "warning"
            message = "Job {} Failed".format(jobEnd.jobId())

        self._add_breadcrumb(level=level, message=message, data=data)

    def onStageSubmitted(self, stageSubmitted):  # noqa: N802,N803
        # type: (Any) -> None
        stage_info = stageSubmitted.stageInfo()
        message = "Stage {} Submitted".format(stage_info.stageId())

        data = {"name": stage_info.name()}
        attempt_id = _get_attempt_id(stage_info)
        if attempt_id is not None:
            data["attemptId"] = attempt_id

        self._add_breadcrumb(level="info", message=message, data=data)
        _set_app_properties()

    def onStageCompleted(self, stageCompleted):  # noqa: N802,N803
        # type: (Any) -> None
        from py4j.protocol import Py4JJavaError  # type: ignore

        stage_info = stageCompleted.stageInfo()
        message = ""
        level = ""

        data = {"name": stage_info.name()}
        attempt_id = _get_attempt_id(stage_info)
        if attempt_id is not None:
            data["attemptId"] = attempt_id

        # Have to Try Except because stageInfo.failureReason() is typed with Scala Option
        try:
            data["reason"] = stage_info.failureReason().get()
            message = "Stage {} Failed".format(stage_info.stageId())
            level = "warning"
        except Py4JJavaError:
            message = "Stage {} Completed".format(stage_info.stageId())
            level = "info"

        self._add_breadcrumb(level=level, message=message, data=data)


def _get_attempt_id(stage_info):
    # type: (Any) -> Optional[int]
    try:
        return stage_info.attemptId()
    except Exception:
        pass

    try:
        return stage_info.attemptNumber()
    except Exception:
        pass

    return None
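For context, a minimal driver-side setup could look like the sketch below. It assumes `SparkIntegration` is re-exported from `sentry_sdk.integrations.spark` (the package `__init__.py` listed above), and the DSN and app name are placeholders. Because `setup_once()` patches `SparkContext._do_init`, `sentry_sdk.init()` has to run before the SparkContext/SparkSession is created so the listener and event processor get attached.

# Driver program -- illustrative sketch, not part of the diff.
import sentry_sdk
from sentry_sdk.integrations.spark import SparkIntegration

from pyspark.sql import SparkSession

sentry_sdk.init(
    dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
    integrations=[SparkIntegration()],
)

# Created after init(), so the patched _do_init activates the integration.
spark = SparkSession.builder.appName("example-app").getOrCreate()

If a SparkContext already exists when the integration is set up, `_setup_sentry_tracing()` activates the integration on it immediately instead of patching `_do_init`.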
sentry_sdk/integrations/spark/spark_worker.py
@@ -0,0 +1,116 @@
import sys

import sentry_sdk
from sentry_sdk.integrations import Integration
from sentry_sdk.utils import (
    capture_internal_exceptions,
    exc_info_from_error,
    single_exception_from_error_tuple,
    walk_exception_chain,
    event_hint_with_exc_info,
)

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from typing import Any
    from typing import Optional

    from sentry_sdk._types import ExcInfo, Event, Hint


class SparkWorkerIntegration(Integration):
    identifier = "spark_worker"

    @staticmethod
    def setup_once():
        # type: () -> None
        import pyspark.daemon as original_daemon

        original_daemon.worker_main = _sentry_worker_main


def _capture_exception(exc_info):
    # type: (ExcInfo) -> None
    client = sentry_sdk.get_client()

    mechanism = {"type": "spark", "handled": False}

    exc_info = exc_info_from_error(exc_info)

    exc_type, exc_value, tb = exc_info
    rv = []

    # On Exception worker will call sys.exit(-1), so we can ignore SystemExit and similar errors
    for exc_type, exc_value, tb in walk_exception_chain(exc_info):
        if exc_type not in (SystemExit, EOFError, ConnectionResetError):
            rv.append(
                single_exception_from_error_tuple(
                    exc_type, exc_value, tb, client.options, mechanism
                )
            )

    if rv:
        rv.reverse()
        hint = event_hint_with_exc_info(exc_info)
        event = {"level": "error", "exception": {"values": rv}}  # type: Event

        _tag_task_context()

        sentry_sdk.capture_event(event, hint=hint)


def _tag_task_context():
    # type: () -> None
    from pyspark.taskcontext import TaskContext

    scope = sentry_sdk.get_isolation_scope()

    @scope.add_event_processor
    def process_event(event, hint):
        # type: (Event, Hint) -> Optional[Event]
        with capture_internal_exceptions():
            integration = sentry_sdk.get_client().get_integration(
                SparkWorkerIntegration
            )
            task_context = TaskContext.get()

            if integration is None or task_context is None:
                return event

            event.setdefault("tags", {}).setdefault(
                "stageId", str(task_context.stageId())
            )
            event["tags"].setdefault("partitionId", str(task_context.partitionId()))
            event["tags"].setdefault("attemptNumber", str(task_context.attemptNumber()))
            event["tags"].setdefault("taskAttemptId", str(task_context.taskAttemptId()))

            if task_context._localProperties:
                if "sentry_app_name" in task_context._localProperties:
                    event["tags"].setdefault(
                        "app_name", task_context._localProperties["sentry_app_name"]
                    )
                    event["tags"].setdefault(
                        "application_id",
                        task_context._localProperties["sentry_application_id"],
                    )

                if "callSite.short" in task_context._localProperties:
                    event.setdefault("extra", {}).setdefault(
                        "callSite", task_context._localProperties["callSite.short"]
                    )

        return event


def _sentry_worker_main(*args, **kwargs):
    # type: (*Optional[Any], **Optional[Any]) -> None
    import pyspark.worker as original_worker

    try:
        original_worker.main(*args, **kwargs)
    except SystemExit:
        if sentry_sdk.get_client().get_integration(SparkWorkerIntegration) is not None:
            exc_info = sys.exc_info()
            with capture_internal_exceptions():
                _capture_exception(exc_info)
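The worker integration only replaces `pyspark.daemon.worker_main`, so `sentry_sdk.init()` has to run inside the Python daemon process on each executor. One way to wire that up, roughly following the pattern in Sentry's Spark documentation, is a small wrapper daemon module; the module name, DSN, and spark-submit flags below are illustrative, not prescribed by this diff.

# sentry_daemon.py -- illustrative wrapper module shipped to the executors.
import sentry_sdk
from sentry_sdk.integrations.spark import SparkWorkerIntegration

import pyspark.daemon as original_daemon

if __name__ == "__main__":
    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
        integrations=[SparkWorkerIntegration()],
    )
    # Delegate to the stock PySpark daemon; by this point setup_once() has
    # swapped worker_main for _sentry_worker_main, so worker crashes that
    # end in SystemExit are reported before the process exits.
    original_daemon.manager()

The wrapper is then selected at submit time, e.g. `spark-submit --py-files sentry_daemon.py --conf spark.python.use.daemon=true --conf spark.python.daemon.module=sentry_daemon example.py`; treat the configuration keys as a sketch of the approach rather than the only supported invocation.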
sentry_sdk/integrations/sqlalchemy.py
@@ -0,0 +1,142 @@
from sentry_sdk.consts import SPANSTATUS, SPANDATA
from sentry_sdk.integrations import _check_minimum_version, Integration, DidNotEnable
from sentry_sdk.tracing_utils import add_query_source, record_sql_queries
from sentry_sdk.utils import (
    capture_internal_exceptions,
    ensure_integration_enabled,
    parse_version,
)

try:
    from sqlalchemy.engine import Engine  # type: ignore
    from sqlalchemy.event import listen  # type: ignore
    from sqlalchemy import __version__ as SQLALCHEMY_VERSION  # type: ignore
except ImportError:
    raise DidNotEnable("SQLAlchemy not installed.")

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from typing import Any
    from typing import ContextManager
    from typing import Optional

    from sentry_sdk.tracing import Span


class SqlalchemyIntegration(Integration):
    identifier = "sqlalchemy"
    origin = f"auto.db.{identifier}"

    @staticmethod
    def setup_once():
        # type: () -> None
        version = parse_version(SQLALCHEMY_VERSION)
        _check_minimum_version(SqlalchemyIntegration, version)

        listen(Engine, "before_cursor_execute", _before_cursor_execute)
        listen(Engine, "after_cursor_execute", _after_cursor_execute)
        listen(Engine, "handle_error", _handle_error)


@ensure_integration_enabled(SqlalchemyIntegration)
def _before_cursor_execute(
    conn, cursor, statement, parameters, context, executemany, *args
):
    # type: (Any, Any, Any, Any, Any, bool, *Any) -> None
    ctx_mgr = record_sql_queries(
        cursor,
        statement,
        parameters,
        paramstyle=context and context.dialect and context.dialect.paramstyle or None,
        executemany=executemany,
        span_origin=SqlalchemyIntegration.origin,
    )
    context._sentry_sql_span_manager = ctx_mgr

    span = ctx_mgr.__enter__()

    if span is not None:
        _set_db_data(span, conn)
        context._sentry_sql_span = span


@ensure_integration_enabled(SqlalchemyIntegration)
def _after_cursor_execute(conn, cursor, statement, parameters, context, *args):
    # type: (Any, Any, Any, Any, Any, *Any) -> None
    ctx_mgr = getattr(context, "_sentry_sql_span_manager", None)  # type: Optional[ContextManager[Any]]

    if ctx_mgr is not None:
        context._sentry_sql_span_manager = None
        ctx_mgr.__exit__(None, None, None)

    span = getattr(context, "_sentry_sql_span", None)  # type: Optional[Span]
    if span is not None:
        with capture_internal_exceptions():
            add_query_source(span)


def _handle_error(context, *args):
    # type: (Any, *Any) -> None
    execution_context = context.execution_context
    if execution_context is None:
        return

    span = getattr(execution_context, "_sentry_sql_span", None)  # type: Optional[Span]

    if span is not None:
        span.set_status(SPANSTATUS.INTERNAL_ERROR)

    # _after_cursor_execute does not get called for crashing SQL stmts. Judging
    # from SQLAlchemy codebase it does seem like any error coming into this
    # handler is going to be fatal.
    ctx_mgr = getattr(execution_context, "_sentry_sql_span_manager", None)  # type: Optional[ContextManager[Any]]

    if ctx_mgr is not None:
        execution_context._sentry_sql_span_manager = None
        ctx_mgr.__exit__(None, None, None)


# See: https://docs.sqlalchemy.org/en/20/dialects/index.html
def _get_db_system(name):
    # type: (str) -> Optional[str]
    name = str(name)

    if "sqlite" in name:
        return "sqlite"

    if "postgres" in name:
        return "postgresql"

    if "mariadb" in name:
        return "mariadb"

    if "mysql" in name:
        return "mysql"

    if "oracle" in name:
        return "oracle"

    return None


def _set_db_data(span, conn):
    # type: (Span, Any) -> None
    db_system = _get_db_system(conn.engine.name)
    if db_system is not None:
        span.set_data(SPANDATA.DB_SYSTEM, db_system)

    if conn.engine.url is None:
        return

    db_name = conn.engine.url.database
    if db_name is not None:
        span.set_data(SPANDATA.DB_NAME, db_name)

    server_address = conn.engine.url.host
    if server_address is not None:
        span.set_data(SPANDATA.SERVER_ADDRESS, server_address)

    server_port = conn.engine.url.port
    if server_port is not None:
        span.set_data(SPANDATA.SERVER_PORT, server_port)