sentry-sdk 2.41.0__py2.py3-none-any.whl → 2.42.1__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This version of sentry-sdk has been flagged as a potentially problematic release.

@@ -9,6 +9,7 @@ from sentry_sdk.ai.utils import (
     normalize_message_roles,
     set_data_normalized,
     get_start_span_function,
+    truncate_and_annotate_messages,
 )
 from sentry_sdk.consts import OP, SPANDATA
 from sentry_sdk.integrations import DidNotEnable, Integration
@@ -221,12 +222,17 @@ class SentryLangchainCallback(BaseCallbackHandler):  # type: ignore[misc]
                     }
                     for prompt in prompts
                 ]
-                set_data_normalized(
-                    span,
-                    SPANDATA.GEN_AI_REQUEST_MESSAGES,
-                    normalized_messages,
-                    unpack=False,
+                scope = sentry_sdk.get_current_scope()
+                messages_data = truncate_and_annotate_messages(
+                    normalized_messages, span, scope
                 )
+                if messages_data is not None:
+                    set_data_normalized(
+                        span,
+                        SPANDATA.GEN_AI_REQUEST_MESSAGES,
+                        messages_data,
+                        unpack=False,
+                    )
 
     def on_chat_model_start(self, serialized, messages, *, run_id, **kwargs):
         # type: (SentryLangchainCallback, Dict[str, Any], List[List[BaseMessage]], UUID, Any) -> Any
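The same replacement pattern appears in every GEN_AI message-recording hunk in this release: instead of writing the normalized messages straight onto the span, they are first passed through truncate_and_annotate_messages together with the current scope, and only a non-None result is recorded. A minimal sketch of the shared flow, using the names imported above (the helper name _record_request_messages is hypothetical):

    import sentry_sdk
    from sentry_sdk.ai.utils import (
        normalize_message_roles,
        set_data_normalized,
        truncate_and_annotate_messages,
    )
    from sentry_sdk.consts import SPANDATA

    def _record_request_messages(span, raw_messages):
        # Normalize roles first, then truncate/annotate against the current scope.
        normalized = normalize_message_roles(raw_messages)
        scope = sentry_sdk.get_current_scope()
        messages_data = truncate_and_annotate_messages(normalized, span, scope)
        # None means nothing should be attached to the span.
        if messages_data is not None:
            set_data_normalized(
                span, SPANDATA.GEN_AI_REQUEST_MESSAGES, messages_data, unpack=False
            )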
@@ -278,13 +284,17 @@ class SentryLangchainCallback(BaseCallbackHandler):  # type: ignore[misc]
                             self._normalize_langchain_message(message)
                         )
                 normalized_messages = normalize_message_roles(normalized_messages)
-
-                set_data_normalized(
-                    span,
-                    SPANDATA.GEN_AI_REQUEST_MESSAGES,
-                    normalized_messages,
-                    unpack=False,
+                scope = sentry_sdk.get_current_scope()
+                messages_data = truncate_and_annotate_messages(
+                    normalized_messages, span, scope
                 )
+                if messages_data is not None:
+                    set_data_normalized(
+                        span,
+                        SPANDATA.GEN_AI_REQUEST_MESSAGES,
+                        messages_data,
+                        unpack=False,
+                    )
 
     def on_chat_model_end(self, response, *, run_id, **kwargs):
         # type: (SentryLangchainCallback, LLMResult, UUID, Any) -> Any
@@ -758,12 +768,17 @@ def _wrap_agent_executor_invoke(f):
                 and integration.include_prompts
             ):
                 normalized_messages = normalize_message_roles([input])
-                set_data_normalized(
-                    span,
-                    SPANDATA.GEN_AI_REQUEST_MESSAGES,
-                    normalized_messages,
-                    unpack=False,
+                scope = sentry_sdk.get_current_scope()
+                messages_data = truncate_and_annotate_messages(
+                    normalized_messages, span, scope
                 )
+                if messages_data is not None:
+                    set_data_normalized(
+                        span,
+                        SPANDATA.GEN_AI_REQUEST_MESSAGES,
+                        messages_data,
+                        unpack=False,
+                    )
 
             output = result.get("output")
             if (
@@ -813,12 +828,17 @@ def _wrap_agent_executor_stream(f):
                 and integration.include_prompts
             ):
                 normalized_messages = normalize_message_roles([input])
-                set_data_normalized(
-                    span,
-                    SPANDATA.GEN_AI_REQUEST_MESSAGES,
-                    normalized_messages,
-                    unpack=False,
+                scope = sentry_sdk.get_current_scope()
+                messages_data = truncate_and_annotate_messages(
+                    normalized_messages, span, scope
                 )
+                if messages_data is not None:
+                    set_data_normalized(
+                        span,
+                        SPANDATA.GEN_AI_REQUEST_MESSAGES,
+                        messages_data,
+                        unpack=False,
+                    )
 
             # Run the agent
             result = f(self, *args, **kwargs)
@@ -2,7 +2,11 @@ from functools import wraps
 from typing import Any, Callable, List, Optional
 
 import sentry_sdk
-from sentry_sdk.ai.utils import set_data_normalized, normalize_message_roles
+from sentry_sdk.ai.utils import (
+    set_data_normalized,
+    normalize_message_roles,
+    truncate_and_annotate_messages,
+)
 from sentry_sdk.consts import OP, SPANDATA
 from sentry_sdk.integrations import DidNotEnable, Integration
 from sentry_sdk.scope import should_send_default_pii
@@ -181,12 +185,17 @@ def _wrap_pregel_invoke(f):
             input_messages = _parse_langgraph_messages(args[0])
             if input_messages:
                 normalized_input_messages = normalize_message_roles(input_messages)
-                set_data_normalized(
-                    span,
-                    SPANDATA.GEN_AI_REQUEST_MESSAGES,
-                    normalized_input_messages,
-                    unpack=False,
+                scope = sentry_sdk.get_current_scope()
+                messages_data = truncate_and_annotate_messages(
+                    normalized_input_messages, span, scope
                 )
+                if messages_data is not None:
+                    set_data_normalized(
+                        span,
+                        SPANDATA.GEN_AI_REQUEST_MESSAGES,
+                        messages_data,
+                        unpack=False,
+                    )
 
             result = f(self, *args, **kwargs)
 
@@ -232,12 +241,17 @@ def _wrap_pregel_ainvoke(f):
             input_messages = _parse_langgraph_messages(args[0])
             if input_messages:
                 normalized_input_messages = normalize_message_roles(input_messages)
-                set_data_normalized(
-                    span,
-                    SPANDATA.GEN_AI_REQUEST_MESSAGES,
-                    normalized_input_messages,
-                    unpack=False,
+                scope = sentry_sdk.get_current_scope()
+                messages_data = truncate_and_annotate_messages(
+                    normalized_input_messages, span, scope
                 )
+                if messages_data is not None:
+                    set_data_normalized(
+                        span,
+                        SPANDATA.GEN_AI_REQUEST_MESSAGES,
+                        messages_data,
+                        unpack=False,
+                    )
 
             result = await f(self, *args, **kwargs)
 
@@ -48,8 +48,11 @@ def _input_callback(kwargs):
         model = full_model
         provider = "unknown"
 
-    messages = kwargs.get("messages", [])
-    operation = "chat" if messages else "embeddings"
+    call_type = kwargs.get("call_type", None)
+    if call_type == "embedding":
+        operation = "embeddings"
+    else:
+        operation = "chat"
 
     # Start a new span/transaction
     span = get_start_span_function()(
@@ -71,6 +74,7 @@ def _input_callback(kwargs):
     set_data_normalized(span, SPANDATA.GEN_AI_OPERATION_NAME, operation)
 
     # Record messages if allowed
+    messages = kwargs.get("messages", [])
    if messages and should_send_default_pii() and integration.include_prompts:
        set_data_normalized(
            span, SPANDATA.GEN_AI_REQUEST_MESSAGES, messages, unpack=False
@@ -3,7 +3,11 @@ from functools import wraps
 import sentry_sdk
 from sentry_sdk import consts
 from sentry_sdk.ai.monitoring import record_token_usage
-from sentry_sdk.ai.utils import set_data_normalized, normalize_message_roles
+from sentry_sdk.ai.utils import (
+    set_data_normalized,
+    normalize_message_roles,
+    truncate_and_annotate_messages,
+)
 from sentry_sdk.consts import SPANDATA
 from sentry_sdk.integrations import DidNotEnable, Integration
 from sentry_sdk.scope import should_send_default_pii
@@ -22,9 +26,14 @@ if TYPE_CHECKING:
 
 try:
     try:
-        from openai import NOT_GIVEN
+        from openai import NotGiven
     except ImportError:
-        NOT_GIVEN = None
+        NotGiven = None
+
+    try:
+        from openai import Omit
+    except ImportError:
+        Omit = None
 
     from openai.resources.chat.completions import Completions, AsyncCompletions
     from openai.resources import Embeddings, AsyncEmbeddings
@@ -183,9 +192,12 @@ def _set_input_data(span, kwargs, operation, integration):
         and integration.include_prompts
     ):
         normalized_messages = normalize_message_roles(messages)
-        set_data_normalized(
-            span, SPANDATA.GEN_AI_REQUEST_MESSAGES, normalized_messages, unpack=False
-        )
+        scope = sentry_sdk.get_current_scope()
+        messages_data = truncate_and_annotate_messages(normalized_messages, span, scope)
+        if messages_data is not None:
+            set_data_normalized(
+                span, SPANDATA.GEN_AI_REQUEST_MESSAGES, messages_data, unpack=False
+            )
 
     # Input attributes: Common
     set_data_normalized(span, SPANDATA.GEN_AI_SYSTEM, "openai")
@@ -204,12 +216,12 @@ def _set_input_data(span, kwargs, operation, integration):
     for key, attribute in kwargs_keys_to_attributes.items():
         value = kwargs.get(key)
 
-        if value is not NOT_GIVEN and value is not None:
+        if value is not None and _is_given(value):
             set_data_normalized(span, attribute, value)
 
     # Input attributes: Tools
     tools = kwargs.get("tools")
-    if tools is not NOT_GIVEN and tools is not None and len(tools) > 0:
+    if tools is not None and _is_given(tools) and len(tools) > 0:
         set_data_normalized(
             span, SPANDATA.GEN_AI_REQUEST_AVAILABLE_TOOLS, safe_serialize(tools)
         )
@@ -231,7 +243,7 @@ def _set_output_data(span, response, kwargs, integration, finish_span=True):
 
     if hasattr(response, "choices"):
         if should_send_default_pii() and integration.include_prompts:
-            response_text = [choice.message.dict() for choice in response.choices]
+            response_text = [choice.message.model_dump() for choice in response.choices]
             if len(response_text) > 0:
                 set_data_normalized(span, SPANDATA.GEN_AI_RESPONSE_TEXT, response_text)
 
@@ -689,3 +701,15 @@ def _wrap_async_responses_create(f):
         return await _execute_async(f, *args, **kwargs)
 
     return _sentry_patched_responses_async
+
+
+def _is_given(obj):
+    # type: (Any) -> bool
+    """
+    Check for givenness safely across different openai versions.
+    """
+    if NotGiven is not None and isinstance(obj, NotGiven):
+        return False
+    if Omit is not None and isinstance(obj, Omit):
+        return False
+    return True
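The new _is_given helper exists because newer openai releases add an Omit sentinel alongside the NOT_GIVEN sentinel (an instance of NotGiven), and either class may be absent depending on the installed version; both are therefore imported defensively and checked against None before the isinstance test. A rough illustration of the intended behaviour, assuming an openai version that exposes the module-level NOT_GIVEN sentinel:

    from openai import NOT_GIVEN  # assumed: module-level instance of NotGiven

    assert _is_given({"temperature": 0.2}) is True   # real values count as "given"
    assert _is_given(NOT_GIVEN) is False             # the sentinel is filtered out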
@@ -1,4 +1,5 @@
 import inspect
+import functools
 import sys
 
 import sentry_sdk
@@ -17,7 +18,6 @@ try:
     import ray  # type: ignore[import-not-found]
 except ImportError:
     raise DidNotEnable("Ray not installed.")
-import functools
 
 from typing import TYPE_CHECKING
 
@@ -54,12 +54,13 @@ def _patch_ray_remote():
 
     def wrapper(user_f):
         # type: (Callable[..., Any]) -> Any
-        def new_func(*f_args, _tracing=None, **f_kwargs):
+        @functools.wraps(user_f)
+        def new_func(*f_args, _sentry_tracing=None, **f_kwargs):
             # type: (Any, Optional[dict[str, Any]], Any) -> Any
             _check_sentry_initialized()
 
             transaction = sentry_sdk.continue_trace(
-                _tracing or {},
+                _sentry_tracing or {},
                 op=OP.QUEUE_TASK_RAY,
                 name=qualname_from_function(user_f),
                 origin=RayIntegration.origin,
@@ -78,6 +79,19 @@ def _patch_ray_remote():
 
             return result
 
+        # Patching new_func signature to add the _sentry_tracing parameter to it
+        # Ray later inspects the signature and finds the unexpected parameter otherwise
+        signature = inspect.signature(new_func)
+        params = list(signature.parameters.values())
+        params.append(
+            inspect.Parameter(
+                "_sentry_tracing",
+                kind=inspect.Parameter.KEYWORD_ONLY,
+                default=None,
+            )
+        )
+        new_func.__signature__ = signature.replace(parameters=params)  # type: ignore[attr-defined]
+
         if f:
             rv = old_remote(new_func)
         else:
@@ -99,7 +113,9 @@ def _patch_ray_remote():
                 for k, v in sentry_sdk.get_current_scope().iter_trace_propagation_headers()
             }
             try:
-                result = old_remote_method(*args, **kwargs, _tracing=tracing)
+                result = old_remote_method(
+                    *args, **kwargs, _sentry_tracing=tracing
+                )
                 span.set_status(SPANSTATUS.OK)
             except Exception:
                 span.set_status(SPANSTATUS.INTERNAL_ERROR)
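The __signature__ patching above is needed because functools.wraps makes inspect.signature report the wrapped user function's parameters (via __wrapped__), so the keyword-only _sentry_tracing parameter that the remote-method wrapper actually passes would look unexpected when the visible signature is inspected. A standalone sketch of that mechanism (user_task is a placeholder function, not part of the SDK):

    import functools
    import inspect

    def user_task(x, y=1):
        return x + y

    @functools.wraps(user_task)
    def new_func(*f_args, _sentry_tracing=None, **f_kwargs):
        return user_task(*f_args, **f_kwargs)

    print(inspect.signature(new_func))  # (x, y=1) -- _sentry_tracing is invisible

    signature = inspect.signature(new_func)
    params = list(signature.parameters.values())
    params.append(
        inspect.Parameter(
            "_sentry_tracing", kind=inspect.Parameter.KEYWORD_ONLY, default=None
        )
    )
    new_func.__signature__ = signature.replace(parameters=params)

    print(inspect.signature(new_func))  # (x, y=1, *, _sentry_tracing=None)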
@@ -20,12 +20,13 @@ def _get_safe_command(name, args):
     # type: (str, Sequence[Any]) -> str
     command_parts = [name]
 
+    name_low = name.lower()
+    send_default_pii = should_send_default_pii()
+
     for i, arg in enumerate(args):
         if i > _MAX_NUM_ARGS:
             break
 
-        name_low = name.lower()
-
         if name_low in _COMMANDS_INCLUDING_SENSITIVE_DATA:
             command_parts.append(SENSITIVE_DATA_SUBSTITUTE)
             continue
@@ -33,9 +34,8 @@ def _get_safe_command(name, args):
         arg_is_the_key = i == 0
         if arg_is_the_key:
             command_parts.append(repr(arg))
-
         else:
-            if should_send_default_pii():
+            if send_default_pii:
                 command_parts.append(repr(arg))
             else:
                 command_parts.append(SENSITIVE_DATA_SUBSTITUTE)
@@ -8,7 +8,11 @@ import sentry_sdk
 from sentry_sdk.consts import OP, SPANDATA
 from sentry_sdk.integrations import Integration
 from sentry_sdk.scope import add_global_event_processor
-from sentry_sdk.tracing_utils import EnvironHeaders, should_propagate_trace
+from sentry_sdk.tracing_utils import (
+    EnvironHeaders,
+    should_propagate_trace,
+    add_http_request_source,
+)
 from sentry_sdk.utils import (
     SENSITIVE_DATA_SUBSTITUTE,
     capture_internal_exceptions,
@@ -135,6 +139,9 @@ def _install_httplib():
             finally:
                 span.finish()
 
+            with capture_internal_exceptions():
+                add_http_request_source(span)
+
             return rv
 
     HTTPConnection.putrequest = putrequest  # type: ignore[method-assign]
sentry_sdk/scope.py CHANGED
@@ -188,6 +188,7 @@ class Scope:
         "_extras",
         "_breadcrumbs",
         "_n_breadcrumbs_truncated",
+        "_gen_ai_original_message_count",
         "_event_processors",
         "_error_processors",
         "_should_capture",
@@ -213,6 +214,7 @@
         self._name = None  # type: Optional[str]
         self._propagation_context = None  # type: Optional[PropagationContext]
         self._n_breadcrumbs_truncated = 0  # type: int
+        self._gen_ai_original_message_count = {}  # type: Dict[str, int]
 
         self.client = NonRecordingClient()  # type: sentry_sdk.client.BaseClient
 
@@ -247,6 +249,7 @@
 
         rv._breadcrumbs = copy(self._breadcrumbs)
         rv._n_breadcrumbs_truncated = self._n_breadcrumbs_truncated
+        rv._gen_ai_original_message_count = self._gen_ai_original_message_count.copy()
         rv._event_processors = self._event_processors.copy()
         rv._error_processors = self._error_processors.copy()
         rv._propagation_context = self._propagation_context
@@ -1583,6 +1586,10 @@
         self._n_breadcrumbs_truncated = (
             self._n_breadcrumbs_truncated + scope._n_breadcrumbs_truncated
         )
+        if scope._gen_ai_original_message_count:
+            self._gen_ai_original_message_count.update(
+                scope._gen_ai_original_message_count
+            )
         if scope._span:
             self._span = scope._span
         if scope._attachments:
@@ -1679,7 +1686,7 @@ def new_scope():
         try:
             # restore original scope
             _current_scope.reset(token)
-        except LookupError:
+        except (LookupError, ValueError):
             capture_internal_exception(sys.exc_info())
 
 
@@ -1717,7 +1724,7 @@ def use_scope(scope):
         try:
             # restore original scope
             _current_scope.reset(token)
-        except LookupError:
+        except (LookupError, ValueError):
             capture_internal_exception(sys.exc_info())
 
 
@@ -1761,12 +1768,12 @@ def isolation_scope():
         # restore original scopes
         try:
             _current_scope.reset(current_token)
-        except LookupError:
+        except (LookupError, ValueError):
             capture_internal_exception(sys.exc_info())
 
         try:
             _isolation_scope.reset(isolation_token)
-        except LookupError:
+        except (LookupError, ValueError):
             capture_internal_exception(sys.exc_info())
 
 
@@ -1808,12 +1815,12 @@ def use_isolation_scope(isolation_scope):
         # restore original scopes
         try:
             _current_scope.reset(current_token)
-        except LookupError:
+        except (LookupError, ValueError):
             capture_internal_exception(sys.exc_info())
 
         try:
             _isolation_scope.reset(isolation_token)
-        except LookupError:
+        except (LookupError, ValueError):
             capture_internal_exception(sys.exc_info())
 
 
@@ -218,33 +218,11 @@ def _should_be_included(
     )
 
 
-def add_query_source(span):
-    # type: (sentry_sdk.tracing.Span) -> None
+def add_source(span, project_root, in_app_include, in_app_exclude):
+    # type: (sentry_sdk.tracing.Span, Optional[str], Optional[list[str]], Optional[list[str]]) -> None
     """
     Adds OTel compatible source code information to the span
     """
-    client = sentry_sdk.get_client()
-    if not client.is_active():
-        return
-
-    if span.timestamp is None or span.start_timestamp is None:
-        return
-
-    should_add_query_source = client.options.get("enable_db_query_source", True)
-    if not should_add_query_source:
-        return
-
-    duration = span.timestamp - span.start_timestamp
-    threshold = client.options.get("db_query_source_threshold_ms", 0)
-    slow_query = duration / timedelta(milliseconds=1) > threshold
-
-    if not slow_query:
-        return
-
-    project_root = client.options["project_root"]
-    in_app_include = client.options.get("in_app_include")
-    in_app_exclude = client.options.get("in_app_exclude")
-
     # Find the correct frame
     frame = sys._getframe()  # type: Union[FrameType, None]
     while frame is not None:
@@ -309,6 +287,68 @@ add_query_source(span):
     span.set_data(SPANDATA.CODE_FUNCTION, frame.f_code.co_name)
 
 
+def add_query_source(span):
+    # type: (sentry_sdk.tracing.Span) -> None
+    """
+    Adds OTel compatible source code information to a database query span
+    """
+    client = sentry_sdk.get_client()
+    if not client.is_active():
+        return
+
+    if span.timestamp is None or span.start_timestamp is None:
+        return
+
+    should_add_query_source = client.options.get("enable_db_query_source", True)
+    if not should_add_query_source:
+        return
+
+    duration = span.timestamp - span.start_timestamp
+    threshold = client.options.get("db_query_source_threshold_ms", 0)
+    slow_query = duration / timedelta(milliseconds=1) > threshold
+
+    if not slow_query:
+        return
+
+    add_source(
+        span=span,
+        project_root=client.options["project_root"],
+        in_app_include=client.options.get("in_app_include"),
+        in_app_exclude=client.options.get("in_app_exclude"),
+    )
+
+
+def add_http_request_source(span):
+    # type: (sentry_sdk.tracing.Span) -> None
+    """
+    Adds OTel compatible source code information to a span for an outgoing HTTP request
+    """
+    client = sentry_sdk.get_client()
+    if not client.is_active():
+        return
+
+    if span.timestamp is None or span.start_timestamp is None:
+        return
+
+    should_add_request_source = client.options.get("enable_http_request_source", False)
+    if not should_add_request_source:
+        return
+
+    duration = span.timestamp - span.start_timestamp
+    threshold = client.options.get("http_request_source_threshold_ms", 0)
+    slow_query = duration / timedelta(milliseconds=1) > threshold
+
+    if not slow_query:
+        return
+
+    add_source(
+        span=span,
+        project_root=client.options["project_root"],
+        in_app_include=client.options.get("in_app_include"),
+        in_app_exclude=client.options.get("in_app_exclude"),
+    )
+
+
 def extract_sentrytrace_data(header):
     # type: (Optional[str]) -> Optional[Dict[str, Union[str, bool, None]]]
     """
sentry_sdk/utils.py CHANGED
@@ -1484,17 +1484,37 @@ class TimeoutThread(threading.Thread):
     waiting_time and raises a custom ServerlessTimeout exception.
     """
 
-    def __init__(self, waiting_time, configured_timeout):
-        # type: (float, int) -> None
+    def __init__(
+        self, waiting_time, configured_timeout, isolation_scope=None, current_scope=None
+    ):
+        # type: (float, int, Optional[sentry_sdk.Scope], Optional[sentry_sdk.Scope]) -> None
         threading.Thread.__init__(self)
         self.waiting_time = waiting_time
         self.configured_timeout = configured_timeout
+
+        self.isolation_scope = isolation_scope
+        self.current_scope = current_scope
+
         self._stop_event = threading.Event()
 
     def stop(self):
         # type: () -> None
         self._stop_event.set()
 
+    def _capture_exception(self):
+        # type: () -> ExcInfo
+        exc_info = sys.exc_info()
+
+        client = sentry_sdk.get_client()
+        event, hint = event_from_exception(
+            exc_info,
+            client_options=client.options,
+            mechanism={"type": "threading", "handled": False},
+        )
+        sentry_sdk.capture_event(event, hint=hint)
+
+        return exc_info
+
     def run(self):
         # type: () -> None
 
@@ -1510,6 +1530,18 @@ class TimeoutThread(threading.Thread):
             integer_configured_timeout = integer_configured_timeout + 1
 
         # Raising Exception after timeout duration is reached
+        if self.isolation_scope is not None and self.current_scope is not None:
+            with sentry_sdk.scope.use_isolation_scope(self.isolation_scope):
+                with sentry_sdk.scope.use_scope(self.current_scope):
+                    try:
+                        raise ServerlessTimeoutWarning(
+                            "WARNING : Function is expected to get timed out. Configured timeout duration = {} seconds.".format(
+                                integer_configured_timeout
+                            )
+                        )
+                    except Exception:
+                        reraise(*self._capture_exception())
+
         raise ServerlessTimeoutWarning(
             "WARNING : Function is expected to get timed out. Configured timeout duration = {} seconds.".format(
                 integer_configured_timeout
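TimeoutThread can now be handed the isolation and current scopes so that the ServerlessTimeoutWarning it raises in its own thread is captured with the originating request's context rather than an empty scope. A rough sketch of how a serverless wrapper might construct it (remaining_seconds is a hypothetical value supplied by the runtime):

    import sentry_sdk
    from sentry_sdk.utils import TimeoutThread

    timeout_thread = TimeoutThread(
        waiting_time=remaining_seconds - 0.5,
        configured_timeout=remaining_seconds,
        isolation_scope=sentry_sdk.get_isolation_scope(),
        current_scope=sentry_sdk.get_current_scope(),
    )
    timeout_thread.start()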
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: sentry-sdk
-Version: 2.41.0
+Version: 2.42.1
 Summary: Python client for Sentry (https://sentry.io)
 Home-page: https://github.com/getsentry/sentry-python
 Author: Sentry Team and Contributors
@@ -118,6 +118,8 @@ Provides-Extra: tornado
 Requires-Dist: tornado>=6; extra == "tornado"
 Provides-Extra: unleash
 Requires-Dist: UnleashClient>=6.0.1; extra == "unleash"
+Provides-Extra: google-genai
+Requires-Dist: google-genai>=1.29.0; extra == "google-genai"
 Dynamic: author
 Dynamic: author-email
 Dynamic: classifier