sentry-sdk 2.42.1__py2.py3-none-any.whl → 2.43.0__py2.py3-none-any.whl

This diff shows the changes between two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.

Potentially problematic release.

This version of sentry-sdk might be problematic.
Files changed (36)
  1. sentry_sdk/__init__.py +2 -0
  2. sentry_sdk/_metrics_batcher.py +1 -1
  3. sentry_sdk/consts.py +87 -2
  4. sentry_sdk/integrations/__init__.py +2 -0
  5. sentry_sdk/integrations/django/caching.py +16 -3
  6. sentry_sdk/integrations/google_genai/__init__.py +3 -0
  7. sentry_sdk/integrations/google_genai/utils.py +16 -6
  8. sentry_sdk/integrations/langchain.py +8 -2
  9. sentry_sdk/integrations/litellm.py +11 -4
  10. sentry_sdk/integrations/mcp.py +552 -0
  11. sentry_sdk/integrations/openai_agents/__init__.py +2 -0
  12. sentry_sdk/integrations/openai_agents/patches/__init__.py +1 -0
  13. sentry_sdk/integrations/openai_agents/patches/error_tracing.py +77 -0
  14. sentry_sdk/integrations/pydantic_ai/__init__.py +47 -0
  15. sentry_sdk/integrations/pydantic_ai/consts.py +1 -0
  16. sentry_sdk/integrations/pydantic_ai/patches/__init__.py +4 -0
  17. sentry_sdk/integrations/pydantic_ai/patches/agent_run.py +217 -0
  18. sentry_sdk/integrations/pydantic_ai/patches/graph_nodes.py +105 -0
  19. sentry_sdk/integrations/pydantic_ai/patches/model_request.py +35 -0
  20. sentry_sdk/integrations/pydantic_ai/patches/tools.py +75 -0
  21. sentry_sdk/integrations/pydantic_ai/spans/__init__.py +3 -0
  22. sentry_sdk/integrations/pydantic_ai/spans/ai_client.py +253 -0
  23. sentry_sdk/integrations/pydantic_ai/spans/execute_tool.py +49 -0
  24. sentry_sdk/integrations/pydantic_ai/spans/invoke_agent.py +112 -0
  25. sentry_sdk/integrations/pydantic_ai/utils.py +175 -0
  26. sentry_sdk/integrations/starlette.py +1 -1
  27. sentry_sdk/integrations/strawberry.py +10 -9
  28. sentry_sdk/logger.py +14 -2
  29. sentry_sdk/tracing_utils.py +1 -1
  30. {sentry_sdk-2.42.1.dist-info → sentry_sdk-2.43.0.dist-info}/METADATA +6 -1
  31. {sentry_sdk-2.42.1.dist-info → sentry_sdk-2.43.0.dist-info}/RECORD +36 -22
  32. /sentry_sdk/{_metrics.py → metrics.py} +0 -0
  33. {sentry_sdk-2.42.1.dist-info → sentry_sdk-2.43.0.dist-info}/WHEEL +0 -0
  34. {sentry_sdk-2.42.1.dist-info → sentry_sdk-2.43.0.dist-info}/entry_points.txt +0 -0
  35. {sentry_sdk-2.42.1.dist-info → sentry_sdk-2.43.0.dist-info}/licenses/LICENSE +0 -0
  36. {sentry_sdk-2.42.1.dist-info → sentry_sdk-2.43.0.dist-info}/top_level.txt +0 -0
sentry_sdk/__init__.py CHANGED
@@ -1,4 +1,5 @@
 from sentry_sdk import profiler
+from sentry_sdk import metrics
 from sentry_sdk.scope import Scope
 from sentry_sdk.transport import Transport, HttpTransport
 from sentry_sdk.client import Client
@@ -48,6 +49,7 @@ __all__ = [ # noqa
     "trace",
     "monitor",
     "logger",
+    "metrics",
     "profiler",
     "start_session",
     "end_session",
sentry_sdk/_metrics_batcher.py CHANGED
@@ -12,7 +12,7 @@ if TYPE_CHECKING:
 
 
 class MetricsBatcher:
-    MAX_METRICS_BEFORE_FLUSH = 100
+    MAX_METRICS_BEFORE_FLUSH = 1000
     FLUSH_WAIT_TIME = 5.0
 
     def __init__(
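This raises the size threshold for a forced flush from 100 to 1,000 buffered metrics; the 5-second timer flush is unchanged. The general size-or-time batching pattern looks roughly like the sketch below (a simplified illustration, not the SDK's actual batcher; `capture` is a stand-in for whatever ships the batch):

import threading
import time


class SizeOrTimeBatcher:
    """Flush buffered items when either a size or a time threshold is hit."""

    MAX_ITEMS_BEFORE_FLUSH = 1000  # mirrors MAX_METRICS_BEFORE_FLUSH above
    FLUSH_WAIT_TIME = 5.0          # seconds, mirrors FLUSH_WAIT_TIME above

    def __init__(self, capture):
        self._capture = capture  # callable that receives a list of items
        self._buffer = []
        self._lock = threading.Lock()
        self._last_flush = time.monotonic()

    def add(self, item):
        with self._lock:
            self._buffer.append(item)
            too_many = len(self._buffer) >= self.MAX_ITEMS_BEFORE_FLUSH
            too_old = time.monotonic() - self._last_flush >= self.FLUSH_WAIT_TIME
        if too_many or too_old:
            self.flush()

    def flush(self):
        with self._lock:
            items, self._buffer = self._buffer, []
            self._last_flush = time.monotonic()
        if items:
            self._capture(items)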
sentry_sdk/consts.py CHANGED
@@ -749,6 +749,90 @@ class SPANDATA:
     Example: "MainThread"
     """
 
+    MCP_TOOL_NAME = "mcp.tool.name"
+    """
+    The name of the MCP tool being called.
+    Example: "get_weather"
+    """
+
+    MCP_PROMPT_NAME = "mcp.prompt.name"
+    """
+    The name of the MCP prompt being retrieved.
+    Example: "code_review"
+    """
+
+    MCP_RESOURCE_URI = "mcp.resource.uri"
+    """
+    The URI of the MCP resource being accessed.
+    Example: "file:///path/to/resource"
+    """
+
+    MCP_METHOD_NAME = "mcp.method.name"
+    """
+    The MCP protocol method name being called.
+    Example: "tools/call", "prompts/get", "resources/read"
+    """
+
+    MCP_REQUEST_ID = "mcp.request.id"
+    """
+    The unique identifier for the MCP request.
+    Example: "req_123abc"
+    """
+
+    MCP_TOOL_RESULT_CONTENT = "mcp.tool.result.content"
+    """
+    The result/output content from an MCP tool execution.
+    Example: "The weather is sunny"
+    """
+
+    MCP_TOOL_RESULT_CONTENT_COUNT = "mcp.tool.result.content_count"
+    """
+    The number of items/keys in the MCP tool result.
+    Example: 5
+    """
+
+    MCP_TOOL_RESULT_IS_ERROR = "mcp.tool.result.is_error"
+    """
+    Whether the MCP tool execution resulted in an error.
+    Example: True
+    """
+
+    MCP_PROMPT_RESULT_MESSAGE_CONTENT = "mcp.prompt.result.message_content"
+    """
+    The message content from an MCP prompt retrieval.
+    Example: "Review the following code..."
+    """
+
+    MCP_PROMPT_RESULT_MESSAGE_ROLE = "mcp.prompt.result.message_role"
+    """
+    The role of the message in an MCP prompt retrieval (only set for single-message prompts).
+    Example: "user", "assistant", "system"
+    """
+
+    MCP_PROMPT_RESULT_MESSAGE_COUNT = "mcp.prompt.result.message_count"
+    """
+    The number of messages in an MCP prompt result.
+    Example: 1, 3
+    """
+
+    MCP_RESOURCE_PROTOCOL = "mcp.resource.protocol"
+    """
+    The protocol/scheme of the MCP resource URI.
+    Example: "file", "http", "https"
+    """
+
+    MCP_TRANSPORT = "mcp.transport"
+    """
+    The transport method used for MCP communication.
+    Example: "pipe" (stdio), "tcp" (HTTP/WebSocket/SSE)
+    """
+
+    MCP_SESSION_ID = "mcp.session.id"
+    """
+    The session identifier for the MCP connection.
+    Example: "a1b2c3d4e5f6"
+    """
+
 
 class SPANSTATUS:
     """
@@ -845,6 +929,7 @@ class OP:
     WEBSOCKET_SERVER = "websocket.server"
     SOCKET_CONNECTION = "socket.connection"
     SOCKET_DNS = "socket.dns"
+    MCP_SERVER = "mcp.server"
 
 
 # This type exists to trick mypy and PyCharm into thinking `init` and `Client`
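Together with the SPANDATA constants above, the new `OP.MCP_SERVER` op gives the MCP integration (new file sentry_sdk/integrations/mcp.py) a vocabulary for its spans. A rough illustration of a manually annotated span using these constants; the values are made up, and in practice the integration derives them from the incoming MCP request and the tool result:

import sentry_sdk
from sentry_sdk.consts import OP, SPANDATA

with sentry_sdk.start_span(op=OP.MCP_SERVER, name="tools/call get_weather") as span:
    # Illustrative values only; the integration sets these automatically.
    span.set_data(SPANDATA.MCP_METHOD_NAME, "tools/call")
    span.set_data(SPANDATA.MCP_TOOL_NAME, "get_weather")
    span.set_data(SPANDATA.MCP_REQUEST_ID, "req_123abc")
    span.set_data(SPANDATA.MCP_TRANSPORT, "tcp")
    span.set_data(SPANDATA.MCP_TOOL_RESULT_IS_ERROR, False)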
@@ -909,7 +994,7 @@ class ClientConstructor:
         error_sampler=None,  # type: Optional[Callable[[Event, Hint], Union[float, bool]]]
         enable_db_query_source=True,  # type: bool
         db_query_source_threshold_ms=100,  # type: int
-        enable_http_request_source=False,  # type: bool
+        enable_http_request_source=True,  # type: bool
         http_request_source_threshold_ms=100,  # type: int
         spotlight=None,  # type: Optional[Union[bool, str]]
         cert_file=None,  # type: Optional[str]
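Note the changed default: `enable_http_request_source` is now on, so (mirroring `enable_db_query_source`) outgoing HTTP request spans slower than the threshold get source-location data attached. A small sketch for restoring the previous behavior or tuning the threshold (placeholder DSN):

import sentry_sdk

sentry_sdk.init(
    dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
    traces_sample_rate=1.0,
    enable_http_request_source=False,        # opt back out of the new default
    # http_request_source_threshold_ms=250,  # or keep it on and raise the threshold
)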
@@ -1348,4 +1433,4 @@ DEFAULT_OPTIONS = _get_default_options()
 del _get_default_options
 
 
-VERSION = "2.42.1"
+VERSION = "2.43.0"
sentry_sdk/integrations/__init__.py CHANGED
@@ -149,9 +149,11 @@ _MIN_VERSIONS = {
     "launchdarkly": (9, 8, 0),
     "litellm": (1, 77, 5),
     "loguru": (0, 7, 0),
+    "mcp": (1, 15, 0),
     "openai": (1, 0, 0),
     "openai_agents": (0, 0, 19),
     "openfeature": (0, 7, 1),
+    "pydantic_ai": (1, 0, 0),
     "quart": (0, 16, 0),
     "ray": (2, 7, 0),
     "requests": (2, 0, 0),
sentry_sdk/integrations/django/caching.py CHANGED
@@ -45,7 +45,8 @@ def _patch_cache_method(cache, method_name, address, port):
     ):
         # type: (CacheHandler, str, Callable[..., Any], tuple[Any, ...], dict[str, Any], Optional[str], Optional[int]) -> Any
         is_set_operation = method_name.startswith("set")
-        is_get_operation = not is_set_operation
+        is_get_method = method_name == "get"
+        is_get_many_method = method_name == "get_many"
 
         op = OP.CACHE_PUT if is_set_operation else OP.CACHE_GET
         description = _get_span_description(method_name, args, kwargs)
@@ -69,8 +70,20 @@ def _patch_cache_method(cache, method_name, address, port):
             span.set_data(SPANDATA.CACHE_KEY, key)
 
         item_size = None
-        if is_get_operation:
-            if value:
+        if is_get_many_method:
+            if value != {}:
+                item_size = len(str(value))
+                span.set_data(SPANDATA.CACHE_HIT, True)
+            else:
+                span.set_data(SPANDATA.CACHE_HIT, False)
+        elif is_get_method:
+            default_value = None
+            if len(args) >= 2:
+                default_value = args[1]
+            elif "default" in kwargs:
+                default_value = kwargs["default"]
+
+            if value != default_value:
                 item_size = len(str(value))
                 span.set_data(SPANDATA.CACHE_HIT, True)
             else:
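The old `if value:` check could misreport cache hits: a cached falsy value (0, "", False) looked like a miss, and a truthy caller-supplied default returned on a miss looked like a hit. The new code derives the default from `args`/`kwargs` and compares the returned value against it, and handles `get_many` (which returns a dict of found keys) separately. A standalone sketch of the decision logic, using a hypothetical helper name:

def cache_hit(method_name, args, kwargs, value):
    # Mirrors the hit/miss decision above for instrumented Django cache calls.
    if method_name == "get_many":
        # get_many returns a dict containing only the keys that were found.
        return value != {}
    if method_name == "get":
        # cache.get(key, default=None) returns the supplied default on a miss.
        default = args[1] if len(args) >= 2 else kwargs.get("default")
        return value != default
    return False


# A cached falsy value now counts as a hit when it differs from the default:
assert cache_hit("get", ("views",), {}, 0) is True
# Getting back the caller-supplied default is treated as a miss:
assert cache_hit("get", ("views", 0), {}, 0) is False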
sentry_sdk/integrations/google_genai/__init__.py CHANGED
@@ -92,6 +92,7 @@ def _wrap_generate_content_stream(f):
         chat_span.set_data(SPANDATA.GEN_AI_REQUEST_MODEL, model_name)
         set_span_data_for_request(chat_span, integration, model_name, contents, kwargs)
         chat_span.set_data(SPANDATA.GEN_AI_RESPONSE_STREAMING, True)
+        chat_span.set_data(SPANDATA.GEN_AI_AGENT_NAME, model_name)
 
         try:
             stream = f(self, *args, **kwargs)
@@ -165,6 +166,7 @@ def _wrap_async_generate_content_stream(f):
         chat_span.set_data(SPANDATA.GEN_AI_REQUEST_MODEL, model_name)
         set_span_data_for_request(chat_span, integration, model_name, contents, kwargs)
         chat_span.set_data(SPANDATA.GEN_AI_RESPONSE_STREAMING, True)
+        chat_span.set_data(SPANDATA.GEN_AI_AGENT_NAME, model_name)
 
         try:
             stream = await f(self, *args, **kwargs)
@@ -233,6 +235,7 @@ def _wrap_generate_content(f):
         chat_span.set_data(SPANDATA.GEN_AI_OPERATION_NAME, "chat")
         chat_span.set_data(SPANDATA.GEN_AI_SYSTEM, GEN_AI_SYSTEM)
         chat_span.set_data(SPANDATA.GEN_AI_REQUEST_MODEL, model_name)
+        chat_span.set_data(SPANDATA.GEN_AI_AGENT_NAME, model_name)
         set_span_data_for_request(
             chat_span, integration, model_name, contents, kwargs
         )
sentry_sdk/integrations/google_genai/utils.py CHANGED
@@ -15,7 +15,11 @@ from typing import (
 )
 
 import sentry_sdk
-from sentry_sdk.ai.utils import set_data_normalized
+from sentry_sdk.ai.utils import (
+    set_data_normalized,
+    truncate_and_annotate_messages,
+    normalize_message_roles,
+)
 from sentry_sdk.consts import OP, SPANDATA
 from sentry_sdk.scope import should_send_default_pii
 from sentry_sdk.utils import (
@@ -462,12 +466,18 @@ def set_span_data_for_request(span, integration, model, contents, kwargs):
         messages.append({"role": "user", "content": contents_text})
 
     if messages:
-        set_data_normalized(
-            span,
-            SPANDATA.GEN_AI_REQUEST_MESSAGES,
-            messages,
-            unpack=False,
+        normalized_messages = normalize_message_roles(messages)
+        scope = sentry_sdk.get_current_scope()
+        messages_data = truncate_and_annotate_messages(
+            normalized_messages, span, scope
         )
+        if messages_data is not None:
+            set_data_normalized(
+                span,
+                SPANDATA.GEN_AI_REQUEST_MESSAGES,
+                messages_data,
+                unpack=False,
+            )
 
     # Extract parameters directly from config (not nested under generation_config)
     for param, span_key in [
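The same change is applied to the LiteLLM integration below: prompt messages now go through `normalize_message_roles` and `truncate_and_annotate_messages` before being attached as `gen_ai.request.messages`, so oversized prompt lists no longer land on the span verbatim. The helpers' internals are not part of this diff; the sketch below is only a toy illustration of the truncation idea (budget and strategy are assumptions):

def truncate_messages(messages, max_chars=2000):
    # Toy illustration: keep the most recent messages that fit a character budget.
    # The real truncate_and_annotate_messages helper also receives the span and
    # scope, presumably so the event can be annotated when truncation happens.
    kept, used = [], 0
    for message in reversed(messages):
        size = len(str(message.get("content", "")))
        if kept and used + size > max_chars:
            break
        kept.append(message)
        used += size
    return list(reversed(kept))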
sentry_sdk/integrations/langchain.py CHANGED
@@ -50,9 +50,15 @@ except ImportError:
 
 
 try:
-    from langchain.agents import AgentExecutor
+    # >=v1
+    from langchain_classic.agents import AgentExecutor  # type: ignore[import-not-found]
 except ImportError:
-    AgentExecutor = None
+    try:
+        # <v1
+        from langchain.agents import AgentExecutor
+    except ImportError:
+        AgentExecutor = None
+
 
 DATA_FIELDS = {
     "frequency_penalty": SPANDATA.GEN_AI_REQUEST_FREQUENCY_PENALTY,
sentry_sdk/integrations/litellm.py CHANGED
@@ -3,7 +3,11 @@ from typing import TYPE_CHECKING
 import sentry_sdk
 from sentry_sdk import consts
 from sentry_sdk.ai.monitoring import record_token_usage
-from sentry_sdk.ai.utils import get_start_span_function, set_data_normalized
+from sentry_sdk.ai.utils import (
+    get_start_span_function,
+    set_data_normalized,
+    truncate_and_annotate_messages,
+)
 from sentry_sdk.consts import SPANDATA
 from sentry_sdk.integrations import DidNotEnable, Integration
 from sentry_sdk.scope import should_send_default_pii
@@ -76,9 +80,12 @@ def _input_callback(kwargs):
     # Record messages if allowed
     messages = kwargs.get("messages", [])
     if messages and should_send_default_pii() and integration.include_prompts:
-        set_data_normalized(
-            span, SPANDATA.GEN_AI_REQUEST_MESSAGES, messages, unpack=False
-        )
+        scope = sentry_sdk.get_current_scope()
+        messages_data = truncate_and_annotate_messages(messages, span, scope)
+        if messages_data is not None:
+            set_data_normalized(
+                span, SPANDATA.GEN_AI_REQUEST_MESSAGES, messages_data, unpack=False
+            )
 
     # Record other parameters
     params = {