genai-otel-instrument 0.1.24__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (69)
  1. genai_otel/__init__.py +132 -0
  2. genai_otel/__version__.py +34 -0
  3. genai_otel/auto_instrument.py +602 -0
  4. genai_otel/cli.py +92 -0
  5. genai_otel/config.py +333 -0
  6. genai_otel/cost_calculator.py +467 -0
  7. genai_otel/cost_enriching_exporter.py +207 -0
  8. genai_otel/cost_enrichment_processor.py +174 -0
  9. genai_otel/evaluation/__init__.py +76 -0
  10. genai_otel/evaluation/bias_detector.py +364 -0
  11. genai_otel/evaluation/config.py +261 -0
  12. genai_otel/evaluation/hallucination_detector.py +525 -0
  13. genai_otel/evaluation/pii_detector.py +356 -0
  14. genai_otel/evaluation/prompt_injection_detector.py +262 -0
  15. genai_otel/evaluation/restricted_topics_detector.py +316 -0
  16. genai_otel/evaluation/span_processor.py +962 -0
  17. genai_otel/evaluation/toxicity_detector.py +406 -0
  18. genai_otel/exceptions.py +17 -0
  19. genai_otel/gpu_metrics.py +516 -0
  20. genai_otel/instrumentors/__init__.py +71 -0
  21. genai_otel/instrumentors/anthropic_instrumentor.py +134 -0
  22. genai_otel/instrumentors/anyscale_instrumentor.py +27 -0
  23. genai_otel/instrumentors/autogen_instrumentor.py +394 -0
  24. genai_otel/instrumentors/aws_bedrock_instrumentor.py +94 -0
  25. genai_otel/instrumentors/azure_openai_instrumentor.py +69 -0
  26. genai_otel/instrumentors/base.py +919 -0
  27. genai_otel/instrumentors/bedrock_agents_instrumentor.py +398 -0
  28. genai_otel/instrumentors/cohere_instrumentor.py +140 -0
  29. genai_otel/instrumentors/crewai_instrumentor.py +311 -0
  30. genai_otel/instrumentors/dspy_instrumentor.py +661 -0
  31. genai_otel/instrumentors/google_ai_instrumentor.py +310 -0
  32. genai_otel/instrumentors/groq_instrumentor.py +106 -0
  33. genai_otel/instrumentors/guardrails_ai_instrumentor.py +510 -0
  34. genai_otel/instrumentors/haystack_instrumentor.py +503 -0
  35. genai_otel/instrumentors/huggingface_instrumentor.py +399 -0
  36. genai_otel/instrumentors/hyperbolic_instrumentor.py +236 -0
  37. genai_otel/instrumentors/instructor_instrumentor.py +425 -0
  38. genai_otel/instrumentors/langchain_instrumentor.py +340 -0
  39. genai_otel/instrumentors/langgraph_instrumentor.py +328 -0
  40. genai_otel/instrumentors/llamaindex_instrumentor.py +36 -0
  41. genai_otel/instrumentors/mistralai_instrumentor.py +315 -0
  42. genai_otel/instrumentors/ollama_instrumentor.py +197 -0
  43. genai_otel/instrumentors/ollama_server_metrics_poller.py +336 -0
  44. genai_otel/instrumentors/openai_agents_instrumentor.py +291 -0
  45. genai_otel/instrumentors/openai_instrumentor.py +260 -0
  46. genai_otel/instrumentors/pydantic_ai_instrumentor.py +362 -0
  47. genai_otel/instrumentors/replicate_instrumentor.py +87 -0
  48. genai_otel/instrumentors/sambanova_instrumentor.py +196 -0
  49. genai_otel/instrumentors/togetherai_instrumentor.py +146 -0
  50. genai_otel/instrumentors/vertexai_instrumentor.py +106 -0
  51. genai_otel/llm_pricing.json +1676 -0
  52. genai_otel/logging_config.py +45 -0
  53. genai_otel/mcp_instrumentors/__init__.py +14 -0
  54. genai_otel/mcp_instrumentors/api_instrumentor.py +144 -0
  55. genai_otel/mcp_instrumentors/base.py +105 -0
  56. genai_otel/mcp_instrumentors/database_instrumentor.py +336 -0
  57. genai_otel/mcp_instrumentors/kafka_instrumentor.py +31 -0
  58. genai_otel/mcp_instrumentors/manager.py +139 -0
  59. genai_otel/mcp_instrumentors/redis_instrumentor.py +31 -0
  60. genai_otel/mcp_instrumentors/vector_db_instrumentor.py +265 -0
  61. genai_otel/metrics.py +148 -0
  62. genai_otel/py.typed +2 -0
  63. genai_otel/server_metrics.py +197 -0
  64. genai_otel_instrument-0.1.24.dist-info/METADATA +1404 -0
  65. genai_otel_instrument-0.1.24.dist-info/RECORD +69 -0
  66. genai_otel_instrument-0.1.24.dist-info/WHEEL +5 -0
  67. genai_otel_instrument-0.1.24.dist-info/entry_points.txt +2 -0
  68. genai_otel_instrument-0.1.24.dist-info/licenses/LICENSE +680 -0
  69. genai_otel_instrument-0.1.24.dist-info/top_level.txt +1 -0
@@ -0,0 +1,45 @@
1
+ """Centralized logging configuration"""
2
+
3
+ import logging
4
+ import os
5
+ import sys
6
+ from logging.handlers import RotatingFileHandler
7
+ from typing import Optional
8
+
9
+
10
+ def setup_logging(
11
+ level: Optional[str] = None, log_dir: str = "logs", log_file_name: str = "genai_otel.log"
12
+ ):
13
+ """Configure logging for the library with configurable log level via environment variable
14
+ and log rotation.
15
+ """
16
+ # Determine log level from environment variable or default to INFO
17
+ env_log_level = os.environ.get("GENAI_OTEL_LOG_LEVEL")
18
+ log_level_str = level or env_log_level or "INFO"
19
+ log_level = getattr(logging, log_level_str.upper(), logging.INFO)
20
+
21
+ # Create logs directory if it doesn't exist
22
+ os.makedirs(log_dir, exist_ok=True)
23
+ log_file_path = os.path.join(log_dir, log_file_name)
24
+
25
+ # Setup handlers
26
+ handlers = [logging.StreamHandler(sys.stdout)]
27
+
28
+ # Add rotating file handler
29
+ file_handler = RotatingFileHandler(log_file_path, maxBytes=10 * 1024 * 1024, backupCount=10)
30
+ handlers.append(file_handler)
31
+
32
+ # Set library logger
33
+ logger = logging.getLogger("genai_otel")
34
+ logger.setLevel(log_level)
35
+
36
+ # Clear existing handlers to prevent duplicates in case of multiple calls
37
+ if logger.handlers:
38
+ for handler in logger.handlers:
39
+ handler.close()
40
+ logger.handlers = []
41
+
42
+ for handler in handlers:
43
+ logger.addHandler(handler)
44
+
45
+ return logger
@@ -0,0 +1,14 @@
1
+ """Module for OpenTelemetry instrumentors for Model Context Protocol (MCP) tools.
2
+
3
+ This package contains individual instrumentor classes for various MCP tools,
4
+ including databases, caching layers, message queues, vector databases, and
5
+ generic API clients, enabling automatic tracing and metric collection of their operations.
6
+ """
7
+
8
+ # pylint: disable=R0801
9
+ import httpx
10
+
11
+ from .base import BaseMCPInstrumentor
12
+ from .manager import MCPInstrumentorManager
13
+
14
+ __all__ = ["BaseMCPInstrumentor", "MCPInstrumentorManager"]
@@ -0,0 +1,144 @@
1
+ """OpenTelemetry instrumentor for generic API calls.
2
+
3
+ This module provides the `APIInstrumentor` class, which automatically traces
4
+ HTTP requests made using popular libraries like `requests` and `httpx`.
5
+ It enriches spans with relevant attributes, including detected GenAI system
6
+ information based on the URL.
7
+ """
8
+
9
+ import logging
10
+ from typing import Any, Dict, Optional
11
+ from urllib.parse import urlparse
12
+
13
+ import httpx
14
+ import requests
15
+ import wrapt
16
+
17
+ from genai_otel.instrumentors.base import BaseInstrumentor
18
+
19
+ from ..config import OTelConfig
20
+
21
+ logger = logging.getLogger(__name__)
22
+
23
+
24
class APIInstrumentor(BaseInstrumentor):
    """Instrument generic HTTP API calls, adding GenAI-specific attributes.

    This instrumentor targets the `httpx` client library (the `requests`
    library is deliberately NOT wrapped — see `instrument()`). Spans are
    enriched with HTTP attributes and, when the hostname identifies a known
    provider, a `gen_ai.system` attribute.
    """

    # Hostname suffix -> gen_ai.system value. Matching the parsed hostname
    # (instead of substring-matching the whole URL) avoids false positives
    # such as "https://openai.com.evil.example/" or "?ref=google.com".
    _GENAI_HOSTS = (
        ("openai.com", "openai"),
        ("anthropic.com", "anthropic"),
        ("google.com", "google"),
    )

    def __init__(self, config: OTelConfig):
        """Initializes the APIInstrumentor.

        Args:
            config (OTelConfig): The OpenTelemetry configuration object.
        """
        super().__init__()  # Initialize BaseInstrumentor
        self.config = config

    def instrument(self, config: OTelConfig):
        """Instrument the httpx library for API calls.

        Applies a wrapper to `httpx.Client.request` to capture API call
        details.

        CRITICAL: the `requests` library is intentionally NOT wrapped —
        OTLP HTTP exporters use `requests.Session` internally, and wrapping
        it breaks span export.

        Args:
            config (OTelConfig): The OpenTelemetry configuration object.
        """
        self.config = config  # Store the config

        logger.warning(
            "requests library instrumentation disabled to prevent OTLP exporter conflicts"
        )

        try:
            # Wrap httpx.Client.request
            wrapt.wrap_function_wrapper(httpx.Client, "request", self._wrap_api_call)
            logger.info("httpx library instrumented for API calls.")
        except ImportError:
            logger.debug("httpx library not found, skipping instrumentation.")
        except Exception as e:
            logger.error("Failed to instrument httpx library: %s", e, exc_info=True)
            if self.config.fail_on_error:
                raise

    @staticmethod
    def _extract_method_and_url(args, kwargs):
        """Return ``(METHOD, url-or-None)`` from the wrapped call's arguments.

        httpx may pass `httpx.URL` objects rather than plain strings, so the
        URL is always coerced to `str` before any parsing or matching
        (substring checks on a non-str URL raise TypeError otherwise).
        """
        method = kwargs.get("method", args[0] if args else "unknown")
        url = kwargs.get("url", args[1] if len(args) > 1 else None)
        return str(method).upper(), (str(url) if url is not None else None)

    def _wrap_api_call(self, wrapped, instance, args, kwargs):
        """Wrapper function for API calls using create_span_wrapper.

        Prepares the span name from the method/host and delegates to
        `create_span_wrapper`.
        """
        method, url = self._extract_method_and_url(args, kwargs)
        span_name = f"api.call.{method.lower()}"
        if url:
            try:
                parsed_url = urlparse(url)
                # Only append the hostname when one was actually parsed,
                # so we never emit names like "api.call.get.None".
                if parsed_url.hostname:
                    span_name = f"api.call.{method.lower()}.{parsed_url.hostname}"
            except Exception:
                pass  # Keep default span name if URL parsing fails

        instrumented_call = self.create_span_wrapper(
            span_name=span_name, extract_attributes=self._extract_api_attributes
        )
        return instrumented_call(wrapped, instance, args, kwargs)

    def _extract_api_attributes(
        self, instance: Any, args: Any, kwargs: Any
    ) -> Dict[str, Any]:  # pylint: disable=W0613
        """Extract attributes from API call arguments for OpenTelemetry spans.

        Args:
            instance: The instance the method is called on (e.g., httpx.Client).
            args: Positional arguments passed to the method.
            kwargs: Keyword arguments passed to the method.

        Returns:
            Dict[str, Any]: A dictionary of attributes to be set on the span.
        """
        attrs: Dict[str, Any] = {}
        method, url = self._extract_method_and_url(args, kwargs)

        if url:
            try:
                parsed_url = urlparse(url)
                if parsed_url.hostname:
                    attrs["net.peer.name"] = parsed_url.hostname
                    # Detect well-known GenAI providers from the hostname only.
                    hostname = parsed_url.hostname.lower()
                    for suffix, system in self._GENAI_HOSTS:
                        if hostname == suffix or hostname.endswith("." + suffix):
                            attrs["gen_ai.system"] = system
                            break
                attrs["url.full"] = url
                attrs["http.method"] = method
            except Exception as e:
                logger.warning("Failed to parse URL '%s' for attributes: %s", url, e)

        return attrs

    def _extract_usage(self, result) -> Optional[Dict[str, int]]:  # pylint: disable=W0613
        """API calls typically don't have direct token usage like LLMs.

        Part of the BaseInstrumentor interface; generic API calls have no
        token-usage concept, so this always returns None.
        """
        return None
@@ -0,0 +1,105 @@
1
+ """Base class for MCP instrumentors with shared metrics.
2
+
3
+ This module provides the `BaseMCPInstrumentor` class which creates and manages
4
+ MCP-specific metrics (requests, duration, payload sizes) that are shared across
5
+ all MCP instrumentors (databases, APIs, vector DBs, etc.).
6
+ """
7
+
8
+ import logging
9
+ from typing import Optional
10
+
11
+ from opentelemetry import metrics
12
+ from opentelemetry.metrics import Counter, Histogram
13
+
14
+ logger = logging.getLogger(__name__)
15
+
16
# Import semantic conventions (metric names) from openlit when present.
try:
    from openlit.semcov import SemanticConvention as SC
except ImportError:
    # Fallback if openlit not available: a minimal stand-in exposing only the
    # MCP metric names used by this package, mirroring openlit's constants.
    class SC:
        # Counter: number of MCP requests issued.
        MCP_REQUESTS = "mcp.requests"
        # Histogram: client-side operation duration (seconds).
        MCP_CLIENT_OPERATION_DURATION_METRIC = "mcp.client.operation.duration"
        # Histogram: request payload size (bytes).
        MCP_REQUEST_SIZE = "mcp.request.size"
        # Histogram: response payload size (bytes).
        MCP_RESPONSE_SIZE_METRIC = "mcp.response.size"
26
+
27
+
28
class BaseMCPInstrumentor:
    """Base class for MCP instrumentors with shared metrics.

    Provides MCP-specific metrics used by all MCP instrumentors (databases,
    APIs, vector DBs, etc.) to track requests, operation duration, and
    payload sizes. The instruments are created once at class level and shared
    by every instance so all instrumentors report into the same streams.

    Metrics:
        - mcp.requests: Counter for number of MCP requests
        - mcp.client.operation.duration: Histogram for operation duration in seconds
        - mcp.request.size: Histogram for request payload size in bytes
        - mcp.response.size: Histogram for response payload size in bytes
    """

    # Class-level shared metrics (created once, shared by all instances)
    _shared_request_counter: Optional[Counter] = None
    _shared_duration_histogram: Optional[Histogram] = None
    _shared_request_size_histogram: Optional[Histogram] = None
    _shared_response_size_histogram: Optional[Histogram] = None
    _metrics_initialized = False

    def __init__(self):
        """Initialize BaseMCPInstrumentor and create shared metrics if needed."""
        if not BaseMCPInstrumentor._metrics_initialized:
            self._create_shared_metrics()

        # Instance references to shared metrics. These may be None when metric
        # creation failed; subclasses must treat them as optional.
        self.mcp_request_counter = BaseMCPInstrumentor._shared_request_counter
        self.mcp_duration_histogram = BaseMCPInstrumentor._shared_duration_histogram
        self.mcp_request_size_histogram = BaseMCPInstrumentor._shared_request_size_histogram
        self.mcp_response_size_histogram = BaseMCPInstrumentor._shared_response_size_histogram

    @classmethod
    def _create_shared_metrics(cls):
        """Create shared MCP metrics once at class level.

        On failure every shared instrument is reset to None and
        ``_metrics_initialized`` stays False, so a later instance retries.
        """
        if cls._metrics_initialized:
            return

        try:
            meter = metrics.get_meter(__name__)

            # MCP request counter
            cls._shared_request_counter = meter.create_counter(
                SC.MCP_REQUESTS,
                description="Number of MCP requests",
                unit="1",
            )

            # MCP operation duration histogram
            cls._shared_duration_histogram = meter.create_histogram(
                SC.MCP_CLIENT_OPERATION_DURATION_METRIC,
                description="MCP operation duration",
                unit="s",
            )

            # MCP request size histogram
            cls._shared_request_size_histogram = meter.create_histogram(
                SC.MCP_REQUEST_SIZE,
                description="MCP request payload size",
                unit="By",
            )

            # MCP response size histogram
            cls._shared_response_size_histogram = meter.create_histogram(
                SC.MCP_RESPONSE_SIZE_METRIC,
                description="MCP response payload size",
                unit="By",
            )

            cls._metrics_initialized = True
            logger.debug("MCP shared metrics created successfully")

        except Exception as e:
            # Lazy %-formatting, consistent with the rest of the package.
            logger.warning("Failed to create MCP shared metrics: %s", e)
            # Set to None if creation fails
            cls._shared_request_counter = None
            cls._shared_duration_histogram = None
            cls._shared_request_size_histogram = None
            cls._shared_response_size_histogram = None
@@ -0,0 +1,336 @@
1
+ """OpenTelemetry instrumentor for various database clients.
2
+
3
+ This module provides the `DatabaseInstrumentor` class, which automatically
4
+ instruments popular Python database libraries such as SQLAlchemy, psycopg2,
5
+ pymongo, and mysql, enabling tracing of database operations within GenAI applications.
6
+
7
+ This instrumentor uses a hybrid approach:
8
+ 1. Built-in OTel instrumentors create spans with full trace context
9
+ 2. Custom wrapt wrappers add MCP-specific metrics (duration, payload sizes)
10
+ """
11
+
12
+ import json
13
+ import logging
14
+ import time
15
+
16
+ import wrapt
17
+ from opentelemetry.instrumentation.mysql import MySQLInstrumentor
18
+ from opentelemetry.instrumentation.psycopg2 import Psycopg2Instrumentor
19
+ from opentelemetry.instrumentation.pymongo import PymongoInstrumentor
20
+ from opentelemetry.instrumentation.sqlalchemy import SQLAlchemyInstrumentor
21
+
22
+ from ..config import OTelConfig
23
+ from .base import BaseMCPInstrumentor
24
+
25
+ logger = logging.getLogger(__name__)
26
+
27
# Conditional imports for database libraries. Each optional client is probed
# independently; the *_AVAILABLE flags gate the custom MCP metrics wrappers
# applied in DatabaseInstrumentor.instrument().
try:
    import psycopg2
    from psycopg2.extensions import cursor as Psycopg2Cursor

    PSYCOPG2_AVAILABLE = True
except ImportError:
    # psycopg2 not installed; PostgreSQL metrics wrapping is skipped.
    psycopg2 = None
    Psycopg2Cursor = None
    PSYCOPG2_AVAILABLE = False

try:
    import pymongo
    from pymongo.collection import Collection as PymongoCollection

    PYMONGO_AVAILABLE = True
except ImportError:
    # pymongo not installed; MongoDB metrics wrapping is skipped.
    pymongo = None
    PymongoCollection = None
    PYMONGO_AVAILABLE = False

try:
    import mysql.connector
    from mysql.connector.cursor import MySQLCursor

    MYSQL_AVAILABLE = True
except ImportError:
    # mysql-connector not installed; MySQL metrics wrapping is skipped.
    mysql = None
    MySQLCursor = None
    MYSQL_AVAILABLE = False
57
+
58
+
59
class DatabaseInstrumentor(BaseMCPInstrumentor):  # pylint: disable=R0903
    """Instrument various database clients with traces and MCP metrics.

    Uses a hybrid approach:
    - Built-in OTel instrumentors create spans with full trace context
    - Custom wrapt wrappers add MCP-specific metrics (requests, duration,
      payload sizes) on top of those spans
    """

    def __init__(self, config: OTelConfig):
        """Initialize shared MCP metrics (via the base class) and store config.

        Args:
            config (OTelConfig): The OpenTelemetry configuration object.
        """
        super().__init__()
        self.config = config

    def instrument(self):
        """Instrument all detected database libraries with traces and MCP metrics.

        Uses hybrid approach:
        1. Built-in OTel instrumentors for spans/traces
        2. Custom wrappers for MCP metrics (duration, payload sizes)

        Returns:
            int: Number of built-in instrumentors successfully enabled.
        """
        instrumented_count = 0

        # Step 1: Use built-in instrumentors for traces/spans
        # SQLAlchemy
        try:
            SQLAlchemyInstrumentor().instrument()
            logger.info("SQLAlchemy instrumentation enabled")
            instrumented_count += 1
        except ImportError:
            logger.debug("SQLAlchemy not installed, skipping instrumentation.")
        except Exception as e:
            logger.warning("SQLAlchemy instrumentation failed: %s", e)

        # PostgreSQL (psycopg2)
        try:
            Psycopg2Instrumentor().instrument()
            logger.info("PostgreSQL (psycopg2) instrumentation enabled")
            instrumented_count += 1
        except ImportError:
            logger.debug("Psycopg2 not installed, skipping instrumentation.")
        except Exception as e:
            logger.warning("PostgreSQL instrumentation failed: %s", e)

        # MongoDB
        try:
            PymongoInstrumentor().instrument()
            logger.info("MongoDB instrumentation enabled")
            instrumented_count += 1
        except ImportError:
            logger.debug("Pymongo not installed, skipping instrumentation.")
        except Exception as e:
            logger.warning("MongoDB instrumentation failed: %s", e)

        # MySQL
        try:
            MySQLInstrumentor().instrument()
            logger.info("MySQL instrumentation enabled")
            instrumented_count += 1
        except ImportError:
            logger.debug("MySQL-python not installed, skipping instrumentation.")
        except Exception as e:
            logger.warning("MySQL instrumentation failed: %s", e)

        # Step 2: Add custom MCP metrics wrappers, but only when metric
        # creation succeeded in the base class.
        if self.mcp_request_counter is not None:
            if PSYCOPG2_AVAILABLE:
                self._add_psycopg2_metrics()
            if PYMONGO_AVAILABLE:
                self._add_pymongo_metrics()
            if MYSQL_AVAILABLE:
                self._add_mysql_metrics()

        return instrumented_count

    def _add_psycopg2_metrics(self):
        """Add MCP metrics collection to psycopg2 cursor execute methods."""
        try:
            # Wrap psycopg2 cursor execute methods
            if hasattr(Psycopg2Cursor, "execute"):
                wrapt.wrap_function_wrapper(
                    "psycopg2.extensions", "cursor.execute", self._db_execute_wrapper("psycopg2")
                )
            if hasattr(Psycopg2Cursor, "executemany"):
                wrapt.wrap_function_wrapper(
                    "psycopg2.extensions",
                    "cursor.executemany",
                    self._db_execute_wrapper("psycopg2"),
                )
            logger.debug("PostgreSQL MCP metrics enabled")
        except Exception as e:
            logger.debug("Failed to add PostgreSQL MCP metrics: %s", e)

    def _add_pymongo_metrics(self):
        """Add MCP metrics collection to pymongo collection methods."""
        try:
            # Wrap common pymongo collection methods
            methods_to_wrap = [
                "find",
                "find_one",
                "insert_one",
                "insert_many",
                "update_one",
                "update_many",
                "delete_one",
                "delete_many",
                "count_documents",
                "aggregate",
            ]
            for method_name in methods_to_wrap:
                if hasattr(PymongoCollection, method_name):
                    wrapt.wrap_function_wrapper(
                        "pymongo.collection",
                        f"Collection.{method_name}",
                        self._db_operation_wrapper("pymongo", method_name),
                    )
            logger.debug("MongoDB MCP metrics enabled")
        except Exception as e:
            logger.debug("Failed to add MongoDB MCP metrics: %s", e)

    def _add_mysql_metrics(self):
        """Add MCP metrics collection to MySQL cursor execute methods."""
        try:
            # Wrap MySQL cursor execute methods
            if hasattr(MySQLCursor, "execute"):
                wrapt.wrap_function_wrapper(
                    "mysql.connector.cursor",
                    "MySQLCursor.execute",
                    self._db_execute_wrapper("mysql"),
                )
            if hasattr(MySQLCursor, "executemany"):
                wrapt.wrap_function_wrapper(
                    "mysql.connector.cursor",
                    "MySQLCursor.executemany",
                    self._db_execute_wrapper("mysql"),
                )
            logger.debug("MySQL MCP metrics enabled")
        except Exception as e:
            logger.debug("Failed to add MySQL MCP metrics: %s", e)

    def _db_execute_wrapper(self, db_system: str):
        """Create a wrapper for database execute methods that records MCP metrics.

        Args:
            db_system: Name of the database system (e.g., "psycopg2", "mysql")

        Returns:
            Wrapper function compatible with wrapt
        """

        def wrapper(wrapped, instance, args, kwargs):
            start_time = time.time()
            try:
                return wrapped(*args, **kwargs)
            finally:
                # Metrics are recorded even when the query raises, so failed
                # statements still count toward request totals and duration.
                duration = time.time() - start_time
                if self.mcp_duration_histogram:
                    self.mcp_duration_histogram.record(
                        duration, {"db.system": db_system, "mcp.operation": "execute"}
                    )

                # Record request count
                if self.mcp_request_counter:
                    self.mcp_request_counter.add(
                        1, {"db.system": db_system, "mcp.operation": "execute"}
                    )

                # Estimate request size (query + params); best-effort only.
                try:
                    query = args[0] if args else ""
                    params = (
                        args[1] if len(args) > 1 else kwargs.get("vars") or kwargs.get("params")
                    )
                    request_size = len(str(query))
                    if params:
                        try:
                            request_size += len(json.dumps(params, default=str))
                        except (TypeError, ValueError):
                            request_size += len(str(params))

                    if self.mcp_request_size_histogram:
                        self.mcp_request_size_histogram.record(
                            request_size, {"db.system": db_system}
                        )

                    # Estimate response size from rowcount
                    if hasattr(instance, "rowcount") and instance.rowcount > 0:
                        # Rough estimate: 100 bytes per row
                        response_size = instance.rowcount * 100
                        if self.mcp_response_size_histogram:
                            self.mcp_response_size_histogram.record(
                                response_size, {"db.system": db_system}
                            )
                except Exception as e:
                    logger.debug("Failed to record payload size for %s: %s", db_system, e)

        return wrapper

    def _db_operation_wrapper(self, db_system: str, operation: str):
        """Create a wrapper for database operations that records MCP metrics.

        Args:
            db_system: Name of the database system (e.g., "pymongo")
            operation: Name of the operation (e.g., "find", "insert_one")

        Returns:
            Wrapper function compatible with wrapt
        """

        def wrapper(wrapped, instance, args, kwargs):
            start_time = time.time()
            # Initialize so the finally-block can safely reference `result`
            # when `wrapped` raises (previously an UnboundLocalError was
            # silently swallowed there and the size metrics were skipped).
            result = None
            try:
                result = wrapped(*args, **kwargs)
                return result
            finally:
                # Record duration
                duration = time.time() - start_time
                if self.mcp_duration_histogram:
                    self.mcp_duration_histogram.record(
                        duration, {"db.system": db_system, "mcp.operation": operation}
                    )

                # Record request count
                if self.mcp_request_counter:
                    self.mcp_request_counter.add(
                        1, {"db.system": db_system, "mcp.operation": operation}
                    )

                # Estimate payload sizes; best-effort only.
                try:
                    # Request size: serialize args and kwargs
                    request_size = 0
                    if args:
                        for arg in args:
                            if arg is not None:
                                try:
                                    request_size += len(json.dumps(arg, default=str))
                                except (TypeError, ValueError):
                                    request_size += len(str(arg))
                    if kwargs:
                        for val in kwargs.values():
                            if val is not None:
                                try:
                                    request_size += len(json.dumps(val, default=str))
                                except (TypeError, ValueError):
                                    request_size += len(str(val))

                    if self.mcp_request_size_histogram and request_size > 0:
                        self.mcp_request_size_histogram.record(
                            request_size, {"db.system": db_system, "mcp.operation": operation}
                        )

                    # Response size: estimate based on result type
                    response_size = 0
                    if result is not None:
                        if isinstance(result, dict):
                            try:
                                response_size = len(json.dumps(result, default=str))
                            except (TypeError, ValueError):
                                response_size = len(str(result))
                        elif isinstance(result, (list, tuple)):
                            response_size = len(result) * 100  # Estimate 100 bytes per item
                        elif isinstance(result, int):
                            response_size = 8  # Integer size
                        elif hasattr(result, "inserted_ids"):
                            response_size = len(str(result.inserted_ids))
                        elif hasattr(result, "matched_count"):
                            response_size = 8

                    if self.mcp_response_size_histogram and response_size > 0:
                        self.mcp_response_size_histogram.record(
                            response_size, {"db.system": db_system, "mcp.operation": operation}
                        )
                except Exception as e:
                    logger.debug(
                        "Failed to record payload size for %s.%s: %s", db_system, operation, e
                    )

        return wrapper
@@ -0,0 +1,31 @@
1
+ """OpenTelemetry instrumentor for Apache Kafka clients.
2
+
3
+ This module provides the `KafkaInstrumentor` class, which automatically
4
+ instruments Kafka producers and consumers, enabling tracing of message
5
+ queue operations within GenAI applications.
6
+ """
7
+
8
+ import logging
9
+
10
+ from opentelemetry.instrumentation.kafka import KafkaInstrumentor as OTelKafkaInstrumentor
11
+
12
+ from ..config import OTelConfig
13
+
14
+ logger = logging.getLogger(__name__)
15
+
16
+
17
class KafkaInstrumentor:  # pylint: disable=R0903
    """Instrument Kafka producers and consumers"""

    def __init__(self, config: OTelConfig):
        """Store the OpenTelemetry configuration.

        Args:
            config (OTelConfig): The OpenTelemetry configuration object.
        """
        self.config = config

    def instrument(self):
        """Instrument Kafka via the built-in OTel instrumentor.

        Failures are logged and swallowed: a missing kafka-python package is
        expected in many deployments and must not break application startup.
        """
        try:
            OTelKafkaInstrumentor().instrument()
            logger.info("Kafka instrumentation enabled")
        except ImportError:
            logger.debug("Kafka-python not installed, skipping instrumentation.")
        except Exception as e:
            # Lazy %-formatting, consistent with the rest of the package.
            logger.warning("Kafka instrumentation failed: %s", e)