genai-otel-instrument 0.1.24 (genai_otel_instrument-0.1.24-py3-none-any.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- genai_otel/__init__.py +132 -0
- genai_otel/__version__.py +34 -0
- genai_otel/auto_instrument.py +602 -0
- genai_otel/cli.py +92 -0
- genai_otel/config.py +333 -0
- genai_otel/cost_calculator.py +467 -0
- genai_otel/cost_enriching_exporter.py +207 -0
- genai_otel/cost_enrichment_processor.py +174 -0
- genai_otel/evaluation/__init__.py +76 -0
- genai_otel/evaluation/bias_detector.py +364 -0
- genai_otel/evaluation/config.py +261 -0
- genai_otel/evaluation/hallucination_detector.py +525 -0
- genai_otel/evaluation/pii_detector.py +356 -0
- genai_otel/evaluation/prompt_injection_detector.py +262 -0
- genai_otel/evaluation/restricted_topics_detector.py +316 -0
- genai_otel/evaluation/span_processor.py +962 -0
- genai_otel/evaluation/toxicity_detector.py +406 -0
- genai_otel/exceptions.py +17 -0
- genai_otel/gpu_metrics.py +516 -0
- genai_otel/instrumentors/__init__.py +71 -0
- genai_otel/instrumentors/anthropic_instrumentor.py +134 -0
- genai_otel/instrumentors/anyscale_instrumentor.py +27 -0
- genai_otel/instrumentors/autogen_instrumentor.py +394 -0
- genai_otel/instrumentors/aws_bedrock_instrumentor.py +94 -0
- genai_otel/instrumentors/azure_openai_instrumentor.py +69 -0
- genai_otel/instrumentors/base.py +919 -0
- genai_otel/instrumentors/bedrock_agents_instrumentor.py +398 -0
- genai_otel/instrumentors/cohere_instrumentor.py +140 -0
- genai_otel/instrumentors/crewai_instrumentor.py +311 -0
- genai_otel/instrumentors/dspy_instrumentor.py +661 -0
- genai_otel/instrumentors/google_ai_instrumentor.py +310 -0
- genai_otel/instrumentors/groq_instrumentor.py +106 -0
- genai_otel/instrumentors/guardrails_ai_instrumentor.py +510 -0
- genai_otel/instrumentors/haystack_instrumentor.py +503 -0
- genai_otel/instrumentors/huggingface_instrumentor.py +399 -0
- genai_otel/instrumentors/hyperbolic_instrumentor.py +236 -0
- genai_otel/instrumentors/instructor_instrumentor.py +425 -0
- genai_otel/instrumentors/langchain_instrumentor.py +340 -0
- genai_otel/instrumentors/langgraph_instrumentor.py +328 -0
- genai_otel/instrumentors/llamaindex_instrumentor.py +36 -0
- genai_otel/instrumentors/mistralai_instrumentor.py +315 -0
- genai_otel/instrumentors/ollama_instrumentor.py +197 -0
- genai_otel/instrumentors/ollama_server_metrics_poller.py +336 -0
- genai_otel/instrumentors/openai_agents_instrumentor.py +291 -0
- genai_otel/instrumentors/openai_instrumentor.py +260 -0
- genai_otel/instrumentors/pydantic_ai_instrumentor.py +362 -0
- genai_otel/instrumentors/replicate_instrumentor.py +87 -0
- genai_otel/instrumentors/sambanova_instrumentor.py +196 -0
- genai_otel/instrumentors/togetherai_instrumentor.py +146 -0
- genai_otel/instrumentors/vertexai_instrumentor.py +106 -0
- genai_otel/llm_pricing.json +1676 -0
- genai_otel/logging_config.py +45 -0
- genai_otel/mcp_instrumentors/__init__.py +14 -0
- genai_otel/mcp_instrumentors/api_instrumentor.py +144 -0
- genai_otel/mcp_instrumentors/base.py +105 -0
- genai_otel/mcp_instrumentors/database_instrumentor.py +336 -0
- genai_otel/mcp_instrumentors/kafka_instrumentor.py +31 -0
- genai_otel/mcp_instrumentors/manager.py +139 -0
- genai_otel/mcp_instrumentors/redis_instrumentor.py +31 -0
- genai_otel/mcp_instrumentors/vector_db_instrumentor.py +265 -0
- genai_otel/metrics.py +148 -0
- genai_otel/py.typed +2 -0
- genai_otel/server_metrics.py +197 -0
- genai_otel_instrument-0.1.24.dist-info/METADATA +1404 -0
- genai_otel_instrument-0.1.24.dist-info/RECORD +69 -0
- genai_otel_instrument-0.1.24.dist-info/WHEEL +5 -0
- genai_otel_instrument-0.1.24.dist-info/entry_points.txt +2 -0
- genai_otel_instrument-0.1.24.dist-info/licenses/LICENSE +680 -0
- genai_otel_instrument-0.1.24.dist-info/top_level.txt +1 -0
genai_otel/mcp_instrumentors/manager.py
ADDED
@@ -0,0 +1,139 @@
"""Manager for OpenTelemetry instrumentation of Model Context Protocol (MCP) tools.

This module provides the `MCPInstrumentorManager` class, which orchestrates
the automatic instrumentation of various MCP tools, including databases, caching
layers, message queues, vector databases, and generic API calls. It ensures
that these components are integrated into the OpenTelemetry tracing and metrics
system.
"""

import asyncio
import logging

import httpx
from opentelemetry.instrumentation.httpx import HTTPXClientInstrumentor
from opentelemetry.instrumentation.requests import RequestsInstrumentor

from ..config import OTelConfig
from .api_instrumentor import APIInstrumentor
from .database_instrumentor import DatabaseInstrumentor
from .kafka_instrumentor import KafkaInstrumentor
from .redis_instrumentor import RedisInstrumentor
from .vector_db_instrumentor import VectorDBInstrumentor

logger = logging.getLogger(__name__)


class MCPInstrumentorManager:  # pylint: disable=R0903
    """Manager for MCP (Model Context Protocol) tool instrumentation"""

    def __init__(self, config: OTelConfig):
        self.config = config
        self.instrumentors = []

    def instrument_all(self, fail_on_error: bool = False):  # pylint: disable=R0912, R0915
        """Instrument all detected MCP tools"""

        success_count = 0
        failure_count = 0

        # HTTP/API instrumentation (disabled by default to avoid conflicts)
        if self.config.enable_http_instrumentation:
            try:
                logger.info("Instrumenting HTTP/API calls")
                # CRITICAL: Do NOT instrument requests library when using OTLP HTTP exporters
                # RequestsInstrumentor patches requests.Session at class level, breaking OTLP exporters
                # that use requests internally. The OTEL_PYTHON_REQUESTS_EXCLUDED_URLS doesn't help
                # because it only works at request-time, not at instrumentation-time.
                #
                # TODO: Find a way to instrument user requests without breaking OTLP exporters
                # RequestsInstrumentor().instrument()

                logger.warning(
                    "Requests library instrumentation is disabled to prevent conflicts with OTLP exporters"
                )

                # HTTPx is safe to instrument
                HTTPXClientInstrumentor().instrument()
                api_instrumentor = APIInstrumentor(self.config)
                api_instrumentor.instrument(self.config)
                logger.info("✓ HTTP/API instrumentation enabled (requests library excluded)")
                success_count += 1
            except ImportError as e:
                failure_count += 1
                logger.debug(f"✗ HTTP/API instrumentation skipped due to missing dependency: {e}")
            except Exception as e:
                failure_count += 1
                logger.error(f"✗ Failed to instrument HTTP/API: {e}", exc_info=True)
                if fail_on_error:
                    raise
        else:
            logger.info("HTTP/API instrumentation disabled (enable_http_instrumentation=False)")

        # Database instrumentation
        try:
            logger.info("Instrumenting databases")
            db_instrumentor = DatabaseInstrumentor(self.config)
            result = db_instrumentor.instrument()
            if result > 0:
                success_count += 1
                logger.info(f"✓ Database instrumentation enabled ({result} databases)")
        except ImportError as e:
            failure_count += 1
            logger.debug(f"✗ Database instrumentation skipped due to missing dependency: {e}")
        except Exception as e:
            failure_count += 1
            logger.error(f"✗ Failed to instrument databases: {e}", exc_info=True)
            if fail_on_error:
                raise

        # Redis instrumentation
        try:
            logger.info("Instrumenting Redis")
            redis_instrumentor = RedisInstrumentor(self.config)
            redis_instrumentor.instrument()
            success_count += 1
        except ImportError as e:
            failure_count += 1
            logger.debug(f"✗ Redis instrumentation skipped due to missing dependency: {e}")
        except Exception as e:
            failure_count += 1
            logger.error(f"✗ Failed to instrument Redis: {e}", exc_info=True)
            if fail_on_error:
                raise

        # Kafka instrumentation
        try:
            logger.info("Instrumenting Kafka")
            kafka_instrumentor = KafkaInstrumentor(self.config)
            kafka_instrumentor.instrument()
            success_count += 1
        except ImportError as e:
            failure_count += 1
            logger.debug(f"✗ Kafka instrumentation skipped due to missing dependency: {e}")
        except Exception as e:
            failure_count += 1
            logger.error(f"✗ Failed to instrument Kafka: {e}", exc_info=True)
            if fail_on_error:
                raise

        # Vector DB instrumentation
        try:
            logger.info("Instrumenting Vector DBs")
            vector_db_instrumentor = VectorDBInstrumentor(self.config)
            result = vector_db_instrumentor.instrument()
            if result > 0:
                success_count += 1
                logger.info(f"✓ Vector DB instrumentation enabled ({result} databases)")
        except ImportError as e:
            failure_count += 1
            logger.debug(f"✗ Vector DB instrumentation skipped due to missing dependency: {e}")
        except Exception as e:
            failure_count += 1
            logger.error(f"✗ Failed to instrument Vector DBs: {e}", exc_info=True)
            if fail_on_error:
                raise

        logger.info(
            f"MCP instrumentation summary: {success_count} succeeded, " f"{failure_count} failed"
        )
genai_otel/mcp_instrumentors/redis_instrumentor.py
ADDED
@@ -0,0 +1,31 @@
"""OpenTelemetry instrumentor for Redis clients.

This module provides the `RedisInstrumentor` class, which automatically
instruments Redis operations, enabling tracing of caching interactions
within GenAI applications.
"""

import logging

from opentelemetry.instrumentation.redis import RedisInstrumentor as OTelRedisInstrumentor

from ..config import OTelConfig

logger = logging.getLogger(__name__)


class RedisInstrumentor:  # pylint: disable=R0903
    """Instrument Redis clients"""

    def __init__(self, config: OTelConfig):
        self.config = config

    def instrument(self):
        """Instrument Redis"""
        try:
            OTelRedisInstrumentor().instrument()
            logger.info("Redis instrumentation enabled")
        except ImportError:
            logger.debug("Redis-py not installed, skipping instrumentation.")
        except Exception as e:
            logger.warning(f"Redis instrumentation failed: {e}")
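Once this wrapper has run, the upstream OpenTelemetry Redis instrumentation traces ordinary redis-py calls. A rough sketch, assuming redis-py is installed, a local Redis server is reachable, and `config` is an OTelConfig instance:

# Sketch only: requires redis-py and a running Redis server.
import redis
from genai_otel.mcp_instrumentors.redis_instrumentor import RedisInstrumentor

RedisInstrumentor(config).instrument()
client = redis.Redis(host="localhost", port=6379)
client.set("greeting", "hello")  # each command is recorded as a client span
client.get("greeting")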
genai_otel/mcp_instrumentors/vector_db_instrumentor.py
ADDED
@@ -0,0 +1,265 @@
"""OpenTelemetry instrumentor for various vector database clients.

This module provides the `VectorDBInstrumentor` class, which automatically
instruments popular Python vector database libraries such as Pinecone, Weaviate,
Qdrant, ChromaDB, Milvus, and FAISS, enabling tracing of vector search and
related operations within GenAI applications.
"""

import logging
from typing import Any, Dict, Optional

import wrapt
from opentelemetry import trace
from opentelemetry.trace import SpanKind, Status, StatusCode

from ..config import OTelConfig

logger = logging.getLogger(__name__)


class VectorDBInstrumentor:  # pylint: disable=R0903
    """Instrument vector database clients"""

    def __init__(self, config: OTelConfig):
        self.config = config
        self.tracer = trace.get_tracer(__name__)

    def instrument(self):
        """Instrument all detected vector DB libraries"""
        instrumented_count = 0
        if self._instrument_pinecone():
            instrumented_count += 1
        if self._instrument_weaviate():
            instrumented_count += 1
        if self._instrument_qdrant():
            instrumented_count += 1
        if self._instrument_chroma():
            instrumented_count += 1
        if self._instrument_milvus():
            instrumented_count += 1
        if self._instrument_faiss():
            instrumented_count += 1
        return instrumented_count

    def _instrument_pinecone(self):
        """Instrument Pinecone operations"""
        try:
            import pinecone

            # Check Pinecone version to handle API differences
            pinecone_version = getattr(pinecone, "__version__", "0.0.0")

            # Pinecone 3.0+ uses a different API structure
            if hasattr(pinecone, "Pinecone"):
                # New API (3.0+)
                logger.info("Detected Pinecone 3.0+ API")
                wrapt.wrap_function_wrapper(
                    "pinecone", "Pinecone.__init__", self._wrap_pinecone_init
                )

            elif hasattr(pinecone, "Index"):
                # Old API (2.x)
                logger.info("Detected Pinecone 2.x API")
                original_query = pinecone.Index.query
                original_upsert = pinecone.Index.upsert
                original_delete = pinecone.Index.delete

                pinecone.Index.query = self._wrap_pinecone_method(original_query, "pinecone.query")
                pinecone.Index.upsert = self._wrap_pinecone_method(
                    original_upsert, "pinecone.upsert"
                )
                pinecone.Index.delete = self._wrap_pinecone_method(
                    original_delete, "pinecone.delete"
                )
            else:
                logger.warning("Could not detect Pinecone API version. Skipping instrumentation.")
                return False

            logger.info("Pinecone instrumentation enabled")
            return True

        except ImportError:
            logger.info("Pinecone not installed, skipping instrumentation")
            return False
        except Exception as e:
            if "pinecone-client" in str(e) and "renamed" in str(e):
                logger.error(
                    "Failed to instrument Pinecone: %s. Please ensure only the `pinecone` package is installed (uninstall `pinecone-client` if present).",
                    e,
                )
            else:
                logger.error(f"Failed to instrument Pinecone: {e}", exc_info=True)
            return False

    def _wrap_pinecone_init(self, wrapped, instance, args, kwargs):
        """Wrapper for Pinecone.__init__ to instrument index methods."""
        result = wrapped(*args, **kwargs)
        if hasattr(instance, "Index"):
            original_index = instance.Index

            @wrapt.decorator
            def traced_index(wrapped_idx, idx_instance, idx_args, idx_kwargs):
                idx = wrapped_idx(*idx_args, **idx_kwargs)
                if hasattr(idx_instance, "query"):
                    idx_instance.query = self._wrap_pinecone_method(
                        idx_instance.query, "pinecone.index.query"
                    )
                if hasattr(idx_instance, "upsert"):
                    idx_instance.upsert = self._wrap_pinecone_method(
                        idx_instance.upsert, "pinecone.index.upsert"
                    )
                if hasattr(idx_instance, "delete"):
                    idx_instance.delete = self._wrap_pinecone_method(
                        idx_instance.delete, "pinecone.index.delete"
                    )
                return idx

            instance.Index = traced_index(original_index)
        return result

    def _wrap_pinecone_method(self, original_method, operation_name):
        """Wrap a Pinecone method with tracing"""

        def wrapper(*args, **kwargs):
            tracer = trace.get_tracer(__name__)
            with tracer.start_as_current_span(
                operation_name,
                kind=SpanKind.CLIENT,
                attributes={"db.system": "pinecone", "db.operation": operation_name.split(".")[-1]},
            ) as span:
                try:
                    result = original_method(*args, **kwargs)
                    span.set_status(Status(StatusCode.OK))
                    return result
                except Exception as e:
                    span.set_status(Status(StatusCode.ERROR, str(e)))
                    span.record_exception(e)
                    raise

        return wrapper

    def _instrument_weaviate(self):
        """Instrument Weaviate"""
        try:
            import weaviate

            @wrapt.decorator
            def wrapped_query(wrapped, instance, args, kwargs):  # pylint: disable=W0613
                with self.tracer.start_as_current_span("weaviate.query") as span:
                    span.set_attribute("db.system", "weaviate")
                    span.set_attribute("db.operation", "query")
                    result = wrapped(*args, **kwargs)
                    return result

            weaviate.Client.query = wrapped_query(weaviate.Client.query)  # pylint: disable=E1120
            logger.info("Weaviate instrumentation enabled")
            return True

        except ImportError:
            return False

    def _instrument_qdrant(self):
        """Instrument Qdrant"""
        try:
            from qdrant_client import QdrantClient

            original_search = QdrantClient.search

            def wrapped_search(instance, *args, **kwargs):
                with self.tracer.start_as_current_span("qdrant.search") as span:
                    span.set_attribute("db.system", "qdrant")
                    span.set_attribute("db.operation", "search")

                    collection = kwargs.get("collection_name", args[0] if args else "unknown")
                    span.set_attribute("vector.collection", collection)

                    limit = kwargs.get("limit", 10)
                    span.set_attribute("vector.limit", limit)

                    result = original_search(instance, *args, **kwargs)
                    return result

            QdrantClient.search = wrapped_search
            logger.info("Qdrant instrumentation enabled")
            return True

        except ImportError:
            return False

    def _instrument_chroma(self):
        """Instrument ChromaDB"""
        try:
            import chromadb

            original_query = chromadb.Collection.query

            def wrapped_query(instance, *args, **kwargs):
                with self.tracer.start_as_current_span("chroma.query") as span:
                    span.set_attribute("db.system", "chromadb")
                    span.set_attribute("db.operation", "query")
                    span.set_attribute("vector.collection", instance.name)

                    n_results = kwargs.get("n_results", 10)
                    span.set_attribute("vector.n_results", n_results)

                    result = original_query(instance, *args, **kwargs)
                    return result

            chromadb.Collection.query = wrapped_query
            logger.info("ChromaDB instrumentation enabled")
            return True

        except ImportError:
            return False

    def _instrument_milvus(self):
        """Instrument Milvus"""
        try:
            from pymilvus import Collection

            original_search = Collection.search

            def wrapped_search(instance, *args, **kwargs):
                with self.tracer.start_as_current_span("milvus.search") as span:
                    span.set_attribute("db.system", "milvus")
                    span.set_attribute("db.operation", "search")
                    span.set_attribute("vector.collection", instance.name)

                    limit = kwargs.get("limit", 10)
                    span.set_attribute("vector.limit", limit)

                    result = original_search(instance, *args, **kwargs)
                    return result

            Collection.search = wrapped_search
            logger.info("Milvus instrumentation enabled")
            return True

        except ImportError:
            return False

    def _instrument_faiss(self):
        """Instrument FAISS"""
        try:
            import faiss

            original_search = faiss.Index.search

            def wrapped_search(instance, *args, **kwargs):
                with self.tracer.start_as_current_span("faiss.search") as span:
                    span.set_attribute("db.system", "faiss")
                    span.set_attribute("db.operation", "search")

                    k = args[1] if len(args) > 1 else kwargs.get("k", 10)
                    span.set_attribute("vector.k", k)

                    result = original_search(instance, *args, **kwargs)
                    return result

            faiss.Index.search = wrapped_search
            logger.info("FAISS instrumentation enabled")
            return True

        except ImportError:
            return False
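The wrappers above create spans through the global tracer provider, so a provider has to be configured before `instrument()` runs for the spans to go anywhere. A rough sketch that prints spans to stdout and exercises the ChromaDB path (assumes chromadb is installed and `config` is an OTelConfig instance):

# Sketch: route spans to stdout, then trigger an instrumented Chroma query.
import chromadb
from opentelemetry import trace
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import ConsoleSpanExporter, SimpleSpanProcessor

provider = TracerProvider()
provider.add_span_processor(SimpleSpanProcessor(ConsoleSpanExporter()))
trace.set_tracer_provider(provider)

VectorDBInstrumentor(config).instrument()  # patches chromadb.Collection.query at class level

collection = chromadb.Client().create_collection("docs")
collection.add(ids=["1"], documents=["hello world"])
collection.query(query_texts=["hello"], n_results=1)  # emits a "chroma.query" span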
genai_otel/metrics.py
ADDED
@@ -0,0 +1,148 @@
# isort: skip_file
import logging
import os
from typing import Any, Dict, Optional, Tuple

from opentelemetry import metrics
from opentelemetry.metrics import Meter  # Import Meter here
from opentelemetry.sdk.metrics import MeterProvider
from opentelemetry.sdk.metrics._internal.export import MetricExporter
from opentelemetry.sdk.metrics.export import (
    ConsoleMetricExporter,
    PeriodicExportingMetricReader,
)
from opentelemetry.sdk.resources import (
    DEPLOYMENT_ENVIRONMENT,
    SERVICE_NAME,
    TELEMETRY_SDK_NAME,
    Resource,
)

logger = logging.getLogger(__name__)

# Correct the import for OTLP Metric Exporter
if os.environ.get("OTEL_EXPORTER_OTLP_PROTOCOL") == "grpc":
    from opentelemetry.exporter.otlp.proto.grpc.metric_exporter import (  # noqa: E402
        OTLPMetricExporter,
    )
else:
    from opentelemetry.exporter.otlp.proto.http.metric_exporter import (  # noqa: E402
        OTLPMetricExporter,
    )

# Global variables to hold the MeterProvider and Meter
_meter_provider: Optional[MeterProvider] = None
_meter: Optional[Meter] = None


def get_meter() -> Meter:
    """
    Returns the globally configured Meter.
    """
    return metrics.get_meter(__name__)


def get_meter_provider() -> MeterProvider:
    """
    Returns the globally configured MeterProvider.
    """
    return metrics.get_meter_provider()


_DB_CLIENT_OPERATION_DURATION_BUCKETS = [0.001, 0.005, 0.01, 0.05, 0.1, 0.5, 1, 5, 10]

_GEN_AI_CLIENT_OPERATION_DURATION_BUCKETS = [
    0.01,
    0.02,
    0.04,
    0.08,
    0.16,
    0.32,
    0.64,
    1.28,
    2.56,
    5.12,
    10.24,
    20.48,
    40.96,
    81.92,
]

_GEN_AI_SERVER_TBT = [
    0.01,
    0.025,
    0.05,
    0.075,
    0.1,
    0.15,
    0.2,
    0.3,
    0.4,
    0.5,
    0.75,
    1.0,
    2.5,
]

_GEN_AI_SERVER_TFTT = [
    0.001,
    0.005,
    0.01,
    0.02,
    0.04,
    0.06,
    0.08,
    0.1,
    0.25,
    0.5,
    0.75,
    1.0,
    2.5,
    5.0,
    7.5,
    10.0,
]

_GEN_AI_CLIENT_TOKEN_USAGE_BUCKETS = [
    1,
    4,
    16,
    64,
    256,
    1024,
    4096,
    16384,
    65536,
    262144,
    1048576,
    4194304,
    16777216,
    67108864,
]

# MCP-specific bucket boundaries for performance and size metrics
_MCP_CLIENT_OPERATION_DURATION_BUCKETS = [
    0.001,  # 1ms
    0.005,  # 5ms
    0.01,  # 10ms
    0.05,  # 50ms
    0.1,  # 100ms
    0.5,  # 500ms
    1.0,  # 1s
    2.0,  # 2s
    5.0,  # 5s
    10.0,  # 10s
]

_MCP_PAYLOAD_SIZE_BUCKETS = [
    100,  # 100 bytes
    500,  # 500 bytes
    1024,  # 1KB
    5120,  # 5KB
    10240,  # 10KB
    51200,  # 50KB
    102400,  # 100KB
    512000,  # 500KB
    1048576,  # 1MB
    5242880,  # 5MB
]
genai_otel/py.typed
ADDED