netra-sdk 0.1.3__py3-none-any.whl → 0.1.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of netra-sdk might be problematic.
- netra/decorators.py +13 -2
- netra/instrumentation/__init__.py +5 -16
- netra/processors/__init__.py +2 -2
- netra/processors/error_detection_processor.py +80 -0
- netra/processors/session_span_processor.py +6 -0
- netra/session.py +29 -53
- netra/session_manager.py +86 -1
- netra/tracer.py +2 -2
- netra/version.py +1 -1
- {netra_sdk-0.1.3.dist-info → netra_sdk-0.1.5.dist-info}/METADATA +7 -41
- {netra_sdk-0.1.3.dist-info → netra_sdk-0.1.5.dist-info}/RECORD +13 -13
- netra/processors/span_aggregation_processor.py +0 -365
- {netra_sdk-0.1.3.dist-info → netra_sdk-0.1.5.dist-info}/LICENCE +0 -0
- {netra_sdk-0.1.3.dist-info → netra_sdk-0.1.5.dist-info}/WHEEL +0 -0
netra/decorators.py
CHANGED
@@ -12,6 +12,7 @@ from typing import Any, Awaitable, Callable, Dict, Optional, ParamSpec, Tuple, T
 from opentelemetry import trace
 
 from .config import Config
+from .session_manager import SessionManager
 
 P = ParamSpec("P")
 R = TypeVar("R")
@@ -78,6 +79,9 @@ def _create_function_wrapper(func: Callable[P, R], entity_type: str, name: Optio
 
     @functools.wraps(func)
     async def async_wrapper(*args: Any, **kwargs: Any) -> Any:
+        # Push entity to stack before span starts so SessionSpanProcessor can capture it
+        SessionManager.push_entity(entity_type, span_name)
+
        tracer = trace.get_tracer(module_name)
        with tracer.start_as_current_span(span_name) as span:
            _add_span_attributes(span, func, args, kwargs, entity_type)
@@ -87,8 +91,10 @@ def _create_function_wrapper(func: Callable[P, R], entity_type: str, name: Optio
                return result
            except Exception as e:
                span.set_attribute(f"{Config.LIBRARY_NAME}.entity.error", str(e))
-                span.record_exception(e)
                raise
+            finally:
+                # Pop entity from stack after function call is done
+                SessionManager.pop_entity(entity_type)
 
     return cast(Callable[P, R], async_wrapper)
 
@@ -96,6 +102,9 @@ def _create_function_wrapper(func: Callable[P, R], entity_type: str, name: Optio
 
     @functools.wraps(func)
     def sync_wrapper(*args: Any, **kwargs: Any) -> Any:
+        # Push entity to stack before span starts so SessionSpanProcessor can capture it
+        SessionManager.push_entity(entity_type, span_name)
+
        tracer = trace.get_tracer(module_name)
        with tracer.start_as_current_span(span_name) as span:
            _add_span_attributes(span, func, args, kwargs, entity_type)
@@ -105,8 +114,10 @@ def _create_function_wrapper(func: Callable[P, R], entity_type: str, name: Optio
                return result
            except Exception as e:
                span.set_attribute(f"{Config.LIBRARY_NAME}.entity.error", str(e))
-                span.record_exception(e)
                raise
+            finally:
+                # Pop entity from stack after function call is done
+                SessionManager.pop_entity(entity_type)
 
     return cast(Callable[P, R], sync_wrapper)
 
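Net effect of the wrapper changes: every decorated call now pushes its entity onto the SessionManager stack before its span starts and is guaranteed to pop it afterwards. A minimal sketch of that contract, with an invented entity type and name:

SessionManager.push_entity("task", "fetch_documents")  # pushed before the span is started
try:
    # tracer.start_as_current_span(...) runs the wrapped function here
    ...
finally:
    SessionManager.pop_entity("task")  # always runs, even when the wrapped call raises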
netra/instrumentation/__init__.py
CHANGED

@@ -281,23 +281,12 @@ def init_fastapi_instrumentation() -> bool:
        bool: True if initialization was successful, False otherwise.
    """
    try:
-        if
-
-            from fastapi import FastAPI
+        if is_package_installed("fastapi"):
+            from opentelemetry.instrumentation.fastapi import FastAPIInstrumentor
 
-
-
-
-                original_init(self, *args, **kwargs)
-
-                try:
-                    from opentelemetry.instrumentation.fastapi import FastAPIInstrumentor
-
-                    FastAPIInstrumentor().instrument_app(self)
-                except Exception as e:
-                    logging.warning(f"Failed to auto-instrument FastAPI: {e}")
-
-            FastAPI.__init__ = _patched_init
+            instrumentor = FastAPIInstrumentor()
+            if not instrumentor.is_instrumented_by_opentelemetry:
+                instrumentor.instrument()
        return True
    except Exception as e:
        logging.error(f"Error initializing FastAPI instrumentor: {e}")
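The removed code monkey-patched FastAPI.__init__ to instrument each app instance on construction; the replacement instruments FastAPI globally and guards against double instrumentation. A short sketch of the new pattern, mirroring the added lines:

from opentelemetry.instrumentation.fastapi import FastAPIInstrumentor

instrumentor = FastAPIInstrumentor()
if not instrumentor.is_instrumented_by_opentelemetry:
    instrumentor.instrument()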
netra/processors/__init__.py
CHANGED
@@ -1,4 +1,4 @@
+from netra.processors.error_detection_processor import ErrorDetectionProcessor
 from netra.processors.session_span_processor import SessionSpanProcessor
-from netra.processors.span_aggregation_processor import SpanAggregationProcessor
 
-__all__ = ["
+__all__ = ["ErrorDetectionProcessor", "SessionSpanProcessor"]
netra/processors/error_detection_processor.py
ADDED

@@ -0,0 +1,80 @@
+import logging
+from typing import Any, Optional
+
+import httpx
+from opentelemetry.sdk.trace import SpanProcessor
+from opentelemetry.trace import Context, Span, Status, StatusCode
+
+from netra import Netra
+
+logger = logging.getLogger(__name__)
+
+
+class ErrorDetectionProcessor(SpanProcessor):  # type: ignore[misc]
+    """
+    OpenTelemetry span processor that monitors for error attributes in spans and creates custom events.
+    """
+
+    def __init__(self) -> None:
+        pass
+
+    def on_start(self, span: Span, parent_context: Optional[Context] = None) -> None:
+        """Called when a span starts."""
+        span_id = self._get_span_id(span)
+        if not span_id:
+            return
+
+        # Wrap span methods to capture data
+        self._wrap_span_methods(span, span_id)
+
+    def on_end(self, span: Span) -> None:
+        """Called when a span ends."""
+
+    def force_flush(self, timeout_millis: int = 30000) -> bool:
+        """Force flush any pending data."""
+        return True
+
+    def shutdown(self) -> bool:
+        """Shutdown the processor."""
+        return True
+
+    def _get_span_id(self, span: Span) -> Optional[str]:
+        """Get a unique identifier for the span."""
+        try:
+            span_context = span.get_span_context()
+            return f"{span_context.trace_id:032x}-{span_context.span_id:016x}"
+        except Exception:
+            return None
+
+    def _status_code_processing(self, status_code: int) -> None:
+        if httpx.codes.is_error(status_code):
+            event_attributes = {"has_error": True, "status_code": status_code}
+            Netra.set_custom_event(event_name="error_detected", attributes=event_attributes)
+
+    def _wrap_span_methods(self, span: Span, span_id: str) -> Any:
+        """Wrap span methods to capture attributes and events."""
+        # Wrap set_attribute
+        original_set_attribute = span.set_attribute
+
+        def wrapped_set_attribute(key: str, value: Any) -> Any:
+            # Status code processing
+            if key == "http.status_code":
+                self._status_code_processing(value)
+
+            return original_set_attribute(key, value)
+
+        # Wrap set_status
+        original_set_status = span.set_status
+
+        def wrapped_set_status(status: Status) -> Any:
+            # Check if status code is ERROR
+            if status.status_code == StatusCode.ERROR:
+                event_attributes = {
+                    "has_error": True,
+                }
+                Netra.set_custom_event(event_name="error_detected", attributes=event_attributes)
+
+            return original_set_status(status)
+
+        span.set_attribute = wrapped_set_attribute
+        span.set_status = wrapped_set_status
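Once on_start() has wrapped a span's methods, ordinary attribute and status updates become error events. A rough illustration (the status code value is made up):

span.set_attribute("http.status_code", 503)  # httpx.codes.is_error(503) is True -> emits an "error_detected" event
span.set_status(Status(StatusCode.ERROR))    # ERROR status -> emits an "error_detected" event with has_error=True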
netra/processors/session_span_processor.py
CHANGED

@@ -42,6 +42,12 @@ class SessionSpanProcessor(SpanProcessor): # type: ignore[misc]
                value = baggage.get_baggage(f"custom.{key}", ctx)
                if value:
                    span.set_attribute(f"{Config.LIBRARY_NAME}.custom.{key}", value)
+
+            # Add entity attributes from SessionManager
+            entity_attributes = SessionManager.get_current_entity_attributes()
+            for attr_key, attr_value in entity_attributes.items():
+                span.set_attribute(attr_key, attr_value)
+
        except Exception as e:
            logger.exception(f"Error setting span attributes: {e}")
 
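As a result, any span started while entities are on the stacks carries the current workflow/task/agent names. Assuming Config.LIBRARY_NAME resolves to "netra" (an assumption for illustration only), the added attributes would look like:

{"netra.workflow.name": "ingest_pipeline", "netra.task.name": "chunk_documents"}  # entity names are hypothetical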
netra/session.py
CHANGED
@@ -1,11 +1,13 @@
+import json
 import logging
 import time
-from typing import Any, Dict, Literal, Optional
+from typing import Any, Dict, List, Literal, Optional
 
 from opentelemetry import context as context_api
 from opentelemetry import trace
 from opentelemetry.trace import SpanKind, Status, StatusCode
 from opentelemetry.trace.propagation import set_span_in_context
+from pydantic import BaseModel
 
 from netra.config import Config
 
@@ -14,22 +16,22 @@ logging.basicConfig(level=logging.INFO)
 logger = logging.getLogger(__name__)
 
 
+class UsageModel(BaseModel): # type: ignore[misc]
+    model: str
+    type: str
+    unit_used: Optional[int] = None
+    cost_in_usd: Optional[float] = None
+
+
 class ATTRIBUTE:
     LLM_SYSTEM = "llm_system"
     MODEL = "model"
     PROMPT = "prompt"
     NEGATIVE_PROMPT = "negative_prompt"
-
-
-
-
-    TOTAL_TOKENS = "total_tokens"
-    PROMPT_TOKENS_COST = "prompt_tokens_cost"
-    PROMPT_TOKENS = "prompt_tokens"
-    COMPLETION_TOKENS_COST = "completion_tokens_cost"
-    COMPLETION_TOKENS = "completion_tokens"
-    CACHED_TOKENS_COST = "cached_tokens_cost"
-    CACHED_TOKENS = "cached_tokens"
+    HEIGHT = "height"
+    WIDTH = "width"
+    OUTPUT_TYPE = "output_type"
+    USAGE = "usage"
     STATUS = "status"
     DURATION_MS = "duration_ms"
     ERROR_MESSAGE = "error_message"
@@ -46,7 +48,7 @@ class Session:
        # External API call
        result = external_api.generate_video(...)
 
-        session.
+        session.set_usage(usage_data)
    """
 
    def __init__(self, name: str, attributes: Optional[Dict[str, str]] = None, module_name: str = "combat_sdk"):
@@ -139,49 +141,23 @@ class Session:
        """Set the negative prompt."""
        return self.set_attribute(f"{Config.LIBRARY_NAME}.{ATTRIBUTE.NEGATIVE_PROMPT}", negative_prompt)
 
-    def
-        """Set the
-        return self.set_attribute(f"{Config.LIBRARY_NAME}.{ATTRIBUTE.
-
-    def set_image_width(self, width: str) -> "Session":
-        """Set the image width."""
-        return self.set_attribute(f"{Config.LIBRARY_NAME}.{ATTRIBUTE.IMAGE_WIDTH}", width)
-
-    def set_total_tokens(self, tokens: str) -> "Session":
-        """Set the number of tokens used."""
-        return self.set_attribute(f"{Config.LIBRARY_NAME}.{ATTRIBUTE.TOTAL_TOKENS}", tokens)
-
-    def set_prompt_tokens_cost(self, cost: str) -> "Session":
-        """Set the number of tokens used."""
-        return self.set_attribute(f"{Config.LIBRARY_NAME}.{ATTRIBUTE.PROMPT_TOKENS_COST}", cost)
-
-    def set_prompt_tokens(self, tokens: str) -> "Session":
-        """Set the number of tokens used."""
-        return self.set_attribute(f"{Config.LIBRARY_NAME}.{ATTRIBUTE.PROMPT_TOKENS}", tokens)
-
-    def set_completion_tokens_cost(self, cost: str) -> "Session":
-        """Set the number of tokens used."""
-        return self.set_attribute(f"{Config.LIBRARY_NAME}.{ATTRIBUTE.COMPLETION_TOKENS_COST}", cost)
-
-    def set_completion_tokens(self, tokens: str) -> "Session":
-        """Set the number of tokens used."""
-        return self.set_attribute(f"{Config.LIBRARY_NAME}.{ATTRIBUTE.COMPLETION_TOKENS}", tokens)
-
-    def set_cached_tokens_cost(self, cost: str) -> "Session":
-        """Set the number of tokens used."""
-        return self.set_attribute(f"{Config.LIBRARY_NAME}.{ATTRIBUTE.CACHED_TOKENS_COST}", cost)
+    def set_height(self, height: str) -> "Session":
+        """Set the height."""
+        return self.set_attribute(f"{Config.LIBRARY_NAME}.{ATTRIBUTE.HEIGHT}", height)
 
-    def
-        """Set the
-        return self.set_attribute(f"{Config.LIBRARY_NAME}.{ATTRIBUTE.
+    def set_width(self, width: str) -> "Session":
+        """Set the width."""
+        return self.set_attribute(f"{Config.LIBRARY_NAME}.{ATTRIBUTE.WIDTH}", width)
 
-    def
-        """Set the
-        return self.set_attribute(f"{Config.LIBRARY_NAME}.{ATTRIBUTE.
+    def set_output_type(self, output_type: str) -> "Session":
+        """Set the output type."""
+        return self.set_attribute(f"{Config.LIBRARY_NAME}.{ATTRIBUTE.OUTPUT_TYPE}", output_type)
 
-    def
-        """Set the
-
+    def set_usage(self, usage: List[UsageModel]) -> "Session":
+        """Set the usage data as a JSON string."""
+        usage_dict = [u.model_dump() for u in usage]
+        usage_json = json.dumps(usage_dict)
+        return self.set_attribute(f"{Config.LIBRARY_NAME}.{ATTRIBUTE.USAGE}", usage_json)
 
    def set_model(self, model: str) -> "Session":
        """Set the model used."""
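The per-token setters above are replaced by a single structured usage payload. A hedged usage sketch built from the UsageModel and set_usage definitions in this diff (field values and the session variable are illustrative):

usage_data = [
    UsageModel(model="stable-diffusion-xl", type="image_generation", unit_used=1, cost_in_usd=0.04),
]
session.set_usage(usage_data)  # serialized via json.dumps() into the "<LIBRARY_NAME>.usage" span attribute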
netra/session_manager.py
CHANGED
@@ -5,7 +5,7 @@ Handles automatic session and user ID management for applications.
 
 import logging
 from datetime import datetime
-from typing import Any, Dict, Optional, Union
+from typing import Any, Dict, List, Optional, Union
 
 from opentelemetry import baggage
 from opentelemetry import context as otel_context
@@ -22,6 +22,11 @@ class SessionManager:
     # Class variable to track the current span
     _current_span: Optional[trace.Span] = None
 
+    # Class variables to track separate entity stacks
+    _workflow_stack: List[str] = []
+    _task_stack: List[str] = []
+    _agent_stack: List[str] = []
+
     @classmethod
     def set_current_span(cls, span: Optional[trace.Span]) -> None:
         """
@@ -42,6 +47,86 @@ class SessionManager:
         """
         return cls._current_span
 
+    @classmethod
+    def push_entity(cls, entity_type: str, entity_name: str) -> None:
+        """
+        Push an entity onto the appropriate entity stack.
+
+        Args:
+            entity_type: Type of entity (workflow, task, agent)
+            entity_name: Name of the entity
+        """
+        if entity_type == "workflow":
+            cls._workflow_stack.append(entity_name)
+        elif entity_type == "task":
+            cls._task_stack.append(entity_name)
+        elif entity_type == "agent":
+            cls._agent_stack.append(entity_name)
+
+    @classmethod
+    def pop_entity(cls, entity_type: str) -> Optional[str]:
+        """
+        Pop the most recent entity from the specified entity stack.
+
+        Args:
+            entity_type: Type of entity (workflow, task, agent)
+
+        Returns:
+            Entity name or None if stack is empty
+        """
+        if entity_type == "workflow" and cls._workflow_stack:
+            return cls._workflow_stack.pop()
+        elif entity_type == "task" and cls._task_stack:
+            return cls._task_stack.pop()
+        elif entity_type == "agent" and cls._agent_stack:
+            return cls._agent_stack.pop()
+        return None
+
+    @classmethod
+    def get_current_entity_attributes(cls) -> Dict[str, str]:
+        """
+        Get current entity attributes for span annotation.
+
+        Returns:
+            Dictionary of entity attributes to add to spans
+        """
+        attributes = {}
+
+        # Add current workflow if exists
+        if cls._workflow_stack:
+            attributes[f"{Config.LIBRARY_NAME}.workflow.name"] = cls._workflow_stack[-1]
+
+        # Add current task if exists
+        if cls._task_stack:
+            attributes[f"{Config.LIBRARY_NAME}.task.name"] = cls._task_stack[-1]
+
+        # Add current agent if exists
+        if cls._agent_stack:
+            attributes[f"{Config.LIBRARY_NAME}.agent.name"] = cls._agent_stack[-1]
+
+        return attributes
+
+    @classmethod
+    def clear_entity_stacks(cls) -> None:
+        """Clear all entity stacks."""
+        cls._workflow_stack.clear()
+        cls._task_stack.clear()
+        cls._agent_stack.clear()
+
+    @classmethod
+    def get_stack_info(cls) -> Dict[str, List[str]]:
+        """
+        Get information about all current stacks.
+
+        Returns:
+            Dictionary containing all stack contents
+        """
+        return {
+            "workflows": cls._workflow_stack.copy(),
+            "tasks": cls._task_stack.copy(),
+            "agents": cls._agent_stack.copy(),
+        }
+
     @staticmethod
     def set_session_context(session_key: str, value: Union[str, Dict[str, str]]) -> None:
         """
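Behavior of the new stack API in one short sketch (entity names are invented for the example):

SessionManager.push_entity("workflow", "ingest_pipeline")
SessionManager.push_entity("task", "chunk_documents")
SessionManager.get_stack_info()      # {"workflows": ["ingest_pipeline"], "tasks": ["chunk_documents"], "agents": []}
SessionManager.pop_entity("task")    # returns "chunk_documents"
SessionManager.pop_entity("agent")   # empty stack -> returns None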
netra/tracer.py
CHANGED
@@ -66,10 +66,10 @@ class Tracer:
            headers=self.cfg.headers,
        )
        # Add span processors for session span processing and data aggregation processing
-        from netra.processors import
+        from netra.processors import ErrorDetectionProcessor, SessionSpanProcessor
 
        provider.add_span_processor(SessionSpanProcessor())
-        provider.add_span_processor(
+        provider.add_span_processor(ErrorDetectionProcessor())
 
        # Install appropriate span processor
        if self.cfg.disable_batch:
netra/version.py
CHANGED
@@ -1 +1 @@
-__version__ = "0.1.3"
+__version__ = "0.1.5"
{netra_sdk-0.1.3.dist-info → netra_sdk-0.1.5.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: netra-sdk
-Version: 0.1.3
+Version: 0.1.5
 Summary: A Python SDK for AI application observability that provides OpenTelemetry-based monitoring, tracing, and PII protection for LLM and vector database applications. Enables easy instrumentation, session tracking, and privacy-focused data collection for AI systems in production environments.
 License: Apache-2.0
 Keywords: netra,tracing,observability,sdk,ai,llm,vector,database
@@ -416,17 +416,18 @@ Netra.init(app_name="My App")
 with Netra.start_session("video_generation_task") as session:
     # Set attributes before the API call
     session.set_prompt("A cat playing piano")
-    session.
-    session.
+    session.set_height("1024")
+    session.set_width("1024")
+    session.set_output_type("image")
     session.set_model("stable-diffusion-xl")
 
     # Make your external API call
     result = external_api.generate_video(...)
 
     # Set post-call attributes
-    session.
+    session.set_completion_tokens("1250")
     session.set_credits("30")
-    session.
+    session.set_completion_tokens_cost("0.15")
 
     # Add events during session
     session.add_event("processing_completed", {"step": "rendering"})
@@ -662,42 +663,7 @@ pre-commit install --hook-type pre-push
 
 ## 🤝 Contributing
 
-We welcome contributions! Please
-
-### Commit Message Format
-
-We use [Conventional Commits](https://www.conventionalcommits.org/) for commit messages:
-
-```
-<type>[optional scope]: <description>
-
-[optional body]
-
-[optional footer(s)]
-```
-
-**Types:**
-- **feat**: A new feature
-- **fix**: A bug fix
-- **docs**: Documentation only changes
-- **style**: Changes that do not affect the meaning of the code
-- **refactor**: A code change that neither fixes a bug nor adds a feature
-- **perf**: A code change that improves performance
-- **test**: Adding missing tests or correcting existing tests
-- **chore**: Changes to the build process or auxiliary tools
-
-**Examples:**
-```
-feat: add support for Claude AI instrumentation
-fix(pii): resolve masking issue with nested objects
-docs: update installation instructions
-```
-
-**Scope** can be used to specify the area of change (e.g., `pii`, `instrumentation`, `decorators`).
-
-**Body** should include the motivation for the change and contrast with previous behavior.
-
-**Footer** can be used for "BREAKING CHANGE:" or issue references.
+We welcome contributions! Please see our [Contributing Guide](CONTRIBUTING.md) for detailed information on how to contribute to the project, including development setup, testing, and our commit message format.
 
 ---
 

{netra_sdk-0.1.3.dist-info → netra_sdk-0.1.5.dist-info}/RECORD
CHANGED

@@ -4,12 +4,12 @@ netra/anonymizer/anonymizer.py,sha256=1VeYAsFpF_tYDlqJF-Q82-ZXGOR4YWBqrKUsRw3qOr
 netra/anonymizer/base.py,sha256=ytPxHCUD2OXlEY6fNTuMmwImNdIjgj294I41FIgoXpU,5946
 netra/anonymizer/fp_anonymizer.py,sha256=_6svIYmE0eejdIMkhKBUWCNjGtGimtrGtbLvPSOp8W4,6493
 netra/config.py,sha256=6aUG-BWWDOyptnTlEjOpB41n9xLmuo-XBAgs5GE5Tbg,4761
-netra/decorators.py,sha256=
+netra/decorators.py,sha256=V_WpZ2IgW2Y7B_WnSXmKUGGhkM5Cra2TwONddmJpPaI,6837
 netra/exceptions/__init__.py,sha256=uDgcBxmC4WhdS7HRYQk_TtJyxH1s1o6wZmcsnSHLAcM,174
 netra/exceptions/injection.py,sha256=ke4eUXRYUFJkMZgdSyPPkPt5PdxToTI6xLEBI0hTWUQ,1332
 netra/exceptions/pii.py,sha256=MT4p_x-zH3VtYudTSxw1Z9qQZADJDspq64WrYqSWlZc,2438
 netra/input_scanner.py,sha256=bzP3s7YudGHQrIbUgQGrcIBEJ6CmOewzuYNSu75cVXM,4988
-netra/instrumentation/__init__.py,sha256=
+netra/instrumentation/__init__.py,sha256=s-sXykQZ4CKUHLqHRR7buOrkN9hXGTZpNALRZkdIHB0,38757
 netra/instrumentation/aiohttp/__init__.py,sha256=M1kuF0R3gKY5rlbhEC1AR13UWHelmfokluL2yFysKWc,14398
 netra/instrumentation/aiohttp/version.py,sha256=Zy-0Aukx-HS_Mo3NKPWg-hlUoWKDzS0w58gLoVtJec8,24
 netra/instrumentation/cohere/__init__.py,sha256=3XwmCAZwZiMkHdNN3YvcBOLsNCx80ymbU31TyMzv1IY,17685
@@ -28,15 +28,15 @@ netra/instrumentation/mistralai/version.py,sha256=d6593s-XBNvVxri9lr2qLUDZQ3Zk3-
 netra/instrumentation/weaviate/__init__.py,sha256=EOlpWxobOLHYKqo_kMct_7nu26x1hr8qkeG5_h99wtg,4330
 netra/instrumentation/weaviate/version.py,sha256=PiCZHjonujPbnIn0KmD3Yl68hrjPRG_oKe5vJF3mmG8,24
 netra/pii.py,sha256=S7GnVzoNJEzKiUWnqN9bOCKPeNLsriztgB2E6Rx-yJU,27023
-netra/processors/__init__.py,sha256=
-netra/processors/
-netra/processors/
+netra/processors/__init__.py,sha256=G16VumYTpgV4jsWrKNFSgm6xMQAsZ2Rrux25UVeo5YQ,215
+netra/processors/error_detection_processor.py,sha256=vlFQ8-Zgoj83htk_ciVhlT41RfU2GiGmDHGBzjKgtYQ,2701
+netra/processors/session_span_processor.py,sha256=qcsBl-LnILWefsftI8NQhXDGb94OWPc8LvzhVA0JS_c,2432
 netra/scanner.py,sha256=wqjMZnEbVvrGMiUSI352grUyHpkk94oBfHfMiXPhpGU,3866
-netra/session.py,sha256=
-netra/session_manager.py,sha256=
-netra/tracer.py,sha256=
-netra/version.py,sha256=
-netra_sdk-0.1.
-netra_sdk-0.1.
-netra_sdk-0.1.
-netra_sdk-0.1.
+netra/session.py,sha256=o1wXrPzMauqj_3P-iNBHVlcIR7zcKcbsmkrcHjQMKuY,7263
+netra/session_manager.py,sha256=EVcnWcSj4NdkH--HmqHx0mmzivQiM4GCyFLu6lwi33M,6252
+netra/tracer.py,sha256=9jAKdIHXbaZ6WV_p8I1syQiMdqXVCXMhpEhCBsbbci8,3538
+netra/version.py,sha256=rPSfWgIeq2YWVPyESOAwCBt8vftsTpIkuLAGDEzyRQc,22
+netra_sdk-0.1.5.dist-info/LICENCE,sha256=8B_UoZ-BAl0AqiHAHUETCgd3I2B9yYJ1WEQtVb_qFMA,11359
+netra_sdk-0.1.5.dist-info/METADATA,sha256=-82nUeU3ogSqDhIpMzYAUkjgS5emC5yy_jA_WTkpsPU,23518
+netra_sdk-0.1.5.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
+netra_sdk-0.1.5.dist-info/RECORD,,

netra/processors/span_aggregation_processor.py
DELETED

@@ -1,365 +0,0 @@
-"""
-Span aggregation utilities for Combat SDK.
-Handles aggregation of child span data into parent spans.
-"""
-
-import json
-import logging
-from collections import defaultdict
-from typing import Any, Dict, Optional, Set
-
-import httpx
-from opentelemetry import trace
-from opentelemetry.sdk.trace import SpanProcessor
-from opentelemetry.trace import Context, Span
-
-from netra import Netra
-from netra.config import Config
-
-logger = logging.getLogger(__name__)
-
-
-class SpanAggregationData:
-    """Holds aggregated data for a span."""
-
-    def __init__(self) -> None:
-        self.tokens: Dict[str, Dict[str, int]] = defaultdict(lambda: defaultdict(int))
-        self.models: Set[str] = set()
-        self.has_pii: bool = False
-        self.pii_entities: Set[str] = set()
-        self.pii_actions: Dict[str, Set[str]] = defaultdict(set)
-        self.has_violation: bool = False
-        self.violations: Set[str] = set()
-        self.violation_actions: Dict[str, Set[str]] = defaultdict(set)
-        self.has_error: bool = False
-        self.status_codes: Set[int] = set()
-
-    def merge_from_other(self, other: "SpanAggregationData") -> None:
-        """Merge data from another SpanAggregationData instance."""
-        # Merge error data
-        if other.has_error:
-            self.has_error = True
-            self.status_codes.update(other.status_codes)
-
-        # Merge tokens - take the maximum values for each model
-        for model, token_data in other.tokens.items():
-            if model not in self.tokens:
-                self.tokens[model] = {}
-            for token_type, count in token_data.items():
-                self.tokens[model][token_type] = max(self.tokens[model].get(token_type, 0), count)
-
-        # Merge models
-        self.models.update(other.models)
-
-        # Merge PII data
-        if other.has_pii:
-            self.has_pii = True
-            self.pii_entities.update(other.pii_entities)
-            for action, entities in other.pii_actions.items():
-                self.pii_actions[action].update(entities)
-
-        # Merge violation data
-        if other.has_violation:
-            self.has_violation = True
-            self.violations.update(other.violations)
-            for action, violations in other.violation_actions.items():
-                self.violation_actions[action].update(violations)
-
-    def to_attributes(self) -> Dict[str, str]:
-        """Convert aggregated data to span attributes."""
-        attributes = {}
-
-        # Error Data
-        attributes["has_error"] = str(self.has_error).lower()
-        if self.has_error:
-            attributes["status_codes"] = json.dumps(list(self.status_codes))
-
-        # Token usage by model
-        if self.tokens:
-            tokens_dict = {}
-            for model, usage in self.tokens.items():
-                tokens_dict[model] = dict(usage)
-            attributes["tokens"] = json.dumps(tokens_dict)
-
-        # Models used
-        if self.models:
-            attributes["models"] = json.dumps(sorted(list(self.models)))
-
-        # PII information
-        attributes["has_pii"] = str(self.has_pii).lower()
-        if self.pii_entities:
-            attributes["pii_entities"] = json.dumps(sorted(list(self.pii_entities)))
-        if self.pii_actions:
-            pii_actions_dict = {}
-            for action, entities in self.pii_actions.items():
-                pii_actions_dict[action] = sorted(list(entities))
-            attributes["pii_actions"] = json.dumps(pii_actions_dict)
-
-        # Violation information
-        attributes["has_violation"] = str(self.has_violation).lower()
-        if self.violations:
-            attributes["violations"] = json.dumps(sorted(list(self.violations)))
-        if self.violation_actions:
-            violation_actions_dict = {}
-            for action, violations in self.violation_actions.items():
-                violation_actions_dict[action] = sorted(list(violations))
-            attributes["violation_actions"] = json.dumps(violation_actions_dict)
-
-        return attributes
-
-
-class SpanAggregationProcessor(SpanProcessor): # type: ignore[misc]
-    """
-    OpenTelemetry span processor that aggregates data from child spans into parent spans.
-    """
-
-    def __init__(self) -> None:
-        self._span_data: Dict[str, SpanAggregationData] = {}
-        self._span_hierarchy: Dict[str, Optional[str]] = {}  # child_id -> parent_id
-        self._root_spans: Set[str] = set()
-        self._captured_data: Dict[str, Dict[str, Any]] = {}  # span_id -> {attributes, events}
-        self._active_spans: Dict[str, Span] = {}  # span_id -> original span reference
-
-    def on_start(self, span: Span, parent_context: Optional[Context] = None) -> None:
-        """Called when a span starts."""
-        span_id = self._get_span_id(span)
-        if not span_id:
-            return
-
-        # Store the original span for later use
-        self._active_spans[span_id] = span
-
-        # Initialize aggregation data
-        self._span_data[span_id] = SpanAggregationData()
-        self._captured_data[span_id] = {"attributes": {}, "events": []}
-
-        # Check if this is a root span (no parent)
-        if span.parent is None:
-            self._root_spans.add(span_id)
-        else:
-            # Track parent-child relationship - span.parent is a SpanContext, not a Span
-            try:
-                parent_span_context = span.parent
-                if parent_span_context and parent_span_context.span_id:
-                    parent_span_id = f"{parent_span_context.trace_id:032x}-{parent_span_context.span_id:016x}"
-                    self._span_hierarchy[span_id] = parent_span_id
-                else:
-                    logger.warning(f"DEBUG: Parent span context is invalid for child {span_id}")
-            except Exception as e:
-                logger.warning(f"DEBUG: Could not get parent span ID for child {span_id}: {e}")
-
-        # Wrap span methods to capture data
-        self._wrap_span_methods(span, span_id)
-
-    def on_end(self, span: Span) -> None:
-        """Called when a span ends."""
-        span_id = self._get_span_id(span)
-        if not span_id or span_id not in self._span_data:
-            return
-
-        try:
-            # Process this span's captured data
-            captured = self._captured_data.get(span_id, {})
-            self._process_attributes(self._span_data[span_id], captured.get("attributes", {}))
-
-            # Set aggregated attributes on this span
-            original_span = self._active_spans.get(span_id)
-            if original_span and original_span.is_recording():
-                self._set_span_attributes(original_span, self._span_data[span_id])
-
-            # Handle parent-child aggregation for any remaining data
-            self._aggregate_to_all_parents(span_id)
-
-        except Exception as e:
-            logger.error(f"Error during span aggregation for span {span_id}: {e}")
-            # Even if there's an error, try to do basic aggregation
-            try:
-                original_span = self._active_spans.get(span_id)
-                if original_span and original_span.is_recording():
-                    self._set_span_attributes(original_span, self._span_data[span_id])
-            except Exception as inner_e:
-                logger.error(f"Failed to set basic aggregation attributes: {inner_e}")
-
-        # Clean up
-        self._span_data.pop(span_id, None)
-        self._captured_data.pop(span_id, None)
-        self._active_spans.pop(span_id, None)
-        self._root_spans.discard(span_id)
-        self._span_hierarchy.pop(span_id, None)
-
-    def _wrap_span_methods(self, span: Span, span_id: str) -> Any:
-        """Wrap span methods to capture attributes and events."""
-        # Wrap set_attribute
-        original_set_attribute = span.set_attribute
-
-        def wrapped_set_attribute(key: str, value: Any) -> Any:
-            # Status code processing
-            if key == "http.status_code":
-                self._status_code_processing(value)
-
-            # Capture the all the attribute data
-            self._captured_data[span_id]["attributes"][key] = value
-            return original_set_attribute(key, value)
-
-        span.set_attribute = wrapped_set_attribute
-
-        # Wrap add_event
-        original_add_event = span.add_event
-
-        def wrapped_add_event(name: str, attributes: Dict[str, Any] = {}, timestamp: int = 0) -> Any:
-            # Only process PII and violation events
-            if name == "pii_detected" and attributes:
-                self._process_pii_event(self._span_data[span_id], attributes)
-                if span.is_recording():
-                    self._set_span_attributes(span, self._span_data[span_id])
-                # Immediately aggregate to parent spans
-                self._aggregate_to_all_parents(span_id)
-            elif name == "violation_detected" and attributes:
-                self._process_violation_event(self._span_data[span_id], attributes)
-                if span.is_recording():
-                    self._set_span_attributes(span, self._span_data[span_id])
-                # Immediately aggregate to parent spans
-                self._aggregate_to_all_parents(span_id)
-
-            # Check if span is still recording before adding event
-            if not span.is_recording():
-                logger.debug(f"Attempted to add event to ended span {span_id}")
-                return None
-            return original_add_event(name, attributes, timestamp)
-
-        span.add_event = wrapped_add_event
-
-    def _process_attributes(self, data: SpanAggregationData, attributes: Dict[str, Any]) -> None:
-        """Process span attributes for aggregation."""
-        # Extract status code for error identification
-        status_code = attributes.get("http.status_code", 200)
-        if httpx.codes.is_error(status_code):
-            data.has_error = True
-            data.status_codes.update([status_code])
-
-        # Extract model information
-        model = attributes.get("gen_ai.request.model") or attributes.get("gen_ai.response.model")
-        if model:
-            data.models.add(model)
-            # Extract token usage
-            token_fields = {
-                "prompt_tokens": attributes.get("gen_ai.usage.prompt_tokens", 0),
-                "completion_tokens": attributes.get("gen_ai.usage.completion_tokens", 0),
-                "total_tokens": attributes.get("llm.usage.total_tokens", 0),
-                "cache_read_input_tokens": attributes.get("gen_ai.usage.cache_read_input_tokens", 0),
-            }
-
-            # Initialize token fields if they don't exist
-            if model not in data.tokens:
-                data.tokens[model] = {}
-
-            # Add token values
-            for field, value in token_fields.items():
-                if isinstance(value, (int, str)):
-                    current_value = data.tokens[model].get(field, 0)
-                    data.tokens[model][field] = current_value + int(value)
-
-    def _process_pii_event(self, data: SpanAggregationData, attrs: Dict[str, Any]) -> None:
-        """Process pii_detected event."""
-        if attrs.get("has_pii"):
-            data.has_pii = True
-
-            # Extract entities from pii_entities field
-            entity_counts_str = attrs.get("pii_entities")
-            if entity_counts_str:
-                try:
-                    entity_counts = (
-                        json.loads(entity_counts_str) if isinstance(entity_counts_str, str) else entity_counts_str
-                    )
-                    if isinstance(entity_counts, dict):
-                        entities = set(entity_counts.keys())
-                        data.pii_entities.update(entities)
-
-                        # Determine action
-                        if attrs.get("is_blocked"):
-                            data.pii_actions["BLOCK"].update(entities)
-                        elif attrs.get("is_masked"):
-                            data.pii_actions["MASK"].update(entities)
-                        else:
-                            data.pii_actions["FLAG"].update(entities)
-                except (json.JSONDecodeError, TypeError):
-                    logger.error(f"Failed to parse pii_entities: {entity_counts_str}")
-
-    def _process_violation_event(self, data: SpanAggregationData, attrs: Dict[str, Any]) -> None:
-        """Process violation_detected event."""
-        if attrs.get("has_violation"):
-            data.has_violation = True
-            violations = attrs.get("violations", [])
-            if violations:
-                data.violations.update(violations)
-                # Set action based on is_blocked flag
-                action = "BLOCK" if attrs.get("is_blocked") else "FLAG"
-                data.violation_actions[action].update(violations)
-
-    def _aggregate_to_all_parents(self, child_span_id: str) -> None:
-        """Aggregate data from child span to all its parent spans in the hierarchy."""
-        if child_span_id not in self._span_data:
-            return
-
-        child_data = self._span_data[child_span_id]
-        current_span_id = child_span_id
-
-        # Traverse up the parent hierarchy
-        while True:
-            parent_id = self._span_hierarchy.get(current_span_id)
-            if not parent_id or parent_id not in self._span_data:
-                break
-
-            # Merge child data into parent
-            self._span_data[parent_id].merge_from_other(child_data)
-
-            # Update parent span attributes if it's still active and recording
-            parent_span = self._active_spans.get(parent_id)
-            if parent_span and parent_span.is_recording():
-                self._set_span_attributes(parent_span, self._span_data[parent_id])
-
-            # Move up to the next parent
-            current_span_id = parent_id
-
-    def _set_span_attributes(self, span: Span, data: SpanAggregationData) -> None:
-        """Set aggregated attributes on the given span."""
-        try:
-            aggregated_attrs = data.to_attributes()
-            # Set all aggregated attributes under a single 'aggregator' key as a JSON object
-            span.set_attribute(f"{Config.LIBRARY_NAME}.aggregated_attributes", json.dumps(aggregated_attrs))
-        except Exception as e:
-            logger.error(f"Failed to set aggregated attributes: {e}")
-
-    def _get_span_id(self, span: Span) -> Optional[str]:
-        """Get a unique identifier for the span."""
-        try:
-            span_context = span.get_span_context()
-            return f"{span_context.trace_id:032x}-{span_context.span_id:016x}"
-        except Exception:
-            return None
-
-    def _get_span_id_from_context(self, context: Context) -> Optional[str]:
-        """Extract span ID from context."""
-        if context:
-            span_context = trace.get_current_span(context).get_span_context()
-            if span_context and span_context.span_id:
-                return f"{span_context.trace_id:032x}-{span_context.span_id:016x}"
-        return None
-
-    def _status_code_processing(self, status_code: int) -> None:
-        if httpx.codes.is_error(status_code):
-            event_attributes = {"has_error": True, "status_code": status_code}
-            Netra.set_custom_event(event_name="error_detected", attributes=event_attributes)
-
-    def force_flush(self, timeout_millis: int = 30000) -> bool:
-        """Force flush any pending data."""
-        return True
-
-    def shutdown(self) -> bool:
-        """Shutdown the processor."""
-        self._span_data.clear()
-        self._span_hierarchy.clear()
-        self._root_spans.clear()
-        self._captured_data.clear()
-        self._active_spans.clear()
-        return True

{netra_sdk-0.1.3.dist-info → netra_sdk-0.1.5.dist-info}/LICENCE
File without changes

{netra_sdk-0.1.3.dist-info → netra_sdk-0.1.5.dist-info}/WHEEL
File without changes