ddapm-test-agent 1.36.0__py3-none-any.whl → 1.38.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ddapm_test_agent/agent.py +98 -13
- ddapm_test_agent/remoteconfig.py +2 -2
- ddapm_test_agent/static/style.css +1679 -0
- ddapm_test_agent/templates/base.html +31 -0
- ddapm_test_agent/templates/config.html +440 -0
- ddapm_test_agent/templates/dashboard.html +90 -0
- ddapm_test_agent/templates/macros.html +40 -0
- ddapm_test_agent/templates/requests.html +1925 -0
- ddapm_test_agent/templates/session_detail.html +37 -0
- ddapm_test_agent/templates/sessions.html +23 -0
- ddapm_test_agent/templates/snapshot_detail.html +410 -0
- ddapm_test_agent/templates/snapshots.html +86 -0
- ddapm_test_agent/templates/trace_detail.html +37 -0
- ddapm_test_agent/templates/tracer_flares.html +640 -0
- ddapm_test_agent/templates/traces.html +24 -0
- ddapm_test_agent/vcr_proxy.py +306 -58
- ddapm_test_agent/web.py +1523 -0
- {ddapm_test_agent-1.36.0.dist-info → ddapm_test_agent-1.38.0.dist-info}/METADATA +15 -5
- ddapm_test_agent-1.38.0.dist-info/RECORD +40 -0
- ddapm_test_agent-1.36.0.dist-info/RECORD +0 -26
- {ddapm_test_agent-1.36.0.dist-info → ddapm_test_agent-1.38.0.dist-info}/WHEEL +0 -0
- {ddapm_test_agent-1.36.0.dist-info → ddapm_test_agent-1.38.0.dist-info}/entry_points.txt +0 -0
- {ddapm_test_agent-1.36.0.dist-info → ddapm_test_agent-1.38.0.dist-info}/licenses/LICENSE.BSD3 +0 -0
- {ddapm_test_agent-1.36.0.dist-info → ddapm_test_agent-1.38.0.dist-info}/licenses/LICENSE.apache2 +0 -0
- {ddapm_test_agent-1.36.0.dist-info → ddapm_test_agent-1.38.0.dist-info}/top_level.txt +0 -0
ddapm_test_agent/web.py
ADDED
|
@@ -0,0 +1,1523 @@
|
|
|
1
|
+
import asyncio
|
|
2
|
+
import datetime
|
|
3
|
+
import json
|
|
4
|
+
import logging
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
import time
|
|
7
|
+
from typing import Any
|
|
8
|
+
from typing import Dict
|
|
9
|
+
from typing import List
|
|
10
|
+
from typing import Optional
|
|
11
|
+
from typing import Protocol
|
|
12
|
+
from typing import Tuple
|
|
13
|
+
import urllib.parse
|
|
14
|
+
import weakref
|
|
15
|
+
|
|
16
|
+
from aiohttp import web
|
|
17
|
+
from aiohttp.web import StreamResponse
|
|
18
|
+
from jinja2 import Environment
|
|
19
|
+
from jinja2 import FileSystemLoader
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
log = logging.getLogger(__name__)
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
class BodyProcessor:
    """Helpers for rendering captured HTTP bodies in the web UI."""

    @staticmethod
    def process_body(body_data: bytes, content_type: str) -> Tuple[str, bool]:
        """Render *body_data* for display.

        Returns a ``(processed_body, is_binary)`` pair: binary payloads are
        base64-encoded (``is_binary`` True); everything else is decoded as
        UTF-8 with replacement characters (``is_binary`` False).
        """
        if not body_data:
            return "", False

        lowered = (content_type or "").lower()

        # Payload types treated as opaque binary and shown base64-encoded.
        binary_markers = ("msgpack", "application/octet-stream", "multipart")
        if any(marker in lowered for marker in binary_markers):
            import base64

            return base64.b64encode(body_data).decode("ascii"), True

        # Otherwise assume text; errors="replace" never raises for bytes, but
        # keep the guard in case a non-bytes value slips through.
        try:
            return body_data.decode("utf-8", errors="replace"), False
        except Exception:
            return "[Binary data]", False
|
|
56
|
+
class TraceProcessor:
    """Helpers for decoding and normalizing trace payloads for the web UI."""

    # Span fields copied into the JSON-serializable representation, in the
    # order they appear in the rendered output.
    _SPAN_FIELDS = (
        "trace_id",
        "span_id",
        "parent_id",
        "service",
        "name",
        "resource",
        "type",
        "start",
        "duration",
        "error",
        "meta",
        "metrics",
    )

    @staticmethod
    def process_traces(raw_data: bytes, content_type: str, path: str, suppress_errors: bool = False) -> Dict[str, Any]:
        """Decode *raw_data* according to *path* and return standardized trace info."""
        if not raw_data:
            return TraceProcessor._empty_trace_result(path)

        try:
            from .trace import decode_v1 as trace_decode_v1
            from .trace import decode_v04 as trace_decode_v04
            from .trace import decode_v05 as trace_decode_v05
            from .trace import decode_v07 as trace_decode_v07

            log.info(f"Decoding trace data for path: {path}, data length: {len(raw_data)}")

            # Pick the decoder by endpoint path; unknown paths decode to [].
            decoders = {
                "/v0.4/traces": lambda: trace_decode_v04(content_type, raw_data, suppress_errors),
                "/v0.5/traces": lambda: trace_decode_v05(raw_data),
                "/v0.7/traces": lambda: trace_decode_v07(raw_data),
                "/v1.0/traces": lambda: trace_decode_v1(raw_data),
            }
            decode = decoders.get(path)
            traces = decode() if decode is not None else []
            log.info(f"Decoded {len(traces)} traces for {path}")

            return TraceProcessor._process_decoded_traces(traces, path)
        except Exception as e:
            log.error(f"Failed to process trace data: {e}")
            return TraceProcessor._empty_trace_result(path)

    @staticmethod
    def _process_decoded_traces(traces: List[Any], path: str) -> Dict[str, Any]:
        """Convert decoded traces into the JSON-friendly structure templates expect."""
        import base64
        import json

        normalized = []
        for trace in traces:
            spans = []
            for span in trace:
                if hasattr(span, "trace_id"):
                    # Span object: pull each field off its attributes.
                    spans.append({field: getattr(span, field) for field in TraceProcessor._SPAN_FIELDS})
                else:
                    # Plain dict: meta/metrics default to empty mappings.
                    spans.append(
                        {
                            field: (span.get(field, {}) if field in ("meta", "metrics") else span.get(field))
                            for field in TraceProcessor._SPAN_FIELDS
                        }
                    )
            normalized.append(spans)

        # Base64-encode the JSON payload so templates can hand it to JS safely.
        encoded = base64.b64encode(json.dumps(normalized).encode("utf-8")).decode("ascii")

        return {
            "is_trace_request": True,
            "path": path,
            "trace_count": len(traces),
            "span_count": sum(len(trace) for trace in traces),
            "traces": normalized,
            "trace_data_b64": encoded,
        }

    @staticmethod
    def _empty_trace_result(path: str) -> Dict[str, Any]:
        """Standardized result for missing or undecodable payloads."""
        return {
            "is_trace_request": True,
            "path": path,
            "trace_count": 0,
            "span_count": 0,
            "traces": [],
            "trace_data_b64": "",
        }
|
|
168
|
+
class RequestObserver(Protocol):
    """Structural interface for objects that receive new-request notifications."""

    async def notify_request(self, request_data: Dict[str, Any]) -> None:
        """Called (fire-and-forget) each time a request record is stored."""
        ...
|
|
175
|
+
class RequestStorage:
    """Centralized, size-bounded store for captured request/response records.

    Observers registered via :meth:`add_observer` are notified on every add
    (best effort, via fire-and-forget asyncio tasks).
    """

    def __init__(self, max_requests: int = 200):
        # Oldest-first list of capture records, trimmed to _max_requests.
        self._requests: List[Dict[str, Any]] = []
        self._max_requests = max_requests
        self._observers: List["RequestObserver"] = []

    def add_request(self, request_data: Dict[str, Any]) -> None:
        """Append *request_data*, trim to the size limit, and notify observers."""
        self._requests.append(request_data)

        # Drop oldest entries past the cap.  A while-loop (vs a single pop)
        # also copes with _max_requests being lowered after construction,
        # which WebUI.__init__ does on reconfiguration.
        while len(self._requests) > self._max_requests:
            self._requests.pop(0)

        # Fire-and-forget notification via the module-level asyncio import
        # (the previous function-local re-import was redundant).  Failures —
        # e.g. no running event loop — must never break request processing.
        for observer in self._observers:
            try:
                asyncio.create_task(observer.notify_request(request_data))
            except Exception:
                pass

    def get_all_requests(self) -> List[Dict[str, Any]]:
        """Return a copy of all stored requests, most recent first."""
        return list(reversed(self._requests))

    def clear_requests(self) -> None:
        """Discard every stored request."""
        self._requests.clear()

    def add_observer(self, observer: "RequestObserver") -> None:
        """Register *observer* for new-request notifications."""
        self._observers.append(observer)

    def remove_observer(self, observer: "RequestObserver") -> None:
        """Unregister *observer*; a no-op if it was never registered."""
        if observer in self._observers:
            self._observers.remove(observer)

    def __len__(self) -> int:
        return len(self._requests)
+
|
|
222
|
+
# Module-level singleton holding captured request records.  Created (or
# reconfigured) lazily by WebUI.__init__; stays None when the web UI is
# not instantiated, so all readers must null-check it.
request_storage: Optional[RequestStorage] = None
+
|
|
226
|
+
def _build_capture_record(
    request: web.Request,
    request_start_time: float,
    request_body: bytes,
    response_status: int,
    response_headers: Dict[str, Any],
    response_body: bytes,
) -> Dict[str, Any]:
    """Assemble the capture record stored for one request.

    Shared by the success and error paths of the capture middleware so the
    record shape is defined in exactly one place (it was duplicated before).
    """
    return {
        "timestamp": request_start_time,
        "method": request.method,
        "path": request.path_qs,
        "headers": dict(request.headers),
        "content_type": request.headers.get("Content-Type", ""),
        "remote_addr": request.remote or "",
        "request_body": request_body,
        "response": {
            "status": response_status,
            "headers": response_headers,
            "body": response_body,
        },
        "duration_ms": (time.time() - request_start_time) * 1000,
    }


@web.middleware
async def request_response_capture_middleware(request: web.Request, handler: Any) -> Any:
    """Middleware to capture all request/response data for WebUI.

    Records are stored in the global ``request_storage`` (when configured);
    handler exceptions are recorded as a 500 and re-raised unchanged.
    """
    request_start_time = time.time()

    # Read the request body up front so it is available even if the handler
    # later fails.
    request_body = b""
    if request.has_body and request.can_read_body:
        try:
            request_body = await request.read()
            # Re-feed the payload for downstream handlers.
            # NOTE(review): feed_eof() before feed_data() looks inverted
            # relative to aiohttp's StreamReader contract; handlers likely
            # rely on request.read()'s internal cache instead.  Preserved
            # as-is — confirm against aiohttp internals before changing.
            request._payload.feed_eof()
            request._payload.feed_data(request_body)
        except Exception as e:
            log.debug(f"Failed to read request body: {e}")

    # Defaults used when the handler raises before producing a response.
    response_status = 500
    response_headers: Dict[str, Any] = {}
    response_body = b""

    try:
        response = await handler(request)
        response_status = response.status
        response_headers = dict(response.headers)

        # Capture the body of plain (non-streaming) responses only.
        if hasattr(response, "body") and response.body:
            response_body = response.body
        elif hasattr(response, "text") and response.text:
            response_body = response.text.encode("utf-8")
    except Exception as e:
        # Record the failure, then propagate it unchanged.
        response_status = 500
        response_headers = {}
        response_body = str(e).encode("utf-8")
        if request_storage is not None:
            request_storage.add_request(
                _build_capture_record(
                    request, request_start_time, request_body, response_status, response_headers, response_body
                )
            )
        raise

    # Store in unified request storage (size-limited, notifies observers).
    if request_storage is not None:
        request_storage.add_request(
            _build_capture_record(
                request, request_start_time, request_body, response_status, response_headers, response_body
            )
        )

    return response
|
+
|
|
307
|
+
# Cap on how many stored requests the UI renders per page load (see
# WebUI.get_requests_from_agent); RequestStorage enforces its own cap too.
MAX_STORED_REQUESTS = 200
|
|
310
|
+
class WebUI:
|
|
311
|
+
"""Web UI module for the dd-apm-test-agent"""
|
|
312
|
+
|
|
313
|
+
    def __init__(self, agent: Any, config: Optional[Dict[str, Any]] = None) -> None:
        """Bind the UI to *agent*, wire up request storage, and build templating.

        Side effects: creates or reconfigures the module-level global
        ``request_storage`` and registers this instance as an observer on it.
        """
        self.agent = agent
        self.config = config or {}

        # Initialize or reconfigure global request storage with max requests from config
        global request_storage
        max_requests = self.config.get("max_requests", 200)
        if request_storage is None:
            request_storage = RequestStorage(max_requests=max_requests)
        else:
            # Update existing storage limit if needed
            # (reaches into the private attribute; takes effect on next add)
            request_storage._max_requests = max_requests

        # Track SSE connections for real-time updates.  WeakSet so closed
        # response objects drop out without explicit bookkeeping.
        self._sse_connections: weakref.WeakSet[Any] = weakref.WeakSet()

        # Register as observer for request notifications (see notify_request)
        request_storage.add_observer(self)

        # Set up Jinja2 template environment; templates live next to this file
        templates_dir = Path(__file__).parent / "templates"
        self.jinja_env = Environment(loader=FileSystemLoader(str(templates_dir)), autoescape=True)

        # Add custom filters
        def timestamp_format(timestamp):
            """Format timestamp for display"""
            # Local time; fromtimestamp uses the host timezone.
            dt = datetime.datetime.fromtimestamp(timestamp)
            return dt.strftime("%Y-%m-%d %H:%M:%S")

        self.jinja_env.filters["timestamp_format"] = timestamp_format
|
344
|
+
async def notify_request(self, request_data: Dict[str, Any]) -> None:
|
|
345
|
+
"""Request Observer implementation - notify SSE connections of new requests"""
|
|
346
|
+
if not self._sse_connections:
|
|
347
|
+
return
|
|
348
|
+
|
|
349
|
+
# Process the request data for WebUI display
|
|
350
|
+
processed_request = self._process_single_request(request_data)
|
|
351
|
+
if not processed_request:
|
|
352
|
+
return
|
|
353
|
+
|
|
354
|
+
message = json.dumps(
|
|
355
|
+
{
|
|
356
|
+
"type": "new_request",
|
|
357
|
+
"request": processed_request,
|
|
358
|
+
"total_count": len(request_storage) if request_storage is not None else 0,
|
|
359
|
+
}
|
|
360
|
+
)
|
|
361
|
+
|
|
362
|
+
# Send to all connected SSE clients
|
|
363
|
+
dead_connections = []
|
|
364
|
+
for connection in self._sse_connections:
|
|
365
|
+
try:
|
|
366
|
+
await connection.write(f"data: {message}\n\n".encode())
|
|
367
|
+
except Exception:
|
|
368
|
+
dead_connections.append(connection)
|
|
369
|
+
|
|
370
|
+
# Clean up dead connections
|
|
371
|
+
for connection in dead_connections:
|
|
372
|
+
self._sse_connections.discard(connection)
|
|
373
|
+
|
|
374
|
+
def get_requests_from_agent(self) -> List[Dict[str, Any]]:
|
|
375
|
+
"""Get processed request data from unified request storage"""
|
|
376
|
+
processed_requests = []
|
|
377
|
+
|
|
378
|
+
# Get all requests from unified storage (already in most recent first order)
|
|
379
|
+
all_requests = request_storage.get_all_requests()[:MAX_STORED_REQUESTS] if request_storage is not None else []
|
|
380
|
+
|
|
381
|
+
for req_data in all_requests:
|
|
382
|
+
processed_request = self._process_single_request(req_data)
|
|
383
|
+
if processed_request:
|
|
384
|
+
processed_requests.append(processed_request)
|
|
385
|
+
|
|
386
|
+
return processed_requests
|
|
387
|
+
|
|
388
|
+
    def _process_single_request(self, req_data: Dict[str, Any]) -> Optional[Dict[str, Any]]:
        """Process a single request data dict into WebUI format.

        Returns None (and logs at debug) when the record cannot be shaped —
        the caller silently drops such records from the UI.
        """
        try:
            # Process request and response bodies using utility
            request_body, request_body_is_binary = BodyProcessor.process_body(
                req_data["request_body"], req_data["content_type"]
            )
            response_body, response_body_is_binary = BodyProcessor.process_body(
                req_data["response"]["body"],
                req_data["response"]["headers"].get("Content-Type", ""),
            )

            # Extract query string from path (path_qs was stored whole)
            path_parts = req_data["path"].split("?", 1)
            path = path_parts[0]
            query_string = path_parts[1] if len(path_parts) > 1 else ""

            # Parse query parameters into a dict for template rendering
            # (parse_qs maps each key to a LIST of values)
            query_params = {}
            if query_string:
                query_params = urllib.parse.parse_qs(query_string, keep_blank_values=True)

            # Check if this is a trace request and process trace data
            trace_data = None
            if path in ["/v0.4/traces", "/v0.5/traces", "/v0.7/traces", "/v1.0/traces"]:
                trace_data = self._process_middleware_trace_data(req_data, path)

            return {
                "method": req_data["method"],
                "path": path,
                "query_string": query_string,
                "query_params": query_params,
                "headers": req_data["headers"],
                "content_type": req_data["content_type"],
                "content_length": len(req_data["request_body"]) if req_data["request_body"] else 0,
                "remote_addr": req_data["remote_addr"],
                "timestamp": req_data["timestamp"],
                # Session grouping comes from the test-session header, if sent
                "session_token": req_data["headers"].get("X-Datadog-Test-Session-Token"),
                "body": request_body,
                "body_is_binary": request_body_is_binary,
                "trace_data": trace_data,
                "response": {
                    "status": req_data["response"]["status"],
                    "headers": req_data["response"]["headers"],
                    "content_type": req_data["response"]["headers"].get("Content-Type", ""),
                    "body": response_body,
                    "body_is_binary": response_body_is_binary,
                },
                "duration_ms": req_data["duration_ms"],
            }
        except Exception as e:
            # Broad on purpose: a malformed record must not break the page.
            log.debug(f"Failed to process captured request: {e}")
            return None
|
442
|
+
def _get_request_body(self, req: Any) -> Any:
|
|
443
|
+
"""Extract and format request body for display"""
|
|
444
|
+
if "_testagent_data" not in req:
|
|
445
|
+
return ""
|
|
446
|
+
|
|
447
|
+
data = req["_testagent_data"]
|
|
448
|
+
content_type = req.content_type or ""
|
|
449
|
+
|
|
450
|
+
if "msgpack" in content_type.lower():
|
|
451
|
+
# For binary data, show as base64
|
|
452
|
+
import base64
|
|
453
|
+
|
|
454
|
+
return base64.b64encode(data).decode("ascii")
|
|
455
|
+
else:
|
|
456
|
+
# For text data, decode as UTF-8
|
|
457
|
+
try:
|
|
458
|
+
return data.decode("utf-8", errors="replace")
|
|
459
|
+
except (UnicodeDecodeError, AttributeError):
|
|
460
|
+
return str(data)
|
|
461
|
+
|
|
462
|
+
def _get_basic_trace_info(self, req):
|
|
463
|
+
"""Get basic trace info without full decoding for request list"""
|
|
464
|
+
if not self._is_trace_request(req):
|
|
465
|
+
return None
|
|
466
|
+
|
|
467
|
+
# Just return basic info indicating this is a trace request
|
|
468
|
+
# The actual decoding will happen when viewing the trace details
|
|
469
|
+
return {
|
|
470
|
+
"is_trace_request": True,
|
|
471
|
+
"path": req.path,
|
|
472
|
+
"trace_count": "?", # Unknown until decoded
|
|
473
|
+
"span_count": "?", # Unknown until decoded
|
|
474
|
+
"traces": None,
|
|
475
|
+
"trace_data_b64": "", # Will be filled when actually needed
|
|
476
|
+
}
|
|
477
|
+
|
|
478
|
+
def _get_trace_data(self, req):
|
|
479
|
+
"""Extract and process trace data for trace requests"""
|
|
480
|
+
if not self._is_trace_request(req):
|
|
481
|
+
return None
|
|
482
|
+
|
|
483
|
+
# Get raw data
|
|
484
|
+
if "_testagent_data" not in req:
|
|
485
|
+
return None
|
|
486
|
+
|
|
487
|
+
raw_data = req["_testagent_data"]
|
|
488
|
+
content_type = req.content_type or ""
|
|
489
|
+
|
|
490
|
+
return TraceProcessor.process_traces(raw_data, content_type, req.path, suppress_errors=False)
|
|
491
|
+
|
|
492
|
+
def _process_middleware_trace_data(self, req_data, path):
|
|
493
|
+
"""Process trace data from middleware-captured request data"""
|
|
494
|
+
raw_data = req_data.get("request_body", b"")
|
|
495
|
+
content_type = req_data.get("content_type", "")
|
|
496
|
+
|
|
497
|
+
return TraceProcessor.process_traces(raw_data, content_type, path, suppress_errors=True)
|
|
498
|
+
|
|
499
|
+
def _is_trace_request(self, req: Any) -> bool:
|
|
500
|
+
"""Check if request is a trace request based on path"""
|
|
501
|
+
trace_paths = ["/v0.4/traces", "/v0.5/traces", "/v0.7/traces", "/v1.0/traces"]
|
|
502
|
+
return req.path in trace_paths
|
|
503
|
+
|
|
504
|
+
def _clean_trace_data_for_json(self, traces):
|
|
505
|
+
"""Clean trace data to ensure JSON serializability"""
|
|
506
|
+
import json
|
|
507
|
+
|
|
508
|
+
def clean_value(obj):
|
|
509
|
+
if isinstance(obj, (int, float, str, bool)) or obj is None:
|
|
510
|
+
return obj
|
|
511
|
+
elif isinstance(obj, bytes):
|
|
512
|
+
# Convert bytes to base64 string
|
|
513
|
+
import base64
|
|
514
|
+
|
|
515
|
+
try:
|
|
516
|
+
return base64.b64encode(obj).decode("ascii")
|
|
517
|
+
except Exception:
|
|
518
|
+
return "[Binary data]"
|
|
519
|
+
elif isinstance(obj, dict):
|
|
520
|
+
return {str(k): clean_value(v) for k, v in obj.items()}
|
|
521
|
+
elif isinstance(obj, (list, tuple)):
|
|
522
|
+
return [clean_value(item) for item in obj]
|
|
523
|
+
else:
|
|
524
|
+
# For any other type, convert to string
|
|
525
|
+
try:
|
|
526
|
+
return str(obj)
|
|
527
|
+
except Exception:
|
|
528
|
+
return "[Unserializable object]"
|
|
529
|
+
|
|
530
|
+
try:
|
|
531
|
+
cleaned = clean_value(traces)
|
|
532
|
+
# Test JSON serializability
|
|
533
|
+
json.dumps(cleaned)
|
|
534
|
+
return cleaned
|
|
535
|
+
except Exception:
|
|
536
|
+
# Failed to clean trace data
|
|
537
|
+
return []
|
|
538
|
+
|
|
539
|
+
    def make_app(self) -> web.Application:
        """Create the web UI aiohttp application and register all routes."""
        app = web.Application()

        # Set up routes
        app.add_routes(
            [
                web.get("/", self.handle_dashboard),
                web.get("/requests", self.handle_requests),
                # Server-sent events stream of newly captured requests
                web.get("/requests/stream", self.handle_requests_sse),
                web.post("/requests/clear", self.handle_clear_requests),
                web.get("/requests/download", self.handle_download_requests),
                web.get("/traces", self.handle_requests),  # Redirect old traces URL
                web.get("/traces/{trace_id}", self.handle_trace_detail),
                web.get("/config", self.handle_config),
                web.post("/config/create", self.handle_config_create),
                web.post("/config/update", self.handle_config_update),
                web.post("/config/create_path", self.handle_config_create_path),
                web.post("/config/clear", self.handle_config_clear),
                web.get("/tracerflares", self.handle_tracer_flares),
                web.post("/tracerflares/start", self.handle_start_flare),
                web.post("/tracerflares/stop", self.handle_stop_flare),
                web.get("/tracerflares/download", self.handle_download_tracer_flare),
                web.get("/snapshots", self.handle_snapshots),
                # NOTE(review): "(unknown)" looks like a placeholder lost in
                # packaging — this route presumably should carry a {param}
                # pattern matching what handle_snapshot_detail reads from
                # request.match_info; confirm against the handler.
                web.get("/snapshots/(unknown)", self.handle_snapshot_detail),
                # HTMX endpoints for server-side processing
                web.post("/api/render-waterfall", self.handle_render_waterfall),
                web.post("/api/render-json", self.handle_render_json),
                # Static files
                web.static("/static", Path(__file__).parent / "static"),
            ]
        )

        return app
|
574
|
+
    async def handle_dashboard(self, request: web.Request) -> web.Response:
        """Handle dashboard page: render server config and request totals."""
        template = self.jinja_env.get_template("dashboard.html")

        # Get server configuration information

        # Get server configuration (not runtime status)
        enabled_servers = {
            "web_ui": True,  # If we're serving this page, Web UI is enabled
            "apm_server": True,  # Always enabled in the main app
            "otlp_http": True,  # Always enabled
            "otlp_grpc": True,  # Always enabled
        }

        # Get configuration from main app config stored in agent
        main_config = self.config
        agent_url = main_config.get("agent_url", "")
        # A non-empty agent_url means requests are proxied to a real agent
        is_proxying = bool(agent_url)

        # Get actual port configuration from main app (defaults mirror the
        # standard agent/OTLP ports)
        actual_apm_port = main_config.get("port", 8126)
        actual_otlp_http_port = main_config.get("otlp_http_port", 4318)
        actual_otlp_grpc_port = main_config.get("otlp_grpc_port", 4317)

        content = template.render(
            title="Dashboard",
            total_requests=len(request_storage) if request_storage is not None else 0,
            # Server configuration
            web_ui_port=self.config.get("web_ui_port", 8080),
            apm_port=actual_apm_port,
            otlp_http_port=actual_otlp_http_port,
            otlp_grpc_port=actual_otlp_grpc_port,
            enabled_servers=enabled_servers,
            is_proxying=is_proxying,
            agent_url=agent_url,
            snapshot_dir=main_config.get("snapshot_dir", "snapshots"),
            vcr_enabled=bool(main_config.get("vcr_cassettes_directory")),
            error_responses_disabled=main_config.get("disable_error_responses", False),
            max_requests=main_config.get("max_requests", 200),
        )
        return web.Response(text=content, content_type="text/html")
|
616
|
+
async def handle_requests(self, request: web.Request) -> web.Response:
|
|
617
|
+
"""Handle requests page - live view of all requests"""
|
|
618
|
+
template = self.jinja_env.get_template("requests.html")
|
|
619
|
+
|
|
620
|
+
# Get processed requests from agent
|
|
621
|
+
request_data = self.get_requests_from_agent()
|
|
622
|
+
|
|
623
|
+
content = template.render(
|
|
624
|
+
requests=request_data,
|
|
625
|
+
total_requests=len(request_storage) if request_storage is not None else 0,
|
|
626
|
+
)
|
|
627
|
+
return web.Response(text=content, content_type="text/html")
|
|
628
|
+
|
|
629
|
+
async def handle_trace_detail(self, request: web.Request) -> web.Response:
|
|
630
|
+
"""Handle individual trace detail page"""
|
|
631
|
+
trace_id = int(request.match_info["trace_id"])
|
|
632
|
+
template = self.jinja_env.get_template("trace_detail.html")
|
|
633
|
+
try:
|
|
634
|
+
trace = await self.agent._trace_by_trace_id(trace_id)
|
|
635
|
+
content = template.render(
|
|
636
|
+
title=f"Trace {trace_id}",
|
|
637
|
+
trace_id=trace_id,
|
|
638
|
+
trace=trace,
|
|
639
|
+
)
|
|
640
|
+
except KeyError:
|
|
641
|
+
content = template.render(
|
|
642
|
+
title=f"Trace {trace_id}",
|
|
643
|
+
trace_id=trace_id,
|
|
644
|
+
trace=None,
|
|
645
|
+
error="Trace not found",
|
|
646
|
+
)
|
|
647
|
+
return web.Response(text=content, content_type="text/html")
|
|
648
|
+
|
|
649
|
+
    async def handle_config(self, request: web.Request) -> web.Response:
        """Handle config page: show remote-config payloads per session token."""
        template = self.jinja_env.get_template("config.html")

        # Get selected token from query parameter ("null"/empty mean default)
        selected_token: Optional[str] = request.query.get("token", "")
        if selected_token == "null" or selected_token == "":
            selected_token = None

        # Get available session tokens from stored requests
        session_tokens = set()
        for req_info in self.get_requests_from_agent():
            token = req_info.get("session_token")
            if token:
                session_tokens.add(token)

        # Always include None (default) option
        all_tokens = [None] + sorted(session_tokens)

        # Get current remote config data for selected token; best-effort —
        # any failure falls back to an empty config
        current_config = {}
        try:
            current_config = await self.agent._rc_server.get_config_response(selected_token)
            if not current_config:
                current_config = {}
        except Exception:
            current_config = {}

        # Get all configs for display, skipping tokens that error out
        all_configs = {}
        for token in all_tokens:
            try:
                token_config = await self.agent._rc_server.get_config_response(token)
                if token_config:
                    all_configs[str(token)] = token_config
            except Exception:
                pass

        content = template.render(
            title="Configuration",
            session_tokens=all_tokens,
            selected_token=selected_token,
            current_config_json=json.dumps(current_config, indent=2) if current_config else "{}",
            config_data=json.dumps(all_configs, indent=2) if all_configs else "{}",
        )
        return web.Response(text=content, content_type="text/html")
|
|
696
|
+
async def handle_config_create(self, request: web.Request) -> web.Response:
|
|
697
|
+
"""Handle creating a new remote config response"""
|
|
698
|
+
try:
|
|
699
|
+
data = await request.post()
|
|
700
|
+
token = data.get("token") or None
|
|
701
|
+
config_data = data.get("config_data", "{}")
|
|
702
|
+
|
|
703
|
+
# Parse and validate JSON
|
|
704
|
+
try:
|
|
705
|
+
if isinstance(config_data, bytes):
|
|
706
|
+
config_data = config_data.decode("utf-8")
|
|
707
|
+
elif not isinstance(config_data, str):
|
|
708
|
+
config_data = str(config_data)
|
|
709
|
+
parsed_config = json.loads(config_data)
|
|
710
|
+
except json.JSONDecodeError as e:
|
|
711
|
+
return web.json_response({"error": f"Invalid JSON: {e}"}, status=400)
|
|
712
|
+
|
|
713
|
+
self.agent._rc_server.create_config_response(token, parsed_config)
|
|
714
|
+
return web.json_response({"status": "success", "message": "Config created successfully"})
|
|
715
|
+
|
|
716
|
+
except Exception as e:
|
|
717
|
+
return web.json_response({"error": str(e)}, status=500)
|
|
718
|
+
|
|
719
|
+
async def handle_config_update(self, request: web.Request) -> web.Response:
    """Update an existing remote-config response for a session token.

    Expects form fields ``token`` (optional) and ``config_data`` (a JSON
    string). Returns 400 on malformed JSON and 500 on unexpected errors.
    """
    try:
        form = await request.post()
        token = form.get("token") or None
        raw = form.get("config_data", "{}")

        # Normalize the payload to text before parsing it as JSON.
        if isinstance(raw, bytes):
            raw = raw.decode("utf-8")
        elif not isinstance(raw, str):
            raw = str(raw)
        try:
            parsed_config = json.loads(raw)
        except json.JSONDecodeError as e:
            return web.json_response({"error": f"Invalid JSON: {e}"}, status=400)

        self.agent._rc_server.update_config_response(token, parsed_config)
        return web.json_response({"status": "success", "message": "Config updated successfully"})

    except Exception as e:
        return web.json_response({"error": str(e)}, status=500)
async def handle_config_create_path(self, request: web.Request) -> web.Response:
    """Create a remote-config response at a specific target path.

    Expects form fields ``token`` (optional), ``path`` (required) and
    ``message`` (JSON string payload installed at that path).
    """
    try:
        form = await request.post()
        token = form.get("token") or None
        path = form.get("path", "")
        raw_message = form.get("message", "{}")

        if not path:
            return web.json_response({"error": "Path is required"}, status=400)

        # Normalize the payload to text before parsing it as JSON.
        if isinstance(raw_message, bytes):
            raw_message = raw_message.decode("utf-8")
        elif not isinstance(raw_message, str):
            raw_message = str(raw_message)
        try:
            parsed_message = json.loads(raw_message)
        except json.JSONDecodeError as e:
            return web.json_response({"error": f"Invalid message JSON: {e}"}, status=400)

        self.agent._rc_server.create_config_path_response(token, path, parsed_message)
        return web.json_response({"status": "success", "message": "Config path created successfully"})

    except Exception as e:
        return web.json_response({"error": str(e)}, status=500)
async def handle_config_clear(self, request: web.Request) -> web.Response:
    """Clear the remote config for a session token.

    Clearing is implemented by installing an empty config response, which
    supersedes whatever was previously published for the token.
    """
    try:
        form = await request.post()
        token = form.get("token") or None

        # An empty response effectively wipes any prior config state.
        self.agent._rc_server.create_config_response(token, {})
        return web.json_response({"status": "success", "message": "Config cleared successfully"})

    except Exception as e:
        return web.json_response({"error": str(e)}, status=500)
async def handle_tracer_flares(self, request: web.Request) -> web.Response:
    """Render the tracer-flares page, aggregating flares from every session.

    Flares are collected from the default (token-less) session and from
    every session token observed in stored requests; each flare is tagged
    with its session token for display.
    """
    template = self.jinja_env.get_template("tracer_flares.html")

    tracer_flares = []
    try:
        collected = []

        # Flares recorded under the default (None) session.
        for flare in await self.agent._tracerflares_by_session(None):
            flare["session_token"] = None  # tag for display
            collected.append(flare)

        # Discover every session token seen in stored requests, then pull
        # the flares recorded under each of them.
        tokens = set()
        stored = request_storage.get_all_requests() if request_storage is not None else []
        for req_data in stored:
            token = req_data.get("headers", {}).get("X-Datadog-Test-Session-Token")
            if token:
                tokens.add(token)

        for token in tokens:
            for flare in await self.agent._tracerflares_by_session(token):
                flare["session_token"] = token  # tag for display
                collected.append(flare)

        tracer_flares = collected
    except Exception as e:
        print(f"Error getting tracer flares: {e}")

    # At most one flare collection should be active at a time; surface it.
    active_flares = getattr(self.agent, "_active_flares", {})
    active_flare = next(iter(active_flares.values()), None)

    content = template.render(
        title="Tracer Flares",
        tracer_flares=tracer_flares,
        total_flares=len(tracer_flares),
        active_flare=active_flare,
    )
    return web.Response(text=content, content_type="text/html")
async def handle_start_flare(self, request: web.Request) -> web.Response:
    """Begin a tracer-flare collection for the given session token.

    Publishes an AGENT_CONFIG remote-config payload switching the tracer
    to debug logging, and records the pending flare so a later "stop"
    request can trigger the upload.
    """
    try:
        import time
        import uuid

        form = await request.post()
        session_token = form.get("token") or None

        # A unique identifier ties the eventual AGENT_TASK to this request.
        flare_uuid = str(uuid.uuid4())

        # Step 1: ask the tracer to raise its log level to debug.
        self.agent._rc_server.create_config_path_response(
            session_token,
            "datadog/2/AGENT_CONFIG/flare_debug/config",
            {"config": {"log_level": "debug"}},
        )

        # Remember the in-flight flare, keyed by session.
        if not hasattr(self.agent, "_active_flares"):
            self.agent._active_flares = {}

        self.agent._active_flares[session_token or "default"] = {
            "uuid": flare_uuid,
            "start_time": time.time(),
            "case_id": str(int(time.time())),  # Pure numeric timestamp
            "hostname": "test-agent",
            "user_handle": "test@example.com",
        }

        return web.json_response(
            {
                "status": "success",
                "message": "Tracer flare collection started",
                "uuid": flare_uuid,
            }
        )

    except Exception as e:
        return web.json_response({"error": str(e)}, status=500)
async def handle_stop_flare(self, request: web.Request) -> web.Response:
    """Finish a tracer-flare collection and trigger the upload.

    Publishes an AGENT_TASK remote-config payload referencing the UUID
    recorded by ``handle_start_flare``, then forgets the active flare.
    Returns 400 when no flare is pending for the session.
    """
    try:
        form = await request.post()
        session_token = form.get("token") or None

        # Nothing was ever started if the registry doesn't even exist yet.
        if not hasattr(self.agent, "_active_flares"):
            return web.json_response({"error": "No active flare collection"}, status=400)

        active_flares = getattr(self.agent, "_active_flares", {})
        flare_key = session_token or "default"
        if flare_key not in active_flares:
            return web.json_response({"error": "No active flare for this session"}, status=400)

        flare_info = active_flares[flare_key]

        # Step 2: instruct the tracer to package and upload the flare.
        task_config = {
            "task_type": "tracer_flare",
            "uuid": flare_info["uuid"],
            "args": {
                "case_id": flare_info["case_id"],
                "hostname": flare_info["hostname"],
                "user_handle": flare_info["user_handle"],
            },
        }
        self.agent._rc_server.create_config_path_response(
            session_token, "datadog/2/AGENT_TASK/flare_upload/config", task_config
        )

        # The collection is no longer pending.
        del active_flares[flare_key]

        return web.json_response(
            {
                "status": "success",
                "message": "Tracer flare upload triggered",
                "case_id": flare_info["case_id"],
            }
        )

    except Exception as e:
        return web.json_response({"error": str(e)}, status=500)
async def handle_download_tracer_flare(self, request: web.Request) -> web.Response:
    """Handle downloading a tracer flare ZIP file.

    Looks up a flare by the ``case_id`` query parameter across every known
    session (the default session plus every session token observed in
    stored requests) and streams its base64-decoded payload back as a ZIP
    attachment. Returns 400 when ``case_id`` is missing, 404 when no
    matching flare (or no flare file) exists, 500 on decode failures.
    """
    try:
        case_id = request.query.get("case_id")

        if not case_id:
            return web.json_response({"error": "case_id parameter required"}, status=400)

        # Get ALL tracer flares from all sessions
        all_flares = []

        # Get flares for default session (None)
        default_flares = await self.agent._tracerflares_by_session(None)
        all_flares.extend(default_flares)

        # Collect session tokens from stored requests so flares recorded
        # under a token are searched too.
        session_tokens = set()
        for req_data in request_storage.get_all_requests() if request_storage is not None else []:
            token = req_data.get("headers", {}).get("X-Datadog-Test-Session-Token")
            if token:
                session_tokens.add(token)

        for token in session_tokens:
            token_flares = await self.agent._tracerflares_by_session(token)
            all_flares.extend(token_flares)

        # Find the flare with matching case_id (first match wins).
        target_flare = None
        for flare in all_flares:
            if flare.get("case_id") == case_id:
                target_flare = flare
                break

        if not target_flare:
            # Create detailed error message with available case_ids so the
            # caller can see what would have matched.
            available_cases = [f"'{flare.get('case_id')}'" for flare in all_flares]
            error_msg = f"Flare not found. Looking for '{case_id}', available: {available_cases}"
            return web.json_response({"error": error_msg}, status=404)

        if "flare_file" not in target_flare:
            return web.json_response({"error": "No flare file in this flare"}, status=404)

        try:
            # Decode base64 flare file data (stored base64-encoded).
            import base64

            flare_data = base64.b64decode(target_flare["flare_file"])

            # Return as ZIP file download
            return web.Response(
                body=flare_data,
                content_type="application/zip",
                headers={"Content-Disposition": f'attachment; filename="tracer_flare_{case_id}.zip"'},
            )
        except Exception as decode_error:
            return web.json_response({"error": f"Error decoding flare file: {decode_error}"}, status=500)

    except Exception as e:
        return web.json_response({"error": str(e)}, status=500)
async def handle_snapshots(self, request: web.Request) -> web.Response:
    """Render the snapshots listing page.

    Lists every ``*.json`` file in the configured snapshot directory in
    alphabetical order, with size and modification time.
    """
    template = self.jinja_env.get_template("snapshots.html")

    # Snapshot directory comes from the main app configuration.
    snapshot_dir = self.config.get("snapshot_dir", "snapshots")
    snapshots = []

    try:
        base = Path(snapshot_dir)
        if base.exists() and base.is_dir():
            # Alphabetical listing of every JSON file in the directory.
            for entry in sorted(base.glob("*.json")):
                try:
                    info = entry.stat()
                except (OSError, IOError):
                    # Skip entries we cannot stat.
                    continue
                snapshots.append(
                    {
                        "filename": entry.name,
                        "size": info.st_size,
                        "modified": info.st_mtime,
                    }
                )
    except Exception as e:
        # Directory-level access failure is surfaced on the page.
        error_msg: Optional[str] = f"Error accessing snapshot directory: {e}"
    else:
        error_msg = None

    content = template.render(
        title="Snapshots",
        snapshots=snapshots,
        snapshot_dir=snapshot_dir,
        error=error_msg,
    )
    return web.Response(text=content, content_type="text/html")
async def handle_snapshot_detail(self, request: web.Request) -> web.Response:
    """Render the detail page for one snapshot file.

    Reads the requested file from the snapshot directory (rejecting paths
    that resolve outside it), pretty-prints the JSON content and computes
    trace/span counts for display.
    """
    filename = request.match_info["filename"]
    template = self.jinja_env.get_template("snapshot_detail.html")

    # Get snapshot directory from main app config
    main_config = self.config
    snapshot_dir = main_config.get("snapshot_dir", "snapshots")

    try:
        snapshot_path = (Path(snapshot_dir) / filename).resolve()

        # Security check: the resolved file must live inside the snapshot
        # directory. Checking Path.parents avoids the prefix bug where a
        # sibling like "snapshots_evil/..." passes a startswith() test.
        allowed_dir = Path(snapshot_dir).resolve()
        if allowed_dir not in snapshot_path.parents:
            raise ValueError("File path not allowed")

        if not snapshot_path.exists() or not snapshot_path.is_file():
            raise FileNotFoundError("Snapshot file not found")

        # Read and parse JSON content
        with open(snapshot_path, "r", encoding="utf-8") as f:
            raw_content = f.read()

        try:
            parsed_data = json.loads(raw_content)
            # Pretty-format the JSON for display.
            formatted_content = json.dumps(parsed_data, indent=2, ensure_ascii=False)
            is_valid_json = True
            parse_error: Optional[str] = None

            # Count traces and spans: a top-level list is a list of traces
            # (each a list of spans); a bare dict counts as one trace/span.
            trace_count = 0
            span_count = 0
            if isinstance(parsed_data, list):
                trace_count = len(parsed_data)
                for trace in parsed_data:
                    if isinstance(trace, list):
                        span_count += len(trace)
                    elif isinstance(trace, dict):
                        span_count += 1
            elif isinstance(parsed_data, dict):
                # Single trace case
                trace_count = 1
                span_count = 1

        except json.JSONDecodeError as e:
            # Fall back to showing the raw text with the parse error.
            formatted_content = raw_content
            is_valid_json = False
            parse_error = str(e)
            trace_count = 0
            span_count = 0

        # Get file stats
        stat = snapshot_path.stat()
        file_info = {
            "filename": filename,
            "size": stat.st_size,
            "modified": stat.st_mtime,
        }

        content = template.render(
            # Fixed: interpolate the filename into the page title (the
            # previous f-string had no placeholder).
            title=f"Snapshot: {filename}",
            filename=filename,
            file_info=file_info,
            raw_content=formatted_content,
            is_valid_json=is_valid_json,
            parse_error=parse_error,
            trace_count=trace_count,
            span_count=span_count,
            trace_data=json.dumps(parsed_data) if is_valid_json else None,
            error=None,
        )

    except (FileNotFoundError, ValueError, OSError, IOError) as e:
        content = template.render(
            title=f"Snapshot: {filename}",
            filename=filename,
            file_info=None,
            raw_content=None,
            is_valid_json=False,
            parse_error=None,
            error=f"Error loading snapshot: {e}",
        )

    return web.Response(text=content, content_type="text/html")
async def handle_requests_sse(self, request: web.Request) -> StreamResponse:
    """Handle Server-Sent Events for real-time request updates.

    Sends the current request count immediately, then keeps the stream
    open with a heartbeat every 5 seconds. New requests are pushed by
    ``notify_new_request`` from the middleware. The connection is removed
    from the broadcast set when the client disconnects.
    """
    response = StreamResponse(
        status=200,
        reason="OK",
        headers={
            "Content-Type": "text/event-stream",
            "Cache-Control": "no-cache",
            "Connection": "keep-alive",
            "Access-Control-Allow-Origin": "*",
        },
    )

    await response.prepare(request)

    # Register this connection so broadcasts can reach it.
    self._sse_connections.add(response)

    try:
        # Send initial request count from unified storage
        last_count = len(request_storage) if request_storage is not None else 0
        await response.write(f"data: {json.dumps({'type': 'count', 'count': last_count})}\n\n".encode())

        # Keep the connection alive with heartbeats; a write to a closed
        # connection raises and drops us into cleanup below.
        while True:
            await asyncio.sleep(5)  # Heartbeat every 5 seconds
            await response.write(f"data: {json.dumps({'type': 'heartbeat'})}\n\n".encode())

    # asyncio.CancelledError is a BaseException (not caught by Exception)
    # and fires when the client disconnects mid-await, so it must be
    # listed explicitly. ConnectionResetError was removed from the tuple:
    # it is an Exception subclass and was redundant.
    except (asyncio.CancelledError, Exception):
        pass
    finally:
        # Clean up connection
        self._sse_connections.discard(response)

    return response
async def notify_new_request(self, request_info: Dict[str, Any]) -> None:
    """Notify all SSE connections about a new request.

    For trace-intake requests (/v0.4, /v0.5, /v0.7, /v1.0) the raw body is
    decoded into trace chunks so clients can render them live; all other
    requests are forwarded as-is. Dead connections discovered while
    broadcasting are pruned from the connection set.
    """
    if not self._sse_connections:
        return

    # Add trace data processing for streaming requests if this is a trace request
    trace_data = None
    if request_info.get("path") in [
        "/v0.4/traces",
        "/v0.5/traces",
        "/v0.7/traces",
        "/v1.0/traces",
    ]:
        try:
            import base64
            import json

            # Parse trace data using the proper decoder based on path
            from .trace import decode_v1
            from .trace import decode_v04
            from .trace import decode_v05
            from .trace import decode_v07

            raw_body = request_info.get("body", "")
            content_type = request_info.get("content_type", "")
            path = request_info.get("path", "")

            if raw_body:
                # Decode from base64 if needed (msgpack requests are base64 encoded in agent.py)
                if "msgpack" in content_type.lower():
                    binary_data = base64.b64decode(raw_body)
                else:
                    binary_data = raw_body.encode("utf-8")

                # Use the appropriate decoder based on path
                if path == "/v0.4/traces":
                    traces = decode_v04(content_type, binary_data, False)
                elif path == "/v0.5/traces":
                    traces = decode_v05(binary_data)
                elif path == "/v0.7/traces":
                    traces = decode_v07(binary_data)
                elif path == "/v1.0/traces":
                    traces = decode_v1(binary_data)
                else:
                    traces = []
            else:
                # No body: nothing to decode.
                traces = None

            if traces:
                # Count traces and spans using the same logic as the static version
                trace_count = len(traces)
                span_count = sum(len(trace) for trace in traces)

                # Clean the data for JSON serialization
                clean_traces = self._clean_trace_data_for_json(traces)

                # Create base64-encoded version for safe HTML transport
                trace_data_b64 = base64.b64encode(
                    json.dumps(
                        {
                            "traces": clean_traces,
                            "trace_count": trace_count,
                            "span_count": span_count,
                        }
                    ).encode("utf-8")
                ).decode("ascii")

                trace_data = {
                    "traces": clean_traces,
                    "trace_count": trace_count,
                    "span_count": span_count,
                    "trace_data_b64": trace_data_b64,
                }
        except Exception:
            # Failed to process streaming trace data; fall back to plain
            # request notification.
            trace_data = None

    # Add trace_data to request_info if present (copy, don't mutate caller's dict)
    if trace_data:
        request_info = {**request_info, "trace_data": trace_data}

    message = json.dumps(
        {
            "type": "new_request",
            "request": request_info,
            "total_count": len(request_storage) if request_storage is not None else 0,
        }
    )

    # Send to all connected clients
    dead_connections = []
    for connection in self._sse_connections:
        try:
            await connection.write(f"data: {message}\n\n".encode())
        except Exception:
            # Connection is dead, mark for removal
            dead_connections.append(connection)

    # Clean up dead connections
    for connection in dead_connections:
        self._sse_connections.discard(connection)
async def notify_latest_request(self) -> None:
    """Broadcast the most recently processed request to every SSE client."""
    if not self._sse_connections:
        return

    try:
        # The newest processed request sits at the front of the list.
        processed_requests = self.get_requests_from_agent()
        if not processed_requests:
            return
        latest = processed_requests[0]

        payload = json.dumps(
            {
                "type": "new_request",
                "request": latest,
                "total_count": len(request_storage) if request_storage is not None else 0,
            }
        )

        # Write to every client, remembering the ones whose sockets fail
        # so they can be dropped afterwards.
        stale = []
        frame = f"data: {payload}\n\n".encode()
        for conn in self._sse_connections:
            try:
                await conn.write(frame)
            except Exception:
                stale.append(conn)

        for conn in stale:
            self._sse_connections.discard(conn)

    except Exception as e:
        log.error(f"Error in notify_latest_request: {e}")
async def handle_clear_requests(self, request: web.Request) -> web.Response:
    """Delete every stored request from the unified request storage."""
    if request_storage is not None:
        request_storage.clear_requests()
    return web.json_response({"status": "success", "message": "All requests cleared"})
async def handle_download_requests(self, request: web.Request) -> web.Response:
    """Serve all currently stored requests as a downloadable JSON file."""
    # The unified storage already holds fully-processed request records.
    payload = json.dumps(self.get_requests_from_agent(), indent=2)

    # Attachment headers make browsers save the response as requests.json.
    download_headers = {
        "Content-Disposition": "attachment; filename=requests.json",
        "Content-Length": str(len(payload.encode("utf-8"))),
    }
    return web.Response(
        text=payload,
        content_type="application/json",
        headers=download_headers,
    )
async def handle_render_waterfall(self, request: web.Request) -> web.Response:
    """HTMX endpoint: Render waterfall view for trace data.

    Expects a JSON body containing ``trace_data`` (a dict with a
    ``traces`` list) and responds with an HTML fragment — an empty-state
    or error fragment when there is nothing renderable.

    (Type annotations added for consistency with the sibling handlers.)
    """
    try:
        # Get request data from POST body
        req_data = await request.json()

        # Extract trace data
        trace_data = req_data.get("trace_data")
        if not trace_data or not isinstance(trace_data, dict):
            return web.Response(
                text='<div class="empty-state">No trace data available</div>',
                content_type="text/html",
            )

        # Use existing trace processing
        traces = trace_data.get("traces", [])
        if not traces:
            return web.Response(
                text='<div class="empty-state">No trace data available for waterfall view</div>',
                content_type="text/html",
            )

        # Render waterfall HTML server-side
        html = self._render_waterfall_html(traces)
        return web.Response(text=html, content_type="text/html")

    except Exception as e:
        log.error(f"Error rendering waterfall: {e}")
        return web.Response(
            text=f'<div class="error-message">Error generating waterfall view: {str(e)}</div>',
            content_type="text/html",
        )
async def handle_render_json(self, request: web.Request) -> web.Response:
    """HTMX endpoint: Render pretty-printed JSON for request body.

    The body text originates from arbitrary captured client requests, so
    it is HTML-escaped before being embedded in the returned fragment to
    prevent script injection into the UI.
    """
    import html as html_mod  # stdlib escaping for untrusted text

    try:
        # Get request data from POST body
        req_data = await request.json()

        # Extract body data and content type
        body_data = req_data.get("body_data", "")
        content_type = req_data.get("content_type", "")

        if not body_data:
            return web.Response(
                text='<div class="empty-state">No body data</div>',
                content_type="text/html",
            )

        # Process the body data
        processed_body, is_binary = BodyProcessor.process_body(
            body_data.encode("utf-8") if isinstance(body_data, str) else body_data,
            content_type,
        )

        if is_binary:
            html = f'<pre class="json-display binary">{html_mod.escape(str(processed_body))}</pre>'
        else:
            # Try to pretty-print as JSON
            try:
                parsed_json = json.loads(processed_body)
                pretty_json = json.dumps(parsed_json, indent=2)
                html = f'<pre class="json-display">{html_mod.escape(pretty_json)}</pre>'
            except json.JSONDecodeError:
                # Not JSON, display as text
                html = f'<pre class="json-display text">{html_mod.escape(str(processed_body))}</pre>'

        return web.Response(text=html, content_type="text/html")

    except Exception as e:
        log.error(f"Error rendering JSON: {e}")
        return web.Response(
            text=f'<div class="error-message">Error processing body data: {str(e)}</div>',
            content_type="text/html",
        )
def _render_waterfall_html(self, traces: List[Any]) -> str:
    """Render waterfall HTML from trace data.

    Builds one timeline per trace; span bars are positioned relative to
    the earliest start and the total wall-clock duration of the trace.
    The trace id comes from an untrusted span payload and is HTML-escaped
    before being interpolated into markup.
    """
    import html as html_mod  # stdlib escaping for untrusted values

    if not traces:
        return '<div class="empty-state">No trace data available</div>'

    html = '<div class="waterfall-traces">'

    for trace_index, trace in enumerate(traces):
        if not trace or not isinstance(trace, list):
            continue

        # Process spans: ignore anything that is not a non-empty dict.
        valid_spans = [span for span in trace if span and isinstance(span, dict)]
        if not valid_spans:
            continue

        # Calculate timing across the whole trace.
        min_start = min(span.get("start", 0) for span in valid_spans)
        max_end = max(span.get("start", 0) + span.get("duration", 0) for span in valid_spans)
        total_duration = max_end - min_start

        if total_duration == 0:
            # Degenerate case: all spans share one instant; scale by the
            # longest individual duration instead.
            total_duration = max(span.get("duration", 0) for span in valid_spans)

        trace_id = valid_spans[0].get("trace_id", trace_index)
        safe_trace_id = html_mod.escape(str(trace_id))

        html += f"""
    <div class="waterfall-trace">
        <div class="trace-header">
            <div class="trace-header-left">
                <h4>Trace {safe_trace_id}</h4>
            </div>
            <span class="trace-duration">{self._format_duration(total_duration)}</span>
        </div>
        <div class="spans-timeline">
"""

        # Sort spans and render the hierarchy depth-first.
        sorted_spans = sorted(valid_spans, key=lambda s: s.get("start", 0))
        span_hierarchy = self._build_span_hierarchy(sorted_spans)

        for span_info in span_hierarchy:
            html += self._render_span_html(span_info, min_start, total_duration, 0)

        html += "</div></div>"

    html += "</div>"
    return html
def _build_span_hierarchy(self, spans: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
    """Build hierarchical span structure.

    A span whose ``parent_id`` is missing, falsy, or refers to a span not
    present in this trace is treated as a root; children are resolved
    recursively via ``_get_children``.
    """
    # Index spans by id so parent references can be validated in O(1).
    known_ids = {span.get("span_id"): span for span in spans}

    roots = []
    for span in spans:
        parent = span.get("parent_id")
        if not parent or parent not in known_ids:
            # Root span
            roots.append({"span": span, "children": self._get_children(span, spans)})
    return roots
def _get_children(self, parent_span: Dict[str, Any], all_spans: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
    """Get child spans recursively, ordered by start time.

    A span counts as a child when its ``parent_id`` names this parent and
    it is not the parent itself (guards against self-referential ids).
    """
    parent_id = parent_span.get("span_id")

    child_nodes = [
        {"span": candidate, "children": self._get_children(candidate, all_spans)}
        for candidate in all_spans
        if candidate.get("parent_id") == parent_id and candidate.get("span_id") != parent_id
    ]
    return sorted(child_nodes, key=lambda node: node["span"].get("start", 0))
def _render_span_html(self, span_info: Dict[str, Any], min_start: int, total_duration: int, depth: int) -> str:
    """Render HTML for a single span and its children.

    ``depth`` controls the left indentation of nested spans; bar position
    and width are percentages of the trace's total duration. Service and
    operation names come from untrusted span payloads and are HTML-escaped
    before interpolation.
    """
    import html as html_mod  # stdlib escaping for untrusted values

    span = span_info["span"]
    children = span_info.get("children", [])

    # Calculate positioning
    start_time = span.get("start", 0)
    duration = span.get("duration", 0)

    if total_duration > 0:
        left_percent = ((start_time - min_start) / total_duration) * 100
        width_percent = (duration / total_duration) * 100
    else:
        left_percent = 0
        width_percent = 100

    # Generate span classes
    span_class = "waterfall-span"
    if span.get("error"):
        span_class += " error"

    service = html_mod.escape(str(span.get("service", "unknown")))
    operation = html_mod.escape(str(span.get("name", "unknown")))

    indent_style = f"margin-left: {depth * 20}px;"
    # Clamp the bar to a minimum visible width of 0.5%.
    bar_style = f"left: {left_percent}%; width: {max(width_percent, 0.5)}%;"

    html = f"""
    <div class="{span_class}" style="{indent_style}">
        <div class="span-info">
            <span class="service-name">{service}</span>
            <span class="operation-name">{operation}</span>
            <span class="span-duration">{self._format_duration(duration)}</span>
        </div>
        <div class="span-bar" style="{bar_style}"></div>
    </div>
"""

    # Add children
    for child in children:
        html += self._render_span_html(child, min_start, total_duration, depth + 1)

    return html
def _format_duration(self, duration_ns: int) -> str:
    """Format duration from nanoseconds to human readable.

    Picks the largest unit (ns, μs, ms, s) whose threshold the value
    stays below; seconds get two decimal places, the rest one.
    """
    # (upper bound in ns, formatter) pairs, checked smallest-unit first.
    scales = (
        (1_000, lambda ns: f"{ns}ns"),
        (1_000_000, lambda ns: f"{ns / 1000:.1f}μs"),
        (1_000_000_000, lambda ns: f"{ns / 1000000:.1f}ms"),
    )
    for upper_bound, render in scales:
        if duration_ns < upper_bound:
            return render(duration_ns)
    return f"{duration_ns / 1000000000:.2f}s"