fastapi-radar 0.1.6__py3-none-any.whl → 0.3.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fastapi_radar/__init__.py +3 -2
- fastapi_radar/api.py +383 -37
- fastapi_radar/background.py +120 -0
- fastapi_radar/capture.py +101 -16
- fastapi_radar/dashboard/dist/assets/index-8Om0PGu6.js +326 -0
- fastapi_radar/dashboard/dist/assets/index-D51YrvFG.css +1 -0
- fastapi_radar/dashboard/dist/assets/index-p3czTzXB.js +361 -0
- fastapi_radar/dashboard/dist/index.html +2 -2
- fastapi_radar/dashboard/node_modules/flatted/python/flatted.py +149 -0
- fastapi_radar/middleware.py +115 -17
- fastapi_radar/models.py +143 -19
- fastapi_radar/radar.py +138 -44
- fastapi_radar/tracing.py +258 -0
- fastapi_radar/utils.py +26 -1
- {fastapi_radar-0.1.6.dist-info → fastapi_radar-0.3.1.dist-info}/METADATA +58 -15
- fastapi_radar-0.3.1.dist-info/RECORD +19 -0
- {fastapi_radar-0.1.6.dist-info → fastapi_radar-0.3.1.dist-info}/top_level.txt +0 -1
- fastapi_radar/dashboard/dist/assets/index-BJa0l2JD.js +0 -313
- fastapi_radar/dashboard/dist/assets/index-DCxkDBhr.css +0 -1
- fastapi_radar-0.1.6.dist-info/RECORD +0 -17
- tests/__init__.py +0 -1
- tests/test_radar.py +0 -65
- {fastapi_radar-0.1.6.dist-info → fastapi_radar-0.3.1.dist-info}/WHEEL +0 -0
- {fastapi_radar-0.1.6.dist-info → fastapi_radar-0.3.1.dist-info}/licenses/LICENSE +0 -0
fastapi_radar/radar.py
CHANGED
@@ -1,17 +1,43 @@
 """Main Radar class for FastAPI Radar."""
 
-from typing import Optional, List
-from pathlib import Path
 from contextlib import contextmanager
+import os
+import sys
+import multiprocessing
+from pathlib import Path
+from typing import List, Optional
+
 from fastapi import FastAPI
 from sqlalchemy import create_engine
 from sqlalchemy.engine import Engine
-from sqlalchemy.orm import
+from sqlalchemy.orm import Session, sessionmaker
+from sqlalchemy.pool import StaticPool
 
-from .models import Base
-from .middleware import RadarMiddleware
-from .capture import QueryCapture
 from .api import create_api_router
+from .capture import QueryCapture
+from .middleware import RadarMiddleware
+from .models import Base
+
+
+def is_reload_worker() -> bool:
+    """Check if we're running in a reload worker process (used by fastapi dev)."""
+    if os.environ.get("UVICORN_RELOAD"):
+        return True
+
+    if os.environ.get("WERKZEUG_RUN_MAIN"):
+        return True
+
+    if hasattr(multiprocessing.current_process(), "name"):
+        process_name = multiprocessing.current_process().name
+        if process_name != "MainProcess" and "SpawnProcess" in process_name:
+            return True
+
+    return False
+
+
+def is_windows() -> bool:
+    """Check if we're running on Windows."""
+    return sys.platform.startswith("win")
 
 
 class Radar:
@@ -29,6 +55,10 @@ class Radar:
         capture_sql_bindings: bool = True,
         exclude_paths: Optional[List[str]] = None,
         theme: str = "auto",
+        enable_tracing: bool = True,
+        service_name: str = "fastapi-app",
+        include_in_schema: bool = True,
+        db_path: Optional[str] = None,
     ):
         self.app = app
         self.db_engine = db_engine
@@ -39,38 +69,94 @@ class Radar:
         self.capture_sql_bindings = capture_sql_bindings
         self.exclude_paths = exclude_paths or []
         self.theme = theme
-        self.
+        self.enable_tracing = enable_tracing
+        self.service_name = service_name
+        self.db_path = db_path
+        self.query_capture = None
 
-        # Add all radar paths to excluded paths - exclude everything under /__radar
         if dashboard_path not in self.exclude_paths:
             self.exclude_paths.append(dashboard_path)
-
-        # Exclude favicon.ico
         self.exclude_paths.append("/favicon.ico")
 
-        # Setup storage engine (default to SQLite)
         if storage_engine:
             self.storage_engine = storage_engine
         else:
-
-
-
-
+            storage_url = os.environ.get("RADAR_STORAGE_URL")
+            if storage_url:
+                if "duckdb" in storage_url:
+                    self.storage_engine = create_engine(
+                        storage_url, poolclass=StaticPool
+                    )
+                else:
+                    self.storage_engine = create_engine(storage_url)
+            else:
+                import duckdb_engine  # noqa: F401
+
+                if self.db_path:
+                    try:
+                        provided_path = Path(self.db_path).resolve()
+                        if provided_path.suffix.lower() == ".duckdb":
+                            radar_db_path = provided_path
+                            radar_db_path.parent.mkdir(parents=True, exist_ok=True)
+                        else:
+                            radar_db_path = provided_path / "radar.duckdb"
+                            provided_path.mkdir(parents=True, exist_ok=True)
+
+                    except Exception as e:
+                        import warnings
+
+                        warnings.warn(
+                            (
+                                f"Failed to create database path '{self.db_path}': {e}. "
+                                f"Using current directory."
+                            ),
+                            UserWarning,
+                        )
+
+                        radar_db_path = Path.cwd() / "radar.duckdb"
+                        radar_db_path.parent.mkdir(parents=True, exist_ok=True)
+                else:
+                    radar_db_path = Path.cwd() / "radar.duckdb"
+                    radar_db_path.parent.mkdir(parents=True, exist_ok=True)
+
+                if is_reload_worker():
+                    import warnings
+
+                    warnings.warn(
+                        "FastAPI Radar: Detected development mode with auto-reload. "
+                        "Using in-memory database to avoid file locking issues. "
+                        "Data will not persist between reloads.",
+                        UserWarning,
+                    )
+                    self.storage_engine = create_engine(
+                        "duckdb:///:memory:",
+                        connect_args={
+                            "read_only": False,
+                            "config": {"memory_limit": "500mb"},
+                        },
+                        poolclass=StaticPool,
+                    )
+                else:
+                    self.storage_engine = create_engine(
+                        f"duckdb:///{radar_db_path}",
+                        connect_args={
+                            "read_only": False,
+                            "config": {"memory_limit": "500mb"},
+                        },
+                        poolclass=StaticPool,
+                    )
 
-        # Create session maker for storage
         self.SessionLocal = sessionmaker(
             autocommit=False, autoflush=False, bind=self.storage_engine
         )
 
-        # Initialize components
         self._setup_middleware()
 
-        # Only setup query capture if db_engine is provided
         if self.db_engine:
             self._setup_query_capture()
 
-        self._setup_api()
-        self._setup_dashboard()
+        self._setup_api(include_in_schema=include_in_schema)
+        self._setup_dashboard(include_in_schema=include_in_schema)
 
     @contextmanager
     def get_session(self) -> Session:
@@ -89,6 +175,8 @@ class Radar:
             exclude_paths=self.exclude_paths,
             max_body_size=10000,
             capture_response_body=True,
+            enable_tracing=self.enable_tracing,
+            service_name=self.service_name,
         )
 
     def _setup_query_capture(self) -> None:
@@ -104,20 +192,19 @@ class Radar:
         )
         self.query_capture.register(self.db_engine)
 
-    def _setup_api(self) -> None:
+    def _setup_api(self, include_in_schema: bool) -> None:
         """Mount API endpoints."""
         api_router = create_api_router(self.get_session)
-        self.app.include_router(api_router)
+        self.app.include_router(api_router, include_in_schema=include_in_schema)
 
-    def _setup_dashboard(self) -> None:
+    def _setup_dashboard(self, include_in_schema: bool) -> None:
         """Mount dashboard static files."""
-        from fastapi.responses import FileResponse
         from fastapi import Request
+        from fastapi.responses import FileResponse
 
         dashboard_dir = Path(__file__).parent / "dashboard" / "dist"
 
         if not dashboard_dir.exists():
-            # Create placeholder dashboard for development
             dashboard_dir.mkdir(parents=True, exist_ok=True)
             self._create_placeholder_dashboard(dashboard_dir)
             print("\n" + "=" * 60)
@@ -129,11 +216,11 @@ class Radar:
             print(" npm run build")
             print("=" * 60 + "\n")
 
-
-
-
+        @self.app.get(
+            f"{self.dashboard_path}/{{full_path:path}}",
+            include_in_schema=include_in_schema,
+        )
         async def serve_dashboard(request: Request, full_path: str = ""):
-            # Check if it's a request for a static asset
             if full_path and any(
                 full_path.endswith(ext)
                 for ext in [
@@ -152,7 +239,6 @@ class Radar:
                 if file_path.exists():
                     return FileResponse(file_path)
 
-            # For all other routes, serve index.html (SPA behavior)
             index_path = dashboard_dir / "index.html"
             if index_path.exists():
                 return FileResponse(index_path)
@@ -160,7 +246,6 @@ class Radar:
             return {"error": "Dashboard not found. Please build the dashboard."}
 
     def _create_placeholder_dashboard(self, dashboard_dir: Path) -> None:
-        """Create a placeholder dashboard for development."""
         index_html = dashboard_dir / "index.html"
         index_html.write_text(
             """
@@ -249,17 +334,20 @@ class Radar:
         </div>
     </div>
     <script>
-        // Fetch stats from API
         async function loadStats() {{
             try {{
                 const response = await fetch('/__radar/api/stats?hours=1');
                 const data = await response.json();
 
-                document.querySelectorAll('.stat-value')[0].textContent =
-
+                document.querySelectorAll('.stat-value')[0].textContent =
+                    data.total_requests;
+                document.querySelectorAll('.stat-value')[1].textContent =
+                    data.total_queries;
                 document.querySelectorAll('.stat-value')[2].textContent =
-                    data.avg_response_time ?
-
+                    data.avg_response_time ?
+                    `${{data.avg_response_time.toFixed(1)}}ms` : '--';
+                document.querySelectorAll('.stat-value')[3].textContent =
+                    data.total_exceptions;
 
                 document.querySelectorAll('.stat-value').forEach(el => {{
                     el.classList.remove('loading');
@@ -269,9 +357,7 @@ class Radar:
             }}
         }}
 
-        // Load stats on page load
         loadStats();
-        // Refresh stats every 5 seconds
         setInterval(loadStats, 5000);
     </script>
 </body>
@@ -282,21 +368,29 @@ class Radar:
         )
 
     def create_tables(self) -> None:
-        """Create
-
+        """Create database tables.
+
+        With dev mode (fastapi dev), this safely handles
+        multiple process attempts to create tables.
+        """
+        try:
+            Base.metadata.create_all(bind=self.storage_engine)
+        except Exception as e:
+            error_msg = str(e).lower()
+            if "already exists" not in error_msg and "lock" not in error_msg:
+                raise
 
     def drop_tables(self) -> None:
-        """Drop radar storage tables."""
         Base.metadata.drop_all(bind=self.storage_engine)
 
     def cleanup(self, older_than_hours: Optional[int] = None) -> None:
-
-
+        from datetime import datetime, timedelta, timezone
+
         from .models import CapturedRequest
 
         with self.get_session() as session:
             hours = older_than_hours or self.retention_hours
-            cutoff = datetime.
+            cutoff = datetime.now(timezone.utc) - timedelta(hours=hours)
 
             deleted = (
                 session.query(CapturedRequest)
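
For readers trying the new options, here is a minimal usage sketch. It assumes the Radar(app, db_engine=...) call style from the 0.1.x README still applies and that Radar is exported from the package root; the engine URL, directory, and service name are illustrative values, not part of the package.

from fastapi import FastAPI
from sqlalchemy import create_engine

from fastapi_radar import Radar  # assumed export; the class itself lives in fastapi_radar/radar.py

app = FastAPI()
engine = create_engine("sqlite:///./app.db")  # the application's own database engine

radar = Radar(
    app,
    db_engine=engine,           # optional: enables SQL query capture
    db_path="./radar_data",     # new in 0.3.1: directory (or .duckdb file) for radar's DuckDB storage
    enable_tracing=True,        # new in 0.3.1: record per-request traces and spans
    service_name="orders-api",  # new in 0.3.1: service name attached to traces
    include_in_schema=False,    # new in 0.3.1: keep radar routes out of the OpenAPI schema
)
radar.create_tables()

As the constructor above shows, setting the RADAR_STORAGE_URL environment variable overrides the DuckDB default with any SQLAlchemy URL, and under fastapi dev auto-reload the package falls back to an in-memory DuckDB database.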
fastapi_radar/tracing.py
ADDED
@@ -0,0 +1,258 @@
+"""Tracing core functionality module."""
+
+import uuid
+from datetime import datetime, timezone
+from typing import Optional, Dict, Any, List
+from contextvars import ContextVar
+from sqlalchemy.orm import Session
+
+from .models import Trace, Span, SpanRelation
+
+# Trace context for the current request
+trace_context: ContextVar[Optional["TraceContext"]] = ContextVar(
+    "trace_context", default=None
+)
+
+
+class TraceContext:
+    """Tracing context that manages trace and span data for a request."""
+
+    def __init__(self, trace_id: str, service_name: str = "fastapi-app"):
+        self.trace_id = trace_id
+        self.service_name = service_name
+        self.root_span_id: Optional[str] = None
+        self.current_span_id: Optional[str] = None
+        self.spans: Dict[str, Dict[str, Any]] = {}
+        self.start_time = datetime.now(timezone.utc)
+
+    def create_span(
+        self,
+        operation_name: str,
+        parent_span_id: Optional[str] = None,
+        span_kind: str = "server",
+        tags: Optional[Dict[str, Any]] = None,
+    ) -> str:
+        """Create a new span."""
+        span_id = self._generate_span_id()
+
+        span_data = {
+            "span_id": span_id,
+            "trace_id": self.trace_id,
+            "parent_span_id": parent_span_id or self.current_span_id,
+            "operation_name": operation_name,
+            "service_name": self.service_name,
+            "span_kind": span_kind,
+            "start_time": datetime.now(timezone.utc),
+            "tags": tags or {},
+            "logs": [],
+            "status": "ok",
+        }
+
+        self.spans[span_id] = span_data
+
+        # Set root span if not already set
+        if self.root_span_id is None:
+            self.root_span_id = span_id
+
+        return span_id
+
+    def finish_span(
+        self, span_id: str, status: str = "ok", tags: Optional[Dict[str, Any]] = None
+    ):
+        """Finish a span."""
+        if span_id not in self.spans:
+            return
+
+        span_data = self.spans[span_id]
+        span_data["end_time"] = datetime.now(timezone.utc)
+        span_data["duration_ms"] = (
+            span_data["end_time"] - span_data["start_time"]
+        ).total_seconds() * 1000
+        span_data["status"] = status
+
+        if tags:
+            span_data["tags"].update(tags)
+
+    def add_span_log(self, span_id: str, message: str, level: str = "info", **fields):
+        """Add a log entry to a span."""
+        if span_id not in self.spans:
+            return
+
+        log_entry = {
+            "timestamp": datetime.now(timezone.utc).isoformat(),
+            "level": level,
+            "message": message,
+            **fields,
+        }
+
+        self.spans[span_id]["logs"].append(log_entry)
+
+    def set_current_span(self, span_id: str):
+        """Set the current active span."""
+        self.current_span_id = span_id
+
+    def get_trace_summary(self) -> Dict[str, Any]:
+        """Return a trace summary for persistence and display."""
+        if not self.spans:
+            return {}
+
+        all_times = []
+        error_count = 0
+
+        for span in self.spans.values():
+            if span.get("start_time"):
+                all_times.append(span["start_time"])
+            if span.get("end_time"):
+                all_times.append(span["end_time"])
+            if span.get("status") == "error":
+                error_count += 1
+
+        start_time = min(all_times) if all_times else self.start_time
+        end_time = max(all_times) if all_times else datetime.now(timezone.utc)
+
+        return {
+            "trace_id": self.trace_id,
+            "service_name": self.service_name,
+            "operation_name": self.spans.get(self.root_span_id, {}).get(
+                "operation_name", "unknown"
+            ),
+            "start_time": start_time,
+            "end_time": end_time,
+            "duration_ms": (end_time - start_time).total_seconds() * 1000,
+            "span_count": len(self.spans),
+            "status": "error" if error_count > 0 else "ok",
+            "tags": {},
+        }
+
+    @staticmethod
+    def _generate_span_id() -> str:
+        """Generate a 16-character hexadecimal span ID."""
+        return uuid.uuid4().hex[:16]
+
+
+class TracingManager:
+    """Tracing manager responsible for persistence and querying."""
+
+    def __init__(self, get_session):
+        self.get_session = get_session
+
+    def save_trace_context(self, trace_ctx: TraceContext):
+        """Persist the trace context into the database."""
+        with self.get_session() as session:
+            # Save trace
+            trace_summary = trace_ctx.get_trace_summary()
+            trace = Trace(**trace_summary)
+            session.add(trace)
+
+            # Save spans
+            for span_data in trace_ctx.spans.values():
+                span = Span(**span_data)
+                session.add(span)
+
+            self._save_span_relations(session, trace_ctx)
+
+            session.commit()
+
+    def _save_span_relations(self, session: Session, trace_ctx: TraceContext):
+        """Store parent-child span relations for optimized querying."""
+
+        def calculate_depth(
+            span_id: str, spans: Dict[str, Dict], depth: int = 0
+        ) -> List[tuple]:
+            """Recursively compute span depth."""
+            relations = []
+            span = spans.get(span_id)
+            if not span:
+                return relations
+
+            # Find all child spans
+            for sid, s in spans.items():
+                if s.get("parent_span_id") == span_id:
+                    relations.append((span_id, sid, depth + 1))
+                    relations.extend(calculate_depth(sid, spans, depth + 1))
+
+            return relations
+
+        # Start from the root span
+        if trace_ctx.root_span_id:
+            relations = calculate_depth(trace_ctx.root_span_id, trace_ctx.spans)
+
+            for parent_id, child_id, depth in relations:
+                relation = SpanRelation(
+                    trace_id=trace_ctx.trace_id,
+                    parent_span_id=parent_id,
+                    child_span_id=child_id,
+                    depth=depth,
+                )
+                session.add(relation)
+
+    def get_waterfall_data(self, trace_id: str) -> List[Dict[str, Any]]:
+        """Return data for the waterfall view."""
+        with self.get_session() as session:
+            # Query optimized for DuckDB
+            from sqlalchemy import text
+
+            waterfall_query = text(
+                """
+                WITH span_timeline AS (
+                    SELECT
+                        s.span_id,
+                        s.parent_span_id,
+                        s.operation_name,
+                        s.service_name,
+                        s.start_time,
+                        s.end_time,
+                        s.duration_ms,
+                        s.status,
+                        s.tags,
+                        COALESCE(r.depth, 0) as depth,
+                        -- Offset relative to trace start
+                        EXTRACT(EPOCH FROM (
+                            s.start_time - MIN(s.start_time)
+                            OVER (PARTITION BY s.trace_id)
+                        )) * 1000 as offset_ms
+                    FROM radar_spans s
+                    LEFT JOIN radar_span_relations r ON s.span_id = r.child_span_id
+                    WHERE s.trace_id = :trace_id
+                )
+                SELECT * FROM span_timeline
+                ORDER BY offset_ms, depth
+                """
+            )
+
+            result = session.execute(waterfall_query, {"trace_id": trace_id})
+
+            return [
+                {
+                    "span_id": row.span_id,
+                    "parent_span_id": row.parent_span_id,
+                    "operation_name": row.operation_name,
+                    "service_name": row.service_name,
+                    "start_time": (
+                        row.start_time.isoformat() if row.start_time else None
+                    ),
+                    "end_time": row.end_time.isoformat() if row.end_time else None,
+                    "duration_ms": row.duration_ms,
+                    "status": row.status,
+                    "tags": row.tags,
+                    "depth": row.depth,
+                    "offset_ms": float(row.offset_ms) if row.offset_ms else 0.0,
+                }
+                for row in result
+            ]
+
+
+def get_current_trace_context() -> Optional[TraceContext]:
+    """Get the current trace context."""
+    return trace_context.get()
+
+
+def set_trace_context(ctx: TraceContext):
+    """Set the current trace context."""
+    trace_context.set(ctx)
+
+
+def create_trace_context(service_name: str = "fastapi-app") -> TraceContext:
+    """Create a new trace context."""
+    trace_id = uuid.uuid4().hex
+    return TraceContext(trace_id, service_name)
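
The middleware added in this release is what normally creates and persists these spans; the sketch below only exercises the context API in isolation, assuming the module is importable as fastapi_radar.tracing. The operation names, tags, and log fields are invented for illustration.

from fastapi_radar.tracing import create_trace_context, set_trace_context

ctx = create_trace_context(service_name="orders-api")
set_trace_context(ctx)  # bind to the current context variable

root = ctx.create_span("GET /orders", span_kind="server")
ctx.set_current_span(root)

# A child span picks up the current span as its parent automatically
child = ctx.create_span("SELECT orders", span_kind="client", tags={"db.system": "duckdb"})
ctx.add_span_log(child, "query executed", rows=42)
ctx.finish_span(child)

ctx.finish_span(root)
summary = ctx.get_trace_summary()  # trace_id, duration_ms, span_count, status, ...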
fastapi_radar/utils.py
CHANGED
@@ -1,8 +1,10 @@
 """Utility functions for FastAPI Radar."""
 
+import re
 from typing import Dict, Optional
-
+
 from starlette.datastructures import Headers
+from starlette.requests import Request
 
 
 def serialize_headers(headers: Headers) -> Dict[str, str]:
@@ -57,3 +59,26 @@ def format_sql(sql: str, max_length: int = 5000) -> str:
         sql = sql[:max_length] + "... [truncated]"
 
     return sql
+
+
+def redact_sensitive_data(text: Optional[str]) -> Optional[str]:
+    """Redact sensitive data from text (body content)."""
+    if not text:
+        return text
+
+    # Patterns for sensitive data
+    patterns = [
+        (r'"(password|passwd|pwd)"\s*:\s*"[^"]*"', r'"\1": "***REDACTED***"'),
+        (
+            r'"(token|api_key|apikey|secret|auth)"\s*:\s*"[^"]*"',
+            r'"\1": "***REDACTED***"',
+        ),
+        (r'"(credit_card|card_number|cvv)"\s*:\s*"[^"]*"', r'"\1": "***REDACTED***"'),
+        (r"Bearer\s+[A-Za-z0-9\-_\.]+", "Bearer ***REDACTED***"),
+    ]
+
+    result = text
+    for pattern, replacement in patterns:
+        result = re.sub(pattern, replacement, result, flags=re.IGNORECASE)
+
+    return result