prismiq-0.1.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- prismiq/__init__.py +543 -0
- prismiq/api.py +1889 -0
- prismiq/auth.py +108 -0
- prismiq/cache.py +527 -0
- prismiq/calculated_field_processor.py +231 -0
- prismiq/calculated_fields.py +819 -0
- prismiq/dashboard_store.py +1219 -0
- prismiq/dashboards.py +374 -0
- prismiq/dates.py +247 -0
- prismiq/engine.py +1315 -0
- prismiq/executor.py +345 -0
- prismiq/filter_merge.py +397 -0
- prismiq/formatting.py +298 -0
- prismiq/logging.py +489 -0
- prismiq/metrics.py +536 -0
- prismiq/middleware.py +346 -0
- prismiq/permissions.py +87 -0
- prismiq/persistence/__init__.py +45 -0
- prismiq/persistence/models.py +208 -0
- prismiq/persistence/postgres_store.py +1119 -0
- prismiq/persistence/saved_query_store.py +336 -0
- prismiq/persistence/schema.sql +95 -0
- prismiq/persistence/setup.py +222 -0
- prismiq/persistence/tables.py +76 -0
- prismiq/pins.py +72 -0
- prismiq/py.typed +0 -0
- prismiq/query.py +1233 -0
- prismiq/schema.py +333 -0
- prismiq/schema_config.py +354 -0
- prismiq/sql_utils.py +147 -0
- prismiq/sql_validator.py +219 -0
- prismiq/sqlalchemy_builder.py +577 -0
- prismiq/timeseries.py +410 -0
- prismiq/transforms.py +471 -0
- prismiq/trends.py +573 -0
- prismiq/types.py +688 -0
- prismiq-0.1.0.dist-info/METADATA +109 -0
- prismiq-0.1.0.dist-info/RECORD +39 -0
- prismiq-0.1.0.dist-info/WHEEL +4 -0
prismiq/api.py
ADDED
@@ -0,0 +1,1889 @@
"""FastAPI routes for the Prismiq analytics engine.

This module provides a factory function to create an API router that
exposes schema, validation, and query execution endpoints.
"""

# ruff: noqa: B008  # FastAPI's Depends() in function defaults is standard pattern

from __future__ import annotations

import time
from collections.abc import Awaitable, Callable
from datetime import date
from typing import TYPE_CHECKING, Any

from fastapi import APIRouter, Depends, HTTPException
from pydantic import BaseModel, ConfigDict

from prismiq import __version__
from prismiq.auth import AuthContext, create_header_auth_dependency
from prismiq.dashboards import (
    Dashboard,
    DashboardCreate,
    DashboardExport,
    DashboardUpdate,
    Widget,
    WidgetCreate,
    WidgetUpdate,
)
from prismiq.filter_merge import FilterValue, merge_filters
from prismiq.logging import get_logger
from prismiq.permissions import (
    can_delete_dashboard,
    can_edit_dashboard,
    can_edit_widget,
    can_view_dashboard,
)
from prismiq.pins import PinnedDashboard, PinRequest, ReorderPinsRequest, UnpinRequest
from prismiq.query import ValidationResult
from prismiq.schema_config import EnhancedDatabaseSchema, EnhancedTableSchema
from prismiq.sql_validator import SQLValidationError
from prismiq.timeseries import TimeInterval
from prismiq.transforms import pivot_data
from prismiq.trends import ComparisonPeriod, TrendResult, add_trend_column
from prismiq.types import (
    DatabaseSchema,
    QueryDefinition,
    QueryResult,
    QueryValidationError,
    SavedQuery,
    SavedQueryCreate,
    SavedQueryUpdate,
    TableNotFoundError,
    TableSchema,
)

if TYPE_CHECKING:
    from prismiq.engine import PrismiqEngine

# Track startup time for uptime calculation
_startup_time: float | None = None

# Module logger for cache operations
_logger = get_logger(__name__)


def _get_uptime() -> float:
    """Get uptime in seconds since router was created."""
    if _startup_time is None:
        return 0.0
    return time.time() - _startup_time


# ============================================================================
# Response Models
# ============================================================================


class ValidationResponse(BaseModel):
    """Response model for query validation endpoint."""

    valid: bool
    """Whether the query is valid."""

    errors: list[str]
    """List of validation error messages (empty if valid)."""


class DetailedValidationResponse(BaseModel):
    """Response model for detailed query validation endpoint."""

    result: ValidationResult
    """Complete validation result with detailed errors."""


class TableListResponse(BaseModel):
    """Response model for table list endpoint."""

    tables: list[str]
    """List of table names."""


class ExecuteQueryRequest(BaseModel):
    """Request model for query execution endpoint."""

    query: QueryDefinition
    """Query definition to execute."""

    bypass_cache: bool = False
    """If True, bypass cache and re-execute query, then update cache."""


class QueryResultWithCache(BaseModel):
    """Query result with cache metadata."""

    columns: list[str]
    """Column names in result order."""

    column_types: list[str]
    """Column data types."""

    rows: list[list[Any]]
    """Result rows as list of lists."""

    row_count: int
    """Number of rows returned."""

    truncated: bool
    """Whether results were truncated due to limit."""

    execution_time_ms: float
    """Query execution time in milliseconds."""

    cached_at: float | None = None
    """Unix timestamp when result was cached (None if not from cache)."""

    is_from_cache: bool = False
    """Whether this result came from cache."""


class PreviewRequest(BaseModel):
    """Request model for query preview endpoint."""

    query: QueryDefinition
    """Query definition to preview."""

    limit: int = 100
    """Maximum number of rows to return."""


class ExecuteSQLRequest(BaseModel):
    """Request model for raw SQL execution endpoint."""

    sql: str
    """Raw SQL query (SELECT only)."""

    params: dict[str, Any] | None = None
    """Optional named parameters for the query."""


class SQLValidationResponse(BaseModel):
    """Response model for SQL validation endpoint."""

    valid: bool
    """Whether the SQL is valid."""

    errors: list[str]
    """List of validation errors (empty if valid)."""

    tables: list[str]
    """List of tables referenced in the query."""


class SuccessResponse(BaseModel):
    """Generic success response."""

    success: bool = True
    """Whether the operation succeeded."""

    message: str = "OK"
    """Success message."""


# ============================================================================
# Health Check Models
# ============================================================================


class HealthCheck(BaseModel):
    """Individual health check result."""

    model_config = ConfigDict(strict=True)

    status: str
    """Status of the check: 'healthy', 'degraded', or 'unhealthy'."""

    message: str | None = None
    """Optional message with details (e.g., error message)."""

    latency_ms: float | None = None
    """Optional latency of the health check in milliseconds."""


class HealthStatus(BaseModel):
    """Overall health status response."""

    model_config = ConfigDict(strict=True)

    status: str
    """Overall status: 'healthy', 'degraded', or 'unhealthy'."""

    version: str
    """Application version."""

    uptime_seconds: float
    """Time since the application started in seconds."""

    checks: dict[str, HealthCheck]
    """Individual health check results."""


class LivenessResponse(BaseModel):
    """Response for liveness probe."""

    status: str = "ok"
    """Liveness status."""


class ReadinessResponse(BaseModel):
    """Response for readiness probe."""

    status: str = "ok"
    """Readiness status."""


# ============================================================================
# Time Series Request Models
# ============================================================================


class TimeSeriesQueryRequest(BaseModel):
    """Request model for time series query execution."""

    query: QueryDefinition
    """Query definition to execute."""

    interval: TimeInterval
    """Time interval for bucketing (minute, hour, day, week, month, quarter, year)."""

    date_column: str
    """Name of the date/timestamp column to bucket."""

    fill_missing: bool = True
    """Whether to fill missing time buckets with default values."""


class PivotRequest(BaseModel):
    """Request model for pivot transformation."""

    result: QueryResult
    """Query result to pivot."""

    row_column: str
    """Column to use as row headers."""

    pivot_column: str
    """Column to pivot into separate columns."""

    value_column: str
    """Column containing values to aggregate."""

    aggregation: str = "sum"
    """Aggregation function: sum, avg, count, min, max."""


class TrendColumnRequest(BaseModel):
    """Request model for adding trend columns."""

    result: QueryResult
    """Query result to add trend columns to."""

    value_column: str
    """Column containing values to calculate trends for."""

    order_column: str
    """Column to order by for trend calculation."""

    group_column: str | None = None
    """Optional column to group by for separate trend calculations."""


class MetricTrendRequest(BaseModel):
    """Request model for calculating metric trend."""

    query: QueryDefinition
    """Query definition for the metric."""

    comparison: ComparisonPeriod
    """Period to compare against."""

    current_start: date
    """Start date of current period."""

    current_end: date
    """End date of current period."""

    value_column: str
    """Column containing the metric value."""

    date_column: str
    """Column containing the date for filtering."""


# ============================================================================
# Dashboard Request/Response Models
# ============================================================================


class DashboardListResponse(BaseModel):
    """Response model for dashboard list endpoint."""

    dashboards: list[Dashboard]
    """List of dashboards."""


class WidgetQueryRequest(BaseModel):
    """Request model for executing a widget's query with dashboard filters."""

    widget_id: str
    """ID of the widget to execute."""

    filter_values: list[FilterValue] = []
    """Current dashboard filter values."""


class DashboardImportRequest(BaseModel):
    """Request model for importing a dashboard."""

    export_data: DashboardExport
    """Dashboard export data to import."""

    name_override: str | None = None
    """Optional name to use instead of the export's name."""


# ============================================================================
# Saved Query Request/Response Models
# ============================================================================


class SavedQueryListResponse(BaseModel):
    """Response model for saved query list endpoint."""

    queries: list[SavedQuery]
    """List of saved queries."""


# ============================================================================
# Pin Request/Response Models
# ============================================================================


class PinnedDashboardsResponse(BaseModel):
    """Response model for getting pinned dashboards."""

    dashboards: list[Dashboard]
    """List of pinned dashboards, ordered by position."""

    pins: list[PinnedDashboard]
    """List of pin entries with metadata."""


class DashboardPinContextsResponse(BaseModel):
    """Response model for getting contexts where a dashboard is pinned."""

    contexts: list[str]
    """List of context names where the dashboard is pinned."""


# ============================================================================
# Router Factory
# ============================================================================


def create_router(
    engine: PrismiqEngine,
    get_auth_context: Callable[..., Awaitable[AuthContext]] | None = None,
) -> APIRouter:
    """Create a FastAPI router for the Prismiq analytics engine.

    Args:
        engine: Initialized PrismiqEngine instance.
        get_auth_context: FastAPI dependency that returns an AuthContext.
            Called ONCE per request - no duplicate auth processing.
            If None, uses a default that requires X-Tenant-ID header.

    Returns:
        APIRouter with analytics endpoints.

    Example:
        # Simple header-based auth
        from prismiq.auth import create_header_auth_dependency
        router = create_router(engine, get_auth_context=create_header_auth_dependency())

        # Custom auth with your provider
        async def get_auth(request: Request) -> MyAuthContext:
            token = request.headers.get("Authorization", "").replace("Bearer ", "")
            user = await my_auth_provider.verify(token)
            return MyAuthContext(tenant_id=user.org_id, user_id=user.id)

        router = create_router(engine, get_auth_context=get_auth)
    """
    global _startup_time
    _startup_time = time.time()

    router = APIRouter(tags=["analytics"])

    # Default auth dependency if none provided
    if get_auth_context is None:
        get_auth_context = create_header_auth_dependency()

    # Use engine's dashboard store
    store = engine.dashboard_store
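
    # Usage sketch (illustrative only): the returned router is a standard
    # APIRouter, so a host application would typically mount it with FastAPI's
    # include_router. How the PrismiqEngine instance is constructed is outside
    # this module and is assumed below.
    #
    #     from fastapi import FastAPI
    #
    #     app = FastAPI()
    #     engine = ...  # an initialized PrismiqEngine (construction not shown)
    #     app.include_router(create_router(engine), prefix="/analytics")
    #
    # The "/analytics" prefix is an arbitrary example; any mount point works.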

    # ========================================================================
    # Health Check Endpoints
    # ========================================================================

    @router.get("/health", response_model=HealthStatus)
    async def health_check() -> HealthStatus:
        """Comprehensive health check endpoint.

        Checks the health of all dependencies (database, cache, etc.)
        and returns an overall status.

        Returns:
            HealthStatus with overall status and individual check results.
        """
        checks: dict[str, HealthCheck] = {}

        # Check database connection
        try:
            start = time.perf_counter()
            await engine.check_connection()
            latency = (time.perf_counter() - start) * 1000
            checks["database"] = HealthCheck(
                status="healthy",
                latency_ms=round(latency, 2),
            )
        except (TypeError, AttributeError, ImportError, AssertionError):
            # Re-raise programming bugs - these need to be fixed, not reported as unhealthy
            raise
        except Exception as e:
            # Infrastructure errors - report as unhealthy
            checks["database"] = HealthCheck(
                status="unhealthy",
                message=str(e),
            )

        # Check cache if configured (using getattr for forward compatibility)
        # The cache property will be added in Task 7 (Engine Integration)
        cache = getattr(engine, "cache", None)
        if cache is not None:
            try:
                start = time.perf_counter()
                await cache.set("health_check", "ok", ttl=1)
                result = await cache.get("health_check")
                latency = (time.perf_counter() - start) * 1000

                if result == "ok":
                    checks["cache"] = HealthCheck(
                        status="healthy",
                        latency_ms=round(latency, 2),
                    )
                else:
                    checks["cache"] = HealthCheck(
                        status="degraded",
                        message="Cache read/write verification failed",
                    )
            except (TypeError, AttributeError, ImportError, AssertionError):
                # Re-raise programming bugs
                raise
            except Exception as e:
                # Infrastructure errors - report as unhealthy
                checks["cache"] = HealthCheck(
                    status="unhealthy",
                    message=str(e),
                )

        # Determine overall status
        all_healthy = all(c.status == "healthy" for c in checks.values())
        any_unhealthy = any(c.status == "unhealthy" for c in checks.values())

        if all_healthy:
            overall_status = "healthy"
        elif any_unhealthy:
            overall_status = "unhealthy"
        else:
            overall_status = "degraded"

        return HealthStatus(
            status=overall_status,
            version=__version__,
            uptime_seconds=round(_get_uptime(), 2),
            checks=checks,
        )

    @router.get("/health/live", response_model=LivenessResponse)
    async def liveness() -> LivenessResponse:
        """Kubernetes liveness probe endpoint.

        Indicates whether the application process is running.
        This should only fail if the process is in a broken state
        and needs to be restarted.

        Returns:
            LivenessResponse with status 'ok'.
        """
        return LivenessResponse(status="ok")

    @router.get("/health/ready", response_model=ReadinessResponse)
    async def readiness() -> ReadinessResponse:
        """Kubernetes readiness probe endpoint.

        Indicates whether the application is ready to receive traffic.
        Checks if the database connection is available.

        Returns:
            ReadinessResponse with status 'ok'.

        Raises:
            HTTPException: 503 if the application is not ready.
        """
        try:
            await engine.check_connection()
            return ReadinessResponse(status="ok")
        except Exception as e:
            raise HTTPException(
                status_code=503,
                detail=f"Service not ready: {e!s}",
            ) from e
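
    # Probe usage sketch (illustrative; assumes the router is mounted at the
    # application root - adjust paths for any prefix). FastAPI's TestClient can
    # exercise the probes directly:
    #
    #     from fastapi.testclient import TestClient
    #
    #     client = TestClient(app)  # an app that includes this router
    #     assert client.get("/health/live").status_code == 200
    #     assert client.get("/health/ready").status_code in (200, 503)
    #     print(client.get("/health").json()["status"])  # healthy/degraded/unhealthy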

    # ========================================================================
    # Schema Endpoints
    # ========================================================================

    @router.get("/schema", response_model=DatabaseSchema)
    async def get_schema(
        auth: AuthContext = Depends(get_auth_context),
    ) -> DatabaseSchema:
        """Get the complete database schema (raw).

        Returns all exposed tables, their columns, and relationships
        without any configuration applied.

        Uses the schema_name from AuthContext for multi-tenant schema isolation.
        """
        schema_name = auth.schema_name
        return await engine.get_schema(schema_name=schema_name)

    @router.get("/schema/enhanced", response_model=EnhancedDatabaseSchema)
    async def get_enhanced_schema(
        auth: AuthContext = Depends(get_auth_context),
    ) -> EnhancedDatabaseSchema:
        """Get the enhanced database schema with configuration applied.

        Returns schema with display names, descriptions, and hidden
        tables/columns filtered out.

        Uses the schema_name from AuthContext for multi-tenant schema isolation.
        """
        schema_name = auth.schema_name
        return await engine.get_enhanced_schema(schema_name=schema_name)

    @router.get("/tables", response_model=TableListResponse)
    async def get_tables(
        auth: AuthContext = Depends(get_auth_context),
    ) -> TableListResponse:
        """Get list of available table names.

        Returns a simple list of table names for quick reference. Uses
        the schema_name from AuthContext for multi-tenant schema
        isolation.
        """
        schema_name = auth.schema_name
        schema = await engine.get_schema(schema_name=schema_name)
        return TableListResponse(tables=schema.table_names())

    @router.get("/tables/{table_name}", response_model=TableSchema)
    async def get_table(
        table_name: str,
        auth: AuthContext = Depends(get_auth_context),
    ) -> TableSchema:
        """Get schema information for a single table (raw).

        Args:
            table_name: Name of the table to retrieve.

        Returns:
            TableSchema with columns and metadata.

        Raises:
            404: If the table is not found.
        """
        schema_name = auth.schema_name
        try:
            return await engine.get_table(table_name, schema_name=schema_name)
        except TableNotFoundError as e:
            raise HTTPException(status_code=404, detail=str(e)) from e

    @router.get("/tables/{table_name}/enhanced", response_model=EnhancedTableSchema)
    async def get_enhanced_table(
        table_name: str,
        auth: AuthContext = Depends(get_auth_context),
    ) -> EnhancedTableSchema:
        """Get enhanced schema information for a single table.

        Args:
            table_name: Name of the table to retrieve.

        Returns:
            EnhancedTableSchema with display names and format hints.

        Raises:
            404: If the table is not found or is hidden.
        """
        schema_name = auth.schema_name
        enhanced_schema = await engine.get_enhanced_schema(schema_name=schema_name)
        table = enhanced_schema.get_table(table_name)
        if table is None:
            raise HTTPException(status_code=404, detail=f"Table '{table_name}' not found")
        return table

    @router.get("/tables/{table_name}/columns/{column_name}/sample")
    async def get_column_sample(
        table_name: str,
        column_name: str,
        limit: int = 5,
        auth: AuthContext = Depends(get_auth_context),
    ) -> dict[str, list[Any]]:
        """Get sample values from a column for data preview.

        Args:
            table_name: Name of the table.
            column_name: Name of the column.
            limit: Maximum number of distinct values to return (default 5).

        Returns:
            Object with sample values array.

        Raises:
            404: If the table or column is not found.
        """
        schema_name = auth.schema_name
        try:
            values = await engine.sample_column_values(
                table_name, column_name, limit, schema_name=schema_name
            )
            return {"values": values}
        except ValueError as e:
            raise HTTPException(status_code=404, detail=str(e)) from e
        except Exception as e:
            raise HTTPException(status_code=500, detail=str(e)) from e

    # ========================================================================
    # Query Endpoints
    # ========================================================================

    @router.post("/query/validate", response_model=ValidationResponse)
    async def validate_query(
        query: QueryDefinition,
        auth: AuthContext = Depends(get_auth_context),
    ) -> ValidationResponse:
        """Validate a query without executing it.

        Checks that all tables and columns exist in the schema,
        and that join columns are compatible.

        Uses the schema_name from AuthContext for multi-tenant schema isolation.

        Args:
            query: Query definition to validate.

        Returns:
            ValidationResponse with valid flag and any errors.
        """
        schema_name = auth.schema_name
        errors = await engine.validate_query_async(query, schema_name=schema_name)
        return ValidationResponse(valid=len(errors) == 0, errors=errors)

    @router.post("/query/validate/detailed", response_model=DetailedValidationResponse)
    async def validate_query_detailed(
        query: QueryDefinition,
        auth: AuthContext = Depends(get_auth_context),
    ) -> DetailedValidationResponse:
        """Validate a query with detailed error information.

        Returns detailed errors with error codes, field paths, and suggestions.

        Uses the schema_name from AuthContext for multi-tenant schema isolation.

        Args:
            query: Query definition to validate.

        Returns:
            DetailedValidationResponse with complete validation result.
        """
        schema_name = auth.schema_name
        result = await engine.validate_query_detailed_async(query, schema_name=schema_name)
        return DetailedValidationResponse(result=result)

    @router.post("/query/sql")
    async def generate_sql(
        query: QueryDefinition,
        auth: AuthContext = Depends(get_auth_context),
    ) -> dict[str, str]:
        """Generate SQL from a query definition without executing.

        Useful for previewing the SQL that will be generated.

        Uses the schema_name from AuthContext for multi-tenant schema isolation.

        Args:
            query: Query definition to generate SQL for.

        Returns:
            Object with the generated SQL string.

        Raises:
            400: If the query fails validation.
        """
        schema_name = auth.schema_name
        try:
            sql = await engine.generate_sql_async(query, schema_name=schema_name)
            return {"sql": sql}
        except QueryValidationError as e:
            raise HTTPException(
                status_code=400, detail={"message": e.message, "errors": e.errors}
            ) from e

    @router.post("/query/execute", response_model=QueryResultWithCache)
    async def execute_query(
        request: ExecuteQueryRequest,
        auth: AuthContext = Depends(get_auth_context),
    ) -> QueryResultWithCache:
        """Execute a query and return results with cache metadata.

        Args:
            request: Query execution request with optional cache bypass.

        Returns:
            QueryResultWithCache with columns, rows, execution metadata, and cache info.

        Raises:
            400: If the query fails validation.
            500: If the query execution fails.
        """
        try:
            query = request.query
            bypass_cache = request.bypass_cache
            schema_name = auth.schema_name

            # Execute query (bypass cache if requested)
            use_cache = not bypass_cache
            result = await engine.execute_query(query, schema_name=schema_name, use_cache=use_cache)

            # Get cache metadata
            cached_at: float | None = None
            is_from_cache = False

            if engine._query_cache:  # pyright: ignore[reportPrivateUsage]
                if bypass_cache:
                    # We just executed fresh, cache it and get the timestamp
                    try:
                        cached_at = await engine._query_cache.cache_result(  # pyright: ignore[reportPrivateUsage]
                            query, result
                        )
                    except (TypeError, AttributeError, ImportError):
                        # Re-raise programming bugs - these need to be fixed
                        raise
                    except Exception as cache_err:
                        # Log infrastructure errors but don't fail - cache is optional
                        _logger.warning(
                            "Failed to cache query result",
                            error=str(cache_err),
                            error_type=type(cache_err).__name__,
                            tables=[t.name for t in query.tables],
                        )
                        cached_at = None  # Don't report misleading timestamp
                        is_from_cache = False
                else:
                    # Check if result was from cache
                    try:
                        metadata = await engine._query_cache.get_cache_metadata(  # pyright: ignore[reportPrivateUsage]
                            query
                        )
                        if metadata and "cached_at" in metadata:
                            cached_at = metadata["cached_at"]
                            is_from_cache = True
                    except (TypeError, AttributeError, ImportError):
                        # Re-raise programming bugs - these need to be fixed
                        raise
                    except Exception as cache_err:
                        # Log infrastructure errors but don't fail - metadata is optional
                        _logger.warning(
                            "Failed to get cache metadata",
                            error=str(cache_err),
                            error_type=type(cache_err).__name__,
                            tables=[t.name for t in query.tables],
                        )
                        # Continue without cache metadata

            return QueryResultWithCache(
                columns=result.columns,
                column_types=result.column_types,
                rows=result.rows,
                row_count=result.row_count,
                truncated=result.truncated,
                execution_time_ms=result.execution_time_ms,
                cached_at=cached_at,
                is_from_cache=is_from_cache,
            )
        except QueryValidationError as e:
            raise HTTPException(
                status_code=400, detail={"message": e.message, "errors": e.errors}
            ) from e
        except Exception as e:
            raise HTTPException(status_code=500, detail=str(e)) from e
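
    # Client-side sketch for /query/execute (illustrative). The exact shape of
    # QueryDefinition is defined in prismiq.types and is only hinted at here;
    # the URL, header, and payload below are assumptions, not a fixed contract.
    #
    #     import httpx
    #
    #     resp = httpx.post(
    #         "http://localhost:8000/query/execute",
    #         headers={"X-Tenant-ID": "acme"},              # default header auth
    #         json={"query": {...}, "bypass_cache": True},  # force a fresh run
    #     )
    #     body = resp.json()
    #     print(body["is_from_cache"], body["cached_at"], body["row_count"])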

    @router.post("/query/preview", response_model=QueryResult)
    async def preview_query(
        request: PreviewRequest,
        auth: AuthContext = Depends(get_auth_context),
    ) -> QueryResult:
        """Execute a query with a limited number of rows.

        Useful for quick previews in the query builder UI.

        Args:
            request: Preview request with query and limit.

        Returns:
            QueryResult with limited rows.

        Raises:
            400: If the query fails validation.
            500: If the query execution fails.
        """
        schema_name = auth.schema_name
        try:
            return await engine.preview_query(
                request.query, limit=request.limit, schema_name=schema_name
            )
        except QueryValidationError as e:
            raise HTTPException(
                status_code=400, detail={"message": e.message, "errors": e.errors}
            ) from e
        except Exception as e:
            raise HTTPException(status_code=500, detail=str(e)) from e

    # ========================================================================
    # Custom SQL Endpoints
    # ========================================================================

    @router.post("/query/validate-sql", response_model=SQLValidationResponse)
    async def validate_sql(
        request: ExecuteSQLRequest,
        auth: AuthContext = Depends(get_auth_context),
    ) -> SQLValidationResponse:
        """Validate a raw SQL query without executing it.

        Checks that the SQL is a valid SELECT statement and only
        references tables visible in the schema.

        Args:
            request: SQL validation request.
            auth: Authentication context with tenant and schema info.

        Returns:
            SQLValidationResponse with validation status and details.
        """
        schema_name = auth.schema_name
        result = await engine.validate_sql(request.sql, schema_name=schema_name)
        return SQLValidationResponse(
            valid=result.valid,
            errors=result.errors,
            tables=result.tables,
        )

    @router.post("/query/execute-sql", response_model=QueryResult)
    async def execute_sql(
        request: ExecuteSQLRequest,
        auth: AuthContext = Depends(get_auth_context),
    ) -> QueryResult:
        """Execute a raw SQL query.

        Only SELECT statements are allowed. Queries are restricted
        to tables visible in the schema.

        Args:
            request: SQL execution request with query and optional params.
            auth: Authentication context with tenant and schema info.

        Returns:
            QueryResult with columns, rows, and execution metadata.

        Raises:
            400: If the SQL fails validation.
            500: If the query execution fails.
        """
        schema_name = auth.schema_name
        try:
            return await engine.execute_raw_sql(
                sql=request.sql,
                params=request.params,
                schema_name=schema_name,
            )
        except SQLValidationError as e:
            raise HTTPException(
                status_code=400, detail={"message": e.message, "errors": e.errors}
            ) from e
        except Exception as e:
            raise HTTPException(status_code=500, detail=str(e)) from e
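
    # Request sketch for the custom SQL endpoints (illustrative). "params" maps
    # names to values; the placeholder syntax used inside the SQL string depends
    # on the underlying engine/driver and is not assumed here. Table and column
    # names below are hypothetical.
    #
    #     payload = {
    #         "sql": "SELECT region, SUM(sales) AS sales FROM orders GROUP BY region",
    #         "params": None,
    #     }
    #     # POST payload to /query/validate-sql first, then to /query/execute-sql;
    #     # both restrict the statement to SELECT over schema-visible tables.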

    # ========================================================================
    # Time Series Endpoints
    # ========================================================================

    @router.post("/query/execute/timeseries", response_model=QueryResult)
    async def execute_timeseries_query(
        request: TimeSeriesQueryRequest,
        auth: AuthContext = Depends(get_auth_context),
    ) -> QueryResult:
        """Execute a time series query with automatic bucketing.

        Automatically adds date_trunc to the query for time bucketing
        and optionally fills missing time buckets.

        Args:
            request: Time series query request with interval configuration.
            auth: Authentication context with tenant and schema info.

        Returns:
            QueryResult with time-bucketed data.

        Raises:
            400: If the query fails validation or date column is invalid.
            500: If the query execution fails.
        """
        schema_name = auth.schema_name
        try:
            return await engine.execute_timeseries_query(
                query=request.query,
                interval=request.interval,
                date_column=request.date_column,
                fill_missing=request.fill_missing,
                schema_name=schema_name,
            )
        except QueryValidationError as e:
            raise HTTPException(
                status_code=400, detail={"message": e.message, "errors": e.errors}
            ) from e
        except ValueError as e:
            raise HTTPException(status_code=400, detail=str(e)) from e
        except Exception as e:
            raise HTTPException(status_code=500, detail=str(e)) from e

    # ========================================================================
    # Transform Endpoints
    # ========================================================================

    @router.post("/transform/pivot", response_model=QueryResult)
    async def pivot_result(request: PivotRequest) -> QueryResult:
        """Pivot query result data from long to wide format.

        Transforms data like:
            region | month | sales
            East   | Jan   | 100
            East   | Feb   | 150

        Into:
            region | Jan | Feb
            East   | 100 | 150

        Args:
            request: Pivot request with column configuration.

        Returns:
            Pivoted QueryResult.

        Raises:
            400: If column names are invalid.
        """
        try:
            return pivot_data(
                result=request.result,
                row_column=request.row_column,
                pivot_column=request.pivot_column,
                value_column=request.value_column,
                aggregation=request.aggregation,
            )
        except ValueError as e:
            raise HTTPException(status_code=400, detail=str(e)) from e
        except Exception as e:
            raise HTTPException(status_code=500, detail=str(e)) from e
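
    # Pivot request sketch matching the docstring example above (illustrative).
    # QueryResult is assumed to carry the same columns/rows/row_count/... fields
    # that /query/execute copies out of it; the literal dict below is schematic.
    #
    #     pivot_payload = {
    #         "result": {
    #             "columns": ["region", "month", "sales"],
    #             "rows": [["East", "Jan", 100], ["East", "Feb", 150]],
    #             # ...remaining QueryResult fields (column_types, row_count, etc.)
    #         },
    #         "row_column": "region",
    #         "pivot_column": "month",
    #         "value_column": "sales",
    #         "aggregation": "sum",
    #     }
    #     # POST pivot_payload to /transform/pivot to get region | Jan | Feb rows.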

    @router.post("/transform/trend", response_model=QueryResult)
    async def add_trend(request: TrendColumnRequest) -> QueryResult:
        """Add trend columns to query result.

        Adds columns for previous value, absolute change, and percent change
        based on the order of rows.

        Args:
            request: Trend column request with column configuration.

        Returns:
            QueryResult with added trend columns.

        Raises:
            400: If column names are invalid.
        """
        try:
            return add_trend_column(
                result=request.result,
                value_column=request.value_column,
                order_column=request.order_column,
                group_column=request.group_column,
            )
        except ValueError as e:
            raise HTTPException(status_code=400, detail=str(e)) from e
        except Exception as e:
            raise HTTPException(status_code=500, detail=str(e)) from e

    # ========================================================================
    # Metrics Endpoints
    # ========================================================================

    @router.post("/metrics/trend", response_model=TrendResult)
    async def calculate_metric_trend(request: MetricTrendRequest) -> TrendResult:
        """Calculate trend for a metric query.

        Executes the query for both current and comparison periods,
        then calculates the trend between them.

        Args:
            request: Metric trend request with period configuration.

        Returns:
            TrendResult with current value, previous value, and change metrics.

        Raises:
            400: If the query fails validation.
            500: If the query execution fails.
        """
        try:
            return await engine.calculate_metric_trend(
                query=request.query,
                comparison=request.comparison,
                current_start=request.current_start,
                current_end=request.current_end,
                value_column=request.value_column,
                date_column=request.date_column,
            )
        except QueryValidationError as e:
            raise HTTPException(
                status_code=400, detail={"message": e.message, "errors": e.errors}
            ) from e
        except ValueError as e:
            raise HTTPException(status_code=400, detail=str(e)) from e
        except Exception as e:
            raise HTTPException(status_code=500, detail=str(e)) from e
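
    # Metric-trend request sketch (illustrative). The allowed ComparisonPeriod
    # values live in prismiq.trends and are not enumerated here; the value
    # below is an assumption used only to show the field layout.
    #
    #     trend_payload = {
    #         "query": {...},                    # a QueryDefinition, as above
    #         "comparison": "previous_period",   # assumed ComparisonPeriod value
    #         "current_start": "2024-01-01",
    #         "current_end": "2024-01-31",
    #         "value_column": "sales",
    #         "date_column": "order_date",
    #     }
    #     # POST trend_payload to /metrics/trend to compare the two periods.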
|
|
1073
|
+
|
|
1074
|
+
# ========================================================================
|
|
1075
|
+
# Dashboard Endpoints
|
|
1076
|
+
# ========================================================================
|
|
1077
|
+
|
|
1078
|
+
@router.get("/dashboards", response_model=DashboardListResponse)
|
|
1079
|
+
async def list_dashboards(
|
|
1080
|
+
auth: AuthContext = Depends(get_auth_context),
|
|
1081
|
+
) -> DashboardListResponse:
|
|
1082
|
+
"""List all dashboards for the current tenant.
|
|
1083
|
+
|
|
1084
|
+
Returns:
|
|
1085
|
+
List of dashboards the user can access.
|
|
1086
|
+
"""
|
|
1087
|
+
dashboards = await store.list_dashboards(
|
|
1088
|
+
tenant_id=auth.tenant_id,
|
|
1089
|
+
owner_id=auth.user_id,
|
|
1090
|
+
schema_name=auth.schema_name,
|
|
1091
|
+
)
|
|
1092
|
+
return DashboardListResponse(dashboards=dashboards)
|
|
1093
|
+
|
|
1094
|
+
@router.get("/dashboards/{dashboard_id}", response_model=Dashboard)
|
|
1095
|
+
async def get_dashboard(
|
|
1096
|
+
dashboard_id: str,
|
|
1097
|
+
auth: AuthContext = Depends(get_auth_context),
|
|
1098
|
+
) -> Dashboard:
|
|
1099
|
+
"""Get a dashboard by ID.
|
|
1100
|
+
|
|
1101
|
+
Args:
|
|
1102
|
+
dashboard_id: Dashboard ID.
|
|
1103
|
+
|
|
1104
|
+
Returns:
|
|
1105
|
+
Dashboard with all widgets and filters.
|
|
1106
|
+
|
|
1107
|
+
Raises:
|
|
1108
|
+
404: If dashboard not found.
|
|
1109
|
+
403: If user lacks permission to view.
|
|
1110
|
+
"""
|
|
1111
|
+
dashboard = await store.get_dashboard(
|
|
1112
|
+
dashboard_id, tenant_id=auth.tenant_id, schema_name=auth.schema_name
|
|
1113
|
+
)
|
|
1114
|
+
if dashboard is None:
|
|
1115
|
+
raise HTTPException(status_code=404, detail=f"Dashboard '{dashboard_id}' not found")
|
|
1116
|
+
|
|
1117
|
+
if not can_view_dashboard(dashboard, auth.user_id):
|
|
1118
|
+
raise HTTPException(status_code=403, detail="Permission denied")
|
|
1119
|
+
|
|
1120
|
+
return dashboard
|
|
1121
|
+
|
|
1122
|
+
@router.post("/dashboards", response_model=Dashboard, status_code=201)
|
|
1123
|
+
async def create_dashboard(
|
|
1124
|
+
data: DashboardCreate,
|
|
1125
|
+
auth: AuthContext = Depends(get_auth_context),
|
|
1126
|
+
) -> Dashboard:
|
|
1127
|
+
"""Create a new dashboard.
|
|
1128
|
+
|
|
1129
|
+
Args:
|
|
1130
|
+
data: Dashboard creation data.
|
|
1131
|
+
|
|
1132
|
+
Returns:
|
|
1133
|
+
Created dashboard.
|
|
1134
|
+
"""
|
|
1135
|
+
return await store.create_dashboard(
|
|
1136
|
+
data,
|
|
1137
|
+
tenant_id=auth.tenant_id,
|
|
1138
|
+
owner_id=auth.user_id,
|
|
1139
|
+
schema_name=auth.schema_name,
|
|
1140
|
+
)
|
|
1141
|
+
|
|
1142
|
+
@router.patch("/dashboards/{dashboard_id}", response_model=Dashboard)
|
|
1143
|
+
async def update_dashboard(
|
|
1144
|
+
dashboard_id: str,
|
|
1145
|
+
data: DashboardUpdate,
|
|
1146
|
+
auth: AuthContext = Depends(get_auth_context),
|
|
1147
|
+
) -> Dashboard:
|
|
1148
|
+
"""Update a dashboard.
|
|
1149
|
+
|
|
1150
|
+
Args:
|
|
1151
|
+
dashboard_id: Dashboard ID.
|
|
1152
|
+
data: Fields to update.
|
|
1153
|
+
|
|
1154
|
+
Returns:
|
|
1155
|
+
Updated dashboard.
|
|
1156
|
+
|
|
1157
|
+
Raises:
|
|
1158
|
+
404: If dashboard not found.
|
|
1159
|
+
403: If user lacks permission to edit.
|
|
1160
|
+
"""
|
|
1161
|
+
dashboard = await store.get_dashboard(
|
|
1162
|
+
dashboard_id, tenant_id=auth.tenant_id, schema_name=auth.schema_name
|
|
1163
|
+
)
|
|
1164
|
+
if dashboard is None:
|
|
1165
|
+
raise HTTPException(status_code=404, detail=f"Dashboard '{dashboard_id}' not found")
|
|
1166
|
+
|
|
1167
|
+
if not can_edit_dashboard(dashboard, auth.user_id):
|
|
1168
|
+
raise HTTPException(status_code=403, detail="Permission denied")
|
|
1169
|
+
|
|
1170
|
+
updated = await store.update_dashboard(
|
|
1171
|
+
dashboard_id, data, tenant_id=auth.tenant_id, schema_name=auth.schema_name
|
|
1172
|
+
)
|
|
1173
|
+
if updated is None:
|
|
1174
|
+
raise HTTPException(status_code=404, detail=f"Dashboard '{dashboard_id}' not found")
|
|
1175
|
+
return updated
|
|
1176
|
+
|
|
1177
|
+
@router.delete("/dashboards/{dashboard_id}", response_model=SuccessResponse)
|
|
1178
|
+
async def delete_dashboard(
|
|
1179
|
+
dashboard_id: str,
|
|
1180
|
+
auth: AuthContext = Depends(get_auth_context),
|
|
1181
|
+
) -> SuccessResponse:
|
|
1182
|
+
"""Delete a dashboard.
|
|
1183
|
+
|
|
1184
|
+
Args:
|
|
1185
|
+
dashboard_id: Dashboard ID.
|
|
1186
|
+
|
|
1187
|
+
Returns:
|
|
1188
|
+
Success response.
|
|
1189
|
+
|
|
1190
|
+
Raises:
|
|
1191
|
+
404: If dashboard not found.
|
|
1192
|
+
403: If user lacks permission to delete.
|
|
1193
|
+
"""
|
|
1194
|
+
dashboard = await store.get_dashboard(
|
|
1195
|
+
dashboard_id, tenant_id=auth.tenant_id, schema_name=auth.schema_name
|
|
1196
|
+
)
|
|
1197
|
+
if dashboard is None:
|
|
1198
|
+
raise HTTPException(status_code=404, detail=f"Dashboard '{dashboard_id}' not found")
|
|
1199
|
+
|
|
1200
|
+
if not can_delete_dashboard(dashboard, auth.user_id):
|
|
1201
|
+
raise HTTPException(status_code=403, detail="Permission denied")
|
|
1202
|
+
|
|
1203
|
+
deleted = await store.delete_dashboard(
|
|
1204
|
+
dashboard_id, tenant_id=auth.tenant_id, schema_name=auth.schema_name
|
|
1205
|
+
)
|
|
1206
|
+
if not deleted:
|
|
1207
|
+
raise HTTPException(status_code=404, detail=f"Dashboard '{dashboard_id}' not found")
|
|
1208
|
+
return SuccessResponse(message=f"Dashboard '{dashboard_id}' deleted")
|
|
1209
|
+
|
|
1210
|
+
# ========================================================================
|
|
1211
|
+
# Widget Endpoints
|
|
1212
|
+
# ========================================================================
|
|
1213
|
+
|
|
1214
|
+
@router.post(
|
|
1215
|
+
"/dashboards/{dashboard_id}/widgets",
|
|
1216
|
+
response_model=Widget,
|
|
1217
|
+
status_code=201,
|
|
1218
|
+
)
|
|
1219
|
+
async def add_widget(
|
|
1220
|
+
dashboard_id: str,
|
|
1221
|
+
data: WidgetCreate,
|
|
1222
|
+
auth: AuthContext = Depends(get_auth_context),
|
|
1223
|
+
) -> Widget:
|
|
1224
|
+
"""Add a widget to a dashboard.
|
|
1225
|
+
|
|
1226
|
+
Args:
|
|
1227
|
+
dashboard_id: Dashboard ID.
|
|
1228
|
+
data: Widget creation data.
|
|
1229
|
+
|
|
1230
|
+
Returns:
|
|
1231
|
+
Created widget.
|
|
1232
|
+
|
|
1233
|
+
Raises:
|
|
1234
|
+
404: If dashboard not found.
|
|
1235
|
+
403: If user lacks permission to edit widgets.
|
|
1236
|
+
"""
|
|
1237
|
+
dashboard = await store.get_dashboard(
|
|
1238
|
+
dashboard_id, tenant_id=auth.tenant_id, schema_name=auth.schema_name
|
|
1239
|
+
)
|
|
1240
|
+
if dashboard is None:
|
|
1241
|
+
raise HTTPException(status_code=404, detail=f"Dashboard '{dashboard_id}' not found")
|
|
1242
|
+
|
|
1243
|
+
if not can_edit_widget(dashboard, auth.user_id):
|
|
1244
|
+
raise HTTPException(status_code=403, detail="Permission denied")
|
|
1245
|
+
|
|
1246
|
+
created = await store.add_widget(
|
|
1247
|
+
dashboard_id, data, tenant_id=auth.tenant_id, schema_name=auth.schema_name
|
|
1248
|
+
)
|
|
1249
|
+
if created is None:
|
|
1250
|
+
raise HTTPException(status_code=404, detail=f"Dashboard '{dashboard_id}' not found")
|
|
1251
|
+
return created
|
|
1252
|
+
|
|
1253
|
+
@router.patch("/dashboards/{dashboard_id}/widgets/{widget_id}", response_model=Widget)
|
|
1254
|
+
async def update_widget(
|
|
1255
|
+
dashboard_id: str,
|
|
1256
|
+
widget_id: str,
|
|
1257
|
+
data: WidgetUpdate,
|
|
1258
|
+
auth: AuthContext = Depends(get_auth_context),
|
|
1259
|
+
) -> Widget:
|
|
1260
|
+
"""Update a widget.
|
|
1261
|
+
|
|
1262
|
+
Args:
|
|
1263
|
+
dashboard_id: Dashboard ID.
|
|
1264
|
+
widget_id: Widget ID.
|
|
1265
|
+
data: Fields to update.
|
|
1266
|
+
|
|
1267
|
+
Returns:
|
|
1268
|
+
Updated widget.
|
|
1269
|
+
|
|
1270
|
+
Raises:
|
|
1271
|
+
404: If dashboard or widget not found.
|
|
1272
|
+
403: If user lacks permission to edit widgets.
|
|
1273
|
+
"""
|
|
1274
|
+
dashboard = await store.get_dashboard(
|
|
1275
|
+
dashboard_id, tenant_id=auth.tenant_id, schema_name=auth.schema_name
|
|
1276
|
+
)
|
|
1277
|
+
if dashboard is None:
|
|
1278
|
+
raise HTTPException(status_code=404, detail=f"Dashboard '{dashboard_id}' not found")
|
|
1279
|
+
|
|
1280
|
+
if not can_edit_widget(dashboard, auth.user_id):
|
|
1281
|
+
raise HTTPException(status_code=403, detail="Permission denied")
|
|
1282
|
+
|
|
1283
|
+
updated = await store.update_widget(
|
|
1284
|
+
widget_id, data, tenant_id=auth.tenant_id, schema_name=auth.schema_name
|
|
1285
|
+
)
|
|
1286
|
+
if updated is None:
|
|
1287
|
+
raise HTTPException(status_code=404, detail=f"Widget '{widget_id}' not found")
|
|
1288
|
+
return updated
|
|
1289
|
+
|
|
1290
|
+
@router.delete("/dashboards/{dashboard_id}/widgets/{widget_id}", response_model=SuccessResponse)
|
|
1291
|
+
async def delete_widget(
|
|
1292
|
+
dashboard_id: str,
|
|
1293
|
+
widget_id: str,
|
|
1294
|
+
auth: AuthContext = Depends(get_auth_context),
|
|
1295
|
+
) -> SuccessResponse:
|
|
1296
|
+
"""Delete a widget.
|
|
1297
|
+
|
|
1298
|
+
Args:
|
|
1299
|
+
dashboard_id: Dashboard ID.
|
|
1300
|
+
widget_id: Widget ID.
|
|
1301
|
+
|
|
1302
|
+
Returns:
|
|
1303
|
+
Success response.
|
|
1304
|
+
|
|
1305
|
+
Raises:
|
|
1306
|
+
404: If dashboard or widget not found.
|
|
1307
|
+
403: If user lacks permission to edit widgets.
|
|
1308
|
+
"""
|
|
1309
|
+
dashboard = await store.get_dashboard(
|
|
1310
|
+
dashboard_id, tenant_id=auth.tenant_id, schema_name=auth.schema_name
|
|
1311
|
+
)
|
|
1312
|
+
if dashboard is None:
|
|
1313
|
+
raise HTTPException(status_code=404, detail=f"Dashboard '{dashboard_id}' not found")
|
|
1314
|
+
|
|
1315
|
+
if not can_edit_widget(dashboard, auth.user_id):
|
|
1316
|
+
raise HTTPException(status_code=403, detail="Permission denied")
|
|
1317
|
+
|
|
1318
|
+
deleted = await store.delete_widget(
|
|
1319
|
+
widget_id, tenant_id=auth.tenant_id, schema_name=auth.schema_name
|
|
1320
|
+
)
|
|
1321
|
+
if not deleted:
|
|
1322
|
+
raise HTTPException(status_code=404, detail=f"Widget '{widget_id}' not found")
|
|
1323
|
+
return SuccessResponse(message=f"Widget '{widget_id}' deleted")
|
|
1324
|
+
|
|
1325
|
+
@router.post(
|
|
1326
|
+
"/dashboards/{dashboard_id}/widgets/{widget_id}/duplicate",
|
|
1327
|
+
response_model=Widget,
|
|
1328
|
+
status_code=201,
|
|
1329
|
+
)
|
|
1330
|
+
async def duplicate_widget(
|
|
1331
|
+
dashboard_id: str,
|
|
1332
|
+
widget_id: str,
|
|
1333
|
+
auth: AuthContext = Depends(get_auth_context),
|
|
1334
|
+
) -> Widget:
|
|
1335
|
+
"""Duplicate a widget.
|
|
1336
|
+
|
|
1337
|
+
Args:
|
|
1338
|
+
dashboard_id: Dashboard ID.
|
|
1339
|
+
widget_id: Widget ID to duplicate.
|
|
1340
|
+
|
|
1341
|
+
Returns:
|
|
1342
|
+
New duplicated widget.
|
|
1343
|
+
|
|
1344
|
+
Raises:
|
|
1345
|
+
404: If dashboard or widget not found.
|
|
1346
|
+
403: If user lacks permission to edit widgets.
|
|
1347
|
+
"""
|
|
1348
|
+
dashboard = await store.get_dashboard(
|
|
1349
|
+
dashboard_id, tenant_id=auth.tenant_id, schema_name=auth.schema_name
|
|
1350
|
+
)
|
|
1351
|
+
if dashboard is None:
|
|
1352
|
+
raise HTTPException(status_code=404, detail=f"Dashboard '{dashboard_id}' not found")
|
|
1353
|
+
|
|
1354
|
+
if not can_edit_widget(dashboard, auth.user_id):
|
|
1355
|
+
raise HTTPException(status_code=403, detail="Permission denied")
|
|
1356
|
+
|
|
1357
|
+
duplicated = await store.duplicate_widget(
|
|
1358
|
+
widget_id, tenant_id=auth.tenant_id, schema_name=auth.schema_name
|
|
1359
|
+
)
|
|
1360
|
+
if duplicated is None:
|
|
1361
|
+
raise HTTPException(status_code=404, detail=f"Widget '{widget_id}' not found")
|
|
1362
|
+
return duplicated
|
|
1363
|
+
|
    # ========================================================================
    # Layout Update Endpoints
    # ========================================================================

    @router.put("/dashboards/{dashboard_id}/layout", response_model=Dashboard)
    async def update_layout(
        dashboard_id: str,
        positions: list[dict[str, Any]],
        auth: AuthContext = Depends(get_auth_context),
    ) -> Dashboard:
        """Batch update widget positions in a dashboard.

        Args:
            dashboard_id: Dashboard ID.
            positions: List of position updates, each with widget_id and position.

        Returns:
            Updated dashboard with new widget positions.

        Raises:
            404: If dashboard not found.
            403: If user lacks permission to edit.
            400: If update fails.
        """
        dashboard = await store.get_dashboard(
            dashboard_id, tenant_id=auth.tenant_id, schema_name=auth.schema_name
        )
        if dashboard is None:
            raise HTTPException(status_code=404, detail=f"Dashboard '{dashboard_id}' not found")

        if not can_edit_dashboard(dashboard, auth.user_id):
            raise HTTPException(status_code=403, detail="Permission denied")

        success = await store.update_widget_positions(
            dashboard_id=dashboard_id,
            positions=positions,
            tenant_id=auth.tenant_id,
            schema_name=auth.schema_name,
        )
        if not success:
            raise HTTPException(status_code=400, detail="Failed to update layout")

        # Return updated dashboard
        updated = await store.get_dashboard(
            dashboard_id, tenant_id=auth.tenant_id, schema_name=auth.schema_name
        )
        if updated is None:
            raise HTTPException(status_code=404, detail=f"Dashboard '{dashboard_id}' not found")
        return updated

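    # Usage sketch (illustrative; not part of the shipped module). The layout
    # endpoint takes a bare JSON array as its body, since `positions` is the only
    # body parameter; per the docstring each entry carries a widget_id and a
    # position. The x/y/w/h shape of "position" is an assumption for illustration:
    #
    #   PUT /dashboards/{dashboard_id}/layout
    #   [
    #       {"widget_id": "w-1", "position": {"x": 0, "y": 0, "w": 6, "h": 4}},
    #       {"widget_id": "w-2", "position": {"x": 6, "y": 0, "w": 6, "h": 4}}
    #   ]
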
    @router.post(
        "/dashboards/{dashboard_id}/widgets/{widget_id}/execute",
        response_model=QueryResult,
    )
    async def execute_widget_query(
        dashboard_id: str,
        widget_id: str,
        auth: AuthContext = Depends(get_auth_context),
        filter_values: list[FilterValue] | None = None,
    ) -> QueryResult:
        """Execute a widget's query with dashboard filters applied.

        Args:
            dashboard_id: Dashboard ID.
            widget_id: Widget ID.
            filter_values: Current dashboard filter values.

        Returns:
            Query result.

        Raises:
            404: If dashboard or widget not found.
            403: If user lacks permission to view.
            400: If widget has no query or query fails validation.
        """
        dashboard = await store.get_dashboard(
            dashboard_id, tenant_id=auth.tenant_id, schema_name=auth.schema_name
        )
        if dashboard is None:
            raise HTTPException(status_code=404, detail=f"Dashboard '{dashboard_id}' not found")

        if not can_view_dashboard(dashboard, auth.user_id):
            raise HTTPException(status_code=403, detail="Permission denied")

        # Find the widget
        widget = None
        for w in dashboard.widgets:
            if w.id == widget_id:
                widget = w
                break

        if widget is None:
            raise HTTPException(status_code=404, detail=f"Widget '{widget_id}' not found")

        if widget.query is None:
            raise HTTPException(status_code=400, detail="Widget has no query")

        # Get schema_name from auth context
        schema_name = auth.schema_name

        # Merge dashboard filters with widget query
        schema = await engine.get_schema(schema_name=schema_name)
        query = merge_filters(
            widget.query,
            dashboard.filters,
            filter_values or [],
            schema,
        )

        try:
            return await engine.execute_query(query, schema_name=schema_name)
        except QueryValidationError as e:
            raise HTTPException(
                status_code=400, detail={"message": e.message, "errors": e.errors}
            ) from e
        except Exception as e:
            raise HTTPException(status_code=500, detail=str(e)) from e

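    # Usage sketch (illustrative; not part of the shipped module). `filter_values`
    # is the only body parameter, so the request body is a JSON array of
    # FilterValue objects; the field names inside each entry are assumptions here,
    # prismiq.filter_merge defines the real model. A 400 response carries
    # {"message": ..., "errors": [...]} when the merged query fails validation.
    #
    #   POST /dashboards/{dashboard_id}/widgets/{widget_id}/execute
    #   [
    #       {"filter_id": "date_range", "value": ["2024-01-01", "2024-01-31"]}
    #   ]
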
    # ========================================================================
    # Dashboard Import/Export Endpoints
    # ========================================================================

    @router.get("/dashboards/{dashboard_id}/export", response_model=DashboardExport)
    async def export_dashboard(
        dashboard_id: str,
        auth: AuthContext = Depends(get_auth_context),
    ) -> DashboardExport:
        """Export a dashboard to a portable format.

        Args:
            dashboard_id: Dashboard ID.

        Returns:
            DashboardExport data.

        Raises:
            404: If dashboard not found.
            403: If user lacks permission to view.
        """
        dashboard = await store.get_dashboard(
            dashboard_id, tenant_id=auth.tenant_id, schema_name=auth.schema_name
        )
        if dashboard is None:
            raise HTTPException(status_code=404, detail=f"Dashboard '{dashboard_id}' not found")

        if not can_view_dashboard(dashboard, auth.user_id):
            raise HTTPException(status_code=403, detail="Permission denied")

        # Convert widgets to dict format without IDs
        widget_dicts: list[dict[str, Any]] = []
        for widget in dashboard.widgets:
            widget_dict = widget.model_dump()
            # Remove ID and timestamps
            del widget_dict["id"]
            del widget_dict["created_at"]
            del widget_dict["updated_at"]
            widget_dicts.append(widget_dict)

        return DashboardExport(
            version="1.0",
            name=dashboard.name,
            description=dashboard.description,
            layout=dashboard.layout,
            widgets=widget_dicts,
            filters=dashboard.filters,
        )

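    # Usage sketch (illustrative; not part of the shipped module). Exports are
    # portable: widget IDs and timestamps are stripped before serialization, so
    # the payload can be re-imported elsewhere. Rough response shape (values are
    # illustrative):
    #
    #   GET /dashboards/{dashboard_id}/export
    #   {
    #       "version": "1.0",
    #       "name": "Revenue overview",
    #       "description": null,
    #       "layout": {...},
    #       "widgets": [{"type": ..., "title": ..., "query": ..., "position": ..., "config": ...}],
    #       "filters": [...]
    #   }
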
    @router.post("/dashboards/import", response_model=Dashboard, status_code=201)
    async def import_dashboard(
        request: DashboardImportRequest,
        auth: AuthContext = Depends(get_auth_context),
    ) -> Dashboard:
        """Import a dashboard from exported data.

        Args:
            request: Import request with export data.

        Returns:
            Imported dashboard.
        """
        export_data = request.export_data

        # Create the dashboard
        dashboard = await store.create_dashboard(
            DashboardCreate(
                name=request.name_override or export_data.name,
                description=export_data.description,
                layout=export_data.layout,
            ),
            tenant_id=auth.tenant_id,
            owner_id=auth.user_id,
            schema_name=auth.schema_name,
        )

        # Update with filters
        if export_data.filters:
            await store.update_dashboard(
                dashboard.id,
                DashboardUpdate(filters=export_data.filters),
                tenant_id=auth.tenant_id,
                schema_name=auth.schema_name,
            )

        # Add widgets
        for widget_dict in export_data.widgets:
            await store.add_widget(
                dashboard.id,
                WidgetCreate(
                    type=widget_dict["type"],
                    title=widget_dict["title"],
                    query=widget_dict.get("query"),
                    position=widget_dict["position"],
                    config=widget_dict.get("config"),
                ),
                tenant_id=auth.tenant_id,
                schema_name=auth.schema_name,
            )

        # Return the complete dashboard
        result = await store.get_dashboard(
            dashboard.id, tenant_id=auth.tenant_id, schema_name=auth.schema_name
        )
        if result is None:
            raise HTTPException(status_code=500, detail="Failed to retrieve imported dashboard")
        return result

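    # Usage sketch (illustrative; not part of the shipped module). Import wraps an
    # export payload in a request object; as used above, DashboardImportRequest
    # carries `export_data` plus an optional `name_override`. Widgets are
    # re-created one by one, so they receive new IDs.
    #
    #   POST /dashboards/import
    #   {
    #       "export_data": { ...payload from GET /dashboards/{id}/export... },
    #       "name_override": "Revenue overview (copy)"
    #   }
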
    # ========================================================================
    # Saved Query Endpoints
    # ========================================================================

    @router.get("/saved-queries", response_model=SavedQueryListResponse)
    async def list_saved_queries(
        auth: AuthContext = Depends(get_auth_context),
    ) -> SavedQueryListResponse:
        """List saved queries for the current tenant.

        Returns queries owned by the user or shared with all users.

        Returns:
            List of saved queries.
        """
        saved_query_store = engine.saved_query_store
        queries = await saved_query_store.list(
            tenant_id=auth.tenant_id,
            user_id=auth.user_id,
            schema_name=auth.schema_name,
        )
        return SavedQueryListResponse(queries=queries)

    @router.get("/saved-queries/{query_id}", response_model=SavedQuery)
    async def get_saved_query(
        query_id: str,
        auth: AuthContext = Depends(get_auth_context),
    ) -> SavedQuery:
        """Get a saved query by ID.

        Args:
            query_id: Saved query ID.

        Returns:
            Saved query.

        Raises:
            404: If saved query not found.
        """
        saved_query_store = engine.saved_query_store
        query = await saved_query_store.get(
            query_id, tenant_id=auth.tenant_id, schema_name=auth.schema_name
        )
        if query is None:
            raise HTTPException(status_code=404, detail=f"Saved query '{query_id}' not found")
        return query

    @router.post("/saved-queries", response_model=SavedQuery, status_code=201)
    async def create_saved_query(
        data: SavedQueryCreate,
        auth: AuthContext = Depends(get_auth_context),
    ) -> SavedQuery:
        """Create a new saved query.

        Args:
            data: Saved query creation data.

        Returns:
            Created saved query.
        """
        saved_query_store = engine.saved_query_store
        return await saved_query_store.create(
            data,
            tenant_id=auth.tenant_id,
            owner_id=auth.user_id,
            schema_name=auth.schema_name,
        )

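    # Usage sketch (illustrative; not part of the shipped module). Creating and
    # then listing saved queries; the SavedQueryCreate fields shown here (name,
    # query) are assumptions for illustration — the real model lives in
    # prismiq.persistence.saved_query_store.
    #
    #   POST /saved-queries
    #   {"name": "Monthly signups", "query": { ...query definition... }}
    #
    #   GET /saved-queries
    #   -> {"queries": [ ...queries owned by the user or shared with all users... ]}
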
    @router.patch("/saved-queries/{query_id}", response_model=SavedQuery)
    async def update_saved_query(
        query_id: str,
        data: SavedQueryUpdate,
        auth: AuthContext = Depends(get_auth_context),
    ) -> SavedQuery:
        """Update a saved query.

        Only the owner can update a query.

        Args:
            query_id: Saved query ID.
            data: Fields to update.

        Returns:
            Updated saved query.

        Raises:
            404: If saved query not found or user is not owner.
        """
        saved_query_store = engine.saved_query_store
        updated = await saved_query_store.update(
            query_id,
            data,
            tenant_id=auth.tenant_id,
            user_id=auth.user_id,
            schema_name=auth.schema_name,
        )
        if updated is None:
            raise HTTPException(
                status_code=404,
                detail=f"Saved query '{query_id}' not found or permission denied",
            )
        return updated

    @router.delete("/saved-queries/{query_id}", response_model=SuccessResponse)
    async def delete_saved_query(
        query_id: str,
        auth: AuthContext = Depends(get_auth_context),
    ) -> SuccessResponse:
        """Delete a saved query.

        Only the owner can delete a query.

        Args:
            query_id: Saved query ID.

        Returns:
            Success response.

        Raises:
            404: If saved query not found or user is not owner.
        """
        saved_query_store = engine.saved_query_store
        deleted = await saved_query_store.delete(
            query_id,
            tenant_id=auth.tenant_id,
            user_id=auth.user_id,
            schema_name=auth.schema_name,
        )
        if not deleted:
            raise HTTPException(
                status_code=404,
                detail=f"Saved query '{query_id}' not found or permission denied",
            )
        return SuccessResponse(message=f"Saved query '{query_id}' deleted")

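    # Usage sketch (illustrative; not part of the shipped module). Ownership is
    # enforced in the store layer: a PATCH or DELETE by a non-owner returns 404
    # ("not found or permission denied") rather than 403, so callers cannot probe
    # for the existence of someone else's query. The PATCH body field below is
    # illustrative; SavedQueryUpdate defines the updatable fields.
    #
    #   PATCH  /saved-queries/{query_id}   {"name": "Monthly signups (v2)"}
    #   DELETE /saved-queries/{query_id}
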
    # ========================================================================
    # Pin Endpoints
    # ========================================================================

    @router.post("/pins", response_model=PinnedDashboard, status_code=201)
    async def pin_dashboard(
        request: PinRequest,
        auth: AuthContext = Depends(get_auth_context),
    ) -> PinnedDashboard:
        """Pin a dashboard to a context.

        Args:
            request: Pin request with dashboard_id, context, and optional position.

        Returns:
            Created pin entry.

        Raises:
            400: If dashboard already pinned to context.
            401: If user_id is not provided.
            404: If dashboard not found.
        """
        if auth.user_id is None:
            raise HTTPException(status_code=401, detail="User ID required for pin operations")
        try:
            return await store.pin_dashboard(
                dashboard_id=request.dashboard_id,
                context=request.context,
                tenant_id=auth.tenant_id,
                user_id=auth.user_id,
                position=request.position,
                schema_name=auth.schema_name,
            )
        except ValueError as e:
            if "not found" in str(e):
                raise HTTPException(status_code=404, detail=str(e)) from e
            raise HTTPException(status_code=400, detail=str(e)) from e

    @router.delete("/pins", response_model=SuccessResponse)
    async def unpin_dashboard(
        request: UnpinRequest,
        auth: AuthContext = Depends(get_auth_context),
    ) -> SuccessResponse:
        """Unpin a dashboard from a context.

        Args:
            request: Unpin request with dashboard_id and context.

        Returns:
            Success response.

        Raises:
            401: If user_id is not provided.
            404: If pin not found.
        """
        if auth.user_id is None:
            raise HTTPException(status_code=401, detail="User ID required for pin operations")
        unpinned = await store.unpin_dashboard(
            dashboard_id=request.dashboard_id,
            context=request.context,
            tenant_id=auth.tenant_id,
            user_id=auth.user_id,
            schema_name=auth.schema_name,
        )
        if not unpinned:
            raise HTTPException(
                status_code=404,
                detail=f"Pin not found for dashboard '{request.dashboard_id}' in context '{request.context}'",
            )
        return SuccessResponse(message="Dashboard unpinned successfully")

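    # Usage sketch (illustrative; not part of the shipped module). Pins are scoped
    # to a tenant, a user, and a context string. Note that unpinning is a DELETE
    # with a JSON body (UnpinRequest), which some HTTP clients and intermediaries
    # handle poorly. IDs and context names below are illustrative:
    #
    #   POST   /pins   {"dashboard_id": "dash-1", "context": "accounts", "position": 0}
    #   DELETE /pins   {"dashboard_id": "dash-1", "context": "accounts"}
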
    @router.get("/pins", response_model=PinnedDashboardsResponse)
    async def get_pinned_dashboards(
        context: str,
        auth: AuthContext = Depends(get_auth_context),
    ) -> PinnedDashboardsResponse:
        """Get all dashboards pinned to a context.

        Args:
            context: Context to get pins for (e.g., "accounts", "dashboard").

        Returns:
            List of dashboards and their pin metadata.

        Raises:
            401: If user_id is not provided.
        """
        if auth.user_id is None:
            raise HTTPException(status_code=401, detail="User ID required for pin operations")
        dashboards = await store.get_pinned_dashboards(
            context=context,
            tenant_id=auth.tenant_id,
            user_id=auth.user_id,
            schema_name=auth.schema_name,
        )
        pins = await store.get_pins_for_context(
            context=context,
            tenant_id=auth.tenant_id,
            user_id=auth.user_id,
            schema_name=auth.schema_name,
        )
        return PinnedDashboardsResponse(dashboards=dashboards, pins=pins)

    @router.get("/dashboards/{dashboard_id}/pins", response_model=DashboardPinContextsResponse)
    async def get_dashboard_pin_contexts(
        dashboard_id: str,
        auth: AuthContext = Depends(get_auth_context),
    ) -> DashboardPinContextsResponse:
        """Get all contexts where a dashboard is pinned.

        Args:
            dashboard_id: Dashboard ID.

        Returns:
            List of context names.

        Raises:
            401: If user_id is not provided.
            404: If dashboard not found.
        """
        if auth.user_id is None:
            raise HTTPException(status_code=401, detail="User ID required for pin operations")
        # Verify dashboard exists
        dashboard = await store.get_dashboard(
            dashboard_id, tenant_id=auth.tenant_id, schema_name=auth.schema_name
        )
        if dashboard is None:
            raise HTTPException(status_code=404, detail=f"Dashboard '{dashboard_id}' not found")

        contexts = await store.get_pin_contexts_for_dashboard(
            dashboard_id=dashboard_id,
            tenant_id=auth.tenant_id,
            user_id=auth.user_id,
            schema_name=auth.schema_name,
        )
        return DashboardPinContextsResponse(contexts=contexts)

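    # Usage sketch (illustrative; not part of the shipped module). `context` has no
    # path placeholder and is a scalar, so FastAPI exposes it as a query parameter
    # on GET /pins. Response keys follow the models constructed above:
    #
    #   GET /pins?context=accounts
    #   -> {"dashboards": [...], "pins": [...]}
    #
    #   GET /dashboards/dash-1/pins
    #   -> {"contexts": ["accounts", ...]}
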
    @router.put("/pins/order", response_model=SuccessResponse)
    async def reorder_pins(
        request: ReorderPinsRequest,
        auth: AuthContext = Depends(get_auth_context),
    ) -> SuccessResponse:
        """Reorder pinned dashboards within a context.

        Args:
            request: Reorder request with context and ordered dashboard IDs.

        Returns:
            Success response.

        Raises:
            401: If user_id is not provided.
        """
        if auth.user_id is None:
            raise HTTPException(status_code=401, detail="User ID required for pin operations")
        await store.reorder_pins(
            context=request.context,
            dashboard_ids=request.dashboard_ids,
            tenant_id=auth.tenant_id,
            user_id=auth.user_id,
            schema_name=auth.schema_name,
        )
        return SuccessResponse(message="Pins reordered successfully")

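    # Usage sketch (illustrative; not part of the shipped module). Per the request
    # model used above, reordering sends the context plus the dashboard IDs in
    # their new order (ReorderPinsRequest carries `context` and `dashboard_ids`):
    #
    #   PUT /pins/order
    #   {"context": "accounts", "dashboard_ids": ["dash-2", "dash-1", "dash-3"]}
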
    return router