prismiq 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- prismiq/__init__.py +543 -0
- prismiq/api.py +1889 -0
- prismiq/auth.py +108 -0
- prismiq/cache.py +527 -0
- prismiq/calculated_field_processor.py +231 -0
- prismiq/calculated_fields.py +819 -0
- prismiq/dashboard_store.py +1219 -0
- prismiq/dashboards.py +374 -0
- prismiq/dates.py +247 -0
- prismiq/engine.py +1315 -0
- prismiq/executor.py +345 -0
- prismiq/filter_merge.py +397 -0
- prismiq/formatting.py +298 -0
- prismiq/logging.py +489 -0
- prismiq/metrics.py +536 -0
- prismiq/middleware.py +346 -0
- prismiq/permissions.py +87 -0
- prismiq/persistence/__init__.py +45 -0
- prismiq/persistence/models.py +208 -0
- prismiq/persistence/postgres_store.py +1119 -0
- prismiq/persistence/saved_query_store.py +336 -0
- prismiq/persistence/schema.sql +95 -0
- prismiq/persistence/setup.py +222 -0
- prismiq/persistence/tables.py +76 -0
- prismiq/pins.py +72 -0
- prismiq/py.typed +0 -0
- prismiq/query.py +1233 -0
- prismiq/schema.py +333 -0
- prismiq/schema_config.py +354 -0
- prismiq/sql_utils.py +147 -0
- prismiq/sql_validator.py +219 -0
- prismiq/sqlalchemy_builder.py +577 -0
- prismiq/timeseries.py +410 -0
- prismiq/transforms.py +471 -0
- prismiq/trends.py +573 -0
- prismiq/types.py +688 -0
- prismiq-0.1.0.dist-info/METADATA +109 -0
- prismiq-0.1.0.dist-info/RECORD +39 -0
- prismiq-0.1.0.dist-info/WHEEL +4 -0
prismiq/types.py
ADDED
@@ -0,0 +1,688 @@
"""Prismiq type definitions.

This module contains all Pydantic models and custom exceptions for the
Prismiq embedded analytics platform.
"""

from __future__ import annotations

from enum import Enum
from typing import TYPE_CHECKING, Any

from pydantic import BaseModel, ConfigDict, field_validator, model_validator

if TYPE_CHECKING:
    pass

# ============================================================================
# Schema Types - Database metadata models
# ============================================================================


class ColumnSchema(BaseModel):
    """Schema information for a single database column."""

    model_config = ConfigDict()

    name: str
    """Column name."""

    data_type: str
    """PostgreSQL data type (e.g., 'integer', 'character varying')."""

    is_nullable: bool
    """Whether the column allows NULL values."""

    is_primary_key: bool = False
    """Whether this column is part of the primary key."""

    default_value: str | None = None
    """Default value expression, if any."""


class TableSchema(BaseModel):
    """Schema information for a database table."""

    model_config = ConfigDict()

    name: str
    """Table name."""

    schema_name: str = "public"
    """Database schema (namespace) containing the table."""

    columns: list[ColumnSchema]
    """List of columns in the table."""

    row_count: int | None = None
    """Approximate row count (from pg_class.reltuples). None if not fetched."""

    def get_column(self, column_name: str) -> ColumnSchema | None:
        """Get a column by name, or None if not found."""
        for col in self.columns:
            if col.name == column_name:
                return col
        return None

    def has_column(self, column_name: str) -> bool:
        """Check if the table has a column with the given name."""
        return self.get_column(column_name) is not None


class Relationship(BaseModel):
    """Foreign key relationship between two tables."""

    model_config = ConfigDict()

    from_table: str
    """Name of the table containing the foreign key."""

    from_column: str
    """Column name in the from_table."""

    to_table: str
    """Name of the referenced table."""

    to_column: str
    """Column name in the to_table (usually primary key)."""


class DatabaseSchema(BaseModel):
    """Complete schema for an exposed database."""

    model_config = ConfigDict()

    tables: list[TableSchema]
    """List of exposed tables."""

    relationships: list[Relationship]
    """Foreign key relationships between tables."""

    def get_table(self, table_name: str) -> TableSchema | None:
        """Get a table by name, or None if not found."""
        for table in self.tables:
            if table.name == table_name:
                return table
        return None

    def has_table(self, table_name: str) -> bool:
        """Check if the schema contains a table with the given name."""
        return self.get_table(table_name) is not None

    def table_names(self) -> list[str]:
        """Get list of all table names."""
        return [t.name for t in self.tables]
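
As an illustration of how these schema models compose, here is a minimal sketch; the table, column, and relationship values below are hypothetical and not taken from the package.

from prismiq.types import ColumnSchema, DatabaseSchema, Relationship, TableSchema

# Hypothetical two-table schema: orders.customer_id -> customers.id
customers = TableSchema(
    name="customers",
    columns=[
        ColumnSchema(name="id", data_type="integer", is_nullable=False, is_primary_key=True),
        ColumnSchema(name="email", data_type="character varying", is_nullable=False),
    ],
)
orders = TableSchema(
    name="orders",
    columns=[
        ColumnSchema(name="id", data_type="integer", is_nullable=False, is_primary_key=True),
        ColumnSchema(name="customer_id", data_type="integer", is_nullable=False),
        ColumnSchema(name="total", data_type="numeric", is_nullable=True),
    ],
)
schema = DatabaseSchema(
    tables=[customers, orders],
    relationships=[
        Relationship(
            from_table="orders", from_column="customer_id",
            to_table="customers", to_column="id",
        )
    ],
)

assert schema.has_table("orders")
orders_table = schema.get_table("orders")
assert orders_table is not None and orders_table.has_column("customer_id")
assert schema.table_names() == ["customers", "orders"]
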
# ============================================================================
# Query Types - Query definition models
# ============================================================================


class QueryTable(BaseModel):
    """A table reference in a query."""

    model_config = ConfigDict()

    id: str
    """Unique identifier for this table in the query (e.g., 't1', 't2')."""

    name: str
    """Actual table name in the database."""

    alias: str | None = None
    """Optional alias for the table in the query."""


class JoinType(str, Enum):
    """SQL join types."""

    INNER = "INNER"
    LEFT = "LEFT"
    RIGHT = "RIGHT"
    FULL = "FULL"


class JoinDefinition(BaseModel):
    """Definition of a join between two tables."""

    model_config = ConfigDict()

    from_table_id: str
    """ID of the left table in the join."""

    from_column: str
    """Column name in the left table."""

    to_table_id: str
    """ID of the right table in the join."""

    to_column: str
    """Column name in the right table."""

    join_type: JoinType = JoinType.INNER
    """Type of join to perform."""


class AggregationType(str, Enum):
    """SQL aggregation functions."""

    NONE = "none"
    SUM = "sum"
    AVG = "avg"
    COUNT = "count"
    COUNT_DISTINCT = "count_distinct"
    MIN = "min"
    MAX = "max"


class DateTruncInterval(str, Enum):
    """Date truncation intervals for date/timestamp columns."""

    YEAR = "year"
    QUARTER = "quarter"
    MONTH = "month"
    WEEK = "week"
    DAY = "day"


class TimeSeriesInterval(str, Enum):
    """Time series interval options for date bucketing."""

    DAY = "day"
    WEEK = "week"
    MONTH = "month"
    QUARTER = "quarter"
    YEAR = "year"


class ColumnSelection(BaseModel):
    """A column to select in a query."""

    model_config = ConfigDict()

    table_id: str
    """ID of the table containing the column."""

    column: str
    """Column name."""

    aggregation: AggregationType = AggregationType.NONE
    """Aggregation function to apply."""

    alias: str | None = None
    """Optional alias for the result column."""

    date_trunc: str | None = None
    """Date truncation unit (e.g., 'year', 'month', 'day') for date columns."""

    date_format: str | None = None
    """Date format string for display (e.g., 'MMM-yyyy')."""

    sql_expression: str | None = None
    """
    Pre-computed SQL expression for calculated fields.
    When provided, this is used directly instead of looking up the column.
    """


class FilterOperator(str, Enum):
    """SQL filter operators.

    SECURITY NOTE for IN_SUBQUERY:
    The IN_SUBQUERY operator interpolates SQL directly without parameterization.
    This is by design since subqueries cannot be parameterized. Callers MUST
    ensure the SQL is generated from trusted internal code (e.g., RLS rules),
    never from user input. The SQL should reference only allowed tables/schemas.
    """

    EQ = "eq"
    NEQ = "neq"
    GT = "gt"
    GTE = "gte"
    LT = "lt"
    LTE = "lte"
    IN = "in_"
    NOT_IN = "not_in"
    IN_OR_NULL = "in_or_null"
    LIKE = "like"
    ILIKE = "ilike"
    NOT_LIKE = "not_like"
    NOT_ILIKE = "not_ilike"
    BETWEEN = "between"
    IS_NULL = "is_null"
    IS_NOT_NULL = "is_not_null"
    IN_SUBQUERY = "in_subquery"


class FilterDefinition(BaseModel):
    """A filter condition in a query."""

    model_config = ConfigDict()

    table_id: str
    """ID of the table containing the column to filter."""

    column: str
    """Column name to filter on."""

    operator: FilterOperator
    """Filter operator."""

    value: Any = None
    """
    Value(s) for the filter.
    - Single value for eq, neq, gt, gte, lt, lte, like, ilike
    - List for in_, not_in, in_or_null
    - Tuple of (min, max) for between
    - None for is_null, is_not_null
    - Dict with 'sql' key for in_subquery: {"sql": "SELECT id FROM ..."}
      SECURITY: The SQL is interpolated directly. Must be from trusted code only.
    """

    sql_expression: str | None = None
    """
    Pre-computed SQL expression for calculated fields.
    When provided, this is used directly instead of looking up the column.
    """


class SortDirection(str, Enum):
    """SQL sort directions."""

    ASC = "ASC"
    DESC = "DESC"


class SortDefinition(BaseModel):
    """A sort order definition."""

    model_config = ConfigDict()

    table_id: str
    """ID of the table containing the column to sort by."""

    column: str
    """Column name to sort by."""

    direction: SortDirection = SortDirection.ASC
    """Sort direction."""


class GroupByDefinition(BaseModel):
    """A group by column definition."""

    model_config = ConfigDict()

    table_id: str
    """ID of the table containing the column."""

    column: str
    """Column name to group by."""


class CalculatedField(BaseModel):
    """A calculated field definition with an expression.

    Calculated fields allow defining computed columns using expressions
    that can reference other columns and calculated fields.
    """

    model_config = ConfigDict()

    name: str
    """Name of the calculated field."""

    expression: str
    """
    Expression defining the calculation.
    Uses a SQL-like expression language with functions like:
    - if(condition, true_val, false_val)
    - sum(expr), avg(expr), count(expr)
    - year(date), month(date), today()
    - Field references: [field_name]
    """

    sql_expression: str | None = None
    """
    Pre-computed SQL expression with all field references resolved.
    When provided, this is used directly instead of parsing `expression`.
    This allows the caller to handle inter-field dependency resolution.

    IMPORTANT: This is an internal field for SQL generation. The SQL should:
    - Have all column references fully qualified (e.g., "table"."column")
    - Have all inter-field dependencies already resolved
    - Be valid PostgreSQL syntax
    """

    has_internal_aggregation: bool = False
    """
    Whether this calculated field's expression contains aggregation functions.
    When True, this field should NOT be included in GROUP BY clauses.
    The caller (e.g., converter) should set this based on expression analysis.
    """

    data_type: str = "number"
    """Data type of the result: 'number', 'string', 'date', 'boolean'."""


class TimeSeriesConfig(BaseModel):
    """Configuration for time series queries.

    When provided in a QueryDefinition, the query will automatically
    bucket dates using PostgreSQL's date_trunc function.
    """

    model_config = ConfigDict()

    table_id: str
    """ID of the table containing the date column."""

    date_column: str
    """Name of the date/timestamp column to bucket."""

    interval: str
    """Time interval for bucketing (minute, hour, day, week, month, quarter, year)."""

    fill_missing: bool = True
    """Whether to fill missing time buckets with default values."""

    fill_value: Any = 0
    """Value to use for missing time buckets."""

    alias: str | None = None
    """Optional alias for the date bucket column."""

    @field_validator("interval")
    @classmethod
    def validate_interval(cls, v: str) -> str:
        """Validate that interval is a valid TimeInterval value."""
        valid_intervals = {"minute", "hour", "day", "week", "month", "quarter", "year"}
        if v.lower() not in valid_intervals:
            raise ValueError(
                f"Invalid interval '{v}'. Must be one of: {', '.join(valid_intervals)}"
            )
        return v.lower()


class QueryDefinition(BaseModel):
    """Complete query definition."""

    model_config = ConfigDict()

    tables: list[QueryTable]
    """Tables used in the query."""

    joins: list[JoinDefinition] = []
    """Join definitions between tables."""

    columns: list[ColumnSelection]
    """Columns to select."""

    filters: list[FilterDefinition] = []
    """Filter conditions."""

    group_by: list[GroupByDefinition] = []
    """
    Explicit group by columns.
    If empty and aggregations are present, will be auto-derived.
    """

    order_by: list[SortDefinition] = []
    """Sort order."""

    limit: int | None = None
    """Maximum number of rows to return."""

    offset: int | None = None
    """Number of rows to skip."""

    time_series: TimeSeriesConfig | None = None
    """
    Optional time series configuration.
    When present, the query will bucket dates automatically.
    """

    calculated_fields: list[CalculatedField] = []
    """
    Calculated field definitions.
    These fields can be referenced in columns, filters, etc.
    """

    @field_validator("tables")
    @classmethod
    def validate_tables_not_empty(cls, v: list[QueryTable]) -> list[QueryTable]:
        """Ensure at least one table is specified."""
        if not v:
            raise ValueError("At least one table must be specified")
        return v

    @field_validator("columns")
    @classmethod
    def validate_columns_not_empty(cls, v: list[ColumnSelection]) -> list[ColumnSelection]:
        """Ensure at least one column is selected."""
        if not v:
            raise ValueError("At least one column must be selected")
        return v

    @model_validator(mode="after")
    def validate_table_references(self) -> QueryDefinition:
        """Validate that all table_id references point to defined tables."""
        table_ids = {t.id for t in self.tables}

        # Check joins
        for join in self.joins:
            if join.from_table_id not in table_ids:
                raise ValueError(f"Join references unknown table_id: {join.from_table_id}")
            if join.to_table_id not in table_ids:
                raise ValueError(f"Join references unknown table_id: {join.to_table_id}")

        # Check columns
        for col in self.columns:
            if col.table_id not in table_ids:
                raise ValueError(f"Column selection references unknown table_id: {col.table_id}")

        # Check filters
        for f in self.filters:
            if f.table_id not in table_ids:
                raise ValueError(f"Filter references unknown table_id: {f.table_id}")

        # Check group_by
        for g in self.group_by:
            if g.table_id not in table_ids:
                raise ValueError(f"Group by references unknown table_id: {g.table_id}")

        # Check order_by
        for o in self.order_by:
            if o.table_id not in table_ids:
                raise ValueError(f"Order by references unknown table_id: {o.table_id}")

        # Check time_series
        if self.time_series and self.time_series.table_id not in table_ids:
            raise ValueError(
                f"Time series references unknown table_id: {self.time_series.table_id}"
            )

        return self

    def get_table_by_id(self, table_id: str) -> QueryTable | None:
        """Get a QueryTable by its ID."""
        for t in self.tables:
            if t.id == table_id:
                return t
        return None

    def has_aggregations(self) -> bool:
        """Check if any column has an aggregation."""
        return any(col.aggregation != AggregationType.NONE for col in self.columns)

    def get_non_aggregated_columns(self) -> list[ColumnSelection]:
        """Get columns that don't have aggregations applied."""
        return [col for col in self.columns if col.aggregation == AggregationType.NONE]

    def derive_group_by(self) -> list[GroupByDefinition]:
        """Auto-derive GROUP BY from non-aggregated columns.

        When aggregations are present but group_by is empty, all non-
        aggregated columns should be in GROUP BY.
        """
        if not self.has_aggregations():
            return []

        if self.group_by:
            # Explicit group_by provided
            return self.group_by

        # Auto-derive from non-aggregated columns
        return [
            GroupByDefinition(table_id=col.table_id, column=col.column)
            for col in self.get_non_aggregated_columns()
        ]
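
To make the query model concrete, the following sketch builds a small aggregated query and shows the auto-derived GROUP BY. All table and column names are hypothetical, not taken from the package.

from prismiq.types import (
    AggregationType,
    ColumnSelection,
    FilterDefinition,
    FilterOperator,
    JoinDefinition,
    QueryDefinition,
    QueryTable,
)

# Hypothetical query: total order value per customer email, completed orders only.
query = QueryDefinition(
    tables=[
        QueryTable(id="t1", name="orders"),
        QueryTable(id="t2", name="customers"),
    ],
    joins=[
        JoinDefinition(
            from_table_id="t1", from_column="customer_id",
            to_table_id="t2", to_column="id",
        )
    ],
    columns=[
        ColumnSelection(table_id="t2", column="email"),
        ColumnSelection(
            table_id="t1", column="total",
            aggregation=AggregationType.SUM, alias="total_spent",
        ),
    ],
    filters=[
        FilterDefinition(
            table_id="t1", column="status",
            operator=FilterOperator.EQ, value="completed",
        ),
    ],
    limit=100,
)

assert query.has_aggregations()
# group_by was left empty, so it is derived from the non-aggregated columns:
assert [(g.table_id, g.column) for g in query.derive_group_by()] == [("t2", "email")]
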
# ============================================================================
# Result Types
# ============================================================================


class QueryResult(BaseModel):
    """Result of executing a query."""

    model_config = ConfigDict()

    columns: list[str]
    """Column names in the result."""

    column_types: list[str]
    """PostgreSQL data types for each column."""

    rows: list[list[Any]]
    """Result rows as a list of lists."""

    row_count: int
    """Number of rows returned."""

    truncated: bool = False
    """Whether the result was truncated due to row limit."""

    execution_time_ms: float
    """Query execution time in milliseconds."""
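
A QueryResult carries rows positionally alongside the column names, so callers can zip the two into records. A brief sketch with made-up values:

from prismiq.types import QueryResult

result = QueryResult(
    columns=["email", "total_spent"],
    column_types=["character varying", "numeric"],
    rows=[["a@example.com", 120.5], ["b@example.com", 42.0]],
    row_count=2,
    execution_time_ms=12.3,
)

# Convert positional rows into dicts keyed by column name.
records = [dict(zip(result.columns, row)) for row in result.rows]
assert records[0]["total_spent"] == 120.5
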
# ============================================================================
# Saved Query Types
# ============================================================================


class SavedQuery(BaseModel):
    """A saved query for reuse across dashboards."""

    model_config = ConfigDict()

    id: str
    """Unique identifier for the saved query."""

    name: str
    """Display name for the saved query."""

    description: str | None = None
    """Optional description of what the query does."""

    query: QueryDefinition
    """The query definition."""

    tenant_id: str
    """Tenant that owns this saved query."""

    owner_id: str | None = None
    """User who created this query (None for shared queries)."""

    is_shared: bool = False
    """Whether the query is shared with all users in the tenant."""

    created_at: str | None = None
    """ISO timestamp when the query was created."""

    updated_at: str | None = None
    """ISO timestamp when the query was last updated."""


class SavedQueryCreate(BaseModel):
    """Data for creating a saved query."""

    model_config = ConfigDict()

    name: str
    """Display name for the saved query."""

    description: str | None = None
    """Optional description of what the query does."""

    query: QueryDefinition
    """The query definition to save."""

    is_shared: bool = False
    """Whether to share the query with all users in the tenant."""


class SavedQueryUpdate(BaseModel):
    """Data for updating a saved query."""

    model_config = ConfigDict()

    name: str | None = None
    """New display name."""

    description: str | None = None
    """New description."""

    query: QueryDefinition | None = None
    """Updated query definition."""

    is_shared: bool | None = None
    """Whether to share the query."""
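
A brief sketch of the saved-query payloads; the names are hypothetical, and since every SavedQueryUpdate field is optional, presumably only the fields being changed need to be set.

from prismiq.types import (
    ColumnSelection,
    QueryDefinition,
    QueryTable,
    SavedQueryCreate,
    SavedQueryUpdate,
)

# Minimal single-table query to save (hypothetical table/column names).
query = QueryDefinition(
    tables=[QueryTable(id="t1", name="orders")],
    columns=[ColumnSelection(table_id="t1", column="total")],
)

create = SavedQueryCreate(name="Order totals", query=query, is_shared=True)

# Partial update: rename only, leaving query and sharing untouched.
rename_only = SavedQueryUpdate(name="Totals by order")
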
# ============================================================================
# Exception Types
# ============================================================================


class PrismiqError(Exception):
    """Base exception for all Prismiq errors."""

    def __init__(self, message: str) -> None:
        self.message = message
        super().__init__(message)


class QueryValidationError(PrismiqError):
    """Raised when a query fails validation."""

    def __init__(self, message: str, errors: list[str] | None = None) -> None:
        super().__init__(message)
        self.errors = errors or []


class QueryTimeoutError(PrismiqError):
    """Raised when a query exceeds the timeout limit."""

    def __init__(self, message: str, timeout_seconds: float) -> None:
        super().__init__(message)
        self.timeout_seconds = timeout_seconds


class QueryExecutionError(PrismiqError):
    """Raised when query execution fails."""

    def __init__(self, message: str, sql: str | None = None) -> None:
        super().__init__(message)
        self.sql = sql


class TableNotFoundError(PrismiqError):
    """Raised when a requested table is not found or not exposed."""

    def __init__(self, table_name: str) -> None:
        super().__init__(f"Table not found: {table_name}")
        self.table_name = table_name