prismiq 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- prismiq/__init__.py +543 -0
- prismiq/api.py +1889 -0
- prismiq/auth.py +108 -0
- prismiq/cache.py +527 -0
- prismiq/calculated_field_processor.py +231 -0
- prismiq/calculated_fields.py +819 -0
- prismiq/dashboard_store.py +1219 -0
- prismiq/dashboards.py +374 -0
- prismiq/dates.py +247 -0
- prismiq/engine.py +1315 -0
- prismiq/executor.py +345 -0
- prismiq/filter_merge.py +397 -0
- prismiq/formatting.py +298 -0
- prismiq/logging.py +489 -0
- prismiq/metrics.py +536 -0
- prismiq/middleware.py +346 -0
- prismiq/permissions.py +87 -0
- prismiq/persistence/__init__.py +45 -0
- prismiq/persistence/models.py +208 -0
- prismiq/persistence/postgres_store.py +1119 -0
- prismiq/persistence/saved_query_store.py +336 -0
- prismiq/persistence/schema.sql +95 -0
- prismiq/persistence/setup.py +222 -0
- prismiq/persistence/tables.py +76 -0
- prismiq/pins.py +72 -0
- prismiq/py.typed +0 -0
- prismiq/query.py +1233 -0
- prismiq/schema.py +333 -0
- prismiq/schema_config.py +354 -0
- prismiq/sql_utils.py +147 -0
- prismiq/sql_validator.py +219 -0
- prismiq/sqlalchemy_builder.py +577 -0
- prismiq/timeseries.py +410 -0
- prismiq/transforms.py +471 -0
- prismiq/trends.py +573 -0
- prismiq/types.py +688 -0
- prismiq-0.1.0.dist-info/METADATA +109 -0
- prismiq-0.1.0.dist-info/RECORD +39 -0
- prismiq-0.1.0.dist-info/WHEEL +4 -0
prismiq/logging.py
ADDED
|
@@ -0,0 +1,489 @@
|
|
|
1
|
+
"""Structured logging for Prismiq.
|
|
2
|
+
|
|
3
|
+
This module provides structured logging with JSON output, request
|
|
4
|
+
correlation, and performance tracking.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
from __future__ import annotations

import contextvars
import json
import logging
import sys
import time
import traceback
import uuid
from collections.abc import Callable
from dataclasses import dataclass, field
from datetime import datetime, timezone
from typing import TYPE_CHECKING, Any

from pydantic import BaseModel, ConfigDict
from starlette.middleware.base import BaseHTTPMiddleware
|
|
21
|
+
|
|
22
|
+
if TYPE_CHECKING:
|
|
23
|
+
from starlette.requests import Request
|
|
24
|
+
from starlette.responses import Response
|
|
25
|
+
|
|
26
|
+
# Context variable holding the current request ID; contextvars propagates
# the value across awaits, so async handlers and everything they call see
# the ID of the request they belong to.
_request_id: contextvars.ContextVar[str | None] = contextvars.ContextVar("request_id", default=None)
28
|
+
|
|
29
|
+
|
|
30
|
+
def get_request_id() -> str | None:
    """Return the request ID bound to the current context, if any.

    Returns:
        The active request ID, or None when called outside a request.
    """
    current: str | None = _request_id.get()
    return current
|
37
|
+
|
|
38
|
+
|
|
39
|
+
def set_request_id(request_id: str) -> None:
    """Bind *request_id* to the current context.

    Args:
        request_id: Identifier to associate with subsequent log records.
    """
    _request_id.set(request_id)
|
46
|
+
|
|
47
|
+
|
|
48
|
+
class LogConfig(BaseModel):
    """Settings that control log output."""

    model_config = ConfigDict(strict=True)

    # Log level name: DEBUG, INFO, WARNING, ERROR, or CRITICAL.
    level: str = "INFO"

    # Output format: "json" for structured logs, "text" for human-readable.
    format: str = "json"

    # Whether each record carries an ISO timestamp.
    include_timestamp: bool = True

    # Whether each record carries the correlated request ID, when one is set.
    include_request_id: bool = True

    # Whether each record carries the caller's file, line, and function.
    include_caller: bool = False
|
67
|
+
|
|
68
|
+
|
|
69
|
+
@dataclass
class LogContext:
    """Immutable-style bag of fields attached to every log record."""

    # Fields merged into each record emitted with this context.
    extra: dict[str, Any] = field(default_factory=dict)

    def with_field(self, key: str, value: Any) -> LogContext:
        """Create a new context with one additional field.

        Args:
            key: Field name.
            value: Field value.

        Returns:
            New LogContext including the given field.
        """
        return self.with_fields(**{key: value})

    def with_fields(self, **fields: Any) -> LogContext:
        """Create a new context with additional fields.

        Args:
            **fields: Fields to merge in; later values win over existing.

        Returns:
            New LogContext including the given fields.
        """
        merged = dict(self.extra)
        merged.update(fields)
        return LogContext(extra=merged)
|
100
|
+
|
|
101
|
+
|
|
102
|
+
class StructuredFormatter(logging.Formatter):
    """JSON-formatted log output.

    Renders each record as a single JSON object, honoring the LogConfig
    toggles for timestamp, request ID, and caller information.
    """

    def __init__(self, config: LogConfig | None = None) -> None:
        """Initialize formatter.

        Args:
            config: Logging configuration; defaults to ``LogConfig()``.
        """
        super().__init__()
        self._config = config or LogConfig()

    def format(self, record: logging.LogRecord) -> str:
        """Format *record* as a JSON string.

        Args:
            record: Log record to render.

        Returns:
            A JSON document with level, message, logger name, and any
            configured or extra fields. Non-serializable values are
            stringified via ``default=str``.
        """
        log_data: dict[str, Any] = {
            "level": record.levelname,
            "message": record.getMessage(),
            "logger": record.name,
        }

        if self._config.include_timestamp:
            log_data["timestamp"] = datetime.now(timezone.utc).isoformat()

        if self._config.include_request_id:
            request_id = get_request_id()
            if request_id:
                log_data["request_id"] = request_id

        if self._config.include_caller:
            log_data["caller"] = {
                "file": record.filename,
                "line": record.lineno,
                "function": record.funcName,
            }

        # Include exception info if present.
        if record.exc_info:
            log_data["exception"] = self.formatException(record.exc_info)

        # Merge fields attached by Logger._log via ``extra_fields``.
        if hasattr(record, "extra_fields"):
            log_data.update(record.extra_fields)  # type: ignore[attr-defined]

        return json.dumps(log_data, default=str)
|
151
|
+
|
|
152
|
+
|
|
153
|
+
class TextFormatter(logging.Formatter):
    """Human-readable text log output.

    Renders each record as a single space-separated line, optionally
    followed by a formatted traceback.
    """

    def __init__(self, config: LogConfig | None = None) -> None:
        """Initialize formatter.

        Args:
            config: Logging configuration; defaults to ``LogConfig()``.
        """
        super().__init__()
        self._config = config or LogConfig()

    def format(self, record: logging.LogRecord) -> str:
        """Format *record* as a readable line of text."""
        pieces: list[str] = []

        if self._config.include_timestamp:
            pieces.append(datetime.now(timezone.utc).strftime("%Y-%m-%d %H:%M:%S"))

        pieces.append(f"[{record.levelname}]")

        if self._config.include_request_id:
            rid = get_request_id()
            if rid:
                # Only the first 8 characters, to keep the line compact.
                pieces.append(f"[{rid[:8]}]")

        pieces.extend([record.name, "-", record.getMessage()])

        # Append any structured fields attached by Logger._log.
        extras = getattr(record, "extra_fields", None)
        if extras:
            rendered = " ".join(f"{k}={v}" for k, v in extras.items())
            pieces.append(f"| {rendered}")

        line = " ".join(pieces)

        # Traceback goes on its own lines below the message.
        if record.exc_info:
            line = line + "\n" + self.formatException(record.exc_info)

        return line
|
201
|
+
|
|
202
|
+
|
|
203
|
+
class Logger:
    """Structured logger with context support.

    Wraps a standard :mod:`logging` logger, attaching context fields and
    the current request ID to every record via ``extra_fields``, which
    the formatters in this module merge into their output.
    """

    def __init__(
        self,
        name: str,
        context: LogContext | None = None,
    ) -> None:
        """Initialize logger.

        Args:
            name: Logger name (usually ``__name__``).
            context: Initial context; defaults to an empty LogContext.
        """
        self._logger = logging.getLogger(name)
        self._context = context or LogContext()

    def with_context(self, **fields: Any) -> Logger:
        """Create a new logger with additional context fields.

        Args:
            **fields: Fields to add to the context.

        Returns:
            New Logger with the same underlying logger name.
        """
        new_context = self._context.with_fields(**fields)
        return Logger(self._logger.name, new_context)

    def _log(self, level: int, msg: str, **fields: Any) -> None:
        """Emit *msg* at *level* with merged context and per-call fields."""
        # Per-call fields take precedence over context fields.
        extra_fields = {**self._context.extra, **fields}

        # Correlate with the active HTTP request, if any.
        request_id = get_request_id()
        if request_id:
            extra_fields["request_id"] = request_id

        # Build the record by hand so ``extra_fields`` can be attached.
        # Caller information is not captured here, hence the placeholder
        # filename and line number.
        record = self._logger.makeRecord(
            self._logger.name,
            level,
            "(unknown file)",
            0,
            msg,
            (),
            None,
        )
        record.extra_fields = extra_fields  # type: ignore[attr-defined]

        self._logger.handle(record)

    def debug(self, msg: str, **fields: Any) -> None:
        """Log at DEBUG level."""
        self._log(logging.DEBUG, msg, **fields)

    def info(self, msg: str, **fields: Any) -> None:
        """Log at INFO level."""
        self._log(logging.INFO, msg, **fields)

    def warning(self, msg: str, **fields: Any) -> None:
        """Log at WARNING level."""
        self._log(logging.WARNING, msg, **fields)

    def error(self, msg: str, **fields: Any) -> None:
        """Log at ERROR level."""
        self._log(logging.ERROR, msg, **fields)

    def critical(self, msg: str, **fields: Any) -> None:
        """Log at CRITICAL level."""
        self._log(logging.CRITICAL, msg, **fields)

    def exception(self, msg: str, **fields: Any) -> None:
        """Log at ERROR level, attaching the active exception's traceback.

        Should be called from an ``except`` block; outside one, the
        ``exception`` field reads "NoneType: None".
        """
        fields["exception"] = traceback.format_exc()
        self._log(logging.ERROR, msg, **fields)
|
285
|
+
|
|
286
|
+
|
|
287
|
+
def configure_logging(config: LogConfig | None = None) -> None:
    """Configure the root logger with structured output.

    Replaces any existing root handlers with a single stdout handler whose
    formatter matches ``config.format`` ("json" for JSON, anything else
    for text).

    Args:
        config: Logging configuration; defaults to ``LogConfig()``.

    Raises:
        AttributeError: If ``config.level`` is not a standard level name.
    """
    config = config or LogConfig()

    # Get root logger. Normalize the level name so "info" works as well
    # as "INFO"; unknown names still raise AttributeError.
    root_logger = logging.getLogger()
    root_logger.setLevel(getattr(logging, config.level.upper()))

    # Remove existing handlers so reconfiguration does not duplicate output.
    for handler in root_logger.handlers[:]:
        root_logger.removeHandler(handler)

    # Create handler writing to stdout.
    handler = logging.StreamHandler(sys.stdout)

    # Set formatter based on config.
    formatter: logging.Formatter = (
        StructuredFormatter(config) if config.format == "json" else TextFormatter(config)
    )

    handler.setFormatter(formatter)
    root_logger.addHandler(handler)
|
313
|
+
|
|
314
|
+
|
|
315
|
+
def get_logger(name: str) -> Logger:
    """Return a structured logger for *name*.

    Args:
        name: Logger name (usually ``__name__``).

    Returns:
        A Logger wrapping ``logging.getLogger(name)``.
    """
    return Logger(name)
|
325
|
+
|
|
326
|
+
|
|
327
|
+
class RequestLoggingMiddleware(BaseHTTPMiddleware):
    """Middleware for logging HTTP requests.

    Logs request start, completion/failure, and latency, and correlates
    every log record emitted during the request with an ``X-Request-ID``.
    """

    def __init__(
        self,
        app: Any,
        logger: Logger | None = None,
        log_request_body: bool = False,
        log_response_body: bool = False,
        exclude_paths: list[str] | None = None,
    ) -> None:
        """Initialize request logging middleware.

        Args:
            app: ASGI application.
            logger: Logger to use. Creates default if not provided.
            log_request_body: Whether to log request bodies.
            log_response_body: Whether to log response bodies.
            exclude_paths: Paths to exclude from logging.
        """
        super().__init__(app)
        self._logger = logger or get_logger("prismiq.http")
        self._log_request_body = log_request_body
        self._log_response_body = log_response_body
        self._exclude_paths = set(exclude_paths or [])

    async def dispatch(self, request: Request, call_next: Callable[..., Any]) -> Response:
        """Process request through logging middleware."""
        # Skip excluded paths.
        if request.url.path in self._exclude_paths:
            return await call_next(request)

        # Honor an inbound X-Request-ID so IDs correlate across services.
        request_id = request.headers.get("X-Request-ID") or str(uuid.uuid4())
        # Bind directly (not via set_request_id) so we hold the token and
        # can restore the previous value afterwards; without the reset the
        # ID leaks into unrelated later work on the same context.
        token = _request_id.set(request_id)

        # Start timing.
        start_time = time.perf_counter()

        # Log request start.
        self._logger.info(
            "Request started",
            method=request.method,
            path=request.url.path,
            query=str(request.query_params),
            client_ip=request.client.host if request.client else "unknown",
        )

        try:
            response = await call_next(request)

            duration_ms = (time.perf_counter() - start_time) * 1000

            self._logger.info(
                "Request completed",
                method=request.method,
                path=request.url.path,
                status_code=response.status_code,
                duration_ms=round(duration_ms, 2),
            )

            # Expose the correlation ID to the client.
            response.headers["X-Request-ID"] = request_id

            return response

        except Exception:
            duration_ms = (time.perf_counter() - start_time) * 1000

            self._logger.exception(
                "Request failed",
                method=request.method,
                path=request.url.path,
                duration_ms=round(duration_ms, 2),
            )
            raise
        finally:
            # Restore the previous request ID (usually None).
            _request_id.reset(token)
|
411
|
+
|
|
412
|
+
|
|
413
|
+
@dataclass
class QueryLog:
    """Record of a single database query execution."""

    # SQL text that was executed.
    query: str

    # Wall-clock execution time in milliseconds.
    duration_ms: float

    # Rows returned or affected.
    row_count: int

    # UTC time at which this entry was created.
    timestamp: datetime = field(default_factory=lambda: datetime.now(timezone.utc))

    # Bound query parameters, when the query had any.
    parameters: dict[str, Any] | None = None
|
431
|
+
|
|
432
|
+
|
|
433
|
+
class QueryLogger:
    """Logger for database queries.

    Tracks and logs query performance, flagging slow queries at WARNING.
    """

    def __init__(
        self,
        logger: Logger | None = None,
        slow_query_threshold_ms: float = 1000.0,
    ) -> None:
        """Initialize query logger.

        Args:
            logger: Logger to use; defaults to the "prismiq.query" logger.
            slow_query_threshold_ms: Queries at or above this duration (in
                milliseconds) are logged at WARNING instead of DEBUG.
        """
        self._logger = logger or get_logger("prismiq.query")
        self._slow_query_threshold_ms = slow_query_threshold_ms

    def log_query(
        self,
        query: str,
        duration_ms: float,
        row_count: int,
        parameters: dict[str, Any] | None = None,
    ) -> QueryLog:
        """Log a query execution.

        Args:
            query: SQL query text.
            duration_ms: Execution time in milliseconds.
            row_count: Number of rows returned/affected.
            parameters: Bound query parameters, if any.

        Returns:
            QueryLog entry describing this execution.
        """
        log_entry = QueryLog(
            query=query,
            duration_ms=duration_ms,
            row_count=row_count,
            parameters=parameters,
        )

        # Slow queries surface at WARNING so they stand out in production.
        if duration_ms >= self._slow_query_threshold_ms:
            self._logger.warning(
                "Slow query executed",
                query=query[:200],  # Truncate long queries
                duration_ms=round(duration_ms, 2),
                row_count=row_count,
            )
        else:
            self._logger.debug(
                "Query executed",
                query=query[:200],
                duration_ms=round(duration_ms, 2),
                row_count=row_count,
            )

        return log_entry