prismiq-0.1.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- prismiq/__init__.py +543 -0
- prismiq/api.py +1889 -0
- prismiq/auth.py +108 -0
- prismiq/cache.py +527 -0
- prismiq/calculated_field_processor.py +231 -0
- prismiq/calculated_fields.py +819 -0
- prismiq/dashboard_store.py +1219 -0
- prismiq/dashboards.py +374 -0
- prismiq/dates.py +247 -0
- prismiq/engine.py +1315 -0
- prismiq/executor.py +345 -0
- prismiq/filter_merge.py +397 -0
- prismiq/formatting.py +298 -0
- prismiq/logging.py +489 -0
- prismiq/metrics.py +536 -0
- prismiq/middleware.py +346 -0
- prismiq/permissions.py +87 -0
- prismiq/persistence/__init__.py +45 -0
- prismiq/persistence/models.py +208 -0
- prismiq/persistence/postgres_store.py +1119 -0
- prismiq/persistence/saved_query_store.py +336 -0
- prismiq/persistence/schema.sql +95 -0
- prismiq/persistence/setup.py +222 -0
- prismiq/persistence/tables.py +76 -0
- prismiq/pins.py +72 -0
- prismiq/py.typed +0 -0
- prismiq/query.py +1233 -0
- prismiq/schema.py +333 -0
- prismiq/schema_config.py +354 -0
- prismiq/sql_utils.py +147 -0
- prismiq/sql_validator.py +219 -0
- prismiq/sqlalchemy_builder.py +577 -0
- prismiq/timeseries.py +410 -0
- prismiq/transforms.py +471 -0
- prismiq/trends.py +573 -0
- prismiq/types.py +688 -0
- prismiq-0.1.0.dist-info/METADATA +109 -0
- prismiq-0.1.0.dist-info/RECORD +39 -0
- prismiq-0.1.0.dist-info/WHEEL +4 -0
prismiq/auth.py
ADDED
@@ -0,0 +1,108 @@
"""Authentication context protocol for multi-tenancy."""

from __future__ import annotations

from dataclasses import dataclass
from typing import Protocol, runtime_checkable

from fastapi import Request


@runtime_checkable
class AuthContext(Protocol):
    """Protocol for authentication context.

    Developers implement this interface with their auth system.
    Prismiq only requires tenant_id and user_id properties.
    Developers can add any extra fields their app needs.

    Example implementations:
    - Extract from JWT claims
    - Extract from Clerk session
    - Extract from API key lookup
    - Extract from request headers
    """

    @property
    def tenant_id(self) -> str:
        """Tenant/organization ID for data isolation.

        All dashboard and widget operations are scoped to this tenant.
        This is REQUIRED for all operations.
        """
        ...

    @property
    def user_id(self) -> str | None:
        """User ID for ownership and permissions.

        Used for:
        - Setting owner_id on created dashboards
        - Checking edit/delete permissions
        - Filtering dashboards by allowed_viewers

        Can be None for system/API-key based access.
        """
        ...

    @property
    def schema_name(self) -> str | None:
        """PostgreSQL schema name for this tenant.

        Used for schema-based multi-tenancy where each tenant has their
        own PostgreSQL schema (e.g., "org_123", "tenant_abc").

        If None, the engine's default schema is used (typically "public").
        """
        ...


@dataclass(frozen=True)
class SimpleAuthContext:
    """Simple implementation of AuthContext for basic use cases.

    Use this when you have simple header-based authentication. For
    production, implement your own AuthContext with your auth system.
    """

    tenant_id: str
    user_id: str | None = None
    schema_name: str | None = None

    # Optional: add extra fields your app needs
    email: str | None = None
    roles: list[str] | None = None


def create_header_auth_dependency():
    """Create a FastAPI dependency that extracts auth from headers.

    Returns a factory function that creates the dependency.
    This is the simplest way to add multi-tenancy.

    Usage:
        get_auth = create_header_auth_dependency()
        router = create_router(engine, get_auth_context=get_auth)

    Headers:
        X-Tenant-ID: Required tenant identifier
        X-User-ID: Optional user identifier
        X-Schema-Name: Optional PostgreSQL schema name for per-tenant schema isolation
    """
    from fastapi import HTTPException

    async def get_auth_context(request: Request) -> SimpleAuthContext:
        tenant_id = request.headers.get("X-Tenant-ID")
        if not tenant_id:
            raise HTTPException(status_code=400, detail="X-Tenant-ID header is required")

        user_id = request.headers.get("X-User-ID")
        schema_name = request.headers.get("X-Schema-Name")

        return SimpleAuthContext(
            tenant_id=tenant_id,
            user_id=user_id,
            schema_name=schema_name,
        )

    return get_auth_context
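
The docstrings above already sketch the intended wiring; the editor's example below makes it concrete. It is a minimal sketch, not code shipped in the wheel: it relies only on the header names and the dependency factory documented above, and the app and route names are illustrative.

# Editor's sketch: wiring header-based auth into a FastAPI app (illustrative).
from fastapi import Depends, FastAPI

from prismiq.auth import SimpleAuthContext, create_header_auth_dependency

app = FastAPI()
get_auth = create_header_auth_dependency()


@app.get("/whoami")
async def whoami(auth: SimpleAuthContext = Depends(get_auth)) -> dict[str, str | None]:
    # The dependency rejects requests without X-Tenant-ID with HTTP 400.
    return {
        "tenant_id": auth.tenant_id,
        "user_id": auth.user_id,
        "schema_name": auth.schema_name,
    }
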
prismiq/cache.py
ADDED
@@ -0,0 +1,527 @@
"""Caching layer for Prismiq with Redis and in-memory backends.

This module provides cache abstractions for storing query results,
schema metadata, and other frequently accessed data.
"""

from __future__ import annotations

import fnmatch
import hashlib
import json
import time
from abc import ABC, abstractmethod
from typing import TYPE_CHECKING, Any

from pydantic import BaseModel, ConfigDict

if TYPE_CHECKING:
    from prismiq.types import QueryDefinition, QueryResult


class CacheBackend(ABC):
    """Abstract cache backend interface.

    All cache implementations must inherit from this class and implement
    the required methods.
    """

    @abstractmethod
    async def get(self, key: str) -> Any | None:
        """Get a value from cache.

        Args:
            key: Cache key to retrieve.

        Returns:
            Cached value or None if not found or expired.
        """
        ...

    @abstractmethod
    async def set(self, key: str, value: Any, ttl: int | None = None) -> None:
        """Set a value in cache with optional TTL.

        Args:
            key: Cache key.
            value: Value to cache (must be JSON-serializable).
            ttl: Time to live in seconds. None means no expiration.
        """
        ...

    @abstractmethod
    async def delete(self, key: str) -> bool:
        """Delete a key from cache.

        Args:
            key: Cache key to delete.

        Returns:
            True if key existed and was deleted, False otherwise.
        """
        ...

    @abstractmethod
    async def clear(self, pattern: str | None = None) -> int:
        """Clear cache entries, optionally matching a pattern.

        Args:
            pattern: Glob-style pattern (e.g., "query:*"). If None, clears all.

        Returns:
            Number of entries cleared.
        """
        ...

    @abstractmethod
    async def exists(self, key: str) -> bool:
        """Check if a key exists in cache.

        Args:
            key: Cache key to check.

        Returns:
            True if key exists and is not expired.
        """
        ...


class InMemoryCache(CacheBackend):
    """In-memory cache for development and testing.

    Stores values with optional TTL-based expiration. Not suitable for
    production use with multiple processes.
    """

    def __init__(self) -> None:
        """Initialize empty cache."""
        # Store (value, expiration_time) tuples
        self._cache: dict[str, tuple[Any, float | None]] = {}

    async def get(self, key: str) -> Any | None:
        """Get a value from cache."""
        if key not in self._cache:
            return None

        value, expires_at = self._cache[key]

        # Check expiration
        if expires_at is not None and time.time() > expires_at:
            del self._cache[key]
            return None

        return value

    async def set(self, key: str, value: Any, ttl: int | None = None) -> None:
        """Set a value in cache."""
        expires_at = time.time() + ttl if ttl is not None else None
        self._cache[key] = (value, expires_at)

    async def delete(self, key: str) -> bool:
        """Delete a key from cache."""
        if key in self._cache:
            del self._cache[key]
            return True
        return False

    async def exists(self, key: str) -> bool:
        """Check if a key exists."""
        if key not in self._cache:
            return False

        _, expires_at = self._cache[key]

        if expires_at is not None and time.time() > expires_at:
            del self._cache[key]
            return False

        return True

    async def clear(self, pattern: str | None = None) -> int:
        """Clear cache entries matching pattern."""
        if pattern is None:
            count = len(self._cache)
            self._cache.clear()
            return count

        # Find keys matching pattern
        keys_to_delete = [key for key in self._cache if fnmatch.fnmatch(key, pattern)]

        for key in keys_to_delete:
            del self._cache[key]

        return len(keys_to_delete)

    def _cleanup_expired(self) -> None:
        """Remove expired entries (for testing/maintenance)."""
        current_time = time.time()
        keys_to_delete = [
            key
            for key, (_, expires_at) in self._cache.items()
            if expires_at is not None and current_time > expires_at
        ]
        for key in keys_to_delete:
            del self._cache[key]


class RedisCache(CacheBackend):
    """Redis-backed cache for production use.

    Requires redis-py async client. Install with:
        pip install redis

    Example:
        >>> cache = RedisCache("redis://localhost:6379/0")
        >>> await cache.connect()
        >>> await cache.set("key", {"data": "value"}, ttl=300)
        >>> await cache.disconnect()
    """

    def __init__(self, redis_url: str, key_prefix: str = "prismiq:") -> None:
        """Initialize Redis cache.

        Args:
            redis_url: Redis connection URL (e.g., "redis://localhost:6379/0").
            key_prefix: Prefix for all cache keys.
        """
        self._redis_url = redis_url
        self._key_prefix = key_prefix
        self._redis: Any | None = None

    async def connect(self) -> None:
        """Connect to Redis.

        Must be called before using the cache.
        """
        try:
            # pyright: ignore[reportMissingImports]
            from redis.asyncio import Redis  # type: ignore[import-not-found]
        except ImportError as e:
            raise ImportError(
                "redis package is required for RedisCache. Install with: pip install redis"
            ) from e

        self._redis = Redis.from_url(self._redis_url, decode_responses=True)
        # Test connection
        await self._redis.ping()  # type: ignore[union-attr]

    async def disconnect(self) -> None:
        """Disconnect from Redis."""
        if self._redis is not None:
            await self._redis.close()
            self._redis = None

    def _make_key(self, key: str) -> str:
        """Add prefix to key."""
        return f"{self._key_prefix}{key}"

    async def get(self, key: str) -> Any | None:
        """Get a value from Redis."""
        if self._redis is None:
            raise RuntimeError("RedisCache not connected. Call connect() first.")

        full_key = self._make_key(key)
        value = await self._redis.get(full_key)

        if value is None:
            return None

        return json.loads(value)

    async def set(self, key: str, value: Any, ttl: int | None = None) -> None:
        """Set a value in Redis."""
        if self._redis is None:
            raise RuntimeError("RedisCache not connected. Call connect() first.")

        full_key = self._make_key(key)
        serialized = json.dumps(value, default=str)

        if ttl is not None:
            await self._redis.setex(full_key, ttl, serialized)
        else:
            await self._redis.set(full_key, serialized)

    async def delete(self, key: str) -> bool:
        """Delete a key from Redis."""
        if self._redis is None:
            raise RuntimeError("RedisCache not connected. Call connect() first.")

        full_key = self._make_key(key)
        result = await self._redis.delete(full_key)
        return result > 0

    async def exists(self, key: str) -> bool:
        """Check if a key exists in Redis."""
        if self._redis is None:
            raise RuntimeError("RedisCache not connected. Call connect() first.")

        full_key = self._make_key(key)
        return await self._redis.exists(full_key) > 0

    async def clear(self, pattern: str | None = None) -> int:
        """Clear cache entries matching pattern."""
        if self._redis is None:
            raise RuntimeError("RedisCache not connected. Call connect() first.")

        # Determine search pattern with prefix
        search_pattern = f"{self._key_prefix}*" if pattern is None else self._make_key(pattern)

        # Use SCAN to find matching keys (safer than KEYS for large datasets)
        count = 0
        cursor = 0
        while True:
            cursor, keys = await self._redis.scan(cursor, match=search_pattern, count=100)
            if keys:
                await self._redis.delete(*keys)
                count += len(keys)
            if cursor == 0:
                break

        return count
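
# --- Editor's illustrative sketch, not part of the packaged prismiq/cache.py ---
# Exercising a backend directly. Only methods defined above are used; the key
# names are invented for the example.
async def _example_backend_usage() -> None:
    backend = InMemoryCache()
    await backend.set("query:public:abc123", {"rows": []}, ttl=60)
    assert await backend.exists("query:public:abc123")
    # clear() accepts a glob-style pattern, so related keys can be dropped together.
    cleared = await backend.clear("query:public:*")
    assert cleared == 1
# --- End of sketch ---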


class CacheConfig(BaseModel):
    """Configuration for cache behavior."""

    model_config = ConfigDict(strict=True)

    default_ttl: int = 86400
    """Default TTL in seconds (24 hours)."""

    schema_ttl: int = 3600
    """TTL for schema cache (1 hour)."""

    query_ttl: int = 86400
    """TTL for query results (24 hours)."""

    max_result_size: int = 1_000_000
    """Maximum size in bytes for cached query results."""


class QueryCache:
    """High-level cache for query results.

    Handles serialization, key generation, and table-based invalidation.
    Supports multi-tenancy via schema_name parameter for cache key isolation.
    """

    def __init__(
        self,
        backend: CacheBackend,
        config: CacheConfig | None = None,
        schema_name: str = "public",
    ) -> None:
        """Initialize query cache.

        Args:
            backend: Cache backend to use.
            config: Cache configuration.
            schema_name: PostgreSQL schema name for cache key isolation (multi-tenancy).
        """
        self._backend = backend
        self._config = config or CacheConfig()
        self._schema_name = schema_name
        # Track which tables are used in each cached query
        self._table_keys: dict[str, set[str]] = {}

    def make_key(self, query: QueryDefinition) -> str:
        """Generate a cache key from a query definition.

        Uses a hash of the query's JSON representation.
        Includes schema_name for multi-tenant isolation.

        Args:
            query: Query definition to hash.

        Returns:
            Cache key string.
        """
        # Serialize query to JSON
        query_json = query.model_dump_json(exclude_none=True)
        # Create hash
        query_hash = hashlib.sha256(query_json.encode()).hexdigest()[:16]
        return f"query:{self._schema_name}:{query_hash}"

    async def get_result(self, query: QueryDefinition) -> QueryResult | None:
        """Get a cached query result.

        Args:
            query: Query definition to look up.

        Returns:
            Cached QueryResult or None if not found.
        """
        from prismiq.types import QueryResult

        key = self.make_key(query)
        cached = await self._backend.get(key)

        if cached is None:
            return None

        return QueryResult.model_validate(cached)

    async def cache_result(
        self,
        query: QueryDefinition,
        result: QueryResult,
        ttl: int | None = None,
    ) -> float:
        """Cache a query result.

        Args:
            query: Query definition (used for key generation).
            result: Query result to cache.
            ttl: TTL in seconds (uses default if not specified).

        Returns:
            Unix timestamp when the result was cached.
        """
        key = self.make_key(query)
        effective_ttl = ttl if ttl is not None else self._config.query_ttl
        cached_at = time.time()

        # Check result size
        result_json = result.model_dump_json()
        if len(result_json) > self._config.max_result_size:
            # Skip caching oversized results
            return cached_at

        # Track tables used in this query for invalidation
        table_names = [t.name for t in query.tables]
        for table_name in table_names:
            if table_name not in self._table_keys:
                self._table_keys[table_name] = set()
            self._table_keys[table_name].add(key)

        # Store result and metadata
        await self._backend.set(key, result.model_dump(), effective_ttl)
        await self._backend.set(
            f"meta:{key}", {"cached_at": cached_at, "ttl": effective_ttl}, effective_ttl
        )

        return cached_at

    async def get_cache_metadata(self, query: QueryDefinition) -> dict[str, float | int] | None:
        """Get cache metadata for a query.

        Args:
            query: Query definition to look up.

        Returns:
            Dict with 'cached_at' timestamp and 'ttl', or None if not cached.
        """
        key = self.make_key(query)
        metadata = await self._backend.get(f"meta:{key}")
        return metadata

    async def invalidate_table(self, table_name: str) -> int:
        """Invalidate all cached queries involving a table.

        Args:
            table_name: Name of the table that changed.

        Returns:
            Number of cache entries invalidated.
        """
        if table_name not in self._table_keys:
            return 0

        keys_to_invalidate = self._table_keys[table_name]
        count = 0

        for key in list(keys_to_invalidate):
            if await self._backend.delete(key):
                count += 1
            # Also delete metadata
            await self._backend.delete(f"meta:{key}")

        # Clear the tracking set
        self._table_keys[table_name].clear()

        return count

    async def invalidate_all(self) -> int:
        """Invalidate all cached query results for this schema.

        Returns:
            Number of cache entries invalidated.
        """
        count = await self._backend.clear(f"query:{self._schema_name}:*")
        # Also clear all metadata for this schema
        await self._backend.clear(f"meta:query:{self._schema_name}:*")
        self._table_keys.clear()
        return count
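
# --- Editor's illustrative sketch, not part of the packaged prismiq/cache.py ---
# Wiring QueryCache with a tuned CacheConfig. The TTL values are arbitrary;
# caching an actual result would also need a QueryDefinition/QueryResult from
# prismiq.types, which is out of scope for this sketch.
async def _example_query_cache_usage() -> None:
    config = CacheConfig(schema_ttl=600, query_ttl=3600, max_result_size=500_000)
    cache = QueryCache(InMemoryCache(), config=config, schema_name="org_123")
    # After a write to the "orders" table, drop every cached query that used it.
    invalidated = await cache.invalidate_table("orders")
    assert invalidated == 0  # nothing has been cached yet in this sketch
# --- End of sketch ---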


class SchemaCache:
    """High-level cache for database schema.

    Provides caching for schema introspection results.
    Supports multi-tenancy via schema_name parameter for cache key isolation.
    """

    def __init__(
        self,
        backend: CacheBackend,
        ttl: int = 3600,
        schema_name: str = "public",
    ) -> None:
        """Initialize schema cache.

        Args:
            backend: Cache backend to use.
            ttl: TTL for schema cache in seconds.
            schema_name: PostgreSQL schema name for cache key isolation (multi-tenancy).
        """
        self._backend = backend
        self._ttl = ttl
        self._schema_name = schema_name

    def _make_key(self, key: str) -> str:
        """Create a cache key with schema prefix for tenant isolation."""
        return f"schema:{self._schema_name}:{key}"

    async def get_schema(self) -> dict[str, Any] | None:
        """Get cached schema.

        Returns:
            Cached schema dict or None if not found.
        """
        return await self._backend.get(self._make_key("full"))

    async def set_schema(self, schema_dict: dict[str, Any]) -> None:
        """Cache schema.

        Args:
            schema_dict: Schema dictionary to cache.
        """
        await self._backend.set(self._make_key("full"), schema_dict, self._ttl)

    async def get_table(self, table_name: str) -> dict[str, Any] | None:
        """Get cached table schema.

        Args:
            table_name: Name of the table.

        Returns:
            Cached table dict or None if not found.
        """
        return await self._backend.get(self._make_key(f"table:{table_name}"))

    async def set_table(self, table_name: str, table_dict: dict[str, Any]) -> None:
        """Cache table schema.

        Args:
            table_name: Name of the table.
            table_dict: Table dictionary to cache.
        """
        await self._backend.set(self._make_key(f"table:{table_name}"), table_dict, self._ttl)

    async def invalidate(self) -> int:
        """Invalidate all schema cache for this schema.

        Returns:
            Number of cache entries invalidated.
        """
        return await self._backend.clear(f"schema:{self._schema_name}:*")
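
Taken together, a backend plus the high-level caches compose as in the sketch below. This is an editor's example rather than code from the wheel: it assumes a reachable Redis instance and the redis package installed (pip install redis, as noted above), and the URL, schema name, and schema payload are placeholders.

# Editor's sketch: schema caching over Redis (illustrative).
import asyncio

from prismiq.cache import RedisCache, SchemaCache


async def main() -> None:
    backend = RedisCache("redis://localhost:6379/0", key_prefix="prismiq:")
    await backend.connect()
    try:
        schema_cache = SchemaCache(backend, ttl=3600, schema_name="org_123")
        await schema_cache.set_schema({"tables": []})
        print(await schema_cache.get_schema())  # -> {'tables': []}
        await schema_cache.invalidate()
    finally:
        await backend.disconnect()


asyncio.run(main())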