paskia 0.8.1__py3-none-any.whl → 0.9.1__py3-none-any.whl
This diff compares publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between those versions as published in their respective public registries.
- paskia/_version.py +2 -2
- paskia/aaguid/__init__.py +5 -4
- paskia/authsession.py +15 -43
- paskia/bootstrap.py +31 -103
- paskia/db/__init__.py +27 -55
- paskia/db/background.py +20 -40
- paskia/db/jsonl.py +196 -46
- paskia/db/logging.py +233 -0
- paskia/db/migrations.py +33 -0
- paskia/db/operations.py +409 -825
- paskia/db/structs.py +408 -94
- paskia/fastapi/__main__.py +25 -28
- paskia/fastapi/admin.py +147 -329
- paskia/fastapi/api.py +68 -110
- paskia/fastapi/logging.py +218 -0
- paskia/fastapi/mainapp.py +25 -8
- paskia/fastapi/remote.py +16 -39
- paskia/fastapi/reset.py +27 -19
- paskia/fastapi/response.py +22 -0
- paskia/fastapi/session.py +2 -2
- paskia/fastapi/user.py +24 -30
- paskia/fastapi/ws.py +25 -60
- paskia/fastapi/wschat.py +62 -0
- paskia/fastapi/wsutil.py +15 -2
- paskia/frontend-build/auth/admin/index.html +5 -5
- paskia/frontend-build/auth/assets/{AccessDenied-Bc249ASC.css → AccessDenied-DPkUS8LZ.css} +1 -1
- paskia/frontend-build/auth/assets/AccessDenied-Fmeb6EtF.js +8 -0
- paskia/frontend-build/auth/assets/{RestrictedAuth-DgdJyscT.css → RestrictedAuth-CvR33_Z0.css} +1 -1
- paskia/frontend-build/auth/assets/RestrictedAuth-DsJXicIw.js +1 -0
- paskia/frontend-build/auth/assets/{_plugin-vue_export-helper-rKFEraYH.js → _plugin-vue_export-helper-nhjnO_bd.js} +1 -1
- paskia/frontend-build/auth/assets/admin-CPE1pLMm.js +1 -0
- paskia/frontend-build/auth/assets/{admin-BeNu48FR.css → admin-DzzjSg72.css} +1 -1
- paskia/frontend-build/auth/assets/{auth-BKX7shEe.css → auth-C7k64Wad.css} +1 -1
- paskia/frontend-build/auth/assets/auth-YIZvPlW_.js +1 -0
- paskia/frontend-build/auth/assets/{forward-Dzg-aE1C.js → forward-DmqVHZ7e.js} +1 -1
- paskia/frontend-build/auth/assets/reset-Chtv69AT.css +1 -0
- paskia/frontend-build/auth/assets/reset-s20PATTN.js +1 -0
- paskia/frontend-build/auth/assets/{restricted-C0IQufuH.js → restricted-D3AJx3_6.js} +1 -1
- paskia/frontend-build/auth/index.html +5 -5
- paskia/frontend-build/auth/restricted/index.html +4 -4
- paskia/frontend-build/int/forward/index.html +4 -4
- paskia/frontend-build/int/reset/index.html +3 -3
- paskia/globals.py +2 -2
- paskia/migrate/__init__.py +67 -60
- paskia/migrate/sql.py +94 -37
- paskia/remoteauth.py +7 -8
- paskia/sansio.py +6 -12
- {paskia-0.8.1.dist-info → paskia-0.9.1.dist-info}/METADATA +1 -1
- paskia-0.9.1.dist-info/RECORD +60 -0
- paskia/frontend-build/auth/assets/AccessDenied-aTdCvz9k.js +0 -8
- paskia/frontend-build/auth/assets/RestrictedAuth-BLMK7-nL.js +0 -1
- paskia/frontend-build/auth/assets/admin-tVs8oyLv.js +0 -1
- paskia/frontend-build/auth/assets/auth-Dk3q4pNS.js +0 -1
- paskia/frontend-build/auth/assets/reset-BWF4cWKR.css +0 -1
- paskia/frontend-build/auth/assets/reset-C_Td1_jn.js +0 -1
- paskia-0.8.1.dist-info/RECORD +0 -55
- {paskia-0.8.1.dist-info → paskia-0.9.1.dist-info}/WHEEL +0 -0
- {paskia-0.8.1.dist-info → paskia-0.9.1.dist-info}/entry_points.txt +0 -0
paskia/db/jsonl.py
CHANGED
```diff
@@ -1,19 +1,24 @@
 """
 JSONL persistence layer for the database.
-
-Handles file I/O, JSON diffs, and persistence. Works with plain JSON/dict data.
-Uses aiofiles for async I/O operations.
 """
 
+import copy
 import logging
 from collections import deque
-from
+from contextlib import contextmanager
+from datetime import UTC, datetime
 from pathlib import Path
+from typing import Any
+from uuid import UUID
 
 import aiofiles
 import jsondiff
 import msgspec
 
+from paskia.db.logging import log_change
+from paskia.db.migrations import DBVER, apply_all_migrations
+from paskia.db.structs import DB, SessionContext
+
 _logger = logging.getLogger(__name__)
 
 # Default database path
@@ -25,6 +30,7 @@ class _ChangeRecord(msgspec.Struct, omit_defaults=True):
 
     ts: datetime
     a: str  # action - describes the operation (e.g., "migrate", "login", "create_user")
+    v: int  # schema version after this change
     u: str | None = None  # user UUID who performed the action (None for system)
     diff: dict = {}
 
@@ -33,43 +39,6 @@ class _ChangeRecord(msgspec.Struct, omit_defaults=True):
 _change_encoder = msgspec.json.Encoder()
 
 
-async def load_jsonl(db_path: Path) -> dict:
-    """Load data from disk by applying change log.
-
-    Replays all changes from JSONL file using plain dicts (to handle
-    schema evolution).
-
-    Args:
-        db_path: Path to the JSONL database file
-
-    Returns:
-        The final state after applying all changes
-
-    Raises:
-        ValueError: If file doesn't exist or cannot be loaded
-    """
-    if not db_path.exists():
-        raise ValueError(f"Database file not found: {db_path}")
-    data_dict: dict = {}
-    try:
-        # Read entire file at once and split into lines
-        async with aiofiles.open(db_path, "rb") as f:
-            content = await f.read()
-        for line_num, line in enumerate(content.split(b"\n"), 1):
-            line = line.strip()
-            if not line:
-                continue
-            try:
-                change = msgspec.json.decode(line)
-                # Apply the diff to current state (marshal=True for $-prefixed keys)
-                data_dict = jsondiff.patch(data_dict, change["diff"], marshal=True)
-            except Exception as e:
-                raise ValueError(f"Error parsing line {line_num}: {e}")
-    except (OSError, ValueError, msgspec.DecodeError) as e:
-        raise ValueError(f"Failed to load database: {e}")
-    return data_dict
-
-
 def compute_diff(previous: dict, current: dict) -> dict | None:
     """Compute JSON diff between two states.
 
@@ -85,17 +54,22 @@ def compute_diff(previous: dict, current: dict) -> dict | None:
 
 
 def create_change_record(
-    action: str, diff: dict, user: str | None = None
+    action: str, version: int, diff: dict, user: str | None = None
 ) -> _ChangeRecord:
     """Create a change record for persistence."""
     return _ChangeRecord(
-        ts=datetime.now(
+        ts=datetime.now(UTC),
         a=action,
+        v=version,
         u=user,
         diff=diff,
     )
 
 
+# Actions that are allowed to create a new database file
+_BOOTSTRAP_ACTIONS = frozenset({"bootstrap", "migrate:sql"})
+
+
 async def flush_changes(
     db_path: Path,
     pending_changes: deque[_ChangeRecord],
@@ -112,15 +86,25 @@ async def flush_changes(
     if not pending_changes:
         return True
 
-
+    if not db_path.exists():
+        first_action = pending_changes[0].a
+        if first_action not in _BOOTSTRAP_ACTIONS:
+            _logger.error(
+                "Refusing to create database file with action '%s' - "
+                "only bootstrap or migrate can create a new database",
+                first_action,
+            )
+            pending_changes.clear()
+            return False
+
     changes_to_write = list(pending_changes)
     pending_changes.clear()
 
     try:
-        # Build lines to append (keep as bytes, join with \n)
         lines = [_change_encoder.encode(change) for change in changes_to_write]
+        if not lines:
+            return True
 
-        # Append all lines in a single write (binary mode for Windows compatibility)
         async with aiofiles.open(db_path, "ab") as f:
             await f.write(b"\n".join(lines) + b"\n")
         return True
@@ -130,3 +114,169 @@
         for change in reversed(changes_to_write):
             pending_changes.appendleft(change)
         return False
+
+
+class JsonlStore:
+    """JSONL persistence layer for a DB instance."""
+
+    def __init__(self, db: DB, db_path: str = DB_PATH_DEFAULT):
+        self.db: DB = db
+        self.db_path = Path(db_path)
+        self._previous_builtins: dict[str, Any] = {}
+        self._pending_changes: deque[_ChangeRecord] = deque()
+        self._current_action: str = "system"
+        self._current_user: str | None = None
+        self._in_transaction: bool = False
+        self._transaction_snapshot: dict[str, Any] | None = None
+        self._current_version: int = DBVER  # Schema version for new databases
+
+    async def load(self, db_path: str | None = None) -> None:
+        """Load data from JSONL change log."""
+        if db_path is not None:
+            self.db_path = Path(db_path)
+        if not self.db_path.exists():
+            return
+
+        # Replay change log to reconstruct state
+        data_dict: dict = {}
+        try:
+            async with aiofiles.open(self.db_path, "rb") as f:
+                content = await f.read()
+            for line_num, line in enumerate(content.split(b"\n"), 1):
+                line = line.strip()
+                if not line:
+                    continue
+                try:
+                    change = msgspec.json.decode(line)
+                    data_dict = jsondiff.patch(data_dict, change["diff"], marshal=True)
+                    self._current_version = change.get("v", 0)
+                except Exception as e:
+                    raise ValueError(f"Error parsing line {line_num}: {e}")
+        except (OSError, ValueError, msgspec.DecodeError) as e:
+            raise ValueError(f"Failed to load database: {e}")
+
+        if not data_dict:
+            return
+
+        # Set previous state for diffing (will be updated by _queue_change)
+        self._previous_builtins = copy.deepcopy(data_dict)
+
+        # Callback to persist each migration
+        async def persist_migration(
+            action: str, new_version: int, current: dict
+        ) -> None:
+            self._current_version = new_version
+            self._queue_change(action, new_version, current)
+
+        # Apply schema migrations one at a time
+        await apply_all_migrations(data_dict, self._current_version, persist_migration)
+
+        # Decode to msgspec struct
+        decoder = msgspec.json.Decoder(DB)
+        self.db = decoder.decode(msgspec.json.encode(data_dict))
+        self.db._store = self
+
+        # Normalize via msgspec round-trip (handles omit_defaults etc.)
+        # This ensures _previous_builtins matches what msgspec would produce
+        normalized_dict = msgspec.to_builtins(self.db)
+        await persist_migration(
+            "migrate:msgspec", self._current_version, normalized_dict
+        )
+
+    def _queue_change(
+        self, action: str, version: int, current: dict, user: str | None = None
+    ) -> None:
+        """Queue a change record and log it.
+
+        Args:
+            action: The action name for the change record
+            version: The schema version for the change record
+            current: The current state as a plain dict
+            user: Optional user UUID who performed the action
+        """
+        diff = compute_diff(self._previous_builtins, current)
+        if not diff:
+            return
+        self._pending_changes.append(create_change_record(action, version, diff, user))
+        self._previous_builtins = copy.deepcopy(current)
+
+        # Log the change with user display name if available
+        user_display = None
+        if user:
+            try:
+                user_uuid = UUID(user)
+                if user_uuid in self.db.users:
+                    user_display = self.db.users[user_uuid].display_name
+            except (ValueError, KeyError):
+                user_display = user
+
+        log_change(action, diff, user_display)
+
+    @contextmanager
+    def transaction(
+        self,
+        action: str,
+        ctx: SessionContext | None = None,
+        *,
+        user: str | None = None,
+    ):
+        """Wrap writes in transaction. Queues change on successful exit.
+
+        Args:
+            action: Describes the operation (e.g., "Created user", "Login")
+            ctx: Session context of user performing the action (None for system operations)
+            user: User UUID string (alternative to ctx when full context unavailable)
+        """
+        if self._in_transaction:
+            raise RuntimeError("Nested transactions are not supported")
+
+        # Check for out-of-transaction modifications
+        current_state = msgspec.to_builtins(self.db)
+        if current_state != self._previous_builtins:
+            # Allow bootstrap/migrate to create a new database from empty state
+            is_bootstrap = action in _BOOTSTRAP_ACTIONS or action.startswith("migrate:")
+            if is_bootstrap and not self._previous_builtins:
+                pass  # Expected: creating database from scratch
+            else:
+                diff = compute_diff(self._previous_builtins, current_state)
+                diff_json = msgspec.json.encode(diff).decode()
+                _logger.critical(
+                    "Database state modified outside of transaction! "
+                    "This indicates a bug where DB changes occurred without a transaction wrapper.\n"
+                    f"Changes detected:\n{diff_json}"
+                )
+                raise SystemExit(1)
+
+        old_action = self._current_action
+        old_user = self._current_user
+        self._current_action = action
+        # Prefer ctx.user.uuid if ctx provided, otherwise use user param
+        self._current_user = str(ctx.user.uuid) if ctx else user
+        self._in_transaction = True
+        self._transaction_snapshot = current_state
+
+        try:
+            yield
+            current = msgspec.to_builtins(self.db)
+            self._queue_change(
+                self._current_action, self._current_version, current, self._current_user
+            )
+        except Exception:
+            # Rollback on error: restore from snapshot
+            _logger.warning("Transaction '%s' failed, rolling back changes", action)
+            if self._transaction_snapshot is not None:
+                decoder = msgspec.json.Decoder(DB)
+                self.db = decoder.decode(
+                    msgspec.json.encode(self._transaction_snapshot)
+                )
+                self.db._store = self
+            raise
+        finally:
+            self._current_action = old_action
+            self._current_user = old_user
+            self._in_transaction = False
+            self._transaction_snapshot = None
+
+    async def flush(self) -> bool:
+        """Write all pending changes to disk."""
+        return await flush_changes(self.db_path, self._pending_changes)
```
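A minimal usage sketch inferred from the diff above, not an excerpt from the package: it assumes `DB()` is constructible with defaults and uses a hypothetical `./paskia.jsonl` path; the real entry points live elsewhere in paskia (`bootstrap.py`, `db/__init__.py`).

```python
from paskia.db.jsonl import JsonlStore
from paskia.db.structs import DB


async def demo() -> None:
    # Assumed: DB() has defaults; "./paskia.jsonl" is a placeholder path.
    store = JsonlStore(DB(), db_path="./paskia.jsonl")
    await store.load()  # replays the change log if the file exists

    # Writes go through a transaction; on clean exit the diff against the
    # previous state is queued as a _ChangeRecord, on error it rolls back.
    with store.transaction("bootstrap"):
        ...  # mutate store.db here

    # Appends queued records as JSONL lines; a brand-new file is only
    # created for the "bootstrap" / "migrate:sql" actions (_BOOTSTRAP_ACTIONS).
    await store.flush()
```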
paskia/db/logging.py
ADDED
```python
"""
Database change logging with pretty-printed diffs.

Provides a logger for JSONL database changes that formats diffs
in a human-readable path.notation style with color coding.
"""

import logging
import re
import sys
from typing import Any

logger = logging.getLogger("paskia.db")

# Pattern to match control characters and bidirectional overrides
_UNSAFE_CHARS = re.compile(
    r"[\x00-\x1f\x7f-\x9f"  # C0 and C1 control characters
    r"\u200e\u200f"  # LRM, RLM
    r"\u202a-\u202e"  # LRE, RLE, PDF, LRO, RLO
    r"\u2066-\u2069"  # LRI, RLI, FSI, PDI
    r"]"
)

# ANSI color codes (matching FastAPI logging style)
_RESET = "\033[0m"
_DIM = "\033[2m"
_PATH_PREFIX = "\033[1;30m"  # Dark grey for path prefix (like host in access log)
_PATH_FINAL = "\033[0m"  # Default for final element (like path in access log)
_REPLACE = "\033[0;33m"  # Yellow for replacements
_DELETE = "\033[0;31m"  # Red for deletions
_ADD = "\033[0;32m"  # Green for additions
_ACTION = "\033[1;34m"  # Bold blue for action name
_USER = "\033[0;34m"  # Blue for user display


def _use_color() -> bool:
    """Check if we should use color output."""
    return sys.stderr.isatty()


def _format_value(value: Any, use_color: bool, max_len: int = 60) -> str:
    """Format a value for display, truncating if needed."""
    if value is None:
        return "null"

    if isinstance(value, bool):
        return "true" if value else "false"

    if isinstance(value, (int, float)):
        return str(value)

    if isinstance(value, str):
        # Filter out control characters and bidirectional overrides
        value = _UNSAFE_CHARS.sub("", value)
        # Truncate long strings
        if len(value) > max_len:
            return value[: max_len - 3] + "..."
        return value

    if isinstance(value, dict):
        if not value:
            return "{}"
        # For small dicts, show inline
        if len(value) == 1:
            k, v = next(iter(value.items()))
            return "{" + f"{k}: {_format_value(v, use_color, max_len=30)}" + "}"
        return f"{{...{len(value)} keys}}"

    if isinstance(value, list):
        if not value:
            return "[]"
        if len(value) == 1:
            return "[" + _format_value(value[0], use_color, max_len=30) + "]"
        return f"[...{len(value)} items]"

    # Fallback for other types
    text = str(value)
    if len(text) > max_len:
        text = text[: max_len - 3] + "..."
    return text


def _format_path(path: list[str], use_color: bool) -> str:
    """Format a path as dot notation with prefix in dark grey, final in default."""
    if not path:
        return ""
    if not use_color:
        return ".".join(path)
    if len(path) == 1:
        return f"{_PATH_FINAL}{path[0]}{_RESET}"
    prefix = ".".join(path[:-1])
    final = path[-1]
    return f"{_PATH_PREFIX}{prefix}.{_RESET}{_PATH_FINAL}{final}{_RESET}"


def _collect_changes(
    diff: dict, path: list[str], changes: list[tuple[str, list[str], Any, Any | None]]
) -> None:
    """
    Recursively collect changes from a diff into a flat list.

    Each change is a tuple of (change_type, path, new_value, old_value).
    change_type is one of: 'set', 'replace', 'delete'
    """
    if not isinstance(diff, dict):
        # Leaf value - this is a set operation
        changes.append(("set", path, diff, None))
        return

    for key, value in diff.items():
        if key == "$delete":
            # $delete contains a list of keys to delete
            if isinstance(value, list):
                for deleted_key in value:
                    changes.append(("delete", path + [str(deleted_key)], None, None))
            else:
                changes.append(("delete", path + [str(value)], None, None))

        elif key == "$replace":
            # $replace contains the new value for this path
            if isinstance(value, dict):
                # Replacing with a dict - show each key as a replacement
                for rkey, rval in value.items():
                    changes.append(("replace", path + [str(rkey)], rval, None))
                if not value:
                    # Empty replacement - clearing the collection
                    changes.append(("replace", path, {}, None))
            else:
                changes.append(("replace", path, value, None))

        elif key.startswith("$"):
            # Other special operations (future-proofing)
            changes.append(("set", path, {key: value}, None))

        else:
            # Regular nested key
            _collect_changes(value, path + [str(key)], changes)


def _format_change_line(
    change_type: str, path: list[str], value: Any, use_color: bool
) -> str:
    """Format a single change as a one-line string."""
    path_str = _format_path(path, use_color)
    value_str = _format_value(value, use_color)

    if change_type == "delete":
        if use_color:
            return f" ❌ {path_str}"
        return f" - {path_str}"

    if change_type == "replace":
        if use_color:
            return f" {_REPLACE}⟳{_RESET} {path_str} {_DIM}={_RESET} {value_str}"
        return f" ~ {path_str} = {value_str}"

    # Default: set/add
    if use_color:
        return f" {_ADD}+{_RESET} {path_str} {_DIM}={_RESET} {value_str}"
    return f" + {path_str} = {value_str}"


def format_diff(diff: dict) -> list[str]:
    """
    Format a JSON diff as human-readable lines.

    Returns a list of formatted lines (without newlines).
    Single changes return one line, multiple changes return multiple lines.
    """
    use_color = _use_color()
    changes: list[tuple[str, list[str], Any, Any | None]] = []
    _collect_changes(diff, [], changes)

    if not changes:
        return []

    # Format each change
    lines = []
    for change_type, path, value, _ in changes:
        lines.append(_format_change_line(change_type, path, value, use_color))

    return lines


def format_action_header(action: str, user_display: str | None = None) -> str:
    """Format the action header line."""
    use_color = _use_color()

    if use_color:
        action_str = f"{_ACTION}{action}{_RESET}"
        if user_display:
            user_str = f"{_USER}{user_display}{_RESET}"
            return f"{action_str} by {user_str}"
        return action_str
    else:
        if user_display:
            return f"{action} by {user_display}"
        return action


def log_change(action: str, diff: dict, user_display: str | None = None) -> None:
    """
    Log a database change with pretty-printed diff.

    Args:
        action: The action name (e.g., "login", "admin:delete_user")
        diff: The JSON diff dict
        user_display: Optional display name of the user who performed the action
    """
    header = format_action_header(action, user_display)
    diff_lines = format_diff(diff)

    if not diff_lines:
        logger.info(header)
        return

    if len(diff_lines) == 1:
        # Single change - combine on one line
        logger.info(f"{header}{diff_lines[0]}")
    else:
        # Multiple changes - header on its own line, then changes
        logger.info(header)
        for line in diff_lines:
            logger.info(line)


def configure_db_logging() -> None:
    """Configure the database logger to output to stderr without prefix."""
    handler = logging.StreamHandler(sys.stderr)
    handler.setFormatter(logging.Formatter("%(message)s"))
    logger.addHandler(handler)
    logger.setLevel(logging.INFO)
    logger.propagate = False
paskia/db/migrations.py
ADDED
```python
"""
Database schema migrations.

Migrations are applied during database load based on the version field.
Each migration should be idempotent and only run when needed.
"""

from collections.abc import Awaitable, Callable


def migrate_v1(d: dict) -> None:
    """Remove Org.created_at fields."""
    for org_data in d["orgs"].values():
        org_data.pop("created_at", None)


migrations = sorted(
    [f for n, f in globals().items() if n.startswith("migrate_v")],
    key=lambda f: int(f.__name__.removeprefix("migrate_v")),
)

DBVER = len(migrations)  # Used by bootstrap and migrate:sql to set initial version


async def apply_all_migrations(
    data_dict: dict,
    current_version: int,
    persist: Callable[[str, int, dict], Awaitable[None]],
) -> None:
    while current_version < DBVER:
        migrations[current_version](data_dict)
        current_version += 1
        await persist(f"migrate:v{current_version}", current_version, data_dict)
```