paskia 0.9.0__tar.gz → 0.9.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (62)
  1. {paskia-0.9.0 → paskia-0.9.1}/PKG-INFO +1 -1
  2. {paskia-0.9.0 → paskia-0.9.1}/paskia/_version.py +2 -2
  3. {paskia-0.9.0 → paskia-0.9.1}/paskia/aaguid/__init__.py +5 -4
  4. {paskia-0.9.0 → paskia-0.9.1}/paskia/authsession.py +4 -19
  5. {paskia-0.9.0 → paskia-0.9.1}/paskia/db/__init__.py +2 -4
  6. {paskia-0.9.0 → paskia-0.9.1}/paskia/db/background.py +3 -3
  7. {paskia-0.9.0 → paskia-0.9.1}/paskia/db/jsonl.py +100 -112
  8. paskia-0.9.1/paskia/db/logging.py +233 -0
  9. paskia-0.9.1/paskia/db/migrations.py +33 -0
  10. {paskia-0.9.0 → paskia-0.9.1}/paskia/db/operations.py +99 -192
  11. paskia-0.9.1/paskia/db/structs.py +462 -0
  12. {paskia-0.9.0 → paskia-0.9.1}/paskia/fastapi/__main__.py +1 -0
  13. {paskia-0.9.0 → paskia-0.9.1}/paskia/fastapi/admin.py +70 -193
  14. {paskia-0.9.0 → paskia-0.9.1}/paskia/fastapi/api.py +49 -55
  15. paskia-0.9.1/paskia/fastapi/logging.py +218 -0
  16. {paskia-0.9.0 → paskia-0.9.1}/paskia/fastapi/mainapp.py +12 -2
  17. {paskia-0.9.0 → paskia-0.9.1}/paskia/fastapi/remote.py +4 -4
  18. {paskia-0.9.0 → paskia-0.9.1}/paskia/fastapi/reset.py +0 -2
  19. paskia-0.9.1/paskia/fastapi/response.py +22 -0
  20. {paskia-0.9.0 → paskia-0.9.1}/paskia/fastapi/user.py +7 -7
  21. {paskia-0.9.0 → paskia-0.9.1}/paskia/fastapi/ws.py +6 -6
  22. {paskia-0.9.0 → paskia-0.9.1}/paskia/fastapi/wsutil.py +15 -2
  23. {paskia-0.9.0 → paskia-0.9.1}/paskia/migrate/__init__.py +9 -9
  24. {paskia-0.9.0 → paskia-0.9.1}/paskia/migrate/sql.py +26 -19
  25. {paskia-0.9.0 → paskia-0.9.1}/paskia/remoteauth.py +6 -6
  26. {paskia-0.9.0 → paskia-0.9.1}/pyproject.toml +0 -4
  27. paskia-0.9.0/paskia/db/migrations.py +0 -34
  28. paskia-0.9.0/paskia/db/structs.py +0 -272
  29. {paskia-0.9.0 → paskia-0.9.1}/.gitignore +0 -0
  30. {paskia-0.9.0 → paskia-0.9.1}/README.md +0 -0
  31. {paskia-0.9.0 → paskia-0.9.1}/paskia/__init__.py +0 -0
  32. {paskia-0.9.0 → paskia-0.9.1}/paskia/aaguid/combined_aaguid.json +0 -0
  33. {paskia-0.9.0 → paskia-0.9.1}/paskia/bootstrap.py +0 -0
  34. {paskia-0.9.0 → paskia-0.9.1}/paskia/config.py +0 -0
  35. {paskia-0.9.0 → paskia-0.9.1}/paskia/fastapi/__init__.py +0 -0
  36. {paskia-0.9.0 → paskia-0.9.1}/paskia/fastapi/auth_host.py +0 -0
  37. {paskia-0.9.0 → paskia-0.9.1}/paskia/fastapi/authz.py +0 -0
  38. {paskia-0.9.0 → paskia-0.9.1}/paskia/fastapi/session.py +0 -0
  39. {paskia-0.9.0 → paskia-0.9.1}/paskia/fastapi/wschat.py +0 -0
  40. {paskia-0.9.0 → paskia-0.9.1}/paskia/frontend-build/auth/admin/index.html +0 -0
  41. {paskia-0.9.0 → paskia-0.9.1}/paskia/frontend-build/auth/assets/AccessDenied-DPkUS8LZ.css +0 -0
  42. {paskia-0.9.0 → paskia-0.9.1}/paskia/frontend-build/auth/assets/AccessDenied-Fmeb6EtF.js +0 -0
  43. {paskia-0.9.0 → paskia-0.9.1}/paskia/frontend-build/auth/assets/RestrictedAuth-CvR33_Z0.css +0 -0
  44. {paskia-0.9.0 → paskia-0.9.1}/paskia/frontend-build/auth/assets/RestrictedAuth-DsJXicIw.js +0 -0
  45. {paskia-0.9.0 → paskia-0.9.1}/paskia/frontend-build/auth/assets/_plugin-vue_export-helper-BTzJAQlS.css +0 -0
  46. {paskia-0.9.0 → paskia-0.9.1}/paskia/frontend-build/auth/assets/_plugin-vue_export-helper-nhjnO_bd.js +0 -0
  47. {paskia-0.9.0 → paskia-0.9.1}/paskia/frontend-build/auth/assets/admin-CPE1pLMm.js +0 -0
  48. {paskia-0.9.0 → paskia-0.9.1}/paskia/frontend-build/auth/assets/admin-DzzjSg72.css +0 -0
  49. {paskia-0.9.0 → paskia-0.9.1}/paskia/frontend-build/auth/assets/auth-C7k64Wad.css +0 -0
  50. {paskia-0.9.0 → paskia-0.9.1}/paskia/frontend-build/auth/assets/auth-YIZvPlW_.js +0 -0
  51. {paskia-0.9.0 → paskia-0.9.1}/paskia/frontend-build/auth/assets/forward-DmqVHZ7e.js +0 -0
  52. {paskia-0.9.0 → paskia-0.9.1}/paskia/frontend-build/auth/assets/helpers-DzjFIx78.js +0 -0
  53. {paskia-0.9.0 → paskia-0.9.1}/paskia/frontend-build/auth/assets/pow-2N9bxgAo.js +0 -0
  54. {paskia-0.9.0 → paskia-0.9.1}/paskia/frontend-build/auth/assets/reset-Chtv69AT.css +0 -0
  55. {paskia-0.9.0 → paskia-0.9.1}/paskia/frontend-build/auth/assets/reset-s20PATTN.js +0 -0
  56. {paskia-0.9.0 → paskia-0.9.1}/paskia/frontend-build/auth/assets/restricted-D3AJx3_6.js +0 -0
  57. {paskia-0.9.0 → paskia-0.9.1}/paskia/frontend-build/auth/index.html +0 -0
  58. {paskia-0.9.0 → paskia-0.9.1}/paskia/frontend-build/auth/restricted/index.html +0 -0
  59. {paskia-0.9.0 → paskia-0.9.1}/paskia/frontend-build/int/forward/index.html +0 -0
  60. {paskia-0.9.0 → paskia-0.9.1}/paskia/frontend-build/int/reset/index.html +0 -0
  61. {paskia-0.9.0 → paskia-0.9.1}/paskia/globals.py +0 -0
  62. {paskia-0.9.0 → paskia-0.9.1}/paskia/sansio.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: paskia
3
- Version: 0.9.0
3
+ Version: 0.9.1
4
4
  Summary: Passkey Auth made easy: all sites and APIs can be guarded even without any changes on the protected site.
5
5
  Project-URL: Homepage, https://git.zi.fi/LeoVasanko/paskia
6
6
  Project-URL: Repository, https://github.com/LeoVasanko/paskia
@@ -28,7 +28,7 @@ version_tuple: VERSION_TUPLE
28
28
  commit_id: COMMIT_ID
29
29
  __commit_id__: COMMIT_ID
30
30
 
31
- __version__ = version = '0.9.0'
32
- __version_tuple__ = version_tuple = (0, 9, 0)
31
+ __version__ = version = '0.9.1'
32
+ __version_tuple__ = version_tuple = (0, 9, 1)
33
33
 
34
34
  __commit_id__ = commit_id = None
@@ -10,6 +10,7 @@ This module provides functionality to:
10
10
  import json
11
11
  from collections.abc import Iterable
12
12
  from importlib.resources import files
13
+ from uuid import UUID
13
14
 
14
15
  __ALL__ = ["AAGUID", "filter"]
15
16
 
@@ -18,15 +19,15 @@ AAGUID_FILE = files("paskia") / "aaguid" / "combined_aaguid.json"
18
19
  AAGUID: dict[str, dict] = json.loads(AAGUID_FILE.read_text(encoding="utf-8"))
19
20
 
20
21
 
21
- def filter(aaguids: Iterable[str]) -> dict[str, dict]:
22
+ def filter(aaguids: Iterable[UUID]) -> dict[str, dict]:
22
23
  """
23
24
  Get AAGUID information only for the provided set of AAGUIDs.
24
25
 
25
26
  Args:
26
- aaguids: Set of AAGUID strings that the user has credentials for
27
+ aaguids: Iterable of AAGUIDs (UUIDs) that the user has credentials for
27
28
 
28
29
  Returns:
29
- Dictionary mapping AAGUID to authenticator information for only
30
+ Dictionary mapping AAGUID string to authenticator information for only
30
31
  the AAGUIDs that the user has and that we have data for
31
32
  """
32
- return {aaguid: AAGUID[aaguid] for aaguid in aaguids if aaguid in AAGUID}
33
+ return {(s := str(a)): AAGUID[s] for a in aaguids if (s := str(a)) in AAGUID}
@@ -8,7 +8,7 @@ independent of any web framework:
8
8
  - Credential management
9
9
  """
10
10
 
11
- from datetime import datetime, timezone
11
+ from datetime import UTC, datetime
12
12
  from typing import TYPE_CHECKING
13
13
  from uuid import UUID
14
14
 
@@ -23,11 +23,11 @@ EXPIRES = SESSION_LIFETIME
23
23
 
24
24
 
25
25
  def expires() -> datetime:
26
- return datetime.now(timezone.utc) + EXPIRES
26
+ return datetime.now(UTC) + EXPIRES
27
27
 
28
28
 
29
29
  def reset_expires() -> datetime:
30
- return datetime.now(timezone.utc) + RESET_LIFETIME
30
+ return datetime.now(UTC) + RESET_LIFETIME
31
31
 
32
32
 
33
33
  def get_reset(token: str) -> "ResetToken":
@@ -39,24 +39,9 @@ def get_reset(token: str) -> "ResetToken":
39
39
  raise ValueError("This authentication link is no longer valid.")
40
40
 
41
41
 
42
- def refresh_session_token(token: str, *, ip: str, user_agent: str):
43
- """Refresh a session extending its expiry."""
44
- session_record = db.data().sessions.get(token)
45
- if not session_record:
46
- raise ValueError("Session not found or expired")
47
- updated = db.update_session(
48
- token,
49
- ip=ip,
50
- user_agent=user_agent,
51
- expiry=expires(),
52
- )
53
- if not updated:
54
- raise ValueError("Session not found or expired")
55
-
56
-
57
42
  def delete_credential(credential_uuid: UUID, auth: str, host: str | None = None):
58
43
  """Delete a specific credential for the current user."""
59
- ctx = db.get_session_context(auth, hostutil.normalize_host(host))
44
+ ctx = db.data().session_ctx(auth, hostutil.normalize_host(host))
60
45
  if not ctx:
61
46
  raise ValueError("Session expired")
62
47
  db.delete_credential(credential_uuid, ctx.user.uuid)
@@ -2,7 +2,7 @@
2
2
  Database module for WebAuthn passkey authentication.
3
3
 
4
4
  Read: Access data() directly, use build_* to convert to public structs.
5
- CTX: get_session_context(key) returns SessionContext with effective permissions.
5
+ CTX: data().session_ctx(key) returns SessionContext with effective permissions.
6
6
  Write: Functions validate and commit, or raise ValueError.
7
7
 
8
8
  Usage:
@@ -13,7 +13,7 @@ Usage:
13
13
  user = db.build_user(user_uuid)
14
14
 
15
15
  # Context
16
- ctx = db.get_session_context(session_key)
16
+ ctx = db.data().session_ctx(session_key)
17
17
 
18
18
  # Write
19
19
  db.create_user(user)
@@ -49,7 +49,6 @@ from paskia.db.operations import (
49
49
  delete_user,
50
50
  get_organization_users,
51
51
  get_reset_token,
52
- get_session_context,
53
52
  get_user_credential_ids,
54
53
  get_user_organization,
55
54
  init,
@@ -113,7 +112,6 @@ __all__ = [
113
112
  # Read ops
114
113
  "get_organization_users",
115
114
  "get_reset_token",
116
- "get_session_context",
117
115
  "get_user_credential_ids",
118
116
  "get_user_organization",
119
117
  # Write ops
@@ -6,7 +6,7 @@ Periodically flushes pending changes to disk and cleans up expired items.
6
6
 
7
7
  import asyncio
8
8
  import logging
9
- from datetime import datetime, timezone
9
+ from datetime import UTC, datetime
10
10
 
11
11
  from paskia.db.operations import _store, cleanup_expired
12
12
 
@@ -33,7 +33,7 @@ async def _background_loop():
33
33
  cleanup_expired()
34
34
  await flush()
35
35
 
36
- last_cleanup = datetime.now(timezone.utc)
36
+ last_cleanup = datetime.now(UTC)
37
37
 
38
38
  while True:
39
39
  try:
@@ -42,7 +42,7 @@ async def _background_loop():
42
42
  await flush()
43
43
 
44
44
  # Run cleanup periodically
45
- now = datetime.now(timezone.utc)
45
+ now = datetime.now(UTC)
46
46
  if (now - last_cleanup).total_seconds() >= CLEANUP_INTERVAL:
47
47
  cleanup_expired()
48
48
  await flush() # Flush cleanup changes
@@ -2,15 +2,11 @@
2
2
  JSONL persistence layer for the database.
3
3
  """
4
4
 
5
- from __future__ import annotations
6
-
7
5
  import copy
8
- import json
9
6
  import logging
10
- import sys
11
7
  from collections import deque
12
8
  from contextlib import contextmanager
13
- from datetime import datetime, timezone
9
+ from datetime import UTC, datetime
14
10
  from pathlib import Path
15
11
  from typing import Any
16
12
  from uuid import UUID
@@ -19,7 +15,8 @@ import aiofiles
19
15
  import jsondiff
20
16
  import msgspec
21
17
 
22
- from paskia.db.migrations import apply_migrations
18
+ from paskia.db.logging import log_change
19
+ from paskia.db.migrations import DBVER, apply_all_migrations
23
20
  from paskia.db.structs import DB, SessionContext
24
21
 
25
22
  _logger = logging.getLogger(__name__)
@@ -33,6 +30,7 @@ class _ChangeRecord(msgspec.Struct, omit_defaults=True):
33
30
 
34
31
  ts: datetime
35
32
  a: str # action - describes the operation (e.g., "migrate", "login", "create_user")
33
+ v: int # schema version after this change
36
34
  u: str | None = None # user UUID who performed the action (None for system)
37
35
  diff: dict = {}
38
36
 
@@ -41,43 +39,6 @@ class _ChangeRecord(msgspec.Struct, omit_defaults=True):
41
39
  _change_encoder = msgspec.json.Encoder()
42
40
 
43
41
 
44
- async def load_jsonl(db_path: Path) -> dict:
45
- """Load data from disk by applying change log.
46
-
47
- Replays all changes from JSONL file using plain dicts (to handle
48
- schema evolution).
49
-
50
- Args:
51
- db_path: Path to the JSONL database file
52
-
53
- Returns:
54
- The final state after applying all changes
55
-
56
- Raises:
57
- ValueError: If file doesn't exist or cannot be loaded
58
- """
59
- if not db_path.exists():
60
- raise ValueError(f"Database file not found: {db_path}")
61
- data_dict: dict = {}
62
- try:
63
- # Read entire file at once and split into lines
64
- async with aiofiles.open(db_path, "rb") as f:
65
- content = await f.read()
66
- for line_num, line in enumerate(content.split(b"\n"), 1):
67
- line = line.strip()
68
- if not line:
69
- continue
70
- try:
71
- change = msgspec.json.decode(line)
72
- # Apply the diff to current state (marshal=True for $-prefixed keys)
73
- data_dict = jsondiff.patch(data_dict, change["diff"], marshal=True)
74
- except Exception as e:
75
- raise ValueError(f"Error parsing line {line_num}: {e}")
76
- except (OSError, ValueError, msgspec.DecodeError) as e:
77
- raise ValueError(f"Failed to load database: {e}")
78
- return data_dict
79
-
80
-
81
42
  def compute_diff(previous: dict, current: dict) -> dict | None:
82
43
  """Compute JSON diff between two states.
83
44
 
@@ -93,19 +54,20 @@ def compute_diff(previous: dict, current: dict) -> dict | None:
93
54
 
94
55
 
95
56
  def create_change_record(
96
- action: str, diff: dict, user: str | None = None
57
+ action: str, version: int, diff: dict, user: str | None = None
97
58
  ) -> _ChangeRecord:
98
59
  """Create a change record for persistence."""
99
60
  return _ChangeRecord(
100
- ts=datetime.now(timezone.utc),
61
+ ts=datetime.now(UTC),
101
62
  a=action,
63
+ v=version,
102
64
  u=user,
103
65
  diff=diff,
104
66
  )
105
67
 
106
68
 
107
69
  # Actions that are allowed to create a new database file
108
- _BOOTSTRAP_ACTIONS = frozenset({"bootstrap", "migrate"})
70
+ _BOOTSTRAP_ACTIONS = frozenset({"bootstrap", "migrate:sql"})
109
71
 
110
72
 
111
73
  async def flush_changes(
@@ -166,66 +128,89 @@ class JsonlStore:
166
128
  self._current_user: str | None = None
167
129
  self._in_transaction: bool = False
168
130
  self._transaction_snapshot: dict[str, Any] | None = None
131
+ self._current_version: int = DBVER # Schema version for new databases
169
132
 
170
133
  async def load(self, db_path: str | None = None) -> None:
171
134
  """Load data from JSONL change log."""
172
135
  if db_path is not None:
173
136
  self.db_path = Path(db_path)
174
- try:
175
- data_dict = await load_jsonl(self.db_path)
176
- if data_dict:
177
- # Preserve original state before migrations (deep copy for nested dicts)
178
- original_dict = copy.deepcopy(data_dict)
137
+ if not self.db_path.exists():
138
+ return
179
139
 
180
- # Apply schema migrations (modifies data_dict in place)
181
- migrated = apply_migrations(data_dict)
182
-
183
- decoder = msgspec.json.Decoder(DB)
184
- self.db = decoder.decode(msgspec.json.encode(data_dict))
185
- self.db._store = self
140
+ # Replay change log to reconstruct state
141
+ data_dict: dict = {}
142
+ try:
143
+ async with aiofiles.open(self.db_path, "rb") as f:
144
+ content = await f.read()
145
+ for line_num, line in enumerate(content.split(b"\n"), 1):
146
+ line = line.strip()
147
+ if not line:
148
+ continue
149
+ try:
150
+ change = msgspec.json.decode(line)
151
+ data_dict = jsondiff.patch(data_dict, change["diff"], marshal=True)
152
+ self._current_version = change.get("v", 0)
153
+ except Exception as e:
154
+ raise ValueError(f"Error parsing line {line_num}: {e}")
155
+ except (OSError, ValueError, msgspec.DecodeError) as e:
156
+ raise ValueError(f"Failed to load database: {e}")
157
+
158
+ if not data_dict:
159
+ return
160
+
161
+ # Set previous state for diffing (will be updated by _queue_change)
162
+ self._previous_builtins = copy.deepcopy(data_dict)
163
+
164
+ # Callback to persist each migration
165
+ async def persist_migration(
166
+ action: str, new_version: int, current: dict
167
+ ) -> None:
168
+ self._current_version = new_version
169
+ self._queue_change(action, new_version, current)
170
+
171
+ # Apply schema migrations one at a time
172
+ await apply_all_migrations(data_dict, self._current_version, persist_migration)
173
+
174
+ # Decode to msgspec struct
175
+ decoder = msgspec.json.Decoder(DB)
176
+ self.db = decoder.decode(msgspec.json.encode(data_dict))
177
+ self.db._store = self
178
+
179
+ # Normalize via msgspec round-trip (handles omit_defaults etc.)
180
+ # This ensures _previous_builtins matches what msgspec would produce
181
+ normalized_dict = msgspec.to_builtins(self.db)
182
+ await persist_migration(
183
+ "migrate:msgspec", self._current_version, normalized_dict
184
+ )
185
+
186
+ def _queue_change(
187
+ self, action: str, version: int, current: dict, user: str | None = None
188
+ ) -> None:
189
+ """Queue a change record and log it.
186
190
 
187
- # Update previous state to migrated data FIRST (to avoid transaction hardening reset)
188
- self._previous_builtins = data_dict
189
-
190
- # Persist migration by manually computing and queueing the diff
191
- if migrated:
192
- diff = compute_diff(original_dict, data_dict)
193
- if diff:
194
- self._pending_changes.append(
195
- create_change_record("migrate", diff, user=None)
196
- )
197
- _logger.info("Queued migration changes for persistence")
198
- await self.flush()
199
- except ValueError:
200
- if self.db_path.exists():
201
- raise
202
-
203
- def _queue_change(self) -> None:
204
- current = msgspec.to_builtins(self.db)
191
+ Args:
192
+ action: The action name for the change record
193
+ version: The schema version for the change record
194
+ current: The current state as a plain dict
195
+ user: Optional user UUID who performed the action
196
+ """
205
197
  diff = compute_diff(self._previous_builtins, current)
206
- if diff:
207
- self._pending_changes.append(
208
- create_change_record(self._current_action, diff, self._current_user)
209
- )
210
- self._previous_builtins = current
211
- # Log the change with user display name if available
212
- user_display = None
213
- if self._current_user:
214
- try:
215
- user_uuid = UUID(self._current_user)
216
- if user_uuid in self.db.users:
217
- user_display = self.db.users[user_uuid].display_name
218
- except (ValueError, KeyError):
219
- user_display = self._current_user
220
-
221
- diff_json = json.dumps(diff, default=str)
222
- if user_display:
223
- print(
224
- f"{self._current_action} by {user_display}: {diff_json}",
225
- file=sys.stderr,
226
- )
227
- else:
228
- print(f"{self._current_action}: {diff_json}", file=sys.stderr)
198
+ if not diff:
199
+ return
200
+ self._pending_changes.append(create_change_record(action, version, diff, user))
201
+ self._previous_builtins = copy.deepcopy(current)
202
+
203
+ # Log the change with user display name if available
204
+ user_display = None
205
+ if user:
206
+ try:
207
+ user_uuid = UUID(user)
208
+ if user_uuid in self.db.users:
209
+ user_display = self.db.users[user_uuid].display_name
210
+ except (ValueError, KeyError):
211
+ user_display = user
212
+
213
+ log_change(action, diff, user_display)
229
214
 
230
215
  @contextmanager
231
216
  def transaction(
@@ -248,19 +233,19 @@ class JsonlStore:
248
233
  # Check for out-of-transaction modifications
249
234
  current_state = msgspec.to_builtins(self.db)
250
235
  if current_state != self._previous_builtins:
251
- diff = compute_diff(self._previous_builtins, current_state)
252
- diff_json = json.dumps(diff, default=str, indent=2)
253
- _logger.error(
254
- "Database state modified outside of transaction! "
255
- "This indicates a bug where DB changes occurred without a transaction wrapper. "
256
- "Resetting to last known state from JSONL file.\n"
257
- f"Changes detected:\n{diff_json}"
258
- )
259
- # Hard reset to last known good state
260
- decoder = msgspec.json.Decoder(DB)
261
- self.db = decoder.decode(msgspec.json.encode(self._previous_builtins))
262
- self.db._store = self
263
- current_state = self._previous_builtins.copy()
236
+ # Allow bootstrap/migrate to create a new database from empty state
237
+ is_bootstrap = action in _BOOTSTRAP_ACTIONS or action.startswith("migrate:")
238
+ if is_bootstrap and not self._previous_builtins:
239
+ pass # Expected: creating database from scratch
240
+ else:
241
+ diff = compute_diff(self._previous_builtins, current_state)
242
+ diff_json = msgspec.json.encode(diff).decode()
243
+ _logger.critical(
244
+ "Database state modified outside of transaction! "
245
+ "This indicates a bug where DB changes occurred without a transaction wrapper.\n"
246
+ f"Changes detected:\n{diff_json}"
247
+ )
248
+ raise SystemExit(1)
264
249
 
265
250
  old_action = self._current_action
266
251
  old_user = self._current_user
@@ -272,7 +257,10 @@ class JsonlStore:
272
257
 
273
258
  try:
274
259
  yield
275
- self._queue_change()
260
+ current = msgspec.to_builtins(self.db)
261
+ self._queue_change(
262
+ self._current_action, self._current_version, current, self._current_user
263
+ )
276
264
  except Exception:
277
265
  # Rollback on error: restore from snapshot
278
266
  _logger.warning("Transaction '%s' failed, rolling back changes", action)
@@ -0,0 +1,233 @@
1
+ """
2
+ Database change logging with pretty-printed diffs.
3
+
4
+ Provides a logger for JSONL database changes that formats diffs
5
+ in a human-readable path.notation style with color coding.
6
+ """
7
+
8
+ import logging
9
+ import re
10
+ import sys
11
+ from typing import Any
12
+
13
+ logger = logging.getLogger("paskia.db")
14
+
15
+ # Pattern to match control characters and bidirectional overrides
16
+ _UNSAFE_CHARS = re.compile(
17
+ r"[\x00-\x1f\x7f-\x9f" # C0 and C1 control characters
18
+ r"\u200e\u200f" # LRM, RLM
19
+ r"\u202a-\u202e" # LRE, RLE, PDF, LRO, RLO
20
+ r"\u2066-\u2069" # LRI, RLI, FSI, PDI
21
+ r"]"
22
+ )
23
+
24
+ # ANSI color codes (matching FastAPI logging style)
25
+ _RESET = "\033[0m"
26
+ _DIM = "\033[2m"
27
+ _PATH_PREFIX = "\033[1;30m" # Dark grey for path prefix (like host in access log)
28
+ _PATH_FINAL = "\033[0m" # Default for final element (like path in access log)
29
+ _REPLACE = "\033[0;33m" # Yellow for replacements
30
+ _DELETE = "\033[0;31m" # Red for deletions
31
+ _ADD = "\033[0;32m" # Green for additions
32
+ _ACTION = "\033[1;34m" # Bold blue for action name
33
+ _USER = "\033[0;34m" # Blue for user display
34
+
35
+
36
+ def _use_color() -> bool:
37
+ """Check if we should use color output."""
38
+ return sys.stderr.isatty()
39
+
40
+
41
+ def _format_value(value: Any, use_color: bool, max_len: int = 60) -> str:
42
+ """Format a value for display, truncating if needed."""
43
+ if value is None:
44
+ return "null"
45
+
46
+ if isinstance(value, bool):
47
+ return "true" if value else "false"
48
+
49
+ if isinstance(value, (int, float)):
50
+ return str(value)
51
+
52
+ if isinstance(value, str):
53
+ # Filter out control characters and bidirectional overrides
54
+ value = _UNSAFE_CHARS.sub("", value)
55
+ # Truncate long strings
56
+ if len(value) > max_len:
57
+ return value[: max_len - 3] + "..."
58
+ return value
59
+
60
+ if isinstance(value, dict):
61
+ if not value:
62
+ return "{}"
63
+ # For small dicts, show inline
64
+ if len(value) == 1:
65
+ k, v = next(iter(value.items()))
66
+ return "{" + f"{k}: {_format_value(v, use_color, max_len=30)}" + "}"
67
+ return f"{{...{len(value)} keys}}"
68
+
69
+ if isinstance(value, list):
70
+ if not value:
71
+ return "[]"
72
+ if len(value) == 1:
73
+ return "[" + _format_value(value[0], use_color, max_len=30) + "]"
74
+ return f"[...{len(value)} items]"
75
+
76
+ # Fallback for other types
77
+ text = str(value)
78
+ if len(text) > max_len:
79
+ text = text[: max_len - 3] + "..."
80
+ return text
81
+
82
+
83
def _format_path(path: list[str], use_color: bool) -> str:
    """Join *path* with dots; in color mode the leading segments are dimmed.

    The final element is rendered in the default color while preceding
    segments (and their trailing dot) use the dark-grey prefix color,
    mirroring the host/path split of the access-log style.
    """
    if not path:
        return ""
    if not use_color:
        return ".".join(path)
    *head, tail = path
    if not head:
        return f"{_PATH_FINAL}{tail}{_RESET}"
    return f"{_PATH_PREFIX}{'.'.join(head)}.{_RESET}{_PATH_FINAL}{tail}{_RESET}"
94
+
95
+
96
def _collect_changes(
    diff: dict, path: list[str], changes: list[tuple[str, list[str], Any, Any | None]]
) -> None:
    """
    Flatten a jsondiff-style diff tree into (change_type, path, new, old) tuples.

    change_type is one of 'set', 'replace', 'delete'.  The old-value slot
    is reserved and currently always None.
    """
    if not isinstance(diff, dict):
        # A non-dict node is a plain assignment at this path.
        changes.append(("set", path, diff, None))
        return

    for key, node in diff.items():
        if key == "$delete":
            # $delete lists the keys removed under this path (or a single key).
            deleted = node if isinstance(node, list) else [node]
            for name in deleted:
                changes.append(("delete", path + [str(name)], None, None))

        elif key == "$replace":
            if isinstance(node, dict):
                # Whole-subtree replacement: report each new key separately,
                # or an empty {} when the collection was cleared.
                for new_key, new_val in node.items():
                    changes.append(("replace", path + [str(new_key)], new_val, None))
                if not node:
                    changes.append(("replace", path, {}, None))
            else:
                changes.append(("replace", path, node, None))

        elif key.startswith("$"):
            # Unknown $-operation (future-proofing): surface it verbatim.
            changes.append(("set", path, {key: node}, None))

        else:
            # Ordinary nested key: recurse with the extended path.
            _collect_changes(node, path + [str(key)], changes)
138
+
139
+
140
def _format_change_line(
    change_type: str, path: list[str], value: Any, use_color: bool
) -> str:
    """Render one collected change as a single formatted line."""
    path_str = _format_path(path, use_color)
    value_str = _format_value(value, use_color)

    if change_type == "delete":
        # Deletions carry no value, only the removed path.
        return f" ❌ {path_str}" if use_color else f" - {path_str}"

    if change_type == "replace":
        return (
            f" {_REPLACE}⟳{_RESET} {path_str} {_DIM}={_RESET} {value_str}"
            if use_color
            else f" ~ {path_str} = {value_str}"
        )

    # Anything else ('set') is shown as an addition.
    return (
        f" {_ADD}+{_RESET} {path_str} {_DIM}={_RESET} {value_str}"
        if use_color
        else f" + {path_str} = {value_str}"
    )
161
+
162
+
163
def format_diff(diff: dict) -> list[str]:
    """
    Turn a JSON diff into human-readable lines (without trailing newlines).

    Returns an empty list when the diff contains no changes; otherwise one
    formatted line per collected change.
    """
    color = _use_color()
    collected: list[tuple[str, list[str], Any, Any | None]] = []
    _collect_changes(diff, [], collected)
    return [
        _format_change_line(kind, path, value, color)
        for kind, path, value, _old in collected
    ]
183
+
184
+
185
def format_action_header(action: str, user_display: str | None = None) -> str:
    """Build the 'action [by user]' header line, colorized when stderr is a TTY."""
    if not _use_color():
        return f"{action} by {user_display}" if user_display else action

    colored_action = f"{_ACTION}{action}{_RESET}"
    if user_display:
        return f"{colored_action} by {_USER}{user_display}{_RESET}"
    return colored_action
199
+
200
+
201
def log_change(action: str, diff: dict, user_display: str | None = None) -> None:
    """
    Log a database change with pretty-printed diff.

    Uses lazy %-style logging arguments (rather than eager f-strings) so the
    message is only formatted when the INFO level is actually enabled.

    Args:
        action: The action name (e.g., "login", "admin:delete_user")
        diff: The JSON diff dict
        user_display: Optional display name of the user who performed the action
    """
    header = format_action_header(action, user_display)
    diff_lines = format_diff(diff)

    if not diff_lines:
        logger.info("%s", header)
    elif len(diff_lines) == 1:
        # Single change - combine header and change on one line
        logger.info("%s%s", header, diff_lines[0])
    else:
        # Multiple changes - header on its own line, then one line per change
        logger.info("%s", header)
        for line in diff_lines:
            logger.info("%s", line)
225
+
226
+
227
def configure_db_logging() -> None:
    """Configure the database logger to output to stderr without prefix.

    Idempotent: calling this more than once does not stack duplicate
    StreamHandlers, which would otherwise repeat every log line.
    """
    already_attached = any(
        isinstance(h, logging.StreamHandler) and getattr(h, "stream", None) is sys.stderr
        for h in logger.handlers
    )
    if not already_attached:
        handler = logging.StreamHandler(sys.stderr)
        handler.setFormatter(logging.Formatter("%(message)s"))
        logger.addHandler(handler)
    logger.setLevel(logging.INFO)
    logger.propagate = False