paskia 0.8.0-py3-none-any.whl → 0.9.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- paskia/_version.py +2 -2
- paskia/authsession.py +14 -27
- paskia/bootstrap.py +31 -103
- paskia/config.py +0 -1
- paskia/db/__init__.py +26 -51
- paskia/db/background.py +17 -37
- paskia/db/jsonl.py +168 -6
- paskia/db/migrations.py +34 -0
- paskia/db/operations.py +400 -723
- paskia/db/structs.py +214 -90
- paskia/fastapi/__main__.py +89 -189
- paskia/fastapi/admin.py +103 -162
- paskia/fastapi/api.py +49 -85
- paskia/fastapi/mainapp.py +30 -19
- paskia/fastapi/remote.py +16 -39
- paskia/fastapi/reset.py +27 -17
- paskia/fastapi/session.py +2 -2
- paskia/fastapi/user.py +21 -27
- paskia/fastapi/ws.py +27 -62
- paskia/fastapi/wschat.py +62 -0
- paskia/frontend-build/auth/admin/index.html +5 -5
- paskia/frontend-build/auth/assets/{AccessDenied-Bc249ASC.css → AccessDenied-DPkUS8LZ.css} +1 -1
- paskia/frontend-build/auth/assets/AccessDenied-Fmeb6EtF.js +8 -0
- paskia/frontend-build/auth/assets/{RestrictedAuth-DgdJyscT.css → RestrictedAuth-CvR33_Z0.css} +1 -1
- paskia/frontend-build/auth/assets/RestrictedAuth-DsJXicIw.js +1 -0
- paskia/frontend-build/auth/assets/{_plugin-vue_export-helper-rKFEraYH.js → _plugin-vue_export-helper-nhjnO_bd.js} +1 -1
- paskia/frontend-build/auth/assets/admin-CPE1pLMm.js +1 -0
- paskia/frontend-build/auth/assets/{admin-BeNu48FR.css → admin-DzzjSg72.css} +1 -1
- paskia/frontend-build/auth/assets/{auth-BKX7shEe.css → auth-C7k64Wad.css} +1 -1
- paskia/frontend-build/auth/assets/auth-YIZvPlW_.js +1 -0
- paskia/frontend-build/auth/assets/{forward-Dzg-aE1C.js → forward-DmqVHZ7e.js} +1 -1
- paskia/frontend-build/auth/assets/reset-Chtv69AT.css +1 -0
- paskia/frontend-build/auth/assets/reset-s20PATTN.js +1 -0
- paskia/frontend-build/auth/assets/{restricted-C0IQufuH.js → restricted-D3AJx3_6.js} +1 -1
- paskia/frontend-build/auth/index.html +5 -5
- paskia/frontend-build/auth/restricted/index.html +4 -4
- paskia/frontend-build/int/forward/index.html +4 -4
- paskia/frontend-build/int/reset/index.html +3 -3
- paskia/globals.py +2 -2
- paskia/migrate/__init__.py +62 -55
- paskia/migrate/sql.py +72 -22
- paskia/remoteauth.py +1 -2
- paskia/sansio.py +6 -12
- {paskia-0.8.0.dist-info → paskia-0.9.0.dist-info}/METADATA +3 -2
- paskia-0.9.0.dist-info/RECORD +57 -0
- paskia/frontend-build/auth/assets/AccessDenied-aTdCvz9k.js +0 -8
- paskia/frontend-build/auth/assets/RestrictedAuth-BLMK7-nL.js +0 -1
- paskia/frontend-build/auth/assets/admin-tVs8oyLv.js +0 -1
- paskia/frontend-build/auth/assets/auth-Dk3q4pNS.js +0 -1
- paskia/frontend-build/auth/assets/reset-BWF4cWKR.css +0 -1
- paskia/frontend-build/auth/assets/reset-C_Td1_jn.js +0 -1
- paskia/util/frontend.py +0 -75
- paskia/util/hostutil.py +0 -76
- paskia/util/htmlutil.py +0 -47
- paskia/util/passphrase.py +0 -20
- paskia/util/permutil.py +0 -43
- paskia/util/pow.py +0 -45
- paskia/util/querysafe.py +0 -11
- paskia/util/sessionutil.py +0 -38
- paskia/util/startupbox.py +0 -75
- paskia/util/timeutil.py +0 -47
- paskia/util/useragent.py +0 -10
- paskia/util/userinfo.py +0 -145
- paskia/util/wordlist.py +0 -54
- paskia-0.8.0.dist-info/RECORD +0 -68
- {paskia-0.8.0.dist-info → paskia-0.9.0.dist-info}/WHEEL +0 -0
- {paskia-0.8.0.dist-info → paskia-0.9.0.dist-info}/entry_points.txt +0 -0
paskia/db/jsonl.py
CHANGED
@@ -1,19 +1,27 @@
 """
 JSONL persistence layer for the database.
-
-Handles file I/O, JSON diffs, and persistence. Works with plain JSON/dict data.
-Uses aiofiles for async I/O operations.
 """
 
+from __future__ import annotations
+
+import copy
+import json
 import logging
+import sys
 from collections import deque
+from contextlib import contextmanager
 from datetime import datetime, timezone
 from pathlib import Path
+from typing import Any
+from uuid import UUID
 
 import aiofiles
 import jsondiff
 import msgspec
 
+from paskia.db.migrations import apply_migrations
+from paskia.db.structs import DB, SessionContext
+
 _logger = logging.getLogger(__name__)
 
 # Default database path
@@ -96,6 +104,10 @@ def create_change_record(
     )
 
 
+# Actions that are allowed to create a new database file
+_BOOTSTRAP_ACTIONS = frozenset({"bootstrap", "migrate"})
+
+
 async def flush_changes(
     db_path: Path,
     pending_changes: deque[_ChangeRecord],
@@ -112,15 +124,25 @@ async def flush_changes(
     if not pending_changes:
         return True
 
-
+    if not db_path.exists():
+        first_action = pending_changes[0].a
+        if first_action not in _BOOTSTRAP_ACTIONS:
+            _logger.error(
+                "Refusing to create database file with action '%s' - "
+                "only bootstrap or migrate can create a new database",
+                first_action,
+            )
+            pending_changes.clear()
+            return False
+
     changes_to_write = list(pending_changes)
     pending_changes.clear()
 
     try:
-        # Build lines to append (keep as bytes, join with \n)
         lines = [_change_encoder.encode(change) for change in changes_to_write]
+        if not lines:
+            return True
 
-        # Append all lines in a single write (binary mode for Windows compatibility)
         async with aiofiles.open(db_path, "ab") as f:
             await f.write(b"\n".join(lines) + b"\n")
             return True
@@ -130,3 +152,143 @@ async def flush_changes(
         for change in reversed(changes_to_write):
             pending_changes.appendleft(change)
         return False
+
+
+class JsonlStore:
+    """JSONL persistence layer for a DB instance."""
+
+    def __init__(self, db: DB, db_path: str = DB_PATH_DEFAULT):
+        self.db: DB = db
+        self.db_path = Path(db_path)
+        self._previous_builtins: dict[str, Any] = {}
+        self._pending_changes: deque[_ChangeRecord] = deque()
+        self._current_action: str = "system"
+        self._current_user: str | None = None
+        self._in_transaction: bool = False
+        self._transaction_snapshot: dict[str, Any] | None = None
+
+    async def load(self, db_path: str | None = None) -> None:
+        """Load data from JSONL change log."""
+        if db_path is not None:
+            self.db_path = Path(db_path)
+        try:
+            data_dict = await load_jsonl(self.db_path)
+            if data_dict:
+                # Preserve original state before migrations (deep copy for nested dicts)
+                original_dict = copy.deepcopy(data_dict)
+
+                # Apply schema migrations (modifies data_dict in place)
+                migrated = apply_migrations(data_dict)
+
+                decoder = msgspec.json.Decoder(DB)
+                self.db = decoder.decode(msgspec.json.encode(data_dict))
+                self.db._store = self
+
+                # Update previous state to migrated data FIRST (to avoid transaction hardening reset)
+                self._previous_builtins = data_dict
+
+                # Persist migration by manually computing and queueing the diff
+                if migrated:
+                    diff = compute_diff(original_dict, data_dict)
+                    if diff:
+                        self._pending_changes.append(
+                            create_change_record("migrate", diff, user=None)
+                        )
+                        _logger.info("Queued migration changes for persistence")
+                        await self.flush()
+        except ValueError:
+            if self.db_path.exists():
+                raise
+
+    def _queue_change(self) -> None:
+        current = msgspec.to_builtins(self.db)
+        diff = compute_diff(self._previous_builtins, current)
+        if diff:
+            self._pending_changes.append(
+                create_change_record(self._current_action, diff, self._current_user)
+            )
+            self._previous_builtins = current
+            # Log the change with user display name if available
+            user_display = None
+            if self._current_user:
+                try:
+                    user_uuid = UUID(self._current_user)
+                    if user_uuid in self.db.users:
+                        user_display = self.db.users[user_uuid].display_name
+                except (ValueError, KeyError):
+                    user_display = self._current_user
+
+            diff_json = json.dumps(diff, default=str)
+            if user_display:
+                print(
+                    f"{self._current_action} by {user_display}: {diff_json}",
+                    file=sys.stderr,
+                )
+            else:
+                print(f"{self._current_action}: {diff_json}", file=sys.stderr)
+
+    @contextmanager
+    def transaction(
+        self,
+        action: str,
+        ctx: SessionContext | None = None,
+        *,
+        user: str | None = None,
+    ):
+        """Wrap writes in transaction. Queues change on successful exit.
+
+        Args:
+            action: Describes the operation (e.g., "Created user", "Login")
+            ctx: Session context of user performing the action (None for system operations)
+            user: User UUID string (alternative to ctx when full context unavailable)
+        """
+        if self._in_transaction:
+            raise RuntimeError("Nested transactions are not supported")
+
+        # Check for out-of-transaction modifications
+        current_state = msgspec.to_builtins(self.db)
+        if current_state != self._previous_builtins:
+            diff = compute_diff(self._previous_builtins, current_state)
+            diff_json = json.dumps(diff, default=str, indent=2)
+            _logger.error(
+                "Database state modified outside of transaction! "
+                "This indicates a bug where DB changes occurred without a transaction wrapper. "
+                "Resetting to last known state from JSONL file.\n"
+                f"Changes detected:\n{diff_json}"
+            )
+            # Hard reset to last known good state
+            decoder = msgspec.json.Decoder(DB)
+            self.db = decoder.decode(msgspec.json.encode(self._previous_builtins))
+            self.db._store = self
+            current_state = self._previous_builtins.copy()
+
+        old_action = self._current_action
+        old_user = self._current_user
+        self._current_action = action
+        # Prefer ctx.user.uuid if ctx provided, otherwise use user param
+        self._current_user = str(ctx.user.uuid) if ctx else user
+        self._in_transaction = True
+        self._transaction_snapshot = current_state
+
+        try:
+            yield
+            self._queue_change()
+        except Exception:
+            # Rollback on error: restore from snapshot
+            _logger.warning("Transaction '%s' failed, rolling back changes", action)
+            if self._transaction_snapshot is not None:
+                decoder = msgspec.json.Decoder(DB)
+                self.db = decoder.decode(
+                    msgspec.json.encode(self._transaction_snapshot)
+                )
+                self.db._store = self
+            raise
+        finally:
+            self._current_action = old_action
+            self._current_user = old_user
+            self._in_transaction = False
+            self._transaction_snapshot = None
+
+    async def flush(self) -> bool:
+        """Write all pending changes to disk."""
+        return await flush_changes(self.db_path, self._pending_changes)
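The new JsonlStore class wraps a DB instance and its JSONL change log: load() replays the log and applies schema migrations, transaction() guards mutations and queues the resulting diff against the previous state, and flush() appends queued change records to disk. A minimal usage sketch follows, assuming an already-constructed DB instance and a hypothetical database path; neither appears in this diff (DB is defined in paskia/db/structs.py).

# Sketch only: `db` and the "data/paskia.jsonl" path are illustrative
# assumptions; DB construction is defined in paskia/db/structs.py.
from paskia.db.jsonl import JsonlStore
from paskia.db.structs import DB


async def example(db: DB, user_uuid: str) -> None:
    store = JsonlStore(db, db_path="data/paskia.jsonl")
    await store.load()  # replay the change log, apply migrations if needed

    # Mutations happen inside a transaction; on successful exit the diff
    # against the previous state is queued as a change record.
    with store.transaction("Created user", user=user_uuid):
        ...  # mutate store.db here

    await store.flush()  # append queued change records to the JSONL file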
paskia/db/migrations.py
ADDED
@@ -0,0 +1,34 @@
+"""
+Database schema migrations.
+
+Migrations are applied during database load based on the version field.
+Each migration should be idempotent and only run when needed.
+"""
+
+import logging
+
+_logger = logging.getLogger(__name__)
+
+
+def apply_migrations(data_dict: dict) -> bool:
+    """Apply any pending schema migrations to the database dictionary.
+
+    Args:
+        data_dict: The raw database dictionary loaded from JSONL
+
+    Returns:
+        True if any migrations were applied, False otherwise
+    """
+    db_version = data_dict.get("v", 0)
+    migrated = False
+
+    if db_version == 0:
+        # Migration v0 -> v1: Remove created_at from orgs (field removed from schema)
+        if "orgs" in data_dict:
+            for org_data in data_dict["orgs"].values():
+                org_data.pop("created_at", None)
+        data_dict["v"] = 1
+        migrated = True
+        _logger.info("Applied schema migration: v0 -> v1 (removed org.created_at)")
+
+    return migrated
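The version gate makes apply_migrations cheap to call on every load: a dictionary already at v1 falls through and returns False. A small sketch of the v0 -> v1 path follows, using placeholder org data; the org key and the fields other than created_at are illustrative, not taken from the real schema.

from paskia.db.migrations import apply_migrations

# Placeholder v0 snapshot; only the keys the migration touches matter here.
data = {
    "v": 0,
    "orgs": {
        "00000000-0000-0000-0000-000000000001": {
            "display_name": "Example Org",  # illustrative field
            "created_at": "2024-01-01T00:00:00Z",
        },
    },
}

assert apply_migrations(data) is True
assert data["v"] == 1
assert "created_at" not in data["orgs"]["00000000-0000-0000-0000-000000000001"]

# A second call is a no-op: the version field is now 1.
assert apply_migrations(data) is False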