vention_storage-0.0.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- storage/__init__.py +0 -0
- storage/accessor.py +280 -0
- storage/auditor.py +69 -0
- storage/bootstrap.py +47 -0
- storage/database.py +107 -0
- storage/hooks.py +47 -0
- storage/io_helpers.py +171 -0
- storage/router_database.py +275 -0
- storage/router_model.py +247 -0
- storage/utils.py +20 -0
- vention_storage-0.0.0.dist-info/METADATA +509 -0
- vention_storage-0.0.0.dist-info/RECORD +13 -0
- vention_storage-0.0.0.dist-info/WHEEL +4 -0
storage/io_helpers.py
ADDED
@@ -0,0 +1,171 @@
from __future__ import annotations

import csv
import io
import sqlite3
import os
import zipfile
import tempfile
from pathlib import Path
from typing import List

from sqlalchemy import select
from sqlalchemy.sql.schema import Table
from sqlmodel import Session, SQLModel
from storage import database
from storage.utils import to_primitive
from fastapi import UploadFile

_SQLITE_INTERNAL_PREFIX = "sqlite_"

__all__ = [
    "discover_user_tables",
    "write_table_csv_buffer",
    "build_export_zip_bytes",
    "db_file_path",
    "build_backup_bytes",
    "validate_sqlite_file",
    "safe_unlink",
]

# ---------- Table discovery & CSV ----------


def discover_user_tables() -> List[Table]:
    """Return user tables (excludes SQLite internal)."""
    user_tables: List[Table] = []
    for table in SQLModel.metadata.sorted_tables:
        if not table.name.startswith(_SQLITE_INTERNAL_PREFIX):
            user_tables.append(table)
    return user_tables


def write_table_csv_buffer(session: Session, table: Table) -> io.StringIO:
    """SELECT * and return CSV (header + rows) in a StringIO."""
    buffer = io.StringIO(newline="")
    columns = list(table.columns)
    writer = csv.DictWriter(
        buffer, fieldnames=[column.name for column in columns], extrasaction="ignore"
    )
    writer.writeheader()
    result = session.exec(select(table))
    for row in result:
        writer.writerow(
            {column.name: to_primitive(row._mapping[column]) for column in columns}
        )
    return buffer


def build_export_zip_bytes(tables: list[Table]) -> bytes:
    """Return ZIP bytes with one CSV per table (valid empty ZIP if none)."""
    mem = io.BytesIO()
    with zipfile.ZipFile(mem, mode="w", compression=zipfile.ZIP_DEFLATED) as zip_file:
        if tables:
            with database.use_session() as session:
                for table in tables:
                    try:
                        csv_buffer = write_table_csv_buffer(session, table)
                    except Exception as inner:  # surface table context upstream
                        raise RuntimeError(f"table={table.name}: {inner}") from inner
                    zip_file.writestr(f"{table.name}.csv", csv_buffer.getvalue())
    return mem.getvalue()


# ---------- Backup ----------


def db_file_path() -> str:
    """Return absolute DB file path."""
    engine = database.get_engine()
    db_path = engine.url.database or ""
    return str(Path(db_path).resolve())


def build_backup_bytes(db_path: str) -> bytes:
    """
    Build a consistent .sqlite backup and return bytes.
    """
    db_dir = Path(db_path).resolve().parent
    with tempfile.NamedTemporaryFile(
        prefix="backup-", suffix=".sqlite", dir=db_dir, delete=False
    ) as tmp:
        tmp_path = Path(tmp.name)

    try:
        database.get_engine().dispose()
        with (
            sqlite3.connect(str(Path(db_path))) as db_connection,
            sqlite3.connect(str(tmp_path)) as tmp_connection,
        ):
            db_connection.backup(tmp_connection)

        return tmp_path.read_bytes()
    finally:
        safe_unlink(tmp_path)


# ---------- Restore helpers ----------


def validate_sqlite_file(path: Path, *, run_integrity_check: bool = True) -> None:
    """Raise ValueError on validation failure; returns None on success."""
    # Header check
    try:
        with path.open("rb") as file:
            signature = file.read(16)
    except Exception as e:
        raise ValueError(f"Cannot read uploaded file: {e}") from e
    if signature != b"SQLite format 3\x00":
        raise ValueError("Invalid SQLite header")

    if not run_integrity_check:
        return

    # integrity_check
    try:
        uri = f"file:{path.as_posix()}?mode=ro"
        connection = sqlite3.connect(uri, uri=True)
        try:
            row = connection.execute("PRAGMA integrity_check;").fetchone()
            ok = row and str(row[0]).lower() == "ok"
            if not ok:
                raise ValueError(
                    f"Integrity check failed: {row[0] if row else 'unknown'}"
                )
        finally:
            connection.close()
    except ValueError:
        raise
    except Exception as e:
        raise ValueError(f"Integrity check error: {e}") from e


def save_upload_to_temp(file: UploadFile, dest_dir: Path) -> tuple[Path, int]:
    with tempfile.NamedTemporaryFile(
        prefix="restore-", suffix=".sqlite", dir=dest_dir, delete=False
    ) as tmp:
        tmp_path = Path(tmp.name)
        total = 0
        while True:
            chunk = file.file.read(1024 * 1024)
            if not chunk:
                break
            tmp.write(chunk)
            total += len(chunk)
    try:
        file.file.close()
    except Exception:
        pass
    return tmp_path, total


def atomic_replace_db(tmp_path: Path, db_path: Path) -> None:
    database.get_engine().dispose()
    os.replace(str(tmp_path), str(db_path))


def safe_unlink(path: Path) -> None:
    try:
        path.unlink(missing_ok=True)
    except Exception:
        pass
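
Taken together, these helpers cover the whole export / backup / restore cycle. A minimal usage sketch, assuming the package is installed and `storage.database` is already configured to point at a SQLite file (the output filenames here are illustrative, not part of the package):

    from pathlib import Path

    from storage.io_helpers import (
        build_backup_bytes,
        build_export_zip_bytes,
        db_file_path,
        discover_user_tables,
        validate_sqlite_file,
    )

    # One CSV per user table currently registered in SQLModel.metadata.
    Path("export.zip").write_bytes(build_export_zip_bytes(discover_user_tables()))

    # Consistent snapshot of the live database via the SQLite Backup API.
    Path("backup.sqlite").write_bytes(build_backup_bytes(db_file_path()))

    # Checks the 16-byte header magic and runs PRAGMA integrity_check;
    # raises ValueError if either fails.
    validate_sqlite_file(Path("backup.sqlite"))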
storage/router_database.py
ADDED

@@ -0,0 +1,275 @@
from __future__ import annotations

from datetime import datetime, timezone
from typing import Dict, List, Optional
from pathlib import Path

from fastapi import APIRouter, HTTPException, Query, Response, UploadFile, File
from sqlmodel import SQLModel, select
from sqlalchemy import desc

from storage import database, io_helpers
from storage.auditor import AuditLog
from storage.utils import Operation

from storage.io_helpers import (
    discover_user_tables,
    build_export_zip_bytes,
    db_file_path,
    build_backup_bytes,
    validate_sqlite_file,
    safe_unlink,
)

__all__ = ["build_db_router"]


def build_db_router(
    *,
    audit_default_limit: int = 100,
    audit_max_limit: int = 1000,
) -> APIRouter:
    """
    Build a FastAPI router exposing database-wide utilities.

    Endpoints:
      - /db/health        : Verify DB engine is available
      - /db/audit         : Query audit logs (filters + pagination)
      - /db/diagram.svg   : Schema diagram (requires Graphviz)
      - /db/export.zip    : CSV export (one CSV per table)
      - /db/backup.sqlite : Full SQLite backup file
      - /db/restore       : Upload and restore a .sqlite backup (atomic replace)
    """
    router = APIRouter(prefix="/db", tags=["db"])

    @router.get("/health")
    def health() -> Dict[str, str]:
        """
        Check database connectivity.

        Returns:
            dict: {"status": "ok"} if the database engine can be initialized.
        """
        _ = database.get_engine()
        return {"status": "ok"}

    @router.get("/audit")
    def read_audit(
        component: Optional[str] = Query(None, description="Filter by component name"),
        record_id: Optional[int] = Query(None, description="Filter by record ID"),
        actor: Optional[str] = Query(None, description="Filter by actor identifier"),
        operation: Optional[Operation] = Query(
            None, description="Filter by operation type"
        ),
        since: Optional[datetime] = Query(
            None, description="Include only logs on/after this timestamp"
        ),
        until: Optional[datetime] = Query(
            None, description="Include only logs before this timestamp"
        ),
        limit: int = Query(
            audit_default_limit,
            ge=1,
            le=audit_max_limit,
            description="Maximum rows to return",
        ),
        offset: int = Query(
            0, ge=0, description="Number of rows to skip (for pagination)"
        ),
    ) -> List[AuditLog]:
        """
        Query the audit log table.

        Supports filtering by component, record_id, actor, operation,
        and timestamp range, with pagination.

        Args:
            component (str, optional): Restrict to a specific component.
            record_id (int, optional): Restrict to a specific record ID.
            actor (str, optional): Restrict to a specific actor (user/system).
            operation (Operation, optional): Restrict to a specific operation.
            since (datetime, optional): Include only logs on/after this timestamp.
            until (datetime, optional): Include only logs before this timestamp.
            limit (int): Maximum number of logs to return (bounded by audit_max_limit).
            offset (int): Number of rows to skip for pagination.

        Returns:
            List[AuditLog]: A list of audit log entries matching the criteria.
        """
        with database.use_session() as session:
            statement = select(AuditLog)
            if component:
                statement = statement.where(AuditLog.component == component)
            if record_id is not None:
                statement = statement.where(AuditLog.record_id == record_id)
            if actor:
                statement = statement.where(AuditLog.actor == actor)
            if operation:
                statement = statement.where(AuditLog.operation == operation)
            if since is not None:
                statement = statement.where(AuditLog.timestamp >= since)
            if until is not None:
                statement = statement.where(AuditLog.timestamp < until)
            statement = (
                statement.order_by(desc(AuditLog.timestamp)).offset(offset).limit(limit)
            )
            rows: List[AuditLog] = session.exec(statement).all()
            return rows

    @router.get("/diagram.svg", response_class=Response)
    def diagram_svg() -> Response:
        """
        Generate a database schema diagram in SVG format.

        Requires `sqlalchemy-schemadisplay` and Graphviz to be installed.
        The diagram reflects the current SQLModel metadata.

        Returns:
            Response: SVG image of the database schema.

        Raises:
            HTTPException 503: If required dependencies are missing
                or Graphviz is not available.
        """
        try:
            # import here to avoid hard dependency if not used
            from sqlalchemy_schemadisplay import create_schema_graph
        except Exception as e:
            raise HTTPException(
                status_code=503,
                detail=(
                    "sqlalchemy-schemadisplay is required. "
                    "Install with: pip install sqlalchemy-schemadisplay"
                ),
            ) from e

        try:
            graph = create_schema_graph(
                engine=database.get_engine(),
                metadata=SQLModel.metadata,
                show_datatypes=True,
                show_indexes=False,
                concentrate=False,
            )
            return Response(content=graph.create_svg(), media_type="image/svg+xml")
        except Exception as e:
            msg = str(e).lower()
            if "executable" in msg or "dot not found" in msg or "graphviz" in msg:
                raise HTTPException(
                    status_code=503,
                    detail=(
                        "Graphviz is required to render the diagram. "
                        "Install it (e.g. brew install graphviz / apt-get install graphviz)."
                    ),
                ) from e
            raise

    @router.get("/export.zip")
    def export_zip() -> Response:
        """
        Export the entire database as a ZIP archive.

        The archive contains one CSV file per user-defined table found in SQLModel metadata.
        SQLite internal tables (e.g., "sqlite_sequence") are excluded.

        Returns:
            Response: application/zip payload with "{table}.csv" entries.
        """
        headers = {"Content-Disposition": 'attachment; filename="export.zip"'}
        try:
            zip_bytes = build_export_zip_bytes(discover_user_tables())
        except Exception as e:
            raise HTTPException(
                status_code=503, detail=f"Failed to build export.zip: {e}"
            ) from e
        return Response(
            content=zip_bytes, media_type="application/zip", headers=headers
        )

    @router.get("/backup.sqlite")
    def backup_sqlite() -> Response:
        """
        Create and return a consistent SQLite backup of the current database file.

        Uses the SQLite Backup API for correctness.

        Returns:
            Response: application/x-sqlite3 payload with a `.sqlite` file.

        Raises:
            HTTPException 503: Operational failure creating the backup.
        """
        path = db_file_path()
        headers = {
            "Content-Disposition": f'attachment; filename="backup-{_backup_timestamp_slug()}.sqlite"'
        }
        try:
            data = build_backup_bytes(path)
        except Exception as e:
            raise HTTPException(
                status_code=503, detail=f"Failed to create backup: {e}"
            ) from e
        return Response(
            content=data, media_type="application/x-sqlite3", headers=headers
        )

    @router.post("/restore")
    def restore_sqlite(
        file: UploadFile = File(..., description="SQLite .sqlite backup to restore"),
        integrity_check: bool = Query(
            True, description="Run PRAGMA integrity_check before replacing"
        ),
        dry_run: bool = Query(
            False, description="Validate only; do not modify current database"
        ),
    ) -> Dict[str, object]:
        """
        Restore the database from an uploaded SQLite file by atomically replacing the current DB file.

        Steps:
          1. Save upload to a temporary path.
          2. Validate header and (optionally) PRAGMA integrity_check.
          3. If dry_run: report validation OK and exit.
          4. Dispose engine connections and os.replace(temp, db_path).

        Returns:
            dict: {status: "ok", restored: bool, bytes: int}

        Raises:
            HTTPException 422: Invalid SQLite file or failed integrity check.
            HTTPException 503: Operational failure during file I/O.
        """
        path = db_file_path()
        db_dir = Path(path).resolve().parent
        try:
            tmp_path, total = io_helpers.save_upload_to_temp(file, db_dir)
        except Exception as e:
            raise HTTPException(503, f"Failed to save upload: {e}") from e

        try:
            validate_sqlite_file(tmp_path, run_integrity_check=integrity_check)
        except ValueError as ve:
            safe_unlink(tmp_path)
            raise HTTPException(422, str(ve)) from ve
        except Exception as e:
            safe_unlink(tmp_path)
            raise HTTPException(422, f"Invalid SQLite file: {e}") from e

        if dry_run:
            safe_unlink(tmp_path)
            return {"status": "ok", "restored": False, "bytes": total}

        try:
            io_helpers.atomic_replace_db(tmp_path, Path(path))
        except Exception as e:
            safe_unlink(tmp_path)
            raise HTTPException(503, f"Failed to replace database file: {e}") from e

        return {"status": "ok", "restored": True, "bytes": total}

    return router


def _backup_timestamp_slug() -> str:
    """Timestamp safe for filenames (UTC, no colons)."""
    return datetime.now(timezone.utc).strftime("%Y%m%dT%H%M%SZ")
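
The router is self-contained, so wiring it into an application is a single call. A minimal sketch, assuming a plain FastAPI app (the limit values are illustrative):

    from fastapi import FastAPI

    from storage.router_database import build_db_router

    app = FastAPI()
    app.include_router(build_db_router(audit_default_limit=50, audit_max_limit=500))

    # Once served, the endpoints above become available, e.g.:
    #   GET  /db/health
    #   GET  /db/export.zip
    #   GET  /db/backup.sqlite
    #   POST /db/restore?dry_run=true   (multipart upload, field name "file")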
storage/router_model.py
ADDED
@@ -0,0 +1,247 @@
from __future__ import annotations

from typing import Any, Dict, List, Optional, Type

from fastapi import (
    APIRouter,
    Depends,
    HTTPException,
    Query,
    Request,
    Body,
    Response,
    status,
)
from sqlalchemy.exc import DataError, IntegrityError, StatementError

from storage.accessor import ModelAccessor
from storage.utils import ModelType

__all__ = ["build_crud_router"]

DEFAULT_MAX_RECORDS_PER_MODEL = 100


def get_actor(request: Request) -> str:
    """
    Extract the audit actor from the `X-User` header, e.g. 'Operator' or 'Admin'.

    Notes:
        - Required for **mutating** endpoints (POST, PUT, DELETE, restore).
        - Optional for read-only endpoints.
        - The value is stored verbatim in `AuditLog.actor`.
        - If missing on a mutating endpoint, a 400 error is returned.

    Returns:
        The `X-User` header value.

    Raises:
        HTTPException(400): If the header is missing for a mutating endpoint.
    """
    actor = request.headers.get("X-User")
    if not actor:
        raise HTTPException(status_code=400, detail="Missing X-User header")
    return actor


def build_crud_router(
    accessor: ModelAccessor[ModelType],
    *,
    max_records: Optional[int] = DEFAULT_MAX_RECORDS_PER_MODEL,
) -> APIRouter:
    """
    Build a FastAPI router exposing CRUD + restore endpoints for a single SQLModel component.
    """
    model: Type[ModelType] = accessor.model
    router = APIRouter(prefix=f"/{accessor.component}", tags=[accessor.component])

    # ---------------- READS ----------------

    @router.get("/", response_model=List[model])  # type: ignore[valid-type]
    def list_records(
        include_deleted: bool = Query(False, description="Include soft-deleted rows"),
    ) -> List[ModelType]:
        """
        List all records for this model.

        By default, soft-deleted records are excluded from results.

        Args:
            include_deleted (bool): Set to true to include soft-deleted records. Defaults to false.

        Returns:
            List[model]: List of model instances matching the criteria.
        """
        return accessor.all(include_deleted=include_deleted)

    @router.get("/{record_id}", response_model=model)
    def get_record(
        record_id: int,
        include_deleted: bool = Query(False, description="Include soft-deleted row"),
    ) -> ModelType:
        """
        Retrieve a single record by its ID.

        Args:
            record_id (int): ID of the record to fetch.
            include_deleted (bool): Set to true to allow returning a soft-deleted record. Defaults to false.

        Returns:
            model: The model instance if found.

        Raises:
            HTTPException 404: If the record does not exist or is soft-deleted (when include_deleted=false).
        """
        obj = accessor.get(record_id, include_deleted=include_deleted)
        if not obj:
            raise HTTPException(status_code=404, detail="Not found")
        return obj

    # ---------------- WRITES ----------------

    @router.post("/", response_model=model)
    def create_record(
        payload: Dict[str, Any] = Body(...),
        actor: str = Depends(get_actor),
    ) -> ModelType:
        """
        Create a new record.

        Args:
            payload (Dict[str, Any]): JSON body with the record fields.
            actor (str): User identifier from the `X-User` header.

        Returns:
            model: The newly created model instance.

        Raises:
            HTTPException 409: If the maximum number of records has been reached.
            HTTPException 422: If the payload violates schema or database constraints.
        """
        if max_records is not None:
            total = len(accessor.all(include_deleted=True))
            if total >= max_records:
                raise HTTPException(
                    status_code=409,
                    detail=f"Max {max_records} records allowed for {accessor.component}",
                )
        obj = model(**payload)

        try:
            return accessor.insert(obj, actor=actor)
        except (IntegrityError, DataError, StatementError) as e:
            raise HTTPException(status_code=422, detail=str(e)) from e

    @router.put("/{record_id}", response_model=model)
    def update_record(
        record_id: int,
        response: Response,
        payload: Dict[str, Any] = Body(...),
        actor: str = Depends(get_actor),
    ) -> ModelType:
        """
        Upsert a record (PUT semantics).

        Args:
            record_id (int): ID of the record to update or create.
            payload (Dict[str, Any]): JSON body with the record fields (the `id` key, if present, is ignored).
            actor (str): User identifier from the `X-User` header.
            response (Response): Used to adjust the HTTP status code.

        Returns:
            model: The updated or newly created model instance.

        If the record exists, it is updated in place (200 OK).
        If the record does not exist, it is created at this ID (201 Created).
        """
        existed = accessor.get(record_id, include_deleted=True) is not None

        # If this PUT will create a new row, enforce the max records.
        if not existed and max_records is not None:
            total = len(accessor.all(include_deleted=True))
            if total >= max_records:
                raise HTTPException(
                    status_code=409,
                    detail=f"Max {max_records} records allowed for {accessor.component}",
                )

        # Build instance (ignore `id` from body if present)
        payload_no_id = {key: value for key, value in payload.items() if key != "id"}
        obj = model(id=record_id, **payload_no_id)

        try:
            saved = accessor.save(obj, actor=actor)
        except (IntegrityError, DataError, StatementError) as e:
            raise HTTPException(status_code=422, detail=str(e)) from e

        if not existed:
            response.status_code = status.HTTP_201_CREATED
        return saved

    @router.delete("/{record_id}")
    def delete_record(
        record_id: int,
        actor: str = Depends(get_actor),
    ) -> Dict[str, str]:
        """
        Delete a record by ID.

        If the model supports soft-delete, the record is marked as deleted.
        Otherwise, the record is permanently removed.

        Args:
            record_id (int): ID of the record to delete.
            actor (str): User identifier from the `X-User` header.

        Returns:
            dict: {"status": "deleted"} on success.

        Raises:
            HTTPException 404: If the record does not exist.
        """
        ok = accessor.delete(record_id, actor=actor)
        if not ok:
            raise HTTPException(status_code=404, detail="Not found")
        return {"status": "deleted"}

    # ---------------- RESTORE (soft-delete only) ----------------

    @router.post("/{record_id}/restore")
    def restore_record(
        record_id: int,
        actor: str = Depends(get_actor),
    ) -> Dict[str, Any]:
        """
        Restore a soft-deleted record.

        This endpoint only applies if the model has a `deleted_at` field.

        Args:
            record_id (int): ID of the record to restore.
            actor (str): User identifier from the `X-User` header.

        Returns:
            dict: {"status": "ok", "restored": True} if the record was restored,
                  {"status": "ok", "restored": False} if it was not soft-deleted.

        Raises:
            HTTPException 404: If the record does not exist.
            HTTPException 409: If the model does not support soft-delete/restore.
        """
        if not hasattr(accessor.model, "deleted_at"):
            raise HTTPException(
                status_code=409,
                detail=f"{accessor.component} does not support soft delete/restore",
            )

        obj = accessor.get(record_id, include_deleted=True)
        if not obj:
            raise HTTPException(status_code=404, detail="Not found")

        if getattr(obj, "deleted_at") is None:
            return {"status": "ok", "restored": False}

        accessor.restore(record_id, actor=actor)
        return {"status": "ok", "restored": True}

    return router
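
For completeness, a sketch of exposing a model through this router. The `Widget` model is hypothetical, and the `ModelAccessor(Widget)` call assumes the accessor takes the model class as its first argument; storage/accessor.py (whose contents are not reproduced in this section) defines the real signature:

    from typing import Optional

    from fastapi import FastAPI
    from sqlmodel import Field, SQLModel

    from storage.accessor import ModelAccessor
    from storage.router_model import build_crud_router


    class Widget(SQLModel, table=True):  # hypothetical example model
        id: Optional[int] = Field(default=None, primary_key=True)
        name: str


    app = FastAPI()
    # Assumed constructor shape; see storage/accessor.py for the real one.
    app.include_router(build_crud_router(ModelAccessor(Widget), max_records=10))

    # Mutating endpoints resolve their audit actor via get_actor(), so
    # POST/PUT/DELETE requests must send an "X-User" header; the route
    # prefix comes from accessor.component (e.g. POST /widget/).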