scitex 2.16.2__py3-none-any.whl → 2.17.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- scitex/_mcp_resources/_cheatsheet.py +1 -1
- scitex/_mcp_resources/_modules.py +1 -1
- scitex/_mcp_tools/__init__.py +2 -0
- scitex/_mcp_tools/verify.py +256 -0
- scitex/cli/main.py +2 -0
- scitex/cli/verify.py +476 -0
- scitex/dev/plt/__init__.py +1 -1
- scitex/dev/plt/data/mpl/PLOTTING_FUNCTIONS.yaml +90 -0
- scitex/dev/plt/data/mpl/PLOTTING_SIGNATURES.yaml +1571 -0
- scitex/dev/plt/data/mpl/PLOTTING_SIGNATURES_DETAILED.yaml +6262 -0
- scitex/dev/plt/data/mpl/SIGNATURES_FLATTENED.yaml +1274 -0
- scitex/dev/plt/data/mpl/dir_ax.txt +459 -0
- scitex/dev/plt/mpl/get_dir_ax.py +1 -1
- scitex/dev/plt/mpl/get_signatures.py +1 -1
- scitex/dev/plt/mpl/get_signatures_details.py +1 -1
- scitex/io/_load.py +8 -1
- scitex/io/_save.py +12 -0
- scitex/scholar/data/.gitkeep +0 -0
- scitex/scholar/data/README.md +44 -0
- scitex/scholar/data/bib_files/bibliography.bib +1952 -0
- scitex/scholar/data/bib_files/neurovista.bib +277 -0
- scitex/scholar/data/bib_files/neurovista_enriched.bib +441 -0
- scitex/scholar/data/bib_files/neurovista_enriched_enriched.bib +441 -0
- scitex/scholar/data/bib_files/neurovista_processed.bib +338 -0
- scitex/scholar/data/bib_files/openaccess.bib +89 -0
- scitex/scholar/data/bib_files/pac-seizure_prediction_enriched.bib +2178 -0
- scitex/scholar/data/bib_files/pac.bib +698 -0
- scitex/scholar/data/bib_files/pac_enriched.bib +1061 -0
- scitex/scholar/data/bib_files/pac_processed.bib +0 -0
- scitex/scholar/data/bib_files/pac_titles.txt +75 -0
- scitex/scholar/data/bib_files/paywalled.bib +98 -0
- scitex/scholar/data/bib_files/related-papers-by-coauthors.bib +58 -0
- scitex/scholar/data/bib_files/related-papers-by-coauthors_enriched.bib +87 -0
- scitex/scholar/data/bib_files/seizure_prediction.bib +694 -0
- scitex/scholar/data/bib_files/seizure_prediction_processed.bib +0 -0
- scitex/scholar/data/bib_files/test_complete_enriched.bib +437 -0
- scitex/scholar/data/bib_files/test_final_enriched.bib +437 -0
- scitex/scholar/data/bib_files/test_seizure.bib +46 -0
- scitex/scholar/data/impact_factor/JCR_IF_2022.xlsx +0 -0
- scitex/scholar/data/impact_factor/JCR_IF_2024.db +0 -0
- scitex/scholar/data/impact_factor/JCR_IF_2024.xlsx +0 -0
- scitex/scholar/data/impact_factor/JCR_IF_2024_v01.db +0 -0
- scitex/scholar/data/impact_factor.db +0 -0
- scitex/session/README.md +2 -2
- scitex/session/__init__.py +1 -0
- scitex/session/_decorator.py +57 -33
- scitex/session/_lifecycle/__init__.py +23 -0
- scitex/session/_lifecycle/_close.py +225 -0
- scitex/session/_lifecycle/_config.py +112 -0
- scitex/session/_lifecycle/_matplotlib.py +83 -0
- scitex/session/_lifecycle/_start.py +246 -0
- scitex/session/_lifecycle/_utils.py +186 -0
- scitex/session/_manager.py +40 -3
- scitex/session/template.py +1 -1
- scitex/template/_templates/plt.py +1 -1
- scitex/template/_templates/session.py +1 -1
- scitex/verify/README.md +312 -0
- scitex/verify/__init__.py +212 -0
- scitex/verify/_chain.py +369 -0
- scitex/verify/_db.py +600 -0
- scitex/verify/_hash.py +187 -0
- scitex/verify/_integration.py +127 -0
- scitex/verify/_rerun.py +253 -0
- scitex/verify/_tracker.py +330 -0
- scitex/verify/_visualize.py +48 -0
- scitex/verify/_viz/__init__.py +56 -0
- scitex/verify/_viz/_colors.py +84 -0
- scitex/verify/_viz/_format.py +302 -0
- scitex/verify/_viz/_json.py +192 -0
- scitex/verify/_viz/_mermaid.py +440 -0
- scitex/verify/_viz/_plotly.py +193 -0
- scitex/verify/_viz/_templates.py +246 -0
- scitex/verify/_viz/_utils.py +56 -0
- {scitex-2.16.2.dist-info → scitex-2.17.0.dist-info}/METADATA +1 -1
- {scitex-2.16.2.dist-info → scitex-2.17.0.dist-info}/RECORD +78 -29
- scitex/scholar/url_finder/.tmp/open_url/KNOWN_RESOLVERS.py +0 -462
- scitex/scholar/url_finder/.tmp/open_url/README.md +0 -223
- scitex/scholar/url_finder/.tmp/open_url/_DOIToURLResolver.py +0 -694
- scitex/scholar/url_finder/.tmp/open_url/_OpenURLResolver.py +0 -1160
- scitex/scholar/url_finder/.tmp/open_url/_ResolverLinkFinder.py +0 -344
- scitex/scholar/url_finder/.tmp/open_url/__init__.py +0 -24
- scitex/session/_lifecycle.py +0 -827
- {scitex-2.16.2.dist-info → scitex-2.17.0.dist-info}/WHEEL +0 -0
- {scitex-2.16.2.dist-info → scitex-2.17.0.dist-info}/entry_points.txt +0 -0
- {scitex-2.16.2.dist-info → scitex-2.17.0.dist-info}/licenses/LICENSE +0 -0
scitex/verify/_db.py
ADDED
@@ -0,0 +1,600 @@
#!/usr/bin/env python3
# Timestamp: "2026-02-01 (ywatanabe)"
# File: /home/ywatanabe/proj/scitex-python/src/scitex/verify/_db.py
"""SQLite database for verification tracking."""

from __future__ import annotations

import json
import sqlite3
from contextlib import contextmanager
from datetime import datetime
from pathlib import Path
from typing import Any, Dict, List, Optional, Union

from scitex.config import get_paths


class VerificationDB:
    """
    SQLite database for tracking session runs and file hashes.

    Stores:
    - runs: session_id, script_path, timestamps, status
    - file_hashes: session_id, file_path, hash, role (input/script/output)
    - chains: parent-child relationships between sessions

    Examples
    --------
    >>> db = VerificationDB()
    >>> db.add_run("2025Y-11M-18D-09h12m03s_HmH5", "/path/script.py")
    >>> db.add_file_hash("2025Y-11M-18D-09h12m03s_HmH5", "data.csv", "a1b2c3", "input")
    """

    def __init__(self, db_path: Optional[Union[str, Path]] = None):
        """
        Initialize database connection.

        Parameters
        ----------
        db_path : str or Path, optional
            Path to database file. Defaults to ~/.scitex/verification.db
        """
        if db_path is None:
            db_path = get_paths().base / "verification.db"
        self.db_path = Path(db_path)
        self.db_path.parent.mkdir(parents=True, exist_ok=True)
        self._init_schema()

    def _init_schema(self) -> None:
        """Create database tables if they don't exist."""
        with self._connect() as conn:
            conn.executescript(
                """
                CREATE TABLE IF NOT EXISTS runs (
                    session_id TEXT PRIMARY KEY,
                    script_path TEXT,
                    script_hash TEXT,
                    started_at TIMESTAMP,
                    finished_at TIMESTAMP,
                    status TEXT,
                    exit_code INTEGER,
                    parent_session TEXT,
                    combined_hash TEXT,
                    metadata TEXT
                );

                CREATE TABLE IF NOT EXISTS file_hashes (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    session_id TEXT NOT NULL,
                    file_path TEXT NOT NULL,
                    hash TEXT NOT NULL,
                    role TEXT NOT NULL,
                    recorded_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                    FOREIGN KEY (session_id) REFERENCES runs(session_id),
                    UNIQUE(session_id, file_path, role)
                );

                CREATE INDEX IF NOT EXISTS idx_file_path
                    ON file_hashes(file_path);
                CREATE INDEX IF NOT EXISTS idx_session
                    ON file_hashes(session_id);
                CREATE INDEX IF NOT EXISTS idx_role
                    ON file_hashes(role);
                CREATE INDEX IF NOT EXISTS idx_runs_status
                    ON runs(status);
                CREATE INDEX IF NOT EXISTS idx_runs_parent
                    ON runs(parent_session);

                CREATE TABLE IF NOT EXISTS verification_results (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    session_id TEXT NOT NULL,
                    verified_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                    level TEXT NOT NULL,
                    status TEXT NOT NULL,
                    FOREIGN KEY (session_id) REFERENCES runs(session_id)
                );

                CREATE INDEX IF NOT EXISTS idx_verification_session
                    ON verification_results(session_id);
                """
            )

    @contextmanager
    def _connect(self):
        """Context manager for database connection."""
        conn = sqlite3.connect(self.db_path)
        conn.row_factory = sqlite3.Row
        try:
            yield conn
            conn.commit()
        finally:
            conn.close()

    # -------------------------------------------------------------------------
    # Run operations
    # -------------------------------------------------------------------------

    def add_run(
        self,
        session_id: str,
        script_path: str,
        script_hash: Optional[str] = None,
        parent_session: Optional[str] = None,
        metadata: Optional[Dict[str, Any]] = None,
    ) -> None:
        """
        Add a new run to the database.

        Parameters
        ----------
        session_id : str
            Unique session identifier
        script_path : str
            Path to the script that was run
        script_hash : str, optional
            Hash of the script file
        parent_session : str, optional
            Parent session ID for chain tracking
        metadata : dict, optional
            Additional metadata to store
        """
        with self._connect() as conn:
            conn.execute(
                """
                INSERT OR REPLACE INTO runs
                (session_id, script_path, script_hash, started_at, status,
                 parent_session, metadata)
                VALUES (?, ?, ?, ?, ?, ?, ?)
                """,
                (
                    session_id,
                    script_path,
                    script_hash,
                    datetime.now().isoformat(),
                    "running",
                    parent_session,
                    json.dumps(metadata) if metadata else None,
                ),
            )

    def finish_run(
        self,
        session_id: str,
        status: str = "success",
        exit_code: int = 0,
        combined_hash: Optional[str] = None,
    ) -> None:
        """
        Mark a run as finished.

        Parameters
        ----------
        session_id : str
            Session identifier
        status : str, optional
            Final status (success, failed, error)
        exit_code : int, optional
            Exit code of the script
        combined_hash : str, optional
            Combined hash of all inputs/outputs
        """
        with self._connect() as conn:
            conn.execute(
                """
                UPDATE runs
                SET finished_at = ?, status = ?, exit_code = ?, combined_hash = ?
                WHERE session_id = ?
                """,
                (
                    datetime.now().isoformat(),
                    status,
                    exit_code,
                    combined_hash,
                    session_id,
                ),
            )

    def set_parent(self, session_id: str, parent_session: str) -> None:
        """
        Set the parent session for a run.

        Parameters
        ----------
        session_id : str
            Session identifier
        parent_session : str
            Parent session identifier
        """
        with self._connect() as conn:
            conn.execute(
                """
                UPDATE runs SET parent_session = ? WHERE session_id = ?
                """,
                (parent_session, session_id),
            )

    def get_run(self, session_id: str) -> Optional[Dict[str, Any]]:
        """Get run information by session ID."""
        with self._connect() as conn:
            row = conn.execute(
                "SELECT * FROM runs WHERE session_id = ?", (session_id,)
            ).fetchone()
            return dict(row) if row else None

    def list_runs(
        self,
        status: Optional[str] = None,
        limit: int = 100,
        offset: int = 0,
    ) -> List[Dict[str, Any]]:
        """
        List runs with optional filtering.

        Parameters
        ----------
        status : str, optional
            Filter by status
        limit : int, optional
            Maximum number of results
        offset : int, optional
            Offset for pagination

        Returns
        -------
        list of dict
            List of run records
        """
        with self._connect() as conn:
            if status:
                rows = conn.execute(
                    """
                    SELECT * FROM runs
                    WHERE status = ?
                    ORDER BY started_at DESC
                    LIMIT ? OFFSET ?
                    """,
                    (status, limit, offset),
                ).fetchall()
            else:
                rows = conn.execute(
                    """
                    SELECT * FROM runs
                    ORDER BY started_at DESC
                    LIMIT ? OFFSET ?
                    """,
                    (limit, offset),
                ).fetchall()
            return [dict(row) for row in rows]

    # -------------------------------------------------------------------------
    # File hash operations
    # -------------------------------------------------------------------------

    def add_file_hash(
        self,
        session_id: str,
        file_path: str,
        hash_value: str,
        role: str,
    ) -> None:
        """
        Add a file hash record.

        Parameters
        ----------
        session_id : str
            Session identifier
        file_path : str
            Path to the file
        hash_value : str
            Hash of the file
        role : str
            Role of the file (input, script, output)
        """
        with self._connect() as conn:
            conn.execute(
                """
                INSERT OR REPLACE INTO file_hashes
                (session_id, file_path, hash, role)
                VALUES (?, ?, ?, ?)
                """,
                (session_id, file_path, hash_value, role),
            )

    def add_file_hashes(
        self,
        session_id: str,
        hashes: Dict[str, str],
        role: str,
    ) -> None:
        """
        Add multiple file hashes at once.

        Parameters
        ----------
        session_id : str
            Session identifier
        hashes : dict
            Mapping of file paths to hashes
        role : str
            Role of the files (input, script, output)
        """
        with self._connect() as conn:
            conn.executemany(
                """
                INSERT OR REPLACE INTO file_hashes
                (session_id, file_path, hash, role)
                VALUES (?, ?, ?, ?)
                """,
                [(session_id, path, h, role) for path, h in hashes.items()],
            )

    def get_file_hashes(
        self,
        session_id: str,
        role: Optional[str] = None,
    ) -> Dict[str, str]:
        """
        Get file hashes for a session.

        Parameters
        ----------
        session_id : str
            Session identifier
        role : str, optional
            Filter by role

        Returns
        -------
        dict
            Mapping of file paths to hashes
        """
        with self._connect() as conn:
            if role:
                rows = conn.execute(
                    """
                    SELECT file_path, hash FROM file_hashes
                    WHERE session_id = ? AND role = ?
                    """,
                    (session_id, role),
                ).fetchall()
            else:
                rows = conn.execute(
                    """
                    SELECT file_path, hash FROM file_hashes
                    WHERE session_id = ?
                    """,
                    (session_id,),
                ).fetchall()
            return {row["file_path"]: row["hash"] for row in rows}

    def find_session_by_file(
        self,
        file_path: str,
        role: Optional[str] = None,
    ) -> List[str]:
        """
        Find sessions that used a specific file.

        Parameters
        ----------
        file_path : str
            Path to the file
        role : str, optional
            Filter by role (input, output)

        Returns
        -------
        list of str
            List of session IDs
        """
        with self._connect() as conn:
            if role:
                rows = conn.execute(
                    """
                    SELECT DISTINCT session_id FROM file_hashes
                    WHERE file_path = ? AND role = ?
                    ORDER BY recorded_at DESC
                    """,
                    (file_path, role),
                ).fetchall()
            else:
                rows = conn.execute(
                    """
                    SELECT DISTINCT session_id FROM file_hashes
                    WHERE file_path = ?
                    ORDER BY recorded_at DESC
                    """,
                    (file_path,),
                ).fetchall()
            return [row["session_id"] for row in rows]

    # -------------------------------------------------------------------------
    # Chain operations
    # -------------------------------------------------------------------------

    def get_chain(self, session_id: str) -> List[str]:
        """
        Get the chain of parent sessions for a given session.

        Parameters
        ----------
        session_id : str
            Session identifier

        Returns
        -------
        list of str
            List of session IDs from current to root
        """
        chain = [session_id]
        current = session_id

        with self._connect() as conn:
            while True:
                row = conn.execute(
                    "SELECT parent_session FROM runs WHERE session_id = ?",
                    (current,),
                ).fetchone()
                if not row or not row["parent_session"]:
                    break
                current = row["parent_session"]
                chain.append(current)

        return chain

    def get_children(self, session_id: str) -> List[str]:
        """Get child sessions that depend on this session."""
        with self._connect() as conn:
            rows = conn.execute(
                """
                SELECT session_id FROM runs
                WHERE parent_session = ?
                ORDER BY started_at
                """,
                (session_id,),
            ).fetchall()
            return [row["session_id"] for row in rows]

    # -------------------------------------------------------------------------
    # Statistics
    # -------------------------------------------------------------------------

    # -------------------------------------------------------------------------
    # Verification result operations
    # -------------------------------------------------------------------------

    def record_verification(
        self,
        session_id: str,
        level: str,
        status: str,
    ) -> None:
        """
        Record a verification result.

        Parameters
        ----------
        session_id : str
            Session identifier
        level : str
            Verification level (cache, from_scratch)
        status : str
            Verification status (verified, mismatch, missing, unknown)
        """
        with self._connect() as conn:
            conn.execute(
                """
                INSERT INTO verification_results
                (session_id, level, status)
                VALUES (?, ?, ?)
                """,
                (session_id, level, status),
            )

    def get_latest_verification(
        self,
        session_id: str,
    ) -> Optional[Dict[str, Any]]:
        """
        Get the most recent verification result for a session.

        Parameters
        ----------
        session_id : str
            Session identifier

        Returns
        -------
        dict or None
            Latest verification result with level, status, and timestamp
        """
        with self._connect() as conn:
            row = conn.execute(
                """
                SELECT level, status, verified_at
                FROM verification_results
                WHERE session_id = ?
                ORDER BY verified_at DESC
                LIMIT 1
                """,
                (session_id,),
            ).fetchone()
            return dict(row) if row else None

    def get_verification_history(
        self,
        session_id: str,
        limit: int = 10,
    ) -> List[Dict[str, Any]]:
        """
        Get verification history for a session.

        Parameters
        ----------
        session_id : str
            Session identifier
        limit : int, optional
            Maximum number of results

        Returns
        -------
        list of dict
            Verification results ordered by timestamp (newest first)
        """
        with self._connect() as conn:
            rows = conn.execute(
                """
                SELECT level, status, verified_at
                FROM verification_results
                WHERE session_id = ?
                ORDER BY verified_at DESC
                LIMIT ?
                """,
                (session_id, limit),
            ).fetchall()
            return [dict(row) for row in rows]

    # -------------------------------------------------------------------------
    # Statistics
    # -------------------------------------------------------------------------

    def stats(self) -> Dict[str, Any]:
        """Get database statistics."""
        with self._connect() as conn:
            total_runs = conn.execute("SELECT COUNT(*) FROM runs").fetchone()[0]
            success_runs = conn.execute(
                "SELECT COUNT(*) FROM runs WHERE status = 'success'"
            ).fetchone()[0]
            failed_runs = conn.execute(
                "SELECT COUNT(*) FROM runs WHERE status = 'failed'"
            ).fetchone()[0]
            total_files = conn.execute("SELECT COUNT(*) FROM file_hashes").fetchone()[0]
            unique_files = conn.execute(
                "SELECT COUNT(DISTINCT file_path) FROM file_hashes"
            ).fetchone()[0]

            return {
                "total_runs": total_runs,
                "success_runs": success_runs,
                "failed_runs": failed_runs,
                "total_file_records": total_files,
                "unique_files": unique_files,
                "db_path": str(self.db_path),
            }


# Global instance
_DB_INSTANCE: Optional[VerificationDB] = None


def get_db() -> VerificationDB:
    """Get or create the global database instance."""
    global _DB_INSTANCE
    if _DB_INSTANCE is None:
        _DB_INSTANCE = VerificationDB()
    return _DB_INSTANCE


# EOF
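For orientation, here is a minimal usage sketch of the VerificationDB API added in this file: record a run, attach input/output hashes, finish it, and query provenance afterwards. Only the method names and signatures come from the code above; the session ID, file paths, hash values, and the direct import from the private module path are illustrative assumptions (the public scitex.verify package may re-export the class differently).

# Minimal sketch; placeholder session ID, paths, and hashes.
from scitex.verify._db import VerificationDB

db = VerificationDB("/tmp/verification.db")  # omit the argument to use the default location

# Register a run, then attach hashes as the script progresses.
db.add_run("2025Y-11M-18D-09h12m03s_HmH5", "/path/script.py")
db.add_file_hashes(
    "2025Y-11M-18D-09h12m03s_HmH5",
    {"data/raw.csv": "a1b2c3", "config.yaml": "d4e5f6"},
    role="input",
)
db.add_file_hash("2025Y-11M-18D-09h12m03s_HmH5", "out/result.csv", "0f9e8d", "output")
db.finish_run("2025Y-11M-18D-09h12m03s_HmH5", status="success", exit_code=0)

# Later: inspect provenance and database-wide statistics.
print(db.get_run("2025Y-11M-18D-09h12m03s_HmH5")["status"])     # "success"
print(db.find_session_by_file("out/result.csv", role="output"))  # sessions that produced this file
print(db.stats())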