repr-cli 0.2.16__py3-none-any.whl → 0.2.18__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- repr/__init__.py +1 -1
- repr/api.py +363 -62
- repr/auth.py +47 -38
- repr/change_synthesis.py +478 -0
- repr/cli.py +4306 -364
- repr/config.py +119 -11
- repr/configure.py +889 -0
- repr/cron.py +419 -0
- repr/dashboard/__init__.py +9 -0
- repr/dashboard/build.py +126 -0
- repr/dashboard/dist/assets/index-B-aCjaCw.js +384 -0
- repr/dashboard/dist/assets/index-BYFVbEev.css +1 -0
- repr/dashboard/dist/assets/index-BrrhyJFO.css +1 -0
- repr/dashboard/dist/assets/index-C7Gzxc4f.js +384 -0
- repr/dashboard/dist/assets/index-CQdMXo6g.js +391 -0
- repr/dashboard/dist/assets/index-CcEg74ts.js +270 -0
- repr/dashboard/dist/assets/index-Cerc-iA_.js +377 -0
- repr/dashboard/dist/assets/index-CjVcBW2L.css +1 -0
- repr/dashboard/dist/assets/index-Cs8ofFGd.js +384 -0
- repr/dashboard/dist/assets/index-Dfl3mR5E.js +377 -0
- repr/dashboard/dist/assets/index-DwN0SeMc.css +1 -0
- repr/dashboard/dist/assets/index-YFch_e0S.js +384 -0
- repr/dashboard/dist/favicon.svg +4 -0
- repr/dashboard/dist/index.html +14 -0
- repr/dashboard/manager.py +234 -0
- repr/dashboard/server.py +1489 -0
- repr/db.py +980 -0
- repr/hooks.py +3 -2
- repr/loaders/__init__.py +22 -0
- repr/loaders/base.py +156 -0
- repr/loaders/claude_code.py +287 -0
- repr/loaders/clawdbot.py +313 -0
- repr/loaders/gemini_antigravity.py +381 -0
- repr/mcp_server.py +1196 -0
- repr/models.py +503 -0
- repr/openai_analysis.py +25 -0
- repr/session_extractor.py +481 -0
- repr/storage.py +328 -0
- repr/story_synthesis.py +1296 -0
- repr/templates.py +68 -4
- repr/timeline.py +710 -0
- repr/tools.py +17 -8
- {repr_cli-0.2.16.dist-info → repr_cli-0.2.18.dist-info}/METADATA +48 -10
- repr_cli-0.2.18.dist-info/RECORD +58 -0
- {repr_cli-0.2.16.dist-info → repr_cli-0.2.18.dist-info}/WHEEL +1 -1
- {repr_cli-0.2.16.dist-info → repr_cli-0.2.18.dist-info}/entry_points.txt +1 -0
- repr_cli-0.2.16.dist-info/RECORD +0 -26
- {repr_cli-0.2.16.dist-info → repr_cli-0.2.18.dist-info}/licenses/LICENSE +0 -0
- {repr_cli-0.2.16.dist-info → repr_cli-0.2.18.dist-info}/top_level.txt +0 -0
repr/db.py
ADDED
|
@@ -0,0 +1,980 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Central SQLite database for repr stories.
|
|
3
|
+
|
|
4
|
+
Replaces distributed .repr/store.json files with a single ~/.repr/stories.db
|
|
5
|
+
for faster queries, FTS5 search, and staleness tracking.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
import json
|
|
9
|
+
import sqlite3
|
|
10
|
+
from contextlib import contextmanager
|
|
11
|
+
from datetime import datetime, timezone
|
|
12
|
+
from pathlib import Path
|
|
13
|
+
from typing import Optional
|
|
14
|
+
|
|
15
|
+
from .models import Story, FileChange, CodeSnippet
|
|
16
|
+
from .storage import generate_ulid
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
# Schema version for migrations.
# NOTE(review): kept at 8, matching the last numbered migration in
# _run_migrations. _ensure_columns_exist also back-fills columns whose
# comments label them v9/v10 — confirm whether this constant should be bumped.
SCHEMA_VERSION = 8
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
def get_db_path() -> Path:
    """Return the path of the central stories database.

    Resolved lazily (import inside the function) so tests can repoint
    REPR_HOME before the first call.
    """
    from .storage import REPR_HOME
    db_file = REPR_HOME / "stories.db"
    return db_file
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
def _serialize_json_list(items: list) -> str:
|
|
30
|
+
"""Serialize a list to JSON string for storage."""
|
|
31
|
+
return json.dumps(items) if items else "[]"
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
def _serialize_json(obj: list | dict | None) -> str:
|
|
35
|
+
"""Serialize an object to JSON string for storage, handling Pydantic models."""
|
|
36
|
+
if not obj:
|
|
37
|
+
return "[]" if isinstance(obj, list) or obj is None else "{}"
|
|
38
|
+
# Handle list of Pydantic models
|
|
39
|
+
if isinstance(obj, list) and obj and hasattr(obj[0], 'model_dump'):
|
|
40
|
+
return json.dumps([item.model_dump() for item in obj])
|
|
41
|
+
return json.dumps(obj)
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
def _deserialize_file_changes(data: str | None) -> list[FileChange]:
    """Decode a stored JSON array into FileChange models.

    Any empty, malformed, or wrongly-shaped payload yields [] rather than
    raising, so a corrupt row never breaks loading.
    """
    if not data:
        return []
    try:
        # Model construction stays inside the try: a bad item shape raises
        # TypeError, which is also treated as "no changes".
        return [FileChange(**entry) for entry in json.loads(data)]
    except (json.JSONDecodeError, TypeError):
        return []
|
|
53
|
+
|
|
54
|
+
|
|
55
|
+
def _deserialize_key_snippets(data: str | None) -> list[CodeSnippet]:
    """Decode a stored JSON array into CodeSnippet models.

    Mirrors _deserialize_file_changes: empty or invalid input yields [].
    """
    if not data:
        return []
    try:
        return [CodeSnippet(**entry) for entry in json.loads(data)]
    except (json.JSONDecodeError, TypeError):
        return []
|
|
64
|
+
|
|
65
|
+
|
|
66
|
+
def _deserialize_json_list(data: str | None) -> list:
|
|
67
|
+
"""Deserialize a JSON string to list."""
|
|
68
|
+
if not data:
|
|
69
|
+
return []
|
|
70
|
+
try:
|
|
71
|
+
return json.loads(data)
|
|
72
|
+
except json.JSONDecodeError:
|
|
73
|
+
return []
|
|
74
|
+
|
|
75
|
+
|
|
76
|
+
def _datetime_to_iso(dt: datetime | None) -> str | None:
|
|
77
|
+
"""Convert datetime to ISO string."""
|
|
78
|
+
if dt is None:
|
|
79
|
+
return None
|
|
80
|
+
return dt.isoformat()
|
|
81
|
+
|
|
82
|
+
|
|
83
|
+
def _iso_to_datetime(iso_str: str | None) -> datetime | None:
|
|
84
|
+
"""Convert ISO string to datetime."""
|
|
85
|
+
if not iso_str:
|
|
86
|
+
return None
|
|
87
|
+
try:
|
|
88
|
+
return datetime.fromisoformat(iso_str.replace("Z", "+00:00"))
|
|
89
|
+
except (ValueError, AttributeError):
|
|
90
|
+
return None
|
|
91
|
+
|
|
92
|
+
|
|
93
|
+
class ReprDatabase:
|
|
94
|
+
"""Central SQLite database for repr stories."""
|
|
95
|
+
|
|
96
|
+
def __init__(self, db_path: Path | None = None):
    """Bind the database file location and make sure its directory exists.

    Falls back to get_db_path() (~/.repr/stories.db) when no path is given.
    """
    chosen = db_path or get_db_path()
    self.db_path = chosen
    self._ensure_dir()
|
|
99
|
+
|
|
100
|
+
def _ensure_dir(self):
|
|
101
|
+
"""Ensure the database directory exists."""
|
|
102
|
+
self.db_path.parent.mkdir(parents=True, exist_ok=True)
|
|
103
|
+
|
|
104
|
+
@contextmanager
def connect(self):
    """Yield a sqlite3 connection configured for this database.

    Row access by column name, WAL journaling for better concurrency, and
    foreign-key enforcement are all enabled. Commits on clean exit of the
    with-block, rolls back and re-raises on error, and always closes.
    """
    handle = sqlite3.connect(str(self.db_path), timeout=30.0)
    handle.row_factory = sqlite3.Row
    handle.execute("PRAGMA journal_mode=WAL")
    handle.execute("PRAGMA foreign_keys=ON")
    try:
        yield handle
        # Commit inside the try so a failing commit is also rolled back.
        handle.commit()
    except Exception:
        handle.rollback()
        raise
    finally:
        handle.close()
|
|
120
|
+
|
|
121
|
+
def init_schema(self):
    """Initialize database schema with all tables.

    Idempotent: every statement uses CREATE ... IF NOT EXISTS, and
    _run_migrations() runs afterwards to upgrade databases created by
    older versions. All timestamps are stored as ISO-8601 TEXT.
    """
    with self.connect() as conn:
        # Projects table (registry with freshness tracking)
        conn.execute("""
            CREATE TABLE IF NOT EXISTS projects (
                id TEXT PRIMARY KEY,
                path TEXT UNIQUE NOT NULL,
                name TEXT NOT NULL,
                last_generated TEXT,
                last_commit_sha TEXT,
                last_commit_at TEXT,
                created_at TEXT NOT NULL
            )
        """)

        # Stories table — base (v1) columns only; later columns are added
        # by the migrations in _run_migrations / _ensure_columns_exist.
        conn.execute("""
            CREATE TABLE IF NOT EXISTS stories (
                id TEXT PRIMARY KEY,
                project_id TEXT NOT NULL REFERENCES projects(id),
                created_at TEXT NOT NULL,
                updated_at TEXT NOT NULL,
                title TEXT NOT NULL,
                problem TEXT DEFAULT '',
                approach TEXT DEFAULT '',
                tradeoffs TEXT DEFAULT '',
                outcome TEXT DEFAULT '',
                category TEXT DEFAULT 'feature',
                scope TEXT DEFAULT 'internal',
                technologies TEXT DEFAULT '[]',
                started_at TEXT,
                ended_at TEXT,
                implementation_details TEXT,
                decisions TEXT,
                lessons TEXT,
                public_post TEXT DEFAULT '',
                public_show TEXT,
                internal_post TEXT DEFAULT '',
                internal_show TEXT,
                internal_details TEXT DEFAULT '[]'
            )
        """)

        # Create indexes for common queries
        conn.execute("CREATE INDEX IF NOT EXISTS idx_stories_project ON stories(project_id)")
        conn.execute("CREATE INDEX IF NOT EXISTS idx_stories_category ON stories(category)")
        conn.execute("CREATE INDEX IF NOT EXISTS idx_stories_created_at ON stories(created_at)")
        conn.execute("CREATE INDEX IF NOT EXISTS idx_stories_started_at ON stories(started_at)")

        # FTS5 for full-text search — an external-content table backed by
        # the stories table, so the text is not stored twice.
        conn.execute("""
            CREATE VIRTUAL TABLE IF NOT EXISTS stories_fts USING fts5(
                title, problem, approach,
                content='stories', content_rowid='rowid'
            )
        """)

        # Triggers keep the external-content FTS index in sync with stories
        # (insert / delete / update — update is delete-then-insert).
        conn.execute("""
            CREATE TRIGGER IF NOT EXISTS stories_ai AFTER INSERT ON stories BEGIN
                INSERT INTO stories_fts(rowid, title, problem, approach)
                VALUES (new.rowid, new.title, new.problem, new.approach);
            END
        """)
        conn.execute("""
            CREATE TRIGGER IF NOT EXISTS stories_ad AFTER DELETE ON stories BEGIN
                INSERT INTO stories_fts(stories_fts, rowid, title, problem, approach)
                VALUES ('delete', old.rowid, old.title, old.problem, old.approach);
            END
        """)
        conn.execute("""
            CREATE TRIGGER IF NOT EXISTS stories_au AFTER UPDATE ON stories BEGIN
                INSERT INTO stories_fts(stories_fts, rowid, title, problem, approach)
                VALUES ('delete', old.rowid, old.title, old.problem, old.approach);
                INSERT INTO stories_fts(rowid, title, problem, approach)
                VALUES (new.rowid, new.title, new.problem, new.approach);
            END
        """)

        # Story-file relationships (story <-> touched file paths)
        conn.execute("""
            CREATE TABLE IF NOT EXISTS story_files (
                story_id TEXT REFERENCES stories(id) ON DELETE CASCADE,
                file_path TEXT,
                PRIMARY KEY (story_id, file_path)
            )
        """)
        conn.execute("CREATE INDEX IF NOT EXISTS idx_story_files_path ON story_files(file_path)")

        # Story-commit relationships
        conn.execute("""
            CREATE TABLE IF NOT EXISTS story_commits (
                story_id TEXT REFERENCES stories(id) ON DELETE CASCADE,
                commit_sha TEXT,
                PRIMARY KEY (story_id, commit_sha)
            )
        """)
        conn.execute("CREATE INDEX IF NOT EXISTS idx_story_commits_sha ON story_commits(commit_sha)")

        # Story-session relationships
        conn.execute("""
            CREATE TABLE IF NOT EXISTS story_sessions (
                story_id TEXT REFERENCES stories(id) ON DELETE CASCADE,
                session_id TEXT,
                PRIMARY KEY (story_id, session_id)
            )
        """)

        # Schema version tracking
        conn.execute("""
            CREATE TABLE IF NOT EXISTS schema_version (
                version INTEGER PRIMARY KEY
            )
        """)
        # Don't set version here - let migrations handle it

    # Run migrations for existing databases
    self._run_migrations()
|
|
240
|
+
|
|
241
|
+
def _ensure_columns_exist(self, conn):
|
|
242
|
+
"""Ensure all required columns exist (recovery for botched migrations)."""
|
|
243
|
+
# All columns that should exist, with their defaults
|
|
244
|
+
required_columns = [
|
|
245
|
+
# v2: technologies
|
|
246
|
+
("technologies", "'[]'"),
|
|
247
|
+
# v3: build log columns
|
|
248
|
+
("public_post", "''"),
|
|
249
|
+
("public_show", "NULL"),
|
|
250
|
+
("internal_post", "''"),
|
|
251
|
+
("internal_show", "NULL"),
|
|
252
|
+
("internal_details", "'[]'"),
|
|
253
|
+
# v4: recall/diff columns
|
|
254
|
+
("file_changes", "'[]'"),
|
|
255
|
+
("key_snippets", "'[]'"),
|
|
256
|
+
("total_insertions", "0"),
|
|
257
|
+
("total_deletions", "0"),
|
|
258
|
+
# v5: Tripartite Codex fields
|
|
259
|
+
("hook", "''"),
|
|
260
|
+
("what", "''"),
|
|
261
|
+
("value", "''"),
|
|
262
|
+
("insight", "''"),
|
|
263
|
+
("show", "NULL"),
|
|
264
|
+
# v6: post_body
|
|
265
|
+
("post_body", "''"),
|
|
266
|
+
# v7: diagram
|
|
267
|
+
("diagram", "NULL"),
|
|
268
|
+
# v8: author_name
|
|
269
|
+
("author_name", "'unknown'"),
|
|
270
|
+
# v9: author_email for Gravatar
|
|
271
|
+
("author_email", "''"),
|
|
272
|
+
# v10: user_id and visibility for cloud sync
|
|
273
|
+
("user_id", "NULL"),
|
|
274
|
+
("visibility", "'private'"),
|
|
275
|
+
]
|
|
276
|
+
|
|
277
|
+
for col, default in required_columns:
|
|
278
|
+
try:
|
|
279
|
+
conn.execute(f"ALTER TABLE stories ADD COLUMN {col} TEXT DEFAULT {default}")
|
|
280
|
+
except sqlite3.OperationalError:
|
|
281
|
+
# Column already exists
|
|
282
|
+
pass
|
|
283
|
+
|
|
284
|
+
def _run_migrations(self):
    """Run schema migrations for existing databases.

    Bug fix: the version was previously read with a bare
    ``SELECT version ... fetchone()``. Because each migration bumps the
    version with ``INSERT OR REPLACE`` on a *different* primary-key value,
    the table accumulates one row per migration and fetchone() returned an
    arbitrary (typically the oldest) row, so migrations re-ran on every
    startup. Reading MAX(version) yields the true current version.

    All column additions are tolerant of "duplicate column" errors, so the
    whole routine is idempotent.
    """
    with self.connect() as conn:
        # Get the current schema version (0 for fresh/pre-versioned DBs).
        try:
            row = conn.execute("SELECT MAX(version) AS version FROM schema_version").fetchone()
            current_version = (row["version"] or 0) if row else 0
        except sqlite3.OperationalError:
            current_version = 0

        # Recovery: always try to add columns that might be missing.
        # This handles databases where the version was recorded before the
        # corresponding columns were actually added.
        self._ensure_columns_exist(conn)

        def _add_columns(columns):
            """Add each (name, default-SQL) column, ignoring duplicates."""
            for col, default in columns:
                try:
                    conn.execute(f"ALTER TABLE stories ADD COLUMN {col} TEXT DEFAULT {default}")
                except sqlite3.OperationalError:
                    pass  # column already exists

        def _set_version(version):
            """Record that the schema is now at *version*."""
            conn.execute(
                "INSERT OR REPLACE INTO schema_version (version) VALUES (?)",
                (version,),
            )

        # v1 -> v2: technologies column
        if current_version < 2:
            _add_columns([("technologies", "'[]'")])
            _set_version(2)

        # v2 -> v3: build log columns
        if current_version < 3:
            _add_columns([
                ("public_post", "''"),
                ("public_show", "NULL"),
                ("internal_post", "''"),
                ("internal_show", "NULL"),
                ("internal_details", "'[]'"),
            ])
            _set_version(3)

        # v3 -> v4: recall/diff columns
        if current_version < 4:
            _add_columns([
                ("file_changes", "'[]'"),
                ("key_snippets", "'[]'"),
                ("total_insertions", "0"),
                ("total_deletions", "0"),
            ])
            _set_version(4)

        # v4 -> v5: Tripartite Codex fields (hook, what, value, insight, show)
        if current_version < 5:
            _add_columns([
                ("hook", "''"),
                ("what", "''"),
                ("value", "''"),
                ("insight", "''"),
                ("show", "NULL"),
            ])
            _set_version(5)

        # v5 -> v6: post_body field
        if current_version < 6:
            _add_columns([("post_body", "''")])
            _set_version(6)

        # v6 -> v7: diagram field
        if current_version < 7:
            _add_columns([("diagram", "NULL")])
            _set_version(7)

        # v7 -> v8: user_id and visibility for cloud sync
        if current_version < 8:
            _add_columns([
                ("user_id", "NULL"),
                ("visibility", "'private'"),
            ])
            _set_version(8)

        # Ensure the schema version row exists for fresh databases.
        conn.execute(
            "INSERT OR IGNORE INTO schema_version (version) VALUES (?)",
            (SCHEMA_VERSION,),
        )
|
|
418
|
+
|
|
419
|
+
# =========================================================================
|
|
420
|
+
# Project Management
|
|
421
|
+
# =========================================================================
|
|
422
|
+
|
|
423
|
+
def register_project(self, path: Path, name: str) -> str:
    """Register a project keyed by its resolved path and return its ID.

    Re-registering an existing path refreshes the name and keeps the
    original ID.
    """
    resolved_path = str(path.resolve())
    candidate_id = generate_ulid()
    created = datetime.now(timezone.utc).isoformat()

    with self.connect() as conn:
        conn.execute(
            """
            INSERT INTO projects (id, path, name, created_at)
            VALUES (?, ?, ?, ?)
            ON CONFLICT(path) DO UPDATE SET name=excluded.name
            """,
            (candidate_id, resolved_path, name, created),
        )
        # The path may have been registered before; return the row that won.
        existing = conn.execute(
            "SELECT id FROM projects WHERE path = ?",
            (resolved_path,),
        ).fetchone()
        return existing["id"] if existing else candidate_id
|
|
444
|
+
|
|
445
|
+
def get_project_by_path(self, path: Path) -> dict | None:
    """Return the project row for *path* (resolved) as a dict, or None."""
    with self.connect() as conn:
        found = conn.execute(
            "SELECT * FROM projects WHERE path = ?",
            (str(path.resolve()),),
        ).fetchone()
        if found is None:
            return None
        return dict(found)
|
|
453
|
+
|
|
454
|
+
def get_project_by_id(self, project_id: str) -> dict | None:
    """Return the project row with the given ID as a dict, or None."""
    with self.connect() as conn:
        found = conn.execute(
            "SELECT * FROM projects WHERE id = ?",
            (project_id,),
        ).fetchone()
        if found is None:
            return None
        return dict(found)
|
|
462
|
+
|
|
463
|
+
def list_projects(self) -> list[dict]:
    """Return every registered project as a dict, ordered by name."""
    with self.connect() as conn:
        cursor = conn.execute("SELECT * FROM projects ORDER BY name")
        return [dict(record) for record in cursor]
|
|
468
|
+
|
|
469
|
+
def update_freshness(
    self,
    project_id: str,
    commit_sha: str,
    commit_at: datetime
):
    """Record that stories were just generated for *project_id*.

    Stamps last_generated with the current UTC time and stores the commit
    SHA/timestamp the generation covered, for later staleness checks.
    """
    stamped = datetime.now(timezone.utc).isoformat()
    with self.connect() as conn:
        conn.execute(
            """
            UPDATE projects
            SET last_generated = ?,
                last_commit_sha = ?,
                last_commit_at = ?
            WHERE id = ?
            """,
            (stamped, commit_sha, _datetime_to_iso(commit_at), project_id),
        )
|
|
488
|
+
|
|
489
|
+
def check_freshness(self, path: Path) -> dict:
    """
    Check if a project's stories are up to date.

    Compares the stored last_commit_sha against the repository's current
    HEAD (via `git log -1`).

    Returns:
        {
            "needs_refresh": bool,
            "reason": str | None,
            "last_generated": str | None,
            "last_commit_sha": str | None,
            # plus "current_commit_sha" when new commits were detected
        }
    """
    project = self.get_project_by_path(path)
    if not project:
        return {
            "needs_refresh": True,
            "reason": "Project not registered",
            "last_generated": None,
            "last_commit_sha": None,
        }

    if not project.get("last_generated"):
        return {
            "needs_refresh": True,
            "reason": "No stories generated yet",
            "last_generated": None,
            "last_commit_sha": project.get("last_commit_sha"),
        }

    # Get latest commit from git. Any failure (not a repo, git missing,
    # git stalling) degrades to "unknown" rather than raising.
    try:
        import subprocess
        result = subprocess.run(
            ["git", "-C", str(path), "log", "-1", "--format=%H"],
            capture_output=True,
            text=True,
            timeout=10,  # fix: never hang the caller on a stuck git
        )
        latest_sha = result.stdout.strip() if result.returncode == 0 else None
    except Exception:
        latest_sha = None

    if latest_sha and latest_sha != project.get("last_commit_sha"):
        return {
            "needs_refresh": True,
            # fix: was an f-string with no placeholders
            "reason": "New commits since last generation",
            "last_generated": project.get("last_generated"),
            "last_commit_sha": project.get("last_commit_sha"),
            "current_commit_sha": latest_sha,
        }

    return {
        "needs_refresh": False,
        "reason": None,
        "last_generated": project.get("last_generated"),
        "last_commit_sha": project.get("last_commit_sha"),
    }
|
|
545
|
+
|
|
546
|
+
# =========================================================================
|
|
547
|
+
# Story CRUD
|
|
548
|
+
# =========================================================================
|
|
549
|
+
|
|
550
|
+
def save_story(self, story: Story, project_id: str) -> str:
    """Upsert *story* under *project_id* and rebuild its link tables.

    updated_at is stamped with the current UTC time on every save. On
    conflict (existing id), user_id and visibility are COALESCEd so an
    update carrying NULLs never clears previously stored values. The
    file/commit/session link rows are replaced wholesale (delete + insert).
    Returns the story ID.
    """
    now = datetime.now(timezone.utc).isoformat()

    with self.connect() as conn:
        # Insert or update story — the 37 columns listed below correspond
        # 1:1, in order, with the 37 placeholders and the values tuple.
        conn.execute(
            """
            INSERT INTO stories (
                id, project_id, created_at, updated_at,
                title, problem, approach, tradeoffs, outcome,
                category, scope, technologies, started_at, ended_at,
                implementation_details, decisions, lessons,
                hook, what, value, insight, show, diagram, post_body,
                public_post, public_show, internal_post, internal_show, internal_details,
                file_changes, key_snippets, total_insertions, total_deletions,
                author_name, author_email, user_id, visibility
            ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
            ON CONFLICT(id) DO UPDATE SET
                updated_at=excluded.updated_at,
                title=excluded.title,
                problem=excluded.problem,
                approach=excluded.approach,
                tradeoffs=excluded.tradeoffs,
                outcome=excluded.outcome,
                category=excluded.category,
                scope=excluded.scope,
                technologies=excluded.technologies,
                started_at=excluded.started_at,
                ended_at=excluded.ended_at,
                implementation_details=excluded.implementation_details,
                decisions=excluded.decisions,
                lessons=excluded.lessons,
                hook=excluded.hook,
                what=excluded.what,
                value=excluded.value,
                insight=excluded.insight,
                show=excluded.show,
                diagram=excluded.diagram,
                post_body=excluded.post_body,
                public_post=excluded.public_post,
                public_show=excluded.public_show,
                internal_post=excluded.internal_post,
                internal_show=excluded.internal_show,
                internal_details=excluded.internal_details,
                file_changes=excluded.file_changes,
                key_snippets=excluded.key_snippets,
                total_insertions=excluded.total_insertions,
                total_deletions=excluded.total_deletions,
                author_name=excluded.author_name,
                author_email=excluded.author_email,
                user_id=COALESCE(excluded.user_id, stories.user_id),
                visibility=COALESCE(excluded.visibility, stories.visibility)
            """,
            (
                story.id,
                project_id,
                _datetime_to_iso(story.created_at),
                now,  # updated_at — stamped on every save
                story.title,
                story.problem,
                story.approach,
                story.tradeoffs,
                story.outcome,
                story.category,
                story.scope,
                _serialize_json_list(story.technologies),
                _datetime_to_iso(story.started_at),
                _datetime_to_iso(story.ended_at),
                _serialize_json_list(story.implementation_details),
                _serialize_json_list(story.decisions),
                _serialize_json_list(story.lessons),
                story.hook,
                story.what,
                story.value,
                story.insight,
                story.show,
                story.diagram,
                story.post_body,
                story.public_post,
                story.public_show,
                story.internal_post,
                story.internal_show,
                _serialize_json_list(story.internal_details),
                _serialize_json(story.file_changes),  # may hold Pydantic models
                _serialize_json(story.key_snippets),  # may hold Pydantic models
                story.total_insertions,
                story.total_deletions,
                story.author_name,
                story.author_email,
                story.user_id,
                story.visibility,
            )
        )

        # Update file relationships (delete-then-insert keeps links exact)
        conn.execute("DELETE FROM story_files WHERE story_id = ?", (story.id,))
        if story.files:
            conn.executemany(
                "INSERT INTO story_files (story_id, file_path) VALUES (?, ?)",
                [(story.id, f) for f in story.files]
            )

        # Update commit relationships
        conn.execute("DELETE FROM story_commits WHERE story_id = ?", (story.id,))
        if story.commit_shas:
            conn.executemany(
                "INSERT INTO story_commits (story_id, commit_sha) VALUES (?, ?)",
                [(story.id, sha) for sha in story.commit_shas]
            )

        # Update session relationships
        conn.execute("DELETE FROM story_sessions WHERE story_id = ?", (story.id,))
        if story.session_ids:
            conn.executemany(
                "INSERT INTO story_sessions (story_id, session_id) VALUES (?, ?)",
                [(story.id, sid) for sid in story.session_ids]
            )

    return story.id
|
|
670
|
+
|
|
671
|
+
def update_story_visibility(self, story_id: str, visibility: str) -> bool:
    """Set a story's visibility; returns True when a row was actually updated."""
    stamped = datetime.now(timezone.utc).isoformat()
    with self.connect() as conn:
        result = conn.execute(
            "UPDATE stories SET visibility = ?, updated_at = ? WHERE id = ?",
            (visibility, stamped, story_id),
        )
        return result.rowcount > 0
|
|
679
|
+
|
|
680
|
+
def update_story_user_id(self, story_id: str, user_id: str) -> bool:
    """Link a story to a cloud account; returns True when a row was updated."""
    stamped = datetime.now(timezone.utc).isoformat()
    with self.connect() as conn:
        result = conn.execute(
            "UPDATE stories SET user_id = ?, updated_at = ? WHERE id = ?",
            (user_id, stamped, story_id),
        )
        return result.rowcount > 0
|
|
688
|
+
|
|
689
|
+
def _row_to_story(self, row: sqlite3.Row, conn: sqlite3.Connection) -> Story:
    """Convert a database row to a Story object.

    *conn* is reused to fetch the story's related files, commits, and
    sessions. Columns added by later schema versions are read via the
    _get() helper, which tolerates their absence so rows from older,
    un-migrated databases still load.
    """
    story_id = row["id"]

    # Get related files
    files = [
        r["file_path"] for r in
        conn.execute("SELECT file_path FROM story_files WHERE story_id = ?", (story_id,))
    ]

    # Get related commits
    commit_shas = [
        r["commit_sha"] for r in
        conn.execute("SELECT commit_sha FROM story_commits WHERE story_id = ?", (story_id,))
    ]

    # Get related sessions
    session_ids = [
        r["session_id"] for r in
        conn.execute("SELECT session_id FROM story_sessions WHERE story_id = ?", (story_id,))
    ]

    # Helper to safely get a column that may not exist in older schemas
    def _get(col: str, default=""):
        return row[col] if col in row.keys() else default

    return Story(
        id=story_id,
        project_id=row["project_id"],
        # Unparseable timestamps fall back to "now" instead of failing the load
        created_at=_iso_to_datetime(row["created_at"]) or datetime.now(timezone.utc),
        updated_at=_iso_to_datetime(row["updated_at"]) or datetime.now(timezone.utc),
        title=row["title"],
        problem=row["problem"] or "",
        approach=row["approach"] or "",
        tradeoffs=row["tradeoffs"] or "",
        outcome=row["outcome"] or "",
        category=row["category"] or "feature",
        scope=row["scope"] or "internal",
        technologies=_deserialize_json_list(row["technologies"]) if "technologies" in row.keys() else [],
        started_at=_iso_to_datetime(row["started_at"]),
        ended_at=_iso_to_datetime(row["ended_at"]),
        implementation_details=_deserialize_json_list(row["implementation_details"]),
        decisions=_deserialize_json_list(row["decisions"]),
        lessons=_deserialize_json_list(row["lessons"]),
        files=files,
        commit_shas=commit_shas,
        session_ids=session_ids,
        # Tripartite Codex fields
        hook=_get("hook", ""),
        what=_get("what", ""),
        value=_get("value", ""),
        insight=_get("insight", ""),
        show=_get("show", None),
        diagram=_get("diagram", None),
        post_body=_get("post_body", ""),
        # Legacy fields
        public_post=_get("public_post", ""),
        public_show=_get("public_show", None),
        internal_post=_get("internal_post", ""),
        internal_show=_get("internal_show", None),
        internal_details=_deserialize_json_list(_get("internal_details", "[]")),
        # Recall data
        file_changes=_deserialize_file_changes(_get("file_changes", "[]")),
        key_snippets=_deserialize_key_snippets(_get("key_snippets", "[]")),
        total_insertions=int(_get("total_insertions", 0) or 0),
        total_deletions=int(_get("total_deletions", 0) or 0),
        author_name=_get("author_name", "unknown"),
        author_email=_get("author_email", ""),
        # Cloud sync fields
        user_id=_get("user_id", None) or None,
        visibility=_get("visibility", "private"),
    )
|
|
761
|
+
|
|
762
|
+
def get_story(self, story_id: str) -> Story | None:
    """Fetch a single story by its ID, or None when no such row exists."""
    with self.connect() as conn:
        cursor = conn.execute("SELECT * FROM stories WHERE id = ?", (story_id,))
        row = cursor.fetchone()
        if row is None:
            return None
        return self._row_to_story(row, conn)
775
|
+
def delete_story(self, story_id: str) -> bool:
    """Remove the story with the given ID; return True if a row was deleted."""
    with self.connect() as conn:
        deleted = conn.execute("DELETE FROM stories WHERE id = ?", (story_id,)).rowcount
        return deleted > 0
|
781
|
+
def list_stories(
    self,
    project_id: str | None = None,
    category: str | None = None,
    since: datetime | None = None,
    limit: int = 100,
) -> list[Story]:
    """Return stories, newest first, optionally filtered by project, category, and date.

    Args:
        project_id: Restrict to a single project when given.
        category: Restrict to one story category when given.
        since: Keep stories whose start OR creation time is at/after this moment.
        limit: Maximum number of rows returned.
    """
    where_parts: list[str] = []
    bind_values: list = []

    if project_id:
        where_parts.append("project_id = ?")
        bind_values.append(project_id)

    if category:
        where_parts.append("category = ?")
        bind_values.append(category)

    if since:
        # A story qualifies if either timestamp is recent enough, since
        # started_at may be NULL for older rows.
        cutoff = _datetime_to_iso(since)
        where_parts.append("(started_at >= ? OR created_at >= ?)")
        bind_values += [cutoff, cutoff]

    where_sql = " AND ".join(where_parts) if where_parts else "1=1"

    with self.connect() as conn:
        rows = conn.execute(
            f"""
            SELECT * FROM stories
            WHERE {where_sql}
            ORDER BY COALESCE(started_at, created_at) DESC
            LIMIT ?
            """,
            [*bind_values, limit],
        ).fetchall()

        return [self._row_to_story(r, conn) for r in rows]
|
820
|
+
def search_stories(
    self,
    query: str,
    files: list[str] | None = None,
    limit: int = 20,
) -> list[Story]:
    """
    Search stories using FTS5.

    Args:
        query: Search query (keywords). Arbitrary user text is accepted: if it
            is not valid FTS5 syntax, the search is retried with every term
            quoted as a literal instead of raising.
        files: Optional file paths to filter by
        limit: Maximum results

    Returns:
        List of matching stories, scored by relevance (bm25; lower is better,
        so ORDER BY score ascending returns best matches first).
    """
    import sqlite3

    with self.connect() as conn:
        if files:
            # Search with file filter using JOIN; DISTINCT collapses stories
            # that match through more than one of the requested files.
            placeholders = ",".join("?" * len(files))
            sql = f"""
                SELECT DISTINCT s.*, bm25(stories_fts) as score
                FROM stories s
                JOIN stories_fts fts ON s.rowid = fts.rowid
                JOIN story_files sf ON s.id = sf.story_id
                WHERE stories_fts MATCH ?
                AND sf.file_path IN ({placeholders})
                ORDER BY score
                LIMIT ?
            """
            params: list = [query] + files + [limit]
        else:
            # Pure FTS search
            sql = """
                SELECT s.*, bm25(stories_fts) as score
                FROM stories s
                JOIN stories_fts fts ON s.rowid = fts.rowid
                WHERE stories_fts MATCH ?
                ORDER BY score
                LIMIT ?
            """
            params = [query, limit]

        try:
            rows = conn.execute(sql, params).fetchall()
        except sqlite3.OperationalError:
            # User input such as an unbalanced quote or a stray operator
            # (e.g. 'foo"', 'a-b', 'NEAR') is an FTS5 syntax error. Retry
            # with each whitespace-separated term quoted as a literal.
            safe_terms = " ".join(
                '"{}"'.format(term.replace('"', '""')) for term in query.split()
            )
            if not safe_terms:
                return []
            params[0] = safe_terms
            try:
                rows = conn.execute(sql, params).fetchall()
            except sqlite3.OperationalError:
                # Still unparseable — treat as "no matches" rather than crash.
                return []

        return [self._row_to_story(row, conn) for row in rows]
|
870
|
+
def get_stories_by_file(self, file_path: str, limit: int = 20) -> list[Story]:
    """Return the most recently created stories that touch *file_path*."""
    query = """
        SELECT s.* FROM stories s
        JOIN story_files sf ON s.id = sf.story_id
        WHERE sf.file_path = ?
        ORDER BY s.created_at DESC
        LIMIT ?
    """
    with self.connect() as conn:
        matches = conn.execute(query, (file_path, limit)).fetchall()
        return [self._row_to_story(m, conn) for m in matches]
|
886
|
+
def get_stories_by_commit(self, commit_sha: str) -> list[Story]:
    """Return stories containing *commit_sha* (exact match or SHA prefix)."""
    query = """
        SELECT s.* FROM stories s
        JOIN story_commits sc ON s.id = sc.story_id
        WHERE sc.commit_sha = ? OR sc.commit_sha LIKE ?
        ORDER BY s.created_at DESC
    """
    with self.connect() as conn:
        # The LIKE pattern lets callers pass an abbreviated SHA prefix.
        matches = conn.execute(query, (commit_sha, commit_sha + "%")).fetchall()
        return [self._row_to_story(m, conn) for m in matches]
|
901
|
+
def get_processed_commits(self, project_id: str) -> set[str]:
    """Return every commit SHA already attached to a story in this project."""
    query = """
        SELECT DISTINCT sc.commit_sha
        FROM story_commits sc
        JOIN stories s ON sc.story_id = s.id
        WHERE s.project_id = ?
    """
    with self.connect() as conn:
        return {r["commit_sha"] for r in conn.execute(query, (project_id,)).fetchall()}
|
915
|
+
# =========================================================================
|
|
916
|
+
# Migration
|
|
917
|
+
# =========================================================================
|
|
918
|
+
|
|
919
|
+
def import_from_store(self, store: "ReprStore", project_path: Path) -> int:
    """
    Import stories from a ReprStore (JSON) into SQLite.

    Args:
        store: ReprStore loaded from .repr/store.json
        project_path: Path to the project

    Returns:
        Number of stories imported
    """
    # Make sure the project exists before attaching stories to it.
    project_id = self.register_project(project_path, project_path.name)

    count = 0
    for story in store.stories:
        self.save_story(story, project_id)
        count += 1
    return count
|
940
|
+
def get_stats(self) -> dict:
    """Summarize database contents: row counts, category breakdown, file size."""
    with self.connect() as conn:
        def _count(sql: str) -> int:
            # Every query here returns a single scalar in column 0.
            return conn.execute(sql).fetchone()[0]

        stories = _count("SELECT COUNT(*) FROM stories")
        projects = _count("SELECT COUNT(*) FROM projects")
        files = _count("SELECT COUNT(DISTINCT file_path) FROM story_files")
        commits = _count("SELECT COUNT(DISTINCT commit_sha) FROM story_commits")

        # Category breakdown
        categories = {
            row["category"]: row["cnt"]
            for row in conn.execute(
                "SELECT category, COUNT(*) as cnt FROM stories GROUP BY category"
            )
        }

        size_bytes = self.db_path.stat().st_size if self.db_path.exists() else 0
        return {
            "story_count": stories,
            "project_count": projects,
            "unique_files": files,
            "unique_commits": commits,
            "categories": categories,
            "db_path": str(self.db_path),
            "db_size_bytes": size_bytes,
        }
|
963
|
+
|
|
964
|
+
# Module-level singleton database handle; lazily created by get_db() and
# cleared by reset_db_instance() (testing hook).
_db_instance: ReprDatabase | None = None
|
|
966
|
+
|
|
967
|
+
|
|
968
|
+
def get_db() -> ReprDatabase:
    """Return the process-wide database, creating and initializing it on first use."""
    global _db_instance
    if _db_instance is None:
        instance = ReprDatabase()
        instance.init_schema()
        _db_instance = instance
    return _db_instance
|
|
975
|
+
|
|
976
|
+
|
|
977
|
+
def reset_db_instance():
    """Drop the cached singleton so the next get_db() builds a fresh one (for testing)."""
    global _db_instance
    _db_instance = None
|