opencode-lcm 0.11.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (65):
  1. package/CHANGELOG.md +83 -0
  2. package/LICENSE +21 -0
  3. package/README.md +207 -0
  4. package/dist/archive-transform.d.ts +45 -0
  5. package/dist/archive-transform.js +81 -0
  6. package/dist/constants.d.ts +12 -0
  7. package/dist/constants.js +16 -0
  8. package/dist/doctor.d.ts +22 -0
  9. package/dist/doctor.js +44 -0
  10. package/dist/index.d.ts +4 -0
  11. package/dist/index.js +306 -0
  12. package/dist/logging.d.ts +14 -0
  13. package/dist/logging.js +28 -0
  14. package/dist/options.d.ts +3 -0
  15. package/dist/options.js +217 -0
  16. package/dist/preview-providers.d.ts +20 -0
  17. package/dist/preview-providers.js +246 -0
  18. package/dist/privacy.d.ts +16 -0
  19. package/dist/privacy.js +92 -0
  20. package/dist/search-ranking.d.ts +12 -0
  21. package/dist/search-ranking.js +98 -0
  22. package/dist/sql-utils.d.ts +31 -0
  23. package/dist/sql-utils.js +80 -0
  24. package/dist/store-artifacts.d.ts +50 -0
  25. package/dist/store-artifacts.js +374 -0
  26. package/dist/store-retention.d.ts +39 -0
  27. package/dist/store-retention.js +90 -0
  28. package/dist/store-search.d.ts +37 -0
  29. package/dist/store-search.js +298 -0
  30. package/dist/store-snapshot.d.ts +133 -0
  31. package/dist/store-snapshot.js +325 -0
  32. package/dist/store-types.d.ts +14 -0
  33. package/dist/store-types.js +5 -0
  34. package/dist/store.d.ts +316 -0
  35. package/dist/store.js +3673 -0
  36. package/dist/types.d.ts +117 -0
  37. package/dist/types.js +1 -0
  38. package/dist/utils.d.ts +35 -0
  39. package/dist/utils.js +414 -0
  40. package/dist/workspace-path.d.ts +1 -0
  41. package/dist/workspace-path.js +15 -0
  42. package/dist/worktree-key.d.ts +1 -0
  43. package/dist/worktree-key.js +6 -0
  44. package/package.json +61 -0
  45. package/src/archive-transform.ts +147 -0
  46. package/src/bun-sqlite.d.ts +18 -0
  47. package/src/constants.ts +20 -0
  48. package/src/doctor.ts +83 -0
  49. package/src/index.ts +330 -0
  50. package/src/logging.ts +41 -0
  51. package/src/options.ts +297 -0
  52. package/src/preview-providers.ts +298 -0
  53. package/src/privacy.ts +122 -0
  54. package/src/search-ranking.ts +145 -0
  55. package/src/sql-utils.ts +107 -0
  56. package/src/store-artifacts.ts +666 -0
  57. package/src/store-retention.ts +152 -0
  58. package/src/store-search.ts +440 -0
  59. package/src/store-snapshot.ts +582 -0
  60. package/src/store-types.ts +16 -0
  61. package/src/store.ts +4926 -0
  62. package/src/types.ts +132 -0
  63. package/src/utils.ts +444 -0
  64. package/src/workspace-path.ts +20 -0
  65. package/src/worktree-key.ts +5 -0
@@ -0,0 +1,152 @@
1
import type { SqlDatabaseLike } from './store-types.js';

/**
 * Retention policy operations.
 * Handles stale/deleted session pruning and orphan blob cleanup.
 */

/**
 * A sessions-table row eligible for pruning, enriched with message and
 * artifact counts so callers can report what a prune would remove.
 */
export type RetentionSessionCandidate = {
  session_id: string;
  title: string | null;
  session_directory: string | null;
  root_session_id: string | null;
  pinned: number; // SQLite boolean (0/1); candidates always have pinned = 0
  deleted: number; // SQLite boolean (0/1)
  updated_at: number; // epoch ms, compared against retentionCutoff()
  event_count: number;
  message_count: number; // COUNT(*) over messages for this session
  artifact_count: number; // COUNT(*) over artifacts for this session
};

/** An artifact blob older than the cutoff with no referencing artifact row. */
export type RetentionBlobCandidate = {
  content_hash: string;
  char_count: number; // size proxy; big blobs are pruned first (ORDER BY char_count DESC)
  created_at: number; // epoch ms
};

/** Effective retention windows in days; an absent field disables that policy. */
export type ResolvedRetentionPolicy = {
  staleSessionDays?: number;
  deletedSessionDays?: number;
  orphanBlobDays?: number;
};

/** Tally of what a retention prune removed. */
export type RetentionPruneResult = {
  deletedSessions: number;
  deletedBlobs: number;
  deletedBlobChars: number;
};
38
+
39
+ export function retentionCutoff(days: number): number {
40
+ return Date.now() - days * 24 * 60 * 60 * 1000;
41
+ }
42
+
43
+ export function readSessionRetentionCandidates(
44
+ db: SqlDatabaseLike,
45
+ deleted: boolean,
46
+ days: number,
47
+ limit?: number,
48
+ ): RetentionSessionCandidate[] {
49
+ const params: Array<number | string> = [retentionCutoff(days), deleted ? 1 : 0];
50
+ const sql = `
51
+ SELECT
52
+ s.session_id,
53
+ s.title,
54
+ s.session_directory,
55
+ s.root_session_id,
56
+ s.pinned,
57
+ s.deleted,
58
+ s.updated_at,
59
+ s.event_count,
60
+ (SELECT COUNT(*) FROM messages m WHERE m.session_id = s.session_id) AS message_count,
61
+ (SELECT COUNT(*) FROM artifacts a WHERE a.session_id = s.session_id) AS artifact_count
62
+ FROM sessions s
63
+ WHERE s.updated_at <= ?
64
+ AND s.deleted = ?
65
+ AND s.pinned = 0
66
+ AND NOT EXISTS (
67
+ SELECT 1 FROM sessions child WHERE child.parent_session_id = s.session_id
68
+ )
69
+ ORDER BY s.updated_at ASC
70
+ ${limit ? 'LIMIT ?' : ''}`;
71
+
72
+ if (limit) params.push(limit);
73
+ return db.prepare(sql).all(...params) as RetentionSessionCandidate[];
74
+ }
75
+
76
+ export function countSessionRetentionCandidates(
77
+ db: SqlDatabaseLike,
78
+ deleted: boolean,
79
+ days: number,
80
+ ): number {
81
+ const row = db
82
+ .prepare(
83
+ `SELECT COUNT(*) AS count
84
+ FROM sessions s
85
+ WHERE s.updated_at <= ?
86
+ AND s.deleted = ?
87
+ AND s.pinned = 0
88
+ AND NOT EXISTS (
89
+ SELECT 1 FROM sessions child WHERE child.parent_session_id = s.session_id
90
+ )`,
91
+ )
92
+ .get(retentionCutoff(days), deleted ? 1 : 0) as { count: number };
93
+ return row.count;
94
+ }
95
+
96
+ export function readOrphanBlobRetentionCandidates(
97
+ db: SqlDatabaseLike,
98
+ days: number,
99
+ limit?: number,
100
+ ): RetentionBlobCandidate[] {
101
+ const params: Array<number> = [retentionCutoff(days)];
102
+ const sql = `
103
+ SELECT content_hash, char_count, created_at
104
+ FROM artifact_blobs b
105
+ WHERE b.created_at <= ?
106
+ AND NOT EXISTS (
107
+ SELECT 1 FROM artifacts a WHERE a.content_hash = b.content_hash
108
+ )
109
+ ORDER BY char_count DESC, created_at ASC
110
+ ${limit ? 'LIMIT ?' : ''}`;
111
+ if (limit) params.push(limit);
112
+ return db.prepare(sql).all(...params) as RetentionBlobCandidate[];
113
+ }
114
+
115
+ export function countOrphanBlobRetentionCandidates(db: SqlDatabaseLike, days: number): number {
116
+ const row = db
117
+ .prepare(
118
+ `SELECT COUNT(*) AS count
119
+ FROM artifact_blobs b
120
+ WHERE b.created_at <= ?
121
+ AND NOT EXISTS (
122
+ SELECT 1 FROM artifacts a WHERE a.content_hash = b.content_hash
123
+ )`,
124
+ )
125
+ .get(retentionCutoff(days)) as { count: number };
126
+ return row.count;
127
+ }
128
+
129
+ export function sumOrphanBlobRetentionChars(db: SqlDatabaseLike, days: number): number {
130
+ const row = db
131
+ .prepare(
132
+ `SELECT COALESCE(SUM(char_count), 0) AS chars
133
+ FROM artifact_blobs b
134
+ WHERE b.created_at <= ?
135
+ AND NOT EXISTS (
136
+ SELECT 1 FROM artifacts a WHERE a.content_hash = b.content_hash
137
+ )`,
138
+ )
139
+ .get(retentionCutoff(days)) as { chars: number };
140
+ return row.chars;
141
+ }
142
+
143
+ export function clearSessionData(db: SqlDatabaseLike, sessionID: string): void {
144
+ db.prepare('DELETE FROM parts WHERE session_id = ?').run(sessionID);
145
+ db.prepare('DELETE FROM messages WHERE session_id = ?').run(sessionID);
146
+ db.prepare('DELETE FROM artifacts WHERE session_id = ?').run(sessionID);
147
+ db.prepare('DELETE FROM sessions WHERE session_id = ?').run(sessionID);
148
+ db.prepare('DELETE FROM resumes WHERE session_id = ?').run(sessionID);
149
+ db.prepare('DELETE FROM summary_nodes WHERE session_id = ?').run(sessionID);
150
+ db.prepare('DELETE FROM summary_edges WHERE session_id = ?').run(sessionID);
151
+ db.prepare('DELETE FROM summary_state WHERE session_id = ?').run(sessionID);
152
+ }
@@ -0,0 +1,440 @@
1
import { getLogger } from './logging.js';
import { rankSearchCandidates, type SearchCandidate } from './search-ranking.js';
import type { ArtifactRow, SummaryNodeRow } from './store-snapshot.js';
import type { SqlDatabaseLike } from './store-types.js';
import type { NormalizedSession, SearchResult } from './types.js';
import { buildSnippet, sanitizeFtsTokens, tokenizeQuery } from './utils.js';

/**
 * Dependencies injected by the store so these search helpers stay decoupled
 * from its internals. For the readScoped* readers, an `undefined` sessionIDs
 * argument means "all sessions".
 */
type FtsDeps = {
  getDb(): SqlDatabaseLike;
  readScopedSessionsSync(sessionIDs?: string[]): NormalizedSession[];
  readScopedSummaryRowsSync(sessionIDs?: string[]): SummaryNodeRow[];
  readScopedArtifactRowsSync(sessionIDs?: string[]): ArtifactRow[];
  // Builds the text that gets indexed/searched for an artifact row.
  buildArtifactSearchContent(row: ArtifactRow): string;
  // Passed to guessMessageText; presumably tool-name prefixes whose output
  // is excluded from message text — confirm against the store's implementation.
  ignoreToolPrefixes: string[];
  // Extracts searchable text from a message ('' means nothing to index).
  guessMessageText(
    message: NormalizedSession['messages'][number],
    ignorePrefixes: string[],
  ): string;
};
20
+
21
+ function deleteScopedFtsRows(
22
+ db: SqlDatabaseLike,
23
+ table: 'message_fts' | 'summary_fts' | 'artifact_fts',
24
+ sessionIDs?: string[],
25
+ ): void {
26
+ if (!sessionIDs) {
27
+ db.prepare(`DELETE FROM ${table}`).run();
28
+ return;
29
+ }
30
+ if (sessionIDs.length === 0) return;
31
+
32
+ db.prepare(
33
+ `DELETE FROM ${table} WHERE session_id IN (${sessionIDs.map(() => '?').join(', ')})`,
34
+ ).run(...sessionIDs);
35
+ }
36
+
37
+ export function buildFtsQuery(query: string): string | undefined {
38
+ const phrases = [...query.matchAll(/"([^"]+)"/g)]
39
+ .map((match) => sanitizeFtsTokens(tokenizeQuery(match[1])).join(' '))
40
+ .filter(Boolean)
41
+ .map((phrase) => `"${phrase}"`);
42
+ const remainder = query.replace(/"[^"]+"/g, ' ');
43
+ const tokens = sanitizeFtsTokens(tokenizeQuery(remainder)).map((token) => `${token}*`);
44
+ const parts = [...phrases, ...tokens];
45
+ if (parts.length === 0) return undefined;
46
+ return parts.join(' AND ');
47
+ }
48
+
49
+ /**
50
+ * Compute TF-IDF weights for candidate query tokens against the FTS5 corpus.
51
+ * Returns tokens sorted by descending IDF score (most informative first).
52
+ * Tokens that appear in >80% of documents are dropped as corpus-common noise.
53
+ *
54
+ * Uses a single FTS5 query per token to get document frequency, which is
55
+ * acceptable since automatic retrieval works with ≤10 candidate tokens.
56
+ */
57
+ function getTotalDocCount(db: SqlDatabaseLike): number {
58
+ const messageCount = (db.prepare('SELECT COUNT(*) AS count FROM message_fts').get() as {
59
+ count: number;
60
+ }) ?? { count: 0 };
61
+ const summaryCount = (db.prepare('SELECT COUNT(*) AS count FROM summary_fts').get() as {
62
+ count: number;
63
+ }) ?? { count: 0 };
64
+ const artifactCount = (db.prepare('SELECT COUNT(*) AS count FROM artifact_fts').get() as {
65
+ count: number;
66
+ }) ?? { count: 0 };
67
+ return Math.max(1, messageCount.count + summaryCount.count + artifactCount.count);
68
+ }
69
+
70
+ export function computeTfidfWeights(
71
+ db: SqlDatabaseLike,
72
+ candidateTokens: string[],
73
+ ): Array<{ token: string; idf: number; docFreq: number }> {
74
+ if (candidateTokens.length === 0) return [];
75
+
76
+ const totalDocs = getTotalDocCount(db);
77
+
78
+ const results: Array<{ token: string; idf: number; docFreq: number }> = [];
79
+
80
+ for (const token of candidateTokens) {
81
+ // Query document frequency across all FTS tables
82
+ // FTS5 MATCH 'token*' finds all documents containing terms with this prefix
83
+ const query = `${token}*`;
84
+ let docFreq = 0;
85
+
86
+ try {
87
+ const msgFreq = db
88
+ .prepare('SELECT COUNT(*) AS count FROM message_fts WHERE message_fts MATCH ?')
89
+ .get(query) as { count: number } | undefined;
90
+ docFreq += msgFreq?.count ?? 0;
91
+ } catch (error) {
92
+ getLogger().debug('TF-IDF message_fts query failed for token', { token, error });
93
+ }
94
+
95
+ try {
96
+ const sumFreq = db
97
+ .prepare('SELECT COUNT(*) AS count FROM summary_fts WHERE summary_fts MATCH ?')
98
+ .get(query) as { count: number } | undefined;
99
+ docFreq += sumFreq?.count ?? 0;
100
+ } catch (error) {
101
+ getLogger().debug('TF-IDF summary_fts query failed for token', { token, error });
102
+ }
103
+
104
+ try {
105
+ const artFreq = db
106
+ .prepare('SELECT COUNT(*) AS count FROM artifact_fts WHERE artifact_fts MATCH ?')
107
+ .get(query) as { count: number } | undefined;
108
+ docFreq += artFreq?.count ?? 0;
109
+ } catch (error) {
110
+ getLogger().debug('TF-IDF artifact_fts query failed for token', { token, error });
111
+ }
112
+
113
+ // Smoothed IDF: log(N / (df + 1)) + 1
114
+ // Smoothing prevents division by zero and ensures non-zero weights
115
+ const idf = Math.log(totalDocs / (docFreq + 1)) + 1;
116
+ results.push({ token, idf, docFreq });
117
+ }
118
+
119
+ // Sort by descending IDF — most informative tokens first
120
+ results.sort((a, b) => b.idf - a.idf);
121
+
122
+ return results;
123
+ }
124
+
125
+ /**
126
+ * Filter candidate tokens using TF-IDF weights.
127
+ * Drops tokens whose IDF is below the median (corpus-common terms)
128
+ * and tokens that appear in >80% of documents.
129
+ * Returns tokens sorted by descending IDF.
130
+ */
131
+ export function filterTokensByTfidf(
132
+ db: SqlDatabaseLike,
133
+ candidateTokens: string[],
134
+ options?: { maxCommonRatio?: number; minTokens?: number },
135
+ ): string[] {
136
+ const { maxCommonRatio = 0.8, minTokens = 1 } = options ?? {};
137
+
138
+ const weights = computeTfidfWeights(db, candidateTokens);
139
+ if (weights.length === 0) return candidateTokens;
140
+
141
+ // Get total docs for common-ratio threshold (already computed inside computeTfidfWeights, but needed here for ratio)
142
+ const totalDocs = getTotalDocCount(db);
143
+
144
+ // Compute median IDF
145
+ const sortedIdfs = weights.map((w) => w.idf).sort((a, b) => a - b);
146
+ const medianIdf =
147
+ sortedIdfs.length % 2 === 0
148
+ ? (sortedIdfs[sortedIdfs.length / 2 - 1] + sortedIdfs[sortedIdfs.length / 2]) / 2
149
+ : sortedIdfs[Math.floor(sortedIdfs.length / 2)];
150
+
151
+ // Filter: keep tokens with IDF >= median AND below common-ratio threshold
152
+ // Always keep at least minTokens tokens (the highest-IDF ones)
153
+ const filtered = weights.filter((w) => {
154
+ const docRatio = w.docFreq / totalDocs;
155
+ return w.idf >= medianIdf && docRatio <= maxCommonRatio;
156
+ });
157
+
158
+ // Ensure minimum token count
159
+ if (filtered.length < minTokens) {
160
+ return weights.slice(0, minTokens).map((w) => w.token);
161
+ }
162
+
163
+ return filtered.map((w) => w.token);
164
+ }
165
+
166
/**
 * Full-text search across messages, summaries, and artifacts via FTS5.
 * Over-fetches candidates from each table and defers final ordering and
 * trimming to rankSearchCandidates. Returns [] when the query yields no
 * usable FTS expression, when the session scope is explicitly empty, or
 * when any FTS query throws (failure is logged at debug level).
 */
export function searchWithFts(
  deps: FtsDeps,
  query: string,
  sessionIDs?: string[],
  limit = 5,
): SearchResult[] {
  const ftsQuery = buildFtsQuery(query);
  if (!ftsQuery) return [];
  // An explicit empty scope means "search nothing"; undefined means "all sessions".
  if (sessionIDs && sessionIDs.length === 0) return [];

  try {
    const db = deps.getDb();
    // Over-fetch per table so the reranker has enough candidates to choose from.
    const fetchLimit = Math.max(limit * 8, 12);
    // Optional "session_id IN (...) AND " prefix for each WHERE clause.
    const buildScopeClause = (ids: string[] | undefined) => {
      if (!ids) return { clause: '', params: [] as string[] };
      return {
        clause: `session_id IN (${ids.map(() => '?').join(', ')}) AND `,
        params: ids,
      };
    };
    const scope = buildScopeClause(sessionIDs);

    // bm25() ranks ascending (smaller = more relevant) per the SQLite FTS5 docs.
    const messageRows = db
      .prepare(
        `SELECT message_id, session_id, role, created_at, content, snippet(message_fts, 4, '[', ']', '...', 12) AS snippet, bm25(message_fts) AS rank
         FROM message_fts
         WHERE ${scope.clause}message_fts MATCH ?
         ORDER BY rank, created_at DESC
         LIMIT ?`,
      )
      .all(...scope.params, ftsQuery, fetchLimit) as Array<{
      message_id: string;
      session_id: string;
      role: string;
      created_at: string | number;
      content: string;
      snippet: string;
      rank: number;
    }>;

    const summaryRows = db
      .prepare(
        `SELECT node_id, session_id, created_at, content, snippet(summary_fts, 4, '[', ']', '...', 14) AS snippet, bm25(summary_fts) AS rank
         FROM summary_fts
         WHERE ${scope.clause}summary_fts MATCH ?
         ORDER BY rank, created_at DESC
         LIMIT ?`,
      )
      .all(...scope.params, ftsQuery, fetchLimit) as Array<{
      node_id: string;
      session_id: string;
      created_at: string | number;
      content: string;
      snippet: string;
      rank: number;
    }>;

    const artifactRows = db
      .prepare(
        `SELECT artifact_id, session_id, artifact_kind, created_at, content, snippet(artifact_fts, 6, '[', ']', '...', 14) AS snippet, bm25(artifact_fts) AS rank
         FROM artifact_fts
         WHERE ${scope.clause}artifact_fts MATCH ?
         ORDER BY rank, created_at DESC
         LIMIT ?`,
      )
      .all(...scope.params, ftsQuery, fetchLimit) as Array<{
      artifact_id: string;
      session_id: string;
      artifact_kind: string;
      created_at: string | number;
      content: string;
      snippet: string;
      rank: number;
    }>;

    // Merge all sources into one candidate list. sourceOrder preserves each
    // table's FTS ordering for tie-breaking inside rankSearchCandidates; the
    // buildSnippet fallback covers rows where snippet() produced nothing.
    const candidates: SearchCandidate[] = [
      ...messageRows.map((row, index) => ({
        id: row.message_id,
        type: row.role,
        sessionID: row.session_id,
        timestamp: Number(row.created_at),
        snippet: row.snippet || buildSnippet(row.content, query),
        content: row.content,
        sourceKind: 'message' as const,
        sourceOrder: index,
      })),
      ...summaryRows.map((row, index) => ({
        id: row.node_id,
        type: 'summary',
        sessionID: row.session_id,
        timestamp: Number(row.created_at),
        snippet: row.snippet || buildSnippet(row.content, query),
        content: row.content,
        sourceKind: 'summary' as const,
        sourceOrder: index,
      })),
      ...artifactRows.map((row, index) => ({
        id: row.artifact_id,
        type: `artifact:${row.artifact_kind}`,
        sessionID: row.session_id,
        timestamp: Number(row.created_at),
        snippet: row.snippet || buildSnippet(row.content, query),
        content: row.content,
        sourceKind: 'artifact' as const,
        sourceOrder: index,
      })),
    ];

    return rankSearchCandidates(candidates, query, limit);
  } catch (error) {
    // Best-effort: callers fall back to other search paths on failure.
    getLogger().debug('FTS search failed, returning empty results', { query, error });
    return [];
  }
}
280
+
281
+ export function searchByScan(
282
+ deps: FtsDeps,
283
+ query: string,
284
+ sessionIDs?: string[],
285
+ limit = 5,
286
+ ): SearchResult[] {
287
+ const sessions = deps.readScopedSessionsSync(sessionIDs);
288
+ const candidates: SearchCandidate[] = [];
289
+
290
+ for (const session of sessions) {
291
+ for (const [index, message] of session.messages.entries()) {
292
+ const blob = deps.guessMessageText(message, deps.ignoreToolPrefixes);
293
+ if (!blob.toLowerCase().includes(query)) continue;
294
+
295
+ candidates.push({
296
+ id: message.info.id,
297
+ type: message.info.role,
298
+ sessionID: session.sessionID,
299
+ timestamp: message.info.time.created,
300
+ snippet: buildSnippet(blob, query),
301
+ content: blob,
302
+ sourceKind: 'message',
303
+ sourceOrder: index,
304
+ });
305
+ }
306
+ }
307
+
308
+ const summaryRows = deps.readScopedSummaryRowsSync(sessionIDs);
309
+
310
+ summaryRows.forEach((row, index) => {
311
+ if (!row.summary_text.toLowerCase().includes(query)) return;
312
+ candidates.push({
313
+ id: row.node_id,
314
+ type: 'summary',
315
+ sessionID: row.session_id,
316
+ timestamp: row.created_at,
317
+ snippet: buildSnippet(row.summary_text, query),
318
+ content: row.summary_text,
319
+ sourceKind: 'summary',
320
+ sourceOrder: index,
321
+ });
322
+ });
323
+
324
+ const artifactRows = deps.readScopedArtifactRowsSync(sessionIDs);
325
+
326
+ for (const [index, row] of artifactRows.entries()) {
327
+ const haystack = `${row.preview_text}\n${row.content_text}`.toLowerCase();
328
+ if (!haystack.includes(query)) continue;
329
+
330
+ candidates.push({
331
+ id: row.artifact_id,
332
+ type: `artifact:${row.artifact_kind}`,
333
+ sessionID: row.session_id,
334
+ timestamp: row.created_at,
335
+ snippet: buildSnippet(`${row.preview_text}\n${row.content_text}`, query),
336
+ content: row.content_text,
337
+ sourceKind: 'artifact',
338
+ sourceOrder: index,
339
+ });
340
+ }
341
+
342
+ return rankSearchCandidates(candidates, query, limit);
343
+ }
344
+
345
+ export function replaceMessageSearchRowsSync(deps: FtsDeps, session: NormalizedSession): void {
346
+ const db = deps.getDb();
347
+ db.prepare('DELETE FROM message_fts WHERE session_id = ?').run(session.sessionID);
348
+ insertMessageSearchRowsSync(deps, session);
349
+ }
350
+
351
+ function insertMessageSearchRowsSync(deps: FtsDeps, session: NormalizedSession): void {
352
+ const db = deps.getDb();
353
+ const insert = db.prepare(
354
+ 'INSERT INTO message_fts (session_id, message_id, role, created_at, content) VALUES (?, ?, ?, ?, ?)',
355
+ );
356
+
357
+ for (const message of session.messages) {
358
+ const content = deps.guessMessageText(message, deps.ignoreToolPrefixes);
359
+ if (!content) continue;
360
+ insert.run(
361
+ session.sessionID,
362
+ message.info.id,
363
+ message.info.role,
364
+ String(message.info.time.created),
365
+ content,
366
+ );
367
+ }
368
+ }
369
+
370
+ export function replaceMessageSearchRowSync(
371
+ deps: FtsDeps,
372
+ sessionID: string,
373
+ message: NormalizedSession['messages'][number],
374
+ ): void {
375
+ const db = deps.getDb();
376
+ db.prepare('DELETE FROM message_fts WHERE message_id = ?').run(message.info.id);
377
+
378
+ const content = deps.guessMessageText(message, deps.ignoreToolPrefixes);
379
+ if (!content) return;
380
+
381
+ db.prepare(
382
+ 'INSERT INTO message_fts (session_id, message_id, role, created_at, content) VALUES (?, ?, ?, ?, ?)',
383
+ ).run(sessionID, message.info.id, message.info.role, String(message.info.time.created), content);
384
+ }
385
+
386
+ export function replaceSummarySearchRowsSync(deps: FtsDeps, sessionIDs?: string[]): void {
387
+ const db = deps.getDb();
388
+ deleteScopedFtsRows(db, 'summary_fts', sessionIDs);
389
+
390
+ const summaryRows = deps.readScopedSummaryRowsSync(sessionIDs);
391
+ const insert = db.prepare(
392
+ 'INSERT INTO summary_fts (session_id, node_id, level, created_at, content) VALUES (?, ?, ?, ?, ?)',
393
+ );
394
+ for (const row of summaryRows) {
395
+ insert.run(
396
+ row.session_id,
397
+ row.node_id,
398
+ String(row.level),
399
+ String(row.created_at),
400
+ row.summary_text,
401
+ );
402
+ }
403
+ }
404
+
405
+ export function replaceArtifactSearchRowsSync(deps: FtsDeps, sessionIDs?: string[]): void {
406
+ const db = deps.getDb();
407
+ deleteScopedFtsRows(db, 'artifact_fts', sessionIDs);
408
+
409
+ const artifactRows = deps.readScopedArtifactRowsSync(sessionIDs);
410
+ const insert = db.prepare(
411
+ 'INSERT INTO artifact_fts (session_id, artifact_id, message_id, part_id, artifact_kind, created_at, content) VALUES (?, ?, ?, ?, ?, ?, ?)',
412
+ );
413
+ for (const row of artifactRows) {
414
+ insert.run(
415
+ row.session_id,
416
+ row.artifact_id,
417
+ row.message_id,
418
+ row.part_id,
419
+ row.artifact_kind,
420
+ String(row.created_at),
421
+ deps.buildArtifactSearchContent(row),
422
+ );
423
+ }
424
+ }
425
+
426
+ export function refreshSearchIndexesSync(deps: FtsDeps, sessionIDs?: string[]): void {
427
+ const db = deps.getDb();
428
+ deleteScopedFtsRows(db, 'message_fts', sessionIDs);
429
+
430
+ for (const session of deps.readScopedSessionsSync(sessionIDs)) {
431
+ insertMessageSearchRowsSync(deps, session);
432
+ }
433
+
434
+ replaceSummarySearchRowsSync(deps, sessionIDs);
435
+ replaceArtifactSearchRowsSync(deps, sessionIDs);
436
+ }
437
+
438
/** Full rebuild: refresh every FTS index with no session scoping. */
export function rebuildSearchIndexesSync(deps: FtsDeps): void {
  refreshSearchIndexesSync(deps);
}