claude-mem-lite 2.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/server.mjs ADDED
@@ -0,0 +1,938 @@
1
+ #!/usr/bin/env node
2
+ // claude-mem-lite MCP Server — All-in-one memory system
3
+ // FTS5 search, zero LLM calls, single process
4
+
5
+ import { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js';
6
+ import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js';
7
+ import { jaccardSimilarity, truncate, typeIcon, sanitizeFtsQuery, inferProject, computeMinHash, scrubSecrets, fmtDate, isoWeekKey, debugLog, debugCatch } from './utils.mjs';
8
+ import { ensureDb, DB_PATH } from './schema.mjs';
9
+ import { reRankWithContext, markSuperseded, extractPRFTerms, expandQueryByConcepts } from './server-internals.mjs';
10
+ import { memSearchSchema, memTimelineSchema, memGetSchema, memDeleteSchema, memSaveSchema, memStatsSchema, memCompressSchema } from './tool-schemas.mjs';
11
+
12
+ // ─── Database ───────────────────────────────────────────────────────────────
13
+
14
+ import { rmSync } from 'fs';
15
+
16
// Open the SQLite database, with a one-shot recovery path for the most common
// failure mode: a corrupt WAL file left behind by a crashed process.
// On second failure the server exits with an actionable message (Claude Code
// surfaces stderr to the user).
let db;
try {
  db = ensureDb();
} catch (firstErr) {
  // Recovery: remove WAL/SHM files (corrupt WAL is the most common cause) and retry
  debugLog('WARN', 'server', `DB open failed, attempting WAL recovery: ${firstErr.message}`);
  // force:true makes rmSync a no-op when the file doesn't exist; the empty
  // catch guards against permission errors — best-effort cleanup by design.
  try { rmSync(DB_PATH + '-wal', { force: true }); } catch {}
  try { rmSync(DB_PATH + '-shm', { force: true }); } catch {}
  try {
    db = ensureDb();
    debugLog('INFO', 'server', 'DB recovered after WAL cleanup');
  } catch (retryErr) {
    // Fatal: log and exit with descriptive message (Claude Code shows stderr)
    console.error(`[claude-mem-lite] FATAL: Database cannot be opened: ${retryErr.message}`);
    console.error(`[claude-mem-lite] Try: rm "${DB_PATH}-wal" "${DB_PATH}-shm" or reinstall with: node install.mjs install`);
    process.exit(1);
  }
}
// Server process uses longer busy_timeout for concurrent MCP requests
db.pragma('busy_timeout = 5000');
36
+
37
+ // inferProject, jaccardSimilarity, sanitizeFtsQuery, typeIcon, truncate, fmtDate imported from utils.mjs
38
+
39
+ // ─── Scoring Model Constants ────────────────────────────────────────────────
40
+ //
41
+ // Composite scoring: BM25(weights) × recency_decay × [project_boost] × [importance] × [access_bonus]
42
+ //
43
+ // BM25 column weights — higher weight = matches in that column score higher:
44
+ // observations_fts: title=10, subtitle=5, narrative=5, text=3, facts=3, concepts=2
45
+ // session_summaries_fts: request=5, investigated=3, learned=3, completed=3, next_steps=2, notes=1
46
+ //
47
+ // Recency decay — exponential half-life:
48
+ // factor = 1 + e^(-ln2 × age_ms / half_life_ms)
49
+ // At age=0: 2.0 (full boost) → at half_life: 1.5 → at ∞: 1.0
50
+ // 0.693 = ln(2), ensures exact halving at each half-life interval
51
+ //
52
+ // Optional per-query modifiers:
53
+ // Project boost: 2× for current project matches
54
+ // Importance: 0.5 + 0.5 × importance (range 0.5–2.0)
55
+ // Access bonus: 1 + 0.1 × ln(1 + access_count)
56
+
57
// SQL fragments shared by every FTS query below. Note: SQLite's bm25() returns
// NEGATIVE values (more negative = more relevant), so multiplying by boost
// factors > 1 improves rank under `ORDER BY score` ascending.
const OBS_BM25 = 'bm25(observations_fts, 10, 5, 5, 3, 3, 2)'; // weights: title, subtitle, narrative, text, facts, concepts
const SESS_BM25 = 'bm25(session_summaries_fts, 5, 3, 3, 3, 2, 1)'; // weights: request, investigated, learned, completed, next_steps, notes
const RECENCY_HALF_LIFE_MS = 1209600000; // 14 days in milliseconds
60
+
61
+ // ─── MCP Server ─────────────────────────────────────────────────────────────
62
+
63
// MCP server instance. The `instructions` string is sent to the client model
// and nudges it to search memory proactively at key moments (errors, big
// edits, decisions) and to follow the search → timeline → get drill-down flow.
const server = new McpServer(
  { name: 'claude-mem-lite', version: '2.0.0' },
  {
    instructions: [
      'Proactively use mem_search when:',
      '- Errors occur: search for related past fixes (obs_type="bugfix")',
      '- Before significant file changes: search for file history',
      '- Architecture decisions: check past decisions (obs_type="decision")',
      '- Stuck/blocked: search for similar past work',
      '',
      'Workflow: mem_search → mem_timeline(anchor=ID) → mem_get(ids=[...]) for full context.',
    ].join('\n'),
  },
);
77
+
78
/**
 * Wrap an async MCP tool handler so a thrown error becomes a well-formed
 * MCP error response instead of propagating out of the tool call.
 *
 * @param {Function} fn - async handler invoked as fn(args, extra)
 * @returns {Function} handler with the same signature that never rejects;
 *   on failure it resolves to { content: [...], isError: true }
 */
function safeHandler(fn) {
  const guarded = async (args, extra) => {
    let response;
    try {
      response = await fn(args, extra);
    } catch (err) {
      // Surface only the message — stack traces are noise for the client model.
      response = { content: [{ type: 'text', text: `Error: ${err.message}` }], isError: true };
    }
    return response;
  };
  return guarded;
}
87
+
88
+ // ─── Tool: mem_search — helper functions ────────────────────────────────────
89
+
90
// Search the observations table.
// With an FTS query: composite score = BM25(column weights) × recency decay
//   × optional current-project boost × importance × access-count bonus
//   (see "Scoring Model Constants" above). Results are ordered ascending
//   because SQLite bm25() is negative-better.
// Without a query: plain filtered listing, newest first (no score field;
//   callers sort by dateEpoch instead).
// Returns an array of plain result rows tagged source:'obs'.
function searchObservations(ctx) {
  const { ftsQuery, args, epochFrom, epochTo, perSourceLimit, perSourceOffset, currentProject, limit } = ctx;
  const results = [];

  if (ftsQuery) {
    const now = Date.now();
    // Only boost the inferred current project when the caller did not pin one;
    // an explicit project filter makes the boost redundant.
    const projectBoost = args.project ? null : currentProject;
    // The repeated `(? IS NULL OR col = ?)` pattern implements optional
    // filters with positional parameters — each filter consumes two params.
    const rows = db.prepare(`
      SELECT o.id, o.type, o.title, o.subtitle, o.project, o.created_at, o.importance,
             o.files_modified,
             snippet(observations_fts, 2, '»', '«', '…', 10) as match_snippet,
             ${OBS_BM25}
               * (1.0 + EXP(-0.693 * (? - o.created_at_epoch) / ${RECENCY_HALF_LIFE_MS}.0))
               * (CASE WHEN ? IS NOT NULL AND o.project = ? THEN 2.0 ELSE 1.0 END)
               * (0.5 + 0.5 * COALESCE(o.importance, 1))
               * (1.0 + 0.1 * LN(1 + COALESCE(o.access_count, 0))) as score
      FROM observations_fts
      JOIN observations o ON observations_fts.rowid = o.id
      WHERE observations_fts MATCH ?
        AND COALESCE(o.compressed_into, 0) = 0
        AND (? IS NULL OR o.project = ?)
        AND (? IS NULL OR o.type = ?)
        AND (? IS NULL OR o.created_at_epoch >= ?)
        AND (? IS NULL OR o.created_at_epoch <= ?)
        AND (? IS NULL OR COALESCE(o.importance, 1) >= ?)
      ORDER BY score
      LIMIT ? OFFSET ?
    `).all(
      now,
      projectBoost, projectBoost,
      ftsQuery,
      args.project ?? null, args.project ?? null,
      args.obs_type ?? null, args.obs_type ?? null,
      epochFrom, epochFrom,
      epochTo, epochTo,
      args.importance ?? null, args.importance ?? null,
      perSourceLimit, perSourceOffset
    );
    for (const r of rows) {
      results.push({ source: 'obs', id: r.id, type: r.type, title: r.title, subtitle: r.subtitle, project: r.project, date: r.created_at, score: r.score, files_modified: r.files_modified, importance: r.importance, snippet: r.match_snippet || '' });
    }

    // Two-phase query expansion for sparse results: only when the primary
    // query matched something (so expansion terms are grounded) but did not
    // fill the requested limit. Both helpers append into `results` in place.
    if (rows.length > 0 && results.length < limit) {
      const existingIds = new Set(results.map(r => r.id));
      expandObsByConceptCo(ctx, now, existingIds, results);
      expandObsByPRF(ctx, now, rows.length, existingIds, results);
    }
  } else {
    // No-query path: build the WHERE clause dynamically from whichever
    // filters were supplied, then list newest first.
    const params = [];
    const wheres = ['COALESCE(compressed_into, 0) = 0'];
    if (args.project) { wheres.push('project = ?'); params.push(args.project); }
    if (args.obs_type) { wheres.push('type = ?'); params.push(args.obs_type); }
    if (epochFrom !== null) { wheres.push('created_at_epoch >= ?'); params.push(epochFrom); }
    if (epochTo !== null) { wheres.push('created_at_epoch <= ?'); params.push(epochTo); }
    if (args.importance) { wheres.push('COALESCE(importance, 1) >= ?'); params.push(args.importance); }
    const where = `WHERE ${wheres.join(' AND ')}`;
    params.push(perSourceLimit, perSourceOffset);
    const rows = db.prepare(`
      SELECT id, type, title, subtitle, project, created_at, created_at_epoch, files_modified, importance
      FROM observations ${where}
      ORDER BY created_at_epoch DESC
      LIMIT ? OFFSET ?
    `).all(...params);
    for (const r of rows) {
      // dateEpoch is included so the cross-source no-query sort can order by it.
      results.push({ source: 'obs', id: r.id, type: r.type, title: r.title, subtitle: r.subtitle, project: r.project, date: r.created_at, dateEpoch: r.created_at_epoch });
    }
  }

  return results;
}
161
+
162
// Expansion phase 1: concept co-occurrence.
// If the primary query filled less than half the limit, derive related
// concept terms (via expandQueryByConcepts) and run a secondary FTS query
// over them. New hits are appended with their score demoted ×0.7 so direct
// matches always outrank expansion matches. Appends into `results`/`existingIds`
// in place; errors are logged and swallowed (expansion is best-effort).
function expandObsByConceptCo(ctx, now, existingIds, results) {
  const { ftsQuery, args, epochFrom, epochTo, limit } = ctx;
  if (results.length >= Math.ceil(limit / 2)) return;
  const expanded = expandQueryByConcepts(db, ftsQuery, args.project);
  if (expanded.length === 0) return;
  // Quote each concept as an FTS5 phrase ("" escapes embedded quotes) and OR them.
  const expansionFts = expanded.map(c => `"${c.replace(/"/g, '""')}"`).join(' OR ');
  try {
    const expRows = db.prepare(`
      SELECT o.id, o.type, o.title, o.subtitle, o.project, o.created_at, o.importance,
             o.files_modified,
             ${OBS_BM25}
               * (1.0 + EXP(-0.693 * (? - o.created_at_epoch) / ${RECENCY_HALF_LIFE_MS}.0))
               * (0.5 + 0.5 * COALESCE(o.importance, 1)) as score
      FROM observations_fts
      JOIN observations o ON observations_fts.rowid = o.id
      WHERE observations_fts MATCH ?
        AND COALESCE(o.compressed_into, 0) = 0
        AND (? IS NULL OR o.project = ?)
        AND (? IS NULL OR o.type = ?)
        AND (? IS NULL OR o.created_at_epoch >= ?)
        AND (? IS NULL OR o.created_at_epoch <= ?)
        AND (? IS NULL OR COALESCE(o.importance, 1) >= ?)
      ORDER BY score
      LIMIT ?
    `).all(
      now, expansionFts,
      args.project ?? null, args.project ?? null,
      args.obs_type ?? null, args.obs_type ?? null,
      epochFrom, epochFrom,
      epochTo, epochTo,
      args.importance ?? null, args.importance ?? null,
      limit
    );
    for (const r of expRows) {
      if (!existingIds.has(r.id)) {
        existingIds.add(r.id);
        // ×0.7 demotion keeps expansion hits below primary hits of equal relevance.
        results.push({ source: 'obs', id: r.id, type: r.type, title: r.title, subtitle: r.subtitle, project: r.project, date: r.created_at, score: r.score * 0.7, files_modified: r.files_modified, importance: r.importance, snippet: '' });
      }
    }
  } catch (e) { debugLog('WARN', 'mem_search', `concept expansion error: ${e.message}`); }
}
203
+
204
// Expansion phase 2: pseudo-relevance feedback (PRF).
// Requires at least 3 primary hits so the feedback terms are statistically
// meaningful. Takes the top-8 primary matches, extracts salient terms from
// their titles/narratives (extractPRFTerms), and re-queries with those terms.
// New hits are demoted ×0.6 — below both primary (×1.0) and concept (×0.7)
// hits. Appends into `results`/`existingIds` in place; best-effort on error.
function expandObsByPRF(ctx, now, primaryCount, existingIds, results) {
  const { ftsQuery, args, epochFrom, epochTo, limit } = ctx;
  if (primaryCount < 3) return;
  const topResults = db.prepare(`
    SELECT o.title, o.narrative FROM observations_fts
    JOIN observations o ON observations_fts.rowid = o.id
    WHERE observations_fts MATCH ? AND COALESCE(o.compressed_into, 0) = 0
      AND (? IS NULL OR o.project = ?)
    ORDER BY ${OBS_BM25}
    LIMIT 8
  `).all(ftsQuery, args.project ?? null, args.project ?? null);
  const prfTerms = extractPRFTerms(topResults, ftsQuery);
  if (prfTerms.length === 0) return;
  // Quote each term as an FTS5 phrase ("" escapes embedded quotes) and OR them.
  const prfFts = prfTerms.map(t => `"${t.replace(/"/g, '""')}"`).join(' OR ');
  try {
    const prfRows = db.prepare(`
      SELECT o.id, o.type, o.title, o.subtitle, o.project, o.created_at, o.importance,
             o.files_modified,
             ${OBS_BM25}
               * (1.0 + EXP(-0.693 * (? - o.created_at_epoch) / ${RECENCY_HALF_LIFE_MS}.0))
               * (0.5 + 0.5 * COALESCE(o.importance, 1)) as score
      FROM observations_fts
      JOIN observations o ON observations_fts.rowid = o.id
      WHERE observations_fts MATCH ?
        AND COALESCE(o.compressed_into, 0) = 0
        AND (? IS NULL OR o.project = ?)
        AND (? IS NULL OR o.type = ?)
        AND (? IS NULL OR o.created_at_epoch >= ?)
        AND (? IS NULL OR o.created_at_epoch <= ?)
        AND (? IS NULL OR COALESCE(o.importance, 1) >= ?)
      ORDER BY score
      LIMIT ?
    `).all(
      now, prfFts,
      args.project ?? null, args.project ?? null,
      args.obs_type ?? null, args.obs_type ?? null,
      epochFrom, epochFrom,
      epochTo, epochTo,
      args.importance ?? null, args.importance ?? null,
      limit
    );
    for (const r of prfRows) {
      if (!existingIds.has(r.id)) {
        existingIds.add(r.id);
        // ×0.6 demotion: PRF hits rank below primary and concept-expansion hits.
        results.push({ source: 'obs', id: r.id, type: r.type, title: r.title, subtitle: r.subtitle, project: r.project, date: r.created_at, score: r.score * 0.6, files_modified: r.files_modified, importance: r.importance, snippet: '' });
      }
    }
  } catch (e) { debugLog('WARN', 'mem_search', `PRF expansion error: ${e.message}`); }
}
253
+
254
// Search session summaries.
// With an FTS query: BM25 × recency decay × optional current-project boost
// (no importance/access factors — sessions don't carry them).
// With no query AND no explicit type: sessions are skipped entirely (they add
// noise to a generic browse). With no query but type='sessions': newest first.
// Returns an array of result rows tagged source:'session'.
function searchSessions(ctx) {
  const { ftsQuery, searchType, args, epochFrom, epochTo, perSourceLimit, perSourceOffset, currentProject } = ctx;
  const results = [];

  if (ftsQuery) {
    const now = Date.now();
    // Boost the inferred current project only when no explicit filter was given.
    const sessionProjectBoost = args.project ? null : currentProject;
    const rows = db.prepare(`
      SELECT s.id, s.request, s.completed, s.project, s.created_at,
             ${SESS_BM25}
               * (1.0 + EXP(-0.693 * (? - s.created_at_epoch) / ${RECENCY_HALF_LIFE_MS}.0))
               * (CASE WHEN ? IS NOT NULL AND s.project = ? THEN 2.0 ELSE 1.0 END) as score
      FROM session_summaries_fts
      JOIN session_summaries s ON session_summaries_fts.rowid = s.id
      WHERE session_summaries_fts MATCH ?
        AND (? IS NULL OR s.project = ?)
        AND (? IS NULL OR s.created_at_epoch >= ?)
        AND (? IS NULL OR s.created_at_epoch <= ?)
      ORDER BY score
      LIMIT ? OFFSET ?
    `).all(
      now,
      sessionProjectBoost, sessionProjectBoost,
      ftsQuery,
      args.project ?? null, args.project ?? null,
      epochFrom, epochFrom,
      epochTo, epochTo,
      perSourceLimit, perSourceOffset
    );
    for (const r of rows) {
      results.push({ source: 'session', id: r.id, request: r.request, completed: r.completed, project: r.project, date: r.created_at, score: r.score });
    }
  } else if (!searchType) {
    // Skip sessions in unfiltered no-query mode (too noisy)
  } else {
    // type='sessions' without a query: filtered recency listing.
    const params = [];
    const wheres = [];
    if (args.project) { wheres.push('project = ?'); params.push(args.project); }
    if (epochFrom !== null) { wheres.push('created_at_epoch >= ?'); params.push(epochFrom); }
    if (epochTo !== null) { wheres.push('created_at_epoch <= ?'); params.push(epochTo); }
    const where = wheres.length ? `WHERE ${wheres.join(' AND ')}` : '';
    params.push(perSourceLimit, perSourceOffset);
    const rows = db.prepare(`
      SELECT id, request, completed, project, created_at, created_at_epoch
      FROM session_summaries ${where}
      ORDER BY created_at_epoch DESC
      LIMIT ? OFFSET ?
    `).all(...params);
    for (const r of rows) {
      results.push({ source: 'session', id: r.id, request: r.request, completed: r.completed, project: r.project, date: r.created_at, dateEpoch: r.created_at_epoch });
    }
  }

  return results;
}
309
+
310
// Search raw user prompts.
// With an FTS query: plain single-column BM25 — no recency or project boost
// (prompts join through sdk_sessions only to apply the project filter).
// Without a query: listed only when explicitly requested (type='prompts'),
// newest first. Returns result rows tagged source:'prompt'.
function searchPrompts(ctx) {
  const { ftsQuery, searchType, args, epochFrom, epochTo, perSourceLimit, perSourceOffset } = ctx;
  const results = [];

  if (ftsQuery) {
    const rows = db.prepare(`
      SELECT p.id, p.prompt_text, p.content_session_id, p.created_at,
             bm25(user_prompts_fts, 1) as score
      FROM user_prompts_fts
      JOIN user_prompts p ON user_prompts_fts.rowid = p.id
      JOIN sdk_sessions s ON p.content_session_id = s.content_session_id
      WHERE user_prompts_fts MATCH ?
        AND (? IS NULL OR s.project = ?)
        AND (? IS NULL OR p.created_at_epoch >= ?)
        AND (? IS NULL OR p.created_at_epoch <= ?)
      ORDER BY score
      LIMIT ? OFFSET ?
    `).all(
      ftsQuery,
      args.project ?? null, args.project ?? null,
      epochFrom, epochFrom,
      epochTo, epochTo,
      perSourceLimit, perSourceOffset
    );
    for (const r of rows) {
      results.push({ source: 'prompt', id: r.id, text: r.prompt_text, session: r.content_session_id, date: r.created_at, score: r.score });
    }
  } else if (searchType === 'prompts') {
    const params = [];
    const wheres = [];
    if (args.project) { wheres.push('s.project = ?'); params.push(args.project); }
    if (epochFrom !== null) { wheres.push('p.created_at_epoch >= ?'); params.push(epochFrom); }
    if (epochTo !== null) { wheres.push('p.created_at_epoch <= ?'); params.push(epochTo); }
    const where = wheres.length ? `WHERE ${wheres.join(' AND ')}` : '';
    params.push(perSourceLimit, perSourceOffset);
    const rows = db.prepare(`
      SELECT p.id, p.prompt_text, p.content_session_id, p.created_at, p.created_at_epoch
      FROM user_prompts p
      JOIN sdk_sessions s ON p.content_session_id = s.content_session_id
      ${where}
      ORDER BY p.created_at_epoch DESC
      LIMIT ? OFFSET ?
    `).all(...params);
    for (const r of rows) {
      results.push({ source: 'prompt', id: r.id, text: r.prompt_text, session: r.content_session_id, date: r.created_at, dateEpoch: r.created_at_epoch });
    }
  }

  return results;
}
360
+
361
// Render search hits as a compact one-line-per-result text index.
// Each source has its own prefix (#, S#, P#) so mem_get/mem_timeline IDs are
// unambiguous. Empty result sets return a hint block instead, including the
// expanded query when sanitizeFtsQuery rewrote the user's input.
function formatSearchOutput(paginatedResults, args, ftsQuery, totalCount, isCrossSource) {
  if (paginatedResults.length === 0) {
    const hint = ['No results found.'];
    if (args.query) {
      const expanded = ftsQuery || args.query;
      // Show the rewritten query only if it differs from what the user typed.
      if (expanded !== args.query) hint.push(`Searched as: ${expanded}`);
      hint.push('Tip: check spelling, try broader terms, or use mem_stats to see available data.');
    }
    return { content: [{ type: 'text', text: hint.join('\n') }] };
  }

  const lines = [];
  // In cross-source mode results were over-fetched then sliced, so the total
  // can exceed the page; surface "N of M" in that case.
  const countLabel = isCrossSource && totalCount > paginatedResults.length
    ? `${paginatedResults.length} of ${totalCount}`
    : `${paginatedResults.length}`;
  lines.push(`Found ${countLabel} result(s)${args.query ? ` for "${args.query}"` : ''}:\n`);

  for (const r of paginatedResults) {
    if (r.source === 'obs') {
      const supersededTag = r.superseded ? ' [SUPERSEDED]' : '';
      lines.push(`#${r.id} ${typeIcon(r.type)} [${r.type}] ${truncate(r.title || r.subtitle || '(untitled)')} | ${r.project} | ${fmtDate(r.date)}${supersededTag}`);
      // Show the FTS snippet only when it adds information beyond the title.
      if (r.snippet && r.snippet.length > 10 && r.snippet !== r.title) {
        lines.push(` ${truncate(r.snippet, 100)}`);
      }
    } else if (r.source === 'session') {
      lines.push(`S#${r.id} 📋 ${truncate(r.request || r.completed || '(no summary)')} | ${r.project} | ${fmtDate(r.date)}`);
    } else if (r.source === 'prompt') {
      lines.push(`P#${r.id} 💬 ${truncate(r.text)} | ${fmtDate(r.date)}`);
    }
  }

  lines.push(`\nWorkflow: mem_timeline(anchor=ID) for context | mem_get(ids=[...]) for full details`);
  return { content: [{ type: 'text', text: lines.join('\n') }] };
}
395
+
396
+ // ─── Tool: mem_search ───────────────────────────────────────────────────────
397
+
398
// Tool: mem_search — orchestrates the three per-source search helpers,
// merges, re-ranks, and paginates.
server.registerTool(
  'mem_search',
  {
    description: 'FTS5 full-text search across observations, sessions, and prompts with BM25 ranking. Returns compact index (use mem_get for details).',
    inputSchema: memSearchSchema,
  },
  safeHandler(async (args) => {
    const limit = args.limit ?? 20;
    const offset = args.offset ?? 0;
    const ftsQuery = sanitizeFtsQuery(args.query);
    const searchType = args.type;
    const currentProject = inferProject();

    // Cross-source mode (no explicit type): over-fetch from each source at
    // offset 0 so the global merge/sort/slice below has enough candidates;
    // pagination is then applied once, after the merge.
    const isCrossSource = !searchType;
    const perSourceLimit = isCrossSource ? Math.max(limit * 3, offset + limit + 10) : limit;
    const perSourceOffset = isCrossSource ? 0 : offset;

    // Parse date bounds to epoch (with validation)
    // date_to with date-only format (YYYY-MM-DD) extends to end-of-day (23:59:59.999Z)
    const epochFrom = args.date_from ? new Date(args.date_from).getTime() : null;
    let epochTo = args.date_to ? new Date(args.date_to).getTime() : null;
    if (epochTo !== null && args.date_to && /^\d{4}-\d{2}-\d{2}$/.test(args.date_to)) {
      epochTo += 86400000 - 1; // extend to 23:59:59.999
    }
    // NaN survives the arithmetic above, so invalid dates are still caught here.
    if (epochFrom !== null && isNaN(epochFrom)) throw new Error(`Invalid date_from: ${args.date_from}`);
    if (epochTo !== null && isNaN(epochTo)) throw new Error(`Invalid date_to: ${args.date_to}`);

    const ctx = { ftsQuery, searchType, args, epochFrom, epochTo, perSourceLimit, perSourceOffset, currentProject, limit };
    const results = [];

    if (!searchType || searchType === 'observations') results.push(...searchObservations(ctx));
    if (!searchType || searchType === 'sessions') results.push(...searchSessions(ctx));
    if (!searchType || searchType === 'prompts') results.push(...searchPrompts(ctx));

    // Global sort (cross-source): ascending score when a query exists (bm25 is
    // negative-better), otherwise newest first via dateEpoch.
    if (isCrossSource && results.length > 0) {
      if (ftsQuery) {
        results.sort((a, b) => (a.score ?? 0) - (b.score ?? 0));
      } else {
        results.sort((a, b) => (b.dateEpoch ?? 0) - (a.dateEpoch ?? 0));
      }
    }

    // Re-rank observations by file context overlap and mark superseded.
    // Both helpers mutate the shared row objects in `obsResults`, so the
    // follow-up sort over `results` sees the adjusted scores.
    if (ftsQuery && results.some(r => r.source === 'obs')) {
      const obsResults = results.filter(r => r.source === 'obs');
      reRankWithContext(db, obsResults, currentProject);
      markSuperseded(obsResults);
      results.sort((a, b) => (a.score ?? 0) - (b.score ?? 0));
    }

    const totalBeforePagination = results.length;
    // Per-source mode already applied LIMIT/OFFSET in SQL; only slice here
    // when we merged multiple over-fetched sources.
    const paginatedResults = isCrossSource ? results.slice(offset, offset + limit) : results;

    return formatSearchOutput(paginatedResults, args, ftsQuery, totalBeforePagination, isCrossSource);
  })
);
455
+
456
+ // ─── Tool: mem_timeline ─────────────────────────────────────────────────────
457
+
458
// Tool: mem_timeline — chronological context around an anchor observation.
// Anchor resolution order: explicit args.anchor → best FTS match for
// args.query (recency-weighted) → none (falls back to a most-recent listing).
server.registerTool(
  'mem_timeline',
  {
    description: 'Browse observations as a timeline around an anchor point. Use query to auto-find anchor, or specify anchor ID directly.',
    inputSchema: memTimelineSchema,
  },
  safeHandler(async (args) => {
    const before = args.before ?? 5;
    const after = args.after ?? 5;
    let anchorId = args.anchor;

    // Auto-find anchor via FTS (with recency decay)
    if (!anchorId && args.query) {
      const ftsQuery = sanitizeFtsQuery(args.query);
      if (ftsQuery) {
        const nowT = Date.now();
        const row = db.prepare(`
          SELECT o.id
          FROM observations_fts
          JOIN observations o ON observations_fts.rowid = o.id
          WHERE observations_fts MATCH ?
            AND (? IS NULL OR o.project = ?)
            AND COALESCE(o.compressed_into, 0) = 0
          ORDER BY ${OBS_BM25}
            * (1.0 + EXP(-0.693 * (? - o.created_at_epoch) / ${RECENCY_HALF_LIFE_MS}.0))
          LIMIT 1
        `).get(ftsQuery, args.project ?? null, args.project ?? null, nowT);
        if (row) anchorId = row.id;
      }
    }

    // No anchor: return most recent
    if (!anchorId) {
      const compressedFilter = 'COALESCE(compressed_into, 0) = 0';
      const projectFilter = args.project ? `WHERE ${compressedFilter} AND project = ?` : `WHERE ${compressedFilter}`;
      const params = args.project ? [args.project, before + after + 1] : [before + after + 1];
      const rows = db.prepare(`
        SELECT id, type, title, subtitle, project, created_at
        FROM observations ${projectFilter}
        ORDER BY created_at_epoch DESC
        LIMIT ?
      `).all(...params);

      if (rows.length === 0) {
        return { content: [{ type: 'text', text: 'No observations found.' }] };
      }

      const lines = [`Timeline (most recent ${rows.length}):\n`];
      // Rows arrive newest-first; reverse so the timeline reads oldest → newest.
      for (const r of rows.reverse()) {
        lines.push(`#${r.id} ${typeIcon(r.type)} [${r.type}] ${truncate(r.title || r.subtitle || '(untitled)')} | ${r.project} | ${fmtDate(r.created_at)}`);
      }
      return { content: [{ type: 'text', text: lines.join('\n') }] };
    }

    // Get anchor epoch
    const anchorRow = db.prepare('SELECT created_at_epoch, project FROM observations WHERE id = ?').get(anchorId);
    if (!anchorRow) {
      return { content: [{ type: 'text', text: `Observation #${anchorId} not found.` }] };
    }

    const projectFilter = args.project ? 'AND project = ?' : '';
    const baseParams = args.project ? [args.project] : [];

    // Before anchor (fetched newest-first, reversed below for display order)
    const beforeRows = db.prepare(`
      SELECT id, type, title, subtitle, project, created_at
      FROM observations
      WHERE created_at_epoch < ? AND COALESCE(compressed_into, 0) = 0 ${projectFilter}
      ORDER BY created_at_epoch DESC
      LIMIT ?
    `).all(anchorRow.created_at_epoch, ...baseParams, before);

    // After anchor
    const afterRows = db.prepare(`
      SELECT id, type, title, subtitle, project, created_at
      FROM observations
      WHERE created_at_epoch > ? AND COALESCE(compressed_into, 0) = 0 ${projectFilter}
      ORDER BY created_at_epoch ASC
      LIMIT ?
    `).all(anchorRow.created_at_epoch, ...baseParams, after);

    // Anchor itself (always shown, even if compressed — it was requested by ID)
    const anchor = db.prepare('SELECT id, type, title, subtitle, project, created_at FROM observations WHERE id = ?').get(anchorId);

    const all = [...beforeRows.reverse(), anchor, ...afterRows];
    const lines = [`Timeline around #${anchorId}:\n`];
    for (const r of all) {
      // ◀ marks the anchor row in the rendered timeline.
      const marker = r.id === anchorId ? ' ◀' : '';
      lines.push(`#${r.id} ${typeIcon(r.type)} [${r.type}] ${truncate(r.title || r.subtitle || '(untitled)')} | ${r.project} | ${fmtDate(r.created_at)}${marker}`);
    }

    return { content: [{ type: 'text', text: lines.join('\n') }] };
  })
);
552
+
553
+ // ─── Tool: mem_get ──────────────────────────────────────────────────────────
554
+
555
// Tool: mem_get — full-record retrieval by ID for any of the three sources.
// Observation reads also bump access_count, which feeds the access bonus in
// the search scoring model.
server.registerTool(
  'mem_get',
  {
    description: 'Get full details for one or more records by ID. Use after mem_search to drill into specific records.',
    inputSchema: memGetSchema,
  },
  safeHandler(async (args) => {
    const source = args.source || 'obs';
    // IDs are bound as parameters (one ? per ID) — never interpolated.
    const placeholders = args.ids.map(() => '?').join(',');

    let rows, allFields, prefix;
    if (source === 'session') {
      rows = db.prepare(`SELECT * FROM session_summaries WHERE id IN (${placeholders}) ORDER BY created_at_epoch ASC`).all(...args.ids);
      allFields = ['id', 'request', 'investigated', 'learned', 'completed', 'next_steps', 'files_read', 'files_edited', 'notes', 'project', 'created_at', 'memory_session_id', 'prompt_number'];
      prefix = 'S#';
    } else if (source === 'prompt') {
      rows = db.prepare(`SELECT * FROM user_prompts WHERE id IN (${placeholders}) ORDER BY created_at_epoch ASC`).all(...args.ids);
      allFields = ['id', 'prompt_text', 'content_session_id', 'prompt_number', 'created_at'];
      prefix = 'P#';
    } else {
      // Increment access_count for retrieved observations (batch UPDATE)
      db.prepare(
        `UPDATE observations SET access_count = COALESCE(access_count, 0) + 1 WHERE id IN (${placeholders})`
      ).run(...args.ids);
      rows = db.prepare(`SELECT * FROM observations WHERE id IN (${placeholders}) ORDER BY created_at_epoch ASC`).all(...args.ids);
      allFields = ['id', 'type', 'title', 'subtitle', 'narrative', 'text', 'facts', 'concepts', 'files_read', 'files_modified', 'project', 'created_at', 'memory_session_id', 'prompt_number', 'importance', 'related_ids', 'access_count'];
      prefix = '#';
    }

    if (rows.length === 0) {
      return { content: [{ type: 'text', text: `No ${source === 'session' ? 'sessions' : source === 'prompt' ? 'prompts' : 'observations'} found for given IDs.` }] };
    }

    // Restrict output to requested fields, dropping any unknown field names.
    const fields = args.fields?.length ? args.fields.filter(f => allFields.includes(f)) : allFields;

    const parts = [];
    for (const row of rows) {
      const lines = [`── ${prefix}${row.id} ──`];
      for (const f of fields) {
        const val = row[f];
        // Skip empty fields entirely; cap long string values at 200 chars.
        if (val === null || val === undefined || val === '') continue;
        lines.push(`${f}: ${typeof val === 'string' && val.length > 200 ? val.slice(0, 200) + '…' : val}`);
      }
      parts.push(lines.join('\n'));
    }

    return { content: [{ type: 'text', text: parts.join('\n\n') }] };
  })
);
604
+
605
+ // ─── Tool: mem_delete ────────────────────────────────────────────────────────
606
+
607
// Tool: mem_delete — two-phase delete (preview unless confirm=true).
// Deletion also scrubs dangling references from other observations'
// related_ids JSON arrays, all inside one transaction.
server.registerTool(
  'mem_delete',
  {
    description: 'Delete observations by ID. Use confirm=false to preview, confirm=true to execute. FTS5 cleanup is automatic via triggers.',
    inputSchema: memDeleteSchema,
  },
  safeHandler(async (args) => {
    const placeholders = args.ids.map(() => '?').join(',');
    const rows = db.prepare(`
      SELECT id, type, title, project FROM observations WHERE id IN (${placeholders})
    `).all(...args.ids);

    if (rows.length === 0) {
      return { content: [{ type: 'text', text: 'No observations found for given IDs.' }] };
    }

    if (!args.confirm) {
      // Preview mode
      const lines = [`Preview: ${rows.length} observation(s) will be deleted:\n`];
      for (const r of rows) {
        lines.push(` #${r.id} [${r.type}] ${truncate(r.title || '(untitled)', 80)} | ${r.project}`);
      }
      lines.push(`\nCall mem_delete(ids=[...], confirm=true) to execute.`);
      return { content: [{ type: 'text', text: lines.join('\n') }] };
    }

    // Wrap cleanup + deletion in a transaction for consistency
    const deletedIds = new Set(args.ids);
    const deleteTx = db.transaction(() => {
      // Clean up stale references in other observations' related_ids
      // Use LIKE filter to avoid O(N) full-table scan — only fetch rows that may reference deleted IDs
      // (LIKE '%7%' over-matches e.g. 17/70, but the exact integer filter below corrects that).
      const likeConditions = args.ids.map(() => `related_ids LIKE ?`).join(' OR ');
      const likeParams = args.ids.map(id => `%${id}%`);
      const referencing = db.prepare(`
        SELECT id, related_ids FROM observations
        WHERE related_ids IS NOT NULL AND related_ids != '[]'
          AND (${likeConditions})
      `).all(...likeParams);
      for (const r of referencing) {
        let ids;
        // Malformed related_ids JSON is left untouched rather than clobbered.
        try { ids = JSON.parse(r.related_ids); } catch (e) { debugCatch(e, 'deleteRelatedIds'); continue; }
        if (!Array.isArray(ids) || !ids.every(id => Number.isInteger(id))) continue;
        const filtered = ids.filter(id => !deletedIds.has(id));
        // Only write back when something was actually removed.
        if (filtered.length !== ids.length) {
          db.prepare('UPDATE observations SET related_ids = ? WHERE id = ?').run(JSON.stringify(filtered), r.id);
        }
      }
      // Execute deletion (FTS5 cleanup handled by observations_ad trigger)
      return db.prepare(`DELETE FROM observations WHERE id IN (${placeholders})`).run(...args.ids);
    });
    const result = deleteTx();

    return { content: [{ type: 'text', text: `Deleted ${result.changes} observation(s).` }] };
  })
);
662
+
663
+ // ─── Tool: mem_save ─────────────────────────────────────────────────────────
664
+
665
// Tool: mem_save — manual observation insert.
// Pipeline: ensure a synthetic "manual-<project>" session exists → near-dup
// check against the last 5 minutes → secret scrubbing → MinHash signature →
// insert.
server.registerTool(
  'mem_save',
  {
    description: 'Manually save a memory/observation. Use for important findings, decisions, or notes worth preserving.',
    inputSchema: memSaveSchema,
  },
  safeHandler(async (args) => {
    const now = new Date();
    const project = args.project || inferProject();
    const type = args.type || 'discovery';
    // Fall back to the first 100 chars of content when no title is given.
    const title = args.title || args.content.slice(0, 100);
    const sessionId = `manual-${project}`;

    // Ensure session exists (INSERT OR IGNORE avoids race condition on concurrent calls)
    db.prepare(`
      INSERT OR IGNORE INTO sdk_sessions (content_session_id, memory_session_id, project, started_at, started_at_epoch, status)
      VALUES (?, ?, ?, ?, ?, 'active')
    `).run(sessionId, sessionId, project, now.toISOString(), now.getTime());

    // Dedup: skip if a similar title or content was saved recently (5 min window)
    const fiveMinAgo = now.getTime() - 5 * 60 * 1000;
    const recent = db.prepare(`
      SELECT title, text FROM observations
      WHERE project = ? AND created_at_epoch > ?
      ORDER BY created_at_epoch DESC LIMIT 50
    `).all(project, fiveMinAgo);

    // 0.7 Jaccard similarity on either title or body counts as a duplicate.
    if (title && recent.some(r =>
      jaccardSimilarity(r.title, title) > 0.7 ||
      jaccardSimilarity(r.text || '', args.content) > 0.7
    )) {
      return { content: [{ type: 'text', text: `Skipped: a similar observation already exists in project "${project}".` }] };
    }

    // Scrub secrets BEFORE computing the MinHash so the signature matches stored text.
    const safeContent = scrubSecrets(args.content);
    const safeTitle = scrubSecrets(title);
    const minhashSig = computeMinHash(safeTitle + ' ' + safeContent);

    // concepts/facts are stored empty and files as empty JSON arrays — manual
    // saves carry no extraction metadata.
    const result = db.prepare(`
      INSERT INTO observations (memory_session_id, project, text, type, title, narrative, concepts, facts, files_read, files_modified, importance, minhash_sig, created_at, created_at_epoch)
      VALUES (?, ?, ?, ?, ?, ?, '', '', '[]', '[]', ?, ?, ?, ?)
    `).run(sessionId, project, safeContent, type, safeTitle, safeContent, args.importance ?? 1, minhashSig, now.toISOString(), now.getTime());

    return { content: [{ type: 'text', text: `Saved as observation #${result.lastInsertRowid} [${type}] in project "${project}".` }] };
  })
);
711
+
712
// ─── Tool: mem_stats ────────────────────────────────────────────────────────

server.registerTool(
  'mem_stats',
  {
    description: 'Get statistics about stored memories: counts, types, projects, recent activity.',
    inputSchema: memStatsSchema,
  },
  // Handler: builds a plain-text statistics report — lifetime totals,
  // windowed counts, type/project breakdowns, a 7-day activity trail, and
  // data-health metrics — optionally scoped to a single project.
  safeHandler(async (args) => {
    // Reporting window and optional project scoping shared by most queries.
    const windowDays = args.days ?? 30;
    const windowStart = Date.now() - windowDays * 86400000;
    const projectFilter = args.project ? 'AND project = ?' : '';
    const baseParams = args.project ? [args.project] : [];

    // Tiny wrappers so each statistic below reads as a single call.
    const getRow = (sql, ...params) => db.prepare(sql).get(...params);
    const getRows = (sql, ...params) => db.prepare(sql).all(...params);

    // Lifetime totals
    const obsTotal = getRow(`SELECT COUNT(*) as c FROM observations WHERE 1=1 ${projectFilter}`, ...baseParams);
    const sessTotal = getRow(`SELECT COUNT(*) as c FROM session_summaries WHERE 1=1 ${projectFilter}`, ...baseParams);
    const promptTotal = args.project
      ? getRow(`SELECT COUNT(*) as c FROM user_prompts p JOIN sdk_sessions s ON p.content_session_id = s.content_session_id WHERE s.project = ?`, args.project)
      : getRow(`SELECT COUNT(*) as c FROM user_prompts`);

    // Counts within the reporting window
    const obsRecent = getRow(`SELECT COUNT(*) as c FROM observations WHERE created_at_epoch >= ? ${projectFilter}`, windowStart, ...baseParams);
    const sessRecent = getRow(`SELECT COUNT(*) as c FROM session_summaries WHERE created_at_epoch >= ? ${projectFilter}`, windowStart, ...baseParams);

    // Per-type breakdown within the window
    const types = getRows(`
      SELECT type, COUNT(*) as c FROM observations
      WHERE created_at_epoch >= ? ${projectFilter}
      GROUP BY type ORDER BY c DESC
    `, windowStart, ...baseParams);

    // Project leaderboard — only meaningful in the global (unfiltered) view
    const projects = args.project ? [] : getRows(`
      SELECT project, COUNT(*) as c FROM observations
      GROUP BY project ORDER BY c DESC
      LIMIT 20
    `);

    // Per-day counts for the last week (fixed 7 days, independent of windowDays)
    const daily = getRows(`
      SELECT date(created_at) as day, COUNT(*) as c FROM observations
      WHERE created_at_epoch >= ? ${projectFilter}
      GROUP BY day ORDER BY day DESC
      LIMIT 7
    `, Date.now() - 7 * 86400000, ...baseParams);

    // Health metrics: rough token estimate (total chars / 4), average
    // importance, and the share of stale never-accessed importance-1 rows.
    const tokenEst = getRow(`
      SELECT SUM(LENGTH(COALESCE(title,'')) + LENGTH(COALESCE(narrative,'')) + LENGTH(COALESCE(text,''))) / 4 as t
      FROM observations WHERE 1=1 ${projectFilter}
    `, ...baseParams);

    const avgImp = getRow(`
      SELECT AVG(COALESCE(importance,1)) as v FROM observations WHERE 1=1 ${projectFilter}
    `, ...baseParams);

    const thirtyDaysAgo = Date.now() - 30 * 86400000;
    const lowVal = getRow(`
      SELECT COUNT(*) as c FROM observations
      WHERE COALESCE(importance,1) = 1 AND COALESCE(access_count,0) = 0
      AND created_at_epoch < ? ${projectFilter}
    `, thirtyDaysAgo, ...baseParams);

    const noiseRatio = obsTotal.c > 0 ? lowVal.c / obsTotal.c : 0;
    const compressedCount = getRow(`
      SELECT COUNT(*) as c FROM observations WHERE compressed_into IS NOT NULL ${projectFilter}
    `, ...baseParams);

    // Assemble the report imperatively; line sequence matches the wire format.
    const out = [];
    out.push(`Memory Statistics${args.project ? ` (project: ${args.project})` : ''}:`);
    out.push('');
    out.push(`Total: ${obsTotal.c} observations | ${sessTotal.c} sessions | ${promptTotal.c} prompts`);
    out.push(`Last ${windowDays}d: ${obsRecent.c} observations | ${sessRecent.c} sessions`);
    out.push('');
    out.push('Type distribution (recent):');
    for (const t of types) out.push(` ${typeIcon(t.type)} ${t.type}: ${t.c}`);
    out.push('');
    if (projects.length) {
      out.push('Top projects:');
      for (const p of projects) out.push(` ${p.project}: ${p.c}`);
    }
    out.push('');
    out.push('Daily activity (last 7d):');
    for (const d of daily) out.push(` ${d.day}: ${d.c} observations`);
    out.push('');
    out.push('Data Health:');
    out.push(` Est. tokens: ${tokenEst.t ?? 0}`);
    out.push(` Avg importance: ${(avgImp.v ?? 1).toFixed(2)}`);
    out.push(` Low-value (imp=1, never accessed, >30d): ${lowVal.c} (${(noiseRatio * 100).toFixed(1)}% noise)`);
    out.push(` Compressed: ${compressedCount.c}`);
    if (noiseRatio > 0.6) out.push(' ⚠️ High noise ratio — consider running mem_compress');

    return { content: [{ type: 'text', text: out.join('\n') }] };
  })
);
806
+
807
// ─── Tool: mem_compress ──────────────────────────────────────────────────────

server.registerTool(
  'mem_compress',
  {
    description: 'Compress old low-value observations into weekly summaries. Use preview=true to see candidates first.',
    inputSchema: memCompressSchema,
  },
  // Handler: rolls up old, never-accessed, importance-1 observations into one
  // summary row per project+ISO-week, marking the originals via
  // compressed_into. preview=true (the default) only reports candidates.
  safeHandler(async (args) => {
    const preview = args.preview !== false;
    const ageDays = args.age_days ?? 60;
    const cutoff = Date.now() - ageDays * 86400000;
    const projectFilter = args.project ? 'AND project = ?' : '';
    const baseParams = args.project ? [args.project] : [];

    // Find low-value candidates: importance=1, never accessed, old, not already compressed
    const candidates = db.prepare(`
      SELECT id, project, type, title, created_at, created_at_epoch
      FROM observations
      WHERE COALESCE(importance, 1) = 1
        AND COALESCE(access_count, 0) = 0
        AND created_at_epoch < ?
        AND compressed_into IS NULL
        ${projectFilter}
      ORDER BY project, created_at_epoch
    `).all(cutoff, ...baseParams);

    if (candidates.length === 0) {
      return { content: [{ type: 'text', text: 'No candidates for compression.' }] };
    }

    // Count observation types within a group; returns { type: count }.
    // (Shared by the preview listing and the execution path below.)
    const countTypes = (obs) => {
      const counts = {};
      for (const o of obs) counts[o.type] = (counts[o.type] || 0) + 1;
      return counts;
    };

    // Group by project + ISO week.
    // FIX: keep project/week as structured fields instead of round-tripping
    // them through the '::'-joined map key. The old key.split('::') parse
    // truncated project names containing '::' and turned a NULL project into
    // the string 'null' on the inserted summary row.
    const groups = new Map();
    for (const c of candidates) {
      const week = isoWeekKey(c.created_at_epoch);
      const key = `${c.project}::${week}`;
      let group = groups.get(key);
      if (!group) {
        group = { proj: c.project, week, obs: [] };
        groups.set(key, group);
      }
      group.obs.push(c);
    }

    // Filter groups with < 3 observations (not worth compressing)
    const compressableGroups = [...groups.values()].filter(g => g.obs.length >= 3);

    if (preview) {
      const totalCandidates = compressableGroups.reduce((s, g) => s + g.obs.length, 0);
      const lines = [
        `Compression preview:`,
        ` Total candidates: ${candidates.length}`,
        ` Compressable groups (≥3 obs): ${compressableGroups.length}`,
        ` Observations to compress: ${totalCandidates}`,
        '',
        'Groups:',
        ...compressableGroups.slice(0, 20).map(({ proj, week, obs }) => {
          const typeStr = Object.entries(countTypes(obs)).map(([t, c]) => `${c} ${t}`).join(', ');
          return ` ${proj} ${week}: ${obs.length} obs (${typeStr})`;
        }),
        '',
        `Call mem_compress(preview=false${args.age_days ? `, age_days=${args.age_days}` : ''}${args.project ? `, project="${args.project}"` : ''}) to execute.`,
      ];
      return { content: [{ type: 'text', text: lines.join('\n') }] };
    }

    // Execute compression
    let totalCompressed = 0;
    const insertSummary = db.prepare(`
      INSERT INTO observations (memory_session_id, project, text, type, title, subtitle, narrative, concepts, facts, files_read, files_modified, importance, created_at, created_at_epoch)
      VALUES (?, ?, ?, ?, ?, '', ?, '', '', '[]', '[]', 2, ?, ?)
    `);
    const compress = db.transaction(() => {
      for (const { proj, obs } of compressableGroups) {
        const types = countTypes(obs);
        // Most frequent type labels the summary.
        const dominantType = Object.entries(types).sort((a, b) => b[1] - a[1])[0][0];
        const title = `Weekly summary: ${obs.length} ${dominantType} observations`;
        const narrative = obs.map(o => `- ${o.title || '(untitled)'}`).join('\n');
        // FIX: derive sessionId from the same project value used for the
        // summary row (previously the key-parsed string and obs[0].project
        // could disagree when project was NULL).
        const sessionId = proj ? `compress-${proj}` : 'compress-manual';

        // Ensure session exists (INSERT OR IGNORE avoids race condition)
        const now = new Date();
        db.prepare(`
          INSERT OR IGNORE INTO sdk_sessions (content_session_id, memory_session_id, project, started_at, started_at_epoch, status)
          VALUES (?, ?, ?, ?, ?, 'active')
        `).run(sessionId, sessionId, proj, now.toISOString(), now.getTime());

        const summaryResult = insertSummary.run(
          sessionId, proj, narrative, dominantType, title, narrative,
          now.toISOString(), now.getTime()
        );
        const summaryId = Number(summaryResult.lastInsertRowid);

        // Batch UPDATE instead of per-row loop
        const obsIds = obs.map(o => o.id);
        const obsPh = obsIds.map(() => '?').join(',');
        db.prepare(`UPDATE observations SET compressed_into = ? WHERE id IN (${obsPh})`).run(summaryId, ...obsIds);
        totalCompressed += obs.length;
      }
    });
    compress();

    return { content: [{ type: 'text', text: `Compressed ${totalCompressed} observations into ${compressableGroups.length} weekly summaries.` }] };
  })
);
912
+
913
// ─── WAL Checkpoint (periodic) ───────────────────────────────────────────────

// Checkpoint WAL every 5 minutes to prevent unbounded growth
const WAL_CHECKPOINT_INTERVAL = 5 * 60 * 1000;

// Best-effort passive checkpoint; failures are logged, never fatal.
const runWalCheckpoint = () => {
  try {
    db.pragma('wal_checkpoint(PASSIVE)');
  } catch (e) {
    debugCatch(e, 'walCheckpoint');
  }
};

const walTimer = setInterval(runWalCheckpoint, WAL_CHECKPOINT_INTERVAL);
walTimer.unref(); // Don't keep process alive just for checkpoints
921
+
922
// ─── Shutdown Cleanup ────────────────────────────────────────────────────────

/**
 * Gracefully stop the server: cancel the WAL timer, checkpoint and close the
 * database (both best-effort), then exit with the given code.
 * @param {number} [exitCode=0] process exit status
 */
function shutdown(exitCode = 0) {
  clearInterval(walTimer);
  try { db.pragma('wal_checkpoint(TRUNCATE)'); } catch {}
  try { db.close(); } catch {}
  process.exit(exitCode);
}

// Clean exit on termination signals.
for (const signal of ['SIGINT', 'SIGTERM']) {
  process.on(signal, () => shutdown(0));
}
// Fatal errors: log, then shut down with a failure status.
for (const event of ['uncaughtException', 'unhandledRejection']) {
  process.on(event, (err) => {
    debugCatch(err, event);
    shutdown(1);
  });
}
934
+
935
// ─── Start Server ───────────────────────────────────────────────────────────

// Attach the MCP server to stdio and begin serving requests.
await server.connect(new StdioServerTransport());