hippo-memory 0.5.0 → 0.6.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/store.js CHANGED
@@ -1,12 +1,17 @@
1
1
  /**
2
2
  * Storage layer for Hippo.
3
- * Reads/writes MemoryEntry as markdown + YAML frontmatter.
4
- * Maintains a JSON index for fast lookups.
3
+ *
4
+ * SQLite is the source of truth.
5
+ * Markdown + JSON files remain as human-readable compatibility mirrors.
5
6
  */
6
7
  import * as fs from 'fs';
7
8
  import * as path from 'path';
8
9
  import { Layer } from './memory.js';
9
10
  import { dumpFrontmatter, parseFrontmatter } from './yaml.js';
11
+ import { openHippoDb, closeHippoDb, getMeta, setMeta, isFtsAvailable, pruneConsolidationRuns, getHippoDbPath, } from './db.js';
12
+ const INDEX_VERSION = 2;
13
+ const MEMORY_SELECT_COLUMNS = `id, created, last_retrieved, retrieval_count, strength, half_life_days, layer, tags_json, emotional_valence, schema_fit, source, outcome_score, conflicts_with_json, pinned, confidence, content`;
14
+ const DEFAULT_SEARCH_CANDIDATE_LIMIT = 200;
10
15
  function layerDir(root, layer) {
11
16
  return path.join(root, layer);
12
17
  }
@@ -17,6 +22,17 @@ export function isInitialized(hippoRoot) {
17
22
  return fs.existsSync(hippoRoot);
18
23
  }
19
24
  export function initStore(hippoRoot) {
25
+ ensureMirrorDirectories(hippoRoot);
26
+ const db = openHippoDb(hippoRoot);
27
+ try {
28
+ bootstrapLegacyStore(db, hippoRoot);
29
+ syncMirrorFiles(hippoRoot, db);
30
+ }
31
+ finally {
32
+ closeHippoDb(db);
33
+ }
34
+ }
35
+ function ensureMirrorDirectories(hippoRoot) {
20
36
  const dirs = [
21
37
  hippoRoot,
22
38
  path.join(hippoRoot, 'buffer'),
@@ -25,37 +41,9 @@ export function initStore(hippoRoot) {
25
41
  path.join(hippoRoot, 'conflicts'),
26
42
  ];
27
43
  for (const dir of dirs) {
28
- if (!fs.existsSync(dir)) {
29
- fs.mkdirSync(dir, { recursive: true });
30
- }
31
- }
32
- const indexPath = path.join(hippoRoot, 'index.json');
33
- if (!fs.existsSync(indexPath)) {
34
- const empty = { version: 1, entries: {}, last_retrieval_ids: [] };
35
- fs.writeFileSync(indexPath, JSON.stringify(empty, null, 2), 'utf8');
36
- }
37
- const statsPath = path.join(hippoRoot, 'stats.json');
38
- if (!fs.existsSync(statsPath)) {
39
- const stats = {
40
- total_remembered: 0,
41
- total_recalled: 0,
42
- total_forgotten: 0,
43
- consolidation_runs: [],
44
- };
45
- fs.writeFileSync(statsPath, JSON.stringify(stats, null, 2), 'utf8');
44
+ fs.mkdirSync(dir, { recursive: true });
46
45
  }
47
46
  }
48
- export function loadIndex(hippoRoot) {
49
- const indexPath = path.join(hippoRoot, 'index.json');
50
- if (!fs.existsSync(indexPath)) {
51
- return { version: 1, entries: {}, last_retrieval_ids: [] };
52
- }
53
- return JSON.parse(fs.readFileSync(indexPath, 'utf8'));
54
- }
55
- export function saveIndex(hippoRoot, index) {
56
- const indexPath = path.join(hippoRoot, 'index.json');
57
- fs.writeFileSync(indexPath, JSON.stringify(index, null, 2), 'utf8');
58
- }
59
47
  /**
60
48
  * Serialize a MemoryEntry to markdown with YAML frontmatter.
61
49
  */
@@ -94,153 +82,726 @@ export function deserializeEntry(raw) {
94
82
  strength: Number(data['strength'] ?? 1.0),
95
83
  half_life_days: Number(data['half_life_days'] ?? 7),
96
84
  layer: data['layer'],
97
- tags: data['tags'] ?? [],
85
+ tags: normalizeStringArray(data['tags']),
98
86
  emotional_valence: data['emotional_valence'] ?? 'neutral',
99
87
  schema_fit: Number(data['schema_fit'] ?? 0.5),
100
88
  source: String(data['source'] ?? 'cli'),
101
- outcome_score: data['outcome_score'] === null ? null : Number(data['outcome_score']),
102
- conflicts_with: data['conflicts_with'] ?? [],
89
+ outcome_score: data['outcome_score'] === null || data['outcome_score'] === undefined ? null : Number(data['outcome_score']),
90
+ conflicts_with: normalizeStringArray(data['conflicts_with']),
103
91
  pinned: Boolean(data['pinned'] ?? false),
104
92
  confidence: data['confidence'] ?? 'observed',
105
93
  content: content.trim(),
106
94
  };
107
95
  }
96
/**
 * Coerce an arbitrary parsed-YAML value into an array of strings.
 * Anything that is not an array (null, undefined, scalars, objects)
 * normalizes to an empty array.
 */
function normalizeStringArray(value) {
    return Array.isArray(value) ? value.map(String) : [];
}
101
/**
 * Hydrate a MemoryEntry object from a `memories` table row.
 * Numeric columns fall back to the same defaults deserializeEntry uses;
 * JSON-array columns are decoded defensively via parseJsonArray.
 */
function rowToEntry(row) {
    const outcome = row.outcome_score;
    return {
        id: row.id,
        created: row.created,
        last_retrieved: row.last_retrieved,
        retrieval_count: Number(row.retrieval_count ?? 0),
        strength: Number(row.strength ?? 1),
        half_life_days: Number(row.half_life_days ?? 7),
        layer: row.layer,
        tags: parseJsonArray(row.tags_json),
        emotional_valence: row.emotional_valence ?? 'neutral',
        schema_fit: Number(row.schema_fit ?? 0.5),
        source: row.source ?? 'cli',
        // Preserve explicit null for "no outcome recorded".
        outcome_score: outcome == null ? null : Number(outcome),
        conflicts_with: parseJsonArray(row.conflicts_with_json),
        pinned: Boolean(row.pinned),
        confidence: row.confidence ?? 'observed',
        content: row.content,
    };
}
121
/**
 * Decode a JSON-encoded string-array column.
 * Returns [] for empty/null input, malformed JSON, or a payload
 * that is not an array; elements are coerced to strings.
 */
function parseJsonArray(raw) {
    if (!raw) {
        return [];
    }
    let parsed;
    try {
        parsed = JSON.parse(raw);
    }
    catch {
        return [];
    }
    if (!Array.isArray(parsed)) {
        return [];
    }
    return parsed.map((item) => String(item));
}
132
/**
 * Map a `task_snapshots` row onto the snapshot shape returned to callers.
 * Only `id` is coerced (SQLite rowids arrive as numbers or bigints).
 */
function rowToTaskSnapshot(row) {
    const { task, summary, next_step, status, source, created_at, updated_at } = row;
    return {
        id: Number(row.id),
        task,
        summary,
        next_step,
        status,
        source,
        created_at,
        updated_at,
    };
}
144
/**
 * Map a `memory_conflicts` row onto the conflict shape returned to callers.
 * `id` and `score` are coerced to numbers; a missing score defaults to 0.
 */
function rowToMemoryConflict(row) {
    const { memory_a_id, memory_b_id, reason, status, detected_at, updated_at } = row;
    return {
        id: Number(row.id),
        memory_a_id,
        memory_b_id,
        reason,
        score: Number(row.score ?? 0),
        status,
        detected_at,
        updated_at,
    };
}
156
+ function writeActiveTaskMirror(hippoRoot, snapshot) {
157
+ const filePath = path.join(hippoRoot, 'buffer', 'active-task.md');
158
+ const fm = dumpFrontmatter({
159
+ id: snapshot.id,
160
+ task: snapshot.task,
161
+ status: snapshot.status,
162
+ source: snapshot.source,
163
+ created_at: snapshot.created_at,
164
+ updated_at: snapshot.updated_at,
165
+ next_step: snapshot.next_step,
166
+ });
167
+ const body = [
168
+ `# Active Task Snapshot`,
169
+ '',
170
+ `## Summary`,
171
+ snapshot.summary,
172
+ '',
173
+ `## Next step`,
174
+ snapshot.next_step,
175
+ '',
176
+ `## Task`,
177
+ snapshot.task,
178
+ '',
179
+ ].join('\n');
180
+ fs.mkdirSync(path.dirname(filePath), { recursive: true });
181
+ fs.writeFileSync(filePath, `${fm}\n\n${body}`, 'utf8');
182
+ }
183
+ function removeActiveTaskMirror(hippoRoot) {
184
+ const filePath = path.join(hippoRoot, 'buffer', 'active-task.md');
185
+ if (fs.existsSync(filePath)) {
186
+ fs.unlinkSync(filePath);
187
+ }
188
+ }
189
+ function writeConflictMirrors(hippoRoot, conflicts) {
190
+ const conflictDir = path.join(hippoRoot, 'conflicts');
191
+ fs.mkdirSync(conflictDir, { recursive: true });
192
+ const keep = new Set();
193
+ for (const conflict of conflicts) {
194
+ const filename = `conflict_${conflict.id}.md`;
195
+ keep.add(filename);
196
+ const fm = dumpFrontmatter({
197
+ id: conflict.id,
198
+ memory_a_id: conflict.memory_a_id,
199
+ memory_b_id: conflict.memory_b_id,
200
+ reason: conflict.reason,
201
+ score: Math.round(conflict.score * 10000) / 10000,
202
+ status: conflict.status,
203
+ detected_at: conflict.detected_at,
204
+ updated_at: conflict.updated_at,
205
+ });
206
+ const body = [
207
+ '# Memory Conflict',
208
+ '',
209
+ `- Memory A: ${conflict.memory_a_id}`,
210
+ `- Memory B: ${conflict.memory_b_id}`,
211
+ `- Reason: ${conflict.reason}`,
212
+ `- Score: ${conflict.score.toFixed(3)}`,
213
+ `- Status: ${conflict.status}`,
214
+ '',
215
+ ].join('\n');
216
+ fs.writeFileSync(path.join(conflictDir, filename), `${fm}\n\n${body}`, 'utf8');
217
+ }
218
+ for (const existing of fs.readdirSync(conflictDir)) {
219
+ if (existing === '.gitkeep')
220
+ continue;
221
+ if (!keep.has(existing)) {
222
+ fs.unlinkSync(path.join(conflictDir, existing));
223
+ }
224
+ }
225
+ }
226
/**
 * Order a conflict pair deterministically so (a, b) and (b, a) resolve to
 * the same canonical row — required for the
 * ON CONFLICT(memory_a_id, memory_b_id) upsert to dedupe correctly.
 */
function canonicalConflictPair(aId, bId) {
    if (aId < bId) {
        return { memory_a_id: aId, memory_b_id: bId };
    }
    return { memory_a_id: bId, memory_b_id: aId };
}
231
/**
 * Break a free-text query into lowercase search terms.
 * Punctuation is treated as whitespace (word chars and `_` survive);
 * single-character terms are discarded as noise.
 */
function tokenizeSearchQuery(query) {
    const normalized = query.toLowerCase().replace(/[^\w\s]/g, ' ');
    const terms = [];
    for (const candidate of normalized.split(/\s+/)) {
        if (candidate.length > 1) {
            terms.push(candidate);
        }
    }
    return terms;
}
238
/**
 * Load candidate memory rows for a search query.
 *
 * Strategy, in order:
 *   1. No usable terms  -> return every row (caller scores them all).
 *   2. FTS5 available   -> prefix-match each term, ranked by bm25.
 *   3. LIKE fallback    -> substring match on content / tags_json.
 *   4. Nothing matched  -> return every row as a last resort.
 *
 * BUG FIX: the FTS branch previously ran
 *   `SELECT id, ... FROM memories m JOIN memories_fts f ... WHERE memories_fts MATCH ?`
 * which always threw: `memories_fts` also has `id`, `content`, and `tags`
 * columns (see syncFtsRow), so the unqualified SELECT was ambiguous, and the
 * alias `f` hid the `memories_fts` name used by MATCH/bm25. The error was
 * silently swallowed, so FTS never returned results. Columns are now
 * qualified with `m.` and the FTS table is joined without an alias.
 */
function loadSearchRows(db, query, limit) {
    const terms = Array.from(new Set(tokenizeSearchQuery(query)));
    const loadAllRows = () => db.prepare(`SELECT ${MEMORY_SELECT_COLUMNS} FROM memories ORDER BY created ASC, id ASC`).all();
    if (terms.length === 0) {
        return loadAllRows();
    }
    if (isFtsAvailable(db)) {
        try {
            const ftsQuery = terms.map((term) => `${term.replace(/"/g, '""')}*`).join(' OR ');
            // Qualify every projected column so they resolve against
            // `memories` rather than the overlapping FTS columns.
            const qualifiedColumns = MEMORY_SELECT_COLUMNS
                .split(', ')
                .map((column) => `m.${column}`)
                .join(', ');
            const rows = db.prepare(`
        SELECT ${qualifiedColumns}
        FROM memories m
        JOIN memories_fts ON memories_fts.id = m.id
        WHERE memories_fts MATCH ?
        ORDER BY bm25(memories_fts), m.updated_at DESC
        LIMIT ?
      `).all(ftsQuery, limit);
            if (rows.length > 0)
                return rows;
        }
        catch {
            // Fall back to LIKE matching below.
        }
    }
    const where = terms.map(() => `(LOWER(content) LIKE ? OR LOWER(tags_json) LIKE ?)`).join(' OR ');
    const params = terms.flatMap((term) => {
        const like = `%${term}%`;
        return [like, like];
    });
    const rows = db.prepare(`
    SELECT ${MEMORY_SELECT_COLUMNS}
    FROM memories
    WHERE ${where}
    ORDER BY updated_at DESC, created DESC
    LIMIT ?
  `).all(...params, limit);
    if (rows.length > 0)
        return rows;
    return loadAllRows();
}
277
+ function writeMarkdownMirror(hippoRoot, entry) {
278
+ removeEntryMirrors(hippoRoot, entry.id);
279
+ const dir = layerDir(hippoRoot, entry.layer);
280
+ fs.mkdirSync(dir, { recursive: true });
281
+ fs.writeFileSync(path.join(dir, `${entry.id}.md`), serializeEntry(entry), 'utf8');
282
+ }
283
+ function removeEntryMirrors(hippoRoot, id) {
284
+ for (const layer of [Layer.Buffer, Layer.Episodic, Layer.Semantic]) {
285
+ const file = path.join(layerDir(hippoRoot, layer), `${id}.md`);
286
+ if (fs.existsSync(file)) {
287
+ fs.unlinkSync(file);
288
+ }
289
+ }
290
+ }
291
+ function bootstrapLegacyStore(db, hippoRoot) {
292
+ const countRow = db.prepare(`SELECT COUNT(*) AS count FROM memories`).get();
293
+ const memoryCount = Number(countRow?.count ?? 0);
294
+ if (memoryCount > 0)
295
+ return;
296
+ const legacyEntries = loadLegacyEntriesFromMarkdown(hippoRoot);
297
+ if (legacyEntries.length === 0)
298
+ return;
299
+ db.exec('BEGIN');
300
+ try {
301
+ for (const entry of legacyEntries) {
302
+ upsertEntryRow(db, entry);
303
+ }
304
+ const legacyIndex = loadLegacyIndexFile(hippoRoot);
305
+ setMeta(db, 'last_retrieval_ids', JSON.stringify(legacyIndex.last_retrieval_ids ?? []));
306
+ const legacyStats = loadLegacyStatsFile(hippoRoot);
307
+ setMeta(db, 'total_remembered', String(Number(legacyStats.total_remembered ?? 0)));
308
+ setMeta(db, 'total_recalled', String(Number(legacyStats.total_recalled ?? 0)));
309
+ setMeta(db, 'total_forgotten', String(Number(legacyStats.total_forgotten ?? 0)));
310
+ const runs = Array.isArray(legacyStats.consolidation_runs) ? legacyStats.consolidation_runs : [];
311
+ const insertRun = db.prepare(`INSERT INTO consolidation_runs(timestamp, decayed, merged, removed) VALUES (?, ?, ?, ?)`);
312
+ for (const run of runs) {
313
+ if (!run || typeof run !== 'object')
314
+ continue;
315
+ const row = run;
316
+ insertRun.run(String(row.timestamp ?? new Date().toISOString()), Number(row.decayed ?? 0), Number(row.merged ?? 0), Number(row.removed ?? 0));
317
+ }
318
+ db.exec('COMMIT');
319
+ }
320
+ catch (error) {
321
+ db.exec('ROLLBACK');
322
+ throw error;
323
+ }
324
+ }
325
+ function loadLegacyEntriesFromMarkdown(hippoRoot) {
326
+ const entries = [];
327
+ for (const layer of [Layer.Buffer, Layer.Episodic, Layer.Semantic]) {
328
+ const dir = layerDir(hippoRoot, layer);
329
+ if (!fs.existsSync(dir))
330
+ continue;
331
+ for (const file of fs.readdirSync(dir)) {
332
+ if (!file.endsWith('.md'))
333
+ continue;
334
+ const raw = fs.readFileSync(path.join(dir, file), 'utf8');
335
+ const entry = deserializeEntry(raw);
336
+ if (entry)
337
+ entries.push(entry);
338
+ }
339
+ }
340
+ return entries;
341
+ }
342
+ function loadLegacyIndexFile(hippoRoot) {
343
+ const indexPath = path.join(hippoRoot, 'index.json');
344
+ if (!fs.existsSync(indexPath)) {
345
+ return { version: 1, entries: {}, last_retrieval_ids: [] };
346
+ }
347
+ try {
348
+ return JSON.parse(fs.readFileSync(indexPath, 'utf8'));
349
+ }
350
+ catch {
351
+ return { version: 1, entries: {}, last_retrieval_ids: [] };
352
+ }
353
+ }
354
+ function loadLegacyStatsFile(hippoRoot) {
355
+ const statsPath = path.join(hippoRoot, 'stats.json');
356
+ if (!fs.existsSync(statsPath)) {
357
+ return {
358
+ total_remembered: 0,
359
+ total_recalled: 0,
360
+ total_forgotten: 0,
361
+ consolidation_runs: [],
362
+ };
363
+ }
364
+ try {
365
+ return JSON.parse(fs.readFileSync(statsPath, 'utf8'));
366
+ }
367
+ catch {
368
+ return {
369
+ total_remembered: 0,
370
+ total_recalled: 0,
371
+ total_forgotten: 0,
372
+ consolidation_runs: [],
373
+ };
374
+ }
375
+ }
376
+ function upsertEntryRow(db, entry) {
377
+ db.prepare(`
378
+ INSERT INTO memories(
379
+ id, created, last_retrieved, retrieval_count, strength, half_life_days, layer,
380
+ tags_json, emotional_valence, schema_fit, source, outcome_score,
381
+ conflicts_with_json, pinned, confidence, content, updated_at
382
+ ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, datetime('now'))
383
+ ON CONFLICT(id) DO UPDATE SET
384
+ created = excluded.created,
385
+ last_retrieved = excluded.last_retrieved,
386
+ retrieval_count = excluded.retrieval_count,
387
+ strength = excluded.strength,
388
+ half_life_days = excluded.half_life_days,
389
+ layer = excluded.layer,
390
+ tags_json = excluded.tags_json,
391
+ emotional_valence = excluded.emotional_valence,
392
+ schema_fit = excluded.schema_fit,
393
+ source = excluded.source,
394
+ outcome_score = excluded.outcome_score,
395
+ conflicts_with_json = excluded.conflicts_with_json,
396
+ pinned = excluded.pinned,
397
+ confidence = excluded.confidence,
398
+ content = excluded.content,
399
+ updated_at = datetime('now')
400
+ `).run(entry.id, entry.created, entry.last_retrieved, entry.retrieval_count, entry.strength, entry.half_life_days, entry.layer, JSON.stringify(entry.tags ?? []), entry.emotional_valence, entry.schema_fit, entry.source, entry.outcome_score, JSON.stringify(entry.conflicts_with ?? []), entry.pinned ? 1 : 0, entry.confidence, entry.content);
401
+ syncFtsRow(db, entry);
402
+ }
403
+ function syncFtsRow(db, entry) {
404
+ if (!isFtsAvailable(db))
405
+ return;
406
+ try {
407
+ db.prepare(`DELETE FROM memories_fts WHERE id = ?`).run(entry.id);
408
+ db.prepare(`INSERT INTO memories_fts(id, content, tags) VALUES (?, ?, ?)`).run(entry.id, entry.content, entry.tags.join(' '));
409
+ }
410
+ catch {
411
+ // Best effort only. SQLite store is still authoritative even if FTS is unavailable.
412
+ }
413
+ }
414
+ function deleteFtsRow(db, id) {
415
+ if (!isFtsAvailable(db))
416
+ return;
417
+ try {
418
+ db.prepare(`DELETE FROM memories_fts WHERE id = ?`).run(id);
419
+ }
420
+ catch {
421
+ // Best effort.
422
+ }
423
+ }
424
+ function buildIndexFromDb(db) {
425
+ const rows = db.prepare(`SELECT id, created, last_retrieved, strength, layer, tags_json, pinned FROM memories ORDER BY created ASC, id ASC`).all();
426
+ const entries = {};
427
+ for (const row of rows) {
428
+ const layer = row.layer;
429
+ entries[row.id] = {
430
+ id: row.id,
431
+ file: path.join(layer, `${row.id}.md`),
432
+ layer,
433
+ strength: Number(row.strength ?? 0),
434
+ tags: parseJsonArray(row.tags_json),
435
+ created: row.created,
436
+ last_retrieved: row.last_retrieved,
437
+ pinned: Boolean(row.pinned),
438
+ };
439
+ }
440
+ return {
441
+ version: INDEX_VERSION,
442
+ entries,
443
+ last_retrieval_ids: parseJsonArray(getMeta(db, 'last_retrieval_ids', '[]')),
444
+ };
445
+ }
446
+ function buildStatsFromDb(db) {
447
+ const runs = db.prepare(`SELECT timestamp, decayed, merged, removed FROM consolidation_runs ORDER BY timestamp ASC, id ASC`).all();
448
+ return {
449
+ total_remembered: Number(getMeta(db, 'total_remembered', '0')),
450
+ total_recalled: Number(getMeta(db, 'total_recalled', '0')),
451
+ total_forgotten: Number(getMeta(db, 'total_forgotten', '0')),
452
+ consolidation_runs: runs,
453
+ };
454
+ }
455
+ function writeIndexMirror(hippoRoot, index) {
456
+ fs.writeFileSync(path.join(hippoRoot, 'index.json'), JSON.stringify(index, null, 2), 'utf8');
457
+ }
458
+ function writeStatsMirror(hippoRoot, stats) {
459
+ fs.writeFileSync(path.join(hippoRoot, 'stats.json'), JSON.stringify(stats, null, 2), 'utf8');
460
+ }
461
+ function syncMirrorFiles(hippoRoot, db) {
462
+ const entries = db.prepare(`SELECT id, created, last_retrieved, retrieval_count, strength, half_life_days, layer, tags_json, emotional_valence, schema_fit, source, outcome_score, conflicts_with_json, pinned, confidence, content FROM memories ORDER BY created ASC, id ASC`).all();
463
+ for (const entry of entries.map(rowToEntry)) {
464
+ writeMarkdownMirror(hippoRoot, entry);
465
+ }
466
+ const conflicts = db.prepare(`
467
+ SELECT id, memory_a_id, memory_b_id, reason, score, status, detected_at, updated_at
468
+ FROM memory_conflicts
469
+ WHERE status = 'open'
470
+ ORDER BY updated_at DESC, id DESC
471
+ `).all();
472
+ writeConflictMirrors(hippoRoot, conflicts.map(rowToMemoryConflict));
473
+ writeIndexMirror(hippoRoot, buildIndexFromDb(db));
474
+ writeStatsMirror(hippoRoot, buildStatsFromDb(db));
475
+ }
476
+ /**
477
+ * Load the current derived index from SQLite and refresh the mirror file.
478
+ */
479
+ export function loadIndex(hippoRoot) {
480
+ initStore(hippoRoot);
481
+ const db = openHippoDb(hippoRoot);
482
+ try {
483
+ const index = buildIndexFromDb(db);
484
+ writeIndexMirror(hippoRoot, index);
485
+ return index;
486
+ }
487
+ finally {
488
+ closeHippoDb(db);
489
+ }
490
+ }
491
+ /**
492
+ * Persist mutable index metadata. Entry rows themselves are derived from SQLite.
493
+ */
494
+ export function saveIndex(hippoRoot, index) {
495
+ initStore(hippoRoot);
496
+ const db = openHippoDb(hippoRoot);
497
+ try {
498
+ setMeta(db, 'last_retrieval_ids', JSON.stringify(index.last_retrieval_ids ?? []));
499
+ writeIndexMirror(hippoRoot, buildIndexFromDb(db));
500
+ }
501
+ finally {
502
+ closeHippoDb(db);
503
+ }
504
+ }
108
505
  /**
109
- * Write a memory entry to disk and update the index.
506
+ * Write a memory entry to SQLite and refresh compatibility mirrors.
110
507
  */
111
508
  export function writeEntry(hippoRoot, entry) {
112
- const dir = layerDir(hippoRoot, entry.layer);
113
- const filename = `${entry.id}.md`;
114
- const filepath = path.join(dir, filename);
115
- fs.writeFileSync(filepath, serializeEntry(entry), 'utf8');
116
- // Update index
117
- const index = loadIndex(hippoRoot);
118
- index.entries[entry.id] = {
119
- id: entry.id,
120
- file: path.join(entry.layer, filename),
121
- layer: entry.layer,
122
- strength: entry.strength,
123
- tags: entry.tags,
124
- created: entry.created,
125
- last_retrieved: entry.last_retrieved,
126
- pinned: entry.pinned,
127
- };
128
- saveIndex(hippoRoot, index);
509
+ initStore(hippoRoot);
510
+ const db = openHippoDb(hippoRoot);
511
+ try {
512
+ upsertEntryRow(db, entry);
513
+ writeMarkdownMirror(hippoRoot, entry);
514
+ writeIndexMirror(hippoRoot, buildIndexFromDb(db));
515
+ }
516
+ finally {
517
+ closeHippoDb(db);
518
+ }
129
519
  }
130
520
  /**
131
521
  * Read a memory entry by ID.
132
522
  */
133
523
  export function readEntry(hippoRoot, id) {
134
- const index = loadIndex(hippoRoot);
135
- const ref = index.entries[id];
136
- if (!ref)
137
- return null;
138
- const filepath = path.join(hippoRoot, ref.file);
139
- if (!fs.existsSync(filepath))
140
- return null;
141
- const raw = fs.readFileSync(filepath, 'utf8');
142
- return deserializeEntry(raw);
524
+ initStore(hippoRoot);
525
+ const db = openHippoDb(hippoRoot);
526
+ try {
527
+ const row = db.prepare(`SELECT id, created, last_retrieved, retrieval_count, strength, half_life_days, layer, tags_json, emotional_valence, schema_fit, source, outcome_score, conflicts_with_json, pinned, confidence, content FROM memories WHERE id = ?`).get(id);
528
+ return row ? rowToEntry(row) : null;
529
+ }
530
+ finally {
531
+ closeHippoDb(db);
532
+ }
143
533
  }
144
534
  /**
145
- * Delete an entry from disk and index.
535
+ * Delete an entry from SQLite and mirrors.
146
536
  */
147
537
  export function deleteEntry(hippoRoot, id) {
148
- const index = loadIndex(hippoRoot);
149
- const ref = index.entries[id];
150
- if (!ref)
151
- return false;
152
- const filepath = path.join(hippoRoot, ref.file);
153
- if (fs.existsSync(filepath)) {
154
- fs.unlinkSync(filepath);
155
- }
156
- delete index.entries[id];
157
- saveIndex(hippoRoot, index);
158
- return true;
538
+ initStore(hippoRoot);
539
+ const db = openHippoDb(hippoRoot);
540
+ try {
541
+ const exists = db.prepare(`SELECT id FROM memories WHERE id = ?`).get(id);
542
+ if (!exists?.id)
543
+ return false;
544
+ db.prepare(`DELETE FROM memories WHERE id = ?`).run(id);
545
+ deleteFtsRow(db, id);
546
+ removeEntryMirrors(hippoRoot, id);
547
+ writeIndexMirror(hippoRoot, buildIndexFromDb(db));
548
+ return true;
549
+ }
550
+ finally {
551
+ closeHippoDb(db);
552
+ }
159
553
  }
160
554
  /**
161
- * Load all entries from disk (for search, consolidation, etc.)
555
+ * Load all entries from SQLite.
162
556
  */
163
557
  export function loadAllEntries(hippoRoot) {
164
- const index = loadIndex(hippoRoot);
165
- const entries = [];
166
- for (const ref of Object.values(index.entries)) {
167
- const filepath = path.join(hippoRoot, ref.file);
168
- if (!fs.existsSync(filepath))
169
- continue;
170
- const raw = fs.readFileSync(filepath, 'utf8');
171
- const entry = deserializeEntry(raw);
172
- if (entry)
173
- entries.push(entry);
558
+ initStore(hippoRoot);
559
+ const db = openHippoDb(hippoRoot);
560
+ try {
561
+ const rows = db.prepare(`SELECT ${MEMORY_SELECT_COLUMNS} FROM memories ORDER BY created ASC, id ASC`).all();
562
+ return rows.map(rowToEntry);
563
+ }
564
+ finally {
565
+ closeHippoDb(db);
174
566
  }
175
- return entries;
176
567
  }
177
568
  /**
178
- * Rebuild the index from all markdown files on disk.
569
+ * Load likely search candidates directly from SQLite.
570
+ * Uses FTS5 when available, falls back to LIKE matching, then full-store fallback.
179
571
  */
180
- export function rebuildIndex(hippoRoot) {
181
- const index = { version: 1, entries: {}, last_retrieval_ids: [] };
182
- const layers = [Layer.Buffer, Layer.Episodic, Layer.Semantic];
183
- for (const layer of layers) {
184
- const dir = layerDir(hippoRoot, layer);
185
- if (!fs.existsSync(dir))
186
- continue;
187
- const files = fs.readdirSync(dir).filter((f) => f.endsWith('.md'));
188
- for (const file of files) {
189
- const filepath = path.join(dir, file);
190
- const raw = fs.readFileSync(filepath, 'utf8');
191
- const entry = deserializeEntry(raw);
192
- if (!entry)
193
- continue;
194
- index.entries[entry.id] = {
195
- id: entry.id,
196
- file: path.join(layer, file),
197
- layer: entry.layer,
198
- strength: entry.strength,
199
- tags: entry.tags,
200
- created: entry.created,
201
- last_retrieved: entry.last_retrieved,
202
- pinned: entry.pinned,
203
- };
204
- }
572
+ export function loadSearchEntries(hippoRoot, query, limit = DEFAULT_SEARCH_CANDIDATE_LIMIT) {
573
+ initStore(hippoRoot);
574
+ const db = openHippoDb(hippoRoot);
575
+ try {
576
+ return loadSearchRows(db, query, limit).map(rowToEntry);
577
+ }
578
+ finally {
579
+ closeHippoDb(db);
205
580
  }
206
- saveIndex(hippoRoot, index);
207
- return index;
208
581
  }
209
582
  /**
210
- * Update stats file.
583
+ * Rebuild mirrors from SQLite, importing any legacy markdown files not already present.
211
584
  */
585
+ export function rebuildIndex(hippoRoot) {
586
+ initStore(hippoRoot);
587
+ const db = openHippoDb(hippoRoot);
588
+ try {
589
+ const existingIds = new Set(db.prepare(`SELECT id FROM memories`).all().map((row) => row.id));
590
+ const legacyEntries = loadLegacyEntriesFromMarkdown(hippoRoot).filter((entry) => !existingIds.has(entry.id));
591
+ if (legacyEntries.length > 0) {
592
+ for (const entry of legacyEntries) {
593
+ upsertEntryRow(db, entry);
594
+ }
595
+ }
596
+ syncMirrorFiles(hippoRoot, db);
597
+ return buildIndexFromDb(db);
598
+ }
599
+ finally {
600
+ closeHippoDb(db);
601
+ }
602
+ }
212
603
  export function updateStats(hippoRoot, delta) {
213
- const statsPath = path.join(hippoRoot, 'stats.json');
214
- let stats = { total_remembered: 0, total_recalled: 0, total_forgotten: 0, consolidation_runs: [] };
215
- if (fs.existsSync(statsPath)) {
216
- stats = JSON.parse(fs.readFileSync(statsPath, 'utf8'));
217
- }
218
- if (delta.remembered)
219
- stats.total_remembered += delta.remembered;
220
- if (delta.recalled)
221
- stats.total_recalled += delta.recalled;
222
- if (delta.forgotten)
223
- stats.total_forgotten += delta.forgotten;
224
- fs.writeFileSync(statsPath, JSON.stringify(stats, null, 2), 'utf8');
604
+ initStore(hippoRoot);
605
+ const db = openHippoDb(hippoRoot);
606
+ try {
607
+ const remembered = Number(getMeta(db, 'total_remembered', '0')) + Number(delta.remembered ?? 0);
608
+ const recalled = Number(getMeta(db, 'total_recalled', '0')) + Number(delta.recalled ?? 0);
609
+ const forgotten = Number(getMeta(db, 'total_forgotten', '0')) + Number(delta.forgotten ?? 0);
610
+ setMeta(db, 'total_remembered', String(remembered));
611
+ setMeta(db, 'total_recalled', String(recalled));
612
+ setMeta(db, 'total_forgotten', String(forgotten));
613
+ writeStatsMirror(hippoRoot, buildStatsFromDb(db));
614
+ }
615
+ finally {
616
+ closeHippoDb(db);
617
+ }
225
618
  }
226
619
  export function loadStats(hippoRoot) {
227
- const statsPath = path.join(hippoRoot, 'stats.json');
228
- if (!fs.existsSync(statsPath))
229
- return {};
230
- return JSON.parse(fs.readFileSync(statsPath, 'utf8'));
620
+ initStore(hippoRoot);
621
+ const db = openHippoDb(hippoRoot);
622
+ try {
623
+ const stats = buildStatsFromDb(db);
624
+ writeStatsMirror(hippoRoot, stats);
625
+ return stats;
626
+ }
627
+ finally {
628
+ closeHippoDb(db);
629
+ }
231
630
  }
232
631
  export function appendConsolidationRun(hippoRoot, run) {
233
- const statsPath = path.join(hippoRoot, 'stats.json');
234
- const stats = fs.existsSync(statsPath)
235
- ? JSON.parse(fs.readFileSync(statsPath, 'utf8'))
236
- : { consolidation_runs: [] };
237
- if (!Array.isArray(stats.consolidation_runs))
238
- stats.consolidation_runs = [];
239
- stats.consolidation_runs.push(run);
240
- // Keep last 50 runs
241
- if (stats.consolidation_runs.length > 50) {
242
- stats.consolidation_runs = stats.consolidation_runs.slice(-50);
243
- }
244
- fs.writeFileSync(statsPath, JSON.stringify(stats, null, 2), 'utf8');
632
+ initStore(hippoRoot);
633
+ const db = openHippoDb(hippoRoot);
634
+ try {
635
+ db.prepare(`INSERT INTO consolidation_runs(timestamp, decayed, merged, removed) VALUES (?, ?, ?, ?)`).run(run.timestamp, run.decayed, run.merged, run.removed);
636
+ pruneConsolidationRuns(db, 50);
637
+ writeStatsMirror(hippoRoot, buildStatsFromDb(db));
638
+ }
639
+ finally {
640
+ closeHippoDb(db);
641
+ }
642
+ }
643
+ export function saveActiveTaskSnapshot(hippoRoot, snapshot) {
644
+ initStore(hippoRoot);
645
+ const db = openHippoDb(hippoRoot);
646
+ const now = new Date().toISOString();
647
+ try {
648
+ db.exec('BEGIN');
649
+ db.prepare(`UPDATE task_snapshots SET status = 'superseded', updated_at = ? WHERE status = 'active'`).run(now);
650
+ const result = db.prepare(`
651
+ INSERT INTO task_snapshots(task, summary, next_step, status, source, created_at, updated_at)
652
+ VALUES (?, ?, ?, 'active', ?, ?, ?)
653
+ `).run(snapshot.task, snapshot.summary, snapshot.next_step, snapshot.source ?? 'cli', now, now);
654
+ db.exec('COMMIT');
655
+ const id = Number(result.lastInsertRowid ?? 0);
656
+ const row = db.prepare(`
657
+ SELECT id, task, summary, next_step, status, source, created_at, updated_at
658
+ FROM task_snapshots
659
+ WHERE id = ?
660
+ `).get(id);
661
+ if (!row) {
662
+ throw new Error('Failed to reload saved active task snapshot');
663
+ }
664
+ const loaded = rowToTaskSnapshot(row);
665
+ writeActiveTaskMirror(hippoRoot, loaded);
666
+ return loaded;
667
+ }
668
+ catch (error) {
669
+ try {
670
+ db.exec('ROLLBACK');
671
+ }
672
+ catch {
673
+ // Ignore nested rollback failures.
674
+ }
675
+ throw error;
676
+ }
677
+ finally {
678
+ closeHippoDb(db);
679
+ }
680
+ }
681
+ export function loadActiveTaskSnapshot(hippoRoot) {
682
+ initStore(hippoRoot);
683
+ const db = openHippoDb(hippoRoot);
684
+ try {
685
+ const row = db.prepare(`
686
+ SELECT id, task, summary, next_step, status, source, created_at, updated_at
687
+ FROM task_snapshots
688
+ WHERE status = 'active'
689
+ ORDER BY updated_at DESC, id DESC
690
+ LIMIT 1
691
+ `).get();
692
+ if (!row) {
693
+ removeActiveTaskMirror(hippoRoot);
694
+ return null;
695
+ }
696
+ const loaded = rowToTaskSnapshot(row);
697
+ writeActiveTaskMirror(hippoRoot, loaded);
698
+ return loaded;
699
+ }
700
+ finally {
701
+ closeHippoDb(db);
702
+ }
703
+ }
704
+ export function clearActiveTaskSnapshot(hippoRoot, clearedStatus = 'cleared') {
705
+ initStore(hippoRoot);
706
+ const db = openHippoDb(hippoRoot);
707
+ const now = new Date().toISOString();
708
+ try {
709
+ const active = db.prepare(`SELECT id FROM task_snapshots WHERE status = 'active' ORDER BY updated_at DESC, id DESC LIMIT 1`).get();
710
+ if (!active?.id) {
711
+ removeActiveTaskMirror(hippoRoot);
712
+ return false;
713
+ }
714
+ db.prepare(`UPDATE task_snapshots SET status = ?, updated_at = ? WHERE id = ?`).run(clearedStatus, now, active.id);
715
+ removeActiveTaskMirror(hippoRoot);
716
+ return true;
717
+ }
718
+ finally {
719
+ closeHippoDb(db);
720
+ }
721
+ }
722
+ export function listMemoryConflicts(hippoRoot, status = 'open') {
723
+ initStore(hippoRoot);
724
+ const db = openHippoDb(hippoRoot);
725
+ try {
726
+ const rows = db.prepare(`
727
+ SELECT id, memory_a_id, memory_b_id, reason, score, status, detected_at, updated_at
728
+ FROM memory_conflicts
729
+ WHERE status = ?
730
+ ORDER BY updated_at DESC, id DESC
731
+ `).all(status);
732
+ return rows.map(rowToMemoryConflict);
733
+ }
734
+ finally {
735
+ closeHippoDb(db);
736
+ }
737
+ }
738
+ export function replaceDetectedConflicts(hippoRoot, detected, detectedAt = new Date().toISOString()) {
739
+ initStore(hippoRoot);
740
+ const db = openHippoDb(hippoRoot);
741
+ try {
742
+ db.exec('BEGIN');
743
+ const canonicalDetected = detected.map((conflict) => ({
744
+ ...canonicalConflictPair(conflict.memory_a_id, conflict.memory_b_id),
745
+ reason: conflict.reason,
746
+ score: conflict.score,
747
+ }));
748
+ const detectedKeys = new Set(canonicalDetected.map((conflict) => `${conflict.memory_a_id}::${conflict.memory_b_id}`));
749
+ const openRows = db.prepare(`
750
+ SELECT id, memory_a_id, memory_b_id, reason, score, status, detected_at, updated_at
751
+ FROM memory_conflicts
752
+ WHERE status = 'open'
753
+ `).all();
754
+ for (const row of openRows) {
755
+ const key = `${row.memory_a_id}::${row.memory_b_id}`;
756
+ if (!detectedKeys.has(key)) {
757
+ db.prepare(`UPDATE memory_conflicts SET status = 'resolved', updated_at = ? WHERE id = ?`).run(detectedAt, row.id);
758
+ }
759
+ }
760
+ for (const conflict of canonicalDetected) {
761
+ db.prepare(`
762
+ INSERT INTO memory_conflicts(memory_a_id, memory_b_id, reason, score, status, detected_at, updated_at)
763
+ VALUES (?, ?, ?, ?, 'open', ?, ?)
764
+ ON CONFLICT(memory_a_id, memory_b_id) DO UPDATE SET
765
+ reason = excluded.reason,
766
+ score = excluded.score,
767
+ status = 'open',
768
+ updated_at = excluded.updated_at
769
+ `).run(conflict.memory_a_id, conflict.memory_b_id, conflict.reason, conflict.score, detectedAt, detectedAt);
770
+ }
771
+ const openConflicts = db.prepare(`
772
+ SELECT memory_a_id, memory_b_id
773
+ FROM memory_conflicts
774
+ WHERE status = 'open'
775
+ `).all();
776
+ const refMap = new Map();
777
+ for (const row of openConflicts) {
778
+ if (!refMap.has(row.memory_a_id))
779
+ refMap.set(row.memory_a_id, new Set());
780
+ if (!refMap.has(row.memory_b_id))
781
+ refMap.set(row.memory_b_id, new Set());
782
+ refMap.get(row.memory_a_id).add(row.memory_b_id);
783
+ refMap.get(row.memory_b_id).add(row.memory_a_id);
784
+ }
785
+ const memoryRows = db.prepare(`SELECT id FROM memories`).all();
786
+ for (const memory of memoryRows) {
787
+ const refs = Array.from(refMap.get(memory.id) ?? []).sort();
788
+ db.prepare(`UPDATE memories SET conflicts_with_json = ?, updated_at = datetime('now') WHERE id = ?`).run(JSON.stringify(refs), memory.id);
789
+ }
790
+ db.exec('COMMIT');
791
+ syncMirrorFiles(hippoRoot, db);
792
+ }
793
+ catch (error) {
794
+ try {
795
+ db.exec('ROLLBACK');
796
+ }
797
+ catch {
798
+ // Ignore nested rollback failures.
799
+ }
800
+ throw error;
801
+ }
802
+ finally {
803
+ closeHippoDb(db);
804
+ }
245
805
  }
806
+ export { getHippoDbPath };
246
807
  //# sourceMappingURL=store.js.map