@psiclawops/hypermem 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (94) hide show
  1. package/ARCHITECTURE.md +296 -0
  2. package/LICENSE +190 -0
  3. package/README.md +243 -0
  4. package/dist/background-indexer.d.ts +117 -0
  5. package/dist/background-indexer.d.ts.map +1 -0
  6. package/dist/background-indexer.js +732 -0
  7. package/dist/compaction-fence.d.ts +89 -0
  8. package/dist/compaction-fence.d.ts.map +1 -0
  9. package/dist/compaction-fence.js +153 -0
  10. package/dist/compositor.d.ts +139 -0
  11. package/dist/compositor.d.ts.map +1 -0
  12. package/dist/compositor.js +1109 -0
  13. package/dist/cross-agent.d.ts +57 -0
  14. package/dist/cross-agent.d.ts.map +1 -0
  15. package/dist/cross-agent.js +254 -0
  16. package/dist/db.d.ts +131 -0
  17. package/dist/db.d.ts.map +1 -0
  18. package/dist/db.js +398 -0
  19. package/dist/desired-state-store.d.ts +100 -0
  20. package/dist/desired-state-store.d.ts.map +1 -0
  21. package/dist/desired-state-store.js +212 -0
  22. package/dist/doc-chunk-store.d.ts +115 -0
  23. package/dist/doc-chunk-store.d.ts.map +1 -0
  24. package/dist/doc-chunk-store.js +278 -0
  25. package/dist/doc-chunker.d.ts +99 -0
  26. package/dist/doc-chunker.d.ts.map +1 -0
  27. package/dist/doc-chunker.js +324 -0
  28. package/dist/episode-store.d.ts +48 -0
  29. package/dist/episode-store.d.ts.map +1 -0
  30. package/dist/episode-store.js +135 -0
  31. package/dist/fact-store.d.ts +57 -0
  32. package/dist/fact-store.d.ts.map +1 -0
  33. package/dist/fact-store.js +175 -0
  34. package/dist/fleet-store.d.ts +144 -0
  35. package/dist/fleet-store.d.ts.map +1 -0
  36. package/dist/fleet-store.js +276 -0
  37. package/dist/hybrid-retrieval.d.ts +60 -0
  38. package/dist/hybrid-retrieval.d.ts.map +1 -0
  39. package/dist/hybrid-retrieval.js +340 -0
  40. package/dist/index.d.ts +611 -0
  41. package/dist/index.d.ts.map +1 -0
  42. package/dist/index.js +1042 -0
  43. package/dist/knowledge-graph.d.ts +110 -0
  44. package/dist/knowledge-graph.d.ts.map +1 -0
  45. package/dist/knowledge-graph.js +305 -0
  46. package/dist/knowledge-store.d.ts +72 -0
  47. package/dist/knowledge-store.d.ts.map +1 -0
  48. package/dist/knowledge-store.js +241 -0
  49. package/dist/library-schema.d.ts +22 -0
  50. package/dist/library-schema.d.ts.map +1 -0
  51. package/dist/library-schema.js +717 -0
  52. package/dist/message-store.d.ts +76 -0
  53. package/dist/message-store.d.ts.map +1 -0
  54. package/dist/message-store.js +273 -0
  55. package/dist/preference-store.d.ts +54 -0
  56. package/dist/preference-store.d.ts.map +1 -0
  57. package/dist/preference-store.js +109 -0
  58. package/dist/preservation-gate.d.ts +82 -0
  59. package/dist/preservation-gate.d.ts.map +1 -0
  60. package/dist/preservation-gate.js +150 -0
  61. package/dist/provider-translator.d.ts +40 -0
  62. package/dist/provider-translator.d.ts.map +1 -0
  63. package/dist/provider-translator.js +349 -0
  64. package/dist/rate-limiter.d.ts +76 -0
  65. package/dist/rate-limiter.d.ts.map +1 -0
  66. package/dist/rate-limiter.js +179 -0
  67. package/dist/redis.d.ts +188 -0
  68. package/dist/redis.d.ts.map +1 -0
  69. package/dist/redis.js +534 -0
  70. package/dist/schema.d.ts +15 -0
  71. package/dist/schema.d.ts.map +1 -0
  72. package/dist/schema.js +203 -0
  73. package/dist/secret-scanner.d.ts +51 -0
  74. package/dist/secret-scanner.d.ts.map +1 -0
  75. package/dist/secret-scanner.js +248 -0
  76. package/dist/seed.d.ts +108 -0
  77. package/dist/seed.d.ts.map +1 -0
  78. package/dist/seed.js +177 -0
  79. package/dist/system-store.d.ts +73 -0
  80. package/dist/system-store.d.ts.map +1 -0
  81. package/dist/system-store.js +182 -0
  82. package/dist/topic-store.d.ts +45 -0
  83. package/dist/topic-store.d.ts.map +1 -0
  84. package/dist/topic-store.js +136 -0
  85. package/dist/types.d.ts +329 -0
  86. package/dist/types.d.ts.map +1 -0
  87. package/dist/types.js +9 -0
  88. package/dist/vector-store.d.ts +132 -0
  89. package/dist/vector-store.d.ts.map +1 -0
  90. package/dist/vector-store.js +498 -0
  91. package/dist/work-store.d.ts +112 -0
  92. package/dist/work-store.d.ts.map +1 -0
  93. package/dist/work-store.js +273 -0
  94. package/package.json +57 -0
@@ -0,0 +1,498 @@
1
+ /**
2
+ * HyperMem Vector Store — Semantic Search via sqlite-vec
3
+ *
4
+ * Provides embedding-backed KNN search over facts, knowledge, episodes,
5
+ * and session registry entries. Uses Ollama (local) for embeddings,
6
+ * sqlite-vec for vector indexing, and coexists with existing FTS5.
7
+ *
8
+ * Architecture:
9
+ * - One vec0 virtual table per indexed content type
10
+ * - Embeddings generated via local Ollama (nomic-embed-text, 768d)
11
+ * - Vectors stored alongside content in the same agent DB
12
+ * - Embedding cache to avoid redundant API calls
13
+ * - Batch embedding support for bulk indexing
14
+ */
15
+ import { createHash } from 'node:crypto';
16
// Default embedding settings. Frozen so the shared module-level default cannot
// be mutated by accident — consumers always spread-copy before overriding.
const DEFAULT_EMBEDDING_CONFIG = Object.freeze({
    ollamaUrl: 'http://localhost:11434', // local Ollama endpoint
    model: 'nomic-embed-text',
    dimensions: 768, // must match the vec0 column width created in ensureTables()
    timeout: 10000, // per-batch request timeout, ms
    batchSize: 32, // texts sent per /api/embed call
});
23
+ /**
24
+ * Generate embeddings via Ollama API.
25
+ * Supports single and batch embedding.
26
+ */
27
/**
 * Generate embeddings via the Ollama `/api/embed` endpoint.
 * Supports single and batch embedding; requests are chunked by `batchSize`
 * and each chunk is aborted after `timeout` ms.
 *
 * @param {string[]} texts - texts to embed; an empty array short-circuits to [].
 * @param {object} [config] - partial embedding config; merged over
 *   DEFAULT_EMBEDDING_CONFIG. (Previously a partial config with a missing
 *   `batchSize` made the loop step NaN and silently returned truncated results.)
 * @returns {Promise<Float32Array[]>} one vector per input text, in order.
 * @throws {Error} on HTTP failure, response shape mismatch, or dimension mismatch.
 */
export async function generateEmbeddings(texts, config = {}) {
    if (texts.length === 0)
        return [];
    // Merge over defaults so callers may safely pass a partial config.
    const cfg = { ...DEFAULT_EMBEDDING_CONFIG, ...config };
    const results = [];
    // Ollama /api/embed supports batch via `input` array
    for (let i = 0; i < texts.length; i += cfg.batchSize) {
        const batch = texts.slice(i, i + cfg.batchSize);
        const controller = new AbortController();
        const timer = setTimeout(() => controller.abort(), cfg.timeout);
        try {
            const response = await fetch(`${cfg.ollamaUrl}/api/embed`, {
                method: 'POST',
                headers: { 'Content-Type': 'application/json' },
                body: JSON.stringify({
                    model: cfg.model,
                    input: batch,
                }),
                signal: controller.signal,
            });
            if (!response.ok) {
                throw new Error(`Ollama embedding failed: ${response.status} ${response.statusText}`);
            }
            const data = await response.json();
            const embeddings = data?.embeddings;
            // Guard the response shape: a count mismatch would silently misalign
            // embeddings with their source texts in downstream batch indexing.
            if (!Array.isArray(embeddings) || embeddings.length !== batch.length) {
                throw new Error(`Ollama embedding response mismatch: expected ${batch.length} embeddings, got ${Array.isArray(embeddings) ? embeddings.length : 'none'}`);
            }
            for (const embedding of embeddings) {
                if (embedding.length !== cfg.dimensions) {
                    throw new Error(`Embedding dimension mismatch: expected ${cfg.dimensions}, got ${embedding.length}`);
                }
                results.push(new Float32Array(embedding));
            }
        }
        finally {
            // Always clear the abort timer, including on throw.
            clearTimeout(timer);
        }
    }
    return results;
}
63
+ /**
64
+ * Serialize a Float32Array to Uint8Array for sqlite-vec binding.
65
+ */
66
function vecToBytes(vec) {
    // View the same underlying buffer as raw bytes — no copy is made.
    const { buffer, byteOffset, byteLength } = vec;
    return new Uint8Array(buffer, byteOffset, byteLength);
}
69
+ /**
70
+ * VectorStore — manages vector indexes in an agent's vector database.
71
+ *
72
+ * The vector DB (vectors.db) stores vec0 virtual tables and the index map.
73
+ * Source content (facts, knowledge, episodes) lives in the library DB.
74
+ * The VectorStore needs both: vectorDb for indexes, libraryDb for content.
75
+ */
76
export class VectorStore {
    db; // vectors.db — holds the vec0 virtual tables and vec_index_map
    libraryDb; // library.db for source content (facts/knowledge/episodes); null when not provided
    config; // effective embedding config: DEFAULT_EMBEDDING_CONFIG overlaid with constructor overrides
    /**
     * @param {import('node:sqlite').DatabaseSync} db - vector database (vectors.db).
     * @param {object} [config] - partial embedding config; merged over DEFAULT_EMBEDDING_CONFIG.
     * @param {import('node:sqlite').DatabaseSync} [libraryDb] - library database holding the
     *   source rows. When omitted, content lookups fall back to `db` (legacy single-DB layout).
     */
    constructor(db, config, libraryDb) {
        this.db = db;
        this.libraryDb = libraryDb || null;
        this.config = { ...DEFAULT_EMBEDDING_CONFIG, ...config };
    }
    /**
     * Create vector index tables if they don't exist.
     * Safe to call multiple times (idempotent — all DDL uses IF NOT EXISTS).
     */
    ensureTables() {
        const dim = this.config.dimensions;
        // Vector index for facts
        this.db.exec(`
      CREATE VIRTUAL TABLE IF NOT EXISTS vec_facts
      USING vec0(embedding float[${dim}])
    `);
        // Vector index for knowledge
        this.db.exec(`
      CREATE VIRTUAL TABLE IF NOT EXISTS vec_knowledge
      USING vec0(embedding float[${dim}])
    `);
        // Vector index for episodes
        this.db.exec(`
      CREATE VIRTUAL TABLE IF NOT EXISTS vec_episodes
      USING vec0(embedding float[${dim}])
    `);
        // Vector index for session registry lives in the library DB and is
        // created separately via the exported ensureSessionVecTable() helper.
        // Mapping table: links vec rowids to source table rows.
        // A single mapping table serves all vec tables; its AUTOINCREMENT id
        // doubles as the rowid inside each vec_* table, so map row and vector
        // must always be inserted/deleted together.
        this.db.exec(`
      CREATE TABLE IF NOT EXISTS vec_index_map (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        source_table TEXT NOT NULL,
        source_id INTEGER NOT NULL,
        vec_table TEXT NOT NULL,
        content_hash TEXT NOT NULL,
        indexed_at TEXT NOT NULL,
        UNIQUE(source_table, source_id)
      )
    `);
        this.db.exec('CREATE INDEX IF NOT EXISTS idx_vec_map_source ON vec_index_map(source_table, source_id)');
        this.db.exec('CREATE INDEX IF NOT EXISTS idx_vec_map_vec ON vec_index_map(vec_table, id)');
    }
    /** Allowlisted source tables for vector indexing. Prevents SQL injection via table name interpolation. */
    static ALLOWED_SOURCE_TABLES = new Set(['facts', 'knowledge', 'episodes', 'sessions']);
    /**
     * Throw unless `sourceTable` is in the allowlist above. Called before any
     * method interpolates `vec_${sourceTable}` into SQL text.
     * @param {string} sourceTable
     * @throws {Error} when the table name is not allowlisted.
     */
    validateSourceTable(sourceTable) {
        if (!VectorStore.ALLOWED_SOURCE_TABLES.has(sourceTable)) {
            throw new Error(`Invalid sourceTable: "${sourceTable}". Must be one of: ${[...VectorStore.ALLOWED_SOURCE_TABLES].join(', ')}`);
        }
    }
    /**
     * Index a single content item. Generates an embedding and stores it in the
     * matching vec table. Skips work if the stored content hash is unchanged.
     * @param {string} sourceTable - one of ALLOWED_SOURCE_TABLES.
     * @param {number} sourceId - row id in the source table.
     * @param {string} content - text to embed.
     * @param {string} [domain] - NOTE(review): currently unused by this method — confirm intent.
     * @returns {Promise<boolean>} true if (re)indexed, false if skipped as unchanged.
     */
    async indexItem(sourceTable, sourceId, content, domain) {
        this.validateSourceTable(sourceTable);
        const vecTable = `vec_${sourceTable}`;
        const contentHash = simpleHash(content);
        // Check if already indexed with same content
        const existing = this.db
            .prepare('SELECT id, content_hash FROM vec_index_map WHERE source_table = ? AND source_id = ?')
            .get(sourceTable, sourceId);
        if (existing && existing.content_hash === contentHash) {
            return false; // Already indexed, content unchanged
        }
        // Generate embedding (single-element batch)
        const [embedding] = await generateEmbeddings([content], this.config);
        const bytes = vecToBytes(embedding);
        if (existing) {
            // Update: delete old vector, insert new under the same map id
            this.db.prepare(`DELETE FROM ${vecTable} WHERE rowid = CAST(? AS INTEGER)`).run(existing.id);
            this.db.prepare(`INSERT INTO ${vecTable}(rowid, embedding) VALUES (CAST(? AS INTEGER), ?)`).run(existing.id, bytes);
            this.db
                .prepare('UPDATE vec_index_map SET content_hash = ?, indexed_at = ? WHERE id = ?')
                .run(contentHash, new Date().toISOString(), existing.id);
        }
        else {
            // Insert new mapping row first to get the rowid
            const mapResult = this.db
                .prepare('INSERT INTO vec_index_map (source_table, source_id, vec_table, content_hash, indexed_at) VALUES (?, ?, ?, ?, ?)')
                .run(sourceTable, sourceId, vecTable, contentHash, new Date().toISOString());
            const mapRowId = Number(mapResult.lastInsertRowid);
            // Insert vector with matching rowid so map id == vec rowid
            this.db.prepare(`INSERT INTO ${vecTable}(rowid, embedding) VALUES (CAST(? AS INTEGER), ?)`).run(mapRowId, bytes);
        }
        return true;
    }
    /**
     * Batch index multiple items. One embedding round-trip per API batch and a
     * single SQL transaction, so this is much cheaper than repeated indexItem().
     * @param {Array<{sourceTable: string, sourceId: number, content: string}>} items
     * @returns {Promise<{indexed: number, skipped: number}>} newly indexed vs unchanged counts.
     */
    async indexBatch(items) {
        let indexed = 0;
        let skipped = 0;
        // Validate all source tables before processing any items
        for (const item of items) {
            this.validateSourceTable(item.sourceTable);
        }
        // Filter out already-indexed items (same content hash)
        const toIndex = [];
        for (const item of items) {
            const contentHash = simpleHash(item.content);
            const existing = this.db
                .prepare('SELECT content_hash FROM vec_index_map WHERE source_table = ? AND source_id = ?')
                .get(item.sourceTable, item.sourceId);
            if (existing && existing.content_hash === contentHash) {
                skipped++;
            }
            else {
                toIndex.push(item);
            }
        }
        if (toIndex.length === 0)
            return { indexed, skipped };
        // Batch generate embeddings (order matches toIndex)
        const texts = toIndex.map(item => item.content);
        const embeddings = await generateEmbeddings(texts, this.config);
        // Insert in a transaction so vec rows and map rows stay in lockstep
        this.db.exec('BEGIN');
        try {
            for (let i = 0; i < toIndex.length; i++) {
                const item = toIndex[i];
                const embedding = embeddings[i];
                const vecTable = `vec_${item.sourceTable}`;
                const contentHash = simpleHash(item.content);
                const bytes = vecToBytes(embedding);
                // Check for existing mapping (might need update vs insert)
                const existing = this.db
                    .prepare('SELECT id FROM vec_index_map WHERE source_table = ? AND source_id = ?')
                    .get(item.sourceTable, item.sourceId);
                if (existing) {
                    this.db.prepare(`DELETE FROM ${vecTable} WHERE rowid = CAST(? AS INTEGER)`).run(existing.id);
                    this.db.prepare(`INSERT INTO ${vecTable}(rowid, embedding) VALUES (CAST(? AS INTEGER), ?)`).run(existing.id, bytes);
                    this.db
                        .prepare('UPDATE vec_index_map SET content_hash = ?, indexed_at = ? WHERE id = ?')
                        .run(contentHash, new Date().toISOString(), existing.id);
                }
                else {
                    const mapResult = this.db
                        .prepare('INSERT INTO vec_index_map (source_table, source_id, vec_table, content_hash, indexed_at) VALUES (?, ?, ?, ?, ?)')
                        .run(item.sourceTable, item.sourceId, vecTable, contentHash, new Date().toISOString());
                    const mapRowId = Number(mapResult.lastInsertRowid);
                    this.db.prepare(`INSERT INTO ${vecTable}(rowid, embedding) VALUES (CAST(? AS INTEGER), ?)`).run(mapRowId, bytes);
                }
                indexed++;
            }
            this.db.exec('COMMIT');
        }
        catch (err) {
            // Roll back so a partial batch never leaves vec/map out of sync
            this.db.exec('ROLLBACK');
            throw err;
        }
        return { indexed, skipped };
    }
    /**
     * Semantic KNN search across one or all vector tables.
     * Per-table KNN results are merged and re-sorted by ascending distance.
     * @param {string} query - natural-language query to embed.
     * @param {{limit?: number, tables?: string[], maxDistance?: number}} [opts]
     * @returns {Promise<Array<object>>} hits with rowid, distance, source identity and content.
     */
    async search(query, opts) {
        const limit = opts?.limit || 10;
        const tables = opts?.tables || ['facts', 'knowledge', 'episodes'];
        // Validate all table names before any SQL construction
        for (const table of tables) {
            this.validateSourceTable(table);
        }
        // Generate query embedding
        const [queryEmbedding] = await generateEmbeddings([query], this.config);
        const queryBytes = vecToBytes(queryEmbedding);
        const results = [];
        for (const table of tables) {
            const vecTable = `vec_${table}`;
            // Check if the vec table exists (ensureTables may not have run for it)
            const tableExists = this.db
                .prepare("SELECT count(*) as cnt FROM sqlite_master WHERE type='table' AND name=?")
                .get(vecTable);
            if (!tableExists || tableExists.cnt === 0)
                continue;
            // KNN query — sqlite-vec exposes `distance` for MATCHed rows
            const rows = this.db
                .prepare(`SELECT rowid, distance
           FROM ${vecTable}
           WHERE embedding MATCH ?
           ORDER BY distance
           LIMIT ?`)
                .all(queryBytes, limit);
            for (const row of rows) {
                if (opts?.maxDistance !== undefined && row.distance > opts.maxDistance)
                    continue;
                // Look up source from mapping table (vec rowid == map id)
                const mapping = this.db
                    .prepare('SELECT source_table, source_id FROM vec_index_map WHERE id = ?')
                    .get(row.rowid);
                if (!mapping)
                    continue;
                // Fetch actual content from source table; drop hits whose source row is gone
                const sourceContent = this.getSourceContent(mapping.source_table, mapping.source_id);
                if (!sourceContent)
                    continue;
                results.push({
                    rowid: row.rowid,
                    distance: row.distance,
                    sourceTable: mapping.source_table,
                    sourceId: mapping.source_id,
                    content: sourceContent.content,
                    domain: sourceContent.domain,
                    agentId: sourceContent.agentId,
                    metadata: sourceContent.metadata,
                });
            }
        }
        // Sort all results by distance (cross-table) and cap at limit
        results.sort((a, b) => a.distance - b.distance);
        return results.slice(0, limit);
    }
    /**
     * Get content from a source table by id.
     * @param {string} table - 'facts' | 'knowledge' | 'episodes' (others return null).
     * @param {number} id - source row id.
     * @returns {{content: string, domain?: string, agentId?: string, metadata?: unknown} | null}
     */
    getSourceContent(table, id) {
        // Source content lives in the library DB (facts, knowledge, episodes)
        // or in the vector DB itself (if old schema). Try library first.
        const sourceDb = this.libraryDb || this.db;
        switch (table) {
            case 'facts': {
                const row = sourceDb
                    .prepare('SELECT content, domain, agent_id FROM facts WHERE id = ?')
                    .get(id);
                return row ? { content: row.content, domain: row.domain, agentId: row.agent_id } : null;
            }
            case 'knowledge': {
                // knowledge rows carry a `key`; surfaced as metadata
                const row = sourceDb
                    .prepare('SELECT content, domain, agent_id, key FROM knowledge WHERE id = ?')
                    .get(id);
                return row
                    ? { content: row.content, domain: row.domain, agentId: row.agent_id, metadata: row.key }
                    : null;
            }
            case 'episodes': {
                // episodes map summary→content and event_type→domain
                const row = sourceDb
                    .prepare('SELECT summary, event_type, agent_id, participants FROM episodes WHERE id = ?')
                    .get(id);
                return row
                    ? {
                        content: row.summary,
                        domain: row.event_type,
                        agentId: row.agent_id,
                        metadata: row.participants,
                    }
                    : null;
            }
            default:
                return null;
        }
    }
    /**
     * Index all un-indexed content in the agent's database.
     * Called by the background indexer. Superseded facts/knowledge are excluded
     * at the SQL level (superseded_by IS NULL).
     * @param {string} agentId - agent whose rows should be indexed.
     * @returns {Promise<{indexed: number, skipped: number}>}
     */
    async indexAll(agentId) {
        const items = [];
        const sourceDb = this.libraryDb || this.db;
        // Count already-indexed items for accurate skip reporting
        const alreadyIndexed = this.db
            .prepare('SELECT COUNT(*) as cnt FROM vec_index_map')
            .get().cnt;
        // Get IDs already indexed (in vector DB)
        const indexedFacts = new Set(this.db.prepare("SELECT source_id FROM vec_index_map WHERE source_table = 'facts'")
            .all().map(r => r.source_id));
        const indexedKnowledge = new Set(this.db.prepare("SELECT source_id FROM vec_index_map WHERE source_table = 'knowledge'")
            .all().map(r => r.source_id));
        const indexedEpisodes = new Set(this.db.prepare("SELECT source_id FROM vec_index_map WHERE source_table = 'episodes'")
            .all().map(r => r.source_id));
        // Collect un-indexed facts from library DB
        const facts = sourceDb
            .prepare('SELECT id, content, domain FROM facts WHERE agent_id = ? AND superseded_by IS NULL')
            .all(agentId);
        for (const f of facts) {
            if (!indexedFacts.has(f.id)) {
                items.push({ sourceTable: 'facts', sourceId: f.id, content: f.content });
            }
        }
        // Collect un-indexed knowledge from library DB
        const knowledge = sourceDb
            .prepare('SELECT id, content, domain, key FROM knowledge WHERE agent_id = ? AND superseded_by IS NULL')
            .all(agentId);
        for (const k of knowledge) {
            if (!indexedKnowledge.has(k.id)) {
                // Embed "key: content" so the key participates in similarity
                items.push({
                    sourceTable: 'knowledge',
                    sourceId: k.id,
                    content: `${k.key}: ${k.content}`,
                });
            }
        }
        // Collect un-indexed episodes from library DB
        const episodes = sourceDb
            .prepare('SELECT id, summary, event_type FROM episodes WHERE agent_id = ?')
            .all(agentId);
        for (const e of episodes) {
            if (!indexedEpisodes.has(e.id)) {
                items.push({ sourceTable: 'episodes', sourceId: e.id, content: e.summary });
            }
        }
        if (items.length === 0) {
            return { indexed: 0, skipped: alreadyIndexed };
        }
        const result = await this.indexBatch(items);
        return { indexed: result.indexed, skipped: result.skipped + alreadyIndexed };
    }
    /**
     * Remove vector index entries for deleted source rows.
     * @returns {number} number of orphaned vector entries removed.
     */
    pruneOrphans() {
        let pruned = 0;
        const sourceDb = this.libraryDb || this.db;
        for (const table of ['facts', 'knowledge', 'episodes']) {
            // Get all indexed IDs for this table
            const indexed = this.db
                .prepare('SELECT id, vec_table, source_id FROM vec_index_map WHERE source_table = ?')
                .all(table);
            for (const entry of indexed) {
                // Check if source still exists in library DB
                const exists = sourceDb
                    .prepare(`SELECT 1 FROM ${table} WHERE id = ?`)
                    .get(entry.source_id);
                if (!exists) {
                    // Delete vector row and its map entry together (vec rowid == map id)
                    this.db.prepare(`DELETE FROM ${entry.vec_table} WHERE rowid = CAST(? AS INTEGER)`).run(entry.id);
                    this.db.prepare('DELETE FROM vec_index_map WHERE id = ?').run(entry.id);
                    pruned++;
                }
            }
        }
        return pruned;
    }
    /**
     * Tombstone vector entries for superseded facts and knowledge.
     *
     * When fact A is superseded by fact B (facts.superseded_by = B.id), the old
     * vector for A should not surface in semantic recall. Without this, recalled
     * context can include contradicted/outdated facts alongside their replacements.
     *
     * Strategy: find all indexed facts/knowledge with superseded_by IS NOT NULL
     * and delete their vec_index_map entries + vec table rows. The source row
     * stays in library.db (audit trail) but disappears from recall.
     *
     * @returns {number} Number of vector entries tombstoned.
     */
    tombstoneSuperseded() {
        const sourceDb = this.libraryDb || this.db;
        let tombstoned = 0;
        for (const table of ['facts', 'knowledge']) {
            // Find all indexed entries whose source row has been superseded
            const indexed = this.db
                .prepare('SELECT vim.id, vim.vec_table, vim.source_id FROM vec_index_map vim WHERE vim.source_table = ?')
                .all(table);
            for (const entry of indexed) {
                const row = sourceDb
                    .prepare(`SELECT superseded_by FROM ${table} WHERE id = ?`)
                    .get(entry.source_id);
                if (row?.superseded_by != null) {
                    // Remove from vector table
                    this.db.prepare(`DELETE FROM ${entry.vec_table} WHERE rowid = CAST(? AS INTEGER)`).run(entry.id);
                    // Remove from index map
                    this.db.prepare('DELETE FROM vec_index_map WHERE id = ?').run(entry.id);
                    tombstoned++;
                }
            }
        }
        if (tombstoned > 0) {
            console.log(`[hypermem-vector] tombstoneSuperseded: removed ${tombstoned} stale vector entries`);
        }
        return tombstoned;
    }
    /**
     * Get index statistics.
     * @returns {{totalVectors: number, tableBreakdown: Record<string, number>, lastIndexedAt: string | null}}
     */
    getStats() {
        const breakdown = {};
        let total = 0;
        for (const table of ['facts', 'knowledge', 'episodes']) {
            const count = this.db
                .prepare('SELECT COUNT(*) as cnt FROM vec_index_map WHERE source_table = ?')
                .get(table);
            breakdown[table] = count.cnt;
            total += count.cnt;
        }
        // MAX(indexed_at) is NULL (→ null here) when nothing has been indexed yet
        const lastIndexed = this.db
            .prepare('SELECT MAX(indexed_at) as last_at FROM vec_index_map')
            .get();
        return {
            totalVectors: total,
            tableBreakdown: breakdown,
            lastIndexedAt: lastIndexed.last_at,
        };
    }
}
474
+ /**
475
+ * SHA-256 content hash for change detection and deduplication.
476
+ * Replaces the prior 32-bit rolling hash which had collision risk on large corpora.
477
+ */
478
function simpleHash(str) {
    // 16 hex chars = 64 bits of the SHA-256 digest — ample for change detection.
    const digest = createHash('sha256').update(str).digest('hex');
    return digest.slice(0, 16);
}
481
+ /**
482
+ * Create vector tables in a library database for session registry search.
483
+ */
484
/**
 * Create vector tables in a library database for session registry search.
 * Idempotent: all DDL uses IF NOT EXISTS.
 *
 * @param {import('node:sqlite').DatabaseSync} db - library database handle.
 * @param {number} [dimensions=768] - embedding width for the vec0 column.
 * @throws {Error} if `dimensions` is not a positive integer. The value is
 *   interpolated directly into DDL text, so this check also prevents SQL
 *   injection / corrupt schemas from a non-numeric argument.
 */
export function ensureSessionVecTable(db, dimensions = 768) {
    if (!Number.isInteger(dimensions) || dimensions <= 0) {
        throw new Error(`Invalid embedding dimensions: ${dimensions}. Must be a positive integer.`);
    }
    db.exec(`
    CREATE VIRTUAL TABLE IF NOT EXISTS vec_sessions
    USING vec0(embedding float[${dimensions}])
  `);
    db.exec(`
    CREATE TABLE IF NOT EXISTS vec_session_map (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      session_id TEXT NOT NULL UNIQUE,
      content_hash TEXT NOT NULL,
      indexed_at TEXT NOT NULL
    )
  `);
}
498
+ //# sourceMappingURL=vector-store.js.map
@@ -0,0 +1,112 @@
1
+ /**
2
+ * HyperMem Work Item Store
3
+ *
4
+ * Fleet kanban board in SQL. Replaces WORKQUEUE.md.
5
+ * Lives in the central library DB.
6
+ */
7
+ import type { DatabaseSync } from 'node:sqlite';
8
/** Lifecycle states for a work item on the fleet kanban board. */
export type WorkStatus = 'incoming' | 'active' | 'blocked' | 'review' | 'completed' | 'cancelled';
/** A single work item (kanban card) stored in the central library DB. */
export interface WorkItem {
    id: string;
    title: string;
    description: string | null;
    status: WorkStatus;
    priority: number; // NOTE(review): scale/ordering not visible here — confirm against work-store.js
    agentId: string | null; // assignee; null while unassigned
    createdBy: string;
    domain: string | null;
    parentId: string | null; // parent item id for subtasks; null at top level
    blockedBy: string | null;
    sessionKey: string | null;
    createdAt: string; // timestamps are strings — presumably ISO-8601; confirm in store impl
    updatedAt: string;
    startedAt: string | null;
    completedAt: string | null;
    dueAt: string | null;
    metadata: Record<string, unknown> | null;
}
28
/** An audit-trail event recorded against a work item (status change, assignment, comment, …). */
export interface WorkEvent {
    id: number;
    workItemId: string;
    eventType: string;
    oldStatus: string | null; // null for events that are not status transitions
    newStatus: string | null;
    agentId: string | null; // actor that produced the event, when known
    comment: string | null;
    createdAt: string;
}
38
/**
 * Fleet kanban board in SQL (replaces WORKQUEUE.md). Lives in the central
 * library DB; all methods are synchronous wrappers over node:sqlite.
 */
export declare class WorkStore {
    private readonly db;
    constructor(db: DatabaseSync);
    /**
     * Create a new work item.
     * `title` and `createdBy` are required; all other fields are optional.
     * @returns the newly created item.
     */
    create(data: {
        title: string;
        description?: string;
        priority?: number;
        agentId?: string;
        createdBy: string;
        domain?: string;
        parentId?: string;
        dueAt?: string;
        metadata?: Record<string, unknown>;
    }): WorkItem;
    /**
     * Update the status of a work item.
     * @returns the updated item, or null when no item with `id` exists.
     */
    updateStatus(id: string, newStatus: WorkStatus, agentId?: string, comment?: string): WorkItem | null;
    /**
     * Assign a work item to an agent.
     * @returns the updated item, or null when not found.
     */
    assign(id: string, agentId: string, assignedBy?: string): WorkItem | null;
    /**
     * Block a work item.
     * @param blockedBy - what is blocking it (presumably another item id or free text — confirm in work-store.js).
     * @returns the updated item, or null when not found.
     */
    block(id: string, blockedBy: string, agentId?: string, reason?: string): WorkItem | null;
    /**
     * Get a work item by ID.
     * @returns the item, or null when not found.
     */
    getItem(id: string): WorkItem | null;
    /**
     * Get active work for an agent, optionally narrowed to a single status.
     */
    getAgentWork(agentId: string, status?: WorkStatus): WorkItem[];
    /**
     * Get the fleet kanban — all active work grouped by status,
     * optionally filtered by domain and/or agent.
     */
    getKanban(opts?: {
        domain?: string;
        agentId?: string;
    }): WorkItem[];
    /**
     * Get blocked items across the fleet.
     */
    getBlocked(): WorkItem[];
    /**
     * Get completion stats for the fleet.
     * @param opts - optional filters: restrict to one agent and/or items since a timestamp.
     */
    getStats(opts?: {
        agentId?: string;
        since?: string;
    }): {
        total: number;
        incoming: number;
        active: number;
        blocked: number;
        review: number;
        completed: number;
        cancelled: number;
        avgDurationHours: number | null; // null when no completed items are in scope
    };
    /**
     * Get events (audit trail) for a work item, up to `limit` entries.
     */
    getEvents(workItemId: string, limit?: number): WorkEvent[];
    /**
     * Search work items.
     */
    search(query: string, limit?: number): WorkItem[];
    private recordEvent;
}
112
+ //# sourceMappingURL=work-store.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"work-store.d.ts","sourceRoot":"","sources":["../src/work-store.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAEH,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,aAAa,CAAC;AAMhD,MAAM,MAAM,UAAU,GAAG,UAAU,GAAG,QAAQ,GAAG,SAAS,GAAG,QAAQ,GAAG,WAAW,GAAG,WAAW,CAAC;AAElG,MAAM,WAAW,QAAQ;IACvB,EAAE,EAAE,MAAM,CAAC;IACX,KAAK,EAAE,MAAM,CAAC;IACd,WAAW,EAAE,MAAM,GAAG,IAAI,CAAC;IAC3B,MAAM,EAAE,UAAU,CAAC;IACnB,QAAQ,EAAE,MAAM,CAAC;IACjB,OAAO,EAAE,MAAM,GAAG,IAAI,CAAC;IACvB,SAAS,EAAE,MAAM,CAAC;IAClB,MAAM,EAAE,MAAM,GAAG,IAAI,CAAC;IACtB,QAAQ,EAAE,MAAM,GAAG,IAAI,CAAC;IACxB,SAAS,EAAE,MAAM,GAAG,IAAI,CAAC;IACzB,UAAU,EAAE,MAAM,GAAG,IAAI,CAAC;IAC1B,SAAS,EAAE,MAAM,CAAC;IAClB,SAAS,EAAE,MAAM,CAAC;IAClB,SAAS,EAAE,MAAM,GAAG,IAAI,CAAC;IACzB,WAAW,EAAE,MAAM,GAAG,IAAI,CAAC;IAC3B,KAAK,EAAE,MAAM,GAAG,IAAI,CAAC;IACrB,QAAQ,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,GAAG,IAAI,CAAC;CAC1C;AAED,MAAM,WAAW,SAAS;IACxB,EAAE,EAAE,MAAM,CAAC;IACX,UAAU,EAAE,MAAM,CAAC;IACnB,SAAS,EAAE,MAAM,CAAC;IAClB,SAAS,EAAE,MAAM,GAAG,IAAI,CAAC;IACzB,SAAS,EAAE,MAAM,GAAG,IAAI,CAAC;IACzB,OAAO,EAAE,MAAM,GAAG,IAAI,CAAC;IACvB,OAAO,EAAE,MAAM,GAAG,IAAI,CAAC;IACvB,SAAS,EAAE,MAAM,CAAC;CACnB;AA+CD,qBAAa,SAAS;IACR,OAAO,CAAC,QAAQ,CAAC,EAAE;gBAAF,EAAE,EAAE,YAAY;IAE7C;;OAEG;IACH,MAAM,CAAC,IAAI,EAAE;QACX,KAAK,EAAE,MAAM,CAAC;QACd,WAAW,CAAC,EAAE,MAAM,CAAC;QACrB,QAAQ,CAAC,EAAE,MAAM,CAAC;QAClB,OAAO,CAAC,EAAE,MAAM,CAAC;QACjB,SAAS,EAAE,MAAM,CAAC;QAClB,MAAM,CAAC,EAAE,MAAM,CAAC;QAChB,QAAQ,CAAC,EAAE,MAAM,CAAC;QAClB,KAAK,CAAC,EAAE,MAAM,CAAC;QACf,QAAQ,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;KACpC,GAAG,QAAQ;IA8CZ;;OAEG;IACH,YAAY,CAAC,EAAE,EAAE,MAAM,EAAE,SAAS,EAAE,UAAU,EAAE,OAAO,CAAC,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,MAAM,GAAG,QAAQ,GAAG,IAAI;IA2BpG;;OAEG;IACH,MAAM,CAAC,EAAE,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,UAAU,CAAC,EAAE,MAAM,GAAG,QAAQ,GAAG,IAAI;IAWzE;;OAEG;IACH,KAAK,CAAC,EAAE,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,MAAM,EAAE,MAAM,CAAC,EAAE,MAAM,GAAG,QAAQ,GAAG,IAAI;IAcxF;;OAEG;IACH,OAAO,CAAC,EAAE,EAAE,MAAM,GAAG,QAAQ,GAAG,IAAI;IAOpC;;OAEG;IACH,YAAY,CAAC,OAA
O,EAAE,MAAM,EAAE,MAAM,CAAC,EAAE,UAAU,GAAG,QAAQ,EAAE;IAiB9D;;OAEG;IACH,SAAS,CAAC,IAAI,CAAC,EAAE;QAAE,MAAM,CAAC,EAAE,MAAM,CAAC;QAAC,OAAO,CAAC,EAAE,MAAM,CAAA;KAAE,GAAG,QAAQ,EAAE;IAmBnE;;OAEG;IACH,UAAU,IAAI,QAAQ,EAAE;IAQxB;;OAEG;IACH,QAAQ,CAAC,IAAI,CAAC,EAAE;QAAE,OAAO,CAAC,EAAE,MAAM,CAAC;QAAC,KAAK,CAAC,EAAE,MAAM,CAAA;KAAE,GAAG;QACrD,KAAK,EAAE,MAAM,CAAC;QACd,QAAQ,EAAE,MAAM,CAAC;QACjB,MAAM,EAAE,MAAM,CAAC;QACf,OAAO,EAAE,MAAM,CAAC;QAChB,MAAM,EAAE,MAAM,CAAC;QACf,SAAS,EAAE,MAAM,CAAC;QAClB,SAAS,EAAE,MAAM,CAAC;QAClB,gBAAgB,EAAE,MAAM,GAAG,IAAI,CAAC;KACjC;IAuDD;;OAEG;IACH,SAAS,CAAC,UAAU,EAAE,MAAM,EAAE,KAAK,GAAE,MAAW,GAAG,SAAS,EAAE;IAQ9D;;OAEG;IACH,MAAM,CAAC,KAAK,EAAE,MAAM,EAAE,KAAK,GAAE,MAAW,GAAG,QAAQ,EAAE;IA0BrD,OAAO,CAAC,WAAW;CAapB"}