metame-cli 1.5.26 → 1.6.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46)
  1. package/index.js +4 -1
  2. package/package.json +1 -1
  3. package/scripts/agent-layer.js +36 -0
  4. package/scripts/core/chunker.js +100 -0
  5. package/scripts/core/embedding.js +225 -0
  6. package/scripts/core/hybrid-search.js +296 -0
  7. package/scripts/core/wiki-db.js +545 -0
  8. package/scripts/core/wiki-prompt.js +88 -0
  9. package/scripts/core/wiki-slug.js +66 -0
  10. package/scripts/core/wiki-staleness.js +18 -0
  11. package/scripts/daemon-agent-commands.js +10 -4
  12. package/scripts/daemon-bridges.js +16 -0
  13. package/scripts/daemon-claude-engine.js +62 -8
  14. package/scripts/daemon-command-router.js +40 -1
  15. package/scripts/daemon-default.yaml +33 -3
  16. package/scripts/daemon-embedding.js +162 -0
  17. package/scripts/daemon-engine-runtime.js +1 -1
  18. package/scripts/daemon-health-scan.js +185 -0
  19. package/scripts/daemon-ops-commands.js +9 -18
  20. package/scripts/daemon-runtime-lifecycle.js +1 -1
  21. package/scripts/daemon-session-commands.js +4 -0
  22. package/scripts/daemon-task-scheduler.js +5 -3
  23. package/scripts/daemon-warm-pool.js +15 -0
  24. package/scripts/daemon-wiki.js +420 -0
  25. package/scripts/daemon.js +10 -5
  26. package/scripts/distill.js +1 -1
  27. package/scripts/docs/file-transfer.md +0 -1
  28. package/scripts/docs/maintenance-manual.md +2 -55
  29. package/scripts/docs/pointer-map.md +0 -34
  30. package/scripts/feishu-adapter.js +25 -0
  31. package/scripts/hooks/intent-file-transfer.js +1 -2
  32. package/scripts/memory-backfill-chunks.js +92 -0
  33. package/scripts/memory-search.js +49 -6
  34. package/scripts/memory-wiki-schema.js +255 -0
  35. package/scripts/memory.js +103 -3
  36. package/scripts/signal-capture.js +1 -1
  37. package/scripts/skill-evolution.js +2 -11
  38. package/scripts/wiki-cluster.js +121 -0
  39. package/scripts/wiki-extract.js +171 -0
  40. package/scripts/wiki-facts.js +351 -0
  41. package/scripts/wiki-import.js +256 -0
  42. package/scripts/wiki-reflect-build.js +441 -0
  43. package/scripts/wiki-reflect-export.js +448 -0
  44. package/scripts/wiki-reflect-query.js +109 -0
  45. package/scripts/wiki-reflect.js +338 -0
  46. package/scripts/wiki-synthesis.js +224 -0
package/scripts/memory-backfill-chunks.js ADDED
@@ -0,0 +1,92 @@
+ #!/usr/bin/env node
+
+ 'use strict';
+
+ /**
+  * memory-backfill-chunks.js — One-time backfill for existing wiki pages
+  *
+  * For each wiki page that has content but no content_chunks rows:
+  *   1. Splits content into chunks via recursive chunker
+  *   2. Inserts chunk rows
+  *   3. Enqueues each chunk for embedding generation
+  *
+  * Idempotent: pages with existing chunks are skipped.
+  *
+  * Usage: node scripts/memory-backfill-chunks.js
+  */
+
+ const path = require('path');
+ const os = require('os');
+
+ const DB_PATH = path.join(os.homedir(), '.metame', 'memory.db');
+
+ function main() {
+   const { DatabaseSync } = require('node:sqlite');
+   const db = new DatabaseSync(DB_PATH);
+   db.exec('PRAGMA journal_mode = WAL');
+   db.exec('PRAGMA busy_timeout = 3000');
+
+   // Ensure schema is up to date
+   try {
+     const { applyWikiSchema } = require('./memory-wiki-schema');
+     applyWikiSchema(db);
+   } catch (err) {
+     process.stderr.write(`Schema init failed: ${err.message}\n`);
+     db.close();
+     process.exit(1);
+   }
+
+   const { chunkText } = require('./core/chunker');
+
+   // Find pages without chunks
+   const pages = db.prepare(`
+     SELECT wp.slug, wp.content
+     FROM wiki_pages wp
+     WHERE wp.content IS NOT NULL
+       AND wp.content != ''
+       AND NOT EXISTS (
+         SELECT 1 FROM content_chunks cc WHERE cc.page_slug = wp.slug
+       )
+   `).all();
+
+   if (pages.length === 0) {
+     console.log('All wiki pages already have chunks. Nothing to backfill.');
+     db.close();
+     return;
+   }
+
+   console.log(`Backfilling ${pages.length} wiki pages...`);
+
+   const insertChunk = db.prepare(
+     'INSERT INTO content_chunks (id, page_slug, chunk_text, chunk_idx) VALUES (?, ?, ?, ?)',
+   );
+   const enqueue = db.prepare(
+     "INSERT INTO embedding_queue (item_type, item_id) VALUES ('chunk', ?)",
+   );
+
+   let totalChunks = 0;
+
+   db.prepare('BEGIN').run();
+   try {
+     for (const page of pages) {
+       const chunks = chunkText(page.content, { targetWords: 300 });
+       for (let i = 0; i < chunks.length; i++) {
+         const chunkId = `ck_${Date.now()}_${Math.random().toString(36).slice(2, 8)}`;
+         insertChunk.run(chunkId, page.slug, chunks[i], i);
+         enqueue.run(chunkId);
+         totalChunks++;
+       }
+     }
+     db.prepare('COMMIT').run();
+   } catch (err) {
+     try { db.prepare('ROLLBACK').run(); } catch { }
+     process.stderr.write(`Backfill failed: ${err.message}\n`);
+     db.close();
+     process.exit(1);
+   }
+
+   console.log(`Done. Created ${totalChunks} chunks for ${pages.length} pages. Run daemon-embedding.js to generate embeddings.`);
+   db.close();
+ }
+
+ main();
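
core/chunker.js (+100 in the file list) is not included in this excerpt, so the exact splitting strategy is not visible here. A minimal sketch of the chunkText(text, { targetWords }) contract the backfill relies on, assuming a simple paragraph-boundary splitter rather than the real recursive implementation:

```js
// Hypothetical sketch of the chunkText contract used by the backfill script.
// The real scripts/core/chunker.js is not shown in this diff; this only
// illustrates the assumed interface: one string in, an array of chunk strings out.
function chunkText(text, { targetWords = 300 } = {}) {
  const paragraphs = text.split(/\n{2,}/);
  const chunks = [];
  let current = [];
  let words = 0;
  for (const para of paragraphs) {
    const w = para.split(/\s+/).filter(Boolean).length;
    if (words + w > targetWords && current.length > 0) {
      chunks.push(current.join('\n\n')); // close the current chunk at a paragraph boundary
      current = [];
      words = 0;
    }
    current.push(para);
    words += w;
  }
  if (current.length > 0) chunks.push(current.join('\n\n'));
  return chunks;
}
```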
package/scripts/memory-search.js CHANGED
@@ -3,14 +3,15 @@
   * memory-search.js — Cross-session memory recall CLI
   *
   * Usage:
-  *   node memory-search.js "<query>"             # hybrid search (QMD + FTS5)
+  *   node memory-search.js "<query>"             # hybrid search (FTS5 + vector + RRF)
   *   node memory-search.js "<q1>" "<q2>" "<q3>"  # multi-keyword parallel search
   *   node memory-search.js --facts "<query>"     # search facts only
   *   node memory-search.js --sessions "<query>"  # search sessions only
+  *   node memory-search.js --fts-only "<query>"  # force pure FTS5 (no vector)
   *   node memory-search.js --recent              # show recent sessions
   *
   * Multi-keyword: results are deduplicated by fact ID, best rank wins.
-  * Async: uses QMD hybrid search (BM25 + vector) when available, falls back to FTS5.
+  * Hybrid: uses FTS5 + vector embeddings + RRF fusion when available, falls back to FTS5.
   */

  'use strict';
@@ -31,8 +32,20 @@ if (!memoryPath) {
  const memory = require(memoryPath);

  const args = process.argv.slice(2);
- const mode = args[0] && args[0].startsWith('--') ? args[0] : null;
- const queries = mode ? args.slice(1) : args;
+ // Parse flags: allow multiple -- flags before queries
+ const flags = new Set();
+ let firstQueryIdx = 0;
+ for (let i = 0; i < args.length; i++) {
+   if (args[i].startsWith('--')) { flags.add(args[i]); firstQueryIdx = i + 1; }
+   else break;
+ }
+ const mode = flags.has('--facts') ? '--facts'
+   : flags.has('--sessions') ? '--sessions'
+   : flags.has('--recent') ? '--recent'
+   : flags.has('--fts-only') ? '--fts-only'
+   : null;
+ const ftsOnly = flags.has('--fts-only');
+ const queries = args.slice(firstQueryIdx);

  async function main() {
    try {
@@ -66,7 +79,7 @@ async function main() {
        return;
      }

-     // Default: search both facts and sessions, all queries in parallel
+     // Default: search facts, sessions, and wiki pages in parallel
      const factResults = await searchMulti(queries, {
        searchFn: q => useAsync ? memory.searchFactsAsync(q, { limit: 5 }) : Promise.resolve(memory.searchFacts(q, { limit: 5 })),
        type: 'fact',
@@ -79,7 +92,37 @@ async function main() {
        limit: 3,
      });

-     console.log(JSON.stringify([...factResults, ...sessionResults], null, 2));
+     // Wiki pages — hybrid search (FTS5 + vector + RRF) when available
+     let wikiResults = [];
+     const useHybrid = typeof memory.hybridSearchWiki === 'function';
+     try {
+       const allWiki = [];
+       const seen = new Set();
+       for (const q of queries) {
+         const { wikiPages } = useHybrid
+           ? await memory.hybridSearchWiki(q, { ftsOnly, trackSearch: true })
+           : (typeof memory.searchWikiAndFacts === 'function'
+             ? memory.searchWikiAndFacts(q, { trackSearch: true })
+             : { wikiPages: [] });
+         for (const p of (wikiPages || [])) {
+           if (!seen.has(p.slug)) {
+             seen.add(p.slug);
+             allWiki.push({
+               type: 'wiki',
+               slug: p.slug,
+               title: p.title,
+               excerpt: p.excerpt,
+               score: p.score,
+               stale: p.stale,
+               source: p.source,
+             });
+           }
+         }
+       }
+       wikiResults = allWiki.slice(0, 5);
+     } catch { /* wiki not available */ }
+
+     console.log(JSON.stringify([...wikiResults, ...factResults, ...sessionResults], null, 2));

    } catch (e) {
      console.log('[]');
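
The updated help text advertises FTS5 + vector + RRF, but core/hybrid-search.js (+296) is not shown in this diff. For reference, reciprocal rank fusion combines ranked lists by summing 1 / (k + rank); a minimal sketch, assuming each backend returns rows carrying a slug, and using the conventional k = 60 (a default not confirmed by this diff):

```js
// Reciprocal Rank Fusion sketch: score(d) = sum over rankings of 1 / (k + rank(d)).
// ftsRows and vecRows are illustrative names for ranked result arrays from
// FTS5 and vector search; the real hybrid-search.js may differ.
function rrfFuse(ftsRows, vecRows, k = 60) {
  const scores = new Map();
  for (const rows of [ftsRows, vecRows]) {
    rows.forEach((row, i) => {
      scores.set(row.slug, (scores.get(row.slug) || 0) + 1 / (k + i + 1));
    });
  }
  return [...scores.entries()]
    .sort((a, b) => b[1] - a[1])
    .map(([slug, score]) => ({ slug, score }));
}
```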
package/scripts/memory-wiki-schema.js ADDED
@@ -0,0 +1,255 @@
+ 'use strict';
+
+ /**
+  * memory-wiki-schema.js — Wiki DB schema initializer
+  *
+  * Exports:
+  *   applyWikiSchema(db) — accepts a DatabaseSync instance, applies all DDL
+  *   (IF NOT EXISTS, idempotent — safe to call multiple times)
+  *
+  * Tables:
+  *   wiki_pages      — topic knowledge pages
+  *   wiki_topics     — controlled topic registry
+  *   wiki_pages_fts  — FTS5 virtual table (content table, trigram tokenizer)
+  *   content_chunks  — chunked page content with optional vector embeddings
+  *   embedding_queue — durable async queue for embedding generation
+  *
+  * Triggers:
+  *   wiki_pages_fts_insert / wiki_pages_fts_update / wiki_pages_fts_delete
+  */
+
+ /**
+  * Apply wiki schema to a DatabaseSync instance.
+  * @param {import('node:sqlite').DatabaseSync} db
+  */
+ function applyWikiSchema(db) {
+   // ── wiki_pages ──────────────────────────────────────────────────────────
+   db.exec(`
+     CREATE TABLE IF NOT EXISTS wiki_pages (
+       id TEXT PRIMARY KEY,
+       slug TEXT UNIQUE NOT NULL,
+       title TEXT NOT NULL,
+       content TEXT NOT NULL,
+       primary_topic TEXT NOT NULL,
+       topic_tags TEXT DEFAULT '[]',
+       raw_source_ids TEXT DEFAULT '[]',
+       capsule_refs TEXT DEFAULT '[]',
+       staleness REAL DEFAULT 0.0,
+       raw_source_count INTEGER DEFAULT 0,
+       new_facts_since_build INTEGER DEFAULT 0,
+       word_count INTEGER DEFAULT 0,
+       last_built_at TEXT,
+       created_at TEXT DEFAULT (datetime('now')),
+       updated_at TEXT DEFAULT (datetime('now'))
+     )
+   `);
+
+   // Migration: add timeline column for Compiled Truth + Timeline model (existing DBs)
+   try { db.exec("ALTER TABLE wiki_pages ADD COLUMN timeline TEXT DEFAULT ''"); } catch { /* column already exists */ }
+
+   // ── wiki_topics ─────────────────────────────────────────────────────────
+   db.exec(`
+     CREATE TABLE IF NOT EXISTS wiki_topics (
+       tag TEXT PRIMARY KEY,
+       slug TEXT UNIQUE NOT NULL,
+       label TEXT NOT NULL,
+       pinned INTEGER DEFAULT 0,
+       created_at TEXT DEFAULT (datetime('now'))
+     )
+   `);
+
+   // ── wiki_pages_fts (FTS5 content table) ─────────────────────────────────
+   try {
+     db.exec(`
+       CREATE VIRTUAL TABLE IF NOT EXISTS wiki_pages_fts USING fts5(
+         slug, title, content, topic_tags,
+         content='wiki_pages',
+         content_rowid='rowid',
+         tokenize='trigram'
+       )
+     `);
+   } catch { /* already exists */ }
+
+   // ── FTS5 sync triggers ──────────────────────────────────────────────────
+   db.exec(`
+     CREATE TRIGGER IF NOT EXISTS wiki_pages_fts_insert
+     AFTER INSERT ON wiki_pages BEGIN
+       INSERT INTO wiki_pages_fts(rowid, slug, title, content, topic_tags)
+       VALUES (new.rowid, new.slug, new.title, new.content, new.topic_tags);
+     END
+   `);
+
+   // DROP+CREATE to upgrade existing unguarded trigger on deployed DBs
+   db.exec('DROP TRIGGER IF EXISTS wiki_pages_fts_update');
+   db.exec(`
+     CREATE TRIGGER wiki_pages_fts_update
+     AFTER UPDATE ON wiki_pages
+     WHEN old.slug IS NOT new.slug OR old.title IS NOT new.title
+       OR old.content IS NOT new.content OR old.topic_tags IS NOT new.topic_tags
+     BEGIN
+       INSERT INTO wiki_pages_fts(wiki_pages_fts, rowid, slug, title, content, topic_tags)
+       VALUES ('delete', old.rowid, old.slug, old.title, old.content, old.topic_tags);
+       INSERT INTO wiki_pages_fts(rowid, slug, title, content, topic_tags)
+       VALUES (new.rowid, new.slug, new.title, new.content, new.topic_tags);
+     END
+   `);
+
+   db.exec(`
+     CREATE TRIGGER IF NOT EXISTS wiki_pages_fts_delete
+     AFTER DELETE ON wiki_pages BEGIN
+       INSERT INTO wiki_pages_fts(wiki_pages_fts, rowid, slug, title, content, topic_tags)
+       VALUES ('delete', old.rowid, old.slug, old.title, old.content, old.topic_tags);
+     END
+   `);
+
+   // ── content_chunks (vector embedding storage for wiki pages) ────────────
+   db.exec(`
+     CREATE TABLE IF NOT EXISTS content_chunks (
+       id TEXT PRIMARY KEY,
+       page_slug TEXT NOT NULL,
+       chunk_text TEXT NOT NULL,
+       chunk_idx INTEGER NOT NULL,
+       embedding BLOB,
+       embedding_model TEXT,
+       embedding_dim INTEGER,
+       created_at TEXT DEFAULT (datetime('now'))
+     )
+   `);
+   try { db.exec('CREATE INDEX IF NOT EXISTS idx_chunks_slug ON content_chunks(page_slug)'); } catch { }
+
+   // ── embedding_queue (durable async queue for embedding generation) ──────
+   db.exec(`
+     CREATE TABLE IF NOT EXISTS embedding_queue (
+       id INTEGER PRIMARY KEY AUTOINCREMENT,
+       item_type TEXT NOT NULL,
+       item_id TEXT NOT NULL,
+       model TEXT DEFAULT 'text-embedding-3-small',
+       attempts INTEGER DEFAULT 0,
+       last_error TEXT,
+       created_at TEXT DEFAULT (datetime('now'))
+     )
+   `);
+
+   // ── doc_sources ───────────────────────────────────────────────────────
+   db.exec(`
+     CREATE TABLE IF NOT EXISTS doc_sources (
+       id INTEGER PRIMARY KEY,
+       file_path TEXT UNIQUE NOT NULL,
+       file_hash TEXT NOT NULL,
+       mtime_ms INTEGER,
+       size_bytes INTEGER,
+       extracted_text_hash TEXT,
+       file_type TEXT NOT NULL CHECK (file_type IN ('md','txt','pdf')),
+       extractor TEXT,
+       extract_status TEXT DEFAULT 'pending'
+         CHECK (extract_status IN ('ok','empty_or_scanned','error','pending')),
+       title TEXT,
+       slug TEXT UNIQUE NOT NULL,
+       status TEXT DEFAULT 'active'
+         CHECK (status IN ('active','orphaned','missing')),
+       error_message TEXT,
+       indexed_at TEXT NOT NULL,
+       last_seen_at TEXT,
+       built_at TEXT,
+       content_stale INTEGER DEFAULT 1
+     )
+   `);
+   db.exec(`CREATE INDEX IF NOT EXISTS idx_doc_sources_status ON doc_sources(status)`);
+   db.exec(`CREATE INDEX IF NOT EXISTS idx_doc_sources_file_hash ON doc_sources(file_hash)`);
+   db.exec(`CREATE INDEX IF NOT EXISTS idx_doc_sources_slug ON doc_sources(slug)`);
+   db.exec(`CREATE INDEX IF NOT EXISTS idx_doc_sources_content_stale ON doc_sources(content_stale)`);
+
+   // ── wiki_page_doc_sources ─────────────────────────────────────────────
+   db.exec(`
+     CREATE TABLE IF NOT EXISTS wiki_page_doc_sources (
+       page_slug TEXT NOT NULL,
+       doc_source_id INTEGER NOT NULL,
+       role TEXT NOT NULL CHECK (role IN ('primary','cluster_member')),
+       PRIMARY KEY (page_slug, doc_source_id, role),
+       FOREIGN KEY (page_slug) REFERENCES wiki_pages(slug) ON DELETE CASCADE,
+       FOREIGN KEY (doc_source_id) REFERENCES doc_sources(id) ON DELETE CASCADE
+     )
+   `);
+
+   // ── wiki_pages additions (idempotent ALTER) ───────────────────────────
+   for (const [col, def] of [
+     ['source_type', "TEXT DEFAULT 'memory'"],
+     ['membership_hash','TEXT'],
+     ['cluster_size', 'INTEGER'],
+   ]) {
+     try { db.exec(`ALTER TABLE wiki_pages ADD COLUMN ${col} ${def}`); } catch { /* already exists */ }
+   }
+   db.exec("UPDATE wiki_pages SET source_type = 'memory' WHERE source_type IS NULL");
+
+   // ── doc_sources additions (idempotent ALTER) ──────────────────────────
+   for (const [col, def] of [
+     ['doi', 'TEXT'],
+     ['year', 'INTEGER'],
+     ['venue', 'TEXT'],
+     ['zotero_key', 'TEXT'],
+     ['citation_count', 'INTEGER'],
+   ]) {
+     try { db.exec(`ALTER TABLE doc_sources ADD COLUMN ${col} ${def}`); } catch { /* already exists */ }
+   }
+
+   // ── paper_facts ───────────────────────────────────────────────────────
+   db.exec(`
+     CREATE TABLE IF NOT EXISTS paper_facts (
+       id TEXT PRIMARY KEY,
+       doc_source_id INTEGER NOT NULL,
+       fact_type TEXT NOT NULL CHECK (fact_type IN (
+         'problem','method','claim','assumption',
+         'dataset','metric','result','baseline',
+         'limitation','future_work','contradiction_note'
+       )),
+       subject TEXT,
+       predicate TEXT,
+       object TEXT,
+       value TEXT,
+       unit TEXT,
+       context TEXT,
+       evidence_text TEXT NOT NULL,
+       section TEXT,
+       extraction_source TEXT DEFAULT 'pdf_llm_section'
+         CHECK (extraction_source IN (
+           'pdf_llm_section',
+           'zotero_deep_read',
+           'manual'
+         )),
+       confidence REAL DEFAULT 0.7,
+       created_at TEXT DEFAULT (datetime('now')),
+       FOREIGN KEY (doc_source_id) REFERENCES doc_sources(id) ON DELETE CASCADE
+     )
+   `);
+   db.exec('CREATE INDEX IF NOT EXISTS idx_paper_facts_doc ON paper_facts(doc_source_id)');
+   db.exec('CREATE INDEX IF NOT EXISTS idx_paper_facts_type ON paper_facts(fact_type)');
+   db.exec('CREATE INDEX IF NOT EXISTS idx_paper_facts_subject ON paper_facts(subject)');
+
+   // ── research_entities ─────────────────────────────────────────────────
+   db.exec(`
+     CREATE TABLE IF NOT EXISTS research_entities (
+       id TEXT PRIMARY KEY,
+       entity_type TEXT NOT NULL CHECK (entity_type IN (
+         'problem','concept','method_family','dataset','metric','application'
+       )),
+       name TEXT NOT NULL UNIQUE,
+       aliases TEXT DEFAULT '[]',
+       description TEXT,
+       created_at TEXT DEFAULT (datetime('now'))
+     )
+   `);
+
+   // ── fact_entity_links ─────────────────────────────────────────────────
+   db.exec(`
+     CREATE TABLE IF NOT EXISTS fact_entity_links (
+       fact_id TEXT NOT NULL,
+       entity_id TEXT NOT NULL,
+       role TEXT,
+       PRIMARY KEY (fact_id, entity_id),
+       FOREIGN KEY (fact_id) REFERENCES paper_facts(id) ON DELETE CASCADE,
+       FOREIGN KEY (entity_id) REFERENCES research_entities(id) ON DELETE CASCADE
+     )
+   `);
+ }
+
+ module.exports = { applyWikiSchema };
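
daemon-embedding.js (+162) drains embedding_queue but is not part of this excerpt. A hedged sketch of what a consumer could look like against this schema, with embed() standing in for whatever model call produces the vector; the retry bookkeeping maps onto the attempts and last_error columns:

```js
// Hypothetical embedding_queue consumer; function and variable names here are
// illustrative, not the actual daemon-embedding.js implementation.
async function drainEmbeddingQueue(db, embed, batch = 16) {
  const jobs = db.prepare(
    "SELECT id, item_id, model FROM embedding_queue WHERE item_type = 'chunk' ORDER BY id LIMIT ?",
  ).all(batch);
  for (const job of jobs) {
    const chunk = db.prepare('SELECT chunk_text FROM content_chunks WHERE id = ?').get(job.item_id);
    if (!chunk) { // chunk was deleted; drop the orphaned job
      db.prepare('DELETE FROM embedding_queue WHERE id = ?').run(job.id);
      continue;
    }
    try {
      const vector = await embed(chunk.chunk_text, job.model); // number[]
      const blob = Buffer.from(new Float32Array(vector).buffer); // f32 BLOB for content_chunks.embedding
      db.prepare(
        'UPDATE content_chunks SET embedding = ?, embedding_model = ?, embedding_dim = ? WHERE id = ?',
      ).run(blob, job.model, vector.length, job.item_id);
      db.prepare('DELETE FROM embedding_queue WHERE id = ?').run(job.id);
    } catch (err) {
      db.prepare('UPDATE embedding_queue SET attempts = attempts + 1, last_error = ? WHERE id = ?')
        .run(err.message, job.id);
    }
  }
}
```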
package/scripts/memory.js CHANGED
@@ -47,6 +47,7 @@ function getDb() {

    _db.exec('PRAGMA journal_mode = WAL');
    _db.exec('PRAGMA busy_timeout = 3000');
+   _db.exec('PRAGMA foreign_keys = ON');

    _db.exec(`
      CREATE TABLE IF NOT EXISTS memory_items (
@@ -108,6 +109,18 @@ function getDb() {
    try { _db.exec('CREATE INDEX IF NOT EXISTS idx_mi_scope ON memory_items(scope)'); } catch { }
    try { _db.exec('CREATE INDEX IF NOT EXISTS idx_mi_supersedes ON memory_items(supersedes_id)'); } catch { }

+   // Migration: add relation column if not present (existing DBs)
+   try { _db.exec('ALTER TABLE memory_items ADD COLUMN relation TEXT'); } catch { /* column already exists */ }
+   try { _db.exec('CREATE INDEX IF NOT EXISTS idx_mi_relation ON memory_items(relation)'); } catch { }
+
+   // Apply wiki schema after memory_items is fully initialized (idempotent, non-fatal)
+   try {
+     const { applyWikiSchema } = require('./memory-wiki-schema');
+     applyWikiSchema(_db);
+   } catch (err) {
+     process.stderr.write(`[memory] wiki schema init failed: ${err.message}\n`);
+   }
+
    return _db;
  }

@@ -128,16 +141,16 @@ function saveMemoryItem(item) {
    const stmt = db.prepare(`
      INSERT INTO memory_items (id, kind, state, title, content, summary, confidence,
        project, scope, task_key, session_id, agent_key, supersedes_id,
-       source_type, source_id, search_count, last_searched_at, tags,
+       source_type, source_id, relation, search_count, last_searched_at, tags,
        created_at, updated_at)
-     VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, datetime('now'), datetime('now'))
+     VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, datetime('now'), datetime('now'))
      ON CONFLICT(id) DO UPDATE SET
        kind=excluded.kind, state=excluded.state, title=excluded.title,
        content=excluded.content, summary=excluded.summary, confidence=excluded.confidence,
        project=excluded.project, scope=excluded.scope, task_key=excluded.task_key,
        session_id=excluded.session_id, agent_key=excluded.agent_key,
        supersedes_id=excluded.supersedes_id, source_type=excluded.source_type,
-       source_id=excluded.source_id, tags=excluded.tags,
+       source_id=excluded.source_id, relation=excluded.relation, tags=excluded.tags,
        updated_at=datetime('now')
    `);
    stmt.run(
@@ -156,6 +169,7 @@ function saveMemoryItem(item) {
      item.supersedes_id || null,
      item.source_type || null,
      item.source_id || null,
+     item.relation || null,
      item.search_count || 0,
      item.last_searched_at || null,
      typeof item.tags === 'string' ? item.tags : JSON.stringify(Array.isArray(item.tags) ? item.tags : []),
@@ -350,6 +364,7 @@ function saveFacts(sessionId, project, facts, { scope = null, source_type = null
        session_id: sessionId,
        source_type: f.source_type || source_type || 'session',
        source_id: sessionId,
+       relation: f.relation,
        tags,
      });
      savedFacts.push({
@@ -359,6 +374,39 @@ function saveFacts(sessionId, project, facts, { scope = null, source_type = null
        saved++;
      } catch { skipped++; }
    }
+   // Wiki integration: update staleness + promote eligible topics (non-fatal)
+   if (savedFacts.length > 0) {
+     try {
+       const { updateStalenessForTags, checkTopicThreshold, upsertWikiTopic } = require('./core/wiki-db');
+       const db = getDb();
+
+       // Build tag → new-fact count map from saved facts' tags
+       const dirtyTagCounts = new Map();
+       for (const f of savedFacts) {
+         const tags = Array.isArray(f.tags) ? f.tags : [];
+         for (const tag of tags) {
+           if (tag && typeof tag === 'string') {
+             dirtyTagCounts.set(tag, (dirtyTagCounts.get(tag) || 0) + 1);
+           }
+         }
+       }
+
+       if (dirtyTagCounts.size > 0) {
+         updateStalenessForTags(db, dirtyTagCounts);
+
+         // Promote tags that cross the threshold to wiki topics
+         // Use force:true since we already checked the threshold — avoids double-query
+         for (const [tag] of dirtyTagCounts) {
+           try {
+             if (checkTopicThreshold(db, tag)) {
+               upsertWikiTopic(db, tag, { force: true });
+             }
+           } catch { /* slug error or collision: skip */ }
+         }
+       }
+     } catch { /* wiki not available: non-fatal */ }
+   }
+
    return { saved, skipped, superseded: 0, savedFacts };
  }

@@ -431,6 +479,27 @@ function recentSessions({ limit = 3, project = null, scope = null } = {}) {
    }));
  }

+ function recentFacts({ limit = 5, project = null, scope = null } = {}) {
+   return searchMemoryItems(null, {
+     state: 'active',
+     project: project || null,
+     scope: scope || null,
+     limit,
+   })
+     .filter(r => r.kind === 'insight' || r.kind === 'convention')
+     .map(r => ({
+       id: r.id,
+       entity: (r.title || '').split(' · ')[0] || r.title || '',
+       relation: (r.title || '').split(' · ')[1] || '',
+       value: r.content,
+       confidence: r.confidence >= 0.9 ? 'high' : r.confidence >= 0.6 ? 'medium' : 'low',
+       project: r.project,
+       scope: r.scope,
+       tags: _parseTags(r.tags),
+       created_at: r.created_at,
+     }));
+ }
+
  function stats() {
    const db = getDb();
    const row = db.prepare(
@@ -465,6 +534,33 @@ function forceClose() {
    if (_db) { _db.close(); _db = null; }
  }

+ /**
+  * Search wiki pages + memory facts via FTS5.
+  * Thin wrapper over core/wiki-db::searchWikiAndFacts that provides the DB instance.
+  * trackSearch: true → increments search_count on matched facts.
+  */
+ function searchWikiAndFacts(query, { trackSearch = true } = {}) {
+   try {
+     const { searchWikiAndFacts: fn } = require('./core/wiki-db');
+     return fn(getDb(), query, { trackSearch });
+   } catch {
+     return { wikiPages: [], facts: [] };
+   }
+ }
+
+ /**
+  * Hybrid wiki search (FTS5 + vector + RRF fusion).
+  * Falls back to pure FTS5 if hybrid-search module is unavailable.
+  */
+ async function hybridSearchWiki(query, { ftsOnly = false, expand = false, trackSearch = true } = {}) {
+   try {
+     const { hybridSearchWiki: fn } = require('./core/hybrid-search');
+     return await fn(getDb(), query, { ftsOnly, trackSearch });
+   } catch {
+     return searchWikiAndFacts(query, { trackSearch });
+   }
+ }
+
  module.exports = {
    // core
    saveMemoryItem,
@@ -474,6 +570,9 @@ module.exports = {
    bumpSearchCount,
    readWorkingMemory,
    assembleContext,
+   // wiki
+   searchWikiAndFacts,
+   hybridSearchWiki,
    // compatibility
    saveSession,
    saveFacts,
@@ -482,6 +581,7 @@ module.exports = {
    searchFactsAsync,
    searchSessions,
    recentSessions,
+   recentFacts,
    stats,
    // lifecycle
    acquire,
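
A short usage sketch of the new exports; the require path is illustrative (memory-search.js resolves memoryPath dynamically rather than hard-coding it):

```js
// Illustrative only: exercising the wiki entry points added in this release.
const memory = require('./scripts/memory');

(async () => {
  // Falls back to pure FTS5 when core/hybrid-search is unavailable.
  const { wikiPages } = await memory.hybridSearchWiki('embedding queue', { ftsOnly: false });
  for (const p of wikiPages || []) console.log(p.slug, p.title);

  // recentFacts() splits titles on ' · ' to recover the entity/relation pair.
  console.log(memory.recentFacts({ limit: 5 }));
})();
```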
package/scripts/signal-capture.js CHANGED
@@ -101,7 +101,7 @@ function withBufferLock(fn) {
        acquired = true;
        break;
      } catch (e) {
-       if (e.code !== 'EEXIST') return false; // non-EEXIST errors (EMFILE, EACCES) → skip lock gracefully
+       if (e.code !== 'EEXIST') throw e;
        try {
          const age = Date.now() - fs.statSync(LOCK_FILE).mtimeMs;
          if (age > LOCK_STALE_MS) {
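
The changed line sits in the catch path of a create-exclusive lock loop; the acquisition itself is outside this hunk. A sketch of the overall pattern the fix belongs to, assuming the lock is taken with fs.writeFileSync and flag 'wx' (consistent with the EEXIST check, but not shown in the diff):

```js
// Sketch of the exclusive-create lock pattern around the changed catch block.
const fs = require('fs');

function tryAcquire(lockFile, staleMs) {
  try {
    fs.writeFileSync(lockFile, String(process.pid), { flag: 'wx' }); // throws EEXIST if held
    return true;
  } catch (e) {
    if (e.code !== 'EEXIST') throw e; // the new behavior: surface real errors (EMFILE, EACCES)
    if (Date.now() - fs.statSync(lockFile).mtimeMs > staleMs) {
      fs.unlinkSync(lockFile); // break a stale lock; the caller retries on its next pass
    }
    return false;
  }
}
```

The old code swallowed non-EEXIST errors and silently skipped the locked operation; the new version propagates them to the caller.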
package/scripts/skill-evolution.js CHANGED
@@ -75,7 +75,6 @@ const DEFAULT_POLICY = {
    min_evidence_for_gap: 3,
    max_updates_per_analysis: 3,
    max_gaps_per_analysis: 2,
-   max_signals_per_analysis: 30, // cap signals sent to Haiku per run

    // Workflow discovery
    workflow_discovery_interval: 2, // every N cold-path cycles
@@ -201,7 +200,6 @@ function sanitizePolicy(input) {
    min_evidence_for_gap: clampInt(merged.min_evidence_for_gap, DEFAULT_POLICY.min_evidence_for_gap, 1, 20),
    max_updates_per_analysis: clampInt(merged.max_updates_per_analysis, DEFAULT_POLICY.max_updates_per_analysis, 1, 20),
    max_gaps_per_analysis: clampInt(merged.max_gaps_per_analysis, DEFAULT_POLICY.max_gaps_per_analysis, 1, 20),
-   max_signals_per_analysis: clampInt(merged.max_signals_per_analysis, DEFAULT_POLICY.max_signals_per_analysis, 5, 100),
    workflow_discovery_interval: clampInt(merged.workflow_discovery_interval, DEFAULT_POLICY.workflow_discovery_interval, 1, 100),
    min_signals_for_workflow: clampInt(merged.min_signals_for_workflow, DEFAULT_POLICY.min_signals_for_workflow, 1, 100),
    workflow_proposal_threshold: clampInt(merged.workflow_proposal_threshold, DEFAULT_POLICY.workflow_proposal_threshold, 2, 50),
@@ -612,15 +610,8 @@ async function distillSkills() {
    if (!content) return null;

    const lines = content.split('\n');
-   const allSignals = lines.map(l => { try { return JSON.parse(l); } catch { return null; } }).filter(Boolean);
-   if (allSignals.length < policy.min_signals_for_distill) return null;
-
-   // Cap signals sent to Haiku to avoid prompt bloat / timeout.
-   // Keep most recent signals (higher relevance); overflow is still cleared.
-   const maxSignals = policy.max_signals_per_analysis || 30;
-   const signals = allSignals.length > maxSignals
-     ? allSignals.slice(-maxSignals)
-     : allSignals;
+   const signals = lines.map(l => { try { return JSON.parse(l); } catch { return null; } }).filter(Boolean);
+   if (signals.length < policy.min_signals_for_distill) return null;

    // Get installed skills list
    const installedSkills = listInstalledSkills();
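
clampInt is used throughout sanitizePolicy but never appears in this diff; a plausible minimal version, shown only to make the hunk self-contained:

```js
// Assumed shape of the clampInt helper referenced by sanitizePolicy; the
// actual implementation in skill-evolution.js is not part of this diff.
function clampInt(value, fallback, min, max) {
  const n = Number.parseInt(value, 10);
  if (!Number.isFinite(n)) return fallback; // non-numeric input keeps the policy default
  return Math.min(max, Math.max(min, n));
}
```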