metame-cli 1.5.26 → 1.6.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46)
  1. package/index.js +4 -1
  2. package/package.json +1 -1
  3. package/scripts/agent-layer.js +36 -0
  4. package/scripts/core/chunker.js +100 -0
  5. package/scripts/core/embedding.js +225 -0
  6. package/scripts/core/hybrid-search.js +296 -0
  7. package/scripts/core/wiki-db.js +545 -0
  8. package/scripts/core/wiki-prompt.js +88 -0
  9. package/scripts/core/wiki-slug.js +66 -0
  10. package/scripts/core/wiki-staleness.js +18 -0
  11. package/scripts/daemon-agent-commands.js +10 -4
  12. package/scripts/daemon-bridges.js +16 -0
  13. package/scripts/daemon-claude-engine.js +62 -8
  14. package/scripts/daemon-command-router.js +40 -1
  15. package/scripts/daemon-default.yaml +33 -3
  16. package/scripts/daemon-embedding.js +162 -0
  17. package/scripts/daemon-engine-runtime.js +1 -1
  18. package/scripts/daemon-health-scan.js +185 -0
  19. package/scripts/daemon-ops-commands.js +9 -18
  20. package/scripts/daemon-runtime-lifecycle.js +1 -1
  21. package/scripts/daemon-session-commands.js +4 -0
  22. package/scripts/daemon-task-scheduler.js +5 -3
  23. package/scripts/daemon-warm-pool.js +15 -0
  24. package/scripts/daemon-wiki.js +420 -0
  25. package/scripts/daemon.js +10 -5
  26. package/scripts/distill.js +1 -1
  27. package/scripts/docs/file-transfer.md +0 -1
  28. package/scripts/docs/maintenance-manual.md +2 -55
  29. package/scripts/docs/pointer-map.md +0 -34
  30. package/scripts/feishu-adapter.js +25 -0
  31. package/scripts/hooks/intent-file-transfer.js +1 -2
  32. package/scripts/memory-backfill-chunks.js +92 -0
  33. package/scripts/memory-search.js +49 -6
  34. package/scripts/memory-wiki-schema.js +255 -0
  35. package/scripts/memory.js +103 -3
  36. package/scripts/signal-capture.js +1 -1
  37. package/scripts/skill-evolution.js +2 -11
  38. package/scripts/wiki-cluster.js +121 -0
  39. package/scripts/wiki-extract.js +171 -0
  40. package/scripts/wiki-facts.js +351 -0
  41. package/scripts/wiki-import.js +256 -0
  42. package/scripts/wiki-reflect-build.js +441 -0
  43. package/scripts/wiki-reflect-export.js +448 -0
  44. package/scripts/wiki-reflect-query.js +109 -0
  45. package/scripts/wiki-reflect.js +338 -0
  46. package/scripts/wiki-synthesis.js +224 -0
@@ -0,0 +1,448 @@
1
+ 'use strict';
2
+
3
+ /**
4
+ * wiki-reflect-export.js — File write layer for wiki-reflect
5
+ *
6
+ * Writes wiki pages as Obsidian-compatible Markdown files.
7
+ * No LLM calls. File I/O only, except exportDocPages, which reads wiki_pages from the DB.
8
+ *
9
+ * Exports:
10
+ * exportWikiPage(slug, frontmatter, content, outputDir) → void
11
+ * rebuildIndex(pages, outputDir) → void
12
+ * exportSessionSummary(entry, outputDir, options) → string
13
+ * rebuildSessionsIndex(entries, outputDir) → void
14
+ * exportCapsuleFile(sourcePath, outputDir) → string|null
+ * rebuildCapsulesIndex(capsuleFiles, outputDir) → void
+ * exportReflectDir(srcDir, subdir, outputDir) → string[]
+ * rebuildReflectDirIndex(fileNames, subdir, outputDir) → void
+ * exportDocPages(db, outputDir) → { exported, skipped }
+ */
17
+
18
+ const fs = require('fs');
19
+ const path = require('path');
20
+ const os = require('os');
21
+
22
// Default Obsidian vault root: ~/.metame/wiki. Every exporter below accepts
// an optional outputDir override.
const DEFAULT_WIKI_DIR = path.join(os.homedir(), '.metame', 'wiki');
23
+
24
/**
 * Write a single wiki page to `<outputDir>/<slug>.md`.
 * Atomic: content is staged in `<file>.tmp` and then renamed over the target,
 * so readers never observe a half-written page.
 *
 * @param {string} slug - Page slug; also forced into the frontmatter.
 * @param {{ title: string, tags: string[], created: string, last_built: string,
 *           raw_sources: number, staleness: number }} frontmatter
 * @param {string} content - Article body (frontmatter is prepended here).
 * @param {string} [outputDir] - Defaults to ~/.metame/wiki.
 */
function exportWikiPage(slug, frontmatter, content, outputDir = DEFAULT_WIKI_DIR) {
  _ensureDir(outputDir);

  // The positional slug argument wins over any slug in the frontmatter object.
  const header = _buildFrontmatter({ ...frontmatter, slug });
  const target = path.join(outputDir, `${slug}.md`);
  const staging = `${target}.tmp`;

  // A leftover .tmp from a previously interrupted write is harmless — drop it.
  try { fs.unlinkSync(staging); } catch { /* not present */ }

  fs.writeFileSync(staging, `${header}\n${content}\n`, 'utf8');
  fs.renameSync(staging, target);
}
48
+
49
/**
 * Rebuild the _index.md (Map of Content) from all wiki pages.
 * Pages list is provided by caller (from DB query) — this function does not
 * access the DB. Atomic write: .tmp → rename, same pattern as exportWikiPage.
 *
 * @param {Array<{ slug: string, title: string, primary_topic: string,
 *                 staleness: number, last_built_at: string|null,
 *                 raw_source_count: number }>} pages
 * @param {string} [outputDir]
 * @param {{ sessionCount?: number, capsuleCount?: number }} [options]
 *        Counts appended to the Navigation links; non-numeric values fall
 *        back to 0 (and the count suffix is then omitted).
 */
function rebuildIndex(pages, outputDir = DEFAULT_WIKI_DIR, options = {}) {
  _ensureDir(outputDir);
  const sessionCount = Number(options.sessionCount) || 0;
  const capsuleCount = Number(options.capsuleCount) || 0;

  const now = new Date().toISOString().slice(0, 10);
  const lines = [
    '---',
    'title: Wiki Index',
    `updated: ${now}`,
    '---',
    '',
    '# MetaMe Knowledge Wiki',
    '',
    `> ${pages.length} pages · 自动生成,勿手动编辑`,
    '',
    '| 页面 | 主题标签 | 来源数 | 陈旧度 | 最后更新 |',
    '|------|---------|--------|--------|---------|',
  ];

  for (const p of pages) {
    // staleness is a 0..1 fraction; render as a percentage.
    const stalePct = Math.round((p.staleness || 0) * 100);
    const built = p.last_built_at ? p.last_built_at.slice(0, 10) : '—';
    lines.push(
      // `\|` escapes the pipe inside a wikilink placed in a Markdown table cell.
      `| [[${p.slug}\\|${p.title}]] | \`${p.primary_topic}\` | ${p.raw_source_count || 0} | ${stalePct}% | ${built} |`
    );
  }

  lines.push('', '## Navigation', '');
  lines.push(`- [[sessions/_index|Session Summaries]]${sessionCount > 0 ? ` (${sessionCount})` : ''}`);
  lines.push(`- [[capsules/_index|Knowledge Capsules]]${capsuleCount > 0 ? ` (${capsuleCount})` : ''}`);

  const content = lines.join('\n') + '\n';
  const filePath = path.join(outputDir, '_index.md');
  const tmpPath = `${filePath}.tmp`;

  try { fs.unlinkSync(tmpPath); } catch { /* not present */ }
  fs.writeFileSync(tmpPath, content, 'utf8');
  fs.renameSync(tmpPath, filePath);
}
98
+
99
/**
 * Write one session summary under `<outputDir>/sessions/` as Markdown.
 * Atomic write (.tmp → rename).
 *
 * @param {object} entry - Record with session_id/id, project, scope, tags
 *        (JSON string or array), content, created_at.
 * @param {string} [outputDir]
 * @param {{ wikiPages?: object[], capsuleFiles?: string[] }} [options]
 *        Candidate material for the "Related Knowledge" section.
 * @returns {string} Path of the file written.
 */
function exportSessionSummary(entry, outputDir = DEFAULT_WIKI_DIR, options = {}) {
  const sessionsDir = path.join(outputDir, 'sessions');
  _ensureDir(sessionsDir);

  const created = String(entry.created_at || '').slice(0, 10);
  const sessionId = String(entry.session_id || entry.id || '');
  const project = String(entry.project || 'unknown');
  // Slug shape: <date>-<project>-<last 8 chars of session id>, sanitized.
  const slug = _sanitizeSlug(`${created || 'session'}-${project}-${sessionId.slice(-8)}`, 'session');
  const tags = _safeJsonArray(entry.tags);
  const body = String(entry.content || '').trim() || '(empty)';
  const related = _collectSessionRelated(project, tags, options);

  const frontmatter = [
    '---',
    `title: ${_yamlStr(entry.title || body.slice(0, 40) || sessionId || slug)}`,
    `session_id: ${_yamlStr(sessionId)}`,
    `project: ${_yamlStr(project)}`,
    `scope: ${_yamlStr(String(entry.scope || ''))}`,
    `created: ${created}`,
    `tags: ${JSON.stringify(tags)}`,
    'type: session-summary',
    '---',
    '',
  ].join('\n');

  const sections = [frontmatter, '## Summary', '', body];
  if (related.wiki.length > 0 || related.capsules.length > 0) {
    sections.push('', '## Related Knowledge', '');
    for (const item of related.wiki) sections.push(`- Wiki: [[${item.path}|${item.label}]]`);
    for (const item of related.capsules) sections.push(`- Capsule: [[${item.path}|${item.label}]]`);
  }

  const filePath = path.join(sessionsDir, `${slug}.md`);
  const tmpPath = `${filePath}.tmp`;
  try { fs.unlinkSync(tmpPath); } catch { /* not present */ }
  fs.writeFileSync(tmpPath, sections.join('\n') + '\n', 'utf8');
  fs.renameSync(tmpPath, filePath);
  return filePath;
}
137
+
138
/**
 * Rebuild sessions/_index.md, grouping session summaries by project.
 * Projects are listed alphabetically; entries keep their input order.
 *
 * @param {object[]} entries - Same shape as exportSessionSummary's entry.
 * @param {string} [outputDir]
 */
function rebuildSessionsIndex(entries, outputDir = DEFAULT_WIKI_DIR) {
  const sessionsDir = path.join(outputDir, 'sessions');
  _ensureDir(sessionsDir);

  const lines = [
    '---',
    'title: Session Summaries',
    `updated: ${new Date().toISOString().slice(0, 10)}`,
    'type: session-index',
    '---',
    '',
    '# Session Summaries',
    '',
    `> ${entries.length} sessions`,
    '',
  ];

  // Bucket entries by project name.
  const byProject = new Map();
  for (const entry of entries) {
    const project = String(entry.project || 'unknown');
    const bucket = byProject.get(project);
    if (bucket) bucket.push(entry);
    else byProject.set(project, [entry]);
  }

  const projects = [...byProject.keys()].sort((a, b) => a.localeCompare(b));
  for (const project of projects) {
    lines.push(`## ${project}`, '');
    for (const entry of byProject.get(project)) {
      const created = String(entry.created_at || '').slice(0, 10);
      const sessionId = String(entry.session_id || entry.id || '');
      // Must mirror the slug built in exportSessionSummary so links resolve.
      const slug = _sanitizeSlug(`${created || 'session'}-${project}-${sessionId.slice(-8)}`, 'session');
      const preview = String(entry.content || '').replace(/\s+/g, ' ').slice(0, 100);
      lines.push(`- [[sessions/${slug}|${created} · ${project}]]`);
      if (preview) lines.push(` ${preview}`);
    }
    lines.push('');
  }

  const filePath = path.join(sessionsDir, '_index.md');
  const tmpPath = `${filePath}.tmp`;
  try { fs.unlinkSync(tmpPath); } catch { /* not present */ }
  fs.writeFileSync(tmpPath, lines.join('\n') + '\n', 'utf8');
  fs.renameSync(tmpPath, filePath);
}
180
+
181
/**
 * Copy one capsule Markdown file into `<outputDir>/capsules/` (atomic write),
 * normalizing it to end with a trailing newline.
 *
 * @param {string} sourcePath - Must name an existing .md file.
 * @param {string} [outputDir]
 * @returns {string|null} Destination path, or null when the source is unusable.
 */
function exportCapsuleFile(sourcePath, outputDir = DEFAULT_WIKI_DIR) {
  const capsulesDir = path.join(outputDir, 'capsules');
  _ensureDir(capsulesDir);

  const source = String(sourcePath || '');
  const base = path.basename(source);
  const usable = source && base.endsWith('.md') && fs.existsSync(source);
  if (!usable) return null;

  const raw = fs.readFileSync(source, 'utf8');
  const normalized = raw.endsWith('\n') ? raw : `${raw}\n`;
  const targetPath = path.join(capsulesDir, base);
  const tmpPath = `${targetPath}.tmp`;

  try { fs.unlinkSync(tmpPath); } catch { /* not present */ }
  fs.writeFileSync(tmpPath, normalized, 'utf8');
  fs.renameSync(tmpPath, targetPath);
  return targetPath;
}
198
+
199
/**
 * Rebuild capsules/_index.md listing every exported capsule by basename.
 *
 * @param {string[]} capsuleFiles - Source paths; only the basename is used.
 * @param {string} [outputDir]
 */
function rebuildCapsulesIndex(capsuleFiles, outputDir = DEFAULT_WIKI_DIR) {
  const capsulesDir = path.join(outputDir, 'capsules');
  _ensureDir(capsulesDir);

  const header = [
    '---',
    'title: Knowledge Capsules',
    `updated: ${new Date().toISOString().slice(0, 10)}`,
    'type: capsule-index',
    '---',
    '',
    '# Knowledge Capsules',
    '',
    `> ${capsuleFiles.length} capsules`,
    '',
  ];

  const links = [];
  for (const sourcePath of capsuleFiles) {
    const base = path.basename(String(sourcePath || ''), '.md');
    if (base) links.push(`- [[capsules/${base}|${base}]]`);
  }

  const filePath = path.join(capsulesDir, '_index.md');
  const tmpPath = `${filePath}.tmp`;
  try { fs.unlinkSync(tmpPath); } catch { /* not present */ }
  fs.writeFileSync(tmpPath, [...header, ...links].join('\n') + '\n', 'utf8');
  fs.renameSync(tmpPath, filePath);
}
228
+
229
/**
 * Mirror all .md files from srcDir into `<outputDir>/<subdir>` (atomic writes).
 * A missing or non-directory srcDir yields an empty list; unreadable files
 * are skipped silently.
 *
 * @param {string} srcDir - e.g. ~/.metame/memory/decisions
 * @param {string} subdir - Vault subdirectory name, e.g. 'decisions'
 * @param {string} [outputDir]
 * @returns {string[]} Destination file paths actually written.
 */
function exportReflectDir(srcDir, subdir, outputDir = DEFAULT_WIKI_DIR) {
  if (!fs.existsSync(srcDir) || !fs.statSync(srcDir).isDirectory()) return [];

  const destDir = path.join(outputDir, subdir);
  _ensureDir(destDir);

  const written = [];
  const markdownNames = fs.readdirSync(srcDir).filter(n => n.endsWith('.md'));
  for (const name of markdownNames) {
    const dest = path.join(destDir, name);
    const tmp = `${dest}.tmp`;
    try {
      const content = fs.readFileSync(path.join(srcDir, name), 'utf8');
      try { fs.unlinkSync(tmp); } catch { /* not present */ }
      // Normalize to a trailing newline, like exportCapsuleFile.
      fs.writeFileSync(tmp, content.endsWith('\n') ? content : `${content}\n`, 'utf8');
      fs.renameSync(tmp, dest);
      written.push(dest);
    } catch { /* skip unreadable file */ }
  }
  return written;
}
259
+
260
/**
 * Write _index.md for a reflect subdirectory ('decisions' or 'lessons').
 * Entries are listed in descending filename order.
 *
 * @param {string[]} fileNames - Bare filenames (not full paths).
 * @param {string} subdir - 'decisions' | 'lessons' (anything else is labeled
 *        'Operational Lessons').
 * @param {string} [outputDir]
 */
function rebuildReflectDirIndex(fileNames, subdir, outputDir = DEFAULT_WIKI_DIR) {
  const destDir = path.join(outputDir, subdir);
  _ensureDir(destDir);

  const label = subdir === 'decisions' ? 'Architecture Decisions' : 'Operational Lessons';
  const lines = [
    '---',
    `title: ${label}`,
    `updated: ${new Date().toISOString().slice(0, 10)}`,
    'type: reflect-index',
    '---',
    '',
    `# ${label}`,
    '',
    `> ${fileNames.length} entries · 自动生成,勿手动编辑`,
    '',
  ];

  // Descending lexicographic order — presumably newest first for
  // date-prefixed filenames.
  const ordered = [...fileNames].sort().reverse();
  for (const name of ordered) {
    const base = path.basename(name, '.md');
    lines.push(`- [[${subdir}/${base}|${base}]]`);
  }

  const filePath = path.join(destDir, '_index.md');
  const tmpPath = `${filePath}.tmp`;
  try { fs.unlinkSync(tmpPath); } catch { /* not present */ }
  fs.writeFileSync(tmpPath, lines.join('\n') + '\n', 'utf8');
  fs.renameSync(tmpPath, filePath);
}
296
+
297
/**
 * Export all doc/cluster wiki pages from DB to Obsidian vault.
 * Called by runWikiReflect after the memory-topic loop.
 * Pages with empty content are excluded by the SQL filter.
 *
 * NOTE(review): this is the one function in this module that touches the DB,
 * despite the module header's "No DB access" claim.
 *
 * @param {object} db — DatabaseSync instance
 * @param {string} [outputDir]
 * @returns {{ exported: string[], skipped: string[] }}
 */
function exportDocPages(db, outputDir = DEFAULT_WIKI_DIR) {
  _ensureDir(outputDir);
  const rows = db.prepare(
    `SELECT slug, title, primary_topic, source_type, content,
            topic_tags, created_at, last_built_at, raw_source_count, staleness
     FROM wiki_pages
     WHERE source_type IN ('doc', 'topic_cluster')
       AND content IS NOT NULL AND content != ''`
  ).all();

  const exported = [];
  const skipped = [];

  for (const row of rows) {
    try {
      // topic_tags is stored as a JSON string; bad JSON degrades to [].
      const tags = _safeJsonArray(row.topic_tags);
      const frontmatter = {
        title: row.title || row.slug,
        slug: row.slug,
        tags,
        created: (row.created_at || '').slice(0, 10),      // date part only
        last_built: (row.last_built_at || '').slice(0, 10),
        raw_sources: row.raw_source_count || 0,
        staleness: row.staleness || 0,
      };
      exportWikiPage(row.slug, frontmatter, row.content, outputDir);
      exported.push(row.slug);
    } catch {
      // A failed write is recorded under `skipped` and the loop continues,
      // so one bad page cannot abort the whole export.
      skipped.push(row.slug);
    }
  }

  return { exported, skipped };
}
340
+
341
+ // ── helpers ───────────────────────────────────────────────────────────────────
342
+
343
/**
 * Create `dir` (and any missing parents) if it does not already exist.
 *
 * @param {string} dir
 */
function _ensureDir(dir) {
  if (fs.existsSync(dir)) return;
  fs.mkdirSync(dir, { recursive: true });
}
348
+
349
/**
 * Serialize a page frontmatter object into a YAML block (`---` fenced).
 * Tags are rendered as a compact inline JSON array; staleness is fixed to
 * two decimal places (non-numbers become "0.00").
 */
function _buildFrontmatter({ title, slug, tags = [], created, last_built, raw_sources, staleness }) {
  const stalenessStr = typeof staleness === 'number' ? staleness.toFixed(2) : '0.00';
  const rows = [
    '---',
    `title: ${_yamlStr(title)}`,
    `slug: ${slug}`,
    `tags: ${JSON.stringify(tags)}`,
    `created: ${created || ''}`,
    `last_built: ${last_built || ''}`,
    `raw_sources: ${raw_sources || 0}`,
    `staleness: ${stalenessStr}`,
    '---',
  ];
  return rows.join('\n');
}
367
+
368
/**
 * Escape a string value for inline YAML.
 * Plain strings pass through untouched; anything containing YAML-significant
 * characters (or backslashes/newlines) is wrapped in double quotes with
 * backslash, quote, and newline escaped per YAML double-quoted-scalar rules.
 *
 * @param {*} s - Coerced with String(); null/undefined/'' yield ''.
 * @returns {string}
 */
function _yamlStr(s) {
  const str = String(s || '');
  // BUG FIX: backslashes and newlines were previously neither detected nor
  // escaped, so a value like `a\b` produced invalid/ambiguous YAML.
  if (/[:#\[\]{}|>&*!,'"\\\n]/.test(str)) {
    const escaped = str
      .replace(/\\/g, '\\\\')   // must run first so later escapes aren't doubled
      .replace(/"/g, '\\"')
      .replace(/\n/g, '\\n');
    return `"${escaped}"`;
  }
  return str;
}
376
+
377
/**
 * Normalize arbitrary text into a filesystem/wiki-safe slug: lowercase,
 * with runs of disallowed characters collapsed to single hyphens and
 * leading/trailing hyphens stripped. Falls back when nothing survives.
 *
 * @param {*} input
 * @param {string} [fallback]
 * @returns {string}
 */
function _sanitizeSlug(input, fallback = 'item') {
  let slug = String(input || '').trim().toLowerCase();
  slug = slug.replace(/[^a-z0-9._-]+/g, '-'); // collapse disallowed runs
  slug = slug.replace(/^-+|-+$/g, '');        // trim edge hyphens
  return slug.length > 0 ? slug : fallback;
}
385
+
386
/**
 * Parse a value that may be a JSON-encoded array.
 * Strings are JSON.parse'd; values already array-shaped pass through.
 * Every failure mode (bad JSON, non-array result) yields [].
 *
 * @param {*} raw
 * @returns {Array}
 */
function _safeJsonArray(raw) {
  try {
    const value = typeof raw === 'string' ? JSON.parse(raw) : raw;
    if (Array.isArray(value)) return value;
  } catch { /* fall through to [] */ }
  return [];
}
394
+
395
/**
 * Build the "Related Knowledge" lists for a session summary.
 * A wiki page matches when its slug or primary_topic equals the project name
 * or one of the tags (case-insensitive). A capsule matches when its basename
 * contains one of those tokens (spaces mapped to hyphens). Results are
 * deduplicated, first occurrence wins.
 *
 * @param {string} project
 * @param {string[]} tags
 * @param {{ wikiPages?: object[], capsuleFiles?: string[] }} [options]
 * @returns {{ wiki: {path,label}[], capsules: {path,label}[] }}
 */
function _collectSessionRelated(project, tags, options = {}) {
  const wikiPages = Array.isArray(options.wikiPages) ? options.wikiPages : [];
  const capsuleFiles = Array.isArray(options.capsuleFiles) ? options.capsuleFiles : [];

  // Normalized match tokens: the project name plus every tag, lowercased.
  const tokens = new Set([
    String(project || '').trim().toLowerCase(),
    ...tags.map(tag => String(tag || '').trim().toLowerCase()),
  ]);

  const wiki = [];
  for (const page of wikiPages) {
    const slug = String(page.slug || '').trim();
    const topic = String(page.primary_topic || '').trim().toLowerCase();
    if (!slug) continue;
    if (tokens.has(slug.toLowerCase()) || tokens.has(topic)) {
      wiki.push({ path: slug, label: page.title || slug });
    }
  }

  const capsules = [];
  const tokenList = [...tokens];
  for (const file of capsuleFiles) {
    const base = path.basename(String(file || ''), '.md');
    const lower = base.toLowerCase();
    const hit = tokenList.some(token => token && lower.includes(token.replace(/\s+/g, '-')));
    if (hit) capsules.push({ path: `capsules/${base}`, label: base });
  }

  return {
    wiki: _dedupeRelated(wiki),
    capsules: _dedupeRelated(capsules),
  };
}
427
+
428
/**
 * Drop duplicate {path, label} entries, preserving first-occurrence order.
 *
 * @param {{path: string, label: string}[]} items
 * @returns {{path: string, label: string}[]}
 */
function _dedupeRelated(items) {
  const seen = new Set();
  const out = [];
  for (const item of items) {
    const key = `${item.path}|${item.label}`;
    if (seen.has(key)) continue;
    seen.add(key);
    out.push(item);
  }
  return out;
}
437
+
438
// Public API of the wiki-reflect export layer (consumed via require()).
module.exports = {
  exportWikiPage,
  rebuildIndex,
  exportSessionSummary,
  rebuildSessionsIndex,
  exportCapsuleFile,
  rebuildCapsulesIndex,
  exportReflectDir,
  rebuildReflectDirIndex,
  exportDocPages, // unlike the rest of this module, reads from the DB
};
@@ -0,0 +1,109 @@
1
+ 'use strict';
2
+
3
+ /**
4
+ * wiki-reflect-query.js — DB read layer for wiki-reflect
5
+ *
6
+ * Fetches raw facts and capsule excerpts for a topic.
7
+ * No DB writes, no LLM calls. File reads (capsule excerpts) are intentional and non-fatal.
8
+ *
9
+ * Exports:
10
+ * queryRawFacts(db, tag, { capsulesDir }) → { totalCount, facts, capsuleExcerpts }
11
+ */
12
+
13
+ const fs = require('fs');
14
+ const path = require('path');
15
+ const os = require('os');
16
+
17
// Relations that are themselves derived artifacts; queryRawFacts excludes
// them so only primary (raw) facts are returned.
const DERIVED_RELATIONS = ['synthesized_insight', 'knowledge_capsule'];
// Default location of capsule markdown files: ~/.metame/memory/capsules
const DEFAULT_CAPSULES_DIR = path.join(os.homedir(), '.metame', 'memory', 'capsules');
// Max characters excerpted from each capsule body.
const CAPSULE_EXCERPT_CHARS = 200;
// Max number of capsule files excerpted per tag.
const CAPSULE_MAX = 3;
// Max facts returned for the LLM prompt (the count query has no LIMIT).
const FACTS_LIMIT = 30;
22
+
23
/**
 * Query raw facts for a wiki topic tag.
 *
 * Two-step approach:
 *   Step 1: COUNT(*) without LIMIT → totalCount (used as staleness denominator)
 *   Step 2: SELECT top 30 ordered by search_count DESC, confidence DESC → facts (LLM prompt)
 *
 * Tag matching is case/whitespace-insensitive, via json_each over the JSON
 * `tags` column. Rows whose relation is in DERIVED_RELATIONS are excluded
 * from both steps (raw facts only).
 *
 * @param {object} db - DatabaseSync instance
 * @param {string} tag - The wiki topic tag
 * @param {{ capsulesDir?: string }} opts
 * @returns {{ totalCount: number, facts: object[], capsuleExcerpts: string }}
 */
function queryRawFacts(db, tag, { capsulesDir = DEFAULT_CAPSULES_DIR } = {}) {
  // One '?' per derived relation, interpolated into the NOT IN (...) clauses.
  const placeholders = DERIVED_RELATIONS.map(() => '?').join(', ');

  // Step 1: total count (staleness denominator, no LIMIT)
  // Include 'candidate' so topics promoted via saveFacts aren't skipped on first build.
  const countRow = db.prepare(`
    SELECT COUNT(*) as cnt
    FROM memory_items mi
    JOIN json_each(mi.tags) jt ON lower(trim(jt.value)) = lower(trim(?))
    WHERE mi.state IN ('active', 'candidate')
      AND (mi.relation NOT IN (${placeholders}) OR mi.relation IS NULL)
  `).get(tag, ...DERIVED_RELATIONS);

  const totalCount = countRow ? countRow.cnt : 0;

  // Step 2: top 30 for LLM prompt — include candidates so first build isn't empty.
  // 'active' sorts before 'candidate' (ORDER BY mi.state ASC), so active rows rank first.
  const facts = db.prepare(`
    SELECT mi.id, mi.title, mi.content, mi.confidence, mi.search_count,
           mi.created_at, mi.tags
    FROM memory_items mi
    JOIN json_each(mi.tags) jt ON lower(trim(jt.value)) = lower(trim(?))
    WHERE mi.state IN ('active', 'candidate')
      AND (mi.relation NOT IN (${placeholders}) OR mi.relation IS NULL)
    ORDER BY mi.state ASC, mi.search_count DESC, mi.confidence DESC
    LIMIT ?
  `).all(tag, ...DERIVED_RELATIONS, FACTS_LIMIT);

  // Capsule excerpts: best-effort reads of files in capsulesDir whose name
  // contains the tag; failures are non-fatal and yield ''.
  const capsuleExcerpts = _loadCapsuleExcerpts(tag, capsulesDir);

  return { totalCount, facts, capsuleExcerpts };
}
67
+
68
/**
 * Load capsule excerpts for the given tag.
 * Reads up to CAPSULE_MAX capsule files whose filename contains the tag slug
 * (lowercased, spaces→hyphens) and returns their first CAPSULE_EXCERPT_CHARS
 * characters each, frontmatter stripped.
 *
 * @param {string} tag
 * @param {string} capsulesDir
 * @returns {string} Concatenated excerpts, may be empty
 */
function _loadCapsuleExcerpts(tag, capsulesDir) {
  if (!fs.existsSync(capsulesDir)) return '';

  let files;
  try {
    files = fs.readdirSync(capsulesDir).filter(f => f.endsWith('.md'));
  } catch {
    return '';
  }

  // Match files whose name contains tag (lowercased, spaces→hyphens)
  const needle = tag.toLowerCase().replace(/\s+/g, '-');
  const matched = files
    .filter(f => f.toLowerCase().includes(needle))
    .slice(0, CAPSULE_MAX);

  if (matched.length === 0) return '';

  const parts = [];
  for (const filename of matched) {
    try {
      const text = fs.readFileSync(path.join(capsulesDir, filename), 'utf8');
      // Strip frontmatter (--- ... ---) before excerpting
      const body = text.replace(/^---[\s\S]*?---\n?/, '').trim();
      if (body) {
        // BUG FIX: the excerpt header previously emitted the literal text
        // "$(unknown)" — `$(...)` is not JS interpolation — instead of
        // labeling the excerpt with the capsule's filename.
        parts.push(`[${filename}]\n${body.slice(0, CAPSULE_EXCERPT_CHARS)}`);
      }
    } catch { /* skip unreadable file */ }
  }

  return parts.join('\n\n');
}
108
+
109
// queryRawFacts is the sole public entry point; excerpt loading stays private.
module.exports = { queryRawFacts };