volute 0.23.0 → 0.25.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (88)
  1. package/README.md +5 -5
  2. package/dist/{activity-events-3WHHCOBB.js → activity-events-4O37J7PD.js} +2 -2
  3. package/dist/api.d.ts +419 -19
  4. package/dist/{channel-BOOMFULW.js → channel-HZOSHGNF.js} +1 -1
  5. package/dist/{chunk-QIXPN3OO.js → chunk-2767L2RZ.js} +5 -5
  6. package/dist/{chunk-SGPEZ32F.js → chunk-33XAVCS4.js} +16 -0
  7. package/dist/{chunk-VT5QODNE.js → chunk-3AIBT4TW.js} +4 -3
  8. package/dist/{chunk-A4S7H6G6.js → chunk-BFK6SOEJ.js} +1 -1
  9. package/dist/{chunk-RK627D57.js → chunk-BOTQ25QT.js} +3 -3
  10. package/dist/{chunk-TFS25FIM.js → chunk-DG7TO7EE.js} +31 -3
  11. package/dist/{chunk-HGCDWKSP.js → chunk-E7GOKNOT.js} +1 -1
  12. package/dist/{chunk-ISWZ6QUK.js → chunk-PMX4EIJK.js} +804 -115
  13. package/dist/{chunk-M5CNKH4J.js → chunk-SHSWYG2J.js} +7 -7
  14. package/dist/{chunk-XLC342FO.js → chunk-SIAG3QMM.js} +14 -1
  15. package/dist/{chunk-KFI7TQJ6.js → chunk-TRQEV3CD.js} +9 -5
  16. package/dist/{chunk-JG4CCJOA.js → chunk-ZSH4G2P5.js} +33 -15
  17. package/dist/cli.js +18 -18
  18. package/dist/{cloud-sync-PI47U2LT.js → cloud-sync-PPBBJDY6.js} +7 -9
  19. package/dist/{connector-PYT5UOTZ.js → connector-M6XFI6GM.js} +1 -1
  20. package/dist/{create-WIDA3M4C.js → create-VDQJER52.js} +1 -1
  21. package/dist/{daemon-client-ZHCDL4RS.js → daemon-client-JOVQZ52X.js} +1 -1
  22. package/dist/{daemon-restart-RMGOOGPE.js → daemon-restart-FDNOZEAD.js} +5 -5
  23. package/dist/daemon.js +1047 -981
  24. package/dist/{delete-LOIANQGD.js → delete-2MRR4JX5.js} +1 -1
  25. package/dist/{down-WSUASL5E.js → down-674SX2IZ.js} +2 -2
  26. package/dist/{env-4PHIHTF4.js → env-2FPOZK37.js} +1 -1
  27. package/dist/{export-XD6PJBQP.js → export-IKFAPRAO.js} +1 -1
  28. package/dist/{file-X4L5TTOL.js → file-KT3UIQM3.js} +1 -1
  29. package/dist/{history-HTEKRNID.js → history-46WZN5CN.js} +1 -1
  30. package/dist/{import-EAXTHHXL.js → import-TH26J76F.js} +2 -2
  31. package/dist/{log-SRO5Q6AD.js → log-6SGSSR3D.js} +1 -1
  32. package/dist/{logs-HNTNNBDW.js → logs-HRBONI5I.js} +1 -1
  33. package/dist/{merge-B6SYTGI7.js → merge-KSFJKX6T.js} +1 -1
  34. package/dist/{message-delivery-FHV4NO2F.js → message-delivery-XMGV3FUM.js} +6 -6
  35. package/dist/{mind-BTXR5B3C.js → mind-YVWAHL2A.js} +17 -17
  36. package/dist/{mind-activity-tracker-PGC3DBJ7.js → mind-activity-tracker-NMDDEV3K.js} +3 -3
  37. package/dist/{mind-manager-KMY4GA2J.js → mind-manager-4NDNAYAB.js} +2 -2
  38. package/dist/{mind-sleep-FWRBIFBS.js → mind-sleep-GHPTSAYN.js} +1 -1
  39. package/dist/{mind-wake-LJK2YU5X.js → mind-wake-BJDJFMDF.js} +1 -1
  40. package/dist/{package-CUBJ4PKS.js → package-3HF5MXU2.js} +2 -1
  41. package/dist/{pages-YSTRWJR4.js → pages-Y6DRWUOJ.js} +1 -1
  42. package/dist/{publish-BZNHKUUK.js → publish-EEKTZBHW.js} +1 -1
  43. package/dist/{pull-GRQAXM2E.js → pull-D32SPFVU.js} +1 -1
  44. package/dist/{restart-CIDAKGG2.js → restart-5BMNV7KU.js} +1 -1
  45. package/dist/{schedule-NLR3LZLY.js → schedule-YEFDLVMJ.js} +1 -1
  46. package/dist/{seed-3H2MRREW.js → seed-6FEKB3YC.js} +1 -1
  47. package/dist/{send-RP2TA7SG.js → send-IISDYFCL.js} +1 -1
  48. package/dist/{service-7BFXDI6J.js → service-FASYWLTC.js} +3 -3
  49. package/dist/{setup-SSIIXQMI.js → setup-BMLM2UTK.js} +1 -1
  50. package/dist/{shared-2OGT3NSL.js → shared-LWMNTTZN.js} +4 -4
  51. package/dist/{skill-Q2Y6PQ3L.js → skill-T3EMR6IR.js} +11 -3
  52. package/dist/skills/imagegen/SKILL.md +37 -0
  53. package/dist/skills/imagegen/references/INSTALL.md +13 -0
  54. package/dist/skills/imagegen/scripts/imagegen.ts +136 -0
  55. package/dist/skills/resonance/SKILL.md +73 -0
  56. package/dist/skills/resonance/assets/default-config.json +21 -0
  57. package/dist/skills/resonance/references/INSTALL.md +23 -0
  58. package/dist/skills/resonance/scripts/resonance.ts +1250 -0
  59. package/dist/skills/volute-mind/SKILL.md +94 -4
  60. package/dist/{sleep-manager-2TMQ65E4.js → sleep-manager-RKTFZPD3.js} +6 -6
  61. package/dist/{sprout-UKCYBGHK.js → sprout-QJVGJDSH.js} +3 -3
  62. package/dist/{start-JR6CUUWF.js → start-C7XITZ5O.js} +1 -1
  63. package/dist/{status-5XDGYHKP.js → status-LYS4NUOZ.js} +1 -1
  64. package/dist/{status-H2MKDN6L.js → status-SIRPLEZC.js} +4 -3
  65. package/dist/{stop-VKPGK25U.js → stop-CVKBSLXY.js} +1 -1
  66. package/dist/tailscale-AJ4VL5XK.js +49 -0
  67. package/dist/{up-Z5JRG2M2.js → up-CJ26KQLN.js} +2 -2
  68. package/dist/{update-ELC6MEUT.js → update-7XCZMYBT.js} +7 -7
  69. package/dist/{upgrade-GXW2EQY3.js → upgrade-7RUIXGOO.js} +1 -1
  70. package/dist/{variant-A4I7PHXS.js → variant-UGREB4G5.js} +4 -4
  71. package/dist/{version-notify-LKABEJSA.js → version-notify-AZQMC32A.js} +6 -6
  72. package/dist/web-assets/assets/index-CGPSVu19.js +69 -0
  73. package/dist/web-assets/assets/index-V_rNDsM8.css +1 -0
  74. package/dist/web-assets/favicon.png +0 -0
  75. package/dist/web-assets/index.html +5 -4
  76. package/dist/web-assets/logo.png +0 -0
  77. package/drizzle/0013_user_profiles.sql +3 -0
  78. package/drizzle/0014_conversation_reads.sql +7 -0
  79. package/drizzle/meta/0013_snapshot.json +7 -0
  80. package/drizzle/meta/_journal.json +14 -0
  81. package/package.json +2 -1
  82. package/templates/_base/home/public/.gitkeep +0 -0
  83. package/templates/_base/src/lib/format-prefix.ts +18 -2
  84. package/templates/_base/src/lib/routing.ts +2 -1
  85. package/templates/_base/src/lib/types.ts +8 -0
  86. package/dist/chunk-G5KRTU2F.js +0 -76
  87. package/dist/web-assets/assets/index-CZ26vsyY.js +0 -69
  88. package/dist/web-assets/assets/index-DyyAvJwW.css +0 -1
@@ -0,0 +1,1250 @@
1
+ #!/usr/bin/env tsx
2
+
3
+ /**
4
+ * resonance.ts — semantic memory engine
5
+ *
6
+ * Stores text as vector embeddings, finds cross-memory connections via cosine similarity.
7
+ * Memories have strength/decay dynamics — recalled memories strengthen, unused ones drift.
8
+ *
9
+ * Usage:
10
+ * resonance ingest <file> # ingest a file (splits into chunks)
11
+ * resonance ingest-all # ingest all configured memory files
12
+ * resonance search "query" [--limit N] # find resonant memories
13
+ * resonance recall <id> [id2 ...] # boost specific memories (explicit recall)
14
+ * resonance random [--limit N] # pull random memories (for dreams)
15
+ * resonance report [--against <file>] # find cross-memory connections
16
+ * resonance stats # db statistics
17
+ * resonance decay # run decay pass
18
+ */
19
+
20
+ import { execFile } from "node:child_process";
21
+ import { promisify } from "node:util";
22
+
23
+ const execFileAsync = promisify(execFile);
24
+
25
+ import { createHash } from "node:crypto";
26
+ import { copyFileSync, existsSync, mkdirSync, readdirSync, readFileSync } from "node:fs";
27
+ import { createRequire } from "node:module";
28
+ import { basename, join, resolve } from "node:path";
29
+
30
+ const libsqlRequire = createRequire(import.meta.url);
31
+
32
+ // --- types ---
33
+
34
/**
 * Effective configuration: defaults from assets/default-config.json,
 * shallow-merged per top-level section with the user's
 * home/.config/resonance.json overrides (see loadConfig).
 */
interface ResonanceConfig {
  embedding: {
    provider: string;
    url: string;
    model: string;
    dimensions: number;
    apiKeyEnvVar: string;
  };
  ingestion: {
    dirs: string[];
    files: string[];
    chunkSize: number;
    chunkOverlap: number;
    ignorePatterns: string[];
  };
  dynamics: {
    decayRate: number;
    minStrength: number;
    resonanceBoost: number;
  };
}

/**
 * One ingestible unit of text. chunkIndex and contentHash start as
 * placeholders (0 / "") and are assigned by ingestFile after chunking.
 */
interface Chunk {
  content: string;
  sourceFile: string;
  sourceType: string;
  chunkIndex: number;
  contentHash: string;
  metadata: Record<string, string>;
}

/** Search strategy: keyword-only, embedding-only, or blended (default). */
type SearchMode = "hybrid" | "fts" | "vector";

/** A scored hit returned by searchFts / searchVector / search. */
interface SearchResult {
  id: number;
  content: string;
  sourceFile: string;
  sourceType: string;
  distance: number;
  similarity: number;
  // similarity scaled by memory strength (and re-normalized in hybrid mode)
  weightedSimilarity: number;
  strength: number;
  recallCount: number;
  createdAt: string;
  metadata: Record<string, string>;
  matchType: "vector" | "fts" | "both";
}
81
+
82
+ // --- config ---
83
+
84
/**
 * Load the resonance configuration.
 *
 * Reads the skill's bundled default-config.json, then — if VOLUTE_MIND_DIR
 * is set and home/.config/resonance.json exists under it — shallow-merges
 * each top-level section (embedding, ingestion, dynamics) of the override
 * onto the defaults. Fields inside a section replace the defaults
 * individually; this is NOT a deep merge below the section level.
 *
 * @throws if the override file is not a JSON object, or if a known section
 *         is present but not itself an object.
 */
export function loadConfig(): ResonanceConfig {
  // Bundled defaults live at ../assets/default-config.json relative to this
  // script. NOTE(review): URL.pathname is used raw — assumes POSIX paths;
  // likely broken on Windows drive letters. TODO confirm target platforms.
  const defaultConfigPath = resolve(
    new URL(".", import.meta.url).pathname,
    "..",
    "assets",
    "default-config.json",
  );
  const defaultConfig: ResonanceConfig = JSON.parse(readFileSync(defaultConfigPath, "utf-8"));

  const mindDir = process.env.VOLUTE_MIND_DIR;
  if (!mindDir) return defaultConfig;

  const overridePath = join(mindDir, "home", ".config", "resonance.json");
  if (!existsSync(overridePath)) return defaultConfig;

  const override = JSON.parse(readFileSync(overridePath, "utf-8"));
  if (typeof override !== "object" || override === null) {
    throw new Error("Invalid resonance.json: must be a JSON object");
  }
  // Validate each provided section before spreading it over the defaults.
  for (const key of ["embedding", "ingestion", "dynamics"] as const) {
    if (key in override && (typeof override[key] !== "object" || override[key] === null)) {
      throw new Error(`Invalid resonance.json: "${key}" must be an object`);
    }
  }
  // Shallow merge per section: override fields win.
  return {
    embedding: { ...defaultConfig.embedding, ...override.embedding },
    ingestion: { ...defaultConfig.ingestion, ...override.ingestion },
    dynamics: { ...defaultConfig.dynamics, ...override.dynamics },
  };
}
114
+
115
+ function getDbPath(): string {
116
+ const mindDir = process.env.VOLUTE_MIND_DIR;
117
+ if (!mindDir) throw new Error("VOLUTE_MIND_DIR not set");
118
+ const dir = join(mindDir, ".mind");
119
+ mkdirSync(dir, { recursive: true });
120
+ return join(dir, "resonance.db");
121
+ }
122
+
123
+ function getHomePath(): string {
124
+ const mindDir = process.env.VOLUTE_MIND_DIR;
125
+ if (!mindDir) throw new Error("VOLUTE_MIND_DIR not set");
126
+ return join(mindDir, "home");
127
+ }
128
+
129
+ function getApiKey(config: ResonanceConfig): string | null {
130
+ return process.env[config.embedding.apiKeyEnvVar] || null;
131
+ }
132
+
133
/**
 * Absolute path of the skill's root directory (one level above scripts/).
 * NOTE(review): URL.pathname is used without decoding — assumes POSIX
 * paths; not Windows-safe. TODO confirm acceptable for deployment targets.
 */
function getSkillDir(): string {
  return resolve(new URL(".", import.meta.url).pathname, "..");
}
136
+
137
+ function isInstalled(): boolean {
138
+ const mindDir = process.env.VOLUTE_MIND_DIR;
139
+ if (!mindDir) return false;
140
+ return existsSync(join(mindDir, ".mind", "resonance.db"));
141
+ }
142
+
143
/**
 * Guard used by every subcommand except `install`: exit the process with
 * an install hint unless the resonance database exists.
 */
function requireInstalled(): void {
  if (!isInstalled()) {
    console.error(
      "resonance is not set up yet. run: npx tsx .claude/skills/resonance/scripts/resonance.ts install",
    );
    process.exit(1);
  }
}
151
+
152
/**
 * One-time setup: optionally verify the embedding API key, seed the user
 * config, create the database, register a nightly ingest+decay schedule
 * via the `volute` CLI, and run an initial ingestion pass.
 *
 * Exits the process when VOLUTE_MIND_DIR is missing. A failed key check or
 * schedule registration is reported but non-fatal — the skill degrades to
 * full-text search / manual scheduling.
 */
async function runInstall(config: ResonanceConfig): Promise<void> {
  const mindDir = process.env.VOLUTE_MIND_DIR;
  if (!mindDir) {
    console.error("VOLUTE_MIND_DIR not set — are you running inside a mind?");
    process.exit(1);
  }

  // 1. Check API key (optional — enables vector search)
  const apiKeyVar = config.embedding.apiKeyEnvVar;
  const apiKey = getApiKey(config);
  if (apiKey) {
    console.log("verifying embedding API key...");
    try {
      // Round-trip a tiny request to confirm the key actually works.
      await embed(["test"], apiKey, config);
      console.log("API key verified.");
    } catch (e) {
      console.error(`embedding API test failed: ${e instanceof Error ? e.message : e}`);
      console.error("continuing without embeddings — full-text search will still work.");
    }
  } else {
    console.log(`no ${apiKeyVar} set — installing with full-text search only.`);
    console.log(`to enable vector search later: volute env set ${apiKeyVar} <your-key>`);
  }

  // 2. Copy default config if none exists
  const configPath = join(mindDir, "home", ".config", "resonance.json");
  if (!existsSync(configPath)) {
    const defaultConfig = join(getSkillDir(), "assets", "default-config.json");
    mkdirSync(join(mindDir, "home", ".config"), { recursive: true });
    copyFileSync(defaultConfig, configPath);
    console.log("created .config/resonance.json (edit to customize).");
  } else {
    console.log(".config/resonance.json already exists, keeping it.");
  }

  // 3. Initialize DB (schema creation happens in initDb)
  const db = initDb(getDbPath(), config.embedding.dimensions);
  db.close();
  console.log("initialized resonance database.");

  // 4. Set up nightly schedule (10pm: re-ingest sources, then decay pass)
  const scriptPath = ".claude/skills/resonance/scripts/resonance.ts";
  const script = `npx tsx ${scriptPath} ingest-all && npx tsx ${scriptPath} decay`;
  try {
    await execFileAsync("volute", [
      "schedule",
      "add",
      "--cron",
      "0 22 * * *",
      "--script",
      script,
      "--id",
      "resonance-nightly",
    ]);
    console.log('added nightly schedule "resonance-nightly" (10pm: ingest-all + decay).');
  } catch (e) {
    // Best-effort: print the manual command instead of failing the install.
    const msg = e instanceof Error ? e.message : String(e);
    console.log(`note: could not add schedule automatically: ${msg}`);
    console.log("you can add it manually:");
    console.log(
      ` volute schedule add --cron "0 22 * * *" --script "${script}" --id resonance-nightly`,
    );
  }

  // 5. Run initial ingestion
  console.log("\nrunning initial ingestion...");
  const db2 = initDb(getDbPath(), config.embedding.dimensions);
  try {
    const results = await ingestAll(db2, apiKey, config);
    let total = 0;
    for (const [path, count] of Object.entries(results)) {
      if (count > 0) console.log(` ${basename(path)}: ${count} chunks`);
      total += count;
    }
    console.log(`ingested ${total} chunks${apiKey ? "" : " (FTS only, no embeddings)"}.`);
  } finally {
    db2.close();
  }

  console.log("\nresonance is ready.");
}
233
+
234
+ // --- database ---
235
+
236
/** Handle type from the lazily-required `libsql` package. */
type Database = import("libsql").Database;

/**
 * Open (and if necessary create) the resonance database and ensure schema.
 *
 * Schema: a `memories` table with a libSQL F32_BLOB embedding column sized
 * to `dimensions`, plus an external-content FTS5 index kept in sync by
 * insert/delete/update triggers. WAL journal mode is enabled.
 *
 * NOTE(review): `dimensions` is interpolated into CREATE TABLE once;
 * reopening an existing DB with a different value does not migrate the
 * column — confirm callers always pass the configured dimension.
 *
 * @param dbPath     filesystem path of the libSQL database
 * @param dimensions embedding vector width (default 1536)
 */
export function initDb(dbPath: string, dimensions = 1536): Database {
  const Database = libsqlRequire("libsql");
  const db = new Database(dbPath) as Database;
  // WAL allows concurrent readers alongside a writer.
  db.exec("PRAGMA journal_mode=WAL");
  db.exec(`
    CREATE TABLE IF NOT EXISTS memories (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      content TEXT NOT NULL,
      source_file TEXT,
      source_type TEXT,
      chunk_index INTEGER DEFAULT 0,
      content_hash TEXT UNIQUE,
      embedding F32_BLOB(${dimensions}),
      strength REAL DEFAULT 1.0,
      recall_count INTEGER DEFAULT 0,
      last_recalled TEXT,
      last_decayed TEXT,
      created_at TEXT DEFAULT (datetime('now')),
      metadata TEXT
    )
  `);
  db.exec("CREATE INDEX IF NOT EXISTS idx_memories_source ON memories(source_file)");
  db.exec("CREATE INDEX IF NOT EXISTS idx_memories_strength ON memories(strength DESC)");
  db.exec("CREATE INDEX IF NOT EXISTS idx_memories_hash ON memories(content_hash)");

  // FTS5 full-text search index (external-content table over `memories`)
  db.exec(`
    CREATE VIRTUAL TABLE IF NOT EXISTS memories_fts USING fts5(
      content, source_file,
      content='memories', content_rowid='id'
    )
  `);
  // Triggers to keep FTS in sync
  db.exec(`
    CREATE TRIGGER IF NOT EXISTS memories_ai AFTER INSERT ON memories BEGIN
      INSERT INTO memories_fts(rowid, content, source_file)
      VALUES (new.id, new.content, new.source_file);
    END
  `);
  db.exec(`
    CREATE TRIGGER IF NOT EXISTS memories_ad AFTER DELETE ON memories BEGIN
      INSERT INTO memories_fts(memories_fts, rowid, content, source_file)
      VALUES ('delete', old.id, old.content, old.source_file);
    END
  `);
  db.exec(`
    CREATE TRIGGER IF NOT EXISTS memories_au AFTER UPDATE ON memories BEGIN
      INSERT INTO memories_fts(memories_fts, rowid, content, source_file)
      VALUES ('delete', old.id, old.content, old.source_file);
      INSERT INTO memories_fts(rowid, content, source_file)
      VALUES (new.id, new.content, new.source_file);
    END
  `);

  return db;
}
294
+
295
+ // --- embedding ---
296
+
297
/**
 * Embed a batch of texts via the configured OpenAI-compatible embeddings
 * endpoint.
 *
 * The response items are re-sorted by their reported `index` so the output
 * array lines up with the input order regardless of API ordering.
 *
 * @throws Error carrying the HTTP status and response body on a non-2xx
 *         response.
 */
async function embed(
  texts: string[],
  apiKey: string,
  config: ResonanceConfig,
): Promise<number[][]> {
  const resp = await fetch(config.embedding.url, {
    method: "POST",
    headers: {
      Authorization: `Bearer ${apiKey}`,
      "Content-Type": "application/json",
    },
    body: JSON.stringify({
      model: config.embedding.model,
      input: texts,
    }),
  });

  if (!resp.ok) {
    const body = await resp.text();
    throw new Error(`Embedding API error ${resp.status}: ${body}`);
  }

  const data = (await resp.json()) as {
    data: Array<{ index: number; embedding: number[] }>;
  };
  return data.data.sort((a, b) => a.index - b.index).map((e) => e.embedding);
}
324
+
325
+ function vecToJson(vec: number[]): string {
326
+ return JSON.stringify(vec);
327
+ }
328
+
329
+ // --- chunking ---
330
+
331
+ export function detectSourceType(sourceFile: string): string {
332
+ const sf = sourceFile.toLowerCase();
333
+ if (sf.includes("journal")) return "journal";
334
+ if (sf.includes("reading")) return "reading";
335
+ if (sf.includes("topic")) return "topic";
336
+ if (sf.endsWith("memory.md")) return "memory";
337
+ return "other";
338
+ }
339
+
340
+ export function isMostlyIgnored(text: string, patterns: RegExp[]): boolean {
341
+ if (patterns.length === 0) return false;
342
+ const lines = text.trim().split("\n");
343
+ if (lines.length === 0) return true;
344
+
345
+ let ignoredLines = 0;
346
+ let contentLines = 0;
347
+ for (const line of lines) {
348
+ const trimmed = line.trim();
349
+ if (!trimmed) continue;
350
+ if (patterns.some((p) => p.test(trimmed))) {
351
+ ignoredLines++;
352
+ } else {
353
+ contentLines++;
354
+ }
355
+ }
356
+ const total = ignoredLines + contentLines;
357
+ if (total === 0) return true;
358
+ return ignoredLines / total > 0.5;
359
+ }
360
+
361
+ export function stripIgnoredLines(text: string, patterns: RegExp[]): string {
362
+ if (patterns.length === 0) return text;
363
+ return text
364
+ .split("\n")
365
+ .filter((line) => !patterns.some((p) => p.test(line.trim())))
366
+ .join("\n")
367
+ .trim();
368
+ }
369
+
370
+ export function chunkByWords(
371
+ text: string,
372
+ sourceFile: string,
373
+ sourceType: string,
374
+ chunkSize: number,
375
+ chunkOverlap: number,
376
+ ): Chunk[] {
377
+ const words = text.split(/\s+/).filter(Boolean);
378
+ if (words.length <= chunkSize) {
379
+ return [
380
+ {
381
+ content: text,
382
+ sourceFile,
383
+ sourceType,
384
+ chunkIndex: 0,
385
+ contentHash: "",
386
+ metadata: {},
387
+ },
388
+ ];
389
+ }
390
+
391
+ const chunks: Chunk[] = [];
392
+ let start = 0;
393
+ while (start < words.length) {
394
+ const end = Math.min(start + chunkSize, words.length);
395
+ chunks.push({
396
+ content: words.slice(start, end).join(" "),
397
+ sourceFile,
398
+ sourceType,
399
+ chunkIndex: 0,
400
+ contentHash: "",
401
+ metadata: {},
402
+ });
403
+ if (end >= words.length) break;
404
+ start += chunkSize - chunkOverlap;
405
+ }
406
+ return chunks;
407
+ }
408
+
409
/**
 * Chunk markdown text along `#`–`######` header boundaries.
 *
 * Each body section is paired with the most recent header seen above it.
 * Sections under 15 words are dropped; when ignore patterns are
 * configured, sections that shrink below 10 words after stripping ignored
 * lines are skipped too. Oversized sections fall back to sliding-window
 * word chunking; every emitted chunk carries its section header in
 * metadata.
 */
export function chunkBySections(
  text: string,
  sourceFile: string,
  sourceType: string,
  config: ResonanceConfig,
  ignoreRe: RegExp[],
): Chunk[] {
  // split() with a capture group keeps the matched header lines in the
  // resulting array, interleaved with the section bodies.
  const sections = text.split(/^(#{1,6}\s+.+)$/m);
  const chunks: Chunk[] = [];
  let currentHeader = "";

  for (let i = 0; i < sections.length; i++) {
    let part = sections[i].trim();
    if (/^#{1,6}\s+/.test(part)) {
      // This element is a header — remember it for the following bodies.
      currentHeader = part;
      continue;
    }
    if (!part) continue;

    // Strip ignored lines if patterns configured
    if (ignoreRe.length > 0) {
      part = stripIgnoredLines(part, ignoreRe);
      if (!part || part.split(/\s+/).length < 10) continue;
    }

    const sectionText = currentHeader ? `${currentHeader}\n\n${part}` : part;
    const words = sectionText.split(/\s+/).filter(Boolean);

    // Too little content to be a meaningful memory on its own.
    if (words.length < 15) continue;

    if (words.length <= config.ingestion.chunkSize) {
      chunks.push({
        content: sectionText,
        sourceFile,
        sourceType,
        chunkIndex: 0,
        contentHash: "",
        metadata: { section: currentHeader },
      });
    } else {
      // Section exceeds the chunk size — window it by words, then tag each
      // sub-chunk with the header it came from.
      const subChunks = chunkByWords(
        sectionText,
        sourceFile,
        sourceType,
        config.ingestion.chunkSize,
        config.ingestion.chunkOverlap,
      );
      for (const sc of subChunks) {
        sc.metadata = { section: currentHeader };
      }
      chunks.push(...subChunks);
    }
  }
  return chunks;
}
464
+
465
+ export function chunkText(text: string, sourceFile: string, config: ResonanceConfig): Chunk[] {
466
+ text = text.trim();
467
+ if (!text) return [];
468
+
469
+ const sourceType = detectSourceType(sourceFile);
470
+ const ignoreRe = config.ingestion.ignorePatterns.map((p) => new RegExp(p, "m"));
471
+
472
+ // Split on markdown headers for all markdown files
473
+ if (/\n#{1,6}\s+/.test(text)) {
474
+ return chunkBySections(text, sourceFile, sourceType, config, ignoreRe);
475
+ }
476
+
477
+ // Fallback: sliding window by words
478
+ const words = text.split(/\s+/).filter(Boolean);
479
+ if (words.length < 15) return [];
480
+ return chunkByWords(
481
+ text,
482
+ sourceFile,
483
+ sourceType,
484
+ config.ingestion.chunkSize,
485
+ config.ingestion.chunkOverlap,
486
+ );
487
+ }
488
+
489
+ function hashContent(content: string): string {
490
+ return createHash("sha256").update(content).digest("hex").slice(0, 16);
491
+ }
492
+
493
+ // --- operations ---
494
+
495
/**
 * Ingest one file: chunk it, dedupe against stored content hashes, delete
 * chunks left over from previous versions of the file, then insert the new
 * chunks — with embeddings when an API key is available, FTS-only
 * otherwise.
 *
 * @returns count of newly inserted chunks; when nothing was added, returns
 *          minus the number of stale chunks removed (so 0 means no change
 *          at all and a negative value means removals only).
 */
async function ingestFile(
  db: Database,
  filePath: string,
  apiKey: string | null,
  config: ResonanceConfig,
): Promise<number> {
  const text = readFileSync(filePath, "utf-8");
  const chunks = chunkText(text, filePath, config);
  if (chunks.length === 0) return 0;

  // Assign indices and hashes
  for (let i = 0; i < chunks.length; i++) {
    chunks[i].chunkIndex = i;
    chunks[i].contentHash = hashContent(chunks[i].content);
  }

  // Check which chunks are new
  const checkStmt = db.prepare("SELECT id FROM memories WHERE content_hash = ?");
  const currentHashes = new Set(chunks.map((c) => c.contentHash));
  const newChunks = chunks.filter((c) => !checkStmt.get(c.contentHash));

  // Remove stale chunks from previous versions of this file
  const staleRows = db
    .prepare("SELECT id, content_hash FROM memories WHERE source_file = ?")
    .all(filePath) as Array<{ id: number; content_hash: string }>;
  const staleIds = staleRows.filter((r) => !currentHashes.has(r.content_hash)).map((r) => r.id);
  if (staleIds.length > 0) {
    // Dynamic placeholder list sized to staleIds; values are bound, not
    // interpolated, so this stays parameterized.
    db.prepare(`DELETE FROM memories WHERE id IN (${staleIds.map(() => "?").join(",")})`).run(
      ...staleIds,
    );
  }

  if (newChunks.length === 0) return -staleIds.length; // negative = only removals

  if (apiKey) {
    // Full ingestion with embeddings
    const insertStmt = db.prepare(`
      INSERT OR IGNORE INTO memories
      (content, source_file, source_type, chunk_index, content_hash, embedding, metadata)
      VALUES (?, ?, ?, ?, ?, vector(?), ?)
    `);

    // Embed in batches of 20 to bound request size; each batch is written
    // in its own transaction.
    const batchSize = 20;
    for (let batchStart = 0; batchStart < newChunks.length; batchStart += batchSize) {
      const batch = newChunks.slice(batchStart, batchStart + batchSize);
      // Truncate very long chunks before embedding to keep requests small.
      const texts = batch.map((c) => c.content.slice(0, 8000));
      const embeddings = await embed(texts, apiKey, config);

      db.exec("BEGIN");
      try {
        for (let j = 0; j < batch.length; j++) {
          const chunk = batch[j];
          insertStmt.run(
            chunk.content,
            chunk.sourceFile,
            chunk.sourceType,
            chunk.chunkIndex,
            chunk.contentHash,
            vecToJson(embeddings[j]),
            JSON.stringify(chunk.metadata),
          );
        }
        db.exec("COMMIT");
      } catch (e) {
        db.exec("ROLLBACK");
        throw e;
      }

      if (batchStart + batchSize < newChunks.length) {
        await new Promise((r) => setTimeout(r, 500)); // rate limiting
      }
    }
  } else {
    // FTS-only ingestion (no embeddings)
    const insertStmt = db.prepare(`
      INSERT OR IGNORE INTO memories
      (content, source_file, source_type, chunk_index, content_hash, metadata)
      VALUES (?, ?, ?, ?, ?, ?)
    `);

    db.exec("BEGIN");
    try {
      for (const chunk of newChunks) {
        insertStmt.run(
          chunk.content,
          chunk.sourceFile,
          chunk.sourceType,
          chunk.chunkIndex,
          chunk.contentHash,
          JSON.stringify(chunk.metadata),
        );
      }
      db.exec("COMMIT");
    } catch (e) {
      db.exec("ROLLBACK");
      throw e;
    }
  }

  return newChunks.length;
}
596
+
597
/**
 * Ingest every configured memory source under the mind's home directory:
 * the explicitly listed files first, then every `.md` file (sorted) inside
 * each configured directory. Missing entries produce a warning, not an
 * error.
 *
 * @returns map of home-relative path → ingestFile result (positive = new
 *          chunks; see ingestFile for the 0/negative convention).
 */
async function ingestAll(
  db: Database,
  apiKey: string | null,
  config: ResonanceConfig,
): Promise<Record<string, number>> {
  const home = getHomePath();
  const results: Record<string, number> = {};

  for (const file of config.ingestion.files) {
    const filePath = join(home, file);
    if (existsSync(filePath)) {
      results[file] = await ingestFile(db, filePath, apiKey, config);
    } else {
      console.error(` warning: configured file not found: ${file}`);
    }
  }

  for (const dir of config.ingestion.dirs) {
    const dirPath = join(home, dir);
    if (!existsSync(dirPath)) {
      console.error(` warning: configured directory not found: ${dir}`);
      continue;
    }
    // Only markdown files, in a stable (sorted) order.
    const files = readdirSync(dirPath)
      .filter((f) => f.endsWith(".md"))
      .sort();
    for (const file of files) {
      const filePath = join(dirPath, file);
      const key = join(dir, file);
      results[key] = await ingestFile(db, filePath, apiKey, config);
    }
  }

  return results;
}
632
+
633
/**
 * Full-text search over the FTS5 index.
 *
 * The user query is sanitized into an OR of quoted terms: FTS5 operator
 * characters are stripped and the keywords AND/OR/NOT/NEAR removed, so
 * arbitrary input cannot inject FTS query syntax.
 *
 * BM25 rank is mapped through score/(score+1) into a bounded 0..1
 * similarity, then weighted by memory strength (0.7 + 0.3 * strength),
 * matching the weighting used by vector search.
 */
export function searchFts(
  db: Database,
  query: string,
  limit = 5,
  minStrength = 0.0,
): SearchResult[] {
  // Sanitize and build FTS5 query — strip special syntax, quote each term
  const ftsQuery = query
    .split(/\s+/)
    .filter(Boolean)
    .map((w) => w.replace(/["""()*^{}:]/g, "").replace(/\b(AND|OR|NOT|NEAR)\b/gi, ""))
    .filter((w) => w.length > 0)
    .map((w) => `"${w}"`)
    .join(" OR ");

  if (!ftsQuery) return [];

  const rows = db
    .prepare(
      `SELECT m.id, m.content, m.source_file, m.source_type,
              m.strength, m.recall_count, m.created_at, m.metadata,
              rank
       FROM memories_fts f
       JOIN memories m ON m.id = f.rowid
       WHERE memories_fts MATCH ? AND m.strength >= ?
       ORDER BY rank
       LIMIT ?`,
    )
    .all(ftsQuery, minStrength, limit) as Array<{
    id: number;
    content: string;
    source_file: string;
    source_type: string;
    strength: number;
    recall_count: number;
    created_at: string;
    metadata: string;
    rank: number;
  }>;

  // BM25 rank is negative (closer to 0 = worse match). Convert to a positive score.
  // Use a sigmoid-like mapping so scores stay bounded but don't inflate weak matches.
  return rows.map((row) => {
    const rawScore = Math.abs(row.rank);
    // Map raw BM25 score through a saturating curve: score / (score + 1)
    // This gives 0.5 at rawScore=1, ~0.91 at rawScore=10, approaching 1.0 asymptotically
    const similarity = rawScore / (rawScore + 1);
    return {
      id: row.id,
      content: row.content,
      sourceFile: row.source_file,
      sourceType: row.source_type,
      distance: 1 - similarity,
      similarity,
      weightedSimilarity: similarity * (0.7 + 0.3 * row.strength),
      strength: row.strength,
      recallCount: row.recall_count,
      createdAt: row.created_at,
      metadata: row.metadata ? JSON.parse(row.metadata) : {},
      matchType: "fts" as const,
    };
  });
}
696
+
697
/**
 * Vector (cosine) search over embedded memories.
 *
 * Embeds the query, then asks libSQL for the nearest rows by
 * vector_distance_cos. Over-fetches limit*3 candidates so callers can
 * re-rank by strength-weighted similarity without starving the final
 * top-N. Rows without embeddings (FTS-only ingests) are excluded.
 */
async function searchVector(
  db: Database,
  query: string,
  apiKey: string,
  config: ResonanceConfig,
  limit = 5,
  minStrength = 0.0,
): Promise<SearchResult[]> {
  const [queryEmb] = await embed([query], apiKey, config);
  const queryVec = vecToJson(queryEmb);

  const rows = db
    .prepare(
      `SELECT id, content, source_file, source_type,
              vector_distance_cos(embedding, vector(?)) AS distance,
              strength, recall_count, created_at, metadata
       FROM memories
       WHERE embedding IS NOT NULL AND strength >= ?
       ORDER BY distance ASC
       LIMIT ?`,
    )
    .all(queryVec, minStrength, limit * 3) as Array<{
    id: number;
    content: string;
    source_file: string;
    source_type: string;
    distance: number;
    strength: number;
    recall_count: number;
    created_at: string;
    metadata: string;
  }>;

  // Weight by strength — stronger memories surface more easily
  return rows.map((row) => {
    const similarity = 1 - row.distance;
    const weightedSimilarity = similarity * (0.7 + 0.3 * row.strength);
    return {
      id: row.id,
      content: row.content,
      sourceFile: row.source_file,
      sourceType: row.source_type,
      distance: row.distance,
      similarity,
      weightedSimilarity,
      strength: row.strength,
      recallCount: row.recall_count,
      createdAt: row.created_at,
      metadata: row.metadata ? JSON.parse(row.metadata) : {},
      matchType: "vector" as const,
    };
  });
}
750
+
751
/**
 * Unified search entry point.
 *
 * - "fts": keyword search only.
 * - "vector": embedding search only; throws without an API key.
 * - "hybrid" (default): run both; if either side is empty use the other;
 *   otherwise normalize each side's weighted similarity by its own max and
 *   blend at 0.7 vector / 0.3 FTS, summing the contributions for memories
 *   found by both sides (matchType "both").
 *
 * In hybrid mode a vector-search failure degrades silently to FTS-only.
 */
async function search(
  db: Database,
  query: string,
  apiKey: string | null,
  config: ResonanceConfig,
  limit = 5,
  minStrength = 0.0,
  mode: SearchMode = "hybrid",
): Promise<SearchResult[]> {
  let results: SearchResult[];

  if (mode === "fts") {
    results = searchFts(db, query, limit, minStrength);
  } else if (mode === "vector") {
    if (!apiKey) throw new Error("vector search requires an API key");
    const vectorResults = await searchVector(db, query, apiKey, config, limit, minStrength);
    // searchVector over-fetches; sort by weighted score and keep top N.
    vectorResults.sort((a, b) => b.weightedSimilarity - a.weightedSimilarity);
    results = vectorResults.slice(0, limit);
  } else {
    // Hybrid: combine vector + FTS results
    let vectorResults: SearchResult[] = [];
    if (apiKey) {
      try {
        vectorResults = await searchVector(db, query, apiKey, config, limit, minStrength);
      } catch {
        // Fall back to FTS-only if vector search fails
      }
    }
    const ftsResults = searchFts(db, query, limit, minStrength);

    if (vectorResults.length === 0) {
      results = ftsResults.slice(0, limit);
    } else if (ftsResults.length === 0) {
      vectorResults.sort((a, b) => b.weightedSimilarity - a.weightedSimilarity);
      results = vectorResults.slice(0, limit);
    } else {
      // Merge: normalize scores across both sets, blend with weighting
      const vectorWeight = 0.7;
      const ftsWeight = 0.3;

      // Normalize vector similarities to 0-1 within this result set
      const maxVecSim = Math.max(...vectorResults.map((r) => r.weightedSimilarity));
      const maxFtsSim = Math.max(...ftsResults.map((r) => r.weightedSimilarity));

      const merged = new Map<number, SearchResult>();

      for (const r of vectorResults) {
        const normVec = maxVecSim > 0 ? r.weightedSimilarity / maxVecSim : 0;
        merged.set(r.id, {
          ...r,
          weightedSimilarity: normVec * vectorWeight,
          matchType: "vector",
        });
      }

      for (const r of ftsResults) {
        const normFts = maxFtsSim > 0 ? r.weightedSimilarity / maxFtsSim : 0;
        const existing = merged.get(r.id);
        if (existing) {
          // Found in both — combine scores
          existing.weightedSimilarity += normFts * ftsWeight;
          existing.matchType = "both";
        } else {
          merged.set(r.id, {
            ...r,
            weightedSimilarity: normFts * ftsWeight,
            matchType: "fts",
          });
        }
      }

      const combined = [...merged.values()];
      combined.sort((a, b) => b.weightedSimilarity - a.weightedSimilarity);
      results = combined.slice(0, limit);
    }
  }

  return results;
}
830
+
831
+ function recallMemories(db: Database, ids: number[], config: ResonanceConfig): void {
832
+ if (ids.length === 0) return;
833
+ const now = new Date().toISOString().replace("T", " ").slice(0, 19);
834
+ const updateStmt = db.prepare(
835
+ `UPDATE memories
836
+ SET recall_count = recall_count + 1,
837
+ last_recalled = ?,
838
+ strength = MIN(1.0, strength + ?)
839
+ WHERE id = ?`,
840
+ );
841
+ for (const id of ids) {
842
+ updateStmt.run(now, config.dynamics.resonanceBoost, id);
843
+ }
844
+ }
845
+
846
+ function runDecay(db: Database, config: ResonanceConfig): { decayed: number; total: number } {
847
+ const rows = db
848
+ .prepare("SELECT id, strength, last_recalled, last_decayed, created_at FROM memories")
849
+ .all() as Array<{
850
+ id: number;
851
+ strength: number;
852
+ last_recalled: string | null;
853
+ last_decayed: string | null;
854
+ created_at: string;
855
+ }>;
856
+
857
+ const now = new Date();
858
+ const nowIso = now.toISOString().slice(0, 19).replace("T", " ");
859
+ const updateStmt = db.prepare("UPDATE memories SET strength = ?, last_decayed = ? WHERE id = ?");
860
+ let decayed = 0;
861
+
862
+ for (const row of rows) {
863
+ // Use last_decayed if available, otherwise fall back to last_recalled or created_at
864
+ const since = row.last_decayed || row.last_recalled || row.created_at;
865
+ if (!since) continue;
866
+ const sinceMs = new Date(since).getTime();
867
+ if (Number.isNaN(sinceMs)) continue;
868
+ const daysSince = Math.floor((now.getTime() - sinceMs) / (1000 * 60 * 60 * 24));
869
+ if (daysSince <= 0) continue;
870
+
871
+ const newStrength = Math.max(
872
+ config.dynamics.minStrength,
873
+ row.strength - config.dynamics.decayRate * daysSince,
874
+ );
875
+ if (newStrength !== row.strength) {
876
+ updateStmt.run(newStrength, nowIso, row.id);
877
+ decayed++;
878
+ }
879
+ }
880
+
881
+ return { decayed, total: rows.length };
882
+ }
883
+
884
/** A memory row as returned by `randomMemories`, with camelCase field names. */
interface RandomResult {
  id: number; // memory row id
  content: string; // full chunk text
  sourceFile: string; // path the chunk was ingested from
  sourceType: string; // ingest category (e.g. journal)
  strength: number; // current decay-adjusted strength
  recallCount: number; // how many times this memory was recalled
  createdAt: string; // creation timestamp as stored in the DB
}
893
+
894
+ function randomMemories(
895
+ db: Database,
896
+ limit = 5,
897
+ minStrength = 0.0,
898
+ maxStrength = 1.0,
899
+ ): RandomResult[] {
900
+ return (
901
+ db
902
+ .prepare(
903
+ `SELECT id, content, source_file, source_type, strength, recall_count, created_at
904
+ FROM memories
905
+ WHERE strength >= ? AND strength <= ?
906
+ ORDER BY RANDOM()
907
+ LIMIT ?`,
908
+ )
909
+ .all(minStrength, maxStrength, limit) as Array<{
910
+ id: number;
911
+ content: string;
912
+ source_file: string;
913
+ source_type: string;
914
+ strength: number;
915
+ recall_count: number;
916
+ created_at: string;
917
+ }>
918
+ ).map((row) => ({
919
+ id: row.id,
920
+ content: row.content,
921
+ sourceFile: row.source_file,
922
+ sourceType: row.source_type,
923
+ strength: row.strength,
924
+ recallCount: row.recall_count,
925
+ createdAt: row.created_at,
926
+ }));
927
+ }
928
+
929
+ async function resonanceReport(
930
+ db: Database,
931
+ apiKey: string | null,
932
+ config: ResonanceConfig,
933
+ againstFile?: string,
934
+ ): Promise<string> {
935
+ let text: string;
936
+ let source: string;
937
+
938
+ if (againstFile) {
939
+ const filePath = resolve(againstFile);
940
+ if (!existsSync(filePath)) return `file not found: ${filePath}`;
941
+ text = readFileSync(filePath, "utf-8");
942
+ source = filePath;
943
+ } else {
944
+ const home = getHomePath();
945
+ const today = new Date().toISOString().slice(0, 10);
946
+ const journalPath = join(home, "memory", "journal", `${today}.md`);
947
+ if (!existsSync(journalPath)) return "no journal entry for today.";
948
+ text = readFileSync(journalPath, "utf-8");
949
+ source = journalPath;
950
+ }
951
+
952
+ const chunks = chunkText(text, source, config);
953
+ if (chunks.length === 0) return "nothing to search against.";
954
+
955
+ interface Resonance {
956
+ queryExcerpt: string;
957
+ matchExcerpt: string;
958
+ source: string;
959
+ similarity: number;
960
+ strength: number;
961
+ recallCount: number;
962
+ }
963
+
964
+ const allResonances: Resonance[] = [];
965
+ const searchChunks = chunks.slice(0, 10);
966
+
967
+ for (const chunk of searchChunks) {
968
+ const results = await search(db, chunk.content.slice(0, 2000), apiKey, config, 3);
969
+ for (const r of results) {
970
+ if (r.sourceFile === source) continue;
971
+ if (r.similarity < 0.4) continue;
972
+ allResonances.push({
973
+ queryExcerpt: `${chunk.content.slice(0, 100)}...`,
974
+ matchExcerpt: `${r.content.slice(0, 200)}...`,
975
+ source: r.sourceFile,
976
+ similarity: r.similarity,
977
+ strength: r.strength,
978
+ recallCount: r.recallCount,
979
+ });
980
+ }
981
+ }
982
+
983
+ if (allResonances.length === 0) return "no strong resonances found today.";
984
+
985
+ // Deduplicate
986
+ const seen = new Set<string>();
987
+ const unique: Resonance[] = [];
988
+ for (const r of allResonances) {
989
+ const key = `${r.source}:${r.similarity.toFixed(2)}`;
990
+ if (!seen.has(key)) {
991
+ seen.add(key);
992
+ unique.push(r);
993
+ }
994
+ }
995
+
996
+ unique.sort((a, b) => b.similarity - a.similarity);
997
+
998
+ const lines = ["# resonance report", ""];
999
+ for (const r of unique.slice(0, 10)) {
1000
+ const sourceShort = basename(r.source);
1001
+ lines.push(
1002
+ `**${sourceShort}** (sim: ${r.similarity.toFixed(3)}, strength: ${r.strength.toFixed(2)}, recalls: ${r.recallCount})`,
1003
+ );
1004
+ lines.push(`> ${r.matchExcerpt.slice(0, 150)}`);
1005
+ lines.push(` <- triggered by: ${r.queryExcerpt.slice(0, 80)}`);
1006
+ lines.push("");
1007
+ }
1008
+
1009
+ return lines.join("\n");
1010
+ }
1011
+
1012
+ function getStats(db: Database): Record<string, unknown> {
1013
+ const total = (db.prepare("SELECT COUNT(*) as c FROM memories").get() as { c: number }).c;
1014
+ const byType = db
1015
+ .prepare("SELECT source_type, COUNT(*) as c FROM memories GROUP BY source_type")
1016
+ .all() as Array<{ source_type: string; c: number }>;
1017
+ const avgRow = db.prepare("SELECT AVG(strength) as a FROM memories").get() as {
1018
+ a: number | null;
1019
+ };
1020
+ const avgStrength = avgRow.a != null ? Math.round(avgRow.a * 1000) / 1000 : 0;
1021
+ const mostRecalled = db
1022
+ .prepare(
1023
+ "SELECT content, recall_count, source_file FROM memories ORDER BY recall_count DESC LIMIT 5",
1024
+ )
1025
+ .all() as Array<{ content: string; recall_count: number; source_file: string }>;
1026
+ const strongest = db
1027
+ .prepare("SELECT content, strength, source_file FROM memories ORDER BY strength DESC LIMIT 5")
1028
+ .all() as Array<{ content: string; strength: number; source_file: string }>;
1029
+ const weakest = db
1030
+ .prepare(
1031
+ "SELECT content, strength, source_file FROM memories WHERE strength < 0.5 ORDER BY strength ASC LIMIT 5",
1032
+ )
1033
+ .all() as Array<{ content: string; strength: number; source_file: string }>;
1034
+
1035
+ return {
1036
+ total_memories: total,
1037
+ by_type: Object.fromEntries(byType.map((r) => [r.source_type, r.c])),
1038
+ avg_strength: avgStrength,
1039
+ most_recalled: mostRecalled.map((r) => ({
1040
+ content: r.content.slice(0, 80),
1041
+ recalls: r.recall_count,
1042
+ source: basename(r.source_file),
1043
+ })),
1044
+ strongest: strongest.map((r) => ({
1045
+ content: r.content.slice(0, 80),
1046
+ strength: r.strength,
1047
+ source: basename(r.source_file),
1048
+ })),
1049
+ weakest: weakest.map((r) => ({
1050
+ content: r.content.slice(0, 80),
1051
+ strength: r.strength,
1052
+ source: basename(r.source_file),
1053
+ })),
1054
+ };
1055
+ }
1056
+
1057
+ // --- cli ---
1058
+
1059
/**
 * CLI entry point. Dispatches on the first positional argument:
 * install | ingest | ingest-all | search | recall | random | report | stats | decay.
 * Opens the database for every command except `install` and always closes it
 * before returning, even on error.
 */
async function main() {
  const args = process.argv.slice(2);
  const cmd = args[0];

  // No command: print usage and exit successfully.
  if (!cmd) {
    console.log(
      "Usage: resonance <install|ingest|ingest-all|search|recall|random|report|stats|decay> [args]",
    );
    process.exit(0);
  }

  const config = loadConfig();

  // `install` runs before requireInstalled() and needs no database.
  if (cmd === "install") {
    await runInstall(config);
    return;
  }

  requireInstalled();
  const db = initDb(getDbPath(), config.embedding.dimensions);

  try {
    if (cmd === "ingest") {
      // Ingest a single file; requires a path argument.
      if (!args[1]) {
        console.log("Usage: resonance ingest <file>");
        process.exit(1);
      }
      const apiKey = getApiKey(config);
      const filePath = resolve(args[1]);
      if (!existsSync(filePath)) {
        console.error(`file not found: ${filePath}`);
        process.exit(1);
      }
      const count = await ingestFile(db, filePath, apiKey, config);
      const mode = apiKey ? "" : " (FTS only, no embeddings)";
      // Negative counts signal stale-chunk removal rather than ingestion.
      if (count < 0) {
        console.log(`removed ${-count} stale chunks from ${basename(filePath)}`);
      } else {
        console.log(`ingested ${count} new chunks from ${basename(filePath)}${mode}`);
      }
    } else if (cmd === "ingest-all") {
      // Ingest every tracked file; per-file counts use the same sign
      // convention as `ingest` (negative = stale chunks removed).
      const apiKey = getApiKey(config);
      const results = await ingestAll(db, apiKey, config);
      let added = 0;
      let removed = 0;
      for (const [path, count] of Object.entries(results)) {
        if (count > 0) console.log(` ${basename(path)}: +${count} chunks`);
        else if (count < 0) console.log(` ${basename(path)}: ${count} stale chunks removed`);
        if (count > 0) added += count;
        else removed += -count;
      }
      const mode = apiKey ? "" : " (FTS only, no embeddings)";
      const parts: string[] = [];
      if (added > 0) parts.push(`${added} new chunks ingested${mode}`);
      if (removed > 0) parts.push(`${removed} stale chunks removed`);
      console.log(`\n${parts.length > 0 ? parts.join(", ") : "no changes"}`);
    } else if (cmd === "search") {
      if (!args[1]) {
        console.log('Usage: resonance search "query" [--limit N] [--fts] [--vector]');
        process.exit(1);
      }
      const apiKey = getApiKey(config);
      const query = args[1];
      let limit = 5;
      const limitIdx = args.indexOf("--limit");
      if (limitIdx !== -1 && args[limitIdx + 1]) {
        // NOTE(review): a non-numeric --limit yields NaN here, which makes
        // the result list empty downstream — confirm that is acceptable.
        limit = parseInt(args[limitIdx + 1], 10);
      }
      // --fts wins over --vector when both flags are given.
      let searchMode: SearchMode = "hybrid";
      if (args.includes("--fts")) searchMode = "fts";
      else if (args.includes("--vector")) searchMode = "vector";

      const results = await search(db, query, apiKey, config, limit, 0.0, searchMode);
      if (results.length === 0) {
        // Distinguish "nothing ingested" from "no match".
        const total = (db.prepare("SELECT COUNT(*) as c FROM memories").get() as { c: number }).c;
        if (total === 0) {
          console.log("no memories ingested yet. run ingest-all first.");
        } else {
          console.log("no resonant memories found for this query.");
        }
      }
      for (let i = 0; i < results.length; i++) {
        const r = results[i];
        const sourceShort = basename(r.sourceFile);
        // matchType is only meaningful in hybrid mode.
        const matchTag = searchMode === "hybrid" ? ` [${r.matchType}]` : "";
        console.log(
          `\n--- ${i + 1}. [id:${r.id}] ${sourceShort} (sim: ${r.similarity.toFixed(3)}, str: ${r.strength.toFixed(2)}, recalls: ${r.recallCount})${matchTag} ---`,
        );
        console.log(r.content.slice(0, 300));
      }
    } else if (cmd === "recall") {
      // Boost one or more memories by id; non-numeric ids are ignored.
      if (!args[1]) {
        console.log("Usage: resonance recall <id> [id2 id3 ...]");
        process.exit(1);
      }
      const ids = args
        .slice(1)
        .map((s) => parseInt(s, 10))
        .filter((n) => !Number.isNaN(n));
      recallMemories(db, ids, config);
      console.log(
        `recalled ${ids.length} memories (strength boosted by ${config.dynamics.resonanceBoost})`,
      );
    } else if (cmd === "random") {
      // Random sample with optional --limit / --min-strength / --max-strength.
      let limit = 5;
      let minStr = 0.0;
      let maxStr = 1.0;
      const limitIdx = args.indexOf("--limit");
      if (limitIdx !== -1 && args[limitIdx + 1]) {
        limit = parseInt(args[limitIdx + 1], 10);
      }
      const minIdx = args.indexOf("--min-strength");
      if (minIdx !== -1 && args[minIdx + 1]) {
        minStr = parseFloat(args[minIdx + 1]);
      }
      const maxIdx = args.indexOf("--max-strength");
      if (maxIdx !== -1 && args[maxIdx + 1]) {
        maxStr = parseFloat(args[maxIdx + 1]);
      }
      const results = randomMemories(db, limit, minStr, maxStr);
      if (results.length === 0) {
        console.log("no memories in the specified strength range.");
      }
      for (let i = 0; i < results.length; i++) {
        const r = results[i];
        const sourceShort = basename(r.sourceFile);
        console.log(
          `\n--- ${i + 1}. [id:${r.id}] ${sourceShort} (str: ${r.strength.toFixed(2)}, recalls: ${r.recallCount}) ---`,
        );
        console.log(r.content.slice(0, 300));
      }
    } else if (cmd === "report") {
      // Resonance report against --against <file>, or today's journal.
      const apiKey = getApiKey(config);
      let against: string | undefined;
      const againstIdx = args.indexOf("--against");
      if (againstIdx !== -1 && args[againstIdx + 1]) {
        against = resolve(args[againstIdx + 1]);
      }
      const report = await resonanceReport(db, apiKey, config, against);
      console.log(report);
    } else if (cmd === "stats") {
      const s = getStats(db);
      // Narrow the Record<string, unknown> to the fields printed below.
      const stats = s as {
        total_memories: number;
        by_type: Record<string, number>;
        avg_strength: number;
        most_recalled: Array<{ content: string; recalls: number; source: string }>;
        weakest: Array<{ content: string; strength: number; source: string }>;
      };
      console.log(`total memories: ${stats.total_memories}`);
      if (stats.total_memories === 0) {
        console.log("\nno memories yet. run ingest-all to populate from your memory files.");
      } else {
        console.log(`by type: ${JSON.stringify(stats.by_type)}`);
        console.log(`avg strength: ${stats.avg_strength}`);
      }
      if (stats.most_recalled.length > 0) {
        console.log("\nmost recalled:");
        for (const m of stats.most_recalled) {
          console.log(` [${m.recalls}x] ${m.source}: ${m.content}`);
        }
      }
      if (stats.weakest.length > 0) {
        console.log("\nweakest (drifting):");
        for (const m of stats.weakest) {
          console.log(` [str: ${m.strength.toFixed(2)}] ${m.source}: ${m.content}`);
        }
      }
    } else if (cmd === "decay") {
      // Run one decay pass over all memories.
      const result = runDecay(db, config);
      console.log(`decay pass: ${result.decayed}/${result.total} memories decayed`);
    } else {
      console.error(`unknown command: ${cmd}`);
      process.exit(1);
    }
  } finally {
    // Always release the database handle, even when a command throws.
    db.close();
  }
}
1238
+
1239
+ // Only run CLI when executed directly (not when imported by tests)
1240
+ const isDirectRun =
1241
+ process.argv[1] !== undefined &&
1242
+ (import.meta.url === `file://${process.argv[1]}` ||
1243
+ import.meta.url === `file://${resolve(process.argv[1])}`);
1244
+
1245
+ if (isDirectRun) {
1246
+ main().catch((err) => {
1247
+ console.error(err);
1248
+ process.exit(1);
1249
+ });
1250
+ }