neuromcp 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js ADDED
@@ -0,0 +1,2024 @@
1
+ #!/usr/bin/env node
2
+
3
+ // src/index.ts
4
+ import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
5
+
6
+ // src/config.ts
7
+ import { resolve } from "path";
8
+ import { homedir } from "os";
9
// Reads a string environment variable, returning `fallback` when it is unset.
function env(key, fallback) {
  const value = process.env[key];
  return value === undefined ? fallback : value;
}
12
// Reads a numeric environment variable; non-numeric or unset values yield `fallback`.
function envNum(key, fallback) {
  const raw = process.env[key];
  if (raw === undefined) {
    return fallback;
  }
  const parsed = Number(raw);
  if (!Number.isFinite(parsed)) {
    return fallback;
  }
  return parsed;
}
18
// Reads a boolean environment variable. Only the exact strings "true" and "1"
// are treated as true; any other set value is false; unset yields `fallback`.
function envBool(key, fallback) {
  const raw = process.env[key];
  if (raw === undefined) {
    return fallback;
  }
  return ["true", "1"].includes(raw);
}
23
/**
 * Builds the server configuration from environment variables.
 * Every key has a default so the server runs with zero configuration;
 * the database defaults to ~/.neuromcp/memory.db.
 * @returns {object} configuration object consumed throughout the server
 */
function loadConfig() {
  const defaultDbPath = resolve(homedir(), ".neuromcp", "memory.db");
  return {
    dbPath: env("NEUROMCP_DB_PATH", defaultDbPath),
    maxDbSizeMb: envNum("NEUROMCP_MAX_DB_SIZE_MB", 500),
    // "auto" lets the embedding factory pick the first available provider.
    embeddingProvider: env("NEUROMCP_EMBEDDING_PROVIDER", "auto"),
    embeddingModel: env("NEUROMCP_EMBEDDING_MODEL", "auto"),
    ollamaHost: env("OLLAMA_HOST", "http://localhost:11434"),
    // Optional remote embedding endpoint; null means "not configured".
    embeddingUrl: process.env["NEUROMCP_EMBEDDING_URL"] ?? null,
    defaultNamespace: env("NEUROMCP_DEFAULT_NAMESPACE", "default"),
    tombstoneTtlDays: envNum("NEUROMCP_TOMBSTONE_TTL_DAYS", 30),
    autoConsolidate: envBool("NEUROMCP_AUTO_CONSOLIDATE", false),
    consolidateIntervalHours: envNum("NEUROMCP_CONSOLIDATE_INTERVAL_HOURS", 24),
    decayLambda: envNum("NEUROMCP_DECAY_LAMBDA", 0.01),
    // Similarity cutoff above which storeMemory treats new content as a duplicate.
    dedupThreshold: envNum("NEUROMCP_DEDUP_THRESHOLD", 0.92),
    minImportance: envNum("NEUROMCP_MIN_IMPORTANCE", 0.05),
    autoCommitSimilarity: envNum("NEUROMCP_AUTO_COMMIT_SIMILARITY", 0.95),
    sweepIntervalHours: envNum("NEUROMCP_SWEEP_INTERVAL_HOURS", 6),
    claudeCodeIntegration: env("NEUROMCP_CLAUDE_CODE_INTEGRATION", "auto"),
    logLevel: env("NEUROMCP_LOG_LEVEL", "info"),
    logFormat: env("NEUROMCP_LOG_FORMAT", "text")
  };
}
46
+
47
+ // src/observability/logger.ts
48
// Severity ranks used to filter entries against the configured log level.
var LEVELS = { debug: 0, info: 1, warn: 2, error: 3 };
// Renders one log entry as a single text line:
// "<timestamp> <LEVEL> [<component>] <message> [op=<id>] [<n>ms] extra=... "
// Extra metadata keys are appended as key=JSON(value) pairs.
function formatText(entry) {
  const pieces = [
    entry.timestamp,
    entry.level.toUpperCase().padEnd(5),
    `[${entry.component}]`,
    entry.message
  ];
  if (entry.operation_id !== undefined) {
    pieces.push(`op=${entry.operation_id}`);
  }
  if (entry.duration_ms !== undefined) {
    pieces.push(`${entry.duration_ms}ms`);
  }
  // Base fields are already rendered positionally; everything else is key=value.
  const handled = new Set(["timestamp", "level", "component", "message", "operation_id", "duration_ms"]);
  for (const [key, value] of Object.entries(entry)) {
    if (handled.has(key)) continue;
    pieces.push(`${key}=${JSON.stringify(value)}`);
  }
  return pieces.join(" ");
}
// Renders one log entry as a single JSON line.
function formatJson(entry) {
  return JSON.stringify(entry);
}
73
/**
 * Creates a leveled logger writing one line per entry to stderr.
 * NOTE(review): stderr is presumably used to keep logs off stdout, which the
 * MCP stdio transport occupies — confirm against src/index.ts.
 * @param {{level: string, format: string}} options - level is one of LEVELS'
 *   keys; format "json" selects formatJson, anything else formatText.
 */
function createLogger(options) {
  const threshold = LEVELS[options.level];
  const format = options.format === "json" ? formatJson : formatText;
  function log(level, component, message, metadata, operationId) {
    // Drop entries below the configured severity threshold.
    if (LEVELS[level] < threshold) return;
    const entry = {
      timestamp: (/* @__PURE__ */ new Date()).toISOString(),
      level,
      component,
      message,
      // operation_id is only included when supplied; metadata keys are
      // spread last and can therefore override the base fields.
      ...operationId !== void 0 ? { operation_id: operationId } : {},
      ...metadata ?? {}
    };
    process.stderr.write(format(entry) + "\n");
  }
  return {
    debug: (component, message, metadata, operationId) => log("debug", component, message, metadata, operationId),
    info: (component, message, metadata, operationId) => log("info", component, message, metadata, operationId),
    warn: (component, message, metadata, operationId) => log("warn", component, message, metadata, operationId),
    error: (component, message, metadata, operationId) => log("error", component, message, metadata, operationId)
  };
}
95
+
96
+ // src/observability/metrics.ts
97
+ import { randomBytes } from "crypto";
98
// Nearest-rank percentile of an ascending-sorted array; 0 for an empty array.
function percentile(sorted, p) {
  if (sorted.length === 0) return 0;
  const rank = Math.ceil((p / 100) * sorted.length) - 1;
  return sorted[Math.max(0, rank)];
}
// In-process metrics registry: monotonic counters, value histograms
// (summarized as p50/p95/p99 at snapshot time) and point-in-time gauges.
function createMetrics() {
  const counters = new Map();
  const histograms = new Map();
  const gauges = new Map();
  const summarize = (values) => {
    const ascending = [...values].sort((a, b) => a - b);
    return {
      p50: percentile(ascending, 50),
      p95: percentile(ascending, 95),
      p99: percentile(ascending, 99),
      count: ascending.length
    };
  };
  return {
    increment(name, by = 1) {
      counters.set(name, (counters.get(name) ?? 0) + by);
    },
    record(name, value) {
      const bucket = histograms.get(name);
      if (bucket === undefined) {
        histograms.set(name, [value]);
      } else {
        bucket.push(value);
      }
    },
    gauge(name, value) {
      gauges.set(name, value);
    },
    snapshot() {
      const histogramSnapshot = {};
      for (const [name, values] of histograms) {
        histogramSnapshot[name] = summarize(values);
      }
      return {
        counters: Object.fromEntries(counters),
        histograms: histogramSnapshot,
        gauges: Object.fromEntries(gauges)
      };
    },
    // 32-hex-char operation correlation id from CSPRNG bytes.
    newOperationId() {
      return randomBytes(16).toString("hex");
    }
  };
}
144
+
145
+ // src/storage/database.ts
146
+ import { mkdirSync } from "fs";
147
+ import { dirname } from "path";
148
+ import Database from "better-sqlite3";
149
// Handle to the most recently opened database.
// NOTE(review): assigned in openDatabase() but never read in this chunk —
// possibly consumed elsewhere in the file, or vestigial; verify before removal.
var _db = null;
// Connection pragmas applied to every new database handle:
// WAL journaling (concurrent readers), NORMAL sync (safe under WAL),
// foreign keys on, 5s busy timeout, ~64MB page cache (negative = KiB),
// and incremental auto-vacuum.
var PRAGMAS = [
  "PRAGMA journal_mode = WAL;",
  "PRAGMA synchronous = NORMAL;",
  "PRAGMA foreign_keys = ON;",
  "PRAGMA busy_timeout = 5000;",
  "PRAGMA cache_size = -64000;",
  "PRAGMA auto_vacuum = INCREMENTAL;"
];
/**
 * Opens (creating if needed) the SQLite database at `path`, ensuring the
 * parent directory exists and applying PRAGMAS to the new connection.
 * @returns the better-sqlite3 Database handle
 */
function openDatabase(path) {
  mkdirSync(dirname(path), { recursive: true });
  const db = new Database(path);
  for (const pragma of PRAGMAS) {
    // better-sqlite3's pragma() wants "name = value" without the PRAGMA
    // keyword or trailing semicolon, so strip both from the canonical list.
    db.pragma(pragma.replace(/^PRAGMA\s+/, "").replace(";", ""));
  }
  _db = db;
  return db;
}
167
+
168
+ // src/storage/migrations.ts
169
+ import { copyFileSync } from "fs";
170
+
171
+ // src/storage/schema.ts
172
// Target schema version enforced by runMigrations().
var SCHEMA_VERSION = 1;
// Core tables. All timestamp defaults use strftime('%Y-%m-%dT%H:%M:%fZ','now')
// for millisecond-precision UTC ISO-8601 strings. `tags` and `metadata` are
// JSON-encoded TEXT columns ('[]' / '{}' defaults).
var CREATE_TABLES = `
CREATE TABLE IF NOT EXISTS memories (
  id TEXT PRIMARY KEY,
  content_hash TEXT NOT NULL,
  content TEXT NOT NULL,
  summary TEXT,
  embedding_model TEXT NOT NULL DEFAULT '',
  embedding_dim INTEGER NOT NULL DEFAULT 0,
  namespace TEXT NOT NULL DEFAULT 'default',
  project_id TEXT,
  agent_id TEXT,
  source TEXT NOT NULL DEFAULT 'user',
  source_trust TEXT NOT NULL DEFAULT 'medium',
  visibility TEXT NOT NULL DEFAULT 'namespace',
  schema_version INTEGER NOT NULL DEFAULT 1,
  category TEXT NOT NULL DEFAULT 'general',
  tags TEXT NOT NULL DEFAULT '[]',
  importance REAL NOT NULL DEFAULT 0.5,
  access_count INTEGER NOT NULL DEFAULT 0,
  created_at TEXT NOT NULL DEFAULT (strftime('%Y-%m-%dT%H:%M:%fZ', 'now')),
  updated_at TEXT NOT NULL DEFAULT (strftime('%Y-%m-%dT%H:%M:%fZ', 'now')),
  last_accessed_at TEXT,
  expires_at TEXT,
  is_deleted INTEGER NOT NULL DEFAULT 0,
  tombstoned_at TEXT,
  supersedes_id TEXT,
  superseded_by_id TEXT,
  metadata TEXT NOT NULL DEFAULT '{}'
);

CREATE TABLE IF NOT EXISTS consolidation_log (
  id TEXT PRIMARY KEY,
  operation_id TEXT NOT NULL,
  action TEXT NOT NULL,
  source_ids TEXT NOT NULL DEFAULT '[]',
  result_id TEXT,
  plan_snapshot TEXT,
  reason TEXT,
  namespace TEXT,
  created_at TEXT NOT NULL DEFAULT (strftime('%Y-%m-%dT%H:%M:%fZ', 'now'))
);

CREATE TABLE IF NOT EXISTS operations (
  id TEXT PRIMARY KEY,
  type TEXT NOT NULL,
  status TEXT NOT NULL DEFAULT 'running',
  namespace TEXT,
  started_at TEXT NOT NULL DEFAULT (strftime('%Y-%m-%dT%H:%M:%fZ', 'now')),
  completed_at TEXT,
  items_total INTEGER,
  items_processed INTEGER NOT NULL DEFAULT 0,
  error TEXT,
  metadata TEXT NOT NULL DEFAULT '{}'
);

CREATE TABLE IF NOT EXISTS schema_version (
  version INTEGER PRIMARY KEY,
  applied_at TEXT NOT NULL DEFAULT (strftime('%Y-%m-%dT%H:%M:%fZ', 'now')),
  description TEXT
);
`;
// Secondary indexes covering the WHERE clauses used by the store/search/
// recall/forget tools, plus the composite (namespace, is_deleted) hot path.
var CREATE_INDEXES = `
CREATE INDEX IF NOT EXISTS idx_memories_namespace ON memories(namespace);
CREATE INDEX IF NOT EXISTS idx_memories_content_hash ON memories(content_hash);
CREATE INDEX IF NOT EXISTS idx_memories_category ON memories(category);
CREATE INDEX IF NOT EXISTS idx_memories_importance ON memories(importance);
CREATE INDEX IF NOT EXISTS idx_memories_created_at ON memories(created_at);
CREATE INDEX IF NOT EXISTS idx_memories_updated_at ON memories(updated_at);
CREATE INDEX IF NOT EXISTS idx_memories_is_deleted ON memories(is_deleted);
CREATE INDEX IF NOT EXISTS idx_memories_expires_at ON memories(expires_at);
CREATE INDEX IF NOT EXISTS idx_memories_project_id ON memories(project_id);
CREATE INDEX IF NOT EXISTS idx_memories_agent_id ON memories(agent_id);
CREATE INDEX IF NOT EXISTS idx_memories_source ON memories(source);
CREATE INDEX IF NOT EXISTS idx_memories_namespace_deleted ON memories(namespace, is_deleted);
CREATE INDEX IF NOT EXISTS idx_consolidation_log_operation_id ON consolidation_log(operation_id);
CREATE INDEX IF NOT EXISTS idx_consolidation_log_namespace ON consolidation_log(namespace);
CREATE INDEX IF NOT EXISTS idx_operations_type ON operations(type);
CREATE INDEX IF NOT EXISTS idx_operations_status ON operations(status);
CREATE INDEX IF NOT EXISTS idx_operations_namespace ON operations(namespace);
`;
253
// Reports whether the FTS5 table `memories_fts` already exists in `db`.
function ftsTableExists(db) {
  const found = db
    .prepare("SELECT name FROM sqlite_master WHERE type='table' AND name='memories_fts'")
    .get();
  return found !== undefined;
}
259
/**
 * Creates all tables and indexes (idempotent via IF NOT EXISTS), then the
 * FTS5 index — guarded by an explicit existence check before creation.
 * The FTS table is an external-content table backed by `memories`, sharing
 * its rowid so search can join back via m.rowid = f.rowid.
 */
function applySchema(db) {
  db.exec(CREATE_TABLES);
  db.exec(CREATE_INDEXES);
  if (!ftsTableExists(db)) {
    db.exec(`
CREATE VIRTUAL TABLE memories_fts USING fts5(
  content,
  summary,
  tags,
  category,
  content='memories',
  content_rowid='rowid'
);
`);
  }
}
275
+
276
+ // src/storage/migrations.ts
277
// Returns the highest applied schema version, or 0 when the schema_version
// table does not exist yet (fresh database) or holds no rows.
function getCurrentVersion(db) {
  const tableRow = db
    .prepare("SELECT name FROM sqlite_master WHERE type='table' AND name='schema_version'")
    .get();
  if (tableRow === undefined) {
    return 0;
  }
  const versionRow = db
    .prepare("SELECT MAX(version) AS version FROM schema_version")
    .get();
  return versionRow?.version ?? 0;
}
289
// Upserts a schema_version row, stamping applied_at with the current UTC time.
function recordVersion(db, version, description) {
  const sql =
    "INSERT OR REPLACE INTO schema_version (version, applied_at, description) VALUES (?, strftime('%Y-%m-%dT%H:%M:%fZ', 'now'), ?)";
  db.prepare(sql).run(version, description);
}
294
/**
 * Brings the database schema up to SCHEMA_VERSION.
 * No-op when already current. Existing versioned databases (version > 0) get
 * a file-level backup copy next to the db before any schema change; fresh
 * databases (version 0) are created without a backup.
 */
function runMigrations(db, dbPath, logger) {
  const currentVersion = getCurrentVersion(db);
  if (currentVersion >= SCHEMA_VERSION) {
    logger.debug("migrations", "Schema is up to date", {
      currentVersion,
      targetVersion: SCHEMA_VERSION
    });
    return;
  }
  if (currentVersion > 0) {
    const backupPath = `${dbPath}.backup-v${currentVersion}`;
    logger.info("migrations", "Backing up database before migration", {
      from: currentVersion,
      to: SCHEMA_VERSION,
      backupPath
    });
    copyFileSync(dbPath, backupPath);
  }
  logger.info("migrations", "Applying schema migration", {
    from: currentVersion,
    to: SCHEMA_VERSION
  });
  applySchema(db);
  recordVersion(db, SCHEMA_VERSION, `Migration from v${currentVersion} to v${SCHEMA_VERSION}`);
  logger.info("migrations", "Schema migration complete", {
    version: SCHEMA_VERSION
  });
}
322
+
323
+ // src/vectors/sqlite-vec.ts
324
+ import * as sqliteVec from "sqlite-vec";
325
// Vector index backed by the sqlite-vec extension's vec0 virtual table.
// Stores one float[dimensions] embedding per memory id; all methods except
// the constructor require initialize() to have been called.
var SqliteVecStore = class {
  name = "sqlite-vec";
  dimensions;
  // Set by initialize(); getDb() throws until then.
  db = null;
  constructor(dimensions) {
    this.dimensions = dimensions;
  }
  // Loads the sqlite-vec extension into `db` and creates the vec0 table
  // sized to this store's embedding dimensionality.
  initialize(db) {
    sqliteVec.load(db);
    db.exec(`
CREATE VIRTUAL TABLE IF NOT EXISTS memories_vec
USING vec0(
  id TEXT PRIMARY KEY,
  embedding float[${this.dimensions}]
);
`);
    this.db = db;
  }
  // Replaces any existing row for `id` with the given Float32Array embedding.
  // Implemented as delete-then-insert (no native UPSERT used here).
  upsert(id, embedding) {
    const db = this.getDb();
    // Zero-copy view over the Float32Array's underlying bytes.
    const buf = Buffer.from(embedding.buffer, embedding.byteOffset, embedding.byteLength);
    db.prepare("DELETE FROM memories_vec WHERE id = ?").run(id);
    db.prepare("INSERT INTO memories_vec (id, embedding) VALUES (?, ?)").run(id, buf);
  }
  // Upserts many entries inside a single transaction for atomicity and speed.
  upsertBatch(entries) {
    const db = this.getDb();
    const deleteStmt = db.prepare("DELETE FROM memories_vec WHERE id = ?");
    const insertStmt = db.prepare("INSERT INTO memories_vec (id, embedding) VALUES (?, ?)");
    const runBatch = db.transaction(() => {
      for (const entry of entries) {
        const buf = Buffer.from(
          entry.embedding.buffer,
          entry.embedding.byteOffset,
          entry.embedding.byteLength
        );
        deleteStmt.run(entry.id);
        insertStmt.run(entry.id, buf);
      }
    });
    runBatch();
  }
  // k-nearest-neighbor search via vec0's MATCH operator.
  // Returns ids with their distance (smaller = closer).
  search(query, k) {
    const db = this.getDb();
    const buf = Buffer.from(query.buffer, query.byteOffset, query.byteLength);
    const rows = db.prepare(
      "SELECT id, distance FROM memories_vec WHERE embedding MATCH ? ORDER BY distance LIMIT ?"
    ).all(buf, k);
    return rows.map((row) => ({
      id: row.id,
      distance: row.distance
    }));
  }
  // Deletes the vector for `id`; no-op if absent.
  remove(id) {
    const db = this.getDb();
    db.prepare("DELETE FROM memories_vec WHERE id = ?").run(id);
  }
  // Deletes all vectors.
  clear() {
    const db = this.getDb();
    db.prepare("DELETE FROM memories_vec").run();
  }
  // Number of stored vectors.
  count() {
    const db = this.getDb();
    const row = db.prepare("SELECT COUNT(*) as cnt FROM memories_vec").get();
    return row.cnt;
  }
  // Guard: all data methods route through here so misuse fails loudly.
  getDb() {
    if (this.db === null) {
      throw new Error("SqliteVecStore not initialized. Call initialize() first.");
    }
    return this.db;
  }
};
397
+
398
+ // src/embeddings/onnx.ts
399
+ import { resolve as resolve2, dirname as dirname2 } from "path";
400
+ import { existsSync } from "fs";
401
+ import { fileURLToPath } from "url";
402
// Lazily-loaded onnxruntime-node bindings; null until first use so the native
// dependency is only imported when ONNX embeddings are actually needed.
var ort = null;
// Imports onnxruntime-node on first call and caches the two classes used here.
async function loadOrt() {
  if (ort !== null) return ort;
  const mod = await import("onnxruntime-node");
  ort = { InferenceSession: mod.InferenceSession, Tensor: mod.Tensor };
  return ort;
}
409
// File name of the bundled BGE embedding model.
var MODEL_FILENAME = "bge-small-en-v1.5.onnx";
/**
 * Searches a fixed set of likely locations for the ONNX model file and
 * returns the first that exists, or null when the model has not been
 * downloaded. Candidates cover bundled, dev, cwd, and installed layouts.
 */
function resolveModelPath() {
  const thisDir = dirname2(fileURLToPath(import.meta.url));
  const candidates = [
    // From dist/ (bundled) -> ../models/ (package root)
    resolve2(thisDir, "..", "models", MODEL_FILENAME),
    // From src/embeddings/ (dev) -> ../../models/
    resolve2(thisDir, "..", "..", "models", MODEL_FILENAME),
    // Relative to cwd
    resolve2(process.cwd(), "models", MODEL_FILENAME),
    // Common install location: node_modules/neuromcp/models/
    resolve2(thisDir, "models", MODEL_FILENAME)
  ];
  // Candidates can coincide depending on layout; de-duplicate before probing.
  const unique = [...new Set(candidates)];
  for (const candidate of unique) {
    if (existsSync(candidate)) return candidate;
  }
  return null;
}
428
// Hash-based stand-in tokenizer producing BERT-style [CLS] w1..wn [SEP] pad...
// inputs of exactly `maxLength` tokens. Word ids come from a 31x rolling hash
// mapped into [1, VOCAB_SIZE-2] — NOT a real WordPiece vocabulary, so
// embeddings are only self-consistent, not faithful to the trained model.
function simpleTokenize(text, maxLength) {
  const VOCAB_SIZE = 30522;
  const CLS_ID = 101n;
  const SEP_ID = 102n;
  const PAD_ID = 0n;
  const modulus = VOCAB_SIZE - 2;
  const words = text.toLowerCase().trim().split(/\s+/).filter((w) => w.length > 0);
  // Reserve two slots for [CLS] and [SEP]; truncate the rest.
  const wordIds = words.slice(0, maxLength - 2).map((word) => {
    let h = 0;
    for (let i = 0; i < word.length; i += 1) {
      h = ((h << 5) - h + word.charCodeAt(i)) | 0;
    }
    // Double-mod keeps negative hashes in range; +1 avoids the PAD id.
    return BigInt(((h % modulus) + modulus) % modulus + 1);
  });
  const inputIds = new BigInt64Array(maxLength);
  const attentionMask = new BigInt64Array(maxLength);
  const tokenTypeIds = new BigInt64Array(maxLength);
  inputIds[0] = CLS_ID;
  attentionMask[0] = 1n;
  wordIds.forEach((id, i) => {
    inputIds[i + 1] = id;
    attentionMask[i + 1] = 1n;
  });
  const sepPos = wordIds.length + 1;
  inputIds[sepPos] = SEP_ID;
  attentionMask[sepPos] = 1n;
  for (let i = sepPos + 1; i < maxLength; i += 1) {
    inputIds[i] = PAD_ID;
    attentionMask[i] = 0n;
    tokenTypeIds[i] = 0n;
  }
  return { inputIds, attentionMask, tokenTypeIds };
}
461
// Mean-pools a flattened [seqLen, hiddenSize] hidden-state matrix over the
// positions whose attention mask is nonzero. An all-zero mask yields zeros.
function meanPool(lastHiddenState, attentionMask, seqLen, hiddenSize) {
  const pooled = new Float32Array(hiddenSize);
  let active = 0;
  for (let t = 0; t < seqLen; t += 1) {
    if (attentionMask[t] === 0n) continue;
    active += 1;
    const base = t * hiddenSize;
    for (let h = 0; h < hiddenSize; h += 1) {
      pooled[h] += lastHiddenState[base + h];
    }
  }
  if (active === 0) {
    return pooled;
  }
  for (let h = 0; h < hiddenSize; h += 1) {
    pooled[h] /= active;
  }
  return pooled;
}
479
// Normalizes `vec` to unit L2 length IN PLACE and returns it.
// A zero vector is returned unchanged to avoid division by zero.
function l2Normalize(vec) {
  let sumSq = 0;
  for (const x of vec) {
    sumSq += x * x;
  }
  const norm = Math.sqrt(sumSq);
  if (norm > 0) {
    for (let i = 0; i < vec.length; i += 1) {
      vec[i] /= norm;
    }
  }
  return vec;
}
492
// Local CPU embedding provider running bge-small-en-v1.5 through
// onnxruntime-node. Produces 384-dim, L2-normalized Float32Array embeddings.
var OnnxEmbeddingProvider = class {
  name = "bge-small-en-v1.5";
  dimensions = 384;
  maxTokens = 512;
  // Lazily-created inference session (see ensureSession).
  session = null;
  modelPath;
  constructor(modelPath) {
    // Fall back to probing the standard model locations when no path given.
    this.modelPath = modelPath ?? resolveModelPath();
  }
  // True when the model file exists and a session can be created.
  // Errors are deliberately swallowed: "unavailable" is a valid answer.
  async isAvailable() {
    try {
      await this.ensureSession();
      return true;
    } catch {
      return false;
    }
  }
  // Embeds one text: tokenize -> run model -> mean-pool -> L2-normalize.
  async embed(text) {
    const session = await this.ensureSession();
    const { inputIds, attentionMask, tokenTypeIds } = simpleTokenize(text, this.maxTokens);
    const { Tensor } = await loadOrt();
    const feeds = {
      input_ids: new Tensor("int64", inputIds, [1, this.maxTokens]),
      attention_mask: new Tensor("int64", attentionMask, [1, this.maxTokens]),
      token_type_ids: new Tensor("int64", tokenTypeIds, [1, this.maxTokens])
    };
    const output = await session.run(feeds);
    // Prefer the canonical output name, else take whatever the model emitted.
    const outputTensor = output["last_hidden_state"] ?? Object.values(output)[0];
    if (outputTensor === void 0) {
      throw new Error("ONNX model returned no output tensors");
    }
    const hiddenState = outputTensor.data;
    const pooled = meanPool(hiddenState, attentionMask, this.maxTokens, this.dimensions);
    return l2Normalize(pooled);
  }
  // Sequentially embeds each text (no batched inference).
  async embedBatch(texts) {
    const results = [];
    for (const text of texts) {
      results.push(await this.embed(text));
    }
    return results;
  }
  // Creates (once) and returns the CPU inference session; throws with a
  // download hint when the model file was not found.
  async ensureSession() {
    if (this.session !== null) return this.session;
    if (this.modelPath === null) {
      throw new Error(
        `ONNX model not found. Run: npx tsx scripts/download-model.ts
Expected location: models/${MODEL_FILENAME}`
      );
    }
    const { InferenceSession: IS } = await loadOrt();
    this.session = await IS.create(this.modelPath, {
      executionProviders: ["cpu"]
    });
    return this.session;
  }
};
549
+
550
+ // src/embeddings/factory.ts
551
/**
 * Selects and initializes an embedding provider per config.embeddingProvider.
 * "auto" tries ONNX and keeps falling through; "onnx" requires it and throws
 * if unavailable. Any other value (or auto with nothing available) throws,
 * since ONNX is currently the only implemented provider.
 */
async function createEmbeddingProvider(config, logger) {
  const requestedProvider = config.embeddingProvider;
  if (requestedProvider === "auto" || requestedProvider === "onnx") {
    const provider = new OnnxEmbeddingProvider();
    const available = await provider.isAvailable();
    if (available) {
      logger.info("embeddings", `Loaded ONNX provider: ${provider.name}`, {
        dimensions: provider.dimensions
      });
      return provider;
    }
    if (requestedProvider === "onnx") {
      throw new Error(
        "ONNX embedding provider requested but unavailable. Run: npx tsx scripts/download-model.ts"
      );
    }
  }
  throw new Error(
    `No embedding provider available (requested: ${requestedProvider}). Currently only ONNX is supported. Run: npx tsx scripts/download-model.ts`
  );
}
572
+
573
+ // src/server.ts
574
+ import { z as z2 } from "zod";
575
+ import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
576
+
577
+ // src/tools/store.ts
578
+ import { createHash } from "crypto";
579
+
580
+ // src/governance/trust.ts
581
// Ordinal ranking of trust levels, lowest to highest.
var TRUST_RANKS = {
  unverified: 0,
  low: 1,
  medium: 2,
  high: 3
};
// True when trust level `a` strictly outranks `b`.
function isHigherTrust(a, b) {
  return TRUST_RANKS[a] > TRUST_RANKS[b];
}
// Default trust assigned to a memory based on where it came from.
// Unknown sources yield undefined (same as the original switch without default).
function defaultTrustForSource(source) {
  const bySource = {
    user: "high",
    consolidation: "medium",
    auto: "medium",
    "claude-code": "medium",
    error: "low"
  };
  return bySource[source];
}
// True when a memory's trust level meets or exceeds the required minimum.
function meetsMinTrust(memoryTrust, minTrust) {
  return TRUST_RANKS[memoryTrust] >= TRUST_RANKS[minTrust];
}
605
+
606
+ // src/tools/store.ts
607
/**
 * Generates a 32-hex-character memory id.
 * Uses CSPRNG bytes (`randomBytes`, already imported at module scope) instead
 * of sha256(Date.now() + Math.random()): Math.random is not cryptographically
 * random and the old scheme narrowed the entropy to the clock plus one float,
 * risking collisions under rapid inserts. Output format (32 lowercase hex
 * chars) is unchanged for callers.
 */
function generateId() {
  return randomBytes(16).toString("hex");
}
610
// Hex-encoded SHA-256 of the memory content, used for exact-duplicate lookup.
function contentHash(content) {
  const hasher = createHash("sha256");
  hasher.update(content);
  return hasher.digest("hex");
}
613
/**
 * Merges a stored JSON-array-of-tags string with incoming tags, deduplicating
 * while preserving first-seen order, and returns the merged JSON string.
 * Hardened against corrupt stored data: invalid JSON or a non-array value
 * falls back to an empty list instead of throwing, so one bad row cannot
 * break the store/dedup code paths that call this.
 */
function mergeTags(existing, incoming) {
  let existingTags;
  try {
    const parsed = JSON.parse(existing);
    existingTags = Array.isArray(parsed) ? parsed : [];
  } catch {
    existingTags = [];
  }
  const merged = [...new Set([...existingTags, ...incoming])];
  return JSON.stringify(merged);
}
618
/**
 * Stores a memory with two-stage deduplication.
 * 1. Exact: same content hash in the same (live) namespace -> merge tags,
 *    keep max importance, bump access counters, return existing id.
 * 2. Semantic: any of the 5 nearest vectors above config.dedupThreshold in
 *    the same namespace -> same merge-and-return.
 * Otherwise inserts a new row, its vector, and its FTS entry.
 * @returns {{id: string, matched: boolean, similarity?: number}}
 */
async function storeMemory(input, deps) {
  const { db, vecStore, embedder, logger, metrics, config } = deps;
  const start = Date.now();
  const namespace = input.namespace ?? config.defaultNamespace;
  const source = input.source ?? "user";
  const sourceTrust = input.source_trust ?? defaultTrustForSource(source);
  const category = input.category ?? "general";
  const tags = input.tags ?? [];
  const importance = input.importance ?? 0.5;
  const metadata = input.metadata ?? {};
  const tagsJson = JSON.stringify([...tags]);
  const metadataJson = JSON.stringify(metadata);
  const hash = contentHash(input.content);
  // Stage 1: cheap exact-content dedup before paying for an embedding.
  const exactMatch = db.prepare(
    "SELECT id, importance, tags FROM memories WHERE content_hash = ? AND namespace = ? AND is_deleted = 0 LIMIT 1"
  ).get(hash, namespace);
  if (exactMatch !== void 0) {
    // Merge into the existing row: importance only ever ratchets up.
    const newImportance = Math.max(exactMatch.importance, importance);
    const mergedTags = mergeTags(exactMatch.tags, [...tags]);
    db.prepare(
      "UPDATE memories SET importance = ?, tags = ?, access_count = access_count + 1, last_accessed_at = strftime('%Y-%m-%dT%H:%M:%fZ', 'now'), updated_at = strftime('%Y-%m-%dT%H:%M:%fZ', 'now') WHERE id = ?"
    ).run(newImportance, mergedTags, exactMatch.id);
    logger.info("store", "exact dedup match", { id: exactMatch.id, namespace });
    metrics.increment("store.dedup_exact");
    metrics.record("store.duration_ms", Date.now() - start);
    return { id: exactMatch.id, matched: true, similarity: 1 };
  }
  // Stage 2: semantic dedup over the nearest vector neighbors.
  const embedding = await embedder.embed(input.content);
  const neighbors = vecStore.search(embedding, 5);
  for (const neighbor of neighbors) {
    // NOTE(review): similarity = 1 - distance assumes a distance in [0, 2]
    // (e.g. cosine distance) — confirm against the vec0 table's metric.
    const similarity = 1 - neighbor.distance;
    if (similarity > config.dedupThreshold) {
      // Re-check the neighbor belongs to this namespace and is still live.
      const existing = db.prepare(
        "SELECT id, importance, tags FROM memories WHERE id = ? AND namespace = ? AND is_deleted = 0 LIMIT 1"
      ).get(neighbor.id, namespace);
      if (existing !== void 0) {
        const newImportance = Math.max(existing.importance, importance);
        const mergedTags = mergeTags(existing.tags, [...tags]);
        db.prepare(
          "UPDATE memories SET importance = ?, tags = ?, access_count = access_count + 1, last_accessed_at = strftime('%Y-%m-%dT%H:%M:%fZ', 'now'), updated_at = strftime('%Y-%m-%dT%H:%M:%fZ', 'now') WHERE id = ?"
        ).run(newImportance, mergedTags, existing.id);
        logger.info("store", "semantic dedup match", {
          id: existing.id,
          similarity,
          namespace
        });
        metrics.increment("store.dedup_semantic");
        metrics.record("store.duration_ms", Date.now() - start);
        return { id: existing.id, matched: true, similarity };
      }
    }
  }
  // No duplicate: insert the row, then its vector, then its FTS entry.
  const id = generateId();
  const now = (/* @__PURE__ */ new Date()).toISOString();
  db.prepare(
    `INSERT INTO memories (
      id, content_hash, content, summary, embedding_model, embedding_dim,
      namespace, project_id, agent_id, source, source_trust, visibility,
      schema_version, category, tags, importance, access_count,
      created_at, updated_at, last_accessed_at, expires_at,
      is_deleted, tombstoned_at, supersedes_id, superseded_by_id, metadata
    ) VALUES (
      ?, ?, ?, NULL, ?, ?, ?, ?, ?, ?, ?, 'namespace', 1, ?, ?, ?, 0,
      ?, ?, NULL, ?, 0, NULL, NULL, NULL, ?
    )`
  ).run(
    id,
    hash,
    input.content,
    embedder.name,
    embedder.dimensions,
    namespace,
    input.project_id ?? null,
    input.agent_id ?? null,
    source,
    sourceTrust,
    category,
    tagsJson,
    importance,
    now,
    now,
    input.expires_at ?? null,
    metadataJson
  );
  vecStore.upsert(id, embedding);
  // The FTS table shares memories' rowid, so fetch it for the content insert.
  const row = db.prepare("SELECT rowid FROM memories WHERE id = ?").get(id);
  db.prepare(
    "INSERT INTO memories_fts (rowid, content, summary, tags, category) VALUES (?, ?, NULL, ?, ?)"
  ).run(row.rowid, input.content, tagsJson, category);
  logger.info("store", "new memory stored", { id, namespace, category });
  metrics.increment("store.new");
  metrics.record("store.duration_ms", Date.now() - start);
  return { id, matched: false };
}
712
+
713
+ // src/governance/namespace.ts
714
// Builds a parameterized SQL fragment restricting rows to one namespace.
// "*" means all namespaces and yields an always-true clause with no params.
function namespaceFilter(namespace, defaultNs) {
  const effective = namespace ?? defaultNs;
  return effective === "*"
    ? { clause: "1=1", params: [] }
    : { clause: "namespace = ?", params: [effective] };
}
719
+
720
+ // src/tools/search.ts
721
// Reciprocal Rank Fusion constant: score contribution is 1 / (RRF_K + rank).
var RRF_K = 60;
// Escapes embedded double quotes and wraps the whole query as one FTS5
// phrase, preventing user input from being parsed as FTS query syntax.
function sanitizeFtsQuery(query) {
  const escaped = query.replaceAll('"', '""');
  return `"${escaped}"`;
}
725
/**
 * Hybrid semantic + keyword search.
 * Fuses vector-KNN rank and FTS5 rank with Reciprocal Rank Fusion, then
 * walks the fused ranking applying namespace/trust/category/tag/importance/
 * date filters until `limit` rows pass. Bumps access counters on every
 * returned row. Set input.hybrid = false for vector-only search.
 * @returns memory rows augmented with `similarity_score` (the RRF score).
 */
async function searchMemory(input, deps) {
  const { db, vecStore, embedder, logger, metrics, config } = deps;
  const start = Date.now();
  const limit = input.limit ?? 10;
  const hybrid = input.hybrid !== false;
  const namespace = input.namespace ?? config.defaultNamespace;
  const minTrust = input.min_trust ?? "low";
  const embedding = await embedder.embed(input.query);
  // Over-fetch 3x from each ranker: post-filters below will discard rows.
  const vecResults = vecStore.search(embedding, limit * 3);
  const vecRanks = /* @__PURE__ */ new Map();
  vecResults.forEach((r, i) => {
    vecRanks.set(r.id, i + 1);
  });
  const ftsRanks = /* @__PURE__ */ new Map();
  if (hybrid) {
    try {
      const ftsQuery = sanitizeFtsQuery(input.query);
      const ftsRows = db.prepare(
        `SELECT m.id FROM memories_fts f
        JOIN memories m ON m.rowid = f.rowid
        WHERE memories_fts MATCH ? AND m.is_deleted = 0
        ORDER BY rank LIMIT ?`
      ).all(ftsQuery, limit * 3);
      ftsRows.forEach((row, i) => {
        ftsRanks.set(row.id, i + 1);
      });
    } catch {
      // FTS5 can reject some inputs; degrade to vector-only instead of failing.
      logger.warn("search", "FTS5 query failed, falling back to vector-only", {
        query: input.query
      });
    }
  }
  // RRF: score = sum over rankers of 1/(RRF_K + rank); ids found by both
  // rankers accumulate from both terms and sort higher.
  const allIds = /* @__PURE__ */ new Set([...vecRanks.keys(), ...ftsRanks.keys()]);
  const scored = [];
  for (const id of allIds) {
    const vecRank = vecRanks.get(id);
    const ftsRank = ftsRanks.get(id);
    let score = 0;
    if (vecRank !== void 0) {
      score += 1 / (RRF_K + vecRank);
    }
    if (ftsRank !== void 0) {
      score += 1 / (RRF_K + ftsRank);
    }
    scored.push({ id, score });
  }
  scored.sort((a, b) => b.score - a.score);
  const nsFilter = namespaceFilter(input.namespace, config.defaultNamespace);
  const results = [];
  // Walk the fused ranking best-first, filtering until `limit` rows pass.
  for (const candidate of scored) {
    if (results.length >= limit) break;
    const memory = db.prepare("SELECT * FROM memories WHERE id = ?").get(candidate.id);
    if (memory === void 0) continue;
    if (memory.is_deleted === 1) continue;
    // Empty params means "*" (all namespaces) — no namespace filtering.
    if (nsFilter.params.length > 0 && memory.namespace !== nsFilter.params[0]) {
      continue;
    }
    if (!meetsMinTrust(memory.source_trust, minTrust)) {
      continue;
    }
    if (input.category !== void 0 && memory.category !== input.category) {
      continue;
    }
    // Tag filter is AND semantics: every requested tag must be present.
    if (input.tags !== void 0 && input.tags.length > 0) {
      const memTags = JSON.parse(memory.tags);
      const hasAll = input.tags.every((t) => memTags.includes(t));
      if (!hasAll) continue;
    }
    if (input.min_importance !== void 0 && memory.importance < input.min_importance) {
      continue;
    }
    // ISO-8601 strings compare correctly as plain strings.
    if (input.after !== void 0 && memory.created_at < input.after) {
      continue;
    }
    if (input.before !== void 0 && memory.created_at > input.before) {
      continue;
    }
    results.push({ ...memory, similarity_score: candidate.score });
  }
  if (results.length > 0) {
    // Record the access on every returned memory in one transaction.
    const bumpStmt = db.prepare(
      "UPDATE memories SET access_count = access_count + 1, last_accessed_at = strftime('%Y-%m-%dT%H:%M:%fZ', 'now') WHERE id = ?"
    );
    const bumpAll = db.transaction(() => {
      for (const r of results) {
        bumpStmt.run(r.id);
      }
    });
    bumpAll();
  }
  logger.info("search", "search complete", {
    query: input.query,
    results: results.length,
    hybrid,
    namespace
  });
  metrics.increment("search.queries");
  metrics.record("search.results", results.length);
  metrics.record("search.duration_ms", Date.now() - start);
  return results;
}
826
+
827
+ // src/tools/recall.ts
828
/**
 * Non-semantic recall: lists live memories newest-first with optional
 * id/category/namespace filters, applying the tag filter (AND semantics)
 * in memory AFTER the SQL LIMIT — so fewer than `limit` rows may be
 * returned even when more matching rows exist. Bumps access counters on
 * every returned row.
 */
function recallMemory(input, db, config, logger, metrics) {
  const start = Date.now();
  const limit = input.limit ?? 20;
  const namespace = input.namespace ?? config.defaultNamespace;
  const conditions = ["is_deleted = 0"];
  const params = [];
  // "*" disables the namespace restriction.
  if (namespace !== "*") {
    conditions.push("namespace = ?");
    params.push(namespace);
  }
  if (input.id !== void 0) {
    conditions.push("id = ?");
    params.push(input.id);
  }
  if (input.category !== void 0) {
    conditions.push("category = ?");
    params.push(input.category);
  }
  const sql = `SELECT * FROM memories WHERE ${conditions.join(" AND ")} ORDER BY created_at DESC LIMIT ?`;
  params.push(limit);
  const rows = db.prepare(sql).all(...params);
  // Tags are a JSON column, so filter in JS; every requested tag must match.
  const filtered = input.tags !== void 0 && input.tags.length > 0 ? rows.filter((row) => {
    const memTags = JSON.parse(row.tags);
    return input.tags.every((t) => memTags.includes(t));
  }) : rows;
  if (filtered.length > 0) {
    // Record the access on every returned memory in one transaction.
    const bumpStmt = db.prepare(
      "UPDATE memories SET access_count = access_count + 1, last_accessed_at = strftime('%Y-%m-%dT%H:%M:%fZ', 'now') WHERE id = ?"
    );
    const bumpAll = db.transaction(() => {
      for (const row of filtered) {
        bumpStmt.run(row.id);
      }
    });
    bumpAll();
  }
  logger.info("recall", "recall complete", {
    results: filtered.length,
    namespace
  });
  metrics.increment("recall.queries");
  metrics.record("recall.results", filtered.length);
  metrics.record("recall.duration_ms", Date.now() - start);
  return filtered;
}
873
+
874
+ // src/tools/forget.ts
875
+ import { randomBytes as randomBytes2 } from "crypto";
876
+
877
+ // src/governance/tombstone.ts
878
// Soft-delete a memory: flip the is_deleted flag and stamp the deletion time.
// Fix: uses the same millisecond-precision UTC format ('%H:%M:%fZ') as every
// other timestamp written in this file (updated_at / last_accessed_at); the
// original wrote only second precision ('%H:%M:%SZ') here, making
// tombstoned_at inconsistent with sibling columns.
function tombstone(db, id) {
  db.prepare(
    "UPDATE memories SET is_deleted = 1, tombstoned_at = strftime('%Y-%m-%dT%H:%M:%fZ', 'now') WHERE id = ?"
  ).run(id);
}
883
// Soft-delete `tombstoneId` and record which surviving memory replaced it,
// preserving merge lineage for the consolidation audit trail.
// Fix: timestamp format aligned to the millisecond-precision '%H:%M:%fZ'
// used by every other timestamp column in this file (the original wrote
// second precision here only).
function tombstoneWithLineage(db, tombstoneId, supersededById) {
  db.prepare(
    `UPDATE memories
       SET is_deleted = 1,
           tombstoned_at = strftime('%Y-%m-%dT%H:%M:%fZ', 'now'),
           superseded_by_id = ?
       WHERE id = ?`
  ).run(supersededById, tombstoneId);
}
892
+
893
+ // src/tools/forget.ts
894
// Bulk soft-delete tool: tombstones every memory matching the given filters,
// removes their vectors, and writes one consolidation_log row per deletion.
// Refuses to run with no filter at all (which would wipe the whole store).
function forgetMemory(input, db, vecStore, config, logger, metrics) {
  const startedAt = Date.now();
  const dryRun = input.dry_run ?? false;

  const hasTagFilter = input.tags !== void 0 && input.tags.length > 0;
  const hasFilter =
    input.id !== void 0 ||
    input.namespace !== void 0 ||
    hasTagFilter ||
    input.older_than_days !== void 0 ||
    input.below_importance !== void 0;
  if (!hasFilter) {
    throw new Error(
      "forgetMemory requires at least one filter (id, namespace, tags, older_than_days, or below_importance)"
    );
  }

  // SQL-side filters; tags are matched in JS below because they are a JSON
  // array column.
  const where = ["is_deleted = 0"];
  const bindings = [];
  if (input.id !== void 0) {
    where.push("id = ?");
    bindings.push(input.id);
  }
  if (input.namespace !== void 0) {
    where.push("namespace = ?");
    bindings.push(input.namespace);
  }
  if (input.older_than_days !== void 0) {
    // 864e5 ms per day.
    const cutoff = new Date(Date.now() - input.older_than_days * 864e5).toISOString();
    where.push("created_at < ?");
    bindings.push(cutoff);
  }
  if (input.below_importance !== void 0) {
    where.push("importance < ?");
    bindings.push(input.below_importance);
  }

  const candidates = db
    .prepare(`SELECT id, tags FROM memories WHERE ${where.join(" AND ")}`)
    .all(...bindings);
  const matched = hasTagFilter
    ? candidates.filter((row) => {
        const stored = JSON.parse(row.tags);
        return input.tags.every((tag) => stored.includes(tag));
      })
    : candidates;
  const ids = matched.map((row) => row.id);

  if (!dryRun && ids.length > 0) {
    // One operation id groups every log row produced by this call.
    const operationId = randomBytes2(16).toString("hex");
    db.transaction(() => {
      for (const id of ids) {
        tombstone(db, id);
        vecStore.remove(id);
        const logId = randomBytes2(16).toString("hex");
        db.prepare(
          `INSERT INTO consolidation_log (id, operation_id, action, source_ids, result_id, reason, namespace)
           VALUES (?, ?, 'tombstone', ?, NULL, 'forget', ?)`
        ).run(logId, operationId, JSON.stringify([id]), input.namespace ?? null);
      }
    })();
    logger.info("forget", "memories deleted", {
      count: ids.length,
      operationId
    });
    metrics.increment("forget.deleted", ids.length);
  }
  metrics.record("forget.duration_ms", Date.now() - startedAt);
  return { count: ids.length, ids, dry_run: dryRun };
}
954
+
955
+ // src/consolidation/planner.ts
956
+ import { randomBytes as randomBytes3 } from "crypto";
957
+
958
+ // src/consolidation/dedup.ts
959
// Scan a namespace for near-duplicate memories via vector similarity and
// propose merges. Pure planning: returns merge proposals, writes nothing.
// `threshold` is a cosine-similarity cutoff; `namespace` of "*" means all
// namespaces.
function findDuplicates(db, vecStore, namespace, threshold) {
  const isAll = namespace === "*";
  const nsClause = isAll ? "1=1" : "namespace = ?";
  const nsParams = isAll ? [] : [namespace];
  const rows = db.prepare(
    `SELECT id, content, tags, importance, source_trust FROM memories WHERE is_deleted = 0 AND ${nsClause}`
  ).all(...nsParams);
  const alreadyMerged = /* @__PURE__ */ new Set();
  const merges = [];
  for (const row of rows) {
    // Skip rows already claimed as the losing side of an earlier merge.
    if (alreadyMerged.has(row.id)) continue;
    const vecRow = db.prepare("SELECT embedding FROM memories_vec WHERE id = ?").get(row.id);
    if (vecRow === void 0) continue;
    // Reinterpret the stored BLOB as float32 in place (4 bytes per element).
    const embedding = new Float32Array(
      vecRow.embedding.buffer,
      vecRow.embedding.byteOffset,
      vecRow.embedding.byteLength / 4
    );
    const neighbors = vecStore.search(embedding, 10);
    for (const neighbor of neighbors) {
      if (neighbor.id === row.id) continue;
      if (alreadyMerged.has(neighbor.id)) continue;
      // NOTE(review): assumes vecStore distances are 1 - cosine similarity --
      // confirm against the vector store implementation.
      const similarity = 1 - neighbor.distance;
      if (similarity <= threshold) continue;
      const neighborRow = db.prepare(
        "SELECT id, content, tags, importance, source_trust FROM memories WHERE id = ? AND is_deleted = 0"
      ).get(neighbor.id);
      if (neighborRow === void 0) continue;
      if (!isAll) {
        // The vector search is not namespace-scoped, so re-check the
        // neighbor's namespace before proposing a cross-row merge.
        const nsCheck = db.prepare("SELECT namespace FROM memories WHERE id = ?").get(neighbor.id);
        if (nsCheck === void 0 || nsCheck.namespace !== namespace) continue;
      }
      const { keepId, tombstoneId, keepRow: winner, loseRow: loser } = pickWinner(
        row,
        neighborRow
      );
      // Union both tag sets and keep the higher importance score.
      const keepTags = JSON.parse(winner.tags);
      const loseTags = JSON.parse(loser.tags);
      const mergedTags = [.../* @__PURE__ */ new Set([...keepTags, ...loseTags])];
      const mergedImportance = Math.max(winner.importance, loser.importance);
      merges.push({
        keep_id: keepId,
        tombstone_id: tombstoneId,
        similarity,
        merged_tags: mergedTags,
        merged_importance: mergedImportance,
        reason: `cosine similarity ${similarity.toFixed(3)} exceeds threshold ${threshold}`
      });
      // Only the loser is marked; the winner may absorb further duplicates.
      alreadyMerged.add(tombstoneId);
    }
  }
  return merges;
}
1012
// Decide which of two duplicate rows survives a merge. Higher source_trust
// wins outright (via the sibling isHigherTrust helper); on equal trust the
// longer content wins, and `a` wins exact ties.
function pickWinner(a, b) {
  let winner;
  let loser;
  if (isHigherTrust(a.source_trust, b.source_trust)) {
    winner = a;
    loser = b;
  } else if (isHigherTrust(b.source_trust, a.source_trust)) {
    winner = b;
    loser = a;
  } else if (a.content.length >= b.content.length) {
    winner = a;
    loser = b;
  } else {
    winner = b;
    loser = a;
  }
  return { keepId: winner.id, tombstoneId: loser.id, keepRow: winner, loseRow: loser };
}
1024
+
1025
+ // src/consolidation/decay.ts
1026
// Apply exponential time decay to importance scores and flag prune
// candidates. Pure planning: returns proposals, writes nothing.
// lambda: decay rate per day of inactivity; minImportance: prune floor
// applied to the decayed score.
function computeDecay(db, namespace, lambda, minImportance) {
  const everyNamespace = namespace === "*";
  const whereNs = everyNamespace ? "1=1" : "namespace = ?";
  const bindings = everyNamespace ? [] : [namespace];
  const rows = db.prepare(
    `SELECT id, importance, access_count, created_at, last_accessed_at, source, source_trust
       FROM memories
       WHERE is_deleted = 0 AND ${whereNs}`
  ).all(...bindings);

  const now = Date.now();
  const decays = [];
  const prunes = [];
  for (const row of rows) {
    // Decay clock starts at the last access, falling back to creation time.
    const lastTouched = row.last_accessed_at ?? row.created_at;
    const idleDays = (now - new Date(lastTouched).getTime()) / 864e5;
    const decayedImportance = row.importance * Math.exp(-lambda * idleDays);

    // Only report changes big enough to matter (> 0.01 absolute).
    if (Math.abs(decayedImportance - row.importance) > 0.01) {
      decays.push({
        id: row.id,
        current_importance: row.importance,
        new_importance: decayedImportance,
        days_since_access: idleDays
      });
    }

    // Prune only cold (rarely accessed), old, low-trust, non-user memories.
    const ageDays = (now - new Date(row.created_at).getTime()) / 864e5;
    const prunable =
      decayedImportance < minImportance &&
      row.access_count < 3 &&
      ageDays > 30 &&
      row.source_trust !== "high" &&
      row.source !== "user";
    if (prunable) {
      prunes.push({
        id: row.id,
        importance: decayedImportance,
        access_count: row.access_count,
        age_days: ageDays,
        reason: `decayed importance ${decayedImportance.toFixed(4)} below threshold ${minImportance}`
      });
    }
  }
  return { decays, prunes };
}
1065
+
1066
+ // src/consolidation/sweep.ts
1067
// List memories whose TTL (expires_at) has already passed in the given
// namespace ("*" = everywhere). The comparison is lexicographic on ISO-8601
// strings, which orders correctly for UTC timestamps.
function findExpired(db, namespace) {
  const everyNamespace = namespace === "*";
  const whereNs = everyNamespace ? "1=1" : "namespace = ?";
  const bindings = everyNamespace ? [] : [namespace];
  const nowIso = new Date().toISOString();
  const expired = db.prepare(
    `SELECT id, expires_at FROM memories
       WHERE is_deleted = 0
         AND expires_at IS NOT NULL
         AND expires_at < ?
         AND ${whereNs}`
  ).all(nowIso, ...bindings);
  return expired.map(({ id, expires_at }) => ({ id, expired_at: expires_at }));
}
1084
+
1085
+ // src/consolidation/planner.ts
1086
// Build a consolidation plan for a namespace: dedup merges, importance
// decays, prune candidates, and TTL sweeps. Pure planning -- nothing is
// applied until executeConsolidationPlan runs the plan.
function createConsolidationPlan(db, vecStore, namespace, options, metrics) {
  const operationId = randomBytes3(16).toString("hex");
  const startedAt = Date.now();

  const merges = findDuplicates(
    db,
    vecStore,
    namespace,
    options.similarity_threshold
  );
  const { decays, prunes } = computeDecay(
    db,
    namespace,
    options.decay_lambda,
    options.min_importance_after_decay
  );
  const sweeps = findExpired(db, namespace);

  metrics.record("consolidation.plan_duration_ms", Date.now() - startedAt);
  return {
    operation_id: operationId,
    namespace,
    created_at: new Date().toISOString(),
    proposed_merges: merges,
    proposed_decays: decays,
    proposed_prunes: prunes,
    proposed_ttl_sweeps: sweeps,
    summary: {
      merge_count: merges.length,
      decay_count: decays.length,
      prune_count: prunes.length,
      sweep_count: sweeps.length
    }
  };
}
1119
+
1120
+ // src/consolidation/executor.ts
1121
+ import { randomBytes as randomBytes4 } from "crypto";
1122
// Apply a consolidation plan in a single transaction: merge duplicates,
// write decayed importance scores, tombstone prune candidates, and sweep
// expired memories. Every action is appended to consolidation_log under the
// plan's operation_id. Returns the per-action counts.
function executeConsolidationPlan(plan, db, vecStore, logger, metrics) {
  const start = Date.now();
  let merged = 0;
  let decayed = 0;
  let pruned = 0;
  let swept = 0;
  // Prepared statements reused across every action in the plan.
  const logAction = db.prepare(
    `INSERT INTO consolidation_log (id, operation_id, action, source_ids, result_id, reason, namespace)
     VALUES (?, ?, ?, ?, ?, ?, ?)`
  );
  const updateImportance = db.prepare(
    "UPDATE memories SET importance = ?, updated_at = strftime('%Y-%m-%dT%H:%M:%fZ', 'now') WHERE id = ?"
  );
  const updateWinner = db.prepare(
    `UPDATE memories
     SET tags = ?,
         importance = ?,
         supersedes_id = ?,
         updated_at = strftime('%Y-%m-%dT%H:%M:%fZ', 'now')
     WHERE id = ?`
  );
  // All mutations are atomic: either the whole plan applies or none of it.
  const runAll = db.transaction(() => {
    for (const merge of plan.proposed_merges) {
      // Loser is tombstoned with lineage to the winner; the winner absorbs
      // the merged tags/importance and records which id it supersedes.
      tombstoneWithLineage(db, merge.tombstone_id, merge.keep_id);
      vecStore.remove(merge.tombstone_id);
      updateWinner.run(
        JSON.stringify([...merge.merged_tags]),
        merge.merged_importance,
        merge.tombstone_id,
        merge.keep_id
      );
      logAction.run(
        randomBytes4(16).toString("hex"),
        plan.operation_id,
        "merge",
        JSON.stringify([merge.keep_id, merge.tombstone_id]),
        merge.keep_id,
        merge.reason,
        plan.namespace
      );
      merged++;
    }
    for (const decay of plan.proposed_decays) {
      updateImportance.run(decay.new_importance, decay.id);
      logAction.run(
        randomBytes4(16).toString("hex"),
        plan.operation_id,
        "decay",
        JSON.stringify([decay.id]),
        decay.id,
        `importance ${decay.current_importance.toFixed(4)} -> ${decay.new_importance.toFixed(4)}`,
        plan.namespace
      );
      decayed++;
    }
    for (const prune of plan.proposed_prunes) {
      tombstone(db, prune.id);
      vecStore.remove(prune.id);
      logAction.run(
        randomBytes4(16).toString("hex"),
        plan.operation_id,
        "prune",
        JSON.stringify([prune.id]),
        null,
        prune.reason,
        plan.namespace
      );
      pruned++;
    }
    for (const sweep of plan.proposed_ttl_sweeps) {
      tombstone(db, sweep.id);
      vecStore.remove(sweep.id);
      logAction.run(
        randomBytes4(16).toString("hex"),
        plan.operation_id,
        "tombstone",
        JSON.stringify([sweep.id]),
        null,
        `expired at ${sweep.expired_at}`,
        plan.namespace
      );
      swept++;
    }
  });
  runAll();
  logger.info("consolidation", "plan executed", {
    operationId: plan.operation_id,
    merged,
    decayed,
    pruned,
    swept
  });
  metrics.increment("consolidation.merged", merged);
  metrics.increment("consolidation.decayed", decayed);
  metrics.increment("consolidation.pruned", pruned);
  metrics.increment("consolidation.swept", swept);
  metrics.record("consolidation.exec_duration_ms", Date.now() - start);
  return {
    operation_id: plan.operation_id,
    merged,
    decayed,
    pruned,
    swept
  };
}
1227
+
1228
+ // src/tools/consolidate.ts
1229
// MCP "consolidate" tool: build a plan from input overrides (falling back to
// config defaults) and either return it as a dry run or execute it when
// input.commit is truthy.
function consolidate(input, db, vecStore, embedder, config, logger, metrics) {
  const namespace = input.namespace ?? config.defaultNamespace;
  const planOptions = {
    similarity_threshold: input.similarity_threshold ?? config.dedupThreshold,
    decay_lambda: input.decay_lambda ?? config.decayLambda,
    min_importance_after_decay: input.min_importance_after_decay ?? config.minImportance
  };
  const plan = createConsolidationPlan(db, vecStore, namespace, planOptions, metrics);
  if (!input.commit) {
    return { type: "plan", plan };
  }
  return {
    type: "result",
    result: executeConsolidationPlan(plan, db, vecStore, logger, metrics)
  };
}
1245
+
1246
+ // src/tools/stats.ts
1247
+ import { statSync } from "fs";
1248
// MCP "memory_stats" tool: aggregate counts and metadata for one namespace
// ("*" = all namespaces). Read-only.
function memoryStats(input, db, embedder, config) {
  const namespace = input.namespace ?? config.defaultNamespace;
  const everyNamespace = namespace === "*";
  const whereNs = everyNamespace ? "1=1" : "namespace = ?";
  const bindings = everyNamespace ? [] : [namespace];

  // Group live memories by a column and return a { value: count } map.
  const groupCounts = (column) => {
    const grouped = {};
    const rows = db.prepare(
      `SELECT ${column}, COUNT(*) as count FROM memories WHERE is_deleted = 0 AND ${whereNs} GROUP BY ${column}`
    ).all(...bindings);
    for (const row of rows) {
      grouped[row[column]] = row.count;
    }
    return grouped;
  };

  const totalRow = db.prepare(
    `SELECT COUNT(*) as count FROM memories WHERE is_deleted = 0 AND ${whereNs}`
  ).get(...bindings);
  const avgRow = db.prepare(
    `SELECT AVG(importance) as avg_imp FROM memories WHERE is_deleted = 0 AND ${whereNs}`
  ).get(...bindings);
  const rangeRow = db.prepare(
    `SELECT MIN(created_at) as oldest, MAX(created_at) as newest FROM memories WHERE is_deleted = 0 AND ${whereNs}`
  ).get(...bindings);

  // DB file size is best-effort; stays 0 when the file is missing/unreadable.
  let dbSizeBytes = 0;
  try {
    dbSizeBytes = statSync(config.dbPath).size;
  } catch {
  }

  const lastConsRow = db.prepare(
    `SELECT MAX(created_at) as last_cons FROM consolidation_log`
  ).get();
  const tombstoneRow = db.prepare(
    `SELECT COUNT(*) as count FROM memories WHERE is_deleted = 1`
  ).get();

  return {
    total: totalRow.count,
    by_category: groupCounts("category"),
    by_source: groupCounts("source"),
    by_trust: groupCounts("source_trust"),
    avg_importance: avgRow.avg_imp ?? 0,
    oldest: rangeRow.oldest,
    newest: rangeRow.newest,
    embedding_model: embedder.name,
    db_size_bytes: dbSizeBytes,
    last_consolidation: lastConsRow.last_cons,
    tombstone_count: tombstoneRow.count
  };
}
1308
+
1309
+ // src/tools/admin.ts
1310
+ import { createHash as createHash2, randomBytes as randomBytes5 } from "crypto";
1311
// MCP export tool: dump memories as a JSON array or JSONL (default), oldest
// first. Embedding metadata columns are stripped from the output.
function exportMemories(input, db, config) {
  const namespace = input.namespace ?? config.defaultNamespace;
  const format = input.format ?? "jsonl";
  const includeTombstoned = input.include_tombstoned ?? false;

  const where = [];
  const bindings = [];
  if (namespace !== "*") {
    where.push("namespace = ?");
    bindings.push(namespace);
  }
  if (!includeTombstoned) {
    where.push("is_deleted = 0");
  }
  const whereClause = where.length > 0 ? `WHERE ${where.join(" AND ")}` : "";
  const rows = db
    .prepare(`SELECT * FROM memories ${whereClause} ORDER BY created_at ASC`)
    .all(...bindings);

  // Drop per-row embedding metadata; everything else is exported verbatim.
  const stripped = rows.map(({ embedding_model, embedding_dim, ...rest }) => rest);
  return format === "json"
    ? JSON.stringify(stripped, null, 2)
    : stripped.map((record) => JSON.stringify(record)).join("\n");
}
1336
// SHA-256 hex digest of memory content, used for import-time dedup.
function contentHash2(content) {
  const hasher = createHash2("sha256");
  hasher.update(content);
  return hasher.digest("hex");
}
1339
// Random 32-hex-character id (128 bits of entropy).
function generateId2() {
  const buf = randomBytes5(16);
  return buf.toString("hex");
}
1342
// MCP import tool: parse a JSON array or JSONL payload of memory records,
// dedupe by content hash within the target namespace, insert survivors,
// embed them, and index them in the vector store and FTS table.
// Per-record failures are logged and counted, never fatal to the batch.
async function importMemories(input, db, vecStore, embedder, config, logger, metrics) {
  const trust = input.trust ?? "unverified";
  const namespace = input.namespace ?? config.defaultNamespace;
  let records;
  const trimmed = input.data.trim();
  // A leading '[' means a JSON array; otherwise treat the payload as JSONL
  // (one JSON object per non-empty line).
  if (trimmed.startsWith("[")) {
    records = JSON.parse(trimmed);
  } else {
    records = trimmed.split("\n").filter((line) => line.trim().length > 0).map((line) => JSON.parse(line));
  }
  let imported = 0;
  let skipped = 0;
  let errors = 0;
  for (const record of records) {
    try {
      // Records with missing/empty content count as errors, not skips.
      if (record.content === void 0 || record.content.length === 0) {
        errors++;
        continue;
      }
      // Skip exact-content duplicates already live in this namespace.
      const hash = contentHash2(record.content);
      const existing = db.prepare(
        "SELECT id FROM memories WHERE content_hash = ? AND namespace = ? AND is_deleted = 0 LIMIT 1"
      ).get(hash, namespace);
      if (existing !== void 0) {
        skipped++;
        continue;
      }
      const id = generateId2();
      const now = (/* @__PURE__ */ new Date()).toISOString();
      // tags/metadata may arrive pre-serialized (strings) or as structured
      // values; normalize both to JSON strings for storage.
      const tags = typeof record.tags === "string" ? record.tags : JSON.stringify(record.tags ?? []);
      const metadata = typeof record.metadata === "string" ? record.metadata : JSON.stringify(record.metadata ?? {});
      const category = record.category ?? "general";
      const importance = record.importance ?? 0.5;
      // Imported rows always get source='auto', visibility='namespace',
      // schema_version=1, and a zero access count.
      db.prepare(
        `INSERT INTO memories (
          id, content_hash, content, summary, embedding_model, embedding_dim,
          namespace, project_id, agent_id, source, source_trust, visibility,
          schema_version, category, tags, importance, access_count,
          created_at, updated_at, last_accessed_at, expires_at,
          is_deleted, tombstoned_at, supersedes_id, superseded_by_id, metadata
        ) VALUES (
          ?, ?, ?, NULL, ?, ?, ?, ?, ?, 'auto', ?, 'namespace', 1, ?, ?, ?, 0,
          ?, ?, NULL, ?, 0, NULL, NULL, NULL, ?
        )`
      ).run(
        id,
        hash,
        record.content,
        embedder.name,
        embedder.dimensions,
        namespace,
        record.project_id ?? null,
        record.agent_id ?? null,
        trust,
        category,
        tags,
        importance,
        now,
        now,
        record.expires_at ?? null,
        metadata
      );
      // Embed after the row exists, then mirror into the vector store and the
      // FTS index (FTS rowid must match the memories rowid).
      // NOTE(review): the INSERT and the vector/FTS writes are not wrapped in
      // one transaction; a failed embed leaves a row without a vector.
      const embedding = await embedder.embed(record.content);
      vecStore.upsert(id, embedding);
      const row = db.prepare("SELECT rowid FROM memories WHERE id = ?").get(id);
      db.prepare(
        "INSERT INTO memories_fts (rowid, content, summary, tags, category) VALUES (?, ?, NULL, ?, ?)"
      ).run(row.rowid, record.content, tags, category);
      imported++;
    } catch (err) {
      logger.error("import", "failed to import record", {
        error: err instanceof Error ? err.message : String(err)
      });
      errors++;
    }
  }
  logger.info("import", "import complete", { imported, skipped, errors });
  metrics.increment("import.imported", imported);
  metrics.increment("import.skipped", skipped);
  metrics.increment("import.errors", errors);
  return { imported, skipped, errors };
}
1424
+
1425
+ // src/resources/index.ts
1426
+ import { ResourceTemplate } from "@modelcontextprotocol/sdk/server/mcp.js";
1427
// Register all MCP read-only resources: six fixed URIs (stats, recent,
// namespaces, consolidation log, operations, health) plus parameterized
// ResourceTemplates (per-namespace stats/recents, by-id, by-tag, per-operation
// log). Every handler returns one application/json content item.
function registerResources(server, deps) {
  const { db, embedder, config, metrics } = deps;
  // memory://stats -- aggregate stats across every namespace.
  server.registerResource("global_stats", "memory://stats", {
    description: "Global memory statistics across all namespaces",
    mimeType: "application/json"
  }, () => ({
    contents: [{
      uri: "memory://stats",
      mimeType: "application/json",
      text: JSON.stringify(memoryStats({ namespace: "*" }, db, embedder, config))
    }]
  }));
  // memory://recent -- the 20 newest live memories, any namespace.
  server.registerResource("recent_memories", "memory://recent", {
    description: "Last 20 memories across all namespaces",
    mimeType: "application/json"
  }, () => {
    const rows = db.prepare(
      "SELECT * FROM memories WHERE is_deleted = 0 ORDER BY created_at DESC LIMIT 20"
    ).all();
    return {
      contents: [{
        uri: "memory://recent",
        mimeType: "application/json",
        text: JSON.stringify(rows)
      }]
    };
  });
  // memory://namespaces -- namespace list with live-memory counts.
  server.registerResource("namespaces", "memory://namespaces", {
    description: "List all namespaces with memory counts",
    mimeType: "application/json"
  }, () => {
    const rows = db.prepare(
      "SELECT namespace, COUNT(*) as count FROM memories WHERE is_deleted = 0 GROUP BY namespace ORDER BY count DESC"
    ).all();
    return {
      contents: [{
        uri: "memory://namespaces",
        mimeType: "application/json",
        text: JSON.stringify(rows)
      }]
    };
  });
  // memory://consolidation/log -- 50 most recent consolidation actions.
  server.registerResource("consolidation_log", "memory://consolidation/log", {
    description: "Recent consolidation log entries",
    mimeType: "application/json"
  }, () => {
    const rows = db.prepare(
      "SELECT * FROM consolidation_log ORDER BY created_at DESC LIMIT 50"
    ).all();
    return {
      contents: [{
        uri: "memory://consolidation/log",
        mimeType: "application/json",
        text: JSON.stringify(rows)
      }]
    };
  });
  // memory://operations -- 20 most recent rows from the operations table.
  server.registerResource("operations", "memory://operations", {
    description: "Active and recent operations",
    mimeType: "application/json"
  }, () => {
    const rows = db.prepare(
      "SELECT * FROM operations ORDER BY started_at DESC LIMIT 20"
    ).all();
    return {
      contents: [{
        uri: "memory://operations",
        mimeType: "application/json",
        text: JSON.stringify(rows)
      }]
    };
  });
  // memory://health -- liveness payload plus a metrics snapshot.
  server.registerResource("health", "memory://health", {
    description: "Server health check with metrics snapshot",
    mimeType: "application/json"
  }, () => {
    const snapshot = metrics.snapshot();
    const totalRow = db.prepare(
      "SELECT COUNT(*) as count FROM memories WHERE is_deleted = 0"
    ).get();
    const health = {
      status: "ok",
      version: "0.1.0",
      memory_count: totalRow.count,
      embedding_model: embedder.name,
      embedding_dimensions: embedder.dimensions,
      metrics: snapshot
    };
    return {
      contents: [{
        uri: "memory://health",
        mimeType: "application/json",
        text: JSON.stringify(health)
      }]
    };
  });
  // memory://stats/{namespace} -- stats scoped to one namespace.
  server.registerResource(
    "namespace_stats",
    new ResourceTemplate("memory://stats/{namespace}", { list: void 0 }),
    {
      description: "Statistics for a specific namespace",
      mimeType: "application/json"
    },
    (uri, variables) => {
      const namespace = String(variables.namespace);
      const stats = memoryStats({ namespace }, db, embedder, config);
      return {
        contents: [{
          uri: uri.href,
          mimeType: "application/json",
          text: JSON.stringify(stats)
        }]
      };
    }
  );
  // memory://recent/{namespace} -- 20 newest memories in one namespace.
  server.registerResource(
    "namespace_recent",
    new ResourceTemplate("memory://recent/{namespace}", { list: void 0 }),
    {
      description: "Last 20 memories in a specific namespace",
      mimeType: "application/json"
    },
    (uri, variables) => {
      const namespace = String(variables.namespace);
      const rows = db.prepare(
        "SELECT * FROM memories WHERE is_deleted = 0 AND namespace = ? ORDER BY created_at DESC LIMIT 20"
      ).all(namespace);
      return {
        contents: [{
          uri: uri.href,
          mimeType: "application/json",
          text: JSON.stringify(rows)
        }]
      };
    }
  );
  // memory://id/{id} -- single-memory lookup. Note: no is_deleted filter, so
  // tombstoned memories remain retrievable by id.
  server.registerResource(
    "memory_by_id",
    new ResourceTemplate("memory://id/{id}", { list: void 0 }),
    {
      description: "Retrieve a specific memory by its ID",
      mimeType: "application/json"
    },
    (uri, variables) => {
      const id = String(variables.id);
      const row = db.prepare(
        "SELECT * FROM memories WHERE id = ?"
      ).get(id);
      if (row === void 0) {
        // Missing ids return an error payload rather than throwing.
        return { contents: [{ uri: uri.href, mimeType: "application/json", text: JSON.stringify({ error: "not_found", id }) }] };
      }
      return {
        contents: [{
          uri: uri.href,
          mimeType: "application/json",
          text: JSON.stringify(row)
        }]
      };
    }
  );
  // memory://tag/{tag} -- tag match via LIKE '%"tag"%' against the
  // JSON-encoded tags column.
  server.registerResource(
    "memories_by_tag",
    new ResourceTemplate("memory://tag/{tag}", { list: void 0 }),
    {
      description: "Memories containing a specific tag (across all namespaces)",
      mimeType: "application/json"
    },
    (uri, variables) => {
      const tag = String(variables.tag);
      const rows = db.prepare(
        "SELECT * FROM memories WHERE is_deleted = 0 AND tags LIKE ? ORDER BY created_at DESC LIMIT 50"
      ).all(`%"${tag}"%`);
      return {
        contents: [{
          uri: uri.href,
          mimeType: "application/json",
          text: JSON.stringify(rows)
        }]
      };
    }
  );
  // memory://tag/{namespace}/{tag} -- tag match restricted to one namespace.
  server.registerResource(
    "memories_by_namespace_tag",
    new ResourceTemplate("memory://tag/{namespace}/{tag}", { list: void 0 }),
    {
      description: "Memories with a specific tag in a specific namespace",
      mimeType: "application/json"
    },
    (uri, variables) => {
      const namespace = String(variables.namespace);
      const tag = String(variables.tag);
      const rows = db.prepare(
        "SELECT * FROM memories WHERE is_deleted = 0 AND namespace = ? AND tags LIKE ? ORDER BY created_at DESC LIMIT 50"
      ).all(namespace, `%"${tag}"%`);
      return {
        contents: [{
          uri: uri.href,
          mimeType: "application/json",
          text: JSON.stringify(rows)
        }]
      };
    }
  );
  // memory://namespace/{ns} -- bulk listing, capped at 100 rows.
  server.registerResource(
    "namespace_memories",
    new ResourceTemplate("memory://namespace/{ns}", { list: void 0 }),
    {
      description: "All memories in a namespace (up to 100)",
      mimeType: "application/json"
    },
    (uri, variables) => {
      const ns = String(variables.ns);
      const rows = db.prepare(
        "SELECT * FROM memories WHERE is_deleted = 0 AND namespace = ? ORDER BY created_at DESC LIMIT 100"
      ).all(ns);
      return {
        contents: [{
          uri: uri.href,
          mimeType: "application/json",
          text: JSON.stringify(rows)
        }]
      };
    }
  );
  // memory://consolidation/log/{operation_id} -- full audit trail of one run.
  server.registerResource(
    "consolidation_log_by_op",
    new ResourceTemplate("memory://consolidation/log/{operation_id}", { list: void 0 }),
    {
      description: "Consolidation log entries for a specific operation",
      mimeType: "application/json"
    },
    (uri, variables) => {
      const operationId = String(variables.operation_id);
      const rows = db.prepare(
        "SELECT * FROM consolidation_log WHERE operation_id = ? ORDER BY created_at ASC"
      ).all(operationId);
      return {
        contents: [{
          uri: uri.href,
          mimeType: "application/json",
          text: JSON.stringify(rows)
        }]
      };
    }
  );
}
1673
+
1674
+ // src/prompts/index.ts
1675
+ import { z } from "zod";
1676
+ function registerPrompts(server, deps) {
1677
+ const { db, vecStore, embedder, config, logger, metrics } = deps;
1678
+ server.registerPrompt("memory_context_for_task", {
1679
+ description: "Search memories relevant to a task and format them as context for an LLM.",
1680
+ argsSchema: {
1681
+ task: z.string().describe("Description of the task to find relevant memories for"),
1682
+ namespace: z.string().optional().describe("Namespace to search"),
1683
+ limit: z.string().optional().describe("Max memories to include (default: 5)")
1684
+ }
1685
+ }, async (args) => {
1686
+ const limit = args.limit !== void 0 ? parseInt(args.limit, 10) : 5;
1687
+ const results = await searchMemory(
1688
+ { query: args.task, namespace: args.namespace, limit },
1689
+ { db, vecStore, embedder, logger, metrics, config }
1690
+ );
1691
+ if (results.length === 0) {
1692
+ return {
1693
+ messages: [{
1694
+ role: "user",
1695
+ content: {
1696
+ type: "text",
1697
+ text: `No relevant memories found for task: "${args.task}"`
1698
+ }
1699
+ }]
1700
+ };
1701
+ }
1702
+ const memoryBlock = results.map((m, i) => {
1703
+ const tags = JSON.parse(m.tags);
1704
+ return [
1705
+ `### Memory ${i + 1} (score: ${m.similarity_score.toFixed(4)})`,
1706
+ `- **Category**: ${m.category}`,
1707
+ `- **Tags**: ${tags.length > 0 ? tags.join(", ") : "none"}`,
1708
+ `- **Importance**: ${m.importance}`,
1709
+ `- **Created**: ${m.created_at}`,
1710
+ "",
1711
+ m.content
1712
+ ].join("\n");
1713
+ }).join("\n\n---\n\n");
1714
+ return {
1715
+ messages: [{
1716
+ role: "user",
1717
+ content: {
1718
+ type: "text",
1719
+ text: [
1720
+ `# Relevant Memories for Task`,
1721
+ "",
1722
+ `**Task**: ${args.task}`,
1723
+ `**Found**: ${results.length} relevant memories`,
1724
+ "",
1725
+ memoryBlock
1726
+ ].join("\n")
1727
+ }
1728
+ }]
1729
+ };
1730
+ });
1731
// Prompt: show a proposed memory next to its nearest existing neighbors so the
// caller can decide whether storing it would create a near-duplicate.
server.registerPrompt("review_memory_candidate", {
  description: "Show a proposed memory alongside existing near-duplicates to help decide whether to store it.",
  argsSchema: {
    content: z.string().describe("The proposed memory content to review"),
    namespace: z.string().optional().describe("Namespace to check for duplicates")
  }
}, async (args) => {
  const ns = args.namespace ?? config.defaultNamespace;
  // The candidate content itself is the search query; top-5 hits are the duplicate candidates.
  const matches = await searchMemory(
    { query: args.content, namespace: ns, limit: 5 },
    { db, vecStore, embedder, logger, metrics, config }
  );
  let duplicateBlock;
  if (matches.length === 0) {
    duplicateBlock = "No similar memories found. This appears to be a new topic.";
  } else {
    const rendered = [];
    for (let idx = 0; idx < matches.length; idx++) {
      const hit = matches[idx];
      // tags are persisted as a JSON-encoded array string.
      const parsedTags = JSON.parse(hit.tags);
      rendered.push([
        `### Existing Memory ${idx + 1} (similarity: ${hit.similarity_score.toFixed(4)})`,
        `- **ID**: ${hit.id}`,
        `- **Category**: ${hit.category}`,
        `- **Tags**: ${parsedTags.length > 0 ? parsedTags.join(", ") : "none"}`,
        `- **Importance**: ${hit.importance}`,
        "",
        hit.content
      ].join("\n"));
    }
    duplicateBlock = rendered.join("\n\n---\n\n");
  }
  const body = [
    "# Memory Candidate Review",
    "",
    "## Proposed Memory",
    "",
    args.content,
    "",
    `## Existing Similar Memories (namespace: ${ns})`,
    "",
    duplicateBlock,
    "",
    "## Decision",
    "",
    "Should this memory be stored? Consider:",
    "1. Is it a duplicate of an existing memory?",
    "2. Does it add new information not captured above?",
    "3. Would it be better to update an existing memory instead?"
  ].join("\n");
  return {
    messages: [{
      role: "user",
      content: { type: "text", text: body }
    }]
  };
});
1782
// Prompt: render a read-only preview (commit=false) of the consolidation plan
// as a markdown report, without applying any merge/decay/prune/sweep.
server.registerPrompt("consolidation_dry_run", {
  description: "Preview proposed consolidation actions (merges, decays, prunes, sweeps) without applying them.",
  argsSchema: {
    namespace: z.string().optional().describe("Namespace to consolidate (default: config default)")
  }
}, (args) => {
  // Shared shape for every response from this prompt.
  const asUserText = (text) => ({
    messages: [{ role: "user", content: { type: "text", text } }]
  });
  const output = consolidate(
    { namespace: args.namespace, commit: false },
    db,
    vecStore,
    embedder,
    config,
    logger,
    metrics
  );
  // commit=false must yield a plan; anything else is unexpected.
  if (output.type !== "plan") {
    return asUserText("Unexpected: consolidation returned a result instead of a plan.");
  }
  const plan = output.plan;
  const out = [
    "# Consolidation Dry Run",
    "",
    `**Namespace**: ${plan.namespace}`,
    `**Operation ID**: ${plan.operation_id}`,
    "",
    "## Summary",
    "",
    `- Proposed merges: ${plan.summary.merge_count}`,
    `- Proposed decays: ${plan.summary.decay_count}`,
    `- Proposed prunes: ${plan.summary.prune_count}`,
    `- Proposed TTL sweeps: ${plan.summary.sweep_count}`
  ];
  // Append a titled section only when it has rows (matches the original's
  // length-guarded blocks).
  const appendSection = (title, rows) => {
    if (rows.length === 0) return;
    out.push("", title, "");
    out.push(...rows);
  };
  appendSection("## Proposed Merges", plan.proposed_merges.flatMap((m) => [
    `- **Keep** \`${m.keep_id}\` / **Tombstone** \`${m.tombstone_id}\``,
    `  Similarity: ${m.similarity.toFixed(4)} | Reason: ${m.reason}`
  ]));
  appendSection("## Proposed Decays", plan.proposed_decays.map(
    (d) => `- \`${d.id}\`: ${d.current_importance.toFixed(3)} -> ${d.new_importance.toFixed(3)} (${d.days_since_access} days since access)`
  ));
  appendSection("## Proposed Prunes", plan.proposed_prunes.map(
    (p) => `- \`${p.id}\`: importance=${p.importance.toFixed(3)}, accesses=${p.access_count}, age=${p.age_days}d | ${p.reason}`
  ));
  appendSection("## Proposed TTL Sweeps", plan.proposed_ttl_sweeps.map(
    (s) => `- \`${s.id}\`: expired at ${s.expired_at}`
  ));
  return asUserText(out.join("\n"));
});
1857
+ }
1858
+
1859
+ // src/server.ts
1860
/**
 * Wrap any JSON-serializable value in the MCP tool-result envelope:
 * a single text content item holding the compact JSON encoding of `data`.
 */
function textResult(data) {
  const text = JSON.stringify(data);
  return {
    content: [{ type: "text", text }]
  };
}
1863
/**
 * Create the MCP server and register all capabilities.
 *
 * Registers 8 tools (store/search/recall/forget, consolidate, stats,
 * export/import), then delegates resource and prompt registration to
 * registerResources/registerPrompts. Each tool handler returns its result
 * JSON-encoded via textResult.
 *
 * @param deps - shared dependencies: { db, vecStore, embedder, config, logger, metrics }
 * @returns the configured McpServer (not yet connected to a transport)
 */
function createServer(deps) {
  const { db, vecStore, embedder, config, logger, metrics } = deps;
  const server = new McpServer(
    { name: "neuromcp", version: "0.1.0" },
    {
      capabilities: {
        resources: {},
        tools: {},
        prompts: {}
      }
    }
  );
  // Write path: embeds content and dedupes semantically before inserting.
  server.registerTool("store_memory", {
    description: "Store a new memory with semantic deduplication. Returns the memory ID and whether it matched an existing memory.",
    inputSchema: {
      content: z2.string().describe("The memory content to store"),
      namespace: z2.string().optional().describe("Namespace to store in (default: config default)"),
      category: z2.string().optional().describe('Category label (e.g. "code", "conversation", "fact")'),
      tags: z2.array(z2.string()).optional().describe("Tags for filtering"),
      importance: z2.number().min(0).max(1).optional().describe("Importance score 0-1 (default: 0.5)"),
      source: z2.enum(["user", "auto", "consolidation", "claude-code", "error"]).optional().describe("Source of the memory"),
      source_trust: z2.enum(["high", "medium", "low", "unverified"]).optional().describe("Trust level"),
      project_id: z2.string().optional().describe("Project identifier"),
      agent_id: z2.string().optional().describe("Agent identifier"),
      metadata: z2.record(z2.unknown()).optional().describe("Arbitrary metadata"),
      expires_at: z2.string().optional().describe("ISO 8601 expiration timestamp")
    }
  }, async (args) => {
    const result = await storeMemory(args, { db, vecStore, embedder, logger, metrics, config });
    return textResult(result);
  });
  // Semantic read path: vector + FTS hybrid search with optional filters.
  server.registerTool("search_memory", {
    description: "Search memories using hybrid vector + full-text search with RRF ranking.",
    inputSchema: {
      query: z2.string().describe("Search query text"),
      namespace: z2.string().optional().describe("Namespace to search (default: config default)"),
      limit: z2.number().int().min(1).max(100).optional().describe("Max results (default: 10)"),
      category: z2.string().optional().describe("Filter by category"),
      tags: z2.array(z2.string()).optional().describe("Filter: all tags must be present"),
      min_importance: z2.number().min(0).max(1).optional().describe("Minimum importance threshold"),
      min_trust: z2.enum(["high", "medium", "low", "unverified"]).optional().describe("Minimum trust level"),
      after: z2.string().optional().describe("Only memories created after this ISO timestamp"),
      before: z2.string().optional().describe("Only memories created before this ISO timestamp"),
      hybrid: z2.boolean().optional().describe("Use hybrid search (default: true)")
    }
  }, async (args) => {
    const results = await searchMemory(args, { db, vecStore, embedder, logger, metrics, config });
    return textResult(results);
  });
  // Exact read path: filter-based lookup, no embedding/search involved (sync handler).
  server.registerTool("recall_memory", {
    description: "Recall memories by ID, namespace, category, or tags without semantic search.",
    inputSchema: {
      id: z2.string().optional().describe("Specific memory ID to recall"),
      namespace: z2.string().optional().describe("Namespace filter"),
      category: z2.string().optional().describe("Category filter"),
      tags: z2.array(z2.string()).optional().describe("Tags filter: all must match"),
      limit: z2.number().int().min(1).max(100).optional().describe("Max results (default: 20)")
    }
  }, (args) => {
    const results = recallMemory(args, db, config, logger, metrics);
    return textResult(results);
  });
  // Soft delete: marks matches as tombstoned; supports dry_run previews.
  server.registerTool("forget_memory", {
    description: "Tombstone (soft-delete) memories matching the given filters. At least one filter is required.",
    inputSchema: {
      id: z2.string().optional().describe("Specific memory ID to forget"),
      namespace: z2.string().optional().describe("Namespace filter"),
      tags: z2.array(z2.string()).optional().describe("Tags filter"),
      older_than_days: z2.number().int().min(1).optional().describe("Delete memories older than N days"),
      below_importance: z2.number().min(0).max(1).optional().describe("Delete memories below this importance"),
      dry_run: z2.boolean().optional().describe("Preview what would be deleted without actually deleting")
    }
  }, (args) => {
    const result = forgetMemory(args, db, vecStore, config, logger, metrics);
    return textResult(result);
  });
  // Maintenance: commit=false returns a plan, commit=true executes it.
  server.registerTool("consolidate", {
    description: "Run consolidation: merge near-duplicates, decay stale memories, prune low-value, sweep expired. Set commit=true to apply.",
    inputSchema: {
      namespace: z2.string().optional().describe("Namespace to consolidate (default: config default)"),
      similarity_threshold: z2.number().min(0).max(1).optional().describe("Similarity threshold for merging"),
      decay_lambda: z2.number().optional().describe("Decay rate parameter"),
      min_importance_after_decay: z2.number().min(0).max(1).optional().describe("Prune threshold after decay"),
      commit: z2.boolean().describe("If false, returns a dry-run plan. If true, executes the plan.")
    }
  }, (args) => {
    const output = consolidate(args, db, vecStore, embedder, config, logger, metrics);
    return textResult(output);
  });
  // Introspection: aggregate counts and sizes for one namespace or all ("*").
  server.registerTool("memory_stats", {
    description: "Get statistics about stored memories: counts, categories, trust levels, importance, and database size.",
    inputSchema: {
      namespace: z2.string().optional().describe('Namespace to get stats for (default: config default, "*" for all)')
    }
  }, (args) => {
    const stats = memoryStats(args, db, embedder, config);
    return textResult(stats);
  });
  // Backup/migration out: serialized records are wrapped as { data } for the client.
  server.registerTool("export_memories", {
    description: "Export memories as JSONL or JSON for backup or migration.",
    inputSchema: {
      namespace: z2.string().optional().describe('Namespace to export (default: config default, "*" for all)'),
      format: z2.enum(["jsonl", "json"]).optional().describe("Export format (default: jsonl)"),
      include_tombstoned: z2.boolean().optional().describe("Include soft-deleted memories")
    }
  }, (args) => {
    const data = exportMemories(args, db, config);
    return textResult({ data });
  });
  // Backup/migration in: async because imported records are re-embedded.
  server.registerTool("import_memories", {
    description: "Import memories from JSONL or JSON data. Deduplicates by content hash.",
    inputSchema: {
      data: z2.string().describe("JSONL or JSON array string of memory records"),
      namespace: z2.string().optional().describe("Override namespace for all imported memories"),
      trust: z2.enum(["high", "medium", "low", "unverified"]).optional().describe("Trust level for imported memories (default: unverified)")
    }
  }, async (args) => {
    const result = await importMemories(args, db, vecStore, embedder, config, logger, metrics);
    return textResult(result);
  });
  registerResources(server, deps);
  registerPrompts(server, deps);
  // NOTE(review): counts are hard-coded; keep in sync with the registrations
  // above and with registerResources/registerPrompts.
  logger.info("server", "MCP server created", {
    tools: 8,
    resources: 13,
    prompts: 3
  });
  return server;
}
1992
+
1993
+ // src/index.ts
1994
/**
 * Entry point: bootstrap configuration, logging, storage, embeddings and the
 * vector store, then serve MCP over stdio. The order is dependency-driven:
 * config -> logger/metrics -> database + migrations -> embedder -> vec store
 * (sized from the embedder's dimensions) -> server -> transport.
 */
async function main() {
  const cfg = loadConfig();
  const log = createLogger({ level: cfg.logLevel, format: cfg.logFormat });
  const metrics = createMetrics();
  log.info("startup", "Loading neuromcp", {
    dbPath: cfg.dbPath,
    embeddingProvider: cfg.embeddingProvider,
    defaultNamespace: cfg.defaultNamespace
  });
  const database = openDatabase(cfg.dbPath);
  runMigrations(database, cfg.dbPath, log);
  // Provider selection may probe external services, hence await.
  const embedder = await createEmbeddingProvider(cfg, log);
  const vectors = new SqliteVecStore(embedder.dimensions);
  vectors.initialize(database);
  const server = createServer({
    db: database,
    vecStore: vectors,
    embedder,
    config: cfg,
    logger: log,
    metrics
  });
  await server.connect(new StdioServerTransport());
  log.info("startup", "neuromcp MCP server running on stdio");
}
2013
// Top-level failure handler: report the fatal error (and stack, when
// available) on stderr, then exit non-zero so callers can detect the crash.
main().catch((error) => {
  const isError = error instanceof Error;
  const message = isError ? error.message : String(error);
  process.stderr.write(`Fatal error: ${message}\n`);
  if (isError && error.stack !== void 0) {
    process.stderr.write(`${error.stack}\n`);
  }
  process.exit(1);
});
2024
+ //# sourceMappingURL=index.js.map