@coreyuan/vector-mind 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js ADDED
@@ -0,0 +1,1222 @@
1
+ #!/usr/bin/env node
2
+ import path from "node:path";
3
+ import fs from "node:fs";
4
+ import crypto from "node:crypto";
5
+ import os from "node:os";
6
+ import chokidar from "chokidar";
7
+ import Database from "better-sqlite3";
8
+ import { z } from "zod";
9
+ import { Server } from "@modelcontextprotocol/sdk/server/index.js";
10
+ import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
11
+ import { toJsonSchemaCompat } from "@modelcontextprotocol/sdk/server/zod-json-schema-compat.js";
12
+ import { CallToolRequestSchema, ListToolsRequestSchema, } from "@modelcontextprotocol/sdk/types.js";
13
// Server identity reported to MCP clients.
const SERVER_NAME = "vector-mind";
const SERVER_VERSION = "1.0.0";
// Project root: an explicit VECTORMIND_ROOT env var wins over the process cwd.
const rootFromEnv = process.env.VECTORMIND_ROOT?.trim();
const projectRoot = rootFromEnv ? path.resolve(rootFromEnv) : process.cwd();
if (rootFromEnv) {
    // Fail fast when an explicitly configured root is missing or not a directory;
    // an implicit cwd root is trusted as-is.
    try {
        const st = fs.statSync(projectRoot);
        if (!st.isDirectory())
            throw new Error("VECTORMIND_ROOT is not a directory");
    }
    catch (err) {
        console.error(`[VectorMind] Invalid VECTORMIND_ROOT: ${projectRoot}. ` +
            `Set it to an existing project directory. (${String(err)})`);
        process.exit(1);
    }
}
// The local SQLite store lives inside the project root itself
// (its files are excluded from watching/indexing by shouldIgnorePath).
const dbPath = path.join(projectRoot, ".vectormind.db");
const db = new Database(dbPath);
// WAL improves read concurrency; SQLite leaves FK enforcement off by default.
db.pragma("journal_mode = WAL");
db.pragma("foreign_keys = ON");
33
+ db.exec(`
34
+ CREATE TABLE IF NOT EXISTS requirements (
35
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
36
+ title TEXT NOT NULL,
37
+ status TEXT DEFAULT 'active',
38
+ context_data TEXT,
39
+ created_at DATETIME DEFAULT CURRENT_TIMESTAMP
40
+ );
41
+
42
+ CREATE TABLE IF NOT EXISTS change_logs (
43
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
44
+ req_id INTEGER,
45
+ file_path TEXT,
46
+ intent_summary TEXT,
47
+ timestamp DATETIME DEFAULT CURRENT_TIMESTAMP,
48
+ FOREIGN KEY(req_id) REFERENCES requirements(id)
49
+ );
50
+
51
+ CREATE TABLE IF NOT EXISTS symbols (
52
+ name TEXT,
53
+ type TEXT,
54
+ file_path TEXT,
55
+ signature TEXT,
56
+ PRIMARY KEY(name, file_path)
57
+ );
58
+
59
+ CREATE INDEX IF NOT EXISTS idx_change_logs_req_id_timestamp
60
+ ON change_logs(req_id, timestamp DESC);
61
+
62
+ CREATE INDEX IF NOT EXISTS idx_symbols_name
63
+ ON symbols(name);
64
+
65
+ CREATE TABLE IF NOT EXISTS memory_items (
66
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
67
+ kind TEXT NOT NULL,
68
+ title TEXT,
69
+ content TEXT NOT NULL,
70
+ file_path TEXT,
71
+ start_line INTEGER,
72
+ end_line INTEGER,
73
+ req_id INTEGER,
74
+ metadata_json TEXT,
75
+ content_hash TEXT,
76
+ created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
77
+ updated_at DATETIME DEFAULT CURRENT_TIMESTAMP
78
+ );
79
+
80
+ CREATE UNIQUE INDEX IF NOT EXISTS idx_memory_items_chunk_locator
81
+ ON memory_items(kind, file_path, start_line, end_line);
82
+
83
+ CREATE UNIQUE INDEX IF NOT EXISTS idx_memory_items_project_summary
84
+ ON memory_items(kind) WHERE kind = 'project_summary';
85
+
86
+ CREATE INDEX IF NOT EXISTS idx_memory_items_kind_updated_at
87
+ ON memory_items(kind, updated_at DESC);
88
+
89
+ CREATE INDEX IF NOT EXISTS idx_memory_items_file_path
90
+ ON memory_items(file_path);
91
+
92
+ CREATE INDEX IF NOT EXISTS idx_memory_items_req_id
93
+ ON memory_items(req_id);
94
+
95
+ CREATE TABLE IF NOT EXISTS embeddings (
96
+ memory_id INTEGER PRIMARY KEY,
97
+ dim INTEGER NOT NULL,
98
+ vector BLOB NOT NULL,
99
+ content_hash TEXT,
100
+ created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
101
+ updated_at DATETIME DEFAULT CURRENT_TIMESTAMP,
102
+ FOREIGN KEY(memory_id) REFERENCES memory_items(id) ON DELETE CASCADE
103
+ );
104
+
105
+ CREATE INDEX IF NOT EXISTS idx_embeddings_updated_at
106
+ ON embeddings(updated_at DESC);
107
+
108
+ CREATE TABLE IF NOT EXISTS pending_changes (
109
+ file_path TEXT PRIMARY KEY,
110
+ last_event TEXT NOT NULL,
111
+ updated_at DATETIME DEFAULT CURRENT_TIMESTAMP
112
+ );
113
+
114
+ CREATE INDEX IF NOT EXISTS idx_pending_changes_updated_at
115
+ ON pending_changes(updated_at DESC);
116
+ `);
117
// ---- Prepared statements (compiled once at startup) ----

// Requirements: insert as 'active'; read back the newest / most recent ones.
const insertRequirementStmt = db.prepare(`INSERT INTO requirements (title, context_data, status) VALUES (?, ?, 'active')`);
const getActiveRequirementStmt = db.prepare(`SELECT id, title, status, context_data, created_at
FROM requirements
WHERE status = 'active'
ORDER BY created_at DESC, id DESC
LIMIT 1`);
const listRecentRequirementsStmt = db.prepare(`SELECT id, title, status, context_data, created_at
FROM requirements
ORDER BY created_at DESC, id DESC
LIMIT ?`);
// Change logs: file edits linked to a requirement, newest first.
const listChangeLogsForRequirementStmt = db.prepare(`SELECT id, req_id, file_path, intent_summary, timestamp
FROM change_logs
WHERE req_id = ?
ORDER BY timestamp DESC, id DESC
LIMIT ?`);
const insertChangeLogStmt = db.prepare(`INSERT INTO change_logs (req_id, file_path, intent_summary) VALUES (?, ?, ?)`);
// Memory items: content chunks, notes, and the singleton project summary
// (singleton enforced by the partial unique index on kind = 'project_summary').
const insertMemoryItemStmt = db.prepare(`INSERT INTO memory_items
(kind, title, content, file_path, start_line, end_line, req_id, metadata_json, content_hash)
VALUES
(?, ?, ?, ?, ?, ?, ?, ?, ?)`);
const getMemoryItemByIdStmt = db.prepare(`SELECT id, kind, title, content, file_path, start_line, end_line, req_id, metadata_json, content_hash, created_at, updated_at
FROM memory_items
WHERE id = ?`);
const upsertProjectSummaryStmt = db.prepare(`INSERT INTO memory_items (kind, title, content, metadata_json, content_hash)
VALUES ('project_summary', 'Project Summary', ?, ?, ?)
ON CONFLICT DO UPDATE SET
title = excluded.title,
content = excluded.content,
metadata_json = excluded.metadata_json,
content_hash = excluded.content_hash,
updated_at = CURRENT_TIMESTAMP`);
const getProjectSummaryStmt = db.prepare(`SELECT id, kind, title, content, file_path, start_line, end_line, req_id, metadata_json, content_hash, created_at, updated_at
FROM memory_items
WHERE kind = 'project_summary'
LIMIT 1`);
const listRecentNotesStmt = db.prepare(`SELECT id, kind, title, content, file_path, start_line, end_line, req_id, metadata_json, content_hash, created_at, updated_at
FROM memory_items
WHERE kind = 'note'
ORDER BY updated_at DESC, id DESC
LIMIT ?`);
const deleteFileChunkItemsStmt = db.prepare(`DELETE FROM memory_items
WHERE file_path = ?
AND (kind = 'code_chunk' OR kind = 'doc_chunk')`);
// Embeddings: content-hash-guarded upsert keyed by memory_id.
const getEmbeddingMetaStmt = db.prepare(`SELECT memory_id, dim, content_hash
FROM embeddings
WHERE memory_id = ?`);
const upsertEmbeddingStmt = db.prepare(`INSERT INTO embeddings (memory_id, dim, vector, content_hash)
VALUES (?, ?, ?, ?)
ON CONFLICT(memory_id) DO UPDATE SET
dim = excluded.dim,
vector = excluded.vector,
content_hash = excluded.content_hash,
updated_at = CURRENT_TIMESTAMP`);
// Pending changes: one row per file edited since the last sync_change_intent.
const upsertPendingChangeStmt = db.prepare(`INSERT INTO pending_changes (file_path, last_event)
VALUES (?, ?)
ON CONFLICT(file_path) DO UPDATE SET
last_event = excluded.last_event,
updated_at = CURRENT_TIMESTAMP`);
const listPendingChangesStmt = db.prepare(`SELECT file_path, last_event, updated_at
FROM pending_changes
ORDER BY updated_at DESC`);
const deletePendingChangeStmt = db.prepare(`DELETE FROM pending_changes WHERE file_path = ?`);
const deleteAllPendingChangesStmt = db.prepare(`DELETE FROM pending_changes`);
// Symbols: per-file replace; ranked LIKE search (exact name, then name prefix,
// then any other match), callers escape wildcards per ESCAPE '\'.
const deleteSymbolsForFileStmt = db.prepare(`DELETE FROM symbols WHERE file_path = ?`);
const upsertSymbolStmt = db.prepare(`INSERT OR REPLACE INTO symbols (name, type, file_path, signature) VALUES (?, ?, ?, ?)`);
const searchSymbolsStmt = db.prepare(`SELECT name, type, file_path, signature
FROM symbols
WHERE name LIKE ? ESCAPE '\\'
OR signature LIKE ? ESCAPE '\\'
ORDER BY
CASE
WHEN name = ? THEN 0
WHEN name LIKE ? ESCAPE '\\' THEN 1
ELSE 2
END,
name
LIMIT ?`);
194
/**
 * Normalize a path for DB storage: project-relative with forward slashes when
 * the path lies inside the project root, otherwise the absolute path
 * (still forward-slashed for cross-platform stability).
 */
function normalizeToDbPath(inputPath) {
    let absolute = inputPath;
    if (!path.isAbsolute(absolute)) {
        absolute = path.join(projectRoot, absolute);
    }
    const relative = path.relative(projectRoot, absolute);
    const isInsideRoot = relative !== "" && !relative.startsWith("..") && !path.isAbsolute(relative);
    return (isInsideRoot ? relative : absolute).replace(/\\/g, "/");
}
201
/**
 * True when a path must be excluded from watching and indexing:
 * anything outside the project root, VectorMind's own SQLite files,
 * or a top-level node_modules / .git / dist directory.
 */
function shouldIgnorePath(inputPath) {
    const rel = path.relative(projectRoot, path.resolve(inputPath));
    if (rel.startsWith("..") || path.isAbsolute(rel)) {
        return true; // outside the project root
    }
    const relPosix = rel.replace(/\\/g, "/");
    const isOwnDbFile = relPosix === ".vectormind.db" ||
        relPosix.startsWith(".vectormind.db-") ||
        relPosix === ".vectormind.db-journal";
    if (isOwnDbFile) {
        return true;
    }
    const topSegment = relPosix.split("/")[0];
    return topSegment === "node_modules" || topSegment === ".git" || topSegment === "dist";
}
217
/** True for source-file extensions whose symbols we attempt to extract. */
function isSymbolIndexableFile(filePath) {
    const SYMBOL_EXTENSIONS = new Set([
        ".ts", ".tsx", ".js", ".jsx", ".mjs", ".cjs",
        ".py", ".go", ".rs", ".java", ".kt", ".cs",
        ".c", ".cc", ".cpp", ".h", ".hpp",
    ]);
    return SYMBOL_EXTENSIONS.has(path.extname(filePath).toLowerCase());
}
240
/** Lockfiles and minified bundles add noise, so skip content indexing for them. */
function shouldIgnoreContentFile(filePath) {
    const base = path.basename(filePath).toLowerCase();
    const LOCKFILE_NAMES = [
        "package-lock.json",
        "pnpm-lock.yaml",
        "yarn.lock",
        "bun.lockb",
        "cargo.lock",
        "composer.lock",
    ];
    if (LOCKFILE_NAMES.includes(base)) {
        return true;
    }
    return base.endsWith(".min.js") || base.endsWith(".min.css");
}
256
/**
 * Map a file to its chunk kind: 'doc_chunk' for docs/config formats,
 * 'code_chunk' for symbol-indexable source files, null otherwise.
 */
function getContentChunkKind(filePath) {
    const ext = path.extname(filePath).toLowerCase();
    const DOC_EXTENSIONS = new Set([
        ".md", ".mdx", ".txt", ".rst", ".adoc", ".org",
        ".json", ".yml", ".yaml", ".toml", ".ini", ".env", ".sql",
    ]);
    if (DOC_EXTENSIONS.has(ext)) {
        return "doc_chunk";
    }
    return isSymbolIndexableFile(filePath) ? "code_chunk" : null;
}
279
/** A file is content-indexable when it is not ignored and maps to a chunk kind. */
function isContentIndexableFile(filePath) {
    return !shouldIgnoreContentFile(filePath) && getContentChunkKind(filePath) !== null;
}
284
/** Dispatch symbol extraction to the language-specific heuristic parser. */
function extractSymbols(filePath, content) {
    switch (path.extname(filePath).toLowerCase()) {
        case ".py":
            return extractPythonSymbols(content);
        case ".rs":
            return extractRustSymbols(content);
        case ".go":
            return extractGoSymbols(content);
        case ".ts":
        case ".tsx":
        case ".js":
        case ".jsx":
        case ".mjs":
        case ".cjs":
            return extractJsTsSymbols(content);
        default:
            // Java/Kotlin/C#/C/C++ all share the C-like heuristics.
            return extractCLikeSymbols(content);
    }
}
302
/**
 * Heuristic line-based symbol extraction for JS/TS sources.
 * Each pattern captures the symbol name in group 1; the first matching
 * pattern per line wins (same precedence as before).
 * Generalized: also recognizes single-parameter arrow functions written
 * without parentheses (e.g. `const f = x => ...`), which the previous
 * paren-only arrow pattern missed.
 *
 * @param {string} content - full file text
 * @returns {{name: string, type: string, signature: string}[]}
 */
function extractJsTsSymbols(content) {
    const PATTERNS = [
        { type: "class", re: /^(?:export\s+)?(?:default\s+)?class\s+([A-Za-z_$][\w$]*)/ },
        { type: "function", re: /^(?:export\s+)?(?:async\s+)?function\s+([A-Za-z_$][\w$]*)\s*\(/ },
        { type: "interface", re: /^(?:export\s+)?interface\s+([A-Za-z_$][\w$]*)\b/ },
        { type: "type", re: /^(?:export\s+)?type\s+([A-Za-z_$][\w$]*)\s*=/ },
        { type: "enum", re: /^(?:export\s+)?enum\s+([A-Za-z_$][\w$]*)\b/ },
        // const arrow functions: `(args) =>` or a bare single identifier `x =>`
        { type: "function", re: /^(?:export\s+)?const\s+([A-Za-z_$][\w$]*)\s*=\s*(?:async\s*)?(?:\(.*=>|[A-Za-z_$][\w$]*\s*=>)/ },
        // const function expressions: `const f = function (...)`
        { type: "function", re: /^(?:export\s+)?const\s+([A-Za-z_$][\w$]*)\s*=\s*(?:async\s*)?function\s*\(/ },
    ];
    const symbols = [];
    for (const line of content.split(/\r?\n/)) {
        const trimmed = line.trim();
        if (!trimmed || trimmed.startsWith("//"))
            continue;
        for (const { type, re } of PATTERNS) {
            const match = trimmed.match(re);
            if (match) {
                symbols.push({ name: match[1], type, signature: trimmed });
                break;
            }
        }
    }
    return symbols;
}
350
/** Line-based extraction of Python class and (async) function definitions. */
function extractPythonSymbols(content) {
    const found = [];
    for (const rawLine of content.split(/\r?\n/)) {
        const stripped = rawLine.trim();
        if (stripped === "" || stripped.startsWith("#")) {
            continue;
        }
        const classMatch = stripped.match(/^class\s+([A-Za-z_][\w]*)\b/);
        if (classMatch) {
            found.push({ name: classMatch[1], type: "class", signature: stripped });
            continue;
        }
        const defMatch = stripped.match(/^(async\s+)?def\s+([A-Za-z_][\w]*)\s*\(/);
        if (defMatch) {
            found.push({ name: defMatch[2], type: "function", signature: stripped });
        }
    }
    return found;
}
371
/** Line-based extraction of Rust structs/enums/traits and fn definitions. */
function extractRustSymbols(content) {
    const found = [];
    const TYPE_RE = /^(pub\s+)?(struct|enum|trait)\s+([A-Za-z_][\w]*)\b/;
    const FN_RE = /^(pub\s+)?fn\s+([A-Za-z_][\w]*)\s*\(/;
    for (const rawLine of content.split(/\r?\n/)) {
        const stripped = rawLine.trim();
        if (stripped === "" || stripped.startsWith("//")) {
            continue;
        }
        let m = stripped.match(TYPE_RE);
        if (m) {
            found.push({ name: m[3], type: m[2], signature: stripped });
            continue;
        }
        m = stripped.match(FN_RE);
        if (m) {
            found.push({ name: m[2], type: "function", signature: stripped });
        }
    }
    return found;
}
392
/** Line-based extraction of Go struct/interface types, functions, and methods. */
function extractGoSymbols(content) {
    const found = [];
    for (const rawLine of content.split(/\r?\n/)) {
        const stripped = rawLine.trim();
        if (stripped === "" || stripped.startsWith("//")) {
            continue;
        }
        const typeMatch = stripped.match(/^type\s+([A-Za-z_][\w]*)\s+(struct|interface)\b/);
        if (typeMatch) {
            found.push({ name: typeMatch[1], type: typeMatch[2], signature: stripped });
            continue;
        }
        const funcMatch = stripped.match(/^func\s+([A-Za-z_][\w]*)\s*\(/);
        if (funcMatch) {
            found.push({ name: funcMatch[1], type: "function", signature: stripped });
            continue;
        }
        // Methods carry a receiver between `func` and the name: func (r *T) Name(...)
        const methodMatch = stripped.match(/^func\s+\([^)]*\)\s+([A-Za-z_][\w]*)\s*\(/);
        if (methodMatch) {
            found.push({ name: methodMatch[1], type: "method", signature: stripped });
        }
    }
    return found;
}
418
/**
 * Heuristic symbol extraction for C-like languages (C/C++/Java/Kotlin/C#).
 * Recognizes type declarations and "<return type> name(" function signatures.
 * Fix: filters control-flow keywords so lines such as `return helper();`
 * (previously matched as a "function" named helper) are no longer misreported.
 *
 * @param {string} content - full file text
 * @returns {{name: string, type: string, signature: string}[]}
 */
function extractCLikeSymbols(content) {
    // Leading (or called) words that can never form a function definition.
    const NON_DEFINITION_KEYWORDS = new Set([
        "return", "if", "else", "while", "for", "do", "switch", "case",
        "throw", "new", "delete", "goto", "break", "continue", "sizeof",
    ]);
    const symbols = [];
    for (const line of content.split(/\r?\n/)) {
        const trimmed = line.trim();
        if (!trimmed)
            continue;
        if (trimmed.startsWith("//") || trimmed.startsWith("/*") || trimmed.startsWith("*"))
            continue;
        let match = trimmed.match(/^(class|struct|interface|enum)\s+([A-Za-z_][\w]*)\b/);
        if (match) {
            symbols.push({ name: match[2], type: match[1], signature: trimmed });
            continue;
        }
        // Group 1 = first token (candidate return type), group 2 = symbol name.
        match = trimmed.match(/^([A-Za-z_]\w*)[\w:<>,\s\*&]*\s+([A-Za-z_][\w]*)\s*\(/);
        if (match &&
            !NON_DEFINITION_KEYWORDS.has(match[1]) &&
            !NON_DEFINITION_KEYWORDS.has(match[2])) {
            symbols.push({ name: match[2], type: "function", signature: trimmed });
        }
    }
    return symbols;
}
441
// Atomically replace all symbol rows for one file: delete the old set,
// then insert the freshly extracted symbols inside a single transaction.
const indexFileSymbolsTx = db.transaction((filePath, symbols) => {
    deleteSymbolsForFileStmt.run(filePath);
    symbols.forEach((sym) => upsertSymbolStmt.run(sym.name, sym.type, filePath, sym.signature));
});
447
/**
 * Split text into line-aligned chunks bounded by opts.maxChars / opts.maxLines.
 * Line numbers are 1-based and inclusive; a single oversized line still forms
 * its own chunk (lines are never split mid-line). The +1 in the character
 * accounting stands in for the newline that rejoins the lines.
 *
 * Fix: empty content now returns [] — previously `"".split(...)` yields [""],
 * so the length-0 guard never fired and an empty file produced one empty
 * chunk that was stored (and embedded) for no benefit.
 *
 * @param {string} content
 * @param {{maxChars: number, maxLines: number}} opts
 * @returns {{startLine: number, endLine: number, content: string}[]}
 */
function chunkTextByLines(content, opts) {
    if (content.length === 0)
        return [];
    const lines = content.split(/\r?\n/);
    const chunks = [];
    let startLine = 1;
    let currentLines = [];
    let currentChars = 0;
    for (let idx = 0; idx < lines.length; idx++) {
        const line = lines[idx];
        const nextChars = currentChars + line.length + 1;
        const nextLines = currentLines.length + 1;
        const overflows = nextChars > opts.maxChars || nextLines > opts.maxLines;
        if (currentLines.length > 0 && overflows) {
            chunks.push({
                startLine,
                endLine: startLine + currentLines.length - 1,
                content: currentLines.join("\n"),
            });
            startLine = idx + 1;
            currentLines = [];
            currentChars = 0;
        }
        currentLines.push(line);
        currentChars += line.length + 1;
    }
    if (currentLines.length > 0) {
        chunks.push({
            startLine,
            endLine: startLine + currentLines.length - 1,
            content: currentLines.join("\n"),
        });
    }
    return chunks;
}
475
/**
 * Re-chunk a file's content and replace its chunk rows in memory_items.
 * Code chunks use tighter limits than doc chunks; embeddings are enqueued
 * asynchronously when the embed policy allows it for this event reason.
 */
function indexFileContentChunks(dbFilePath, absPath, content, reason) {
    const kind = getContentChunkKind(absPath);
    if (kind === null)
        return;
    const limits = kind === "code_chunk"
        ? { maxChars: 10_000, maxLines: 200 }
        : { maxChars: 14_000, maxLines: 260 };
    const chunks = chunkTextByLines(content, limits);
    const metadata = safeJson({ ext: path.extname(absPath).toLowerCase() });
    const wantEmbedding = shouldEmbedFileChunks(reason);
    const replaceChunks = db.transaction(() => {
        deleteFileChunkItemsStmt.run(dbFilePath);
        for (const chunk of chunks) {
            const result = insertMemoryItemStmt.run(kind, `${dbFilePath}#L${chunk.startLine}-L${chunk.endLine}`, chunk.content, dbFilePath, chunk.startLine, chunk.endLine, null, metadata, sha256Hex(chunk.content));
            if (wantEmbedding) {
                enqueueEmbedding(Number(result.lastInsertRowid));
            }
        }
    });
    try {
        replaceChunks();
    }
    catch (err) {
        console.error("[vectormind] failed to index file chunks:", dbFilePath, err);
    }
}
504
/**
 * Remember that a watched file changed (add/change/unlink) so the next
 * sync_change_intent can link it. Paths we do not index at all are skipped.
 */
function recordPendingChange(absPath, event) {
    if (shouldIgnorePath(absPath))
        return;
    if (!isSymbolIndexableFile(absPath) && !isContentIndexableFile(absPath))
        return;
    const filePath = normalizeToDbPath(absPath);
    try {
        upsertPendingChangeStmt.run(filePath, event);
    }
    catch (err) {
        console.error("[vectormind] failed to record pending change:", filePath, err);
    }
}
518
/**
 * (Re)index one file: refresh its symbol rows and/or content chunks.
 * Skips ignored paths, non-files, files over 1 MB, and likely-binary content
 * (detected via an embedded NUL byte). Stat/read failures mean "skip quietly";
 * symbol-index failures are logged and content indexing still proceeds.
 */
function indexFile(absPath, reason) {
    if (shouldIgnorePath(absPath))
        return;
    const wantSymbols = isSymbolIndexableFile(absPath);
    const wantContent = isContentIndexableFile(absPath);
    if (!wantSymbols && !wantContent)
        return;
    let stat;
    try {
        stat = fs.statSync(absPath);
    }
    catch {
        return;
    }
    if (!stat.isFile() || stat.size > 1_000_000)
        return;
    let content;
    try {
        content = fs.readFileSync(absPath, "utf8");
    }
    catch {
        return;
    }
    if (content.includes("\u0000"))
        return; // NUL byte => treat as binary
    const filePath = normalizeToDbPath(absPath);
    if (wantSymbols) {
        try {
            indexFileSymbolsTx(filePath, extractSymbols(absPath, content));
        }
        catch (err) {
            console.error("[vectormind] failed to index symbols:", filePath, err);
        }
    }
    if (wantContent) {
        indexFileContentChunks(filePath, absPath, content, reason);
    }
}
559
/** Drop all indexed data (symbols and content chunks) for a removed file. */
function removeFileIndexes(absPath) {
    if (shouldIgnorePath(absPath))
        return;
    const filePath = normalizeToDbPath(absPath);
    try {
        deleteSymbolsForFileStmt.run(filePath);
    }
    catch (err) {
        console.error("[vectormind] failed to remove symbols:", filePath, err);
    }
    try {
        deleteFileChunkItemsStmt.run(filePath);
    }
    catch (err) {
        console.error("[vectormind] failed to remove file chunks:", filePath, err);
    }
}
576
// ---- Tool argument schemas (zod) ----

// start_requirement: title required; background defaults to "".
const StartRequirementArgsSchema = z.object({
    title: z.string().min(1),
    background: z.string().optional().default(""),
});
// sync_change_intent: accepts `files` or the `affected_files` alias and
// normalizes to { intent, files } with empty/missing arrays collapsed to [].
const SyncChangeIntentArgsSchema = z
    .object({
    intent: z.string().min(1),
    files: z.array(z.string().min(1)).optional(),
    affected_files: z.array(z.string().min(1)).optional(),
})
    .transform((v) => ({
    intent: v.intent,
    files: (v.files ?? v.affected_files ?? []).filter(Boolean),
}));
// query_codebase: a non-empty symbol-search query.
const QueryCodebaseArgsSchema = z.object({
    query: z.string().min(1),
});
// upsert_project_summary: the full summary text.
const UpsertProjectSummaryArgsSchema = z.object({
    summary: z.string().min(1),
});
// add_note: content required; title optional; optional non-empty tags.
const AddNoteArgsSchema = z.object({
    title: z.string().optional().default(""),
    content: z.string().min(1),
    tags: z.array(z.string().min(1)).optional(),
});
// bootstrap_context: query optional (no semantic search without it);
// top_k clamped to 1..50 with default 10.
const BootstrapContextArgsSchema = z.object({
    query: z.string().optional(),
    top_k: z.number().int().min(1).max(50).optional().default(10),
    kinds: z.array(z.string().min(1)).optional(),
    include_content: z.boolean().optional().default(false),
});
// semantic_search: same shape as bootstrap_context but query is required.
const SemanticSearchArgsSchema = z.object({
    query: z.string().min(1),
    top_k: z.number().int().min(1).max(50).optional().default(10),
    kinds: z.array(z.string().min(1)).optional(),
    include_content: z.boolean().optional().default(false),
});
613
/** Escape LIKE wildcards (% and _) and backslashes for use with ESCAPE '\'. */
function escapeLike(pattern) {
    return pattern.replace(/[\\%_]/g, (ch) => "\\" + ch);
}
616
/** Hex-encoded SHA-256 digest of a string (or Buffer) input. */
function sha256Hex(input) {
    const hash = crypto.createHash("sha256");
    hash.update(input);
    return hash.digest("hex");
}
619
/**
 * Best-effort JSON serialization for metadata columns: returns the JSON
 * string, or null when the value is undefined or unserializable (cycles,
 * BigInt). Fix: JSON.stringify returns undefined (not a string) for
 * functions/symbols — that previously leaked through as undefined instead of
 * the documented null fallback; `?? null` closes the gap.
 *
 * @param {*} value - any value destined for a TEXT metadata column
 * @returns {string | null}
 */
function safeJson(value) {
    if (value === undefined)
        return null;
    try {
        return JSON.stringify(value) ?? null;
    }
    catch {
        return null;
    }
}
629
// Embeddings are on unless VECTORMIND_EMBEDDINGS is set to an "off" value.
const embeddingsEnabled = !["0", "false", "off", "disabled"].includes((process.env.VECTORMIND_EMBEDDINGS ?? "on").toLowerCase());
// Which file-chunk events get embedded: "all" (default), "none"/"off"/"disabled",
// or any other value = embed only non-initial events (see shouldEmbedFileChunks).
const embedFilesMode = (process.env.VECTORMIND_EMBED_FILES ?? "all").toLowerCase();
// Local sentence-embedding model and its on-disk cache for @xenova/transformers.
const embedModelName = process.env.VECTORMIND_EMBED_MODEL ?? "Xenova/all-MiniLM-L6-v2";
const embedCacheDir = process.env.VECTORMIND_EMBED_CACHE_DIR ??
    path.join(os.homedir(), ".cache", "vectormind");
// Remote model downloads are allowed unless explicitly disabled.
const allowRemoteModels = !["0", "false", "off"].includes((process.env.VECTORMIND_ALLOW_REMOTE_MODELS ?? "true").toLowerCase());
635
/**
 * Embedding policy for file chunks, driven by VECTORMIND_EMBED_FILES:
 * "all" embeds every chunk, "none"/"off"/"disabled" embeds nothing, and any
 * other value embeds only non-"add" events (i.e. skips the initial scan).
 */
function shouldEmbedFileChunks(reason) {
    if (!embeddingsEnabled)
        return false;
    switch (embedFilesMode) {
        case "none":
        case "off":
        case "disabled":
            return false;
        case "all":
            return true;
        default:
            return reason !== "add";
    }
}
644
// Memoized embedder promise. Reset to null on failure so a transient init
// error (e.g. model download offline) does not poison every later call —
// previously a rejected promise stayed cached and embedding never recovered.
let embedderPromise = null;
/**
 * Build the embedding pipeline: configure the @xenova/transformers cache and
 * model policy, then return an async (text) => Float32Array function producing
 * a mean-pooled, normalized sentence embedding.
 */
async function createEmbedder() {
    fs.mkdirSync(embedCacheDir, { recursive: true });
    const mod = await import("@xenova/transformers");
    const env = mod.env;
    if (env) {
        // Assign only when the property has the expected shape (or is unset),
        // to stay tolerant of library version differences.
        if (typeof env.cacheDir === "string" || env.cacheDir === undefined) {
            env.cacheDir = embedCacheDir;
        }
        if (typeof env.allowRemoteModels === "boolean" || env.allowRemoteModels === undefined) {
            env.allowRemoteModels = allowRemoteModels;
        }
        if (typeof env.allowLocalModels === "boolean" || env.allowLocalModels === undefined) {
            env.allowLocalModels = true;
        }
    }
    const extractor = await mod.pipeline("feature-extraction", embedModelName);
    return async (text) => {
        const input = text.trim() || " "; // the model needs non-empty input
        const out = await extractor(input, { pooling: "mean", normalize: true });
        const data = out?.data ?? out;
        if (data instanceof Float32Array)
            return data;
        if (ArrayBuffer.isView(data)) {
            // Element-wise conversion for typed arrays. The previous byte
            // reinterpretation assumed 4-byte elements and silently corrupted
            // e.g. Float64Array output; keep it only as a DataView fallback.
            return typeof data.length === "number"
                ? Float32Array.from(data)
                : new Float32Array(data.buffer, data.byteOffset, Math.floor(data.byteLength / 4));
        }
        if (Array.isArray(data))
            return Float32Array.from(data.flat(Infinity));
        if (typeof out?.tolist === "function")
            return Float32Array.from(out.tolist().flat(Infinity));
        throw new Error("Unexpected embedding output from embedder");
    };
}
/** Lazily create and memoize the embedder; failures clear the cache for retry. */
async function getEmbedder() {
    if (!embedderPromise) {
        embedderPromise = createEmbedder().catch((err) => {
            embedderPromise = null; // allow the next call to retry
            throw err;
        });
    }
    return embedderPromise;
}
685
/**
 * Compose the text actually embedded for a memory item: a one-line metadata
 * header (kind, req, file, line range, title — skipping absent fields)
 * followed by a blank line and the item content.
 */
function buildEmbeddingInput(item) {
    const header = [`kind: ${item.kind}`];
    if (item.req_id != null)
        header.push(`req_id: ${item.req_id}`);
    if (item.file_path)
        header.push(`file: ${item.file_path}`);
    if (item.start_line != null && item.end_line != null)
        header.push(`lines: ${item.start_line}-${item.end_line}`);
    if (item.title)
        header.push(`title: ${item.title}`);
    return `${header.join(" | ")}\n\n${item.content ?? ""}`.trim();
}
700
/**
 * Compute and store the embedding for one memory item. Skips work when the
 * stored embedding's content hash already matches the current input (hash
 * guard makes re-embedding idempotent); missing rows are silently ignored.
 */
async function embedMemoryItemById(memoryId) {
    if (!embeddingsEnabled)
        return;
    const item = getMemoryItemByIdStmt.get(memoryId);
    if (!item)
        return;
    const input = buildEmbeddingInput(item);
    const inputHash = sha256Hex(input);
    if (getEmbeddingMetaStmt.get(memoryId)?.content_hash === inputHash)
        return; // vector is already up to date
    const embedder = await getEmbedder();
    const vector = await embedder(input);
    // Copy just the vector's bytes out of its (possibly shared) backing buffer.
    const bytes = Buffer.from(vector.buffer.slice(vector.byteOffset, vector.byteOffset + vector.byteLength));
    upsertEmbeddingStmt.run(memoryId, vector.length, bytes, inputHash);
}
717
// FIFO queue of memory_item ids awaiting embedding, with a companion Set for
// O(1) dedupe and a flag ensuring at most one worker loop runs at a time.
const embeddingQueue = [];
const embeddingQueued = new Set();
let embeddingWorkerRunning = false;
// Queue a memory item for (re-)embedding and kick the background worker.
// No-op when embeddings are disabled or the id is already queued.
function enqueueEmbedding(memoryId) {
    if (!embeddingsEnabled)
        return;
    if (embeddingQueued.has(memoryId))
        return;
    embeddingQueued.add(memoryId);
    embeddingQueue.push(memoryId);
    // Fire-and-forget: the worker serializes processing internally.
    void runEmbeddingWorker();
}
729
/**
 * Drain the embedding queue sequentially. Re-entrancy is prevented by the
 * embeddingWorkerRunning flag; per-item failures are logged and skipped so a
 * single bad item cannot stall the rest of the queue.
 */
async function runEmbeddingWorker() {
    if (embeddingWorkerRunning)
        return;
    embeddingWorkerRunning = true;
    try {
        for (let id = embeddingQueue.shift(); id != null; id = embeddingQueue.shift()) {
            embeddingQueued.delete(id);
            try {
                await embedMemoryItemById(id);
            }
            catch (err) {
                console.error("[vectormind] embedding failed:", { id, err });
            }
        }
    }
    finally {
        embeddingWorkerRunning = false;
    }
}
751
/** Inner product over the overlapping prefix of two numeric vectors. */
function dotProduct(a, b) {
    const length = Math.min(a.length, b.length);
    let total = 0;
    for (let i = 0; i < length; i += 1) {
        total += a[i] * b[i];
    }
    return total;
}
758
/**
 * Brute-force semantic search over the local embedding store.
 * Embeds opts.query, scans every candidate vector (optionally restricted to
 * opts.kinds), keeps the top-K by dot product, then hydrates matches with
 * item metadata and a 400-character preview.
 * NOTE(review): dot product acts as cosine similarity only because the
 * embedder normalizes its output (`normalize: true`) — confirm if the
 * embedding backend is ever changed.
 */
async function semanticSearchInternal(opts) {
    if (!embeddingsEnabled) {
        throw new Error("Embeddings are disabled");
    }
    const q = opts.query.trim();
    const embedder = await getEmbedder();
    const qVec = await embedder(q);
    // Load candidate vectors; a kind filter requires joining memory_items.
    // Placeholders are generated per kind — values are bound, not interpolated.
    let candidateRows = [];
    if (opts.kinds?.length) {
        const placeholders = opts.kinds.map(() => "?").join(", ");
        const stmt = db.prepare(`SELECT e.memory_id as memory_id, e.dim as dim, e.vector as vector
FROM embeddings e
JOIN memory_items m ON m.id = e.memory_id
WHERE m.kind IN (${placeholders})`);
        candidateRows = stmt.all(...opts.kinds);
    }
    else {
        candidateRows = db
            .prepare(`SELECT memory_id, dim, vector FROM embeddings`)
            .all();
    }
    // Maintain a small, descending-sorted top-K array while scanning.
    const top = [];
    for (const row of candidateRows) {
        const buf = row.vector;
        if (!buf || buf.byteLength % 4 !== 0)
            continue; // malformed blob: not a whole number of float32s
        const v = new Float32Array(buf.buffer, buf.byteOffset, buf.byteLength / 4);
        // Skip dimension mismatches (e.g. rows embedded with a different model).
        if (row.dim !== v.length || v.length !== qVec.length)
            continue;
        const score = dotProduct(qVec, v);
        if (top.length < opts.topK) {
            top.push({ memory_id: row.memory_id, score });
            top.sort((a, b) => b.score - a.score);
            continue;
        }
        // Top array is full: replace the current worst only on a higher score.
        if (score <= top[top.length - 1].score)
            continue;
        top[top.length - 1] = { memory_id: row.memory_id, score };
        top.sort((a, b) => b.score - a.score);
    }
    // Hydrate matches; items deleted since the scan fall out via filter(Boolean).
    const matches = top
        .map((t) => {
        const item = getMemoryItemByIdStmt.get(t.memory_id);
        if (!item)
            return null;
        const preview = item.content.length > 400 ? `${item.content.slice(0, 400)}…` : item.content;
        return {
            score: t.score,
            item: {
                id: item.id,
                kind: item.kind,
                title: item.title,
                file_path: item.file_path,
                start_line: item.start_line,
                end_line: item.end_line,
                req_id: item.req_id,
                preview,
                content: opts.includeContent ? item.content : undefined,
                metadata_json: item.metadata_json,
                updated_at: item.updated_at,
            },
        };
    })
        .filter(Boolean);
    return { query: q, top_k: opts.topK, matches };
}
824
// Watch the whole project tree. The initial scan (ignoreInitial: false)
// indexes every existing file; awaitWriteFinish debounces rapid writes so we
// index stable file contents.
const watcher = chokidar.watch(projectRoot, {
    ignored: (p) => shouldIgnorePath(p),
    ignoreInitial: false,
    persistent: true,
    awaitWriteFinish: { stabilityThreshold: 200, pollInterval: 50 },
});
// Events fired during the initial scan still index files, but are not
// recorded as "pending changes" — only post-ready events are real user edits.
let watcherReady = false;
watcher.on("add", (p) => {
    if (watcherReady)
        recordPendingChange(p, "add");
    indexFile(p, "add");
});
watcher.on("change", (p) => {
    if (watcherReady)
        recordPendingChange(p, "change");
    indexFile(p, "change");
});
watcher.on("unlink", (p) => {
    if (watcherReady)
        recordPendingChange(p, "unlink");
    removeFileIndexes(p);
});
watcher.on("ready", () => {
    watcherReady = true;
});
watcher.on("error", (err) => console.error("[vectormind] watcher error:", err));
850
// MCP server instance. The instructions string is surfaced to connected
// clients and prescribes the tool-call workflow agents should follow.
const server = new Server({ name: SERVER_NAME, version: SERVER_VERSION }, {
    capabilities: { tools: {} },
    instructions: [
        "VectorMind MCP is available in this session. Use it to avoid guessing project context.",
        "",
        "Required workflow:",
        "- On every new conversation/session: call bootstrap_context({ query: <current goal> }) first (or at least get_brain_dump()) to restore context and retrieve relevant matches from the local vector store.",
        "- BEFORE editing code: call start_requirement(title, background) to set the active requirement.",
        "- AFTER editing + saving: call get_pending_changes() to see unsynced files, then call sync_change_intent(intent, files). (You can omit files to auto-link all pending changes.)",
        "- After major milestones/decisions: call upsert_project_summary(summary) and/or add_note(...) to persist durable context locally.",
        "- When asked to locate code (class/function/type): call query_codebase(query) instead of guessing.",
        "- When you need to recall relevant context from history/code/docs: call semantic_search(query, ...) instead of guessing.",
        "",
        "If tool output conflicts with assumptions, trust the tool output.",
    ].join("\n"),
});
866
// Advertise the static tool catalog to MCP clients. Tool descriptions
// double as workflow guidance for the model. Zod schemas are converted
// to JSON Schema on every list request; the two no-argument tools get a
// fresh empty object schema per call, so no schema instance is shared.
server.setRequestHandler(ListToolsRequestSchema, async () => {
    const noArgsSchema = () => ({ type: "object", properties: {}, additionalProperties: false });
    const tools = [
        {
            name: "start_requirement",
            description: "MUST call BEFORE editing code. Starts/activates a requirement so all subsequent code changes can be linked to a concrete intent (do not edit code without an active requirement).",
            inputSchema: toJsonSchemaCompat(StartRequirementArgsSchema),
        },
        {
            name: "sync_change_intent",
            description: "MUST call AFTER you edit code and save files. Archives the intent summary and links affected files to the current active requirement. If you omit files, the server will auto-link all pending changed files since the last sync.",
            inputSchema: toJsonSchemaCompat(SyncChangeIntentArgsSchema),
        },
        {
            name: "get_brain_dump",
            description: "Restore recent requirements/changes/notes/summary/pending changes. Prefer bootstrap_context() at session start when you also want semantic recall from the local vector store.",
            inputSchema: noArgsSchema(),
        },
        {
            name: "bootstrap_context",
            description: "MUST call at the start of every new chat/session. Returns brain dump + pending changes, and (if you pass query) semantic matches from the local vector store to avoid guessing.",
            inputSchema: toJsonSchemaCompat(BootstrapContextArgsSchema),
        },
        {
            name: "get_pending_changes",
            description: "List files that changed locally but have not been acknowledged by sync_change_intent yet. Use this to see what needs syncing (or omit files in sync_change_intent to auto-link them).",
            inputSchema: noArgsSchema(),
        },
        {
            name: "query_codebase",
            description: "Search the symbol index for class/function/type names (or substrings) to locate definitions by file path and signature. Use this when you need to find code—do not guess locations.",
            inputSchema: toJsonSchemaCompat(QueryCodebaseArgsSchema),
        },
        {
            name: "upsert_project_summary",
            description: "Save/update the project-level context summary (written by the AI in the conversation). Call this after major milestones/decisions so future sessions can recover context quickly.",
            inputSchema: toJsonSchemaCompat(UpsertProjectSummaryArgsSchema),
        },
        {
            name: "add_note",
            description: "Save a durable project note (decision, constraint, TODO, architecture detail). Use this to persist important context locally instead of relying on chat memory.",
            inputSchema: toJsonSchemaCompat(AddNoteArgsSchema),
        },
        {
            name: "semantic_search",
            description: "Semantic search across the local memory store (requirements, change intents, notes, project summary, and indexed code/doc chunks). Use this to retrieve relevant context instead of guessing.",
            inputSchema: toJsonSchemaCompat(SemanticSearchArgsSchema),
        },
    ];
    return { tools };
});
917
// Central tool dispatcher. Routes by tool name; each branch validates its
// arguments with a zod schema (`.parse` throws on invalid input, which the
// outer catch converts into an isError text response). Every result is
// returned as a single pretty-printed JSON text content block.
server.setRequestHandler(CallToolRequestSchema, async (request) => {
    const toolName = request.params.name;
    // Arguments may be absent on the wire; default to an empty object.
    const rawArgs = (request.params.arguments ?? {});
    try {
        if (toolName === "start_requirement") {
            const args = StartRequirementArgsSchema.parse(rawArgs);
            // NOTE: `||` (not `??`) means an empty-string background is stored as NULL.
            const info = insertRequirementStmt.run(args.title, args.background || null);
            const id = Number(info.lastInsertRowid);
            const background = args.background?.trim() ?? "";
            // Memory-item content is "<title>\n\n<background>" when a background exists.
            const content = background ? `${args.title}\n\n${background}` : args.title;
            // Mirror the requirement into the memory store so it becomes searchable.
            const memoryInfo = insertMemoryItemStmt.run("requirement", args.title, content, null, null, null, id, safeJson({ status: "active" }), sha256Hex(content));
            const memory_id = Number(memoryInfo.lastInsertRowid);
            // Queue the new memory item for embedding.
            enqueueEmbedding(memory_id);
            return {
                content: [
                    {
                        type: "text",
                        text: JSON.stringify({ ok: true, requirement: { id, title: args.title }, memory_item: { id: memory_id } }, null, 2),
                    },
                ],
            };
        }
        if (toolName === "sync_change_intent") {
            const args = SyncChangeIntentArgsSchema.parse(rawArgs);
            // Syncing requires an active requirement to link the change intent to.
            const active = getActiveRequirementStmt.get();
            if (!active) {
                return {
                    isError: true,
                    content: [
                        {
                            type: "text",
                            text: JSON.stringify({
                                ok: false,
                                error: "No active requirement. Call start_requirement(title, background) before syncing change intent.",
                            }, null, 2),
                        },
                    ],
                };
            }
            const created = [];
            const synced_files = [];
            // All rows for this sync are written atomically in one
            // better-sqlite3 transaction (the callback runs synchronously).
            const insertTx = db.transaction(() => {
                // Target selection, in priority order:
                //   1. explicit `files` argument (source: "args"),
                //   2. otherwise all pending watcher-recorded changes (source: "pending"),
                //   3. otherwise a single "(unspecified)" placeholder row.
                const targets = [];
                if (args.files.length) {
                    for (const f of args.files) {
                        const rawFile = String(f);
                        const dbFilePath = normalizeToDbPath(rawFile);
                        targets.push({ rawFile, dbFilePath, event: "manual", source: "args" });
                    }
                    // Explicitly-named files are acknowledged: clear their pending entries.
                    for (const t of targets) {
                        deletePendingChangeStmt.run(t.dbFilePath);
                    }
                }
                else {
                    const pending = listPendingChangesStmt.all();
                    if (pending.length) {
                        for (const p of pending) {
                            targets.push({
                                rawFile: p.file_path,
                                dbFilePath: p.file_path,
                                event: p.last_event,
                                source: "pending",
                            });
                        }
                        // Everything pending is being linked, so drain the whole queue.
                        deleteAllPendingChangesStmt.run();
                    }
                    else {
                        targets.push({
                            rawFile: "(unspecified)",
                            dbFilePath: "(unspecified)",
                            event: "manual",
                            source: "unspecified",
                        });
                    }
                }
                for (const t of targets) {
                    const isUnspecified = t.dbFilePath === "(unspecified)";
                    // A change-log row is written even for the placeholder target;
                    // only the memory item's file field is nulled in that case.
                    const changeInfo = insertChangeLogStmt.run(active.id, t.dbFilePath, args.intent);
                    const change_log_id = Number(changeInfo.lastInsertRowid);
                    const memoryInfo = insertMemoryItemStmt.run("change_intent", active.title, args.intent, isUnspecified ? null : t.dbFilePath, null, null, active.id, safeJson({ change_log_id, event: t.event, source: t.source }), sha256Hex(args.intent));
                    const memory_item_id = Number(memoryInfo.lastInsertRowid);
                    enqueueEmbedding(memory_item_id);
                    synced_files.push({ file_path: t.dbFilePath, event: t.event, source: t.source });
                    created.push({
                        file_path: t.dbFilePath,
                        event: t.event,
                        source: t.source,
                        change_log_id,
                        memory_item_id,
                    });
                    // Re-index real files (not deletions, not the placeholder),
                    // resolving relative paths against the project root.
                    if (!isUnspecified && t.event !== "unlink") {
                        const abs = path.isAbsolute(t.rawFile)
                            ? t.rawFile
                            : path.join(projectRoot, t.rawFile);
                        indexFile(abs, "manual");
                    }
                }
            });
            insertTx();
            return {
                content: [
                    {
                        type: "text",
                        text: JSON.stringify({
                            ok: true,
                            linked_to_requirement: { id: active.id, title: active.title },
                            synced_files,
                            created,
                        }, null, 2),
                    },
                ],
            };
        }
        if (toolName === "bootstrap_context") {
            const args = BootstrapContextArgsSchema.parse(rawArgs);
            // Last 5 requirements, each with up to 20 of its change-log entries.
            const recent = listRecentRequirementsStmt.all(5);
            const items = recent.map((req) => {
                const changes = listChangeLogsForRequirementStmt.all(req.id, 20);
                return { requirement: req, recent_changes: changes };
            });
            const project_summary = getProjectSummaryStmt.get();
            const recent_notes = listRecentNotesStmt.all(10);
            const pending_changes = listPendingChangesStmt.all();
            const q = args.query?.trim() ?? "";
            // Semantic recall is optional: only when a non-empty query was
            // supplied AND embeddings are enabled; otherwise `semantic` is null.
            const semantic = q && embeddingsEnabled
                ? await semanticSearchInternal({
                    query: q,
                    topK: args.top_k,
                    kinds: args.kinds?.length ? args.kinds : null,
                    includeContent: args.include_content,
                })
                : null;
            return {
                content: [
                    {
                        type: "text",
                        text: JSON.stringify({
                            ok: true,
                            generated_at: new Date().toISOString(),
                            embeddings: {
                                enabled: embeddingsEnabled,
                                model: embedModelName,
                                embed_files: embedFilesMode,
                            },
                            project_summary: project_summary ?? null,
                            recent_notes,
                            pending_changes,
                            items,
                            semantic,
                        }, null, 2),
                    },
                ],
            };
        }
        if (toolName === "get_brain_dump") {
            // Same payload as bootstrap_context, minus the semantic matches.
            const recent = listRecentRequirementsStmt.all(5);
            const items = recent.map((req) => {
                const changes = listChangeLogsForRequirementStmt.all(req.id, 20);
                return { requirement: req, recent_changes: changes };
            });
            const project_summary = getProjectSummaryStmt.get();
            const recent_notes = listRecentNotesStmt.all(10);
            const pending_changes = listPendingChangesStmt.all();
            return {
                content: [
                    {
                        type: "text",
                        text: JSON.stringify({
                            ok: true,
                            generated_at: new Date().toISOString(),
                            embeddings: {
                                enabled: embeddingsEnabled,
                                model: embedModelName,
                                embed_files: embedFilesMode,
                            },
                            project_summary: project_summary ?? null,
                            recent_notes,
                            pending_changes,
                            items,
                        }, null, 2),
                    },
                ],
            };
        }
        if (toolName === "get_pending_changes") {
            const pending = listPendingChangesStmt.all();
            return {
                content: [
                    {
                        type: "text",
                        text: JSON.stringify({ ok: true, pending }, null, 2),
                    },
                ],
            };
        }
        if (toolName === "query_codebase") {
            const args = QueryCodebaseArgsSchema.parse(rawArgs);
            const q = args.query.trim();
            // Escape LIKE wildcards in the user query, then match as a substring.
            const escaped = escapeLike(q);
            const like = `%${escaped}%`;
            // NOTE(review): parameter order (like, like, q, like, 50) must mirror
            // the placeholders in searchSymbolsStmt; the bare `q` presumably
            // ranks exact name matches first — confirm against the statement.
            const rows = searchSymbolsStmt.all(like, like, q, like, 50);
            return {
                content: [
                    {
                        type: "text",
                        text: JSON.stringify({ ok: true, query: q, matches: rows }, null, 2),
                    },
                ],
            };
        }
        if (toolName === "upsert_project_summary") {
            const args = UpsertProjectSummaryArgsSchema.parse(rawArgs);
            const summary = args.summary.trim();
            // Content hash stored alongside the summary — presumably used for
            // change detection / embedding dedupe; verify at the schema.
            const contentHash = sha256Hex(summary);
            upsertProjectSummaryStmt.run(summary, safeJson({ source: "assistant" }), contentHash);
            // Re-read the stored row to obtain its id and return the canonical record.
            const row = getProjectSummaryStmt.get();
            if (row)
                enqueueEmbedding(row.id);
            return {
                content: [
                    {
                        type: "text",
                        text: JSON.stringify({ ok: true, project_summary: row ?? null }, null, 2),
                    },
                ],
            };
        }
        if (toolName === "add_note") {
            const args = AddNoteArgsSchema.parse(rawArgs);
            const title = args.title?.trim() ?? "";
            const content = args.content.trim();
            // Empty/whitespace-only titles are stored as NULL; tags default to [].
            const info = insertMemoryItemStmt.run("note", title || null, content, null, null, null, null, safeJson({ tags: args.tags ?? [] }), sha256Hex(content));
            const id = Number(info.lastInsertRowid);
            enqueueEmbedding(id);
            return {
                content: [
                    {
                        type: "text",
                        text: JSON.stringify({ ok: true, note: { id } }, null, 2),
                    },
                ],
            };
        }
        if (toolName === "semantic_search") {
            // Hard guard: without embeddings there is nothing to search against.
            if (!embeddingsEnabled) {
                return {
                    isError: true,
                    content: [
                        {
                            type: "text",
                            text: JSON.stringify({
                                ok: false,
                                error: "Embeddings are disabled (set VECTORMIND_EMBEDDINGS=on) so semantic_search is unavailable.",
                            }, null, 2),
                        },
                    ],
                };
            }
            const args = SemanticSearchArgsSchema.parse(rawArgs);
            const result = await semanticSearchInternal({
                query: args.query,
                topK: args.top_k,
                kinds: args.kinds?.length ? args.kinds : null,
                includeContent: args.include_content,
            });
            return {
                content: [
                    {
                        type: "text",
                        text: JSON.stringify({ ok: true, ...result }, null, 2),
                    },
                ],
            };
        }
        // No branch matched: unknown tool name.
        return {
            isError: true,
            content: [{ type: "text", text: `Unknown tool: ${toolName}` }],
        };
    }
    catch (err) {
        // Any thrown error (zod validation, SQLite, etc.) is reported as an
        // isError text result instead of crashing the server.
        return {
            isError: true,
            content: [{ type: "text", text: String(err) }],
        };
    }
});
1203
// Serve MCP over stdio (stdin/stdout). The top-level await defers the
// remaining module evaluation (shutdown wiring below) until connected.
const transport = new StdioServerTransport();
await server.connect(transport);
1205
/**
 * Gracefully tear down long-lived resources, then exit.
 *
 * Each resource is closed independently (best effort): a failure closing
 * one is logged to stderr and does not prevent closing the other.
 *
 * @param {"SIGINT"|"SIGTERM"} signal - Signal that triggered shutdown;
 *   selects the conventional 128+signal exit code (SIGTERM -> 143,
 *   anything else -> 130).
 */
async function shutdown(signal) {
    const closers = [
        ["watcher", () => watcher.close()],
        ["db", () => db.close()],
    ];
    for (const [label, close] of closers) {
        try {
            await close();
        }
        catch (err) {
            console.error(`[vectormind] ${label} close error:`, err);
        }
    }
    process.exit(signal === "SIGTERM" ? 143 : 130);
}
1220
// Install one-shot handlers so Ctrl-C / kill trigger a clean shutdown.
// `void` marks the returned promise as intentionally not awaited.
for (const signal of ["SIGINT", "SIGTERM"]) {
    process.once(signal, () => void shutdown(signal));
}
1222
+ //# sourceMappingURL=index.js.map