@coreyuan/vector-mind 1.0.20 → 1.0.32

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -1,8 +1,10 @@
1
1
  #!/usr/bin/env node
2
2
  import path from "node:path";
3
3
  import fs from "node:fs";
4
+ import * as readline from "node:readline";
4
5
  import crypto from "node:crypto";
5
6
  import os from "node:os";
7
+ import { spawnSync } from "node:child_process";
6
8
  import { fileURLToPath } from "node:url";
7
9
  import chokidar from "chokidar";
8
10
  import Database from "better-sqlite3";
@@ -11,8 +13,10 @@ import { Server } from "@modelcontextprotocol/sdk/server/index.js";
11
13
  import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
12
14
  import { toJsonSchemaCompat } from "@modelcontextprotocol/sdk/server/zod-json-schema-compat.js";
13
15
  import { CallToolRequestSchema, ListToolsRequestSchema, } from "@modelcontextprotocol/sdk/types.js";
16
+ import { BUILTIN_CONVENTIONS } from "./builtin-conventions.js";
17
+ import { BUILTIN_ARCHITECTURE_AND_CODE_ORGANIZATION_INSTRUCTIONS, BUILTIN_DESTRUCTIVE_OPERATION_GUARD_INSTRUCTIONS, BUILTIN_LOW_OVERHEAD_WORKFLOW_INSTRUCTIONS, BUILTIN_PLAN_LITE_INSTRUCTIONS, BUILTIN_WRITE_POLICY_INSTRUCTIONS, } from "./builtin-instructions.js";
14
18
  const SERVER_NAME = "vector-mind";
15
- const SERVER_VERSION = "1.0.19";
19
+ const SERVER_VERSION = "1.0.32";
16
20
  const rootFromEnv = process.env.VECTORMIND_ROOT?.trim() ?? "";
17
21
  const prettyJsonOutput = ["1", "true", "on", "yes"].includes((process.env.VECTORMIND_PRETTY_JSON ?? "").trim().toLowerCase());
18
22
  const debugLogEnabled = ["1", "true", "on", "yes"].includes((process.env.VECTORMIND_DEBUG_LOG ?? "").trim().toLowerCase());
@@ -61,6 +65,33 @@ const PENDING_PRUNE_EVERY = (() => {
61
65
  return 500;
62
66
  return n;
63
67
  })();
68
+ const RIPGREP_RESOLVE_TIMEOUT_MS = 5_000;
69
+ const RIPGREP_SEARCH_TIMEOUT_MS = 30_000;
70
+ const RIPGREP_MAX_BUFFER_BYTES = 16 * 1024 * 1024;
71
+ let cachedRipgrepCommand;
72
+ let cachedRipgrepResolveError = null;
73
+ function getCodexHomeDir() {
74
+ const raw = process.env.CODEX_HOME?.trim();
75
+ if (raw)
76
+ return path.resolve(raw);
77
+ return path.join(os.homedir(), ".codex");
78
+ }
79
+ function getAgentsHomeDir() {
80
+ const raw = process.env.AGENTS_HOME?.trim();
81
+ if (raw)
82
+ return path.resolve(raw);
83
+ return path.join(os.homedir(), ".agents");
84
+ }
85
+ function getAllowedCodexTextRoots() {
86
+ const codexHome = getCodexHomeDir();
87
+ const agentsHome = getAgentsHomeDir();
88
+ return Array.from(new Set([
89
+ path.join(codexHome, "skills"),
90
+ path.join(codexHome, "prompts"),
91
+ path.join(codexHome, "rules"),
92
+ path.join(agentsHome, "skills"),
93
+ ].map((p) => path.resolve(p))));
94
+ }
64
95
  const INDEX_MAX_CODE_BYTES = (() => {
65
96
  const raw = process.env.VECTORMIND_INDEX_MAX_CODE_BYTES?.trim();
66
97
  if (!raw)
@@ -227,8 +258,18 @@ function summarizeActivityEvent(e) {
227
258
  return `pending_list returned=${String(d.returned ?? "")} total=${String(d.total ?? "")}`;
228
259
  case "semantic_search":
229
260
  return `semantic_search mode=${String(d.mode ?? "")} q=${String(d.query ?? "")} matches=${String(d.matches ?? "")}`;
261
+ case "grep":
262
+ return `grep backend=${String(d.backend ?? "")} q=${String(d.query ?? "")} matches=${String(d.matches ?? "")} truncated=${String(d.truncated ?? "")}`;
230
263
  case "query_codebase":
231
264
  return `query_codebase q=${String(d.query ?? "")} matches=${String(d.matches ?? "")}`;
265
+ case "read_file_lines":
266
+ return `read_file_lines file=${String(d.file_path ?? "")} returned=${String(d.returned ?? "")} truncated=${String(d.truncated ?? "")}`;
267
+ case "read_file_text":
268
+ return `read_file_text file=${String(d.file_path ?? "")} returned=${String(d.returned_chars ?? "")}/${String(d.total_chars ?? "")} truncated=${String(d.truncated ?? "")}`;
269
+ case "list_project_files":
270
+ return `list_project_files path=${String(d.path ?? "")} returned=${String(d.returned ?? "")} scanned=${String(d.scanned ?? "")} truncated=${String(d.truncated ?? "")}`;
271
+ case "read_codex_text_file":
272
+ return `read_codex_text_file file=${String(d.file_path ?? "")} returned=${String(d.returned_chars ?? "")}/${String(d.total_chars ?? "")} truncated=${String(d.truncated ?? "")}`;
232
273
  case "start_requirement":
233
274
  return `start_requirement #${String(d.req_id ?? "")} ${String(d.title ?? "")}`;
234
275
  case "sync_change_intent":
@@ -470,6 +511,22 @@ const IGNORED_PATH_SEGMENTS = new Set([
470
511
  "x64",
471
512
  "x86",
472
513
  ].map((s) => s.toLowerCase()));
514
+ const NOISE_FILE_SUFFIXES = [
515
+ ".min.js",
516
+ ".min.css",
517
+ ".bundle.js",
518
+ ".bundle.css",
519
+ ".chunk.js",
520
+ ".chunk.css",
521
+ ];
522
+ const NOISE_FILE_BASENAMES = [
523
+ "package-lock.json",
524
+ "pnpm-lock.yaml",
525
+ "yarn.lock",
526
+ "bun.lockb",
527
+ "cargo.lock",
528
+ "composer.lock",
529
+ ];
473
530
  const IGNORED_LIKE_PATTERNS = (() => {
474
531
  const patterns = [];
475
532
  for (const seg of IGNORED_PATH_SEGMENTS) {
@@ -573,27 +630,11 @@ function pruneIgnoredIndexesByPathPatterns() {
573
630
  function pruneFilenameNoiseIndexes() {
574
631
  if (!db)
575
632
  return { chunks_deleted: 0, symbols_deleted: 0 };
576
- const suffixes = [
577
- ".min.js",
578
- ".min.css",
579
- ".bundle.js",
580
- ".bundle.css",
581
- ".chunk.js",
582
- ".chunk.css",
583
- ];
584
- const baseNames = [
585
- "package-lock.json",
586
- "pnpm-lock.yaml",
587
- "yarn.lock",
588
- "bun.lockb",
589
- "cargo.lock",
590
- "composer.lock",
591
- ];
592
633
  try {
593
- const suffixWhere = suffixes.map(() => "LOWER(file_path) LIKE ?").join(" OR ");
594
- const baseWhere = baseNames.map(() => "LOWER(file_path) LIKE ?").join(" OR ");
595
- const suffixArgs = suffixes.map((s) => `%${s}`);
596
- const baseArgs = baseNames.map((n) => `%/${n}`);
634
+ const suffixWhere = NOISE_FILE_SUFFIXES.map(() => "LOWER(file_path) LIKE ?").join(" OR ");
635
+ const baseWhere = NOISE_FILE_BASENAMES.map(() => "LOWER(file_path) LIKE ?").join(" OR ");
636
+ const suffixArgs = NOISE_FILE_SUFFIXES.map((s) => `%${s}`);
637
+ const baseArgs = NOISE_FILE_BASENAMES.map((n) => `%/${n}`);
597
638
  const whereParts = [];
598
639
  const args = [];
599
640
  if (suffixWhere) {
@@ -675,21 +716,9 @@ function isSymbolIndexableFile(filePath) {
675
716
  }
676
717
  function shouldIgnoreContentFile(filePath) {
677
718
  const base = path.basename(filePath).toLowerCase();
678
- const ignoreNames = new Set([
679
- "package-lock.json",
680
- "pnpm-lock.yaml",
681
- "yarn.lock",
682
- "bun.lockb",
683
- "cargo.lock",
684
- "composer.lock",
685
- ]);
686
- if (ignoreNames.has(base))
687
- return true;
688
- if (base.endsWith(".min.js") || base.endsWith(".min.css"))
689
- return true;
690
- if (base.endsWith(".bundle.js") || base.endsWith(".bundle.css"))
719
+ if (NOISE_FILE_BASENAMES.includes(base))
691
720
  return true;
692
- if (base.endsWith(".chunk.js") || base.endsWith(".chunk.css"))
721
+ if (NOISE_FILE_SUFFIXES.some((suffix) => base.endsWith(suffix)))
693
722
  return true;
694
723
  return false;
695
724
  }
@@ -1133,6 +1162,66 @@ const SyncChangeIntentArgsSchema = ProjectRootArgSchema.merge(z.object({
1133
1162
  const QueryCodebaseArgsSchema = ProjectRootArgSchema.merge(z.object({
1134
1163
  query: z.string().min(1),
1135
1164
  }));
1165
+ const GrepArgsSchema = ProjectRootArgSchema.merge(z.object({
1166
+ // Pattern to search for. Defaults to regex mode for parity with tools like ripgrep.
1167
+ query: z.string().min(1),
1168
+ mode: z.enum(["regex", "literal"]).optional().default("regex"),
1169
+ // If case_sensitive is omitted and smart_case=true, uppercase => case-sensitive, otherwise case-insensitive.
1170
+ smart_case: z.boolean().optional().default(true),
1171
+ case_sensitive: z.boolean().optional(),
1172
+ // Compatibility knob for the indexed fallback when ripgrep is unavailable.
1173
+ literal_hint: z.string().optional().default(""),
1174
+ // Compatibility knob for the indexed fallback when ripgrep is unavailable.
1175
+ kinds: z.array(z.string().min(1)).optional(),
1176
+ include_paths: z.array(z.string().min(1)).optional(),
1177
+ exclude_paths: z.array(z.string().min(1)).optional(),
1178
+ max_results: z.number().int().min(1).max(5000).optional().default(200),
1179
+ // Compatibility knob for the indexed fallback when ripgrep is unavailable.
1180
+ max_candidates: z.number().int().min(1).max(50_000).optional(),
1181
+ }));
1182
+ const ReadFileLinesArgsSchema = ProjectRootArgSchema.merge(z.object({
1183
+ // Relative to project_root, or an absolute path under project_root.
1184
+ path: z.string().min(1),
1185
+ from_line: z.number().int().min(1).optional().default(1),
1186
+ to_line: z.number().int().min(1).optional(),
1187
+ // Convenience for "head": if set, reads from_line..(from_line+total_count-1) unless to_line is provided.
1188
+ total_count: z.number().int().min(1).optional(),
1189
+ // Hard limits to avoid huge token blow-ups.
1190
+ max_lines: z.number().int().min(1).max(2000).optional().default(400),
1191
+ max_chars: z.number().int().min(200).max(200_000).optional().default(20_000),
1192
+ }));
1193
+ const ReadFileTextArgsSchema = ProjectRootArgSchema.merge(z.object({
1194
+ // Relative to project_root, or an absolute path under project_root.
1195
+ path: z.string().min(1),
1196
+ // Character offset in the decoded UTF-8 text.
1197
+ offset: z.number().int().min(0).optional().default(0),
1198
+ // Hard limit on returned text to avoid huge outputs.
1199
+ max_chars: z.number().int().min(1).max(200_000).optional().default(20_000),
1200
+ // Safety guard for raw reads; use read_file_lines on larger files.
1201
+ max_file_bytes: z.number().int().min(1_000).max(5_000_000).optional().default(1_000_000),
1202
+ }));
1203
+ const ReadCodexTextFileArgsSchema = ProjectRootArgSchema.merge(z.object({
1204
+ // Absolute path, file:// URI, or a path under CODEX_HOME / AGENTS_HOME allowed roots.
1205
+ path: z.string().min(1),
1206
+ offset: z.number().int().min(0).optional().default(0),
1207
+ max_chars: z.number().int().min(1).max(200_000).optional().default(20_000),
1208
+ max_file_bytes: z.number().int().min(1_000).max(5_000_000).optional().default(1_000_000),
1209
+ }));
1210
+ const ListProjectFilesArgsSchema = ProjectRootArgSchema.merge(z.object({
1211
+ // Relative directory/file path under project_root. "." means the project root.
1212
+ path: z.string().optional().default("."),
1213
+ recursive: z.boolean().optional().default(false),
1214
+ max_depth: z.number().int().min(1).max(20).optional().default(4),
1215
+ include_files: z.boolean().optional().default(true),
1216
+ include_dirs: z.boolean().optional().default(true),
1217
+ include_hidden: z.boolean().optional().default(false),
1218
+ respect_ignore: z.boolean().optional().default(true),
1219
+ include_paths: z.array(z.string().min(1)).optional(),
1220
+ exclude_paths: z.array(z.string().min(1)).optional(),
1221
+ extensions: z.array(z.string().min(1)).optional(),
1222
+ max_results: z.number().int().min(1).max(5000).optional().default(200),
1223
+ include_stats: z.boolean().optional().default(false),
1224
+ }));
1136
1225
  const UpsertProjectSummaryArgsSchema = ProjectRootArgSchema.merge(z.object({
1137
1226
  summary: z.string().min(1),
1138
1227
  }));
@@ -1429,6 +1518,34 @@ function toMemoryItemPreview(row, includeContent, previewChars, contentMaxChars)
1429
1518
  updated_at: row.updated_at,
1430
1519
  };
1431
1520
  }
1521
+ function getBuiltinConventionRows() {
1522
+ return BUILTIN_CONVENTIONS.map((spec, idx) => ({
1523
+ id: -1000 - idx,
1524
+ kind: "convention",
1525
+ title: spec.key,
1526
+ content: spec.content,
1527
+ file_path: null,
1528
+ start_line: null,
1529
+ end_line: null,
1530
+ req_id: null,
1531
+ metadata_json: safeJson({ source: "builtin", key: spec.key, tags: spec.tags ?? [] }),
1532
+ content_hash: sha256Hex(spec.content),
1533
+ created_at: "builtin",
1534
+ updated_at: "builtin",
1535
+ }));
1536
+ }
1537
+ function getConventionPreviews(conventionsLimit, previewChars, contentMaxChars) {
1538
+ if (conventionsLimit <= 0)
1539
+ return [];
1540
+ const builtin = getBuiltinConventionRows()
1541
+ .map((row) => toMemoryItemPreview(row, false, previewChars, contentMaxChars))
1542
+ .slice(0, conventionsLimit);
1543
+ if (builtin.length >= conventionsLimit)
1544
+ return builtin;
1545
+ const remaining = conventionsLimit - builtin.length;
1546
+ const stored = listConventionsStmt.all(remaining).map((c) => toMemoryItemPreview(c, false, previewChars, contentMaxChars));
1547
+ return [...builtin, ...stored];
1548
+ }
1432
1549
  function toRequirementPreview(req, includeContent, previewChars, contentMaxChars) {
1433
1550
  const context = req.context_data ?? null;
1434
1551
  const contextPreview = context ? makePreviewText(context, previewChars) : null;
@@ -1687,18 +1804,740 @@ async function semanticSearchHybridInternal(opts) {
1687
1804
  }
1688
1805
  return likeSearchInternal(opts);
1689
1806
  }
1690
- const server = new Server({ name: SERVER_NAME, version: SERVER_VERSION }, {
1691
- capabilities: { tools: {} },
1692
- instructions: [
1807
+ function escapeRegExp(literal) {
1808
+ return literal.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
1809
+ }
1810
+ function hasUppercaseAscii(s) {
1811
+ return /[A-Z]/.test(s);
1812
+ }
1813
+ function extractLongestLiteralFromRegex(pattern) {
1814
+ // Best-effort extraction: pull the longest literal run to use as an indexed candidate hint.
1815
+ // This is intentionally conservative; if we can't find a reasonable literal anchor, callers
1816
+ // should pass `literal_hint` or narrow with include_paths.
1817
+ let best = "";
1818
+ let cur = "";
1819
+ let inClass = false;
1820
+ const flush = () => {
1821
+ if (cur.length > best.length)
1822
+ best = cur;
1823
+ cur = "";
1824
+ };
1825
+ for (let i = 0; i < pattern.length; i++) {
1826
+ const ch = pattern[i] ?? "";
1827
+ if (!ch)
1828
+ break;
1829
+ if (inClass) {
1830
+ // Skip until the closing bracket.
1831
+ if (ch === "]")
1832
+ inClass = false;
1833
+ flush();
1834
+ continue;
1835
+ }
1836
+ if (ch === "[") {
1837
+ inClass = true;
1838
+ flush();
1839
+ continue;
1840
+ }
1841
+ if (ch === "\\") {
1842
+ const next = pattern[i + 1] ?? "";
1843
+ if (!next) {
1844
+ flush();
1845
+ continue;
1846
+ }
1847
+ // Common regex escapes that are NOT literal characters.
1848
+ if (/[dDsSwWbB0-9]/.test(next)) {
1849
+ flush();
1850
+ i += 1;
1851
+ continue;
1852
+ }
1853
+ // Treat \x as literal x (e.g. \( \) \. \\).
1854
+ cur += next;
1855
+ i += 1;
1856
+ continue;
1857
+ }
1858
+ // Regex metacharacters.
1859
+ if (".*+?^$|(){}".includes(ch)) {
1860
+ flush();
1861
+ continue;
1862
+ }
1863
+ cur += ch;
1864
+ }
1865
+ flush();
1866
+ return best;
1867
+ }
1868
+ function normalizePathNeedle(s) {
1869
+ return s.replace(/\\/g, "/").toLowerCase();
1870
+ }
1871
+ function passesPathFilters(filePath, includePaths, excludePaths) {
1872
+ const fp = filePath.toLowerCase();
1873
+ if (excludePaths?.length) {
1874
+ for (const raw of excludePaths) {
1875
+ const n = normalizePathNeedle(raw);
1876
+ if (!n)
1877
+ continue;
1878
+ if (fp.includes(n))
1879
+ return false;
1880
+ }
1881
+ }
1882
+ if (includePaths?.length) {
1883
+ for (const raw of includePaths) {
1884
+ const n = normalizePathNeedle(raw);
1885
+ if (!n)
1886
+ continue;
1887
+ if (fp.includes(n))
1888
+ return true;
1889
+ }
1890
+ return false;
1891
+ }
1892
+ return true;
1893
+ }
1894
+ function buildLineStarts(text) {
1895
+ const starts = [0];
1896
+ for (let i = 0; i < text.length; i++) {
1897
+ if (text.charCodeAt(i) === 10)
1898
+ starts.push(i + 1); // '\n'
1899
+ }
1900
+ return starts;
1901
+ }
1902
+ function lineIndexForOffset(lineStarts, offset) {
1903
+ let lo = 0;
1904
+ let hi = lineStarts.length - 1;
1905
+ while (lo <= hi) {
1906
+ const mid = (lo + hi) >> 1;
1907
+ const v = lineStarts[mid] ?? 0;
1908
+ if (v <= offset)
1909
+ lo = mid + 1;
1910
+ else
1911
+ hi = mid - 1;
1912
+ }
1913
+ return Math.max(0, lo - 1);
1914
+ }
1915
+ function compileGrepRegex(opts) {
1916
+ const flags = `${opts.caseSensitive ? "" : "i"}gm`;
1917
+ const source = opts.mode === "literal" ? escapeRegExp(opts.query) : opts.query;
1918
+ return new RegExp(source, flags);
1919
+ }
1920
+ function trimGrepText(input, maxChars) {
1921
+ if (input.length <= maxChars)
1922
+ return input;
1923
+ return `${input.slice(0, maxChars)}…`;
1924
+ }
1925
+ function buildGrepPreviewSnippet(lineText, col, maxChars = 500) {
1926
+ const clean = lineText.replace(/\r$/, "");
1927
+ if (clean.length <= maxChars)
1928
+ return clean;
1929
+ const matchIndex = Math.max(0, col - 1);
1930
+ let start = Math.max(0, matchIndex - Math.floor(maxChars * 0.35));
1931
+ if (start + maxChars > clean.length)
1932
+ start = Math.max(0, clean.length - maxChars);
1933
+ const end = Math.min(clean.length, start + maxChars);
1934
+ let snippet = clean.slice(start, end);
1935
+ if (start > 0)
1936
+ snippet = `…${snippet}`;
1937
+ if (end < clean.length)
1938
+ snippet = `${snippet}…`;
1939
+ return snippet;
1940
+ }
1941
+ function extractGrepMatchText(opts) {
1942
+ const clean = opts.lineText.replace(/\r$/, "");
1943
+ const startIndex = Math.max(0, opts.col - 1);
1944
+ if (opts.mode === "literal") {
1945
+ const slice = clean.slice(startIndex, startIndex + opts.query.length) || opts.query;
1946
+ return trimGrepText(slice, 200);
1947
+ }
1948
+ try {
1949
+ const flags = opts.caseSensitive ? "m" : "im";
1950
+ const anchored = new RegExp(opts.query, flags);
1951
+ const tail = clean.slice(startIndex);
1952
+ const found = anchored.exec(tail);
1953
+ if (found?.index === 0 && found[0])
1954
+ return trimGrepText(found[0], 200);
1955
+ }
1956
+ catch { }
1957
+ const fallback = clean.slice(startIndex, Math.min(clean.length, startIndex + 200));
1958
+ return trimGrepText(fallback || opts.query, 200);
1959
+ }
1960
+ function toProcessText(value) {
1961
+ if (typeof value === "string")
1962
+ return value;
1963
+ if (value == null)
1964
+ return "";
1965
+ return value.toString("utf8");
1966
+ }
1967
+ function formatProcessFailure(result) {
1968
+ if (result.error)
1969
+ return `${result.error.name}: ${result.error.message}`;
1970
+ const stderr = toProcessText(result.stderr).trim();
1971
+ if (stderr)
1972
+ return stderr;
1973
+ const stdout = toProcessText(result.stdout).trim();
1974
+ if (stdout)
1975
+ return stdout;
1976
+ if (typeof result.status === "number")
1977
+ return `exit ${result.status}`;
1978
+ if (result.signal)
1979
+ return `signal ${result.signal}`;
1980
+ return "unknown failure";
1981
+ }
1982
+ function buildRipgrepEnv() {
1983
+ const env = { ...process.env };
1984
+ delete env.RIPGREP_CONFIG_PATH;
1985
+ return env;
1986
+ }
1987
+ function pushUniqueCandidate(candidates, seen, raw) {
1988
+ const value = raw?.trim();
1989
+ if (!value || seen.has(value))
1990
+ return;
1991
+ seen.add(value);
1992
+ candidates.push(value);
1993
+ }
1994
+ function listChildDirsSafe(dirPath) {
1995
+ try {
1996
+ return fs
1997
+ .readdirSync(dirPath, { withFileTypes: true })
1998
+ .filter((entry) => entry.isDirectory())
1999
+ .map((entry) => path.join(dirPath, entry.name));
2000
+ }
2001
+ catch {
2002
+ return [];
2003
+ }
2004
+ }
2005
+ function collectRipgrepCandidates() {
2006
+ const candidates = [];
2007
+ const seen = new Set();
2008
+ const override = process.env.VECTORMIND_RG_PATH?.trim();
2009
+ if (override)
2010
+ pushUniqueCandidate(candidates, seen, path.resolve(override));
2011
+ if (process.platform === "win32") {
2012
+ pushUniqueCandidate(candidates, seen, "rg.exe");
2013
+ pushUniqueCandidate(candidates, seen, "rg");
2014
+ }
2015
+ else {
2016
+ pushUniqueCandidate(candidates, seen, "rg");
2017
+ }
2018
+ for (const rawDir of (process.env.PATH ?? "").split(path.delimiter)) {
2019
+ const dir = rawDir.trim().replace(/^"+|"+$/g, "");
2020
+ if (!dir)
2021
+ continue;
2022
+ if (process.platform === "win32") {
2023
+ pushUniqueCandidate(candidates, seen, path.join(dir, "rg.exe"));
2024
+ pushUniqueCandidate(candidates, seen, path.join(dir, "rg"));
2025
+ }
2026
+ else {
2027
+ pushUniqueCandidate(candidates, seen, path.join(dir, "rg"));
2028
+ }
2029
+ }
2030
+ if (process.platform === "win32") {
2031
+ const localAppData = process.env.LOCALAPPDATA?.trim();
2032
+ const programsDir = localAppData ? path.join(localAppData, "Programs") : "";
2033
+ if (programsDir && fs.existsSync(programsDir)) {
2034
+ for (const appDir of listChildDirsSafe(programsDir)) {
2035
+ pushUniqueCandidate(candidates, seen, path.join(appDir, "resources", "app", "node_modules", "@vscode", "ripgrep", "bin", "rg.exe"));
2036
+ pushUniqueCandidate(candidates, seen, path.join(appDir, "resources", "app", "extensions", "kiro.kiro-agent", "node_modules", "@vscode", "ripgrep", "bin", "rg.exe"));
2037
+ for (const childDir of listChildDirsSafe(appDir)) {
2038
+ pushUniqueCandidate(candidates, seen, path.join(childDir, "resources", "app", "node_modules", "@vscode", "ripgrep", "bin", "rg.exe"));
2039
+ }
2040
+ }
2041
+ }
2042
+ }
2043
+ return candidates;
2044
+ }
2045
+ function resolveRipgrepCommand() {
2046
+ if (typeof cachedRipgrepCommand !== "undefined") {
2047
+ if (cachedRipgrepCommand)
2048
+ return { ok: true, command: cachedRipgrepCommand };
2049
+ return { ok: false, error: cachedRipgrepResolveError ?? "ripgrep unavailable", attempts: [] };
2050
+ }
2051
+ const env = buildRipgrepEnv();
2052
+ const attempts = [];
2053
+ for (const candidate of collectRipgrepCandidates()) {
2054
+ const probe = spawnSync(candidate, ["--version"], {
2055
+ cwd: projectRoot || undefined,
2056
+ env,
2057
+ encoding: "utf8",
2058
+ windowsHide: true,
2059
+ timeout: RIPGREP_RESOLVE_TIMEOUT_MS,
2060
+ maxBuffer: 256 * 1024,
2061
+ });
2062
+ if (probe.status === 0) {
2063
+ cachedRipgrepCommand = candidate;
2064
+ cachedRipgrepResolveError = null;
2065
+ return { ok: true, command: candidate };
2066
+ }
2067
+ attempts.push(`${candidate}: ${formatProcessFailure(probe)}`);
2068
+ }
2069
+ cachedRipgrepCommand = null;
2070
+ cachedRipgrepResolveError = attempts.slice(0, 8).join(" | ") || "ripgrep unavailable";
2071
+ return { ok: false, error: cachedRipgrepResolveError, attempts };
2072
+ }
2073
+ function appendBuiltInRipgrepExcludes(args) {
2074
+ for (const segment of IGNORED_PATH_SEGMENTS) {
2075
+ args.push("-g", `!${segment}/**`);
2076
+ args.push("-g", `!**/${segment}/**`);
2077
+ }
2078
+ for (const baseName of NOISE_FILE_BASENAMES) {
2079
+ args.push("-g", `!**/${baseName}`);
2080
+ }
2081
+ for (const suffix of NOISE_FILE_SUFFIXES) {
2082
+ args.push("-g", `!**/*${suffix}`);
2083
+ }
2084
+ }
2085
+ function runRipgrepSearch(opts) {
2086
+ const resolved = resolveRipgrepCommand();
2087
+ if (!resolved.ok) {
2088
+ return { ok: false, unavailable: true, error: resolved.error, attempts: resolved.attempts };
2089
+ }
2090
+ const args = ["--vimgrep", "--no-heading", "--color", "never", "-m", String(opts.maxResults)];
2091
+ args.push(opts.caseSensitive ? "-s" : "-i");
2092
+ if (opts.mode === "literal")
2093
+ args.push("-F");
2094
+ appendBuiltInRipgrepExcludes(args);
2095
+ args.push("--", opts.query, ".");
2096
+ const result = spawnSync(resolved.command, args, {
2097
+ cwd: projectRoot,
2098
+ env: buildRipgrepEnv(),
2099
+ encoding: "utf8",
2100
+ windowsHide: true,
2101
+ timeout: RIPGREP_SEARCH_TIMEOUT_MS,
2102
+ maxBuffer: RIPGREP_MAX_BUFFER_BYTES,
2103
+ });
2104
+ if (result.error) {
2105
+ return {
2106
+ ok: false,
2107
+ unavailable: false,
2108
+ error: formatProcessFailure(result),
2109
+ attempts: [],
2110
+ rg_command: resolved.command,
2111
+ exit_status: result.status,
2112
+ };
2113
+ }
2114
+ const status = result.status ?? 0;
2115
+ if (status !== 0 && status !== 1) {
2116
+ return {
2117
+ ok: false,
2118
+ unavailable: false,
2119
+ error: formatProcessFailure(result),
2120
+ attempts: [],
2121
+ rg_command: resolved.command,
2122
+ exit_status: status,
2123
+ };
2124
+ }
2125
+ const matches = [];
2126
+ let totalMatches = 0;
2127
+ let truncated = false;
2128
+ for (const rawLine of toProcessText(result.stdout).split(/\r?\n/)) {
2129
+ if (!rawLine)
2130
+ continue;
2131
+ const parsed = /^(.*?):(\d+):(\d+):(.*)$/.exec(rawLine);
2132
+ if (!parsed)
2133
+ continue;
2134
+ const filePath = path.posix
2135
+ .normalize(parsed[1].replace(/\\/g, "/"))
2136
+ .replace(/^\.\/+/, "");
2137
+ const lineNumber = Number.parseInt(parsed[2] ?? "0", 10);
2138
+ const colNumber = Number.parseInt(parsed[3] ?? "0", 10);
2139
+ const lineText = (parsed[4] ?? "").replace(/\r$/, "");
2140
+ if (!filePath || !Number.isFinite(lineNumber) || !Number.isFinite(colNumber))
2141
+ continue;
2142
+ if (shouldIgnoreDbFilePath(filePath))
2143
+ continue;
2144
+ if (shouldIgnoreContentFile(filePath))
2145
+ continue;
2146
+ if (!passesPathFilters(filePath, opts.includePaths, opts.excludePaths))
2147
+ continue;
2148
+ totalMatches += 1;
2149
+ if (matches.length >= opts.maxResults) {
2150
+ truncated = true;
2151
+ continue;
2152
+ }
2153
+ matches.push({
2154
+ file_path: filePath,
2155
+ kind: "file_match",
2156
+ line: lineNumber,
2157
+ col: colNumber,
2158
+ preview: buildGrepPreviewSnippet(lineText, colNumber),
2159
+ match: extractGrepMatchText({
2160
+ lineText,
2161
+ query: opts.query,
2162
+ mode: opts.mode,
2163
+ caseSensitive: opts.caseSensitive,
2164
+ col: colNumber,
2165
+ }),
2166
+ });
2167
+ }
2168
+ return {
2169
+ ok: true,
2170
+ backend: "ripgrep",
2171
+ rg_command: resolved.command,
2172
+ matches,
2173
+ truncated,
2174
+ total_matches: totalMatches,
2175
+ };
2176
+ }
2177
+ function runIndexedGrepSearch(opts) {
2178
+ const hint = (() => {
2179
+ if (opts.mode === "literal")
2180
+ return opts.query;
2181
+ const explicit = opts.literalHint.trim();
2182
+ if (explicit)
2183
+ return explicit;
2184
+ return extractLongestLiteralFromRegex(opts.query);
2185
+ })();
2186
+ if (opts.mode === "regex" && hint.trim().length < 3) {
2187
+ throw new Error("Regex has no sufficiently long literal anchor for indexed narrowing. Provide literal_hint (>= 3 chars) or narrow with include_paths.");
2188
+ }
2189
+ let re;
2190
+ try {
2191
+ re = compileGrepRegex({
2192
+ query: opts.query,
2193
+ mode: opts.mode,
2194
+ caseSensitive: opts.caseSensitive,
2195
+ });
2196
+ }
2197
+ catch (err) {
2198
+ throw new Error(`Invalid pattern: ${String(err)}`);
2199
+ }
2200
+ const maxCandidates = opts.maxCandidates ?? Math.min(50_000, Math.max(1000, opts.maxResults * 200));
2201
+ const candidates = (() => {
2202
+ if (ftsAvailable) {
2203
+ const matchQuery = buildFtsMatchQuery(hint);
2204
+ const placeholders = opts.kinds.map(() => "?").join(", ");
2205
+ const stmt = db.prepare(`
2206
+ SELECT
2207
+ m.id as id,
2208
+ m.kind as kind,
2209
+ m.content as content,
2210
+ m.file_path as file_path,
2211
+ m.start_line as start_line,
2212
+ m.end_line as end_line
2213
+ FROM ${FTS_TABLE_NAME}
2214
+ JOIN memory_items m ON m.id = ${FTS_TABLE_NAME}.rowid
2215
+ WHERE ${FTS_TABLE_NAME} MATCH ?
2216
+ AND m.kind IN (${placeholders})
2217
+ ORDER BY m.file_path ASC, m.start_line ASC, m.id ASC
2218
+ LIMIT ?
2219
+ `);
2220
+ return stmt.all(matchQuery, ...opts.kinds, maxCandidates);
2221
+ }
2222
+ const needle = opts.mode === "literal" ? opts.query : hint;
2223
+ const escaped = escapeLike(needle);
2224
+ const like = `%${escaped}%`;
2225
+ const placeholders = opts.kinds.map(() => "?").join(", ");
2226
+ const stmt = db.prepare(`
2227
+ SELECT
2228
+ id,
2229
+ kind,
2230
+ content,
2231
+ file_path,
2232
+ start_line,
2233
+ end_line
2234
+ FROM memory_items
2235
+ WHERE content LIKE ? ESCAPE '\\'
2236
+ AND kind IN (${placeholders})
2237
+ ORDER BY file_path ASC, start_line ASC, id ASC
2238
+ LIMIT ?
2239
+ `);
2240
+ return stmt.all(like, ...opts.kinds, maxCandidates);
2241
+ })();
2242
+ const matches = [];
2243
+ let candidatesScanned = 0;
2244
+ let truncated = false;
2245
+ for (const c of candidates) {
2246
+ candidatesScanned += 1;
2247
+ if (!c.file_path || c.start_line == null)
2248
+ continue;
2249
+ if (shouldIgnoreDbFilePath(c.file_path))
2250
+ continue;
2251
+ if (!passesPathFilters(c.file_path, opts.includePaths, opts.excludePaths))
2252
+ continue;
2253
+ const content = c.content ?? "";
2254
+ const lineStarts = buildLineStarts(content);
2255
+ re.lastIndex = 0;
2256
+ let m;
2257
+ while ((m = re.exec(content)) !== null) {
2258
+ const idx = m.index ?? 0;
2259
+ const matched = m[0] ?? "";
2260
+ if (!matched) {
2261
+ if (re.lastIndex >= content.length)
2262
+ break;
2263
+ re.lastIndex += 1;
2264
+ continue;
2265
+ }
2266
+ const lineIdx = lineIndexForOffset(lineStarts, idx);
2267
+ const lineStart = lineStarts[lineIdx] ?? 0;
2268
+ const lineEnd = lineIdx + 1 < lineStarts.length
2269
+ ? (lineStarts[lineIdx + 1] ?? content.length) - 1
2270
+ : content.length;
2271
+ const previewRaw = content.slice(lineStart, Math.max(lineStart, lineEnd));
2272
+ matches.push({
2273
+ file_path: c.file_path,
2274
+ kind: c.kind,
2275
+ line: c.start_line + lineIdx,
2276
+ col: idx - lineStart + 1,
2277
+ preview: trimGrepText(previewRaw, 500),
2278
+ match: trimGrepText(matched, 200),
2279
+ });
2280
+ if (matches.length >= opts.maxResults) {
2281
+ truncated = true;
2282
+ break;
2283
+ }
2284
+ }
2285
+ if (truncated)
2286
+ break;
2287
+ }
2288
+ return {
2289
+ backend: "indexed_fallback",
2290
+ hint,
2291
+ kinds: opts.kinds,
2292
+ include_paths: opts.includePaths ?? [],
2293
+ exclude_paths: opts.excludePaths ?? [],
2294
+ candidates: { total: candidates.length, scanned: candidatesScanned },
2295
+ matches,
2296
+ truncated,
2297
+ };
2298
+ }
2299
+ function resolveProjectPathUnderRoot(inputPath, opts = {}) {
2300
+ const normalizedInput = inputPath.trim() || ".";
2301
+ const abs = path.isAbsolute(normalizedInput) ? normalizedInput : path.join(projectRoot, normalizedInput);
2302
+ const absPath = path.resolve(abs);
2303
+ const root = path.resolve(projectRoot);
2304
+ const rel = path.relative(root, absPath);
2305
+ const insideRoot = rel === "" || (!rel.startsWith("..") && !path.isAbsolute(rel));
2306
+ if (!insideRoot) {
2307
+ throw new Error(`[VectorMind] Path must be under project_root: ${inputPath}`);
2308
+ }
2309
+ if (rel === "" && !opts.allowRoot) {
2310
+ throw new Error(`[VectorMind] Path must not be the project_root itself: ${inputPath}`);
2311
+ }
2312
+ return {
2313
+ absPath,
2314
+ dbFilePath: rel === "" ? "." : normalizeToDbPath(absPath),
2315
+ };
2316
+ }
2317
+ function resolveReadPathUnderProjectRoot(inputPath) {
2318
+ return resolveProjectPathUnderRoot(inputPath, { allowRoot: false });
2319
+ }
2320
+ function resolveCodexTextPath(inputPath) {
2321
+ const trimmed = inputPath.trim();
2322
+ if (!trimmed)
2323
+ throw new Error("[VectorMind] path is required");
2324
+ const uriPath = trimmed.startsWith("file:") ? parseFileUriToPath(trimmed) : null;
2325
+ const absPath = path.resolve(uriPath ?? trimmed);
2326
+ const allowedRoot = getAllowedCodexTextRoots().find((root) => {
2327
+ const rel = path.relative(root, absPath);
2328
+ return rel === "" || (!rel.startsWith("..") && !path.isAbsolute(rel));
2329
+ });
2330
+ if (!allowedRoot) {
2331
+ throw new Error(`[VectorMind] Path must be under one of the allowed local text roots: ${getAllowedCodexTextRoots().join(", ")}`);
2332
+ }
2333
+ return { absPath, displayPath: absPath, allowedRoot };
2334
+ }
2335
+ function isHiddenBaseName(name) {
2336
+ return name.startsWith(".") && name !== "." && name !== "..";
2337
+ }
2338
/**
 * Normalize a user-supplied list of file extensions into a deduplicated,
 * lowercase, dot-prefixed form (e.g. ["JS", "ts"] -> [".js", ".ts"]).
 *
 * @param {string[]|undefined} values - Raw extension strings.
 * @returns {string[]|null} Normalized unique extensions (insertion order),
 *   or null when nothing usable was supplied (meaning "no filter").
 */
function normalizeExtensionsFilter(values) {
    if (!values?.length) {
        return null;
    }
    const seen = new Set();
    for (const value of values) {
        const lowered = value.trim().toLowerCase();
        if (!lowered) {
            continue; // skip blank entries
        }
        seen.add(lowered.startsWith(".") ? lowered : `.${lowered}`);
    }
    return seen.size > 0 ? [...seen] : null;
}
2347
/**
 * Stream a bounded line window [fromLine, toLine] from a UTF-8 text file.
 *
 * Each returned line is rendered as "<lineNo>:<text>". Reading stops early
 * with `truncated=true` once either `maxLines` lines are already collected
 * or the rendered character budget `maxChars` would be exceeded.
 *
 * @param {{absPath: string, fromLine: number, toLine: number,
 *          maxLines: number, maxChars: number}} opts
 * @returns {Promise<{text: string, returned: number, truncated: boolean}>}
 */
async function readTextFileLines(opts) {
    const collected = [];
    let currentLine = 0;
    let charBudgetUsed = 0;
    let truncated = false;
    const input = fs.createReadStream(opts.absPath, { encoding: "utf8" });
    const reader = readline.createInterface({ input, crlfDelay: Infinity });
    try {
        for await (const rawLine of reader) {
            currentLine += 1;
            if (currentLine < opts.fromLine) {
                continue;
            }
            if (currentLine > opts.toLine) {
                break; // past the requested window: stop, not truncation
            }
            const rendered = `${currentLine}:${rawLine}`;
            // +1 accounts for the newline used when joining rendered lines.
            charBudgetUsed += rendered.length + 1;
            const overLineLimit = collected.length >= opts.maxLines;
            const overCharLimit = charBudgetUsed > opts.maxChars;
            if (overLineLimit || overCharLimit) {
                truncated = true;
                break;
            }
            collected.push(rendered);
        }
    }
    finally {
        // Best-effort teardown; close/destroy failures are ignored.
        try {
            reader.close();
        }
        catch { }
        try {
            input.destroy();
        }
        catch { }
    }
    return { text: collected.join("\n"), returned: collected.length, truncated };
}
2382
/**
 * Read a bounded character slice of a UTF-8 text file.
 *
 * Refuses files larger than `maxFileBytes` so the whole file is never
 * loaded unexpectedly; callers are pointed at read_file_lines instead.
 *
 * @param {{absPath: string, offset: number, maxChars: number,
 *          maxFileBytes: number}} opts
 * @returns {{text: string, totalChars: number, returnedChars: number,
 *            truncated: boolean}}
 * @throws {Error} If the path is not a regular file or exceeds the size cap.
 */
function readTextFileSlice(opts) {
    const stats = fs.statSync(opts.absPath);
    if (!stats.isFile()) {
        throw new Error("Not a file");
    }
    if (stats.size > opts.maxFileBytes) {
        throw new Error(`File is too large for raw text read (${stats.size} bytes > limit ${opts.maxFileBytes}). Use read_file_lines instead.`);
    }
    const fullText = fs.readFileSync(opts.absPath, "utf8");
    const totalChars = fullText.length;
    // Clamp the offset so an out-of-range request yields an empty slice
    // instead of negative indexing.
    const start = Math.min(opts.offset, totalChars);
    const slice = fullText.slice(start, start + opts.maxChars);
    const returnedChars = slice.length;
    return {
        text: slice,
        totalChars,
        returnedChars,
        truncated: start + returnedChars < totalChars,
    };
}
2397
/**
 * Core implementation for the list_project_files tool: an iterative,
 * depth-first listing of files/directories under a start path, with
 * ignore rules, path/extension filters, optional hidden entries, optional
 * stat info, and a hard cap (`maxResults`) on returned entries.
 *
 * @param {{startAbsPath: string, startDbPath: string, recursive: boolean,
 *          maxDepth: number, includeFiles: boolean, includeDirs: boolean,
 *          includeHidden: boolean, respectIgnore: boolean,
 *          includePaths: string[]|null, excludePaths: string[]|null,
 *          extensions: string[]|null, maxResults: number,
 *          includeStats: boolean}} opts
 * @returns {{entries: Array<object>, returned: number, scanned: number,
 *            truncated: boolean}} Entries sorted by path.
 */
function listProjectFilesInternal(opts) {
    const entries = [];
    let scanned = 0;
    let truncated = false;
    // Adds an entry unless the result cap is already reached; hitting the
    // cap flips the shared `truncated` flag, which the loops below check.
    const pushEntry = (entry) => {
        if (entries.length >= opts.maxResults) {
            truncated = true;
            return;
        }
        entries.push(entry);
    };
    const startStat = fs.statSync(opts.startAbsPath);
    // Special case: the start path is itself a single file.
    if (startStat.isFile()) {
        const relPath = opts.startDbPath;
        if ((!opts.respectIgnore || !shouldIgnoreDbFilePath(relPath)) && passesPathFilters(relPath, opts.includePaths, opts.excludePaths)) {
            const ext = path.extname(relPath).toLowerCase();
            if (!opts.extensions || opts.extensions.includes(ext)) {
                pushEntry({
                    path: relPath,
                    kind: "file",
                    depth: 0,
                    ...(opts.includeStats ? { size: startStat.size, mtime: startStat.mtime.toISOString() } : {}),
                });
            }
        }
        return { entries, returned: entries.length, scanned: 1, truncated };
    }
    // Non-recursive mode only lists the immediate children (depth 1).
    const effectiveMaxDepth = opts.recursive ? opts.maxDepth : 1;
    const stack = [{ absPath: opts.startAbsPath, depth: 0 }];
    while (stack.length > 0) {
        const current = stack.pop();
        if (!current)
            break;
        let dirEntries;
        try {
            dirEntries = fs.readdirSync(current.absPath, { withFileTypes: true });
        }
        catch {
            // Unreadable directory (permissions, race with deletion):
            // skip it rather than failing the whole listing.
            continue;
        }
        // Sort ascending, then push onto the stack in reverse so that
        // stack.pop() visits children in alphabetical order.
        dirEntries.sort((a, b) => a.name.localeCompare(b.name));
        for (let idx = dirEntries.length - 1; idx >= 0; idx -= 1) {
            const child = dirEntries[idx];
            if (!child)
                continue;
            if (!opts.includeHidden && isHiddenBaseName(child.name))
                continue;
            const childAbs = path.join(current.absPath, child.name);
            const childRel = normalizeToDbPath(childAbs);
            if (opts.respectIgnore && shouldIgnoreDbFilePath(childRel))
                continue;
            // NOTE: `scanned` counts entries that survived hidden/ignore
            // filtering, not every directory entry seen.
            scanned += 1;
            const childDepth = current.depth + 1;
            const matchesPath = passesPathFilters(childRel, opts.includePaths, opts.excludePaths);
            if (child.isDirectory()) {
                if (opts.includeDirs && matchesPath) {
                    let stats = null;
                    if (opts.includeStats) {
                        try {
                            stats = fs.statSync(childAbs);
                        }
                        catch {
                            // Stat failure is non-fatal; emit entry without stats.
                            stats = null;
                        }
                    }
                    pushEntry({
                        path: childRel,
                        kind: "dir",
                        depth: childDepth,
                        ...(stats ? { size: stats.size, mtime: stats.mtime.toISOString() } : {}),
                    });
                    if (truncated)
                        break;
                }
                // Descend only while below the depth limit. Directories that
                // fail the path filter are still traversed so nested matches
                // can be found.
                if (childDepth < effectiveMaxDepth) {
                    stack.push({ absPath: childAbs, depth: childDepth });
                }
                continue;
            }
            // Symlinks/sockets/etc. are neither files nor directories here
            // and are skipped.
            if (!child.isFile())
                continue;
            if (!opts.includeFiles || !matchesPath)
                continue;
            const ext = path.extname(childRel).toLowerCase();
            if (opts.extensions && !opts.extensions.includes(ext))
                continue;
            let stats = null;
            if (opts.includeStats) {
                try {
                    stats = fs.statSync(childAbs);
                }
                catch {
                    stats = null;
                }
            }
            pushEntry({
                path: childRel,
                kind: "file",
                depth: childDepth,
                ...(stats ? { size: stats.size, mtime: stats.mtime.toISOString() } : {}),
            });
            if (truncated)
                break;
        }
        if (truncated)
            break;
    }
    // Final output is path-sorted regardless of traversal order.
    entries.sort((a, b) => a.path.localeCompare(b.path));
    return { entries, returned: entries.length, scanned, truncated };
}
2507
+ function buildServerInstructions() {
2508
+ return [
1693
2509
  "VectorMind MCP is available in this session. Use it to avoid guessing project context.",
2510
+ "This package ships built-in baseline policy. If a client supports MCP instructions, these rules auto-apply as soon as the MCP is installed and connected; no user-side config file is required.",
2511
+ "The write-operation rules below are strict workflow constraints. Do not claim that the environment has real git branch locks, checkout APIs, or file-lock tools unless such tools are actually available in the current client/runtime. If such tools are absent, you must still enforce the same exclusivity semantics through explicit coordination and serialized same-file edits.",
1694
2512
  "Project root resolution order: tool argument project_root (recommended for clients without roots/list), then VECTORMIND_ROOT (avoid hardcoding in global config), then MCP roots/list (best-effort; falls back quickly if unsupported), then process.cwd() (so start your MCP client in the project directory for per-project isolation).",
1695
2513
  "If root_source is fallback, file watching/indexing is disabled (pass project_root to enable per-project tracking).",
1696
2514
  "",
2515
+ "Built-in write-operation policy:",
2516
+ BUILTIN_WRITE_POLICY_INSTRUCTIONS,
2517
+ "",
2518
+ "Built-in task-list / Plan-Lite policy:",
2519
+ BUILTIN_PLAN_LITE_INSTRUCTIONS,
2520
+ "",
2521
+ "Built-in destructive-operation guard policy:",
2522
+ BUILTIN_DESTRUCTIVE_OPERATION_GUARD_INSTRUCTIONS,
2523
+ "",
2524
+ "Built-in architecture and code-organization policy:",
2525
+ BUILTIN_ARCHITECTURE_AND_CODE_ORGANIZATION_INSTRUCTIONS,
2526
+ "",
2527
+ "Built-in low-overhead execution and heavy-thread policy:",
2528
+ BUILTIN_LOW_OVERHEAD_WORKFLOW_INSTRUCTIONS,
2529
+ "",
1697
2530
  "Required workflow:",
1698
- "- On every new conversation/session: call bootstrap_context({ query: <current goal> }) first (or at least get_brain_dump()) to restore context and retrieve relevant matches from the local memory store (vector if enabled; otherwise FTS/LIKE).",
2531
+ "- On every new conversation/session for analysis/design/development work: call bootstrap_context({ query: <current goal> }) first (or at least get_brain_dump()) to restore context and retrieve relevant matches from the local memory store (vector if enabled; otherwise FTS/LIKE).",
1699
2532
  " - Output is compact by default. Use include_content=true only when you truly need full text (it increases tokens).",
1700
2533
  " - Tune output size with: requirements_limit/changes_limit/notes_limit, preview_chars, pending_limit/pending_offset.",
1701
2534
  " - Prefer read_memory_item(id, offset, limit) to fetch full text on demand instead of returning large content in other tool outputs.",
2535
+ "- For pure execution-first tasks with explicit targets (for example compile/build/run/launch/package/publish/test rerun), you may skip retrieval and go straight to the minimum necessary shell or host tools unless code/context lookup is actually needed to unblock execution.",
2536
+ "- To read local Codex skill/prompt/rule files (for example SKILL.md under CODEX_HOME or AGENTS_HOME), prefer read_codex_text_file({ path }) instead of assuming a filesystem MCP resource server exists.",
2537
+ "- For project file/directory browsing, prefer list_project_files({ path, recursive?, max_depth? }) over shelling out to Get-ChildItem/ls. It respects ignore rules and keeps output bounded.",
2538
+ "- For small/medium raw file reads, prefer read_file_text({ path, offset?, max_chars? }) over Get-Content -Raw. Use read_file_lines(...) when you need deterministic line ranges or the file may be large.",
2539
+ "- For raw repo text search with exact file+line+col matches, prefer grep({ query: <pattern> }). It uses ripgrep against real project files when available, applies built-in noise filters, and only falls back to indexed search if ripgrep is unavailable.",
2540
+ "- To read a bounded segment of a file, prefer read_file_lines({ path: <file>, from_line/to_line or total_count }) over unbounded file reads.",
1702
2541
  "- BEFORE editing code: call start_requirement(title, background) to set the active requirement.",
1703
2542
  "- AFTER editing + saving: call get_pending_changes() to see unsynced files, then call sync_change_intent(intent, files). (You can omit files to auto-link all pending changes.)",
1704
2543
  "- After major milestones/decisions: call upsert_project_summary(summary) and/or add_note(...) to persist durable context locally.",
@@ -1706,9 +2545,14 @@ const server = new Server({ name: SERVER_NAME, version: SERVER_VERSION }, {
1706
2545
  "- When you need full text for a specific note/summary/match: call read_memory_item(id, offset, limit) and page through it.",
1707
2546
  "- When asked to locate code (class/function/type): call query_codebase(query) instead of guessing.",
1708
2547
  "- When you need to recall relevant context from history/code/docs: call semantic_search(query, ...) instead of guessing.",
2548
+ "- If the current thread is already heavy or the user reports it has become slow, switch to a lighter workflow: avoid redundant retrieval, keep outputs compact, and recommend continuing substantial new analysis in a fresh thread after a short handoff summary.",
1709
2549
  "",
1710
2550
  "If tool output conflicts with assumptions, trust the tool output.",
1711
- ].join("\n"),
2551
+ ].join("\n");
2552
+ }
2553
// Single MCP server instance for this process. Tool handlers are
// registered on it below; `instructions` carries the built-in policy and
// workflow text, which clients that support MCP server instructions
// surface automatically.
const server = new Server({ name: SERVER_NAME, version: SERVER_VERSION }, {
    capabilities: { tools: {} },
    instructions: buildServerInstructions(),
});
1713
2557
  async function resolveProjectRootFromMcpRoots() {
1714
2558
  const caps = server.getClientCapabilities();
@@ -2265,6 +3109,31 @@ server.setRequestHandler(ListToolsRequestSchema, async () => {
2265
3109
  description: "Clear the in-memory debug activity log. Enable logging with VECTORMIND_DEBUG_LOG=1.",
2266
3110
  inputSchema: toJsonSchemaCompat(ClearActivityLogArgsSchema),
2267
3111
  },
3112
+ {
3113
+ name: "grep",
3114
+ description: "Repo text search with precise file/line/col matches, powered by ripgrep against real project files plus built-in noise filters. Falls back to indexed search only when ripgrep is unavailable.",
3115
+ inputSchema: toJsonSchemaCompat(GrepArgsSchema),
3116
+ },
3117
+ {
3118
+ name: "list_project_files",
3119
+ description: "AI-friendly, ignore-aware file/directory listing under project_root with bounded output. Prefer this over Get-ChildItem/ls for local repository browsing.",
3120
+ inputSchema: toJsonSchemaCompat(ListProjectFilesArgsSchema),
3121
+ },
3122
+ {
3123
+ name: "read_codex_text_file",
3124
+ description: "Read bounded text from local Codex/agents files such as SKILL.md, prompt files, and rules under CODEX_HOME/AGENTS_HOME. Prefer this over assuming a filesystem MCP resource server exists.",
3125
+ inputSchema: toJsonSchemaCompat(ReadCodexTextFileArgsSchema),
3126
+ },
3127
+ {
3128
+ name: "read_file_lines",
3129
+ description: "Read a specific line range from a file under project_root (with strict size limits). Prefer this over Get-Content for deterministic reads.",
3130
+ inputSchema: toJsonSchemaCompat(ReadFileLinesArgsSchema),
3131
+ },
3132
+ {
3133
+ name: "read_file_text",
3134
+ description: "Read bounded raw UTF-8 text from a file under project_root. Prefer this over Get-Content -Raw for small/medium text files; use read_file_lines for large files or line-specific reads.",
3135
+ inputSchema: toJsonSchemaCompat(ReadFileTextArgsSchema),
3136
+ },
2268
3137
  {
2269
3138
  name: "query_codebase",
2270
3139
  description: "Search the symbol index for class/function/type names (or substrings) to locate definitions by file path and signature. Use this when you need to find code—do not guess locations.",
@@ -2610,7 +3479,7 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
2610
3479
  ? toMemoryItemPreview(projectSummaryRow, includeContent, previewChars, contentMaxChars)
2611
3480
  : null;
2612
3481
  const recent_notes = listRecentNotesStmt.all(notesLimit).map((n) => toMemoryItemPreview(n, includeContent, previewChars, contentMaxChars));
2613
- const conventions = listConventionsStmt.all(conventionsLimit).map((c) => toMemoryItemPreview(c, false, previewChars, contentMaxChars));
3482
+ const conventions = getConventionPreviews(conventionsLimit, previewChars, contentMaxChars);
2614
3483
  const pending_total = Number(countPendingChangesStmt.get()?.total ?? 0);
2615
3484
  const pending_offset = args.pending_offset;
2616
3485
  const pending_limit = args.pending_limit;
@@ -2706,7 +3575,7 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
2706
3575
  ? toMemoryItemPreview(projectSummaryRow, includeContent, previewChars, contentMaxChars)
2707
3576
  : null;
2708
3577
  const recent_notes = listRecentNotesStmt.all(notesLimit).map((n) => toMemoryItemPreview(n, includeContent, previewChars, contentMaxChars));
2709
- const conventions = listConventionsStmt.all(conventionsLimit).map((c) => toMemoryItemPreview(c, false, previewChars, contentMaxChars));
3578
+ const conventions = getConventionPreviews(conventionsLimit, previewChars, contentMaxChars);
2710
3579
  const pending_total = Number(countPendingChangesStmt.get()?.total ?? 0);
2711
3580
  const pending_offset = args.pending_offset;
2712
3581
  const pending_limit = args.pending_limit;
@@ -2935,6 +3804,418 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
2935
3804
  clearActivityLog();
2936
3805
  return { content: [{ type: "text", text: toolJson({ ok: true }) }] };
2937
3806
  }
3807
// --- grep tool ---
// Raw text search. Primary backend is ripgrep over the real working tree
// (runRipgrepSearch); if ripgrep is unavailable the handler falls back to
// the indexed chunk store (runIndexedGrepSearch). Any other ripgrep
// failure is reported as an error rather than silently falling back.
if (toolName === "grep") {
    const args = GrepArgsSchema.parse(rawArgs);
    const q = args.query;
    const mode = args.mode;
    const smartCase = args.smart_case;
    // `kinds` only matters for the indexed fallback; defaults to both
    // code and doc chunks.
    const kinds = args.kinds?.length ? args.kinds : ["code_chunk", "doc_chunk"];
    const includePaths = args.include_paths?.length ? args.include_paths : null;
    const excludePaths = args.exclude_paths?.length ? args.exclude_paths : null;
    const maxResults = args.max_results;
    // Explicit case_sensitive wins; otherwise smart-case means "sensitive
    // only when the query contains an uppercase ASCII letter".
    const caseSensitive = args.case_sensitive ?? (smartCase ? hasUppercaseAscii(q) : true);
    const ripgrepResult = runRipgrepSearch({
        query: q,
        mode,
        smartCase,
        caseSensitive,
        includePaths,
        excludePaths,
        maxResults,
    });
    if (ripgrepResult.ok) {
        // Success path: log and return ripgrep matches as-is.
        logActivity("grep", {
            backend: ripgrepResult.backend,
            rg_command: ripgrepResult.rg_command,
            query: q,
            mode,
            case_sensitive: caseSensitive,
            smart_case: smartCase,
            include_paths: includePaths ?? [],
            exclude_paths: excludePaths ?? [],
            matches: ripgrepResult.matches.length,
            total_matches: ripgrepResult.total_matches,
            truncated: ripgrepResult.truncated,
        });
        return {
            content: [
                {
                    type: "text",
                    text: toolJson({
                        ok: true,
                        backend: ripgrepResult.backend,
                        rg_command: ripgrepResult.rg_command,
                        query: q,
                        mode,
                        case_sensitive: caseSensitive,
                        smart_case: smartCase,
                        include_paths: includePaths ?? [],
                        exclude_paths: excludePaths ?? [],
                        matches: ripgrepResult.matches,
                        total_matches: ripgrepResult.total_matches,
                        truncated: ripgrepResult.truncated,
                    }),
                },
            ],
        };
    }
    // ripgrep ran but failed (e.g. bad pattern): surface the error; do NOT
    // fall back, since the indexed search could mask a user mistake.
    if (!ripgrepResult.unavailable) {
        return {
            isError: true,
            content: [
                {
                    type: "text",
                    text: toolJson({
                        ok: false,
                        backend: "ripgrep",
                        error: ripgrepResult.error,
                        rg_command: ripgrepResult.rg_command,
                        exit_status: ripgrepResult.exit_status,
                        query: q,
                        mode,
                    }),
                },
            ],
        };
    }
    // ripgrep is unavailable on this machine: fall back to indexed search.
    let indexedResult;
    try {
        indexedResult = runIndexedGrepSearch({
            query: q,
            mode,
            smartCase,
            caseSensitive,
            literalHint: args.literal_hint,
            kinds,
            includePaths,
            excludePaths,
            maxResults,
            maxCandidates: args.max_candidates,
        });
    }
    catch (err) {
        // Fallback itself failed: report both the ripgrep unavailability
        // and the indexed-search error.
        return {
            isError: true,
            content: [
                {
                    type: "text",
                    text: toolJson({
                        ok: false,
                        backend: "indexed_fallback",
                        fallback_reason: "ripgrep_unavailable",
                        ripgrep_error: ripgrepResult.error,
                        ripgrep_attempts: ripgrepResult.attempts,
                        error: String(err),
                        query: q,
                        mode,
                        literal_hint: args.literal_hint,
                    }),
                },
            ],
        };
    }
    logActivity("grep", {
        backend: indexedResult.backend,
        fallback_reason: "ripgrep_unavailable",
        ripgrep_error: ripgrepResult.error,
        query: q,
        mode,
        case_sensitive: caseSensitive,
        smart_case: smartCase,
        hint: indexedResult.hint,
        kinds,
        include_paths: includePaths ?? [],
        exclude_paths: excludePaths ?? [],
        candidates: indexedResult.candidates.total,
        candidates_scanned: indexedResult.candidates.scanned,
        matches: indexedResult.matches.length,
        truncated: indexedResult.truncated,
    });
    return {
        content: [
            {
                type: "text",
                text: toolJson({
                    ok: true,
                    backend: indexedResult.backend,
                    fallback_reason: "ripgrep_unavailable",
                    ripgrep_error: ripgrepResult.error,
                    ripgrep_attempts: ripgrepResult.attempts,
                    query: q,
                    mode,
                    case_sensitive: caseSensitive,
                    smart_case: smartCase,
                    hint: indexedResult.hint,
                    kinds,
                    include_paths: includePaths ?? [],
                    exclude_paths: excludePaths ?? [],
                    candidates: indexedResult.candidates,
                    matches: indexedResult.matches,
                    truncated: indexedResult.truncated,
                }),
            },
        ],
    };
}
3960
// --- list_project_files tool ---
// Ignore-aware, bounded listing of files/directories under project_root.
// Path resolution allows the root itself as the start point.
if (toolName === "list_project_files") {
    const args = ListProjectFilesArgsSchema.parse(rawArgs);
    const resolved = resolveProjectPathUnderRoot(args.path, { allowRoot: true });
    let st;
    try {
        st = fs.statSync(resolved.absPath);
    }
    catch (err) {
        // Nonexistent start path is a tool-level error, not an exception.
        return {
            isError: true,
            content: [{ type: "text", text: toolJson({ ok: false, error: `Path not found: ${String(err)}` }) }],
        };
    }
    // Empty filter arrays are treated the same as "no filter".
    const includePaths = args.include_paths?.length ? args.include_paths : null;
    const excludePaths = args.exclude_paths?.length ? args.exclude_paths : null;
    const extensions = normalizeExtensionsFilter(args.extensions);
    const result = listProjectFilesInternal({
        startAbsPath: resolved.absPath,
        startDbPath: resolved.dbFilePath,
        recursive: args.recursive,
        maxDepth: args.max_depth,
        includeFiles: args.include_files,
        includeDirs: args.include_dirs,
        includeHidden: args.include_hidden,
        respectIgnore: args.respect_ignore,
        includePaths,
        excludePaths,
        extensions,
        maxResults: args.max_results,
        includeStats: args.include_stats,
    });
    logActivity("list_project_files", {
        path: resolved.dbFilePath,
        recursive: args.recursive,
        max_depth: args.max_depth,
        include_files: args.include_files,
        include_dirs: args.include_dirs,
        include_hidden: args.include_hidden,
        respect_ignore: args.respect_ignore,
        include_paths: includePaths ?? [],
        exclude_paths: excludePaths ?? [],
        extensions: extensions ?? [],
        returned: result.returned,
        scanned: result.scanned,
        truncated: result.truncated,
        path_kind: st.isFile() ? "file" : st.isDirectory() ? "dir" : "other",
    });
    return {
        content: [
            {
                type: "text",
                text: toolJson({
                    ok: true,
                    path: resolved.dbFilePath,
                    path_kind: st.isFile() ? "file" : st.isDirectory() ? "dir" : "other",
                    recursive: args.recursive,
                    // Echo the depth that was actually applied: listing is
                    // clamped to depth 1 when recursive is false.
                    max_depth: args.recursive ? args.max_depth : 1,
                    include_files: args.include_files,
                    include_dirs: args.include_dirs,
                    include_hidden: args.include_hidden,
                    respect_ignore: args.respect_ignore,
                    include_paths: includePaths ?? [],
                    exclude_paths: excludePaths ?? [],
                    extensions: extensions ?? [],
                    returned: result.returned,
                    scanned: result.scanned,
                    truncated: result.truncated,
                    entries: result.entries,
                }),
            },
        ],
    };
}
4033
// --- read_file_text tool ---
// Bounded raw UTF-8 read of a file under project_root (never the root
// itself). readTextFileSlice enforces the file-size cap and offset clamp.
if (toolName === "read_file_text") {
    const args = ReadFileTextArgsSchema.parse(rawArgs);
    const resolved = resolveReadPathUnderProjectRoot(args.path);
    let st;
    try {
        st = fs.statSync(resolved.absPath);
    }
    catch (err) {
        return {
            isError: true,
            content: [{ type: "text", text: toolJson({ ok: false, error: `File not found: ${String(err)}` }) }],
        };
    }
    if (!st.isFile()) {
        return { isError: true, content: [{ type: "text", text: toolJson({ ok: false, error: "Not a file" }) }] };
    }
    let result;
    try {
        result = readTextFileSlice({
            absPath: resolved.absPath,
            offset: args.offset,
            maxChars: args.max_chars,
            maxFileBytes: args.max_file_bytes,
        });
    }
    catch (err) {
        // Includes the "file too large" rejection from readTextFileSlice.
        return { isError: true, content: [{ type: "text", text: toolJson({ ok: false, error: String(err) }) }] };
    }
    logActivity("read_file_text", {
        file_path: resolved.dbFilePath,
        offset: args.offset,
        returned_chars: result.returnedChars,
        total_chars: result.totalChars,
        truncated: result.truncated,
    });
    return {
        content: [
            {
                type: "text",
                text: toolJson({
                    ok: true,
                    file_path: resolved.dbFilePath,
                    offset: args.offset,
                    returned_chars: result.returnedChars,
                    total_chars: result.totalChars,
                    truncated: result.truncated,
                    text: result.text,
                }),
            },
        ],
    };
}
4085
// --- read_codex_text_file tool ---
// Like read_file_text, but for local Codex/agents files (SKILL.md, prompt
// and rule files) restricted to the allow-listed text roots rather than
// project_root.
if (toolName === "read_codex_text_file") {
    const args = ReadCodexTextFileArgsSchema.parse(rawArgs);
    let resolved;
    try {
        resolved = resolveCodexTextPath(args.path);
    }
    catch (err) {
        // Empty path or path outside every allowed root.
        return { isError: true, content: [{ type: "text", text: toolJson({ ok: false, error: String(err) }) }] };
    }
    let st;
    try {
        st = fs.statSync(resolved.absPath);
    }
    catch (err) {
        return {
            isError: true,
            content: [{ type: "text", text: toolJson({ ok: false, error: `File not found: ${String(err)}` }) }],
        };
    }
    if (!st.isFile()) {
        return { isError: true, content: [{ type: "text", text: toolJson({ ok: false, error: "Not a file" }) }] };
    }
    let result;
    try {
        result = readTextFileSlice({
            absPath: resolved.absPath,
            offset: args.offset,
            maxChars: args.max_chars,
            maxFileBytes: args.max_file_bytes,
        });
    }
    catch (err) {
        return { isError: true, content: [{ type: "text", text: toolJson({ ok: false, error: String(err) }) }] };
    }
    logActivity("read_codex_text_file", {
        file_path: resolved.displayPath,
        allowed_root: resolved.allowedRoot,
        offset: args.offset,
        returned_chars: result.returnedChars,
        total_chars: result.totalChars,
        truncated: result.truncated,
    });
    return {
        content: [
            {
                type: "text",
                text: toolJson({
                    ok: true,
                    file_path: resolved.displayPath,
                    allowed_root: resolved.allowedRoot,
                    offset: args.offset,
                    returned_chars: result.returnedChars,
                    total_chars: result.totalChars,
                    truncated: result.truncated,
                    text: result.text,
                }),
            },
        ],
    };
}
4145
// --- read_file_lines tool ---
// Deterministic line-range read of a file under project_root. The range is
// either [from_line, to_line] or from_line plus total_count (default 200);
// output is additionally clamped by max_lines / max_chars.
if (toolName === "read_file_lines") {
    const args = ReadFileLinesArgsSchema.parse(rawArgs);
    const resolved = resolveReadPathUnderProjectRoot(args.path);
    let fromLine = args.from_line;
    let toLine = args.to_line;
    if (toLine == null) {
        // No explicit end line: derive it from total_count.
        const total = args.total_count ?? 200;
        toLine = fromLine + total - 1;
    }
    if (toLine < fromLine) {
        return {
            isError: true,
            content: [
                {
                    type: "text",
                    text: toolJson({
                        ok: false,
                        error: "to_line must be >= from_line",
                        path: args.path,
                        from_line: fromLine,
                        to_line: toLine,
                    }),
                },
            ],
        };
    }
    let st;
    try {
        st = fs.statSync(resolved.absPath);
    }
    catch (err) {
        return {
            isError: true,
            content: [
                { type: "text", text: toolJson({ ok: false, error: `File not found: ${String(err)}` }) },
            ],
        };
    }
    if (!st.isFile()) {
        return { isError: true, content: [{ type: "text", text: toolJson({ ok: false, error: "Not a file" }) }] };
    }
    // Hard server-side clamps on output size, independent of schema limits.
    const maxLines = Math.max(1, Math.min(2000, args.max_lines));
    const maxChars = Math.max(200, Math.min(200_000, args.max_chars));
    const result = await readTextFileLines({
        absPath: resolved.absPath,
        fromLine,
        toLine,
        maxLines,
        maxChars,
    });
    logActivity("read_file_lines", {
        file_path: resolved.dbFilePath,
        from_line: fromLine,
        to_line: toLine,
        returned: result.returned,
        truncated: result.truncated,
    });
    return {
        content: [
            {
                type: "text",
                text: toolJson({
                    ok: true,
                    file_path: resolved.dbFilePath,
                    from_line: fromLine,
                    to_line: toLine,
                    returned: result.returned,
                    truncated: result.truncated,
                    text: result.text,
                }),
            },
        ],
    };
}
2938
4219
  if (toolName === "query_codebase") {
2939
4220
  const args = QueryCodebaseArgsSchema.parse(rawArgs);
2940
4221
  const q = args.query.trim();