@coreyuan/vector-mind 1.0.20 → 1.0.30

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -1,6 +1,7 @@
1
1
  #!/usr/bin/env node
2
2
  import path from "node:path";
3
3
  import fs from "node:fs";
4
+ import * as readline from "node:readline";
4
5
  import crypto from "node:crypto";
5
6
  import os from "node:os";
6
7
  import { fileURLToPath } from "node:url";
@@ -11,8 +12,10 @@ import { Server } from "@modelcontextprotocol/sdk/server/index.js";
11
12
  import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
12
13
  import { toJsonSchemaCompat } from "@modelcontextprotocol/sdk/server/zod-json-schema-compat.js";
13
14
  import { CallToolRequestSchema, ListToolsRequestSchema, } from "@modelcontextprotocol/sdk/types.js";
15
+ import { BUILTIN_CONVENTIONS } from "./builtin-conventions.js";
16
+ import { BUILTIN_ARCHITECTURE_AND_CODE_ORGANIZATION_INSTRUCTIONS, BUILTIN_DESTRUCTIVE_OPERATION_GUARD_INSTRUCTIONS, BUILTIN_PLAN_LITE_INSTRUCTIONS, BUILTIN_WRITE_POLICY_INSTRUCTIONS, } from "./builtin-instructions.js";
14
17
  const SERVER_NAME = "vector-mind";
15
- const SERVER_VERSION = "1.0.19";
18
+ const SERVER_VERSION = "1.0.30";
16
19
  const rootFromEnv = process.env.VECTORMIND_ROOT?.trim() ?? "";
17
20
  const prettyJsonOutput = ["1", "true", "on", "yes"].includes((process.env.VECTORMIND_PRETTY_JSON ?? "").trim().toLowerCase());
18
21
  const debugLogEnabled = ["1", "true", "on", "yes"].includes((process.env.VECTORMIND_DEBUG_LOG ?? "").trim().toLowerCase());
@@ -61,6 +64,28 @@ const PENDING_PRUNE_EVERY = (() => {
61
64
  return 500;
62
65
  return n;
63
66
  })();
67
/**
 * Resolve the Codex home directory.
 * A non-empty CODEX_HOME environment variable wins (resolved to an absolute
 * path); otherwise falls back to "~/.codex".
 * @returns {string} Absolute path to the Codex home directory.
 */
function getCodexHomeDir() {
    const fromEnv = process.env.CODEX_HOME?.trim() ?? "";
    return fromEnv ? path.resolve(fromEnv) : path.join(os.homedir(), ".codex");
}
73
/**
 * Resolve the agents home directory.
 * A non-empty AGENTS_HOME environment variable wins (resolved to an absolute
 * path); otherwise falls back to "~/.agents".
 * @returns {string} Absolute path to the agents home directory.
 */
function getAgentsHomeDir() {
    const fromEnv = process.env.AGENTS_HOME?.trim() ?? "";
    return fromEnv ? path.resolve(fromEnv) : path.join(os.homedir(), ".agents");
}
79
/**
 * List the base directories read_codex_text_file is allowed to serve from:
 * skills/prompts/rules under CODEX_HOME, plus skills under AGENTS_HOME.
 * Results are fully resolved and deduplicated (CODEX_HOME and AGENTS_HOME
 * may point at the same tree).
 * @returns {string[]} Unique absolute root paths.
 */
function getAllowedCodexTextRoots() {
    const roots = new Set();
    const codexHome = getCodexHomeDir();
    for (const sub of ["skills", "prompts", "rules"]) {
        roots.add(path.resolve(path.join(codexHome, sub)));
    }
    roots.add(path.resolve(path.join(getAgentsHomeDir(), "skills")));
    return Array.from(roots);
}
64
89
  const INDEX_MAX_CODE_BYTES = (() => {
65
90
  const raw = process.env.VECTORMIND_INDEX_MAX_CODE_BYTES?.trim();
66
91
  if (!raw)
@@ -227,8 +252,18 @@ function summarizeActivityEvent(e) {
227
252
  return `pending_list returned=${String(d.returned ?? "")} total=${String(d.total ?? "")}`;
228
253
  case "semantic_search":
229
254
  return `semantic_search mode=${String(d.mode ?? "")} q=${String(d.query ?? "")} matches=${String(d.matches ?? "")}`;
255
+ case "grep":
256
+ return `grep q=${String(d.query ?? "")} matches=${String(d.matches ?? "")} truncated=${String(d.truncated ?? "")}`;
230
257
  case "query_codebase":
231
258
  return `query_codebase q=${String(d.query ?? "")} matches=${String(d.matches ?? "")}`;
259
+ case "read_file_lines":
260
+ return `read_file_lines file=${String(d.file_path ?? "")} returned=${String(d.returned ?? "")} truncated=${String(d.truncated ?? "")}`;
261
+ case "read_file_text":
262
+ return `read_file_text file=${String(d.file_path ?? "")} returned=${String(d.returned_chars ?? "")}/${String(d.total_chars ?? "")} truncated=${String(d.truncated ?? "")}`;
263
+ case "list_project_files":
264
+ return `list_project_files path=${String(d.path ?? "")} returned=${String(d.returned ?? "")} scanned=${String(d.scanned ?? "")} truncated=${String(d.truncated ?? "")}`;
265
+ case "read_codex_text_file":
266
+ return `read_codex_text_file file=${String(d.file_path ?? "")} returned=${String(d.returned_chars ?? "")}/${String(d.total_chars ?? "")} truncated=${String(d.truncated ?? "")}`;
232
267
  case "start_requirement":
233
268
  return `start_requirement #${String(d.req_id ?? "")} ${String(d.title ?? "")}`;
234
269
  case "sync_change_intent":
@@ -1133,6 +1168,65 @@ const SyncChangeIntentArgsSchema = ProjectRootArgSchema.merge(z.object({
1133
1168
  const QueryCodebaseArgsSchema = ProjectRootArgSchema.merge(z.object({
1134
1169
  query: z.string().min(1),
1135
1170
  }));
1171
+ const GrepArgsSchema = ProjectRootArgSchema.merge(z.object({
1172
+ // Pattern to search for. Defaults to regex mode for parity with tools like ripgrep.
1173
+ query: z.string().min(1),
1174
+ mode: z.enum(["regex", "literal"]).optional().default("regex"),
1175
+ // If case_sensitive is omitted and smart_case=true, uppercase => case-sensitive, otherwise case-insensitive.
1176
+ smart_case: z.boolean().optional().default(true),
1177
+ case_sensitive: z.boolean().optional(),
1178
+ // Optional hint used to narrow candidates quickly when mode=regex and the pattern has few literals.
1179
+ literal_hint: z.string().optional().default(""),
1180
+ // Defaults to code/doc chunks; can be widened if needed.
1181
+ kinds: z.array(z.string().min(1)).optional(),
1182
+ include_paths: z.array(z.string().min(1)).optional(),
1183
+ exclude_paths: z.array(z.string().min(1)).optional(),
1184
+ max_results: z.number().int().min(1).max(5000).optional().default(200),
1185
+ max_candidates: z.number().int().min(1).max(50_000).optional(),
1186
+ }));
1187
+ const ReadFileLinesArgsSchema = ProjectRootArgSchema.merge(z.object({
1188
+ // Relative to project_root, or an absolute path under project_root.
1189
+ path: z.string().min(1),
1190
+ from_line: z.number().int().min(1).optional().default(1),
1191
+ to_line: z.number().int().min(1).optional(),
1192
+ // Convenience for "head": if set, reads from_line..(from_line+total_count-1) unless to_line is provided.
1193
+ total_count: z.number().int().min(1).optional(),
1194
+ // Hard limits to avoid huge token blow-ups.
1195
+ max_lines: z.number().int().min(1).max(2000).optional().default(400),
1196
+ max_chars: z.number().int().min(200).max(200_000).optional().default(20_000),
1197
+ }));
1198
+ const ReadFileTextArgsSchema = ProjectRootArgSchema.merge(z.object({
1199
+ // Relative to project_root, or an absolute path under project_root.
1200
+ path: z.string().min(1),
1201
+ // Character offset in the decoded UTF-8 text.
1202
+ offset: z.number().int().min(0).optional().default(0),
1203
+ // Hard limit on returned text to avoid huge outputs.
1204
+ max_chars: z.number().int().min(1).max(200_000).optional().default(20_000),
1205
+ // Safety guard for raw reads; use read_file_lines on larger files.
1206
+ max_file_bytes: z.number().int().min(1_000).max(5_000_000).optional().default(1_000_000),
1207
+ }));
1208
+ const ReadCodexTextFileArgsSchema = ProjectRootArgSchema.merge(z.object({
1209
+ // Absolute path, file:// URI, or a path under CODEX_HOME / AGENTS_HOME allowed roots.
1210
+ path: z.string().min(1),
1211
+ offset: z.number().int().min(0).optional().default(0),
1212
+ max_chars: z.number().int().min(1).max(200_000).optional().default(20_000),
1213
+ max_file_bytes: z.number().int().min(1_000).max(5_000_000).optional().default(1_000_000),
1214
+ }));
1215
+ const ListProjectFilesArgsSchema = ProjectRootArgSchema.merge(z.object({
1216
+ // Relative directory/file path under project_root. "." means the project root.
1217
+ path: z.string().optional().default("."),
1218
+ recursive: z.boolean().optional().default(false),
1219
+ max_depth: z.number().int().min(1).max(20).optional().default(4),
1220
+ include_files: z.boolean().optional().default(true),
1221
+ include_dirs: z.boolean().optional().default(true),
1222
+ include_hidden: z.boolean().optional().default(false),
1223
+ respect_ignore: z.boolean().optional().default(true),
1224
+ include_paths: z.array(z.string().min(1)).optional(),
1225
+ exclude_paths: z.array(z.string().min(1)).optional(),
1226
+ extensions: z.array(z.string().min(1)).optional(),
1227
+ max_results: z.number().int().min(1).max(5000).optional().default(200),
1228
+ include_stats: z.boolean().optional().default(false),
1229
+ }));
1136
1230
  const UpsertProjectSummaryArgsSchema = ProjectRootArgSchema.merge(z.object({
1137
1231
  summary: z.string().min(1),
1138
1232
  }));
@@ -1429,6 +1523,34 @@ function toMemoryItemPreview(row, includeContent, previewChars, contentMaxChars)
1429
1523
  updated_at: row.updated_at,
1430
1524
  };
1431
1525
  }
1526
/**
 * Materialize the packaged BUILTIN_CONVENTIONS as synthetic memory rows.
 * Built-in rows use negative ids (-1000, -1001, ...) so they can never
 * collide with database-assigned ids, carry "builtin" timestamps, and tag
 * their metadata with source=builtin.
 * @returns {Array<object>} Convention rows shaped like stored memory items.
 */
function getBuiltinConventionRows() {
    return BUILTIN_CONVENTIONS.map((spec, idx) => {
        const metadata = { source: "builtin", key: spec.key, tags: spec.tags ?? [] };
        return {
            id: -1000 - idx,
            kind: "convention",
            title: spec.key,
            content: spec.content,
            file_path: null,
            start_line: null,
            end_line: null,
            req_id: null,
            metadata_json: safeJson(metadata),
            content_hash: sha256Hex(spec.content),
            created_at: "builtin",
            updated_at: "builtin",
        };
    });
}
1542
/**
 * Collect up to `conventionsLimit` convention previews: built-in conventions
 * first, then stored conventions from the database to fill remaining slots.
 * Content is never inlined (includeContent=false); only previews are built.
 * @param {number} conventionsLimit Maximum previews to return (<=0 → []).
 * @param {number} previewChars Preview truncation length.
 * @param {number} contentMaxChars Content cap passed through to the preview builder.
 * @returns {Array<object>} Preview objects, built-ins before stored rows.
 */
function getConventionPreviews(conventionsLimit, previewChars, contentMaxChars) {
    if (conventionsLimit <= 0)
        return [];
    const preview = (row) => toMemoryItemPreview(row, false, previewChars, contentMaxChars);
    const result = getBuiltinConventionRows().slice(0, conventionsLimit).map(preview);
    const remaining = conventionsLimit - result.length;
    if (remaining > 0) {
        for (const row of listConventionsStmt.all(remaining)) {
            result.push(preview(row));
        }
    }
    return result;
}
1432
1554
  function toRequirementPreview(req, includeContent, previewChars, contentMaxChars) {
1433
1555
  const context = req.context_data ?? null;
1434
1556
  const contextPreview = context ? makePreviewText(context, previewChars) : null;
@@ -1687,18 +1809,357 @@ async function semanticSearchHybridInternal(opts) {
1687
1809
  }
1688
1810
  return likeSearchInternal(opts);
1689
1811
  }
1690
- const server = new Server({ name: SERVER_NAME, version: SERVER_VERSION }, {
1691
- capabilities: { tools: {} },
1692
- instructions: [
1812
/**
 * Escape every regex metacharacter in `literal` so the result, compiled as a
 * RegExp source, matches the input string verbatim.
 */
function escapeRegExp(literal) {
    return literal.replace(/[.*+?^${}()|[\]\\]/g, (ch) => `\\${ch}`);
}
1815
/** True when the string contains at least one ASCII uppercase letter (A-Z). */
function hasUppercaseAscii(s) {
    for (const ch of s) {
        if (ch >= "A" && ch <= "Z")
            return true;
    }
    return false;
}
1818
/**
 * Best-effort extraction of the longest literal substring that EVERY match of
 * `pattern` is guaranteed to contain, used as an indexed candidate-narrowing
 * hint for grep. A wrong (non-guaranteed) hint silently drops matches, so
 * this is deliberately conservative and returns "" when unsure; callers then
 * fall back to requiring an explicit `literal_hint`.
 *
 * Fixes over the previous version (which could return non-guaranteed hints):
 * - Unescaped alternation (`|`): no single branch literal appears in every
 *   match, so return "" instead of the longest branch.
 * - Group-modifier prefixes (`(?:`, `(?=`, `(?!`, `(?<=`, `(?<name>`, inline
 *   flags): skipped instead of being misread as literal text like ":abc".
 * - Optional quantifiers (`?`, `*`, `{...}`): the preceding character is
 *   dropped from the current run; quantifier bodies `{m,n}` are skipped.
 * - A quantifier directly after `)` makes an entire group optional; since we
 *   no longer know which collected literal came from it, return "".
 * @param {string} pattern Regex source text.
 * @returns {string} Longest guaranteed literal, possibly "".
 */
function extractLongestLiteralFromRegex(pattern) {
    let best = "";
    let cur = "";
    let inClass = false;
    const flush = () => {
        if (cur.length > best.length)
            best = cur;
        cur = "";
    };
    for (let i = 0; i < pattern.length; i++) {
        const ch = pattern[i] ?? "";
        if (!ch)
            break;
        if (inClass) {
            // Skip class members until the closing bracket.
            if (ch === "]")
                inClass = false;
            flush();
            continue;
        }
        if (ch === "[") {
            inClass = true;
            flush();
            continue;
        }
        if (ch === "\\") {
            const next = pattern[i + 1] ?? "";
            if (!next) {
                flush();
                continue;
            }
            // Class shorthands, word boundaries, and backreferences are not
            // literal characters.
            if (/[dDsSwWbB0-9]/.test(next)) {
                flush();
                i += 1;
                continue;
            }
            // Escaped metacharacter: treat \x as literal x (e.g. \( \. \\).
            cur += next;
            i += 1;
            continue;
        }
        if (ch === "|") {
            // Alternation: no branch literal is guaranteed in every match.
            return "";
        }
        if (ch === "(") {
            flush();
            if (pattern[i + 1] === "?") {
                // Skip the group-modifier prefix so it is not read as text.
                let j = i + 2;
                if (pattern[j] === "<" && pattern[j + 1] !== "=" && pattern[j + 1] !== "!") {
                    // Named capture "(?<name>": skip to the closing ">".
                    while (j < pattern.length && pattern[j] !== ">")
                        j += 1;
                    i = j;
                }
                else {
                    // "?:", "?=", "?!", "?<=", "?<!", or inline flags "?i:".
                    while (j < pattern.length && !":=!".includes(pattern[j] ?? "") && pattern[j] !== ")")
                        j += 1;
                    i = pattern[j] === ")" ? j - 1 : j;
                }
            }
            continue;
        }
        if (ch === "?" || ch === "*") {
            if ((pattern[i - 1] ?? "") === ")")
                return ""; // optional group: collected literals may vanish
            cur = cur.slice(0, -1); // preceding char is optional
            flush();
            continue;
        }
        if (ch === "{") {
            if ((pattern[i - 1] ?? "") === ")")
                return ""; // repeated group with unknown lower bound
            cur = cur.slice(0, -1); // lower bound may be 0; be conservative
            flush();
            // Skip the quantifier body "{m,n}" so digits aren't misread.
            while (i < pattern.length && pattern[i] !== "}")
                i += 1;
            continue;
        }
        // Remaining regex metacharacters end the current literal run.
        if (".+^$)}".includes(ch)) {
            flush();
            continue;
        }
        cur += ch;
    }
    flush();
    return best;
}
1873
/**
 * Normalize a path fragment for substring matching: backslashes become
 * forward slashes and the result is lowercased.
 */
function normalizePathNeedle(s) {
    return s.split("\\").join("/").toLowerCase();
}
1876
/**
 * Check a (db-relative) file path against optional include/exclude needles.
 *
 * Matching is case-insensitive substring matching with backslashes normalized
 * to forward slashes on BOTH sides. (Previously only the needles were
 * normalized, so Windows-style candidate paths could never match a
 * forward-slash needle.)
 *
 * Exclusions win: any matching exclude needle rejects the path. When an
 * include list is present, at least one include needle must match.
 * @param {string} filePath Candidate path.
 * @param {string[]|null|undefined} includePaths Substrings to require (any-of).
 * @param {string[]|null|undefined} excludePaths Substrings to reject.
 * @returns {boolean} True when the path passes all filters.
 */
function passesPathFilters(filePath, includePaths, excludePaths) {
    const normalize = (s) => s.replace(/\\/g, "/").toLowerCase();
    const fp = normalize(filePath);
    if (excludePaths?.length) {
        for (const raw of excludePaths) {
            const needle = normalize(raw);
            if (needle && fp.includes(needle))
                return false;
        }
    }
    if (includePaths?.length) {
        for (const raw of includePaths) {
            const needle = normalize(raw);
            if (needle && fp.includes(needle))
                return true;
        }
        return false;
    }
    return true;
}
1899
/**
 * Compute the character offset of the start of every line in `text`.
 * Entry i is the start offset of 0-based line i; starts[0] is always 0 and a
 * trailing newline yields a final (empty) line start.
 */
function buildLineStarts(text) {
    const starts = [0];
    let nl = text.indexOf("\n");
    while (nl !== -1) {
        starts.push(nl + 1);
        nl = text.indexOf("\n", nl + 1);
    }
    return starts;
}
1907
/**
 * Binary-search `lineStarts` for the 0-based line index containing `offset`:
 * the index of the last line start that is <= offset, clamped to 0.
 * @param {number[]} lineStarts Ascending line-start offsets (from buildLineStarts).
 * @param {number} offset Character offset into the original text.
 * @returns {number} 0-based line index.
 */
function lineIndexForOffset(lineStarts, offset) {
    let low = 0;
    let high = lineStarts.length;
    // Invariant: starts before `low` are <= offset; starts at/after `high` are > offset.
    while (low < high) {
        const mid = Math.floor((low + high) / 2);
        if ((lineStarts[mid] ?? 0) <= offset)
            low = mid + 1;
        else
            high = mid;
    }
    return Math.max(0, low - 1);
}
1920
/**
 * Build the global+multiline RegExp used by grep. In "literal" mode the query
 * is escaped so it matches verbatim; the "i" flag is added unless
 * opts.caseSensitive is set.
 * NOTE: the returned regex carries the "g" flag, so test/exec are stateful
 * via lastIndex — callers must reset or iterate accordingly.
 */
function compileGrepRegex(opts) {
    const source = opts.mode === "literal" ? escapeRegExp(opts.query) : opts.query;
    let flags = "gm";
    if (!opts.caseSensitive)
        flags = `i${flags}`;
    return new RegExp(source, flags);
}
1925
/**
 * Resolve a user-supplied path (relative or absolute; blank means ".") to an
 * absolute path guaranteed to live under projectRoot, plus its db-normalized
 * relative form. The root itself is reported as dbFilePath ".".
 * @param {string} inputPath Path from tool arguments.
 * @param {{allowRoot?: boolean}} [opts] Whether the project root itself is acceptable.
 * @throws When the path escapes projectRoot, or equals it without opts.allowRoot.
 * @returns {{absPath: string, dbFilePath: string}}
 */
function resolveProjectPathUnderRoot(inputPath, opts = {}) {
    const cleaned = inputPath.trim() || ".";
    const root = path.resolve(projectRoot);
    const absPath = path.resolve(path.isAbsolute(cleaned) ? cleaned : path.join(projectRoot, cleaned));
    const rel = path.relative(root, absPath);
    // A non-empty rel that climbs out ("..") or stays absolute (other drive
    // on Windows) means the path escapes the root.
    const escapesRoot = rel.startsWith("..") || path.isAbsolute(rel);
    if (rel !== "" && escapesRoot) {
        throw new Error(`[VectorMind] Path must be under project_root: ${inputPath}`);
    }
    if (rel === "" && !opts.allowRoot) {
        throw new Error(`[VectorMind] Path must not be the project_root itself: ${inputPath}`);
    }
    return {
        absPath,
        dbFilePath: rel === "" ? "." : normalizeToDbPath(absPath),
    };
}
1943
/**
 * Resolve a path for read-oriented tools (read_file_lines / read_file_text).
 * Delegates to resolveProjectPathUnderRoot with allowRoot=false, so the
 * project root itself is rejected — only paths strictly under it pass.
 */
function resolveReadPathUnderProjectRoot(inputPath) {
    return resolveProjectPathUnderRoot(inputPath, { allowRoot: false });
}
1946
/**
 * Resolve a path for read_codex_text_file. Accepts an absolute path or a
 * file:// URI and requires the result to live under one of the allowed
 * CODEX_HOME / AGENTS_HOME text roots.
 * @param {string} inputPath Absolute path or file:// URI.
 * @throws When the path is empty or outside every allowed root.
 * @returns {{absPath: string, displayPath: string, allowedRoot: string}}
 */
function resolveCodexTextPath(inputPath) {
    const trimmed = inputPath.trim();
    if (!trimmed)
        throw new Error("[VectorMind] path is required");
    const uriPath = trimmed.startsWith("file:") ? parseFileUriToPath(trimmed) : null;
    const absPath = path.resolve(uriPath ?? trimmed);
    // Compute the allowed roots once; previously they were recomputed again
    // just to build the error message.
    const allowedRoots = getAllowedCodexTextRoots();
    const allowedRoot = allowedRoots.find((root) => {
        const rel = path.relative(root, absPath);
        return rel === "" || (!rel.startsWith("..") && !path.isAbsolute(rel));
    });
    if (!allowedRoot) {
        throw new Error(`[VectorMind] Path must be under one of the allowed local text roots: ${allowedRoots.join(", ")}`);
    }
    return { absPath, displayPath: absPath, allowedRoot };
}
1961
/** True for dotfile names, excluding the "." and ".." directory entries. */
function isHiddenBaseName(name) {
    if (name === "." || name === "..")
        return false;
    return name.startsWith(".");
}
1964
/**
 * Normalize a user-supplied extensions filter: trim, lowercase, ensure a
 * leading dot, drop empties, and dedupe while preserving first-seen order.
 * Returns null when nothing usable remains (meaning "no filter").
 * @param {string[]|null|undefined} values Raw extension strings.
 * @returns {string[]|null} Normalized unique extensions, or null.
 */
function normalizeExtensionsFilter(values) {
    if (!values?.length)
        return null;
    const seen = new Set();
    for (const value of values) {
        const lower = value.trim().toLowerCase();
        if (!lower)
            continue;
        seen.add(lower.startsWith(".") ? lower : `.${lower}`);
    }
    return seen.size > 0 ? Array.from(seen) : null;
}
1973
/**
 * Stream a UTF-8 file and collect numbered lines within [fromLine, toLine],
 * capped by maxLines and maxChars. Each returned line is rendered as
 * "<lineNo>:<text>"; `truncated` reports whether a cap (not the requested
 * range) cut the output short. The stream and readline interface are always
 * torn down, even if iteration throws.
 * @param {{absPath: string, fromLine: number, toLine: number, maxLines: number, maxChars: number}} opts
 * @returns {Promise<{text: string, returned: number, truncated: boolean}>}
 */
async function readTextFileLines(opts) {
    const collected = [];
    let currentLine = 0;
    let budgetUsed = 0;
    let truncated = false;
    const stream = fs.createReadStream(opts.absPath, { encoding: "utf8" });
    const rl = readline.createInterface({ input: stream, crlfDelay: Infinity });
    try {
        for await (const line of rl) {
            currentLine += 1;
            if (currentLine < opts.fromLine)
                continue;
            if (currentLine > opts.toLine)
                break;
            const rendered = `${currentLine}:${line}`;
            budgetUsed += rendered.length + 1; // +1 for the joining newline
            if (collected.length >= opts.maxLines || budgetUsed > opts.maxChars) {
                truncated = true;
                break;
            }
            collected.push(rendered);
        }
    }
    finally {
        // Best-effort teardown; failures here are irrelevant to the result.
        try {
            rl.close();
        }
        catch { }
        try {
            stream.destroy();
        }
        catch { }
    }
    return { text: collected.join("\n"), returned: collected.length, truncated };
}
2008
/**
 * Read a whole UTF-8 file (guarded by maxFileBytes) and return the slice of
 * up to maxChars characters starting at `offset`, clamped to the text length.
 * Character counts are JS string lengths (UTF-16 units), not bytes.
 * @param {{absPath: string, offset: number, maxChars: number, maxFileBytes: number}} opts
 * @throws When the path is not a regular file or exceeds maxFileBytes.
 * @returns {{text: string, totalChars: number, returnedChars: number, truncated: boolean}}
 */
function readTextFileSlice(opts) {
    const stat = fs.statSync(opts.absPath);
    if (!stat.isFile())
        throw new Error("Not a file");
    if (stat.size > opts.maxFileBytes) {
        throw new Error(`File is too large for raw text read (${stat.size} bytes > limit ${opts.maxFileBytes}). Use read_file_lines instead.`);
    }
    const fullText = fs.readFileSync(opts.absPath, "utf8");
    const totalChars = fullText.length;
    const start = Math.min(opts.offset, totalChars);
    const text = fullText.slice(start, start + opts.maxChars);
    return {
        text,
        totalChars,
        returnedChars: text.length,
        truncated: start + text.length < totalChars,
    };
}
2023
/**
 * Bounded, ignore-aware listing of files/directories under a start path.
 *
 * Behavior visible in this function:
 * - If the start path is a single file, it is emitted (subject to ignore,
 *   path, and extension filters) and the function returns with scanned=1.
 * - Otherwise a depth-first walk runs over directories; when opts.recursive
 *   is false the effective max depth is 1 (immediate children only).
 * - Entries are capped at opts.maxResults via pushEntry, which sets
 *   `truncated` instead of pushing once the cap is reached; the walk then
 *   stops early. Results are sorted by path at the end, so a truncated
 *   listing is "first maxResults encountered in DFS order", then sorted.
 * - Children are sorted by name and pushed onto the stack in reverse so the
 *   DFS visits them in ascending name order.
 * - Hidden entries (dotfiles) are skipped unless opts.includeHidden; ignored
 *   paths are skipped when opts.respectIgnore; `scanned` counts only entries
 *   that survive those two checks.
 * - Path filters gate whether an entry is EMITTED, but a non-matching
 *   directory is still descended into (its children may match).
 * - stat failures for includeStats are swallowed (entry emitted without
 *   size/mtime); unreadable directories are skipped silently.
 * @param {object} opts Pre-validated options (startAbsPath, startDbPath,
 *   recursive, maxDepth, includeFiles, includeDirs, includeHidden,
 *   respectIgnore, includePaths, excludePaths, extensions, maxResults,
 *   includeStats).
 * @returns {{entries: Array<object>, returned: number, scanned: number, truncated: boolean}}
 */
function listProjectFilesInternal(opts) {
    const entries = [];
    let scanned = 0;
    let truncated = false;
    // Cap-enforcing append: marks truncated instead of pushing once full.
    const pushEntry = (entry) => {
        if (entries.length >= opts.maxResults) {
            truncated = true;
            return;
        }
        entries.push(entry);
    };
    const startStat = fs.statSync(opts.startAbsPath);
    if (startStat.isFile()) {
        // Single-file start path: apply the same filters, then return early.
        const relPath = opts.startDbPath;
        if ((!opts.respectIgnore || !shouldIgnoreDbFilePath(relPath)) && passesPathFilters(relPath, opts.includePaths, opts.excludePaths)) {
            const ext = path.extname(relPath).toLowerCase();
            if (!opts.extensions || opts.extensions.includes(ext)) {
                pushEntry({
                    path: relPath,
                    kind: "file",
                    depth: 0,
                    ...(opts.includeStats ? { size: startStat.size, mtime: startStat.mtime.toISOString() } : {}),
                });
            }
        }
        return { entries, returned: entries.length, scanned: 1, truncated };
    }
    // Non-recursive mode lists only the immediate children (depth 1).
    const effectiveMaxDepth = opts.recursive ? opts.maxDepth : 1;
    const stack = [{ absPath: opts.startAbsPath, depth: 0 }];
    while (stack.length > 0) {
        const current = stack.pop();
        if (!current)
            break;
        let dirEntries;
        try {
            dirEntries = fs.readdirSync(current.absPath, { withFileTypes: true });
        }
        catch {
            // Unreadable directory (permissions, race): skip silently.
            continue;
        }
        dirEntries.sort((a, b) => a.name.localeCompare(b.name));
        // Push in reverse so the LIFO stack yields ascending name order.
        for (let idx = dirEntries.length - 1; idx >= 0; idx -= 1) {
            const child = dirEntries[idx];
            if (!child)
                continue;
            if (!opts.includeHidden && isHiddenBaseName(child.name))
                continue;
            const childAbs = path.join(current.absPath, child.name);
            const childRel = normalizeToDbPath(childAbs);
            if (opts.respectIgnore && shouldIgnoreDbFilePath(childRel))
                continue;
            scanned += 1;
            const childDepth = current.depth + 1;
            // Path filters decide emission only; traversal continues below.
            const matchesPath = passesPathFilters(childRel, opts.includePaths, opts.excludePaths);
            if (child.isDirectory()) {
                if (opts.includeDirs && matchesPath) {
                    let stats = null;
                    if (opts.includeStats) {
                        try {
                            stats = fs.statSync(childAbs);
                        }
                        catch {
                            // Best-effort stats: emit the entry without them.
                            stats = null;
                        }
                    }
                    pushEntry({
                        path: childRel,
                        kind: "dir",
                        depth: childDepth,
                        ...(stats ? { size: stats.size, mtime: stats.mtime.toISOString() } : {}),
                    });
                    if (truncated)
                        break;
                }
                if (childDepth < effectiveMaxDepth) {
                    stack.push({ absPath: childAbs, depth: childDepth });
                }
                continue;
            }
            // Symlinks, sockets, etc. are neither emitted nor followed.
            if (!child.isFile())
                continue;
            if (!opts.includeFiles || !matchesPath)
                continue;
            const ext = path.extname(childRel).toLowerCase();
            if (opts.extensions && !opts.extensions.includes(ext))
                continue;
            let stats = null;
            if (opts.includeStats) {
                try {
                    stats = fs.statSync(childAbs);
                }
                catch {
                    stats = null;
                }
            }
            pushEntry({
                path: childRel,
                kind: "file",
                depth: childDepth,
                ...(stats ? { size: stats.size, mtime: stats.mtime.toISOString() } : {}),
            });
            if (truncated)
                break;
        }
        if (truncated)
            break;
    }
    entries.sort((a, b) => a.path.localeCompare(b.path));
    return { entries, returned: entries.length, scanned, truncated };
}
2133
+ function buildServerInstructions() {
2134
+ return [
1693
2135
  "VectorMind MCP is available in this session. Use it to avoid guessing project context.",
2136
+ "This package ships built-in baseline policy. If a client supports MCP instructions, these rules auto-apply as soon as the MCP is installed and connected; no user-side config file is required.",
2137
+ "The write-operation rules below are strict workflow constraints. Do not claim that the environment has real git branch locks, checkout APIs, or file-lock tools unless such tools are actually available in the current client/runtime. If such tools are absent, you must still enforce the same exclusivity semantics through explicit coordination and serialized same-file edits.",
1694
2138
  "Project root resolution order: tool argument project_root (recommended for clients without roots/list), then VECTORMIND_ROOT (avoid hardcoding in global config), then MCP roots/list (best-effort; falls back quickly if unsupported), then process.cwd() (so start your MCP client in the project directory for per-project isolation).",
1695
2139
  "If root_source is fallback, file watching/indexing is disabled (pass project_root to enable per-project tracking).",
1696
2140
  "",
2141
+ "Built-in write-operation policy:",
2142
+ BUILTIN_WRITE_POLICY_INSTRUCTIONS,
2143
+ "",
2144
+ "Built-in task-list / Plan-Lite policy:",
2145
+ BUILTIN_PLAN_LITE_INSTRUCTIONS,
2146
+ "",
2147
+ "Built-in destructive-operation guard policy:",
2148
+ BUILTIN_DESTRUCTIVE_OPERATION_GUARD_INSTRUCTIONS,
2149
+ "",
2150
+ "Built-in architecture and code-organization policy:",
2151
+ BUILTIN_ARCHITECTURE_AND_CODE_ORGANIZATION_INSTRUCTIONS,
2152
+ "",
1697
2153
  "Required workflow:",
1698
2154
  "- On every new conversation/session: call bootstrap_context({ query: <current goal> }) first (or at least get_brain_dump()) to restore context and retrieve relevant matches from the local memory store (vector if enabled; otherwise FTS/LIKE).",
1699
2155
  " - Output is compact by default. Use include_content=true only when you truly need full text (it increases tokens).",
1700
2156
  " - Tune output size with: requirements_limit/changes_limit/notes_limit, preview_chars, pending_limit/pending_offset.",
1701
2157
  " - Prefer read_memory_item(id, offset, limit) to fetch full text on demand instead of returning large content in other tool outputs.",
2158
+ "- To read local Codex skill/prompt/rule files (for example SKILL.md under CODEX_HOME or AGENTS_HOME), prefer read_codex_text_file({ path }) instead of assuming a filesystem MCP resource server exists.",
2159
+ "- For project file/directory browsing, prefer list_project_files({ path, recursive?, max_depth? }) over shelling out to Get-ChildItem/ls. It respects ignore rules and keeps output bounded.",
2160
+ "- For small/medium raw file reads, prefer read_file_text({ path, offset?, max_chars? }) over Get-Content -Raw. Use read_file_lines(...) when you need deterministic line ranges or the file may be large.",
2161
+ "- For an rg/Select-String-style search with exact file+line+col matches, prefer grep({ query: <pattern> }) over shelling out (uses the indexed code/doc chunks).",
2162
+ "- To read a bounded segment of a file, prefer read_file_lines({ path: <file>, from_line/to_line or total_count }) over unbounded file reads.",
1702
2163
  "- BEFORE editing code: call start_requirement(title, background) to set the active requirement.",
1703
2164
  "- AFTER editing + saving: call get_pending_changes() to see unsynced files, then call sync_change_intent(intent, files). (You can omit files to auto-link all pending changes.)",
1704
2165
  "- After major milestones/decisions: call upsert_project_summary(summary) and/or add_note(...) to persist durable context locally.",
@@ -1708,7 +2169,11 @@ const server = new Server({ name: SERVER_NAME, version: SERVER_VERSION }, {
1708
2169
  "- When you need to recall relevant context from history/code/docs: call semantic_search(query, ...) instead of guessing.",
1709
2170
  "",
1710
2171
  "If tool output conflicts with assumptions, trust the tool output.",
1711
- ].join("\n"),
2172
+ ].join("\n");
2173
+ }
2174
+ const server = new Server({ name: SERVER_NAME, version: SERVER_VERSION }, {
2175
+ capabilities: { tools: {} },
2176
+ instructions: buildServerInstructions(),
1712
2177
  });
1713
2178
  async function resolveProjectRootFromMcpRoots() {
1714
2179
  const caps = server.getClientCapabilities();
@@ -2265,6 +2730,31 @@ server.setRequestHandler(ListToolsRequestSchema, async () => {
2265
2730
  description: "Clear the in-memory debug activity log. Enable logging with VECTORMIND_DEBUG_LOG=1.",
2266
2731
  inputSchema: toJsonSchemaCompat(ClearActivityLogArgsSchema),
2267
2732
  },
2733
+ {
2734
+ name: "grep",
2735
+ description: "Fast indexed grep across code/doc chunks with precise file/line/col matches. Prefer this over shelling out to rg/Select-String when possible.",
2736
+ inputSchema: toJsonSchemaCompat(GrepArgsSchema),
2737
+ },
2738
+ {
2739
+ name: "list_project_files",
2740
+ description: "AI-friendly, ignore-aware file/directory listing under project_root with bounded output. Prefer this over Get-ChildItem/ls for local repository browsing.",
2741
+ inputSchema: toJsonSchemaCompat(ListProjectFilesArgsSchema),
2742
+ },
2743
+ {
2744
+ name: "read_codex_text_file",
2745
+ description: "Read bounded text from local Codex/agents files such as SKILL.md, prompt files, and rules under CODEX_HOME/AGENTS_HOME. Prefer this over assuming a filesystem MCP resource server exists.",
2746
+ inputSchema: toJsonSchemaCompat(ReadCodexTextFileArgsSchema),
2747
+ },
2748
+ {
2749
+ name: "read_file_lines",
2750
+ description: "Read a specific line range from a file under project_root (with strict size limits). Prefer this over Get-Content for deterministic reads.",
2751
+ inputSchema: toJsonSchemaCompat(ReadFileLinesArgsSchema),
2752
+ },
2753
+ {
2754
+ name: "read_file_text",
2755
+ description: "Read bounded raw UTF-8 text from a file under project_root. Prefer this over Get-Content -Raw for small/medium text files; use read_file_lines for large files or line-specific reads.",
2756
+ inputSchema: toJsonSchemaCompat(ReadFileTextArgsSchema),
2757
+ },
2268
2758
  {
2269
2759
  name: "query_codebase",
2270
2760
  description: "Search the symbol index for class/function/type names (or substrings) to locate definitions by file path and signature. Use this when you need to find code—do not guess locations.",
@@ -2610,7 +3100,7 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
2610
3100
  ? toMemoryItemPreview(projectSummaryRow, includeContent, previewChars, contentMaxChars)
2611
3101
  : null;
2612
3102
  const recent_notes = listRecentNotesStmt.all(notesLimit).map((n) => toMemoryItemPreview(n, includeContent, previewChars, contentMaxChars));
2613
- const conventions = listConventionsStmt.all(conventionsLimit).map((c) => toMemoryItemPreview(c, false, previewChars, contentMaxChars));
3103
+ const conventions = getConventionPreviews(conventionsLimit, previewChars, contentMaxChars);
2614
3104
  const pending_total = Number(countPendingChangesStmt.get()?.total ?? 0);
2615
3105
  const pending_offset = args.pending_offset;
2616
3106
  const pending_limit = args.pending_limit;
@@ -2706,7 +3196,7 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
2706
3196
  ? toMemoryItemPreview(projectSummaryRow, includeContent, previewChars, contentMaxChars)
2707
3197
  : null;
2708
3198
  const recent_notes = listRecentNotesStmt.all(notesLimit).map((n) => toMemoryItemPreview(n, includeContent, previewChars, contentMaxChars));
2709
- const conventions = listConventionsStmt.all(conventionsLimit).map((c) => toMemoryItemPreview(c, false, previewChars, contentMaxChars));
3199
+ const conventions = getConventionPreviews(conventionsLimit, previewChars, contentMaxChars);
2710
3200
  const pending_total = Number(countPendingChangesStmt.get()?.total ?? 0);
2711
3201
  const pending_offset = args.pending_offset;
2712
3202
  const pending_limit = args.pending_limit;
@@ -2935,6 +3425,441 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
2935
3425
  clearActivityLog();
2936
3426
  return { content: [{ type: "text", text: toolJson({ ok: true }) }] };
2937
3427
  }
3428
+ if (toolName === "grep") {
3429
+ const args = GrepArgsSchema.parse(rawArgs);
3430
+ const q = args.query;
3431
+ const mode = args.mode;
3432
+ const smartCase = args.smart_case;
3433
+ const kinds = args.kinds?.length ? args.kinds : ["code_chunk", "doc_chunk"];
3434
+ const includePaths = args.include_paths?.length ? args.include_paths : null;
3435
+ const excludePaths = args.exclude_paths?.length ? args.exclude_paths : null;
3436
+ const maxResults = args.max_results;
3437
+ const hint = (() => {
3438
+ if (mode === "literal")
3439
+ return q;
3440
+ const explicit = args.literal_hint.trim();
3441
+ if (explicit)
3442
+ return explicit;
3443
+ return extractLongestLiteralFromRegex(q);
3444
+ })();
3445
+ if (mode === "regex" && hint.trim().length < 3) {
3446
+ return {
3447
+ isError: true,
3448
+ content: [
3449
+ {
3450
+ type: "text",
3451
+ text: toolJson({
3452
+ ok: false,
3453
+ error: "Regex has no sufficiently long literal anchor for indexed narrowing. Provide literal_hint (>= 3 chars) or narrow with include_paths.",
3454
+ query: q,
3455
+ mode,
3456
+ literal_hint: args.literal_hint,
3457
+ }),
3458
+ },
3459
+ ],
3460
+ };
3461
+ }
3462
+ const caseSensitive = args.case_sensitive ?? (smartCase ? hasUppercaseAscii(q) : true);
3463
+ let re;
3464
+ try {
3465
+ re = compileGrepRegex({ query: q, mode, caseSensitive });
3466
+ }
3467
+ catch (err) {
3468
+ return {
3469
+ isError: true,
3470
+ content: [
3471
+ {
3472
+ type: "text",
3473
+ text: toolJson({ ok: false, error: `Invalid pattern: ${String(err)}`, query: q, mode }),
3474
+ },
3475
+ ],
3476
+ };
3477
+ }
3478
+ const maxCandidates = args.max_candidates ?? Math.min(50_000, Math.max(1000, maxResults * 200));
3479
+ const candidates = (() => {
3480
+ if (ftsAvailable) {
3481
+ const matchQuery = buildFtsMatchQuery(hint);
3482
+ const placeholders = kinds.map(() => "?").join(", ");
3483
+ const stmt = db.prepare(`
3484
+ SELECT
3485
+ m.id as id,
3486
+ m.kind as kind,
3487
+ m.content as content,
3488
+ m.file_path as file_path,
3489
+ m.start_line as start_line,
3490
+ m.end_line as end_line
3491
+ FROM ${FTS_TABLE_NAME}
3492
+ JOIN memory_items m ON m.id = ${FTS_TABLE_NAME}.rowid
3493
+ WHERE ${FTS_TABLE_NAME} MATCH ?
3494
+ AND m.kind IN (${placeholders})
3495
+ ORDER BY m.file_path ASC, m.start_line ASC, m.id ASC
3496
+ LIMIT ?
3497
+ `);
3498
+ return stmt.all(matchQuery, ...kinds, maxCandidates);
3499
+ }
3500
+ const needle = mode === "literal" ? q : hint;
3501
+ const escaped = escapeLike(needle);
3502
+ const like = `%${escaped}%`;
3503
+ const placeholders = kinds.map(() => "?").join(", ");
3504
+ const stmt = db.prepare(`
3505
+ SELECT
3506
+ id,
3507
+ kind,
3508
+ content,
3509
+ file_path,
3510
+ start_line,
3511
+ end_line
3512
+ FROM memory_items
3513
+ WHERE content LIKE ? ESCAPE '\\'
3514
+ AND kind IN (${placeholders})
3515
+ ORDER BY file_path ASC, start_line ASC, id ASC
3516
+ LIMIT ?
3517
+ `);
3518
+ return stmt.all(like, ...kinds, maxCandidates);
3519
+ })();
3520
+ const matches = [];
3521
+ let candidatesScanned = 0;
3522
+ let truncated = false;
3523
+ for (const c of candidates) {
3524
+ candidatesScanned += 1;
3525
+ if (!c.file_path || c.start_line == null)
3526
+ continue;
3527
+ if (shouldIgnoreDbFilePath(c.file_path))
3528
+ continue;
3529
+ if (!passesPathFilters(c.file_path, includePaths, excludePaths))
3530
+ continue;
3531
+ const content = c.content ?? "";
3532
+ const lineStarts = buildLineStarts(content);
3533
+ re.lastIndex = 0;
3534
+ let m;
3535
+ while ((m = re.exec(content)) !== null) {
3536
+ const idx = m.index ?? 0;
3537
+ const matched = m[0] ?? "";
3538
+ if (!matched) {
3539
+ if (re.lastIndex >= content.length)
3540
+ break;
3541
+ re.lastIndex += 1;
3542
+ continue;
3543
+ }
3544
+ const lineIdx = lineIndexForOffset(lineStarts, idx);
3545
+ const lineStart = lineStarts[lineIdx] ?? 0;
3546
+ const lineEnd = lineIdx + 1 < lineStarts.length
3547
+ ? (lineStarts[lineIdx + 1] ?? content.length) - 1
3548
+ : content.length;
3549
+ const previewRaw = content.slice(lineStart, Math.max(lineStart, lineEnd));
3550
+ const preview = previewRaw.length > 500 ? `${previewRaw.slice(0, 500)}…` : previewRaw;
3551
+ const matchText = matched.length > 200 ? `${matched.slice(0, 200)}…` : matched;
3552
+ matches.push({
3553
+ file_path: c.file_path,
3554
+ kind: c.kind,
3555
+ line: c.start_line + lineIdx,
3556
+ col: idx - lineStart + 1,
3557
+ preview,
3558
+ match: matchText,
3559
+ });
3560
+ if (matches.length >= maxResults) {
3561
+ truncated = true;
3562
+ break;
3563
+ }
3564
+ }
3565
+ if (truncated)
3566
+ break;
3567
+ }
3568
+ logActivity("grep", {
3569
+ query: q,
3570
+ mode,
3571
+ case_sensitive: caseSensitive,
3572
+ smart_case: smartCase,
3573
+ hint,
3574
+ kinds,
3575
+ include_paths: includePaths ?? [],
3576
+ exclude_paths: excludePaths ?? [],
3577
+ candidates: candidates.length,
3578
+ candidates_scanned: candidatesScanned,
3579
+ matches: matches.length,
3580
+ truncated,
3581
+ });
3582
+ return {
3583
+ content: [
3584
+ {
3585
+ type: "text",
3586
+ text: toolJson({
3587
+ ok: true,
3588
+ query: q,
3589
+ mode,
3590
+ case_sensitive: caseSensitive,
3591
+ smart_case: smartCase,
3592
+ hint,
3593
+ kinds,
3594
+ include_paths: includePaths ?? [],
3595
+ exclude_paths: excludePaths ?? [],
3596
+ candidates: { total: candidates.length, scanned: candidatesScanned },
3597
+ matches,
3598
+ truncated,
3599
+ }),
3600
+ },
3601
+ ],
3602
+ };
3603
+ }
3604
+ if (toolName === "list_project_files") {
3605
+ const args = ListProjectFilesArgsSchema.parse(rawArgs);
3606
+ const resolved = resolveProjectPathUnderRoot(args.path, { allowRoot: true });
3607
+ let st;
3608
+ try {
3609
+ st = fs.statSync(resolved.absPath);
3610
+ }
3611
+ catch (err) {
3612
+ return {
3613
+ isError: true,
3614
+ content: [{ type: "text", text: toolJson({ ok: false, error: `Path not found: ${String(err)}` }) }],
3615
+ };
3616
+ }
3617
+ const includePaths = args.include_paths?.length ? args.include_paths : null;
3618
+ const excludePaths = args.exclude_paths?.length ? args.exclude_paths : null;
3619
+ const extensions = normalizeExtensionsFilter(args.extensions);
3620
+ const result = listProjectFilesInternal({
3621
+ startAbsPath: resolved.absPath,
3622
+ startDbPath: resolved.dbFilePath,
3623
+ recursive: args.recursive,
3624
+ maxDepth: args.max_depth,
3625
+ includeFiles: args.include_files,
3626
+ includeDirs: args.include_dirs,
3627
+ includeHidden: args.include_hidden,
3628
+ respectIgnore: args.respect_ignore,
3629
+ includePaths,
3630
+ excludePaths,
3631
+ extensions,
3632
+ maxResults: args.max_results,
3633
+ includeStats: args.include_stats,
3634
+ });
3635
+ logActivity("list_project_files", {
3636
+ path: resolved.dbFilePath,
3637
+ recursive: args.recursive,
3638
+ max_depth: args.max_depth,
3639
+ include_files: args.include_files,
3640
+ include_dirs: args.include_dirs,
3641
+ include_hidden: args.include_hidden,
3642
+ respect_ignore: args.respect_ignore,
3643
+ include_paths: includePaths ?? [],
3644
+ exclude_paths: excludePaths ?? [],
3645
+ extensions: extensions ?? [],
3646
+ returned: result.returned,
3647
+ scanned: result.scanned,
3648
+ truncated: result.truncated,
3649
+ path_kind: st.isFile() ? "file" : st.isDirectory() ? "dir" : "other",
3650
+ });
3651
+ return {
3652
+ content: [
3653
+ {
3654
+ type: "text",
3655
+ text: toolJson({
3656
+ ok: true,
3657
+ path: resolved.dbFilePath,
3658
+ path_kind: st.isFile() ? "file" : st.isDirectory() ? "dir" : "other",
3659
+ recursive: args.recursive,
3660
+ max_depth: args.recursive ? args.max_depth : 1,
3661
+ include_files: args.include_files,
3662
+ include_dirs: args.include_dirs,
3663
+ include_hidden: args.include_hidden,
3664
+ respect_ignore: args.respect_ignore,
3665
+ include_paths: includePaths ?? [],
3666
+ exclude_paths: excludePaths ?? [],
3667
+ extensions: extensions ?? [],
3668
+ returned: result.returned,
3669
+ scanned: result.scanned,
3670
+ truncated: result.truncated,
3671
+ entries: result.entries,
3672
+ }),
3673
+ },
3674
+ ],
3675
+ };
3676
+ }
3677
+ if (toolName === "read_file_text") {
3678
+ const args = ReadFileTextArgsSchema.parse(rawArgs);
3679
+ const resolved = resolveReadPathUnderProjectRoot(args.path);
3680
+ let st;
3681
+ try {
3682
+ st = fs.statSync(resolved.absPath);
3683
+ }
3684
+ catch (err) {
3685
+ return {
3686
+ isError: true,
3687
+ content: [{ type: "text", text: toolJson({ ok: false, error: `File not found: ${String(err)}` }) }],
3688
+ };
3689
+ }
3690
+ if (!st.isFile()) {
3691
+ return { isError: true, content: [{ type: "text", text: toolJson({ ok: false, error: "Not a file" }) }] };
3692
+ }
3693
+ let result;
3694
+ try {
3695
+ result = readTextFileSlice({
3696
+ absPath: resolved.absPath,
3697
+ offset: args.offset,
3698
+ maxChars: args.max_chars,
3699
+ maxFileBytes: args.max_file_bytes,
3700
+ });
3701
+ }
3702
+ catch (err) {
3703
+ return { isError: true, content: [{ type: "text", text: toolJson({ ok: false, error: String(err) }) }] };
3704
+ }
3705
+ logActivity("read_file_text", {
3706
+ file_path: resolved.dbFilePath,
3707
+ offset: args.offset,
3708
+ returned_chars: result.returnedChars,
3709
+ total_chars: result.totalChars,
3710
+ truncated: result.truncated,
3711
+ });
3712
+ return {
3713
+ content: [
3714
+ {
3715
+ type: "text",
3716
+ text: toolJson({
3717
+ ok: true,
3718
+ file_path: resolved.dbFilePath,
3719
+ offset: args.offset,
3720
+ returned_chars: result.returnedChars,
3721
+ total_chars: result.totalChars,
3722
+ truncated: result.truncated,
3723
+ text: result.text,
3724
+ }),
3725
+ },
3726
+ ],
3727
+ };
3728
+ }
3729
+ if (toolName === "read_codex_text_file") {
3730
+ const args = ReadCodexTextFileArgsSchema.parse(rawArgs);
3731
+ let resolved;
3732
+ try {
3733
+ resolved = resolveCodexTextPath(args.path);
3734
+ }
3735
+ catch (err) {
3736
+ return { isError: true, content: [{ type: "text", text: toolJson({ ok: false, error: String(err) }) }] };
3737
+ }
3738
+ let st;
3739
+ try {
3740
+ st = fs.statSync(resolved.absPath);
3741
+ }
3742
+ catch (err) {
3743
+ return {
3744
+ isError: true,
3745
+ content: [{ type: "text", text: toolJson({ ok: false, error: `File not found: ${String(err)}` }) }],
3746
+ };
3747
+ }
3748
+ if (!st.isFile()) {
3749
+ return { isError: true, content: [{ type: "text", text: toolJson({ ok: false, error: "Not a file" }) }] };
3750
+ }
3751
+ let result;
3752
+ try {
3753
+ result = readTextFileSlice({
3754
+ absPath: resolved.absPath,
3755
+ offset: args.offset,
3756
+ maxChars: args.max_chars,
3757
+ maxFileBytes: args.max_file_bytes,
3758
+ });
3759
+ }
3760
+ catch (err) {
3761
+ return { isError: true, content: [{ type: "text", text: toolJson({ ok: false, error: String(err) }) }] };
3762
+ }
3763
+ logActivity("read_codex_text_file", {
3764
+ file_path: resolved.displayPath,
3765
+ allowed_root: resolved.allowedRoot,
3766
+ offset: args.offset,
3767
+ returned_chars: result.returnedChars,
3768
+ total_chars: result.totalChars,
3769
+ truncated: result.truncated,
3770
+ });
3771
+ return {
3772
+ content: [
3773
+ {
3774
+ type: "text",
3775
+ text: toolJson({
3776
+ ok: true,
3777
+ file_path: resolved.displayPath,
3778
+ allowed_root: resolved.allowedRoot,
3779
+ offset: args.offset,
3780
+ returned_chars: result.returnedChars,
3781
+ total_chars: result.totalChars,
3782
+ truncated: result.truncated,
3783
+ text: result.text,
3784
+ }),
3785
+ },
3786
+ ],
3787
+ };
3788
+ }
3789
+ if (toolName === "read_file_lines") {
3790
+ const args = ReadFileLinesArgsSchema.parse(rawArgs);
3791
+ const resolved = resolveReadPathUnderProjectRoot(args.path);
3792
+ let fromLine = args.from_line;
3793
+ let toLine = args.to_line;
3794
+ if (toLine == null) {
3795
+ const total = args.total_count ?? 200;
3796
+ toLine = fromLine + total - 1;
3797
+ }
3798
+ if (toLine < fromLine) {
3799
+ return {
3800
+ isError: true,
3801
+ content: [
3802
+ {
3803
+ type: "text",
3804
+ text: toolJson({
3805
+ ok: false,
3806
+ error: "to_line must be >= from_line",
3807
+ path: args.path,
3808
+ from_line: fromLine,
3809
+ to_line: toLine,
3810
+ }),
3811
+ },
3812
+ ],
3813
+ };
3814
+ }
3815
+ let st;
3816
+ try {
3817
+ st = fs.statSync(resolved.absPath);
3818
+ }
3819
+ catch (err) {
3820
+ return {
3821
+ isError: true,
3822
+ content: [
3823
+ { type: "text", text: toolJson({ ok: false, error: `File not found: ${String(err)}` }) },
3824
+ ],
3825
+ };
3826
+ }
3827
+ if (!st.isFile()) {
3828
+ return { isError: true, content: [{ type: "text", text: toolJson({ ok: false, error: "Not a file" }) }] };
3829
+ }
3830
+ const maxLines = Math.max(1, Math.min(2000, args.max_lines));
3831
+ const maxChars = Math.max(200, Math.min(200_000, args.max_chars));
3832
+ const result = await readTextFileLines({
3833
+ absPath: resolved.absPath,
3834
+ fromLine,
3835
+ toLine,
3836
+ maxLines,
3837
+ maxChars,
3838
+ });
3839
+ logActivity("read_file_lines", {
3840
+ file_path: resolved.dbFilePath,
3841
+ from_line: fromLine,
3842
+ to_line: toLine,
3843
+ returned: result.returned,
3844
+ truncated: result.truncated,
3845
+ });
3846
+ return {
3847
+ content: [
3848
+ {
3849
+ type: "text",
3850
+ text: toolJson({
3851
+ ok: true,
3852
+ file_path: resolved.dbFilePath,
3853
+ from_line: fromLine,
3854
+ to_line: toLine,
3855
+ returned: result.returned,
3856
+ truncated: result.truncated,
3857
+ text: result.text,
3858
+ }),
3859
+ },
3860
+ ],
3861
+ };
3862
+ }
2938
3863
  if (toolName === "query_codebase") {
2939
3864
  const args = QueryCodebaseArgsSchema.parse(rawArgs);
2940
3865
  const q = args.query.trim();