@levnikolaevich/hex-line-mcp 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +293 -0
- package/benchmark.mjs +1180 -0
- package/hook.mjs +299 -0
- package/lib/bulk-replace.mjs +55 -0
- package/lib/changes.mjs +174 -0
- package/lib/coerce.mjs +43 -0
- package/lib/edit.mjs +420 -0
- package/lib/graph-enrich.mjs +208 -0
- package/lib/hash.mjs +109 -0
- package/lib/info.mjs +109 -0
- package/lib/normalize.mjs +106 -0
- package/lib/outline.mjs +200 -0
- package/lib/read.mjs +129 -0
- package/lib/search.mjs +132 -0
- package/lib/security.mjs +114 -0
- package/lib/setup.mjs +132 -0
- package/lib/tree.mjs +162 -0
- package/lib/update-check.mjs +56 -0
- package/lib/verify.mjs +54 -0
- package/package.json +57 -0
- package/server.mjs +368 -0
package/package.json
ADDED
|
@@ -0,0 +1,57 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@levnikolaevich/hex-line-mcp",
|
|
3
|
+
"version": "1.0.0",
|
|
4
|
+
"type": "module",
|
|
5
|
+
"description": "Hash-verified file editing MCP + token efficiency hook for AI coding agents. 11 tools: read, edit, write, grep, outline, verify, directory_tree, file_info, setup_hooks, changes, bulk_replace.",
|
|
6
|
+
"main": "server.mjs",
|
|
7
|
+
"bin": {
|
|
8
|
+
"hex-line-mcp": "server.mjs"
|
|
9
|
+
},
|
|
10
|
+
"files": [
|
|
11
|
+
"server.mjs",
|
|
12
|
+
"hook.mjs",
|
|
13
|
+
"benchmark.mjs",
|
|
14
|
+
"lib/",
|
|
15
|
+
"README.md"
|
|
16
|
+
],
|
|
17
|
+
"scripts": {
|
|
18
|
+
"start": "node server.mjs",
|
|
19
|
+
"lint": "eslint .",
|
|
20
|
+
"lint:fix": "eslint . --fix",
|
|
21
|
+
"test": "node --test test/*.mjs",
|
|
22
|
+
"check": "node --check server.mjs && node --check hook.mjs"
|
|
23
|
+
},
|
|
24
|
+
"dependencies": {
|
|
25
|
+
"@modelcontextprotocol/sdk": "^1.17.0",
|
|
26
|
+
"diff": "^8.0.3",
|
|
27
|
+
"tree-sitter-wasms": "^0.1.0",
|
|
28
|
+
"web-tree-sitter": "^0.25.0"
|
|
29
|
+
},
|
|
30
|
+
"license": "MIT",
|
|
31
|
+
"keywords": [
|
|
32
|
+
"mcp",
|
|
33
|
+
"hex-line",
|
|
34
|
+
"ast",
|
|
35
|
+
"outline",
|
|
36
|
+
"file-edit",
|
|
37
|
+
"coding-agent",
|
|
38
|
+
"model-context-protocol",
|
|
39
|
+
"hash-verified",
|
|
40
|
+
"token-efficiency",
|
|
41
|
+
"hook",
|
|
42
|
+
"rtk",
|
|
43
|
+
"bulk-replace"
|
|
44
|
+
],
|
|
45
|
+
"engines": {
|
|
46
|
+
"node": ">=18.0.0"
|
|
47
|
+
},
|
|
48
|
+
"repository": {
|
|
49
|
+
"type": "git",
|
|
50
|
+
"url": "https://github.com/levnikolaevich/claude-code-skills",
|
|
51
|
+
"directory": "mcp/hex-line-mcp"
|
|
52
|
+
},
|
|
53
|
+
"homepage": "https://github.com/levnikolaevich/claude-code-skills/tree/master/mcp/hex-line-mcp",
|
|
54
|
+
"devDependencies": {
|
|
55
|
+
"eslint": "^10.1.0"
|
|
56
|
+
}
|
|
57
|
+
}
|
package/server.mjs
ADDED
|
@@ -0,0 +1,368 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
/**
|
|
3
|
+
* hex-line-mcp — MCP server for hash-verified file operations.
|
|
4
|
+
*
|
|
5
|
+
* 11 tools: read_file, edit_file, write_file, grep_search, outline, verify, directory_tree, get_file_info, setup_hooks, changes, bulk_replace
|
|
6
|
+
* FNV-1a 2-char tags + range checksums (trueline-compatible)
|
|
7
|
+
* Security: root policy, path validation, binary/size rejection
|
|
8
|
+
* Transport: stdio
|
|
9
|
+
*/
|
|
10
|
+
|
|
11
|
+
import { writeFileSync, mkdirSync } from "node:fs";
|
|
12
|
+
import { dirname } from "node:path";
|
|
13
|
+
import { z } from "zod";
|
|
14
|
+
// LLM clients may send booleans as strings ("true"/"false").
// z.coerce.boolean() is unsafe: Boolean("false") === true.
// Only the exact literals "true"/"false" are mapped; any other string
// passes through unchanged so z.boolean() rejects it with a type error
// instead of it silently becoming `false` (the original coerced e.g.
// "yes" or "True" to false).
// Outer .optional() keeps the field not-required in the JSON Schema.
const flexBool = () => z.preprocess(
  v => {
    if (v === "true") return true;
    if (v === "false") return false;
    return v;
  },
  z.boolean().optional()
).optional();
|
|
20
|
+
// LLM clients may send numbers as strings ("5" instead of 5).
// z.coerce.number() generates {"type":"number"} → strict MCP clients reject strings.
// flexNum generates schema accepting both, coerces at runtime.
// Outer .optional() ensures JSON Schema marks field as not-required.
const flexNum = () => z.preprocess(
  v => {
    if (typeof v !== "string") return v;
    // Number("") and Number("   ") are 0 — reject blank strings instead
    // of silently coercing them to zero (original behavior).
    if (v.trim() === "") return v;
    const n = Number(v);
    // Non-numeric strings pass through so z.number() fails validation;
    // keeping the original string also yields a clearer error than NaN.
    return Number.isNaN(n) ? v : n;
  },
  z.number().optional()
).optional();
|
|
28
|
+
|
|
29
|
+
import { readFile } from "./lib/read.mjs";
|
|
30
|
+
import { editFile } from "./lib/edit.mjs";
|
|
31
|
+
import { grepSearch } from "./lib/search.mjs";
|
|
32
|
+
import { fileOutline } from "./lib/outline.mjs";
|
|
33
|
+
import { verifyChecksums } from "./lib/verify.mjs";
|
|
34
|
+
import { validateWritePath } from "./lib/security.mjs";
|
|
35
|
+
import { directoryTree } from "./lib/tree.mjs";
|
|
36
|
+
import { fileInfo } from "./lib/info.mjs";
|
|
37
|
+
import { setupHooks } from "./lib/setup.mjs";
|
|
38
|
+
import { fileChanges } from "./lib/changes.mjs";
|
|
39
|
+
import { bulkReplace } from "./lib/bulk-replace.mjs";
|
|
40
|
+
import { coerceParams } from "./lib/coerce.mjs";
|
|
41
|
+
import { checkForUpdates } from "./lib/update-check.mjs";
|
|
42
|
+
|
|
43
|
+
// --- SDK ---

// The SDK is loaded dynamically so a missing install produces a
// friendly actionable message instead of a raw module-resolution stack.
let McpServer, StdioServerTransport;
try {
  const mcpModule = await import("@modelcontextprotocol/sdk/server/mcp.js");
  const stdioModule = await import("@modelcontextprotocol/sdk/server/stdio.js");
  McpServer = mcpModule.McpServer;
  StdioServerTransport = stdioModule.StdioServerTransport;
} catch {
  process.stderr.write(
    "hex-line-mcp: @modelcontextprotocol/sdk not found.\n" +
    "Run: cd mcp/hex-line-mcp && npm install\n"
  );
  process.exit(1);
}

const server = new McpServer({ name: "hex-line-mcp", version: "1.0.0" });
|
|
58
|
+
|
|
59
|
+
|
|
60
|
+
// ==================== read_file ====================

server.registerTool("read_file", {
  title: "Read File",
  description:
    "Read a file with FNV-1a hash-annotated lines (tag.lineNum\\tcontent) and range checksums. " +
    "All file types: code, markdown, config, text. Directory listing if path is a directory. " +
    "For large code files: use outline first, then read_file with offset/limit. " +
    "For markdown/config: read_file directly.",
  inputSchema: z.object({
    path: z.string().optional().describe("File or directory path"),
    paths: z.array(z.string()).optional().describe("Array of file paths to read (batch mode)"),
    offset: flexNum().describe("Start line (1-indexed, default: 1)"),
    limit: flexNum().describe("Max lines (default: 2000, 0 = all)"),
    plain: flexBool().describe("Omit hashes (lineNum|content)"),
  }),
  annotations: { readOnlyHint: true, destructiveHint: false, idempotentHint: true },
}, async (rawParams) => {
  const params = coerceParams(rawParams);
  const { offset, limit, plain } = params;
  try {
    // Batch mode applies only when 'paths' is supplied without 'path'.
    const batch = params.paths;
    if (batch && batch.length > 0 && !params.path) {
      // Per-file errors are reported inline so one bad path does not
      // abort the whole batch.
      const sections = batch.map((filePath) => {
        try {
          return readFile(filePath, { offset, limit, plain });
        } catch (e) {
          return `File: ${filePath}\n\nERROR: ${e.message}`;
        }
      });
      return { content: [{ type: "text", text: sections.join("\n\n---\n\n") }] };
    }
    if (!params.path) throw new Error("Either 'path' or 'paths' is required");
    return { content: [{ type: "text", text: readFile(params.path, { offset, limit, plain }) }] };
  } catch (e) {
    return { content: [{ type: "text", text: e.message }], isError: true };
  }
});
|
|
97
|
+
|
|
98
|
+
|
|
99
|
+
// ==================== edit_file ====================

server.registerTool("edit_file", {
  title: "Edit File",
  description:
    "Edit a file using hash-verified anchors or text replacement. Returns diff. " +
    "Anchors: set_line {anchor:'ab.12',new_text:'...'}, replace_lines, insert_after. " +
    "Text: replace {old_text,new_text,all}. " +
    "For anchor-based edits, use read_file first to get hashes. For text replace, read_file is optional.",
  inputSchema: z.object({
    path: z.string().describe("File to edit"),
    edits: z.string().describe(
      'JSON array. Examples:\n' +
      '{"set_line":{"anchor":"ab.12","new_text":"new"}} — replace line\n' +
      '{"replace_lines":{"start_anchor":"ab.10","end_anchor":"cd.15","new_text":"..."}} — range\n' +
      '{"insert_after":{"anchor":"ab.20","text":"inserted"}} — insert below\n' +
      '{"replace":{"old_text":"find","new_text":"replace","all":false}} — text match',
    ),
    dry_run: flexBool().describe("Preview changes without writing"),
  }),
  annotations: { readOnlyHint: false, destructiveHint: false, idempotentHint: false },
}, async (rawParams) => {
  const { path: filePath, edits: editsJson, dry_run } = coerceParams(rawParams);
  try {
    // Edits arrive as a JSON string; parse errors surface via catch below.
    const editList = JSON.parse(editsJson);
    const isValid = Array.isArray(editList) && editList.length > 0;
    if (!isValid) throw new Error("Edits: non-empty JSON array required");
    return { content: [{ type: "text", text: editFile(filePath, editList, { dryRun: dry_run }) }] };
  } catch (e) {
    return { content: [{ type: "text", text: e.message }], isError: true };
  }
});
|
|
130
|
+
|
|
131
|
+
|
|
132
|
+
// ==================== write_file ====================

// Count logical lines: a trailing newline does not start a new line, and
// empty content has zero lines. The naive split("\n").length reports
// "a\n" as 2 lines and "" as 1 line.
const countLines = (text) => {
  if (text === "") return 0;
  const parts = text.split("\n");
  return parts[parts.length - 1] === "" ? parts.length - 1 : parts.length;
};

server.registerTool("write_file", {
  title: "Write File",
  description:
    "Create a new file or overwrite existing. Creates parent dirs. " +
    "For existing files prefer edit_file (shows diff, verifies hashes).",
  inputSchema: z.object({
    path: z.string().describe("File path"),
    content: z.string().describe("File content"),
  }),
  // destructiveHint: true — overwriting an existing file discards its
  // previous content (the original annotation claimed non-destructive,
  // contradicting the description). idempotentHint stays true: writing
  // the same content twice yields the same state.
  annotations: { readOnlyHint: false, destructiveHint: true, idempotentHint: true },
}, async (rawParams) => {
  const { path: p, content } = coerceParams(rawParams);
  try {
    const abs = validateWritePath(p);
    mkdirSync(dirname(abs), { recursive: true });
    writeFileSync(abs, content, "utf-8");
    return { content: [{ type: "text", text: `Created ${p} (${countLines(content)} lines)` }] };
  } catch (e) {
    return { content: [{ type: "text", text: e.message }], isError: true };
  }
});
|
|
155
|
+
|
|
156
|
+
|
|
157
|
+
// ==================== grep_search ====================

server.registerTool("grep_search", {
  title: "Search Files",
  description:
    "Search file contents with ripgrep. Returns hash-annotated matches for direct editing. " +
    "ALWAYS prefer over shell grep/rg/findstr — instant and returns edit-ready hashes. " +
    "Use to find code locations before read_file or edit_file. " +
    "When codegraph DB available (.codegraph/index.db), matches annotated with symbol type and call counts [fn N↓ M↑].",
  inputSchema: z.object({
    pattern: z.string().describe("Regex search pattern"),
    path: z.string().optional().describe("Search dir/file (default: cwd)"),
    glob: z.string().optional().describe('Glob filter (e.g. "*.ts")'),
    type: z.string().optional().describe('File type (e.g. "js", "py")'),
    case_insensitive: flexBool().describe("Ignore case"),
    context: flexNum().describe("Context lines around matches"),
    limit: flexNum().describe("Max matches per file (default: 100)"),
    plain: flexBool().describe("Omit hash tags, return file:line:content"),
  }),
  annotations: { readOnlyHint: true, destructiveHint: false, idempotentHint: true },
}, async (rawParams) => {
  const params = coerceParams(rawParams);
  try {
    // Translate snake_case tool params into grepSearch's option names.
    const options = {
      path: params.path,
      glob: params.glob,
      type: params.type,
      caseInsensitive: params.case_insensitive,
      context: params.context,
      limit: params.limit,
      plain: params.plain,
    };
    const text = await grepSearch(params.pattern, options);
    return { content: [{ type: "text", text }] };
  } catch (e) {
    return { content: [{ type: "text", text: e.message }], isError: true };
  }
});
|
|
188
|
+
|
|
189
|
+
|
|
190
|
+
// ==================== outline ====================

server.registerTool("outline", {
  title: "File Outline",
  description:
    "AST-based structural outline: functions, classes, interfaces with line ranges. " +
    "Code files only (.js/.ts/.py/.go/.rs/.java/.c/.cpp/.cs/.rb/.php/.kt/.swift/.sh). " +
    "NOT for .md/.json/.yaml/.txt — use read_file directly for those. " +
    "10-20 lines instead of 500 — 95% token reduction. " +
    "Output maps directly to read_file ranges. Use before reading large code files.",
  inputSchema: z.object({
    path: z.string().describe("Source file path"),
  }),
  annotations: { readOnlyHint: true, destructiveHint: false, idempotentHint: true },
}, async (rawParams) => {
  const { path: sourcePath } = coerceParams(rawParams);
  try {
    // fileOutline is async (tree-sitter parsing happens lazily).
    return { content: [{ type: "text", text: await fileOutline(sourcePath) }] };
  } catch (e) {
    return { content: [{ type: "text", text: e.message }], isError: true };
  }
});
|
|
213
|
+
|
|
214
|
+
|
|
215
|
+
// ==================== verify ====================

server.registerTool("verify", {
  title: "Verify Checksums",
  description:
    "Check if range checksums from prior reads are still valid. " +
    "Single-line response when nothing changed. Avoids full re-read for staleness check. " +
    "Use to check if file changed since last read, without re-reading.",
  inputSchema: z.object({
    path: z.string().describe("File path"),
    checksums: z.string().describe('JSON array of checksum strings, e.g. ["1-50:f7e2a1b0", "51-100:abcd1234"]'),
  }),
  annotations: { readOnlyHint: true, destructiveHint: false, idempotentHint: true },
}, async (rawParams) => {
  const { path: p, checksums } = coerceParams(rawParams);
  try {
    const parsed = JSON.parse(checksums);
    // Enforce the full documented contract: the original only checked
    // Array.isArray, letting non-string entries reach verifyChecksums.
    if (!Array.isArray(parsed) || parsed.some((c) => typeof c !== "string")) {
      throw new Error("checksums must be a JSON array of strings");
    }
    return { content: [{ type: "text", text: verifyChecksums(p, parsed) }] };
  } catch (e) {
    return { content: [{ type: "text", text: e.message }], isError: true };
  }
});
|
|
238
|
+
|
|
239
|
+
|
|
240
|
+
// ==================== directory_tree ====================

server.registerTool("directory_tree", {
  title: "Directory Tree",
  description:
    "Compact directory tree with file sizes and .gitignore support. " +
    "Use to understand repo structure before reading files. " +
    "Skips node_modules, .git, dist by default.",
  inputSchema: z.object({
    path: z.string().describe("Directory path"),
    max_depth: flexNum().describe("Max recursion depth (default: 3)"),
    gitignore: flexBool().describe("Respect .gitignore patterns (default: true)"),
    format: z.enum(["compact", "full"]).optional().describe('"compact" = names only, no sizes, depth 1. "full" = default with sizes'),
  }),
  annotations: { readOnlyHint: true, destructiveHint: false, idempotentHint: true },
}, async (rawParams) => {
  const params = coerceParams(rawParams);
  try {
    const tree = directoryTree(params.path, {
      max_depth: params.max_depth,
      gitignore: params.gitignore,
      format: params.format,
    });
    return { content: [{ type: "text", text: tree }] };
  } catch (e) {
    return { content: [{ type: "text", text: e.message }], isError: true };
  }
});
|
|
263
|
+
|
|
264
|
+
|
|
265
|
+
// ==================== get_file_info ====================

server.registerTool("get_file_info", {
  title: "File Info",
  description:
    "File metadata without reading content: size, line count, modification time, type, binary detection. " +
    "Use before reading large files to check size.",
  inputSchema: z.object({
    path: z.string().describe("File path"),
  }),
  annotations: { readOnlyHint: true, destructiveHint: false, idempotentHint: true },
}, async (rawParams) => {
  const { path: target } = coerceParams(rawParams);
  try {
    const info = fileInfo(target);
    return { content: [{ type: "text", text: info }] };
  } catch (e) {
    return { content: [{ type: "text", text: e.message }], isError: true };
  }
});
|
|
284
|
+
|
|
285
|
+
|
|
286
|
+
// ==================== setup_hooks ====================

server.registerTool("setup_hooks", {
  title: "Setup Hooks",
  description:
    "Configure hex-line hooks in CLI agent settings. " +
    "Claude: writes PreToolUse + PostToolUse to .claude/settings.local.json. " +
    "Gemini/Codex: returns guidance (no hook support). " +
    "Idempotent: re-running produces no changes if already configured.",
  inputSchema: z.object({
    agent: z.string().optional().describe('Target agent: "claude", "gemini", "codex", or "all" (default: "all")'),
  }),
  annotations: { readOnlyHint: false, destructiveHint: false, idempotentHint: true },
}, async (rawParams) => {
  const { agent: targetAgent } = coerceParams(rawParams);
  try {
    const report = setupHooks(targetAgent);
    return { content: [{ type: "text", text: report }] };
  } catch (e) {
    return { content: [{ type: "text", text: e.message }], isError: true };
  }
});
|
|
307
|
+
|
|
308
|
+
|
|
309
|
+
// ==================== changes ====================

server.registerTool("changes", {
  title: "Semantic Diff",
  description:
    "Compare file or directory against git ref (default: HEAD). For files: shows added/removed/modified symbols at AST level. " +
    "For directories: lists changed files with insertions/deletions stats. Use to understand what changed before committing.",
  inputSchema: z.object({
    path: z.string().describe("File or directory path"),
    compare_against: z.string().optional().describe('Git ref to compare against (default: "HEAD")'),
  }),
  annotations: { readOnlyHint: true, destructiveHint: false, idempotentHint: true },
}, async (rawParams) => {
  const params = coerceParams(rawParams);
  try {
    // fileChanges shells out to git, so this handler awaits it.
    const report = await fileChanges(params.path, params.compare_against);
    return { content: [{ type: "text", text: report }] };
  } catch (e) {
    return { content: [{ type: "text", text: e.message }], isError: true };
  }
});
|
|
329
|
+
|
|
330
|
+
|
|
331
|
+
// ==================== bulk_replace ====================

server.registerTool("bulk_replace", {
  title: "Bulk Replace",
  description:
    "Search-and-replace across multiple files. Finds files by glob, applies ordered text replacements, returns per-file diffs. " +
    "Use dry_run:true to preview. For single-file edits use edit_file instead.",
  inputSchema: z.object({
    replacements: z.string().describe('JSON array of {old, new} pairs: [{"old":"foo","new":"bar"}]'),
    glob: z.string().optional().describe('File glob (default: "**/*.{md,mjs,json,yml,ts,js}")'),
    path: z.string().optional().describe("Root directory (default: cwd)"),
    dry_run: flexBool().describe("Preview without writing (default: false)"),
    max_files: flexNum().describe("Max files to process (default: 100)"),
  }),
  annotations: { readOnlyHint: false, destructiveHint: true, idempotentHint: false },
}, async (rawParams) => {
  try {
    const params = coerceParams(rawParams);
    const replacements = JSON.parse(params.replacements);
    // Validate entry shape up front: the original only checked for a
    // non-empty array, letting malformed entries (missing old/new)
    // reach bulkReplace.
    const wellFormed = Array.isArray(replacements) && replacements.length > 0 &&
      replacements.every((r) => r && typeof r.old === "string" && typeof r.new === "string");
    if (!wellFormed) throw new Error("replacements: non-empty JSON array of {old, new} required");
    const result = bulkReplace(
      params.path || process.cwd(),
      params.glob || "**/*.{md,mjs,json,yml,ts,js}",
      replacements,
      // ?? instead of ||: an explicit max_files of 0 should mean 0,
      // not silently fall back to the default of 100.
      { dryRun: params.dry_run ?? false, maxFiles: params.max_files ?? 100 }
    );
    return { content: [{ type: "text", text: result }] };
  } catch (e) {
    return { content: [{ type: "text", text: e.message }], isError: true };
  }
});
|
|
362
|
+
|
|
363
|
+
|
|
364
|
+
// --- Start ---

// Serve over stdio; connect must complete before anything else runs.
const stdioTransport = new StdioServerTransport();
await server.connect(stdioTransport);
// Fire-and-forget: the update check must never delay or fail startup.
void checkForUpdates("@levnikolaevich/hex-line-mcp", "1.0.0");
|