@ebowwa/mcp-nm 1.1.0 → 2.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js +944 -17
- package/package.json +14 -4
- package/src/index.ts +1186 -25
package/dist/index.js
CHANGED
|
@@ -13642,13 +13642,61 @@ import { exec } from "child_process";
|
|
|
13642
13642
|
import { promisify } from "util";
|
|
13643
13643
|
var execAsync = promisify(exec);
|
|
13644
13644
|
// MCP server handle. The advertised version must track package.json —
// the package is published as 2.0.1 but this hard-coded string had
// drifted to "2.0.0".
var server = new Server({
  name: "@ebowwa/mcp-nm",
  version: "2.0.1"
}, {
  capabilities: {
    tools: {}
  }
});
|
|
13652
|
+
/**
 * Classify a binary with `file(1)` and decide whether it should carry a
 * dynamic symbol table.
 * NOTE(review): filePath is interpolated into a shell command; callers
 * must not pass untrusted strings (matches this file's existing pattern).
 * @param {string} filePath - path handed to the `file` command
 * @returns {Promise<{hasDynamicSymbols: boolean, fileType: string, format: string}>}
 */
async function analyzeFile(filePath) {
  try {
    const { stdout: fileInfo } = await execAsync(`file "${filePath}"`);
    const info = fileInfo.toLowerCase();
    const has = (needle) => info.includes(needle);
    let fileType = "unknown";
    let hasDynamicSymbols = false;
    if (has("elf")) {
      if (has("shared object") || has("dynamically linked")) {
        fileType = has("shared object") ? "elf-shared" : "elf-executable";
        hasDynamicSymbols = true;
      } else if (has("statically linked")) {
        fileType = "elf-executable";
      } else if (has("relocatable")) {
        fileType = "object-file";
      }
    } else if (has("mach-o")) {
      fileType = "mach-o";
      if (has("dynamically linked") || has("shared library") || has("dylib")) {
        hasDynamicSymbols = true;
      } else if (!has("bundle") && !has("statically linked")) {
        // Neither clearly dynamic nor clearly static: treat plain
        // executables as having a dynamic table.
        hasDynamicSymbols = has("executable");
      }
    } else if (has("ar archive") || has("current ar archive")) {
      fileType = "static-library";
    } else if (has("relocatable")) {
      fileType = "object-file";
    }
    return { hasDynamicSymbols, fileType, format: fileInfo.trim() };
  } catch {
    // Best-effort: a missing `file` binary or unreadable path degrades
    // to "unknown" rather than throwing.
    return {
      hasDynamicSymbols: false,
      fileType: "unknown",
      format: "Unable to determine file type"
    };
  }
}
|
|
13652
13700
|
async function runNm(filePath, options = {}) {
|
|
13653
13701
|
const args = [];
|
|
13654
13702
|
if (options.externalOnly)
|
|
@@ -13917,32 +13965,100 @@ async function handleXxdExtract(args) {
|
|
|
13917
13965
|
return { content: [{ type: "text", text: summary }] };
|
|
13918
13966
|
}
|
|
13919
13967
|
async function handleXxdFindPattern(args) {
|
|
13920
|
-
const result = await runXxd(args.filePath, {
|
|
13921
|
-
plainHex: true,
|
|
13922
|
-
length: args.maxLength
|
|
13923
|
-
});
|
|
13924
|
-
const fileHex = result.output.replace(/\s/g, "").toLowerCase();
|
|
13925
13968
|
let searchPattern;
|
|
13926
13969
|
if (args.patternFormat === "text" || !args.patternFormat) {
|
|
13927
13970
|
searchPattern = args.pattern.split("").map((c) => c.charCodeAt(0).toString(16).padStart(2, "0")).join("");
|
|
13928
13971
|
} else {
|
|
13929
13972
|
searchPattern = args.pattern.replace(/\s/g, "").toLowerCase();
|
|
13930
13973
|
}
|
|
13974
|
+
const { stdout: sizeStr } = await execAsync(`stat -f%z "${args.filePath}" 2>/dev/null || stat -c%s "${args.filePath}"`);
|
|
13975
|
+
const fileSize = parseInt(sizeStr.trim(), 10);
|
|
13976
|
+
const maxInMemory = args.maxLength ?? 50 * 1024 * 1024;
|
|
13931
13977
|
const matches = [];
|
|
13932
|
-
|
|
13933
|
-
|
|
13934
|
-
|
|
13935
|
-
|
|
13936
|
-
const contextEnd = Math.min(fileHex.length, idx + searchPattern.length + 20);
|
|
13937
|
-
const contextHex = fileHex.slice(contextStart, contextEnd);
|
|
13938
|
-
matches.push({
|
|
13939
|
-
offset: byteOffset,
|
|
13940
|
-
context: contextHex.match(/.{1,2}/g)?.join(" ") || contextHex
|
|
13978
|
+
if (fileSize <= maxInMemory) {
|
|
13979
|
+
const result = await runXxd(args.filePath, {
|
|
13980
|
+
plainHex: true,
|
|
13981
|
+
length: args.maxLength
|
|
13941
13982
|
});
|
|
13942
|
-
|
|
13983
|
+
const fileHex = result.output.replace(/\s/g, "").toLowerCase();
|
|
13984
|
+
let idx = 0;
|
|
13985
|
+
while ((idx = fileHex.indexOf(searchPattern, idx)) !== -1) {
|
|
13986
|
+
const byteOffset = Math.floor(idx / 2);
|
|
13987
|
+
const contextStart = Math.max(0, idx - 20);
|
|
13988
|
+
const contextEnd = Math.min(fileHex.length, idx + searchPattern.length + 20);
|
|
13989
|
+
const contextHex = fileHex.slice(contextStart, contextEnd);
|
|
13990
|
+
matches.push({
|
|
13991
|
+
offset: byteOffset,
|
|
13992
|
+
context: contextHex.match(/.{1,2}/g)?.join(" ") || contextHex
|
|
13993
|
+
});
|
|
13994
|
+
idx++;
|
|
13995
|
+
if (matches.length >= 1000)
|
|
13996
|
+
break;
|
|
13997
|
+
}
|
|
13998
|
+
} else {
|
|
13999
|
+
try {
|
|
14000
|
+
const patternBytes = searchPattern.match(/.{2}/g) ?? [];
|
|
14001
|
+
const escapedPattern = patternBytes.map((b) => `\\x${b}`).join("");
|
|
14002
|
+
const { stdout: grepResult } = await execAsync(`grep -abo "${escapedPattern}" "${args.filePath}" 2>/dev/null | head -1000`, { maxBuffer: 10 * 1024 * 1024 });
|
|
14003
|
+
const lines = grepResult.trim().split(`
|
|
14004
|
+
`).filter(Boolean);
|
|
14005
|
+
for (const line of lines) {
|
|
14006
|
+
const match = line.match(/^(\d+):/);
|
|
14007
|
+
if (match) {
|
|
14008
|
+
const offset = parseInt(match[1], 10);
|
|
14009
|
+
try {
|
|
14010
|
+
const contextOffset = Math.max(0, offset - 10);
|
|
14011
|
+
const contextLen = searchPattern.length / 2 + 20;
|
|
14012
|
+
const { stdout: contextHex } = await execAsync(`dd if="${args.filePath}" bs=1 skip=${contextOffset} count=${contextLen} 2>/dev/null | xxd -p | tr -d '\\n'`);
|
|
14013
|
+
matches.push({
|
|
14014
|
+
offset,
|
|
14015
|
+
context: contextHex.match(/.{1,2}/g)?.join(" ") || contextHex
|
|
14016
|
+
});
|
|
14017
|
+
} catch {
|
|
14018
|
+
matches.push({ offset, context: "(context unavailable)" });
|
|
14019
|
+
}
|
|
14020
|
+
}
|
|
14021
|
+
}
|
|
14022
|
+
} catch {
|
|
14023
|
+
try {
|
|
14024
|
+
const { stdout: stringsResult } = await execAsync(`strings -t x -n ${Math.floor(searchPattern.length / 2)} "${args.filePath}" | grep -i "${args.pattern}" | head -1000`, { maxBuffer: 10 * 1024 * 1024 });
|
|
14025
|
+
const lines = stringsResult.trim().split(`
|
|
14026
|
+
`).filter(Boolean);
|
|
14027
|
+
for (const line of lines) {
|
|
14028
|
+
const match = line.match(/^\s*([0-9a-fA-F]+)\s+(.+)$/);
|
|
14029
|
+
if (match) {
|
|
14030
|
+
matches.push({
|
|
14031
|
+
offset: parseInt(match[1], 16),
|
|
14032
|
+
context: match[2].slice(0, 40)
|
|
14033
|
+
});
|
|
14034
|
+
}
|
|
14035
|
+
}
|
|
14036
|
+
} catch {
|
|
14037
|
+
const chunkSize = 10 * 1024 * 1024;
|
|
14038
|
+
for (let chunkStart = 0;chunkStart < fileSize; chunkStart += chunkSize) {
|
|
14039
|
+
try {
|
|
14040
|
+
const { stdout: chunkHex } = await execAsync(`dd if="${args.filePath}" bs=1 skip=${chunkStart} count=${chunkSize} 2>/dev/null | xxd -p | tr -d '\\n'`);
|
|
14041
|
+
const hex = chunkHex.toLowerCase();
|
|
14042
|
+
let idx = 0;
|
|
14043
|
+
while ((idx = hex.indexOf(searchPattern, idx)) !== -1) {
|
|
14044
|
+
matches.push({
|
|
14045
|
+
offset: chunkStart + Math.floor(idx / 2),
|
|
14046
|
+
context: hex.slice(Math.max(0, idx - 20), idx + searchPattern.length + 20).match(/.{1,2}/g)?.join(" ") || ""
|
|
14047
|
+
});
|
|
14048
|
+
idx++;
|
|
14049
|
+
if (matches.length >= 1000)
|
|
14050
|
+
break;
|
|
14051
|
+
}
|
|
14052
|
+
if (matches.length >= 1000)
|
|
14053
|
+
break;
|
|
14054
|
+
} catch {}
|
|
14055
|
+
}
|
|
14056
|
+
}
|
|
14057
|
+
}
|
|
13943
14058
|
}
|
|
13944
14059
|
const summary = [
|
|
13945
14060
|
`Pattern search in: ${args.filePath}`,
|
|
14061
|
+
`File size: ${(fileSize / 1024 / 1024).toFixed(2)} MB`,
|
|
13946
14062
|
`Pattern: ${args.pattern} (${args.patternFormat || "text"})`,
|
|
13947
14063
|
`Hex pattern: ${searchPattern}`,
|
|
13948
14064
|
`Matches found: ${matches.length}`,
|
|
@@ -14066,6 +14182,42 @@ async function handleDefinedSymbols(args) {
|
|
|
14066
14182
|
return { content: [{ type: "text", text: summary }] };
|
|
14067
14183
|
}
|
|
14068
14184
|
async function handleDynamicSymbols(args) {
|
|
14185
|
+
const analysis = await analyzeFile(args.filePath);
|
|
14186
|
+
if (!analysis.hasDynamicSymbols) {
|
|
14187
|
+
const suggestions = [];
|
|
14188
|
+
switch (analysis.fileType) {
|
|
14189
|
+
case "static-library":
|
|
14190
|
+
suggestions.push("Static libraries (.a) contain object files without dynamic symbols.");
|
|
14191
|
+
suggestions.push("Use nm_list_symbols or nm_defined_symbols to see available symbols.");
|
|
14192
|
+
break;
|
|
14193
|
+
case "object-file":
|
|
14194
|
+
suggestions.push("Object files (.o) are not linked and have no dynamic symbol table.");
|
|
14195
|
+
suggestions.push("Use nm_list_symbols to see symbols in this object file.");
|
|
14196
|
+
break;
|
|
14197
|
+
case "elf-executable":
|
|
14198
|
+
suggestions.push("This appears to be a statically linked executable.");
|
|
14199
|
+
suggestions.push("Use nm_list_symbols or nm_defined_symbols instead.");
|
|
14200
|
+
break;
|
|
14201
|
+
default:
|
|
14202
|
+
suggestions.push("This file type does not have a dynamic symbol table.");
|
|
14203
|
+
suggestions.push("Try nm_list_symbols or nm_defined_symbols instead.");
|
|
14204
|
+
}
|
|
14205
|
+
return {
|
|
14206
|
+
content: [{
|
|
14207
|
+
type: "text",
|
|
14208
|
+
text: [
|
|
14209
|
+
`Error: File has no dynamic symbol table`,
|
|
14210
|
+
"",
|
|
14211
|
+
`File: ${args.filePath}`,
|
|
14212
|
+
`Type: ${analysis.format}`,
|
|
14213
|
+
"",
|
|
14214
|
+
...suggestions
|
|
14215
|
+
].join(`
|
|
14216
|
+
`)
|
|
14217
|
+
}],
|
|
14218
|
+
isError: true
|
|
14219
|
+
};
|
|
14220
|
+
}
|
|
14069
14221
|
const result = await runNm(args.filePath, {
|
|
14070
14222
|
dynamicSymbols: true,
|
|
14071
14223
|
demangle: args.demangle ?? true,
|
|
@@ -14073,6 +14225,7 @@ async function handleDynamicSymbols(args) {
|
|
|
14073
14225
|
});
|
|
14074
14226
|
const summary = [
|
|
14075
14227
|
`File: ${result.filePath}`,
|
|
14228
|
+
`File type: ${analysis.format}`,
|
|
14076
14229
|
`Dynamic symbols: ${result.totalCount}`,
|
|
14077
14230
|
"",
|
|
14078
14231
|
"Symbols:",
|
|
@@ -14193,6 +14346,531 @@ async function handleSummary(args) {
|
|
|
14193
14346
|
`);
|
|
14194
14347
|
return { content: [{ type: "text", text: summary }] };
|
|
14195
14348
|
}
|
|
14349
|
+
/**
 * MCP tool: extract printable strings from a binary via `strings(1)`.
 * Fix: an unrecognized `encoding` value previously produced the literal
 * shell fragment `strings undefined ...`; unknown values now fall back
 * to the "all" flag set.
 * @param {{filePath: string, minLength?: number, encoding?: string}} args
 * @returns {Promise<{content: Array<{type: string, text: string}>}>}
 * @throws {Error} when the `strings` command fails
 */
async function handleStrings(args) {
  const minLen = args.minLength ?? 4;
  const encFlags = {
    ascii: "-a",
    unicode: "-e l",
    all: "-a -e l"
  };
  // Unknown encodings degrade to the most inclusive flag set.
  const encFlag = encFlags[args.encoding ?? "all"] ?? encFlags.all;
  try {
    const { stdout } = await execAsync(`strings ${encFlag} -n ${minLen} "${args.filePath}"`, { maxBuffer: 50 * 1024 * 1024 });
    const strings = stdout.trim().split("\n").filter(Boolean);
    const summary = [
      `Strings in: ${args.filePath}`,
      `Encoding: ${args.encoding ?? "all"}`,
      `Minimum length: ${minLen}`,
      `Total strings: ${strings.length}`,
      "",
      "Strings:",
      ...strings.slice(0, 200).map((s) => ` ${s}`),
      strings.length > 200 ? ` ... and ${strings.length - 200} more` : ""
    ].join("\n");
    return { content: [{ type: "text", text: summary }] };
  } catch (error2) {
    throw new Error(`strings command failed: ${error2 instanceof Error ? error2.message : error2}`);
  }
}
|
|
14377
|
+
/**
 * MCP tool: report MIME type and detailed description via `file(1)`.
 * @param {{filePath: string}} args
 * @returns {Promise<{content: Array<{type: string, text: string}>}>}
 * @throws {Error} when the `file` command fails
 */
async function handleFileInfo(args) {
  try {
    // Two probes: terse MIME line, then the human-readable description.
    const { stdout: mimeInfo } = await execAsync(`file --mime --brief "${args.filePath}"`);
    const { stdout: detailedInfo } = await execAsync(`file "${args.filePath}"`);
    const report = [
      `File: ${args.filePath}`,
      "",
      "MIME Type:",
      ` ${mimeInfo.trim()}`,
      "",
      "Detailed Type:",
      ` ${detailedInfo.trim()}`
    ];
    return { content: [{ type: "text", text: report.join("\n") }] };
  } catch (error2) {
    throw new Error(`file command failed: ${error2 instanceof Error ? error2.message : error2}`);
  }
}
|
|
14396
|
+
/**
 * MCP tool: list section sizes via `size(1)`.
 * Tries the GNU flavor (`size -A -x`) first, then the BSD/macOS flavor
 * (`size -m`) before giving up with the original failure.
 */
async function handleSectionSizes(args) {
  try {
    const { stdout } = await execAsync(`size -A -x "${args.filePath}"`);
    const body = stdout.trim().split("\n").map((l) => ` ${l}`);
    const text = [`Section Sizes: ${args.filePath}`, "", ...body].join("\n");
    return { content: [{ type: "text", text }] };
  } catch (error2) {
    try {
      const { stdout } = await execAsync(`size -m "${args.filePath}"`);
      return { content: [{ type: "text", text: `Section Sizes:\n${stdout}` }] };
    } catch {
      // Surface the first (GNU-style) failure, not the fallback's.
      throw new Error(`size command failed: ${error2 instanceof Error ? error2.message : error2}`);
    }
  }
}
|
|
14418
|
+
/**
 * MCP tool: dump section headers with `objdump -h`.
 */
async function handleObjdumpSections(args) {
  try {
    const { stdout } = await execAsync(`objdump -h "${args.filePath}"`, {
      maxBuffer: 10 * 1024 * 1024
    });
    const text = [`Section Headers: ${args.filePath}`, "", stdout].join("\n");
    return { content: [{ type: "text", text }] };
  } catch (error2) {
    throw new Error(`objdump -h failed: ${error2 instanceof Error ? error2.message : error2}`);
  }
}
|
|
14434
|
+
/**
 * MCP tool: dump program headers / dynamic info with `objdump -p`.
 */
async function handleObjdumpProgramHeaders(args) {
  try {
    const { stdout } = await execAsync(`objdump -p "${args.filePath}"`, {
      maxBuffer: 10 * 1024 * 1024
    });
    const text = [`Program Headers / Imports: ${args.filePath}`, "", stdout].join("\n");
    return { content: [{ type: "text", text }] };
  } catch (error2) {
    throw new Error(`objdump -p failed: ${error2 instanceof Error ? error2.message : error2}`);
  }
}
|
|
14450
|
+
/**
 * MCP tool: list dynamic library dependencies via `otool -L` (macOS only).
 */
async function handleOtoolLibs(args) {
  try {
    const { stdout } = await execAsync(`otool -L "${args.filePath}"`);
    const indented = stdout.trim().split("\n").map((l) => ` ${l}`);
    const text = [`Dynamic Library Dependencies: ${args.filePath}`, "", ...indented].join("\n");
    return { content: [{ type: "text", text }] };
  } catch (error2) {
    throw new Error(`otool -L failed (macOS only): ${error2 instanceof Error ? error2.message : error2}`);
  }
}
|
|
14466
|
+
/**
 * MCP tool: run `readelf` with flags derived from the requested sections
 * (Linux ELF binaries only). "all" wins over individual selections.
 */
async function handleReadelf(args) {
  const sections = args.sections ?? ["all"];
  // Insertion order matters: flags are emitted in this fixed order.
  const flagFor = { headers: "-h", sections: "-S", segments: "-l", symbols: "-s", dynamic: "-d" };
  const flags = sections.includes("all")
    ? ["-a"]
    : Object.keys(flagFor).filter((k) => sections.includes(k)).map((k) => flagFor[k]);
  try {
    const { stdout } = await execAsync(`readelf ${flags.join(" ")} "${args.filePath}"`, {
      maxBuffer: 20 * 1024 * 1024
    });
    const text = [
      `ELF Analysis: ${args.filePath}`,
      `Sections: ${sections.join(", ")}`,
      "",
      stdout.slice(0, 50000),
      stdout.length > 50000 ? `\n... truncated (${stdout.length - 50000} more chars)` : ""
    ].join("\n");
    return { content: [{ type: "text", text }] };
  } catch (error2) {
    throw new Error(`readelf failed (Linux ELF only): ${error2 instanceof Error ? error2.message : error2}`);
  }
}
|
|
14501
|
+
/**
 * MCP tool: list shared-library dependencies via `ldd` (Linux only).
 */
async function handleLdd(args) {
  try {
    const { stdout } = await execAsync(`ldd "${args.filePath}"`);
    const deps = stdout.trim().split("\n").filter(Boolean).map((l) => ` ${l}`);
    const text = [`Shared Library Dependencies: ${args.filePath}`, "", ...deps].join("\n");
    return { content: [{ type: "text", text }] };
  } catch (error2) {
    throw new Error(`ldd failed (Linux only): ${error2 instanceof Error ? error2.message : error2}`);
  }
}
|
|
14517
|
+
/**
 * MCP tool: disassemble with `objdump -d`, optionally restricted to one
 * symbol, then trim by a line window and a 100k-character budget.
 * NOTE(review): args.symbol is interpolated into the shell command —
 * do not pass untrusted strings (matches this file's existing pattern).
 */
async function handleDisassembly(args) {
  const target = args.symbol ? ` --disassemble="${args.symbol}"` : "";
  const cmd = `objdump -d${target} "${args.filePath}"`;
  try {
    const { stdout } = await execAsync(cmd, {
      maxBuffer: 50 * 1024 * 1024
    });
    let output = stdout;
    if (args.startOffset !== undefined || args.length !== undefined) {
      // startOffset/length select a window of output LINES, not bytes.
      const first = args.startOffset ?? 0;
      const count = args.length ?? 500;
      output = stdout.split("\n").slice(first, first + count).join("\n");
    }
    const text = [
      `Disassembly: ${args.filePath}`,
      args.symbol ? `Symbol: ${args.symbol}` : "",
      "",
      output.slice(0, 1e5),
      output.length > 1e5 ? `\n... truncated (${output.length - 1e5} more chars)` : ""
    ].filter(Boolean).join("\n");
    return { content: [{ type: "text", text }] };
  } catch (error2) {
    throw new Error(`objdump -d failed: ${error2 instanceof Error ? error2.message : error2}`);
  }
}
|
|
14550
|
+
/**
 * MCP tool: best-effort audit of binary hardening features (PIE, RELRO,
 * stack canary, NX, FORTIFY). Each probe is independent and failures of
 * individual probes are swallowed so the audit always yields a report.
 */
async function handleSecurityAudit(args) {
  const results = [];
  const record = (check, status, details) => results.push({ check, status, details });
  try {
    const { stdout: fileInfo } = await execAsync(`file "${args.filePath}"`);
    const info = fileInfo.toLowerCase();
    // PIE heuristic from file(1) output alone.
    if (info.includes("pie") || info.includes("position independent")) {
      record("PIE", "ENABLED", "Position Independent Executable");
    } else if (info.includes("executable")) {
      record("PIE", "DISABLED", "Not a PIE binary");
    }
    try {
      const { stdout: relro } = await execAsync(`readelf -l "${args.filePath}" 2>/dev/null | grep -i gnu_relro`);
      if (relro.trim()) {
        record("RELRO", "ENABLED", "Read-Only relocations");
      }
    } catch {}
    try {
      const { stdout: canary } = await execAsync(`nm "${args.filePath}" 2>/dev/null | grep -i "__stack_chk_fail"`);
      if (canary.trim()) {
        record("Stack Canary", "ENABLED", "Stack smashing detected symbol found");
      } else {
        record("Stack Canary", "UNKNOWN", "No stack canary symbol found");
      }
    } catch {
      record("Stack Canary", "UNKNOWN", "Could not check");
    }
    try {
      const { stdout: nx } = await execAsync(`readelf -l "${args.filePath}" 2>/dev/null | grep -i "gnu_stack"`);
      if (nx.toLowerCase().includes("rwe")) {
        record("NX", "DISABLED", "Stack is executable (RWE)");
      } else if (nx.trim()) {
        record("NX", "ENABLED", "Non-executable stack");
      }
    } catch {}
    try {
      const { stdout: fortify } = await execAsync(`nm "${args.filePath}" 2>/dev/null | grep -i "_chk@"`);
      if (fortify.trim()) {
        record("FORTIFY", "ENABLED", "Fortified functions detected");
      }
    } catch {}
    // Mach-O: a second, platform-specific PIE signal.
    if (info.includes("mach-o")) {
      try {
        const { stdout: loadCmds } = await execAsync(`otool -l "${args.filePath}"`);
        if (loadCmds.toLowerCase().includes("lc_main")) {
          record("PIE", "ENABLED", "Mach-O with LC_MAIN (likely PIE)");
        }
      } catch {}
    }
  } catch (error2) {
    record("Error", "FAILED", error2 instanceof Error ? error2.message : String(error2));
  }
  const text = [
    `Security Audit: ${args.filePath}`,
    "",
    "Security Features:",
    ...results.map((r) => ` ${r.check}: ${r.status} - ${r.details}`),
    "",
    "Recommendations:",
    ...results.filter((r) => r.status === "DISABLED" || r.status === "UNKNOWN").map((r) => ` - Consider enabling ${r.check}`)
  ].join("\n");
  return { content: [{ type: "text", text }] };
}
|
|
14613
|
+
/**
 * MCP tool: Shannon-entropy profile of a file in fixed-size blocks,
 * flagging blocks as "high" (likely encrypted/compressed) or "low"
 * (likely padding/zeros).
 * Fixes: pass an explicit maxBuffer (exec's ~1 MB default made this
 * fail on binaries over ~500 KB once hex-doubled), and guard the
 * empty-file case so the average is 0 rather than NaN.
 */
async function handleEntropyAnalysis(args) {
  const blockSize = args.blockSize ?? 1024;
  try {
    const { stdout: hexData } = await execAsync(`xxd -p "${args.filePath}" | tr -d '\\n'`, { maxBuffer: 100 * 1024 * 1024 });
    const bytes = hexData.match(/.{2}/g)?.map((h) => parseInt(h, 16)) ?? [];
    const results = [];
    for (let i = 0; i < bytes.length; i += blockSize) {
      const block = bytes.slice(i, i + blockSize);
      if (block.length === 0)
        continue;
      // Byte-frequency histogram for this block.
      const freq = {};
      for (const byte of block) {
        freq[byte] = (freq[byte] ?? 0) + 1;
      }
      // Shannon entropy in bits per byte (max 8).
      let entropy = 0;
      for (const count of Object.values(freq)) {
        const p = count / block.length;
        entropy -= p * Math.log2(p);
      }
      const normalizedEntropy = entropy / 8;
      let status = "normal";
      if (normalizedEntropy > 0.95)
        status = "high (possibly encrypted/compressed)";
      else if (normalizedEntropy < 0.3)
        status = "low (possibly padding/zeros)";
      results.push({
        section: `Block ${Math.floor(i / blockSize)}`,
        offset: i,
        entropy: Math.round(entropy * 100) / 100,
        status
      });
    }
    // Empty file => no blocks; report 0 instead of NaN.
    const avgEntropy = results.length === 0 ? 0 : results.reduce((a, b) => a + b.entropy, 0) / results.length;
    const unusual = results.filter((r) => r.status !== "normal");
    const summary = [
      `Entropy Analysis: ${args.filePath}`,
      `Block size: ${blockSize} bytes`,
      `Total blocks: ${results.length}`,
      `Average entropy: ${(avgEntropy / 8 * 100).toFixed(1)}% of max`,
      "",
      "Blocks with unusual entropy:",
      ...unusual.slice(0, 50).map((r) => ` ${r.section} (offset ${r.offset}): ${r.entropy}/8 bits - ${r.status}`),
      unusual.length > 50 ? ` ... and ${unusual.length - 50} more` : ""
    ].join("\n");
    return { content: [{ type: "text", text: summary }] };
  } catch (error2) {
    throw new Error(`Entropy analysis failed: ${error2 instanceof Error ? error2.message : error2}`);
  }
}
|
|
14662
|
+
/**
 * MCP tool: split a binary's symbols into imports (undefined) and
 * exports (defined/global), picking nm flags by object format and
 * falling back to parsing plain `nm` output.
 */
async function handleImportExport(args) {
  const analysis = await analyzeFile(args.filePath);
  const imports = [];
  const exports = [];
  const splitLines = (s) => s.trim().split("\n").filter(Boolean);
  try {
    if (analysis.format.toLowerCase().includes("mach-o")) {
      // macOS nm: -u lists undefined, -g lists global symbols.
      const { stdout: importSyms } = await execAsync(`nm -u "${args.filePath}" 2>/dev/null | awk '{print $2}'`);
      imports.push(...splitLines(importSyms));
      const { stdout: exportSyms } = await execAsync(`nm -g "${args.filePath}" 2>/dev/null | grep -v "U " | awk '{print $3}'`);
      exports.push(...splitLines(exportSyms));
    } else {
      // GNU nm: -D targets the dynamic symbol table.
      const { stdout: importSyms } = await execAsync(`nm -D -u "${args.filePath}" 2>/dev/null | awk '{print $2}'`);
      imports.push(...splitLines(importSyms));
      const { stdout: exportSyms } = await execAsync(`nm -D --defined-only "${args.filePath}" 2>/dev/null | awk '{print $3}'`);
      exports.push(...splitLines(exportSyms));
    }
  } catch {
    // Last resort: classify lines of plain `nm` output ourselves.
    try {
      const { stdout: allSyms } = await execAsync(`nm "${args.filePath}" 2>/dev/null`);
      for (const line of allSyms.split("\n")) {
        const match = line.match(/^\s*([0-9a-fA-F]+)?\s+([UTDDBRC])\s+(.+)/);
        if (match) {
          if (match[2] === "U")
            imports.push(match[3]);
          else if (match[1] && match[2] === match[2].toUpperCase())
            exports.push(match[3]);
        }
      }
    } catch {}
  }
  const text = [
    `Import/Export Analysis: ${args.filePath}`,
    `File type: ${analysis.format}`,
    "",
    `Imports (${imports.length}):`,
    ...imports.slice(0, 100).map((s) => ` - ${s}`),
    imports.length > 100 ? ` ... and ${imports.length - 100} more` : "",
    "",
    `Exports (${exports.length}):`,
    ...exports.slice(0, 100).map((s) => ` + ${s}`),
    exports.length > 100 ? ` ... and ${exports.length - 100} more` : ""
  ].join("\n");
  return { content: [{ type: "text", text }] };
}
|
|
14712
|
+
/**
 * MCP tool: byte-level diff of two files via their hex dumps.
 * Fixes: explicit maxBuffer on the xxd invocations (exec's ~1 MB default
 * failed for files over ~500 KB once hex-encoded), and a guard so two
 * empty files report 0.00% instead of NaN%.
 */
async function handleBinaryDiff(args) {
  // NOTE(review): contextBytes is accepted but the context display below
  // uses a fixed +/-4-byte window — TODO: wire it into the slices.
  const contextBytes = args.contextBytes ?? 32;
  const execOpts = { maxBuffer: 100 * 1024 * 1024 };
  try {
    const { stdout: hex1 } = await execAsync(`xxd -p "${args.file1}" | tr -d '\\n'`, execOpts);
    const { stdout: hex2 } = await execAsync(`xxd -p "${args.file2}" | tr -d '\\n'`, execOpts);
    const bytes1 = hex1.match(/.{2}/g) ?? [];
    const bytes2 = hex2.match(/.{2}/g) ?? [];
    const maxLen = Math.max(bytes1.length, bytes2.length);
    const diffs = [];
    for (let i = 0; i < maxLen; i++) {
      // "??" marks positions past the shorter file's end.
      const b1 = bytes1[i] ?? "??";
      const b2 = bytes2[i] ?? "??";
      if (b1 !== b2) {
        diffs.push({ offset: i, file1: b1, file2: b2 });
      }
    }
    const diffPercent = maxLen === 0 ? "0.00" : (diffs.length / maxLen * 100).toFixed(2);
    const summary = [
      `Binary Diff: ${args.file1} vs ${args.file2}`,
      "",
      `File 1 size: ${bytes1.length} bytes`,
      `File 2 size: ${bytes2.length} bytes`,
      `Differences: ${diffs.length} bytes (${diffPercent}%)`,
      "",
      "Differences (first 200):",
      ...diffs.slice(0, 200).map((d) => {
        const ctx1 = bytes1.slice(Math.max(0, d.offset - 4), d.offset + 5).join(" ");
        const ctx2 = bytes2.slice(Math.max(0, d.offset - 4), d.offset + 5).join(" ");
        return ` Offset 0x${d.offset.toString(16).padStart(8, "0")}: ${d.file1} -> ${d.file2}\n Context: [${ctx1}] -> [${ctx2}]`;
      }),
      diffs.length > 200 ? ` ... and ${diffs.length - 200} more differences` : ""
    ].join("\n");
    return { content: [{ type: "text", text: summary }] };
  } catch (error2) {
    throw new Error(`Binary diff failed: ${error2 instanceof Error ? error2.message : error2}`);
  }
}
|
|
14751
|
+
/**
 * MCP tool: list or extract members of an `ar` archive (.a).
 * Fix: extraction previously ran `cd "$outputDir" && ar -x "$filePath"`,
 * which silently broke for relative archive paths (they were resolved
 * against the new working directory). The archive path is absolutized
 * and `ar` runs with `cwd` set instead.
 */
async function handleArchiveExtract(args) {
  const outputDir = args.outputDir ?? `/tmp/archive_${Date.now()}`;
  try {
    const { stdout: fileInfo } = await execAsync(`file "${args.filePath}"`);
    if (!fileInfo.toLowerCase().includes("ar archive") && !fileInfo.toLowerCase().includes("archive")) {
      return {
        content: [{ type: "text", text: `Error: ${args.filePath} is not an archive file.\n${fileInfo}` }],
        isError: true
      };
    }
    const { stdout: contents } = await execAsync(`ar -t "${args.filePath}"`);
    const files = contents.trim().split("\n").filter(Boolean);
    if (args.listOnly) {
      return {
        content: [{
          type: "text",
          text: [
            `Archive Contents: ${args.filePath}`,
            `Total files: ${files.length}`,
            "",
            "Files:",
            ...files.map((f) => ` - ${f}`)
          ].join("\n")
        }]
      };
    }
    // Resolve relative paths BEFORE extraction runs in outputDir.
    const archivePath = args.filePath.startsWith("/") ? args.filePath : `${process.cwd()}/${args.filePath}`;
    await execAsync(`mkdir -p "${outputDir}"`);
    await execAsync(`ar -x "${archivePath}"`, { cwd: outputDir });
    const summary = [
      `Archive Extracted: ${args.filePath}`,
      `Output directory: ${outputDir}`,
      `Files extracted: ${files.length}`,
      "",
      "Extracted files:",
      ...files.map((f) => ` - ${f}`)
    ].join("\n");
    return { content: [{ type: "text", text: summary }] };
  } catch (error2) {
    throw new Error(`Archive extraction failed: ${error2 instanceof Error ? error2.message : error2}`);
  }
}
|
|
14796
|
+
/**
 * MCP tool: overwrite bytes at a file offset in-place via printf|dd,
 * optionally creating a .bak copy first.
 * Fix: emit POSIX-portable octal escapes (\NNN) instead of \xNN hex
 * escapes — printf in some /bin/sh implementations (e.g. dash) does not
 * interpret \x and would have written the escape text literally.
 * @throws {Error} on invalid hex input or when any shell step fails
 */
async function handlePatchBytes(args) {
  const backup = args.createBackup ?? true;
  try {
    const hex = args.hexData.replace(/\s/g, "");
    if (!/^[0-9a-fA-F]*$/.test(hex) || hex.length % 2 !== 0) {
      throw new Error("Invalid hex data. Must be even number of hex characters.");
    }
    if (backup) {
      await execAsync(`cp "${args.filePath}" "${args.filePath}.bak"`);
    }
    const bytes = hex.match(/.{2}/g)?.map((h) => parseInt(h, 16)) ?? [];
    // \NNN octal escapes are interpreted by every POSIX printf.
    const escaped = bytes.map((b) => `\\${b.toString(8).padStart(3, "0")}`).join("");
    await execAsync(`printf '${escaped}' | dd of="${args.filePath}" bs=1 seek=${args.offset} conv=notrunc 2>/dev/null`);
    const summary = [
      `Bytes Patched: ${args.filePath}`,
      `Offset: 0x${args.offset.toString(16)} (${args.offset})`,
      `Bytes written: ${bytes.length}`,
      `Hex data: ${hex}`,
      backup ? `Backup created: ${args.filePath}.bak` : "WARNING: No backup created"
    ].join("\n");
    return { content: [{ type: "text", text: summary }] };
  } catch (error2) {
    throw new Error(`Patch failed: ${error2 instanceof Error ? error2.message : error2}`);
  }
}
|
|
14822
|
+
async function handleNopSled(args) {
  // Write `args.count` NOP instructions (x86 opcode 0x90) starting at
  // `args.offset`. DESTRUCTIVE: delegates to the byte patcher, which makes
  // a .bak backup unless createBackup is false.
  return handlePatchBytes({
    filePath: args.filePath,
    offset: args.offset,
    hexData: "90".repeat(args.count),
    createBackup: args.createBackup ?? true
  });
}
|
|
14832
|
+
async function handleHexEditor(args) {
  // Read (and optionally overwrite) raw bytes at an offset, hex-editor style.
  // Read mode: returns `length` bytes starting at `offset` as a hex string.
  // Write mode (newHex given): backs the file up to <file>.bak, then writes
  // the decoded bytes in place at `offset` (file is not truncated).
  try {
    // Use the fs API instead of `xxd` + `printf | dd`: POSIX printf lacks
    // \xNN escapes (silent corruption under dash, errors hidden by
    // 2>/dev/null) and shell interpolation of filePath was injectable.
    const fs = await import("node:fs/promises");
    // Snapshot the current bytes before any modification so both modes can
    // report them. Reading past EOF just yields fewer bytes, like xxd did.
    const readHandle = await fs.open(args.filePath, "r");
    let currentHex;
    try {
      const buf = Buffer.alloc(args.length);
      const { bytesRead } = await readHandle.read(buf, 0, args.length, args.offset);
      currentHex = buf.subarray(0, bytesRead).toString("hex");
    } finally {
      await readHandle.close();
    }
    if (args.newHex) {
      const hex = args.newHex.replace(/\s/g, "");
      if (!/^[0-9a-fA-F]*$/.test(hex)) {
        throw new Error("Invalid hex data.");
      }
      // Write mode always creates a backup (matches prior behavior).
      await fs.copyFile(args.filePath, `${args.filePath}.bak`);
      const bytes = Buffer.from(hex, "hex");
      const writeHandle = await fs.open(args.filePath, "r+");
      try {
        // Positioned write: does not truncate (dd conv=notrunc equivalent).
        await writeHandle.write(bytes, 0, bytes.length, args.offset);
      } finally {
        await writeHandle.close();
      }
      return {
        content: [{
          type: "text",
          text: [
            `Hex Editor: ${args.filePath}`,
            `Offset: 0x${args.offset.toString(16)}`,
            "",
            `Previous: ${currentHex}`,
            `New: ${hex}`,
            `Backup: ${args.filePath}.bak`
          ].join("\n")
        }]
      };
    }
    const summary = [
      `Hex Editor (read mode): ${args.filePath}`,
      `Offset: 0x${args.offset.toString(16)} (${args.offset})`,
      `Length: ${args.length} bytes`,
      "",
      `Hex: ${currentHex}`,
      "",
      "To modify, provide newHex parameter"
    ].join("\n");
    return { content: [{ type: "text", text: summary }] };
  } catch (error2) {
    throw new Error(`Hex editor failed: ${error2 instanceof Error ? error2.message : error2}`);
  }
}
|
|
14196
14874
|
var TOOLS = [
|
|
14197
14875
|
{
|
|
14198
14876
|
name: "nm_list_symbols",
|
|
@@ -14413,6 +15091,221 @@ var TOOLS = [
|
|
|
14413
15091
|
},
|
|
14414
15092
|
required: ["filePath", "pattern"]
|
|
14415
15093
|
}
|
|
15094
|
+
},
|
|
15095
|
+
{
|
|
15096
|
+
name: "bin_strings",
|
|
15097
|
+
description: "Extract readable strings (ASCII, Unicode) from a binary file",
|
|
15098
|
+
inputSchema: {
|
|
15099
|
+
type: "object",
|
|
15100
|
+
properties: {
|
|
15101
|
+
filePath: { type: "string", description: "Path to the binary file" },
|
|
15102
|
+
minLength: { type: "number", description: "Minimum string length (default: 4)" },
|
|
15103
|
+
encoding: {
|
|
15104
|
+
type: "string",
|
|
15105
|
+
enum: ["ascii", "unicode", "all"],
|
|
15106
|
+
description: "String encoding to search (default: all)"
|
|
15107
|
+
}
|
|
15108
|
+
},
|
|
15109
|
+
required: ["filePath"]
|
|
15110
|
+
}
|
|
15111
|
+
},
|
|
15112
|
+
{
|
|
15113
|
+
name: "bin_file_info",
|
|
15114
|
+
description: "Get detailed file type information with MIME type",
|
|
15115
|
+
inputSchema: {
|
|
15116
|
+
type: "object",
|
|
15117
|
+
properties: {
|
|
15118
|
+
filePath: { type: "string", description: "Path to the file" }
|
|
15119
|
+
},
|
|
15120
|
+
required: ["filePath"]
|
|
15121
|
+
}
|
|
15122
|
+
},
|
|
15123
|
+
{
|
|
15124
|
+
name: "bin_section_sizes",
|
|
15125
|
+
description: "Get section sizes (text, data, bss) of a binary",
|
|
15126
|
+
inputSchema: {
|
|
15127
|
+
type: "object",
|
|
15128
|
+
properties: {
|
|
15129
|
+
filePath: { type: "string", description: "Path to the binary file" }
|
|
15130
|
+
},
|
|
15131
|
+
required: ["filePath"]
|
|
15132
|
+
}
|
|
15133
|
+
},
|
|
15134
|
+
{
|
|
15135
|
+
name: "bin_objdump_sections",
|
|
15136
|
+
description: "Get section headers with flags using objdump -h",
|
|
15137
|
+
inputSchema: {
|
|
15138
|
+
type: "object",
|
|
15139
|
+
properties: {
|
|
15140
|
+
filePath: { type: "string", description: "Path to the binary file" }
|
|
15141
|
+
},
|
|
15142
|
+
required: ["filePath"]
|
|
15143
|
+
}
|
|
15144
|
+
},
|
|
15145
|
+
{
|
|
15146
|
+
name: "bin_objdump_program_headers",
|
|
15147
|
+
description: "Get program headers and DLL imports using objdump -p",
|
|
15148
|
+
inputSchema: {
|
|
15149
|
+
type: "object",
|
|
15150
|
+
properties: {
|
|
15151
|
+
filePath: { type: "string", description: "Path to the binary file" }
|
|
15152
|
+
},
|
|
15153
|
+
required: ["filePath"]
|
|
15154
|
+
}
|
|
15155
|
+
},
|
|
15156
|
+
{
|
|
15157
|
+
name: "bin_otool_libs",
|
|
15158
|
+
description: "Get dynamic library dependencies for Mach-O files (macOS only)",
|
|
15159
|
+
inputSchema: {
|
|
15160
|
+
type: "object",
|
|
15161
|
+
properties: {
|
|
15162
|
+
filePath: { type: "string", description: "Path to the Mach-O file" }
|
|
15163
|
+
},
|
|
15164
|
+
required: ["filePath"]
|
|
15165
|
+
}
|
|
15166
|
+
},
|
|
15167
|
+
{
|
|
15168
|
+
name: "bin_readelf",
|
|
15169
|
+
description: "Comprehensive ELF structure analysis using readelf",
|
|
15170
|
+
inputSchema: {
|
|
15171
|
+
type: "object",
|
|
15172
|
+
properties: {
|
|
15173
|
+
filePath: { type: "string", description: "Path to the ELF file" },
|
|
15174
|
+
sections: {
|
|
15175
|
+
type: "array",
|
|
15176
|
+
items: { type: "string", enum: ["headers", "sections", "segments", "symbols", "dynamic", "all"] },
|
|
15177
|
+
description: "Sections to analyze (default: all)"
|
|
15178
|
+
}
|
|
15179
|
+
},
|
|
15180
|
+
required: ["filePath"]
|
|
15181
|
+
}
|
|
15182
|
+
},
|
|
15183
|
+
{
|
|
15184
|
+
name: "bin_ldd",
|
|
15185
|
+
description: "Get shared library dependencies using ldd (Linux only)",
|
|
15186
|
+
inputSchema: {
|
|
15187
|
+
type: "object",
|
|
15188
|
+
properties: {
|
|
15189
|
+
filePath: { type: "string", description: "Path to the binary file" }
|
|
15190
|
+
},
|
|
15191
|
+
required: ["filePath"]
|
|
15192
|
+
}
|
|
15193
|
+
},
|
|
15194
|
+
{
|
|
15195
|
+
name: "bin_disassembly",
|
|
15196
|
+
description: "Disassemble binary code using objdump -d",
|
|
15197
|
+
inputSchema: {
|
|
15198
|
+
type: "object",
|
|
15199
|
+
properties: {
|
|
15200
|
+
filePath: { type: "string", description: "Path to the binary file" },
|
|
15201
|
+
symbol: { type: "string", description: "Specific symbol to disassemble (optional)" },
|
|
15202
|
+
startOffset: { type: "number", description: "Line offset for output pagination" },
|
|
15203
|
+
length: { type: "number", description: "Number of lines to output" }
|
|
15204
|
+
},
|
|
15205
|
+
required: ["filePath"]
|
|
15206
|
+
}
|
|
15207
|
+
},
|
|
15208
|
+
{
|
|
15209
|
+
name: "bin_security_audit",
|
|
15210
|
+
description: "Check binary security features (ASLR, PIE, RELRO, stack canary, NX bit)",
|
|
15211
|
+
inputSchema: {
|
|
15212
|
+
type: "object",
|
|
15213
|
+
properties: {
|
|
15214
|
+
filePath: { type: "string", description: "Path to the binary file" }
|
|
15215
|
+
},
|
|
15216
|
+
required: ["filePath"]
|
|
15217
|
+
}
|
|
15218
|
+
},
|
|
15219
|
+
{
|
|
15220
|
+
name: "bin_entropy",
|
|
15221
|
+
description: "Analyze section entropy to detect packed/encrypted sections",
|
|
15222
|
+
inputSchema: {
|
|
15223
|
+
type: "object",
|
|
15224
|
+
properties: {
|
|
15225
|
+
filePath: { type: "string", description: "Path to the binary file" },
|
|
15226
|
+
blockSize: { type: "number", description: "Block size for entropy calculation (default: 1024)" }
|
|
15227
|
+
},
|
|
15228
|
+
required: ["filePath"]
|
|
15229
|
+
}
|
|
15230
|
+
},
|
|
15231
|
+
{
|
|
15232
|
+
name: "bin_import_export",
|
|
15233
|
+
description: "Get detailed import/export table analysis",
|
|
15234
|
+
inputSchema: {
|
|
15235
|
+
type: "object",
|
|
15236
|
+
properties: {
|
|
15237
|
+
filePath: { type: "string", description: "Path to the binary file" }
|
|
15238
|
+
},
|
|
15239
|
+
required: ["filePath"]
|
|
15240
|
+
}
|
|
15241
|
+
},
|
|
15242
|
+
{
|
|
15243
|
+
name: "bin_diff",
|
|
15244
|
+
description: "Byte-level comparison between two binary files",
|
|
15245
|
+
inputSchema: {
|
|
15246
|
+
type: "object",
|
|
15247
|
+
properties: {
|
|
15248
|
+
file1: { type: "string", description: "First binary file" },
|
|
15249
|
+
file2: { type: "string", description: "Second binary file" },
|
|
15250
|
+
contextBytes: { type: "number", description: "Bytes of context to show around differences (default: 32)" }
|
|
15251
|
+
},
|
|
15252
|
+
required: ["file1", "file2"]
|
|
15253
|
+
}
|
|
15254
|
+
},
|
|
15255
|
+
{
|
|
15256
|
+
name: "bin_archive_extract",
|
|
15257
|
+
description: "Extract or list contents of static library (.a) archives",
|
|
15258
|
+
inputSchema: {
|
|
15259
|
+
type: "object",
|
|
15260
|
+
properties: {
|
|
15261
|
+
filePath: { type: "string", description: "Path to the archive file" },
|
|
15262
|
+
outputDir: { type: "string", description: "Output directory for extraction (optional)" },
|
|
15263
|
+
listOnly: { type: "boolean", description: "Only list contents without extracting (default: false)" }
|
|
15264
|
+
},
|
|
15265
|
+
required: ["filePath"]
|
|
15266
|
+
}
|
|
15267
|
+
},
|
|
15268
|
+
{
|
|
15269
|
+
name: "bin_patch_bytes",
|
|
15270
|
+
description: "Patch bytes at a specific offset in a binary file (DESTRUCTIVE)",
|
|
15271
|
+
inputSchema: {
|
|
15272
|
+
type: "object",
|
|
15273
|
+
properties: {
|
|
15274
|
+
filePath: { type: "string", description: "Path to the binary file" },
|
|
15275
|
+
offset: { type: "number", description: "Byte offset to patch" },
|
|
15276
|
+
hexData: { type: "string", description: "Hex bytes to write (e.g., '90 90 90')" },
|
|
15277
|
+
createBackup: { type: "boolean", description: "Create .bak backup before patching (default: true)" }
|
|
15278
|
+
},
|
|
15279
|
+
required: ["filePath", "offset", "hexData"]
|
|
15280
|
+
}
|
|
15281
|
+
},
|
|
15282
|
+
{
|
|
15283
|
+
name: "bin_nop_sled",
|
|
15284
|
+
description: "Insert NOP instructions at a specific offset (DESTRUCTIVE)",
|
|
15285
|
+
inputSchema: {
|
|
15286
|
+
type: "object",
|
|
15287
|
+
properties: {
|
|
15288
|
+
filePath: { type: "string", description: "Path to the binary file" },
|
|
15289
|
+
offset: { type: "number", description: "Byte offset for NOP sled" },
|
|
15290
|
+
count: { type: "number", description: "Number of NOP instructions to insert" },
|
|
15291
|
+
createBackup: { type: "boolean", description: "Create .bak backup (default: true)" }
|
|
15292
|
+
},
|
|
15293
|
+
required: ["filePath", "offset", "count"]
|
|
15294
|
+
}
|
|
15295
|
+
},
|
|
15296
|
+
{
|
|
15297
|
+
name: "bin_hex_editor",
|
|
15298
|
+
description: "Read or modify bytes at a specific offset (hex editor mode)",
|
|
15299
|
+
inputSchema: {
|
|
15300
|
+
type: "object",
|
|
15301
|
+
properties: {
|
|
15302
|
+
filePath: { type: "string", description: "Path to the file" },
|
|
15303
|
+
offset: { type: "number", description: "Byte offset" },
|
|
15304
|
+
length: { type: "number", description: "Number of bytes to read/modify" },
|
|
15305
|
+
newHex: { type: "string", description: "New hex bytes to write (optional - read mode if omitted)" }
|
|
15306
|
+
},
|
|
15307
|
+
required: ["filePath", "offset", "length"]
|
|
15308
|
+
}
|
|
14416
15309
|
}
|
|
14417
15310
|
];
|
|
14418
15311
|
server.setRequestHandler(ListToolsRequestSchema, async () => {
|
|
@@ -14454,6 +15347,40 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
|
|
|
14454
15347
|
return await handleXxdExtract(args);
|
|
14455
15348
|
case "xxd_find_pattern":
|
|
14456
15349
|
return await handleXxdFindPattern(args);
|
|
15350
|
+
case "bin_strings":
|
|
15351
|
+
return await handleStrings(args);
|
|
15352
|
+
case "bin_file_info":
|
|
15353
|
+
return await handleFileInfo(args);
|
|
15354
|
+
case "bin_section_sizes":
|
|
15355
|
+
return await handleSectionSizes(args);
|
|
15356
|
+
case "bin_objdump_sections":
|
|
15357
|
+
return await handleObjdumpSections(args);
|
|
15358
|
+
case "bin_objdump_program_headers":
|
|
15359
|
+
return await handleObjdumpProgramHeaders(args);
|
|
15360
|
+
case "bin_otool_libs":
|
|
15361
|
+
return await handleOtoolLibs(args);
|
|
15362
|
+
case "bin_readelf":
|
|
15363
|
+
return await handleReadelf(args);
|
|
15364
|
+
case "bin_ldd":
|
|
15365
|
+
return await handleLdd(args);
|
|
15366
|
+
case "bin_disassembly":
|
|
15367
|
+
return await handleDisassembly(args);
|
|
15368
|
+
case "bin_security_audit":
|
|
15369
|
+
return await handleSecurityAudit(args);
|
|
15370
|
+
case "bin_entropy":
|
|
15371
|
+
return await handleEntropyAnalysis(args);
|
|
15372
|
+
case "bin_import_export":
|
|
15373
|
+
return await handleImportExport(args);
|
|
15374
|
+
case "bin_diff":
|
|
15375
|
+
return await handleBinaryDiff(args);
|
|
15376
|
+
case "bin_archive_extract":
|
|
15377
|
+
return await handleArchiveExtract(args);
|
|
15378
|
+
case "bin_patch_bytes":
|
|
15379
|
+
return await handlePatchBytes(args);
|
|
15380
|
+
case "bin_nop_sled":
|
|
15381
|
+
return await handleNopSled(args);
|
|
15382
|
+
case "bin_hex_editor":
|
|
15383
|
+
return await handleHexEditor(args);
|
|
14457
15384
|
default:
|
|
14458
15385
|
throw new Error(`Unknown tool: ${name}`);
|
|
14459
15386
|
}
|